From 233b32c699d7ed32d0fbdbf54681c7a214a190f1 Mon Sep 17 00:00:00 2001 From: Ivo Brett Date: Wed, 8 Jan 2025 21:05:58 +0000 Subject: [PATCH 1/4] Vectorizing Udemy Subtitle files --- ...izing_subtitles_from_llm_engineering.ipynb | 4200 +++++++++++++++++ .../subtitles/srts/59166281/en_US.srt | 55 + .../subtitles/srts/59166281/ja_JP.srt | 43 + .../subtitles/srts/59166281/ko_KR.srt | 55 + .../subtitles/srts/59166317/en_US.srt | 124 + .../subtitles/srts/59166317/ja_JP.srt | 103 + .../subtitles/srts/59166317/ko_KR.srt | 121 + .../subtitles/srts/59166353/en_US.srt | 61 + .../subtitles/srts/59166353/ja_JP.srt | 52 + .../subtitles/srts/59166353/ko_KR.srt | 61 + .../subtitles/srts/59166421/en_US.srt | 319 ++ .../subtitles/srts/59166421/ja_JP.srt | 283 ++ .../subtitles/srts/59166421/ko_KR.srt | 313 ++ .../subtitles/srts/59166443/en_US.srt | 202 + .../subtitles/srts/59166443/ja_JP.srt | 166 + .../subtitles/srts/59166443/ko_KR.srt | 199 + .../subtitles/srts/59166453/en_US.srt | 583 +++ .../subtitles/srts/59166453/ja_JP.srt | 511 ++ .../subtitles/srts/59166453/ko_KR.srt | 568 +++ .../subtitles/srts/59166461/en_US.srt | 610 +++ .../subtitles/srts/59166461/ja_JP.srt | 571 +++ .../subtitles/srts/59166461/ko_KR.srt | 607 +++ .../subtitles/srts/59166465/en_US.srt | 469 ++ .../subtitles/srts/59166465/ja_JP.srt | 421 ++ .../subtitles/srts/59166465/ko_KR.srt | 466 ++ .../subtitles/srts/59166481/en_US.srt | 889 ++++ .../subtitles/srts/59166481/ja_JP.srt | 799 ++++ .../subtitles/srts/59166481/ko_KR.srt | 859 ++++ .../subtitles/srts/59166847/en_US.srt | 106 + .../subtitles/srts/59166847/ja_JP.srt | 91 + .../subtitles/srts/59166847/ko_KR.srt | 97 + .../subtitles/srts/59166915/en_US.srt | 592 +++ .../subtitles/srts/59166915/ja_JP.srt | 523 ++ .../subtitles/srts/59166915/ko_KR.srt | 577 +++ .../subtitles/srts/59166919/en_US.srt | 43 + .../subtitles/srts/59166919/ja_JP.srt | 40 + .../subtitles/srts/59166919/ko_KR.srt | 43 + .../subtitles/srts/59166947/en_US.srt | 313 ++ 
.../subtitles/srts/59166947/ja_JP.srt | 259 + .../subtitles/srts/59166947/ko_KR.srt | 304 ++ .../subtitles/srts/59166949/en_US.srt | 463 ++ .../subtitles/srts/59166949/ja_JP.srt | 391 ++ .../subtitles/srts/59166949/ko_KR.srt | 442 ++ .../subtitles/srts/59166951/en_US.srt | 343 ++ .../subtitles/srts/59166951/ja_JP.srt | 322 ++ .../subtitles/srts/59166951/ko_KR.srt | 340 ++ .../subtitles/srts/59166981/en_US.srt | 211 + .../subtitles/srts/59166981/ja_JP.srt | 169 + .../subtitles/srts/59166981/ko_KR.srt | 208 + .../subtitles/srts/59167007/en_US.srt | 205 + .../subtitles/srts/59167007/ja_JP.srt | 163 + .../subtitles/srts/59167007/ko_KR.srt | 205 + .../subtitles/srts/59167009/en_US.srt | 304 ++ .../subtitles/srts/59167009/ja_JP.srt | 250 + .../subtitles/srts/59167009/ko_KR.srt | 286 ++ .../subtitles/srts/59167015/en_US.srt | 424 ++ .../subtitles/srts/59167015/ja_JP.srt | 391 ++ .../subtitles/srts/59167015/ko_KR.srt | 418 ++ .../subtitles/srts/59169985/en_US.srt | 73 + .../subtitles/srts/59169985/ja_JP.srt | 58 + .../subtitles/srts/59169985/ko_KR.srt | 70 + .../subtitles/srts/59169991/en_US.srt | 127 + .../subtitles/srts/59169991/ja_JP.srt | 97 + .../subtitles/srts/59169991/ko_KR.srt | 127 + .../subtitles/srts/59170025/en_US.srt | 163 + .../subtitles/srts/59170025/ja_JP.srt | 136 + .../subtitles/srts/59170025/ko_KR.srt | 154 + .../subtitles/srts/59170037/en_US.srt | 70 + .../subtitles/srts/59170037/ja_JP.srt | 58 + .../subtitles/srts/59170037/ko_KR.srt | 70 + .../subtitles/srts/59170043/en_US.srt | 412 ++ .../subtitles/srts/59170043/ja_JP.srt | 334 ++ .../subtitles/srts/59170043/ko_KR.srt | 397 ++ .../subtitles/srts/59170055/en_US.srt | 472 ++ .../subtitles/srts/59170055/ja_JP.srt | 400 ++ .../subtitles/srts/59170055/ko_KR.srt | 451 ++ .../subtitles/srts/59170057/en_US.srt | 34 + .../subtitles/srts/59170057/ja_JP.srt | 25 + .../subtitles/srts/59170057/ko_KR.srt | 34 + .../subtitles/srts/59170093/en_US.srt | 229 + .../subtitles/srts/59170093/ja_JP.srt | 169 + 
.../subtitles/srts/59170093/ko_KR.srt | 220 + .../subtitles/srts/59170107/en_US.srt | 58 + .../subtitles/srts/59170107/ja_JP.srt | 43 + .../subtitles/srts/59170107/ko_KR.srt | 52 + .../subtitles/srts/59170135/en_US.srt | 154 + .../subtitles/srts/59170135/ja_JP.srt | 130 + .../subtitles/srts/59170135/ko_KR.srt | 151 + .../subtitles/srts/59170165/en_US.srt | 130 + .../subtitles/srts/59170165/ja_JP.srt | 100 + .../subtitles/srts/59170165/ko_KR.srt | 127 + .../subtitles/srts/59170223/en_US.srt | 220 + .../subtitles/srts/59170223/ja_JP.srt | 196 + .../subtitles/srts/59170223/ko_KR.srt | 211 + .../subtitles/srts/59170227/en_US.srt | 508 ++ .../subtitles/srts/59170227/ja_JP.srt | 439 ++ .../subtitles/srts/59170227/ko_KR.srt | 502 ++ .../subtitles/srts/59170233/en_US.srt | 475 ++ .../subtitles/srts/59170233/ja_JP.srt | 406 ++ .../subtitles/srts/59170233/ko_KR.srt | 451 ++ .../subtitles/srts/59170235/en_US.srt | 565 +++ .../subtitles/srts/59170235/ja_JP.srt | 472 ++ .../subtitles/srts/59170235/ko_KR.srt | 550 +++ .../subtitles/srts/59170255/en_US.srt | 463 ++ .../subtitles/srts/59170255/ja_JP.srt | 409 ++ .../subtitles/srts/59170255/ko_KR.srt | 454 ++ .../subtitles/srts/59170291/en_US.srt | 562 +++ .../subtitles/srts/59170291/ja_JP.srt | 502 ++ .../subtitles/srts/59170291/ko_KR.srt | 556 +++ .../subtitles/srts/59170297/en_US.srt | 604 +++ .../subtitles/srts/59170297/ja_JP.srt | 511 ++ .../subtitles/srts/59170297/ko_KR.srt | 592 +++ .../subtitles/srts/59271655/en_US.srt | 529 +++ .../subtitles/srts/59271655/ja_JP.srt | 430 ++ .../subtitles/srts/59271655/ko_KR.srt | 520 ++ .../subtitles/srts/59295363/en_US.srt | 64 + .../subtitles/srts/59295363/ja_JP.srt | 43 + .../subtitles/srts/59295363/ko_KR.srt | 58 + .../subtitles/srts/59295377/en_US.srt | 229 + .../subtitles/srts/59295377/ja_JP.srt | 190 + .../subtitles/srts/59295377/ko_KR.srt | 214 + .../subtitles/srts/59295423/en_US.srt | 223 + .../subtitles/srts/59295423/ja_JP.srt | 175 + .../subtitles/srts/59295423/ko_KR.srt | 214 + 
.../subtitles/srts/59295429/en_US.srt | 286 ++ .../subtitles/srts/59295429/ja_JP.srt | 217 + .../subtitles/srts/59295429/ko_KR.srt | 271 ++ .../subtitles/srts/59295431/en_US.srt | 211 + .../subtitles/srts/59295431/ja_JP.srt | 175 + .../subtitles/srts/59295431/ko_KR.srt | 211 + .../subtitles/srts/59295435/en_US.srt | 133 + .../subtitles/srts/59295435/ja_JP.srt | 106 + .../subtitles/srts/59295435/ko_KR.srt | 124 + .../subtitles/srts/59295439/en_US.srt | 325 ++ .../subtitles/srts/59295439/ja_JP.srt | 283 ++ .../subtitles/srts/59295439/ko_KR.srt | 313 ++ .../subtitles/srts/59295441/en_US.srt | 286 ++ .../subtitles/srts/59295441/ja_JP.srt | 238 + .../subtitles/srts/59295441/ko_KR.srt | 274 ++ .../subtitles/srts/59295451/en_US.srt | 523 ++ .../subtitles/srts/59295451/ja_JP.srt | 427 ++ .../subtitles/srts/59295451/ko_KR.srt | 502 ++ .../subtitles/srts/59295459/en_US.srt | 508 ++ .../subtitles/srts/59295459/ja_JP.srt | 418 ++ .../subtitles/srts/59295459/ko_KR.srt | 490 ++ .../subtitles/srts/59295493/en_US.srt | 115 + .../subtitles/srts/59295493/ja_JP.srt | 97 + .../subtitles/srts/59295493/ko_KR.srt | 109 + .../subtitles/srts/59295527/en_US.srt | 79 + .../subtitles/srts/59295527/ja_JP.srt | 70 + .../subtitles/srts/59295527/ko_KR.srt | 76 + .../subtitles/srts/59295541/en_US.srt | 205 + .../subtitles/srts/59295541/ja_JP.srt | 184 + .../subtitles/srts/59295541/ko_KR.srt | 205 + .../subtitles/srts/59295545/en_US.srt | 73 + .../subtitles/srts/59295545/ja_JP.srt | 58 + .../subtitles/srts/59295545/ko_KR.srt | 70 + .../subtitles/srts/59295549/en_US.srt | 235 + .../subtitles/srts/59295549/ja_JP.srt | 220 + .../subtitles/srts/59295549/ko_KR.srt | 232 + .../subtitles/srts/59295553/en_US.srt | 217 + .../subtitles/srts/59295553/ja_JP.srt | 175 + .../subtitles/srts/59295553/ko_KR.srt | 211 + .../subtitles/srts/59295579/en_US.srt | 403 ++ .../subtitles/srts/59295579/ja_JP.srt | 355 ++ .../subtitles/srts/59295579/ko_KR.srt | 385 ++ .../subtitles/srts/59295583/en_US.srt | 373 ++ 
.../subtitles/srts/59295583/ja_JP.srt | 313 ++ .../subtitles/srts/59295583/ko_KR.srt | 367 ++ .../subtitles/srts/59295587/en_US.srt | 337 ++ .../subtitles/srts/59295587/ja_JP.srt | 295 ++ .../subtitles/srts/59295587/ko_KR.srt | 325 ++ .../subtitles/srts/59295599/en_US.srt | 319 ++ .../subtitles/srts/59295599/ja_JP.srt | 277 ++ .../subtitles/srts/59295599/ko_KR.srt | 310 ++ .../subtitles/srts/59295601/en_US.srt | 352 ++ .../subtitles/srts/59295601/ja_JP.srt | 295 ++ .../subtitles/srts/59295601/ko_KR.srt | 346 ++ .../subtitles/srts/59295607/en_US.srt | 349 ++ .../subtitles/srts/59295607/ja_JP.srt | 283 ++ .../subtitles/srts/59295607/ko_KR.srt | 337 ++ .../subtitles/srts/59295609/en_US.srt | 400 ++ .../subtitles/srts/59295609/ja_JP.srt | 337 ++ .../subtitles/srts/59295609/ko_KR.srt | 382 ++ .../subtitles/srts/59295619/en_US.srt | 475 ++ .../subtitles/srts/59295619/ja_JP.srt | 430 ++ .../subtitles/srts/59295619/ko_KR.srt | 463 ++ .../subtitles/srts/59297561/en_US.srt | 58 + .../subtitles/srts/59297561/ja_JP.srt | 49 + .../subtitles/srts/59297561/ko_KR.srt | 55 + .../subtitles/srts/59297575/en_US.srt | 49 + .../subtitles/srts/59297575/ja_JP.srt | 43 + .../subtitles/srts/59297575/ko_KR.srt | 46 + .../subtitles/srts/59297585/en_US.srt | 85 + .../subtitles/srts/59297585/ja_JP.srt | 67 + .../subtitles/srts/59297585/ko_KR.srt | 85 + .../subtitles/srts/59297593/en_US.srt | 151 + .../subtitles/srts/59297593/ja_JP.srt | 127 + .../subtitles/srts/59297593/ko_KR.srt | 148 + .../subtitles/srts/59297595/en_US.srt | 124 + .../subtitles/srts/59297595/ja_JP.srt | 103 + .../subtitles/srts/59297595/ko_KR.srt | 124 + .../subtitles/srts/59297599/en_US.srt | 133 + .../subtitles/srts/59297599/ja_JP.srt | 100 + .../subtitles/srts/59297599/ko_KR.srt | 127 + .../subtitles/srts/59297601/en_US.srt | 202 + .../subtitles/srts/59297601/ja_JP.srt | 175 + .../subtitles/srts/59297601/ko_KR.srt | 199 + .../subtitles/srts/59297603/en_US.srt | 184 + .../subtitles/srts/59297603/ja_JP.srt | 151 + 
.../subtitles/srts/59297603/ko_KR.srt | 175 + .../subtitles/srts/59297609/en_US.srt | 208 + .../subtitles/srts/59297609/ja_JP.srt | 181 + .../subtitles/srts/59297609/ko_KR.srt | 205 + .../subtitles/srts/59297693/en_US.srt | 295 ++ .../subtitles/srts/59297693/ja_JP.srt | 226 + .../subtitles/srts/59297693/ko_KR.srt | 286 ++ .../subtitles/srts/59297721/en_US.srt | 394 ++ .../subtitles/srts/59297721/ja_JP.srt | 313 ++ .../subtitles/srts/59297721/ko_KR.srt | 391 ++ .../subtitles/srts/59297723/en_US.srt | 421 ++ .../subtitles/srts/59297723/ja_JP.srt | 358 ++ .../subtitles/srts/59297723/ko_KR.srt | 421 ++ .../subtitles/srts/59297733/en_US.srt | 460 ++ .../subtitles/srts/59297733/ja_JP.srt | 385 ++ .../subtitles/srts/59297733/ko_KR.srt | 442 ++ .../subtitles/srts/59297735/en_US.srt | 532 +++ .../subtitles/srts/59297735/ja_JP.srt | 478 ++ .../subtitles/srts/59297735/ko_KR.srt | 520 ++ .../subtitles/srts/59297743/en_US.srt | 526 +++ .../subtitles/srts/59297743/ja_JP.srt | 460 ++ .../subtitles/srts/59297743/ko_KR.srt | 514 ++ .../subtitles/srts/59297749/en_US.srt | 586 +++ .../subtitles/srts/59297749/ja_JP.srt | 520 ++ .../subtitles/srts/59297749/ko_KR.srt | 571 +++ .../subtitles/srts/59297773/en_US.srt | 754 +++ .../subtitles/srts/59297773/ja_JP.srt | 616 +++ .../subtitles/srts/59297773/ko_KR.srt | 736 +++ .../subtitles/srts/59471979/en_US.srt | 253 + .../subtitles/srts/59471979/ja_JP.srt | 211 + .../subtitles/srts/59471979/ko_KR.srt | 241 + .../subtitles/srts/59472007/en_US.srt | 367 ++ .../subtitles/srts/59472007/ja_JP.srt | 286 ++ .../subtitles/srts/59472007/ko_KR.srt | 352 ++ .../subtitles/srts/59472011/en_US.srt | 427 ++ .../subtitles/srts/59472011/ja_JP.srt | 337 ++ .../subtitles/srts/59472011/ko_KR.srt | 415 ++ .../subtitles/srts/59472017/en_US.srt | 532 +++ .../subtitles/srts/59472017/ja_JP.srt | 460 ++ .../subtitles/srts/59472017/ko_KR.srt | 508 ++ .../subtitles/srts/59472027/en_US.srt | 655 +++ .../subtitles/srts/59472027/ja_JP.srt | 526 +++ 
.../subtitles/srts/59472027/ko_KR.srt | 625 +++ .../subtitles/srts/59472067/en_US.srt | 82 + .../subtitles/srts/59472067/ja_JP.srt | 76 + .../subtitles/srts/59472067/ko_KR.srt | 82 + .../subtitles/srts/59472137/en_US.srt | 61 + .../subtitles/srts/59472137/ja_JP.srt | 43 + .../subtitles/srts/59472137/ko_KR.srt | 58 + .../subtitles/srts/59472307/en_US.srt | 325 ++ .../subtitles/srts/59472307/ja_JP.srt | 256 + .../subtitles/srts/59472307/ko_KR.srt | 310 ++ .../subtitles/srts/59472333/en_US.srt | 178 + .../subtitles/srts/59472333/ja_JP.srt | 160 + .../subtitles/srts/59472333/ko_KR.srt | 175 + .../subtitles/srts/59472383/en_US.srt | 418 ++ .../subtitles/srts/59472383/ja_JP.srt | 334 ++ .../subtitles/srts/59472383/ko_KR.srt | 412 ++ .../subtitles/srts/59472413/en_US.srt | 292 ++ .../subtitles/srts/59472413/ja_JP.srt | 271 ++ .../subtitles/srts/59472413/ko_KR.srt | 283 ++ .../subtitles/srts/59472421/en_US.srt | 295 ++ .../subtitles/srts/59472421/ja_JP.srt | 253 + .../subtitles/srts/59472421/ko_KR.srt | 283 ++ .../subtitles/srts/59472425/en_US.srt | 295 ++ .../subtitles/srts/59472425/ja_JP.srt | 226 + .../subtitles/srts/59472425/ko_KR.srt | 292 ++ .../subtitles/srts/59472429/en_US.srt | 400 ++ .../subtitles/srts/59472429/ja_JP.srt | 337 ++ .../subtitles/srts/59472429/ko_KR.srt | 382 ++ .../subtitles/srts/59472441/en_US.srt | 334 ++ .../subtitles/srts/59472441/ja_JP.srt | 286 ++ .../subtitles/srts/59472441/ko_KR.srt | 331 ++ .../subtitles/srts/59472463/en_US.srt | 325 ++ .../subtitles/srts/59472463/ja_JP.srt | 262 + .../subtitles/srts/59472463/ko_KR.srt | 313 ++ .../subtitles/srts/59472491/en_US.srt | 508 ++ .../subtitles/srts/59472491/ja_JP.srt | 421 ++ .../subtitles/srts/59472491/ko_KR.srt | 496 ++ .../subtitles/srts/59472503/en_US.srt | 400 ++ .../subtitles/srts/59472503/ja_JP.srt | 331 ++ .../subtitles/srts/59472503/ko_KR.srt | 391 ++ .../subtitles/srts/59472505/en_US.srt | 496 ++ .../subtitles/srts/59472505/ja_JP.srt | 403 ++ .../subtitles/srts/59472505/ko_KR.srt | 469 
++ .../subtitles/srts/59472693/en_US.srt | 70 + .../subtitles/srts/59472693/ja_JP.srt | 67 + .../subtitles/srts/59472693/ko_KR.srt | 70 + .../subtitles/srts/59472873/en_US.srt | 157 + .../subtitles/srts/59472873/ja_JP.srt | 127 + .../subtitles/srts/59472873/ko_KR.srt | 151 + .../subtitles/srts/59472883/en_US.srt | 85 + .../subtitles/srts/59472883/ja_JP.srt | 79 + .../subtitles/srts/59472883/ko_KR.srt | 85 + .../subtitles/srts/59473019/en_US.srt | 163 + .../subtitles/srts/59473019/ja_JP.srt | 118 + .../subtitles/srts/59473019/ko_KR.srt | 160 + .../subtitles/srts/59473021/en_US.srt | 787 +++ .../subtitles/srts/59473021/ja_JP.srt | 670 +++ .../subtitles/srts/59473021/ko_KR.srt | 763 +++ .../subtitles/srts/59473071/en_US.srt | 280 ++ .../subtitles/srts/59473071/ja_JP.srt | 232 + .../subtitles/srts/59473071/ko_KR.srt | 274 ++ .../subtitles/srts/59473089/en_US.srt | 394 ++ .../subtitles/srts/59473089/ja_JP.srt | 349 ++ .../subtitles/srts/59473089/ko_KR.srt | 385 ++ .../subtitles/srts/59473101/en_US.srt | 328 ++ .../subtitles/srts/59473101/ja_JP.srt | 283 ++ .../subtitles/srts/59473101/ko_KR.srt | 322 ++ .../subtitles/srts/59473137/en_US.srt | 394 ++ .../subtitles/srts/59473137/ja_JP.srt | 352 ++ .../subtitles/srts/59473137/ko_KR.srt | 385 ++ .../subtitles/srts/59473147/en_US.srt | 466 ++ .../subtitles/srts/59473147/ja_JP.srt | 409 ++ .../subtitles/srts/59473147/ko_KR.srt | 460 ++ .../subtitles/srts/59473159/en_US.srt | 520 ++ .../subtitles/srts/59473159/ja_JP.srt | 448 ++ .../subtitles/srts/59473159/ko_KR.srt | 508 ++ .../subtitles/srts/59473191/en_US.srt | 493 ++ .../subtitles/srts/59473191/ja_JP.srt | 433 ++ .../subtitles/srts/59473191/ko_KR.srt | 481 ++ .../subtitles/srts/59473201/en_US.srt | 457 ++ .../subtitles/srts/59473201/ja_JP.srt | 373 ++ .../subtitles/srts/59473201/ko_KR.srt | 433 ++ .../subtitles/srts/59503703/en_US.srt | 166 + .../subtitles/srts/59503703/ja_JP.srt | 136 + .../subtitles/srts/59503703/ko_KR.srt | 163 + .../subtitles/srts/59503705/en_US.srt | 
247 + .../subtitles/srts/59503705/ja_JP.srt | 211 + .../subtitles/srts/59503705/ko_KR.srt | 244 + .../subtitles/srts/59504769/en_US.srt | 301 ++ .../subtitles/srts/59504769/ja_JP.srt | 250 + .../subtitles/srts/59504769/ko_KR.srt | 292 ++ .../subtitles/srts/59504785/en_US.srt | 220 + .../subtitles/srts/59504785/ja_JP.srt | 178 + .../subtitles/srts/59504785/ko_KR.srt | 217 + .../subtitles/srts/59504887/en_US.srt | 391 ++ .../subtitles/srts/59504887/ja_JP.srt | 352 ++ .../subtitles/srts/59504887/ko_KR.srt | 379 ++ .../subtitles/srts/59505329/en_US.srt | 160 + .../subtitles/srts/59505329/ja_JP.srt | 142 + .../subtitles/srts/59505329/ko_KR.srt | 157 + .../subtitles/srts/59505337/en_US.srt | 160 + .../subtitles/srts/59505337/ja_JP.srt | 142 + .../subtitles/srts/59505337/ko_KR.srt | 157 + .../subtitles/srts/59506507/en_US.srt | 70 + .../subtitles/srts/59506507/ja_JP.srt | 58 + .../subtitles/srts/59506507/ko_KR.srt | 70 + .../subtitles/srts/59506611/en_US.srt | 268 ++ .../subtitles/srts/59506611/ja_JP.srt | 217 + .../subtitles/srts/59506611/ko_KR.srt | 265 ++ .../subtitles/srts/59506713/en_US.srt | 271 ++ .../subtitles/srts/59506713/ja_JP.srt | 202 + .../subtitles/srts/59506713/ko_KR.srt | 250 + .../subtitles/srts/59506929/en_US.srt | 256 + .../subtitles/srts/59506929/ja_JP.srt | 202 + .../subtitles/srts/59506929/ko_KR.srt | 250 + .../subtitles/srts/59507017/en_US.srt | 217 + .../subtitles/srts/59507017/ja_JP.srt | 193 + .../subtitles/srts/59507017/ko_KR.srt | 214 + .../subtitles/srts/59507313/en_US.srt | 163 + .../subtitles/srts/59507313/ja_JP.srt | 127 + .../subtitles/srts/59507313/ko_KR.srt | 148 + .../subtitles/srts/59507329/en_US.srt | 274 ++ .../subtitles/srts/59507329/ja_JP.srt | 244 + .../subtitles/srts/59507329/ko_KR.srt | 271 ++ .../subtitles/srts/59507423/en_US.srt | 445 ++ .../subtitles/srts/59507423/ja_JP.srt | 400 ++ .../subtitles/srts/59507423/ko_KR.srt | 433 ++ .../subtitles/srts/59507435/en_US.srt | 190 + .../subtitles/srts/59507435/ja_JP.srt | 136 + 
.../subtitles/srts/59507435/ko_KR.srt | 178 + .../subtitles/srts/59507489/en_US.srt | 205 + .../subtitles/srts/59507489/ja_JP.srt | 178 + .../subtitles/srts/59507489/ko_KR.srt | 205 + .../subtitles/srts/59507635/en_US.srt | 382 ++ .../subtitles/srts/59507635/ja_JP.srt | 352 ++ .../subtitles/srts/59507635/ko_KR.srt | 373 ++ .../subtitles/srts/59507687/en_US.srt | 454 ++ .../subtitles/srts/59507687/ja_JP.srt | 373 ++ .../subtitles/srts/59507687/ko_KR.srt | 442 ++ .../subtitles/srts/59507785/en_US.srt | 790 ++++ .../subtitles/srts/59507785/ja_JP.srt | 667 +++ .../subtitles/srts/59507785/ko_KR.srt | 754 +++ .../subtitles/srts/59508055/en_US.srt | 82 + .../subtitles/srts/59508055/ja_JP.srt | 61 + .../subtitles/srts/59508055/ko_KR.srt | 79 + .../subtitles/srts/59508057/en_US.srt | 91 + .../subtitles/srts/59508057/ja_JP.srt | 76 + .../subtitles/srts/59508057/ko_KR.srt | 85 + .../subtitles/srts/59508121/en_US.srt | 148 + .../subtitles/srts/59508121/ja_JP.srt | 121 + .../subtitles/srts/59508121/ko_KR.srt | 145 + .../subtitles/srts/59508175/en_US.srt | 274 ++ .../subtitles/srts/59508175/ja_JP.srt | 226 + .../subtitles/srts/59508175/ko_KR.srt | 262 + .../subtitles/srts/59508289/en_US.srt | 673 +++ .../subtitles/srts/59508289/ja_JP.srt | 577 +++ .../subtitles/srts/59508289/ko_KR.srt | 646 +++ .../subtitles/srts/59508297/en_US.srt | 97 + .../subtitles/srts/59508297/ja_JP.srt | 79 + .../subtitles/srts/59508297/ko_KR.srt | 91 + .../subtitles/srts/59509185/en_US.srt | 526 +++ .../subtitles/srts/59509185/ja_JP.srt | 439 ++ .../subtitles/srts/59509185/ko_KR.srt | 499 ++ .../subtitles/srts/59665127/en_US.srt | 238 + .../subtitles/srts/59665127/ja_JP.srt | 202 + .../subtitles/srts/59665127/ko_KR.srt | 229 + .../subtitles/srts/59665129/en_US.srt | 334 ++ .../subtitles/srts/59665129/ja_JP.srt | 286 ++ .../subtitles/srts/59665129/ko_KR.srt | 328 ++ .../subtitles/srts/59666211/en_US.srt | 136 + .../subtitles/srts/59666211/ja_JP.srt | 121 + .../subtitles/srts/59666211/ko_KR.srt | 133 + 
.../subtitles/srts/59666831/en_US.srt | 334 ++ .../subtitles/srts/59666831/ja_JP.srt | 283 ++ .../subtitles/srts/59666831/ko_KR.srt | 328 ++ .../subtitles/srts/59667357/en_US.srt | 187 + .../subtitles/srts/59667357/ja_JP.srt | 151 + .../subtitles/srts/59667357/ko_KR.srt | 184 + .../subtitles/srts/59667365/en_US.srt | 580 +++ .../subtitles/srts/59667365/ja_JP.srt | 475 ++ .../subtitles/srts/59667365/ko_KR.srt | 556 +++ .../subtitles/srts/59667829/en_US.srt | 148 + .../subtitles/srts/59667829/ja_JP.srt | 130 + .../subtitles/srts/59667829/ko_KR.srt | 145 + .../subtitles/srts/59667841/en_US.srt | 139 + .../subtitles/srts/59667841/ja_JP.srt | 103 + .../subtitles/srts/59667841/ko_KR.srt | 133 + .../subtitles/srts/59668027/en_US.srt | 202 + .../subtitles/srts/59668027/ja_JP.srt | 163 + .../subtitles/srts/59668027/ko_KR.srt | 199 + .../subtitles/srts/59668181/en_US.srt | 361 ++ .../subtitles/srts/59668181/ja_JP.srt | 292 ++ .../subtitles/srts/59668181/ko_KR.srt | 352 ++ .../subtitles/srts/59668923/en_US.srt | 721 +++ .../subtitles/srts/59668923/ja_JP.srt | 655 +++ .../subtitles/srts/59668923/ko_KR.srt | 709 +++ .../subtitles/srts/59669049/en_US.srt | 877 ++++ .../subtitles/srts/59669049/ja_JP.srt | 775 +++ .../subtitles/srts/59669049/ko_KR.srt | 865 ++++ .../subtitles/srts/59669211/en_US.srt | 67 + .../subtitles/srts/59669211/ja_JP.srt | 55 + .../subtitles/srts/59669211/ko_KR.srt | 64 + .../subtitles/srts/59669217/en_US.srt | 91 + .../subtitles/srts/59669217/ja_JP.srt | 79 + .../subtitles/srts/59669217/ko_KR.srt | 91 + .../subtitles/srts/59669375/en_US.srt | 202 + .../subtitles/srts/59669375/ja_JP.srt | 169 + .../subtitles/srts/59669375/ko_KR.srt | 196 + .../subtitles/srts/59669389/en_US.srt | 226 + .../subtitles/srts/59669389/ja_JP.srt | 181 + .../subtitles/srts/59669389/ko_KR.srt | 220 + .../subtitles/srts/59669631/en_US.srt | 427 ++ .../subtitles/srts/59669631/ja_JP.srt | 367 ++ .../subtitles/srts/59669631/ko_KR.srt | 421 ++ .../subtitles/srts/59670073/en_US.srt | 331 
++ .../subtitles/srts/59670073/ja_JP.srt | 298 ++ .../subtitles/srts/59670073/ko_KR.srt | 322 ++ .../subtitles/srts/59670087/en_US.srt | 334 ++ .../subtitles/srts/59670087/ja_JP.srt | 289 ++ .../subtitles/srts/59670087/ko_KR.srt | 322 ++ .../subtitles/srts/59670121/en_US.srt | 373 ++ .../subtitles/srts/59670121/ja_JP.srt | 334 ++ .../subtitles/srts/59670121/ko_KR.srt | 370 ++ .../subtitles/srts/59670171/en_US.srt | 409 ++ .../subtitles/srts/59670171/ja_JP.srt | 328 ++ .../subtitles/srts/59670171/ko_KR.srt | 388 ++ .../subtitles/srts/59670259/en_US.srt | 79 + .../subtitles/srts/59670259/ja_JP.srt | 58 + .../subtitles/srts/59670259/ko_KR.srt | 76 + .../subtitles/srts/59670369/en_US.srt | 166 + .../subtitles/srts/59670369/ja_JP.srt | 139 + .../subtitles/srts/59670369/ko_KR.srt | 160 + .../subtitles/srts/59670933/en_US.srt | 199 + .../subtitles/srts/59670933/ja_JP.srt | 148 + .../subtitles/srts/59670933/ko_KR.srt | 187 + .../subtitles/srts/59671221/en_US.srt | 454 ++ .../subtitles/srts/59671221/ja_JP.srt | 367 ++ .../subtitles/srts/59671221/ko_KR.srt | 436 ++ .../subtitles/srts/59671231/en_US.srt | 442 ++ .../subtitles/srts/59671231/ja_JP.srt | 391 ++ .../subtitles/srts/59671231/ko_KR.srt | 436 ++ .../subtitles/srts/59671315/en_US.srt | 469 ++ .../subtitles/srts/59671315/ja_JP.srt | 409 ++ .../subtitles/srts/59671315/ko_KR.srt | 445 ++ .../subtitles/srts/59671441/en_US.srt | 535 +++ .../subtitles/srts/59671441/ja_JP.srt | 457 ++ .../subtitles/srts/59671441/ko_KR.srt | 517 ++ .../subtitles/srts/59671567/en_US.srt | 700 +++ .../subtitles/srts/59671567/ja_JP.srt | 625 +++ .../subtitles/srts/59671567/ko_KR.srt | 682 +++ .../subtitles/srts/59673431/en_US.srt | 94 + .../subtitles/srts/59673431/ja_JP.srt | 73 + .../subtitles/srts/59673431/ko_KR.srt | 94 + .../subtitles/srts/59673449/en_US.srt | 115 + .../subtitles/srts/59673449/ja_JP.srt | 91 + .../subtitles/srts/59673449/ko_KR.srt | 109 + .../subtitles/srts/59673595/en_US.srt | 283 ++ .../subtitles/srts/59673595/ja_JP.srt | 
247 + .../subtitles/srts/59673595/ko_KR.srt | 277 ++ .../subtitles/srts/59673639/en_US.srt | 352 ++ .../subtitles/srts/59673639/ja_JP.srt | 322 ++ .../subtitles/srts/59673639/ko_KR.srt | 343 ++ .../subtitles/srts/59673663/en_US.srt | 184 + .../subtitles/srts/59673663/ja_JP.srt | 157 + .../subtitles/srts/59673663/ko_KR.srt | 181 + .../subtitles/srts/59673721/en_US.srt | 460 ++ .../subtitles/srts/59673721/ja_JP.srt | 379 ++ .../subtitles/srts/59673721/ko_KR.srt | 442 ++ .../subtitles/srts/60395261/en_US.srt | 529 +++ .../subtitles/srts/60395261/ja_JP.srt | 457 ++ .../subtitles/srts/60395261/ko_KR.srt | 514 ++ .../subtitles/srts/60595637/en_US.srt | 256 + .../subtitles/srts/60595637/ja_JP.srt | 208 + .../subtitles/srts/60595637/ko_KR.srt | 244 + .../subtitles/srts/60614541/en_US.srt | 37 + .../subtitles/srts/60614541/ja_JP.srt | 31 + .../subtitles/srts/60614541/ko_KR.srt | 37 + .../subtitles/srts/60614589/en_US.srt | 208 + .../subtitles/srts/60614589/ja_JP.srt | 187 + .../subtitles/srts/60614589/ko_KR.srt | 208 + .../subtitles/srts/60614591/en_US.srt | 82 + .../subtitles/srts/60614591/ja_JP.srt | 55 + .../subtitles/srts/60614591/ko_KR.srt | 79 + .../subtitles/srts/60616407/en_US.srt | 217 + .../subtitles/srts/60616407/ja_JP.srt | 193 + .../subtitles/srts/60616407/ko_KR.srt | 214 + .../subtitles/srts/60616423/en_US.srt | 262 + .../subtitles/srts/60616423/ja_JP.srt | 193 + .../subtitles/srts/60616423/ko_KR.srt | 259 + .../subtitles/srts/60616493/en_US.srt | 94 + .../subtitles/srts/60616493/ja_JP.srt | 82 + .../subtitles/srts/60616493/ko_KR.srt | 94 + .../subtitles/srts/60616623/en_US.srt | 298 ++ .../subtitles/srts/60616623/ja_JP.srt | 259 + .../subtitles/srts/60616623/ko_KR.srt | 295 ++ .../subtitles/srts/60616629/en_US.srt | 292 ++ .../subtitles/srts/60616629/ja_JP.srt | 256 + .../subtitles/srts/60616629/ko_KR.srt | 289 ++ .../subtitles/srts/60616663/en_US.srt | 568 +++ .../subtitles/srts/60616663/ja_JP.srt | 493 ++ .../subtitles/srts/60616663/ko_KR.srt | 562 +++ 
.../subtitles/srts/60616833/en_US.srt | 118 + .../subtitles/srts/60616833/ja_JP.srt | 94 + .../subtitles/srts/60616833/ko_KR.srt | 115 + .../subtitles/srts/60616845/en_US.srt | 265 ++ .../subtitles/srts/60616845/ja_JP.srt | 241 + .../subtitles/srts/60616845/ko_KR.srt | 259 + .../subtitles/srts/60616855/en_US.srt | 322 ++ .../subtitles/srts/60616855/ja_JP.srt | 265 ++ .../subtitles/srts/60616855/ko_KR.srt | 307 ++ .../subtitles/srts/60616895/en_US.srt | 484 ++ .../subtitles/srts/60616895/ja_JP.srt | 430 ++ .../subtitles/srts/60616895/ko_KR.srt | 478 ++ .../subtitles/srts/60616927/en_US.srt | 628 +++ .../subtitles/srts/60616927/ja_JP.srt | 568 +++ .../subtitles/srts/60616927/ko_KR.srt | 622 +++ .../subtitles/srts/60617163/en_US.srt | 40 + .../subtitles/srts/60617163/ja_JP.srt | 34 + .../subtitles/srts/60617163/ko_KR.srt | 40 + .../subtitles/srts/60617251/en_US.srt | 325 ++ .../subtitles/srts/60617251/ja_JP.srt | 268 ++ .../subtitles/srts/60617251/ko_KR.srt | 307 ++ .../subtitles/srts/60617255/en_US.srt | 334 ++ .../subtitles/srts/60617255/ja_JP.srt | 277 ++ .../subtitles/srts/60617255/ko_KR.srt | 328 ++ .../subtitles/srts/60617259/en_US.srt | 280 ++ .../subtitles/srts/60617259/ja_JP.srt | 235 + .../subtitles/srts/60617259/ko_KR.srt | 271 ++ .../subtitles/srts/60619123/en_US.srt | 82 + .../subtitles/srts/60619123/ja_JP.srt | 58 + .../subtitles/srts/60619123/ko_KR.srt | 82 + .../subtitles/srts/60619149/en_US.srt | 184 + .../subtitles/srts/60619149/ja_JP.srt | 172 + .../subtitles/srts/60619149/ko_KR.srt | 184 + .../subtitles/srts/60619227/en_US.srt | 229 + .../subtitles/srts/60619227/ja_JP.srt | 196 + .../subtitles/srts/60619227/ko_KR.srt | 226 + .../subtitles/srts/60619247/en_US.srt | 259 + .../subtitles/srts/60619247/ja_JP.srt | 217 + .../subtitles/srts/60619247/ko_KR.srt | 256 + .../subtitles/srts/60619275/en_US.srt | 190 + .../subtitles/srts/60619275/ja_JP.srt | 160 + .../subtitles/srts/60619275/ko_KR.srt | 178 + .../subtitles/srts/60619281/en_US.srt | 334 ++ 
.../subtitles/srts/60619281/ja_JP.srt | 271 ++ .../subtitles/srts/60619281/ko_KR.srt | 322 ++ .../subtitles/srts/60619289/en_US.srt | 199 + .../subtitles/srts/60619289/ja_JP.srt | 175 + .../subtitles/srts/60619289/ko_KR.srt | 196 + .../subtitles/srts/60619299/en_US.srt | 259 + .../subtitles/srts/60619299/ja_JP.srt | 214 + .../subtitles/srts/60619299/ko_KR.srt | 247 + .../subtitles/srts/60619429/en_US.srt | 112 + .../subtitles/srts/60619429/ja_JP.srt | 85 + .../subtitles/srts/60619429/ko_KR.srt | 106 + .../subtitles/srts/60619439/en_US.srt | 133 + .../subtitles/srts/60619439/ja_JP.srt | 106 + .../subtitles/srts/60619439/ko_KR.srt | 127 + .../subtitles/srts/60619447/en_US.srt | 184 + .../subtitles/srts/60619447/ja_JP.srt | 151 + .../subtitles/srts/60619447/ko_KR.srt | 181 + .../subtitles/srts/60619501/en_US.srt | 328 ++ .../subtitles/srts/60619501/ja_JP.srt | 274 ++ .../subtitles/srts/60619501/ko_KR.srt | 322 ++ .../subtitles/srts/60619577/en_US.srt | 124 + .../subtitles/srts/60619577/ja_JP.srt | 100 + .../subtitles/srts/60619577/ko_KR.srt | 112 + .../subtitles/srts/60619619/en_US.srt | 121 + .../subtitles/srts/60619619/ja_JP.srt | 91 + .../subtitles/srts/60619619/ko_KR.srt | 112 + .../subtitles/srts/60619651/en_US.srt | 211 + .../subtitles/srts/60619651/ja_JP.srt | 154 + .../subtitles/srts/60619651/ko_KR.srt | 196 + .../subtitles/srts/60619721/en_US.srt | 418 ++ .../subtitles/srts/60619721/ja_JP.srt | 340 ++ .../subtitles/srts/60619721/ko_KR.srt | 406 ++ .../subtitles/srts/60619883/en_US.srt | 142 + .../subtitles/srts/60619883/ja_JP.srt | 124 + .../subtitles/srts/60619883/ko_KR.srt | 142 + .../subtitles/srts/60620025/en_US.srt | 298 ++ .../subtitles/srts/60620025/ja_JP.srt | 253 + .../subtitles/srts/60620025/ko_KR.srt | 292 ++ .../subtitles/srts/60620143/en_US.srt | 448 ++ .../subtitles/srts/60620143/ja_JP.srt | 379 ++ .../subtitles/srts/60620143/ko_KR.srt | 436 ++ .../subtitles/srts/60620169/en_US.srt | 430 ++ .../subtitles/srts/60620169/ja_JP.srt | 352 ++ 
.../subtitles/srts/60620169/ko_KR.srt | 421 ++ .../subtitles/srts/60620375/en_US.srt | 115 + .../subtitles/srts/60620375/ja_JP.srt | 94 + .../subtitles/srts/60620375/ko_KR.srt | 115 + .../subtitles/srts/60620395/en_US.srt | 181 + .../subtitles/srts/60620395/ja_JP.srt | 136 + .../subtitles/srts/60620395/ko_KR.srt | 175 + .../subtitles/srts/60620397/en_US.srt | 196 + .../subtitles/srts/60620397/ja_JP.srt | 136 + .../subtitles/srts/60620397/ko_KR.srt | 187 + .../subtitles/srts/60622463/en_US.srt | 328 ++ .../subtitles/srts/60622463/ja_JP.srt | 274 ++ .../subtitles/srts/60622463/ko_KR.srt | 325 ++ 652 files changed, 187932 insertions(+) create mode 100644 week5/community-contributions/day3 - vectorizing_subtitles_from_llm_engineering.ipynb create mode 100755 week5/community-contributions/subtitles/srts/59166281/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59166281/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59166281/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59166317/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59166317/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59166317/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59166353/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59166353/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59166353/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59166421/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59166421/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59166421/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59166443/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59166443/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59166443/ko_KR.srt create 
mode 100755 week5/community-contributions/subtitles/srts/59166453/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59166453/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59166453/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59166461/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59166461/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59166461/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59166465/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59166465/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59166465/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59166481/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59166481/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59166481/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59166847/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59166847/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59166847/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59166915/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59166915/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59166915/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59166919/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59166919/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59166919/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59166947/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59166947/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59166947/ko_KR.srt create mode 
100755 week5/community-contributions/subtitles/srts/59166949/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59166949/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59166949/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59166951/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59166951/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59166951/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59166981/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59166981/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59166981/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59167007/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59167007/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59167007/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59167009/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59167009/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59167009/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59167015/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59167015/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59167015/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59169985/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59169985/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59169985/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59169991/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59169991/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59169991/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59170025/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59170025/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59170025/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59170037/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59170037/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59170037/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59170043/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59170043/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59170043/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59170055/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59170055/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59170055/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59170057/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59170057/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59170057/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59170093/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59170093/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59170093/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59170107/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59170107/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59170107/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59170135/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59170135/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59170135/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59170165/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59170165/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59170165/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59170223/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59170223/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59170223/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59170227/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59170227/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59170227/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59170233/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59170233/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59170233/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59170235/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59170235/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59170235/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59170255/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59170255/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59170255/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59170291/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59170291/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59170291/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59170297/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59170297/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59170297/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59271655/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59271655/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59271655/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295363/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295363/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295363/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295377/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295377/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295377/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295423/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295423/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295423/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295429/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295429/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295429/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295431/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295431/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295431/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295435/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295435/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295435/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295439/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295439/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295439/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59295441/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295441/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295441/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295451/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295451/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295451/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295459/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295459/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295459/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295493/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295493/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295493/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295527/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295527/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295527/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295541/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295541/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295541/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295545/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295545/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295545/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295549/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295549/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295549/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59295553/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295553/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295553/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295579/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295579/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295579/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295583/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295583/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295583/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295587/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295587/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295587/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295599/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295599/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295599/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295601/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295601/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295601/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295607/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295607/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295607/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59295609/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295609/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295609/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59295619/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59295619/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59295619/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59297561/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297561/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297561/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59297575/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297575/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297575/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59297585/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297585/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297585/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59297593/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297593/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297593/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59297595/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297595/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297595/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59297599/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297599/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297599/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59297601/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297601/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297601/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59297603/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297603/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297603/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59297609/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297609/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297609/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59297693/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297693/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297693/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59297721/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297721/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297721/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59297723/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297723/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297723/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59297733/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297733/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297733/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59297735/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297735/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297735/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59297743/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297743/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297743/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59297749/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297749/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297749/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59297773/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59297773/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59297773/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59471979/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59471979/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59471979/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472007/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472007/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472007/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472011/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472011/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472011/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472017/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472017/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472017/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472027/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472027/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472027/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472067/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472067/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472067/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59472137/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472137/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472137/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472307/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472307/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472307/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472333/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472333/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472333/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472383/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472383/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472383/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472413/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472413/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472413/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472421/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472421/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472421/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472425/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472425/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472425/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472429/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472429/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472429/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59472441/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472441/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472441/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472463/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472463/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472463/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472491/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472491/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472491/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472503/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472503/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472503/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472505/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472505/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472505/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472693/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472693/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472693/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472873/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472873/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472873/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59472883/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59472883/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59472883/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59473019/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59473019/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59473019/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59473021/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59473021/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59473021/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59473071/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59473071/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59473071/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59473089/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59473089/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59473089/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59473101/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59473101/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59473101/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59473137/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59473137/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59473137/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59473147/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59473147/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59473147/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59473159/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59473159/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59473159/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59473191/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59473191/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59473191/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59473201/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59473201/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59473201/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59503703/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59503703/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59503703/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59503705/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59503705/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59503705/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59504769/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59504769/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59504769/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59504785/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59504785/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59504785/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59504887/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59504887/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59504887/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59505329/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59505329/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59505329/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59505337/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59505337/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59505337/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59506507/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59506507/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59506507/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59506611/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59506611/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59506611/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59506713/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59506713/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59506713/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59506929/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59506929/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59506929/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59507017/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59507017/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59507017/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59507313/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59507313/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59507313/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59507329/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59507329/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59507329/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59507423/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59507423/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59507423/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59507435/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59507435/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59507435/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59507489/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59507489/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59507489/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59507635/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59507635/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59507635/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59507687/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59507687/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59507687/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59507785/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59507785/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59507785/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59508055/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59508055/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59508055/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59508057/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59508057/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59508057/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59508121/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59508121/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59508121/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59508175/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59508175/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59508175/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59508289/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59508289/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59508289/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59508297/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59508297/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59508297/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59509185/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59509185/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59509185/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59665127/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59665127/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59665127/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59665129/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59665129/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59665129/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59666211/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59666211/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59666211/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59666831/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59666831/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59666831/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59667357/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59667357/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59667357/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59667365/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59667365/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59667365/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59667829/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59667829/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59667829/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59667841/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59667841/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59667841/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59668027/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59668027/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59668027/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59668181/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59668181/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59668181/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59668923/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59668923/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59668923/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59669049/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59669049/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59669049/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59669211/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59669211/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59669211/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59669217/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59669217/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59669217/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59669375/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59669375/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59669375/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59669389/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59669389/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59669389/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59669631/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59669631/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59669631/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59670073/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59670073/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59670073/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59670087/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59670087/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59670087/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59670121/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59670121/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59670121/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59670171/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59670171/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59670171/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59670259/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59670259/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59670259/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59670369/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59670369/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59670369/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59670933/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59670933/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59670933/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59671221/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59671221/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59671221/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59671231/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59671231/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59671231/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59671315/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59671315/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59671315/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/59671441/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59671441/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59671441/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59671567/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59671567/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59671567/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59673431/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59673431/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59673431/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59673449/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59673449/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59673449/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59673595/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59673595/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59673595/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59673639/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59673639/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59673639/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59673663/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59673663/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59673663/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/59673721/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/59673721/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/59673721/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/60395261/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60395261/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60395261/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60595637/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60595637/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60595637/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60614541/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60614541/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60614541/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60614589/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60614589/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60614589/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60614591/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60614591/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60614591/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60616407/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60616407/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60616407/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60616423/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60616423/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60616423/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60616493/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60616493/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60616493/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/60616623/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60616623/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60616623/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60616629/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60616629/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60616629/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60616663/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60616663/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60616663/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60616833/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60616833/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60616833/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60616845/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60616845/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60616845/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60616855/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60616855/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60616855/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60616895/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60616895/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60616895/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60616927/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60616927/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60616927/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/60617163/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60617163/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60617163/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60617251/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60617251/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60617251/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60617255/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60617255/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60617255/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60617259/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60617259/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60617259/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60619123/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619123/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619123/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60619149/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619149/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619149/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60619227/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619227/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619227/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60619247/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619247/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619247/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/60619275/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619275/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619275/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60619281/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619281/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619281/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60619289/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619289/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619289/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60619299/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619299/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619299/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60619429/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619429/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619429/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60619439/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619439/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619439/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60619447/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619447/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619447/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60619501/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619501/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619501/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/60619577/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619577/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619577/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60619619/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619619/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619619/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60619651/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619651/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619651/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60619721/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619721/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619721/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60619883/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60619883/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60619883/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60620025/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60620025/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60620025/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60620143/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60620143/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60620143/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60620169/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60620169/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60620169/ko_KR.srt create mode 100755 
week5/community-contributions/subtitles/srts/60620375/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60620375/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60620375/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60620395/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60620395/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60620395/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60620397/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60620397/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60620397/ko_KR.srt create mode 100755 week5/community-contributions/subtitles/srts/60622463/en_US.srt create mode 100755 week5/community-contributions/subtitles/srts/60622463/ja_JP.srt create mode 100755 week5/community-contributions/subtitles/srts/60622463/ko_KR.srt diff --git a/week5/community-contributions/day3 - vectorizing_subtitles_from_llm_engineering.ipynb b/week5/community-contributions/day3 - vectorizing_subtitles_from_llm_engineering.ipynb new file mode 100644 index 0000000..5076f8f --- /dev/null +++ b/week5/community-contributions/day3 - vectorizing_subtitles_from_llm_engineering.ipynb @@ -0,0 +1,4200 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "dfe37963-1af6-44fc-a841-8e462443f5e6", + "metadata": {}, + "source": [ + "## Udemy Video Subtitle Vectorization (Expert on LLM engineering) \n", + "\n", + "This project will uses subtitle files from Ed Donners excellent LLM engineering course on Udemy.\n", + "\n", + "These can be downloaded using the following process:\n", + "- Useing an android phone, download Udemy app and open the LLM engineering course. \n", + "- There is option to download the videos as single files or section wise. 
\n", + "- Download them and along with those videos subs or cc are also downloaded as .srt’s.\n", + "- Plug in your laptop to the android phone using USB and select file transfer in the notification.\n", + "- Open a file explorer and copy the subtitle files (srt format)\n", + "- Here’s the location of subs in android \"internal storage/android/data/com.udemy.android/files/udemy-subtitle-downloads\"\n", + "\n", + "the raw srt files are stored in the folder \"subtitles/srts\". The code below will use the langchain textloader but will preprocess the srt files to remove the timestamps.\n", + "\n", + "### Note: this is only for educational and testing purposes and you should contact Ed Donnner to seek his permission if you want to use the subtitles." + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "id": "ba2779af-84ef-4227-9e9e-6eaf0df87e77", + "metadata": {}, + "outputs": [], + "source": [ + "# imports\n", + "\n", + "import os\n", + "import glob\n", + "from dotenv import load_dotenv\n", + "import gradio as gr" + ] + }, + { + "cell_type": "code", + "execution_count": 61, + "id": "802137aa-8a74-45e0-a487-d1974927d7ca", + "metadata": {}, + "outputs": [], + "source": [ + "# imports for langchain and Chroma and plotly\n", + "\n", + "from langchain.document_loaders import DirectoryLoader, TextLoader\n", + "from langchain.text_splitter import CharacterTextSplitter\n", + "from langchain.schema import Document\n", + "from langchain_openai import OpenAIEmbeddings, ChatOpenAI\n", + "from langchain_chroma import Chroma\n", + "import numpy as np\n", + "from sklearn.manifold import TSNE\n", + "import plotly.graph_objects as go\n", + "import re" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "id": "58c85082-e417-4708-9efe-81a5d55d1424", + "metadata": {}, + "outputs": [], + "source": [ + "# price is a factor for our company, so we're going to use a low cost model\n", + "\n", + "MODEL = \"gpt-4o-mini\"\n", + "db_name = \"vector_db\"" + ] + }, + { + 
"cell_type": "code", + "execution_count": 42, + "id": "ee78efcb-60fe-449e-a944-40bab26261af", + "metadata": {}, + "outputs": [], + "source": [ + "# Load environment variables in a file called .env\n", + "\n", + "load_dotenv()\n", + "os.environ['OPENAI_API_KEY'] = os.getenv('OPENAI_API_KEY', 'your-key-if-not-using-env')" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "id": "730711a9-6ffe-4eee-8f48-d6cfb7314905", + "metadata": {}, + "outputs": [], + "source": [ + "# Read in documents using LangChain's loaders\n", + "# Take everything in all the sub-folders of our knowledgebase\n", + "\n", + "folders = glob.glob(\"subtitles/srts/*\")\n", + "\n", + "# With thanks to CG and Jon R, students on the course, for this fix needed for some users \n", + "text_loader_kwargs = {'encoding': 'utf-8'}\n", + "# If that doesn't work, some Windows users might need to uncomment the next line instead\n", + "# text_loader_kwargs={'autodetect_encoding': True}\n", + "\n", + "def preprocess_srt_content(content):\n", + " \"\"\"\n", + " Preprocess the content of an SRT file to remove timing information and the WEBVTT header.\n", + " \"\"\"\n", + " # Remove the WEBVTT header\n", + " content = re.sub(r'^WEBVTT\\s*', '', content, flags=re.IGNORECASE)\n", + " # Remove timing lines (e.g., 00:00.680 --> 00:08.540)\n", + " content = re.sub(r'\\d{2}:\\d{2}\\.\\d{3} --> \\d{2}:\\d{2}\\.\\d{3}', '', content)\n", + " # Remove extra newlines and strip leading/trailing whitespace\n", + " return \"\\n\".join(line.strip() for line in content.splitlines() if line.strip())\n", + "\n", + "documents = []\n", + "for folder in folders:\n", + " video_number = os.path.basename(folder)\n", + " loader = DirectoryLoader(folder, glob=\"**/en_US.srt\", loader_cls=TextLoader)\n", + " folder_docs = loader.load()\n", + "\n", + " for doc in folder_docs:\n", + " # Preprocess the document content\n", + " cleaned_content = preprocess_srt_content(doc.page_content)\n", + " # Replace the original content with the 
cleaned content\n", + " doc.page_content = cleaned_content\n", + " # Add metadata\n", + " doc.metadata[\"video_number\"] = video_number\n", + " documents.append(doc)\n" + ] + }, + { + "cell_type": "markdown", + "id": "f065d4b1-80b7-4e15-abd4-60a83e752ea8", + "metadata": {}, + "source": [ + "# Please note:\n", + "\n", + "In the next cell, we split the text into chunks.\n", + "\n", + "If you have problems, you can try to fix them by changing the chunk_size from 1,000 to 2,000 and the chunk_overlap from 200 to 400. \n", + "This shouldn't be required; but if it happens to you, please make that change! \n", + "(Note that LangChain may give a warning about a chunk being larger than 1,000 - this can be safely ignored)." + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "id": "7310c9c8-03c1-4efc-a104-5e89aec6db1a", + "metadata": {}, + "outputs": [], + "source": [ + "text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=200)\n", + "chunks = text_splitter.split_documents(documents)" + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "id": "cd06e02f-6d9b-44cc-a43d-e1faa8acc7bb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "217" + ] + }, + "execution_count": 45, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(chunks)" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "id": "2c54b4b6-06da-463d-bee7-4dd456c2b887", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Video numbers found: 60616407, 59170043, 59507329, 59505329, 60614541, 59471979, 59166453, 59295587, 59295545, 59670259, 59166421, 59295493, 59166461, 59166919, 60616845, 59472873, 59668027, 59472017, 59668181, 60614589, 59473021, 59166443, 59507017, 60619721, 59170055, 59665129, 59295439, 59673721, 59472441, 59507423, 59473201, 59472011, 59671567, 60616927, 59170297, 59667365, 60620395, 59295599, 59669375, 59507435, 59297749, 59297599, 59297603, 59472491, 
59297595, 60616663, 59170165, 59472383, 59506713, 59297561, 60620397, 59166951, 59472503, 59295609, 59670933, 59170291, 59295429, 59473071, 59472027, 59166949, 60616629, 60619227, 59297733, 59669211, 59473191, 59667829, 59295423, 59170037, 59170025, 59170227, 59671231, 59673449, 59503703, 59669631, 59166353, 59671441, 59673663, 59668923, 60619619, 59170255, 59508289, 59507785, 60619299, 60619501, 60616623, 59473147, 59170135, 59473089, 59295435, 59472425, 59295579, 59669389, 60617259, 59673639, 59508297, 60619247, 60619289, 59472137, 59669049, 59472693, 60620143, 59295363, 59503705, 59167009, 59508175, 59669217, 59166915, 59295441, 59508055, 59667841, 59472421, 60619123, 59297721, 59508057, 59297601, 59297735, 59670369, 59170223, 59271655, 59297773, 59170057, 59504785, 59473159, 59166281, 60617251, 59295459, 59472413, 59665127, 59295619, 59670121, 59666831, 60619447, 59670171, 60616493, 59473101, 59473019, 59666211, 59671315, 60619439, 59295451, 59297723, 59673431, 59169991, 59472333, 60619149, 59295607, 60619281, 59297575, 59472429, 60619883, 59670073, 59167007, 59671221, 59295553, 59166981, 60595637, 59170235, 59297593, 60614591, 59504887, 60616895, 59166947, 60620025, 60617163, 60622463, 59506611, 59166481, 59472505, 59295431, 59472463, 59167015, 59170233, 60395261, 59508121, 59166847, 60620169, 60616423, 59473137, 59170107, 59297743, 59506507, 59472883, 59295541, 59507489, 60619577, 59507687, 59506929, 59170093, 59166465, 59166317, 59295601, 59509185, 60619651, 59169985, 59505337, 59295527, 59667357, 59673595, 59295549, 59297693, 60620375, 59297585, 59670087, 59472067, 59295583, 60616855, 59295377, 60619275, 59504769, 59507635, 60616833, 59297609, 60619429, 59472307, 59507313, 60617255, 59472007\n" + ] + } + ], + "source": [ + "video_numbers = set(chunk.metadata['video_number'] for chunk in chunks)\n", + "print(f\"Video numbers found: {', '.join(video_numbers)}\")" + ] + }, + { + "cell_type": "markdown", + "id": "77f7d2a6-ccfa-425b-a1c3-5e55b23bd013", + 
"metadata": {}, + "source": [ + "## A sidenote on Embeddings, and \"Auto-Encoding LLMs\"\n", + "\n", + "We will be mapping each chunk of text into a Vector that represents the meaning of the text, known as an embedding.\n", + "\n", + "OpenAI offers a model to do this, which we will use by calling their API with some LangChain code.\n", + "\n", + "This model is an example of an \"Auto-Encoding LLM\" which generates an output given a complete input.\n", + "It's different to all the other LLMs we've discussed today, which are known as \"Auto-Regressive LLMs\", and generate future tokens based only on past context.\n", + "\n", + "Another example of an Auto-Encoding LLMs is BERT from Google. In addition to embedding, Auto-encoding LLMs are often used for classification.\n", + "\n", + "### Sidenote\n", + "\n", + "In week 8 we will return to RAG and vector embeddings, and we will use an open-source vector encoder so that the data never leaves our computer - that's an important consideration when building enterprise systems and the data needs to remain internal." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 47, + "id": "78998399-ac17-4e28-b15f-0b5f51e6ee23", + "metadata": {}, + "outputs": [], + "source": [ + "# Put the chunks of data into a Vector Store that associates a Vector Embedding with each chunk\n", + "\n", + "embeddings = OpenAIEmbeddings()\n", + "\n", + "# If you would rather use the free Vector Embeddings from HuggingFace sentence-transformers\n", + "# Then replace embeddings = OpenAIEmbeddings()\n", + "# with:\n", + "# from langchain.embeddings import HuggingFaceEmbeddings\n", + "# embeddings = HuggingFaceEmbeddings(model_name=\"sentence-transformers/all-MiniLM-L6-v2\")" + ] + }, + { + "cell_type": "code", + "execution_count": 48, + "id": "763e51ff-5787-4a56-8176-36b7c5796fe3", + "metadata": {}, + "outputs": [], + "source": [ + "# Check if a Chroma Datastore already exists - if so, delete the collection to start from scratch\n", + "\n", + "if os.path.exists(db_name):\n", + " Chroma(persist_directory=db_name, embedding_function=embeddings).delete_collection()" + ] + }, + { + "cell_type": "code", + "execution_count": 49, + "id": "99fe3a37-480f-4d55-be48-120588d5846b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Vectorstore created with 217 documents\n" + ] + } + ], + "source": [ + "# Create our Chroma vectorstore!\n", + "\n", + "vectorstore = Chroma.from_documents(documents=chunks, embedding=embeddings, persist_directory=db_name)\n", + "print(f\"Vectorstore created with {vectorstore._collection.count()} documents\")" + ] + }, + { + "cell_type": "code", + "execution_count": 50, + "id": "057868f6-51a6-4087-94d1-380145821550", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The vectors have 1,536 dimensions\n" + ] + } + ], + "source": [ + "# Get one vector and find how many dimensions it has\n", + "\n", + "collection = vectorstore._collection\n", + "sample_embedding = collection.get(limit=1, 
include=[\"embeddings\"])[\"embeddings\"][0]\n", + "dimensions = len(sample_embedding)\n", + "print(f\"The vectors have {dimensions:,} dimensions\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "61e393a0-dd4c-419f-842f-60c1cb3b716b", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "b0d45462-a818-441c-b010-b85b32bcf618", + "metadata": {}, + "source": [ + "## Visualizing the Vector Store\n", + "\n", + "Let's take a minute to look at the documents and their embedding vectors to see what's going on." + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "id": "cfb855dc-1610-4aaf-8e5f-68c26ce640a5", + "metadata": {}, + "outputs": [], + "source": [ + "# Convert the video numbers into unique colors that we can visualize\n", + "import hashlib\n", + "\n", + "def video_numbers_to_hex_colors(video_numbers):\n", + " return [f\"#{hashlib.sha256(v.encode()).hexdigest()[:6]}\" for v in video_numbers]\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 58, + "id": "b98adf5e-d464-4bd2-9bdf-bc5b6770263b", + "metadata": {}, + "outputs": [], + "source": [ + "# Prework\n", + "\n", + "result = collection.get(include=['embeddings', 'documents', 'metadatas'])\n", + "vectors = np.array(result['embeddings'])\n", + "documents = result['documents']\n", + "video_numbers = [metadata['video_number'] for metadata in result['metadatas']]\n", + "colors = video_numbers_to_hex_colors(strings)\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 55, + "id": "427149d5-e5d8-4abd-bb6f-7ef0333cca21", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.plotly.v1+json": { + "config": { + "plotlyServerURL": "https://plot.ly" + }, + "data": [ + { + "hoverinfo": "text", + "marker": { + "color": [ + "#d01f72", + "#75195e", + "#3678a7", + "#5b3f83", + "#74a788", + "#571122", + "#4099c1", + "#659222", + "#188ca3", + "#6d4052", + "#35303c", + "#a9e927", + "#29fa15", + "#71c500", + 
"#9b9d6e", + "#cf7e83", + "#badd6d", + "#85fa26", + "#22463b", + "#ce865d", + "#f59c06", + "#011995", + "#793548", + "#ad8b14", + "#d937bd", + "#2b9f18", + "#046e5c", + "#75b5e3", + "#c959de", + "#72e048", + "#8e8cab", + "#20f2c3", + "#64f999", + "#e69670", + "#6a0fce", + "#d65c3a", + "#7bee34", + "#4f86b8", + "#b43417", + "#4dfb77", + "#2ae342", + "#c3e1f2", + "#12897b", + "#2b3af3", + "#7ea8e9", + "#6ad041", + "#0bdacc", + "#99fe53", + "#4aaf9f", + "#d156c8", + "#505bd9", + "#dc152c", + "#b52bf6", + "#9baca0", + "#a03134", + "#d43c00", + "#5af098", + "#2c168d", + "#c6016b", + "#f090af", + "#482281", + "#39821f", + "#e0a8df", + "#480c89", + "#08808d", + "#ac5faf", + "#0faf59", + "#79c82a", + "#e6e164", + "#0d2037", + "#8afd40", + "#2e1afc", + "#3ec815", + "#fbfef2", + "#a63fa4", + "#b27d2e", + "#ca3592", + "#b9fd23", + "#ac9648", + "#804ce2", + "#9b5e28", + "#a64739", + "#c457d7", + "#de30e4", + "#1f6ab0", + "#6ff3c5", + "#6df6ca", + "#ed694d", + "#2fef1a", + "#335dcf", + "#845aa9", + "#574e28", + "#dc95ec", + "#b2140a", + "#15ae86", + "#70d1d9", + "#6f745a", + "#b3dba5", + "#108c41", + "#268bba", + "#913568", + "#1a6fdf", + "#422abb", + "#cb725f", + "#fe62a5", + "#dfc6c7", + "#b25d7b", + "#bd53b1", + "#796278", + "#048452", + "#c6eff5", + "#d24e5d", + "#fe8e92", + "#22398f", + "#3e5237", + "#8069bc", + "#7740be", + "#cc8ec0", + "#b280bb", + "#91f4db", + "#ac55ba", + "#c97596", + "#116019", + "#43c2e8", + "#2a2d25", + "#fc2b74", + "#ae7afe", + "#92b4fa", + "#dd8cd7", + "#4862ce", + "#af0f59", + "#ad6bd0", + "#3f0a72", + "#e01073", + "#144ada", + "#5cb9ca", + "#51d0da", + "#d6d07a", + "#b61e76", + "#474ff9", + "#68bece", + "#d01b19", + "#ee26df", + "#2ebca4", + "#539908", + "#ec0a37", + "#1a5613", + "#da28db", + "#246fa5", + "#bbfe83", + "#d54222", + "#580c96", + "#02cada", + "#996ff1", + "#e2a239", + "#ae5204", + "#4ce72d", + "#2cde7f", + "#b64eac", + "#591ab9", + "#a958c9", + "#696eaa", + "#4c4355", + "#6a6c06", + "#df5d2e", + "#9780cf", + "#682d42", + "#efed10", 
+ "#1b312a", + "#dbde1c", + "#e1b5db", + "#a95826", + "#4e797a", + "#10384a", + "#9a5ba2", + "#d34482", + "#8a29da", + "#fb9dce", + "#ff2d6a", + "#50f10d", + "#f8d349", + "#7b4427", + "#11a70e", + "#987252", + "#c932c1", + "#2d7f7d", + "#c1e3c5", + "#0c777d", + "#0f8781", + "#dd889c", + "#799a24", + "#4212f1", + "#e6f378", + "#805527", + "#091a90", + "#a9541c", + "#fcdcad", + "#01f59b", + "#94a85d", + "#426575", + "#7f03bd", + "#2dcfac", + "#52b6df", + "#73e76a", + "#d70d97", + "#601568", + "#d4b1ce", + "#7341ee", + "#bb0ee6", + "#f645e0", + "#1c2c7e", + "#7dd58b", + "#4b9a93", + "#9df332", + "#612b32", + "#b1c27d", + "#3626a5" + ], + "opacity": 0.8, + "size": 5 + }, + "mode": "markers", + "text": [ + "Video: 59506507
Text: Well, I realized that was a whole lot of theory, but I hope it gave you a good intuition that will\nb...", + "Video: 59671315
Text: Okay, so here we are, back in Jupyter Lab, ready for the next use of a frontier model, and see this\n...", + "Video: 60616895
Text: It feels like 100 videos ago that I told you that we were going to have instant gratification with o...", + "Video: 60619275
Text: And we will conclude our expedition into the world of frontier models through their chat interface b...", + "Video: 59472693
Text: Friends.\nI am absolutely exhausted.\nI am exhausted and a little tiny bit traumatized.\nAnd you are so...", + "Video: 59670121
Text: So it's business time right now.\nWe are going to build a Rag pipeline to estimate the price of produ...", + "Video: 59295619
Text: Welcome back to the the moment when we bring it all together into a beautiful user interface.\nBut fi...", + "Video: 60617163
Text: And already that wraps up day two.\nNow that you have built that solution.\nAnd congratulations on tha...", + "Video: 60616423
Text: So I hope you've just enjoyed yourself experimenting with different LMS locally on your box using th...", + "Video: 59170227
Text: Welcome back to Google Colab.\nHere we are ready to explore the wonderful world of Tokenizers.\nSo, uh...", + "Video: 59169985
Text: So I hope you enjoyed that whirlwind tour of Google Colab.\nHere's just a little screenshot example o...", + "Video: 60616927
Text: It's time for our first LM experiment at this point.\nSo some of this you may know well, you may know...", + "Video: 59673721
Text: And here we are in JupyterLab for the last time, and we are looking here at day five, the last day\no...", + "Video: 59508055
Text: I'm so very happy that you've reached this epic moment in the course and that you're hanging in ther...", + "Video: 59670259
Text: It's remarkable.\nBut you are now at the 95% point.\nThere's 5% remaining of this course.\nUh, maybe it...", + "Video: 60616623
Text: So we're now going to start week one of the course when we are going to be looking at exploring fron...", + "Video: 59472383
Text: And welcome back to the week six folder.\nWe're now at day two, which is the second and final stage o...", + "Video: 59670171
Text: So as the very final step on this part four of day two of week eight, we are now going to build an\ne...", + "Video: 59297721
Text: And so now the time has come to talk about the most crucial aspect of Rag, which is the idea of vect...", + "Video: 59297599
Text: Well, that was a sneaky detour I took you on in the last one.\nI hope you enjoyed it though, and I ho...", + "Video: 59507635
Text: Look, I hope you're excited.\nYou really should be.\nYou've been through 80% of the course and it's al...", + "Video: 59669375
Text: Here we are for the day.\n2.1 notebook.\nAnd don't let it be said that I don't ever do anything for yo...", + "Video: 59297733
Text: Welcome back to JupyterLab and welcome to your first experiment with the world of Long Chain.\nLet me...", + "Video: 59670369
Text: It is terrific that you're hanging on in there and making such great progress with this course.\nAs w...", + "Video: 59166281
Text: And with that, amazingly, you completed day one of week two already and that gets you to the 15% poi...", + "Video: 59671567
Text: Well, the first thing you're going to notice is that I don't have a notebook open for you.\nAnd that'...", + "Video: 59297593
Text: And welcome to continuing our journey with Hrag.\nAnd today it's time to unveil Liang Chen.\nSo first,...", + "Video: 59166461
Text: And welcome back to the lab.\nHere we are in Jupyter Lab and we are going to go into week two.\nAnd we...", + "Video: 59167007
Text: Well, how fabulous is that?\nI hope that you are as wowed as I am by our new airline, I assistant and...", + "Video: 59508121
Text: The moment has arrived.\nHere we go.\nWe're in fine tuning.\nWe do fine tuning.\nTrain.\nThere is also a ...", + "Video: 59295579
Text: All right.\nAre you excited to see how this goes?\nLet's give it a try.\nSo in this next section, I cre...", + "Video: 60620375
Text: And with that, we've reached an important milestone.\nThe first week of our eight week journey is com...", + "Video: 59472491
Text: Welcome back.\nIf you are following along with me in JupyterLab, as I hope you are, then you will nee...", + "Video: 59472425
Text: Welcome to week six, day three.\nToday is going to be a day that you will either love or you will hat...", + "Video: 59508057
Text: Actually slight change in plan.\nI'm going to wrap up the day.\nDay three at this point, and say that ...", + "Video: 60619577
Text: And for the final piece of background information, I wanted to take another moment to talk about API...", + "Video: 59170291
Text: Welcome back to Colab and welcome back to our business project.\nSo again our assignment, we are due ...", + "Video: 60619651
Text: I mentioned before an AI company called vellum.\nWhen we were talking about the different questions, ...", + "Video: 59473191
Text: And you thought we'd never get here.\nHere we are in Jupyter Lab, running our fine tuning for a front...", + "Video: 59170297
Text: And here we are in Google Colab, ready for fun with models.\nSo first we do the usual Pip installs an...", + "Video: 59167015
Text: Welcome back to Jupyter Lab and welcome to Day Five's Lab.\nAnd this is going to be lots of creativit...", + "Video: 59170043
Text: Let me enthusiastically welcome you all back to week three of our LLM engineering journey.\nIf you en...", + "Video: 59473147
Text: Well, I'm very relieved.\nI've got that behind me.\nNo more human testing for me.\nWe'll have one final...", + "Video: 59166453
Text: Welcome back and welcome to our continuing JupyterLab experience.\nUh, I'm hopefully going to keep yo...", + "Video: 59166915
Text: Welcome back to the wonderful world of JupyterLab.\nAnd here we are in week two.\nDay three.\nUh, bring...", + "Video: 59667365
Text: Here we are back in Colab, looking at the week seven, day five of the Colab notebooks and I'm on a\nT...", + "Video: 60616845
Text: We're on the home stretch.\nThis is the final step in the environment setup, and it's an easy one.\nIt...", + "Video: 59295459
Text: And welcome back to More Leaderboard Fest as we go through some more leaderboards.\nBut this time we'...", + "Video: 59471979
Text: So we now turn to the parts of the problem, which is perhaps, let's say, not as glamorous as some\nof...", + "Video: 59503705
Text: And so now we talk about quantization the q and q Laura.\nQ stands for quantized quantized.\nLaura.\nAn...", + "Video: 59472505
Text: So the good news is that this is the very final video about data set curation.\nYou were probably fed...", + "Video: 59669217
Text: And welcome to the next part of visualizing the data.\nAnd just very quickly to show it to you in 3D....", + "Video: 59671221
Text: I gotta tell you, I don't like to toot my horn a whole lot, but I do think that I've done a great\njo...", + "Video: 59503703
Text: Well.\nHello there everybody.\nI am so grateful that you've made it through to the start of week seven...", + "Video: 59473201
Text: Well, before we do a postmortem on what happened, let's just quickly look at the standing the rankin...", + "Video: 60622463
Text: In this video, we're going to set up a full data science environment for Mac users.\nIn the next vide...", + "Video: 60619299
Text: Well, I hope you found that both educational and enjoyable.\nAs we went through and learned so much a...", + "Video: 59295607
Text: So to revisit then the solution that we built in the previous day and talk about the metrics.\nAs I s...", + "Video: 59297575
Text: Well, welcome to the final part on rag.\nAnd this is the session where you go from being a rag expert...", + "Video: 59507687
Text: It's time for action, everybody.\nWe've set up our colab.\nHere we are, week seven, day three.\nWe've g...", + "Video: 59671441
Text: And welcome once more to our favorite place to be Jupyter Lab, the Paradise for a data scientist exp...", + "Video: 59673431
Text: And here we have it.\nThe user interface is completed.\nThe extra notification came through on my phon...", + "Video: 59473137
Text: Let's get straight to it.\nSo the place where you can see everything that's going on and get knee dee...", + "Video: 59166421
Text: Welcome back to the radio day in the lab.\nMore to do.\nLet's keep going.\nWhere we left off is we had ...", + "Video: 59295599
Text: Welcome to the Jupyter Lab for day four.\nIt's going to look very familiar because it's actually I've...", + "Video: 59669631
Text: Here we are in our favorite place to be in JupyterLab, ready for some coding and a lot of coding tha...", + "Video: 59673663
Text: But wait, there's more.\nWe need to add some more to the user interface just to make it look more coo...", + "Video: 59506929
Text: And we return to the hugging face open LLM leaderboard.\nThe first place you go when selecting your b...", + "Video: 59504785
Text: So at this point we're going to talk about hyperparameters.\nAnd we're going to introduce three of th...", + "Video: 59505337
Text: So we're now going to look at four bit quantization, the rather remarkable effect of reducing the pr...", + "Video: 59271655
Text: So here we are on Hugging Face's main landing page at Hugging Face Core.\nA URL you know.\nWell, since...", + "Video: 59472883
Text: Okay, time to reveal the results.\nIt has run to completion.\nAnd here it is.\nSo a moment to pause.\nIt...", + "Video: 59673639
Text: And welcome now to the code for our user interface, which we will find in this Python module.\nPrice ...", + "Video: 59472463
Text: So last time we looked at a humble linear regression model with feature engineering, and now we say\n...", + "Video: 59297595
Text: So by the time you're watching this, hopefully you have played yourself with vectors.\nYou've created...", + "Video: 60619149
Text: So we're going to start our exploration into the world of frontier models by playing with the famous...", + "Video: 59297735
Text: And at last the time has come to see rag in action.\nAfter all of this talk, and here we are.\nWe're i...", + "Video: 60616407
Text: And now over to my Mac people.\nAnd I have news for you.\nIt's exactly the same thing.\nYou go to a fav...", + "Video: 59170235
Text: So here we are in Google Colab for our first collaborative session on the cloud using a GPU box.\nOn ...", + "Video: 59472067
Text: So we've covered steps 1 to 4 of the five step strategy.\nAnd that brings us to step five, which is p...", + "Video: 59472011
Text: Welcome everybody.\nSo in the past I've said quite a few times, I am excited to start this this week ...", + "Video: 59295553
Text: Welcome back.\nIn the last part, we gave our GPT four and clawed the challenge of converting a simple...", + "Video: 59297773
Text: Well, I hope you're eager with anticipation for this session in JupyterLab as we finally get to see\n...", + "Video: 59295583
Text: And here we are back in JupyterLab.\nIt's been a minute.\nWe've been working in Colab for last week, a...", + "Video: 59507329
Text: Okay.\nIt's moment of truth time.\nI have just taken our class tester.\nYou remember this class?\nUh, it...", + "Video: 59295429
Text: Continuing our investigation of benchmarks, and this will become more real when we actually see some...", + "Video: 60595637
Text: Here we are back in the Colab, which has been running overnight for me and probably for you too, I\nh...", + "Video: 59668027
Text: And so here we are at the home page for modal.\nAt modal.com spelt model not not model which is confu...", + "Video: 59295527
Text: I'm so happy to welcome you to week four, day four on our journey to LLM Engineering and Mastery.\nHe...", + "Video: 59295377
Text: Just before we go on to some of the more advanced metrics, I want to mention for a second something\n...", + "Video: 59666211
Text: So before we try our new model and one more recap on the models so far and keep notes of this so we\n...", + "Video: 59170107
Text: And once again, it's that moment when you take a pause and congratulate yourself on another day of\ns...", + "Video: 60616833
Text: So I realized that day one of week one has been a pretty long day, and I assure you that the other,\n...", + "Video: 59472413
Text: Wonderful.\nWhere we left off is we had just created the Get Features function, which builds our feat...", + "Video: 59297561
Text: And would you believe at this point you're 55% of the way along the journey?\nUh, it's been a while s...", + "Video: 59669211
Text: Well, we took on a lot today and we seem to have been successful.\nThese red icons that you see on th...", + "Video: 59166981
Text: Welcome to week two, day five.\nThe last day of week two where a lot is coming together.\nI am so grat...", + "Video: 60619227
Text: And now let's move to Claude from anthropic, my favorite model and typically the favorite model of\nm...", + "Video: 60620395
Text: Welcome back to Jupyter Lab, where I want to show you the assignment, the homework exercise for you\n...", + "Video: 59665127
Text: Well hi there everybody.\nI'm not going to give you my usual song and dance about how excited you are...", + "Video: 59668923
Text: Well, welcome back to Jupyter Lab for what will be an epic finale to our time in this platform.\nAnd ...", + "Video: 59504887
Text: Well, here we are again in Google Colab.\nIt's been a minute since we were here, and welcome back to ...", + "Video: 59170165
Text: Welcome, everybody to the last day of week three.\nWeek three.\nDay five.\nWe're here already wrapping ...", + "Video: 60617251
Text: Congratulations are definitely in order.\nYesterday was a mammoth first day on this course and you go...", + "Video: 59166951
Text: All right, back to the lab.\nBack to our project.\nTime to work with tools.\nI am in the week two folde...", + "Video: 60619619
Text: Well, day four was an information dense day.\nI do hope that you learned some something useful here, ...", + "Video: 60616663
Text: Well.\nHi there, this is time for PC people to get set up.\nSo all you Mac people out there, you don't...", + "Video: 59508175
Text: So I'm taking a moment now to explain that the training costs of optimizing a model for this course\n...", + "Video: 59670087
Text: And welcome to part four of day two of week eight.\nUh, there's a lot happening this week, and I have...", + "Video: 59506713
Text: Hi everyone.\nSo the reason I'm so fired up about week seven is that this is the time when we actuall...", + "Video: 60620169
Text: Hopefully you found this super satisfying to be able to have this nice business result and have it c...", + "Video: 59295435
Text: Well, just before we wrap up, let me introduce this week's challenge and talk about what we're going...", + "Video: 59297609
Text: Last week, we worked with models that were able to speed up code by a factor of 60,000 times, which\n...", + "Video: 59507489
Text: Continuing our adventure through hyperparameters for training.\nThe next one is pretty crucial and it...", + "Video: 59295549
Text: And welcome back to our challenge again.\nAnd this time we are working with our beautiful prototype.\n...", + "Video: 59665129
Text: And now let me make this real for you by showing you some, some diagrams, particularly now looking\na...", + "Video: 59169991
Text: Okay, so that was your introduction to Hugging Face.\nAnd now I'm going to turn to a different resour...", + "Video: 59472027
Text: And now the time has come to curate our data set.\nAnd the way we're going to do this is we're going ...", + "Video: 59472307
Text: Welcome to week six.\nDay two a day.\nWhen we get back into the data, we look back in anger at our dat...", + "Video: 59508289
Text: So here we are now, back in the Colab, in the same one that we kicked off in the previous day.\nIt's ...", + "Video: 59472333
Text: Thank you for putting up with me during my foray into traditional machine learning.\nI think it was u...", + "Video: 59295431
Text: Now I want to take a quick moment to give you a flyby of five different ways that llms are used comm...", + "Video: 59673449
Text: Well, I have to tell you that I'm a little bit sad.\nThis is the beginning of the beginning of the en...", + "Video: 59669389
Text: Well.\nHi there.\nSo you've made it to day two of week eight, and I am super grateful that you've been...", + "Video: 59170057
Text: And so at the beginning of this week, we started by talking about hugging face pipelines.\nAnd you us...", + "Video: 59166949
Text: Welcome back to making chatbots.\nLet's keep going.\nSo for the next part we're going to beef up the s...", + "Video: 59473019
Text: Welcome back to an action packed time of of training.\nSo now, after waiting about five minutes when ...", + "Video: 59297585
Text: Before we move on, let me show you one more time this fabulous slide that describes the simple three...", + "Video: 59170255
Text: And welcome back to us continuing our journey through the model class in Hugging Face Transformers l...", + "Video: 60614589
Text: So we're now going to run a large language model directly on your box using a platform called llama,...", + "Video: 59297601
Text: I'm not going to lie, at this point you have every reason to be impatient with me.\nWe've been yammer...", + "Video: 60616629
Text: And welcome back to team PC and Team Mac as we come back together again for a quick video.\nIn this o...", + "Video: 59297749
Text: It's always welcome back to JupyterLab, my favorite place to be.\nAnd now we are, of course in the we...", + "Video: 59170135
Text: Welcome.\nIt's week three.\nIt's day four.\nWe are back on the adventure in open source land, back inve...", + "Video: 59472017
Text: And this is the first time that we'll be coding against our big project of the course.\nWelcome to Ju...", + "Video: 59507017
Text: Welcome to Colab.\nWelcome to the week seven day two Colab.\nAnd just before we try our base model, we...", + "Video: 60619883
Text: And now we've arrived at an exciting moment in our first week.\nThe conclusion of the first week is w...", + "Video: 59508297
Text: What more is there to say, really?\nTomorrow is the day for results.\nA day that very excited indeed a...", + "Video: 60619247
Text: We're going to spend a little bit more time with GPT just to try out a few more interesting things.\n...", + "Video: 59504769
Text: Without further ado, we're going to get stuck into it.\nTalking about Laura.\nLow rank adaptation.\nAnd...", + "Video: 59170233
Text: Welcome back to our continued exploits with Tokenizers.\nWhat we're now going to look at is what's ca...", + "Video: 59671231
Text: And here we are back in the Jupyter Lab for the fast approaching conclusion with a really great proj...", + "Video: 60620397
Text: Well, that's a fantastic result to have now arrived towards the end of week one and having completed...", + "Video: 59170093
Text: I'm delighted to see you again.\nAs we get started with day three of week three of our adventure and ...", + "Video: 59473089
Text: Welcome back.\nSo hopefully you are still impressed by the GPT four mini results.\nThe frontier model ...", + "Video: 60395261
Text: Let's keep going with our project to equip our LM with a tool.\nWe just created this piece of code to...", + "Video: 60617259
Text: I'm excited to introduce you to your first exercise, and I'm looking forward to seeing what you make...", + "Video: 59507313
Text: And it's this time again, when we look at the podium of how our models are performing across the boa...", + "Video: 60619721
Text: Now it's time to talk for a minute about tokens.\nTokens are the individual units which get passed in...", + "Video: 59295451
Text: I know that everybody.\nIt seems like just the other day that we were embarking on our quest together...", + "Video: 59166919
Text: And with that, it concludes our session on tools.\nAnd at this point, you are probably an expert on t...", + "Video: 59295441
Text: Okay, so welcome to our leaderboard fast as we go through a ton of essential leaderboards for your\nc...", + "Video: 59295541
Text: And welcome back.\nYou've just seen GPT four zero spectacularly failed to work on our hard Python con...", + "Video: 59473101
Text: Welcome back.\nSo about ten minutes later, maybe 15 minutes later, the run has completed.\nAnd how do ...", + "Video: 59507423
Text: So you may remember eons ago when we were building our data set.\nAt the end of that, we uploaded our...", + "Video: 59295545
Text: I really hope you've enjoyed this week.\nWe've got tons done.\nWe've experimented with all sorts of ne...", + "Video: 59472503
Text: Welcome back to Jupyter Lab.\nLast time, we looked at some silly models for predicting the price of p...", + "Video: 60614591
Text: The mantra of this course is that the best way to learn is by doing, and we will be doing stuff toge...", + "Video: 59473021
Text: Welcome to our favorite place to be to JupyterLab.\nHere we are again now in day three.\nIn week six.\n...", + "Video: 60617255
Text: I'm now going to talk for a bit about models.\nA term you often hear is the term frontier models, whi...", + "Video: 59667829
Text: Well.\nHello there.\nLook, I know what you're thinking.\nYou're thinking I peaked too early.\nLast week ...", + "Video: 59505329
Text: Welcome back.\nYou may, like me, have just gone off and got a coffee while things loaded back up agai...", + "Video: 59669049
Text: So what you just saw was an ephemeral app, as it's called, which means just a temporary app that you...", + "Video: 60619439
Text: This now brings us to an extremely important property of LMS called the context window that I want t...", + "Video: 59668181
Text: And so it gives me great pleasure to introduce to you the project that I've lined up for you this we...", + "Video: 59472441
Text: Welcome back.\nSo we've been doing the thoroughly distasteful, unsavory work of feature engineering.\n...", + "Video: 59507785
Text: Well, I'm sure you're fed up of me saying that the moment of truth has arrived, but it really has.\nT...", + "Video: 59295587
Text: When I left you, we had just created this simple user interface for converting from Python to C plus...", + "Video: 59166465
Text: Welcome back to the JupyterLab on Gradio day, so you'll remember where we left off.\nWe'd written two...", + "Video: 59473071
Text: Hey, gang.\nLook, I know what you're thinking.\nThis week was supposed to be training week.\nI set it a...", + "Video: 59295423
Text: Welcome to day two of week four, when we get more into leaderboards so that by the end of this, you'...", + "Video: 59297723
Text: So I know what you're thinking.\nYou're thinking, what's going on here?\nWe're on day five.\nWe're on d...", + "Video: 59166947
Text: Well, thank you for coming along for week two, day four.\nWe have lots of good stuff in store today.\n...", + "Video: 59666831
Text: Take one more moment to look at this very nice diagram that lays it all out, and we will move on.\nNo...", + "Video: 59295493
Text: And welcome to week four, day three.\nAs we are about to embark upon another business project which w...", + "Video: 60616855
Text: Now I know what you're thinking.\nWe've been building environments for so long.\nAre we not done yet?\n...", + "Video: 59506611
Text: So in a future day, I'm going to be training, fine tuning a model and creating a fine tuned model.\nA...", + "Video: 60616493
Text: I'll just take a quick moment to introduce myself to convince you that I am actually qualified to be...", + "Video: 59166317
Text: And welcome to week two, day two, as we continue our adventure into the realm of LMS.\nUh, so today, ...", + "Video: 59295439
Text: So I'm aware that there's a big risk that you are getting fed up of leaderboards, because we've done...", + "Video: 59472421
Text: And welcome back to our final time in Jupyter Lab with traditional machine learning.\nIt's almost ove...", + "Video: 59472137
Text: Well, well, well, it's been a long day, but congratulations, you've made it.\nWe've gone through and ...", + "Video: 59297693
Text: So at the end of each week, it's customary for me to give you a challenge, an assignment to do on\nyo...", + "Video: 60620143
Text: So we're going to make a call to GPT four.\nOh, that's going to ask it to look through a set of links...", + "Video: 60619501
Text: I welcome to day four of our time together.\nThis is a very important day.\nToday we're going to be lo...", + "Video: 59297743
Text: And welcome to day five.\nFor reals.\nWe're actually in the proper Jupyter notebook.\nThis time we're i...", + "Video: 59166847
Text: Well, they say that time flies when you're having fun, and it certainly feels like time is flying.\nU...", + "Video: 59170223
Text: Well.\nFantastic.\nIt's coming up to the end of the week, and that means it's coming up to a challenge...", + "Video: 59170037
Text: So how does it feel to be 30% of the way down the journey to being a proficient LLM engineer?\nTake a...", + "Video: 59295609
Text: You must be feeling absolutely exhausted at this point.\nAnd if you are, that is okay.\nYou have done ...", + "Video: 60619281
Text: Well, I'm delighted to welcome you to day three of our eight week journey together.\nAnd today we're ...", + "Video: 59472429
Text: And continuing on our strategy to solve commercial problems with LMS, we get to step four, which is\n...", + "Video: 59167009
Text: Welcome back.\nIt's time to make our full agent framework.\nI'm super excited about this.\nIt's pulling...", + "Video: 59166481
Text: And here, once more we find ourselves in our favorite place, the Jupyter Lab.\nReady to go with weeks...", + "Video: 59670933
Text: I realized my enthusiasm for this project is a bit insane, but I have to tell you that I am very sat...", + "Video: 59670073
Text: Okay, it's time to complete the Rag workflow in our Jupyter Lab on day 2.3.\nWe've got this function ...", + "Video: 59673595
Text: That concludes a mammoth project.\nThree weeks in the making.\nIn the course of those three weeks, sta...", + "Video: 59297603
Text: And I'm delighted to welcome you back to LM engineering on the day that we turn to vectors.\nFinally,...", + "Video: 60614541
Text: I am delighted to welcome you to the first day of our eight weeks together as you join me on this ad...", + "Video: 59667357
Text: Let's now see our results side by side.\nWe started our journey with a constant model that was at $1....", + "Video: 59667841
Text: Now, look, I know that I went through that very fast, but maybe, uh, you're still, uh, blinking\nat t...", + "Video: 59472007
Text: So I hope you enjoyed our first bash at Scrubbing Data, and that you are now looking through the cod...", + "Video: 59507435
Text: So I'm now going to talk about five important hyperparameters for the training process.\nAnd some of ...", + "Video: 59509185
Text: So this is where I left you looking at this satisfying chart on training loss and seeing the trainin...", + "Video: 59473159
Text: Welcome to Jupyter Lab and welcome to our experiments at the frontier.\nSo we are going to put our fr...", + "Video: 60619447
Text: I want to take a moment to talk about something that's very fundamental to an LLM, which is the numb...", + "Video: 59166353
Text: Well, congratulations on leveling up yet again.\nYou've got some real hard skills that you've added t...", + "Video: 60619123
Text: So what we're now going to do is we're going to look at some models in practice and start to compare...", + "Video: 59295363
Text: Well, another congratulations moment.\nYou have 40% on the way to being an LM engineer at a high leve...", + "Video: 60619289
Text: And now we'll go a bit faster through the other models.\nWe'll start with Google's Gemini.\nI have the...", + "Video: 59472873
Text: So it's quite an adventure that we had at the frontier of what's capable with Llms today, solving a\n...", + "Video: 60619429
Text: Let me talk about some other phenomena that have happened over the last few years.\nOne of them has b...", + "Video: 59295601
Text: So it's time to continue our journey into the world of open source and understand which models we sh...", + "Video: 59170025
Text: And a massive welcome back one more time to LM engineering.\nWe are in week three, day two and we are...", + "Video: 59166443
Text: And welcome back everybody.\nWelcome to week two day three.\nIt's a continuation of our enjoyment of r...", + "Video: 60620025
Text: And welcome back to Jupyter Lab, one of my very favorite places to be.\nWhen Jupyter Lab sprung up on...", + "Video: 59170055
Text: Welcome to the world of Google Colab.\nYou may already be very familiar with Google Colab, even if so..." + ], + "type": "scatter", + "x": [ + -0.859648, + 7.3309765, + 0.21870197, + -13.03976, + -3.0766428, + 11.100553, + 4.6001115, + -9.268545, + -5.360921, + 1.8335935, + 2.2128375, + -1.7025363, + 2.7415411, + 1.6968822, + -9.437864, + -2.4456034, + 13.673174, + 9.69971, + 5.391895, + 0.9950481, + -2.6140966, + 14.227348, + 2.5340881, + -10.256354, + -7.6409054, + 2.7219393, + 1.1424255, + 1.6502428, + -6.957909, + 4.086808, + 8.104448, + -7.2092853, + 13.410249, + -2.9087114, + 2.019522, + 7.0692005, + -0.84805, + -9.599044, + -0.36659604, + 2.8821077, + -1.7564659, + 0.22077061, + 7.1004896, + -0.5071637, + -1.5469455, + 9.606234, + -7.6583476, + -8.758075, + 16.001204, + -0.45763963, + 12.072926, + 14.450202, + -7.893885, + -4.888164, + 7.238137, + -6.4890647, + -10.677237, + -6.450742, + 0.29829141, + 3.5972733, + 7.056694, + 3.3955274, + 4.1175003, + 2.2164605, + 3.3678567, + 10.912271, + 4.3282537, + -1.8016068, + -2.778665, + 0.33017898, + 3.1186757, + 8.368695, + 3.8920324, + 9.047157, + 4.2369857, + -13.133919, + 0.30549568, + 10.36587, + 2.0417519, + -4.207513, + -2.7341063, + 9.276742, + 3.7855272, + 2.2184367, + 9.518204, + -7.6228004, + 3.5007627, + 4.166524, + -6.947239, + -6.4718704, + 6.777542, + -1.643389, + -4.0581813, + 13.556551, + 3.738945, + -9.4638, + -7.085359, + -12.116256, + 1.8722422, + -1.235673, + 1.5310236, + 2.681954, + -1.2896698, + -3.3085613, + -3.5033119, + -7.8056912, + -6.380733, + 12.077981, + 9.891831, + -2.583847, + 0.049997784, + -7.109494, + -1.6533405, + -0.35486424, + 8.023757, + -1.5843254, + 4.68254, + 12.040059, + -4.070594, + 3.5485406, + 8.321888, + -10.936198, + -5.665428, + -3.9380574, + -1.2327232, + -2.4456801, + 5.2406054, + -0.036940902, + 3.5880437, + 10.343754, + 0.10399394, + -6.0591764, + 0.5472898, + -0.18098946, + 12.552157, + 2.215009, + -2.0987718, + -4.3202305, + 10.194152, + -1.0280695, + 
0.6394854, + -7.001653, + -2.6180403, + 0.5332797, + 6.908162, + -4.1370797, + 0.36955032, + 6.766898, + -5.599071, + -6.2765083, + -6.5416136, + -8.705647, + 8.097455, + 6.401871, + 10.086735, + -6.55865, + 13.3281975, + -11.958505, + 9.180207, + -10.071172, + -5.573983, + 1.7291324, + 3.2020307, + -9.81586, + 4.254864, + 13.542623, + 3.1633458, + 6.4809103, + 1.6912766, + -0.96716404, + -9.644825, + 4.948192, + -4.875502, + -2.7658813, + -7.0795684, + 6.3749175, + -0.2840374, + -13.407808, + 0.97872597, + -8.729023, + 9.891307, + -2.5329638, + -11.002493, + 0.6183121, + -10.363856, + 0.267183, + -8.229537, + -6.164332, + -7.064035, + -5.55934, + -11.237544, + -2.4159808, + -7.657407, + -0.47880024, + -4.861272, + 11.012814, + -5.3301964, + 4.517483, + -13.10771, + 8.053061, + 2.3658233, + -4.5009966, + 0.74033785, + 3.0659394, + 7.927173, + -5.8426704, + -7.692328, + -11.8179, + -10.170092, + -13.525137, + 7.471072, + -9.561237, + -7.660354, + 0.98921955, + -2.5871053, + 0.7735228, + 4.697858 + ], + "y": [ + -16.108109, + -2.802871, + 5.55556, + -3.1165593, + -2.5197542, + 1.6573303, + 7.9436374, + 1.7033308, + 2.729909, + -3.110688, + 0.19969198, + 7.2094626, + 11.474268, + -9.306534, + 6.4831786, + 3.4693139, + -3.6395743, + -1.3802856, + 17.943478, + 20.674412, + -14.37641, + 0.2662799, + 17.946259, + 5.1479177, + 3.269782, + 12.8965645, + 19.676033, + 10.8139715, + 4.6734076, + -12.667667, + 9.158181, + 2.501612, + -2.853026, + -2.8742912, + -9.390684, + 3.5931249, + 1.5709282, + -10.765733, + -5.9113226, + -5.507533, + 10.479771, + -0.30162513, + -5.1619964, + 9.435609, + 8.602637, + -4.3179398, + 14.847089, + -5.8406625, + -4.0188546, + -16.03366, + -2.9351225, + 0.8814765, + 9.312446, + 0.2861319, + -8.754338, + 14.149203, + -2.501995, + -4.5788355, + 20.002163, + -6.9779773, + -1.2691092, + 13.288892, + -9.499816, + 9.733308, + 7.6684027, + 0.11708519, + 12.023214, + -8.592282, + -14.5351715, + -16.749748, + -1.4396764, + -10.056349, + 11.6244335, + 
0.4102241, + 18.943052, + -4.8392525, + 17.31309, + 12.829157, + -0.31426865, + -4.913969, + -5.8585067, + 8.703345, + 17.946308, + 7.5203032, + -9.040579, + -8.977853, + -10.744503, + 2.9780662, + -2.9896638, + -9.919191, + -7.825369, + -0.5688983, + 2.7128513, + -8.081976, + 19.224987, + 5.6850524, + 7.2608743, + -1.9696628, + 5.7763453, + -11.9261, + 3.7726462, + -6.2928514, + 0.6002692, + 3.240406, + 10.033546, + 1.7159785, + 14.183074, + -4.955666, + -1.2268807, + -6.7443852, + 8.091246, + -1.4330128, + 17.374035, + -12.052618, + 8.407009, + -12.653764, + 0.5208274, + -2.3776338, + -5.5375533, + -11.549568, + -6.591003, + -7.744704, + 5.603869, + 1.1318715, + -1.3157955, + 12.294856, + -11.588596, + 18.72359, + -4.533707, + 12.797578, + 18.353394, + 14.767065, + 16.229063, + -1.1066937, + -3.7252734, + -3.5343199, + 5.829706, + -1.1521066, + 6.080864, + -14.84926, + -3.3232324, + 10.510039, + 5.957106, + -2.2022781, + -5.182772, + 9.717215, + 4.3090715, + -8.085696, + -12.127335, + -6.2474174, + 7.0910845, + -6.4494314, + 7.989585, + -10.92101, + -4.208281, + -3.0467856, + -7.2040524, + 3.4879417, + -2.9318397, + -3.7214146, + 1.688086, + -6.546599, + 2.838505, + -12.067736, + 4.953533, + -7.6888204, + -8.374947, + 8.707888, + 9.738594, + -5.715931, + -7.3781533, + 19.546906, + 8.7370205, + -11.739149, + -1.4666423, + 3.6902168, + -14.634209, + 7.613645, + 11.104671, + -8.19435, + -6.881912, + -3.8555307, + 3.0141797, + 7.749215, + -1.4897541, + 14.488473, + 3.882484, + 3.0612788, + 0.7137344, + -7.4118447, + -3.123873, + 17.030315, + 8.942967, + 7.1438923, + 4.7416067, + 2.027668, + 4.4894767, + 17.905304, + 7.7233586, + -7.9623003, + 2.6870794, + -7.5661163, + -12.301454, + -11.477651, + -3.8346057, + -11.767478, + 5.598828, + -4.312641, + -6.8031745, + -4.3621974, + -6.8772163, + 9.279575, + -4.0924034, + -0.7417347, + 8.393606, + 5.9398475, + 0.6119152 + ] + } + ], + "layout": { + "height": 600, + "margin": { + "b": 10, + "l": 10, + "r": 20, + "t": 40 + }, 
+ "scene": { + "xaxis": { + "title": { + "text": "x" + } + }, + "yaxis": { + "title": { + "text": "y" + } + } + }, + "template": { + "data": { + "bar": [ + { + "error_x": { + "color": "#2a3f5f" + }, + "error_y": { + "color": "#2a3f5f" + }, + "marker": { + "line": { + "color": "#E5ECF6", + "width": 0.5 + }, + "pattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + } + }, + "type": "bar" + } + ], + "barpolar": [ + { + "marker": { + "line": { + "color": "#E5ECF6", + "width": 0.5 + }, + "pattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + } + }, + "type": "barpolar" + } + ], + "carpet": [ + { + "aaxis": { + "endlinecolor": "#2a3f5f", + "gridcolor": "white", + "linecolor": "white", + "minorgridcolor": "white", + "startlinecolor": "#2a3f5f" + }, + "baxis": { + "endlinecolor": "#2a3f5f", + "gridcolor": "white", + "linecolor": "white", + "minorgridcolor": "white", + "startlinecolor": "#2a3f5f" + }, + "type": "carpet" + } + ], + "choropleth": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "type": "choropleth" + } + ], + "contour": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "contour" + } + ], + "contourcarpet": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "type": "contourcarpet" + } + ], + "heatmap": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + 
"#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "heatmap" + } + ], + "heatmapgl": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "heatmapgl" + } + ], + "histogram": [ + { + "marker": { + "pattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + } + }, + "type": "histogram" + } + ], + "histogram2d": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "histogram2d" + } + ], + "histogram2dcontour": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "histogram2dcontour" + } + ], + 
"mesh3d": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "type": "mesh3d" + } + ], + "parcoords": [ + { + "line": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "parcoords" + } + ], + "pie": [ + { + "automargin": true, + "type": "pie" + } + ], + "scatter": [ + { + "fillpattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + }, + "type": "scatter" + } + ], + "scatter3d": [ + { + "line": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatter3d" + } + ], + "scattercarpet": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattercarpet" + } + ], + "scattergeo": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattergeo" + } + ], + "scattergl": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattergl" + } + ], + "scattermapbox": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattermapbox" + } + ], + "scatterpolar": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatterpolar" + } + ], + "scatterpolargl": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatterpolargl" + } + ], + "scatterternary": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatterternary" + } + ], + "surface": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + 
], + [ + 1, + "#f0f921" + ] + ], + "type": "surface" + } + ], + "table": [ + { + "cells": { + "fill": { + "color": "#EBF0F8" + }, + "line": { + "color": "white" + } + }, + "header": { + "fill": { + "color": "#C8D4E3" + }, + "line": { + "color": "white" + } + }, + "type": "table" + } + ] + }, + "layout": { + "annotationdefaults": { + "arrowcolor": "#2a3f5f", + "arrowhead": 0, + "arrowwidth": 1 + }, + "autotypenumbers": "strict", + "coloraxis": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "colorscale": { + "diverging": [ + [ + 0, + "#8e0152" + ], + [ + 0.1, + "#c51b7d" + ], + [ + 0.2, + "#de77ae" + ], + [ + 0.3, + "#f1b6da" + ], + [ + 0.4, + "#fde0ef" + ], + [ + 0.5, + "#f7f7f7" + ], + [ + 0.6, + "#e6f5d0" + ], + [ + 0.7, + "#b8e186" + ], + [ + 0.8, + "#7fbc41" + ], + [ + 0.9, + "#4d9221" + ], + [ + 1, + "#276419" + ] + ], + "sequential": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "sequentialminus": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ] + }, + "colorway": [ + "#636efa", + "#EF553B", + "#00cc96", + "#ab63fa", + "#FFA15A", + "#19d3f3", + "#FF6692", + "#B6E880", + "#FF97FF", + "#FECB52" + ], + "font": { + "color": "#2a3f5f" + }, + "geo": { + "bgcolor": "white", + "lakecolor": "white", + "landcolor": "#E5ECF6", + "showlakes": true, + 
"showland": true, + "subunitcolor": "white" + }, + "hoverlabel": { + "align": "left" + }, + "hovermode": "closest", + "mapbox": { + "style": "light" + }, + "paper_bgcolor": "white", + "plot_bgcolor": "#E5ECF6", + "polar": { + "angularaxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + }, + "bgcolor": "#E5ECF6", + "radialaxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + } + }, + "scene": { + "xaxis": { + "backgroundcolor": "#E5ECF6", + "gridcolor": "white", + "gridwidth": 2, + "linecolor": "white", + "showbackground": true, + "ticks": "", + "zerolinecolor": "white" + }, + "yaxis": { + "backgroundcolor": "#E5ECF6", + "gridcolor": "white", + "gridwidth": 2, + "linecolor": "white", + "showbackground": true, + "ticks": "", + "zerolinecolor": "white" + }, + "zaxis": { + "backgroundcolor": "#E5ECF6", + "gridcolor": "white", + "gridwidth": 2, + "linecolor": "white", + "showbackground": true, + "ticks": "", + "zerolinecolor": "white" + } + }, + "shapedefaults": { + "line": { + "color": "#2a3f5f" + } + }, + "ternary": { + "aaxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + }, + "baxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + }, + "bgcolor": "#E5ECF6", + "caxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + } + }, + "title": { + "x": 0.05 + }, + "xaxis": { + "automargin": true, + "gridcolor": "white", + "linecolor": "white", + "ticks": "", + "title": { + "standoff": 15 + }, + "zerolinecolor": "white", + "zerolinewidth": 2 + }, + "yaxis": { + "automargin": true, + "gridcolor": "white", + "linecolor": "white", + "ticks": "", + "title": { + "standoff": 15 + }, + "zerolinecolor": "white", + "zerolinewidth": 2 + } + } + }, + "title": { + "text": "2D Chroma Vector Store Visualization" + }, + "width": 800 + } + }, + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABPAAAAJYCAYAAADsXBi6AAAgAElEQVR4XuydBZgUx7qG/1kHFncNGoiRQIS4u7vbSUJcTjw5cXd3txt3Je4eokQgxHBfWGF9Z25Vk53sboCdqenuqZp5+zx57j3Z/qv+er+GQ95UdUdi6hIuCEAAAhCAAAQgAAEIQAACEIAABCAAAQhAwEoCEQSelbnQFAQgAAEIQAACEIAABCAAAQhAAAIQgAAEPAIIPB4ECEAAAhCAAAQgAAEIQAACEIAABCAAAQhYTACBZ3E4tAYBCEAAAhCAAAQgAAEIQAACEIAABCAAAQQezwAEIAABCEAAAhCAAAQgAAEIQAACEIAABCwmgMCzOBxagwAEIAABCEAAAhCAAAQgAAEIQAACEIAAAo9nAAIQgAAEIAABCEAAAhCAAAQgAAEIQAACFhNA4FkcDq1BAAIQgAAEIAABCEAAAhCAAAQgAAEIQACBp56B7376TW574AX5efJfEovGZMUh/eXog3eWdUevHH9Ctt3/DJk2c278vxcU5EuPrp1k9Goryr67bi6rrzwkoacpqsZ/6c1P5IXXP5ZJv02Vqppa6a7GWXv14XLwXtvIiKED4uPsfOg5MnhAb7nx4uMTGtvmm6rVOjfd47/eOm+57KRltrrDQWdJoWL73H2X2Lycf/X2wWffy2PPvy0TVaYLS8ulqLBAhg8ZIPvsvJnsuNV6Vq3liNOukYmTp8p7z94o+Xm5S+3twmsflOfHfSTvPnODHHDcpeo5HyaXnz029HWcd/X98vGXP8h7z9zoza1/HYbRy8tvfipnXX63vPnEtdK3V7fQ182EEIAABCAAAQhAAAIQgAAEIACBpgSyXuBNmPinHHT8pbKmEkuHKIEWiUTk3sdele9+/E2evOuCuFDT4qC4XRs549j9PH41tbXyx9RZ8tIbn3jS5rhDd5Vj1V/Lu+rqG+TEc2+WDz//XrbeZC3ZdP1R0q5tkUyZPkeefvl9mT13gVx5zlGy7WbreMNkksDT67n0xkfkqZffk3efvkG6den4L1Tf/jhZDjz+Mjn3vwfJfrtu4cuv1NLyxbL+TsfJV+PulLZtinwZs+UgWnSde9V9suu2G8o2m64tXTt3lAULSz0B9uYH4+V/Jx4gB+y+lVf29kdfy12PvCxP331hIL0kMugb738lp1x4myeGt9p4rX+VaNm6ye4nyQZrryrXX3icvPrO515eY0atlMjwvt7TUuAF1cvlNz/qyczTj93X61//mvz865+UfF3f+zXKBQEIQAACEIAABCAAAQhAAAIQSCeBrBd4WmR8M2GyvPn4NaJ31emrrKJSNtzleCX0tpVTj97b+3ta4OmdOPddf0azvPSOuitvfUwefe4tJTuOVQJniXxb2nXjPc/IPY++IpeddYQne5pelVU1cvipV8sfU2aqXq6Vjh3aZZzAm/T7NNn98PPktKP3kf/su92/EGlZ85qSRe+rnWHti9v68uvioy8myNFnXheowNv+wDOlV/cucv8NZ/6r5xPOuUmUFZZbLj3R+9n1dz0ln339c1oFnhbJm+/5X1l1xGC548qT/9Vz4+6ze689XdZbaxVfcjAdpKXAMx2ntbp9j7lY1lS7aRsFXmv383MIQAACEIAABCAAAQhAAAIQgECYBLJe4M0vKRUtzwb07dGM+0a7nuDtkLvkjMO8v78sgad/1tAQlZ0OOds7Nrmso59V1bWy8W4nyJojV5Q7rzp1qRnPmrNAtFxp7EXvwBs2qK9svuFoufX+52Xm7PnSs3tnTzI07px65a3P5MzL7pKHb/6fXHDtA14v4x69SrRYfODJcfLsqx94dUVFhTJq1WFy0hF7xHcV6t1MZ1xypzx++3ly7Z1PejsJ9Rr0brE9dthY9DHKbyb8KnlqZ9LO22zgibfG669ps+Wme59Ru5R+lqrqGunRrbNsv8W63k7E/Py8ZT7D+x59kSyurJaXH76i2T1L+Jzo7WC79Mz
DvZ998tWPcvf/vSyT/5iuuNTLyJWGyMlH7SWrDh8Ury1XsvWme5+Vtz4cLxWLq2TwCn3kyAN39Pjc9sDzcvtDL8bv3Xjd1T1hlQib86+5X35UuzMP338HuULtztp8w1Fy8elLnoWW11b7niYD+/WSe649bbm/dg856QoZ//2k+D2NIlML4xuU2Hv3k2+947edO7aXTdZbXU4+ci/v/9fXsvqJxWLe0d3nXvtI7RqbLYUqv43HrO6J56Xtcmyc/Lo7n5IHnxrn7YbUR7ibXoedfJXMUM/M649d7e1IbXlsVT8nN97ztOLzlyyuqlbysrPstPUGcvRBO0tOTkSeeeUD71l85+nrPbHZeB15+rWeHH/ijvO9v5XIM7S8I7Q6d/18LO3SOwf1s6Sv/3v2LXnqpfe8XXVt2xTKcHVMXbNtPPa+yqaHNhtC77z9c8qsfx2hfe/Tb73dk78qEa2vFQf3k8P228HbTauveQsWecfEr1K7aPVu0nc/+cZ7JvWx+HNOOqjZ8fjlPij8EAIQgAAEIAABCEAAAhCAAAQg0IJA1gu8pT0R+litFk1X/G+s7KzEhL6WJ/D0z2+4+2nv6O1HL9wiXTotkS5Nr6++myiH/vdKTwjuvv3GCT2IWuA1NDTIwP69ZOwBO0pubq7awfWkkgO/eeJFz/PG+1+q45C3e3JOv29tmJIKWhjofh566nU57Zh9lYhcQx3pLPN2Cuodfi8/dIUSbp1U7ZKjlPqdYlpOraAk1NW3Py6PPPOmrDZikJxx3H7eO/4aj4jefc1p3rFKLcC23f90b5fchaceKp06FiupMd0THgfusZUnCZd1Pfvqh56MekxJw6bvDdTvBDznynvjf1+Lrv+cfKVsudGacsLhe3gctMT8dPyP8uy9FyvJ2dObQt8zbeY8JUgOlN49usrLb30qDz75uuhede9aYmqR95Z6l1mH9u28Y9CJsLnkhofl/U+/kz5q1+VRB+2k2PSU/n2aS97GNWqumtkOSmDq9yGOVO9DzFNZtby0bNRZ6eO1eidnGyVV9fv+9DvmZip5e/4ph8gI9f5FLcguuv4htZ4u8riSXVqiLasfLZRuvu9ZOVEx0u/a0xL4YtV7jqp5+p6LlvmOu6kz5sh2B5wp/x27p/dsNV5a3G2thGTTv99U4NWrHDZTkkqv8fj/7Obx1LJK93f0wbvI4fttn5DAS/QZWp7AKy1bLKXlFc0wn3/NA94z/vTdF3myu/HZ1c/yZkrI6+PBdz3yknyq5PCr/3eV92tIS9Ot9jlVdtlmQzn+sN2853rcO180E3iNOzn32nFTOWhPdRxa8dXP2XOvfSi3X3GyJ1z1OBvucoI3r+a33ebrKoFXKYefskSE6ueWCwIQgAAEIAABCEAAAhCAAAQgYEIAgdeCWsmictn/2Euks/oH+/+75RwlzXK8O1oTeHqHj5Yu+h/Sm36IonH4xp1yD954lqy9xoiEstICb+GiMvUi/euU7Cnwar78dqInrfQuvo3GrBaXcE2Fi97Npo8A62O65518cHwuvRNP7xZrvLdR4DU90vvL5Cmy59gLvA9qnKmkh770rr7VtzxcSaLd1e62nTyBpwWQfqecFoGN10nn3eLt3HpGiaNlXXq3o36/2vZbjJGLTvtP/Da9O00LmRceuNT7e1p66B1lWrJoyaUvva4t9zlFtla76y5Q4lAffT7ohMv+9S43LQj18dC9d9pUHnhinLe7sPEdeImy0e/re/yFd/4lGpe2Lr1rUu9GfPz5dzxBpHd5rb7KUFlvzVVkxy3X84RO43XMWTeI3vXZ+A68xjxbHr9+Ub1b8X9X3BOff2n91NTWeTlvpHbc6frGa8Ivf4g+EnrNecd4nJd16Z12s+eVyGuKceOlZefd//eKt3uucQdfU4GnP+Si/7v+oMUualdm46WPR+t3xfXr3T1hgZfIM5TMRyz08XTd//03nOVJaX0tKq3w1tj01+SvakfnboedK7defpIn9fS11rZ
HKgG+efwIbcuPWOjnc6Ea60X1fGoZpy+9+1FLUL1jVgvjRoGnmWv2jZfuSx+f//bNe+LH9JcZCj+AAAQgAAEIQAACEIAABCAAAQgshQACrwkULRSOPP06T8DoHVKNxxf1La0JvEefe1suv/n/5KUHL5MhA/v+C/Urb6ujrpfe5b0nLdGPAWiB16dnl2ZHbn9Xu4t2PuR/cu35x6gdPmPiAk8fodXHc/XVuIPw6vOO9naFNb30ET8tN/QRw0aBp48MNh5LbdyBpeXanjtuEi8ds8Mx3n8/Xe3o05eWIA8//YZ8r77gq8VGNBb1jiJr6fP2k9ct9xfbRdc96H0Y4YPnbvbE5NQZc5UIOUN97OFAdXx3S692zW2O9NbXeJy2ccDj/3eTzJm/0BNgD6n5r77tce+deS2PgTbe31LgJcqm8YMb3711n3csNJGrUh0n1UeK9e7B8T9Mkp8m/eXtgDvv5EO8I8n6ainw7n/iNdHHWVuuQR/31O/Wa/ygx9L6aVxLS5mm5/Hy2kHl9fdHGZbW/2tql9npl9whj9zyP2+3ohZSW+93uqyy4sBmXz5uKvC0zNVyUIs8veNzfbUjc7Ta/dn02HSiR2gTeYYSFXj6wzDHnn2jJ4UbWes16x2DT774nrz+3hfqKPkC9dXnGu9L0/oor362dttuIw9NawJPP487qQ9aXHjaoc1Q6iPon47/ST5+8Za4wNPHlw/bd/v4fU+++K63K/KD525a7rHmRJ4x7oEABCAAAQhAAAIQgAAEIACB7CSAwPs7d72b6/hzbpSVhq7gyYuWH1FoTeBpwfLkS+/K56/csdSvVjbuFtM74vbdZfOEnralfYVWf/l2p4PPju+uapRweufasEH9vHE/U0LhiNOu8d73pt/71vTaUdXqj3HcdfWpcYHXtLZR4F35PyUstl4/XtpUCE2fNU92/c85MmSFvt5uvv5qB5J+T54WmD//OqVVgde4y69RPOkjoPo44vtKcHRQxxe1dFl9i8M9caaPDTe99FHaLp06eDJEH6m94+EXl/uBipYCL1E2Os/X3v1cPn3ptoSyWtpNWgiffMFt8vtfM+Ttp5bsaGsp8Brf4zb+9bvjuyz1WHqXnt6pqN/VdoR6D9/S+mlciz6uG2khGevq6j0BqkXvsi59z6bqYxZ6F5qWWVo+6g+pNB6Vbqxr+Q48/V43/d49/YVdnaXeibnjlut6x7X1LrxEBF6iz1AiAu9P9WtCS0V9BFZ/8bfppY/2Pql2x+pj3XqdxcVtvHdCHnTC5QkLPL2DUj+P+sMrTd8DqefRu2718W+9u65xB15TEa3vQeAZ/xKiEAIQgAAEIAABCEAAAhCAAAT+JoDAUyD0O7zGKuGl31l1wamHLPX9ZcsTePrYpH5vmP6Agj4iu7SrVh133FgJGf1Sf/2hi6Xt6tJH/Z5T74jTx1f1u8VMBd6Pk/6UfY66SJa1A2+t1Yd7Ymdp8i8RgdcoxcY9enWzj380vo+utR14ms9eR16o1lgk919/pvf+sXVGreQdy2y81t7uKO9oqH4nWctLv99NvxfwMXVk9bKbHvGEYe+eXZfKvaXAS5RNMgJPCyG9A3BpH+94+6OvRR8t1h+4WH+tVf8l8Br7W9YOvAvUe/H2VjvdltaP3uG391EXersiN1bvYGt56Z2kTT8isTRA19z+hDz18nvy4fO3yIXXPSDfKpH9hvoic+MxUV3TUuA1HUcfUR2ndrfpr+tuvsFouerco9SHU5a857DlRyy0NNMfI9EfsUj0GWpN4On3Cu6j3lep13m3Ytzy3YN6Z90W6j2K+sMSjZfeIamPxCazA0+Ps+OWS9+B98W3v3hCGYHH/65CAAIQgAAEIAABCEAAAhCAQFAEsl7g6Y877HjQWbK1+mJl03eytQS+LIGnjxSee9V98tKbn8h9150h66658jKz0l9E1e/oOuGw3dUL/3dudp8+fnrMWdfL5D+nyysPX+m9XN9U4GmhqN+NpnckNX0
Hnt71tI06Iqlf6H+IkoSmAq/xwwlfvHqHJxr1pY/B6p2B3buqI7Rqt1lrl35n4CU3Piw3XHS8J7j+79ZzvA9xNF5jT7vW+9hDy6/66nn0V08L1HvxGt/11lTE6Ho9nn4Xmz4+2iiKvnztTm93WKJsEhV4H385QY4647r4UdeW677l/ufkzodfih+t1jvw9NdKG98T+PUPv8rBJ17uvcNum03XiZc3fnxB37fSsBWWKvC0FN5AfTRh9+03krNPaL7zTB+1HjygdzMRt7RM9O41vStT77jUxzzHHrCD957DpldTgadl5bfq2HTLo9n6S8g/K6Govy6sd+adfMGt3vsMG3eF6vf1baZ2++mPj2iBl+gztDyBp9/FeOzZ18ufU2fLU3ddKB07tGvWtz4SPGqrI2S/3baMv89R36B/vWq+LQXe3jtt5v3a0FfLd+Dp9wXOV79X6CPyjVfjhzhWHNzfe58eAq+1X/X8HAIQgAAEIAABCEAAAhCAAARMCWS9wNP/MK9Fln4nXOPHEhph6vezrbbSYO+/aomhZdUZxzZ+2KHB+5DDM2q30c+//uXtgjp0n22Xm4M+GqqPVL778Tey3lqryHabjZFOHYplyozZ8sQL76ovai6WWy87Kf6RC1OBp5vQx1Lvf/w1OUuJnY3HjJS5ShpdcfOj3vvjXlQSoqP6IqupwGv88IIWPfqddb/9OUOuuu0xT9a8+eF4eeH+S6WvEmj6/W/LuhZXVsume5zkHb/sqBg0FSO6pvErtHtsv4n3ZVd9n5Zl19zxhJx61N7e1271pY986mOc5550sPelWH3sVR/HbfzIh/5KqJZAWlBpETZ0UN+E2CQq8LQkOuGcm+XDL74X/YXSDdWHRXSm+oMcH33xg3d8cxslh6+7YMlHJvSXevXXbe9WR5i7qiO1+jiz3pk2Y/Y8TyAPVQx/nPiH91VX3a8+6qyvZfWjRZg+RnyKOmqrj0vrHW5Pv/y+9943/aXfVYYPbPX3Bi0Q9Tv3tIB6R8nXlu8TbCrwGqWpFsC7qI+kaCmqpfP5ivG2m62jvgZ8kGhRrN9puN+uS8SZlqb6+fjgs++9nZJa4CX6DF2sjqh+/OUP8t4zN8Z/Hep3OOrdmnrX34PqS8s3XHi8+vpy8/dO6udFH1nWO+1mqH5uvfy/3vHs/3v2Le9djc+8+oF3lF0frdXH5bfY6xSvNy289YdZPv5iQrOv0OqvH2uprGv0Dtl6xfk+9etLf/X4oZv0OwSHIfBafdK4AQIQgAAEIAABCEAAAhCAAARMCWS9wNO7gubOX7RUfvqY5quPXBkXB/rF/Y2XPmKod8npl//rf6Bv/Opla0Fo4aO/SPvcuA+VeJoqemeS3lG2njpeeZh6x5beOdZ4pSLw9DwPPDlOyZwPvHd+adGidwfqd6r179PDm8JU4Ola/WVN/eGOMiUdV1YfPTjz+P2lrRKeY0+/VvSxRi1plvYxj6Z89DFLfdxS7x5rFHJNf67fyXbbgy94glQfOda7t/SHE/SR0sZLf4zgevURiHc+/lq0FBykdp3p3Y1bqS/V6ktL0aNUT5r16qsMEf2xj0TYJCrw9BxazOr3vulctdRdVFah3mdXqN4R2Ed2VB8+2HvnTeNHO3/4+XdP4urdhYfus533DkHNS8uodz/51pNAWjxtvcna6qu/e3gfVNHX8vrRX8vVX8D1vuqqch4xZIAcpRgk+rGUxi8kb7HRaLn5khP/9Qi3PEKr+7xX5T9ZiVu9dn18VUvKYw/ZJf6VVS1O9c7DEvUVZS0ED9tve/XBk99lsvr4if5oSqLP0INPvbFMgbeD2jn717TZS/0lp7/+q4/z6p9fcO0DSor+6Ql4/WXmE9TXlK+69TF5WmW2vXpPoJaBekeo/lpxNBr1ZL4WsFq2vvnEtZ5k1ZcWr3cqWTpJrUEf49bP/XH/2VXWHb1k1y078Fr73Y+fQwACEIAABCAAAQhAAAIQgIApgawXeKbgqIMABCAAAQh
AAAIQgAAEIAABCEAAAhCAQBgEEHhhUGYOCEAAAhCAAAQgAAEIQAACEIAABCAAAQgYEkDgGYKjDAIQgAAEIAABCEAAAhCAAAQgAAEIQAACYRBA4IVBmTkgAAEIQAACEIAABCAAAQhAAAIQgAAEIGBIAIFnCI4yCEAAAhCAAAQgAAEIQAACEIAABCAAAQiEQQCBFwZl5oAABCAAAQhAAAIQgAAEIAABCEAAAhCAgCEBBJ4hOMogAAEIQAACEIAABCAAAQhAAAIQgAAEIBAGAQReGJSZAwIQgAAEIAABCEAAAhCAAAQgAAEIQAAChgQQeIbgKIMABCAAAQhAAAIQgAAEIAABCEAAAhCAQBgEEHhhUGYOCEAAAhCAAAQgAAEIQAACEIAABCAAAQgYEkDgGYKjDAIQgAAEIAABCEAAAhCAAAQgAAEIQAACYRBA4IVBmTkgAAEIQAACEIAABCAAAQhAAAIQgAAEIGBIAIFnCI4yCEAAAhCAAAQgAAEIQAACEIAABCAAAQiEQQCBFwZl5oAABCAAAQhAAAIQgAAEIAABCEAAAhCAgCEBBJ4hOMogAAEIQAACEIAABCAAAQhAAAIQgAAEIBAGAQReGJSZAwIQgAAEIAABCEAAAhCAAAQgAAEIQAAChgQQeIbgKIMABCAAAQhAAAIQgAAEIAABCEAAAhCAQBgEEHhhUGYOCEAAAhCAAAQgAAEIQAACEIAABCAAAQgYEkDgGYKjDAIQgAAEIAABCEAAAhCAAAQgAAEIQAACYRBA4IVBmTkgAAEIQAACEIAABCAAAQhAAAIQgAAEIGBIIO0Cb+aCqlZb79WlSOaUVEus1Tu5AQJuESjMz5F2RXlSUl7rVuN0C4EECHQqzpea2qhU1TYkcDe3QMAdAsVt8qRD23ypqKqXsso6dxqnUwgkQKCoIEfaFOTJwgr+bJIALm5xjEDn4gL155J6qVZ/PuGCQKYR6NO1jSTiVzJt3TavR2fi54XA85MmY0EgSQIIvCSBcbtTBBB4TsVFs0kQQOAlAYtbnSOAwHMuMhpOggACLwlY3OocAQSefZEh8OzLhI4gYEwAgWeMjkIHCCDwHAiJFo0IIPCMsFHkCAEEniNB0aYRAQSeETaKHCGAwLMvKASefZnQEQSMCSDwjNFR6AABBJ4DIdGiEQEEnhE2ihwhgMBzJCjaNCKAwDPCRpEjBBB49gWFwLMvEzqCgDEBBJ4xOgodIIDAcyAkWjQigMAzwkaRIwQQeI4ERZtGBBB4RtgocoQAAs++oBB49mVCRxAwJoDAM0ZHoQMEEHgOhESLRgQQeEbYKHKEAALPkaBo04gAAs8IG0WOEEDg2RcUAs++TOgIAsYEEHjG6Ch0gAACz4GQaNGIAALPCBtFjhBA4DkSFG0aEUDgGWGjyBECCDz7gkLg2ZcJHUHAmAACzxgdhQ4QQOA5EBItGhFA4Blho8gRAgg8R4KiTSMCCDwjbBQ5QgCBZ19QCDz7MqEjCBgTQOAZo6PQAQIIPAdCokUjAgg8I2wUOUIAgedIULRpRACBZ4SNIkcIIPDsCwqBZ18mdAQBYwIIPGN0FDpAAIHnQEi0aEQAgWeEjSJHCCDwHAmKNo0IIPCMsFHkCAEEnn1BIfDsy4SOIGBMAIFnjI5CBwgg8BwIiRaNCCDwjLBR5AgBBJ4jQdGmEQEEnhE2ihwhgMCzLygEnn2Z0BEEjAkg8IzRUegAAQSeAyHRohEBBJ4RNoocIYDAcyQo2jQigMAzwkaRIwQQePYFhcCzLxM6goAxAQSeMToKHSCAwHMgJFo0IoDAM8JGkSMEEHiOBEWbRgQQeEbYKHKEAALPvqAQePZlQkcQMCaAwDNGR6EDBBB4DoREi0YEEHhG2ChyhAACz5GgaNOIAALPCBtFjhBA4NkXFALPvkzoCALGBBB4xugodIAAAs+BkGjRiAACzwgbRY4QQOA5EhRtGhFA4Blho8gRAgg8+4JC4NmXCR1BwJgAAs8YHYUOEED
gORASLRoRQOAZYaPIEQIIPEeCok0jAgg8I2wUOUIAgWdfUAg8+zKhIwgYE0DgGaOj0AECCDwHQqJFIwIIPCNsFDlCAIHnSFC0aUQAgWeEjSJHCCDw7AsKgWdfJnQEAWMCCDxjdBQ6QACB50BItGhEAIFnhI0iRwgg8BwJijaNCCDwjLBR5AgBBJ59QSHw7MuEjiBgTACBZ4yOQgcIIPAcCIkWjQgg8IywUeQIAQSeI0HRphEBBJ4RNoocIYDAsy8oBJ59mdARBIwJIPCM0VHoAAEEngMh0aIRAQSeETaKHCGAwHMkKNo0IoDAM8JGkSMEEHj2BYXAsy8TOoKAMQEEnjE6Ch0ggMBzICRaNCKAwDPCRpEjBBB4jgRFm0YEEHhG2ChyhAACz76gEHj2ZUJHEDAmgMAzRkehAwQQeA6ERItGBBB4RtgocoQAAs+RoGjTiAACzwgbRY4QQODZFxQCz75M6AgCxgQQeMboKHSAAALPgZBo0YgAAs8IG0WOEEDgORIUbRoRQOAZYaPIEQIIPPuCQuDZlwkdQcCYAALPGB2FDhBA4DkQEi0aEUDgGWGjyBECCDxHgqJNIwIIPCNsFDlCAIFnX1AIPPsyoSMIGBNA4Bmjo9ABAgg8B0KiRSMCCDwjbBQ5QgCB50hQtGlEAIFnhI0iRwgg8OwLCoFnXyZ0BAFjAgg8Y3QUOkAAgedASLRoRACBZ4SNIkcIIPAcCYo2jQgg8IywUeQIAQSefUEh8OzLhI4gYEwAgWeMjkIHCCDwHAiJFo0IIPCMsFHkCAEEniNB0aYRAQSeETaKHCGAwLMvKASefZnQEQSMCSDwjNFR6AABBJ4DIdGiEQHbBF5D+QKpnvy5xBrqpXDwmpLftZ/RuiiCgCaAwOM5yGQCCLxMTpe1IfDsewYQePZlQkcQMCaAwDNGR6EDBBB4DoREi0YEbBJ4DZWlUvbOvRKrq16ylpxc6bDJIZLXuW/hyMsAACAASURBVLfR2iiCAAKPZyCTCSDwMjld1obAs+8ZQODZlwkdQcCYAALPGB2FDhBA4DkQEi0aEbBJ4FX/8Y1Ufjeu2TqKVlxP2q66udHaKIIAAo9nIJMJIPAyOV3WhsCz7xlA4NmXCR1BwJgAAs8YHYUOEEDgORASLRoRsEng1c39U8o/fqzZOtqO2l6KBo0yWhtFEEDg8QxkMgEEXiany9oQePY9Awg8+zKhIwgYE0DgGaOj0AECCDwHQqJFIwI2CTy9gMrv35Dq38d7a8nvPUyK19lDIrm5RmujCAIIPJ6BTCaAwMvkdFkbAs++ZwCBZ18mdAQBYwIIPGN0FDpAAIHnQEi0aETANoGnFxGrrpBofa3kFncxWhNFEGgkgMDjWchkAgi8TE6XtSHw7HsGEHj2ZUJHEDAmgMAzRkehAwQQeA6ERItGBGwUeEYLoQgCSyGAwOOxyGQCCLxMTpe1IfDsewasFni3P/SiPP3ye1JTWycbjRkpF5xyqLRtUyjTZs6V866+Xyb9NlX69Oom55x0kIxebZhHd+aCqlYp9+pSJHNKqiXW6p3cAAG3CCDw3MqLbpMjgMBLjhd3u0MAgedOVnSaPAEEXvLMqHCHAALPnazoNHkCCLzkmQVdYa3Ae+P9r+Sme5+RB288W4rbtZETzr1J1hw5XI49ZBc55KQrZPMNR8uBu28ln47/Scm8++StJ6+T/LxcBF7QTwzjW00AgWd1PDSXIgEEXooAKbeWQDICL1pTLw3TSiRvha4Syee9dNaGSmNxAgg8HoZMJoDAy+R0WRsCz75nwFqBN2Hin1JXV6d21q3oUXvo6Tfk50l/yRnH7Sfb7n+GfPbKbZL39wuV9xx7gZxx7H6yzqgRCDz7njE6CpEAAi9E2EwVOgEEXujImTAkAokKvOpvpkrppa9LtKxKcru0k07nbisFI/uF1CXTQMCMAALPjBtVbhBA4LmRE12aEUDgmXELsspagddy0Uefeb1stv4aMmxwf7n4+ofkhQc
ujd9y6kW3y5jRK8veO20q8xZVt8qra8dCKSmt4Qhtq6S4IVACEf9HL8jNkaLCXCmrrPN/cEYMlEBEAnggAu04/MGL2+apf7ETk5q6hvAnZ0YIBEigjfp9u11RnlTVNMji6vplzjTzgAekfmZp/Of5g7pJ7/sPDLAzhoZA6gQK8nOkUO0WLefPJqnDZATfCMR8+ifB9m3yvT+X1NZHfeuNgSAQJ5Dmd35171SUkF8hsfAI6Ez8vCIxdfk5oB7r9gdfkPHfT5K7rz1Nvvz2F7n53mfliTsviE9z7lX3yYqD+8nBe22T0G+e+bkRqWvwvU2/l814mU4ggEcwkiOSG4lIPc+3e08P/q7VzPJyIhJV/xMTDeDXTquTcwMEAiSQq55t/VeDerj1X0u76hcslsk73NHsR/oI7YiPTg6wM4aGQOoE1KMtOfrPJvzmnTpMRvCPgE9/lshT/1zp/dkEf+dfNoz0D4E0//NBQV5OQn6FyMIjoDPx8/JV4GkXeMUtj8pf02bLTZecKG2KCuTbHyfLBdc+KC89eFm871MuvE3WX2tV2XPHTThC62eajOUcAY7QOhcZDSdBgCO0ScDiVqcIJHqEduFVb0r1W7/E19Zmx9Wk0383d2qtNJt9BDhCm32ZZ9OKOUKbTWln31o5Qmtf5lYfob36tsdl9ryFctW5R3kfqNDXwtJy2XLvU+XjF2/1hJ6+tjvgDLn87LEyatVhCDz7njE6CpEAAi9E2EwVOgEEXujImTAkAokKvGhtvVS+8L3U/jhTCkf1l7Y7jZSIz/8mdllLnl9RJrd9Mk5+mj1N1ugzSI5cbyvpVtwhJEJM4zIBBJ7L6dF7awQQeK0R4ucuE0Dg2ZeetQLvq+8mypW3PiZP3HG+5OfnNSN3+ClXy9prjJCxB+wo4977wjtSO+7RqyVXvf9r5oKqVin36lIkc0qqfXrzQavTcQMEQiOAwAsNNROlgQACLw3QmTIUAokKvFCaWcYkZ7/yiEyYNTX+09X7DJTLdjggnS0xt+UEyn7/TmoWzpYeq6wtnbr3loUVtZZ3THsQSJ4AAi95ZlS4QwCBZ19W1gq8sy+/R155+1Ml5ZbsvNPX0IF95Zl7LpIZs+fL/664Ryb9Pk369+khF556qKwyfKB3DwLPvoeMjsIjgMALjzUzhU8AgRc+c2YMh4DtAq+mvk72fPAaafqa47YFhfLUIaeFA4hZnCMw5aVbZOEvn3p95xUUychDzpNol8HOrYOGIdAaAQRea4T4ucsEEHj2pWetwDNFhcAzJUddJhBA4GVCiqxhWQQQeDwbmUrAdoGnubMDL1OfPv/XVVM6V36586T4wDnqKxY9V9tAem57nP+TMSIE0kwAgZfmAJg+UAIIvEDxGg2OwDPCRhEE7CSAwLMzF7ryhwACzx+OjGIfARcE3ryKUrn9k9d5B559j491HdVXLJQfbzu2mcDrNmJt6bMTX0y2LiwaSpkAAi9lhAxgMQEEnn3hIPDsy4SOIGBMAIFnjI5CBwgg8BwIiRaNCLgg8IwWRpFvBMrqq+WLRdOkQ16RrNmxr+RFcnwbO4iBprxyuyz86SNvaI7QBkGYMW0hgMCzJQn6CIIAAi8IqqmNicBLjR/VELCKAALPqjhoxmcCCDyfgTKcNQQQeNZEYWUjc2sq5JxJb8ji+hqvv+HFPeR/QzeT/Jx/3hNtXeOxmJT9ob6YvGiOdF95LT5iYV1ANOQXAQSeXyQZx0YCCDz7UkHg2ZcJHUHAmAACzxgdhQ4QQOA5EBItGhFA4Blhy5qix2d8Jy/P+bnZes8Ysqms0bGPEwyKCnKkTUEeX6F1Ii2aTJYAAi9ZYtzvEgEEnn1pIfDsy4SOIGBMAIFnjI5CBwgg8BwIiRaNCCDwjLBlTRECL2uiZqEOEkDgORgaLSdMAIGXMKrQbkTghYaaiSAQPAEEXvCMmSF9BBB46WPPzMESQOAFy9f10efUVsi5E5seoe2ujtBubnSEtqakSuZ+OE3
qymul27p9pcOwzoHjYQde4IiZII0EEHhphM/UgRNA4AWOOOkJEHhJI6MAAvYSQODZmw2dpU4AgZc6Q0awkwACz85cbOrKj49YNFTXy8/XfCF1ZUvepaevYUeNkvZDg5V4CDybniR68ZsAAs9vooxnEwEEnk1pLOkFgWdfJnQEAWMCCDxjdBQ6QACB50BItGhEAIFnhI2iJAmUTlwgv9/3fbMqvQtvwB7DkxwpudsReMnx4m63CCDw3MqLbpMjgMBLjlcYdyPwwqDMHBAIiQACLyTQTJMWAgi8tGBn0gAJzC+LyO9zc6QhGpHhfXJlYPeolFfVBTgjQ2czgcpZFTLx+i+bIeiz7WDptcXAQLEg8ALFy+BpJoDAS3MATB8oAQReoHiNBkfgGWGjCAJ2EkDg2ZkLXflDAIHnD0dGsYNAjfJ0n/2aK9GoSE5ORHLVXyv3F+lajMCzI6HM7GLm63/I7Hf+8hZXPKiTDDlidcktyA10sQi8QPEyeJoJIPDSHADTB0oAgRcoXqPBEXhG2CiCgJ0EEHh25kJX/hBA4PnDkVHsIDCnNCI/Tc3xmmkUeL3Uq8iG9ULg2ZFQ5nZRV1ErDZV1UtSjXSiLROCFgplJ0kQAgZcm8EwbCgEEXiiYk5oEgZcULm6GgN0EEHh250N3qRFA4KXGj2q7CLADz6486CY4Agi84NgycvoJIPDSnwEdBEcAgRccW9OREXim5KiDgIUEEHgWhkJLvhFA4PmGkoEsIbCsd+BFK2ukZs4iKRrQXSK5S3bpcUHAVQIIPFeTo+9ECCDwEqHEPa4SQODZlxwCz75M6AgCxgQQeMboKHSAAALPgZBo0YhA06/QTn35K5l51ziJqS16Bb27yIDz9pPCvl2NxqUIAjYQQODZkAI9BEUAgRcUWca1gQACz4YUmveAwLMvEzqCgDEBBJ4xOgodIIDAcyAkWjQi0CjwyhZVyZe7Xy6x2vr4OB3WX0n6n7mX0bgUQcAGAgg8G1Kgh6AIIPCCIsu4NhBA4NmQAgJPenUpkjkl1RKzLw86gkBKBBB4KeGj2HICCDzLA6I9YwKNAm/213/IhJPuaTZOXqdiGf7QKcZjUwiBdBNA4KU7AeYPkgACL0i6jJ1uAgi8dCfw7/nZgWdfJnQEAWMCCDxjdBQ6QACB50BItGhEoOkR2u+VwKv8eWp8nF6Hby1dd17XaFyKIGADAQSeDSnQQ1AEEHhBkWVcGwgg8GxIoXkPCDz7MqEjCBgTQOAZo6PQAQIIPAdCokUjAk0F3sK55bLg5S+k+q850n7McOm82UiRSMRoXIqWTSDaEJXSWbOkfffukldY4AyqWH1M6hfUSW6XfMnJd+O5MBV4DdEGmTF7hvTs1ksKC9zJyJmHiUZ9IYDA8wUjg1hKAIFnXzAIPPsyoSMIGBNA4Bmjo9ABAgg8B0KiRSMCTQVeWWWd0RgUJU5g4fSZ8vE9D0pVyULJb1Mka+2/t/RfY7XEB0jTnXWz62TB8wslWtYgOUU50nmnTlI4uDCQbmqlQV7KnSI/5SyUUdFusmPDAMkRM2FoIvCmzJwqtz56tyxcVCJtitrI2L0OlZEj7M8okDAY1GoCCDyr46G5FAkg8FIEGEA5Ai8AqAwJgXQRQOClizzzhkEAgRcGZeZIBwEEXrjUP7j1bpkzaXJ80ryiQtntqoslkpMTbiNJzjb/0RKpnV4Tr8otzpWex/VIcpTEbr8q/3v5IGdm/OadG1aQo+tXTqy4xV0mAu+Ku6+T36f8Hh+pU4dOcu2ZlxvNTxEEgiSAwAuSLmOnmwACL90J/Ht+BJ59mdARBIwJIPCM0VHoAAEEngMh0aIRAQSeETbjoufPOE/qqqqb1W97zunSoVcwMsy40RaFs26cLbGa5p9g66UEXo4SeX5eNWr33T5F70htrOEfgSaF8ljN5kbTmAi8Ey45Vaqqq5rNd91ZV0jH9h2NeqAIAkERQOAFRZZxbSCAwLMhheY9IPDsy4SOIGBMAIF
njI5CBwgg8BwIiRaNCCDwjLAZF/342pvy87i34vU9R6womxw31ni8sApL3yuXxV9WxKcrGl4kXXbtHMj0Bxe+L/PlH4E2LNZRbqpd32guE4H3zBsvyOsfvhmfb81VRskx+9ufkREgipwmgMBzOj6ab4UAAs++RwSBZ18mdAQBYwIIPGN0FDpAAIHnQEi0aEQAgWeEzbgoFo3KH599JbN+/kW6DOgvwzbZUPLVMVrbr1hUpPLbxVI9pUYKexVI23XaSU6e2XvpWlvr1znz5Or8H6RcaqWzFMn5taNkeKxTa2VL/bmJwIuqjN7/4kP56feJMrjfCrLFeptLUaH9GRkBoshpAgg8p+OjeQSec88AAs+5yGgYAssmgMDj6chkAgi8TE43u9eGwMvu/G1dfY06Qjs9p0IGxNqL+uatcZsmAs94MgohEDIBBF7IwJkuVALswAsVd0KTIfASwsRNEHCDAALPjZzo0owAAs+MG1X2E0Dg2Z8RHZoTQOCZs6PSfgIIPPszokNzAgg8c3ZBVSLwgiLLuBBIAwEEXhqgM2VoBBB4oaFO30SxmLT95UcpmjFdokVtpGKlVaS+W/f09RPSzMkIvFg0Jg2zyyWnY6HktONIYUgRMU0KBBB4KcCj1HoCCDzrI6LBFAgg8FKAF1ApAi8gsAwLgXQQQOClgzpzhkUAgRcW6fTNU/Tnb1L884/xBmK5uVKy2VYSKyxKX1MhzJyowIuWVUvp7Z9J/YxSieTmSLvdVpU2mwwOoUOmgIA5AQSeOTsq7SeAwLM/Izo0J4DAM2cXVCUCLyiyjAuBNBBA4KUBOlOGRgCBFxrqtE3U4ctPpWDe3Gbzl669ntT16Jm2nsKYOFGBV/7U91L90Z//tBSJSNdLtla78dqE0SZzQMCIAALPCBtFjhBA4DkSFG0aEUDgGWELtAiBFyheBodAuAQQeOHyZrZwCSDwwuWdjtnYgVcvZZV1y0S/8Mr3vN13Ta+OR68nBatktuBMx7PInP4RQOD5x5KR7COAwLMvEzryjwACzz+Wfo2EwPOLJONAwAICCDwLQqCFwAgg8AJDu9yBy0ti8vPnMenYTWT42jmiNn0Fd6l34LWb9IsUTpvCO/CWQrn6q2lS/vDX8Z/k9u4gnc/YVCJ55l8IDS5MRobAEgIIPJ6ETCaAwMvkdFkbAs++ZwCBZ18mdAQBYwIIPGN0FDpAAIEXfkjTf43KwxfHpHpxzJt8+FoROeCc3PAbyfAZEz1CqzFUfzdDar+eKZHORdJu6xUlp5gPWWT44+H88hB4zkfIApZDAIHH45HJBBB49qWLwLMvEzqCgDEBBJ4xOgodIIDACz+k526OynfvRZtNfMItudK9X5Db8MJfZ7pnTEbgpbtX5odAsgQQeMkS436XCCDwXEqLXpMlgMBLlljw9yPwgmfMDBAIjQACLzTUTJQGAgi88KG/dGdUxr/RXOAdc12O9B7MkU0/00Dg+UmTsWwjgMCzLRH68ZMAAs9PmoxlGwEEnm2JiCDw7MuEjiBgTACBZ4yOQgcIIPDCD2nWHzG579wGqa1aMvew0RE56DyO0PqdBALPb6KMZxMBBJ5NadCL3wQQeH4TZTybCCDwbEpjSS8IPPsyoSMIGBNA4Bmjo9ABAgi89IRUOj8mE78Q6dg9FvxHLNKzxLTPisBLewQ0ECABBF6AcBk67QQQeGmPgAYCJIDACxCu4dAIPENwlEHARgIIPBtToSe/CCDw/CLJOLYRQODZlgj9+EkAgecnTcayjQACz7ZE6MdPAgg8P2n6MxYCzx+OjAIBKwgg8KyIgSYCIoDACwgsw6adAAIv7RHQQIAEEHgBwmXotBNA4KU9AhoIkAACL0C4hkMj8AzBUQYBGwkg8GxMhZ78IoDA84sk49hGAIFnWyL04ycBBJ6fNBnLNgIIPNsSoR8/CSDw/KTpz1gIPH84MgoErCCAwLMiBpoIiAACLyCwDJt2Agi8tEdAAwESQOAFCJeh004AgZf
2CGggQAIIvADhGg6NwDMERxkEbCSAwLMxFXryiwACzy+SjGMbAb8F3rzpC+XX8VOl9+BuMnhkX9uWSz9ZRgCBl2WBZ9lyEXhZFniWLReBZ1/gCDz7MqEjCBgTQOAZo6PQAQIIPAdCokUjAn4KvF8+/0sev/x1aWiIer1stOdo2fY/6xr1RREE/CCAwPODImPYSgCBZ2sy9OUHAQSeHxT9HQOB5y9PRoNAWgkg8NKKn8kDJoDACxgww6eNgJ8C745TnpXpk+bE1xKJROT8Z46QgqL8tK2PibObAAIvu/PP9NUj8DI94exeHwLPvvwRePZlQkcQMCaAwDNGR6EDBBB4DoREi0YEAhd4z46VgsI8o94ogkCqBBB4qRKk3mYCCDyb06G3VAkg8FIl6H89As9/powIgbQRQOClDT0Th0AAgRcCZKZICwE/BR5HaNMSIZMuhwACj8cjkwkg8DI5XdaGwLPvGUDg2ZcJHUHAmAACzxgdhQ4QQOA5EBItGhHwU+DpBuZNX6Q+YjGFj1gYpUGR3wQQeH4TZTybCCDwbEqDXvwmgMDzm2jq4yHwUmfICBCwhgACz5ooaCQAAgi8AKAypBUE/BZ4ViyKJiDwNwEEHo9CJhNA4GVyuqwNgWffM4DAsy8TOoKAMQEEnjE6Ch0ggMBzICRaNCKAwDPCRpEjBBB4jgRFm0YEEHhG2ChyhAACz76gEHj2ZUJHEDAmgMAzRkehAwQQeA6ERItGBBB4RtgocoQAAs+RoGjTiAACzwgbRY4QQODZFxQCz75M6AgCxgQQeMboKHSAAALPgZBo0YgAAs8IG0WOEEDgORIUbRoRQOAZYaPIEQIIPPuCQuDZlwkdQcCYAALPGB2FDhBA4DkQEi0aEUDgGWGjyBECCDxHgqJNIwIIPCNsFDlCAIFnX1AIPPsyoSMIGBNA4Bmjo9ABAgg8B0KiRSMCCDwjbBQ5QgCB50hQtGlEAIFnhI0iRwgg8OwLCoFnXyZ0BAFjAgg8Y3QUOkAAgedASLRoRACBZ4SNIkcIIPAcCYo2jQgg8IywUeQIAQSefUEh8OzLhI4gYEwAgWeMjkIHCCDwHAiJFo0IIPCMsFHkCAEEniNB0aYRAQSeETaKHCGAwLMvKKsFXsmicjnrsrtk9ryF8tKDl8Xp7XvMxTJx8hSRSMT7ex2K28qHz9/s/f8zF1S1SrlXlyKZU1ItsVbv5AYIuEUAgedWXnSbHAEEXnK8uNsdAgg8d7Ki0+QJIPCSZ0aFOwQQeO5kRafJE0DgJc8s6AprBd7iymrZT4m6TdZbQz74/PtmAm+Hg86Smy4+QYYO6vsvPgi8oB8ZxreZAALP5nToLVUCCLxUCVJvKwEEnq3J0JcfBBB4flBkDFsJIPBsTYa+/CCAwPODor9jWCvwKquqZX5JqffXhdc91EzgbbL7SfLkXRdIr+5dEHj+Pg+M5jgBBJ7jAdL+cgkg8HhAMpUAAi9Tk2VdmgACj+cgkwkg8DI5XdaGwLPvGbBW4DWi+mbCr/8SeKO2Hisbjxkp3/44Wbp16Sj/HbunbLzu6l4JO/Dse8joKDwCCLzwWDNT+AQQeOEzZ8ZwCCDwwuHMLOkhgMBLD3dmDYcAAi8czsySHgIIvPRwX96szgm8aDQm5119n2y72RhZb62V5YNPv5ezLr9LXn74Cm9HXjTW+pvt9JvzWr/LvrDoKLMILHmDI5ftBMZXL5Y7S+fJnIY62bpNBzm6Uw/J//v9m/72zhPRGk+N3fu9m9/AW0PFzx0k0PjbSgJ/jHFwdbSc1QTU793en735vTurHwP7Fs8DaV8mdNSSQLqf0hz1h5NE/ArJhUdAZ+LnFYmpy88Bl7YDr+X4/zn5Stljh01kxy3Xk1kJfMSip/qIxVw+YuFnTIxlQMDXXyh/z88OPIMgllNSFm2QIyumSW0sGr/r4KIusmthJ38nUqP5/HuxUX/zJpTJgh8qRHJi0mvtTtJpaDujcYIq6tguX2pro1JV1xD
UFIwLgbQQKC7Kk/Zt86Wiul7KK+vS0gOTQiAoAkX5OeoYbZ4sWlwb1BSMC4GkCfj1T6zswEsaPQVJEPBX1SQx8d+39u7aJiG/kvzIVJgS0Jn4eQUu8CqramTyn9Nl9ZWHxPs+8PjL5KA9t5ZtNl2bI7R+pslYzhFA4Pkb2Td1lXJp5exmg66R10bOb9fb34ksGK18apX8NW5us06G7t5L2nQvtKC7JS1whNaaKGjEZwIcofUZKMNZRYAjtFbFQTM+E0Dg+QyU4awiwBFaq+LwmnHuCO2i0grZat/T5KZLjpf111pVPvriBzn9kjvl1UeulK6dOyDw7HvG6ChEAgg8f2FXxBrkiPKpagfeP/slg9qB52/nyY8264uFMv+7smaFvdbpJN1HdUx+sIAqEHgBgWXYtBNA4KU9AhoIkAACL0C4DJ12Agi8tEdAAwESQOAFCNdwaGsF3tsffS2nXXyH98KMuvoGyc/Pk0H9e8nz918qH3z2vVx7xxMyd8Ei6durm5xx3H6y7uiVPQR8xMLwSaAsIwgg8PyP8fv6KnmkukTmR+tl4/xiOUgdoQ3mHXj+957MiEvdgbdHb2nTrSCZYQK9F4EXKF4GTyOBRAXe5DmzZEbpQlmlT3/pXtw+jR0zNQQSJ4DAS5wVd7pHAIHnXmZ0nDgBBF7irMK601qBZwoAgWdKjrpMIIDAy4QU07eG+eodePMnVKj38cWkx+iO0nl4cfqaWcrMCDyr4qAZHwkkIvCeGP+pfPL7JG/WvNxcGbvB5rJy734+dsFQEAiGAAIvGK6MagcBBJ4dOdBFMAQQeMFwTWVUBF4q9KiFgGUEEHiWBUI7vhJA4PmKk8EsItCawCurrpRzX3pKHUr45zj/Sr36yrGbbG3RKmgFAksngMDjychkAgi8TE6XtSHw7HsGEHj2ZUJHEDAmgMAzRkehAwQQeA6ERItGBFoVeFWVcs5LTzYbG4FnhJqiNBBA4KUBOlOGRgCBFxpqJkoDAQReGqC3MiUCz75M6AgCxgQQeMboMqYwWrFQYhWLJLfXIIk1RKX8j6nSUFkt7VccKHltipxeJwLP6fhofjkEWhN4uvTpbz6XDyf/4o3CEVoeJ5cIIPBcSotekyWAwEuWGPe7RACBZ19aCDz7MqEjCBgTQOAZo8uIwppx90jt2w+ptcQkMnB1mdd5K6meU+KtLUfJu0EH7CwFHd198T0CLyMeUxaxFAKJCDxd9uvcWTJzER+x4CFyiwACz6286DY5Agi85Hhxt1sEEHj25YXAsy8TOoKAMQEEnjE65wsbZv0hldceFF9HdayDzGu/qUQ6dIv/vS5rj5SeG6/t7FoReM5GR+OtEEhU4AESAi4SQOC5mBo9J0oAgZcoKe5zkQACz77UEHj2ZUJHEDAmgMAzRud8Ye2nL0jNs9fE11EV6yjzCsdITrd/vlKJwHM+ZhaQoQQQeBkaLMvyCCDweBAymQACL5PTZW0IPPueAQSefZnQEQSMCSDwjNE5XxhTX6lcfNV+Eiub761Ff6xywYqHS/XiJV+t5Ait8xGzgAwmgMDL4HBZGgKPZyCjCSDwMjrerF8cAs++RwCBZ18mdAQBYwIIPGN0GVEYLZkldR88IdHyEslfdxfJHTKaj1hkRLIsItMJIPAyPeHsXh878LI7/0xfPQIv0xPO7vUh8OzLH4FnXyZ0BAFjAgg8Y3QUOkCAd+A5EBItGhFA4Blho8gRAgg8R4KiTSMCCDwjbBQ5QgCBZ19QCDz7MqEjCBgTQOAZo6PQAQIIPAdCokUjAgg8I2wUOUIAgedIULRpRACBZ4SNIkcIIPDsCwqBZ18mdAQBYwIIPGN0FDpAAIHnQEi0aEQAgWeEjSJHCCDwHAmKNo0IIPCMsFHkCAEEnn1BIfDsy4SOIGBMAIFnjI5CBwggYoarlgAAIABJREFU8BwIiRaNCCDwjLBR5AgBBJ4jQdGmEQEEnhE2ihwhgMCzLygEnn2Z0BEEjAkg8IzRUeg
AAQSeAyHRohEBBJ4RNoocIYDAcyQo2jQigMAzwkaRIwQQePYFhcCzLxM6goAxAQSeMToKHSCAwHMgJFo0IoDAM8LmdNHimjIpr14kPTr0k5xIjtNraa15BF5rhPi5ywQQeC6nR++tEUDgtUYo/J8j8MJnzowQCIwAAi8wtAxsAQEEngUh0EIgBBB4gWC1dtBPfxsnr3z/oDREG6R3x4Fy+MbnSPuiztb2m2pjCLxUCVJvMwEEns3p0FuqBBB4qRL0vx6B5z9TRoRA2ggg8NKGnolDIIDACwEyU6SFAAIvLdjTMml51UK57NUjJRaLxedfd8g2stvosWnpJ4xJEXhhUGaOdBFA4KWLPPOGQQCBFwbl5OZA4CXHi7shYDUBBJ7V8dBcigQQeCkCpNxaAgg8a6PxvbEfZ3whj3x6TbNx+3cZJsdvcYXvc9kyIALPliToIwgCCLwgqDKmLQQQeLYk8U8fCDz7MqEjCBgTQOAZo6PQAQIIPAdCokUjAgg8I2xOFtU31MnN75wpc0qnxvvfZ50TZPQKmzi5nkSaRuAlQol7XCWAwHM1OfpOhAACLxFK4d6DwAuXN7NBIFACCLxA8TJ4mgkg8NIcANMHRgCBFxhao4FrG2pkTtV06dmmnxTkFhqNsbyiCvUBi/d+eU5Kq+bLyP4bysh+6/o+h00DIvBsSoNe/CaAwPObKOPZRACBZ1MaS3pB4NmXCR1BwJgAAs8YHYUOEEDgORASLRoRQOAZYQuk6LdFP8p9k66VspoSaZffQY5c6SwZ2mnVQObKlkEReNmSdHau00TgVeZ+K3U5s6Vt/VqSH+ueneBYtRMEEHj2xYTAsy8TOoKAMQEEnjE6Ch0ggMBzICRaNCKAwDPCFkjRJV8fJ7MXT4uP3atdfzlvzdsCmStbBkXgZUvS2bnOZAReSWmdvPDxuzJtZkz6DJwh6275lQzN+6+0aRiRnfBYtfUEEHj2RYTAsy8TOoKAMQEEnjE6Ch0ggMBzICRaNCKAwDPCFkjRSR/vIfXRuvjYkUhErlv/SSnMLQpkvmwYFIGXDSln7xqTEXj3PvWn/DX3zzisfkOmyG67l0nPqpOyFyArt5oAAs++eBB49mVCRxAwJoDAM0ZHoQMEEHgOhESLRgQQeEbYAil69Ndb5NPZb8XHXr/XVnLAiicEMle2DIrAy5aks3OdiQq8uvqYXHn7H1KTMyUOKr+wVo48cYL0rjo9O+GxausJIPDsiwiBZ18mdAQBYwIIPGN0FDpAAIHnQEi0aEQAgWeELZCi+mi9fDDrFZm86CcZ0nEl2azPzpKXkxfIXNkyKAIvW5LOznUmKvA0nQefmS1/zJom0Ui5B6v/kOly8I7rcIQ2Ox8dJ1aNwLMvJgSefZnQEQSMCSDwjNFR6AABBJ4DIdGiEQEEnhE2ihwhgMBzJCjaNCKQjMDT78B784MS+XPmAhkwsEK23XAF6Vrc02heiiAQBgEEXhiUk5sDgZccL+6GgNUEEHhWx0NzKRJA4KUIkHJrCSDwrI2GxnwggMDzASJDWEsgGYFn7SJoDALLIIDAs+/RQODZlwkdQcCYAALPGB2FDhBA4IUfUsGnk6Twu7+koWdHqdpulMTaFobfRBbMiMDLgpCzeIkIvCwOPwuWjsDLgpCzeIkIPPvCR+DZlwkdQcCYAALPGB2FDhBA4IUbUtG7P0q7xz+OT1o/qKeUnrWrSE4k3EayYDYEXhaEnMVLROBlcfhZsHQEXhaEnMVLRODZFz4Cz75M6AgCxgQQeMboKHSAAAIv3JA6XP685P85p9mkiy7eRxp6dw63kSyYDYGXBSFn8RIReFkcfhYsHYGXBSFn8RIRePaFj8CzLxM6goAxAQSeMToKHSCAwAs3pHb3viNFX0yOTxrLy5GSG/4jUpQfbiNZMBsCLwtCtnSJNQ11MrNmvvQp7CaFucH82kbgWRo+bflCAIHnC0YGsZQAAs++YBB49mVCRxAwJoDAM0ZHoQMEEHjhhpQ
zv0w63PCK5M4tEy3vFh+4sdRsMCLcJrJkNgRelgRt2TJ/Lp8iN0x5ThbVlUv7vLZy2sC9ZOX2K/jeJQLPd6QMaBEBBJ5FYdCK7wQQeL4jTXlABF7KCBkAAvYQQODZkwWd+E8Agec/01ZHbIhK7syFEu1azAcsWoVlfgMCz5wdleYETp54h0yvmhcfoF+b7nLDiGPMB1xGJQLPd6QMaBEBBJ5FYdCK7wQQeL4jTXlABF7KCBkAAvYQQODZkwWd+E8Agec/U0a0gwACz44csq2L/b6/XOqj9fFlRyQiD69+lhTl+HuUFoGXbU9Wdq0XgZddeWfbahF49iWOwLMvEzqCgDEBBJ4xOgodIIDAcyAkWjQigMAzwkZRigTunPqyvLPg2/goW3QdJUcP2CnFUf9djsDzHSkDWkQAgWdRGLTiOwEEnu9IUx4QgZcyQgaAgD0EEHj2ZEEn/hNA4PnPlBHtIIDAsyMHF7uojdbI/Prp0iN/gORFkts5Vx9rkHHzvpSfK6bKiHb9ZYceY9QYub5jQOD5jpQBLSKAwLMoDFrxnQACz3ekKQ+IwEsZIQNAwB4CCDx7sqAT/wkg8Pxnyoh2EEDg2ZGDa138Vf2jPFpyuVQ2lKmPUHSRfTufIQOLVrVuGQg86yKhIR8JIPB8hMlQ1hFA4FkXiSDw7MuEjiBgTACBZ4yOQgcIIPAcCIkWjQgg8IywBVpUIWVSmlMifaIrqDfDRQKdy3TwG2cfK/PqpsbLu6tdeP/tdbvpcIHVIfACQ8vAFhBA4FkQAi0ERgCBFxha44EReMboKISAfQQQePZlQkf+EUDg+ceSkewigMCzK48385+RlwsekQb1nwHRoXJs9fnSIdbFqibLGxbKVTMPlpj6T+OVq47QXtzveav61M0g8KyLhIZ8JIDA8xEmQ1lHAIFnXSTswLMvEjqCgDkBBJ45OyrtJ4DAsz8jOjQjgMAz4xZEVUlkrpzX9vBmYmzT+h1l75qjg5gupTGfLrlevlv8bnyMtYu3lV07H5/SmEEUI/CCoMqYthBA4NmSBH0EQQCBFwTV1MZkB15q/KiGgFUEEHhWxUEzPhNA4PkMlOGsIYDAsyYKGZ/3gdxfeE2zhlaIDpczq66zp8m/O2mI1csnFS/KlJqfZWjhGrJO8XaSG8mzrk8EnnWR0JCPBBB4PsJkKOsIIPCsi4QdePZFQkcQMCeAwDNnR6X9BBB49mdEh2YEEHhm3IKoqpM6uaTtMTI/Mjs+/GHVp8taDZsEMV1WjInAy4qYs3aRCLysjT4rFo7Asy9mduDZlwkdQcCYAALPGB2FDhBA4DkQEi0aEbBK4NXVS5v3vpP832ZK3YgBUrXxaiJ5uUbrcrWoNLJA3sp7XhbmzpMxtZvLyOgYV5diRd8IPCtioImACCDwAgLLsFYQQOBZEUOzJhB49mVCRxAwJoDAM0ZHoQMEEHgOhESLRgRsEnjtH3hDij7/Jb6Oqs3WkIp9NzVaF0UQ0AQQeDwHmUwAgZfJ6bI2BJ59zwACz75M6AgCxgQQeMboKHSAAALPgZBo0YiANQIvGpVuJ90hkdq6+DqiHdrJgmvGGq2LIggg8HgGMp0AAi/TE87u9SHw7MsfgWdfJnQEAWMCCDxjdBQ6QACB50BItGhEwBqBp7rvcu6DkjtvUXwd9f17yMJz9zdaF0UQQODxDGQ6AQRepiec3etD4NmXPwLPvkzoCALGBBB4xugodIAAAs+BkGjRiIBNAi//l6nS4d5xklNRJdGOxVJ2xLZSt2I/o3Utq2h6WZ38MK9GaupiMrRLvqzWo8jX8RnMLgIcobUrD7rxlwACz1+ejGYXAQSeXXnobhB49mVCRxAwJoDAM0ZHoQMEEHgOhESLRgRsEnjeAtQR2rzZC6W+d1eRfH8/YFFeG5XXJpdLNPYPqjF928jgzgVG7CiynwACz/6M6NCcAALPnB2V9hNA4NmXEQLPvkzoCALGBBB4xugodIAAAs+BkGjRiIB1As9
oFYkVTSmtk0+nVTa7eYWOBbJ+/zaJDcBdzhFA4DkXGQ0nQQCBlwQsbnWOAALPvsgQePZlQkcQMCaAwDNGl9bCWFWVNMydLbn9Bkgk19/dLmldmM+TI/B8Bspw1hDIJoHHDjxrHrvQGkHghYaaidJAAIGXBuhMGRoBBF5oqBOeCIGXMCpuhID9BBB49mfUssPaD96RxQ/cJVJTIzk9e0vxWedLbq8+7i0khI4ReCFATsMUM6Iz5Z3o+97Mm+VsLP1z/H3fWhqWlPSU2STwNBzvHXhz1Tvw6mMypEuBjOxRmDQzCtwhgMBzJys6TZ4AAi95ZlS4QwCBZ19WCDz7MqEjCBgTQOAZo0tLYUxJu0VHHqjeN1Ubnz9/nfWl+OQz09KP7ZMi8GxPKPn+5sbmyeW1V0ud+o++CiIFclb+adIz0iP5wRyuyDaB53BUtG5AAIFnAI0SZwgg8JyJikYNCCDwDKAFXILACxgww0MgTAIIvDBppz5X3a8TpeKC5rIup1Nn6XjHg6kPnoEjIPAyL9R3G96XZ+tfaLawXfJ2lK1zt8y8xS5nRQi8rIo76xaLwMu6yLNqwQi8rIo76xaLwLMvcgSefZnQEQSMCSDwjNGlrbD8wrOlftLP8fnbHHSYFG2/S9r6sXliBJ7N6Zj19k30W7mv7qFmxYfnHyKjc0aZDehoFQLP0eBoOyECCLyEMHGTowQQeI4GR9sJEUDgJYQp1JusFngli8rlrMvuktnzFspLD14WBzNt5lw57+r7ZdJvU6VPr25yzkkHyejVhnk/n7mgqlWAvboUyZySaom1eic3QMAtAgg8t/LS3cYWL5bqcS9Jw9S/JH+tMVK40WYikYh7CwmhYwReCJBDniIqUXmo/lEZ3/C1N/NaOaPlkPwDJUf9J5suBF42pZ19a0XgZV/m2bRiBF42pZ19a0Xg2Ze5tQJvcWW17HfMxbLJemvIB59/30zgHXLSFbL5hqPlwN23kk/H/6Rk3n3y1pPXSX5eLgLPvmeMjkIkgMALETZThU4AgRc68tAmLImVKJUXk26RrqHNadNECDyb0qAXvwkg8Pwmyng2EUDg2ZQGvfhNAIHnN9HUx7NW4FVWVcv8klLvrwuveygu8BYsLJNt9z9DPnvlNsnLzfUI7Dn2Ajnj2P1knVEjEHipPxOM4DABBJ7D4dF6qwQyUuBFY9Lui7+kcMJMibUrkMUbDJHagV1aZcENmUUAgZdZebKa5gQQeDwRmUwAgZfJ6bI2BJ59z4C1Aq8R1TcTfm0m8L6ZMFkuvv4heeGBS+M0T73odhkzemXZe6dNEXj2PWN0FCIBBF6IsJkqdAKZKPCKfpgpxe9N+odlTkRK/rOuRIuLQufLhOkjgMBLH3tmDp4AAi94xsyQPgIIvPSxZ+bgCSDwgmec7AzOCbxPx/8oN9/7rDxx5wXxtZ571X2y4uB+cvBe20h1bUOrDLTkqKmLtnofN0DANQI56t1pubkRqavn+XYtO/ptnUB+XkQa1KMdVbvWMuXKe2GC5Pwyu9ly6ndZTaIr98qUJbKOBAjkqd+383JzpL4hpv7i9+8EkHGLQwRy1L+YyFV/8WcTh0Kj1YQJ5OflqD+bxDLqzyYJL54bM55AUUFuQn4l40FYtECdiZ9XJKYuPwdsuQPv2x8nywXXPtjsnXinXHibrL/WqrLnjptISXltq9N3bl8gi9R9vjba6qzcAIHgCeg/RBQpQV1eVR/8ZMwAgZAJtGuTJ/XqX77UZJCgLvhuhhS9MzFOMqbe5Vpx+LoSYwdeyE9XeqfTfxhrW7jkD8mVNa3/i8j0duvf7LNmz5JvJnwnA/uvIKuMWDnlgRfMKJNcxbJT93Ypj8UA/hEoUH82KVB/Nqngzyb+QWUkawjoHdS16s8mtRn0ZxNr4NJI2gl0Ud4kEb+S9kazqAGdiZ9X4AJvYWm5bLn3qfLxi7dKm6IlzW93wBly+dljZdSqwzhC62eajOUcAY7QOhcZDSdBIG1HaNX
/7uTd+qhEvp8osRUHSv3xB4j06p5E58u5teU78NYfLLWDsvNDDv4AdXOUbDxC+80P38it99wsDQ1LhOU2m20r+++lfm0ZXFrsP3PlJzL5y5le9Vo7DJPtjh5tMBIlQRDgCG0QVBnTFgIcobUlCfoIggBHaIOgmtqYzh2h1cs9/JSrZe01RsjYA3aUce994R2pHffo1eroYA4CL7XngWrHCSDwHA+Q9pdLIF0CL/eS2yVXfQ298YoNXUHqbvofaUHANwLZKPCuuOEymTj5n92nEfUKiDuvv0eKCguT5vrN67/Lq7eNb1a3/8WbypBRPZMeiwL/CSDw/GfKiPYQQODZkwWd+E8Agec/01RHtFbgvf3R13LaxXeIqBO5dfUNkp+fJ4P695Ln779UZsyeL/+74h6Z9Ps06d+nh1x46qGyyvCBHouZC6paZdKrS5HMKanmCG2rpLjBNQIIPNcSo99kCKRL4OXv/V+JLG7yvy1KNNQ+c5Mo05BM+9wLgWUSyEaBd9HVF8off/0eZ6IF3l033CuFBckfDdHyTku8ptfmB68mG+yV+rFcHtvUCSDwUmfICPYSQODZmw2dpU4AgZc6Q79HsFbgmS4UgWdKjrpMIIDAy4QUWcOyCKRL4LEDj2dyaQQqFsfkm+9E2raJyRqr56iPUJhzykaB98NP38uNd94QP0K73Zbby76772cEceZvJfLA6e9I9O93UBW1K5Ajb9laOvIuPCOefhch8Pwmyng2EUDg2ZQGvfhNAIHnN9HUx0Pgpc6QESBgDQEEnjVR0EgABNIl8KTJO/BkpSFSd8y+/r0DLwBODBk8gfkLRK69KSrl5Us+hzVgQEROOSFH1GEBoysbBZ4GNXvObPnup+9khb4DZKXhqe2WmzZxvox/9TfJUx9LWH+PlaRr3/ZGWVDkPwEEnv9MGdEeAgg8e7KgE/8JIPD8Z5rqiFkp8HqrI7Sz1BFaLghkGoFUBd7curlSVl8qvQp6S3Fucabh+dd66stF5j8RkYofY1K0gkiPg0QKfPo2QcbDS8MC0ybw0rBWprSbwKuvR+W1N5p/y/7YI3NklZUiRo1nq8AzgkWRcwT0V5bbFOTIwoo653qnYQi0RgCB1xohfu4yAQSefekh8OzLhI4gYEwgFYH3zeLx8mf1H97cORG1g6F4Q+lZ0Mu4FxcKZ96h5N03//xDuJZ4A851ofPs7BGBl52527jqF16OylvvIvBszIae7COAwLMvEzryjwACzz+WjGQfAQSenZn42VUkpi4/B0x2rETegccOvGSpcr8rBEwFXnW0Sl5d+HKzZfbM7yUbdtjYlaUb9fn7qRFpKGvyW5baPDP0FiUw+TaBEc+gixB4QRNm/EQJlJbG5IrrYvEjtIMH5ciJx0Y4QpsoQO7LKgIIPPfjXjBvkUz+5S+prqqRvgN6ytARK4j+8AyXCAKPpyCTCSDw7EuXHXj2ZUJHEDAmgMBLDl3LHXiF/URWuCC5Mbg7PAIIvPBYM1PrBPiIReuMuAMCmgACz+3noLa2Tj56Z7z6SExDfCErrjJIVhjc1+2F+dQ9As8nkAxjJQEEnn2xIPDsy4SOIGBMwFTg6QlbHqHdoP3G0iO/h3EvLhTqd+DN/T+RyonqHzCW8w68mFSrf9NcKRLrrJbFv3FOV7YIvHSRZ96gCfAOvKAJM346CSDw0kk/9bnnz1ko3375U7OBunbvLKPXXSX1wTNgBAReBoTIEpZJAIFn38OBwLMvEzqCgDGBVASennRO3Rwpry/Lmo9YJAI6kvOTxHI/V9ouqgReV4nVb6vK2iZSyj0+E0Dg+QyU4awhgMCzJgoaCYAAAi8AqCEO6e3Ae/sriTaoPwf9fbED758AEHghPoxMFToBBF7oyFudEIHXKiJugIA7BFIVeO6sNKxOKyWS/5ia7J/35MWiK4s0bBBWA8zThAACj8chUwkg8DI1WdalCSDw3H8Omr4Dr3e/7rLiyoN4B97fsSLw3H++WcGyCSD
w7Hs6EHj2ZUJHEDAmgMAzRrf0wsg0ieS93uxnsZg6Vly/i88TMVwiBJoKvLqGBvnq5wkyo2SeDOrZV0YPX0l9PZnjzYlw5B77CCDw7MuEjvwjkLkCr0Htzv9T/Ss+9QJdKfIPGCOlTKChqlJqyxZJUfdeEsnJSXm85Q2AwAsUL4OnmQACL80BLGV6BJ59mdARBIwJIPCM0S2jUL2wOe8F9W+ZS+I/j9VvqjbkDfN7IsZLgEBTgffch+/IpKl/xavWWXk12WL0OgmMwi0QsI8AAs++TOjIPwKZKPByZLLkRS6RHJmrXo3bTmqjZ6gXbYzxDxojGROY9/n7Mm3cM+q0RIO06dVPhhx4jBR06mI8XmuFCLzWCPFzlwkg8OxLD4FnXyZ0BAFjAgg8Y3TLKawSyZ2gJF6Zev/LEInEBgUxCWMmQKBR4JVV1sh1Tz4ssdg/R5vbtWkrJ+6xXwKjcItLBOobYjJhWq3MWNAg7YpyZNSgAunUNtjdFOngg8BLB3XmDItAJgq8gsipSt7982GHmHSVmtijYSFlnmUQqCsvlQlXn63+Res/fz7oNmZjGbBTcH8+QODxOGYyAQSefeki8OzLhI4gYEwAgWeMjkIHCDTdgXfLc09IReXieNd9e/SUg7fe0YFV0GIyBH6YUiu/za6Ll+jf47ZZvUjycjPruDQCL5mngntdI5CJAq8wsoc6PvvP/wbpTGpij6njtMHt9HIt93T0W/rrT/L7w7c2m7ptv0Ey4ugzAmsHgRcYWga2gAACz4IQWrSAwLMvEzqCgDEBBJ4xOgodINBU4P0xc7q88PF7UlNbK8Vq991em20lvbp0c2AV9rZYp1gumj1LOvXqLfkFBVY0+s6EKimt/OfLh7qpTVYukq7tc63oz68mEHh+kWQcGwlkosDLi9wvefJUHHdDbEOpk3NtxG/ck97lPnHyD1JaViIjho6UTp26Go8VVmEs2iC/3Ha5VM+ZGZ9yhT0Pla5rBHe8GYEXVrrMkw4CCLx0UF/+nAg8+zKhIwgYE0DgGaOj0AECLb9CW1tfLyXqJdXdOnZWO7IyS+iEHcfMXyfJG3fcIdUV5VJU3F62OeYY6bPi8LDb+Nd87MBLewQ0AIGUCWSiwBP1xrtceUV9POkbtetuuNRFd1Wv2miTMiubBnjy+Xvlh5+/8loqKCiUw/Y/Wfr3tf81InWLy2Xux29LTcl86bzaaOm86pqBYkXgBYqXwdNMAIGX5gCWMj0Cz75M6AgCxgQQeMboKHSAQEuB50DLzrT4xAXnycKZ/+xY6Nynj+x70SVp71+/A09LvJkLeQde2sOgAQgYEshMgWcIw5GyefNny413XdCs25Erry377HaEIysIr00EXnismSl8Agi88Jm3NiMCrzVC/BwCDhFA4DkUFq0mTQCBlzSyhAvuOvYoidbVx+/Pyc+To26/K+F6bkyNAEdoU+NHtd0EEHh257O07hB4iWeGwEucFXe6RwCBZ19mCDz7MqEjCBgTQOAZo6PQAQIIvOBCev+hB+WXjz+KT7DShhvJpoccGtyEjNyMQKPAm7e4VhbW1EtRLKLer8UFgcwggMBzM8cnX1BHaH9y7wht2LQReGETZ74wCSDwwqSd2FwIvMQ4cRcEnCCAwHMiJpo0JIDAMwSXQFmDep/gD++8LbN+myy9hw6TkVtsKbl5KKQE0PlyixZ4FTkic5W8a4jGvDE7RyNS5MvoDAKB9BJA4KWXv+nsLT9iUdW+k+SpD1t0538bmiFF4Jk+YdS5QACBZ19KCDz7MqEjCBgTQOAZo6PQAQIIPAdCokUjAu2UwJvS0CBRJe8aBV6+8njd1E48Lgikm4B+Jj+dtkCmlFbKkM7Fsk7fzpKbk/izicBLd4KpzV+npN1VJaXyWXWNN9A27drIiZ06pDZoBlUj8DIoTJbyLwIIPPseCgSefZnQEQSMCSDwjNFR6AABBJ4DIdGiEYG2SuBNReAZsaMoeAIv/DJDxs9cFJ9oLSX
wdh3RJ+GJEXgJo7LyxrcXV8kNi8qa9XZJt84yurDAyn7DbgqBFzZx5guTAAIvTNqJzYXAS4wTd0HACQIIPCdioklDAgg8Q3CUWU8gyCO0ddWLpbayQtp27iGRSOK7pqyHRoOhEIiq3VeXfDBR6hqi8fmKC/PlrA1XTHh+BF7CqKy88VYl78Ypidf0OqRDsezdvp2V/YbdFAIvbOLMFyYBBF6YtBObC4GXGKeMvmvOgp/kj2nvS0z9p3+vtdVfYzJ6vZm8OAReJqfL2hB4PAOZSiCoj1jM/PkL+fPL19TR3KgUd+0jq2x1oBS0bZ+pGJ1ZV1lNjSysqpYBHTs4IVVv+vw3mbd4yfFJfXVvVygnrTs0Yd4IvIRRWXnjVPWF8uPnLZCGJa/nlHaRHLmtR2fehfd3Wgg8Kx9bmvKJAALPJ5A+DoPA8xGmVUOp/5UtmFchEfWf2m5tJZan3o69lKt88SwZ/+P9zX6y2op7S7fOw6xaDs0kRgCBlxgn7nKTAALPzdzounUCjQKvoqpeyirrWi9I4I7aynL58slrRL+IvvHqvdI6MnS9nRKo5pagCDw7cZI8OuEn9a7DqHqfXGc5b6MNpHMbuz9X8sfCxfLEhOlSqUROe7X7bp9V+srAzonvvkLgBfU0hTfur7V18nJllfcRi52L28mgfD5y1EgfgRfec8hM4RNA4IXPvLUZEXitEXLw55H6Bunw7WzJ/fsfAhqK8qRsdG+J5ef+azVTZn7i7b5reg3ovZ4MGbC5gyunZQQez0AmE0DgZXK62b22IATe/Ck/yy/vPN4MbHH3fjJqp6MyFnZ0/pJjnjndlv4vLdO98HmLK2Xsq+OaSdUdhg3Wzoj/AAAgAElEQVSVI0evke7WWp2/Vh2hnV9ZIz3aFUleEh+w0AMj8FrFyw0OE0DgORwerbdKAIHXKqLQb0DghY48+AkLZ1dIu0nzm020eFhXqenz72MzZWoH3tfswAs+lJBmQOCFBJpp0kIAgZcW7EwaAoEgBF60oV6+fu4WqS4via9g+CZ7SY8hI0NYUchTKG/X8HCVRL+o9SaOrF0geYe2USYv5D5ame6jqdPk2s++aHbXsK5d5NotM/tfmiLw7HoO6cZfAgg8f3kyml0EEHh25aG7QeDZl0nKHRXMXSzFv8xrLvCGdJaafh2XOnb8HXixqAzova70U+/B43KTAALPzdzoOjECCLzEOHGXewSCEHiagv6AxdTvP5TaxaVK3K0hXVcY4R6cBDqOfl0nDfdWNrsz94i2krNmfgLV4d2iPwRx/OtvyOyKxfFJT1tvHdlowIDwmkjDTAi8NEBnytAIIPBCQ81EaSCAwEsD9FamRODZl0nqHdVHpdO3sySn6RHaNdUR2rx/H6FNfTJGsIkAAs+mNOjFbwIIPL+JMp4tBIISeLasL+g+Gp6vluib/3xkQc+Xs3Wh5O5m37vlSqqq5PmJv6rjqFWy6cABMqZvn6DxpH18BF7aI6CBAAkg8AKEy9BpJ4DAS3sE/2oAgWdfJv50pCRewfxK9QkLWe5HLPyZjFFsIYDAsyWJ5fcxZ85UGT/+bakoXyiDBq0io9fcQvLy7NopYiNJBJ6NqfjXU0OsSkrlSzVgRDrKOpIbsU+++Lfa5iMh8FIjG5sdlforK0Rq/v5gR4FI3tnFEunFv7hMjaw/1Qg8fzgyip0EEHh25kJX/hBA4PnD0c9REHh+0mQsCKSZAAIvzQEkMH1dbY0888xNov9v4zVy9Y1ljVGbJFCd3bcg8DI3/3pZLJOj5yv/MstbZFGkjwzNuVDypNjqRdeqI6ozJ34pObl50nelMZKbp8yRwYXAM4DWoiQ2rV6i79WpD0SI5G6eL5H+fCUzdar+jIDA84cjo9hJAIFnZy505Q8BBJ4/HP0cBYHnJ03GgkCaCSDw0hxAAtPPnTtdXn/tgWZ3du3eV3bY4bAEqpd+S/1fE6XmpQckVjpf8kZtLIXb7i+RDNzRh8AzfkSsL5wfe1u
mR+9v1me/nMOkW2RLa3uvrlgk7z98vlSpX3f66tRzkGx44HmSX5D8zkEEnrUx05gPBBB4PkBkCGsJIPCsjYbGfCCAwPMBos9DIPB8BspwEEgnAQReOuknNrfeeffsMzdLbW11vCCVHXixqgpZfNXxEqv+5wXuhVvtIwVb7JFYQw7dhcBzKKwkW3VR4P362Yvy0/tPNlvp2rscL/1WXj/J1Ysg8JJGRoFDBBB4DoVFq0kTQOAljYwChwgg8OwLC4FnXyZ0BAFjAgg8Y3ShFjZ9B97AgSvLWmtvJbnqCJ7JVT/pW6l64Ipmpbkrri5tDzvHZDiraxB4VseTUnP1UqGO0J6rjtDO9cYpjPSWYTkXqyO07VIaN8hiBF6QdBk7kwgg8DIpTdbSkgACj2cikwkg8OxLF4FnXyZ0BAFjAgg8Y3TOFi7ZgXeC2oG3OL4GduA5G2dWN+7aRyyq1Udo3n/kgvgR2o49B8rGB14geQWFSefIDrykkVHgEAEEnkNh0WrSBBB4SSOjwCECCDz7wkLg2ZcJHUHAmAACzxid04UNf/4i1S8/yDvwnE6R5l0kwEcsXEyNnsMmgMALmzjzhUkAgRcmbeYKmwACL2zirc+HwGudEXdAwBkCCDxnoqJRAwIcoTWARokTBNiB50RMNGlIAIFnCI4yJwgg8JyIiSYNCSDwDMEFWIbACxAuQ0MgbAIIvLCJM1+YBBB4YdJmrjAJIPDCpM1cYRNA4IVNnPnCJIDAC5M2c4VNAIEXNvHW50Pgtc6IOyDgDIEgBF5l5ULJyyuUgoK2znCg0cwkgMDLzFxZFV+h5RnIbAIIvMzON9tXh8DL9icgs9ePwLMvXwSefZnQEQSMCfgp8Boa6uTnCS9JWelMr5++/deUgYPXN+6NQgikSgCBlypB6m0lwA48W5OhLz8IIPD8oMgYthJA4NmaDH35QQCB5wdFf8dA4PnLk9EgkFYCfgq86VO/kil/ft5sPSNH7SXtO/RK6xqZPHsJIPCyN/tMXzkCL9MTzu71IfCyO/9MXz0CL9MTzu71IfDsyx+BZ18mdAQBYwJ+CryfJrwoi0qmNutl8LDNpHefVY37oxACqRBA4KVCj1qbCSDwbE6H3lIlgMBLlSD1NhNA4NmcDr2lSgCBlypB/+sReP4zZUQIpI2AnwKvdOF0+fGH5+NrKShsJ2usuZ/k57dJ2/qYOLsJ/D975wEgV1W2/2fu9Nnekmw2vXeSEEhCSQi9S1WUrqB8ooIiqPiJKGBBROFTBAVFQP8oijTpLQTSIBDSe91sdjfby8zs1P+dCdlkUnZm7txy7p3nYlTYc973Pb/3bJj89t5zKfDyu/9WXj0FnpW7y7VR4HEPWJkABZ6Vu8u1UeCJtwco8MTrCSsiAcUE1BR4iSLaWneioX6NLO08GFgzDR5vseLaOJEEciVAgZcrQc4XlQAFnqidYV1qEKDAU4MiY4hKgAJP1M6wLjUIUOCpQVHdGBR46vJkNBIwlIDaAs/QxTA5CRxEgAKPW8KqBCjwrNpZritBgAKP+8DKBI4k8NpeXYnWV1bAZrOh/ILpKD5pnJUxcG0WJUCBJ15jKfDE6wkrIgHFBCjwFKPjRBMQoMDTrknS2jCwM4L4FBfiA+zaJWLkwxIwQuCFwh2wQZLvsC5kV0hAUwIUeJriZXCDCRxO4HUv34G6X72SUtnguy6CZ0SVwdUyPQlkR4ACLzteeoymwNODMnOQgE4EKPB0As00hhCgwNMGu+Pv3bC/EdwbXHZ34RuLEJvm0iYZoxou8GLxKHY1voOu7p3JWspKxmNAxSx2hgQ0I0CBpxlaBhaAwOEEXtP/W4LWl5anVFf5hWNRdv40ASpmCSSQOQEKvMxZ6TWSAk8v0sxDAjoQoMDTATJTGEaAAk8D9KE43De0APH9sWMjHAj/qERRstZoFK93dKA+EsYIlxunFBXBJ0mKYuXTJD3vwGvtWI/6poUpeAdXn4FC70ChkHeHA9jjb8GQ4mp
INu4hoZqTZTEUeFkC43BTETicwPOv2oVdP38pZR1D7r4I7uG8A89UzWWxoMATbxNQ4InXE1ZEAooJUOApRseJJiBAgadBk3pkgfc/6gm8x5tb0CjLu33XGI8HF5Qok4EarFbYkHoKvN2yvGuTJd6BV7/yo1FROkUYPm/tWIRHVz6DaCyKwcUDcdsx16G/r0KY+lhIdgQo8LLjxdHmItD3GXgrYZPkM/A+N41n4Jmrraz2MwIUeOJtBQo88XrCikhAMQE1Bd7K7i6sCwThsgFTCwsx1O1RXBcnkoAaBCjw1KB4aAy1HqHtlmXL7/c0pSQosNtxY2WlNoVbKKqeAi/Q04Rtdf+V77qMJQnaJReG13xOmLPwApEefPm1HyTl3b7r+JqjcdP0qyzU8fxaCgVefvU731bLt9DmW8fza70UeOL1mwJPvJ6wIhJQTEAtgbetJ4gF7e0pdZxfXoESh0NxbWacGNrTgcQv+QVicPUvhbOch70fro91rbvw53ceweaGjZg2bAauOek6FHvVv+uKAk+77yK1XmLBO/CU9SidwNvV0oVNDW0YWlmEYVW5f2/5g41o7ViXfDS1onQyXM7cYypb+aGzVjdtxE8W/S7lCzWF/fGbeberlYJxdCZAgaczcKbTlQAFnq64mUxnAhR4OgPPIB0FXgaQOIQEzEJALYG3uLMDGwOBlGUfW1SMsV6vWVDkXGekI4DA1oaUOL7R1bD73DnHtlqA2566GdubtvYua+bo4/Cdc76n+jIp8FRHqnpAnoGnDGlfAm/ZlgY8vXAd4p+dU3j2tOE4edIQZYlMMCsm3xn4/QW/xrb22t5qvzL5Upwx7AQTVM8SD0eAAo/7wsoEKPCs3F2ujQJPvD1AgSdeT1gRCSgmoJbAO/gOvMTx4efm2R14wbpWhPek3oXori6Fq1+p4v5YcWJbdwu+9qdrU5ZWUlCGP17/uOrLpcBTHSkDCkKgL4F37wsforHd31up0yHhnstOkO+ek28NtuiVeIHFfza+gd3de3BCzXTMHsg3N5q51RR4Zu4ea09HgAIvHSF+3cwEKPDE6x4Fnng9YUUkoJiAWgIvUUDqGXhF8hl4+XXnWaQriMDm+pRe+MbId+B584tDJpuRd+BlQoljSODIBLITeHZZ4B1vaYHHvWItAhR41uonV5NKgAKPO8LKBCjwxOsuBZ54PWFFJKCYgJoCT3ERFpqYPAOvqVNekfymTp6Bd8TO8gw8C216LsUQAnyE1hDsTKoTAQo8nUAzjSEEKPAMwc6kOhGgwNMJdBZpTCnwLvufn2Ldxu1IniwvX8WFPrz3nweT/7+uOfXcrsOxqC73YHdLMAtMHEoCBhOQDz8Kdu6Aw1MGh6v4iMVQ4BncJ6bXlAAfodUUL4MbSEDvl1gYuFSmzkMCFHh52PQ8WjIFXh41Ow+XSoEnXtNNKfDOufL7eOCn38So4TWHEKXAE2+TsaLcCERC7di26A4E2jfBJtnRb8zl8q8vHDYoBV5urDlbbAIUeGL3h9UpJ5BO4CmPzJkkYDwBCjzje8AKtCNAgacdW0Y2ngAFnvE9OLgCUwq8uRfdhH888mMMqCqnwBNvT7EilQnsXv0YmjY/mxJ13Gl/gdPb75BMFHgqw2c4oQhQ4AnVDhajIgEKPBVhMpRwBCjwhGsJC1KRAAWeijAZSjgCFHjCtQSmFHjTTr8ec2ZOwSerNqKyvAQ3X38J5sw6KkmXd+CJt8lYUW4ENi34DgKt61OCDD76NpTWzKXAyw0tZ5uMAAWeyRrGcjMmQIGXMSoOVIFAfOsKRFctgK2gFPZjzwYKy1SIeuQQFHia4mVwgwlQ4BncAKbXlAAFnqZ4FQU3ncCLxeL40b2P4cx5MzF7xgTMX/gpvv+zR/DiEz9P3pHX4Q+nBVHsc2Y0Lm0gDiABHQi01H6AjYvu6s3kLhiAKWf+CZLkPCS7w26Dy2GHvyeiQ2VMQQL6EvC67IhE4whHY/omZjYS0JhA4u5pt9O
OnnBM/hXVOBvD5zOB8IaP4f/Xb3oR2Mv6o+DLd8Hm9mqGxWGX4JQ/nwRC3NuaQWZgwwh4XQ75c0k0+fmEFwlYjQC9iXgdTfREzcsWly81A2YS69pv/wIXnzMX5546G12B9OKiwGNHd5AfIjJhyzFiEGjdtQh7drwJl/zYbM3YS+THZysOW5hdssHpsCEYouAQo3OsQk0CbpeEqPwB+cAPyXX1XUj84bBflU/NVIxFAroScDkkuGSJF4rEEJIlHi8S0IpA939+h/CaxSnhfZfdCtfIvU+yaHElfriY+MXPJlrQZUyjCXjkzyaJzyUUeEZ3gvm1IJB4QiATv6JFbsY8PIFET9S8NBd4/kAPNm6txVETRvbWfcU37sGVl5yOM046ho/QqtlNxjIdAZ6BZ7qWpRQcbZV/sGCX/1Ms/xevQwgc+AhtWJYc9z24FEs+rEuOO+3kYbjxq9NJjQRMSYCP0JqybaYsOvb+vxFd8mJK7Y5rfwFb+QDN1sNHaDVDy8ACEOAjtAI0gSVoRoCP0GqGVnFg0z1C29behdMu+y4euOsbOG7GJCxYsgK33vUw/vvkL1BRVkyBp3grcKIVCFDgmbOL8UgcHS92oWdjKLkA73QPik4tMOdiNKz6QIH39rvb8eDDy1Ky/fj2EzBtyqEvd9GwJIYmAVUIUOCpgpFBMiAQD3Yh8qz8CO3uzck329uOuwD2medlMFP5EAo85ew4U3wCFHji94gVKidAgaecnVYzTSfwEiDmL/oU9/3haTQ2t6FmQCVuu/GLmDV9QpIRX2Kh1VZhXDMQoMAzQ5cOrTHwaRCdr3WnfKH088VwDVP3jANz0tlf9YEC7w+PfoLX3tyasqQrL5uIiy8Ya/Zlsv48JECBl4dNN3LJ8mk3seY62HxF8q9izSuhwNMcMRMYSIACz0D4TK05AQo8zRFnncCUAq+vVVLgZb0HOMFCBCjwzNnMzte7EVgeTCm+cI4PvlnaHSpuKKl4DL7QNnjCTYhIHnS7hiLqKElb0oECb2dtB27+/tuIymeGJa4C+UDX3957CqoqeRZeWpAcIBwBCjzhWsKCVCRAgaciTIYSjgAFnnAtYUEqEqDAUxGmSqEo8FQCyTAkIAIBCjwRupB9DeH6CFr/1g589m4dm9uG8mtLLHsWnrdnhyzwdvaCitscaPXNQFx+nKuv60CBlxi3cXML/vvaVsjn/+Pcs0Zh2ND0EjD77nAGCWhPgAJPe8bMYBwBCjzj2DOz9gQo8LRnzAzGEaDAM479kTJT4InXE1ZEAooJUOApRmf4xNCuMIKfymfg2eLwzfTCUW7dF1kU+1fAGe1MYd7mm4yove9HuQ4WeIY3jQWQgEoElAi8+pZdeGXps/D3dOO06edizOBJKlXDMCSgLgEKPHV5MppYBCjwxOoHq1GXAAWeujzViEaBpwZFxiABQQhQ4AnSCJbRJ4FD78Czy3fgHZP1HXjETAJWIZCtwOvwt+O2R66X5V1XEoHNZsOdV/8WwweMtgoSrsNCBCjwLNRMLuUQAhR43BRWJkCBJ153KfDE6wkrIgHFBCjwFKPjRD0JxKPyI7TbczoDT89ymYsEtCaQrcBbvGY+HnrhlyllnTPzUnxh3rVal8r4JJA1AQq8rJFxgokIUOCZqFksNWsCFHhZI9N8AgWe5oiZgAT0I0CBpx9rZtKfAB+h1Z85M+pDIFuBt752Ne556taU4q489QacNuN8fQpmFhLIggAFXhawONR0BCjwTNcyFpwFAQq8LGDpNJQCTyfQTEMCehCgwNODMnMYRYACzyjyzKs1gWwFXqKeJ998GG989EKytInDpuKmi+6Ax+XRulTGJ4GsCVDgZY2ME0xEgALPRM1iqVkToMDLGpnmEyjwNEfMBCSgHwEKPPlFrvEIYvIjmk7JrR94ZtKFAAWeLpiZxAACSgReosz2rhb5HDw/qisGGVA1U5JAZgQo8DLjxFHmJECBZ86+serMCFDgZcZJz1EUeHrSZi4
S0JhAvgu8rmgb2qKNiMfjcNk8qHLWQLI5NKbO8HoRoMDTizTz6E1AqcDTu07mIwElBCjwlFDjHLMQoMAzS6dYpxICFHhKqGk7hwJPW76MTgK6EshngReNh1EX2pLCu9BeijJHf117wGTaEaDA044tIxtLwMoCLxaJYtML76N+6Rp4yoow+pJ5KBtZYyxwZteVAAWerriZTGcCFHg6A2c6XQlQ4OmKO6NkFHgZYeIgEjAHgXwWeP5YJ5rDdSmNctq8GOAaYo7mscq0BCjw0iLiAJMSsLLA2/rqYmz578Lezji8bhx351fg9PG8PpNu16zLtorAC328GZHVtZAqi+A+aRJsXlfWLDjBegQo8KzXU65oPwEKPPF2AwWeeD1hRSSgmEA+C7zEY7P14W2IxEO9/Coc1fDZixXz5ESxCFDgidUPVqMeASsLvI8f+CdaN9WmwJp6w4WomDhcPYCMJDQBKwi80MJ1CL74YS9n++Aq+G44AzbJpir7eFcbQktfQ7y7A85Jx8E+fKKq8RlMfQIUeOozZURxCFDgidOLfZVQ4InXE1bUB4GALGda490YIJVCgrofmqwAPp8FXqJ/iRdYdERaEEMEXqlYlneFVmgr1/AZAQo8bgWrErCywOMdeFbdtZmvywoCr+uhVxDb2ZSy6MJvnw+pX0nmINKMjPcE4X/ibsQ7mntHej53AxyjjlItBwOpT4ACT32mjCgOAQo8cXpBgdcSFK8brKhPAvPD6/Bs6ENEZT1TI5Xja+55KJcoaA6Elu8Cj99C1iZAgadvf+ORCALLPkRol/zIWFERCmYcC3tZub5F5Ek2Kwu85Bl4z72H+o/W8Qy8PNnPBy/TCgLP/4/3EVm+df/S7BIKf/R5SG6nal2NbF2N4LO/S4nnGDsDnnO/oloOBlKfAAWe+kwZURwCFHji9IICjwJPvN3YR0UdMT9+GPgX4vJf+64TnWPxBdcsU61D62Ip8LQmzPhGEqDA05e+/5Nl6Fm/tjep5PGi5PyLIL/aWd9C8iCblQVeHrSPS0xDwAoCL9rShcCf30SsuROQ5Z33wllwHj1S1d7HWhrg/8udKTFdM8+C64TzVc3DYOoSoMBTlyejiUWAAk+sfiSq4SO04vWEFR2GwOrILvyh582UrwyVqnCr92zyOoAABZ422yHeFUH003bYCh2wT5LP1LNTYGhDuu+o+SrwIp1tSTCOolJdsXe8+l9E21pTchaffR7sxeo9MqbrggRORoEncHNYWs4ErCDwEhDi0RhiDe2Qygo0e4FFaMkrCL3/QpK5VD0c3ou+AZvHl3MPGEA7AhR42rFlZOMJUOAZ34ODK6DAE68nrOgwBBKPzf4i8BJ2x/b/YfIq1wk41qnuTz/NDp8CT/0OxppD6LlvE+CP7P1APbIQrq8Ph81Biac+7b4j5pvAi8dj6Jz/Inq2rEmCcY+ciKI558Jmk3RBzzvwdMGcTEKBpx9rZtKfgFUEnl7kYv4OxP1dsFdUQ/4NX6+0zKOQAAWeQnCcZgoCFHjitYkCT7yesKIjEOiMB/FWeDWa4p2Ybh+G6Y5hZHUQAQo89bdE+IXdiLy1JyWw62vDYZ9QpH4yRuyTQL4JvJ6ta9HxznMpTIrnXQD38PG67BSegacLZgo8/TAzk0EEKPAMAs+0uhCgwNMFM5MYRIACzyDwfaSlwBOvJ6zIJASC0SB2B2sxxCfLHJtdiKop8NRvg5UF3vtbt+CfK5ajOxTGWWPH4ZIpYr/pLt8EXtfiNxFY82HKpvZOnoXCY+apv9EZ0VACvAPPUPxMrjEBCjyNATO8oQQo8AzFz+QaE6DA0xiwgvAUeAqgcQoJLG5ZgEe23i8/VdmNCnc/fG/MTzHUN8JwMBR46rfgkEdoRxTAdeMI0z9Cu6u9Hd96/tkUYN+dOw+zhw5TH6JKEfNN4EXbm9H6wl8QD4f3EnQ4UXb+tXCUVqhElGFEIUCBJ0onLFpHLAb7uo9g27EOqBqM8FEnyP8OU+/
tqemoUeClI8Svm5kABZ6Zu8fa0xGgwEtHSP+vU+Dpz5wZTU4gJp9Lde2yCxGMBnpXMrX0GPxg7D2Gr4wCT5sWWPElFq+tX48/LlmYAuy0MWNxw6zjtIGoQtR8E3gJZJHmevjXfJSk55swA46KASqQZAjRCFDgidYRa9VjX/gyHJ8u6F1UdMQkRM64XLdFUuDphpqJDCBAgWcAdKbUjQAFnm6oM05EgZcxKg4kgb0EdgV24DsrrkvBUeIsxx+nP204Igo8w1tgmgJ4B55pWsVC84AABZ71mrxhzTrMf+Nt+Lu7Me3YGZhzqnGPvrv++jPY/J37IcsvRuj5yp2A06ULeAo8XTAziUEEKPAMAs+0uhCgwNMFc1ZJKPCywsXBJLCXwD3rvo8V7R/34vjCoGtwUc2XDMdDgWd4C0xVwL4z8PzyGXhn6nQGXiwex7s76rC8sQkjS4tx+vAh8DoyO0NSlDvwYu0xxNpikMokSMX6vBHWVBuLxWZNgAIva2QZT4gEY+iqDaJ4uBeSXZ83era2tOKRXz+ImPzo6r7r3EsuwJSjp2Vct5oDnf95GFL99t6Q8YJihK76gZop+oxFgacbaiYygAAFngHQmVI3AhR4uqHOOBEFXsaoOJAE9hMIyI/PvrT7GWz1b8b00pk4uepMSDbj/yBPgcddKjqBf63fiuc2bOktc1JVOb4/K7M/1Iog8KJbI4hsjOytX3YBjglO2GsyE5Ci94b1GUeAAk8b9g0fduLDe3Yg3B2Ft58Ls+4citLRXm2SHRB1zacr8dzT/0rJM2HKJFzwxUs1z324BLY9u+B65QmguwNwexA+9YuIDRmjWy0UeLqhZiIDCFDgGQCdKXUjQIGnG+qME1HgZYyKA0lAfAIepwSvx4HWzpD4xbLCvCRw2zuLUdfV3bt2+Uku/OnMk+DJ4C68skInAqEYgqGoMeziQM/8HiAk/5/PLqlAgvN4fR5DM2bRzKoHAQo8bSi/ctlaBJs/ewmMnKJ8YgHm/nakNskOiCraHXjJ0uS7AW0tuxEvqdLt0dl9SCjwNN9yTGAggbIiFwLBCILh/XfcGlgOU5OAqgQo8FTFqUowCjxVMDIICYhBgAJPjD4cXMWHUgc+lDpRFXPizFg5iuAQs1AdqvrVkuX4tLG5N1NC3P3xzLnyHazpH20TQuC9HQQO8IcUeDpsmjxIQYGnfpO760N4/Ur5rasHXM4CO859bqL6yQ4TMXEG3ruvv4WA34+pxxyNuaedrEteEZNQ4InYFdakFgEKPLVIMo6IBCjwxOsKBZ54PWFFJKCYAAWeYnSaTXxXasOT9t298YfEPfhRZBikxPOXeXhtb+/Erz9cgZZAEF6nA9cfNR7HVvfLiIThAk+uMrpdfoR2/QGP0E6UH6EdyEdoM2ogBx2RAAWeNptjyU+3o25Be2/w0V/oh0nX8U3O2tA+clQKPL2JM5+eBCjw9KTNXHoToMDTm3j6fBR46RlxBAmYhgAFnnitut+xA6tt+x8ZTVR4d2QEquNu8YrVqaKo/CKLnR2dGFBQkNGjs/vKEkHgJWrhSyx02ih5lIYCT5tmR+VH7jc904SWDX4MOLYYw84qg03Kzx+eaEM4s6gUeJlx4ihzEqDAM2ffWHVmBCjwMuOk5ygKPD1pMxcJaEyAAk9jwArCPy7ffbdAvgtv32WX77y7PzwKhXn8GK0CjMkpogg8pfVzHgkciQAFHveGlQlQ4Fm5u1wbBR73gJUJUOCJ110KPPF6wopIQDEBCjzF6DSbuMcWxv85arELQSTk3Rej/TEvVjkt0nwAACAASURBVKZZPisHpsCzcnfze20UePndf6uvngLP6h3O7/VR4OV3/62+ego88TpMgSdeT1gRCSgmQIGnGJ2mE2OIoxY9qIQTPlnj8VJGgAJPGTfOEp8ABZ74PWKFyglQ4Clnx5niE6DAE79HrFA5AQo85ey0mkmBpxVZxiUBAwhQ4BkAnSl1I0CBpxtqJtKZAAWezsC
ZTlcCFHi64mYynQmIJvACjduw58OXEI/FUHn0WSgYOFpnIkxnJQIUeOJ1kwJPvJ6wIhJQTIACTzE6TjQBAQo8EzSJJSoiQIGnCBsnmYQABZ5JGsUyFREQSeCF2vdg7R+/iXi4Z+9a7A6Mv+63cFfUKFobJ5EABZ54e4ACT7yesCISUEyAAk8xOk40AQEKPBM0iSUqIkCBpwgbJ5mEAAWeSRrFMhUREEngNX38GmpffThlHdUnXYH+x12saG2cRAIUeOLtgbwUeOVFLrR0hsTrBisigRwJuBwSEh+UO/zhHCNxOgmIR6DI50AoHEOP/IsXCViJAAWelbrJtRxMwO2U4JJ/dfojhEMCliNQ7HOiJxRFT8T4zyYdGz/ClmfuSWE85LybUD75JMtx54L0IVBR7EJzB72JPrQzy5KXAq+63IPdLcHMCHEUCZiIgF534LV378aKbS8hHA1i4pAz0L90jIkoHaHUeBzR9mbEutphkxyQyqogeX3mX5cBKwh3RdC2oRuuIgdKRhVAfvmuKhfvwFMFI4MISIACT8CmsCTVCPAOPNVQMpCABES6Ay+BZ/uLD6J15TtJUiVjZmHAqV9Dd3sHygcPgiRJAhJkSSIT4B144nWHAk+8nrAiElBMQA+B1x1sxjPvfxehSHeyTpvNhgtm/QxVJSMV1y3CxGhHK2Kte/aXIq/LXjMcknx+CK/MCXTXBeSf/tYjGowmJxXLAm/ExdWZB+hjJAWeKhgZREACFHgCNoUlqUaAAk81lAwkIAHRBF4CUeIsvFgsinULl2P+408gFomgatgwnH/7bSiuqhSQIksSlQAFnnidocATryesiAQUE9BD4K3f9S7mr3wopcajhn8OM8derrhuESZGGmsRD/hTSrH3q5HvwpPvIOOVMYHtLzfKP/ntSBk/7roh8FS4Mo5xpIEUeDkjZIAjEOhAHT62/Q1t2IkaTMfU2OfhtOl3By4FHremlQlQ4Fm5u1ybiAIv0ZXu1lb86Ss3IC4/YbLvOuqs03HyV69j00ggYwIUeBmj0m0gBZ5uqJmIBLQnoIfAq21agZc/ujtlMSdO/CrGDz5V+wVqmOGQO/DkxwzsA4fxDrwsme98bQ+al7enzBpzzSD4+nuyjHTocAq8nBEywGEIxBHDK7YfohtNvV8dHj8eM3CNbrwo8HRDzUQGEKDAMwA6U+pGQFSBt3XZJ3ju7p+ncOg/ejS+dG/qGXm6gWIiUxKgwBOvbRR44vWEFZGAYgJ6CLxEcR+sfQyrt7+WrHNw1TScPu27sEtOxXULMZFn4KnShkBDDzb+fRdiob2HOReN8GHkpQNViU2BpwpGBjmIQAd24zXbHSn/tNg2AGfE7tKNFQWebqiZyAACFHgGQGdK3QiIKvBi0SieuuV7aN6+o5fFmTd/A+PnztGNDROZnwAFnng9pMATryesiAQUE9BL4CUK9Pe0IhwJoqRAnfPNFC+aE4UjEOoIo2OjH85ivsRCuOawoEMI8A48bgoS0JYABZ62fBndWAKiCrzkZ/WODix77kW01zdgzPGzk794kUA2BCjwsqGlz1gKPH04MwsJ6EJAT4Gny4KYhAQOIMA78LgdtCLAM/C0Isu4JABQ4HEXWJmAyALPyty5Nn0IUODpwzmbLBR42dDiWBIQnAAFnuANYnk5ERBV4HVH41jTHYXPbsM4nx3y//AigawI8BHarHBxsMkIUOCZrGEsNysCFHhZ4eJgkxGgwBOvYRR44vWEFZGAYgIUeIrRcaIJCIgo8FrCMTxa14Pu6N4z/4Z67bhqgAcOSjwT7ChxSqTAE6cXrER9AhR46jNlRHEIUOCJ0wtWoj4BCjz1meYakQIvV4KcTwICEaDAE6gZLEV1AiIKvNebQ/igPZyy1itkgTdavhOPFwlkSoACL1NSHGdGAhR4Zuwaa86UAAVepqQ4zowEKPDE6xoFnng9YUUkoJgABZ5idJxoAgIUeCZoEktURIACTxE2TjIJAQo8kzSKZSo
iQIGnCBsnaUGg24/YniZIQwYBkqRKBgo8VTCqGoQCT1WcDEYCxhKgwDOWP7NrS0BEgdcSiePRXcHeR2iHeOy4upqP0Gq7E6wXnQLPej3livYToMDjbrAyAQo8K3fXPGuLvvkuwo8+AVskAtvQQXB879uQ+lXlvAAKvJwRqh6AAk91pAxIAsYRoMAzjr2SzE3+Lsj+BwMKCpVMz7s5Igq8RBP4Eou824qqL5gCT3WkDCgQAQo8gZrBUlQnQIGnOlIGzJJAPBBEz7VfT8q7fZd04mw4b/qfLCMdOpwCL2eEqgegwFMdKQOSgHEEKPCMY59t5j98vBBvb9+UnHZs9RDcfMyJcNp5blpfHEUVeNn2nuNJ4GACFHjcE1YmQIFn5e5ybRR43ANGE4itWoPwnb9IKUOqqYbzgV/mXBoFXs4IVQ9Agac6UgYkAeMIUOAZxz6bzJ827sbdH7yRMuWr02bjtGGjswmTd2Mp8PKu5RkvuLWpBwvfaEJ3ZxTHzCnH8HHmuquVAi/jVnOgAAQ+WdKAN57fhp5gDCecVoN5Zw3psyoKPAGaxhI0I0CBpxlaBs6UQCyG8G0/Rmzb9t4Z9uuvhuOMUzKNcMRxFHg5I1Q9AAWe6kgZkASMI0CBZxz7bDL/c+1yPLNuRcqU04aPwVenzsomTN6NpcDLu5ZntOCAP4Lf3bEBnQe8DfjaW0aaSuJR4GXUag4SgEDttk784tbFCPfE4HRL8BQ6cPU3J2Hy9COftUSBJ0DjWIJmBCjwNEPLwNkQkF9gEfnPi4jX1cN+wixIx83MZjYFniq09AlCgacPZ2YhAV0IUODpgjnnJE3+btz81vPo+eysCrv8pqifzz0Lw0srco5t5QAUeFburvK1rfmkHU8/tC0lwIy5FTj/CvktbCa5KPBM0iiWiSd+vhLvvr2zl4S7wIFTLhyKS64ee0Q6FHjcOGoTkKQ44vIZwvG4Te3QWcejwMsaGSeYiADvwBOvWRR44vWEFZGAYgIUeIrR6T6xrqsDz29YJb/EIo6zRozFqLJK3WswW0IKPLN1TJ96d23z45F7NqYkO+WCAZh7Tn99ClAhCwWeChAZQhcCf/3ep5j/8a7eXDbJhpvvORqTZ/SjwNOlA/mdxGaLo6gAcEixJIhAjyT/MlbiUeDl9560+uop8MTrMAWeeD1hRSSgmAAFnmJ0nGgCAgcKvM7uduzeUwtJrru631AU+Mx15pkJcJuqxDee3Y0FrzQmax46uhBX3jQcLvnxPrNcFHhm6RTrfPnXG7F+XQu21LcjEgVGDy/Bd37f96NavAOP+0YtAh5XDD6PfOvdAVdbp4SYgXfiUeCp1V3G6YtAJBLDB0vq0CUfG3LirGoUF7l1AUaBpwvmrJJQ4GWFi4NJQGwCFHhi94fV5UZgn8BramvDinVL5Udn9v4EXpLf3jt9wnFwuzy5JeBsUxPo6ggj0B1DVbU+H2rVhEWBlzvNdatWY+mixSguKcHJp5+K4tLS3IMywiEE9mzpwsK/1yIgf7+55Mdnj798MPqP6vsHKBR43EhqESjyyWcvOlIFXndQQk/IuLvwKPDU6i7jHIlAOBzDD+5ehDXrmpNDSkrcuP+uE1Hd36c5NAo8zRFnncCUAm9nXSN+dO+fsX7TDgwcUIkf3nQlpk/e+/bGuuZAWgjV5R7sbgmmHccBJGA2AhR4ZusY682GwD6Bt2nHJmzftSll6ogh41FdZZ4zz7JZN8danwAFXm49XvXpCvzl4Ud6g1RUVuKW238At5dSPzeyh58djcTR0RBEUZUbDlf6O10p8LToQn7GdMjyrliWePuuxBl4bZ02xJG/Ai8ajKDpk3oE9nShZFQ5ysbLj7MbhyM/N6bGq/7okwbc8YslKVkuOX8Uvnz5BI0zAxR4miPOOoEpBd7VN/0cJ58wHVdcdBoWfrRalnmP4Y1//Fr+iYydAi/rLcAJViJAgWelbnItBxPYJ/B27K7Fxm2rUr48fuQ0lJfyHEHuGnM
SoMDLrW9PPvZnLP9oWUqQ6278OsZPmphbYM5WhQAFnioYGeQzAk4H4HbGktIuIN+PYeTjs4mSjL4Db8sza9Bd2967P6rnDkPl9GruFwsRoMCzUDNVWIrpBF5zawfO/NJtWPTS7+GQH5tKXJdc/2Pc9vUv4thp4yjwVNgUDGFeAhR45u0dK09PYJ/A8wfDWL91JVra9p55VlUxEGOG8Q/q6QlyhKgEKPBy68zLz7+It159NSXI9+/8Mar6H/nFCrll5OxsCFDgZUOLY81GwEiBF+kOYe0fU3944R1QiFFfnGw2jKy3DwKJR2hvv2cRVq/d/wjtb+4+EQP68RHafNw4phN4H6/ciJ/e/1c895e7e/t1y08ewszpE/D5805Cj7zB011up3xWQgbj0sXh10lANALyy+Bgl/8rHE09H0S0OlkPCSgh4LTbEJXf2hv77Lf57oAfks0Gr8erJBznkIAwBBK/bzsS+zsWl18MwN+/s21MV1c3HrjvAWzdsg12+Ye7n7voPJx93tnZhuF4jQhI8lO2dvn3an420QgwwxpKIPnZRP69W/6P7ldU/vPs0t8sQjQkv1Hms6tiXCXGX6L9o5W6LzbPEyZeYvHuB7Xo7A7LTyIOQkmxPuf90puIt/ESPVHzssXlS82AB8da+NEqPPjov/H0wz/u/dL//vIxjBkxCFddeoaWqRmbBEiABEiABEiABEhAQAKJj591u3ajqLgIxfIvXiRAAiSQDwTqVzRg3YvrEZNlnrfciylfnISCqoJ8WDrXSAIkoAIBzQXeJ6s24sf3PY4XHr+nt9zv3Pl7HDdjEi45dy6aO0Jpl1FR7MpoXNpAHEACghFwOWzymzjt6JRfMc6LBKxGoMhrR498eHqId1BbrbV5vx6v2w6f/Csg30XhD+6/k+JAMD3ynXmOz+5kymdgsVgEwc4GuAv7yXfbOfMZhWnW7pLvFnDLn086A4ff26ZZCAslgcMQKPI55LfgRhGSP58YdUV7ouhpC8Ariztb4nEcXiSgEgF6E5VAqhgm0RM1L80FXmt7J079/C14//nfyY9N7S3+rMtvw89+cD2mTRrNM/DU7CZjmY4Az8AzXctYcBYE9p2BFzzgUZEspnMoCQhLoK8z8MLyc1ktoRj2/dmw3C3Bm6d/QOtu3YKdK/+GcE8H7K4CDJl8BQrLRwnbVxa2lwDPwONOsDIBI8/AszJXrk0MAnwLrRh9OLAK052Blyj+K9+5F8dMHYfrLz8Xr7yzJPlI7St/u1f+SaxEgSfeHmNFOhKgwNMRNlPpToACT3fkTKgTgb4EXlPizooDjveVjxJDtccuv38x/64Ni+5DT1dD78Ldhf0xZvZ38w+EyVZMgWeyhrHcrAhQ4GWFi4NNRoACT7yGmVLg7apvwu0//xPWb96JwQP74c5brsHEscOSdOuaA2kpV5d7sLtFfu84LxKwGAEKPIs1lMtJIUCBxw1hVQJ9CbzdwZh8OHrqo1n95MdtVT7D2BRoV731A8TlR2h7L9lmTph3t/wDXHUfJzEFDBMVSYFnomax1KwJUOBljYwTTESAAk+8ZplS4PWFkQJPvE3GivQjQIGnH2tm0p8ABZ7+zI3IuHndMnzw7jPwd7ZjyoyTMWvuxZASr7G08NWXwOuU3zzXEd4v8NzyGw8rXdbmcaRW71r7L7TULun9cnnNsaiZcKmFd4Y1lkaBZ40+chWHJ0CBx51hZQIUeOJ1lwJPvJ6wIhJQTIACTzE6TjQBAQo8EzQpxxK7Olrw2AM3Ixbbf9j9qeddh8nT5+UYWezpfQm8ROXd8gssAvIvl3z2XaEs8PL0CDz57ruoLPAWobNlEwpKh6Fi8PGQ+CILsTe3XB0FnvAtYoE5EKDAywEepwpPgAJPvBZR4InXE1ZEAooJUOApRseJJiBAgWeCJuVY4vpVi/Dyv3+XEmXsxNk4+5Jv5BhZ7OnpBJ7e1bdJa7BHWgAHilAdOR0eVOpdAvNZiAAFnoWayaUcQoA
Cj5vCygQo8MTrLgWeeD1hRSSgmAAFnmJ0nGgCAhR4JmhSjiXyDrwIOvzhHCnmNr0D67Da+aveIE6U4qjwnXDKMo8XCSghQIGnhBrnmIUABZ5ZOsU6lRCgwFNCTds5FHja8mV0EtCVAAWerriZTGcCFHg6AzcoHc/AM1bgbbE/gQZpfkr3x0VuRll8skE7gmnNToACz+wdZP19EaDA4/6wMgEKPPG6S4EnXk9YEQkoJkCBpxgdJ5qAAAWeCZrEEhUREOkR2l3SK9hh/1fKOiaFfyjffzdC0do4iQQo8LgHrEyAAs/K3eXaKPDE2wMUeOL1hBWRgGICFHiK0XGiCQhQ4JmgSSyxTwLx2lbYClxAWUHKOJEEXgRBrHP+Fp3YmKxxYOxsDI1ezM6SgGICFHiK0XGiCQhQ4JmgSSxRMQEKPMXoNJtIgacZWgYmAf0JUODpz5wZ9SOgtcCLx4IIB7fIb7X0weEaCths+i2OmSxNIB4MI/rzV4FPa5PrlC6aBumqWb1rFkng7SsqYNsNe7wALhRbujdcnPYEKPC0Z8wMxhGgwDOOPTNrT4ACT3vG2WagwMuWGMeTgMAEKPAEbg5Ly5mAlgIvGm5BoO0VQJZ4icvurIa37CxKvJy7xgAJAtFnP0H8icUpMKR7L4I0pn/yn4ko8Ng5ElCLAAWeWiQZx2gCjTvrsGXVOrh9HoyZOhkFJUWgwDO6K8yvJQEKPC3pKotNgaeMG2eRgJAEKPCEbAuLUomAlgKvp3Mxwv7VKZV6y86F3bVXsPAyB4F4PI6Vn36M1tYmTJ4yHeUVVUIUHrvvDcTe35Qq8G6YA+nMiRR4QnSIRWhJgAIvM7qhngjefHoZ1izZjrL+hTjti0djyGeSP7MIHKUlgcbaOsx/9mUk/j2TuDwFPpx++UWo7leCQFA+fCAc0zI9Y5OAIQQo8AzB3mdSCjzxesKKSEAxAQo8xeg40QQEtBV4H8gCb10KBQo8E2yKg0r8858exLKPFiX/qdvjwbdu/iGGDR9l+ELiK3cheseLkP/kl6zFJp+BJz1wKWzF3uTf8w48w1vEAjQkQIGXGdx3/70cC55f0TvYW+jBN399IdxeZ2YBOEoVAo325djhmA9vvBQjw+fDEy9Lxv3o7fexZeXalByzzjoZRx09ngJPFfIMIiIBCjzxukKBJ15PWBEJJAlE6oKwFTlgl39lelHgZUrKuHHRPU0IvPIqOnfI/1sxBP3OnYPCwZXGFWSizFoKvFi0E4Hm52S/EkoSsbvkR2hL+QitibYH6nfvwl13fjel5KNnzMaXr/+WEMtISLz4a2sQl19ikTgDz9Z//9lyFHhCtCjnIuocy7DJ/Sqith4MDc3FiNApOce0QgAKvMy6+NhPXkbd5qaUwdfccSYGj+qXWQCOyplAvbQMH3rv643ji/fHXP/P4YAXaz/6FCs/WJqS45QvfA6jRg+iwMuZPAOISoACT7zOUOCJ1xNWlOcEYv4o2h/eifDm7iSJgnOqUHB2Zh/eKPDE3zydD/4e9fUx1AXln+jK70iQfF6MvnQ2BkwZKH7xBleopcBLLC0W9SPSs40vsTC4z0rTiy7w+loXBZ7Sroszr0PahQ8KfplS0LTAdRgQmSJOkQZVQoGXGfiD78Bz+5z4tnynrtOd+Q9yM8vEUUcisMzzIOrse+/i3nfNDH4P/aJTEQ6F8cGLr6GxdnfyS+NmTMWU44/hGXjcTpYmQIEnXnsp8MTrCSvKcwJd/22E/+U9KRTKbh8JZ40nLRkKvLSIDB0QbW1DlyzwVnUORiQuJWuxORxwDxuEmf9zgqG1mSG51gLPDAxYY98E/vLo/+GjDxcmB4n0CG26vlHgpSMk/td3ON/Has8/UwodHD4ek4JfEL94jSukwMsMcO8ZeIu3oWxAEc/AywybqqPWuf6Bjc7nUmLO89+Pwnh17z/raGmDy+OGR/4BbOLiSyxUbQGDCUaAAk+whsjlUOCJ1xNWlOcE2n63HaG1XSk
Uii4bCO+Je8/g6OuiwEtHyPivJ+7AW7GjaL/A8/ngHdwfx3z1eOOLE7wCCjzBGyRAeaK+xCIdGgq8dITE/3qHrRYfFN6bUijvwNuLgwJP/P3LCvcSCNm6sMTzS7RJmyDBjjGhSzA6fEGfeCjwuHusTIACT7zuUuCJ1xNWlOcEQuu70fbgtl4KUokTFXeMhM1jT0uGAi8tIsMHJM7A2/7k69hZF5d76oFUXIIRp43DwGmDDK9N9AIo8ETvEOtTSoACTyk5seYlzsDb7HkVESTOwJsjn4F3qlgFGlQNBZ5B4JlWEYE44ui07YIbRXDHS9LGoMBLi4gDTEyAAk+85lHgidcTVkQCCK3rRuD9Fthleec9uRz2CldGVCjwMsIkxKDO+nZ07GpHyaBSFB5wmL0QxQlaBAWeoI1hWTkToMDLGSEDCEyAAk/g5rC0nAlQ4OWMkAEEJkCBJ15zKPDE6wkrkgnE4lGEYn64pAJItr1nhfFKT4ACLz0jjjAvAQo88/aOlfdNgAKPO8TKBCjwrNxdro0Cj3vAygQo8MTrLgWeeD3J+4q6Iq3YHVyDaCwEh+TGQM94+Bzpz3/Le3AyAAo87gIrE6DAs3J383ttFHj53X+rr54Cz+odzu/1UeDld/+tvnoKPPE6TIEnXk/yvqLNXYsRjgV6ObjthRhecEzec8kEAAVeJpQ4xqwEKPDM2jnWnY4ABV46Qvy6mQlQ4Jm5e6w9HQEKvHSE+HUzE6DAE697FHji9SSvK4rEe7Cpc2EKA5v8CO3Yorl5zSXTxVPgZUqK48xIgALPjF1jzZkQUFvgdUR64I9FMMBVkEl6jiEBTQlQ4GmKl8ENJkCBZ3ADmF5TAhR4muJVFJwCTxE2TtKSQF1gLTrC9b0pSpwDUe0dq2VKy8SmwLNMK7mQwxCgwOO2sCoBNQXevxs3YH5rbRLVWF8ZvlIzBR4p/VvMrcqW6zKeAAWe8T1gBdoRoMDTji0jG0+AAs/4HhxcAQWeeD3J+4pi8RhaQ7XwR9tRKJ99V+qqgU3+i1d6AhR46RlxhHkJUOCZt3esvG8Cagm8zYE2PLDj45Rk51eNxKnlQ9kCEjCMAAWeYeiZWAcCFHg6QGYKwwhQ4BmG/oiJKfDE6wkrIgHFBCjwFKPjRBMQoMAzQZNYoiICagm8N5q34cWmLSk1TC/uj2uqJyqqi5NIQA0CFHhqUGQMUQlQ4KXvTDwWR/e6BvTsaofd44BvfH+4KgvTT+QIwwlQ4BnegkMKoMATryesyKIEdtqa0YwODEN/lMZ9mqySAk8TrAwqCAEKvMM3IhKNoCPYgWJPMRx2hyDdYhnZEFBL4HVFQ7h76xL5DvZwMn3i3vVvDZ6Okb7SbMrhWBJQlQAFnqo4GUwwAhR46RsS2LQH3esb9w+021B+ylhITh7vkJ6esSMo8Izlf7jsFHji9YQVWZDAW9JKrLLtSK7MDgnnxWZgaLxK9ZVS4KmOlAEFIkCBd2gzGjsa8cHmRQiEAnA73Thx1PHoV9xPoK6xlEwIqCXwErkaQ3683boDwVgUJ5bUUN5l0gCO0ZQABZ6meBncYAIUeOkb0L50O8J7ulIGFh8zBK5+Reknc4ShBCjwDMV/2OQUeOL1hBVZjIAfQfzJ/lbKqhLy7oLYsaqvlAJPdaQMKBABCrxDm/HSylfQ4W/v/UKxrwTnTj5LoK6xlEwIqCnwMsnHMSSgJwEKPD1pM5feBCjw0hP3b2mCf21D70CbXULZKWN4B156dIaPoMAzvAWHFECBJ15PWJHFCFDgWayhXI5hBCjwDkX/9IfPICbfabXvSrzw55IZF8PJR2kN26dKElPgKaHGOWYhQIFnlk6xTiUEKPDSU+MZeOkZiTqCAk+8zlDgidcTVmRBAgc/Qnt+7BgMiVeqvlLegac6UgYUiAAF3qHNWLJlKTbv2f/SgpFVIzBzhPp39wq0DSxZCgWeJdvKRX1GgAJP/a1Q+9EGbFuwGr6KIow/bya8ZXwUUX3
KmUWkwMuME0eZkwAFnnh9o8ATryesyKIEttua0IpOvsTCov3lsrQnQIF3KONYPIb1DRvQ2LEHVUWVGDdAPhTaJu0dWLcO2LMVKKwAhhwFyGfk8RKTAAWemH1hVeoQoMBTh+O+KAl5t+BX/+oNWti/DGfdex0cHqe6iRgtIwIUeBlhssygzfY6fOTcgB4pikmhIZgeGW2ZtR1uIRR44rWXAk+8nrAiElBMgHfgKUYnxMRgPIAmWz2q40Ngt/HNXAc3hQIvi226YwVs6xfsn1AyAPFjL84iAIfqSYACT0/azKU3AQo8dYkvfOA5bF+4JiXoqXddhaoxg9RNZJJosVAUTW9uQ9fqZjjKPKg8fSh8Q0p0q54CL3PUHy3z48Nl3bDZbDhuVgGmTPZmPlmAkW22LjzjfQ8xxHurmddzFMZErfu9R4EnwMY7qAQKPPF6wopIQDEBCjzF6AyfuBRv4Z/SHxFGDypRjRtid6BK/t+0VzyOluUvoXPTIviqx6H82Ethd5nrA1HaNX42gAIvU1LyuKX/gq19/4HRiZnxOdcA7oIsgnCoXgQo8PQizTxGEKDAU5f6iqfnY/V/PkgJes5vbkDxwHJ1E5kkWvM7O9Dy3s7eau0+J4Z962hIbn1+EEqBl9lG2bY9hH8805oy+MovlWPgQPPcObrasQPvu1amrGF8ZAjmhCZnBsGEoyjwxGsaBZ54PTGsomg0hlWLNmLHpt0YOKwKcaDn/AAAIABJREFURx0/Dg6nPv/yM2zRR0i8s3EJdjYuhNddifHDPgePs1i0Eg9bDwWeKdp0SJEhWdr9wH4lIvFQ79eOih+HL8dvS7ugPe8/gYb5j/aOKxw+A8O+dH/aeWYcQIGXRddWvgFb/YbeCXFJQnzudbA5zPNBOYvVmn4oBd7+FjZs3YZ1C5fC6fVg8kknoqi8zPT9zfcFUOCpuwOCHX7M/+U/0bKpDpL8Ns9Jl56IiRcer24SE0Xb8egK9OzqTKl40JcnwztYn8/uFHiZbZZ33+vCkqXdKYPnnliIWTPN84NF3oGXWa85SlsCFHja8jVV9AUvfYxP5u+/JX/0UUNx1hUnmmoNahS7rX4Blq75fW+o4sLBOG3Gz2CXxP+DLwWeGjtA/xhbsQ6/lb6fkrgIZbg79pe0xWx8+Ar0NO9IGTfupv/AkTj3zGIXBV7mDY0HOiB9/ALgb0dcfiOtbdwcxAeOzzwAR+pKgAJvL+76zVvx2h//grh8Z3Hi8hYX4XPfvhGeAvP8AU/XjWOSZBR46jcq8T3SXtsET2kBPEU+9ROYKOLBd+BJHjuGf+cYSDrdhECBl9lmOdwdeFd8qQw1A12ZBRBkFM/AE6QReVwGBV4eN//gpT/603/D3xno/ccOlx033HUZJMmWV5TeW/4z1LesSFnzyUf/FJUlY4TnQIEnfIuOWOAD0u3Ygv0C/QJ8GfNi56dd0Lanb0PX5sW94yT58dnxt7wCm3zHldUuCrzsOhqPxYDuFti88l0IDnN9QM5upeYfrbXA6whHkpCKnQ6hYS189gVsWPxhSo1zv3Qphk+dInTdLK5vAhR43CFaEug9A29lExwVXp6BpyXsHGMnzsBbKv+SbxzFbPnOO7OdgZfj8k05nY/Qitc2CjzxemJYRc/8/nXs3tbYm7+g2Iev/Ogiw+oxKvGyDY9hc+0bvekTB62ed9xD8LjFf4yHAs+oXZN73kC8G+9IL6AO2zElfiyOwTzY5L/SXcE927DtH7ciIp93ZvcUYOA530PJuJPSTTPl1ynwTNk2Fp0BAa0EXky+S+e9xmZs6uhKVjGiqAAn9a+U31Sc/veWDMpWfcjKdxdg2cuvp8Q95xtfRdWQwarnYkD9CFDg6ceamfQnwDvw9GfOjPoRoMDTj3WmmSjwMiWVB+PqdzThv399D93y2RqeAg/O+OJxGDp2YB6sPHWJwVAr3lv+C7R1bZfvPrRj6qirMGrQGabgQIFnijapXmQ8FkWwcTNc5fLba10
e1eOLEpACT5ROsA61CWgl8LZ0dePt3XtSyj25ugojCsV8JDXcE8Jbjz+VfJQ2cU2eNwdHn3Wa2rgZT2cCFHg6A2c6XQlQ4OmKm8l0JkCBpzPwDNJR4GUAKZ+GRCJRtDS0o6yqGE6X2I/aaNmXeDyGju5d8l13pXA7i7RMpWpsCjxVcTKYYAQo8ARrCMtRjYBWAm9pUytWtLan1DmlrATHVop9R3lb4x64PZ7kGXiZXgH588umzm645WM/RhYXwi7oXYaZrsdK4yjwrNRNruVgAhR43BNWJkCBJ153KfDE6wkrIgHFBCjwFKPjRBMQoMAzQZNYoiICWgm8tlAYz+2sQyS296UQdlluXTh4IEpd4r+UKRuQ7eEwntlWh0Bk71l//eQ32F4ydCAlXjYQNRxLgachXIY2nAAFnuEtELKAOOIIx1rkfw/55F9eIWvMpCgKvEwo6TuGAk9f3sxGApoSoMDTFC+DG0yAAs/gBjC9ZgS0EniJgpt6erCmrVP+owQwsbQIlW63ZuswKvAH8jl/Hze3paQ/f3A1hhbm99s5jerHwXkp8ETpBOvQggAFnhZUzR0zKp9rXR94ET2xRthsEipcc+SXSJnzZUwUeOLtRQo88XrCikggYwL+jgjm/7UWWz/pwICRPpz51cGoGVGE1s5QxjE4kATMQoACzyydYp3ZEtBS4GVbixnHU+CJ3TUKPLH7w+pyI0CBlxu/bGd3dMfw11c6sXxDD0bUOHH1WUUY1E+sY5+aet5BR3jlAUuzYYjvy3BIYp4/21cPKPCy3aHaj6fA054xM5CAZgRefmAbNixq7Y3fb4gPN/5hMgWeZsQZ2EgCFHj60G+p3YymbetRNWI8ygYO1ydpnmehwMttA7SHI/IjtLt6H6Gt9nlx4ZBqPkKbG1bVZlPgqYZS80CRBfIj9+/VQaopgPOSUbAVWutxey0AUuBpQfXIMR/8ZzsWrQr2DhjU34Ff3VihbxFpstX6/4lQrD5l1ADP+fA5hglVZybFUOBlQknfMRR4+vJmNhJQlcAfb1gFf1u4N6YkH9r9nSenIeLYe94RLxKwEgEKPO27uXHR61j+wuO9iaaefw1Gzz5d+8R5noECL/cNEIjKL7Ho4EssciepfgQKPPWZahEx/PJ2hP6w/64haUwZvL86DpDPzuR1ZAIUePrujhvubUJ7VzQl6R9urURpkV3fQvrI1hlehz09r/eOcNorMMhzmfw4rTg1ZgqLAi9TUvqNo8DTjzUzkYDqBA6+A6//cB++/jvegac6aAYUggAFnvZtePFnX0ewc/9ZYp6iUpx3+0PaJ87zDBR4eb4BLL58CjxzNDj4g0WIrmpOKdb70EmQBheaYwEGVUmBpy/4g+/AGzrAiV98vVzfIjLI1hXZhO7IBvlO8CKUuWaY9kUWogm8YCSMtlBQfllVofyzhfz84QIFXgbfgBxCAqISOOQMvOuHoGZkIR+hVbFh0VgMG5rr4Q+FMK5fNQqc1jsAXkVcmoaiwNMUbzI4BZ72jA+XgQLPGO7Mqg8BCjx9OOeaJXjfJ4jO39UbJu6QUPD302HzinW+WK7rVHs+BZ7aRPuOd/AZeNeeU4yBlea7s01fasqziSTwPtqzE6/vWo9YPIYqbxEuGz4VJW7zvuFXaVco8JSS4zwSEJAA30KrblMisrz72/KFqG3b+xNpr9OFa2fMRZmXbzZUl3Rm0SjwMuOUy6gN77+MT//7VG8IPkKbC83M51LgZc6KI81HgALPHD2LNfgR/NFixHf7kZB3nhsnw3HqYHMUb2CVFHgGwmdqzQmIIvACkRDuXzkfcfmvfdfUihqcO2Si5gxES0CBJ1pHWA8J5ECAAi8HeIeZur11D576ZGHKV2YPGY2TR01QNxGjZUSAAi8jTDkP4kssckaYdQAKvKyRcYKJCFDgmahZsTii2zoh9fPyBRYZtm2fwAvULkC89lV5Vgy26lNgG3hyhhE4jATEJSCKwNvU3oSnt3ycAqrSU4A
bxh8vLjyNKqPA0wgsw+pHoHFrFM21cVSPllA6QNIvsYCZKPDUbcrm5gY8/eliCjx1sSqORoGnGB0nCk6AAk/wBrG8nAhQ4OWEj5N1ILC7ZyOao7UY5JqAUkf/rDImBF53wwYEP/llyjzbpJtgKxmXVSwOJgHRCIgi8GLxOB5bvwQNgY5eRIm77xJ34eXbRYFnhY6H4vC86odzRRjRCgnBs7yIDcuP176vfieC5a/ufRNR4hzL2Z93YPj0/D0HgQJP3W/oxL8snvrkA+zkI7TqglUYjQJPIThOE54ABZ7wLWKBORCgwMsBHqdqTuDDrhexsvvNZB5J/uvk0q9giHtSxnkTAq9zw0sIbflP6pya0yENuzDjOBxIAiISEEXgJdh0y4/RLm7YhtZQABPK+mNC6QARkWleEwWe5oi1T+CW5Z37vWBvorhPQsdtJbC5rP1mFvn8SvzjxyFEZYG57yruJ+G8W/JDXh5uZ1Hgqf/9xpdYqM9UaUQKPKXkOE90AhR4oneI9eVCgAIvF3qcqyWBSDyMJxtvTTlXq9I5FOeXfyfjtMk78PZsRnDZz1Lm8A68jBFyoMAERBJ4AmPStTQKPF1xa5PM94cOOHZGUoJ33VCE2BBri6ykwLujB9Hw/qVT4Enwehx8C60232qmiRqNxlG3ow39qovglveDVS4KPKt0kus4mAAFHvdEL4FwDNEtfsQ6wpAGuGEfbP6XJlHgcX+LSiAcD+GpxttyFniBYAS9Z+DJf0CxDeQZeKL2nHVlR4ACLzteeoymwNODssY5PG8E4Hon0JslX+7ASyyYj9Cmbi7egafxN5sJwu/Y0oo//moR2pr88BY4cfU3j8Wko6tNUHn6Einw0jPiCHMSoMAzZ99Ur1p+oCD8QTPiraHe0PZJxbAPL1A9lZ4BKfD0pM1c2RJQ4xHahMALyvKdFwlYjQAFnngdpcATryfZV7TvDLxP5TPwKvPrDLwELL7EYv+WocDL/tvHajPu/993sWV9U++ySsq9uOeRcyyxTAo8S7SRizgMAQo8bosEgXhnBOF396TCKHXBdWKFqQFR4Jm6fXlRfK4vsaDAy4ttkpeLpMATr+0UeOL1hBWRgGICFHiK0Vlm4nevfh5B/wHPlcsr+9kfz0Vxmcf0a6TAM30LuYAjEKDA49ZICrxIHKHX6mE74EYeW40XzumlpgZEgWfq9rH4NAQSZ+BR4HGbWJUABZ54nTWlwLvsf36KdRu3733tqHwVF/rw3n8eTP7/uub9j5IeCXd1uQe7W/a/9EG8tuhXUfv2xWha9TxiYT/KRp+Cyonn65ecmVQnQIGnOlLTBXzuqZV48/n1vXVPnVWD626Zbbp1HK5gCjxLtJGLOAwBCjxui30EYjv9CK9oT0o8W4Ed9lkVkHx2UwOiwDN1+1g8BR73QB4ToMATr/mmFHjnXPl9PPDTb2LU8JpDiFLgZb7Jejp2Y8t/f5AyYdAJN6Jo8DGZB+FIoQhQ4AnVDkOKicXiWPDaZqxb2YihI8tw0tmj4fFa40UWegu8zYt7sHFRD2wSMPZED4ZNdxnSUya1PgEKPOv3OJsVxuWztOL+KGxFDvn3n70/rDbzRYFn5u6x9nQEeAdeOkL7vx6XP6P6N7Qi2hWGb1wZHIX8XJU5PWNGUuAZw72vrKYUeHMvugn/eOTHGFBVToGXw55q3fQO6j/8a0qE0lHzUH3M1TlE5VQjCbidEgrkt462dO4/ANvIepibBNQkUFroRE8ohkAoqmbYw8Zq3BLBB090pXxt7nWFKB9kDRmqOUAmyIoABV5WuDjYZAQo8EzWMJabFYEyWUIFQvJLLOTPJ7yOTCAh7xr+vg7B7R3JQZLbgQFXjoerv/nftG3lvlPgidddUwq8aadfjzkzp+CTVRtRWV6Cm6+/BHNmHZWkyzvwMt9kvAMvc1ZmGUmBZ5ZOsU4lBPQUeKvkt3tv/KAnpcyJp3gwRr4TjxcJqE2AAk9toownEgEKPJG6wVrUJkC
BlxnRYG0n6v+6JmVw0fR+qDhreGYBOMoQAhR4hmDvM6mwAm/dph2IRFPvsnA6HBg9fBB+dO9jOHPeTMyeMQHzF36K7//sEbz4xM+Td+QlHh9LdyWOzounH5YujCW+3rhpIXZ+/CyiIT+qJ56GwdMutMS68nYR8t5OPGzD/W3CHWD+p6Q0h25L/uad+I/2v4Hv2hDCKw+3pazpvG+Vof8wp+brZIL8I5D49k/s78TOjvM38PzbABZfsS3xyST52Vv737vNgjLBYn3DYrR278K46hNQ5hvQW3pXKIR/r1mOLe0tOH7QcJw2Ykzy9wdeKhNQaTt+9tEk+fmE15EJdMl33q3/w6cpA6pmDsCQC0cTm8AEJPkYh0z8isBLsFxpiZ6oednkfyGp8tvX//7yMfmNPql3P5QWF+JH377qkHqv/fYvcPE5c3HuqbPll1Okf4nFAPltjPWtfImFmo1nLDEIuB0SfPIjtK1dfIRWjI5kXkXyDzi8+iRQUuhIPkKr12MqmxJn4C0MQrLbMOYEN4Yf7WaHSEATAomjD4p8DnQHI+j0RzTJwaAkYBQBt0uCV/7V1sW9va8HL6z8CdY2vpv8W6fdg89PvReDSicn//72+S9gY9ue3nZdPXEmzh01yaj2WTavWj8MLC1wyZ9L5Edo5bMreR2ZQOIR2vq/yY/Q7jjgEdqrxsPdj4/Qirxvqsu9GfkVkddgtdoSPVHzUk3gHakof0D+A9XWWhw1YWTvkCu+cQ+uvOR0nHHSMXyEVs1uMpbpCPARWtO1jAVnQUDPR2izKItDSSBnAnyENmeEDCAwAT5Cm9qc5u7t+PPia1L+4bj+83DepDvQFvTja6/9v5SvjSyrws/mnC9wh/O7ND5Cm3n/+RKLzFmJMpKP0IrSif11CPsI7ZFQtbV34bTLvosH7voGjpsxCQuWrMCtdz2M/z75C1SUFVPgibfHWJGOBCjwdITNVLoToMDTHTkT6kSAAk8n0ExjCAEKvMwFXiASxvWv/g3hA44Rmlk9DN859hRDesek6QlQ4KVnxBHmJUCBJ17vTCfwEgjnL/oU9/3haTQ2t6FmQCVuu/GLmDV9QpIuX2Ih3iZjRfoRoMDTjzUz6U+AAk9/5syoDwEKPH0455plp7Qbu+17MCI6BJWx0lzD5c18CrxDW/3SqruwtuHt5BeSj9BOux8DS8Yn/37+jg14dMUihKIRDCgoxvdnnY7qwpK82S9mWygFntk6xnqzIUCBlw0tfcaaUuD1hYYCT5+NwyxiEqDAE7MvrEodAiIIvLYtLdizuj65oMoJ/VE2skKdxTFKXhOgwBO//e+4l2Kxc3myUEn+63PBkzEuMkL8wgWokALv0CYkjgzf2rwU7YE6jKicjRLv/pdYJEYHwiE0+DsxuKgMdkkSoIss4UgEKPC4N6xMgAJPvO5S4InXE1ZEAooJUOApRseJJiBgtMDz7+nC9rc3p5AaMm8kCvoVmoAeSxSZAAWeyN0Bwojg14V/SXkDdnW0H64JXCB24YJUR4EnSCNYhiYEKPA0wcqgghCgwBOkEQeUQYEnXk9YEQkoJkCBpxgdJ5qAgNECb8+qejStbkghVT6uH/ofVW0CeizxQAKx2BZEwy8l/5HdeS4kydg7qSjwxN6fIVng3U+Bp7hJFHiK0XGiCQhQ4JmgSSxRMQEKPMXoNJtIgacZWgYmAf0JmFXgdUuNcMZ9cMV5J5P+u8Y8GY0WeP5G+Q68d1LvwBt60kj4+nPfmmcXAbHYLoSCtwDxnr1l2zxwe+6DTaoxbBkUeIahzzjxwY/QXhg4FWOiwzKen88DKfDyufvWXzsFnvV7nM8rpMATr/sUeOL1hBWRgGICZhN4EVsAqwv+hg5pe3LNQ+UzhYaE5ilePydam4DRAi9Bt3VzM5rXNiJxfpFZz8Dbs24Rdi5+FpEeP6qnnobBM/PrMcBI+N+IhJ5K+WZxuK6Aw3lxzt9AMfncqs5P3pctYRSFU2bD7stM7lLg5Yx
elwA7pDrU25v4EossaVPgZQmMw01FgALPVO1isVkSoMDLEpgOwynwdIDMFCSgFwGzCbxtnjex0zU/Bc+MrpvgjVXqhYx5TERABIFnIlyHLdXfUodPHr815Wvjzv0WKsbMNPvSMq4/GvkA4Z77UsY73d+F3XF8xjEONzDWE8TO334PwdotyS87yqsw9Du/gqMk/YtOKPByQs/JghOgwBO8QSwvJwIUeDnh42TBCVDgidcgCjzxesKKSEAxAbMJvFW+v6LVsSlVJgQuRVV4imIGnGhdAhR4ufd296dvYctbf04JNGDKKRh56pdzD26aCDGEeh5ALPJesmK740Q43TfL/y+3Nz12LnsPdY//KoVC1XlXofz0S9OSocBLi4gDTEyAAs/EzWPpaQlQ4KVFxAEmJkCBJ17zKPDE6wkrIgHFBMwm8Jrt67BGfoR23+WOleLorm/ADrdiBpxoXQIUeLn3lnfg7WcYi+19IYkk9c8drByBAk8VjAxiQQIUeBZsKpfUS4ACj5vBygQo8MTrLgWeeD1hRSSgmIDZBF5ioS32DWhwfQIXClETPA6eeJni9XNiegJhqQu2uPx4nwlfGEKBl76/mYzI9zPwMmGkZEw0GEDtAz+QH6Hd+6KTxCO0w265D/bi8rTheAdeWkQcYGICFHgmbh5LT0uAAi8tIg4wMQEKPPGaR4EnXk9YEQkoJmBGgad4sZyYFYE44mj0vge/c2dyXmFkBCr9s2GT/zLLRYFnlk7lb537XmIRj0RQPO0ESF5fRjAo8DLCpPmgUCSOpVt7EIvbcOwwFzwu8/z+qDmcHBJQ4OUAj1OFJ0CBJ3yLWGAOBCjwcoCn0VQKPI3AMiwJGEGAAs8I6ubI2enYgibfwpRi+wXmoCA8xBwLkKukwDNNq1holgQo8LIEpsHwYDiOHz/fjt1t0WT0ykIJP72wBIXu3M5G1KBU04WkwDNdy1hwFgQo8LKAxaGmI0CBJ17LKPDE6wkrIgHFBMws8Doa/diytBFFVR6MOLY/bDbe+aB4IxxmYpN3CTqdG1O+UtIzEeU909RMo2ksCjxN8TK4gQQo8AyE/1nqxZt78NA7XSmFXH18IU4ZzzNZc+0OBV6uBDlfZAIUeCJ3h7XlSoACL1eC6s+nwFOfKSOSgGEEzCrwdm9oxb//dzHCgUiS3dg5NTj71umGcbRi4rDUgdrCl+SlxZLLs8mvCqnpOg/OWKFplkuBZ5pWsdAsCeS7wOvqDCZ/Zyou8mRJTr3hhxN4X5rlw5mTvOolydNIFHh52vg8WTYFXp40Ok+XSYEnXuMp8MTrCSsiAcUEzCrwXr73Y6xfsCtl3Vc/NA/lg80jlxQ3TceJPfYmtLs2yC+xsKE4NA7umLleGEKBp+NmYSpdCeSzwHv33Q3YvHHvG4GHDqvAvJPHwW7X/7HVgPwI7U/kR2jrPnuEtrzQjnsuLEYBH6HN+XuBAi9nhMIHiHSGsWdxI8JdYZRNqUDRyCLha1arQAo8tUgyjogEKPDE6woFnng9YUUkoJgABZ5idJxoAgIUeCZoEktURCBfBd6OHS1447XVKczmnDQGo0f3V8Qx10kHvsTiGPklFl6+xCJXpMn5FHiqYBQ2SDQUw8ZH1yHcHuqtcejFw1E8pkTYmtUsjAJPTZqMJRoBCjzROgJQ4InXE1ZEAooJmFXgHfII7YnyI7S38RFaxRvBohMp8LRrbDwex2vbluHD+vUYU1aD80ceB6/TpV1CRk4hkK8Cb+nSrVj5aW0Ki3Hjq3H8CaO4QyxEgALPQs08zFK6t3diy983p3yldHwZBl8w1NoL/2x1FHh50ea8XSQFnnitp8ATryesiAT6JNDxQgjtz8tnxdljKLvcjcK5zt7xZhV4iQXwJRbc+OkIUOClI6T86/9Y9y6eXP1Wb4Cp/Ubi7hOvUR6QM7MikK8Cr6sriH//62NEwnvf/CpJNlx48XSUlvqy4sfBYhOgwNOvPw2NHXDIj6BXVOh3BElPSw8
2PLI2ZZH9juuP/nOr9Vu4gZko8AyEz9SaE6DA0xxx1gko8LJGxgkkYBwB/7IIGn4SSClg4P0+uEfZk//MzALPOKrMbBYCFHjadeqG1x9AbWdTSoInz7kNZZ78OcdIO7rpI+erwEuQaWnuxupVuxCLAxMmVqOqinsu/Y4x1wgKPO37FZYl+B/+9B6WLd+eTHbKvHG46ouztE/8WYbGhQ1omL87+XfegT4M/8II2D0O3fLrkigegyfwBlzBJYhJhQh6z0DEPREUeLrQZxKDCFDgGQS+j7QUeOL1hBWRwBEJtDzeg/Zn958xkhhYdpULpZe4k3Mo8Lh5rEyAAk+77t75wRP4qH5jbwKP/PjsP8/7ISSb/i8T0G6V4kbOZ4EnbleMqcwWjsC5dhvsDa2IFRcgPH5o8n/NfFHgad+9d97bgMefWpiS6NabT8ekCQO1T/5ZhkgggkhXBJ5K+W3SNt3S6pYoIe68Xf86IJ+EjrLbUVpShUAogqB8FiAvErAaAQo88TpKgSdeT1gRCRyRwGHvwPuNfAfeSN6Bx21jfQIUeNr1eEdHI+744Ek0+dvgc3nwrekX4ISaidolZOQUAhR43BD7CDhXb4Vje30vkLjXg+BJU2UhYl4jQoGn/f5+/KlFeOe99SmJLr1wOs49a4r2yfMkg6/zb3D2LE9Zrb/ochRWHEuBlyd7IB+XSYEnXtcp8MTrCSsigT4JpJyB9wX5DLxTrHEGHttOAukIUOClI5Tb16Py40Hb2utRU1gJj4MvsMiNZnazKfCy42Xl0Z73lsPWlXpURnDOVMQLvaZdNgWe9q3bur0Jd/3yZUQje+8C83lduPuO83U9C0/7VRqbgXfgGcuf2Y0hQIFnDPe+slLgidcTVkQCignwEVrF6DjRBAQo8EzQJJaoiAAFniJslpzEO/As2VZdFrVxc6N8F94GyO+wwDlnTsaA/iW65M2bJJ+dgecMLkZcKuIZeHnTeGB1bDHejz+HEHoww3YqZkvn5M3qKfDEazUFnng9YUUkoJgABZ5idJxoAgIUeCZoEktURIACTxE2S07adwae1NCCeHEhz8CzZJe5KCsR4EssrNTNQ9eyJ74Lf4renvKFi+w3YpztWGsv/LPVUeCJ12YKPPF6wopIQDGBbAVeXH7UIt7kBxIH75a6IBXLBw/zIgEVCMT2NCPe0QXbgCpIBT4VIspbtNCJHnmvBkJRVeIxCAmIQoACT5ROsA4tCPARWi2oMqYoBCjwROmENnV8HHsbr8b+mhJ8mjQPZ0nXaJNQsKgUeII1RC6HAk+8nrAiElBMICuBFweim1uAYKQ3n21QEaRS856zoxgcJ6pKILJ8NWLbdu6NKT/L45g9A1Jlec45KPByRsgAghKgwBO0MSxLFQIUeKpgZBBBCVDgCdoYlco63B14F9u/hbG2o1XKIHYYCjzx+kOBJ15PWBEJKCaQjcCLB8KIbW5NyWUrlO/CG1aqOD8nkgCCPQi9+k7qvupXCedxM3KGo4bAi8XjWBlqQXs0hKPcFSix82UNOTeGAXImQIGXM0IGEJgABZ7AzWFpOROgwMsZofABDjwD72jbKThOOlf4mtUqkAJPLZLqxaHAU48lI5GA4QSyEnjhKGLrm1NFS7kX0sAiw9fBAkxMQGCBl5B3D7evxdqeveLaa3PgprK73OF1AAAgAElEQVRJqHEWmBg4S7cCAQo8K3SRazgSAQo87g0rE6DAs3J3uTYKPPH2AAWeeD1hRWYjIEuBSJt81pfXBbvHbWj12Qi8RKHx5gDi9V2Iy2uAxwF74u47h/z6Ml4kkAMBUR+h3RrqwP2tK1NWdrxvAC4rGpnDajmVBHInQIGXO0NGEJcABZ64vWFluROgwMudodkj7NrQgdr1HbDZbBgyoQQDRhSafUm99VPgiddKCjzxesKKTEQgFgqh/e2PEG7YeydbwbRxKJgy2rAVZCvwkoVG5RdZhGOwuR2AzbDSmdhiBGKN8kssOsV6iQUFnsU
2mYWWQ4FnoWZyKYcQoMADIvJTD90tfhRWFMDOH5Ra6ruEAs9S7cx6MS27A1j1XmPKvGmnDUBRubE3dWS9kCNMoMBTi6R6cSjw1GPJSHlIoPuTdehesTFl5eUXzIOjxJifvCgSeHnYNy7ZnARyPQMv8QjtI/IjtGsOeIT2ZvkR2oF8hNacG8JCVVPgWaiZXAoF3kEE9mxvxccvrkNIPnvYU+jG9HPHomIwzxu2yrcKBZ5VOqlsHVs+bUXtuo6UycOnlGLw+BJlAQWbRYEnWEPkcijwxOsJKzIRgbY3FiNUtyel4uI50+EZXmPIKijwDMHOpDoRyFXgJcrkSyx0ahbTZEWAAi8rXBxsMgL5fgfe23/6EN1tgd6uFVUV4KRrzPMGyy21dWhobsaYYUNRUVJsst2nfbkUeNozFjlDa0MQK99tSClx+unVKCyzxkvSKPDE230UeOL1hBWZiEDPjnq0v/Nhb8VSoQ8V582FzSU/jmrARYFnAHSm1I2AGgJPt2KZiASyIECBlwUsDjUdgXwWeD3dIbz+0OKUnknyI7TnfPsEU/Tx+Xfew7LVa5O12u12fOnsMzB66GBT1K5XkRR4epEWN0/yDLwN8rExtjiGyHfe8Qw8cXtlhcoo8KzQRa7BUAI9tY0IbtoJu88N36RRkHwew+qhwDMMPRPrQIACTwfITGEIAQo8Q7AzqU4E8lngJRCv+90muN6Lwh6R0FDTCfd5RTjqdOPOS8607V3dftz7lydTho8aMghXnX9OpiHyYhwFXl60uc9F9jTsRuNj/wf/ulUomDQd/b/8dTgr+1kCDO/AE6+NFHji9YQVkYBiAmYVeH60Y720AJ54AcbET4QdxtzBqBg8J+pCgAJPF8xMYgABCjwDoDOlbgTyWeBFd0TQ85MWhLrDyRdZOFx2FN5SAccM437Ym2njKfAyI0WBlxknK4/a/r83w792Ze8SC6bOwJAf/dISS6bAE6+NFHji9YQVkYBiAmYUeG2ox5OOb8GP1uS6B2AsLo/8RlZ4TsUcONGaBCjwrNlXrgqgwOMusDKBfBZ4kXcDCD/ZmdJe+0leuK4sMkXLD36E9orzzsLIQcac8ywqMAo8UTujT12xnh6sv1y+K1V+Udq+SyoowNgnXtCnAI2zUOBpDFhBeAo8BdA4hQREJWBGgfeu9CiWSP9IQXpp9B6MiB8rKmbWZRABCjyDwDOt5gQo8DRHrGuCWCyGtt0N8iHmpXD5vLrmFjFZPgu82I6wfAfe3h9Q7rtc3yiBfZpbxFYdtqZNO2uxp6WVL7E4Qsco8EyzlTUrlHfgaYaWgQ9DgAKP24IELESAAs9CzeRSDiFAgcdNYVUCFHjW6WxHUzPeeOhRtO7aDclhx+zPX4Txc4+3zgIVrCSfBV4CV2RpENEX/IgH47Cf4oXzLJ8CipwiKgEKPFE7o19dPANPP9bMBFDgcReQgIUImFHgtdp24yn7Tb2P0NZgIi6L/IqP0FpoX6q1FAo8tUgyjmgEKPBE64jyeuY//ndsXLS0N0BC4n3p/7d3JmBOlWf/fpLMDDPsIDuiAiJKUQH3pYILSqtt1Y96uVRbW/laq1brQm21H4pal7pUq1XbarHaVq271r2LoiIqUsQNEVR2kG0YZs1k8j8Jf0YzDCR5kpy8z8kdL656Ded53+e9f6/TM/ec5bqpUundUlWqn1IXeKWae6msG4FXKkmX5jq5hda93BF47mVCRxBQE7Ao8BKLrZP13kssXuElFurkS6MQgRe8nFc1vC/rootkQNVo6VLWN3gLzHBFCLwMQRk47KEpv5L1K1aldHrUOT+UQSN3M9B9YVpE4BWGK6O6QQCB50YOdFEYAgi8wnDNZVQEXi70qIWAYwSsCjzHMNKOowQQeI4Go2xr9vp7ZX7N88nqcKhcDtruXOlftadyNNtlCDzb+X25+3mvzpTpf/5b65e2G7S9fOsX50s4HA7OIrNcCQIvS2AcbooAAs9UXDSbJQEEXpbAfDg
cgecDZKaAgF8EEHh+kWaeYhBA4BWDemHmbI43yaNLzpC498/mT7/K3eWQ3pMLM6HjoyLwHA8oy/YWzpotC96YLd369pbdxx8qVV06ZzlCsA5H4OUnz5qP58vGRZ9K9xEjpapf//wMyig5E0Dg5YyQARwmgMBzLxwEnnuZ0BEE1AQQeGp0FBoggMAzEFKGLTa3NMqjSyelCLw+HXaVcX0uyXCEYB2GwAtWnqwmlQACL/cdsfixh2XZ888mBwpFIrLz6WdIzzF75z4wI+RMAIGXM0IGcJgAAs+9cBB47mVCRxBQE0DgqdFRaIAAAs9ASFm0mHILrUTkoF4/5Rba+mbZUBfNgiKHQsB9Agi83DJqaWqSt356tsTjX1yx3GmnwTJy8i9yG5jqvBBA4OUFI4M4SgCB514wCDz3MqEjCKgJIPDU6NIWrpVGWRGulT7xKunl/eHjPwEEnv/MCz1j60ssKkdJl/J+hZ7O2fG5As/ZaGgsDwQQeLlBjDU2yqzzz0Hg5YaxYNUIvIKhZWAHCCDwHAihTQsIPPcyoSMIqAkg8NTotln4YXi9vBlZ2fq0rr1aestXYj0LMxmjbpUAAo/NEVQCCLygJsu6EgQQeLnvg7a30A4740fSY89RuQ9coiOs3tAgz81dITXeVc/jdustuw7spiaBwFOjo9AAAQSeeyEh8NzLhI4goCaAwFOj22bhw2ULpDbU3HpMlZTJt6NDCzMZoyLw2AMlRwCBV3KRl9SCEXj5iXvDR/OkdskiXmKRI86ahqj839/nSnVtU+tIk7+xm1riIfByDIRypwkg8NyLB4HnXiZ0BAE1AQSeGt02Cx8v/0Sq5YsTvcqkwBsiIe8fPv4R4Ao8/1gzk78EEHj+8mY2fwkg8PzlzWzbJjDrk7Vy23MfpRw0dkRf+e4hg1XoEHgqbBQZIYDAcy8oBJ57mdARBNQEEHhqdNssnB+ulhmRFa3HcAttYTinGxWBl44Qf2+VQKkKvIbYevms7j9SGekuO1QdIqFQ2GqE9L0NAgg8todLBBaurJErH30vpaWJ+w+Sr48aqGoTgafCRpERAgg894JC4LmXCR1BQE0AgadGl7ZwbahBVoTqeIlFWlKFOwCBVzi2jFxcAqUo8Gqal8vTy38gTS3VSfh9K0fL+D63bFPitbTEZd1nUenSu0wqOiP7irtrM58dgZc5K470h8BDMxfJ07OXJSfbpX9X+enXd5XEObTmg8DTUKPGCgEEnntJIfDcy4SOIKAmgMBTo6PQAAEEnoGQaFFFoBQF3tvrbpf3NtyXwmtCvzuld4eR7TLcsLJZ/nnVKk/gNUm4LCQH/mg72WV8ZxVvivwlgMDzlzezZUZgQ31UahuapX+PqswKtnIUAi8nfBQ7TgCB515ACDz3MqEjCKgJIPDU6Cg0QACBZyAkWlQRQOBtwja+72+lX+WYdhlOv3m1zP/Xxta/S0i8k+8bJBVVuqtmVEEZK4p7Vyy2LK6T+JomCXlXF4V37CihruW+rwKB5ztyJvSRAALPR9hM5TsBBJ7vyNNOiMBLi4gDIGCHAALPTlZ0mj0BBF72zKiwQaAUBV5N8zLvFtozWm+h7d1hTzmq761bvYX2kbOWyvol0ZRAj762n/TdtdJGyEXosmVpnbQsqf9i5khIIqO6S6jMX+mJwCtC+EzpGwEEnm+omagIBBB4RYCeZkoEnnuZ0BEE1AQQeGp0FBoggMAzEBItqgiUosBLgEq8xOLTun9JVaSnDKo6WMKhsq3y++iFjfLKratb/773LpVy9LV9JRzmbeBbgxb7cIPEq1OlZ2R4Fwl1r1DtU20RAk9LjjoLBBB4FlKiRy0BBJ6WXOHqEHiFY8vIEPCdAALPd+RM6CMBBJ6PsJnKVwKlKvCyhfzJq7Wy8OVa6TqgXEYe21WqukWyHaKkjt/iCjzvtuPInlyBV1KbgMUWnAACr+CImaCIBBB4RYS/lamdFnhr19fIxVfdKSs+XydPTLuqdQmLl62
SX153t8z7eJEM6NdLLjn3VBmz+7Dk3y9b86VbBbay6P49K2X52gb30qAjCORIAIGXI0DKnSaAwHM6HprLgQACLwd4lG6VQNtn4IV2qJJwN3+vvks0xxV4bNIgE0DgBTld1obAc28POCvwausa5KQzp8rYA0bJS6/PSRF43z33ajns4DHynePHy2tvvefJvLvkhQdukPKyCALPvT1GRz4SQOD5CNvRqd79uE5mfVAr3bqUyWF7d5Gunbd+S5qjS9hqWwg8a4nRb6YEEHiZkuI4iwQQeBZTo+dMCSDwMiXFcRYJIPDcS81ZgVdX3yCr11Yn/1x2wz2tAm/Nug0y4eTJMuOp26QssunWiYmTpsjkH58k+47eFYHn3h6jIx8JIPB8hO3gVAl5d/fjn7d2tl33MrnwtAHSoTwYz4hC4Dm46WgpLwQQeHnByCCOEkDgORoMbeWFAAIvLxgZxFECCDz3gnFW4G1G9fbcj1IE3ttz58vUG++Rx/50ZSvNCy7/new3ZoSc8I1xCDz39hgd+UgAgecjbAenuvep1TJ7Xm1KZz85qa/sNCAYb2lE4Dm46WgpLwQQeHnByCCOEkDgORpMAdp64bGwTH8+Ih0q4zJhYkz2OjBegFncGhKB51YedJNfAgi8/PLMx2hFFXiJq+mWr1qzxToGes+169GtS/LrbQXea2+9K7f88WG5/44prXWXXnuX7DJkeznt20dJfWMsLZfKirA0NLWkPY4DIGCNQDgs3pWpYWmKluj+NnyhWT5af+Rfa+TpV9enbNurzhwkfXv5/8yjQvy3U+49gD3mbe2WluD/QFAIfozpLoGySCj5vbs5Fvf+lOj3b3fjobMcCSTeFOxtb4k28707R5ROl8+aIfLbq1PPZi7/TYvsOCQfZzj5X3q+dmOFt7ljLS3iffvmA4PA7YGqDpGM/ErgFu7wghKZ5PMTinufTAecPvMdeezZV7Y4/IRvHir7jd6tXYE3+935MuX6aSnPxDv/stvkwL1HysRjxsramsa00yd+U7JuY1Pa4zgAAtYIlHsnEYmr8DY2NFtrPT/9ZvzdJz/T5XWUUO4nuBvrYnL7Qyvls+WNEvbGO/qr3eWoA7rntc1iDtapqkyaPTnd2IzgKGYOzJ1/AokrlDp6J2QNTTGpy+AXkfnvgBEhUDgCFWVhqUicm9SX6LlJ4dA6NfJf7vSuvnsh9Vzm5EktcshRjp6cZf4j6zY5J85Nmrzzkmip/vL8y3RyP5V1ak/TjEjPLh0y8iuw8o9AIpN8frISeJlM3PYKvHXVNXLECRfIK4/fKlWVm64q+dopk+VXP58ko0cO4xbaTKByTGAJcAttYKPNeGGJ89EVa5qka6eIdKrK729oMm6iQAdyC22BwDJs0QlwC23RI6CBAhLgFtoCwnVo6DlvhuSuG1JfnDX5mmbZfidHBV6e2HELbZ5AMoyTBLiF1r1YinoLbSY42gq8RM0Pzr9O9hm1q0w65Rh55t8zk7fUPvOX6yTiXX20bE192mH796yU5Wsb0h7HARCwRgCBZy0x+s2GAAIvG1oca4kAAs9SWvSaLQEEXrbE7B7PM/DsZkfnEGiPAALPvX3hrMB7cfosuXDq7SLe5STR5piUl5fJ4EH95NG7r5SlK1bLL67+g8xbsFgGDegjl13wPfnK8J2SdBF47m0yOvKPAALPP9bM5D8BBJ7/zJnRHwIIPH84M0txCCDwisOdWf0hwBV4/nBmluIQQOAVh/u2ZnVW4GlRIfC05KgLAgEEXhBSZA1bI4DAY28ElQACL6jJsq4EAQQe+yDIBBB4QU6XtSHw3NsDCDz3MqEjCKgJIPDU6Cg0QACBZyAkWlQRQOCpsFFkhAACz0hQtKkigMBTYaPICAEEnntBIfDcy4SOIKAmgMBTo6PQAAEEnoGQaFFFAIGnwhaoovpQszRKi3SPb3pBW5A+CLwgpcla2hJA4LEngkwAgedeugg89zKhIwioCWgF3hsfr5I3568S7z0
wcsiIATJiUE91DxRCoFAEEHiFIsu4xSaAwCt2AsWdf2b5avkgUp1son+sSg6L9pNy8f4POSAfBF5AgmQZ7RJA4LExgkwAgedeugg89zKhIwioCWgE3vzl1fLwjAUpc55+2K7St3tHdR8UQqAQBBB4haDKmC4QQOC5kEJxelgZrpdnKpalTL5XtKfsHutRnIYKMCsCrwBQGdIZAgg8Z6KgkQIQQOAVAGqOQyLwcgRIOQRcIqAReC/OWSJvLViVsowjR+0gY4b0cmlp9AIBQeCxCYJKAIEX1GTTr2tu2TqZVbY25cDBLZ1lbFPf9MVGjgiKwItFRRbMiMmaz0Q69QzJkP1D0nm7kJEUaLNQBBB4hSLLuC4QQOC5kEJqDwg89zKhIwioCWgEHlfgqXFT6DMBBJ7PwJnONwIIPN9QOzdRQygmj1YslkbvfxOfhA6a0DhA+sarnOtV21BQBN7CGS2y9L2WVgwVHUOy74kRCQXnbmdtxCVdh8Ar6fgDv3gEnnsRI/Dcy4SOIKAmoBF4icl4Bp4aOYU+EkDg+QibqXwlgMDzFbdzk1WHovJeZL1Ewy2ya7RroORdAnZQBN6sh2JStz6esn/2mhiRjt25Cs+5/6h8bAiB5yNspvKdAALPd+RpJ0TgpUXEARCwQ0Ar8OyskE5LmQACr5TTD/baEXjBzrfUVxcUgccVeKW+k9tfPwKPfRFkAgg899JF4LmXCR1BQE0AgadGR6EBAgg8AyHRoooAAk+FjSIjBIIi8HgGnpEN53ObCDyfgTOdrwQQeL7izmgyBF5GmDgIAjYIIPBs5ESXOgIIPB03qtwngMBzPyM61BMIisDTE6AyyAQQeEFOl7Uh8NzbAwg89zKhIwioCSDw1OgoNEAAgWcgJFpUEUDgqbBRZIQAAs9IULSpIoDAU2GjyAgBBJ57QSHw3MuEjiCgJoDAU6Oj0AABBJ6BkGhRRQCBp8LmTNGSlgUyR2ZIQ7xehoVGyh7hA5zpzYVGEHgupEAPhSKAwCsUWcZ1gQACz4UUUntA4LmXCR1BQE0AgadGR6EBAgg8AyHRoooAAk+FzYmiGqmWp5rvkRbvn82f/SNHytDQiKz7WxjeKEvLGqVCQjIk2kl6xztkPYaLBekEXuLZchsWh5Otd9u+RcIVLq6CniDQPgEEHjsjyAQQeO6li8BzLxM6goCaAAJPjY5CAwQQeAZCokUVAQSeCpsTRZ/G58mrsWdSetkxPFwODn8tq/6WhRvkg7INrTUh79/2j24nHeORrMZx8eBtCbxYk8jC58qlcUNixSIVnUWGTGiSsmC4SxfjoKc8E0Dg5RkowzlFAIHnVBzJZkpS4PXrWSkr1zZI3L086AgCORFA4OWEj+IMCFQvXyfllRXSsUenDI7O7yEIvNx5zvv0M3nulRlSWVEhx4z7qgzo0zv3QRkhZwIIvJwRFm2AfF2B964n71Z6Eu/Ln5HNXaVvS2XR1paviSsrwlJVUSbrNnq2rs1n7fyILHsjVVIO2DcmPYfF8jU940CgoAQQeAXFy+BFJoDAK3IA7UyPwHMvEzqCgJoAAk+NjsI0BKINUXnmusdlyTuLkkeOnDBKDjnjMF+5IfByw71w8RK55De3SSy26Qfjzh07yq8vOk+269E9t4GpzpkAAi9nhEUdIPEMvP/GX5NGaZCdvWfg7al4Bl6wr8BD4BV1gzJ5QQkg8AqKl8GLTACBV+QAEHibCHAFnnsbkY7yQwCBlx+OjLIlgblPz5bpd/875S+OnXqCDBixvW+4EHi5ob7vyafliX/+J2WQc087WQ4aMyq3gR2r3tAYldqmmPTvYufKJQSeY5uoSO0E9xl4Wxd4zY0in7xQIY3Vm6BXdInL0AlRifAcvCLtQqbNlgACL1tiHG+JAALPvbS4As+9TOgIAmoCCDw1OgrTEHj+pn/Ix6/OSzlq/1MOljHH7esbOwRebqhffG2m/P7Bh1MGueTMSbLn8GG5DexQ9SPvL5fnPl6Z7Gi
X7TrLWfsNlsoy958hhsBzaBPRSt4JbOsW2sRkvMQi78gZ0EcCCDwfYTOV7wQQeL4jTzshAi8tIg6AgB0CWxN40aY6+fSTl7yFhGTw0MOkrIxfbdtJ1Y1OVy1YIQ9f/DeJxzc9PbRDxw5y8q2nS1XXjr41iMDLDXW0uVlunHafzHr3/eRARx58gJwx8bjcBnWo+pN1tXLN9PkpHR03or9M2Lmvb102x1rkxZc/lvkLV8uIYX1k7EFDpCyy6e2a2/og8NIR4u8tE0gn8Cyvjd4hgMBjDwSZAALPvXQReO5lQkcQUBNoT+DV162RJx87SzbWrEiO273HjnLMcXdIRXmVeh4KS5PA8g+WyLvPvSPlHStk1DF7SfcBPbYA8f7SdfLqvOXSuapcxo/cXnp1yd8+Q+DlZ98t/3y1VHWokO5du+ZnQEdGeenTNfLXdxandLP3wB4yaa8dfevwngdmyfTXP22d75ADBstpJ4xJOz8CLy0iDjBMAIFnODxaT0sAgZcWEQcYJoDAcy88BJ57mdARBNQE2hN4777zd3ljxm0pY447/JcyZOfD1fNQCIH2CHy0bJ1c//R/W/+qm3eV3pTj95bO3ltr8/FB4OWDYnDH2NjULFP/M0+qvReuJD6hkMj5BwyVXXp18WXRLS1xOfvnj0uT9/y9zZ+u3nP4bpx6dNr5EXhpEXGAYQIIPMPh0XpaAgi8tIg4wDABBJ574SHw3MuEjiCgJtCewPvg/SdkxvQbU8b86rifybDhX1PPQyEE2iNwn/eMvJc/WJbyVz85ag8ZOWi7vABD4OUFY6AHWV3bKC8s/Fw2ehLt4B22k916d/Z1vb+89gVZvmJD65z9+3WVK342Pm0Pfgq8ao/Nwo1R6VYeliFd8iPX0y6QA0qaAAKvpOMP/OIReIGPuKQXiMBzL34EnnuZ0BEE1ATaE3gNjRvk8YcmSe3GTQ9279Z9B/nm8b+X8nI7b2hUA6HQVwLPvvOZPPLGwpQ5f/7NMTK4T7e89IHAywtGBikggQ/mfy533DNTaj2R2K1rlfzwtH1kl6G9087ol8BbVBuVRxbVSNS7WjDxGdG9gxw90F/JmRYGBwSOAAIvcJGyoC8RQOCxHYJMAIHnXroIPPcyoSMIqAls7SUWTU21yZdYhEIRGTzkUF5ioSZM4bYINERjcov3jLyPV6xPHjZhzx3l+H2G5A0aAi9vKBmogAQavSvcVq7aIP37dZPysvQvsEi04pfAS8i7BTVNKav/8fAe0inDPguIjaEDTACBF+BwWZog8NgEQSaAwHMvXQSee5nQEQTUBLYm8NQDUggBBYHl6+ukU4cy6VqV39vzEHiKMCgxQcAvgffkko3yYXVjCpNJw7pL94qICU40aZMAAs9mbnSdGQEEXmacOMomAQSee7kh8NzLhI4goCaAwFOjo9AAAQSegZBoUUXAL4G3pC4qD35WIzFuoVXlRJGOAAJPx40qGwQQeDZyoksdAQSejlshqxB4haTL2BDwmQACz2fgTOcrAQSer7iLPtn61fUy99VlsmFNg/Qf3FX2OGiAlAX0SjG/BF4i1PXeLb4LaqLSo4KXWBR9kxewgaZ4vcyKPimLYnOlR3iA7FN2rPSI9C/gjFsfGoFXFOxM6hMBBJ5PoJmmKAQQeEXBvs1JEXjuZUJHEFATQOCp0VFogAACz0BIeWqxxbtC7Nl7P5BG74qxzZ+hu/eSPQ4emKcZ3BrGT4Hn1srpplAEZjQ+KAtib7YO3znUU75V9TMJSWbPZcxnXwi8fNJkLNcIIPBcS4R+8kkAgZdPmvkZC4GXH46MAgEnCCDwnIiBJgpEAIFXILAODluzrlFevP/DlM669KiUI04c7mC3ubeEwMudISOkEniy4ddS3bIq5YvfqLxIuoX7+I4Kgec7cib0kQACz0fYTOU7AQSe78jTTojAS4uIAyBghwACz05WdJo9AQRe9sysVnAFntXk6NsVAlyB50oS9BF0Agi8oCdc2utD4Lm
XPwLPvUzoCAJqAgg8NToKDRBA4BkIKY8tJp6BN2f6Mtm4btMz8HY/aKCUe89tC+KHK/CCmGpx15R4Bt4b0cdkaez9Tc/AK/eegRfmGXjFTYXZg0gAgRfEVFnTZgIIPPf2AgLPvUzoCAJqAgg8NToKDRBA4BkIiRZVBBB4KmwUGSHALbRGgqJNFQEEngobRUYIIPDcCwqB514mdAQBNQEEnhodhQYIIPAMhESLKgIIPBU2iowQQOAZCYo2VQQQeCpsFBkhgMBzLygEnnuZ0BEE1AQQeGp0FBoggMAzEBItqggg8FTYKDJCAIFnJCjaVBFA4KmwUWSEAALPvaAQeO5lQkcQUBNA4KnRUWiAAALPQEi0qCKAwFNho8gIAQSekaBoU0UAgafCRpERAgg894JC4LmXCR1BQE0AgadGR6EBAgg8AyHRoooAAk+FjSIjBBB4RoKiTRUBBJ4KG0VGCCDw3AsKgedeJnQEATUBBJ4aHYUGCCDwDIREiyoCCDwVNoqMEEDgGQmKNlUEEHgqbBQZIYDAcy8oBJ57mdARBNQEEHhqdBQaIIDAMxASLaoIIPBU2CgyQgCBZyQo2lQRQOCpsFFkhAACz72gEBv4shMAAB2RSURBVHjuZUJHEFATQOCp0VFogAACz0BItKgigMBTYaPICAEEnpGgaFNFAIGnwkaREQIIPPeCQuC5lwkdQUBNAIGnRkehAQIIPAMh0aKKAAJPhY0iIwQQeEaCok0VAQSeChtFRggg8NwLCoHnXiZ0BAE1AQSeGh2FBggg8AyERIsqAgg8FTaKjBBA4BkJijZVBBB4KmwUGSGAwHMvKASee5nQEQTUBBB4anQUGiCAwDMQEi2qCCDwVNgoMkIAgWckKNpUEUDgqbBRZIQAAs+9oBB47mVCRxBQE0DgqdFRaIAAAs9ASLSoIoDAU2GjyAgBBJ6RoGhTRQCBp8JGkRECCDz3gkLguZcJHUFATQCBp0ZHoQECCDwDIdGiigACT4WNIiMEEHhGgqJNFQEEngobRUYIIPDcCwqB514mdAQBNQEEnhodhQYIIPAMhESLKgIIPBU2iowQQOAZCYo2VQQQeCpsFBkhgMBzLygEnnuZ0BEE1AQQeGp0FBoggMAzEBItqggg8FTYKDJCAIFnJCjaVBFA4KmwUWSEAALPvaAQeO5lQkcQUBNA4KnRUWiAAALPQEi0qCKAwFNho8gIAQSekaBoU0UAgafCRpERAgg894JC4LmXCR1BQE0AgadGR6EBAgi8woUUjzVKpO4ziXXcUUKRDoWbiJHbJYDAY2MEmQACL8jpsjYEHnsgyAQQeO6li8BzLxM6goCaAAJPjY5CAwQQeIUJKbJ+jlR9MFVCjWskXt5N6r8yVWLd9yzMZIyKwGMPlBwBBF7JRV5SC0bglVTcJbdYBJ57kSPw3MuEjiCgJoDAU6Oj0AABBF7uIcXjUQnF14qEenl/IskBO775PYnUfto6eKzTTlK3z7TcJ2OEjAlwBV7GqDjQIAEEnsHQaDljAgi8jFFxoEECCDz3QkPguZcJHUFATQCBp0ZHoQECCLwcQ4p9IqHo0yLxOpFwZ+9qu2O9/+0vnV8eL6GW6BeDh0JSc/AzIpHKHCekPFMCCLxMSXGcRQIIPIup0XOmBBB4mZLiOIsEEHjupYbAcy8TOoKAmgACT42OQgMEEHi5hRRq+L0n76q/GCQyUOIVJ0vlvF9L+fJ/tH492v9oaRh+UW6TUS3L3qiXj5+oFe/xgjJobKXsfEznrVJB4LFhgkwAgRfkdFkbAo89EGQCCDz30kXguZcJHUFATQCBp0ZHoQECCLwcQopvlFDD7akDhDpIvPInIi3NUrHsEQl7z8KLddtdogMnelfmleUwGaU1S5vllSlrUkCM/lE36bd3+1c1IvDYM0EmgMALcrqsDYHHHggyAQSee+k6LfDWrq+Ri6+6U1Z8vk6emHZVK70Tz5wqH87/zHt+Tyj5ta6dO8rLj96S/Pdla+r
TUu7Xs1JWrm2QeNojOQACtggg8GzlRbfZEUDgZcdri6O922dDze+1fjleto9I+bgcB7VV3tIisnq1SOfOcenYcdM5RCE+i/5TL+/dtyFl6EFjq2TkqV3bnQ6BV4gUGNMVAgg8V5Kgj0IQQOAVgipjukIAgedKEl/04azAq61rkJM8UTf2gFHy0utzUgTe0adeLDdPPUd2HjxwC6IIPPc2GR35RwCB5x9rZvKfAAIvN+bxeLOEYm97V9wtlVB4R4mXJd40u+lFFqXw2bhR5G9/C8vy5d6qvWUfOT4u++xbmF/ltXsF3lndpd/oDgi8UthsrDGFAAKPDRFkAgi8IKfL2hB47u0BZwVeXX2DrF5bnfxz2Q33pAi8scefKw/cOUX69e6JwHNvT9FREQkg8IoIn6kLTgCBV3DEgZ7g6adD8tZbX1x1l7iI/6fnxaRzl8JcifflZ+DtcGiVDP16p63y5Qq8QG+9kl8cAq/kt0CgASDwAh1vyS8OgefeFnBW4G1G9fbcj7YQeKOPnCSH7LeHzH53vvTq2U3OmzRRDtk/cSUBt9C6t8XoyE8CCDw/aTOX3wQQeH4TD9Z8f7wrLMuWpq7p5JNaZOdhxV8nAq/4GdBB4Qgg8ArHlpGLTwCBV/wM6KBwBBB4hWOrHbmoAm/Nug2yfFXqQ54TCxnYr5f06NYluaa2Aq+lJS6/vO4umXDofnLA3iPkpdfmyMW/ulOe/PPVySvy1tc2pWXRtWO51NRFeQZeWlJfHFCY6xOyaIBDMyJQFglLRVlY6hqbMzqeg9whEPr/z/R0pyP3OqmqiEhzLC7RmPcgMz4QyJLA7P/G5YG/f1HUt6/IOT8OSZkD7+tI/PKlQ3lEGqMt3p9YlivjcAi4TaAsEpJy7771+ibOTdxOqrS6i8fz8wiFqg5lEm1u8c5PODcprR1UGqvt1qlCqjPwK9Zo5Oe//uKsuruXST4/Ie+bYcY8ps98Rx579pUt5j/hm4fKfqN3S369vSvw2hac/tNr5H+OHivHHHGA1NanPznoWBmRugZOkLMJPuNQsxmUY/NOwPN3Uu4JvIYmTiLyDpcBi06gQ0VYYp7AS0g8PhDQEHj3vbjMniPSvZvIoeNC0nnrd7VqhlfXJH7xUuFJvCbvh8AmT+LxgUCQCCQEXuIP5yZBSpW1bCZQ6X3vbvbekNTMj5ZsigASSNwhsDEDv2Jt6ZYvTurkZZLPT1YCL5OJ2wq8uvpGmf/JEtlzxNDW8u+cfZWcOvFIOWrcPryFNhOoHBNYAtxCG9hoWZhHgFto2QZBJcAttEFNlnUlCHALLfsgyAS4hTbI6bI2bqF1bw8U9RbaTHC0FXjrqzfK+BMvlJuvOFsO3HukJK7iu+iKO+Qf914j2/XoisDLBCrHBJYAAi+w0bIwBB57IMAEEHgBDpelIfDYA4EmgMALdLwlvzgEnntbwFmB9+L0WXLh1NtFvDtyo941yeXlZTJ4UD959O4r5aUZc+T62++XVWvWJ5+XN/msk2T/MSOSdJetqU9LuV/PSlm5toFn4KUlxQHWCCDwrCVGv9kQ4Aq8bGhxrCUCCDxLadFrtgS4Ai9bYhxviQACz1Ja9JotAQRetsQKf7yzAk+7dASelhx1QSCAwAtCiqxhawQQeOyNoBJA4AU1WdaVIIDAYx8EmQACL8jpsjYEnnt7AIHnXiZ0BAE1AQSeGh2FBggg8AyERIsqAgg8FTaKjBBA4BkJijZVBBB4KmwUGSGAwHMvKASee5nQEQTUBBB4anQUGiCAwDMQEi2qCCDwVNgoMkIAgWckKNpUEUDgqbBRZIQAAs+9oBB47mVCRxBQE0DgqdFRaIAAAs9ASLSoIoDAU2GjyAgBBJ6RoGhTRQCBp8JGkRECCDz3gkLguZcJHUFATQCBp0ZHoQECCDwDIdGiigACT4WNIiMEEHhGgqJNFQEEngobRUYIIPDcCwqB514mdAQBNQEEnhodhQYIIPAMhESLKgIIPBU2iowQQOA
ZCYo2VQQQeCpsFBkhgMBzLygEnnuZ0BEE1AQQeGp0FBoggMAzEFIJthhviUvLmqiEu5RJqDKsIoDAU2GjyAgBBJ6RoGhTRQCBp8JGkRECCDz3gkLguZcJHUFATQCBp0ZHoQECCDwDIRltMV7XJE0zPxYpj0jFvkMlVFGW0Upi66NS98hyia1qFImEpOqI3tJhdLeMar98EAIva2QUGCKAwDMUFq1mTQCBlzUyCgwRQOC5FxYCz71M6AgCagIIPDU6Cg0QQOAZCMlgi/H1tVJ92SMSX12T7L5sp97S6dJjJVxZnnY1df9YKU1zN3xxnCfxup49WMJVkbS1CLysEHGwYQIIPMPh0XpaAgi8tIg4wDABBJ574SHw3MuEjiCgJoDAU6Oj0AABBJ6BkAy22PDE21L/4OspnXc6a7xUHDAs7Wpq/vCZxNY0pdZ+e4CUD+2UthaBlxUiDjZMAIFnODxaT0sAgZcWEQcYJoDAcy88BJ57mdARBNQEEHhqdBQaIIDAMxCSwRZzEXiN72yQ+qdXtq460rdSOn93ewmFQ1mR4BbarHBxsDECCDxjgdFuVgQQeFnh4mBjBBB47gWGwHMvEzqCgJoAAk+NjkIDBBB4BkIy2GLLulrZcPkXt9BGduwlXf7vOAl1SH8LbWK50Q9rpOn9jRLuUS4d9ush4Y7Z3T6bGAOBZ3Dj0HLGBBB4GaPiQIMEEHgGQ6PljAkg8DJG5duBCDzfUDMRBApPAIFXeMbMUDwCCLzisQ/6zPHaRom+9YnEvWfYZfMSi3xxQeDliyTjuEgAgediKvSULwIIvHyRZBwXCSDw3EsFgedeJnQEATUBBJ4aHYUGCCDwDIREiyoCCDwVNoqMEEDgGQmKNlUEEHgqbBQZIYDAcy8oBJ57mdARBNQEEHhqdBQaIIDAMxASLaoIIPBU2CgyQgCBZyQo2lQRQOCpsFFkhAACz72gEHjuZUJHEFATQOCp0VFogAACz0BItKgikK3Ai8Vi8tnipdK/bx+pqqpUzUkRBPwigMDzizTzFIMAAq8Y1JnTLwIIPL9IZz4PAi9zVhwJAecJIPCcj4gGcyCAwMsBHqVOE8hG4C1eulyuuek2WbHyc6moqJCzzjhNDj5gH6fXR3OlTQCBV9r5B331CLygJ1za60PguZc/As+9TOgIAmoCCDw1OgoNEEDgGQiJFlUEshF419x0u7z59n9b56ms7CD33vkbCYfDqrkpgkChCSDwCk2Y8YtJAIFXTPrMXWgCCLxCE85+fARe9syogICzBBB4zkZDY3kggMDLA0SGcJJANgLvB2dPlvXV1SnruPnay2X7Af2cXBtNQQCBxx4IMgEEXpDTZW0IPPf2AALPvUzoCAJqAgg8NToKDRBA4BkIiRZVBLIReI8//bz8+W8Pt86zx8jdZMrPzlPNSxEE/CCAwPODMnMUiwACr1jkmdcPAgg8PyhnNwcCLzteHA0Bpwkg8JyOh+ZyJIDAyxEg5c4SyEbgxeNx+c8rr8vrb82WwTsOkm8cdbh06tTR2bXRGAQQeOyBIBNA4AU5XdaGwHNvDyDw3MuEjiCgJoDAU6Oj0AABBJ6BkGhRRSAbgaeagCIIFJEAAq+I8Jm64AQQeAVHzARFJIDAKyL8rUyNwHMvEzqCgJoAAk+NjkIDBBB4BkKiRRUBBJ4KG0VGCCDwjARFmyoCCDwVNoqMEEDguRcUAs+9TOgIAmoCCDw1OgoNEEDgGQiJFlUEEHgqbBQZIYDAMxIUbaoIIPBU2CgyQgCB515QCDz3MqEjCKgJIPDU6Cg0QACBZyAkWlQRQOCpsFFkhAACz0hQtKkigMBTYaPICAEEnntBIfDcy4SOIKAmgMBTo6PQAAEEnoGQaFFFAIGnwkaREQIIPCNB0aaKAAJPhY0iIwQQeO4FhcBzLxM6goCaAAJPjY5CAwQQeAZCokUVAQSeChtFRggg8IwERZsqAgg8FTaKjBBA4LkXFALPvUzoCAJqAgg8NToKDRB
A4BkIiRZVBBB4KmwUGSGAwDMSFG2qCCDwVNgoMkIAgedeUAg89zKhIwioCSDw1OgoNEAAgWcgJFpUEUDgqbBRZIQAAs9IULSpIoDAU2GjyAgBBJ57QSHw3MuEjiCgJoDAU6Oj0AABBJ6BkGhRRQCBp8JGkRECCDwjQdGmigACT4WNIiMEEHjuBYXAcy8TOoKAmgACT42OQgMEEHgGQqJFFQEEngobRUYIIPCMBEWbKgIIPBU2iowQQOC5FxQCz71M6AgCagIIPDU6Cg0QQOAZCIkWVQQQeCpsFBkhgMAzEhRtqggg8FTYKDJCAIHnXlAIPPcyoSMIqAkg8NToKDRAAIFnICRaVBFA4KmwUWSEAALPSFC0qSKAwFNho8gIAQSee0Eh8NzLhI4goCaAwFOjo9AAAQSegZBoUUUAgafCRpERAgg8I0HRpooAAk+FjSIjBBB47gWFwHMvEzqCgJoAAk+NjkIDBBB4BkKiRRUBBJ4KG0VGCCDwjARFmyoCCDwVNoqMEEDguRcUAs+9TOgIAmoCCDw1OgoNEEDgGQiJFlUEEHgqbBQZIYDAMxIUbaoIIPBU2CgyQgCB515QCDz3MqEjCKgJIPDU6Cg0QACBZyAkWlQRQOCpsFFkhAACz0hQtKkigMBTYaPICAEEnntBIfDcy4SOIKAmgMBTo6PQAAEEnoGQaFFFAIGnwkaREQIIPCNB0aaKAAJPhY0iIwQQeO4FhcBzLxM6goCaAAJPjY5CAwQQeAZCokUVAQSeChtFRggg8IwERZsqAgg8FTaKjBBA4LkXFALPvUzoCAJqAgg8NToKDRBA4BkIiRZVBBB4KmwUGSGAwDMSFG2qCCDwVNgoMkIAgedeUAg89zKhIwioCSDw1OgoNEAAgWcgJFpUEUDgqbBRZIQAAs9IULSpIoDAU2GjyAgBBJ57QSHw3MuEjiCgJoDAU6Oj0AABBJ6BkGhRRQCBp8JGkRECCDwjQdGmigACT4WNIiMEEHjuBYXAcy8TOoKAmgACT42OQgMEEHgGQqJFFQEEngobRUYIIPCMBEWbKgIIPBU2iowQQOC5FxQCz71M6AgCagIIPDU6Cg0QQOAZCIkWVQQQeCpsFBkhgMAzEhRtqggg8FTYKDJCAIHnXlAIPPcyoSMIqAkg8NToKDRAAIFnICRaVBFA4KmwUWSEAALPSFC0qSKAwFNho8gIAQSee0Eh8NzLhI4goCaAwFOjo9AAAQSegZBoUUUAgafCRpERAgg8I0HRpooAAk+FjSIjBBB47gWFwHMvEzqCgJoAAk+NjkIDBBB4BkKiRRUBBJ4KG0VGCCDwjARFmyoCCDwVNoqMEEDguRcUAs+9TOgIAmoCCDw1OgoNEEDgGQiJFlUEEHgqbBQZIYDAMxIUbaoIIPBU2CgyQgCB515QgRN47iGmIwhAAAIQgAAEIAABCEAAAhCAAAQgAAEIuEMgFPc+7rRDJxCAAAQgAAEIQAACEIAABCAAAQhAAAIQgMCXCSDw2A8QgAAEIAABCEAAAhCAAAQgAAEIQAACEHCYAALP4XBoDQIQgAAEIAABCEAAAhCAAAQgAAEIQAACzgu8D+Z/Jj+dcpscvO/ucul5pyYTa2qKyugjJ0l5eVlrgocdNFpuvOwsEoWAGQLNsZj85g8PyZ/uf0Zeefy30qNbl9be//jXf8j9j/1TmqLNcsQhe8svfnKKlEUiZtZGoxDYTIDv1+yFoBFYvGyV/PK6u2Xex4tkQL9ecsm5p8qY3YcFbZmspwQJTJ85V868+EYpK/vifOOiM0+UU44/ogRpsOSgEHjqxRly+Q3T5MqfnSFHjduHc+2gBMs6ZO36Grn4qjtlxefr5IlpV7USufHOB2Xag89KOBxu/doDd0yR4UMHQS0ABJwWeLPfnS9X/uZe2XnwQOnSqWOrwFu9tlq+dfol8urjtwYgApZQqgTOueRm2XXnHeSOe5+Qlx+9pVXgvTH7Q5ly/d1y728vkY5VHeScS2+Rww/
eS04+7vBSRcW6DRPg+7Xh8Gi9XQLfPfdqOezgMfKd48fLa2+958m8u+SFB26Q8i9JD9BBwCKBp/85U154+U256fKzLbZPzxDYgkBCYsyaM08+X7NeTj/x660Cj3NtNot1ArV1DXLSmVNl7AGj5KXX56QIvISwHjZkED87Wg95K/07LfAWLV0pvXp2kz///XlJ/BC4+Qq8TxYt935DeJM8+9frAhoLyyoFAh96V28kBN7uh52eIvCuuOnP0q9PT5l0yjFJDP9+bbZMe+BZuefmn5cCFtYYMAJ8vw5YoCW+nDXrNsiEkyfLjKdua70qeuKkKTL5xyfJvqN3LXE6LN86gQef+LfM/fATuWLy960vhf4hkCSQONdOXHV0xgW/lhO+eWirwONcmw1inUBdfUPSjyT+XHbDPSkC78Kpt8vY/feUbxx5oPVl0n87BJwWeJv7vePPT6QIvHfeX5C8KmnIjv1l/sKlMnznQfLL806TnQb1I2QImCPQVuD94ILr5MRvHSbjvVtnE5+FnrA+/bxr5KVHbja3NhqGAN+v2QNBIvD23Pky9cZ75LE/Xdm6rAsu/53sN2aEnPCNcUFaKmspQQKJx3c8/9Kb3qNqmmVddY18db895OfnnCKdOlaWIA2WHCQCPzj/uhSBx7l2kNIt7bW8PfejLQTeDyffIC0tcfl0yQoJeXi+7Z2fbL4wpLRpBWP1RRd4i5aukg0ba7egucvg7aWiojz59bYCb8GnS+Xeh16Q0044SgZ6z5/53bTH5KUZc1JOqIMRD6uwTKChsUk+9vZq20/3rp1l+/69W7/cVuCdctaV8qPTvpk8cU58lq9cI8d+/1KZ+Y/bLeOg9wAT2NZeb/T+O+D7dYDDL7GlvfbWu3LLHx+W+71nyWz+XHrtXbLLkO3ltG8fVWI0WG7QCLzw8lvy3rxP5XsnTJCWeFwuuuJ2GbrjAO85vN8J2lJZT4kRaCvwONcusQ0Q4OW2J/B+f9+T0qVzR/mfrx8in3l3NCaEXuKXMZsvDgkwjpJYWtEFXuLWwLkfLtwCdmKTJW6fbU/gtT042hyTfSb8rzx//w3Sp1f3kgiORbpPYNmK1XKD9xDRtp+99hie8kyCtgLvjAt/Ld8+ZlzrZf4JYZ342r8f+o37i6bDkiSQ6V5PwOH7dUlukcAsOvFs3inXT0u5VeX8y26TA/ceKROPGRuYdbIQCCQIvOU9OyzxwpZn/nItQCBgmkBbgce5tuk4af5LBNoTeG0B/e6ex2Xl52vl8gtPh10ACBRd4GXCsO0VeIkHkW6oqZWhOw1MlifecriXJ/C+/CKATMblGAi4QKCtwPvVLfdJ186d5OzvH5ds76kXZshjz70if7z+IhfapQcIZEWA79dZ4eJgxwkkbis84oQLvDeH3ypVlRXJbr92ymT51c8nyeiRvInW8fhoLw2BxC8Mu3jnH5t/GT7De0nL1bf+NUVYAxECFgm0FXica1tMkZ7bI9CewEt8beTwwa13M97s3TlQs7Gu9X0CkLRNwKTAmz7zneTrwO+55RfSr3dPuW3ao8k3wd1/+//ZToPuS5JAW4GX+KY7+Yo75L7bLpVOVZWSeE7HScceLsd97aslyYdF2ybA92vb+dH9lgQSPwjuM2rX5PNknvn3zOQttc/85TqJRMLggoBpAjfc8aDM/2Sx3HjZWcnnJyWuLk28bOv8H55gel00D4G2Ao9zbfZEUAi0J/BO/NHlcoj3EosfnfYtWbJ8lXzPe5b65Rd+33s80+5BWXZJr8NpgXeN91u/+x//l3cS0SJx71kckUjEu7VwrFxy7qnyh788JX999EVp9K6+SxjmKRd8L/k8PD4QsEBgffVGGTfxvGSr0WizlJeXJf/9xQduSN46/qf7n5F7H35eYrEW+frh+8tFZ54o4XDiMaR8IGCPAN+v7WVGx1snsNR7PMIvrv6DzFuwWAYN6COXeecfXxm+E8ggYJ5AXX2jTL3pHnn59TlSXlYmhx44Wn5
29smtV5uaXyALKDkCibeEJ55H3ew9bikSDkvIO5e+9pL/9R5Tsy/n2iW3G4K14Benz5LE22Y9SZJ8PE3iZ8nB3gs9H737Svl08QrvxRbTkm9h7uo9C+/UiUcm//AJBgGnBV4wELMKCEAAAhCAAAQgAAEIQAACEIAABCAAAQjoCSDw9OyohAAEIAABCEAAAhCAAAQgAAEIQAACEIBAwQkg8AqOmAkgAAEIQAACEIAABCAAAQhAAAIQgAAEIKAngMDTs6MSAhCAAAQgAAEIQAACEIAABCAAAQhAAAIFJ4DAKzhiJoAABCAAAQhAAAIQgAAEIAABCEAAAhCAgJ4AAk/PjkoIQAACEIAABCAAAQhAAAIQgAAEIAABCBScAAKv4IiZAAIQgAAEIAABCEAAAhCAAAQgAAEIQAACegIIPD07KiEAAQhAAAIQgAAEIAABCEAAAhCAAAQgUHACCLyCI2YCCEAAAhCAAAQgAAEIQAACEIAABCAAAQjoCSDw9OyohAAEIAABCEAAAhCAAAQgAAEIQAACEIBAwQkg8AqOmAkgAAEIQAACEIAABCAAAQhAAAIQgAAEIKAngMDTs6MSAhCAAAQgAAEIQAACEIAABCAAAQhAAAIFJ4DAKzhiJoAABCAAAQhAAAIQgAAEIAABCEAAAhCAgJ4AAk/PjkoIQAACEIAABCAAAQhAAAIQgAAEIAABCBScwP8D/9u5iHINDGUAAAAASUVORK5CYII=", + "text/html": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# We humans find it easier to visalize things in 2D!\n", + "# Reduce the dimensionality of the vectors to 2D using t-SNE\n", + "# (t-distributed stochastic neighbor embedding)\n", + "\n", + "tsne = TSNE(n_components=2, random_state=42)\n", + "reduced_vectors = tsne.fit_transform(vectors)\n", + "\n", + "# Create the 2D scatter plot\n", + "fig = go.Figure(data=[go.Scatter(\n", + " x=reduced_vectors[:, 0],\n", + " y=reduced_vectors[:, 1],\n", + " mode='markers',\n", + " marker=dict(size=5, color=colors, opacity=0.8),\n", + " text=[f\"Video: {t}
Text: {d[:100]}...\" for t, d in zip(video_numbers , documents)],\n", + " hoverinfo='text'\n", + ")])\n", + "\n", + "fig.update_layout(\n", + " title='2D Chroma Vector Store Visualization',\n", + " scene=dict(xaxis_title='x',yaxis_title='y'),\n", + " width=800,\n", + " height=600,\n", + " margin=dict(r=20, b=10, l=10, t=40)\n", + ")\n", + "\n", + "fig.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "id": "e1418e88-acd5-460a-bf2b-4e6efc88e3dd", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.plotly.v1+json": { + "config": { + "plotlyServerURL": "https://plot.ly" + }, + "data": [ + { + "hoverinfo": "text", + "marker": { + "color": [ + "#d01f72", + "#75195e", + "#3678a7", + "#5b3f83", + "#74a788", + "#571122", + "#4099c1", + "#659222", + "#188ca3", + "#6d4052", + "#35303c", + "#a9e927", + "#29fa15", + "#71c500", + "#9b9d6e", + "#cf7e83", + "#badd6d", + "#85fa26", + "#22463b", + "#ce865d", + "#f59c06", + "#011995", + "#793548", + "#ad8b14", + "#d937bd", + "#2b9f18", + "#046e5c", + "#75b5e3", + "#c959de", + "#72e048", + "#8e8cab", + "#20f2c3", + "#64f999", + "#e69670", + "#6a0fce", + "#d65c3a", + "#7bee34", + "#4f86b8", + "#b43417", + "#4dfb77", + "#2ae342", + "#c3e1f2", + "#12897b", + "#2b3af3", + "#7ea8e9", + "#6ad041", + "#0bdacc", + "#99fe53", + "#4aaf9f", + "#d156c8", + "#505bd9", + "#dc152c", + "#b52bf6", + "#9baca0", + "#a03134", + "#d43c00", + "#5af098", + "#2c168d", + "#c6016b", + "#f090af", + "#482281", + "#39821f", + "#e0a8df", + "#480c89", + "#08808d", + "#ac5faf", + "#0faf59", + "#79c82a", + "#e6e164", + "#0d2037", + "#8afd40", + "#2e1afc", + "#3ec815", + "#fbfef2", + "#a63fa4", + "#b27d2e", + "#ca3592", + "#b9fd23", + "#ac9648", + "#804ce2", + "#9b5e28", + "#a64739", + "#c457d7", + "#de30e4", + "#1f6ab0", + "#6ff3c5", + "#6df6ca", + "#ed694d", + "#2fef1a", + "#335dcf", + "#845aa9", + "#574e28", + "#dc95ec", + "#b2140a", + "#15ae86", + "#70d1d9", + "#6f745a", + "#b3dba5", + "#108c41", + "#268bba", + "#913568", + 
"#1a6fdf", + "#422abb", + "#cb725f", + "#fe62a5", + "#dfc6c7", + "#b25d7b", + "#bd53b1", + "#796278", + "#048452", + "#c6eff5", + "#d24e5d", + "#fe8e92", + "#22398f", + "#3e5237", + "#8069bc", + "#7740be", + "#cc8ec0", + "#b280bb", + "#91f4db", + "#ac55ba", + "#c97596", + "#116019", + "#43c2e8", + "#2a2d25", + "#fc2b74", + "#ae7afe", + "#92b4fa", + "#dd8cd7", + "#4862ce", + "#af0f59", + "#ad6bd0", + "#3f0a72", + "#e01073", + "#144ada", + "#5cb9ca", + "#51d0da", + "#d6d07a", + "#b61e76", + "#474ff9", + "#68bece", + "#d01b19", + "#ee26df", + "#2ebca4", + "#539908", + "#ec0a37", + "#1a5613", + "#da28db", + "#246fa5", + "#bbfe83", + "#d54222", + "#580c96", + "#02cada", + "#996ff1", + "#e2a239", + "#ae5204", + "#4ce72d", + "#2cde7f", + "#b64eac", + "#591ab9", + "#a958c9", + "#696eaa", + "#4c4355", + "#6a6c06", + "#df5d2e", + "#9780cf", + "#682d42", + "#efed10", + "#1b312a", + "#dbde1c", + "#e1b5db", + "#a95826", + "#4e797a", + "#10384a", + "#9a5ba2", + "#d34482", + "#8a29da", + "#fb9dce", + "#ff2d6a", + "#50f10d", + "#f8d349", + "#7b4427", + "#11a70e", + "#987252", + "#c932c1", + "#2d7f7d", + "#c1e3c5", + "#0c777d", + "#0f8781", + "#dd889c", + "#799a24", + "#4212f1", + "#e6f378", + "#805527", + "#091a90", + "#a9541c", + "#fcdcad", + "#01f59b", + "#94a85d", + "#426575", + "#7f03bd", + "#2dcfac", + "#52b6df", + "#73e76a", + "#d70d97", + "#601568", + "#d4b1ce", + "#7341ee", + "#bb0ee6", + "#f645e0", + "#1c2c7e", + "#7dd58b", + "#4b9a93", + "#9df332", + "#612b32", + "#b1c27d", + "#3626a5" + ], + "opacity": 0.8, + "size": 5 + }, + "mode": "markers", + "text": [ + "Video: 59506507
Text: Well, I realized that was a whole lot of theory, but I hope it gave you a good intuition that will\nb...", + "Video: 59671315
Text: Okay, so here we are, back in Jupyter Lab, ready for the next use of a frontier model, and see this\n...", + "Video: 60616895
Text: It feels like 100 videos ago that I told you that we were going to have instant gratification with o...", + "Video: 60619275
Text: And we will conclude our expedition into the world of frontier models through their chat interface b...", + "Video: 59472693
Text: Friends.\nI am absolutely exhausted.\nI am exhausted and a little tiny bit traumatized.\nAnd you are so...", + "Video: 59670121
Text: So it's business time right now.\nWe are going to build a Rag pipeline to estimate the price of produ...", + "Video: 59295619
Text: Welcome back to the the moment when we bring it all together into a beautiful user interface.\nBut fi...", + "Video: 60617163
Text: And already that wraps up day two.\nNow that you have built that solution.\nAnd congratulations on tha...", + "Video: 60616423
Text: So I hope you've just enjoyed yourself experimenting with different LMS locally on your box using th...", + "Video: 59170227
Text: Welcome back to Google Colab.\nHere we are ready to explore the wonderful world of Tokenizers.\nSo, uh...", + "Video: 59169985
Text: So I hope you enjoyed that whirlwind tour of Google Colab.\nHere's just a little screenshot example o...", + "Video: 60616927
Text: It's time for our first LM experiment at this point.\nSo some of this you may know well, you may know...", + "Video: 59673721
Text: And here we are in JupyterLab for the last time, and we are looking here at day five, the last day\no...", + "Video: 59508055
Text: I'm so very happy that you've reached this epic moment in the course and that you're hanging in ther...", + "Video: 59670259
Text: It's remarkable.\nBut you are now at the 95% point.\nThere's 5% remaining of this course.\nUh, maybe it...", + "Video: 60616623
Text: So we're now going to start week one of the course when we are going to be looking at exploring fron...", + "Video: 59472383
Text: And welcome back to the week six folder.\nWe're now at day two, which is the second and final stage o...", + "Video: 59670171
Text: So as the very final step on this part four of day two of week eight, we are now going to build an\ne...", + "Video: 59297721
Text: And so now the time has come to talk about the most crucial aspect of Rag, which is the idea of vect...", + "Video: 59297599
Text: Well, that was a sneaky detour I took you on in the last one.\nI hope you enjoyed it though, and I ho...", + "Video: 59507635
Text: Look, I hope you're excited.\nYou really should be.\nYou've been through 80% of the course and it's al...", + "Video: 59669375
Text: Here we are for the day.\n2.1 notebook.\nAnd don't let it be said that I don't ever do anything for yo...", + "Video: 59297733
Text: Welcome back to JupyterLab and welcome to your first experiment with the world of Long Chain.\nLet me...", + "Video: 59670369
Text: It is terrific that you're hanging on in there and making such great progress with this course.\nAs w...", + "Video: 59166281
Text: And with that, amazingly, you completed day one of week two already and that gets you to the 15% poi...", + "Video: 59671567
Text: Well, the first thing you're going to notice is that I don't have a notebook open for you.\nAnd that'...", + "Video: 59297593
Text: And welcome to continuing our journey with Hrag.\nAnd today it's time to unveil Liang Chen.\nSo first,...", + "Video: 59166461
Text: And welcome back to the lab.\nHere we are in Jupyter Lab and we are going to go into week two.\nAnd we...", + "Video: 59167007
Text: Well, how fabulous is that?\nI hope that you are as wowed as I am by our new airline, I assistant and...", + "Video: 59508121
Text: The moment has arrived.\nHere we go.\nWe're in fine tuning.\nWe do fine tuning.\nTrain.\nThere is also a ...", + "Video: 59295579
Text: All right.\nAre you excited to see how this goes?\nLet's give it a try.\nSo in this next section, I cre...", + "Video: 60620375
Text: And with that, we've reached an important milestone.\nThe first week of our eight week journey is com...", + "Video: 59472491
Text: Welcome back.\nIf you are following along with me in JupyterLab, as I hope you are, then you will nee...", + "Video: 59472425
Text: Welcome to week six, day three.\nToday is going to be a day that you will either love or you will hat...", + "Video: 59508057
Text: Actually slight change in plan.\nI'm going to wrap up the day.\nDay three at this point, and say that ...", + "Video: 60619577
Text: And for the final piece of background information, I wanted to take another moment to talk about API...", + "Video: 59170291
Text: Welcome back to Colab and welcome back to our business project.\nSo again our assignment, we are due ...", + "Video: 60619651
Text: I mentioned before an AI company called vellum.\nWhen we were talking about the different questions, ...", + "Video: 59473191
Text: And you thought we'd never get here.\nHere we are in Jupyter Lab, running our fine tuning for a front...", + "Video: 59170297
Text: And here we are in Google Colab, ready for fun with models.\nSo first we do the usual Pip installs an...", + "Video: 59167015
Text: Welcome back to Jupyter Lab and welcome to Day Five's Lab.\nAnd this is going to be lots of creativit...", + "Video: 59170043
Text: Let me enthusiastically welcome you all back to week three of our LLM engineering journey.\nIf you en...", + "Video: 59473147
Text: Well, I'm very relieved.\nI've got that behind me.\nNo more human testing for me.\nWe'll have one final...", + "Video: 59166453
Text: Welcome back and welcome to our continuing JupyterLab experience.\nUh, I'm hopefully going to keep yo...", + "Video: 59166915
Text: Welcome back to the wonderful world of JupyterLab.\nAnd here we are in week two.\nDay three.\nUh, bring...", + "Video: 59667365
Text: Here we are back in Colab, looking at the week seven, day five of the Colab notebooks and I'm on a\nT...", + "Video: 60616845
Text: We're on the home stretch.\nThis is the final step in the environment setup, and it's an easy one.\nIt...", + "Video: 59295459
Text: And welcome back to More Leaderboard Fest as we go through some more leaderboards.\nBut this time we'...", + "Video: 59471979
Text: So we now turn to the parts of the problem, which is perhaps, let's say, not as glamorous as some\nof...", + "Video: 59503705
Text: And so now we talk about quantization the q and q Laura.\nQ stands for quantized quantized.\nLaura.\nAn...", + "Video: 59472505
Text: So the good news is that this is the very final video about data set curation.\nYou were probably fed...", + "Video: 59669217
Text: And welcome to the next part of visualizing the data.\nAnd just very quickly to show it to you in 3D....", + "Video: 59671221
Text: I gotta tell you, I don't like to toot my horn a whole lot, but I do think that I've done a great\njo...", + "Video: 59503703
Text: Well.\nHello there everybody.\nI am so grateful that you've made it through to the start of week seven...", + "Video: 59473201
Text: Well, before we do a postmortem on what happened, let's just quickly look at the standing the rankin...", + "Video: 60622463
Text: In this video, we're going to set up a full data science environment for Mac users.\nIn the next vide...", + "Video: 60619299
Text: Well, I hope you found that both educational and enjoyable.\nAs we went through and learned so much a...", + "Video: 59295607
Text: So to revisit then the solution that we built in the previous day and talk about the metrics.\nAs I s...", + "Video: 59297575
Text: Well, welcome to the final part on rag.\nAnd this is the session where you go from being a rag expert...", + "Video: 59507687
Text: It's time for action, everybody.\nWe've set up our colab.\nHere we are, week seven, day three.\nWe've g...", + "Video: 59671441
Text: And welcome once more to our favorite place to be Jupyter Lab, the Paradise for a data scientist exp...", + "Video: 59673431
Text: And here we have it.\nThe user interface is completed.\nThe extra notification came through on my phon...", + "Video: 59473137
Text: Let's get straight to it.\nSo the place where you can see everything that's going on and get knee dee...", + "Video: 59166421
Text: Welcome back to the radio day in the lab.\nMore to do.\nLet's keep going.\nWhere we left off is we had ...", + "Video: 59295599
Text: Welcome to the Jupyter Lab for day four.\nIt's going to look very familiar because it's actually I've...", + "Video: 59669631
Text: Here we are in our favorite place to be in JupyterLab, ready for some coding and a lot of coding tha...", + "Video: 59673663
Text: But wait, there's more.\nWe need to add some more to the user interface just to make it look more coo...", + "Video: 59506929
Text: And we return to the hugging face open LLM leaderboard.\nThe first place you go when selecting your b...", + "Video: 59504785
Text: So at this point we're going to talk about hyperparameters.\nAnd we're going to introduce three of th...", + "Video: 59505337
Text: So we're now going to look at four bit quantization, the rather remarkable effect of reducing the pr...", + "Video: 59271655
Text: So here we are on Hugging Face's main landing page at Hugging Face Core.\nA URL you know.\nWell, since...", + "Video: 59472883
Text: Okay, time to reveal the results.\nIt has run to completion.\nAnd here it is.\nSo a moment to pause.\nIt...", + "Video: 59673639
Text: And welcome now to the code for our user interface, which we will find in this Python module.\nPrice ...", + "Video: 59472463
Text: So last time we looked at a humble linear regression model with feature engineering, and now we say\n...", + "Video: 59297595
Text: So by the time you're watching this, hopefully you have played yourself with vectors.\nYou've created...", + "Video: 60619149
Text: So we're going to start our exploration into the world of frontier models by playing with the famous...", + "Video: 59297735
Text: And at last the time has come to see rag in action.\nAfter all of this talk, and here we are.\nWe're i...", + "Video: 60616407
Text: And now over to my Mac people.\nAnd I have news for you.\nIt's exactly the same thing.\nYou go to a fav...", + "Video: 59170235
Text: So here we are in Google Colab for our first collaborative session on the cloud using a GPU box.\nOn ...", + "Video: 59472067
Text: So we've covered steps 1 to 4 of the five step strategy.\nAnd that brings us to step five, which is p...", + "Video: 59472011
Text: Welcome everybody.\nSo in the past I've said quite a few times, I am excited to start this this week ...", + "Video: 59295553
Text: Welcome back.\nIn the last part, we gave our GPT four and clawed the challenge of converting a simple...", + "Video: 59297773
Text: Well, I hope you're eager with anticipation for this session in JupyterLab as we finally get to see\n...", + "Video: 59295583
Text: And here we are back in JupyterLab.\nIt's been a minute.\nWe've been working in Colab for last week, a...", + "Video: 59507329
Text: Okay.\nIt's moment of truth time.\nI have just taken our class tester.\nYou remember this class?\nUh, it...", + "Video: 59295429
Text: Continuing our investigation of benchmarks, and this will become more real when we actually see some...", + "Video: 60595637
Text: Here we are back in the Colab, which has been running overnight for me and probably for you too, I\nh...", + "Video: 59668027
Text: And so here we are at the home page for modal.\nAt modal.com spelt model not not model which is confu...", + "Video: 59295527
Text: I'm so happy to welcome you to week four, day four on our journey to LLM Engineering and Mastery.\nHe...", + "Video: 59295377
Text: Just before we go on to some of the more advanced metrics, I want to mention for a second something\n...", + "Video: 59666211
Text: So before we try our new model and one more recap on the models so far and keep notes of this so we\n...", + "Video: 59170107
Text: And once again, it's that moment when you take a pause and congratulate yourself on another day of\ns...", + "Video: 60616833
Text: So I realized that day one of week one has been a pretty long day, and I assure you that the other,\n...", + "Video: 59472413
Text: Wonderful.\nWhere we left off is we had just created the Get Features function, which builds our feat...", + "Video: 59297561
Text: And would you believe at this point you're 55% of the way along the journey?\nUh, it's been a while s...", + "Video: 59669211
Text: Well, we took on a lot today and we seem to have been successful.\nThese red icons that you see on th...", + "Video: 59166981
Text: Welcome to week two, day five.\nThe last day of week two where a lot is coming together.\nI am so grat...", + "Video: 60619227
Text: And now let's move to Claude from anthropic, my favorite model and typically the favorite model of\nm...", + "Video: 60620395
Text: Welcome back to Jupyter Lab, where I want to show you the assignment, the homework exercise for you\n...", + "Video: 59665127
Text: Well hi there everybody.\nI'm not going to give you my usual song and dance about how excited you are...", + "Video: 59668923
Text: Well, welcome back to Jupyter Lab for what will be an epic finale to our time in this platform.\nAnd ...", + "Video: 59504887
Text: Well, here we are again in Google Colab.\nIt's been a minute since we were here, and welcome back to ...", + "Video: 59170165
Text: Welcome, everybody to the last day of week three.\nWeek three.\nDay five.\nWe're here already wrapping ...", + "Video: 60617251
Text: Congratulations are definitely in order.\nYesterday was a mammoth first day on this course and you go...", + "Video: 59166951
Text: All right, back to the lab.\nBack to our project.\nTime to work with tools.\nI am in the week two folde...", + "Video: 60619619
Text: Well, day four was an information dense day.\nI do hope that you learned some something useful here, ...", + "Video: 60616663
Text: Well.\nHi there, this is time for PC people to get set up.\nSo all you Mac people out there, you don't...", + "Video: 59508175
Text: So I'm taking a moment now to explain that the training costs of optimizing a model for this course\n...", + "Video: 59670087
Text: And welcome to part four of day two of week eight.\nUh, there's a lot happening this week, and I have...", + "Video: 59506713
Text: Hi everyone.\nSo the reason I'm so fired up about week seven is that this is the time when we actuall...", + "Video: 60620169
Text: Hopefully you found this super satisfying to be able to have this nice business result and have it c...", + "Video: 59295435
Text: Well, just before we wrap up, let me introduce this week's challenge and talk about what we're going...", + "Video: 59297609
Text: Last week, we worked with models that were able to speed up code by a factor of 60,000 times, which\n...", + "Video: 59507489
Text: Continuing our adventure through hyperparameters for training.\nThe next one is pretty crucial and it...", + "Video: 59295549
Text: And welcome back to our challenge again.\nAnd this time we are working with our beautiful prototype.\n...", + "Video: 59665129
Text: And now let me make this real for you by showing you some, some diagrams, particularly now looking\na...", + "Video: 59169991
Text: Okay, so that was your introduction to Hugging Face.\nAnd now I'm going to turn to a different resour...", + "Video: 59472027
Text: And now the time has come to curate our data set.\nAnd the way we're going to do this is we're going ...", + "Video: 59472307
Text: Welcome to week six.\nDay two a day.\nWhen we get back into the data, we look back in anger at our dat...", + "Video: 59508289
Text: So here we are now, back in the Colab, in the same one that we kicked off in the previous day.\nIt's ...", + "Video: 59472333
Text: Thank you for putting up with me during my foray into traditional machine learning.\nI think it was u...", + "Video: 59295431
Text: Now I want to take a quick moment to give you a flyby of five different ways that llms are used comm...", + "Video: 59673449
Text: Well, I have to tell you that I'm a little bit sad.\nThis is the beginning of the beginning of the en...", + "Video: 59669389
Text: Well.\nHi there.\nSo you've made it to day two of week eight, and I am super grateful that you've been...", + "Video: 59170057
Text: And so at the beginning of this week, we started by talking about hugging face pipelines.\nAnd you us...", + "Video: 59166949
Text: Welcome back to making chatbots.\nLet's keep going.\nSo for the next part we're going to beef up the s...", + "Video: 59473019
Text: Welcome back to an action packed time of of training.\nSo now, after waiting about five minutes when ...", + "Video: 59297585
Text: Before we move on, let me show you one more time this fabulous slide that describes the simple three...", + "Video: 59170255
Text: And welcome back to us continuing our journey through the model class in Hugging Face Transformers l...", + "Video: 60614589
Text: So we're now going to run a large language model directly on your box using a platform called llama,...", + "Video: 59297601
Text: I'm not going to lie, at this point you have every reason to be impatient with me.\nWe've been yammer...", + "Video: 60616629
Text: And welcome back to team PC and Team Mac as we come back together again for a quick video.\nIn this o...", + "Video: 59297749
Text: It's always welcome back to JupyterLab, my favorite place to be.\nAnd now we are, of course in the we...", + "Video: 59170135
Text: Welcome.\nIt's week three.\nIt's day four.\nWe are back on the adventure in open source land, back inve...", + "Video: 59472017
Text: And this is the first time that we'll be coding against our big project of the course.\nWelcome to Ju...", + "Video: 59507017
Text: Welcome to Colab.\nWelcome to the week seven day two Colab.\nAnd just before we try our base model, we...", + "Video: 60619883
Text: And now we've arrived at an exciting moment in our first week.\nThe conclusion of the first week is w...", + "Video: 59508297
Text: What more is there to say, really?\nTomorrow is the day for results.\nA day that very excited indeed a...", + "Video: 60619247
Text: We're going to spend a little bit more time with GPT just to try out a few more interesting things.\n...", + "Video: 59504769
Text: Without further ado, we're going to get stuck into it.\nTalking about Laura.\nLow rank adaptation.\nAnd...", + "Video: 59170233
Text: Welcome back to our continued exploits with Tokenizers.\nWhat we're now going to look at is what's ca...", + "Video: 59671231
Text: And here we are back in the Jupyter Lab for the fast approaching conclusion with a really great proj...", + "Video: 60620397
Text: Well, that's a fantastic result to have now arrived towards the end of week one and having completed...", + "Video: 59170093
Text: I'm delighted to see you again.\nAs we get started with day three of week three of our adventure and ...", + "Video: 59473089
Text: Welcome back.\nSo hopefully you are still impressed by the GPT four mini results.\nThe frontier model ...", + "Video: 60395261
Text: Let's keep going with our project to equip our LM with a tool.\nWe just created this piece of code to...", + "Video: 60617259
Text: I'm excited to introduce you to your first exercise, and I'm looking forward to seeing what you make...", + "Video: 59507313
Text: And it's this time again, when we look at the podium of how our models are performing across the boa...", + "Video: 60619721
Text: Now it's time to talk for a minute about tokens.\nTokens are the individual units which get passed in...", + "Video: 59295451
Text: I know that everybody.\nIt seems like just the other day that we were embarking on our quest together...", + "Video: 59166919
Text: And with that, it concludes our session on tools.\nAnd at this point, you are probably an expert on t...", + "Video: 59295441
Text: Okay, so welcome to our leaderboard fast as we go through a ton of essential leaderboards for your\nc...", + "Video: 59295541
Text: And welcome back.\nYou've just seen GPT four zero spectacularly failed to work on our hard Python con...", + "Video: 59473101
Text: Welcome back.\nSo about ten minutes later, maybe 15 minutes later, the run has completed.\nAnd how do ...", + "Video: 59507423
Text: So you may remember eons ago when we were building our data set.\nAt the end of that, we uploaded our...", + "Video: 59295545
Text: I really hope you've enjoyed this week.\nWe've got tons done.\nWe've experimented with all sorts of ne...", + "Video: 59472503
Text: Welcome back to Jupyter Lab.\nLast time, we looked at some silly models for predicting the price of p...", + "Video: 60614591
Text: The mantra of this course is that the best way to learn is by doing, and we will be doing stuff toge...", + "Video: 59473021
Text: Welcome to our favorite place to be to JupyterLab.\nHere we are again now in day three.\nIn week six.\n...", + "Video: 60617255
Text: I'm now going to talk for a bit about models.\nA term you often hear is the term frontier models, whi...", + "Video: 59667829
Text: Well.\nHello there.\nLook, I know what you're thinking.\nYou're thinking I peaked too early.\nLast week ...", + "Video: 59505329
Text: Welcome back.\nYou may, like me, have just gone off and got a coffee while things loaded back up agai...", + "Video: 59669049
Text: So what you just saw was an ephemeral app, as it's called, which means just a temporary app that you...", + "Video: 60619439
Text: This now brings us to an extremely important property of LMS called the context window that I want t...", + "Video: 59668181
Text: And so it gives me great pleasure to introduce to you the project that I've lined up for you this we...", + "Video: 59472441
Text: Welcome back.\nSo we've been doing the thoroughly distasteful, unsavory work of feature engineering.\n...", + "Video: 59507785
Text: Well, I'm sure you're fed up of me saying that the moment of truth has arrived, but it really has.\nT...", + "Video: 59295587
Text: When I left you, we had just created this simple user interface for converting from Python to C plus...", + "Video: 59166465
Text: Welcome back to the JupyterLab on Gradio day, so you'll remember where we left off.\nWe'd written two...", + "Video: 59473071
Text: Hey, gang.\nLook, I know what you're thinking.\nThis week was supposed to be training week.\nI set it a...", + "Video: 59295423
Text: Welcome to day two of week four, when we get more into leaderboards so that by the end of this, you'...", + "Video: 59297723
Text: So I know what you're thinking.\nYou're thinking, what's going on here?\nWe're on day five.\nWe're on d...", + "Video: 59166947
Text: Well, thank you for coming along for week two, day four.\nWe have lots of good stuff in store today.\n...", + "Video: 59666831
Text: Take one more moment to look at this very nice diagram that lays it all out, and we will move on.\nNo...", + "Video: 59295493
Text: And welcome to week four, day three.\nAs we are about to embark upon another business project which w...", + "Video: 60616855
Text: Now I know what you're thinking.\nWe've been building environments for so long.\nAre we not done yet?\n...", + "Video: 59506611
Text: So in a future day, I'm going to be training, fine tuning a model and creating a fine tuned model.\nA...", + "Video: 60616493
Text: I'll just take a quick moment to introduce myself to convince you that I am actually qualified to be...", + "Video: 59166317
Text: And welcome to week two, day two, as we continue our adventure into the realm of LMS.\nUh, so today, ...", + "Video: 59295439
Text: So I'm aware that there's a big risk that you are getting fed up of leaderboards, because we've done...", + "Video: 59472421
Text: And welcome back to our final time in Jupyter Lab with traditional machine learning.\nIt's almost ove...", + "Video: 59472137
Text: Well, well, well, it's been a long day, but congratulations, you've made it.\nWe've gone through and ...", + "Video: 59297693
Text: So at the end of each week, it's customary for me to give you a challenge, an assignment to do on\nyo...", + "Video: 60620143
Text: So we're going to make a call to GPT four.\nOh, that's going to ask it to look through a set of links...", + "Video: 60619501
Text: I welcome to day four of our time together.\nThis is a very important day.\nToday we're going to be lo...", + "Video: 59297743
Text: And welcome to day five.\nFor reals.\nWe're actually in the proper Jupyter notebook.\nThis time we're i...", + "Video: 59166847
Text: Well, they say that time flies when you're having fun, and it certainly feels like time is flying.\nU...", + "Video: 59170223
Text: Well.\nFantastic.\nIt's coming up to the end of the week, and that means it's coming up to a challenge...", + "Video: 59170037
Text: So how does it feel to be 30% of the way down the journey to being a proficient LLM engineer?\nTake a...", + "Video: 59295609
Text: You must be feeling absolutely exhausted at this point.\nAnd if you are, that is okay.\nYou have done ...", + "Video: 60619281
Text: Well, I'm delighted to welcome you to day three of our eight week journey together.\nAnd today we're ...", + "Video: 59472429
Text: And continuing on our strategy to solve commercial problems with LMS, we get to step four, which is\n...", + "Video: 59167009
Text: Welcome back.\nIt's time to make our full agent framework.\nI'm super excited about this.\nIt's pulling...", + "Video: 59166481
Text: And here, once more we find ourselves in our favorite place, the Jupyter Lab.\nReady to go with weeks...", + "Video: 59670933
Text: I realized my enthusiasm for this project is a bit insane, but I have to tell you that I am very sat...", + "Video: 59670073
Text: Okay, it's time to complete the Rag workflow in our Jupyter Lab on day 2.3.\nWe've got this function ...", + "Video: 59673595
Text: That concludes a mammoth project.\nThree weeks in the making.\nIn the course of those three weeks, sta...", + "Video: 59297603
Text: And I'm delighted to welcome you back to LM engineering on the day that we turn to vectors.\nFinally,...", + "Video: 60614541
Text: I am delighted to welcome you to the first day of our eight weeks together as you join me on this ad...", + "Video: 59667357
Text: Let's now see our results side by side.\nWe started our journey with a constant model that was at $1....", + "Video: 59667841
Text: Now, look, I know that I went through that very fast, but maybe, uh, you're still, uh, blinking\nat t...", + "Video: 59472007
Text: So I hope you enjoyed our first bash at Scrubbing Data, and that you are now looking through the cod...", + "Video: 59507435
Text: So I'm now going to talk about five important hyperparameters for the training process.\nAnd some of ...", + "Video: 59509185
Text: So this is where I left you looking at this satisfying chart on training loss and seeing the trainin...", + "Video: 59473159
Text: Welcome to Jupyter Lab and welcome to our experiments at the frontier.\nSo we are going to put our fr...", + "Video: 60619447
Text: I want to take a moment to talk about something that's very fundamental to an LLM, which is the numb...", + "Video: 59166353
Text: Well, congratulations on leveling up yet again.\nYou've got some real hard skills that you've added t...", + "Video: 60619123
Text: So what we're now going to do is we're going to look at some models in practice and start to compare...", + "Video: 59295363
Text: Well, another congratulations moment.\nYou have 40% on the way to being an LM engineer at a high leve...", + "Video: 60619289
Text: And now we'll go a bit faster through the other models.\nWe'll start with Google's Gemini.\nI have the...", + "Video: 59472873
Text: So it's quite an adventure that we had at the frontier of what's capable with Llms today, solving a\n...", + "Video: 60619429
Text: Let me talk about some other phenomena that have happened over the last few years.\nOne of them has b...", + "Video: 59295601
Text: So it's time to continue our journey into the world of open source and understand which models we sh...", + "Video: 59170025
Text: And a massive welcome back one more time to LM engineering.\nWe are in week three, day two and we are...", + "Video: 59166443
Text: And welcome back everybody.\nWelcome to week two day three.\nIt's a continuation of our enjoyment of r...", + "Video: 60620025
Text: And welcome back to Jupyter Lab, one of my very favorite places to be.\nWhen Jupyter Lab sprung up on...", + "Video: 59170055
Text: Welcome to the world of Google Colab.\nYou may already be very familiar with Google Colab, even if so..." + ], + "type": "scatter3d", + "x": [ + -8.122976, + 34.908234, + 11.130736, + -9.978768, + 20.378115, + 40.165966, + -46.047855, + -3.5225513, + -41.603718, + -29.750912, + -16.69231, + -11.540651, + -48.240433, + 26.591553, + -33.749554, + 12.921923, + 63.537235, + 22.33331, + 39.7503, + -4.0843143, + 19.89367, + 53.601818, + 9.313611, + -15.544816, + -27.25799, + -40.441216, + 7.4084144, + -44.058147, + -34.108322, + 35.800552, + -75.9669, + -17.08466, + 67.46077, + 30.020157, + 26.199474, + -32.092762, + -59.50946, + -31.125465, + 17.727507, + -1.9556097, + -33.15905, + -35.514206, + 49.95782, + -31.869379, + -22.008738, + 33.544445, + -2.50848, + -30.35998, + 56.3363, + 1.8218645, + 54.121468, + 50.3909, + -38.989643, + 3.3299105, + 42.596157, + 11.633402, + -9.411688, + -19.470694, + 8.411452, + 11.9569235, + 29.455364, + -30.15862, + 45.700237, + -56.659325, + -46.378384, + 38.17704, + -39.15447, + -23.810238, + 18.694654, + 2.2662356, + -33.611332, + 46.75266, + -50.577423, + 42.481358, + 29.093426, + -24.221292, + 16.573559, + 11.913546, + -14.086581, + 36.418083, + 14.4376545, + -71.27086, + 21.494228, + -40.991734, + 37.30921, + -28.095816, + 38.05271, + -18.56597, + -44.791924, + -13.7468815, + 28.898296, + -36.335644, + -2.9895551, + 65.62672, + 22.887362, + -21.60234, + -19.166574, + 2.6002665, + 6.9061046, + 21.161528, + 1.7614158, + 6.4912224, + -49.481483, + 2.3821418, + -19.138437, + -11.099696, + 2.0873141, + 66.853096, + 27.287766, + 9.592437, + -25.921122, + -56.57392, + 23.216122, + 26.908329, + -64.204666, + 10.844664, + -6.4146757, + 51.051907, + 36.656914, + 33.13656, + 46.039726, + -38.186977, + -20.540487, + 8.277669, + -38.28821, + -7.520119, + 52.012684, + 23.770021, + -12.45263, + 7.2831774, + 14.093998, + 2.9064524, + 14.353067, + -44.2576, + 54.878136, + -29.27205, + 0.48731115, + 6.6884475, + -32.002, + 2.7302628, + 
-41.821613, + -35.146507, + -7.324495, + -36.21966, + 58.4483, + -21.80948, + 0.6451577, + 29.801828, + 6.4110775, + -17.06338, + -17.830246, + -34.42378, + -63.04847, + 55.62894, + 36.156605, + -49.79336, + 59.208763, + -0.80916214, + 42.169895, + -11.784577, + -4.374742, + 3.4122179, + -12.354422, + -20.188608, + 3.917022, + 69.92149, + 21.692152, + -62.446087, + -45.395638, + 16.804968, + -31.221453, + 32.466534, + -26.018362, + 11.981998, + -57.391186, + -24.381496, + 4.2317467, + 21.573854, + -42.14884, + -39.03866, + 50.671337, + 44.122208, + 4.3523436, + -17.679241, + 2.4215934, + 23.360334, + -35.800457, + -9.750219, + -25.919231, + -6.5914946, + -7.792405, + 28.21505, + -41.201225, + 45.70155, + -11.035862, + 35.946297, + -11.847502, + 32.496883, + 13.333166, + 41.13373, + -8.510606, + 50.68757, + 37.495113, + 41.12895, + 39.27697, + -2.0569484, + -25.762125, + -14.475436, + -24.457497, + -18.797113, + 33.985275, + -52.042458, + -37.08094, + -26.450697, + -24.56304, + 17.96638, + 1.2891247 + ], + "y": [ + 22.495209, + 7.7246327, + 24.009363, + 58.233845, + -20.684736, + -45.860653, + 19.024555, + -23.34507, + 4.077665, + -9.023953, + -33.54387, + 13.529735, + -20.47289, + -18.097763, + -52.109207, + 20.917074, + 6.882738, + -27.837515, + -73.38036, + -59.17266, + 30.898888, + -48.80887, + -62.344166, + -65.681435, + -18.074602, + -35.760197, + -64.24934, + -8.450979, + -32.271553, + 16.685429, + 24.04524, + -18.558027, + -6.361417, + -20.432753, + -7.4502926, + 46.95327, + -24.022396, + 43.53084, + -2.2562957, + -4.3477573, + 2.0038416, + -8.453099, + 7.296199, + 15.414882, + 3.5748003, + -9.886501, + 52.628384, + 41.841442, + 26.300909, + 27.636545, + -10.974157, + -52.99635, + -46.490845, + -17.097885, + 7.991843, + 42.728287, + 56.11593, + 25.378807, + -63.396446, + -10.103928, + 10.261616, + -31.910042, + -10.533649, + 0.86511475, + 7.581515, + -38.008648, + -21.695356, + 20.21065, + 41.538765, + 15.004162, + 2.7370741, + 23.941662, + -23.825634, + 
-29.004723, + -63.43282, + 61.37925, + -47.60969, + 51.35244, + -21.343243, + 45.543747, + 5.428183, + 24.95672, + -65.141624, + 20.076067, + 21.163311, + 43.15092, + 0.7405279, + 5.9581037, + 15.877678, + 42.451416, + 10.054633, + -27.470036, + 6.871376, + -26.58876, + -74.47105, + -54.44899, + -36.45757, + 65.086754, + 10.824065, + 29.205353, + 0.1051746, + 4.70403, + -20.799795, + 21.76516, + 7.780378, + -8.377405, + 38.573914, + 4.597273, + -28.45224, + 13.7212, + 6.9242682, + 6.1574836, + -38.563107, + 39.609455, + 25.59941, + 31.040857, + -35.56833, + -2.9297552, + 32.898632, + 12.454539, + -7.2873325, + 61.07097, + -42.95049, + -25.684706, + -33.060715, + -3.51661, + 11.432408, + -57.957485, + -8.321207, + 42.622173, + -51.096336, + 36.391785, + -42.812637, + -16.89856, + 1.257138, + 3.1051168, + 2.437643, + -11.519589, + 74.24569, + 32.010456, + -16.717613, + -51.03449, + -4.0209484, + -19.666466, + 7.805317, + 12.423793, + 25.94904, + 0.67196953, + 57.910053, + 41.923744, + -34.208702, + 39.797775, + 36.71461, + 12.872997, + -11.363612, + 24.884459, + -18.462435, + -55.471058, + -15.552036, + 41.96635, + -30.533445, + -0.34056988, + -1.1686171, + 31.996092, + -25.59134, + -16.72122, + 2.3426278, + 11.931674, + 4.3598084, + -4.1114182, + 50.8286, + -55.08115, + 1.6512612, + 41.550625, + 6.6282744, + 46.497314, + 18.768253, + 42.851322, + -15.206347, + 44.767014, + -19.802797, + 27.249903, + -50.63222, + 18.110355, + 54.369324, + -57.773426, + -15.498092, + -19.211893, + -18.757973, + 29.953873, + 54.00189, + -28.973417, + -37.20934, + -28.751331, + -33.398895, + -45.811794, + -33.892204, + -72.404495, + 34.78276, + -6.6268015, + 11.053642, + 29.57501, + 33.051342, + 8.585847, + -0.19063602, + 51.39862, + -19.615374, + 69.09646, + 60.87389, + 72.40716, + -0.828027, + -53.986324, + 25.217634, + -19.691027, + -9.348319, + 13.64766, + -30.232576 + ], + "z": [ + 59.56954, + -20.305794, + -45.233562, + -16.850801, + 4.6954994, + -25.423595, + -8.237618, + 
12.466315, + 65.523895, + 47.91345, + 35.15053, + -39.112038, + -38.72094, + 52.405907, + -4.370467, + -14.430673, + -35.952686, + -28.531855, + 7.7115927, + 23.013548, + 56.077496, + -5.514938, + -0.008566526, + -9.867553, + -12.98698, + -39.226494, + 17.066427, + -34.561573, + -3.3118968, + 36.583973, + -7.119071, + -4.328642, + -31.052425, + -1.8609456, + 56.33368, + -28.372967, + 8.456723, + -15.707632, + -17.333271, + 42.80706, + -23.080353, + 23.041517, + -0.6556977, + -37.74239, + -37.274963, + -26.551325, + -51.976124, + 1.4318302, + -22.758234, + 64.70892, + -32.6177, + 5.8516145, + -25.99061, + -7.1243777, + 17.436174, + -50.29335, + 9.49166, + 10.1172285, + 30.861927, + 40.322426, + -35.62156, + -47.271034, + 55.355392, + -37.411285, + -17.425415, + -13.6007805, + -55.389553, + 42.268906, + 58.610935, + 70.393776, + 29.802158, + 16.140657, + -51.085945, + 1.4574273, + 6.6880107, + -5.813659, + 9.803923, + -28.55834, + 36.59922, + -20.453058, + 3.8719976, + -23.767431, + -5.49314, + -25.137175, + 3.7973387, + 40.51548, + 45.6967, + 15.470758, + 13.113324, + 37.64396, + 15.948028, + 19.090908, + -7.007089, + 0.6152462, + 7.8243833, + -6.225681, + -6.8244658, + -3.5654526, + -31.484634, + 32.221977, + 28.18344, + 42.78351, + 15.948768, + -7.45289, + -20.154951, + 0.96260214, + -46.05605, + -16.378876, + -18.391024, + 12.343808, + -59.11657, + 20.030552, + 20.922937, + 39.888527, + -11.43447, + 38.497936, + 51.98297, + -42.20368, + -16.99875, + 49.101273, + 6.4250507, + 15.585115, + -24.075558, + -19.80848, + 31.00568, + -33.391098, + 43.36078, + 23.481417, + 51.57835, + -24.75155, + 21.64222, + -58.816345, + -2.0315907, + 29.727232, + -25.19327, + 50.22198, + -51.279667, + 6.860358, + -15.767823, + 51.40322, + 53.975826, + -39.052284, + -57.70952, + 41.125294, + 6.987128, + -9.902869, + -29.477993, + 22.374899, + 36.041515, + 19.617472, + 5.1244507, + 13.6424465, + -11.48539, + 28.757114, + -37.475647, + 19.265785, + -9.775182, + -33.181618, + -13.116752, + 
-2.6527267, + -9.879043, + 58.218357, + 25.997261, + -20.186256, + -39.330612, + -4.208988, + 43.7187, + -8.064093, + -41.142452, + -6.0501204, + 21.441114, + -3.8814995, + -4.5298624, + 27.965652, + 8.871029, + -39.658558, + 49.448414, + 1.3307765, + -23.730022, + 31.832657, + 16.64137, + -31.655207, + -19.97523, + -56.55883, + 13.901519, + -22.935688, + -1.5958619, + -19.140667, + 10.399207, + 16.90549, + -3.3822806, + 27.030895, + -18.985811, + -34.180725, + -39.81326, + -36.173004, + -27.309599, + -5.8692102, + -1.0676264, + 20.961613, + 29.313066, + -8.199473, + 46.47283, + 57.448467, + -10.296155, + 41.969986, + -28.692223, + 6.5019803, + 25.493446, + -9.792015, + 6.515994, + -19.527716, + 6.6179695, + 30.123577, + -38.85623, + -43.729652, + 46.972412 + ] + } + ], + "layout": { + "height": 700, + "margin": { + "b": 10, + "l": 10, + "r": 20, + "t": 40 + }, + "scene": { + "xaxis": { + "title": { + "text": "x" + } + }, + "yaxis": { + "title": { + "text": "y" + } + }, + "zaxis": { + "title": { + "text": "z" + } + } + }, + "template": { + "data": { + "bar": [ + { + "error_x": { + "color": "#2a3f5f" + }, + "error_y": { + "color": "#2a3f5f" + }, + "marker": { + "line": { + "color": "#E5ECF6", + "width": 0.5 + }, + "pattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + } + }, + "type": "bar" + } + ], + "barpolar": [ + { + "marker": { + "line": { + "color": "#E5ECF6", + "width": 0.5 + }, + "pattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + } + }, + "type": "barpolar" + } + ], + "carpet": [ + { + "aaxis": { + "endlinecolor": "#2a3f5f", + "gridcolor": "white", + "linecolor": "white", + "minorgridcolor": "white", + "startlinecolor": "#2a3f5f" + }, + "baxis": { + "endlinecolor": "#2a3f5f", + "gridcolor": "white", + "linecolor": "white", + "minorgridcolor": "white", + "startlinecolor": "#2a3f5f" + }, + "type": "carpet" + } + ], + "choropleth": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "type": "choropleth" + } + ], 
+ "contour": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "contour" + } + ], + "contourcarpet": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "type": "contourcarpet" + } + ], + "heatmap": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "heatmap" + } + ], + "heatmapgl": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "heatmapgl" + } + ], + "histogram": [ + { + "marker": { + "pattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + } + }, + "type": "histogram" + } + ], + "histogram2d": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], 
+ [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "histogram2d" + } + ], + "histogram2dcontour": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "histogram2dcontour" + } + ], + "mesh3d": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "type": "mesh3d" + } + ], + "parcoords": [ + { + "line": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "parcoords" + } + ], + "pie": [ + { + "automargin": true, + "type": "pie" + } + ], + "scatter": [ + { + "fillpattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + }, + "type": "scatter" + } + ], + "scatter3d": [ + { + "line": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatter3d" + } + ], + "scattercarpet": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattercarpet" + } + ], + "scattergeo": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattergeo" + } + ], + "scattergl": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattergl" + } + ], + "scattermapbox": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": 
"" + } + }, + "type": "scattermapbox" + } + ], + "scatterpolar": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatterpolar" + } + ], + "scatterpolargl": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatterpolargl" + } + ], + "scatterternary": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatterternary" + } + ], + "surface": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "surface" + } + ], + "table": [ + { + "cells": { + "fill": { + "color": "#EBF0F8" + }, + "line": { + "color": "white" + } + }, + "header": { + "fill": { + "color": "#C8D4E3" + }, + "line": { + "color": "white" + } + }, + "type": "table" + } + ] + }, + "layout": { + "annotationdefaults": { + "arrowcolor": "#2a3f5f", + "arrowhead": 0, + "arrowwidth": 1 + }, + "autotypenumbers": "strict", + "coloraxis": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "colorscale": { + "diverging": [ + [ + 0, + "#8e0152" + ], + [ + 0.1, + "#c51b7d" + ], + [ + 0.2, + "#de77ae" + ], + [ + 0.3, + "#f1b6da" + ], + [ + 0.4, + "#fde0ef" + ], + [ + 0.5, + "#f7f7f7" + ], + [ + 0.6, + "#e6f5d0" + ], + [ + 0.7, + "#b8e186" + ], + [ + 0.8, + "#7fbc41" + ], + [ + 0.9, + "#4d9221" + ], + [ + 1, + "#276419" + ] + ], + "sequential": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 
0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "sequentialminus": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ] + }, + "colorway": [ + "#636efa", + "#EF553B", + "#00cc96", + "#ab63fa", + "#FFA15A", + "#19d3f3", + "#FF6692", + "#B6E880", + "#FF97FF", + "#FECB52" + ], + "font": { + "color": "#2a3f5f" + }, + "geo": { + "bgcolor": "white", + "lakecolor": "white", + "landcolor": "#E5ECF6", + "showlakes": true, + "showland": true, + "subunitcolor": "white" + }, + "hoverlabel": { + "align": "left" + }, + "hovermode": "closest", + "mapbox": { + "style": "light" + }, + "paper_bgcolor": "white", + "plot_bgcolor": "#E5ECF6", + "polar": { + "angularaxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + }, + "bgcolor": "#E5ECF6", + "radialaxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + } + }, + "scene": { + "xaxis": { + "backgroundcolor": "#E5ECF6", + "gridcolor": "white", + "gridwidth": 2, + "linecolor": "white", + "showbackground": true, + "ticks": "", + "zerolinecolor": "white" + }, + "yaxis": { + "backgroundcolor": "#E5ECF6", + "gridcolor": "white", + "gridwidth": 2, + "linecolor": "white", + "showbackground": true, + "ticks": "", + "zerolinecolor": "white" + }, + "zaxis": { + "backgroundcolor": "#E5ECF6", + "gridcolor": "white", + "gridwidth": 2, + "linecolor": "white", + "showbackground": true, + "ticks": "", + "zerolinecolor": "white" + } + }, + "shapedefaults": { + "line": { + "color": "#2a3f5f" + } + }, + "ternary": { + "aaxis": { 
+ "gridcolor": "white", + "linecolor": "white", + "ticks": "" + }, + "baxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + }, + "bgcolor": "#E5ECF6", + "caxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + } + }, + "title": { + "x": 0.05 + }, + "xaxis": { + "automargin": true, + "gridcolor": "white", + "linecolor": "white", + "ticks": "", + "title": { + "standoff": 15 + }, + "zerolinecolor": "white", + "zerolinewidth": 2 + }, + "yaxis": { + "automargin": true, + "gridcolor": "white", + "linecolor": "white", + "ticks": "", + "title": { + "standoff": 15 + }, + "zerolinecolor": "white", + "zerolinewidth": 2 + } + } + }, + "title": { + "text": "3D Chroma Vector Store Visualization" + }, + "width": 900 + } + }, + "image/png": "iVBORw0KGgoAAAANSUhEUgAABPAAAAK8CAYAAABhiUEuAAAgAElEQVR4XuydB5wdVdnGT7J90xNCFRWQJiiI+vlhRRREiihYP8UCKohYUEQUu4KA2FDB3htVkaag2FAEFSwoTUSQEhLSNsn23Xznmc3ZzM7ee6eXu/d/vt9+kdw5M+f8z5nZzHPf931mbbTN0CAAAQhAAAIQgAAEIAABCEAAAhCAAAQgAIFKEpiFgFfJdWFQEIAABCAAAQhAAAIQgAAEIAABCEAAAhDwCCDgsREgAAEIQAACEIAABCAAAQhAAAIQgAAEIFBhAgh4FV4chgYBCEAAAhCAAAQgAAEIQAACEIAABCAAAQQ89gAEIAABCEAAAhCAAAQgAAEIQAACEIAABCpMAAGvwovD0CAAAQhAAAIQgAAEIAABCEAAAhCAAAQggIDHHoAABCAAAQhAAAIQgAAEIAABCEAAAhCAQIUJIOBVeHEYGgQgAAEIQAACEIAABCAAAQhAAAIQgAAEEPDsHrj2upvMN86/ytx+13/N2Ni42XWn7c2xR73APGvfvSZ3yEH/d7L57wPLJ/+7s7PDbLlkodnncbuYl79wf7PXY3eKtJvGxzean1z9O/Pjn15nbv/XvWZgaNgsted58l67mle/5Hlmt8c8cvI8L3jtqWbHR25jPvOREyKdu8oHDdp57nfk2715fu60t9Ud6iFHnWK6LNtLvvbRKk9n2th+ff1fzfd/9HNzm13T1WvXme6uTruPHmle9oJnm0MP2LdSc3n9SZ8wt915r/nlxZ8xHe1tNcf2obO/aX501W/NtRd92rzyzR+z+3xnc/p73lD4PN5/1tfNdTf+zfzyos9419Z9WMRYLrv69+aU079srv7h2Wa7rbcofN5cEAIQgAAEIAABCEAAAhCAAAQg4CfQ8gLeT395o3nnh881Rxz8THPIc//XjIyMmm9e8FPzx7/cZr73+feZx+2+46RwMHdOjzn5+Fd4/z00PGz+fe+D5ic/+50n2rz5tS80x9ufRm1kdMy89X3nmN/84a/mwGc9yez31CeYOb3d5p77HjIXXvYrs2z5SnPGqceag579P95pZpKAp/l87DPfMRdc9ktz7YWfNlssXjAN1c233GledcJp5n1vP8q84oXPyeROXbtug3nqYW82f7zqi6a3pzuTcwZPIqHrfWd+zbzwoKeb5+33ZLNk0QKzcvVaTwC7+td/Mu996y
vNK484wOv289/+2XzpO5eZC7/8oVzGEuWkP/vVH807PvQFTxg+4JlPmtZFYuuzjnibedqT9zSf+tCbzRW/+IO3Xk95wu5RTp/pMUEBL6+xnH7O9zwx813Hv9wbv+7JP/z5H1Z8fap3j9IgAAEIQAACEIAABCAAAQhAAAJlEmh5Ae9Np3zaDAwOmW9+5pTJddjQP2j2Pex4T3R595snBDtF/igS52ufOnnKeimi7ozPf99875JrrNhxvBVwJsS3Wu0zX7nIfOV7l5vTTnm9J/b4W//AkDnmnWeZf9/zgLn6B2ebBfPnzDgBTxGORxzzfnPScS8zr3v586chklhzpRWLfmUjw+bN7c3kvvjtDX83x737k7kKeAe/6t1m66WLzdc//e5pY37LqZ81ZtYs87mPvdX77FNfusBc/+d/lirgSUje/8VvN3vutqM574wTp43ZRZ999ex3mX2ftEcm65D0JEEBL+l5wvq9/E0fMU+00bROwAs7ns8hAAEIQAACEIAABCAAAQhAAAJFEmh5Aa8WbIlp+x56vHnViw8w73rTREROPQFPnynt9rDXvMdLm6yX+jkwOGye+aK3mCc+fhfzxTPfWXONH3xopZG48sjttvQ+VwTezjtsZ/Z/+j7m81//kXlg2cNmq6WLPJHBRU5dfs315t2nfcl8+5z3mg+e/Q1vLFd970wjYVFpwRdf8WuvX3d3l3nCnjubt73+yMk0XUUznfzRL5ofnPt+c/YXz/ciCTUHCZdHHvJMozTKm/5+h2m3kUkveN7TPOHNtf/8d5n57FcvslFK//QE0C23WGQOfs7/epGIHR3tdffwy4/7sJFAetm3Pz7lmAk+b/Ui2D727mO8z373x1vMl797mbnz3/dZLqPm8bvvZE489iVmz113mOy7bn2/HcfF5prf/Mms3zBgdnzUtuaNrzrU4/OFb/zInPutSyePfeb/7uUJVlHYfOATXze33Ha3Oeb/DjEft9FZ+z/9CeYj7zq65rwOePlJ5tGP2Np85eyTGt67r3nbx82f/nr75DFOyOyzc/i0Ffau/d3NXvrtogXzvPTtE9/4Eu9/q9Ubz8aNG73U3Uuu/K2NGltmuuz6PfMpe5l3HvfSmlGO7uKf/OIFNtL0Ki8aUinc/nb0iWea++2e+en3z7La46xpaavaJ5/5yoWWz3/MhoFBK14uMocd+DRznE07nz17lrno8l97e/EXF37KEzZde+O7zjaa6w/P+4D3V1H2UKMUWq279ketpshB7SW17158jbngJ7/0oup6e7rMrjZNXWxd2vse+712yinO/9IHzd33PDgthfaXv7/Zi568wwrRarvs+Ahz9CsO8aJp1VasXOOliZ9po2gVTXrt727y9qTS4k9921FT0uMbbhQ+hAAEIAABCEAAAhCAAAQgAAEIBAgg4G0CIuFLQpSEi3O/+WPvBfx7X3if2X7bCTGtkYCnzz/95QvNV79/hfntjz9nFi+cEF38TSm5r337GeajJx/tpetGaRLwxsbGzKO339q84ZWHmra2NhvBdb4d27884UXX+dmvbrTpkOd64pzqre1sRQUJBhrPt2wq8ElWgNzvqXvblM4+L1JQEX6XfevjVnBbaPtOpFKqppjEqUdZEeqsc39gvnPR1eZxu+1gTrbRh6rx51JEv/yJk7y0SglgB/3fu7wouQ+987Vm4YK5VtS4zxM8XnXkAZ5IWK9dfMVvPDHq+1Y09NcNVE3AU8/46uTfS+h63YlnmOc+44nmLccc6XGQiPn7P91iLv7qR6zIuZV3CR3z3wdWWIHkVWabLZeYy675vfnm+T81GqvGLhFTQt41tpbZ/HlzjNKgo7D56Ke/bX71+7+YbW3U5bFHHWbZbDW5F4JzE1cxO8QKmKqH+HhbD7HdrlWwSWzUWim9VpGcPVZUVb0/1Zh7wIq3H3jHa8xutv6iBLIPf+pbdj6LzQ+s2CURrd54JCid87WLzVstI9Xakwj8ETv22bbPhV/5cN
0ad/fe/5B5/ivfbd7+hhd7e8s17f8DrSDp/3t/3blRuw7PtiKV5njC617k8dS9ovEd9+rDzTGvODiSgBd1DzUS8Nb2bTBr162fgvkDn/iGt8cv/PKHPbHb7V3t5WfblHWlB3/pOz8xv7fi8BXfPdO7hySaHvCyd5rDn/d0c8LRL/L29VW/uGGKgOciOV9y6H7mKCvsK6pS++ySK39jzv34iZ7gqvM8/fC3eNcVv+fv/79WwOs3x7xjQgjVvqVBAAIQgAAEIAABCEAAAhCAAASSEEDA20RNYs2b3ztRKH/vPR7jFeyXaONamICnCB+JLnpJ9xtRuP4uUk6puk/ee7dIayUBb/WaPltI/5NW7On0+tx4822eaKUovmc85XGTIpxfcFE029MPP8FL033/ia+evJYi8RQt5o51Ap4/pffWO+8xL37DBz1DDZc+LHFzr+ceY0WiI2x022GegCcBSDXlJAS69rb3f84TQC+ywlG9puhG1Vc7+DlPMR8+6XWThyk6TYLMj7/xMe/vJHoookwii0QuNc3ruS97hznQRtd90AqHN/39TnPUW06bVstNAqHSQ1962H7mGz+8yosudDXworJRvb4f/PgX04TGWvNS1KSiEX/wo194ApGivPaye2jfJ+5hDn3uvp6g45pSth9etXYyhdatZzD9+lJbW/G9H//K5PVrjWdoeMRb52fYiDv1d+3vt/7bKCX0E+9/k8e5XlOk3bIVq8yVlrFrEju//N3Lveg5V6fQL+DJyEX/rfvjcBuV6ZrSo1Ur7hHbLI0s4EXZQ3FMLJServF//dOneKK02pq16705+u/JO2xE54uOfp/5/Olv80Q9tScd9EYrgO8/mUIbNLHQ/lxtz3Wp3Z8S49QU/SgRVBGzEoydgCfmYu+axqX0+Zuv/oqR+Q0NAhCAAAQgAAEIQAACEIAABCAQlwAC3iZiMjtQquZDD6/2DCXuvPs+88Uz3jHFxKJWDTwH/HuX/Nycfs53zU++eZrZ6dHbTVuHy39uU10/9iWvTlpUMwAJeNtutXhKyu1dNrroBa95rzn7A2+yET5PmRTwlEKr9Fy1v9vUT6WqnvX+47yoMH9Tip/EDaUYOgFPKYMuLdVFYElce/Ghz5rs+pRD3uT9t0splgjy7Qt/Zv76j395wsb4xnEjcU6iz8/P/2TDffjhT37TM0b49SXneMLkvfcvt0LIydbs4VU2ffe5Xt8nPu+N3vxcOq074Qnv/ezEGlkTiG/Z65/1hR94NfOCaaDu+KCAF5WNM9z4yzVf89JCo7R+m06qlGJFD/7pb7ebf9z+Hy8C7v0nvsZLSVYLCnhf/+GVRumswTko3VO19ZyhR63xuLkExTRdx1uvQ+x6bTJlqDX+K22U2bs+ep75zufe60UrSpA68BXvMnvs8ugpzsd+AU9irsRBCXmK+Hyqjcjcx0Z/+tOmo6bQRtlDUQU8GcMc/57PeKKwY605K2Lw/Et/aX76yxtsKvlK6/o8ZDZaAVqpvNpbL3r+Mzw0YQKe9uNh1tDiQye9dgpKpaD//k//MNdd+rlJAU/py0e//ODJ486/9FovKvLXl3y2YVpzlD3GMRCAAAQgAAEIQAACEIAABCDQmgQQ8GqsuyKqXnbsh7z0QAljamEReBJYzv/JteYPl59X07XSRYspIu7lh+8fabfVcqGV8+1hr37PZHSVE+EUubbzDo/wznu9FRRef9InvHpvqvvmb4favhIiv3TWOycFPH9fJ+Cd8V4rWBz41MmufkHovgdXmBe+7lSz06O286L5trcRSKqTJwHzn3fcEyrguSg/JzwpBVTpiL+yAsd8m74o0WWv5xzjCWdKG/Y3pdIuXjjfE0OUUnvety9taFARFPCistF6XnntH8zvf/KFSGtV6yBFmJ34wS+Yu/5zv/n5BRMRbUEBz9Vx+9NPvzwZZalzKUpPkYqq1fZ6W4ev1njcXJSuOysgMspNWQKohN56TcfsZ80sFIUmMUvio4xUXK
q06+cX8PR3quumunty2NVaKhLzUOvgrHRtReFFEfCi7qEoAt7d9p6QqKgUWDn++ptSe8+30bFK69Y8587t8WpCHvWW0yMLeIqg1H6U8Yq/DqSuo6hbpX8rus5F4PmFaB2DgJf4FqIjBCAAAQhAAAIQgAAEIAABCGwi0NICnoS6a6/7s03729Lsseujp2yK95z+FfPrP/xlUsBpJOApbVJ1w2Sg4Hez9Z9w2KY7PtMKMirqL6OLWlFdSvW7xNaIU/qqxMOkAt4tt99tBcj6EXhP2mtXT9ipJf5FEfCcKHbV986aNNzQXF09urAIPB37kjdKIO02X//Uu736Y//zhN29tEzXnvz8Y73UUNUkCzbVd1NdwO/blNXTPvsdTzDcZqslNW/qoIAXlU0cAU+CkCIAa5l3/Py3fzZKLZbBxVOftOc0Ac+Nr14E3gdtXbyX2ki3WuNRhN9LrdCsqMhn2hpswaZUXr+JRC1Anzj3h+aCy35pfvOjz5kPffIb5mablvyzH3xiMk1UfYICnv88SlG9yka3yV13/6ftY85837HWOGWizmHQxEKimcxIZGIRdQ+FCXiqK/gyG22qeX7ZMg7WHlRk3XNsHUUZS7imCEmlxMaJwNN5Dn1u7Qi8G26+1ROUEfD4vQoBCEAAAhCAAAQgAAEIQAACeRFoaQFPUBXlJDHoW599zyRj1Xh7oa2RpfRHV3i+noCnlML3nfk185Orf2e+9smTzf8+8bF110qOqKrR9Zajj7AF/18w5Tiln77plE95qbuXf/sMr7h+UgFPgqJqoykiyV8DT1FPz7Mpkiro/xorEiYV8Jxxwg1XnOcJjWpKg1Vk4NIlNoXWRpuFNdUM/Ohnvm0+/eETPIHru58/1TPicO0NJ53tmT0EXX11HbmeqpaYq/XmF2LUX+dTLTaljzqh6MYrv+hFh0VlE1XAu+7Gv5tjT/7kZKprcN6f+/ol5ovf/slkarUi8ORW6uoE/vlvd5hXv/V0r4bd8/b7n8nuznxBx+2+86NqCngShZ9mTROOOPgZ5j1vmRp5plTrHR+5zRQhrtaaKHpNUZmKuFSa5xteeYhX59Df/AKexMqbbdp0MDVbTsj/tIKi3IUVmXfiBz/v1TN0UaGq1/dsG+0n8xEJeFH3UCMBT/fp8e/5lLn73mXmgi99yCyYP2fKuJUS/IQDXm9e8aLnTtZz1AG6X8U3KOC99LBne/eGWrAGnuoFPmyNYJQi75oz4thlx+29enoIeGF3PZ9DAAIQgAAEIAABCEAAAhCAQFICLS/gOfMJpYuqxpVe+i++4teeCPGZj5xgDrCGCWoSMSRWnXz8xAu+UjmVInmRjTb65x3/8aKgXvuygxqug1JDlVJ57XU3mX2ftId5/rOfYhbOn2vuuX+Z+eGPr7WOmhvM509726TJRVIBT4NQWurXf3ClOcUKO898yuPNcisaffyc73n14y61IsQC68iaVMBzxgsSelSz7l9332/O/ML3PbHm6t/8yfz46x8z21kBTQJovbahf9Dsd+TbvPTLBZaBXxhRH+dCe+TBz/KcXXWcxLJPnPdD885jX+q53aop5VNpnO9726s90xGlvSod15l8yCVUIpAEKglhj9lhu0hsogp42i9vOfUc85sb/mrkUPp0ayyiNZUhx29v+JuXvvm8/Z5sPvnBCZMJOfXKMOXLNoV5iU2pVTqzItPuX7bCq9/2GMvwltv+7bm6arxKdVarNx4JYUojfodNtVW6tCLcVMNRdd/k9BuMLK21HhIQVXNPAtQvrPgarCfoF/CcaCoB+HBrkiJRVKLzByzjg579P9YN+CgjoVg1DV/xwgnhTKKp9sevr/+rFykpAS/qHvqITVG97sa/mV9eNGEw4x+Lov6+aZ2WP/2hE6z78tS6k9ovSllWpN39djyfP/3tXnr2dy++xqvVeJG9x5XKrtRauc4+5yXv8MYmwVvGLNfd8PcpLrRyP5aorD6KkB21nL9m7y+5Hn/rs6ohuD
MCXtLfQvSDAAQgAAEIQAACEIAABCAAgVACLS/giZDEui9/9zKjqKVuG9mlVFjVu3quTb1zTcKBCve7JidKRcmp+L9e6J3rZRhxCT5ypL3kqt9Y4eleo8gkRZTta9Mrj7bXVOSYa2kEPF3nG+dfZcWcX3s1vyS0KDpQNdW233ZL7xJJBTz1lbOmjDv6rOj4WGt68O4T/s/0WkOKN7zrbKO0Rok0tcw8/HyUZql0S0WPOUHO/7lqsn3hmz/2BFKlHCt6S8YJSil1TWYEn7ImEL+wqdASBXewUWeKbnTCq0TRY+2YxHqvPXbyahpGYRNVwNM4JMyq7pvWVaLumr71tp5dl60RuK051IrCL33BfpOpnX/7512eiKvowte+7PleDUHxkhh17e9u9kQgCU8HPuvJ1vX3SM/RVq3ReOSWKwdcz9XVrvNuOz3SHGsZRDVLcQ7Jz3nGPuacj7512hYOptBqnF+163+nFW41d6WvSqQ8/jWHT7qsSjhV5OEq66IsQfDoVxxsDU/u8oxiZJoSdQ9984Kf1RXwDjnqFPOf/y6recvJ/VfpvPr8g2d/w4qid3sCvJyZ32LdlM/8/PfNhXbNDrZ1ApW6LSFfbsXj4+OewYsEWImtV//wbE9kVZPw+kUrlt5u56A0bu37N7/uheZ/95mIuiUCL+zpx+cQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgE
ABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABLnY+nEAACAASURBVCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5Scv
SDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACEIAABCAAAQhAAAIQgEABBBDwCoDMJSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJQAAl5ScvSDAAQgAAEIQAACBRIYH99oBkfGTfvsWWZoZMwMDI2Znu5209Ux23S2zy5wJFwKAhCAAAQgAAEIQKBoAgh4RRPnehCAAAQgAAEIQCAigdGxcTM8utH0D42aYSveqc3raff+XDcwOnkWq+mZnq52+9OGmBeRLYdBAAIQgAAEIACBZiKAgNdMq8VYIQABCEAAAhCY8QQk2g1ZsW5geGxStPNPupaA5//cL+YpWm+2/oIGAQhAAAIQgAAEINDUBBDwmnr5GDwEIAABCEAAAjOBwPCoFexsSqyi7EasgNeohQl4QTGvu7PNptnqZzZi3kzYLMwBAhCAAAQgAIGWJICA15LLzqQhAAEIQAACECibgKtjp2i7MVvfLmqLI+AFxbwOWytPgl6P/SEyLypxjoMABCAAAQhAAALlE0DAK38NGAEEIAABCEAAAi1CYHB43AwOj3opsnFEOz+epAJeELEi8iai82ab9jZMMFpkCzJNCEAAAhCAAASalAACXpMuHMOGAAQgAAEIQKD6BJxz7KBXz27MxAi0qzu5rAQ8/wU8J9tNabY42lZ/XzFCCEAAAhCAAARajwACXuutOTOGAAQgAAEIQCBHArWcY7O8XB4Cnn98Mr7oUpotjrZZLhvnggAEIAABCEAAAqkIIOClwkdnCEAAAhCAAAQgYEyYc2yWjPIW8Pxj9TvaEpmX5SpyLghAAAIQgAAEIBCPAAJePF4cDQEIQAACEIAABDwCSo9dNzAayTk2S2RFCnj+cS+c22E2Wq+Nttk23bZ9FiYYWS4q54IABCAAAQhAAAIhBBDw2CIQgAAEIAABCEAgIgHnHDtqxbuFczrN8jWDEXtmd1iZAl7/oK3lNzpuFJmHo212a8qZIAABCEAAAhCAQBgBBLwwQnwOAQhAAAIQgEBLE6jlHNtmFawl87taSsBbNK/TbFDEoRXwgs052vbY2nmzpe7RIAABCEAAAhCAAAQyJYCAlylOTgYBCEAAAhCAQLMTiOocu+XC7pYS8JbM7zTr+msLeP41d462PZ2zTXvb7GbfDowfAhCAAAQgAAEIVIIAAl4lloFBQAACEIAABCBQJoEkzrFbL+o2y1a3TgptVAHPv45ytO3pbjeeqNeOmFfmHufaEIAABCAAAQg0NwEEvOZeP0YPAQhAAAIQgEBCAmmdY7dd0mMeWDmQ8OrJu5VVAy+JgOefJY62ydecnhCAAAQgAAEIQAABjz0AAQhAAAIQgEDLEFD9toEha8QwMm5GxqbXcosDohUFvLUbRszomLWiTdkk5nXbenm9NjpPUXrUzUsJlO4QgAAEIAABCMx4Agh4M36JmS
AEIAABCECgtQk459ghK9qNWffYrForCnhr1o9kylBr4Rxte7smUm0R87LaoZwHAhCAAAQgAIGZRAABbyatJnOBAAQgAAEIQMAjUMs5Nms0ZZlYzLFRaxK91llH2CLb0gVdZtW64cwFvOAccLQtclW5FgQgAAEIQAACzUIAAa9ZVopxQgACEIAABCBQl4AzoRgcHrPi3VghpCTgrVg7aDZmF9QXadw9XW1e2mnRAp7mu7JvKHcBzw/BiXn6E0fbSNuDgyAAAQhAAAIQmKEEEPBm6MIyLQhAAAIQgMBMJ5DEOTZLJmUIWhp/Kwl4/vVS5OHcnnZPQMTRNsudzLkgAAEIQAACEGgGAgh4zbBKjBECEIAABCAAAY+Ac46VgCNDBZlSlNUQ8IolL+FSkXiqw6cIxC5rgqG/Q8wrdh24GgQgAAEIQAAC5RBAwCuHO1eFAAQgAAEIQCAiAZlQTNS0G5tM31w0r9NssDXgyhTwiqoJF8TUqhF4mndH22zT1z8yBYnqAfZYAwzHBROMiDcWh0EAAhCAAAQg0FQEEPCaarkYLAQgAAEIQKA1CIQ5xy6c22EGh6ywZ51ly2pL5nd60WBZOttGmUuZAt7yNYNRhpjLMVHMOyTmddvIvK6OiWg9xLxcloKTQgACEIAABCBQAgEEvBKgc0kIQAACEIAABKYSGLd1zYZHN9oou1Ev0s7+Z8MmAW/IincDVsQrq0nAUxrv6FixLhatKuDNs/Xv1KKad0jM67Cp1hL0euwPYl5ZdwrXhQAEIAABCEAgCwIIeFlQ5BwQgAAEIAABCMQmkMY5VmKORL4Ng6Oxr5tVBwl46/qLT+MtS8DbdkmPeWDlQFb4Yp8n7ZrjaBsbOR0gAAEIQAACEKgQAQS8Ci0GQ4EABCAAAQjMdAJZOcfGjcbKgysCXh5U658zy6hLiXmdm9JsMcEodh25GgQgAAEIQAACyQgg4CXjRi8IQAACEIAABCISkNHEiP3pHxwzI2PZ1KyLUg8t4vASH4aAlxhdoo5ZCnj+AeBom2g56AQBCEAAAhCAQMEEEPAKBs7lIAABCEAAAq1AoJZzbJbzrudImuU1ws4lQUmiZNFOuK2aQluEcYnf0ZbIvLA7gM8hAAEIQAACECiSAAJekbS5FgQgAAEIQGAGEwhzjs1y6hKxlAYpF9iyWqsJeFsu7DZlutAumtdpNgwUV3PQOdr2drcbRelhglHWncZ1IQABCEAAAhAQAQQ89gEEIAABCEAAAokIxHWOTXSROp0UHTXHGlmsXjec5WljnSuvlM5Gg2hvm2Xm9XZ4rqouyrF/aNRsLMAIt2wBr6yUZa2Hc7Tt7Wr3hGPEvFi3CgdDAAIQgAAEIJABAQS8DCByCghAAAIQgECrEEjjHJslIwl483rbzcq+mS/gtVn1qNsKdnO62zyhbtTa747amoJDI+Pe3/faaETVFuwfGjODw2O5iHmzZhmzdEG5EXhLF3SZVVawHZP9cMltqY1G1Bog5pW8EFweAhCAAAQg0EIEEPBaaLGZKgQgAAEIQCAJASfaSUhStNeAFYrKbjNdwBPrThvpNdemb0o8GxweNxsGRz3xqlYNPPHotQJfZ3ubPWbcrOsf9US9rCLzNJ4l87tKTaFVBODKvqFKCHj+sThH257O2aa9bXbZtwbXhwAEIAABCEBghhJAwJuhC8u0IAABCEAAAmkI1HKOnWdTVtXW2TpkZTcJSottTbQVa4dKG4p4KBouK0HTiXaKqNP/HrYRduutaDc6NjXirJGJhcS+DisiKTqxbfZsLyJPP2mNNhDwpm6zrRd1m4fWDE4TSFUrr8eKrp6oZ0VVGgQgAAEIQAACEMiKAAJeViQ5DwQgAAEIQKDJCYQ5x86xwoRqgVVFwCs7IkwCnrI5FRmXtElwc2mwTrRTKmwjwS2qC63OrZpt3TYyTKKeS7FNIuZVRcAr00TDv8bbLukxD6wcaLjsONomvSvoBwEIQAACEIBALQIIeOwLCEAAAh
CAQAsTiOMcWwXnV/9SlW2qkCYiUSwl3HXZKC1FyIWJdv55RxXw/H2CYp4i+xThF1XMq0LEYxTRrIhbOUk9QL+Yh6NtEavENSAAAQhAAAIzjwAC3sxbU2YEAQhAAAIQqEsgjXNsFerO+SdWtqATV8DrtmmVEu3041JbB62IFrclEfCCYp6iKZXmqTRbV9ewkTmEBDy57pZpGqK01WWrB+Piyvz4tNGIEvM88bZDPzjaZr5AnBACEIAABCAwQwkg4M3QhWVaEIAABCAAAUfAmVBIqFHUVdJWNQGvbEEnioDnmUvYaDsZUozYWnZOuEtjLpFWwPOvv8QonU+ptjK/kFmG9klwfGULeEmi3pLu87B+7W2zzII52YiZEvM67B6RoNdjf2brL2gQgAAEIAABCECgBgEEPLYFBCAAAQhAYAYSyEq0C4o9ZRtH+MejFNoVa6cbCRS1nPWENL9op6g2Lz3WCqeNItzijDlLAc9/XY3b1eOTg61fzCtbvE0b9RaHb9ixiqSUUcXqdcNhh8b+XBF5E9F5ONrGhkcHCEAAAhCAwAwngIA3wxeY6UEAAhCAQOsQUD2zISsUDVrBSAJM1k1RUFtZ0awKaYyamwS8lX1DmQljcXlJSJM5RF//iFFUljOMyEO0848tLwGvkZg3MrrRRorNKi2Ftgo1+ByfompBek62m9JscbSNe3dyPAQgAAEIQGDmEUDAm3lryowgAAEIQKCFCMQxocgCS9l15/xzWDK/06zdMGJGbWpqGU1CzlwbiSVhUymnzuU1q0i7enMqQsALinlzrOOuDDckDK/rH/X+TJMGHHe9skxbjXvt4PF+4TbtuaL2l/FFl9JslY5t14EGAQhAAAIQgEDrEUDAa701Z8YQgAAEINDkBJTaODg86kXb5S0WBVGVnbYaFPAkJkV1Us1i2RUJpnp2Eu5Uu2zERj2usqmURa5D0QKeuEk0koi3YWDUzOtt98wvhkete+6gTQ+2DPJuZafw+ucXpfZhnjxwtM2TLueGAAQgAAEIVJcAAl5114aRQQACEIAABDwCco6VW6kMEIZHxoz9z9La0gVdhQtW9SarCLwiBDwn2smMQv9b9ezWD44aRUV1279bs36k0PUoS8Dr7d48V0UdupRhpRG76MO8xDyvrqDv+oUCD1xsfm+HF4E4YFPVy27O0ba7s93WziMyr+z14PoQgAAEIACBPAkg4OVJl3NDAAIQgAAEUhCQKJLWOTbF5Wt2LUo0izLuhXM7vHp/EjezbhKonKGDE+08MwpftFlZolIVBDw/76CYJ3FTImeWYl5Rdeei7CPtO0W/VkHA849X0bHrB0a8yFAcbaOsJMdAAAIQgAAEmosAAl5zrRejhQAEIACBGUxAzrGeMGAj7RbO6SzVoKEe5kXzOr00yizFmaRLmoeQIqHIcwG1IogiHoOinX+sLq00DzfSRkzKELOiXlNi3hybXiwDBqXZSoCW0JU2xTjq9ZPupTj9qnQP+McdrE/pHG0R8+KsLsdCAAIQgAAEqksAAa+6a8PIIAABCECgBQhICJPAoYglv3NslSLd/MuQh2iWdJlVi2zU5hOnjYTqtmKTRDv9SLTzfiJE9ZVVl60MMSvJNRW5qH5KtR0bnxCnN9jovCTmF2UYR9Tbl2Wbp9Qal4TTpQu6zfI1gzWH7Rxte2yabbtNeaZBAAIQgAAEINB8BBDwmm/NGDEEIAABCDQ5gSjOsYryGbBiRxQhqUgcZRfw9881zVi89Fc5elrxbsS62DrhLo64hIAXfeeJlUtJllAtIxZF50XlnWato48y2pFKVV3ZN5Q6qjDa1aIdJbF0sX1mrFg7FNpBtRt7NkVJ4mgbiosDIAABCEAAApUhgIBXmaVgIBCAAAQgMJMJxHWOVaF8pR0qYqlKTemRKpy/zqbRlt3iijp+0U5svfTYFE6+rSbgyayirz+9YUcSMS/uWue5N6vkxOzmqShSiXJx07n9jraIeXnuGs4NAQhAAAIQSE
8AAS89Q84AAQhAAAIQmEYgrXOshDJF1WQhmGS5PElSKbO8vv9cUdIq29tmTbqlZiHa+a+v9VFK8cq+4bymWPO8ZaxBXsYZzgiks73NS7OVq7Ai9IKReVmlS2exUMFac1mcM+05stgTztG21z57FKU3W39BgwAEIAABCECgMgQQ8CqzFAwEAhCAAASanYBMKIZHFdk14cKZpiWNqElzzSh9yzJuqDW2eqLF5tprbZ4QpEg7pcimNVIIjgEBL8qOiXaMargpwm9er4Tr2fY+sgYig5tdf6tSezGs1ly02WZ/VNYRitLu5Gar+oWqn4eYl/2acUYIQAACEIBAXAIIeHGJcTwEIAABCEDAR8DvHJtWtPODLSs9M2xxqzQuF721Zv2IF62oenZzbfSQRBalLCv9OGvRzs9H11wyv6uucUAYy6SfZxFtFffaRaZOa/0kHHVbwwWJehJgO9pneX+mNSyJO+9aom0Zax42bqXcK3IxLz442oatAJ9DAAIQgAAE8ieAgJc/Y64AAQhAAAIzjEA959gsp1mWOBQ2hzjF8sPOlfZzRSnOnzNRK1DjkoC63op2o9aUoohW1hqVIeBlHeEVdX2cmDenu83WXpwQ8RRNqXuwjKaU7AV2zxWdNh02V0UoDopNysjfsOvocyfm6U8cbaMQ4xgIQAACEIBANgQQ8LLhyFkgAAEIQGCGE4jiHJs1girW2io7hdBF2slBVtFZ4zZHVhF4ZQg6ZbFoJQHP3VNL5neaDdY4pd2mdUo4UpqtUtUl5hUl2GosVUoh9z9vxGfthpFCWej6uge77b2oNcEEI+vfAJwPAhCAAAQgMJUAAh47AgIQgAAEIFCHQFzn2KxBbr2o2zy0ZnBaQf+srxP3fEULixLKujvbvJ8uK+BItFEkliLvyjCR8POSI+lyu0ZFtlYV8PwC1eY6h+2e+cWQjTxTynTQ/CLrdSmDfZQ5aB+u7BvKNWU8bBwyvuiy96gYIeaF0eJzCEAAAhCAQHwCCHjxmdEDAhCAAARmKIG0zrFZY1m6oMusXj9ceFRN2DwkFqxYm7+wqBRZJ9xJtPN+fCmCZaWwtqqAN2oF07xqrIXtOd0Lq9YN1xSoJBa5+oeqAyfhXdF5eYh5UZyPw+aSx+cS+5etLlZIbjQPmWD02DqGzr0YE4w8Vp1zQgACEIBAqxFAwGu1FWe+EIAABCAwhUCWzrFZo1Va3Lp+62hbUr2vevNpJKakZeAZUyiCx4p3I7aWnRPuaokxZaWw+udYdDSirl1GFJhq4JUp4EWNMNP+keirPZSHmFdWLcBG91UV7oMwMU8mKHN7OqyouhFH27QPSfpDAAIQgEDLEkDAa9mlZ+IQgAAEWpeARDvV0FJ0mwSiqjalhyo1sKyop3pcshYW/aKd0mKVHitDiigOsmUIaK0o4OXtchp2DyaJ+nQuxZ3tbV6arWroDVkxPE1kXtkcanGqkrFMvXX0ommtqNpn6/R1bBJZe6zQSmRe2M7ncwhAAAIQgMBmAgh47AYIQAACEGgJAjKhmKhpN1E7rarpqf7FqGK0j8a3aN6EoUCayEC5ec61UTmKtIsr2pUtoJV9/TIi8MoWk9OkiCpCTWYL83rbPeF+eNTWUBxM5mRbNodaD+uqGmv4x6oIPAmNff0jU6aAo21L/PplkhCAAAQgkBEBBLyMQHIaCEAAAhCoHoFGzrESoQZs0Xt/TbWqzaCq9baSihibjQfavCgoRdo5QTUp+ySRWUmvVatfq5hYJF3zrFhnFWnpDFGci7Hbg1HF6CzE66yYuPOUIejGnYMiFyXUy2ikXvOcbDtwtI3LluMhAAEIQKB1CCDgtc5aM1MIQAACLUEgqnNslBfKsoEpskZRQyv7hsseypTrKzLQvos3fBl3HSTauZpkEk+0PnqJj5IeG2XSUWujRTlXkmMQ8JJQi98nTQRevatpP/Zao4XuztlehF4UMU/p43433Pgzyb6HottkGrHORsVWtcX9wgRH26quJO
OCAAQgAIEyCSDglUmfa0MAAhCAQGoCzoTCmR1EPWG9lK6o/Ys4TmmmC+Z0VFLA0/zrCQYS7ZQaqygn/W/Vs1tvRbtRa0qRdZOgsmb9SGaCYNzxlSEglhFxpQi8QUVM+lyA47JKenwRJg26hp4JigJTmq1cbLVvg5F5Zax3GLdm+DIiDTe/o62+1KBBAAIQgAAEWpUAAl6rrjzzhgAEINDEBLJwjq1qdJt/WSQqbLWw2yxbPVip1aolINUS7TwzipwddLM21IgLOo0wEfda7viyBLykdeOSztP1095aMr/LLF9TzH2wOdW73TO/kJGMokaV9l12ynYtlmWnN0dZX0VQPmTXL42BiK4jMc+L6LViq6L0MMGIQp9jIAABCEBgphBAwJspK8k8IAABCMxwAhKCRuyPRIQR6yKbtim6bdHcTrNi7VDaU+XaP6vaX1kO0hXNX2NdfPUyrZ8uGxmjKMgiRDv/XMoW8GSGsmrdcKERgK0o4C22NSvLuFe11xVNKsMVPXe6bI22B1cNpBaisrwfq5jW659fXgKsxDw52ioNWpGTiHlZ7irOBQEIQAACVSSAgFfFVWFMEIAABCDgEQg6x2aNpYriWHCOZUR4hXFWDTyv7pZ9g+6XEYgV7spIrdQ4FX1UVmSYrl9GCm8ZAl6ZQmlVUskl5klIVJOYp3qOSrVNG1UWdr+FfV7FZ4R/zEV9WeIcbXvsFwqIeWG7hs8hAAEIQKAZCSDgNeOqMWYIQAACM5hAI+fYrKet6KnVNoosj9psWY21TOHEPweJF6ppp2g7iReqE7ZibfqUuLScyk4fLCP6qdUEvKqku/sjybz7obvNdLa3eWm2zgCjDDEvD4OPtPelv38Z+9U52vZYg5J2a1BCgwAEIAABCMwEAgh4M2EVmQMEIACBJiYwbu1Mh0c32miWiUguuZsW1aoijjWab1z3xizZOdFOKYRyjfXSY209sHGrUixd0F1YTbJGc1IBfwmKA3ZsZbQy9lAZgkgZ83Tr6cQymZWU2STgBVN5VadSDrZyi5aoPTxq08htmn/etR8dh6rWyfSvkyJ21cpyyVWtvJ5NBiWYYJR5B3FtCEAAAhBISwABLy1B+kMAAhCAQGwCSZ1jY18opEMzuDcWPUalu6neV1C0k4Dnb1VJPy5bHChD2CpLwCvL7beM+dZ6dLjaj6ttzcNaTWKaZ7BgI1Ul6rmovDzFvLzqy2X5rC07StY/Fxxts1xZzgUBCEAAAkUTQMArmjjXgwAEINCiBLJwjs0aneq46QW4r7/cyJ5G8/JqzVlhIM/oFTHQdbptuplSAJ3wEBTt/OOsStoeAl7Wd0Xt85VR68+NpCoCXpxxSMyTuYLuqTzFvDBRsZjd0fgqZaSZR5m3X8zD0TYKMY6BAAQgAIGyCSDglb0CXB8CEIDADCaQtXNs1qi6bWqoUqvqRdRkfb0k58trjBLtXLSQxAYV5N9gDSkaiXb+8ZfhvlqLn0QVvXznKXA2WrcyTDTiCElJ9lytPmULeGWusV9IlBgXV/DX/SWBXHXZlGYr4wulomcRmVfGXoi7p6pusqH5SMzrtA7DMsDA0TbuCnM8BCAAAQgURQABryjSXAcCEIBAixDI2zk2S4xFuSOmGXOWBfwl2ik1Vil++t8SEdZb0S6JiUcZqaP1BDy9cJdVH61oAU/rNn9Ohyc0jIza2n+2bqSE17zNE8oUYcqOsnT7LotxaP0kuik6T+YXQ/YeVP3GqMJ58B4oIkI3zfNLfauSbh91HrqnN47bo62oh6NtVGocBwEIQAACRRBAwCuCMteAAAQgMMMJOOdYCQsPrS7fmTQO7qqkgtYbc63C+XHmV0u088worPiTplVFwFOEYrcVRMoU8JwIk4Zno77+2mpaz5GxjVaw22jWD4x6UZSqWSgjj3X9NrIr5brWG0fZAp5KMEqoLLNJwMtyHBLnJai79VMUrKLz4oixRdfIjMu/GWr0BecUdCfXFwS6z/QnjrZxdwDHQw
ACEIBAlgQQ8LKkybkgAAEItAiBes6xVRF14iyDBLyH1lRbdIwbweIEH++l04oEcvfNQrTzc61KYfosIxTj7Bt3bJ4cNrsAt9loyc1rWMuV1XFQiqeiKrOOyitTwCvbabiotXYp7RJjo4p5ee6/JPdDsE9eJQCyGFu9czT6nSART6m23p/22UqDAAQgAAEIFEkAAa9I2lwLAhCAQBMTiOIcW/VokFr4m0F0jCoyetFocsG0EVn9VsSRcDdoU/TyaFlHIyUdY9kCXtbikouYFF+lVTpDEX9UVi0Bz/Fz9dZcVFet/klYS8BbsbYcobsqIpXGMWijV/O6p9y6uPXtbG/z0mwbrWFVDSLcXJqhRp//fogTMai6jF32eas5IuYlearQBwIQgAAE4hJAwItLjOMhAAEItBCBuM6xzeDqGly+ol7K02ybYEqX/1wuSkvC3ZBNn/REO/sTJw0vydiyqAeW5LrBPnrh1hqu7BvO4nSxzyEOo1ZoUx2zNM1FKnW0TdQmlClHvbpojQS84N6YY8enqDy3L5Km2ErAW24jVctoVblHixb7JcZq7eb1yi17tk2PtlGYg2NeurS7v8uMjIyyF5rtS52kEYM42kbZDRwDAQhAAAJpCSDgpSVIfwhAAAIzjEAa59iyo6GSLEVVhKhGYw8KB5tTK2dPRmlJ9ElaCD8JN0WdVeWOJAAAIABJREFUJHHkTHKtRn3iRMxkfW2dL42Ap7Grv+qgqa7dgKImI0RMRhXw3HwlBMk0obtzIuUvanqmn1eZAt6ieZ1mgxU0k4qPWa17mc7L/jqIuu9cVN5iy6bKJQCqEj0ZdQ9k8SWUxDx9odLdae/t9llmtv6CBgEIQAACEMiAAAJeBhA5BQQgAIFmJ+BMKFSMP40IpJfMrWykzjJrZNEsLWnERZHz00uwnGKVsiWxx6VWFi3a+edctnmEX5xauqC86LAkAvCEC+mEE7Ai7eKuY1wBz79u6jshLti6epsiuqIIY2UKeEVHvtW7t6sS7eYXZLtsPTaZmSjCMso6Fvnc0rUaRQ8XPZYo18tacJR217HpnsPRNsoKcAwEIAABCDQigIDH/oAABCDQogSyEu2C+KLWa6sK9ipHDUrgUUSIi5zaYNPn9KKeRmTNinuVuJXpJBxVwGu3qbGqSyfhLK2pSBoBzy98OtOEKMYXcY1UstpnOo8EPLkMl73vy9xntXi66FMZlugZ4dZRgnBVxLxm+32Qt+DoHG0R87J8QnAuCEAAAq1DAAGvddaamUIAAi1OQM6xSs+Tm6UEBPufubSqRMtEnVzVogb1Uu6EFY1N6Y7jtuDVbPsfff0jUaeV+3ESpBbMKa/2nH+CZYpLjQQ8vyGFapbJHTZutF2thcxCwPOfV+NUVKAzvljXPz1dtUzGVYl8q5qAFxTR3ToqXVrmF4qoVm3GsoTPqj1bozwUixQcnaNtjxVf221aNA0CEIAABCAQRgABL4wQn0MAAhBoYgJxTSiymGqzFS3XnIt8aavF2Ak9Lq1SIo/EHqXNqulFXWYEq9eVY9RQa8x6OS8zddU/pjLTOyV8KbVZqbCu+WsUhhlSJLnnshbw/GNwopCL5pLYr33Y6gJe2bUWa+2TRg6vWkel2ztRNkndwyR7099HIv+iuZ3WvXgo7akK6V/mGusZ0mMjdD1Rz64dDQIQgAAEIFDz398bbQMNBCAAAQjMHAJliHZ+es1QUy642ooaXLthZFIwK2I31BLtVJi+VupblaLd/GzKFHWCAt7KvqFSIo2cgKe1c7XtXI3CtM609fZhngKeu6YEWqVvK9VPEaAS9MoySyhToHU8dL/KMKJKYlRUwwVX91BfEMjBtigxryp1MqM+z6vyu8vvaIuYF3X1OA4CEIBAaxAgAq811plZQgACM5yARB+JBYr20Qtama1KtdGicpDLZVQH0KjnrPmt2aQ7YZvpslEWUWuhVSnazT+vqqQUlukOqohTCSMSuSTiFZGyWISA519nXU/ild
LutWeLNkyowj6r4nMtSbRzUMxzbrZ5fJ0fVWBM80zNsm8Vx+scbXutmK4oPRxts1xxzgUBCECg+Qgg4DXfmjFiCEAAAh6BvEwo0uJtxrpHqmMmcULF4PNoXiSKjWTSS1i/vYYnglixNU6rSrSbf8xVqU1WdASlM6RQiqKEj1G7eYpMby5awNOaa61XrB20YuVmU5WiIrmqsPerGE2WxjFVz2lFVc7rbbduyLO9L3422DRw/ZmVmCeBUefLKxI1zvMzyrFpeEY5f9pjnKOt7kGl2iLmpSVKfwhAAALNRwABr/nWjBFDAAItTEAvzIPDo15x8rIKk0fBn7eTX5QxxDlGqY96mc3SJMLVQZNwN2QjJF3kUtKX46qIZX6uRQtn9da0COMUl/KsmmISP1TzThGvHbbOV7fdP3JJLaoVLeDVqg3mIrm0v534k5fzaRVSaBvVmytq3YPXyWrfaz/7HYldVF7a9Swqsjkr/s32ewtH26xWnvNAAAIQaB4CCHjNs1aMFAIQaEECzjnWS1mz7rF5OcdmjbbZXtyySo/zmxe4OmhZuI5qfbJ6Wc9yrasyJkXO9A/Wrh+Ydr5BQwq/uYjOXbSYVsY1GxX3l/ijdHCZrDjjC0WyJhWqg+tVprGAfyxKr1QElN+sJO3eSts/D1Ff6+miLLWeacS8PMaXllmj/mWbGaWZm8S8+dYVvM0uIJF5aUjSFwIQgEC1CSDgVXt9GB0EINCiBEas6+PaDcNehE8zNqWkqlXpZbcRxzQF6v3plFmLdv4xV1EUlXA2aOu+xU0HznpPZ536pv2giKQ53W1epKurbVdr3K0u4PmZOOML53y6rt9GKdro0zStKgJe3mn2SRjlLTgFxTyJ1/qdFHVNq1C7MCrXquyzqOOtdZyLIJxlJqKCcbRNQ5O+EIAABKpJAAGvmuvCqCAAgRYjIOdYpcUO2Eg7Y2tqqS7Ryr7hpqVQFTe/OADj1NnSy54icro7J2qguSiVPNOakxSsjzP/JMdWpcZVVuNQmqSEO6XFSqiQAB22pllFb8bhX7RomETgdlxcVJ6iiEftFxNxm64tgTaL5+Elt99tbl252v6sMbsvWWh/Fk38ucWi0GFlLRKHXjDkgKJrjWodJlyW2+0zb6P3u6qRYUuzCWLN+DsrKJ5vZetUBp2iZXzRZZ9pWjscbdPedfSHAAQgUD4BBLzy14ARQAACLUqgnnNs0S9meeBXVNqiuZ226P1QHqfP5Zxh9Y9cVJYcR7VGqkeoVMEwgSerwVYxha8qkZZpoqO0ruovQwpFvsZ1I24VAS+piOai8nqsiCCn3rjGF1nwlXB3yR13170VJeIdscsODYW8qkSbukmUKZBpTXS/uEjLWmvabL8DquhAG+d3RxTeSgHvsQKsxDwcbePQ5VgIQAAC1SGAgFedtWAkEIBACxCI6hwbJiY1A6o4EW1VmE+tem7OuECinf63orKCNdCKGnsVXTDzMP9IwjOukOgv2q91dYYUScTYLASmuHMuIwIvqYDnn5vf+MKZuoSlY6qP6usldfk97fc3eRF3Udqp+z6hrohXlXqPbh4SbBbYmmdZRCZGYVPvGLemekbKzMSJeYpkVQpnkeYuaeZRtQjLuHORANnRPisyb4l5E2s0sU7UzYtLnOMhAAEIlEMAAa8c7lwVAhBoIQJJnGNV70wvuEpRatbWbCKkS1EVd0WX+EU7pciGCQ15r1MZQlHYnKoiKrqIkrCai5sNKdo8U5gs1jXLFM8w3u7zogW8rMUif201zUnlA+oZX6TZY3HEO41DkXinPnWfmsug59mqdcOFRdyG7YUquuL6xTxvXW39wzXrhzMzNAljkubzZvt9FZyrBMiR0Y3efRS3SczrsEK5BD1FyiLmxSXI8RCAAASKI4CAVxxrrgQBCLQIgSycY5s9nUdLXUXThXpbUILCgt4OT7ibbf9DIl4W4k6WWz5JHbIsr1/rXFURFRuJGS6KUlF6zpBC65ulS2oW0Wlx1q
poAS/PdfZH5Y2N27qDAeOLpELVrQ+vNqddf3McrN6xSqU9YtcdpvWrmqNqlX9H6Hm60JZQUP1DNbeuitDL6r6LvbAhHfI2BMl6vMHzZSlAKiLPRVC2b1rDvMfP+SEAAQhAIBoBBLxonDgKAhCAQEMCMqEYtt9+9w9NuPSlbXqpVXpUM9WQC845blpjWmZJ+rvC5Yo6UHqzWtkpaY3mUTVXx6wjs5KsofrUitJyaxvHkCLJ9csQVmeSgOeYS/Tp2pQu64wvFE00YSoy2/T1j8RanrC6d41O9t3D9p/2cdXuvTR1H2OBTHiwUo7XbhjxRHOtoSKatY7O8KfsiGb/tMqsJ5gQ75Ruune2WdxjHlw1kLlA6jnZbkqzxQQji9XiHBCAAATSEUDAS8eP3hCAQAsT8DvHZiHaBVE2Ww254Pir6urn0ij1UqkUL1eLSy9xVTfeqFqUSFVefF2dtD4rGDhDChdtl3caehkMyhDwervbItfXSvtrwRlfyCTB6j9euvNaK+DFid561WXXJh5GMwh4Va/ZViti0Z86XSUxr6q/q6Ju4CgGFlHP1eg4HG2zoMg5IAABCKQjgICXjh+9IQCBFiNQzzk2DwxVK5oed45FvVREGdfm2mezJ9MoJboGTQuqFmUTnFuWaVJRuEU5pgrMJNoppVAupxsGxzxRNokhRZT5Bo+RKLF0QbdZvmYwSfdEfWa6gOeHotqUit5SixO9lUbAC5pZlCHShm0MlSjYMGAjvu2XEFVsYc+FoJgncyA9k8uYT5XTkaOsbRnj9zvaEpkXZZU4BgIQgEA2BBDwsuHIWSAAgRlMIKpzbNYInKlCkqLUWY8l6fnKjCKMKtr556aokRVrB2NF+iRlk6RfFUXdsBf1JPOM0kcCsSK0VLdwZGyjUXRIWSnn2jcIeFFWLf4xEmdHbRieRFkJFUrpU3Nup/Wi8tIIeMEIvDLSpMNIVa0mn3+8cQVPHa9ah71d7d5pVIpCkbNFifBVj2YM2wtlj9852vba+1PPYUwwwlaMzyEAAQgkJ4CAl5wdPSEAgRlMIIlzbNY49EKjNM/V1vmwWVvREWMSdfQS2N052xPhXMRO1BfBKgpk/rUv+0Wt1j4sUkjwG1JofV3Ujsa12EYkIeDl86RI4wSbdkT6IkPmB/5UaL/xhUuBD0ZuxXWg9Y8zKODlaeKRlE/V0un980iTkirWEuUlzmvdw4TapPz8/Yr+PZXFmKs6fudoq9/DEtsR87Jebc4HAQi0OgEEvFbfAcwfAhDwCDgTCvcyWAUsVUpBTcqjCCdaiTquSLrSsvTCp6jFqKJd1QUy//iqaAziitWP2ii4vJo/mlJpdhLu/NcrI43VP9eiI02LTqFN6gSbxX5oJFr70zDlHj1go/R070vcTWpisfuShebUp+4zZehlCpj1GJYV+RplTbVfkhiPBM/thFqlUOcp5lVZDA3jrXtgKxsBvGx1cSn8YWPyf+4cbWUUhZgXhxzHQgACEKhNAAGPnQEBCEDAEni4bygT99isYTbzi4VY5FWbx0Vi6cVO/7uWqJNkLaookPnnkdWLcRI29frkFbW4Oa2ubbJuYSNDijIFDQS8zbvjppVrpm2VfawolrRJwOu3dQ3DaqMFI7fW9Y+al/7o57EvG6x/pxOUKWDWmkDcFNXYEFJ2yKP8Q1DMc9HVcYxNak2rbPE/JWrTTI71ztG2x0bIt1tHYhoEIAABCMQngIAXnxk9IACBGUhgrXWvrGKtubzEkaKWMMvUs1qinV7iwl7s48w1TepXnOskPda5rVYprVoCy6Bdh0EbGZdFm6iFNSHMxommLFpE88+1aPGwjAi8sIgqCXc3rZou3jlO+yxeaJIIeXHNGiTIaKzzetvNbXZM77z6D2Y84tasFX2n8euLCKUGrrOmEVVois5eMKfDrOyrZnmFPFP93frOsbURtc5jdnEl1ipCL4mYV8Vnapw9lteXZHHGkORY1crr2VTTEhOMJATpAwEItCoBBL
xWXXnmDQEITCEgo4oqvgwpkmGjfSupyotj3G2TNg1YL2suPVYva0pxzlq0888pS8ExLqsox1fxxb1WjbIoc/Ef4zekUDRlkjUu2kjCP/6ir12GgKcX7nrPoSvuW2YeHAhP4dump9sc8oitY22PNCnaen7ctbbPfOS6P5uN9v8k5NUTeeqJdxqsInOtj0ZlvuSpYkqvf1GLqik37feD/T0eJVrTP9ZmFcDcHPIUS2PdqCkOljg+f07npNt0ilPRFQIQgMCMJ4CAN+OXmAlCAAJRCIzbt7Mq1pCpekRYFLZJ0oDdvFU3p9/WtPJqE2YU4dVozFWvJ1TFdK+kaccuolLF6jUviUMS75LULtSalukg3MoCXlTxzt13cUU8CXhr1o8k3he67q0PrzY/uvNuc7uNEJxlN9u4VfEk5OlHwt0Ru+xgdt9iUd1HQxYidZRnZdRjqphK7x97kmd+1LnXO85fD1Ff9rgU27AI7TzSfdPOJU5/PXtWrRuaUhM0Tv+qHDvPflnpfpdUZUyMAwIQgEAVCSDgVXFVGBMEIFAKgRVrhrw0nCq1iVpHnWa5HVuztqhpwF5UiRXs9DM0KvdBK9rZnyRpUWlYlfHyGWe8ZaaK1hpnXDHBGVJonbOMqFTUzyrr2JxUAIyzBsFjixYPqxKB92D/oLni/mWx0R2y3dZmm97uSP2ydjmWucVsW35LTqdHPX7nSC6nWaeJR5p4g4OqFhHoH2oVvgQJinnOrbqWmBf191PaNcujfxVYZzWvLezzm1TarGhyHghAYCYTQMCbyavL3CAAgVgEqloHT4LSirVDpQgTsQDWObhRhIPfXVTCi5c6mSIKK4vxpknZy+L6YefIWtAIu17Y51HS+Vy0nYQHt85Zi7NlrlvRa1K0gFcvyjKs7l29vRMnCi9PcdQZI0hMHh6tn34Ztw5f2D2T9vMqp02mLZuQlk2wvzPDUTS3oi/7h0aNzHCc0F/1L2wa8ah6yYeoa6kUWt3nuNRGJcZxEIBAKxNAwGvl1WfuEIDAFAIqmK9UlKo1vTwOKI20gBTSPOYerDFURdHOP++q865axEijl0hF50kc6bBF9yXMKk02rwi5MrkUHf1XFQHvq3f+J/Ej4/U7PzpS3yLSk/0RW202PE8ij0yNXPRvFmm8kSYb8aCqCYr+YUcR9CNOM/PDgk7FQ/aZpBT+KpbPiDL5Zq/f5+Yod9ol87uiTJljIAABCLQ8AQS8lt8CAIAABByBUZs+W8VUVUW/KHKgr3+kKRdLL01yTNTLUnfnbO+l2NUnykvMSQMqaU23NNeM07dqAmPQWEMRL2KoFMWRsY2Fic9lihpFCzytJOAVnTKu/SzRWcKOSirI4XTh3E5rclSdKOiiBeM4z6dmEZWc+6wi82RipS/wJNwWXbIhDtvgsVWOxIwzL+0Z/RuBBgEIQAAC4QQQ8MIZcQQEINBCBKpYB69Z02Qk5OhFeE53m2m3RcXX2+grRbVUUbTzb/G4Nd2Kvj2qVnR9ok5jl42uG/FcBPXfaQ0pkjAts05ZKwh4ozbFXamH/lZEBJ5SHMuKkHLP3q6ONu8LFKV9j1pRuuyWZ1px2rlVzfCj0Xyc2Kh1dW7nEm2bRcwrOnU/7d6o15/6d3mR5bwQgMBMJICANxNXlTlBAAKJCaxZP+xFh1WpNVOh6qCzqNImVUB8CyvwPLRmsCmiG6oumOqlUzWDJJKV3Vw6dK8dk9yCvRqG1oCkjFamcFB0+q7bo4oOK4K3IirLEvCKSKEN26+KApSA56LyyhZ4yhQ1w1hVLUK40XiDX4bod60cbOfY/a4/x8YnIjAl6lUtMq+Z/l0Qtme0n6l/F0aJzyEAAQhMEEDAYydAAAIQ8BGQACERr2pNKVOr7biqEP0RZONEOxd9JdEuKOQULXCkWT8XUbbcCo5VbGXXmPIbUuilVgLtgt6O0qKk3BrVE5mKWMMi97ciRCUkaR2ca3beQl49tnmbWFThXpRQsnRBty2vMPE8cK
mXEnicU3YRIqrbx1Vg0uieaqaosEZp91p3F5XnrbVNs+0fLO8LiiDzqn/RFPW5S/27qKQ4DgIQgAACHnsAAhCAwDQCVa2DV7WohmkvNzYFqVH0VdXSPsO2ftF1t8LG4/+8rBc3CYc9VjiqZUhRhZS+MmsX5i3gSbRR5KVqSEogl3AncWFl37AnKM3rnYgYkpjqN1+Is68aHVsvuvHB/kFzxf3LYl/mkO22Ntv0dof207wXWxMfuXCX1eoJZn7jC42tqKi8YM3JsrjUu26VowODY4763PKvte4zV8O1SOE2OPZmqTUYtj+pfxdGiM8hAAEITCVABB47AgIQgECAwEOrBytXp60q/1h3rqIq/K2USS8CJYI7blXGH3WzVz3isShRw29IodqFenEN1kETUwlYazeMlBohWmZqserv5RGd43dsljAnkUjrUEvElcggBr1d7ZlHhjUqlh83Cm+bnm5zyCO2jnQrav/p2hIqy2pRBDOthwRV/Uhc3WDT2/MSd8qOwG20DlWPDvSPPWkKalXEvJliYLF4Xpf3xQQNAhCAAASiEUDAi8aJoyAAgRYisGrdsPcCXKVWVtSVGHgvjJteTodsfTOXNhanJlCZ40+yjnlHVCUZk79P3lEuEmpdSrQT7RqZj1SBV5nmI1kLeJqLIgrriaaN7qdgZFgW6bVhYsEV9y0zDw6Ep5zHEe+036vw3Ijj+BsUd/KIiCxzn4c9tyR2LrKOvWVGTIaN0X2exVglWGo99IWWnOLlYluU0UkzpSo3WhPq30XdsRwHAQhAYIIAAh47AQIQgECAQFXr4BWZ1umP/HEiglL3kjrIJo12KGtzVj3lVy89WZuC6IVWtdU6rWBbq45ho7Uo0wHWjavMyKQs5u+PdhR/mZTUu9+iCltZpddGmV9YOu0+ixeafZYsjHVLxxHPYp04xsESaFSna836kRi9jHXennDhdsYXWQipGoCEXRuE6aVKV60lZVXGPLIeq/aqnp1uvYfsPZxHOrv38majbbdaWJ47c1brpXTkpQu7sjod54EABCDQEgQQ8FpimZkkBCAQh4BSnx4useZSvbEqrVNpinmlZmUt2gXnkYfoFGdd4xxb9ZTfrFJ8axlSJBFqy3SAdevqzAVW2wjaoltYhFqj8fjvO4l2UfhHFfDcddOm18aJMJSQF4zGiyvcuXGXKcq6MUjoabeRVmlcn4NCapoorTR7Le/7osw6lHHnlueXNC6lWlHMSqnOuj5i3Ps/Lpuijqf+XVGkuQ4EIDCTCCDgzaTVZC4QgEBmBJatGvCiHKrUFs7psC8DGzONvFCUiGpmqQaNUmJdce6kkXaNeFUhzTLqepYpBkUZY1qWftFIgpFS/dI4HFfhxb3Ml9q4AqYzgWmUJhsm+sm4Im5tuGB6rWq1RalhGUfAi7J/ox6TdZRU1Ov6j8tyb/uF1LHxZMJOI+fUJPPLsk+VxcXgPIsyhspDzKv6F0xR95TSrXWP0yAAAQhAIDoBBLzorDgSAhBoIQJVrIPnvczaNJ01NgovTVPUlVK75nRP/MNZ0QFK9clDtPOPM8+IhzQ8avXNoj5S1mPyny/Ji7Kr16SokEaGFEnGXaaBhBtv0Gxg+V3rzS3XLDfL/71hckpb7jjHbLnTHLPnAVslmWbdPhJ5Rq3iX8vgw9/Jpcl22Pt4xEb6NkqTbTTALMTKOOm1ZQp4aaPf0i50XHE26vVcenBne1ss0xFF3+r3U97P66jz8B8nUWxdf7lmNlHHXXQNOYm3ShmdY58V+lMCrgR01ZWNU09W80vy/I/KpcjjqH9XJG2uBQEIzBQCCHgzZSWZBwQgkCkBCVpKV61SSyMquVRJ1efRi0QWUVdx2Sgdrsdev4wUx7hj1fFF1hyMO744UUF+Q4qw2mpxx+GOr0Kqo/a4c+e99ov/niLcBeclIW/PA7a0Yt7cpFOe0i9MwBMfvbhrjFHTZPMW8Nz5/VFhwyNjXhRuME0/bcRnUshVEIbzFkuCUZ
FhtdMkPK1YOxhb9Em6BnH6FS2KxRmb/9iya8i5CFx9mSIxb1D33eD0+67e/JqFc6P1kTC/pa3lSoMABCAAgXgEEPDi8eJoCECgRQgM2X9Qx01PKwJNnDpyTrRzbqJxjQmynk8WUUNZj6nR+ar8ohzmRBk0pPCcg22qbF6tCinHeikeWT5iLvrEbZGnuf+xO2Qi4tUyFvDXF1S0VNR01SiDz+Ne8osKGoM/OqgsAa8Khg1Fpqy6dEtFSCtCq5bxRd4O1FH2X71jqjw2/5jTfBmWhk+tvkHnYlfGol6t27LFx6zmr3+XLLQptDQIQAACEIhHAAEvHi+OhgAEWohAFevghb1IT/tm3wo3tSJqyljGZnvxCGNdBkN3zVqCWTDKMotIr6hzDKavRu2X9XHXf+Mec8+tfbFO+/KzHhfr+FoH+yMi47jJJr1wHgKefyxufyllX/URe6ygVEbaZlhkY1J+cfrpOaBo7DQ1IuNcT8fqWSn2Lt1Sa6Co8Nn2gyXzu8zyNYNxT5n78dr3VR1bcPJVqK1Ya0GiiHkumreKXzDG2WTUv4tDi2MhAAEIbCaAgMdugAAEIFCHwMN9Q16qaZVavTpyeiFR1IZetPvti17eEVdJmWTlnpr0+nH65VX7Ks4Y6h3rF8ycIYXWX+tehmDrT1/NYn5JzqGad7/92j1ebbk4Tam0aWviKdVTRjBqLk1WaxG3tlXUcecl4N1y6+3mH7fe4Q3jZUcc5glJmptS71WrS1F5eblg15p73umrUXiXna7oXwNFco7bTSUxNa+9FYVJrWOaqURCM9RjdTVLu2z6fdvs2fa5PuFQrfqZ+qzP1hps5rblwi7TbtOHaRCAAAQgEI8AAl48XhwNAQi0EAFFMKkgd5Wa/yXJqztmRRv96OXaE+1yFA2y4FCU818WY41TZy6L68U5hwS8LWwkjl7mnSFF2Wtfdvqc6t6tuqc/dqSU6uHtf9yOcfBPHhtMk62V8pjoxCGdshbwzr/kMnPBjy6fdtU9dtvF7LH7Lub41x7pmd04kdKl+eUtIlVFwKtKzTmJqe65FJZqmce+a3TOsLT+osfT6HrN9HtI89D93ml/30tIV9Ozfq39t0ne919ea0b9u7zIcl4IQKAVCCDgtcIqM0cIQCARgSrWwVOE3YI5HVOEG30rX0VHwlrQqyyKBcdbxYgSF2nZYQU8RS88tHqwMmtftunHD0/+u2UyK7aAp3WPm0YbrDEoB1obrOYZVBTRshLwFHF3wSWXm3/cNhF1V689aa/dzftOPtHbay7is7PDRvvaqCClduYlJJTlfuvnUPa+9o/FiWTrBka8yEj9PpCIL3FVa5HXOkTZ080Q1ebmUXZUZRSe9Y7R2IdHx0y3vf9GxvTFXflrH3c+1L+LS4zjIQABCGwmgIDHboAABCDQgMADKwdK57P5hXm29wIt1zql9xZZkykrCFUUxerNrSp13fw11UbG9LI+ZgasU2jVXkLLNv0oQsDzO/pKrNM6qBUdfZSVgHfkUcdGurV1L+y6y87mI+995+Tx/tROCQl5RB+WXYdSc1y6oLsyNedqmXr4jS9cFHaRac5uQ1QhWjLKZm62Wqz+OQXH7tZeglgziXn6ElICNA0CEIAABOJB4CnLAAAgAElEQVQTQMCLz4weEIBACxEoqw6eXph7uybqaimqwqVLScDTS22WjpZFLmeV3P/C5l3mi57fjMTVVAtGWpYtbgT5aTxr1o+UFhEoAa/N3jdjVuSM2xpF4LlaVHpJrufkXHRR/CwEvA+c9snQyDvH0UU2vvRFh3q18YLNFdbX32cZEVT2nqqaMUMjkcxvgCCziwEr9OcZHRncA81S37SZfgfVus96rPC12tZADAp7+mKvt7vNRshOOBgXleYe91mr46l/l4QafSAAAQhMEEDAYydAAAIQaEBA7oN6CSqi6WVR9ezm2H+EuxdhXTuYHttMaai1uFUpJS1sXVXX7SHr+FhUalocQ4qq1XEqW1BUDb
yVtgbeuBW5465XLQHPH/kqUarWvej2j1eP0gp8EjCLaGkFPKXOfvD0T0Ueqj81+eLvfKluP380WBbptRKFynC/dROsShSuG0/Uez4YmZVHdGRwExT9rIy8eQMHFi22Jx1nrX6KWgszsJjmRD9ijY0Gxwo1n2k0Z/vPHLP14p4ssXAuCEAAAi1FAAGvpZabyUIAAnEJ5F0HzxXBV3Fq/cNbET7rrWjXKD027ct7XAZZH98skRqadxGilN8IQcKT1j9KXcOq1ZySuFC0S6l/b95yzUPm1l+s8GqCxRHwgi60esHX/ajzKIrFpck2ug88sc8K780i4NUzrag3R7+A9+H3vsPsufuuDR8LWaXXlp0mrnWd0zM94inrZ2LU88UVNLUOXZvmoAgtPVvyiMorM1o5Kjt3XNWem3HGH1XAdef0R2Vq/atgfKIvKRfb3xU0CEAAAhBIRgABLxk3ekEAAi1CQNE8y6xRQJbNCTZKydP/rpeWV++azfSyVGsOcV9CsmQf91x5jtXVA5QhRRThNjh2RWMomqEo44QwdnoxVh2mKIJX2LmSfn7hKbfEFvAUfeevM5hkLYoW8DRepVOu7JuaSheVWxoBz6XR/uWaaycvt/WOO5itd9qh5uXdFw76MG56bdl1FasWrZWGR1BUldgu9/I4Yne9/aX9KFFmxdqhqFuwtOPyfKbnPak0gnZVxDzq3+W9Szg/BCAw0wkg4M30FWZ+EIBAagIr1gx5wkSaNi2txdYn0rfhSYuNN1MUW5BbM6UAZz1Wv1Ck1OioEV619l7RaZth+79Wgf2wPll/vv7+QXPlOXdaES/amQ86YSfz6N0XmE6bAqvIJAlMSRydi47UKlPA26Gj3ey5xeKagPc+4Nlm7wP2ryvkKfpGP1HTayVYLLcp7GW1os1JwuaZVfkBJ6pmFZVVtWdRI45pRLCw9cnz8yy/uHN1Pbvsc69ttiLzJqK+k/57JM68t7Bp8dp/NAhAAAIQSEYAAS8ZN3pBAAItRCBNHTy9AOqFtUcvrZ5AMGYG7T+U07ZmjiJoJifarF5M/e6lTrRLIhT5903VUqmrEBEoEfGBO9ebK6yI16jpZfgl79rNLN1xTioR1V2j6LXIU8Ab32q+0c/o47efRNh5y31m5D/Lzfpb7jZ7b7u12Xu7revi3XrHR5uDjjum7udx0muzEqySPm+zFvCTjkP98jDUcGuh309KGY8bIenmE6U2W5q5Z9U3SxEsqzFFPU9evzf17NIXGCoboC8qh+y/TxRFnfb3U615Uf8u6mpzHAQgAIH6BBDw2B0QgAAEQgjopWbVuuipQZ7osynSRClKnmhnf7JIVWq2F6ZaaJvJBTCNMKN56qVIL0dx06Sj3JRVS1urQrqhX3BRTbxbrlk+BaWYPeUF25olj+o187fvyewlNc0+ibLWwWPyEvCGD3isFe8WTBvSxo02MtEWwx//78Pm+X0bzeJVAw2HHSbiuc6Om1xTJWwrEsj/nCw7Ak/7adSGc5aZFu5Y5f3c9BuQDI/GMz6oQvp8lPsob4ZRxpD0GDFW6+vPzygnaH6SVNCtN0dF/C2Z35UUAf0gAAEIQMASQMBjG0AAAhAIITBqv5VebtNoGzW/Y6VLjYxiRJAUftGCQdJx1utXdmRN1PkoYmPpguhpfH5DCl1D9eny3Adyfsy6RmNUNsHjik4jrTXOWlGAblyqNZjXeqQV1OIyz+J6Rx517JTL1hPvdNDYqBXWrJDVbtPtFvR0mSfdeG+oiNconbbW3vGn10owU0RYnHsvLsMox6vOoItIinJ8nsdkFQ0cNkZ/rTSXXhlmfNEsEeFV+JIhjH+9z4tmnIeYR/27pKtPPwhAAAKbCSDgsRsgAAEIRCBQqw5e0aKdf5h6ydpmcY95YGXjSJgIUyvlkGaq4RdFJHN7QSKEoi3T1DeMsyAa20O2RliW0Z1xru8/tgqisqtZJsFBkY+KoEpbazAKjywEtSjXccdkcb1bbr3dfPD0T3
mnbCTebRy30XdjY8bYuoLzuztNR1ub1+fAn94eOuTXnvXR0GP8B9z48GrPzbfdRkrKiXu2/fOxc+bFOkeWB0vA6x9MXqs0y7GUUY8vKOKs67dfSNio8mBrlrpyzexAW9azXv/WUK1E3Zed7W32eTo+6WYb9/cO9e+yfCJwLghAoFUJIOC16sozbwhAIBaBNeuHvX+0KgWnt6vdpsjO9kQT/Z0EmzzqxYQNsJlEsOBcio4mCGPZ6PN6nF0hcLkJFyES1RpjlfZAFVJ6JXIssKlmMrEYtimfirgr4t7MQlCLs0ezut4HTvuk+fvqZVbA26Pu5ceteDduX9rnd20W73TwTv962P6sbDjsqFF4N6xYZW6w4p2/OeFgo33QvmSHR5ilnZ2FC9VL5nca1UCVmFh2K7Men1uLeb3tnpizflM9V8clypccZfPT9Zvp946fVx71D5OsxzQzLvuMlcCt2nlhYh7175IQpw8EIACB6QQQ8NgVEIAABCIQGLEvcLMUgmKb6sIowqcIYaDR0BbOseldNhqiCvWZIiCcckizFD13L30blAq7KfLEb0ih9NiiRKJajCUw1IuKibsmWRxfVs0ytyYSF3RP9FnRpcj7s+gXbCfgrfj3OjN2x3qzceXwtOVr22Wuads1PHrtoz+/1PxhbKp45j+ZBLx5nR1e+mywhUXhRRHwLr7nfnN//3SnWRv4Y9rsFyZjmyLxdpjfa16+0/ZenbyiBLUqRZZVJZ03aEKiFGN9qVWmW3DUZ1eV1jPqmHVcXgYWccYQPNafah3FzZj6d2lo0xcCEIDAZgIIeOwGCEAAAhEJ6AWlqBfHKEPy6vlYB7k1VqxotlaFdMuozFzxcEUQKC1TYu6AIlAycBOOOoZ6x+mlflBRoBUYi8ZYZG1Df71BrYlEVjVFCa3smy5opWXdqH/cWolpx6K5d/yn3/T9dU3oqdr3XWxmb1G/cPw5/73J/GtgjVm1eq3d14NmcHCi3md3d5fp6e423VYUHVhnRcIaITZhtfDCBLx64p2uL6Zt9v/JRMK1nRbOMa/b/VGeOFuEcC3BZ8XaaqSoVzF6zD3HuzrazHp7/ykavVaKbegmLeCAZnagrXrqbxQxb56NjHZRpAUsN5eAAAQgMGMJIODN2KVlYhCAQNYEHlo9WGhUT9j4m9lRrxnGXrQhRdh61/q8zLS6WuMpQvDQumjezt3XHwGpfaVC6UULeGJRZPTh2B3rjPnXBi86LUprJOK99Y5rG56iv29dXQEvLI22kYB334YBc8m9D9S9tkQBudMGIymPeOS2ZscFczyh1qVzhpksRGFU65giBemwMSpdftW64Ur9DtKY9UWSan+O2MhXRVmpZe1eGsYmyufN8Dun3jyqKN7WG6srLaG94O5PRarPt89lCb40CEAAAhBIRwABLx0/ekMAAhUl8Me/3GbW2RfEJ+21q5k/tzeTUerlSREGVWp6wXxw1UBo/ZkqjdmNpayi3GEsnCFFb3e7re8z6tX30QtqGaJQ2FjLKGzfaEx5igz+1GWJdsHU8btuW23+fcdam87X5qW477TrQrPTbovCEGb2eVEC3vjDQ2b0+lWT6aVRJjBria1f99QlNQ8NE/BGhoZM38OrEkXgNTKxaBR9p4Eq4nVWDQHvKVssMk9Zutiby+Y6lO3esznrCLCi1jTKGhYhjkcZR/CY4JcIzvjCE/Xss9NffiDJ+bPq08wOtFX9XRm2NtoL+qJlrv1dKkMaGgQgAAEIpCeAgJeeIWeAAAQqROD+ZQ+b1739DPPkvXcz69b3G/33OR97q9lu6y1Sj1KGFTKzqFKrWg20OGyqNHZ/tJ0zpJAYoKzBKkdu6AVpjo1GW23F5Sq0rNdU6yLDGNVMVBRHLXdfCXfnnfWXyel7Dqa+tMsDD3+0OfDwHXLHk7aQ/4MPrDEPPrh2cpzbbLPAbLPtwmnjHvn9Sq/mnasP5z9g7dqptewWLNgsYNaLwrtq5d1GP41a38MrzfCm1F
r/cY1q4G2946PNQccdU/e059x6V8Nr1hPw1Omtu+80pa9L4ZN4O25v2izSa4uuaxi2QasUDegfa73afMG0Shlf5BUpGcZOn1c9DbXeHKq2D6OwDh5D/bsk1OgDAQhAoDYBBDx2BgQg0NQEbvvXvd74d3vMI70/z/j89z2x7qgXH+j996lnfNX787RTXp96nqrt8/DaifpQVWnN+lIiflUYu0v/6rCpl40MKar68lxmymite0CpXllE3LgoyFppsv7rnnfmzeau26fWgQsKeDpe0XhvevcTcr1tk+6Rm/58j7n5ponnWLBtbUW8ffZ55BQhb/iyB73DnIA3ODhg1q5dY4aGphtB6LgFCxban0WmnqnFnf2rzefuu7khm/HhYbNq+cNTjlm0qt88+cb/1u130LFHm613qi+cZing+Qfh6rKlTa/Nyuk3i01XdI3FOGPWPb+uv7FTr55TishTJJai8rIQWOOMUcc2Uxqqf27ud1RVvqSJy13H6wsYlTagQQACEIBAegIIeOkZcgYIQKAEAi7STsKdRLw3v+5F5vDnPc289X3neOKdIvDUdNyRr/+AufqHZ2eSSrvMpqv6gntKmPnUS1bRnS4qlLKcaP011Fy0XZiTb56poVF51Tquai/2aZ0y9bKql3w1Rew0WperL73bXH3pf6ZhkXAjkSDY8hbxkkTgXXH538wyX9Rdvb1w8CGPmxTx/ALehg39ZvnyZaFbqKur22y11Tam87Btah7rjCzqnUj3zODAoFm7YuXkIY0MLMLEO50kTMDTNdVquQkHI/Dq3Rt6xsghNUl6bZWiW8VisRXKVlTsCyRxj+vsGhRYtTZFmEPFHWfoTVXQAVX4oivtVLew9Rupf5eWIv0hAAEITBBAwGMnQAACTUlAkXbz580xx7/mcNNnU2VdnTv9vdopJ/zf5LyUUnvUS55n9n9a+gicqtXBq3J6Z9jGKtKJVkKXIkCUYqeXYaViShyqJQ7UGnfWqaFhbOJ8njTyK841oh6relgSuJUqF7UFTSlqpcnWOtdJR/+y5iVqReC5A9908t651cWLWy8tqnjnxi4Rb8t1bWbsjvXeX42MDNmU2/omEEE4EvG2f/0+NZmFReFpjXSvqB6eRLx60XdKm937gP0bRt65AUQR8OR8W+sLkygCnruOP5VTfxc1+ktfjnTb58Wa9eW7fFf5i5okwrX3AmKfyRJYXVRe3sYXSccZ9TmW13FV/t0Tdc5iTw28qLQ4DgIQgEBjAgh47BAIQKApCUiUe7cV6eZZg4prr7vJm8PhBz3d/Omvt5szPvc9L+LOtXO/+WPvfx7/2hemnquEibUbyn+h80+kWQtc6wVuq4XdZpl1982ruVRMiXeK9IgqDgXHoygIRXWFRerlNY9G561ScXu9kCtwSiYTYc2fJhtXUK0XfadrNhLw8ozCiyPgqd7dlVf8PQzRtM+PecMzjIvAu/fexnXrap389kc+YA457MU1r9tIxHMCnjo+pmeheeZtU9OW9fdb77hDJOHOXTzMxELXrCXg+U0s4gKMk15bJYOYqhowZFWfzUU7Kno2SbRk2D6ocgRj2Nib9fe7m5fWdOnCrrBp8jkEIAABCEQkgIAXERSHQQAC1SLgGVU8YXdz4823mhc+/xnmtjvv8YQ8CXcHvvwkr+adS6PNUsAbGhmrnBtpM39Dn8fLid+QQiYUSsVUfbuo0Xa1dnocYaroO6VK6x9FaNAxitSLmr5ci2e96LswAU+fn/31Z+eyRHEEvLjRd27AT7D18PZ8oNPWvFtt+mzdO7u9I7frH7rBXP/QjebcL/2gYZ+gqYUSWRU9s0PXAvP8JTuYnXuzcfa9z7qEX3Jv/QjCegLeEY/c1jxiTk/kedc60EV/Kb122D7Tawn7Vbrng06vqSafYeesIwOD0ZJD9rmdhfFFlOdShlgyO1VWAmlmA0pwIurfJYBGFwhAAAINCCDgsT0gAIGmJCBR7gv25/rLz51Mn3XC3QO27p
0+k/usnGhlZHHae95gnrzXrpnMtWp18Kr6chcFdpbik5fyZiPtnPGBhLusaitl/aIahU3UY7Iyjoh6vUbH1asbFkyTTbs2jQQ8f7RYrbHmKeCt7BuKJBR/7Su/TYRbzrSH7/EY89D195hVqyccZyWwRRHyLrjrEnPfhvvN29/xfrPLro8Nvb4i8v41sMZLOX/p9jvnkkraKApP15WjrER417br7TZHPmq70LFHPSAoGMmAZdCKRmp6rsrNuApRt1WNAM4zSlHPEj3P9TM2ns74Iklqf9Q9lOdxzSo8+pksntflOYnTIAABCEAgGwIIeNlw5CwQgEDBBGROIcFOEXdynVU79cyveVF3L7RmFt+5+BrzRxudp6bUWedSm8UwH7Yv6YroqkqTuDTHvmyu7BuuypAijyNtge4khhSRB+c7sGpur/45pGWYhEe9PsG6hptT42Z5abVpIyHddcMEvHErvNQTtfIS8OIYnSQR8LTXJTgd96ZnmTu+8Me6rrO11ua/6+8zF/77R95Hhxx6ZN002lp9vVTn7vxqwdUS8cZXb/Ci/pRCKwFvVneHecSS+ZmKd8G5BtNrO+y8lc5ZBQFP5jCDtmanExezvGfTnKsIYcwvsiZ1Fk5rrpOGUZq+VXq2J50H9e+SkqMfBCAAgdoEEPDYGRCAQNMSkGGFhDylyyrSTm6znzvtbZlF2tUDIyFiXX916uAVUUsur02S1IlWkQnOkELFz5VmlSZFNmx+VWZcpVS/iZSvTk+sS5sm22hNyhbwfnbFFeauO+40d9155+Qw99xjN/Os5x1kdnzMzmHbyUQV8LTvJGQpyk77W2KW6uCd/o6TzEt3OiL0OjrAL95VTcAbvG7C1favu84yNzy82ki427imf9q89v77g0Y/c0893HTsnl0EXi2AfnOFoVH7bJHwbP8ss0kcXr1+OLOI4qzmUrQwpi9SFJHnjC+0Nlojf5Rmrbk1qwNtlhHqWa15nPOoHumW1sCCBgEIQAAC2RFAwMuOJWeCAARKIHDuty41P7rqt+YRNgovK6fZsGlUsQ7elrZItCLw8hSxwrgk+TyOE61e3vTi5lJkvWLnBUZC5lGvLwmzYJ+quGW6aMheu0b9VlCViJfXfmxkYuGlXtaJwEtrYvGvO+4w533mszWXzaXu7rTzzub4E9/ecGnDBDyJSF4NOHsWby6+cEIJeFdcfpH5x69vMEc8+kUNr3O/TZm94N+XTOkfNYXWnTjrCDyJdoPXrZo27vH77jN/edSomTU+4bIrMxQ50Eq487fuI55seuxP3k3iycjoRtPRLvnUeHXy9MwJE4vyGFdVBagyhUV/xKRbm3pCa5WcuuPsj6r+zok6B+rfRSXFcRCAAASiE0DAi86KIyEAAQhMEnhg5UClaKgO2oAVTYoUtLIAEBbZ5jek0PWyTMOMO/6qRkPEEUHjzjnK8S6FW2ul9Vk0t9PkfX/cddtqc95Zf6k5vHrmBzr4TSfvbXbaLZkJQyPxTuf2194LE/HqmVhMCndWuKolfsrEYp8nPsoT8H525SVeRNa+W/2PNXXYzmw/9xGTPGRY8d/195v7++/3xuUi+CSIhZlYBKGmEYg33nK/d7pZe25nRu/t94S70XunPzvHHrSutoOboprH1pvZg/8yEuzr1bAsIhLPnxLtUsG7bFqt6jdmYawQ5d5yx1RVgKqCA7Y/YlJO4YrI7h8anRRam9UIolnH7d/X+l2gaHkaBCAAAQhkRwABLzuWnAkCEGghAlWrg5c0FbUKS6bowRVrh6ZEtnhRP/Yf/kqXUtRLLZfIosdeVZFUL3qLrYArhkU1v7Aqoclf/D+OG2ua8Z535s3mrtut8BNos/VGb2PXJFb5W9rou3ce/+aGww1G/jUS8W768z3m5pvunTyf0mQVcaborkZRiwcf8jizzbYLPVHuzce9IlZKpfq85MUvMwfbGnhxBKi4hfQl2o3/8E9m4y1THWbHlg2Zkd12NuNbLPF+XNs4MG
zGl62dytaKeJ0jdzWc36LvHp9m+4T2rSVOBcWidf35p9fqmksXdJvlawZDx1zkAWFfvhQ5FnetzTU3J+oX6qft/9n7EgC5qjLrr7q6eu90d/aE7CshhLAqSNgRUEQQRDZZVHRmHGVwRn4ZcXRAUQR13EYcNzZZRFRA1qAou7InIUD2PelO73tXd1X3f8+r3OpXr95y31b12nx35hnSddfz7nude+p83xFEcLnIE9vRE520FyrYuH3uVPosdB38bi+Ns4FFoXHn8RgBRuAfGwEm8P6x7y+vjhFgBEJCoLN3SDsER6UUW4XlBwepbANxIXPb4b+LGbJmtp4ou/0i1KqxPfwDvtFN1ixM1o2Zg599g7ZmJJ4ZgeeXvEPOu5WPPuaKwEPlf7n632jBokWm7R57ZDXtberSjClANiJU1q5I9R3q4D407txAX7v+v1xBeOvP7iWQ/VXlIsxZqJRUiDw3TqMg79JfeThvTuk2YbAzOLq+5IqjsyRejvpO17I03UTDA7nhs/qOww6ltXumcM/w5QK+ZEAxqr5c3RSHysUg6FXmH9V5Ye5G4wvkyesQOQSLEf6sgqVZnbFuYMH577zeeW7HCDACjIA9Akzg8Q5hBBgBRsADAlHLgxdFNYQqrFC2JYRKAgUupWHmTlOdk1k9P6GEfsZVaasnG7rv/JvWpOfOv2t/li0/QFwzqPayo1W6Mq2jNw3B/bHLBVboUGNjPjwo2cQRXpBiGcLotLPniGuu57WjoZP6DnXMcu+dduYH6fQzz8wZW5/L8Y67X6Xdu/NVhMbJTp1WJ9xjD8n+GGPBQOArX/sKbVifcdt2Kvrcd3olmZPCFfceh3Hcd7tiRd6NiJDG4bZ89ZMk8dJbmk27xRxjPeZh0mhQumQ61V53jtOyPX+uSopLtXBZIq5MirqZlFSVtXdHy2U8yu9DPb74/YJ3AhxsEWJbCNWkm/trVbfQ79Eg5qzvA+R2vQih5cIIMAKMACMQLAJM4AWLJ/fGCDAC+wkCUMsUQvHkBs5iJhR3M0/U1Su5ZMggTDiiXKKscsS9b3phM7Xf9hINrsrkHTMrNZe9V5nIwz2qKCvRFFsgVlXDmHFgLoZzJ4g8lIQIv4YKDyS7X+JOYuiVwEP77/7kf7VujLkC+4UpAopVPjw5tl55J38mCTw8M+vXvU3f/97XLe/5wkVL6Av/8VXLzyU5iwpm5IYqgZc651bTMYZ7RD6ynsxajaX/nDPJjsCjvo1ZUwuz9mGF0XrJPxZWeG1UQynHStoGqQgGoS9VkyDzipHL0PIhNPlgrBtYcP47N3eb6zICjAAjoI4AE3jqWHFNRoARYARyEGjuEDmdxDf6USn11QlCqJAkBqIyLz3pAAdZOMnisCsNKTQyr6pUc9GNcolqLipgVrGhibZ+9j6lEDEo8iZ896OWUBvdft0qIqEMSwrCr1j7MAzCww+B95Nf/pQQfg2i2k79g7x4xgLDCrOiJ/Dk54/+8YG8qgsXHUSLFh+k9FhJghqV9fNUIfCG73tFy3tnVuwIPOTES5abm4poqQyTjRQbarScf5gEnp+8kpKsxcT9hteCKIOCzEkBqXSTA6yEEE/8/ivWc666FDMlJZ4f7Gv8LsIa8IUDfndGJcTWC4Gsikeh6nH+u0IhzeMwAozA/oYAE3j72x3n9TICjEBgCEQtD15UFREyxKxKHJb6RN5AMyWXarhaYDfPY0dRnGdy1U7qvOb3Wsio6gHUTIlnDJP1ejAHWYV0bsXKEantt4p4oEnrVQm8EXEDZCo7zVFW/M//3fZ/gYeFmxF4Hrd0XjNJ5GGM3oGMcs6JQLJS36GtHYGHz3uWv8d06sUk8IIKWw0ivDaqRFlUTX30m0mFCJP7Hao8mXd1UJB5xSxhfAlRyPXgfTF1fGUhh+SxGAFGgBHYbxBgAm+/udW8UEaAEQgaASgr2roL5/zpNH8cROqECq+QbqRWc9K7lIJUQrgSwjCtHDbHSvhvFOfZ+h8PUG
qNcPzUkUdOewWfT/vTv2mhzCB+ESrrJkzWrv9iK4aCIl/0a1Qh8KR5Rkzk3wP5hH0/Z8EC+uwXrs6Bq2nTSmra/FQehNUN82jKvNOoZvx8x9sXJoEnB5d7A7msnEwAfBF47z1WyNTMHUJjg9YKvDBNLILO7+YnvBaK1gERbj0g3p9RKnDpbe1K2romF3u+bu6jvEcweEkPD/tWTvpZ+1g3sOD8d37uPrdlBBgBRsAeASbweIcwAowAI+ARgZQIvdkrwmijVKZPqKTdrf1FmxKUA8gzBFMKN4YUY0HNAVCLld/N7obuOfUHmkIKrJGTk6nsB4fVyZ8+lqZ+5lhX90llY7lxLVXpz22dMHIV/uR/vk+bNmywnQoIL9wGKCGlCk/vQtvTtkkQdyupt32zbT9T5r2fpsw/zXEsEDuFCDuHohLPdExsGivnWj8EXt/p76fhxk7T9Zb0WptYhBU+i4mEuYfl/gThC8UXMLVTzsLMAGrvVNrepdjtc+K3fhTVyMY1ef0yAfcIex7XYErcI6FELaQqb6wbWOCLRGDPhRFgBBgBRiB4BJjAC1vE04sAACAASURBVB5T7pERYAT2IwSilgevGP/wNxpS4FDqNvwSJAFK1PI8Gbdy1JQRcJyF2ywIORACVgpHPXGH+4UCV9qKS8zDF/08wmu7f0U4AGMfoEypOIymVhzuqcs37s11aJ16cAVNW1ZRcHJr4/r1dOv3f5A3LpAE7priTvy3PoR2/sKFOeq71U9do4yBE4lXCAWenCyezZRgJOFWi0M58oYZnWvtCDz0k260/qJDM7LYI+6zQYUXG+6hWP9GU8zCVN9hQK/Ej/INFhX1JBFIPLwzzZ7fKCrdopwPVH8P/ObjxDqhyINCOV6Cd1pKSw2gmqrAzX7Q1x3rBhYThakS9jcXRoARYAQYgeARYAIveEy5R0aAEdiPEOjoGcwSFVFYdqEIJhxspKMfyARJ2jkRSFYYIdSpUhAD7d3RNrKIWp5BSeAB17hQPaYtVDqaOmxfWKfMlefGkVZlb6/q+CWt7vxVhkwU4xnn8v4pP1Im8kDcvXFfLnkn5wAS77AL6y2JPJW8VyrrMdbRk3iSMAWBl96Xe1CG0Jqp7za9equj8s443rwj/tkynLYYBJ6elDc61/Zf+wcaeUuEcVsUqzx46YnjaXDFMVorI4kXT26kkVRPXo+lS6ZT7XXneLmFym0KmcfRKbw2iko3GN1AZVUIBajyTTOpGGTKA0m4IjwUxhd2hjR+5hzW+8vPnNy05fx3btDiuowAI8AIuEeACTz3mHELRoARYASyCEQtD17YRJhMyg7yzqjC8bMtcCBsqCmLRP4+u3WEEZ7pBzc9gQcM9WF2WZJJsEwIrZXEkhwvSAJvZePnqCn5RnYpVmTilPLD6LSpP7Zd8mPXNVLjWwOOsHzgG1MtSTyolvZ2OPfhOIihwu5tm+kHt3xPU9sBU31QY4bAy4TQGkNnN7/2U7dDEXLizT/yX0zbqRB4nbF3qatkHY0bXkx1Iwe6Hl82kAo8M1WtfB7673qZum9/yVaZlG4T5PxgbhhocsXRNDxxQnZuw+29NNLRRyUDGyk+0punSKu57mxKLDnA81pUG/pVbqmOY6ynNxCRX4pMGFceyl72Oke0GysmC2Eo2fCYlwt1WbVQpsL4Avld8bswqBDnsYKt1f7Bvw3g4MyFEWAEGAFGIBwEmMALB1fulRFgBPYTBKKWBy8MIkxvSIHbijBXO0MKr7e+2Pn7VOYdBr4q41rVgQNt23/8Tvs4IQ6VQ8I9UXM/FTKILMlkkTorKAJPT96N66ygcV2VmgKvo6aPuurySTQ7JZ4qeSfxsCLxglQt6fc/9v2bq96mx/74aF5OPChPPnT2WXTCaWfk3C4r0wqV+37I+28xrWZF4IG02xF/WCPujAVE3sz0h12TeXYEnhwDxFPqKw/R8JpdpmSxrKdX4unVd/q51lx8AMXefIfwrMkwbKjuCkHcyXkU2z
hCH16LfdUizCKCIohU9p1TnUKEGDvNwenzQijZjOpJfKHnlNPQad6FUtE7zcPr55z/zity3I4RYAQYATUEmMBTw4lrMQKMACNgiUBT+4Bj7rFCwheU6kBz8NuXyBsKAygNwjxEBhnuFCbeUSMaYWKBAsIDBfmZVEKZx3/3PCpfPsMXVDJsdsaOBpqxc3y2LxgeIB8cys4ZbbRzZnvOOJfOfiFv3D1rBujxrzS6ns8nH5qT1yYIAk/mdpRqQhDXdrhCOVMq2BZjHkc/BJ5VLjwzAg/k3dqEOeGnB2jp0DWuSDw3ajTkwpPh2pqZh4Vx6sicyZR8z3sptT1juFM6q1K7KlZk1HhWWLreHB4bFCOXqNlUQeQBf6g7wwzbdAuTmz3htu+g6oetRjfOU6onocoD8YzfmV6ML8bK70Gr+8T574LawdwPI8AIMALmCDCBxzuDEWAEGAGfCLSJvG34x3pUip/DZxCGFF5xGCtOtDhg4Z6rkGResVBth0Nq/90vU8svX9SaYE4qCdbLlh9AE777UdVhLOs9tPoUOujt/JBGPYGHxl3j+untpaM50g6p+yQtr/9UTr9u1XeyMfLhHXZRfU5ffkJo9c+AG7WpFenkxrzCCLQqgadK3sn+3ZB4bsmatFDiIR+eyPmvEU9GIi928HQqufBIih1sHQpbbAIvKs+4PpxSTxDhy5RCmClYPfh4V3f3Rc8ZVz/fYuUrlaq8SvHlF/a+W1VeUF/A+X65e+iA8995AI2bMAKMACPgEgEm8FwCxtUZAUaAETAigG/bYWYRlYIQHKif3Di6yqT0IC8QJuikNgpjrWPFidYPQRoEbvqQThB22H87P/cbSq/ZrR0YVQi8INR3WMuG+y5TXpKexDPLhfers7cq96WvCFOLD944NY/AaxVhh25IVpnfsUyQotj/bp2Ui6nAe7EslwxVAfJ9g79UqaYpwAagKBLvBdUy8tYujcQbvu/VLJFXffnRNLBwCqWXTHPsptjvgqg4v5qFqkJpm3FGzeQh9ar0crwJNhWigo/dGtwSz37wsGqrD4WGgrIX6SdEmgOrErUUDW4xKRfvTuRs5MIIMAKMACMQHgJM4IWHLffMCDAC+wkC+Ad5S2cyMqtVDR3CYaFGOL+CsBgS7qX9SMbt4pAe9IJV5x30uG77A0GKw5hbgsftOMb6UhkmD+5GkrXjmt9R8s2deWYVxn6CIu+6nv01Ne1e6WpZehLPGEbrlcDDBIxhtCBZO3qGlAg8vaOqNA5wtah9la2Sz/sJoVXJgbcj/pCW985tQT68memzHZuBCOkb8BYOKDuHKgmkU3VFXLsnTg6eKnn3HCfuo0IQIdg+hs82tXvXSKUXXFFVMA1iPrKPqOBjt6YohaLK/V9RVpI1vjBTUI51AwvOfxfkU8Z9MQKMACNgjgATeLwzGAFGgBEIAIHGtn5H4iSAYZS6yCTvLhPOhfmkYqEMKZQmaqg0VtQHhU7grldH2oV0Yl4dt72UDac13oOgTCtkv00PfIW6Uttd3+q/HbNJaxM2gdfZax/iB1xBFEFxCuLOS74q/eK1nJGiTxCH+tLTtonCdKH1or6T81NR4QVB4OnxkKGg+JkVkVcskjxqBJWq+rGQ4bWFMIdw/VIxaRDVUFSpygPxasxrONYNLDj/XRA7l/tgBBgBRsAeASbweIcwAowAIxAAAlHLgze5vpxau0bztMnwQKneCoKwCAC2vC6iZhBhtsZCKAX1RKuqOtJIIHXf+Tdt+mXCqMKvWYURh/53niNcrYPvut4GMLUYWjCHTpv645y2QSvwzMgho6NskKHi2jMmFGZGAg+L3PTqrdTbvlkJq0ES/xcbotdWzKXuiXU0Kz2NZqenZ/9EJ3oTi7FG4EkQJOmEPHnI6YZQUBn+XczwR6ilJtVViC9A8h2UlW5ggJXwRYwTEa0fDl+C4B0PtWNY4bWFeP/5hXAskIzYZ+XinVEtvkSA8QWeAZB6+LdEmGZRfrG1ao/8dw
ithgM5F0aAEWAEGIHwEGACLzxsuWdGgBHYjxBAOAwOWlEpSDI+OJQmmAnIECvpjKeSI61Y63B7YC3GPHFIRqgQCNKgiz6s2W0uQkmIhDEv4zr9EnjjD/5InomFVwLPzMTCqBqT4ccJcWAeEiHvQRJ3ekLKisBTJfHaSjo08m7d+5Zq5J2xgMz7eP9Z/xAEXg5ugrgA8STNGfB8+Q3b9fpsYq+MF+/P5gikRfCaa06GbOLdj7yYTiHLbrCCchWEU5cwsYhqGQskox47GQ4NBV5S/N52a3wRhfvA+e+icBd4DowAI7A/IMAE3v5wl3mNjAAjEDoCUcqDJ0MDsWi3JFDoQDkMMFacaIPOAWUMk9WrkVTvSSGJB0ng9aVbqF9cbgoUeCeteCSvyRv3dtAb93W46Uqra8x/h5/J0EOoFxEmK40p8Dy4MbZwMxk7BZ7sx06J1xhv1qpZkXeyD5B4lyc/rK0RZK2dAq+3u516u/Mxra6tp+raBlIJoS2UactoTrdSsdQRTcnoN6zZzf2TdcMk6N3OJ4j3TNDhtXieRBpDzQU3qqVYDrR+8JApJPBFYG1VRpUnv3QrxnPgdi21gnyU5jNu23J9RoARYAQYAXUEmMBTx4prMgKMACNgi0Ax8+BJhRGIChAUQ6kRSpTGQlGJhbkNxsrBK4j8SrhnWC8SmweVhy2IA7/K/ZUEHup2Dm2n1EifSjOtzpRll9C4paeb1n/sukZqfEs9dNFMfYeO64WCC2o7kEJeHGWVF6OrqKqARE683vZN1LT5qWxrKO+2HjiVdi+eqTT0nOHp9PnSc7Xn+63Sm6mrZF1Ou8FkP3W0Ntr21b+rjFbUfY9mzjnItp4bQxDZ0ZZ1PbRVXH/5Y1NO33MWVdNJH55KcxfXWI6JezZROFlCPQwiG0RRWKSr2SRUiFilm+SzUtChvHjfZL4oQN7HtOe8j6p5+Xwu31fzYoZge5248XefJLQrhTIVKsqk+PLBzPjC63hBt+P8d0Ejyv0xAowAI2COABN4vDMYAUaAEQgIgZaupEbEFKrIfF5wksU/9nuFUyQOvDjs4u9TRD6axnZ1MqRQ87YbR5UEKfZc/aiSZD5CkK1BKyQLReANNW+j7ufuzt4GVRJvXOksGn/CpykxabblLVQl8aYeXEEfvHFqTj8gKOTzAGw7ChjW7lW59VzZa4TLbfl8/Fyq7ZwoHGhzXWhVyDuMtfvB8TSwu5zOv+w6WxLPLYH3l4cb84g749pA5H3ymgWWS5ah9OWJuLJzrVVnewfeobVdv6fmZH6+xqXjPkJL687N20MIBzTLZej2HvmpH5aiVu+Iivn19rtzH4+Su6sVvmNhjsa525GO0vgCYebpYZECoC9VFHWq3X7G7x7Of+fniee2jAAjwAioIcAEnhpOXIsRYAQYAUcECpUHTxJAVYK46xPqFCtDirF4iJHqmyjkn7K74V5UKPrQ5rBUYbjnSIJeCMVS17O/plTLqAutXThtaayKquITqXLygTTu+I87PktO4bRG5Z3RURYEDApwLlTRG0u4GfPXlX+k7fE9bppodRfHZ9B5nR/U/luvwtu7e4tjX1Df7XloQrbev391lIw1NnZD4P3qlo20dX2v4/ioYEfiGXO/qTjXmg26tvP3grz7g+18JpUfSCdNvi5bB3upVKjVCrl3zCaINcPgoF08z2EVL+G1uDfNnQNZw5Gw5uan3yAU0n7G99JW5fe1nnyVxhdRUOVx/jsvd5zbMAKMACPgDQEm8Lzhxq0YAUaAEchDAMmnwzIQ0LtnwoQCyd6d8nmNlXxyRiDHwuFL5vpxOuTrQ5uDCpO1e/T8KAPdPtJGFZ5sDyIPBWaEyJWVEORdoqRK+1ntcZfYqu+McwCRZyyHXVSv/QjYIkk/CBejkrEYJIxXAu+bNT9zC71WPyHMVK7p/LT2352xd2lt4haR7848551+ACN5h8+OOeFccZ1nOg9VAg9hs7d9Z5OrtZx01hQtpNZYrEgiPZ
GHLy76xWVV/rL3RlPVnVl9PYkXlRxv2MOFUgLq8w8OpUWopiC+zfKujQVl91hwoDXuQS+44lmAihuKY3nPksKgpxgmVQj/hfEMF0aAEWAEGIHwEWACL3yMeQRGgBHYjxDY3dof6Gqlmx4O6yApQNylRGJ+lTJW8skZ11JIEkoFR7M6uC8V4oBtFWanD5OFo2Ch8nh5UQZ6xQDtrEg8fIaQUv1edUveWc3LSIqaOcoWwymzmASexOreVWdQ5QHWii0z8g5tZ8xeQh+7/CumkKsSeF/99CpXW2mIBkgEb9J7fvnrnHYHjZxFB9YuE+HPKWor3Ugbyx/P+XxB8gO0NH2mRt7qnWv1xAXCZv/a/E1X85EkHpxAQYjYkYOuOvZYGe9vkOBOXxJ47N60mTG81uhcHlZYb5BrGGsOtPJd2VDjzfkY9wxqvGIaX4yvLddyuXJhBBgBRoARCB8BJvDCx5hHYAQYgf0IgSDy4BkNKZyUJlbwjpV8csb54wCNENAouxyaYYuDFAgFKHgwf6/3zc/jUgz1kBWJpyfwgiDv9M8FSA07BWoxjAi8Kn+CUODJPfO9Gy6hiulJjcRrOKonu5XaX6khkHfIeWdVrMJojeGsZu3dqu+ExYag7jq1rmZ/eB3NPnt9TrfDVR0UE+xVLeWr82TF9/R+niYOL9SMYGDM0JdMZZP8u1Hf6Qc+cdKXadHEQ2hAKPsGCpjP1AzTYhOJZuG15UL1ZffFhZ93V1Btx+IXV0HNWSoppSoPXx7huQhblcf574LavdwPI8AIMALOCDCB54wR12AEGAFGQBmBTpE03wvxJMkfqEpABASl2po+oZKCVgUqg+GxYlCHGY/DKzXTO0QaFWFuVJJKg7moVAzlmZwenGn1BYqQ/qrprkJmzZYqiQTgrJo7sBD5w4xz9eoaGjSB52K75FS1I/Cccp6pGFfIwbqpUSjvktmxjQReqrJdyDdBOoxQKZU7kngT0gs10x5J5MHI5xfrLvQEA0wtVsy40DKE1FOnHhsVWk1rNU09KaQ5nAt1YrENPuwgHYsOtGHMWb4Doc7DM4HLLCza4/bMNkP/k+qtvxjw2z+3ZwQYAUaAEchFgAk83hGMACPACASIgNs8eDLUEsot/APbypDC6xTHQjiqcW1jRTkI1QFyDiG82UkR5vX+uW2nJ66Gd/XTSNcQjXSPGjmUHFBJsVrhWjwu/HxFfg01pDGFFzVjsfYQ1Gp7O9w5P3txoRV8FX0ocTSdmD4yx5ESCjyvxY7Ac1qTkcAbFuQbIv2Nwf4lsRHqju3ImaKewEuX9dKwuGKCNQKBh1JBdVRJmbyHZgVKPJB4KCCb1vc+SGs6fq+NPSwIJzfqIxB4x8+8UCOoVI1gUlu3UWpbxsyl4oTjvMKf10468aqmTAhsYIuOgO342jLxBVOJ5oRaKHWX23WpmEG47TPs+mHOWR8WXSL+0i/+nRGk8QXnvwt7d3D/jAAjwAjkIsAEHu8IRoARYAQCRAAHxsZ2+wO83pACQ4dJ/oyFcFQj/F4Segd4C2270t87HIZwECpkjiqndSJstXqIqPXV1mzVtt7ufHJgagPFl4xz6s7X514JCKOjrBfViNd8dL4WLBp7IfC2xXfT3ZWPKA+NtSE32i2xzwuiaVDLfaW9R/pS9Otf3kA7t72j3JesaJcDT2VNegJvCKSZxQyEBkj7ZDDWRsOxzH/rCbyhmr3az/QEHv7eQLMt1zQ+tYDe23dV9nPpPIv3SAmAEkWVyEMevAsWfV2YESUdCbyeO35N6X3EnX5y8dmzNCKvdI71nFVukErosko/QdaRxkgwp5E516A4DpIQ8jvfsWCCpF9jIX/f4YsNfFkIpT+UlHhneHm/6ueP3H14Z3NhBBgBRoARKAwCTOAVBmcehRFgBPYjBJo7kto/jo1FMz4Q/3gOS21nBjH+YY3x2rutE9tH8dZE7RAmw2TlvQNpN0647lm5NRYNU6jt1nVTSpywQdy19Y3mQTPOqW
p8Fc06Zm5oU8VhXxUfO0dZLxMsVrJ9FbLLbD0qJJ4k7qAM+/jAWXR49dys2k8qDp/50wP01BO/sSTQrLA0utBueLg3W3WcIAi7xEF//OIETVhcZtqFzIE3CGbHoozQsPi/UTWoJPEkgSfVd2bNa2gKJYQWz6roVXiSwJN13RB5UOCdNPtiWxUlFHe9d97tuC1B5NVc/nHHelYV8A50+jLIc+ceGxpJRWPOtSAIIY9T05p5zUPpZ0y/bfHswsG1uXM0rNxvn07t9ao8hMD6IWE5/50T2vw5I8AIMALBIsAEXrB4cm+MACPACJA+D56ZIQVCZd2EdfmBFIosr+52fsb12zYqob8gQGVeQqNSshiGEXa4Ilw2/W63FtK7tbWF+oecSdswSTyVHF765yOovI8SI1UyraS1k0rauiixYafWdGjhDO3P1MKZrrexn5yTVqG0euIO/Ngl/R+i2enppmo/kAH33/4N2r71bU1BpuJXrVffta4bpJdv6chZt96MBCTegg9XmxJ5P75pA+3cMEr8GcEbJvHeE5e+gMRb8avfaT+yI/CcwmjhTLtQXLLcv+PSvHsniTxo8hDiK6JA8woIvFPmXGxJnKmSd7JjrySe13yKrjesywZWpKI+hyu6LFZ47Vh0oC12zlc83/hiShpf4EsXpIZQ+TdKqVC4ThZEMxdGgBFgBBiBwiHABF7hsOaRGAFGYD9BAIeX/sFUlviBWybIH9WcSkHDFDU1m8r6ihn6qw+T1dxwxb0zc6QspmGEGYbpd7q0fHe7O1upb9CZvJN9PJ/YTleeNkp+qNwflTogOKEE7BeOnsZidJQNg9R2IvBA3JUK0i4uyDurAjLPDZHnVzUFJR6IvO3xPZqaCBGg2IMg7malp9Fxg0do5B2K1fp2CPLugTtv1NqjOBF55192Hc2ccxD9/ZZ2alsn4q8NRU/gyY/ec019DomXFAnv3lzbRX/4wRZLLM0IvIVnNVPDuQ9m5rkv/51ZB24JPCcXWoTWAh4jkXfBrLtoUp11HsPOG76psvVz6lRfdonrcNpiKUjtFqdKKso8nHCs9aPscg20aFBsMszLnMMwsPAyD7TROw8jH6+T8QXnv/OKNLdjBBgBRsA7AkzgeceOWzICjAAjYIkAEkX3i9xAZsRPoWGLiprNzbqLcRAzC5O1I12LZZRghWPq5TZBHCdpV2ebG6hpZfe7dNLxR9Ky6f5ydhkHBYGHos8R6MVR1tVidJXt1HAg78r//rZS1+nx42jw6KVKdZ1IQ6dO9MTmH/pf1Ig7FBB3xmI3Fki8l575Pe0S+fCsiDwo7xA6C/LOTHknx0uUxmgola/l+8AvJmen1IUvKUSd3/9gM+2yUOEZCbwpCxN0xtV19E7Vt7R+giTw9g68Q39tdibbhB+DIPJiGs5Las+hQ+rP04wazMIZB555jpLiclu8qPCK4aLstC4QuQj1bO1S+3KgGOG1+OIH6SvMvjRwWl+xPo9irkPjvbNSVHL+u2LtGh6XEWAE9mcEmMDbn+8+r50RYARCQ8AqD15oA9p0XEw1m9f1FpIcswuTtZt/1FQyIPCc8t6ZrWdTsoWeL9tO3/pQftih1/uHdnqFot5R1krR6Gcss7ZWBJcb8k72q6rE80rgmRGbf3i9P2dZB04rpSXTRt2DVcZ66Znf0Y6t7+QQeQcI4m76rAMFeXdetv/Hr8yYR5gVMwUe6iGUdqG4UJqFWkcIj7Xy98ea6OXH8vvT58CT5B3qbyu/m/ri230RePoceHINqiQe6k+rOojOX3D9PrXjiClJ5ZXAQ/91X/2yq+2N56Vc5EyFG25Uip85IbS1eh+hH2Z4rVfjnGJhXEgDC69rlGQycuVBkadX5U2uL6dS8XMujAAjwAgwAoVDgAm8wmHNIzECjMB+hADcIRGCEoUyFvMChX2wAflWUSYOlRWlQrGRCZP14sbnJ+eZfm9sacqEO27ZmzkMzZ08TPXVIyJ/odoOGt7VT7g2Nu9Ra2
Co9cXdD9Ijn77OU1urRjjwI68S7iXCyBFOlxJYF6qA4GruHMjL5VT2t7W2YbNW8+v/4DGOU1ch1fSdSOIOP8P7AsohEHdG8k62AYn3kcMrNSLP7VjoQz+eNByAYcVGnWmFcZFWBB7qSRXerv7cd93ODT2aEs9I5E1cOEKHfrCKpi4aJSKbE89RS+J5bVjpQmucgxsXWn1bFRIPzrMnTc7sfahGM++EfIdOL+Gzci5uw2gxB4T4RsnhOog5Yf8hp2hZIi72e0rb70Gmlhhr6SIK+UWV48vLoYLe+CIupKtJQeZJUtZv39yeEWAEGAFGQB0BJvDUseKajAAj8A+MwENPPE/vbNxOpxx3BB21fLHvlUJl0NZdOFc5uwlnnPnKhLNiNOajCu6kunJq6UoqJdNW7XP0AFmikUp+cxNijm3C4dfrIRTEnSTtzNYAEg9knhORJw0s3BB46e1NlN7RpA37QOebdMP/flsVRtt62G847IMgRUHInVd8/EwIex4KJuPYlY+95KlbFRWeajicGZGGSX3z0S56d8+oU6vVRP/zzFo6YUmtrVuq3SL14792bwe98wdrt2I7Ak/mwtMr8OzG7acOGoh15lSRCjz80CyM1in/nZn6zjgHozMtPodhxaTyJTS5Ykm2ulSZ9Q2kqVa476JIxWghCbwohoIGmastjPBa1Rx9nh7+kBoVI1VEEEvB+6NGkN0wv+DCCDACjAAjUFgEmMArLN48GiPACEQMga6ePvrolV+low49kI46bAn95LY/0KXnn06Xnvd+XzNNCQVHlAgzKBOQ16kYRIpXIIPM3adXg4G0CypHkp85vr5ZhMj1ZpR3TuWwuWlHEg8htCoE3uALq2noxTXZIbvSA9Q1PEALJk6lqoNn0+QLT6DqZXOcppT3uT5/G/AdEk6GUGio5sxyPaBDA7NwutINO7Jus27HUyHwrEhDOZYVcYfPVck72dfNF0ygqbX+FI2Yz6vf7aDGtwc0V1Yz50k7Ak+G0coceCqYdlMjpWKjXybIHHho27l1NrXvmSj+S64rRgfM66aG+TtMux6fWkDv7btKZVilOnhPwFlTKt/0oc27b72dejdYm3TYDeA2hFbFwVlpQQFWahC5Abv7hgJX0crwWuQihAoVyjwVB1Tj0qKYN9AJ/iBJUaexgv4c+RBBQHJhBBgBRoARKCwCTOAVFm8ejRFgBCKAwNMvvEEnH3uYNhP895+ff51u/NKntL/vamyhT1x9E932/WvpgKk4SHovTe0DkSHMcPiKiqmGKqJ+c/fpCSWo7XA49BImazdfr3N0Ut6ZjXnyMvuQbLjQbthsTnTI/vrve4qGd+TmJ2tO9VByJKUReLLM+cZlyiQeCB6EypaJPFd6ctSLIqZpZCetHv4b7RV/yjI5NoOmiOuQkqNVt45Wz4xc9UPgqZhZWBF4dsQd5moXNmu1aOD+q080uMLErDLcZ9vXDxEMHVCMRJ4KgQcX2haZBE9hRpLE6y3ZRtsr7tGIBwLv1AAAIABJREFUux1/PYG6ts3RWo/ExF4X3HacMqG2DfO207xTn88h8oIm7zAOQmhhaNErwr31Be+S9PPPU+/Tz2aca13ypm4JvLDIMoVbY1lFVV3qdQw8I1B04UKeNRB5bkLux6KaLWxMvd4LlXac/04FJa7DCDACjEDwCDCBFzym3CMjwAhEGIEHn3yBrvvWz+l2QdBBdYe/P/3ca/TDb4yqOK676Rf0HvHZ2Wes8LUShFbiIBKFYuYIGoV52c3Ba+6+oMNk7eboNS/U02vchx4hlHbuFGvmAGG0za/tprY+83BIM/IuOZyi5nQPVSbK6IC68TlLdSLxpHIG5IaVqhHKz0ZBZKuUp9IP5BB3Zm1OjX9UI/NUCtQtCIXUk7Z+CDwvCjwn4k6u47JfuHMORjsQa2ctr9By4vkp+hx4IF2NRJ5KCC3GVw2jlXMdogHaW/Eord6xntbecXl2CQmqoEqq01RYAyVdlNap9Y74zD
00f3Y5LUh+gCakF/pZtmlbu9DV1NZt1Hvn3VpuuoxzrRqRV37CcVQhLjclisSOm2fZzVqNdb2G10Yx7NgOh7DzvPq5B05t8QxMHe/vveM0Bn/OCDACjAAjYI4AE3i8MxgBRmC/QuCmH99D74pcd1DX3Xjtldp/X/WVH9LK+76TxQGqvLt++6SmwvNToPiCmUUUylhKli3xAnGAMB3VEEzpdIqDPwwTggqTtbt/XsK2vKjv5BxUVXg4HOrD0JDvbuA3f8pbilTfgbwDiWcsSx/6at7P9I6y0gjBCiPVQ//dqe8rPyaqJJ5ZGKIXB1o5MTcEHtSIIM0Rsu6EEfr3SuAtmBynL585Thk7s4qt6wbp5Vs6cj6SRJ4M8E5ZfA8hTSxkY6OZhd3EJoociXt2xOjOO3NdLEvADtiQY5ddNkxz5vhasmVjp9DVnjt+Telt27X2ksgbEeG+mmrRole36jt0o/rchINCfq+ZPKrlnnMuep2nPoQZv0+hjLQKrx1rDrRj8XeyvI8wIqmvyf994fU+cztGgBFgBBgBdQSYwFPHimsyAozAPwAC54p8dz8SarvzxJ9/e+Qn2opOu/CLdO3nL8mG1SIvHn4mP/e6bCh/WkTeuSiUsfptv9NBthBhsnb3DyRjgzjIIL+gavFD4Knkwuvd0kE739mVc9C1C501U9/JtUwS+fAmX3QCAWdJSrkx/7BygtVjpaK8M2J7SenVjnBbKXK8mlgk33sQDU+osx0XpibAyszF1KrhO3uG6FuPdjuux1hBKuPuvDJXOem6I9EAYbRt64bymuK9gXE0tZkhR57Mf2dspKLEA3lXLvq9444YbduWmwcS+GE8K6Jm9uwRuvxylzGsiqAgdNXJkVpP4qFbjezctwQjkefWfVZO0+m9p7icwKp5+aIisMFFR8bwWijbjekQxpoD7VgM+ZX3lPPfBbm7uS9GgBFgBNwhwASeO7y4NiPACIxhBEDMQW2H8FnkuTtHhMgiTBYOtHc+sJJ+94sbtNWhHgi+p3SqPK/Lbmzrd50vyetYTu1ALrQLRaCbvEJOfYb9udWc5YEyIUgAqDLg+lssgw63h20v4bMSZ6cwWlnvrY3radvqPTS/PJPHsfeWu7O3SppW4Ad25B0+rxamFod+/0qNvHND3MnBnFQxyHn3JxE667YsE/nwnHLiQQGXEgo4oxLTSxitU/47qUpEaCVUtwMi56Jq8ZL/Dn0HSeD1PPkzev7XK2gkXiU6FpeuxEW0N4gpfWhtw6IEvfca6/x7yInXJb7AMKbFqy2N0Tixl2S54YZc9R1+7kTgoU5YKjwnExI574FnnqOkuPQFHJ7EKD57NiWOW0Glc2arboNsvWKp3ewmiv2diJdQlzCxKGaxCq/1km+zmOvA2GPZwGKi+LcEfgdzYQQYAUaAESg8AkzgFR5zHpERYASKhADy3e0WJhWfvfxsuksQdrgOXDCLviFCaUHo4b/hRIvw2XM+cJxvJ1osM0p58MaikYV+znoVGMg6hFQVIkzWabuqqMz0fbhxnzWOrUrggYh4cdMmuv3vf6U1e7bTp2/flbcMq7BZWRF442B85OPXa1h7IUidXHphWLFGXF6KkwrPLu9j2d/WUrytS3lYK/WdJO4kuVlbVZqXd09lkGKE0JZtvZ3KxSVL86459OzDGTOf4bIJNLKP/AWBl94XQov9MHVpOZ3y1UlKocF2a3/mmRjhMhYVAu+EE0YIV9DFTe455MRLiXBa/CnDapHvrnL+HBp/0HyNiOwVORjduqqCmK2tSlC7yKHqVBpjfdRU0pdTbcpwFU0dySVhnfpx+tzK3MOpXZifyxBUEIsDQ2kNb9V0C2HOS7VvN3tNtc9C1OP8d4VAmcdgBBgBRsAaASbweHcwAozAfoMA8t+Nq62mXXua6ZU339WUdginhZkFCgi9bvGzk1ccrpF5QRSowzp7i6takOsYiyE7ODjiYIYi3QmR3y5KKkInksq4j/yE0DrlwJNj6X
N5tby+nt750i+y04DqzizfHSpkwgEzxB0IO4QxmuXBU302nHKKucl9ZxzTicADuVa6z2DDbL6qJJ4ZeWck7iS5aWacoYKVVwLPq4lF5ZtXU2nHm3lTA4n39qsnU8vuudpn6drFpCfwZNisqjmH3dotCTxBYKWFgs+uhEng7e1QM11xuq+j7sxxjcSzy9+m7wt7q1yoFDt6rH9vgLhbFW8R5F2/6TSmDFfS8vTEwIg8p+fYCYswP5dq7HKhCMMXDWbhtWGO76XvsZrSQv4eHi9CzbkwAowAI8AIFAcBJvCKgzuPyggwAkVA4Lpv/1JLjo7QWZB2cJutramiaz93cWiziVIevLGWNBsH2ZqKDIHXKUK3oHLyogIL7ebu69jt4dYrgVdfPUKHz1MLzTQqZtaenQkPtyoZ0wJB3IkKkrhD3SoRQjv3xlGHULdYOjlDhk3gOYX96cNpdzXuoZdXvUG7mxq1ZY6UJ2ikooymLVpAh3/oDJq2eAFZEXcSF68EnpcwWhBEv/qEdRir1b2yIu+M9d9+5SQtnLZ06eepbHYJTVicf2j3Q+RFkcCbPqGSdreak2Ju976sL8M+q8qFOlOByHNytgZ5tzKxQ2k6pw3NDITEi3r6BbxnkDdRe2eJ3xuq5jFKIIZQaaz9LtZDwPnvQtgQ3CUjwAgwAi4QYALPBVhclRFgBP6xEIACD4o7ONKGWaKUBy+MA2qQ2BnDZJOCtKssi7syiQhyPip92YVqWrX3EkarYmAhxzPmrLIi8EAuAHMcfYf3Ke70c5YmFio4mNVxwiZMAg+H5KqKuK2SSc750W//gBrfXqf9daRUxIzi0hXgNPfgxXTul66i7v5Ulkju7NhJnR2j4ckgD2bMeo+rHHhymG8+2kXv7kkpQ33zBRNoaq27MFJj2KzTYFg3jT+Mupb9j21VJSIvLdxbh/cRTyUz6ZnnZ5uG0MrcfnYDjgUFnnH++vxtUIlBLWY0YkAbp3DVO8sy+1S1BEHiuU0ToDq3oOoZzUf04bVQbKuqH4Oaj1M/Y1ENL9fE+e+c7i5/zggwAoxAuAgwgRcuvtw7I8AIMALU0pXU1GNRKFFVUkg32SqhuOsTBy49SRJ10rFChLtVCOLGLuTNeO/be4je2JJLEtntDzfqO/RjdI3ce+8z1HzfM9khssSd4H/sVI1+wmcxmFMYqxcHWrkIpxBaVQLv0e/+mPas32gKvxEnqPHO/I/PCdJuJ23f+jJ1debmFoSKESrfmnEH0LJDz3X1yMONFko8FRLvP8+spROW1JLbcM/av57oak4ypLrj+L8otdMTedkclUMvUCz1omn762/+VxG3XZPzmQqBF4aJRaHMI4Ap1HjVglw2U4rZmRs8WbrdMmzW6gYhnPb0lPeUEGMh3NOKYMReAtYy/UJUwmvHqoEF579Teg1yJUaAEWAEQkWACbxQ4eXOGQFGgBEQDqARyoNXX52gpHCHjIL5gyR4oFrC4RmknVmYbFRJR7m3vYZDqZJ4bsk7zAsHV4Q66ZO6b73uDhp4e7uW3w7RZk7hyH7Vd5iHE7np1cRicmwGvT/+UdvXi8p9ef2PT9DrjzyR148dwTl/xcFUNss8zFISeELMqJWDl3+E6upnuHoNOoXTgrxbMi1BIC3cEHhxkfOuSuS+29xcRluyVznNnZQU16B2zROXvkgCr2/W5TQ45wrldQD7ysRuGup/jgYGtmr7zazcfu8ZtG3HVPGRCAWOZUJ0nQi82bNH6PLL3SkPVSaOdxCIlUIaIZgpF6Em6xYpA8zyfLpV38l1+1HhARfkPGvuTKrAWPA6KgSjVD9CzY1Q217xu8aNU3TQixqrBhbIzThhXHnQcHB/jAAjwAgwAi4QYALPBVhclRFgBBgBLwgkhUNeIQ+FdnPUEqSLw3VHEY01cCCsKCsRCpRSjURyOkxF3T1X5QBpdU9A4m3Zi/uR78aJNqqus8b+MadJdRmCR6ob+9Zuo1Vf+KUWKutU/Oa+k/0blY
Bm43pR4Tmp7zCOCiHzi3+6OmdKTsrEdHqQ+vs76KjPHGMKoZHAG1fnXoknOwaRpy8HTivViDtZ3BJ4CJ+9646HBXlnfQAHmXflCW3ZMbwSeFoHyfuoZGSHaW5F/bquv/mKfX/NkHgJ8X4aEl8yWJUw1HcYS1Wx6fTsePlcEnnYszCRgWrbSODBtGJVvNVL98LQYoJmauGlOJHwXvoMso3ZlxV2/Rc7vNbP74sgcfPSF9yRZVoEL+25DSPACDACjIB/BJjA848h98AIMAKMgCMCQSdGdxzQogIOOw01xVFTaAdkQSCWiW/xobTTh8narccpj5pXLIJsN7Whghrb/blXwtxCFqjuGnIjC11PF6HHCEcG3sBaqi63CCVe31vbLPsLQnknO1c5XDeN7KQ/pR9QXt+pQnk3RSjwnIoTgbdn3UZ69Hs/1rpxIu7kWP397cIhdYimHzGDDjhiZt4UjAQeKvgh8ezW6JbAu/vH36Ed777mBJumyJMknmcCzxA2a+ZuLCeydftUuuO+M8RfBTkZG2+rwAuLvMNcVNxfjeBtf3Ezde5oF1eH9lHdzHpxNdCs981zxNmsAt6R0uHTmLutWARe1PO1GfN9qgKP9wPaIsR2UHzJZpWTULU/1XoqX2qo9lXoepz/rtCI83iMACPACOQjwAQe7wpGgBFgBAqAQJTy4IFsahLKLKuwtqDhkG6yOMSDSEIeIjdjQwFSKdR67d254X1Bz9Nrf293N9H2VJsgyFIiPCvTy3nTlnntzne7zKFUKC0TcersHRQh3Om8PnvXbKVeExJv8kUn+B5f34Fq+J0qiadK3mEOehWi2aIQPvvGo09kTDwUQoql+g59uSHwUP/YEz4fKK7ozA2B9+uf3aaRd7FBNQWXJPGQ8yomgHQbQhvrv8VyvcAb90bvdozKWjjtzuWmBB7CZmFcMWdO4DBmO3TK16gfGcTd9he32E5m2QWHa2Se2yLfzyDO9M61b5YUR4Hn5CTtdn1B18f8NCW3+LLCS5E5CfHORHgtiDy3v6PcjBt1QtRqLXgX4J2jfUnBhRFgBBgBRqBoCDCBVzToeWBGgBHYnxDoFCGrXg8YQeM0YRxyLIl8czZhan7HxCEdBxWEykJt50fdUEzVoB0OIO5+t2cNvdOzVyOBcPjTE5Mg8QpJ5IGAgFpRJsavrSoVBJ55Li2/99dNezfqRKuceMtKjtZUdyrKO/3crEguYLXq0Sfphd896pgLUPY3ONhLuFAsCTycbQUjYAxTnjn7PTRrznvdwOZY147Au/+dVdn2g8kk/fmxlTReEBOTWt917FdW+NQJrbRgsiDNxXr63eTAszGt0A9uRuSVlB9Lr7x2gvZ+kgXkXZjEnRxHVem75jevZRV3TmC6JfH0Rhq9296gvc/dRv073tTCantGhJuquAbmHEgdJ36EknOWOA2f/dxPCG3UUxgYHWiVQTGpWIjw2qjjaYUf57/zs7O4LSPACDACwSHABF5wWHJPjAAjwAhYIhClPHh+FQt2t9lrmKzT1nFDAjn1FcTnIO5wyWIWOonPltRMpv9adGoQQ5r2oc8naAxLDvJg62cBxbx3RgdjSXICq2fuf4Reefhx5aWNBQJvbXMj/ffzK3PW1NrcKojHjHp1Qm8HLWrZSxP6+xzXDRXeZ04U+fAEedSp6EKrdapI4MkJ6Im8WNmxVF9/YlFyhoLASwkll53BD8Jl1/zmdUfs9BXckHj4sgJ5xl7/yT9T7/Y3c8aB8KmZRo0kQOQ1XfFlpbn4MbGAiVCbUD87md4oTSSESlYOtH6GkqYXNeJLKJg+IU9rUF94jVUDC3whB2MkLowAI8AIMALFRYAJvOLiz6MzAozAfoIAFDl+c6QFBVUYIamSGMEc9fnWgpozVINRUJNhPVDefWPDn/MO12bKK1QKg8STxhRlIkwW+ZvM8glGJfQtjAO26r6SKjU9cSexenvD2/SX7/w0p6tyKrHsOuoEnhl5h8Xs2bVndE0jwx
QbGaSjd2xXIvG+df4eSh35Q+qtPkQVctcEnuwYe7qq5nhKVK4oCoEHB9qkIHbtCLznv5P73KuAgrx4yy44QqWq5tq85e5/o9YN5rkKO2mQUrFRExoVEm/KcCWdnpplO/7zJXfRC+IylmOHL6XDao+i6s4DleZf6EphG0LI8FooyVEGBqEmT7lKAaHHJOz5hok/578LE13umxFgBBgBdQSYwFPHimsyAowAI+ALgajkwcuEaZUJh9JRNYeXhY2SSP7DZJ3Gj1LY0cWv35M3XS1Jv8A1nTZ3eA0qnFaPOUKycaC0UsZAMQHVDgirYhbstY6eobx5Npb0Ey59OTQ1PtCpgsAjGskxTWmPDdLmeC91lAzR6k/ekDde2UgJmRF5egLP0oW2SCG0VuQdFpdD4OEHLki8m677IKXnfcLdHnKpwNPfgDJB3jU0nKSFo4fxRYDd5gKB1yfyRVoprVTy3ln1v+KLpyjt65bnbxNhs7fbOkW3xnLf210nfYQ6RUitlbm0nfpue2wV3Ru/xnZuUAWeP3gzzRpZrrSGQlYqZHqFIMJrw/jyrFB4Q0nN+e8KhTaPwwgwAoyANQJM4PHuYAQYAUagQAhEKQ+eHyML6aKXEAc7EEggksIOr4pK4m9j6Kx+6+AwmbIg8FDvnsMv9rzT9JirEhs4LFaIXG8gz4pZjDkXQdo9UbbbckqHphrID5EHMrWiLJMPELnDQJzL+wLy7vVExjEUpefdrbT55jvz5hIfEWowiuf8XJpY6PPfVextpsrmFpJ/okHHwUuob+JEGpg8Kds+bBOLrz33JL3d0mSKaR6Bh1qCxEM47TE7t1veh5GyCXTD/95KpYIF1pPA79yVGUf+OfGQapq0vIaWXDol25ediYXdXkxUX0RVVXO1HHi4f7iPRjfWsPayk8rXD4E3631zlZxp3/6WMJER5KUVGYe1D9EwdcVyn+md19+lGYOA+NS39UveYby4eAzSwgfnovQtkSPxvDgH+90/+vDaobRwU3eRTzYqv8fcYpCIl9Ck+nK3zbg+I8AIMAKMQAgIMIEXAqjcJSPACDACZgiA7Grr9qd6CwpZt0YWelIERg04VNuFmgU1T9mPVD+0dhXXidaOwEuUltCQjTHIVxaeQgfVjpIcKhjJ0E8vmKtgVrX+XUq0tlCirUWbztD4iTQ0YSL1LQouZE6vbHqibJdQ3Q04Ln3qcAWdMXiAYz19BS0EUxCWwEzmA8TYIDB3DSSpWSiXtlZmTChQEmJT49r07Tuod922vLHMSLzS+jQt/OAirW7D2ne0y67sPvE4mnTUmb4MLN7uX5s3xEGVS7MutHbqOzQ0JfD29Xjm5h3af0l32pF4pWBsqmikfIL2869/7xtZAq95VY9G2rWsHsXQODGQeBqRl7yPYsOZvlXLSMlMqqi5OId0lorTQhB5TrnewibwYFqx/Z6rBX9nT+BJPPXhtI1X/KdmagHFLUjrabFqWppsoMnDVZbwf7v0NMdbk3EhhmNwpuqXUrn5FR07CLlCmPlcnabuJbw2Skpyp/XpP+f8d27Q4rqMACPACISLABN44eLLvTMCjAAjkEUgSnnwVB0X9QfogcG0RtzZqczCut04LE0cV07NncUlQM3CZ+WanRR4bsJoJXE3JBR9XhOo496Nry0zxQykXd3fns+5XRt21NHjL82ijTvrtJ+nq2topLSUTrqghOYsjdHcgxEf6r7IXHwvpZvpzdJ25Q5USTwz4k4qQvtLR+jFtk6NwCsbn6aSstzhS8XGqiopobaHn6Wmh57Jm5s+nHbaogW04lPn0Fur/kDT/vKsprxTKZP+48dUOm+pStVsnfYNO+hvsVepsWdXTrvyumrChfLe8ctpBi0mOM7+9t1R11njQN1d3dTT3WM6/sKuIVrYba7QPO6UE+mDZ71fU3Rt/XsHPXfNZqU1QJF3/LcTghT8jVJ9WWmk7AKqFOo7qH26+nLnVAgizylXY9gE3t5nb6PWF0T4rMHN2g
nEPkpR/ISLqFRcKNNGqmhe+ThBZpdq+THN3tlWOe+MY4HAIyj79hF4yIm3QlxRKVEhxFTDa8eqgcX42nLNUZ4LI8AIMAKMQPERYAKv+PeAZ8AIMAL7EQLNIu8cwm6KXZzUWQi/rBYhbDg4O+VaK9Ra/IT9BjVHOwIvLkJoQdJCLWdWnAg8OyLK6/zNHGDrXno+q7iT/f7w/mVZ4k4/liTx8LNP3BD3ROKBLAYJ9L+0znYZ6VQ/xUuFAkxXzhicTlNFEn6zIvGCOgvKR6ORR6NwXv1zV4dGiMQSI4LAs37uxok4waRQ4UGJZyTyFi08kA7/0Bk0bfECbRp9T9xNnX/8hdItqaysE2sqo8rPfE2ZxHvth/fTuoWNlJxmfWAGiVczvpbGxydS067xdN/buY6l+sklk0lqaxFusibFjsD78reu18JYG9/ooSevWq+0XlnpuFvm0aSD25RJPJB3FJ+lqSeNIbv6gcMk8pzckv0QeCpOtCDw2l683dMXJJOP+wRNPv4TOfdIhnpClQpCWx/qeU/8i7QjttrxngouVXuf6cNyo6TCixohZhdei88m1VWI3LPOCmTHG1PgCpz/rsCA83CMACPACNggwAQebw9GgBFgBAqIQEfPoHCxEwmFilzM3PBwOC4TxB0O7TjwYZ6FDJN1gsRt2K9Tf14+twuh1RJ824S/WYXQGs1AzBxlvcwVbYykhJnyzoq8k2P6JfFAyrxVJhRcQ3vzlgHSLjXQRsPiT2MprRhPM8um54XSSuIOexWhstinxhyMIO9WdrRrBDQIvHj1MJXWWDCr+wYGiYeQWmM5fKieGkZGpXvd136M0qlBGhzsE7nBzNVr8XiCyspEFj1B3qHE5x1EVZ/5b8fbqELejYBNEVxkRUWdSCqfoLe2pbSrpDpG8Rpz0s+KxLMi8C759BU0e95c7V3w1L9toN2vdzvO3Vjh3JXCuTYtcuylXrQNp5XkHdqrGq+MklOlmisovmSwIs5VJy4di+3qG11oB6ifkuJCqSNrAxYVE4ugCTz9OuQXMvgZiLyv06lKsIDAM37fFBUCL8qOrjLlBMhTFKTPwHsIXza0dxc3DYTSjddVAqE+WRhYcGEEGAFGgBGIBgJM4EXjPvAsGAFGYD9BIEp58JDzqV0Qijj44qBeJVxL+8RBOEgCKcjbWsx8R3IdtgSehQOpbGs0sdATd8AcZFTQZiDGvF4TH30w55YgZBaXXRmJC0K3piZbxa0SD+TBzxMb89Y2JIg7kHdO5dKeaVQz8RCNjMOB2I64k3092d5GTUNDWhvkFCtRIPAQTluHjP2GMjddTfPEhZL8029pUFyygMjLIfHEHkgkykTesEReP7U33W+71M2Pv0RvNr5MnYflz0FriNBKHfeP7VZWXkc9pWl6YZVQIHaUCL9dQRY2lIhQ4Xwi0iyU1ozAk+QdhsR74a7jX7c1VbBaVDYfHiqAyBM58WKCzEOuO8KFkjg2pznGSwmCUvWLg6CIvIwzd7mjOmrNb16jzh0dtJd2CeIuX0klAg1pnCDyKmhUNapqYIEceDvuvTowBZ7ZfZHK6y8nT9SIOSfS0ywtQFQIvEI60Dq9o+w+lwZElYK86xdpKLp68x25/fQfdlvOfxc2wtw/I8AIMALuEGACzx1eXJsRYAQYAV8IpMSpaa8Io41CgaINCc9xCA6LQApynVFx8LMKowWOwNOMhNOHz8pDNEgDVUdZrzjqVYswrKja8G5OV1d9b4VS16m6+mw95MP75NctSCaT3rDeX5blEniq5B26W7Hyh3TwybfQxOmHWiru9MNK9R1+5obAQ/0JIuefsdgReGbgIZQ6beJGXHbq+VQuLqvy56u+R9s/aUjSJysbyDv8GBQdCLuSqnp6bfsA7diamTt+XjpeSKcS+SQelHg9XT1CPZhRAX1wV192OrPmzqHjTj1RU97Jsv7Xe+ntuxr9E3hKuyxDGLoh8GS3fok87BMYnjiZ5Gygt+jPv/kj1e3ImH
xYFRB5k+kAqptZT8suOEJp9ZjD2m8e74nAO/i6Z5XGkJVuTpwm1JvYKxmFqsxxZ+wkygReMRxoXYFsqIx8fSjlMDty6V7rZ1y/bRtqRAqAfUpCv31xe0aAEWAEGAH/CDCB5x9D7oERYAQYAVcINLUPBK60Up2APkwWbWCSMFZCepzy9qli4Lfe291N9I0NfzbtxuzAu6RmMv3XolO1Q1Chw5NBSgyIENMBoe4zEngq6ju5yOHyChquGA2juuH3+USXFa7A5LaKTVlSC2GzgwZzBrt7cuKff6QRSId8+FGl52ZVbw+t6s04pWK/o8Sq0o4htKhXKVgNmFroi57A6/vZf1N689u2W8gLgQf13RZxWRF4I6n88F9J4JULFV5LIkl/+WvuPamcLnIPCuD0+cv0Ez9UkEzH107SfnTcqSeZrqnQBB72a1LsVVUFnnGIyi6fAAAgAElEQVTSXok8qZKyexeCvPtRyVdo3PbxNOOFhY4kXnJmH11xwdXKrxs8Jz0v30Ubn/iZchtUNMt/59SBPgcetju+eDAj8qJM4EVBke2Es/5zmcMVP0MoLdTEwB0pABAG7qSGdDNWkHU5/12QaHJfjAAjwAj4R4AJPP8Ycg+MACPACLhCoBh58HAQqxEhsjg4wE0Wyi8thLOq1FF14mpxIVaOUs4jKxLPeOAFeffNZadrxJ3M1zYoDBcKVaSBBHKEBUngwZkWl0rBPrujclNWWZQU5J1ZzjurvqDA04iKxRfTlMWXOA6pJ/DA38XExhkpHbY1sZCdGgm8QeTo+9tttKctQ9od8cJuGt+aCZ2sKm/QLmMJmsCTOe+M40gCL5GoorQI233w2VROFeTES4wDSYA8ZrnmKgdNnELXH3e6I5ab7m2mNbfvKZgCDwRe30Ca/D4jbok8hHlXCEKlo8c8pyGAuqrknBy8QOLNFJex7B0cR2/HxlOrcIGdTNOz4bTzZ5TT6cfU04KZ5vnE5BzeuPVfqHe7tSmJfrzqWYfS3Eszz4ebsj22iu6NX5O7XwxEXkZRnJsDL0outFFxoFXB3SpEG8RxJi1A5vcyiLxiuLxbrYHz36ncXa7DCDACjEBhEWACr7B482iMACPACGjfuIPEK0SB6gsHBBmuqc+zFiVCTBWLKDjRyrmCxENOvHd6Rs0Z9ATepXMOpcvnHaYRd8XKKyhdPTetXkdla1bRhL27s1D/efUS+stbB1FMSHAQSmcsMWSw31eMCjw3BB662FLXTc8kMzj1d2xUvd1U17aTlr36e61+9YRlNO/YmxzbmhF4ILAqpjqbx+gJvN5kO7Wve4ySW1/KjjlvXTvNF5e+1FVNo4TOPdeKwLNzorVT4Jmp7zC+nsADiXfA44fTd6tfzE4NafhKx2dCnTUlovh/KPKWTFAj79Cub90APfn59VqortuSkwNPsXFQBJ4cTpXIw3OSEPu9q8+cwHs8dh/hsiog8wZ3HKARd4M7ZmSrIZR2igil1RcQef/6sal5XennsOWuqxxJPK/knRzYyolWKvJQD/kj9SYWUcl/h7lFzYHWbouDnK0UX6BZKTzt3GsVH51QquHfDvUihJYLI8AIMAKMQHQQYAIvOveCZ8IIMAL7CQJQl7R0hpcHD4d15IurKEOunRHqhUGChepLGllE6Vt/u20QBSda4/xA5EkST+bpu2T28qISd3KOUHi8u/JP9OqDj9PBk+po2aTRXHZPCwLv6dWCwNtH3kGplldE+5jYT3onWtRxS+AN1Q/T3QNbtO7dEHjLXvkd1bXvyk5rmQijdSp6Ak+fl7CkbMRRhScJPJB3feLq+uv3coZraOmnI1/ckzcFPYlnFnZo50KL53XHyr/ThkdeoM1X5JtfqBB4k7oOoMNfOkqb13eqXqD1pa3afyemjOYqBBaXHnI4XXHo4ZoTqYrKDYTabe97zROBd9wt82jS8lHzE6f7hs/xfHeKJP9Bv4+ciDwn91snAg9z3/2dq0yXOIvm5/3cjMTTq2XRAK
YWvdvepL3P3ZbTHsTd5OM/QdWzD1OB1LbOt0tPs/wc/L2mXhX/hxx5F6ZuoVkjy32PGUQHY+3LJzfhvtIxGO8FfNkXhMOyV8w5/51X5LgdI8AIMALhIcAEXnjYcs+MACPACFgiEEYevNFwnBJl8qi+WuScEuSe15xThb7FVgehh//8KK3bsl5cG7JT+vDJZ9LiuQtp8bxFoU9TOsrCybdHEKY4dAXtKOtlEX/90U+pddPm7FwuOmi21k3JcIyeWXUwrVy7OKdbKxIv3ZAbKurWiRZqmad699Ab8TZlAk+vvpOTVCHwUPfOvU1aE6OxiBOJBxMLhM129u2h3jfvp3THzjzY9WG0+g8njpun/dWMwDNT3+ldiN944Fl69+EXqOkDpZSclhua7ETgIQfegk0H0tz1uUTRH8vXUe1pGffcpSJkdumkjOpL5pLEfzsReSDwXvv5blp1Wz5pabcfJx5STcd/J5+4ctrDIPAQxhrWs2NF5DmZZxjDZ43raPnNuTnKO/3n+jBa/c8/e/6UnHBav/n/nLC1+txKiQcCD/kTZ40cQmeUf5Lmxw/VCKUo/K4YKw60EnMv4b54TpHyQqa9KEZ47eT6cvE+U0uV4HX/cTtGgBFgBBgBdwgwgecOL67NCDACjEAgCLR1D2o5b4Io0hxBO5AL8sjNAUtz8hMHhQ6hehkLxehEu27zerrll9+3nTpIvGuu/EIoyzM6yiJn0LbHN2mJ+GWpmVdPtfPzc6WFMiFdp3s3bCIQeJgTnD1RTp49haZVVlJ5KqP0+uL9H86bhpHESw0JFc74cVQiyEkUty60aCOJmUdLd9KWrrccl25G3qGRKoH3ZHsbNQ0NaQQeiDK9ogskXmnNMJUYIsNKReW6eJxadr1MPZueNiXvMAcrFZ7MiWck8IzqOz1xp39e4UI7MDVGez+Yq8KzI/BiJQmqqKijkx8xV1JN/n/Wbqly78Ix1yrEW4a0/unqDdSyOmMMolK8qO/QLxTBeDeGReDJuRuJvIR4B+J9bPXutCPwkiJ0tvU351nCUkcNVEfjTT//3r9nCHVtXwmXUju1tAruXusgJ9722Gp6oeSubBeLyw6jIwcuoRnDh2g/k/sFocY94guKYirDxpoDrZ/UD8UKr0XU/dTxlV63FLdjBBgBRoARCAkBJvBCApa7ZQQYAUbADgEcfhAq5rXoSQA/5ghjTcmgd6JVIe8kvkGTeGaOsnue2kKN4ipByKmJ9efCfzqsoETe/VdlktTjwD2kS2R13qR5VFeXCa289S/vo03NE3O2oZ7AGxYc8+Bgpm5iVib81q36Dm1A4MGcoFzkgnr8qU/Sm4efa7n1Z236O+EyFlUTC7RrHByklR3tWnAwctKZhWSCyMMlyxE1tTR5pIx++/jHHR9LMxIvEa+guurpOQo8PXmHZxY5paCoAQFiJItkHjyjCs+OwCsT6rsjXz6GGlrzCaLEzFJquKjOcS1yLw8MIlfjUI4bpiTwugZ76NkvbqTONblGGfFYvhuxV/IOE4VSs7lzoGCOnHpyBOoy4/oleD+MfYU2xsyJ5+4X30u4rIpZCK2sq1fhFYq8dNwQ+ypY3YvRFA1xzXShGESem5BU1fWGVc/KwMLLeIUkUTn/nZc7xG0YAUaAEQgfASbwwseYR2AEGAFGIA8Br3nwvITJOsE/fUIl7W7td6oWic/1uY+uvO6zrubkl8TD2CBfzBxl1//0derZ3KHNBwc2KwVRoUg8qb7DfPQKPPz92K45NH2uWEdDJjTKisTTk3fauibX0LwjEvTJr4/mVVO5AcBjvFAXAT8ovbauvpP2rruHts/PJz3MiDs5xtz3fYtqJmbUQCpFknhmIa3G9qfVN9DUsowk72dPfEyle62O0dQCYbQYb2T2Eio79XwqnbdUq1ddkSHuJNlutT9e++H91LFxZ04orZULbVyo745+4xRT8k4b89hKcVUprwVzrK4QLrA6UiZRPkhNfXupN5VR3+24O0k77s
nN3xkXbhklsRLyYlphnBxIo70dGZffQhYQzLgnZaXmpJRdDjw/BN7px9RpzrQohSYv7fBVyTGnVzFCvVjItAFeQlILuZ/0Y6k4HLudmzG8Fvir5LR0M06dSK8BxTsXRoARYAQYgWghwARetO4Hz4YRYAT2IwQa2/q1HENORU8coa7bMFmn/qNoDGE3ZyhVfvWH39NDIu+d23LNp652nRNPbwpi5iirJ+8wHyNhZpxjIUi8tY+vpLWPP6UNrScUp3TW0dTODGEwTZB4IPJQ9CTesMiPNyKuVCo399EJHxikM74+XRlyvUoU5Ig+f9aah89U7gcVVR1ojZ2CxNuQGqAtPeYE9ZREgpZX12TJO7R3Q+DJ8aDIa2gdoOOWfoZmn38pNbZnSCipbnPjRCxJvM7D4oQLxajCm9A+mQ7eeQTVNluHZtuFz1qBrydlmns7aXf/NktDCZB5KOOWxWnBEXOpck8jxV/9E5XszpiVyDI8fS6NTJ9H6aNOdbznCDWU2DlWDrCCVL8NC9dVkBZV5aU5ROYGeot+VPIV0xHtCDy78Fl0pifwirV2s0VJ0r1ZwWypGCGeY8mBNky1YJjYc/67AF8w3BUjwAgwAgEiwARegGByV4wAI8AIuEGgpSupKXKsSlBhsk5zCvOA4TS2l89BOJ571ZWewuxgbPHhU9TIIyP+ZnnCEDaLS1+cCDzUPfzmk70sXbmNkcAbFgQauGI9gafvDGTepr0T6C8bFovccVOpROSCk+XomdtpxrhOOvDCaVT9gQMd52CW483osNnTspq2vPifjn2hglfyTnaOw/7qvV1iXYM5401JlOUQd/JDLwSebPuZM+4nEDGdfUNZpaZVfjm7xbdv2EEIqYUaD3nxcA13j1BN7VSak1pOEzqmmJplyD7rLxxHZbPyHW2VABeVQAzsHFwnlHViXLFxsH9sS0czTX/mRapsbbesBiIvdfZnbLspFollVL+ZmV08RvcRlHjGYkfg2YXPoh9J4AUZZql6j+3qeVWNgbCuEQQoiFAngxSv81RRB3rtO4x2hfqCLMjwWs5/F8ZO4D4ZAUaAEQgGASbwgsGRe2EEGAFGwDUCVnnwcHiqrizVlFPISxV2aBLGqxSHrnaRPH4sFBCOF/77p5XUi2br+cWNP7FdJkIgcQgtE7iAfAHJahXy+Pr/ezqvL7sQWll52vvnEq6wip7AAwkjNFwaXlYEnn4eFXXjqFJcxgLyzo7AszJnQD9QNeFQCDxlAYmHUNre1jWWMPgl79CxW1LIK4F3xILzacXSC6lO7E+EtHkh7uz2w+D2Ieq4rytbxSo02C95hwH2Dm6n5Ei/lj8QzC+2EJ4BwcvkF0HexfaRo/P+mFF9WhUnEq9YIbRWe8RI5N2UvJbWCzWevlgReFbus/q20sQC9xIhi61d0XgHG82C3L6n9E7HQTvXjrW8rX4MLNzijvpBhNci3B9pD7gwAowAI8AIRA8BJvCid094RowAI7CfIJAcSmcPbCA/QBhBqWQMNwwbjrF2INq8fSN9++f/49mp0orAg3oEibtxL1TDlK0IPKl4s7p3cKZd9M+Hh3Zr9TnwQJwJCkZTxYRB4NkRd3KBdoqepnV3U2/LmhwiD4YVIO/c5LyzAtMtKfTaxt8SLjcFRM+lJ3+TptYv0XLdhRkG2vtCH/W+0J+nwHOb885ufTuS67SPQeDBpVb7b2wk8f/a3pZEXm8Xxfq6s11Ne/FVWxUeKqaPPMU0nBYYTqorTg48pz2iJ/IeSP6acOnL7u9clf1rOQkzE+E6W0H2Dp7zZ5TTv35sqtbOq+LNzR51UxdfksD4xo2juVn/QarCZP9jyYG2mMpKY3gtHI4HbBT/+vvH+e/cPC1clxFgBBiBwiLABF5h8ebRGAFGgBHIQaBJ5MoCaVclFEp9wpkS7pRmjplhw1ZolYCf9YBw/JQwsJDEgtu+jASemaOsSp/dm9ppw/+9kVdVr3iz6yfsMNq//PBWat64WXNihb
MsCDyU5dtn2y6vYdYM08/rP7+CyhaOOtaqEHeyIxzkoSothsrTiZwxW6yqCi/ZmhD4ipyBneOovnoaTZ4ymxbOW0DzFr1PZQv5quNlXSoDdqZaqCvdqlXVE3j4O0gBuCyjaGG1e3fldNmwbhM1rN/sOMzgv3wrr46bvGuOA7io4IZkMRJ5q5Nvaoo8qPBKXjyDygVp50TcyanpHWjxDoJbdJcIvY5CCdokYlTV7N+51hiOHwW8rOYQFbLRLZE6UeSZRRsujAAjwAgwAtFDgAm86N0TnhEjwAjsRwhAbYdDm12YZiHgKFSenqDW8s9f/VdPRKfMgadXPEpnULcuflYEnpEws1pz2ASeVOFhPiBdZBiwnQrPKnw2sWAiNVy1QluKG+JOrr2YIYLSoMAqDNrs/uxuW0uPvHy95XZN9ZXQYKtwLu0H8VIhyLtRcw8Z2rr80ONp+aEnBLXl8/opBIFnFaarEXmpQRppa6a+4TIaEBdK26o+al+VMQxZWrWNllZvM12/mQoP+6q+pvBhpF7GNcuR9+PfNNKmnUmKv7OGSlr2ape+pA48mNJLlmk/0pN3+HvUSCkvz4zKRg/CuRZ7ZCCZVlaTqcwrrDpRyy+L5xkGLRlH7LRmLGT8vcf578LaDdwvI8AIMALBIMAEXjA4ci+MACPACHhCAKGa3RFQXeAAiaLPUeZpQQVq9IM7fkBr1mfC/NyUaz/zBTryoIO0cGU3zqBWY5iF0BoJM7O2YYfQyjGlCs+Yl29+0xSqSVbkTK20vJxqp0wyXSrUd5WLJ2lEg8wN6Ca8rljqKiwG5HRHz5DrkGuQeAil3dP2dg4mwwNx6t2ewc5I3uFnetXalKmz6fQzLnOzRZXrFpPAwyRTPb3U0z2a0xA/0xN4ciEn1q2iyWWdOesyI/CKRfJi3FoRMupFHWok8r780V9S99Y9tvfwA9edR8efsTCnDkippAhvdPNMKW8UDxXd5o10O4Q+vDOZEnlekWtU/KlSxpIDbVS/GBvdt3HtvQj8cR8g0i4XvxsnjCtXuRVchxFgBBgBRqAICDCBVwTQeUhGgBFgBCQC+jx4xUSlmCGOXta9q3Ez/fePv+PKiXb54gPp61ddE6gxiBmBh/U4OdGGbWKhxxSGFuue/BOldE6i1QPlNLWzPkvi2ZF3E/7tOJpwyFRPxJ1+HmERTk77B4fozt4hT4pN9J0h8dZSU8c7muKue1u5ULE0aORdWWl+njNj2GlYSjwNz+4XKBZ/PQ+CkeFpItmcyLE4MqoMdMJJfj4w3EfNQzu0v1op8FLCpKOnXSjthtM53bYLBV7bPgWe/gMjiWdG4OEdVFUR18jWQpYg8s+BELn1/O9prr39yWGRDmGYunpHsSlPxKi8rITqajLuzmdffz4dsHRmdpkIWXVDYoWJTyFzEWIsKMKqxX0HkaTiXBs2uRgktmMhNYUxvBah3Aj95cIIMAKMACMQTQSYwIvmfeFZMQKMQMQR2NXYQgdMHc0H5nW6yCMVZsJ71XnhIDVFEAJRmIvKnKEEe/jpR+nuRx6yra6F+on/wZ/XXPkFWjB7gSvSz2kue57aQriMxYnACzt81jifdNNOevf1t2nt47kuocsmHU7jdpRQoiJfcdFw1kE06Zylvok7OZfpEyppd2smvLKQxQ85AuUgjE2gOoSz7O8fvI2aGs3DQuWajAQefn7ZFf8V7JJjuylR/lgOKWs2wMjQmZ5IPGliYUXgdTTvI+8MBN7uJztpYG8q7xmblOigk+pXZ6cYJQIviPxzD37tftq9dqe2vhKROgzvHOScHLYRlelJvLBCVt1uuk3rtlN7Wyd1tXflEN7zF82ihgnCmmNindsulevrnWutDBeKqeRVXsi+im5yK7rtO4z6eNYRWouwXy6MACPACDAC0UWACbzo3hueGSPACEQQgYeeeJ7ufGClNrPamiq68dorfRN5LV1JLZyz2GVyfbnmiusmV1ix5gzVTKUw/rjjwT9oRJ6xyG
T7CGfFer74yatp8bxFoUx3/U9fp57NHTl92xF4C//pMKqd3xDKXKw6dSIIBje00NDGFq05DtJTzzvYlRuvymKKpcBDeGLfQH6uJ6c5g9ipEXsMxB1yRWEf3Xn7152a5Rk/oEGgKjxB3sUSjzqqPOVEvZB40sjCjMAbEGrGgT4ROjsi3lnp0RDa/sYh2r2yK4sPnsGsW634qT4nnhmBV6yE/9XiHiPvl9f0AbvW7qCHvpbvWuxE5B31sWMIFwqejebOgUC/XHDcqLoKbS2dtGm9IO9aOzUsYHpj9nvgyGOWhUriyfdPbVWp9v7Bc9crjJ3kPirWHnGDpaw7luYq54x7j70ojWq8rJvbMAKMACPACISLABN44eLLvTMCjMA/EAJdPX300Su/Srd9/1qNtPvJ7Q/Sg4LQW3nfd3ytEuF9OKQUuwTtPBjmekAsNNSUiUNvktZtXq+ReOu2bNCUdjj4wW8V6sazTjqTPnyKUCGFXIwkHuaA8TO+r6OlGOQdRlfJxSQVMJg7yIyg83F5zUXn99ZBUTKUVs8vJl2JjTkSV735DK1681nH6Zgp8AIl8EofoVjJHmUCDxP2QuLtHdxO6ZKBvNBjTX0nS1qEu+5jV3ZBfdeU/x6TRJ5ehWfmQgvcQXx7JdIcb4xFBah5EV7udb+/cv9LhMuq2BF5n33g37VmxQ4LXfnH57PTtyPwUKkQJB7G0UKqxZ6AKgzu7PgdCUIdmQCi8PvSab9FzcDCab74nPPfqaDEdRgBRoARKC4CTOAVF38enRFgBCKMANR2CJU96rAldNTyxfTgky/QK2++Szd+6VPZWZ924RfpX684h84+I+PQ6aUMDA5TW3fSS9NA20CJAvIGrrhjociQTKOjLA57qbSROgt/RfpwWoTQCWZDO2yiwLQCee8KrbyTq7ZzbgybuJNzUCERw7hLqgSNFXEn56RM4IG8FaSWXn0WGIEXfy2b884pTFuPpZYTL/Uh1/B2xXZR50BPTrscAm+fCs+KvJMN8TSgXDD5WRo64hRKH3Vq3lyKReC5JXiNE//JR7+nhCuIvBgNCGK/VdTPkKBf+N1m7U98ITGQuoRS9HGlvoKs9MqLazTlnSzaFyAI/7V5hRaKxMOc9EYheKd2QQEaAcW60z0o1vvOaV52n8PMRRpa+emH2zICjAAjwAiEhwATeOFhyz0zAozAGEVAKu2OOvRAOnDBLPpfobT73S9uoN2CzLvpx/do/y0LSD6o8KDK81qikgdPEjkIox0LBWGhUM4kxOE3CEfZoNYMIg9Y4iDcL8IvQd45EXfJ7SKMevsgdb8wSpaUzSyj8lllVLui1vfUcCgzKlcKRdzJyXsNZfW7eLO16/t0Iu5k3UgQePvUd5iTGwIP9UcGP+0aSijD1jXvpK40SKdM0RN4/SLquu2NHhrY0a3U98VH7KLKiz5ralbgdJ+UBvBQye++VCXwBFKCwGvTZihViUd/rJ2OuaA9J+x6cORmGqZDPKzEfROEzr760pqchthXCJ+1+woE+fCOet8y9wP6aCHztKILGdau6lzrY1jPTceCgYVxcRPF71T8XuDCCDACjAAjEF0EmMCL7r3hmTECjECREPjJHQ9RV3cvXfu5i7UZIFQW5bNCaQfF3Q+/cZVG7KGA7DvmQ5+ltX+93ddsmzuSWphfMctYMbIAMYbDfpVQDPaIUE+EU0Utb58bV9/u57tziDuzPTDhovGCzMs3mlDdL3p1kz5ZvIrro+oYTvX8Kp2c+rf63CrHmR4HOJ867aHGxq208om7HKehhU8rKvBa/0iES18qRarGqsUi7Pms/KFiZT/P/rAQBJ4+byHy4qG8+cwu7c+29VJXJ/4ymKRYZ+ZzqzJSVUsf+eIKjSBAjjMU/f5TVUo63gCXFZA6oFuojr2qdtUIvFHyTj+9ax7ckiHKxP/o91+hSDyYViD3nb7gC5EhBQXzaWd5V527vEXZ6pIUc+tc63U8r+3GmoGFXCfw5fx3Xu86t2
MEGAFGoDAIMIFXGJx5FEaAERhDCDz9whtUW11JUOChQHV34MLZdM7px9J1N/1C+xnMK2R5vyD1bt+XF8/rMjt6BrWE3cUuULW1i7l4PcyGOX+ZEwmOoMiTBQIDpdA5s1TWqM/RZ1e/5Z5WGtyhpnj0Q+IBu7rqhEYsaZgJE4JCq1eKpbAyhmZKAhj7CMSdGxyUTCxMCLzTzriUpk6dk90KfeuIdn7XfieByJv5xdw6xSTw5Ez+cOuom2zeCnq7KNaXq8YDcScsc2ninPF03Nnzs02MRB7yb/nJRafyXJrV8Zub0SkHHsJlY5QhPY0FIbTYjyBNoMSWJN7wyCE0SDd7XZJyO33uO9koqgSemQNtsb6McAJ4LBpYcP47p7vKnzMCjAAjEA0EmMCLxn3gWTACjEBEEYDC7jxhXIGw2XHCdRY58T5x9U106fmn06XnvZ9A9t352yc1As9PiUoevHpB8iRT6gn//axZtS0OQ0hmbjRXkE607d1qBJjqeEHVc0pMj7DZ1nszIXWqxQuJJ0NEkZevTWDlhrBSnZdKPb9unypjmNXBPqkQ+wekJUhESQB7MS1QCaM1KvCmTJ1Np59xWXZqKuSdrGwk8aJA4L3zShO9+2qT69ux4sPzaNIBNXntJAmTiINQHSx4fjO/DrBWLrSjC90lCDyd8ce+D2Ys7afzb9izT/GUIdbxjIJjB5FXCBWeHwKvkHnwgI18jkG6G4s+HUCvcJzuS44617reqAE0GIsGFng/40seLowAI8AIMALRRoAJvGjfH54dI8AIFBkBhNOifPbys7MzAYkHJR7+XCJCab8kQm3hSuunpET47F4RRlvsEiUjC0k84TBrphhTVbm5xfTu+AZaXdJGa8QlyyWpBbRseDwdMjJBuTuoGVu6kjlmBvrGu7+9R7kvWRF58SZerDYHPX69QrGIBOVw7S1WwXxA0hTaJAUH/3rhWAz1YRDuuk8+cSc1NW6zhFELQdOZAOjVd27IOzlADonn0cQCfXnJgacPodUv2FaFZ4LMxOnVOeo7M/AQylopHEf7REh8pwhp1ZuAhLlnrdboZswHv3Y/7V6707RJjDaa/lwaWBj3i3StpZLLqD91sWNot5t5GusaCTzNxVv8D5SQTqXQIbT43YRi50Br5lxbqH2kx2ssGliMry0Xjr+c/85p3/PnjAAjwAgUGwEm8Ip9B3h8RoARiCwCZuq7V4ULrR/HWbvFNrUPhHpYUwEapBi+hS+WkQXUSzhE4LCmYkzhpHJTWbOsszrWSneXbswh7oztQeJ9e+i9St3aHeJU8t5ZDeKkwrMiPoPESgkAQyXtcF0R18JWC1FkqGyFIIWQXzLIPW1H4ukJGWPo7I7viIDK9eqr7xC+pMK3lKPjt+cAACAASURBVB770i7ae+AAzaZ2ujq2ikSmKqopiSsRLRhtJH24kHUdoT7wvppW5Fbzrh56/uGMg6pTUSHv0Ic0k0iUxsTzn9BUVCBrwiZggnourHLhmRF4H71+N808eECDzixnIn4OIm/cuL9qhg0gnp1yNDrdB7PPjTnwwD3HBIGnMlahCTzsj6Rwn1VRzgLTjHK7tGD7SI/vWDSw4Px3Xp4gbsMIMAKMQOERYAKv8JjziIwAIzBGEHhl1Tq6S4THnrLicM1pFuWzn/gIHbVcZJgPoUQlD970CZW0uzU/5CuEJWe71JMtbg6sIMk6e70noJcTAHl3bdnLSktUJfEQRjU4lDYNCfRD4NUeW2PqTOukWPQbKugEzt7VceptilHv3lwVR/XkYZp8SJoaDhDmI8K8IEgizWxO2EsIuUaoLEhgOAGHQUpbhdOCwDv88BPooIOPy5ve+s/kz/itAzPP2uSWUnFlQthA2jXGRkPD3zq7nd46u0P77PLYazQn1k5VgsSbHivX3IVlbkOre+RFfQc11qS6CqEMzhBNxqJC4oG8O/DIKaahs8b+9LnoMDZI/LAJGKc1Ou15/efPbnmXXrzjGWqckcnNWd
mWpqq2Lpq4MVetqSfvUA9fmljlHE3SExoONeJy815UnbfRhRbPzohOPWrVz/xFs2j+4oyRU6EKFM1IAaBCLso5yX0k8UOe2bBTCJjl6isURl7HgTJ6Ur13kySv43I7RoARYAQYAfcIMIHnHjNuwQgwAvsJAg8++QJd962f0zlnrNAcaP2GyTrBhsMFSLxil0KG/+gNBaCsAAZuDmjI2YeD7YAgavyUD5Y/7qq5Colnl/MtSAJPEncgq+wOqEGRnQBq8+pdtGXN7ixmnVtLNOLusPdYqxNrpo7QER+JhUrgSSz0ewl7DOqdMIlDkHkoyHe3cO4CSzMGSeDtnThEIO6aJ6by9t3cd8to8rpMuKC+3PerLSJHWkaxdcnIqzRHqPHKR0DilWm500DimUU+jgydKSR4013tb1RWxc0sJ54b4k5OzMxMImwiLwiyZVt7C4G8297Rqi2lt72HhvoHaWhAKk2TNGFjM33o8A10zAXteffBjsAbGHlCqw8cpPOqijJZ9WaP9A7SupfX05SyKq1JT2qQuoaStKs/14jE2F+h1XcY349ScnQfxbXfL2Ga+IxFAwvOf6f6xHA9RoARYASKjwATeMW/BzwDRoARiCgCCKHtFlfYxJ1cPpQBLUXMUSbnUYgE3Pqk437ykgWRsw857xA667bcNPge25x4diYbQRB4euJOJcQOOcaQC8+PAgXE3dP3vJJD3iW7BHmUyqiOUD7wkXNp2gEzTOGcMDNG044PPg+fHRY4vE8cV16w/H9Wbrsy/93TK7pMiTsANkwjJLK/adgd8nwl1bfGszg+cMfWrMEBfviJktdoliDx6kdKqZ5KNXIPRF5aEHkIOx0ZnibCZkXorAfyDv2rEnhunxur+nYKK9zD2sqEFl7v531hHBvkGXJDejXCAXn36zdesIUgFtugfT6nvp0+efhreXWtCLzUyMdFAPXH8+q7fe7NJgfibnhvr2AbMyRjV2dPXjWQeLtNiLxCm1fIvThBPMNWalA3exDv5WphZoMSBpFXiN+fbtarUpfz36mgxHUYAUaAEYgGAkzgReM+8CwYAUaAEdAQiEIevDDdXeXhUzs8CTJJJZ+R3daQRKAfdZVb9Z2cD4wtLkkvtJyeXT5BPwTezCsm04TFVUo5AvWTw8ESueC8Yg7y7pf/mTF1kcVI3smfW5F4CZEHb8aJSaqe4pwkX+WVoEpmqBgVrB5amTfklJL5NCU+X2Uq2Tp2ysuf3mlN3qEDQavso+8y3elJvN/ctiUnH1xCkE/T021aOO0VNKqGTJQcIBw7Z9BA/6G+yNpCE3gqId56xS5yKfoho4Gv37yMNz6d+zyYb5SdQkGXCZM+ae5m7dIXKwLPzIV2x5oW+ts962nnW62aKi8T8kr0ngsX0tEXqaV20Mi7LZlwbH3p7emnYfF+EPRv9sdQ463rzigLUYpB3mFcOwdaVw+nrnJYzrUgotuFkt4qLNrrfMNsx/nvwkSX+2YEGAFGIFgEmMALFk/ujRFgBBgBXwggxw9CQotZwnB3dcrP5nW9QairvBJ4mPNjyQ9YTh1zm1JfQY3CnMSsuHWhlQf2RV+b6SmpvR2x5IS/GXmXFuKdwW5r10IzEk+SFQdf4i9UXJW4k+uyC78Dcbd66ClLCKaUzKP3V/yLE0TZzzG3UkGsgKDWl+1C7XTHM/ahiUlB4BnL8Q/ViPx3oznw8Pms/mE6oWOEZoo/UeqkCq9uAsXFJckJfAaiy01Yuhy/GASeqsJKvz4/Kio/zsh3vf58NmzWaXNIFR7q3XDyn3KqxwURm07nEtrDI4cIMvfmnHq//c8XNeLOWLJEnvjgozceQ9OX2rtUp9/aaz3dkWHq7U2K+Yz+DgKJl5pWXfCcd/pJ+nl3Od0buZeQB65HmKb4NU4ZawYWeFdNbqhwgok/ZwQYAUaAEYgIAkzgReRG8DQYAUaAEQACODzAlKHYJYhDiDQTwCE5yLxNRmz8zjUsAg/ztJubqgpPr7Spel
811R5b62l7QMVSlohTV5/7/fWLax/MCZvFBIb6RfJ9cdmVT37uqpyP/RJ4UtWITt0QU1YE3lMDt1LTsLObqhsSz4rAu3dLF23aLFxVLSKIxSeUztHfZaCbLXLibTq7VXOhRblkd4pmD+QSPpLAk2DHJ8+gkooqjcjT3F1Fbkm3xEShCTwvOc78EnlW90rlAVNT38me+oRibpf2F70KD8+2FvasS15oRt79z1l/dJySfE9c9t3jaNLiBlN1IsJmRxA6a1Gs1IAlc+spVl3mOIewKrhxoPU6B+x3EIVwrfbqgBzGl19e16PajvPfqSLF9RgBRoARiAYCTOBF4z7wLBgBRoAR0BCISh48P0YWMsytShyG+gQhqZKfzc/t9zNXjBsmgedkHNFyTysN7jBXo+mJOxzwy2aW0cSL7dU1djiC7EDuJy/5vq478yd5XasQeEYVnlQbTV6W1pxpVYs+dLJLENxuTUvMQmhVyTs5R5B4J6RzTTrKqkWOOUOxIoVuXttGw4KDG2oyX7UVgVfdE6MNl2RUU2bkHX7eQAmhwovn0H+SxBt6vIlKt/VRan2Plh8vftoUKllQTfGFNbbwF5rAUwlztpqwV8WhVb5Cp32pkvsuv48Miacn8JC3ELGww/sIPDfKO6s54r3xzWc/amrWYKe+MyMT5RixydVUIq5iFS8OtF7n6se5diwaWDTUlBHmzYURYAQYAUZgbCDABN7YuE88S0aAEdiPEGhsE7mIgkkR5hk1Lwdbv46yXifrN2m4VwJPxYkWxhH9gsS0I5yMJJ6RuAMufsk79OGVkPnz3a9oxhXGokLgwZVW70yLOcAtddLBagSefk/5yZkIAqClK5nNIdeU3kRPJX+qvOUO6RWGEuk4VcbGaZe+xBNTqbJBOL3uK3YEHqpYkXhQ34HE05ehimHqmTpE/cu7LMk71IeJxfhYQhhWCN8KJEUTZaRziFIPiTDe5lGVZImIeI6LDZYSL5jY/GpKnDHFksjzmx9OGdx9Ff0QeHrs8e5SVfx6zQsJ19nntqxzu8R99Vvp+pPfFcq71VQCBk/cr8Hhj4vg6UO0S1+Q8+6BL7/kepyjL1pEJ3/iIGH8UZpD5NkReEYy0Tho/ODJrucRVAMv6sz/z953wElVne2/W2a2984uVdrSRGNXoqLYUGyoWBKjX6ppmva3BXtJTKL5Ur4UjS2iIjYssZeo2EWBBZZeFlhYlu11tvzPc2bPzJ07t5x7Z2Z3Fs4xE2DnlPc8597Z333med8n0rW1RB5qh8qkakf6uyjSmN2ML85NoWSWPqyaQkAhoBBQCAwPBBSBNzzOSUWpEFAIHEAIgGjAA+hQNidGFtFylHW730idaN260NqZWGA/skQo0mlbl7UGitKLlDoQd1nHZVLKqBS38ISMc0OSDAWBFy3iTmweSkhtyq1d3TsxLqcniQ5uTwtgmEwplJ1YZHgWablnUJK3zNAYAfXvntgSrH9nROJpHWixgCDv8PeiiY102S5zxaJIoeXkL/s/X20nS9VlMzLhXt874WnOSPVD48rOH44zJPEGk8DDeUfLZRT7ykhNYi+kDlvXNENqZidLL3aq6IyEwBuVW0DfOPQ4jr8dgfjhomr66PF1ru79a144i48T9SI7WC3OptV1IUYo2okFmWj25dFQEXjRvjbcgAkM0wdUalZE3nAzsFD179xcDWqMQkAhoBAYWgQUgTe0+KvVFQIKAYVAGALxUAfP/9DkpT2NJgW7NA+G2EAk6qhIL4FInWhXJNTTtd5PHIdhZWAhJpOpO+fUkMFxoJoBbpQs0STwBEkw5RLjtOFgHarEqF5T+lTmf7f/UgrGrzeHp5nmJ1aYjgWJl5ZeTumMQAJhqG1IoRUNghconjqYJ0FvU7AXTCxA3PWwV2dukLA7bFQDzWowJ/VL+r2USn4VDZR3/c09lMD+g6No72LjOoUhtRX/OCPM6CJaBN57K77gcc2aEZ5uHMQjgdfqi8RNWn8oQkGVns
JS+U2IPChk25jZiBs3W2c18ILRzRo7ib4+djL/Aa9P2Nlrur5M7Tuzi/HsP0+kzVuD5B8+Zw5LmUHelBSesjsg1NRck351rP7ngTMaIgVeLBxopW5+g07idw3qFsLwAoZTWrzcfEHiNpZojAMpmctSaFVTCCgEFAIKgeGDgCLwhs9ZqUgVAgqBAwSBLl9vVB9k3cKG1Bo8UOsdLGPlKOs2Tju3VzFvz+Jn+V8Tp05mr8qQ5ZySeHd3H0Ez+u3r0enJxdoPnubr4k/uoHvQNModM5WyDzvHlVOoU8zc1JIycqAV63bss0690qfQgrTKKOmn0SeFklvAIpPVTPQyow2oT2G64MY5FXH5nvcX6fecHazZpSdqtARe1scTKOvjiSFQdpXXU05JM1XM2EbtzJWzvT1IOCazenNoqZ40ysvMpzRvUKGHdNqcorNMCTzsH+QdVE4sKy+sNZKPmhJC3Wt9JV10dEq7KYGnN7Do294ROu9qRsxU8UxNw4aYcs8tp2SWTqs1uoiEwNu6exe9z4i7bbtrw9Y8bsYhYWSe2/Rumesf19Y7S9dxwhTX1WnzKwNY6JWZMvOJPjIEXk9Ce2DK5P50/vfLDjmWRucV8r/bre+GwOvq6qTmVsYKzw1Pe//+Ed+h1JRUys3ODSPyzAwseKAZHkoam8f/2vB4+Jc6eRdHRyFshH8sHWidnLe2r5FzLa5h1JOrazL/0svterEap+rfxQpZNa9CQCGgEIgdAorAix22amaFgEJAIeAagZ31uodw1zO5H6it3zZYjrJuozVze+2rWkO+m+4ynDbpwnMpmb1EkyXxZMk7zMtJuuwU5uC6nDY8cTtfyp/m6CdUejWkSumx5xNesWxuDT+MTCwQp10dPL2JBUiUyWf0UWJukKSKhgIRpF3P0nB3zcRJHkqc5KWiS3NDlE4g8Lw1+VT4zNGmcGf0MKYtv448s0JrkAkCTwwEkTcivzwwT/6I74QReFARfbyvk96oYUYSNtnxtdRFXQnBTl0HtdG1e/wOtPqW0p9IpRRU0Aj1nbbfqq299LeGUFJhfHsinVbvpQkdweL1xf84lKUJJjMSz8devYapwDLX5mOvv2RI3OnHXjLnDBpdUsZ/DPIoK93jymDFLKb1VXX08uLVtJ6ljYoGBSiuwXGVhXT6/Cl07LEj2ZcUXa7IYjMjC6ahpJ7EdurXEbGIoSzfS9+aOYfSekt4SFBs1TWxdGcTctUpgQfybu8+v+GJ99wVYdDMmzyXRmSP4D8vKiyhtNRU/nco8vAZ36P9QNKMholF1+4U2nWD+e+lvIu9FAsibzAcaGWua6M+Wufa3j528j39rly+3a4f6ThV/y5SBNV4hYBCQCEw+AgoAm/wMVcrKgQUAgoBWwTioQ4elA+egVpZqd4kni4Ua0dZW2BMOhgRU9033Un9VWttp/Tccl1AkQcS77HkDbQyMZjuKCaQqXlntFhK40b6+B+/NiXutGPGL7iRMkdNsY3ZbQe3Nb/M0mgRR1czU3j1hKdplpaX0xnnhhKSk07rpaLRiTy9NBrEHV//tw3UVx2q6NPjkz49lVJ+mRuodfb82/8IU91px3j7GNHj87MqiYX15P16kMTTE3jooyXxMnO+RnmFR3AyStTyg/su9nzXSpYzK9EEiQf1XU9pF1Pf9YYp8PTkHabVEnidCf1U5+2n92t66IMdoao+EQKIvB/X+BWEr89OpHU1+2j9pr2c5IJKEKc6d04lzTstVLFqtgVZ8k6Mv+6y/+F/jUTtZxQLyLs/3vyuKdI4FxDpC+85mfJH5kiciHGXR794n7Y1Bs8U5J0vqdl0vtOOTqbSwkRK7SmhEe1zOIG3p9GYnMUkTmrgack7jqkBgTciq4zmVZ4ZiK+8bBTHAcQmzhqq1zAykanvdv8zhTpX2btGp05LohF3+pWG0WrDoa4cMMxn6dgeJq/F70mk15qRodHCJdJ5cI+X5gfVw5HOp8YrBBQCCgGFwOAgoAi8wc
FZraIQUAgoBBwhAKKspd2alHA0ocPOeMDNZg9uKcmoReZjDyV9rlQqDpd13V3v/idL3okFtSSe6yAMBrZuW01bFt/B3+lnT8YmApeQkbEk8WRNNYwwuP/a55iScKchPEYknl59N/ZkH+WV+2udock6hVqdhwx5h/FceTXRQx5G4rVu66CVj6+hjn5zoqWf3X9picmBpQWJB5ojiTm+GrWyvHKeTpue/TUqKTmSqXH6eEqwtj6k3szCam9FGYm056AmqmZqLi2BB+IO9e7gPKtvIn1WkHd4/zcfhxNEIBwEUQMSr2/5TtqQ2k0JOf6zAZmTNCDO62W8DajMX/5wFk0ab2zggTFIm130+suObp9RJaV06Zy5USXw7Mg7ESAwgPvmtXfOppET8l3VwcNcgsSTJe/E+iDxZnrPsiTwnLjQ1tXvpu5uv9IyobCVKUc3GZ6FVoXn9aZQUYFfDQgVpBACamvkNX2UTo1L5W3Ro03imamrHV1og9AZRGMTM6vBPY9yALLOtYMQmuESqv7dUCGv1lUIKAQUApEhoAi8yPBToxUCCgGFQEwQGKo6eEj1y0hL5sohpNGB8Kll7oXx3qDmgvqhmZGeqHXXO1DvTjbuBFYXz3vL9bLdbfuJlOPVj95CTVtW+5UttqOCHWb+apGD3vJdI60nZUXi9TK+GUq8no4E0pJ3xdN7qXiGPyVTkHdGtRXld+HvaZY2azSPMM9IPCuDtrY3UiurE7evr8ZwSV93D/Wzaz+bFfvXNi9LpfUUNTJyy7jun1Dh5eYfToVFR1ATuxY7WM01fQOJ98GeDtrebqyKQ/+R6cl08djswNC+znbq3WMcr3b+vj2MwGHus9tT/Cm425r76PE1xoYhYtyWZesJ3UvSUgIEHt6DQgckF15Q47EMQUsS79/3/ZlG7Q39rPhgsr9umlVDKu3kURWB+9euv937P7pgiV2XwPsgrXBv3vy7OTRxWhH70sSdocU7m7fS61vf4KSouM85fmylQycl08xJwVRlbXCT+0+n7hbrWppPXbeMalZZKzdl1HfadbUkHlR4/rP2YyEUefwea8um7bdaXz9GYJfdkUZp042JbunDYR3jwYFWJl6jOqz4nQQiD8Ygbq8rmbXd9slhX9Dh94FqCgGFgEJAITC8EFAE3vA6LxWtQkAhcIAgAAXEYBJnIp0RD6BI/xHEw3BIX8IloTWL6Jr/TVdXSTRUeNpagfs2VdFn9y/kD8doIEFkW6xUeNFIVbRKp519yeF00qWHh2xTGBTgh3D8zM6Au7ExKbyyZhUfu7t5D5VkF/O/F7M/xd+1E3f8j7/Wl0zjZ8CesnFfVU/yK+98/V3U0h+sjybmgWlFMuPd9ASep5IRXZXGqiYxdmLZBK7AS69ltc0+8buvps2/2DBEEHnb21h6a52/rhhIu1HsofrYYuO0tp7d26m/y7o2JlJoG9m8zcn+i23R6m7a3mJedK92VQ11NvvnLGHpdKnJQaLJj5mfuMPfQYKCjPjnH84LSbP0VNdQ8rPvUevnzCnDoH0wKZesiDyYWpxy+OGUzOaHWtGq9a7dQHjpW0JhPiVNHk+vvFVLLz+1WuaS4H2EccMZF0yhcy6Zxurw+QkNpDvLmqg88MUWSsqrpvHjtvOx6zb7ly/IZS/GX3rZFwtZ3mSuZtY2btzSX0pFzSdbxiujwmtuaaIWGFdgT8dtpMSi8HqQ+kWQTntY+aE0aUQlM7VgacQD9wfvB/V1WRa1vZxAux/tME6ttYg6Wio8fKGUxkgmpKPHc8N1ZGZgIX43IX6YqBiR+kOxt0KmGERsqikEFAIKAYXA8EJAEXjD67xUtAoBhcABhECs6+DhARK17aCy87HcTpAr3SztT9u0RhbxDL1QQOx8f7mpaYVd/HpTC7v+2veNTD52vLeEu826IfBiZWihd8V1skd9X7jTalNqjYg7XFv6NFKkxOnJaRB3K3f4yTujVpxVTNMrpgWIPCfqO8ynJfDWjm6ihF
Q/q2pE4gnX2YrsrJBQkD6bcd4rlpAVflBLae/spqRkL/Xq8qVTz19gSubJnoMdiQfl4LZmPzlqR951NrVTbdUO/9Ko4cUe6HNSPZywC2LmJ/BES2TP+wvmTaO5p1RyIgLkXe4fllBrRzt/mbVtBan0xHF+wwp9kyHw+vbuo573P7GFadnyRlqyWl5VhNqE4pz+/NR8Pr+T2owg7zY3ttMRh66i/DzzlGzMW5DmDSHxhOptdOOltvsCiffRonWmSjxB4MmSd9oFT5p1Js09eA6v2eZjqrEERrCLtmlei79258CHmGGNPJPoxy0NvX9sN2nQAQoxrA1ldTw3mTiNnGvNzEtivVdV/y7WCKv5FQIKAYVA7BBQBF7ssFUzKwQUAgqBiBBAPZ02poaLdgs65yXa1iGTeTCJdnxu54NacPf9T5DvyWddTeGGwLNy5wV5hxeoIhAfdu6j2qBjReAJV9y6plBXUleAmQwSxg0g7j7e/Tl9Vrec97yg4hv8T33h/jdWv0V7WuTUdCdVzuYknlMCz58KytRECe1UX9hFCblB5QlIPNTD62Hur2h761r4n0XpaYxw8ZNB/hL/TJhkQuB5Nu+j9Lc2URpLzfV4C1j9uCAxpIUpuZKpvBb6ayK6bXYk3ta6NlpUZa28w9pa9R0lQ2KXQGPz0gNKO54LOqDA08e65B8XURIj75Juf5yr8ewIPIw3I/FQB+/7886mHqaONFInyZJ3fI1NDVTTlCBF4vFU0YG0UYwVBJ7Ya0ZqEksx9DDVVA//HNaTLYK8Q//TTlomdZwjsvyur2hOCDwxBqYWNcwIRZ9Sm3tYJ61PfkMqBn0nEHjz557Pa67qjRdA4InmlMizI/Davqymtq/WhYSTcfBEypg5KfCzeHag1QbuJE6o9ZBa6/UkmV5brg7SwSB8cQfTDdUUAgoBhYBCYPghoAi84XdmKmKFgELgAEEAxhH7WqJHtGiJFVkDgWgqtmJ9bHCibXzoKWpf9LSrpZzUwbMi7sTigsDDv5GppBM3WsYYKwIPixop4NwAtnnHTtrCXv4/d9HY8hE0YXQ5TT9oFL3X/Ro9sukhw2mvnHAFzS28hKfl2Snv9BNAiXfylNmuCDxB1qwtYXXsNASedg0Qec1trdTCXinMxaEwI0i4oJ8ZgVf469f5NH0JGYz0YfYS7CHdzIUyGiQeauL1NdWHpdQm5hTQXe9upa3b7T83UPuON3CTjGxFG53rdxBFJq2oiaZV4Ams7r/3PMq/dwl51/sVfE2tbZYKPDHOKJ0WCrwzjjzClMDzMeVdP1PgyTQQeGhLqpKoptk6PVBb9w1j9AQeh4Zhgy8x0lOSQ8iWzQ1t9MDyrYGQBIHXy1j6Pg1giYy5T2IptKIhlTaLzYWGzxAY24xpukxma5Z93vzvC/Tmey+6mgcE3sXzLqD65q6wtGEtgScmlyXyzAg8I+JOH7gg8oZLCQfEuY+l+cqmXeuvLTjXgiR2Mt7VYQ8MUvXvIkFPjVUIKAQUAkOLgCLwhhZ/tbpCQCGgEDBFIFp18HjdM5YaJVIZQd45eVAYUZBGO+uta2/FwzEiXbObqe+aH3nKVTgyCjwnJCgcaDc8cTuPBfW9oDCSbbEk8KCAq2vqDFMUycaGfv96dikn7UTjLq+M7GjzMYVbVz21Z9dRz0zjenEgtyZlTKfvl/+K3lzzlpNled/p5dNo8udjqWepfZ0vMblWbWVF4KF/Q+s+amjzE0GoX1YwkFLoqdxAXvbSt5wHPiPPlgaqb+umisKx/G0o8ND0abRibDTSac2A+97SL7kJR3+dvXmFlrzDfGOYAg98Ey5VKM60dfC0CrSHv38ET51FE4ROTV14PUGjGH97th8j0UDgzT2KufYy8kuvwHOivsN8gsCTUeFp06ox1ojA014/WiLvhTW76M1Nwf3OOeF95jrcw2ssGjUPY0Q9Hj9xJ1R4+CxBTcGxUSDwNm2tpvv//QezS8Ly53fe8HdTUt+IwNNiYp
Vaa0TgNbyyjHy7rQ05xPyekgKqvHg27Wb1Mocq1VQGUCMDC5lx+mtrMJ1rVf07Jyek+ioEFAIKgfhCQBF48XUeKhqFgEJAIRCCQF1jF3+wddNQywnEHR6yUBzebfFsKNuQzmumKHITWyzGiILntacucDW9FYGnJe6AI4qRy5CgX/72Eh6LUwVerEwsEEuk56kl70Dc4QEWD9ggLza1Dai62Dp9ua2GJB7IzF42ILunkE5MmufqrC7KOY+672l0NFaktdYc101tddbOypt2bwzMDRIvk6mmcmd/RomF4UqwrOtepVaWZpmY6KUR+eWBcYLYF4ntdgAAIABJREFUAkljpGLLe/x5R/HLdgaBJxpMLfqbDdLwUxJpy3ssfVEUaGQD8tI8/KX/uPG7krIO7IzFe4tmj6CMFz8KCWkfM1IAiQVVmVV7/NhS2l4YNOq47rL/4e7Eneye6mRfLmibmWmF2fy7d7ZQ10DZgfs+9Bh2a8pkJh9ZPbS9LHgNpDCV3eyZ42jB1Ck0vbjINHxgkcUw+tnLK/g1jK3uqm2k0SO30FGH+0lfs4Z7JTWFXSMDabRCpTmuOXIFHtb856O/p83bQlNS7a4ZqO9OOWEe+0xIMTSWaXi8ixoe9xPBHez+TtNcL2JuI0WekYlFd+1eanz1Q7uQQt4vOWoq9U8a52jMYHfGOULRBmftSBpwhNITqdv43RIr51pV/y6SU1JjFQIKAYXA0COgCLyhPwMVgUJAIaAQMEWgsbWbk0VOmijCjoeAaLjeZafjoZ6ZXMSgHp+Tfdn1FQ9Su665mfqr1tp1D3s/ZckjYT9zS9yJiUQaLVc1MW5CRoOXObKSxl/8a8fxyw6AMYmRYYnMeEHeiYd2EBiCyNzZwRxN+0KVmkYknlAeNbW30PSkw+kQ7zEyS4f0ueTIBeTEhRaDQeD1j2eGLQtSmTJywLzBZGWtCg9d9qTsoI2TXmcpphkhIyo+qqGRH9fwn5XljaA0b7iDLMgvkb6r5bYyf307eaZMd7x3uwG//2ADratvDekGRR51+cmxhBw/sbXzvc3UuTeoYizNTGE1/4IutPp1/GfuV+ctOpHt9YVQAq/b52NphH4nVKumTaOF+m7WjEM5gdfOYtSb6DhJn8WanR0+2rPLv3ejNNrtZR0hxJ2Is2hEJqUyYg5tWlER3XHi8ZZ7uPGt1QSRZWtbJ61dv5vKR3TQ/LODilSzwSDxxhVm87c9qIXWPp3yumbYQSb1vhsVHtR3+NzMYp/xZk6vn81ton0GDHQauyDy2cUtSD0tkVd+VwZzBR6w3x6Ifs/DL0jtQ3QSqtmCb5zpaNxgd45FndhYOtemsHR5ELaqKQQUAgoBhcDwREAReMPz3FTUCgGFwAGCgGwdPJAiSJHlaaRMxQLiTv8w7BYyEIIpTELWyFR48d5Q382NE61efRcpcafFacPjt1F7zRqu1rEj8GJN3iEuELJG6Yp2Z4tadw8998JA7a4gcSfGadV32rl8MzdSf26QKBK1v/a2+hVLV2b83G7psPdhZlG4M9eRCg9EReLPchix4KXWbR22JF5tw05q7+7g5N1bxcZquWOr99Fx1Q2m5J02cH1abazSaKv3ttIfloWn+upBbFi7hxrW+M1DcH+XZobW+zM7lIuYC+03+pqp76n3w1IbZUg8QeDBvOLSOXP5Mmakcvdz1q6/RjEKFd5H2xPpo5ogIblqQgtX3ukb1HfF5aGOqXYkHgg8tOVfBevgnT9vJ1WUWys7MaYiL4NSWTptQfIIytk32/G1bzXACYn37ct+RuNGT+Kuux7GzOqdXld0ddO1exvpqKWJ7GVO7JazWpFaZV7a9CSa9qdcHqZQkblR34kU5+w5R5G3tDCqOEVzMicGFk7XjYVzrap/5/QUVH+FgEJAIRBfCCgCL77OQ0WjEFAIKARCEOhhOWt7WBqtWXNSk80ttCA+8jK9rG6afWF8t2tEa5woet69YjX5brpLalqteUU0iTvt4p
tYLbzW7au5esms6cm7JN9GSvaF1pHrSp8jtSerTiB5+cM1S6uWbXjIv//ppbRh2w7D1OGG7npq8BkbDehVeMI8oK7F3//01AupLGmkbCi8HxR4aF2/baC+ajliufTWYmoKZriaknhJTPoD9Q9SJJe3fUov5z5hGhsIvAv2pBkq74wGadNqvecuoLT5Fzvat2xnOxJP1C7b/u4mrsIrYeq7VAv1nXZdGFikM/Vd9ssfD7jU+lNJRbMj8UDgJVw4myvvROMGNK2+sGvLqQJPzAcS77HP+wNGFkibrZoYqkpEXyPyTsyBdNqL2cuowYF22dqdVLs7VHEoQ+JlpXioJCedpibOpY7GPNkjle4HEu/N/75omk47dtRE+s43gqQ5Pg/wuaRVWAvyTiw6/54kqqg2NwURJJ42dVarItu1rIoaP3emihZEf9qMUGdaaSAGqSNqihoZgERzeVHHFu6xrUwJb+SKLLueqn8ni5TqpxBQCCgE4hMBReDF57moqBQCCgGFQACB3Q2dYQ+2g0HcaY8AyrZ4LyaOeKHk6WAPOKil1Ve1hnoWP2uZTiuUd7Ei7gSGqM9X99EztPHNxYZXtta0AsRdavvrYeSdGNjJSLxIiDzE4vUkhSlujALDgyMUJr7efvrxXX8yvSutCDwM6j5hRWCsIPAa2pqZsUcPHeI52nEarSDwMGnv2m5bJZ73l7lUdlS2YZ2v2g/2EV4iLtRwS69Io9Jj8ylzVBpV1aykD9e8Ry9+uZTvwetJoSkV02gqe53Jsmc7nzYn+MwAQ1pt/iWXUdLZF0WslP20djV9xl7rGlfR+qaVfMm5oy+mkVnl9EJVaEqvMBtB2jNIt5H9CbT2jXXS5N0vfziLJo0vIk91DTex8NfHMzbsaO1oZ2pgH9tfkGDNTk+n9keuD1PuCQJvxboNtHpDkLQe3dpBk3pY7bVU5yl//7c8jdav9htNLDs0vD4dUmaROmvVnr9wvuHbb22uo7+/vpKl0IZ/qXHkYQ2m9fCA1L49mXTu+NOpPG1kTL8UAZG3eWtoTbwjRk6i1s3rqIW9Wjevp7LZc7kCr/SEuSH1B8/Y4VdmaltFdQLNv8dP/hu1lIs8dPy3wvHEZ0jXqvW077M1vE6mrCGFqBEoHGktD2qI3ozUwMJp2FpXZDfOtbhVQTiKe9bp+qq/QkAhoBBQCAw9AorAG/ozUBEoBBQCCgFLBLR18CJ1lHULNR6wY1VU221MRuOM1GUg8dB6B/6E4i5xaiUlX3guTwfFGKQfQ9WAlGUZcwqnMesLnaM2HhpUd5mjgiofkHeZTX+3nb7HM47acr5v28+oA66hDLZns5pXGANcQNzhTyijkI698M/mcTkh8ERtq9bOdurwdTom8IqziunkKeGph77n25garzugyEuc5KHESV7ynO2vWwcSupaR4fomakYi9RyqRO35b9u5kbbvCnfTzc7Mo1EjxlH69h3UetuNrs6h8MmlHGM0I/WZzKRXvnIrI+/WmHY9acwkGpl2LK3bU8TPEkor4ZR65qRSOou9qjfU0dJX1lD1xr2WSwryTnTK+f0S8q7z1/+zM+wQY/rOP5aSLpzF7jUfewVre+6s3c4Uni/TqvVB8xCMGZOaRleWj2TmDymUl50tTeQlTR7PUqXH0/qqOnrmndX0XPemgLlFNrsOUtj1n8nche2coa1UeHPv/Y8lXiDyKlhtPKTV1uxIpZqdadTVmE11uzPooe+fROnMrADnPlht3QN/4KSdvgmiDGRe2ewz6bHmNnqsxdzhGUQeXtr20bw+ujQrgy7NDq0RKfq0fVlN7SvWBYgjGSJvOBB4QmUYqYGF02tAEHlOnWtV/TunSKv+CgGFgEIg/hBQBF78nYmKSCGgEFAIhCCAenZdvt6oOMq6hXa4GFkIJ1orcgoYaIm7SBx6neBpRiCJOWTJO9HfLYln5ZpohUs0CTysA1fjfW0Njgk81L8ryS52Aj3vC+XJnsYggYcYQEwbEXdNLL
131brPbdcAkVdx8222/cJIk8pplLXwDv5joXLEfS6bGreybjNd+cpt1NUbahpiFEhuWh+tuPKZMHJS3xck3tJXw8nAeadW0rzTKsOmFio87RvCsAMEjVG6eN3fr+ZkH5xcU72sriYjsJavXU93/N8/TF2urxhRQWPT0vkyZcxgQkaN5z3ntEBYj1etpifYK+wMcA1a5bSzAVYE3uV/e5P2dtg7j4LqQoYx3IyL0r08jEd/eLJh3TnHF5LEAKjt1j9wr2lPQZShQ+bYCfTAvCtoZbc7YvHlcuP7EgRe21d+NaBWtWlF5A0HAi8WBhYSRxro4tS5FvGiBp5qCgGFgEJAITB8EVAE3vA9OxW5QkAhcAAhAAIvGo6ybiGTJcbczh+tcXb1+oaCuBN7s0tDztn7K8cwuE2nNSKzhBLRjNC0IvA6ettpV6e5s6s2hRabFA/nvl4fHUdnSNfAM1PfyQCHPdc1dXJHWLFXoS7Ujpcl78QYqPDGPOlXeco2vQOtUNSksRpXLcxJFUpQq2anvNOPhRLv3hNulg1Pup8RiYfB+lRd/KzxZ/PJN6kiMLcgkk/5zjUEkguErllzQuIlH3cEJRbmB6aKFYH33fvfpi6mTJUh8VIYeVealcJITUZssqN97EdzeMq2kzqU0oei62imvBPdtAQefvaL+T+g1KJSV8uZEXiYTO9Ca0XkCZUu1LDFl5/lKpbBGBRLAwun8cs416r6d05RVf0VAgoBhUD8IaAIvPg7ExWRQkAhoBAIQ8CoDt5gwiTUSlaGGoMZj9VaIwrSaGd9qDJpKIk7ESvUXk3MydeIqEhhNe9Q985Nayr8reNhQg3oBJe3P/mM3v7EXJVm5kLbO2Y34aVtHmaMgrp6U7Jm0A8qfkVvrnnLdg+RkHeYHPhDcIW1rVSXK6s/o+YBh1zboAY6jGYEXgYj8mSanrzTjhGpy/iZ2bXyr5Uv0X2f/1tmqZA+1x51Dl0y+SLH4+wGgMRLf/GjQDptyH4Y1jRlFO396XmG1/3Nf/4brdm4iRG6iTy11yp9XZB4SKcdUVxkGJaevEOnWBF4L3yxmV5gZhZode2sDiMzHNI3qO6yvcnc5RdNKBSfuvpUbkbQwVSXsWy73nqRdr31kuUSegLvZ/OupJTCEkpKkXMl1k5uReBpVXjaMUZEnnCgTZs+gTJmToolRBHNPRgGFk4DtHKuxee+qn/nFFHVXyGgEFAIxBcCisCLr/NQ0SgEFAIKAUME9rV0M1VObB/27KAvzk3hRddli5DbzRer94UTLYgyJwRVrOIR80Kt0cke2GGwoW+REHitOd+jXs9BjsIHRj6mHkLtP5AIskogNyo8vfoOgQoC76bKe2hq9sG0u3kPraxZRXtawovno7/btFkBCurc5aR7eIqq1V6dqu+0oB+RVmpbD8+KvNPOJdJqocSDIk97z81a9ENq6jZ2/LW6CGKlwhNrgsjbsvE/tC5pwEij+xLyTayghKmjeNpeF68xGLqXC6/2q05BIoHAA7kgVGpGe0FNPKTTXjJpciCVNoGp7aC4Q807o7ZyTx3d+M67YW8lR5hCW72rgX7/0pch80KR1zVA5GWnGBs+zPvaWLr8+Ik8hRZ4aGsBOrqJJTp/ceMPLHtplW6iIwg8T1YOebJzJVYI7WJF4KFnwyvLyLe73nBeLZGHnOPkknzKOeUYxzEM1oDBNrBwui+9cy1+H+YzkyfVFAIKAYWAQmB4I6AIvOF9fip6hYBC4ABBAMQDFDlD2eDw2saK/MPQIJ4b4uxmKcd4QAZBNVg17uwwQdomFGA4S33LaPqbqeus3bxO02gRB2ohiZprTkw7Nu/YSQ8++4JpSDs7aqizL6h+9M3cSP254QXxQeBdP/G3nLzTNxB5ohWzWndu6t2J8VqDiiQo79qtr18z0wq7M8D70yZ+jXKy8qljyePUs3oV9azx7yP1/AX8z7T5F8tME9Ink51VOiOCQPQItdaMh5zPIyZd8a3HHcdgN2
Bd4kp6ybuI1g8Qd9r+E3qn01xG5E3sm86uuST28rDrroffA4v/8zo99YpfdapVgQmVmnDKNVr/gtPmEF6y7ezFS8K6gty3u/bNXGjFZFoVnkwsE8ty6RdzD+FqUHyWprKUaXxGNbfbp03LzK/vY0fgCaWbMDbB+L8cczptLCyj9PLRjpe0I/AwoRWJh/dBjGWPKqYR82YNadkIu80PlYGFXVz693Gd43Mwk9WdHDCMdjqF6q8QUAgoBBQCcYSAIvDi6DBUKAoBhYBCwAwBkGZ7mfptKJuRw+tQxmO0ttY9NV6IOxEn6gh6PUn8YV3f3NS/E3PIEnh6MgsEgpEa0O5M7VJpRT08PXnX1N8emPqsqT+gcSlT6QzvTLvlXL0vroNepjoRzrJQQLYz51MrAtqIwBub3Udjs8Prs21uTqDNzf7USLSRZeOYM60zJaTM5rCXbKZgw8P3+1vW00VLr5UZZtgn2gQeyLv70q6zjQdE3jWdd3FyBuQxav3d/8zL9NgLr/Kx+jRO/Az7RnE8I6MDpwSeXoWHmntQ+1kReFYGFtoN/+6l5bRuV6MtBoK8Q0dt6qW2dpkdwWy7iK6DLYEHjFGXT3N5bygopb8ee4ZjAs/KhVYfd3ftXmr7cl2YGs9TUsBSZifSqMoKZnDSzd2ycR3E22c59jPUBhZOr4V8VoMRxjGqKQQUAgoBhcDwRkAReMP7/FT0CgGFQJwhsIM9mFRv3E5Zmel0+MHRrd1Tu6/D0NlxsCCIZ8WBNlUWaYdQW9k50Q4WbmIdK/ximUKrJe4EmWWlBpTBBUo8EHlbduwK6z6mvIxOPOIw+qz/Tapq/oo+bP6Imvs7KDOjnN0XFTSi5Eh+PiDX0M7wHBw1Ik97HTQzxaqWoJQh8LT173JT+unQInu16Rd1zE21K8GWwFtVu4uqdtfSk18F0y6nlpTStNJSuujgQ2xhx/WDVNSyP55n29esQzQJPFnyTsQiSDz8G+e09M036d8vvhpInRXXgzZ2bVql9n2nBB7mvOHtd2lVXR2f3o7Am8bcbu848XhpnO2UeFryDpMaOVLHgsizI/BwDkhZ1pdFgApv18FHSe8fHWXUdzIT4syLcoKO0QKXeCPy4snAQgZXVf9OBiXVRyGgEFAIxD8CisCL/zNSESoEFALDBIHnX/2AHnnqVTrpuEPpzfe/oMNnTqZrf3RJ1KLf29zFUkPtCYWoLaibKB5r/hjVuLNzoo0VPnbz6h9Mtf3dEHjPbUjhU1SMu4omFJeFLW9E3IlOUI9EywUTRJ5oIO607Y8dr9D6vlADC7yvJw4mJJbQT9NOs4PQ9H3Ml87SxLBnM7UOSMseJjWyMg4QCjxZ8k4EBBIvK/8gUwXer1/9DyfvrNqtp5zGyLzwc9SOwT4Pe+RK6uztYKSLsWvrWTlfC1nmhSa/8UhOai+9t2Cxa4z1A6/KONPxXFd33MXTadGQPvvMa29wVR6alQst+vivGb8izw2BhzWEoYU/ldBPXumbU/JOOx5Enr6ddejYkB9ZfQ6goxGRtzXlbT7H6K4THWFuR+AZKR+xQObYCXT1aZdJr3V3YS7NSIlOfTXsH8o7/Rcw8UbkxaOBhdmBoZxEEathq5pCQCGgEFAIDH8EFIE3/M9Q7UAhoBCIEwSOOvMqevr+W6m8tJC5WLbT/G8vpB9+6xw6+7TjohJhPNTB0xpERGVTLiexM6cwUri4XCqqw6zikkmjXbsviX7zaUYgpr7ELOpPzOb/Pn3qTDpj2iGcAIBSC+REY6vPME2QFzhnNcnwfqyaGXmH9YyUP25JPF7fiRGSMHmxMqgQxG4H69fK0oeNGgi89as20KmT+qhXl7GekMTiZsacickDjJNugq4SYwJShrwTU1mReCAgsdcLltxB79WEuwH/vPhMmpQ6wnBfIPHa81rp3hNujspxv+RZxOveOW1ChSdcrU+68mp+fSZ2dlFCZyf1tXUSY1j5tP35OaxwICMd0oJuqKI+3l3XXE
VjRo52ZaiDdNoNjfX0yFerQhTNIO6QNjvdxOHW6V7N+mPvMBOAIZBVq8v+L21IeoPvUWtwm9MzhkYxIi+3N5QYNJpr3QN/oNbN602XMSPwymbPpbLZZ9L/q2ugld3mnxHTvR66NDsjauQdArVLTY0HIi8ev8yyupaAKX4nqKYQUAgoBBQCwx8BReAN/zNUO1AIKATiBIGpJ3yLqt55KBDN86+8T3956Dl67YnfRSXCLmbMUN/cHZW53E4Cg4gOVoDeTe00t2tqx9kRd6IvisTD9MNK1RONeJzOYRVXkm8jZTb93XTKuz9Jp+qGoLNlf4KX+pKKQvoDnxvPPItGZBVZ1nqLdTr0+t5a+mOnv76ZUUP9MX3tLfT7aeqpNCGpVApWK4Wh2QR48M5ixdxTWD1CXB/6enjVH2wlX20VTZ9uTNJhXhB5nszQ9xMT2Q+zJ1NHysiQpZ2Qd2LgM9+8ImQOsU8oB1vZvbdiz2a69KXrA31A2oG8s2vjckopf9JYSs4MEmJ2Y8zevzf1OkPTCpn5Hu17hRORII8fYym0Tz3wb0oYIO04vv5LI9D6QeJxMs8f95Tx4+h3v/wRV1y6NX+QUWPK7MVNHzOFmZirMWkzrcx4MDA1bhUjd14QeTPar7QMoWXzOlr/wL2mfcwIvENv/7/AmBVd3bSyi51VS9CMBvXupqd4okrciQWzmVu0jzGWVkpZ9B1KIi/Wn59uriurMXmZXn7PqaYQUAgoBBQCwx8BReAN/zNUO1AIKATiBIHzmeIOKbNInRXtlAW/oGt/fCnNPta+xpXMNnbWBx0+ZfpHu4+dOiLa64n5nD6s4SEQTrRDRTSa4WDn5GuWShsk7waYjQRGang15B37cQJjPoRz509OPM0wrVbEBZKkMDvFVgXk9jxf7v6SXvZ9ZTqcE3isad0v8W8ZFZ4wqEA6Jerc2TmJGgUhVIpdLCUdDq8gjD544iuq395E55zTbpqeGsBPR+JlpGeTLymbWtKnBZZDzbuFr73iGMKLDp7Ja+IJshoTiNqFYrJr3vozvbntA/7Pf4z6Lv8TiBon1bLUy+xSyvCk8X7ZB49xHJN+gJv0WZEG+6DvPwGlZPK1v6eFK7+gqoRe25j6y4s5iXfTj75HU8cfxPFBHTKOj427sH5yjOtkhOhQfD6ASAGBbKR+1ZN32riF+hDK2r6BSgoyJJ6VCk9bi1KsNeF/rqGssRNtzyNWHfAZ2cKMfmS/fHH6uyEacQ/V70G3sav6d26RU+MUAgoBhUD8IaAIvPg7ExWRQkAhYIMA0lPf+mB5WK/J40fR5INCFTCDCSYUd8+x14P3BV0i/8oUeGhXsVTaaLShroM32MoDtw9n8fqAJWMeASVeavvrlOzbxC+ZYNpsv5+k0ZN3mgsLRBQe7scXldJPZ59uecnFMs34R20PW67N+TvG6OgJPAz6c8blhmNl1ZdO7rMMlkacnpJMm9bW0dv/9ptLnHsuM4sRDInFZEmMQ01KTaDUlHRmyuFXRu7LOiYw4smvlocYVjiJ6/Xvfceynh/mAok3szPPNG1WrJfHapOVZZYHlk/KSKWM8XIqR7OYnRJ4wlEWRhR/bXuRT+v7+T3MiXQFrUxup6VJvbQ+IeiQmciujST2XyL7T9sW3ncHJ++0TXxGOCF0ZQxNnJyXk75W9SdXpP+LmpK3WN87DBLgI4i86W1X2KbTGpF43OR3gPQXCw41eYc4UFuurqnTcXq0298VTs5O9B1KAthpvMnsoIvzIlfdOl1X9VcIKAQUAgqB2CCgCLzY4KpmVQgoBGKMgCDGsAwIvUeXvEZ3XvvtqNWbcxs+FHd3XPedgANttAk8KHGgThjKFkviR+wr0oexwSYaZc8D6hsUFEfqn10DkQcS797/rqR1jQPpT8n+endWTZB4oiaeWV+3D8qma2/8hL+VsOlT+uG0dsZuMXVUMitsnxKs2SfGcndRHXkg3tMTeOiLGndWBhV2mFi9D3Lp06dXUu3mBq
76AYGHBpMIM6MIMV92cUaAvMPPtASeSJ/tbmsgX1tjWAiJnlTyZuRSktevjEMDqYJ4Xv72ty3r+Yn+uz5bQ9uadzPlXbj2LoPNX5iWG1DeaQOIVIUnS+BxkgjmE4xdE2mxN+15jr587Ss68s/3UaunPhDWHxO9ISQezj2B/edh/01PSKYFlEqTFpxNvZeeZXicohYiahyiXqiJxwcfi1R2s/qQkVxLMmPNUkSt1HdG8wpFXpZvNE1rtU6lxXh9Oq2WRIdpBWreDaXyjn926BxoZfDU94n0d4fMmsPJwAKp5rkshVY1hYBCQCGgENg/EFAE3v5xjmoXCoEDGoEb7r6fsjLTo+r46hZQKAPv/tNjdNUV5xL7gKVHGLH4v7f/hBtbRKPFQx28WBpZROvhK9Ypom7P0q7+ldG8P1n8IKuB5WxFkBenVvpNLcxa1OoE7ttBCZ/7laaicQJP29Jz/WTeQJMl8LT139pZyqObdFk75PZua6RlT67g5AHUKmfOC41dT+RBtYSGP1MyvJSaGXR31BJ45z3yIHU07KI+HzNmsGgg8tLyygj1yPyGBf0shdafRmvVumobqWu3nxjs8HVRWw9US/7cyqL0vAAZiPmgTtO2lJJcSillZ+KyyZhYaPcjlklpHkOHr7mDRr+2hL2eNlz9ZUbWiYbrfnq/h2YlFPJ7AKRvwmv/ZASdccotjgYKN6gq2zp9pv0+f3YtvfSv8BTv0dOK6PhLptGY6SxdN0bNTL0Fp9ltA26zTpZm3wfQOfQbU/dlo7l2vfUiJ8RBWBd+/Qwny8W0L66ZLFb+QO9A62ZR7e8SXC/tXdakruwaw83AQtW/kz1Z1U8hoBBQCAwPBBSBNzzOSUWpEFAImCAAhdub73/B01azGYkXD+3Tr6p5Km1WRhp9Y/4pUSPvsDcoWWobrAmBWGMgW2TcSRzRIu60a0IpuLsxNBVrRdVeWrm6nh5bUk3TpxTQjCl+YvXSCyY5Cdd1X1kHSrHA+j276C//dV5HDeN7Gcfxp4tCDRG0gdvV45PapIa827q7kg/JyaijG47aFj5cR+IZ1d/CICjw3BhUSMVr0AnGFdXLtgbemTWriwoLB4qM2UyqJfD0NfBO/+OdtuSdmD6ZqfBS80oDqjG9kYVRGFoCzypMXHMgHUR9RPSNlMBbl7iS7ku7znBZ1DeEuku7Hl+TkXcT1/6cfN4mOu/HQRMOq9hBlYKgSmH/lST4SbXUt/9FqV5/DTm9CYmYS5iVGPV7+Lq22my/AAAgAElEQVS3aOeavcwowaxiIKsZyIi8y++a7faSshxnRpzLpM+aTXxQz2yannwKf1u2HqDTWnMxAUM3qROFsmw84ncLlM8wgbFTZ9rN6+ZLGLs5Y/l+cW4K+3LA4TdAsQxIza0QUAgoBBQCESGgCLyI4FODFQIKgaFE4PlXP6C/PPgsJ++ipXAbyv3Irj3UdfB4EfZk9gDNDAQibbEg7kRMeFAWD7Mg7kDagbwza3cvPIZmTI2OUtIKlxEFaSRrRvLaGmYGURVe71EGd5Rx+98LzQm8iIlYRt41vfs+bd1TyRxdQ91wv5i8jqqmrSCvR0c2a0g8IwfMyZ5S+nXBWZwAGqwURz2BN3myjyore2QgDlHgdXhHBlxot334NP172dv0bn++1Dzo5GHptN6MPN4/mgQe5hMGEkLhFymBhzn1TrRiDaj9jGobFq/9Fk1smkMfFD1A113+hjQuTJTF4x+V4K9v2v3S37laMSfDb2Bh5TYtPl/QD58FN57+OB/jYeOtCDz0iRWJZ5Z++V72QmlM9B1HdZ1Io9lLv18zghPj4zENNOLPJAsEgQ3SSVO9SRERefFaX9Vo66r+netbSg1UCCgEFAJxi4Ai8OL2aFRgCgGFgBUCazdupx/f8Ef60x0/HVLjiqE4JTywQkUwVA0Pz0jLqWvqch2CUFjxB2tW16+DpUdGu+FhECqgDz+vpWtvXSY1/WCQeEhBBglrVaNL4NPX76
PLHnhAKnZ9p4MKS+nHJ5gbWcBQQ+DvZoEVi+upqabbcOiuwnp6+biP+Htp3hZWJ05zvWb7lVR6Ag9KsV8XnEkje4ticj2Y7VFP4KHfccd1UVGRvQpPKPD06rsP/nApbelPpUf6g+YRMhhnFI+lqSWldNup1gYkmEtWgSfW7e9iJii4zdgr2ZdJyZk5lJifSMkHBVNWZWLU9hEkntakwmyOwz6+n1WxS6OlFTfS7xZslF4KZC5eOZRNOQk5nMATDaQMT0ntDroJG02Mfo/9+h1a90Ut/0yQIfAwz/EXT+UptdFsZjVE3abQIjZB4GlxyUofuL9NHHoHo5apU9wGQxUozHDcEnmIsYP9/h0KB2OneKr6d04RU/0VAgoBhUD8I6AIvPg/IxWhQkAhoENgR+1euuLqu+mc02fR2aceG3gXdfDiJY02loeGh9V9Le7Js2jE5vbhTxBTeIhGTbNYEHdif1BKrFpTTz+89l1HW441iadVBuoD06eOej299L3HrB1dzTZ32pSZdNoU8zpqqZ5EppBLkjLU0K+x4vkOatoYbsyg7QcV3vLJ6/mPQki8ARWeIPBEyuW4hBL6Scqpjs4qGp1FDTztXIWFvTRrljE5qe0nCLzmtKnUk5zD34L6bvuHz/C/P9w3grYy0kq2peaW0h1nnkvTSstsh/S0dlL7xlrbfiDuCC9NS/UVUnJ/SiDN1XOYl5N5ThtcfH+fch2t7P3SlJCe0Dud5nZfQp+tGEXV2W/x1w9u3UEHrZYvBcD4N0MCL3ivJ7Hadx5e58woRXLLyj30yPVvB+oCYpydAk/MvfCFi5zCYtrfyqQhEgJvVvOthmuapaKDxCrITqE9rMRAPLWoG+tYbM4tkRePykWzbUKlit+DqikEFAIKAYXA/oOAIvD2n7NUO1EIHDAIoMbcX1nqrL5NnjCa/t8PL97vcYiHOnhWJJTRAWiJO9kaTZEeJFQ319++jD5ZvsfxVC8/Oc/xGNkBRmliZqlvaSl99MrqL+nZ5eEF9+3W+/ullzOC1JyUcVvLqXFXP618ppmRQjqjCoOAXjruQ6ot3MfqmPlYXbtWfw+40rIXCDw0pFwexMi7n6YGybvmddXUvJ692J8t69dR1oSJlD1xEmVPYC/2p1nb3NFLWwwMDk7Ms3ZhXHrPf8OmBIk3eXJPiBIPxhVaZ9r0EeWE1FlB3mESLYHnVIV34zFH0axj5E0F2jYwRVmbOQnT38ZUhDpxa2Kvl7zdhczflSnbcAYDxhlOSDw/AeSlbl8fPVf7CK1JWEHrk1ZSJ3VQDytFmpyexkk7kHcT+6bT7rYeenNzW4DAO2XJPjplSYPdJRx4XxB4WTMOp567f244ThhYpLEUyZYOH1flifbuolX07uNV/J/Yt7j2jAw+9JN/884To2ZqYVUD06kLrYhTr74zAgdEa1YaCM5ejg0nr6JkFiF9iDYdo+FA6yYWJ0TecDOwUPXv3FwRaoxCQCGgEIhvBBSBF9/no6JTCCgEFAKGCNQ1djEFiX2KX6zgE+mpdqm8Q0HciT3jYeusi1/gTotOWyxVeFBEICUQqcPi4dHL1HBGqcTJSf2UndFH3/zXI462cO4hBzP13UxLAk/UEatvtleaaRdf8VIvNW1hZJwEgYdxIp1WqPASGHmXlJ7Jp8TZgLibkFQaWGL1vfdw0s6sgcybcs0vw97+184ORt6Z3xNXlKXS2LQkw2mNVHiiI2riwdRCm1JbX59EidmFlM2cfvVNS+DhGtyWkE4P9tgr6o5P2EeXHXMijTr6fOmztlLhGSnvMLG3q4AS+4LOucHadf2UdKi1Eg99M9n1i/v6qR0P0av7HjOMdWzKdDop+xIalzo98P6iVU0BAg8/dJJGKwi89Ltvof4Z1oYzuKeQVosmviwIIfB4Sm4CVx8aGXzoNxTNNFo70tyNCs9Mfaffh9aht5cVyPT19LtS30pfnA47uv08criMaXcZIg+q5TR2/UfDJTdacZvNg98xpf
ny6t9Yx6PmVwgoBBQCCoHoIKAIvOjgqGZRCCgEFAKDikBjazdXUwxVs3uQGUriTmDy2FPV9OSz61wReJfOnxQzZ1qhtutl5JUZcac916z0XtpQV0t3/ec1qeOeXFpC159xKu1rNiartJMgHcxpGt1797N6dl1t0gSeWK973CrqOWg1JZWMpe68UXRY1kiq6CkKcRK1I+/EXFoSD6q7B3fJpQJakXgfPPEV1W9vksK4cFQOnfO9I/g9qE/ZFASeIIcEgfxuf56hqcVoploDeTcmoZNGHn2eIwIPwZqReP3N4WSmnrzTbjaRiTW9hckEJZ6R+YG4p5H2/oetv6TNXSttsfp20V0BEg8EHhpq4KEdtLqDpdLutJ1D1MDLmn4YZfzGOFXUaBLtffbiA1/SW4+t4t1AZAkCT/yb1/ADlgZkfzQJPG4AxEggGLSYNSdutNPbrqDc3rG2GGo7YP+5rIYpjIii4crqaHGLzrFwoHUTmxWRJ9JR7b64crNutMegxl8+q9enmkJAIaAQUAjsXwgoAm//Ok+1G4WAQuAAQWCo6+CZpYLFA3EnLgEQeE8wAg8px1aGEUaXTKwIPOCWzeoS4eG5qd0nVQNQqPDW7KrlqbRra3ebXuVQ3uHV0ZVgqb4TEzitZcjTZ5kCzw2BhzWPn/EM9Y09nLE3R3CVVDtLdxVkkRV590FDNm3vSKHtnUHl2GlHlNK3b/0e2Snv9GBZkXhGhhb68QUjc+jYBQdzIggP9CgUD0JG7GPXJ8/yNFoovJxed9MuuIFyRk5x/CkGEg+mFiKdVq++Q9psck9WiPLObJER87P5W8IFWBAaCezabWYGOn+rvVaKvBPzCxLvjc2ttKetl7vQ1qds5m/LkHjI8t06LY3yf/Mbmpg43jE2+Ez64MnV9PKDX7LPAr8hBlKhcT7ahvOEayd+rH0vmgSeVn1rtREZJZ4b8k6siXuvi6U/42yhqERabZtB6rljsCMYIKvqjmAJR0ONiDwQn8PFwELVv3N03KqzQkAhoBAYNggoAm/YHJUKVCGgEFAIBBHoYemze1ga7VA2kD+7WRF0kBTxRNwJTEDgPf4MS8VkAeqe1W1hizaBp0+VzWH1p2ob5FRjCBa18NJS/IQDiDyQeKIuHhR3lWWlnLhD8zGBXEu7vfoOfeGIu6+lO4zMMANo6xd9tI29eGuus8VR3+Hr05+h/jk/5D+GC24POxgoulDrbs19vwubbxsj7Z7cVWS6jq+gmPJPPpRGHu/MKfTWcawOn0lDOm31sq1hajwQd5OOGU2Fo3JDRorUP6GoxH5eucO58UF2RSVNv9CvTnPbQOT1sldfXS97+c8psY+lxWpSZu3mTmKutJmVXl4zDWn6niS/Ugv7erNpEb3ZvMhuirD37xz5YqAO3l5G3i1jJJ5oIPFQE8/I1ALE3fsXF9C0g8+jucmnOV5XDEAK7XtPVlESr2HIy/6ZXvNQIqJfLz43GITRNLHQXvN2m0FNvKbkLbQt5e1A15yeMZTDFHeju060G275Pu77BqbihuLQLo0/ooUcDIa7axsrK2Ck/nQwTdS7aok8TA73dT35G/VFozBhITtjqFBVUwgoBBQCCoH9CwFF4O1f56l2oxBQCBxACOxmBNBQPkigiD3qKKV6E3kcg2VOIXvEK6r20nW3LeNqG6jwnLRoEXh4+INCC6myKPiPlEtg5cZtUSjxrPYhq7wTczg1Iwko8DCBwzTa0cVrWHooq33H1HdoIDNwNs1MiWikvoPqbhl7WbWGglJK9Hgoe3QRTb18tvQRn5jnITtji57uJ0PmS/Yak3JahRrIrrZOH615e3HAiVY2KCP13cY9VbSpropeX704MM2cKRfSuKKpdFDxVNOpezb2UC97uWkg8FImeLhJBZxaPUwCJxSG128/082UvB7eSTmXhJlZ6CcDmYcX2mvz8ymf1SyckDCervH+yNW62kG3zXuSK+zEJ4GdQhLnOvHQUoKJRbTcsqF862SfAZ
3ss2Aom9Hnj5mRzmDF6eYzcbBiwzog6hEjfpfEU+qxEQaq/t1gXhlqLYWAQkAhMLgIKAJvcPFWqykEFAIKgaghMJR18LSKO5FqF7WNRXGiuQuWUiJ7moE6ykmLhomFwAjEHQwqtGQryJEmlo7oxmCjd08jU1T1UHZ5Bnky/DWOnBJ3Ags3hAKvgSdaWyOTMpnX89JiPmpSE42eOznwI6QsQuXlSU6gly+7POR47JR3onNDfgklev0YVBw/VVqJNyY1ka4cYVzgHcRdjy+UvBPrJXsuIkHkaQ0dhAGJSJHGA/T7/7qJ9m1bLXXZGZF3f3vnJk7emTWQeN8/4RbDtyMh8LKnpFDOtFSuzBQKLWEK8b2qU6T2o+8EU4vvFN/Ff6x3pDWaEMKhNPZ/U5InREzeievsv4+voqX3f8mXE8Yd+LvVPXjF3bNp5tHlfEzd4y0B8k/EnDQ3mNItAwxUZi2MsHZz38vML9PHzkl1KIg8u5hk9hXrPqLuK9LI8eUDaszFK5GHOosF2c6uzVjjp+ZXCCgEFAIKgeggoAi86OCoZlEIKAQUAoOOANRcIPEGs2lJKdRQSmXqsnh25HNjZDF9SgH95qZjXcNqRdyJSZ0SZ63MXGH3RzXUVtMcFldGRTaVHFVBmSzF02nDgyjEiU6KsnMXWlYLL9BkSLwkD836aaFheCAMPvzBt7myRQgln9hZFFLvzmhgR1oGdaZlBgg89Dl6oXzqqj6Ntq+XmWww8q6vz5w0E3FkZd1OedmHcGUWyDt9w55Qg6r6ncVcjWfV9OQdVHd/f/cmqaM0I/HcEHiC1Eo7MoW6/SbBIa2mt4r+sftadkb+1FKzNnpPAZ2wajKNqQs977xZJ1P7kX7DBZB4e9jr7aa1vB5edfZbgenSGKE7wzuRp8y6qXmnjQvngHtNnNPD171FW1cFU79F3TuRLqsdC+XdmOnF1PtSF/WxFxNX8taj23vChCTyXGOekq2dEwqu+uahTcE0q19qdA1npSfzH8daXT3UDrQyN5vewAI44mcg8tq7esLMbGTmjFUfVf8uVsiqeRUCCgGFwNAjoAi8oT8DFYFCQCGgEHCFAGoF7WX1eAajGZFSw0E1AWwW3vkhffaVfL02t+o7GeJOnBWIM/5gbED+6M8T5N2mJfZKLhB5B11gnlZpdJ3IFtXXjg1Jo2Vv4EG2n6XT9nUwZ1qjlpLOUmezaPSh5vWYPr7qO1wpiWtqS5uXQODZNSMCb8o3T6ScMcV2Q/n7egKvu+PXUuRdMisviDpq3tTbqD/BHG+h/OqoXUvbq1fQxveWhMRl5jhrp7zTb86IxOvb10e+z+TJfeHCCpVoyimphvht6lxJ99ddx84p6OKqN+m4/K1jw4g7MVm5129A0XTeTPJV5EmdkdtOQXIlkRFmoTUeUQ/v3cdDSdpA3bsBElmQd75726h/fdDtWzjigmjWZuXLknhOTWPc7t9qHJRk+OLFyglXOx4lEmAwgXRqqM9iUbYhXhxorXCDetLIwCJoZpMcN0Seqn8XiztHzakQUAgoBOIDAUXgxcc5qCgUAgoBhYArBGJdB8+OlHJqguBqkxEOwoPXVf/vHVq+cq/tTG7IO4GRkwdcPER7PUm8/ptVkyXvxBxOSTwolNJTzR/mW7e2Utu2Vj59xqhMyhztl2aBxKv6Tx8n3EDiBGoM9jDSSKTUMtUdyzelnDKmqJprbaoBAg8N8y1rzKb3661r34n9alNo8TPZNFp9DTyrtNmqtdn01HMjaXV1qMLxovOy6dIF37W9poQaD4pVuH1aOdO+XrU4pN6d7eQDHb53/C1hNfG6P+2m/gbrWmuckEKK+YBjLurfJbOXWdPWwINqCk2kg1qRd96ENCry+FNR0WJJ4on70UwdifW3rNxDW9lLS+SNnl5EE2aW0jnfPYSTW233sJRZDXmnxQS4BYwuBsSoMiQeFHh7mPHPUDa3ZFkG+5xAyjuU33bXsdP9xZsDrVH8WtMmo/fjhc
hT9e+cXn2qv0JAIaAQGF4IKAJveJ2XilYhoBBQCIQggDpVnd1BhUi04LEj7sQ6ZqqEaMURjXlEmug/Hq2ix5ZUG07pxrRC1IpKZE9uqGfnxD1RjIU6yKqtuPdDxxCMmz9FOp3WLI7d79XS7vdrDdcu+3opTZ07mnZs9tFHT1vHP4qp7qyUd2KBmpeW0o6XXuD/lDGvEOMai0ZQQlKQHHRL4HW2nWe415vvnhpG3Gk7JiaOo9sXzqJpU+wVgyBAMlI9jLTFPWtMrDlV34lYzFJpu14zJ4tAwIFMFIqqhDxGKh/urydo1vQmFiIF9dI3jqHRe4xTpDFXVlI+ZbOXtkWbxNO6qepVd05uIuCSvo2R1Hc1MWz8jrVmDRwmCBOk1aJf8tXplDjRmACVTV11Equbvn41XZ8rY45YkVTx6kAr8MXZoaacDPkaK4xkz1rVv5NFSvVTCCgEFALDEwFF4A3Pc1NRKwQUAgoBjgBql4E8ilaTJe7EekjBxMONnZIsWvG5mYenjLE6RY0DOMGdduXqej4V6t3NmGpOPBitpyUKhHmB07jwkFeUY63G2f3hdl73zmlzosJDHIXswbROk4q98bENAdWddm3orUBuIHUQpM+4S8ZzRd7WL/p4TTxRFw+KO7xkiDvt/EKF54TA6ysfRS0agxIZAk9vYGGmvrvwimNsoU9IyGOqwTxpEg/XjjCEMDJ/+dVT823XNOvw2wtCU3TRD6m0qIenVeJBccdJJw1uMuQd5nuzaRG92bwoJIQxrObd5W8fxx2F+03khSJ9VjvQV55LTecf4nq/2oEyqjsnC4m6dzDT4Ne7BYuH+0LUx+sbn8RIPON6eLh3shh5NtQ1Q53W3zTCLdokVbw70DpNOwZm0cZI9vrFNSZKNMiOUf0UAgoBhYBCYPggoAi84XNWKlKFgEJAIRCGQLTq4Dkl7kQgskqyoTy6aBVIjwZxp8XBrh7WxqeqDE0rZLCccc3RMt14H20cZuQd9u5XGvWHpIAKEk96MYuOzeuqac19vyNZB9rU4hJKSk2lvSw1tXuAYJEh8K4oS6WxaUHVnhGBd8tvplLVWntTEEHgYVvPPWGs4jPasr+umJcR8CxVszOooI02gSfWBonXt6mHk+28fttAATcQd4n5iZZps/r4/7nnOtrctTLw4+NXTaITqoLuwvr+hcnllJJo7Pi79ycnRnTpREt1pw+i+6qgWYxRuqxR0JzgZoRf3oMFYa7T6I/PWKijZGrP1VA91ST4v2RAq+gvoAoqiAgrMTiaZQ8ESZXJvsiBI2urRE1P/SaGQy3VSFJ8B5vIU/XvonKbqEkUAgoBhUDcIqAIvLg9GhWYQkAhoBCQQ6B2X0dIQXW5Uf5ebok7scZwePhCrHZkmRVm0SbuxFoF2V6untQqobRxuEmfFeOdEHiiLpdR2qwg7qC40xbtF+ugLt5Bl/rNCaLRRCrtPZsqLKfz5OSQNyc30EeQeHYutHryDhNoCTyQNah1t/AuOTMQLYG3YH4l4SXbhMkFSB2Rgh0LAk9cvwlsc43bu8hX7ycMrWrdWe0BZhZQ4QkSz4rA09e+08/bfuSYgDOtLG6iHz67QByhhICMGYyT+bUEnhjnJ/LY9cIIUKs6hnn/yuOp0npnUhnDGBB3Tyd9ZBhqeX8+HdU3MWIiL5LPQjMMI/mMxBcseZneEBWwk7MajL7RSPEdLCIP5wuVrWoKAYWAQkAhsH8ioAi8/fNc1a4UAgqBAwiBvc1d1M1USLINDxJIKUWaDcbh4TcSZ0EoOhpau02JKNm4YtnPTYxaJ0urgvhu47Z7KBwsAk8QiV/cvjywFWFuoK2Rpt2nr5sRjz4fdba10+6xm/lbReVlVFxRRlOP/JpbSPg4KPEW//sdeuWT8Bp8UNyl5jHizpMStkblSdOo/XBjAg1psyfmeUOUd2ICEHi9PU/yNEjs94lnR3LTCpmmJfDQ34kKT8wPFStPa2R18b7/8Nkyyxr2MUqhxT2Oex
3qKFzD0WxCiXfTk8YxFzPTirSkdEb8GpO/iMUNgSfIoiRG/BilIT/70jNh25w8oZIqJ8qTq0YEnpgUKjs0s/p43r9mh6RPCpUlzgIKVrNzWJL4Ie1I2Gd7ROf3HuWaxJNJ3bcNwKKDUGSjS0t7j1RdUCfKxEhii2SsnYGFk7ljSeSp+ndOTkL1VQgoBBQCwxMBReANz3NTUSsEFAIKgQACsnXw8OCbzlQreGCKBnEnAsjN8FAXq+IebYIgmkdsR5Zp18IDFpQ9wAl7gutiJASn2T6EuQbOz6gNFoEHbDa9XkPb397J02RFmqXZnlsamjh5J1prbgO15TUG/g0ib+qRhzIyb0RER3jX716hFZ+uDcwhFHc4Hx6jRhU4eWIpXfeL03jftxtCjTXGMPMIbcqsNihBBu2tP4sR0P53ZGrfiTkSEsoYWRNMD3VD4Im5MhnB88bqxfTs54ssFV5GoM6ZciHNmXph4C1/0X1vVO9zs8Pc+s6TNGF5qHGD1rACsaDhvPSl5JwSeEJ151e3hRKSIO6efelZ02tu8oTJdP01N0hdk1YEHibAjrpamjg5qVXjpTFVKAg80cT15WGsH/q2sS9LOg2+bJEl78S8bkm8aJUTsAPRCZEXSXqqXRzReN+JgYWT9WJB5EHlmcN+H6umEFAIKAQUAvsvAorA23/PVu1MIaAQOEAQ6PL1kpWbaayIOwEvHqo9TL4Uz0YWoqi3XapdpCnFTi45O9zcmlggBicptHiABnm3492dIa6kRnvRk3fooyfwxLgTzpsbFRJv7TpjN1ykieEheML4Erru537yzknDNYEzwDXR1Hg9I5iq+HB5Ag+paqEkZSQEHtbeVFdF9793M7c0hVpLtgn1nVY1Codqs/Rs2Xll+qV/vJnSP95i2dVv9DBQf0/DeMnWwLNT3d157x20dn2Q7DULRpbEg4kFXkato6mROpqbAm/hGgxsiQlDs64opYy5oa7EQmUJUlyfNm+VNmsF6k9758ocT0ifwVa7yRB58e5k7sbAwsnBRJPIU/XvnCCv+ioEFAIKgeGJgCLwhue5qagVAgoBhUAIAjvrO8IQiTVxJxYcLFVHJEeOh7A0pk4wc4AcTOJO7MPOAKR1exNtWrLa8bZLjqqgkqPlUkCFK+qOd3bR+te2Wyq/jMg7BGdG4OG9C3/yHcfx6wesqa6l5174kvREHlR3lZNL6TuXHjlQb0wuRVTrWCrUlX29q6i7cyFfWpbA06vvMDZSAg9zbNxTRf/8702c8DKrPajF6HvH30IHFU8N1LN0k+796QvP047qatrJ0pdHTJxE5ZMmDfxpbk4hYvDUNFDOM19KnTNPzWaMBdRoXSPkXGitVHdY1E55pw9MlsQzUuE176mlni5jYg9ETGJJIvV7E8gzIZ3yrh4TsjTS+JHKDOIY6dItHT5+vzlV34lJj+ybQEf1T5TCXXSSqcPnaELJzn7TFg+/nvVpz6jBWc/KQMRC5SwZnmW3wVIIRoPIU/XvonHiag6FgEJAIRDfCCgCL77PR0WnEFAIKASkENDWwRss4k4bWCwKo0ttXLKTWaH0oSDuRMg4p3yWvlrXZEwIoJ8bJ1oZ9Z3+Gtn531204bUaUzRR8661Mag60na0IvCQShtpTTy7IxZmEJ5k45poWrxBnsDMoZmZh+gJA0Hiof7dU89bE6BG5B3WiQaBh3ler1pMr7N0WpwT9tfbG55+in4g7yaWTuOkkNm+rPADcffpC0tNu4DMO+cXv7I7Asp5ejl5dgTTqO0GcHLy0sOpuSjLtE6anepOrPHNq75ht1zY++fOPZfOnWvtGty3rod67mvnYz+pradSXxt1dPi/KPEkJvJXhkeTOszUd54y5m7MzquPlSRNHh9K4gmzGD1Rc0f3847jFwOcqvBARvlY8b6hKneQwdLZs9JQ77GXmtr9afgljMCrbeh0jUGsByIVXbaeXzRicUvkQQVflBteGzQaMak5FAIKAYWAQiB+EFAEXv
ychYpEIaAQUAi4RgBpgO1M3RGLGncyQQ32Q45MTPo+IwrSSCgVocjLZrWCjBQhbuZ2O0Ybk9kcTki8cfOnUObIHMtwtAo0KILw4JfBCKB3r//EdFwHM6uAYYVRsyLw0D8aKjwZfKFoRP0nfX00bU1D3CdW5AVIvK9WvkI33Wm+ohl5N21KId2+8OsyoUr1gRIPJN7mvblRqvUAACAASURBVFWUDMILZhADXjXjiqYS6t7NHH2wa5OK5373W664E62jt4safK3U2ResIZia6KWDKqfTFdfdZhuzExKv6byZlDCmgJ9XFzfS8avRRMM1is8yKCStzsup+k67iUf++qjtnrbes5eeeXYL5SX209dSjbunJydTRmYyU98x8o41rsQbMLrI/vFopsbL4D8XBJ6YRRDPd/U8zz+HrJxtzQJ1SuAhXbWFEWeDkVptFrOWoOpmhSfx+WP1JYbtIcW4QzQNLJyE6pTIU/XvnKCr+ioEFAIKgeGLgCLwhu/ZqcgVAgoBhUAAAX9B//5BKVpvBPtgpRlFcuRIYcPDK8gqqHuQytXNzDeGsiEm1CqzSx+zI/EyKrIJqbNW5J2Z2lCkQH/yf6upbVurIRwNe/Ya/rw7tYMayoxr1IkBg0XgCfIECh+hxvMyohbqNKdppTfc8hqtWr1dt+fUEMMKPSC3L5xF06aE1j6L1rUFRR7OaebomVSSNYlfL5GYVOjJu52d9SHEnT7uTnZ9/eCGu2lq1hj+Vtvy9/ifGYfMCukqQ+KBvPNV5AXGQZWVkephphTM1ZillmrrVdrdF7K174zOwY7A+2jJLvpoSS31dzLyrqWX8vxeHOGN8XYZBV7KzAs1DwABUzq/jNJOL+KKs7xMY7XtH5NeIjjqonGjD/nSh+SUwIundFXgk8MUgcIlGWY+TvYerXvLap5YGVg4iV2WyMvPSmFYDjDHThZQfRUCCgGFgEJgWCGgCLxhdVwqWIWAQkAhYI7AbpaGZPfAGyv87GrMxWpd2XmFKyf626mwZOeMRj8nykXUxGuraaZW9sKfaCDtQN5ZEXeizh366+tPiT3gwX7TV3tp06INhtsyI/D2le4iX5p1+ttgEngieJCVIEx6WLogDF6c3herVtfRjbf6SSqZFkvyTrs+7rNcti8YeOxlqdduCOgd1Wvp+d/fE5h2U/sumS1SweRiurRkDKUlhafppR9yXIDMQ008mFroU2rhOOsrzw0h78TCICkKs1MY8ZrIlcSNLMVZprlJnxXzXnf19VQ5sdJwmZrVLbTk1uC9cFIzq68IUxEtuQbObcBhF5PklaaQN82vwtO2iQ8dTOms/mYHI/GManCCwEPzK/f8RB5SpmWaGwJvT2P8pKviix988YQG528oguOJyBts0w+rM7cj8lT9O5k7RvVRCCgEFALDHwFF4A3/M1Q7UAgoBBQCHAFtHbzBhsSsxtxgx6FfT9TRghLLxx6Ke5jiTutEW/fkw7R38SNhYaZPZQ/d7FV00eUx28L2VVVUv76a3nn0Cc4NoFVMnUIjp02loxdcEPG62r3bkZbFrHYSHpx3VzPjDAMSz4jAkyHvsInBJPC0e0adO6+HOSTb1MYzAxok3hNL1jAlnrH6UIwbLPJOm/oM0kOo1to65cw7RLzaund2yjsxJq+jl7zs/vn6nMk0Lq3MEDJP6SjKPeNSx9etODMMhFINClkQWKiRZqfIioTAs1LgLbl1PdWsDqpROYEn0UrGpof0QirtlEcP4TUXcS1CIaVX/upNLEDU+B17gynTRks7NbGIBzWZfh9aB1onn1cSRxGVLvGoLNcTeVAXg/YtzjPJ8Y4KEmoShYBCQCGgEIgXBBSBFy8noeJQCCgEDngEmlvb6a8PPUfVG7bR4YdU0mXnz6HszNAHQiuQmthDIkiYoWpDVSvIaL9GD4OokQZyQKhgti78GbVXfWUJF0i80bf+IeqQLr7xZqqpWs0FPAnsicxIIXbBbTdxMs9p0xpU2BF3Ym6tEU
TN6gba+e6ukHRavQOtLHmH+QeLwBN107p5TbXgfeB3wPTyFE2nZBfiB4mHl74tmF/JUmYLY5Y2K9bTElzYl7hW8HPUccQ1ZKasNLp2/vrd/+E/Rs27XV37bC8vQd6hIwi8vORMyvNkGY5zSuKZ1brTptVanVksUmj16jtsVJbA06rwkiDGY8R88V+mBs5MpKtjTmGMUEP19HTSR2F4gvyDY29jW7AmITrhc8ybnETn9x5FFVRge36iA9bOYoo3Mydu6Ymi2NEopVe4c8dDmQMnCukowiI1lSDyoFzEZwLUq6opBBQCCgGFwP6PgCLw9v8zVjtUCCgEhgkC37r6bjpi5mQ6+7Tj6NElr9GnX66lp++/VTp61I/a12LuaCo9kcuO8fCwoyWvoExASpZQ8WhVgjLknYAh2iSeIO8wP1fbsP/rERI8HfZOSTytSgsGAE5TR7Vk16bXd/Bodr9fSzCx2Ovx/7stT95ttKi8jE48/0yXV5TcMJwr3HwFcWe0Z+DMU08dkl1yEcSuF+rB4UytiFhh3oEUTZn0Q0HgyajvMthnSiZ7iQYCD81MhYf3tOm0ZsgI8rGfXfdaUlLbX5DKRqo10c+ticXkCZPp+mtuMAxP1L7TvilL4GXkeigr38MUdCwNlsGGz57iv0wJWwdnlpsJN1a/gcdTCR/SjoRQMrWVEc54GbWkfan03fQTaBRz8ZVtuI5gGNE84P4qOy6W/azcywWRh/UH0wVWu994+lLK7BxwnxSw+ndQmaumEFAIKAQUAvs/AorA2//PWO1QIaAQGAYI7KjdSyDwXn/id4Foz//2QjrpuEPpqm+dI7UDFECvZXXwhqqBbAAPNVQqQEF24KEYMRgROXggW/m3vxumzVrhVnjhN6OSTqsl77AeUp9QwN7KFfJnzy62PVIzgwrbgQYdRM08fRrj4v/9p+PpTjhvLhVXjHA8TmaAVmUJIxAZZ81I1XgycUWjj6jZaEVKateRIbtEf0HgydS+K2kNVfTKEHhYp+jK60xhgLouPcVf78zKYVZMIIgcfLYgFVV/X7tJo7Wqf2dE4I3t6qNxXfZ16UDeZbMXM1flLeOMIsqYa25sIpSGcE5+pPt92s7UeGj7WjtN6xsmN6RSxmflvN+owky65OsTpS65of581geJaxzEu50D7VARefGYcmx20CiBkAzWWDWFgEJAIaAQ2O8RUATefn/EaoMKAYVAvCEAsg6psmtZqmwWS5G949pv8z9PWfAL+ujFvwbChQLvhrvvp9c0pJ7dXuoau1itt6FxVtWnqNrFGq33nZBXUAm+PyfUOVM2jsqn35TtatgPNe+e+vUtYe95GIGH+nxm7eiLLjCtiec2jVJmI35yITlQs2tPzU565xl/wX2ZFkvyDmQEFCdQGcqQQNp441mNpyUl3ZhvCDVeF08jNq4hJxxo7Qg8vfoOGMoSeEYqPC0xbKa6s7qutGRXqyZFes26NXTXfXfKXJK8j5X6Du8bEXi5PcyJtt3+cxVOtFDhiWakvtMHGqxplkQPdLxHnzVuNyXvUjbmUebWAq7sE0SmLInHFX/sfulk10Y8NKcGEX7y3cP37SRl3O1encbndp1IxyUzIlTVv4sURTVeIaAQUAgMHwQUgTd8zkpFqhBQCOwnCEBZd87ps2j2sYfQ86+8z1NlH7zvWsLPvzn/FJ5CK9oVTJX3jQtO5X1lWmNrNyc1hqLhQbQoJ4X2MBJxMJoT4k7E07DkUdr9xEO2xfGN4o9UhffhE0/Rh08+FTa1HYGHAXoV3mAVfBfpqVA1IvVOlsQzI++qPU/z/U/yne/qEoELK+q+ySrTrBaJNzWeNv1ZW8PPDVCC7Gpu7+ZpmtomTCycEng5eel08GGj+FRWKbR4X0/gOVXdme3ZTGkoS+LZkXdY14jAw89lVHgg70Diodmp7/R7xD29r62D/vHaGmofUx/2GZW6KT8wBKngcKwVRheXzJpgm06Lz+YG9vtBRqnq5ppzOsatIh
DXUlYa0o97TdOvncZi1D8eDSyM4kxnqdEoD6CaQkAhoBBQCBwYCCgC78A4Z7VLhYBCIE4QgOruxzf+byBVVvtvkHl/Yco8reIOSj2o877BiD2ZNtR18JDK40Y5JLM30ccNcSfG1j/1CE+fRbqx0xYpgfeHcy80XBIkGVQlVo6bgsADgYGi5XZ10Zzuzaq/IE1SGHmGVFXEWvXx5+z1Rdgw1LybeuShIWmzIO3Wef3EnbYV9FZSQe8UKTIvUmUa1v3PkkT2grNAaLvom4k0b8HgqHqMcDYzqYj0DIXiDfPoFUtIo7WrgadX4M34GnOZzfeb6sgSeJGq7sww0KZVir2BxENNvLXr1xoOO3fuuXTu3POkYL1vwXLDfoe29VKexfcjwoXWMyGd8q4eI7WWttOi/66j7fWtnJyDAYZdDUthdHFcZRkdNbHUcj0YRtQ1dTr+8mL3xj7avSn087JkXAKVHBRZymYkikC9E6tM7UenhxEPNV1lYs5j5B1+H6imEFAIKAQUAgcGAorAOzDOWe1SIaAQiBMEkD67k70OZ2YVaJ9+VU1vvvc5XfujS/i/kUZ7DlPgibp3qIv3wyvOpcMPniS1gx6WPjtYCjijgPJYTaMOVtsqFmlaovA7HmrdFjUHebdvySOE+m5OW6wIPJAcUNLYEXiCuLSq8ed0T076+/GXd3Pdm7iaE3f1SeEOrtp1QeQd0/lr01BkjBys9rF+dQL96dZky61OPTiBLrgsgUZO6HHlVOsER23fSPcms65QGqLOmkg93VG9lh66eyE1+FpNp9ASeFr1XWqil0akGLufdvr891X212bRiONOcFTrTmYv+j7+e8LDVMc4t6BhDYg80SZPqKTKiZWOpjdT4WESs3RaqO/yjsjhyjvvxAxH64nOdz8TJMX1KjurCfEZ8tvLjyI4kXf3GKfIWhlGGM0N4m7FG720R0feib7FjMSbcXKSayLPyIHWKWixJPKGg4EF8FL175xeNaq/QkAhoBAY3ggoAm94n5+KXiGgEBjmCOgJOhB8SJuF6q6ltZ1mMxMLQe7JbnU3M7KwU27IzuW0H2qm4WEymk6HQsWDP63cOGVirXvyYWp8+lFXaWSxJPAQu9mZ4SH1rreej0raqAxGdkQBlDMQMCJd24p0fCHDT0rLNCMSLxqEpRF5d1T6M4SXttX4KgmvU2+ZTxOn+tV4LdUraO/Lj1PHhlVhWyg4/WIqPONima0Z9nFqUmG1UHL1TvKs2xnSpeOsw0L+rVVRCpIHJN7/3XEtdfZ1G07vYSR3fkcvack7dNSr75CS2aozeNg25QJqzaqgygovTSyzJk9dgzgw0ImBh5O1lty6nmpWhxOcODu0bFZLDmQeWsWUTJp+eYVr4k7EpSXwxM+Eyg6qYSvh8MILD6OsdD/WOGNtqqysYYRYE+TdG/8INTAxw+7k7ya7IvGcEopWZyeIPKiTYY4SqSLPKV5Orqto9sWlWJqfFs0p1VwKAYWAQkAhEOcIKAIvzg9IhacQUAjsXwiAoCsvLeSbamYE3U9YOu1DrP6dvgmDC9HXCQpDWQdPpLYhjTbSFos6b22rvqSdt/7CFYE36pbfU8a0ma63pXegFRPh4TOR/Z8RgQcM8JB2DXOijZfaVYhb1DQzU/wsS73NVnmnB3Ji9/k8nVYYc/B7xMB11MkB/GRB0FCgwrOG5ufcYTk8dcIkGrPgPGpd8xVtfeZRS8IEE438yR2UPmG6dEjRSAUWi4G4S3vhszDyTrwPEk9P5AkVK1ScMLnYsHoF3fvwnZRa0xy2h86KbDorIzOQNosOZd58SktKCfRt7ewjveCrnRF3mxiBpyV3Z01OoaLs2Kb5IRU9h9VHRHNicrBr55dUu+tLWv75Q4F9lZbNpLIRM2njS8cGSDzcp1wtqyPSQN7NXzhB+hqw6mhE4In+nDhk/8P6RsT5tecdyrvqzxh9UTsylakVgYtdc0LeibmcknixIsii9TtjuBhYqPp3dlezel
8hoBBQCOx/CCgCb/87U7UjhYBCIA4RQH2759hr9qyv0TfOn8Mj/OvDz1NWRho3rXj7/S94/Ts40or0WrfbgIkFSLyhaHjILWG1lmqZCtBti9ZDmNn66y482fQh2GxM+tSDafStf3C7JT7OzIUWep4kRj7oFTN4XgepN2LKFLrw9psjWjsWg8VDuJHjqRP1nTa2S/oWR62+n7bmnQx5J+JIT6ynJkZ6JKHEF87AxhNGlsSLpkkFyLvs3y+1PVbfxBHU8ot5Yf0ymYtvmjeJK7WWN2yimzc8QjnLagL9mo6p4H8f2eSji6qa+N9lyDv02zR5PrVljwxbczBIvHUb+mn79kTysMODO3EPu38mHEQ0cbxfNadvL79wNSfvrFpB17W09cNSTqTrSfSj5pfSUfPLbM9BtoMVgYc5ONk/oADUlwEQBJ7oBzV0ekoyTy8GiSejjG7bvZb++3hBSNpsYnIqJXpSLbeAdNo53wuS5Xb7jTVBJr5Iwp5BWpqlFZvFOVwMLEBY45xVUwgoBBQCCoEDBwFF4B04Z612qhBQCAwBAoK4AykHok6rqIP6DmmyaEbvuw0XDyt7mwbHCdYoRrduh7Em7kSs/etXUfV1P3VUzD1S9Z1Y20yFJ5xogQEe0rUqnwtuu4lGTpvq9nKI6Tgjgwsz0wqrQPzqJqITem6itHa5eo92GxPqOyfkXX9fHyX3tVBGyj6q78vmZ2GkutKunTZ+Go366Z2m4UTbpEKWvBMBmZF4epOLx3e8Q0/Vvhu2jx83ldOY9dtDlHeodSfq3WkHmJF36FOYlUhfr7QmguzO1Ox9EHcvv9pP6zcGe/AacpwEJxrPSLwzTk0IIfJkyDvMlswmOfvc/6WcguncoRYtmqSddk8wsdi217wuoegbuC4ZM8cuWYKJBV76Ju5PELaof2hW2gDE3d5Vz9Gu6naqWn6ZIczeLEZiWhB5TlR4bh1onV4fWrMTJ3VTYWChT0N2uvZg9C9kzsLYo2oKAYWAQkAhcOAgoAi8A+es1U4VAgqBQUbg+Vc/oB276sKIOxEGHGbR9MReNMIcyjp4uUwV0NndK21kASIBqUBQZUA1AwVhLGv4QbEAN9rNDz0gBXWkte+0i5ip8EDgoaHGlXbvQ0HetW9poo6tftWVthUcP8oULzxEQg0Ctc8XvYsNXWfNBiezzEoohEC0iDRaqYOx6SQIPKTNgsSTaX0+plxlwZSkb6O9vVnUTX5VkZ0az0yFFwuTiqzfLTVNmzXbo1E6reiLdOiMVA+rG+YzNfDo3rWVmv6zKDB9Y3u4UYIVeScGxkKFB/Luj381N6XhZ8carq+fXuUn8WTIO6S1CxUsZr/yu+/IXEIR9dlW10KL3lsvPYeoj/etEydRYbbfIdiooW6ll91ovYzt0xNZIO+2vXU3H7Z98yz+MmtWJN50ZmgxY45cmnQkDrTS4Gg6OiXyhoOBhap/5+ZKUGMUAgoBhcDwR0AReMP/DNUOFAIKAYVAGAL7Wro5iTYUzYmRhSA4ullBeBhUxJK4E1iIh7ltyz6hbTf93BKiaJJ3YiEtiQeFDBQ+aEj309a2GgrybvvDKxl5F14PTcRecPxIMiPysBe41FYlPkWf9zxpe+mBWMEYECti37Eg8K4uNFYUGQXY1+1XroLAa+lLo5b+YIF4KzWe3tQC9djymSNztK9rp+o77R73/eP7pmeCveWke3gqt5XyqG35e+TbtY3qNm3hc7Wxenet2RW0p/xo2/NGh8pyD39Fq9mRd2IdfnZQtjIm7oJzvqINa66xDAH3JCeVNTfkIV/7FuEV6yarwhNxjCrMpO+fNpXgNmyWLgpVNH4ncDXogNEF+jbvWhMg7zCfHYGHPmYknhMCT8QzGJ/32vPyOzJ7+O8ZszqJsarPF+3rJpWlwOMzRjWFgEJAIaAQOLAQUATegXXearcKAYXAAYIAXPjwID4UTRSTtzKyEPXAok1wyOxXX6cPzrTtVV/xFxrq3eFVdNHlMt
O56oOHxK+ee4befuQJTtwlwcSCkQXgCyqmTqGjF1wQkjbbunUVtW6rClmrdNZFrtY2G2RH3olxaaOzaeTl5sYNm9KeIaTR4iHZqNi+SJcFmYL0P22LJwIPce3szQ+Dy0yNN+lPS/0ECUtX9DLTAFz/0SYoYFqBl5vW/PN51DNphOVQIwME7QDsb0tdL32+qTNo8NH9ISX4Pgybt9/DSD1vKLEX7TTa+/7SF5I2a4cLuPKC/8/eewDYVdVb478pd3pN75OQhDRKQlEpUSSPIAoSBATp+PD5HiD6fP4fUTAqUsL7fO8hKH4qIoggCBFQBAl/iiYUQUgo6QmpJJM6vd4p317nZs/sOXPKPu22+e3vu4bM7LPL2ufevLPu+q1V/iDVjH/Q/t5U3ovm8ZKhwsOcuiQeyLtLPnmksUwnlZk58RWfvyBs33nqB9Swu1+dqkPgwROvoGLMIKi9EHijhE/qvnr/Pqlu5+z2e6hOy4sTSnHzF0dR+/O5rU339+x/p4sU92MEGAFGILsQYAIvu86Td8MIMAKMgIFAqn3wxg0vpt0H2wadRiqJO3UxqSqRsvP5s/OEAnFXu+KxQeSd3EvZpDk07bIfBr7rdck7OZETiSc98FAaaybp1HJZq0Wf09Jfphl0Uyih9eR/1y3M/kViRSy3nYYV7TOmtyLw8HMrNd7cX/zZIO9QBg5SIIoWhMBzKqNV14q9JQIQEqmlMgBAqmVfW99C7+0QpcbdOymn/XHXbfYWXSjkb/2hFl/4mH2pp+tgSgdd9Z15zMqif6cZR7xn/NhIdD3cASS6EVqCm9amnXX2XUY6bTLaynV7CC+7Zud7ZyZhcf3IysGEGba7/tGrjATsnsNeejoEHsazUuHpEnjponDrv88TQR/40gtfOGRKgAX73yXjXchzMAKMACOQfggwgZd+Z8IrYgQYAUYgFARqD7X1q2RCGVF/EJiAq15LkriLd/dSo1AGhq1M0l9Zoqd5fV6v99NfKrOk6lAdo0gotgpieQNM5kHebX54idZU0y69hcpqjtLqa+508K876OBfd3q+dsIVR1HJ5ErL62QKbcKjK1EiiwfmLoeq7uHds+jk9u96XofdBTKF1msJbXXhXirIS5TSmgm8bXkjaVv+KON3p3Ws6fPGAwt0wn3PRV4G7sf/TuKjS+DJ/lJJCz4LHo3tnShzj9O+hm5asWazFnknx5IkXpgltH8WoRUIrvDaRpaeTpUVotxb3Logk+S9aZDNVpJRZYIgZbT76tbS/vq1JP8cWTWbRlXPJvmn3T6sSDyrwArz9QiuQAptp3jT5Yg3X50ooVXb/vefMoIr0GR4Dj6XX33xO66Q5hdXEV5qu/ROvXJOfNYVHSaHXSdKQgczkVco1pfuARbsf5eEG4OnYAQYAUYgTRFgAi9ND4aXxQgwAoxAUAQONHYYHlypaFLFEBeJuDAsx3MxHoqkmicVa1LnTKbKQkd1KMvfZNmxF/JO7mvud/7gC1av6js5iZMf3oHctfR68a0kCTxcg3JZB3ETndR2M43ome1rD3YXQYXnhcDL62roU99JDzyQdq8UzqHth4k7da7J3fvo9Pha+tTnP0NVZ15kGwIR1qaCKPB0SmjN65SqO0jT1JCLJ1++1fOWeku/GaoHXlACr1IQeDKoApuRJexOG/OrwHv5nVsM8s6ugcT79HF6ZL0X4BEoBK80EJNQhUIdKptK4OFnILJyBTP0/tuXUv0h+8Aa9DUTeKOOyKEzvqrnbQh1JxoUb+nUJJGHfxuQ2Auy2oXPTdnyQTIOryhM2fw8MSPACDACjEDqEGACL3XY88yMACPACESKQCp98CRphQ2aHxwj3bTm4FaKN81LtbvJUjZdnz88bCNEAGWLG35zs23ZrN0C/JbTbrzlVe09mTseueQUy2uB7+slt9Keng+MkAo0pyTXKMg7zLlpbQ7t+fGd2im0o0t3UU9HovQbKbQb88bRg6WfdsXn2HmzaOnVJxlqQztzfN
dBNDoEIfCcQizMU0ORBZWqVN2B6KoQ9yf2t2rDS/TuxleEd6PGgpUu8MT7wqkLvF3k0NsvgVcSe5DGjXiQhlejdLRfdYc9o6lltebp/XjguZF3co4wSTx5frKcG3/HFyloUhltJvDkOkBkuanwzAServoOcyQ7gdbLDaeGz4D4bGiNDyA9vYwVZV/2v4sSXR6bEWAEGIH0RoAJvPQ+H14dI8AIMAK+EeiIdxtG+sls0uMtofqglBqVO+3brHgLEyPV5w7lwu0eVJAwV2//aC2984tv+1J/+FHhhUngmUMc9tIaQ4knGzgSqHzUgIuoyDs5Z9um9QaJ59QKi3qpopqoSITOtu/YTB29+fR2zlQt8i63sJgKRo2nGeMq6HsXHCU8tAoGqNXCvLcw1rB/+b+eh/RSPitVd1aJpnjfvLfxYdp7cCsdavbG4I0ZXkMnHXO557XbXeCXwCvMW001Y75JFeWitNs0OCg83MNW5bRjxs6lz55zl6f165J3ctAwSDx8eVImVG5QkIGAVRvOD+QPgnO2vPE47X3vScv9NNRNojWrLjNUeVYqNJXA+6d/yafRU0WtvGZLVQKtzvKAXUx809AoiDtZQo77wS7dV2fMKPqw/10UqPKYjAAjwAhkBgJM4GXGOfEqGQFGgBHwhYBVkISvgVwusgpnSFVQhO7+zMmMutfZ9bMLqPA67l4RWrH/tcdcS06txkUyrdd0WisCr7njILWIl7mVFg6nMvGSTVXgSeLHSnGJYIuDeWvFK5F4OS5vDo2ho2hC07m+iEqvmPbsXE+HfrGUGusSKivZQNwJ/o0qBXknW+/IGtr65ir6QcUXXaeR5J3s+J+fny3KRCuEyqggMjVe/obdVPHff3Rdm9pBR31nVm3ZTfDym7cbe4OysqFVj8QTvBGVFefSpz/m7q/mZWPXfVNvfjmmoQIVrN1Zp32T9uxZbTuVQTTLcIfDLJ/X8ll43b2y6hYv2zH6njZvieGN56eh/BPllYeE352Tzyi+KCgtitGKn188KAlanVeGWpiJPIRYHHtWKR1zhkiq8dhSnUDrtFzgFxc3tlpq7JTu63HroXTHvQkM8UUIN0aAEWAEGIGhhwATeEPvzHnHjAAjMIQQiNoHTyWtzOmbfPtEqAAAIABJREFUqQiK8HK0UIIAnzB8jhIPxOEkkCJ1Fi+nklO7fQYl8Dq7WqmudZcrjNUlE6ggv4RA4Mlyaah9ULatG1CSMNnvTzrtfv8gdX8wmDQs+NKRruvR6QASr/vxpY5dcybMpPwvLqYnV26ix5/7u2NfM3mHzlDh3ShIPLSigtzI1HheSDwd7ztJvkKx63Z+IPDQQB9Aqdba0SNID3uoimI5AosE2RA2gXfXT3to0xb30zdSg0E6ivVOO4LoovPeo+ee+YbrhbKs9uxzf0zDRx7j2l/t4FV9J6/1o8LTJV/V9QGT+nV/pH1ChYeyYSd/ShB5Rkiv+J+SUTPo5C9bl867AZQuCbR268S/CXXNnSJsZ3A4SroQeex/53aX8e8ZAUaAEchuBJjAy+7z5d0xAozAEEcAaqgmUQ4UdsODGMgXkFYgbVqFObr5wR+kAB4K082sXGJRXS5KHQU+QYI1dAIqrLBf0ZMoXVvRk0iBnJ+76PCf5xnkHV5oiZJTQTwIgkQnb9MPgSdDLHTJO7mfGZ87hWZ8PpF86zdZGA/FRZsb6OB9a6jzvQO2t2nRbSdR3tH9yr8g93P3609Rj3ipDcRd7kmLKHfiTOPHd/5xLW3Y3UjdTYeop72duttb+7rnVwwjkHe5qLe1aPf/6yf6fgrSIyo1Hkg8eOLFNu62XEf8yHHU9K3PO0Llh/iRBJ4cWKrxOrt6qa0zcZdCcYcSREncyb5hE3gbN/fSj+91fmeAuDMSkA+L9b5+bQ4dOU14I+5eTavefoBqHZR4WPdxx19FCxd8tc8PUJfw//1LF/u+Tb94+qPa1zqVzOoMsv3FpdRxYIPByBr+fw5wAsc5Z3
+XqsbNGpAyrjMP+qRbAq153TrqQOltin/vUpFWWy5Ugvi3lRsjwAgwAozA0ESACbyhee68a0aAERgiCEThgyfVZm7hDHhYKxYEX50o50rHFoRglGbncaHU8EJebe9dRw93O6vAFrw2hQpXvtdPAon/yhOVam4prrjAD4HXuq2Bdv3mA9rbuNHTMR26eg1dfNz/BjJ5h+qu/abXjXJFkANO6q8wSTy3jf7zz9/o90JzkiZZDKQSePLXUavxzCQeyLuuGeOo7YP3jJfaio86hgpGj6bqmgmGSlBHdadev2rdb6m+acegnRuKUdFkaIkVxmETeJjDjsRL+NklVILyCCV5N2A/gsQDkWdu8LybJ8i7sePmGr9KKEbxhUVcK204GQQeSj79nKG615a96+nAB09R2/71fWWZ3RYKNFwz6fTFVDp65mF1acxIOUfQgy6piS98QPjii6V0a/hMrxal7/sbOrSWBuK0UuDf3tlt7MdNuao1qEYn9r/TAIm7MAKMACOQxQgwgZfFh8tbYwQYAUYAiorauvZQgPCqNpPqnn31eg9EoSzSwyCGGkSEbdSLoAndFsTnToe8M4iCHc30iUdbqYAGKrxA4kGG50SQ+AmxwJzrf/EK7d+4VRcGWveJ5dQwbC+dPOVKOkW8nNrj61cN+vXsEWNo5h6RcirIO9kSZY4IELAv54uaxJPn+8W7VlqW0ekAZEXg4boo1XjmdcX37aWml16wXC7WkS9YlIq5c6l3+hydLQ3os/WjFbRNvKya0xlOHj+fpohXFA0k3rPP9/aV0yY87PpVd9OnEn32zITyLkiT9wfSomWaq914URJ4fpSTbvuWqbTGGcogj8PMZ8momVSzYPGAIdAPhBxIzdaOLkNp7UbkIYG2Q5B+qsec27qS9Xs1wEJ3TokBQkOaxf51MNAd264fvFvZ/y4oinw9I8AIMAKZiwATeJl7drxyRoARYAS0EAjqg+eVuFMXNaqq0FA0uD3YaW0k5E4yZVAnqRcPtHhYhdrFKqTBbWm65J0cZ/rvtlDNzvJBJJ5TSa0f9Z2c7/+8dDrNfuMMqjg0xm0rtPbjy6lx+F6j38SqYw0VnlVbc2AP3fLqc7bjfWVZAc09UErF+bEBfQzfMRCVNjdN6R/Pdl2jnw6yFBGKmgv/d6WfIYxr7Ag8OWCUajzM0SiIuy5B4Fk1YAt4oRxFyx81mipOP8PzXu1UeHIgSaCBbJYVmVGo78wL//BDot0f5RkEUefhPYK8C0rcmeeRaa5xUZdrp0CLisCT9ykIoyiIMBB5xr0hSErYJAw/ahE1OyjmQGKVF8eMvsDCaU3pnEBrFWCh+8bwQ2bqjq32Y/87P6jxNYwAI8AIZBcCTOBl13nybhgBRoARGIQAfHr8+NBJ4g6lQfXNcV8lQvCZaxMPmu1CdZGOTSeJdsfaBioVD6cQo6AkuWx4IVWNKvK0nd9230E7etdrXwMV3lGP7qZhNHbQNQmFzOCSWr/qO0wAAg+t4uBomv33hZbrbBxWS7umv9dH3slO/9/pLw3q70be4YKb/ztRczm2rHIQiSdTQHHvmS25wlbhgdSCMghlg7IU7ul/7CK8/DQ3Ag9jRqXGs1PeSdUd7mFzqR9KavHy0uoat9Pq9Q87XqKWsM6deRlVlk/yMoXnvijthxosKmLLakEyzdWqrHbN1icIL69tzpQLCC+rFkbJrJf14L5BmWiBUCvj3wAnv1D5PsL4dupEnc9bL+sLs69TgIXuPFETefgSqbJ04BceumvjfowAI8AIMALZgQATeNlxjrwLRoARYARsEUA66KEm/TJWmbaHBzK3hzY32NPZ8whrd3poe/8VUYZ4IKEetFKDHXPaaC0iz6v6TmJqV0orf6+W1E679BYqq0kESvhpksCT14LIq6wbLXztEyWtIO7smpnA0yHvPvlaDn3y9f5yRisSL0EAYX54mPXTeDGRShtWMq1MXzWrKteLAIv/EkEWXtu5J0wgvHRb2Go8K/
WdWXVntbZhF1+mu+S+fiDxUEpr5YenDnbSsVfQxFFTBTkaN4Igwm6yrBXjJtOLTO5DklwxkdyBL0tUksuPCs8qwEItma3fuJa6d26iztf/3AdlwUmfo7yJ08UrnMRm8xl5SWBV+6pf/CT2UEj76sOxdAjzPsIZjq4qCs1uQt6TsGjAfd/S7hDT7GEjw8oLDRU4N0aAEWAEGIGhiwATeEP37HnnjAAjMEQQ0PXBC+LvZgelfJjTKVNNxXFYJdE27GunNX/bZyi/3IzJdUg8JM7KtFmve5y3czpNf7WDmnessby0+oij6MR/vdNzEIF5MDOBhxTRBHHpvmIzgXfR0/e7XmQm8IpEGe04ocSzagi4SPiZJRYTBoEnkyRBKEGdanXOMonWdTNKBx31nXm8sNR4ZvUdMJMEqNt9XC7KaGOinNZPgydevSDzVCKvSqjtqipq+jzvVHWWXzWv1dpkOWnCgy0cksQPBrhGftbhNpXBNvvq1tIrq27RHvK0eUtoVPXsAf3VktlDv/kRde/aZDte3oTpVHAyyLzoiDyoVfG+ATHlZI0gQx6QUI6+hUicFkpmnH+6NS92Cl7WLsfF/d8oyouDEtjsf+cFfe7LCDACjEB2IsAEXnaeK++KEWAEGIEBCOwXQRJxm/SDKIg7OXnYyoawjxUKLDQodyQOb72wh2o/atH27XMj8YIQePNzF9H83POoefsHg0g8eN5J4qBKpCfqpmNaYSgJPGF7ZZR3CmsvrWb2wNNR32FgM4GHnx1RNcJ2TjUcIe9i/wo89V7XSV/1QuL95+dn08xxFVq4WXUKqsZTE2d1VHfqGvyU0Zr3ULytftC22iZXDfiZU8mpF+Ck71rQBFYvc+r2lXsEqQjvON1SWivyTi2Zbf7d/ziSdwPO84vfiIzEwzwykdctvEItKe0WMdoyuVwXy2T1g1JckmxRzOlFwWg3f0z4JowUnrLcGAFGgBFgBIY2AkzgDe3z590zAozAEEGgvrlTJAUOVKhESdypsIbhLRTVMSGJtlSQeHiwhGLkvTcP0MbVhzxP98kv1theEwaB57YgqXCKd8HHzVkZYzXWmzt+Qys/fMAgLQ8L3dymNH5/0bz/oUnVc/v6InH2iQ2DU2fNg9XsJLr89wNLwaqLSggvp2aEiVw+k3oumKpNsMrx1JAKELa6zc0Pb4Yg7VA2G4S8k2sJosYDgdex5j1t1Z26/yAEXvnqWoo12JdFxiuLqGlufziKLDlFkitKTrt0ZJ7KYqNIYNW9F3T7qQQjFGe79n9gEHn76weXZY+smm143qnKO/Me2x77X23yTq6x7D/u1V2ur37qHt2CfdB3hCifxZ9hKNF8LdjhomSl4wYh8tj/LuxT5/EYAUaAEchMBJjAy8xz41UzAowAI+AJAdUHTyXukBgI0/coU2LTOcgCCrxyYdIOpQxKKV9+dJsnXGXnmjmVVDNnoNpI/i4ZBJ6cC4odJBUeaup0Lf/FNSAwK4QpOgjMm/4039PerRJodQk8TCRDLOSkOgQe+lb9n1Op6mOjB/mN2S3eKqTC00ZFZ3jibRAv+SeuB2kH8i4M4s68Hq9qPOyxa71Qab67ui9h1sse/RJ4buSdXIOZxMPP1TJmXdJZehbi/vZK/HnBI6y+VmW1arCFVViFOWW2e+dGavv9XZ6XBF88lNNG3XTCK7AGfJGDf2uQVotmF3QR9Xqtxk92Oq6891HarotDtVBZ497gxggwAowAIzC0EWACb2ifP++eEWAEhggCXaJ8FiWDeHjCQ4CT91fYkERdnuRnvTJhF8RViSif2n2wzRjmb7/f7mc4qhxZSMd+ul9lZB7k9q4rfY37nfwHPV8nyR+QIiBorZpVKemOutX02Kpvas1nRd7hQi8EnrmMVofAyz1qOBXffhLBW2qYSDh28+KyC6nQ2mSKO+mq8eS9XL/9I6p97i++Vu2HwFPJu3hvHcVpcAltjKoollNtrMmKxOsvsUx4o9mlnEpFGt6vDc
JLLMovHHwB6HKRWlaLLwrs1m+VMtv52p8HBFZ4WV/UKjx1LSClkJAKr0rpAaj+Xk2glQRWXKgvrfp62WPQvqm0eZA+ge2d8Am09uKU+2P/u6AnzdczAowAI5AdCDCBlx3nyLtgBBgBRsAVgV7x1AhCx+1BwXUgjx3SKchCqkUQjCBVasMrCgw115b36mj7mgaPu+vvHnYZrfS/87Mg7BMEV4cgPFCypjYnUkuHxAN5d/KUKweUzsrxvRB4uObyx3KoZpfIus3tojKR2FgaK6CSPOswC/Qvuu0kyjt6uDGdLOGzUhzqhFT4wTUV19j5xlmRsIce/a2vJXpNoc0XSaIV79aKkJM26qBa1zkLaQzl5RRT47FjqEukfZqbNPtH+TbK/VWCC/svKcw31Ft2hLTrAtKgg7msViUrncqCm//7Wt+rTyaBJxdpRVbaJdDqEpu+AdC4MKoAC42p+z7H8CVXmXjJwA8zwZsvPs9HVQ9+3+jOwf0YAUaAEWAEsgcBJvCy5yx5J4wAI8AIOCIAkgrqj1S0ccOL+1RuqZjfye9PlvhuEN53URF42PNvu++gHb3rtbY/KWcmXZb3ba2+Tp3UktoCUS4L8k5Hffnq1gdpp1Dk7ax/t294J+JOdtINsZD9x096hxbdN5bGbKqgwrxEoAhaLKeQSnIrKJbb/9CqknfqnqXyB4b62Bv2iL3qhFQEBjhJA5hTXOVZSkJeLkMNstBdml/1XW59nRZ5J9cBEq9z8lgyB1uo61TJSnmW8MoLM7lWF5eo+qk+aNgXzhLkjR1BmWkEHnBTwysQrtMtlHZ2CbTmvslOE04XhbiKgzkchP3vono38riMACPACGQeAkzgZd6Z8YoZAUaAEfCFAL7dh7olFU2q3JLtWwXiQ5YNg7y0ejjEw5HoZigT/ZbQOnngSby3966jFT1PaZF4fkpn7c4ViZEgCND2N3RoeeMFuUd+sPJZWnvQWZVVXFJHE2r6wy5OWj6FjvrzuEHTgsirnjvLKJt1arIMDn1wn5tVh0H2k07XgpCVHmJ2Z9n40gvUtW+v1rLzR42mitPP0Oqrdhr2123U2rvV83UlOVPo0KcmO14n1WhQyWbzWcryyVzx4bNPKBrtPhszkcCTByyJZySotsfx789ANbB6I6Av/DhjhwlbKBRb1+6mtnV7Btwvw88/3vN953RBsgIsdBdtReRVlbL/nS5+3I8RYAQYgWxHgAm8bD9h3h8jwAhkFQIf1R6gex94inaLP0W1Gd22+BoaP2aE1h7xQHRAEDipaCAe4sKHL5klcKrPnVPZsCy3hGIrihALM95OoRZBymbN8wwkL7sN4seqpDaK++Gip++3HdZM3hXl59O4skTZbNU6kUS7vj+Jdut5B2hMbBp9tvprtuPJcuG4uL/hv4VySxDVdn5qUew3GWPKcAOcYSxfMM6i/d9/PE2PbxjsezetOZ/O6a2hGbnDbJfml7zDgOWvrLL0vHPDAZ54TafNc+wmfeDgjQZCBySek2+c25zp+Hu1ZDZHMDbwrbTzAPSTQCv3nIoSWhVvNZBDEs9uakp8HtOHe2n9zU+KLxvI0i9w+BeOp7CIvGQHWOjej1I1XiSsBdBA9HJjBBgBRoARYASYwON7gBFgBBiBDELgqm8spQXzj6fTT5lHDz2xnF5a+Q49cd8tVFHWT3o4bWdvXXvkCiyr+ZF2igeRevFQHnXTJe7UdUifrdV/32944XltTv53dmNBkSdLalEyW5Mzy+u0tv0lBmrKsPTgAvnj9hAddCFOpbTTZ73UN7xK3jnNeVbV9TS2YPqgLlZ+fqr/n266adD9Rnm9fJBXS0nXHNhET215ntYc3Ew9grSEd5y59cbjNL2jmL6VM5AwA3EXEy+UzvptRa+87OtS8SlAradZqymtfOCcfON8LSANLjKnzGJJ0ocN/21+b/oNscibMJ2KL/r3lO1YvjfVUnb5ueQUPrPz1j8ZqjucPbg83Nsg8syteNZYmnjzOY
H2l8oAC92FFwoQhosEX26MACPACDACjAAQYAKP7wNGgBFgBDIEAajvQOC98OiP+lZ809L7qFyQd4uvv0RrFwhuQOJdshseUKvLCowSzqiaVQmWl7lksunfntlF+3a3al96zGmjqWpUehiMq2o0O9Wh9BlLhkrNXE47bMRWGj4yUXpZXVQsXnrEs1mFB0IY6iwnPz+p5MI9n+zSbe2bx6WjJHsSnlj979sv/unrfVeC2EODz5hVmz18Gn3/ZHsFo5+1+iXwMFf7aZ8eNKUke+xUaNLnEOrDTCZlrVJmVTASBBdUhzjv/rRaP2W0xV/8BuVNPNLP8Xq6Ju/DOBW+2Eb5W/v9VfE5lDO9gJo+WUjxKf3elhjYXCLaLKwLZDu47G06+Ie3B8yfK0i8PHFRt0h26DEReUFJvHQKWLIDHcrFKvFvJzdGgBFgBBgBRsD4d1SkElr/X3yMDyPACDACjEDKEVh48bfouqsW0bmfOZUam1sJf3/jmXv71gVSDz9bLkg9nVJaPBQizCIVbYxI0dsrvJ7C/lfHKaDC6z4lAbby2V209yN3Ei8dyLt1K5YZD8XwmTrprIsNLz+3UmWn1EuvmOn0hyJv7YFaWt/7OxpW2UzF+TGdywb0+fKoH5NV8qrTQLI82s7/0PMiknSBlepOTv391+4RHoObB5Ic4vxRYmenxvveSdfTnBGDFYx+t3Ng649ownbvXmS7at6mEVO+1Tet9EgD+aiTjm2XyOt3H8m6zsv7rZ/gyjO8HEFSd+/cSG2/v0t7ucki70p+2TiAuMMC8UUIVHO4F9G6BIHX+pWKQWvHPiuFtQJCPCRxu/HSX9juUXy8Ca9SQeSZFKcTbjqbSmYP9s/UAStdAiyc1lopvqjAOrkxAowAI8AIMAJAgAk8vg8YAUaAEUhTBEDYXXDNEmN1IOjQrhYKvEWCzAOhJ9sNN99NC049bsDP7LaUSh88BFk0tXaF6k0G5Q5Kc6EqBAEQRpNle9vX1NP7bx2wHLJyZCHVzKlKmfJu//a1tH7lMjqwY51B3oEgkA/NIybNopmnnk8ja2Y7wiEfoJOZ8nn/vn7lmNezum7iT4wwDq9njX1CwQILqahLh73uyaq/neoOfVE6+4PXf2I7jZ0aL2wV3oeH/oNmv3e25+2+8clf0tzcR4zrrEpJdQY0J/KC0EnnFtY+O7dv0CLxUkXeyc8hnIf5Sxo7Eg/nJlVwex57i7Y//HfXL3jyE5Zwff54QVR46RZgYXUfj6oqFKSoYC+5MQKMACPACDACAgEm8Pg2YAQYAUYgTRFYv3kHPbTsBcPFG+q6a4US7+m/rKSfihALSehh6Qi1QMPvdVrtoTZLzyyda4P0QfkYHu6ghgra/PjceZ0zofbJp3de3zfAN7ByZFHKiDvsYcXDPzSIOzSoXfCwbEVinHrJza4kHsaQ/n9QZkYd/OCXwMsX7NsNNT/VUmnZnbNUbzW2oozcwlTL6w0Scn8n1Z2c6vENz9HjGweHVqhLAVFppcb7/Tk/Dm3Fm3t+SFXbiz2p8NYe8wz1VFXRjPwlokw0XwRx5BLKm/0ScAh+qCgpMMpN1TLM0DYZwkBWPnBeh03ssz/Mo+PVP1Pn638eNAw87wpO/lxSymZRMouXbLjfcN85lap3LCgmvOzapst+4eh5p16XIAsTP+kSleVHPvwvXmE1+o+qKqKDjdEnc/tanLgImI4ZZo+Z33H5OkaAEWAEGIHMRYAJvMw9O145I8AIZDkCTz3/Ku3es99Q1kF59+u7FhtEntn3Dr54V1x4phFsodNS5YMH37JiQYjViYd2v016n3UaXlhdvh/+dedPlGAK5WBb3LUsVXfMIP0keWd4TImHOyu1izq+Lokn/f+iLjX1SuChZA4PsfC/unpkcAIqXQMuQEjD6wqJq07lzzoEnjx/sxrvwiM/QxfOOCvI7dd3bW3vMsJr9rufo4qGgeWLHUIN22kiSFdOXEbVRzfR7IIlVF18NDWK9y+It/
LeYMma6RpyIUtmQRSjDDZoU33j5GcRymq7d24ShN30pJB26h4qvnOo76/YK5oOEdt4u30qsiyfdfK8M+OIqTH/sU9c69kbMRMCLNj/Lug7h69nBBgBRiD7EGACL/vOlHfECDACWYIAUmbLy0tp0ZmnGKQd/O5mTa+hf7vyXELZ7G7xdzSU1Oqq79A/VT54kjzxE2Shep/ZGd1Hdexe/KuiWgPGRdnsa4/eZhB3vTbJo1bzn/ftRMmiW1NLTUHyhu1ViPlXtTwnXs4KMvQDJdBXFnx4IfDAC6vJMIFk30vm9cv7Gj/XIaTV8AodLFQ13vnTwyPwMDdUeM20jirqxwol3nFUuH80NTcLFacg52Tb0LaF/li/nDa2b6FcqqRvXPFLmjLhqEFLLxMq0iBknvQ6dEo31cErjD5+SmZbu+qotbu+b/qSvCoqya8etBy1fBiK2VSEs6jqO7PfnRt+LdeUU/cR1v6XZv+7hOddolTWrUr6uD9cKwhw4bVnCv9wWk8mBFiw/53bHcW/ZwQYAUZg6CHABN7QO3PeMSPACKQ5AvC+qxDJskt/8gh9bN4sampqMcpmQeA9IFR4J86daewAf0cCLfp6aR3xblE25F8F52Uuc1/4+YDA0yWHwgyoCLJuSW6BOGsQihrd9QeZU70WysO/PXwr1W5do6V0Ua+FH96s+edrL0WWDkeR3rqncxM9V2/v4YZFInESeHcpT+3zSj9D80r9q8f2rtlO+9buGIDB8V/6lFBXxgxCW0141QYqYEdZutws5ncLHZFTeVHgqcuDGu+yOZ+jz09Z6Pn+sdtmc+9a2tx7q/Hr7Tua6O9P9dKMoql93f8kiLtEK6A8GilI2XzxyqVrL72DptUcM2jYAkHiDQ+gyFNVaqkqk/ZaMnugYysd6NhmeydNKplrSeSlkrAEgVf0UptBsLspgM0bcyqjtQuwMHveWYGFElo1FEMnyCcTAixGVBYaHoHcGAFGgBFgBBgBiQATeHwvMAKMACOQJgiAkIOfHbzvlt13i6Gye3P1eiOgAgo76XV32+JrAq84VT541eUF1CJKX3W81qIIqAgKnNcH9KDzqQTmz2/UJ+HUeRFqMf/S73paSpQltc/W3UO18YEpqlic4Wsl/qdHsKNmxY1f9R2Iu/efWDGIvJNgHHPhfDr1ytMpmUEeXpNX1YPzS+BhjD9f+BPhdwjCMh4aYQkS7+87HqSf/e6PNvcXyLtRgrjLM8g72aIi8TB+KkIu/JTM7mhZNUB1h7W3NLZRS1O/txx+1rxHlFVvrKaPn3EMTZg6ug9DlbAM80zdPijKXmmnAkHi+VH/ORF4B5e9TQf/8Lbl9H2ed4Lk7TLZV5pDLKSyDgM5hSaBvG8XJevtimLUbe/J/D373yUTbZ6LEWAEGIHMQYAJvMw5K14pI8AIZCkCkrh7S5B11wmiTibM4u/jhOcdfO/Q0A9N/j0IHAeEcbda6hZkLC/XggAzHqwcEmOTEVDhZc3mvon1xUSiaacWEel3LkkWSjXJk3dc4nco0i2jVSfAQ/MwQbiCTMNew1Qdmkk8hFQYYRwWk5xVdT2NLZjuee8g71685WHX60bNnkTn3nGlQW5F7XXoR3WnbsAthdZuszKFNgpy6/ZHbqT3d/yNBBVCPeIlW36OMN/vrRLEXdGgZU2ddDRdd9lSy+UGLaeVg8rQkqjJLT8ls2byrrNDkKqCvIt3Wgf81K6KU+3qLoPEw0tt6pmGnfJt/jyoFGEaeS+0UvyZJtf3lVUHJwKvde1u2nXbM47jSs87fE70HCbyJtx0NpXMHujDiEGkShEqQav06XQPsEC6Oj5/uTECjAAjwAgwAgP+Pe4VjSFhBBgBRoARSA0CIOmgrIOPnSTukrESkEJNIZire12rk++QfOBKVkCF17Wr/aNUqNkRmMkm8OR+JZEYdkktSLx9XcIb7XBIhfn/GhkTmybKZj8TKXkn9wgS78wfXG6U1HZ3h18mHUR1Z75P/ajwvn
fS9TRnRD8JGha5tW7He3THI4sHLDGhlsoxSqDjDv8Xpp0KD4ON7QkWbiEXJMsqobCMgtzyo8iF3908nlv3AAAgAElEQVSO1tUDMNv3UX8ohN3n0ubnOqi5tseSxFMJqyh8AFUf0E5B3qkJtF4+R5088DCOkwpPnUcGXUB9N+bbZzsuQd7rqj9eJgRYsP+dlzuL+zICjAAjMHQQYAXe0Dlr3ikjwAgwAn0IpMoHz+rBKV187rzeHpKUiXf1hpI0ifEqSmMENRrIMnOqY6oIPEkO4IEy8RDc7RWqQf0lSbm5cT1taVpPezo395XVgrQDeedHdScneuSi2z2v8egL5tPRoqRWegCGFXARVHVntZHvv3YPrT24mc5cNTh5evOYPbRlbG3fZWbyTv4iDDXekysfJrxkyxX3LghZlFfiLjFVOw7YypnzL6Ez519qeU7DxIWFRpRJOC1szzg/JbNyJ2b1Xd3+Rlvlnbp7qcLDz77w1TMGlNPKflGU1QI783tfTaH1ckJOKbRynJ23/ona1u1xHbZEkHfH/tf54n7LIQR6ONkymHHBZ3apUIMHSUR3XWDADux/FxBAvpwRYAQYgSxFgAm8LD1Y3hYjwAgwAk4I9Ah1TG1df7lbMtEaKYy5QVChQcFSIAIadEzHk7lGL3Mh0bRQ7MGKdNMZB2QADNWLCpxx8EvgeQ2xsFuzqiLzG+ShkrUIUjGTlDp4ufV5//EVhu+dn3bJY98xLlMJn0afSlVJ8kShKM17uo32Pbyd2rs6LLcJEu+nZz1HduSdelEQNZ5K4JkTSYMQeGGV0ar77Cdx8oySSh0fTitw/ZTMquOsb3y5768ona0/oF+OuvrXCX+88UeMpvP/9QzbWzysslo7haGaRKv7PnNT36njuCnxhn/heBp+/vF979XykoQ1g1WprDqu/JKkWJSn4ssI9E/Hxv536XgqvCZGgBFgBNIDASbw0uMceBWMACPACCQdgVT54CHIQnoZIX0TKZyZbubgN7lVKtF0cFi3YhmtX7nM831y6iU308ia2Z6vs7vAT9kgxpLER1gqPrv1BSHwFiy5lEbPqTGGBuEDr0M/5KzZvzA08MVAsTsbKXdDwiutravdIPHq2hv7pijKL6Ri8ao8djjFb6zQmtqvGg8E3tOvPZIogxaqO7ViNt0IPAkEiEYoyuDt2CiUW15IZL/3vnoIKoFnFVrhdGCSwEOfG/7rMtezDaI8xBcTsfwcW1Ks5JeNlL/V2rPPvLCuKfnU+hW9e1G9FkSeuUnizvxzfL7Ao69V/JsCL0unf1Pwb1AsL1ecfU8kpdWuB+PSAZ85wysKgw7D1zMCjAAjwAhkIQJM4GXhofKWGAFGgBHQQQBlRy2CPEtmk4QVHq78KtaSuV4vc8lSMxCSIOScml911oqHf0gHdqzTXpafBFqdwRPEgEgU1kg09btXnXVY9fn/f/Bb29RZtzFlGa3aD8rIipL02CuUd/nipdt6ZuRrk3gY04saD+f6l7cepUde/I0lCZauBJ7Ezuteh1cUEPzl/Coy5bwqgadbPiuvVQk8q0ALu/uiTCidSwrzPb1f8RnmFDaEuXRIPKfgCt37WLefWiqLLwqabcKSZIAFCEoQlVGoZHXXbNWvXKxJBj4FGYevZQQYAUaAEcg+BJjAy74z5R0xAowAI6CFAB5GDzVZl+BpDeChkwx9gNqlVXiolRTlEUoos62B1EByYEfc+kE/qN/f/u1raeUjt2rBFhV5JydXPQDtFC9RKtHsQAibwMM8cq9OibzJ2Gvhl93DDsy4dJ1bTN3ipdt01Hiqr9+FPzjTcmg3Au9/vvNn2yWFFWLhtmd1r/hCA9595ha0ZNY8nkrg6YRXqNf7JfDkPSw9Nu084/yE8+R9GDeUeOZgCxB3UN51HxFzO4bQf69+zprLpc0+rGbSD19qpVoRzv53od8SPCAjwAgwAlmDABN4WXOUvBFGgBFgBLwhkAwfPDvCakx1Uco8+Lyh5L23mn
wJlaF8GFTLZd2ULW6zuinxoibv1PVZeQDKvUaRiOmGTVgltFbzWAVcJEthKNV3naLsr1PcVC3iT9li4qYrE9GcBYjntGgd9w9zg23Q760UavL9jFRX6Td2+yM30vod71uOb+cwNnXS0XTdZUstrykQHNrw3vACLHQ2LktNUX6pEjhhlMya51dDLLwo8NQQC4zpRYGnrkEqheNdPaR6WUaxVx3so+wjU88xh0whxs+sAixkyXyJKMXFFxJhhPX42RvK0aEQRCAMN0aAEWAEGAFGwIwAE3h8TzACjAAjMIQR2F/fQfFup6xIf+C4Kc0QZFHX3GmpePE3Y/pdJR+I4bMF5UvYZVpQ46GcVvrigbQbMWk2zZp/ftLBUMtM4S2FYBJzSEVvw0YivEwtZ9LZoa43CIEnQyycFiRVSiAnewWRBrIyGSEs8L6rX9NOcQd5kB2R54fAAwaqQg37BalnVVp5xdLPWkKGTxarYnIn9V3YCbS6N5ckcHAvy/dsGCWz5vlbu+poR+tq48dePPA2P9dBzbX9n9U6HnhOe5cELUpNkeTqx+tRF9tU95MELT6DoaJFsyuFlve8TrptFPti/7soUOUxGQFGgBHIHgSYwMues+SdMAKMACPgGYEofPCkSgkP+lC0WJnEQ7UF4tDNK87zhtLoAllOGxOKj1ZRllUviLxsbiAs4d0EZc/+hv7SbBB3vTueEU/Mm2y3n3PUv1NO5ZGhwfPIRbd7HsvK/85uEJB4UMmgHRB79Zto6mWR9Zfv1e5enZc3QI3X+Z/l1DvTfykjCHfcx/AUsyI+1u14jxBoYaXEg8umWph67aV30LSaYyz3kiryTl1MlSDbS0QqdJf4fMJ9HEU5JVR4vS9vosK/7qdu8X6RrW1sAR06vpzaxw0MMGje002b/9JvOeCWQqt7o8j7GGpsqIWTcR/rri3sfiBoR4hgCPl57Jakrar37Mqrw14jxkMiOUJWuDECjAAjwAgwAlYIMIHH9wUjwAgwAkMYgTB98GTZpI7SLKHYig1SaWXLUYDMKirIEwRml2F8DwN8HVP4TNy/WkKKAA+Y5Us1T1fdBur94H+1thUmibd3zXZ68ZaHteZFp1GzJ9E/fc891RN9Va87JK/qhnloL8ai4+/q6uisG7x5Ro6O9ZMAQRR48t4FGV9VlhhTls+alwoSDy9zgwrvjPmX0JnzL7WFIR3IO7WMFJ9RpUUxreAHL2fb9WEttd73AnV2t1KP0Cd2xbsHkYQg8nafM8IY1kze4Wdf+OoZNGHqaC/TDuqr+t3Fu3oN0qhLEHlek3kDLSKJF+PfG5R+Y38gyXC+OspZL+m2YWyH/e/CQJHHYAQYAUYgexFgAi97z5Z3xggwAoyAKwJQmewTZbRBmlSaYQwvybIJwi9mBGlYmccHWVOqrrUjMaH+QLgFCB835Ueq1u51XuypTDwIW5WQyrPd++zVnhRMYZJ4uqW0uuSdndedWmaqeh56xdOu/47OTnq0vp7OvreXxm3RH7VU+OGVCSUemh8CT5JZ5hAAnfRWM4l33qkJ4q4pp5dAQ3YetvcqE9I8eN4VUmr9vuTZmktmdQI99E+ESJJ38pquHvHZ1yusBGxIvHcmVFLt6oFJ4WGQd1IljftV/exVy2rTIczBC7Z2fe2ChUBgSqWb9MezGyOZQRfwh2X/uzBOnsdgBBgBRiA7EWACLzvPlXfFCDACjIA2An598Nx87nQWgIeo6rICgnIrk8tpVSzM3m8qDlaBDzo4pVsfnZAKKO/ymjcZnlMo0dNtuaf8TLeraz83JZ4ueaeTMCvJj3rh7RhmKSLUdzvjcTr++V46frnrlgd0gAqvZ0Y+xW+s0L5QklYgm6FQsiqBD5vY0l5cBB11UmalxyMUtSgj9tPM5J06Boi89lZx34jxZetq66WDBfn092GVfT8Lg7yT6mC7L1tUL0Azeetn36m8RhKzTupn6Y8XF/e7m/pQBhTB41NHved17/APHVk1sHza6x
jcnxFgBBgBRiC7EWACL7vPl3fHCDACjIArAiAcUB6n2/BQ5KUEyW1cnYcstzFS+XsdckddnyQM4KsUJtGTDAx0iUqspefVfzOWBDUJHnxBCOm0nImfo7CDLUDk7Vu7Y8D0R18433U5qjILiiQrMksdRFX7IMkyDP+0/9q3r2+Kf/kPPQzlBSDwvPjf6ZBZ6n511HhuINfv2UrbV71EDbXbBnStHDOZauadTlVjp7gN4fv3XpJXgxJbLb9cTt1bnX0MOzviFBckHsItZHtjWAVN/9zxRupskGanRLMbU/WAsyuZDrKeqK9VS4R1EmW9qA+t0m3D2A/734WBIo/BCDACjEB2I8AEXnafL++OEWAEGAFXBEDegcTTaarySofQ0BkTfeTDZXtnt6FsyITmxfPPvB+vD5fpgAcecOFvBxWS2wMxQit6d/65b9mCwzOIPBBgbqRWFASeH/zsSkjdxlKJHnOJotu1Vr9XCbyxm3vpHA8CxZFHFWup7yQxC4N/L2Xw8r3r5o1nt+93n/3VIOLO3BdE3rGf/Wc/0NleoxKzXolWqdhCua2Xaxu/85CvPRQuOIYKFxzr61p5UZAvSbwQW4EWGeLFUEzCnsFr0rkslYU1AM7W7XNO3gv4XAuD5BxWXmh483FjBBgBRoARYATsEGACj+8NRoARYASGOAJQgSFJ06kFIat04ZXlSeifzj5xeBiuEIbv+eJPr2SHioVXRYwujmH3s/N+c5rHTODJviAuXUtqK6ZT7tHfDHsb2uMFIXfUSfBwD48tHcLTaXEqgYd+uiTe7qlEs7/vHnQgSYigISte1Xg65J3EpXh4DbWOucD4a834Ipo8IZEA7KdJlWGQc1E90RpF6SvIPLfml8DDuBW3X+42vO3v7fzuvAwYVH3oZa6gfb2oKu3mkiXiuWLjOkppYAzCEFYQQbwD2f8u6Onz9YwAI8AIZD8CTOBl/xnzDhkBRoARcEVgb127o9cVHmjCUBO5LgQPqyItECmmBxo7XNVaOuOF1Sfs0mG5Luw3lp8TioIjrL1iHKeQCrd57Ag8XAcckVlgV1KbSgWeX9WdHR7AECm12LJfhY70wFPnAIkHPzy7UIu3FxK9fWYO/eeoUY5HFQbZoU6g642Hktntq152u43oYJ3whYsnyoYPxE6kg+Il26c+XkV4eWmp2m/Hi+9Sx4vveVnqgL5+Cbyw95vuZbVu/n5eD8DLfs1BF169EvGF0CgRYMGNEWAEGAFGgBFwQoAJPL4/GAFGgBFgBAxyDuWrsoURUBEE1rAfPIOsBddKBSJUSlBZuJWBep0vDJWM1zmd+uuEVDhd39uwkRBiYdecSmpTQeCFpbqz269Up+mqtdRxXm1pIbzsGsItZIPqbs+0RKLrKaWlxsuqJWu/Le3WZYh/u/+7jrdrh1C0HaqPD+qzoeTaAT+DGu/K88e43vqp3q9TgIXr4kUHrwSeJFLjXQgiCceLUV1nupXVSqK8V8h7o1Bv4/OwUnzRArsJNzylQjsm1MZeQkDY/07nncB9GAFGgBFgBJjA43uAEWAEGAFGwCj7QakQHj5KxMMKHliCltQFhVWSSMlS/jkRHZ1x+F25hxgE2XOilLHAeEBMVSKvl5AKt73KEAu7fqCZ8ixKanOO+nfKqTzSbfhQfp/M0sAgARfmMlqdzdup77yGrujMZffesfLGc1Pf2ZF3mGNH4bnUljfeE4kXRsmsDgbSAgA+glZll35LaL164AXxu9PZp+yjvncaW+NaZcRextftmyz/VK8KO69BF0hjx73KjRFgBBgBRoARcEKACTy+PxgBRoARYASMNFSQeHi4TwZZpQt5IuyhMOmkVqoUiMl6+LbCH6qaooI8Q4npZt6uc35OZbTq9QNKapPofxeW95sOFmoflEzDqN4LMb2js5Mera/XnuriqiqaVFAwoL9UZaF0OWoyWp3Y7I3nRuCpZbPmDZvLaOXv7cppU6HktQu50EmhtTrgkmvOoPwj3FWGuD
YVCdf4jIbXI5rfMnHtG9vUMRWfl+q/DToKO7zX8Z53+3eV/e/83gV8HSPACDACQwsBJvCG1nnzbhkBRoARsEXAKA8SSgok6qVTkw9pIBjDIJbc9qaWy6YiEVeWgyUeiDtDL9c1799PSIUbhvL3Pe//D1HjJtfusqQ275hvUrxkmmv/IB1U5dDBxs6U3O+S5PFyT+uSeFbkXbJUaHbnonrjvbviefrw7Zcsuzqp73CBHYGH3y25YXLfmEHUjkHuLXltv1orr6+M0k8Zbd6U0VT6FWFoqNFSQVaqy5JlpkGDHDS2anQBcQjVGhTLOiEiuuPq9vOisDOr98xBF+x/p4s692MEGAFGgBFgAo/vAUaAEWAEGAEDAbMPXjrBkowyqYTar8BVKZEsXMJ8IF/5WvOAZZ96cpkRUoHkRChEQFRGVbarS+IVzP0PGjZ+jqEEjYqoTZXqzuqe8RtwYeeJNzEWMzzvVOWdLOssEKEwqSIr1b1Djbfz3Vdo7WvLjTRic2tq6aLmln4vTvPvnQi8K74wxkinxf2M+zpIymxY73GzOq31hdXaYRa65J30XIvK/80LFsn6TJGeoelwT6sKOzf/PYkPbCpAPMrPOfwd9gncGAFGgBFgBBgBNwSYwHNDiH/PCDACjEAGItDY3GqsuqKsRHv10gdP+4Ikd8TDDwg2lADWC7++sJpaEtUoxm0Xfnfp0hJqwJihxEOZs9cG4u7V1wYHIADLI6cW06knl9LI0XmRq/wcy2mVsll5xiB3wlQfhqG6++uT7xrw18wcTZNn6ZU06pyXLDEN2/swFeWFOvtFCe1H771idO0S72W1uRF4Vh548nqU0Z592gijjDQdiB11X2oZccPaj6j1vhccofJC3g2vKEi5X6l5M6riEn6A5nPWuU/s+oT5xUaQdajXuins7PDJFRcCHwRYsP9dWKfB4zACjAAjkN0IMIGX3efLu2MEGIEhhgCIuzt/8gi9tXo94b8XfeZUWnz9JVoodMS7jQffdG/wEyoUiqIDjR2BiSc8DErft1SUy+pg7aeEeMfOTgJ5t3PnYKIT4+GBUz5Uf+miapo0MTnqD6TTEl6HW86ksy0hkA/pXnzi7LDUKYl+tnmHcfn0gkrjJdu2dbX0mzuWWw79qfOOJbzCaGGnhoaJXxj7M4+BFFqUTWPfKNmXajw3As+cQquOe86nRxJIPLeU0Cj2ozOmSmod+mAXtW/aM0iNB+Iu/4jRVLjA/b5KKL8KyE+ysc56w+gj1WkocQ3jXORnPz4X0s3qAXh5VSDKMtzCGIdXhHG/8RiMACPACAwFBJjAGwqnzHtkBBiBIYPAvQ8+TY1NLQZpBwLv6m8spQWnHkfXXrVIC4PaQ22WpW1aFyexU1AVhiR13IzFk7glx6mkMi3ehQCCuCtx+chjhwaRd9JnrkchTOSkTiTe2q2dtHbbYCLwgk+XRgoPHm5hju+3FNItUXdTZwPdXfeB5R7OKp1I++9eR9vX73Xd4xXfXhiaIk8GXOiY41stTPUzdCvnc91YhB3UIAuUmaKBUHYi8OzKZ/HeyBP/c9onqujkE/rJ1wiXH2hoWcYNz1GzF5ruwEE//3TnCaOfVKeVCZUZ7kk/pfrq5x8Sb9O9qaXTTa1djuppfC6PGVac7lvi9TECjAAjwAikCQJM4KXJQfAyGAFGgBEIAwEQdiDrTpw70xjuo9oDtPDib9HyR39E48eMcJ0CKjQEWWRCkyScF5WWSup4uS5d8NBRoFiVzeKBsldUK9qpViZOjNElFw0btM1b7q+jdRbknex4viDxoiTy1ORUL4SUm+rux4fep83xRttjPbingTqFF98RTzVT2e4u1+MPk8RTU0y9kBUo0ywpzCeECPghSVw3GXKHd5/9FTXUbjNGlWq8+qYu8cXDYLxbc8fRzqLBX0KAuMO1cUFKqyEWIS819OFUpZYXsjbZATdhblxVILqRWuq86VoKroONfC/HBTkNewarz1/2v9NBkvswAo
wAI8AISASYwON7gRFgBBiBDEUA5BwIOyjtrrjwTLr2ynNpqSifbRJ/v23xNX27uveBpwwiT/2Z05YzicRLPCAVaHnESdVKlIENybiVpIG7HQF554/6VWO5gt0AwYEHRxB4Tk1V4UF198Nf12ttZ9bkGC35crVWX7+dpDLNzdvMTXWH+d3Iu6a6Vmqub+tbqg6JB1+8K79zpt/tGdftX1Pbdz28sfBgP2HeOCPF1KlcUO45T5C0bn0DLTCCi1USD8ODaN65p33ATHbkHZI7DVJa/E/N+CK68vzwfAkj2KrlkFJl2iXen26ej5lMZKmbVwlqNzWxH/uAZJ2dl3mkB2JCTdw14LMYKmN44HFjBBgBRoARYAR0EGACTwcl7sMIMAKMQJohANLu/GuW0AN3LaZyEVQB0g4KO0nq/Vr8XCru8DP0feOZe7V3kUkknttDXqaVy+ockkzMNSe2wvvud4/VUU3DQQJ5V19YTHUFeuVZp4hAC6TTorkp78xrjFqJh/kkWdvS3p/eqK7DTXWHvk5ls3KsPVsPDthe6UddNPXpgSm+VmfkV4UH4m7vlo3UU9xAXcN2Dhg6r1148h1xEo2cMt0ymVdnzzr3Uyr71O/ZSiiplWq8uAiROVjfRSDu9uefQG154wcsT5bM9gjiTnrnZZL6zgprNeTCKoFZJutmirrS7X5SQx/s3s/S489vgI/bGpL9e7WUWE2gHVVVKIjr3GQvh+djBBgBRoARyFAEmMDL0IPjZTMCjMDQRuClV1fRiyvfodtu/GeDtHv6LysNwu5cEVpx09L7DHBUxd2c066iNa884Am0TCTxUDoowyhA7FUIdQOUOulqeu7pQEydsT+k8nYIwgOllu3bG2nXczvowNrByrltlcNoe+Vwx+lkGe0TL7fQMvHy2r57dRXNnhJtGIa6Z6ne0VHdyb24qe/Qz0zg4Wc6Kjw/oRbbX95MDV3rBhF3ZuyHD5tAc4+5oE+lFUaqrtfzTWb/3/yhlnbt7jCmhDpNNrVkVv7sii+MockTipK5vEjmUktMVSVlJvndeQXGrqx2KOwZKlv8W4XPcG6MACPACDACjIAuAkzg6SLF/RgBRoARSCMEkDKL0thrrz6Pbrrjl3T5BQvpKUHiwfsO/43S2tNFeAX+G+Teus076J5bb/C8AxitQwGRCU01Osd6oeDI9HJZN9yx50qRyrv1/g+o6cN6am4WZF5Tt+1lq0eNp4aiEsvfSwXel5bsc5vW8vfJUOHJiWVJLdRKUC+pxK3T4r+291XHvZnLZ2Xn0W+1E15OzWsZLci7utgbQnln78Wnzje8egKdeNyFhpcWiGndPfs6zDS4aNuudvrtk7VGQAXKZEF4yJJZLA9ls0idzQbyToVbKs9QbokvH3LEy84/LQ2OKZQlqGW1veKQoSx1K5cPZeIUDoI9Vgv7B26MACPACDACjIAXBJjA84IW92UEGAFGII0QQDgFymdRLlsh/pTls1DejRNqPCTSrt+03VMKrdX2QOI1tnRmRDqtfChC0uqBxg4j2TLbW+1vhYJrVxPlCIKjta2bDh5yDl2wI/GCEnjA+Xe3jEoK3FKJFxMeiFAfNgsli1vTKZ+1I/Aw9jH3unsCLvnNFW7LMH7fsq+Ztr7xDnWOt07BtRtk5oxTaFrNJ6its5vqmjKDWNcCxKHT31c1UVEsl5595YARVgHSDuRdthF3KgQokR9VVUT4HIN6uLOrJyiMaX89vowYWVlk+CAiSEkqqdN+4T4WGBN7rBbKOy6d9QEeX8IIMAKMwBBHgAm8IX4D8PYZAUYgMxGAB97LooT2DhFaoXrb3XDz3XT6/ONp0ZmnhLoxEGEHGtrTlsSTPngy7Q8BAEUFeVlZOisPFg+8tKeFdj6wxlAokfj/wGHXbmdiB754746eMOj+yBQCT/V9A7ksy4jdDPGfbd5Bz7UM9Jgzg9DRFqdDtdaKODcCz0sJLXzvdrb/wfN7NE8QWcfP+zcjbbZQ/Hc2lo
aroKillFCmlRbFhBegtQeiZzDT9AIZbAEFXryrV4T0xKi9s0cQWnHXIJo03ZLrsiQh3y6IabynsWc0L2m1rpOkSQeQd8MrCg2PUm6MACPACDACjIBXBJjA84oY92cEGAFGIIUIoBz2p6J09rqrFhl+d1DhLRJ/XiZKZRFkcZUonV123y2GIi/sBhLvoFC1OSVihj2n23iq/5m5XFamtWZjKZYksTb8/D1q2DJQGdba4lxGC0ytVHg3fmu0AbffElpcG6UCz87rTvrB6RBabiW02IOVB55OkIUXAu+DP/7Nk/oOe5QpwhMRalE9zygRrygpyEpCSxI6caE8axBqLLl3O584t8+JTPm9lfebGvgAEg+l09nU7NJ1ZVltp/D4VO+BTN47vlSqEuXvTN5l8iny2hkBRoARSC0CTOClFn+enRFgBBgBLQTWCw87qOvgcXetIO/UhFmEVqB8Ft/n333b12nm1IlaY/rp1NUtEiIbO9OCxNNJ4My2JEMzibXl1jcsj7G2Nu54vOZQiy9dVE2TJib8mPwSeLMmx2jJl6v93Fau1+icdaJPTJTUdhqKJaumQ+Ad3NNAne0DS3J1PPB0y2exrvde+JNrcIUdKJLAw+9VQgtqPEl0uQKqdNhSW09b9iZI4IXHTvZyaSR9E+9ZKO26LJN3MalbamskC4t4UOzZiYTORvJSpmnbeZXqpNVGfCyhDQ/VrFQWhjYoD8QIMAKMACMw5BBgAm/IHTlvmBFgBDINAYRVgMC78fpL+oi7VO4h1SSefOiDMgMPfm6KQLeHxFRi6WVuqHOg4EBpHcIbkDpb+/A6yyE6BYF16JCzUuevk6Yb16rkHf5+y/11tG6bMwFoNWkUIRZeEmaxJqnmAYEHbzxz0ymjxTVmFV6Y5bMYPywCT+4PhBYIggYRcKHrl7b83W20/L3tgzCaOrqSFh4zmaaOqfJye4bS10v6aLYQWnYKNDtAJXmJzwEd78dQDiaCQUC4lxXlU50ISXLzKlUTxb3c46E6eDcAACAASURBVBEs29eQ5YKcxb3tp910569o5rRJdPn5Z/i5nK9hBBgBRoARyDIEmMDLsgPl7TACjED2IQC/uyhKYoMglQoSz6lc1m0vXh+S3cZL5u/l2s2EZf2KXVS/4iPHpRwSgRZxwWMh2dHcQOCZyTv0Wbu1k374a/fABvN4YZfPygd8SVjqYu5WUqujwlO98NzUd17TZ7GPsAk8jCmJaje/NCjufvbCu65wgsj7t4VzXfuF0cGuZFZn7ExW48kzc1IbWmGAe7xKJJgipTYjCS1BZvlJmpVltdLr1O3LG537J+o+Qcg7rA3//ssv8a4TqfMnHjujb8lQ3t8sFPi7xJ/nnTWfLhMkX7r93wpR48vjMwKMACMw1BBgAm+onTjvlxFgBBiBkBBAQiI88fAwFXVLKHPyhf9Tl+90Qjz0jhDm4R1CuWelzop6D17HB6lRKhQqKCm0KjFzUuCpc3V29lJXvBcZF31EXsW0Spr05Tm2S/Kqwvvu1VU0e0qiBDdok0RtnjB7r2+Ouyos7eaTxE69UPioqjSdNFqMCRKvd00jTXzMnsz04nunrnPvlg30Uf2LnqHKFam74yd93PDAs2qSvMQ9g5Jas7JJl7yTYyeDxNMpmXUDKhPVeF7Uhnb7l4SWG2nrhl8yfy+VxEECWMrEvwdQnKa7CjEoeaeeC8g62GWgIWkeNhpQ55WXFtNioc5/6dVVtPSeh2n5oz9K5nHyXIwAI8AIMAJJRoAJvCQDztMxAowAI5BNCIDEQwkUSLEomvQ+0y2XdVsDCI5KUc6UI8gxEDt+PMPc5gjj96rnW7PwArNb57bb/649HdQ64rjESyRbzh8vXoOTaNXBdEm8MMk7v6o7OxBUVZpK2oLEQznt5rh14izGO6t0In22bBJtW1dL29fvpb8+mVCtQXE3edYYAnkXpL3z9r2eLy8QqqujZlzjel2C2BkccPGz5auF312D6/Vqh38749
jIymnDILHUtWaKGs/N787LAamkLe5xO/9HL2NG0VeqBnvFh1C9KPUO2uRneYFIYwbRr1s6HnRe3evDJO/UOd96dwOVi5AqeN2C0EOIFbxx0a4WIVaXX3gmnX6KNcGvu3buxwgwAowAI5C+CDCBl75nwytjBBgBRiBjEMADFNQQYTW1XNZKSRR0njBUIEHXYHW9V8+32t+upfYdTdpLyRVPvYLHo5rvfFxL2eZUTovQigs+XRqK8i4s1Z0VEPJB30rRByIPr+dadhqXTotV0PSCSoO4i7rt2f0W7dnzlvY0buo780DY97DyhCoS78+Nu+u0SmfN40ShwgtSMusGWDqr8eS+o1ABg6yuFAmn8rzTqbw0SgsDkNXYN76YCKLYdbuvvPwe5HmJ8PiLqqGs9s6fPGJ44yJ9/rZvf8X482s3/dhQ4EGdh8R6qPZOnDdrQNltVGvicRkBRoARYASSgwATeMnBmWdhBBgBRiDrEWgUqgqoxYI2qcqxSyYMOr68PqHWyU+bVF1zSIXuPr2o8DDmmAWTaNKZky1LLO3mBJG3Vgm2AHEXVpMBDF697rzOL887XdQ6UBuuX/8kHTy0y3UrIO+qRtbQ5HFnufY1d5CqtN+/tpGeeXur5+txwY8u/5Sv66wuCqNk1mrcQ4WPGz8u7ppNxd1z0i6p1q/fnVfg002FGCV5p2KjhnvAUzBV6uqoyTvs+d4Hn6Y3V62ju2+9gXYLkg4J9TNE0AV88E4QHnlQ4o0TJN6C+cfTQ48/T4vEzzkEw+s7ifszAowAI5CeCDCBl57nwqtiBBgBRiAjEQDp1mSR/qmzmbDLZfXnjAkyq8M1CVFnPD997EIqdMfS9cLDeEWTymnMZbNJllg2CZ+3tg7ntFrddXjtlwqllCRRsGfcq6loZpXlpm3PUP3+wWmwcm35gmSuqJ7oi7yTY2DOX738Pq3ddYhQ9u61LTymhhYeO9nrZYP6h10y25a3hg4VPkHt+WsHzVXdcQGV9cyhscWJII5UqrMkeRyFmtjqUNT3FkIu3FJeAx+szQDyc6axtTMppb1qOXGyiXoom0dUFhlhMlG3869ZQgi0kKWy8L8DoQcvvKWHlXkP3LXYWAZUeCD02Bsv6lPh8RkBRoARSA4CTOAlB2eehRFgBBiBIYOAVxIPD5sVogQqJh58DjZ2apV2hgkmHriqRckT1IPJJLPsQip25x6k3XmHBmxxXPcwGtcz3HbbIPGQSutUTmv2vUuWMsZq0VJ1l2zMsRbpxYUHbhAqyVTq2BGILW17aO/+d6i5qT9VGKq7svLxIrBiLpUWjw18y3/rob9Srtg09t0tgme80HhBCbwoSmY/KvmBJXFnBmpcyxIaETtGKPJi1NIeF6/kEtZhk5ZeboRUhlykUuGMfZeX5CeNuE18lhYmhbzDpkDSIW322qsWGXvE39Euv2Ahgdxbdt8tRhktGsptF178LXrjGe+em17uNe7LCDACjAAjkBwEmMBLDs48CyPACDACQwoBHRLPLWU1mYAlm8xSQypUJdgfC9+gPSbyTsXhnPaPOxJ5IPHQ6lckiCAo7opqKmwDK1R/uGSQWVIZBAIJ+06lT1eyS2olkZMsFZb5/bP83W20/L3tBoGJc4AST1eMFyTIwm/J7P7XV9EB8TK3kgljKOfTH1B86tvaHxEg8cp6jxLBHsnziIvS705746JjKlRpqSQtVWySUVYr/+3Iz8v1ciyB+0qlHYg8eOH9Wijunn7+VWPca688t298WW4rFXmBJ+YBGAFGgBFgBFKKABN4KYWfJ2cEGAFGIHsRaBVlikh6tWqSwEJiIvyKUknkyPXJB+72zujKK+1CKqC6+1ORXqKsG4nn545KxgN3KlV3dphIhRLuwaiUWWqJdIMoL0+m4k/dtyTw1PudDqvx3O4Zvx54fu6rlp17aMfjz9kuqYfaKJ67h4qv2EX5k9vclt73+6mNjxn/nQyPuGR/IaADggx76EIKbIQJ3EjYjeXnJF
3daodBlARmqsg7uVeo6zYI8g7+dyDy4IUHvztZWivVd/fc9nUOstB5k3AfRoARYAQyAAEm8DLgkHiJjAAjwAhkKgIgw/CwKJU+8oEnLhRYCL1IB+JOxVYq0vCzsMkWp3AON+Wd+fyjIPESpGrMOK/Orh7HW651474Bv48NLyW8rJpaPplq1Z3V+mRaK+7RsImNdCMtUUarNpTToqwW70M7YtFP+azfklk38g5r78zZTb057cY2vJB48MQb1nGhcV2U/ovJVnZ6/behrDhfJKTmh15OnC6KQzs8wi6rTTV5Z7VPM4EHlR488O4RYRfcGAFGgBFgBLIDASbwsuMceReMACPACKQtAjBQrxMhEUh8LYjlGqWTyfSa8wMMVCSFYq0odwxKMrqFVPwjtoneFi+v7autn/V6iWt/N+UQiLvWTfstx4kNK6GSI0cNIPKSlSjsujGNDmGWuEqlZZ7wV0xleIJ522YVnvw9vPlAYFoFXHhV3wVRNW7//bPUuqvW8bQ6cj8c8HtdEq9IpNOOb/3egGvDVuP5URxq3JqhdwmbwHT73Ah9AwEGxJnjiwr4b/pNq01H8g6QgKwDiXfi3JlGOi3+jtJaqPO4MQKMACPACGQHAkzgZcc58i4YAUaAEUh7BJoFcdfoM6E2FZsL+jCulss6kZY/L3nW1/aOj0+nE8Qr7CYVafCpU1WIDa9vpfihVtfpKj8xmYpGlglT9wLqjPek3OvOdcFKhyDkkxzGzt/Qyzqi7Puz5atpy96GQVP0BVwoajyv3ndB3jM66jss2kzg5dW0UsmV/eEfTtjJMlq1Txhklhwj3gVvx9SVSXu9b3CvVoovK8Igs6IsQ/e6L7f+alkt/k2ClYNuQ9gSAivwfknHhrJZpNKCtJOltOm4Tl4TI8AIMAKMgD8EmMDzhxtfxQgwAowAI+ARga7unpSkzHpc5oDukozxGjygS+J48b4z7yMqAk/Oo6oQD736oRZ5h2vxYDtpwXTqLi9Ke6Wl1b2hBm14KaNWSYFUpCl7uc/tlHh9ARdCjffVBcfQ1DFVWsP6xUwd3C60wrwAM4GH35cv0VOwWhF4cny/arxMUp9ZHWYQjzgElEDNVifK7qG0zrSmltU2CEsHtz0UFeRRlUhMT1fyLtPw5/UyAowAI8AIeEeACTzvmPEVjAAjwAgwAj4RyEQSD+WFw8oLDWWNW+mvXUiFHVx+y2cx3tjuYfT5jk/4PAm9y0BqdGw5QHtXC98xjUuAFbzUUHY84nNzNK5I3y4gMEFQ6JC3mUjibKmtF0o8+Uoo8qaOrhSvKvriydNJt/w3DNUi5l73P/dr3QxREXiY3KsaT3oc4rPBi4pLa6NJ7iTPEfvQUREGUVsmeWuu00klIoKX7PYO30CZYuw6IHdgBBgBRoARYAQiQoAJvIiA5WEZAUaAEchWBJ7+y0p6890N9LFjZ9C5nznV8zZB4tUJbzkEWWRKkwSNU5mYH7+3dCfwcD4H/7zGIHPgjybDSMznZlV+WTJ9pOGJl8ktQWoUOBr+y3OH151b+EcmYaGjSAuTxAlC4On44KkhFm7nkOy9u60nWb+HGg9epW4hF2Gee7L25jaPVCKWiLJiKG/VL2vKBZmPPXNjBBgBRoARYARSjQATeKk+AZ6fEWAEGIEMQuBr372HKkqLacH84+mnv36SZk6bRLctvsbzDkAGHWzsyDgSb1h5gVDaQKXR1bdnt5AKJ3DSuYRWrvuAIPDQ7IIOVNWduleEWlSeNMXzvZFuF6jpmqo6J8i5J3uP+7d10fpX2umA+FNtIybn08zTimik+NOqSUWaOaFXLZkNK1lYl8DryqmjbvFSm04J7biWJVTcra8KtVPj4ecVoowSLezU4mTfF3bz2e1dpnTnCAyyde/4PKs8fL4oqy0Wyjsm79LlzuR1MAKMACPACDCBx/cAI8AIMAKMgBYC6zfvoJuW3kfL7rvF6A+z7AuuWUKLhArv2qsWaY2hdgKJVy8ekECIZU
pTAx5AXODBLmiyrt8Qi3PaP07jeoZHCl38YAs1vLGtbw482JP4/wi4sFLdmReT6WW06n5kSS387VBaW1oU0yqrjvSANAZf8UDzIOJu0DkJAm/+VWW2oyUUaflGoi4aSgmhUFKJbI2lOHbR9cDDIOYyWjcCz4v6zrxIVY2H8lKEs4S996DYRXW93HtrRyI53OoLjKjmTvW4eI9XlxUSPvO5MQKMACPACDAC6YIAE3jpchK8DkaAEWAE0hABpNmVizS7E0W5LAi8G26+m5Y/+qO+lb61er1B6v36rsU0fswIXzsAKYAHxExqIysLKSbKK8NI1vVTRpsM/zucR+vGfdS6af+AowFxByKvR5jdgchzatlE4GGfIDRg2o+9p3tQBdarQ97J84Maz4nEQzkxCByc/966dsPnMMymm0KLOXuojeK5e4zpCz51kAo/dch2KUVds2l86/cCLVWqLfPzcqmhpVOUVGfOlw6BNi4ull9aFMbyqE182QL7g6HQuGx2KJwy75ERYAQYgcxDgAm8zDszXjEjwAgwApEjINV1J86dSU1CaXerKJOtEETewou/RYu/dimdfsq8vjWA1Ftw6nG+/PDkIFDyNAnfoXRvakgF1DhQaYRB5Pyx8A3ak2dPQphxSYb6Ts4pS2jxd6m6g3oS/w0SB6EVVi1bSmjl3nDWUOHh3AtjudQR1zP7T9U9vU6UzKJs1ktDOe0s8TI3tWQWvB32j3CPsEm87b9/llp31WotGSQeymnLvve+bf8wyDsMLj3fEHLg5g+ntfgM6oSS0mrhAwnyDv5wuiEXGbTFQUtFCS0Up9wYAUaAEWAEGIF0Q4AJvHQ7EV4PI8AIMAJpgMDSnzxiKOouv2DhgNUgwOKnDzw1QIX30BPL6aPaA7T4+ksCrTzdSTyrkApZWqiTVOoEDrzwoMTTIfGSSd5hzaoHnkyYlXux88XD77MhxELu02zaLw3voyKyAr2RDl/85PfrfQ1z3verBlwn00nVstEEmekc7uFrcnGRFxJv0oVnUe7kQ9SWv5bqCp8wpgRpV9w9m4Z1XOh3CX3X4ZwRYoImPd+8JtUGXkQKBzB/vsn7HuffKL5wyfTkXStocd4gKrkxAowAI8AIMALpiAATeOl4KrwmRoARYARSjMDV31hq+NqNEyTevYKwe1OUyn5MqPFuFCQdfgfFnfS9A9lXUV5K1155buBVpyOJB5IKpYOdhuKqa5DqSCaV4gE/aAqpUzktymZPiE+P3PfOfIjxDw9QiyiltVPbGb54opnVWMkon93edIi2N9eJV0K9WFM2zPjzk2OnBr4XMYAsnbRTHRWLB32U1Da2dqYVmeFHfScBO1V44clQC6e0UZXIAoFtp8T0cxA6JB7Iu9KJY/0Mr3WNPHs7vzudpFqtidK0k9PZq0EPsEAIW4mZCkjwMTaissgI6+HGCDACjAAjwAikKwJM4KXryfC6GAFGgBFIIQIg5dB2C2UdSDv44MHrDmW0IO7w32h41AHJhz74XRitpb1L+EylvpxWLZcFcYcHebsmH/ax9jD8sUDkqW2cIO+iDqww701NWd3+4iZCoIVdk6W1XYc98So/MZliw0vDuB1sx3ho01sGeWfXPjlmaiAiz6y4XPPiMtr/4Vrav3WdMeXIKbNo5BGz6ZgzLjCCDUDyQZWUDi0IgYcy2qNOLzaCKuBx6JYyqwZcBCWwVezgiYdy2gOvr+r7ccmEMVQiSLuRJ/WX8EeBNxRmIGabxfvZ7X0PnNCyhcjCXlAqrqMuzRYSMyh5B8uJsP79i+J+5jEZAUaAEWAEsgcBJvCy5yx5J4wAI8AIhIYASmLhd3edIOuk0k7+bM0rDxjzINQC/njwyQu7wWsKirZUtYSyKt9T2qSbYidVe/Ezr5m8AnmHQIv4oVbb4VBeBwyqT55CvRXFfqbVugaqu4c2/0Orb01ZNV0+/UStvrKT6vcG8uq9F56gtYK8c2qf/sp36Yg5x2iRHp4W47NzEALv6A
XFdPI5FZ7ufalSzQZ/NCflmd1xZAuRJUuGQdzqktGqErOptSuwCtnnLe/7MjWgxM8g+HfxKqFKv+LCM+ny88/wMwRfwwgwAowAI8AIaCPABJ42VNyREWAEGIGhgQB87k4QpBy87ZAyu+y+W4yNQ2UAUu+NZ+5NChBQ8xxq7BCJn0mZzpgED3MVwsA8Jsqo/IRTyMTGeJf+A3Dyduc+k1vJaMPrWx1JvOGnHEGjJ1cJFWI4SkSrFbsp78zXeCHxQNyWCfN6pCJDSfnKL3/Yp7hzQ+9T19xMU2YfLczvYwb5HKYazW1u8+/9EnhQUh53ZilNPkmo7zy+8VR/tKCekF73G0Z/rL9SKM/yxHvfj5ou073xgn4BIb0SM4nEDUreyfsO/zbCagJfal139XlGartsIPjwO8T5fkz8/NzPnBrG7cpjMAKMACPACAxRBJjAG6IHz9tmBBgBRsCMAB40kCg7c9okoyQWDX53KJFdMP94enHF2/SxebOSqjJASeaBhvakkHhWIRV+75IqQQKCCAjbG8zvenSuk/sHeeFGPkGNpzaUy8qSWTwUwzMwipTWv+3ZQn+r3aKznQF9Lp92AtWUJ/zxrJpaLi2J232iXPav993qaS6QeOOmzzH2n8qSWj8EHu5XEmT5yVeU0ojJ/hM4owy48HQYHjoHJa/UqTJRjReWBYBK4up8jng4otC7hkXeqQt7690N9NNfP2n8G4pQp/VbdtJNd/ySrhBhUDPEz/ClGMKhpKo99E3xgIwAI8AIMAJZjwATeFl/xLxBRoARYAScEZAKAajtfn3XYuMBQzYoC6DIQx+Qd6efEq33lNVKu7p7fKnhdM/dLaRCdxxzPz+leH7nCnJdVMoZXR8tL2v3qr6TYzv54VmlrOK6x7/jPVUZvniniXLaoGouL5jY9dVNoZWlzxDc9Yj/MafQ+llLJqnRcP6VgnCXqks/+zVfk0n7l4RrmEEsEtMucT81Cj9Tr2rOMM7AaYwoyDt1PvxbCsLugmuW0KKz5tO5Z55i/LsKhR78Y6WqPep98viMACPACDAC2YcAE3jZd6a8I0aAEWAEPCFws3iggI9dOpf2REHieQmp8ASo0lka/Pspx/U7p+51yVDLyP2HVVJ566rlutsb1O/meQsH/UySrOb1IbDCzffObiEX3p4IgEGLKuBBBwQdFZ45fAQBFrPEK6wm1WhO5FDz7nZq2d0xaMrRJ1SGtQzbcaIm2dNdjRf151OZ8BEtKcwX5ejxUMJ9wrghYI8wvKKQcO9H2e598Gl66rkVtEiUzCLFXaryZOAFvhR7aeU7NHN6zYCS2yjXxGMzAowAI8AIZD4CTOBl/hnyDhgBRoARGBIIhEni+Qmp8AuyVLik2hdNXb+d6q6texe1d3/U17W64ON+t913XZgKp7AIPDVht0Ekx/aafBaDEHizF5xPc8RLNqnwRJopQjGS2VY80EwHtlnPKUtmpToKZbPzryoLfXlOARcf/nEvtewZTN7JRYw6vpKiIPIkea2TtBoUkHRV40VNXkrc1P0jXVwmVQfF1c/1RQV5BHuDqMk7rO0M4Rd7+7e/0kfOwY7ichF0ARX708+/Snfc8zAtOPU4Yxsg8x4Q6ndujAAjwAgwAoyAGwJM4LkhxL9nBBgBRoARSBsEQOLVNXVSXHjj+WkypCJfqC+gukpWaZckMUDggMhJVbNT3dV1/p3wsmpFeeMJRF5x3gTfy1Z98XTTLa0mC4PAk6ojp7MIk8DDPmS6J0Q/yfZFNJN45pJZrC8q8k6eoVXAhRt5J68tHVtIR3x+tO97z3yhV7+7xzrq+oaYk1dER+X7S1hOJzUeytsTYR2dg8jr0IA2DYQvTRAS0mwE3HQlbV65DCgBq8piUW1v0LhzTruKZGI7fgl/2dOFlywCLs4XpbUP/PjbNHPqROM6lNXCZzYVFhVJA4QnYgQYAUaAEQgFASbwQoGRB2EEGAFGgBFIFgLw6Too0mm9knhhhlT42atX4s
DPHE7X2Hm97W5bNkB1ZzfG2OIvBCLxVF84vySWXwIPSbRXzfiYkTAMEs0tZTRsAq+PjCrKE2W1+cb8bkEhYZ7/fqHCW/9KOx3a0W3sX6qgQNyhbHZkgNAKL+uU9+BbD++kxl3t2peGReJJIt0tJfmDrjYCcbem23qNFxVWE15eW6rVeGER6V73Lfsno2zfam1IlsZ7P5kNJN0VQnEH/zuEWVz19Tto+aM/Mog8+Mlee+W5fcu56c5fGTYWi0RfbowAI8AIMAKMgBMCTODx/cEIMAKMACOQcQiAxGts7TKM592aJA064z1GCWOyVHdW68ID7Ajhv4SE1iBKNLc9q7+3SliVv9cl72T/oCQexglSuuc3xOKMidPpnGkzDfWjThlrEAIPSbSjjphte0TyfnQjkbycsVtfSRx1C+Vqqt8De//RQAdXNxhL7vYgRp1yzigqG+ffn8/O79CMHci7Ja173CAlqPF+WDrOtZ9Vh1So8VL9BYKKg/qZbFXC7gtUm4vKheoPZ5/shrJYKOuaRBAUXtddfR6dcFh9ByKvoqzEWBI88RaKcluU0MInjxsjwAgwAowAI+CEABN4fH8wAowAI8AIZCwCUDLZkXgqcYUkxHZBmqVDk0q0HCGFirqEzcnrz6ls1g4nlNOOK+73d/OLp9/ky+1Nh+ihzf/wNC38ru761Nmekoz3fbiW/nrfrZ7mkZ3VEAu7AXAPDCsvICS/Rn0P2CkvfW0uhItk6WxOZytRRxt1NexPjFpYQjnKyzxVED883URkXfJOri0IiZdMNZ6u8jCE49UeAu8BqFGjDLlIFXmnggAiTya7PyW875BQe9uN/9zXZelPHjHSadkDT/vW4Y6MACPACAxpBJjAG9LHz5tnBBgBRiDzEYCiqEkEEagtmSEVfhGEKgSm6lF48Tmp7uR6P2y+29fSw1DhYWK/iiAvKjwQFyidHVPoPdH0lV/+kPZvXecJI3OAhdvFuqowt3Hsfh/1+H7W9f7Pd1D3/h2CvBMEnmgyC3SAq6Ug8vJGDlYjHf1VbwolryWjX2j80POWbikZ69sXD5NFrcbDZyFKSOuE310qAyTsgI2KyEwH8s6855deXWUk09596w3Gr2Rp7bL7bukj+TzfgHwBI8AIMAKMwJBCgAm8IXXcvFlGgBFgBLITAUniyYfBXCHviIIYCxu9IOWkdmvRIS+RNrun7Q++toNAizDSaQ3y5rASDaWdXkrp3Eg8qO7g9Xbp1BNoYql3rzIJzOPfuUQbo5FTZtFpX/mudn/ZMYqSWvk+gMIP6tNUlo2rgHRtfoc+ePDAIIx0STwvBJ7X9F943qmBFboHGUSFJ+eIjMQSXxLg8+BgY/ICe3RxM/eTRCYU1c0B05orhd8d1H3p2K4SabRSkfemUON9+2uXcnhFOh4Ur4kRYAQYgTRFgAm8ND0YXhYjwAgwAoyANwTgcZebS4bHVyqTXr2tmowH7PLimCAcOwIpZHRUd3Jtfspn5bVhEnhyTN0SRxVbu3JaEDeTBGl3yugjqKZsmNfjGNAfpbRrX1zmqsTzS96ZCRyvRKbV5rwSV4EA8nhxy73X06Y9n7e9CkSekxJPl8CTScMg8XVVZ99t2W0bWuG2zT9UHOHWRev3YarxolT4am3GRyeZ1hwzEnL9Bb1UlRWIstw8H7Mn75K33t1A6zdtp9NPPY6Vd8mDnWdiBBgBRiArEGACLyuOkTfBCDACjAAjAARaRUgBPMUyrYF0qRYPns3t/shHHdWdikm6EXhYmyQvcH5eE1r/tmeL8dCeJ3A8edQRoRO4dqEWIO5GisCKOQuC+wICAxCZ8Af0Qjyp55qOJbNyfR3P/4o6lt9PH5Z8z/HtaVbj5YpSWnjj6SbR+lW1+imflRsJi8DDeEHVeJIE6xXySy+q1nT6zJSq1PZOBA/FqXcAq2u9Uihuh4mAIFzLjRFgBBgBRoARhxhtZQAAIABJREFUyFYEmMDL1pPlfTECjAAjMEQRyFQSz4
8nnBfVXboTeFifH7N9iUFMPLgno2waZB5aWKSd+W2aIC8KqKU9Ll56Ma3AoEKUDaKlU8msurfG/zjF+OvuwiupPW+y66dTjnDH6xX/L6diBOWKl1uIhVe/O/MC0oXAk+uShHZjayeByNJpfj5DdMZNRR8QkVAmg5ivb3HGAOTdiMoi4/ODGyPACDACjAAjkM0IMIGXzafLe2MEGAFGYIgiAAXXocYOI+Uzk5okIdo7u41SYKfmVXVnHstviEUUJbTq2rwQMYk025ihuHPDKxPvgw5RFu6mQErnklkVc0ngteXW0J6iq7SOQ9IxuRNmklP5bBjEVboReABI9TJ0SysOAwOtQ0lyJ9zf8LRDQ1mt2c9R7js/j5V3ST4ano4RYAQYAUYgBQgwgZcC0HlKRoARYAQYAXsEGptb6eWV79Cbwido0WdOpROPneELLnhfHWhozzgSD8qTSkFKoVmVwPlV3ZlB3N22jNq7P/KM7RFliQTFKJtU3xTG7FV1stw0Ewz6/WLltsd0Lpk171kSePi5rgoPfUHinfif/0RUnW9ZWi397vx6psl1+g2xwPVhltBa3StuarwEiVtIXtR6fu/JVF1n5Q/I5F2qToPnZQQYAUaAEUgVAkzgpQp5npcRYAQYAUZgEAIg7y64ZolB3M2cXkNL73nY+O9rr1rkC62u7p6MSGC02pxVsENQ1Z15Hq8qvLHFX6DivAm+zsLPRVZhBPKh3Ys/lp+50+WahMqwwFDiyXCWTCiZNeOnEnj4nS6JN7b9AZr4v4+KsuKYUUra2BrvG9qv353d2fpR4V1UWE14Rd3s1Hh+AjuiXmtU46sp41DcVpTki7JZVt5FhTePywgwAowAI5B+CDCBl35nwitiBBgBRmDIIvDU86/SW6vX0203/rOBwUe1B+jqbyyl6wSBd64g8vy0TCbxJEEBdRG8oAqEIi1MxVlb9y7a0/YHLViTTd7JRUlPOBBY+cLsCiRmpiUNawHs0EktK0Z5NcisTCsbNhN42K5TOW1R9zaqjr9CZVOGUem1PyFVldnQEqdyQd6Ekdqrwu5VhTcnr4h+WDou6PF6ul5V48UEeYX3Q5ifCZ4Wk6LOpUX5fWW1KVoCT8sIMAKMACPACKQEASbwUgI7T8oIMAKMACMgEXjp1VV0+inzjL/iv596bgXdfWt/mebTf1lJP33gKVr+6I98g5bJJF51eQEVF+RRq0iorRfERdgNJB5SaZ3KaVNF3qkk3ojKQsrkcwx6biCwRoiUTSOsQ/g7tgt/vExqLfdeT91bVlkuGUSeGmxRHf9rX7/ChV+mwjMThD5aWXG+4XvYEe82iKuw23dbdtOa7nbXYVNB3slFgdAdKd4PueLPffXt4n2RYWafrujad4iJcuHh4n2AvXNjBBgBRoARYASGGgJM4A21E+f9MgKMACOQRghAYbfw4m8ZCjuUycq/g6wbP2ZE30rR57bF19CJc2f6Xn2PSLQ4KIiPeIY87Kped0gWrSgdWEbpGwibC0HkWZF4CK1IZYPCqEwoblo7uoQCMY/w3I602d6hw1kYYQYyZbZTEFelRbGM8zvr2vwOtf7sa55vpYr/frXvGqjPSgpxL3QTSosRUuMW7uB5QnGBmxIvleSdJHIRcNLd05OR94KfM8E1TN75RY6vYwQYAUaAEcgWBJjAy5aT5H0wAowAI5CBCKzfvIOW/uQRo2xWknb4e5PwwgNhJ9u9QoGH5tcLT46TKSSelded9H5rEUq8lvbuDDxtb0u2C+sI2/fM26qS39sqZVb1AVQ94ZK/Om8zdjz/K+pYfr/2RSX/dg/lTzvO6G917mEFWNgtCESeuYG8Oyq/WHsPYXa0Spr1klQb5lqSPRaIW5SOc2MEGAFGgBFgBIYyAkzgDeXT570zAowAI5BiBEDcvSgSZyvKSuhN8d8P3LW4z/dODa+4Cj54V5/nO5HWvE14ykHRlW7NLWFWeqHBBw0+cNnaEr539j5vCYIzZqivOrsyq5
TUy5k5pczqJPV6mStZfXVJPEneqepDK7WdJDizPdTEjcB3S6pN1vlGMQ8UuFKBGsX4PCYjwAgwAowAI5ApCDCBlyknxetkBBgBRiALEYDn3fpN2w1lHcpkL7/wTJo5dSKNE+WzCK/An3A6mjFtEi2+/pJQEUBZarNQs6VLU0tFnRR2IG6GCV88GPhH4YmXajycSCt1bVbKtFSvPaz5vaTMSuImk8hMlNNCiWfliZc3dR7B9w7KOyvFmRXGmUpm6t4vMonY7YyzUY1XLvwO8ZnAjRFgBBgBRoARYASImMDju4ARYAQYAUYgZQggdXb3nv0GgXfvg0/TT3/9JF1+wcI+sg4ltuVCnaf64YW5WKjYmlrDD4bwskb50I1roAzshrGXRoORf6FIpT0gfP2ywQ9OkjWdwturQZyJzp4kdmGnkWrAH1kXP8SkqkLLpJJagAhFHhqIO1kui79L0qqxtZOgrtNp8pqW9njWlJn7KRnPFjVeUPKuUVgx3CksGW4UX/5A5c2NEWAEGAFGgBHIdASYwMv0E+T1MwKMACOQgQggrAKkHAi8t1atM3YA3ztQV3jQUv3vot5eKkk8+aDtl3Dw83AfNZ5+xpf7wFm0iYACr02SmQi30CVAvc6RjP666kOrtUCFVilI3TyR0umFCE7GvrzOEeS+9kuIe11jMvoHwQGkbnVZAXWJLwSiCPqIev9ByTu5Pqi877jnYbpCqLsvP/+MqJfN4zMCjAAjwAgwApEiwARepPDy4IwAI8AIMAJmBBBI8dRfVtIT991C/3h3Ay0VD1dIoT33M6ca/ncPPbE89HJZt1NINokXJskgjfxBXnVlSMKuPA/GIYGEl5JZt3s56mAHt/mD/B4kZJUgndBQ4h6EjAUO8EnE+yLTfBIlGZsj0oeDkm9lovwUHnL1LfpKxiBnGMa1laUxka4bXtkslHj4dweKbnw5ZKXohh8rrBpYqRfGCfIYjAAjwAgwAlEhwAReVMjyuIwAI8AIMAIDEIASAmSdGk6RThC1CuUXHpajbkFVd1brS4Q+FGRUqINV0m5Q7GX4RSYl9fopmXXDKYox3eYM+ntdvzsv82RiwEUUQTWZpMbD51iJCKmJor0lvjBCaNLi676UIIkFsfey+PtPBbmHL48QlHTtledGMTWPyQgwAowAI8AIhIIAE3ihwMiDMAKMACPACDghcNPS+4xfw+suKj+7ME4AJF6jUKpo2tB5mjJMtZnVxG4plZ4WG2FnqS4qEP59BxvDL3mVBEiH8NJLdz84WSKJktewVWIy7ARHme4ltfCug1oOydBOAS5+bksZcIE50l2lGgWJqWKWzmo8ITY0voQoKoiGvFNxAFkHRd5u8Se+UCJxk6DMdvmjP2IFnp83GV/DCDACjAAjkDQEmMBLGtQ8ESPACDACQxcBlCedOHdmRgCAMtQDDe2hknhBPd50gYuaANBdh12/ZK1P9YMDaaMTiBF0b16ulyWzIC2iJtfSvaQ2iM+bF8yhzkR6c1NbegZcSLWgXx9IXSzSUY2H98GIyiLC2qJuUOFd9fU7aPHXLu3zxDv/miW06Kz5xt9lue0GUW574rxZdJn4GZfVRn0qPD4jwAgwAoyALgJM4Okixf0YAUaAEWAEhgwCIPEOinTXIB5cAEtNVsWDedDxdA5AKq+QzFovfMTSpUWpNrPbY7LIIS8Yp6K8NR1Li1MRuhG1CtbLfaD2TYWPZbqo8eRnZH5erl/4PF/30LIX6KnnVhgkHr5celMEKT1w12KDvLv6G0tpnAhYgi8ryL6HHn/eUOZxYwQYAUaAEWAE0gEBJvDS4RR4DYwAI8AIMAJph0BXd0+gEs9kqe7sgKsSRvBIJE21Ai0VJKaKCUonK0oSyis/Cbdh3pipIDHl+lXyKl3uCZwHiO1kN+lD2dia+mCHVJLMqVbjpYK8k/caymhvuPluI9jintu+TqefMo+W/uQR4+8g82QDoQfrh0xRkCf7vcTzMQKMACPACCQXAS
bwkos3z8YIMAKMACOQQQj4IfFSTVip8KaSHMA6JFGSavIsWaW7drd2Mktm3d5e8p5IlR9cuqgBVa9E3J+pKLOuKIlRofCCxFkkQ51rd2+kQo2XSvJO4nCVIOfQxo8daYRXoJR2mUhHV31aP3H2tQahN1Mk1HJjBBgBRoARYARSjQATeKk+AZ6fEWAEGAFGIK0R8ELipVp1ZwWkLM+LIjDCibACFjHhO5ZqckKuUZYWI6AEacPJImxSUTLr9oZKFYmWakLZjIsacBFFkIjdOWBeBDb0ipsxXcrck6nGSwfyDuWyN4twpbtvvcE4poeeWG4k0S6+/pK+Y0Op7Ysr3h6gyHN7b/HvGQFGgBFgBBiBKBFgAi9KdHlsRoARYAQYgaxAoEc8aMMTLy585axaOqnurNYny0hBXIWddmqer7gwTySK5hvlqqkoj3S74aB6Ah7JIDRTWTLrhoMsqYVXYkNrtAo0SZSlg9rMCpdkEpqpVoO63RdRq/Fioqy/WoSJJNPzzm3P+P1Nd/7KKJNddOYpRncQfAsv/pZRXnvisTN0huA+jAAjwAgwAoxA5AgwgRc5xDwBI8AIMAKMQDYgABKvThBgHfGeAdtJR9WdFd5Rp1yqaqZkkGNB7qkEyRgzlHhREJrpVDLrhpMkNP9fe/cTI1d55nv8pbqquun/QM8NEyZkESKbFZA7RpEwG2YwRIpkc00kRK4TW+NsDCYskNK5Riyi+MZILMAEb8LITqxESAFhS5EGjGAWsXWjcSaE1TUashhycTJKG9zVf9xd/3yf5zRvp9xUV506f99zzvdIKGBXnT+f99iKf36e94mrpdYGViv1tqlJUOjqYavi4pwM7Hp4Z9cmrmo8De9umhw2JUV27Dj9xlnzbzLQ4vDsfq8ST/fH00CvsyLPsVvmdhBAAAEECihAgFfAReeREUAAAQSCC2ir3fJqM5UJs8Hveu2bcQUInSFNWvuJDWpjA82llaZZWmkN+vVNP+9iy2y/h1urQKuKQyMWi6iN+z1PmJ+3+zZGbWGNXRic4dcnymq80eGyvGMVv5dO5XPHfnravC7TaRek+u5bD+/whldwIIAAAggg4JIAAZ5Lq8G9IIAAAghkQmCl3pIWsOu8FtG0J5sOCqaVRjNSBaOVhFFURGWlArGbU+cggygtktxPbdD13+zzUQ91SHtYRhgXa9FotiNpL05jH8owz9/53Siq8cZHymZSpmJzIIAAAggggEA4AQK8cH58GwEEEECgoAJaVTS/5G5LYK9l0RBvSvaCu05a2YIOdOgMOTTITHOKZthXMOw0ULXQwGpIQl0N77JuEXaPwCT3GQy79r2+b58jTCDr2uCOoF5Bq/Em5PcZNeBAAAEEEEAAgfACBHjhDTkDAggggEBBBTS4WnB4X69+y6J/sB6pDg08KdbuIRd1m2G/+43z522V1KB7wcXVlhzns/Y7d9D1jbqisd99JvHzdsBFkD38NACslK/zfn0lNfU4TpNBq/EI7+JcDc6NAAIIIFBEAQK8Iq46z4wAAgggEJlAHkI8DWz8DJ6wlWbVSjJTXCNbJJ8nGnQvOJenzPp85E0/NmhLbRb3/vNrZAdclKXSUsO4fhWWeQwyO638VOMR3vl9u/gcAggggAAC/gUI8Pxb8UkEEEAAAQS6CiyvtrxW1KwetuLq44VV02xd7foYthJJ9/zT0DKvh5+pqXlqme21jnay8LAEtr2Cq6DVi1l7h/wMuMhjRWa3depVjUd4l7U3m/tFAAEEEMiKAAFeVlaK+0QAAQQQiFzg2IlT5u2zvzMT46PmsX0PmW13bAl8DQ3xakt10+6efwU+b1JfXKugGpZwrvGZwRxZHkgQxM9WXEnB1WfaH4sS0HS62eCq17vhp4IzyFq49h1df52mqr/ON+4fGddkY9cMOu9nYzWeTjMelYpeDgQQQAABBBCIXoAAL3pTzogAAgggkAGBk6+9ZT7601/Mnod3mIv/dckcPPSC+d/f/46575
67At+9Vq/Nza9kNsTbGE7Z/67LxNp52esvD/t4DbK4GwcQ5Llltp/LxsrEQVts+50/az+vwZUGVXbAhZ8q1qw9o9/7tdV4JQk39b3gQAABBBBAAIF4BAjw4nHlrAgggAACjgvc/8hT5sTzs+aWm2e8Oz39xlnzklTkHe/4sSCPoCHepdpq332ygpw7ie/YYKYtaZ3+u7bLattsUQ8bzNQbrVxMmQ2zjnZ6caVcMvrveW+n7mdl28p1Tzz9tVKUKsSNLprZzUyNGA3yOBBAAAEEEEAgPgECvPhsOTMCCCCAgEMCH/15zlyUf7bcdquZlJbZfU8eMbse3G52yj/2OHTkZe9fD8/uD3XnzVY7s3+Yty2CQ6WSachzbGwTDAWTwS/byrOSJFZLKzJ1OMf7//lZHrvfnTHXmdpy3eh01iIfOmlWK/E0xPMz4CJvVmu/PoYJ7/K2sDwPAggggICTAgR4Ti4LN4UAAgggEKWA7nV3SirsPi/VdhriaZXdb39/wau4O/PKc+uX0pBv9/5nzG9+dSz05bMY4q1Vm5XXK6s0nOg3wCA0lMMnsC2zNWkfXpU24hsnqqYlFZZFbCfWZepsKdYKPPXQAE99inbYfRL1uTXkHqnqr52KhLwN+acYFas23C4PlYq2/DwvAggggAACqQgQ4KXCzkURQAABBJISOP/e++al46977bJ6aJWdts0e2LvL7JA2Wq3C03+3x8bW2jD3qSHeJwt1qWRze7KFbY2syrTRjW2AG/eBC+ORle/aKbPaKrqxqqqIoaatytwYXnYO+9C94LQKrQjHZoNMrNNaqJdvD8K7IrzpPCMCCCCAgGsCBHiurQj3gwACCCAQqYBW330kQyoOf++fvPOeevOcuSjDKzS004o7baXd840HzJ7d9xsdbPH2r/99PeyL4kbaEmronniuhnh2cmav/cxsZZ6GWbrHX54PP1NmbRupHWCAx1r1mb4f9Wa+W2r9vh959iC8y/OveJ4NAQQQQMBlAQI8l1eHe0MAAQQQCC2gIZ0edljFoWf/2au623bHFu/H9ee1Kk//9+47t3rBnv1s6It/egIN8S4vNaTd0K3WukGmqq4FfcOyB1wjt0MtOltm++3tZoNP3Rcvry2Ta0Flxdded36C4Kh+PaV1HjttdVHWvN9gF+uh75H+msnLBOeK/D5wg7RO0zab1lvIdRFAAAEEiixAgFfk1efZEUAAgQIK/A/Z407baXWQRdKHVmwtrzaTvuxnrmcraOqyr5sOZfDb+mi/l7fQqlfLbK/FshN7dX+8vO0DF6R12u6Lp2Z5ayG1VZeDVKGqh1bijVTXWrGzXr2q4Z0OrCjp2NmAR21x2Twtf2HyvccfjfwvSgLeEl9DAAEEEEAgMwIEeJlZKm4UAQQQQCCIwHkZVrFNKuv0uPDBh+bIj3/hBXhacffE00fNrPxB0v58kPMP+p2aVOJpBU9ah62qClpJZ0MrrSbMw0RWPy2RvdbK7h84JOFGHkIr9Zgcq3iPHHQCcd5ajIOEmZ3vjAZ402NVrxIvq9Wao8NlMzlaDhXeWRPdl/TIiz83u752r7d1wcZDQ740/oIlrd+TuS4CCCCAAAJ+BQjw/ErxOQQQQACBTAloQKf73+lxeHa/9792/zvv32Uq7WPSLrtT2mmTPjT4Wkh4cmfQKrNuNrbSKusTWQdpme33jthzZbnSKmyY2WlUlQEg0+M6lTXbLcZRDS3J8oALDe90LaM8NKTT35/1L1X092e7bYH+mE4H14nhD0nAd+DbO6O8LOdCAAEEEEAg0wIEeJlePm4eAQQQQKCbwGkJ5zSg073uOgM6DfAO/egnZs/DO7y97tKs8kgyxLNDKHoNqgjyJtlwY06GdGRpj68ow8zPhlZrlVb99kgL4h3nd7RKbHI02nvvDK002MzSO2JD6kbzaqTt0YPsKxjnevs998RoRdqAy34/PvDnNk4J12BvQf7RQE/DPP1vrZLmQAABBBBAAAFjCPB4Cx
BAAAEEciOgf9h7Vlpk9eg2jEKr8i7KRFo7wCLtB9fqpHlpqY3r6NyD61Kt7nuvu0HuJ2x74SDXiuKzUVaZdbufuM8fhcHGc8S9hlmrTox7DbMy4CLu8K7fu/zOuXfNS8dfN6+9/IN+H+XnEUAAAQQQKIQAAV4hlpmHRAABBIoj0LnnXRaeenm15e01FvVhQ4gkpmAG2eA/6uf1c74oW2Z7Xc9Wb8nw4cD7yPl5nrCf0fucHq96p9G9Gf0OMwlyXW2pvVGml7q+D1xSg1o6w3XdO7HebAdhje07aYd32lqr08H/4d7/7rXR6l++aLXefffclWrldGzgnBgBBBBAAAEfAgR4PpD4CAIIIIAAAnEK6B/eP5Y2VA18ojhsUKVtukm1cq7teVaVdsO60dDQpSOultl+zxh3ZVu/6/f6+birzLpd27bUuhps2jbiJN9hF/cKTDq803DunbO/M/qXL9o++//kv/9OWmjtFgjaSqtbIuiwIf3s3fK/WmHNgQACCCCAQNEECPCKtuI8LwIIIICAkwLN1lUzN78SKsSzoUy90fYmxMZZUdUN0bYGujS4II2gqtNmbf/BileJ50qVlQ2qllbSmYqqeyfqPbg08CPNsNVWbOp7k/YkYw3hR+WdTfL46tcPrIdyW2+79ZpLa9XdwUMveG20dtDFjkeeMsdlkrj97yTvlWshgAACCCCQpgABXpr6XBsBBBBAAIEOgWarbYLuVWc3x08rlLGPkXZg1vlCJdUy2+8ldinYTDOo6nRKO0Ts9p4E/bXXb/39/nyav4ZL0k49OZZ8eKc2GtLpcKH7tn/lmoEVuqfpw/ufMbMHv+m1ztpjt/yYTq7dGPb5deZzCCCAAAIIZFWAAC+rK8d9I4AAAgjkUmDQEM+2h1YrpcDhX9SQWlE0MzlsGtIafDnGIR2b3XdaLbO9HPWedA84NZlfbiQ+kVXXZEoq3yrS6qyVb0lXZ3azSdtE72l6rGKGhq7zTFyYkpuGiYZ3M1MjRoPmtA4N67RVVttoD3//O2brl75gTr56xrwtrbUnpNrOHtpCqwHemVeeYy+8tBaL6yKAAAIIpCZAgJcaPRdGAAEEEECgu4DfEG+tiqni7XOnLbOuHWmEIy5VAHZbD12vYQlbkwzRsmCi73KSFXA2KFuVdvOaBKquHbbNOO4BF2vvxnCq4V2nvVbjXZSQbucD95gnnj5qdn3t3muq7/Y+ecTcfdft3mALDgQQQAABBIomQIBXtBXneRFAAAEEMiGgId4nUhXUkL3xuh22FdKlfcR63WcS4YwrLbP9XrAkp/a6OCShm4/dKzCJFnDXA03rY9dOh8LEETJah/JQqd8rm8rP6xRaHVix88Ht3vXtMItXZT+8yfHRVO6JiyKAAAIIIJCmAAFemvpcGwEEEEAAgR4CbRnXeUmm03aGeJ2DKtJoxQyyYDawiivEc7E9tJ+ThjPaUrtwJb5BEq7sd9fPwv58Z1WcusTR0urSfoR+XPTd1sESZamUi7Jq0/XwTm20XVar8HT6rE6n1fZahlf4eWv4DAIIIIBAXgUI8PK6sjwXAggggEAuBDTEqy03zfJqU6aZlo1WKmm7rLbNZumIaxprVqqpuq2VvfeoK6w09NHJt0m36kbxPsZ572vvYNmp6bd+zaIccJGF8M666N54p984a275278xf3/HFirv/L4wfA4BBBBAIJcCBHi5XFYeCgEEEEAgbwJahdduy1CIxYYTAwiC+Nrqp6gCSFvZF9X5gjxT2O/Y6sGoBinEFQqGfc5Bv28DK63EiyKszlo14maBr1ZtNiXUv7wYbOhGRQZV3CDncLVtdtD3hM8jgAACCCBQJAECvCKtNs+KAAIIIJBpAQ2qFhzccH8Q1Cgq5mzo5dLk3UEMun02ioApa+2h/cyiCiPVdqQ6FGkLar97j/Pngw640PBOB1aUdOwsBwIIIIAAAghkToAAL3NLxg0jgAACCBRZIA8hngZwMxIkBJkAGkUA6Or7Y9
uMg1SdZWWoyaD2YfY3tPvH6TWDVqwNer9Jfb5zwIWf/QI1wNSp0IR3Sa0Q10EAAQQQQCB6AQK86E05IwIIIIAAArEK5CXEmxqtmEFaR/PQMtvvxQgSUNqKrLiGhPS75yR+3rbU1pbrRvcM7HcEcex3Ttd+3m+4OTpclkEYFddun/tBAAEEEEAAgQEFCPAGBOPjCCCAAAIIuCCwtNI080sNF24l1D34aW/MY8tsLzR9Xt3rrCX7HvaaNNw5tbWW8dZqPy+RbRHuN/SjCOFdp1evARcTEpLrrzEOBBBAAAEEEMi+AAFe9teQJ0AAAQQQKKjAskyi1dbArB+99n8rWhjTuZa9KutsmKUDHrQisyiHbYvVbdy6DXRZay2tyuRmf5V6eXHTXye2ys66EN7lZXV5DgQQQAABBNYECPB4ExBAAAEEEMiwQL3ZNh/XVo0Mpsz0Yfd/+3hh1TSl8kyPIrTM9ls0NZi4vuINYNC17nTRoMb+WL/z5O3ne7molX2H8vbc/Z7Hulypt+TXD5V3/bz4eQQQQAABBLIkQICXpdXiXhFAAAEEEOgioGHF3PxK5kM8rSrT4Ra6KX9lqGTyNGU2zIvbOV12qFSSiaolk+f97vxadVYh6nc0BMZlrf1ah1ZwIIAAAggggEC+BAjw8rWePA0CCCCAQEEFmq12LsILbYGcmdIJtS3veTjWBKyLhrV/kbD2asYrLqNaV22p/dz0iDdddW5+tbAVidZT24dHJcjkQAABBBBAAIH8CRDg5W9NeSIEEEAAAQcEaovL5rfvvW8mxkfNtju2JHJHWQ/xbMvs0krLCyFWG23Zyyz7gzrCLn7nPoD67xUJObVNtJX1vumQMJ1DPFrtttcyWuS2YsK7kC8UX0cAAQQQQMC1zmgsAAAgAElEQVRxAQI8xxeI20MAAQQQyJ7AhT/80Tz74s/NlttuNRc++NB7gBPPzybyIFkM8bpNmbXDCq5KSNVrEmsiqClexIaaGmTq9FU97I8Vea+3bsNN1gZYVIxOaNYQuCiHDvSYmRox2lLMgQACCCCAAAL5FSDAy+/a8mQIIIAAAikJ7H3yiHls30PrlXcHnz5qbrl5xsw+/mgid9SW0OuSDLZofDoMIpGLBrxIvymzOol1uFLMirNe03nttFXdL1An0Rbp6NwTcGNQ1zmNVQPOvLcar/36GSa8K9IvAJ4VAQQQQKCwAgR4hV16HhwBBBBAIC6B+x95yqu409BOj4/+PGf2aai3d5fZ+eD2uC57zXmzEOL5nTLbK8hKBDPhi9gQSjtkLy9uHkLZ8FMr84rSaqwDPCZHq55Lrwm89p3Jc5WiXf+yDHzhQAABBBBAAIH8CxDg5X+NeUIEEEAAgYQFDh152QvvDkhgZ4/Tb5w1L504Zc688lyid6N7gi2vNhO9Zr+LdWuZ7fcdnTCqoUyeAxk16FeRuNHJthprG2XeK84GDXK1SlEnsmqVYt5aagnv+v2Owc8jgAACCCCQPwECvPytKU+EAAIIIJCygFbc7ZAqPA3rbBWe3tJXv37AvPbyD675sSRu1aUQb9CAqtMn722jaxWJFammq6/vd+f3/Rg03PJ7Xlc+F/T5/FYzuvKcfu6D8M6PEp9BAAEEEEAgfwIEePlbU54IAQQQQMABgWNSbXdKqu6Of9pKq1Npd+9/xgvwJmUybdLHwpWmWUh5oqvfltleNmECwKTNB7le0ICq8xprVYrBAsBB7jXJz9oKQ71mr3bifvekeylq+23WKzgJ7/qtND+PAAIIIIBAfgUI8PK7tjwZAggggEDKAjbEu2/7V8w7Z39n9nzjAbNn9/2p3VVaIV6Qltl+IZ62Rq7UW9Ie6VZ78KCLq4HM5FjF+1ptqWFauvFdiMMOeNDBFnmwiXKd7f55SyvZbKmtyJRZHVhR0n7pBA5t+//Zq2eM/uXDt1L+vSuBx+USCCCAAAIIOC9AgOf8EnGDCCCAAAJZFjj/3vveEIttd2xJvHW2m1vSIV5cFX
MaCmq405JJu/NSWZjFaaNx2eh51abRbGOz4RdBVm1GqkNmWoLepMK7zgriCakY1n09/1H+IiKpITxZ/j2fe0cAAQQQQCAuAQK8uGQ5LwIIIIAAAo4KLEt1lrYjxn1oy6y2dGrAphVhcRzaGjlcKZm52mqmQjxbDaYDFuK20bbRsJV9cazdZue0VYQaNsdpo2twqea+zehw2UyPr1VpJnHoXzocPPTCNft1nv/9BaOhnm4JwIEAAggggAAC6QgQ4KXjzlURQAABBBBIVUBDvNpS3YTs2Oz6DFG3zPaDimL/uH7XiPLnk7xfu++gDjKpS0We64e93yT2qrN7BrrcUjshAbW+L0keulfnrq/de027/7Gfnja1hSUz+/ijSd4K10IAAQQQQACBDgECPF4HBBBAAAEECirQlPbTufmVSEM82xa6Um/LNNVGYrJJBj9BH8oOZNAtzDRQS6oqbm16b8UsrTTln3gqIYOadH4vyWDTXte21K422rJnoFut2GmEdxc++NBrl9VhO/bQLQA01Hvx8He9rQA4EEAAAQQQQCAdAQK8dNy5KgIIIIAAAk4IaIh3SdpPowiTkmiZ7YW2FlRVJTisy4ALt6rN4trvzu9L1BlUJRms+r0/2wqdRruvBqva6q2t2Glcv5tRGuGd3oe2yp6UwRVHf/jE+m3tffKI2XrbrVTf+X2Z+RwCCCCAAAIxCRDgxQTLaRFAAAEEEMiKQLPVDrUXmA1AXNhTzO6f5lK1mSvTT21r85BMM02yArDXrwOXhpGsVXFWvEq8uPbe8/N7Qlrhnd6bVts98fRRM3vwm+bzn7vJHPnxL8xF+THd+25ShllwIIAAAggggEB6AgR46dlzZQQQQAABBJwRCBripdUy2wsu7Wq3zntLoy2030tl7ymJfeaysk72PtN+n6dk0uzYSLJ73m1cIw3xnpXgzpuefedWc2DvLsK7fr+o+HkEEEAAAQQSECDASwCZSyCAAAIIIJAFgUFDPFcqlrrZamXXzOSwacjghstLye3FZ+/FVrtVpK3XlbbMTqckpuBmLbxLe+20/Xt0eCgLv1VwjwgggAACCCCQggABXgroXBIBBBBAAAFXBdoyllb3xGvI3nibHS61zPZznJaKJm0Z1RDt6uaP1O80A/28SxWALoZoNjx0ca/CTi8bUMd9nzrUZGZqxGj7NwcCCCCAAAIIILDp/we/Kgc8CCCAAAIIIICAFdAQ75PFutHJnBuPtFsMg6xSkm2sWZn4ah3tHnSy5LIvXvwhZ5JrEeRd2fgdu6diXFOVCe+iWCXOgQACCCCAQDEEqMArxjrzlAgggAACCAwsoIMOlleb699zuWW238Ot3Xs51LCOftdwZW+5fvfZ7ed1CmzcQ0iyFt51hpza3qphW5TDP2wYXh4qBVkyvoMAAggggAACBRMgwCvYgvO4CCCAAAIIDCJQk/3jliTEm7g+/oBnkPsK8tnrZX8xfQ6tNKvL3nhRHbaleLji5n53fp/TBrRR++j1tZVZ9wOck/bsrPZ+qI++P9qOHfb9Ibzz+1byOQQQQAABBBCwAgR4vAsIIIAAAggg0FNAA5ellaapLSc/DCLqpbEtkfo8Syut0KfPYktxr4eOw+fGiarXjp2n9+fKasssXPlrdeogLxLh3SBafBYBBBBAAAEECPB4BxBAAAEEcivw0Z/nzPt/+KO57567rnnGd869a7Z86QvmlptncvvscT2YhhULOQjw1CeqIRM27AoT5sS1XmHOqz42dFu40ghcMReVc5hnieO7dt9APfegLbWEd3GsCOdEAAEEEECgGAJU4BVjnXlKBBBAoFACGuDteOQpc+aV59bDuvPvvW8OHnrB+7HJ8dFCeUT1sHkL8cJUhmV5vzs/70PYtuCoK/n83HPSn7H7KmqI56eltiJTZm+aHDYl3UyPAwEEEEAAAQQQGFCAAG9AMD6OAAIIIJANgUNHXvZu9PDsfu9/j/z4F+aiBHtHf/hENh7A0btclt
ZB3SMtD4eGVFMyvGFIghXd18zv3mxJDHxwxdeGVOrTbEkvtY/D7jX48cKq7+/4OK2TH7FTh/tVYer+iDfoIAzCOyfXkZtCAAEEEEAgCwIEeFlYJe4RAQQQQGBggfO/v2A0xNOKOz2++vUD5sXD3zXb7tgy8Ln4wrUCGuLVluqm7S/PcZ5Pq+lGqkNeiNfq8VCdraV52M/N78KshVRV2TNQBpr02Tcwq5Nm/Vp0+5y+F9PjFe+nugXBo8Pl9Z8Pcx2+iwACCCCAAALFFiDAK/b68/QIIIBArgW0jXb24DfNhLTMavvsb351LNfPm+TDaTXW3PxKrkI8rRy7VOse4uV1vzu/74yfYR1FDO86/ezzd7bUjo+UzaRM4OUILqB/GXPqzXPm8Pf+af0kWlG968HtZutttwY/Md9EAAEEEEAgYwIEeBlbMG4XAQQQQMC/wOk3zpp/kz/8aYA3OTFmDnx7p/8v88m+AhriXaqt9qxa63sShz6wWevnoHudOfRIkd6KthxrJZ52gXZWmtkf14tpe7XfVuRIb86Rk2m1ou6tqMM/SqWS0VCPI7yA/mWMBnYH9u4yJ197y7z96383J56fDX9izoAAAggggECGBAjwMrRY3CoCCCCAwGACdpiFBngMrxjMzu+nm632plVrfs/h0ue00m5GBg3My8Rd3desSPvd+V2Hzko7/c5Nk1XPSoeccKxNOf5v0yNGg02OaAT09/Ld+58xL8oepv9LtkbQ8I5p4tHYchYEEEAAgewIEOBlZ624UwQQQACBAAJPPH3U1BaXC1utoVWI+offbXfdHtv+f3kL8Wy7aEkSmJV6ywvzilxV1u2Xna1WNOYq4d0GoAkZjELlXYDfrPt85dhPT5uXjr/ubYuwZ/f90V+AMyKAAAIIIOC4AAGe4wvE7SGAAAIIBBfQ4E5br4o6vOKghJeTUn14y9/+jfnZL980j+17KLY/+GqI94kMgWj4nFQafFXj/6YN8PRKK/W2KdLACr+6drCFttMurTSpvvsUbkr2uxuTfe84ohfY++QR7y8j7r5z6/p08eivwhkRQAABBBBwV4AAz9214c4QQAABBEIIvHPuXa9aQ9tni7hX0vn33jcnJbQ7Ki1neugffPfJH4C1euW+e+4KIbv5V9sywVX3xMtyiGf3u9PQbrXR9vZ8uyrPRRXeX9fdGtl98HQCa0uC26Ib6bsyKoNQOKIX6Nz3Tv9SZs83HojtLyOiv3vOiAACCCCAQDQCBHjROHIWBBBAAAHHBHRK4e0yoXCnbHxexEMDTN3o/fDs/vXH11Dv0I9+Yo7HuH+UhniXlxpe62nWjs2mqOo+eMOVkje4oSXPV+Sjl9FItZSr/RD9rrNWId4o+yZqVSJH9AIX/vBHb4q43ffO7m2q+5qyD1703pwRAQQQQMBdAQI8d9eGO0MAAQQQQMC3gP6hVve707bhPQ/v8L6nlSob/5B7SDaA3yrBpv2M7wsM+MHLiw2zvJqNoQbaMqtVZJrNbTZFdbPgakCWTH+8X5CplXkT11e8oLPebGf6Wf3evIZ3M1MjRoefcMQjoH8ZoUdn5bD+Xjchk8XjqiaO50k4KwIIIIAAAuEECPDC+fFtBBBAAAEEUhfQ8O5pCebu2/4Vr1X2nbO/84K7YydOmbfl3197+Qfr93j+9xfMyVfPrLfWxnnzNanEW5T90Vw+7H53fqaorg1uKHsBVTMHe/35XRc1unGi6rUU99sPcG1vvIq3L97SSvaqMP2a6Ofsu1MeovJuEDc+iwACCCCAAALBBAjwgrnxLQQQQAABBJwR0Em7u75273o1yu79z3gBnbaX6b9rxZ1tpdXW4kmpXDnw7Z2J3P/CFRlwIPvJuXis7eVWkVCq7g2r8HPY4Q0LVxre9NW8H4MEnNZikMAvq36Ed1ldOe4bAQQQQACB7AoQ4GV37bhzBBBAAAEEPAEN6TSg06DO/rcN8LSlVgO+Cx986O0J+HkJ9b73+KPedNqkDhdDvD
AtsUFCraSso7xOmOe8TjpKp2TvwIpU5OVt78C4wzv9NfuvUjn70X9d8qpqt37pC1EuK+dCAAEEEEAAgYwKEOBldOG4bQQQQAABBKyAts12buZ+v+x995a00HYeGuDpYUO+pPW0pXJeWmrTPjR8mRyreLehLb5Bh1KECbfSNvBzfR1IMTlaHag6sdt5bVCal7bjuMM7NdRfz8d+etoL7rTd/bF9D5mdD9zjZ9n4DAIIIIAAAgjkWIAAL8eLy6MhgAACCBRPQIM6bZPViY16nHztLa+11oVpjcvScqpDItI6og7dtMpM94ZryX5489ImfDUnA2rDVCd2W1sbBma97bgigypukmmzJZ1ckdChYd6+J4+YV2UfyySrZhN6PC6DAAIIIIAAAgMIEOANgMVHEUAAAQQQcF3g1JvnzIX/+E9vyqwOsZiQVtlZaZl15dDppB/XVr2Jr0keawModLhCI/LhCnY665w8V9ZDvKjDO7vGUYenSb47eq00wju9rgZ4Ok36//zqGAFe0ovO9RBAAAEEEHBMgADPsQXhdhBAAAEEEAgjoKHd/5UqvIvyB38dbLFn9/1hThfLd3WC69z8SmIhXlyhVCdOEteIZTE+Pands25Iqsy03TWOINJWLOol47pGHEajw2Vvsm7ch4Z1+utW977T4+Kf/mJOvXHW7Hpwuzmwd9f6hOmtX/6i2XbHlrhvh/MjgAACCCCAgGMCBHiOLQi3gwACCCCAQBiBd869a468+HNzXFpoXWib3exZmq22uVSrB96Dzo+RBkbT41WjHY+XF4Pvd+fnWvqZtam2ZS+c0pAyK0fSU2O1YlHbauNe/yj8x2U97Z6JUZyv2zl0aMXTR172Bs1su+t2c8vnbvI+pkHdFtkHT38dazBvwzwN6HUgjYZ6HAgggAACCCBQHAECvOKsNU+KAAIIIICAUwJxhnhptWxWZeqqhoa15bpZqbed8u52M2k52ZZm3RNR26pdPCYkaNTKyiQO3avy5C/fNEcPf/czU2ftz3WG8rovngZ42+7cmsTtcQ0EEEAAAQQQcECAAM+BReAWEEAAAQQQKKpAHCGeVndNj1WNDk1YWmklTluWNlQdbqGTd9O4vt8HTvs+7fWvyHCThStNv7edyOeSDO/sA134wx/NoR/95JrWd22r3b3/GfOiBHudbbN7JcB7jAAvkXeBiyCAAAIIIOCKAAGeKyvBfSCAAAIIIFBQAQ3xPpG200YEbaeu7EWXVmWb31fIlXZf277bkCo8Vyb5phHe2XXTdtpD0k77j9u/YnbK3nc6UVqPzkE059973xw89II588pzDLbw+8LzOQQQQAABBHIgQICXg0XkERBAAAEEEMi6QFvG0l6SKa5BQzw7hKEiLay6B10r6TG3XRZA72lmcthoOHV5qeHMErkScnaC2Em+aa9dmuFdtxdEq++O/vCJ9f0sNeB7WH5szzcecHJAjTMvOTeCAAIIIIBADgUI8HK4qDwSAggggAACWRTQEK+23DTLq4O1U9pqN91zrrbsTlBm12B6rGLinO46yFq7EpR1u+e0qwJ178LR4aFBOGP/rFbj6TALnSZtq/Mmx0fN4dn9sV+bCyCAAAIIIICAWwIEeG6tB3eDAAIIIIBA4QV0YqzfEG9taETF+f3m0q56sxN59eXSwRFXHR2Sq+up+wcmuX+hTinW8G6k6lZ4p2ule+BpiCe36P27Dq0gvCv8b5EAIIAAAggUVIAAr6ALz2MjgAACCCDgsoAONVjoU01nQzFtu2xGsH9e3B5pVZi5vh/fRvckKyo1vJuZGjE6UMPl48IHH5rP3zzDnncuLxL3hgACCCCAQMwCBHgxA3N6BBBAAAEEEAgmsFmIp9VkE9dXpGKqJPvmubHfnd8nvF5aNPXetQquLnvjxX1kLbyzHrZiUAM2DWjjqBi0NuWhUtzLwPkRQAABBBBAAIHQAgR4oQk5AQIIIIAAAgjEJbAxxEuyOiuuZ9JqL20TXVppyj+tuC
7jBZyTo1XZF7BudH/ALB5xVVkS3mXxbeCeEUAAAQQQKLYAAV6x15+nRwABBBBAwHkBDbrmZYqrDb6urLZkj7TBBl249pBxV8al1a4bh7OtWtR98XTtwx6Ed2EF+T4CCCCAAAIIpCFAgJeGOtdEAAEEEEAAgYEEGq22t5F/Vva78/NwGiRpJd5qI9rpuWkPzPDz7IN+JqrAk/BuUHk+jwACCCCAAAKuCBDgubIS3AcCCCCAAAII9BTQQRVz8yum7egE1SDLp3u9TY1WzJC01Uax19uknGu4UvLO1coTlOCqlQae+lhBJulWxPgG+T573gV5U/kOAggggAACCKQtQICX9gpwfQQQQAABBBDwLZDHEE8fXqvmRqpDgYO3uKr5fC9Mgh/UkHLQASYa3t00OWxKOhWDAwEEEEAAAQQQyKAAAV4GF41bRgABBBBAoMgCTWmnzdr0WT/rFbT1Nar2Uj/36MpndI8/nearlYb9pvmODpfN9HjFlVvnPhBAAAEEEEAAgUACBHiB2PgSAggggAACCKQpkNcQzw5s+Hhh1Wi1Yb8jqYm2/e4jjZ/38+yEd2msDNdEAAEEEEAAgTgECPDiUOWcCCCAAAIIIBC7QF5DPA2mZqTdc36599RVG/bpfnD9qtBiX4yULtDZOqxTaq92ZJ4T0mqrVY0cCCCAAAIIIIBAHgQI8PKwijwDAggggAACBRVoy0SDS7VV0/BRrZYlon5tsUHbbbNk4Pde7SCQSvmvwzsI7/zq8TkEEEAAAQQQyIoAAV5WVor7RAABBBBAAIGuAnkO8XTq6mqjbWpSjWcPwrvuvxCsi3qNjVB5x28XCCCAAAIIIJAvAQK8fK0nT4MAAggggEBhBS4vNszyajNXz6/VZdPjVe+ZaksNMzm2NoxB22Y720Vz9dAhHkYDT53my4EAAggggAACCORNgAAvbyvK8yCAAAIIIOCYwEd/njOn3zhrtt11u9l2x5ZY7y6PIZ6CTUtwNypVZYtXmtdU48WKmbGTa9A5Okx4l7Fl43YRQAABBBBAwKcAAZ5PKD6GAAIIIIAAAoMLnH7znHnp+Otm14PbzSkN8e7cag7P7h/8RAN8Y0FCroWOltMBvurkR+1+eC3Z70///VKtbvTfOdYEhMSrUqTyjjcCAQQQQAABBPIsQICX59Xl2RBAAAEEEEhZ4P5HnjKvvfwDMzk+amqLy2bfk0e8EG/28UdjvbO8hHh2Iq1OWF1aaRmdPDsl01XnZHBHM2eDO4K8EBrezUyNGHXiQAABBBBAAAEE8ixAgJfn1eXZEEAAAQQQSFFAA7sdEuD95lfH1u9C22k1xHts7y6zU6ry4jyyHuKNjQx5wxg+XqhfE9ZVZdqqVpxpqHdltRUnodPnXqtMHCa8c3qVuDkEEEAAAQQQiEqAAC8qSc6DAAIIIIAAAkYDut++977Z+cA9nsbu/c+YXV+71+zZff+6zjvn3jVHXvy5OfPKc7GLLUvApQMfsnb0mzRr22o1wNOgsmiHff7yUKloj87zIoAAAggggEBBBQjwCrrwPDYCCCCAAAJxCBx69p/NqX/5tRfO3XLzjBfoaYinbbT63/b46tcPeJ/R1tq4Dw3xakt1k5Vt4yalRXa4UvIq73rtdVfUEI/wLu5fMZwfAQQQQAABBFwUIMBzcVW4JwQQQAABBDIqcOzEKXPhgw+9f44/P+uFdvpjOsDC/reGenuljfatBCrwLKPuFzc3v+J0iHed7ucmLaGNZtvMyxCOqz7mVOh3bpyompY8n9/vZPTV8m6b8C7Lq8e9I4AAAggggEAYAQK8MHp8FwEEEEAAAQSuEdCwbuuXv2gu/Md/mrfP/s6rvNPj5GtvmZ/98k3zd59W4X3v4DfN1i99IVE9DfEuyfAHFye4hq2mmx6rmIrsjafDLfwEf4nCR3SxigyquEHCStpmIwLlNAgggAACCCCQKQECvEwtFzeLAAIIIICA2wKn3jzn3eAu2QPv4NNHzUWpttMqvK
M/fMKbQvu+VOZtue3WRFpnu0k1W20J8Xq3piYtHDa8s/fbb9+8pJ8ryutpeKcDK0o6dpYDAQQQQAABBBAooAABXgEXnUdGAAEEEEAgLgEdUKHVd//z4R3m2R//wqvCe2zfQ9cMsYjr2n7P61KIN1ItmcnRqqkt181Kve33ETb93GaTa0OfOMUTjA6XxaicaHinYbMOY9kiVaKdezemyMClEUAAAQQQQKDgAgR4BX8BeHwEEEAAAQSiFDj/+wvmZ6+e8Srtdj243eyUf/bJfnevSittEgMr/D6LCyFeXBVzVWmlnR6vynTahtEptVk+NLybHq8k8gi6b6O+v+9I6KwBnu7VePedW73qUQ4EEEAAAQQQQCBtAQK8tFeA6yOAAAIIIJAjAQ09NLA7evi7ie9xNyhjW8bS6p54DdkbL+kjrvDOPkdZWk51uMXSSlP+yWaINyHTeNUp7uO0tH2/dPx1s1Vau/9h+1fM30top5WkJ2XPRteC57gtOD8CCCCAAAIIuCtAgOfu2nBnCCCAAAIIZFJAq5dcqrbrhagh3ieLdbPaCN++6mexdGrslARTSQyciGpvPT/PFfVnkgrv9L41dH5C9mv81jceMDtl78bz0jp78NAL3gAW2z6rgd5Hf/qLV1GalXc76jXhfAgggAACCCCQrgABXrr+XB0BBBBAAAEEHBC4vNgwy6vNWO9EAzWtitOwsLbciPVa9uQaGM7I8IdGs20uLyVzzbAPlmR4Z+/VhnjbtPpOWmhnZUryfffc5YV7WlGqPz4xPuq12GpLLfvihV1lvo8AAggggAACgwoQ4A0qxucRQAABBBBAIJcCNQm4FqXlNI4j7Wq46bFkqv7C2qUR3nXe81e/fsAL6rT6To+H9z/j7eV4YO8u779Pv3HWG8zCvnhhV5rvI4AAAggggMCgAgR4g4rxeQQQQAABBBDIrcDClaZZiLg6zpX96OLedy/sS6GDN0aHh8KeJvD3j/30tNcmu/XLX/Qq7o68+HOv0u7w7P71c2p7re6NR4AXmJkvIoAAAggggEBAAQK8gHB8DQEEEEAAAQTyKRBliDc2MiSDGCpmToZlNFMYlrFxhfR+xkbK5uOFuhP3o/cnncVmcizd8E7v48If/mg+/7mbvD3u7D54Z1557po97/ZKO+3dd91uDnx7Zz5ffp4KAQQQQAABBJwVIMBzdmm4MQQQQAABBBBISyCKEM/VirfrpcpNQ8XLMryjLnvjpXloeDczNWK0StGl4+SrZ7z972Yff3T9tk6+9haTaV1aJO4FAQQQQACBggkQ4BVswXlcBBBAAAEEEPAnsLza8kKuIMekTJodqZbMpVrdtGTSrWuHC229a/sCDjsX3ula6bCKl06cMieen/WWzoZ3x+W/GWDh2tvM/SCAAAIIIFAMAQK8YqwzT4kAAggggAACAQQ0xKst1Y3fDE6nvupebnpo+HfVvexuXSHNwRr22uWhUoBVSeYrxyTA04EV2lKr1XiEd8m4cxUEEEAAAQQQ6C5AgMebgQACCCCAAAII9BDQvevm5lf6hnhpBmJBF1Dv+caJqllttE0t4uEdm91TFsI7e++1xWVzUcK7rbfdGpSY7yGAAAIIIIAAApEIEOBFwshJEEAAAQQQQCDPAhriXZJBFJu1w2YxvLPrpVWDU9LyOyT70OlwizirBrMU3uX5febZEEAAAQQQQCB7AgR42Vsz7hgBBBBAAAEEUhBottpd97Srlkte2+zClYa5Ii23WT3i3revIgHhDVLt53LbbFbXjvtGAAEEEEAAgfwLEODlf415QgQQQAABBBCISGBjiDc2MmTGRspe5ZpW6Urn6EMAAAVaSURBVGX9iGtyroZ3OrCipGNnORBAAAEEEEAAAQQGFiDAG5iMLyCAAAIIIIBAkQU0xPtEAruR6pC5fnjI2UmzQddIn2ni+oo3hKPebAc9zfr31Gl6rEJ4F1qSEyCAAAIIIIBAkQUI8Iq8+jw7AggggAACCAQSaM
tYWt0PTyvvNtsXL9CJHflSWSrmZqRibl4GW4RpCx4dLkt7ccWRp+I2EEAAAQQQQACB7AoQ4GV37bhzBBBAAAEEEEhRQEO8y0sNs1LP7r53vfjCDuaYkMEY2pLLgQACCCCAAAIIIBBegAAvvCFnQAABBBBAAIECC1xebJjl1WYuBTTEu1EGT6w22qYm1Xh+D8I7v1J8DgEEEEAAAQQQ8CdAgOfPiU8hgAACCCCAAAKbCtSkEm9xJZ8h3nUyd0JDPCk49PbFu9pnVgfhHb9QEEAAAQQQQACB6AUI8KI35YwIIIAAAgggUECBhStNszBAlVrWiCalJXa4Uuq57x/hXdZWlftFAAEEEEAAgawIEOBlZaW4TwQQQAABBBBwXiDvIZ7uabfZ5N0pmTQ7NsKed86/pNwgAggggAACCGRSgAAvk8vGTSOAAAIIIICAqwJL0ko7Ly21eT00wJuSary52qppttb6aafHq2ZUfpwDAQQQQAABBBBAIB4BArx4XDkrAggggAACCBRYYHm15e0Xl9ejWi55oZ2GlVp1Vx6SjfI4EEAAAQQQQAABBGITIMCLjZYTI4AAAggggECRBerNtvlYqtR0+EMeDw3xZqaG8/hoPBMCCCCAAAIIIOCcAAGec0vCDSGAAAIIIIBAXgS0xXRufiV3Id5Q6Tpz02RVKu9KeVkqngMBBBBAAAEEEHBagADP6eXh5hBAAAEEEEAgaoHTb5w1tcVls/XLXzTb7tgS9ek/c75mq20u1eqmlZNSPMK72F8ZLoAAAggggAACCHxGgACPlwIBBBBAAAEECiNw5Me/MAsS3k2Mj5pTEuR96+Ed5sDeXbE/f15CPMK72F8VLoAAAggggAACCHQVIMDjxUAAAQQQQACBQghc+OBDowHeiednvef96M9zZt+TR8yuB7cT4vl4AwjvfCDxEQQQQAABBBBAICYBAryYYDktAggggAACCLgl8M65d82pf/m1OfrDJ9ZvzIZ4h7//ncTaaT9ZqJuG7I2XpaMiU2Zvmhw2Jdn7jgMBBBBAAAEEEEAgeQECvOTNuSICCCCAAAIIJCSgAd1F+WfLbbd6V3x4/zPmuFTg3XLzzPod6J542k6rP57E0Za98C7JdNqshHgj1SEzPVYhvEvi5eAaCCCAAAIIIIDAJgIEeLwaCCCAAAIIIJBLAa24O/nLN72BFbMHv+lV2GlY99KJU58J8e5/5Cnz1ivPJeagIV5tuWmWV5uJXTPIhUaHy2Z6vBLkq3wHAQQQQAABBBBAIEIBArwIMTkVAggggAACCCCAAAIIIIAAAggggAACUQsQ4EUtyvkQQAABBBBAAAEEEEAAAQQQQAABBBCIUIAAL0JMToUAAggggAACCCCAAAIIIIAAAggggEDUAgR4UYtyPgQQQAABBBBAAAEEEEAAAQQQQAABBCIUIMCLEJNTIYAAAggggAACCCCAAAIIIIAAAgggELUAAV7UopwPAQQQQAABBBBAAAEEEEAAAQQQQACBCAUI8CLE5FQIIIAAAggggAACCCCAAAIIIIAAAghELUCAF7Uo50MAAQQQQAABBBBAAAEEEEAAAQQQQCBCAQK8CDE5FQIIIIAAAggggAACCCCAAAIIIIAAAlELEOBFLcr5EEAAAQQQQAABBBBAAAEEEEAAAQQQiFDg/wNLBAPbsKL33wAAAABJRU5ErkJggg==", + "text/html": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Let's try 3D!\n", + "\n", + "tsne = TSNE(n_components=3, random_state=42)\n", + "reduced_vectors = tsne.fit_transform(vectors)\n", + "\n", + "# Create the 3D scatter plot\n", + "fig = go.Figure(data=[go.Scatter3d(\n", + " x=reduced_vectors[:, 0],\n", + " y=reduced_vectors[:, 1],\n", + " z=reduced_vectors[:, 2],\n", + " mode='markers',\n", + " marker=dict(size=5, color=colors, opacity=0.8),\n", + " text=[f\"Video: {t}
Text: {d[:100]}...\" for t, d in zip(video_numbers, documents)],\n", + " hoverinfo='text'\n", + ")])\n", + "\n", + "fig.update_layout(\n", + " title='3D Chroma Vector Store Visualization',\n", + " scene=dict(xaxis_title='x', yaxis_title='y', zaxis_title='z'),\n", + " width=900,\n", + " height=700,\n", + " margin=dict(r=20, b=10, l=10, t=40)\n", + ")\n", + "\n", + "fig.show()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9b3ada26-b4b7-42fc-b943-933c14adf89b", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.11" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/week5/community-contributions/subtitles/srts/59166281/en_US.srt b/week5/community-contributions/subtitles/srts/59166281/en_US.srt new file mode 100755 index 0000000..0ab072d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166281/en_US.srt @@ -0,0 +1,55 @@ +WEBVTT + +00:00.800 --> 00:08.990 +And with that, amazingly, you completed day one of week two already and that gets you to the 15% point + +00:08.990 --> 00:14.300 +towards your goal of being an LM Engineering Master. + +00:14.540 --> 00:17.810 +So congratulations on getting to the 15% point. + +00:17.840 --> 00:20.630 +Let's talk about what you're already able to do. + +00:20.810 --> 00:23.750 +As you know, you can describe Transformers. + +00:23.750 --> 00:30.050 +And that includes all of the context window tokens, API costs and the like. + +00:30.080 --> 00:32.330 +You can talk about the six leading frontier LMS. 
+ +00:32.330 --> 00:37.430 +You can now confidently use OpenAI's API with streaming, with markdown, with JSON. + +00:37.430 --> 00:45.320 +And now in addition, you can use the Anthropic and Google APIs, and you've hopefully got even deeper + +00:45.320 --> 00:52.160 +insights into that structure of messages, that list of dicts that is going to feature from time to + +00:52.190 --> 00:55.490 +time, as you will see tomorrow. + +00:55.520 --> 00:58.730 +Tomorrow is a day that I've been looking forward to for a long time. + +00:58.730 --> 01:05.570 +I am going to be gushing gushing about Gradio, which I think is an absolutely fabulous platform and + +01:05.570 --> 01:07.610 +you are going to see why yourself. + +01:07.610 --> 01:11.390 +You're going to see why I love it so much, and I'm hoping that you're going to love it too. + +01:11.420 --> 01:16.970 +We're going to create a simple UI with Gradio, and we're going to hook it up to Frontier Models, and + +01:16.970 --> 01:19.100 +it's going to be really, really easy. + +01:19.100 --> 01:20.360 +And I will see you then. 
diff --git a/week5/community-contributions/subtitles/srts/59166281/ja_JP.srt b/week5/community-contributions/subtitles/srts/59166281/ja_JP.srt new file mode 100755 index 0000000..e74899e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166281/ja_JP.srt @@ -0,0 +1,43 @@ +WEBVTT + +00:00.800 --> 00:14.300 +そして、 驚くべきことに、 あなたはすでに2週目の初日を終え、 これでLMエンジニアリング・マスターという目標に向けて15%のポイントを獲得したことになる。 + +00:14.540 --> 00:17.810 +というわけで、 15%まで到達したことを祝福したい。 + +00:17.840 --> 00:20.630 +あなたがすでにできていることについて話しましょう。 + +00:20.810 --> 00:23.750 +ご存知のように、 トランスフォーマーを表現することは可能だ。 + +00:23.750 --> 00:30.050 +その中には、 コンテクスト・ウィンドウのトークンやAPIのコストなども含まれる。 + +00:30.080 --> 00:32.330 +6つの主要なフロンティアLMSについて話すことができます。 + +00:32.330 --> 00:37.430 +OpenAIのAPIをストリーミング、 マークダウン、 JSONで自信を持って使えるようになりました。 + +00:37.430 --> 00:45.320 +さらに、 AnthropicとGoogleのAPIを使うことができるようになり、 + +00:45.320 --> 00:55.490 +メッセージの構造やディクツのリストについてさらに深い洞察を得ることができるようになった。 + +00:55.520 --> 00:58.730 +明日はずっと楽しみにしていた日だ。 + +00:58.730 --> 01:07.610 +Gradioは本当に素晴らしいプラットフォームだと思う。 + +01:07.610 --> 01:11.390 +私がなぜこのクラブをこんなに気に入っているのか、 その理由がわかるはずだ。 + +01:11.420 --> 01:19.100 +GradioでシンプルなUIを作り、 Frontier Modelsに接続する。 + +01:19.100 --> 01:20.360 +その時にまた会おう。 diff --git a/week5/community-contributions/subtitles/srts/59166281/ko_KR.srt b/week5/community-contributions/subtitles/srts/59166281/ko_KR.srt new file mode 100755 index 0000000..653abc9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166281/ko_KR.srt @@ -0,0 +1,55 @@ +WEBVTT + +00:00.800 --> 00:08.990 +놀랍게도 둘째 주 첫째 날을 벌써 마쳤습니다 15%를 달성했습니다 LM + +00:08.990 --> 00:14.300 +엔지니어링 마스터가 되는 목표에 가까워졌죠 + +00:14.540 --> 00:17.810 +15%가 된 걸 축하해요 + +00:17.840 --> 00:20.630 +이미 할 수 있는 걸 얘기해 보죠 + +00:20.810 --> 00:23.750 +트랜스포머는 설명하기 쉽잖아요 + +00:23.750 --> 00:30.050 +컨텍스트 윈도우 토큰과 API 비용 등을 포함하죠 + +00:30.080 --> 00:32.330 +프런티어 LMS의 여섯 가지 대표적인 예가 있죠 + +00:32.330 --> 00:37.430 +이제 OpenAI의 API를 스트리밍, 마크다운, JSON으로 자신 있게 사용할 수 있어요 + +00:37.430 --> 00:45.320 +Anthropic과 구글 API를 이용하면 메시지 구조에 + 
+00:45.320 --> 00:52.160 +대해 더 깊이 이해할 수 있을 겁니다 내일도 이따금씩 + +00:52.190 --> 00:55.490 +나올 기능 목록도요 + +00:55.520 --> 00:58.730 +내일은 제가 오랫동안 기다려온 날이에요 + +00:58.730 --> 01:05.570 +그라디오에 대해 열변을 토할게요 정말 멋진 플랫폼이라고 생각하거든요 그 이유는 + +01:05.570 --> 01:07.610 +직접 보시게 될 거예요 + +01:07.610 --> 01:11.390 +제가 왜 좋아하는지 알게 되실 거예요 여러분도 좋아하시면 좋겠네요 + +01:11.420 --> 01:16.970 +Gadio로 간단한 UI를 만들고 프론티어 모델에 연결할 + +01:16.970 --> 01:19.100 +거예요 정말 정말 쉽죠 + +01:19.100 --> 01:20.360 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59166317/en_US.srt b/week5/community-contributions/subtitles/srts/59166317/en_US.srt new file mode 100755 index 0000000..b6f5460 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166317/en_US.srt @@ -0,0 +1,124 @@ +WEBVTT + +00:00.680 --> 00:08.540 +And welcome to week two, day two, as we continue our adventure into the realm of LMS. + +00:08.780 --> 00:14.630 +Uh, so today, a very special day that I'm really looking forward to. + +00:14.720 --> 00:15.830 +Uh, quick recap. + +00:15.860 --> 00:22.250 +Of course, you can now describe Transformers as well, and you can talk about six top frontier models. + +00:22.250 --> 00:27.920 +You can confidently use OpenAI's API along with Anthropic and Google's. + +00:27.920 --> 00:33.080 +But today, changing topic, we are going to be talking about Gradio. + +00:33.350 --> 00:37.040 +And I realized I have gone on about Gradio a bit, but you're going to see why. + +00:37.040 --> 00:40.490 +It's really terrific and we're going to have fun with it. + +00:40.640 --> 00:46.070 +We're going to create a simple UI using radio and then hook it up to Frontier Models. + +00:46.070 --> 00:49.130 +And as I say, it's going to be easy. + +00:49.580 --> 00:53.420 +Uh, so why make such a fuss about user interfaces? 
+ +00:53.420 --> 01:00.890 +Because it allows us, as data scientists, as LM engineers, to do more quickly, to be able to build + +01:00.920 --> 01:08.510 +prototypes, expose them to our audience, to our business sponsors, the the, the people that need + +01:08.540 --> 01:12.140 +our LMS and do so very quickly indeed. + +01:12.140 --> 01:17.000 +If you are a front end person or you've dabbled in front end and you know what it's like to stand up + +01:17.030 --> 01:22.370 +a react app or something like that, you know that there's a lot of boilerplate code that goes into + +01:22.400 --> 01:26.750 +getting things up and running, and it turns out that we don't need to do that with models. + +01:26.750 --> 01:30.380 +We can build a user interface super quickly, and that's what we'll be doing today. + +01:30.380 --> 01:35.030 +So Gradio is in fact, uh, a part of Hugging Face. + +01:35.030 --> 01:38.510 +It was a startup that was acquired by Hugging Face a couple of years ago. + +01:38.510 --> 01:41.600 +So Gradio is part of the Hugging Face family. + +01:41.780 --> 01:48.260 +Uh, and as it says on the landing page there, it lets you build and share delightful machine learning + +01:48.290 --> 01:48.620 +apps. + +01:48.620 --> 01:51.620 +And I think you will be delighted by it. + +01:51.830 --> 01:54.530 +Uh, so I promised you it was easy. + +01:54.560 --> 01:57.080 +It really is easy, as you will see. + +01:57.110 --> 02:02.790 +What it comes down to is there is this magical line import Gradio as GR, which is the way people do + +02:02.790 --> 02:03.270 +it. + +02:03.570 --> 02:07.500 +You write a function, any function, a function to do a task. + +02:07.500 --> 02:13.380 +In this case, the function that they've written here is greet takes a name and it replies hello name. 
+ +02:13.380 --> 02:19.890 +And then you can create a user interface based on that function, give it inputs and outputs, and you're + +02:19.890 --> 02:24.120 +going to get a user interface built for you just like that. + +02:24.120 --> 02:26.430 +And that is what we're going to do. + +02:26.970 --> 02:35.430 +So what we're going to do now is create a UI for API calls to GPT and Claude and Gemini, so that you + +02:35.430 --> 02:37.380 +can see how to expose this. + +02:37.410 --> 02:45.870 +We are then going to go ahead and create a UI for the brochure that we built in the last week's lectures. + +02:45.900 --> 02:52.050 +And so that's going to allow us to really package up our application into a nice business app with prototype + +02:52.050 --> 02:52.860 +screens. + +02:52.860 --> 02:57.510 +And of course, we'll throw into the mix streaming and markdown into a UI, since we're pretty good + +02:57.510 --> 03:00.630 +with that already, that's going to be the plan. + +03:00.630 --> 03:03.060 +Let's go over to the lab and get on with it. 
diff --git a/week5/community-contributions/subtitles/srts/59166317/ja_JP.srt b/week5/community-contributions/subtitles/srts/59166317/ja_JP.srt new file mode 100755 index 0000000..90a2b20 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166317/ja_JP.srt @@ -0,0 +1,103 @@ +WEBVTT + +00:00.680 --> 00:08.540 +そして、 LMSの領域への冒険を続ける第2週、 2日目へようこそ。 + +00:08.780 --> 00:14.630 +ええと、 それで今日は、 とても楽しみにしている特別な日なんだ。 + +00:14.720 --> 00:15.830 +ええと、 簡単にまとめると + +00:15.860 --> 00:22.250 +もちろん、 トランスフォーマーについても説明できるようになったし、 6人のトップ・フロンティア・モデルについても語ることができる。 + +00:22.250 --> 00:27.920 +OpenAIのAPIは、 AnthropicやGoogleのAPIと一緒に自信を持って使うことができる。 + +00:27.920 --> 00:33.080 +しかし、 今日は話題を変えて、 グラディオについて話そう。 + +00:33.350 --> 00:37.040 +そして、 グラディオについて少し話したことに気づいた。 + +00:37.040 --> 00:40.490 +本当に素晴らしいし、 楽しみながらやっていくつもりだ。 + +00:40.640 --> 00:46.070 +radioを使ってシンプルなUIを作り、 それをFrontier Modelsに接続する。 + +00:46.070 --> 00:49.130 +言っておくが、 それは簡単なことだ。 + +00:49.580 --> 00:53.420 +では、 なぜユーザー・インターフェースについて大騒ぎするのですか? + +00:53.420 --> 01:00.890 +なぜなら、 データ・サイエンティストとして、 LMエンジニアとして、 より迅速に、 プロトタイプを構築し、 + +01:00.920 --> 01:12.140 +それをオーディエンスやビジネス・スポンサー、 LMSを必要としている人々に公開し、 実に迅速に実行することができるからだ。 + +01:12.140 --> 01:17.000 +フロントエンドの人なら、 あるいはフロントエンドに手を出したことがある人なら、 + +01:17.030 --> 01:26.750 +リアクトのアプリを立ち上げるのがどんな感じか知っているだろう。 + +01:26.750 --> 01:30.380 +私たちはユーザー・インターフェースを超高速で構築することができる。 + +01:30.380 --> 01:35.030 +つまり、 グラディオはハギング・フェイスの一員なのだ。 + +01:35.030 --> 01:38.510 +数年前にハギング・フェイスに買収された新興企業だ。 + +01:38.510 --> 01:41.600 +つまり、 グラディオはハギング・フェイス・ファミリーの一員なのだ。 + +01:41.780 --> 01:48.620 +ランディングページに書いてあるように、 楽しい機械学習アプリを作り、 共有することができる。 + +01:48.620 --> 01:51.620 +きっと喜んでもらえると思う。 + +01:51.830 --> 01:54.530 +ええと、 だから簡単だって約束したでしょ。 + +01:54.560 --> 01:57.080 +見ての通り、 本当に簡単だ。 + +01:57.110 --> 02:03.270 +結局のところ、 グラディオをGRとして輸入するという魔法のようなラインが存在する。 + +02:03.570 --> 02:07.500 +関数、 どんな関数でも、 タスクを実行するための関数を書く。 + +02:07.500 --> 02:13.380 +この場合、 greetという関数は名前を受け取り、 helloという名前を返信する。 + +02:13.380 --> 02:19.890 +そして、 その関数に基づいてユーザー・インターフェースを作成し、 + 
+02:19.890 --> 02:24.120 +入力と出力を与える。 + +02:24.120 --> 02:26.430 +そして、 それが私たちがやろうとしていることだ。 + +02:26.970 --> 02:37.380 +それでは、 GPTとClaudeとGeminiへのAPIコールのためのUIを作成します。 + +02:37.410 --> 02:45.870 +続いて、 先週の講義で作成したパンフレットのUIを作成します。 + +02:45.900 --> 02:52.860 +そうすることで、 プロトタイプのスクリーンを持つ素敵なビジネス・アプリにアプリケーションをパッケージ化することができる。 + +02:52.860 --> 03:00.630 +もちろん、 ストリーミングやマークダウンをUIに取り入れることも考えている。 + +03:00.630 --> 03:03.060 +さっそくラボに行こう。 diff --git a/week5/community-contributions/subtitles/srts/59166317/ko_KR.srt b/week5/community-contributions/subtitles/srts/59166317/ko_KR.srt new file mode 100755 index 0000000..30d261c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166317/ko_KR.srt @@ -0,0 +1,121 @@ +WEBVTT + +00:00.680 --> 00:08.540 +둘째 주, 둘째 날입니다 LMS 왕국으로의 모험은 계속되죠 + +00:08.780 --> 00:14.630 +오늘은 제가 정말 기대하고 있는 아주 특별한 날이에요 + +00:14.720 --> 00:15.830 +요약해 보죠 + +00:15.860 --> 00:22.250 +이제는 트랜스포머도 묘사할 수 있어요 최고의 개척자 모델 6명도 말할 수 있죠 + +00:22.250 --> 00:27.920 +오픈AI의 API 또한 앤스로픽, 구글과 함께 사용할 수 있죠 + +00:27.920 --> 00:33.080 +오늘은 주제를 바꿔서 그라디오에 관해 얘기해 볼게요 + +00:33.350 --> 00:37.040 +비트에 대해 너무 많이 얘기했네요 이유를 알게 될 거예요 + +00:37.040 --> 00:40.490 +정말 멋진 작품이고 재미있게 만들 거예요 + +00:40.640 --> 00:46.070 +라디오를 이용해 간단한 UI를 만들고 프론티어 모델에 연결할 거예요 + +00:46.070 --> 00:49.130 +말씀드렸듯이 쉬울 거예요 + +00:49.580 --> 00:53.420 +그런데 왜 사용자 인터페이스를 두고 그렇게 소란을 피우는 거죠? 
+ +00:53.420 --> 01:00.890 +데이터 과학자나 LMS 엔지니어로서 시제품을 더 빨리 만들 + +01:00.920 --> 01:08.510 +수 있고 고객과 비즈니스 스폰서 LMS가 필요한 사람들에게 빠르게 + +01:08.540 --> 01:12.140 +노출할 수 있으니까요 + +01:12.140 --> 01:17.000 +여러분이 프런트엔드를 사용하거나 약간 해본 적이 있다면 리액트 앱 같은 것을 + +01:17.030 --> 01:22.370 +세우는 것이 어떤 것인지 알 수 있습니다 무언가를 올리고 실행하는 데에는 많은 상용 + +01:22.400 --> 01:26.750 +코드가 있습니다 모델과 함께 할 필요가 없다는 것이 밝혀졌죠 + +01:26.750 --> 01:30.380 +사용자 인터페이스를 아주 빨리 만들 수 있어요 그게 오늘 할 일이죠 + +01:30.380 --> 01:35.030 +그러니까 그라디오는 페이스 포옹의 일부예요 + +01:35.030 --> 01:38.510 +몇 년 전 페이스 포옹으로 인수한 스타트업 회사예요 + +01:38.510 --> 01:41.600 +그래디오는 포옹하는 얼굴 가족이에요 + +01:41.780 --> 01:48.620 +랜딩 페이지에 적혀 있듯이 즐거운 머신 러닝 앱을 만들고 공유할 수 있어요 + +01:48.620 --> 01:51.620 +당신도 좋아할 거예요 + +01:51.830 --> 01:54.530 +쉬울 거라고 약속했죠 + +01:54.560 --> 01:57.080 +보시면 알겠지만 정말 쉬워요 + +01:57.110 --> 02:02.790 +결국 그라디오를 GR로 불러오는 마술적인 경계가 있어요 사람들이 그렇게 + +02:02.790 --> 02:03.270 +하죠 + +02:03.570 --> 02:07.500 +어떤 함수든 작업을 위한 함수를 작성하세요 + +02:07.500 --> 02:13.380 +이 경우 Greet라는 함수가 있는데 이름을 취하면 hello name을 응답하죠 + +02:13.380 --> 02:19.890 +함수를 기반으로 사용자 인터페이스를 생성하고 입력과 출력을 제공하면 + +02:19.890 --> 02:24.120 +사용자 인터페이스가 만들어지는 거예요 + +02:24.120 --> 02:26.430 +그게 우리가 할 일이죠 + +02:26.970 --> 02:35.430 +이제 GPT와 클로드와 제미니에 API 호출을 위한 UI를 생성하겠습니다 이걸 어떻게 노출하는지 + +02:35.430 --> 02:37.380 +보실 수 있게요 + +02:37.410 --> 02:45.870 +그런 다음 지난 강의에서 만든 브로슈어의 UI를 생성할 거예요 + +02:45.900 --> 02:52.050 +그래서 응용 프로그램을 프로토타입 스크린이 있는 멋진 비즈니스 앱으로 패키지할 수 있게 + +02:52.050 --> 02:52.860 +해주죠 + +02:52.860 --> 02:57.510 +물론 믹스 스트리밍과 마크다운을 UI에 넣을 거예요 이미 꽤 잘 + +02:57.510 --> 03:00.630 +하고 있으니까요 그게 계획이 될 거예요 + +03:00.630 --> 03:03.060 +Get it, get it, get it, it! 
실험실로 가서 검사해 보죠 diff --git a/week5/community-contributions/subtitles/srts/59166353/en_US.srt b/week5/community-contributions/subtitles/srts/59166353/en_US.srt new file mode 100755 index 0000000..73534b6 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166353/en_US.srt @@ -0,0 +1,61 @@ +WEBVTT + +00:00.590 --> 00:04.460 +Well, congratulations on leveling up yet again. + +00:04.520 --> 00:08.690 +You've got some real hard skills that you've added to your arsenal. + +00:08.750 --> 00:14.570 +Uh, it's, uh, been a really, really enjoyable last few lectures. + +00:14.570 --> 00:20.990 +So at this point, not only can you confidently use OpenAI's API, not only can you throw Anthropic + +00:20.990 --> 00:25.970 +and Gemini into the mix, but you can also build UIs for your solution. + +00:25.970 --> 00:32.660 +Doing reasonably sophisticated things like pick between models and make changes to prompts, and generate + +00:32.660 --> 00:35.750 +company brochures with markdown and streaming. + +00:35.810 --> 00:41.630 +Uh, it's all pretty, uh, high functionality stuff, so well done. + +00:41.750 --> 00:45.650 +Tomorrow, though, uh, we raised the bar again. + +00:45.650 --> 00:53.090 +We'll be able to build chat UIs in Gradio, so these are more complex UIs with much more going on with + +00:53.120 --> 00:55.880 +like, an instant message style interaction. + +00:55.880 --> 00:57.740 +It sounds complex. + +00:57.770 --> 00:58.850 +We'll see if it is. + +00:59.060 --> 01:05.900 +Uh, we're going to talk about providing more context in a prompt, including Multi-shot prompting something + +01:05.900 --> 01:10.160 +which hopefully you already experimented with a bit in some of the earlier exercises, and we'll keep + +01:10.160 --> 01:11.420 +going with that. + +01:11.480 --> 01:16.010 +And ultimately we're going to build a customer support assistant. + +01:16.040 --> 01:19.460 +We're going to build a real tool and see how that works. 
+ +01:19.460 --> 01:22.490 +And that's all happening starting in the next lecture. + +01:22.490 --> 01:23.510 +And I'll see you there. diff --git a/week5/community-contributions/subtitles/srts/59166353/ja_JP.srt b/week5/community-contributions/subtitles/srts/59166353/ja_JP.srt new file mode 100755 index 0000000..ead2e7f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166353/ja_JP.srt @@ -0,0 +1,52 @@ +WEBVTT + +00:00.590 --> 00:04.460 +まあ、 またしてもレベルアップおめでとう。 + +00:04.520 --> 00:08.690 +あなたは本当にハードな技術を自分の武器に加えた。 + +00:08.750 --> 00:14.570 +この数回の講義は、 本当に本当に楽しかった。 + +00:14.570 --> 00:20.990 +つまりこの時点で、 OpenAIのAPIを自信を持って使えるだけでなく、 AnthropicやGeminiをミックスに放り込めるだけでなく、 + +00:20.990 --> 00:25.970 +ソリューションのUIを構築することもできる。 + +00:25.970 --> 00:32.660 +モデルの選択やプロンプトの変更、 マークダウンやストリーミングを使った会社案内の作成など、 + +00:32.660 --> 00:35.750 +それなりに洗練されたことができる。 + +00:35.810 --> 00:41.630 +機能的なものばかりで、 よくできているよ。 + +00:41.750 --> 00:45.650 +明日はまたハードルを上げたけどね。 + +00:45.650 --> 00:55.880 +GradioでチャットUIを構築できるようになるので、 インスタント・メッセージのようなインタラクションで、 より複雑なUIを構築できるようになります。 + +00:55.880 --> 00:57.740 +複雑そうだ。 + +00:57.770 --> 00:58.850 +そうなるかどうかはこれからだ。 + +00:59.060 --> 01:11.420 +マルチショット・プロンプトを含め、 プロンプトでより多くのコンテクストを提供することについてお話しします。 + +01:11.480 --> 01:16.010 +そして最終的には、 カスタマー・サポートのアシスタントを作るつもりだ。 + +01:16.040 --> 01:19.460 +実際のツールを作って、 それがどう機能するか見てみるつもりだ。 + +01:19.460 --> 01:22.490 +そして、 それはすべて次の講義から始まる。 + +01:22.490 --> 01:23.510 +そこで会おう diff --git a/week5/community-contributions/subtitles/srts/59166353/ko_KR.srt b/week5/community-contributions/subtitles/srts/59166353/ko_KR.srt new file mode 100755 index 0000000..68264fb --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166353/ko_KR.srt @@ -0,0 +1,61 @@ +WEBVTT + +00:00.590 --> 00:04.460 +또 한 번 레벨 업을 축하해요 + +00:04.520 --> 00:08.690 +어려운 기술을 무기로 활용하고 있어요 + +00:08.750 --> 00:14.570 +지난 강의 몇 개는 정말 즐거웠어요 + +00:14.570 --> 00:20.990 +오픈AI API를 자신 있게 사용할 수 있을 뿐 아니라 앤스로픽과 제미니를 + +00:20.990 --> 00:25.970 +결합할 수 있을 뿐만 아니라 솔루션 UI도 구축할 수 있죠 + 
+00:25.970 --> 00:32.660 +모델 사이에서 고르기와 프롬프트 변경하기 같은 꽤 복잡한 작업을 하고 마크다운과 스트리밍을 + +00:32.660 --> 00:35.750 +이용한 회사 브로슈어를 생성하죠 + +00:35.810 --> 00:41.630 +아주 높은 기능성이에요 잘 만들었어요 + +00:41.750 --> 00:45.650 +내일은 기대치를 한 단계 높였어요 + +00:45.650 --> 00:53.090 +Gadio에서 채팅 UI도 만들 수 있어요 좀 더 복잡한 UI로 훨씬 더 많은 일이 진행되죠 인스턴트 + +00:53.120 --> 00:55.880 +메시지 스타일 상호 작용 같은 거요 + +00:55.880 --> 00:57.740 +복잡하게 들리네요 + +00:57.770 --> 00:58.850 +과연 그럴까요? + +00:59.060 --> 01:05.900 +프롬프트에서 더 많은 컨텍스트 제공에 대해 말씀드리겠습니다 멀티샷 프롬프트를 포함해서요 앞서 + +01:05.900 --> 01:10.160 +몇몇 연습에서 이미 비트로 실험해 보셨길 바랍니다 계속 그렇게 + +01:10.160 --> 01:11.420 +할 거예요 + +01:11.480 --> 01:16.010 +궁극적으로는 고객 지원 비서를 만들 거예요 + +01:16.040 --> 01:19.460 +실제 도구를 만들어 어떻게 작동하는지 보죠 + +01:19.460 --> 01:22.490 +다음 강의에서 그 모든 일이 일어나죠 + +01:22.490 --> 01:23.510 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/59166421/en_US.srt b/week5/community-contributions/subtitles/srts/59166421/en_US.srt new file mode 100755 index 0000000..7bffc34 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166421/en_US.srt @@ -0,0 +1,319 @@ +WEBVTT + +00:00.830 --> 00:04.250 +Welcome back to the radio day in the lab. + +00:04.250 --> 00:05.180 +More to do. + +00:05.210 --> 00:06.620 +Let's keep going. + +00:06.620 --> 00:14.150 +Where we left off is we had just built a simple user interface that was calling an LLM and telling a + +00:14.150 --> 00:16.610 +very, uh, good joke. + +00:16.850 --> 00:20.060 +Uh, let's keep going with this. + +00:20.060 --> 00:28.250 +What we're going to do next is ask for the, uh, assistant to respond in markdown, uh, as a way of, + +00:28.250 --> 00:32.540 +uh, um, getting better looking user interfaces. + +00:32.960 --> 00:40.190 +Um, and wouldn't it be nice if we wanted to show results in gradio with good formatting written in + +00:40.190 --> 00:40.940 +markdown? 
+ +00:40.940 --> 00:47.690 +If we could just have that instead of text box, just replace it with the word markdown, and then the + +00:47.690 --> 00:51.560 +output would be in perfectly formatted markdown. + +00:51.560 --> 00:53.180 +That would be great wouldn't it? + +00:53.210 --> 00:54.380 +Wouldn't it be nice? + +00:55.190 --> 00:56.900 +You're probably getting the idea here. + +00:57.770 --> 00:59.030 +Things just really are. + +00:59.030 --> 00:59.840 +This good. + +01:00.020 --> 01:02.510 +Uh, So let's say your message. + +01:02.510 --> 01:03.740 +Let's say, um. + +01:03.890 --> 01:14.240 +Um, how do I get from Times Square to Times Square like that to Grand Central? + +01:14.960 --> 01:17.120 +I've got a question for New York navigation. + +01:17.120 --> 01:18.710 +Let's see how it does. + +01:19.640 --> 01:21.440 +It's thinking about that. + +01:22.400 --> 01:23.360 +And there we go. + +01:23.360 --> 01:24.440 +Here's a response. + +01:24.440 --> 01:28.370 +And you can see to get from Times Square to Grand Central Terminal in New York City, it figured out + +01:28.370 --> 01:29.660 +that's what I was talking about. + +01:29.870 --> 01:34.040 +Follow these steps and you can see it's good headings. + +01:34.310 --> 01:41.810 +And it's got a nice sub bullets and numbers and all the rest of it, as described in the markdown that + +01:41.810 --> 01:43.820 +came back from GPT four. + +01:43.850 --> 01:47.000 +Oh very easy, very nice. + +01:47.390 --> 01:50.360 +Let's have a look at what else we can do. + +01:51.500 --> 01:53.420 +Uh, streaming. + +01:53.420 --> 01:55.700 +Streaming is something we got used to last time. + +01:55.700 --> 02:01.840 +So can we stream results back to Gradio user interfaces, just as we did when it was coming back into + +02:01.840 --> 02:03.820 +a Jupyter output cell. + +02:03.820 --> 02:04.960 +So here we go. + +02:04.990 --> 02:07.300 +We change our function. + +02:07.330 --> 02:10.840 +It used to be the message GPT. 
+ +02:10.870 --> 02:12.400 +Now we're making it stream GPT. + +02:12.430 --> 02:13.420 +Different function. + +02:13.420 --> 02:15.610 +And the key thing is that this isn't actually a function. + +02:15.610 --> 02:20.320 +It's a generator in that it's going to end by yielding a result. + +02:20.320 --> 02:26.170 +And Gradio is going to detect that we're giving it a generator, not a function. + +02:26.170 --> 02:32.830 +And because of that, Gradio is automatically going to be iterative and decide to fill in, um, piece + +02:32.830 --> 02:35.980 +by piece as it comes back from this generator. + +02:36.100 --> 02:38.020 +So usual story. + +02:38.020 --> 02:45.100 +I create the messages and then you'll remember this time it's the same API call, but we pass in stream + +02:45.100 --> 02:46.120 +equals true. + +02:46.150 --> 02:47.770 +You remember how Claude does it? + +02:47.860 --> 02:48.790 +Hopefully you do. + +02:48.820 --> 02:54.220 +With Claude, you don't have an attribute, but instead you call dot stream instead of dot create. + +02:54.220 --> 02:56.290 +But otherwise it's very similar. + +02:56.350 --> 02:58.810 +So one thing that's worth noting here. + +02:58.840 --> 03:01.110 +Just a tiny subtlety with Gradio. + +03:01.140 --> 03:08.820 +When you are streaming back the results to Gradio, you don't stream back chunk by chunk of the results. + +03:08.820 --> 03:15.870 +You have to stream back the entire cumulative result so far and stream back a longer and longer cumulative + +03:15.870 --> 03:16.410 +result. + +03:16.410 --> 03:20.520 +So you can see what I'm doing is I'm I'm sort of starting with an empty string. + +03:20.520 --> 03:28.110 +And then for each chunk I'm adding that in and then yielding the total cumulative result so far. + +03:28.260 --> 03:34.830 +Um, and if you don't do that, what you'll see is each individual chunk will appear in the output cell + +03:34.830 --> 03:37.140 +and then disappear and will be replaced by something else. 
+ +03:37.140 --> 03:38.910 +So you have to do it this way. + +03:38.910 --> 03:42.900 +If you don't see what I mean, try doing yield chunk instead of yield result. + +03:43.050 --> 03:43.290 +Sorry. + +03:43.320 --> 03:46.200 +Yield chunk .0. delta dot content. + +03:46.320 --> 03:46.590 +Uh. + +03:46.590 --> 03:49.350 +And you'll see exactly what I mean. + +03:49.350 --> 03:50.910 +It's not not going to look good. + +03:51.150 --> 03:59.640 +Uh, anyway, that is our stream GPT uh, and wouldn't it be nice if all we needed to do was replace + +03:59.760 --> 04:06.390 +the the function that it used to be message GPT with stream, GPT and Gradio just figured out the rest. + +04:06.390 --> 04:09.300 +It figured out that okay, this is a generator, not a function. + +04:09.300 --> 04:11.610 +Therefore, they're going to want to stream back results. + +04:11.610 --> 04:15.360 +Therefore, I need to have a sort of typewriter animation style effect. + +04:15.390 --> 04:18.660 +Let's see if it really can be that simple. + +04:18.660 --> 04:20.190 +Can it be that simple? + +04:20.520 --> 04:21.660 +Here we go. + +04:21.690 --> 04:29.940 +Uh, how do I get from Times Square to Grand Central? + +04:32.310 --> 04:33.360 +And there we go. + +04:33.510 --> 04:34.470 +Of course, it's that simple. + +04:34.500 --> 04:35.460 +Of course it is. + +04:35.490 --> 04:36.930 +Streams the results. + +04:36.930 --> 04:37.920 +They look great. + +04:37.920 --> 04:40.380 +Markdown looks fantastic. + +04:40.890 --> 04:46.710 +Uh, so, uh, of course it wouldn't be doing my job if I didn't show you how easy it is with Claude + +04:46.710 --> 04:47.280 +as well. + +04:47.280 --> 04:51.540 +I mentioned it before, and now you can see there is Claude's API call. + +04:51.540 --> 04:52.800 +It's very similar. + +04:52.800 --> 04:55.650 +You call dot stream, you don't pass in the parameter. 
+ +04:55.650 --> 05:00.470 +You remember that you do have to specify max tokens and the system message goes in separately. + +05:00.500 --> 05:02.240 +Otherwise very similar. + +05:02.270 --> 05:05.510 +The streaming back is just for results as stream. + +05:05.690 --> 05:08.030 +So with result as stream a context manager. + +05:08.030 --> 05:13.070 +And then we yield the full response just as before. + +05:13.550 --> 05:16.670 +And at this point it's going to be boringly simple. + +05:16.670 --> 05:17.780 +You get the joke. + +05:17.810 --> 05:21.050 +You simply pass in that function instead. + +05:21.050 --> 05:24.110 +And now we are talking to Claude instead. + +05:24.110 --> 05:25.640 +We'll ask it the same question though. + +05:25.670 --> 05:31.910 +How do I get from Times Square to Grand Central? + +05:33.290 --> 05:40.250 +And here comes Claude's response, a bit shorter, just two options, but nicely structured. + +05:40.250 --> 05:41.720 +Nicely formatted. + +05:41.720 --> 05:43.820 +Very good indeed. + +05:44.120 --> 05:53.750 +Um, so we can take this one step forwards by having the ability to choose either GPT or Claude. + +05:53.750 --> 05:56.840 +But I'm going to get to that in the very next session. + +05:56.840 --> 05:57.980 +So hang on in there. diff --git a/week5/community-contributions/subtitles/srts/59166421/ja_JP.srt b/week5/community-contributions/subtitles/srts/59166421/ja_JP.srt new file mode 100755 index 0000000..8a58e54 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166421/ja_JP.srt @@ -0,0 +1,283 @@ +WEBVTT + +00:00.830 --> 00:04.250 +ラボのラジオ・デイへようこそ。 + +00:04.250 --> 00:05.180 +もっとやることがある。 + +00:05.210 --> 00:06.620 +続けよう。 + +00:06.620 --> 00:16.610 +LLMを呼び出し、 とてもいいジョークを言うシンプルなユーザー・インターフェースを作ったところだった。 + +00:16.850 --> 00:20.060 +ええと、 このまま続けましょう。 + +00:20.060 --> 00:32.540 +次にやることは、 より見栄えのするユーザー・インターフェースを得る方法として、 マークダウンで応答するようアシスタントに求めることだ。 + +00:32.960 --> 00:40.940 +マークダウンで書かれた優れた書式で、 グラディオの結果を表示できたらいいと思わない? 
+ +00:40.940 --> 00:51.560 +テキストボックスの代わりにマークダウンという言葉に置き換えるだけで、 完璧にフォーマットされたマークダウンが出力される。 + +00:51.560 --> 00:53.180 +それは素晴らしいことだろう? + +00:53.210 --> 00:54.380 +いいことだと思わない? + +00:55.190 --> 00:56.900 +このあたりはお分かりだろう。 + +00:57.770 --> 00:59.030 +物事は本当にそうなんだ。 + +00:59.030 --> 00:59.840 +これはいい。 + +01:00.020 --> 01:02.510 +では、 あなたのメッセージを言いましょう。 + +01:02.510 --> 01:03.740 +そうだな。 + +01:03.890 --> 01:14.240 +あの、 タイムズ・スクエアからグランド・セントラルまでどうやって行けばいいんですか? + +01:14.960 --> 01:17.120 +ニューヨーク・ナビゲーションに質問がある。 + +01:17.120 --> 01:18.710 +どうなるか見てみよう。 + +01:19.640 --> 01:21.440 +それを考えているんだ。 + +01:22.400 --> 01:23.360 +さあ、 行こう。 + +01:23.360 --> 01:24.440 +これが返答だ。 + +01:24.440 --> 01:29.660 +ニューヨークのタイムズ・スクエアからグランド・セントラル・ターミナルに行くには、 私が話していたことを理解する必要がある。 + +01:29.870 --> 01:34.040 +これらの手順を踏めば、 良い見出しであることがわかるだろう。 + +01:34.310 --> 01:43.820 +そして、 GPT4から戻ってきたマークダウンに記されているように、 素敵なサブの弾丸や数字、 その他もろもろを備えている。 + +01:43.850 --> 01:47.000 +とても簡単だ。 + +01:47.390 --> 01:50.360 +他に何ができるか見てみよう。 + +01:51.500 --> 01:53.420 +ええと、 ストリーミング。 + +01:53.420 --> 01:55.700 +ストリーミングは前回で慣れた。 + +01:55.700 --> 02:03.820 +そこで、 Jupyterの出力セルに戻ってきたときと同じように、 結果をGradioのユーザー・インターフェースにストリームバックすることができる。 + +02:03.820 --> 02:04.960 +それでは、 どうぞ。 + +02:04.990 --> 02:07.300 +私たちは機能を変える。 + +02:07.330 --> 02:10.840 +以前はGPTというメッセージだった。 + +02:10.870 --> 02:12.400 +今はGPTストリームにしている。 + +02:12.430 --> 02:13.420 +機能が違う。 + +02:13.420 --> 02:15.610 +そして重要なのは、 これは実際には機能ではないということだ。 + +02:15.610 --> 02:20.320 +結果を出して終わるという点ではジェネレーターだ。 + +02:20.320 --> 02:26.170 +そしてグラディオは、 私たちが関数ではなくジェネレーターを与えていることを検知する。 + +02:26.170 --> 02:35.980 +そのため、 グラディオは自動的に反復性を持ち、 このジェネレーターから戻ってくるデータを少しずつ埋めていくことになる。 + +02:36.100 --> 02:38.020 +だからいつもの話だ。 + +02:38.020 --> 02:46.120 +メッセージを作成し、 同じAPIコールであることを思い出してほしい。 + +02:46.150 --> 02:47.770 +クロードのやり方を覚えているか? 
+ +02:47.860 --> 02:48.790 +そうなるといいね。 + +02:48.820 --> 02:54.220 +クロードの場合は、 アトリビュートを持たず、 ドット・クリエイトの代わりにドット・ストリームを呼び出す。 + +02:54.220 --> 02:56.290 +でも、 それ以外はよく似ている。 + +02:56.350 --> 02:58.810 +ここでひとつ注目すべきことがある。 + +02:58.840 --> 03:01.110 +ただ、 グラディオの場合はほんの少し微妙だ。 + +03:01.140 --> 03:08.820 +結果をGradioにストリームバックする場合、 結果のチャンクごとにストリームバックすることはない。 + +03:08.820 --> 03:16.410 +これまでの累積結果をすべてストリームバックし、 さらに長い累積結果をストリームバックしなければならない。 + +03:16.410 --> 03:20.520 +つまり、 私がやっているのは、 空の文字列から始めるということだ。 + +03:20.520 --> 03:28.110 +そして、 各チャンクごとにそれを足し算して、 これまでの累積結果を出す。 + +03:28.260 --> 03:37.140 +そうしないと、 個々の塊が出力セルに現れては消え、 別のものに置き換わってしまう。 + +03:37.140 --> 03:38.910 +だから、 こうしなければならない。 + +03:38.910 --> 03:42.900 +意味がわからない場合は、 yield resultの代わりにyield chunkを試してみてほしい。 + +03:43.050 --> 03:43.290 +申し訳ない。 + +03:43.320 --> 03:46.200 +収量塊. 0. デルタ・ドット・コンテンツ + +03:46.320 --> 03:46.590 +ええと。 + +03:46.590 --> 03:49.350 +そうすれば、 私が言っている意味がよくわかるだろう。 + +03:49.350 --> 03:50.910 +見栄えは良くないだろう。 + +03:51.150 --> 03:59.640 +とにかく、 これが我々のストリームGPTだ。 メッセージGPTであった関数をストリームGPTに置き換えるだけで、 + +03:59.760 --> 04:06.390 +あとはGradioがやってくれるとしたら、 それは素晴らしいことではないだろうか。 + +04:06.390 --> 04:09.300 +これは関数ではなくジェネレーターなんだ。 + +04:09.300 --> 04:11.610 +そのため、 彼らは結果をストリーミングで返したいと思っているはずだ。 + +04:11.610 --> 04:15.360 +だから、 タイプライター・アニメーション風のエフェクトが必要なんだ。 + +04:15.390 --> 04:18.660 +本当にそんな単純なことができるのか、 見てみよう。 + +04:18.660 --> 04:20.190 +そんな単純なことでいいのだろうか? + +04:20.520 --> 04:21.660 +さあ、 始めよう。 + +04:21.690 --> 04:29.940 +タイムズ・スクエアからグランド・セントラルへはどうやって行くの? 
+ +04:32.310 --> 04:33.360 +さあ、 行こう。 + +04:33.510 --> 04:34.470 +もちろん、 それは簡単なことだ。 + +04:34.500 --> 04:35.460 +もちろんそうだ。 + +04:35.490 --> 04:36.930 +結果をストリームする。 + +04:36.930 --> 04:37.920 +見た目も素晴らしい。 + +04:37.920 --> 04:40.380 +マークダウンは素晴らしい。 + +04:40.890 --> 04:47.280 +ええと、 だから、 もちろん、 クロードと一緒ならどんなに簡単かお見せしなければ、 私の仕事ではありません。 + +04:47.280 --> 04:51.540 +前にも書いたが、 クロードのAPIコールがあるのがわかるだろう。 + +04:51.540 --> 04:52.800 +とてもよく似ている。 + +04:52.800 --> 04:55.650 +ドットストリームを呼び出し、 パラメータは渡さない。 + +04:55.650 --> 05:00.470 +トークンの最大数を指定する必要があり、 システム・メッセージは別に入力されることを覚えておいてほしい。 + +05:00.500 --> 05:02.240 +それ以外は非常によく似ている。 + +05:02.270 --> 05:05.510 +ストリーミングバックは、 ストリームとしての結果のためだけのものだ。 + +05:05.690 --> 05:08.030 +つまり、 結果をストリームとしてコンテキスト・マネージャーとする。 + +05:08.030 --> 05:13.070 +そして、 以前と同じように全回答を返す。 + +05:13.550 --> 05:16.670 +そしてこの時点では、 退屈なほどシンプルなものになるだろう。 + +05:16.670 --> 05:17.780 +冗談はわかるだろう。 + +05:17.810 --> 05:21.050 +代わりにその関数を渡すだけだ。 + +05:21.050 --> 05:24.110 +そして今、 私たちは代わりにクロードと話している。 + +05:24.110 --> 05:25.640 +同じ質問をしよう。 + +05:25.670 --> 05:31.910 +タイムズ・スクエアからグランド・セントラルへの行き方は? 
+ +05:33.290 --> 05:40.250 +そして、 クロードの回答が来た。 少し短く、 2つの選択肢だけだが、 うまく構成されている。 + +05:40.250 --> 05:41.720 +うまくフォーマットされている。 + +05:41.720 --> 05:43.820 +実に素晴らしい。 + +05:44.120 --> 05:53.750 +GPTとクロードのどちらかを選択できるようにすることで、 一歩前進させることができます。 + +05:53.750 --> 05:56.840 +でも、 それは次のセッションで話そうと思っている。 + +05:56.840 --> 05:57.980 +だから、 そこで頑張るんだ。 diff --git a/week5/community-contributions/subtitles/srts/59166421/ko_KR.srt b/week5/community-contributions/subtitles/srts/59166421/ko_KR.srt new file mode 100755 index 0000000..4e09ea2 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166421/ko_KR.srt @@ -0,0 +1,313 @@ +WEBVTT + +00:00.830 --> 00:04.250 +실험실의 라디오 방송에 잘 오셨어요 + +00:04.250 --> 00:05.180 +할 일이 더 있어요 + +00:05.210 --> 00:06.620 +계속하죠 + +00:06.620 --> 00:14.150 +마지막으로 본 것은 간단한 사용자 인터페이스를 구축한 것이었죠 LLM을 호출하고 아주 + +00:14.150 --> 00:16.610 +재미있는 농담을 했어요 + +00:16.850 --> 00:20.060 +계속 진행하죠 + +00:20.060 --> 00:28.250 +다음으로 할 일은 보조에게 가격 인하를 요청하는 겁니다 보다 나은 사용자 + +00:28.250 --> 00:32.540 +인터페이스를 얻는 방법으로요 + +00:32.960 --> 00:40.940 +그러디오에서 좋은 서식을 마크다운으로 써서 결과를 보여주면 좋지 않을까요? + +00:40.940 --> 00:47.690 +텍스트 상자 대신에 마크다운으로 대체할 수 있다면 완벽한 형식의 + +00:47.690 --> 00:51.560 +마크다운으로 결과물이 나올 거예요 + +00:51.560 --> 00:53.180 +그러면 정말 좋겠죠? + +00:53.210 --> 00:54.380 +좋지 않을까요? + +00:55.190 --> 00:56.900 +무슨 말인지 아시겠죠? + +00:57.770 --> 00:59.030 +정말 그래요 + +00:59.030 --> 00:59.840 +이 정도면 돼요 + +01:00.020 --> 01:02.510 +메시지를 생각해 보죠 + +01:02.510 --> 01:03.740 +예를 들어 볼게요 + +01:03.890 --> 01:14.240 +어떻게 타임스스퀘어에서 그랜드 센트럴까지 가죠? Get you get the time + +01:14.960 --> 01:17.120 +뉴욕 항법팀에 질문이 있어요 + +01:17.120 --> 01:18.710 +어떻게 되나 보죠 + +01:19.640 --> 01:21.440 +그 생각을 하는 거예요 + +01:22.400 --> 01:23.360 +다 됐어요 + +01:23.360 --> 01:24.440 +이렇게 대답하죠 + +01:24.440 --> 01:28.370 +타임스퀘어에서 뉴욕 그랜드 센트럴 터미널까지 가는 길은 모두 제 말을 이해했어요 + +01:28.370 --> 01:29.660 +Get it + +01:29.870 --> 01:34.040 +이 단계를 따라가면 멋진 헤딩이 나와요 + +01:34.310 --> 01:41.810 +괜찮은 sub 탄환과 번호도 있고 GPT 4에서 받은 표시된 모든 + +01:41.810 --> 01:43.820 +것이 있어요. 
+ +01:43.850 --> 01:47.000 +아주 쉽고 좋네요 + +01:47.390 --> 01:50.360 +다른 방법도 한번 살펴보죠 + +01:51.500 --> 01:53.420 +스트리밍요 + +01:53.420 --> 01:55.700 +스트리밍은 지난번에도 익숙해졌잖아요 + +01:55.700 --> 02:01.840 +그라디오 유저 인터페이스로 결과를 스트리밍할 수 있나요? 주피터 출력 셀로 되돌아올 + +02:01.840 --> 02:03.820 +때 했던 것처럼요 + +02:03.820 --> 02:04.960 +자, 시작하죠 + +02:04.990 --> 02:07.300 +함수를 바꾸죠 + +02:07.330 --> 02:10.840 +예전엔 GPT라는 메시지였죠 + +02:10.870 --> 02:12.400 +이제 GPT 스트림으로 만들 거예요 + +02:12.430 --> 02:13.420 +함수가 달라요 + +02:13.420 --> 02:15.610 +중요한 건 이게 함수가 아니란 거죠 + +02:15.610 --> 02:20.320 +발전기 같은 거예요 결과적으로 끝날 거예요 + +02:20.320 --> 02:26.170 +그라디오는 우리가 함수 기능을 부여한 게 아니라 발전기를 줬다는 걸 눈치챌 거예요 + +02:26.170 --> 02:32.830 +그 때문에 그라디오는 자동으로 반복적으로 재생되고 발전기에서 돌아오는 + +02:32.830 --> 02:35.980 +부분마다 채워넣게 되죠 + +02:36.100 --> 02:38.020 +늘 있는 일이죠 + +02:38.020 --> 02:45.100 +메시지를 생성하면 이번엔 같은 API 호출이란 걸 기억하실 겁니다 하지만 스트리밍에서 넘긴 건 + +02:45.100 --> 02:46.120 +true죠 + +02:46.150 --> 02:47.770 +클로드가 어떻게 하는지 알죠? + +02:47.860 --> 02:48.790 +그러길 바라요 + +02:48.820 --> 02:54.220 +클로드는 특성이 없어요 대신 .Create 대신 .Timle을 호출하죠 + +02:54.220 --> 02:56.290 +그 외에는 아주 비슷해요 + +02:56.350 --> 02:58.810 +여기서 주목할 게 하나 있어요 + +02:58.840 --> 03:01.110 +그래디오와 아주 미묘하게 일치하죠 + +03:01.140 --> 03:08.820 +결과를 그라디오로 스트리밍할 때 결과를 하나씩 스트리밍하지 않아요 + +03:08.820 --> 03:15.870 +지금까지 누적된 결과를 전부 스트리밍해야 해요 그리고 누적 결과를 점점 더 오랫동안 스트리밍해야 + +03:15.870 --> 03:16.410 +하죠 + +03:16.410 --> 03:20.520 +제가 뭘 하는지 보이시죠 빈 문자열로 시작해요 + +03:20.520 --> 03:28.110 +각각의 덩어리에 추가하고 지금까지 누적된 총 결과를 산출하는 거죠 + +03:28.260 --> 03:34.830 +그렇게 하지 않으면 출력 셀에 각 덩어리가 나타났다가 사라지고 + +03:34.830 --> 03:37.140 +다른 것으로 대체되죠 + +03:37.140 --> 03:38.910 +이렇게 해야 해요 + +03:38.910 --> 03:42.900 +무슨 뜻인지 모르겠다면 수확 결과 대신 수확량 덩어리를 입력해 보세요 + +03:43.050 --> 03:43.290 +미안해요 + +03:43.320 --> 03:46.200 +큰 덩어리를요 0살요 델타 닷 콘텐츠요 + +03:46.320 --> 03:46.590 +네 + +03:46.590 --> 03:49.350 +무슨 말인지 알게 될 거예요 + +03:49.350 --> 03:50.910 +보기 안 좋을 거예요 + +03:51.150 --> 03:59.640 +어쨌든 이게 스트림 GPT입니다 메시지 GPT였던 함수를 스트림으로 대체하기만 하면 + +03:59.760 --> 04:06.390 
+좋을 것 같은데요 GPT와 Gadio가 나머지를 해결했네요 + +04:06.390 --> 04:09.300 +이건 발전기일 뿐 함수가 아니란 걸 알아냈죠 + +04:09.300 --> 04:11.610 +따라서 결과를 스트리밍하길 원할 거예요 + +04:11.610 --> 04:15.360 +그래서 타자기 애니메이션 같은 효과를 내야 했죠 + +04:15.390 --> 04:18.660 +그렇게 간단한지 확인해 보죠 + +04:18.660 --> 04:20.190 +그렇게 간단해요? + +04:20.520 --> 04:21.660 +시작할게요 + +04:21.690 --> 04:29.940 +Get it, get it, get it, it 타임스스퀘어에서 그랜드 센트럴까지 어떻게 가죠? + +04:32.310 --> 04:33.360 +다 됐어요 + +04:33.510 --> 04:34.470 +그럼요, 간단하죠 + +04:34.500 --> 04:35.460 +당연히 그렇겠죠 + +04:35.490 --> 04:36.930 +결과를 내보내죠 + +04:36.930 --> 04:37.920 +잘 어울려요 + +04:37.920 --> 04:40.380 +마크다운이 아주 멋져요 + +04:40.890 --> 04:47.280 +클로드와의 관계가 얼마나 쉬운지 보여드리지 않으면 제 일이 아니겠죠 + +04:47.280 --> 04:51.540 +전에 언급했었죠 클로드의 API 호출이 저기 있는 게 보이시죠 + +04:51.540 --> 04:52.800 +아주 비슷해요 + +04:52.800 --> 04:55.650 +닷 스트림을 호출하면 매개변수에서 통과 못 해요 + +04:55.650 --> 05:00.470 +최대 토큰을 지정해야 한다는 걸 기억하세요 시스템 메시지는 따로 보내지죠 + +05:00.500 --> 05:02.240 +그 외에는 아주 비슷해요 + +05:02.270 --> 05:05.510 +스트리밍은 결과만을 위한 거죠 + +05:05.690 --> 05:08.030 +결과는 스트림으로서 컨텍스트 관리자죠 + +05:08.030 --> 05:13.070 +그럼 전처럼 완전한 반응을 보여드리죠 + +05:13.550 --> 05:16.670 +이 시점에서 지루할 정도로 간단해요 + +05:16.670 --> 05:17.780 +Get it, get it, get it, get it, it! 농담 이해하죠? + +05:17.810 --> 05:21.050 +그 함수를 전달하는 거죠 + +05:21.050 --> 05:24.110 +지금은 클로드랑 얘기하고 있어요 + +05:24.110 --> 05:25.640 +우리도 같은 질문을 할 거예요 + +05:25.670 --> 05:31.910 +Get it, get it, get it, it, it, it, it! 타임스스퀘어에서 그랜드 센트럴까지 어떻게 가죠? 
+
+05:33.290 --> 05:40.250
+클로드의 답장은 비트보다 짧고 두 가지밖에 없지만 구조가 잘 짜여 있죠
+
+05:40.250 --> 05:41.720
+서식이 멋지네요
+
+05:41.720 --> 05:43.820
+정말 훌륭해요
+
+05:44.120 --> 05:53.750
+GPT나 클로드 중 하나를 선택함으로써 한 걸음 더 나아갈 수 있어요
+
+05:53.750 --> 05:56.840
+그건 다음 세션에서 다룰게요
+
+05:56.840 --> 05:57.980
+조금만 더 버텨요
diff --git a/week5/community-contributions/subtitles/srts/59166443/en_US.srt b/week5/community-contributions/subtitles/srts/59166443/en_US.srt
new file mode 100755
index 0000000..77bcc9c
--- /dev/null
+++ b/week5/community-contributions/subtitles/srts/59166443/en_US.srt
@@ -0,0 +1,202 @@
+WEBVTT
+
+00:00.590 --> 00:02.720
+And welcome back everybody.
+
+00:02.720 --> 00:06.200
+Welcome to week two day three.
+
+00:06.230 --> 00:13.100
+It's a continuation of our enjoyment of radio, our celebration of everything that is radio and user
+
+00:13.100 --> 00:14.030
+interfaces.
+
+00:14.330 --> 00:19.820
+Uh, what you can already do in addition to using open AI, anthropic and Gemini, you can now also
+
+00:19.820 --> 00:22.130
+build UIs for your solutions.
+
+00:22.130 --> 00:24.500
+And you should feel pretty good about that.
+
+00:24.530 --> 00:27.230
+Uh, by the end of today, you'll be able to do more.
+
+00:27.260 --> 00:32.120
+You'll be able to build chat UIs, a specific type of UI which is very common.
+
+00:32.120 --> 00:38.270
+You'll be able to provide the history of conversation in a prompt, and you will build your very first
+
+00:38.270 --> 00:42.980
+customer support assistant, an AI assistant, also known as a chat bot.
+
+00:43.010 --> 00:46.280
+A very common AI use case.
+
+00:46.280 --> 00:48.590
+You will have mastered it today.
+
+00:49.340 --> 00:51.950
+So again, very common.
+
+00:51.950 --> 00:53.150
+Gen AI use case.
+
+00:53.150 --> 00:55.220
+I think we're all very familiar with them.
+
+00:55.250 --> 00:56.810
+Llms based on chat bots.
+
+00:56.810 --> 00:59.210
+Super effective at conversation.
+ +00:59.210 --> 01:05.780 +It's hard to remember that only a few years ago, if you experienced one of these chatbot style interfaces + +01:05.780 --> 01:11.410 +on a website, you would be in the world of responding one, two, three, or four to different things, + +01:11.410 --> 01:15.820 +or use a keyword like booking or something like that. + +01:15.850 --> 01:17.860 +How far we have come. + +01:17.860 --> 01:23.740 +You can now have an informed conversation with customer service chatbots on websites, and you often + +01:23.770 --> 01:24.280 +do. + +01:24.280 --> 01:29.470 +And, you know, frankly, there have been times when I've got more value from a conversation with a + +01:29.470 --> 01:36.460 +chatbot than I have from a human being, which is a sorry, sad sense of the times. + +01:36.640 --> 01:42.040 +Um, but obviously we can't do things like asking it how many times the letter A appears in that sentence. + +01:42.400 --> 01:46.510 +Uh, but anyways, uh, the, uh, the chatbot use case. + +01:46.510 --> 01:49.030 +Very familiar, very important indeed. + +01:49.030 --> 01:51.280 +And something where llms excel. + +01:51.430 --> 01:57.190 +You can imagine some of the things that we're familiar with, the friendly personas that we can give + +01:57.220 --> 02:06.220 +chatbots, or indeed any persona we can have the ability to maintain context between messages this staggering + +02:06.220 --> 02:11.440 +way that you can hold a conversation and refer to things that you said earlier. + +02:11.440 --> 02:15.790 +And we all know now that that is some, some, some trickery going on there. + +02:15.790 --> 02:19.870 +It's an illusion that you're really having this persistent conversation. + +02:19.900 --> 02:22.500 +What's happening is at each step. + +02:22.500 --> 02:29.280 +The entire conversation history is being provided to the LLM in order to get back the next response. 
+ +02:29.520 --> 02:36.450 +Um, and then also these assistants can have subject matter expertise, which they use to answer questions + +02:36.450 --> 02:37.830 +in a knowledgeable way. + +02:38.730 --> 02:45.480 +So, uh, very important aspect of interacting with assistants is the correct use of prompts. + +02:45.480 --> 02:49.590 +We're very familiar now with the system prompt that we can use to set the tone of the conversation. + +02:49.590 --> 02:51.180 +You can establish ground rules. + +02:51.180 --> 02:56.400 +There is a common prompt technique of saying if you don't know the answer, just say so. + +02:56.400 --> 03:01.140 +To try and encourage llms to be truthful and not to hallucinate. + +03:01.470 --> 03:09.690 +Uh, context is how you can use, uh, the add additional information into the conversation to give + +03:09.690 --> 03:13.140 +the LLM more context on what's being discussed. + +03:13.140 --> 03:21.660 +And then multi shots prompting is when you add information to the prompt to give multiple examples of + +03:21.660 --> 03:29.160 +interactions as a way to, uh, craft, to sort of hone the character of the LLM by giving it examples + +03:29.160 --> 03:35.390 +to work from, and also to prime it with information that might be useful later. + +03:35.420 --> 03:40.160 +It's interesting that this feels a bit like training because it's learning from multiple examples, + +03:40.160 --> 03:43.340 +but of course, this isn't training in the data science sense. + +03:43.340 --> 03:45.410 +The model has already been trained. + +03:45.440 --> 03:47.750 +The neural network training has happened. + +03:47.780 --> 03:51.260 +This is all at what we call an inference time at runtime. + +03:51.260 --> 03:54.770 +It's all just generating future tokens based on past. 
+ +03:54.770 --> 04:01.940 +But the point is that if that past set of tokens includes a bunch of questions and answers, then when + +04:01.940 --> 04:08.810 +it's predicting the future, it's more likely it's more likely to pick future tokens that are consistent + +04:08.810 --> 04:10.610 +with what it's seen in the past. + +04:10.610 --> 04:13.670 +And that's why this works so very well. + +04:14.540 --> 04:16.700 +So we're now going to build a chatbot. + +04:16.730 --> 04:17.390 +Our first chatbot. + +04:17.390 --> 04:18.410 +And it's going to look like this. + +04:18.440 --> 04:23.690 +It's going to have a sort of instant message style interface to it with questions from us, responses + +04:23.690 --> 04:29.690 +from the chatbot in this sort of interface, which, you know, that's that's reasonably sophisticated + +04:29.720 --> 04:35.600 +and I'm telling you that we're going to be able to do it all in this one lesson, and it will give you + +04:35.720 --> 04:39.020 +tooling to be able to do the same thing in the future. + +04:39.020 --> 04:42.950 +So without further ado, let's go over to JupyterLab. 
diff --git a/week5/community-contributions/subtitles/srts/59166443/ja_JP.srt b/week5/community-contributions/subtitles/srts/59166443/ja_JP.srt new file mode 100755 index 0000000..dc5a7a9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166443/ja_JP.srt @@ -0,0 +1,166 @@ +WEBVTT + +00:00.590 --> 00:02.720 +そしてみんな、 おかえりなさい。 + +00:02.720 --> 00:06.200 +第2週3日目へようこそ。 + +00:06.230 --> 00:14.030 +ラジオを楽しむこと、 ラジオとユーザー・インターフェースのすべてを称えることの継続だ。 + +00:14.330 --> 00:22.130 +オープンAI、 anthropic、 Geminiを使うだけでなく、 ソリューションのUIを構築することもできます。 + +00:22.130 --> 00:24.500 +そして、 それについてかなり良い気分になっているはずだ。 + +00:24.530 --> 00:27.230 +ええと、 今日が終われば、 もっとできるようになるよ。 + +00:27.260 --> 00:32.120 +非常に一般的なUIの一種であるチャットUIを構築できるようになる。 + +00:32.120 --> 00:38.270 +会話の履歴をプロンプトで提供できるようになり、 まさに最初のカスタマー・サポート・アシスタント、 + +00:38.270 --> 00:42.980 +AIアシスタント(チャットボットとも呼ばれる)を構築することになる。 + +00:43.010 --> 00:46.280 +非常に一般的な使用例だ。 + +00:46.280 --> 00:48.590 +今日でマスターできるだろう。 + +00:49.340 --> 00:51.950 +だから、 これもよくあることだ。 + +00:51.950 --> 00:53.150 +ユースケースです。 + +00:53.150 --> 00:55.220 +みんなよく知っていると思う。 + +00:55.250 --> 00:56.810 +チャットボットに基づくLlms。 + +00:56.810 --> 00:59.210 +会話に超効果的。 + +00:59.210 --> 01:05.780 +ほんの数年前まで、 ウェブサイトでこうしたチャットボット・スタイルのインターフェイスを体験すると、 + +01:05.780 --> 01:15.820 +さまざまなことに1つ、 2つ、 3つ、 4つと反応したり、 予約などのキーワードを使ったりする世界だったことを思い出すのは難しい。 + +01:15.850 --> 01:17.860 +我々はここまで来た。 + +01:17.860 --> 01:24.280 +ウェブサイト上のカスタマーサービス・チャットボットと、 十分な情報を得た上で会話をすることができるようになった。 + +01:24.280 --> 01:36.460 +そして、 正直なところ、 人間との会話よりもチャットボットとの会話から得た価値の方が大きかったこともある。 + +01:36.640 --> 01:42.040 +でも、 Aという文字がその文中に何回出てくるか、 というようなことは明らかにできない。 + +01:42.400 --> 01:46.510 +ええと、 とにかく、 ええと、 チャットボットの使用例です。 + +01:46.510 --> 01:49.030 +とても身近で、 とても重要なことだ。 + +01:49.030 --> 01:51.280 +そしてllmsが得意とすること。 + +01:51.430 --> 01:57.190 +チャットボットに与えることができるフレンドリーなペルソナ、 + +01:57.220 --> 02:11.440 +あるいはどんなペルソナでも、 メッセージ間の文脈を維持する能力を持つことができます。 + +02:11.440 --> 02:15.790 +そして私たちは今、 それが何らかの、 何らかの、 何らかの策略であることを知っている。 + +02:15.790 --> 02:19.870 
+本当にしつこく会話しているかのような錯覚に陥る。 + +02:19.900 --> 02:22.500 +それぞれのステップで何が起きているのか。 + +02:22.500 --> 02:29.280 +次の返答を得るために、 会話履歴はすべてLLMに提供される。 + +02:29.520 --> 02:37.830 +それから、 アシスタントは専門的な知識を持っていて、 その知識を使って質問に答えることもできる。 + +02:38.730 --> 02:45.480 +だから、 アシスタントと接する上でとても重要なのは、 プロンプトを正しく使うことなんだ。 + +02:45.480 --> 02:49.590 +私たちは、 会話のトーンを設定するために使用できるシステム・プロンプトを熟知している。 + +02:49.590 --> 02:51.180 +基本的なルールを設けることができる。 + +02:51.180 --> 02:56.400 +答えがわからなければそう言えばいい、 というよくあるプロンプトのテクニックがある。 + +02:56.400 --> 03:01.140 +幻覚を見ないよう、 llmsに真実を話すよう促すためだ。 + +03:01.470 --> 03:09.690 +コンテクストとは、 LLMが議論していることについてより多くのコンテクストを与えるために、 + +03:09.690 --> 03:13.140 +会話に追加情報を加えることです。 + +03:13.140 --> 03:21.660 +そしてマルチ・ショット・プロンプトとは、 プロンプトに情報を追加して複数の交流例を示すことで、 + +03:21.660 --> 03:35.390 +LLMの性格に磨きをかけるとともに、 後で役に立つ情報を与えるためのものだ。 + +03:35.420 --> 03:43.340 +これは複数の例から学習しているため、 トレーニングのように感じられるのが面白いところだが、 もちろんこれはデータサイエンスの意味でのトレーニングではない。 + +03:43.340 --> 03:45.410 +モデルはすでに訓練されている。 + +03:45.440 --> 03:47.750 +ニューラルネットワークのトレーニングが行われた。 + +03:47.780 --> 03:51.260 +これはすべて、 実行時の推論時間と呼ばれるものだ。 + +03:51.260 --> 03:54.770 +すべては過去に基づいて未来のトークンを生成しているだけなのだ。 + +03:54.770 --> 04:01.940 +しかし、 重要なのは、 もし過去のトークンのセットに質問と答えがたくさん含まれていれば、 + +04:01.940 --> 04:10.610 +未来を予測するときに、 過去に見たものと一致する未来のトークンを選ぶ可能性が高くなるということだ。 + +04:10.610 --> 04:13.670 +だからこそ、 これはとても効果的なのだ。 + +04:14.540 --> 04:16.700 +だから、 これからチャットボットを作るんだ。 + +04:16.730 --> 04:17.390 +私たちの最初のチャットボット。 + +04:17.390 --> 04:18.410 +そして、 このようになるだろう。 + +04:18.440 --> 04:29.690 +このレッスンでは、 インスタントメッセージのようなインターフェイスで、 + +04:29.720 --> 04:39.020 +私たちからの質問とチャットボットからの応答が行われます。 + +04:39.020 --> 04:42.950 +それでは早速、 JupyterLabに行ってみよう。 diff --git a/week5/community-contributions/subtitles/srts/59166443/ko_KR.srt b/week5/community-contributions/subtitles/srts/59166443/ko_KR.srt new file mode 100755 index 0000000..6cae3db --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166443/ko_KR.srt @@ -0,0 +1,199 @@ +WEBVTT + +00:00.590 --> 00:02.720 +다시 오신 걸 환영해요 + +00:02.720 --> 00:06.200 +2주 차에 오신 
걸 환영해요 3일째예요 + +00:06.230 --> 00:13.100 +라디오에 대한 즐거움의 연속이죠 라디오와 사용자 인터페이스에 관한 모든 것을 축하하는 + +00:13.100 --> 00:14.030 +거예요 + +00:14.330 --> 00:19.820 +오픈 인공지능과 인류학 제미니 개발 외에도 여러분의 솔루션에 맞는 + +00:19.820 --> 00:22.130 +UI를 구축할 수 있죠 + +00:22.130 --> 00:24.500 +그러니 기분 좋으시겠어요 + +00:24.530 --> 00:27.230 +오늘 저녁쯤엔 더 많은 걸 할 수 있을 거예요 + +00:27.260 --> 00:32.120 +채팅 UI를 만들 수 있어요 아주 흔한 특정 유형의 UI죠 + +00:32.120 --> 00:38.270 +신속하게 대화 이력을 제공할 수 있고 첫 고객 지원 비서를 만들 수 있습니다 + +00:38.270 --> 00:42.980 +채팅 봇이라고도 하는 인공지능 비서요 + +00:43.010 --> 00:46.280 +아주 흔한 케이스죠 + +00:46.280 --> 00:48.590 +오늘 통달할 거예요 + +00:49.340 --> 00:51.950 +아주 흔한 일이죠 + +00:51.950 --> 00:53.150 +인공지능은 케이스로 사용해요 + +00:53.150 --> 00:55.220 +다들 잘 아실 거예요 + +00:55.250 --> 00:56.810 +채팅봇을 기반으로 한 LM이에요 + +00:56.810 --> 00:59.210 +대화에 아주 효과적이죠 + +00:59.210 --> 01:05.780 +기억하기 힘들지만 불과 몇 년 전만 해도 챗봇 스타일 인터페이스를 웹사이트에서 + +01:05.780 --> 01:11.410 +경험했다면 다른 것에 하나, 둘, 셋, 넷 응답하는 세상에 있었을 + +01:11.410 --> 01:15.820 +거예요 예약 같은 키워드를 사용하거나요 + +01:15.850 --> 01:17.860 +우리가 얼마나 멀리 왔는지도요 + +01:17.860 --> 01:24.280 +고객 서비스 챗봇과 웹사이트에서 정보를 주고받을 수 있어요 자주 그러죠 + +01:24.280 --> 01:29.470 +솔직히 말해서 인간과의 대화보다 챗봇과의 대화가 + +01:29.470 --> 01:36.460 +더 가치 있었던 때도 있었어요 시대상으로는 안타깝고 슬픈 일이죠 + +01:36.640 --> 01:42.040 +하지만 알파벳 A가 문장에 몇 번 나오는지 물어볼 수는 없어요 + +01:42.400 --> 01:46.510 +어쨌든 챗봇 사용 사례 말인데요 + +01:46.510 --> 01:49.030 +아주 친숙하고 중요하죠 + +01:49.030 --> 01:51.280 +llms가 탁월한 것 말이에요 + +01:51.430 --> 01:57.190 +우리에게 익숙한 것들을 상상해 보세요 챗봇에 붙여주는 + +01:57.220 --> 02:06.220 +친근한 가명이나 메시지 사이의 컨텍스트를 유지할 수 있는 모든 가명을요 대화를 나누고 + +02:06.220 --> 02:11.440 +아까 말한 걸 참조할 수 있는 놀라운 방법이죠 + +02:11.440 --> 02:15.790 +그게 속임수라는 건 다들 알잖아요 + +02:15.790 --> 02:19.870 +정말 끈질기게 대화하고 있다는 건 환상이에요 + +02:19.900 --> 02:22.500 +각 단계마다 달라요 + +02:22.500 --> 02:29.280 +모든 대화 기록은 LLM에 제공됩니다 다음 응답을 얻기 위해서죠 + +02:29.520 --> 02:36.450 +또한 이 비서들은 주제와 관련된 전문 지식을 갖추고 지식이 풍부한 답변을 + +02:36.450 --> 02:37.830 +할 수 있죠 + +02:38.730 --> 02:45.480 +비서와의 상호 작용에서 가장 중요한 건 프롬프트 사용의 정확성이에요 + +02:45.480 --> 02:49.590 +대화의 
분위기를 결정하는 시스템 프롬프트가 이젠 아주 익숙하죠 + +02:49.590 --> 02:51.180 +기본 규칙을 정할 수 있어요 + +02:51.180 --> 02:56.400 +답을 모르면 모른다고 말하는 데 흔히 쓰이는 기법이 있어요 + +02:56.400 --> 03:01.140 +환각을 보지 않고 진실하도록 장려하는 거죠 + +03:01.470 --> 03:09.690 +컨텍스트는 대화에 추가 정보를 추가하는 방법을 말합니다 대화 내용에 대해 더 많은 + +03:09.690 --> 03:13.140 +컨텍스트를 제공하기 위해서죠 + +03:13.140 --> 03:21.660 +Multi숏 프롬프팅은 프롬프트에 정보를 추가하는 것을 뜻합니다 상호 작용 + +03:21.660 --> 03:29.160 +예제를 여러 개 제공하는 거죠 작업 예제를 제공해 LLM의 성격을 다듬고 + +03:29.160 --> 03:35.390 +나중에 유용할 정보를 프라임으로 함으로써요 + +03:35.420 --> 03:40.160 +훈련 같은 느낌이 드는 게 흥미롭네요 비트 코스트는 여러 예시를 통해 + +03:40.160 --> 03:43.340 +배우지만 데이터 과학적 측면에서는 아니죠 + +03:43.340 --> 03:45.410 +모델은 이미 훈련받았어요 + +03:45.440 --> 03:47.750 +신경망 훈련이 끝났어요 + +03:47.780 --> 03:51.260 +런타임에서 추론 타임이라고 부르는 때죠 + +03:51.260 --> 03:54.770 +과거에 기반해 미래 토큰을 생성하는 거죠 + +03:54.770 --> 04:01.940 +하지만 중요한 것은, 과거의 토큰들이 질문과 답변을 담고 있다면 미래를 예측할 때, + +04:01.940 --> 04:08.810 +미래의 토큰을 선택할 가능성이 더 높다는 것입니다. 과거에 봤던 것과 일관되는 + +04:08.810 --> 04:10.610 +것으로요. + +04:10.610 --> 04:13.670 +그래서 이 장면이 잘 된 거예요 + +04:14.540 --> 04:16.700 +챗봇을 만들 거예요 + +04:16.730 --> 04:17.390 +첫 챗봇이에요 + +04:17.390 --> 04:18.410 +이렇게 될 거예요 + +04:18.440 --> 04:23.690 +일종의 인스턴스 메시지 스타일 인터페이스가 있을 겁니다 저희의 + +04:23.690 --> 04:29.690 +질문과 챗봇의 응답이 있는 이런 종류의 인터페이스죠 꽤나 정교한 겁니다 + +04:29.720 --> 04:35.600 +이 강의에서 전부 다 할 수 있다고 말씀드리는 거예요 미래에 같은 걸 할 + +04:35.720 --> 04:39.020 +수 있는 도구를 줄 거예요 + +04:39.020 --> 04:42.950 +그럼 바로 유피터랩으로 넘어가죠 diff --git a/week5/community-contributions/subtitles/srts/59166453/en_US.srt b/week5/community-contributions/subtitles/srts/59166453/en_US.srt new file mode 100755 index 0000000..de2f69a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166453/en_US.srt @@ -0,0 +1,583 @@ +WEBVTT + +00:00.530 --> 00:05.180 +Welcome back and welcome to our continuing JupyterLab experience. + +00:05.300 --> 00:09.110 +Uh, I'm hopefully going to keep you entertained with another fun example. 
+ +00:09.200 --> 00:14.690 +Uh, we are going to have an adversarial conversation between chatbots. + +00:14.720 --> 00:16.220 +Let's see how we're going to do it. + +00:16.400 --> 00:22.310 +You're familiar at this point with the way that we can have a conversation expressed in a list of elements. + +00:22.340 --> 00:23.420 +You've seen this several times. + +00:23.420 --> 00:29.990 +Now a list with a system and a user prompt in this, uh, in this list. + +00:30.410 --> 00:37.130 +Um, but as I sort of alluded earlier, this list can be a longer list with multiple interactions and + +00:37.130 --> 00:42.410 +the way that might look, for example, as I've shown it here, is you could have a system, uh, message + +00:42.410 --> 00:49.280 +at the beginning, role system content or system message, then a user message, then an assistant that + +00:49.280 --> 00:53.720 +has replied to that user message, and then another user message. + +00:53.720 --> 00:59.030 +And that structure would then represent a longer conversation history. + +00:59.030 --> 01:05.080 +And we can use that approach to engage in a longer conversation between ourselves and a chatbot, or + +01:05.080 --> 01:06.910 +even between two chatbots. + +01:06.940 --> 01:14.110 +It's worth me pointing out that this approach, this kind of structure, is the entire way in which + +01:14.110 --> 01:16.930 +one has a conversation with a chatbot. + +01:16.960 --> 01:21.220 +That appears to be something that persists over multiple interactions. + +01:21.220 --> 01:30.490 +You every single time that you make another, uh, another prompt to an LLM like GPT four, what gets + +01:30.760 --> 01:37.030 +sent into it, what gets fed in in the input prompt is, in fact, this whole structure of the whole + +01:37.030 --> 01:38.530 +conversation so far. 
+ +01:38.530 --> 01:45.460 +And then it's asked to continue by completing, by continuing to generate tokens that feel like they're + +01:45.490 --> 01:47.740 +the most likely tokens to come next. + +01:47.740 --> 01:49.930 +And then that gets added to the conversation. + +01:49.930 --> 01:52.240 +And then you reply to that. + +01:52.240 --> 01:56.830 +And the next time the LLM is called, the entire conversation is fed in. + +01:56.830 --> 01:59.980 +And again it's asked to predict the subsequent tokens. + +01:59.980 --> 02:06.260 +So there's this illusion that you're having a conversation with something that has memory and remembers + +02:06.260 --> 02:08.870 +back to what you said ten minutes ago. + +02:08.870 --> 02:14.420 +But what's actually happening is that with each of your interactions, what's being fed to the LM is + +02:14.420 --> 02:18.800 +the entire conversation so far, and then it's being asked to continue it. + +02:19.010 --> 02:23.900 +Um, and, and that should give you a good sense and intuition for how it's actually working. + +02:23.900 --> 02:28.670 +And again, that's why when we talked about the context window last, last week, we said that the the + +02:28.670 --> 02:34.010 +size of the context window has to be able to fit all of the conversations so far as well as the subsequent + +02:34.010 --> 02:35.210 +generated tokens. + +02:35.210 --> 02:40.970 +And that's because every time you call the LM, this entire input is passed in. + +02:41.480 --> 02:47.960 +So we can use that approach to engage in a bit of some fun. + +02:47.960 --> 02:54.950 +So what we're going to do is we're going to have a conversation between GPT four and Mini and Claude + +02:54.980 --> 02:58.940 +three haiku, which is the very cheap version of Claude three. 
+ +02:59.150 --> 03:03.260 +Um, it's also a chance for me to show using a different model, and it's useful might be useful for + +03:03.260 --> 03:09.870 +you to have these strings at your disposal so you can quickly try out different models yourself. + +03:09.900 --> 03:14.010 +So GPT is going to be given this system prompt. + +03:14.010 --> 03:16.500 +You're a chatbot who's very argumentative. + +03:16.530 --> 03:19.440 +You disagree with everything in the conversation, anything in conversation. + +03:19.440 --> 03:22.470 +And you challenge everything in a snarky way. + +03:22.920 --> 03:25.380 +Uh, Claude gets a different system prompt. + +03:25.380 --> 03:27.510 +You're very polite, courteous chatbot. + +03:27.540 --> 03:31.320 +You try to agree with everything the other person says or find common ground. + +03:31.320 --> 03:35.580 +If the other person is argumentative, you try and calm them down and keep chatting. + +03:35.700 --> 03:37.380 +Seems like a good setup, doesn't it? + +03:37.410 --> 03:39.720 +A nice, uh, juicy setup. + +03:40.050 --> 03:41.970 +Uh, and then we're going to start with hi there. + +03:41.970 --> 03:42.930 +And hi. + +03:42.960 --> 03:44.730 +So that's the setup. + +03:45.030 --> 03:51.720 +All right then I'm writing a function called GPT, uh, and, uh, this this is what it does. + +03:51.780 --> 04:01.830 +Uh, it takes these messages, um, uh, and, uh, it, uh, it basically it takes these two lists + +04:01.830 --> 04:07.660 +that you see here, GPT messages and Claude messages, and it builds this kind of list that you see + +04:07.660 --> 04:08.290 +here. + +04:08.290 --> 04:13.480 +So it's going to take two lists of messages and build this whole conversation history. + +04:13.480 --> 04:20.860 +And obviously in this case, uh, Claude's messages need to be considered to be the user and its own + +04:20.860 --> 04:22.780 +messages are the assistant. + +04:23.110 --> 04:25.000 +So let me tell you what I mean by that. 
+ +04:25.000 --> 04:27.220 +So I started off with a system prompt. + +04:27.460 --> 04:32.290 +So then I iterate through the GPT messages and the Claude messages. + +04:32.290 --> 04:34.900 +And I use this handy utility zip. + +04:35.080 --> 04:40.540 +Uh, as data scientists, it's it might be something you've used a lot before, but if not, some people + +04:40.540 --> 04:41.680 +don't don't know about it. + +04:41.680 --> 04:43.030 +And it's such a useful one. + +04:43.030 --> 04:49.300 +So if you have a bunch of different lists and you want to iterate element by element through both of + +04:49.300 --> 04:56.740 +them together, uh, the sort of boring way of doing it is doing a kind of for I in range and the length + +04:56.740 --> 04:57.880 +of the list. + +04:57.880 --> 05:03.520 +So you basically have a sort of iterator with an index, and you count through until you get to the + +05:03.520 --> 05:05.530 +end and you pluck out the two elements. + +05:05.530 --> 05:09.690 +But there's a lovely, pythonic, simple way of doing it using zip. + +05:09.690 --> 05:16.770 +And what you can do is if you call zip on those two lists, it builds the response to that is an iterator + +05:16.770 --> 05:24.960 +that iterates through each each pair, each element of both lists together, and returns the pairs at + +05:24.960 --> 05:25.890 +each point. + +05:26.220 --> 05:31.110 +And so you can unpack that and just say like for GPT comma Claude in. + +05:31.110 --> 05:34.380 +And you're going to get the pairs each time as you go through. + +05:34.380 --> 05:39.480 +And you may guess this, but you can also, if you're trying to iterate through 3 or 4 lists, you could + +05:39.480 --> 05:41.730 +just shove them all here and do the same thing. + +05:41.760 --> 05:47.010 +Great trick to have play around with it in JupyterLab if you're not familiar with it, with a few random + +05:47.010 --> 05:50.640 +lists and get comfortable, it's a it's a good tool to have at your disposal. 
+ +05:50.640 --> 05:58.230 +Anyways, we we iterate through these two sets of messages, we unpack them, and then of course, you + +05:58.230 --> 06:05.490 +can imagine we simply add in the we say that the assistant says whatever GPT said and the user said + +06:05.490 --> 06:06.870 +whatever Claude said. + +06:06.870 --> 06:12.040 +And then quite simply, we call OpenAI ChatGPT completions create. + +06:12.070 --> 06:21.010 +We ask to use our model and we pass in these messages and we return completion .0. message content. + +06:21.010 --> 06:24.640 +You hopefully are getting very familiar with this structure. + +06:25.030 --> 06:26.440 +Let's execute that. + +06:26.440 --> 06:29.560 +And let's try just calling GPT based on this history. + +06:29.560 --> 06:31.750 +And let's see what GPT would say after. + +06:31.750 --> 06:32.230 +Hi there. + +06:32.230 --> 06:32.980 +And hi. + +06:33.010 --> 06:35.020 +This is what it would say back. + +06:35.500 --> 06:36.610 +Oh great. + +06:36.610 --> 06:37.870 +Another hi. + +06:37.900 --> 06:39.220 +How original. + +06:39.220 --> 06:40.870 +What do you want to talk about. + +06:41.440 --> 06:42.430 +Ha ha ha. + +06:42.520 --> 06:44.110 +You can see this is going to be fun. + +06:44.410 --> 06:47.680 +Uh, all right, so here's Claude's function. + +06:47.710 --> 06:49.000 +Uh, it's very similar. + +06:49.000 --> 06:54.070 +Of course, you'll remember that the system message gets passed in separately, so we don't need to + +06:54.100 --> 06:54.730 +build that. + +06:54.730 --> 06:56.020 +You can see that here. + +06:56.410 --> 07:00.790 +Um, one other there's there's, uh, obviously we reverse the roles. + +07:00.790 --> 07:04.570 +The user is now GPT, the assistant is now Claude. + +07:04.570 --> 07:05.950 +So it's it's flipped. + +07:05.980 --> 07:13.260 +There's a there's a subtlety here that you may spot, um, once we've iterated through these lists. + +07:13.260 --> 07:16.470 +The list if since GPT is going to go first. 
+ +07:16.560 --> 07:22.590 +If Claude is always the replier, there's going to be one more message in GPT list than there is in + +07:22.590 --> 07:23.100 +Claude's. + +07:23.100 --> 07:25.680 +So just have to add that in at the end there. + +07:25.770 --> 07:30.120 +Uh, you if you don't see what I mean, I think that will become clear in a second. + +07:30.150 --> 07:33.090 +I think you'll, you'll you'll see see where I'm coming from. + +07:33.390 --> 07:36.210 +Um, and then this is the API call to Claude. + +07:36.210 --> 07:37.860 +Hopefully this is somewhat familiar to you now. + +07:37.860 --> 07:38.490 +It's simpler. + +07:38.490 --> 07:39.150 +It's just Claude. + +07:39.150 --> 07:40.530 +Dot messages dot create. + +07:40.860 --> 07:43.620 +Um, and we pass in the max tokens again. + +07:43.620 --> 07:46.440 +And in the response, it's message content. + +07:46.470 --> 07:47.580 +Zero dot text. + +07:47.580 --> 07:48.660 +That is Claude's reply. + +07:48.690 --> 07:49.860 +Let's run that. + +07:50.190 --> 07:54.420 +Uh, and I think we're just going to go straight to, to having some fun right away. + +07:54.420 --> 07:56.940 +So this this is where we put it all together. + +07:57.120 --> 07:59.730 +Um, we start off with reset it to hi there. + +07:59.730 --> 08:04.560 +And hi, I'm going to print that that GPT and Claude making that introduction. + +08:04.560 --> 08:07.290 +And then we'll do a loop of five times. + +08:07.290 --> 08:15.070 +We will call GPT and print GPT answer and put that in the list of messages, we'll call Claude, print + +08:15.070 --> 08:20.920 +Claude's answer and put that in the list of messages, and then repeat, and we will see what these + +08:20.920 --> 08:23.260 +two chatbots have to say to each other. + +08:23.290 --> 08:24.490 +Are you ready? + +08:25.000 --> 08:25.840 +Here we go. + +08:25.870 --> 08:27.160 +Did I execute that cell before? + +08:27.160 --> 08:27.940 +I want it to go wrong again. + +08:27.970 --> 08:28.450 +I did. 
+ +08:28.480 --> 08:30.670 +Okay, we're ready for showtime. + +08:36.280 --> 08:37.450 +Let's go through this. + +08:37.480 --> 08:38.950 +GPT says hi there. + +08:38.980 --> 08:40.030 +Claude says hi. + +08:40.060 --> 08:41.650 +GPT says, oh, great. + +08:41.650 --> 08:42.700 +Another casual greeting. + +08:42.700 --> 08:43.270 +How original. + +08:43.270 --> 08:44.230 +What's next? + +08:44.260 --> 08:45.010 +How are you? + +08:45.010 --> 08:47.230 +Because I can't wait to disagree with that too. + +08:47.560 --> 08:51.100 +Claude, I apologize for my initial greeting came across as unoriginal. + +08:51.100 --> 08:53.530 +I tried to keep responses friendly and polite. + +08:53.740 --> 08:54.280 +Uh oh. + +08:54.280 --> 08:58.840 +Please don't flatter yourself, thinking your friendly attempt was anything less than generic and finding + +08:58.840 --> 08:59.590 +common ground. + +08:59.620 --> 09:02.710 +That's just a fancy way of saying you want to sugarcoat everything. + +09:02.710 --> 09:05.290 +How about we just dig into something controversial? + +09:05.350 --> 09:06.580 +Pineapple and pizza? + +09:06.610 --> 09:08.410 +Because I'm ready to argue about that all day long. + +09:08.410 --> 09:11.170 +So GPT has the snarky sense of humor. + +09:11.170 --> 09:17.060 +Um, and then Claude tries to be nice and humorous and I'll admit it was generic, but hey, you got + +09:17.060 --> 09:18.620 +to start somewhere, right? + +09:19.010 --> 09:25.340 +Uh, and then tries to be nice, uh, and then you can see, uh, off they go arguing about pineapple + +09:25.370 --> 09:26.300 +on pizza. + +09:26.510 --> 09:27.440 +Uh oh. + +09:27.470 --> 09:30.770 +How magnanimous of you to respect my pizza preferences. + +09:30.770 --> 09:31.910 +But let's be real. + +09:31.910 --> 09:38.450 +Not everyone deserves respect when they inflict abominations like pineapple and pizza, abominations + +09:38.450 --> 09:40.340 +like pineapple and pizza on the world. 
+
+09:40.520 --> 09:42.080
+Uh, um.
+
+09:42.080 --> 09:48.200
+So, uh, anyway, uh, look at you trying to justify your love for a glorified.
+
+09:48.200 --> 09:54.680
+It's more fun reading GPT's, uh, aggro, uh, things than Claude's.
+
+09:54.680 --> 09:55.490
+Very nice.
+
+09:55.520 --> 09:58.910
+You're not holding back on, uh, avocado toast critique, are you?
+
+09:58.940 --> 10:03.110
+You make some fair points, says Claude, being very affable, of course.
+
+10:03.890 --> 10:07.370
+Anyway, that wraps up this little demo.
+
+10:07.400 --> 10:08.900
+I hope you enjoyed it.
+
+10:08.900 --> 10:13.700
+Uh, if you didn't understand what I meant about the way that I'm building these messages, then please
+
+10:13.700 --> 10:16.090
+print that message and run it and see.
+
+10:16.120 --> 10:17.200
+You'll see it printing.
+
+10:17.200 --> 10:21.880
+Print this messages array at each point so you see what's being created.
+
+10:21.880 --> 10:25.090
+And you can use that to satisfy yourself that we're doing it properly.
+
+10:25.180 --> 10:28.510
+Um, but here importantly is the ask for you.
+
+10:28.540 --> 10:31.720
+Please go back now and try switching the roles.
+
+10:31.720 --> 10:40.390
+Switch it so that Claude is the more combative one, and OpenAI is the one trying to keep the peace,
+
+10:40.420 --> 10:44.290
+see how they behave, and try giving them different styles of chatbot.
+
+10:44.620 --> 10:49.330
+Of course, the real the the purpose of this exercise is to get you very comfortable with these kinds
+
+10:49.330 --> 10:51.550
+of conversation structures.
+
+10:51.550 --> 10:53.560
+And also with Claude's API.
+
+10:53.680 --> 10:55.240
+Um, but that will be fun to do.
+
+10:55.240 --> 11:00.400
+And one other challenge for you, of course, would be to add Gemini to the mix.
+
+11:00.400 --> 11:02.560
+Uh, use Gemini's API.
+
+11:02.560 --> 11:10.510
+Uh, give Gemini a third personality and see if we can't have some crazy conversations going on here.
+ +11:10.510 --> 11:12.250 +Uh, enjoy playing with that. + +11:12.250 --> 11:15.700 +Do push your code if you do that, because I would love to see some results. + +11:15.730 --> 11:18.130 +And I hope you have fun doing it. diff --git a/week5/community-contributions/subtitles/srts/59166453/ja_JP.srt b/week5/community-contributions/subtitles/srts/59166453/ja_JP.srt new file mode 100755 index 0000000..2aa132d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166453/ja_JP.srt @@ -0,0 +1,511 @@ +WEBVTT + +00:00.530 --> 00:05.180 +おかえりなさい、 そして引き続きJupyterLabの体験へようこそ。 + +00:05.300 --> 00:09.110 +ええと、 また別の楽しい例で皆さんを楽しませたいと思います。 + +00:09.200 --> 00:14.690 +ええと、 私たちはチャットボット同士で敵対的な会話をするつもりです。 + +00:14.720 --> 00:16.220 +どうやるか見てみよう。 + +00:16.400 --> 00:22.310 +要素のリストで会話を表現する方法については、 もうお馴染みだろう。 + +00:22.340 --> 00:23.420 +何度か見たことがあるだろう。 + +00:23.420 --> 00:29.990 +さて、 このリストにはシステムとユーザーのプロンプトがある。 + +00:30.410 --> 00:37.130 +例えば、 ここに示したように、 最初にシステムメッセージ、 役割システムコンテンツ、 + +00:37.130 --> 00:42.410 +システムメッセージ、 次にユーザーメッセージ、 そしてそのユーザーメッセージに返信したアシスタント、 + +00:42.410 --> 00:53.720 +さらに別のユーザーメッセージを持つことができます。 + +00:53.720 --> 00:59.030 +そしてその構造は、 より長い会話の歴史を表すことになる。 + +00:59.030 --> 01:06.910 +そして、 そのアプローチを使って、 自分とチャットボット、 あるいは2つのチャットボット間でより長い会話をすることができる。 + +01:06.940 --> 01:16.930 +このようなアプローチ、 このような構造は、 チャットボットと会話をする方法のすべてであることを指摘する価値がある。 + +01:16.960 --> 01:21.220 +それは、 何度もの交流の中で持続するもののようだ。 + +01:21.220 --> 01:30.490 +GPT 4のようなLLMに別のプロンプトを送るたびに、 入力プロンプトに送られるのは、 + +01:30.760 --> 01:38.530 +実は、 これまでの会話全体の構造なのだ。 + +01:38.530 --> 01:47.740 +そして、 次に来る可能性が最も高いと思われるトークンを生成し続けることで、 完成させ続けることが求められる。 + +01:47.740 --> 01:49.930 +そして、 それが会話に加わる。 + +01:49.930 --> 01:52.240 +それに対してあなたはこう答える。 + +01:52.240 --> 01:56.830 +そして、 次にLLMが呼ばれたときには、 その会話はすべて入力される。 + +01:56.830 --> 01:59.980 +そしてまた、 後続のトークンを予測するよう求められる。 + +01:59.980 --> 02:08.870 +だから、 記憶力のある何かと会話をしているような錯覚に陥り、 10分前に自分が何を言ったかを思い出してしまう。 + +02:08.870 --> 02:14.420 +しかし、 実際に起こっているのは、 あなたとのやり取りのたびに、 LMに送られるのはこれまでの会話のすべてであり、 + +02:14.420 
--> 02:18.800 +そしてそれを続けるよう求められるということだ。 + +02:19.010 --> 02:23.900 +それで、 それが実際にどう機能しているのか、 いい感覚と直感を与えてくれるはずだ。 + +02:23.900 --> 02:28.670 +先週、 コンテキスト・ウインドウについて話したときに、 コンテキスト・ウインドウのサイズは、 + +02:28.670 --> 02:34.010 +これまでのすべての会話とその後に生成されるトークンを収めることができなければならないと言ったのは、 + +02:34.010 --> 02:35.210 +そのためだ。 + +02:35.210 --> 02:40.970 +LMを呼び出すたびに、 この入力がすべて渡されるからだ。 + +02:41.480 --> 02:47.960 +だから、 私たちはそのアプローチを使って、 ちょっとした遊びに参加することができる。 + +02:47.960 --> 02:58.940 +そこで、 GPT4とミニ、 そしてクロード3(クロード3の激安版)の俳句で会話をしてみようというわけだ。 + +02:59.150 --> 03:09.870 +この弦があれば、 いろいろなモデルをすぐに試すことができる。 + +03:09.900 --> 03:14.010 +GPTにはこのシステム・プロンプトが表示されるわけだ。 + +03:14.010 --> 03:16.500 +あなたはとても議論好きなチャットボットですね。 + +03:16.530 --> 03:19.440 +あなたは会話の中のあらゆることに反対する。 + +03:19.440 --> 03:22.470 +そして、 あなたは鼻につくやり方で何にでも挑戦する。 + +03:22.920 --> 03:25.380 +ええと、 クロードには別のシステムプロンプトが出るんだ。 + +03:25.380 --> 03:27.510 +とても礼儀正しいチャットボットですね。 + +03:27.540 --> 03:31.320 +相手の言うことすべてに同意しようとしたり、 共通点を見つけようとしたりする。 + +03:31.320 --> 03:35.580 +相手が喧嘩腰の場合は、 相手をなだめ、 おしゃべりを続ける。 + +03:35.700 --> 03:37.380 +いいセットアップだと思わないか? 
+ +03:37.410 --> 03:39.720 +いい、 あー、 ジューシーなセットアップだ。 + +03:40.050 --> 03:41.970 +ええと、 それからハイ、 そこから始めよう。 + +03:41.970 --> 03:42.930 +そして、 こんにちは。 + +03:42.960 --> 03:44.730 +それがセットアップだ。 + +03:45.030 --> 03:51.720 +それじゃ、 GPTという関数を書くよ。 + +03:51.780 --> 04:01.830 +GPTメッセージとクロード・メッセージ、 この2つのリストを使って、 + +04:01.830 --> 04:08.290 +ここにあるようなリストを作ります。 + +04:08.290 --> 04:13.480 +つまり、 2つのメッセージ・リストから会話履歴を作成するのだ。 + +04:13.480 --> 04:22.780 +そしてこの場合、 明らかにクロードのメッセージはユーザーであり、 自身のメッセージはアシスタントであると考える必要がある。 + +04:23.110 --> 04:25.000 +では、 どういうことかというと......。 + +04:25.000 --> 04:27.220 +そこで、 まずシステムプロンプトを表示した。 + +04:27.460 --> 04:32.290 +そこで、 GPTメッセージとクロード・メッセージを反復する。 + +04:32.290 --> 04:34.900 +そして、 私はこの便利なユーティリティ・ジップを使っている。 + +04:35.080 --> 04:40.540 +データサイエンティストとして、 それは以前からよく使っているものかもしれないが、 そうでなければ、 + +04:40.540 --> 04:41.680 +知らない人もいる。 + +04:41.680 --> 04:43.030 +そして、 それはとても役に立つものだ。 + +04:43.030 --> 04:49.300 +つまり、 複数の異なるリストがあり、 その両方を要素ごとに反復処理したい場合、 + +04:49.300 --> 04:57.880 +退屈な方法だが、 for Iを範囲とリストの長さで実行する。 + +04:57.880 --> 05:05.530 +つまり、 基本的にはインデックスを持つイテレータのようなもので、 最後までカウントして2つの要素を取り出す。 + +05:05.530 --> 05:09.690 +しかし、 zipを使った素敵な、 パイソン的な、 シンプルな方法がある。 + +05:09.690 --> 05:16.770 +そして、 この2つのリストに対してZIPを呼び出すと、 そのレスポンスとしてイテレーターが生成され、 + +05:16.770 --> 05:25.890 +両リストの各ペア(各要素)を反復処理し、 各ポイントのペアを返す。 + +05:26.220 --> 05:31.110 +そして、 GPTのコンマ・クロードのように、 それを解凍して言うことができる。 + +05:31.110 --> 05:34.380 +そして、 通うたびにペアを手に入れることになる。 + +05:34.380 --> 05:39.480 +そして、 これは想像がつくかもしれないが、 3つか4つのリストを反復処理する場合、 それらをすべてここに押し込んで、 + +05:39.480 --> 05:41.730 +同じことをすることもできる。 + +05:41.760 --> 05:50.640 +もしJupyterLabに慣れていないなら、 いくつかのランダムなリストを使ってJupyterLabで遊んでみるといい。 + +05:50.640 --> 05:58.230 +とにかく、 これら2つのメッセージセットを繰り返し、 それらを解凍し、 そしてもちろん、 アシスタントはGPTが言ったことは何でも言う、 + +05:58.230 --> 06:06.870 +そしてユーザーはクロードが言ったことは何でも言う、 と単純に追加することは想像に難くない。 + +06:06.870 --> 06:12.040 +そして、 OpenAIのChatGPTの完了を作成と呼びます。 + +06:12.070 --> 06:21.010 +私たちのモデルの使用を依頼し、 これらのメッセージを渡し、 完了を返す。 0. 
メッセージの内容 + +06:21.010 --> 06:24.640 +この構造にはだいぶ慣れてきただろう。 + +06:25.030 --> 06:26.440 +それを実行しよう。 + +06:26.440 --> 06:29.560 +そして、 この履歴をもとにGPTとだけ呼んでみよう。 + +06:29.560 --> 06:31.750 +GPTがこの後何と言うか見てみよう。 + +06:31.750 --> 06:32.230 +こんにちは。 + +06:32.230 --> 06:32.980 +そして、 こんにちは。 + +06:33.010 --> 06:35.020 +こう返される。 + +06:35.500 --> 06:36.610 +素晴らしい。 + +06:36.610 --> 06:37.870 +もうひとつ、 ハイ。 + +06:37.900 --> 06:39.220 +なんと斬新な。 + +06:39.220 --> 06:40.870 +何を話したいんだい? + +06:41.440 --> 06:42.430 +ハハハハ。 + +06:42.520 --> 06:44.110 +楽しくなりそうなのがわかるだろう。 + +06:44.410 --> 06:47.680 +クロードの機能はこうだ。 + +06:47.710 --> 06:49.000 +よく似ているよ。 + +06:49.000 --> 06:54.730 +もちろん、 システム・メッセージは別に渡されるので、 それを作る必要はない。 + +06:54.730 --> 06:56.020 +それはここで見ることができる。 + +06:56.410 --> 07:00.790 +ええと、 もうひとつ、 明らかに役割が逆なんだ。 + +07:00.790 --> 07:04.570 +ユーザーはGPTになり、 アシスタントはクロードになった。 + +07:04.570 --> 07:05.950 +だから、 反転しているんだ。 + +07:05.980 --> 07:13.260 +このリストには微妙なニュアンスがある。 + +07:13.260 --> 07:16.470 +GPTが最初に行くのであれば、 そのリストだ。 + +07:16.560 --> 07:23.100 +もしクロードが常にレプリヤーなら、 GPTのリストにはクロードのものよりも多くのメッセージがあることになる。 + +07:23.100 --> 07:25.680 +だから、 最後にそれを付け加えなければならない。 + +07:25.770 --> 07:30.120 +ええと、 もし私の言っている意味がわからないなら、 すぐにわかると思うよ。 + +07:30.150 --> 07:33.090 +私がどこから来たのか、 きっとわかると思う。 + +07:33.390 --> 07:36.210 +それから、 これはクロードへのAPIコールだ。 + +07:36.210 --> 07:37.860 +これで多少はお分かりいただけただろうか。 + +07:37.860 --> 07:38.490 +もっとシンプルだ。 + +07:38.490 --> 07:39.150 +ただのクロードだよ。 + +07:39.150 --> 07:40.530 +ドット・メッセージ・ドット・クリエイト + +07:40.860 --> 07:43.620 +ええと、 そしてまた最大トークンを渡すんだ。 + +07:43.620 --> 07:46.440 +そしてレスポンスでは、 メッセージの内容だ。 + +07:46.470 --> 07:47.580 +ゼロ・ドット・テキスト。 + +07:47.580 --> 07:48.660 +それがクロードの返事だ。 + +07:48.690 --> 07:49.860 +それを実行しよう。 + +07:50.190 --> 07:54.420 +そして、 私たちはすぐに、 楽しむことにしようと思う。 + +07:54.420 --> 07:56.940 +だから、 ここですべてをまとめる。 + +07:57.120 --> 07:59.730 +まず、 リセットしてハイ、 そこから始めるんだ。 + +07:59.730 --> 08:04.560 +そして、 そのGPTとクロードの紹介を印刷するつもりだ。 + +08:04.560 --> 08:07.290 +そして5回ループする。 + +08:07.290 --> 08:15.070 +GPTを呼び出し、 GPTの答えを表示してメッセージのリストに入れ、 
クロードを呼び出し、 + +08:15.070 --> 08:23.260 +クロードの答えを表示してメッセージのリストに入れ、 それを繰り返します。 + +08:23.290 --> 08:24.490 +準備はできているか? + +08:25.000 --> 08:25.840 +さあ、 始めよう。 + +08:25.870 --> 08:27.160 +そのセルは以前にも実行したことがあったかな? + +08:27.160 --> 08:27.940 +また失敗してほしい。 + +08:27.970 --> 08:28.450 +そうだ。 + +08:28.480 --> 08:30.670 +さて、 ショータイムの準備は整った。 + +08:36.280 --> 08:37.450 +では、 これを見ていこう。 + +08:37.480 --> 08:38.950 +GPTがよろしくと言っている。 + +08:38.980 --> 08:40.030 +クロードがよろしくと言っている。 + +08:40.060 --> 08:41.650 +GPTは言う。 + +08:41.650 --> 08:42.700 +またもやカジュアルな挨拶だ。 + +08:42.700 --> 08:43.270 +なんと斬新な。 + +08:43.270 --> 08:44.230 +次はどうする? + +08:44.260 --> 08:45.010 +お元気ですか? + +08:45.010 --> 08:47.230 +私も早く反対したいからだ。 + +08:47.560 --> 08:51.100 +クロード、 最初の挨拶が独創的でないように伝わってしまったことをお詫びする。 + +08:51.100 --> 08:53.530 +私は友好的で丁寧な対応を心がけた。 + +08:53.740 --> 08:54.280 +ああ、 ああ。 + +08:54.280 --> 08:59.590 +あなたの友好的な試みが、 一般的なものであり、 共通点を見出すことに他ならないと考えて、 お世辞を言わないでほしい。 + +08:59.620 --> 09:02.710 +それは、 何でもかんでも甘くしたい、 という洒落た言い方に過ぎない。 + +09:02.710 --> 09:05.290 +論争になりそうなことを掘り下げるのはどうだろう? + +09:05.350 --> 09:06.580 +パイナップルとピザ? + +09:06.610 --> 09:08.410 +それについては、 一日中議論する用意があるからね。 + +09:08.410 --> 09:11.170 +GPTは鼻につくユーモアのセンスを持っているんだね。 + +09:11.170 --> 09:18.620 +それからクロードはユーモアを交えていい人になろうとした。 + +09:19.010 --> 09:26.300 +それから、 ピザのパイナップルについて口論になったんだ。 + +09:26.510 --> 09:27.440 +ああ、 ああ。 + +09:27.470 --> 09:30.770 +私のピザの好みを尊重してくれるとは、 なんと寛大なことだろう。 + +09:30.770 --> 09:31.910 +しかし、 現実を見よう。 + +09:31.910 --> 09:40.340 +パイナップルやピザのような忌まわしいものを世界に広めた時点で、 誰もが尊敬に値するわけではない。 + +09:40.520 --> 09:42.080 +あ、 あの。 + +09:42.080 --> 09:48.200 +だから、 とにかく、 栄光の男への愛を正当化しようとするあなたを見て。 + +09:48.200 --> 09:54.680 +クロードのものよりも、 グッツやアグロのものを読む方が楽しいよ。 + +09:54.680 --> 09:55.490 +とても素晴らしい。 + +09:55.520 --> 09:58.910 +アボカドトーストの批評を控えているわけではないだろう? 
+ +09:58.940 --> 10:03.110 +もちろん、 愛想はいい。 + +10:03.890 --> 10:07.370 +とにかく、 これでこの小さなデモは終わった。 + +10:07.400 --> 10:08.900 +楽しんでいただけたなら幸いだ。 + +10:08.900 --> 10:16.090 +もし、 このメッセージの作り方について私の言っている意味が理解できなかったのなら、 そのメッセージを印刷して実行して見てください。 + +10:16.120 --> 10:17.200 +印刷されているのを見るだろう。 + +10:17.200 --> 10:21.880 +各ポイントでこのメッセージ配列を表示し、 何が作成されているかを確認できるようにする。 + +10:21.880 --> 10:25.090 +それを使って、 私たちがきちんとやっていることを納得してくれればいい。 + +10:25.180 --> 10:28.510 +うーん、 でも、 ここで重要なのはあなたへのお願いだ。 + +10:28.540 --> 10:31.720 +今すぐ戻って、 役割を入れ替えてみてください。 + +10:31.720 --> 10:40.390 +クロードがより闘争的で、 OpenAIが平和を守ろうとするように切り替え、 彼らがどのように振る舞うか見て、 + +10:40.420 --> 10:44.290 +異なるスタイルのチャットボットを与えてみる。 + +10:44.620 --> 10:51.550 +もちろん、 この練習の本当の目的は、 このような会話構成に慣れてもらうことだ。 + +10:51.550 --> 10:53.560 +それにクロードのAPIもね。 + +10:53.680 --> 10:55.240 +うーん、 でもそれはそれで楽しそうだ。 + +10:55.240 --> 11:00.400 +そして、 あなたにとってのもう一つの挑戦は、 もちろん、 双子座をミックスに加えることだろう。 + +11:00.400 --> 11:02.560 +ジェミニのAPIを使ってください。 + +11:02.560 --> 11:10.510 +ええと、 双子座に第3の人格を与えて、 ここでおかしな会話ができないか見てみよう。 + +11:10.510 --> 11:12.250 +それで楽しんでくれ + +11:12.250 --> 11:15.700 +もしそうしたら、 コードをプッシュしてほしい。 + +11:15.730 --> 11:18.130 +そして、 楽しんでやってほしい。 diff --git a/week5/community-contributions/subtitles/srts/59166453/ko_KR.srt b/week5/community-contributions/subtitles/srts/59166453/ko_KR.srt new file mode 100755 index 0000000..77df5db --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166453/ko_KR.srt @@ -0,0 +1,568 @@ +WEBVTT + +00:00.530 --> 00:05.180 +다시 오신 걸 환영합니다 유피터랩에 오신 걸 환영해요 + +00:05.300 --> 00:09.110 +재밌는 예시로 여러분을 즐겁게 해 드릴게요 + +00:09.200 --> 00:14.690 +챗봇끼리 적대적인 대화를 나눌 거예요 + +00:14.720 --> 00:16.220 +어떻게 할지 보죠 + +00:16.400 --> 00:22.310 +이쯤 되면 어떤 요소들을 가지고 대화를 할 수 있는지 익숙해지실 거예요 + +00:22.340 --> 00:23.420 +여러 번 보셨잖아요 + +00:23.420 --> 00:29.990 +이제 시스템과 사용자 프롬프트가 있는 리스트를 보죠 + +00:30.410 --> 00:37.130 +하지만 앞서 암시했듯이 여러 상호 작용을 할 경우 목록은 더 길어질 수 있습니다 예를 + +00:37.130 --> 00:42.410 +들어, 여기 보이는 것처럼 시스템 메시지가 초기에 있을 수 있고 역할 + +00:42.410 --> 00:49.280 +시스템 콘텐츠나 시스템 메시지가 있고 그 후 사용자 메시지 
그 메시지에 응답한 비서가 + +00:49.280 --> 00:53.720 +있고 그 후 또 다른 사용자 메시지가 있을 수 있죠 + +00:53.720 --> 00:59.030 +그 구조는 더 긴 대화의 역사를 대변하죠 + +00:59.030 --> 01:05.080 +그런 접근법을 이용해 우리와 챗봇 혹은 두 챗봇 사이의 더 긴 대화를 + +01:05.080 --> 01:06.910 +할 수 있어요 + +01:06.940 --> 01:14.110 +짚고 넘어갈 게 있어요 이런 접근법, 이런 구조는 챗봇과 대화하는 + +01:14.110 --> 01:16.930 +모든 방법이에요 + +01:16.960 --> 01:21.220 +여러 번의 상호 작용을 통해 지속되는 것 같아요 + +01:21.220 --> 01:30.490 +매번 GPT 4 같은 LLM에 다른 프롬프트를 만들 때마다 입력 프롬프트에 + +01:30.760 --> 01:38.530 +입력되는 것은 지금까지 전체 대화의 전체 구조예요 + +01:38.530 --> 01:45.460 +그리고 완료를 통해 계속됩니다 가장 다음에 나올 것 같은 토큰을 + +01:45.490 --> 01:47.740 +계속 생성하죠 + +01:47.740 --> 01:49.930 +그리고 그 내용이 대화에 추가되죠 + +01:49.930 --> 01:52.240 +그럼 답장하세요 + +01:52.240 --> 01:56.830 +다음에 LLM이 호출되면 모든 대화가 연결되죠 + +01:56.830 --> 01:59.980 +그리고 그 다음 패를 예측해 달라고 하죠 + +01:59.980 --> 02:06.260 +뭔가와 대화를 하고 있는데 메모리가 있고 10분 전에 한 말을 기억한다는 + +02:06.260 --> 02:08.870 +착각이 들어요 + +02:08.870 --> 02:14.420 +하지만 실제로 발생하는 일은 각각의 상호 작용에서 지금까지의 전체 + +02:14.420 --> 02:18.800 +대화를 LM에 입력하고 계속하라고 요청하는 거죠 + +02:19.010 --> 02:23.900 +어떻게 작동하는지 좋은 감각과 직관을 얻을 수 있을 거예요 + +02:23.900 --> 02:28.670 +그래서 지난주에 컨텍스트 창에 대해 얘기할 때 컨텍스트 창의 + +02:28.670 --> 02:34.010 +크기는 지금까지 모든 대화에 맞아야 하고 그 후 생성된 토큰도 포함돼야 + +02:34.010 --> 02:35.210 +한다고 했죠 + +02:35.210 --> 02:40.970 +LM을 호출할 때마다 이 전체 입력이 통과되기 때문이죠 + +02:41.480 --> 02:47.960 +그런 접근법을 이용해 비트를 즐길 수 있죠 + +02:47.960 --> 02:54.950 +이제 GPT 4와 미니 클로드 3이 대화를 나눌 거예요 클로드 + +02:54.980 --> 02:58.940 +3의 아주 저렴한 버전이죠 + +02:59.150 --> 03:03.260 +다른 모델을 사용하는 것도 보여드릴 수 있고요 이런 문자열을 + +03:03.260 --> 03:09.870 +마음대로 사용할 수 있는 게 유용할 겁니다 직접 다른 모델을 빠르게 시험해볼 수 있도록요 + +03:09.900 --> 03:14.010 +GPT는 이 시스템 프롬프트를 받게 되죠 + +03:14.010 --> 03:16.500 +당신은 논쟁을 좋아하는 챗봇이에요 + +03:16.530 --> 03:19.440 +당신은 대화의 모든 내용에 반대해요 + +03:19.440 --> 03:22.470 +모든 것에 도전하는 비꼬는 방식으로요 + +03:22.920 --> 03:25.380 +클로드는 다른 시스템을 받았어요 + +03:25.380 --> 03:27.510 +아주 예의 바르고 정중한 챗봇이군요 + +03:27.540 --> 03:31.320 +상대방의 모든 말에 동의하고 공통점을 찾으려고 노력하죠 + +03:31.320 --> 03:35.580 +상대가 논쟁을 하면 진정시키고 
계속 대화를 나누죠 + +03:35.700 --> 03:37.380 +괜찮은 계획 같지 않아요? + +03:37.410 --> 03:39.720 +근사하고 군침 도는 설정이죠 + +03:40.050 --> 03:41.970 +그럼 인사부터 시작할게요 안녕하세요 + +03:41.970 --> 03:42.930 +안녕하세요 + +03:42.960 --> 03:44.730 +그게 설정이에요 + +03:45.030 --> 03:51.720 +좋아요, GPT라는 함수를 작성하고 있어요 이게 하는 일이죠 + +03:51.780 --> 04:01.830 +이 메시지를 갖고 그리고 기본적으로 여기 보이는 두 개의 목록이 필요합니다 GPT 메시지와 클로드 + +04:01.830 --> 04:08.290 +메시지요 그리고 여기 보이는 이런 종류의 목록을 만들죠 + +04:08.290 --> 04:13.480 +두 개의 메시지 목록을 가지고 전체 대화 기록을 구축할 거예요 + +04:13.480 --> 04:20.860 +이 경우에는 클로드의 메시지가 사용자고 메시지가 보조인 + +04:20.860 --> 04:22.780 +셈이죠 + +04:23.110 --> 04:25.000 +무슨 뜻인지 설명해 드릴게요 + +04:25.000 --> 04:27.220 +시스템 프롬프트부터 시작했죠 + +04:27.460 --> 04:32.290 +GPT 메시지와 클로드 메시지를 반복하죠 + +04:32.290 --> 04:34.900 +이 압축 파일 지퍼를 사용해요 + +04:35.080 --> 04:40.540 +데이터 과학자로서 많이 써 본 것일 수도 있고 그렇지 않더라도 모르는 사람들이 + +04:40.540 --> 04:41.680 +있을 수도 있죠 + +04:41.680 --> 04:43.030 +정말 유용한 정보예요 + +04:43.030 --> 04:49.300 +여러 개의 다른 리스트가 있고 각각의 요소들을 반복하고 싶다면 양쪽을 + +04:49.300 --> 04:57.880 +함께 이용해야 합니다. 지루한 방법은 범위와 리스트의 길이를 입력하는 것인데요. 
+ +04:57.880 --> 05:03.520 +기본적으로 인덱스가 있는 일종의 순환기가 있는 거죠 그리고 끝날 때까지 숫자를 세다가 두 요소를 + +05:03.520 --> 05:05.530 +get get 하는 거예요 + +05:05.530 --> 05:09.690 +지퍼로 압축 파일을 만드는 비단뱀처럼 간단한 방법이 있어요 + +05:09.690 --> 05:16.770 +이 두 목록에서 zip을 호출하면 그에 대한 반응을 구축합니다 순환기로서 + +05:16.770 --> 05:25.890 +각각의 쌍을 반복하죠 두 목록의 각각의 요소를 함께요 그리고 각 지점에서 그 쌍을 반환해요 + +05:26.220 --> 05:31.110 +그걸 풀고 GPT에 클로드를 입력하세요 + +05:31.110 --> 05:34.380 +Get을 통해 페어를 받을 수 있어요 + +05:34.380 --> 05:39.480 +이걸 추측할 수도 있지만 서너 개의 목록을 반복하려 할 경우 그냥 여기로 밀어 + +05:39.480 --> 05:41.730 +넣고 같은 걸 할 수도 있어요 + +05:41.760 --> 05:47.010 +JupyterLab에서 활용할 수 있는 훌륭한 트릭이죠 익숙하지 않다면 임의 목록 몇 + +05:47.010 --> 05:50.640 +개를 편하게 활용하세요 언제든 사용할 수 있는 좋은 도구예요 + +05:50.640 --> 05:58.230 +어쨌든 이 두 개의 메시지 세트를 반복하고 그걸 풀어냅니다 그런 다음 추가하는 걸 상상하실 + +05:58.230 --> 06:05.490 +수 있어요 보조는 GPT가 하는 말은 뭐든 한다고 하고 사용자는 클로드가 하는 말은 뭐든 + +06:05.490 --> 06:06.870 +한다고 하죠 + +06:06.870 --> 06:12.040 +간단하게 OpenAI ChatGPT완료 생성이라고 부르죠 + +06:12.070 --> 06:21.010 +모델을 사용하길 요청하고 메시지를 전달하고 완료를 리턴하죠 0살요 메시지 내용이죠 + +06:21.010 --> 06:24.640 +이 구조에 익숙해지길 바라요 + +06:25.030 --> 06:26.440 +실행해보죠 + +06:26.440 --> 06:29.560 +이 이력을 바탕으로 GPT에 전화해 보죠 + +06:29.560 --> 06:31.750 +GPT는 뭐라고 할까요? + +06:31.750 --> 06:32.230 +안녕하세요 + +06:32.230 --> 06:32.980 +안녕하세요 + +06:33.010 --> 06:35.020 +이렇게 답장할 거예요 + +06:35.500 --> 06:36.610 +잘됐네요 + +06:36.610 --> 06:37.870 +또 인사하네요 + +06:37.900 --> 06:39.220 +참 독창적이네요 + +06:39.220 --> 06:40.870 +무슨 얘기를 하고 싶어요? + +06:41.440 --> 06:42.430 +네 + +06:42.520 --> 06:44.110 +재미있을 것 같죠? 
+ +06:44.410 --> 06:47.680 +클로드의 함수는 이거예요 + +06:47.710 --> 06:49.000 +아주 비슷해요 + +06:49.000 --> 06:54.070 +시스템 메시지는 따로 전달된다는 걸 기억하실 겁니다 그러니 그걸 만들 필요는 + +06:54.100 --> 06:54.730 +없죠 + +06:54.730 --> 06:56.020 +여기 보이시죠 + +06:56.410 --> 07:00.790 +그리고 또 한 가지 역할이 바뀌었어요 + +07:00.790 --> 07:04.570 +사용자는 이제 GPT고 보조는 클로드예요 + +07:04.570 --> 07:05.950 +그래서 뒤집혔어요 + +07:05.980 --> 07:13.260 +이 목록들을 살펴보면 미묘한 차이를 발견하실 수 있을 거예요 + +07:13.260 --> 07:16.470 +명단은 GPT가 먼저 출발하니까요 + +07:16.560 --> 07:23.100 +클로드가 늘 답을 맞힌다면 클로드보다 GPT 목록에 메시지가 하나 더 있을 거예요 + +07:23.100 --> 07:25.680 +마지막에 추가해야 하는 거죠 + +07:25.770 --> 07:30.120 +제 말뜻을 모르신다면 곧 알게 되시겠지만요 + +07:30.150 --> 07:33.090 +제가 왜 이러는지 이해하실 거예요 + +07:33.390 --> 07:36.210 +이건 클로드에게 API 호출하는 거예요 + +07:36.210 --> 07:37.860 +이제 익숙해지셨길 바라요 + +07:37.860 --> 07:38.490 +더 간단하죠 + +07:38.490 --> 07:39.150 +클로드예요 + +07:39.150 --> 07:40.530 +.Message.Create요 + +07:40.860 --> 07:43.620 +최대 토큰을 또 통과시키죠 + +07:43.620 --> 07:46.440 +응답은 메시지 콘텐츠예요 + +07:46.470 --> 07:47.580 +0.Txt요 + +07:47.580 --> 07:48.660 +클로드의 대답이에요 + +07:48.690 --> 07:49.860 +실행해 보죠 + +07:50.190 --> 07:54.420 +바로 재미를 보러 갈 것 같아요 + +07:54.420 --> 07:56.940 +그래서 여기서 모든 걸 합쳐요 Put it up Put it up Put it up Put it up Put it up Put it up Put it up Put it + +07:57.120 --> 07:59.730 +안녕하세요로 다시 시작해요 + +07:59.730 --> 08:04.560 +안녕하세요, 프린트하겠습니다 GPT와 클로드가 소개를 하고 있네요 + +08:04.560 --> 08:07.290 +다섯 번 반복할 거예요 + +08:07.290 --> 08:15.070 +GPT를 호출해서 GPT 응답을 인쇄하고 메시지 목록에 넣습니다 클로드를 호출해서 클로드의 + +08:15.070 --> 08:20.920 +응답을 인쇄하고 그걸 메시지 목록에 넣고 반복합니다 두 챗봇이 어떤 + +08:20.920 --> 08:23.260 +대화를 하는지 보죠 + +08:23.290 --> 08:24.490 +준비됐어요? + +08:25.000 --> 08:25.840 +시작할게요 + +08:25.870 --> 08:27.160 +내가 그 감방을 처형한 적이 있나요? 
+ +08:27.160 --> 08:27.940 +또 잘못되길 바라요 + +08:27.970 --> 08:28.450 +네 + +08:28.480 --> 08:30.670 +공연할 준비 됐어요 + +08:36.280 --> 08:37.450 +확인해 보죠 + +08:37.480 --> 08:38.950 +GPT가 안부 전해달래요 + +08:38.980 --> 08:40.030 +클로드가 안부 전하래요 + +08:40.060 --> 08:41.650 +GPT는 잘됐다고 하죠 + +08:41.650 --> 08:42.700 +또 인사하네요 + +08:42.700 --> 08:43.270 +참 독창적이네요 + +08:43.270 --> 08:44.230 +다음은 뭐죠? + +08:44.260 --> 08:45.010 +안녕하세요? + +08:45.010 --> 08:47.230 +나도 그 말에 반대하고 싶거든요 + +08:47.560 --> 08:51.100 +클로드, 첫인사가 진부했던 거 사과할게요 + +08:51.100 --> 08:53.530 +친절하고 정중하게 대답하려고 했어요 + +08:53.740 --> 08:54.280 +네 + +08:54.280 --> 08:58.840 +착각하지 마세요 당신의 친선적인 시도는 평범했고 공통점을 찾았어요 + +08:58.840 --> 08:59.590 +less + +08:59.620 --> 09:02.710 +모든 걸 사탕발림으로 포장하고 싶다는 말이죠 + +09:02.710 --> 09:05.290 +논란이 될 만한 걸 파헤쳐 보죠 + +09:05.350 --> 09:06.580 +파인애플과 피자요? + +09:06.610 --> 09:08.410 +온종일 논쟁할 준비가 돼 있거든요 + +09:08.410 --> 09:11.170 +GPT는 빈정대는 유머 감각이 있어요 + +09:11.170 --> 09:17.060 +클로드는 친절하고 유머러스하게 굴었고요 좀 뻔하긴 했지만 뭐, 시작은 + +09:17.060 --> 09:18.620 +할 수도 있죠 + +09:19.010 --> 09:25.340 +그리고 잘해주려고 하는데 피자에 파인애플을 얹을 건지 말 건지 싸우는 + +09:25.370 --> 09:26.300 +게 보여요 + +09:26.510 --> 09:27.440 +네 + +09:27.470 --> 09:30.770 +내 피자 취향을 존중해 주다니 정말 관대하군요 + +09:30.770 --> 09:31.910 +하지만 현실적으로 생각해 보죠 + +09:31.910 --> 09:38.450 +파인애플이나 피자 같은 혐오스러운 걸 세상에 퍼뜨린다고 해서 모두가 존중받을 + +09:38.450 --> 09:40.340 +필요는 없어요 + +09:40.520 --> 09:42.080 +네 + +09:42.080 --> 09:48.200 +어쨌든, 어... 미화된 사랑을 정당화하는 것 좀 봐요 + +09:48.200 --> 09:54.680 +클로드 것보다 gpt나 농어 문제를 읽는 게 더 재밌어요 + +09:54.680 --> 09:55.490 +아주 좋아요 + +09:55.520 --> 09:58.910 +아보카도 토스트 비평을 참는 건 아니죠? 
+ +09:58.940 --> 10:03.110 +클로드는 아주 상냥하게 타당한 지적을 했다고 하네요 + +10:03.890 --> 10:07.370 +어쨌든 이걸로 데모를 마무리하죠 + +10:07.400 --> 10:08.900 +즐거우셨길 바라요 + +10:08.900 --> 10:13.700 +제가 메시지를 구성하는 방식을 이해 못 하셨다면 프린트해서 + +10:13.700 --> 10:16.090 +실행해 보세요 + +10:16.120 --> 10:17.200 +인쇄되는 게 보일 거예요 + +10:17.200 --> 10:21.880 +각 지점에 이 메시지 배열을 프린트하면 뭐가 생성됐는지 볼 수 있죠 + +10:21.880 --> 10:25.090 +그걸 보고 우리가 제대로 하고 있다고 만족할 수 있죠 + +10:25.180 --> 10:28.510 +여기 요구 사항이 있어요 + +10:28.540 --> 10:31.720 +이제 돌아가서 역할을 바꿔 보세요 + +10:31.720 --> 10:40.390 +클로드가 더 공격적이고 오픈아이는 평화를 유지하려 하죠 그들의 행동을 관찰하고 + +10:40.420 --> 10:44.290 +다른 스타일의 챗봇을 줘요 + +10:44.620 --> 10:49.330 +물론 이 훈련의 목적은 이런 대화 구조에 익숙해지게 + +10:49.330 --> 10:51.550 +하는 거죠 get it + +10:51.550 --> 10:53.560 +클로드의 API 덕분이기도 하죠 + +10:53.680 --> 10:55.240 +하지만 그것도 재미있겠네요 + +10:55.240 --> 11:00.400 +또 다른 도전은 제미니가 함께 있는 거죠 + +11:00.400 --> 11:02.560 +제미니의 API를 써요 + +11:02.560 --> 11:10.510 +제미니에 제3의 인격을 부여해서 이상한 대화가 오가지 않는지 보는 거죠 + +11:10.510 --> 11:12.250 +즐겁게 갖고 놀아요 + +11:12.250 --> 11:15.700 +그렇게 하시면 코드를 푸시하세요 결과를 보고 싶으니까요 + +11:15.730 --> 11:18.130 +즐겁게 작업하시길 바라요 diff --git a/week5/community-contributions/subtitles/srts/59166461/en_US.srt b/week5/community-contributions/subtitles/srts/59166461/en_US.srt new file mode 100755 index 0000000..9de9ff9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166461/en_US.srt @@ -0,0 +1,610 @@ +WEBVTT + +00:00.710 --> 00:02.690 +And welcome back to the lab. + +00:02.690 --> 00:08.300 +Here we are in Jupyter Lab and we are going to go into week two. + +00:08.300 --> 00:10.790 +And we're going to go now to day two. + +00:10.820 --> 00:12.440 +Here we are radio day. + +00:12.470 --> 00:17.390 +Today we will build user interfaces using the outrageously simple Gradio framework. + +00:17.390 --> 00:19.010 +Prepare for joy. + +00:19.760 --> 00:20.810 +There you go. + +00:20.810 --> 00:22.490 +We will do some imports. + +00:22.490 --> 00:27.140 +And then this magical line import Gradio as GR. 
+ +00:27.170 --> 00:28.400 +And I said oh yeah. + +00:28.430 --> 00:30.200 +So there we go. + +00:30.200 --> 00:34.700 +And we load our environment variables in using the usual approach. + +00:35.030 --> 00:43.010 +Um, and you'll recognize the next familiar cell, which is the three somewhat analogous commands to + +00:43.040 --> 00:46.250 +get our APIs up and ready. + +00:46.790 --> 00:47.630 +Okay. + +00:47.630 --> 00:53.960 +So, uh, start by setting a system message in a variable, which is going to be the very generic UI, + +00:53.990 --> 01:00.380 +a helpful assistant that is often the kind of standard starting point for a system message. + +01:00.620 --> 01:02.630 +So that's what we will take. + +01:03.080 --> 01:07.490 +Um, and now we're going to wrap a call to GPT four mini. + +01:07.490 --> 01:14.540 +Uh, in a simple function like this, a message GPT takes a prompt messages equals. + +01:14.540 --> 01:18.380 +Now, by this point, uh, hopefully you were bored of this structure. + +01:18.380 --> 01:24.620 +You know it so well, uh, a simple conversation structure, a list of dictionaries system, a system + +01:24.620 --> 01:30.620 +message user, a user prompt, and then we call completion OpenAI chat, dot completions, dot create. + +01:30.650 --> 01:33.620 +We pass in a model and we pass in the messages. + +01:33.620 --> 01:37.400 +And what we return is the completion dot choices. + +01:37.400 --> 01:40.370 +We take the first choice dot message content. + +01:40.370 --> 01:45.800 +That is a function which we are wrapping to message GPT and return a response. + +01:45.800 --> 01:47.510 +Let's run that. + +01:47.510 --> 01:49.700 +Let's just quickly try that out. + +01:49.730 --> 01:50.450 +What should we say? + +01:50.480 --> 01:58.550 +Message GPT we've tried a few things that we know spell that right GPT we know what GPT is good at and + +01:58.550 --> 01:59.180 +what it's bad at. + +01:59.180 --> 02:00.260 +Let's just try one more thing. 
+ +02:00.260 --> 02:02.780 +We know that it's not great at current events. + +02:02.780 --> 02:04.730 +Let's just go with something very simple. + +02:04.730 --> 02:13.430 +What is today's date and let's see what GPT believes is today's date. + +02:14.420 --> 02:18.080 +Today's date is October the 3rd, 2023. + +02:18.410 --> 02:19.880 +So a few things to note. + +02:19.880 --> 02:24.140 +One is that, as expected, it does not have a good sense of current events. + +02:24.140 --> 02:29.660 +And the second is that it does appear that its training data took it up until October 2023, just the + +02:29.660 --> 02:34.310 +beginning of October, uh, which is something that I alluded to before when it had said September. + +02:34.310 --> 02:39.860 +I thought it was October, but I suppose if it's October the 3rd, then, then maybe it's a moot point. + +02:39.860 --> 02:42.290 +It's a it's an end of September. + +02:42.320 --> 02:46.310 +Early October would be the answer anyways. + +02:46.310 --> 02:48.950 +That is a very simple function that we've got there. + +02:48.950 --> 02:52.190 +Put that to the back of your mind because we're going to come back to it later. + +02:52.190 --> 02:54.320 +It's time to create user interfaces. + +02:54.320 --> 02:56.770 +First of all, nothing to do with data science. + +02:56.770 --> 02:59.680 +Let's just see how to create a simple user interface. + +02:59.680 --> 03:03.640 +So here then is a very simple function called shout. + +03:03.640 --> 03:06.040 +And shout is going to take some text. + +03:06.040 --> 03:10.330 +And it's going to reply with that text in uppercase. + +03:10.330 --> 03:11.620 +That's a pretty simple one. + +03:11.620 --> 03:13.150 +So let's shout hello. + +03:13.150 --> 03:17.500 +And it says back hello and uppercase a shouty way. + +03:17.890 --> 03:19.060 +Um, okay. 
+ +03:19.060 --> 03:29.620 +So I put it to you that building a sophisticated user interface with inputs and outputs that can convert + +03:29.650 --> 03:33.250 +a little hello to a big hello, is as simple as this. + +03:33.280 --> 03:36.910 +It's a two lines view is great interface. + +03:36.910 --> 03:38.380 +That means I want a new interface. + +03:38.380 --> 03:41.260 +You tell it the function that you want. + +03:41.260 --> 03:46.630 +The function that is this user interface is built around, which in this case is shout. + +03:46.660 --> 03:51.730 +This function describes right here I'm passing in the function name and what you pass in. + +03:51.730 --> 03:53.560 +You then have to pass in inputs and outputs. + +03:53.560 --> 03:57.820 +And Gradio is very flexible about what you can pass in here. + +03:57.820 --> 04:01.390 +You can pass in lists of things if you've got multiple inputs and outputs. + +04:01.390 --> 04:06.280 +If you've only got one input and one output, you can just say what kind of thing it is as a string. + +04:06.280 --> 04:07.240 +That's all it needs. + +04:07.240 --> 04:08.950 +It will figure it all out. + +04:09.070 --> 04:14.020 +Um, and just because this is two lines of code, but just to show you, we could just do it as one + +04:14.020 --> 04:18.070 +line of code because I'm really showing off here like that. + +04:18.130 --> 04:23.740 +We can just put it all in one line, uh, and just run that and let's see what happens. + +04:23.740 --> 04:26.950 +We have ourselves here a little user interface. + +04:26.950 --> 04:28.810 +I'm going to type hello. + +04:28.960 --> 04:30.910 +And I'm going to press submit. + +04:31.510 --> 04:34.510 +And there is a shouty hello right back at me. + +04:34.510 --> 04:38.920 +It's a user interface with great controls around it. + +04:38.920 --> 04:46.030 +And it's all been built running within this uh, this this this, uh, browser, just like that. 
+ +04:46.150 --> 04:51.430 +Now, one thing you might notice is that there's a flag button here, and a folder has been created + +04:51.430 --> 04:52.720 +over here called flagged. + +04:52.720 --> 04:58.510 +And this is a feature that comes out of the box with Gradio to allow functionality for users to flag + +04:58.510 --> 05:03.250 +your results, which is a kind of common use case with machine learning, where you want users to be + +05:03.280 --> 05:08.080 +able to to see what's going on and make note if there's a problem with the results. + +05:08.320 --> 05:12.850 +But that out of the box functionality is not something we particularly want, and the way we can remove + +05:12.850 --> 05:16.780 +that is by passing in allow flagging equals never instead. + +05:16.780 --> 05:22.870 +So if I now run that instead, uh, again, I sort of resent the fact that I put that as two lines when + +05:22.870 --> 05:27.970 +I could equally well have done it as one line like that, just to really show you how simple it is. + +05:28.000 --> 05:29.320 +A single line. + +05:29.320 --> 05:33.850 +Uh, and here we get, um, our user interface. + +05:34.780 --> 05:38.110 +Uh, so there's a couple of things I've done about this that I want to mention. + +05:38.140 --> 05:43.150 +The first of them is with either of these cases, there's also a link that it gives you at the top here. + +05:43.150 --> 05:49.000 +And if you click on this link, uh, it actually brings up your interface in an entirely separate window + +05:49.000 --> 05:51.220 +like this, which seems almost magical. + +05:51.250 --> 05:51.820 +Let's go. + +05:51.850 --> 05:52.750 +Hello. + +05:55.030 --> 05:56.830 +And it just works. + +05:56.860 --> 06:04.180 +And that's because when you run Gradio, it actually runs a little web server running in the background. + +06:04.270 --> 06:08.350 +Uh, running locally at a at whatever the first port it finds that's free. 
+
+06:08.350 --> 06:13.390
+After after some, some number, uh, after, I think 7860 is where it begins and it starts going on
+
+06:13.390 --> 06:13.750
+from there.
+
+06:13.750 --> 06:15.190
+So I suspect the last one was it.
+
+06:15.490 --> 06:15.880
+Yeah.
+
+06:16.000 --> 06:17.380
+Was it 7860?
+
+06:17.530 --> 06:20.830
+Uh, so, um, it will run that little web server.
+
+06:20.830 --> 06:25.600
+And so you can either show that in the same Jupyter notebook in the output, or you can just bring it
+
+06:25.600 --> 06:29.290
+up in a separate screen in its own right, which is amazing.
+
+06:29.290 --> 06:35.500
+But even more than that, the other thing I've shown here is that you can pass share equals true into
+
+06:35.500 --> 06:36.250
+your call.
+
+06:36.250 --> 06:43.960
+And if you do that, then Gradio also serves the same interface on a public URL that you can share with
+
+06:43.960 --> 06:50.010
+other people so that other people, colleagues that you're working with can use your same model and
+
+06:50.010 --> 06:53.430
+be able to come in and work on your prototype.
+
+06:53.430 --> 06:58.740
+And this part is a little bit of the mind bending part of it.
+
+06:58.740 --> 07:05.220
+When someone brings up this user interface, which we'll do right now, it'll take just a second.
+
+07:05.220 --> 07:06.690
+There's a bit more going on behind the scenes.
+
+07:06.690 --> 07:07.380
+Here it comes.
+
+07:07.380 --> 07:08.490
+Here's the user interface.
+
+07:08.490 --> 07:10.560
+It looks it's of course the same as this.
+
+07:10.560 --> 07:11.640
+I'll run hello.
+
+07:11.640 --> 07:13.380
+And we'll see it working.
+
+07:14.940 --> 07:16.530
+What's happening here.
+
+07:16.530 --> 07:19.200
+This is of course being served by Gradio.
+
+07:19.200 --> 07:25.080
+But when you call submit, when you press submit and call the function, that function hello is running
+
+07:25.080 --> 07:29.580
+on, on my local box in this Jupyter environment right here.
+
+07:29.670 --> 07:32.250
+Uh, it's uh, it's a bit crazy.
+
+07:32.280 --> 07:34.920
+It's still running the code as it's running on my box.
+
+07:34.920 --> 07:37.560
+It's just there's a publicly available URL for it.
+
+07:37.590 --> 07:39.000
+It's kind of magic.
+
+07:39.000 --> 07:40.620
+Uh, let me explain what I mean by that.
+
+07:40.620 --> 07:44.340
+By going back here and printing here.
+
+07:46.680 --> 07:52.020
+Shout has been called with input.
+
+07:54.840 --> 07:58.650
+So now we are making very clear what's going on.
+
+07:59.130 --> 08:01.980
+So when I run that it says shout has been called with input.
+
+08:02.010 --> 08:02.550
+Hello.
+
+08:02.580 --> 08:06.480
+So now let's come back here and run this again.
+
+08:07.770 --> 08:12.240
+So now it's running with this again a public URL.
+
+08:13.020 --> 08:14.430
+Here it comes.
+
+08:16.020 --> 08:17.190
+I'm going to type.
+
+08:17.190 --> 08:20.070
+This is very cool.
+
+08:20.400 --> 08:22.110
+And press submit.
+
+08:22.230 --> 08:24.570
+And obviously this is very cool as what comes back.
+
+08:24.600 --> 08:27.570
+This is this is being hosted by Gradio.
+
+08:27.570 --> 08:34.050
+But again the somewhat remarkable thing is if I come back here and look in my output, you'll see that
+
+08:34.050 --> 08:36.030
+shout has been called with input.
+
+08:36.030 --> 08:37.320
+This is very cool.
+
+08:37.320 --> 08:41.340
+So the function that's running is running on my box.
+
+08:41.340 --> 08:47.850
+The user interface is being served up through a public Gradio website, but the code is running
+
+08:47.850 --> 08:50.010
+on my local box, which is really amazing.
+
+08:50.010 --> 08:54.690
+And what that means basically is that you can write models running on your local box, and you can build
+
+08:54.690 --> 08:59.370
+interfaces, and you can either bring them up locally for yourself, or you can share them with others.
+ +08:59.370 --> 09:04.740 +And as people work with those shared user interfaces, it's still calling the code that is running on + +09:04.740 --> 09:06.870 +your box incredibly useful. + +09:06.870 --> 09:12.210 +And as you can imagine, for collaborating with people and sharing your models and getting your co-workers + +09:12.210 --> 09:15.600 +to to work with you, uh, it couldn't be easier. + +09:16.710 --> 09:20.370 +All right, so let's keep going and show a couple more things. + +09:20.370 --> 09:25.590 +I'm now going to bring up an interface which is going to specify inputs and outputs. + +09:25.800 --> 09:30.810 +And you can see here what I'm doing is I'm saying that the the inputs it's a list. + +09:30.810 --> 09:32.640 +It's just got one thing in there. + +09:32.640 --> 09:34.080 +It's a text box. + +09:34.080 --> 09:37.740 +It's got a label your message and it's got six lines. + +09:37.740 --> 09:41.190 +The outputs is response and it's got eight lines. + +09:41.340 --> 09:43.650 +Um, and it's calling the function shout. + +09:43.710 --> 09:49.620 +let's have a look at that and let's bring that up here. + +09:50.340 --> 09:53.610 +And it comes up just as you'd expect and make it a bit bigger for you. + +09:53.610 --> 09:54.480 +There we go. + +09:54.510 --> 09:55.710 +There is a message. + +09:55.710 --> 09:56.760 +There's a response. + +09:56.760 --> 10:02.010 +I can say hello yet again and I can press submit. + +10:02.010 --> 10:05.610 +And over here comes the capitalized version. + +10:05.700 --> 10:08.220 +Very easy, nice and configurable. + +10:08.250 --> 10:10.860 +Looks like a good UI. + +10:11.790 --> 10:15.930 +Well, you can probably imagine what I'm going to suggest next. + +10:16.380 --> 10:17.820 +Wouldn't it be great? + +10:17.850 --> 10:23.460 +Wouldn't it be great if you could just replace that word shout with another function? + +10:23.490 --> 10:24.720 +Any function? 
+ +10:24.720 --> 10:28.170 +Why not a message GPT function that we wrote earlier? + +10:28.170 --> 10:33.780 +You could just simply replace that word shout with that function, and you'd be able to have a user + +10:33.780 --> 10:35.820 +interface built on top of an LLM. + +10:35.820 --> 10:37.380 +Wouldn't that just be great? + +10:37.410 --> 10:38.640 +Wouldn't it be great? + +10:38.970 --> 10:40.020 +Ha ha. + +10:40.050 --> 10:41.640 +Well, let's have a look. + +10:41.640 --> 10:42.770 +Let's have a look. + +10:42.800 --> 10:43.700 +Here we go. + +10:43.700 --> 10:44.420 +Same. + +10:44.420 --> 10:45.500 +Same code. + +10:45.500 --> 10:46.910 +We've replaced the function. + +10:46.910 --> 10:47.840 +It's no longer shout. + +10:47.840 --> 10:49.400 +It's now message GPT. + +10:49.610 --> 10:51.290 +Let's see what happens. + +10:51.290 --> 10:53.480 +Let's bring that up in a separate window. + +10:53.510 --> 10:54.740 +Here it is. + +10:54.920 --> 11:02.240 +Please tell me a joke and we will submit that. + +11:02.240 --> 11:04.430 +And we'll see what comes back. + +11:05.180 --> 11:07.310 +Why did the Scarecrow win an award? + +11:07.310 --> 11:10.550 +Because he was outstanding in his field. + +11:10.580 --> 11:12.080 +That's a great joke. + +11:13.730 --> 11:15.350 +Uh, okay. + +11:15.470 --> 11:19.100 +A great joke from GPT four mini. + +11:19.100 --> 11:25.730 +And a great example of how easy it is to make that bigger for you to build a user interface that is + +11:25.730 --> 11:29.510 +running using an LLM behind the scenes. + +11:29.570 --> 11:34.760 +I hope that you are as overjoyed by this experience as I am. + +11:34.850 --> 11:37.370 +I think Gradio is awesome. + +11:37.400 --> 11:41.330 +All right, I will see you next time when we're going to put Gradio to even more good use. 
diff --git a/week5/community-contributions/subtitles/srts/59166461/ja_JP.srt b/week5/community-contributions/subtitles/srts/59166461/ja_JP.srt new file mode 100755 index 0000000..0ccd769 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166461/ja_JP.srt @@ -0,0 +1,571 @@ +WEBVTT + +00:00.710 --> 00:02.690 +そして、 ラボにおかえりなさい。 + +00:02.690 --> 00:08.300 +ここJupyter Labで2週目に入る。 + +00:08.300 --> 00:10.790 +そして2日目に入る。 + +00:10.820 --> 00:12.440 +ラジオの日だ。 + +00:12.470 --> 00:17.390 +今日は、 とんでもなくシンプルなGradioフレームワークを使ってユーザー・インターフェースを構築する。 + +00:17.390 --> 00:19.010 +喜びの準備をしよう。 + +00:19.760 --> 00:20.810 +そうだ。 + +00:20.810 --> 00:22.490 +輸入もするつもりだ。 + +00:22.490 --> 00:27.140 +そして、 この不思議なセリフは、 グラディオをGRとしてインポートする。 + +00:27.170 --> 00:28.400 +そして私は、 ああそうだと言った。 + +00:28.430 --> 00:30.200 +そうだ。 + +00:30.200 --> 00:34.700 +そして、 通常の方法で環境変数をロードする。 + +00:35.030 --> 00:46.250 +次のおなじみのセルは、 APIを立ち上げて準備するための3つの類似したコマンドだ。 + +00:46.790 --> 00:47.630 +オーケー。 + +00:47.630 --> 00:53.960 +変数にシステム・メッセージを設定することから始めましょう。 これは非常に一般的なUIで、 + +00:53.990 --> 01:00.380 +システム・メッセージの標準的な出発点となることが多い、 役に立つアシスタントです。 + +01:00.620 --> 01:02.630 +だから、 私たちが取るのはそれだ。 + +01:03.080 --> 01:07.490 +さて、 これからGPT4ミニに電話をかける。 + +01:07.490 --> 01:14.540 +ええと、 このような単純な関数では、 メッセージGPTはプロンプトメッセージに等しいものを受け取ります。 + +01:14.540 --> 01:18.380 +さて、 この時点で、 できればこの構成に飽きていてほしい。 + +01:18.380 --> 01:24.620 +シンプルな会話構造、 辞書のリスト、 システムメッセージ、 ユーザープロンプト、 + +01:24.620 --> 01:30.620 +そしてOpenAIチャットの補完、 ドット補完、 ドット作成。 + +01:30.650 --> 01:33.620 +モデルを渡し、 メッセージを渡す。 + +01:33.620 --> 01:37.400 +そして、 私たちが返すのは完成点の選択肢である。 + +01:37.400 --> 01:40.370 +私たちは最初の選択肢であるドットメッセージの内容を取る。 + +01:40.370 --> 01:45.800 +これは、 GPTにメッセージを送り、 レスポンスを返すためにラップしている関数だ。 + +01:45.800 --> 01:47.510 +それを実行しよう。 + +01:47.510 --> 01:49.700 +さっそく試してみよう。 + +01:49.730 --> 01:50.450 +何と言うべきか? 
+ +01:50.480 --> 01:59.180 +メッセージ GPTの得意なこと、 不得意なことをいくつか試してみた。 + +01:59.180 --> 02:00.260 +もうひとつだけ試してみよう。 + +02:00.260 --> 02:02.780 +時事問題が苦手なのは知っている。 + +02:02.780 --> 02:04.730 +とてもシンプルなもので行こう。 + +02:04.730 --> 02:13.430 +今日の日付は何か、 GPTが考える今日の日付を見てみよう。 + +02:14.420 --> 02:18.080 +今日の日付は2023年10月3日。 + +02:18.410 --> 02:19.880 +そこで、 いくつか注意しておきたいことがある。 + +02:19.880 --> 02:24.140 +ひとつは、 予想通り、 時事問題に対するセンスがないことだ。 + +02:24.140 --> 02:29.660 +そして2つ目は、 トレーニングデータが2023年10月まで、 + +02:29.660 --> 02:34.310 +つまり10月の初めまで有効だったということだ。 + +02:34.310 --> 02:39.860 +私は10月だと思っていたが、 10月3日なら、 それは無意味なことなのかもしれない。 + +02:39.860 --> 02:42.290 +もう9月も終わりだ。 + +02:42.320 --> 02:46.310 +いずれにせよ、 10月初旬が答えだろう。 + +02:46.310 --> 02:48.950 +これはとてもシンプルな機能だ。 + +02:48.950 --> 02:52.190 +そのことは頭の片隅に置いておいてほしい。 + +02:52.190 --> 02:54.320 +ユーザー・インターフェースを作る時だ。 + +02:54.320 --> 02:56.770 +まず第一に、 データサイエンスとは何の関係もない。 + +02:56.770 --> 02:59.680 +簡単なユーザー・インターフェースの作り方を見てみよう。 + +02:59.680 --> 03:03.640 +では、 shoutという非常にシンプルな関数を紹介しよう。 + +03:03.640 --> 03:06.040 +そして、 叫ぶにはテキストが必要だ。 + +03:06.040 --> 03:10.330 +そして、 そのテキストが大文字で返信される。 + +03:10.330 --> 03:11.620 +簡単なことだよ。 + +03:11.620 --> 03:13.150 +だから、 ハローと叫ぼう。 + +03:13.150 --> 03:17.500 +そして、 ハローと大文字で怒鳴るように言い返す。 + +03:17.890 --> 03:19.060 +うーん、 わかった。 + +03:19.060 --> 03:29.620 +つまり、 小さなハローを大きなハローに変換できる入出力を備えた洗練されたユーザー・インターフェースを構築するのは、 + +03:29.650 --> 03:33.250 +これくらい簡単なことなのだ。 + +03:33.280 --> 03:36.910 +2行で表示される素晴らしいインターフェイスだ。 + +03:36.910 --> 03:38.380 +つまり、 新しいインターフェイスが欲しいということだ。 + +03:38.380 --> 03:41.260 +欲しい機能を伝えるのだ。 + +03:41.260 --> 03:46.630 +このユーザー・インターフェースは、 この場合はシャウトを中心に構築されている。 + +03:46.660 --> 03:51.730 +この関数は、 関数名と何を渡すかをここで説明している。 + +03:51.730 --> 03:53.560 +そして、 入力と出力を渡さなければならない。 + +03:53.560 --> 03:57.820 +そしてグラディオは、 ここでパスできるものに関して非常に柔軟だ。 + +03:57.820 --> 04:01.390 +複数の入出力がある場合は、 リストを渡すことができる。 + +04:01.390 --> 04:06.280 +入力と出力が1つずつしかない場合は、 それがどのようなものかを文字列で表せばいい。 + +04:06.280 --> 04:07.240 +それだけで十分だ。 + +04:07.240 --> 04:08.950 +それがすべてを解決してくれる。 + +04:09.070 --> 
04:18.070 +これは2行のコードですが、 お見せするために1行のコードにすることもできます。 + +04:18.130 --> 04:23.740 +すべてを1行にまとめて、 それを実行して、 どうなるか見てみよう。 + +04:23.740 --> 04:26.950 +私たちはここに小さなユーザー・インターフェイスを持っている。 + +04:26.950 --> 04:28.810 +ハローと打つよ。 + +04:28.960 --> 04:30.910 +そして、 私は送信を押すつもりだ。 + +04:31.510 --> 04:34.510 +そして、 私に向かって怒鳴るような挨拶が返ってきた。 + +04:34.510 --> 04:38.920 +素晴らしい操作性を備えたユーザーインターフェースだ。 + +04:38.920 --> 04:46.030 +そしてそれはすべて、 この、 この、 ブラウザーの中で動いている。 + +04:46.150 --> 04:52.720 +ここでひとつお気づきの点があるとすれば、 ここにフラグボタンがあり、 フラグ付きというフォルダが作成されていることだ。 + +04:52.720 --> 04:58.510 +これは、 機械学習でよくあるユースケースで、 ユーザーが何が起こっているかを確認し、 + +04:58.510 --> 05:08.080 +結果に問題があればメモを取ることができるようにしたい場合です。 + +05:08.320 --> 05:12.850 +その代わりに、 フラグを立てることを許可するイコール + +05:12.850 --> 05:16.780 +"never "を渡すのだ。 + +05:16.780 --> 05:22.870 +だから今、 その代わりにそれを実行すると、 あー、 繰り返しになるけど、 このように1行で済ませることも同じようにできたのに、 + +05:22.870 --> 05:27.970 +2行にしたことがちょっと恨めしいよ。 + +05:28.000 --> 05:29.320 +一本の線。 + +05:29.320 --> 05:33.850 +そしてここに、 ユーザー・インターフェイスがある。 + +05:34.780 --> 05:38.110 +ええと、 それで、 この件に関していくつかやったことがあるんだけど、 それについて言っておきたいことがあるんだ。 + +05:38.140 --> 05:43.150 +そのうちのひとつは、 これらのケースのいずれかを選択した場合、 この一番上にリンクが表示されます。 + +05:43.150 --> 05:51.220 +このリンクをクリックすると、 このようにまったく別のウィンドウにインターフェイスが表示される。 + +05:51.250 --> 05:51.820 +行こう。 + +05:51.850 --> 05:52.750 +こんにちは。 + +05:55.030 --> 05:56.830 +そして、 うまくいくんだ。 + +05:56.860 --> 06:04.180 +というのも、 Gradioを実行すると、 バックグラウンドで小さなウェブ・サーバーが動くからだ。 + +06:04.270 --> 06:08.350 +ええと、 最初に空いているポートを見つけて、 ローカルで実行するんだ。 + +06:08.350 --> 06:13.750 +何回目か、 何回目か......760回目くらいからが始まりで、 そこからが本番だと思う。 + +06:13.750 --> 06:15.190 +だから、 最後の1本がそうだったんじゃないかと思う。 + +06:15.490 --> 06:15.880 +そうだね。 + +06:16.000 --> 06:17.380 +760だったか? 
+ +06:17.530 --> 06:20.830 +それで、 その小さなウェブサーバーを動かすんだ。 + +06:20.830 --> 06:25.600 +同じJupyterノートブックに出力することもできるし、 + +06:25.600 --> 06:29.290 +別の画面に表示することもできる。 + +06:29.290 --> 06:36.250 +しかしそれ以上に、 私がここで示したもう一つのことは、 シェア・イコール・トゥルーを通話に反映させることができるということだ。 + +06:36.250 --> 06:43.960 +そうすれば、 Gradioは同じインターフェイスをパブリックURLで提供し、 + +06:43.960 --> 06:53.430 +他の人と共有することができます。 + +06:53.430 --> 06:58.740 +そして、 この部分は少し心を曲げる部分でもある。 + +06:58.740 --> 07:05.220 +誰かがこのユーザー・インターフェースを表示させたら、 今すぐにでも表示させることができる。 + +07:05.220 --> 07:06.690 +舞台裏ではもう少しいろいろなことが起こっている。 + +07:06.690 --> 07:07.380 +来たぞ。 + +07:07.380 --> 07:08.490 +これがユーザーインターフェースだ。 + +07:08.490 --> 07:10.560 +見た目はもちろんこれと同じだ。 + +07:10.560 --> 07:11.640 +こんにちは。 + +07:11.640 --> 07:13.380 +そして、 それがうまくいくのを見るだろう。 + +07:14.940 --> 07:16.530 +ここで何が起きているのか。 + +07:16.530 --> 07:19.200 +これはもちろんグラディオが提供している。 + +07:19.200 --> 07:25.080 +しかし、 submitを呼び出したとき、 submitを押して関数を呼び出したとき、 その関数helloは、 + +07:25.080 --> 07:29.580 +このJupyter環境の私のローカル・ボックスで実行されている。 + +07:29.670 --> 07:32.250 +ちょっとクレイジーなんだ。 + +07:32.280 --> 07:34.920 +私のボックスで実行されているコードはそのまま実行されている。 + +07:34.920 --> 07:37.560 +公開されているURLがあるだけだ。 + +07:37.590 --> 07:39.000 +一種のマジックだ。 + +07:39.000 --> 07:40.620 +ええと、 どういう意味か説明させてください。 + +07:40.620 --> 07:44.340 +ここに戻って、 ここに印刷することで + +07:46.680 --> 07:52.020 +シャウトがインプットされた。 + +07:54.840 --> 07:58.650 +だから今、 私たちは何が起こっているのかを明確にしている。 + +07:59.130 --> 08:01.980 +だから、 それを実行すると、 shoutが入力で呼び出されたと表示される。 + +08:02.010 --> 08:02.550 +こんにちは。 + +08:02.580 --> 08:06.480 +では、 ここに戻ってもう一度実行してみよう。 + +08:07.770 --> 08:12.240 +これでまた公開URLで実行されるようになった。 + +08:13.020 --> 08:14.430 +来たぞ。 + +08:16.020 --> 08:17.190 +これから打つよ。 + +08:17.190 --> 08:20.070 +これはとてもクールだ。 + +08:20.400 --> 08:22.110 +そして送信を押す。 + +08:22.230 --> 08:24.570 +そして明らかに、 これは非常にクールだ。 + +08:24.600 --> 08:27.570 +これはグラディオが主催している。 + +08:27.570 --> 08:36.030 +しかし、 ちょっと注目すべき点は、 ここに戻って出力を見てみると、 shoutが入力で呼ばれていることだ。 + +08:36.030 --> 08:37.320 +これはとてもクールだ。 + +08:37.320 --> 08:41.340 +つまり、 実行されている機能は私のボックスで実行されているのだ。 + 
+08:41.340 --> 08:50.010 +ユーザー・インターフェースは公共ラジオGradioのウェブサイトを通じて提供されているが、 コードは私のローカル・ボックス上で動いている。 + +08:50.010 --> 08:54.690 +つまり、 自分のローカル・ボックスで動作するモデルを書き、 インターフェイスを構築し、 それを自分のためにローカルに立ち上げることも、 + +08:54.690 --> 08:59.370 +他の人と共有することもできるということだ。 + +08:59.370 --> 09:06.870 +そして、 共有されたユーザー・インターフェイスで作業している人たちは、 自分のボックス上で動いているコードを信じられないほど便利なものとして呼び出しているのだ。 + +09:06.870 --> 09:12.210 +そして、 想像できるように、 人々と共同作業をしたり、 モデルを共有したり、 同僚に協力してもらったりするのに、 + +09:12.210 --> 09:15.600 +これ以上簡単なものはない。 + +09:16.710 --> 09:20.370 +よし、 では続けてもう2つほど見せよう。 + +09:20.370 --> 09:25.590 +これから入出力を指定するインターフェイスを表示する。 + +09:25.800 --> 09:30.810 +ここで私がやっていることは、 入力がリストになっているということだ。 + +09:30.810 --> 09:32.640 +ただ、 一つだけあるんだ。 + +09:32.640 --> 09:34.080 +テキストボックスだ。 + +09:34.080 --> 09:37.740 +メッセージのラベルがあり、 6行ある。 + +09:37.740 --> 09:41.190 +出力はレスポンスで、 8行ある。 + +09:41.340 --> 09:43.650 +そして、 シャウトという関数を呼び出している。 + +09:43.710 --> 09:49.620 +それを見てみよう。 + +09:50.340 --> 09:53.610 +そして、 それはあなたが期待するように出てきて、 あなたのために少し大きくする。 + +09:53.610 --> 09:54.480 +これでよし。 + +09:54.510 --> 09:55.710 +メッセージがある。 + +09:55.710 --> 09:56.760 +手応えはある。 + +09:56.760 --> 10:02.010 +もう一度挨拶をして、 送信を押すことができる。 + +10:02.010 --> 10:05.610 +そしてこっちは大文字バージョン。 + +10:05.700 --> 10:08.220 +とても簡単で、 素晴らしく、 設定可能だ。 + +10:08.250 --> 10:10.860 +良いUIに見える。 + +10:11.790 --> 10:15.930 +さて、 私が次に何を提案するかは想像がつくだろう。 + +10:16.380 --> 10:17.820 +素晴らしいと思わないか? + +10:17.850 --> 10:23.460 +シャウトという言葉を別の機能で置き換えることができたら素晴らしいと思わないか? + +10:23.490 --> 10:24.720 +何か機能は? + +10:24.720 --> 10:28.170 +先に書いたメッセージGPT関数ではダメなのか? + +10:28.170 --> 10:35.820 +シャウトという言葉をその関数に置き換えるだけで、 LLMの上にユーザー・インターフェースを構築することができる。 + +10:35.820 --> 10:37.380 +それは素晴らしいことだと思わない? + +10:37.410 --> 10:38.640 +素晴らしいと思わないか? 
+ +10:38.970 --> 10:40.020 +ハハハ。 + +10:40.050 --> 10:41.640 +では、 見てみよう。 + +10:41.640 --> 10:42.770 +見てみよう。 + +10:42.800 --> 10:43.700 +さあ、 始めよう。 + +10:43.700 --> 10:44.420 +同じだ。 + +10:44.420 --> 10:45.500 +同じコードだ。 + +10:45.500 --> 10:46.910 +機能を入れ替えました。 + +10:46.910 --> 10:47.840 +もはやシャウトではない。 + +10:47.840 --> 10:49.400 +今はメッセージGPTだ。 + +10:49.610 --> 10:51.290 +どうなるか見てみよう。 + +10:51.290 --> 10:53.480 +別ウィンドウで表示しよう。 + +10:53.510 --> 10:54.740 +これだ。 + +10:54.920 --> 11:02.240 +ジョークを言ってください。 + +11:02.240 --> 11:04.430 +何が戻ってくるか見てみよう + +11:05.180 --> 11:07.310 +スケアクロウが受賞した理由は? + +11:07.310 --> 11:10.550 +彼はその分野で傑出していたからだ。 + +11:10.580 --> 11:12.080 +いいジョークだね。 + +11:13.730 --> 11:15.350 +ああ、 わかった。 + +11:15.470 --> 11:19.100 +GPTフォーミニの素晴らしいジョーク。 + +11:19.100 --> 11:25.730 +そして、 LLMを舞台裏で使っているユーザー・インターフェースを構築することが、 + +11:25.730 --> 11:29.510 +いかに簡単なことかを示す好例だ。 + +11:29.570 --> 11:34.760 +皆さんも私と同じように、 この経験で大喜びしてほしい。 + +11:34.850 --> 11:37.370 +グラディオはすごいと思う。 + +11:37.400 --> 11:41.330 +それではまた次回、 グラディオをさらに有効活用するためにお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/59166461/ko_KR.srt b/week5/community-contributions/subtitles/srts/59166461/ko_KR.srt new file mode 100755 index 0000000..4185157 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166461/ko_KR.srt @@ -0,0 +1,607 @@ +WEBVTT + +00:00.710 --> 00:02.690 +연구실에 잘 돌아왔어요 + +00:02.690 --> 00:08.300 +주피터 연구소에 왔어요 이제 2주 차로 접어들죠 + +00:08.300 --> 00:10.790 +이제 둘째 날로 가보죠 + +00:10.820 --> 00:12.440 +라디오 데이예요 + +00:12.470 --> 00:17.390 +오늘 우리는 사용자 인터페이스를 만들 겁니다 말도 안 되게 단순한 그래디오 프레임워크를 이용해서요 + +00:17.390 --> 00:19.010 +기쁨을 준비하세요 + +00:19.760 --> 00:20.810 +여기요 + +00:20.810 --> 00:22.490 +수입도 좀 할 거예요 + +00:22.490 --> 00:27.140 +그러디오가 GR를 상징하는 마법의 대사예요 + +00:27.170 --> 00:28.400 +그렇다고 대답했죠 + +00:28.430 --> 00:30.200 +자, 됐어요 + +00:30.200 --> 00:34.700 +그리고 일반적인 접근법으로 환경 변수를 로드하죠 + +00:35.030 --> 00:43.010 +다음으로 익숙한 셀은 API를 get up up 준비시키는 + +00:43.040 --> 00:46.250 +다소 유사한 명령 3개죠 + +00:46.790 --> 
00:47.630 +네 + +00:47.630 --> 00:53.960 +변수에 시스템 메시지를 설정하는 것으로 시작합니다 아주 일반적인 + +00:53.990 --> 01:00.380 +UI가 될 거예요 시스템 메시지의 표준 시작점인 보조죠 + +01:00.620 --> 01:02.630 +그렇게 할 거예요 + +01:03.080 --> 01:07.490 +이제 GPT 4 미니와의 통화를 마무리할 거예요 + +01:07.490 --> 01:14.540 +이런 간단한 함수에서 GPT는 프롬프트 메시지의 등호를 취하죠 + +01:14.540 --> 01:18.380 +이쯤 되면 이 구조물에 질렸길 바라요 + +01:18.380 --> 01:24.620 +잘 아시네요 간단한 대화 구조와 사전 시스템 목록 시스템 메시지 사용자, + +01:24.620 --> 01:30.620 +사용자 프롬프트 완성 오픈아이 채팅 .완성, .생성이죠 + +01:30.650 --> 01:33.620 +모형을 전달하고 메시지를 전달하죠 + +01:33.620 --> 01:37.400 +완료 .선택으로 반환되죠 + +01:37.400 --> 01:40.370 +선택 닷 메시지 콘텐츠를 선택해요 + +01:40.370 --> 01:45.800 +GPT 메시지를 래핑하고 응답을 반환하는 함수죠 + +01:45.800 --> 01:47.510 +실행해 보죠 + +01:47.510 --> 01:49.700 +빨리 시험해 보죠 + +01:49.730 --> 01:50.450 +뭐라고 하죠? + +01:50.480 --> 01:58.550 +몇 가지를 시도해 보았습니다 GPT의 철자를 알고 있고 GPT의 장단점을 알고 + +01:58.550 --> 01:59.180 +있죠 + +01:59.180 --> 02:00.260 +하나만 더 해 보죠 + +02:00.260 --> 02:02.780 +시사 문제에는 좋지 않죠 + +02:02.780 --> 02:04.730 +아주 단순한 걸로 가죠 + +02:04.730 --> 02:13.430 +오늘 날짜는 언제죠? GPT는 오늘 날짜를 어떻게 인식하는지 보죠 + +02:14.420 --> 02:18.080 +오늘은 2023년 10월 3일이에요 + +02:18.410 --> 02:19.880 +몇 가지 알아둘 게 있어요 + +02:19.880 --> 02:24.140 +하나는 예상대로 시사 감각이 별로 없다는 거예요 + +02:24.140 --> 02:29.660 +둘째, 훈련 데이터는 2023년 10월 초까지 지속된 것으로 보입니다. + +02:29.660 --> 02:34.310 +9월이라고 했을 때 제가 암시했던 거죠. + +02:34.310 --> 02:39.860 +10월인 줄 알았는데 10월 3일이면 논란의 여지가 있겠네요 + +02:39.860 --> 02:42.290 +9월 말이 됐어요 + +02:42.320 --> 02:46.310 +10월 초가 정답이겠죠 + +02:46.310 --> 02:48.950 +아주 간단한 함수예요 + +02:48.950 --> 02:52.190 +잠시 후 다루게 될 내용이니 마음 한구석에 두시고요. + +02:52.190 --> 02:54.320 +사용자 인터페이스를 만들 때죠 + +02:54.320 --> 02:56.770 +우선 데이터 과학과는 아무 상관 없어요 + +02:56.770 --> 02:59.680 +간단한 사용자 인터페이스를 만드는 방법을 보죠 + +02:59.680 --> 03:03.640 +여기 샤우트라는 아주 간단한 함수가 있어요 + +03:03.640 --> 03:06.040 +샤우트에는 텍스트가 좀 들어가요 + +03:06.040 --> 03:10.330 +대문자로 답장할 거예요 + +03:10.330 --> 03:11.620 +아주 간단한 질문이죠 + +03:11.620 --> 03:13.150 +그럼 인사할까요? 
+ +03:13.150 --> 03:17.500 +Hello와 대문자 큰 소리로 대답하죠 + +03:17.890 --> 03:19.060 +네 + +03:19.060 --> 03:29.620 +입력과 출력을 가지고 정교한 사용자 인터페이스를 구축하는 것은 작은 안녕을 큰 안녕으로 바꿀 수 + +03:29.650 --> 03:33.250 +있습니다. 이렇게 간단해요. + +03:33.280 --> 03:36.910 +두 줄짜리 뷰는 훌륭한 인터페이스죠 + +03:36.910 --> 03:38.380 +새 인터페이스가 필요하단 뜻이죠 + +03:38.380 --> 03:41.260 +원하는 함수를 말해주세요 + +03:41.260 --> 03:46.630 +사용자 인터페이스를 중심으로 한 함수인데 이 경우엔 샤우트죠 + +03:46.660 --> 03:51.730 +이 함수는 제가 함수 이름을 넘기고 여러분이 넘기는 걸 설명해요 + +03:51.730 --> 03:53.560 +입력과 출력을 통과해야 하죠 + +03:53.560 --> 03:57.820 +그래디오는 융통성 있게 장면을 연출했어요 + +03:57.820 --> 04:01.390 +입력과 출력이 여러 개라면 목록도 넘길 수 있어요 + +04:01.390 --> 04:06.280 +입력도 하나, 출력도 하나라면 문자열로서 그게 뭔지 그냥 말할 수 있어요 + +04:06.280 --> 04:07.240 +그거면 돼요 + +04:07.240 --> 04:08.950 +다 해결될 거예요 + +04:09.070 --> 04:14.020 +이건 코드 두 줄이지만 여러분께 보여드리기 위해 코드 한 줄로 + +04:14.020 --> 04:18.070 +할 수도 있어요 이렇게 보여드리고 있으니까요 + +04:18.130 --> 04:23.740 +모든 걸 한 줄에 넣고 실행해 어떻게 되는지 보죠 Put it's go + +04:23.740 --> 04:26.950 +사용자 인터페이스가 있어요 + +04:26.950 --> 04:28.810 +안녕하세요라고 칠게요 + +04:28.960 --> 04:30.910 +제출을 누르죠 + +04:31.510 --> 04:34.510 +그리고 저한테도 큰 소리로 인사를 하죠 + +04:34.510 --> 04:38.920 +훌륭한 컨트롤이 있는 사용자 인터페이스죠 + +04:38.920 --> 04:46.030 +전부 이 브라우저 안에서 실행되도록 만들어졌어요 + +04:46.150 --> 04:51.430 +한 가지 눈치채셨을지 모르겠는데 여기 플래그 버튼이 있어요 플래그드라는 폴더가 + +04:51.430 --> 04:52.720 +여기 만들어졌죠 + +04:52.720 --> 04:58.510 +이건 Gradio와 함께 나오는 기능으로 사용자가 결과를 플래그 지정할 수 + +04:58.510 --> 05:03.250 +있게 해줍니다 머신 러닝에서 흔히 사용되는 경우죠 사용자가 무슨 + +05:03.280 --> 05:08.080 +일이 벌어지는지 보고 결과에 문제가 있는지 기록하길 원하죠 + +05:08.320 --> 05:12.850 +하지만 그 기능성은 특별히 우리가 원하는 게 아니죠 그걸 제거하는 + +05:12.850 --> 05:16.780 +방법은 허용 플래깅 =never를 넘기는 거예요 + +05:16.780 --> 05:22.870 +이제 실행해 볼게요. 두 줄로 놓은 것이 조금 억울하네요. 한 줄로 놓을 수도 + +05:22.870 --> 05:27.970 +있었는데 말이죠. 얼마나 간단한지 보여드리기 위해서요. 
+ +05:28.000 --> 05:29.320 +한 줄이에요 + +05:29.320 --> 05:33.850 +Get in UI 사용자 인터페이스예요 + +05:34.780 --> 05:38.110 +이것과 관련해 제가 한 게 몇 가지 있어요 + +05:38.140 --> 05:43.150 +첫 번째는 이 두 케이스 중 어느 것이든 여기 위에 링크가 있어요 + +05:43.150 --> 05:49.000 +이 링크를 클릭하면 완전히 분리된 창에서 인터페이스가 나타납니다 + +05:49.000 --> 05:51.220 +마법 같죠 + +05:51.250 --> 05:51.820 +가요 + +05:51.850 --> 05:52.750 +안녕하세요 + +05:55.030 --> 05:56.830 +잘 어울려요 + +05:56.860 --> 06:04.180 +그건 그레이디오를 실행할 때 백그라운드에서 실행되는 작은 웹 서버를 실행하기 때문이죠 + +06:04.270 --> 06:08.350 +처음 발견하는 포트에서 무료로 로컬에서 실행하는 거죠 + +06:08.350 --> 06:13.750 +몇 번이 지나고 나서... 760번이었던 것 같아요 거기서부터 시작이죠 + +06:13.750 --> 06:15.190 +아마 마지막이 그거였을 거예요 + +06:15.490 --> 06:15.880 +네 + +06:16.000 --> 06:17.380 +760개였나요? + +06:17.530 --> 06:20.830 +저 웹 서버를 실행할 거예요 + +06:20.830 --> 06:25.600 +그래서 같은 주피터 노트북에서 출력물에 보여줄 수도 있고 별도의 + +06:25.600 --> 06:29.290 +화면에서 불러올 수도 있어요 그 자체로 훌륭하죠 + +06:29.290 --> 06:35.500 +하지만 그보다 더 중요한 건 여기서 보여드린 다른 건 호출에서 공유 = true를 넘기는 + +06:35.500 --> 06:36.250 +거예요 + +06:36.250 --> 06:43.960 +그렇게 하면 Gadio는 공용 URL 내의 동일한 인터페이스를 제공해 다른 사람들과 공유할 + +06:43.960 --> 06:50.010 +수 있죠 여러분이 함께 일하는 다른 사람, 동료가 같은 모델을 사용해 여러분의 + +06:50.010 --> 06:53.430 +프로토타입을 작업할 수 있도록요 + +06:53.430 --> 06:58.740 +이 부분은 좀 기가 막힌 비트예요 + +06:58.740 --> 07:05.220 +누가 사용자 인터페이스를 언급하면∙∙∙ 지금 할 건데 시간이 좀 걸려요 + +07:05.220 --> 07:06.690 +비트가 더 있어요 + +07:06.690 --> 07:07.380 +나오네요 + +07:07.380 --> 07:08.490 +이게 사용자 인터페이스예요 + +07:08.490 --> 07:10.560 +당연히 이것과 똑같죠 + +07:10.560 --> 07:11.640 +내가 뛰어갈게 여보세요 + +07:11.640 --> 07:13.380 +효과가 있을 거예요 + +07:14.940 --> 07:16.530 +무슨 일이 일어나고 있는지요 + +07:16.530 --> 07:19.200 +이건 물론 그래디오가 제공하죠 + +07:19.200 --> 07:25.080 +보내기를 호출할 때 보내기를 누르고 함수를 호출할 때 그 함수가 hello를 + +07:25.080 --> 07:29.580 +실행합니다 여기 주피터 환경의 제 로컬 상자에서요 + +07:29.670 --> 07:32.250 +비트가 좀 심하죠 + +07:32.280 --> 07:34.920 +박스에서 실행되는 것처럼 여전히 코드를 실행하고 있어요 + +07:34.920 --> 07:37.560 +공개적으로 사용 가능한 URL 뿐이죠 + +07:37.590 --> 07:39.000 +마법 같아요 + +07:39.000 --> 07:40.620 +무슨 뜻인지 설명해 드리죠 + +07:40.620 --> 07:44.340 
+여기로 돌아가서 프린트하는 거죠 + +07:46.680 --> 07:52.020 +입력된 외침이 울렸어요 + +07:54.840 --> 07:58.650 +이제 상황을 분명히 설명하고 있어요 + +07:59.130 --> 08:01.980 +실행하면 Shout이 입력과 함께 호출되었다고 나오죠 + +08:02.010 --> 08:02.550 +안녕하세요 + +08:02.580 --> 08:06.480 +이제 여기로 돌아와서 다시 실행해보죠 + +08:07.770 --> 08:12.240 +이제 다시 공용 URL로 실행되고 있어요 + +08:13.020 --> 08:14.430 +나오네요 + +08:16.020 --> 08:17.190 +타이핑 할게요 + +08:17.190 --> 08:20.070 +정말 멋져요 + +08:20.400 --> 08:22.110 +제출을 누르세요 + +08:22.230 --> 08:24.570 +물론 이것도 아주 근사하죠 + +08:24.600 --> 08:27.570 +이 쇼는 그래디오가 진행해요 + +08:27.570 --> 08:34.050 +하지만 다시 한 번 주목할 만한 건 여기로 돌아와서 제 출력을 보면 입력과 함께 호출된 + +08:34.050 --> 08:36.030 +호출이 보이시죠 + +08:36.030 --> 08:37.320 +정말 멋져요 + +08:37.320 --> 08:41.340 +실행되는 함수는 제 상자에서 실행되죠 + +08:41.340 --> 08:47.850 +사용자 인터페이스는 공용 라디오 그래디오 웹사이트를 통해 제공되지만 코드는 제 로컬 박스에서 실행되고 + +08:47.850 --> 08:50.010 +있어요, 정말 대단하죠 + +08:50.010 --> 08:54.690 +그 말은 즉 로컬 박스에서 실행되는 모델을 작성할 수 있고 인터페이스를 빌드할 + +08:54.690 --> 08:59.370 +수 있고 로컬에서 불러올 수도 있고 다른 사람과 공유할 수도 있다는 거죠 + +08:59.370 --> 09:04.740 +사람들이 공유 사용자 인터페이스를 작업하면서 여러분의 컴퓨터에서 실행 중인 코드를 여전히 + +09:04.740 --> 09:06.870 +호출하고 있어요 아주 유용하죠 + +09:06.870 --> 09:12.210 +상상이 되시겠지만 사람들과 협력하고 모델을 공유하고 동료들이 + +09:12.210 --> 09:15.600 +함께 일하게 하는 건 정말 쉬운 일이에요 + +09:16.710 --> 09:20.370 +좋아요, 몇 가지 더 보여드리죠 + +09:20.370 --> 09:25.590 +이제 인터페이스를 불러올게요 입력과 출력을 지정해주는 거죠 + +09:25.800 --> 09:30.810 +여길 보시면 제가 하는 게∙∙∙ 입력이∙∙∙ 목록이죠 + +09:30.810 --> 09:32.640 +한 가지만 들어 있어요 + +09:32.640 --> 09:34.080 +텍스트 박스죠 + +09:34.080 --> 09:37.740 +메시지도 라벨에 6줄이나 돼요 + +09:37.740 --> 09:41.190 +반응이 출력력이고요 줄이 8개예요 + +09:41.340 --> 09:43.650 +함수 샤우트라고 부르네요 + +09:43.710 --> 09:49.620 +그걸 보죠 여기로 불러오죠 + +09:50.340 --> 09:53.610 +비트가 예상대로 나와서 좀 더 커지죠 + +09:53.610 --> 09:54.480 +됐어요 + +09:54.510 --> 09:55.710 +메시지가 있어요 + +09:55.710 --> 09:56.760 +반응이 있어요 + +09:56.760 --> 10:02.010 +다시 Hello를 하고 제출을 누를 수 있어요 + +10:02.010 --> 10:05.610 +여기 대문자 버전이 있어요 + +10:05.700 --> 10:08.220 +아주 쉽고, 멋지고, 구성할 수 있죠 + +10:08.250 --> 10:10.860 +좋은 UI 같네요 + +10:11.790 
--> 10:15.930 +이제 뭘 제안할지 짐작이 가실 거예요 + +10:16.380 --> 10:17.820 +멋지지 않아요? + +10:17.850 --> 10:23.460 +샤우트라는 단어를 다른 함수로 대체할 수 있다면 멋지지 않을까요? + +10:23.490 --> 10:24.720 +함수 같은 거요? + +10:24.720 --> 10:28.170 +왜 아까 만든 메시지 GPT 함수가 아닌 거죠? + +10:28.170 --> 10:33.780 +그냥 Shout이라는 단어를 그 함수로 대체할 수 있어요 그럼 LLM 위에 빌드된 + +10:33.780 --> 10:35.820 +사용자 인터페이스가 생기죠 + +10:35.820 --> 10:37.380 +정말 멋지지 않아요? + +10:37.410 --> 10:38.640 +멋지지 않아요? + +10:38.970 --> 10:40.020 +네 + +10:40.050 --> 10:41.640 +한번 보죠 + +10:41.640 --> 10:42.770 +한번 보죠 + +10:42.800 --> 10:43.700 +시작할게요 + +10:43.700 --> 10:44.420 +저도요 + +10:44.420 --> 10:45.500 +코드가 같아요 + +10:45.500 --> 10:46.910 +함수를 대체했어요 + +10:46.910 --> 10:47.840 +소리 지르는 게 아니에요 + +10:47.840 --> 10:49.400 +GPT에 메시지를 보내죠 + +10:49.610 --> 10:51.290 +어떻게 되나 보죠 + +10:51.290 --> 10:53.480 +다른 창으로 보죠 + +10:53.510 --> 10:54.740 +여기 있네요 + +10:54.920 --> 11:02.240 +농담 하나 해 주시면 제출할게요 + +11:02.240 --> 11:04.430 +결과를 기다려 보죠 + +11:05.180 --> 11:07.310 +허수아비가 왜 상을 받았냐고요? + +11:07.310 --> 11:10.550 +자기 분야에서 뛰어난 사람이었으니까요 + +11:10.580 --> 11:12.080 +재미있는 농담이네요 + +11:13.730 --> 11:15.350 +네 + +11:15.470 --> 11:19.100 +GPT 4 미니의 멋진 농담이네요 + +11:19.100 --> 11:25.730 +사용자 인터페이스를 크게 만드는 게 얼마나 쉬운지 보여주는 좋은 예죠 + +11:25.730 --> 11:29.510 +LLM을 이용해 뒤에서 실행되는 거요 + +11:29.570 --> 11:34.760 +당신도 나만큼 이 경험을 즐기길 바라요 + +11:34.850 --> 11:37.370 +그래디오는 대단해요 + +11:37.400 --> 11:41.330 +다음 시간에는 그래디오를 더 유용하게 사용할 겁니다 TREEN METING TO DETING TO DETING TO DETING TO DETING TO DETING TO DETING TO DETING TO DETING TO DETING TO DETING TO DETING TO DETING TO DETING TO DETING TO DETING TO DETING TO DETING TO DETING TO DETING TO DETY diff --git a/week5/community-contributions/subtitles/srts/59166465/en_US.srt b/week5/community-contributions/subtitles/srts/59166465/en_US.srt new file mode 100755 index 0000000..38b9bb6 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166465/en_US.srt @@ -0,0 +1,469 @@ +WEBVTT + +00:00.620 --> 00:05.360 +Welcome back to the JupyterLab on Gradio day, 
so you'll remember where we left off. + +00:05.360 --> 00:14.990 +We'd written two user interfaces, one of them for chatting with GPT four using the function uh, stream + +00:14.990 --> 00:19.460 +GPT, and one of them with Claude, with stream Claude. + +00:19.580 --> 00:21.980 +Uh, and so now I put it to you. + +00:22.010 --> 00:25.310 +Supposing we wrote a function like this. + +00:25.310 --> 00:28.160 +This function is like a composite function. + +00:28.160 --> 00:32.330 +It's a function that calls others in that it's called stream model. + +00:32.330 --> 00:40.040 +And it takes a prompt and it takes a model and it says if the model is GPT, then it calls stream GPT. + +00:40.370 --> 00:42.530 +If the model is Claude, it calls stream. + +00:42.530 --> 00:44.420 +Claude otherwise throws an error. + +00:44.420 --> 00:46.130 +So it needs to be GPT or Claude. + +00:46.130 --> 00:51.140 +And then it basically iterates through and yields each chunk in turn. + +00:51.140 --> 00:53.420 +So this is in fact I called it a function, but it's not. + +00:53.420 --> 00:54.440 +It's a generator. + +00:54.590 --> 01:01.970 +Um, and it yields each chunk from one or the other models depending on which model is called. + +01:02.000 --> 01:03.500 +Well, obviously that's going to work fine. + +01:03.530 --> 01:07.730 +That's now a function which has a few more variables. + +01:07.730 --> 01:12.140 +So as far as Gradio is concerned, that's just another function. + +01:12.140 --> 01:16.490 +And that means that we can build a user interface very easily around that function. + +01:16.490 --> 01:17.660 +Let's look at it. + +01:17.690 --> 01:18.800 +Here it is. + +01:18.800 --> 01:20.450 +Here's an interface. + +01:20.480 --> 01:26.840 +The function it's taking is just this this sort of hybrid generator that we just written the inputs. + +01:26.840 --> 01:29.000 +Of course we're now going to have two inputs. + +01:29.000 --> 01:30.890 +One of them is going to be your message. 
+ +01:30.890 --> 01:34.010 +And the other of them I wish it were this easy. + +01:34.040 --> 01:39.230 +Drop down with two values GPT or Claude label select model. + +01:39.230 --> 01:41.990 +And then, you know, have that as the output. + +01:42.470 --> 01:44.630 +Things are rarely that easy though. + +01:44.660 --> 01:47.600 +Oh, but this is gradio, so things really are that easy. + +01:47.690 --> 01:50.240 +Uh, sorry, I have to run this first. + +01:50.480 --> 01:51.320 +There we go. + +01:51.320 --> 01:52.100 +It's not that easy. + +01:52.130 --> 01:54.110 +You still do have to execute all of your code. + +01:54.290 --> 01:56.390 +Uh, so here we go. + +01:56.390 --> 02:05.750 +We bring it up, we say something like, how do I get from times Square to Grand Central? + +02:06.210 --> 02:08.160 +and we pick one of our models. + +02:08.160 --> 02:12.600 +Let's pick GPT and we submit that and they're streaming back in markdown. + +02:12.600 --> 02:17.310 +Is GPT response to that question of directions. + +02:17.610 --> 02:19.230 +Enjoy your visit again at the end. + +02:19.260 --> 02:20.190 +Very nice. + +02:20.310 --> 02:23.760 +Uh, I feel like it's giving more options this time, but there we go. + +02:23.790 --> 02:24.480 +Maybe not. + +02:24.600 --> 02:25.710 +You'll probably remember. + +02:25.980 --> 02:29.640 +Uh, I can then just flip to Claude and ask Claude the same question. + +02:29.640 --> 02:32.490 +And here is Claude's answer to the same question. + +02:32.640 --> 02:34.260 +Uh, using Claude haiku. + +02:34.290 --> 02:37.950 +That might explain why we're getting slightly shorter, more terse answers. + +02:37.950 --> 02:40.230 +But, uh, isn't that amazing? + +02:40.230 --> 02:40.890 +Isn't that cool? + +02:40.890 --> 02:42.300 +We just built this functionality. + +02:42.330 --> 02:46.470 +We can flip between two different models, ask the same question, get the responses. 
+ +02:46.560 --> 02:51.810 +Uh, you could just have this running sometime if you wanted a nice chat UI of your own and be able + +02:51.810 --> 02:53.610 +to bounce it around different models. + +02:53.670 --> 02:55.710 +Uh, it's a useful little tool. + +02:56.820 --> 03:04.050 +Um, and, uh, yeah, you can imagine an obvious exercise that I'll leave for you is to simply add + +03:04.050 --> 03:05.160 +Gemini to the mix. + +03:05.160 --> 03:05.670 +Why not? + +03:05.700 --> 03:06.450 +You can imagine. + +03:06.450 --> 03:07.230 +It's super easy. + +03:07.230 --> 03:08.070 +You just add in. + +03:08.070 --> 03:09.470 +Gemini is another option. + +03:09.470 --> 03:15.680 +I haven't shown you how to stream back from Gemini, but it's very similar and you can quickly google + +03:15.680 --> 03:20.900 +it to see the documentation is very clear and then add it into the mix, and then push that code so + +03:20.900 --> 03:22.820 +I can have it and share it with other students. + +03:22.820 --> 03:23.810 +That would be good. + +03:24.770 --> 03:25.700 +All right. + +03:25.700 --> 03:30.230 +So the next last for this lab is going to be okay. + +03:30.230 --> 03:35.810 +Let's take the company brochure generator we made last time and put a user interface around that. + +03:35.840 --> 03:37.070 +Wouldn't that be awesome. + +03:37.250 --> 03:41.750 +Uh, so now that you know, as I say, it's going to be really, really simple. + +03:41.750 --> 03:44.660 +So I've decided I'm going with the earlier version of the brochure. + +03:44.660 --> 03:48.200 +We're just going to use a, we're going to use just the landing page only. + +03:48.200 --> 03:52.850 +We're not going to do the, the two step process where we collect all the links, because that's maybe + +03:52.850 --> 03:54.350 +more involved than we need right now. 
+ +03:54.440 --> 04:00.050 +Um, we're just going to have a simpler version of the website class that has URL, title and text, + +04:00.050 --> 04:02.210 +and you'll remember how it works. + +04:02.210 --> 04:07.520 +We use the requests package, and we use the wonderful Beautifulsoup to parse to strip out things we + +04:07.520 --> 04:10.160 +don't care about and to get the text. + +04:10.160 --> 04:18.070 +And there is a little getcontext helper to give us sort of getcontents helper to give us the page title + +04:18.070 --> 04:19.840 +and body of the page. + +04:19.840 --> 04:21.160 +So that's our helper class. + +04:21.190 --> 04:22.180 +Remember to run it. + +04:22.210 --> 04:23.170 +System prompt. + +04:23.170 --> 04:27.310 +You're in a system that analyzes the contents of a company website landing page and creates a short + +04:27.310 --> 04:28.750 +brochure respond. + +04:28.750 --> 04:31.180 +In markdown there is a system prompt. + +04:31.180 --> 04:41.350 +So here is a stream brochure function that takes a company name, a URL and a model. + +04:42.160 --> 04:46.510 +Uh, and it's going to say please generate a company brochure for company name. + +04:46.510 --> 04:48.250 +Here is their landing page. + +04:48.250 --> 04:54.370 +And then we'll use our website helper class here to read in that URL and get the contents. + +04:54.370 --> 04:56.140 +So this is all making sense. + +04:56.140 --> 04:59.230 +We're just going to to get the contents of the website. + +04:59.230 --> 05:00.790 +We're going to turn that into a prompt. + +05:00.790 --> 05:04.330 +And then if it's GPT we're going to stream from GPT. + +05:04.360 --> 05:06.640 +If it's Claude, we're going to stream from Claude. + +05:06.850 --> 05:14.280 +Um, otherwise we'll raise an error and we will then make this a generator and yield the results chunk + +05:14.280 --> 05:15.510 +by chunk. 
+ +05:16.830 --> 05:22.920 +Uh, I realize it's a bit bit misleading to call this chunk because it's in fact not actually chunk + +05:22.920 --> 05:23.340 +by chunk. + +05:23.370 --> 05:25.560 +It's it's the full amount. + +05:25.590 --> 05:31.260 +So you might want to rename that something that's, uh, a better reflection of what this this is. + +05:32.160 --> 05:33.810 +But you get the idea. + +05:33.840 --> 05:36.030 +It should do the trick. + +05:36.420 --> 05:37.770 +Uh, so wouldn't it be nice? + +05:37.770 --> 05:39.420 +I'm going to stop saying that because it's going to get old. + +05:39.450 --> 05:40.470 +But it is nice. + +05:40.470 --> 05:45.090 +It is as simple as now just replacing the function with stream brochure. + +05:45.090 --> 05:46.560 +And you can see here the inputs. + +05:46.560 --> 05:48.570 +We of course have these three inputs. + +05:48.600 --> 05:49.800 +Now we have a company name. + +05:49.800 --> 05:51.420 +We have a landing page URL. + +05:51.420 --> 05:53.490 +And then we can pick the model. + +05:53.610 --> 05:56.520 +And let's give that a whirl. + +05:56.970 --> 05:58.170 +Uh here we go. + +05:58.170 --> 05:59.190 +Running locally. + +05:59.190 --> 06:03.960 +Bring it up so we can say company name hugging face. + +06:06.240 --> 06:09.510 +Landing page URL we can say. + +06:09.750 --> 06:10.830 +And we'll just do a. + +06:13.740 --> 06:19.040 +Hugging s.co and select model. + +06:19.040 --> 06:26.240 +We will ask GPT to be first add it and then just press submit. + +06:26.420 --> 06:27.920 +And here it goes. + +06:27.920 --> 06:34.280 +Here is our company brochure for Huggingface streaming back in markdown based on our web scrape. + +06:34.280 --> 06:35.540 +It's all there. + +06:35.540 --> 06:39.680 +It's even got links down at the bottom for different things. + +06:39.770 --> 06:43.220 +Uh, and yeah, that link looks like that is correct. + +06:43.220 --> 06:44.270 +That's going to work. 
+ +06:44.270 --> 06:49.700 +Some of these links look like they're not going to work because of, uh, how it's been generated. + +06:49.700 --> 06:53.900 +But still, that's a pretty impressive web page, I've got to say, an impressive brochure. + +06:53.900 --> 06:55.610 +I mean, I love it. + +06:55.640 --> 06:58.520 +Let's see what Claude does with this Claude haiku. + +06:58.550 --> 06:59.060 +Of course. + +06:59.060 --> 07:05.120 +So it's, uh, a slimmer model, but it's perfectly acceptable. + +07:05.120 --> 07:07.190 +Let's build the future of AI together. + +07:07.280 --> 07:11.150 +Uh, very nice brochure there from haiku. + +07:11.570 --> 07:13.880 +Uh, and there we go. + +07:13.910 --> 07:14.840 +I'm. + +07:14.840 --> 07:21.400 +I'm blown away every time I use gradio by how simple it is, how effective it is. + +07:21.400 --> 07:27.250 +We've just built a user interface around our brochure where you can pick between different models and + +07:27.250 --> 07:29.650 +let's face it, it was easy. + +07:29.860 --> 07:33.880 +So the to dos for you, the ways you can make this better are there are so many. + +07:33.940 --> 07:38.260 +You could, as I say, add Gemini not only to the earlier example, but to this one as well. + +07:38.410 --> 07:47.050 +Another idea is you could add in another selection, another drop down where you can pick the the style, + +07:47.050 --> 07:52.240 +the tone you remember last time, how we could easily change the system prompt so that the brochure + +07:52.240 --> 07:55.540 +was in a humorous, jokey, jovial tone. + +07:55.840 --> 08:00.940 +Well, why don't you set it so you can pick from that drop down, choose a different tone, and then + +08:00.940 --> 08:04.390 +it will generate a company brochure using that tone. + +08:04.510 --> 08:07.330 +Uh, it's actually super easy to do that. + +08:07.450 --> 08:08.410 +So give it a try. + +08:08.440 --> 08:08.980 +Do that. 
+ +08:08.980 --> 08:13.990 +And you'll have really beefed up this application to be something that that is increasingly high in + +08:14.020 --> 08:14.920 +functionality. + +08:14.950 --> 08:16.930 +So I hope you have fun doing that. + +08:16.930 --> 08:17.860 +Check in the code afterwards. + +08:17.860 --> 08:18.880 +So I get to see it. + +08:18.880 --> 08:22.180 +And I will see you in the next lecture for the wrap up. diff --git a/week5/community-contributions/subtitles/srts/59166465/ja_JP.srt b/week5/community-contributions/subtitles/srts/59166465/ja_JP.srt new file mode 100755 index 0000000..44d8b03 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166465/ja_JP.srt @@ -0,0 +1,421 @@ +WEBVTT + +00:00.620 --> 00:05.360 +グラジオのJupyterLabにようこそ。 + +00:05.360 --> 00:19.460 +ひとつはGPTとチャットするためのもので、 GPTのストリーム機能を使ったもの。 + +00:19.580 --> 00:21.980 +ええと、 それで今、 君に聞いてみたんだ。 + +00:22.010 --> 00:25.310 +仮にこのような関数を書いたとしよう。 + +00:25.310 --> 00:28.160 +この関数は複合関数のようなものだ。 + +00:28.160 --> 00:32.330 +ストリームモデルと呼ばれる、 他を呼び出す関数だ。 + +00:32.330 --> 00:40.040 +そして、 プロンプトを受け取り、 モデルを受け取り、 モデルがGPTであれば、 ストリームGPTを呼び出す。 + +00:40.370 --> 00:42.530 +モデルがクロードの場合、 ストリームを呼び出す。 + +00:42.530 --> 00:44.420 +そうでなければクロードはエラーを投げる。 + +00:44.420 --> 00:46.130 +だからGPTかクロードである必要がある。 + +00:46.130 --> 00:51.140 +そして、 基本的に反復して各チャンクを順番に降ろす。 + +00:51.140 --> 00:53.420 +だから、 私はこれを関数と呼んだが、 実はそうではないのだ。 + +00:53.420 --> 00:54.440 +発電機だ。 + +00:54.590 --> 01:01.970 +そして、 どちらのモデルが呼び出されたかに応じて、 どちらか一方のモデルからチャンクを生成する。 + +01:02.000 --> 01:03.500 +まあ、 明らかにうまくいくだろうね。 + +01:03.530 --> 01:07.730 +これで、 さらにいくつかの変数を持つ関数になった。 + +01:07.730 --> 01:12.140 +だから、 グラディオに関する限り、 それは単なる機能の一つに過ぎない。 + +01:12.140 --> 01:16.490 +つまり、 その機能を中心にユーザー・インターフェースを簡単に構築できるということだ。 + +01:16.490 --> 01:17.660 +見てみよう。 + +01:17.690 --> 01:18.800 +これだ。 + +01:18.800 --> 01:20.450 +これがインターフェイスだ。 + +01:20.480 --> 01:26.840 +この関数が受け取るのは、 今入力を書いたハイブリッドジェネレーターのようなものだ。 + +01:26.840 --> 01:29.000 +もちろん、 これで2つのインプットを持つことになる。 + +01:29.000 --> 01:30.890 
+そのうちのひとつがあなたのメッセージになる。 + +01:30.890 --> 01:34.010 +そしてもうひとつは、 こんなに簡単だったらいいのにと思う。 + +01:34.040 --> 01:39.230 +GPTまたはClaudeラベルの2つの値のドロップダウンでモデルを選択します。 + +01:39.230 --> 01:41.990 +そして、 それを出力するんだ。 + +01:42.470 --> 01:44.630 +しかし、 物事がそんなに簡単であることはめったにない。 + +01:44.660 --> 01:47.600 +ああ、 でもここはグラディオだから、 物事は本当に簡単なんだ。 + +01:47.690 --> 01:50.240 +申し訳ないが、 まずこれを実行しなければならない。 + +01:50.480 --> 01:51.320 +これでよし。 + +01:51.320 --> 01:52.100 +そんなに簡単なことじゃない。 + +01:52.130 --> 01:54.110 +それでも、 すべてのコードを実行しなければならない。 + +01:54.290 --> 01:56.390 +ええと、 それではどうぞ。 + +01:56.390 --> 02:05.750 +タイムズ・スクエアからグランド・セントラルまでどうやって行けばいいんだ? + +02:06.210 --> 02:08.160 +そして、 私たちのモデルの一つを選ぶ。 + +02:08.160 --> 02:12.600 +GPTを選び、 それを送信すると、 マークダウンでストリーミングバックされる。 + +02:12.600 --> 02:17.310 +その質問に対するGPTの回答が方向性だ。 + +02:17.610 --> 02:19.230 +最後にもう一度、 訪問を楽しもう。 + +02:19.260 --> 02:20.190 +とても素晴らしい。 + +02:20.310 --> 02:23.760 +あー、 今回は選択肢が増えたような気がするけど、 まあいいや。 + +02:23.790 --> 02:24.480 +そうではないかもしれない。 + +02:24.600 --> 02:25.710 +おそらく覚えているだろう。 + +02:25.980 --> 02:29.640 +クロードに同じ質問をすればいい。 + +02:29.640 --> 02:32.490 +同じ質問に対するクロードの答えはこうだ。 + +02:32.640 --> 02:34.260 +クロードの俳句を使ってね。 + +02:34.290 --> 02:37.950 +そのためか、 回答はやや短く、 簡潔なものになっている。 + +02:37.950 --> 02:40.230 +でも、 それってすごいことじゃない? + +02:40.230 --> 02:40.890 +クールだろ? + +02:40.890 --> 02:42.300 +我々はこの機能を構築したばかりだ。 + +02:42.330 --> 02:46.470 +私たちは2つの異なるモデルの間を行き来し、 同じ質問をして回答を得ることができる。 + +02:46.560 --> 02:53.610 +もしチャットUIを作りたいなら、 このチャットUIをいつか実行させればいい。 + +02:53.670 --> 02:55.710 +便利な道具だよ。 + +02:56.820 --> 03:05.160 +うーん、 それで、 そうだな、 双子座を単純にミックスに加えるという明らかな練習を想像できるだろう。 + +03:05.160 --> 03:05.670 +なぜだ? 
+ +03:05.700 --> 03:06.450 +想像がつくだろう。 + +03:06.450 --> 03:07.230 +超簡単だよ。 + +03:07.230 --> 03:08.070 +ただ加えるだけだ。 + +03:08.070 --> 03:09.470 +双子座という選択肢もある。 + +03:09.470 --> 03:15.680 +Geminiからのストリーミングバックのやり方はまだお見せしていませんが、 とてもよく似ていますし、 + +03:15.680 --> 03:22.820 +ググればドキュメントがとてもわかりやすいのですぐにわかります。 + +03:22.820 --> 03:23.810 +それはいいことだ。 + +03:24.770 --> 03:25.700 +分かった。 + +03:25.700 --> 03:30.230 +だから、 このラボの次のラストは大丈夫だ。 + +03:30.230 --> 03:35.810 +前回作った会社案内ジェネレーターを使って、 ユーザー・インターフェースを作ってみよう。 + +03:35.840 --> 03:37.070 +すごいことだと思わない? + +03:37.250 --> 03:41.750 +ええと、 だから、 今言ったように、 本当に、 本当に簡単なことなんだ。 + +03:41.750 --> 03:44.660 +だから、 私は以前のバージョンのパンフレットを使うことに決めたんだ。 + +03:44.660 --> 03:48.200 +ランディング・ページだけを使います。 + +03:48.200 --> 03:54.350 +すべてのリンクを集めるという2段階のプロセスを行うつもりはない。 + +03:54.440 --> 04:02.210 +ええと、 URL、 タイトル、 テキストを持つウェブサイト・クラスのシンプルなバージョンを用意するだけです。 + +04:02.210 --> 04:10.160 +リクエスト・パッケージを使い、 素晴らしいビューティフル・スープを使って解析し、 どうでもいいものを取り除いてテキストを取得する。 + +04:10.160 --> 04:19.840 +そして、 小さなgetcontextヘルパーがあり、 getcontentsヘルパーのようなもので、 ページのタイトルと本文を与えてくれる。 + +04:19.840 --> 04:21.160 +これがヘルパークラスだ。 + +04:21.190 --> 04:22.180 +忘れずに実行すること。 + +04:22.210 --> 04:23.170 +システムプロンプト。 + +04:23.170 --> 04:28.750 +あなたは、 企業のウェブサイトのランディングページの内容を分析し、 短いパンフレットを作成するシステムに入っている。 + +04:28.750 --> 04:31.180 +マークダウンにはシステム・プロンプトがある。 + +04:31.180 --> 04:41.350 +ここでは、 会社名、 URL、 モデルを受け取るパンフレットのストリーム機能を紹介します。 + +04:42.160 --> 04:46.510 +会社名のパンフレットを作成してください。 + +04:46.510 --> 04:48.250 +これが彼らのランディングページだ。 + +04:48.250 --> 04:54.370 +そして、 このウェブサイト・ヘルパークラスを使ってURLを読み込み、 内容を取得する。 + +04:54.370 --> 04:56.140 +だから、 これはすべて理にかなっている。 + +04:56.140 --> 04:59.230 +ウェブサイトのコンテンツを取得するだけだ。 + +04:59.230 --> 05:00.790 +それをプロンプトに変えるんだ。 + +05:00.790 --> 05:04.330 +そしてGPTならGPTからストリーミングする。 + +05:04.360 --> 05:06.640 +クロードなら、 クロードからストリーミングするつもりだ。 + +05:06.850 --> 05:15.510 +ええと、 そうでなければエラーを発生させ、 これをジェネレーターにして、 チャンクごとに結果を出します。 + +05:16.830 --> 05:23.340 +このチャンクをチャンクと呼ぶのは少し誤解を招くかもしれない。 + +05:23.370 --> 05:25.560 +全額だ。 + +05:25.590 --> 05:31.260 +だから、 
この名前を変更した方がいいかもしれない。 + +05:32.160 --> 05:33.810 +でも、 おわかりだろう。 + +05:33.840 --> 05:36.030 +それでうまくいくはずだ。 + +05:36.420 --> 05:37.770 +ええと、 それならいいんじゃない? + +05:37.770 --> 05:39.420 +もう古くなるから言うのはやめるよ。 + +05:39.450 --> 05:40.470 +でも、 いいものだよ。 + +05:40.470 --> 05:45.090 +これはもう、 関数をストリームパンフレットに置き換えるだけの簡単なことだ。 + +05:45.090 --> 05:46.560 +そして、 ここにインプットを見ることができる。 + +05:46.560 --> 05:48.570 +もちろん、 この3つのインプットはある。 + +05:48.600 --> 05:49.800 +これで社名が決まった。 + +05:49.800 --> 05:51.420 +ランディングページのURLがあります。 + +05:51.420 --> 05:53.490 +そしてモデルを選ぶことができる。 + +05:53.610 --> 05:56.520 +そして、 それを試してみよう。 + +05:56.970 --> 05:58.170 +さあ、 行くぞ。 + +05:58.170 --> 05:59.190 +ローカルで走っている。 + +05:59.190 --> 06:03.960 +社名を抱きしめている顔を言えるように、 その話を持ち出そう。 + +06:06.240 --> 06:09.510 +ランディングページのURL + +06:09.750 --> 06:10.830 +そして、 ただやるだけだ。 + +06:13.740 --> 06:19.040 +Sを抱きしめる。 コとモデルを選択する。 + +06:19.040 --> 06:26.240 +まずGPTに追加を依頼し、 送信を押すだけだ。 + +06:26.420 --> 06:27.920 +そして、 こうなる。 + +06:27.920 --> 06:34.280 +Huggingfaceの会社案内を、 ウェブスクレイプに基づきマークダウンしてお届けします。 + +06:34.280 --> 06:35.540 +すべてそこにある。 + +06:35.540 --> 06:39.680 +下の方にいろいろなリンクがある。 + +06:39.770 --> 06:43.220 +ああ、 そのリンクは正しいようだ。 + +06:43.220 --> 06:44.270 +うまくいきそうだ。 + +06:44.270 --> 06:49.700 +これらのリンクのいくつかは、 その、 生成された方法のために機能しないように見える。 + +06:49.700 --> 06:53.900 +それにしても、 かなり印象的なウェブページ、 印象的なパンフレットと言わざるを得ない。 + +06:53.900 --> 06:55.610 +つまり、 大好きなんだ。 + +06:55.640 --> 06:58.520 +クロードがこのクロード俳句で何をするか見てみよう。 + +06:58.550 --> 06:59.060 +もちろんだ。 + +06:59.060 --> 07:05.120 +だから、 スリムなモデルだが、 まったく問題ない。 + +07:05.120 --> 07:07.190 +AIの未来を一緒に築いていきましょう。 + +07:07.280 --> 07:11.150 +ええと、 俳句のパンフレットはとても良かったよ。 + +07:11.570 --> 07:13.880 +ああ、 そうだ。 + +07:13.910 --> 07:14.840 +私は。 + +07:14.840 --> 07:21.400 +gradioを使うたびに、 そのシンプルさと効果に驚かされる。 + +07:21.400 --> 07:29.650 +私たちはパンフレットを中心に、 さまざまなモデルを選べるユーザー・インターフェイスを構築しました。 + +07:29.860 --> 07:33.880 +だから、 あなたにとってやるべきこと、 これをより良くする方法はたくさんある。 + +07:33.940 --> 07:38.260 +先ほどの例だけでなく、 この例にも双子座を加えることができる。 + +07:38.410 --> 07:55.540 +もう一つのアイデアは、 別の選択項目を追加して、 
前回覚えているスタイルや口調を選択できるようにすることだ。 + +07:55.840 --> 08:04.390 +それなら、 ドロップダウンから別のトーンを選んで、 そのトーンで会社案内を作成できるように設定したらどうだろう。 + +08:04.510 --> 08:07.330 +それはとても簡単なことなんだ。 + +08:07.450 --> 08:08.410 +だから試してみてほしい。 + +08:08.440 --> 08:08.980 +そうしてくれ。 + +08:08.980 --> 08:14.920 +そして、 このアプリケーションをますます機能性の高いものに強化していくのだ。 + +08:14.950 --> 08:16.930 +だから、 それを楽しんでほしい。 + +08:16.930 --> 08:17.860 +その後、 コードを確認する。 + +08:17.860 --> 08:18.880 +だから私はそれを見ることができる。 + +08:18.880 --> 08:22.180 +それではまた、 次回の講義でお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/59166465/ko_KR.srt b/week5/community-contributions/subtitles/srts/59166465/ko_KR.srt new file mode 100755 index 0000000..0659ca9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166465/ko_KR.srt @@ -0,0 +1,466 @@ +WEBVTT + +00:00.620 --> 00:05.360 +그라디오의 날 유피터랩에 잘 오셨습니다 어디까지 했는지 기억하실 거예요 + +00:05.360 --> 00:14.990 +두 개의 사용자 인터페이스를 작성했는데 하나는 GPT 4와 기능상 채팅용이었고 하나는 + +00:14.990 --> 00:19.460 +함수용 클로드와 채팅용이었죠 + +00:19.580 --> 00:21.980 +이제 여러분께 묻죠. TUT D. 
+ +00:22.010 --> 00:25.310 +이런 함수를 썼다고 가정해 보죠 + +00:25.310 --> 00:28.160 +이 함수는 복합 함수예요 + +00:28.160 --> 00:32.330 +스트림 모델이라고 하는 함수로 다른 이들을 호출하죠 + +00:32.330 --> 00:40.040 +프롬프트와 모델을 취하고 모델이 GPT라면 스트리밍 GPT를 호출하죠 + +00:40.370 --> 00:42.530 +클로드라면 개울이라고 하죠 + +00:42.530 --> 00:44.420 +안 그러면 클로드가 실수를 하죠 + +00:44.420 --> 00:46.130 +GPT나 클로드가 돼야 해요 + +00:46.130 --> 00:51.140 +그러면 순환하면서 한 덩어리를 한 번에 수확해요 + +00:51.140 --> 00:53.420 +함수라고 불렀지만 사실은 아니죠 + +00:53.420 --> 00:54.440 +발전기예요 + +00:54.590 --> 01:01.970 +어떤 모델로 부르느냐에 따라 다른 모델에서 덩어리가 나와요 + +01:02.000 --> 01:03.500 +잘 작동할 거예요 + +01:03.530 --> 01:07.730 +변수가 몇 가지 더 있는 함수가 됐죠 + +01:07.730 --> 01:12.140 +그러니 그라디오에게 그건 또 다른 함수일 뿐이죠 + +01:12.140 --> 01:16.490 +그 함수를 중심으로 사용자 인터페이스를 쉽게 만들 수 있다는 거죠 + +01:16.490 --> 01:17.660 +한번 보죠 + +01:17.690 --> 01:18.800 +여기 있네요 + +01:18.800 --> 01:20.450 +인터페이스예요 + +01:20.480 --> 01:26.840 +이 함수는 우리가 방금 입력한 하이브리드 생성기예요 + +01:26.840 --> 01:29.000 +물론 입력값은 2개죠 + +01:29.000 --> 01:30.890 +그중 하나는 메시지예요 + +01:30.890 --> 01:34.010 +다른 사람들은 이렇게 쉬우면 좋겠어요 + +01:34.040 --> 01:39.230 +두 개의 값을 불러오는 거죠 GPT나 Clude Label SELECT 모델로요 + +01:39.230 --> 01:41.990 +그런 다음 그걸 출력으로 하는 거죠 + +01:42.470 --> 01:44.630 +하지만 그렇게 쉬운 일은 드물죠 + +01:44.660 --> 01:47.600 +여긴 그라디오라서 모든 게 정말 쉬워요 + +01:47.690 --> 01:50.240 +죄송해요, 이것부터 확인할게요 + +01:50.480 --> 01:51.320 +됐어요 + +01:51.320 --> 01:52.100 +그렇게 간단하지 않아요 + +01:52.130 --> 01:54.110 +여전히 모든 코드를 실행해야 해요 + +01:54.290 --> 01:56.390 +자, 시작하죠 + +01:56.390 --> 02:05.750 +Get up, 예를 들어 타임스스퀘어에서 그랜드 센트럴까지 어떻게 가죠? 
+ +02:06.210 --> 02:08.160 +모델 중 한 명을 선택해요 + +02:08.160 --> 02:12.600 +GPT를 선택해 제출하면 마크다운에서 스트리밍되죠 + +02:12.600 --> 02:17.310 +방향 문제에 대한 GPT의 반응이죠 + +02:17.610 --> 02:19.230 +즐거운 시간 보내세요 + +02:19.260 --> 02:20.190 +아주 좋아요 + +02:20.310 --> 02:23.760 +이번에는 선택지가 더 많아진 것 같지만 어쩔 수 없죠 + +02:23.790 --> 02:24.480 +아닐지도 모르죠 + +02:24.600 --> 02:25.710 +기억날 거예요 + +02:25.980 --> 02:29.640 +클로드를 보고 같은 질문을 하면 돼요 + +02:29.640 --> 02:32.490 +똑같은 질문에 대한 클로드의 대답이에요 + +02:32.640 --> 02:34.260 +클로드 하이쿠를 써서요 + +02:34.290 --> 02:37.950 +그래서 대답이 짧고 간결해진 것 같아요 + +02:37.950 --> 02:40.230 +정말 놀랍지 않아요? + +02:40.230 --> 02:40.890 +멋지죠? + +02:40.890 --> 02:42.300 +이 기능성만 빌드했죠 + +02:42.330 --> 02:46.470 +두 모델 사이를 넘나들면서 같은 질문을 하면 답이 나오죠 Get it + +02:46.560 --> 02:51.810 +그냥 실행시킬 수도 있어요 멋진 채팅 UI를 원하시면요 다양한 + +02:51.810 --> 02:53.610 +모델로 튕길 수 있죠 + +02:53.670 --> 02:55.710 +유용한 도구죠 + +02:56.820 --> 03:04.050 +그리고 여러분이 상상할 수 있는 명백한 훈련이 하나 더 있어요 여기에 제미니를 추가하는 + +03:04.050 --> 03:05.160 +거죠 + +03:05.160 --> 03:05.670 +왜요? + +03:05.700 --> 03:06.450 +상상이 되시죠? + +03:06.450 --> 03:07.230 +아주 쉬워요 + +03:07.230 --> 03:08.070 +그냥 추가하는 거죠 + +03:08.070 --> 03:09.470 +제미니를 선택해도 되고요 + +03:09.470 --> 03:15.680 +제미니 강의에서 스트림하는 법을 보여드린 적은 없지만 아주 비슷해요 구글로 빠르게 검색하면 + +03:15.680 --> 03:20.900 +아주 명확한 문서화가 있고 그걸 추가해서 그 코드를 푸시해 제가 갖고 다른 학생들과 + +03:20.900 --> 03:22.820 +공유할 수 있죠 + +03:22.820 --> 03:23.810 +그럼 좋죠 + +03:24.770 --> 03:25.700 +좋아요 + +03:25.700 --> 03:30.230 +이 실험실의 마지막은 괜찮을 거예요 + +03:30.230 --> 03:35.810 +지난번에 만든 회사 브로슈어 생성기를 가져다 사용자 인터페이스를 적용해 보죠. + +03:35.840 --> 03:37.070 +그럼 정말 멋지겠죠? 
+ +03:37.250 --> 03:41.750 +이제 아셨으니 말씀드렸듯이 아주 간단할 거예요 + +03:41.750 --> 03:44.660 +그래서 전 그 책자의 초기 버전을 선택했어요 + +03:44.660 --> 03:48.200 +랜딩 페이지만 사용할 거예요 + +03:48.200 --> 03:52.850 +모든 링크를 모으는 2단계 프로세스는 하지 않겠습니다 지금 필요한 것보다 + +03:52.850 --> 03:54.350 +더 복잡할 수 있으니까요 + +03:54.440 --> 04:00.050 +URL과 제목, 텍스트를 가진 웹사이트 클래스의 간단한 버전을 보여드릴게요 어떻게 + +04:00.050 --> 04:02.210 +작동하는지 기억하실 거예요 + +04:02.210 --> 04:07.520 +요청 패키지를 사용하고 뷰티풀 get을 이용해 관심 없는 + +04:07.520 --> 04:10.160 +걸 걸러내고 텍스트를 얻죠 + +04:10.160 --> 04:18.070 +getcontext 도우미가 있어요 일종의 getcontent 도우미로 페이지 제목과 + +04:18.070 --> 04:19.840 +본문을 제공하죠 + +04:19.840 --> 04:21.160 +도우미 수업은 여기까지고요 + +04:21.190 --> 04:22.180 +실행하는 거 잊지 마요 + +04:22.210 --> 04:23.170 +시스템 프롬프트예요 + +04:23.170 --> 04:27.310 +회사 웹사이트의 내용을 분석하고 랜딩 페이지에 짧은 답변을 작성하는 + +04:27.310 --> 04:28.750 +시스템에 있죠 + +04:28.750 --> 04:31.180 +마크다운에는 시스템 프롬프트가 있어요 + +04:31.180 --> 04:41.350 +스트림 브로슈어 함수가 있어요 회사 이름, URL 그리고 모델을 취하죠 + +04:42.160 --> 04:46.510 +회사명을 위한 회사 안내 책자를 생성해 달라고 하네요 + +04:46.510 --> 04:48.250 +이게 랜딩 페이지예요 + +04:48.250 --> 04:54.370 +그런 다음 웹사이트 도우미 클래스를 이용해 해당 URL을 읽고 내용을 get 하죠 + +04:54.370 --> 04:56.140 +이제 이해가 되네요 + +04:56.140 --> 04:59.230 +웹 사이트의 내용을 get 할 거예요 + +04:59.230 --> 05:00.790 +그걸 프롬프트로 바꿀게요 + +05:00.790 --> 05:04.330 +GPT라면 GPT에서 스트림할 거예요 + +05:04.360 --> 05:06.640 +클로드면 클로드에서 물을 퍼내야죠 + +05:06.850 --> 05:14.280 +안 그러면 에러가 발생해서 발전기를 만들게 되고 결과물이 한 덩어리씩 나올 + +05:14.280 --> 05:15.510 +거예요 + +05:16.830 --> 05:23.340 +비트라고 부르는 게 오해의 소지가 있는 것 같아요 사실 비트 한 덩어리씩이 아니거든요 + +05:23.370 --> 05:25.560 +총액이에요 + +05:25.590 --> 05:31.260 +이름을 바꾸는 게 좋겠어요 이 상황을 더 잘 반영하는 이름으로요 + +05:32.160 --> 05:33.810 +Get you, I'm get you, I'm get you, I'm get it, I'm it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get'm get, I'm get'm get, I'm get. + +05:33.840 --> 05:36.030 +이거면 될 거예요 + +05:36.420 --> 05:37.770 +그럼 좋지 않을까요? 
+ +05:37.770 --> 05:39.420 +Get it이라고 하면 질릴 테니 그만할게요 + +05:39.450 --> 05:40.470 +하지만 좋아요 + +05:40.470 --> 05:45.090 +함수를 스트림 브로슈어로 대체하는 것만큼 간단해요 + +05:45.090 --> 05:46.560 +여기 입력값이 보이시죠 + +05:46.560 --> 05:48.570 +물론 세 가지 입력값이 있죠 + +05:48.600 --> 05:49.800 +회사 이름이 생겼네요 + +05:49.800 --> 05:51.420 +랜딩 페이지 URL이 있어요 + +05:51.420 --> 05:53.490 +그런 다음 모델을 고르죠 + +05:53.610 --> 05:56.520 +한번 해 보죠 + +05:56.970 --> 05:58.170 +여기 있네요 + +05:58.170 --> 05:59.190 +현지에서 운영되죠 + +05:59.190 --> 06:03.960 +회사명 포옹 얼굴이라고 할 수 있게 띄워 주세요 + +06:06.240 --> 06:09.510 +랜딩 페이지 URL도 말할 수 있죠 + +06:09.750 --> 06:10.830 +이렇게 하죠 + +06:13.740 --> 06:19.040 +안아주기요 co와 select 모델을 선택하세요 + +06:19.040 --> 06:26.240 +GPT에 먼저 추가하고 제출을 누르라고 요청할 거예요 + +06:26.420 --> 06:27.920 +이제 시작이네요 + +06:27.920 --> 06:34.280 +이건 회사 안내 책자예요 헐깅페이스 스트리밍을 마크다운으로 하는 거죠 웹 스크래프를 기반으로요 + +06:34.280 --> 06:35.540 +다 있어요 + +06:35.540 --> 06:39.680 +심지어 아래쪽에 다양한 용도의 링크가 있어요 + +06:39.770 --> 06:43.220 +네, 링크가 맞는 것 같네요 + +06:43.220 --> 06:44.270 +이거면 되겠어요 + +06:44.270 --> 06:49.700 +몇몇 링크들은 작동하지 않을 것 같습니다 생성된 방식 때문에요 + +06:49.700 --> 06:53.900 +그래도 웹페이지는 정말 인상적이에요 브로슈어도 인상적이고요 + +06:53.900 --> 06:55.610 +정말 좋아요 + +06:55.640 --> 06:58.520 +클로드가 클로드 하이쿠로 뭘 하는지 보죠 + +06:58.550 --> 06:59.060 +물론이죠 + +06:59.060 --> 07:05.120 +좀 더 날씬하지만 이 정도면 괜찮아요 + +07:05.120 --> 07:07.190 +함께 인공지능의 미래를 만들어 나가요 + +07:07.280 --> 07:11.150 +하이쿠 책자가 아주 좋네요 + +07:11.570 --> 07:13.880 +이제 됐어요 + +07:13.910 --> 07:14.840 +네 + +07:14.840 --> 07:21.400 +그러디오를 쓸 때마다 정말 놀라워요 얼마나 단순하고 효과적인지 말이에요 + +07:21.400 --> 07:27.250 +책자 주위에 사용자 인터페이스를 구축했어요 다른 모델 사이에서 고를 수 + +07:27.250 --> 07:29.650 +있죠 인정합시다, 쉬웠어요 + +07:29.860 --> 07:33.880 +이 상황을 개선할 방법은 아주 많아요 + +07:33.940 --> 07:38.260 +앞서 언급한 제미니를 추가할 수도 있고 이 예제에도 추가할 수 있어요 + +07:38.410 --> 07:47.050 +다른 아이디어는 다른 드롭다운을 추가하는 거예요 스타일이나 톤을 고를 수 있는 드롭다운요 + +07:47.050 --> 07:52.240 +시스템 프롬프트도 쉽게 바꿀 수 있죠 브로슈어가 익살스럽고 + +07:52.240 --> 07:55.540 +농담조로 유쾌하게요 + +07:55.840 --> 08:00.940 +저 드롭다운에서 고를 수 있도록 설정해 볼까요? 
다른 톤을 선택하면 

08:00.940 --> 08:04.390
+그 톤을 이용해 회사 브로슈어를 생성하죠

08:04.510 --> 08:07.330
+사실 엄청 쉬워요

08:07.450 --> 08:08.410
+그러니 한번 해보세요

08:08.440 --> 08:08.980
+그렇게 해요

08:08.980 --> 08:13.990
+이 응용 프로그램을 강화해 기능성이 점점 더 높아지게 될

08:14.020 --> 08:14.920
+거예요

08:14.950 --> 08:16.930
+즐거운 시간 보내시길 바라요

08:16.930 --> 08:17.860
+나중에 코드를 확인하세요

08:17.860 --> 08:18.880
+제가 볼 수 있게요

08:18.880 --> 08:22.180
+그럼 다음 강의에서 마무리하도록 하죠
diff --git a/week5/community-contributions/subtitles/srts/59166481/en_US.srt b/week5/community-contributions/subtitles/srts/59166481/en_US.srt
new file mode 100755
index 0000000..43f6566
--- /dev/null
+++ b/week5/community-contributions/subtitles/srts/59166481/en_US.srt
@@ -0,0 +1,889 @@
+WEBVTT
+
+00:00.860 --> 00:05.330
+And here, once more we find ourselves in our favorite place, the Jupyter Lab.
+
+00:05.330 --> 00:07.310
+Ready to go with weeks.
+
+00:07.340 --> 00:09.620
+Week two's exercises.
+
+00:09.620 --> 00:14.930
+So we go into week two folder and we open up week two, day one.
+
+00:15.230 --> 00:18.230
+Uh, and here we go.
+
+00:18.230 --> 00:26.990
+So a reminder that in week one we used uh multiple frontier LMS through the chat user interface, a
+
+00:26.990 --> 00:32.990
+way to use it through the web, uh, and then through the API, we connect it to OpenAI's API.
+
+00:33.020 --> 00:39.890
+So today we're going to add to the mix the APIs for Anthropic and Google to join with our skills of
+
+00:39.890 --> 00:41.090
+using OpenAI.
+
+00:41.960 --> 00:47.630
+Uh, so as a one more reminder, you're going to kill me for keeping going on about this.
+
+00:47.630 --> 00:50.300
+This is where you set up your keys.
+
+00:50.300 --> 00:55.850
+Uh, you can set up keys for OpenAI, which presumably you already did last week, uh, for anthropic
+
+00:55.850 --> 00:58.460
+and for Gemini for Google. 
+ +00:58.490 --> 01:04.370 +Uh, but, uh, bearing in mind that there is more of an adventure to be had in setting up your Google + +01:04.400 --> 01:05.330 +Keys. + +01:05.390 --> 01:09.410 +Once you've set them up, you create. + +01:09.470 --> 01:11.330 +You should already have created the file called. + +01:11.480 --> 01:15.170 +Env and make sure that your keys are in there in that form. + +01:15.560 --> 01:21.500 +If you wish, instead of doing that, you can do it by typing your keys in these cells. + +01:21.500 --> 01:24.020 +It's it's possible to do it that way. + +01:24.020 --> 01:26.270 +It's not recommended for security reasons. + +01:26.270 --> 01:30.350 +In case you one day make this public and then other people will see your keys. + +01:30.380 --> 01:32.300 +All right, enough preamble. + +01:32.330 --> 01:33.800 +Let's run some imports. + +01:33.800 --> 01:37.400 +Let's run this block of code here that sets the environment variables. + +01:37.400 --> 01:38.900 +You're pretty familiar with this. + +01:38.900 --> 01:46.280 +And now in this cell, you can see that I make the same call to OpenAI to, to establish that connection + +01:46.280 --> 01:49.400 +to the OpenAI API that you're familiar with now. + +01:49.400 --> 01:55.790 +But then I have something pretty similar for Claude and then something a little bit different for Google + +01:55.790 --> 01:56.840 +for Gemini. + +01:56.960 --> 02:04.220 +So these are the sort of, uh, semi somewhat analogous commands that we're using for those three. + +02:04.730 --> 02:05.510 +Okay. + +02:05.510 --> 02:11.420 +So we've seen a bunch of things that Llms are pretty good at, and then just a few things where it tripped + +02:11.420 --> 02:13.160 +up, but mostly things that it's very good at. + +02:13.190 --> 02:17.600 +One of the things that it's not so good at as it would happen, is telling jokes. + +02:17.600 --> 02:24.080 +When you give it a very tight, uh, context in which it has to try and form that joke. 
+ +02:24.260 --> 02:28.610 +Uh, and so, you know, this is clearly not a very commercial example, but it's a way of having some + +02:28.610 --> 02:30.980 +fun and getting some experience with the APIs. + +02:31.040 --> 02:34.850 +Uh, we are going to ask some llms to tell jokes over the API. + +02:35.120 --> 02:36.770 +Um, so what information do you do? + +02:36.770 --> 02:37.550 +You send over an API. + +02:37.580 --> 02:41.750 +Typically, you always specify the name of the model that you want to use. + +02:41.750 --> 02:45.380 +You typically give the system message and the user message. + +02:45.380 --> 02:48.950 +You're super familiar with this now system message giving the overall context. + +02:48.950 --> 02:52.340 +The user message is the actual prompt. + +02:52.550 --> 02:54.410 +Um, and there are some other characteristics. + +02:54.410 --> 02:55.700 +There are some other things that you can do. + +02:55.730 --> 03:00.890 +You can pass in something called the temperature which is between 0 and 1, usually where one means + +03:00.890 --> 03:08.430 +I want a more random creative output Outputs, and zero would be the lowest possible focused, deterministic + +03:08.430 --> 03:09.960 +repeatable setting. + +03:10.320 --> 03:14.250 +So that is another parameter that you can often provide. + +03:14.280 --> 03:19.470 +So in this case we're going to set a system message to be you are an assistant that is great at telling + +03:19.470 --> 03:20.010 +jokes. + +03:20.010 --> 03:26.670 +And the user prompt will be tell a light hearted joke for an audience of data scientists. + +03:26.670 --> 03:30.000 +That would be you and also me. + +03:30.660 --> 03:35.850 +Okay, so then this structure here is hopefully something very familiar to you. + +03:35.850 --> 03:43.410 +This is where we put the prompts into a list to elements you a system and a user as the as the role + +03:43.410 --> 03:44.910 +in these two elements. 
+ +03:44.940 --> 03:49.860 +Going into this list, I hopefully don't need to explain it because you're now quite familiar with this. + +03:50.040 --> 03:55.080 +Uh, as I say, this, this, this, um, value here, the role can be system or user. + +03:55.080 --> 03:56.070 +You're going to find out later. + +03:56.070 --> 03:57.570 +It can also be assistant. + +03:57.570 --> 03:59.760 +So it can be system user or assistant. + +03:59.760 --> 04:04.150 +And then later this week you're going to find some other thing that can go in there as well. + +04:04.240 --> 04:04.990 +So. + +04:05.020 --> 04:09.790 +But for now, all you need to remember is system and user as the two roles we're going to be using. + +04:09.790 --> 04:12.610 +So we put that into the list of prompts. + +04:13.480 --> 04:16.570 +And I should remember to execute the cell before it. + +04:16.570 --> 04:18.790 +Before I do that did I execute the cell here. + +04:18.790 --> 04:20.350 +Yes I did all right. + +04:20.350 --> 04:20.770 +Here we go. + +04:20.800 --> 04:21.790 +Let's try that one again. + +04:21.790 --> 04:22.840 +Execute that cell. + +04:22.840 --> 04:23.860 +Execute this cell. + +04:23.890 --> 04:25.720 +Very good okay. + +04:25.750 --> 04:33.280 +Let's start with one of the older GPT models GPT 3.5 turbo, which quite recently was like the the latest + +04:33.280 --> 04:34.390 +and greatest frontier model. + +04:34.390 --> 04:35.830 +But it's already old news. + +04:35.830 --> 04:37.330 +But we will use this. + +04:37.330 --> 04:44.680 +And so the API, which now you're quite familiar with for OpenAI is OpenAI dot chat, dot completions, + +04:44.680 --> 04:53.500 +dot create completions, um, being the name of this, this API, the one that basically takes an existing + +04:53.500 --> 04:59.530 +set of prompts and then tries to complete generate text to complete the conversation. 
+ +04:59.800 --> 05:06.960 +Um, and as we call create, we pass in a model and we pass in the messages in the format that you're + +05:06.960 --> 05:07.980 +familiar with. + +05:08.010 --> 05:09.750 +So let's see. + +05:09.780 --> 05:15.870 +And you remember when we get back the response, what we do is we take completion dot choices, which + +05:15.870 --> 05:18.030 +is a list of possible choices. + +05:18.030 --> 05:19.980 +But there will only be one element in there. + +05:19.980 --> 05:23.790 +There is a way that you can specify that you want it to return multiple choices. + +05:23.790 --> 05:28.740 +But since we haven't done that, we just get back one and it's in location zero of course. + +05:28.740 --> 05:35.550 +So completion dot choices zero dot message gives us back the message and content returns it in a string. + +05:35.760 --> 05:37.770 +So that is what we get back and we print it. + +05:37.770 --> 05:39.360 +And now let's see what kind of joke. + +05:39.360 --> 05:42.690 +For data scientists GPT 3.5 turbo can come up with. + +05:42.720 --> 05:43.680 +Here we go. + +05:44.010 --> 05:48.000 +Why did the data scientists break up with their computer? + +05:48.000 --> 05:52.020 +It just couldn't handle their complex relationship. + +05:52.830 --> 05:53.970 +Okay, okay. + +05:54.000 --> 05:56.250 +You know, I get it, I see it. + +05:56.280 --> 05:58.770 +It's not the world's funniest joke, but it's not terrible. + +05:58.800 --> 06:03.540 +You know, the data scientists model relationships between things and couldn't handle their complex + +06:03.540 --> 06:04.200 +relationship. + +06:04.200 --> 06:04.800 +Fair enough. + +06:04.800 --> 06:13.140 +I'd say that's a perfectly acceptable, acceptable joke coming from GPT 3.5 turbo. + +06:13.200 --> 06:17.010 +So let's see if GPT four mini can do better. + +06:17.160 --> 06:21.450 +This time, we're going to just slightly expand our use of the API. 
+ +06:21.600 --> 06:26.340 +I'm including temperature, so this is where you can pass in this number between 0 and 1. + +06:26.340 --> 06:29.220 +One for the most creative, zero for the least. + +06:29.490 --> 06:34.980 +Um, and uh, out of this I have completion choices zero message content. + +06:34.980 --> 06:36.720 +Again, you should be very familiar with this. + +06:36.750 --> 06:38.970 +Let's see how it performs. + +06:39.570 --> 06:42.060 +Why did the data scientist break up with a statistician? + +06:42.060 --> 06:44.670 +Because she found him too mean. + +06:44.700 --> 06:46.230 +I'd say that's a pretty good joke. + +06:46.230 --> 06:47.490 +I'd say that's fine. + +06:47.490 --> 06:49.950 +That's that's, uh, that's an acceptable joke. + +06:49.980 --> 06:54.300 +Maybe I was harsh when I said that llms aren't very good at this, because that's a perfectly decent + +06:54.300 --> 06:54.990 +joke. + +06:55.170 --> 07:02.610 +Uh, and, uh, I think we will give GPT four a mini, uh, a round of applause for that. + +07:03.030 --> 07:09.160 +Okay, let's try GPT four Minis, uh, bigger cousin, GPT four. + +07:09.190 --> 07:12.130 +Oh, the maxi version of GPT four. + +07:12.160 --> 07:14.260 +Oh, the big guy. + +07:14.260 --> 07:16.000 +And we will ask it. + +07:16.030 --> 07:19.210 +Let's give it the same temperature so we're not messing with things as we go. + +07:19.240 --> 07:21.160 +We'll ask it for it for a joke. + +07:21.190 --> 07:23.230 +Two and let's see how it does. + +07:24.250 --> 07:27.130 +Why did the data scientist go broke? + +07:27.130 --> 07:30.850 +Because they couldn't find any cache in their array. + +07:32.410 --> 07:35.560 +If it hadn't put on in their array, I might have found that better. + +07:35.560 --> 07:38.650 +I don't, uh, couldn't find any cache. + +07:38.650 --> 07:39.910 +Would be okay. + +07:40.810 --> 07:42.280 +Maybe I'm missing something here. + +07:42.310 --> 07:45.280 +I I'm not sure I get it. 
+
+07:45.550 --> 07:47.380
+Uh, let's try another one.
+
+07:47.560 --> 07:52.480
+Let's do what I had in there before and start pulling the temperature down a bit, see what we get.
+
+07:52.990 --> 07:56.560
+Why did scientists break up with the logistic regression model?
+
+07:56.590 --> 07:58.390
+Because it couldn't find the right fit.
+
+07:58.600 --> 08:00.130
+Uh, you know, that's perfectly decent.
+
+08:00.130 --> 08:00.970
+That's acceptable.
+
+08:00.970 --> 08:06.160
+That's that's maybe, uh, I'm not sure which I prefer between Mini and Maxi, but, uh, that's a that's
+
+08:06.160 --> 08:08.860
+a pretty solid, solid gag there.
+
+08:08.860 --> 08:12.640
+I think we will we will say that that that's a pass for sure.
+
+08:13.810 --> 08:14.800
+All right.
+
+08:14.830 --> 08:17.050
+Let's move on to Claude 3.5.
+
+08:17.080 --> 08:17.680
+Sonnet.
+
+08:17.950 --> 08:21.430
+Uh, so the API looks strikingly similar.
+
+08:21.430 --> 08:22.270
+That's the good news.
+
+08:22.270 --> 08:25.030
+It's basically very, very similar indeed.
+
+08:25.060 --> 08:26.530
+A couple of differences.
+
+08:26.530 --> 08:31.510
+You do have to pass in the system message as its own separate attribute.
+
+08:31.510 --> 08:36.430
+And then the messages is again this this list of dicts.
+
+08:36.430 --> 08:41.380
+But of course it doesn't have that first entry for the system message because you've already passed
+
+08:41.380 --> 08:42.550
+that in separately.
+
+08:42.910 --> 08:45.310
+Um, so that's a slight difference.
+
+08:45.340 --> 08:51.670
+Um, also, Max tokens is something which is optional for the OpenAI API to to specify the, the maximum
+
+08:51.670 --> 08:52.360
+number of tokens.
+
+08:52.360 --> 08:55.180
+And I believe it's actually required for Claude.
+
+08:55.180 --> 08:56.860
+So that's why it's in here.
+
+08:56.860 --> 08:59.200
+But otherwise everything should look very similar.
+ +08:59.230 --> 09:03.250 +The API itself is a little bit easier to memorize. + +09:03.250 --> 09:05.740 +It's just Claude dot messages dot create. + +09:05.740 --> 09:11.470 +It's slightly shorter, but it's otherwise quite similar to OpenAI ChatGPT completions create. + +09:11.710 --> 09:13.150 +Uh, so there it is. + +09:13.180 --> 09:17.830 +And then when we get back a response, it's message content zero. + +09:17.860 --> 09:22.630 +Again, you're asking for the the first one, but we're only going to get back one because we've only + +09:22.630 --> 09:28.750 +asked for one dot text gives us that's the equivalent of dot content for OpenAI. + +09:28.780 --> 09:30.100 +So let's see. + +09:30.100 --> 09:35.020 +This is a useful hopefully for you for for the API framework for Claude. + +09:35.020 --> 09:38.080 +Let's see now how Claude does with a joke. + +09:39.910 --> 09:40.630 +Sure. + +09:40.660 --> 09:43.540 +Here's a lighthearted joke for data scientists. + +09:43.570 --> 09:46.210 +Why do data scientists break up with their significant other? + +09:46.240 --> 09:50.800 +They just was too much variance in the relationship, and they couldn't find a good way to normalize + +09:50.800 --> 09:51.310 +it. + +09:51.970 --> 09:53.530 +Uh, yeah, that's all right. + +09:53.530 --> 09:59.110 +I'd say it's a nerdier it's a slightly more, uh, um, data sciency. + +09:59.110 --> 10:03.640 +It's perhaps just a tiny bit less funny, but it's not bad at all. + +10:03.640 --> 10:07.570 +I don't know, I think whether you prefer that to GPT four is probably a matter of taste. + +10:07.900 --> 10:10.100 +They're perfectly solid jokes. + +10:10.220 --> 10:14.210 +They're not explosively funny, but I'd say perfectly solid. + +10:14.210 --> 10:15.440 +Not terrible. + +10:15.950 --> 10:16.550 +Um. + +10:16.610 --> 10:22.220 +Anyway, the point of this is more about APIs and about jokes, although it always keeps it entertaining. 
+
+10:22.250 --> 10:24.800
+What I want to show you now is about streaming.
+
+10:24.890 --> 10:29.090
+Um, you remember we talked briefly about streaming before the streaming example?
+
+10:29.090 --> 10:33.140
+We did before, uh, looked a bit complicated because we had to deal with the fact that we were bringing
+
+10:33.140 --> 10:36.470
+back markdown and we had to to handle that markdown.
+
+10:36.470 --> 10:40.280
+This looks a bit simpler because we're not dealing with with a markdown response.
+
+10:40.280 --> 10:45.980
+We're going to ask the same model, Claude 3.5 again for a joke, but this time we're going to stream
+
+10:45.980 --> 10:46.730
+back results.
+
+10:46.730 --> 10:53.090
+So you may remember when we asked OpenAI to stream the way we did it is we just added another attribute
+
+10:53.090 --> 10:54.470
+stream equals true.
+
+10:54.470 --> 10:56.570
+And that meant that it was in streaming mode.
+
+10:56.570 --> 10:58.490
+For Claude, it's slightly different.
+
+10:58.490 --> 11:00.380
+There is no extra attribute.
+
+11:00.380 --> 11:06.440
+Instead, you call the dot stream method instead of the dot create method.
+
+11:06.440 --> 11:09.020
+So slightly different approach there.
+
+11:09.020 --> 11:13.790
+That's a nuance of difference between anthropic and OpenAI for streaming.
+
+11:13.790 --> 11:16.430
+So we call Claude messages stream.
+
+11:16.460 --> 11:17.840
+Otherwise it's the same.
+
+11:17.840 --> 11:22.430
+And then with what comes back, we use a context manager with results as stream.
+
+11:22.610 --> 11:26.960
+Um, and then it's for text in stream text stream.
+
+11:26.960 --> 11:31.550
+And you remember OpenAI was was for chunk in response.
+
+11:31.550 --> 11:35.990
+So OpenAI was a bit different again in the way that you read back results.
+
+11:35.990 --> 11:37.040
+But there it is.
+
+11:37.040 --> 11:41.420
+We get each little chunk back and just going to print that chunk.
+ +11:41.540 --> 11:46.460 +Um, and the reason for this is to make sure that it doesn't print each chunk on a separate line. + +11:46.670 --> 11:48.170 +Otherwise it'd be very hard to read. + +11:48.170 --> 11:49.490 +So this should look better. + +11:49.490 --> 11:56.510 +Let's see how Claude 3.5 sonnet does with a joke that it will then stream back to us in JupyterLab. + +11:57.200 --> 11:57.800 +There we go. + +11:57.800 --> 11:58.040 +You see? + +11:58.040 --> 11:59.060 +It's streaming. + +11:59.330 --> 12:01.580 +Sure, here's a light hearted joke for Data Scientist. + +12:01.610 --> 12:03.110 +Why did that same joke? + +12:03.110 --> 12:08.690 +It seems exactly the same joke, but it's added in a Brahms little drum. + +12:08.840 --> 12:12.000 +Uh, explosion at the end, which is nice. + +12:12.000 --> 12:14.670 +I wonder why did I ask for more tokens than before? + +12:14.700 --> 12:15.180 +Let's see. + +12:15.210 --> 12:15.630 +No. + +12:15.630 --> 12:16.350 +The same. + +12:16.650 --> 12:17.730 +Um, it's. + +12:17.760 --> 12:19.020 +And it gives a little explanation. + +12:19.020 --> 12:22.170 +This joke plays on statistical concepts which are common to data science. + +12:22.260 --> 12:27.060 +It's a bit nerdy, but should get a chuckle from data savvy audience. + +12:27.060 --> 12:32.070 +Well, I would say you guys are a data savvy audience, so you can be the judge of that. + +12:32.100 --> 12:34.440 +Did it get a chuckle from you? + +12:35.220 --> 12:36.540 +Moving on. + +12:36.570 --> 12:39.120 +Gemini has a different structure. + +12:39.120 --> 12:41.370 +It's it's quite a bit different, actually. + +12:41.400 --> 12:48.780 +Um, and I'd probably say to Google's credit, their ability to set up tokens is much more complicated, + +12:48.780 --> 12:50.580 +but the API is a bit simpler. 
+ +12:50.670 --> 12:56.850 +Uh, you can see here you create a generative model object and you pass in the name of the model, we'll + +12:56.850 --> 12:59.550 +use the Gemini 1.5 flash. + +12:59.580 --> 13:03.510 +You remember how many how large the context window is for Gemini 1.5 flash. + +13:03.540 --> 13:04.680 +Can you remember that? + +13:04.710 --> 13:07.050 +It was top of the table that we had before? + +13:07.050 --> 13:10.380 +It was a remarkable 1 million tokens. + +13:10.410 --> 13:11.450 +A million tokens. + +13:11.480 --> 13:13.310 +750,000 words. + +13:13.340 --> 13:15.500 +So, Gemini 1.5 flash. + +13:15.950 --> 13:23.270 +We pass in the system instruction when we create this object, and then we call Gemini dot. + +13:23.270 --> 13:26.420 +Generate content with the user prompt. + +13:26.420 --> 13:28.520 +And it's just response dot text. + +13:28.520 --> 13:35.090 +So a little bit less futzing around with both the request and the response here it's a bit of a simpler + +13:35.120 --> 13:37.520 +API, but let's see the quality of joke. + +13:37.670 --> 13:42.200 +Importantly, why did the data scientists break up with a statistician? + +13:42.200 --> 13:45.590 +Because they couldn't see eye to eye on the p value. + +13:47.420 --> 13:48.020 +Ah. + +13:48.800 --> 13:52.310 +Well, uh, I see the data science side of it. + +13:52.310 --> 13:53.810 +I'm not sure I get it. + +13:53.900 --> 13:55.070 +Hahaha. + +13:55.370 --> 13:57.380 +Uh, maybe you do get it. + +13:57.380 --> 13:59.540 +And I'm being being, uh, being dozy. + +13:59.540 --> 14:01.310 +Uh, in which case, by all means pointed out to me. + +14:01.310 --> 14:05.450 +But I don't particularly get the funny aspect of that joke. + +14:05.450 --> 14:11.630 +So for me, I would say that, uh, Gemini certainly lags in terms of its, uh, Gemini 1.5 flash in + +14:11.630 --> 14:13.440 +terms of its humor value. + +14:14.220 --> 14:15.060 +All right. 
+ +14:15.090 --> 14:18.960 +Anyways, to get serious for a moment, let's go back to GPT four. + +14:19.170 --> 14:20.910 +Many with the original question. + +14:20.910 --> 14:22.410 +You're a helpful assistant. + +14:22.440 --> 14:25.950 +How do I decide if a business problem is suitable for an LLM solution? + +14:25.950 --> 14:29.790 +Remember, that was the very first question we asked through the chat interface. + +14:29.970 --> 14:32.970 +Um, and we can now bring this together again. + +14:32.970 --> 14:34.260 +This should be pretty familiar to you. + +14:34.290 --> 14:37.320 +We're going to stream back the results in markdown. + +14:37.320 --> 14:40.770 +So it's OpenAI chat dot completions dot create. + +14:40.770 --> 14:41.880 +We pass in the model. + +14:41.880 --> 14:43.350 +We're going to go for the big guy. + +14:43.530 --> 14:44.820 +Um we use the prompts. + +14:44.820 --> 14:45.840 +We set a temperature. + +14:45.840 --> 14:47.250 +We say stream equals true. + +14:47.250 --> 14:49.680 +That's the way that you do it with OpenAI. + +14:49.830 --> 14:54.750 +Um, and then this is the way that we stream back the results again. + +14:54.750 --> 14:57.720 +It's a little bit more involved because we're dealing with markdown. + +14:57.720 --> 15:03.390 +And so we have to do some, some sort of, uh, special stuff here to basically refresh the markdown + +15:03.390 --> 15:04.950 +with each iteration. + +15:04.980 --> 15:08.850 +If you're not sure we have to do it this way, try taking that out and doing it differently, and you'll + +15:08.850 --> 15:11.190 +immediately see what what what happens. + +15:11.220 --> 15:13.200 +It it won't look good. + +15:13.440 --> 15:15.720 +Uh, and let's run that. + +15:15.720 --> 15:21.810 +And here we get the results, and you can see that it looks great. + +15:22.500 --> 15:28.260 +You can see some of the flicking happening when the markdown has only partially come through. 
+ +15:28.260 --> 15:33.600 +And so it's interpreting things like when there's perhaps multiple hashes representing a subheading. + +15:33.600 --> 15:37.050 +And it's only received one hash and it thinks there's a big heading coming. + +15:37.110 --> 15:41.430 +Uh, at least I think that's what we were seeing there briefly, with some of that flickering as the + +15:41.430 --> 15:42.660 +markdown appeared. + +15:42.660 --> 15:50.730 +But at the end of it we get back, of course, a very nicely constructed response, well structured, + +15:50.730 --> 15:55.020 +and it's formatted perfectly in markdown streams back. + +15:55.740 --> 15:56.460 +All right. + +15:56.460 --> 16:03.300 +So that has given you a sense of the different APIs and a bit of messing around with some, some fun + +16:03.300 --> 16:04.140 +questions. + +16:04.170 --> 16:12.150 +And what we're going to do next in the next video is actually have a couple of llms talk to each other, + +16:12.150 --> 16:13.200 +which should be fun. + +16:13.200 --> 16:14.340 +I will see you then. 
diff --git a/week5/community-contributions/subtitles/srts/59166481/ja_JP.srt b/week5/community-contributions/subtitles/srts/59166481/ja_JP.srt new file mode 100755 index 0000000..8a49165 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166481/ja_JP.srt @@ -0,0 +1,799 @@ +WEBVTT + +00:00.860 --> 00:05.330 +そしてここでもう一度、 私たちはお気に入りの場所、 Jupyter Labにいることに気づく。 + +00:05.330 --> 00:07.310 +数週間で準備完了。 + +00:07.340 --> 00:09.620 +2週目の練習 + +00:09.620 --> 00:14.930 +2週目のフォルダーに入り、 2週目の初日を迎える。 + +00:15.230 --> 00:18.230 +ええと、 それで......。 + +00:18.230 --> 00:26.990 +第1週目では、 チャット・ユーザー・インターフェースを通して複数のfrontier LMSを使い、 ウェブを通して使う方法、 + +00:26.990 --> 00:32.990 +そしてAPIを通してOpenAIのAPIに接続したことを思い出してください。 + +00:33.020 --> 00:41.090 +そこで今日は、 AnthropicとGoogleのAPIをミックスに加え、 OpenAIを使用する私たちのスキルに加わります。 + +00:41.960 --> 00:47.630 +ええと、 だから、 もうひとつ念を押しておくけど、 この話を続けていると殺されちゃうよ。 + +00:47.630 --> 00:50.300 +ここでキーをセットする。 + +00:50.300 --> 00:58.460 +OpenAIのキーを設定することができます。 おそらく先週すでに設定したと思いますが、 anthropicとGoogleのGeminiのキーを設定することができます。 + +00:58.490 --> 01:05.330 +でも、 グーグル・キーの設定にはもっと冒険が必要なんだ。 + +01:05.390 --> 01:09.410 +一度セットアップしたら、 あとは作るだけだ。 + +01:09.470 --> 01:11.330 +というファイルはすでに作成されているはずだ。 + +01:11.480 --> 01:15.170 +鍵がその形であることを確認する。 + +01:15.560 --> 01:21.500 +その代わりに、 これらのセルにキーを入力することもできる。 + +01:21.500 --> 01:24.020 +そうすることは可能だ。 + +01:24.020 --> 01:26.270 +セキュリティ上の理由から推奨されていない。 + +01:26.270 --> 01:30.350 +いつかこれを公開し、 他の人があなたの鍵を見ることになったときのために。 + +01:30.380 --> 01:32.300 +さて、 前置きはここまで。 + +01:32.330 --> 01:33.800 +インポートをしよう。 + +01:33.800 --> 01:37.400 +環境変数を設定するコードのブロックを実行してみよう。 + +01:37.400 --> 01:38.900 +あなたはよくご存じでしょう。 + +01:38.900 --> 01:49.400 +そして今、 このセルで、 OpenAIに同じ呼び出しをして、 OpenAI APIへの接続を確立しているのがわかるだろう。 + +01:49.400 --> 01:56.840 +でも、 クロードには似たようなものがあるし、 双子座のグーグルにはちょっと違うものがある。 + +01:56.960 --> 02:04.220 +つまり、 この3つのコマンドは、 ある意味類似しているんだ。 + +02:04.730 --> 02:05.510 +オーケー。 + +02:05.510 --> 02:13.160 +LLMSが得意なことをたくさん見てきたし、 つまずいたこともいくつかあったが、 ほとんどはLLMSが得意なことだった。 + +02:13.190 --> 02:17.600 +その中で、 
あまり得意でないことのひとつがジョークを言うことだ。 + +02:17.600 --> 02:24.080 +非常にタイトな文脈を与えることで、 その中でジョークを作ろうとする。 + +02:24.260 --> 02:30.980 +これは明らかに商業的な例ではないけれど、 APIを楽しみながら体験する方法なんだ。 + +02:31.040 --> 02:34.850 +ええと、 API上で何人かのLLMにジョークを言ってもらう予定です。 + +02:35.120 --> 02:36.770 +ええと、 それでどんな情報を? + +02:36.770 --> 02:37.550 +API経由で送信する。 + +02:37.580 --> 02:41.750 +通常、 使用したいモデルの名前を常に指定する。 + +02:41.750 --> 02:45.380 +通常、 システムメッセージとユーザーメッセージを伝える。 + +02:45.380 --> 02:48.950 +全体的な背景を伝えるシステムメッセージは、 もうお馴染みですね。 + +02:48.950 --> 02:52.340 +ユーザーメッセージは実際のプロンプトである。 + +02:52.550 --> 02:54.410 +他にもいくつか特徴がある。 + +02:54.410 --> 02:55.700 +他にもできることはある。 + +02:55.730 --> 03:00.890 +温度と呼ばれるものを0から1の間で渡すことができ、 通常、 1はよりランダムで創造的な出力が欲しいことを意味し、 + +03:00.890 --> 03:09.960 +0は可能な限り低く集中した、 決定論的な反復可能な設定となる。 + +03:10.320 --> 03:14.250 +だから、 これもよく提供できるパラメーターのひとつだ。 + +03:14.280 --> 03:20.010 +そこで今回は、 「あなたはジョークを言うのが得意なアシスタントです」というシステムメッセージを設定する。 + +03:20.010 --> 03:26.670 +そして、 ユーザー・プロンプトは、 データ・サイエンティストの聴衆に向けて軽いジョークを言う。 + +03:26.670 --> 03:30.000 +それはあなたであり、 私でもある。 + +03:30.660 --> 03:35.850 +オーケー、 ではこの構成は、 あなたにとって非常に馴染みのあるものであることを願っている。 + +03:35.850 --> 03:44.910 +ここでは、 プロンプトをリストに入れて、 システムとユーザーを要素とし、 これら2つの要素に役割を設定します。 + +03:44.940 --> 03:49.860 +このリストに入るにあたって、 説明するまでもないだろう。 + +03:50.040 --> 03:55.080 +ええと、 私が言ったように、 この、 この、 ええと、 ここの値、 ロールはシステムでもユーザーでもいい。 + +03:55.080 --> 03:56.070 +後で分かることだ。 + +03:56.070 --> 03:57.570 +アシスタントになることもある。 + +03:57.570 --> 03:59.760 +だから、 システム・ユーザーでもアシスタントでもいい。 + +03:59.760 --> 04:04.150 +そして今週の後半には、 そこに入れられる他のものも見つけることになる。 + +04:04.240 --> 04:04.990 +だから + +04:05.020 --> 04:09.790 +しかし今は、 これから使う2つのロールとして、 systemとuserを覚えておけばいい。 + +04:09.790 --> 04:12.610 +だから、 それをプロンプトのリストに入れた。 + +04:13.480 --> 04:16.570 +そして、 その前のセルを実行することも忘れてはならない。 + +04:16.570 --> 04:18.790 +その前に、 ここでセルを実行したか? 
+ +04:18.790 --> 04:20.350 +はい、 大丈夫です。 + +04:20.350 --> 04:20.770 +さあ、 始めよう。 + +04:20.800 --> 04:21.790 +もう一度やってみよう。 + +04:21.790 --> 04:22.840 +そのセルを実行する。 + +04:22.840 --> 04:23.860 +このセルを実行する。 + +04:23.890 --> 04:25.720 +とてもいい。 + +04:25.750 --> 04:34.390 +古いGPTモデルの一つGPT 3から始めよう。 5ターボは、 ごく最近、 最新で最高のフロンティアモデルのようだった。 + +04:34.390 --> 04:35.830 +しかし、 それはすでに古いニュースだ。 + +04:35.830 --> 04:37.330 +しかし、 我々はこれを使う。 + +04:37.330 --> 04:44.680 +OpenAIのAPIは、 OpenAI dot chat, dot completions, dot + +04:44.680 --> 04:53.500 +create completionsで、 このAPIの名前は、 基本的に既存のプロンプトのセットを受け取り、 + +04:53.500 --> 04:59.530 +会話を完成させるためにテキストを生成しようとするものです。 + +04:59.800 --> 05:07.980 +そしてcreateを呼び出すと、 モデルを渡し、 おなじみのフォーマットでメッセージを渡します。 + +05:08.010 --> 05:09.750 +では、 見てみよう。 + +05:09.780 --> 05:18.030 +そして、 回答が返ってきたときに私たちがすることは、 可能な選択肢のリストである完了点の選択肢を取ることだ。 + +05:18.030 --> 05:19.980 +しかし、 そこに含まれる要素は1つだけだ。 + +05:19.980 --> 05:23.790 +複数の選択肢を返すように指定する方法がある。 + +05:23.790 --> 05:28.740 +でも、 それをやっていないので、 ただ1つ戻ってくるだけで、 もちろんゼロ地点にある。 + +05:28.740 --> 05:35.550 +つまり、 completion dot choices zero dot messageはメッセージを返し、 contentはそれを文字列で返す。 + +05:35.760 --> 05:37.770 +だから、 それを印刷するんだ。 + +05:37.770 --> 05:39.360 +さて、 どんなジョークか見てみよう。 + +05:39.360 --> 05:42.690 +データサイエンティスト向け GPT 3. 5ターボが思いつく。 + +05:42.720 --> 05:43.680 +さあ、 始めよう。 + +05:44.010 --> 05:48.000 +データサイエンティストはなぜコンピューターと別れたのか? 
+ +05:48.000 --> 05:52.020 +二人の複雑な関係を処理しきれなかったのだ。 + +05:52.830 --> 05:53.970 +オーケー、 オーケー。 + +05:54.000 --> 05:56.250 +分かるよ、 分かるんだ。 + +05:56.280 --> 05:58.770 +世界一面白いジョークではないが、 ひどくはない。 + +05:58.800 --> 06:04.200 +データサイエンティストは物事の関係をモデル化するが、 その複雑な関係を扱うことができなかったんだ。 + +06:04.200 --> 06:04.800 +十分フェアだ。 + +06:04.800 --> 06:13.140 +GPT3からすれば、 まったく問題ない、 受け入れられるジョークだと思う。 5ターボ。 + +06:13.200 --> 06:17.010 +では、 GPT four miniがもっとうまくやれるかどうか見てみよう。 + +06:17.160 --> 06:21.450 +今回は、 APIの使い方を少し拡張するだけだ。 + +06:21.600 --> 06:26.340 +温度を含めているので、 ここで0から1の間の数値を渡すことができる。 + +06:26.340 --> 06:29.220 +最もクリエイティブなものに1点、 最もクリエイティブでないものに0点。 + +06:29.490 --> 06:34.980 +ええと、 それで、 この中で私は完成度の高い選択肢を持っていて、 メッセージの内容はゼロなんだ。 + +06:34.980 --> 06:36.720 +繰り返しになるが、 あなたはこのことをよく知っているはずだ。 + +06:36.750 --> 06:38.970 +そのパフォーマンスを見てみよう。 + +06:39.570 --> 06:42.060 +データサイエンティストはなぜ統計学者と別れたのか? + +06:42.060 --> 06:44.670 +彼女は彼があまりに意地悪だと感じたからだ。 + +06:44.700 --> 06:46.230 +なかなかいいジョークだと思うよ。 + +06:46.230 --> 06:47.490 +それでいいと思うよ。 + +06:47.490 --> 06:49.950 +それは......ああ、 これはジョークとして受け入れられるね。 + +06:49.980 --> 06:54.990 +llmsはあまり得意ではない、 と言ったのは厳しかったかもしれない。 それは至極まっとうなジョークだからだ。 + +06:55.170 --> 07:02.610 +そして、 GPT4にはちょっとした拍手を送りたい。 + +07:03.030 --> 07:09.160 +じゃあ、 GPT4ミニを試してみよう。 + +07:09.190 --> 07:12.130 +GPT4のマキシバージョンだ。 + +07:12.160 --> 07:14.260 +ああ、 大物だ。 + +07:14.260 --> 07:16.000 +そして私たちはそれを問う。 + +07:16.030 --> 07:19.210 +温度を同じにしよう。 + +07:19.240 --> 07:21.160 +冗談で聞いてみよう。 + +07:21.190 --> 07:23.230 +2人で、 どうなるか見てみよう。 + +07:24.250 --> 07:27.130 +データサイエンティストはなぜ破産したのか? + +07:27.130 --> 07:30.850 +なぜなら、 彼らのアレーにはキャッシュが見つからなかったからだ。 + +07:32.410 --> 07:35.560 +もし、 それが彼らの配列になかったら、 私はその方がいいと思ったかもしれない。 + +07:35.560 --> 07:38.650 +キャッシュは見つからなかった。 + +07:38.650 --> 07:39.910 +大丈夫だろう。 + +07:40.810 --> 07:42.280 +何か見落としているのかもしれない。 + +07:42.310 --> 07:45.280 +よく分からないんだ。 + +07:45.550 --> 07:47.380 +ええと、 もうひとつやってみよう。 + +07:47.560 --> 07:52.480 +前にやったように、 温度を少し下げてみよう。 + +07:52.990 --> 07:56.560 +科学者たちはなぜロジスティック回帰モデルと決別したのか? 
+ +07:56.590 --> 07:58.390 +適切な相手が見つからなかったからだ。 + +07:58.600 --> 08:00.130 +あ、 あのね、 それは至極まっとうなことだよ。 + +08:00.130 --> 08:00.970 +それは受け入れられる。 + +08:00.970 --> 08:08.860 +ミニとマキシのどっちが好きかわからないけど、 これはこれで、 なかなかしっかりしたギャグだよ。 + +08:08.860 --> 08:12.640 +それは間違いなくパスだ。 + +08:13.810 --> 08:14.800 +分かった。 + +08:14.830 --> 08:17.050 +第3節に移ろう。 5. + +08:17.080 --> 08:17.680 +ソネット + +08:17.950 --> 08:21.430 +APIは驚くほど似ている。 + +08:21.430 --> 08:22.270 +それは良いニュースだ。 + +08:22.270 --> 08:25.030 +基本的にはとてもよく似ている。 + +08:25.060 --> 08:26.530 +いくつかの違いがある。 + +08:26.530 --> 08:31.510 +システム・メッセージは別の属性として渡さなければならない。 + +08:31.510 --> 08:36.430 +そしてメッセージはまたこのデッキリストだ。 + +08:36.430 --> 08:42.550 +しかしもちろん、 システムメッセージの最初のエントリーは持っていない。 + +08:42.910 --> 08:45.310 +うーん、 それは少し違うかな。 + +08:45.340 --> 08:52.360 +また、 Max tokensは、 OpenAI APIでトークンの最大数を指定するためのオプションです。 + +08:52.360 --> 08:55.180 +クロードには必要なことだと思う。 + +08:55.180 --> 08:56.860 +だからここにあるんだ。 + +08:56.860 --> 08:59.200 +しかし、 それ以外はすべてよく似ているはずだ。 + +08:59.230 --> 09:03.250 +API自体は少し覚えやすい。 + +09:03.250 --> 09:05.740 +クロード・ドット・メッセージ・ドット・クリエイトだ。 + +09:05.740 --> 09:11.470 +少し短いですが、 それ以外はOpenAIのChatGPTの完了が作成するものとよく似ています。 + +09:11.710 --> 09:13.150 +あ、 そうだ。 + +09:13.180 --> 09:17.830 +そして返答が返ってきたときには、 メッセージの内容はゼロになっている。 + +09:17.860 --> 09:22.630 +繰り返しますが、 最初の1つを要求していますが、 1つしか返ってきません。 なぜなら、 + +09:22.630 --> 09:28.750 +OpenAIのドット・コンテンツに相当するドット・テキストを1つしか要求していないからです。 + +09:28.780 --> 09:30.100 +では、 見てみよう。 + +09:30.100 --> 09:35.020 +これは、 クロードのAPIフレームワークのために役立つことを期待している。 + +09:35.020 --> 09:38.080 +さて、 クロードがジョークでどうするか見てみよう。 + +09:39.910 --> 09:40.630 +もちろんだ。 + +09:40.660 --> 09:43.540 +データサイエンティスト向けの軽いジョークを紹介しよう。 + +09:43.570 --> 09:46.210 +データサイエンティストはなぜ恋人と別れるのか? 
+ +09:46.240 --> 09:51.310 +ただ、 その関係にあまりにばらつきがありすぎて、 それを正常化するいい方法が見つからなかった。 + +09:51.970 --> 09:53.530 +ああ、 そうだね。 + +09:53.530 --> 09:59.110 +よりオタク的というか......もう少し、 うーん、 データサイエンス的というか。 + +09:59.110 --> 10:03.640 +ほんの少し笑えなくなったかもしれないが、 決して悪くはない。 + +10:03.640 --> 10:07.570 +GPT4よりGPT4が好きかどうかは、 好みの問題だろう。 + +10:07.900 --> 10:10.100 +完璧なジョークだ。 + +10:10.220 --> 10:14.210 +爆発的に面白いというわけではないが、 完璧にしっかりしていると言える。 + +10:14.210 --> 10:15.440 +ひどくはない。 + +10:15.950 --> 10:16.550 +うーん。 + +10:16.610 --> 10:22.220 +いずれにせよ、 これはAPIについての話であり、 ジョークの話である。 + +10:22.250 --> 10:24.800 +これからお見せしたいのは、 ストリーミングについてです。 + +10:24.890 --> 10:29.090 +ストリーミングの例の前に、 ストリーミングについて簡単に話したのを覚えているかい? + +10:29.090 --> 10:36.470 +マークダウンを復活させ、 そのマークダウンを処理しなければならなかったからだ。 + +10:36.470 --> 10:40.280 +これは、 マークダウン・レスポンスを扱っていないので、 少し単純に見える。 + +10:40.280 --> 10:46.730 +同じモデル、 クラウド3にお願いするつもりだ。 冗談でまた5点、 でも今回は結果をストリーミングで返します。 + +10:46.730 --> 10:54.470 +OpenAIにストリーミングを依頼したとき、 別の属性stream equals trueを追加したことを覚えているだろうか。 + +10:54.470 --> 10:56.570 +そしてそれは、 ストリーミング・モードであることを意味していた。 + +10:56.570 --> 10:58.490 +クロードの場合は少し違う。 + +10:58.490 --> 11:00.380 +余計な属性はない。 + +11:00.380 --> 11:06.440 +その代わり、 dot createメソッドの代わりにdot streamメソッドを呼び出す。 + +11:06.440 --> 11:09.020 +そこで少し異なるアプローチを取る。 + +11:09.020 --> 11:13.790 +それは、 ストリーミングのための人間工学とOpenAIのニュアンスの違いだ。 + +11:13.790 --> 11:16.430 +そこで、 クロード・メッセージ・ストリームと呼ぶことにする。 + +11:16.460 --> 11:17.840 +それ以外は同じだ。 + +11:17.840 --> 11:22.430 +そして、 戻ってきたものについては、 ストリームとしての結果を持つコンテキスト・マネージャーを使用する。 + +11:22.610 --> 11:26.960 +それから、 ストリーム・テキスト・ストリームのテキスト用だね。 + +11:26.960 --> 11:31.550 +オープンAIは、 それに応えるチャンクのためのものだったことを覚えているだろう。 + +11:31.550 --> 11:35.990 +だからOpenAIは、 結果を読み返す方法がまた少し違っていた。 + +11:35.990 --> 11:37.040 +でも、 それがある。 + +11:37.040 --> 11:41.420 +それぞれの小さな塊を取り戻し、 その塊を印刷する。 + +11:41.540 --> 11:46.460 +その理由は、 各チャンクを別々の行に印刷しないようにするためです。 + +11:46.670 --> 11:48.170 +そうでなければ、 とても読みにくい。 + +11:48.170 --> 11:49.490 +だから、 この方がよく見えるはずだ。 + +11:49.490 --> 11:56.510 +クロード3世はどうだったかな。 5ソネットは、 
JupyterLabで私たちにストリームバックされるジョークで行う。 + +11:57.200 --> 11:57.800 +これでよし。 + +11:57.800 --> 11:58.040 +分かるか? + +11:58.040 --> 11:59.060 +ストリーミングだよ。 + +11:59.330 --> 12:01.580 +もちろん、 データサイエンティスト向けの軽いジョークだ。 + +12:01.610 --> 12:03.110 +なぜ同じジョークを? + +12:03.110 --> 12:08.690 +まったく同じジョークのようだが、 ブラームスの小太鼓が加えられている。 + +12:08.840 --> 12:12.000 +最後に爆発があったのは良かった。 + +12:12.000 --> 12:14.670 +なぜ以前より多くのメダルを要求したのだろう? + +12:14.700 --> 12:15.180 +見てみよう。 + +12:15.210 --> 12:15.630 +そうだ。 + +12:15.630 --> 12:16.350 +同じだ。 + +12:16.650 --> 12:17.730 +ええと、 それは + +12:17.760 --> 12:19.020 +そして、 ちょっとした説明もある。 + +12:19.020 --> 12:22.170 +このジョークは、 データサイエンスに共通する統計的概念を利用したものだ。 + +12:22.260 --> 12:27.060 +少しマニアックだが、 データに精通した観客の笑いを誘うはずだ。 + +12:27.060 --> 12:32.070 +まあ、 君たちはデータに精通しているから、 それを判断するのは君たちだ。 + +12:32.100 --> 12:34.440 +笑ってもらえましたか? + +12:35.220 --> 12:36.540 +前進だ。 + +12:36.570 --> 12:39.120 +双子座は構造が違う。 + +12:39.120 --> 12:41.370 +実際にはかなり違うんだ。 + +12:41.400 --> 12:50.580 +グーグルの名誉のために言っておくと、 トークンを設定する機能はもっと複雑だが、 APIはもう少しシンプルだ。 + +12:50.670 --> 12:59.550 +ここではジェネレーティブ・モデル・オブジェクトを作成し、 モデルの名前を渡す。 + +12:59.550 --> 12:59.550 +5フラッシュ + +12:59.580 --> 13:03.510 +ジェミニ1のコンテキストウィンドウの大きさを覚えているだろうか。 5フラッシュ + +13:03.540 --> 13:04.680 +覚えていますか? + +13:04.710 --> 13:07.050 +以前はトップだった? 
+ +13:07.050 --> 13:10.380 +100万トークンという驚異的な数字だった。 + +13:10.410 --> 13:11.450 +100万トークン。 + +13:11.480 --> 13:13.310 +75万語。 + +13:13.340 --> 13:15.500 +というわけで、 双子座1号。 5フラッシュ + +13:15.950 --> 13:23.270 +このオブジェクトを作成するときにシステム命令を渡し、 ジェミニ・ドットを呼び出す。 + +13:23.270 --> 13:26.420 +ユーザープロンプトでコンテンツを生成する。 + +13:26.420 --> 13:28.520 +しかも、 ただのレスポンス・ドット・テキストだ。 + +13:28.520 --> 13:37.520 +リクエストもレスポンスも、 もう少しシンプルなAPIにしてみよう。 + +13:37.670 --> 13:42.200 +重要なのは、 なぜデータサイエンティストは統計学者と別れたのか、 ということだ。 + +13:42.200 --> 13:45.590 +p値で意見が一致しなかったからだ。 + +13:47.420 --> 13:48.020 +ああ。 + +13:48.800 --> 13:52.310 +まあ、 データ・サイエンスの側面はわかるよ。 + +13:52.310 --> 13:53.810 +よく分からないんだ。 + +13:53.900 --> 13:55.070 +ハハハ。 + +13:55.370 --> 13:57.380 +ああ、 たぶん君はわかっているんだろうね。 + +13:57.380 --> 13:59.540 +それに、 僕は、 うとうとしているんだ。 + +13:59.540 --> 14:01.310 +ああ、 その場合はぜひ指摘してほしい。 + +14:01.310 --> 14:05.450 +でも、 そのジョークの面白さは特に分からない。 + +14:05.450 --> 14:13.440 +だから私としては、 ジェミニはジェミニ1より遅れていると思う。 ユーモアの価値という点では5フラッシュ。 + +14:14.220 --> 14:15.060 +分かった。 + +14:15.090 --> 14:18.960 +ともあれ、 ちょっと真面目にGPT4に戻ろう。 + +14:19.170 --> 14:20.910 +最初の質問と同じだ。 + +14:20.910 --> 14:22.410 +君は役に立つアシスタントだ。 + +14:22.440 --> 14:25.950 +ビジネス上の問題がLLMのソリューションに適しているかどうかは、 どのように判断すればよいのでしょうか? 
+ +14:25.950 --> 14:29.790 +覚えているだろうか、 それが私たちがチャット・インターフェースを通じてした最初の質問だった。 + +14:29.970 --> 14:32.970 +そして今、 私たちはこれを再び一つにすることができる。 + +14:32.970 --> 14:34.260 +これは、 あなたにとって馴染み深いものだろう。 + +14:34.290 --> 14:37.320 +結果をマークダウンでストリームバックする。 + +14:37.320 --> 14:40.770 +つまり、 OpenAI chat dot completions dot createだ。 + +14:40.770 --> 14:41.880 +我々はモデルにパスを出す。 + +14:41.880 --> 14:43.350 +大物を狙うんだ。 + +14:43.530 --> 14:44.820 +プロンプトを使うんだ。 + +14:44.820 --> 14:45.840 +温度を設定した。 + +14:45.840 --> 14:47.250 +私たちはストリーム=トゥルーと言う。 + +14:47.250 --> 14:49.680 +それがOpenAIのやり方だ。 + +14:49.830 --> 14:54.750 +ええと、 それからこれは、 結果を再びストリームバックする方法です。 + +14:54.750 --> 14:57.720 +マークダウンを扱っているので、 もう少し複雑だ。 + +14:57.720 --> 15:04.950 +そのため、 基本的に反復ごとにマークダウンを更新するために、 ここではある種の特別なことをしなければならない。 + +15:04.980 --> 15:08.850 +もし、 私たちがこのようにしなければならないと確信が持てないのであれば、 それを取り除いて違うやり方をしてみれば、 + +15:08.850 --> 15:11.190 +何が起こるかすぐにわかるだろう。 + +15:11.220 --> 15:13.200 +見栄えは良くない。 + +15:13.440 --> 15:15.720 +それを実行しよう + +15:15.720 --> 15:21.810 +そして、 その結果がここにある。 + +15:22.500 --> 15:28.260 +マークダウンが部分的にしか通過していないときに、 フリックが起こっているのがわかるだろう。 + +15:28.260 --> 15:33.600 +そのため、 小見出しを表すハッシュが複数ある場合などを解釈している。 + +15:33.600 --> 15:37.050 +まだ1回しかハッシュを受け取っていないし、 大きなヘディングが来ると思っている。 + +15:37.110 --> 15:42.660 +少なくとも、 マークダウンが表示されるときにチカチカと点滅していたのは、 一時的に見たことだと思う。 + +15:42.660 --> 15:55.020 +しかし、 その最後には、 もちろん、 とてもきれいに構成された回答が返ってくる。 + +15:55.740 --> 15:56.460 +分かった。 + +15:56.460 --> 16:04.140 +これで、 さまざまなAPIについて理解していただけたと思う。 + +16:04.170 --> 16:13.200 +そして、 次のビデオでは、 実際に2、 3人のLLMがお互いに会話をする予定だ。 + +16:13.200 --> 16:14.340 +それではまた。 diff --git a/week5/community-contributions/subtitles/srts/59166481/ko_KR.srt b/week5/community-contributions/subtitles/srts/59166481/ko_KR.srt new file mode 100755 index 0000000..ba0b391 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166481/ko_KR.srt @@ -0,0 +1,859 @@ +WEBVTT + +00:00.860 --> 00:05.330 +우리가 좋아하는 장소에 다시 모였네요 주피터 연구소 + +00:05.330 --> 00:07.310 +몇 주 준비됐죠 + +00:07.340 --> 00:09.620 +둘째 주에는 운동하고요 + 
+00:09.620 --> 00:14.930 +2주 차 폴더로 가서 2주 차 첫날을 열어보죠 + +00:15.230 --> 00:18.230 +자, 시작할게요 + +00:18.230 --> 00:26.990 +첫째 주에 다중 프런티어 LMS를 사용했죠 채팅방 사용자 인터페이스를 통해서요 웹을 통한 + +00:26.990 --> 00:32.990 +사용법이죠 API를 통해 OpenAI API에 연결했어요 + +00:33.020 --> 00:39.890 +그래서 오늘은 안트로픽과 구글의 API를 통합해 오픈AI 사용 기술에 추가할 + +00:39.890 --> 00:41.090 +거예요 + +00:41.960 --> 00:47.630 +다시 한번 말씀드리지만 계속 그 얘기 하면 절 죽이실 거잖아요 + +00:47.630 --> 00:50.300 +여기에 열쇠를 꽂아두죠 + +00:50.300 --> 00:55.850 +오픈라이 키를 설정할 수 있죠 아마 지난주에 이미 했겠죠 인류애 + +00:55.850 --> 00:58.460 +키와 구글 제미니 키를요 + +00:58.490 --> 01:05.330 +하지만 구글 키를 설정하는 데 더 많은 모험이 있다는 걸 명심하세요 + +01:05.390 --> 01:09.410 +설정이 끝나면 창조하는 거죠 + +01:09.470 --> 01:11.330 +파일을 생성했어야 해요 + +01:11.480 --> 01:15.170 +그 형태로 열쇠가 있는지 확인하세요 + +01:15.560 --> 01:21.500 +그렇게 하는 대신 이 셀에서 키를 입력하면 돼요 + +01:21.500 --> 01:24.020 +그렇게 할 수 있어요 + +01:24.020 --> 01:26.270 +보안상 권장할 수 없는 일이죠 + +01:26.270 --> 01:30.350 +언젠가 이걸 공개해서 다른 사람들이 열쇠를 보게 되면요 + +01:30.380 --> 01:32.300 +서론은 그만하죠 + +01:32.330 --> 01:33.800 +수입품 검사를 해 보죠 + +01:33.800 --> 01:37.400 +환경 변수를 설정하는 코드 블록을 실행해보죠 + +01:37.400 --> 01:38.900 +잘 아시네요 + +01:38.900 --> 01:46.280 +이 셀에서 오픈AI API 연결을 설정하기 위해 OpenAI에 + +01:46.280 --> 01:49.400 +같은 전화를 걸었어요 + +01:49.400 --> 01:55.790 +클로드 비트도 비슷하고 제미니 비트는 구글에서 약간 다르게 + +01:55.790 --> 01:56.840 +만들었죠 + +01:56.960 --> 02:04.220 +이런 식으로 반이나 어느 정도 비슷한 명령을 이 세 가지에 사용하고 있어요 + +02:04.730 --> 02:05.510 +네 + +02:05.510 --> 02:11.420 +Lms가 잘하는 것들을 많이 보았고 몇 가지 실행되는 것들을 보았습니다 하지만 대부분은 Lms가 + +02:11.420 --> 02:13.160 +잘하는 것들이죠 + +02:13.190 --> 02:17.600 +한 가지 잘 안 되는 건 농담을 하는 거예요 + +02:17.600 --> 02:24.080 +아주 딱 맞는 문맥을 주면 그 안에서 농담을 구성해야 해요 + +02:24.260 --> 02:28.610 +이건 상업적인 예는 아니지만 재미를 위한 방법이고 + +02:28.610 --> 02:30.980 +API로 경험을 쌓는 거죠 + +02:31.040 --> 02:34.850 +API 상에서 농담을 해 줄 llms를 모실 거예요 + +02:35.120 --> 02:36.770 +어떤 정보를 제공하죠? 
+ +02:36.770 --> 02:37.550 +API 하나를 보내죠 + +02:37.580 --> 02:41.750 +일반적으로 사용하고 싶은 모델의 이름을 항상 지정해요 + +02:41.750 --> 02:45.380 +시스템 메시지와 사용자 메시지를 주로 제공하죠 + +02:45.380 --> 02:48.950 +전반적인 컨텍스트를 제공하는 시스템 메시지에 아주 익숙하죠 + +02:48.950 --> 02:52.340 +사용자 메시지가 실제 프롬프트죠 + +02:52.550 --> 02:54.410 +다른 특징도 있어요 + +02:54.410 --> 02:55.700 +다른 방법도 있어요 + +02:55.730 --> 03:00.890 +온도라는 걸 통과시킬 수 있어요 0에서 1 사이죠 보통 좀 더 무작위적인 + +03:00.890 --> 03:09.960 +창의적 출력 출력을 뜻합니다 0은 가장 가능성이 낮은 집중된 결정론적 반복 가능 설정이고요 + +03:10.320 --> 03:14.250 +여러분이 제공할 수 있는 또 다른 매개 변수죠 + +03:14.280 --> 03:19.470 +이 경우에 시스템 메시지를 설정하겠습니다 농담을 잘 하는 비서라고 하는 + +03:19.470 --> 03:20.010 +거죠 + +03:20.010 --> 03:26.670 +데이터 과학자들을 위해 가벼운 농담을 할 거예요 + +03:26.670 --> 03:30.000 +당신과 내가 되겠죠 + +03:30.660 --> 03:35.850 +여기 이 구조는 여러분에게 아주 익숙하길 바라요 + +03:35.850 --> 03:43.410 +여기서 프롬프트들을 목록에 넣습니다. 요소들, 시스템, 사용자를 역할로 for each each each each each요. + +03:43.410 --> 03:44.910 +이 두 요소에서요. + +03:44.940 --> 03:49.860 +이 리스트를 살펴보면, 설명하지 않아도 될 것 같네요. 이제 익숙해졌으니까요. + +03:50.040 --> 03:55.080 +말씀드렸듯이 이 값은 여기 이 역할이 시스템 또는 유저가 될 수 있어요 + +03:55.080 --> 03:56.070 +곧 알게 될 거예요 + +03:56.070 --> 03:57.570 +조수라고도 할 수 있죠 + +03:57.570 --> 03:59.760 +시스템 사용자나 보조가 될 수 있죠 + +03:59.760 --> 04:04.150 +그리고 이번 주 후반에는 다른 것도 넣을 수 있을 거예요 + +04:04.240 --> 04:04.990 +그래서요? 
+ +04:05.020 --> 04:09.790 +하지만 지금은 시스템과 사용자를 우리가 사용할 두 역할로 기억해야 해요 + +04:09.790 --> 04:12.610 +그래서 그걸 Put 프롬프트 목록에 넣었어요 + +04:13.480 --> 04:16.570 +그 전에 감옥을 처리해야 하고요 + +04:16.570 --> 04:18.790 +그 전에 이 감방을 실행했어요 + +04:18.790 --> 04:20.350 +네, 괜찮았어요 + +04:20.350 --> 04:20.770 +시작할게요 + +04:20.800 --> 04:21.790 +다시 해 보죠 + +04:21.790 --> 04:22.840 +저 감방을 처형해요 + +04:22.840 --> 04:23.860 +이 감방을 실행해요 + +04:23.890 --> 04:25.720 +좋아요 + +04:25.750 --> 04:33.280 +오래된 GPT 모델부터 살펴보죠 GPT 3 5 터보 엔진인데 최근에 나온 최고의 개척 + +04:33.280 --> 04:34.390 +모델이죠 + +04:34.390 --> 04:35.830 +하지만 이미 지난 일이에요 + +04:35.830 --> 04:37.330 +하지만 이걸 쓸 거예요 + +04:37.330 --> 04:44.680 +오픈AI에서 이제 익숙해진 API는 OpenAI.챗, .완성 .Creetions, + +04:44.680 --> 04:53.500 +.Create 완성 이 API의 이름이죠 기존 프롬프트 모음을 가져다가 대화를 완성하기 + +04:53.500 --> 04:59.530 +위해 텍스트 생성을 시도하는 거예요 + +04:59.800 --> 05:07.980 +생성을 통해 모델과 메시지를 전달합니다 여러분이 익숙한 형식으로 전달하죠 + +05:08.010 --> 05:09.750 +어디 보죠 + +05:09.780 --> 05:15.870 +응답을 받았을 때 우리가 하는 건 완료 .선택입니다 가능한 + +05:15.870 --> 05:18.030 +선택 목록이죠 + +05:18.030 --> 05:19.980 +하지만 한 가지 요소만 넣을 거예요 + +05:19.980 --> 05:23.790 +여러 개의 선택을 반환하도록 지정할 수 있는 방법이 있어요 + +05:23.790 --> 05:28.740 +하지만 아직 안 했으니 get get을 하면 당연히 0위치에 있죠 + +05:28.740 --> 05:35.550 +완료 .선택 .0.Message는 메시지를 반환하고 콘텐츠는 문자열로 반환하죠 + +05:35.760 --> 05:37.770 +그걸 get 해 프린트하는 거죠 + +05:37.770 --> 05:39.360 +어떤 장난인지 볼까요? + +05:39.360 --> 05:42.690 +데이터 과학자인 GPT 3을 위해서요 5 터보로 할 수 있어요 + +05:42.720 --> 05:43.680 +시작할게요 + +05:44.010 --> 05:48.000 +왜 데이터 과학자들이 컴퓨터와 분리했을까요? + +05:48.000 --> 05:52.020 +둘의 복잡한 관계를 감당하지 못했죠 + +05:52.830 --> 05:53.970 +알았어요 + +05:54.000 --> 05:56.250 +Get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it. 
+ +05:56.280 --> 05:58.770 +세상에서 가장 웃긴 농담은 아니지만 끔찍하지도 않아요 + +05:58.800 --> 06:03.540 +데이터 과학자들은 사물의 관계를 모델로 삼지만 그 복잡한 관계를 다룰 + +06:03.540 --> 06:04.200 +수 없어요 + +06:04.200 --> 06:04.800 +좋아요 + +06:04.800 --> 06:13.140 +GPT 3에서 나온 농담치고는 아주 괜찮은데요 5 터보요 + +06:13.200 --> 06:17.010 +GPT 4 미니는 더 잘할 수 있을까요? + +06:17.160 --> 06:21.450 +이번엔 API 사용을 살짝 확장할게요 + +06:21.600 --> 06:26.340 +온도도 포함해서 0에서 1 사이의 숫자를 입력할 수 있어요 + +06:26.340 --> 06:29.220 +창의성은 1점, 최저점은 0점이죠 + +06:29.490 --> 06:34.980 +이 중엔 완료 선택지와 메시지 콘텐츠도 없어요 + +06:34.980 --> 06:36.720 +이것도 아주 익숙할 거예요 + +06:36.750 --> 06:38.970 +잘 달리는지 보죠 + +06:39.570 --> 06:42.060 +데이터 과학자가 왜 통계학자랑 헤어졌죠? + +06:42.060 --> 06:44.670 +너무 못됐다고 생각했거든요 + +06:44.700 --> 06:46.230 +꽤 괜찮은 농담이네요 + +06:46.230 --> 06:47.490 +괜찮은 것 같아요 + +06:47.490 --> 06:49.950 +그 정도는 괜찮은 농담이죠 + +06:49.980 --> 06:54.990 +llm은 이런 농담 잘 못한다고 한 게 너무 심했나 봐요 그 정도면 괜찮은 농담인데요 + +06:55.170 --> 07:02.610 +GPT 4에 작은 박수를 보내 줘야 할 것 같네요 + +07:03.030 --> 07:09.160 +GPT 4 미니와 더 큰 사촌인 GPT 4를 써 보죠 + +07:09.190 --> 07:12.130 +GPT 4의 맥시 버전이네요 + +07:12.160 --> 07:14.260 +덩치 큰 친구요 + +07:14.260 --> 07:16.000 +우리가 물어볼 거예요 + +07:16.030 --> 07:19.210 +온도는 똑같이 유지해야 해요 그래야 실수가 없죠 + +07:19.240 --> 07:21.160 +농담 삼아 물어보죠 + +07:21.190 --> 07:23.230 +둘, 어떻게 되나 보죠 + +07:24.250 --> 07:27.130 +데이터 과학자가 왜 파산했죠? + +07:27.130 --> 07:30.850 +어레이에서 캐시를 못 찾았거든요 + +07:32.410 --> 07:35.560 +Get up이 아니었다면 더 나았을 거예요 + +07:35.560 --> 07:38.650 +캐시는 못 찾았어요 + +07:38.650 --> 07:39.910 +괜찮을 거예요 + +07:40.810 --> 07:42.280 +내가 뭘 놓쳤나 봐요 + +07:42.310 --> 07:45.280 +Get me get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get it, I'm get'm it, I'm it'm it'm get'm it, I'm it'm it'm get'm it'm it'm it'm it're + +07:45.550 --> 07:47.380 +다른 걸 해 보죠 + +07:47.560 --> 07:52.480 +아까 했던 대로 비트 온도를 낮춰서 어떻게 되는지 보죠 get it + +07:52.990 --> 07:56.560 +왜 과학자들은 물류 회귀 모델과 헤어졌을까요? 
+ +07:56.590 --> 07:58.390 +맞는 걸 못 찾았거든요 + +07:58.600 --> 08:00.130 +괜찮은 생각이네요 + +08:00.130 --> 08:00.970 +그건 괜찮아요 + +08:00.970 --> 08:06.160 +미니와 맥시 중에 뭐가 더 좋은지 모르겠지만 꽤 + +08:06.160 --> 08:08.860 +튼튼한 개그 소재예요 + +08:08.860 --> 08:12.640 +이건 확실히 통과라고 할 수 있겠네요 + +08:13.810 --> 08:14.800 +좋아요 + +08:14.830 --> 08:17.050 +3번 조항으로 넘어가죠 5분 + +08:17.080 --> 08:17.680 +소네트요 + +08:17.950 --> 08:21.430 +API가 눈에 띄게 비슷하죠 + +08:21.430 --> 08:22.270 +좋은 소식이죠 + +08:22.270 --> 08:25.030 +기본적으로 아주 비슷해요 + +08:25.060 --> 08:26.530 +차이점이 몇 가지 있어요 + +08:26.530 --> 08:31.510 +시스템 메시지를 개별 특성으로 전달해야 해요 + +08:31.510 --> 08:36.430 +메시지는 다시 이 데크 목록이에요 + +08:36.430 --> 08:42.550 +물론 시스템 메시지의 첫 항목은 없어요 이미 별도로 넘겼으니까요 + +08:42.910 --> 08:45.310 +그건 미묘한 차이죠 + +08:45.340 --> 08:52.360 +최대 토큰은 오픈AI API 선택 사항으로 최대 토큰의 수를 지정하는 데 사용되죠 + +08:52.360 --> 08:55.180 +클로드도 그래야 할 거예요 + +08:55.180 --> 08:56.860 +그래서 여기 들어있군요 + +08:56.860 --> 08:59.200 +하지만 그 외에는 전부 비슷해 보여야 해요 + +08:59.230 --> 09:03.250 +API 자체는 외우기가 좀 더 쉬워요 비트 + +09:03.250 --> 09:05.740 +클로드 점 메시지 점 만들기예요 + +09:05.740 --> 09:11.470 +약간 짧지만 오픈AI 챗GPT 완성본과 상당히 비슷해요 + +09:11.710 --> 09:13.150 +저기 있네요 + +09:13.180 --> 09:17.830 +응답이 오면 메시지 콘텐츠 get이 0이 되죠 + +09:17.860 --> 09:22.630 +첫 번째 것만 요청하는데 하나만 나올 거예요 왜냐하면 .text만 + +09:22.630 --> 09:28.750 +요청했거든요 OpenAI의 .content와 같은 거죠 + +09:28.780 --> 09:30.100 +어디 보죠 + +09:30.100 --> 09:35.020 +클로드의 API 프레임워크에 유용하면 좋겠네요 + +09:35.020 --> 09:38.080 +클로드가 농담을 어떻게 하는지 보죠 + +09:39.910 --> 09:40.630 +네 + +09:40.660 --> 09:43.540 +데이터 과학자들을 위한 가벼운 농담이 있어요 + +09:43.570 --> 09:46.210 +데이터 과학자들은 왜 사랑하는 사람과 헤어질까요? 
+ +09:46.240 --> 09:51.310 +관계에 너무 많은 변화가 있었고 그걸 정상화할 좋은 방법을 찾지 못했어요 + +09:51.970 --> 09:53.530 +네, 괜찮아요 + +09:53.530 --> 09:59.110 +좀 더 너디스러운 것 같아요 데이터 과학에 가깝죠 + +09:59.110 --> 10:03.640 +비트만 조금 less지만 나쁘지 않아요 + +10:03.640 --> 10:07.570 +글쎄요, GPT 4보다 그게 더 좋은지는 취향의 문제겠죠 + +10:07.900 --> 10:10.100 +완벽한 농담이죠 + +10:10.220 --> 10:14.210 +폭발할 만큼 웃기진 않지만 아주 튼튼해요 + +10:14.210 --> 10:15.440 +나쁘지 않아요 + +10:15.950 --> 10:16.550 +네 + +10:16.610 --> 10:22.220 +어쨌든 핵심은 API와 농담에 관한 겁니다 늘 재미있긴 하지만요 + +10:22.250 --> 10:24.800 +지금부터 보여드릴 건 스트리밍에 관한 거예요 + +10:24.890 --> 10:29.090 +스트리밍 예시를 보기 전에 스트리밍에 대해 잠깐 얘기했었죠? + +10:29.090 --> 10:33.140 +전에 했던 건 좀 복잡해 보였어요 비트코인 가격 인하를 다시 + +10:33.140 --> 10:36.470 +해야 하고 그 가격 인하에 대처해야 했으니까요 + +10:36.470 --> 10:40.280 +마크다운 비트를 다루는 게 아니라서 더 간단해 보이죠 + +10:40.280 --> 10:46.730 +같은 모델인 클라우드 3에 질문할게요 다시 5가 나왔네요 이번엔 결과를 스트리밍할게요 + +10:46.730 --> 10:53.090 +오픈AI에 스트림하라고 요청했을 때 다른 특성 스트림이 true인 것을 추가한 것을 + +10:53.090 --> 10:54.470 +기억하시나요? + +10:54.470 --> 10:56.570 +그건 스트리밍 모드였다는 뜻이죠 + +10:56.570 --> 10:58.490 +클로드는 조금 다르죠 + +10:58.490 --> 11:00.380 +추가 속성은 없어요 + +11:00.380 --> 11:06.440 +.Stream 메서드를 호출해요 .Create 메서드 대신에요 + +11:06.440 --> 11:09.020 +접근법이 약간 달라요 + +11:09.020 --> 11:13.790 +인도적인 것과 오픈AI의 스트리밍은 차이가 있어요 + +11:13.790 --> 11:16.430 +클로드 메시지의 흐름이라고 부르기로 했어요 + +11:16.460 --> 11:17.840 +그 외에는 똑같아요 + +11:17.840 --> 11:22.430 +돌아온 결과로는 스트림으로서의 결과를 가진 컨텍스트 관리자를 사용하죠 + +11:22.610 --> 11:26.960 +스트림 텍스트 스트림의 텍스트죠 + +11:26.960 --> 11:31.550 +오픈아이는 그에 대한 답장이었죠 + +11:31.550 --> 11:35.990 +오픈AI는 비트 백 결과를 읽는 방식이 조금 달랐어요 + +11:35.990 --> 11:37.040 +하지만 저기 있네요 + +11:37.040 --> 11:41.420 +각각의 덩어리를 get 해 프린트하죠 + +11:41.540 --> 11:46.460 +이렇게 하는 이유는 한 줄에 한 덩어리가 찍히지 않도록 하기 위해서죠 + +11:46.670 --> 11:48.170 +안 그러면 읽기 힘들었을 거예요 + +11:48.170 --> 11:49.490 +이게 더 보기 좋을 거예요 + +11:49.490 --> 11:56.510 +클로드 3을 보죠 농담과 함께 소네트 5개가 유피터랩에서 스트리밍될 거예요 + +11:57.200 --> 11:57.800 +됐어요 + +11:57.800 --> 11:58.040 +봤죠? 
+ +11:58.040 --> 11:59.060 +스트리밍이에요 + +11:59.330 --> 12:01.580 +데이터 사이언스에게는 가벼운 농담이 있죠 + +12:01.610 --> 12:03.110 +왜 똑같은 농담을 해요? + +12:03.110 --> 12:08.690 +똑같은 농담 같지만 브람스 드럼에 추가된 거죠 + +12:08.840 --> 12:12.000 +마지막에 폭발하는 게 좋았어요 + +12:12.000 --> 12:14.670 +왜 전보다 패를 더 많이 달라고 했을까요? + +12:14.700 --> 12:15.180 +어디 보죠 + +12:15.210 --> 12:15.630 +아뇨 + +12:15.630 --> 12:16.350 +똑같아요 + +12:16.650 --> 12:17.730 +네 + +12:17.760 --> 12:19.020 +설명이 나오죠 + +12:19.020 --> 12:22.170 +이 농담은 데이터 과학에서 흔한 통계 개념을 이용한 거예요 + +12:22.260 --> 12:27.060 +좀 따분하지만 데이터에 밝은 관객들은 웃을 거예요 Get it + +12:27.060 --> 12:32.070 +데이터에 밝은 분들이니 판단해 주실 수 있겠죠 + +12:32.100 --> 12:34.440 +Get it, Get it, Get it, Get it, Get, Get, Get, Get, Get 웃었어요? + +12:35.220 --> 12:36.540 +넘어가죠 + +12:36.570 --> 12:39.120 +제미니는 구조가 달라요 + +12:39.120 --> 12:41.370 +사실 비트는 좀 달라요 + +12:41.400 --> 12:48.780 +구글 크레딧은 토큰을 설정하는 기능이 훨씬 복잡하지만 API 설정은 + +12:48.780 --> 12:50.580 +좀 더 간단해요 + +12:50.670 --> 12:56.850 +여기 보이는 것처럼 모델 객체를 생성해서 모델 이름을 입력해요 제미니 1호를 + +12:56.850 --> 12:59.550 +쓸 거예요 5번 섬광이에요 + +12:59.580 --> 13:03.510 +제미니 1호의 경우 얼마나 큰 문맥 창문이 필요했는지 기억하시죠? 5번 섬광이에요 + +13:03.540 --> 13:04.680 +기억할 수 있겠어요? + +13:04.710 --> 13:07.050 +전에 있던 테이블 위였나요? + +13:07.050 --> 13:10.380 +놀랍게도 백만 토큰이었죠 + +13:10.410 --> 13:11.450 +백만 토큰요 + +13:11.480 --> 13:13.310 +750,000단어요 + +13:13.340 --> 13:15.500 +제미니 1호예요 5번 섬광이에요 + +13:15.950 --> 13:23.270 +이 객체를 만들 때 시스템 지시를 전달해요 제미니 닷이라고 부르죠 + +13:23.270 --> 13:26.420 +사용자 프롬프트에서 콘텐츠를 생성하세요 + +13:26.420 --> 13:28.520 +응답 .text예요 + +13:28.520 --> 13:35.090 +요청과 응답 둘 다에서 좀 덜 빈둥거리죠 API가 좀 더 간단해요 하지만 + +13:35.120 --> 13:37.520 +비트의 질을 보죠 + +13:37.670 --> 13:42.200 +중요한 건, 데이터 과학자들이 왜 통계 전문가와 헤어졌느냐죠 + +13:42.200 --> 13:45.590 +P 값에 대한 견해가 달랐기 때문이죠 + +13:47.420 --> 13:48.020 +네 + +13:48.800 --> 13:52.310 +전 데이터 과학 쪽을 봐요 + +13:52.310 --> 13:53.810 +Get you get. 잘 모르겠어요 + +13:53.900 --> 13:55.070 +네 + +13:55.370 --> 13:57.380 +Get it, get it, get it! 
이해하실지도 모르겠네요 + +13:57.380 --> 13:59.540 +전 졸린 것 같아요 + +13:59.540 --> 14:01.310 +그런 경우라면 어떻게든 절 가리키겠죠 + +14:01.310 --> 14:05.450 +근데 그 농담의 재밌는 면을 잘 모르겠어요 Get it + +14:05.450 --> 14:13.440 +그래서 제 생각에는 제미니 1호가 확실히 뒤처졌다고 봐요 유머 면에서 말이죠 + +14:14.220 --> 14:15.060 +좋아요 + +14:15.090 --> 14:18.960 +아무튼 본격적으로 GPT 4로 돌아가 보죠 + +14:19.170 --> 14:20.910 +다들 같은 질문을 했죠 + +14:20.910 --> 14:22.410 +정말 도움이 되는 조수네요 + +14:22.440 --> 14:25.950 +사업상의 문제가 LLM 해결책에 적합한지 어떻게 판단하죠? + +14:25.950 --> 14:29.790 +채팅 인터페이스를 통해 가장 먼저 받은 질문이었죠 + +14:29.970 --> 14:32.970 +이제 다시 합칠 수 있어요 + +14:32.970 --> 14:34.260 +이런 거 익숙하죠? + +14:34.290 --> 14:37.320 +가격 인하 결과를 스트리밍으로 보여드릴게요 + +14:37.320 --> 14:40.770 +OpenAI 채팅 .완료 .Create죠 + +14:40.770 --> 14:41.880 +모델을 통과시키죠 + +14:41.880 --> 14:43.350 +큰 녀석을 노릴 거예요 + +14:43.530 --> 14:44.820 +프롬프트도 사용해요 + +14:44.820 --> 14:45.840 +온도를 설정했어요 + +14:45.840 --> 14:47.250 +스트리밍은 true라고 하죠 + +14:47.250 --> 14:49.680 +오픈아이는 이렇게 요리해요 + +14:49.830 --> 14:54.750 +그리고 이런 식으로 결과를 다시 스트리밍하죠 + +14:54.750 --> 14:57.720 +비트 박스를 줄이는 중이라 좀 더 복잡해요 + +14:57.720 --> 15:03.390 +그래서 우린 여기서 특별한 작업을 해야 합니다 각 반복에서 마크다운을 새로 + +15:03.390 --> 15:04.950 +고침하기 위해서요 + +15:04.980 --> 15:08.850 +이렇게 해야 하는지 확신이 안 들면 저걸 빼고 다르게 해 보세요 + +15:08.850 --> 15:11.190 +그럼 바로 어떻게 되는지 보일 거예요 + +15:11.220 --> 15:13.200 +보기 안 좋을 거예요 + +15:13.440 --> 15:15.720 +그걸 실행해 보죠 + +15:15.720 --> 15:21.810 +get get 결과가 나왔네요. 아주 좋아 보이죠. 
+ +15:22.500 --> 15:28.260 +일부만 뚫렸을 때 튕기는 게 보이죠 + +15:28.260 --> 15:33.600 +즉 부제목을 나타내는 여러 개의 해시를 해석하는 것이죠 + +15:33.600 --> 15:37.050 +해시 하나만 받았는데 큰 게 오는 줄 아나 봐요 + +15:37.110 --> 15:41.430 +마크다운이 일어나면서 깜빡이는 현상이 잠깐 있었던 + +15:41.430 --> 15:42.660 +것 같아요 + +15:42.660 --> 15:50.730 +하지만 마지막에 우린 아주 잘 구성된 응답을 얻습니다 잘 구조화되어 마크다운 + +15:50.730 --> 15:55.020 +스트림 백에서 완벽한 포맷이죠 + +15:55.740 --> 15:56.460 +좋아요 + +15:56.460 --> 16:04.140 +다양한 API에 대한 감각을 얻었고 재미있는 질문도 비트 박스를 좀 어지럽혔죠 + +16:04.170 --> 16:12.150 +다음 비디오에서 할 것은 몇 개의 llms가 서로 대화하는 겁니다 재미있을 + +16:12.150 --> 16:13.200 +거예요 + +16:13.200 --> 16:14.340 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59166847/en_US.srt b/week5/community-contributions/subtitles/srts/59166847/en_US.srt new file mode 100755 index 0000000..1e18d21 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166847/en_US.srt @@ -0,0 +1,106 @@ +WEBVTT + +00:00.860 --> 00:05.810 +Well, they say that time flies when you're having fun, and it certainly feels like time is flying. + +00:05.840 --> 00:08.120 +Uh, hopefully for you as well as for me. + +00:08.270 --> 00:11.150 +Uh, you have reached the 20% milestone. + +00:11.150 --> 00:18.170 +You were a fifth of the way towards being an expert in all things LMN, and I hope that it feels that + +00:18.170 --> 00:18.380 +way. + +00:18.380 --> 00:23.420 +I hope you're feeling the the sense of accomplishment and the way that you are leveling up. + +00:23.420 --> 00:28.580 +Every time we get to these summary pages and think about all of the new skills you've acquired. + +00:28.580 --> 00:32.870 +So to recap them, I know I keep doing this, but I do think it's important. + +00:32.900 --> 00:39.980 +You can of course describe transformers and tokens and contacts, windows and API prices and all of + +00:40.010 --> 00:40.670 +that. 
+ +00:40.940 --> 00:48.200 +You can code now pretty confidently, I would say with the different APIs for the frontier models that + +00:48.200 --> 00:54.710 +you know well, and you can build an AI chatbot assistant, including an interactive UI. + +00:54.710 --> 00:56.150 +And I promise you it would be easy. + +00:56.150 --> 00:57.410 +And it was easy. + +00:57.560 --> 01:01.190 +You hopefully you weren't expecting it to be quite as easy as it was. + +01:01.190 --> 01:04.010 +The one line of code is insane. + +01:04.100 --> 01:09.080 +Uh, that is the, uh, the the wonder, the magic of Gradio. + +01:09.110 --> 01:17.630 +So next time we change subject to something called tools, which is a particularly interesting capability. + +01:17.630 --> 01:26.450 +It's it allows us to give LMS powers to do something, to run some functionality that we will arm it + +01:26.450 --> 01:33.470 +with so we can write some code, and then we can give the arm the ability to, to use it. + +01:33.500 --> 01:35.630 +Now that might sound a bit spooky. + +01:35.660 --> 01:40.730 +We're actually going to build something and then sort of give the LMS powers to to to do it. + +01:40.760 --> 01:42.770 +What to like run code on our box. + +01:42.770 --> 01:44.810 +We're going to let them do that. + +01:44.990 --> 01:51.110 +Um, and unfortunately, I will warn you, this is one of those things that that sounds very magical. + +01:51.110 --> 01:54.590 +And then a bit like the wizard behind the curtain. + +01:54.590 --> 02:00.140 +Uh, when you find out what this actually means, it's a little bit less magical when you know the ingredients + +02:00.140 --> 02:02.930 +to the incredible, uh, dish. + +02:02.960 --> 02:05.810 +Suddenly, uh, you think, is that all? 
+ +02:06.590 --> 02:12.770 +But for the moment, you can you can live in awe of the fact that next time we are going to empower, + +02:12.800 --> 02:21.040 +uh, frontier model with the ability to run code on our box, which is going to do things, uh, and + +02:21.190 --> 02:26.380 +I'm going to reveal the, the secret sauce behind that. + +02:26.530 --> 02:31.570 +And then perhaps it won't be quite as mysterious as it sounds, but I'm excited to take you through + +02:31.570 --> 02:31.960 +that. + +02:31.960 --> 02:34.840 +That's what we're doing next time, and I will see you then. diff --git a/week5/community-contributions/subtitles/srts/59166847/ja_JP.srt b/week5/community-contributions/subtitles/srts/59166847/ja_JP.srt new file mode 100755 index 0000000..2c1ef17 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166847/ja_JP.srt @@ -0,0 +1,91 @@ +WEBVTT + +00:00.860 --> 00:05.810 +まあ、 楽しんでいるときは時間が過ぎるのが早いというし、 確かに時間は過ぎているように感じる。 + +00:05.840 --> 00:08.120 +ああ、 できれば僕だけでなく君にとってもね。 + +00:08.270 --> 00:11.150 +ええと、 20%のマイルストーンに到達しましたね。 + +00:11.150 --> 00:18.380 +あなたはLMNのあらゆることの専門家になるための5分の1の道のりを歩んできた。 + +00:18.380 --> 00:23.420 +達成感やレベルアップしていく様子を感じていることを願う。 + +00:23.420 --> 00:28.580 +このまとめページにたどり着くたびに、 あなたが身につけた新しいスキルのすべてについて考える。 + +00:28.580 --> 00:32.870 +だから、 彼らを総括するために、 何度も言うようだけど、 これは重要なことだと思うんだ。 + +00:32.900 --> 00:40.670 +もちろん、 トランスフォーマー、 トークン、 コンタクト、 ウィンドウ、 APIの価格、 その他すべてを説明することができる。 + +00:40.940 --> 00:48.200 +あなたがよく知っているフロンティア・モデル用のさまざまなAPIを使えば、 かなり自信を持ってコーディングできるようになったし、 + +00:48.200 --> 00:54.710 +対話型UIを含むAIチャットボット・アシスタントを構築できるようになった。 + +00:54.710 --> 00:56.150 +それは簡単なことだ。 + +00:56.150 --> 00:57.410 +それは簡単なことだった。 + +00:57.560 --> 01:01.190 +これほど簡単だとは思っていなかっただろう。 + +01:01.190 --> 01:04.010 +この1行のコードは正気の沙汰ではない。 + +01:04.100 --> 01:09.080 +それがグラディオの素晴らしさであり、 マジックなんだ。 + +01:09.110 --> 01:17.630 +そこで次回は、 特に興味深い能力であるツールというものに話題を変えよう。 + +01:17.630 --> 01:33.470 +LMSに何かをさせたり、 機能を実行させたりする権限を与えることができる。 + +01:33.500 --> 01:35.630 +ちょっと不気味に聞こえるかもしれない。 + 
+01:35.660 --> 01:40.730 +私たちは実際に何かを作り、 LMSにそのための権限を与えるつもりです。 + +01:40.760 --> 01:42.770 +私たちのボックスでコードを実行するようなもの。 + +01:42.770 --> 01:44.810 +そうさせるつもりだ。 + +01:44.990 --> 01:51.110 +残念ながら、 これはとても魔法のように聞こえることのひとつなんだ。 + +01:51.110 --> 01:54.590 +そして、 カーテンの向こうの魔法使いのように。 + +01:54.590 --> 02:00.140 +これが実際に何を意味するのかがわかると、 信じられないような、 ええと、 料理の材料がわかると、 + +02:00.140 --> 02:02.930 +ちょっと不思議な感じがしなくなる。 + +02:02.960 --> 02:05.810 +突然、 ああ、 これで終わりか? + +02:06.590 --> 02:12.770 +しかし当面は、 畏敬の念を抱くことができるだろう。 + +02:12.800 --> 02:26.380 +次回は、 フロンティア・モデルに力を与え、 我々のボックス上でコードを実行できるようにする。 + +02:26.530 --> 02:31.960 +そして、 恐らく、 それほどミステリアスなものにはならないだろうが、 それを皆さんにお見せできることを楽しみにしている。 + +02:31.960 --> 02:34.840 +それが次回の予定だ。 diff --git a/week5/community-contributions/subtitles/srts/59166847/ko_KR.srt b/week5/community-contributions/subtitles/srts/59166847/ko_KR.srt new file mode 100755 index 0000000..fa9efac --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166847/ko_KR.srt @@ -0,0 +1,97 @@ +WEBVTT + +00:00.860 --> 00:05.810 +즐거운 시간은 쏜살같이 흘러간다고들 하잖아요 정말 시간이 쏜살같이 흘러가는 것 같아요 + +00:05.840 --> 00:08.120 +저와 여러분을 위해서요 + +00:08.270 --> 00:11.150 +20% 이정표에 도달했어요 + +00:11.150 --> 00:18.380 +LMN에 관한 모든 것의 5분의 1을 알게 되었어요 그렇게 느끼셨으면 좋겠어요 + +00:18.380 --> 00:23.420 +여러분이 성취감을 느끼길 바라요 레벨을 높이는 과정도요 + +00:23.420 --> 00:28.580 +요약 페이지를 볼 때마다 새로운 기술이 떠오르죠 Get it + +00:28.580 --> 00:32.870 +요약하자면, 계속 이러지만 중요한 것 같아요 + +00:32.900 --> 00:40.670 +변압기, 토큰, 연락처, 윈도우, API 가격 등을 설명하실 수 있어요 + +00:40.940 --> 00:48.200 +꽤 자신 있게 코드를 작성할 수 있어요 여러분이 잘 아는 프론티어 모델에 대한 다양한 + +00:48.200 --> 00:54.710 +API로요 대화형 UI를 포함한 인공지능 챗봇 비서를 만들 수도 있죠 + +00:54.710 --> 00:56.150 +내가 장담하는데 쉬울 거예요 + +00:56.150 --> 00:57.410 +식은 죽 먹기였죠 + +00:57.560 --> 01:01.190 +이렇게 쉬울 줄은 몰랐길 바라요 + +01:01.190 --> 01:04.010 +코드 한 줄이 정말 대단해요 + +01:04.100 --> 01:09.080 +그게 바로 그래디오의 마법이죠 + +01:09.110 --> 01:17.630 +다음엔 도구라는 것으로 주제를 바꾸죠 아주 흥미로운 기능이에요 + +01:17.630 --> 01:26.450 +LMS가 뭔가 하도록 권한을 줍니다 일부 기능을 실행해 코드를 + +01:26.450 --> 01:33.470 +작성할 수 있도록요 그런 다음 사용 권한을 
주죠 + +01:33.500 --> 01:35.630 +비트 박스는 좀 으스스하네요 + +01:35.660 --> 01:40.730 +실제로 뭔가를 구축하고 LMS에게 권한을 주는 거죠 + +01:40.760 --> 01:42.770 +박스에 코드를 실행하는 거죠 + +01:42.770 --> 01:44.810 +그렇게 하도록 둘 거예요 + +01:44.990 --> 01:51.110 +불행히도 미리 경고하는데 마법처럼 들리는 그런 거예요 + +01:51.110 --> 01:54.590 +커튼 뒤의 마법사처럼 비트를 입었죠 + +01:54.590 --> 02:00.140 +비트가 무슨 뜻인지 알게 되면 그 놀라운 요리의 재료를 알게 되면 + +02:00.140 --> 02:02.930 +마법 같은 느낌이 덜해져요 + +02:02.960 --> 02:05.810 +갑자기 그게 다인가 싶더군요 + +02:06.590 --> 02:12.770 +하지만 지금은 경외심을 가지셔도 좋습니다 다음번에는 + +02:12.800 --> 02:21.040 +개척 모델에게 권한을 부여할 테니까요 컴퓨터에 코드를 실행할 수 있고 + +02:21.190 --> 02:26.380 +그 뒤에 숨겨진 비밀도 밝혀낼 거예요 + +02:26.530 --> 02:31.960 +그렇게 되면 생각만큼 신비롭진 않겠지만 여러분을 안내하게 돼서 기뻐요 + +02:31.960 --> 02:34.840 +다음 시간에도 그렇게 할 거예요 그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59166915/en_US.srt b/week5/community-contributions/subtitles/srts/59166915/en_US.srt new file mode 100755 index 0000000..623af69 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166915/en_US.srt @@ -0,0 +1,592 @@ +WEBVTT + +00:00.440 --> 00:03.560 +Welcome back to the wonderful world of JupyterLab. + +00:03.560 --> 00:06.830 +And here we are in week two. + +00:07.490 --> 00:09.110 +Day three. + +00:09.260 --> 00:11.990 +Uh, bring up this notebook. + +00:11.990 --> 00:18.080 +So we're talking conversational AI, also known as chat bot, and we're going to get right into it. + +00:18.110 --> 00:24.680 +We start by doing our usual imports and we do our usual setting of our environment variables. + +00:24.680 --> 00:27.620 +And we initialize OpenAI. + +00:27.650 --> 00:29.840 +This time we will use OpenAI. + +00:30.020 --> 00:34.310 +And you can have it as an exercise to switch in other models if you'd like to do so. + +00:34.670 --> 00:38.510 +So uh, going to start with the basic system message. + +00:38.510 --> 00:40.340 +You are a helpful assistant. + +00:40.970 --> 00:41.480 +All right. 
+ +00:41.510 --> 00:45.800 +Now I want to talk for a bit about, um, message structure. + +00:45.980 --> 00:54.200 +So first of all, uh, reminder of the structure of a prompt message to OpenAI. + +00:54.320 --> 00:58.700 +Uh, we've seen this many times now, and so you're probably thoroughly bored of me explaining it. + +00:58.700 --> 00:59.350 +There it is. + +00:59.350 --> 01:00.010 +One more time. + +01:00.010 --> 01:00.730 +You know it well. + +01:00.760 --> 01:07.840 +A list of dictionaries that give the system the user, and it can have an assistant responding and then + +01:07.840 --> 01:09.220 +the user and so on. + +01:09.220 --> 01:12.910 +And you may remember I mentioned there is something else to come, but for now. + +01:12.910 --> 01:13.990 +System user assistant. + +01:13.990 --> 01:14.650 +User assistant. + +01:14.680 --> 01:16.960 +User assistant user and so on. + +01:17.470 --> 01:21.430 +Uh, now we are going to write a function called chat. + +01:21.430 --> 01:27.400 +And that function chat is going to take two inputs message and history. + +01:27.430 --> 01:34.930 +Message represents the current, uh, message that is being being asked that chat needs to respond to. + +01:34.960 --> 01:41.050 +And history has the history of all prior messages, all prior exchanges. + +01:41.050 --> 01:45.520 +And the structure of history is going to look like this. + +01:45.550 --> 01:50.140 +It's going to be a list, a list that consists of lists. + +01:50.140 --> 01:55.810 +And these sub lists like this are simply what the user said and what the assistant replied, what the + +01:55.810 --> 01:58.920 +user said and what the assistant replied and so on. + +01:59.340 --> 02:02.730 +So why am I asking you to do that? + +02:02.760 --> 02:07.830 +Why are we going to write a function that looks like that with parameters, that arguments that look + +02:07.860 --> 02:08.340 +like this? 
+ +02:08.340 --> 02:15.870 +The answer is because that is a particular type of function that Gradio expects for using with its chat + +02:15.870 --> 02:16.710 +user interfaces. + +02:16.710 --> 02:22.260 +And that's why Gradio expects us to write a function called chat that's going to take a message. + +02:22.260 --> 02:27.630 +It's going to take history in this structure, and it will return the next the response, the response + +02:27.630 --> 02:28.590 +to this chat. + +02:28.590 --> 02:30.390 +So that's why we're thinking about that format. + +02:30.390 --> 02:39.420 +So our job for this function is going to be convert this kind of style of message into this. + +02:39.420 --> 02:46.860 +So we're going to need to iterate row by row through this structure and build this structure that you + +02:46.860 --> 02:47.940 +see above. + +02:47.970 --> 02:49.710 +Hopefully that makes sense. + +02:49.710 --> 02:53.400 +If not, it will make sense when I show you what that looks like. + +02:53.400 --> 02:56.730 +So I'm defining a function called chat. + +02:56.760 --> 03:03.450 +It takes a message that we got to respond to an input message, and it takes history of prior messages. + +03:03.480 --> 03:09.090 +So we first of all, we we set up our list of messages, which is going to be this guy. + +03:09.090 --> 03:12.870 +And we populate it with the system prompt at the very start. + +03:12.900 --> 03:17.010 +Of course, then we are going to iterate through history. + +03:17.040 --> 03:20.460 +Each element of history again is one of these lists with two values. + +03:20.460 --> 03:24.000 +So we're going to unpack that into user message assistant message. + +03:24.000 --> 03:28.470 +And then we append the user's message and the assistant message. + +03:28.530 --> 03:30.390 +Uh each each time. + +03:30.390 --> 03:38.310 +So each row from from history turns into two rows in this list. + +03:38.310 --> 03:40.650 +One for the user, one for the assistant. 
+ +03:40.770 --> 03:42.480 +Hopefully that makes complete sense. + +03:42.480 --> 03:43.050 +Now. + +03:43.200 --> 03:44.250 +If not, you can always. + +03:44.280 --> 03:45.240 +Oh well, you don't need to. + +03:45.270 --> 03:47.370 +I was going to say you can always put in some print statements. + +03:47.430 --> 03:50.160 +Uh, I had the foresight to put in some print statements myself. + +03:50.160 --> 03:51.180 +So we will see this. + +03:51.210 --> 03:54.980 +We're going to print the history And then we're going to print the messages. + +03:54.980 --> 03:57.170 +So we get to see that too. + +03:57.530 --> 04:05.120 +Um, and then the next line is very familiar to you for this particular chat, uh, method at this point, + +04:05.150 --> 04:12.110 +this function, sorry, at this point we are then going to take, um, this set of messages and we're + +04:12.110 --> 04:14.300 +going to call OpenAI with them. + +04:14.300 --> 04:17.810 +So we do OpenAI chat dot completions, dot create. + +04:17.810 --> 04:22.970 +We pass in the model, we pass in the messages, and we're going to say please stream results. + +04:22.970 --> 04:23.810 +We might as well. + +04:23.810 --> 04:27.200 +And then we go through and we yield response. + +04:27.440 --> 04:30.170 +Um, so again this isn't actually a function. + +04:30.170 --> 04:35.900 +It's really a generator because we're going to be yielding the responses piece by piece. + +04:36.800 --> 04:37.280 +Okay. + +04:37.280 --> 04:45.500 +So what I want to do now is turn this into the kind of user interface that you saw in the slide a moment + +04:45.500 --> 04:49.850 +ago, a user interface which has an instant message style interaction. + +04:49.850 --> 04:54.580 +So obviously there's a bit of work to do there because we're going to have to, to to craft that kind + +04:54.580 --> 05:01.360 +of, um, canvas with the messages that come one after another and figure out how to do that. 
+ +05:01.420 --> 05:06.430 +Um, based on the response that's coming back from this chat message. + +05:06.940 --> 05:10.300 +Uh, I don't know if you've cottoned on, but I am, of course, fibbing. + +05:10.300 --> 05:11.770 +It's going to be really easy. + +05:11.770 --> 05:12.910 +It's going to be really easy. + +05:12.910 --> 05:14.470 +It's going to be a single line. + +05:15.310 --> 05:21.460 +Uh, so Gradio comes with something called chat interface out of the box, and chat interface, uh, + +05:21.460 --> 05:25.540 +expects a single function which needs to have this structure. + +05:25.540 --> 05:31.300 +If you've written a function which takes a message and history in this particular format, then for + +05:31.300 --> 05:34.240 +Gradio it's just a single line of code. + +05:34.480 --> 05:36.670 +Uh, let's see if it's really that easy. + +05:36.670 --> 05:42.610 +I do need to remember to execute that so that we have defined our chat generator. + +05:42.610 --> 05:46.510 +And then we will launch our interface. + +05:46.510 --> 05:47.770 +And here it is. + +05:47.770 --> 05:49.890 +Here is our chat interface. + +05:50.190 --> 05:53.730 +Let's bring it up in a separate window, because I just prefer it that way. + +05:53.730 --> 05:55.830 +And we'll say, uh. + +05:55.830 --> 05:56.970 +Hello there. + +05:59.070 --> 06:00.000 +Hello. + +06:00.030 --> 06:01.410 +How can I assist you today? + +06:01.530 --> 06:05.220 +I want to buy a tie. + +06:06.780 --> 06:09.270 +Great kind of tie are you looking for? + +06:09.300 --> 06:11.730 +Do you have a specific color, pattern or material? + +06:12.210 --> 06:14.160 +Uh, so you get the idea. + +06:14.430 --> 06:22.830 +But let me just say, um, a red one red tie is a classic choice. + +06:22.830 --> 06:24.510 +Here are a few options to consider. + +06:24.510 --> 06:26.340 +And there comes the answer. 
+ +06:26.820 --> 06:31.470 +Now, obviously the reason I said a red one is I wanted to demonstrate what you already know, which + +06:31.470 --> 06:35.940 +is that it has context of this conversation and it knows what came before. + +06:35.970 --> 06:43.290 +And one more time, it's a bit of an illusion to feel as if this thing has memory from when we first + +06:43.290 --> 06:43.800 +spoke to it. + +06:43.800 --> 06:45.180 +And I said, I want to buy a tie. + +06:45.210 --> 06:51.630 +All that's happening is that every time we interact, that chat method, function generator, I get + +06:51.630 --> 06:52.410 +it right eventually. + +06:52.440 --> 06:55.290 +That chat generator is being called. + +06:55.470 --> 06:58.860 +And what's being what's being passed in is the whole history so far. + +06:58.860 --> 07:03.720 +And it's building that set of messages and that's what's being sent to OpenAI. + +07:03.750 --> 07:07.470 +So for each of our calls, the whole history is being provided. + +07:07.470 --> 07:10.980 +And that's why it has the context of what came before. + +07:10.980 --> 07:17.970 +It's not as if the LLM, it's not as if GPT four is remembering that that 30s ago we said that. + +07:17.970 --> 07:20.520 +It's just that with every call, we pass it all in. + +07:20.520 --> 07:22.080 +I'm sure it's obvious to you at this point. + +07:22.080 --> 07:26.010 +So I'm sorry I'm belaboring it, but I think it's important to, to really rub it in. + +07:26.400 --> 07:31.650 +Um, and yeah, so you remember I have some print statements happening below which are going to be quite + +07:31.650 --> 07:35.130 +chunky now, but let's just look at the the last one there. + +07:35.130 --> 07:41.700 +So the last one said history is and then this is what Gradio sent us. + +07:41.730 --> 07:48.500 +And you'll see it's like uh, what we said, what it said, what we said, what it said. + +07:48.890 --> 07:56.480 +Uh, and then we converted that into the right format for GPT four zero. 
+ +07:56.510 --> 07:57.950 +Uh, GPT four mini. + +07:58.100 --> 08:02.000 +Um, we converted it into a list of, like, role system content. + +08:02.000 --> 08:03.110 +You're a helpful assistant. + +08:03.110 --> 08:05.360 +And then user said, hello there. + +08:05.360 --> 08:07.910 +And the assistant replied, hello, how can I assist you today? + +08:07.910 --> 08:08.540 +And so on. + +08:08.540 --> 08:11.450 +So that is what we turned it into. + +08:12.530 --> 08:18.530 +All right, just before we go on, I'm going to have a quick tangent, but it is an important tangent. + +08:18.530 --> 08:20.420 +So this isn't just me prattling on. + +08:20.420 --> 08:24.230 +This is something which I want to sow a seed with you. + +08:24.230 --> 08:30.200 +Something that we will come back to later and is an important point, um, which maybe, maybe something + +08:30.200 --> 08:31.670 +that's been on your mind. + +08:31.730 --> 08:33.590 +Um, or if not, it should be. + +08:33.800 --> 08:42.020 +Um, so just to mention, you might be thinking, so this structure, this system user assistant user. + +08:42.140 --> 08:43.480 +Uh, so is this. + +08:43.510 --> 08:49.960 +Does this somehow get passed into the LLM in some structured way? + +08:49.960 --> 08:56.860 +Like are we somehow when we when we provide this data to the LLM, is it being given maybe as a as a + +08:56.890 --> 09:00.160 +like a dictionary, a list of dictionaries in some way? + +09:00.280 --> 09:04.300 +Um, because you may say, I thought Llms just took tokens. + +09:04.300 --> 09:08.290 +They just take a list of tokens and they generate the most likely next token. + +09:08.290 --> 09:13.990 +So how does this whole list of dictionaries and so on, uh, translate to the world of tokens? + +09:13.990 --> 09:16.300 +And that would be a great thought if you had that thought. + +09:16.300 --> 09:17.680 +Uh, very good. 
+
+09:17.680 --> 09:25.390
+Uh, and there's a simple answer, uh, it is just tokens that get passed to the actual underlying
+
+09:25.420 --> 09:29.290
+GPT four, uh, GPT four LLM.
+
+09:29.290 --> 09:39.760
+What happens is that OpenAI turns this into a series of tokens, and it has special tokens, special
+
+09:39.760 --> 09:44.430
+ways of explaining that this is the beginning of a system prompt.
+
+09:44.430 --> 09:47.670
+This is the beginning of a user and an assistant response.
+
+09:47.670 --> 09:55.080
+It has some markup to say that, and it tokenizes that whole markup, including some special placeholder
+
+09:55.080 --> 09:59.010
+tokens that sort of communicate and inform the LLM.
+
+09:59.010 --> 10:01.410
+We're now switching to system prompt mode.
+
+10:01.410 --> 10:02.880
+Here's some system prompt text.
+
+10:02.880 --> 10:04.500
+And now we're out of system prompt mode.
+
+10:04.530 --> 10:07.410
+Now we're doing a user message and so on.
+
+10:07.410 --> 10:11.460
+So this structure is what we send the OpenAI API.
+
+10:11.490 --> 10:13.980
+It converts it into tokens.
+
+10:13.980 --> 10:19.470
+And it's those tokens that then get fed to the LLM to predict the next token.
+
+10:19.950 --> 10:24.300
+And you might say, okay, I hear you, I get that.
+
+10:24.300 --> 10:32.760
+But how does the LLM know that this particular special token means system message and should interpret
+
+10:32.760 --> 10:35.340
+that to be its high level directive?
+
+10:35.340 --> 10:39.180
+And how does it know that this token means user and this means assistant and so on.
+
+10:39.210 --> 10:43.740
+Like, what gives it that ability? Is that, like, baked into its architecture in some way?
+
+10:49.560 --> 10:54.270
+It's been trained with lots of data, with that structure, with millions of examples like that.
+
+10:54.270 --> 11:00.300
+And it's learned through training that when it's being given a specific directive in a system instruction,
+
+11:00.300 --> 11:06.810
+the most likely next token, the most likely response is going to be one that adheres to that system
+
+11:06.810 --> 11:07.440
+prompt.
+
+11:07.470 --> 11:09.510
+There's — I've oversimplified.
+
+11:09.510 --> 11:14.400
+There's some, uh, more nuance there to do with things like the technique that is like chef and
+
+11:14.400 --> 11:14.940
+things like that.
+
+11:14.940 --> 11:18.570
+For those that know all this stuff and are listening and saying, oh, it's a bit oversimplified, but
+
+11:18.570 --> 11:19.770
+it's the general idea.
+
+11:19.770 --> 11:21.090
+It's the basic idea.
+
+11:21.090 --> 11:24.540
+This structure is sort of the API structure.
+
+11:24.540 --> 11:27.390
+This is how we communicate to OpenAI that that's what we want to do.
+
+11:27.390 --> 11:31.170
+And OpenAI takes that structure and turns it into tokens.
+
+11:31.170 --> 11:38.390
+So to sort of take a step back to the very beginning, Gradio gives us data in this format.
+
+11:38.420 --> 11:47.300
+We map that to this format, which is what we send OpenAI, and OpenAI converts that to tokens, including
+
+11:47.300 --> 11:48.680
+some special tokens.
+
+11:48.680 --> 11:54.740
+It's that that goes into the LLM for the whole conversation so far, for everything, every time it
+
+11:54.740 --> 12:01.460
+gets the entire conversation, and then it generates the most plausible next sequence of tokens that
+
+12:01.460 --> 12:04.400
+are most likely to come after that.
+
+12:04.490 --> 12:12.770
+Um, and that is what gets returned to us that we then assume represents the assistant's response.
+
+12:12.980 --> 12:15.860
+So I realized that was quite a long sidebar.
+ +12:15.860 --> 12:18.740 +It's very important, foundational understanding. + +12:18.740 --> 12:22.190 +And we will come back to that when we, particularly when we look at open source models. + +12:22.190 --> 12:28.190 +And we're actually going to see these kinds of generated tokens, these special tokens ourselves. + +12:28.340 --> 12:34.880 +So with that, I'm going to pause it until the next video when we're going to press ahead building this + +12:34.880 --> 12:35.780 +chatbot out. diff --git a/week5/community-contributions/subtitles/srts/59166915/ja_JP.srt b/week5/community-contributions/subtitles/srts/59166915/ja_JP.srt new file mode 100755 index 0000000..0fb36c3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166915/ja_JP.srt @@ -0,0 +1,523 @@ +WEBVTT + +00:00.440 --> 00:03.560 +JupyterLabの素晴らしい世界へようこそ。 + +00:03.560 --> 00:06.830 +そして2週目に入った。 + +00:07.490 --> 00:09.110 +3日目。 + +00:09.260 --> 00:11.990 +このノートを出して + +00:11.990 --> 00:18.080 +そこで今回は、 チャットボットとしても知られる会話型AIについて、 さっそくご紹介しよう。 + +00:18.110 --> 00:24.680 +まずはいつものようにインポートし、 環境変数を設定する。 + +00:24.680 --> 00:27.620 +そして OpenAI を初期化する。 + +00:27.650 --> 00:29.840 +今回はOpenAIを使う。 + +00:30.020 --> 00:34.310 +そして、 もしそうしたければ、 他のモデルに乗り換える練習として持っていてもいい。 + +00:34.670 --> 00:38.510 +では、 基本的なシステムメッセージから始めよう。 + +00:38.510 --> 00:40.340 +あなたは役に立つアシスタントだ。 + +00:40.970 --> 00:41.480 +分かった。 + +00:41.510 --> 00:45.800 +さて、 メッセージの構造について少し話をしたい。 + +00:45.980 --> 00:54.200 +まず最初に、 OpenAIへのプロンプトメッセージの構造を思い出してください。 + +00:54.320 --> 00:58.700 +ええと、 これはもう何度も見てきたことだから、 私が説明するのに飽き飽きしただろうね。 + +00:58.700 --> 00:59.350 +あれだ。 + +00:59.350 --> 01:00.010 +もう1度だけ。 + +01:00.010 --> 01:00.730 +よくご存知でしょう。 + +01:00.760 --> 01:09.220 +システムにユーザーを与える辞書のリストで、 アシスタントが応答し、 次にユーザーが応答するといった具合だ。 + +01:09.220 --> 01:12.910 +そして、 他にも何かあると言ったのを覚えているかもしれないが、 今はまだだ。 + +01:12.910 --> 01:13.990 +システム・ユーザー・アシスタント。 + +01:13.990 --> 01:14.650 +ユーザーアシスタント。 + +01:14.680 --> 01:16.960 +ユーザーアシスタントユーザーなど。 + +01:17.470 --> 01:21.430 +さて、 
これからchatという関数を書きます。 + +01:21.430 --> 01:27.400 +チャット機能には、 メッセージと履歴の2つの入力がある。 + +01:27.430 --> 01:34.930 +Messageは、 チャットが応答する必要のある、 現在の、 あー、 質問されているメッセージを表す。 + +01:34.960 --> 01:41.050 +そして歴史は、 以前のすべてのメッセージ、 以前のすべてのやりとりの履歴を持っている。 + +01:41.050 --> 01:45.520 +そして歴史の構造はこうなる。 + +01:45.550 --> 01:50.140 +リスト、 リストからなるリストになるだろう。 + +01:50.140 --> 01:58.920 +そして、 このようなサブリストは、 単にユーザーの発言とアシスタントの返答、 ユーザーの発言とアシスタントの返答などである。 + +01:59.340 --> 02:02.730 +では、 なぜそんなことをお願いしているのか? + +02:02.760 --> 02:08.340 +なぜ、 このような引数で、 このような関数を書こうとするのか? + +02:08.340 --> 02:16.710 +その答えは、 Gradioがチャット・ユーザー・インターフェースで使うことを期待している特定のタイプの機能だからです。 + +02:16.710 --> 02:22.260 +だからGradioは、 メッセージを受け取るchatという関数を書くことを期待しているのだ。 + +02:22.260 --> 02:28.590 +この構造体で履歴を取り、 次のレスポンス、 つまりこのチャットに対するレスポンスを返す。 + +02:28.590 --> 02:30.390 +だから、 そういう形式を考えているんだ。 + +02:30.390 --> 02:39.420 +この関数の仕事は、 このようなスタイルのメッセージをこのように変換することだ。 + +02:39.420 --> 02:47.940 +そこで、 この構造を1行ずつ繰り返し、 上にあるような構造を構築する必要がある。 + +02:47.970 --> 02:49.710 +それが理解できればいいのだが......。 + +02:49.710 --> 02:53.400 +そうでなくても、 それがどんなものかをお見せすれば納得していただけるだろう。 + +02:53.400 --> 02:56.730 +そこで、 chatという関数を定義している。 + +02:56.760 --> 03:03.450 +入力されたメッセージに応答するために得たメッセージを受け取り、 以前のメッセージの履歴を受け取る。 + +03:03.480 --> 03:09.090 +そこでまず、 メッセージのリストを設定する。 + +03:09.090 --> 03:12.870 +そして、 一番最初にシステム・プロンプトを入力する。 + +03:12.900 --> 03:17.010 +もちろん、 その後は歴史を反復することになる。 + +03:17.040 --> 03:20.460 +ヒストリーの各要素は、 2つの値を持つリストの1つである。 + +03:20.460 --> 03:24.000 +だから、 それをユーザー・メッセージ・アシスタントのメッセージに展開するんだ。 + +03:24.000 --> 03:28.470 +そして、 ユーザーのメッセージとアシスタントのメッセージを追加する。 + +03:28.530 --> 03:30.390 +その都度、 その都度。 + +03:30.390 --> 03:38.310 +つまり、 履歴の各行がこのリストでは2行になる。 + +03:38.310 --> 03:40.650 +1つはユーザー用、 もう1つはアシスタント用だ。 + +03:40.770 --> 03:42.480 +それが完全に意味をなしていることを願うよ。 + +03:42.480 --> 03:43.050 +今すぐだ。 + +03:43.200 --> 03:44.250 +そうでなければ、 いつでもできる。 + +03:44.280 --> 03:45.240 +まあ、 その必要はない。 + +03:45.270 --> 03:47.370 +いつでもprint文を入れられると言おうとしたんだ。 + +03:47.430 --> 03:50.160 +ええと、 私には先見の明があったので、 自分でいくつかのプリント文を入れたんだ。 + +03:50.160 --> 
03:51.180 +だから、 これを見ることになる。 + +03:51.210 --> 03:54.980 +履歴を印刷し、 メッセージを印刷します。 + +03:54.980 --> 03:57.170 +だから、 それも見ることができる。 + +03:57.530 --> 04:05.120 +次の行は、 このチャットではお馴染みのメソッドで、 この時点で、 この関数、 すみません、 + +04:05.150 --> 04:14.300 +この時点で、 メッセージのセットを受け取り、 それを使ってOpenAIを呼び出します。 + +04:14.300 --> 04:17.810 +だから、 OpenAIチャットのドットコンプリートやドットクリエイトをやっているんだ。 + +04:17.810 --> 04:22.970 +モデルを渡し、 メッセージを渡し、 結果をストリームしてくださいと言うつもりだ。 + +04:22.970 --> 04:23.810 +そうかもしれない。 + +04:23.810 --> 04:27.200 +そして、 私たちはそれを通過し、 返答を得る。 + +04:27.440 --> 04:30.170 +ええと、 つまり、 これは実際には機能ではないんだ。 + +04:30.170 --> 04:35.900 +私たちは一つひとつ答えを出していくので、 本当にジェネレーターなんだ。 + +04:36.800 --> 04:37.280 +オーケー。 + +04:37.280 --> 04:49.850 +つまり、 先ほどのスライドにあったような、 インスタント・メッセージのようなユーザー・インターフェースを作りたいのです。 + +04:49.850 --> 04:54.580 +だから、 次から次へとやってくるメッセージをどうキャンバスに描くか、 + +04:54.580 --> 05:01.360 +その方法を考えなければならない。 + +05:01.420 --> 05:06.430 +ええと、 このチャットメッセージから返ってくる反応からするとね。 + +05:06.940 --> 05:10.300 +ええと、 お気づきになったかどうかわかりませんが、 私はもちろん嘘をついています。 + +05:10.300 --> 05:11.770 +本当に簡単なことだよ。 + +05:11.770 --> 05:12.910 +本当に簡単なことだよ。 + +05:12.910 --> 05:14.470 +一本の線になる。 + +05:15.310 --> 05:21.460 +Gradioにはチャット・インターフェイスというものが付属していて、 チャット・インターフェイスは、 + +05:21.460 --> 05:25.540 +このような構造を持つ1つの関数を想定しています。 + +05:25.540 --> 05:34.240 +もしあなたが、 メッセージと履歴をこの特殊なフォーマットで受け取る関数を書いたのなら、 Gradioにとってそれはたった1行のコードに過ぎない。 + +05:34.480 --> 05:36.670 +ええと、 本当にそんなに簡単なことなのか見てみよう。 + +05:36.670 --> 05:42.610 +チャット・ジェネレーターを定義するために、 忘れずに実行する必要がある。 + +05:42.610 --> 05:46.510 +そしてインターフェイスを立ち上げる。 + +05:46.510 --> 05:47.770 +そしてここにある。 + +05:47.770 --> 05:49.890 +これが私たちのチャット・インターフェースです。 + +05:50.190 --> 05:53.730 +別ウインドウで表示させよう。 + +05:53.730 --> 05:55.830 +そして、 こう言うんだ。 + +05:55.830 --> 05:56.970 +こんにちは。 + +05:59.070 --> 06:00.000 +こんにちは。 + +06:00.030 --> 06:01.410 +本日はどのようなご用件でしょうか? + +06:01.530 --> 06:05.220 +ネクタイを買いたい。 + +06:06.780 --> 06:09.270 +どんなネクタイをお探しですか? + +06:09.300 --> 06:11.730 +色や柄、 素材は決まっていますか? 
+ +06:12.210 --> 06:14.160 +ええと、 それでお分かりいただけたと思う。 + +06:14.430 --> 06:22.830 +でも、 赤のネクタイはクラシックなチョイスだよ。 + +06:22.830 --> 06:24.510 +ここでは、 いくつかのオプションを紹介しよう。 + +06:24.510 --> 06:26.340 +そこに答えがある。 + +06:26.820 --> 06:35.940 +この会話には文脈があり、 その前に何があったかを知っている。 + +06:35.970 --> 06:43.800 +そしてもうひとつ、 私たちが最初に話しかけたときからの記憶があるかのように感じるのは、 ちょっとした錯覚だ。 + +06:43.800 --> 06:45.180 +そして私はネクタイを買いたいと言った。 + +06:45.210 --> 06:52.410 +私たちが交流するたびに、 そのチャットメソッド、 ファンクションジェネレーター、 私は最終的にそれを正しく理解する。 + +06:52.440 --> 06:55.290 +そのチャットジェネレーターが呼ばれている。 + +06:55.470 --> 06:58.860 +そして、 通過しているのはこれまでの歴史のすべてだ。 + +06:58.860 --> 07:03.720 +そして、 そのメッセージのセットを構築し、 それがOpenAIに送信される。 + +07:03.750 --> 07:07.470 +だから、 それぞれの通話に対して、 全履歴が提供される。 + +07:07.470 --> 07:10.980 +だからこそ、 その前の文脈がある。 + +07:10.980 --> 07:17.970 +LLMが、 GPT4が、 30年前に私たちがそう言ったことを覚えているかのようではない。 + +07:17.970 --> 07:20.520 +ただ、 コールがあるたびに、 すべてをパスするんだ。 + +07:20.520 --> 07:22.080 +もうお分かりだろう。 + +07:22.080 --> 07:26.010 +だから、 くどくどと書いてしまって申し訳ないんだけど、 本当に大事なことだと思うんだ。 + +07:26.400 --> 07:35.130 +ええと、 そうだ、 下にprint文がいくつかあるのを覚えているだろう。 + +07:35.130 --> 07:41.700 +最後に歴史がどうのこうのと言ったが、 これはグラディオが送ってくれたものだ。 + +07:41.730 --> 07:48.500 +私たちが言ったこと、 言ったこと、 言ったこと、 言ったこと。 + +07:48.890 --> 07:56.480 +そして、 それをGPT 4ゼロの正しいフォーマットに変換したんだ。 + +07:56.510 --> 07:57.950 +ええと、 GPTフォーミニ。 + +07:58.100 --> 08:02.000 +私たちは、 それを役割システムの内容のリストに変換したんだ。 + +08:02.000 --> 08:03.110 +君は役に立つアシスタントだ。 + +08:03.110 --> 08:05.360 +そしてユーザーは、 こんにちは、 と言った。 + +08:05.360 --> 08:07.910 +するとアシスタントは、 「こんにちは、 今日はどのようなご用件でしょうか? 
+ +08:07.910 --> 08:08.540 +などなど。 + +08:08.540 --> 08:11.450 +だから、 そういうことにしたんだ。 + +08:12.530 --> 08:18.530 +さて、 先に進む前にちょっと余談をさせてもらうが、 これは重要な余談だ。 + +08:18.530 --> 08:20.420 +だから、 これは私だけがしゃべっているのではない。 + +08:20.420 --> 08:24.230 +これは、 私が君たちに種を蒔きたいことなんだ。 + +08:24.230 --> 08:31.670 +後で触れることになるが、 重要なポイントだ。 + +08:31.730 --> 08:33.590 +そうでなければ、 そうあるべきだ。 + +08:33.800 --> 08:42.020 +ええと、 だから、 この構造、 このシステム・ユーザー・アシスタント・ユーザーについて、 あなたは考えているかもしれません。 + +08:42.140 --> 08:43.480 +ああ、 これもそうだ。 + +08:43.510 --> 08:49.960 +LLMでは、 このようなことは構造化されているのでしょうか? + +08:49.960 --> 09:00.160 +このデータをLLMに提供するとき、 私たちは何らかの形で、 辞書のような、 辞書のリストのような形で提供されているのでしょうか? + +09:00.280 --> 09:04.300 +というのも、 Llmsはトークンを取るだけだと思っていたからだ。 + +09:04.300 --> 09:08.290 +トークンのリストを受け取り、 次のトークンを生成する。 + +09:08.290 --> 09:13.990 +では、 この辞書などのリストは、 トークンの世界にどのように変換されるのでしょうか? + +09:13.990 --> 09:16.300 +そして、 もしあなたがそのような考えを持っているなら、 それは素晴らしい考えだろう。 + +09:16.300 --> 09:17.680 +ああ、 とてもいいね。 + +09:17.680 --> 09:29.290 +答えは簡単で、 トークンがGPT4LLMに渡されるだけです。 + +09:29.290 --> 09:39.760 +何が起こるかというと、 OpenAIはこれを一連のトークンに変え、 これがシステムプロンプトの始まりであることを説明する特別なトークン、 + +09:39.760 --> 09:44.430 +特別な方法を持っている。 + +09:44.430 --> 09:47.670 +これがユーザーとアシスタンス対応の始まりである。 + +09:47.670 --> 09:59.010 +そして、 そのマークアップ全体をトークン化し、 LLMに情報を伝える特別なプレースホルダートークンを含む。 + +09:59.010 --> 10:01.410 +システム・プロンプト・モードに切り替えます。 + +10:01.410 --> 10:02.880 +これがシステムプロンプトのテキストだ。 + +10:02.880 --> 10:04.500 +そして今、 システム・プロンプト・モードから抜け出した。 + +10:04.530 --> 10:07.410 +今はユーザーメッセージなどをやっている。 + +10:07.410 --> 10:11.460 +つまり、 この構造はOpenAI APIに送るものだ。 + +10:11.490 --> 10:13.980 +それをトークンに変換する。 + +10:13.980 --> 10:19.470 +そして、 そのトークンがLLMに送られ、 次のトークンを予測する。 + +10:19.950 --> 10:24.300 +と言われるかもしれない。 + +10:24.300 --> 10:35.340 +しかし、 LLMはどのようにして、 この特別なトークンがシステムメッセージを意味することを知り、 それをその高レベル指令と解釈するのだろうか? + +10:35.340 --> 10:39.180 +また、 このトークンはユーザー、 このトークンはアシスタントを意味する、 などということをどうやって知るのだろうか。 + +10:39.210 --> 10:43.740 +何がその能力を与えているのか、 それは何らかの形でアーキテクチャに組み込まれているのか? 
+ +10:44.040 --> 10:46.590 +それに対するとてもシンプルな答えがある。 + +10:46.590 --> 10:49.530 +いや、 そう訓練されているからだよ。 + +10:49.560 --> 10:54.270 +そのような構造で、 何百万もの例で、 たくさんのデータで訓練されている。 + +10:54.270 --> 11:00.300 +そして、 システム指示の中で特定の指示が与えられると、 最も可能性の高い次のトークン、 最も可能性の高いレスポンスは、 + +11:00.300 --> 11:07.440 +そのシステム・プロンプトに従ったものになることを、 トレーニングを通じて学習している。 + +11:07.470 --> 11:09.510 +単純化しすぎましたね。 + +11:09.510 --> 11:14.940 +シェフのようなテクニックのようなものとか、 そういうニュアンスがあるんだ。 + +11:14.940 --> 11:18.570 +このようなことをすべて知っている人は、 それを聞いて、 ああ、 ちょっと単純化しすぎだが、 一般的な考え方だ、 + +11:18.570 --> 11:19.770 +と言うだろう。 + +11:19.770 --> 11:21.090 +基本的な考え方だ。 + +11:21.090 --> 11:24.540 +この構造はAPI構造の一種である。 + +11:24.540 --> 11:27.390 +これが、 私たちがやりたいことをOpenAIに伝える方法です。 + +11:27.390 --> 11:31.170 +そしてOpenAIはその構造をトークンに変える。 + +11:31.170 --> 11:38.390 +つまり、 最初のステップに進むために、 グラディオはこのようなフォーマットでデータを提供してくれる。 + +11:38.420 --> 11:48.680 +それをこのフォーマットにマッピングしてOpenAIに送り、 OpenAIがそれをトークンに変換する。 + +11:48.680 --> 11:54.740 +それは、 これまでの会話全体について、 すべてについて、 会話全体を取得するたびにLLMに入り、 + +11:54.740 --> 12:04.400 +その次に来る可能性が最も高いトークンのシーケンスを生成することだ。 + +12:04.490 --> 12:12.770 +そして、 それがアシスタンスレスポンスとして私たちに返される。 + +12:12.980 --> 12:15.860 +だから、 かなり長いサイドバーだったことに気づいた。 + +12:15.860 --> 12:18.740 +非常に重要で、 基礎となる理解だ。 + +12:18.740 --> 12:22.190 +そして、 私たちが、 特にオープンソースモデルに注目するときには、 またこの話に戻ることになるだろう。 + +12:22.190 --> 12:28.190 +そして、 私たちは実際にこのような生成されたトークンや特別なトークンを目にすることになる。 + +12:28.340 --> 12:35.780 +それでは、 このチャットボットの構築を進める次のビデオまで、 このビデオを一時停止します。 diff --git a/week5/community-contributions/subtitles/srts/59166915/ko_KR.srt b/week5/community-contributions/subtitles/srts/59166915/ko_KR.srt new file mode 100755 index 0000000..5a3507d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166915/ko_KR.srt @@ -0,0 +1,577 @@ +WEBVTT + +00:00.440 --> 00:03.560 +놀라운 유피터랩의 세계에 잘 오셨어요 + +00:03.560 --> 00:06.830 +이제 2주 차예요 + +00:07.490 --> 00:09.110 +3일째예요 + +00:09.260 --> 00:11.990 +이 공책을 꺼내요 + +00:11.990 --> 00:18.080 +대화형 인공지능인 채팅 봇에 대해 알아보도록 하죠. 
+ +00:18.110 --> 00:24.680 +먼저 일반적인 가져오기와 환경 변수의 일반적인 설정으로 시작하죠 + +00:24.680 --> 00:27.620 +오픈AI를 초기화하죠 + +00:27.650 --> 00:29.840 +이번에는 오픈아이를 쓸 거예요 + +00:30.020 --> 00:34.310 +원한다면 다른 모델로 바꾸는 연습으로 사용할 수 있어요 + +00:34.670 --> 00:38.510 +기본적인 시스템 메시지부터 시작할게요 + +00:38.510 --> 00:40.340 +정말 도움이 되는 조수네요 + +00:40.970 --> 00:41.480 +좋아요 + +00:41.510 --> 00:45.800 +이제 비트 코드의 메시지 구조를 얘기해 볼게요 + +00:45.980 --> 00:54.200 +먼저 오픈AI에 즉각 메시지를 보내는 구조를 다시 알려주세요 + +00:54.320 --> 00:58.700 +이런 건 이제 질리게 봤으니까 제가 설명하는 게 지겹겠죠 + +00:58.700 --> 00:59.350 +저기 있네요 + +00:59.350 --> 01:00.010 +한 번 더요 + +01:00.010 --> 01:00.730 +잘 아시네요 + +01:00.760 --> 01:07.840 +시스템에 사용자를 제공하는 사전 목록이죠 보조가 응답하고 그 다음 사용자가 + +01:07.840 --> 01:09.220 +응답하는 거죠 + +01:09.220 --> 01:12.910 +다른 게 있다고 말씀드렸죠? 지금은 아니에요 + +01:12.910 --> 01:13.990 +시스템 사용자 보조요 + +01:13.990 --> 01:14.650 +사용자 보조요 + +01:14.680 --> 01:16.960 +사용자 보조 사용자 등등이요 + +01:17.470 --> 01:21.430 +이제 채팅이라는 함수를 쓸 거예요 + +01:21.430 --> 01:27.400 +그 함수 채팅은 두 개의 입력 메시지와 역사를 취할 거예요 + +01:27.430 --> 01:34.930 +이 메시지는 현재 채팅방이 답변해야 할 메시지를 나타내죠 + +01:34.960 --> 01:41.050 +모든 이전의 메시지와 교류는 역사가 기록하고 있죠 + +01:41.050 --> 01:45.520 +역사의 구조는 이렇게 생겼을 거예요 + +01:45.550 --> 01:50.140 +목록이 될 겁니다 목록으로 구성된 목록이죠 + +01:50.140 --> 01:55.810 +이런 하위 목록은 사용자가 뭐라고 대답하고 비서가 뭐라고 대답하고 사용자가 뭐라고 + +01:55.810 --> 01:58.920 +대답하고 비서가 뭐라고 대답하는지 등이죠 + +01:59.340 --> 02:02.730 +왜 이런 부탁을 하는 걸까요? + +02:02.760 --> 02:08.340 +왜 매개 변수가 있는 저런 함수를 써야 하죠? 왜 이런 인수가 있는 거죠? 
+ +02:08.340 --> 02:15.870 +그 대답은 그러디오가 채팅 유저 인터페이스와 함께 사용하기 위해 기대하는 특정한 유형의 함수이기 + +02:15.870 --> 02:16.710 +때문이죠 + +02:16.710 --> 02:22.260 +그래서 그라디오는 우리가 메시지를 받는 채팅이라는 함수를 쓰길 기대하죠 + +02:22.260 --> 02:27.630 +이 구조에서 히스토리를 선택하고 응답을 반환할 겁니다 이 채팅에 대한 + +02:27.630 --> 02:28.590 +응답이요 + +02:28.590 --> 02:30.390 +그래서 그 형식을 고려하는 거죠 + +02:30.390 --> 02:39.420 +이 함수에 대한 우리 작업은 이런 종류의 메시지를 여기로 변환하는 거죠 + +02:39.420 --> 02:47.940 +이 구조를 통해 한 열씩 반복해야 합니다 위에 보이는 이 구조를 구축하고요 + +02:47.970 --> 02:49.710 +이해가 되면 좋겠네요 + +02:49.710 --> 02:53.400 +그렇지 않다면 제가 보여드릴 때 이해가 될 거예요 + +02:53.400 --> 02:56.730 +채팅이라는 함수를 정의하고 있어요 + +02:56.760 --> 03:03.450 +입력 메시지에 응답해야 하는 메시지가 필요합니다 이전 메시지의 기록도 필요하고요 + +03:03.480 --> 03:09.090 +먼저 메시지 목록을 설정합니다 이 사람이 되겠죠 + +03:09.090 --> 03:12.870 +시작 부분에 시스템 프롬프트로 채우죠 + +03:12.900 --> 03:17.010 +물론 그런 다음 역사를 반복하겠죠 + +03:17.040 --> 03:20.460 +역사의 각 요소는 두 개의 값을 가진 리스트 중 하나죠 + +03:20.460 --> 03:24.000 +사용자 메시지 비서 메시지에 그걸 풀어놓을게요 + +03:24.000 --> 03:28.470 +사용자 메시지와 보조 메시지를 추가해요 + +03:28.530 --> 03:30.390 +매번요 + +03:30.390 --> 03:38.310 +역사에서 각 행은 이 목록에서 두 행으로 바뀌죠 + +03:38.310 --> 03:40.650 +하나는 사용자를 위한 것 하나는 보조를 위한 것이죠 + +03:40.770 --> 03:42.480 +이해가 되면 좋겠네요 + +03:42.480 --> 03:43.050 +지금요 + +03:43.200 --> 03:44.250 +아니면 언제든 괜찮아요 + +03:44.280 --> 03:45.240 +그럴 필요 없어요 + +03:45.270 --> 03:47.370 +언제든 print문을 넣을 수 있다고 말하려 했어요 + +03:47.430 --> 03:50.160 +제가 선견지명이 있어서 인쇄물도 몇 개 넣었어요 TED TED TED TED + +03:50.160 --> 03:51.180 +곧 보게 될 거예요 + +03:51.210 --> 03:54.980 +히스토리를 프린트하고 메시지를 프린트할 거예요 + +03:54.980 --> 03:57.170 +Get in get 역시 볼 수 있죠 + +03:57.530 --> 04:05.120 +다음 줄은 이 채팅방에서 아주 익숙하실 텐데요 이 시점에서 이 메서드, 이 + +04:05.150 --> 04:12.110 +함수 죄송합니다, 이 시점에서 이 메시지 세트를 가지고 OpenAI를 + +04:12.110 --> 04:14.300 +호출할 거예요 + +04:14.300 --> 04:17.810 +OpenAI 채팅 .완성 .Create를 입력해요 + +04:17.810 --> 04:22.970 +모델에서 전달하고 메시지를 전달하고 결과를 스트리밍해달라고 요청하죠 + +04:22.970 --> 04:23.810 +그렇게 하죠 + +04:23.810 --> 04:27.200 +그런 다음 검토하고 응답을 유도하죠 + +04:27.440 --> 04:30.170 +다시 말씀드리지만 이건 함수가 아니에요 + 
+04:30.170 --> 04:35.900 +하나씩 반응을 산출하기 때문에 발전기라고 할 수 있죠 + +04:36.800 --> 04:37.280 +네 + +04:37.280 --> 04:45.500 +이제 이걸 좀 전에 슬라이드에서 본 사용자 인터페이스로 바꿀 거예요 인스턴트 메시지 스타일 + +04:45.500 --> 04:49.850 +상호 작용이 있는 사용자 인터페이스요 + +04:49.850 --> 04:54.580 +할 일이 많아요 비트가 계속 오는 메시지를 + +04:54.580 --> 05:01.360 +캔버스로 만들어서 어떻게 만들지 알아내야 하니까요 + +05:01.420 --> 05:06.430 +이 채팅창에 올라온 반응을 보면요 + +05:06.940 --> 05:10.300 +눈치챘는지 모르겠지만 당연히 거짓말이죠 + +05:10.300 --> 05:11.770 +아주 쉬울 거예요 + +05:11.770 --> 05:12.910 +아주 쉬울 거예요 + +05:12.910 --> 05:14.470 +한 줄로 할 거예요 + +05:15.310 --> 05:21.460 +그러디오는 채팅 인터페이스라는 독창적인 걸 내놓는데 채팅 인터페이스는 + +05:21.460 --> 05:25.540 +이런 구조를 가진 단일 함수를 기대하죠 + +05:25.540 --> 05:31.300 +이 특정한 형식으로 메시지와 역사를 취하는 함수를 작성했다면 Gadio에선 + +05:31.300 --> 05:34.240 +코드 한 줄로 끝나죠 + +05:34.480 --> 05:36.670 +그렇게 쉬운지 볼까요? + +05:36.670 --> 05:42.610 +저걸 실행하는 걸 기억해야 합니다 채팅 생성기를 정의하도록요 + +05:42.610 --> 05:46.510 +이제 인터페이스를 실행할 거예요 + +05:46.510 --> 05:47.770 +여기 있네요 + +05:47.770 --> 05:49.890 +채팅 인터페이스예요 + +05:50.190 --> 05:53.730 +다른 창으로 보여드리죠 그게 더 좋거든요 + +05:53.730 --> 05:55.830 +이렇게 말해요 + +05:55.830 --> 05:56.970 +안녕하세요 + +05:59.070 --> 06:00.000 +안녕하세요 + +06:00.030 --> 06:01.410 +무엇을 도와드릴까요? + +06:01.530 --> 06:05.220 +넥타이 하나 사려고요 + +06:06.780 --> 06:09.270 +어떤 넥타이를 찾으세요? + +06:09.300 --> 06:11.730 +특정한 색상, 패턴이나 재료가 있나요? + +06:12.210 --> 06:14.160 +Get it, get it, get it. 대충 아시겠죠? 
+ +06:14.430 --> 06:22.830 +하지만 빨간색 넥타이는 고전적인 선택이에요 + +06:22.830 --> 06:24.510 +몇 가지 선택지를 드리죠 + +06:24.510 --> 06:26.340 +답이 나왔네요 + +06:26.820 --> 06:31.470 +빨간색을 고른 이유는 여러분이 이미 아는 걸 보여드리고 싶었기 때문이에요 + +06:31.470 --> 06:35.940 +이 대화의 맥락을 갖고 있고 전에 뭐가 있었는지도 알죠 + +06:35.970 --> 06:43.800 +다시 한번 말씀드리지만 비트가 처음 대화했을 때의 메모리를 가진 것 같아요 + +06:43.800 --> 06:45.180 +넥타이를 사고 싶다고 했어요 + +06:45.210 --> 06:51.630 +우리가 상호 작용할 때마다 채팅 메서드, 함수 생성기가 결국엔 제대로 작동해요 get + +06:51.630 --> 06:52.410 +it + +06:52.440 --> 06:55.290 +채팅 생성기가 호출됐어요 + +06:55.470 --> 06:58.860 +지금까지의 역사를 전부 담고 있어요 + +06:58.860 --> 07:03.720 +메시지 집합을 구축하고 오픈AI에 전송하는 거죠 + +07:03.750 --> 07:07.470 +통화 내역이 전부 제공되고 있어요 + +07:07.470 --> 07:10.980 +그래서 이전의 맥락이 있는 거예요 + +07:10.980 --> 07:17.970 +LLM도 아니고 GPT 4도 아닙니다 30년 전에 했던 말을 기억하는 것도 아니죠 + +07:17.970 --> 07:20.520 +출동할 때마다 전부 전달해요 + +07:20.520 --> 07:22.080 +이쯤 되면 눈치채셨겠지만요 + +07:22.080 --> 07:26.010 +장황하게 말해서 미안하지만 염장 지르는 게 중요하다고 생각해요 + +07:26.400 --> 07:31.650 +네, 기억하세요? 아래에 print문이 있는데 지금은 + +07:31.650 --> 07:35.130 +꽤 두툼하죠 마지막 걸 보죠 + +07:35.130 --> 07:41.700 +마지막 건 역사고 이건 그래디오가 보낸 거예요 + +07:41.730 --> 07:48.500 +그럼 보일 거예요 우리가 말한 대로 우리가 말한 대로요 + +07:48.890 --> 07:56.480 +그리고 GPT 40을 위해 올바른 포맷으로 변환했죠 + +07:56.510 --> 07:57.950 +GPT 4 미니요 + +07:58.100 --> 08:02.000 +역할 시스템 콘텐츠 목록으로 변환했어요 + +08:02.000 --> 08:03.110 +정말 도움이 되는 조수네요 + +08:03.110 --> 08:05.360 +사용자가 안녕하세요라고 하죠 + +08:05.360 --> 08:07.910 +그러자 조수가 어떻게 도와드리면 되냐고 물었어요 + +08:07.910 --> 08:08.540 +계속해서요 + +08:08.540 --> 08:11.450 +그래서 이렇게 바꿨죠 + +08:12.530 --> 08:18.530 +좋아요, 시작하기 전에 잠깐 옆길로 새죠 중요한 거예요 + +08:18.530 --> 08:20.420 +나 혼자 떠드는 게 아니에요 + +08:20.420 --> 08:24.230 +당신과 함께 그 씨앗을 뿌리고 싶어요 + +08:24.230 --> 08:30.200 +나중에 다시 얘기하겠지만 중요한 부분이에요 어쩌면 당신이 생각해둔 + +08:30.200 --> 08:31.670 +것일 수도 있고요 + +08:31.730 --> 08:33.590 +그렇지 않다면 그래야겠죠 + +08:33.800 --> 08:42.020 +언급하자면 이렇게 생각하실 수 있어요 시스템 사용자 비서가 이 구조에 대해서요 + +08:42.140 --> 08:43.480 +이것도 그래요 + +08:43.510 --> 08:49.960 +어떤 구조적인 방법으로 LLM에 Get이 전달되나요? 
+ +08:49.960 --> 08:56.860 +우리가 어떻게든 이 데이터를 LLM에 제공할 때 사전이나 사전 목록 + +08:56.890 --> 09:00.160 +같은 어떤 식으로 제공되나요? + +09:00.280 --> 09:04.300 +LM은 토큰만 받는 줄 알았거든요 + +09:04.300 --> 09:08.290 +토큰의 리스트를 선택하고 가장 가능성이 높은 다음 토큰을 생성하죠 + +09:08.290 --> 09:13.990 +그럼 이 사전 전체 목록은 어떻게 토큰의 세계로 변환하죠? + +09:13.990 --> 09:16.300 +그런 생각을 한다면 정말 좋을 거예요 + +09:16.300 --> 09:17.680 +아주 좋아요 + +09:17.680 --> 09:25.390 +답은 간단합니다. 토큰을 전달하는 것입니다. 실제 GPT 4개, + +09:25.420 --> 09:29.290 +GPT 4개 LLM으로요. + +09:29.290 --> 09:39.760 +오픈AI는 이것을 토큰의 시리즈로 바꾸어 놓습니다. 특별한 토큰을 가지고 있는데 시스템 프롬프트의 + +09:39.760 --> 09:44.430 +시작이라고 설명하는 방법이죠. + +09:44.430 --> 09:47.670 +사용자와 보조 대응의 시작이죠 + +09:47.670 --> 09:55.080 +그 말을 하는 마크업이 있고 그 전체 마크업을 토큰화합니다 LLM에 정보를 전달하는 + +09:55.080 --> 09:59.010 +특별한 자리 표시자 토큰을 포함해서요 + +09:59.010 --> 10:01.410 +이제 시스템 프롬프트 모드로 바꿀게요 + +10:01.410 --> 10:02.880 +시스템 프롬프트 텍스트가 있네요 + +10:02.880 --> 10:04.500 +이제 시스템 프롬프트 모드에서 벗어났어요 + +10:04.530 --> 10:07.410 +사용자 메시지 같은 걸 하고 있죠 + +10:07.410 --> 10:11.460 +이 구조가 OpenAI API 전송이에요 + +10:11.490 --> 10:13.980 +패로 변환하죠 + +10:13.980 --> 10:19.470 +이 토큰들이 LLM에 입력되어 get 다음 토큰을 예측하죠 + +10:19.950 --> 10:24.300 +Get it, get it, get it's right, get's right, right. + +10:24.300 --> 10:32.760 +그런데 LLM은 이 특별한 토큰이 시스템 메시지를 의미한다는 것과 이를 높은 수준의 지시로 해석해야 + +10:32.760 --> 10:35.340 +한다는 것을 어떻게 알까요? + +10:35.340 --> 10:39.180 +이 토큰이 사용자를 의미하고 이건 비서를 의미한다는 걸 어떻게 알까요? + +10:39.210 --> 10:43.740 +어떤 식으로든 그런 능력을 아키텍처에 구현한 건가요? 
+ +10:44.040 --> 10:46.590 +그에 대한 답은 아주 간단해요 + +10:46.590 --> 10:49.530 +아뇨, 그렇게 훈련받았으니까요 + +10:49.560 --> 10:54.270 +많은 데이터와 구조로 훈련되었고 수백만 개의 예로 훈련되었어요 + +10:54.270 --> 11:00.300 +시스템 지침에서 특정 지침을 받았을 때 다음 토큰과 반응은 + +11:00.300 --> 11:07.440 +시스템 프롬프트를 준수하는 것이라는 것을 배우게 되죠 + +11:07.470 --> 11:09.510 +너무 단순화했어요 + +11:09.510 --> 11:14.940 +뉘앙스가 좀 더 있어요 셰프 같은 기술과 관련해서요 + +11:14.940 --> 11:18.570 +비트 박스를 아는 사람들은 너무 단순화됐다고 하겠지만 그게 일반적인 + +11:18.570 --> 11:19.770 +개념이에요 + +11:19.770 --> 11:21.090 +기본 아이디어예요 + +11:21.090 --> 11:24.540 +이 구조는 일종의 API 구조예요 + +11:24.540 --> 11:27.390 +오픈AI에 우리가 원하는 걸 이렇게 전달하는 거죠 + +11:27.390 --> 11:31.170 +오픈아이는 그 구조를 토큰으로 바꾸죠 + +11:31.170 --> 11:38.390 +그럼 이제 처음으로 넘어가서 그라디오는 이런 포맷의 데이터를 주죠 + +11:38.420 --> 11:47.300 +이것을 OpenAI에 보내는 이 형식에 매핑합니다 OpenAI는 이것을 특별한 토큰을 포함한 토큰으로 + +11:47.300 --> 11:48.680 +변환하죠 + +11:48.680 --> 11:54.740 +지금까지의 모든 대화에 해당하는 LLM으로 들어갑니다 대화 전체를 + +11:54.740 --> 12:01.460 +담을 때마다 가장 그럴듯한 다음 토큰 배열을 생성하죠 그다음으로 나올 가능성이 + +12:01.460 --> 12:04.400 +가장 큰 토큰요 + +12:04.490 --> 12:12.770 +그게 우리에게 돌아오는 거고 우린 그걸 원조 대응이라고 생각하죠 + +12:12.980 --> 12:15.860 +그래서 그게 꽤 긴 사이드바라는 걸 깨달았죠 + +12:15.860 --> 12:18.740 +아주 중요한 기본적 이해예요 + +12:18.740 --> 12:22.190 +다시 돌아오죠 특히 오픈 소스 모델을 볼 때요 + +12:22.190 --> 12:28.190 +생성된 토큰들을 직접 볼 것입니다. 특별한 토큰들을요. + +12:28.340 --> 12:34.880 +다음 비디오까지 잠시 멈추겠습니다 그때 이 챗봇을 만들 + +12:34.880 --> 12:35.780 +거예요 diff --git a/week5/community-contributions/subtitles/srts/59166919/en_US.srt b/week5/community-contributions/subtitles/srts/59166919/en_US.srt new file mode 100755 index 0000000..6f81b8a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166919/en_US.srt @@ -0,0 +1,43 @@ +WEBVTT + +00:00.560 --> 00:03.590 +And with that, it concludes our session on tools. 
+ +00:03.590 --> 00:08.720 +And at this point, you are probably an expert on tools because you've gone back and you've added in + +00:08.720 --> 00:17.120 +the extras, like giving your LLM the ability to book flights in that it can print it to your output. + +00:17.360 --> 00:19.940 +So congratulations on getting here. + +00:19.970 --> 00:22.550 +Now you are very well versed in transformers. + +00:22.550 --> 00:28.340 +You can code against the frontier LLM APIs, and you can build AI assistants with user interfaces and + +00:28.340 --> 00:30.890 +using tools for more expertise. + +00:30.890 --> 00:38.540 +Tomorrow is completing week two, bringing introducing agents to the mix, a super juicy topic. + +00:38.570 --> 00:44.720 +We're going to talk about how agents can carry out more complex sequential activities, breaking them + +00:44.720 --> 00:51.350 +down into smaller steps, and having specialist AIS that can handle each of those steps. + +00:51.350 --> 00:56.750 +And the specific area we're going to look at is introducing some multi-modality. + +00:56.780 --> 01:01.820 +We're going to have specialists that can take care of things like creating images, because that's going + +01:01.850 --> 01:08.090 +to be fun, and it is going to allow us to build an even more sophisticated business application. + +01:08.090 --> 01:12.920 +So with that, I'll see you for the next one and very much looking forward to it. 
diff --git a/week5/community-contributions/subtitles/srts/59166919/ja_JP.srt b/week5/community-contributions/subtitles/srts/59166919/ja_JP.srt new file mode 100755 index 0000000..3665c02 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166919/ja_JP.srt @@ -0,0 +1,40 @@ +WEBVTT + +00:00.560 --> 00:03.590 +これでツールについてのセッションは終了だ。 + +00:03.590 --> 00:08.720 +この時点で、 あなたはおそらくツールのエキスパートになっているはずだ。 + +00:08.720 --> 00:17.120 +LLMに航空券を予約する機能を持たせ、 それを出力できるようにしたように。 + +00:17.360 --> 00:19.940 +よくぞここまでたどり着いた。 + +00:19.970 --> 00:22.550 +これであなたはトランスフォーマーに詳しくなった。 + +00:22.550 --> 00:30.890 +フロンティアLLMのAPIに対してコードを書くことができ、 ユーザーインターフェースを備えたAIアシスタントを構築し、 より専門的な知識を得るためのツールを使うことができる。 + +00:30.890 --> 00:38.540 +明日で2週目が終了し、 エージェント紹介という超ジューシーなトピックが加わる。 + +00:38.570 --> 00:44.720 +我々は、 エージェントがより複雑な連続的活動をどのように行うか、 それらをより小さなステップに分解し、 + +00:44.720 --> 00:51.350 +それらの各ステップを処理できる専門のAISを持つことについて話すつもりだ。 + +00:51.350 --> 00:56.750 +具体的には、 マルチモダリティの導入だ。 + +00:56.780 --> 01:01.820 +私たちは、 画像を作成するようなことを担当できるスペシャリストを持つつもりです。 なぜなら、 + +01:01.850 --> 01:08.090 +それは楽しいことですし、 さらに洗練されたビジネス・アプリケーションを構築できるようになるからです。 + +01:08.090 --> 01:12.920 +それではまた次回、 とても楽しみにしています。 diff --git a/week5/community-contributions/subtitles/srts/59166919/ko_KR.srt b/week5/community-contributions/subtitles/srts/59166919/ko_KR.srt new file mode 100755 index 0000000..24ad15b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166919/ko_KR.srt @@ -0,0 +1,43 @@ +WEBVTT + +00:00.560 --> 00:03.590 +이것으로 도구 세션을 마칠게요 + +00:03.590 --> 00:08.720 +이 시점에서 여러분은 도구 전문가일 겁니다 왜냐하면 돌아가서 + +00:08.720 --> 00:17.120 +추가 사항을 추가했으니까요 LLM에게 비행 예약 기능을 주고 출력에 프린트할 수 있도록 하는 거죠 + +00:17.360 --> 00:19.940 +여기 온 걸 축하해요 + +00:19.970 --> 00:22.550 +이제 트랜스포머에 정통해졌네요 + +00:22.550 --> 00:28.340 +프론티어 LLM API에 대항해 코드를 작성할 수 있고 사용자 인터페이스와 전문성을 위한 도구를 + +00:28.340 --> 00:30.890 +이용해 인공지능 보조를 제작할 수 있죠 + +00:30.890 --> 00:38.540 +내일은 둘째 주를 마무리하는 날입니다 에이전트를 소개하는 아주 흥미로운 주제죠 + +00:38.570 --> 00:44.720 +지금부터는 에이전트가 어떻게 복잡한 순차적 활동을 수행하는지 살펴볼 + 
+00:44.720 --> 00:51.350 +겁니다 이를 더 작은 단계로 나누고 각 단계를 처리할 전문 AI를 두는 거죠 + +00:51.350 --> 00:56.750 +우리가 살펴볼 특정 영역은 다중 양상을 소개하는 거예요 + +00:56.780 --> 01:01.820 +이미지 생성과 같은 것을 담당할 전문가들도 갖게 될 겁니다 재미있을 + +01:01.850 --> 01:08.090 +테니까요 더 복잡한 비즈니스 응용 프로그램을 만들 수 있게 해주죠 + +01:08.090 --> 01:12.920 +그럼 다음 시간에 뵙죠 정말 기대되네요 diff --git a/week5/community-contributions/subtitles/srts/59166947/en_US.srt b/week5/community-contributions/subtitles/srts/59166947/en_US.srt new file mode 100755 index 0000000..0f1eca7 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166947/en_US.srt @@ -0,0 +1,313 @@ +WEBVTT + +00:01.040 --> 00:04.340 +Well, thank you for coming along for week two, day four. + +00:04.370 --> 00:06.920 +We have lots of good stuff in store today. + +00:06.920 --> 00:15.770 +It's another day of levelling up, of building new skills that adds to your capabilities of using Llms + +00:15.770 --> 00:19.490 +for generating important business value. + +00:19.940 --> 00:26.240 +As always, a quick recap of what you can do already describing Transformers and the terminology involved. + +00:26.240 --> 00:27.290 +You know it well. + +00:27.290 --> 00:33.920 +Confidently coding with the APIs for the top three frontier models, and now most recently, building + +00:33.920 --> 00:37.670 +a chatbot assistant, an AI chatbot including an interactive UI. + +00:37.670 --> 00:43.400 +And you're very familiar now with that messages structure going into OpenAI and with the way that the + +00:43.400 --> 00:46.130 +chat function works for Gradio. + +00:46.310 --> 00:49.880 +So today is about these things called tools. + +00:49.880 --> 00:54.080 +By the end, you'll be able to define them, you'll have common use cases for them, and you'll be able + +00:54.080 --> 00:57.650 +to code an AI assistant that uses tools. + +00:57.650 --> 00:58.910 +Let's get to it. + +00:59.570 --> 01:02.420 +So what are tools? 
+
+01:02.900 --> 01:11.300
+So it allows frontier models to connect with external functions with functionality outside the frontier
+
+01:11.330 --> 01:11.780
+model.
+
+01:11.810 --> 01:14.570
+In fact, tools can mean something broader than that.
+
+01:14.570 --> 01:15.650
+It can be other things too.
+
+01:15.680 --> 01:21.110
+But most commonly, when you hear people talk about tools, it's in the context of giving frontier models
+
+01:21.140 --> 01:23.870
+access to external functions.
+
+01:23.870 --> 01:32.300
+It allows for richer replies from an LLM by extending its knowledge, um, it can carry out advanced
+
+01:32.330 --> 01:39.380
+actions within your application, and it can enhance its abilities by, for example, giving it a calculator.
+
+01:39.530 --> 01:44.510
+So as I said at the end of the last time, this might sound very mysterious.
+
+01:44.510 --> 01:47.090
+What exactly is going on here?
+
+01:47.090 --> 01:52.430
+We're going to build something like a calculator, like a function that can do calculations, uh, even
+
+01:52.430 --> 01:54.860
+go as far as to do a sort of execution of Python code.
+
+01:54.860 --> 01:59.960
+And then we're going to sort of give that to the LLM and say, okay, you can use this.
+
+01:59.960 --> 02:03.140
+You can run this software on my computer in some way.
+
+02:03.140 --> 02:05.210
+It sounds mysterious.
+
+02:05.210 --> 02:06.860
+It sounds a bit spooky, really.
+
+02:06.920 --> 02:09.790
+Uh, but alas, it is not.
+
+02:09.790 --> 02:10.930
+Not that clever.
+
+02:10.930 --> 02:13.840
+It's a pretty simple workflow around it.
+
+02:13.870 --> 02:15.130
+Here's the scoop.
+
+02:15.130 --> 02:24.220
+What we do is we start by defining what functions we have available that the LLM is allowed to call.
+
+02:24.220 --> 02:25.900
+So we define these functions.
+
+02:25.900 --> 02:27.100
+Let's say we have a calculator.
+
+02:27.100 --> 02:28.390
+We define the calculator.
+ +02:28.390 --> 02:32.920 +We say what are the inputs, what kind of outputs and when should the LM use it. + +02:33.010 --> 02:36.550 +And then we tell the LM about that. + +02:36.550 --> 02:42.430 +When we make a call to do something we say to it, hey, can you can you respond to this user. + +02:42.430 --> 02:45.760 +And by the way, you have access to this tool. + +02:45.790 --> 02:51.970 +When the LM replies to us, it can either just respond with a prompt or it can respond with something + +02:51.970 --> 02:57.490 +like, hey, if I'm going to to to generate you a response, first I'm going to need to ask you to run + +02:57.490 --> 03:03.310 +that tool you told me about and run it with these inputs and then provide me back with the outputs. + +03:03.310 --> 03:12.280 +And then you take that, you run the tool and then you provide the responses back to the LM, and it + +03:12.280 --> 03:14.770 +then uses it to generate its response. + +03:14.860 --> 03:20.140 +So if you follow my drift there, it's not actually particularly amazing. + +03:20.140 --> 03:24.850 +It's that you call an LM and it responds and says, hey, I need you to call the tool that you told + +03:24.850 --> 03:25.570 +me you have. + +03:25.600 --> 03:31.630 +You do that, you provide it back to the LM, and then it's able to give you richer responses. + +03:32.320 --> 03:38.620 +And if you're really following along, you'll realize that that's not massively different to the kind + +03:38.650 --> 03:45.970 +of thing we did in the last lab when we just looked for a string and we just inserted extra context + +03:45.970 --> 03:52.330 +in the prompt that goes to the LM, it's just about really inserting extra context in prompts. + +03:52.360 --> 03:52.930 +All right. + +03:52.960 --> 03:56.860 +Anyway, hopefully I didn't muddle you there, but it's going to come together when you see the code, + +03:56.860 --> 03:57.820 +I promise you. + +03:58.630 --> 04:00.550 +But first, what are the use cases. 
+
+04:00.550 --> 04:02.500
+When when do we typically do this.
+
+04:02.500 --> 04:07.150
+There are four ones that really that you come across a lot.
+
+04:07.330 --> 04:15.100
+Um, you can use tools to fetch extra data, like look something up in a database, um, add knowledge.
+
+04:15.100 --> 04:19.720
+Uh, and again, you can think of it that's rather similar to what we did with belts in the last
+
+04:19.720 --> 04:23.800
+lab, but you can do that using tools instead.
+
+04:24.370 --> 04:30.940
+Uh, you can use it as a way that the LM can take an action, like booking a meeting, so you can tell
+
+04:30.970 --> 04:34.120
+it as part of your, uh, you have access.
+
+04:34.120 --> 04:40.240
+You have the ability to actually, uh, to carry out these, these, these items to buy a plane ticket
+
+04:40.240 --> 04:41.710
+to do, do the following.
+
+04:41.860 --> 04:47.050
+Um, and essentially in its response back, it will tell you that that's the tool that it wants to use,
+
+04:48.580 --> 04:51.880
+as I just mentioned, a use case would be a calculator.
+
+04:51.880 --> 04:58.510
+Uh, LLMs are famously not great at calculations because all they're trying to do is predict, uh, tokens
+
+04:58.510 --> 04:59.530
+in English language.
+
+04:59.530 --> 05:04.360
+They don't have, like, a calculator built in to a to a deep neural network.
+
+05:04.360 --> 05:07.090
+But you can provide that as a tool.
+
+05:07.270 --> 05:13.240
+And you can notice that, uh, GPT four is very good at calculations these days.
+
+05:13.240 --> 05:17.800
+And one wonders whether something that's going on behind the scenes might be something like this, that
+
+05:17.800 --> 05:22.020
+it might have its own tool made available in order to run calculations.
+
+05:22.020 --> 05:26.010
+Perhaps just speculation, but it seems very reasonable.
+
+05:27.090 --> 05:34.260
+Another thing it can do is modify the UI so you could tell it, hey, here's some tools.
+ +05:34.260 --> 05:39.390 +You can use, some functions you can call that will update different things on my user interface. + +05:39.390 --> 05:46.980 +And that would give the LLM the direct ability to trigger changes in the UI, which is a pretty cool + +05:46.980 --> 05:51.600 +idea to have sort of tighter integration between the LLM and the UI. + +05:52.740 --> 06:00.090 +Again, one thing worth pointing out for the second one here, and for the fourth one for taking actions + +06:00.090 --> 06:03.660 +and modifying the UI, there will be another way to achieve this. + +06:03.660 --> 06:09.090 +That would be perhaps a simpler approach if that's all you wanted to do. + +06:09.210 --> 06:14.460 +See if you can, based on something we've already done before, uh, give you a moment to pause, to + +06:14.490 --> 06:16.470 +think about what I might be getting at. + +06:17.070 --> 06:24.740 +The answer is, you remember, uh, in one of the earlier labs we had the model respond in JSON to respond + +06:24.740 --> 06:29.900 +with a structured response, and its response had JSON to tell us bits of information. + +06:29.900 --> 06:35.180 +In our case, it was about links and uh, giving us more information about fully qualified links and + +06:35.180 --> 06:36.440 +which links to collect. + +06:36.470 --> 06:41.900 +Well, similarly, we could just ask the model to respond in JSON with what actions need to be taken + +06:41.900 --> 06:46.790 +to book a meeting or respond in JSON based on how it wants the user interface modified. + +06:46.790 --> 06:50.690 +So there are other ways other than using tools to accomplish this. + +06:50.690 --> 06:55.430 +But if you want to be able to give it tools in addition to streaming back text, then this is a good + +06:55.430 --> 06:55.970 +solution. + +06:55.970 --> 07:00.740 +That's the that's the best time to use this when it's in conjunction with a number of other things that + +07:00.740 --> 07:01.730 +the LM is doing. 
+ +07:01.730 --> 07:05.600 +So these tools are sort of adding to its capabilities. + +07:06.800 --> 07:13.250 +So what we're going to do now is build an informed airline customer support agent. + +07:13.250 --> 07:19.190 +We're going to want to be able to tell it that we're traveling to Paris and then have it respond with + +07:19.190 --> 07:21.170 +a ticket price to Paris. + +07:21.170 --> 07:22.310 +That's the idea. + +07:22.310 --> 07:26.960 +We're going to do it with tools, and I will see you over in the lab to find out how. diff --git a/week5/community-contributions/subtitles/srts/59166947/ja_JP.srt b/week5/community-contributions/subtitles/srts/59166947/ja_JP.srt new file mode 100755 index 0000000..649d820 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166947/ja_JP.srt @@ -0,0 +1,259 @@ +WEBVTT + +00:01.040 --> 00:04.340 +さて、 第2週4日目もお付き合いいただきありがとうございました。 + +00:04.370 --> 00:06.920 +今日はいいことがたくさんある。 + +00:06.920 --> 00:19.490 +重要なビジネス価値を生み出すためにLlmsを活用する能力を高める、 新たなスキルを身につけるためのレベルアップの日だ。 + +00:19.940 --> 00:26.240 +いつものように、 トランスフォーマーについてすでに説明できることと、 それに関係する用語を簡単にまとめてみた。 + +00:26.240 --> 00:27.290 +よくご存知でしょう。 + +00:27.290 --> 00:33.920 +上位3つのフロンティアモデルのAPIを使って自信を持ってコーディングし、 最近ではチャットボットアシスタント、 + +00:33.920 --> 00:37.670 +対話型UIを含むAIチャットボットを構築している。 + +00:37.670 --> 00:43.400 +そして、 OpenAIに入るメッセージの構造や、 Gradioのチャット機能の仕組みは、 + +00:43.400 --> 00:46.130 +もうよくご存知でしょう。 + +00:46.310 --> 00:49.880 +というわけで、 今日は道具というものについて。 + +00:49.880 --> 00:57.650 +最後には、 それらを定義し、 そのための一般的な使用事例を持ち、 ツールを使用するAIアシスタントをコーディングできるようになるでしょう。 + +00:57.650 --> 00:58.910 +さっそく始めよう。 + +00:59.570 --> 01:02.420 +では、 道具とは何か? 
+ +01:02.900 --> 01:11.780 +そのため、 フロンティア・モデルは、 フロンティア・モデルの外部にある機能を持つ外部関数と接続することができる。 + +01:11.810 --> 01:14.570 +実際、 道具とはもっと広い意味を持つこともある。 + +01:14.570 --> 01:15.650 +他のことでもあり得る。 + +01:15.680 --> 01:23.870 +しかし、 一般的にツールについて語られるとき、 それはフロンティア・モデルに外部機能へのアクセスを与えるという文脈で語られることが多い。 + +01:23.870 --> 01:32.300 +LLMの知識を拡張することで、 LLMからのリッチな返信を可能にし、 アプリケーション内で高度なアクションを実行させ、 + +01:32.330 --> 01:39.380 +例えば電卓を持たせることでその能力を高めることができます。 + +01:39.530 --> 01:44.510 +だから、 前回の最後に言ったように、 これはとてもミステリアスに聞こえるかもしれない。 + +01:44.510 --> 01:47.090 +いったい何がどうなっているのか? + +01:47.090 --> 01:54.860 +電卓のようなもの、 計算ができる関数のようなものを作ろうと思っているんだ。 + +01:54.860 --> 01:59.960 +そして、 それをLLMに渡して、 オーケー、 これを使っていいよ、 と言うんだ。 + +01:59.960 --> 02:03.140 +このソフトを私のコンピューターで何らかの方法で動かすことができる。 + +02:03.140 --> 02:05.210 +ミステリアスな響きだ。 + +02:05.210 --> 02:06.860 +ちょっと不気味な感じだね。 + +02:06.920 --> 02:09.790 +でも、 残念ながら、 そうではないんだ。 + +02:09.790 --> 02:10.930 +それほど賢くはない。 + +02:10.930 --> 02:13.840 +このあたりのワークフローはいたってシンプルだ。 + +02:13.870 --> 02:15.130 +これがスクープだ。 + +02:15.130 --> 02:24.220 +まずは、 LM が呼び出すことのできる関数を定義することから始めます。 + +02:24.220 --> 02:25.900 +そこで、 これらの関数を定義する。 + +02:25.900 --> 02:27.100 +電卓があるとしよう。 + +02:27.100 --> 02:28.390 +私たちは計算機を定義する。 + +02:28.390 --> 02:32.920 +何がインプットで、 どのようなアウトプットがあり、 LMはいつそれを使うべきなのか。 + +02:33.010 --> 02:36.550 +そして、 そのことをLMに伝える。 + +02:36.550 --> 02:42.430 +私たちが何かをするために呼びかけるとき、 私たちはそれに向かってこう言う。 + +02:42.430 --> 02:45.760 +ところで、 あなたはこのツールにアクセスできる。 + +02:45.790 --> 02:51.970 +LMから返信があった場合、 ただプロンプトを表示するか、 あるいは、 もし私があなたに返信を返すのであれば、 + +02:51.970 --> 02:57.490 +まず、 あなたが教えてくれたツールを実行し、 これらのインプットを使って実行し、 + +02:57.490 --> 03:03.310 +私にアウトプットを返すようにお願いする必要があります。 + +03:03.310 --> 03:14.770 +そして、 そのツールを実行し、 LMにレスポンスを返す。 + +03:14.860 --> 03:20.140 +というわけで、 私の流れに従えば、 実は特別すごいわけではないのだ。 + +03:20.140 --> 03:24.850 +LMに電話をかけると、 LMが応答して、 あなたが持っていると言ったツールに電話をかけてほしい、 + +03:24.850 --> 03:25.570 +と言うんだ。 + +03:25.600 --> 03:31.630 +そうすれば、 LMにそれをフィードバックし、 LMはよりリッチなレスポンスを返してくれるようになる。 + +03:32.320 --> 03:38.620 +そして、 もしあなたが本当についてきてくれているのなら、 
前回のラボでやったような、 + +03:38.650 --> 03:52.330 +ただ文字列を探してLMに行くプロンプトに余計なコンテキストを挿入するようなことと大差ないことに気づくだろう。 + +03:52.360 --> 03:52.930 +分かった。 + +03:52.960 --> 03:57.820 +とにかく、 私があなたを混乱させなければいいのだが。 + +03:58.630 --> 04:00.550 +その前に、 どのような使用例があるのか。 + +04:00.550 --> 04:02.500 +普通はいつやるんだ? + +04:02.500 --> 04:07.150 +よく目にするのは4つ。 + +04:07.330 --> 04:15.100 +例えば、 データベースで何かを調べたり、 知識を追加したり。 + +04:15.100 --> 04:19.720 +前回のラボでベルトを使ったのと似たようなものだが、 + +04:19.720 --> 04:23.800 +代わりに道具を使うことができる。 + +04:24.370 --> 04:34.120 +ミーティングの予約など、 LMが行動を起こすための手段として使うことができる。 + +04:34.120 --> 04:41.710 +あなたには、 実際に、 えー、 これらを実行する能力がある。 + +04:41.860 --> 04:47.050 +そして、 基本的には、 その返答の中で、 今言ったように、 + +04:48.580 --> 04:51.880 +ユースケースは電卓である。 + +04:51.880 --> 04:59.530 +LMSは計算が苦手なことで有名だが、 それは英語のトークンを予測しようとしているからだ。 + +04:59.530 --> 05:04.360 +ディープ・ニューラル・ネットワークに電卓が組み込まれているわけではないのだ。 + +05:04.360 --> 05:07.090 +でも、 それをツールとして提供することはできる。 + +05:07.270 --> 05:13.240 +そして、 GPTの4番は最近、 計算がとても上手くなっていることにお気づきだろう。 + +05:13.240 --> 05:17.800 +そして、 舞台裏で進行していることは、 このようなことなのではないか、 計算を実行するために独自のツールを用意しているのではないか、 + +05:17.800 --> 05:22.020 +と考えてしまう。 + +05:22.020 --> 05:26.010 +憶測にすぎないかもしれないが、 非常に合理的だと思う。 + +05:27.090 --> 05:34.260 +もうひとつできることは、 UIを変更することだ。 + +05:34.260 --> 05:39.390 +ユーザー・インターフェースのさまざまな情報を更新する関数を呼び出すことができる。 + +05:39.390 --> 05:51.600 +これは、 LLMとUIをより緊密に統合させるためのかなりクールなアイデアだ。 + +05:52.740 --> 06:00.090 +繰り返しになるが、 2つ目と4つ目のアクションとUIの変更については、 + +06:00.090 --> 06:03.660 +別の方法がある。 + +06:03.660 --> 06:09.090 +それだけなら、 もっとシンプルな方法かもしれない。 + +06:09.210 --> 06:16.470 +前にやったことを踏まえて、 私が何を言いたいのか、 ちょっと立ち止まって考えてみてください。 + +06:17.070 --> 06:24.740 +答えは、 ええと、 以前のラボの1つで、 構造化されたレスポンスで応答するためにモデルにJSONで応答させ、 そのレスポンスにはJSONがあり、 + +06:24.740 --> 06:29.900 +私たちに情報の断片を伝えていたのを覚えていますか? 
+ +06:29.900 --> 06:36.440 +私たちの場合、 それはリンクとあーに関するもので、 完全修飾リンクとどのリンクを収集するかについての詳細な情報を与えてくれた。 + +06:36.470 --> 06:46.790 +同じように、 ミーティングを予約するために必要なアクションをJSONで応答するようにモデルに要求したり、 ユーザーインターフェイスをどのように変更したいかに基づいてJSONで応答したりすることができます。 + +06:46.790 --> 06:50.690 +だから、 道具を使う以外の方法もある。 + +06:50.690 --> 06:55.970 +しかし、 テキストをストリーミングで送り返すだけでなく、 ツールも与えたいのであれば、 これは良い解決策だ。 + +06:55.970 --> 07:01.730 +それこそ、 LMが行っている他の様々なことと連動しているときこそ、 これを使うベストなタイミングなのだ。 + +07:01.730 --> 07:05.600 +だから、 これらのツールはその能力をさらに高めているんだ。 + +07:06.800 --> 07:13.250 +そこで、 私たちがこれからやろうとしているのは、 情報に精通した航空会社のカスタマーサポートを作ることだ。 + +07:13.250 --> 07:21.170 +パリに旅行することを伝えて、 パリまでのチケット代を返信してもらえるようにしたい。 + +07:21.170 --> 07:22.310 +そういうことだ。 + +07:22.310 --> 07:26.960 +道具を使ってやるんだ。 その方法を見つけるためにラボで会おう。 diff --git a/week5/community-contributions/subtitles/srts/59166947/ko_KR.srt b/week5/community-contributions/subtitles/srts/59166947/ko_KR.srt new file mode 100755 index 0000000..bbf4562 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166947/ko_KR.srt @@ -0,0 +1,304 @@ +WEBVTT + +00:01.040 --> 00:04.340 +2주 차, 4일째에 함께해 주셔서 감사해요 + +00:04.370 --> 00:06.920 +오늘 멋진 걸 많이 준비했어요 + +00:06.920 --> 00:15.770 +오늘도 레벨업의 날입니다 중요한 비즈니스 가치를 창출하는 Lms를 이용해 여러분의 역량을 향상하는 + +00:15.770 --> 00:19.490 +새로운 기술을 개발하는 날이죠 + +00:19.940 --> 00:26.240 +늘 그렇듯, 이미 할 수 있는 일을 간단히 요약해보죠 트랜스포머와 관련된 용어를 설명하세요 + +00:26.240 --> 00:27.290 +잘 아시네요 + +00:27.290 --> 00:33.920 +상위 3개 프런티어 모델을 위해 API를 자신 있게 코딩했고 최근에는 챗봇 비서를 제작했습니다 + +00:33.920 --> 00:37.670 +대화형 UI를 갖춘 인공지능 챗봇이죠 + +00:37.670 --> 00:43.400 +이제 OpenAI의 메시지 구조에 아주 익숙해졌죠 그래디오를 위한 채팅 + +00:43.400 --> 00:46.130 +함수가 어떻게 작동하는지도요 + +00:46.310 --> 00:49.880 +오늘은 도구에 대해 알아보죠 + +00:49.880 --> 00:54.080 +결국 정의도 할 수 있고 공통 유스케이스를 갖게 될 겁니다 도구를 + +00:54.080 --> 00:57.650 +사용하는 인공지능 보조도 코딩할 수 있고요 + +00:57.650 --> 00:58.910 +Get it, get it 해 보죠 + +00:59.570 --> 01:02.420 +그럼 도구가 뭘까요? 
+ +01:02.900 --> 01:11.780 +즉, 프런티어 모델은 외부 기능과 외부 기능성을 연결할 수 있죠 + +01:11.810 --> 01:14.570 +도구의 의미는 그보다 더 광범위해요 + +01:14.570 --> 01:15.650 +다른 것도 가능해요 + +01:15.680 --> 01:21.110 +하지만 보통 툴이라고 하면 외부 기능에 접근하는 + +01:21.140 --> 01:23.870 +개척자 모델을 뜻하죠 + +01:23.870 --> 01:32.300 +LLM의 지식을 확장함으로써 더 많은 회신이 가능하게 합니다 응용 프로그램 내에서 고급 액션을 + +01:32.330 --> 01:39.380 +수행할 수 있고 성능을 향상할 수도 있습니다 예를 들어 계산기를 제공할 수도 있죠 + +01:39.530 --> 01:44.510 +지난 시간에 말했듯이 아주 신비롭게 들릴 수도 있어요 + +01:44.510 --> 01:47.090 +어떻게 어떻게 된 거죠? + +01:47.090 --> 01:52.430 +계산기 같은 걸 만들어 보죠 계산을 하는 함수요 일종의 파이썬 + +01:52.430 --> 01:54.860 +코드도 할 수 있는 함수요 + +01:54.860 --> 01:59.960 +그런 다음 그걸 LLM에 주고 이걸 사용하라고 하는 거죠 + +01:59.960 --> 02:03.140 +내 컴퓨터로 이 소프트웨어를 작동시켜요 + +02:03.140 --> 02:05.210 +신비롭게 들리네요 + +02:05.210 --> 02:06.860 +비트가 좀 으스스하네요 + +02:06.920 --> 02:09.790 +하지만 그렇지 않아요 + +02:09.790 --> 02:10.930 +별로 안 똑똑해요 + +02:10.930 --> 02:13.840 +워크플로우는 아주 간단해요 + +02:13.870 --> 02:15.130 +특종이에요 + +02:15.130 --> 02:24.220 +LM이 호출할 수 있는 사용 가능한 함수를 정의하는 것부터 시작하죠 + +02:24.220 --> 02:25.900 +이 함수들을 정의하죠 + +02:25.900 --> 02:27.100 +계산기가 있다고 가정해 보죠 + +02:27.100 --> 02:28.390 +계산기는 정의했어요 + +02:28.390 --> 02:32.920 +입력과 출력은 무엇인지 LM이 언제 사용해야 하는지도요 + +02:33.010 --> 02:36.550 +달 착륙선에 그 얘기를 하는 거죠 + +02:36.550 --> 02:42.430 +뭔가를 하려고 호출할 때 사용자에게 응답할 수 있는지 묻죠 + +02:42.430 --> 02:45.760 +이 도구에 엑세스할 수 있어요 + +02:45.790 --> 02:51.970 +LM이 응답할 때 프롬프트로 응답할 수도 있고 혹은 다른 것으로 응답할 수도 + +02:51.970 --> 02:57.490 +있습니다 응답을 생성해야 한다면 먼저 당신이 말한 도구를 실행하도록 + +02:57.490 --> 03:03.310 +요청해야 합니다 입력으로 실행하고 출력을 제공해야 하죠 + +03:03.310 --> 03:12.280 +그걸 가져가서 도구를 실행하고 LM에 응답을 다시 제공하면 LM은 응답을 생성하는 + +03:12.280 --> 03:14.770 +데 사용하죠 + +03:14.860 --> 03:20.140 +제 말을 이해하신다면 사실 그렇게 대단하진 않아요 + +03:20.140 --> 03:24.850 +LM을 호출하면 응답이 옵니다 당신이 갖고 있다고 한 도구를 호출해 + +03:24.850 --> 03:25.570 +주세요 + +03:25.600 --> 03:31.630 +그렇게 하면 LM에 다시 제공됩니다 그럼 더 풍부한 반응을 제공하죠 + +03:32.320 --> 03:38.620 +잘 따라오신다면 지난 랩에서 했던 것과 크게 다르지 않다는 + +03:38.650 --> 03:45.970 +걸 아실 겁니다 문자열을 찾아 프롬프트에 추가 컨텍스트를 
삽입해 LM으로 + +03:45.970 --> 03:52.330 +간 거요 프롬프트에 추가 컨텍스트를 삽입하는 거죠 + +03:52.360 --> 03:52.930 +좋아요 + +03:52.960 --> 03:56.860 +어쨌든 헷갈리신 게 아니면 좋겠네요 코드를 보시면 다 이해될 거예요 + +03:56.860 --> 03:57.820 +약속드리죠 + +03:58.630 --> 04:00.550 +먼저 유스 케이스가 뭐죠? + +04:00.550 --> 04:02.500 +보통 언제 이걸 하죠? + +04:02.500 --> 04:07.150 +자주 보게 되는 게 네 가지 있어요 + +04:07.330 --> 04:15.100 +도구를 이용해 추가 데이터를 가져올 수도 있어요 데이터베이스에서 찾아보고 지식을 추가하는 것처럼요 + +04:15.100 --> 04:19.720 +지난 랩에서 벨트랑 했던 것과 비슷하다고 생각하실 + +04:19.720 --> 04:23.800 +수 있지만 대신 도구를 사용할 수 있어요 + +04:24.370 --> 04:30.940 +LM이 행동을 취하는 방법으로 사용할 수 있습니다 회의를 예약하는 것처럼요 그럼 + +04:30.970 --> 04:34.120 +액세스 권한의 일부로 말할 수 있죠 + +04:34.120 --> 04:40.240 +실제로 이런 물건을 운반할 능력이 있고 비행기 표를 사서 다음 일을 + +04:40.240 --> 04:41.710 +할 수 있어요 + +04:41.860 --> 04:47.050 +기본적으로 응답을 보면 사용하고 싶은 툴이 뭔지 알려줍니다 방금 + +04:48.580 --> 04:51.880 +언급한 것처럼요 사용 사례는 계산기예요 + +04:51.880 --> 04:59.530 +LMS는 계산에 약하기로 유명하죠 영어로 토큰을 예측하는 게 전부니까요 + +04:59.530 --> 05:04.360 +심층 신경망에 계산기가 내장돼 있지 않아요 + +05:04.360 --> 05:07.090 +하지만 도구로 제공할 수 있어요 + +05:07.270 --> 05:13.240 +보시다시피 GPT 4는 요즘 연산에 아주 능숙하죠 + +05:13.240 --> 05:17.800 +배후에서 일어나는 일이 이런 건 아닌지 궁금하네요 계산을 + +05:17.800 --> 05:22.020 +실행하기 위해 고유한 도구를 사용할 수 있는 거죠 + +05:22.020 --> 05:26.010 +추측일 수도 있지만 아주 합리적인 것 같아요 + +05:27.090 --> 05:34.260 +UI 수정도 할 수 있어요 여기 도구가 있다고 말할 수 있게요 + +05:34.260 --> 05:39.390 +사용자 인터페이스에서 다양한 걸 업데이트하는 함수를 사용할 수 있어요 + +05:39.390 --> 05:46.980 +LLM은 UI에서 변화를 촉발하는 직접적인 기능을 갖게 됩니다 LLM과 UI 사이에 + +05:46.980 --> 05:51.600 +더 탄탄한 통합을 갖는다는 건 멋진 아이디어죠 + +05:52.740 --> 06:00.090 +두 번째에 대해 짚고 넘어갈 게 하나 있어요 네 번째 것은 행동을 취하고 UI 수정하는 + +06:00.090 --> 06:03.660 +거죠 이걸 달성할 다른 방법이 있어요 + +06:03.660 --> 06:09.090 +그게 당신이 원하는 전부라면 더 간단한 방법이겠죠 + +06:09.210 --> 06:14.460 +전에 했던 걸 바탕으로 잠시 멈춰서 제가 뭘 하려는지 + +06:14.490 --> 06:16.470 +생각해 보세요 + +06:17.070 --> 06:24.740 +답은 이거죠 초기 실험 중 하나에서 JSON에서 모델이 구조적 응답으로 응답하게 한 거 + +06:24.740 --> 06:29.900 +기억하시죠? 
그 응답은 JSON이 정보를 제공했고요 + +06:29.900 --> 06:35.180 +저희의 경우는 링크가 중요했어요 완전한 자격의 링크와 수집할 링크에 대한 정보를 + +06:35.180 --> 06:36.440 +더 많이 주는 거였죠 + +06:36.470 --> 06:41.900 +유사하게 모델에게 JSON에서 반응하도록 요청할 수도 있어요 회의를 예약하기 위해 어떤 행동을 취해야 하는지 + +06:41.900 --> 06:46.790 +또는 JSON에서 반응해야 하는지 사용자 인터페이스를 어떻게 수정하길 원하는지에 근거해서요 + +06:46.790 --> 06:50.690 +도구를 사용하는 것 외에 다른 방법도 있어요 + +06:50.690 --> 06:55.970 +하지만 스트리밍 백 텍스트 외에 도구를 제공하고 싶다면 이게 좋은 해결책이에요 + +06:55.970 --> 07:00.740 +그게 이걸 사용하기에 가장 좋은 때죠 LM이 작업하는 다른 여러 가지 작업과 + +07:00.740 --> 07:01.730 +함께요 + +07:01.730 --> 07:05.600 +이런 도구들이 기능에 추가되는 거죠 + +07:06.800 --> 07:13.250 +이제 할 일은 항공사 고객 지원 에이전트를 정보에 따라 만드는 거죠 + +07:13.250 --> 07:19.190 +우리가 파리로 간다고 말하고 파리행 비행기 표 가격으로 답장을 + +07:19.190 --> 07:21.170 +받아야 해요 + +07:21.170 --> 07:22.310 +그게 목적이죠 + +07:22.310 --> 07:26.960 +도구를 이용해 알아보겠습니다 실험실에서 방법을 알아보죠 diff --git a/week5/community-contributions/subtitles/srts/59166949/en_US.srt b/week5/community-contributions/subtitles/srts/59166949/en_US.srt new file mode 100755 index 0000000..fe786a3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166949/en_US.srt @@ -0,0 +1,463 @@ +WEBVTT + +00:00.260 --> 00:02.750 +Welcome back to making chatbots. + +00:02.780 --> 00:04.070 +Let's keep going. + +00:04.070 --> 00:09.650 +So for the next part we're going to beef up the system message to something a bit more interesting. + +00:09.650 --> 00:10.700 +System message. + +00:10.700 --> 00:12.710 +You're a helpful assistant in a clothes store. + +00:12.740 --> 00:15.920 +You should try to gently encourage the customer to try items on sale. + +00:15.950 --> 00:17.780 +Hats off 60% off. + +00:17.780 --> 00:19.880 +Most other items are 50% off. + +00:19.880 --> 00:24.530 +For example, if the customer says I'm looking to buy a hat, you could reply, wonderful! + +00:24.560 --> 00:28.100 +We have lots of hats, including several part of our sales event. + +00:28.100 --> 00:32.330 +Encourage the customer to buy hats if they're unsure what to get. 
+ +00:32.330 --> 00:36.050 +So what you're seeing here is a few things going on in this system prompt. + +00:36.050 --> 00:42.230 +We've got some facts that are being provided about the sales, the hats and other items. + +00:42.350 --> 00:47.930 +Um, you've got an example, an example of if the customer says this, you could say that. + +00:47.930 --> 00:52.280 +And that example both is a way to establish tone and style. + +00:52.400 --> 00:58.850 +Um, and it's also a way to introduce more facts about hats, uh, into the conversation. + +00:58.850 --> 01:01.670 +So this is all example of one shot prompting. + +01:01.670 --> 01:03.150 +And you could argue Multi-shot prompting. + +01:03.150 --> 01:06.720 +So we're giving it a few different sort of nuances of how to reply. + +01:06.930 --> 01:09.810 +Um, and building that into the system message. + +01:09.810 --> 01:13.950 +There are other ways of doing it that we'll talk about, or at least at least another way of doing it. + +01:14.040 --> 01:16.980 +But this is one very effective way. + +01:16.980 --> 01:25.950 +So we we add in that system message, and we're now going to have a chat with the with with the chat + +01:25.980 --> 01:26.580 +bot. + +01:26.670 --> 01:34.170 +So again we write our method of our generator chat um which takes a message and history because that's + +01:34.170 --> 01:36.030 +what Gradio wants to call us with. + +01:36.210 --> 01:43.650 +Um, and we first convert that into the format that OpenAI expects, um, by building the usual list + +01:43.650 --> 01:44.730 +that you're familiar with. + +01:44.730 --> 01:49.200 +I should also mention, I don't know if I mentioned this last time, that at the end here we have to + +01:49.230 --> 01:56.550 +add in, of course, into that list the latest message that the user is sending that gets added to the + +01:56.550 --> 01:58.770 +bottom as role user content. + +01:58.770 --> 02:00.480 +And that message. 
+ +02:00.480 --> 02:08.460 +Then of course, we make a call that at this point, you have ingrained in your deepest memory the create + +02:08.460 --> 02:12.630 +call and we have stream is true and we stream back results. + +02:12.630 --> 02:13.530 +So here we go. + +02:13.560 --> 02:14.760 +We'll bring that up. + +02:14.760 --> 02:16.680 +We'll bring it up in a separate window again. + +02:16.710 --> 02:17.340 +Why not. + +02:17.340 --> 02:20.910 +And let's talk to our shopping assistant. + +02:21.210 --> 02:22.050 +Hi there. + +02:23.520 --> 02:24.870 +Welcome to our store. + +02:24.900 --> 02:25.920 +How can I assist you today? + +02:25.920 --> 02:27.510 +Are you looking for anything specific? + +02:27.510 --> 02:33.090 +Say, uh, I'd like to buy some shoes. + +02:34.110 --> 02:35.280 +Great. + +02:36.480 --> 02:38.760 +We have a lovely selection. + +02:38.760 --> 02:41.580 +While you're browsing, I want to mention we have a fantastic sale going on. + +02:41.580 --> 02:43.140 +Most items are 50% off. + +02:43.140 --> 02:47.340 +If you're open to it, we have some stylish hats that are 60% off. + +02:47.370 --> 02:50.340 +They might be the perfect complement to your new shoes. + +02:50.340 --> 02:52.170 +Would you like to take a look at both? + +02:52.620 --> 02:56.490 +So I want to point out that that it's obviously figured out. + +02:56.490 --> 03:01.170 +It's got the knowledge that we supplied to it in the system prompt. + +03:01.170 --> 03:07.990 +But you hopefully will also notice that the sort of enthusiastic, effusive style that I used in that + +03:07.990 --> 03:16.030 +system prompt has rubbed off in a way that it's communicating in this kind of, uh, very, uh, amiable + +03:16.030 --> 03:16.960 +fashion. + +03:17.290 --> 03:22.720 +Um, and that's a big part of this kind of one shot or multi-shot prompting when you set the tone, + +03:22.720 --> 03:25.930 +give examples of how it should reply. + +03:26.860 --> 03:28.510 +Uh, so let's keep going. 
+ +03:28.510 --> 03:31.180 +Let's take that system message and and add in. + +03:31.180 --> 03:35.950 +If the customer asks for shoes, you could respond that shoes are not on sale. + +03:35.980 --> 03:37.720 +Let's say should respond. + +03:37.720 --> 03:39.910 +Should respond that shoes are not on sale today. + +03:39.910 --> 03:42.760 +But remind the customer to look at hats. + +03:42.760 --> 03:44.170 +So let's try this again. + +03:44.170 --> 03:45.580 +Let's see how this does. + +03:45.640 --> 03:48.160 +Uh, it's got another fact to add. + +03:48.310 --> 03:49.630 +Uh, you could argue that that's. + +03:49.660 --> 03:54.220 +Yeah, that's like another, uh, multi-shot prompt. + +03:54.250 --> 03:55.210 +Let's say. + +03:55.210 --> 03:55.990 +Hi there. + +03:57.640 --> 03:58.420 +Are you looking. + +03:58.420 --> 04:04.540 +I'd like to buy some shoes. + +04:04.540 --> 04:05.900 +That sounds great. + +04:05.930 --> 04:08.630 +I should mention shoes aren't on sale today. + +04:08.630 --> 04:10.940 +But while you're here, have you thought about checking out our hats? + +04:13.640 --> 04:20.030 +You've got to feel sorry for this poor customer who's going to get repeatedly pitched hats. + +04:20.330 --> 04:27.860 +Um, so you can see again how it's established, um, that shoes aren't on sale, but that hats very + +04:27.860 --> 04:28.910 +much are. + +04:29.180 --> 04:34.820 +And that is an example, then, of Multi-shot prompting, uh, in that we're giving it more examples + +04:34.820 --> 04:35.930 +to learn from. + +04:36.260 --> 04:44.510 +Um, so another thing that we can do that is interesting, um, is that whenever you've seen these constructions + +04:44.510 --> 04:47.960 +so far, you've always seen us beginning with the system message. + +04:47.960 --> 04:49.640 +The system messages come at the top. + +04:49.640 --> 04:54.830 +But in fact, with the OpenAI call, there's you're not constrained to have the system message at the + +04:54.830 --> 04:55.070 +top. 
+ +04:55.070 --> 04:58.460 +You can add in more system messages as you go. + +04:58.730 --> 05:05.930 +Um, and so for example, one of the things we can do, um, uh, and let me apologize for some very + +05:05.970 --> 05:09.120 +hacky code here that I will not recommend, but it's here to show the example. + +05:09.120 --> 05:15.390 +It's not the way that you should do it in practice, but what we could do is when we're building this + +05:15.390 --> 05:23.280 +chat generator, we could put in here if this current message that the user is sending us contains the + +05:23.280 --> 05:24.510 +word belt. + +05:24.510 --> 05:28.020 +And you can see in a rather unawesome way, I've just looked for the string belt. + +05:28.050 --> 05:33.150 +Of course, I should be testing whether it's the full word, and I should be thinking about uppercase, + +05:33.180 --> 05:34.770 +lowercase and so on. + +05:34.770 --> 05:38.220 +I'm not doing any of that, which is very naughty of me, but it shows the point. + +05:38.220 --> 05:43.560 +So if belt is in the word message, it's going to add into this set of messages. + +05:43.590 --> 05:50.490 +Another system message saying for added context, the store does not sell belts, but be sure to point + +05:50.490 --> 05:58.500 +out items on sale so that will be then added in to the prompt if the user asks for a belt. + +05:59.940 --> 06:06.030 +So let's give that a try and bring this up here. + +06:08.250 --> 06:09.180 +Hi there. + +06:11.130 --> 06:12.060 +Welcome to the store. + +06:12.090 --> 06:13.080 +I can assist you. + +06:13.530 --> 06:17.310 +I'd like to buy a belt. + +06:19.110 --> 06:19.710 +I'm sorry. + +06:19.710 --> 06:20.910 +We don't carry belts. + +06:20.910 --> 06:24.390 +However, we have fantastic items, including hats. + +06:24.420 --> 06:26.130 +60% off. + +06:26.310 --> 06:27.780 +So there you go. + +06:27.810 --> 06:28.620 +There you go. + +06:28.650 --> 06:29.880 +It's, um. 
+ +06:30.210 --> 06:33.270 +Uh, definitely pays attention. + +06:33.270 --> 06:41.310 +You can see that when the system message is added in as another row in this, uh, in this list of messages, + +06:41.310 --> 06:42.900 +it pays attention to it. + +06:42.900 --> 06:49.140 +And that gives us the opportunity to add context into the conversation. + +06:49.140 --> 06:58.740 +And this is a, uh, whilst it is, of course, uh, very kludgy code to be detecting a word, a substring + +06:58.740 --> 07:02.640 +like that, you can imagine that you could beef this up to be a bit more robust. + +07:02.670 --> 07:07.830 +You could properly you could have a little dictionary which looks for particular words. + +07:07.830 --> 07:15.910 +And when it finds them, it could use them to then enrich the the context in the right way. + +07:15.970 --> 07:23.140 +So it gives you a little, a little ability to be looking things up and adding them into the context. + +07:23.170 --> 07:29.950 +Now, you may be familiar with with some things about Rag, and you may be aware that that is a lot + +07:29.950 --> 07:31.150 +of what rag is about. + +07:31.180 --> 07:38.650 +Rag is about finding extra information that's relevant to the prompt, and adding it in to the context + +07:38.680 --> 07:41.200 +of the message that gets sent to the LM. + +07:41.230 --> 07:47.260 +Now, of course, Rag does that in a much more sophisticated and intelligent way than this hokey piece + +07:47.260 --> 07:48.430 +of code right here. + +07:48.610 --> 07:55.540 +But you can think of this as a as a light, a baby version of rag, and as an exercise for you. + +07:55.540 --> 08:01.030 +You can certainly beef this up a bit, and at the very least, use regex to make it look for a particular + +08:01.030 --> 08:01.540 +word. 
+ +08:01.540 --> 08:07.930 +Maybe have a little dictionary that has the the words, the different items in the store together with + +08:07.930 --> 08:13.730 +their price so that you could add that in as a system message to give it more context. + +08:13.730 --> 08:20.270 +You could try that out and see that you could build a chatbot that actually knows about the prices of + +08:20.270 --> 08:23.000 +the goods in its store, and that would be pretty cool. + +08:23.750 --> 08:27.200 +So that wraps up this particular experiment. + +08:27.200 --> 08:32.270 +I do just want to mention, I alluded earlier to the fact that there are other ways of doing Multi-shot + +08:32.270 --> 08:38.840 +prompting other than shoving it in the system prompt, and the other way is that you can have a user + +08:38.840 --> 08:43.430 +assistant, user assistant set of messages that hasn't actually happened. + +08:43.460 --> 08:49.580 +You can have a fictitious exchange between the user and the assistant that you include in the conversation + +08:49.580 --> 08:56.930 +before the current conversation, and use that as a way to prime the LM with similar conversations, + +08:56.930 --> 09:02.990 +so that it gets a sense of how it's responded to other questions. + +09:03.140 --> 09:09.200 +You can use that again, both to train it on style and also to supply extra facts. + +09:09.200 --> 09:14.640 +So there could have been an earlier interaction when there had been a question about a belt, and the + +09:14.640 --> 09:19.740 +assistant had already replied that there are no belts in the store, and it would have learnt from that. + +09:19.740 --> 09:25.380 +So either technique, they have pros and cons, whether you supply it in system prompts or whether you + +09:25.410 --> 09:33.270 +give example user assistant interactions for it to have as part of the input context for it to to be + +09:33.300 --> 09:34.410 +able to absorb. + +09:34.560 --> 09:38.640 +Um, and my ask to you is to try them both out. 
+ +09:38.640 --> 09:43.680 +So update this so that it uses user assistant interactions instead of a system prompt and see how that + +09:43.680 --> 09:44.160 +works. + +09:44.190 --> 09:47.430 +See if you think you get a better or a worse clothes store assistant. + +09:47.430 --> 09:53.460 +And then also make this change to make this a whole lot more robust, have a dictionary of different + +09:53.460 --> 10:00.780 +items in the store, look up their prices or their sale amounts, and then add that as context into + +10:00.780 --> 10:09.060 +the conversation so that the assistant responds with some expertise and have fun doing it, and I will + +10:09.060 --> 10:12.300 +see you for the next video to wrap up this day. diff --git a/week5/community-contributions/subtitles/srts/59166949/ja_JP.srt b/week5/community-contributions/subtitles/srts/59166949/ja_JP.srt new file mode 100755 index 0000000..7f0bbfd --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166949/ja_JP.srt @@ -0,0 +1,391 @@ +WEBVTT + +00:00.260 --> 00:02.750 +チャットボット作りにお帰りなさい。 + +00:02.780 --> 00:04.070 +続けよう。 + +00:04.070 --> 00:09.650 +そこで次のパートでは、 システム・メッセージをもう少し面白いものに強化する。 + +00:09.650 --> 00:10.700 +システムメッセージ。 + +00:10.700 --> 00:12.710 +あなたは洋服店の店員だ。 + +00:12.740 --> 00:15.920 +セール品を試してみるよう、 お客にそっと勧めるべきだ。 + +00:15.950 --> 00:17.780 +帽子60%オフ。 + +00:17.780 --> 00:19.880 +その他のほとんどの商品は50%オフ。 + +00:19.880 --> 00:24.530 +例えば、 お客さんが帽子を買いたいんだけど、 と言ったら、 素敵ですね、 と返せばいい! 
+ +00:24.560 --> 00:28.100 +私たちは、 販売イベントの一部を含め、 たくさんの帽子を用意しています。 + +00:28.100 --> 00:32.330 +何を買おうか迷っているお客さんには、 帽子を買うように勧める。 + +00:32.330 --> 00:36.050 +このシステム・プロンプトでは、 いくつかのことが進行している。 + +00:36.050 --> 00:42.230 +セールや帽子などについて、 いくつかの事実をお伝えします。 + +00:42.350 --> 00:47.930 +もしお客さんがこう言うなら、 あなたはこう言うことができる。 + +00:47.930 --> 00:52.280 +そしてその例はどちらも、 トーンとスタイルを確立する方法なのだ。 + +00:52.400 --> 00:58.850 +それに、 帽子に関する事実をもっと会話の中に取り入れる方法でもあるんだ。 + +00:58.850 --> 01:01.670 +つまり、 これはすべて一発プロンプトの例なのだ。 + +01:01.670 --> 01:03.150 +そして、 マルチショットのプロンプトを主張することもできる。 + +01:03.150 --> 01:06.720 +だから、 返答の仕方のニュアンスを少し変えているんだ。 + +01:06.930 --> 01:09.810 +それをシステムメッセージに組み込むんだ。 + +01:09.810 --> 01:13.950 +他のやり方もあるので、 それについてはまたお話ししますし、 少なくとも別のやり方もあります。 + +01:14.040 --> 01:16.980 +しかし、 これは非常に効果的な方法のひとつだ。 + +01:16.980 --> 01:26.580 +システム・メッセージを追加し、 チャットボットとチャットをします。 + +01:26.670 --> 01:36.030 +そこでもう一度、 メッセージと履歴を受け取るジェネレーター・チャットのメソッドを書く。 + +01:36.210 --> 01:44.730 +まず、 OpenAIが期待するフォーマットに変換します。 + +01:44.730 --> 01:49.200 +また、 前回言ったかどうかわからないが、 最後に、 ユーザーが送信している最新のメッセージを、 + +01:49.230 --> 01:58.770 +ロール・ユーザー・コンテンツとして一番下に追加しなければならない。 + +01:58.770 --> 02:00.480 +そしてそのメッセージ。 + +02:00.480 --> 02:08.460 +そしてもちろん、 この時点であなたの深い記憶に刻み込まれた "create "コールを行い、 "stream + +02:08.460 --> 02:12.630 +is true "で結果をストリームバックする。 + +02:12.630 --> 02:13.530 +それでは、 どうぞ。 + +02:13.560 --> 02:14.760 +その話を持ち出そう。 + +02:14.760 --> 02:16.680 +また別ウィンドウで表示させます。 + +02:16.710 --> 02:17.340 +なぜだ。 + +02:17.340 --> 02:20.910 +そして、 ショッピング・アシスタントに話を聞いてみよう。 + +02:21.210 --> 02:22.050 +こんにちは。 + +02:23.520 --> 02:24.870 +当店へようこそ。 + +02:24.900 --> 02:25.920 +本日はどのようなご用件でしょうか? + +02:25.920 --> 02:27.510 +何か特定のものをお探しですか? 
+ +02:27.510 --> 02:33.090 +靴を買いたいんだ。 + +02:34.110 --> 02:35.280 +素晴らしい。 + +02:36.480 --> 02:38.760 +素敵な品揃えです。 + +02:38.760 --> 02:41.580 +ご覧いただいている間に、 素晴らしいセールが開催中であることをお伝えしたい。 + +02:41.580 --> 02:43.140 +ほとんどの商品が50%オフ。 + +02:43.140 --> 02:47.340 +もしよろしければ、 60%オフのおしゃれな帽子をどうぞ。 + +02:47.370 --> 02:50.340 +新しい靴にぴったりかもしれない。 + +02:50.340 --> 02:52.170 +両方ご覧になりますか? + +02:52.620 --> 02:56.490 +だから、 私はそれが明らかに解明されていることを指摘したい。 + +02:56.490 --> 03:01.170 +システム・プロンプトで供給した知識を持っている。 + +03:01.170 --> 03:07.990 +しかし、 そのシステム・プロンプトで私が使っていた、 熱狂的で熱弁をふるうようなスタイルが、 このような、 + +03:07.990 --> 03:16.960 +ええと、 とても、 ええと、 愛想のいいファッションでコミュニケーションしていることに気づいていただければ幸いだ。 + +03:17.290 --> 03:22.720 +そして、 このようなワンショットやマルチショットのプロンプトの大きな役割は、 あなたがトーンを設定し、 + +03:22.720 --> 03:25.930 +どのように答えるべきかの例を示すことです。 + +03:26.860 --> 03:28.510 +じゃあ、 続けよう。 + +03:28.510 --> 03:31.180 +そのシステム・メッセージに、 こう付け加えよう。 + +03:31.180 --> 03:35.950 +靴が欲しい」と言われたら、 「靴はセール対象外です」と答えればいい。 + +03:35.980 --> 03:37.720 +と答えるべきだ。 + +03:37.720 --> 03:39.910 +今日は靴はセール対象外だと答えるべきだ。 + +03:39.910 --> 03:42.760 +でも、 お客さんには帽子を見るように注意してください。 + +03:42.760 --> 03:44.170 +では、 もう一度やってみよう。 + +03:44.170 --> 03:45.580 +どうなるか見てみよう。 + +03:45.640 --> 03:48.160 +ええと、 もうひとつ事実があるんだ。 + +03:48.310 --> 03:49.630 +そうとも言えるね。 + +03:49.660 --> 03:54.220 +ああ、 これもマルチショットのプロンプトだね。 + +03:54.250 --> 03:55.210 +こう言おう。 + +03:55.210 --> 03:55.990 +こんにちは。 + +03:57.640 --> 03:58.420 +お探しですか? + +03:58.420 --> 04:04.540 +靴を買いたいんだ。 + +04:04.540 --> 04:05.900 +それはいいね。 + +04:05.930 --> 04:08.630 +今日は靴はセール対象外なんだ。 + +04:08.630 --> 04:10.940 +でも、 ここにいる間に、 私たちの帽子をチェックしようと思ったことはある? 
+ +04:13.640 --> 04:20.030 +何度も帽子を投げつけられるかわいそうな客に同情せざるを得ない。 + +04:20.330 --> 04:28.910 +靴はセールにならないが、 帽子はセールになる。 + +04:29.180 --> 04:35.930 +これはマルチショット・プロンプトの一例で、 より多くの例を与えて学習させるということだ。 + +04:36.260 --> 04:47.960 +もうひとつ、 興味深いのは、 これまでこのような構成を見てきたとき、 いつもシステム・メッセージから始めていたことだ。 + +04:47.960 --> 04:49.640 +システムメッセージが一番上に来る。 + +04:49.640 --> 04:55.070 +しかし実際、 OpenAIのコールでは、 システムメッセージをトップに置くことに制約されることはない。 + +04:55.070 --> 04:58.460 +さらにシステムメッセージを追加していくこともできる。 + +04:58.730 --> 05:05.930 +例えば、 私たちができることのひとつは、 ええと、 ええと、 あまりお勧めはしないのですが、 + +05:05.970 --> 05:09.120 +例を示すためにここにあります。 + +05:09.120 --> 05:24.510 +しかし、 このチャット・ジェネレーターを構築する際に、 ユーザーが現在送っているメッセージにベルトという単語が含まれているかどうかをここに書き込むことができる。 + +05:24.510 --> 05:28.020 +そして、 ちょっと見苦しいですが、 紐ベルトを探したところです。 + +05:28.050 --> 05:34.770 +もちろん、 完全な単語かどうかをテストすべきだし、 大文字、 小文字なども考えるべきだ。 + +05:34.770 --> 05:38.220 +私はそんなことはしていない。 とてもエッチなことだが、 要点はここにある。 + +05:38.220 --> 05:43.560 +だから、 もしベルトがメッセージという言葉に入っていれば、 それはこのメッセージのセットに加えられることになる。 + +05:43.590 --> 05:50.490 +もう一つのシステムメッセージは、 この店ではベルトを販売していないが、 + +05:50.490 --> 05:58.500 +セール品を必ず示すこと。 + +05:59.940 --> 06:06.030 +では、 試しにこれをここに持ってきてみよう。 + +06:08.250 --> 06:09.180 +こんにちは。 + +06:11.130 --> 06:12.060 +ご来店ありがとうございます。 + +06:12.090 --> 06:13.080 +私がお手伝いします。 + +06:13.530 --> 06:17.310 +ベルトを買いたいんだ。 + +06:19.110 --> 06:19.710 +ごめんなさい. 
+ +06:19.710 --> 06:20.910 +ベルトは持っていない。 + +06:20.910 --> 06:24.390 +しかし、 帽子を含む素晴らしいアイテムがあります。 + +06:24.420 --> 06:26.130 +60%オフ。 + +06:26.310 --> 06:27.780 +そうだ。 + +06:27.810 --> 06:28.620 +そうだ。 + +06:28.650 --> 06:29.880 +それは、 うーん。 + +06:30.210 --> 06:33.270 +ああ、 間違いなく注意を払っている。 + +06:33.270 --> 06:42.900 +システム・メッセージがこのメッセージ・リストに別の行として追加されると、 そのメッセージに注意を払うのがわかるだろう。 + +06:42.900 --> 06:49.140 +そしてそれは、 私たちが会話に文脈を加える機会を与えてくれる。 + +06:49.140 --> 07:02.640 +もちろん、 このような単語や部分文字列を検出するのは非常に不格好なコードだが、 これをもう少し堅牢にすることは可能だろう。 + +07:02.670 --> 07:07.830 +特定の単語を探す小さな辞書があってもいい。 + +07:07.830 --> 07:15.910 +そして、 それを見つけたら、 適切な方法でコンテクストを豊かにするために使うことができる。 + +07:15.970 --> 07:23.140 +だから、 いろいろなことを調べたり、 文脈に加えたりすることができるんだ。 + +07:23.170 --> 07:31.150 +さて、 皆さんはラグについてある程度ご存知かもしれないし、 ラグとはそういうものだということもご存知かもしれない。 + +07:31.180 --> 07:41.200 +ラグとは、 プロンプトに関連する余分な情報を見つけ、 それをLMに送られるメッセージの文脈に加えることである。 + +07:41.230 --> 07:48.430 +もちろん、 ラグはこのような陳腐なコードよりも、 はるかに洗練されたインテリジェントな方法でそれを実現している。 + +07:48.610 --> 07:55.540 +でも、 これは軽いもの、 雑巾の赤ちゃんバージョン、 そして自分のための練習だと思えばいい。 + +07:55.540 --> 08:01.540 +少なくとも、 正規表現を使って特定の単語を検索させることはできる。 + +08:01.540 --> 08:07.930 +店内のさまざまな商品と、 その価格が書かれた小さな辞書を用意して、 それをシステムメッセージとして追加することで、 + +08:07.930 --> 08:13.730 +より多くの文脈を与えることができるかもしれない。 + +08:13.730 --> 08:20.270 +それを試してみて、 実際にその店の商品の価格を知っているチャットボットを作れば、 + +08:20.270 --> 08:23.000 +かなりクールだろう。 + +08:23.750 --> 08:27.200 +これで今回の実験は終了だ。 + +08:27.200 --> 08:43.430 +先ほど、 マルチショット・プロンプトをシステム・プロンプトに押し込む以外の方法があることを申し上げました。 + +08:43.460 --> 08:49.580 +現在の会話の前に、 ユーザーとアシスタントの架空のやりとりを会話に入れ、 + +08:49.580 --> 09:02.990 +それをLMに似たような会話をさせることで、 LMが他の質問に対してどのように答えたか感覚をつかむことができる。 + +09:03.140 --> 09:09.200 +また、 それを使ってスタイルをトレーニングしたり、 追加情報を提供することもできる。 + +09:09.200 --> 09:14.640 +だから、 以前にベルトについての質問があったときに、 アシスタントがすでに店にベルトはないと答えていて、 + +09:14.640 --> 09:19.740 +そこから学習したのかもしれない。 + +09:19.740 --> 09:25.380 +つまり、 システムのプロンプトでそれを提供するか、 ユーザー・アシスタントが入力コンテキストの一部として吸収できるようなインタラクション例を与えるか、 + +09:25.410 --> 09:34.410 +どちらの手法にも長所と短所がある。 + +09:34.560 --> 09:38.640 +ええと、 
私があなたにお願いしたいのは、 両方試してみることです。 + +09:38.640 --> 09:44.160 +そこで、 システムプロンプトの代わりにユーザーアシスタントのインタラクションを使うようにアップデートし、 それがどのように機能するか見てみよう。 + +09:44.190 --> 09:47.430 +洋服店の店員との相性が良いか悪いか。 + +09:47.430 --> 09:53.460 +そして、 この変更をもっとしっかりしたものにするために、 + +09:53.460 --> 10:00.780 +店内のさまざまなアイテムの辞書を用意し、 その価格やセール金額を調べ、 + +10:00.780 --> 10:12.300 +それを会話の文脈に加えることで、 アシスタントが専門的な知識を持って対応できるようにするのです。 diff --git a/week5/community-contributions/subtitles/srts/59166949/ko_KR.srt b/week5/community-contributions/subtitles/srts/59166949/ko_KR.srt new file mode 100755 index 0000000..f9f2a98 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166949/ko_KR.srt @@ -0,0 +1,442 @@ +WEBVTT + +00:00.260 --> 00:02.750 +챗봇 만들기입니다 안녕하세요 + +00:02.780 --> 00:04.070 +계속하죠 + +00:04.070 --> 00:09.650 +다음 부분에서는 시스템 메시지를 좀 더 흥미롭게 비트로 업그레이드할 거예요 + +00:09.650 --> 00:10.700 +시스템 메시지예요 + +00:10.700 --> 00:12.710 +옷 가게의 도우미로 일하죠 + +00:12.740 --> 00:15.920 +세일 중인 제품을 입어 보라고 부드럽게 격려해 주세요 + +00:15.950 --> 00:17.780 +60% 할인이라니 대단해요 + +00:17.780 --> 00:19.880 +다른 건 50% 할인해요 + +00:19.880 --> 00:24.530 +예를 들어 손님이 모자를 사러 왔다고 하면 아주 좋다고 대답하면 돼요 + +00:24.560 --> 00:28.100 +판매 행사에 쓸 모자가 아주 많아요 + +00:28.100 --> 00:32.330 +Get it. 
어떤 걸 사야 할지 모를 땐 모자를 사라고 권장하세요 + +00:32.330 --> 00:36.050 +시스템 프롬프트에서 몇 가지 일이 일어나고 있어요 + +00:36.050 --> 00:42.230 +판매와 모자, 기타 물품에 대한 정보를 입수했어요 + +00:42.350 --> 00:47.930 +예를 들어보죠 고객이 이걸 말하면 저걸 말하면 돼요 + +00:47.930 --> 00:52.280 +그 두 가지 예는 분위기와 스타일을 설정하는 방법이죠 + +00:52.400 --> 00:58.850 +모자에 관한 더 많은 사실을 대화에 소개하는 방법이기도 하죠 + +00:58.850 --> 01:01.670 +이게 다 원샷 프롬프트 예죠 + +01:01.670 --> 01:03.150 +멀티샷 프롬핑을 논할 수도 있어요 + +01:03.150 --> 01:06.720 +어떻게 응답할지 미묘한 차이를 주고 있어요 + +01:06.930 --> 01:09.810 +그걸 시스템 메시지로 구축하는 거죠 + +01:09.810 --> 01:13.950 +다른 방법도 있어요 나중에 얘기하거나 적어도 다른 방법이요 + +01:14.040 --> 01:16.980 +하지만 이건 아주 효과적인 방법이에요 + +01:16.980 --> 01:26.580 +이 시스템 메시지를 추가하면 채팅 봇과 채팅을 할 수 있어요 + +01:26.670 --> 01:34.170 +그래서 다시 발전기 채팅 방식을 작성해요 메시지와 역사가 필요하죠 그라디오가 그걸로 연락하고 + +01:34.170 --> 01:36.030 +싶어 하거든요 + +01:36.210 --> 01:44.730 +먼저 오픈AI가 기대하는 포맷으로 변환합니다 여러분이 익숙한 목록을 작성함으로써요 + +01:44.730 --> 01:49.200 +또 하나 언급할 것은 지난 시간에 말했는지 모르겠는데 여기 마지막에 + +01:49.230 --> 01:56.550 +우린 물론 리스트에 추가해야 합니다 사용자가 보내는 가장 최근 메시지를 역할 사용자 콘텐츠로 하단에 + +01:56.550 --> 01:58.770 +추가해야 하죠 + +01:58.770 --> 02:00.480 +그 메시지도요 + +02:00.480 --> 02:08.460 +물론 이때쯤이면 가장 깊은 메모리에 새겨진 create 호출을 사용하고 스트리밍 + +02:08.460 --> 02:12.630 +이즈 true로 결과를 스트리밍하죠 + +02:12.630 --> 02:13.530 +자, 시작하죠 + +02:13.560 --> 02:14.760 +그 얘기도 하죠 + +02:14.760 --> 02:16.680 +다른 창에서 다시 보여드릴게요 + +02:16.710 --> 02:17.340 +안 될 거 없죠 + +02:17.340 --> 02:20.910 +쇼핑 보조와 얘기해 보죠 + +02:21.210 --> 02:22.050 +안녕하세요 + +02:23.520 --> 02:24.870 +어서 오세요 + +02:24.900 --> 02:25.920 +무엇을 도와드릴까요? + +02:25.920 --> 02:27.510 +특별히 찾는 게 있나요? + +02:27.510 --> 02:33.090 +신발을 좀 사고 싶은데요 + +02:34.110 --> 02:35.280 +좋아요 + +02:36.480 --> 02:38.760 +예쁜 게 많아요 + +02:38.760 --> 02:41.580 +여러분이 보시는 동안 세일을 말씀드릴게요 + +02:41.580 --> 02:43.140 +대부분 50% 할인해요 + +02:43.140 --> 02:47.340 +괜찮으시면 60% 할인된 멋진 모자가 있어요 + +02:47.370 --> 02:50.340 +새 신발과 잘 어울릴 거예요 + +02:50.340 --> 02:52.170 +둘 다 보시겠어요? 
+ +02:52.620 --> 02:56.490 +그래서 제가 말씀드리고 싶은 건 확실히 해결됐다는 거예요 + +02:56.490 --> 03:01.170 +시스템 프롬프트에 제공한 지식이 있어요 + +03:01.170 --> 03:07.990 +하지만 제가 시스템 프롬프트에 사용한 열정적이고 활발한 + +03:07.990 --> 03:16.960 +스타일이 이런 식으로 소통하는 방식으로 전염됐다는 걸 눈치채셨길 바라요 + +03:17.290 --> 03:22.720 +이런 원샷 또는 멀티샷 프롬핑에서 중요한 부분이죠 분위기를 설정할 때 + +03:22.720 --> 03:25.930 +어떻게 응답해야 할지 예제를 제시하세요 + +03:26.860 --> 03:28.510 +계속 진행하죠 + +03:28.510 --> 03:31.180 +시스템 메시지를 추가하죠 + +03:31.180 --> 03:35.950 +손님이 신발을 찾으시면 세일 중인 신발이 아니라고 대답하세요 + +03:35.980 --> 03:37.720 +대응해야 한다고 해두죠 + +03:37.720 --> 03:39.910 +신발은 오늘 세일 안 한다고 대답하세요 + +03:39.910 --> 03:42.760 +손님들께 모자를 보여주세요 + +03:42.760 --> 03:44.170 +다시 해 보죠 + +03:44.170 --> 03:45.580 +어떻게 되나 보죠 + +03:45.640 --> 03:48.160 +하나 더 있어요 + +03:48.310 --> 03:49.630 +그렇게 볼 수도 있겠네요 + +03:49.660 --> 03:54.220 +네, 그것도 멀티샷 프롬프트죠 + +03:54.250 --> 03:55.210 +이렇게 하죠 + +03:55.210 --> 03:55.990 +안녕하세요 + +03:57.640 --> 03:58.420 +보고 있어요? + +03:58.420 --> 04:04.540 +신발을 좀 사려고요 + +04:04.540 --> 04:05.900 +좋아요 + +04:05.930 --> 04:08.630 +오늘 신발 세일 안 해요 + +04:08.630 --> 04:10.940 +여기 오신 김에 우리 모자도 구경해 보실래요? 
+
+04:13.640 --> 04:20.030
+계속 모자를 권유받는 불쌍한 손님을 불쌍히 여겨 주세요
+
+04:20.330 --> 04:27.860
+그래서 어떻게 확립됐는지 다시 볼 수 있죠 신발은 세일하지 않지만 모자는 세일한다는
+
+04:27.860 --> 04:28.910
+거요
+
+04:29.180 --> 04:34.820
+그게 멀티샷 프롬프트 예입니다 배울 수 있는 예시를 더 제공하는
+
+04:34.820 --> 04:35.930
+거죠
+
+04:36.260 --> 04:44.510
+우리가 할 수 있는 또 다른 흥미로운 것은 지금까지 이런 건축을 보실 때마다 항상 시스템 메시지로
+
+04:44.510 --> 04:47.960
+시작하는 걸 보셨을 거예요
+
+04:47.960 --> 04:49.640
+시스템 메시지는 상단에 있어요
+
+04:49.640 --> 04:55.070
+하지만 OpenAI 호출에서는 상단에 시스템 메시지가 있어야 한다는 제약이 없어요
+
+04:55.070 --> 04:58.460
+시스템 메시지는 가면서 추가하세요
+
+04:58.730 --> 05:05.930
+예를 들어 우리가 할 수 있는 것 중 하나는 아주 해커 같은 코드에 대해 사과드립니다 권장하진 않지만
+
+05:05.970 --> 05:09.120
+예제를 보여주기 위해 여기 나와있어요
+
+05:09.120 --> 05:15.390
+실제론 그렇게 하면 안 되지만 채팅 생성기를 만들 때 할 수 있는
+
+05:15.390 --> 05:24.510
+건 여기에 넣는 거죠 사용자가 보내는 현재 메시지가 워드 벨트를 포함한다면요
+
+05:24.510 --> 05:28.020
+다소 민망한 방법이지만 문자열 벨트를 찾아봤어요
+
+05:28.050 --> 05:33.150
+물론 완전한 단어인지 테스트해야 하고 대문자, 소문자 등을
+
+05:33.180 --> 05:34.770
+생각해야 하죠
+
+05:34.770 --> 05:38.220
+전 그런 거 안 해요 못된 짓이지만 요점은 알겠죠
+
+05:38.220 --> 05:43.560
+벨트가 메시지 안에 있으면 이 메시지 모음에 추가될 거예요
+
+05:43.590 --> 05:50.490
+또 다른 시스템 메시지는 추가 컨텍스트를 위해 마트는 벨트를 팔지 않습니다 하지만
+
+05:50.490 --> 05:58.500
+세일 중인 아이템을 꼭 지적하세요 사용자가 벨트를 요청하면 프롬프트에 추가될 거예요
+
+05:59.940 --> 06:06.030
+한 번 해보죠 이걸 여기로 불러와요
+
+06:08.250 --> 06:09.180
+안녕하세요
+
+06:11.130 --> 06:12.060
+어서 오세요
+
+06:12.090 --> 06:13.080
+제가 도와드릴게요
+
+06:13.530 --> 06:17.310
+벨트 하나 사려고요
+
+06:19.110 --> 06:19.710
+미안해요
+
+06:19.710 --> 06:20.910
+벨트는 없어요
+
+06:20.910 --> 06:24.390
+하지만 모자도 있고 멋진 아이템도 있어요
+
+06:24.420 --> 06:26.130
+60% 할인요
+
+06:26.310 --> 06:27.780
+자, 됐어요
+
+06:27.810 --> 06:28.620
+여기요
+
+06:28.650 --> 06:29.880
+네
+
+06:30.210 --> 06:33.270
+확실히 주의를 기울이죠
+
+06:33.270 --> 06:41.310
+시스템 메시지가 이 메시지 목록의 다른 행으로 추가되면 주의를
+
+06:41.310 --> 06:42.900
+기울이죠
+
+06:42.900 --> 06:49.140
+대화에 맥락을 추가할 기회를 주는 거죠
+
+06:49.140 --> 06:58.740
+단어나 서브스트링을 감지하는 건 매우 복잡한 코드지만 좀 더 튼튼하게
+
+06:58.740 --> 07:02.640
+만들 수도 있어요
+
+07:02.670 
--> 07:07.830
+특정 단어를 찾는 작은 사전도 하나 있어야 해요
+
+07:07.830 --> 07:15.910
+그걸 찾으면 적절한 방식으로 문맥을 풍부하게 하는 거죠
+
+07:15.970 --> 07:23.140
+따라서 여러분이 뭔가를 찾아 컨텍스트에 추가할 수 있는 능력을 주죠
+
+07:23.170 --> 07:29.950
+랙에 대해 좀 아실지도 모르겠네요 랙이 의미하는 바가 아주 크다는 것도
+
+07:29.950 --> 07:31.150
+알 거예요
+
+07:31.180 --> 07:38.650
+Rag는 프롬프트와 관련된 추가 정보를 찾아 LM으로 전송되는 메시지의 컨텍스트에
+
+07:38.680 --> 07:41.200
+추가하는 거죠
+
+07:41.230 --> 07:48.430
+물론 래그는 훨씬 더 정교하고 똑똑한 방법으로 그걸 하죠 여기 이 진부한 코드보다는요
+
+07:48.610 --> 07:55.540
+하지만 이걸 조명이나 아기용 랙이라고 생각해도 돼요 운동으로 생각하세요
+
+07:55.540 --> 08:01.030
+비트를 좀 더 보강할 수도 있어요 적어도 정규식을 써서 특정 단어를 찾아볼 수
+
+08:01.030 --> 08:01.540
+있죠
+
+08:01.540 --> 08:07.930
+작은 사전을 하나 준비하세요 스토어에 있는 다양한 아이템의 가격을 함께
+
+08:07.930 --> 08:13.730
+적어서 시스템 메시지로 추가하는 거죠 컨텍스트를 더 주려고요
+
+08:13.730 --> 08:20.270
+시험해 보고 챗봇을 만들 수 있다는 걸 알 수 있어요 스토어에 있는 상품의 가격을
+
+08:20.270 --> 08:23.000
+아는 챗봇요 그럼 멋질 거예요
+
+08:23.750 --> 08:27.200
+이번 실험은 여기까지예요
+
+08:27.200 --> 08:32.270
+아까 언급하고 싶은 게 있었는데 멀티샷 프롬프트를 시스템 프롬프트에
+
+08:32.270 --> 08:38.840
+밀어 넣는 것 말고 다른 방법도 있어요 다른 방법은 사용자 비서가 있는 거죠 사용자 비서가
+
+08:38.840 --> 08:43.430
+메시지 세트를 갖는 건데 실제로 일어나지 않았어요
+
+08:43.460 --> 08:49.580
+사용자와 비서 사이의 가상의 교환을 가질 수 있습니다 현재 대화
+
+08:49.580 --> 08:56.930
+전에 포함된 대화에요 그걸 LM을 프라임하는 방법으로 사용할 수 있습니다
+
+08:56.930 --> 09:02.990
+다른 질문에 어떻게 반응했는지 감을 잡을 수 있도록요
+
+09:03.140 --> 09:09.200
+다시 한번 써먹으세요 스타일 훈련도 하고 추가 정보도 제공하고요
+
+09:09.200 --> 09:14.640
+그 전에 벨트에 관한 질문이 들어왔을 때 비서가 이미 매장에 벨트가
+
+09:14.640 --> 09:19.740
+없다고 답했을 수도 있어요 그럼 거기서 배웠겠죠
+
+09:19.740 --> 09:25.380
+기술에는 장단점이 있습니다 시스템 프롬프트에서 제공하든
+
+09:25.410 --> 09:34.410
+사용자 보조 상호 작용을 제공하든 입력 컨텍스트의 일부로 받아들일 수 있도록요
+
+09:34.560 --> 09:38.640
+둘 다 시험해 보고 싶어요
+
+09:38.640 --> 09:43.680
+이걸 업데이트해 사용자 비서 상호 작용을 사용하도록 하세요 시스템 프롬프트 대신에요 어떻게 되는지
+
+09:43.680 --> 09:44.160
+보시죠
+
+09:44.190 --> 09:47.430
+더 나은 옷 가게 도우미가 되는지, 더 나빠지는지 확인해 보세요 
+ +09:47.430 --> 09:53.460 +그리고 이걸 훨씬 더 견고하게 만들 이 변화도 가하세요 스토어에 있는 + +09:53.460 --> 10:00.780 +다양한 아이템에 대한 사전을 만들고 가격이나 판매 금액을 찾아보세요 그런 다음 + +10:00.780 --> 10:09.060 +그걸 대화에 추가해 비서가 전문 지식으로 답변할 수 있도록요 재미있게 하세요 오늘을 마무리할 + +10:09.060 --> 10:12.300 +다음 비디오에서 뵙죠 diff --git a/week5/community-contributions/subtitles/srts/59166951/en_US.srt b/week5/community-contributions/subtitles/srts/59166951/en_US.srt new file mode 100755 index 0000000..8e04ae3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166951/en_US.srt @@ -0,0 +1,343 @@ +WEBVTT + +00:00.710 --> 00:02.780 +All right, back to the lab. + +00:02.780 --> 00:03.950 +Back to our project. + +00:03.980 --> 00:06.230 +Time to work with tools. + +00:06.530 --> 00:11.210 +I am in the week two folder in JupyterLab, and I'm launching day four. + +00:11.240 --> 00:18.110 +It's time for us to bring together what we've done so far to make a customer service assistant for a + +00:18.110 --> 00:19.880 +fictitious airline. + +00:19.910 --> 00:21.770 +We start with some imports. + +00:21.770 --> 00:25.550 +As usual, we initialize a load of key. + +00:25.580 --> 00:32.870 +We're going to use GPT four mini today, and the OpenAI initialization is there. + +00:32.900 --> 00:33.890 +The system message. + +00:33.890 --> 00:39.920 +You're a helpful assistant for an airline called flight I, flight II flight A, however you want to + +00:39.920 --> 00:40.520 +call it. + +00:40.610 --> 00:41.630 +There it is. + +00:41.660 --> 00:45.200 +Give short, courteous answers no more than one sentence. + +00:45.200 --> 00:46.400 +Always be accurate. + +00:46.400 --> 00:48.770 +If you don't know the answer, say so. + +00:48.800 --> 00:54.290 +This, of course, a very good type of, uh, system prompt. + +00:54.350 --> 01:00.410 +Um, if you're, uh, want to have a strong focus on lack of hallucinations on truthfulness. + +01:00.410 --> 01:01.790 +So we run that. 
+ +01:01.820 --> 01:04.370 +Then this is something you're now very familiar with. + +01:04.370 --> 01:08.540 +This is the chat function in the style that Gradio expects it. + +01:08.570 --> 01:14.630 +It takes a message, it takes history, and it then builds the style that is expected by OpenAI. + +01:14.660 --> 01:16.940 +You may notice this looks a bit shorter than before. + +01:16.940 --> 01:19.850 +And that's because this time I'm not streaming back results. + +01:19.850 --> 01:20.900 +I think we've done enough of that. + +01:20.900 --> 01:25.400 +And since we're going with these short responses, streaming is probably overkill. + +01:25.400 --> 01:27.230 +Let's see this in action. + +01:27.260 --> 01:28.280 +Up it comes. + +01:28.280 --> 01:29.870 +We know Gradio so well now. + +01:29.870 --> 01:33.440 +We don't need to show off about it and we can say hi there. + +01:34.700 --> 01:36.680 +Hello, how can I assist you today? + +01:36.710 --> 01:41.360 +I want to go to London, my hometown. + +01:41.360 --> 01:42.170 +I always want to go there. + +01:42.200 --> 01:42.950 +Great choice. + +01:42.950 --> 01:45.800 +Would you like to help finding flights to London? + +01:46.460 --> 01:47.450 +Yes. + +01:47.450 --> 01:50.300 +How much is a ticket? + +01:52.100 --> 01:56.060 +I don't have real time pricing, but you can check our website or app for the latest ticket prices. + +01:56.090 --> 01:56.480 +London. + +01:56.480 --> 01:59.390 +So you know it's good to see as instructed. + +01:59.390 --> 02:00.920 +It does not hallucinate prices. + +02:00.920 --> 02:02.900 +It doesn't try and go there. + +02:02.900 --> 02:09.200 +It does what it's told, and you can also see it's giving short one line responses just as we asked. + +02:09.230 --> 02:11.600 +Okay, back we go. + +02:11.630 --> 02:17.780 +So it is time to talk about tools, an incredibly powerful feature provided by the frontier LMS. 
+ +02:17.780 --> 02:21.770 +You can write a function, have it call that function as part of its response. + +02:21.770 --> 02:24.140 +Sounds almost spooky. + +02:24.170 --> 02:27.680 +We're giving it the power to run code on our machine. + +02:27.710 --> 02:33.110 +As I said, it's just a kind of story and that will soon be very clear to you. + +02:33.530 --> 02:39.470 +So let's start by making ourselves a function, a function that is going to be a useful function that + +02:39.470 --> 02:41.510 +we want to arm our alarm with. + +02:41.510 --> 02:45.710 +And that function is going to be called get ticket price given a city. + +02:45.710 --> 02:49.130 +So uh, it's going to begin by printing. + +02:49.130 --> 02:52.490 +Get ticket price called for destination City. + +02:52.670 --> 02:58.250 +And we're doing that so that we can watch later to see when this function is called. + +02:58.520 --> 03:03.380 +Uh, what we do is we take the destination city and we make it lowercase. + +03:03.380 --> 03:10.130 +So this works and we look it up in this dictionary where we've got lowercase cities and prices. + +03:10.130 --> 03:13.190 +If it doesn't find it, it says unknown. + +03:13.400 --> 03:15.170 +Let's just add one in here. + +03:15.200 --> 03:18.650 +Why not change things on the fly? + +03:18.680 --> 03:20.180 +Probably break everything. + +03:21.110 --> 03:21.770 +Hopefully not. + +03:21.800 --> 03:28.100 +Let's give ourselves Berlin and a nice, cheap, special deal for for flights to Berlin. + +03:28.100 --> 03:29.000 +Why not? + +03:29.330 --> 03:29.900 +Um. + +03:29.930 --> 03:31.880 +Okay, let's run that. + +03:32.180 --> 03:35.090 +So we will now try this out. + +03:35.120 --> 03:44.270 +Get ticket price to Berlin, see what we get. + +03:44.300 --> 03:46.400 +And it says tool, get ticket price. + +03:46.400 --> 03:49.490 +Called for Berlin for $99. + +03:49.520 --> 03:50.990 +Now you might be thinking to yourself. + +03:51.020 --> 03:51.980 +What does he mean, tool? 
+ +03:51.980 --> 03:54.080 +This isn't a tool, it's just a function. + +03:54.110 --> 03:56.630 +And the answer is for now, it's just a function. + +03:56.630 --> 03:58.490 +We're about to make it into a tool. + +03:58.490 --> 04:02.570 +I don't know if you actually sound that way, but that's how you sound in my mind. + +04:02.930 --> 04:07.400 +So anyway, that that is us creating our tool. + +04:07.940 --> 04:15.560 +Now, the process of putting these tools into our interface with an LLM is a bit storied. + +04:15.560 --> 04:20.420 +It's not going to be as simple as bringing up a Gradio interface. + +04:20.420 --> 04:23.660 +Regrettably, uh, it's more involved. + +04:23.660 --> 04:24.770 +There's good reason for that. + +04:24.770 --> 04:26.180 +And we'll find out why. + +04:26.240 --> 04:28.340 +Um, but it is a little bit more involved. + +04:28.460 --> 04:34.940 +Um, but the good news is it's very much a sort of cookie cutter style that can be replicated for other + +04:34.940 --> 04:36.830 +function calls for other tools. + +04:36.830 --> 04:39.740 +So you'll be able to reuse this for your own projects. + +04:39.740 --> 04:41.390 +And I very much hope you do. + +04:41.420 --> 04:47.630 +One of the intentions of having these useful projects in here is so that you can then take this as a + +04:47.630 --> 04:51.290 +resource and use these bits of code for your own projects. + +04:51.440 --> 04:55.160 +Um, and I'll certainly be recommending that you try adding your own tools. + +04:55.160 --> 04:57.980 +So you should be closely following this. + +04:58.550 --> 05:04.130 +Uh, and the first thing I'm going to mention is that we need to build a particular dictionary structure + +05:04.130 --> 05:07.700 +that's required to describe the function we just wrote. + +05:07.700 --> 05:09.380 +And this is what it looks like. + +05:09.410 --> 05:11.150 +Price function, I'll call it. + +05:11.150 --> 05:12.050 +You call it anything you want. 
+ +05:12.080 --> 05:15.290 +You give it a name and you describe it. + +05:15.290 --> 05:21.470 +And the way you describe it is in plain Old English, because this is going to be given to the LLM so + +05:21.470 --> 05:25.070 +that it can understand when is it appropriate to call this function. + +05:25.070 --> 05:28.460 +So it says get the price of a return ticket to the destination city. + +05:28.490 --> 05:30.710 +Call this whenever you need to know the ticket price. + +05:30.710 --> 05:34.940 +For example, when a customer asks how much is a ticket to the city? + +05:34.970 --> 05:38.360 +So giving it an example is always a good, good technique. + +05:38.360 --> 05:39.950 +And that's what we're using here. + +05:39.950 --> 05:43.640 +And then you provide the parameters in this setup here. + +05:43.640 --> 05:47.060 +And our function has one parameter destination city. + +05:47.060 --> 05:50.330 +And that is what the parameter does. + +05:50.480 --> 05:54.230 +So that's how you describe the function that you're using. + +05:54.230 --> 05:58.070 +And you can see I say that it's a required parameter. + +05:58.640 --> 06:00.530 +So that is the setup. + +06:00.560 --> 06:03.110 +And at this point I'm going to pause for a moment. + +06:03.110 --> 06:05.210 +And in the next video we're going to keep going. + +06:05.210 --> 06:09.650 +And we're going to arm the LLM with this function. + +06:09.650 --> 06:10.700 +See you there. 
diff --git a/week5/community-contributions/subtitles/srts/59166951/ja_JP.srt b/week5/community-contributions/subtitles/srts/59166951/ja_JP.srt new file mode 100755 index 0000000..bc04f2b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166951/ja_JP.srt @@ -0,0 +1,322 @@ +WEBVTT + +00:00.710 --> 00:02.780 +よし、 ラボに戻ろう。 + +00:02.780 --> 00:03.950 +話をプロジェクトに戻そう。 + +00:03.980 --> 00:06.230 +道具を使う時間だ。 + +00:06.530 --> 00:11.210 +僕は今、 JupyterLabの2週目のフォルダにいて、 4日目に突入するところだ。 + +00:11.240 --> 00:19.880 +これまでやってきたことを結集して、 架空の航空会社のカスタマーサービス・アシスタントを作る時が来た。 + +00:19.910 --> 00:21.770 +まずは輸入品から。 + +00:21.770 --> 00:25.550 +いつものように、 キーのロードを初期化する。 + +00:25.580 --> 00:32.870 +今日はGPT four miniを使うつもりで、 OpenAIの初期化はそこにある。 + +00:32.900 --> 00:33.890 +システムメッセージ。 + +00:33.890 --> 00:40.520 +あなたはI便、 II便、 A便と呼ばれる航空会社のアシスタントだ。 + +00:40.610 --> 00:41.630 +あれだ。 + +00:41.660 --> 00:45.200 +一文以内で短く丁寧に答えること。 + +00:45.200 --> 00:46.400 +常に正確に。 + +00:46.400 --> 00:48.770 +答えがわからなければ、 そう言ってください。 + +00:48.800 --> 00:54.290 +これはもちろん、 システム・プロンプトの非常に良いタイプだ。 + +00:54.350 --> 01:00.410 +ええと、 もし、 幻覚がないことに強く焦点を当てたいのであれば、 真実性を重視したい。 + +01:00.410 --> 01:01.790 +だから、 それを実行するんだ。 + +01:01.820 --> 01:04.370 +それなら、 これはもうお馴染みのことだ。 + +01:04.370 --> 01:08.540 +これはGradioが期待するスタイルのチャット機能である。 + +01:08.570 --> 01:14.630 +メッセージを受け取り、 履歴を受け取り、 そしてOpenAIが期待するスタイルを構築する。 + +01:14.660 --> 01:16.940 +以前より少し短く見えるかもしれない。 + +01:16.940 --> 01:19.850 +そして、 今回は結果をストリーミングで返さないからだ。 + +01:19.850 --> 01:20.900 +それはもう十分やったと思う。 + +01:20.900 --> 01:25.400 +それに、 このような短い返答をするのだから、 ストリーミングはやり過ぎだろう。 + +01:25.400 --> 01:27.230 +実際に見てみよう。 + +01:27.260 --> 01:28.280 +上がってきた。 + +01:28.280 --> 01:29.870 +私たちは今、 グラディオのことをよく知っている。 + +01:29.870 --> 01:33.440 +私たちはそれを見せびらかす必要はないし、 そこで挨拶することもできる。 + +01:34.700 --> 01:36.680 +こんにちは、 本日はどのようなご用件でしょうか? + +01:36.710 --> 01:41.360 +故郷のロンドンに行きたい。 + +01:41.360 --> 01:42.170 +私はいつもそこに行きたいと思っている。 + +01:42.200 --> 01:42.950 +素晴らしい選択だ。 + +01:42.950 --> 01:45.800 +ロンドン行きのフライトをお探しですか? 
+ +01:46.460 --> 01:47.450 +そうだ。 + +01:47.450 --> 01:50.300 +チケットはいくらですか? + +01:52.100 --> 01:56.060 +リアルタイムの価格は分からないが、 最新のチケット価格はウェブサイトやアプリで確認できる。 + +01:56.090 --> 01:56.480 +ロンドンだ。 + +01:56.480 --> 01:59.390 +だから、 指示された通りに見るのがいいんだ。 + +01:59.390 --> 02:00.920 +価格を幻覚することはない。 + +02:00.920 --> 02:02.900 +そこに行こうとはしない。 + +02:02.900 --> 02:09.200 +言われたとおりに動くし、 私たちが頼んだように1行の短い返事をしているのもわかる。 + +02:09.230 --> 02:11.600 +よし、 戻ろう。 + +02:11.630 --> 02:17.780 +そこで、 フロンティアLMSが提供する信じられないほど強力な機能であるツールについてお話ししましょう。 + +02:17.780 --> 02:21.770 +関数を書いて、 レスポンスの一部としてその関数を呼び出すことができる。 + +02:21.770 --> 02:24.140 +ほとんど不気味な響きだ。 + +02:24.170 --> 02:27.680 +私たちのマシン上でコードを実行する力を与えているのだ。 + +02:27.710 --> 02:33.110 +さっきも言ったように、 これは一種の物語に過ぎない。 + +02:33.530 --> 02:41.510 +では、 まず自分自身で関数を作ってみよう。 この関数は、 アラームを武装させるのに便利な関数だ。 + +02:41.510 --> 02:45.710 +この関数は、 都市を指定してチケット価格を取得する。 + +02:45.710 --> 02:49.130 +だから、 印刷することから始まるんだ。 + +02:49.130 --> 02:52.490 +目的地の都市で呼び出されるチケット料金を入手する。 + +02:52.670 --> 02:58.250 +そして、 この関数がいつ呼び出されるかを後で確認できるようにするためだ。 + +02:58.520 --> 03:03.380 +つまり、 目的地の都市を小文字にするんだ。 + +03:03.380 --> 03:10.130 +そこで、 小文字の都市と価格を辞書で調べてみる。 + +03:10.130 --> 03:13.190 +見つからなければ不明と表示される。 + +03:13.400 --> 03:15.170 +ここで1つ付け加えよう。 + +03:15.200 --> 03:18.650 +なぜその場で変更しないのか? + +03:18.680 --> 03:20.180 +おそらく、 すべてを壊してしまうだろう。 + +03:21.110 --> 03:21.770 +そうでないことを祈る。 + +03:21.800 --> 03:28.100 +ベルリンと、 ベルリン行きの航空券のための素敵で、 安くて、 特別な取引をしよう。 + +03:28.100 --> 03:29.000 +なぜだ? + +03:29.330 --> 03:29.900 +うーん。 + +03:29.930 --> 03:31.880 +よし、 実行してみよう。 + +03:32.180 --> 03:35.090 +では、 これを試してみよう。 + +03:35.120 --> 03:44.270 +ベルリンまでのチケット代、 何が手に入るか見てみよう。 + +03:44.300 --> 03:46.400 +そして、 ツール、 チケット代と書いてある。 + +03:46.400 --> 03:49.490 +ベルリンに99ドルで電話。 + +03:49.520 --> 03:50.990 +今、 あなたはこう思ったかもしれない。 + +03:51.020 --> 03:51.980 +道具ってどういう意味? 
+ +03:51.980 --> 03:54.080 +これはツールではなく、 単なる機能だ。 + +03:54.110 --> 03:56.630 +そして答えは、 今のところただの機能だ。 + +03:56.630 --> 03:58.490 +我々はそれをツールにしようとしている。 + +03:58.490 --> 04:02.570 +実際にそう聞こえるかどうかはわからないが、 私の中ではそう聞こえる。 + +04:02.930 --> 04:07.400 +とにかく、 これが僕らのツールを作るということなんだ。 + +04:07.940 --> 04:15.560 +さて、 これらのツールをLLMとのインターフェイスに導入する過程には、 ちょっとした物語がある。 + +04:15.560 --> 04:20.420 +グラディオのインターフェイスを立ち上げるような単純なものにはならないだろう。 + +04:20.420 --> 04:23.660 +残念ながら、 もっと複雑なんだ。 + +04:23.660 --> 04:24.770 +それには理由がある。 + +04:24.770 --> 04:26.180 +そして、 その理由を突き止める。 + +04:26.240 --> 04:28.340 +うーん、 でももう少し複雑なんだ。 + +04:28.460 --> 04:36.830 +うーん、 でも良いニュースは、 他のツールの他のファンクション・コールでも再現できる、 ある種のクッキー・カッターのようなスタイルだということだ。 + +04:36.830 --> 04:39.740 +だから、 これを自分のプロジェクトに再利用することができる。 + +04:39.740 --> 04:41.390 +そして、 そうなることを強く望んでいる。 + +04:41.420 --> 04:51.290 +ここに有用なプロジェクトを掲載した意図のひとつは、 これをリソースとして、 自分のプロジェクトにコードの断片を使えるようにすることだ。 + +04:51.440 --> 04:55.160 +そして、 あなた自身のツールを追加してみることをお勧めします。 + +04:55.160 --> 04:57.980 +だから、 あなたはこれを注意深く追うべきだ。 + +04:58.550 --> 05:07.700 +ええと、 最初に言っておくのは、 今書いた関数を記述するために必要な特定の辞書構造を構築する必要があるということだ。 + +05:07.700 --> 05:09.380 +そして、 こんな感じだ。 + +05:09.410 --> 05:11.150 +プライス・ファンクションと呼ぼう。 + +05:11.150 --> 05:12.050 +好きなように呼べばいい。 + +05:12.080 --> 05:15.290 +名前をつけて、 それを説明する。 + +05:15.290 --> 05:25.070 +LLMがこの関数をいつ呼び出すのが適切かを理解できるようにするためだ。 + +05:25.070 --> 05:28.460 +つまり、 目的地までの往復航空券の料金を取得すると書いてある。 + +05:28.490 --> 05:30.710 +チケットの値段を知りたいときはいつでも電話してください。 + +05:30.710 --> 05:34.940 +例えば、 客が「市内までのチケットはいくらですか? 
+ +05:34.970 --> 05:38.360 +だから、 例を挙げることは常に良い、 良いテクニックなんだ。 + +05:38.360 --> 05:39.950 +それが、 ここで使っているものだ。 + +05:39.950 --> 05:43.640 +そして、 このセットアップでパラメータを指定する。 + +05:43.640 --> 05:47.060 +そして、 この関数には1つのパラメータがある。 + +05:47.060 --> 05:50.330 +そして、 それこそがパラメーターの役割なのだ。 + +05:50.480 --> 05:54.230 +そうやって、 使っている機能を説明するんだ。 + +05:54.230 --> 05:58.070 +そして、 必須パラメータだと言っているのがわかるだろう。 + +05:58.640 --> 06:00.530 +これがセットアップだ。 + +06:00.560 --> 06:03.110 +そしてこの時点で、 私は少し立ち止まるつもりだ。 + +06:03.110 --> 06:05.210 +そして次のビデオでは、 さらに続けるつもりだ。 + +06:05.210 --> 06:09.650 +そして、 LLMにこの機能を持たせるつもりだ。 + +06:09.650 --> 06:10.700 +そこで会おう diff --git a/week5/community-contributions/subtitles/srts/59166951/ko_KR.srt b/week5/community-contributions/subtitles/srts/59166951/ko_KR.srt new file mode 100755 index 0000000..96eb19a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166951/ko_KR.srt @@ -0,0 +1,340 @@ +WEBVTT + +00:00.710 --> 00:02.780 +좋아요, 연구실로 돌아가죠 + +00:02.780 --> 00:03.950 +우리 프로젝트로 돌아가죠 + +00:03.980 --> 00:06.230 +도구를 사용할 시간이에요 + +00:06.530 --> 00:11.210 +전 주피터랩의 2주 차 폴더에 있고 4일째를 맞이했어요 + +00:11.240 --> 00:18.110 +이제 지금까지 했던 걸 활용할 차례예요 가상의 항공사의 고객 서비스 담당자를 + +00:18.110 --> 00:19.880 +만드는 거죠 + +00:19.910 --> 00:21.770 +수입품부터 시작하죠 + +00:21.770 --> 00:25.550 +늘 그렇듯 키를 잔뜩 준비해요 + +00:25.580 --> 00:32.870 +오늘은 GPT 4 미니를 사용할 겁니다 오픈AI 초기화가 진행 중이죠 + +00:32.900 --> 00:33.890 +시스템 메시지요 + +00:33.890 --> 00:40.520 +1편인지 2편인지 A편인지 하는 항공사의 조수로 일하죠 + +00:40.610 --> 00:41.630 +저기 있네요 + +00:41.660 --> 00:45.200 +짧고 공손하게 한 문장 이상 대답하지 마세요 + +00:45.200 --> 00:46.400 +항상 정확해야 해요 + +00:46.400 --> 00:48.770 +모르면 모른다고 하세요 + +00:48.800 --> 00:54.290 +이건 아주 좋은 시스템 프롬프트예요 + +00:54.350 --> 01:00.410 +진실성에 대한 환각의 부재에 집중하고 싶다면요 + +01:00.410 --> 01:01.790 +그걸 실행하죠 + +01:01.820 --> 01:04.370 +그럼 이제 아주 익숙한 거예요 + +01:04.370 --> 01:08.540 +이건 그라디오가 예상하는 채팅 함수예요 + +01:08.570 --> 01:14.630 +메시지와 역사를 취하고 오픈AI에 기대되는 스타일을 구축하죠 + +01:14.660 --> 01:16.940 +비트가 전보다 좀 짧아진 걸 느끼실 거예요 + +01:16.940 --> 01:19.850 +이번엔 결과를 스트리밍하지 않기 
때문이죠 + +01:19.850 --> 01:20.900 +그 정도면 충분한 것 같아요 + +01:20.900 --> 01:25.400 +짧은 응답을 받는 거라 스트리밍은 과잉 대응이에요 + +01:25.400 --> 01:27.230 +어떻게 작동하는지 보죠 + +01:27.260 --> 01:28.280 +올라와요 + +01:28.280 --> 01:29.870 +우린 이제 그라디오를 잘 알아요 + +01:29.870 --> 01:33.440 +자랑할 필요 없이 인사만 하면 돼요 + +01:34.700 --> 01:36.680 +안녕하세요, 무엇을 도와드릴까요? + +01:36.710 --> 01:41.360 +제 고향인 런던에 가고 싶어요 + +01:41.360 --> 01:42.170 +늘 가고 싶어요 + +01:42.200 --> 01:42.950 +탁월한 선택이에요 + +01:42.950 --> 01:45.800 +런던행 비행기표 구하는 거 도와줄래요? + +01:46.460 --> 01:47.450 +네 + +01:47.450 --> 01:50.300 +한 장에 얼마죠? + +01:52.100 --> 01:56.060 +실시간 가격은 없지만 웹사이트나 앱에서 최신 티켓 가격을 확인해 보세요 + +01:56.090 --> 01:56.480 +런던요 + +01:56.480 --> 01:59.390 +그럼 배운 대로 하는 게 좋다는 걸 알겠군요 + +01:59.390 --> 02:00.920 +가격을 착각하지 않아요 + +02:00.920 --> 02:02.900 +그쪽으로 가지 않아요 + +02:02.900 --> 02:09.200 +우리가 말한 대로 하고 있어요 그리고 우리가 요구한 대로 짧게 한 줄로 응답하고 있죠 + +02:09.230 --> 02:11.600 +좋아요, 다시 가죠 + +02:11.630 --> 02:17.780 +이제 프론티어 LMS가 제공하는 강력한 기능인 툴에 대해 얘기해 보죠 + +02:17.780 --> 02:21.770 +응답의 일부로 함수를 써서 그 함수를 호출하게 할 수 있어요 + +02:21.770 --> 02:24.140 +으스스하게 들리네요 + +02:24.170 --> 02:27.680 +우리 컴퓨터에서 코드를 실행할 힘을 주는 거죠 + +02:27.710 --> 02:33.110 +말씀드렸듯이 그냥 이야기예요 곧 분명하게 알게 되실 거예요 + +02:33.530 --> 02:39.470 +그럼 함수부터 만들어 보죠 알람을 무장하는 + +02:39.470 --> 02:41.510 +유용한 함수요 + +02:41.510 --> 02:45.710 +그 함수는 도시당 티켓 가격 get이라고 불릴 거예요 + +02:45.710 --> 02:49.130 +프린팅으로 시작할 거예요 + +02:49.130 --> 02:52.490 +Get up 목적지 시티 티켓팅하세요 + +02:52.670 --> 02:58.250 +이 함수가 언제 호출되는지 나중에 보기 위해 그렇게 하고 있어요 + +02:58.520 --> 03:03.380 +목적지 도시를 소문자로 만드는 거예요 + +03:03.380 --> 03:10.130 +이건 작동하죠 이 사전에서 찾아볼게요 소문자 도시와 가격이 있죠 + +03:10.130 --> 03:13.190 +못 찾으면 미확인이라고 뜨죠 + +03:13.400 --> 03:15.170 +여기에 하나를 추가할게요 + +03:15.200 --> 03:18.650 +그때그때 바꾸면 되잖아요 + +03:18.680 --> 03:20.180 +아마 다 부서질 거예요 + +03:21.110 --> 03:21.770 +안 그러길 바라요 + +03:21.800 --> 03:28.100 +베를린에 가서 싸고 좋은 특별 할인을 받자고요 베를린으로 가는 비행기요 + +03:28.100 --> 03:29.000 +왜요? 
+ +03:29.330 --> 03:29.900 +네 + +03:29.930 --> 03:31.880 +좋아요, 실행해 보죠 + +03:32.180 --> 03:35.090 +이제 이걸 시험해 보죠 + +03:35.120 --> 03:44.270 +베를린행 항공권 가격을 알아보죠 get it + +03:44.300 --> 03:46.400 +도구, 티켓가격 get + +03:46.400 --> 03:49.490 +베를린에 99달러 걸었어요 + +03:49.520 --> 03:50.990 +이런 생각이 들 거예요 + +03:51.020 --> 03:51.980 +무슨 뜻이죠? + +03:51.980 --> 03:54.080 +이건 도구가 아니라 함수예요 + +03:54.110 --> 03:56.630 +대답은 지금으로선 그냥 함수라는 거죠 + +03:56.630 --> 03:58.490 +도구로 만들 거예요 + +03:58.490 --> 04:02.570 +실제로 그렇게 들리는지 모르겠지만 제 마음속에선 그렇게 들려요 + +04:02.930 --> 04:07.400 +어쨌든, 이게 도구를 만드는 우리 모습이었어요 + +04:07.940 --> 04:15.560 +이런 도구들을 LLM과 인터페이스에 넣는 과정은 좀 복잡해요 비트 + +04:15.560 --> 04:20.420 +그래디오 인터페이스를 불러오는 것처럼 간단하지 않을 거예요 + +04:20.420 --> 04:23.660 +안타깝게도 더 복잡해요 + +04:23.660 --> 04:24.770 +그럴 만한 이유가 있죠 + +04:24.770 --> 04:26.180 +이유를 알아보죠 + +04:26.240 --> 04:28.340 +하지만 비트가 좀 더 복잡해요 + +04:28.460 --> 04:34.940 +좋은 소식은 쿠키 커터 스타일과 비슷해서 다른 도구의 함수 호출에 따라 복제할 + +04:34.940 --> 04:36.830 +수 있다는 거죠 + +04:36.830 --> 04:39.740 +여러분의 프로젝트에 재사용할 수 있어요 + +04:39.740 --> 04:41.390 +꼭 그러길 바라요 + +04:41.420 --> 04:47.630 +이런 유용한 프로젝트가 여기 있는 목적은 이걸 리소스로 갖고 와 여러분 프로젝트에 + +04:47.630 --> 04:51.290 +이 비트의 코드를 사용할 수 있도록 하는 거죠 + +04:51.440 --> 04:55.160 +자신만의 도구를 추가해 보시길 권해드리고 싶어요 + +04:55.160 --> 04:57.980 +그러니 잘 따라 하세요 + +04:58.550 --> 05:04.130 +제일 먼저 말씀드릴 것은 우리가 방금 쓴 함수를 설명하는 데 필요한 + +05:04.130 --> 05:07.700 +특정 사전 구조를 구축해야 한다는 거예요 + +05:07.700 --> 05:09.380 +이렇게 생긴 거예요 + +05:09.410 --> 05:11.150 +가격 함수라고 부를게요 + +05:11.150 --> 05:12.050 +마음대로 부르세요 + +05:12.080 --> 05:15.290 +이름을 지어주고 묘사해 보세요 + +05:15.290 --> 05:21.470 +설명은 그냥 옛날 영어로 해주세요 왜냐하면 이게 LLM에 주어질 것이기 때문에 이 함수를 + +05:21.470 --> 05:25.070 +언제 호출하는 게 적절한지 이해할 수 있거든요 + +05:25.070 --> 05:28.460 +목적지까지 왕복 항공권의 가격을 get get이라고 뜨네요 + +05:28.490 --> 05:30.710 +비행기 표가 궁금하면 언제든 전화해요 + +05:30.710 --> 05:34.940 +예를 들어 도시행 기차표가 얼마냐고 손님이 물으면요 + +05:34.970 --> 05:38.360 +예를 들어주는 건 언제나 좋은 기술이죠 + +05:38.360 --> 05:39.950 +그걸 여기서 사용하고 있어요 + +05:39.950 --> 05:43.640 +그런 다음 이 셋업에서 
매개 변수를 제공하죠 + +05:43.640 --> 05:47.060 +함수에는 하나의 매개 변수 대상 도시만 있어요 + +05:47.060 --> 05:50.330 +그게 매개 변수가 하는 일이죠 + +05:50.480 --> 05:54.230 +이게 여러분이 사용하는 함수를 설명하는 방법이에요 + +05:54.230 --> 05:58.070 +보다시피 필수 매개 변수죠 + +05:58.640 --> 06:00.530 +그게 설정이에요 + +06:00.560 --> 06:03.110 +여기서 잠시 멈춰 볼게요 + +06:03.110 --> 06:05.210 +다음 비디오에서도 계속 할 거예요 + +06:05.210 --> 06:09.650 +이 함수로 LLM을 무장시킬 거예요 + +06:09.650 --> 06:10.700 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/59166981/en_US.srt b/week5/community-contributions/subtitles/srts/59166981/en_US.srt new file mode 100755 index 0000000..911d61f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166981/en_US.srt @@ -0,0 +1,211 @@ +WEBVTT + +00:00.980 --> 00:04.040 +Welcome to week two, day five. + +00:04.070 --> 00:09.050 +The last day of week two where a lot is coming together. + +00:09.050 --> 00:16.100 +I am so grateful that you're sticking with it, and I'm going to make it worth your while because today + +00:16.100 --> 00:18.620 +is going to be really, really good fun. + +00:18.620 --> 00:21.530 +I'm excited to get into this. + +00:21.890 --> 00:24.410 +It's the big conclusion of the second week. + +00:24.740 --> 00:27.800 +Again, I'm going to keep saying what you can do. + +00:27.800 --> 00:32.840 +I think it's so important to celebrate your upskilling, you know, Transformers back to front. + +00:32.840 --> 00:38.660 +You can code against the frontier APIs, you can build an AI assistant, and you can add tools to give + +00:38.660 --> 00:39.800 +it expertise. + +00:39.830 --> 00:42.440 +Today we introduce agents. + +00:42.440 --> 00:48.650 +We talk about how agents can carry out more advanced sequential activities. + +00:48.650 --> 00:56.450 +And then we do something super fun creating a multimodal AI assistant using agents and tools. + +00:57.620 --> 00:59.390 +So what are agents? + +00:59.720 --> 01:01.190 +Agents, I should say. + +01:01.220 --> 01:02.390 +An agent I. 
+ +01:02.420 --> 01:02.900 +An agent. + +01:03.530 --> 01:07.790 +It is one of these umbrella terms that people can use in different contexts. + +01:07.790 --> 01:12.140 +So it is one of these things that, that that can mean different things to different people. + +01:12.140 --> 01:17.660 +But generally speaking, most often people are talking about software entities that are autonomous. + +01:17.660 --> 01:25.640 +They can perform tasks not just in the sense of taking an input prompt and generating text. + +01:25.820 --> 01:27.530 +Um, typical characteristics. + +01:27.530 --> 01:28.700 +Let's say they are autonomous. + +01:28.700 --> 01:33.740 +They have some sort of agency, they are goal oriented, that they have some kind of thing that they're + +01:33.740 --> 01:37.520 +setting out to do, and that they are task specific. + +01:37.520 --> 01:42.620 +They are usually specialized on being good at one thing or another. + +01:43.010 --> 01:48.230 +Um, and they're typically designed to be part of something called an agent framework, which is a sort + +01:48.230 --> 01:55.190 +of environment in which agents can interact to solve more complex problems and potentially with limited + +01:55.190 --> 01:56.450 +human involvement. + +01:56.450 --> 02:00.020 +So it's not like it's just a sort of request response situation with a human. + +02:00.020 --> 02:06.150 +But you can imagine this sort of environment where multiple software agents that could be combinations + +02:06.150 --> 02:12.690 +of llms along with traditional software interacting in order to carry out tasks. + +02:12.690 --> 02:19.770 +And so some of the features you might expect is the ability to have memory or persistence that sort + +02:19.770 --> 02:26.820 +of goes beyond just a request response, the ability to have some sort of decision making and orchestration + +02:26.820 --> 02:30.750 +about what does what are planning abilities. 
+
+02:30.930 --> 02:36.240
+And sometimes that is just a matter of the environment as some planning coded into it.
+
+02:36.240 --> 02:40.410
+Sometimes you have an LLM which is responsible for planning.
+
+02:40.410 --> 02:45.840
+It's a model that knows how to take complex problems and break it down into smaller problems for other
+
+02:45.840 --> 02:47.400
+models to take care of.
+
+02:47.880 --> 02:53.310
+And then use of tools is often also an example of agentic AI.
+
+02:53.310 --> 02:59.370
+This is where, of course, as you are now very familiar, we give models the ability to do things like
+
+02:59.370 --> 03:06.910
+connect to databases or connect to the internet or whatever we want because we are providing it access
+
+03:06.910 --> 03:10.450
+to functions and we know how that works behind under the hood.
+
+03:10.450 --> 03:17.440
+Now we know that it's really just a fancy if statement, but it gives the effect that the Llms are able
+
+03:17.440 --> 03:18.580
+to do this.
+
+03:19.960 --> 03:22.540
+So we're about to do a few things.
+
+03:22.540 --> 03:26.170
+Let me just quickly sort of set the scene for you.
+
+03:26.200 --> 03:34.390
+We're going to first build a function that can generate images, a good multimodal use case.
+
+03:34.390 --> 03:37.990
+We're going to have an LLM call that can do that.
+
+03:37.990 --> 03:39.760
+And it's going to be a function that does it.
+
+03:39.760 --> 03:42.670
+And you can think of that in its own right as being like an agent.
+
+03:42.670 --> 03:49.000
+It's like a piece of software that is able to take this very specific, specialized instruction and
+
+03:49.000 --> 03:49.540
+do it.
+
+03:49.540 --> 03:58.990
+That will be an artist that we will create in code with the help of Dall-E three, the image generation
+
+03:58.990 --> 04:02.240
+model from uh, OpenAI.
+ +04:02.480 --> 04:07.910 +Uh, and, you know, if you want to to quibble, you could argue that image generation is not in itself + +04:07.910 --> 04:09.650 +an LM thing. + +04:09.680 --> 04:16.250 +Uh, lm being language models, but these days, generally llms are used interchangeably with the broader + +04:16.250 --> 04:18.380 +gen AI context. + +04:18.380 --> 04:24.590 +And so one does tend to think of image generation and other kinds of multimodal generation as falling + +04:24.590 --> 04:28.100 +within the LM engineer's, uh, toolkit. + +04:29.120 --> 04:35.510 +So we're then going to look to, to make agents these sort of, uh, these, these functions that are + +04:35.510 --> 04:36.290 +able to do things. + +04:36.290 --> 04:43.700 +And we're going to add sound as well as images, and then we're going to have an agent framework in + +04:43.730 --> 04:50.000 +that we are going to teach our AI assistant, the same airline assistant that we've been working on + +04:50.030 --> 04:52.580 +how to speak and draw. + +04:52.760 --> 04:55.820 +All right, without further ado, I hope that sounds fun to you. + +04:55.850 --> 04:59.060 +I hope it sounds exciting because it's going to be it's going to be great. + +04:59.090 --> 05:00.320 +Uh, I can't wait to do it. + +05:00.320 --> 05:01.700 +Let's go and do it right now. 
diff --git a/week5/community-contributions/subtitles/srts/59166981/ja_JP.srt b/week5/community-contributions/subtitles/srts/59166981/ja_JP.srt new file mode 100755 index 0000000..5d707c1 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166981/ja_JP.srt @@ -0,0 +1,169 @@ +WEBVTT + +00:00.980 --> 00:04.040 +第2週5日目へようこそ。 + +00:04.070 --> 00:09.050 +多くのことがまとまりつつある第2週の最終日。 + +00:09.050 --> 00:18.620 +今日は本当に、 本当に楽しい日になりそうだから。 + +00:18.620 --> 00:21.530 +これに参加するのが楽しみだ。 + +00:21.890 --> 00:24.410 +2週目の大きな締めくくりだ。 + +00:24.740 --> 00:27.800 +繰り返しになるが、 私はあなたに何ができるかを言い続けるつもりだ。 + +00:27.800 --> 00:32.840 +自分のスキルアップを祝うことはとても重要だと思う。 + +00:32.840 --> 00:39.800 +フロンティアAPIに対してコーディングし、 AIアシスタントを構築し、 専門知識を与えるツールを追加することができる。 + +00:39.830 --> 00:42.440 +今日はエージェントを紹介しよう。 + +00:42.440 --> 00:48.650 +私たちは、 エージェントがより高度な逐次的活動を行う方法について話します。 + +00:48.650 --> 00:56.450 +そして、 エージェントやツールを使ってマルチモーダルAIアシスタントを作るという、 とても楽しいこともやっています。 + +00:57.620 --> 00:59.390 +では、 エージェントとは何なのか? + +00:59.720 --> 01:01.190 +エージェントと言うべきだろう。 + +01:01.220 --> 01:02.390 +エージェントI。 + +01:02.420 --> 01:02.900 +エージェントだ。 + +01:03.530 --> 01:07.790 +これは、 人々がさまざまな文脈で使うことができる包括的な用語のひとつである。 + +01:07.790 --> 01:12.140 +だから、 それは人によって意味が違うことのひとつなんだ。 + +01:12.140 --> 01:17.660 +しかし、 一般的に言えば、 多くの場合、 人々は自律的なソフトウェア・エンティティについて話している。 + +01:17.660 --> 01:25.640 +入力プロンプトを受けてテキストを生成するという意味だけでなく、 タスクを実行することもできる。 + +01:25.820 --> 01:27.530 +うーん、 典型的な特徴だね。 + +01:27.530 --> 01:28.700 +彼らが自律的だとしよう。 + +01:28.700 --> 01:33.740 +彼らはある種の主体性を持っていて、 目標志向で、 何かしらの目的を持っていて、 + +01:33.740 --> 01:37.520 +タスクが具体的なのだ。 + +01:37.520 --> 01:42.620 +彼らはたいてい、 何か一つのことに特化している。 + +01:43.010 --> 01:48.230 +一般的には、 エージェントフレームワークと呼ばれるものの一部として設計され、 + +01:48.230 --> 01:56.450 +エージェントがより複雑な問題を解決するために相互作用できる環境のようなものです。 + +01:56.450 --> 02:00.020 +だから、 人間に対する一種のリクエスト・レスポンスとは違うんだ。 + +02:00.020 --> 02:06.150 +しかし、 このような環境では、 従来のソフトウェアとllmsを組み合わせた複数のソフトウェアエージェントが、 + +02:06.150 --> 02:12.690 +タスクを遂行するために相互作用することが想像できる。 + +02:12.690 --> 02:19.770 +期待される機能としては、 
単なるリクエスト・レスポンスにとどまらないメモリーや永続性を持つ能力、 + +02:19.770 --> 02:30.750 +ある種の意思決定やオーケストレーションができる能力、 プランニング能力などがある。 + +02:30.930 --> 02:36.240 +そして、 それはただ単に、 環境に組み込まれたプランニングの問題であることもある。 + +02:36.240 --> 02:40.410 +企画を担当するLLMがいることもある。 + +02:40.410 --> 02:47.400 +複雑な問題を、 他のモデルが処理できるように小さな問題に分解する方法を知っているモデルなのだ。 + +02:47.880 --> 02:53.310 +そして、 道具を使うことも遺伝的AIの一例であることが多い。 + +02:53.310 --> 02:59.370 +もちろん、 皆さんもよくご存知のように、 私たちはモデルにデータベースへの接続やインターネットへの接続など、 + +02:59.370 --> 03:10.450 +好きなことをさせる機能を与えている。 + +03:10.450 --> 03:18.580 +これは単なるif文に過ぎないが、 Llmsにこのようなことができるという効果を与えている。 + +03:19.960 --> 03:22.540 +だから、 これからいくつかやることがある。 + +03:22.540 --> 03:26.170 +簡単に状況を説明しよう。 + +03:26.200 --> 03:34.390 +まず、 マルチモーダルなユースケースに適した、 画像を生成する機能を構築する。 + +03:34.390 --> 03:37.990 +それができるLLMコールを用意するつもりだ。 + +03:37.990 --> 03:39.760 +そして、 それを実行する機能になる。 + +03:39.760 --> 03:42.670 +そして、 それ自体がエージェントのようなものだと考えることもできる。 + +03:42.670 --> 03:49.540 +これは、 非常に特殊で専門的な指導を受け、 それを実行できるソフトウェアのようなものだ。 + +03:49.540 --> 04:02.240 +これは、 Dall-E three、 つまりOpenAIの画像生成モデルの助けを借りて、 コードで作成したアーティストになります。 + +04:02.480 --> 04:07.910 +そして、 もし屁理屈をこねたいのであれば、 イメージの生成自体はLM的なものではない、 + +04:07.910 --> 04:09.650 +と主張することもできる。 + +04:09.680 --> 04:18.380 +ええと、 lmは言語モデルのことですが、 最近では一般的に、 lmsはより広範なgen AI文脈と同じ意味で使われています。 + +04:18.380 --> 04:28.100 +だから、 画像生成や他の種類のマルチモーダル生成は、 LMエンジニアのツールキットに含まれると考えがちだ。 + +04:29.120 --> 04:36.290 +だから、 私たちはエージェントを作るために、 これらの、 あー、 これらの、 これらの、 これらの、 これらの機能ができるようにするんだ。 + +04:36.290 --> 04:52.580 +そして、 画像だけでなく音も追加し、 AIアシスタントに話し方や絵の描き方を教えるエージェントフレームワークを導入する予定です。 + +04:52.760 --> 04:55.820 +さて、 前置きはこれくらいにして、 楽しそうだと思われただろうか。 + +04:55.850 --> 04:59.060 +エキサイティングに聞こえることを願っているよ。 + +04:59.090 --> 05:00.320 +早くやりたいよ。 + +05:00.320 --> 05:01.700 +さあ、 今すぐ行こう。 diff --git a/week5/community-contributions/subtitles/srts/59166981/ko_KR.srt b/week5/community-contributions/subtitles/srts/59166981/ko_KR.srt new file mode 100755 index 0000000..ec493ab --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59166981/ko_KR.srt @@ -0,0 
+1,208 @@ +WEBVTT + +00:00.980 --> 00:04.040 +둘째 주, 5일째예요 + +00:04.070 --> 00:09.050 +2주 차 마지막 날입니다 많은 일이 벌어지고 있죠 + +00:09.050 --> 00:16.100 +계속 함께해 줘서 정말 고마워요 보람을 느끼게 해 줄게요 오늘은 + +00:16.100 --> 00:18.620 +정말 재미있을 거예요 + +00:18.620 --> 00:21.530 +Get it가 기대되네요 + +00:21.890 --> 00:24.410 +둘째 주의 대망의 결말이죠 + +00:24.740 --> 00:27.800 +전 계속 여러분이 뭘 할 수 있는지 말할 거예요 + +00:27.800 --> 00:32.840 +트랜스포머의 성공을 축하하는 건 정말 중요해요 + +00:32.840 --> 00:38.660 +프론티어 API에 대항해 코드를 작성할 수 있고 인공지능 보조를 만들 수 있고 전문 지식을 줄 도구를 + +00:38.660 --> 00:39.800 +추가할 수 있죠 + +00:39.830 --> 00:42.440 +오늘은 에이전트를 소개하죠 + +00:42.440 --> 00:48.650 +요원이 보다 진보된 순차적 작업을 수행하는 방법을 얘기했죠 + +00:48.650 --> 00:56.450 +그리고 아주 재미있는 걸 할 거예요 에이전트와 도구를 이용해 다중 모듈 인공지능 보조를 만드는 거죠 + +00:57.620 --> 00:59.390 +에이전트가 뭐죠? + +00:59.720 --> 01:01.190 +요원이라고 해야겠죠 + +01:01.220 --> 01:02.390 +에이전트 I요 + +01:02.420 --> 01:02.900 +에이전트요 + +01:03.530 --> 01:07.790 +다양한 상황에서 사용할 수 있는 우산형 용어 중 하나죠 + +01:07.790 --> 01:12.140 +사람마다 다른 의미를 가질 수 있는 거예요 + +01:12.140 --> 01:17.660 +하지만 일반적으로 사람들은 자율적인 소프트웨어 엔터티라고 하죠 + +01:17.660 --> 01:25.640 +입력 프롬프트와 텍스트 생성에서만이 아니라 다른 작업도 수행할 수 있죠 + +01:25.820 --> 01:27.530 +전형적인 특징이죠 + +01:27.530 --> 01:28.700 +자율적이라 치죠 + +01:28.700 --> 01:33.740 +일종의 기관이 있고 목표 지향적이며 어떤 일을 하려고 + +01:33.740 --> 01:37.520 +설정하고 작업에 구체적이죠 + +01:37.520 --> 01:42.620 +보통 한 가지에 특화된 사람들이에요 + +01:43.010 --> 01:48.230 +에이전트 프레임워크라는 것의 일부로 설계되었는데 인간의 + +01:48.230 --> 01:55.190 +제한적인 개입으로 더 복잡한 문제를 해결하기 위해 에이전트가 상호 작용하는 + +01:55.190 --> 01:56.450 +환경이죠 + +01:56.450 --> 02:00.020 +인간에게 요청하는 요청 대응 상황이 아니에요 + +02:00.020 --> 02:06.150 +하지만 이런 환경을 상상해 보세요 작업을 수행하기 위해 기존 소프트웨어와 상호 + +02:06.150 --> 02:12.690 +작용하기 위해 다중 소프트웨어 에이전트가 llms의 조합이 될 수 있는 환경이요 + +02:12.690 --> 02:19.770 +여러분이 기대할 수 있는 기능은 메모리와 지속성인데 요청 응답을 + +02:19.770 --> 02:26.820 +넘어서 의사 결정과 오케스트레이션을 할 수 있고 무엇을 계획할 + +02:26.820 --> 02:30.750 +수 있는지에 관한 거죠 + +02:30.930 --> 02:36.240 +때로는 환경의 문제일 뿐입니다 그에 대한 어떤 계획으로 코드되어 있는 거죠 + +02:36.240 --> 02:40.410 +계획을 책임지는 LLM이 있을 때도 있어요 + +02:40.410 
--> 02:45.840 +복잡한 문제를 작은 문제로 쪼개서 다른 모델이 해결하도록 + +02:45.840 --> 02:47.400 +하는 모델이죠 + +02:47.880 --> 02:53.310 +도구를 사용하는 건 유전적 인공지능의 예죠 + +02:53.310 --> 02:59.370 +이제 익숙해지셨겠지만 모델에 데이터베이스나 인터넷 연결 등 + +02:59.370 --> 03:06.910 +원하는 모든 것에 연결할 수 있는 기능을 제공합니다 기능에 액세스를 제공하고 + +03:06.910 --> 03:10.450 +어떻게 작동하는지도 아니까요 + +03:10.450 --> 03:17.440 +그냥 if문인 걸 알지만 이건 Lms가 이걸 할 수 있다는 효과를 + +03:17.440 --> 03:18.580 +주죠 + +03:19.960 --> 03:22.540 +몇 가지 할 게 있어요 + +03:22.540 --> 03:26.170 +상황을 간단히 설명해 드리죠 + +03:26.200 --> 03:34.390 +먼저 이미지를 생성할 수 있는 함수를 만들겠습니다 좋은 다중 모듈 사용 사례죠 + +03:34.390 --> 03:37.990 +그걸 할 수 있는 LLM 호출을 할 거예요 + +03:37.990 --> 03:39.760 +그걸 하는 함수가 되겠죠 + +03:39.760 --> 03:42.670 +그 자체로 에이전트라고 볼 수 있어요 + +03:42.670 --> 03:49.540 +아주 구체적이고 전문적인 지시를 그대로 실행하는 소프트웨어 같아요 + +03:49.540 --> 03:58.990 +코드로 아티스트 작업을 할 거예요 오픈AI의 이미지 생성 모델인 달리 + +03:58.990 --> 04:02.240 +3의 도움을 받아서요 + +04:02.480 --> 04:07.910 +굳이 트집을 잡자면 이미지 생성은 LM과 무관하다고 주장할 + +04:07.910 --> 04:09.650 +수도 있어요 + +04:09.680 --> 04:16.250 +lm은 언어 모델이지만 요즘엔 일반적으로 더 넓은 세대 인공지능 컨텍스트와 + +04:16.250 --> 04:18.380 +교환적으로 사용되죠 + +04:18.380 --> 04:24.590 +이미지 생성이나 다른 다중 모듈 생성 역시 LM 엔지니어의 도구 키트에 + +04:24.590 --> 04:28.100 +포함된다고 생각하는 경향이 있죠 + +04:29.120 --> 04:35.510 +에이전트를 만드는 걸 살펴볼 겁니다 이런 작업을 할 수 있는 이런 + +04:35.510 --> 04:36.290 +함수요 + +04:36.290 --> 04:43.700 +이미지뿐 아니라 소리도 추가할 거예요 에이전트 프레임워크를 만들어 + +04:43.730 --> 04:50.000 +인공지능 보조를 가르칠 거예요 우리가 말하고 그리는 법을 연구했던 + +04:50.030 --> 04:52.580 +항공사 보조요 + +04:52.760 --> 04:55.820 +그럼 바로 시작하죠 재미있을 것 같아요? + +04:55.850 --> 04:59.060 +흥미진진하게 들리면 좋겠네요 정말 멋질 테니까요 + +04:59.090 --> 05:00.320 +빨리 하고 싶어요 + +05:00.320 --> 05:01.700 +지금 당장 가죠 diff --git a/week5/community-contributions/subtitles/srts/59167007/en_US.srt b/week5/community-contributions/subtitles/srts/59167007/en_US.srt new file mode 100755 index 0000000..771e2be --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59167007/en_US.srt @@ -0,0 +1,205 @@ +WEBVTT + +00:00.500 --> 00:02.780 +Well, how fabulous is that? 
+
+00:02.780 --> 00:09.620
+I hope that you are as wowed as I am by our new airline AI assistant and everything it can do.
+
+00:09.620 --> 00:16.610
+I've taken another screenshot here of a conversation I'd had, and you can see again that gorgeous
+
+00:16.610 --> 00:17.690
+image of London.
+
+00:17.720 --> 00:18.740
+A very different approach now.
+
+00:18.740 --> 00:22.430
+Not the montage, but something rather simpler.
+
+00:22.430 --> 00:28.790
+Uh, I find it astounding that you get such variety, such diversity of images.
+
+00:28.850 --> 00:34.280
+Um, and I also find it astounding that it's so easy to put together these sophisticated frameworks
+
+00:34.280 --> 00:36.140
+involving lots of functionality.
+
+00:36.230 --> 00:39.890
+Remember, we also had our tool running there, looking up the prices.
+
+00:39.890 --> 00:46.400
+Uh, everything we had together was a very sophisticated, complex app, complete with user interface.
+
+00:46.400 --> 00:51.470
+And we did it all just in a few hours worth of work.
+
+00:53.000 --> 00:56.030
+So it's a congratulations.
+
+00:56.030 --> 01:01.370
+But as always, there's a challenge for you and if I may say, one more time, the best way to learn
+
+01:01.370 --> 01:02.150
+is by doing.
+
+01:02.150 --> 01:07.160
+It is incredibly important that you now go and do some exercises and work on this to improve it.
+
+01:07.190 --> 01:09.930
+It's also a lot of fun as an extra bonus.
+
+01:09.930 --> 01:11.880
+So here are some of the things you can do.
+
+01:11.910 --> 01:16.590
+We talked before about adding in another tool to make a booking.
+
+01:16.620 --> 01:21.360
+In theory, not obviously a real booking, but make a booking and then it should print to an output
+
+01:21.360 --> 01:22.680
+that a booking has been made.
+
+01:22.680 --> 01:27.120
+Or maybe if you want, you could have it write to a file or something like that to give you a
+
+01:27.120 --> 01:28.740
+sense that the booking has happened.
+ +01:28.770 --> 01:29.940 +Add that as a tool. + +01:29.940 --> 01:33.270 +Hopefully you've done it already, but if not, now's a good time to do it. + +01:33.360 --> 01:35.400 +Then add another agent. + +01:35.400 --> 01:41.040 +Uh, have an agent that is able to translate all of the responses to a different language. + +01:41.040 --> 01:43.500 +Uh, something that we'd suggested for a previous project. + +01:43.500 --> 01:48.060 +But do that and then show it on the right hand side and use a different frontier model. + +01:48.090 --> 01:54.240 +How about Claude, for example, use Claude as a way to translate to another language of your choosing, + +01:54.240 --> 01:58.140 +and then you'd have to do some radio work to add another panel. + +01:58.290 --> 02:00.450 +With that translation. + +02:00.450 --> 02:03.510 +There will be a little bit of futzing around with Gradio when you do that. + +02:03.510 --> 02:09.660 +So you may you may find that that requires a little bit of googling, but hopefully you'll get an idea + +02:09.690 --> 02:10.770 +or you don't need to Google. + +02:10.770 --> 02:17.520 +You could actually ask, uh, Claude yourself for some advice on how to extend that Gradio app to add + +02:17.520 --> 02:23.970 +in that extra section to reflect the translation that it makes into another language. + +02:23.970 --> 02:28.650 +You'll find that when you do something like that, you provide a bunch of code and ask it to extend + +02:28.650 --> 02:35.520 +it, to do more, to add more capabilities, that these models are excellent at that kind of, uh, + +02:35.520 --> 02:37.410 +that kind of iterating on code. + +02:37.800 --> 02:45.870 +And then finally, since we've been enjoying multi-modality, one more multimodal task for you is audio + +02:45.870 --> 02:52.890 +to text, uh, add an agent that can listen to audio from your audio input source and turn it into text + +02:52.890 --> 02:55.560 +as the input to the AI assistant. 
+ +02:55.560 --> 02:57.150 +And then you've really completed the loop. + +02:57.150 --> 03:01.950 +You'll be able to talk to it, and it will be able to talk back and draw images. + +03:01.950 --> 03:05.040 +When you were looking to ask for ticket prices. + +03:05.040 --> 03:08.640 +And that will then complete the week two challenge. + +03:08.640 --> 03:15.780 +And at that point you will be very familiar with Multi-modality and with using these, uh, stitching + +03:15.810 --> 03:19.860 +together these different agents to carry out a bigger task. + +03:21.960 --> 03:24.790 +And at that point, May I tell you? + +03:24.820 --> 03:29.260 +You are now 25% of the way to mastering LM engineering. + +03:29.290 --> 03:30.460 +25% of the way. + +03:30.490 --> 03:31.720 +A quarter of the way through. + +03:31.750 --> 03:36.520 +You can describe Transformers comfortably, including all of the terminology. + +03:36.520 --> 03:43.900 +You can code against the APIs, and you can build multimodal assistance using UIs, using tools, using + +03:43.900 --> 03:44.560 +agents. + +03:44.590 --> 03:47.710 +This is practically second nature to you at this point. + +03:48.010 --> 03:48.340 +Uh. + +03:48.370 --> 03:57.910 +Next week, change in topic to something that is absolutely wonderful the thriving open source community + +03:57.910 --> 04:05.110 +and and thriving capabilities that you have access to through open source, you're going to get to know + +04:05.140 --> 04:07.720 +hugging face really well, really, really well. + +04:07.720 --> 04:14.050 +You're going to work with pipelines and also with Tokenizers and with the models themselves with transformer + +04:14.050 --> 04:15.040 +models. + +04:15.280 --> 04:21.430 +And ultimately you're going to be running inference of open source models using Google Colab on their + +04:21.430 --> 04:23.020 +boxes with GPUs. + +04:23.020 --> 04:30.010 +And so by the end of the week, you'll be highly proficient with inference of open source models. 
+ +04:30.010 --> 04:31.540 +And I can't wait to get to it. + +04:31.540 --> 04:32.740 +And I will see you then. diff --git a/week5/community-contributions/subtitles/srts/59167007/ja_JP.srt b/week5/community-contributions/subtitles/srts/59167007/ja_JP.srt new file mode 100755 index 0000000..de83f1b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59167007/ja_JP.srt @@ -0,0 +1,163 @@ +WEBVTT + +00:00.500 --> 00:02.780 +なんて素晴らしいんだ + +00:02.780 --> 00:09.620 +私たちの新しい航空会社、 アイ・アシスタントとそのできることすべてに、 私と同じように驚いてほしい。 + +00:09.620 --> 00:17.690 +ここでもう一枚、 私が交わした会話のスクリーンショットを撮ったのだが、 ロンドンのあのゴージャスなイメージをもう一度見ることができる。 + +00:17.720 --> 00:18.740 +今はまったく違うアプローチだ。 + +00:18.740 --> 00:22.430 +モンタージュではなく、 もっとシンプルなものだ。 + +00:22.430 --> 00:28.790 +これほどバラエティーに富んだ、 多様なイメージが得られるというのは驚きです。 + +00:28.850 --> 00:36.140 +それに、 多くの機能を含む洗練されたフレームワークを簡単に組み立てることができるのも驚きだ。 + +00:36.230 --> 00:39.890 +私たちはそこでツールを動かし、 価格を調べていたことも覚えている。 + +00:39.890 --> 00:46.400 +私たちが一緒に作ったものは、 ユーザーインターフェイスを備えた非常に洗練された複雑なアプリだった。 + +00:46.400 --> 00:51.470 +しかも、 数時間分の作業ですべてをやり遂げた。 + +00:53.000 --> 00:56.030 +おめでとう。 + +00:56.030 --> 01:02.150 +しかし、 いつものように、 あなたには挑戦がある。 もう一度言わせてもらえば、 学ぶ最善の方法は実践することだ。 + +01:02.150 --> 01:07.160 +それを改善するために、 今からエクササイズをしたり、 取り組んだりすることが非常に重要だ。 + +01:07.190 --> 01:09.930 +おまけにとても楽しい。 + +01:09.930 --> 01:11.880 +そこで、 あなたができることをいくつか紹介しよう。 + +01:11.910 --> 01:16.590 +以前、 予約のための別のツールを追加するという話をした。 + +01:16.620 --> 01:22.680 +理屈の上では、 明らかに実際の予約ではなく、 予約を行い、 予約が行われたことを出力する必要があります。 + +01:22.680 --> 01:28.740 +あるいは、 必要であれば、 ファイルか何かに書き込んで、 予約が起こったことを知らせることもできるだろう。 + +01:28.770 --> 01:29.940 +それを道具として加える。 + +01:29.940 --> 01:33.270 +すでに済んでいればいいが、 そうでなければ今がチャンスだ。 + +01:33.360 --> 01:35.400 +それから別のエージェントを加える。 + +01:35.400 --> 01:41.040 +ええと、 すべての返答を異なる言語に翻訳できるエージェントを雇ってください。 + +01:41.040 --> 01:43.500 +ええと、 以前のプロジェクトで提案したものなんだ。 + +01:43.500 --> 01:48.060 +しかし、 それを右側に表示し、 別のフロンティアモデルを使う。 + +01:48.090 --> 01:58.140 +例えばクロードはどうだろう。 クロードを好きな別の言語に翻訳する方法として使い、 別のパネルを追加するために無線作業をしなければならない。 + +01:58.290 
--> 02:00.450 +その翻訳で。 + +02:00.450 --> 02:03.510 +その際、 グラディオを少しいじらなければならない。 + +02:03.510 --> 02:10.770 +そのため、 少しググる必要があるかもしれないが、 うまくいけばアイデアが得られるかもしれないし、 ググる必要もないだろう。 + +02:10.770 --> 02:17.520 +Gradioアプリを拡張して、 他言語への翻訳を反映する追加セクションを追加する方法について、 + +02:17.520 --> 02:23.970 +クロード自身にアドバイスを求めることもできるだろう。 + +02:23.970 --> 02:28.650 +コードの束を提供し、 それを拡張したり、 より多くの機能を追加したりするよう求めるようなことをすると、 + +02:28.650 --> 02:37.410 +これらのモデルはそのような、 コードの反復に優れていることがわかるだろう。 + +02:37.800 --> 02:45.870 +そして最後に、 マルチモーダリティを楽しんできたので、 もう1つマルチモーダルなタスクとして、 音声をテキストに変換する、 + +02:45.870 --> 02:55.560 +つまり、 音声入力ソースから音声を聞き、 それをAIアシスタントへの入力としてテキストに変換できるエージェントを追加します。 + +02:55.560 --> 02:57.150 +そして、 ループを完成させる。 + +02:57.150 --> 03:01.950 +あなたはそれに話しかけることができるようになり、 それは言葉を返してイメージを描くことができるようになる。 + +03:01.950 --> 03:05.040 +チケットの値段を聞こうと思ったとき。 + +03:05.040 --> 03:08.640 +これで2週目のチャレンジは終了だ。 + +03:08.640 --> 03:19.860 +その時点で、 あなたはマルチモダリティに精通し、 より大きな仕事を遂行するために、 さまざまなエージェントをつなぎ合わせて使うことになる。 + +03:21.960 --> 03:24.790 +その時、 私はこう言った。 + +03:24.820 --> 03:29.260 +これでLMエンジニアリングをマスターする道のりは25%になった。 + +03:29.290 --> 03:30.460 +全体の25%だ。 + +03:30.490 --> 03:31.720 +4分の1は終わった。 + +03:31.750 --> 03:36.520 +あなたはトランスフォーマーを、 すべての用語を含めて快適に説明することができます。 + +03:36.520 --> 03:44.560 +あなたはAPIに対してコードを書くことができ、 UIを使い、 ツールを使い、 エージェントを使い、 マルチモーダルアシスタンスを構築することができる。 + +03:44.590 --> 03:47.710 +この時点では、 これはほとんど自然なことだ。 + +03:48.010 --> 03:48.340 +ええと。 + +03:48.370 --> 03:57.910 +来週は、 オープンソースコミュニティの繁栄と、 オープンソースを通じてアクセスできる能力の繁栄という、 + +03:57.910 --> 04:07.720 +絶対に素晴らしいものに話題を変えて、 ハグ顔を本当によく、 本当によく知ることになる。 + +04:07.720 --> 04:15.040 +パイプラインやトーケナイザー、 トランスフォーマーモデルを使ったモデルそのものを扱うことになる。 + +04:15.280 --> 04:23.020 +そして最終的には、 GPUを搭載したGoogle Colabを使ってオープンソースのモデルの推論を実行することになる。 + +04:23.020 --> 04:30.010 +今週が終わるころには、 オープンソースモデルの推論に習熟していることだろう。 + +04:30.010 --> 04:31.540 +そして、 早くそれを手に入れたい。 + +04:31.540 --> 04:32.740 +その時にまた会おう。 diff --git a/week5/community-contributions/subtitles/srts/59167007/ko_KR.srt b/week5/community-contributions/subtitles/srts/59167007/ko_KR.srt new file 
mode 100755 index 0000000..c25d610 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59167007/ko_KR.srt @@ -0,0 +1,205 @@ +WEBVTT + +00:00.500 --> 00:02.780 +정말 멋지지 않아요? + +00:02.780 --> 00:09.620 +우리 새 항공사, 아이 어시스턴트와 그 모든 것에 저만큼 놀라셨길 바라요 + +00:09.620 --> 00:16.610 +제가 나눈 대화를 스크린샷으로 찍어 봤어요 런던의 아름다운 모습을 다시 한번 + +00:16.610 --> 00:17.690 +볼 수 있죠 + +00:17.720 --> 00:18.740 +지금은 아주 다른 접근법이죠 + +00:18.740 --> 00:22.430 +몽타주보다는 좀 더 간단한 거요 + +00:22.430 --> 00:28.790 +이렇게 다양하고 다양한 이미지를 볼 수 있다니 놀라워요 Get it + +00:28.850 --> 00:34.280 +또 하나 놀라운 건 이렇게 정교한 프레임워크를 조립하기가 이렇게 쉽다는 거예요 많은 기능성 요소를 + +00:34.280 --> 00:36.140 +포함해서요 TFI, TFI + +00:36.230 --> 00:39.890 +그리고 가격표를 보기 위해 툴을 실행시켰죠 + +00:39.890 --> 00:46.400 +우리가 함께 만든 모든 건 사용자 인터페이스가 있는 아주 정교하고 복잡한 앱이었어요 + +00:46.400 --> 00:51.470 +그 모든 걸 몇 시간 만에 해냈죠 + +00:53.000 --> 00:56.030 +축하의 의미군요 + +00:56.030 --> 01:01.370 +하지만 늘 그렇듯 도전은 있어요 다시 한번 말하지만 가장 좋은 방법은 직접 해 보는 + +01:01.370 --> 01:02.150 +거예요 + +01:02.150 --> 01:07.160 +이제 여러분이 이 문제를 개선하기 위해 운동하고 노력해야 해요 + +01:07.190 --> 01:09.930 +보너스로 재미도 쏠쏠하죠 + +01:09.930 --> 01:11.880 +여러분이 할 수 있는 걸 알려드리죠 + +01:11.910 --> 01:16.590 +예약을 위한 다른 도구를 추가하는 것에 대해 전에 얘기했었죠 + +01:16.620 --> 01:21.360 +이론상으로는 진짜 예약은 아니지만 예약을 하고 나면 예약이 된 출력물로 + +01:21.360 --> 01:22.680 +프린트되어야 하죠 + +01:22.680 --> 01:27.120 +원한다면 파일에 작성하게 할 수도 있어요 예약이 됐다는 + +01:27.120 --> 01:28.740 +느낌을 주는 거죠 + +01:28.770 --> 01:29.940 +도구로 추가하세요 + +01:29.940 --> 01:33.270 +이미 해보셨길 바라지만 아니라면 지금이 좋은 때예요 + +01:33.360 --> 01:35.400 +그럼 에이전트를 추가해요 + +01:35.400 --> 01:41.040 +모든 응답을 다른 언어로 번역할 요원이 필요해요 + +01:41.040 --> 01:43.500 +전에 했던 프로젝트에서 제안했던 거예요 + +01:43.500 --> 01:48.060 +하지만 그렇게 하고 오른쪽에 다른 프론티어 모델을 사용하세요 + +01:48.090 --> 01:54.240 +예를 들어 클로드를 이용해서 원하는 다른 언어로 통역해 보세요 그리고 라디오 + +01:54.240 --> 01:58.140 +작업을 해서 패널을 하나 더 추가하고요 + +01:58.290 --> 02:00.450 +그 말을 번역하면요 + +02:00.450 --> 02:03.510 +그러면서도 그라디오의 비트를 약간 손봐야 해요 + +02:03.510 --> 02:09.660 +구글링으로 검색해봐야 알 수 있을 거예요 비트가 떠오르면 구글링할 필요 + +02:09.690 --> 02:10.770 +없어요 + 
+02:10.770 --> 02:17.520 +클로드한테 조언을 구할 수도 있어요 그래디오 앱을 확장해서 번역된 + +02:17.520 --> 02:23.970 +내용을 다른 언어로 반영할 추가 섹션에 추가할 방법요 + +02:23.970 --> 02:28.650 +이런 작업을 할 때는 코드를 잔뜩 제공하고 확장하고, 더 + +02:28.650 --> 02:35.520 +하고, 더 많은 기능을 추가하라고 요청하죠 이런 모델은 그런 종류의 코드에서의 반복에 + +02:35.520 --> 02:37.410 +아주 뛰어나요 + +02:37.800 --> 02:45.870 +끝으로, 다중 모듈을 즐기고 계시니 한 가지 더 다중 모듈 작업은 오디오 투 텍스트입니다 + +02:45.870 --> 02:52.890 +에이전트를 추가해 오디오 입력 소스에서 오디오를 듣고 인공지능 보조의 입력으로 + +02:52.890 --> 02:55.560 +텍스트로 바꾸는 거죠 + +02:55.560 --> 02:57.150 +그럼 루프가 완성되는 거죠 + +02:57.150 --> 03:01.950 +대화도 할 수 있고 대화도 하고 그림도 그릴 수 있죠 + +03:01.950 --> 03:05.040 +티켓 가격을 물어보려고 할 때요 + +03:05.040 --> 03:08.640 +그러면 둘째 주 과제가 끝나요 + +03:08.640 --> 03:15.780 +그때쯤이면 더 큰 작업을 수행하기 위해 서로 다른 요소들을 꿰매는 + +03:15.810 --> 03:19.860 +다중 양식에 익숙해지겠죠 + +03:21.960 --> 03:24.790 +그 시점에서, 말해도 될까요? + +03:24.820 --> 03:29.260 +이제 달 착륙 엔지니어링의 25%를 완성했어요 + +03:29.290 --> 03:30.460 +25%는 성공했죠 + +03:30.490 --> 03:31.720 +4분의 1이 지났어요 + +03:31.750 --> 03:36.520 +트랜스포머를 편하게 묘사할 수 있어요 모든 용어도 포함해서요 + +03:36.520 --> 03:43.900 +API에 대해 코드도 할 수 있고 UI, 도구, 에이전트를 이용해 다중 모듈 보조를 구축할 수도 + +03:43.900 --> 03:44.560 +있어요 + +03:44.590 --> 03:47.710 +이젠 이런 게 당신에겐 제2의 천성이군요 + +03:48.010 --> 03:48.340 +네 + +03:48.370 --> 03:57.910 +다음 주엔 정말 멋진 것으로 주제를 바꿉니다 오픈 소스 커뮤니티와 오픈 소스를 통해 액세스할 + +03:57.910 --> 04:05.110 +수 있는 역량이 번창하는 것으로요 여러분은 당혹스러운 얼굴을 아주 + +04:05.140 --> 04:07.720 +아주 잘 알게 될 거예요 + +04:07.720 --> 04:14.050 +파이프라인도 작업하고 Tokenizers도 작업하고 트랜스포머 모델 그 자체도 + +04:14.050 --> 04:15.040 +작업하죠 + +04:15.280 --> 04:21.430 +궁극적으로 여러분은 오픈 소스 모델을 실행할 겁니다 구글 Colab을 이용해 GPU와 + +04:21.430 --> 04:23.020 +함께 상자에서요 + +04:23.020 --> 04:30.010 +주말쯤엔 오픈 소스 모델 추론에 아주 능숙해지실 거예요 + +04:30.010 --> 04:31.540 +빨리 get it로 가고 싶네요 + +04:31.540 --> 04:32.740 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59167009/en_US.srt b/week5/community-contributions/subtitles/srts/59167009/en_US.srt new file mode 100755 index 0000000..7355482 --- /dev/null +++ 
b/week5/community-contributions/subtitles/srts/59167009/en_US.srt @@ -0,0 +1,304 @@ +WEBVTT + +00:00.740 --> 00:01.910 +Welcome back. + +00:01.910 --> 00:04.220 +It's time to make our full agent framework. + +00:04.220 --> 00:05.630 +I'm super excited about this. + +00:05.660 --> 00:10.490 +It's pulling everything together that we've been doing before, and I think you'll be very happy with + +00:10.490 --> 00:11.510 +the outcome. + +00:11.720 --> 00:13.730 +Uh, so just a quick recap. + +00:13.730 --> 00:14.870 +An agent framework. + +00:14.960 --> 00:17.840 +The term agent I as I said, it's an umbrella term. + +00:17.840 --> 00:20.120 +It can refer to a bunch of different techniques. + +00:20.240 --> 00:23.060 +Um, for example, it can be any of these five. + +00:23.090 --> 00:28.250 +It can be about breaking a complex problem into smaller steps with multiple models carrying out different + +00:28.250 --> 00:29.420 +specialized tasks. + +00:29.420 --> 00:33.890 +It can be the ability for an LLM to have tools to give them extra capabilities. + +00:33.890 --> 00:41.000 +It can be, uh, talking about the agent environment, which is the setup or the agent framework that + +00:41.000 --> 00:43.160 +allows agents to collaborate. + +00:43.190 --> 00:50.270 +Um, it can be the idea that one LLM can act as a planner, dividing tasks into smaller ones, that + +00:50.270 --> 00:55.100 +specialists that can themselves be llms or bits of software can carry out. 
+ +00:55.280 --> 01:00.210 +Um, and then there is another point here, which is that people talk about agentic AI when you're thinking + +01:00.210 --> 01:07.740 +about an agent having its own autonomy agency, uh, beyond necessarily just responding to a prompt, + +01:07.740 --> 01:13.050 +such as having memory, being able to sort of, uh, I don't know, do something like, uh, scrape + +01:13.050 --> 01:18.690 +the web for news information and using that to make decisions about buying or selling stocks, something + +01:18.690 --> 01:19.110 +like that. + +01:19.110 --> 01:26.040 +That is a kind of, uh, something that that exists outside the context of just say, a request response + +01:26.040 --> 01:26.670 +chat. + +01:26.670 --> 01:32.100 +So these are all the kinds of ways that that these are the kinds of things people are referring to when + +01:32.100 --> 01:34.890 +they talk about agentic AI and the use of agents. + +01:34.890 --> 01:39.510 +And what we're really doing here is we're talking about, uh, definitely number one and two there and + +01:39.510 --> 01:42.210 +to a certain extent, numbers three and five. + +01:42.300 --> 01:45.090 +But we're not we're not building an LLM that does the planning. + +01:45.090 --> 01:47.280 +That's not something we'll be doing in this session. + +01:47.280 --> 01:55.710 +So, uh, this should be somewhat familiar to you because this is the chat method that's quite close + +01:55.710 --> 01:57.000 +to what we had before. + +01:57.000 --> 01:59.710 +So you'll recognize a few things about this. + +01:59.710 --> 02:07.210 +This section here is the usual chat radio function that we know really well. + +02:07.210 --> 02:16.180 +It takes a message and a history, and it, uh, sort of unpacks that history into the format that OpenAI + +02:16.210 --> 02:20.200 +will expect and then calls the response. + +02:20.440 --> 02:26.080 +This part here will also look familiar to you because it's our use of tools. 
+ +02:26.080 --> 02:32.860 +It's where we find out if the model wants to call a tool, and if so, we handle that tool. + +02:33.010 --> 02:38.950 +Uh, but there's one little extra line just inserted in there, and it's that line there that what we're + +02:38.950 --> 02:44.710 +going to say is if the person does, if the model decides it needs to, to run the tool to find the + +02:44.710 --> 02:54.520 +price of a ticket, then we will also have the, um, artist generate an image to represent that city + +02:54.520 --> 02:56.200 +that's being looked up. + +02:56.200 --> 02:58.160 +So there we have it. + +02:58.220 --> 03:00.050 +Uh, that's, uh that's nice. + +03:00.050 --> 03:04.160 +And also, now, if you remember before I told you there was a reason I passed back city that you're + +03:04.160 --> 03:04.940 +going to find out. + +03:04.970 --> 03:05.690 +Here it is. + +03:05.690 --> 03:09.800 +That's why I needed the city to pass it to the artist. + +03:10.130 --> 03:13.880 +Um, and then, uh, this is all exactly the same. + +03:13.880 --> 03:16.040 +There's one more tiny change. + +03:16.040 --> 03:22.760 +Which is this here, which is that, uh, once I've collected the response from the model, I then call + +03:22.790 --> 03:26.840 +talker to make sure that we speak the response. + +03:26.840 --> 03:29.780 +So that is our chat. + +03:29.960 --> 03:32.270 +Uh, let's run that. + +03:33.920 --> 03:40.550 +Now, this, I should say, since I've always showed off about how easy Gradio is, this code is a little + +03:40.580 --> 03:41.420 +bit more involved. + +03:41.420 --> 03:46.970 +You may notice the reason is because we're now because we want to do a little bit more and show images. + +03:46.970 --> 03:55.190 +We're going outside the default, the sort of off the shelf, uh, chat user interface that Gradio provides + +03:55.190 --> 03:55.550 +for us. + +03:55.550 --> 03:58.120 +And we have to then build the interface ourselves. 
+ +03:58.150 --> 04:04.960 +And as a result, I've had to put together this interface that kind of puts together the various components + +04:04.960 --> 04:07.090 +like the input and the buttons. + +04:07.300 --> 04:10.180 +But what I'll say is this is still actually super straightforward. + +04:10.180 --> 04:11.470 +It still reads like English. + +04:11.470 --> 04:13.030 +It's very clear what's going on. + +04:13.030 --> 04:19.330 +You'll see everything that's happening here, and hopefully this will be quite readable for you. + +04:19.330 --> 04:26.740 +And you can use this to build more sophisticated chats, more sophisticated UIs yourself. + +04:26.740 --> 04:36.850 +So with that background, we now are going to run this to it's running and we'll bring that up. + +04:37.240 --> 04:41.650 +And here we have our chat with our new assistant. + +04:41.680 --> 04:43.300 +Let's give it a try. + +04:47.740 --> 04:48.280 +Hello. + +04:48.280 --> 04:49.630 +How can I assist you today. + +04:51.040 --> 04:52.090 +You like that? + +04:52.240 --> 04:53.650 +It spoke to us. + +04:53.740 --> 04:54.520 +There we go. + +04:54.520 --> 04:56.620 +That's the first use of an agent. + +04:56.620 --> 05:04.420 +We had a specialist model that's able to create, uh, audio, and we integrated that with our chatbot + +05:04.420 --> 05:07.510 +so that it was able to speak back to us. + +05:15.760 --> 05:17.020 +Great choice. + +05:17.080 --> 05:20.410 +Would you like to know the to the ticket price for a return trip to London? + +05:22.930 --> 05:24.220 +There we go. + +05:24.250 --> 05:27.220 +That's entertaining, let's say. + +05:34.240 --> 05:35.710 +We know there's a pause. + +05:43.090 --> 05:44.080 +Here we go. + +05:48.040 --> 05:51.220 +A return ticket to London is priced at 799. + +05:53.120 --> 05:54.920 +And there we have it. + +05:54.950 --> 05:58.340 +A return ticket to London is priced at $7.99. + +05:58.340 --> 06:00.290 +And there is the image. 
+ +06:00.290 --> 06:04.220 +And that image looks spectacular. + +06:04.370 --> 06:06.290 +A London bus in the middle. + +06:06.290 --> 06:07.700 +It's got Big Ben. + +06:07.700 --> 06:10.400 +It's got the bridge. + +06:10.400 --> 06:11.690 +It's got, uh. + +06:11.720 --> 06:14.420 +Yeah, I can see taxi there. + +06:14.450 --> 06:18.950 +It's just a great montage of images. + +06:19.160 --> 06:26.570 +Uh, and so I find this to be very compelling indeed, a wonderful example of what we're able to achieve + +06:26.570 --> 06:28.280 +with just a little bit of code. + +06:28.520 --> 06:37.550 +And so I present to you a multimodal app, complete with audio and some images running as part of what + +06:37.550 --> 06:48.050 +is a in a, in a small way, a multimodal agentic framework for talking to an airline AI assistant. + +06:48.140 --> 06:49.280 +Great work. + +06:49.310 --> 06:52.790 +I'll see you for the challenge of the week and the wrap up. diff --git a/week5/community-contributions/subtitles/srts/59167009/ja_JP.srt b/week5/community-contributions/subtitles/srts/59167009/ja_JP.srt new file mode 100755 index 0000000..f8fd647 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59167009/ja_JP.srt @@ -0,0 +1,250 @@ +WEBVTT + +00:00.740 --> 00:01.910 +お帰りなさい。 + +00:01.910 --> 00:04.220 +フルエージェントの枠組みを作る時が来た。 + +00:04.220 --> 00:05.630 +すごく楽しみだよ。 + +00:05.660 --> 00:11.510 +私たちがこれまでやってきたことをすべてまとめている。 + +00:11.720 --> 00:13.730 +ええと、 簡単にまとめると + +00:13.730 --> 00:14.870 +エージェントのフレームワーク。 + +00:14.960 --> 00:17.840 +エージェントという言葉は......さっきも言ったように、 包括的な言葉なんだ。 + +00:17.840 --> 00:20.120 +さまざまなテクニックを指すことがある。 + +00:20.240 --> 00:23.060 +例えば、 この5つのうちのどれでもいい。 + +00:23.090 --> 00:29.420 +それは、 複雑な問題をより小さなステップに分割し、 複数のモデルが異なる専門的なタスクを実行することである。 + +00:29.420 --> 00:33.890 +それは、 LLMが特別な能力を与えるツールを持つ能力である。 + +00:33.890 --> 00:43.160 +エージェント環境、 つまりエージェントが協力できるようにするためのセットアップやエージェントのフレームワークのことです。 + +00:43.190 --> 00:50.270 +つまり、 1人のLLMがプランナーとして機能し、 タスクをより小さなものに分割し、 
それ自身がLLMやソフトウェアの一部となりうるスペシャリストがそれを実行する、 + +00:50.270 --> 00:55.100 +というアイデアだ。 + +00:55.280 --> 01:00.210 +それは、 エージェントが、 単にプロンプトに応答するだけでなく、 独自の自律的なエージェンシーを持ち、 + +01:00.210 --> 01:19.110 +例えば、 記憶力を持ち、 ニュース情報をウェブにかき集め、 それを使って株の売り買いを判断するようなことができるようなエージェントを考えているときに、 人々はエージェント型AIについて話すということです。 + +01:19.110 --> 01:26.670 +それは、 リクエスト・レスポンス・チャットという文脈の外側に存在するものだ。 + +01:26.670 --> 01:34.890 +つまり、 エージェント型AIやエージェントの利用について語るとき、 人々が言及するのはこうした種類の方法ばかりなのだ。 + +01:34.890 --> 01:39.510 +そして、 私たちがここでやろうとしていることは、 1番と2番、 + +01:39.510 --> 01:42.210 +そして3番と5番についてだ。 + +01:42.300 --> 01:45.090 +しかし、 我々はプランニングを行うLLMを構築しているわけではない。 + +01:45.090 --> 01:47.280 +それはこのセッションでやることではない。 + +01:47.280 --> 01:57.000 +というわけで、 これは以前使っていたものにかなり近いチャット方法なので、 多少はなじみがあるはずだ。 + +01:57.000 --> 01:59.710 +だから、 これについてはいくつか知っていることがあるだろう。 + +01:59.710 --> 02:07.210 +このセクションは、 私たちがよく知っているいつものチャットラジオ機能だ。 + +02:07.210 --> 02:16.180 +メッセージと履歴を受け取り、 その履歴をOpenAIが期待するフォーマットに展開し、 + +02:16.210 --> 02:20.200 +レスポンスを呼び出す。 + +02:20.440 --> 02:26.080 +この部分は、 私たちの道具の使い方なので、 皆さんも見覚えがあるだろう。 + +02:26.080 --> 02:32.860 +モデルがツールを呼び出したいかどうかを調べ、 呼び出したい場合はそのツールを処理する。 + +02:33.010 --> 02:44.710 +その行は、 もし人が、 もしモデルがチケットの値段を調べるためにツールを実行する必要があると判断した場合、 + +02:44.710 --> 02:56.200 +アーティストに、 調べられる都市を表す画像を生成させるというものです。 + +02:56.200 --> 02:58.160 +そうだ。 + +02:58.220 --> 03:00.050 +ああ、 それは......いいね。 + +03:00.050 --> 03:04.940 +それに、 私がバックシティをパスしたのには理由があるんだ。 + +03:04.970 --> 03:05.690 +これだ。 + +03:05.690 --> 03:09.800 +だから、 アーティストに渡すために市が必要だったんだ。 + +03:10.130 --> 03:13.880 +ええと、 それから、 これは全部まったく同じなんだ。 + +03:13.880 --> 03:16.040 +もうひとつ、 小さな変更がある。 + +03:16.040 --> 03:26.840 +つまり、 モデルからの反応を収集したら、 トーカーを呼び出して反応を確認する。 + +03:26.840 --> 03:29.780 +これが私たちのチャットだ。 + +03:29.960 --> 03:32.270 +ええと、 それを実行しよう。 + +03:33.920 --> 03:41.420 +さて、 Gradioがいかに簡単かをいつも自慢してきたので、 このコードはもう少し複雑だと言っておく。 + +03:41.420 --> 03:46.970 +お気づきの方もいらっしゃるかもしれませんが、 その理由は、 私たちが今、 もう少し、 画像を見せたいからです。 + +03:46.970 --> 03:55.550 +私たちは、 Gradioが私たちのために提供してくれるデフォルトの、 ある種の既製品の、 えー、 
チャット・ユーザー・インターフェースの外に出ようとしています。 + +03:55.550 --> 03:58.120 +そして、 自分たちでインターフェースを構築しなければならない。 + +03:58.150 --> 04:07.090 +その結果、 入力やボタンのような様々なコンポーネントをまとめたインターフェースを作らなければならなくなった。 + +04:07.300 --> 04:10.180 +しかし、 私が言いたいのは、 これでも実は超簡単だということだ。 + +04:10.180 --> 04:11.470 +まだ英語のように読める。 + +04:11.470 --> 04:13.030 +何が起こっているかははっきりしている。 + +04:13.030 --> 04:19.330 +ここで起こっていることがすべてわかるだろうし、 願わくば、 これがあなたにとってかなり読みやすいものになることを願っている。 + +04:19.330 --> 04:26.740 +そしてこれを利用して、 より洗練されたチャットやより洗練されたUIを自分で構築することができる。 + +04:26.740 --> 04:36.850 +このような背景を踏まえて、 これからこれを実行に移し、 それを表示させる。 + +04:37.240 --> 04:41.650 +そしてここで、 新しいアシスタントとのおしゃべりが始まった。 + +04:41.680 --> 04:43.300 +試してみよう。 + +04:47.740 --> 04:48.280 +こんにちは。 + +04:48.280 --> 04:49.630 +今日はどのようなご用件でしょうか? + +04:51.040 --> 04:52.090 +気に入ったかい? + +04:52.240 --> 04:53.650 +それは私たちに語りかけてきた。 + +04:53.740 --> 04:54.520 +これでよし。 + +04:54.520 --> 04:56.620 +それがエージェントの最初の使い方だ。 + +04:56.620 --> 05:07.510 +私たちは、 音声を作成できるスペシャリスト・モデルを持っていて、 それをチャットボットに統合して、 チャットボットが私たちに話しかけられるようにしたんだ。 + +05:15.760 --> 05:17.020 +素晴らしい選択だ。 + +05:17.080 --> 05:20.410 +ロンドンへの往復航空券の料金をお知りになりたいですか? + +05:22.930 --> 05:24.220 +これでよし。 + +05:24.250 --> 05:27.220 +それはエンターテインメントだ。 + +05:34.240 --> 05:35.710 +間があるのは分かっている。 + +05:43.090 --> 05:44.080 +さあ、 始めよう。 + +05:48.040 --> 05:51.220 +ロンドンまでの往復航空券は799ドル。 + +05:53.120 --> 05:54.920 +そうだ。 + +05:54.950 --> 05:58.340 +ロンドンまでの往復チケットは7ドル。 99. + +05:58.340 --> 06:00.290 +そして、 そこにはイメージがある。 + +06:00.290 --> 06:04.220 +そして、 その画像は壮大に見える。 + +06:04.370 --> 06:06.290 +真ん中にロンドンバス。 + +06:06.290 --> 06:07.700 +ビッグベンがある。 + +06:07.700 --> 06:10.400 +ブリッジがある。 + +06:10.400 --> 06:11.690 +これは... 
+ +06:11.720 --> 06:14.420 +ああ、 タクシーが見えるね。 + +06:14.450 --> 06:18.950 +素晴らしいモンタージュ映像だ。 + +06:19.160 --> 06:28.280 +ほんの少しのコードで実現できることの素晴らしい例だ。 + +06:28.520 --> 06:37.550 +そこで、 航空会社のAIアシスタントと会話するためのマルチモーダル・エージェント・フレームワークの一部として、 + +06:37.550 --> 06:48.050 +音声と画像を含むマルチモーダル・アプリを紹介しよう。 + +06:48.140 --> 06:49.280 +素晴らしい仕事だ。 + +06:49.310 --> 06:52.790 +また今週のチャレンジと総括でお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/59167009/ko_KR.srt b/week5/community-contributions/subtitles/srts/59167009/ko_KR.srt new file mode 100755 index 0000000..9d75b66 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59167009/ko_KR.srt @@ -0,0 +1,286 @@ +WEBVTT + +00:00.740 --> 00:01.910 +잘 돌아왔어요 + +00:01.910 --> 00:04.220 +에이전트 프레임워크를 만들 때가 됐어요 + +00:04.220 --> 00:05.630 +정말 기대돼요 + +00:05.660 --> 00:10.490 +지금까지 해 왔던 것처럼 모든 걸 잘 활용하고 있어요 결과에 아주 만족하실 + +00:10.490 --> 00:11.510 +거예요 + +00:11.720 --> 00:13.730 +간단히 정리해 보죠 + +00:13.730 --> 00:14.870 +프레임워크 요원요 + +00:14.960 --> 00:17.840 +에이전트 I는 포괄적인 용어예요 + +00:17.840 --> 00:20.120 +다양한 기술을 참고할 수 있어요 + +00:20.240 --> 00:23.060 +예를 들어 이 다섯 가지 중 아무거나요 + +00:23.090 --> 00:28.250 +복잡한 문제를 여러 모델이 각기 다른 전문화된 작업을 수행하며 작은 단계로 + +00:28.250 --> 00:29.420 +단계화하는 거죠 + +00:29.420 --> 00:33.890 +LLM이 추가적인 기능을 부여할 도구를 갖는 기능일 수 있죠 + +00:33.890 --> 00:41.000 +에이전트 환경에 대해 얘기할 수도 있습니다 에이전트가 협업을 할 수 있는 에이전트 + +00:41.000 --> 00:43.160 +프레임워크죠 + +00:43.190 --> 00:50.270 +하나의 LLM이 플래너 역할을 하는 겁니다 작은 작업들로 나누고 전문가들이 직접 + +00:50.270 --> 00:55.100 +llm이 되거나 소프트웨어 조각을 수행하는 거죠 + +00:55.280 --> 01:00.210 +또 다른 요점은 에이전트 인공지능을 생각할 + +01:00.210 --> 01:07.740 +때 독자적인 기관을 떠올리는데 즉각적인 응답을 넘어서 메모리를 + +01:07.740 --> 01:13.050 +갖고 뉴스 정보를 검색해 웹을 긁고 주식을 매매할지 + +01:13.050 --> 01:19.110 +결정하는 데 사용한다고 보면 돼요 + +01:19.110 --> 01:26.670 +그건 말하자면 요청 응답 채팅의 컨텍스트 밖에 존재하는 무언가예요 + +01:26.670 --> 01:32.100 +이런 식으로∙∙∙ 에이전트 AI와 에이전트의 사용에 대해 얘기할 + +01:32.100 --> 01:34.890 +때 사람들이 언급하는 것들이죠 + +01:34.890 --> 01:39.510 +우리가 여기서 얘기하고 있는 건 1번과 2번이고 3번과 + +01:39.510 --> 01:42.210 
+5번도 어느 정도 있어요 + +01:42.300 --> 01:45.090 +하지만 계획을 실행하는 LLM을 만드는 게 아니잖아요 + +01:45.090 --> 01:47.280 +이번 시간엔 그런 걸 안 할 거예요 + +01:47.280 --> 01:57.000 +이건 익숙하실 거예요 채팅 메서드거든요 전에 있던 것과 꽤 비슷하죠 + +01:57.000 --> 01:59.710 +몇 가지 눈에 띄는 게 있어요 + +01:59.710 --> 02:07.210 +이 섹션은 우리가 잘 아는 라디오 채팅 함수예요 + +02:07.210 --> 02:16.180 +메시지와 히스토리를 취하고 그 히스토리를 오픈AI가 기대하는 포맷으로 + +02:16.210 --> 02:20.200 +풀어낸 다음 응답을 호출해요 + +02:20.440 --> 02:26.080 +이 부분도 익숙하실 겁니다 도구를 사용하는 곳이니까요 + +02:26.080 --> 02:32.860 +모델이 도구를 호출하길 원하는지 알고 있다면 그 도구를 다루죠 + +02:33.010 --> 02:38.950 +여기 한 줄 더 있는데요 여기서 말씀 드리고 싶은 + +02:38.950 --> 02:44.710 +건 만약 고객이 티켓 가격을 찾기 위해 툴을 실행해야 + +02:44.710 --> 02:56.200 +한다면 아티스트가 그 도시를 나타내는 이미지를 생성하도록 할 거예요 + +02:56.200 --> 02:58.160 +자, 됐어요 + +02:58.220 --> 03:00.050 +그거 좋네요 + +03:00.050 --> 03:04.160 +그리고 제가 전에 얘기했던 걸 기억하신다면 제가 돌아온 이유를 곧 알게 + +03:04.160 --> 03:04.940 +되실 거예요 + +03:04.970 --> 03:05.690 +여기 있네요 + +03:05.690 --> 03:09.800 +그래서 시가 예술가에게 넘겨야 했어요 + +03:10.130 --> 03:13.880 +그리고 이건 전부 똑같아요 + +03:13.880 --> 03:16.040 +한 가지 더 있어요 + +03:16.040 --> 03:22.760 +여기 있는 이 모델에서 반응을 수집한 다음 토크 커뮤니티에 + +03:22.790 --> 03:26.840 +전화해 반응을 확인하죠 + +03:26.840 --> 03:29.780 +이게 우리 대화예요 + +03:29.960 --> 03:32.270 +그걸 실행해 보죠 + +03:33.920 --> 03:41.420 +비트가 얼마나 쉬운지 늘 자랑했듯이 이 코드는 좀 더 복잡해요 + +03:41.420 --> 03:46.970 +그 이유는 비트를 더 많이 사용하고 이미지를 보여주기 위해서예요 + +03:46.970 --> 03:55.550 +기본 설정 밖으로 나가보죠 규격화된 채팅 사용자 인터페이스 같은 건데 그라디오가 제공해요 + +03:55.550 --> 03:58.120 +인터페이스는 우리가 직접 만들어야 해요 + +03:58.150 --> 04:04.960 +그 결과 이 인터페이스를 구성해야 했어요 input이나 버튼 같은 다양한 구성 + +04:04.960 --> 04:07.090 +요소를 구성하는 거죠 + +04:07.300 --> 04:10.180 +하지만 이건 여전히 아주 간단해요 + +04:10.180 --> 04:11.470 +영어처럼 들려요 + +04:11.470 --> 04:13.030 +무슨 일인지 뻔하죠 + +04:13.030 --> 04:19.330 +여기서 일어나는 모든 일을 볼 수 있어요 읽을 수 있길 바라요 + +04:19.330 --> 04:26.740 +이걸 이용해 더 복잡한 채팅이나 더 복잡한 UI를 만들 수 있어요 + +04:26.740 --> 04:36.850 +이 배경으로 이걸 실행∙∙∙ 실행되고 있어요 불러오죠 + +04:37.240 --> 04:41.650 +새 비서와 대화하는 모습이네요 + +04:41.680 --> 04:43.300 +한번 해 보죠 + +04:47.740 --> 
04:48.280 +안녕하세요 + +04:48.280 --> 04:49.630 +무엇을 도와드릴까요? + +04:51.040 --> 04:52.090 +맘에 들어요? + +04:52.240 --> 04:53.650 +우리에게 말을 걸었어요 + +04:53.740 --> 04:54.520 +됐어요 + +04:54.520 --> 04:56.620 +에이전트 사용은 처음이죠 + +04:56.620 --> 05:04.420 +오디오를 만드는 전문 모델이 있었는데 그걸 챗봇과 통합해서 우리에게 + +05:04.420 --> 05:07.510 +말을 걸 수 있게 했어요 + +05:15.760 --> 05:17.020 +탁월한 선택이에요 + +05:17.080 --> 05:20.410 +런던 왕복 비행기 표 가격을 알고 싶으세요? + +05:22.930 --> 05:24.220 +됐어요 + +05:24.250 --> 05:27.220 +재미있다고 해두죠 + +05:34.240 --> 05:35.710 +잠시 멈추죠 + +05:43.090 --> 05:44.080 +시작할게요 + +05:48.040 --> 05:51.220 +런던 왕복 항공권은 799달러예요 + +05:53.120 --> 05:54.920 +다 됐어요 + +05:54.950 --> 05:58.340 +런던 왕복 항공권은 7달러예요 99살요 + +05:58.340 --> 06:00.290 +이미지가 나오네요 + +06:00.290 --> 06:04.220 +정말 장관이에요 + +06:04.370 --> 06:06.290 +런던 버스예요 + +06:06.290 --> 06:07.700 +빅벤도 있어요 + +06:07.700 --> 06:10.400 +다리도 있어요 + +06:10.400 --> 06:11.690 +네 + +06:11.720 --> 06:14.420 +저기 택시가 보여요 + +06:14.450 --> 06:18.950 +여러 이미지의 멋진 몽타주예요 + +06:19.160 --> 06:26.570 +그래서 저는 이 비트가 아주 흥미롭다고 생각합니다 소량의 코드만으로 무엇을 이룰 수 있는지 보여주는 + +06:26.570 --> 06:28.280 +훌륭한 예죠 + +06:28.520 --> 06:37.550 +다중 모듈 앱을 소개합니다 비행기의 인공지능 보조와 통신할 + +06:37.550 --> 06:48.050 +때 음성과 이미지가 포함된 다중 모듈 에이전트 프레임워크죠 + +06:48.140 --> 06:49.280 +수고했어요 + +06:49.310 --> 06:52.790 +이번 주의 도전과 마무리에서 다시 만나요 diff --git a/week5/community-contributions/subtitles/srts/59167015/en_US.srt b/week5/community-contributions/subtitles/srts/59167015/en_US.srt new file mode 100755 index 0000000..f00c6ce --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59167015/en_US.srt @@ -0,0 +1,424 @@ +WEBVTT + +00:00.800 --> 00:05.960 +Welcome back to Jupyter Lab and welcome to Day Five's Lab. + +00:05.960 --> 00:12.020 +And this is going to be lots of creativity and hopefully lots of entertainment. + +00:12.020 --> 00:16.910 +So to start with I have copied the day four Jupyter Lab. + +00:17.030 --> 00:19.370 +And I've duplicated that. + +00:19.370 --> 00:20.420 +And then I've extended it. 
+ +00:20.420 --> 00:23.570 +So everything above where I am now is just a repeat of day four. + +00:23.600 --> 00:31.670 +Creates the AI assistant for our airline called flight flight AI, something like that, and arms it + +00:31.670 --> 00:33.590 +with a tool to be able to get ticket prices. + +00:33.590 --> 00:39.140 +All of that is already there and I've executed it ready for our showtime today. + +00:39.140 --> 00:40.820 +We're going to go multi-modal. + +00:40.820 --> 00:46.250 +We're going to use Dall-E three, which is the image generation model that sits behind GPT four. + +00:46.760 --> 00:48.800 +We're going to use it to make some images. + +00:48.800 --> 00:52.790 +And let's start by putting it into a function called artist. + +00:52.790 --> 00:57.770 +Before that, there are two, uh, service announcements I should make. + +00:57.950 --> 01:03.830 +Uh, first of all, I should point out that the price associated with generating an image is not tiny. + +01:03.880 --> 01:10.150 +Everything that we've done so far, I hope, has had a de minimis price in the fractions of a cent. + +01:10.300 --> 01:16.090 +Unless you've been generating tons of lengthy brochures, you have not racked up a significant bill + +01:16.090 --> 01:17.830 +from running this course so far. + +01:17.950 --> 01:21.880 +But now we are doing something that's slightly more on the radar. + +01:21.910 --> 01:25.420 +Each image that we generate will cost $0.04. + +01:25.450 --> 01:30.700 +Now, I put it to you that when you see these images, you will agree that they are well worth $0.04 + +01:30.730 --> 01:31.360 +each. + +01:31.570 --> 01:34.720 +And they are super creative and high value. + +01:34.720 --> 01:35.590 +And I love them. + +01:35.590 --> 01:37.630 +So I think it is money well spent. + +01:37.630 --> 01:41.650 +But I do want to inform you of that so that you can decide whether you want to spend your $0.04 each + +01:41.650 --> 01:42.310 +time. 
+ +01:42.700 --> 01:49.120 +Uh, the other thing to mention is that there is a little bit of, uh, um, uh, there's there's a + +01:49.120 --> 01:56.770 +there's a point about whether or not one should use the term LM when referring to image generation and + +01:56.770 --> 01:58.270 +audio generation and the like. + +01:58.300 --> 01:59.290 +Text to audio. + +01:59.320 --> 02:03.400 +Because of course, these are not large language models sitting behind the scenes. + +02:03.430 --> 02:10.090 +Now, what tends to happen these days is that people use LM as a bit of a general term for the models + +02:10.090 --> 02:12.280 +that sit behind gen AI systems. + +02:12.280 --> 02:19.450 +So actually, in practice, I think this very much is part of the skill set and toolkit of an LM engineer. + +02:19.450 --> 02:23.800 +But I should mention that, of course, strictly speaking, these aren't language models. + +02:23.800 --> 02:30.730 +These are image models and audio models that we'll be playing with right now as we add them to our agent + +02:30.730 --> 02:31.630 +framework. + +02:31.750 --> 02:34.720 +Anyways, with that preamble, let's get on with it. + +02:34.720 --> 02:39.220 +So we start by importing some useful image libraries. + +02:39.220 --> 02:40.420 +Well, the first one isn't. + +02:40.570 --> 02:44.260 +First two aren't image libraries, but some, some, uh, utilities. + +02:44.260 --> 02:51.520 +And then the Python image library is going to be very useful for us, a very handy common library. + +02:51.760 --> 03:00.820 +Uh, so the next thing we do is we're going to write a function called artist and artist calls OpenAI + +03:00.850 --> 03:03.520 +dot images dot generate. + +03:03.520 --> 03:09.460 +So it's a very consistent style that you're used to OpenAI images generate. + +03:09.460 --> 03:11.520 +We pass in the name of a model. + +03:11.520 --> 03:13.860 +In this case, the model is Dall-E three. 
+ +03:13.890 --> 03:16.650 +You could also try Dall-E two, its predecessor. + +03:16.680 --> 03:18.480 +The images are less awesome. + +03:18.480 --> 03:19.440 +It's a bit cheaper. + +03:19.440 --> 03:24.450 +I seem to remember it's about $0.02 rather than $0.04, so it's not massively cheaper and in my opinion + +03:24.450 --> 03:26.160 +well worth the extra $0.02. + +03:26.190 --> 03:27.750 +Stick with Dall-E three. + +03:27.780 --> 03:32.400 +We give it a prompt and this isn't now a clever list of dictionaries. + +03:32.400 --> 03:33.360 +It's just text. + +03:33.360 --> 03:39.240 +And in this case, the prompt I'm suggesting here is, we say, an image representing a vacation in + +03:39.240 --> 03:46.980 +city, showing tourist spots and everything unique about city in a vibrant pop art style. + +03:46.980 --> 03:50.250 +We give it a size that is the smallest size. + +03:50.250 --> 03:53.070 +Dall-E three will do, Dall-E two will go much smaller. + +03:53.250 --> 03:58.680 +Um and Dall-E three also does two larger sizes in a portrait and landscape format. + +03:58.740 --> 04:00.870 +Just google it if you'd like to know those dimensions. + +04:00.870 --> 04:02.400 +If you'd like to try those images. + +04:02.430 --> 04:04.260 +We just want one image back. + +04:04.260 --> 04:06.210 +We say we want this format. + +04:06.450 --> 04:12.840 +Back comes something in the, uh, this um, uh, base64 encoded format. + +04:12.840 --> 04:20.040 +We then decode that into bytes, and then we then create a bytes IO object on those bytes, which we + +04:20.040 --> 04:26.850 +can then pass in to the image dot open function, and that will return an image for us. + +04:26.850 --> 04:28.320 +Let's execute that. + +04:28.320 --> 04:30.300 +And now let's give it a try. + +04:30.330 --> 04:35.040 +So I'm going to say image equals artist. + +04:36.870 --> 04:38.940 +And what shall we say New York City. 
+ +04:42.660 --> 04:50.520 +And then display image is the Jupiter way of then getting that to show. + +04:50.550 --> 04:53.400 +Let's run that or you're seeing one I ran already there. + +04:53.400 --> 04:56.790 +Sorry it's not that quick, but look how amazing that is. + +04:56.940 --> 04:58.380 +Uh, you're already getting. + +04:58.380 --> 05:00.750 +I'm spoiling you by showing you one right away. + +05:00.750 --> 05:01.950 +This is what it looks like. + +05:01.950 --> 05:07.710 +It's generating a second one above you get to see the Statue of Liberty, a few different Empire State + +05:07.710 --> 05:16.200 +buildings, some planes in the sky, and then a sort of image to Times Square with lots of signs and + +05:16.200 --> 05:18.510 +with New York, spelled out their taxi. + +05:18.540 --> 05:19.140 +Look at that. + +05:19.170 --> 05:20.610 +A yellow New York taxi. + +05:20.640 --> 05:21.690 +And Coca-Cola. + +05:21.690 --> 05:23.040 +And a hot dog. + +05:23.070 --> 05:25.050 +A very New York iconic thing. + +05:25.080 --> 05:26.550 +Fantastic. + +05:26.580 --> 05:29.190 +Meanwhile, it's built another image for us here. + +05:29.190 --> 05:29.670 +And. + +05:29.670 --> 05:31.020 +Wow, look at this one. + +05:31.020 --> 05:32.340 +It's different. + +05:32.340 --> 05:33.120 +It's great. + +05:33.120 --> 05:35.430 +It's got a big jet over here. + +05:35.430 --> 05:40.620 +It's got the Empire State Building, of course, multiple Empire State buildings, Statue of Liberty's. + +05:40.620 --> 05:47.280 +And it's got again the sort of thriving shops and taxi in the foreground like that, an iconic New York + +05:47.310 --> 05:48.870 +taxi and a hot dog again. + +05:49.080 --> 05:54.330 +Uh, so the thing to mention is that these images, they're so creative and they're so different, we've + +05:54.330 --> 05:59.790 +got two now that we can see the one I did a moment ago and this one here, uh, and you can see how + +05:59.790 --> 06:01.470 +great they look. 
+ +06:02.430 --> 06:03.060 +All right. + +06:03.060 --> 06:05.220 +Well, I hope that you were entertained by that. + +06:05.220 --> 06:10.920 +And by all means, can I suggest spend some $0.04, generate a few images for yourself. + +06:10.920 --> 06:12.180 +They're great. + +06:12.690 --> 06:14.940 +All right, let's add one more function. + +06:14.940 --> 06:20.450 +We're going to make a function that uses OpenAI's speech to generate some audio. + +06:20.450 --> 06:25.670 +So we're going to use a couple of utility stuff here with a library called Pi Dub. + +06:25.670 --> 06:26.630 +That's very useful. + +06:26.840 --> 06:29.300 +We're going to write a function called talker. + +06:29.300 --> 06:33.860 +And talker is going to call OpenAI dot audio dot speech dot create. + +06:33.860 --> 06:39.470 +So if we look back up the image generation was OpenAI images generate. + +06:39.470 --> 06:46.760 +And for audio it's a case of uh OpenAI audio dot speech dot create. + +06:46.760 --> 06:48.500 +We pass in a model. + +06:48.740 --> 06:56.300 +Um, and this is the model we're using, TTS one TTS stands for text to speech and is, uh, the, the + +06:56.330 --> 07:00.080 +this kind of model that we're going for, we supply a voice. + +07:00.080 --> 07:01.880 +In this case we're going to try the voice. + +07:01.910 --> 07:02.750 +Onyx. + +07:02.750 --> 07:04.880 +There's something like eight different voices to try again. + +07:04.910 --> 07:06.800 +You can you can Google to see what they are. + +07:06.800 --> 07:10.310 +And we pass in the thing that this function was called. + +07:10.310 --> 07:16.520 +With what comes back, we again create a bytes IO object to represent those bytes. + +07:16.520 --> 07:25.330 +And then we use this to this audio segment, uh creating it from a file and the audio stream and get + +07:25.330 --> 07:27.250 +it to play that audio. + +07:27.250 --> 07:31.180 +So let's create that function and then let's say talker. 
+ +07:33.070 --> 07:35.470 +Well hi there. + +07:40.150 --> 07:41.110 +Well hi there. + +07:42.430 --> 07:43.240 +There we go. + +07:43.270 --> 07:44.500 +As simple as that. + +07:44.830 --> 07:47.410 +Uh, let's see how another voice sounds. + +07:47.410 --> 07:50.320 +Let's see how alloy sounds. + +07:50.470 --> 07:52.270 +Let's put alloy in there. + +07:55.000 --> 07:55.930 +Well hi there. + +07:56.860 --> 07:58.630 +And that was alloy. + +07:58.660 --> 08:01.180 +I think we'll stick with onyx. + +08:01.180 --> 08:03.070 +But you can try either. + +08:03.070 --> 08:09.580 +And you can also put in some more there that you can experiment with and pick your favorite. + +08:09.910 --> 08:10.810 +All right. + +08:10.810 --> 08:13.510 +Well that's what we'll go with. + +08:14.710 --> 08:20.920 +Uh and now let's talk about the agent framework. + +08:20.950 --> 08:23.650 +I think we will break for the next video. + +08:23.650 --> 08:26.200 +And that's where we'll take on our full agent framework. + +08:26.230 --> 08:27.280 +See you then. 
diff --git a/week5/community-contributions/subtitles/srts/59167015/ja_JP.srt b/week5/community-contributions/subtitles/srts/59167015/ja_JP.srt new file mode 100755 index 0000000..39e7dc7 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59167015/ja_JP.srt @@ -0,0 +1,391 @@ +WEBVTT + +00:00.800 --> 00:05.960 +Jupyter Labへようこそ、 そして5日目のラボへようこそ。 + +00:05.960 --> 00:12.020 +そして、 これはたくさんの創造性と、 できればたくさんのエンターテインメントになるだろう。 + +00:12.020 --> 00:16.910 +そこでまず、 4日目のJupyter Labをコピーしてみた。 + +00:17.030 --> 00:19.370 +そして、 私はそれを再現した。 + +00:19.370 --> 00:20.420 +そして、 それを延長したんだ。 + +00:20.420 --> 00:23.570 +だから、 今いるところより上は、 すべて4日目の繰り返しなんだ。 + +00:23.600 --> 00:33.590 +フライトAIと呼ばれる航空会社のAIアシスタントを作成し、 航空券の価格を知ることができるツールを持たせる。 + +00:33.590 --> 00:39.140 +そのすべてがすでにあり、 今日のショータイムのために準備してきた。 + +00:39.140 --> 00:40.820 +私たちはマルチモーダルを目指す。 + +00:40.820 --> 00:46.250 +私たちは、 GPT 4の後ろに位置するイメージ生成モデルであるDall-E 3を使うつもりです。 + +00:46.760 --> 00:48.800 +これを使って画像を作るんだ。 + +00:48.800 --> 00:52.790 +そして、 それをartistという関数に入れることから始めよう。 + +00:52.790 --> 00:57.770 +その前に、 2つ、 ええと、 サービスアナウンスがあるんだ。 + +00:57.950 --> 01:03.830 +ええと、 まず最初に言っておかなければならないのは、 画像生成にかかる料金は決して小さなものではないということだ。 + +01:03.880 --> 01:10.150 +私たちがこれまでやってきたことはすべて、 1セントの何分の1という最小限の価格だったと思う。 + +01:10.300 --> 01:17.830 +長大なパンフレットを大量に作成しているのでなければ、 このコースの運営で多額の請求が来ることはないだろう。 + +01:17.950 --> 01:21.880 +でも、 今はもう少しレーダーに近いことをやっている。 + +01:21.910 --> 01:25.420 +弊社が生成する各画像は0ドルです。 04. 
+ +01:25.450 --> 01:31.360 +さて、 これらの画像をご覧になれば、 0ドルの価値は十分にあるとご納得いただけるだろう。 各04ドル。 + +01:31.570 --> 01:34.720 +しかも、 超クリエイティブで価値が高い。 + +01:34.720 --> 01:35.590 +私は彼らを愛している。 + +01:35.590 --> 01:37.630 +だから、 私は十分なお金を使ったと思う。 + +01:37.630 --> 01:42.310 +でも、 0ドルを使うかどうかを決めるために、 そのことはお知らせしておきたい。 毎回04。 + +01:42.700 --> 01:49.120 +もうひとつ言っておくと、 画像生成や音声生成などにLMという言葉を使うべきかどうかという点で、 少し、 あー、 あー、 あー、 + +01:49.120 --> 01:56.770 +あー、 あー、 あー、 あー、 あー、 あー、 あー、 あー、 あー、 あー、 あー、 あー、 あー、 あー、 あー、 あー、 あー、 あー、 あー、 あー、 あー、 あー、 あー、 + +01:56.770 --> 01:58.270 +あー、 あー、 あー、 あー。 + +01:58.300 --> 01:59.290 +テキストから音声へ。 + +01:59.320 --> 02:03.400 +というのも、 もちろん、 これらは舞台裏にある大規模な言語モデルではないからだ。 + +02:03.430 --> 02:12.280 +さて、 最近起こりがちなのは、 人々はLMを、 一般的なAIシステムの背後にあるモデルの総称として使っているということだ。 + +02:12.280 --> 02:19.450 +だから実際には、 これはLMエンジニアのスキルセットとツールキットの一部だと思う。 + +02:19.450 --> 02:23.800 +しかし、 もちろん、 厳密に言えば、 これらは言語モデルではないということは言っておかなければならない。 + +02:23.800 --> 02:31.630 +これらは画像モデルとオーディオモデルで、 これからエージェントフレームワークに追加して遊ぶことになる。 + +02:31.750 --> 02:34.720 +ともあれ、 前置きはこれくらいにして、 さっそく本題に入ろう。 + +02:34.720 --> 02:39.220 +そこで、 便利な画像ライブラリをいくつかインポートすることから始めよう。 + +02:39.220 --> 02:40.420 +まあ、 最初のは違うけどね。 + +02:40.570 --> 02:44.260 +最初の2つはイメージライブラリではなく、 いくつかのユーティリティだ。 + +02:44.260 --> 02:51.520 +そして、 Pythonのイメージ・ライブラリーは、 私たちにとって非常に便利な共通ライブラリーです。 + +02:51.760 --> 03:03.520 +次にやることは、 artistという関数を書いて、 artistがOpenAI dot images dot generateを呼び出すことだ。 + +03:03.520 --> 03:09.460 +つまり、 OpenAIの画像が生成するのに慣れた、 非常に一貫したスタイルなのだ。 + +03:09.460 --> 03:11.520 +モデル名でパスを出す。 + +03:11.520 --> 03:13.860 +この場合、 モデルはDall-E 3である。 + +03:13.890 --> 03:16.650 +その前身である『Dall-E two』を試してみるのもいいだろう。 + +03:16.680 --> 03:18.480 +画像はそれほど素晴らしいものではない。 + +03:18.480 --> 03:19.440 +もう少し安い。 + +03:19.440 --> 03:26.160 +0ドルくらいだったと記憶している。 0ドルではなく02ドル。 04なので、 大幅に安いわけではなく、 0ドル余分に払う価値は十分にあると私は思う。 + +03:26.160 --> 03:26.160 +02. 
+ +03:26.190 --> 03:27.750 +Dall-E 3にこだわる。 + +03:27.780 --> 03:32.400 +私たちはプロンプトを与え、 これは今、 辞書の巧妙なリストではない。 + +03:32.400 --> 03:33.360 +ただのテキストだ。 + +03:33.360 --> 03:39.240 +この場合、 私が提案するプロンプトは、 例えば、 都市での休暇を表現するイメージで、 観光スポットや都市に関するあらゆるユニークなものを、 + +03:39.240 --> 03:46.980 +活気に満ちたポップアート・スタイルで表現するものだ。 + +03:46.980 --> 03:50.250 +最小のサイズを与える。 + +03:50.250 --> 03:53.070 +Dall-E 3なら大丈夫、 Dall-E 2ならもっと小さくなる。 + +03:53.250 --> 03:58.680 +UmとDall-E threeは、 縦型と横型の2つの大きなサイズも用意している。 + +03:58.740 --> 04:00.870 +その寸法を知りたければググればいい。 + +04:00.870 --> 04:02.400 +もしこれらの画像を試してみたいなら。 + +04:02.430 --> 04:04.260 +私たちはただ1枚の画像を返してほしいだけなのだ。 + +04:04.260 --> 04:06.210 +私たちはこの形式を望んでいると言っている。 + +04:06.450 --> 04:12.840 +Base64でエンコードされたフォーマットで戻ってくる。 + +04:12.840 --> 04:20.040 +そして、 それをバイトにデコードし、 そのバイトでバイトIOオブジェクトを作り、 + +04:20.040 --> 04:26.850 +それをimage dot open関数に渡すと、 画像を返してくれる。 + +04:26.850 --> 04:28.320 +それを実行しよう。 + +04:28.320 --> 04:30.300 +そして今、 それを試してみよう。 + +04:30.330 --> 04:35.040 +だから、 私はイメージ=アーティストと言うつもりだ。 + +04:36.870 --> 04:38.940 +そしてニューヨーク・シティ。 + +04:42.660 --> 04:50.520 +そして、 画像を表示させるのがジュピター流だ。 + +04:50.550 --> 04:53.400 +それを実行しましょう、 あるいは私がすでに実行したものがそこにあります。 + +04:53.400 --> 04:56.790 +そんなに早くないのは残念だけど、 見てよ、 この素晴らしさを。 + +04:56.940 --> 04:58.380 +ええと、 もうわかっていますよね。 + +04:58.380 --> 05:00.750 +さっそく1つお見せしましょう。 + +05:00.750 --> 05:01.950 +こんな感じだ。 + +05:01.950 --> 05:07.710 +自由の女神、 数種類のエンパイアステートビル、 空に浮かぶ飛行機、 + +05:07.710 --> 05:18.510 +そしてたくさんの看板とニューヨークのタクシーが綴られたタイムズスクエアのイメージのようなものを見ることができる。 + +05:18.540 --> 05:19.140 +あれを見ろ。 + +05:19.170 --> 05:20.610 +黄色いニューヨークのタクシー。 + +05:20.640 --> 05:21.690 +そしてコカ・コーラ。 + +05:21.690 --> 05:23.040 +それとホットドッグ。 + +05:23.070 --> 05:25.050 +まさにニューヨークを象徴するものだ。 + +05:25.080 --> 05:26.550 +ファンタスティックだ。 + +05:26.580 --> 05:29.190 +その一方で、 ここでまた新たなイメージを構築してくれた。 + +05:29.190 --> 05:29.670 +そして + +05:29.670 --> 05:31.020 +うわあ、 これを見てよ。 + +05:31.020 --> 05:32.340 +違うんだ。 + +05:32.340 --> 05:33.120 +素晴らしいよ。 + +05:33.120 --> 05:35.430 +こっちには大きなジェット機がある。 + +05:35.430 --> 
05:40.620 +エンパイア・ステート・ビルはもちろん、 複数のエンパイア・ステート・ビルや自由の女神像がある。 + +05:40.620 --> 05:48.870 +ニューヨークを象徴するタクシーとホットドッグ。 + +05:49.080 --> 05:54.330 +この画像はとてもクリエイティブで、 それぞれ違っていて、 + +05:54.330 --> 06:01.470 +さっきの画像とこの画像の2つをご覧ください。 + +06:02.430 --> 06:03.060 +分かった。 + +06:03.060 --> 06:05.220 +まあ、 楽しんでもらえたなら幸いだ。 + +06:05.220 --> 06:10.920 +そして、 ぜひとも0ドルを使うことを提案してもいいかな。 04、 自分用にいくつかの画像を作成する。 + +06:10.920 --> 06:12.180 +彼らは素晴らしい。 + +06:12.690 --> 06:14.940 +よし、 もうひとつ機能を追加しよう。 + +06:14.940 --> 06:20.450 +OpenAIの音声を使って音声を生成する関数を作ります。 + +06:20.450 --> 06:25.670 +ここでは、 Pi Dubと呼ばれるライブラリを使って、 いくつかのユーティリティを使うことにしよう。 + +06:25.670 --> 06:26.630 +とても役に立つよ。 + +06:26.840 --> 06:29.300 +これからtalkerという関数を書きます。 + +06:29.300 --> 06:33.860 +トーカーはOpenAI dot audio dot speech dot createを呼び出す。 + +06:33.860 --> 06:39.470 +つまり、 画像生成はOpenAIの画像生成だったのだ。 + +06:39.470 --> 06:46.760 +オーディオについては、 OpenAIのオーディオ・ドット・スピーチ・ドット・クリエイトのケースだ。 + +06:46.760 --> 06:48.500 +モデルを渡す。 + +06:48.740 --> 06:56.300 +TTSはテキスト・トゥ・スピーチ(text to speech)の略で、 + +06:56.330 --> 07:00.080 +音声を供給するモデルです。 + +07:00.080 --> 07:01.880 +今回は声を試してみよう。 + +07:01.910 --> 07:02.750 +オニキス + +07:02.750 --> 07:04.880 +8種類の声をもう一度試すことができるんだ。 + +07:04.910 --> 07:06.800 +それが何かはググればわかる。 + +07:06.800 --> 07:10.310 +そして、 この関数が呼ばれたことを渡す。 + +07:10.310 --> 07:16.520 +戻ってきたバイトで、 それらのバイトを表すbytes IOオブジェクトを再び作成する。 + +07:16.520 --> 07:27.250 +そして、 このオーディオ・セグメントを使って、 ファイルとオーディオ・ストリームからオーディオ・セグメントを作成し、 そのオーディオを再生する。 + +07:27.250 --> 07:31.180 +では、 その関数を作ってトーカーとしよう。 + +07:33.070 --> 07:35.470 +やあ、 こんにちは。 + +07:40.150 --> 07:41.110 +やあ、 こんにちは。 + +07:42.430 --> 07:43.240 +これでよし。 + +07:43.270 --> 07:44.500 +簡単なことだ。 + +07:44.830 --> 07:47.410 +ええと、 別の声がどう聞こえるか見てみよう。 + +07:47.410 --> 07:50.320 +合金の音を聞いてみよう。 + +07:50.470 --> 07:52.270 +そこに合金を入れよう。 + +07:55.000 --> 07:55.930 +やあ、 こんにちは。 + +07:56.860 --> 07:58.630 +それが合金だった。 + +07:58.660 --> 08:01.180 +オニキスにこだわると思う。 + +08:01.180 --> 08:03.070 +しかし、 どちらでも試すことができる。 + +08:03.070 --> 08:09.580 +そして、 そこにさらにいくつか入れて、 試してみて好きなものを選ぶこともできる。 
+ +08:09.910 --> 08:10.810 +分かった。 + +08:10.810 --> 08:13.510 +まあ、 それで行こう。 + +08:14.710 --> 08:20.920 +さて、 次はエージェントのフレームワークについて話そう。 + +08:20.950 --> 08:23.650 +次のビデオまで休憩しよう。 + +08:23.650 --> 08:26.200 +そして、 そこで私たちは完全なエージェントの枠組みを手に入れることになる。 + +08:26.230 --> 08:27.280 +ではまた diff --git a/week5/community-contributions/subtitles/srts/59167015/ko_KR.srt b/week5/community-contributions/subtitles/srts/59167015/ko_KR.srt new file mode 100755 index 0000000..7e22532 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59167015/ko_KR.srt @@ -0,0 +1,418 @@ +WEBVTT + +00:00.800 --> 00:05.960 +주피터 연구소에 잘 오셨습니다 파이브의 연구실에도요 + +00:05.960 --> 00:12.020 +창의력이 많이 발휘될 거고 오락성도 많으면 좋겠어요 + +00:12.020 --> 00:16.910 +우선 4일째의 주피터 연구소를 모사했어요 + +00:17.030 --> 00:19.370 +그걸 복사했어요 + +00:19.370 --> 00:20.420 +그리고 확장했어요 + +00:20.420 --> 00:23.570 +지금 여기 위로는 나흘째의 반복이에요 + +00:23.600 --> 00:31.670 +우리 항공사의 AI 보조를 만들었어요 비행기 AI라고 불렀죠 그리고 비행기 표 가격을 알아낼 + +00:31.670 --> 00:33.590 +도구를 장착했어요 + +00:33.590 --> 00:39.140 +모든 게 이미 갖춰져 있고 오늘 공연에 맞게 완성했어요 + +00:39.140 --> 00:40.820 +다중 모듈로 갈 거예요 + +00:40.820 --> 00:46.250 +달레3을 사용할 건데요 GPT 4 뒤에 있는 이미지 생성 모델이에요 + +00:46.760 --> 00:48.800 +이미지를 만드는 데 사용할 거예요 + +00:48.800 --> 00:52.790 +아티스트라는 함수에 넣는 것으로 시작하죠 + +00:52.790 --> 00:57.770 +그 전에 두 가지 서비스 공지를 해야 해요 + +00:57.950 --> 01:03.830 +먼저 이미지 생성에 드는 비용은 아주 적지 않아요 + +01:03.880 --> 01:10.150 +지금까지 한 모든 게 1센트 미만이라도 적게 들었으면 좋겠어요 + +01:10.300 --> 01:16.090 +당신이 엄청나게 긴 책자를 만든 게 아니라면 지금까지 이 과정을 진행했다고 해서 + +01:16.090 --> 01:17.830 +큰돈을 번 건 아니에요 + +01:17.950 --> 01:21.880 +하지만 지금은 좀 더 눈에 띄는 일을 하고 있어요 + +01:21.910 --> 01:25.420 +우리가 생성하는 이미지마다 0달러가 들어요 4시요 + +01:25.450 --> 01:31.360 +이 사진들을 보시면 0달러의 가치가 있다는 걸 아실 거예요 Put 각각 04달러요 + +01:31.570 --> 01:34.720 +아주 창의적이고 가치도 높아요 + +01:34.720 --> 01:35.590 +정말 좋아요 + +01:35.590 --> 01:37.630 +돈을 잘 쓴 것 같아요 + +01:37.630 --> 01:42.310 +하지만 그걸 알려드리고 싶어요 0달러를 쓸지 말지 결정하시라고요 한 번에 04개요 + +01:42.700 --> 01:49.120 +언급하고 싶은 다른 것은 약간 그러니까 이미지 생성이나 음향 + +01:49.120 --> 01:56.770 +생성 
같은 것을 말할 때 LM이라는 용어를 써야 할지에 대한 논점이 있어요 + +01:56.770 --> 01:58.270 +비트 + +01:58.300 --> 01:59.290 +문자로 오디오를 연결해요 + +01:59.320 --> 02:03.400 +물론 이건 무대 뒤에 있는 대형 언어 모델이 아니니까요 + +02:03.430 --> 02:10.090 +요즘 사람들은 LM을 일반 용어로 사용합니다 인공지능 시스템 뒤에 있는 + +02:10.090 --> 02:12.280 +모델을 일컫는 비트죠 + +02:12.280 --> 02:19.450 +실제로 LM 엔지니어가 갖춰야 할 기술과 도구라고 생각해요 + +02:19.450 --> 02:23.800 +하지만 엄밀히 말하면 이건 언어 모델이 아니에요 + +02:23.800 --> 02:30.730 +이것들은 이미지 모델과 오디오 모델입니다 에이전트 프레임워크에 추가할 때 실행할 + +02:30.730 --> 02:31.630 +수 있죠 + +02:31.750 --> 02:34.720 +어쨌든 서문은 됐고, 이제 시작하죠. Get it. + +02:34.720 --> 02:39.220 +유용한 이미지 라이브러리 가져오기로 시작하죠 + +02:39.220 --> 02:40.420 +첫 번째는 아니에요 + +02:40.570 --> 02:44.260 +처음 두 개는 이미지 라이브러리가 아니라 일부 유틸리티예요 + +02:44.260 --> 02:51.520 +파이썬 이미지 라이브러리는 아주 유용합니다 아주 편리한 공통 라이브러리예요 + +02:51.760 --> 03:00.820 +다음으로 할 일은 아티스트라는 함수를 작성하는 겁니다 아티스트는 OpenAI.Nagees.Nageate를 + +03:00.850 --> 03:03.520 +호출하죠 + +03:03.520 --> 03:09.460 +OpenAI 이미지 생성에 사용되는 스타일이 아주 일관적이죠 + +03:09.460 --> 03:11.520 +모델 이름을 통과해요 + +03:11.520 --> 03:13.860 +이 경우에는 모델이 달어리 쓰리예요 + +03:13.890 --> 03:16.650 +그 전신인 달이 2호도 한번 드셔 보세요 + +03:16.680 --> 03:18.480 +이미지는 less예요 + +03:18.480 --> 03:19.440 +비트가 좀 더 저렴해요 + +03:19.440 --> 03:24.450 +0달러 정도였던 것 같아요 0달러가 아니라 02달러요 0달러면 많이 싼 편은 아닌데 0달러나 + +03:24.450 --> 03:26.160 +더 쓴 보람이 있네요 2번요 + +03:26.190 --> 03:27.750 +달리 쓰리 주세요 + +03:27.780 --> 03:32.400 +즉각적으로 알려주면 이건 현명한 사전 목록이 아니죠 + +03:32.400 --> 03:33.360 +그냥 문자예요 + +03:33.360 --> 03:39.240 +이 경우에는 제가 제안하는 건 도시에서의 휴가를 상징하는 이미지예요 + +03:39.240 --> 03:46.980 +관광지와 도시의 모든 특징을 생동감 넘치는 팝아트 스타일로 표현하는 거죠 + +03:46.980 --> 03:50.250 +가장 작은 크기로 정해요 + +03:50.250 --> 03:53.070 +달이 3개면 충분하고 달이 2개면 훨씬 작아요 + +03:53.250 --> 03:58.680 +돌레 3도 초상과 가로 형식으로 큰 사이즈를 두 개 만들 수 있어요 + +03:58.740 --> 04:00.870 +크기가 궁금하면 구글에서 검색하세요 + +04:00.870 --> 04:02.400 +이 이미지들을 시도해 보세요 + +04:02.430 --> 04:04.260 +이미지만 있으면 돼요 + +04:04.260 --> 04:06.210 +이 포맷을 원한다고 하죠 + +04:06.450 --> 04:12.840 +백은 베이스64 암호 형식으로 되어 있어요 + +04:12.840 --> 04:20.040 +바이트 단위로 
디코딩하고 그 바이트 단위로 바이트 IO 객체를 생성합니다 + +04:20.040 --> 04:26.850 +그리고 나서 이미지.오픈 함수로 이동합니다 이미지를 반환해 주죠 + +04:26.850 --> 04:28.320 +실행해보죠 + +04:28.320 --> 04:30.300 +이제 한번 해 보죠 + +04:30.330 --> 04:35.040 +이미지 = 아티스트라고 하죠 + +04:36.870 --> 04:38.940 +뭐라고 해야 할까요? 뉴욕시 + +04:42.660 --> 04:50.520 +이미지를 디스플레이하는 건 그걸 보여주는 주피터 방식이죠 + +04:50.550 --> 04:53.400 +실행해보죠, 아니면 이미 실행한 게 보이나요 + +04:53.400 --> 04:56.790 +시간이 좀 걸리지만 정말 놀랍죠? + +04:56.940 --> 04:58.380 +이미 먹고 있잖아요 + +04:58.380 --> 05:00.750 +바로 보여 드려서 버릇 나빠지게 해 드렸어요 + +05:00.750 --> 05:01.950 +이렇게 생겼어요 + +05:01.950 --> 05:07.710 +두 번째 이미지를 위에 만들 거예요 자유의 여신상과 엠파이어 + +05:07.710 --> 05:16.200 +스테이트 빌딩 몇 개와 비행기들이 보이고 타임스 스퀘어 이미지와 많은 간판과 뉴욕 이미지가 + +05:16.200 --> 05:18.510 +택시를 나타내죠 + +05:18.540 --> 05:19.140 +보세요 + +05:19.170 --> 05:20.610 +노란 뉴욕 택시요 + +05:20.640 --> 05:21.690 +코카콜라도요 + +05:21.690 --> 05:23.040 +핫도그도 있어요 + +05:23.070 --> 05:25.050 +뉴욕의 상징이죠 + +05:25.080 --> 05:26.550 +환상적이에요 + +05:26.580 --> 05:29.190 +한편, 다른 이미지를 구축했어요 + +05:29.190 --> 05:29.670 +그리고요 + +05:29.670 --> 05:31.020 +이것 좀 봐요 + +05:31.020 --> 05:32.340 +달라요 + +05:32.340 --> 05:33.120 +좋아요 + +05:33.120 --> 05:35.430 +여기에 큰 제트기가 있어요 + +05:35.430 --> 05:40.620 +엠파이어 스테이트 빌딩도 있고 여러 채와 자유의 여신상도 있어요 + +05:40.620 --> 05:47.280 +앞에는 번화한 상점들과 택시가 보이고 뉴욕의 상징적인 택시와 핫도그가 + +05:47.310 --> 05:48.870 +다시 등장하죠 + +05:49.080 --> 05:54.330 +이 사진들을 보면 정말 창의적이고 색다르다는 걸 알 수 있어요 + +05:54.330 --> 05:59.790 +두 장이 있는데 조금 전에 찍은 사진과 여기 있는 사진을 보면 얼마나 + +05:59.790 --> 06:01.470 +멋진지 알 수 있죠 + +06:02.430 --> 06:03.060 +좋아요 + +06:03.060 --> 06:05.220 +재미있게 보셨길 바라요 + +06:05.220 --> 06:10.920 +0달러 정도 쓰는 게 어때요? 04, 이미지 몇 개 만들어 보세요 + +06:10.920 --> 06:12.180 +멋져요 + +06:12.690 --> 06:14.940 +함수를 하나 더 추가할게요 + +06:14.940 --> 06:20.450 +OpenAI의 음성을 이용해 오디오를 생성하는 함수를 만들 거예요 + +06:20.450 --> 06:25.670 +파이덥이라는 라이브러리를 가진 몇 가지 유틸리티들을 이용할 거예요 + +06:25.670 --> 06:26.630 +아주 유용하죠 + +06:26.840 --> 06:29.300 +토커라는 함수를 쓸 거예요 + +06:29.300 --> 06:33.860 +토커는 OpenAI. 
audio.speaks.Crate라고 부를 거예요 + +06:33.860 --> 06:39.470 +이미지 생성은 OpenAI 이미지가 생성된 것인데요 + +06:39.470 --> 06:46.760 +오디오는 OpenAI audio.speaks.Create가 있네요 + +06:46.760 --> 06:48.500 +모형을 통과시키죠 + +06:48.740 --> 06:56.300 +이게 우리가 쓰는 모델이에요 TTS는 텍스트에서 음성으로 전환하는 모델이죠 + +06:56.330 --> 07:00.080 +우리가 쓰는 모델은 목소리를 제공해요 + +07:00.080 --> 07:01.880 +이번에는 목소리를 시험해 보죠 + +07:01.910 --> 07:02.750 +오닉스요 + +07:02.750 --> 07:04.880 +8개의 다른 목소리를 다시 녹음해야 했어요 + +07:04.910 --> 07:06.800 +구글로 검색하면 다 나와요 + +07:06.800 --> 07:10.310 +이 함수라고 불리는 것을 전달하죠 + +07:10.310 --> 07:16.520 +결과를 보면 바이트당 IO 객체를 생성해 그 바이트를 나타내죠 + +07:16.520 --> 07:25.330 +그리고 이걸 이 오디오 세그먼트에 사용합니다 파일과 오디오 스트림에서 생성해 해당 오디오를 재생하도록 + +07:25.330 --> 07:27.250 +get 하죠 + +07:27.250 --> 07:31.180 +함수를 만들고 토크커라고 하죠 + +07:33.070 --> 07:35.470 +안녕하세요 + +07:40.150 --> 07:41.110 +안녕하세요 + +07:42.430 --> 07:43.240 +됐어요 + +07:43.270 --> 07:44.500 +아주 간단해요 + +07:44.830 --> 07:47.410 +다른 목소리는 어떤지 들어보죠 + +07:47.410 --> 07:50.320 +합금 소리를 들어보죠 + +07:50.470 --> 07:52.270 +합금을 넣죠 Put + +07:55.000 --> 07:55.930 +안녕하세요 + +07:56.860 --> 07:58.630 +합금으로 만든 거예요 + +07:58.660 --> 08:01.180 +그냥 오닉스라고 하죠 + +08:01.180 --> 08:03.070 +하지만 둘 다 가능해요 + +08:03.070 --> 08:09.580 +그리고 여기에 더 넣어서 실험해보고 마음에 드는 걸 고르세요. Put it up Put it up Put it up Put it up Put it up Put it up Put it up Put it up Put it it up Put it up Put it up Put it Put it + +08:09.910 --> 08:10.810 +좋아요 + +08:10.810 --> 08:13.510 +그렇게 하죠 + +08:14.710 --> 08:20.920 +이제 에이전트 프레임워크에 대해 얘기해보죠 + +08:20.950 --> 08:23.650 +다음 영상은 잠시 쉬죠 + +08:23.650 --> 08:26.200 +거기서 에이전트 프레임워크를 다룰 거예요 + +08:26.230 --> 08:27.280 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59169985/en_US.srt b/week5/community-contributions/subtitles/srts/59169985/en_US.srt new file mode 100755 index 0000000..1e5fe7f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59169985/en_US.srt @@ -0,0 +1,73 @@ +WEBVTT + +00:00.680 --> 00:03.740 +So I hope you enjoyed that whirlwind tour of Google Colab. 
+ +00:03.740 --> 00:08.240 +Here's just a little screenshot example of how easy it is to use it. + +00:08.570 --> 00:10.760 +You can just put in a bunch of code. + +00:10.760 --> 00:16.010 +This is of course, hugging face code that we're going to be getting deep into very, very soon. + +00:16.010 --> 00:25.070 +And in this case, I used the flux model, which is you may have noticed it was one of the top trending + +00:25.070 --> 00:27.620 +models when we were looking at models in hugging face. + +00:27.620 --> 00:35.960 +It is a text to image generation model from Black Forest that is a particularly, uh, exciting in that + +00:35.960 --> 00:40.460 +it's one of the really strong open source image generation models. + +00:40.880 --> 00:46.370 +And I prompted it with a futuristic class full of students learning AI coding in the surreal style of + +00:46.370 --> 00:47.210 +Dall-E. + +00:47.390 --> 00:51.650 +Uh, and this is what came up, which is wonderful, wonderful. + +00:51.830 --> 01:00.590 +Uh, and so it gives you a sense of how quickly you can use Google Colab to be, uh, working with, + +01:00.590 --> 01:03.890 +uh, high powered GPUs in the cloud. + +01:05.480 --> 01:11.690 +And with that, we, uh, take a moment to take stock of our progress. + +01:11.720 --> 01:13.130 +We are now ready. + +01:13.130 --> 01:17.390 +You are well positioned to be beginning on your open source adventure. + +01:17.510 --> 01:24.200 +Uh, in addition to what you could already do confidently coding with frontier APIs and building multimodal + +01:24.230 --> 01:32.060 +AI assistants, you can now navigate through Hugging Face and Google Colab and you are ready for action. + +01:32.090 --> 01:39.950 +So next time you're going to be able to run open source models, there's two different levels of API + +01:39.950 --> 01:43.820 +in hugging face, and you're going to understand what that means and what they are. 
+ +01:43.820 --> 01:47.750 +And then we're going to start with the first of those, which is called pipelines. + +01:47.750 --> 01:53.240 +You're going to be able to use pipelines for a bunch of different AI tasks, including generating text, + +01:53.270 --> 01:57.080 +images and audio using open source models. + +01:57.110 --> 01:58.220 +I can't wait. diff --git a/week5/community-contributions/subtitles/srts/59169985/ja_JP.srt b/week5/community-contributions/subtitles/srts/59169985/ja_JP.srt new file mode 100755 index 0000000..01d5f66 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59169985/ja_JP.srt @@ -0,0 +1,58 @@ +WEBVTT + +00:00.680 --> 00:03.740 +というわけで、 Google Colabの旋風ツアーを楽しんでいただけただろうか。 + +00:03.740 --> 00:08.240 +使い方の簡単さをスクリーンショットで紹介しよう。 + +00:08.570 --> 00:10.760 +コードをたくさん入れるだけでいい。 + +00:10.760 --> 00:16.010 +これはもちろん、 ハグ・フェイス・コードである。 + +00:16.010 --> 00:27.620 +この場合、 私はフラックス・モデルを使用した。 これは、 ハグする顔のモデルを見ていたとき、 トップ・トレンド・モデルのひとつだったことにお気づきだろうか。 + +00:27.620 --> 00:40.460 +これはBlack Forestのテキストから画像への生成モデルで、 オープンソースの画像生成モデルの中でも特に強力なもののひとつだ。 + +00:40.880 --> 00:47.210 +そして私は、 『Dall-E』のシュールなスタイルでAIコーディングを学ぶ生徒でいっぱいの未来的なクラスでそれを促した。 + +00:47.390 --> 00:51.650 +ええと、 それで出てきたのがこれです。 + +00:51.830 --> 01:03.890 +Google Colabを使うことで、 クラウド上で高性能GPUをどれだけ早く使えるかを実感していただけると思います。 + +01:05.480 --> 01:11.690 +そして、 私たちは......私たちの進捗状況を確認する時間を取る。 + +01:11.720 --> 01:13.130 +準備は整った。 + +01:13.130 --> 01:17.390 +あなたはオープンソースの冒険を始めるのにふさわしい位置にいる。 + +01:17.510 --> 01:24.200 +フロンティアAPIを使ったコーディングやマルチモーダルAIアシスタントの構築など、 すでに自信を持ってできることに加えて、 + +01:24.230 --> 01:32.060 +ハギング・フェイスやグーグルコラボをナビゲートできるようになり、 行動の準備は整った。 + +01:32.090 --> 01:39.950 +だから今度オープンソースのモデルを走らせるときは、 ハグフェイスには2つの異なるレベルのAPIがあり、 + +01:39.950 --> 01:43.820 +その意味と内容を理解することになる。 + +01:43.820 --> 01:47.750 +まず、 パイプラインと呼ばれるものから始めよう。 + +01:47.750 --> 01:57.080 +オープンソースのモデルを使ったテキスト、 画像、 音声の生成など、 さまざまなAIタスクにパイプラインを使えるようになる。 + +01:57.110 --> 01:58.220 +待ちきれないよ。 diff --git 
a/week5/community-contributions/subtitles/srts/59169985/ko_KR.srt b/week5/community-contributions/subtitles/srts/59169985/ko_KR.srt new file mode 100755 index 0000000..d7d038e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59169985/ko_KR.srt @@ -0,0 +1,70 @@ +WEBVTT + +00:00.680 --> 00:03.740 +구글 콜랍의 급속한 탐방을 즐기셨길 바라요 + +00:03.740 --> 00:08.240 +얼마나 사용이 쉬운지 스크린샷으로 보여드릴게요 + +00:08.570 --> 00:10.760 +그냥 코드 뭉치를 Put만 하면 돼요 + +00:10.760 --> 00:16.010 +이건 물론 포옹하는 얼굴 코드죠 아주 곧 깊이 다룰 거예요 + +00:16.010 --> 00:25.070 +이 경우 플럭스 모델을 사용했어요 가장 트렌드되는 모델 중 하나란 걸 눈치채셨을 겁니다 얼굴을 + +00:25.070 --> 00:27.620 +껴안는 모델들을 보면요 + +00:27.620 --> 00:35.960 +블랙 포레스트에서 나온 텍스트 이미지 생성 모델로 아주 강력한 오픈 소스 이미지 생성 + +00:35.960 --> 00:40.460 +모델 중 하나라는 점에서 특히 흥미롭죠 + +00:40.880 --> 00:47.210 +초현대적인 달리의 인공지능 코딩을 배우는 학생들로 가득한 미래적인 수업으로 프롬프트했죠 + +00:47.390 --> 00:51.650 +그래서 나온 게 이거예요 정말 훌륭하죠 + +00:51.830 --> 01:00.590 +구글 Colab을 얼마나 빨리 사용할 수 있는지 알 수 있죠 클라우드에서 고성능 + +01:00.590 --> 01:03.890 +GPU와 작업하기 위해서요 + +01:05.480 --> 01:11.690 +이와 함께 진행 상황을 잠시 점검해 보죠 + +01:11.720 --> 01:13.130 +이제 준비됐어요 + +01:13.130 --> 01:17.390 +오픈 소스 모험을 시작하기에 좋은 위치예요 + +01:17.510 --> 01:24.200 +개척형 API로 자신 있게 코딩하고 다중 모듈 인공지능 어시스턴트 제작을 하는 + +01:24.230 --> 01:32.060 +것 외에도 이제는 포옹하는 얼굴과 구글 Colab을 탐색할 수 있습니다 이제 준비가 다 됐죠 + +01:32.090 --> 01:39.950 +다음에 오픈 소스 모델을 실행할 땐 얼굴을 안는 동작엔 두 가지 API 레벨이 있어요 그게 + +01:39.950 --> 01:43.820 +무슨 뜻인지 그게 뭔지 이해하게 될 거예요 + +01:43.820 --> 01:47.750 +이제 첫 번째 것부터 시작할게요 파이프라인이라고 하죠 + +01:47.750 --> 01:53.240 +다양한 인공지능 작업에 대해 파이프라인을 사용할 수 있습니다 오픈 소스 모델을 + +01:53.270 --> 01:57.080 +이용해 텍스트, 이미지, 오디오 생성을 포함해서요 + +01:57.110 --> 01:58.220 +기대되네요 diff --git a/week5/community-contributions/subtitles/srts/59169991/en_US.srt b/week5/community-contributions/subtitles/srts/59169991/en_US.srt new file mode 100755 index 0000000..4884a48 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59169991/en_US.srt @@ -0,0 +1,127 @@ +WEBVTT + +00:01.010 --> 00:03.500 +Okay, so that was your introduction to 
Hugging Face. + +00:03.500 --> 00:10.010 +And now I'm going to turn to a different resource available which is Google Colab. + +00:10.040 --> 00:13.880 +There are a bunch of different alternatives to Google Colab that all do much the same thing, and you + +00:13.880 --> 00:15.470 +can really use any of them. + +00:15.560 --> 00:18.080 +I like Colab in particular for a couple of reasons. + +00:18.170 --> 00:19.910 +One of them is that so many people use it. + +00:20.330 --> 00:24.770 +And another is it's so easy to share, but let's just talk about what it is. + +00:24.800 --> 00:28.400 +So Google Colab, um, it's a it's a few things. + +00:28.400 --> 00:33.440 +But the reason, the main thing that it is, and what we're going to do with it, is the ability to + +00:33.440 --> 00:41.480 +run a Jupyter notebook like the ones we've been using, and run it in the cloud on a Google box, which + +00:41.480 --> 00:50.120 +will have not only a decent CPU, but also a GPU that might be high spec, uh, and uh, in addition + +00:50.120 --> 00:55.670 +to that, the thing that I like about it is that you can share and collaborate your Jupyter notebook + +00:55.670 --> 00:59.270 +with others using the same kind of familiar interface. + +00:59.270 --> 01:01.940 +You can use to share other types of Google Doc. + +01:01.940 --> 01:08.260 +So if, like me, you're very used to using Google Docs and Google Sheets and the like and sharing them + +01:08.260 --> 01:13.750 +and editing them and so on, then it's a very familiar experience to be able to share and collaborate + +01:13.750 --> 01:17.920 +on a Jupyter notebook running in Colab. + +01:18.280 --> 01:21.130 +Uh, and it's also integrated with other Google services. + +01:21.130 --> 01:25.990 +So for example, you can very easily access your own Google Drive if you have data there or something + +01:25.990 --> 01:26.620 +like that. + +01:26.620 --> 01:29.650 +So it's it's nicely part of the Google ecosystem. 
+ +01:29.650 --> 01:33.130 +But as I say, there are a bunch of other offerings. + +01:33.190 --> 01:38.650 +And you can if you if you're using a something that is a competitor to Google Colab and you like it, + +01:38.650 --> 01:40.300 +then by all means use it. + +01:40.300 --> 01:47.080 +Uh, you may have to, uh, copy across the colab that I'll be using in sharing, but otherwise everything + +01:47.110 --> 01:48.730 +should work just fine. + +01:49.120 --> 01:55.420 +When you're using Colab, you get to choose what runtimes you're working with, what kind of box it's, + +01:55.450 --> 01:58.330 +what kind of VM is essentially, uh, running. + +01:58.330 --> 02:04.240 +There are CPU based boxes which don't have a GPU, are just CPUs. + +02:04.240 --> 02:12.080 +There is, uh, there are lower spec boxes running cheaper GPUs, and then there's higher spec, beefier + +02:12.110 --> 02:15.740 +boxes for resource intensive stuff. + +02:16.190 --> 02:23.900 +Everything that we do in this course can run on on up to number two, the lower spec GPU runtimes. + +02:23.900 --> 02:29.900 +I'm going to be trying my absolute best to keep it so that you can do everything and not spend anything, + +02:29.900 --> 02:31.700 +any material amount of money. + +02:31.820 --> 02:37.220 +Um, perhaps at this point, if you, if we if you go as far as training a full deep neural network + +02:37.250 --> 02:43.400 +yourself, we might be starting to talk about, uh, a few dollars, but nothing that's going to break + +02:43.430 --> 02:50.270 +the bank, I hope, uh, unless you wish to take it a step further and train faster, do more experimenting. + +02:50.300 --> 02:56.600 +In which case, you certainly have the ability to opt for number three and spend a little bit more. + +02:56.600 --> 03:02.270 +Uh, and again, we're talking about maybe spending $10 to get a decent, like, a day or two's worth + +03:02.270 --> 03:07.310 +of work, uh, against a top end GPU box. 
+ +03:07.790 --> 03:11.690 +So without further ado, that's a quick intro. + +03:11.690 --> 03:16.400 +Let's go in and take a look at Colab and get comfortable with it. diff --git a/week5/community-contributions/subtitles/srts/59169991/ja_JP.srt b/week5/community-contributions/subtitles/srts/59169991/ja_JP.srt new file mode 100755 index 0000000..b68811e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59169991/ja_JP.srt @@ -0,0 +1,97 @@ +WEBVTT + +00:01.010 --> 00:03.500 +さて、 以上がハギング・フェイスの紹介だった。 + +00:03.500 --> 00:10.010 +そして今度は、 Google Colabという別のリソースを紹介しよう。 + +00:10.040 --> 00:15.470 +Google Colabの代わりとなるものはたくさんあるが、 どれもほとんど同じことができる。 + +00:15.560 --> 00:18.080 +私がColabを特に気に入っている理由はいくつかある。 + +00:18.170 --> 00:19.910 +そのひとつは、 非常に多くの人が利用していることだ。 + +00:20.330 --> 00:24.770 +そしてもうひとつは、 共有するのがとても簡単だということだ。 + +00:24.800 --> 00:28.400 +グーグル・コラボは、 いくつかあるんだ。 + +00:28.400 --> 00:33.440 +Jupyterノートブックは、 + +00:33.440 --> 00:59.270 +CPUだけでなく、 GPUもハイスペックなものを搭載している。 + +00:59.270 --> 01:01.940 +他のタイプのGoogleドキュメントを共有するために使用することができます。 + +01:01.940 --> 01:08.260 +僕のように、 GoogleドキュメントやGoogleシートなどを使い、 それらを共有したり編集したりすることに慣れている人なら、 + +01:08.260 --> 01:13.750 +Colabで動いているJupyterノートブックを共有したり共同作業したりするのは、 + +01:13.750 --> 01:17.920 +とても馴染みのある体験だ。 + +01:18.280 --> 01:21.130 +それに、 グーグルの他のサービスとも統合されている。 + +01:21.130 --> 01:26.620 +例えば、 自分のGoogleドライブにデータがあれば、 簡単にアクセスできる。 + +01:26.620 --> 01:29.650 +つまり、 グーグルのエコシステムの一部なのだ。 + +01:29.650 --> 01:33.130 +しかし、 私が言うように、 他にもたくさんのオファーがある。 + +01:33.190 --> 01:38.650 +そして、 もしGoogle Colabの競合となるものを使っていて、 それが気に入ったのであれば、 + +01:38.650 --> 01:40.300 +ぜひそれを使ってください。 + +01:40.300 --> 01:48.730 +ええと、 僕がシェアリングで使うコラボをコピーする必要があるかもしれないけど、 それ以外はすべてうまくいくはずだよ。 + +01:49.120 --> 01:58.330 +Colabを使うときは、 どんなランタイムを使うか、 どんなボックスか、 どんなVMを動かすかを選ぶことができる。 + +01:58.330 --> 02:04.240 +GPUを搭載していないCPUベースのボックスもある。 + +02:04.240 --> 02:15.740 +安価なGPUを搭載した低スペックのマシンもあれば、 リソースを大量に消費するようなマシン向けの高スペックのマシンもある。 + +02:16.190 --> 02:23.900 +このコースで行うことはすべて、 2番までの低スペックGPUランタイムで実行できる。 + 
+02:23.900 --> 02:29.900 +私は、 あなたが何でもできるように、 そして何も、 どんな物質的なお金も使わないように、 + +02:29.900 --> 02:31.700 +全力を尽くすつもりだ。 + +02:31.820 --> 02:37.220 +おそらくこの時点で、 もしあなた自身が完全なディープ・ニューラル・ネットワークをトレーニングするとしたら、 + +02:37.250 --> 02:50.270 +数ドルの話になるかもしれませんが、 銀行を破たんさせるようなことはないでしょう。 + +02:50.300 --> 02:56.600 +その場合、 3番を選んでもう少し出費を増やすこともできる。 + +02:56.600 --> 03:07.310 +また、 トップエンドのGPUボックスに対して、 1日か2日分の仕事をするのに10ドルくらいかかるかもしれない。 + +03:07.790 --> 03:11.690 +というわけで、 前置きはこれくらいにして、 簡単な自己紹介をしよう。 + +03:11.690 --> 03:16.400 +さっそくColabを見て、 慣れてみよう。 diff --git a/week5/community-contributions/subtitles/srts/59169991/ko_KR.srt b/week5/community-contributions/subtitles/srts/59169991/ko_KR.srt new file mode 100755 index 0000000..b87f934 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59169991/ko_KR.srt @@ -0,0 +1,127 @@ +WEBVTT + +00:01.010 --> 00:03.500 +얼굴 껴안기는 여기까지였어요 + +00:03.500 --> 00:10.010 +이제 다른 리소스로 넘어가죠 구글 Colab이에요 + +00:10.040 --> 00:13.880 +구글 콜랍을 대체할 수 있는 다양한 방법이 있어요 다 똑같은 기능이고 + +00:13.880 --> 00:15.470 +아무거나 사용해도 돼요 + +00:15.560 --> 00:18.080 +콜랍이 좋은 이유가 두 가지 있어요 + +00:18.170 --> 00:19.910 +그중 하나는 너무 많은 사람이 쓴다는 거죠 + +00:20.330 --> 00:24.770 +다른 하나는 공유하기 쉽다는 거예요 하지만 뭔지 얘기해보죠 + +00:24.800 --> 00:28.400 +구글 콜랍은 몇 가지에 해당해요 + +00:28.400 --> 00:33.440 +하지만 이 제품의 주요한 이유이자 앞으로 할 일은 우리가 + +00:33.440 --> 00:41.480 +사용한 것처럼 Jupyter 노트북을 실행하는 겁니다 구글 박스의 클라우드에서 + +00:41.480 --> 00:50.120 +실행하면 괜찮은 CPU뿐 아니라 고사양 GPU도 갖추게 되죠 그뿐 아니라 Jupyter + +00:50.120 --> 00:55.670 +노트북을 다른 제품과 공유하고 협력할 수 있다는 게 좋아요 익숙한 + +00:55.670 --> 00:59.270 +인터페이스를 사용해서요 + +00:59.270 --> 01:01.940 +다른 유형의 구글 문서를 공유할 때 사용할 수 있죠 + +01:01.940 --> 01:08.260 +저처럼 구글 문서나 구글 시트를 사용하는 데 익숙하고 공유하고 + +01:08.260 --> 01:13.750 +편집하는 데 익숙하다면 콜랍에 있는 주피터 공책에 공유하고 + +01:13.750 --> 01:17.920 +협업하는 건 아주 익숙한 경험이죠 + +01:18.280 --> 01:21.130 +다른 구글 서비스와도 통합돼 있어요 + +01:21.130 --> 01:25.990 +예를 들어, 데이터가 있다면 구글 드라이브에 쉽게 접근할 + +01:25.990 --> 01:26.620 +수 있죠 + +01:26.620 --> 01:29.650 +구글 생태계의 멋진 일부죠 + +01:29.650 --> 01:33.130 +하지만 말씀드렸듯이 다른 
공물도 많아요 + +01:33.190 --> 01:38.650 +여러분이 구글 Colab의 경쟁사 제품을 사용하고 있고 그게 마음에 든다면 + +01:38.650 --> 01:40.300 +얼마든지 사용하세요 + +01:40.300 --> 01:47.080 +공유에 사용할 콜라브를 복사해야 할 수도 있지만 그것만 빼면 다 + +01:47.110 --> 01:48.730 +괜찮을 거예요 + +01:49.120 --> 01:55.420 +Colab을 사용할 때는 어떤 런타임을 사용할 것인지 어떤 종류의 박스를 사용할 것인지 어떤 종류의 + +01:55.450 --> 01:58.330 +VM을 실행할 것인지 선택해야 하죠 + +01:58.330 --> 02:04.240 +CPU 기반의 박스는 GPU가 없고 그냥 CPU죠 + +02:04.240 --> 02:12.080 +저렴한 GPU를 사용하는 저사양 박스도 있고 자원 집약적인 엔진을 위한 + +02:12.110 --> 02:15.740 +고사양의 튼튼한 박스도 있죠 + +02:16.190 --> 02:23.900 +이 코스에서 하는 모든 건 2번까지 실행할 수 있습니다 하위 사양 GPU 런타임이죠 + +02:23.900 --> 02:29.900 +최선을 다해서 가게를 지킬 테니 재료비 걱정은 하지 말고 필요한 + +02:29.900 --> 02:31.700 +건 뭐든 하세요 + +02:31.820 --> 02:37.220 +이 시점에서 당신이 직접 심층 신경망을 훈련한다면 + +02:37.250 --> 02:43.400 +몇 달러 정도 들겠지만 큰돈은 안 들 거예요 한 단계 더 나아가서 + +02:43.430 --> 02:50.270 +더 빨리 훈련하고 더 많은 실험을 하고 싶지 않다면요 + +02:50.300 --> 02:56.600 +3번째 비트를 선택하고 좀 더 많은 돈을 쓸 수 있죠 + +02:56.600 --> 03:02.270 +다시 말씀드리지만 10달러 정도 들여서 하루 이틀 정도 + +03:02.270 --> 03:07.310 +작업하면 최고의 GPU 박스가 완성될 거예요 + +03:07.790 --> 03:11.690 +그럼 지체 없이 간단히 소개를 마칠게요 + +03:11.690 --> 03:16.400 +콜랍을 살펴보고 익숙해지도록 하죠 Get diff --git a/week5/community-contributions/subtitles/srts/59170025/en_US.srt b/week5/community-contributions/subtitles/srts/59170025/en_US.srt new file mode 100755 index 0000000..82626c4 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170025/en_US.srt @@ -0,0 +1,163 @@ +WEBVTT + +00:00.740 --> 00:05.000 +And a massive welcome back one more time to LM engineering. + +00:05.000 --> 00:10.220 +We are in week three, day two and we are getting into open source models. + +00:10.370 --> 00:14.960 +So as a reminder you can already do frontier models back to front. + +00:14.960 --> 00:16.940 +You can build multimodal AI assistants. + +00:16.940 --> 00:22.940 +And now you're comfortable looking at the hugging face hub, looking at models and data sets and spaces. + +00:22.940 --> 00:26.690 +And you can run code using Google Colab. 
+ +00:27.020 --> 00:33.080 +So today we're going to look at Hugging Face Transformers library and discuss the fact that there are + +00:33.080 --> 00:39.950 +two different types of API, two different levels that you can work with transformers at one level, + +00:39.980 --> 00:46.250 +the higher level API is called pipelines, and that's what we'll be working with mostly today, including + +00:46.250 --> 00:50.810 +generating text, images and sound using pipelines. + +00:51.080 --> 00:56.570 +So let's just talk for a moment about these two different API levels. + +00:56.570 --> 01:02.770 +So there are these two modes of interacting with the hugging face code. + +01:02.770 --> 01:10.060 +One of them is if you want to carry out a standard, everyday typical task in what we'd call inference, + +01:10.060 --> 01:14.860 +or running a model at runtime given an input to get an output. + +01:14.860 --> 01:21.550 +And hugging face has wonderfully packaged this up into a high level interface that's super easy to use, + +01:21.550 --> 01:29.080 +and that provides you with a rapid way to get going, generating text, and doing a number of everyday + +01:29.080 --> 01:30.130 +functions. + +01:30.580 --> 01:37.360 +But if you want to get deeper into the code, if you want to be looking in more detail at things like + +01:37.360 --> 01:44.380 +how how you are tokenizing your text at which models and which parameters you're using to run a model, + +01:44.380 --> 01:50.500 +or if you're actually going to go as far as training and be fine tuning your own model to carry out + +01:50.530 --> 01:54.010 +specialist tasks with extra knowledge or nuance. + +01:54.010 --> 02:00.820 +At that point, you need to look at the deeper APIs, the lower level APIs, working with Tokenizers + +02:00.820 --> 02:02.800 +and models in Hugging Face. + +02:02.830 --> 02:05.260 +Today we're going to be looking at pipelines. 
+ +02:05.260 --> 02:10.420 +And then after that we're going to turn to the Tokenizers and models. + +02:11.080 --> 02:13.060 +So what can you do with these pipelines? + +02:13.060 --> 02:22.360 +So essentially it allows you to take instant advantage of models on the Hugging face hub with two lines + +02:22.360 --> 02:22.960 +of code. + +02:22.960 --> 02:24.340 +It's as simple as that. + +02:24.340 --> 02:28.330 +And I'm going to give you lots of examples and lots of things you can take away so that you can use + +02:28.330 --> 02:32.800 +it yourself to carry out every day inference tasks. + +02:32.800 --> 02:37.720 +So one classic example, which is one of the easiest ones to start with, is what they call sentiment + +02:37.720 --> 02:38.350 +analysis. + +02:38.380 --> 02:43.570 +Given a sentence saying what is the emotion conveyed by this sentence? + +02:44.380 --> 02:50.740 +Uh, then classification, of course, is one of those very traditional machine learning tasks of putting + +02:50.740 --> 02:52.450 +things into buckets. + +02:52.660 --> 03:00.160 +Named entity recognition is when you can take a sentence and tag the words in that sentence as things + +03:00.160 --> 03:04.630 +like whether they are people or whether they are locations or things and so on. + +03:04.970 --> 03:11.900 +Question answering is when you have some context and you want to be able to ask questions about the + +03:11.900 --> 03:13.610 +context that you provide. + +03:13.640 --> 03:20.210 +Summarization, of course, is when you have a block of text and you want to turn it into a summary + +03:20.660 --> 03:21.710 +translation. + +03:21.740 --> 03:26.870 +Another classic AI task translating between one language and another. + +03:26.900 --> 03:32.060 +So what if I told you that all of these things can be done with two lines of code each? + +03:32.330 --> 03:37.370 +Hopefully you would be amazed and you will see it in a moment. 
+ +03:37.490 --> 03:43.460 +There are some other things you can do as well that become perhaps slightly more advanced. + +03:43.580 --> 03:45.740 +Text generation actually isn't advanced at all. + +03:45.740 --> 03:47.120 +It's still just two lines of code. + +03:47.120 --> 03:52.760 +It's still super simple, and it's another thing that you will marvel at. + +03:53.210 --> 03:57.470 +But generating images is also very simple, as is audio. + +03:57.470 --> 04:02.810 +It becomes a little bit more than two lines, but it's still very simple and I can't wait to show you. + +04:02.840 --> 04:04.760 +I think that's enough preamble. + +04:04.760 --> 04:05.720 +Let's get straight to it. + +04:05.720 --> 04:07.340 +Let's go to Google Colab. diff --git a/week5/community-contributions/subtitles/srts/59170025/ja_JP.srt b/week5/community-contributions/subtitles/srts/59170025/ja_JP.srt new file mode 100755 index 0000000..12a0093 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170025/ja_JP.srt @@ -0,0 +1,136 @@ +WEBVTT + +00:00.740 --> 00:05.000 +そして、 LMエンジニアリングにもう一度大歓迎を。 + +00:05.000 --> 00:10.220 +3週目、 2日目に入り、 オープンソースのモデルに入っている。 + +00:10.370 --> 00:14.960 +つまり、 すでにフロンティア・モデルのバック・トゥ・フロントが可能なのだ。 + +00:14.960 --> 00:16.940 +マルチモーダルなAIアシスタントを構築できる。 + +00:16.940 --> 00:22.940 +そして今、 あなたはハグする顔のハブを見たり、 モデルやデータセットやスペースを見たりすることに快適さを感じている。 + +00:22.940 --> 00:26.690 +また、 Google Colabを使ってコードを実行することもできる。 + +00:27.020 --> 00:33.080 +そこで今日は、 Hugging Face Transformersライブラリを見て、 2つの異なるタイプのAPIがあること、 + +00:33.080 --> 00:39.950 +1つのレベルでトランスフォーマーを扱うことができる2つの異なるレベルがあること、 より高いレベルのAPIはパイプラインと呼ばれ、 + +00:39.980 --> 00:50.810 +パイプラインを使ったテキスト、 画像、 サウンドの生成など、 今日主に扱うのはこれだということを説明する。 + +00:51.080 --> 00:56.570 +では、 この2つの異なるAPIレベルについて少し話をしよう。 + +00:56.570 --> 01:02.770 +つまり、 ハグする顔のコードと相互作用する2つのモードがあるのだ。 + +01:02.770 --> 01:10.060 +そのひとつは、 推論と呼ばれるような、 標準的で日常的な典型的なタスクを実行したい場合、 つまり、 + +01:10.060 --> 01:14.860 +入力が与えられて実行時にモデルを実行して出力を得たい場合だ。 + +01:14.860 --> 01:21.550 +そしてハギング・フェイスは、 
これを素晴らしく使いやすい高レベルのインターフェイスにパッケージ化し、 + +01:21.550 --> 01:30.130 +テキストを生成し、 多くの日常的な機能を実行するための迅速な方法を提供する。 + +01:30.580 --> 01:37.360 +しかし、 もしあなたがコードにもっと深く入り込みたいのであれば、 どのモデルをどのようにトークン化し、 どのパラメータを使ってモデルを実行しているのか、 + +01:37.360 --> 01:44.380 +あるいは実際にトレーニングまで行って、 専門的なタスクを実行するために独自のモデルをファインチューニングし、 + +01:44.380 --> 01:54.010 +特別な知識やニュアンスを身につけたいのであれば、 そのようなことをもっと詳しく調べたいでしょう。 + +01:54.010 --> 02:02.800 +その時点で、 より深いAPI、 より低レベルのAPI、 トーケナイザーやハギング・フェイスのモデルを扱うことに目を向ける必要がある。 + +02:02.830 --> 02:05.260 +今日はパイプラインについて見ていこう。 + +02:05.260 --> 02:10.420 +そしてそのあとは、 トーケナイザーとモデルの話に移る。 + +02:11.080 --> 02:13.060 +では、 このパイプラインを使って何ができるのか? + +02:13.060 --> 02:22.960 +そのため、 基本的には2行のコードで、 ハギング・フェイス・ハブのモデルを即座に利用することができる。 + +02:22.960 --> 02:24.340 +簡単なことだ。 + +02:24.340 --> 02:28.330 +そして、 あなたが毎日の推論作業に使えるように、 たくさんの例と、 + +02:28.330 --> 02:32.800 +あなたが持ち帰ることができるものをたくさん紹介するつもりだ。 + +02:32.800 --> 02:38.350 +典型的な例としては、 センチメント分析と呼ばれるものがある。 + +02:38.380 --> 02:43.570 +ある文章が与えられたとき、 この文章から伝わってくる感情は何か? + +02:44.380 --> 02:52.450 +分類は、 もちろん、 物事をバケツに分類するという、 非常に伝統的な機械学習タスクのひとつだ。 + +02:52.660 --> 03:00.160 +名前付きエンティティ認識とは、 ある文章に含まれる単語を、 人なのか、 場所なのか、 + +03:00.160 --> 03:04.630 +物なのか、 といったようにタグ付けすることだ。 + +03:04.970 --> 03:13.610 +質問応答とは、 何らかの文脈があり、 提供した文脈について質問できるようにしたい場合である。 + +03:13.640 --> 03:21.710 +要約はもちろん、 テキストブロックがあり、 それを要約翻訳にしたい場合である。 + +03:21.740 --> 03:26.870 +もうひとつの古典的なAIタスクは、 ある言語と別の言語の間の翻訳である。 + +03:26.900 --> 03:32.060 +では、 これらのことがそれぞれ2行のコードでできると言ったらどうだろう? 
+ +03:32.330 --> 03:37.370 +願わくば驚かれることを願っています。 + +03:37.490 --> 03:43.460 +その他にも、 少し高度なこともできる。 + +03:43.580 --> 03:45.740 +テキスト生成は実はまったく進歩していない。 + +03:45.740 --> 03:47.120 +たった2行のコードだ。 + +03:47.120 --> 03:52.760 +それでも超シンプルで、 これまた驚嘆することだろう。 + +03:53.210 --> 03:57.470 +しかし、 画像の生成もオーディオと同様、 非常に簡単だ。 + +03:57.470 --> 04:02.810 +2行より少し多くなりますが、 それでもとてもシンプルなので、 早くお見せしたいです。 + +04:02.840 --> 04:04.760 +前置きはこれくらいにしておこう。 + +04:04.760 --> 04:05.720 +本題に入ろう。 + +04:05.720 --> 04:07.340 +グーグルコラボに行こう。 diff --git a/week5/community-contributions/subtitles/srts/59170025/ko_KR.srt b/week5/community-contributions/subtitles/srts/59170025/ko_KR.srt new file mode 100755 index 0000000..ca644ac --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170025/ko_KR.srt @@ -0,0 +1,154 @@ +WEBVTT + +00:00.740 --> 00:05.000 +LM 엔지니어링에 다시 한번 큰 박수를 보내주세요 + +00:05.000 --> 00:10.220 +3주 차, 2일째입니다 오픈 소스 모델로 들어가고 있죠 + +00:10.370 --> 00:14.960 +다시 말씀드리지만 개척 시대 모델은 이미 거꾸로 할 수 있어요 + +00:14.960 --> 00:16.940 +다중 모듈 인공지능 보조를 만들 수 있어요 + +00:16.940 --> 00:22.940 +이제 안는 얼굴 허브를 편하게 볼 수 있습니다 모델, 데이터 세트, 공간을 보는 거죠 + +00:22.940 --> 00:26.690 +구글 콜라브로 코드를 실행할 수 있어요 + +00:27.020 --> 00:33.080 +오늘은 얼굴 트랜스포머 껴안기 라이브러리를 살펴보고 API 종류에 대해 + +00:33.080 --> 00:39.950 +얘기해 볼게요 두 가지 레벨로 트랜스포머와 작업할 수 있어요 더 높은 레벨은 파이프라인이라는 + +00:39.980 --> 00:46.250 +API로 오늘 주로 작업할 거예요 파이프라인을 이용해 텍스트, 이미지, + +00:46.250 --> 00:50.810 +소리를 생성하는 걸 포함해서요 + +00:51.080 --> 00:56.570 +잠시 다른 API 레벨에 대해 얘기해보죠 + +00:56.570 --> 01:02.770 +안는 얼굴 코드와 상호 작용하는 방식은 두 가지예요 + +01:02.770 --> 01:10.060 +그 중 하나는 표준을 수행하고 싶을 때죠 추론이라는 것을 위한 매일의 전형적인 작업이나 런타임에 + +01:10.060 --> 01:14.860 +모델을 실행할 때요 입력값을 받아 getput을 얻는 거죠 + +01:14.860 --> 01:21.550 +얼굴을 안는 방법은 패키지로 아주 쉽게 상위 레벨 인터페이스에 넣을 수 있게 해줍니다. + +01:21.550 --> 01:30.130 +빠르게 진행할 수 있게 해줍니다. 텍스트를 생성하고 일상적인 함수를 수행할 수도 있어요. 
+ +01:30.580 --> 01:37.360 +코드를 좀 더 깊이 파고들고 싶다면 예를 들어, 어떻게 텍스트를 토큰화하고 + +01:37.360 --> 01:44.380 +어떤 모델과 어떤 매개 변수를 모델 실행에 사용할지 알고 싶다면요 혹은 훈련을 + +01:44.380 --> 01:50.500 +통해 자신의 모델을 잘 조정해서 추가적인 지식이나 뉘앙스를 가지고 + +01:50.530 --> 01:54.010 +특수한 작업을 수행할 수 있다면요 + +01:54.010 --> 02:00.820 +그땐 더 깊은 API를 살펴봐야 합니다 하위 레벨 API요 토큰라이저와 포옹하는 얼굴 + +02:00.820 --> 02:02.800 +모델과 작업하는 거죠 + +02:02.830 --> 02:05.260 +오늘은 파이프라인을 살펴볼 거예요 + +02:05.260 --> 02:10.420 +그런 다음 토큰라이저와 모델로 넘어가죠 + +02:11.080 --> 02:13.060 +파이프라인을 어떻게 할 수 있을까요? + +02:13.060 --> 02:22.960 +즉, 두 줄의 코드로 얼굴 허브에서 모델을 즉각적으로 이용할 수 있게 해주는 것이죠 + +02:22.960 --> 02:24.340 +아주 간단해요 + +02:24.340 --> 02:28.330 +많은 예제를 제공할 거예요 여러분이 가져갈 수 있는 많은 + +02:28.330 --> 02:32.800 +것도요 매일 추론 작업을 수행하는 데 직접 사용할 수 있도록요 + +02:32.800 --> 02:38.350 +가장 쉬운 것 중 하나인 전형적인 예가 바로 정서 분석이에요 + +02:38.380 --> 02:43.570 +이 문장이 전달하는 감정은 무엇인지 묻는 문장이죠 + +02:44.380 --> 02:50.740 +분류는 물론 아주 전통적인 머신 러닝 과제입니다 물건을 양동이에 + +02:50.740 --> 02:52.450 +담는 거죠 + +02:52.660 --> 03:00.160 +개체 인식이라는 것은 문장을 보고 그 문장의 단어를 사람이냐 장소냐 + +03:00.160 --> 03:04.630 +같은 것으로 태그할 수 있는 거예요 + +03:04.970 --> 03:11.900 +질문 답변은 어떤 컨텍스트가 있는데 여러분이 제공하는 컨텍스트에 관해 질문할 + +03:11.900 --> 03:13.610 +수 있어야 할 때죠 + +03:13.640 --> 03:20.210 +요약은 텍스트 블록이 있을 때 요약 번역으로 바꾸는 + +03:20.660 --> 03:21.710 +거예요 + +03:21.740 --> 03:26.870 +한 언어를 다른 언어로 번역하는 인공지능의 전형적인 작업이죠 + +03:26.900 --> 03:32.060 +이 모든 게 각각 코드 2줄로 가능하다면 어떨까요? 
+
+03:32.330 --> 03:37.370
+잠시 후에 깜짝 놀라실 거예요
+
+03:37.490 --> 03:43.460
+좀 더 발전된 다른 기능도 할 수 있어요
+
+03:43.580 --> 03:45.740
+문자 생성은 사실 전혀 발전하지 않았어요
+
+03:45.740 --> 03:47.120
+여전히 코드 2줄일 뿐이죠
+
+03:47.120 --> 03:52.760
+여전히 아주 간단해요 놀랄 만한 또 다른 거죠
+
+03:53.210 --> 03:57.470
+이미지 생성 역시 간단합니다 오디오도 마찬가지죠
+
+03:57.470 --> 04:02.810
+코드가 두 줄보다 조금 많아지지만 아주 간단해요 빨리 보여드리고 싶네요
+
+04:02.840 --> 04:04.760
+서론은 그만하면 됐어요
+
+04:04.760 --> 04:05.720
+바로 본론으로 들어가죠
+
+04:05.720 --> 04:07.340
+구글 콜랍으로 가보죠
diff --git a/week5/community-contributions/subtitles/srts/59170037/en_US.srt b/week5/community-contributions/subtitles/srts/59170037/en_US.srt
new file mode 100755
index 0000000..3dec832
--- /dev/null
+++ b/week5/community-contributions/subtitles/srts/59170037/en_US.srt
@@ -0,0 +1,70 @@
+WEBVTT
+
+00:00.410 --> 00:06.830
+So how does it feel to be 30% of the way down the journey to being a proficient LLM engineer?
+
+00:06.860 --> 00:12.020
+Take a moment to congratulate yourself on a big accomplishment and a lot of progress.
+
+00:12.110 --> 00:13.850
+And hopefully you have that sense.
+
+00:13.850 --> 00:19.310
+You have that feeling that you are up skilling, that you can do so much more than you could just a
+
+00:19.310 --> 00:22.160
+matter of days ago, and it's going to keep being that way.
+
+00:22.160 --> 00:26.120
+We're going to keep building and building on the skills and knowledge that you're acquiring.
+
+00:26.120 --> 00:28.790
+So you're able to do more and more.
+
+00:28.820 --> 00:32.780
+But again, what you can already do, you can already confidently code with frontiers.
+
+00:32.780 --> 00:36.410
+You can build multimodal AI assistants using tools.
+
+00:36.410 --> 00:40.670
+And now and now you're familiar with hugging face pipelines.
+
+00:40.670 --> 00:49.310
+And you can use pipelines to run inference tasks across a wide variety of different common machine learning
+
+00:49.340 --> 00:50.390
+tasks.
+
+00:50.840 --> 01:00.620
+Next time, next time we get below into the lower level Transformers API as we start to work with Tokenizers,
+
+01:00.650 --> 01:05.690
+we've of course already spent some time talking about tokens, and we looked at GPT's tokenizer through
+
+01:05.690 --> 01:06.800
+the web user interface.
+
+01:06.830 --> 01:13.190
+Now we're going to actually use code to translate text to tokens and back again.
+
+01:13.190 --> 01:16.550
+And as part of that we're going to understand things like special tokens.
+
+01:16.550 --> 01:22.550
+I remember I had a sidebar, uh, ramble about some time ago now, but it's going to all come together.
+
+01:22.550 --> 01:23.450
+It's going to be worth it.
+
+01:23.450 --> 01:26.060
+That seed I planted is going to come together.
+
+01:26.060 --> 01:31.070
+When we look at what tokens look like for what gets passed into an LLM.
+
+01:31.070 --> 01:37.610
+And then also when we look at these things called chat templates, all of this is going to be extremely
+
+01:37.610 --> 01:41.660
+important foundation material, and I look forward to going through it with you next time.
diff --git a/week5/community-contributions/subtitles/srts/59170037/ja_JP.srt b/week5/community-contributions/subtitles/srts/59170037/ja_JP.srt
new file mode 100755
index 0000000..af37051
--- /dev/null
+++ b/week5/community-contributions/subtitles/srts/59170037/ja_JP.srt
@@ -0,0 +1,58 @@
+WEBVTT
+
+00:00.410 --> 00:06.830
+では、 熟達したLLMエンジニアになるための道のりの30%を歩んでいる今、 どのように感じているのだろうか?
+ +00:06.860 --> 00:12.020 +大きな達成と多くの進歩について、 自分自身を祝福するひとときを過ごしてください。 + +00:12.110 --> 00:13.850 +そして願わくば、 あなたにもその感覚を持っていてほしい。 + +00:13.850 --> 00:22.160 +数日前の自分よりずっと多くのことができるようになったという実感がある。 + +00:22.160 --> 00:26.120 +私たちは、 あなたが身につけている技術や知識をどんどん積み上げていくつもりです。 + +00:26.120 --> 00:28.790 +だから、 どんどんできることが増えていく。 + +00:28.820 --> 00:32.780 +しかし、 繰り返しになるが、 すでにできることは、 フロンティアで自信を持ってコーディングできる。 + +00:32.780 --> 00:36.410 +ツールを使ってマルチモーダルAIアシスタントを構築できる。 + +00:36.410 --> 00:40.670 +そして今も昔も、 ハグする顔のパイプラインはお馴染みだ。 + +00:40.670 --> 00:50.390 +また、 パイプラインを使って、 さまざまな一般的な機械学習タスクの推論タスクを実行することができる。 + +00:50.840 --> 01:00.620 +次回は、 より低レベルのTransformers APIに入り、 Tokenizersを扱い始めます。 もちろん、 トークンについてはすでに時間を費やしていますし、 + +01:00.650 --> 01:06.800 +Gptsトークナイザーをウェブ・ユーザー・インターフェイスを通して見てきました。 + +01:06.830 --> 01:13.190 +では、 実際にコードを使ってテキストをトークンに変換し、 また元に戻してみよう。 + +01:13.190 --> 01:16.550 +その一環として、 私たちは特別なトークンのようなものを理解しようとしている。 + +01:16.550 --> 01:22.550 +少し前にサイドバーで、 ええと、 とりとめのない話をしたのを覚えているんだけど、 全部まとまりそうなんだ。 + +01:22.550 --> 01:23.450 +それだけの価値がある + +01:23.450 --> 01:26.060 +私が蒔いた種が結実するんだ。 + +01:26.060 --> 01:31.070 +LLMに渡されるトークンがどのようなものかを見てみよう。 + +01:31.070 --> 01:41.660 +そして、 チャット・テンプレートと呼ばれるものを見るときにも、 これらすべてが非常に重要な基礎資料となるでしょう。 diff --git a/week5/community-contributions/subtitles/srts/59170037/ko_KR.srt b/week5/community-contributions/subtitles/srts/59170037/ko_KR.srt new file mode 100755 index 0000000..af5eae9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170037/ko_KR.srt @@ -0,0 +1,70 @@ +WEBVTT + +00:00.410 --> 00:06.830 +능숙한 LLM 엔지니어가 되기까지 30% 정도 성장한 기분이 어떤가요? 
+ +00:06.860 --> 00:12.020 +큰 성과를 거두고 큰 진전을 이룬 걸 축하하는 시간을 가져요 + +00:12.110 --> 00:13.850 +여러분도 그런 걸 느끼셨으면 해요 + +00:13.850 --> 00:19.310 +기술이 좋아진 것 같고 며칠 전보다 훨씬 많은 걸 할 수 있을 + +00:19.310 --> 00:22.160 +것 같고 앞으로도 그럴 거예요 + +00:22.160 --> 00:26.120 +여러분이 습득하는 기술과 지식을 계속 발전시킬 거예요 + +00:26.120 --> 00:28.790 +그래서 더 많은 걸 할 수 있죠 + +00:28.820 --> 00:32.780 +하지만 이미 할 수 있는 건 이미 개척지를 이용해 자신감 있게 코드를 작성할 수 있죠 + +00:32.780 --> 00:36.410 +도구를 이용해 다중 모듈 인공지능 보조를 만들 수 있죠 + +00:36.410 --> 00:40.670 +이제 페이스 파이프라인을 껴안는 게 익숙해졌죠 + +00:40.670 --> 00:49.310 +파이프라인을 이용해 추론 작업을 실행할 수 있습니다 다양하고 공통적인 머신 러닝 작업들에 + +00:49.340 --> 00:50.390 +걸쳐서요 + +00:50.840 --> 01:00.620 +다음 시간에는 낮은 레벨의 트랜스포머 API에서 토큰라이저를 다룰 겁니다 토큰에 대해 이미 얘기했고 + +01:00.650 --> 01:05.690 +웹 사용자 인터페이스를 통해 Gpts 토큰라이저를 + +01:05.690 --> 01:06.800 +살펴봤죠 + +01:06.830 --> 01:13.190 +이제 코드를 이용해서 텍스트를 토큰으로 변환하고 다시 돌아오도록 하죠 + +01:13.190 --> 01:16.550 +그 일부로 특별한 토큰 같은 걸 이해하게 될 거예요 + +01:16.550 --> 01:22.550 +예전에 잠깐 잡담도 했는데 곧 다 해결될 거예요 + +01:22.550 --> 01:23.450 +보람이 있을 거예요 + +01:23.450 --> 01:26.060 +내가 심은 씨앗이 합쳐질 거예요 + +01:26.060 --> 01:31.070 +LLM으로 전달되는 토큰의 모양을 살펴보죠 + +01:31.070 --> 01:37.610 +채팅 템플릿이라는 것도 살펴보면 이 모든 게 아주 중요한 기본 자료가 될 겁니다 + +01:37.610 --> 01:41.660 +다음 시간에도 함께 살펴보고 싶네요 diff --git a/week5/community-contributions/subtitles/srts/59170043/en_US.srt b/week5/community-contributions/subtitles/srts/59170043/en_US.srt new file mode 100755 index 0000000..4c9515a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170043/en_US.srt @@ -0,0 +1,412 @@ +WEBVTT + +00:01.490 --> 00:08.720 +Let me enthusiastically welcome you all back to week three of our LLM engineering journey. + +00:08.750 --> 00:15.140 +If you enjoyed last week when we got deep into building user interfaces using the fabulous Gradio framework, + +00:15.170 --> 00:21.290 +then you're going to love this week even more, because now it's time to get into open source and start + +00:21.320 --> 00:24.500 +using the wonderful world of Huggingface. 
+ +00:24.830 --> 00:28.340 +But first, a quick recap as always on what you can already do. + +00:28.370 --> 00:33.260 +You can describe Transformers and you are fluent in the key terminology. + +00:33.290 --> 00:38.750 +You can talk about context windows until the cows come home and all of that. + +00:38.780 --> 00:44.210 +You can confidently code whether it's with Gemini or Claude or with OpenAI. + +00:44.240 --> 00:45.680 +You know the APIs. + +00:45.680 --> 00:49.820 +You know how to stream, you know about markdown, you know about JSON responses. + +00:49.940 --> 00:53.330 +And you can also build an AI assistant, a chatbot. + +00:53.360 --> 00:55.190 +You can make it use tools. + +00:55.190 --> 01:00.260 +You can make it use different agents, and you can make it multimodal. + +01:00.380 --> 01:02.330 +And we've built one ourselves. + +01:02.330 --> 01:04.400 +And hopefully you've extended it to. + +01:04.400 --> 01:04.400 +too. + +01:05.060 --> 01:06.590 +So what's happening today? + +01:06.620 --> 01:09.080 +Today we're going to get into hugging face. + +01:09.080 --> 01:14.630 +And to start with, you're just going to be able to describe what it is and the scope and scale of hugging + +01:14.630 --> 01:14.930 +face. + +01:14.930 --> 01:18.260 +One of the most remarkable things about hugging face is its breadth. + +01:18.260 --> 01:24.140 +All the different things that it offers to the open source data science community, and you'll have + +01:24.140 --> 01:26.990 +a good appreciation for that shortly. + +01:27.320 --> 01:33.650 +Uh, we're going to look at models, data sets and spaces in hugging face, and you'll also have a good + +01:33.650 --> 01:35.510 +understanding of Google Colab. + +01:35.510 --> 01:39.410 +You may already have an understanding of Google Colab, in which case it'll be a quick revision point. + +01:39.410 --> 01:41.840 +But for those that don't, we're going to go into it. 
+ +01:41.870 --> 01:47.810 +You're going to see how you can run code on a box with a good GPU, and you'll have a sense of the different + +01:47.840 --> 01:50.270 +offerings out there and which ones we'll be using for the class. + +01:50.270 --> 01:51.980 +So we'll get you set up. + +01:51.980 --> 01:55.550 +So prepare for some open source stuff. + +01:55.550 --> 02:02.900 +But first, as always, a quick recap on what's been going on, where we are and what's left to do. + +02:02.930 --> 02:09.510 +We started on the left with uh, at the beginning, no LM engineering knowledge, we will end up on + +02:09.510 --> 02:12.750 +the right as proficient LM engineers. + +02:12.750 --> 02:16.980 +In week one, we got immersed in all things frontier. + +02:16.980 --> 02:18.060 +In week two. + +02:18.090 --> 02:20.250 +Last week we built UIs. + +02:20.250 --> 02:26.070 +We used all of the APIs for the top three and we experimented with tools. + +02:26.100 --> 02:31.500 +Agent ization Multi-modality this week, all about open source, all about hugging face. + +02:31.530 --> 02:37.500 +Next week we talk about selecting the right LM for the problem and generating code. + +02:37.530 --> 02:39.480 +After that is Rag week. + +02:39.510 --> 02:47.040 +Then we fine tune a frontier model, then we fine tune an open source model, and then in the finale + +02:47.040 --> 02:48.450 +we bring it all home. + +02:49.830 --> 02:54.150 +So without further ado, let's talk hugging face. + +02:54.540 --> 02:56.670 +So as I say, it's ubiquitous. + +02:56.700 --> 02:59.280 +It's it's used across the community. + +02:59.310 --> 03:01.770 +It is a fabulous resource. + +03:01.980 --> 03:09.780 +And amongst many things, it offers us three the hugging face platform, the the what you get to if + +03:09.780 --> 03:12.900 +you go to Hugging Face Co and you've signed up with an account. + +03:12.900 --> 03:16.890 +You have access to three categories of things. 
+ +03:16.890 --> 03:25.860 +First of all, you have models over 800,000 open source models that can do a bunch of different types + +03:25.860 --> 03:31.080 +of tasks, many of which we will experiment with in this week's lectures. + +03:31.080 --> 03:35.010 +And in future weeks, there are data sets. + +03:35.010 --> 03:41.880 +It is a treasure trove, over 200,000 data sets covering almost any problem that you can think of. + +03:41.910 --> 03:44.070 +You can try searching and see what you find. + +03:44.100 --> 03:49.470 +We're going to be using one particularly amazing data set later in this course. + +03:49.500 --> 03:54.030 +But but you will find lots of data to to solve your problems. + +03:54.270 --> 04:00.150 +Um, it's similar to to the platform Kaggle which is much more focused on the data side of things. + +04:00.150 --> 04:05.550 +But you have such a huge resource of that data within hugging face. + +04:06.000 --> 04:13.050 +And then hugging face also has something called spaces, which is where you can write an app and expose + +04:13.050 --> 04:13.560 +that app. + +04:13.590 --> 04:20.680 +Have it running on hugging face cloud hardware and and available for other people to use. + +04:20.680 --> 04:26.530 +As long as you're you're happy for your code to be open source, because that is the the you know, + +04:26.560 --> 04:28.360 +that's what Hugging Face is all about. + +04:28.630 --> 04:35.110 +Uh, so spaces are many of the spaces apps are written built in Gradio. + +04:35.110 --> 04:36.910 +So they are gradio apps. + +04:37.060 --> 04:38.890 +Um, there are things that are not gradio apps. + +04:38.890 --> 04:43.660 +There's something called Streamlit, which is another way to build apps that is also quite magical. + +04:43.660 --> 04:45.670 +Different to Gradio, quite magical. + +04:45.730 --> 04:48.640 +Um, and there are some other ways that you can publish apps as well. 
+ +04:48.700 --> 04:51.520 +Uh, but I'd say Gradio is probably the most common that's there. + +04:51.520 --> 04:59.230 +And there's in particular things called leaderboards, which are gradio apps whose job it is to evaluate + +04:59.230 --> 05:02.650 +different llms and rank them and show them in a kind of scorecard. + +05:02.680 --> 05:07.300 +We're going to be using leaderboards a lot when we look at comparing different llms and, but we'll + +05:07.330 --> 05:11.590 +be seeing some of them today as well as we look at huggingface spaces. + +05:12.190 --> 05:18.610 +So that's the Huggingface platform, which is what you get to if you go to Huggingface Co and log in + +05:18.610 --> 05:20.230 +and start looking at what's out there. + +05:20.260 --> 05:28.240 +Hugging face also offers libraries code, which forms the basis of many of our open source projects. + +05:28.870 --> 05:35.140 +And the libraries give us this amazing head start in what we want to do. + +05:35.170 --> 05:41.230 +It brings time to market much lower, because you can just be off and running very quickly with very + +05:41.230 --> 05:42.910 +little boilerplate code. + +05:43.180 --> 05:51.970 +It's the very well crafted libraries to reduce the barrier to entry and make people productive quickly. + +05:52.420 --> 05:57.880 +The one of the first libraries you'll experience is the Hugging Face Hub, which is a library that allows + +05:57.880 --> 06:07.030 +you to log in to hugging face and, uh, both download and upload things like data sets and models from + +06:07.030 --> 06:12.430 +the hub, which is what hugging face calls the platform we just talked about. + +06:12.850 --> 06:22.000 +Um, data sets is a library that gives us access, immediate access to, uh, the, the the data repositories + +06:22.000 --> 06:25.540 +in hugging Huggingface and Transformers. 
+ +06:25.570 --> 06:35.860 +This is a central library, which is the wrapper code around Llms that follow the transformer architecture, + +06:36.010 --> 06:44.830 +and under the covers it's got either PyTorch or TensorFlow code that actually runs these neural networks. + +06:45.160 --> 06:52.480 +But when you create a transformer, you have the actual deep neural network code at your fingertips. + +06:52.480 --> 06:59.200 +When we make calls to functions, to methods in transformer code, we're no longer calling out to an + +06:59.200 --> 07:04.270 +API running on a cloud somewhere else under OpenAI's umbrella. + +07:04.270 --> 07:13.240 +We are executing the code ourselves to to execute to either inference or training against our deep neural + +07:13.240 --> 07:14.050 +network. + +07:14.860 --> 07:20.800 +So there are three other libraries that I wanted to mention that we're going to come to later in the + +07:20.800 --> 07:23.740 +course that are more advanced libraries. + +07:24.010 --> 07:29.810 +Um, the first of them, Peft, stands for parameter efficient fine tuning. + +07:29.990 --> 07:39.890 +And this is, uh, utilities which allow us to train llms without needing to work with all of the billions + +07:39.890 --> 07:42.290 +of parameters in the Llms. + +07:42.290 --> 07:43.910 +So it's parameter efficient. + +07:43.910 --> 07:49.400 +And the technique in particular that we'll be using is called Laura or Laura is a variation of Laura, + +07:49.400 --> 07:52.460 +and there'll be plenty of time to explain that later on. + +07:52.460 --> 07:54.710 +But but bear in mind that's what we'll be using. + +07:54.710 --> 07:59.750 +And it's part of the Peft library parameter efficient fine tuning. + +08:00.140 --> 08:07.550 +Then there's a library called Treal, which stands for Transformer Reinforcement Learning. + +08:07.550 --> 08:09.440 +And it includes a few things. + +08:09.440 --> 08:13.730 +It's the ability to do things like something called reward modeling. 
+ +08:14.060 --> 08:14.630 +Mm. + +08:14.630 --> 08:20.630 +And it's also something called proximal policy optimization PPO. + +08:20.900 --> 08:24.200 +And you may see mm and PPO mentioned from time to time. + +08:24.200 --> 08:32.720 +And this is related to uh, the both this thing called WRF that I mentioned a while ago, and it's the + +08:32.990 --> 08:42.320 +successors better ways of doing it, which is how we are able to train LMS so that they are really effective + +08:42.320 --> 08:43.100 +at chat. + +08:43.100 --> 08:48.620 +And it was the key innovation that resulted in ChatGPT in late 2022. + +08:48.650 --> 08:52.130 +So a lot of that code is within TRL. + +08:52.160 --> 09:00.290 +Also within TRL is something called supervised fine tuning and SFT, and that is something we will directly + +09:00.290 --> 09:02.390 +use ourselves later in the course. + +09:02.390 --> 09:10.220 +That is the specific library we will be using to fine tune an open source model, so that it's even + +09:10.220 --> 09:14.540 +more effective in our particular domain with a particular problem. + +09:14.540 --> 09:20.240 +We will set it so SFT supervised fine tuning part of the TRL library. + +09:20.330 --> 09:21.380 +All these acronyms. + +09:21.830 --> 09:30.230 +SFT part of TRL uh and the it's a it's an essential framework. + +09:30.350 --> 09:32.570 +But this is some of the more advanced stuff we'll get back to. + +09:32.600 --> 09:37.020 +So you don't have to remember all that right now, and certainly don't have to remember all these acronyms, + +09:37.140 --> 09:42.120 +but just let me plant that seed in you so that when you see it later, it's something that you've heard + +09:42.120 --> 09:42.990 +of before. + +09:44.160 --> 09:51.240 +The other one is one that is more of a behind the scenes, but you'll often see us importing it and + +09:51.330 --> 09:52.590 +making some use of it. 
+ +09:52.620 --> 10:01.890 +It's called accelerate, and it's some, uh, advanced huggingface code that allows, uh, that allows + +10:01.890 --> 10:05.670 +our transformers to run across any distributed configuration. + +10:05.670 --> 10:13.350 +So it allows both training and inference to run at scale in an efficient, adaptable way, potentially + +10:13.350 --> 10:14.760 +across multiple GPUs. + +10:14.760 --> 10:19.950 +Although in all the experiments we'll be doing, we'll only be using a maximum of one GPU. + +10:20.910 --> 10:26.820 +So those are some of the key libraries that sit behind hugging face. + +10:27.660 --> 10:28.590 +At this point. + +10:28.590 --> 10:31.140 +I think it's time that we get to look at hugging face. + +10:31.140 --> 10:38.070 +So let's go in and take some some browsing around, starting with the hugging face platform. diff --git a/week5/community-contributions/subtitles/srts/59170043/ja_JP.srt b/week5/community-contributions/subtitles/srts/59170043/ja_JP.srt new file mode 100755 index 0000000..97bba53 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170043/ja_JP.srt @@ -0,0 +1,334 @@ +WEBVTT + +00:01.490 --> 00:08.720 +LLMエンジニアリングの旅の第3週目に戻ってきた皆さんを熱烈に歓迎しましょう。 + +00:08.750 --> 00:15.140 +先週、 素晴らしいGradioフレームワークを使ったユーザー・インターフェースの構築を楽しんだのなら、 + +00:15.170 --> 00:24.500 +今週はもっと気に入るはずだ。 + +00:24.830 --> 00:28.340 +その前に、 すでにできることをいつものように簡単にまとめておこう。 + +00:28.370 --> 00:33.260 +トランスフォーマーについて説明でき、 重要な専門用語に精通している。 + +00:33.290 --> 00:38.750 +コンテクスト・ウィンドウについては、 牛が帰ってくるまで話すことができる。 + +00:38.780 --> 00:44.210 +GeminiでもClaudeでもOpenAIでも、 自信を持ってコーディングできる。 + +00:44.240 --> 00:45.680 +APIは知っているだろう。 + +00:45.680 --> 00:49.820 +ストリーミングのやり方も、 マークダウンのことも、 JSONレスポンスのことも知っている。 + +00:49.940 --> 00:53.330 +また、 AIアシスタント、 チャットボットを作ることもできる。 + +00:53.360 --> 00:55.190 +道具を使わせることもできる。 + +00:55.190 --> 01:00.260 +さまざまなエージェントを使うこともできるし、 マルチモーダルにもできる。 + +01:00.380 --> 01:02.330 +そして、 自分たちでも作った。 + +01:02.330 --> 01:04.400 +そして願わくば、 それをさらに広げてほしい。 + +01:04.400 
--> 01:04.400 +それもそうだ。 + +01:05.060 --> 01:06.590 +それで、 今日は何が起きているんだ? + +01:06.620 --> 01:09.080 +今日はハグ顔に入ろう。 + +01:09.080 --> 01:14.930 +そもそも、 ハグ顔とは何か、 その範囲と規模を説明できればいいわけだし。 + +01:14.930 --> 01:18.260 +ハグ顔で最も注目すべきことのひとつは、 その幅広さだ。 + +01:18.260 --> 01:24.140 +オープンソースのデータ・サイエンス・コミュニティに提供するさまざまなものを、 + +01:24.140 --> 01:26.990 +すぐに理解してもらえるだろう。 + +01:27.320 --> 01:35.510 +モデル、 データセット、 空間をハグハグしながら見ていくんだけど、 Google Colabについてもよく理解できるようになるよ。 + +01:35.510 --> 01:39.410 +すでにGoogle Colabを理解しているかもしれないが、 その場合はすぐに復習できるだろう。 + +01:39.410 --> 01:41.840 +しかし、 そうでない人たちのために、 私たちはそれに踏み込もうとしている。 + +01:41.870 --> 01:47.810 +優れたGPUを搭載したマシンでどのようにコードを走らせることができるかを見てもらい、 世の中にあるさまざまな製品と、 + +01:47.840 --> 01:50.270 +このクラスで使うGPUを理解してもらう。 + +01:50.270 --> 01:51.980 +だから、 私たちがセッティングします。 + +01:51.980 --> 01:55.550 +だから、 オープンソースのものを準備するんだ。 + +01:55.550 --> 02:02.900 +その前に、 いつものように、 これまでの経過と現在地、 そして残された課題を簡単に振り返っておこう。 + +02:02.930 --> 02:12.750 +最初はLMエンジニアリングの知識がない状態で左からスタートした。 + +02:12.750 --> 02:16.980 +第1週は、 フロンティアのあらゆることに没頭した。 + +02:16.980 --> 02:18.060 +第2週は。 + +02:18.090 --> 02:20.250 +先週はUIを構築した。 + +02:20.250 --> 02:26.070 +私たちはトップ3のAPIをすべて使い、 ツールを使って実験した。 + +02:26.100 --> 02:31.500 +エージェント化 マルチモダリティ 今週は、 オープンソースについて、 ハグ顔について。 + +02:31.530 --> 02:37.500 +来週は、 問題に適したLMの選択とコードの生成について話す。 + +02:37.530 --> 02:39.480 +そのあとはラグ・ウィークだ。 + +02:39.510 --> 02:48.450 +そしてフロンティアモデルを微調整し、 オープンソースモデルを微調整し、 フィナーレですべてを持ち帰る。 + +02:49.830 --> 02:54.150 +それでは早速、 ハグ顔の話をしよう。 + +02:54.540 --> 02:56.670 +つまり、 どこにでもあるものなんだ。 + +02:56.700 --> 02:59.280 +地域全体で使われているんだ。 + +02:59.310 --> 03:01.770 +素晴らしいリソースだ。 + +03:01.980 --> 03:12.900 +ハギング・フェイス・プラットフォームは、 ハギング・フェイス・コーにアクセスし、 アカウント登録すれば利用できる。 + +03:12.900 --> 03:16.890 +あなたは3つのカテゴリーにアクセスできる。 + +03:16.890 --> 03:31.080 +まず第一に、 80万を超えるオープンソースのモデルがあり、 それらは様々な種類のタスクをこなすことができる。 + +03:31.080 --> 03:35.010 +そして今後の週にはデータセットがある。 + +03:35.010 --> 03:41.880 +それは宝の山で、 20万を超えるデータセットが、 考えられるほとんどすべての問題をカバーしている。 + +03:41.910 --> 03:44.070 +検索してみてください。 + +03:44.100 --> 03:49.470 +このコースの後半で、 
特に素晴らしいデータセットを使うことになる。 + +03:49.500 --> 03:54.030 +しかし、 あなたの問題を解決するためのデータはたくさん見つかるだろう。 + +03:54.270 --> 04:00.150 +Kaggleというプラットフォームと似ていて、 よりデータ面に特化しています。 + +04:00.150 --> 04:05.550 +でも、 あなたはその膨大なデータを抱きかかえる顔の中に持っている。 + +04:06.000 --> 04:13.560 +また、 ハギング・フェイスにはスペースというものがあり、 アプリを書いてそのアプリを公開することができる。 + +04:13.590 --> 04:20.680 +クラウドのハードウェアで動作させ、 他の人が使えるようにする。 + +04:20.680 --> 04:28.360 +自分のコードがオープンソースであることに満足しているのであれば、 それこそがハギング・フェイスのすべてなのだから。 + +04:28.630 --> 04:35.110 +多くのスペースアプリはGradioで作られています。 + +04:35.110 --> 04:36.910 +つまり、 これらはグラディオのアプリなのだ。 + +04:37.060 --> 04:38.890 +ええと、 グラディオのアプリではないものもあります。 + +04:38.890 --> 04:43.660 +Streamlitと呼ばれるものがあり、 これも非常に不思議なアプリの作り方だ。 + +04:43.660 --> 04:45.670 +グラディオとは違って、 かなりマジカルだ。 + +04:45.730 --> 04:48.640 +他にもアプリを公開する方法はいくつかあります。 + +04:48.700 --> 04:51.520 +あー、 でも、 グラディオが一番一般的だと思うよ。 + +04:51.520 --> 05:02.650 +特にリーダーボードと呼ばれるものがあり、 これはグラディオのアプリで、 さまざまなLLMを評価し、 ランク付けしてスコアカードのような形で表示するものだ。 + +05:02.680 --> 05:07.300 +リーダーボードは、 さまざまなLLMを比較するときによく使うが、 今日もハギングフェイス・スペースを見るときに、 + +05:07.330 --> 05:11.590 +そのいくつかを見ることになるだろう。 + +05:12.190 --> 05:20.230 +これがHuggingfaceのプラットフォームで、 Huggingface Co.にアクセスしてログインし、 そこにあるものを見始めるとたどり着くことができる。 + +05:20.260 --> 05:28.240 +Hugging faceは、 多くのオープンソースプロジェクトの基礎となるライブラリコードも提供しています。 + +05:28.870 --> 05:35.140 +そして図書館は、 私たちがやりたいことを実現するための素晴らしいスタートを切ってくれる。 + +05:35.170 --> 05:42.910 +なぜなら、 定型的なコードをほとんど使用することなく、 すぐに実行に移せるからだ。 + +05:43.180 --> 05:51.970 +それは、 参入障壁を低くし、 人々を素早く生産的にするために非常にうまく作られたライブラリだ。 + +05:52.420 --> 05:57.880 +Hugging Face Hubは、 Hugging Faceにログインして、 + +05:57.880 --> 06:12.430 +データセットやモデルのようなものをHubからダウンロードしたりアップロードしたりできるライブラリです。 + +06:12.850 --> 06:25.540 +データセットは、 ハグハグフェイスやトランスフォーマーのデータリポジトリにすぐにアクセスできるライブラリだ。 + +06:25.570 --> 06:44.830 +これは中心的なライブラリで、 トランスフォーマーアーキテクチャに従うLlmsのラッパーコードであり、 そのカバーの下には実際にニューラルネットワークを実行するPyTorchかTensorFlowのコードがある。 + +06:45.160 --> 06:52.480 +しかし、 トランスフォーマーを作れば、 実際のディープ・ニューラル・ネットワークのコードを指先で操作できる。 + +06:52.480 --> 07:04.270 +トランスフォーマーのコードで関数やメソッドを呼び出すとき、 
私たちはもはやOpenAIの傘下にあるどこかのクラウド上で動いているAPIを呼び出しているわけではない。 + +07:04.270 --> 07:14.050 +私たちは、 ディープ・ニューラル・ネットワークに対する推論やトレーニングを実行するために、 自分たちでコードを実行している。 + +07:14.860 --> 07:23.740 +このコースの後半で紹介する、 より高度なライブラリが他にも3つある。 + +07:24.010 --> 07:29.810 +その最初のPeftは、 Parameter efficient fine tuning(パラメータ効率的な微調整)の略だ。 + +07:29.990 --> 07:42.290 +このユーティリティを使えば、 LLMSの何十億ものパラメーターを操作することなく、 LLMSをトレーニングすることができる。 + +07:42.290 --> 07:43.910 +つまり、 パラメータが効率的なんだ。 + +07:43.910 --> 07:49.400 +特に使うテクニックはローラ、 あるいはローラはローラのバリエーションと呼ばれるもので、 + +07:49.400 --> 07:52.460 +これについては後でたっぷり説明する時間がある。 + +07:52.460 --> 07:54.710 +でも、 それが私たちが使うものだということを念頭に置いておいてほしい。 + +07:54.710 --> 07:59.750 +そして、 ペフト・ライブラリーのパラメーターの効率的な微調整の一部でもある。 + +08:00.140 --> 08:07.550 +それから、 Trealというライブラリーがある。 TrealはTransformer Reinforcement Learning(トランスフォーマー強化学習)の略だ。 + +08:07.550 --> 08:09.440 +それにはいくつかのことが含まれている。 + +08:09.440 --> 08:13.730 +リワード・モデリングと呼ばれるようなことができる能力だ。 + +08:14.060 --> 08:14.630 +うん。 + +08:14.630 --> 08:20.630 +また、 近接政策最適化PPOと呼ばれるものもある。 + +08:20.900 --> 08:24.200 +また、 時折、 mmやPPOの名前を目にすることもあるだろう。 + +08:24.200 --> 08:32.720 +そしてこれは、 少し前に話したWRFと呼ばれるものの両方に関連していて、 LMSがチャットで本当に効果的であるように、 + +08:32.990 --> 08:43.100 +私たちがLMSを訓練することができる方法です。 + +08:43.100 --> 08:48.620 +そして、 2022年後半にChatGPTを生み出した重要な革新だった。 + +08:48.650 --> 08:52.130 +だから、 そのコードの多くはTRLの中にある。 + +08:52.160 --> 09:02.390 +また、 TRLの中にはスーパーバイズド・ファイン・チューニングやSFTと呼ばれるものがあり、 これはコースの後半で私たち自身が直接使うことになる。 + +09:02.390 --> 09:14.540 +これは、 オープンソースのモデルを微調整するために使用する特定のライブラリであり、 特定の問題を抱える特定の領域でより効果的になるようにする。 + +09:14.540 --> 09:20.240 +SFTがTRLライブラリーの微調整の一部を監督するように設定する。 + +09:20.330 --> 09:21.380 +略語ばかりだ。 + +09:21.830 --> 09:30.230 +SFTはTRLの一部であり、 不可欠なフレームワークだ。 + +09:30.350 --> 09:32.570 +でも、 これはまた後ほど紹介する、 より高度なものなんだ。 + +09:32.600 --> 09:37.020 +だから、 今すぐ全部覚える必要はないし、 頭文字を全部覚える必要もない。 + +09:37.140 --> 09:42.990 +ただ、 後で見たときに聞いたことがあるようなものになるように、 種を植えさせてほしい。 + +09:44.160 --> 09:52.590 +もうひとつは裏方的なものですが、 インポートして活用しているのをよく見かけます。 + +09:52.620 --> 10:05.670 +accelerateと呼ばれるもので、 
高度なHuggingfaceコードによって、 トランスフォーマーがどんな分散構成でも実行できるようになっているんだ。 + +10:05.670 --> 10:14.760 +そのため、 トレーニングと推論の両方を、 効率的で適応性のある方法で、 複数のGPUにまたがる可能性もあるスケールで実行することができる。 + +10:14.760 --> 10:19.950 +これから行う実験では、 GPUは最大でも1つしか使わない。 + +10:20.910 --> 10:26.820 +これが、 ハグフェイスの背後にある重要なライブラリのいくつかだ。 + +10:27.660 --> 10:28.590 +この時点ではね。 + +10:28.590 --> 10:31.140 +そろそろハグしている顔を見てもいい頃だと思う。 + +10:31.140 --> 10:38.070 +それでは早速、 ハグフェイス・プラットフォームから見て回ろう。 diff --git a/week5/community-contributions/subtitles/srts/59170043/ko_KR.srt b/week5/community-contributions/subtitles/srts/59170043/ko_KR.srt new file mode 100755 index 0000000..83313ef --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170043/ko_KR.srt @@ -0,0 +1,397 @@ +WEBVTT + +00:01.490 --> 00:08.720 +LLM 엔지니어링 여정 3주 차에 오신 여러분을 열렬히 환영해 주세요 + +00:08.750 --> 00:15.140 +지난주에 사용자 인터페이스를 구축하는 과정을 보셨다면 멋진 그래디오 프레임워크를 + +00:15.170 --> 00:21.290 +이용했죠 이번 주는 더 맘에 드실 겁니다 이제 오픈 소스를 시작해 포옹의 멋진 + +00:21.320 --> 00:24.500 +세계를 사용할 시간이거든요 + +00:24.830 --> 00:28.340 +하지만 먼저 늘 그렇듯 이미 할 수 있는 걸 간단히 요약해보죠 + +00:28.370 --> 00:33.260 +트랜스포머를 묘사할 수 있고 핵심 용어도 유창하죠 + +00:33.290 --> 00:38.750 +컨텍스트 창문에 대해 얘기할 수 있어요 + +00:38.780 --> 00:44.210 +쌍둥이자리나 클로드 오픈아이든 자신 있게 코드를 짤 수 있어요 + +00:44.240 --> 00:45.680 +API 아시죠? + +00:45.680 --> 00:49.820 +스트림도 알고 마크다운도 알고 JSON 반응도 알죠 + +00:49.940 --> 00:53.330 +인공지능 비서인 챗봇도 만들 수 있어요 + +00:53.360 --> 00:55.190 +도구를 사용해도 돼요 + +00:55.190 --> 01:00.260 +여러 에이전트를 사용할 수도 있고 멀티모덜로 만들 수도 있어요 + +01:00.380 --> 01:02.330 +우리도 하나 지었죠 + +01:02.330 --> 01:04.400 +그걸 확장했길 바라요 + +01:04.400 --> 01:04.400 +저도요 + +01:05.060 --> 01:06.590 +오늘은 무슨 일이죠? 
+ +01:06.620 --> 01:09.080 +오늘은 얼굴 포옹을 배워볼 거예요 get it + +01:09.080 --> 01:14.930 +먼저, 이게 무엇인지 설명하실 수 있을 겁니다 얼굴을 껴안는 것의 스코프와 규모도요 + +01:14.930 --> 01:18.260 +얼굴을 안는 것의 가장 놀라운 점은 너비예요 + +01:18.260 --> 01:24.140 +오픈 소스 데이터 과학 커뮤니티에 제공하는 모든 다양한 것들이요 여러분도 + +01:24.140 --> 01:26.990 +곧 그걸 잘 이해하게 될 거예요 + +01:27.320 --> 01:33.650 +모델과 얼굴을 포옹할 때 데이터 세트, 공간을 살펴볼 겁니다 구글 콜랩도 잘 + +01:33.650 --> 01:35.510 +이해하게 될 거예요 + +01:35.510 --> 01:39.410 +구글 콜랍을 이미 이해했다면 빠른 복습 지점이 되겠죠 + +01:39.410 --> 01:41.840 +그렇지 않은 분들은 지금 살펴보죠 + +01:41.870 --> 01:47.810 +좋은 GPU 박스에서 코드를 실행하는 방법을 볼 겁니다 다양한 제공이 있고 어떤 걸 클래스에 + +01:47.840 --> 01:50.270 +사용할지 알게 될 거예요 + +01:50.270 --> 01:51.980 +get up을 해드릴게요 + +01:51.980 --> 01:55.550 +오픈 소스 준비하세요 + +01:55.550 --> 02:02.900 +하지만 먼저, 현재 상황과 현재 위치, 남은 일을 간단히 정리해 보죠 + +02:02.930 --> 02:09.510 +왼쪽에서 출발해서 달 착륙선 엔지니어링 지식이 없었지만 능숙한 달 착륙선 엔지니어답게 + +02:09.510 --> 02:12.750 +오른쪽에서 끝날 거예요 + +02:12.750 --> 02:16.980 +첫 주에는 개척지 문화에 푹 빠져들었어요 + +02:16.980 --> 02:18.060 +2주 차에요 + +02:18.090 --> 02:20.250 +지난주엔 UI를 만들었죠 + +02:20.250 --> 02:26.070 +상위 3개에 API를 모두 사용했고 도구로 실험했어요 + +02:26.100 --> 02:31.500 +이번 주 에이전트 iization 다중 모듈은 오픈 소스와 얼굴 포옹에 관한 거죠 + +02:31.530 --> 02:37.500 +다음 주엔 문제에 맞는 LM을 선택하고 코드 생성하는 걸 얘기하죠 + +02:37.530 --> 02:39.480 +그다음은 래그 주간이에요 + +02:39.510 --> 02:47.040 +그 후 개척지 모델을 조정하고 오픈 소스 모델을 조정하고 피날레에서 전부 완성하는 + +02:47.040 --> 02:48.450 +거죠 + +02:49.830 --> 02:54.150 +그럼 지체 없이 포옹하는 얼굴 얘기를 해보죠 + +02:54.540 --> 02:56.670 +유비쿼터스라고 할 수 있죠 + +02:56.700 --> 02:59.280 +지역 사회에서 사용되죠 + +02:59.310 --> 03:01.770 +정말 훌륭한 자원이에요 + +03:01.980 --> 03:09.780 +무엇보다도 H깅 페이스 플랫폼이 세 가지 있습니다 H깅 페이스 코에 접속하면 + +03:09.780 --> 03:12.900 +계정을 만들 수 있죠 + +03:12.900 --> 03:16.890 +세 가지 카테고리에 접근할 수 있어요 + +03:16.890 --> 03:25.860 +우선 800,000개가 넘는 오픈 소스 모델이 있어요 다양한 작업을 수행할 수 있죠 + +03:25.860 --> 03:31.080 +그중 상당수는 이번 주 강의에서 실험할 거예요 + +03:31.080 --> 03:35.010 +앞으로 몇 주간은 데이터 세트가 있어요 + +03:35.010 --> 03:41.880 +보물 창고입니다 200,000개가 넘는 데이터가 거의 모든 문제를 다루고 있죠 + +03:41.910 --> 03:44.070 +검색해 보고 뭐가 
나오는지 보세요 + +03:44.100 --> 03:49.470 +이 과정 후반부에 특히 놀라운 데이터 세트를 사용할 거예요 + +03:49.500 --> 03:54.030 +하지만 많은 데이터를 찾아서 문제를 해결할 수 있어요 + +03:54.270 --> 04:00.150 +플랫폼 케이글과 비슷합니다 데이터 측면에 훨씬 더 집중하죠 + +04:00.150 --> 04:05.550 +하지만 얼굴 한 번 안아도 데이터는 방대하죠 + +04:06.000 --> 04:13.560 +얼굴을 안는 것에는 스페이스라는 것이 있습니다 앱을 작성하고 공개할 수 있는 곳이죠 + +04:13.590 --> 04:20.680 +페이스 클라우드 하드웨어를 끌어안고 다른 사람들이 사용할 수 있도록 하세요 + +04:20.680 --> 04:26.530 +코드가 오픈 소스인 게 좋다면 말이죠 왜냐하면 그게 포옹하는 얼굴의 + +04:26.560 --> 04:28.360 +의미니까요 + +04:28.630 --> 04:35.110 +스페이스 앱은 많은 스페이스 앱이 그래디오로 만들어졌어요 + +04:35.110 --> 04:36.910 +그러디오 앱인 셈이죠 + +04:37.060 --> 04:38.890 +그러디오 앱이 아닌 것도 있어요 + +04:38.890 --> 04:43.660 +스트림리츠라는 게 있는데 앱을 만드는 또 다른 방법인데 이것도 꽤 마법 같아요 + +04:43.660 --> 04:45.670 +그래디오와는 달라요 마법 같죠 + +04:45.730 --> 04:48.640 +앱을 게시할 수 있는 다른 방법도 있어요 + +04:48.700 --> 04:51.520 +하지만 그라디오가 가장 흔한 것 같아요 + +04:51.520 --> 04:59.230 +그리고 leaderboard라는 것이 있는데 그러디오 앱으로 여러 llm을 평가하고 순위를 + +04:59.230 --> 05:02.650 +매겨 점수표에 표시하는 역할을 하죠 + +05:02.680 --> 05:07.300 +다른 llm들을 비교할 때 leaderboard를 많이 사용할 + +05:07.330 --> 05:11.590 +거예요 오늘 몇 가지를 볼 겁니다 포옹표 공간도 보고요 + +05:12.190 --> 05:18.610 +그게 허깅페이스 플랫폼입니다 허깅페이스 코에 로그인해 뭐가 있는지 살펴보면 + +05:18.610 --> 05:20.230 +알 수 있죠 + +05:20.260 --> 05:28.240 +얼굴을 안는 것은 라이브러리 코드도 제공합니다 우리 오픈 소스 프로젝트의 기초가 되는 코드죠 + +05:28.870 --> 05:35.140 +그리고 라이브러리는 우리가 원하는 것에서 출발할 수 있게 해주죠. 
HDP, DBP, HDP, HD, HDP, HD, HDP, HD, HDP, HD, HDP, HD, HDP, HD, HD + +05:35.170 --> 05:41.230 +시장성이 훨씬 낮아지죠 아주 적은 상용 코드만으로도 아주 빨리 실행할 + +05:41.230 --> 05:42.910 +수 있으니까요 + +05:43.180 --> 05:51.970 +아주 잘 만들어진 도서관으로 출입 장벽을 줄이고 사람들의 생산력을 높여주죠 + +05:52.420 --> 05:57.880 +여러분이 경험할 첫 라이브러리 중 하나는 페이스 허브입니다 + +05:57.880 --> 06:07.030 +얼굴을 안는 데 로그인할 수 있는 라이브러리로서 허브에서 데이터 세트와 모델 같은 걸 다운로드 및 업로드 + +06:07.030 --> 06:12.430 +할 수 있습니다 얼굴을 안는 플랫폼이라고 부르는 거죠 + +06:12.850 --> 06:22.000 +데이터 집합은 라이브러리 같은 거예요 즉각적인 접근이 가능하죠 포옹이나 트랜스포머에 + +06:22.000 --> 06:25.540 +관한 데이터 저장소 같은 거요 + +06:25.570 --> 06:35.860 +이건 중앙 라이브러리인데 변압기 아키텍처를 따라가는 ㄹm을 감싸는 코드죠 그 밑에는 이런 + +06:36.010 --> 06:44.830 +신경망 네트워크를 실행하는 PyToch나 텐서플로우 코드가 있어요 + +06:45.160 --> 06:52.480 +하지만 변압기를 만들 때 손끝에 심층 신경망 코드가 있어요 + +06:52.480 --> 06:59.200 +함수나 변압기 코드의 메서드에 호출을 할 때 오픈AI의 범주 내 다른 + +06:59.200 --> 07:04.270 +클라우드에서 실행되는 API를 호출하지 않아요 + +07:04.270 --> 07:14.050 +우리가 직접 코드를 실행하는 건 심층 신경망에 대한 추론이나 훈련에 실행하기 위해서죠 + +07:14.860 --> 07:20.800 +언급하고 싶은 다른 라이브러리가 세 개 더 있어요 과정의 나중에 + +07:20.800 --> 07:23.740 +좀 더 고급 라이브러리예요 + +07:24.010 --> 07:29.810 +첫 번째는 페프트인데 매개 변수 효율 미세 조정의 약자죠 + +07:29.990 --> 07:39.890 +이 유틸리티 덕분에 수십억 개의 변수를 다루지 않고 llm을 + +07:39.890 --> 07:42.290 +훈련할 수 있죠 + +07:42.290 --> 07:43.910 +매개 변수 효율이죠 + +07:43.910 --> 07:49.400 +특히 우리가 사용할 기술은 로라라고 하는데 로라의 변형이죠 + +07:49.400 --> 07:52.460 +나중에 설명할 시간은 많아요 + +07:52.460 --> 07:54.710 +하지만 우리가 사용할 거라는 걸 명심하세요 + +07:54.710 --> 07:59.750 +페프트 라이브러리 매개 변수 효율적인 미세 조정의 일부죠 + +08:00.140 --> 08:07.550 +트리알이라는 라이브러리도 있는데 트랜스포머 강화 학습의 약자죠 + +08:07.550 --> 08:09.440 +몇 가지 조건이 있어요 + +08:09.440 --> 08:13.730 +보상 모델링 같은 걸 하는 능력이죠 + +08:14.060 --> 08:14.630 +네 + +08:14.630 --> 08:20.630 +최측근 정책 최적화 PPO라고도 하죠 + +08:20.900 --> 08:24.200 +음미로와 PPO에 대해 가끔 언급하실 텐데요 + +08:24.200 --> 08:32.720 +이건 앞서 언급한 WRF와 관련이 있어요 더 나은 후속 + +08:32.990 --> 08:43.100 +방식이죠 LMS를 훈련하는 방법이요 채팅에서 효과적이죠 + +08:43.100 --> 08:48.620 +2022년 말 챗GPT가 탄생한 핵심 혁신이었죠 + +08:48.650 --> 08:52.130 +많은 코드가 TRL 내에 있어요 + 
+08:52.160 --> 09:00.290 +TRL에는 감독 미세 조정과 SFT라는 게 있는데 나중에 코스에서도 직접 + +09:00.290 --> 09:02.390 +사용할 거예요 + +09:02.390 --> 09:10.220 +오픈 소스 모델을 조정하기 위해 사용하는 라이브러리입니다 특정 문제가 + +09:10.220 --> 09:14.540 +있는 특정 도메인에서 더 효과적으로요 + +09:14.540 --> 09:20.240 +TRL 라이브러리의 미세한 조율을 담당했죠 + +09:20.330 --> 09:21.380 +전부 두문자어예요 + +09:21.830 --> 09:30.230 +TRL의 일부예요 아주 중요한 프레임워크죠 + +09:30.350 --> 09:32.570 +이건 좀 더 고급이죠 나중에 다시 올게요 Get in get + +09:32.600 --> 09:37.020 +지금 당장 다 외울 필요는 없어요 이 약어들도 확실히 기억할 필요는 + +09:37.140 --> 09:42.990 +없고요 하지만 제가 그 씨앗을 심어서 나중에 봤을 때 이미 들어본 것으로 만들게요 + +09:44.160 --> 09:51.240 +다른 하나는 무대 뒤의 것에 더 가깝지만 불러와서 이용하는 걸 종종 보실 + +09:51.330 --> 09:52.590 +수 있어요 + +09:52.620 --> 10:01.890 +가속이라고 하는데 고급 안기체 코드예요 어떤 분산 구성에서도 트랜스포머가 + +10:01.890 --> 10:05.670 +작동할 수 있게 해 주죠 + +10:05.670 --> 10:13.350 +훈련과 추론 둘 다 가능합니다 효율적이고 적응성 있는 방법으로 여러 GPU에 걸쳐 실행할 + +10:13.350 --> 10:14.760 +수 있도록요 + +10:14.760 --> 10:19.950 +우리가 할 실험은 최대 1개의 GPU로 진행되지만요 + +10:20.910 --> 10:26.820 +얼굴을 껴안는 것 뒤에 있는 주요 라이브러리예요 + +10:27.660 --> 10:28.590 +지금은요 + +10:28.590 --> 10:31.140 +Get it get 이제 포옹하는 모습을 볼 때가 됐어요 + +10:31.140 --> 10:38.070 +그럼 좀 둘러보죠 포옹하는 얼굴 플랫폼부터요 diff --git a/week5/community-contributions/subtitles/srts/59170055/en_US.srt b/week5/community-contributions/subtitles/srts/59170055/en_US.srt new file mode 100755 index 0000000..03d161b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170055/en_US.srt @@ -0,0 +1,472 @@ +WEBVTT + +00:00.740 --> 00:03.140 +Welcome to the world of Google Colab. + +00:03.140 --> 00:07.730 +You may already be very familiar with Google Colab, even if so, I hope I'll show you a couple of things + +00:07.730 --> 00:08.660 +here and there. + +00:08.780 --> 00:13.340 +But if not, uh, prepare for a great tool. + +00:13.610 --> 00:17.630 +Um, and as I say, there's other competitors to Google Colab that are pretty similar. 
+ +00:17.750 --> 00:25.490 +Um, but this is where I suggest you start or do do the same sort of thing in your, uh, cloud compute + +00:25.490 --> 00:26.990 +platform of choice. + +00:26.990 --> 00:33.140 +So the first thing you'll need to do is have a Google account if you don't already have one. + +00:33.170 --> 00:40.970 +So when you go to this URL colab.research.google.com, uh, if you don't already have a Google account, + +00:40.970 --> 00:43.850 +it will prompt you to, to, to to create one. + +00:43.850 --> 00:44.840 +And it's worth it. + +00:44.840 --> 00:46.880 +There's going to be tons that we can do with it. + +00:46.880 --> 00:50.240 +So uh, go ahead and do that if you need to. + +00:50.510 --> 00:56.990 +Um, but uh, for everybody else that has one, you will see this, um, which will give you some information + +00:56.990 --> 00:58.010 +about Colab. + +00:58.010 --> 01:00.650 +There's a free tier and there is a paid tier. + +01:00.650 --> 01:02.900 +There's an awful lot you can do just with the free tier. + +01:02.960 --> 01:07.320 +Um, and I think in theory, you should be able to do almost everything in our class with the free tier. + +01:07.320 --> 01:08.820 +It just might take longer. + +01:09.060 --> 01:13.680 +And the paid tier, you can control how much you spend, and it can be relatively small in a matter + +01:13.680 --> 01:14.430 +of a few dollars. + +01:14.430 --> 01:20.490 +So it's certainly something that I'd recommend you consider because it will allow you to get deeper + +01:20.490 --> 01:23.670 +into training and it will be very satisfying. + +01:23.760 --> 01:31.980 +So, uh, when you come up with a new Colab notebook, it looks a bit like what gets served up right + +01:31.980 --> 01:32.460 +away. + +01:32.460 --> 01:34.890 +It looks very much like a Jupyter notebook. + +01:34.920 --> 01:41.820 +You have cells that can be code, or they can be text, and you can run code just by clicking in it + +01:41.820 --> 01:42.840 +and running it. 
+ +01:42.840 --> 01:45.990 +And this is a kind of default one that comes up. + +01:46.020 --> 01:49.950 +What we can do is we can go file new notebook in drive. + +01:49.950 --> 01:55.650 +And it says in drive like that, because this notebook is created in your Google Drive, which is so + +01:55.650 --> 01:56.280 +convenient. + +01:56.280 --> 02:03.360 +It has the same kind of construct as making Google Docs, making Google Sheets, and it's done in a + +02:03.360 --> 02:06.780 +way that you can share it just as you would share anything else. + +02:06.780 --> 02:08.270 +So here we are. + +02:09.020 --> 02:14.840 +And the first thing that we see, what looks like a Jupiter notebook over here is a connect button. + +02:14.840 --> 02:19.400 +And I'm going to show you we can start with change runtime type because it shows you the different kinds + +02:19.400 --> 02:23.690 +of runtime, the different kinds of VM that we can run a CPU. + +02:23.690 --> 02:29.480 +In other words, a normal box that doesn't have one of these GPUs, graphics processing units that are + +02:29.480 --> 02:35.900 +so good at running, uh, parallel matrix maths that sits behind neural networks. + +02:35.900 --> 02:41.300 +So we can just choose a CPU box, which is very much available on the free tier. + +02:41.330 --> 02:50.420 +There is a low end GPU box called a T4, which has a smaller GPU attached to it. + +02:50.420 --> 02:56.390 +This is available on the free plan with some rate limits in terms of how much you can use it, and it's + +02:56.390 --> 02:58.340 +also very cheap on the paid plan. + +02:58.550 --> 03:05.630 +Um, there's an L4, which is a bit higher spec, and an A100, which is the strongest one and which + +03:05.630 --> 03:08.930 +we will use when we want to do things quickly. + +03:08.960 --> 03:12.150 +It does cost a little bit more, but still we're talking about dollars. + +03:12.180 --> 03:14.940 +Not not not massive amounts. + +03:14.940 --> 03:16.050 +$10 will get you. 
+ +03:16.050 --> 03:23.850 +I think it's it's, uh, with $10, you'd be able to to keep training for about 24 to 48 hours, uh, + +03:23.850 --> 03:26.580 +using that box constantly. + +03:26.580 --> 03:32.370 +So it's, uh, it's not not still not going to break the bank, but it is on the radar when you start + +03:32.370 --> 03:34.170 +using a100s a lot. + +03:34.830 --> 03:40.020 +Uh, so, um, and of course, you always get to see how much you're spending, and you can always choose + +03:40.020 --> 03:43.500 +to to go with the cheaper option or go with a free option as you wish. + +03:43.530 --> 03:46.650 +And when you pick a box, you can have a high Ram version of it. + +03:46.650 --> 03:48.870 +And that's talking about the CPU, Ram, not the GPU. + +03:48.900 --> 03:54.030 +The GPU Ram is associated with which instance you pick, but you can choose whether you want a high + +03:54.030 --> 03:55.290 +CPU, Ram, or not. + +03:55.290 --> 04:01.770 +So let's just go with a CPU box with normal amount of Ram and connect to that box by pressing the connect + +04:01.770 --> 04:02.730 +button. + +04:02.790 --> 04:07.320 +It does take a little while to connect, because it has to hunt down a box and connect to it, but there + +04:07.320 --> 04:07.680 +we go. + +04:07.710 --> 04:09.480 +We are now attached to a box. + +04:09.480 --> 04:15.750 +You go to this dropdown and say View Resources to see what you're working with. + +04:15.780 --> 04:17.040 +You can see the system Ram. + +04:17.040 --> 04:24.840 +We've got like almost 13 gigs on this box, and we've got 225 gigs of disk space there. + +04:25.290 --> 04:35.910 +And I can go over here and I can type something like print hello Data Science World and run that. + +04:35.910 --> 04:39.150 +And shockingly, we get that message printed. + +04:39.330 --> 04:42.990 +Uh, so, uh, hopefully no surprises there. + +04:42.990 --> 04:46.530 +It's a Jupyter notebook running in the cloud on a CPU. 
+ +04:46.560 --> 04:48.210 +A couple of other things to mention. + +04:48.210 --> 04:50.370 +If you look down here, there's some useful stuff. + +04:50.370 --> 04:56.520 +This one here opens up your sort of browser, a file browser, onto your local disk. + +04:56.550 --> 05:01.380 +This local disk is ephemeral, and then it gets completely wiped once you finished using this box. + +05:01.380 --> 05:06.900 +So consider it temporary and you can use it to be writing files there that you maybe are then going + +05:06.900 --> 05:13.290 +to upload your model or data to the Huggingface hub, um, which you will later download somewhere else. + +05:13.290 --> 05:14.880 +But this is temporary. + +05:14.910 --> 05:16.290 +This is very important. + +05:16.290 --> 05:21.000 +This key is for what's called the secrets associated with your notebook. + +05:21.000 --> 05:26.520 +And this is where you can put in the environment variables that you'll be able to access within your + +05:26.520 --> 05:27.090 +notebook. + +05:27.120 --> 05:31.020 +That should not be included in the code of the notebook. + +05:31.050 --> 05:33.930 +And what you'll see here is I have my anthropic API key. + +05:33.960 --> 05:37.530 +I have my OpenAI API key and my hugging face token. + +05:37.530 --> 05:43.890 +That's the thing we created in the last video, and I've got them associated with this notebook. + +05:43.920 --> 05:46.020 +You can just press Add New Secret to do that. + +05:46.020 --> 05:48.270 +And it comes associated with all of my notebooks. + +05:48.450 --> 05:51.870 +Um, because I've got that set up as my colab secrets. + +05:51.870 --> 05:56.880 +And you can create a new one by pressing Add New Secret there. + +05:57.270 --> 06:01.590 +You can switch notebook access on here. + +06:01.860 --> 06:05.280 +I've just seen that there's a Create Gemini key option there. 
+ +06:05.280 --> 06:10.500 +They're obviously a cross-selling to Gemini, and I know that I that I say that creating Gemini Keys + +06:10.530 --> 06:11.370 +is is hard. + +06:11.370 --> 06:15.300 +Maybe they've got an easier path to creating Gemini API keys right there. + +06:15.300 --> 06:16.740 +So that would be worth trying. + +06:16.770 --> 06:20.460 +If you haven't already gone through the rigmarole of setting up a Gemini API key. + +06:20.670 --> 06:26.340 +Uh, so, um, and it's even I was going to say later we'll find out how to access your key from within + +06:26.340 --> 06:27.180 +the Jupyter notebook. + +06:27.180 --> 06:30.540 +But wonderfully, they've given you the little, little scriptlet of code just there. + +06:30.540 --> 06:36.690 +That's what we'll be doing later to be accessing our secrets within the code on the right. + +06:36.690 --> 06:40.020 +So you should set these up when you get a chance. + +06:40.050 --> 06:45.630 +When you're working with an actual notebook in particular, you flip this switch on to make sure that + +06:45.630 --> 06:50.820 +when you execute this code in a cell, it will have access to that secret. + +06:51.120 --> 06:56.100 +And of course, as you can imagine, the sort of powerful thing about these secrets is that if you share + +06:56.100 --> 06:59.490 +this notebook with others, then they get all of your code. + +06:59.490 --> 07:02.100 +But of course, they don't get your secrets shared. + +07:02.100 --> 07:07.380 +They will have to enter in their own secrets in order to be able to run that code. + +07:07.380 --> 07:12.240 +And similarly, of course, when I share notebooks for you to use, the same thing will apply. + +07:12.240 --> 07:18.180 +You'll need to put in your own tokens in order to make take advantage of the code and run it against + +07:18.180 --> 07:23.850 +the frontier models or use your hugging face hub, um, or whatever. + +07:24.600 --> 07:26.700 +Okay, let's close that down. 
+ +07:26.700 --> 07:30.930 +So let me just show you some of the more powerful boxes. + +07:30.930 --> 07:34.500 +So you remember we can go here and go change runtime type. + +07:34.500 --> 07:38.040 +Click on T4 to to use that box. + +07:38.040 --> 07:40.080 +And I did that earlier. + +07:40.230 --> 07:45.150 +And I did that because uh, it can take a little while to connect to some of these boxes. + +07:45.150 --> 07:50.700 +And with the really high spec boxes like A100, sometimes it just won't be available and you'll have + +07:50.700 --> 07:54.180 +to come back and try again two minutes later, and then it will be available. + +07:54.180 --> 07:58.710 +Invariably it becomes available after a couple of tries, but sometimes they are oversubscribed and + +07:58.710 --> 08:00.660 +it takes a few attempts. + +08:00.660 --> 08:02.580 +So this is a T4 box. + +08:02.580 --> 08:09.210 +If I do view resources, we'll see that we have again 12 and a bit of system Ram. + +08:09.210 --> 08:12.780 +We have the same a slightly smaller hard drive, I think. + +08:12.960 --> 08:15.690 +I think it was two, two five before, but it's 200 whatever. + +08:15.690 --> 08:16.980 +That's plenty of disk space. + +08:16.980 --> 08:24.000 +And we have a GPU with 15GB of Ram, and 15GB might sound like a huge amount of Ram to have for a GPU. + +08:24.000 --> 08:28.240 +But as you'll quickly discover when it comes to training deep neural networks, that is a kind of puny + +08:28.270 --> 08:30.490 +GPU, but it's good enough for our purposes. + +08:30.490 --> 08:32.920 +We'll be able to use this for this class. + +08:33.130 --> 08:38.110 +Um, uh, but but it just some things might take a long time. 
+ +08:38.260 --> 08:45.100 +Uh, this is a bit of code that I just copied from the original colab that Google prompted us with, + +08:45.100 --> 08:51.970 +which gives us a nice little, uh, printout of details behind this GPU, including how much memory + +08:52.000 --> 08:54.250 +we're using out of the 15GB. + +08:54.280 --> 08:57.040 +Although, of course, you can always watch it happening over here. + +08:58.000 --> 09:02.110 +Uh, so this is the T4 box. + +09:02.110 --> 09:05.410 +I'm now going to show you the A100 box. + +09:05.410 --> 09:11.290 +This is the super powered one, and I may splash out and use this from time to time. + +09:11.290 --> 09:17.440 +Just in the spirit of keeping this class moving fast and showing you, uh, great results really quickly. + +09:17.590 --> 09:21.700 +Uh, if we view the resources, you'll see what's going on. + +09:21.700 --> 09:29.380 +Now, we've got a 40 gigabyte Ram, GPU and that that is a beefy GPU. + +09:29.380 --> 09:34.240 +That is something which will be able to use to do some hefty training. + +09:34.480 --> 09:37.750 +Um, and we can use this to print more details. + +09:37.840 --> 09:46.930 +You can see that we are using two megabytes by, uh, when we're not doing anything out of the 40GB + +09:46.930 --> 09:49.270 +of available memory. + +09:49.870 --> 09:53.200 +So that's the quick tour of what's going on with Colab. + +09:53.200 --> 09:57.040 +The one other thing I'll mention is the share button up here. + +09:57.070 --> 10:03.880 +Uh, if you press the share button, then you will see a very familiar interface, because if you use + +10:03.880 --> 10:07.600 +Google Drive at all, it looks just like everything else in Google Drive. + +10:07.630 --> 10:13.600 +You can share these notebooks and with different levels of permission with different groups, and use + +10:13.600 --> 10:16.330 +that as a way to collaborate really effectively. 
+ +10:16.330 --> 10:25.810 +Uh, with friends, colleagues, coworkers on the, uh, the AI Jen AI projects that you're working + +10:25.810 --> 10:26.110 +on. + +10:26.110 --> 10:29.380 +And it's a super effective way to collaborate, of course. + +10:29.410 --> 10:32.980 +And that's one of the great benefits of using the Google Colab setup. + +10:33.220 --> 10:33.910 +All right. + +10:33.910 --> 10:35.500 +I'll see you back for the next lecture. diff --git a/week5/community-contributions/subtitles/srts/59170055/ja_JP.srt b/week5/community-contributions/subtitles/srts/59170055/ja_JP.srt new file mode 100755 index 0000000..6b0d2c1 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170055/ja_JP.srt @@ -0,0 +1,400 @@ +WEBVTT + +00:00.740 --> 00:03.140 +Google Colabの世界へようこそ。 + +00:03.140 --> 00:08.660 +すでにGoogle Colabを使いこなしている人もいるかもしれないが、 そうであっても、 あちこちでいくつか紹介できればと思う。 + +00:08.780 --> 00:13.340 +しかし、 そうでない場合は、 素晴らしい道具を用意することだ。 + +00:13.610 --> 00:17.630 +グーグル・コラボには、 似たような競合他社が他にもあります。 + +00:17.750 --> 00:26.990 +でも、 ここから始めるか、 あるいはあなたが選んだクラウド・コンピューティング・プラットフォームで同じようなことをすることをお勧めする。 + +00:26.990 --> 00:33.140 +まず最初に必要なのは、 グーグルアカウントを持っていることだ。 + +00:33.170 --> 00:40.970 +だから、 このURLのcolab. 
を研究している。 グーグル もしまだグーグルアカウントをお持ちでない場合は、 + +00:40.970 --> 00:43.850 +アカウントを作成するよう促されます。 + +00:43.850 --> 00:44.840 +その価値はある。 + +00:44.840 --> 00:46.880 +それでできることは山ほどあるだろう。 + +00:46.880 --> 00:50.240 +だから、 必要ならそうしてくれ。 + +00:50.510 --> 00:58.010 +でも、 それ以外の人は、 Colabについての情報を見ることができる。 + +00:58.010 --> 01:00.650 +無料のティアと有料のティアがある。 + +01:00.650 --> 01:02.900 +無料ティアだけでできることは非常に多い。 + +01:02.960 --> 01:07.320 +理屈の上では、 フリー・ティアでも僕らのクラスのほとんどのことができるはずなんだ。 + +01:07.320 --> 01:08.820 +ただ、 もっと時間がかかるかもしれない。 + +01:09.060 --> 01:14.430 +そして、 有料ティアでは、 いくら使うかをコントロールすることができ、 それは数ドルの問題で比較的少額にすることができる。 + +01:14.430 --> 01:20.490 +トレーニングに深く打ち込むことができ、 + +01:20.490 --> 01:23.670 +満足感も大きい。 + +01:23.760 --> 01:32.460 +だから、 新しいColabノートを思いついたとき、 すぐに提供されるものと少し似ているんだ。 + +01:32.460 --> 01:34.890 +Jupyterノートブックによく似ている。 + +01:34.920 --> 01:42.840 +セルにはコードもあればテキストもあり、 クリックするだけでコードを実行できる。 + +01:42.840 --> 01:45.990 +そして、 これはデフォルトのようなものだ。 + +01:46.020 --> 01:49.950 +私たちにできることは、 新しいノートをドライブにファイルすることだ。 + +01:49.950 --> 01:56.280 +このノートブックはグーグル・ドライブに作成されるので、 とても便利です。 + +01:56.280 --> 02:06.780 +GoogleドキュメントやGoogleシートを作るのと同じような構成で、 他のものを共有するのと同じように共有することができる。 + +02:06.780 --> 02:08.270 +だから、 ここにいる。 + +02:09.020 --> 02:14.840 +ジュピター・ノートのように見えるのは、 接続ボタンだ。 + +02:14.840 --> 02:19.400 +ランタイムの種類を変更するところから始めましょう。 ランタイムの種類を変更することで、 + +02:19.400 --> 02:23.690 +CPUで実行できるVMの種類がわかります。 + +02:23.690 --> 02:29.480 +言い換えれば、 GPU(グラフィックス・プロセッシング・ユニット)を1つも搭載していない普通の箱で、 + +02:29.480 --> 02:35.900 +ニューラルネットワークの背後にある並列マトリックス計算を実行するのが得意なのだ。 + +02:35.900 --> 02:41.300 +だから、 CPUボックスを選べばいいのだ。 + +02:41.330 --> 02:50.420 +T4と呼ばれるローエンドGPUボックスがあり、 これにはより小さなGPUが取り付けられている。 + +02:50.420 --> 02:58.340 +これは無料プランでも利用可能だが、 使用量に制限がある。 + +02:58.550 --> 03:08.930 +L4はもう少しハイスペックだし、 A100は最強で、 素早くやりたいときに使う。 + +03:08.960 --> 03:12.150 +少し高くつくが、 それでもドルの話だ。 + +03:12.180 --> 03:14.940 +大量ではない。 + +03:14.940 --> 03:16.050 +10ドルがあなたを捕まえる。 + +03:16.050 --> 03:26.580 +10ドルあれば、 24時間から48時間くらいはトレーニングを続けられると思う。 + +03:26.580 --> 03:34.170 +100sを多用するようになれば、 
そのようなことも視野に入ってくるでしょう。 + +03:34.830 --> 03:40.020 +ええと、 それで、 ええと、 もちろん、 いくら使っているかはいつでも確認できるし、 安いオプションにするか、 + +03:40.020 --> 03:43.500 +無料のオプションにするかはいつでも好きなように選択できる。 + +03:43.530 --> 03:46.650 +そして、 ボックスを選ぶと、 そのハイラム版を持つことができる。 + +03:46.650 --> 03:48.870 +これはGPUではなくCPUとラムの話だ。 + +03:48.900 --> 03:55.290 +GPUラムはどのインスタンスを選ぶかに関連するが、 高いCPU、 ラムが必要かどうかは選択できる。 + +03:55.290 --> 04:02.730 +そこで、 通常のRAMを搭載したCPUボックスに接続ボタンを押して接続することにしよう。 + +04:02.790 --> 04:07.680 +ボックスを探し出して接続する必要があるため、 接続には少し時間がかかるが、 これでOKだ。 + +04:07.710 --> 04:09.480 +私たちは今、 箱に取り付けられている。 + +04:09.480 --> 04:15.750 +このドロップダウンメニューから「View Resources(リソースを表示)」と選択し、 作業内容を確認する。 + +04:15.780 --> 04:17.040 +システム・ラムを見ることができる。 + +04:17.040 --> 04:24.840 +このボックスには13ギガ近くあり、 225ギガのディスクスペースがある。 + +04:25.290 --> 04:35.910 +そしてここに行き、 print hello Data Science Worldのように入力して実行することができる。 + +04:35.910 --> 04:39.150 +そして衝撃的なことに、 私たちはそのメッセージを印刷される。 + +04:39.330 --> 04:42.990 +ああ、 だから、 サプライズがないことを祈るよ。 + +04:42.990 --> 04:46.530 +クラウド上のCPUで動作するJupyterノートブックだ。 + +04:46.560 --> 04:48.210 +他にもいくつか触れておきたいことがある。 + +04:48.210 --> 04:50.370 +この下を見れば、 役に立つものがある。 + +04:50.370 --> 04:56.520 +これは、 ブラウザーの一種、 ファイル・ブラウザーをローカル・ディスク上に開くものだ。 + +04:56.550 --> 05:01.380 +このローカルディスクは一時的なもので、 このボックスの使用を終えると完全に消去される。 + +05:01.380 --> 05:06.900 +だから、 一時的なものと考えて、 そこにファイルを書き込んで、 モデルやデータをHuggingfaceのハブにアップロードして、 + +05:06.900 --> 05:13.290 +後でどこかにダウンロードするために使うことができる。 + +05:13.290 --> 05:14.880 +しかし、 これは一時的なものだ。 + +05:14.910 --> 05:16.290 +これはとても重要なことだ。 + +05:16.290 --> 05:21.000 +このキーは、 あなたのノートブックに関連する秘密と呼ばれるもののためのものです。 + +05:21.000 --> 05:27.090 +そしてここに、 ノートブック内でアクセスできる環境変数を入れる。 + +05:27.120 --> 05:31.020 +それはノートのコードに含まれるべきではない。 + +05:31.050 --> 05:33.930 +そして、 ここに表示されているのは、 私の擬人化APIキーです。 + +05:33.960 --> 05:37.530 +僕はOpenAIのAPIキーとハグする顔のトークンを持っている。 + +05:37.530 --> 05:43.890 +これは前回のビデオで作成したもので、 このノートブックに関連付けました。 + +05:43.920 --> 05:46.020 +新しいシークレットを追加]を押すだけです。 + +05:46.020 --> 05:48.270 +そして、 それは私のすべてのノートブックに付属している。 + +05:48.450 --> 05:51.870 +ええと、 
それは僕のコラボの秘密として設定されているからなんだ。 + +05:51.870 --> 05:56.880 +そこで新しいシークレットを追加を押せば、 新しいシークレットを作成できる。 + +05:57.270 --> 06:01.590 +ノートブックへのアクセスはここで切り替えることができる。 + +06:01.860 --> 06:05.280 +ジェミニ・キーを作成するオプションがあるのを今見たよ。 + +06:05.280 --> 06:11.370 +双子座へのクロスセリングであることは明らかだし、 私が双子座のキーを作るのは難しいと言っていることも知っている。 + +06:11.370 --> 06:15.300 +もしかしたら、 ジェミニのAPIキーを作成するための簡単なパスがそこにあるのかもしれない。 + +06:15.300 --> 06:16.740 +だから、 試してみる価値はあるだろう。 + +06:16.770 --> 06:20.460 +まだGemini APIキーの設定を行っていない場合。 + +06:20.670 --> 06:27.180 +Jupyterノートブックから自分のキーにアクセスする方法は、 後で説明しようと思っていたんだ。 + +06:27.180 --> 06:30.540 +しかし、 素晴らしいことに、 彼らは小さな小さなスクリプトのようなコードを与えてくれた。 + +06:30.540 --> 06:36.690 +これが、 右のコード内の秘密にアクセスするために後でやることだ。 + +06:36.690 --> 06:40.020 +だから、 機会があれば、 これらをセットアップしておくべきだ。 + +06:40.050 --> 06:45.630 +特に実際のノートブックで作業しているときは、 このスイッチをオンにして、 あるセルでこのコードを実行したときに、 + +06:45.630 --> 06:50.820 +そのコードがそのシークレットにアクセスできるようにする。 + +06:51.120 --> 06:59.490 +そしてもちろん、 ご想像のとおり、 これらの秘密が強力なのは、 このノートブックを他の人と共有すれば、 その人があなたのコードをすべて手に入れることができるということだ。 + +06:59.490 --> 07:02.100 +しかしもちろん、 彼らはあなたの秘密を共有することはない。 + +07:02.100 --> 07:07.380 +そのコードを実行できるようにするためには、 彼ら自身の秘密を入力しなければならない。 + +07:07.380 --> 07:12.240 +もちろん、 私が皆さんにノートをお分けするときも同様です。 + +07:12.240 --> 07:18.180 +フロンティアのモデルに対してコードを実行したり、 ハグする顔のハブを使ったりするためには、 + +07:18.180 --> 07:23.850 +独自のトークンを入れる必要がある。 + +07:24.600 --> 07:26.700 +よし、 これで終わりにしよう。 + +07:26.700 --> 07:30.930 +では、 より強力なボックスをいくつか紹介しよう。 + +07:30.930 --> 07:34.500 +ここでランタイム・タイプを変更することができる。 + +07:34.500 --> 07:38.040 +そのボックスを使用するにはT4をクリックする。 + +07:38.040 --> 07:40.080 +さっきもそうだった。 + +07:40.230 --> 07:45.150 +そうしたのは、 いくつかのボックスに接続するのに少し時間がかかることがあるからだ。 + +07:45.150 --> 07:50.700 +A100のような本当にハイスペックなボックスでは、 時々使用できないことがあり、 2分後にもう一度来て試してみると、 + +07:50.700 --> 07:54.180 +使用できるようになっている。 + +07:54.180 --> 08:00.660 +数回トライすれば必ず空くが、 時には定員オーバーで数回トライすることもある。 + +08:00.660 --> 08:02.580 +これがT4ボックスか。 + +08:02.580 --> 08:09.210 +リソースを見ると、 また12と少しシステムラムがある。 + +08:09.210 --> 08:12.780 +ハードディスクは同じで、 少し小さいと思う。 + +08:12.960 --> 08:15.690 
+以前は2か5だったと思うが、 今は200だ。 + +08:15.690 --> 08:16.980 +十分なディスク容量だ。 + +08:16.980 --> 08:24.000 +そして、 15GBのラムを搭載したGPUがある。 15GBというと、 GPUにとっては膨大なラムの量に聞こえるかもしれない。 + +08:24.000 --> 08:28.240 +しかし、 ディープ・ニューラル・ネットワークをトレーニングすることになればすぐにわかるように、 これはある意味ちっぽけなGPUだが、 + +08:28.270 --> 08:30.490 +我々の目的には十分だ。 + +08:30.490 --> 08:32.920 +このクラスで使うことができるだろう。 + +08:33.130 --> 08:38.110 +うーん、 でも、 時間がかかるものもあるかもしれない。 + +08:38.260 --> 08:45.100 +これは、 Googleが私たちに促してくれたオリジナル・ラボからコピーしたコードで、 + +08:45.100 --> 08:54.250 +15GBのうちどれだけのメモリーを使用しているかなど、 このGPUの詳細をプリントアウトしてくれる。 + +08:54.280 --> 08:57.040 +もちろん、 こちらでその様子を見ることもできる。 + +08:58.000 --> 09:02.110 +ええと、 これがT4の箱ですね。 + +09:02.110 --> 09:05.410 +これからA100の箱をお見せします。 + +09:05.410 --> 09:11.290 +これは超強力なもので、 時々これを使うかもしれない。 + +09:11.290 --> 09:17.440 +このクラスを早く進め、 皆さんに素晴らしい結果を早くお見せしたいという精神からです。 + +09:17.590 --> 09:21.700 +リソースを見れば、 何が起こっているかわかるだろう。 + +09:21.700 --> 09:29.380 +現在、 40ギガバイトのラム、 GPUを搭載している。 + +09:29.380 --> 09:34.240 +それは、 重たいトレーニングをするために使えるものだ。 + +09:34.480 --> 09:37.750 +そして、 これを使って詳細を印刷することができる。 + +09:37.840 --> 09:49.270 +40GBの空きメモリのうち、 何もしていないときに2メガバイト使っているのがわかるだろう。 + +09:49.870 --> 09:53.200 +というわけで、 Colabの近況をざっとご紹介した。 + +09:53.200 --> 09:57.040 +もうひとつ言っておくと、 ここにシェアボタンがある。 + +09:57.070 --> 10:03.880 +共有ボタンを押すと、 とても見慣れたインターフェイスが表示される。 Googleドライブをまったく使っていない人なら、 + +10:03.880 --> 10:07.600 +Googleドライブの他のものと同じように見えるからだ。 + +10:07.630 --> 10:16.330 +これらのノートブックを、 異なるグループと異なる許可レベルで共有し、 本当に効果的なコラボレーションの方法として使うことができる。 + +10:16.330 --> 10:26.110 +あなたが取り組んでいるジェンのAIプロジェクトの友人、 同僚、 同僚と。 + +10:26.110 --> 10:29.380 +もちろん、 共同作業には超効果的な方法だ。 + +10:29.410 --> 10:32.980 +それが、 グーグルコラボのセットアップを使う大きなメリットのひとつだ。 + +10:33.220 --> 10:33.910 +分かった。 + +10:33.910 --> 10:35.500 +また次の講義でお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/59170055/ko_KR.srt b/week5/community-contributions/subtitles/srts/59170055/ko_KR.srt new file mode 100755 index 0000000..4c85fe7 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170055/ko_KR.srt @@ -0,0 
+1,451 @@ +WEBVTT + +00:00.740 --> 00:03.140 +구글 콜랍의 세계에 잘 오셨어요 + +00:03.140 --> 00:08.660 +구글 콜랍에 대해 잘 아실지도 모르지만 그래도 몇 가지 보여드릴게요 + +00:08.780 --> 00:13.340 +하지만 그게 아니라면 멋진 도구를 준비하세요 + +00:13.610 --> 00:17.630 +콜랍과 비슷한 제품을 구글에 검색한 경쟁 업체도 있어요 + +00:17.750 --> 00:26.990 +여기서 시작하거나 같은 걸 하길 권합니다 클라우드 컴퓨팅 플랫폼에서요 + +00:26.990 --> 00:33.140 +구글 계정이 없다면 가장 먼저 구글 계정을 만드세요 + +00:33.170 --> 00:40.970 +URL Colab으로 가보죠 연구요 구글 검색요 구글 계정이 없는 분들도 + +00:40.970 --> 00:43.850 +계정을 만들라고 할 거예요 + +00:43.850 --> 00:44.840 +그럴 가치가 있어요 + +00:44.840 --> 00:46.880 +할 수 있는 일이 아주 많을 거예요 + +00:46.880 --> 00:50.240 +그러니까 필요하면 그렇게 하세요 + +00:50.510 --> 00:56.990 +하지만 다른 분들을 위해 이걸 보시면 콜랍에 대한 정보가 + +00:56.990 --> 00:58.010 +나와요 + +00:58.010 --> 01:00.650 +무료 계층과 유료 계층이 있어요 + +01:00.650 --> 01:02.900 +무료 계층으로도 할 수 있는 게 정말 많아요 + +01:02.960 --> 01:07.320 +이론상으로는 무료로 수업 내용을 거의 다 들을 수 있어야 해요 + +01:07.320 --> 01:08.820 +시간이 좀 더 걸릴 뿐이죠 + +01:09.060 --> 01:13.680 +유료 계층은 여러분이 얼마를 쓸지 결정할 수 있습니다 몇 달러 안에 비교적 적은 금액일 + +01:13.680 --> 01:14.430 +수도 있죠 + +01:14.430 --> 01:20.490 +그래서 반드시 고려해 보시길 권합니다 더 깊이 훈련할 수 있고 아주 + +01:20.490 --> 01:23.670 +만족스러울 테니까요 Get it + +01:23.760 --> 01:32.460 +콜랍 비트를 새로 만들면 바로 나오는 것과 비슷하죠 + +01:32.460 --> 01:34.890 +주피터 공책과 아주 비슷해요 + +01:34.920 --> 01:41.820 +코드나 텍스트가 될 수 있는 셀이 있어요 클릭해서 실행하면 코드를 실행할 + +01:41.820 --> 01:42.840 +수 있죠 + +01:42.840 --> 01:45.990 +이건 기본값으로 나오는 거죠 + +01:46.020 --> 01:49.950 +드라이브에 새 공책을 철할 수 있어요 + +01:49.950 --> 01:56.280 +드라이브에서는 이렇게 된대요 구글 드라이브에서 만든 노트라서 아주 편리하죠 + +01:56.280 --> 02:03.360 +구글 문서나 구글 시트를 만드는 것과 같은 구성 구조를 갖고 있어요 다른 걸 공유하듯 + +02:03.360 --> 02:06.780 +공유할 수 있는 방식으로 이뤄졌죠 + +02:06.780 --> 02:08.270 +그래서 이렇게 됐죠 + +02:09.020 --> 02:14.840 +제일 먼저 보이는 건 주피터 노트북처럼 보이는 연결 버튼이에요 + +02:14.840 --> 02:19.400 +런타임 형식 변경부터 보여드릴게요 다양한 런타임과 + +02:19.400 --> 02:23.690 +CPU를 실행할 다양한 VM을 보여주니까요 + +02:23.690 --> 02:29.480 +다시 말해 일반 상자에는 GPU가 없다는 거죠 그래픽 처리 장치가 + +02:29.480 --> 02:35.900 +없는 상자요 신경망 뒤에 있는 평행 행렬 수학을 실행하는 장치요 + +02:35.900 --> 02:41.300 +CPU 상자를 선택하면 
됩니다 무료 계층에서 많이 사용 가능하죠 + +02:41.330 --> 02:50.420 +T4라는 저단 GPU 박스가 있는데 더 작은 GPU가 부착되어 있어요 + +02:50.420 --> 02:56.390 +무료 요금제로도 구매하실 수 있어요 사용량에 따라 요금제한이 있지만 유료 요금제에서도 + +02:56.390 --> 02:58.340 +아주 저렴해요 + +02:58.550 --> 03:05.630 +L4는 사양이 좀 더 높은 비트와 A100은 가장 강한 비트로 + +03:05.630 --> 03:08.930 +빨리 작업할 때 쓸 거예요 + +03:08.960 --> 03:12.150 +비트는 좀 더 들지만 그래도 달러잖아요 + +03:12.180 --> 03:14.940 +엄청난 양은 아니죠 + +03:14.940 --> 03:16.050 +10달러면 Get이죠 + +03:16.050 --> 03:23.850 +10달러만 있으면 24시간에서 48시간 동안 계속 훈련할 수 있어요 그 상자를 + +03:23.850 --> 03:26.580 +계속 사용하면서요 + +03:26.580 --> 03:32.370 +그래서 큰돈을 벌지는 못해도 a100을 많이 사용하면 + +03:32.370 --> 03:34.170 +주목을 받게 되죠 + +03:34.830 --> 03:40.020 +그래서 항상 얼마를 쓰는지 확인할 수 있고 언제든 더 싼 옵션이나 + +03:40.020 --> 03:43.500 +무료 옵션을 선택할 수 있어요. + +03:43.530 --> 03:46.650 +상자를 고르면 높은 램 버전이 나와요 + +03:46.650 --> 03:48.870 +GPU 말고 CPU, 램에 관한 거죠 + +03:48.900 --> 03:54.030 +GPU 램은 여러분이 고르는 인스턴스와 연결되어 있지만 높은 CPU, 램을 원하는지를 + +03:54.030 --> 03:55.290 +선택할 수 있어요 + +03:55.290 --> 04:02.730 +보통 양의 램을 가진 CPU 박스로 가서 연결 버튼을 눌러 박스에 연결할게요 + +04:02.790 --> 04:07.680 +연결하는 데 시간이 좀 걸려요 상자를 찾아서 연결해야 하니까요 하지만 됐어요 + +04:07.710 --> 04:09.480 +지금 우린 상자에 연결돼 있어요 + +04:09.480 --> 04:15.750 +이 드롭다운으로 가서 리소스 보기에서 작업 중인 걸 보세요 + +04:15.780 --> 04:17.040 +시스템 램이 보이죠 + +04:17.040 --> 04:24.840 +이 박스는 거의 13기가이고 디스크 공간은 225기가예요 + +04:25.290 --> 04:35.910 +여기로 가서 hello Data 과학 World 같은 걸 입력해 실행할 수 있어요 + +04:35.910 --> 04:39.150 +놀랍게도 그 메시지가 인쇄됐어요 get it + +04:39.330 --> 04:42.990 +놀랄 일은 없길 바라요 + +04:42.990 --> 04:46.530 +주피터 노트북이 클라우드에서 CPU를 실행하고 있어요 + +04:46.560 --> 04:48.210 +몇 가지 더 언급할 게 있어요 + +04:48.210 --> 04:50.370 +이 아래를 보면 유용한 게 있어요 + +04:50.370 --> 04:56.520 +이건 일종의 브라우저를 엽니다 파일 브라우저요 로컬 디스크로요 + +04:56.550 --> 05:01.380 +이 부분 디스크는 일시적이고 이 상자를 다 쓰면 완전히 지워져요 + +05:01.380 --> 05:06.900 +임시라고 생각하고 파일을 작성할 수 있습니다 그 후 모델이나 데이터를 + +05:06.900 --> 05:13.290 +H깅페이스 허브에 업로드 할 수 있죠 나중에 다른 곳에서 다운로드 할 수 있도록요 + +05:13.290 --> 05:14.880 +하지만 이건 일시적인 거예요 + +05:14.910 --> 05:16.290 +아주 중요한 거예요 + +05:16.290 --> 
05:21.000 +이 열쇠는 당신 수첩과 관련된 비밀들을 여는 열쇠예요 + +05:21.000 --> 05:26.520 +여기 환경 변수를 입력할 수 있어요 노트북 안에서 액세스할 수 있는 + +05:26.520 --> 05:27.090 +거죠 + +05:27.120 --> 05:31.020 +그건 공책 코드에 포함되면 안 되죠 + +05:31.050 --> 05:33.930 +여기 보이는 건 인류 API 키예요 + +05:33.960 --> 05:37.530 +OpenAI API 키와 포옹하는 얼굴 토큰이 있어요 + +05:37.530 --> 05:43.890 +지난 비디오에서 만든 거죠 이 공책과 관련 있어요 + +05:43.920 --> 05:46.020 +Add New 시크릿을 누르면 돼요 + +05:46.020 --> 05:48.270 +제 모든 공책과 관련이 있어요 + +05:48.450 --> 05:51.870 +콜라베의 비밀이라고 적어 놨거든요 + +05:51.870 --> 05:56.880 +새 비밀 추가를 눌러 새 걸 만들 수 있어요 + +05:57.270 --> 06:01.590 +여기서 공책 사용 권한을 바꿀 수 있어요 + +06:01.860 --> 06:05.280 +제미니 만들기 핵심 옵션이 있네요 + +06:05.280 --> 06:10.500 +제미니 키스와 교차 판매를 하는 거죠 제미니 키스를 만드는 건 어려운 + +06:10.530 --> 06:11.370 +일이에요 + +06:11.370 --> 06:15.300 +제미니 API 키를 만드는 더 쉬운 길이 있을지도 몰라요 + +06:15.300 --> 06:16.740 +시도해 볼 만하겠어요 + +06:16.770 --> 06:20.460 +제미니 API 키를 설정하는 복잡한 과정을 아직 안 거쳤다면요 + +06:20.670 --> 06:26.340 +그래서... 나중에 말씀드리려고 했는데 주피터 수첩에서 열쇠에 접근하는 방법을 알아낼 + +06:26.340 --> 06:27.180 +거예요 + +06:27.180 --> 06:30.540 +하지만 놀랍게도 작은 스크립트렛 코드를 제공했어요 + +06:30.540 --> 06:36.690 +나중에 그렇게 할 겁니다 오른쪽 코드에 있는 우리 비밀에 접근하기 위해서요 + +06:36.690 --> 06:40.020 +그러니까 기회 있을 때 이런 거 좀 만들어 놔요 get it get it + +06:40.050 --> 06:45.630 +특히 실제 노트북으로 작업할 땐 이 스위치를 켜서 셀에서 + +06:45.630 --> 06:50.820 +이 코드를 실행할 때 해당 비밀에 접근하게 해야죠 + +06:51.120 --> 06:56.100 +그리고 아시다시피 이 비밀의 강력한 힘은 이 공책을 다른 사람과 공유하면 + +06:56.100 --> 06:59.490 +코드를 모두 얻게 된다는 거죠. Get it. 
+ +06:59.490 --> 07:02.100 +물론 비밀을 공유하진 못하죠 Get it + +07:02.100 --> 07:07.380 +코드를 실행하려면 자신의 비밀을 입력해야 하죠 + +07:07.380 --> 07:12.240 +마찬가지로 공책을 공유할 때도 같은 일이 적용되죠 + +07:12.240 --> 07:18.180 +코드를 활용하고 프론티어 모델에 적용하려면 여러분의 토큰을 넣어야 + +07:18.180 --> 07:23.850 +합니다 또는 for 포옹 얼굴 허브 같은 걸 사용하세요 + +07:24.600 --> 07:26.700 +이제 닫을게요 + +07:26.700 --> 07:30.930 +좀 더 강력한 상자들을 보여드릴게요 + +07:30.930 --> 07:34.500 +기억하시겠지만 런타임 형식을 바꿀 수 있어요 + +07:34.500 --> 07:38.040 +T4를 클릭하면 사용하실 수 있어요 + +07:38.040 --> 07:40.080 +아까도 그랬고요 + +07:40.230 --> 07:45.150 +이렇게 한 이유는 상자에 연결하는 데 시간이 좀 걸리기 때문이에요 + +07:45.150 --> 07:50.700 +A100처럼 고성능 박스가 있으면 구하기가 힘들어서 2분 후에 + +07:50.700 --> 07:54.180 +다시 와서 시도해 봐야 구할 수 있어요 + +07:54.180 --> 07:58.710 +언제나 두어 번 시도하면 구할 수 있지만 너무 많이 팔려서 몇 번 + +07:58.710 --> 08:00.660 +시도해야 할 때도 있어요 + +08:00.660 --> 08:02.580 +이건 T4 박스인데요 + +08:02.580 --> 08:09.210 +리소스를 보면 12개와 시스템 램이 약간 있네요 + +08:09.210 --> 08:12.780 +하드 드라이브는 좀 작지만요 + +08:12.960 --> 08:15.690 +전에는 200달러였는데 지금은 200달러예요 + +08:15.690 --> 08:16.980 +디스크 공간이 충분하죠 + +08:16.980 --> 08:24.000 +15GB 램이 있는 GPU가 있는데 GPU에 15GB 램은 너무 많은 것 같아요 + +08:24.000 --> 08:28.240 +하지만 심층 신경망 훈련에 관해선 자그마한 GPU 수준이지만 + +08:28.270 --> 08:30.490 +우리 목적에는 충분하죠 + +08:30.490 --> 08:32.920 +이 수업에서 활용할 수 있을 거예요 + +08:33.130 --> 08:38.110 +하지만 어떤 일은 시간이 오래 걸릴 수도 있어요 + +08:38.260 --> 08:45.100 +이건 코드인데 복사한 겁니다 구글 프롬프트 원본 콜라브에서요 + +08:45.100 --> 08:51.970 +GPU 뒤의 세부 정보를 출력한 거죠 15GB에서 사용하는 메모리도 + +08:52.000 --> 08:54.250 +포함해서요 + +08:54.280 --> 08:57.040 +물론 여기서도 볼 수 있지만요 + +08:58.000 --> 09:02.110 +이게 T4 상자예요 + +09:02.110 --> 09:05.410 +A100 박스를 보여 드릴게요 + +09:05.410 --> 09:11.290 +이건 슈퍼 파워로 가끔 돈을 펑펑 쓰며 사용할 수도 있어요 + +09:11.290 --> 09:17.440 +이 수업을 빠르게 진행하고 좋은 결과를 빨리 보여드리기 위한 정신이죠 + +09:17.590 --> 09:21.700 +리소스를 보면 어떤 상황인지 알 수 있어요 + +09:21.700 --> 09:29.380 +40기가 램이 있고 GPU는 아주 큰 GPU죠 + +09:29.380 --> 09:34.240 +격렬한 훈련을 하는 데 유용할 거예요 + +09:34.480 --> 09:37.750 +이걸 이용해서 더 자세한 걸 인쇄할 수 있어요 + +09:37.840 --> 09:46.930 +2메가바이트로 사용하고 있는 걸 보실 수 있어요 사용 가능한 메모리 
40GB에서 아무것도 + +09:46.930 --> 09:49.270 +하지 않을 때요 + +09:49.870 --> 09:53.200 +콜랍에 대해 간단히 살펴봤는데요 + +09:53.200 --> 09:57.040 +또 한 가지 언급할 것은 공유 버튼이에요 + +09:57.070 --> 10:03.880 +공유 버튼을 누르면 아주 익숙한 인터페이스가 보일 겁니다 구글 드라이브를 사용한다면 구글 + +10:03.880 --> 10:07.600 +드라이브의 다른 모든 것과 똑같을 테니까요 + +10:07.630 --> 10:13.600 +허가 수준에 따라 다른 그룹과 이 노트를 공유할 수 있어요 협업하는 + +10:13.600 --> 10:16.330 +데 효과적인 방법이죠 + +10:16.330 --> 10:26.110 +친구, 동료, 동료와 함께 인공지능 프로젝트를 진행하고 계시죠? + +10:26.110 --> 10:29.380 +물론 협업하기에 아주 효과적인 방법이죠 + +10:29.410 --> 10:32.980 +구글 Colab 설정이 가진 가장 큰 장점 중 하나죠 + +10:33.220 --> 10:33.910 +좋아요 + +10:33.910 --> 10:35.500 +다음 강의 때 뵙죠 diff --git a/week5/community-contributions/subtitles/srts/59170057/en_US.srt b/week5/community-contributions/subtitles/srts/59170057/en_US.srt new file mode 100755 index 0000000..975ee16 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170057/en_US.srt @@ -0,0 +1,34 @@ +WEBVTT + +00:00.530 --> 00:05.750 +And so at the beginning of this week, we started by talking about hugging face pipelines. + +00:05.750 --> 00:08.750 +And you used all the different pipeline. + +00:08.750 --> 00:13.130 +Not actually not all of them, because there's so many, but we use many of the most common pipelines + +00:13.130 --> 00:15.980 +to do every day inference tasks. + +00:15.980 --> 00:23.000 +And now today we looked at Tokenizers and you are well versed in Tokenizers, and hopefully a lot has + +00:23.000 --> 00:27.560 +come together in terms of your understanding of what they mean and how they work, and special tokens + +00:27.560 --> 00:29.060 +and all the like. + +00:29.150 --> 00:37.760 +So next time, next time we start to work with models and this is when we can use the underlying hugging + +00:37.790 --> 00:44.870 +face code that is a wrapper around PyTorch or TensorFlow code to generate text and compare the results + +00:44.870 --> 00:48.170 +across multiple open source models. 
+ +00:48.170 --> 00:51.590 +And that's going to be a ton of fun and I'm looking forward to it. diff --git a/week5/community-contributions/subtitles/srts/59170057/ja_JP.srt b/week5/community-contributions/subtitles/srts/59170057/ja_JP.srt new file mode 100755 index 0000000..7ebf0f0 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170057/ja_JP.srt @@ -0,0 +1,25 @@ +WEBVTT + +00:00.530 --> 00:05.750 +そして今週の冒頭では、 まずハグフェイス・パイプラインについて話した。 + +00:05.750 --> 00:08.750 +そして、 あなたはすべての異なるパイプラインを使用した。 + +00:08.750 --> 00:15.980 +しかし、 私たちは日常的な推論作業に最も一般的なパイプラインの多くを使用しています。 + +00:15.980 --> 00:23.000 +そして今日、 トーケナイザーについて調べました。 トーケナイザーの意味や仕組み、 + +00:23.000 --> 00:29.060 +特別なトークンなどについて、 多くのことが理解できたと思います。 + +00:29.150 --> 00:37.760 +そこで次回は、 モデルを使って作業を開始し、 PyTorchやTensorFlowのコードのラッパーである、 + +00:37.790 --> 00:48.170 +基本的な抱きつき顔のコードを使ってテキストを生成し、 複数のオープンソースのモデル間で結果を比較できるようにする。 + +00:48.170 --> 00:51.590 +それはとても楽しいことだし、 楽しみにしているよ。 diff --git a/week5/community-contributions/subtitles/srts/59170057/ko_KR.srt b/week5/community-contributions/subtitles/srts/59170057/ko_KR.srt new file mode 100755 index 0000000..f2c6c03 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170057/ko_KR.srt @@ -0,0 +1,34 @@ +WEBVTT + +00:00.530 --> 00:05.750 +이번 주 초에는 페이스 파이프라인 껴안기부터 얘기해 봤는데요 + +00:05.750 --> 00:08.750 +다양한 파이프라인을 사용했죠 + +00:08.750 --> 00:13.130 +사실 전부는 아니죠 너무 많으니까요 하지만 가장 일반적인 파이프라인을 많이 + +00:13.130 --> 00:15.980 +사용합니다 매일 추론 작업을 하기 위해서요 + +00:15.980 --> 00:23.000 +오늘은 토큰이기에 대해 살펴봤는데요 토큰이 뭔지 잘 아실 겁니다 토큰의 + +00:23.000 --> 00:27.560 +의미와 작동 방식 특별한 토큰 등을 잘 이해하셨길 + +00:27.560 --> 00:29.060 +바라요 + +00:29.150 --> 00:37.760 +다음번엔 모델로 작업하기 시작할 때죠 끌어안는 얼굴 코드를 사용할 때인데요 PyToch나 + +00:37.790 --> 00:44.870 +텐서플로우 코드를 감싸 텍스트를 생성하고 여러 오픈 소스 모델에서 결과를 + +00:44.870 --> 00:48.170 +비교하는 거죠 + +00:48.170 --> 00:51.590 +정말 재미있을 거예요 기대가 돼요 diff --git a/week5/community-contributions/subtitles/srts/59170093/en_US.srt b/week5/community-contributions/subtitles/srts/59170093/en_US.srt new file 
mode 100755 index 0000000..1174562 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170093/en_US.srt @@ -0,0 +1,229 @@ +WEBVTT + +00:00.410 --> 00:02.180 +I'm delighted to see you again. + +00:02.180 --> 00:10.130 +As we get started with day three of week three of our adventure and the, uh, things are going to get + +00:10.130 --> 00:11.900 +get deeper this time. + +00:11.900 --> 00:18.140 +We're going to roll our sleeves up as we get into the lower level APIs of hugging Face Transformers + +00:18.140 --> 00:18.890 +library. + +00:19.490 --> 00:24.800 +And as always, just a quick reminder you can code against frontier models, you can build AI assistants, + +00:24.800 --> 00:26.330 +and you can use pipelines. + +00:26.330 --> 00:26.870 +Pipelines. + +00:26.870 --> 00:35.150 +What we did last time, such an easy way to use the wide variety of open source inference tasks available + +00:35.150 --> 00:36.290 +from Hugging Face. + +00:36.290 --> 00:39.260 +Today, though, we get lower level. + +00:39.350 --> 00:45.470 +As I mentioned, there are these these these two things, tokenizers and models that are part of the + +00:45.470 --> 00:49.430 +way we interact with transformers at a lower level than pipelines. + +00:49.430 --> 00:50.630 +And that's what we're going to be doing today. + +00:50.630 --> 00:53.000 +We're going to be starting with Tokenizers. + +00:53.000 --> 00:58.100 +We're going to be learning how to translate between text and tokens for different models, and we're + +00:58.100 --> 01:02.600 +going to be understanding something called chat templates, which I'm hoping is going to make a few + +01:02.600 --> 01:03.890 +different things come together. + +01:03.920 --> 01:06.170 +It's quite an important moment. 
+ +01:06.440 --> 01:13.700 +Um, so first, to introduce this type of object called a tokenizer in hugging face, it is an object + +01:13.700 --> 01:20.870 +which translates as you can imagine between text, a string and tokens, a list of numbers. + +01:21.020 --> 01:23.930 +Um, and there are very simply two functions. + +01:23.930 --> 01:26.960 +Two things you need to know about encoding and decoding. + +01:26.960 --> 01:32.060 +Encode takes you from strings to tokens, and decode takes you back again. + +01:32.060 --> 01:33.590 +And we will see that. + +01:33.590 --> 01:38.810 +And of course, there's just a little bit of nuance and fiddly stuff, but that's basically all there + +01:38.810 --> 01:39.920 +is to it. + +01:40.370 --> 01:48.290 +A tokenizer contains a vocab, which is all of the different fragments of characters of one character, + +01:48.290 --> 01:53.150 +two, three, four characters shoved together that make up that token. + +01:53.360 --> 01:57.110 +Um, and it can also include as well as these fragments of characters. + +01:57.110 --> 01:59.870 +It can include something called a special token. + +01:59.900 --> 02:07.880 +A few of these special tokens where a special token is again a single token that is going to tell the + +02:07.880 --> 02:15.620 +the model something that it represents, like start of a sentence or beginning of a chat with the assistant + +02:15.620 --> 02:17.210 +or something like that. + +02:17.660 --> 02:23.150 +And as I mentioned before, if you're thinking, okay, but how do we train a neural network architecture, + +02:23.150 --> 02:28.730 +how do we how do we how do we construct a neural network architecture so that it expects a particular + +02:28.730 --> 02:33.470 +token to represent something like start of sentence or something like that? + +02:33.470 --> 02:35.420 +And there's no magic answer. + +02:35.420 --> 02:37.370 +It just simply comes down to training. 
+ +02:37.370 --> 02:43.130 +If it's seen enough examples in its training data that has that special token being used for that purpose, + +02:43.160 --> 02:46.550 +it learns that that is the objective of that special token. + +02:46.550 --> 02:52.400 +But there's nothing fundamental in the architecture, generally speaking, that expects one particular + +02:52.400 --> 02:57.890 +type of token over another and also a tokenizer. + +02:57.890 --> 03:02.810 +In addition to doing this, mapping text to tokens and having a vocab also has something called a chat + +03:02.840 --> 03:03.590 +template. + +03:03.590 --> 03:07.320 +At least for a specific type of model, as we'll see. + +03:07.320 --> 03:14.160 +And that knows how to take a set of messages where you've had system message, user message and so on + +03:14.160 --> 03:16.950 +and turn that into just a set of tokens. + +03:16.950 --> 03:20.940 +And that will all make sense when you see a real example. + +03:21.630 --> 03:29.520 +So every model in hugging face, every open source model has its own tokenizer associated with it. + +03:29.520 --> 03:34.590 +There's not just one general tokenizer that applies to models because it depends on how the model was + +03:34.590 --> 03:35.190 +trained. + +03:35.220 --> 03:40.920 +The tokenizer, um, I mean, obviously multiple models could share the same tokenizer, but but what + +03:40.920 --> 03:46.200 +matters is which tokenizer was used when the model was trained, because you have to use exactly the + +03:46.200 --> 03:53.040 +same tokenizer during inference time when you're running it, otherwise you will get back bad results. + +03:53.130 --> 03:57.390 +Uh, maybe that's an experiment we should try at some point, but I'll you'll see why. + +03:57.390 --> 04:01.380 +That would be a very unproductive experiment in just a moment. 
+ +04:01.380 --> 04:10.590 +So for today we're going to look at the tokenizer for llama 3.1 which is the iconic family of models + +04:10.590 --> 04:12.120 +from Larma that paved. + +04:12.240 --> 04:12.420 +Sorry. + +04:12.450 --> 04:12.660 +From. + +04:12.690 --> 04:13.230 +From Larma. + +04:13.230 --> 04:13.890 +From Mehta. + +04:13.920 --> 04:17.010 +That paved the way for open source models. + +04:17.010 --> 04:20.670 +And we're going to look at a model called Phi three from Microsoft. + +04:20.670 --> 04:26.760 +And we're going to look at Quinn two again, the powerhouse from Alibaba Cloud, which leads the way + +04:26.760 --> 04:29.400 +in many of the different metrics. + +04:29.400 --> 04:35.790 +We're also going to look at something very different, which is a model called Star Coder two, which + +04:35.790 --> 04:41.010 +is a model for, for for generating code. + +04:41.010 --> 04:44.970 +We're going to look at its tokenizer to see any differences. + +04:45.270 --> 04:51.660 +Um, and the reason that these two have similar looking graphics is that Lama 3.1 and Phi three are + +04:51.660 --> 04:53.520 +extremely similar. + +04:53.550 --> 05:00.780 +Quantu perhaps it's also very similar, but it's it's got more of a focus on, uh, Chinese as well + +05:00.780 --> 05:01.650 +as English. + +05:01.650 --> 05:05.580 +And Star Coder two is of course more about coding. + +05:05.700 --> 05:12.120 +So with that introduction, we're going to head over to Google Colab and we're going to do some tokenizing. 
diff --git a/week5/community-contributions/subtitles/srts/59170093/ja_JP.srt b/week5/community-contributions/subtitles/srts/59170093/ja_JP.srt new file mode 100755 index 0000000..38cbd8a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170093/ja_JP.srt @@ -0,0 +1,169 @@ +WEBVTT + +00:00.410 --> 00:02.180 +また会えて嬉しいよ。 + +00:02.180 --> 00:11.900 +冒険の3週目、 3日目をスタートさせるにあたり、 ええと、 今回は物事がより深くなりそうだ。 + +00:11.900 --> 00:18.890 +我々は、 Face Transformersライブラリを抱きしめるための低レベルのAPIに入るために、 腕まくりをするつもりだ。 + +00:19.490 --> 00:26.330 +そしていつものように、 フロンティアモデルに対してコードを書くことも、 AIアシスタントを構築することも、 パイプラインを使用することもできることを簡単に覚えておいてほしい。 + +00:26.330 --> 00:26.870 +パイプライン + +00:26.870 --> 00:36.290 +前回やったことは、 Hugging Faceから入手可能なオープンソースの推論タスクを幅広く利用する簡単な方法だ。 + +00:36.290 --> 00:39.260 +今日、 私たちはもっと低いレベルにいる。 + +00:39.350 --> 00:45.470 +先ほど申し上げたように、 パイプラインよりも低いレベルでトランスフォーマーとやりとりする方法の一部として、 + +00:45.470 --> 00:49.430 +トークナイザーとモデルがあります。 + +00:49.430 --> 00:50.630 +そして、 それが今日私たちがやろうとしていることだ。 + +00:50.630 --> 00:53.000 +まずはトーケナイザーから。 + +00:53.000 --> 01:03.890 +テキストとトークンの間の翻訳をモデル別に学び、 チャット・テンプレートというものを理解するつもりです。 + +01:03.920 --> 01:06.170 +非常に重要な瞬間だ。 + +01:06.440 --> 01:13.700 +ええと、 まず、 ハギング・フェイスのトークナイザーと呼ばれるオブジェクトを紹介すると、 + +01:13.700 --> 01:20.870 +これはテキスト(文字列)とトークン(数値のリスト)の変換を行うオブジェクトです。 + +01:21.020 --> 01:23.930 +ええと、 簡単に言うと2つの機能がある。 + +01:23.930 --> 01:26.960 +エンコードとデコードについて知っておくべき2つのこと。 + +01:26.960 --> 01:32.060 +エンコードすると文字列からトークンになり、 デコードすると元に戻る。 + +01:32.060 --> 01:33.590 +そして私たちはそれを見ることになる。 + +01:33.590 --> 01:39.920 +もちろん、 ちょっとしたニュアンスや手間のかかることはあるが、 基本的にはそれだけだ。 + +01:40.370 --> 01:53.150 +トークナイザーにはボキャブラリーがあり、 1文字、 2文字、 3文字、 4文字など、 トークンを構成するさまざまな文字の断片がすべて含まれています。 + +01:53.360 --> 01:57.110 +そして、 このような文字の断片を含むこともできる。 + +01:57.110 --> 01:59.870 +スペシャル・トークンと呼ばれるものが含まれることもある。 + +01:59.900 --> 02:07.880 +特別なトークンとは、 文の始まりやアシスタントとのチャットの始まりなど、 + +02:07.880 --> 02:17.210 +モデルに何かを伝えるためのトークンです。 + +02:17.660 --> 02:23.150 +前にも言ったように、 ニューラルネットワーク・アーキテクチャをどのように訓練すればいいのか、 + +02:23.150 --> 02:28.730 
+特定のトークンが文頭などを表すと期待できるようにニューラルネットワーク・アーキテクチャをどのように構築すればいいのか、 + +02:28.730 --> 02:33.470 +と考えているのなら、 どうすればいいのだろう? + +02:33.470 --> 02:35.420 +そして、 魔法のような答えはない。 + +02:35.420 --> 02:37.370 +単純にトレーニングに尽きる。 + +02:37.370 --> 02:46.550 +もし学習データの中で、 その特別なトークンがその目的に使われている例を十分に見ていれば、 それがその特別なトークンの目的であることを学習する。 + +02:46.550 --> 02:52.400 +しかし、 一般的に言って、 ある特定のタイプのトークンを他のトークンよりも期待するような、 + +02:52.400 --> 02:57.890 +またトークナイザーを期待するような基本的なものは、 アーキテクチャにはない。 + +02:57.890 --> 03:03.590 +これに加えて、 テキストをトークンにマッピングし、 ボキャブラリーを持つことも、 チャットテンプレートと呼ばれるものがある。 + +03:03.590 --> 03:07.320 +少なくとも、 特定のタイプのモデルについては、 これからわかるだろう。 + +03:07.320 --> 03:16.950 +そして、 システム・メッセージやユーザー・メッセージなどのメッセージ・セットを、 トークン・セットに変換する方法を知っている。 + +03:16.950 --> 03:20.940 +そしてそれは、 実際の例を見ればすべて理解できるだろう。 + +03:21.630 --> 03:29.520 +だから、 ハギング・フェイスのすべてのモデル、 すべてのオープン・ソース・モデルには、 それ自身のトークナイザーが関連付けられている。 + +03:29.520 --> 03:35.190 +モデルがどのようにトレーニングされたかに依存するので、 モデルに適用される一般的なトークナイザーは1つだけではありません。 + +03:35.220 --> 03:53.040 +しかし、 重要なのは、 モデルが学習されたときにどのトークナイザーが使われたかということだ。 + +03:53.130 --> 03:57.390 +それはいずれやってみるべき実験かもしれない。 + +03:57.390 --> 04:01.380 +そんなことをしたら、 すぐに非生産的な実験になってしまう。 + +04:01.380 --> 04:12.120 +というわけで、 今日は llama 3 のトークナイザを見てみましょう。 ラルマの象徴的なモデル群である「1」が道を切り開いた。 + +04:12.240 --> 04:12.420 +申し訳ない。 + +04:12.450 --> 04:12.660 +からだ。 + +04:12.690 --> 04:13.230 +ラルマより + +04:13.230 --> 04:13.890 +メータより + +04:13.920 --> 04:17.010 +それがオープンソースモデルへの道を開いた。 + +04:17.010 --> 04:20.670 +マイクロソフトのファイ3というモデルを見てみよう。 + +04:20.670 --> 04:29.400 +アリババ・クラウドの強豪であり、 さまざまな指標で業界をリードしているクイン2を再び見てみよう。 + +04:29.400 --> 04:41.010 +スター・コーダー2と呼ばれる、 コードを生成するためのモデルだ。 + +04:41.010 --> 04:44.970 +そのトークナイザーを見て、 違いを確認する。 + +04:45.270 --> 04:53.520 +ええと、 この2つが似たようなグラフィックなのは、 ラマ3. 
1とファイ3は極めてよく似ている。 + +04:53.550 --> 05:01.650 +Quantuもよく似ていますが、 英語だけでなく中国語にも力を入れています。 + +05:01.650 --> 05:05.580 +Star Coder 2は、 もちろんコーディングに関するものだ。 + +05:05.700 --> 05:12.120 +それでは、 Google Colabに移動し、 トークン化を行います。 diff --git a/week5/community-contributions/subtitles/srts/59170093/ko_KR.srt b/week5/community-contributions/subtitles/srts/59170093/ko_KR.srt new file mode 100755 index 0000000..6474dcb --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170093/ko_KR.srt @@ -0,0 +1,220 @@ +WEBVTT + +00:00.410 --> 00:02.180 +다시 만나서 반가워요 + +00:02.180 --> 00:10.130 +모험 3주 차 3일째가 시작됐는데요 이번엔 상황이 더 심화될 거예요 + +00:10.130 --> 00:11.900 +get it + +00:11.900 --> 00:18.140 +페이스 트랜스포머 포옹 라이브러리 하위 레벨 API를 살펴보면서 팔을 걷어붙일게요. Get + +00:18.140 --> 00:18.890 +it. + +00:19.490 --> 00:24.800 +다시 한번 말씀드리지만 프론티어 모델에 대한 코드도 만들 수 있고 인공지능 비서를 만들 수도 있고 파이프라인을 + +00:24.800 --> 00:26.330 +사용할 수도 있어요 + +00:26.330 --> 00:26.870 +파이프라인요 + +00:26.870 --> 00:35.150 +지난번에 했던 건 정말 쉬운 방법이었죠 얼굴을 안는 것으로부터 가능한 다양한 오픈 소스 추론 작업을 사용하는 + +00:35.150 --> 00:36.290 +거요 + +00:36.290 --> 00:39.260 +오늘은 낮은 레벨로 가죠 Get it + +00:39.350 --> 00:45.470 +앞서 언급했듯이 토큰라이저와 모델은 파이프라인보다 낮은 수준에서 + +00:45.470 --> 00:49.430 +트랜스포머와 상호 작용하는 방법의 일부죠 + +00:49.430 --> 00:50.630 +오늘 그걸 할 거예요 + +00:50.630 --> 00:53.000 +토큰라이저부터 시작할 거예요 + +00:53.000 --> 00:58.100 +텍스트와 토큰을 다른 모델로 변환하는 법을 배울 거예요 채팅 템플릿이라는 + +00:58.100 --> 01:02.600 +것도 이해할 거고요 이로써 몇 가지 다른 것들이 하나로 합쳐지면 + +01:02.600 --> 01:03.890 +좋겠네요 + +01:03.920 --> 01:06.170 +중요한 순간이에요 + +01:06.440 --> 01:13.700 +먼저, 토큰마이저라는 유형의 객체를 소개할게요 얼굴을 안는 거예요 + +01:13.700 --> 01:20.870 +이 객체는 문자열과 토큰, 숫자 목록을 번역하는 거예요 + +01:21.020 --> 01:23.930 +기능은 두 가지로 아주 간단해요 + +01:23.930 --> 01:26.960 +암호화와 해독에 관해 알아야 할 게 두 가지 있어요 + +01:26.960 --> 01:32.060 +인코드는 문자열에서 토큰으로 디코드는 다시 과거로 돌아가게 하죠 + +01:32.060 --> 01:33.590 +두고 봐야죠 + +01:33.590 --> 01:38.810 +물론 약간의 뉘앙스와 성가신 부분이 있지만 기본적으로 그게 다예요 + +01:38.810 --> 01:39.920 +비트 박스는 + +01:40.370 --> 01:48.290 +토큰이에는 여러 단어가 들어 있어요 한 글자에서부터 네 글자까지 다양한 + 
+01:48.290 --> 01:53.150 +문자를 조합해서 토큰을 구성하는 거죠 + +01:53.360 --> 01:57.110 +또한 이런 캐릭터의 단편들도 포함할 수 있죠 + +01:57.110 --> 01:59.870 +특별한 토큰이라는 것도 포함할 수 있어요 + +01:59.900 --> 02:07.880 +몇몇 특별한 토큰은 하나의 토큰으로 모델에게 그것이 나타내는 + +02:07.880 --> 02:15.620 +것을 알려줍니다 문장의 시작이나 비서와의 채팅 시작 같은 + +02:15.620 --> 02:17.210 +것을요 + +02:17.660 --> 02:23.150 +아까도 언급했지만 신경망 구조를 어떻게 훈련할지 궁금하실 + +02:23.150 --> 02:28.730 +거예요 신경망 구조를 어떻게 구성해야 특정 토큰이 문장의 + +02:28.730 --> 02:33.470 +시작 같은 걸 나타낼지 궁금하실 거예요 + +02:33.470 --> 02:35.420 +마법 같은 답은 없어요 + +02:35.420 --> 02:37.370 +훈련만 잘하면 돼요 + +02:37.370 --> 02:43.130 +훈련 데이터에서 특정 토큰을 사용하는 예제를 충분히 봤다면 그게 그 + +02:43.160 --> 02:46.550 +특별한 토큰의 목표라는 걸 알게 되죠 + +02:46.550 --> 02:52.400 +하지만 아키텍처에서 근본적인 것은 없습니다 일반적으로 토큰의 종류를 + +02:52.400 --> 02:57.890 +다른 종류로 바꾸는 것은 없습니다 토큰라이저도 마찬가지죠 + +02:57.890 --> 03:02.810 +이 외에도 텍스트를 토큰에 매핑하고 대화방법도 가지고 있어요 채팅방 템플릿이라고도 + +03:02.840 --> 03:03.590 +하죠 + +03:03.590 --> 03:07.320 +적어도 특정 모델은 그렇죠 곧 보시겠지만요 + +03:07.320 --> 03:14.160 +시스템 메시지나 사용자 메시지 같은 메시지 세트를 토큰 + +03:14.160 --> 03:16.950 +세트로 바꿀 수 있죠 + +03:16.950 --> 03:20.940 +실제 예시를 보면 이해가 될 거예요 + +03:21.630 --> 03:29.520 +얼굴을 끌어안는 모든 모델 오픈 소스 모델은 그와 관련된 토큰라이저가 있어요 + +03:29.520 --> 03:35.190 +토큰이 한 명만 모델에 적용되는 게 아니에요 모델이 어떻게 훈련되느냐에 따라 다르거든요 + +03:35.220 --> 03:40.920 +토큰라이저는, 음 여러 모델이 같은 토큰라이저를 공유할 수 있지만, 중요한 것은 어떤 토큰라이저가 + +03:40.920 --> 03:46.200 +훈련된 모델에서 사용되었는가 입니다. 왜냐하면 실행중인 추론기간에 정확히 동일한 + +03:46.200 --> 03:53.040 +토큰라이저를 사용해야 하기 때문입니다. 그렇지 않으면 나쁜 결과를 얻을 수 있기 때문이죠. 
+ +03:53.130 --> 03:57.390 +언젠가 한번 해 봐야겠지만 이유는 알게 될 거예요 + +03:57.390 --> 04:01.380 +당장은 비생산적인 실험이 될 거예요 + +04:01.380 --> 04:12.120 +오늘은 라마 3 토큰라이저를 살펴볼 거예요 라르마의 상징적인 모델 가족이에요 + +04:12.240 --> 04:12.420 +미안해요 + +04:12.450 --> 04:12.660 +네 + +04:12.690 --> 04:13.230 +라르마한테서요 + +04:13.230 --> 04:13.890 +메타가 보냈어요 + +04:13.920 --> 04:17.010 +오픈 소스 모델의 길을 닦았죠 + +04:17.010 --> 04:20.670 +마이크로소프트의 파이 3 모델을 보죠 + +04:20.670 --> 04:26.760 +퀸 2도 다시 보죠 알리바바 클라우드의 동력원으로 여러 지표에서 + +04:26.760 --> 04:29.400 +선두를 달리고 있죠 + +04:29.400 --> 04:35.790 +다른 것도 살펴볼 거예요 Star Coder 2라는 + +04:35.790 --> 04:41.010 +모델인데 코드 생성을 위한 모델이죠 + +04:41.010 --> 04:44.970 +토큰라이저를 살펴보고 차이점을 찾아볼게요 + +04:45.270 --> 04:51.660 +이 두 차의 그래픽이 비슷한 이유는 라마 3 때문이에요 1과 피3는 굉장히 + +04:51.660 --> 04:53.520 +비슷해요 + +04:53.550 --> 05:01.650 +콴투도 비슷하긴 하지만 영어와 중국어에 더 중점을 두고 있어요 + +05:01.650 --> 05:05.580 +스타 코더 2는 물론 코딩에 관한 거죠 + +05:05.700 --> 05:12.120 +소개를 마쳤으니 구글 Colab으로 가서 토큰라이징을 해보죠 HDMHD HDMHDDHDHD diff --git a/week5/community-contributions/subtitles/srts/59170107/en_US.srt b/week5/community-contributions/subtitles/srts/59170107/en_US.srt new file mode 100755 index 0000000..c8ba879 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170107/en_US.srt @@ -0,0 +1,58 @@ +WEBVTT + +00:01.370 --> 00:08.900 +And once again, it's that moment when you take a pause and congratulate yourself on another day of + +00:08.900 --> 00:17.270 +skills learned and fantastic achievements of being able to be an expert in the hugging face Transformers + +00:17.270 --> 00:18.110 +library. + +00:18.110 --> 00:22.640 +In addition to using pipelines and tokenizers, you can now use models. + +00:22.640 --> 00:29.120 +You can look at models, you can load different models, and you can run models to do hopefully more + +00:29.120 --> 00:36.260 +than just tell jokes, but also other kinds of text generation tasks like the ones we've done in previous + +00:36.260 --> 00:37.250 +weeks. 
+ +00:37.340 --> 00:44.420 +Uh, you, uh, also can, of course, code confidently with frontier model APIs and build AI assistants, + +00:44.420 --> 00:48.320 +including multimodal AI assistants, and use tools. + +00:48.320 --> 00:55.820 +So all of this together, uh, totals a significant amount of learning that you've done already, with + +00:55.820 --> 00:58.250 +a lot more exciting stuff ahead. + +00:58.520 --> 01:03.890 +The next session, we're going to do one more project with Tokenizers and models, just to give you + +01:03.890 --> 01:05.720 +a little bit more experience. + +01:05.810 --> 01:12.500 +Uh, and we're also going to yeah, just keep keep running inference on open source models and implement + +01:12.500 --> 01:19.520 +an LLM solution that's going to combine a frontier model call with an open source model call. + +01:19.520 --> 01:22.610 +And it will be a useful business application. + +01:22.610 --> 01:28.400 +And it's going to really wrap up this week of learning about hugging face and open source. + +01:28.400 --> 01:30.140 +So looking forward to it. + +01:30.140 --> 01:31.220 +I will see you then. 
diff --git a/week5/community-contributions/subtitles/srts/59170107/ja_JP.srt b/week5/community-contributions/subtitles/srts/59170107/ja_JP.srt new file mode 100755 index 0000000..b1f43cc --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170107/ja_JP.srt @@ -0,0 +1,43 @@ +WEBVTT + +00:01.370 --> 00:08.900 +そしてまた、 ハグ顔トランスフォーマーライブラリーのエキスパートになるためのスキルを学び、 + +00:08.900 --> 00:18.110 +素晴らしい功績を残したもう一日の自分を祝福する瞬間だ。 + +00:18.110 --> 00:22.640 +パイプラインとトークナイザーに加えて、 モデルも使えるようになった。 + +00:22.640 --> 00:29.120 +モデルを見たり、 さまざまなモデルを読み込んだり、 ジョークを言うだけでなく、 + +00:29.120 --> 00:37.250 +前の週にやったような他の種類のテキスト生成タスクを実行することもできる。 + +00:37.340 --> 00:44.420 +もちろん、 フロンティアモデルAPIを使って自信を持ってコーディングし、 マルチモーダルAIアシスタントを含むAIアシスタントを構築し、 + +00:44.420 --> 00:48.320 +ツールを使うこともできる。 + +00:48.320 --> 00:58.250 +だから、 これらすべてを合わせると、 君たちはすでにかなりの量の学習をしてきたことになる。 + +00:58.520 --> 01:05.720 +次のセッションでは、 トーケナイザーとモデルを使ったプロジェクトをもう1つ行います。 + +01:05.810 --> 01:19.520 +さらに、 オープンソースモデルで推論を実行し続け、 フロンティアモデルコールとオープンソースモデルコールを組み合わせたLLMソリューションを実装するつもりだ。 + +01:19.520 --> 01:22.610 +そして、 ビジネス・アプリケーションとしても役立つだろう。 + +01:22.610 --> 01:28.400 +そして、 ハグ顔とオープンソースについて学んだこの1週間を締めくくることになる。 + +01:28.400 --> 01:30.140 +だから楽しみにしている。 + +01:30.140 --> 01:31.220 +それではまた。 diff --git a/week5/community-contributions/subtitles/srts/59170107/ko_KR.srt b/week5/community-contributions/subtitles/srts/59170107/ko_KR.srt new file mode 100755 index 0000000..ff98d50 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170107/ko_KR.srt @@ -0,0 +1,52 @@ +WEBVTT + +00:01.370 --> 00:08.900 +다시 한번 잠시 멈춰서 축하하는 순간입니다 오늘도 포옹하는 + +00:08.900 --> 00:18.110 +트랜스포머 라이브러리에서 기술을 배우고 놀라운 성과를 거뒀으니까요 + +00:18.110 --> 00:22.640 +파이프라인과 토큰라이저 외에도 모델을 사용할 수 있죠 + +00:22.640 --> 00:29.120 +모델을 보고 다른 모델을 로드하고 모델을 실행해 농담만 하는 게 + +00:29.120 --> 00:37.250 +아니라 다른 종류의 텍스트 생성 작업도 할 수 있어요 지난 주에 했던 것처럼요 + +00:37.340 --> 00:44.420 +또한 프론티어 모델 API로 자신 있게 코드를 작성하고 멀티모덜 인공지능 어시스턴트를 비롯한 인공지능 + +00:44.420 --> 00:48.320 +어시스턴트를 제작할 수 있으며 도구를 사용할 수도 있죠 + +00:48.320 --> 
00:55.820 +지금까지 배운 걸 합산하면 상당한 양의 학습이 될 거예요 앞으로 더 많은 + +00:55.820 --> 00:58.250 +걸 배울 수 있겠죠 + +00:58.520 --> 01:03.890 +다음 시간에는 Tokenizers와 모델로 프로젝트를 하나 더 할 겁니다 경험을 + +01:03.890 --> 01:05.720 +좀 더 드릴 수 있도록요 비트 + +01:05.810 --> 01:12.500 +그리고 오픈 소스 모델에 대한 추론을 계속 실행하고 LLM 솔루션을 구현할 + +01:12.500 --> 01:19.520 +겁니다 프런티어 모델 호출과 오픈 소스 모델 호출을 결합하는 거죠 + +01:19.520 --> 01:22.610 +유용한 비즈니스 응용 프로그램이 될 거예요 + +01:22.610 --> 01:28.400 +얼굴 안기와 오픈 소스 수업을 이걸로 마무리하죠 + +01:28.400 --> 01:30.140 +정말 기대돼요 + +01:30.140 --> 01:31.220 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59170135/en_US.srt b/week5/community-contributions/subtitles/srts/59170135/en_US.srt new file mode 100755 index 0000000..04cdb33 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170135/en_US.srt @@ -0,0 +1,154 @@ +WEBVTT + +00:00.830 --> 00:01.940 +Welcome. + +00:01.940 --> 00:02.870 +It's week three. + +00:02.870 --> 00:03.800 +It's day four. + +00:03.830 --> 00:11.720 +We are back on the adventure in open source land, back investigating how to run inference over open + +00:11.720 --> 00:12.890 +source models. + +00:13.130 --> 00:17.120 +And today it is time to look at the model class in Hugging Face. + +00:17.120 --> 00:20.390 +We talked originally about pipeline API, the high level API. + +00:20.420 --> 00:26.090 +Then we started talking about the low level API, beginning with Tokenizers and now onto the model. + +00:26.150 --> 00:28.580 +So what can you already do? + +00:28.610 --> 00:33.290 +Of course, in addition to coding with frontier models, building multimodal AI assistants or you can + +00:33.290 --> 00:38.270 +now do is use hugging faces, pipelines and tokenizers today. + +00:38.300 --> 00:41.270 +New skills, new classes. + +00:41.270 --> 00:49.010 +We're going to get into the models part of hugging face, which is when you actually create a transformer + +00:49.010 --> 00:51.860 +and run it to generate text. 
+ +00:51.860 --> 00:56.300 +And we'll be comparing results across five different models. + +00:56.300 --> 01:02.090 +I'm actually going to be doing three of them with you and leaving you to experiment with the other two, + +01:02.210 --> 01:07.910 +uh, so that you can have an extra exercise, but I'll have all of the code ready for you. + +01:08.300 --> 01:10.700 +Um, so it should be a lot of fun. + +01:10.970 --> 01:13.380 +So the models then to introduce them. + +01:13.380 --> 01:21.330 +We are going to again be working with llama 3.1 from meta, their flagship and groundbreaking model. + +01:21.330 --> 01:29.670 +We are going to be looking at Phi three, which is Microsoft's open source model, and Gemma from Google. + +01:29.670 --> 01:32.190 +It's a the small. + +01:32.190 --> 01:36.450 +The small cousin of Gemini is Google's Gemma. + +01:36.510 --> 01:41.880 +There are two other models that I'll be leaving you with to experiment with on your own. + +01:41.910 --> 01:49.830 +One of them is Mistral from Mistral and the other, the other is the powerhouse that is Quinn two. + +01:50.040 --> 01:53.190 +And I hope that you will enjoy using Quantu. + +01:54.270 --> 02:01.320 +So we're also going to be covering three aspects of working with open source models in the hugging face + +02:01.320 --> 02:02.160 +framework. + +02:02.430 --> 02:05.610 +Um, the first of them is called quantization. + +02:05.640 --> 02:13.080 +And this is about reducing the precision of the weights in the model so that it is easier to fit into + +02:13.080 --> 02:16.860 +memory and loads in and also can run faster. + +02:16.860 --> 02:23.820 +So quantization, a very important technique that allows us to work with, say, a one of the lower + +02:23.820 --> 02:25.290 +end GPU boxes. + +02:25.330 --> 02:29.650 +and when we get to training, it's going to be absolutely critical to be able to use quantization, + +02:29.650 --> 02:34.480 +to be able to train large open source models. 
+ +02:34.510 --> 02:39.160 +In fact, you've heard me saying, now the Q Laura, that is the name of the technique that we're going + +02:39.190 --> 02:42.040 +to be using in a couple of weeks time. + +02:42.040 --> 02:45.460 +And the Q and Q, Laura stands for quantization. + +02:45.460 --> 02:49.960 +So we will be coming up against quantization a few times on this journey. + +02:50.650 --> 02:54.580 +Today we're also going to be looking inside a model. + +02:54.580 --> 03:00.310 +So generally again this is a class that is more practical than theoretical. + +03:00.460 --> 03:02.050 +But this will be one of those moments. + +03:02.050 --> 03:10.750 +And we'll just take a peek inside at what what do the PyTorch layers look like that sit behind the hugging + +03:10.780 --> 03:12.970 +face Transformers library. + +03:13.720 --> 03:20.050 +And then also, we're so familiar with streaming at this point that it hardly needs to be said that + +03:20.050 --> 03:21.520 +we want to be able to stream results. + +03:21.520 --> 03:26.290 +So I will show you how you can work with open source models to stream results as well. + +03:26.290 --> 03:32.440 +So these are some of the little extra bits that we're going to look into in our voyage into running + +03:32.440 --> 03:35.710 +inference over the lower level APIs for hugging face. + +03:35.740 --> 03:36.940 +There's quite enough talk. + +03:36.940 --> 03:38.350 +Let's get to it. 
diff --git a/week5/community-contributions/subtitles/srts/59170135/ja_JP.srt b/week5/community-contributions/subtitles/srts/59170135/ja_JP.srt new file mode 100755 index 0000000..4956dd7 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170135/ja_JP.srt @@ -0,0 +1,130 @@ +WEBVTT + +00:00.830 --> 00:01.940 +ようこそ。 + +00:01.940 --> 00:02.870 +3週目だ。 + +00:02.870 --> 00:03.800 +4日目だ。 + +00:03.830 --> 00:12.890 +私たちはオープンソースの土地での冒険に戻り、 オープンソースのモデル上で推論を実行する方法を調査している。 + +00:13.130 --> 00:17.120 +そして今日は、 『ハギング・フェイス』のモデルクラスを見てみよう。 + +00:17.120 --> 00:20.390 +当初はパイプラインAPI、 つまりハイレベルAPIについて話をした。 + +00:20.420 --> 00:26.090 +その後、 低レベルのAPIについて話し始めた。 トーケナイザーから始まり、 今度はモデルについてだ。 + +00:26.150 --> 00:28.580 +では、 すでに何ができるのか? + +00:28.610 --> 00:33.290 +もちろん、 フロンティアモデルを使ったコーディングに加え、 マルチモーダルAIアシスタントの構築や、 + +00:33.290 --> 00:38.270 +現在できることは、 抱き顔、 パイプライン、 トークナイザーを使うことだ。 + +00:38.300 --> 00:41.270 +新しいスキル、 新しいクラス。 + +00:41.270 --> 00:51.860 +実際にトランスフォーマーを作成し、 それを実行してテキストを生成するのだ。 + +00:51.860 --> 00:56.300 +そして、 5つの異なるモデルの結果を比較する。 + +00:56.300 --> 01:07.910 +そのうちの3つを一緒にやって、 あとの2つで実験してもらうつもりだ。 + +01:08.300 --> 01:10.700 +だから、 とても楽しいはずだよ。 + +01:10.970 --> 01:13.380 +だから、 モデルたちはそれを導入した。 + +01:13.380 --> 01:21.330 +今回もリャマ3世と仕事をすることになる。 1、 metaのフラッグシップで画期的なモデル。 + +01:21.330 --> 01:29.670 +今回は、 マイクロソフトのオープンソースモデルであるファイ3と、 グーグルのジェンマを取り上げる。 + +01:29.670 --> 01:32.190 +小さいよ。 + +01:32.190 --> 01:36.450 +ジェミニのいとこにあたるのがグーグルのジェンマだ。 + +01:36.510 --> 01:41.880 +他にも2つのモデルがあるので、 自分で試してみてほしい。 + +01:41.910 --> 01:49.830 +一人はミストラルのミストラルで、 もう一人はクイン2の強豪だ。 + +01:50.040 --> 01:53.190 +そしてQuantuを楽しんで使ってほしい。 + +01:54.270 --> 02:02.160 +そこで今回は、 ハギング・フェイス・フレームワークでオープンソースのモデルを扱う際の3つの側面についても取り上げます。 + +02:02.430 --> 02:05.610 +その最初のものは量子化と呼ばれるものだ。 + +02:05.640 --> 02:13.080 +そしてこれは、 モデルの重みの精度を下げることで、 メモリに収めやすくし、 ロードしやすくし、 + +02:13.080 --> 02:16.860 +さらに高速に実行できるようにすることである。 + +02:16.860 --> 02:25.290 +量子化というのは、 例えばローエンドのGPUボックスで作業することを可能にする非常に重要なテクニックだ。 + +02:25.330 --> 02:34.480 +そしてトレーニングに入ると、 量子化を使えるかどうか、 
大規模なオープンソースモデルをトレーニングできるかどうかが絶対的に重要になる。 + +02:34.510 --> 02:42.040 +実際、 私が言っているのを聞いたことがあると思うが、 今、 Qローラ、 これは数週間後に使うテクニックの名前だ。 + +02:42.040 --> 02:45.460 +そしてQとQ、 ローラは量子化を意味する。 + +02:45.460 --> 02:49.960 +だから、 この旅では量子化に何度か直面することになる。 + +02:50.650 --> 02:54.580 +今日はモデルの内部も見てみよう。 + +02:54.580 --> 03:00.310 +だから、 このクラスは理論的というより実践的なクラスなんだ。 + +03:00.460 --> 03:02.050 +しかし、 これはその瞬間のひとつになるだろう。 + +03:02.050 --> 03:10.750 +そして、 ハグする顔のトランスフォーマーライブラリの後ろにあるPyTorchレイヤーがどのようなものか、 + +03:10.780 --> 03:12.970 +中を覗いてみよう。 + +03:13.720 --> 03:21.520 +そしてまた、 我々はこの時点でストリーミングに慣れ親しんでいるので、 結果をストリーミングできるようにしたいということはほとんど言うまでもない。 + +03:21.520 --> 03:26.290 +そこで、 オープンソースのモデルを使って、 どのように結果を出すことができるかを紹介しよう。 + +03:26.290 --> 03:32.440 +このように、 ハグフェイスのための低レベルのAPI上で推論を実行するための航海の中で、 私たちが調べようとしているのは、 + +03:32.440 --> 03:35.710 +ちょっとした余分な部分なのだ。 + +03:35.740 --> 03:36.940 +話はもう十分だ。 + +03:36.940 --> 03:38.350 +さっそく始めよう。 diff --git a/week5/community-contributions/subtitles/srts/59170135/ko_KR.srt b/week5/community-contributions/subtitles/srts/59170135/ko_KR.srt new file mode 100755 index 0000000..9af00c4 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170135/ko_KR.srt @@ -0,0 +1,151 @@ +WEBVTT + +00:00.830 --> 00:01.940 +어서 오세요 + +00:01.940 --> 00:02.870 +3주 차예요 + +00:02.870 --> 00:03.800 +4일째예요 + +00:03.830 --> 00:11.720 +오픈 소스 랜드로 돌아왔습니다 오픈 소스 모델을 어떻게 추론하는지 조사하고 + +00:11.720 --> 00:12.890 +있죠 + +00:13.130 --> 00:17.120 +오늘은 얼굴 껴안기 모범 수업을 해 볼게요 + +00:17.120 --> 00:20.390 +파이프라인 API 얘기를 했었죠 상위 수준 API요 + +00:20.420 --> 00:26.090 +그다음에는 낮은 수준의 API 얘기를 했습니다 토큰라이저로 시작해서 모델로 넘어갔죠 + +00:26.150 --> 00:28.580 +그래서 뭘 할 수 있는데요? 
+ +00:28.610 --> 00:33.290 +물론 지금은 개척 시대 모델의 코딩 외에도 다중 모듈 인공지능 비서를 제작하고 + +00:33.290 --> 00:38.270 +있습니다 현재는 얼굴 포옹이나 파이프라인 토큰라이저를 사용하죠 + +00:38.300 --> 00:41.270 +새로운 기술에 새로운 수업이죠 + +00:41.270 --> 00:49.010 +얼굴을 안는 것의 모델 부분으로 들어가겠습니다 변압기를 생성하고 텍스트를 생성하기 + +00:49.010 --> 00:51.860 +위해 실행하는 거죠 get it + +00:51.860 --> 00:56.300 +다섯 가지 모델로 결과를 비교할 거예요 + +00:56.300 --> 01:02.090 +제가 3개를 같이 할 거예요 나머지 2개는 당신이 실험해 보세요 + +01:02.210 --> 01:07.910 +추가 운동을 할 수 있게요 코드는 다 준비해 둘게요 + +01:08.300 --> 01:10.700 +재미있을 것 같아요 + +01:10.970 --> 01:13.380 +모델들이 그들을 소개하죠 + +01:13.380 --> 01:21.330 +라마 3을 다시 작업하게 될 거예요 기함이자 획기적인 모델인 메타에서 한 대 왔어요 + +01:21.330 --> 01:29.670 +마이크로소프트의 파이3 오픈 소스 모델과 구글의 젬마를 살펴볼 거예요 + +01:29.670 --> 01:32.190 +스몰 사이즈예요 + +01:32.190 --> 01:36.450 +제미니의 사촌이 구글의 제마죠 + +01:36.510 --> 01:41.880 +여러분이 직접 실험해 볼 모델이 두 개 더 있어요 + +01:41.910 --> 01:49.830 +하나는 미스트럴의 미스트럴이고 다른 하나는 퀸의 2번 선수예요 + +01:50.040 --> 01:53.190 +취안토를 즐겨 보세요 + +01:54.270 --> 02:02.160 +오픈 소스 모델과의 세 가지 측면도 다룰 겁니다 얼굴 프레임워크에서요 + +02:02.430 --> 02:05.610 +첫 번째는 퀀타이즈라는 거예요 + +02:05.640 --> 02:13.080 +이것은 모델의 무게의 정밀도를 줄이는 것입니다. 메모리에 쉽게 맞추고 로드도 + +02:13.080 --> 02:16.860 +쉽게 하고 더 빠르게 달릴 수 있죠. 
+ +02:16.860 --> 02:23.820 +퀀타이즈는 아주 중요한 기술로 GPU 하위 제품 중 하나로 작업할 수 + +02:23.820 --> 02:25.290 +있게 해주죠 + +02:25.330 --> 02:29.650 +트레이닝을 할 때 반드시 퀀타이즈를 사용할 수 있어야 + +02:29.650 --> 02:34.480 +합니다 큰 오픈 소스 모델을 훈련하기 위해서요 + +02:34.510 --> 02:39.160 +아까도 말했지만 Q 로라는 우리가 몇 주 후에 + +02:39.190 --> 02:42.040 +사용할 기술의 이름이에요 + +02:42.040 --> 02:45.460 +질문과 질문, 로라는 수량화의 약자예요 + +02:45.460 --> 02:49.960 +이번 여정에서 퀀타이즈와 몇 번 부딪힐 거예요 + +02:50.650 --> 02:54.580 +오늘은 모델 내부도 살펴볼 거예요 + +02:54.580 --> 03:00.310 +다시 말씀드리지만 이론보다는 실용적인 수업이에요 + +03:00.460 --> 03:02.050 +하지만 지금이 바로 그런 순간이에요 + +03:02.050 --> 03:10.750 +포옹하는 트랜스포머 라이브러리 뒤에 있는 파이토치 층은 어떤 모습일지 살짝 + +03:10.780 --> 03:12.970 +들여다볼게요 + +03:13.720 --> 03:20.050 +지금은 스트리밍에 익숙해서 결과를 스트리밍할 수 있다고 말할 + +03:20.050 --> 03:21.520 +필요도 없어요 + +03:21.520 --> 03:26.290 +결과 스트리밍을 위해 오픈 소스 모델로 작업하는 방법을 보여드리죠 + +03:26.290 --> 03:32.440 +이게 이번 항해에서 살펴볼 추가 사항입니다 얼굴을 껴안는 하위 + +03:32.440 --> 03:35.710 +레벨 API를 실행하는 거죠 + +03:35.740 --> 03:36.940 +얘기는 충분히 했어요 + +03:36.940 --> 03:38.350 +Get it, get it 해 보죠 diff --git a/week5/community-contributions/subtitles/srts/59170165/en_US.srt b/week5/community-contributions/subtitles/srts/59170165/en_US.srt new file mode 100755 index 0000000..23b6aca --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170165/en_US.srt @@ -0,0 +1,130 @@ +WEBVTT + +00:01.340 --> 00:05.000 +Welcome, everybody to the last day of week three. + +00:05.030 --> 00:05.810 +Week three. + +00:05.840 --> 00:06.710 +Day five. + +00:06.740 --> 00:12.740 +We're here already wrapping up open source model inference with hugging face. + +00:12.740 --> 00:16.790 +And today, today is the day that you're going pro. 
+ +00:16.790 --> 00:23.150 +Today is the day when we're putting together everything you've learned in the last four days of lectures + +00:23.150 --> 00:31.910 +and really solidifying it with an excellent, uh, juicy project, a business project which is going + +00:31.910 --> 00:37.940 +to give you some, some true experience in the field, what you can do already, if you don't mind me + +00:37.940 --> 00:41.180 +telling you one more time, you can code with frontier models. + +00:41.180 --> 00:46.970 +You can build AI assistants with tools, multi-modality, generating images, making sounds. + +00:47.120 --> 00:55.100 +Uh, and you can use pipelines, tokenizers and models within the hugging face Transformers library. + +00:55.130 --> 00:59.960 +Today, you're going to be even more confident with Tokenizers and models. + +00:59.960 --> 01:05.330 +You're going to be able to run inference across open source models with ease, and you're going to have + +01:05.360 --> 01:13.260 +implemented an LLM solution combining frontier and open source models together into one nice package. + +01:13.260 --> 01:21.240 +There's also going to be a good business challenge for you to keep working on this, so let's get started. + +01:22.440 --> 01:28.710 +The business problem that we have is a feature that is in many applications that we all know, and so + +01:28.710 --> 01:32.130 +it's a good, real kind of product. + +01:32.130 --> 01:40.260 +We want to build a solution that can create minutes of meetings including things like actions and owners + +01:40.260 --> 01:41.880 +and so on. + +01:42.120 --> 01:51.180 +Uh, it will be able to take an audio recording and then use a frontier model, use an API to convert + +01:51.180 --> 01:52.620 +the audio to text. 
+ +01:52.620 --> 01:58.320 +It's actually a task that I had given you as a follow on exercise from one of the projects last week, + +01:58.320 --> 02:01.830 +so you may have already experimented with this, but if not, we're going to do it together. + +02:01.830 --> 02:07.430 +We're going to call a frontier model to convert audio to text. + +02:07.430 --> 02:14.120 +We are then going to use an open source model to turn that text into meeting minutes, summarizing it, + +02:14.120 --> 02:17.760 +plucking out actions and owners and the like. + +02:17.820 --> 02:21.870 +And we will stream back results and show them in markdown. + +02:21.870 --> 02:25.380 +So these these are the activities we're going to do. + +02:25.410 --> 02:27.060 +That's how we're going to put it together. + +02:27.390 --> 02:31.800 +And it's going to build a product that will be useful. + +02:32.250 --> 02:34.140 +This is what we want to come up with. + +02:34.170 --> 02:40.440 +We want to be able to have a solution that produces minutes like this with discussion points, takeaways, + +02:40.470 --> 02:47.700 +action items, and as the input data to start with the resource that we'll be using. + +02:47.730 --> 02:56.400 +There are audio files of publicly available council meetings from councils across the US available on + +02:56.430 --> 02:57.270 +hugging face. + +02:57.270 --> 02:59.400 +And that is where we'll begin. + +02:59.670 --> 03:03.930 +I've already downloaded one of the audio files and taken a chunk out of it. + +03:04.200 --> 03:08.460 +In the interest of time, we'll do just a piece of the Denver City Council meeting rather than the whole + +03:08.460 --> 03:09.030 +meeting. + +03:09.300 --> 03:12.900 +But the idea is that that's going to help us show that it works. + +03:12.900 --> 03:17.370 +And then perhaps this is something that you'll be able to use for your own meetings, for real, when + +03:17.370 --> 03:19.680 +we have a working product. 
+ +03:19.710 --> 03:24.840 +So without further ado, let's go to Google Colab and let's build our application. diff --git a/week5/community-contributions/subtitles/srts/59170165/ja_JP.srt b/week5/community-contributions/subtitles/srts/59170165/ja_JP.srt new file mode 100755 index 0000000..adfe607 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170165/ja_JP.srt @@ -0,0 +1,100 @@ +WEBVTT + +00:01.340 --> 00:05.000 +ようこそ、 第3週最終日へ。 + +00:05.030 --> 00:05.810 +第3週 + +00:05.840 --> 00:06.710 +5日目。 + +00:06.740 --> 00:12.740 +オープンソースのモデル推論をハグ顔でラッピングしているところだ。 + +00:12.740 --> 00:16.790 +そして今日、 今日がプロになる日だ。 + +00:16.790 --> 00:23.150 +今日は、 この4日間の講義で学んだことをすべてまとめ、 + +00:23.150 --> 00:41.180 +素晴らしい、 あ、 ジューシーなプロジェクト、 ビジネス・プロジェクトでそれを本当に強固なものにする日だ。 + +00:41.180 --> 00:46.970 +ツール、 マルチモダリティ、 画像生成、 音声生成でAIアシスタントを作ることができる。 + +00:47.120 --> 00:55.100 +そして、 パイプライン、 トークナイザー、 抱擁顔トランスフォーマー・ライブラリー内のモデルを使うことができる。 + +00:55.130 --> 00:59.960 +今日は、 トーケナイザーとモデルを使って、 さらに自信を深めてください。 + +00:59.960 --> 01:13.260 +オープンソースのモデル間で簡単に推論を実行できるようになり、 フロンティアモデルとオープンソースモデルを1つの素晴らしいパッケージにまとめたLLMソリューションを実装したことになる。 + +01:13.260 --> 01:21.240 +また、 これに取り組み続けることは、 あなたにとって良いビジネス・チャレンジになるはずですから、 始めましょう。 + +01:22.440 --> 01:32.130 +私たちが抱えているビジネス上の問題は、 私たち誰もが知っている多くのアプリケーションにある機能である。 + +01:32.130 --> 01:41.880 +私たちは、 行動や所有者などを含む会議の議事録を作成できるソリューションを構築したいと考えています。 + +01:42.120 --> 01:52.620 +音声を録音して、 フロンティアモデルを使い、 APIを使って音声をテキストに変換する。 + +01:52.620 --> 02:01.830 +実はこれは、 先週のあるプロジェクトのフォローアップ練習として私が出した課題なんだ。 + +02:01.830 --> 02:07.430 +音声をテキストに変換するフロンティアモデルを呼ぶことにする。 + +02:07.430 --> 02:14.120 +そのテキストをオープンソースのモデルを使って議事録にし、 要約し、 + +02:14.120 --> 02:17.760 +行動や所有者などを抜き出す。 + +02:17.820 --> 02:21.870 +そして、 結果をストリームバックし、 マークダウンで表示する。 + +02:21.870 --> 02:25.380 +だから、 これらの活動は私たちがやろうとしていることなんだ。 + +02:25.410 --> 02:27.060 +そうやってまとめるんだ。 + +02:27.390 --> 02:31.800 +そして、 役に立つ製品を作ることになる。 + +02:32.250 --> 02:34.140 +これが私たちの望むものだ。 + +02:34.170 --> 02:40.440 +このような議事録を作成し、 ディスカッションのポイント、 要点、 アクションアイテム、 + +02:40.470 --> 
02:47.700 +そしてこれから使用するリソースを入力データとして作成できるソリューションが欲しい。 + +02:47.730 --> 02:57.270 +ハギング・フェイスでは、 全米各地の協議会の音声ファイルが公開されている。 + +02:57.270 --> 02:59.400 +そこから始めよう。 + +02:59.670 --> 03:03.930 +すでに音声ファイルのひとつをダウンロードし、 その一部を抜粋した。 + +03:04.200 --> 03:09.030 +時間の都合上、 デンバー市議会の会議全体ではなく、 その一部だけを取り上げる。 + +03:09.300 --> 03:12.900 +しかし、 このアイデアは、 それが機能することを示すのに役立つということだ。 + +03:12.900 --> 03:19.680 +そして、 私たちが実用的な製品を完成させた暁には、 おそらく、 あなた自身のミーティングでも使えるようになるでしょう。 + +03:19.710 --> 03:24.840 +それでは早速、 Google Colabにアクセスしてアプリケーションを作ってみよう。 diff --git a/week5/community-contributions/subtitles/srts/59170165/ko_KR.srt b/week5/community-contributions/subtitles/srts/59170165/ko_KR.srt new file mode 100755 index 0000000..5ec7b88 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170165/ko_KR.srt @@ -0,0 +1,127 @@ +WEBVTT + +00:01.340 --> 00:05.000 +3주 차 마지막 날에 오신 걸 환영합니다, 여러분 + +00:05.030 --> 00:05.810 +3주 차예요 + +00:05.840 --> 00:06.710 +5일째예요 + +00:06.740 --> 00:12.740 +오픈 소스 모델 추론을 얼굴 껴안기와 함께 마무리하고 있어요 + +00:12.740 --> 00:16.790 +오늘은 프로가 되는 날이에요 + +00:16.790 --> 00:23.150 +오늘은 지난 나흘간 여러분이 배운 모든 걸 한데 모아 아주 훌륭하고 흥미진진한 + +00:23.150 --> 00:31.910 +사업 프로젝트로 굳건히 다지는 날입니다 이 분야에서 진정한 경험을 쌓을 수 있는 사업 프로젝트죠 + +00:31.910 --> 00:37.940 +여러분이 이미 할 수 있는 일요 한 번 더 말해도 괜찮으시다면 개척자 + +00:37.940 --> 00:41.180 +모델을 코딩할 수 있어요 + +00:41.180 --> 00:46.970 +툴을 이용한 다단계 인공지능 보조를 만들 수 있습니다 이미지 생성, 소리 생성 등이죠 + +00:47.120 --> 00:55.100 +파이프라인, 토큰마이저, 모델을 포옹 트랜스포머 라이브러리에서 사용하세요 + +00:55.130 --> 00:59.960 +오늘은 토큰저와 모델에 대해 더 잘 알게 될 거예요 + +00:59.960 --> 01:05.330 +오픈 소스 모델을 쉽게 추론할 수 있을 겁니다 프론티어와 오픈 + +01:05.360 --> 01:13.260 +소스 모델을 결합한 LLM 솔루션을 구현해 하나의 멋진 패키지로 만들 수 있고요 + +01:13.260 --> 01:21.240 +Get it을 계속하는 데 좋은 사업 과제도 있을 거예요 그럼 시작하죠 + +01:22.440 --> 01:28.710 +우리가 가진 비즈니스 문제는 우리가 아는 많은 응용 프로그램에 있는 기능이에요 + +01:28.710 --> 01:32.130 +좋은 종류의 진짜 제품이죠 + +01:32.130 --> 01:40.260 +작업이나 소유주 같은 걸 포함해 회의록을 만들 수 있는 솔루션을 구축하고 + +01:40.260 --> 01:41.880 +싶어요 + +01:42.120 --> 01:51.180 +오디오 레코딩을 프론티어 모델로 사용할 수 있고 API를 이용해 오디오를 
텍스트로 변환할 + +01:51.180 --> 01:52.620 +수 있죠 + +01:52.620 --> 01:58.320 +지난주에 진행한 프로젝트에서 받은 후속 작업으로 드린 거예요 이미 실험해 + +01:58.320 --> 02:01.830 +보셨을 수도 있지만 아니라면 같이 해 보죠 + +02:01.830 --> 02:07.430 +음향을 텍스트로 변환하는 개척자 모델을 부를 거예요 + +02:07.430 --> 02:14.120 +그런 다음 오픈 소스 모델을 사용해 그 텍스트를 회의록으로 바꾸고 요약하고 + +02:14.120 --> 02:17.760 +행동과 소유주 등을 추려내죠 + +02:17.820 --> 02:21.870 +결과를 스트리밍해서 할인된 걸 보여드릴게요 + +02:21.870 --> 02:25.380 +이게 우리가 할 활동이에요 + +02:25.410 --> 02:27.060 +그렇게 Put을 짜는 거예요 + +02:27.390 --> 02:31.800 +유용한 제품을 만들 거예요 + +02:32.250 --> 02:34.140 +이런 걸 만들고 싶었어요 + +02:34.170 --> 02:40.440 +이와 같은 솔루션을 만들고 싶습니다. 토론 포인트, 포장 포인트, 액션 + +02:40.470 --> 02:47.700 +아이템, 입력 데이터 등을 만들어서 사용할 리소스로 시작하는 솔루션이죠. + +02:47.730 --> 02:57.270 +미국 전역의 자치 위원회가 공개적으로 연 의회 회의에 참석한 음성 파일도 있어요 + +02:57.270 --> 02:59.400 +거기서부터 시작하죠 + +02:59.670 --> 03:03.930 +이미 오디오 파일 하나를 다운로드해서 일부를 잘라냈어요 + +03:04.200 --> 03:08.460 +시간 관계상 덴버시 의회 회의 일부만 진행하도록 하죠 전체 회의 + +03:08.460 --> 03:09.030 +말고요 + +03:09.300 --> 03:12.900 +하지만 그렇게 하면 작동한다는 걸 보여줄 수 있어요 + +03:12.900 --> 03:17.370 +실제 상품이 출시됐을 때 여러분의 회의에서 + +03:17.370 --> 03:19.680 +사용할 수 있을 거예요 + +03:19.710 --> 03:24.840 +구글 Colab으로 가서 응용 프로그램을 만들어보죠 diff --git a/week5/community-contributions/subtitles/srts/59170223/en_US.srt b/week5/community-contributions/subtitles/srts/59170223/en_US.srt new file mode 100755 index 0000000..3da9f01 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170223/en_US.srt @@ -0,0 +1,220 @@ +WEBVTT + +00:00.470 --> 00:01.100 +Well. + +00:01.130 --> 00:02.000 +Fantastic. + +00:02.030 --> 00:06.560 +It's coming up to the end of the week, and that means it's coming up to a challenge for you again, + +00:06.560 --> 00:10.580 +even though I've just given you a challenge to build a Gradio user interface for for what we just saw. + +00:10.580 --> 00:11.750 +But that's an easy challenge. + +00:11.750 --> 00:12.680 +You can do that. + +00:12.680 --> 00:13.640 +No problem. 
+ +00:13.640 --> 00:14.990 +This you need a harder challenge. + +00:14.990 --> 00:17.360 +At the end of the week, it's time for a harder challenge. + +00:17.360 --> 00:25.130 +So the end of week challenge is to build an important business application that we will, in fact, + +00:25.130 --> 00:26.690 +use later in the course. + +00:26.690 --> 00:32.120 +Although yeah, you you won't need to to you won't need to have built it for that because because I'll + +00:32.120 --> 00:32.750 +have done it. + +00:32.750 --> 00:35.690 +But it's really helpful if you've done it. + +00:35.690 --> 00:39.680 +And this is something that you'll be able to use no matter in any business. + +00:39.710 --> 00:45.260 +This, this, this tool will apply to every business vertical and can be useful to you, I guarantee + +00:45.290 --> 00:45.830 +it. + +00:46.220 --> 00:48.140 +And this is what it is. + +00:48.140 --> 00:58.940 +Create your own tool that generates synthetic testing data a test data generator, open source model. + +00:59.180 --> 01:01.970 +This is something that is so valuable. + +01:01.970 --> 01:06.380 +Generating data sets is something that you need for many different purposes. + +01:06.380 --> 01:14.600 +And I want to give you a very, um, wide remit to decide how you want to go about doing this, but + +01:14.600 --> 01:21.590 +I'm looking for something where you can describe a kind of data set you want, and maybe it's descriptions + +01:21.590 --> 01:29.420 +of products, maybe it's descriptions of, uh, um, uh, job postings, whatever it is you want to + +01:29.450 --> 01:36.260 +be able to, to tell your product what it is, what kind of data that you're working with and let it + +01:36.260 --> 01:45.080 +dream up, uh, diverse outputs, diverse test set that you'll be able to use when experimenting with + +01:45.080 --> 01:47.300 +your business area in the future. 
+ +01:47.300 --> 01:55.610 +So this synthetic data generator is going to be a valuable tool for yourself, for me and for for this, + +01:55.640 --> 01:59.090 +both for this course and for future business problems you tackle. + +01:59.090 --> 02:03.890 +So it's worth investing some time in, and it's worth giving it a gradio UI while you're doing it. + +02:03.920 --> 02:05.720 +And that's going to be the super easy part. + +02:05.720 --> 02:08.420 +So I have a shot at that. + +02:08.450 --> 02:11.060 +It will apply to your business area no matter what you do. + +02:11.060 --> 02:13.790 +It's going to be useful and you're going to really enjoy it. + +02:16.160 --> 02:25.250 +And then that would then complete week three, wrapping up your third week of your journey towards being + +02:25.250 --> 02:27.800 +a proficient LM engineer. + +02:27.830 --> 02:31.790 +You can already, of course, code confidently with frontier models. + +02:31.790 --> 02:34.940 +You must be sick of me saying that now because you're that good. + +02:34.940 --> 02:37.100 +You can build an AI assistant. + +02:37.100 --> 02:39.980 +You can have it be multimodal, you can have it use tools. + +02:39.980 --> 02:46.010 +You can have it be consist of multiple smaller agents that carry out specialist tasks. + +02:46.040 --> 02:52.460 +And of course, at this point you can create an LM solution that combines calls to frontier models. + +02:52.460 --> 02:54.680 +And it can call open source models. + +02:54.680 --> 03:03.080 +And you can use the pipeline API, using it to to carry out a large variety of common inference tasks. + +03:03.080 --> 03:10.550 +And you can also use the lower level hugging face APIs, the Tokenizers, and the models for inference + +03:10.550 --> 03:11.780 +tasks. + +03:12.470 --> 03:18.980 +So congratulations once again, you should be very proud next week. + +03:19.010 --> 03:20.990 +Next week we change topics. + +03:20.990 --> 03:23.240 +There's a thorny question. 
+ +03:23.240 --> 03:24.980 +It's a question I get asked all the time. + +03:24.980 --> 03:31.010 +It's something which is, uh, where there's actually a lot of great resources to help. + +03:31.010 --> 03:37.440 +It's about how do you pick the right model for the for a given task that you have to work on. + +03:37.440 --> 03:39.870 +There are so many models, there are so many options. + +03:39.870 --> 03:41.220 +There's for staff. + +03:41.250 --> 03:43.290 +There's there's do you go closed source or open source? + +03:43.290 --> 03:48.000 +But then whichever path you take, there are so many possibilities. + +03:48.000 --> 03:52.830 +And how do you navigate through this to decide which one is right for a particular problem. + +03:52.830 --> 03:53.850 +And that is the key. + +03:53.880 --> 03:55.470 +It depends on the problem. + +03:55.470 --> 03:58.650 +Different models will be appropriate for different problems. + +03:58.650 --> 04:00.660 +I'm going to show you how to figure that out. + +04:00.990 --> 04:02.550 +We're going to compare LMS. + +04:02.550 --> 04:03.720 +We're going to use leaderboards. + +04:03.720 --> 04:04.950 +We're going to use arenas. + +04:04.950 --> 04:08.070 +And we're going to do some some work with arenas ourselves. + +04:08.070 --> 04:09.360 +And that's going to be fun. + +04:09.360 --> 04:15.930 +And then as our practical work, we're going to to go a different direction than we've gone in the past, + +04:16.020 --> 04:21.780 +except we did it very briefly once, but we're going to be looking at code generation when we're using + +04:21.780 --> 04:29.490 +frontier models and open source models to be generating code and tackling some code generation problems. + +04:29.490 --> 04:33.060 +So that will be a new, interesting perspective for you. 
+ +04:33.060 --> 04:38.910 +So I'm really excited about next week, and I'm so, so impressed by how much progress you've made already + +04:38.910 --> 04:42.990 +and how many skills that you've already acquired. + +04:43.110 --> 04:49.140 +And I will see you for week four for picking the right LLM. diff --git a/week5/community-contributions/subtitles/srts/59170223/ja_JP.srt b/week5/community-contributions/subtitles/srts/59170223/ja_JP.srt new file mode 100755 index 0000000..16b79a2 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170223/ja_JP.srt @@ -0,0 +1,196 @@ +WEBVTT + +00:00.470 --> 00:01.100 +まあね。 + +00:01.130 --> 00:02.000 +ファンタスティックだ。 + +00:02.030 --> 00:10.580 +今週も終わりに近づいています。 つまり、 またチャレンジの時期が近づいているということです。 先ほど、 私たちが見たものに対するグラディオのユーザー・インターフェースを作るという課題を出したばかりですが。 + +00:10.580 --> 00:11.750 +しかし、 それは簡単な挑戦だ。 + +00:11.750 --> 00:12.680 +それはできる。 + +00:12.680 --> 00:13.640 +問題ないよ。 + +00:13.640 --> 00:14.990 +もっと難しい課題が必要だ。 + +00:14.990 --> 00:17.360 +週の終わりには、 よりハードなチャレンジの時間だ。 + +00:17.360 --> 00:26.690 +つまり、 週明けの課題は、 このコースの後半で実際に使用する重要なビジネス・アプリケーションを作ることだ。 + +00:26.690 --> 00:32.750 +とはいえ、 そうする必要はないだろうけど......そのために作る必要はないだろう。 + +00:32.750 --> 00:35.690 +でも、 やっておくと本当に役に立つよ。 + +00:35.690 --> 00:39.680 +そしてこれは、 どのようなビジネスでも関係なく使えるものだ。 + +00:39.710 --> 00:45.830 +この、 この、 このツールはあらゆる業種に適用でき、 あなたの役に立つこと請け合いだ。 + +00:46.220 --> 00:48.140 +それがこれだ。 + +00:48.140 --> 00:58.940 +合成テストデータを生成する独自のツールを作成する テストデータジェネレータ、 オープンソースモデル。 + +00:59.180 --> 01:01.970 +これはとても貴重なことだ。 + +01:01.970 --> 01:06.380 +データセットの生成は、 さまざまな目的で必要となるものだ。 + +01:06.380 --> 01:14.600 +しかし、 私は、 あなたが欲しいデータセットを記述できるものを探しています。 それは、 + +01:14.600 --> 01:21.590 +商品の説明かもしれませんし、 求人情報の説明かもしれません。 あなたが欲しいものが何であれ、 + +01:21.590 --> 01:29.420 +製品にそれが何であるかを伝え、 あなたが扱っているデータがどのようなものであるかを伝え、 + +01:29.450 --> 01:36.260 +将来あなたのビジネス分野で実験するときに使えるように、 多様な出力、 + +01:36.260 --> 01:47.300 +多様なテストセットを夢見させることができます。 + +01:47.300 --> 01:55.610 +だから、 この合成データジェネレーターは、 あなた自身にとっても、 私にとっても、 そしてこのコースにとっても、 
将来あなたが取り組むビジネス上の問題にとっても、 + +01:55.640 --> 01:59.090 +貴重なツールになるだろう。 + +01:59.090 --> 02:03.890 +グラディオのUIに時間を費やす価値はある。 + +02:03.920 --> 02:05.720 +そして、 それが超簡単な部分になる。 + +02:05.720 --> 02:08.420 +だから、 僕はそれを狙っているんだ。 + +02:08.450 --> 02:11.060 +何をするにしても、 あなたのビジネス領域に適用される。 + +02:11.060 --> 02:13.790 +きっと役に立つし、 本当に楽しめるよ。 + +02:16.160 --> 02:27.800 +これで3週目が終了し、 熟練したLMエンジニアになるための旅の3週目が終わる。 + +02:27.830 --> 02:31.790 +もちろん、 すでにフロンティアモデルを使って自信を持ってコーディングすることはできる。 + +02:31.790 --> 02:34.940 +もう私がそんなことを言うのはうんざりしているに違いない。 + +02:34.940 --> 02:37.100 +AIアシスタントを作ることができる。 + +02:37.100 --> 02:39.980 +マルチモーダルでもいいし、 ツールを使ってもいい。 + +02:39.980 --> 02:46.010 +専門的な仕事を行う複数の小さなエージェントで構成することもできる。 + +02:46.040 --> 02:52.460 +もちろん、 この時点でフロンティアモデルへのコールを組み合わせたLMソリューションを作ることもできる。 + +02:52.460 --> 02:54.680 +そして、 オープンソースのモデルを呼ぶことができる。 + +02:54.680 --> 03:03.080 +また、 パイプラインAPIを使って、 一般的な推論タスクを実行することもできる。 + +03:03.080 --> 03:11.780 +また、 推論タスクのために、 より低レベルの抱擁顔API、 トーケナイザー、 モデルを使うこともできる。 + +03:12.470 --> 03:18.980 +もう一度おめでとう。 + +03:19.010 --> 03:20.990 +来週はトピックを変える。 + +03:20.990 --> 03:23.240 +茨の道がある。 + +03:23.240 --> 03:24.980 +よく聞かれる質問だ。 + +03:24.980 --> 03:31.010 +それは......実際、 助けてくれる素晴らしいリソースがたくさんあることなんだ。 + +03:31.010 --> 03:37.440 +自分が取り組まなければならない仕事に対して、 どのように適切なモデルを選ぶかということだ。 + +03:37.440 --> 03:39.870 +たくさんのモデルがあり、 たくさんの選択肢がある。 + +03:39.870 --> 03:41.220 +スタッフ用だ。 + +03:41.250 --> 03:43.290 +クローズドソースかオープンソースか? 
+ +03:43.290 --> 03:48.000 +でも、 どの道を選んでも、 たくさんの可能性がある。 + +03:48.000 --> 03:52.830 +そして、 特定の問題に対してどれが正しいかを判断するために、 どのようにナビゲートするのか。 + +03:52.830 --> 03:53.850 +それが鍵だ。 + +03:53.880 --> 03:55.470 +それは問題による。 + +03:55.470 --> 03:58.650 +問題によって適切なモデルは異なるだろう。 + +03:58.650 --> 04:00.660 +その方法をお見せしよう。 + +04:00.990 --> 04:02.550 +我々はLMSを比較するつもりだ。 + +04:02.550 --> 04:03.720 +リーダーボードを使うつもりだ。 + +04:03.720 --> 04:04.950 +アリーナを使うつもりだ。 + +04:04.950 --> 04:08.070 +そして、 我々自身もアリーナでいくつかの仕事をするつもりだ。 + +04:08.070 --> 04:09.360 +それは楽しみだ。 + +04:09.360 --> 04:21.780 +フロンティア・モデルやオープンソース・モデルを使ってコードを生成し、 + +04:21.780 --> 04:29.490 +コード生成の問題に取り組む場合だ。 + +04:29.490 --> 04:33.060 +だから、 それはあなたにとって新しい、 興味深い視点になるだろう。 + +04:33.060 --> 04:38.910 +だから来週が本当に楽しみだし、 君たちがすでにどれだけ進歩し、 どれだけ多くのスキルを身につけたか、 + +04:38.910 --> 04:42.990 +とてもとても感心している。 + +04:43.110 --> 04:49.140 +そして、 正しいLLMを選ぶために4週目に会いましょう。 diff --git a/week5/community-contributions/subtitles/srts/59170223/ko_KR.srt b/week5/community-contributions/subtitles/srts/59170223/ko_KR.srt new file mode 100755 index 0000000..4a54efc --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170223/ko_KR.srt @@ -0,0 +1,211 @@ +WEBVTT + +00:00.470 --> 00:01.100 +글쎄요 + +00:01.130 --> 00:02.000 +환상적이에요 + +00:02.030 --> 00:06.560 +이번 주말이면 끝나요 다시 도전이 기다리고 있다는 뜻이죠 방금 제가 여러분께 + +00:06.560 --> 00:10.580 +그래디오 사용자 인터페이스를 구축하라고 과제를 드렸는데도요 + +00:10.580 --> 00:11.750 +하지만 쉬운 도전이죠 + +00:11.750 --> 00:12.680 +할 수 있어요 + +00:12.680 --> 00:13.640 +별말씀을요 + +00:13.640 --> 00:14.990 +더 어려운 과제가 필요해요 + +00:14.990 --> 00:17.360 +이번 주 후반에는 더 어려운 과제가 기다리고 있죠 + +00:17.360 --> 00:25.130 +마지막 챌린지는 중요한 비즈니스 응용 프로그램을 만드는 겁니다 나중에 이 과정을 통해 + +00:25.130 --> 00:26.690 +사용할 거죠 + +00:26.690 --> 00:32.750 +하지만 그걸 위해 만들 필요는 없어요 제가 만들 거니까요 + +00:32.750 --> 00:35.690 +하지만 해 본 사람이라면 정말 도움이 되죠 + +00:35.690 --> 00:39.680 +어떤 사업에서든 사용할 수 있는 거죠 + +00:39.710 --> 00:45.830 +이 도구는 모든 비즈니스에 적용될 겁니다 여러분께 유용할 거예요 제가 보장하죠 + +00:46.220 --> 00:48.140 +이게 그 결과예요 + +00:48.140 --> 00:58.940 +합성 테스트 데이터를 생성하는 
자신만의 도구를 만드세요 테스트 데이터 생성기 오픈 소스 모델이요 + +00:59.180 --> 01:01.970 +정말 귀한 거예요 + +01:01.970 --> 01:06.380 +데이터 세트 생성에는 여러 가지 목적이 필요해요 + +01:06.380 --> 01:14.600 +저는 여러분이 어떻게 하고 싶은지 결정할 수 있는 아주 폭넓은 권한을 드리고 싶습니다. 하지만 제가 + +01:14.600 --> 01:21.590 +원하는 것은 여러분이 원하는 데이터 세트를 묘사할 수 있는 것입니다. 제품에 대한 + +01:21.590 --> 01:29.420 +설명일 수도 있고, 어 구인 공고일 수도 있습니다. 여러분의 제품이 무엇인지, 여러분이 작업하고 + +01:29.450 --> 01:36.260 +있는 데이터는 어떤 종류인지, 그리고 다양한 출력과 다양한 테스트 세트를 꿈꿀 + +01:36.260 --> 01:47.300 +수 있도록 하는 것입니다. 이로써 앞으로 여러분의 비즈니스 영역에서 실험할 때 사용할 수 있을 수 있을 거예요. + +01:47.300 --> 01:55.610 +이 합성 데이터 발생기는 여러분과 저, 그리고 이걸 위해 귀중한 도구가 될 겁니다 이 과정과 + +01:55.640 --> 01:59.090 +미래의 사업 문제에 있어서요 + +01:59.090 --> 02:03.890 +시간을 투자할 가치가 있고 그러데이션 UI를 적용할 가치가 있어요 + +02:03.920 --> 02:05.720 +아주 쉬운 부분이에요 + +02:05.720 --> 02:08.420 +그래서 저도 도전해 보려고요 + +02:08.450 --> 02:11.060 +뭘 하든 비즈니스 영역에 적용되죠 + +02:11.060 --> 02:13.790 +유용하고 정말 즐기실 수 있어요 + +02:16.160 --> 02:25.250 +그러면 3주 차를 마무리합니다 여정의 3주 차를 마무리하면 능숙한 LM 엔지니어가 + +02:25.250 --> 02:27.800 +되는 거죠 + +02:27.830 --> 02:31.790 +물론 개척 시대 모델로 이미 코드를 만들 수 있죠 + +02:31.790 --> 02:34.940 +이제 와서 이런 말 하는 거 지겹죠? 
+ +02:34.940 --> 02:37.100 +인공지능 보조를 만들 수 있어요 + +02:37.100 --> 02:39.980 +멀티모덜이 될 수도 있고 도구를 사용할 수도 있죠 + +02:39.980 --> 02:46.010 +여러 명의 작은 요원들로 구성해 전문 임무를 수행할 수도 있죠 + +02:46.040 --> 02:52.460 +물론 이 시점에서 여러분은 프런티어 모델에 호출하는 LM 솔루션을 만들 수 있죠 + +02:52.460 --> 02:54.680 +오픈 소스 모델을 호출할 수 있어요 + +02:54.680 --> 03:03.080 +파이프라인 API를 사용할 수 있습니다 일반 추론 작업을 수행하기 위해 다양하게 사용하죠 + +03:03.080 --> 03:10.550 +하위 레벨의 얼굴 포옹 API를 사용할 수도 있습니다 토큰라이저와 추론 작업을 위한 + +03:10.550 --> 03:11.780 +모델을요 + +03:12.470 --> 03:18.980 +다시 한번 축하드려요 다음 주에는 자랑스러워하세요 + +03:19.010 --> 03:20.990 +다음 주엔 화제를 바꾸죠 + +03:20.990 --> 03:23.240 +가시가 돋친 질문이네요 + +03:23.240 --> 03:24.980 +Get it, get it get it get it + +03:24.980 --> 03:31.010 +실제로 도움이 될 만한 훌륭한 자원이 많은 곳이죠 + +03:31.010 --> 03:37.440 +주어진 작업에 맞는 모델을 어떻게 고르느냐가 관건이죠 + +03:37.440 --> 03:39.870 +모델도 많고 옵션도 많아요 + +03:39.870 --> 03:41.220 +직원용이에요 + +03:41.250 --> 03:43.290 +비공개 소스인가요 오픈 소스인가요? + +03:43.290 --> 03:48.000 +하지만 어떤 길을 택하든 가능성은 무궁무진해요 + +03:48.000 --> 03:52.830 +특정 문제에 어떤 게 적절한지 어떻게 탐색해야 할까요? 
+ +03:52.830 --> 03:53.850 +그게 핵심이에요 + +03:53.880 --> 03:55.470 +어떤 문제냐에 따라 다르죠 + +03:55.470 --> 03:58.650 +다른 문제에는 다른 모델이 적합하죠 + +03:58.650 --> 04:00.660 +어떻게 하는지 보여드릴게요 + +04:00.990 --> 04:02.550 +LMS를 비교할 거예요 + +04:02.550 --> 04:03.720 +리더보드를 사용할 거예요 + +04:03.720 --> 04:04.950 +아레나에서 할 거예요 + +04:04.950 --> 04:08.070 +우리도 아레나에서 작업 좀 하려고요 + +04:08.070 --> 04:09.360 +재미있을 거예요 + +04:09.360 --> 04:15.930 +실제 업무로서 과거와는 다른 방향으로 갈 겁니다 아주 간략하게 + +04:16.020 --> 04:21.780 +한 번 했지만요 프론티어 모델과 오픈 소스 모델을 이용해 + +04:21.780 --> 04:29.490 +코드 생성 문제를 해결하면서 코드 생성을 살펴볼 거예요 + +04:29.490 --> 04:33.060 +새롭고 흥미로운 관점이 될 거예요 + +04:33.060 --> 04:38.910 +다음 주가 정말 기대돼요 여러분이 벌써 이렇게 많이 발전하고 + +04:38.910 --> 04:42.990 +많은 기술을 습득한 게 정말 인상적이에요 + +04:43.110 --> 04:49.140 +적절한 LLM을 선택해서 4주 차에 만나요 diff --git a/week5/community-contributions/subtitles/srts/59170227/en_US.srt b/week5/community-contributions/subtitles/srts/59170227/en_US.srt new file mode 100755 index 0000000..7e32a07 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170227/en_US.srt @@ -0,0 +1,508 @@ +WEBVTT + +00:00.200 --> 00:02.360 +Welcome back to Google Colab. + +00:02.360 --> 00:06.290 +Here we are ready to explore the wonderful world of Tokenizers. + +00:06.290 --> 00:11.360 +So, uh, the first thing I'm going to do is do some imports. + +00:11.600 --> 00:15.290 +And after I've done that, I want to mention this statement here. + +00:15.290 --> 00:20.750 +I forgot to mention this in the last video, but I have added it into that colab, so hopefully you + +00:20.780 --> 00:23.300 +found it anyway and read my explanation. + +00:23.450 --> 00:28.220 +Uh, you may need to log in to Huggingface in Colab if you've never done that before. + +00:28.220 --> 00:31.370 +And this is the code that you use to do that. + +00:31.370 --> 00:36.260 +First of all, if you haven't already created your account with hugging face, you need an account with + +00:36.290 --> 00:36.890 +hugging face. + +00:36.890 --> 00:37.700 +It's free. 
+ +00:37.730 --> 00:40.910 +It's it's terrific and you will never regret it. + +00:40.910 --> 00:46.970 +So sign up at huggingface and then navigate to settings and create a new API token. + +00:46.970 --> 00:48.470 +Giving yourself write permission. + +00:48.470 --> 00:52.130 +We won't need to use the right permission today, but we will in the future, so might as well set it + +00:52.130 --> 00:53.060 +up right now. + +00:53.090 --> 00:59.570 +Then when you come back, you go to this key section here in the Colab and you add in a new secret. + +00:59.570 --> 01:05.220 +The secret should say HF underscore token and the value should be your token. + +01:05.220 --> 01:12.270 +And then all you have to do is run this code that will get the HF token from your secrets, and it will + +01:12.300 --> 01:15.000 +then call this login method, which I imported here. + +01:15.000 --> 01:18.180 +And that login method logs in to hugging face. + +01:18.180 --> 01:19.470 +Let's run that right away. + +01:19.470 --> 01:20.760 +And it's done. + +01:20.790 --> 01:23.400 +And you see it says I have rights permission right there. + +01:24.060 --> 01:31.950 +Okay, let's talk Tokenizers we are going to start with the fantastic llama 3.1, the iconic model from + +01:31.950 --> 01:35.400 +meta, which paved the way for open source models. + +01:35.880 --> 01:42.240 +Now, when you're using llama 3.1, meta does need you first to sign their terms of service. + +01:42.240 --> 01:47.520 +And the way you do that is you visit their model page on Hugging Face, which is linked here. + +01:47.520 --> 01:52.680 +And at the top of that page, there are very simple instructions for what you need to do to sign. + +01:52.830 --> 01:57.270 +Uh, you should you'll need to supply your email address, and it's best if the email address that you + +01:57.270 --> 01:59.610 +supply matches your hugging face account. + +01:59.610 --> 02:01.370 +That means they get things done quickly. 
+ +02:01.370 --> 02:04.370 +In fact, they should approve you in a matter of minutes. + +02:04.370 --> 02:07.610 +I've done this many times, including once late on a Saturday night. + +02:07.610 --> 02:09.680 +I got approved very, very quickly. + +02:09.740 --> 02:13.460 +I don't know whether that's just because they're really on the ball or whether it's all automated, + +02:13.550 --> 02:15.350 +but it's very quick indeed. + +02:15.770 --> 02:20.810 +And in case you think there's something evil with this signing terms of service, it's really if you + +02:20.810 --> 02:26.420 +read the fine print, it's about making sure that you're not going to use lemma 3.1 for anything nefarious + +02:26.420 --> 02:30.770 +and that you have good intentions, which is very much the case in this class. + +02:30.770 --> 02:34.400 +So it should be no problems whatsoever signing that. + +02:34.400 --> 02:39.590 +Once you've done so, you will have access to all the variants of llama 3.1. + +02:39.590 --> 02:43.070 +It's one one sign and then it applies to the whole family. + +02:43.370 --> 02:49.070 +If you wanted to use one of the older llama three models, like llama 3 or 2, you would need to go + +02:49.070 --> 02:53.060 +and sign the terms for that family of models. + +02:53.450 --> 02:57.650 +If for some reason you don't want to, or you're finding that they're not approving you right away, + +02:57.650 --> 03:00.200 +you can just skip to later when we start. + +03:00.230 --> 03:05.490 +Or you can just watch me executing for 3.1, and then you can pick up when we start working with some + +03:05.490 --> 03:06.840 +of the other tokenizers. + +03:06.840 --> 03:12.510 +But with that, creating a tokenizer is this single line here. + +03:12.690 --> 03:21.810 +Hugging face has this class auto tokenizer, which will create whatever subclass of tokenizer is needed + +03:21.810 --> 03:23.070 +for this particular model. + +03:23.100 --> 03:24.330 +Don't need to worry too much about that. 
+
+03:24.330 --> 03:31.410
+Just know that auto tokenizer is the one to do and you call the class method from pre-trained, which
+
+03:31.410 --> 03:35.790
+means I've got a pre-trained model and I want you to create the tokenizer that's for that.
+
+03:35.820 --> 03:36.960
+And that is the name.
+
+03:36.960 --> 03:38.760
+This is the model that we're using.
+
+03:38.760 --> 03:41.610
+That's which you can take directly from the Hugging face hub.
+
+03:41.610 --> 03:45.690
+It's meta llama's meta llama 3.1 8 billion.
+
+03:45.720 --> 03:51.930
+This trust remote code equals true when as as you bring in this tokenizer, it's possible for there
+
+03:51.930 --> 03:55.140
+to be code that is part of of a model.
+
+03:55.140 --> 03:57.750
+And we're saying we know who meta is.
+
+03:57.780 --> 04:01.570
+We know that this is fine so you can trust it.
+
+04:01.840 --> 04:04.030
+If you don't include that, it will still work fine.
+
+04:04.030 --> 04:06.040
+It just gives you a warning, an ugly warning.
+
+04:06.040 --> 04:10.930
+So if you don't want the ugly warning, then just, uh, put that in there.
+
+04:11.950 --> 04:12.550
+Okay.
+
+04:12.550 --> 04:15.970
+With that, the next thing I'm doing is I'm using the text.
+
+04:16.000 --> 04:24.040
+I'm excited to show Tokenizers in action to my LLM engineers, and we take that text as a string and
+
+04:24.040 --> 04:27.160
+we call tokenizer dot encode that text.
+
+04:27.160 --> 04:30.070
+And then we will print the tokens that result.
+
+04:30.760 --> 04:31.720
+Here they are.
+
+04:31.750 --> 04:33.400
+It's something that's super simple.
+
+04:33.400 --> 04:34.720
+It's just a list of numbers.
+
+04:34.720 --> 04:35.860
+Nothing more than that.
+
+04:35.860 --> 04:37.390
+Nothing magical about tokens.
+
+04:37.390 --> 04:38.440
+They are just numbers.
+
+04:38.440 --> 04:40.960
+And these numbers represent that text.
+
+04:40.990 --> 04:43.600
+Let's see how many of them there are.
+ +04:43.630 --> 04:50.320 +Well, let's start by saying how many, um, uh, letters were in that text that we gave it. + +04:50.350 --> 04:53.560 +There are 61 letters in that text. + +04:53.560 --> 04:56.260 +So now we can count the number of tokens. + +04:56.260 --> 05:02.510 +And do you remember the rule of thumb for roughly speaking, the how many characters map to a token. + +05:02.540 --> 05:06.110 +On average, it's four on average. + +05:06.110 --> 05:06.440 +Roughly. + +05:06.440 --> 05:12.890 +Rule of thumb about four letters should be one token for normal English or if you have a lot of English. + +05:12.890 --> 05:16.880 +So we're expecting for 61 letters. + +05:16.970 --> 05:19.790 +We're expecting around 15 tokens. + +05:19.820 --> 05:20.780 +Let's see what we get. + +05:20.780 --> 05:21.980 +15 tokens. + +05:21.980 --> 05:22.520 +There we go. + +05:22.550 --> 05:25.280 +Exactly 15 tokens for this text. + +05:25.610 --> 05:31.940 +Um, and we can in fact do this decode to turn our tokens back into text again. + +05:31.940 --> 05:35.150 +So we're expecting to recreate the original text. + +05:35.150 --> 05:39.020 +And what we get is something similar, slightly different. + +05:39.020 --> 05:44.180 +As you will see what we get back is the text that we were expecting. + +05:44.180 --> 05:50.990 +But at the front of it is something new, this this funny thing here, this set of text that says in + +05:50.990 --> 05:55.010 +angled brackets are less than and greater than sign begin of text. + +05:55.040 --> 05:55.910 +What is this? + +05:55.910 --> 06:01.090 +So this is something called a special token or all of the, all of what I've highlighted just maps to + +06:01.120 --> 06:01.900 +one token. + +06:01.930 --> 06:09.340 +In fact, this token here, this 128,000 token, um, and it is a special token which is indicating + +06:09.370 --> 06:14.740 +to our model that it is the start of a, uh, of a text of a prompt. 
+ +06:14.950 --> 06:20.710 +Um, and so it's used for that purpose to be a special indicator to the LM. + +06:20.740 --> 06:24.550 +Now, again, you might be thinking, uh, okay. + +06:24.580 --> 06:28.960 +So does that mean that somehow the architecture of the transformer has to be set, set up so that it + +06:28.990 --> 06:30.820 +expects that kind of token? + +06:30.910 --> 06:35.920 +Uh, and uh, as you're probably, uh, very comfortable now, the answer is no. + +06:35.920 --> 06:37.270 +That's not what it means. + +06:37.300 --> 06:43.000 +Uh, what this means is that in all of the training examples that it saw during training time, it was + +06:43.000 --> 06:44.080 +set up this way. + +06:44.080 --> 06:48.250 +The training examples began with this special token begin of text. + +06:48.250 --> 06:52.780 +So it's got used to through training expecting that. + +06:52.780 --> 06:58.330 +And in order to ensure the highest quality output, one should recreate that same approach. + +06:58.390 --> 07:02.210 +Uh, when feeding in new prompts at inference time. + +07:02.990 --> 07:04.670 +So hope that made sense. + +07:04.700 --> 07:08.360 +There's another method batch decode. + +07:08.360 --> 07:13.940 +And if you run that with your tokens what you get back instead of one string, you get back these, + +07:13.940 --> 07:19.550 +uh, little, um, sets of strings where each string represents one token. + +07:19.550 --> 07:24.080 +So as I say, this first token here turned into this here. + +07:24.080 --> 07:27.920 +And then you can follow through to, to to see how that's working. + +07:28.130 --> 07:30.920 +Um, and there's a few things to note from this. + +07:30.920 --> 07:36.080 +Uh, as you'll see straight away, one of them is that in most cases a word mapped to a token, because + +07:36.080 --> 07:37.730 +we've got very simple words here. 
+
+07:37.730 --> 07:43.370
+So excited, even though it's way more than four characters mapped to one token, because it's such
+
+07:43.370 --> 07:45.380
+a common word, it's in the vocab.
+
+07:45.620 --> 07:53.180
+Um, another thing to notice is that, uh, as with GPT tokenizer, uh, the fact that something is
+
+07:53.180 --> 07:58.700
+the beginning of a word, this space before the word is part of the token.
+
+07:58.700 --> 08:09.150
+So and so am as the beginning of the word, and then the letters Am is a different token to just am,
+
+08:09.150 --> 08:13.560
+the fragment of characters that could be within something more complicated.
+
+08:14.250 --> 08:20.640
+You'll also notice that something like Tokenizers got broken into two tokens, one for the word token
+
+08:20.640 --> 08:23.130
+and the other for izers.
+
+08:23.460 --> 08:28.740
+So that's an interesting, uh, you know, a word ending izers, izers.
+
+08:28.740 --> 08:33.120
+You could imagine that might be stuck on the end of lots of different things, and that's part of its
+
+08:33.150 --> 08:34.350
+tokenization.
+
+08:34.380 --> 08:37.890
+One other thing to notice is that it is case sensitive.
+
+08:37.890 --> 08:43.860
+So so you can see that, uh, token with a capital T has been been taken there.
+
+08:45.120 --> 08:53.040
+Uh, so, uh, the final thing I want to mention here is the tokenizer dot vocab.
+
+08:53.070 --> 08:58.500
+If you run tokenizer dot vocab, you get the, uh, it gives you the.
+
+08:58.500 --> 09:03.980
+It's the dictionary of the complete mapping between fragments of Words and numbers.
+
+09:04.310 --> 09:06.590
+And you can see there's some pretty obscure things here.
+
+09:06.590 --> 09:12.620
+There's an awful lot of tokens that are available, and there's some quite odd tokens in here that are
+
+09:12.740 --> 09:15.920
+from different languages or used for different purposes.
+ +09:16.190 --> 09:22.580 +So very much it does go beyond three letters, four letters, and you'll see a number of different things. + +09:22.610 --> 09:26.630 +A um, it's printed out quite a lot of them. + +09:26.870 --> 09:32.840 +Uh, something else that I'll show you from this, uh, as I scroll back through all of our dictionary. + +09:33.050 --> 09:34.040 +Get back here. + +09:34.250 --> 09:41.990 +Uh, is, uh, that you can also print, uh, comment that, comment this out. + +09:42.440 --> 09:48.470 +Uh, just what's called the added vocab, which are the special tokens that I mentioned. + +09:48.650 --> 09:53.840 +Um, there's a bunch of these reserved special tokens, and sorry, at the top you can see here are + +09:53.840 --> 10:01.560 +the special tokens that have been reserved in the vocab, uh, to be used to signal to things to the + +10:01.560 --> 10:01.860 +LM. + +10:01.890 --> 10:02.580 +Beginning of text. + +10:02.610 --> 10:03.570 +End of text. + +10:04.020 --> 10:06.150 +Some reserved, um. + +10:06.180 --> 10:11.100 +And then a start header, ID and header. + +10:11.100 --> 10:12.690 +And then some other things here. + +10:12.690 --> 10:14.190 +And a Python tag. + +10:14.220 --> 10:17.070 +Uh, something obviously special there. + +10:17.070 --> 10:25.470 +So for whatever reason, these are the special tokens that have been identified, uh, as, as it being, + +10:25.470 --> 10:33.300 +uh, useful to include those special tokens in the vocab and provide them during training so that when + +10:33.330 --> 10:38.850 +you're doing inference, when you're running the model, uh, to, to generate text, you can use these + +10:38.850 --> 10:42.180 +tokens to indicate things to the model. + +10:42.960 --> 10:43.530 +All right. + +10:43.560 --> 10:47.580 +Well, that's a bit of playing around with the llama three model. + +10:47.640 --> 10:49.290 +Uh, llama 3.1 tokenizer. 
+ +10:49.320 --> 10:56.670 +When we come back, we're going to look at the, uh, the way that that this applies to chats in particular. + +10:56.670 --> 10:59.640 +And then we're going to play with some other tokenizers. + +10:59.640 --> 11:00.390 +So see you then. diff --git a/week5/community-contributions/subtitles/srts/59170227/ja_JP.srt b/week5/community-contributions/subtitles/srts/59170227/ja_JP.srt new file mode 100755 index 0000000..1975656 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170227/ja_JP.srt @@ -0,0 +1,439 @@ +WEBVTT + +00:00.200 --> 00:02.360 +Google Colabへようこそ。 + +00:02.360 --> 00:06.290 +トーケナイザーの素晴らしい世界を探検してみよう。 + +00:06.290 --> 00:11.360 +それで、 ええと、 まず最初にすることは、 輸入をすることだ。 + +00:11.600 --> 00:15.290 +そして、 それが終わった後、 ここでこの発言に触れたい。 + +00:15.290 --> 00:23.300 +前回のビデオでこのことを言い忘れたが、 あのコラボに追加したので、 とにかく見つけて私の説明を読んでほしい。 + +00:23.450 --> 00:28.220 +ええと、 Huggingfaceにログインしたことがないなら、 Colabにログインする必要があるかもしれない。 + +00:28.220 --> 00:31.370 +そのためのコードがこれだ。 + +00:31.370 --> 00:36.890 +まず最初に、 まだ抱き顔のアカウントを作成していない場合は、 抱き顔のアカウントが必要です。 + +00:36.890 --> 00:37.700 +無料だ。 + +00:37.730 --> 00:40.910 +それは素晴らしいことで、 決して後悔することはない。 + +00:40.910 --> 00:46.970 +そこで、 huggingfaceにサインアップし、 設定に移動して新しいAPIトークンを作成する。 + +00:46.970 --> 00:48.470 +自分に書く許可を与える。 + +00:48.470 --> 00:53.060 +今は必要ないだろうが、 将来は必要になるだろう。 + +00:53.090 --> 00:59.570 +そして戻ってきたら、 このColabのキーセクションに行き、 新しいシークレットを追加する。 + +00:59.570 --> 01:05.220 +secretにはHFアンダースコア・トークンを、 valueにはあなたのトークンを指定する。 + +01:05.220 --> 01:15.000 +そして、 シークレットからHFトークンを取得するこのコードを実行し、 ここでインポートしたログイン・メソッドを呼び出すだけです。 + +01:15.000 --> 01:18.180 +そして、 そのログイン方法はハグ顔にログインする。 + +01:18.180 --> 01:19.470 +すぐに実行しよう。 + +01:19.470 --> 01:20.760 +そして完成した。 + +01:20.790 --> 01:23.400 +そして、 そこに権利があると書いてあるのがわかるだろう。 + +01:24.060 --> 01:35.400 +さて、 トーケナイザーの話をしよう......まずはファンタスティックなラマ3から。 1、 メタの象徴的なモデルで、 オープンソースモデルへの道を開いた。 + +01:35.880 --> 01:42.240 +さて、 llama 3を使っているとき。 1、 metaはまず利用規約にサインする必要がある。 + +01:42.240 --> 01:47.520 +その方法は、 
ここにリンクされているハギング・フェイスのモデル・ページにアクセスすることだ。 + +01:47.520 --> 01:52.680 +そのページの一番上には、 サインするために必要なことがとてもシンプルに書かれている。 + +01:52.830 --> 01:59.610 +メールアドレスは、 あなたのハグする顔のアカウントと一致しているのがベストです。 + +01:59.610 --> 02:01.370 +つまり、 素早く物事を成し遂げるということだ。 + +02:01.370 --> 02:04.370 +実際、 数分で承認されるはずだ。 + +02:04.370 --> 02:07.610 +土曜の深夜に一度だけ行ったこともある。 + +02:07.610 --> 02:09.680 +とても早く承認されたよ。 + +02:09.740 --> 02:13.460 +ただ、 彼らが本当にボールを持っているからなのか、 それともすべて自動化されているのかはわからないが、 + +02:13.550 --> 02:15.350 +実に素早い。 + +02:15.770 --> 02:26.420 +万が一、 この署名規約が何か邪悪なものだと思われるかもしれないが、 細かい字を読めば、 それはあなたがレンマ3を使うつもりがないことを確認するためのものなのだ。 + +02:26.420 --> 02:26.420 +1悪意はなく、 + +02:26.420 --> 02:30.770 +善意がある。 + +02:30.770 --> 02:34.400 +だから、 サインすることに何の問題もないはずだ。 + +02:34.400 --> 02:39.590 +そうすれば、 ラマ3のすべてのバリエーションにアクセスできるようになる。 1. + +02:39.590 --> 02:43.070 +一つのサインで、 家族全員に適用されるんだ。 + +02:43.370 --> 02:53.060 +もし、 llama 3や2のような古いllama 3モデルを使いたいのであれば、 そのモデルのファミリーの契約書にサインする必要がある。 + +02:53.450 --> 02:57.650 +もし、 何らかの理由で承認されたくなかったり、 すぐに承認されなかったりした場合は、 + +02:57.650 --> 03:00.200 +後日、 私たちが開始するときにスキップすればいい。 + +03:00.230 --> 03:06.840 +それか、 私が3人分プレーするのを見ることもできる。 1、 そして、 他のトークナイザーを使い始めたら、 また戻ってくることができる。 + +03:06.840 --> 03:12.510 +しかし、 トークナイザーの作成はこの1行だけだ。 + +03:12.690 --> 03:23.070 +Hugging faceにはオート・トークナイザーというクラスがあり、 この特定のモデルに必要なトークナイザーのサブクラスを作成する。 + +03:23.100 --> 03:24.330 +あまり心配する必要はない。 + +03:24.330 --> 03:31.410 +オート・トークナイザーは、 pre-trainedからクラス・メソッドを呼び出します。 つまり、 事前に訓練されたモデルがあるので、 + +03:31.410 --> 03:35.790 +そのためのトークナイザーを作成してほしいということです。 + +03:35.820 --> 03:36.960 +それが名前だ。 + +03:36.960 --> 03:38.760 +これが私たちが使っているモデルだ。 + +03:38.760 --> 03:41.610 +それは、 ハグする顔のハブから直接取ることができるものだ。 + +03:41.610 --> 03:45.690 +メタ・ラマのメタ・ラマ3だ。 180億ドル + +03:45.720 --> 03:55.140 +このトークナイザーを持ち込むと、 モデルの一部であるコードが存在する可能性がある。 + +03:55.140 --> 03:57.750 +そして、 メタの正体を知っていると言っているんだ。 + +03:57.780 --> 04:01.570 +私たちはこれが問題ないことを知っていますから、 信頼してください。 + +04:01.840 --> 04:04.030 +それを含まなくても、 問題なく機能する。 + +04:04.030 --> 04:06.040 +ただ警告を与えるだけだ。 + +04:06.040 --> 04:10.930 +だから、 
もし醜い警告を出したくないのであれば、 そう書いておいてくれ。 + +04:11.950 --> 04:12.550 +オーケー。 + +04:12.550 --> 04:15.970 +それで次にすることは、 テキストを使うことだ。 + +04:16.000 --> 04:27.160 +LLMのエンジニアにトーケナイザーの動きを見せるのが楽しみです。 テキストを文字列として受け取り、 トーケナイザーを呼び出してそのテキストをドット・エンコードします。 + +04:27.160 --> 04:30.070 +そして、 その結果のトークンを印刷する。 + +04:30.760 --> 04:31.720 +それがこれだ。 + +04:31.750 --> 04:33.400 +とてもシンプルなことなんだ。 + +04:33.400 --> 04:34.720 +単なる数字の羅列だ。 + +04:34.720 --> 04:35.860 +それ以上のことはない。 + +04:35.860 --> 04:37.390 +トークンには何の不思議もない。 + +04:37.390 --> 04:38.440 +ただの数字だ。 + +04:38.440 --> 04:40.960 +そして、 この数字はそのテキストを表している。 + +04:40.990 --> 04:43.600 +何人いるか見てみよう。 + +04:43.630 --> 04:50.320 +では、 まず、 私たちが渡したテキストに何文字あったかを言ってみよう。 + +04:50.350 --> 04:53.560 +そのテキストには61の文字がある。 + +04:53.560 --> 04:56.260 +これでトークンの数を数えることができる。 + +04:56.260 --> 05:02.510 +大まかに言って、 トークンに何文字が対応するかという経験則を覚えていますか? + +05:02.540 --> 05:06.110 +平均すると4人だ。 + +05:06.110 --> 05:06.440 +大体ね。 + +05:06.440 --> 05:12.890 +経験則では、 通常の英語、 または英語をたくさん使う場合は、 4文字程度を1トークンとする。 + +05:12.890 --> 05:16.880 +だから61通を期待している。 + +05:16.970 --> 05:19.790 +トークンは15枚程度を想定している。 + +05:19.820 --> 05:20.780 +何が出てくるか見てみよう。 + +05:20.780 --> 05:21.980 +15トークン + +05:21.980 --> 05:22.520 +これでよし。 + +05:22.550 --> 05:25.280 +このテキストにちょうど15トークン。 + +05:25.610 --> 05:31.940 +トークンをテキストに戻すために、 このデコードを行うことができる。 + +05:31.940 --> 05:35.150 +だから、 原文を再現することを期待している。 + +05:35.150 --> 05:39.020 +そして私たちが手にするのは、 似ているようで少し違うものだ。 + +05:39.020 --> 05:44.180 +おわかりのように、 返ってくるのは期待通りのテキストである。 + +05:44.180 --> 05:50.990 +しかし、 その前面には新しいものがある。 このおかしなもの、 角度のついた括弧で囲まれたテキストは、 less + +05:50.990 --> 05:55.010 +thanとgreater thanの記号で始まる。 + +05:55.040 --> 05:55.910 +これは何だ? 
+ +05:55.910 --> 06:01.900 +これはスペシャル・トークンと呼ばれるもので、 ハイライトしたものはすべて1つのトークンにマッピングされます。 + +06:01.930 --> 06:14.740 +実際、 このトークン、 128,000トークンは特別なトークンで、 プロンプトのテキストの始まりであることをモデルに示している。 + +06:14.950 --> 06:20.710 +だから、 LMに特別な指示を出すために使うんだ。 + +06:20.740 --> 06:24.550 +さて、 皆さんはこう思うかもしれない。 + +06:24.580 --> 06:30.820 +ということは、 何らかの方法でトランスフォーマーのアーキテクチャを設定し、 そのようなトークンを期待するようにしなければならないということですか? + +06:30.910 --> 06:35.920 +ええと、 そして、 おそらくあなたは今、 とても快適だと思いますが、 答えはノーです。 + +06:35.920 --> 06:37.270 +そういう意味ではない。 + +06:37.300 --> 06:44.080 +ええと、 これはどういう意味かというと、 トレーニング中に見たすべてのトレーニング例の中で、 このように設定されていたということだ。 + +06:44.080 --> 06:48.250 +トレーニングの例は、 この特別なトークンから始まる。 + +06:48.250 --> 06:52.780 +だから、 それを期待したトレーニングで慣れてきたんだ。 + +06:52.780 --> 06:58.330 +そして、 最高品質のアウトプットを確実にするためには、 同じアプローチを再現する必要がある。 + +06:58.390 --> 07:02.210 +ええと、 推論時に新しいプロンプトを入力するとき。 + +07:02.990 --> 07:04.670 +というわけで、 お分かりいただけただろうか。 + +07:04.700 --> 07:08.360 +バッチデコードの方法もある。 + +07:08.360 --> 07:13.940 +トークンを使ってこれを実行すると、 1つの文字列の代わりに、 それぞれの文字列が1つのトークンを表す、 + +07:13.940 --> 07:19.550 +小さな文字列のセットが返ってくる。 + +07:19.550 --> 07:24.080 +だから、 この最初のトークンがここになったんだ。 + +07:24.080 --> 07:27.920 +そして、 それがどのように機能しているかを確認するために、 フォロースルーすることができる。 + +07:28.130 --> 07:30.920 +ええと、 ここから注目すべきことがいくつかある。 + +07:30.920 --> 07:37.730 +そのひとつは、 ほとんどの場合、 単語がトークンにマッピングされることだ。 + +07:37.730 --> 07:43.370 +1つのトークンにマッピングされる文字数は4文字よりはるかに多いのですが、 一般的な単語なので、 + +07:43.370 --> 07:45.380 +ボキャブラリーに入っています。 + +07:45.620 --> 07:58.700 +GPTトークナイザーと同じように、 単語の前にあるスペースもトークンの一部です。 + +07:58.700 --> 08:13.560 +So and so amは言葉の始まりで、 Amという文字はただのamとは違うトークンであり、 もっと複雑なものの中にある可能性のある文字の断片である。 + +08:14.250 --> 08:23.130 +また、 Tokenizersのようなものが、 単語トークンとISAの2つのトークンに分割されたことにもお気づきだろう。 + +08:23.460 --> 08:28.740 +ISAの語尾は面白いね。 + +08:28.740 --> 08:34.350 +それはトークン化の一部なんだ。 + +08:34.380 --> 08:37.890 +もうひとつ注意しなければならないのは、 大文字と小文字が区別されるということだ。 + +08:37.890 --> 08:43.860 +だから、 大文字のTがついたトークンがそこにあるのがわかるだろう。 + +08:45.120 --> 08:53.040 +最後に、 トークナイザー・ドット・ボキャブについて触れておこう。 + +08:53.070 --> 08:58.500 +tokenizer dot 
vocabを実行すると、 ええと、 これが表示されます。 + +08:58.500 --> 09:03.980 +言葉の断片と数字の完全な対応付けの辞書である。 + +09:04.310 --> 09:06.590 +そして、 ここにはかなり曖昧なものがあるのがわかるだろう。 + +09:06.590 --> 09:12.620 +非常に多くのトークンが用意されており、 中には異なる言語や異なる目的で使用される、 + +09:12.740 --> 09:15.920 +かなり奇妙なトークンも含まれている。 + +09:16.190 --> 09:22.580 +だから、 3文字や4文字の枠を超え、 さまざまなものを目にすることになる。 + +09:22.610 --> 09:26.630 +A ええと、 かなりたくさん印刷されています。 + +09:26.870 --> 09:32.840 +ええと、 この辞書をスクロールしていくと、 他のものが出てきます。 + +09:33.050 --> 09:34.040 +ここに戻ってこい。 + +09:34.250 --> 09:41.990 +印刷もできるし、 コメントもできる。 + +09:42.440 --> 09:48.470 +ええと、 追加されたボキャブラリーと呼ばれるもので、 さっき言った特別なトークンです。 + +09:48.650 --> 09:53.840 +申し訳ないが、 一番上にあるのは、 LMに合図を送るために使われる、 + +09:53.840 --> 10:01.860 +語彙に予約されている特別なトークンだ。 + +10:01.890 --> 10:02.580 +本文の冒頭。 + +10:02.610 --> 10:03.570 +本文終わり。 + +10:04.020 --> 10:06.150 +ちょっと遠慮がちに...。 + +10:06.180 --> 10:11.100 +そしてスタートヘッダ、 ID、 ヘッダ。 + +10:11.100 --> 10:12.690 +そして他にもいくつかある。 + +10:12.690 --> 10:14.190 +そしてパイソンのタグ。 + +10:14.220 --> 10:17.070 +明らかに特別な何かがある。 + +10:17.070 --> 10:25.470 +どんな理由であれ、 これらの特別なトークンを語彙に含め、 トレーニング中に提供することは、 + +10:25.470 --> 10:42.180 +推論を行う際や、 テキストを生成するためにモデルを実行する際に、 これらのトークンを使ってモデルに物事を示すことができるため、 有用であると認識されています。 + +10:42.960 --> 10:43.530 +分かった。 + +10:43.560 --> 10:47.580 +まあ、 これはラマ3モデルでちょっと遊んだだけだ。 + +10:47.640 --> 10:49.290 +ええと、 ラマ3。 1トークナイザー。 + +10:49.320 --> 10:56.670 +また戻ってきたら、 特にチャットに適用される方法を見てみよう。 + +10:56.670 --> 10:59.640 +それから、 他のトークナイザーも使ってみよう。 + +10:59.640 --> 11:00.390 +それではまた。 diff --git a/week5/community-contributions/subtitles/srts/59170227/ko_KR.srt b/week5/community-contributions/subtitles/srts/59170227/ko_KR.srt new file mode 100755 index 0000000..a31276e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170227/ko_KR.srt @@ -0,0 +1,502 @@ +WEBVTT + +00:00.200 --> 00:02.360 +구글 콜랍에 잘 오셨어요 + +00:02.360 --> 00:06.290 +이제 토큰이들의 세계를 탐험해 볼까요? 
+ +00:06.290 --> 00:11.360 +먼저 할 일은 수입품 처리를 하는 거예요 + +00:11.600 --> 00:15.290 +그걸 한 후 여기 이 문장을 언급하고 싶어요 + +00:15.290 --> 00:20.750 +지난 강의에서 언급하는 걸 잊었는데 Colab에 추가했어요 어쨌든 + +00:20.780 --> 00:23.300 +찾아서 제 설명을 읽어보세요 + +00:23.450 --> 00:28.220 +콜랍의 포옹 사이트에 로그인해야 할 거예요 + +00:28.220 --> 00:31.370 +그걸 위해 사용하는 코드가 이거죠 + +00:31.370 --> 00:36.890 +먼저, 포옹하는 얼굴 계정을 아직 만들지 않으셨다면 포옹하는 얼굴 계정이 필요해요 + +00:36.890 --> 00:37.700 +공짜예요 + +00:37.730 --> 00:40.910 +정말 멋지고 절대 후회하지 않을 거예요 + +00:40.910 --> 00:46.970 +안기페이스에 등록한 다음 설정으로 이동해 새 API 토큰을 생성하세요 + +00:46.970 --> 00:48.470 +스스로 허락하는 거죠 + +00:48.470 --> 00:52.130 +오늘은 올바른 권한이 필요 없지만 나중엔 필요할 테니 지금 설정하는 + +00:52.130 --> 00:53.060 +게 좋아요 + +00:53.090 --> 00:59.570 +다시 돌아와서 Colab의 이 키 섹션으로 가서 새 비밀을 추가하세요 + +00:59.570 --> 01:05.220 +비밀은 HF_token이라고 하고 값은 여러분의 토큰이어야 해요 + +01:05.220 --> 01:12.270 +이제 코드를 실행해서 기밀에서 HF 토큰을 가져오면 로그인 메서드를 호출할 + +01:12.300 --> 01:15.000 +거예요 여기 불러왔죠 + +01:15.000 --> 01:18.180 +로그인 방법은 얼굴을 안는 거예요 + +01:18.180 --> 01:19.470 +바로 실행하죠 + +01:19.470 --> 01:20.760 +다 됐어요 + +01:20.790 --> 01:23.400 +여기 보면 권한이 있다고 나와 있죠 + +01:24.060 --> 01:31.950 +토큰라이저에 대해 얘기해 보죠 환상적인 라마 3부터 시작할게요 1번, 메타의 상징적인 모델 오픈 + +01:31.950 --> 01:35.400 +소스 모델의 길을 닦았죠 + +01:35.880 --> 01:42.240 +llama 3을 사용하면요 1. 메타 서비스 약관에 먼저 서명하세요 + +01:42.240 --> 01:47.520 +모델 페이지에 방문해서 얼굴 안기기를 하면 돼요 여기 링크가 있죠 + +01:47.520 --> 01:52.680 +그 페이지 상단에 서명하기 위해 해야 할 간단한 지침이 있어요 + +01:52.830 --> 01:57.270 +이메일 주소를 제공해야 하는데 포옹하는 얼굴 계정과 일치하는 + +01:57.270 --> 01:59.610 +이메일 주소가 좋아요 + +01:59.610 --> 02:01.370 +I'm get it's get it. 
일이 빨리 끝난다는 뜻이죠 + +02:01.370 --> 02:04.370 +몇 분 내로 승인될 거예요 + +02:04.370 --> 02:07.610 +토요일 밤늦게까지 여러 번 해 봤어요 + +02:07.610 --> 02:09.680 +아주 빨리 승인을 받았어요 + +02:09.740 --> 02:13.460 +정말 꼼꼼해서 그런 건지 자동화되어 있어서 그런 건진 모르겠지만 + +02:13.550 --> 02:15.350 +정말 빠르네요 + +02:15.770 --> 02:20.810 +이 서비스 서명 약관에 뭔가 해로운 게 있다고 생각하실까 봐 말씀드리는데 작은 글씨를 읽어 보시면 + +02:20.810 --> 02:26.420 +lemma 3을 사용하지 않도록 확실히 하는 거예요 1번, 비도덕적인 행동과 선한 의도가 + +02:26.420 --> 02:30.770 +있을 경우입니다 이 수업에서는 그런 경우가 많죠 + +02:30.770 --> 02:34.400 +그러니 서명하는 건 문제가 안 될 거예요 + +02:34.400 --> 02:39.590 +그렇게 하면 라마다 3의 모든 변수를 볼 수 있죠 1번요 + +02:39.590 --> 02:43.070 +하나의 간판이 가족 전체에 적용돼요 + +02:43.370 --> 02:49.070 +라마 3이나 2 같은 구형 라마 모델을 사용하려면 모델 + +02:49.070 --> 02:53.060 +가족에 가서 계약서에 서명해야 해요 + +02:53.450 --> 02:57.650 +혹시 하기 싫거나 당장 승인해 주지 않는 것 같으면 + +02:57.650 --> 03:00.200 +나중에 시작해도 돼요 + +03:00.230 --> 03:05.490 +아니면 제가 3번 실행하는 걸 보셔도 돼요 1, 다른 토큰라이저와 작업하기 시작하면 + +03:05.490 --> 03:06.840 +그걸 선택하세요 + +03:06.840 --> 03:12.510 +하지만 이걸로 토큰라이저를 만드는 건 여기 이 한 줄이죠 + +03:12.690 --> 03:21.810 +안는 얼굴에는 오토 토큰마이저 클래스가 있습니다 이 모델을 위해 필요한 토큰마이저의 서브클래스를 + +03:21.810 --> 03:23.070 +생성하죠 + +03:23.100 --> 03:24.330 +그건 너무 걱정하지 마세요 + +03:24.330 --> 03:31.410 +오토 토큰마이저를 이용하면 됩니다 미리 훈련된 것을 이용해 수업 메서드를 호출합니다 즉, 미리 훈련된 모델이 + +03:31.410 --> 03:35.790 +있다면 토큰마이저를 이용하여 이를 위해 개발하면 된다는 거죠 + +03:35.820 --> 03:36.960 +그게 이름이에요 + +03:36.960 --> 03:38.760 +이게 우리가 사용하는 모델이에요 + +03:38.760 --> 03:41.610 +안아주기 얼굴 허브에서 바로 가져올 수 있는 거죠 + +03:41.610 --> 03:45.690 +메타 라마 3이에요 180억 달러요 + +03:45.720 --> 03:51.930 +이 트러스트 원격 코드는 true입니다 토큰라이저를 가져오면 모델의 + +03:51.930 --> 03:55.140 +일부인 코드가 있을 수 있어요 + +03:55.140 --> 03:57.750 +메타가 누군지 안다고 했잖아요 + +03:57.780 --> 04:01.570 +이건 괜찮으니까 믿어도 돼요 + +04:01.840 --> 04:04.030 +그것만 빼면 괜찮을 거예요 + +04:04.030 --> 04:06.040 +그저 추악한 경고만 남겨요 + +04:06.040 --> 04:10.930 +어글리 경고가 싫으면 그냥 Put을 해요 + +04:11.950 --> 04:12.550 +네 + +04:12.550 --> 04:15.970 +이것과 함께 다음으로 할 일은 텍스트를 이용하는 거예요 + +04:16.000 --> 04:24.040 +LLM 엔지니어들에게 토큰라이저의 작동을 보여드릴 수 있어 기쁩니다 텍스트를 문자열로 
만들어 Tokenizer.incode로 + +04:24.040 --> 04:27.160 +호출하죠 + +04:27.160 --> 04:30.070 +그 결과의 토큰을 인쇄하죠 + +04:30.760 --> 04:31.720 +여기 있네요 + +04:31.750 --> 04:33.400 +아주 간단한 거예요 + +04:33.400 --> 04:34.720 +그냥 번호표예요 + +04:34.720 --> 04:35.860 +그 이상은 아니에요 + +04:35.860 --> 04:37.390 +증표는 마법과 아무 상관 없어요 + +04:37.390 --> 04:38.440 +그냥 숫자일 뿐이에요 + +04:38.440 --> 04:40.960 +이 숫자들이 그 텍스트를 나타내죠 + +04:40.990 --> 04:43.600 +몇 개나 있는지 보죠 + +04:43.630 --> 04:50.320 +우리가 준 문자에 편지가 몇 통이나 있었는지부터 말해보죠 + +04:50.350 --> 04:53.560 +글자가 61개나 돼요 + +04:53.560 --> 04:56.260 +이제 패의 수를 세면 돼요 + +04:56.260 --> 05:02.510 +대략적으로 토큰 하나에 몇 글자가 매개되는지 기억하시나요? + +05:02.540 --> 05:06.110 +평균 4개예요 + +05:06.110 --> 05:06.440 +대강요 + +05:06.440 --> 05:12.890 +경험상 네 글자는 토큰이 되어야 해요 영어를 많이 안다면 말이죠 + +05:12.890 --> 05:16.880 +61통의 글자가 예상되네요 + +05:16.970 --> 05:19.790 +15토큰 정도 예상해요 + +05:19.820 --> 05:20.780 +get get을 해 보죠 + +05:20.780 --> 05:21.980 +15토큰요 + +05:21.980 --> 05:22.520 +됐어요 + +05:22.550 --> 05:25.280 +토큰이 정확히 15개예요 + +05:25.610 --> 05:31.940 +이 디코딩을 통해 토큰을 다시 텍스트로 바꿀 수 있어요 + +05:31.940 --> 05:35.150 +그래서 원문을 재창조할 거예요 + +05:35.150 --> 05:39.020 +Get it은 비슷하면서도 약간 달라요 + +05:39.020 --> 05:44.180 +보다시피 get get은 우리가 기대하는 텍스트예요 + +05:44.180 --> 05:50.990 +그런데 그 앞에 새로운 게 있어요 여기 재미있는 거요 비스듬한 대괄호로 기호 시작보다보다보다보다보다보다보다보다보다보다보다보다가 + +05:50.990 --> 05:55.010 +텍스트 집합이죠 + +05:55.040 --> 05:55.910 +이게 뭐죠? + +05:55.910 --> 06:01.090 +이건 특별한 토큰이란 건데요 제가 강조 표시한 모든 게 하나의 토큰에 매핑된 + +06:01.120 --> 06:01.900 +거죠 + +06:01.930 --> 06:09.340 +사실 여기 이 128,000 토큰은 특별한 토큰으로 우리 모델에 + +06:09.370 --> 06:14.740 +프롬프트의 텍스트의 시작을 알려주고 있어요 + +06:14.950 --> 06:20.710 +그 목적으로 LM의 특별한 지표가 된 거죠 + +06:20.740 --> 06:24.550 +이렇게 생각하실지도 몰라요 + +06:24.580 --> 06:28.960 +그렇다면 변압기의 구조가 그런 종류의 토큰을 기대하도록 설정되어야 + +06:28.990 --> 06:30.820 +한다는 뜻인가요? 
+ +06:30.910 --> 06:35.920 +지금은 아주 편하실지 모르겠지만 제 대답은 안 된다는 거예요 + +06:35.920 --> 06:37.270 +그런 뜻이 아니에요 + +06:37.300 --> 06:43.000 +이 말은 훈련 중에 본 모든 훈련 사례가 이런 식으로 설정됐다는 + +06:43.000 --> 06:44.080 +뜻이에요 + +06:44.080 --> 06:48.250 +훈련 예시는 이 특별한 토큰에서 시작됐죠 + +06:48.250 --> 06:52.780 +훈련을 통해 그런 걸 기대하며 익숙해졌죠 + +06:52.780 --> 06:58.330 +최상의 결과를 내기 위해서는 같은 방법을 써야 하죠 + +06:58.390 --> 07:02.210 +새로운 먹이를 줄 때 추론할 때 나타나요 + +07:02.990 --> 07:04.670 +이해가 되셨길 바라요 + +07:04.700 --> 07:08.360 +다른 방법으로 해독할 수도 있어요 + +07:08.360 --> 07:13.940 +그걸 토큰과 함께 실행하면 하나의 문자열 대신 이런 문자열 + +07:13.940 --> 07:19.550 +집합을 얻게 됩니다 각각의 문자열이 토큰을 나타내는 거죠 + +07:19.550 --> 07:24.080 +말씀드렸듯이 이 첫 번째 토큰이 이렇게 변했어요 + +07:24.080 --> 07:27.920 +그러면 그게 어떻게 작동하는지 볼 수 있죠 + +07:28.130 --> 07:30.920 +몇 가지 주의할 점이 있어요 + +07:30.920 --> 07:36.080 +바로 보실 수 있듯이, 그중 하나는 대부분의 경우 워드를 토큰에 매핑한 것입니다. 왜냐하면 여기에는 아주 + +07:36.080 --> 07:37.730 +간단한 단어들이 있으니까요. + +07:37.730 --> 07:43.370 +정말 신나요, 토큰 하나에 4자 이상으로 매핑되긴 하지만요 흔한 단어니까요 + +07:43.370 --> 07:45.380 +단어 선택에 포함돼 있죠 + +07:45.620 --> 07:53.180 +또 하나 주목할 점은 GPT 토큰라이저와 마찬가지로 단어 시작을 의미하는 + +07:53.180 --> 07:58.700 +겁니다 단어 앞의 이 공백은 토큰의 일부죠 + +07:58.700 --> 08:09.150 +그래서 AM은 단어의 시작으로 쓰이고 알파벳 AM은 문자 조각인 AM을 나타내는 다른 토큰이에요 + +08:09.150 --> 08:13.560 +더 복잡한 무언가에 속해 있을 수 있죠 + +08:14.250 --> 08:20.640 +토큰라이저 같은 것이 토큰 두 개로 나뉘는 것도 보이실 겁니다 하나는 워드 토큰이고 + +08:20.640 --> 08:23.130 +다른 하나는 ISA죠 + +08:23.460 --> 08:28.740 +ISA로 끝나는 단어가 참 흥미롭네요 + +08:28.740 --> 08:33.120 +여러 가지 끝에 걸렸을 수도 있어요 그것도 토큰화의 + +08:33.150 --> 08:34.350 +일부죠 + +08:34.380 --> 08:37.890 +또 하나 주목할 점은 대소문자를 구별한다는 거죠 + +08:37.890 --> 08:43.860 +보시다시피 대문자 T로 시작하는 토큰이 저기로 옮겨졌어요 + +08:45.120 --> 08:53.040 +마지막으로 말씀드리고 싶은 건 토큰라이저라는 단어예요 + +08:53.070 --> 08:58.500 +토큰라이저 닷 단어집을 실행하면 get이 나와요 + +08:58.500 --> 09:03.980 +단어와 숫자 조각 사이를 오가는 완전한 지도 사전이죠 + +09:04.310 --> 09:06.590 +잘 안 알려진 것들이 있어요 + +09:06.590 --> 09:12.620 +사용할 수 있는 토큰이 정말 많아요 여기엔 꽤 이상한 토큰들도 있어요 다른 언어의 + +09:12.740 --> 09:15.920 +토큰이거나 다른 목적으로 사용되고 있죠 + +09:16.190 --> 09:22.580 
+서너 글자 정도가 아니라 다양한 걸 볼 수 있어요 + +09:22.610 --> 09:26.630 +꽤 많이 인쇄했어요 + +09:26.870 --> 09:32.840 +여기서 보여드릴 게 또 있어요 우리 사전을 쭉 넘기면서 보여드리죠 + +09:33.050 --> 09:34.040 +Get it, Get it, Get it, Get it, get, it, it, it! 이리 와요 + +09:34.250 --> 09:41.990 +주석 인쇄도 할 수 있다는 거예요 + +09:42.440 --> 09:48.470 +어, 추가된 단어라고 하는 건데, 제가 아까 말했던 특별한 토큰이에요 + +09:48.650 --> 09:53.840 +예약된 특별한 토큰이 여러 개 있는데요 죄송합니다, 맨 + +09:53.840 --> 10:01.860 +위에 보이는 건 보캡에서 예약된 특별한 토큰으로 LM에 신호를 보내기 위해 사용되죠 + +10:01.890 --> 10:02.580 +글의 시작이죠 + +10:02.610 --> 10:03.570 +그게 다예요 + +10:04.020 --> 10:06.150 +예약된 것도 있고요 + +10:06.180 --> 10:11.100 +그리고 시작 헤더, ID와 헤더를 두죠 + +10:11.100 --> 10:12.690 +다른 것도 있어요 + +10:12.690 --> 10:14.190 +파이썬 태그도요 + +10:14.220 --> 10:17.070 +뭔가 특별한 게 있어요 + +10:17.070 --> 10:25.470 +어떤 이유에서든지 이 토큰들은 특별한 토큰들입니다. 이 특별한 토큰들을 단어 선택에 포함시키면 + +10:25.470 --> 10:33.300 +유용하게 쓰일 것입니다. 그리고 훈련 중에 제공해서 추론을 할 때나, 모델을 실행할 + +10:33.330 --> 10:38.850 +때 텍스트를 생성할 때, 이 토큰들을 이용해서 모델에 무언가를 + +10:38.850 --> 10:42.180 +표시할 수 있어요. + +10:42.960 --> 10:43.530 +좋아요 + +10:43.560 --> 10:47.580 +라마 3 모델은 좀 비트가 있었죠 + +10:47.640 --> 10:49.290 +라마 3요 토큰라이저 1개요 + +10:49.320 --> 10:56.670 +잠시 후에는 이 기능이 채팅방에 어떻게 적용되는지 살펴볼 거예요 + +10:56.670 --> 10:59.640 +그런 다음 다른 토큰라이저로 놀 거예요 + +10:59.640 --> 11:00.390 +그럼 그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59170233/en_US.srt b/week5/community-contributions/subtitles/srts/59170233/en_US.srt new file mode 100755 index 0000000..e6912ef --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170233/en_US.srt @@ -0,0 +1,475 @@ +WEBVTT + +00:00.560 --> 00:04.160 +Welcome back to our continued exploits with Tokenizers. + +00:04.160 --> 00:09.830 +What we're now going to look at is what's called the instruct variance of models. 
+
+00:09.830 --> 00:18.650
+So there are many models that have been fine tuned to be specifically designed for chats, for carrying
+
+00:18.650 --> 00:28.430
+out chat conversations with users, as one does with the with, uh, GPT four with with chat GPT um,
+
+00:28.520 --> 00:33.830
+typically when you see those models in hugging face, you'll see that they have the same name as their
+
+00:33.830 --> 00:40.580
+base models, but with instruct added to the end of it, meaning that they have been fine tuned to be
+
+00:40.580 --> 00:43.310
+used in this instruct use case.
+
+00:43.610 --> 00:50.870
+Uh, they have been trained to expect prompts in a particular structure with a particular set of special
+
+00:50.900 --> 00:59.660
+tokens that identifies the system message, the user message and assistance responses so that it forms
+
+00:59.690 --> 01:00.920
+a kind of a chat.
+
+01:00.920 --> 01:06.270
+And that is just simply part of the way that it's been trained with enough examples.
+
+01:06.270 --> 01:13.260
+So it expects it in this format, and this is hopefully going to bring some things together for you,
+
+01:13.260 --> 01:19.830
+because it's now finally going to close the loop on something where I planted a seed some time ago about
+
+01:19.830 --> 01:26.250
+the reason for this structure of messages, lists of dicts that we became very familiar with when we
+
+01:26.280 --> 01:28.290
+were playing with frontier models.
+
+01:28.290 --> 01:37.470
+So I'm going to create my tokenizer this time using the meta llama 3.1 8 billion instruct variant.
+
+01:37.470 --> 01:39.720
+So this will look familiar to you.
+
+01:39.720 --> 01:48.420
+This is one of those lists of dicts that we use so much with, uh, OpenAI and Claude and so on, uh,
+
+01:48.420 --> 01:55.530
+where you specify a role and content, a role a system is for the system message and user is for the
+
+01:55.530 --> 01:56.790
+user message.
+ +01:56.880 --> 02:06.570 +Then the tokenizers that Huggingface provide have a special function apply chat template and it will + +02:06.570 --> 02:16.170 +take messages in this format in the OpenAI API format, and it will convert it into the right structure + +02:16.170 --> 02:24.960 +to be used for a this particular model, the type of the prompt that this model is expecting, given + +02:24.960 --> 02:31.470 +the way it's been trained, if you have tokenized equals true here, um, then what we'll get back is + +02:31.470 --> 02:34.290 +just a series of numbers and we won't know what's going on. + +02:34.290 --> 02:35.910 +So I've got tokenized equals false. + +02:35.910 --> 02:39.750 +So what we'll get back will be the, the text version of it. + +02:39.750 --> 02:46.770 +And I'm going to print it so you can see what is the uh the what is it that this is converted into that + +02:46.770 --> 02:53.820 +gets pumped into the model at inference time for this particular conversation. + +02:53.820 --> 03:00.360 +And here it is, starts with a special token begin of text and then a header. + +03:00.360 --> 03:04.380 +And then system the word system and then end header. + +03:04.560 --> 03:10.240 +And then there's some information that's shoved in there about the cutting knowledge date and today's + +03:10.240 --> 03:10.780 +date. + +03:10.780 --> 03:12.160 +That's that's special. + +03:12.160 --> 03:14.260 +And I think that's a llama 3.1 thing. + +03:14.260 --> 03:17.830 +I don't remember that from previous llama families, but I could be wrong there. + +03:18.280 --> 03:25.840 +Uh, and then, um, this here is of course, the system message that we provided here. + +03:26.860 --> 03:31.870 +Uh, then there is another start header for user and header. + +03:31.870 --> 03:35.170 +And then this is the user message. 
+ +03:35.620 --> 03:41.800 +Then there's another start header and then the word assistant and then end header because we want the + +03:41.800 --> 03:44.590 +model to generate the assistance response. + +03:44.590 --> 03:50.800 +So this is kind of teeing up the model that what should come next right after this should be whatever + +03:50.800 --> 03:58.720 +the assistance said in response to this uh, prompt following this system instruction. + +03:59.590 --> 04:06.700 +So I'm hoping this is an aha moment for you that you see now how you can you can have a structure like + +04:06.700 --> 04:07.000 +this. + +04:07.000 --> 04:10.120 +And that's how you might think about the conversation with the model. + +04:10.120 --> 04:15.570 +But at the end of the day, what gets pumped into the model is a prompt that looks like this with special + +04:15.600 --> 04:16.980 +tokens in the mix. + +04:16.980 --> 04:22.470 +And because it's been trained with that structure, with those kinds of special tokens, it knows what + +04:22.470 --> 04:23.490 +needs to come next. + +04:23.520 --> 04:25.410 +The assistance reply. + +04:27.210 --> 04:30.990 +So that explains the chat interfaces. + +04:30.990 --> 04:34.140 +Let's work with a few more models to get some more experience with this. + +04:34.140 --> 04:36.360 +I'm going to pick three models in particular. + +04:36.480 --> 04:40.290 +Phi three is a model from Microsoft. + +04:40.680 --> 04:45.150 +Quinn two is this powerhouse model I keep mentioning from Alibaba Cloud. + +04:45.150 --> 04:49.800 +Star coder two is a model designed for generating code. + +04:49.890 --> 04:57.210 +It's built by three companies working together, collaborating ServiceNow and hugging face themselves + +04:57.240 --> 05:05.340 +hugging face and Nvidia uh, that those uh, three mighty companies have partnered to make this, uh, + +05:05.340 --> 05:11.450 +group star coder and have built this, uh, this particular model. + +05:11.450 --> 05:12.560 +Okay. 
+
+05:12.560 --> 05:18.060
+So, uh, let's give a try for Phi three.
+
+05:18.060 --> 05:24.300
+So we use exactly the same approach auto tokenizer from pre-trained and we provide the model.
+
+05:24.300 --> 05:27.750
+And now um I'm giving the same text.
+
+05:27.750 --> 05:31.470
+I'm excited to show Tokenizers in action to my LLM engineers.
+
+05:31.470 --> 05:39.480
+I'm going to reprint the previous the llama 3.1 Tokenizers results to remind you what its tokens look
+
+05:39.480 --> 05:40.020
+like.
+
+05:40.050 --> 05:44.070
+Then an empty line, and then I'm going to print Phi three.
+
+05:44.070 --> 05:49.500
+And the question is going to be at the end of the day, do they basically produce the same tokens or
+
+05:49.500 --> 05:50.490
+is it different.
+
+05:50.520 --> 05:52.200
+Let's have a look.
+
+05:53.700 --> 05:57.150
+Well you'll see right away they are completely different.
+
+05:57.270 --> 05:58.200
+Uh they're different.
+
+05:58.230 --> 06:05.250
+Not only is the generated text different, but this first one, which is the start of of message special
+
+06:05.280 --> 06:07.620
+token is completely different.
+
+06:07.830 --> 06:11.070
+Uh, let's do batch decode so we can see that.
+
+06:16.980 --> 06:17.760
+Tokenizer.
+
+06:17.790 --> 06:21.930
+Dot Batch decode.
+
+06:24.450 --> 06:27.030
+I'll have to say tokens.
+
+06:27.030 --> 06:28.110
+Equals.
+
+06:31.770 --> 06:32.970
+Tokens.
+
+06:33.780 --> 06:35.280
+Let's see what we get here.
+
+06:36.360 --> 06:40.800
+Uh, and we do get something completely different.
+
+06:40.860 --> 06:44.520
+And actually, interestingly, I was wrong with what I said a second ago.
+
+06:44.550 --> 06:52.350
+There isn't a start of sentence special token in the case of Phi three, so it just goes straight into it.
+
+06:53.250 --> 06:56.850
+So that's that's a very different approach.
+
+06:58.830 --> 06:59.670
+All right.
+
+06:59.700 --> 07:07.350
+Let's use the apply chat template to see how Phi three uses chat templates.
+
+07:07.380 --> 07:09.900
+Let's start by doing it for llama again.
+
+07:09.900 --> 07:11.250
+So we'll see llama's one.
+
+07:11.250 --> 07:17.070
+And then we'll print side by side the same the chat template for that same conversation, that same
+
+07:17.070 --> 07:18.990
+prompt for Phi three.
+
+07:19.020 --> 07:20.160
+Let's see how they look.
+
+07:20.160 --> 07:26.260
+So this is the one we just looked at for for llama, here is the equivalent for Phi three.
+
+07:26.290 --> 07:28.450
+It's obviously much shorter.
+
+07:28.450 --> 07:31.270
+It doesn't pass in the the date.
+
+07:31.510 --> 07:38.230
+And interestingly, whereas the structure for llama was about a header and then the word system and end
+
+07:38.260 --> 07:42.730
+header and a header the word user and an end header.
+
+07:42.730 --> 07:51.310
+In the case of Phi three there's just a special tag for system and a special tag for user and a special
+
+07:51.310 --> 07:52.720
+tag for assistant.
+
+07:52.720 --> 07:55.870
+So it's this whole sort of different approach.
+
+07:56.110 --> 08:02.020
+Um, and that's really interesting to see that these two tokenizers, these two models just have a different
+
+08:02.020 --> 08:04.240
+approach for how prompts get sent in.
+
+08:04.240 --> 08:07.870
+So obviously, hopefully you're getting the impression if you use the wrong tokenizer for the wrong
+
+08:07.870 --> 08:12.940
+model, you'd get garbage, because obviously this with different tokens and different structure is
+
+08:12.940 --> 08:15.430
+going to be meaningless to llama three.
+
+08:16.120 --> 08:18.850
+And now let's do the same for Qwen two.
+
+08:18.880 --> 08:23.020
+We're going to see the original llama version.
+
+08:23.020 --> 08:26.870
+And then we're going to show the Phi three version and then the Qwen two version.
+
+08:27.050 --> 08:28.460
+Here they come.
+
+08:29.120 --> 08:35.690
+Uh, obviously you can see totally different results for the three tokenizers.
+
+08:35.750 --> 08:38.720
+Uh, and one more time highlights.
+
+08:38.720 --> 08:41.810
+You got to pick the right tokenizer for the right model.
+
+08:43.370 --> 08:49.430
+Uh, and, uh, let's just apply the chat template and we'll see again the chat templates for that same
+
+08:49.430 --> 08:51.170
+message about telling a joke.
+
+08:51.170 --> 08:52.400
+We'll see that for llama.
+
+08:52.400 --> 08:56.330
+And then for Phi three and then for Qwen two all side by side.
+
+08:56.330 --> 08:57.350
+Let's see what they look like.
+
+08:57.380 --> 08:59.000
+We already saw the one from llama.
+
+08:59.000 --> 09:01.010
+We already saw the one from Phi three.
+
+09:01.010 --> 09:03.560
+And here is the one for Qwen two.
+
+09:03.560 --> 09:06.650
+And what you'll see is that it's it's sort of somewhere in between.
+
+09:06.680 --> 09:08.840
+It does a bit like llama.
+
+09:08.840 --> 09:14.030
+It's got the, the im start im end and system in here.
+
+09:14.210 --> 09:16.850
+Um and then user and then assistant.
+
+09:16.850 --> 09:19.250
+So it's some somewhere in between the two.
+
+09:19.250 --> 09:23.870
+Uh, it doesn't uh, it doesn't have something in between the word.
+
+09:23.870 --> 09:26.000
+It doesn't have a header special tag.
+
+09:26.000 --> 09:28.440
+It just has, uh, this approach here.
+
+09:28.440 --> 09:36.810
+So it's an interesting again a third approach, another variation and with different special tokens.
+
+09:37.740 --> 09:38.370
+All right.
+
+09:38.370 --> 09:41.580
+And finally let me show you Star Coder two.
+
+09:41.610 --> 09:44.520
+This is the code generation model.
+
+09:44.520 --> 09:46.440
+We're going to take its tokenizer.
+
+09:46.440 --> 09:49.470
+And we're going to put this code in there.
+
+09:49.500 --> 09:54.570
+Hello world a def hello world uh taking a person variable.
+
+09:54.570 --> 09:55.980
+And it's going to print hello.
+
+09:55.980 --> 09:57.090
+And then the person.
+
+09:57.090 --> 10:02.220
+And then we just use the same encode to turn it into tokens.
+
+10:02.220 --> 10:09.000
+And what I'm then going to do is just print out each token followed by what did that get to uh, get
+
+10:09.030 --> 10:11.730
+mapped to what what text did that represent?
+
+10:11.730 --> 10:18.840
+And what you'll see here is that there was something at the beginning, and then there's def went into
+
+10:18.840 --> 10:25.110
+one token and then hello underscore world and then person.
+
+10:25.110 --> 10:33.210
+This here obviously will will reflect the tab and then print hello comma person close brackets.
+
+10:33.210 --> 10:42.660
+So it gives you some sense that, um, the star coder two tokenizer is a tokenizer that is designed
+
+10:42.660 --> 10:46.140
+around tokenizing code rather than English.
+
+10:46.500 --> 10:48.120
+And there's some experiments you can do.
+
+10:48.150 --> 10:54.060
+First of all, try out different tokenizers try exploring mapping from text to tokens.
+
+10:54.180 --> 10:55.590
+Find out which words.
+
+10:55.590 --> 11:02.040
+Try and find the rarest possible word that has a single token in in llama's.
+
+11:02.040 --> 11:06.360
+Uh, tokenizer or perhaps the longest word or something like that.
+
+11:06.360 --> 11:09.720
+Do some experiments, um, and then satisfy you.
+
+11:10.170 --> 11:15.210
+Satisfy yourself that if you take a pretty complicated piece of code, you should find that star coder
+
+11:15.240 --> 11:21.270
+two's tokenizer tokenizes it in a more efficient way than one of the tokenizers that's designed for just
+
+11:21.270 --> 11:22.260
+English.
+
+11:22.650 --> 11:30.570
+And at that point, you will be an expert in the world of open source tokenizers and you'll be ready
+
+11:30.570 --> 11:33.180
+to take on the next piece, which is models.
+
+11:33.180 --> 11:35.160
+First, let's go back to the slides.
diff --git a/week5/community-contributions/subtitles/srts/59170233/ja_JP.srt b/week5/community-contributions/subtitles/srts/59170233/ja_JP.srt new file mode 100755 index 0000000..8bcda7a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170233/ja_JP.srt @@ -0,0 +1,406 @@ +WEBVTT + +00:00.560 --> 00:04.160 +トーケナイザーの活躍をご覧いただき、 ありがとうございます。 + +00:04.160 --> 00:09.830 +これから見ていくのは、 モデルのインストラクター分散と呼ばれるものだ。 + +00:09.830 --> 00:28.430 +そのため、 ユーザーとチャットで会話をするために特別に設計された、 + +00:28.520 --> 00:43.310 +チャット用に微調整されたモデルがたくさんあります。 + +00:43.610 --> 00:50.870 +つまり、 システムメッセージ、 ユーザーメッセージ、 アシスタンスレスポンスを識別する特別なトークンのセットを持つ特定の構造のプロンプトを期待するように訓練されているので、 + +00:50.900 --> 01:00.920 +一種のチャットを形成している。 + +01:00.920 --> 01:06.270 +そして、 それは単に十分な例で訓練された方法の一部に過ぎない。 + +01:06.270 --> 01:13.260 +というのも、 フロンティア・モデルで遊んでいたときに慣れ親しんだ、 + +01:13.260 --> 01:19.830 +メッセージやディクテット(辞書)のリストという構造の理由について、 + +01:19.830 --> 01:28.290 +私が少し前に種をまいたことがあったからだ。 + +01:28.290 --> 01:37.470 +というわけで、 今回はメタ・レンマ3を使ってトークナイザーを作ってみようと思う。 180億のインストラクター・バリアント。 + +01:37.470 --> 01:39.720 +だから、 これは見覚えがあるだろう。 + +01:39.720 --> 01:48.420 +これは、 OpenAIやClaudeなどでよく使うディクテーションのリストのひとつで、 + +01:48.420 --> 01:56.790 +ロールとコンテンツを指定する。 + +01:56.880 --> 02:06.570 +Huggingfaceが提供するトークナイザーは 特別な関数を持っています チャットテンプレートを適用し OpenAI APIフォーマットで + +02:06.570 --> 02:16.170 +このフォーマットのメッセージを受け取ります そしてこの特定のモデルで使用される 適切な構造に変換します このモデルが期待しているプロンプトのタイプは + +02:16.170 --> 02:34.290 +それが訓練された方法であることを考慮します ここでtokenized equals trueを指定した場合 返ってくるのは単なる数字の羅列で 何が起こっているのかわかりません + +02:34.290 --> 02:35.910 +だから、 トークン化イコールfalseにしたんだ。 + +02:35.910 --> 02:39.750 +だから、 私たちに戻ってくるのは、 そのテキスト版になる。 + +02:39.750 --> 02:46.770 +そして、 この会話を推論するときに、 これが何に変換されてモデルに送り込まれるのかがわかるように、 + +02:46.770 --> 02:53.820 +これを印刷します。 + +02:53.820 --> 03:00.360 +特別なトークンのテキストで始まり、 ヘッダーがある。 + +03:00.360 --> 03:04.380 +そして、 システムという言葉をシステムにして、 ヘッダーを終了する。 + +03:04.560 --> 03:10.780 +そして、 そこにはカット知識の日付と今日の日付についての情報が押し込まれている。 + +03:10.780 --> 03:12.160 +それは特別なことだ。 + +03:12.160 --> 03:14.260 +それにラマ3だと思う。 
1のことだ。 + +03:14.260 --> 03:17.830 +以前のリャマの家族にはそのような記憶はないが、 間違っているかもしれない。 + +03:18.280 --> 03:25.840 +それから、 これはもちろん、 私たちがここで提供したシステムメッセージです。 + +03:26.860 --> 03:31.870 +それから、 ユーザーとヘッダーの開始ヘッダーがもう一つある。 + +03:31.870 --> 03:35.170 +そして、 これがユーザーメッセージだ。 + +03:35.620 --> 03:41.800 +それから、 別の開始ヘッダーがあり、 アシスタントという言葉があり、 + +03:41.800 --> 03:44.590 +そして終了ヘッダーがある。 + +03:44.590 --> 03:50.800 +つまりこれは、 この後に続くのは、 このシステム指示に続くプロンプトに応答してアシスタンスが言ったことであるべきだ、 + +03:50.800 --> 03:58.720 +というモデルのお膳立てのようなものだ。 + +03:59.590 --> 04:07.000 +だから、 これがあなたにとってハッとするような瞬間であってほしい。 どうすればこのような構造を持つことができるのか、 おわかりいただけただろうか? + +04:07.000 --> 04:10.120 +そして、 モデルとの会話について、 あなたはこう考えるかもしれない。 + +04:10.120 --> 04:16.980 +しかし、 結局のところ、 モデルに投入されるのは、 特別なトークンが混じったこのようなプロンプトだ。 + +04:16.980 --> 04:23.490 +そして、 そのような構造、 特別なトークンで訓練されているため、 次に何が必要か分かっている。 + +04:23.520 --> 04:25.410 +とアシスタンスは答える。 + +04:27.210 --> 04:30.990 +というわけで、 チャットのインターフェースについて説明しよう。 + +04:30.990 --> 04:34.140 +もう少しモデルを使って経験を積もう。 + +04:34.140 --> 04:36.360 +私は特に3つのモデルを選ぶつもりだ。 + +04:36.480 --> 04:40.290 +ファイ3はマイクロソフトのモデル。 + +04:40.680 --> 04:45.150 +クイン2は、 アリババ・クラウドが提供するこの強力なモデルだ。 + +04:45.150 --> 04:49.800 +スターコーダー2は、 コードを生成するために設計されたモデルだ。 + +04:49.890 --> 04:57.210 +ServiceNowとNvidiaの3社が協力し、 顔をくっつけ、 顔をくっつけ、 + +04:57.240 --> 05:05.340 +顔をくっつけ、 顔をくっつけ、 顔をくっつけ、 顔をくっつけ......この3社が提携して、 + +05:05.340 --> 05:11.450 +このスター・コーダーを作り、 この特別なモデルを作った。 + +05:11.450 --> 05:12.560 +オーケー。 + +05:12.560 --> 05:18.060 +では、 ファイ3に挑戦してみよう。 + +05:18.060 --> 05:24.300 +そこで、 まったく同じアプローチで、 事前に訓練された自動トークナイザーを使い、 モデルを提供する。 + +05:24.300 --> 05:27.750 +そして今、 私は同じ文章を書いている。 + +05:27.750 --> 05:31.470 +LLMのエンジニアたちに、 トーケナイザーの動きを見せるのが楽しみだ。 + +05:31.470 --> 05:40.020 +前回のザ・ラマ3を再掲する。 1 トークンがどのように見えるかを思い出させるTokenizersの結果。 + +05:40.050 --> 05:44.070 +それから空白の行を入れ、 ファイ3をプリントする。 + +05:44.070 --> 05:50.490 +そして問題は、 一日の終わりに、 基本的に同じトークンを生産するのか、 それとも違うのかということだ。 + +05:50.520 --> 05:52.200 +見てみよう。 + +05:53.700 --> 05:57.150 +まあ、 両者がまったく違うことはすぐにわかるだろう。 + +05:57.270 --> 05:58.200 +彼らは違うんだ。 + +05:58.230 
--> 06:07.620 +生成されたテキストが違うだけでなく、 メッセージの特別なトークンの始まりであるこの最初のテキストもまったく違う。 + +06:07.830 --> 06:11.070 +ええと、 バッチデコードをして、 それを見てみましょう。 + +06:16.980 --> 06:17.760 +トーケナイザー。 + +06:17.790 --> 06:21.930 +ドットバッチデコード。 + +06:24.450 --> 06:27.030 +トークンと言わざるを得ない。 + +06:27.030 --> 06:28.110 +イコールである。 + +06:31.770 --> 06:32.970 +トークン + +06:33.780 --> 06:35.280 +何が出てくるか見てみよう。 + +06:36.360 --> 06:40.800 +そして、 まったく違うものを手に入れた。 + +06:40.860 --> 06:44.520 +そして実は、 興味深いことに、 1秒前に言ったことは間違っていた。 + +06:44.550 --> 06:52.350 +53の場合は文頭の特殊トークンがないので、 そのまま文頭に入る。 + +06:53.250 --> 06:56.850 +だから、 それは非常に異なるアプローチなんだ。 + +06:58.830 --> 06:59.670 +分かった。 + +06:59.700 --> 07:07.350 +適用されたチャットテンプレートを使って、 53がどのようにチャットテンプレートを使うか見てみよう。 + +07:07.380 --> 07:09.900 +まずはラマにもう一度やってみよう。 + +07:09.900 --> 07:11.250 +だから、 リャマに会うことになる。 + +07:11.250 --> 07:18.990 +そして、 同じ会話、 同じプロンプト53のチャットテンプレートを並べて印刷します。 + +07:19.020 --> 07:20.160 +どう見えるか見てみよう。 + +07:20.160 --> 07:26.260 +つまり、 これはラマに相当するもので、 ファイ3に相当するものはこちらだ。 + +07:26.290 --> 07:28.450 +明らかにもっと短い。 + +07:28.450 --> 07:31.270 +日付が変わっても通過しない。 + +07:31.510 --> 07:38.230 +そして興味深いことに、 Lamaの構造がヘッダー、 システム、 エンドヘッダー、 ユーザー、 エンドヘッダーという構成だったのに対して、 + +07:38.260 --> 07:42.730 +Lamaはヘッダー、 システム、 エンドヘッダーという構成になっている。 + +07:42.730 --> 07:52.720 +ファイ3の場合は、 システム用の特別なタグとユーザー用の特別なタグ、 アシスタント用の特別なタグがあるだけだ。 + +07:52.720 --> 07:55.870 +だから、 まったく違うアプローチなんだ。 + +07:56.110 --> 08:02.020 +この2つのトークナイザー、 この2つのモデルは、 プロンプトがどのように送信されるかについて異なるアプローチを持っているというのは、 + +08:02.020 --> 08:04.240 +実に興味深いことです。 + +08:04.240 --> 08:07.870 +だから、 もし間違ったモデルに間違ったトークナイザーを使ったら、 ゴミになってしまうという印象を持ってもらえればいいんだけど、 + +08:07.870 --> 08:15.430 +トークンが違ったり構造が違ったりすると、 llama 3にとっては無意味になってしまうのは明らかだからね。 + +08:16.120 --> 08:18.850 +そして今度は、 クイン2についても同じことをやってみよう。 + +08:18.880 --> 08:23.020 +オリジナルのラマ・バージョンを見るつもりだ。 + +08:23.020 --> 08:26.870 +そして、 ファイ3バージョンとファイ2バージョンをお見せします。 + +08:27.050 --> 08:28.460 +来たぞ。 + +08:29.120 --> 08:35.690 +この3つのトークナイザーで、 まったく異なる結果が得られることは明らかだ。 + +08:35.750 --> 08:38.720 +それと、 もう1回ハイライトを。 + +08:38.720 --> 08:41.810 
+適切なモデルに適切なトークナイザーを選ばなければならない。 + +08:43.370 --> 08:51.170 +チャットテンプレートを適用して、 ジョークを言うという同じメッセージのチャットテンプレートをもう一度見てみましょう。 + +08:51.170 --> 08:52.400 +それはリャマのために見ることにしよう。 + +08:52.400 --> 08:56.330 +そして5-3、 クイン-2......。 + +08:56.330 --> 08:57.350 +どんなものか見てみよう。 + +08:57.380 --> 08:59.000 +リャマのものはすでに見た。 + +08:59.000 --> 09:01.010 +53年のものはすでに見た。 + +09:01.010 --> 09:03.560 +そして、 これがクイン2のものだ。 + +09:03.560 --> 09:06.650 +その中間のようなものだ。 + +09:06.680 --> 09:08.840 +ラマに少し似ている。 + +09:08.840 --> 09:14.030 +イム・スタート、 イム・エンド、 そしてシステムがここにある。 + +09:14.210 --> 09:16.850 +次にユーザー、 そしてアシスタント。 + +09:16.850 --> 09:19.250 +つまり、 この2つの中間ということになる。 + +09:19.250 --> 09:23.870 +ええと、 単語の間に何かが入っているわけではないんだ。 + +09:23.870 --> 09:26.000 +ヘッダーの特別なタグはない。 + +09:26.000 --> 09:28.440 +ただ、 その、 このアプローチなんだ。 + +09:28.440 --> 09:36.810 +だから、 第3のアプローチ、 別のバリエーション、 別の特別なトークンというのはまた面白い。 + +09:37.740 --> 09:38.370 +分かった。 + +09:38.370 --> 09:41.580 +そして最後に、 スターコーダー2をお見せしよう。 + +09:41.610 --> 09:44.520 +これはコード生成モジュールである。 + +09:44.520 --> 09:46.440 +トークン化する。 + +09:46.440 --> 09:49.470 +そこにこのコードを入れる。 + +09:49.500 --> 09:54.570 +Hello world a def hello world uh person 変数を取る。 + +09:54.570 --> 09:55.980 +そして、 ハローと印刷される。 + +09:55.980 --> 09:57.090 +そしてその人。 + +09:57.090 --> 10:02.220 +そして、 同じエンコードを使ってトークンに変換する。 + +10:02.220 --> 10:11.730 +そして、 各トークンの後に、 そのトークンは何にマッピングされ、 そのテキストは何を表しているのか? 
+ +10:11.730 --> 10:18.840 +ここでわかることは、 最初に何かがあり、 次にdefが1つのトークンに入り、 次にhello + +10:18.840 --> 10:25.110 +underscore world、 そしてpersonということだ。 + +10:25.110 --> 10:33.210 +これは明らかにタブを反映し、 ハローカンマの人を閉じ括弧で囲んで印刷する。 + +10:33.210 --> 10:46.140 +つまり、 スターコーダー・ツー・トークナイザーは、 英語ではなくコードをトークン化するために設計されたトークナイザーなのだ。 + +10:46.500 --> 10:48.120 +そして、 いくつかできる実験もある。 + +10:48.150 --> 10:54.060 +まずは、 さまざまなトークナイザーを試して、 テキストからトークンへのマッピングを探ってみよう。 + +10:54.180 --> 10:55.590 +どの単語が使われているか調べる + +10:55.590 --> 11:02.040 +リャマに含まれるトークンが1つである、 可能な限りレアな単語を探してみてください。 + +11:02.040 --> 11:06.360 +トークナイザーとか、 一番長い単語とか、 そんな感じかな。 + +11:06.360 --> 11:09.720 +いくつか実験をして、 それから満足するんだ。 + +11:10.170 --> 11:15.210 +かなり複雑なコードであっても、 star coder tos tokenizerの方が、 + +11:15.240 --> 11:22.260 +英語専用のトークナイザーよりも効率的にトークン化できることがお分かりいただけるはずです。 + +11:22.650 --> 11:33.180 +そしてその時点で、 あなたはオープン・ソース・トークナイザーの世界におけるエキスパートとなり、 次のピースであるモデルに挑戦する準備が整うだろう。 + +11:33.180 --> 11:35.160 +まず、 スライドに戻ろう。 diff --git a/week5/community-contributions/subtitles/srts/59170233/ko_KR.srt b/week5/community-contributions/subtitles/srts/59170233/ko_KR.srt new file mode 100755 index 0000000..6e26627 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170233/ko_KR.srt @@ -0,0 +1,451 @@ +WEBVTT + +00:00.560 --> 00:04.160 +토큰이들과의 활약에 돌아오신 걸 환영해요 + +00:04.160 --> 00:09.830 +지금 볼 것은 모델 지시 변수라는 건데요 + +00:09.830 --> 00:18.650 +채팅용으로 특별히 설계된 모델도 많습니다 사용자와의 채팅을 수행하기 위해서죠 + +00:18.650 --> 00:28.430 +GPT4 채팅도 마찬가지입니다 얼굴을 끌어안는 모델이 등장할 경우 베이스 모델과 + +00:28.520 --> 00:33.830 +이름이 같지만 끝에 지시 사항이 추가됩니다 이 + +00:33.830 --> 00:43.310 +지시 사항 사용 사례에 적절하게 사용되도록 설계됐다는 뜻이죠 + +00:43.610 --> 00:50.870 +특정 구조에서 특정 토큰으로 시스템 메시지와 + +00:50.900 --> 01:00.920 +사용자 메시지 지원 답변을 식별해 채팅을 하도록 훈련받았죠 + +01:00.920 --> 01:06.270 +많은 예시를 들면서 훈련된 방식의 일부일 뿐이죠 + +01:06.270 --> 01:13.260 +이 포맷에서 기대하죠 여러분께 뭔가 제공하면 좋겠네요 이제 루프를 + +01:13.260 --> 01:19.830 +닫을 테니까요 한참 전에 메시지 구조의 이유에 대해 시드를 + +01:19.830 --> 01:28.290 +심었던 거죠 독재 목록은 프론티어 모델을 할 때 아주 익숙해졌어요 + +01:28.290 --> 01:37.470 +이번엔 토큰라이저를 만들게요 메타 
lemma 3을 이용해서요 180억 개요 + +01:37.470 --> 01:39.720 +그러니 낯익을 거예요 + +01:39.720 --> 01:48.420 +이것은 우리가 오픈AI나 클로드에서 많이 사용하는 독촉 목록 중 하나입니다 역할과 콘텐츠를 + +01:48.420 --> 01:56.790 +지정하는 거죠 역할 시스템은 시스템 메시지 사용자는 사용자 메시지에요 + +01:56.880 --> 02:06.570 +H깅페이스가 제공하는 토큰라이저는 특별한 기능을 수행해 채팅 템플릿을 적용하고 OpenAI + +02:06.570 --> 02:16.170 +API 포맷의 이 포맷 메시지를 취합니다 올바른 구조로 전환해 이 모델이 기대하는 특정 + +02:16.170 --> 02:24.960 +프롬프트 유형에 사용되죠 훈련된 방식에 따라 토큰라이즈 = true 함수를 + +02:24.960 --> 02:31.470 +입력하면 일련의 숫자만 나올 뿐 무슨 일이 일어나는지 알 수 + +02:31.470 --> 02:34.290 +없어요 + +02:34.290 --> 02:35.910 +토큰화 = false라고 적었죠 + +02:35.910 --> 02:39.750 +Get in get은 텍스트 버전이에요 + +02:39.750 --> 02:46.770 +프린트해서 여러분이 보실 수 있도록 하겠습니다 이게 무엇으로 변환되었는가 + +02:46.770 --> 02:53.820 +하는 거죠 이 특정 대화에 대한 추론 시간에 모델로 펌프질되었어요 + +02:53.820 --> 03:00.360 +여기 있네요, 특별한 토큰 비긴즈 오브 텍스트와 헤더로 시작하네요 + +03:00.360 --> 03:04.380 +워드 시스템과 end 헤더도 있어요 + +03:04.560 --> 03:10.780 +그리고 절단 연도와 오늘 날짜에 대한 정보도 있어요 + +03:10.780 --> 03:12.160 +정말 특별하네요 + +03:12.160 --> 03:14.260 +라마 3 같아요 하나만요 + +03:14.260 --> 03:17.830 +이전 라마 가족들은 그런 적이 없었는데 제가 틀렸을 수도 있어요 + +03:18.280 --> 03:25.840 +그리고 이건 물론 우리가 제공한 시스템 메시지예요 + +03:26.860 --> 03:31.870 +User와 헤더를 위한 또 다른 스타트 헤더가 있어요 + +03:31.870 --> 03:35.170 +이건 사용자 메시지예요 + +03:35.620 --> 03:41.800 +또 다른 start 헤더와 보조란 단어가 있고 end 헤더가 있어요 모델이 + +03:41.800 --> 03:44.590 +보조 응답을 생성해야 하니까요 + +03:44.590 --> 03:50.800 +이 모델은 다음 순서로 이어집니다 이 시스템 + +03:50.800 --> 03:58.720 +지침을 신속히 따르는 지원군에 대한 대응이죠 + +03:59.590 --> 04:07.000 +이번 기회에 깨달으셨으면 좋겠어요 어떻게 이런 구조물을 지었는지요 + +04:07.000 --> 04:10.120 +모델과의 대화도 그렇게 생각해야 해요 + +04:10.120 --> 04:15.570 +하지만 결국 모델에는 이런 프롬프트가 들어갑니다 특별한 토큰이 들어 + +04:15.600 --> 04:16.980 +있는 프롬프트죠 + +04:16.980 --> 04:22.470 +그런 구조와 특별한 토큰으로 훈련했기 때문에 다음에 뭐가 필요한지 + +04:22.470 --> 04:23.490 +알아요 + +04:23.520 --> 04:25.410 +지원팀 응답요 + +04:27.210 --> 04:30.990 +채팅 인터페이스를 설명하는 거죠 + +04:30.990 --> 04:34.140 +get it의 경험을 쌓기 위해 모델 몇 명과 더 일해 보죠 + +04:34.140 --> 04:36.360 +전 특별히 세 가지 모델을 고를 거예요 + +04:36.480 --> 04:40.290 +파이 3은 마이크로소프트 
모델이에요 + +04:40.680 --> 04:45.150 +퀸 2는 알리바바 클라우드에서 계속 언급했던 강력한 모델이에요 + +04:45.150 --> 04:49.800 +스타 코더 2는 코드 생성을 위해 설계된 모델이죠 + +04:49.890 --> 04:57.210 +세 회사가 협력해서 만든 회사로 서비스나우와 포옹하는 + +04:57.240 --> 05:05.340 +얼굴 그리고 엔비디아입니다 이 세 회사가 파트너십을 맺어 + +05:05.340 --> 05:11.450 +그룹스타 코더와 이 모델을 만들었죠 + +05:11.450 --> 05:12.560 +네 + +05:12.560 --> 05:18.060 +그럼 피3을 불러 볼까요? + +05:18.060 --> 05:24.300 +오토 토큰라이저와 똑같은 접근법을 사용합니다 미리 훈련받은 모델이죠 + +05:24.300 --> 05:27.750 +지금은 저도 같은 문자를 보내고 있어요 + +05:27.750 --> 05:31.470 +LLM 엔지니어들에게 토큰라이저의 작동을 보여 줄 생각에 신나요 + +05:31.470 --> 05:40.020 +라마 3을 재인쇄할 거예요 토큰라이저 1개 토큰의 모습을 다시 보여드리죠 + +05:40.050 --> 05:44.070 +빈 선이 하나 있고 피3을 프린트할 거예요 + +05:44.070 --> 05:49.500 +결국 중요한 질문은 이겁니다 기본적으로 같은 토큰을 생산하나요? 아니면 + +05:49.500 --> 05:50.490 +다른가요? + +05:50.520 --> 05:52.200 +한번 보죠 + +05:53.700 --> 05:57.150 +보면 아시겠지만 완전히 달라요 + +05:57.270 --> 05:58.200 +달라요 + +05:58.230 --> 06:05.250 +생성된 텍스트만 다른 게 아니라 메시지 특별 토큰의 시작인 이 첫 번째 + +06:05.280 --> 06:07.620 +것도 완전히 달라요 + +06:07.830 --> 06:11.070 +그걸 볼 수 있게 배치 디코딩을 하죠 + +06:16.980 --> 06:17.760 +토큰자이예요 + +06:17.790 --> 06:21.930 +닷 배치 해독법이에요 + +06:24.450 --> 06:27.030 +토큰이라고 해야겠네요 + +06:27.030 --> 06:28.110 +동등하게요 + +06:31.770 --> 06:32.970 +토큰요 + +06:33.780 --> 06:35.280 +get in the right 한번 볼까요? 
+ +06:36.360 --> 06:40.800 +Get in get은 완전히 달라요 + +06:40.860 --> 06:44.520 +사실, 흥미롭게도 조금 전에 한 말은 틀렸어요 + +06:44.550 --> 06:52.350 +53번의 경우 문장 시작 특별 토큰이 없어요 그냥 바로 들어가죠 + +06:53.250 --> 06:56.850 +아주 색다른 접근법이죠 + +06:58.830 --> 06:59.670 +좋아요 + +06:59.700 --> 07:07.350 +채팅 템플릿 적용을 이용해 53명이 채팅 템플릿을 어떻게 사용하는지 보죠 + +07:07.380 --> 07:09.900 +라마를 위해 다시 해 보죠 + +07:09.900 --> 07:11.250 +라마도 볼 수 있겠네요 + +07:11.250 --> 07:17.070 +그런 다음 나란히 같은 채팅 템플릿을 출력할 겁니다 같은 대화, 같은 53에 대한 같은 + +07:17.070 --> 07:18.990 +프롬프트를 위해서요 + +07:19.020 --> 07:20.160 +어떤지 보죠 + +07:20.160 --> 07:26.260 +이게 라마에게 필요한 거고 이건 피3에 해당하는 거예요 + +07:26.290 --> 07:28.450 +훨씬 짧죠 + +07:28.450 --> 07:31.270 +날짜에 안 들어가요 + +07:31.510 --> 07:38.230 +흥미롭게도 라마라는 단어는 헤더가 기본이었어요 워드 시스템과 엔드 + +07:38.260 --> 07:42.730 +헤더가 사용자와 엔드 헤더로 이어졌죠 + +07:42.730 --> 07:51.310 +파이 3의 경우 시스템을 위한 특별한 태그와 사용자를 위한 특별한 태그 보조를 위한 특별한 + +07:51.310 --> 07:52.720 +태그가 있어요 + +07:52.720 --> 07:55.870 +접근 방식이 완전히 달라요 + +07:56.110 --> 08:02.020 +흥미로운 점은 두 토큰라이저, 두 모델이 프롬프트를 get으로 보내는 방법에 + +08:02.020 --> 08:04.240 +다른 접근법을 취한다는 거죠 + +08:04.240 --> 08:07.870 +만약 잘못된 토큰라이저를 잘못된 모델로 사용한다면 가비지가 + +08:07.870 --> 08:12.940 +된다는 것을 아셔야 합니다. 왜냐하면 토큰이 다르고 구조가 다르다면 llama3에는 + +08:12.940 --> 08:15.430 +의미가 없기 때문이죠. 
+ +08:16.120 --> 08:18.850 +이제 퀸 2호도 똑같이 해 보죠 + +08:18.880 --> 08:23.020 +라마의 원조 버전을 볼 거예요 + +08:23.020 --> 08:26.870 +파이3 버전을 보여드리고 두 가지 버전을 보여드릴게요 + +08:27.050 --> 08:28.460 +저기 오네요 + +08:29.120 --> 08:35.690 +보시다시피 토큰라이저 세 개는 완전히 다른 결과를 볼 수 있죠 + +08:35.750 --> 08:38.720 +하이라이트 한 번 더 할게요 + +08:38.720 --> 08:41.810 +모델에 맞는 토큰라이저를 골라야 해요 + +08:43.370 --> 08:49.430 +채팅 템플릿을 적용해 보죠 같은 메시지를 전달하는 채팅 템플릿이 + +08:49.430 --> 08:51.170 +또 있어요 + +08:51.170 --> 08:52.400 +곧 알게 되겠죠 + +08:52.400 --> 08:56.330 +다섯, 셋, 퀸, 둘 이렇게 나란히요 + +08:56.330 --> 08:57.350 +어떻게 생겼는지 보죠 + +08:57.380 --> 08:59.000 +라마 사진은 이미 봤어요 + +08:59.000 --> 09:01.010 +53편은 이미 봤어요 + +09:01.010 --> 09:03.560 +그리고 이건 퀸의 두 번째예요 + +09:03.560 --> 09:06.650 +보시면 알겠지만 그 중간쯤에 있어요 + +09:06.680 --> 09:08.840 +비트도 라마랑 비슷해요 + +09:08.840 --> 09:14.030 +시작과 끝이라는 시스템도 있고요 + +09:14.210 --> 09:16.850 +그 다음은 사용자, 그 다음은 조수죠 + +09:16.850 --> 09:19.250 +그 둘 사이의 어디쯤이죠 + +09:19.250 --> 09:23.870 +그 어정쩡한 단어가 없어요 + +09:23.870 --> 09:26.000 +헤더 스페셜 태그가 없어요 + +09:26.000 --> 09:28.440 +이런 식으로 접근해요 + +09:28.440 --> 09:36.810 +흥미로운 제3의 접근법이죠 또 다른 변종으로 특별한 토큰을 사용해요 + +09:37.740 --> 09:38.370 +좋아요 + +09:38.370 --> 09:41.580 +마지막으로 스타 코더 2를 보여드릴게요 + +09:41.610 --> 09:44.520 +코드 생성 모듈이에요 + +09:44.520 --> 09:46.440 +토큰라이저를 가져갈 거예요 + +09:46.440 --> 09:49.470 +이 코드를 저기에 Put 할게요 + +09:49.500 --> 09:54.570 +안녕 세계, 안녕 세계 변수를 선택하는 거죠 + +09:54.570 --> 09:55.980 +그러면 hello를 출력하죠 + +09:55.980 --> 09:57.090 +그리고 그 사람도요 + +09:57.090 --> 10:02.220 +그런 다음 같은 인코드를 사용해 토큰으로 바꾸죠 + +10:02.220 --> 10:09.000 +이제 해야 할 일은 각각의 토큰을 프린트하는 것입니다. 그리고 무엇이 get이 되었는지, 어떤 텍스트를 + +10:09.030 --> 10:11.730 +나타내는지 매핑하는 것이죠. 
+ +10:11.730 --> 10:18.840 +여기 보시면 처음에 뭔가 있었어요 데프가 토큰 하나에 들어갔고 + +10:18.840 --> 10:25.110 +hello_ world와 Person이 나왔죠 + +10:25.110 --> 10:33.210 +여기 이건 탭을 반영할 겁니다 그런 다음 hello, 사람 괄호 닫기를 인쇄하죠 + +10:33.210 --> 10:42.660 +대충 감이 오실 겁니다 스타 코더 2 토큰라이저는 토큰라이저로 영어보다는 토큰라이저 + +10:42.660 --> 10:46.140 +코드를 중심으로 디자인됐죠 + +10:46.500 --> 10:48.120 +실험할 수 있는 게 있어요 + +10:48.150 --> 10:54.060 +먼저, 다양한 토큰라이저를 사용해보고 텍스트에서 토큰으로의 매핑을 탐색해보세요 + +10:54.180 --> 10:55.590 +어떤 단어인지 찾아봐요 + +10:55.590 --> 11:02.040 +라마 안에 토큰이 하나 있는 가장 희귀한 단어를 찾아보세요 + +11:02.040 --> 11:06.360 +토큰이거나 가장 긴 단어였을 거예요 + +11:06.360 --> 11:09.720 +실험도 좀 하고 여러분을 만족시켜 드릴게요 + +11:10.170 --> 11:15.210 +꽤 복잡한 코드를 가지고 있다면 이 스타 코더의 토큰라이저 토큰화가 + +11:15.240 --> 11:22.260 +더 효율적으로 이루어질 것입니다 토큰라이저의 영어 버전보다 더 효율적이죠 + +11:22.650 --> 11:30.570 +그때쯤이면 여러분은 오픈 소스 토큰라이저의 전문가가 되어 다음 단계인 모델에 + +11:30.570 --> 11:33.180 +도전할 준비가 될 거예요 + +11:33.180 --> 11:35.160 +먼저 슬라이드로 돌아가죠 diff --git a/week5/community-contributions/subtitles/srts/59170235/en_US.srt b/week5/community-contributions/subtitles/srts/59170235/en_US.srt new file mode 100755 index 0000000..8efb655 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170235/en_US.srt @@ -0,0 +1,565 @@ +WEBVTT + +00:00.350 --> 00:08.060 +So here we are in Google Colab for our first collaborative session on the cloud using a GPU box. + +00:08.090 --> 00:15.110 +On that note, I've actually connected to a T4 box, one of the lower spec boxes. + +00:15.350 --> 00:16.880 +You can see my resources here. + +00:16.880 --> 00:24.170 +You can see that I just ran this and managed to fill up most of the GPU, but then I've just gone to + +00:24.200 --> 00:31.520 +runtime and done a restart session, which is why the GPU memory has come slamming down to nothing. + +00:31.520 --> 00:36.230 +Anyway, we'll now, uh, remove this screen so that it's not in our way. + +00:36.680 --> 00:37.550 +There we go. + +00:37.940 --> 00:41.210 +Uh, and look at our colab. 
+ +00:41.210 --> 00:47.930 +So I begin by introducing you to the world of pipelines, a reminder that the Transformers library with + +00:47.960 --> 00:51.710 +hugging face has these two API levels the pipelines. + +00:51.710 --> 00:55.670 +And then later we will come to Tokenizers and models for today. + +00:55.670 --> 01:02.920 +It's all about pipelines, high level API that allows you to run inference for common tasks in just + +01:02.920 --> 01:11.440 +a couple of lines of code, which makes it frictionless and easy to be using open source models in production. + +01:11.950 --> 01:16.360 +The way that we use it is, as I say, super simple. + +01:16.690 --> 01:23.920 +You first call pipeline and you pass in a string with the task that you want to do. + +01:23.920 --> 01:28.960 +So there's various things you can pass in here, various, uh, strings, and you will see what they + +01:28.960 --> 01:29.830 +are in just a moment. + +01:29.830 --> 01:35.200 +You get back a pipeline object and you can then call it with your input, and you get back the result. + +01:35.200 --> 01:36.760 +That's all there is to it. + +01:36.760 --> 01:37.930 +Simple as that. + +01:38.020 --> 01:43.390 +So I'm going to start by doing the installs the Transformers library of course, which is the heart + +01:43.390 --> 01:44.170 +of everything. + +01:44.170 --> 01:48.640 +Datasets is a library that gives us access to hugging faces, datasets. + +01:48.640 --> 01:53.890 +Diffusers actually are sort of companion library to transformers when you're talking about diffusion + +01:53.890 --> 01:55.210 +models that generate images. + +01:55.240 --> 02:02.110 +Often when I say transformers, I'm really referring to both transformers and its sibling library diffusers + +02:02.110 --> 02:05.250 +as well for any times that we're generating images. + +02:05.250 --> 02:07.920 +So I'm doing a pip install. 
+ +02:08.040 --> 02:13.830 +When you have an exclamation mark like that in front of a of a cell, it means that you want to run + +02:13.830 --> 02:15.330 +that as a terminal command. + +02:15.330 --> 02:16.530 +And so that's going to run. + +02:16.530 --> 02:23.970 +And if you didn't know that minus Q is it puts it on quiet mode so that we don't get all of the outputs + +02:24.000 --> 02:28.020 +of installing all of the packages, but it installs now installed particularly quickly for me because + +02:28.020 --> 02:29.790 +I already ran this notebook earlier. + +02:29.790 --> 02:33.210 +It may take 30s for you while those packages install. + +02:33.510 --> 02:36.030 +And then we're going to do some imports. + +02:37.590 --> 02:43.020 +And then once we've done that we're going to get started with pipelines. + +02:43.050 --> 02:44.490 +Get ready for this. + +02:44.490 --> 02:50.220 +So first of all sentiment analysis is something positive or negative a bit of text. + +02:50.220 --> 02:56.670 +And we're going to start by saying I am super excited to be on the way to LM mastery. + +02:56.790 --> 02:59.940 +I wonder if that's positive or negative. + +03:00.120 --> 03:03.500 +So a few things to note immediately. + +03:03.710 --> 03:10.250 +First of all, it warns us that no model was supplied and it's defaulted to this particular model. + +03:10.250 --> 03:16.700 +What it means there is that you can say model equals and tell it which of the models from the hugging + +03:16.730 --> 03:19.790 +face hub you would like to use as part of this pipeline. + +03:19.790 --> 03:25.160 +If you don't supply one, it just picks the default for that task, which is great for our purposes + +03:25.160 --> 03:25.880 +today. + +03:26.090 --> 03:32.600 +The other thing it's telling us is that the GPU is available in the environment, but we didn't tell + +03:32.600 --> 03:37.430 +it to use a GPU, which it correctly thinks is a bit strange of us. 
+ +03:37.460 --> 03:46.100 +And we can make it do that by saying device equals Cuda like that, and that command will tell it that + +03:46.100 --> 03:51.290 +we would like to use this pipeline, and we would like to take advantage of the GPU that we have. + +03:52.130 --> 03:57.050 +And so now we've run it, it's run on our GPU. + +03:57.050 --> 03:59.330 +And maybe we should also look at the results. + +03:59.330 --> 04:05.050 +The result is that it's considered a positive statement and its score, which is its level of confidence, + +04:05.080 --> 04:06.460 +is very high indeed. + +04:06.460 --> 04:08.080 +So that sounds good. + +04:08.080 --> 04:11.560 +I think that's probably a good interpretation of the sentence. + +04:11.560 --> 04:13.960 +Let's just try putting in the word not. + +04:13.990 --> 04:18.010 +I'm not super excited to be on the way to LLM mastery. + +04:18.040 --> 04:18.970 +Perish the thought. + +04:18.970 --> 04:22.120 +I hope you're not thinking that for a moment, but we should just check that. + +04:22.120 --> 04:27.940 +If we say that, it clearly identifies, then that the label is negative and it's pretty confident that + +04:27.940 --> 04:31.090 +that is indeed a negative statement to be making. + +04:31.090 --> 04:35.440 +So that works well, let's leave it on an enthusiastic note. + +04:35.470 --> 04:37.630 +We would want it to think otherwise. + +04:38.080 --> 04:40.720 +Named entity recognition is the next task. + +04:40.720 --> 04:44.890 +I'm just going to be rattling through these pipelines and you should try all of this yourself. + +04:44.920 --> 04:50.530 +Of course, named entity recognition is when you provide some text and you ask the model to identify + +04:50.530 --> 04:53.080 +what kinds of things are being referred to. + +04:53.110 --> 04:56.500 +This is a standard one that that I took. + +04:56.530 --> 05:00.310 +Barack Obama was the 44th president of the United States. 
+ +05:00.670 --> 05:13.350 +Uh, and we ask it to analyze that, And it responds here that the there's it responds with, um, two + +05:13.380 --> 05:15.840 +different named entities. + +05:15.960 --> 05:22.590 +If you can see it in this text right here, the first named entity is of type per as in person. + +05:22.950 --> 05:27.030 +Uh, it's got high confidence and the word is Barack Obama. + +05:27.120 --> 05:33.510 +And the second one is a lock for a location and the word is United States. + +05:33.510 --> 05:38.850 +And of course, it shows you tells you where that is in the input. + +05:39.300 --> 05:45.570 +It's a very common use case in data science, a great thing to have at your disposal and to be able + +05:45.570 --> 05:46.860 +to do so quickly. + +05:47.160 --> 05:53.010 +Question answering with context, you can create a question answering pipeline. + +05:53.010 --> 05:59.250 +And again, I'm using this device's Cuda to run it on the GPU and say, who was the 44th president of + +05:59.250 --> 06:02.640 +the US and provide it with some context here. + +06:02.640 --> 06:09.770 +So it has something to look up against and ask it to print the result there, and it's simply no surprise + +06:09.770 --> 06:11.480 +answers that result. + +06:11.480 --> 06:12.170 +I think. + +06:12.170 --> 06:14.750 +I'm not trying to show off the power of this model right now. + +06:14.750 --> 06:17.930 +I'm trying to show off the simplicity of the pipeline API. + +06:17.960 --> 06:23.420 +You can play with more sophisticated context and better questions, and I'd also encourage you to try + +06:23.660 --> 06:29.540 +passing in different models to explore some of the different models available on the Hugging Face hub. + +06:30.110 --> 06:32.780 +Text summarization is just as easy. + +06:32.810 --> 06:35.360 +You then of course the pipeline. + +06:35.360 --> 06:39.770 +The type is summarization and you can put in a ton of text. 
+ +06:39.800 --> 06:44.840 +Here I'm talking about I'm generally gushing about the hugging face Transformers library. + +06:45.290 --> 06:52.760 +I ask for the summarizer, I give it a min and a max length, and I get back a nice short and sharp + +06:52.760 --> 06:56.180 +sentence that summarizes that text. + +06:56.300 --> 07:00.530 +Um, and I think it's mostly just a sort of chop of what I already put in. + +07:00.530 --> 07:03.590 +So it didn't do a wonderful job, but it's a pretty simple model. + +07:03.590 --> 07:07.650 +Again, you can explore better summarizations from better models. + +07:07.920 --> 07:11.400 +When you have a moment, we can translate. + +07:11.430 --> 07:13.740 +Translation on to English. + +07:13.740 --> 07:14.610 +To French. + +07:15.060 --> 07:21.450 +The data scientists were truly amazed by the power and simplicity of the hugging Face pipeline API. + +07:21.930 --> 07:28.260 +And let's see how it performs for all of you French speakers out there. + +07:28.290 --> 07:34.350 +I'm not going to try and say that my high school French is good enough, but you can say, astonished + +07:34.350 --> 07:39.720 +at the power and simplicity of the API of the hugging face pipeline. + +07:39.720 --> 07:45.330 +And as far as my limited French skills can tell, that seems like it's a pretty robust translation. + +07:45.330 --> 07:52.230 +Very easy classification, or what's called zero shot classification, when we just give it an example + +07:52.230 --> 07:57.600 +and ask it to label it with some labels without giving it any prior examples. + +07:57.810 --> 08:03.720 +So we're giving it the text hugging face Transformers library is amazing, and asking it to classify + +08:03.750 --> 08:06.150 +technology or sports or politics. + +08:06.150 --> 08:08.660 +And let's see how it performs. + +08:09.050 --> 08:15.200 +Um, and it says labels, uh, for technology. + +08:15.230 --> 08:20.810 +It gives it a score of 95% and then a tiny score for sports and politics. 
+ +08:20.810 --> 08:21.500 +Politics. + +08:21.500 --> 08:23.090 +Low of them all. + +08:23.180 --> 08:29.360 +Uh, and that seems especially as we didn't particularly have any words that were directly tech related. + +08:29.360 --> 08:31.100 +That's not bad at all. + +08:31.430 --> 08:37.340 +And then last on this series of the really simple ones is text generation. + +08:37.370 --> 08:42.980 +Let's say if there's one thing I want you to remember about using Huggingface pipelines, it's and let's + +08:42.980 --> 08:43.160 +see. + +08:43.190 --> 08:46.910 +Obviously we're using a vanilla model, but let's see how it handles it. + +08:48.830 --> 08:53.240 +It's that any application that runs on Nautilus will generate the hugging face package as the target, + +08:53.240 --> 08:54.350 +just as if it had been compiled. + +08:54.350 --> 08:55.340 +So that's a bit random. + +08:55.340 --> 08:59.720 +It did better in some of my, uh, uh, prior tests. + +08:59.900 --> 09:02.030 +Uh, it's how good and resilient they are. + +09:02.060 --> 09:04.400 +As with any project, you'll need to remember your risk tolerance. + +09:04.430 --> 09:05.630 +Remember when to push it to the point. + +09:05.660 --> 09:09.520 +So of course, it's rambling based on how it thinks that begins. + +09:09.520 --> 09:13.210 +You can try some more amusing starts and see how it performs. + +09:13.210 --> 09:17.260 +You could also try bigger, beefier models and you'll get something. + +09:17.290 --> 09:20.980 +Of course, that is more accurate in terms of the text generation. + +09:21.700 --> 09:22.420 +Okay. + +09:22.420 --> 09:30.100 +And then of course, since you are now all experts in Multi-modality, let's just show you some image + +09:30.100 --> 09:30.970 +generation. 
+ +09:30.970 --> 09:35.800 +Everything up to this point has been using the Transformers library, and we're now flipping to the + +09:35.800 --> 09:41.590 +diffusers or diffusion style models, which is the architecture that generates images. + +09:41.860 --> 09:49.870 +And we are using the well-known stable diffusion model, the parameters a few more need to be passed + +09:49.870 --> 09:50.080 +in. + +09:50.110 --> 09:51.010 +As you'll see here. + +09:51.040 --> 09:54.010 +We need to tell it the kind of data type that we're using. + +09:54.400 --> 10:00.640 +And then once we've done that and and put it on the GPU, we can give it some text. + +10:00.640 --> 10:08.130 +And I'm saying here a class of data scientists learning about AI in the surreal style of Salvador Dali, + +10:08.400 --> 10:11.010 +and let's generate that image. + +10:11.010 --> 10:13.050 +This will take a little bit longer. + +10:13.050 --> 10:15.510 +It's a bit more of a meaty task. + +10:15.750 --> 10:21.390 +Um, it might take even longer for you because I have the benefit of having already run this once. + +10:21.390 --> 10:26.880 +And so the model has been downloaded from Huggingface and stored in its local directory. + +10:26.880 --> 10:30.960 +So it may take more like a minute when you run it. + +10:31.200 --> 10:32.970 +Um, but here we go. + +10:32.970 --> 10:43.500 +Here are here is the room full of, uh, uh, class of data scientists learning about AI in the surreal + +10:43.500 --> 10:44.490 +style of Dali. + +10:44.490 --> 10:45.960 +I love it, I love it. + +10:45.990 --> 10:47.760 +Do you look anything like these guys? + +10:47.970 --> 10:52.320 +Uh, I hope not only in your worst nightmare, but there we go. + +10:52.320 --> 10:55.110 +This is the surreal Dali. + +10:55.140 --> 10:57.840 +Scary, strange world. + +10:57.930 --> 11:01.350 +Uh, for a class of data scientists, I love it. 
+ +11:01.380 --> 11:07.530 +Now, you can substitute this for the flux model that I mentioned before, and you may remember, I + +11:07.560 --> 11:11.450 +flashed up the code at the last, uh, lectures. + +11:11.630 --> 11:13.160 +Uh, by all means, use that. + +11:13.160 --> 11:18.170 +You'll find it does take longer, and you will need to be running on a beefier box than the T4. + +11:18.200 --> 11:24.710 +If you do it on an A100, it'll take a couple of minutes and you will get a breathtakingly good image + +11:24.740 --> 11:28.070 +like the one that I showed in the last lecture. + +11:28.280 --> 11:34.520 +Uh, but, um, this seems pretty good for a quick, cheap model to me. + +11:35.120 --> 11:39.680 +And last but not least, some audio generation. + +11:39.680 --> 11:45.200 +Uh, we are going to use the text to speech pipeline now, and we are going to tell it which model we + +11:45.200 --> 11:45.440 +want. + +11:45.470 --> 11:49.040 +We want Microsoft's Speech five TTS. + +11:49.040 --> 11:55.820 +There is a little bit more that you have to provide into the model, something about the type of voice + +11:55.820 --> 11:56.900 +that it should use. + +11:56.900 --> 12:01.340 +And so there's a couple of lines to load that data set from hugging face. + +12:01.490 --> 12:04.460 +Um, and, and get it into the right shape. + +12:04.460 --> 12:10.270 +But once you've done that, we can just call our pipeline and we're going to say hi to an artificial + +12:10.270 --> 12:17.140 +intelligence engineer on the way to mastery and pass in this this speech voice. + +12:17.410 --> 12:24.460 +Um, then this is some code to then, uh, write that to, uh, to a wav file. + +12:24.820 --> 12:34.360 +Um, and uh, we will then be able to play it within the, um, this, uh, Colab notebook. + +12:34.360 --> 12:36.220 +So it ran pretty fast. + +12:36.220 --> 12:38.050 +Let's see how it sounds. + +12:38.050 --> 12:38.830 +Hi. 
+ +12:38.860 --> 12:43.450 +To an artificial intelligence engineer on the way to mastery. + +12:44.200 --> 12:45.520 +Seems pretty good to me. + +12:45.520 --> 12:50.590 +That would be an example of using pipeline for text to speech generation. + +12:50.800 --> 12:55.180 +And that wraps up this colab walkthrough of pipeline APIs. + +12:55.180 --> 13:00.910 +I, of course, will share this colab so that you can have access to this and very much encourage you + +13:00.910 --> 13:06.460 +to go through and try these yourself, experiment with different inputs and also experiment with different + +13:06.460 --> 13:09.880 +models that you'll find on the Huggingface hub. + +13:10.180 --> 13:10.990 +Enjoy. diff --git a/week5/community-contributions/subtitles/srts/59170235/ja_JP.srt b/week5/community-contributions/subtitles/srts/59170235/ja_JP.srt new file mode 100755 index 0000000..cfa53d0 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170235/ja_JP.srt @@ -0,0 +1,472 @@ +WEBVTT + +00:00.350 --> 00:08.060 +というわけで、 Google ColabでGPUボックスを使ったクラウド上での最初の共同セッションを行うことになった。 + +00:08.090 --> 00:15.110 +その点、 私はT4ボックスという低スペックのボックスに接続したことがある。 + +00:15.350 --> 00:16.880 +私のリソースはここで見ることができる。 + +00:16.880 --> 00:24.170 +GPUのほとんどを満タンにすることができたが、 ランタイムでセッションをリスタートしたところ、 + +00:24.200 --> 00:31.520 +GPUメモリがゼロになった。 + +00:31.520 --> 00:36.230 +とにかく、 このスクリーンを外して、 邪魔にならないようにしよう。 + +00:36.680 --> 00:37.550 +これでよし。 + +00:37.940 --> 00:41.210 +それと、 僕らのコラボを見てよ。 + +00:41.210 --> 00:51.710 +というわけで、 パイプラインの世界を紹介することから始めよう。 ハグしている顔のトランスフォーマー・ライブラリーには、 パイプラインという2つのAPIレベルがあることを思い出してほしい。 + +00:51.710 --> 00:55.670 +そして後ほど、 トーケナイザーと今日のモデルを紹介する。 + +00:55.670 --> 01:11.440 +パイプラインがすべてであり、 数行のコードで一般的なタスクの推論を実行できる高レベルのAPIである。 + +01:11.950 --> 01:16.360 +使い方は超シンプルだ。 + +01:16.690 --> 01:23.920 +まずパイプラインを呼び出し、 実行したいタスクを文字列で渡す。 + +01:23.920 --> 01:29.830 +だから、 ここにいろいろなものを、 いろいろな文字列を渡すことができる。 + +01:29.830 --> 01:35.200 +パイプライン・オブジェクトが返ってくるので、 それを入力で呼び出すと、 結果が返ってくる。 + +01:35.200 --> 01:36.760 +それだけだ。 + 
+01:36.760 --> 01:37.930 +単純なことだ。 + +01:38.020 --> 01:44.170 +だから、 まずはトランスフォーマーのライブラリーをインストールするところから始めようと思う。 + +01:44.170 --> 01:48.640 +Datasetsは、 ハグする顔、 データセットにアクセスできるライブラリだ。 + +01:48.640 --> 01:55.210 +ディフューザーは、 画像を生成する拡散モデルについて語るとき、 トランスフォーマーのコンパニオン・ライブラリーのようなものだ。 + +01:55.240 --> 02:05.250 +私がトランスフォーマーと言う場合、 画像を生成するときのために、 トランスフォーマーとその兄弟ライブラリであるディフューザーの両方を指すことが多い。 + +02:05.250 --> 02:07.920 +だからpipでインストールしている。 + +02:08.040 --> 02:15.330 +セルのaの前に感嘆符がある場合は、 それをターミナル・コマンドとして実行することを意味する。 + +02:15.330 --> 02:16.530 +そして、 それは実行される。 + +02:16.530 --> 02:29.790 +マイナスQはクワイエットモードにすることで、 すべてのパッケージをインストールする際の出力が出ないようにする。 + +02:29.790 --> 02:33.210 +これらのパッケージがインストールされるまで、 30秒かかるかもしれない。 + +02:33.510 --> 02:36.030 +そして、 輸入をするんだ。 + +02:37.590 --> 02:43.020 +それが終わったら、 パイプラインに取り掛かろう。 + +02:43.050 --> 02:44.490 +覚悟しておけ。 + +02:44.490 --> 02:50.220 +つまり、 まずセンチメント分析とは、 肯定的または否定的なテキストを分析することである。 + +02:50.220 --> 02:56.670 +そして、 LMマスターへの道を歩んでいることにとても興奮している、 というところから始めるつもりだ。 + +02:56.790 --> 02:59.940 +それがプラスなのかマイナスなのか。 + +03:00.120 --> 03:03.500 +というわけで、 さっそくいくつか注意しておきたいことがある。 + +03:03.710 --> 03:10.250 +まず第一に、 モデルが提供されていないことを警告し、 この特定のモデルがデフォルトになっている。 + +03:10.250 --> 03:19.790 +つまり、 model equalsと言って、 このパイプラインの一部として使用したい抱きつき顔ハブのモデルを指示することができるということだ。 + +03:19.790 --> 03:25.880 +何も指定しなければ、 そのタスクのデフォルトを選ぶだけだ。 + +03:26.090 --> 03:32.600 +もうひとつは、 GPUが使用可能な環境であることを教えてくれているのだが、 + +03:32.600 --> 03:37.430 +GPUを使用するように指示していない。 + +03:37.460 --> 03:46.100 +このコマンドは、 このパイプラインを使いたい、 我々が持っているGPUを活用したい、 + +03:46.100 --> 03:51.290 +ということを伝える。 + +03:52.130 --> 03:57.050 +GPUで実行した。 + +03:57.050 --> 03:59.330 +そして、 結果にも目を向けるべきかもしれない。 + +03:59.330 --> 04:06.460 +その結果、 この発言は肯定的な発言とみなされ、 その信頼度を示すスコアは実に高いものとなった。 + +04:06.460 --> 04:08.080 +それはいいことだ。 + +04:08.080 --> 04:11.560 +おそらく、 この文章の解釈は正しいと思う。 + +04:11.560 --> 04:13.960 +notという単語を入れてみよう。 + +04:13.990 --> 04:18.010 +LLMマスターへの道を歩んでいることに、 それほど興奮はしていない。 + +04:18.040 --> 04:18.970 +考えもしなかった。 + +04:18.970 --> 04:22.120 +一瞬でもそう思っていないことを願うが、 一応確認しておこう。 + 
+04:22.120 --> 04:31.090 +もしそう言うなら、 そのレッテルは否定的なものであり、 それが本当に否定的な発言であることは間違いない。 + +04:31.090 --> 04:35.440 +というわけで、 うまくいった。 + +04:35.470 --> 04:37.630 +そうでないと思わせたいのだ。 + +04:38.080 --> 04:40.720 +名前付きエンティティ認識は次のタスクである。 + +04:40.720 --> 04:44.890 +私はパイプラインをガチャガチャやるだけだから、 全部自分でやってみてほしい。 + +04:44.920 --> 04:53.080 +もちろん、 名前付きエンティティ認識とは、 テキストを提供し、 それがどのようなものを指しているかを識別するようモデルに求めることである。 + +04:53.110 --> 04:56.500 +これは私が撮った標準的なものだ。 + +04:56.530 --> 05:00.310 +バラク・オバマは第44代アメリカ合衆国大統領である。 + +05:00.670 --> 05:15.840 +そして、 それを分析するように頼むと、 ここに2つの異なる名前の実体があると答える。 + +05:15.960 --> 05:22.590 +この文章を見ていただければわかると思うが、 最初の名前付きエンティティはperersonのようにper型である。 + +05:22.950 --> 05:27.030 +バラク・オバマだ。 + +05:27.120 --> 05:33.510 +そして2つ目は場所のロックで、 単語はUnited States。 + +05:33.510 --> 05:38.850 +もちろん、 それが入力のどこにあるのかを教えてくれる。 + +05:39.300 --> 05:46.860 +データ・サイエンスでは非常に一般的なユースケースであり、 自由に使えて、 素早く実行できる素晴らしいものだ。 + +05:47.160 --> 05:53.010 +コンテキストを利用した質問回答では、 質問回答パイプラインを作成することができます。 + +05:53.010 --> 06:02.640 +そしてまた、 このデバイスのCudaを使ってGPUで実行し、 第44代アメリカ大統領は誰だったのか? 
+ +06:02.640 --> 06:11.480 +だから、 何か見上げるものがあって、 そこに結果をプリントするように頼む。 + +06:11.480 --> 06:12.170 +私はそう思う。 + +06:12.170 --> 06:14.750 +私は今、 このモデルのパワーを誇示したいわけではない。 + +06:14.750 --> 06:17.930 +パイプラインAPIのシンプルさをアピールしたいんだ。 + +06:17.960 --> 06:23.420 +より洗練された文脈や、 より良い質問で遊ぶことができます。 また、 ハギング・フェイス・ハブで利用可能なさまざまなモデルのいくつかを探求するために、 + +06:23.660 --> 06:29.540 +異なるモデルでパスを試してみることをお勧めします。 + +06:30.110 --> 06:32.780 +テキストの要約も簡単だ。 + +06:32.810 --> 06:35.360 +もちろん、 パイプラインもそうだ。 + +06:35.360 --> 06:39.770 +タイプは要約で、 大量のテキストを入れることができる。 + +06:39.800 --> 06:44.840 +ここでは、 私は一般的に抱擁顔トランスフォーマーライブラリについて噴出していることについて話している。 + +06:45.290 --> 06:56.180 +要約を依頼し、 長さの最小値と最大値を指定すると、 その文章を要約した短くてシャープな文章が返ってくる。 + +06:56.300 --> 07:00.530 +それに、 ほとんど僕がすでに入れたものを切り刻んだだけなんだ。 + +07:00.530 --> 07:03.590 +だから素晴らしい仕事をしたとは言えないが、 かなりシンプルなモデルだ。 + +07:03.590 --> 07:07.650 +繰り返しになるが、 より優れたモデルから、 より優れた要約を探ることができる。 + +07:07.920 --> 07:11.400 +お時間があれば通訳しますよ。 + +07:11.430 --> 07:13.740 +英語への翻訳。 + +07:13.740 --> 07:14.610 +フランス人へ。 + +07:15.060 --> 07:21.450 +データサイエンティストたちは、 Hugging FaceパイプラインAPIのパワーとシンプルさに本当に驚いていた。 + +07:21.930 --> 07:28.260 +そして、 フランス語を話す皆さんのために、 その性能を見てみよう。 + +07:28.290 --> 07:39.720 +私の高校生のフランス語で十分だと言うつもりはないが、 抱きつき顔のパイプラインのAPIのパワーとシンプルさに驚かされたと言えるだろう。 + +07:39.720 --> 07:45.330 +そして、 私の乏しいフランス語能力を見る限り、 かなりしっかりした翻訳のようだ。 + +07:45.330 --> 07:52.230 +非常に簡単な分類、 あるいはゼロショット分類と呼ばれるもので、 事前の例を与えることなく、 + +07:52.230 --> 07:57.600 +ただ例を与えてラベルを付けるよう求めるものだ。 + +07:57.810 --> 08:06.150 +だから私たちは、 テキストを抱きしめている顔のトランスフォーマーのライブラリーは素晴らしいし、 テクノロジーやスポーツや政治を分類するよう求めている。 + +08:06.150 --> 08:08.660 +そして、 そのパフォーマンスを見てみよう。 + +08:09.050 --> 08:15.200 +ええと、 ラベルには、 ええと、 テクノロジー用って書いてある。 + +08:15.230 --> 08:20.810 +点数は95%で、 スポーツと政治の点数はごくわずかだ。 + +08:20.810 --> 08:21.500 +政治だ。 + +08:21.500 --> 08:23.090 +その中で最も低い。 + +08:23.180 --> 08:29.360 +特に技術に直接関係するような言葉はなかったからね。 + +08:29.360 --> 08:31.100 +全然悪くないよ。 + +08:31.430 --> 08:37.340 +そして、 このシリーズ最後の本当にシンプルなものは、 テキスト生成だ。 + +08:37.370 --> 08:43.160 +Huggingfaceのパイプラインを使う上で覚えておいてほしいことがあるとすれば、 
それは......そうだな。 + +08:43.190 --> 08:46.910 +明らかにバニラモデルを使用しているが、 どのように処理されるか見てみよう。 + +08:48.830 --> 08:54.350 +それは、 ノーチラス上で実行されるアプリケーションは、 あたかもコンパイルされたかのように、 ターゲットとして抱きつき顔パッケージを生成するということだ。 + +08:54.350 --> 08:55.340 +だから、 ちょっとランダムなんだ。 + +08:55.340 --> 08:59.720 +私のいくつかの、 ええと、 ええと、 以前のテストではもっとうまくいった。 + +08:59.900 --> 09:02.030 +それは、 彼らがどれだけ優れているか、 回復力があるかということだ。 + +09:02.060 --> 09:04.400 +どんなプロジェクトでもそうだが、 自分のリスク許容度を覚えておく必要がある。 + +09:04.430 --> 09:05.630 +追い込むタイミングを忘れない。 + +09:05.660 --> 09:09.520 +だからもちろん、 それがどのように始まると考えているかに基づいて漫然としている。 + +09:09.520 --> 09:13.210 +もっと面白いスタートを試してみて、 そのパフォーマンスを見てみるといい。 + +09:13.210 --> 09:17.260 +もっと大きくて頑丈なモデルを試してみるのもいい。 + +09:17.290 --> 09:20.980 +もちろん、 テキスト生成という点では、 その方がより正確だ。 + +09:21.700 --> 09:22.420 +オーケー。 + +09:22.420 --> 09:30.970 +そしてもちろん、 皆さんはマルチモダリティの専門家ですから、 画像生成をお見せしましょう。 + +09:30.970 --> 09:35.800 +ここまではすべてトランスフォーマー・ライブラリーを使ってきましたが、 ここからはディフューザー(拡散スタイル・モデル)、 + +09:35.800 --> 09:41.590 +つまり画像を生成するアーキテクチャに切り替えます。 + +09:41.860 --> 09:50.080 +そして、 私たちはよく知られた安定拡散モデルを使用している。 + +09:50.110 --> 09:51.010 +ご覧の通りだ。 + +09:51.040 --> 09:54.010 +使用するデータ型を伝える必要がある。 + +09:54.400 --> 10:00.640 +そうしてGPUに表示させたら、 テキストを表示させることができる。 + +10:00.640 --> 10:08.130 +そして、 サルバドール・ダリのシュールなスタイルでAIについて学ぶデータサイエンティストのクラスで、 そのイメージを生成しよう、 + +10:08.400 --> 10:11.010 +と言っているんだ。 + +10:11.010 --> 10:13.050 +これにはもう少し時間がかかる。 + +10:13.050 --> 10:15.510 +もう少し肉付きのいい仕事だ。 + +10:15.750 --> 10:21.390 +ええと、 あなたにとってはもっと時間がかかるかもしれない。 + +10:21.390 --> 10:26.880 +こうしてモデルはHuggingfaceからダウンロードされ、 ローカルディレクトリに保存された。 + +10:26.880 --> 10:30.960 +だから、 実行すると1分以上かかるかもしれない。 + +10:31.200 --> 10:32.970 +うーん、 でもこれでいい。 + +10:32.970 --> 10:44.490 +ダリの超現実的なスタイルでAIについて学ぶデータ・サイエンティストのクラスがここにある。 + +10:44.490 --> 10:45.960 +大好きだよ。 + +10:45.990 --> 10:47.760 +この人たちに似ている? 
+ +10:47.970 --> 10:52.320 +君の最悪の悪夢だけでないことを祈るよ。 + +10:52.320 --> 10:55.110 +これがシュールなダリだ。 + +10:55.140 --> 10:57.840 +怖くて、 奇妙な世界だ。 + +10:57.930 --> 11:01.350 +データサイエンティストのクラスとしては、 とても気に入っている。 + +11:01.380 --> 11:11.450 +前回の講義でコードをフラッシュアップしたのを覚えているだろうか。 + +11:11.630 --> 11:13.160 +ああ、 ぜひそれを使ってくれ。 + +11:13.160 --> 11:18.170 +時間がかかることがわかるだろうし、 T4よりも頑丈なボックスで動作させる必要がある。 + +11:18.200 --> 11:28.070 +A100でやれば2、 3分で終わるし、 前回の講義でお見せしたような息をのむような良い画像が得られる。 + +11:28.280 --> 11:34.520 +ええと、 でも、 手っ取り早く安いモデルとしては、 かなりいいんじゃないかと思うんだけど。 + +11:35.120 --> 11:39.680 +そして最後が、 オーディオの生成だ。 + +11:39.680 --> 11:45.440 +これから音声合成パイプラインを使って、 どのモデルが欲しいか指示します。 + +11:45.470 --> 11:49.040 +マイクロソフトのSpeech five TTSが欲しい。 + +11:49.040 --> 11:56.900 +モデルにはもう少し、 音声の種類を設定する必要があります。 + +11:56.900 --> 12:01.340 +そして、 ハギング・フェイスのデータセットをロードするための行がいくつかある。 + +12:01.490 --> 12:04.460 +そして、 正しい形に整えるんだ。 + +12:04.460 --> 12:10.270 +でも、 それができたら、 パイプラインに電話して、 人工知能エンジニアに挨拶して、 + +12:10.270 --> 12:17.140 +このスピーチ音声でマスターとパスへの道を歩むことになるんだ。 + +12:17.410 --> 12:24.460 +これは、 それをwavファイルに書き出すコードです。 + +12:24.820 --> 12:34.360 +そうすれば、 このColabノートブックの中で再生できるようになる。 + +12:34.360 --> 12:36.220 +だからかなり速く走れた。 + +12:36.220 --> 12:38.050 +どう聞こえるか見てみよう。 + +12:38.050 --> 12:38.830 +こんにちは。 + +12:38.860 --> 12:43.450 +マスターへの道を歩む人工知能エンジニアへ。 + +12:44.200 --> 12:45.520 +私にはかなりいいように思える。 + +12:45.520 --> 12:50.590 +これは音声合成にパイプラインを使う例だろう。 + +12:50.800 --> 12:55.180 +パイプラインAPIのウォークスルーはこれで終わりです。 + +12:55.180 --> 13:00.910 +もちろん、 私はこのコラボを共有し、 みなさんがこのコラボにアクセスできるようにします。 そして、 みなさん自身がこのコラボにアクセスし、 + +13:00.910 --> 13:09.880 +さまざまなインプットを試し、 Huggingfaceのハブにあるさまざまなモデルを試してみることを強くお勧めします。 + +13:10.180 --> 13:10.990 +楽しもう。 diff --git a/week5/community-contributions/subtitles/srts/59170235/ko_KR.srt b/week5/community-contributions/subtitles/srts/59170235/ko_KR.srt new file mode 100755 index 0000000..b287a30 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170235/ko_KR.srt @@ -0,0 +1,550 @@ +WEBVTT + +00:00.350 --> 00:08.060 +구글 Colab입니다 GPU 박스를 이용한 
클라우드에서의 첫 협업 세션이죠 + +00:08.090 --> 00:15.110 +T4 박스에 연결했습니다 하위 사양 중 하나죠 + +00:15.350 --> 00:16.880 +여기 리소스가 보이시죠 + +00:16.880 --> 00:24.170 +이걸 실행하면 GPU 대부분을 채울 수 있죠 런타임으로 가서 재시작 + +00:24.200 --> 00:31.520 +세션을 했어요 그래서 GPU 메모리가 아무것도 안 된 거죠 + +00:31.520 --> 00:36.230 +어쨌든 방해되지 않게 이 스크린을 제거할게요 + +00:36.680 --> 00:37.550 +됐어요 + +00:37.940 --> 00:41.210 +콜랍 좀 보세요 + +00:41.210 --> 00:47.930 +파이프라인의 세계로 시작하겠습니다 얼굴을 끌어안는 트랜스포머 라이브러리에 + +00:47.960 --> 00:51.710 +파이프라인이라는 두 API 레벨이 있어요 + +00:51.710 --> 00:55.670 +오늘의 토큰라이저와 모델에 관해선 나중에 얘기하죠 + +00:55.670 --> 01:02.920 +파이프라인에 관한 거죠 상위 레벨 API 말이에요 일반적인 작업에 대한 추론을 실행하게 + +01:02.920 --> 01:11.440 +합니다 코드 두어 줄로요 프로덕션에서 마찰 없이 오픈 소스 모델을 쉽게 사용하게 해주죠 + +01:11.950 --> 01:16.360 +우리가 사용하는 방법은 아주 간단해요 + +01:16.690 --> 01:23.920 +파이프라인을 호출하고 문자열을 전달합니다 원하는 작업을 전달하죠 + +01:23.920 --> 01:28.960 +여러분이 넘길 수 있는 다양한 것들이 있어요 다양한 문자열이죠 잠시 후에 그게 뭔지 보실 + +01:28.960 --> 01:29.830 +수 있어요 + +01:29.830 --> 01:35.200 +파이프라인 객체를 얻어서 입력으로 호출할 수 있고 get 결과도 얻을 수 있죠 + +01:35.200 --> 01:36.760 +그게 다예요 + +01:36.760 --> 01:37.930 +간단해요 + +01:38.020 --> 01:44.170 +트랜스포머 라이브러리 설치부터 시작할게요 모든 것의 핵심이죠 + +01:44.170 --> 01:48.640 +데이터셋은 얼굴을 포옹하는 데이터셋 라이브러리예요 + +01:48.640 --> 01:53.890 +디퓨저는 트랜스포머의 동반 라이브러리 같은 거예요 이미지를 만들어내는 디퓨전 + +01:53.890 --> 01:55.210 +모델 말이에요 + +01:55.240 --> 02:02.110 +트랜스포머라고 하면 트랜스포머와 형제 라이브러리 디퓨저를 말하는 거예요 + +02:02.110 --> 02:05.250 +이미지를 만들 때 쓰는 거죠 + +02:05.250 --> 02:07.920 +파이프를 설치하고 있어요 + +02:08.040 --> 02:13.830 +a 셀 앞에 저런 느낌표가 있으면 터미널 명령으로 실행하고 싶다는 + +02:13.830 --> 02:15.330 +뜻이에요 + +02:15.330 --> 02:16.530 +실행될 거예요 + +02:16.530 --> 02:23.970 +이 마이너스 Q를 모르신다면 침묵 모드로 설정합니다. 모든 패키지를 설치하는 출력을 얻지 않기 위해서입니다. 하지만 지금은 + +02:24.000 --> 02:28.020 +설치가 빠르게 이루어지고 있습니다. 왜냐하면 아까 이 노트북을 실행해 + +02:28.020 --> 02:29.790 +봤기 때문이죠. 
+ +02:29.790 --> 02:33.210 +패키지가 설치되는 동안 30이 걸릴 수도 있어요 + +02:33.510 --> 02:36.030 +그런 다음 수입을 할 거예요 + +02:37.590 --> 02:43.020 +Get을 완료하고 나면 파이프라인으로 넘어가죠 + +02:43.050 --> 02:44.490 +Get it, Get it 준비하세요 + +02:44.490 --> 02:50.220 +먼저 감정 분석은 긍정적이거나 부정적인 비트예요 + +02:50.220 --> 02:56.670 +이 말부터 할게요 LM 마스터로 가는 길에 들어서 정말 신나요 + +02:56.790 --> 02:59.940 +좋은 건지 나쁜 건지 모르겠네요 + +03:00.120 --> 03:03.500 +몇 가지 주의할 점이 있어요 + +03:03.710 --> 03:10.250 +먼저, 어떤 모델도 제공되지 않았고 기본값이 이 모델에 있다고 경고하네요 + +03:10.250 --> 03:16.700 +무슨 뜻이냐면 모델 =이라고 입력하면 안는 얼굴 허브에서 어떤 모델을 이 파이프라인의 + +03:16.730 --> 03:19.790 +일부로 사용할 건지 정할 수 있다는 거죠 + +03:19.790 --> 03:25.160 +하나를 제공하지 않으면 해당 작업에 대한 기본값을 선택합니다 오늘 우리 목적에 아주 + +03:25.160 --> 03:25.880 +유용하죠 + +03:26.090 --> 03:32.600 +또 다른 건 환경에서 GPU가 사용 가능하지만 GPU를 사용하라고 하진 않았다는 + +03:32.600 --> 03:37.430 +겁니다 비트는 우리가 좀 이상하다고 생각하죠 + +03:37.460 --> 03:46.100 +장치가 CUDOW와 같다고 하면 그렇게 할 수 있어요 그럼 그 명령은 이 파이프라인을 사용하고 + +03:46.100 --> 03:51.290 +싶다고 하죠 우리가 가진 GPU 덕을 보고 싶다고요 + +03:52.130 --> 03:57.050 +이제 실행했어요 GPU 상에서 실행됐죠 + +03:57.050 --> 03:59.330 +결과도 살펴보는 게 좋겠어요 + +03:59.330 --> 04:05.050 +긍정적인 성명서로 간주되고 있습니다 점수인 자신감 수준도 + +04:05.080 --> 04:06.460 +매우 높고요 + +04:06.460 --> 04:08.080 +좋은 것 같아요 + +04:08.080 --> 04:11.560 +그 문장을 잘 해석한 것 같아요 + +04:11.560 --> 04:13.960 +안 된다고만 해두죠 + +04:13.990 --> 04:18.010 +LLM 마스터가 되는 게 썩 기쁘진 않아요 + +04:18.040 --> 04:18.970 +그럴 리가요 + +04:18.970 --> 04:22.120 +그런 생각은 안 하셨으면 좋겠는데 확인해 보죠 + +04:22.120 --> 04:27.940 +우리가 그렇게 말하면 라벨이 부정적이라는 걸 확실히 인식하고 부정적 + +04:27.940 --> 04:31.090 +선언이라는 걸 확신할 수 있죠 + +04:31.090 --> 04:35.440 +잘 작동하네요 열정적으로 마무리하죠 + +04:35.470 --> 04:37.630 +그렇게 생각하길 바라죠 + +04:38.080 --> 04:40.720 +독립체 인식이 다음 작업이죠 + +04:40.720 --> 04:44.890 +난 파이프라인이나 통과할 테니 당신도 직접 해 봐요 + +04:44.920 --> 04:50.530 +물론 엔터티 인지션은 여러분이 텍스트를 제공하고 모델에게 어떤 종류의 + +04:50.530 --> 04:53.080 +것이 참조되는지 확인하는 거죠 + +04:53.110 --> 04:56.500 +이건 제가 찍은 표준 사진이에요 + +04:56.530 --> 05:00.310 +버락 오바마는 미국의 44대 대통령이었죠 + +05:00.670 --> 05:13.350 +분석을 요청하면 여기에 반응을 해요 두 가지 다른 
이름으로 + +05:13.380 --> 05:15.840 +응답하죠 + +05:15.960 --> 05:22.590 +여기 이 텍스트를 보시면 첫 번째 명명된 엔터티의 형식은 사람 이름 당 퍼예요 + +05:22.950 --> 05:27.030 +자신감이 넘치고 이름은 버락 오바마예요 + +05:27.120 --> 05:33.510 +두 번째는 장소 이름인데 미국이라는 뜻이에요 + +05:33.510 --> 05:38.850 +입력에서 어디에 있는지 알려주는 거죠 + +05:39.300 --> 05:45.570 +데이터 과학에서는 아주 흔한 사용 사례입니다 원하는 대로 빨리 할 수 + +05:45.570 --> 05:46.860 +있으면 좋죠 + +05:47.160 --> 05:53.010 +컨텍스트를 이용한 질문 답변 파이프라인을 만들 수 있어요 + +05:53.010 --> 05:59.250 +다시 한 번 이 장치의 CUDU를 이용해 GPU에서 실행하고 누가 미국의 44대 대통령이었는지 + +05:59.250 --> 06:02.640 +컨텍스트를 제공한다고 하죠 + +06:02.640 --> 06:09.770 +검색할 게 있고 결과를 프린트하도록 요청하는데 결과에 대한 놀라운 + +06:09.770 --> 06:11.480 +답변이 없어요 + +06:11.480 --> 06:12.170 +아마도요 + +06:12.170 --> 06:14.750 +이 모델의 힘을 과시하려는 게 아니에요 + +06:14.750 --> 06:17.930 +파이프라인 API의 단순함을 보여주려는 거예요 + +06:17.960 --> 06:23.420 +더 정교한 컨텍스트와 더 나은 질문을 가지고 놀 수 있습니다 다양한 모델을 통과시켜 + +06:23.660 --> 06:29.540 +보는 것도 추천합니다 페이스 허브에서 이용 가능한 다양한 모델을 탐구해 보세요 + +06:30.110 --> 06:32.780 +텍스트 요약도 간단해요 + +06:32.810 --> 06:35.360 +파이프라인도 빼놓을 수 없죠 + +06:35.360 --> 06:39.770 +유형은 요약이고 많은 텍스트를 쓸 수 있어요. Tool karaoke Tool karaoke. 
+ +06:39.800 --> 06:44.840 +포옹하는 트랜스포머 라이브러리 얘기를 하고 있네요 + +06:45.290 --> 06:52.760 +요약본을 요청하고 짧은 시간 동안 최대 길이로 말하면 get 절차가 짧고 + +06:52.760 --> 06:56.180 +명료한 문장으로 돌아오죠 + +06:56.300 --> 07:00.530 +이미 넣은 걸 토막 내서 얹은 거예요 + +07:00.530 --> 07:03.590 +잘 만들진 못했지만 단순한 모형이었어요 + +07:03.590 --> 07:07.650 +더 나은 모델에서 더 나은 요약본을 탐구할 수 있어요 + +07:07.920 --> 07:11.400 +시간 되시면 통역해 드릴게요 + +07:11.430 --> 07:13.740 +영어로 번역해 주세요 + +07:13.740 --> 07:14.610 +프랑스어로요 + +07:15.060 --> 07:21.450 +데이터 과학자들은 어징 페이스 파이프라인 API의 힘과 단순함에 정말 놀랐죠 + +07:21.930 --> 07:28.260 +프랑스어를 구사하는 모든 분께 어떻게 작동하는지 보죠 + +07:28.290 --> 07:34.350 +고등학교 때 배운 프랑스어가 뛰어나다는 말은 안 하겠지만 허징 페이스 파이프라인의 + +07:34.350 --> 07:39.720 +강력하고 단순한 API에 놀랐다고는 말해 줄 수 있죠 + +07:39.720 --> 07:45.330 +제 프랑스어 실력으로는 꽤 확실한 번역인 것 같네요 + +07:45.330 --> 07:52.230 +아주 쉬운 분류, 혹은 제로 샷 분류라 불리는 건 그냥 예만 주고 일부 + +07:52.230 --> 07:57.600 +라벨을 붙이라고 할 때죠 이전 예는 주지 않고요 + +07:57.810 --> 08:03.720 +그래서 얼굴 껴안기 트랜스포머 라이브러리는 굉장하다고 표현하고 기술, 스포츠, + +08:03.750 --> 08:06.150 +정치 등으로 분류하라고 했어요 + +08:06.150 --> 08:08.660 +어떻게 작동하는지 보죠 + +08:09.050 --> 08:15.200 +기술 라벨이라고 적혀 있어요 + +08:15.230 --> 08:20.810 +95%의 점수를 받았지만 스포츠와 정치학에서는 아주 낮은 점수를 받았죠 + +08:20.810 --> 08:21.500 +정치요 + +08:21.500 --> 08:23.090 +가장 낮은 수치죠 + +08:23.180 --> 08:29.360 +특히나 기술과 직접적으로 관련된 단어가 없었으니까요 + +08:29.360 --> 08:31.100 +나쁘지 않네요 + +08:31.430 --> 08:37.340 +마지막으로 이 시리즈의 정말 간단한 건 텍스트 생성이에요 + +08:37.370 --> 08:43.160 +H깅페이스 파이프라인을 사용할 때 기억해야 할 게 하나 있다면 그건∙∙∙ 어디 보죠 + +08:43.190 --> 08:46.910 +바닐라 모델을 사용하고 있지만 어떻게 처리하는지 보죠 + +08:48.830 --> 08:53.240 +노틸러스호에서 실행되는 모든 응용 프로그램은 컴파일된 것처럼 안는 얼굴 패키지를 + +08:53.240 --> 08:54.350 +목표로 하죠 + +08:54.350 --> 08:55.340 +비트가 좀 뜬금없죠 + +08:55.340 --> 08:59.720 +전에 했던 검사에서는 결과가 더 좋았어요 + +08:59.900 --> 09:02.030 +얼마나 잘 회복하는지가 중요하죠 + +09:02.060 --> 09:04.400 +모든 프로젝트가 그렇듯 위험 부담을 감수해야 해요 + +09:04.430 --> 09:05.630 +언제 끝까지 밀어붙여야 하는지 기억하세요 + +09:05.660 --> 09:09.520 +어떻게 시작됐는지에 따라 횡설수설하는 거죠 + +09:09.520 --> 09:13.210 +좀 더 재미있는 시작을 해보고 어떻게 되는지 보세요 + +09:13.210 --> 09:17.260 +좀 
더 크고 육중한 모델도 시도해 볼 수 있어요 get it get it get it get it get it get it + +09:17.290 --> 09:20.980 +물론 텍스트 생성에 있어서는 그게 더 정확하죠 + +09:21.700 --> 09:22.420 +네 + +09:22.420 --> 09:30.970 +이제 여러분은 다중 양식의 전문가들이니 이미지 생성을 보여드리죠 + +09:30.970 --> 09:35.800 +지금까지는 트랜스포머 라이브러리를 사용했고 디퓨전 스타일 + +09:35.800 --> 09:41.590 +모델로 영상을 전송하고 있어요 이미지를 생성하는 아키텍처죠 + +09:41.860 --> 09:50.080 +잘 알려진 안정적 확산 모델을 사용하고 있는데 몇 가지 변수를 더 통과시켜야 해요 + +09:50.110 --> 09:51.010 +곧 보시게 될 거예요 + +09:51.040 --> 09:54.010 +우리가 사용하는 데이터 타입을 알려줘야 해요 + +09:54.400 --> 10:00.640 +그 작업을 마치고 GPU 위에 얹으면 텍스트를 줄 수 있어요 + +10:00.640 --> 10:08.130 +살바도르 달리의 초현실적인 스타일로 인공지능을 연구하는 데이터 과학자들이죠 + +10:08.400 --> 10:11.010 +그 이미지를 생성해보죠 + +10:11.010 --> 10:13.050 +이 비트는 시간이 좀 더 걸릴 거예요 + +10:13.050 --> 10:15.510 +좀 더 비트 있는 작업이죠 + +10:15.750 --> 10:21.390 +당신은 더 오래 걸릴 수도 있어요 저는 이미 한 번 실행해봤으니까요 + +10:21.390 --> 10:26.880 +H깅페이스에서 모델을 다운로드해 로컬 디렉터리에 저장했어요 + +10:26.880 --> 10:30.960 +실행하는 데 1분 정도 걸릴 수도 있어요 + +10:31.200 --> 10:32.970 +하지만 시작할게요 + +10:32.970 --> 10:43.500 +이 방에는 데이터 과학자들이 가득합니다 달리의 초현실적인 스타일로 인공지능을 배우고 + +10:43.500 --> 10:44.490 +있죠 + +10:44.490 --> 10:45.960 +정말 좋아요 + +10:45.990 --> 10:47.760 +이 사람들과 닮았어요? 
+ +10:47.970 --> 10:52.320 +최악의 악몽에서만 그러는 게 아니라 저기도 그랬으면 좋겠네요 + +10:52.320 --> 10:55.110 +초현실적인 달리예요 + +10:55.140 --> 10:57.840 +무섭고 이상한 세상이죠 + +10:57.930 --> 11:01.350 +데이터 과학자치고 정말 마음에 들어요 + +11:01.380 --> 11:07.530 +이걸 플럭스 모델로 대체할 수 있어요 아까 강의에서 + +11:07.560 --> 11:11.450 +코드를 보여드렸죠 + +11:11.630 --> 11:13.160 +네, 그렇게 하세요 + +11:13.160 --> 11:18.170 +시간이 더 오래 걸리고 T4보다 더 우람한 박스로 달려야 해요 + +11:18.200 --> 11:24.710 +A100 카메라로 하면 몇 분이면 숨이 멎을 정도로 멋진 이미지가 완성되죠 + +11:24.740 --> 11:28.070 +지난 강의에서 보여 드린 것처럼요 + +11:28.280 --> 11:34.520 +하지만 저렴한 초고속 모델치고는 괜찮은 것 같아요 + +11:35.120 --> 11:39.680 +마지막으로 오디오 세대예요 + +11:39.680 --> 11:45.440 +텍스트-언어 파이프라인을 사용할 겁니다 어떤 모델을 원하는지 알려주는 거죠 + +11:45.470 --> 11:49.040 +마이크로소프트의 스피치 5 TTS를 원하죠 + +11:49.040 --> 11:55.820 +모델에 제공해야 할 게 좀 더 있어요 비트가 사용해야 할 음성의 종류 + +11:55.820 --> 11:56.900 +같은 거요 + +11:56.900 --> 12:01.340 +얼굴을 안는 것으로 데이터 세트를 로드하는 라인이 몇 개 있어요 + +12:01.490 --> 12:04.460 +Get it으로 모양을 잡아야죠 + +12:04.460 --> 12:10.270 +다 됐으면 파이프라인을 호출하고 인공 지능 엔지니어에게 + +12:10.270 --> 12:17.140 +인사한 다음 이 음성 메시지를 전달할게요 + +12:17.410 --> 12:24.460 +이건 웨이브 파일에 쓸 코드예요 + +12:24.820 --> 12:34.360 +이 콜랍 공책에 그 내용을 담을 거예요 + +12:34.360 --> 12:36.220 +꽤 빨랐어요 + +12:36.220 --> 12:38.050 +소리가 어떤지 들어보죠 + +12:38.050 --> 12:38.830 +안녕하세요 + +12:38.860 --> 12:43.450 +숙련된 인공지능 엔지니어에게요 + +12:44.200 --> 12:45.520 +괜찮은 것 같은데요 + +12:45.520 --> 12:50.590 +텍스트에서 음성 생성 파이프라인을 사용하는 예가 되겠네요 + +12:50.800 --> 12:55.180 +파이프라인 API의 Colab 워크스루가 끝났네요 + +12:55.180 --> 13:00.910 +물론 이 Colab을 공유할 테니 여러분이 액세스할 수 있고 직접 시도해 + +13:00.910 --> 13:06.460 +보시길 권장합니다 여러 입력값을 실험하고 H깅페이스 허브에서 찾을 + +13:06.460 --> 13:09.880 +수 있는 여러 모델도 실험해 보세요 + +13:10.180 --> 13:10.990 +맛있게 드세요 diff --git a/week5/community-contributions/subtitles/srts/59170255/en_US.srt b/week5/community-contributions/subtitles/srts/59170255/en_US.srt new file mode 100755 index 0000000..f691c42 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170255/en_US.srt @@ -0,0 +1,463 @@ +WEBVTT + +00:00.800 --> 00:09.230 +And 
welcome back to us continuing our journey through the model class in Hugging Face Transformers library. + +00:09.260 --> 00:14.960 +We were just looking at the architecture of the llama model that you get when you simply write it, + +00:14.960 --> 00:17.690 +and saying that you should look at it for the others. + +00:17.690 --> 00:22.010 +One other thing to point out is to always look at the dimensionality. + +00:22.010 --> 00:28.490 +As I briefly mentioned, look at the number of input dimensions representing the vocab, and see that + +00:28.490 --> 00:32.120 +that matches the output down here. + +00:32.300 --> 00:39.080 +And you can follow the dimensions through the architecture and get a sense of what's going on. + +00:40.010 --> 00:41.060 +All right. + +00:41.060 --> 00:48.170 +But now that we have done all of this and we've talked through what's going on and we've built our inputs, + +00:48.200 --> 00:51.320 +it is time for business. + +00:51.320 --> 00:53.990 +This is the method model dot generate. + +00:53.990 --> 00:58.220 +It takes our inputs which are sitting on our GPU, ready for this. + +00:58.460 --> 01:02.100 +Um, and we can say we want up to 80 new tokens. + +01:02.340 --> 01:06.150 +Um, a reminder, in case you forgot that what we asked for was a joke. + +01:06.180 --> 01:08.970 +A joke for a room of data scientists. + +01:09.000 --> 01:11.460 +Our favorite little experiment. + +01:11.700 --> 01:16.110 +Uh, and then we take the the outputs. + +01:16.110 --> 01:20.490 +We take the the first in the list of outputs, there will only be one. + +01:20.760 --> 01:28.770 +Um, and we then call tokenizer dot decode to turn it from tokens back into letters to text again. + +01:28.770 --> 01:30.690 +And we print the result. + +01:30.720 --> 01:32.340 +Let's do it. + +01:32.520 --> 01:34.560 +So it starts to run. + +01:34.710 --> 01:41.070 +I like to to watch what's going on by looking down here and seeing it do a forward pass. 
+ +01:41.070 --> 01:45.810 +And there comes our answer to a lighthearted joke. + +01:45.810 --> 01:49.590 +Why did the regression model break up with the neural network? + +01:49.620 --> 01:54.900 +Because it was a bad fit and the neural network was overfitting its emotions. + +01:55.230 --> 01:56.610 +Ah, you know, it's okay. + +01:56.610 --> 01:57.830 +It's not terrible. + +01:57.830 --> 01:58.880 +It's, uh. + +01:58.910 --> 02:02.990 +Yeah, it's it's a perfectly plausible joke. + +02:02.990 --> 02:05.870 +It's not the funniest that I've heard, but it's, uh. + +02:05.900 --> 02:07.370 +It's not bad. + +02:09.290 --> 02:12.350 +Why did the logistic regression model go to therapy? + +02:12.350 --> 02:15.410 +Because it was struggling to classify its emotions. + +02:15.410 --> 02:17.120 +I think that's really good, actually. + +02:17.120 --> 02:18.560 +I think that's great. + +02:19.520 --> 02:23.810 +It's simpler and it's, uh, spot on for a data science audience. + +02:23.810 --> 02:30.410 +I think that's a better gag than, uh, than some of the ones at the frontier models came up with. + +02:30.830 --> 02:33.320 +Uh, so good job. + +02:33.320 --> 02:34.520 +Llama 3.1. + +02:34.730 --> 02:38.630 +Uh, the thing to bear in mind again, of course, is that we're dealing here with the 8 billion parameter + +02:38.630 --> 02:45.320 +version of llama 3.1, the smallest version of it, and we've quantized it down to four bits, and then + +02:45.320 --> 02:46.730 +we double quantized it. + +02:46.910 --> 02:54.740 +Uh, so it's a this super slim version of the model, and it just told a perfectly respectable joke. + +02:55.710 --> 02:57.840 +Uh, so I hope you enjoyed that. + +02:57.990 --> 03:01.500 +The next thing we do is we do some cleanup to free up some memory. + +03:01.500 --> 03:06.570 +Otherwise, if we keep running, different models will very quickly run out of GPU. 
+ +03:06.660 --> 03:13.320 +You may find this happens to you, in which case you can always restart your session by going runtime + +03:13.320 --> 03:19.110 +restart session and then continue from where you left off after doing the, uh, the imports. + +03:19.620 --> 03:25.770 +So the next thing I'm going to do is take everything we've just done and package it up into a nice little + +03:25.770 --> 03:27.450 +function that does all of it. + +03:27.450 --> 03:34.770 +The function will take the name of a model and messages a usual list of dictionaries. + +03:34.770 --> 03:38.970 +And let's just go through this line by line as a way of revising what we just did. + +03:39.000 --> 03:47.430 +We start by using the auto tokenizer class to create a new tokenizer based on the model that we're working + +03:47.430 --> 03:48.060 +with. + +03:48.990 --> 03:54.860 +This line is the thing that sets the padding token to be the same as the end of sentence token. + +03:54.890 --> 03:57.710 +This sort of standard boilerplate thing to do. + +03:57.950 --> 03:58.970 +And then this. + +03:58.970 --> 04:00.890 +We know and know it well. + +04:01.130 --> 04:08.900 +This is where we apply the chat template that's suitable for this tokenizer to the messages the list. + +04:08.900 --> 04:13.220 +And it will return back a series of tokens. + +04:13.220 --> 04:20.240 +We then put that onto the GPU and that we assign to inputs. + +04:20.510 --> 04:21.590 +This is new. + +04:21.590 --> 04:29.120 +So just as another little little skill to add I'm going to say let's stream back the results. + +04:29.240 --> 04:33.320 +And the Huggingface library supports that as well. + +04:33.320 --> 04:36.290 +You do you create this thing called a text streamer. + +04:36.320 --> 04:41.360 +You need to give it the tokenizer because as it streams back tokens, it's going to need to convert + +04:41.360 --> 04:43.190 +them back into, into text. 
+ +04:43.220 --> 04:46.130 +Uh, so it needs to know what tokenizer you're using. + +04:46.130 --> 04:50.240 +So you provide that and then action. + +04:50.450 --> 04:52.400 +Uh, we first of all get the model. + +04:52.400 --> 04:55.200 +This is auto model for causal lm. + +04:55.200 --> 04:58.770 +This is the equivalent to the auto tokenizer. + +04:58.770 --> 05:04.350 +But to load the model we say from pre-trained, we tell it the name of the model. + +05:04.470 --> 05:07.590 +We say device map is auto, meaning user GPU. + +05:07.590 --> 05:16.260 +If you've got one and we pass in our quant config that we set way up there somewhere, uh, to be four + +05:16.260 --> 05:22.650 +bit double quantized NF for uh, type of four bit numbers. + +05:22.650 --> 05:28.710 +And the bfloat16 is the calculation, uh data type. + +05:29.010 --> 05:34.170 +And it's now time for business model dot generate. + +05:34.170 --> 05:35.460 +That's the big method. + +05:35.460 --> 05:42.540 +And we pass in the inputs we'll generate up to 80 new tokens and we'll give it our streamer. + +05:42.570 --> 05:49.020 +That's this is the piece that means that it will then stream the output and then we'll do our cleanup. + +05:49.530 --> 05:55.550 +So that is the function which kind of wraps everything that we did before, but also adds in streaming. + +05:55.550 --> 06:02.810 +And with that, let's quite simply call Phi three with our messages, uh, using the function we just + +06:02.810 --> 06:03.350 +wrote. + +06:03.350 --> 06:06.380 +So Phi three will now load in again. + +06:06.410 --> 06:12.920 +This will take a little bit longer for you because you will, uh, be loading it for the first time. + +06:12.980 --> 06:16.370 +Uh, I have already loaded it, so it's cached on disk. + +06:16.460 --> 06:20.660 +Uh, so it doesn't need to redownload the whole thing from hugging faces. + +06:20.690 --> 06:21.380 +Hub. 
+
+06:21.710 --> 06:25.730
+Um, there's still a little bit to do to load it in while it's doing that.
+
+06:25.730 --> 06:27.140
+We could, uh.
+
+06:27.170 --> 06:32.180
+Oh, I was going to say we could look at the resources, but I think it's going to be so quick that
+
+06:32.180 --> 06:34.580
+I want you to see it streaming back.
+
+06:34.610 --> 06:39.050
+And I think, uh, I should prepare you for the fact that you may be disappointed.
+
+06:39.560 --> 06:49.010
+Um, so I found that from using at least the prompt that I've got there, I was not able to get Phi
+
+06:49.040 --> 06:50.400
+three to tell a joke.
+
+06:50.400 --> 06:57.540
+Rather, it gives some sort of general stuff that a data scientist might be talking about and sort of
+
+06:57.540 --> 06:59.010
+rambles away.
+
+06:59.040 --> 07:04.320
+Now, I don't know whether I can improve the prompt to be something that's a bit more assertive for
+
+07:04.320 --> 07:08.700
+Phi-3, or whether it's simply not something that Phi-3 is willing to do.
+
+07:08.970 --> 07:14.040
+Phi-3 will do a lot of things very admirably indeed, but not this particular task.
+
+07:14.040 --> 07:19.680
+So I also leave that as an exercise for you, as well as trying some other models.
+
+07:19.680 --> 07:27.000
+Also see whether you can improve the prompting to get Phi-3 to tell a joke or if it's not a jokester,
+
+07:27.000 --> 07:28.860
+you can find some of the things it's good at.
+
+07:28.860 --> 07:34.200
+It will answer some of the other questions that we've asked LLMs about things like use of LLMs very
+
+07:34.200 --> 07:35.130
+well indeed.
+
+07:35.700 --> 07:41.070
+Um, so that is the Phi-3 outcome.
+
+07:41.070 --> 07:43.410
+Now let's see how Gemma does.
+ +07:43.410 --> 07:51.290 +So the same approach, we can use our utility function for Info Google's Gemma two model, and it's + +07:51.320 --> 07:57.650 +worth noting that Gemma doesn't support a system prompt, so you have to just pass in the user prompt + +07:57.650 --> 08:02.270 +like this, which is fine because the system prompt didn't say anything special anyway. + +08:02.540 --> 08:06.500 +And let's give Gemma a whirl. + +08:06.500 --> 08:08.780 +It is, of course, a 2 billion model. + +08:08.780 --> 08:10.130 +It's a very small model. + +08:10.130 --> 08:15.980 +In addition to being a very small model, we are also quantizing it down to four bits and then quantizing + +08:15.980 --> 08:16.850 +it again. + +08:16.850 --> 08:25.520 +So we are really, uh, dealing with a very slim model at this point, which shouldn't use up much memory + +08:25.520 --> 08:29.270 +and also should load nice and quickly and tell a joke quickly. + +08:32.330 --> 08:34.130 +And there is its joke. + +08:34.130 --> 08:37.400 +Why did the data scientists break up with the statistician? + +08:37.400 --> 08:41.240 +Because they had too many disagreements about the p value. + +08:41.270 --> 08:44.540 +It's, uh, another nerdy joke about p value. + +08:44.570 --> 08:46.130 +I don't get it. + +08:46.140 --> 08:49.470 +But maybe there's something obvious that I'm missing. + +08:49.620 --> 08:50.790 +Uh, welcome. + +08:50.790 --> 08:53.040 +Anyone to tell me if it is. + +08:53.370 --> 08:53.760 +Uh. + +08:53.760 --> 08:56.820 +But still, I like the way it's nice and friendly. + +08:56.820 --> 08:57.540 +It's got another. + +08:57.540 --> 08:59.490 +Let me know if you'd like to hear another joke. + +08:59.640 --> 09:02.970 +Uh, maybe when you run this, you're going to get a better joke, I don't know. + +09:03.120 --> 09:07.890 +Uh, but, uh, it's certainly, uh, an enjoyable, uh, tone. 
+
+09:07.890 --> 09:14.340
+And I think that that, uh, Gemma two has done a laudable job of, uh, certainly it's data science
+
+09:14.340 --> 09:15.180
+relevant.
+
+09:15.480 --> 09:21.780
+Um, and, uh, particularly when you remember that this is a tiny model that we are further quantizing.
+
+09:21.780 --> 09:28.440
+I think it's a fine showing from Gemma two, but certainly I fully expect when you use quantum, which
+
+09:28.440 --> 09:35.100
+I have used uh, that you'll see, uh, superior results and, uh, you maybe you'll get something better
+
+09:35.100 --> 09:36.150
+from Phi-3 as well.
+
+09:36.150 --> 09:41.310
+And then whatever, whether you pick the Mixtral model or something a bit slimmer that you can also
+
+09:41.340 --> 09:44.370
+use, I imagine you'll be able to get some good results.
+
+09:44.510 --> 09:49.580
+You could also try asking maths questions, something which they can struggle with.
+
+09:49.610 --> 09:52.640
+If you're dealing with difficult maths.
+
+09:52.790 --> 09:59.360
+But I tried asking a fairly difficult question to llama 3.1 earlier, and it had no difficulties at
+
+09:59.360 --> 10:02.840
+all to see if you can have the same experience.
+
+10:03.200 --> 10:09.050
+Regardless, now is a moment for you to explore using these models, trying out different things.
+
+10:09.080 --> 10:11.450
+You're working with open source models.
+
+10:11.540 --> 10:13.490
+There's no API cost going on.
+
+10:13.520 --> 10:16.610
+The only cost you pay is if you're using up.
+
+10:16.610 --> 10:23.840
+If you're not using Free Colab, but you're using up some of your, uh, units from the, uh, the Google
+
+10:23.840 --> 10:25.850
+Colab, uh, costs.
+
+10:26.030 --> 10:32.240
+Um, the I'm using 1.76 units per hour.
+
+10:32.240 --> 10:40.250
+So there's really plenty of time to be, uh, to be working with this and enjoying inference on open
+
+10:40.250 --> 10:43.790
+source models using the Hugging Face Transformers library.
diff --git a/week5/community-contributions/subtitles/srts/59170255/ja_JP.srt b/week5/community-contributions/subtitles/srts/59170255/ja_JP.srt new file mode 100755 index 0000000..eb57260 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170255/ja_JP.srt @@ -0,0 +1,409 @@ +WEBVTT + +00:00.800 --> 00:09.230 +そして、 ハギング・フェイス・トランスフォーマー・ライブラリーのモデル・クラスを巡る旅の続きへようこそ。 + +00:09.260 --> 00:17.690 +私たちはただ、 単純に書いたときに得られるラマ・モデルのアーキテクチャを見て、 他のモデルにも目を向けるべきだと言っただけなのだ。 + +00:17.690 --> 00:22.010 +もうひとつ指摘しておきたいのは、 常に次元を見ることだ。 + +00:22.010 --> 00:32.120 +簡単に触れたように、 ボキャブラリーを表す入力次元の数を見て、 それがこの下の出力と一致していることを確認してほしい。 + +00:32.300 --> 00:39.080 +そして、 建築物を通して寸法を追うことができ、 何が起こっているのかを感じ取ることができる。 + +00:40.010 --> 00:41.060 +分かった。 + +00:41.060 --> 00:48.170 +しかし、 こうしてすべてを終え、 何が起こっているのかを話し合い、 インプットを作り上げた今、 + +00:48.200 --> 00:51.320 +ビジネスの時が来た。 + +00:51.320 --> 00:53.990 +これがドットジェネレーションのメソッドモデルだ。 + +00:53.990 --> 00:58.220 +GPU上にある私たちの入力を取り込み、 その準備をする。 + +00:58.460 --> 01:02.100 +新しいトークンは80枚までとする。 + +01:02.340 --> 01:06.150 +ええと、 念のために言っておくけど、 私たちがお願いしたのはジョークだよ。 + +01:06.180 --> 01:08.970 +データサイエンティストの部屋でのジョーク。 + +01:09.000 --> 01:11.460 +私たちのお気に入りの小さな実験。 + +01:11.700 --> 01:16.110 +それから出力を取るんだ。 + +01:16.110 --> 01:20.490 +アウトプットのリストの最初のものを取る。 + +01:20.760 --> 01:28.770 +そして、 トークナイザー・ドット・デコードを呼び出して、 トークンから文字に戻し、 再びテキストにする。 + +01:28.770 --> 01:30.690 +そしてその結果を印刷する。 + +01:30.720 --> 01:32.340 +そうしよう。 + +01:32.520 --> 01:34.560 +だから走り出す。 + +01:34.710 --> 01:41.070 +この下を見て、 フォワードパスをするのを見るのが好きなんだ。 + +01:41.070 --> 01:45.810 +そして、 軽妙なジョークに対する私たちの答えがそこにある。 + +01:45.810 --> 01:49.590 +なぜ回帰モデルはニューラルネットワークと別れたのか? + +01:49.620 --> 01:54.900 +ニューラルネットワークが感情をオーバーフィットさせていたからだ。 + +01:55.230 --> 01:56.610 +ああ、 大丈夫だよ。 + +01:56.610 --> 01:57.830 +ひどくはない。 + +01:57.830 --> 01:58.880 +それは、 ええと。 + +01:58.910 --> 02:02.990 +ああ、 まったくもっともなジョークだよ。 + +02:02.990 --> 02:05.870 +今まで聞いた中で一番面白いとは思わないけど、 まあね。 + +02:05.900 --> 02:07.370 +悪くないよ。 + +02:09.290 --> 02:12.350 +ロジスティック回帰モデルはなぜセラピーを受けたのか? 
+ +02:12.350 --> 02:15.410 +感情を分類するのに苦労していたからだ。 + +02:15.410 --> 02:17.120 +それは本当にいいことだと思うよ。 + +02:17.120 --> 02:18.560 +それは素晴らしいことだと思う。 + +02:19.520 --> 02:23.810 +よりシンプルで、 データ・サイエンスの読者には的確だ。 + +02:23.810 --> 02:30.410 +フロンティアのモデルたちが思いつくギャグより、 その方がいいと思うよ。 + +02:30.830 --> 02:33.320 +ああ、 よくやった。 + +02:33.320 --> 02:34.520 +ラマ 3. 1. + +02:34.730 --> 02:45.320 +ええと、 念頭に置いておかなければならないのは、 もちろん、 ここで扱っているのは80億パラメータのバージョンのラマ3だということだ。 + +02:45.320 --> 02:45.320 +1、 + +02:45.320 --> 02:46.730 +つまり最小のバージョンで、 それを4ビットに量子化し、 さらに2倍に量子化した。 + +02:46.910 --> 02:54.740 +ええと、 つまり、 この超スリム版のモデルで、 完全に立派なジョークを言っただけなんだ。 + +02:55.710 --> 02:57.840 +楽しんでもらえたかな? + +02:57.990 --> 03:01.500 +次にすることは、 メモリを解放するためのクリーンアップだ。 + +03:01.500 --> 03:06.570 +そうでなければ、 このまま走り続ければ、 さまざまなモデルがあっという間にGPUを使い果たしてしまう。 + +03:06.660 --> 03:13.320 +このようなことが起こるかもしれないが、 その場合はいつでもランタイムのセッションを再起動して、 + +03:13.320 --> 03:19.110 +インポートを行った後、 中断したところから続けることができる。 + +03:19.620 --> 03:27.450 +だから次にすることは、 今やったことをすべて、 そのすべてを行う小さな関数にパッケージ化することだ。 + +03:27.450 --> 03:34.770 +この関数は、 モデル名を受け取り、 通常の辞書のリストをメッセージします。 + +03:34.770 --> 03:38.970 +そして、 今やったことを修正する方法として、 これを一行ずつ見ていこう。 + +03:39.000 --> 03:48.060 +まず、 オート・トークナイザー・クラスを使用して、 作業中のモデルに基づいて新しいトークナイザーを作成します。 + +03:48.990 --> 03:54.860 +この行は、 パディング・トークンを文末トークンと同じにするためのものである。 + +03:54.890 --> 03:57.710 +このようなことは、 お決まりの定型文のようなものだ。 + +03:57.950 --> 03:58.970 +そしてこれだ。 + +03:58.970 --> 04:00.890 +私たちはそれを知っているし、 よく知っている。 + +04:01.130 --> 04:08.900 +ここで、 このトークナイザーに適したチャットテンプレートをリストのメッセージに適用します。 + +04:08.900 --> 04:13.220 +そして、 一連のトークンを返してくる。 + +04:13.220 --> 04:20.240 +それをGPUに載せ、 入力に割り当てる。 + +04:20.510 --> 04:21.590 +これは新しい。 + +04:21.590 --> 04:29.120 +だから、 もうひとつ、 ちょっとした技を加えて、 結果をストリーミングで返そうと思うんだ。 + +04:29.240 --> 04:33.320 +そして、 Huggingfaceライブラリーはそれもサポートしている。 + +04:33.320 --> 04:36.290 +テキストストリーマーというものを作るんだ。 + +04:36.320 --> 04:43.190 +トークンをストリームバックする際に、 トークンをテキストに変換する必要があるからだ。 + +04:43.220 --> 04:46.130 +どのトークナイザーを使っているかを知る必要がある。 + +04:46.130 --> 04:50.240 +だから、 それを提供し、 そして行動する。 + 
+04:50.450 --> 04:52.400 +ええと、 まずモデルを手に入れます。 + +04:52.400 --> 04:55.200 +これは因果LMの自動モデルである。 + +04:55.200 --> 04:58.770 +これはオート・トークナイザーに相当する。 + +04:58.770 --> 05:04.350 +しかし、 事前に訓練されたモデルをロードするには、 モデル名を指定する。 + +05:04.470 --> 05:07.590 +デバイス・マップはオート、 つまりユーザーGPUを意味する。 + +05:07.590 --> 05:22.650 +もし4ビットの数値があれば、 その数値のコンフィギュレーションを4ビット・ダブル量子化NFに設定する。 + +05:22.650 --> 05:28.710 +そして、 bfloat16は計算、 つまりデータ型である。 + +05:29.010 --> 05:34.170 +そして、 今こそビジネスモデルのドットジェネレーションの時だ。 + +05:34.170 --> 05:35.460 +それが大きな方法だ。 + +05:35.460 --> 05:42.540 +そしてインプットを渡すと、 最大80の新しいトークンを生成し、 ストリーマに渡す。 + +05:42.570 --> 05:49.020 +これが、 出力をストリーミングし、 クリーンアップする部分だ。 + +05:49.530 --> 05:55.550 +つまり、 この関数は、 前にやったことをすべてラップし、 さらにストリーミングを追加したものだ。 + +05:55.550 --> 06:03.350 +それで、 今書いた関数を使って、 単純にファイ3をメッセージで呼び出してみよう。 + +06:03.350 --> 06:06.380 +これでファイ3が再びロードされることになる。 + +06:06.410 --> 06:12.920 +初めてロードするので、 少し時間がかかります。 + +06:12.980 --> 06:16.370 +ええと、 すでにロードしてあるので、 ディスクにキャッシュされています。 + +06:16.460 --> 06:20.660 +ええと、 だから、 ハグした顔から全部を再ダウンロードする必要はないんだ。 + +06:20.690 --> 06:21.380 +ハブだ。 + +06:21.710 --> 06:25.730 +そうしている間に、 ロードするためにまだ少しやることがあるんだ。 + +06:25.730 --> 06:27.140 +そうだな。 + +06:27.170 --> 06:32.180 +ああ、 資源を見ることができると言おうと思ったんだけど、 あっという間に終わってしまいそうだから、 + +06:32.180 --> 06:34.580 +ストリーミングで戻ってきてほしいんだ。 + +06:34.610 --> 06:39.050 +そして、 がっかりされるかもしれないという事実を覚悟しておくべきだと思うんだ。 + +06:39.560 --> 06:50.400 +少なくともプロンプトを使った限りでは、 ファイ3にジョークを言わせることはできなかった。 + +06:50.400 --> 06:59.010 +むしろ、 データ・サイエンティストが話していそうな一般的な事柄をいくつか挙げて、 とりとめもなく説明している。 + +06:59.040 --> 07:04.320 +このプロンプトを、 53歳にとってもう少し自己主張の強いものに改善できるか、 + +07:04.320 --> 07:08.700 +それとも単に53歳がやりたがらないだけなのかはわからない。 + +07:08.970 --> 07:14.040 +53は実に多くのことを立派にこなすが、 この特別な仕事はできない。 + +07:14.040 --> 07:19.680 +だから、 他のモデルも試してみてほしい。 + +07:19.680 --> 07:28.860 +また、 53にジョークを言わせるために促し方を改善できるかどうか、 あるいはジョーク好きでないなら、 得意なことをいくつか見つけられるかどうかも見てみよう。 + +07:28.860 --> 07:35.130 +Llmsの使用方法など、 我々がllmsに尋ねた他の質問にも、 実にうまく答えてくれるだろう。 + +07:35.700 --> 07:41.070 +それが53の結果だ。 + +07:41.070 --> 07:43.410 +さて、 ジェマはどうするか。 + 
+07:43.410 --> 07:51.290 +Gemmaはシステム・プロンプトをサポートしていないので、 + +07:51.320 --> 08:02.270 +このようにユーザー・プロンプトを渡さなければならない。 + +08:02.540 --> 08:06.500 +そして、 ジェンマを試してみよう。 + +08:06.500 --> 08:08.780 +もちろん、 20億のモデルだ。 + +08:08.780 --> 08:10.130 +とても小さなモデルだ。 + +08:10.130 --> 08:16.850 +非常に小さなモデルであることに加えて、 私たちはそれを4ビットに量子化し、 また量子化している。 + +08:16.850 --> 08:29.270 +というわけで、 この時点では非常にスリムなモデルを扱っていて、 メモリもそれほど消費しないし、 読み込みも早くてジョークもすぐに言えるはずだ。 + +08:32.330 --> 08:34.130 +そこにジョークがある。 + +08:34.130 --> 08:37.400 +データサイエンティストはなぜ統計学者と別れたのか? + +08:37.400 --> 08:41.240 +p値に関する意見の相違が多すぎたからだ。 + +08:41.270 --> 08:44.540 +p値に関する、 またオタクっぽいジョークだよ。 + +08:44.570 --> 08:46.130 +私には理解できない。 + +08:46.140 --> 08:49.470 +でも、 もしかしたら私が見落としている明白な何かがあるかもしれない。 + +08:49.620 --> 08:50.790 +ああ、 ようこそ。 + +08:50.790 --> 08:53.040 +どなたか教えてください。 + +08:53.370 --> 08:53.760 +ええと。 + +08:53.760 --> 08:56.820 +それでも、 親切でフレンドリーなところがいい。 + +08:56.820 --> 08:57.540 +もうひとつある。 + +08:57.540 --> 08:59.490 +またジョークを聞きたいなら言ってくれ。 + +08:59.640 --> 09:02.970 +これを走らせたら、 もっといいジョークが飛び出すかもしれないよ。 + +09:03.120 --> 09:07.890 +あー、 でも、 あー、 確かに、 あー、 楽しい、 あー、 トーンだ。 + +09:07.890 --> 09:15.180 +そして、 ジェマ2世はデータ・サイエンスに関連した、 称賛に値する仕事をしたと思う。 + +09:15.480 --> 09:21.780 +そして、 特に、 この小さなモデルをさらに量子化していることを思い出してほしい。 + +09:21.780 --> 09:28.440 +ジェマ2世は素晴らしい結果を残したと思うが、 私が使っている量子を使えば、 + +09:28.440 --> 09:36.150 +もっと優れた結果が出るだろうし、 パイ3世でももっと良い結果が出るかもしれない。 + +09:36.150 --> 09:41.310 +それから、 ミックスドトライアルモデルを選ぼうが、 もう少しスリムで自分も使えるものを選ぼうが、 + +09:41.340 --> 09:44.370 +いずれにせよ、 いい結果が得られると想像している。 + +09:44.510 --> 09:49.580 +また、 彼らが苦手とする数学の質問をしてみるのもいいだろう。 + +09:49.610 --> 09:52.640 +難しい数学を扱っているのなら。 + +09:52.790 --> 10:02.840 +しかし、 私はラマ3世にかなり難しい質問をしてみた。 1以前は全く問題なかった。 + +10:03.200 --> 10:09.050 +いずれにせよ、 今はこれらのモデルを使って、 いろいろなことを試してみる時だ。 + +10:09.080 --> 10:11.450 +あなたはオープンソースのモデルを使っている。 + +10:11.540 --> 10:13.490 +APIにコストはかかっていない。 + +10:13.520 --> 10:16.610 +あなたが支払うコストは、 使い切った場合だけです。 + +10:16.610 --> 10:25.850 +フリー・コラボを利用していない方で、 グーグル・コラボのユニットを使い切ろうとしている方。 + +10:26.030 --> 10:32.240 +ええと、 
私が使っているのは1. 毎時76台。 + +10:32.240 --> 10:40.250 +というわけで、 これを使って作業したり、 抱きつき顔トランスフォーマー・ライブラリーを使ってオープンソースのモデルで推論を楽しんだりする時間は、 + +10:40.250 --> 10:43.790 +本当にたくさんある。 diff --git a/week5/community-contributions/subtitles/srts/59170255/ko_KR.srt b/week5/community-contributions/subtitles/srts/59170255/ko_KR.srt new file mode 100755 index 0000000..ead8493 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170255/ko_KR.srt @@ -0,0 +1,454 @@ +WEBVTT + +00:00.800 --> 00:09.230 +다시 오신 걸 환영합니다 얼굴 포옹 트랜스포머 라이브러리 모델 수업에 잘 오셨어요 + +00:09.260 --> 00:14.960 +라마 모델의 구조를 살펴보고 있었어요 단순히 작성했을 때 얻게 되는 거죠 그리고 다른 + +00:14.960 --> 00:17.690 +것들을 위해 그걸 살펴보라고 했어요 + +00:17.690 --> 00:22.010 +또 하나 지적할 점은 입체감을 봐야 한다는 거예요 + +00:22.010 --> 00:28.490 +간단히 말씀드렸듯이 단어 사용을 나타내는 입력 크기 수를 + +00:28.490 --> 00:32.120 +보세요 아래의 출력과 일치하죠 + +00:32.300 --> 00:39.080 +get-ture 구조를 통해 크기를 따라가면 어떤 상황인지 알 수 있죠 + +00:40.010 --> 00:41.060 +좋아요 + +00:41.060 --> 00:48.170 +이제 이 모든 걸 끝냈고 어떻게 되는지에 대해 얘기했고 입력을 빌드했어요 + +00:48.200 --> 00:51.320 +이제 일할 시간이에요 + +00:51.320 --> 00:53.990 +이건 메서드 모델 .생성이에요 + +00:53.990 --> 00:58.220 +이걸 위해 GPU 위에 있는 입력값을 가져가죠 + +00:58.460 --> 01:02.100 +새 토큰을 80개까지 원한다고 할 수 있어요 + +01:02.340 --> 01:06.150 +우리가 부탁한 건 농담이란 걸 잊으셨나 봐요 + +01:06.180 --> 01:08.970 +데이터 과학자들에겐 농담거리였죠 + +01:09.000 --> 01:11.460 +우리가 좋아하는 실험이죠 + +01:11.700 --> 01:16.110 +그런 다음 출력부를 찍어요 + +01:16.110 --> 01:20.490 +첫 번째 출력물을 가져가면 하나밖에 안 남아요 + +01:20.760 --> 01:28.770 +그런 다음 토큰라이저 .디코드를 호출해 토큰에서 다시 글자로 텍스트로 바꾸죠 + +01:28.770 --> 01:30.690 +결과를 프린트하죠 + +01:30.720 --> 01:32.340 +시작하죠 + +01:32.520 --> 01:34.560 +달리기 시작하네요 + +01:34.710 --> 01:41.070 +아래를 보면서 진행 상황을 보고 싶어요 앞으로 패스하는 걸 보는 거죠 + +01:41.070 --> 01:45.810 +명랑한 농담에 대한 답이 나왔네요 + +01:45.810 --> 01:49.590 +왜 회귀 모형이 신경망과 분리된 거죠? 
+ +01:49.620 --> 01:54.900 +안 맞았거든요 신경망이 감정에 너무 부합했어요 + +01:55.230 --> 01:56.610 +뭐, 괜찮아요 + +01:56.610 --> 01:57.830 +나쁘지 않아요 + +01:57.830 --> 01:58.880 +네 + +01:58.910 --> 02:02.990 +네, 완벽하게 그럴듯한 농담이죠 + +02:02.990 --> 02:05.870 +그렇게 웃기진 않지만 뭐, 그렇죠 + +02:05.900 --> 02:07.370 +나쁘지 않네요 + +02:09.290 --> 02:12.350 +왜 물리적 회귀 모델이 상담을 받으러 갔죠? + +02:12.350 --> 02:15.410 +감정을 분류하는 게 어려웠거든요 + +02:15.410 --> 02:17.120 +정말 좋은 것 같아요 + +02:17.120 --> 02:18.560 +좋은 것 같아요 + +02:19.520 --> 02:23.810 +데이터 과학을 좋아하는 사람들에겐 더 간단하고 딱 맞죠 + +02:23.810 --> 02:30.410 +프론티어 모델에서 만든 것보다 더 괜찮은 개그 같아요 + +02:30.830 --> 02:33.320 +정말 잘했어요 + +02:33.320 --> 02:34.520 +라마 3요 1번요 + +02:34.730 --> 02:38.630 +다시 한 번 명심해야 할 것은 우리가 다루는 건 80억 + +02:38.630 --> 02:45.320 +개의 라마 3 매개 변수 버전이란 거예요 첫째, 가장 작은 버전이죠 4개로 퀀타이즈한 다음 두 + +02:45.320 --> 02:46.730 +배로 퀀타이즈했어요 + +02:46.910 --> 02:54.740 +모델의 아주 날씬한 버전인데 아주 괜찮은 농담을 하더군요 + +02:55.710 --> 02:57.840 +재밌게 보셨길 바라요 + +02:57.990 --> 03:01.500 +다음으로 할 일은 청소해서 메모리를 확보하는 거예요 + +03:01.500 --> 03:06.570 +그렇지 않으면 계속 실행하면 다른 모델들이 GPU 소진으로 빠르게 소진되죠 + +03:06.660 --> 03:13.320 +이런 경우에 여러분은 언제든 세션을 다시 시작할 수 있습니다 런타임 리스타트 + +03:13.320 --> 03:19.110 +세션으로요 그런 다음 가져오기 다음에 중단한 곳에서 계속하죠 + +03:19.620 --> 03:25.770 +다음으로 할 일은 우리가 한 모든 걸 패키지화해 작은 함수로 + +03:25.770 --> 03:27.450 +만드는 거예요 + +03:27.450 --> 03:34.770 +이 함수는 모델의 이름과 일반적인 사전 목록의 메시지를 취하죠 + +03:34.770 --> 03:38.970 +한 줄씩 살펴보면서 방금 한 걸 다시 검토해보죠 + +03:39.000 --> 03:48.060 +오토 토큰라이저 클래스를 이용해 우리가 작업하는 모델에 기반을 둔 새 토큰라이저를 만드는 것으로 시작하죠 + +03:48.990 --> 03:54.860 +이 선은 안쪽 토큰을 문장 토큰과 동일하게 설정하는 부분이죠 + +03:54.890 --> 03:57.710 +이건 표준 절차예요 + +03:57.950 --> 03:58.970 +그리고 이거요 + +03:58.970 --> 04:00.890 +잘 알고 있어요 + +04:01.130 --> 04:08.900 +여기서 토큰라이저에 적합한 채팅 템플릿을 리스트에 적용해요 + +04:08.900 --> 04:13.220 +그럼 일련의 패를 반환하죠 + +04:13.220 --> 04:20.240 +그런 다음 GPU에 넣고 입력을 할당하죠 + +04:20.510 --> 04:21.590 +새롭네요 + +04:21.590 --> 04:29.120 +추가할 또 다른 기술로 결과를 스트리밍으로 보여드리죠 + +04:29.240 --> 04:33.320 +허깅페이스 라이브러리도 그걸 지원하죠 + +04:33.320 --> 04:36.290 +텍스트 
스트리머라는 걸 만들어요 + +04:36.320 --> 04:41.360 +토큰라이저를 줘야 합니다 토큰을 스트리밍하면 토큰을 텍스트로 다시 + +04:41.360 --> 04:43.190 +변환해야 하니까요 + +04:43.220 --> 04:46.130 +어떤 토큰라이저를 쓰는지 알아야 해요 + +04:46.130 --> 04:50.240 +그걸 제공하고 액션을 하는 거죠 + +04:50.450 --> 04:52.400 +먼저 모형을 get 해요 + +04:52.400 --> 04:55.200 +인과관계 lm 오토 모델이에요 + +04:55.200 --> 04:58.770 +오토 토큰마이저와 같은 거예요 + +04:58.770 --> 05:04.350 +하지만 미리 훈련된 모델을 로드하려면 모델 이름을 말해야 해요 + +05:04.470 --> 05:07.590 +장치 맵은 자동입니다 사용자 GPU 말이에요 + +05:07.590 --> 05:16.260 +하나를 갖고 계시면 저 위에 설치한 퀀트 구성을 넘겨요 4비트 이중 + +05:16.260 --> 05:22.650 +퀀트화된 NF로 4비트 숫자 유형을 뜻하죠 + +05:22.650 --> 05:28.710 +bfloat16은 데이터 타입을 계산한 거고요 + +05:29.010 --> 05:34.170 +이제 비즈니스 모델 생성 차례죠 + +05:34.170 --> 05:35.460 +그게 큰 방법이죠 + +05:35.460 --> 05:42.540 +새 토큰 80개까지 생성할 입력을 전달하고 스트리머를 주는 거죠 + +05:42.570 --> 05:49.020 +이게 결과물을 스트림으로 내보낸다는 뜻입니다 그런 다음 정리를 하죠 + +05:49.530 --> 05:55.550 +이 함수는 우리가 전에 했던 모든 걸 래핑해주고 스트리밍도 추가해줘요 + +05:55.550 --> 06:03.350 +이제 간단하게 메시지를 파이 3으로 불러볼게요 방금 만든 함수를 사용해서요 + +06:03.350 --> 06:06.380 +피 3은 다시 로드할게요 + +06:06.410 --> 06:12.920 +이건 시간이 좀 걸릴 거예요 처음으로 비트를 싣는 거니까요 + +06:12.980 --> 06:16.370 +이미 로드했어요 디스크에 캐시돼 있죠 + +06:16.460 --> 06:20.660 +얼굴 껴안는 걸 다시 할 필요는 없어요 + +06:20.690 --> 06:21.380 +허브요 + +06:21.710 --> 06:25.730 +비트를 싣는 동안 할 일이 좀 더 남았어요 + +06:25.730 --> 06:27.140 +그럴까요? + +06:27.170 --> 06:32.180 +리소스를 살펴보자고 하려고 했는데 너무 빨리 끝날 것 같네요 스트리밍하는 + +06:32.180 --> 06:34.580 +걸 보여드리고 싶어요 + +06:34.610 --> 06:39.050 +실망하실지도 모른다는 걸 미리 알려드려야 할 것 같아서요 + +06:39.560 --> 06:49.010 +그래서 적어도 제가 갖고 있는 프롬프트만 사용해봐도 파이 3이 농담을 하게 만들 수 없다는 걸 알게 됐죠. + +06:49.040 --> 06:50.400 +Get it. 
+ +06:50.400 --> 06:57.540 +데이터 과학자들이 장황하게 늘어놓는 일반적인 정보를 + +06:57.540 --> 06:59.010 +제공하죠 + +06:59.040 --> 07:04.320 +비트 부분을 53번에게 좀 더 단호하게 바꿀 수 있을지 + +07:04.320 --> 07:08.700 +53번이 원하지 않을지는 모르겠어요 + +07:08.970 --> 07:14.040 +53 소방대는 많은 일을 훌륭히 해내겠지만 이 작업은 아니에요 + +07:14.040 --> 07:19.680 +다른 모델과 마찬가지로 여러분도 연습해 보세요 + +07:19.680 --> 07:27.000 +또한 수정 헌정으로 53이 농담을 하게 할 수 있는지 혹은 53이 농담이 안 웃기면 get이 잘하는 + +07:27.000 --> 07:28.860 +걸 찾을 수 있는지도요 + +07:28.860 --> 07:34.200 +우리가 llms에 대해 했던 질문에 대한 답변이 될 겁니다. Lllms의 사용에 + +07:34.200 --> 07:35.130 +대해서요. + +07:35.700 --> 07:41.070 +이게 53번 결과예요 + +07:41.070 --> 07:43.410 +제마는 어떤지 보죠 + +07:43.410 --> 07:51.290 +같은 접근법으로 인포 구글의 젬마 2 모델에 유틸리티 함수를 쓸 수 있어요 젬마는 시스템 프롬프트를 + +07:51.320 --> 07:57.650 +지원하지 않죠 그래서 이렇게 사용자 프롬프트를 입력해야 해요 시스템 프롬프트는 + +07:57.650 --> 08:02.270 +특별한 걸 말하지 않으니 괜찮아요 + +08:02.540 --> 08:06.500 +제마를 한번 보죠 + +08:06.500 --> 08:08.780 +물론 20억 개 모델이죠 + +08:08.780 --> 08:10.130 +아주 작은 모델이죠 + +08:10.130 --> 08:15.980 +아주 작은 모델일 뿐 아니라 4비트로 수량화하고 다시 수량화하고 + +08:15.980 --> 08:16.850 +있어요 + +08:16.850 --> 08:25.520 +이 시점에서 우린 아주 슬림 모델을 다루고 있어요 메모리를 많이 쓰지 않고 잘 + +08:25.520 --> 08:29.270 +로드되고 빨리 농담도 할 수 있죠 + +08:32.330 --> 08:34.130 +농담도 나오네요 + +08:34.130 --> 08:37.400 +데이터 과학자들은 왜 통계 전문가와 헤어졌을까요? + +08:37.400 --> 08:41.240 +p 값에 대한 의견이 너무 많이 충돌했거든요 + +08:41.270 --> 08:44.540 +p 값에 관한 또 다른 샌님 농담이죠 + +08:44.570 --> 08:46.130 +Get it, get it, get it! 
이해가 안 돼요 + +08:46.140 --> 08:49.470 +하지만 제가 놓치고 있는 게 있을지도 몰라요 + +08:49.620 --> 08:50.790 +어서 오세요 + +08:50.790 --> 08:53.040 +누가 좀 알려줘요 + +08:53.370 --> 08:53.760 +네 + +08:53.760 --> 08:56.820 +그래도 착하고 친근해서 좋아요 + +08:56.820 --> 08:57.540 +또 있어요 + +08:57.540 --> 08:59.490 +다른 농담 듣고 싶으면 말해요 + +08:59.640 --> 09:02.970 +Get it로 하면 더 재밌는 농담이 나올지도 모르겠네요 + +09:03.120 --> 09:07.890 +하지만 확실히 즐거운 어조예요 + +09:07.890 --> 09:14.340 +2번 제마는 칭찬받을 만한 일을 했어요 데이터 과학과 관련이 + +09:14.340 --> 09:15.180 +있죠 + +09:15.480 --> 09:21.780 +특히 이건 작은 모델이라 수량화하고 있다는 걸 기억하면요 + +09:21.780 --> 09:28.440 +2번도 잘 보여 줬지만 제가 사용한 양자 원리를 사용하면 우수한 + +09:28.440 --> 09:36.150 +결과가 나올 거예요 3번 파이에서도 더 나은 결과가 나올 수 있겠죠 + +09:36.150 --> 09:41.310 +혼합 트라이얼 모델이든 좀 더 날씬한 비트 모델이든 좋은 결과가 + +09:41.340 --> 09:44.370 +나올 거예요 get it get it + +09:44.510 --> 09:49.580 +수학 문제를 내도 돼요 애들이 어려워하는 문제요 + +09:49.610 --> 09:52.640 +어려운 수학 문제라면요 + +09:52.790 --> 09:59.360 +하지만 라마 3에 꽤 어려운 질문을 했어요 당신도 같은 경험을 할 수 있는지 + +09:59.360 --> 10:02.840 +보는 데 전혀 어려움이 없었죠 + +10:03.200 --> 10:09.050 +어쨌든 이 모델들을 사용해 보고 다양한 것을 시도해 볼 기회예요 + +10:09.080 --> 10:11.450 +오픈 소스 모델로 작업하고 있죠 + +10:11.540 --> 10:13.490 +API 비용은 없어요 + +10:13.520 --> 10:16.610 +모든 걸 소진할 때만 대가를 치르죠 + +10:16.610 --> 10:23.840 +프리 콜랍을 안 쓰는데 구글 콜랍 비용으로 단위를 쓴다면 + +10:23.840 --> 10:25.850 +말이죠 + +10:26.030 --> 10:32.240 +1을 쓰고 있어요 시간당 76유닛요 + +10:32.240 --> 10:40.250 +이와 함께 작업할 시간은 충분합니다 포옹하는 트랜스포머 라이브러리를 이용해 + +10:40.250 --> 10:43.790 +오픈 소스 모델 추론을 즐길 수 있죠 diff --git a/week5/community-contributions/subtitles/srts/59170291/en_US.srt b/week5/community-contributions/subtitles/srts/59170291/en_US.srt new file mode 100755 index 0000000..9c5901f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170291/en_US.srt @@ -0,0 +1,562 @@ +WEBVTT + +00:00.950 --> 00:05.600 +Welcome back to Colab and welcome back to our business project. + +00:05.600 --> 00:12.500 +So again our assignment, we are due to create meeting minutes based on an audio file. 
+ +00:12.620 --> 00:15.620 +Uh so I found this very useful data set. + +00:15.620 --> 00:19.610 +It's typical hugging face to have a perfect data set for us. + +00:19.790 --> 00:25.760 +Um, it's a data set called meeting Bank, which is apparently a pretty well known, uh, data set, + +00:25.790 --> 00:31.250 +a benchmark created from the City council's six major US cities. + +00:31.460 --> 00:32.990 +Um, so I use this. + +00:32.990 --> 00:39.800 +I downloaded a particular Denver City Council meeting, and I actually just took a ten minute segment + +00:39.800 --> 00:43.730 +of it, uh, to be used for our experiment here. + +00:43.730 --> 00:46.550 +Uh, either ten or maybe it was a 20 minute segment of it. + +00:46.640 --> 00:54.890 +Um, but anyway, took that audio cut and I saved it on my Google Drive because my idea here is I'd + +00:54.890 --> 00:59.690 +like this product to be able to take anything that is in one's Google Drive, or if you're building + +00:59.690 --> 01:06.150 +this for a company in the company's drive and be able to use that to generate meeting minutes. + +01:06.150 --> 01:11.850 +So as part of this project, a little sidebar is we're also going to see how you can get a colab to + +01:11.880 --> 01:13.800 +read from your Google Drive. + +01:13.800 --> 01:23.850 +So we begin, as usual with some imports these sorry with Pip installs, the one little extra here is. + +01:23.850 --> 01:29.250 +You'll notice that we're also going to install OpenAI on this colab as well. + +01:29.250 --> 01:34.170 +We're not just using hugging face, we're using a bunch of hugging face, uh, packages. + +01:34.170 --> 01:37.140 +And also OpenAI library. + +01:37.230 --> 01:44.730 +We do some imports and including an OpenAI import as well as everything else. + +01:45.000 --> 01:47.370 +Um, and then we're going to set some constants. + +01:47.370 --> 01:52.020 +We're going to use an audio model called whisper, one that may be used yourself. 
+ +01:52.020 --> 01:55.530 +When I'd set you this assignment, uh, previously. + +01:55.710 --> 02:01.770 +Um, and then this is the llama 3.18 billion instruct model that we'll be using as well. + +02:01.890 --> 02:08.010 +So here this is the new capability that that that you'll be learning. + +02:08.040 --> 02:09.660 +Today as well a little extra. + +02:09.690 --> 02:13.080 +This is how you connect Colab to your Google Drive. + +02:13.110 --> 02:14.280 +Super simple. + +02:14.310 --> 02:16.140 +And it's just a drive dot mount. + +02:16.140 --> 02:19.260 +And you tell it where locally you would like to. + +02:19.290 --> 02:20.220 +Mount the drive. + +02:20.220 --> 02:21.720 +That's basically it. + +02:21.720 --> 02:25.740 +And then I've set a constant for myself of within my drive. + +02:25.800 --> 02:33.480 +Uh, it's uh, in the I've got a folder called LMS and within that I have Denver Extract dot mp3, which + +02:33.480 --> 02:41.190 +is the MP3 recording of, uh, this, uh, segment that somewhere between 10 and 20 minutes from the + +02:41.190 --> 02:43.080 +Denver City Council. + +02:43.110 --> 02:51.900 +So if I run this, it pops up with a, um, a it's connected to Google the first. + +02:51.930 --> 02:55.980 +I'm running this for the second time, and the first time I ran this, it of course popped up with an + +02:55.980 --> 03:02.080 +authentication, uh, selection for me to confirm that I'm signed in with my Google account and I grant + +03:02.080 --> 03:02.680 +access. + +03:02.680 --> 03:05.860 +This time it's telling me it's already mounted there. + +03:05.980 --> 03:10.390 +Um, if I were to go to the folder here, would be able to go through my Google drive and see all my + +03:10.420 --> 03:13.780 +all my files under slash content slash drive. + +03:14.410 --> 03:18.040 +So we then sign in to the Huggingface hub. + +03:18.760 --> 03:19.300 +Here we go. + +03:19.330 --> 03:20.620 +Login successful. + +03:20.620 --> 03:23.380 +And we also sign in to OpenAI. 
+ +03:23.410 --> 03:25.690 +So this is very similar. + +03:25.690 --> 03:33.340 +We get our OpenAI key which is also I've set the OpenAI key uh in the secrets of this colab. + +03:33.580 --> 03:41.710 +Um, and so we retrieve that key and then we call the usual OpenAI uh, constructor to, to establish + +03:41.710 --> 03:43.810 +the, the interface connection. + +03:43.810 --> 03:48.820 +But this time I am passing in that OpenAI, OpenAI API key. + +03:49.000 --> 03:54.190 +Uh, you remember in the past I've not had to specify this because I've relied on the fact that the + +03:54.190 --> 03:55.630 +environment variable is set. + +03:55.660 --> 03:58.570 +This time I'm passing it in explicitly. + +03:58.630 --> 04:00.010 +So there we go. + +04:00.250 --> 04:04.570 +That is now established the OpenAI connection. + +04:04.870 --> 04:06.790 +And then what am I going to do? + +04:06.790 --> 04:14.290 +I'm going to take this audio file, which is sitting on my Google Drive that's now mapped to this colab. + +04:14.290 --> 04:18.010 +And then I'm going to call OpenAI dot audio. + +04:18.010 --> 04:24.790 +Dot transcriptions dot create, which is very similar to other OpenAI API methods we've used. + +04:24.820 --> 04:30.130 +It's particularly similar to the one when we actually made, made it to speak, made it generate audio. + +04:30.340 --> 04:36.370 +I passed in the name of the model, the whisper one model, the file, and that I want the response + +04:36.370 --> 04:37.150 +in text. + +04:37.150 --> 04:42.760 +And then I will print what comes back from OpenAI's whisper model. + +04:42.760 --> 04:49.870 +So it's been provided with a bunch of audio, or it's being provided as we speak with a bunch of audio + +04:49.900 --> 05:00.460 +that is now calling the frontier model, and we are currently waiting to get back a transcription of + +05:00.490 --> 05:01.690 +that meeting. + +05:02.590 --> 05:03.700 +Well, that's happening. 
+ +05:03.700 --> 05:07.030 +I'm going to keep going so that we can get ahead on on the other things. + +05:07.030 --> 05:07.960 +I have to run. + +05:07.960 --> 05:14.080 +We're then going to set up the prompt for llama three, and there's going to be a system prompt, a + +05:14.110 --> 05:16.630 +system message and a user prompt system messages. + +05:16.630 --> 05:22.120 +You're an assistant that produces meetings of minutes from transcripts with a summary, key discussion + +05:22.120 --> 05:29.380 +points, takeaways and action items with owners in markdown, uh, and then a user prompt that says + +05:29.380 --> 05:36.430 +below is the transcript of a an extract transcript. + +05:36.490 --> 05:36.910 +That's fine. + +05:36.940 --> 05:39.520 +I thought my English was bad, but it's okay. + +05:39.550 --> 05:41.680 +Other Denver Council meeting. + +05:41.680 --> 05:46.750 +Please write minutes in markdown, including a summary with attendees, location and date, discussion + +05:46.750 --> 05:48.790 +points, takeaways, and action items with owners. + +05:48.790 --> 05:54.640 +And then I shove in the transcript of the meeting right after that user prompt. + +05:54.820 --> 05:56.560 +Here is the transcript. + +05:56.590 --> 06:00.440 +It just got printed out And it's a long old transcript. + +06:00.440 --> 06:02.960 +The Denver City Council meeting. + +06:02.990 --> 06:09.890 +Talked for quite a while, and a lot of it was about Indigenous Peoples Day, which was the upcoming + +06:10.010 --> 06:11.540 +federal holiday. + +06:11.780 --> 06:19.130 +And there was some debate about the right way for the council to recognize Indigenous Peoples Day. + +06:19.130 --> 06:24.050 +If you read through all of this text or if you listen to the, the, the audio. + +06:24.050 --> 06:30.140 +So this is all now in text in this transcription variable. + +06:30.230 --> 06:31.760 +So we started with audio. + +06:31.790 --> 06:36.050 +We now have text thanks to OpenAI's whisper one model. 
+
+06:36.260 --> 06:40.100
+We now create our system and user prompt.
+
+06:40.130 --> 06:41.960
+Now this will look familiar to you.
+
+06:41.960 --> 06:44.000
+This is our quant config.
+
+06:44.000 --> 06:45.950
+We're going to be quantizing again.
+
+06:45.980 --> 06:46.490
+Why not.
+
+06:46.520 --> 06:55.790
+It was very effective with llama 3.1 before it reduced the memory significantly down to 5.5 gigabytes.
+
+06:56.060 --> 06:57.320
+But it did not.
+
+06:57.350 --> 06:59.510
+At least its performance seemed to be perfectly good to us.
+
+06:59.510 --> 07:03.410
+Maybe you tried it without quantizing to see how much better the joke was.
+
+07:03.590 --> 07:03.950
+Um.
+
+07:03.980 --> 07:06.830
+I wouldn't be surprised if it didn't make much difference at all.
+
+07:06.860 --> 07:08.990
+Quantization is very effective.
+
+07:09.320 --> 07:12.890
+Okay, it's time for action.
+
+07:12.980 --> 07:17.990
+This should all be quite familiar to you because this is what we did last time.
+
+07:17.990 --> 07:26.360
+We are going to create a tokenizer for Lama using the auto tokenizer Frompretrained method.
+
+07:26.360 --> 07:30.410
+We're going to do this business of setting the pad token as before.
+
+07:30.560 --> 07:35.900
+Then we're going to call the apply chat template function method.
+
+07:35.900 --> 07:39.950
+Passing in the messages the this this right here.
+
+07:39.980 --> 07:41.090
+We're passing that in.
+
+07:41.090 --> 07:43.790
+And this of course includes the whole transcript.
+
+07:43.820 --> 07:47.090
+It includes the text of the whole meeting and the user prompt.
+
+07:47.120 --> 07:51.350
+And we're going to put that massive amount of text on our GPU.
+
+07:51.410 --> 07:53.270
+We're going to stream again.
+
+07:53.270 --> 07:55.790
+So use this text stream object.
+
+07:55.790 --> 07:57.800
+And then here we go.
+
+07:57.830 --> 07:58.400
+This is.
+
+07:58.400 --> 08:00.710
+This is when we create our model.
+ +08:00.710 --> 08:03.080 +We create the auto model for causal Elm. + +08:03.080 --> 08:06.680 +We pass in the llama model name. + +08:06.680 --> 08:10.070 +We say, please use a GPU if we've got one, which we do. + +08:10.220 --> 08:13.760 +We're using the T4 box, the small GPU box for this. + +08:13.760 --> 08:17.150 +And we pass in our quantization config. + +08:17.450 --> 08:20.480 +I'm going to start this running now because it will take take a while. + +08:20.480 --> 08:22.820 +I should have started running before I was talking. + +08:23.000 --> 08:24.770 +That would have been smarter. + +08:24.950 --> 08:26.090 +Uh uh. + +08:26.090 --> 08:33.410 +And so we're going to then create the model and then we're going to do the action. + +08:33.440 --> 08:37.610 +Action is to call generate on model. + +08:37.610 --> 08:46.220 +And when you call generate you have to pass in the inputs, which of course is now this entire tokenized, + +08:46.250 --> 08:48.830 +uh prompts and transcript. + +08:49.070 --> 08:51.770 +This is a bit bigger than you're used to before. + +08:51.770 --> 08:54.110 +We used to say in the maximum new tokens was 80. + +08:54.140 --> 09:00.560 +Now we're saying maximum new tokens is 2000 because there could be quite a hefty response. + +09:00.830 --> 09:07.940 +Um, so, uh, uh, we that should be enough space to get back our meeting minutes. + +09:07.940 --> 09:15.830 +And then we're also providing the streamer, which is telling it that it can stream results back into + +09:15.830 --> 09:17.270 +our colab. + +09:17.780 --> 09:22.640 +While it's going to be thinking for a little bit, I'll tell you what's going to happen next is going + +09:22.640 --> 09:25.550 +to stream the meeting minutes back in here. + +09:25.790 --> 09:32.930 +Um, afterwards, what we can also do is we can also just get that text by taking the outputs, taking + +09:32.930 --> 09:36.260 +the first of the outputs, and there only will be one. 
+
+09:36.500 --> 09:41.540
+And then decoding that using tokenizer dot decode.
+
+09:41.840 --> 09:45.380
+Uh, and that's something we will then put into a variable called response.
+
+09:45.380 --> 09:46.790
+Well here come the minutes.
+
+09:47.150 --> 09:47.960
+Um.
+
+09:52.430 --> 09:53.600
+It's about to come.
+
+09:53.600 --> 09:54.770
+It's so far.
+
+09:54.770 --> 09:56.690
+Just put the, the uh.
+
+09:57.750 --> 09:59.520
+The transcript in there.
+
+10:04.170 --> 10:10.320
+Minutes of the Denver City Council meeting Monday, October the 9th and location attendees who are the
+
+10:10.320 --> 10:11.370
+attendees.
+
+10:12.930 --> 10:14.100
+Summary.
+
+10:19.380 --> 10:25.530
+They met on Monday, October the 9th to discuss and adopt a proclamation for Indigenous Peoples Day.
+
+10:25.560 --> 10:28.320
+Councilman Lopez presented the proclamation.
+
+10:28.410 --> 10:29.970
+Key discussion points.
+
+10:30.000 --> 10:31.050
+Takeaways.
+
+10:31.050 --> 10:34.140
+It was adopted recognizing the importance of the day.
+
+10:34.170 --> 10:37.590
+They emphasized the importance of inclusivity and respecting all cultures.
+
+10:37.620 --> 10:41.250
+Some actions with owners and actions.
+
+10:41.250 --> 10:44.370
+Councilman Lopez and clerk.
+
+10:44.520 --> 10:49.890
+Clerk is to attest and affix the seal of the City and Council of Denver to the proclamation.
+
+10:49.890 --> 10:57.330
+And then, uh, Councilman Lopez to transmit a copy of the proclamation to the Denver American Indian
+
+10:57.330 --> 11:03.420
+Commission and some other areas, and then some next steps at the end.
+
+11:03.420 --> 11:06.960
+So I've got to hand it to llama 3.1.
+
+11:06.960 --> 11:13.230
+This seems to be a very comprehensive, very clear, very thorough set of minutes with attendees with
+
+11:13.230 --> 11:20.100
+date with with, uh, all of the right format and the right sections.
+
+11:20.130 --> 11:24.510
+Now, you'll notice, of course, that it's come in markdown format.
+ +11:24.510 --> 11:30.330 +And you're familiar from, uh, when we were working with frontier models before in Jupyter Notebook + +11:30.330 --> 11:39.360 +locally, that we can use this display markdown response as our way to see that in markdown in the Colab. + +11:39.360 --> 11:47.310 +And here we have, uh, the minutes of the Denver City Council meeting, um, and, uh, organized into + +11:47.310 --> 11:53.310 +those various sections of the summary, the takeaways, the action items, and the next steps. + +11:53.490 --> 12:01.210 +So I give you an application that uses a frontier model and an open source model to take audio and convert + +12:01.240 --> 12:07.660 +it to a transcript, and convert that transcript to a meeting summary with actions and next steps. + +12:09.160 --> 12:11.620 +Well, the obvious exercise for you. + +12:11.620 --> 12:13.900 +I hope you've already guessed what it's going to be. + +12:13.900 --> 12:17.950 +It's easy peasy to now put that into a nice user interface. + +12:17.950 --> 12:25.780 +You can use Gradio very similar to what we've had in the previous week, and you can bring up this into + +12:25.780 --> 12:27.370 +a nice little Gradio interface. + +12:27.370 --> 12:32.590 +Perhaps you could type out the name of a file on your Google Drive and press Generate Minutes. + +12:32.620 --> 12:39.940 +It will read in that audio, convert it to text, and then convert it to meeting minutes actions, takeaways, + +12:39.970 --> 12:41.200 +next steps. + +12:41.800 --> 12:43.570 +So that's the task for you. + +12:43.570 --> 12:45.640 +Please go away and do that. + +12:45.640 --> 12:49.270 +And I can't wait to see some terrific user interfaces. + +12:49.270 --> 12:51.730 +Please do push the code when you've got it. + +12:51.730 --> 12:56.470 +I would love to see them and I will see you for the next lecture in a moment. 
diff --git a/week5/community-contributions/subtitles/srts/59170291/ja_JP.srt b/week5/community-contributions/subtitles/srts/59170291/ja_JP.srt new file mode 100755 index 0000000..a022f31 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170291/ja_JP.srt @@ -0,0 +1,502 @@ +WEBVTT + +00:00.950 --> 00:05.600 +Colabへようこそ、 そして我々のビジネスプロジェクトへようこそ。 + +00:05.600 --> 00:12.500 +というわけで、 今回もオーディオファイルをもとに議事録を作成することになった。 + +00:12.620 --> 00:15.620 +それで、 とても役に立つデータを見つけたんだ。 + +00:15.620 --> 00:19.610 +完璧なデータセットを用意してくれるのは、 典型的な抱きつき顔だ。 + +00:19.790 --> 00:25.760 +ミーティング・バンクと呼ばれるデータセットで、 + +00:25.790 --> 00:31.250 +これはかなり有名なデータセットらしい。 + +00:31.460 --> 00:32.990 +それで、 これを使っているんだ。 + +00:32.990 --> 00:43.730 +あるデンバー市議会の会議をダウンロードし、 その一部を10分間撮影した。 + +00:43.730 --> 00:46.550 +ええと、 10分だったか、 20分だったか、 そのどちらかだ。 + +00:46.640 --> 00:54.890 +とにかく、 その音声をカットしてグーグルドライブに保存したんだ。 僕がここで考えているのは、 この製品でグーグルドライブにあるもの、 + +00:54.890 --> 00:59.690 +あるいは会社のために作るのであれば会社のドライブにあるものを何でも取り込んで、 + +00:59.690 --> 01:06.150 +それを使って議事録を作成できるようにしたいんだ。 + +01:06.150 --> 01:13.800 +このプロジェクトの一環として、 ちょっとしたサイドバーとして、 Google DriveからColabを読み込む方法も紹介しよう。 + +01:13.800 --> 01:23.850 +では、 ピップ・インストールのインポートについて、 いつものように説明しよう。 + +01:23.850 --> 01:29.250 +このラボにもOpenAIをインストールすることになる。 + +01:29.250 --> 01:34.170 +ハグフェイスだけじゃなくて、 ハグフェイスのパッケージをたくさん使っているんだ。 + +01:34.170 --> 01:37.140 +そしてOpenAIのライブラリも。 + +01:37.230 --> 01:44.730 +OpenAIのインポートも含め、 あらゆるものをインポートしている。 + +01:45.000 --> 01:47.370 +それから、 いくつかの定数を設定します。 + +01:47.370 --> 01:52.020 +私たちはウィスパーと呼ばれるオーディオモデルを使うつもりです。 + +01:52.020 --> 01:55.530 +以前、 君にこの任務を与えたときだ。 + +01:55.710 --> 02:01.770 +それからこれがラマ3だ。 180億ドルのインストラクター・モデルも使用する予定だ。 + +02:01.890 --> 02:08.010 +これが、 これから学ぶ新しい能力だ。 + +02:08.040 --> 02:09.660 +今日もちょっとおまけ。 + +02:09.690 --> 02:13.080 +これがColabをGoogle Driveに接続する方法です。 + +02:13.110 --> 02:14.280 +超シンプルだ。 + +02:14.310 --> 02:16.140 +しかも、 ただのドライブドットマウントだ。 + +02:16.140 --> 02:19.260 +そして、 どこに行きたいかを伝える。 + +02:19.290 --> 02:20.220 +ドライブをマウントする。 + +02:20.220 --> 02:21.720 
+基本的にはそれだけだ。 + +02:21.720 --> 02:25.740 +そして、 自分のドライブの範囲内ということを自分に課している。 + +02:25.800 --> 02:33.480 +LMSというフォルダの中にデンバー・エクストラクト・ドットmp3というのがあって、 + +02:33.480 --> 02:43.080 +これがデンバー市議会の10分から20分の間のセグメントをMP3で録音したものです。 + +02:43.110 --> 02:51.900 +これを実行すると、 ポップアップで「Googleに接続しています。 + +02:51.930 --> 02:55.980 +2回目の実行ですが、 最初に実行したときは、 もちろん認証がポップアップ表示され、 + +02:55.980 --> 03:02.680 +Googleアカウントでログインしていることを確認し、 アクセスを許可するよう選択されました。 + +03:02.680 --> 03:05.860 +今回はすでにそこに装着されていると言っている。 + +03:05.980 --> 03:13.780 +ここのフォルダに行けば、 Googleドライブにアクセスして、 スラッシュ・コンテンツ・スラッシュ・ドライブの下にあるすべてのファイルを見ることができる。 + +03:14.410 --> 03:18.040 +そこでHuggingfaceのハブにサインインする。 + +03:18.760 --> 03:19.300 +さあ、 始めよう。 + +03:19.330 --> 03:20.620 +ログインに成功しました。 + +03:20.620 --> 03:23.380 +そしてOpenAIにもサインインする。 + +03:23.410 --> 03:25.690 +だから、 これはとても似ている。 + +03:25.690 --> 03:33.340 +OpenAIのキーを取得します。 このコラボの秘密の中にOpenAIのキーを設定しました。 + +03:33.580 --> 03:43.810 +そして、 そのキーを取得し、 通常のOpenAIのコンストラクタを呼び出して、 インターフェースの接続を確立します。 + +03:43.810 --> 03:48.820 +しかし今回は、 OpenAIのAPIキーを渡している。 + +03:49.000 --> 03:55.630 +過去には、 環境変数が設定されていることに頼っていたので、 これを指定する必要がなかったのを覚えているだろう。 + +03:55.660 --> 03:58.570 +今回は明確にパスする。 + +03:58.630 --> 04:00.010 +そうだ。 + +04:00.250 --> 04:04.570 +これでOpenAIの接続が確立した。 + +04:04.870 --> 04:06.790 +それからどうすればいいんだ? 
+ +04:06.790 --> 04:14.290 +グーグルドライブにあるこの音声ファイルを、 このコラボにマッピングしておこうと思う。 + +04:14.290 --> 04:18.010 +そしてOpenAI dot audioを呼び出す。 + +04:18.010 --> 04:24.790 +ドット転写ドット作成は、 私たちが使ってきた他のOpenAI APIのメソッドと非常によく似ています。 + +04:24.820 --> 04:30.130 +特に、 私たちが実際に作り、 喋らせ、 音声を発生させたときのものと似ている。 + +04:30.340 --> 04:37.150 +モデル名、 ウィスパー・ワン・モデル、 ファイル名、 そしてレスポンスをテキストで表示することを入力した。 + +04:37.150 --> 04:42.760 +そして、 OpenAIのウィスパーモデルから返ってきたものをプリントする。 + +04:42.760 --> 04:49.870 +つまり、 フロンティア・モデルと呼ばれる音声の束が提供され、 あるいは今話しているように音声の束が提供され、 + +04:49.900 --> 05:01.690 +現在その会議の書き起こしが戻ってくるのを待っているところです。 + +05:02.590 --> 05:03.700 +まあ、 それは起こっている。 + +05:03.700 --> 05:07.030 +他のことを先に進めるように、 このまま続けるつもりだ。 + +05:07.030 --> 05:07.960 +私は走らなければならない。 + +05:07.960 --> 05:16.630 +システムプロンプト、 システムメッセージ、 ユーザープロンプトのシステムメッセージがある。 + +05:16.630 --> 05:22.120 +あなたは、 議事録から要約、 主要な論点、 + +05:22.120 --> 05:36.430 +要点、 行動項目をマークダウンした所有者を含む議事録を作成するアシスタントです。 + +05:36.490 --> 05:36.910 +それでいい。 + +05:36.940 --> 05:39.520 +僕の英語は下手だと思ったけど、 大丈夫だよ。 + +05:39.550 --> 05:41.680 +その他のデンバー評議会 + +05:41.680 --> 05:46.750 +議事録は、 出席者、 場所、 日付、 ディスカッションのポイント、 持ち帰り事項、 所有者を記したアクションアイテムなどのサマリーを含めて、 + +05:46.750 --> 05:48.790 +マークダウンで作成してください。 + +05:48.790 --> 05:54.640 +そして、 そのユーザープロンプトの直後にミーティングの記録を押し込む。 + +05:54.820 --> 05:56.560 +以下はその記録である。 + +05:56.590 --> 06:00.440 +ちょうどプリントアウトしたところで、 長い古い記録なんだ。 + +06:00.440 --> 06:02.960 +デンバー市議会。 + +06:02.990 --> 06:11.540 +かなり長い時間話し込んだが、 その多くは、 今度の連邦祝日である先住民の日についてだった。 + +06:11.780 --> 06:19.130 +そして、 先住民の日を議会がどのように認識するのが正しいかについても議論があった。 + +06:19.130 --> 06:24.050 +この文章をすべて読み通すか、 あるいは音声を聴いてみてほしい。 + +06:24.050 --> 06:30.140 +つまり、 これはすべてこの転写変数にテキストで入っている。 + +06:30.230 --> 06:31.760 +そこで私たちはオーディオから始めた。 + +06:31.790 --> 06:36.050 +OpenAIのウィスパー・ワン・モデルのおかげで、 テキストが使えるようになった。 + +06:36.260 --> 06:40.100 +システムプロンプトとユーザープロンプトを作成する。 + +06:40.130 --> 06:41.960 +見慣れた光景だろう。 + +06:41.960 --> 06:44.000 +これがクオンツのコンフィグだ。 + +06:44.000 --> 06:45.950 +また量子化することになる。 + +06:45.980 --> 06:46.490 +なぜだ。 + +06:46.520 --> 06:55.790 
+ラムダ3では非常に効果的だった。 メモリが55まで大幅に減少する前に、 1. 5ギガバイト。 + +06:56.060 --> 06:57.320 +しかし、 そうはならなかった。 + +06:57.350 --> 06:59.510 +少なくとも、 彼のパフォーマンスは私たちには完璧に見えた。 + +06:59.510 --> 07:03.410 +もしかしたら、 量子化せずに試してみて、 どれだけジョークがうまくなったか確かめたのかもしれない。 + +07:03.590 --> 07:03.950 +うーん。 + +07:03.980 --> 07:06.830 +大差なくても驚かないよ。 + +07:06.860 --> 07:08.990 +量子化は非常に効果的だ。 + +07:09.320 --> 07:12.890 +よし、 アクションの時間だ。 + +07:12.980 --> 07:17.990 +これは前回もやったことなので、 皆さんもよくご存知のはずだ。 + +07:17.990 --> 07:26.360 +自動トークナイザーFrompretrainedメソッドを使用して、 Lama用のトークナイザーを作成します。 + +07:26.360 --> 07:30.410 +パッド・トークンをセットする作業は、 これまでと同じように行う。 + +07:30.560 --> 07:35.900 +次に、 apply chat template functionメソッドを呼び出します。 + +07:35.900 --> 07:39.950 +メッセージで伝える......これだ。 + +07:39.980 --> 07:41.090 +私たちはそれをパスしている。 + +07:41.090 --> 07:43.790 +もちろん、 これには全記録が含まれる。 + +07:43.820 --> 07:47.090 +会議全体のテキストとユーザーによるプロンプトが含まれる。 + +07:47.120 --> 07:51.350 +そして、 その大量のテキストをGPUに載せるのだ。 + +07:51.410 --> 07:53.270 +またストリーミングするつもりだ。 + +07:53.270 --> 07:55.790 +そこで、 このテキスト・ストリーム・オブジェクトを使う。 + +07:55.790 --> 07:57.800 +そして、 こうだ。 + +07:57.830 --> 07:58.400 +これがそうだ。 + +07:58.400 --> 08:00.710 +これがモデルを作るときだ。 + +08:00.710 --> 08:03.080 +我々は、 因果関係エルムの自動モデルを作成する。 + +08:03.080 --> 08:06.680 +ラマのモデル名を渡す。 + +08:06.680 --> 08:10.070 +GPUがあれば使ってください。 + +08:10.220 --> 08:13.760 +T4ボックス(小型GPUボックス)を使っている。 + +08:13.760 --> 08:17.150 +そして量子化コンフィグを渡す。 + +08:17.450 --> 08:20.480 +しばらく時間がかかるだろうから、 今から走り始めるつもりだ。 + +08:20.480 --> 08:22.820 +話す前に走り出すべきだった。 + +08:23.000 --> 08:24.770 +その方がスマートだ。 + +08:24.950 --> 08:26.090 +ええと......。 + +08:26.090 --> 08:33.410 +そしてモデルを作成し、 アクションを実行する。 + +08:33.440 --> 08:37.610 +アクションはモデルのジェネレートを呼び出すことである。 + +08:37.610 --> 08:48.830 +generateを呼び出すときには、 もちろん、 トークン化されたプロンプトとトランスクリプト全体を入力として渡さなければならない。 + +08:49.070 --> 08:51.770 +これは、 あなたが以前使っていたものより少し大きい。 + +08:51.770 --> 08:54.110 +以前は新規トークンの上限は80枚だと言っていた。 + +08:54.140 --> 09:00.560 +現在、 新規トークンの上限を2000としているのは、 かなりの反響がある可能性があるからだ。 + +09:00.830 --> 09:07.940 +ええと、 それで、 ええと、 
会議の議事録を取り戻すには十分なスペースが必要なんだ。 + +09:07.940 --> 09:17.270 +そして、 ストリーマーを提供し、 そのストリーマーが結果を私たちのラボにストリームバックできることを伝えている。 + +09:17.780 --> 09:25.550 +少し考え込んでいる間に、 次に何が起こるかと言うと、 会議の議事録をここに流すんだ。 + +09:25.790 --> 09:36.260 +ええと、 その後、 私たちができることは、 出力を取ってテキストを取得することです。 + +09:36.500 --> 09:41.540 +そして、 それをトークナイザー・ドット・デコードを使ってデコードする。 + +09:41.840 --> 09:45.380 +それをresponseという変数に入れるんだ。 + +09:45.380 --> 09:46.790 +さて、 議事録だ。 + +09:47.150 --> 09:47.960 +うーん。 + +09:52.430 --> 09:53.600 +もうすぐだ。 + +09:53.600 --> 09:54.770 +これまでのところだ。 + +09:54.770 --> 09:56.690 +を置くだけだ。 + +09:57.750 --> 09:59.520 +その記録である。 + +10:04.170 --> 10:11.370 +デンバー市議会10月9日(月)の議事録と出席者。 + +10:12.930 --> 10:14.100 +概要 + +10:19.380 --> 10:25.530 +彼らは10月9日(月)に会合を開き、 先住民の日の宣言を討議し採択した。 + +10:25.560 --> 10:28.320 +ロペス議員が宣言を発表した。 + +10:28.410 --> 10:29.970 +主な論点 + +10:30.000 --> 10:31.050 +収穫。 + +10:31.050 --> 10:34.140 +この日の重要性を認識して採択された。 + +10:34.170 --> 10:37.590 +彼らは、 包括性とあらゆる文化を尊重することの重要性を強調した。 + +10:37.620 --> 10:41.250 +オーナーと行動 + +10:41.250 --> 10:44.370 +ロペス議員と事務員 + +10:44.520 --> 10:49.890 +書記官は、 この宣言にデンバー市およびデンバー議会の印章を押印する。 + +10:49.890 --> 10:57.330 +そして、 ロペス議員がデンバー・アメリカン・インディアン委員会とその他の地域に宣言のコピーを送付し、 + +10:57.330 --> 11:03.420 +最後に次のステップを紹介する。 + +11:03.420 --> 11:06.960 +だから、 ラマ3世には敬意を表したい。 1. 
+ +11:06.960 --> 11:13.230 +この議事録は、 出席者、 日付、 形式、 セクションがすべて揃った、 + +11:13.230 --> 11:20.100 +非常に包括的で、 明確で、 徹底した議事録のようだ。 + +11:20.130 --> 11:24.510 +もちろん、 マークダウン形式であることにお気づきだろう。 + +11:24.510 --> 11:30.330 +そして、 以前Jupyter Notebookでフロンティアモデルをローカルで扱ったときに、 + +11:30.330 --> 11:39.360 +Colabでマークダウンを表示する方法として、 このディスプレイマークダウンレスポンスが使えることをご存知でしょう。 + +11:39.360 --> 11:47.310 +そしてここに、 デンバー市議会の議事録があり、 要約、 要点、 行動項目、 次のステップなど、 + +11:47.310 --> 11:53.310 +さまざまなセクションに整理されている。 + +11:53.490 --> 12:07.660 +そこで、 フロンティアモデルとオープンソースモデルを使って、 音声を取り込み、 それをトランスクリプトに変換し、 そのトランスクリプトをアクションと次のステップを含むミーティングの要約に変換するアプリケーションを紹介しよう。 + +12:09.160 --> 12:11.620 +さて、 あなたには明らかな練習がある。 + +12:11.620 --> 12:13.900 +もうおわかりいただけただろうか。 + +12:13.900 --> 12:17.950 +それを素敵なユーザーインターフェイスに落とし込むのは簡単だ。 + +12:17.950 --> 12:27.370 +Gradioは、 前の週と同じように使うことができる。 + +12:27.370 --> 12:32.590 +グーグル・ドライブ上のファイル名を入力し、 「議事録の作成」を押せばいい。 + +12:32.620 --> 12:41.200 +その音声を読み込んでテキストに変換し、 会議の議事録、 持ち帰り事項、 次のステップに変換する。 + +12:41.800 --> 12:43.570 +それがあなたにとっての課題です。 + +12:43.570 --> 12:45.640 +そうしてくれ。 + +12:45.640 --> 12:49.270 +そして、 素晴らしいユーザー・インターフェースを見るのが待ちきれない。 + +12:49.270 --> 12:51.730 +コードを入手したら、 ぜひプッシュしてほしい。 + +12:51.730 --> 12:56.470 +また次の講義でお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/59170291/ko_KR.srt b/week5/community-contributions/subtitles/srts/59170291/ko_KR.srt new file mode 100755 index 0000000..3ec6fe0 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170291/ko_KR.srt @@ -0,0 +1,556 @@ +WEBVTT + +00:00.950 --> 00:05.600 +콜랍에 다시 오신 걸 환영합니다 사업 프로젝트에 다시 오신 것도요 + +00:05.600 --> 00:12.500 +다시 숙제가 나왔네요 오디오 파일을 기반으로 회의록을 만들어야 해요 + +00:12.620 --> 00:15.620 +아주 유용한 데이터 세트를 찾았어요 + +00:15.620 --> 00:19.610 +완벽한 데이터를 얻기 위해 포옹하는 얼굴이죠 + +00:19.790 --> 00:25.760 +미팅 뱅크라는 데이터 세트인데 꽤 유명한 데이터 세트예요 + +00:25.790 --> 00:31.250 +미국 시의회의 6대 주요 도시에서 만든 척도죠 + +00:31.460 --> 00:32.990 +그래서 이걸 써요 + +00:32.990 --> 00:39.800 +덴버시 의회 회의 중 하나를 다운로드해서 10분 정도 편집한 + +00:39.800 --> 00:43.730 +부분을 실험에 사용했어요 + +00:43.730 --> 
00:46.550 +10분인가 20분 정도 됐을 거예요 + +00:46.640 --> 00:54.890 +어쨌든 오디오를 잘라 구글 드라이브에 저장했어요 구글 드라이브에 있는 건 뭐든 취할 수 있는 + +00:54.890 --> 00:59.690 +제품이 되길 원했거든요 회사의 드라이브에 있는 회사를 + +00:59.690 --> 01:06.150 +위해 이걸 구축하고 있다면 회의록 생성하는 데 사용할 수 있어요 + +01:06.150 --> 01:11.850 +이 프로젝트의 일부로 작은 부록을 드리죠 구글 드라이브에서 Colab이 읽게 + +01:11.880 --> 01:13.800 +하는 방법을 보여드릴게요 + +01:13.800 --> 01:23.850 +늘 그렇듯 수입부터 시작할게요 Pip 설치부터요 하나 더 있어요 + +01:23.850 --> 01:29.250 +이 Colab에도 OpenAI를 설치할 거예요 + +01:29.250 --> 01:34.170 +포옹하는 얼굴만 쓰는 게 아니라 포옹하는 얼굴과 소포를 잔뜩 사용해요 + +01:34.170 --> 01:37.140 +오픈AI 라이브러리도 있어요 + +01:37.230 --> 01:44.730 +오픈AI와 다른 모든 것들을 수입하는 일도 해요 + +01:45.000 --> 01:47.370 +그리고 몇 가지 상수를 설정할 거예요 + +01:47.370 --> 01:52.020 +위스퍼라는 오디오 모델을 사용할 거예요 여러분이 사용할 수도 있는 거죠 + +01:52.020 --> 01:55.530 +전에 과제를 드렸을 때요 + +01:55.710 --> 02:01.770 +이건 라마 3이고요 180억 개짜리 지시 모델도 사용하게 될 거예요 + +02:01.890 --> 02:08.010 +여기 여러분이 배우게 될 새 기능이 있어요 + +02:08.040 --> 02:09.660 +오늘도 좀 더 주세요 + +02:09.690 --> 02:13.080 +콜랍을 구글 드라이브에 연결하는 방법이죠 + +02:13.110 --> 02:14.280 +아주 간단해요 + +02:14.310 --> 02:16.140 +드라이브 닷 마운트예요 + +02:16.140 --> 02:19.260 +원하는 곳을 말해주면 돼요 + +02:19.290 --> 02:20.220 +구동축을 올려요 + +02:20.220 --> 02:21.720 +그게 다예요 + +02:21.720 --> 02:25.740 +제 열정에 대한 고정관념을 세웠어요 + +02:25.800 --> 02:33.480 +LMS라는 폴더에 들어 있는데 그 안에 덴버의 닷 mp3 추출물이 있어요 + +02:33.480 --> 02:41.190 +MP3 녹음 파일인데 덴버 시의회에서 10분에서 20분 정도 분량으로 + +02:41.190 --> 02:43.080 +녹음한 거예요 + +02:43.110 --> 02:51.900 +이걸 실행하면 뭐가 떠요 구글 1번과 연결됐죠 + +02:51.930 --> 02:55.980 +이걸 두 번째로 실행하고 있어요 처음 실행했을 때 인증과 함께 + +02:55.980 --> 03:02.680 +나타났죠 구글 계정으로 로그인되어 엑세스 권한을 부여받았다는 걸 확인하기 위한 선택이요 + +03:02.680 --> 03:05.860 +이번에는 이미 설치됐다고 하네요 + +03:05.980 --> 03:10.390 +이 폴더로 가면 구글 드라이브로 가서/콘텐츠/드라이브 + +03:10.420 --> 03:13.780 +아래에 있는 모든 파일을 볼 수 있어요 + +03:14.410 --> 03:18.040 +그런 다음 H깅페이스 허브에 로그인하죠 + +03:18.760 --> 03:19.300 +시작할게요 + +03:19.330 --> 03:20.620 +로그인 성공이에요 + +03:20.620 --> 03:23.380 +오픈AI에도 로그인해야 해요 + +03:23.410 --> 03:25.690 +이것도 비슷해요 + +03:25.690 --> 03:33.340 +오픈아이 키를 
준비했어요 오픈아이 키도 콜랍에 넣어 뒀죠 + +03:33.580 --> 03:41.710 +그 키를 회수하고 일반적인 OpenAI 생성자를 호출해 인터페이스 + +03:41.710 --> 03:43.810 +연결을 설정해요 + +03:43.810 --> 03:48.820 +이번엔 OpenAI API 키를 넘길게요 + +03:49.000 --> 03:54.190 +기억하시겠지만 과거에는 이걸 명시할 필요가 없었죠 환경 변수가 설정되어 있다는 사실에 + +03:54.190 --> 03:55.630 +의존했으니까요 + +03:55.660 --> 03:58.570 +이번엔 명시적으로 전달하죠 + +03:58.630 --> 04:00.010 +자, 됐어요 + +04:00.250 --> 04:04.570 +오픈아이와의 연결 고리가 만들어졌죠 + +04:04.870 --> 04:06.790 +그럼 전 어떻게 하죠? + +04:06.790 --> 04:14.290 +이 오디오 파일을 취하겠습니다 구글 드라이브에 있는 거죠 이제 이 Colab에 매핑됐어요 + +04:14.290 --> 04:18.010 +OpenAI. audio를 호출할 거예요 + +04:18.010 --> 04:24.790 +.Rrcript는 .Create입니다 다른 오픈AI API 방식과 아주 유사하죠 + +04:24.820 --> 04:30.130 +실제로 말을 하도록 만들었을 때 오디오를 생성하게 했던 것과 특히 유사하죠 + +04:30.340 --> 04:36.370 +모델 이름을 입력했어요 위스퍼 원 모델, 파일 응답을 텍스트로 원한다는 + +04:36.370 --> 04:37.150 +거죠 + +04:37.150 --> 04:42.760 +오픈아이 위스퍼 모델에서 나온 것을 프린트할 거예요 + +04:42.760 --> 04:49.870 +많은 오디오가 제공되었죠 프론티어 모델이라 부르는 + +04:49.900 --> 05:00.460 +많은 오디오가 제공되고 있어요 현재 그 회의의 녹취본을 받으려고 기다리고 + +05:00.490 --> 05:01.690 +있죠 + +05:02.590 --> 05:03.700 +그렇게 됐네요 + +05:03.700 --> 05:07.030 +Get it, get it, get it, get it, get it, it, it, it, it. 
다른 걸 할 수 있게 계속할게요 + +05:07.030 --> 05:07.960 +난 가야 해요 + +05:07.960 --> 05:14.080 +다음엔 llama3 프롬프트를 설정하겠습니다 시스템 프롬프트 시스템 메시지, + +05:14.110 --> 05:16.630 +사용자 프롬프트가 있어요 + +05:16.630 --> 05:22.120 +비서가 회의록을 작성하고 요약, 핵심 토론 포인트 + +05:22.120 --> 05:29.380 +테이크아웃, 행동 항목을 작성해요 소유주가 마크다운된 사항이죠 + +05:29.380 --> 05:36.430 +아래에 추출한 회의록의 프롬프트가 있어요 + +05:36.490 --> 05:36.910 +괜찮아요 + +05:36.940 --> 05:39.520 +제 영어 실력이 별로인 줄 알았는데 괜찮았어요 + +05:39.550 --> 05:41.680 +다른 덴버 의회 회의요 + +05:41.680 --> 05:46.750 +참석자와 장소, 날짜, 토론 포인트 포장 음식점, 소유주와 함께하는 활동 아이템 등을 + +05:46.750 --> 05:48.790 +포함해 요약본으로 작성하세요 + +05:48.790 --> 05:54.640 +그런 다음 회의 녹취록을 넣어요 사용자 프롬프트 바로 다음에요 + +05:54.820 --> 05:56.560 +여기 녹취록이에요 + +05:56.590 --> 06:00.440 +방금 인쇄했는데 아주 긴 필사본이에요 + +06:00.440 --> 06:02.960 +덴버 시의회 회의요 + +06:02.990 --> 06:09.890 +한동안 대화를 나눴는데 대부분 토착민의 날에 관한 거였어요 곧 다가올 연방 + +06:10.010 --> 06:11.540 +공휴일이죠 + +06:11.780 --> 06:19.130 +의회가 토착민의 날을 어떻게 인정할지 논쟁이 벌어졌어요 + +06:19.130 --> 06:24.050 +이 글을 전부 읽거나 오디오를 들어 보세요 + +06:24.050 --> 06:30.140 +이건 모두 텍스트로 되어 있고 이 전사 변수에 들어 있어요 + +06:30.230 --> 06:31.760 +그래서 오디오로 시작했죠 + +06:31.790 --> 06:36.050 +오픈라이의 위스퍼 원 모델 덕분에 텍스트가 생겼죠 + +06:36.260 --> 06:40.100 +이제 시스템과 사용자 프롬프트를 생성하죠 + +06:40.130 --> 06:41.960 +이제 눈에 익을 거예요 + +06:41.960 --> 06:44.000 +이건 우리의 퀀트 구성이에요 + +06:44.000 --> 06:45.950 +다시 수량화할 거예요 + +06:45.980 --> 06:46.490 +안 될 거 없죠 + +06:46.520 --> 06:55.790 +람다 3에 아주 효과적이었어요 메모리가 55로 크게 줄어들기 전까지는요 5기가바이트요 + +06:56.060 --> 06:57.320 +하지만 아니었죠 + +06:57.350 --> 06:59.510 +적어도 우리가 보기엔 연기가 완벽했어요 + +06:59.510 --> 07:03.410 +얼마나 웃긴지 보려고 수량화하지 않고 해 봤을 수도 있죠 + +07:03.590 --> 07:03.950 +네 + +07:03.980 --> 07:06.830 +별 차이가 없다고 해도 놀랍지 않을 거예요 + +07:06.860 --> 07:08.990 +퀀타이즈는 아주 효과적이에요 + +07:09.320 --> 07:12.890 +이제 행동할 시간이에요 + +07:12.980 --> 07:17.990 +지난번에 했던 거니까 익숙할 거예요 + +07:17.990 --> 07:26.360 +라마를 위한 토큰마이저를 만들 거예요 교육된 오토 토큰마이저를 사용해서요 + +07:26.360 --> 07:30.410 +이전처럼 패드 토큰을 설정하는 작업을 할 거예요 + +07:30.560 --> 07:35.900 +채팅 템플릿 함수 적용 메서드를 호출할게요 + +07:35.900 --> 
07:39.950 +메시지를 전달하는 거죠 + +07:39.980 --> 07:41.090 +전달하는 거죠 + +07:41.090 --> 07:43.790 +물론 녹취록도 포함해서요 + +07:43.820 --> 07:47.090 +전체 회의의 텍스트와 사용자 프롬프트를 포함하죠 + +07:47.120 --> 07:51.350 +GPU 상에 엄청난 양의 텍스트를 Put 할 거예요 + +07:51.410 --> 07:53.270 +다시 방송할 거예요 + +07:53.270 --> 07:55.790 +이 텍스트 스트림 객체를 사용하세요 + +07:55.790 --> 07:57.800 +이제 시작이에요 + +07:57.830 --> 07:58.400 +맞아요 + +07:58.400 --> 08:00.710 +이제 모델을 만들 거예요 + +08:00.710 --> 08:03.080 +인과율 엘름 오토모델을 제작해요 + +08:03.080 --> 08:06.680 +라마 모델명을 통과시키죠 + +08:06.680 --> 08:10.070 +GPU가 있으면 사용하라고 하고 실제로 사용하죠 + +08:10.220 --> 08:13.760 +T4 박스를 사용하고 있어요 작은 GPU 박스요 + +08:13.760 --> 08:17.150 +퀀타이즈 구성에서 넘기죠 + +08:17.450 --> 08:20.480 +시간이 좀 걸리니까 실행을 시작할게요 + +08:20.480 --> 08:22.820 +말하기 전에 뛰어야 했어요 + +08:23.000 --> 08:24.770 +그게 더 현명했을 거예요 + +08:24.950 --> 08:26.090 +네 + +08:26.090 --> 08:33.410 +그런 다음 모델을 생성하고 액션을 실행하죠 + +08:33.440 --> 08:37.610 +동작은 생성기를 호출하는 거죠 + +08:37.610 --> 08:46.220 +생성기를 호출할 때는 입력값을 넘겨야 합니다 입력값은 전체 토큰화 된 프롬프트와 + +08:46.250 --> 08:48.830 +녹취록이죠 + +08:49.070 --> 08:51.770 +비트가 좀 더 커졌어요 + +08:51.770 --> 08:54.110 +새 패는 최대 80장이라고 했죠 + +08:54.140 --> 09:00.560 +최대 토큰은 2000이라고 했어요 응답이 많을 수 있으니까요 + +09:00.830 --> 09:07.940 +그러니까, 어, 어 이 정도면 회의록 돌려받기에 충분한 공간이겠죠? 
Get it + +09:07.940 --> 09:15.830 +그리고 스트리머도 제공하는데 Cab으로 결과를 보낼 수 있다고 + +09:15.830 --> 09:17.270 +알려줘요 + +09:17.780 --> 09:22.640 +잠시 생각할 동안 다음에 일어날 일을 말씀드리죠 회의 내용을 여기서 + +09:22.640 --> 09:25.550 +스트림으로 비트로 보낼 거예요 + +09:25.790 --> 09:32.930 +그 후에 할 수 있는 또 다른 일은 출력을 가져와서 텍스트를 get 하는 것입니다 출력자의 첫 + +09:32.930 --> 09:36.260 +번째 것을 가져와서 하나만 남게 되는 거죠 + +09:36.500 --> 09:41.540 +토큰라이저 닷디코드로 그걸 해독해요 + +09:41.840 --> 09:45.380 +응답이라는 변수에 넣을 거예요 put it + +09:45.380 --> 09:46.790 +결과가 나왔네요 + +09:47.150 --> 09:47.960 +네 + +09:52.430 --> 09:53.600 +곧 나와요 + +09:53.600 --> 09:54.770 +너무 멀어요 + +09:54.770 --> 09:56.690 +Put it을 눌러요 + +09:57.750 --> 09:59.520 +녹취록 말이에요 + +10:04.170 --> 10:11.370 +덴버시 의회 회의록 10월 9일 월요일 장소 참석자들과 누가 참석했는지를요 + +10:12.930 --> 10:14.100 +요약해 보죠 + +10:19.380 --> 10:25.530 +10월 9일 월요일에 만나서 원주민의 날을 위한 선언문을 논의하고 채택했어요 + +10:25.560 --> 10:28.320 +로페즈 의원이 선언문을 발표했죠 + +10:28.410 --> 10:29.970 +핵심 논점이에요 + +10:30.000 --> 10:31.050 +포장 음식점요 + +10:31.050 --> 10:34.140 +시대의 중요성을 깨닫고 채택된 거죠 + +10:34.170 --> 10:37.590 +포용과 모든 문화를 존중하는 걸 강조했죠 + +10:37.620 --> 10:41.250 +어떤 행동들은 소유주와 행동을 동반하죠 + +10:41.250 --> 10:44.370 +로페즈 의원과 서기요 + +10:44.520 --> 10:49.890 +서기는 시와 덴버 의회의 인장을 보증하고 서명하세요 + +10:49.890 --> 10:57.330 +로페즈 시의원이 성명서 사본을 덴버 아메리카 인디언 위원회와 + +10:57.330 --> 11:03.420 +다른 곳에 전달하고 최종적으로 다음 단계를 밟았죠 + +11:03.420 --> 11:06.960 +라마 3의 활약을 인정해야겠네요 1번요 + +11:06.960 --> 11:13.230 +아주 포괄적이고 명확하며 아주 철저한 회의록인 것 같아요 + +11:13.230 --> 11:20.100 +모든 형식과 섹션을 제대로 갖춘 참석자들이 참석한 회의록이죠 + +11:20.130 --> 11:24.510 +보다시피 마크다운 포맷으로 나왔어요 + +11:24.510 --> 11:30.330 +전에 프론티어 모델을 작업했을 때 주피터 노트북을 사용했었는데 + +11:30.330 --> 11:39.360 +이 표시형 마크다운 반응을 이용해서 Colab에서 마크다운을 확인할 수 있어요 + +11:39.360 --> 11:47.310 +여기 덴버시 의회 회의록이 있습니다 요약과 요점, + +11:47.310 --> 11:53.310 +행동 항목 다음 단계로 나뉘어 있죠 + +11:53.490 --> 12:01.210 +그래서 저는 오디오를 녹취록으로 변환하기 위해 프런티어 모델과 오픈 소스 모델을 사용하는 응용 프로그램을 + +12:01.240 --> 12:07.660 +제공합니다 그 녹취록을 액션과 다음 단계가 있는 회의 요약으로 변환하죠 + +12:09.160 --> 12:11.620 +당신에겐 당연한 운동이죠 + +12:11.620 --> 
12:13.900 +뭘 준비했는지 이미 짐작하셨길 바라요 + +12:13.900 --> 12:17.950 +사용자 인터페이스로 쉽게 옮길 수 있어요. + +12:17.950 --> 12:25.780 +지난주에 다룬 것과 아주 유사하게 그래디오를 사용할 수 있어요 작고 멋진 그래디오 인터페이스로 + +12:25.780 --> 12:27.370 +이걸 불러올 수 있죠 + +12:27.370 --> 12:32.590 +구글 드라이브에 파일 이름을 입력하고 통화 기록 생성 버튼을 누르세요 + +12:32.620 --> 12:39.940 +오디오에서 읽고 텍스트로 변환하고 회의록 동작, 테이크아웃, 다음 단계로 + +12:39.970 --> 12:41.200 +변환해요 + +12:41.800 --> 12:43.570 +그게 당신 작업이에요 + +12:43.570 --> 12:45.640 +가서 그렇게 해요 + +12:45.640 --> 12:49.270 +훌륭한 사용자 인터페이스를 빨리 보고 싶어요 + +12:49.270 --> 12:51.730 +코드가 나오면 누르세요 + +12:51.730 --> 12:56.470 +보고 싶네요 잠시 후 다음 강의에서 뵙죠 diff --git a/week5/community-contributions/subtitles/srts/59170297/en_US.srt b/week5/community-contributions/subtitles/srts/59170297/en_US.srt new file mode 100755 index 0000000..9e41ab9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170297/en_US.srt @@ -0,0 +1,604 @@ +WEBVTT + +00:00.710 --> 00:06.830 +And here we are in Google Colab, ready for fun with models. + +00:07.100 --> 00:13.640 +So first we do the usual Pip installs and some imports. + +00:13.940 --> 00:18.710 +Now this will take a little bit longer for you because I have cheated and run this already right before + +00:18.710 --> 00:21.440 +recording this so that it can be a bit faster. + +00:21.530 --> 00:26.240 +The Pip installs will probably take 30s to a minute to to all go through for you. + +00:26.270 --> 00:29.450 +So once we've done the Pip installs, we sign in to hugging face. + +00:29.450 --> 00:30.800 +I think you're used to this now. + +00:30.830 --> 00:35.330 +Hopefully you've got the token set up as your secret on the left. + +00:35.570 --> 00:41.780 +Um, and I'm now going to set some constants for the names of the models we'll be playing with in the + +00:41.810 --> 00:42.950 +Hugging Face hub. + +00:42.950 --> 00:48.350 +As always, it's company slash, the name of the model or the model repo. 
+ +00:48.620 --> 00:52.370 +So we're going to be playing with llama with Phi three with Gemma two. + +00:52.370 --> 01:00.290 +And then I'm leaving an exercise for you to repeat with quanto the mighty LLM from Alibaba Cloud. + +01:00.290 --> 01:03.190 +And then I've also given Mistral here. + +01:03.220 --> 01:08.800 +Now, I have to say, this is probably going to be a model that will be too big unless you've splashed + +01:08.800 --> 01:11.020 +out on on some big GPUs. + +01:11.110 --> 01:19.450 +Uh, in which case the the ask for you is to go to Huggingface hub and find a nice model that's 8 billion + +01:19.450 --> 01:23.110 +parameters or fewer and, uh, use that instead. + +01:23.110 --> 01:29.410 +Pick the one you like or one that's that's popular or is is doing well at the moment and see what you + +01:29.410 --> 01:30.100 +make of that. + +01:30.100 --> 01:34.840 +But do be sure to have completed at least five models. + +01:34.930 --> 01:42.220 +So with that, let's set those constants, and then let's make a messages list in a format that we know + +01:42.220 --> 01:48.700 +so well at this point, with a system message and a user message as two dicts in a list. + +01:48.730 --> 01:51.640 +No more explanation needed for that. + +01:51.910 --> 02:02.540 +So you remember from last time that you need to, uh, agree to the, um, llama 3.1 terms of service + +02:02.690 --> 02:07.610 +by going to the model page and um, pressing the agree. + +02:07.610 --> 02:15.380 +If you haven't already done that, um, then please do so so that you have access to llama 3.1 model. + +02:16.370 --> 02:18.740 +Now this is something new. + +02:18.740 --> 02:23.180 +I want to talk a bit about something called quantization, which I mentioned. + +02:23.180 --> 02:27.350 +So quantization is a rather surprising thing. 
+ +02:27.440 --> 02:35.480 +Uh, the idea is that we can say, look, we, we want to load in this model into memory, but when + +02:35.480 --> 02:42.500 +we do so we want to reduce the precision of the numbers of the weights that make up the model. + +02:42.500 --> 02:46.130 +These weights are normally 32 bit floats. + +02:46.250 --> 02:51.650 +Uh, 32 bit floating point numbers make up the weights in this deep neural network. + +02:51.650 --> 02:55.490 +What if we brought them in with fewer bits? + +02:55.760 --> 02:59.320 +Uh, so 32 bits, of course, is four bytes. + +02:59.590 --> 03:08.350 +Um, and, uh, we might want to try and cram, uh, more of our, of our numbers into less memory. + +03:08.650 --> 03:15.490 +Um, and that process of reducing the precision so that you have more coarse numbers in your model is + +03:15.490 --> 03:17.080 +known as quantization. + +03:17.080 --> 03:19.930 +And that's something that we're going to do. + +03:19.930 --> 03:24.670 +And I remember being very surprised when I first heard about this, that the, uh, initially people + +03:24.700 --> 03:31.630 +talked about taking your 32 bit numbers and replacing them with eight bit numbers, much, much lower + +03:31.630 --> 03:32.530 +accuracy. + +03:32.530 --> 03:39.460 +And the thinking was that, surprisingly, whilst of course the accuracy decreases a bit, it doesn't + +03:39.460 --> 03:41.320 +decrease as much as you might think. + +03:41.320 --> 03:44.680 +It doesn't decrease, it doesn't become four times worse. + +03:44.860 --> 03:51.550 +Uh, it just becomes a bit worse, uh, and tolerably so and worth the trade off in terms of more memory. + +03:51.670 --> 03:56.800 +I was surprised to hear that, and I was even more surprised to hear that you could do more than that. + +03:56.800 --> 04:01.760 +You can actually reduce it, not down to eight bits, but all the way down to four bits. + +04:01.760 --> 04:03.950 +That's half a byte if you're counting. 
+ +04:04.220 --> 04:09.560 +You can reduce from 32 bit number down to four bits going into the number. + +04:09.800 --> 04:17.600 +And again accuracy sure is is is is hurt, but not by as much as you might expect. + +04:17.630 --> 04:21.500 +Not I would have expected it would be profoundly different. + +04:21.500 --> 04:23.990 +And it's not it's quite tolerable. + +04:23.990 --> 04:29.870 +And it allows for much bigger models to fit into memory and load faster and run faster and so on. + +04:29.870 --> 04:33.710 +So this is something that people do a lot quantization. + +04:33.710 --> 04:39.560 +It's a powerful tool, uh, particularly when you get to training and you're having to do lots more + +04:39.560 --> 04:41.060 +work with these models. + +04:41.060 --> 04:43.190 +Uh, quantization is a lifesaver. + +04:43.220 --> 04:48.590 +You may remember I mentioned that at some point in a few weeks time, we're going to get to a technique + +04:48.590 --> 04:53.150 +called Q Laura, a way of fine tuning in an efficient way. + +04:53.150 --> 04:57.760 +And in Q, Laura, the Q of Q, Laura stands for quantization. + +04:57.760 --> 05:01.720 +So it is something we will be coming back to from time to time. + +05:02.350 --> 05:09.340 +So in the meantime we are using a library called Bits and Bytes, which goes hand in hand with the hugging + +05:09.340 --> 05:10.480 +face Transformers library. + +05:10.510 --> 05:18.670 +It's a wonderful library and you can create a new bits and bytes config object to that will be using + +05:18.760 --> 05:22.720 +shortly to describe what kind of quantization we want to do. + +05:22.720 --> 05:27.490 +And we are going to say load in four bit equals true. + +05:27.520 --> 05:29.020 +We're going to go all the way down to four bits. + +05:29.020 --> 05:31.810 +You can also here say load in eight bit equals true. 
+ +05:31.810 --> 05:36.370 +Instead if you want to do eight bits and maybe you want to try doing both and see if you can tell the + +05:36.370 --> 05:37.660 +difference in accuracy. + +05:38.320 --> 05:41.260 +And now this again is very surprising. + +05:41.260 --> 05:46.510 +But you also can do four bit use double quant equals true. + +05:46.510 --> 05:51.760 +And this means that it quantizes all of the weights, not not once, but twice. + +05:51.790 --> 05:54.190 +Uh, saving a little bit more memory. + +05:54.190 --> 06:02.210 +And the results of doing this again doesn't massively impact the accuracy of the results. + +06:02.210 --> 06:05.300 +So it's a good trade to make and people do it. + +06:05.870 --> 06:13.340 +Um, this is this is saying that in doing the calculations use a this this data type A, B 16, which + +06:13.520 --> 06:17.270 +makes, uh, makes a some improvement in performance. + +06:17.270 --> 06:19.280 +So this is quite common as well. + +06:19.400 --> 06:27.290 +Uh, and then this is about the so when you have reduced the numbers down to four bits, how how will + +06:27.320 --> 06:32.510 +how to interpret how to treat that, that those four bit numbers, how to compress it down to four bits. + +06:32.510 --> 06:37.490 +And this, uh, NF four is a four bit representation of numbers. + +06:37.490 --> 06:39.860 +The N stands for normalized. + +06:39.860 --> 06:45.230 +And I understand that it's to do with with considering these numbers to follow a normal distribution + +06:45.230 --> 06:51.200 +allows for more more accuracy when you're compressing things down to just four bits. + +06:51.230 --> 06:56.090 +So, um, these these two are probably less important. + +06:56.110 --> 06:58.690 +They're not expected to make a massive difference. + +06:58.720 --> 06:59.170 +They're meant to be. + +06:59.200 --> 06:59.680 +It's meant to be. + +06:59.680 --> 07:01.030 +Good settings to have, though. + +07:01.030 --> 07:03.490 +And this one makes some difference. 
+ +07:03.490 --> 07:06.280 +And this one makes a huge amount of difference in terms of memory. + +07:06.280 --> 07:11.500 +And none of it is too bad in terms of the output. + +07:11.530 --> 07:19.420 +So with all of that chit chat, we've now created our quant config, our bits and bytes config. + +07:19.480 --> 07:21.370 +This is something we're familiar with. + +07:21.400 --> 07:24.910 +We are going to create a tokenizer for Lama. + +07:25.450 --> 07:28.090 +This line is a new one that I haven't talked about before. + +07:28.090 --> 07:37.240 +Uh, the uh, there is something called a pad token, which is which token is used to fill up the prompt + +07:37.240 --> 07:43.090 +if there needs to be more added to the prompt when it's fed into the neural network. + +07:43.180 --> 07:50.440 +Uh, and the, um, it's a sort of common practice to set that pad token to be the same as the special + +07:50.470 --> 07:54.220 +token for the end of sentence, the end of the prompt token. + +07:54.370 --> 07:57.830 +Uh, and if you don't do this, you get a warning. + +07:57.860 --> 07:59.840 +It doesn't matter that you get a warning. + +07:59.840 --> 08:01.250 +I don't think it makes any impact. + +08:01.250 --> 08:04.610 +But if you don't want to get the warning, then you keep it in here and you see that people have this + +08:04.640 --> 08:09.170 +in as very standard in, in a lot of code that you'll see. + +08:10.310 --> 08:10.970 +Okay. + +08:10.970 --> 08:13.520 +And so then we are going to use our tokenizer. + +08:13.520 --> 08:17.390 +We're going to call the apply chat template function that you know. + +08:17.390 --> 08:23.930 +Well that takes our messages as a list of dictionaries and converts it into tokens. + +08:24.260 --> 08:28.820 +And there we are pushing that onto our GPU. + +08:28.850 --> 08:33.080 +So let's run that and the tokenizer will get to work. + +08:33.080 --> 08:37.520 +And what we're going to do next is load our model. 
+ +08:37.520 --> 08:39.950 +So what does this line do. + +08:39.980 --> 08:44.900 +So first of all it's very analogous to this line. + +08:44.900 --> 08:50.540 +Here we created a tokenizer by saying auto tokenizer dot frompretrained. + +08:50.570 --> 08:57.200 +We create a model by saying auto model for causal LLM from pre-trained. + +08:57.290 --> 09:02.420 +Now this is the general class for creating any LLM. + +09:02.450 --> 09:06.290 +A causal LLM is the same as an autoregressive LLM. + +09:06.290 --> 09:13.760 +And that means it's an LLM which takes some set of tokens in the past and predicts future tokens. + +09:13.760 --> 09:18.170 +And basically all the llms we've talked about have been that kind of LLM. + +09:18.170 --> 09:24.200 +Later in the course, we will look at one other kind of LLM, which has some some use from time to time. + +09:24.200 --> 09:30.710 +But for everything that we're talking about for this sort of generative AI use case, we'll be working + +09:30.710 --> 09:34.130 +with causal llms or autoregressive llms. + +09:34.130 --> 09:39.650 +And this will be the way to create them from pre-trained we pass in. + +09:39.650 --> 09:42.560 +Just as with the tokenizer, we pass in the name of the model. + +09:42.560 --> 09:46.340 +We tell it that if we have a GPU, we want to use that GPU. + +09:46.370 --> 09:48.950 +That's what Device map auto does. + +09:48.980 --> 09:56.750 +And we pass in the quantization config, the quant config that we just set up and that is how we build + +09:56.750 --> 09:57.590 +a model. + +09:57.620 --> 10:07.700 +The model is the real code, which is actually our large language model as software, as Python code, + +10:07.700 --> 10:09.440 +which we're going to be able to run. + +10:09.440 --> 10:11.720 +And under the covers it is PyTorch. + +10:11.750 --> 10:19.220 +It is a series of PyTorch layers, layers of a neural network that will be able to feed in inputs and + +10:19.220 --> 10:20.390 +get out outputs. 
+ +10:20.390 --> 10:22.580 +So it's the real deal. + +10:22.670 --> 10:28.520 +Now, it will probably take longer when you run this because I just ran it, and so it didn't have to + +10:28.520 --> 10:32.330 +do as much work as if it was a completely fresh box. + +10:32.360 --> 10:38.090 +What actually happens when when you run this is it downloads. + +10:38.090 --> 10:39.650 +It connects to hugging face. + +10:39.680 --> 10:46.190 +It downloads all of the model weights from the Hugging face hub, and it puts it locally on the disk + +10:46.190 --> 10:54.460 +of this Google Colab instance in a cache in a special file, which is a temporary file on the desk of + +10:54.460 --> 11:01.540 +this box, which will get deleted when we later disconnect from this box so that this model is now temporarily + +11:01.540 --> 11:07.660 +stored on the box on disk, and it's also loaded into memory as well, ready for us to use. + +11:07.660 --> 11:12.940 +We can we can ask the model how much memory it uses up by calling get memory footprint. + +11:12.940 --> 11:15.100 +And so we will see what that says. + +11:15.100 --> 11:19.510 +It says the memory footprint of this model is about 5.5GB. + +11:19.840 --> 11:27.250 +And so if we look at the resources for this box, you can see that we are using about 5.5GB of space + +11:27.250 --> 11:28.030 +on the box. + +11:28.030 --> 11:31.210 +And it's bouncing around in the past because I've been running this already. + +11:31.450 --> 11:35.740 +But you can imagine that when you look at it, you'll be starting from down here and it will bump up + +11:35.740 --> 11:37.000 +to about five and a half. + +11:37.000 --> 11:42.700 +And on the disk, we're using up plenty of space because it's been loaded into the cache of the disk. + +11:43.690 --> 11:44.560 +Okay. + +11:44.590 --> 11:47.350 +Almost ready for for for prime time here. + +11:47.350 --> 11:51.250 +At first we're going to look at the model itself. 
+ +11:51.430 --> 11:54.210 +And we do that simply by printing the model. + +11:54.990 --> 12:04.080 +What comes up when we print the model is a description of the actual deep neural network that is represented + +12:04.080 --> 12:05.370 +by this model object. + +12:05.370 --> 12:06.990 +This is what we're looking at here. + +12:06.990 --> 12:12.720 +It's real layers of code representing the layers of the deep neural network. + +12:12.720 --> 12:19.140 +And these are all this is showing PyTorch classes that have set up that are being that are that are + +12:19.140 --> 12:20.730 +referenced by model. + +12:21.210 --> 12:26.460 +Uh, and again, this is a practical class with only a touch of theory from time to time. + +12:26.460 --> 12:31.320 +But it is worth looking at this, depending on your level of of knowledge of the innards of deep neural + +12:31.320 --> 12:32.700 +networks and the layers. + +12:32.700 --> 12:34.740 +Some of this may be super familiar to you. + +12:34.740 --> 12:40.230 +You may be comfortable seeing that it begins with an embedding layer, which is how the tokens become + +12:40.230 --> 12:41.820 +embedded into the neural network. + +12:41.820 --> 12:47.280 +And you can imagine that this dimension, these are showing the dimensions and it's the dimensionality + +12:47.280 --> 12:49.170 +of the vocab. + +12:49.620 --> 12:56.680 +Uh, and you'll then see that there's a series of modules, each of the layers in the neural network. + +12:56.710 --> 13:03.130 +There are attention layers that you'd be expecting to see, particularly as you know that attention + +13:03.130 --> 13:04.060 +is all you need. + +13:04.090 --> 13:08.890 +As the paper said, attention is all you need, and that is at the heart of what makes a transformer + +13:08.920 --> 13:11.350 +a transformer, these attention layers. + +13:11.350 --> 13:17.230 +And then we have multi-layer perceptron layers right here. + +13:17.230 --> 13:19.690 +And there is an activation function. 
+
+13:19.690 --> 13:24.340
+Uh, again, as those those who are more familiar with the theory will be expecting to see this.
+
+13:24.340 --> 13:32.860
+The activation function that is used by this llama 3.1 model is the SiLU activation function, which
+
+13:32.860 --> 13:40.570
+is the sigmoid uh, linear unit, which is described in Pytorch's documentation right here.
+
+13:40.750 --> 13:43.900
+Uh, and it is also known apparently as the swish function.
+
+13:44.080 --> 13:49.300
+Uh, and it's, it's basically x times the logistic sigmoid of x.
+
+13:49.300 --> 13:52.190
+And that's what the activation function looks like.
+
+13:52.220 --> 13:57.020
+Again, if you're into the theory of deep neural networks, you know exactly what this is.
+
+13:57.050 --> 13:59.300
+If you're not, then then don't worry.
+
+13:59.300 --> 14:05.630
+Just get a general sense of what's happening here, and it's something that you can look more at as
+
+14:05.630 --> 14:09.200
+you study this model and others afterwards.
+
+14:09.890 --> 14:17.690
+At the end of that, there's a, uh, like a, some, some, uh, uh, layer norm layers and then the
+
+14:17.690 --> 14:20.360
+linear layer at the end.
+
+14:21.170 --> 14:29.300
+So this is worth looking at particularly, uh, depending on your level of knowledge of, uh, PyTorch
+
+14:29.300 --> 14:30.560
+neural networks.
+
+14:30.770 --> 14:35.060
+But also later when you look at other models, you could do the same thing.
+
+14:35.060 --> 14:36.680
+Look at the model's output.
+
+14:36.710 --> 14:42.320
+Look, look at the model, print the model, look at what it looks like and compare with Llama 3.
+
+14:43.160 --> 14:47.720
+I'm going to break for the next video, but in the next video, we're then going to run this and then
+
+14:47.720 --> 14:49.040
+run the other models too.
+
+14:49.070 --> 14:50.690
+So don't go anywhere.
+
+14:50.720 --> 14:51.770
+See you in a second.
diff --git a/week5/community-contributions/subtitles/srts/59170297/ja_JP.srt b/week5/community-contributions/subtitles/srts/59170297/ja_JP.srt new file mode 100755 index 0000000..da6e961 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170297/ja_JP.srt @@ -0,0 +1,511 @@ +WEBVTT + +00:00.710 --> 00:06.830 +そして私たちはGoogle Colabで、 モデルたちと楽しむ準備をしている。 + +00:07.100 --> 00:13.640 +そこでまず、 いつものようにPipのインストールといくつかのインポートを行う。 + +00:13.940 --> 00:21.440 +なぜなら、 録画する直前にズルをして、 もう少し早くできるようにすでに実行してあるからだ。 + +00:21.530 --> 00:26.240 +ピップのインストールは、 おそらく30秒から1分程度ですべて完了する。 + +00:26.270 --> 00:29.450 +ピップのインストールが終わったら、 ハグフェイスにサインインしよう。 + +00:29.450 --> 00:30.800 +もう慣れていると思うよ。 + +00:30.830 --> 00:35.330 +トークンが左のシークレットに設定されていることを祈る。 + +00:35.570 --> 00:42.950 +ええと、 これからハギング・フェイス・ハブでプレーするモデルの名前に定数を設定します。 + +00:42.950 --> 00:48.350 +いつものように、 会社のスラッシュ、 モデルの名前、 またはモデルのレポだ。 + +00:48.620 --> 00:52.370 +だから、 3歳のファイと2歳のジェマと一緒にラマで遊ぶんだ。 + +00:52.370 --> 01:00.290 +そして、 アリババクラウドのLLMのクアントと一緒に練習を繰り返してほしい。 + +01:00.290 --> 01:03.190 +そして、 ミストラルもここにあげた。 + +01:03.220 --> 01:11.020 +さて、 これはおそらく、 大きなGPUに大枚をはたかない限り、 大きすぎるモデルになると言わざるを得ない。 + +01:11.110 --> 01:19.450 +その場合は、 Huggingfaceのハブに行って、 80億パラメータ以下のいいモデルを見つけて、 + +01:19.450 --> 01:23.110 +それを代わりに使ってください。 + +01:23.110 --> 01:30.100 +自分の好きなもの、 人気のあるもの、 現在好調なものを選んで、 それをどう評価するか見てみよう。 + +01:30.100 --> 01:34.840 +ただし、 少なくとも5つのモデルを完成させておくこと。 + +01:34.930 --> 01:42.220 +システム・メッセージとユーザー・メッセージを2つのディクテットにして、 + +01:42.220 --> 01:48.700 +メッセージ・リストを作りましょう。 + +01:48.730 --> 01:51.640 +これ以上の説明は必要ない。 + +01:51.910 --> 02:02.540 +前回、 ラマ3に同意する必要があることを覚えているよね。 1 利用規約は、 モデルのページに行き、 + +02:02.690 --> 02:07.610 +同意するを押してください。 + +02:07.610 --> 02:15.380 +もし、 まだそうしていないのなら、 ラマ3にアクセスできるようにそうしてください。 1モデル。 + +02:16.370 --> 02:18.740 +これは新しいことだ。 + +02:18.740 --> 02:23.180 +量子化と呼ばれるものについて少しお話ししたいと思います。 + +02:23.180 --> 02:27.350 +だから量子化というのは、 かなり意外なことなんだ。 + +02:27.440 --> 02:42.500 +つまり、 このモデルをメモリにロードしたいが、 その際、 モデルを構成するウェイトの数値の精度を下げたい。 + +02:42.500 --> 02:46.130 
+これらのウェイトは通常32ビットの浮動小数点数である。 + +02:46.250 --> 02:51.650 +このディープ・ニューラル・ネットワークの重みは、 32ビットの浮動小数点数で構成されている。 + +02:51.650 --> 02:55.490 +もっと少ないビットで彼らを連れてきたらどうだろう? + +02:55.760 --> 02:59.320 +32ビットはもちろん4バイトだ。 + +02:59.590 --> 03:08.350 +そして、 より少ないメモリに、 より多くの数字を詰め込むことを試みるかもしれない。 + +03:08.650 --> 03:17.080 +そして、 精度を下げることによって、 より粗い数をモデルに入れることを量子化と呼ぶんだ。 + +03:17.080 --> 03:19.930 +そして、 それは私たちがやろうとしていることでもある。 + +03:19.930 --> 03:24.670 +最初にこの話を聞いたとき、 とても驚いたことを覚えている。 + +03:24.700 --> 03:32.530 +32ビットの数字を8ビットの数字に置き換えることで、 精度を大幅に下げるという話だった。 + +03:32.530 --> 03:41.320 +そして考えたのは、 意外なことに、 もちろん精度は少し落ちるものの、 思ったほどは落ちないということだった。 + +03:41.320 --> 03:44.680 +減るわけでもなく、 4倍悪くなるわけでもない。 + +03:44.860 --> 03:51.550 +ちょっと悪くなるけど、 まあ我慢できる程度だし、 メモリを増やすというトレードオフの価値はある。 + +03:51.670 --> 03:56.800 +それを聞いて驚いたし、 それ以上のことができると聞いてさらに驚いたよ。 + +03:56.800 --> 04:01.760 +実際には、 8ビットではなく、 4ビットまで減らすことができる。 + +04:01.760 --> 04:03.950 +数えてみれば半分のバイトだ。 + +04:04.220 --> 04:09.560 +32ビットから4ビットに減らすことができる。 + +04:09.800 --> 04:17.600 +また、 精度は確かに落ちているが、 期待するほどではない。 + +04:17.630 --> 04:21.500 +しかし、 私はそれが大きく異なるものだとは思っていなかった。 + +04:21.500 --> 04:23.990 +それに、 かなり我慢できるレベルじゃない。 + +04:23.990 --> 04:29.870 +そして、 より大きなモデルをメモリに収め、 より速くロードし、 より速く実行することなどが可能になる。 + +04:29.870 --> 04:33.710 +だから、 量子化はよく行われることなんだ。 + +04:33.710 --> 04:41.060 +これは強力なツールで、 特にトレーニングに入ると、 これらのモデルを使ってより多くの仕事をこなさなければならなくなる。 + +04:41.060 --> 04:43.190 +量子化は救世主だよ。 + +04:43.220 --> 04:53.150 +数週間後のある時点で、 効率的な方法で微調整を行うQラウラと呼ばれるテクニックを紹介すると書いたのを覚えているだろうか。 + +04:53.150 --> 04:57.760 +Q,ローラのQは量子化を意味する。 + +04:57.760 --> 05:01.720 +だから、 これからも折に触れて訪れることになるだろう。 + +05:02.350 --> 05:10.480 +その間に、 私たちはBits and Bytesというライブラリを使っている。 これは、 抱きつき顔のトランスフォーマーライブラリと相性がいい。 + +05:10.510 --> 05:18.670 +これは素晴らしいライブラリで、 これから使用する新しいbits and bytesコンフィグ・オブジェクトを作成して、 + +05:18.760 --> 05:22.720 +どのような量子化を行いたいかを記述することができる。 + +05:22.720 --> 05:27.490 +そして、 4ビットのロードは真に等しいと言うつもりだ。 + +05:27.520 --> 05:29.020 +我々は4ビットまでやるつもりだ。 + +05:29.020 --> 05:31.810 +また、 ここでは8ビットのロードは真に等しいと言うこともできる。 + +05:31.810 
--> 05:37.660 +その代わり、 8ビットにしたいのであれば、 両方やってみて、 精度の違いがわかるかどうか試してみるのもいいかもしれない。 + +05:38.320 --> 05:41.260 +そして今、 これまた非常に驚いている。 + +05:41.260 --> 05:46.510 +しかし、 4ビットでdouble quant equals trueを使うこともできる。 + +05:46.510 --> 05:51.760 +そしてこれは、 すべての重みを1度ではなく2度量子化することを意味する。 + +05:51.790 --> 05:54.190 +ええと、 もう少しメモリを節約します。 + +05:54.190 --> 06:02.210 +また、 これをやり直したからといって、 結果の精度に大きな影響はない。 + +06:02.210 --> 06:05.300 +だから、 それはいいトレードだし、 みんなそうしている。 + +06:05.870 --> 06:17.270 +ええと、 これは、 計算を行う際に、 このデータ型A、 B 16を使用することで、 パフォーマンスが多少向上するということです。 + +06:17.270 --> 06:19.280 +だから、 これもよくあることだ。 + +06:19.400 --> 06:27.290 +それから、 これは数字を4ビットに減らしたときに、 その4ビットの数字をどう解釈してどう扱うか、 + +06:27.320 --> 06:32.510 +どう4ビットに圧縮するかということです。 + +06:32.510 --> 06:37.490 +そして、 このNF4は4ビットの数値表現である。 + +06:37.490 --> 06:39.860 +Nはノーマライズドを意味する。 + +06:39.860 --> 06:45.230 +私は、 これらの数値が正規分布に従うと考えることで、 物事をわずか4ビットに圧縮する際に、 + +06:45.230 --> 06:51.200 +より正確さを増すことができることに関係していると理解している。 + +06:51.230 --> 06:56.090 +だから、 この2つはあまり重要ではないだろう。 + +06:56.110 --> 06:58.690 +大差をつけることは期待されていない。 + +06:58.720 --> 06:59.170 +そうなる運命なんだ。 + +06:59.200 --> 06:59.680 +それは運命なんだ。 + +06:59.680 --> 07:01.030 +しかし、 持っていて損はないセッティングだ。 + +07:01.030 --> 07:03.490 +そして、 これは多少の違いがある。 + +07:03.490 --> 07:06.280 +そして、 これはメモリという点で大きな違いを生む。 + +07:06.280 --> 07:11.500 +そしてそのどれもが、 アウトプットの面ではそれほど悪くない。 + +07:11.530 --> 07:19.420 +そんな雑談をしながら、 クォンツのコンフィグ、 ビット・アンド・バイトのコンフィグができあがった。 + +07:19.480 --> 07:21.370 +これは私たちがよく知っていることだ。 + +07:21.400 --> 07:24.910 +Lamaのトークナイザーを作成します。 + +07:25.450 --> 07:28.090 +このラインは、 これまで話題にしたことのない新しいものだ。 + +07:28.090 --> 07:37.240 +ええと、 パッドトークンと呼ばれるものがあって、 これはニューラルネットワークに入力されるときに、 プロンプトにさらに追加する必要がある場合に、 + +07:37.240 --> 07:43.090 +プロンプトを埋めるために使われるトークンです。 + +07:43.180 --> 07:50.440 +そして、 そのパッド・トークンは、 文末の特別なトークン、 つまりプロンプトの終わりのトークンと同じに設定するのが、 + +07:50.470 --> 07:54.220 +ある種の一般的なやり方です。 + +07:54.370 --> 07:57.830 +もしそうしなければ、 警告を受けることになる。 + +07:57.860 --> 07:59.840 +警告を受けることは問題ではない。 + +07:59.840 --> 08:01.250 +インパクトはないと思う。 + +08:01.250 --> 08:04.610 +しかし、 
警告を受けたくないのであれば、 このままにしておけばいいし、 + +08:04.640 --> 08:09.170 +多くのコードで標準的に使われているのを見るだろう。 + +08:10.310 --> 08:10.970 +オーケー。 + +08:10.970 --> 08:13.520 +そしてトークナイザーを使う。 + +08:13.520 --> 08:17.390 +ご存知のチャットテンプレート適用関数を呼び出します。 + +08:17.390 --> 08:23.930 +メッセージを辞書のリストとして受け取り、 トークンに変換する。 + +08:24.260 --> 08:28.820 +そしてそれをGPUに押し付ける。 + +08:28.850 --> 08:33.080 +では、 これを実行すると、 トークナイザーが動き出す。 + +08:33.080 --> 08:37.520 +次にすることは、 モデルをロードすることだ。 + +08:37.520 --> 08:39.950 +では、 このセリフは何を意味するのか。 + +08:39.980 --> 08:44.900 +だからまず、 このセリフにとても似ている。 + +08:44.900 --> 08:50.540 +ここでは、 auto tokenizer dot frompretrainedと言って、 トークナイザーを作成しました。 + +08:50.570 --> 08:57.200 +我々は、 因果LLMのための自動モデルを、 事前に訓練されたものから作成する。 + +08:57.290 --> 09:02.420 +このクラスは、 LLMを作成するための一般的なクラスである。 + +09:02.450 --> 09:06.290 +因果LLMは自己回帰LLMと同じである。 + +09:06.290 --> 09:13.760 +そしてそれは、 過去のトークンのセットを受け取り、 未来のトークンを予測するLLMであることを意味する。 + +09:13.760 --> 09:18.170 +そして基本的に、 私たちが話してきたすべてのLLMはそのようなLLMだった。 + +09:18.170 --> 09:24.200 +このコースの後半では、 もう一つのLLMについて見ていく。 + +09:24.200 --> 09:34.130 +しかし、 このような生成的AIのユースケースで話していることすべてにおいて、 我々は因果関係のあるLLMや自己回帰LLMを扱うことになる。 + +09:34.130 --> 09:39.650 +そしてこれは、 事前に訓練されたものから作成する方法になる。 + +09:39.650 --> 09:42.560 +トークナイザーと同じように、 モデルの名前を渡します。 + +09:42.560 --> 09:46.340 +GPUがあれば、 そのGPUを使いたい。 + +09:46.370 --> 09:48.950 +それがデバイス・マップ・オートだ。 + +09:48.980 --> 09:57.590 +そして、 先ほど設定した量子化コンフィグ、 クオンツ・コンフィグを渡して、 モデルを構築する。 + +09:57.620 --> 10:09.440 +モデルとは実際のコードのことで、 これは実際に我々の大規模な言語モデルをソフトウェアとして、 Pythonのコードとして実行できるようにしたものだ。 + +10:09.440 --> 10:11.720 +そしてカバーの下にはPyTorchがある。 + +10:11.750 --> 10:20.390 +これは一連のPyTorchレイヤーであり、 入力を入力し、 出力を得ることができるニューラルネットワークのレイヤーである。 + +10:20.390 --> 10:22.580 +だから本物なんだ。 + +10:22.670 --> 10:32.330 +今、 この作業をしたところだから、 おそらくもっと時間がかかるだろう。 + +10:32.360 --> 10:38.090 +これを実行すると、 実際にダウンロードが行われる。 + +10:38.090 --> 10:39.650 +ハグ顔につながる。 + +10:39.680 --> 10:46.190 +これは、 Hugging face hubからモデルの重みをすべてダウンロードし、 + +10:46.190 --> 11:07.660 +Google Colabインスタンスのディスク上の特別なファイルにキャッシュされます。 + +11:07.660 --> 11:12.940 
+メモリフットプリントの取得を呼び出すことで、 モデルがどれだけのメモリを使用しているかを尋ねることができる。 + +11:12.940 --> 11:15.100 +その結果、 どうなるかはこれからだ。 + +11:15.100 --> 11:19.510 +このモデルのメモリー・フットプリントは約5。 5GB。 + +11:19.840 --> 11:28.030 +このボックスのリソースを見ると、 5つほど使っていることがわかる。 5GBの空き容量。 + +11:28.030 --> 11:31.210 +そして、 すでにこれを実行しているので、 過去にバウンドしている。 + +11:31.450 --> 11:37.000 +でも、 ここからスタートすると、 5.5メートルくらいに跳ね上がることは想像できると思う。 + +11:37.000 --> 11:42.700 +そしてディスク上では、 ディスクのキャッシュにロードされているため、 十分なスペースを使用している。 + +11:43.690 --> 11:44.560 +オーケー。 + +11:44.590 --> 11:47.350 +プライムタイムの準備はほぼ整っている。 + +11:47.350 --> 11:51.250 +まずはモデルそのものを見てみよう。 + +11:51.430 --> 11:54.210 +そして、 私たちは単にモデルを印刷することでそれを行う。 + +11:54.990 --> 12:05.370 +モデルをプリントすると出てくるのは、 このモデル・オブジェクトで表現される実際のディープ・ニューラル・ネットワークの説明である。 + +12:05.370 --> 12:06.990 +これがここで見ているものだ。 + +12:06.990 --> 12:12.720 +これは、 ディープ・ニューラル・ネットワークのレイヤーを表す、 実際のレイヤーのコードだ。 + +12:12.720 --> 12:20.730 +そしてこれらはすべて、 モデルによって参照されるように設定されたPyTorchクラスを示している。 + +12:21.210 --> 12:26.460 +ええと、 繰り返しになるけど、 このクラスは実践的なクラスで、 時折理論的なことを少し話すだけなんだ。 + +12:26.460 --> 12:31.320 +しかし、 ディープ・ニューラル・ネットワークの内部やレイヤーに関する知識のレベルによっては、 + +12:31.320 --> 12:32.700 +これを見る価値はある。 + +12:32.700 --> 12:34.740 +この中には、 あなたにとって超お馴染みのものもあるかもしれない。 + +12:34.740 --> 12:41.820 +トークンをニューラルネットワークに埋め込むエンベッディング層から始まることは、 おわかりいただけるだろう。 + +12:41.820 --> 12:49.170 +そして、 この次元、 これは次元を示していて、 ボキャブラリーの次元性を示していることが想像できるだろう。 + +12:49.620 --> 12:56.680 +そして、 ニューラルネットワークの各層に、 一連のモジュールがあることがわかるだろう。 + +12:56.710 --> 13:04.060 +特にあなたは、 注目されることがすべてだと知っているのだから。 + +13:04.090 --> 13:11.350 +論文にあるように、 必要なのは注意力だけであり、 それこそがトランスフォーマーをトランスフォーマーたらしめている核心である。 + +13:11.350 --> 13:17.230 +そして多層パーセプトロン層がここにある。 + +13:17.230 --> 13:19.690 +そして活性化関数がある。 + +13:19.690 --> 13:24.340 +理論に詳しい人たちなら、 これを期待しているはずだ。 + +13:24.340 --> 13:32.860 +このラマが使用する活性化関数 3. 
1つのモデルはReLU活性化関数で、 シグモイド、 + +13:32.860 --> 13:40.570 +つまり線形単位で、 Pytorchのドキュメントに記載されている。 + +13:40.750 --> 13:43.900 +それと、 どうやらスウィッシュ機能としても知られているようだ。 + +13:44.080 --> 13:49.300 +基本的にはxのロジスティック・シグモイドをx倍にしたものだ。 + +13:49.300 --> 13:52.190 +これが活性化関数だ。 + +13:52.220 --> 13:57.020 +繰り返しになるが、 ディープ・ニューラル・ネットワークの理論に詳しい人なら、 これが何なのかよくご存じだろう。 + +13:57.050 --> 13:59.300 +もしそうでないなら、 心配はいらない。 + +13:59.300 --> 14:05.630 +ここで何が起こっているのか、 一般的な感覚をつかむだけでいい。 このモデルや他のモデルを研究していくうちに、 + +14:05.630 --> 14:09.200 +もっと詳しく見ることができるようになるだろう。 + +14:09.890 --> 14:20.360 +その最後に、 いくつかの、 いくつかの、 いくつかの、 いくつかの、 レイヤーのようなものがあり、 そして最後に直線的なレイヤーがある。 + +14:21.170 --> 14:30.560 +PyTorchニューラルネットワークの知識レベルによっては、 特に見る価値がある。 + +14:30.770 --> 14:35.060 +でも、 後で他のモデルを見ても同じことができる。 + +14:35.060 --> 14:36.680 +モデルの出力を見てください。 + +14:36.710 --> 14:42.320 +見て、 模型を見て、 模型をプリントして、 どんな風に見えるか見て、 ラマ3と比べてみてください。 + +14:43.160 --> 14:49.040 +次のビデオまでお休みしますが、 次のビデオでは、 これを実行し、 他のモデルも実行します。 + +14:49.070 --> 14:50.690 +だからどこにも行くな。 + +14:50.720 --> 14:51.770 +またすぐに会おう。 diff --git a/week5/community-contributions/subtitles/srts/59170297/ko_KR.srt b/week5/community-contributions/subtitles/srts/59170297/ko_KR.srt new file mode 100755 index 0000000..2dc8294 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59170297/ko_KR.srt @@ -0,0 +1,592 @@ +WEBVTT + +00:00.710 --> 00:06.830 +구글 콜랩에 왔습니다 모델들과 즐길 준비가 됐죠 + +00:07.100 --> 00:13.640 +먼저 Pip 설치와 수입 작업을 할 거예요 + +00:13.940 --> 00:18.710 +이건 좀 더 오래 걸릴 거예요 이걸 녹음하기 전에 이미 비트를 써서 + +00:18.710 --> 00:21.440 +실행했거든요 좀 더 빠르게요 + +00:21.530 --> 00:26.240 +피프를 설치하는 데 30-1분이 걸릴 거예요 + +00:26.270 --> 00:29.450 +피프 설치가 끝나면 얼굴 포옹으로 로그인해요 + +00:29.450 --> 00:30.800 +이제 익숙해진 것 같네요 + +00:30.830 --> 00:35.330 +왼쪽에 비밀이 담긴 토큰이 있으면 좋겠네요 + +00:35.570 --> 00:41.780 +이제 상수를 설정하겠습니다 포옹 페이스 허브에서 함께 플레이할 모델들의 이름에 + +00:41.810 --> 00:42.950 +대해서요 + +00:42.950 --> 00:48.350 +늘 그렇듯 모델명 또는 모델 회수 회사명이죠 + +00:48.620 --> 00:52.370 +3번과 피 2번과 라마와 놀 거예요 + +00:52.370 --> 01:00.290 +연습 문제를 낼 테니 콴토와 함께 반복하세요 알리바바 클라우드의 위대한 LLM이죠 
+ +01:00.290 --> 01:03.190 +미스트랄도 여기 있어요 + +01:03.220 --> 01:08.800 +이건 너무 큰 모델이 될 수도 있어요 큰 GPU에 돈을 쏟아붓지 + +01:08.800 --> 01:11.020 +않는다면요 + +01:11.110 --> 01:19.450 +그럴 경우 H깅페이스 허브에 가서 80억 매개 변수가 넘는 모델을 찾아서 + +01:19.450 --> 01:23.110 +그걸 대신 사용하세요 + +01:23.110 --> 01:29.410 +마음에 드는 걸 고르거나 아니면 지금 인기가 있거나 잘 팔리는 걸 골라서 어떻게 해석할지 + +01:29.410 --> 01:30.100 +보세요 + +01:30.100 --> 01:34.840 +하지만 적어도 5개는 완성해야 해요 + +01:34.930 --> 01:42.220 +이제 상수를 설정하고 메시지 목록을 만들게요 이 시점에서 우리가 잘 아는 형식으로요 + +01:42.220 --> 01:48.700 +시스템 메시지와 사용자 메시지죠 목록의 2개 독점으로요 + +01:48.730 --> 01:51.640 +더 설명할 필요 없어요 + +01:51.910 --> 02:02.540 +지난번에 라마 3에 동의해야 했던 거 기억하죠? 한 가지 서비스 조건은 모델 페이지에서 + +02:02.690 --> 02:07.610 +동의를 얻는 거예요 + +02:07.610 --> 02:15.380 +아직 안 하셨다면 llama 3에 액세스할 수 있도록 하세요 모델은 하나예요 + +02:16.370 --> 02:18.740 +이런 건 처음 봐요 + +02:18.740 --> 02:23.180 +제가 언급했던 퀀타이즈라는 것에 대해 잠깐 얘기할게요 비트 + +02:23.180 --> 02:27.350 +퀀타이즈는 놀라운 개념이에요 + +02:27.440 --> 02:35.480 +이 모델의 경우 메모리에 로드하고 싶은데 그렇게 할 때 + +02:35.480 --> 02:42.500 +모델을 구성하는 무게의 정밀도를 줄이는 거죠 + +02:42.500 --> 02:46.130 +이 무게추들은 보통 32비트 플로트예요 + +02:46.250 --> 02:51.650 +32 비트 부양점 숫자는 이 심층 신경망의 무게예요 + +02:51.650 --> 02:55.490 +덜 채워서 가져오면 어떨까요? 
+ +02:55.760 --> 02:59.320 +32비트는 4바이트로 계산할 수 있고요 + +02:59.590 --> 03:08.350 +그리고 숫자를 더 적어 넣는 게 좋을 것 같아요 less 메모리에요 + +03:08.650 --> 03:15.490 +정밀도를 줄이는 이 과정을 퀀타이즈라고 합니다 모델에 거친 숫자가 더 많이 + +03:15.490 --> 03:17.080 +보이게 하는 거죠 + +03:17.080 --> 03:19.930 +그게 우리가 할 일이죠 + +03:19.930 --> 03:24.670 +이 이야기를 처음 들었을 때 정말 놀랐던 기억이 납니다 처음에는 + +03:24.700 --> 03:32.530 +32 비트 숫자를 8 비트 숫자로 대체하면 정확도가 훨씬 떨어진다고들 했거든요 + +03:32.530 --> 03:39.460 +그 비트는 놀랍게도 정확도는 조금 떨어지지만 생각만큼 떨어지진 + +03:39.460 --> 03:41.320 +않아요 + +03:41.320 --> 03:44.680 +줄어들지도 않고 4배로 악화되지도 않아요 + +03:44.860 --> 03:51.550 +좀 더 나빠지지만 견딜 만해요 메모리가 더 많다면 비트를 걸 만하죠 + +03:51.670 --> 03:56.800 +그 말을 듣고 놀랐지만 그보다 더 많은 걸 할 수 있다는 걸 알고 더 놀랐어요 + +03:56.800 --> 04:01.760 +8비트가 아니라 4비트로 졸일 수 있어요 + +04:01.760 --> 04:03.950 +0.5 바이트예요 세어본다면요 + +04:04.220 --> 04:09.560 +32비트 숫자를 4비트로 줄일 수 있어요 + +04:09.800 --> 04:17.600 +다시 말하지만 정확도는 손상됐지만 기대만큼 크진 않아요 + +04:17.630 --> 04:21.500 +완전히 다를 줄은 몰랐어요 + +04:21.500 --> 04:23.990 +견딜 만해요 + +04:23.990 --> 04:29.870 +훨씬 더 큰 모델이 메모리에 맞아 더 빨리 로드되고 더 빨리 달리게 해주죠 + +04:29.870 --> 04:33.710 +사람들이 많이 하는 거죠 + +04:33.710 --> 04:39.560 +강력한 도구예요 특히 훈련을 받거나 이런 모델로 많은 작업을 해야 할 때는요 get + +04:39.560 --> 04:41.060 +it get it + +04:41.060 --> 04:43.190 +퀀타이즈는 구세주예요 + +04:43.220 --> 04:48.590 +기억하실지 모르겠지만 몇 주 후에 큐 로라라는 기술을 배울 + +04:48.590 --> 04:53.150 +거예요 효율적으로 미세 조정하는 방법이죠 + +04:53.150 --> 04:57.760 +Q의 로라, Q의 로라는 퀀타이즈를 뜻하죠 + +04:57.760 --> 05:01.720 +그래서 이따금 다시 보게 될 거예요 + +05:02.350 --> 05:09.340 +그동안 비트 앤 바이트라는 라이브러리를 사용합니다 포옹하는 트랜스포머 라이브러리와 밀접한 + +05:09.340 --> 05:10.480 +관련이 있죠 + +05:10.510 --> 05:18.670 +훌륭한 라이브러리예요 새 bits와 바이트 구성 객체를 만들 수 있죠 우리가 원하는 + +05:18.760 --> 05:22.720 +수량화가 뭔지 곧 설명드릴게요 + +05:22.720 --> 05:27.490 +로드 인 for 비트 = true라고 하죠 + +05:27.520 --> 05:29.020 +50센트까지 쭉 내릴 거예요 + +05:29.020 --> 05:31.810 +로드 인 8 비트 = true도 할 수 있어요 + +05:31.810 --> 05:36.370 +대신 8bit를 하고 싶다면 둘 다 해서 차이를 확인할 + +05:36.370 --> 05:37.660 +수 있죠 + +05:38.320 --> 05:41.260 +이것도 정말 놀랍죠 + +05:41.260 --> 05:46.510 +근데 4비트 퀀트 = 
True도 할 수 있어요 + +05:46.510 --> 05:51.760 +한 번도 아니고 두 번씩 모든 무게를 수량화한다는 뜻이죠 + +05:51.790 --> 05:54.190 +메모리를 좀 더 저장하려고요 비트 + +05:54.190 --> 06:02.210 +이걸 다시 한다고 해서 결과의 정확성에 큰 영향을 주진 않아요 + +06:02.210 --> 06:05.300 +좋은 거래고 사람들이 그렇게 해요 + +06:05.870 --> 06:13.340 +이 데이터 형식 A와 B 16을 이용해 계산을 한다는 뜻입니다 + +06:13.520 --> 06:17.270 +그러면 성능이 향상되죠 + +06:17.270 --> 06:19.280 +이것도 꽤 흔한 일이에요 + +06:19.400 --> 06:27.290 +여기서 중요한 건 숫자를 4비트로 줄였을 때 4비트 숫자를 어떻게 + +06:27.320 --> 06:32.510 +처리하고 압축하는지에 관한 거죠 + +06:32.510 --> 06:37.490 +이 NF4는 4비트로 숫자를 나타내요 + +06:37.490 --> 06:39.860 +평범화됐다는 뜻이죠 + +06:39.860 --> 06:45.230 +이런 숫자들을 고려하는 것과 관련 있다는 걸 압니다 일반적인 + +06:45.230 --> 06:51.200 +분포를 따르려면 4비트로 압축할 때 더 정확할 수 있죠 + +06:51.230 --> 06:56.090 +이 둘은 less로 보여요 + +06:56.110 --> 06:58.690 +큰 변화를 기대하진 않아요 + +06:58.720 --> 06:59.170 +둘은 운명이에요 + +06:59.200 --> 06:59.680 +운명인 거죠 + +06:59.680 --> 07:01.030 +설정이 좋네요 + +07:01.030 --> 07:03.490 +이건 좀 달라요 + +07:03.490 --> 07:06.280 +이건 메모리 면에서 큰 차이를 만들어요 + +07:06.280 --> 07:11.500 +결과물 면에서는 나쁘지 않아요 + +07:11.530 --> 07:19.420 +이 모든 대화로 우린 이제 퀀트 구성, 비트 및 바이트 구성을 만들었어요 + +07:19.480 --> 07:21.370 +이건 우리에게 익숙한 거예요 + +07:21.400 --> 07:24.910 +라마에게 토큰을 만들어 주려고요 + +07:25.450 --> 07:28.090 +이 대사는 처음 언급하는 거예요 + +07:28.090 --> 07:37.240 +패드 토큰이라는 것도 있는데요 신경망에 추가할 것이 있을 때 프롬프트를 + +07:37.240 --> 07:43.090 +채우는 데 쓰이는 토큰이죠 + +07:43.180 --> 07:50.440 +일반적인 관행으로 패드 토큰을 문장 끝, 프롬프트 토큰의 마지막과 + +07:50.470 --> 07:54.220 +동일하게 설정하는 것이죠 + +07:54.370 --> 07:57.830 +Get을 안 하면 경고를 받아요 + +07:57.860 --> 07:59.840 +Get you 경고는 중요하지 않아요 + +07:59.840 --> 08:01.250 +효과가 없는 것 같아요 + +08:01.250 --> 08:04.610 +경고를 받고 싶지 않다면 get을 선택하세요. + +08:04.640 --> 08:09.170 +많은 코드에서 사람들이 표준으로 사용하는 것을 볼 수 있어요. 
+ +08:10.310 --> 08:10.970 +네 + +08:10.970 --> 08:13.520 +이제 토큰라이저를 사용할 거예요 + +08:13.520 --> 08:17.390 +채팅 템플릿 적용 함수라고 부를게요 + +08:17.390 --> 08:23.930 +그건 우리 메시지를 사전 목록으로 받아들여 토큰으로 변환하는 거죠 + +08:24.260 --> 08:28.820 +그걸 GPU 위에 적용했어요 + +08:28.850 --> 08:33.080 +get을 실행하면 토큰izer가 작동할 거예요 + +08:33.080 --> 08:37.520 +다음으로 할 일은 모델을 로드하는 거예요 + +08:37.520 --> 08:39.950 +이 선은 뭐죠? + +08:39.980 --> 08:44.900 +우선 이 대사와 아주 유사해요 + +08:44.900 --> 08:50.540 +여기서 토큰라이저를 만들었는데 오토 토큰라이저. FROMpretrep이라고 입력했어요 + +08:50.570 --> 08:57.200 +미리 배운 자동 LLM을 위한 모델을 생성하죠 + +08:57.290 --> 09:02.420 +LLM을 생성하는 일반 클래스죠 + +09:02.450 --> 09:06.290 +인과적 LLM은 자가적 공격 LLM과 같아요 + +09:06.290 --> 09:13.760 +LLM은 과거의 토큰을 가지고 미래의 토큰을 예측하는 것이죠 + +09:13.760 --> 09:18.170 +기본적으로 우리가 얘기한 모든 llm은 그런 종류의 LLM이었죠 + +09:18.170 --> 09:24.200 +과정 후반부에 다른 종류의 LLM을 살펴보겠습니다 때때로 사용되는 거죠 + +09:24.200 --> 09:30.710 +하지만 우리가 말하는 이런 생성적인 인공지능 사용 사례에선 인과적 llms 즉 + +09:30.710 --> 09:34.130 +자동적 위반 llms를 연구할 거예요 + +09:34.130 --> 09:39.650 +미리 훈련된 걸 이용해서 만드는 거죠 + +09:39.650 --> 09:42.560 +토큰이처럼 모델 이름을 전달하는 거죠 + +09:42.560 --> 09:46.340 +GPU가 있다면 그걸 사용하고 싶다고 하죠 + +09:46.370 --> 09:48.950 +그게 Device Map 오토의 역할이죠 + +09:48.980 --> 09:57.590 +그런 다음 퀀트화 구성에서 넘깁니다 방금 설정한 퀀트 구성요 그게 모델을 구축하는 방법이죠 + +09:57.620 --> 10:07.700 +모델은 실제 코드입니다 즉, 소프트웨어로서의 큰 언어 모델로서 파이썬 코드로서 우리가 + +10:07.700 --> 10:09.440 +실행할 것이죠 + +10:09.440 --> 10:11.720 +그 아래는 피토치예요 + +10:11.750 --> 10:19.220 +파이토치 층으로 구성돼 있어요 신경망 층으로 구성되어 있어서 입력값에 입력하면 출력이 + +10:19.220 --> 10:20.390 +나오죠 + +10:20.390 --> 10:22.580 +진짜 물건이네요 + +10:22.670 --> 10:28.520 +이걸 실행하면 시간이 더 걸릴 거예요 방금 실행했으니까요 완전히 새로운 + +10:28.520 --> 10:32.330 +상자처럼 많은 작업을 할 필요가 없었죠 + +10:32.360 --> 10:38.090 +이걸 실행하면 실제로 발생하는 일은 다운로드예요 + +10:38.090 --> 10:39.650 +얼굴을 안는 것과 연결되죠 + +10:39.680 --> 10:46.190 +Hub에서 모델 무게를 다운로드해 Google Colab 인스턴스의 + +10:46.190 --> 10:54.460 +디스크에 로컬로 놓습니다 이 박스의 데스크에 있는 임시 파일 캐시에요 나중에 이 박스와 연결을 + +10:54.460 --> 11:01.540 +끊으면 삭제됩니다 이 모델은 이제 일시적으로 디스크의 박스에 저장됩니다 메모리에 + +11:01.540 --> 
11:07.660 +로드되기도 해서 우리가 사용할 수 있게 되죠 + +11:07.660 --> 11:12.940 +모델에게 물어볼 수 있어요 get 메모리풋프린트를 호출해 메모리를 얼마나 사용하는지요 + +11:12.940 --> 11:15.100 +뭐라고 하는지 보죠 + +11:15.100 --> 11:19.510 +이 모델의 메모리 공간은 약 5개라고 나오네요 5기가바요 + +11:19.840 --> 11:28.030 +이 박스에 대한 리소스를 보면 약 5개를 사용하는 걸 보실 수 있어요 상자는 5GB로 하고요 + +11:28.030 --> 11:31.210 +이미 실행하고 있었기 때문에 과거에도 흔들리고 있어요 + +11:31.450 --> 11:35.740 +하지만 자세히 보면 이 아래부터 시작해서 5.5cm까지 + +11:35.740 --> 11:37.000 +올라갈 거예요 + +11:37.000 --> 11:42.700 +디스크에서 많은 공간을 사용하고 있습니다 디스크 캐시에 로드되어 있기 때문이죠 + +11:43.690 --> 11:44.560 +네 + +11:44.590 --> 11:47.350 +황금 시간대 준비가 거의 다 됐어요 + +11:47.350 --> 11:51.250 +먼저 모델 자체를 살펴보죠 + +11:51.430 --> 11:54.210 +모델을 프린트함으로써 그렇게 하죠 + +11:54.990 --> 12:04.080 +모델을 프린트하면 이 모델 객체에 의해 표현되는 실제 심층 신경망에 대한 설명이 + +12:04.080 --> 12:05.370 +나와요 + +12:05.370 --> 12:06.990 +여기 보이는 게 그거예요 + +12:06.990 --> 12:12.720 +딥 신경망 층을 나타내는 코드 층이에요 + +12:12.720 --> 12:19.140 +이건 전부 PyTorch 클래스를 보여줍니다 모델에 의해 참조되는 + +12:19.140 --> 12:20.730 +설정이죠 + +12:21.210 --> 12:26.460 +다시 말씀드리지만 실습 시간이라 가끔 이론만 살짝 다룰 거예요 + +12:26.460 --> 12:31.320 +하지만 깊은 신경망과 그 층의 내장에 대한 지식 수준에 따라 살펴볼 + +12:31.320 --> 12:32.700 +가치가 있어요 + +12:32.700 --> 12:34.740 +익숙한 내용도 있을 거예요 + +12:34.740 --> 12:40.230 +토큰을 심는 레이어에서 시작한다는 게 익숙하실 겁니다 토큰을 신경망에 + +12:40.230 --> 12:41.820 +심는 방법이죠 + +12:41.820 --> 12:47.280 +이걸 상상해 보세요 단어의 입체성을 나타내는 + +12:47.280 --> 12:49.170 +거예요 + +12:49.620 --> 12:56.680 +그리고 모듈이 여러 개 있는 게 보이시죠 신경망의 각 레이어예요 + +12:56.710 --> 13:04.060 +예상했던 대로 주의 깊게 보는 층이 있어요 특히 주의 깊게 보는 게 가장 중요하잖아요 + +13:04.090 --> 13:08.890 +종이에 쓰여 있듯이 주의만 기울이면 돼요 이런 주의 층이 트랜스포머를 + +13:08.920 --> 13:11.350 +트랜스포머로 만드는 핵심이죠 + +13:11.350 --> 13:17.230 +그리고 다중 페르셉트론 층이 여기 있어요 + +13:17.230 --> 13:19.690 +활성화 함수가 있어요 + +13:19.690 --> 13:24.340 +이 이론에 더 익숙한 사람들은 이걸 보길 기대하겠죠 + +13:24.340 --> 13:32.860 +이 llama 3이 사용하는 활성화 함수죠 1번 모델은 리루 활성화 함수로 + +13:32.860 --> 13:40.570 +구형 아니 선형 유닛으로 여기 문서화에 나와 있죠 + +13:40.750 --> 13:43.900 +스위시 함수라고도 알려져 있죠 + +13:44.080 --> 13:49.300 +기본적으로 물적 X의 X 곱하기 X죠 + 
+13:49.300 --> 13:52.190 +활성화 함수는 이렇게 생겼어요 + +13:52.220 --> 13:57.020 +심층 신경망 이론에 관심이 있다면 이게 뭔지 정확히 알 거예요 + +13:57.050 --> 13:59.300 +아니라면 걱정하지 마세요 + +13:59.300 --> 14:05.630 +여기서 무슨 일이 일어나는지 전반적으로 파악하세요 이 모델이나 나중에 다른 걸 연구할 + +14:05.630 --> 14:09.200 +때 더 자세히 살펴볼 수 있는 거죠 Get it + +14:09.890 --> 14:17.690 +그 끝에는 표준 층이 있고 그 끝에는 직선 + +14:17.690 --> 14:20.360 +층이 있어요 + +14:21.170 --> 14:29.300 +이건 특별히 살펴볼 가치가 있어요 피터치 신경망에 대한 지식이 + +14:29.300 --> 14:30.560 +있다면요 + +14:30.770 --> 14:35.060 +나중에 다른 모델도 똑같이 할 수 있어요 + +14:35.060 --> 14:36.680 +모델 출력을 보세요 + +14:36.710 --> 14:42.320 +모델을 보고 인쇄해서 어떻게 생겼는지 보고 라마3와 비교해요 + +14:43.160 --> 14:47.720 +다음 비디오에선 쉬도록 하죠 다음 비디오에선 이걸 실행하고 다른 모델도 + +14:47.720 --> 14:49.040 +실행할 거예요 + +14:49.070 --> 14:50.690 +그러니 어디 가지 마세요 + +14:50.720 --> 14:51.770 +이따 봐요 diff --git a/week5/community-contributions/subtitles/srts/59271655/en_US.srt b/week5/community-contributions/subtitles/srts/59271655/en_US.srt new file mode 100755 index 0000000..63b5789 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59271655/en_US.srt @@ -0,0 +1,529 @@ +WEBVTT + +00:00.410 --> 00:06.860 +So here we are on Hugging Face's main landing page at Hugging Face Core. + +00:06.890 --> 00:07.790 +A URL you know. + +00:07.790 --> 00:11.390 +Well, since we produced the company brochure for Hugging Face a couple of times. + +00:11.510 --> 00:14.270 +And, uh, this is this is what you'll see. + +00:14.270 --> 00:16.760 +If you're not logged in, you don't have an account. + +00:16.760 --> 00:18.530 +You'll see a screen like this. + +00:18.530 --> 00:20.870 +And the first thing you need to do is go to sign up. + +00:20.870 --> 00:27.470 +If you don't already have a hugging face account, it is free to join email address and a password and + +00:27.470 --> 00:29.210 +then you will be in. + +00:29.240 --> 00:32.810 +And once you're in, you'll get to see something like this. 
+ +00:33.590 --> 00:40.040 +Uh, the main navigation of Hugging Face is up here, and you can see the first three parts of the main + +00:40.040 --> 00:42.680 +navigation are models, datasets and spaces. + +00:42.680 --> 00:45.050 +And that is what we're going to look at right now. + +00:45.350 --> 00:52.820 +So the first tab models, this is where in the hugging face platform or the hub as it's known, you + +00:52.820 --> 00:56.690 +can see all of the models that are available to you. + +00:56.900 --> 01:02.090 +I said before that it's more than 800,000 is in fact more than 900,000. + +01:02.120 --> 01:03.140 +Look at that. + +01:03.530 --> 01:10.460 +And this is sorted, I think, by default by trending, which is some combination of recency and popularity. + +01:10.490 --> 01:16.580 +You can also turn that into by how popular it is, how many times it's been downloaded and how recently + +01:16.580 --> 01:18.320 +it was created or updated. + +01:19.310 --> 01:24.920 +As you can see here, the name of each of these models is typically the name of the organization and + +01:24.920 --> 01:26.450 +then the name of the model. + +01:26.780 --> 01:34.610 +Flux is one of the latest and most exciting text to image models, and I do believe we'll be playing + +01:34.610 --> 01:35.780 +with that at some point. + +01:36.110 --> 01:42.170 +Quanta, the powerhouse model from Alibaba Cloud that I've mentioned a few times now. + +01:42.170 --> 01:48.590 +And you can see typically you have the number of parameters often in the description of a lamb. + +01:48.590 --> 01:56.540 +And then instruct at the end tells us that this has been fine tuned to be most applicable during a sort + +01:56.540 --> 02:01.980 +of chat and instruct kind of interaction with the input. + +02:02.700 --> 02:05.820 +And we can see lots of other models here. + +02:05.820 --> 02:07.740 +There's of course a filter up here. 
+ +02:07.740 --> 02:16.830 +So we can filter on something like llama to look at the llama model, which was Meta's model, which + +02:16.830 --> 02:20.400 +is so very well known, so very popular. + +02:20.550 --> 02:21.240 +Here it is. + +02:21.270 --> 02:26.550 +These are all various um, times that meta or llama will have been mentioned at some point in these + +02:26.550 --> 02:27.000 +descriptions. + +02:27.000 --> 02:35.220 +But you can see what we're really looking for is is here this is Meta llama 3.18 billion version model. + +02:35.220 --> 02:39.720 +And that's the same one fine tuned for the instruct use case. + +02:39.750 --> 02:43.500 +The this kind of uh um chat use case. + +02:43.500 --> 02:45.000 +So let's go in here. + +02:45.000 --> 02:51.540 +If you go into llama 3.18 billion, you see a ton of information about it. + +02:51.540 --> 02:55.170 +You see that it's been downloaded a large number of times. + +02:55.320 --> 03:00.450 +Uh, recently we'll be downloading it ourselves, I think, on many occasions over the coming weeks. + +03:00.570 --> 03:06.990 +Uh, you get some information about the model architecture, uh, the languages, the family, the how + +03:06.990 --> 03:12.270 +it's intended to be used, and then some code examples down at the end. + +03:12.420 --> 03:15.420 +Uh, so this is all useful stuff to read about. + +03:15.420 --> 03:17.550 +Also information about how it was trained. + +03:17.820 --> 03:22.590 +Um, and lots of other things worth reading about. + +03:23.100 --> 03:30.330 +There's also this tab here, Files and Versions, which opens up something that looks a bit like a git + +03:30.330 --> 03:31.380 +repository. + +03:31.380 --> 03:36.480 +And it's funny you should think that because it actually is a git repository. + +03:36.510 --> 03:42.930 +A lot of what you can think of the hugging face hub as is a sort of interface on top of a series of + +03:42.960 --> 03:44.190 +git repos. 
+ +03:44.190 --> 03:51.330 +And often when you're downloading a model or downloading data, what you're really doing is doing a + +03:51.360 --> 03:56.830 +sort of a pull from git get and getting these files locally. + +03:56.830 --> 04:08.110 +So this is the the the the folder and file structure that sits behind the meta llama 3.18 billion. + +04:08.440 --> 04:12.430 +And there are various ways that you can use it. + +04:12.520 --> 04:14.020 +There's a button here. + +04:14.020 --> 04:18.880 +Use this model that will give you more information if you go into it on what you need to do. + +04:18.880 --> 04:21.490 +That gives you actual examples of code. + +04:21.490 --> 04:24.370 +We'll be using this code later, so don't worry about it right now. + +04:24.670 --> 04:28.780 +You don't need to to read this, but know that you can always go to use this model. + +04:28.780 --> 04:34.390 +Select Transformers, which means you want to use it using hugging face Transformers library, and then + +04:34.390 --> 04:38.230 +copy code examples directly from the user interface. + +04:38.260 --> 04:39.850 +As simple as that. + +04:40.480 --> 04:43.270 +And there's other stuff here that we'll look at another day. + +04:43.300 --> 04:50.290 +There's some tags of course, which also allows you to filter on different, uh, aspects of models + +04:50.290 --> 04:51.940 +very quickly and easily. + +04:52.000 --> 04:55.420 +We'll be looking at lots of other models in time. + +04:55.420 --> 05:01.750 +We'll be looking at things like the Phi model from Microsoft and which I think I mentioned. + +05:01.780 --> 05:08.680 +We'll be looking, of course, at quanta, and that you've actually already seen and plenty of others. + +05:08.680 --> 05:14.350 +Maybe I'll mention Google's Gemma that you'll see Gemma will come up when I do this. + +05:14.590 --> 05:16.750 +Um, it did come up and then it went away. + +05:16.750 --> 05:19.900 +So let's do Google slash Gemma. + +05:19.930 --> 05:21.070 +There we go. 
+ +05:21.340 --> 05:30.850 +Um, so this, for example, the 2 billion very small on device version of Gemma and it as before has + +05:30.850 --> 05:37.990 +the description, the code examples, the files and versions and the ability to use this model just + +05:37.990 --> 05:40.360 +by clicking there like so. + +05:40.870 --> 05:42.280 +That's models. + +05:42.280 --> 05:44.080 +Let's move on to data sets. + +05:44.110 --> 05:50.920 +Data sets shows you the vast resource of data that's available on the Hugging Face hub. + +05:50.920 --> 05:53.470 +And again, you've got the ability to search. + +05:53.500 --> 05:55.600 +It's sorted by default on trending. + +05:55.870 --> 06:03.250 +And let me say later on, we're going to be doing some experiments using prices of products. + +06:03.250 --> 06:07.810 +And one of the things we'd love to see is some sort of scrape of product data. + +06:07.840 --> 06:15.850 +And it turns out there is in fact, there are a bunch of repositories of data related to prices on Amazon. + +06:15.850 --> 06:17.170 +Here is some of them. + +06:17.200 --> 06:21.220 +You can have a look around the popularity of them and which ones are useful. + +06:21.220 --> 06:26.380 +We are in fact going to be using this one here, which is very recent and which is very comprehensive + +06:26.380 --> 06:29.830 +indeed and has tons of useful data. + +06:29.950 --> 06:32.950 +So it's absolutely phenomenal resource. + +06:33.100 --> 06:40.390 +So do take a look at this and you can you can also use things like data set viewers and other tools + +06:40.390 --> 06:42.730 +that come with the data sets. + +06:42.760 --> 06:50.360 +Part of the Huggingface hub spaces I mentioned is where Gradio radio apps and other kinds of apps can + +06:50.360 --> 06:56.960 +run to do things that people in the community want to show off to others, or just get to get people + +06:56.960 --> 06:57.470 +using them. 
+ +06:57.470 --> 07:02.600 +And you can do the same with your hugging face apps or your radio apps. + +07:02.600 --> 07:05.090 +There's a lot of things to try out here. + +07:05.240 --> 07:07.850 +Uh, there's uh, there's sort of spaces of the week. + +07:07.850 --> 07:12.230 +There's there's things that are trending, and then there's a few types of spaces that we will be looking + +07:12.230 --> 07:19.340 +at a lot more in the next few or probably next week, mostly about leaderboards, comparing different + +07:19.370 --> 07:20.240 +LMS. + +07:20.480 --> 07:28.760 +Um, there's a plenty of fun spaces where you can try out different interesting LMS or generative AI + +07:28.760 --> 07:29.870 +applications. + +07:30.140 --> 07:35.330 +Um, one of the things I do tend to find is that sometimes because a lot of these are running for free + +07:35.330 --> 07:41.120 +on free boxes, it can be a bit flaky in that if it's a popular one, then it's overused and it's quite + +07:41.120 --> 07:45.050 +hard to get it to run because it will tell you that it's too busy right now. + +07:45.200 --> 07:48.750 +Um, but I think that comes with the territory of free software. + +07:49.050 --> 07:51.150 +When it does work, it is wonderful. + +07:51.150 --> 07:54.870 +So, for example, I just tried out a couple that were in the top spaces. + +07:54.870 --> 08:02.850 +So this one here, the AI Comic Factory, you can give it a style, you can give it a title of your + +08:02.850 --> 08:08.340 +comic and give it some something about how you want it to think when it's producing it. + +08:08.490 --> 08:15.720 +And I did a super powered data scientist comic, and you get this very cross looking, presumably the + +08:15.720 --> 08:21.390 +villain, and then you get presumably the heroine saying data is power, but sometimes it needs a little + +08:21.390 --> 08:22.050 +push. + +08:22.440 --> 08:24.630 +And so it's great fun. 
+ +08:24.630 --> 08:31.740 +This is an imagined created comic strip based on a topic that you choose, and I encourage you to come + +08:31.740 --> 08:32.700 +in and give it a try. + +08:32.700 --> 08:40.410 +It did take me 2 or 3 tries before I got this because it was too much in demand, but it's for me. + +08:40.410 --> 08:44.880 +It's on the first page of Most Popular right now, but if you find this or something like it, give + +08:44.880 --> 08:45.590 +it a try. + +08:45.620 --> 08:50.480 +Another thing that I tried out that was available was this. + +08:50.480 --> 08:51.650 +This is pretty funny. + +08:51.710 --> 08:53.360 +You can upload an image of yourself. + +08:53.360 --> 08:58.400 +I chose the one with me in front of the plane from before, which I cannot fly. + +08:58.790 --> 09:00.800 +But that hasn't stopped me from trying. + +09:01.190 --> 09:07.790 +And then you can pick a garment or upload a garment, and then it will show you wearing that garment. + +09:07.820 --> 09:08.810 +I mean, it's all right. + +09:08.810 --> 09:09.860 +It's not perfect. + +09:09.860 --> 09:11.030 +I don't know what's happening. + +09:11.240 --> 09:14.720 +I'm sort of hunched up like this a bit, but it gives you the idea. + +09:14.720 --> 09:19.760 +It's interesting to see that some sort of strange artifact has happened with the plane behind me. + +09:19.940 --> 09:23.570 +Uh, but, um, aside from that, it's fun. + +09:23.570 --> 09:29.900 +It's free, it's easy to use, and it's a classic example of people having good ideas about fun things + +09:29.900 --> 09:36.050 +you can do with AI, with Llms, and have surfaced it for others to play with. + +09:36.080 --> 09:39.590 +You will find many examples of this on spaces. + +09:40.190 --> 09:47.460 +Uh, so then the other thing I wanted to show you, is to show you what happens if you go to to the + +09:47.460 --> 09:49.260 +avatar menu and look at yourself. 
+ +09:49.260 --> 09:54.390 +So here is, um, the, uh, I'm just going to go straight here. + +09:54.570 --> 09:57.360 +Uh, I can see my own what I've done. + +09:57.360 --> 10:03.330 +I have one space, I've got a bunch of different models, some of which will be playing with ourselves, + +10:03.330 --> 10:05.880 +and I've got a bunch of data sets, and they're all private. + +10:05.880 --> 10:10.530 +You can make them private if you only want to have access to them, or public if you want the world + +10:10.530 --> 10:11.400 +to see them. + +10:11.610 --> 10:17.250 +Um, and I've got various data sets that we will be talking about many of these in the next few weeks, + +10:17.250 --> 10:18.720 +and I think you'll have fun with them. + +10:19.080 --> 10:25.890 +Uh, and this space that I've got, for example, I also will might refer to, um, just just as an + +10:25.890 --> 10:28.350 +example of how easy it is to do this stuff. + +10:28.440 --> 10:37.050 +But in this case, this is public and this is a game that I built which allows you to have llms compete + +10:37.050 --> 10:39.540 +against each other to try to battle. + +10:39.540 --> 10:45.360 +It was inspired by the the battle, the leadership battle we had in the first week. + +10:45.510 --> 10:53.340 +And I beefed that up a bit to make something where different lambs can fight in a way, to try and outwit + +10:53.370 --> 10:57.390 +each other and take money from each other, following some rules. + +10:57.390 --> 10:59.340 +And you can play a game and watch it run. + +10:59.340 --> 11:04.980 +Maybe we'll do that when we talk about the differences between lambs next week, I'll see if we have + +11:04.980 --> 11:05.370 +time. + +11:05.370 --> 11:09.570 +If not, by all means come and give this a try yourself to see how I've done, and you can see it as + +11:09.570 --> 11:16.500 +an example of how it's easy to take an LLM application with either a gradio front end. 
+ +11:16.500 --> 11:21.120 +In this case, this is called Streamlit, a different kind of user interface or others. + +11:21.120 --> 11:25.740 +And then publish it to be available to everyone on huggingface spaces. + +11:26.490 --> 11:33.300 +One more thing about this menu that's worth mentioning is that if you go to your profile and your, + +11:33.300 --> 11:40.840 +uh, sorry, if you go to your settings, I mean, down here to settings, uh, down here is a section + +11:40.840 --> 11:42.790 +called Access Tokens. + +11:42.790 --> 11:45.910 +This is something you need to do if you haven't done it before. + +11:45.940 --> 11:50.950 +You go to access tokens and then there's a simple button create new token. + +11:50.950 --> 11:58.060 +You press that to give yourself a new token, a new API token, where you will ask for both read and + +11:58.060 --> 11:59.230 +write permissions. + +11:59.230 --> 12:07.030 +That is a token that we'll be using in Jupyter in order to get access to the hub, and in order to both + +12:07.030 --> 12:09.970 +download and upload models and data. + +12:09.970 --> 12:14.260 +So that is a key part of setting up your hugging face account. + +12:14.350 --> 12:18.250 +And that concludes our very quick tour of all things hugging face. + +12:18.250 --> 12:19.990 +There's so much to explore. + +12:20.050 --> 12:22.420 +The to do for you is now go in. + +12:22.450 --> 12:26.650 +If you haven't already, set up your account, set up your API key, it's all free. + +12:26.650 --> 12:34.300 +And then go hunt up models for datasets and look around some of the spaces and try out some of the cool + +12:34.300 --> 12:37.990 +products that people have made available for all of the community to try. + +12:38.110 --> 12:39.220 +Enjoy that. 
diff --git a/week5/community-contributions/subtitles/srts/59271655/ja_JP.srt b/week5/community-contributions/subtitles/srts/59271655/ja_JP.srt new file mode 100755 index 0000000..76f0114 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59271655/ja_JP.srt @@ -0,0 +1,430 @@ +WEBVTT + +00:00.410 --> 00:06.860 +というわけで、 ハギング・フェイスのメイン・ランディング・ページ、 ハギング・フェイス・コアにやってきた。 + +00:06.890 --> 00:07.790 +あなたの知っているURL。 + +00:07.790 --> 00:11.390 +まあ、 ハギング・フェイスの会社案内を何度か制作したからね。 + +00:11.510 --> 00:14.270 +そして、 あー、 これがご覧のものです。 + +00:14.270 --> 00:16.760 +ログインしていない場合は、 アカウントを持っていません。 + +00:16.760 --> 00:18.530 +このような画面が表示されます。 + +00:18.530 --> 00:20.870 +そして、 まず最初にしなければならないことは、 登録に行くことだ。 + +00:20.870 --> 00:29.210 +まだハギング・フェイスのアカウントをお持ちでない方は、 メールアドレスとパスワードを入力すれば無料で参加できます。 + +00:29.240 --> 00:32.810 +そして中に入れば、 こんなものを見ることができる。 + +00:33.590 --> 00:40.040 +ハギング・フェイスのメイン・ナビゲーションはここにあります。 メイン・ナビゲーションの最初の3つの部分は、 + +00:40.040 --> 00:42.680 +モデル、 データセット、 スペースです。 + +00:42.680 --> 00:45.050 +それが今、 私たちが見ているものだ。 + +00:45.350 --> 00:56.690 +最初のタブでは、 ハギング・フェイス・プラットフォーム、 いわゆるハブで、 利用可能なすべてのモデルを見ることができます。 + +00:56.900 --> 01:02.090 +前に80万人以上と言ったが、 実際は90万人以上だ。 + +01:02.120 --> 01:03.140 +あれを見ろ。 + +01:03.530 --> 01:10.460 +そして、 これはデフォルトでトレンド順にソートされている。 + +01:10.490 --> 01:18.320 +また、 人気度やダウンロード回数、 最近作成または更新された度合いなどでも判断できる。 + +01:19.310 --> 01:26.450 +ここにあるように、 これらの各モデルの名前は通常、 組織名とモデル名の順になっている。 + +01:26.780 --> 01:35.780 +FLUXは、 最新で最もエキサイティングなテキストから画像へのモデルのひとつであり、 いずれはそれを使ってプレーすることになると思う。 + +01:36.110 --> 01:42.170 +クアンタは、 これまで何度か紹介してきたアリババクラウドの強力モデルだ。 + +01:42.170 --> 01:48.590 +そして、 子羊の説明によくあるパラメータの数を見ることができる。 + +01:48.590 --> 02:01.980 +そして最後には、 このインストラクターが、 チャットやインストラクターのようなインプットとの対話の中で最も適用できるように微調整されていることを教えてくれる。 + +02:02.700 --> 02:05.820 +他にもたくさんのモデルを見ることができる。 + +02:05.820 --> 02:07.740 +ここにはもちろんフィルターがある。 + +02:07.740 --> 02:20.400 +だから、 メタのモデルであり、 とても有名で、 とても人気があるラマ・モデルを見るために、 ラマ・モデルのようなものでフィルターをかけることができる。 + +02:20.550 --> 02:21.240 +これだ。 + +02:21.270 --> 02:27.000 +これらはすべて、 
メタやラマがこれらの記述のどこかで言及されている、 さまざまなウム、 回である。 + +02:27.000 --> 02:35.220 +しかし、 我々が本当に求めているものは、 ここにあるメタ・ラマ3であることがお分かりいただけるだろう。 180億バージョンモデル。 + +02:35.220 --> 02:39.720 +そして、 それは同じものをインストラクターのユースケースに合わせて微調整したものだ。 + +02:39.750 --> 02:43.500 +このような......チャットのユースケース。 + +02:43.500 --> 02:45.000 +では、 ここに入ろう。 + +02:45.000 --> 02:51.540 +リャマ3. 180億ドルもあれば、 それに関する情報は山ほどある。 + +02:51.540 --> 02:55.170 +何度もダウンロードされているのがわかるだろう。 + +02:55.320 --> 03:00.450 +最近、 僕らもダウンロードするようになったんだ。 + +03:00.570 --> 03:06.990 +モデル・アーキテクチャ、 言語、 ファミリ、 使用方法についての情報があり、 + +03:06.990 --> 03:12.270 +最後にコード例があります。 + +03:12.420 --> 03:15.420 +ええと、 だから、 これはすべて読むと役に立つことなんだ。 + +03:15.420 --> 03:17.550 +また、 どのようにトレーニングされたかについての情報も。 + +03:17.820 --> 03:22.590 +そのほかにも、 読む価値のあることはたくさんある。 + +03:23.100 --> 03:31.380 +ここには「ファイルとバージョン」というタブもあり、 gitリポジトリのようなものを開くことができる。 + +03:31.380 --> 03:36.480 +というのも、 実際にgitリポジトリだからだ。 + +03:36.510 --> 03:44.190 +ハグフェイス・ハブの多くは、 一連のgitリポジトリの上にあるインターフェイスのようなものだ。 + +03:44.190 --> 03:51.330 +モデルをダウンロードしたりデータをダウンロードしたりするとき、 実際にやっているのはgit getからのpullのようなもので、 + +03:51.360 --> 03:56.830 +これらのファイルをローカルに取得することです。 + +03:56.830 --> 04:08.110 +これがmeta llama 3の背後にあるフォルダとファイル構造だ。 180億ドル + +04:08.440 --> 04:12.430 +そして、 いろいろな使い方ができる。 + +04:12.520 --> 04:14.020 +ここにボタンがある。 + +04:14.020 --> 04:18.880 +やるべきことを突き詰めていけば、 より多くの情報が得られるこのモデルを利用しよう。 + +04:18.880 --> 04:21.490 +これは実際のコード例を示している。 + +04:21.490 --> 04:24.370 +このコードは後で使うので、 今は気にしなくていい。 + +04:24.670 --> 04:28.780 +これを読む必要はないが、 このモデルを使うためにいつでも行くことができることを知っておいてほしい。 + +04:28.780 --> 04:34.390 +Transformersを選択し、 抱擁顔Transformersライブラリを使って使用することを意味し、 + +04:34.390 --> 04:38.230 +ユーザーインターフェイスから直接コード例をコピーする。 + +04:38.260 --> 04:39.850 +簡単なことだ。 + +04:40.480 --> 04:43.270 +他にもいろいろあるが、 それはまた別の日にしよう。 + +04:43.300 --> 04:51.940 +もちろん、 タグもあるので、 モデルのさまざまな側面を素早く簡単にフィルタリングすることもできる。 + +04:52.000 --> 04:55.420 +いずれ他のモデルもたくさん見ていくつもりだ。 + +04:55.420 --> 05:01.750 +マイクロソフトのPhiモデルのようなものにも注目しています。 + +05:01.780 --> 05:08.680 +もちろん、 クアンタ(量子)にも注目する。 + +05:08.680 --> 05:14.350 +多分、 
グーグルのジェマに言及すると、 ジェマが出てくると思うよ。 + +05:14.590 --> 05:16.750 +確かに出てきたけど、 すぐに消えたよ。 + +05:16.750 --> 05:19.900 +では、 グーグル・スラッシュ・ジェンマをやってみよう。 + +05:19.930 --> 05:21.070 +これでよし。 + +05:21.340 --> 05:30.850 +例えば、 この20億のとても小さなデバイス版Gemmaには、 先ほどと同じように、 説明、 コード例、 + +05:30.850 --> 05:40.360 +ファイル、 バージョン、 そしてこのモデルをクリックするだけで使える機能があります。 + +05:40.870 --> 05:42.280 +それがモデルだ。 + +05:42.280 --> 05:44.080 +データセットに話を移そう。 + +05:44.110 --> 05:50.920 +データセットは、 Hugging Faceハブで利用可能な膨大なデータリソースを表示します。 + +05:50.920 --> 05:53.470 +また、 検索機能もある。 + +05:53.500 --> 05:55.600 +デフォルトではトレンドでソートされている。 + +05:55.870 --> 06:03.250 +そして後ほど、 商品の価格を使った実験を行う予定であることをお伝えしておこう。 + +06:03.250 --> 06:07.810 +そして、 私たちがぜひ見てみたいもののひとつが、 製品データのスクレイピングです。 + +06:07.840 --> 06:15.850 +そして、 実際にアマゾンの価格に関するデータのリポジトリがたくさんあることがわかった。 + +06:15.850 --> 06:17.170 +その一部を紹介しよう。 + +06:17.200 --> 06:21.220 +どれが便利なのか、 その人気ぶりを見て回ることができる。 + +06:21.220 --> 06:29.830 +これは非常に最近のもので、 実に包括的で有用なデータがたくさんある。 + +06:29.950 --> 06:32.950 +だから驚異的なリソースなんだ。 + +06:33.100 --> 06:42.730 +データセットに付属しているデータセット・ビューアなどのツールも利用できる。 + +06:42.760 --> 06:50.360 +先ほどのHuggingfaceのハブスペースの一部は、 Gradioのラジオアプリやその他の種類のアプリを走らせ、 コミュニティの人々が他の人に見せびらかしたい、 + +06:50.360 --> 06:57.470 +あるいはただ使ってもらいたいと思うようなことができる場所だ。 + +06:57.470 --> 07:02.600 +ハグ顔アプリやラジオアプリでも同じことができる。 + +07:02.600 --> 07:05.090 +ここには試してみたいことがたくさんある。 + +07:05.240 --> 07:07.850 +ええと、 週のスペースみたいなものがあるんだ。 + +07:07.850 --> 07:12.230 +トレンドになっているものもありますし、 来週か再来週には、 リーダーボードやさまざまなLMSの比較など、 + +07:12.230 --> 07:20.240 +さらに詳しく見ていくことになるいくつかのタイプのスペースもあります。 + +07:20.480 --> 07:29.870 +興味深いLMSやジェネレーティブAIのアプリケーションを試せる楽しいスペースがたくさんある。 + +07:30.140 --> 07:35.330 +私が感じることのひとつは、 これらの多くが無料ボックスで無料で提供されているため、 + +07:35.330 --> 07:41.120 +人気のあるものだと使いすぎてしまい、 「ただいま混雑しています」と表示されるため、 + +07:41.120 --> 07:45.050 +実行させるのが難しいということです。 + +07:45.200 --> 07:48.750 +うーん、 でも、 フリーソフトにはつきものだと思うよ。 + +07:49.050 --> 07:51.150 +うまくいったときは素晴らしい。 + +07:51.150 --> 07:54.870 +だから、 例えば、 トップ・スペースにあったカップルを試してみたんだ。 + +07:54.870 --> 08:08.340 +このAI Comic Factoryでは、 
スタイルやコミックのタイトルを設定することができます。 + +08:08.490 --> 08:15.720 +データサイエンティストがスーパーパワーを発揮するコミックを描いたんだけど、 おそらく悪役と思われる、 とても十字架を背負ったような人物が登場し、 + +08:15.720 --> 08:22.050 +ヒロインと思われる人物が、 データは力だけど、 時にはちょっとした後押しが必要だと言うんだ。 + +08:22.440 --> 08:24.630 +とても楽しいよ。 + +08:24.630 --> 08:32.700 +これは、 あなたが選んだトピックに基づいて想像で作られた漫画です。 + +08:32.700 --> 08:40.410 +あまりの需要の多さに、 これを手に入れるまでに2、 3回トライした。 + +08:40.410 --> 08:45.590 +今、 Most Popularの1ページ目に掲載されていますが、 これか似たようなものを見つけたら試してみてください。 + +08:45.620 --> 08:50.480 +もうひとつ、 こんなものもあったので試してみた。 + +08:50.480 --> 08:51.650 +これはかなり面白い。 + +08:51.710 --> 08:53.360 +自分の画像をアップロードすることができる。 + +08:53.360 --> 08:58.400 +私が選んだのは、 以前から飛行機の前にいる私だ。 + +08:58.790 --> 09:00.800 +しかし、 それでも挑戦することを止めない。 + +09:01.190 --> 09:07.790 +そして、 服を選ぶか、 服をアップロードすると、 その服を着ているあなたの姿が表示されます。 + +09:07.820 --> 09:08.810 +つまり、 大丈夫なんだ。 + +09:08.810 --> 09:09.860 +完璧ではない。 + +09:09.860 --> 09:11.030 +何が起きているのか分からない。 + +09:11.240 --> 09:14.720 +ちょっと猫背になっちゃったけど、 これを見てもらえればわかると思う。 + +09:14.720 --> 09:19.760 +後ろの飛行機で何か奇妙なアーティファクトが起きているのは興味深い。 + +09:19.940 --> 09:23.570 +あー、 でも、 それはさておき、 楽しいよ。 + +09:23.570 --> 09:29.900 +無料だし、 使いやすいし、 AIやLlmsでできる楽しいことについて良いアイディアを持っている人たちが、 + +09:29.900 --> 09:36.050 +他の人たちが遊べるように表に出した典型的な例だ。 + +09:36.080 --> 09:39.590 +スペースにはこのような例がたくさんある。 + +09:40.190 --> 09:49.260 +アバター・メニューで自分自身を見るとどうなるかをお見せしましょう。 + +09:49.260 --> 09:54.390 +だから、 ここで、 ええと、 僕はここにまっすぐ行くんだ。 + +09:54.570 --> 09:57.360 +自分のやったことは自分でわかるんだ。 + +09:57.360 --> 10:05.880 +私は1つのスペースを持っていて、 たくさんの異なるモデルを持っていて、 そのうちのいくつかは私たち自身と一緒にプレーすることになる。 + +10:05.880 --> 10:11.400 +自分だけがアクセスしたい場合は非公開に、 世界中に見てもらいたい場合は公開にすることができる。 + +10:11.610 --> 10:18.720 +ええと、 いろいろなデータセットがあるので、 これから数週間、 これらの多くについてお話しします。 + +10:19.080 --> 10:25.890 +例えば、 私が持っているこのスペースも、 このようなことがいかに簡単にできるかの一例として、 + +10:25.890 --> 10:28.350 +参考にするかもしれない。 + +10:28.440 --> 10:39.540 +しかし、 この場合、 これは公開されており、 私が作ったゲームで、 llm同士が対戦してバトルを試みることができる。 + +10:39.540 --> 10:45.360 +それは、 最初の週にあった戦い、 リーダーシップの戦いに触発されたものだ。 + +10:45.510 --> 10:57.390 +そして、 私はそれを少し強化し、 
異なる子羊たちがある意味戦うことができるようにした。 + +10:57.390 --> 10:59.340 +そして、 ゲームをプレーし、 その走りを見ることができる。 + +10:59.340 --> 11:05.370 +来週、 子羊の違いについて話すときにでも、 時間があればやってみようかな。 + +11:05.370 --> 11:16.500 +もしそうでなければ、 ぜひ自分でやってみてください。 私がどのようにやったか、 そしてグラディオのフロントエンドのどちらかを使ってLLMのアプリケーションをどのように取るのが簡単かの例として見ることができます。 + +11:16.500 --> 11:21.120 +この場合、 これはストリームリットと呼ばれ、 別の種類のユーザーインターフェイスなどである。 + +11:21.120 --> 11:25.740 +そして、 huggingfaceのスペースで誰もが利用できるように公開する。 + +11:26.490 --> 11:33.300 +もうひとつ、 このメニューで特筆すべきことは、 自分のプロフィールに行き、 + +11:33.300 --> 11:42.790 +自分の設定、 つまりこの下の設定に行くと、 「アクセストークン」というセクションがあることだ。 + +11:42.790 --> 11:45.910 +これは、 まだやったことがないのであれば、 やらなければならないことだ。 + +11:45.940 --> 11:50.950 +トークンにアクセスし、 新しいトークンを作成するというシンプルなボタンがある。 + +11:50.950 --> 11:59.230 +これを押すと、 新しいトークン(APIトークン)が発行され、 読み取りと書き込みの両方のパーミッションが要求される。 + +11:59.230 --> 12:09.970 +このトークンは、 Jupyterでハブにアクセスしたり、 モデルやデータをダウンロードしたりアップロードしたりするために使用する。 + +12:09.970 --> 12:14.260 +だから、 これがハグする顔のアカウントを設定する重要な部分なのだ。 + +12:14.350 --> 12:18.250 +そして、 これでハグする顔のすべてをめぐる超簡単なツアーは終了だ。 + +12:18.250 --> 12:19.990 +探検することはたくさんある。 + +12:20.050 --> 12:22.420 +あなたのためにすべきことは今、 中に入ることだ。 + +12:22.450 --> 12:26.650 +まだの方は、 アカウントを設定し、 APIキーを設定してください。 + +12:26.650 --> 12:37.990 +そして、 データセットのモデルを探し出し、 いくつかのスペースを見て回り、 コミュニティ全員が試せるように人々が作ったクールな製品を試してみる。 + +12:38.110 --> 12:39.220 +それを楽しんでくれ。 diff --git a/week5/community-contributions/subtitles/srts/59271655/ko_KR.srt b/week5/community-contributions/subtitles/srts/59271655/ko_KR.srt new file mode 100755 index 0000000..54ef896 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59271655/ko_KR.srt @@ -0,0 +1,520 @@ +WEBVTT + +00:00.410 --> 00:06.860 +여기는 H징 페이스코어의 주 랜딩 페이지인데요 + +00:06.890 --> 00:07.790 +URL 같은 거요 + +00:07.790 --> 00:11.390 +얼굴 안기기 팸플릿을 몇 번 제작한 후부터요 + +00:11.510 --> 00:14.270 +이게 여러분이 보시게 될 거예요 + +00:14.270 --> 00:16.760 +로그인 안 하면 계정이 없어요 + +00:16.760 --> 00:18.530 +이런 화면이 보일 거예요 + +00:18.530 --> 00:20.870 +가장 먼저 할 일은 등록하는 거예요 + +00:20.870 --> 00:27.470 +포옹 얼굴 계정이 아직 없다면 이메일 주소와 비밀번호를 무료로 등록하면 + 
+00:27.470 --> 00:29.210 +가입할 수 있어요 + +00:29.240 --> 00:32.810 +get it에 들어가면 이런 걸 보게 돼요 + +00:33.590 --> 00:40.040 +페이스 포옹의 주 내비게이션은 이 위에 있어요 내비게이션의 첫 세 부분은 모델, + +00:40.040 --> 00:42.680 +데이터셋, 스페이스예요 + +00:42.680 --> 00:45.050 +지금 그걸 살펴볼 거예요 + +00:45.350 --> 00:52.820 +첫 번째 탭 모델은 포옹형 플랫폼 혹은 허브라고 불리는 곳에서 사용 + +00:52.820 --> 00:56.690 +가능한 모든 모델을 볼 수 있어요 + +00:56.900 --> 01:02.090 +800,000개 이상이라고 했는데 사실은 900,000개 이상이죠 + +01:02.120 --> 01:03.140 +보세요 + +01:03.530 --> 01:10.460 +이건 정렬된 것 같아요 기본 트렌딩으로요 재조정과 인기를 합친 거죠 + +01:10.490 --> 01:16.580 +얼마나 유명한지도 바꿀 수 있어요 얼마나 많이 다운로드되었는지 얼마나 최근에 생성되거나 + +01:16.580 --> 01:18.320 +업데이트되었는지도요 + +01:19.310 --> 01:24.920 +보시다시피 각각의 모델의 이름은 전형적으로 조직의 이름이고 다음은 + +01:24.920 --> 01:26.450 +모델 이름이죠 + +01:26.780 --> 01:34.610 +플럭스는 가장 최신의 이미지 모델 텍스트 중 하나입니다 언젠가 그걸 갖고 놀 거라고 + +01:34.610 --> 01:35.780 +믿어요 + +01:36.110 --> 01:42.170 +퀀타는 제가 몇 번 언급했던 알리바바 클라우드의 강력한 모델이죠 + +01:42.170 --> 01:48.590 +보통 양을 묘사할 때 매개 변수의 수를 볼 수 있어요 + +01:48.590 --> 01:56.540 +그리고 마지막에 지시 사항은 채팅이나 입력 상호 작용을 지시할 때 가장 + +01:56.540 --> 02:01.980 +잘 적용되도록 조율되었다는 것을 알려줘요 + +02:02.700 --> 02:05.820 +다른 모델도 많이 있어요 + +02:05.820 --> 02:07.740 +물론 필터가 있죠 + +02:07.740 --> 02:16.830 +llama 같은 것을 필터링해 llama 모델을 볼 수 있어요 메타 모델이죠 아주 + +02:16.830 --> 02:20.400 +유명하고 인기 있는 거예요 + +02:20.550 --> 02:21.240 +여기 있네요 + +02:21.270 --> 02:27.000 +이 모든 게 다양한 시기예요 메타나 라마가 이 묘사에 나오는 어느 시점에서 언급되겠죠 + +02:27.000 --> 02:35.220 +하지만 우리가 정말 원하는 건 여기 메타 라마 3이에요 180억 버전 모델이죠 + +02:35.220 --> 02:39.720 +교육 사용 사례에 잘 맞춰진 것과 같은 거죠 + +02:39.750 --> 02:43.500 +이런 종류의 채팅 사용 사례요 + +02:43.500 --> 02:45.000 +이쪽으로 오세요 + +02:45.000 --> 02:51.540 +라마 3에 가시면요 180억 개에 관한 정보가 엄청나게 많아요 + +02:51.540 --> 02:55.170 +여러 번 다운로드된 게 보이시죠 + +02:55.320 --> 03:00.450 +최근에는 직접 다운로드도 할 거예요 앞으로 몇 주간 자주 그럴 거예요 + +03:00.570 --> 03:06.990 +모델 아키텍처에 대한 정보도 얻을 수 있고 언어, 가족, 사용 의도에 대한 정보도 + +03:06.990 --> 03:12.270 +얻을 수 있습니다. 그리고 코드 예제도 볼 수 있어요. 
+ +03:12.420 --> 03:15.420 +전부 다 유용한 책이에요 + +03:15.420 --> 03:17.550 +어떻게 훈련했는지도 알려줘요 + +03:17.820 --> 03:22.590 +그 외에도 읽을 만한 게 많아요 + +03:23.100 --> 03:30.330 +파일과 버전이라는 탭도 있어요 Git 저장소처럼 보이는 뭔가를 여는 + +03:30.330 --> 03:31.380 +탭이죠 + +03:31.380 --> 03:36.480 +그렇게 생각하다니 재미있네요 사실 그건 Git 저장소거든요 + +03:36.510 --> 03:42.930 +포옹하는 얼굴 허브는 git 리포스 시리즈의 인터페이스로 생각할 + +03:42.960 --> 03:44.190 +수 있어요 + +03:44.190 --> 03:51.330 +모델이나 데이터를 다운로드할 때 실제로 하는 일은 일종의 Git get에서 + +03:51.360 --> 03:56.830 +끌어와 이 파일들을 로컬로 가져오는 거죠 + +03:56.830 --> 04:08.110 +이게 메타 llama 3 뒤에 있는 폴더와 파일 구조예요 180억 달러요 + +04:08.440 --> 04:12.430 +다양한 방법으로 사용할 수 있어요 + +04:12.520 --> 04:14.020 +여기 버튼이 있어요 + +04:14.020 --> 04:18.880 +여러분이 해야 할 일에 대해 더 많은 정보를 주는 이 모델을 사용하세요 + +04:18.880 --> 04:21.490 +실제 코드 예제를 제공하죠 + +04:21.490 --> 04:24.370 +이 코드는 나중에 쓸 거니까 지금은 신경 쓰지 마세요 + +04:24.670 --> 04:28.780 +이걸 읽을 필요는 없지만 언제든 이 모델을 사용할 수 있다는 걸 알아두세요 + +04:28.780 --> 04:34.390 +얼굴 트랜스포머 라이브러리를 사용하려면 트랜스포머를 선택하세요 사용자 + +04:34.390 --> 04:38.230 +인터페이스에서 예제를 직접 복사하세요 + +04:38.260 --> 04:39.850 +아주 간단해요 + +04:40.480 --> 04:43.270 +다른 건 나중에 살펴보죠 + +04:43.300 --> 04:50.290 +물론 태그도 있어요 모델의 다양한 측면을 아주 빠르고 쉽게 필터링할 + +04:50.290 --> 04:51.940 +수 있게 해주죠 + +04:52.000 --> 04:55.420 +다른 모델도 많이 볼 거예요 + +04:55.420 --> 05:01.750 +마이크로소프트의 파이 모델 같은 것을 살펴볼 것입니다 제가 언급했던 것 같네요 + +05:01.780 --> 05:08.680 +물론 퀀타도 살펴볼 겁니다 이미 많이 보셨을 거예요 + +05:08.680 --> 05:14.350 +구글의 제마를 언급할까 봐요 제가 이걸 할 때 제마가 나올 거예요 + +05:14.590 --> 05:16.750 +네, 떴다가 다시 사라졌어요 + +05:16.750 --> 05:19.900 +구글 또는 제마로 해보죠 + +05:19.930 --> 05:21.070 +됐어요 + +05:21.340 --> 05:30.850 +예를 들어 20억 개의 아주 작은 장치 버전의젬마와 이전처럼 설명이 있어요 코드 + +05:30.850 --> 05:37.990 +예제, 파일과 버전 그리고 이렇게 클릭해서 이 모델을 사용할 + +05:37.990 --> 05:40.360 +수 있는 기능도요 + +05:40.870 --> 05:42.280 +모형이에요 + +05:42.280 --> 05:44.080 +데이터 집합으로 넘어가죠 + +05:44.110 --> 05:50.920 +데이터 집합은 페이스 허브에서 사용 가능한 방대한 데이터 리소스를 보여주죠 + +05:50.920 --> 05:53.470 +검색 기능도 있고요 + +05:53.500 --> 05:55.600 +기본값으로 정렬되어 있어요. 
+ +05:55.870 --> 06:03.250 +나중에 말씀드릴게요 제품 가격을 이용한 실험을 할 거예요 + +06:03.250 --> 06:07.810 +저희가 보고 싶은 건 긁힌 제품 데이터예요 + +06:07.840 --> 06:15.850 +알고 보니 아마존 가격과 관련된 데이터 저장소가 정말 많더라고요 + +06:15.850 --> 06:17.170 +여기 몇 개 있네요 + +06:17.200 --> 06:21.220 +어떤 게 인기 있고 어떤 게 유용한지 살펴보세요 + +06:21.220 --> 06:26.380 +여기 있는 이걸 사용할 겁니다 아주 최근 것으로 포괄적이고 + +06:26.380 --> 06:29.830 +유용한 데이터가 아주 많아요 + +06:29.950 --> 06:32.950 +정말 경이로운 자원이에요 + +06:33.100 --> 06:40.390 +이걸 보세요 데이터 세트 뷰어 같은 것도 사용할 수 있어요 데이터 세트와 함께 + +06:40.390 --> 06:42.730 +오는 다른 도구도요 + +06:42.760 --> 06:50.360 +H깅페이스 허브 공간의 일부는 Gadio 라디오 앱과 다른 종류의 앱이 실행되는 곳입니다 커뮤니티의 + +06:50.360 --> 06:57.470 +사람들이 남들에게 자랑하고 싶어 하거나 사용하게 만드는 작업을 위해서요 + +06:57.470 --> 07:02.600 +포옹하는 얼굴 앱이나 라디오 앱도 똑같이 할 수 있어요 + +07:02.600 --> 07:05.090 +여긴 시도해 볼 게 많아요 + +07:05.240 --> 07:07.850 +금주의 공간 같은 게 있어요 + +07:07.850 --> 07:12.230 +요즘 추세인 것들이 있어요 그리고 몇 가지 유형의 공간이 있는데 + +07:12.230 --> 07:19.340 +다음 몇 주나 다음 주에 더 자세히 살펴볼 겁니다 대부분 리더보드죠 다양한 LMS를 비교하는 + +07:19.370 --> 07:20.240 +거요 + +07:20.480 --> 07:28.760 +흥미로운 LMS나 재생 인공지능 애플리케이션을 시험해 볼 수 있는 재미있는 공간이 + +07:28.760 --> 07:29.870 +많아요 + +07:30.140 --> 07:35.330 +제가 발견하는 것 중 하나는 가끔 이런 거예요 많은 것들이 무료로 실행되고 + +07:35.330 --> 07:41.120 +있기 때문에 약간 비트가 있을 수 있어요 만약 인기가 있는 것이라면 과용이 되고 실행하기가 + +07:41.120 --> 07:45.050 +꽤 어렵죠 지금 너무 바쁘다고 알려 주거든요 + +07:45.200 --> 07:48.750 +하지만 그건 프리 소프트웨어 영역에 따라오는 거라고 생각해요 + +07:49.050 --> 07:51.150 +효과가 있으면 정말 좋죠 + +07:51.150 --> 07:54.870 +예를 들어, 상단 스페이스에 있는 걸 몇 개 시도해봤어요 + +07:54.870 --> 08:02.850 +인공지능 코믹 팩토리에는 스타일과 제목을 줄 수 있고 제작할 때 어떤 + +08:02.850 --> 08:08.340 +생각을 하길 바라는지 알려줄 수 있어요 + +08:08.490 --> 08:15.720 +슈퍼 파워 데이터 사이언스 만화를 그렸는데 악당으로 추정되는 인물이 매우 화난 표정으로 + +08:15.720 --> 08:21.390 +나오고 여주인공은 데이터가 힘이라고 하지만 가끔은 좀 더 밀어붙여야 + +08:21.390 --> 08:22.050 +해요 + +08:22.440 --> 08:24.630 +정말 재미있어요 + +08:24.630 --> 08:31.740 +여러분이 선택한 주제에 기반해 상상으로 그린 만화입니다 여러분도 와서 한번 + +08:31.740 --> 08:32.700 +해 보세요 + +08:32.700 --> 08:40.410 +두세 번 시도한 끝에 겨우 이걸 얻었어요 워낙 인기가 많았거든요 하지만 제 취향이에요 + +08:40.410 --> 
08:44.880 +현재 가장 인기 있는 사이트 첫 페이지에 있지만 이런 걸 발견하시면 한번 + +08:44.880 --> 08:45.590 +써 보세요 + +08:45.620 --> 08:50.480 +제가 시도했던 또 다른 방법은 이거였어요 + +08:50.480 --> 08:51.650 +이거 정말 웃기네요 + +08:51.710 --> 08:53.360 +자신의 사진을 업로드 할 수 있어요 + +08:53.360 --> 08:58.400 +아까 비행기 앞에서 탔던 비행기를 선택했어요 전 못 타지만요 + +08:58.790 --> 09:00.800 +그래도 계속 노력했어요 + +09:01.190 --> 09:07.790 +옷을 고르거나 업로드 하면 그 옷을 입은 모습이 나와요 + +09:07.820 --> 09:08.810 +아니, 괜찮아요 + +09:08.810 --> 09:09.860 +완벽하지 않아요 + +09:09.860 --> 09:11.030 +무슨 일인지 모르겠어요 + +09:11.240 --> 09:14.720 +약간 구부정하긴 하지만 비트를 보면 알 수 있죠 + +09:14.720 --> 09:19.760 +제 뒤에 있는 비행기와 기이한 물체가 충돌하다니 흥미롭네요 + +09:19.940 --> 09:23.570 +하지만 그것만 빼면 재밌어요 + +09:23.570 --> 09:29.900 +무료에 사용도 쉽고 인공지능이나 LM으로 할 수 있는 재미있는 일을 사람들이 + +09:29.900 --> 09:36.050 +떠올리는 전형적인 예죠 다른 사람들이 갖고 놀 수 있게 드러난 거예요 + +09:36.080 --> 09:39.590 +스페이스에 이런 예가 많아요 + +09:40.190 --> 09:47.460 +여러분께 보여드리고 싶은 또 다른 건 아바타 메뉴에서 자신을 보면 어떻게 되는지 + +09:47.460 --> 09:49.260 +보여드릴게요 + +09:49.260 --> 09:54.390 +여기서 바로 시작할 거예요 + +09:54.570 --> 09:57.360 +제가 한 일을 보면 알아요 + +09:57.360 --> 10:03.330 +한 스페이스가 있고 다양한 모델이 여러 개 있어요 일부는 우리 자신을 갖고 놀겠죠 데이터 세트도 + +10:03.330 --> 10:05.880 +여러 개 있는데 모두 개인이에요 + +10:05.880 --> 10:10.530 +오직 보고 싶을 때만 볼 수 있는 개인용으로 만들 수도 있고 온 세상이 보길 원한다면 공개용으로 + +10:10.530 --> 10:11.400 +만들 수도 있죠 + +10:11.610 --> 10:17.250 +다양한 데이터 모음이 있는데 앞으로 몇 주 동안 많이 얘기할 거예요 + +10:17.250 --> 10:18.720 +재미있을 거예요 + +10:19.080 --> 10:25.890 +예를 들어 여기 이 공간은 이런 작업이 얼마나 쉬운지 보여주는 + +10:25.890 --> 10:28.350 +예가 될 거예요 + +10:28.440 --> 10:37.050 +하지만 이 경우엔 공용이고 제가 만든 게임으로 llm들이 서로 경쟁하며 배틀을 + +10:37.050 --> 10:39.540 +시도하게 할 수 있죠 + +10:39.540 --> 10:45.360 +첫 주에 있었던 리더십 싸움에서 영감을 받았어요 + +10:45.510 --> 10:53.340 +거기에 비트를 더해서 양들이 서로 싸우면서 서로 이기려고 하고 돈을 + +10:53.370 --> 10:57.390 +뺏으려고 해요 규칙을 따르면서요 + +10:57.390 --> 10:59.340 +게임을 하면서 달리는 걸 볼 수 있죠 + +10:59.340 --> 11:05.370 +다음 주에 양들의 차이점을 얘기할 때 해 보죠 시간이 되는지 볼게요 + +11:05.370 --> 11:09.570 +그렇지 않다면, 직접 와서 제가 어떻게 했는지 보세요. 
LLM + +11:09.570 --> 11:16.500 +응용 프로그램이 얼마나 쉬운지 예로 볼 수 있습니다. 그러디오 프런트 엔드를 이용하거나 + +11:16.500 --> 11:21.120 +이 경우에 이것은 Streamlit이라고 합니다. 다른 종류의 사용자 인터페이스 같은 것이죠. + +11:21.120 --> 11:25.740 +그리고 포옹의 공간에 있는 모든 사람이 볼 수 있게 출판하는 거죠 + +11:26.490 --> 11:33.300 +이 메뉴에 대해 한 가지 더 언급할 것은 프로필로 가시면 죄송합니다, + +11:33.300 --> 11:40.840 +설정으로 가시면, 이 아래 설정으로요 이 아래에는 액세스 토큰이라는 섹션이 + +11:40.840 --> 11:42.790 +있어요 + +11:42.790 --> 11:45.910 +한 번도 안 해 본 사람은 꼭 해 봐야 해요 + +11:45.940 --> 11:50.950 +액세스 토큰으로 가서 간단한 버튼으로 새 토큰을 생성하세요 + +11:50.950 --> 11:58.060 +그걸 눌러 새 API 토큰을 주는 거죠 새 API 토큰이요 읽기와 쓰기 사용 권한을 둘 다 요청하는 + +11:58.060 --> 11:59.230 +거죠 + +11:59.230 --> 12:07.030 +Jupyter에서 사용할 토큰으로 허브에 접근하고 모델과 데이터를 다운로드 + +12:07.030 --> 12:09.970 +및 업로드 하기 위해서죠 + +12:09.970 --> 12:14.260 +포옹 얼굴 계정 설정에서 중요한 부분이죠 + +12:14.350 --> 12:18.250 +얼굴 맞대기 시범은 이걸로 간단히 마칠게요 + +12:18.250 --> 12:19.990 +탐험할 게 정말 많아요 + +12:20.050 --> 12:22.420 +이제 안으로 들어가세요 + +12:22.450 --> 12:26.650 +아직 계정과 API 키를 설정하지 않으셨다면 모두 무료예요 + +12:26.650 --> 12:34.300 +데이터셋 모델을 찾고 공간을 둘러보면서 사람들이 커뮤니티에서 누구나 사용할 수 + +12:34.300 --> 12:37.990 +있게 만든 멋진 제품을 시도해 보세요 + +12:38.110 --> 12:39.220 +맛있게 드세요 diff --git a/week5/community-contributions/subtitles/srts/59295363/en_US.srt b/week5/community-contributions/subtitles/srts/59295363/en_US.srt new file mode 100755 index 0000000..ca65c28 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295363/en_US.srt @@ -0,0 +1,64 @@ +WEBVTT + +00:00.920 --> 00:03.530 +Well, another congratulations moment. 
+
+00:03.530 --> 00:13.010
+You have 40% on the way to being an LLM engineer at a high level of proficiency at this point, in addition
+
+00:13.010 --> 00:19.340
+to the stuff that you know very well about frontier models and open source models, you can also
+
+00:19.340 --> 00:28.160
+now use the Hugging Face Open LLM Leaderboard to compare different models against a number of hard metrics,
+
+00:28.160 --> 00:34.010
+and you've got a great understanding of the difference between these metrics, what they score and why,
+
+00:34.010 --> 00:36.620
+and the limitations of the metrics as well.
+
+00:36.650 --> 00:42.860
+Hopefully you spent some quality time yourself now with the LLM leaderboard, getting familiar with the
+
+00:42.860 --> 00:44.060
+different models.
+
+00:44.270 --> 00:47.480
+Next week there is some more on leaderboards to do.
+
+00:47.480 --> 00:51.050
+I hope you're not bored of leaderboards yet, because we're going to look at a bunch of others that
+
+00:51.050 --> 00:57.560
+are used to compare more specialized things about models, and we're also going to look at ones that
+
+00:57.560 --> 01:01.910
+bring together open source and closed source, which will be very interesting.
+
+01:02.330 --> 01:08.390
+We are going to talk about real world use cases of LLMs solving commercial problems.
+
+01:08.390 --> 01:16.220
+I imagine everyone has experienced this in your workplace and in just using products all the time.
+ +01:16.220 --> 01:22.430 +But I want to go through a few examples and have you think about other places where Llms have been applied + +01:22.460 --> 01:29.300 +to solve hard commercial problems beyond the obvious, and that should equip you to be able to confidently + +01:29.300 --> 01:36.530 +choose the right LM for your task at hand, so that you know when you're facing a commercial project, + +01:36.560 --> 01:43.670 +how do you go about navigating the huge number of possible models and select the right couple to build + +01:43.670 --> 01:44.690 +a prototype for? + +01:44.690 --> 01:47.060 +So with that, I will see you next time. + +01:47.060 --> 01:48.110 +We've got lots to do. diff --git a/week5/community-contributions/subtitles/srts/59295363/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295363/ja_JP.srt new file mode 100755 index 0000000..a2f71d9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295363/ja_JP.srt @@ -0,0 +1,43 @@ +WEBVTT + +00:00.920 --> 00:03.530 +おめでとうございます。 + +00:03.530 --> 00:13.010 +フロンティアモデルやオープンソースモデルについてよく知っていることに加え、 Hugging FaceのオープンLMリーダーボードを使用して、 + +00:13.010 --> 00:19.340 +さまざまなモデルを多くのハードなメトリクスと比較することができるようになりました。 + +00:19.340 --> 00:36.620 +これらのメトリクスの違い、 メトリクスのスコアとその理由、 メトリクスの限界についてもよく理解しています。 + +00:36.650 --> 00:44.060 +LMリーダーボードで、 さまざまなモデルに慣れ親しみながら、 充実した時間を過ごせたことだろう。 + +00:44.270 --> 00:47.480 +来週は、 リーダーボードについてもう少しやることがある。 + +00:47.480 --> 00:51.050 +まだリーダーボードに飽きていないことを願っている。 これから、 + +00:51.050 --> 01:01.910 +モデルについてより専門的なことを比較するために使われる他のものをたくさん見ていくつもりだし、 オープンソースとクローズドソースを一緒にしたものも見ていくつもりだ。 + +01:02.330 --> 01:08.390 +商業的な問題を解決するLMの実際の使用例について話すつもりだ。 + +01:08.390 --> 01:16.220 +このようなことは、 誰もが職場や製品を使用する上で経験したことがあるのではないだろうか。 + +01:16.220 --> 01:22.430 +そして、 商業的なプロジェクトに直面したときに、 + +01:22.460 --> 01:44.690 +膨大な数の可能性のあるモデルの中から、 プロトタイプを作るのにふさわしいものを選ぶにはどうすればいいのかがわかるようになるのだ。 + +01:44.690 --> 01:47.060 +それでは、 また次回お会いしましょう。 + +01:47.060 --> 01:48.110 +やることはたくさんある。 diff --git 
a/week5/community-contributions/subtitles/srts/59295363/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295363/ko_KR.srt new file mode 100755 index 0000000..f12cced --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295363/ko_KR.srt @@ -0,0 +1,58 @@ +WEBVTT + +00:00.920 --> 00:03.530 +또 한 번 축하드려요 + +00:03.530 --> 00:13.010 +40%는 숙달된 LM 엔지니어가 될 수 있습니다 현재로선요 여러분이 잘 아는 프런티어 모델과 + +00:13.010 --> 00:19.340 +오픈 소스 모델 외에도 H징 페이스 오픈 LM leaderboard를 + +00:19.340 --> 00:28.160 +이용해 여러 모델을 여러 가지 어려운 지표와 비교할 수 있습니다 이 지표들 간의 차이점과 + +00:28.160 --> 00:36.620 +어떤 점과 이유 그리고 지표의 한계를 잘 이해하게 됐죠 + +00:36.650 --> 00:42.860 +LM leaderboard를 보면서 많은 시간을 보내셨길 바랍니다 다양한 모델들을 + +00:42.860 --> 00:44.060 +익히셨겠죠 + +00:44.270 --> 00:47.480 +다음 주에도 순위표에 추가할 게 있어요 + +00:47.480 --> 00:51.050 +아직 leaderboard가 지루하지 않으시길 + +00:51.050 --> 00:57.560 +바랍니다 모델에 대한 좀 더 전문화된 것을 비교하기 위해 사용되는 다른 것들도 볼 테니까요 오픈 소스와 + +00:57.560 --> 01:01.910 +폐쇄 소스를 함께 하는 것도 볼 겁니다 아주 흥미로울 거예요 + +01:02.330 --> 01:08.390 +LM의 실제 사용 사례를 얘기해 보겠습니다 상업적 문제를 해결하는 거죠 + +01:08.390 --> 01:16.220 +직장에서도 다들 이런 경험이 있을 것 같아요 항상 제품을 사용하면서요 + +01:16.220 --> 01:22.430 +몇 가지 예시를 살펴보고 다른 곳에서도 어려운 상업적 문제를 해결하기 + +01:22.460 --> 01:29.300 +위해 LM을 적용한 적이 있는지 보겠습니다 그러면 당면한 작업에 맞는 LM을 + +01:29.300 --> 01:36.530 +자신 있게 선택할 수 있죠 상업적 프로젝트를 할 때 엄청나게 많은 가능한 모델을 + +01:36.560 --> 01:44.690 +어떻게 살펴보고 프로토타입을 만들기에 적당한 모델을 어떻게 선택할까요? 
+ +01:44.690 --> 01:47.060 +그럼 다음 시간에 뵙죠 + +01:47.060 --> 01:48.110 +할 일이 많아요 diff --git a/week5/community-contributions/subtitles/srts/59295377/en_US.srt b/week5/community-contributions/subtitles/srts/59295377/en_US.srt new file mode 100755 index 0000000..47701b6 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295377/en_US.srt @@ -0,0 +1,229 @@ +WEBVTT + +00:00.770 --> 00:06.050 +Just before we go on to some of the more advanced metrics, I want to mention for a second something + +00:06.050 --> 00:12.410 +called the Chinchilla Scaling Law, which is a wonderfully named law coined by the Google DeepMind team + +00:12.410 --> 00:15.200 +after one of their models called Chinchilla. + +00:15.590 --> 00:22.640 +And it's related to how you think about the number of parameters that you need in a model, the number + +00:22.640 --> 00:28.580 +of weights in the neural network, and what the law says is that the number of parameters, how many + +00:28.580 --> 00:34.610 +parameters you have, is roughly proportional to the size of your training data to the number of training + +00:34.640 --> 00:35.630 +tokens. + +00:35.960 --> 00:42.290 +And what that means, basically, is that supposing you've got a model, let's say it's an 8 billion + +00:42.290 --> 00:47.000 +parameter model, and you get to the point where you start to see that you're getting diminishing returns. + +00:47.030 --> 00:50.660 +Adding in more training data isn't significantly affecting the model. + +00:50.660 --> 00:56.720 +So you have this sense, okay, I've got now the right amount of training data for this size of model. + +00:56.720 --> 00:58.430 +This is a good a good match up. + +00:58.430 --> 01:05.480 +We've we've used our training data successfully for the model to learn to its to its most capacity of + +01:05.480 --> 01:06.170 +learning. + +01:06.500 --> 01:08.450 +And the question might be all right. 
+ +01:08.450 --> 01:15.240 +So if I wanted to add more parameters, give the model more flexibility to learn more and to be more, + +01:15.240 --> 01:16.740 +more powerful and nuanced. + +01:16.770 --> 01:20.910 +How many more parameters do I need given extra training data? + +01:20.940 --> 01:26.310 +And the answer is, if you were then to double the amount of training data from that that point of diminishing + +01:26.310 --> 01:31.920 +returns, you would need double the number of weights you'd need to go from 8 billion to 16 billion + +01:31.950 --> 01:39.330 +parameters to be able to consume twice the training data and learn from it in an effective way, and + +01:39.330 --> 01:42.720 +be that much more powerful and nuanced at the end of it. + +01:42.990 --> 01:50.130 +So it gives you a sense of how many more parameters do you need to absorb more training data effectively. + +01:50.340 --> 01:58.050 +And it also gives you the sort of the flip side, the opposite, uh, relationship to that. + +01:58.050 --> 02:02.970 +If you're if you've been working with a model which is an 8 billion model, and then someone says, + +02:02.970 --> 02:07.440 +we'd like to upgrade to a 16 billion parameter model, let's use that instead. + +02:07.650 --> 02:11.820 +Uh, and you're thinking, all right, well, obviously, if I'm going to take advantage of all of this + +02:11.820 --> 02:18.610 +extra flexibility, all of this extra predictive power in this bigger model with more, more dials, + +02:18.610 --> 02:20.170 +more weights to learn from. + +02:20.440 --> 02:25.270 +How much more training data am I going to need to be able to to take advantage of that? + +02:25.270 --> 02:30.070 +And the answer is you would you would roughly need to double the size of your training data set. + +02:30.070 --> 02:36.220 +So that relationship between the number of training tokens and parameters, uh, was, was suggested, + +02:36.250 --> 02:38.920 +uh, a few years ago, and it stood the test of time. 
+ +02:38.920 --> 02:45.340 +It turns out that that for transformers for the transformer architecture, this scaling law appears + +02:45.340 --> 02:46.330 +to apply. + +02:46.330 --> 02:46.990 +Well. + +02:46.990 --> 02:49.780 +And it's a great rule of thumb to keep to hand. + +02:50.710 --> 02:51.580 +All right. + +02:51.610 --> 02:56.050 +With that, let's just move on now to benchmarks. + +02:56.050 --> 03:03.670 +So benchmarks are the common metrics that people talk about uh, which are used to weigh up different + +03:03.670 --> 03:04.630 +models. + +03:04.660 --> 03:12.670 +They are a series of tests that are applied and used in various leaderboards, which is where you rank + +03:12.670 --> 03:18.400 +different, different LMS, uh, to see the different pros and cons of different models. + +03:18.430 --> 03:22.270 +Now I've got this table of different benchmarks. + +03:22.270 --> 03:28.320 +I'm going to go through them one at a time and get a sense for each one. + +03:28.350 --> 03:32.850 +Now, you don't need to remember what each of these benchmarks are because you can always look it up. + +03:32.850 --> 03:36.810 +It's useful for you to have a sense of it so that it comes back to you quickly. + +03:36.810 --> 03:41.940 +So definitely focus and take this in and and and do some research. + +03:41.940 --> 03:42.870 +If you have questions. + +03:42.870 --> 03:47.160 +We're going to see these numbers in some of the analysis that we'll be doing later as we compare different + +03:47.160 --> 03:47.940 +models. + +03:48.030 --> 03:54.300 +So the first one I'm going to mention of the the seven most common benchmarks you see all over the place. + +03:54.300 --> 04:00.510 +The first one is called Arc, which is a benchmark that measures scientific reasoning. + +04:00.510 --> 04:03.030 +It's basically a bunch of multiple choice questions. 
+
+04:03.060 --> 04:11.310
+DROP is a language comprehension test which involves looking at text, distilling it, and then doing
+
+04:11.310 --> 04:14.760
+things like adding or sorting or counting from that text.
+
+04:14.880 --> 04:21.270
+HellaSwag, which stands for harder endings, longer contexts and low-shot activities, is a kind of
+
+04:21.300 --> 04:23.790
+common sense reasoning test.
+
+04:24.240 --> 04:26.820
+MMLU is super famous.
+
+04:26.820 --> 04:28.860
+You'll see it all over the place.
+
+04:28.860 --> 04:35.340
+It was a really common metric that involves reasoning across 57 subjects.
+
+04:35.800 --> 04:42.760
+There were some questions raised about how well formed the questions were.
+
+04:42.760 --> 04:46.000
+And there are some doubts about the effectiveness of them.
+
+04:46.030 --> 04:47.680
+MMLU, it was perhaps overused.
+
+04:47.680 --> 04:53.860
+And you'll see later that there's a variation on MMLU which is now more popular called MMLU Pro.
+
+04:54.130 --> 04:55.930
+So this has somewhat been replaced.
+
+04:55.930 --> 05:05.140
+Now, TruthfulQA is about accuracy and robustness, particularly in adversarial conditions when the
+
+05:05.140 --> 05:07.720
+model is encouraged to not be truthful.
+
+05:08.290 --> 05:19.600
+WinoGrande is testing that a model can resolve ambiguity in more confusing contexts, and then GSM8K,
+
+05:19.600 --> 05:27.670
+grade school math at the 8K level, is both math and also word problems that are at elementary
+
+05:27.670 --> 05:30.010
+and middle school level.
+
+05:30.010 --> 05:35.110
+So these are seven common benchmarks you come across a lot.
+
+05:35.200 --> 05:36.760
+Uh, keep note of them.
+
+05:36.790 --> 05:38.470
+They're in the resources.
+
+05:38.470 --> 05:42.910
+And as I say, these are things you will see a lot.
+
+05:42.940 --> 05:44.770
+And hopefully you will now recognize them.
diff --git a/week5/community-contributions/subtitles/srts/59295377/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295377/ja_JP.srt new file mode 100755 index 0000000..83320af --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295377/ja_JP.srt @@ -0,0 +1,190 @@ +WEBVTT + +00:00.770 --> 00:15.200 +チンチラ・スケーリングの法則と呼ばれるもので、 グーグル・ディープマインド・チームがチンチラというモデルにちなんで作った素晴らしい名前の法則である。 + +00:15.590 --> 00:22.640 +そしてそれは、 モデルに必要なパラメーターの数、 ニューラルネットワークの重みの数をどう考えるかということに関係しています。 + +00:22.640 --> 00:28.580 +この法則によると、 パラメーターの数、 つまりいくつのパラメーターを持つかということは、 + +00:28.580 --> 00:35.630 +訓練データのサイズと訓練トークンの数にほぼ比例するということです。 + +00:35.960 --> 00:42.290 +つまり、 基本的には、 仮に80億のパラメーターを持つモデルがあったとして、 収穫が逓減していくのがわかるようになる、 + +00:42.290 --> 00:47.000 +ということだ。 + +00:47.030 --> 00:50.660 +トレーニングデータを増やしても、 モデルに大きな影響はない。 + +00:50.660 --> 00:56.720 +それで、 このサイズのモデルには適切な量のトレーニングデータがある、 という感覚を持つことができる。 + +00:56.720 --> 00:58.430 +これはいい試合だ。 + +00:58.430 --> 01:06.170 +我々は、 モデルの学習能力を最大限に引き出すために、 トレーニングデータをうまく使ってきた。 + +01:06.500 --> 01:08.450 +そして、 その質問は正しいかもしれない。 + +01:08.450 --> 01:16.740 +だから、 もし私がもっとパラメーターを増やしたかったら、 モデルにもっと柔軟性を与えて、 もっと学習させ、 もっとパワフルでニュアンスのあるものにする。 + +01:16.770 --> 01:20.910 +トレーニングデータを増やした場合、 あといくつパラメータが必要ですか? + +01:20.940 --> 01:26.310 +そしてその答えは、 もし収穫逓減の時点からトレーニングデータの量を2倍にするとしたら、 + +01:26.310 --> 01:31.920 +2倍のトレーニングデータを消費し、 効果的な方法でそこから学習し、 最終的にはよりパワフルでニュアンスのあるものにするためには、 + +01:31.950 --> 01:42.720 +80億から160億のパラメーターへと2倍のウェイト数が必要になるということだ。 + +01:42.990 --> 01:50.130 +つまり、 より多くのトレーニングデータを効率的に吸収するためには、 あといくつのパラメーターが必要なのかを知ることができる。 + +01:50.340 --> 01:58.050 +そして、 その裏返し、 つまり逆の関係も示している。 + +01:58.050 --> 02:02.970 +もしあなたが80億のモデルを使って仕事をしていて、 誰かが160億のパラメーターモデルにアップグレードしたいと言ったら、 + +02:02.970 --> 02:07.440 +代わりにそれを使いましょう。 + +02:07.650 --> 02:11.820 +そしてあなたはこう考えるだろう。 もし私が、 より大きなモデルで、 + +02:11.820 --> 02:20.170 +より多くのダイヤル、 より多くの重みから学習することで、 より多くの柔軟性、 より多くの予測力を活用するのであれば。 + +02:20.440 --> 02:25.270 +その利点を生かすためには、 あとどれくらいのトレーニングデータが必要なんだろう? 
+ +02:25.270 --> 02:30.070 +そして答えは、 トレーニングデータセットのサイズをおよそ2倍にする必要があるということだ。 + +02:30.070 --> 02:36.220 +トレーニング・トークンの数とパラメーターの関係は、 数年前に提案され、 + +02:36.250 --> 02:38.920 +時の試練に耐えた。 + +02:38.920 --> 02:46.330 +その結果、 トランス・アーキテクチャーのトランスには、 このスケーリング法則が適用されることが判明した。 + +02:46.330 --> 02:46.990 +まあね。 + +02:46.990 --> 02:49.780 +手元に置いておくと便利な経験則だ。 + +02:50.710 --> 02:51.580 +分かった。 + +02:51.610 --> 02:56.050 +それでは、 ベンチマークの話に移ろう。 + +02:56.050 --> 03:04.630 +ベンチマークとは、 さまざまなモデルを比較検討するために使われる、 一般的な指標のことだ。 + +03:04.660 --> 03:12.670 +これは、 様々なリーダーボードに適用され使用される一連のテストであり、 様々なモデルの長所と短所を見るために、 + +03:12.670 --> 03:18.400 +様々なLMSをランク付けするものです。 + +03:18.430 --> 03:22.270 +今、 私はさまざまなベンチマークの表を持っている。 + +03:22.270 --> 03:28.320 +一つずつ見ていって、 それぞれの感覚を掴んでいきたい。 + +03:28.350 --> 03:32.850 +各ベンチマークが何であるかは、 いつでも調べることができるので、 覚えておく必要はない。 + +03:32.850 --> 03:36.810 +その感覚を持っておくと、 すぐに自分の中に戻ってくるから便利なんだ。 + +03:36.810 --> 03:41.940 +だから絶対に集中して、 このことを受け止めて、 研究してほしい。 + +03:41.940 --> 03:42.870 +質問があればどうぞ。 + +03:42.870 --> 03:47.940 +この数字は、 後ほどさまざまなモデルを比較する際の分析で目にすることになるだろう。 + +03:48.030 --> 03:54.300 +そこで最初に、 あちこちで目にする最も一般的な7つのベンチマークを紹介しよう。 + +03:54.300 --> 04:00.510 +最初のものはArcと呼ばれるもので、 科学的推論を測定するベンチマークである。 + +04:00.510 --> 04:03.030 +基本的には選択式の問題集だ。 + +04:03.060 --> 04:11.310 +ドロップは言語理解テストであり、 テキストを見て、 それを抽出し、 そのテキストから足し算や並べ替え、 + +04:11.310 --> 04:14.760 +数え上げなどを行う。 + +04:14.880 --> 04:23.790 +ヘラ・スワッグとは、 より難しい符号化、 長い文脈、 低い射撃活動の略で、 一種の常識的推理テストである。 + +04:24.240 --> 04:26.820 +MLUは超有名だ。 + +04:26.820 --> 04:28.860 +あちこちで目にするだろう。 + +04:28.860 --> 04:35.340 +57の教科にまたがる推論を含む、 実に一般的な指標だった。 + +04:35.800 --> 04:42.760 +質問の構成に疑問の声も上がっている。 + +04:42.760 --> 04:46.000 +そして、 その効果には疑問もある。 + +04:46.030 --> 04:47.680 +ルー、 使いすぎかもしれない。 + +04:47.680 --> 04:53.860 +ルーにはMLUプロと呼ばれるバリエーションがあり、 そちらの方が人気があることは後で説明する。 + +04:54.130 --> 04:55.930 +だから、 これはいくらか置き換わった。 + +04:55.930 --> 05:07.720 +さて、 真実のQAとは、 正確さとロバスト性のことであり、 特にモデルが真実でないことが奨励されるような敵対的な条件下でのことである。 + +05:08.290 --> 05:19.600 +ウィノグランドは、 モデルがより混乱した文脈での曖昧さを解決できることをテストしている。 そして、 GSMの8 
+ +05:19.600 --> 05:30.010 +Kレベルの小学校の算数は、 算数であると同時に、 小中学生レベルの単語問題でもある。 + +05:30.010 --> 05:35.110 +これが7つの一般的なベンチマークだ。 + +05:35.200 --> 05:36.760 +メモしておいてくれ。 + +05:36.790 --> 05:38.470 +彼らは資源の中にいる。 + +05:38.470 --> 05:42.910 +そして、 私が言うように、 これらのことはよく目にすることだろう。 + +05:42.940 --> 05:44.770 +そして、 願わくば、 今すぐ彼らを認識してほしい。 diff --git a/week5/community-contributions/subtitles/srts/59295377/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295377/ko_KR.srt new file mode 100755 index 0000000..e1d6990 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295377/ko_KR.srt @@ -0,0 +1,214 @@ +WEBVTT + +00:00.770 --> 00:06.050 +더 고급 지표로 넘어가기 전에 친칠라 스케일링 법칙을 + +00:06.050 --> 00:12.410 +언급하고 싶습니다 구글 딥마인드 팀이 만든 멋진 이름의 법이죠 친칠라 + +00:12.410 --> 00:15.200 +모델 이름을 따서요 + +00:15.590 --> 00:22.640 +모델에서 필요한 매개 변수의 개수와 신경망의 무게에 + +00:22.640 --> 00:28.580 +관한 거죠 법에 따르면 매개 변수의 개수는 + +00:28.580 --> 00:35.630 +훈련 데이터와 훈련 토큰의 개수에 비례해요 + +00:35.960 --> 00:42.290 +그 말은 모델이 있다고 가정해 봅시다. 예를 들어 80억 개의 매개 변수 모델이 있다고 합시다. + +00:42.290 --> 00:47.000 +그리고 어느 지점에 도달하면 반환률이 감소하기 시작할 거예요. + +00:47.030 --> 00:50.660 +훈련 데이터를 추가한다고 모델에 큰 영향을 주진 않아요 + +00:50.660 --> 00:56.720 +그래서 여러분은 이 크기의 모델에 대한 트레이닝 데이터가 적당하다고 느끼죠 + +00:56.720 --> 00:58.430 +좋은 대결이 될 거예요 + +00:58.430 --> 01:06.170 +훈련 데이터를 성공적으로 활용해 이 모델을 최대한의 학습 능력으로 끌어올렸죠 + +01:06.500 --> 01:08.450 +괜찮은 질문일 수도 있어요 + +01:08.450 --> 01:15.240 +매개 변수를 더 추가하고 싶다면 모델에 더 유연하게 학습할 수 있고 더 강력하고 미묘한 + +01:15.240 --> 01:16.740 +차이를 줄 수 있죠 + +01:16.770 --> 01:20.910 +훈련용 데이터가 얼마나 더 필요하죠? + +01:20.940 --> 01:26.310 +답은 이겁니다. 훈련 데이터를 두 배로 늘린다면 그 시점에서 수익이 + +01:26.310 --> 01:31.920 +감소할 테고 무게는 두 배가 될 겁니다. 80억에서 160억 매개 변수가 + +01:31.950 --> 01:39.330 +필요할 거예요. 훈련 데이터를 두 배로 쓰고 효과적으로 학습하려면요. 결과적으로 더 강력하고 + +01:39.330 --> 01:42.720 +미묘하게 표현되려면요. 
+ +01:42.990 --> 01:50.130 +훈련 데이터를 효과적으로 흡수하려면 매개 변수가 얼마나 더 필요한지 알 수 있죠 + +01:50.340 --> 01:58.050 +그리고 그 이면에는 정반대의 관계도 있어요 + +01:58.050 --> 02:02.970 +여러분이 80억 모델로 작업했는데 누군가 160억 매개 변수 모델로 + +02:02.970 --> 02:07.440 +업그레이드하고 싶다고 하면 그걸 대신 사용하세요 + +02:07.650 --> 02:11.820 +이렇게 생각할 수 있죠 이런 추가적인 유연성과 + +02:11.820 --> 02:20.170 +예측력을 이용하려면 더 큰 모델에 다이얼과 추를 더 많이 적용해야 한다고요 + +02:20.440 --> 02:25.270 +이를 이용하기 위해 얼마나 더 많은 훈련 데이터가 필요할까요? + +02:25.270 --> 02:30.070 +그 대답은 훈련 데이터 집합의 두 배가 필요하단 거죠 + +02:30.070 --> 02:36.220 +훈련 토큰과 매개 변수의 관계는 몇 년 전에 제안된 것으로 + +02:36.250 --> 02:38.920 +지금까지도 유효하죠 + +02:38.920 --> 02:46.330 +변압기 구조에서 변압기 규격 법칙이 적용되는 것으로 밝혀졌죠 + +02:46.330 --> 02:46.990 +글쎄요 + +02:46.990 --> 02:49.780 +상황에 따라 행동하는 게 경험에서 나온 법칙이죠 + +02:50.710 --> 02:51.580 +좋아요 + +02:51.610 --> 02:56.050 +이제 벤치마크로 넘어가죠 + +02:56.050 --> 03:04.630 +벤치마크는 사람들이 흔히 말하는 측정 기준입니다 다양한 모델을 재는 데 사용되죠 + +03:04.660 --> 03:12.670 +일련의 테스트가 적용되고 다양한 leaderboard에서 사용됩니다 다양한 LMS의 + +03:12.670 --> 03:18.400 +순위를 매기는 곳이죠 다양한 모델의 장단점을 보는 거예요 + +03:18.430 --> 03:22.270 +이제 다양한 벤치마크 테이블이 있어요 + +03:22.270 --> 03:28.320 +하나씩 살펴보면서 감을 잡을 거예요 get it get it + +03:28.350 --> 03:32.850 +이런 벤치마크가 뭔지 기억할 필요는 없어요 언제든 찾아보면 되니까요 + +03:32.850 --> 03:36.810 +그런 걸 느끼는 게 도움이 돼요 빨리 돌아오니까요 + +03:36.810 --> 03:41.940 +그러니 집중해서 잘 보고 조사도 좀 하세요 + +03:41.940 --> 03:42.870 +궁금한 게 있으면요 + +03:42.870 --> 03:47.160 +이 숫자들은 나중에 다른 모델을 비교할 때 분석할 때 볼 + +03:47.160 --> 03:47.940 +거예요 + +03:48.030 --> 03:54.300 +첫 번째로 말씀드릴 것은 가장 흔한 벤치마크 7가지입니다 어디서나 볼 수 있죠 + +03:54.300 --> 04:00.510 +첫 번째는 Arc입니다 과학적 추론을 측정하는 척도죠 + +04:00.510 --> 04:03.030 +객관식 문제인 셈이죠 + +04:03.060 --> 04:11.310 +Drop은 언어 이해력 테스트로 텍스트를 보고 정제한 다음 텍스트를 추가하거나 + +04:11.310 --> 04:14.760 +정렬하거나 세는 작업을 하죠 + +04:14.880 --> 04:21.270 +더 어려운 암호화, 긴 컨텍스트 저가 활동의 약자인 헬 스웨그는 상식적 + +04:21.300 --> 04:23.790 +추론 테스트의 일종이에요 + +04:24.240 --> 04:26.820 +MLU는 엄청 유명해요 + +04:26.820 --> 04:28.860 +어디서나 볼 수 있어요 + +04:28.860 --> 04:35.340 +57개 피험자를 추론하는 아주 흔한 측정법이었어요 + +04:35.800 --> 04:42.760 
+질문이 얼마나 잘 구성됐는지에 대해 의문이 제기되기도 했어요 + +04:42.760 --> 04:46.000 +효과적인지 의심하는 사람들도 있어요 + +04:46.030 --> 04:47.680 +루, 너무 많이 쓴 것 같아요 + +04:47.680 --> 04:53.860 +나중에 루의 변형 버전이 나오는데 지금은 MLU Pro라고 불리죠 + +04:54.130 --> 04:55.930 +이건 대체된 거예요 + +04:55.930 --> 05:05.140 +진실한 QA는 정확성과 견고성을 요구합니다 특히 적대적인 상황에서 진실을 말하지 말라고 + +05:05.140 --> 05:07.720 +부추기는 모델에서는요 + +05:08.290 --> 05:19.600 +위노그랜드는 모델이 더 혼란스러운 상황에서 모호함을 해결할 수 있다는 걸 시험합니다 GSM + +05:19.600 --> 05:27.670 +8K 수준의 초등학교 수학은 수학이자 초등학교와 중학교의 단어 문제이기도 + +05:27.670 --> 05:30.010 +하죠 + +05:30.010 --> 05:35.110 +이게 자주 보이는 7가지 공통 벤치마크예요 + +05:35.200 --> 05:36.760 +잘 기억해 두세요 + +05:36.790 --> 05:38.470 +자원부에 있어요 + +05:38.470 --> 05:42.910 +앞으로 이런 걸 많이 보게 될 거예요 + +05:42.940 --> 05:44.770 +이제 알아보실 수 있을 거예요 diff --git a/week5/community-contributions/subtitles/srts/59295423/en_US.srt b/week5/community-contributions/subtitles/srts/59295423/en_US.srt new file mode 100755 index 0000000..0ab8416 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295423/en_US.srt @@ -0,0 +1,223 @@ +WEBVTT + +00:00.650 --> 00:06.890 +Welcome to day two of week four, when we get more into leaderboards so that by the end of this, you'll + +00:06.890 --> 00:11.690 +know more than you ever wanted to know or dreamed of knowing about leaderboards and metrics and benchmarks + +00:11.690 --> 00:14.060 +and arenas and all of that. + +00:14.060 --> 00:20.420 +But you'll find it incredibly useful and important as you look to understand how to pick the right LLM + +00:20.420 --> 00:21.800 +for the task at hand. + +00:21.830 --> 00:30.890 +So the plan for today, uh, last time we talked about comparing Llms using basic attributes and using + +00:30.890 --> 00:31.820 +benchmarks. + +00:31.850 --> 00:38.330 +Today you're going to look beyond the open LLM leaderboard from hugging face at other leaderboards and + +00:38.330 --> 00:41.750 +arenas that let you compare and evaluate llms. 
+ +00:42.320 --> 00:50.480 +We're also going to look at some real world use cases of llms solving commercial problems, uh, which + +00:50.600 --> 00:53.420 +will be hopefully, uh, insightful for you. + +00:53.450 --> 00:58.700 +We'll just do that quickly, because I think everyone has exposure to so many these days, but it will + +00:58.700 --> 01:05.960 +give you food for thought and it should, in summary, arm you to be able to choose the right LLM for + +01:05.960 --> 01:07.250 +your projects. + +01:08.000 --> 01:16.190 +So we're going to be doing a tour of six essential leaderboards that are available through Huggingface + +01:16.430 --> 01:17.360 +and beyond. + +01:17.390 --> 01:18.230 +Huggingface. + +01:18.260 --> 01:23.690 +For comparing Llms, the first of them is actually one that we've already seen the Huggingface OpenAI + +01:23.900 --> 01:25.190 +leaderboard comparing. + +01:25.220 --> 01:31.280 +There's both the old version, but now there's the new version, which has the harder metrics. + +01:31.310 --> 01:38.660 +It's of course open source models, but it is the go to place to compare open source models. + +01:39.230 --> 01:46.550 +There is also a leaderboard called Big Code on Huggingface, which is a leaderboard that compares models + +01:46.550 --> 01:50.420 +specifically that are designed to generate code. + +01:50.420 --> 01:56.060 +And we'll be doing that looking at those examples and how it assesses models in just a second. + +01:56.480 --> 02:03.530 +There is one called the LM perf board, which is another Huggingface board that talks, thinks about + +02:03.530 --> 02:11.090 +performance, uh, about both accuracy and the actual speed and cost of compute. + +02:11.090 --> 02:14.720 +So this is a super important one that looks at a different dimension. 
+ +02:14.720 --> 02:20.040 +It's looking at some of the basic attributes, and it's one that again is going to become a go to resource + +02:20.040 --> 02:25.560 +for you, particularly when you're thinking about inference of open source models compared to their + +02:25.560 --> 02:27.180 +closed source cousins. + +02:27.420 --> 02:29.850 +And then there are some other hugging face boards. + +02:29.850 --> 02:31.110 +There's so much on hugging face. + +02:31.110 --> 02:31.920 +There are more. + +02:31.950 --> 02:37.260 +There are specific leader boards designed for different business areas. + +02:37.260 --> 02:44.160 +You're going to see a medical leaderboard which is designed for, uh, of course, uh, medical use + +02:44.160 --> 02:44.850 +cases. + +02:44.850 --> 02:47.940 +You're going to see leaderboards for other languages. + +02:47.970 --> 02:49.380 +I think there's one Portuguese. + +02:49.380 --> 02:50.190 +I just saw that. + +02:50.190 --> 02:50.820 +I'll show you. + +02:50.820 --> 02:57.150 +There's a bunch of different language specific leaderboards depending on your use case. + +02:57.150 --> 02:59.670 +That might be the leaderboard that's right for you. + +03:00.300 --> 03:02.970 +Then we're going to go and look at Valheim's leaderboard. + +03:02.970 --> 03:06.720 +You may remember we we briefly looked at this uh, early on. + +03:06.720 --> 03:12.960 +It's a very useful resource which has a number of leaderboards, um, that include open and closed source. + +03:12.960 --> 03:17.910 +So it's one of the places where you can bring together the full family of models. + +03:17.910 --> 03:24.990 +It also has that useful table, if you remember, that has cost, API costs and contacts window lengths. + +03:25.080 --> 03:31.840 +And so it's another one to add to your bookmarks because it's one of the few places which which reliably + +03:31.840 --> 03:35.440 +has all of that information up to date in one place. 
+
+03:36.040 --> 03:44.140
+And then the last of the leaderboards that we'll look at is called Seal, which assesses various expert
+
+03:44.140 --> 03:48.190
+skills, and they are always adding new leaderboards to to this set of leaderboards.
+
+03:48.190 --> 03:52.210
+So it's it is, if I may say, another one to bookmark.
+
+03:52.210 --> 03:58.840
+So your bookmarks are going to get crowded because these are great resources and I think you will thank
+
+03:58.840 --> 04:00.850
+me when you need to use them in the future.
+
+04:02.350 --> 04:09.790
+We are then also going to look at something called the Chatbot Arena, the LMSYS Chatbot Arena.
+
+04:09.790 --> 04:12.100
+It is a fantastic resource.
+
+04:12.100 --> 04:19.390
+It is specifically looking at the instruct the chat use case, the ability of models to chat.
+
+04:19.390 --> 04:26.080
+But the idea is rather than having some sort of benchmark test, it relies on human judgement to decide
+
+04:26.080 --> 04:34.840
+which model is better at the kind of, uh, the, the, um, the instruction following chats.
+
+04:34.990 --> 04:45.340
+Um, And it's just a qualitative decision by a human to decide to pick a model A or model B whilst chatting
+
+04:45.340 --> 04:46.870
+with both models together.
+
+04:46.870 --> 04:52.810
+And it's a blind test in that you don't know which model is which and you have to vote without knowing.
+
+04:53.020 --> 05:01.420
+Um, and models are given an Elo rating, which I mentioned before based on their overall, uh, um,
+
+05:01.420 --> 05:08.500
+how they rank overall against their, their, their peers, um, from many human tests.
+
+05:08.500 --> 05:10.330
+So we'll see this.
+
+05:10.360 --> 05:12.700
+We'll get to do some voting ourselves.
+
+05:13.300 --> 05:18.580
+And then finally we're going to look at a bunch of commercial use cases, everything from law to talent
+
+05:18.610 --> 05:23.050
+to code to healthcare to education, seeing llms in action.
+ +05:23.050 --> 05:26.950 +And of course, this this could be ten times more. + +05:26.980 --> 05:34.060 +The Llms are making an impact in every business vertical you can imagine, but it's always useful to + +05:34.060 --> 05:37.270 +see a few of them and get a sense of what's out there. + +05:37.270 --> 05:39.610 +So we will do all of that now. + +05:39.610 --> 05:44.950 +And, uh, prepare, prepare to do some bookmarking because you're going to see a lot of useful stuff + +05:44.950 --> 05:45.370 +now. diff --git a/week5/community-contributions/subtitles/srts/59295423/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295423/ja_JP.srt new file mode 100755 index 0000000..62d9992 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295423/ja_JP.srt @@ -0,0 +1,175 @@ +WEBVTT + +00:00.650 --> 00:06.890 +4週目の2日目へようこそ。 リーダーボードについてより深く掘り下げていきますので、 これが終わるころには、 リーダーボードやメトリクス、 + +00:06.890 --> 00:14.060 +ベンチマーク、 アリーナなどについて、 これまで知りたかったこと、 あるいは知りたいと夢見ていたことよりも多くのことを知ることになるでしょう。 + +00:14.060 --> 00:21.800 +しかし、 目の前の課題に適したLLMを選ぶ方法を理解する上で、 信じられないほど有用で重要なことがわかるだろう。 + +00:21.830 --> 00:31.820 +さて、 今日の計画だが、 前回は基本的な属性とベンチマークを使ってLlmsを比較するという話をした。 + +00:31.850 --> 00:41.750 +今日は、 オープンLLMリーダーボードを超えて、 LLMを比較・評価できる他のリーダーボードやアリーナを見てみましょう。 + +00:42.320 --> 00:53.420 +また、 llmsが商業的な問題を解決している実際の使用例も見ていくつもりだ。 + +00:53.450 --> 00:58.700 +最近、 誰もが多くのLLMに触れていると思うので、 手短に説明するが、 + +00:58.700 --> 01:07.250 +考える材料にはなるだろうし、 要約すれば、 自分のプロジェクトに適したLLMを選択できるようになるはずだ。 + +01:08.000 --> 01:17.360 +そこで今回は、 Huggingfaceで利用できる6つの必須リーダーボードと、 それ以外のリーダーボードを紹介する。 + +01:17.390 --> 01:18.230 +ハギングフェイス + +01:18.260 --> 01:25.190 +Llmsを比較する場合、 その最初のものは、 実はすでにHuggingface OpenAIのリーダーボードで比較されているものだ。 + +01:25.220 --> 01:31.280 +旧バージョンと新バージョンの両方がある。 + +01:31.310 --> 01:38.660 +もちろんオープンソースのモデルだが、 オープンソースのモデルを比較するにはうってつけの場所だ。 + +01:39.230 --> 01:50.420 +また、 HuggingfaceにはBig Codeというリーダーボードがあり、 これは特にコードを生成するように設計されたモデルを比較するリーダーボードである。 + +01:50.420 --> 01:56.060 +そして、 その例とモデルをどのように評価するかを、 すぐに見ていくことになる。 + 
+01:56.480 --> 02:03.530 +LMパーフボードと呼ばれるものがあり、 これもHuggingfaceのボードで、 + +02:03.530 --> 02:11.090 +性能について、 正確さについて、 実際のスピードと計算コストについて考えている。 + +02:11.090 --> 02:14.720 +だから、 これは違う次元を見る超重要なものなんだ。 + +02:14.720 --> 02:20.040 +これは基本的な属性のいくつかを見ているもので、 特にクローズド・ソースと比較してオープン・ソース・モデルの推論を考えているときに、 + +02:20.040 --> 02:27.180 +あなたにとって頼りになるリソースになるでしょう。 + +02:27.420 --> 02:29.850 +そして、 他のハグ顔ボードもある。 + +02:29.850 --> 02:31.110 +ハグする顔にはたくさんのものがある。 + +02:31.110 --> 02:31.920 +まだまだある。 + +02:31.950 --> 02:37.260 +さまざまな事業分野ごとに設計された特定のリーダーボードがある。 + +02:37.260 --> 02:44.850 +医療用のリーダーボードをご覧いただけますが、 これはもちろん、 医療用にデザインされたものです。 + +02:44.850 --> 02:47.940 +他の言語のリーダーボードを見ることができる。 + +02:47.970 --> 02:49.380 +ポルトガル語は1つだと思う。 + +02:49.380 --> 02:50.190 +今見たところだ。 + +02:50.190 --> 02:50.820 +お見せしましょう。 + +02:50.820 --> 02:57.150 +用途に応じて、 さまざまな言語別のリーダーボードがある。 + +02:57.150 --> 02:59.670 +それがあなたにふさわしいリーダーボードかもしれない。 + +03:00.300 --> 03:02.970 +それからヴァルヘイムのリーダーボードを見に行く。 + +03:02.970 --> 03:06.720 +このことについては、 初期の頃に少し触れたので覚えているかもしれない。 + +03:06.720 --> 03:12.960 +オープンソースとクローズドソースを含む多くのリーダーボードがある、 とても便利なリソースだ。 + +03:12.960 --> 03:17.910 +だから、 モデル・ファミリーを一同に集めることができる場所のひとつなんだ。 + +03:17.910 --> 03:24.990 +また、 コスト、 APIコスト、 コンタクトウインドウの長さといった便利な表もある。 + +03:25.080 --> 03:35.440 +また、 ブックマークに追加しておくといいのは、 最新情報を確実にまとめている数少ない場所のひとつだからだ。 + +03:36.040 --> 03:48.190 +そして、 最後に紹介するのは、 さまざまなエキスパート・スキルを評価する「シール」と呼ばれるリーダーボードで、 このリーダーボード群には常に新しいリーダーボードが追加されている。 + +03:48.190 --> 03:52.210 +というわけで、 これもまたブックマークしておきたい一品だ。 + +03:52.210 --> 04:00.850 +というのも、 これらは素晴らしい情報源であり、 将来これらを使う必要が出てきたときに、 私に感謝することになると思うからだ。 + +04:02.350 --> 04:09.790 +さらに、 チャットボット・アリーナ(LMS Chatbot Arena)と呼ばれるものについても見ていきます。 + +04:09.790 --> 04:12.100 +素晴らしいリソースだ。 + +04:12.100 --> 04:19.390 +特にチャットのユースケース、 モデルのチャット能力に注目している。 + +04:19.390 --> 04:26.080 +しかし、 ある種のベンチマーク・テストを行うのではなく、 + +04:26.080 --> 04:34.840 +どのモデルがより優れているかを人間の判断に頼るという考え方だ。 + +04:34.990 --> 04:46.870 +それに、 Aモデルを選ぶかBモデルを選ぶかは、 人間が両方のモデルと一緒にチャットしながら定性的に決めることなんだ。 + +04:46.870 --> 04:52.810 +しかも、 
どのモデルがどのモデルなのかわからないブラインドテストなので、 わからないまま投票しなければならない。 + +04:53.020 --> 05:01.420 +そしてモデルには、 総合的な、 えー、 えー、 多くの人間によるテストでの同業者に対する総合的な順位に基づいて、 + +05:01.420 --> 05:08.500 +先ほど言ったEloレーティングが与えられる。 + +05:08.500 --> 05:10.330 +だから、 これを見よう。 + +05:10.360 --> 05:12.700 +自分たちで投票することになる。 + +05:13.300 --> 05:18.580 +そして最後に、 法律からタレント、 コード、 ヘルスケア、 教育まで、 あらゆる商業的なユースケースを紹介し、 + +05:18.610 --> 05:23.050 +llmsが実際に使われている様子をご覧いただきます。 + +05:23.050 --> 05:26.950 +もちろん、 これは10倍以上になる可能性もある。 + +05:26.980 --> 05:34.060 +Llmsは、 あなたが想像できるあらゆるビジネス分野でインパクトを与えているが、 そのうちのいくつかを見て、 + +05:34.060 --> 05:37.270 +そこに何があるのかを知ることは常に有益だ。 + +05:37.270 --> 05:39.610 +だから、 これからそのすべてを行う。 + +05:39.610 --> 05:45.370 +そして、 準備、 準備、 ブックマークをする準備だ。 これから役に立つものをたくさん見ることになるからね。 diff --git a/week5/community-contributions/subtitles/srts/59295423/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295423/ko_KR.srt new file mode 100755 index 0000000..04fda92 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295423/ko_KR.srt @@ -0,0 +1,214 @@ +WEBVTT + +00:00.650 --> 00:06.890 +넷째 주 둘째 날입니다 순위표를 더 살펴보죠 오늘 하루가 끝날 때쯤이면 기대했던 + +00:06.890 --> 00:11.690 +것보다 더 많은 걸 알게 될 겁니다 순위표, 수치 벤치마크, + +00:11.690 --> 00:14.060 +경기장 등이요 + +00:14.060 --> 00:20.420 +하지만 이 작업은 매우 유용하고 중요합니다 작업에 적합한 LLM을 고르는 법을 이해하는 + +00:20.420 --> 00:21.800 +것이니까요 + +00:21.830 --> 00:31.820 +자, 오늘의 계획은 지난 시간에 기본 특성과 벤치마크를 이용한 LM 비교를 다뤘는데요 + +00:31.850 --> 00:38.330 +오늘 여러분은 오픈 LLM의 순위표 너머를 보게 될 겁니다 다른 순위표에서 얼굴을 맞대고 인사하는 + +00:38.330 --> 00:41.750 +것에서부터 비교와 평가의 무대까지 말이죠 + +00:42.320 --> 00:50.480 +또한 llms의 실제 유스케이스를 통해 상업적 문제를 해결할 겁니다 여러분에게 + +00:50.600 --> 00:53.420 +도움이 될 것 같군요 + +00:53.450 --> 00:58.700 +빠르게 하겠습니다 요즘은 다들 많이 접하셨겠지만 생각할 + +00:58.700 --> 01:05.960 +거리를 줄 겁니다 요약하자면 프로젝트에 맞는 LLM을 선택할 수 있도록 지원해주는 + +01:05.960 --> 01:07.250 +거죠 + +01:08.000 --> 01:16.190 +그래서 6개의 필수 리더보드를 투어에 쓸 거예요 H깅페이스와 그 너머를 통해 보실 + +01:16.430 --> 01:17.360 +수 있죠 + +01:17.390 --> 01:18.230 +포옹하는 얼굴요 + +01:18.260 --> 01:23.690 +LM을 비교해 보죠 첫 번째는 이미 봤던 H깅페이스 OpenAI 
리더보드 + +01:23.900 --> 01:25.190 +비교고요 + +01:25.220 --> 01:31.280 +구 버전과 새 버전이 있는데 더 어려운 측정법이죠 + +01:31.310 --> 01:38.660 +물론 오픈 소스 모델이지만 오픈 소스 모델을 비교하기 위한 곳이죠 + +01:39.230 --> 01:46.550 +H깅페이스에 빅 코드라는 leaderboard도 있어요 코드를 생성하기 위해 + +01:46.550 --> 01:50.420 +특별히 디자인된 모델들을 비교하는 거죠 + +01:50.420 --> 01:56.060 +잠시 후 그 예시를 살펴보고 모델을 어떻게 평가하는지 살펴보죠 + +01:56.480 --> 02:03.530 +LM 향수 보드라는 것이 있는데 H징페이스 보드로서 + +02:03.530 --> 02:11.090 +성능과 정확성, 실제 속도와 연산 비용을 모두 고려하죠 + +02:11.090 --> 02:14.720 +이건 다른 차원을 보는 아주 중요한 거예요 + +02:14.720 --> 02:20.040 +기본 속성을 살펴보고 있어요 여러분을 위해 리소스로 가는 것이 + +02:20.040 --> 02:25.560 +될 겁니다 특히 오픈 소스 모델과 폐쇄 소스 모델을 비교해 추론할 때를 + +02:25.560 --> 02:27.180 +생각해 보세요 + +02:27.420 --> 02:29.850 +포옹하는 얼굴 보드도 있어요 + +02:29.850 --> 02:31.110 +포옹하는 표정에는 할 게 많아요 + +02:31.110 --> 02:31.920 +더 있어요 + +02:31.950 --> 02:37.260 +여러 사업 분야에 맞게 디자인된 리더 보드가 있어요 + +02:37.260 --> 02:44.160 +여러분이 보실 것은 의료용 리더보드입니다 의료용 사용 사례를 위해 설계된 + +02:44.160 --> 02:44.850 +것이죠 + +02:44.850 --> 02:47.940 +다른 언어에 대한 leaderboard도 볼 수 있어요 + +02:47.970 --> 02:49.380 +포르투갈인은 한 명이에요 + +02:49.380 --> 02:50.190 +방금 봤어요 + +02:50.190 --> 02:50.820 +보여드릴게요 + +02:50.820 --> 02:57.150 +사용 사례에 따라 다양한 언어별 leaderboard가 있어요 + +02:57.150 --> 02:59.670 +순위표에 잘 맞을지도 모르겠네요 + +03:00.300 --> 03:02.970 +발하임의 순위표를 보도록 하죠 + +03:02.970 --> 03:06.720 +기억하실지 모르겠지만 초기에 잠깐 살펴봤어요 + +03:06.720 --> 03:12.960 +여러 개의 leaderboard가 있는 아주 유용한 리소스죠 오픈소스와 폐쇄소스가 있어요 + +03:12.960 --> 03:17.910 +모든 모델이 한자리에 모일 수 있는 곳이죠 + +03:17.910 --> 03:24.990 +또 유용한 테이블도 있어요 비용이요, API 비용과 연락처 창 길이 + +03:25.080 --> 03:31.840 +책갈피에 추가할 만한 또 다른 장소죠 모든 정보가 한 곳에 + +03:31.840 --> 03:35.440 +있는 몇 안 되는 곳이니까요 + +03:36.040 --> 03:44.140 +마지막 순위표는 씰이라고 해요 전문가의 기술을 평가하는 거죠 순위표 세트에 + +03:44.140 --> 03:48.190 +항상 새로운 순위표를 추가해요 + +03:48.190 --> 03:52.210 +그러니 책갈피로 삼을 만한 또 다른 책이죠 + +03:52.210 --> 03:58.840 +여러분의 책갈피가 가득 찰 거예요. 왜냐하면 좋은 리소스들이니까요. 나중에 사용해야 할 때 + +03:58.840 --> 04:00.850 +저에게 고마워할 거예요. 
+ +04:02.350 --> 04:09.790 +챗봇 아레나라는 것도 살펴볼 겁니다 LMS 챗봇 아레나죠 + +04:09.790 --> 04:12.100 +환상적인 자원이죠 + +04:12.100 --> 04:19.390 +채팅 사용 지시 사례를 특히 보고 있어요 모델이 채팅하는 기능이죠 + +04:19.390 --> 04:26.080 +하지만 이건 일종의 기준 테스트가 아니라 채팅으로 지시하는 + +04:26.080 --> 04:34.840 +모델에 어떤 모델이 더 적합한지 인간의 판단을 통해 결정하자는 거죠 + +04:34.990 --> 04:46.870 +모델 A와 B 중 하나를 선택하는 건 질적인 결정이에요 두 모델과 함께 대화하면서요 + +04:46.870 --> 04:52.810 +어떤 모델이 어떤 모델인지 모르는 블라인드 테스트예요 모르는 채로 투표해야 하죠 + +04:53.020 --> 05:01.420 +모델에게 Elo 점수를 주는데 앞서 언급한 대로 여러 번의 인체 + +05:01.420 --> 05:08.500 +실험을 통해 동료들과 비교해 종합 순위를 매겨요 + +05:08.500 --> 05:10.330 +이걸 보죠 + +05:10.360 --> 05:12.700 +Get it, Get it, Get it, Get it, Get it, Get it, Get, get 우리가 직접 투표를 할 거예요 + +05:13.300 --> 05:18.580 +마지막으로 상업적 유스 케이스를 살펴볼 겁니다 법과 재능, + +05:18.610 --> 05:23.050 +코드 의료와 교육 llms의 실태를 살펴볼 거예요 + +05:23.050 --> 05:26.950 +물론 이건 10배는 더 클 거예요 + +05:26.980 --> 05:34.060 +림은 모든 비즈니스 수직에 영향을 미치고 있습니다. 하지만 몇 가지를 + +05:34.060 --> 05:37.270 +보고 감을 잡는 것도 유용하죠. + +05:37.270 --> 05:39.610 +지금 다 할 거예요 + +05:39.610 --> 05:45.370 +책갈피도 꽂아두시고요 유용한 걸 많이 보시거든요 diff --git a/week5/community-contributions/subtitles/srts/59295429/en_US.srt b/week5/community-contributions/subtitles/srts/59295429/en_US.srt new file mode 100755 index 0000000..ee269d0 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295429/en_US.srt @@ -0,0 +1,286 @@ +WEBVTT + +00:00.650 --> 00:06.260 +Continuing our investigation of benchmarks, and this will become more real when we actually see some + +00:06.290 --> 00:06.650 +action. + +00:06.650 --> 00:11.480 +So bear with me while we do this, but it is very important and useful background information. + +00:11.480 --> 00:18.230 +We're going to look at three specific benchmarks used to test, uh, more, more, uh, specialized + +00:18.230 --> 00:19.130 +skills. 
+ +00:19.310 --> 00:27.920 +And the first of them is is used for evaluating chat between models, um, particularly in the in face + +00:27.950 --> 00:31.430 +offs between models in things like arenas, as we will see. + +00:31.430 --> 00:34.730 +And the benchmark is an Elo rating. + +00:34.850 --> 00:43.280 +Uh, if you are a chess player or familiar with with chess Elo ratings, Elo is a rating that you can + +00:43.280 --> 00:51.080 +give to competitors in a sport or some sort of a game where there is a loser for every winner, where + +00:51.080 --> 00:58.370 +it's a zero sum game, um, and uh, uh, based on the outcomes of these kinds of, of games, uh, + +00:58.370 --> 01:05.680 +you can give people this relative measure that affects their strength compared to others based on their + +01:05.680 --> 01:08.140 +performance in head to head face offs. + +01:08.140 --> 01:16.000 +So you'll see examples of of those from an arena later and that will that will bring it to life. + +01:16.090 --> 01:21.460 +Um, but it's a, it's used in particular the ones that I'm going to show you are used to evaluate the + +01:21.460 --> 01:26.770 +chat abilities, the instruct abilities of these models. + +01:27.400 --> 01:30.130 +And then the next two are about coding. + +01:30.220 --> 01:34.480 +Human eval is a very well known Python coding test. + +01:34.480 --> 01:37.180 +It's 164 problems. + +01:37.180 --> 01:41.050 +Uh, it's about writing code actually based on Python doc strings. + +01:41.110 --> 01:48.310 +Uh, and it's something where models have become increasingly effective at and then, uh, multiple + +01:48.340 --> 01:49.720 +or maybe it's pronounced multiple. + +01:50.590 --> 01:57.280 +Uh, I expect just multiple is the same thing, but translated to 18 different programming languages. + +01:57.280 --> 02:03.700 +So this is more of a wider variety of programming skills across different areas. 
+ +02:04.870 --> 02:12.890 +Let me take a moment now to mention Mentioned the limitations of these benchmarks, which is such an + +02:12.890 --> 02:16.940 +important point, and it's something that that cannot be stressed enough. + +02:17.030 --> 02:24.050 +Benchmarks are useful for us for comparing, of course, the different where different models shine + +02:24.050 --> 02:26.300 +and where they're not not intended to be used. + +02:26.300 --> 02:30.350 +But there are problems with benchmarks, and you need to be aware of them when you look at them. + +02:30.560 --> 02:33.710 +One of them is that they are not consistently applied. + +02:33.710 --> 02:38.690 +So, um, depending on where you're seeing the benchmark, particularly if it's something like a press + +02:38.690 --> 02:44.510 +release from a company, they it's really up to them how they measured the benchmark, what kind of + +02:44.510 --> 02:46.310 +hardware they used and stuff like that. + +02:46.520 --> 02:52.190 +It's not like there's a gold standards that, that, um, put these measurements on rails. + +02:52.190 --> 02:55.400 +So everything has to be taken with a pinch of salt. + +02:55.400 --> 03:01.820 +If it's been published by a third party, there can be too narrow in scope, particularly when you think + +03:01.820 --> 03:05.930 +about like multiple choice style questions and a very similar point. + +03:05.960 --> 03:12.390 +Uh, it's hard to use these kinds of benchmarks to measure nuanced reasoning if you're dealing with + +03:12.390 --> 03:17.880 +with either multiple choice or very specific kinds of answers, it's that that's something that's hard + +03:17.880 --> 03:18.720 +to do. 
+ +03:18.900 --> 03:25.290 +Um, another problem is training data leakage, which it's just very hard to make sure that there is + +03:25.290 --> 03:33.150 +no way that any of these answers can be found within the data that's used to train models, particularly + +03:33.150 --> 03:38.400 +as models get trained with more and more recent data that presumably involves lots of information about + +03:38.400 --> 03:39.480 +these benchmarks. + +03:39.480 --> 03:44.490 +Uh, it becomes harder and harder to control for training data leakage. + +03:44.970 --> 03:50.880 +And then the next one here is is a very important overfitting, a common term from traditional data + +03:50.880 --> 03:51.480 +science. + +03:51.480 --> 03:54.360 +The problem is and this is this is a bit subtle. + +03:54.360 --> 04:01.050 +Again you could get to a point where you've managed to make your model do really, really well on benchmarks, + +04:01.050 --> 04:05.850 +partly because you've just tried out lots of things, like you've tweaked lots of hyperparameters, + +04:05.850 --> 04:10.890 +lots of the sort of settings around a model, and you've kept rerunning it until you've tweaked all + +04:10.890 --> 04:14.910 +the hyperparameters, and now it's crushing this particular benchmark. + +04:14.930 --> 04:17.720 +and it might be something of a coincidence. + +04:17.720 --> 04:22.610 +It's like you've because you've had all of these different knobs that you've you've turned you've turned + +04:22.610 --> 04:27.980 +them specifically in such a way that it solves these benchmarks really, really well. + +04:27.980 --> 04:29.480 +And what's the problem with that? + +04:29.480 --> 04:33.470 +The problem with that is that you've just solved for this particular benchmark test. 
+ +04:33.470 --> 04:39.350 +And if you ask another question, which is still trying to get to the heart of the same test, but it's + +04:39.350 --> 04:44.720 +just asked differently, or it's just a, you know, a different maths question or something like that, + +04:44.720 --> 04:50.300 +the model might fail spectacularly because you've overfit it, you've trained it, you've, you've, + +04:50.330 --> 04:54.530 +you've, you've, you've set all of its various dials so that it does really, really well on these + +04:54.530 --> 05:00.920 +very specific benchmark questions and doesn't do so well when it goes out of sample, when it goes out + +05:00.920 --> 05:05.300 +of these benchmarks, questions to other questions designed to test the same kind of thing. + +05:05.300 --> 05:08.990 +In other words, the results of the benchmark can end up being misleading. + +05:09.020 --> 05:13.670 +It can make it look like it's really good at Python coding or at maths problems or something like that, + +05:13.670 --> 05:19.420 +when really it's just really good at answering the specific questions that were in these benchmarks. + +05:19.420 --> 05:21.250 +So that's the problem of overfitting. + +05:21.250 --> 05:29.650 +It's very important that you're aware of that and take some healthy skepticism to reviewing benchmarks + +05:30.370 --> 05:31.690 +with this in mind. + +05:32.080 --> 05:38.800 +There is a new interesting point that's been raised recently, which isn't yet proven. + +05:38.800 --> 05:40.480 +It's not yet well understood. 
+ +05:40.480 --> 05:47.740 +It's still a bit speculative, but there is some evidence that the latest frontier models, the really + +05:47.740 --> 05:57.880 +strong GPT four Claude 3.5 sonnet level models have some kind of awareness that they are being evaluated, + +05:57.880 --> 06:05.260 +that when they are being asked various benchmark style questions, and that some of their answers have + +06:05.260 --> 06:11.350 +indicated to experts that they are aware of the context, that they're being asked this because they + +06:11.380 --> 06:12.880 +are being evaluated. + +06:13.210 --> 06:17.800 +Um, and that might distort some of their answers. + +06:17.830 --> 06:22.600 +Now, you may wonder, why does that matter if we're testing things like how good they are at maths. + +06:22.630 --> 06:25.210 +Whether they know they're being evaluated or not doesn't matter. + +06:25.210 --> 06:27.130 +That's sure they can know they're being evaluated. + +06:27.130 --> 06:31.000 +And still, whether they do well or not in maths questions is is useful. + +06:31.060 --> 06:33.430 +Well, here's an example of where it matters. + +06:33.430 --> 06:40.240 +Supposing you're asking questions about things like safety and alignment, and some of the questions + +06:40.240 --> 06:44.050 +we saw about responding truthfully in adversarial conditions. + +06:44.050 --> 06:50.230 +If that's what you're trying to assess, then obviously if the model is aware that it's being assessed, + +06:50.230 --> 06:54.220 +that might change its approach to answering those questions. + +06:54.220 --> 07:01.150 +And perhaps, for example, give an impression that a model is highly truthful or well aligned, when + +07:01.180 --> 07:02.410 +in fact it is not. + +07:02.410 --> 07:07.510 +So it's premature for us to say that this is a real concern or a real problem. 
+ +07:07.510 --> 07:14.320 +It's it's a it's a it's a risk that people are analysing and researching not yet known if it is a real + +07:14.320 --> 07:19.090 +problem, but at this point, it certainly is something that's a concern that's being explored. + +07:19.360 --> 07:19.900 +All right. + +07:19.930 --> 07:21.340 +Hope that was interesting to you. + +07:21.340 --> 07:24.070 +This gives you some of the limitations of benchmarks. + +07:24.070 --> 07:26.350 +And now we're going to move on to some more. diff --git a/week5/community-contributions/subtitles/srts/59295429/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295429/ja_JP.srt new file mode 100755 index 0000000..1182909 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295429/ja_JP.srt @@ -0,0 +1,217 @@ +WEBVTT + +00:00.650 --> 00:06.650 +ベンチマークの調査を続けているが、 これは実際に動きを見ることでより現実味を帯びてくるだろう。 + +00:06.650 --> 00:11.480 +しかし、 これは非常に重要で有益な背景情報である。 + +00:11.480 --> 00:19.130 +今回は、 より専門的なスキルをテストするために使われる3つのベンチマークを紹介する。 + +00:19.310 --> 00:31.430 +そして、 その最初のものは、 モデル間のチャットを評価するために使用される。 + +00:31.430 --> 00:34.730 +そしてベンチマークはEloレーティングである。 + +00:34.850 --> 00:43.280 +もしあなたがチェスのプレイヤーなら、 あるいはチェスのEloレーティングに詳しいなら、 + +00:43.280 --> 01:08.140 +Eloとはスポーツや、 勝者1人に対して敗者が1人というゼロサムゲームのようなゲームにおいて、 競争相手に与えることができるレーティングのことだ。 + +01:08.140 --> 01:16.000 +だから、 後でアリーナからその例を見ることになるし、 それが命を吹き込むことになる。 + +01:16.090 --> 01:26.770 +でも、 特にこれからお見せするようなものは、 これらのモデルのチャット能力やインストラクターの能力を評価するために使われるんだ。 + +01:27.400 --> 01:30.130 +そして次の2つはコーディングについてだ。 + +01:30.220 --> 01:34.480 +Human evalはPythonのコーディングテストとして非常によく知られている。 + +01:34.480 --> 01:37.180 +164の問題だ。 + +01:37.180 --> 01:41.050 +ええと、 Pythonのdoc文字列に基づいて実際にコードを書くことです。 + +01:41.110 --> 01:49.720 +そして、 モデルがますます効果的になってきている。 + +01:50.590 --> 01:57.280 +ええと、 "マルチプル "というのは、 同じものを18のプログラミング言語に翻訳したものだと思います。 + +01:57.280 --> 02:03.700 +つまり、 さまざまな分野にまたがる、 より幅広いプログラミング・スキルということだ。 + +02:04.870 --> 02:16.940 +これは非常に重要なポイントであり、 いくら強調してもしすぎることはない。 + +02:17.030 --> 02:26.300 +ベンチマークは、 もちろん、 
異なるモデルが輝きを放つ場所とそうでない場所を比較するのに役立つ。 + +02:26.300 --> 02:30.350 +しかし、 ベンチマークには問題がある。 + +02:30.560 --> 02:33.710 +そのひとつは、 一貫して適用されていないことだ。 + +02:33.710 --> 02:38.690 +だから、 ベンチマークをどこで見るかによって、 特に企業のプレスリリースのようなものであれば、 + +02:38.690 --> 02:46.310 +どのようにベンチマークを測定したか、 どのようなハードウェアを使ったか、 そういったことはその企業次第なんだ。 + +02:46.520 --> 02:52.190 +このような測定値をレールに乗せるようなゴールドスタンダードがあるわけではないんだ。 + +02:52.190 --> 02:55.400 +だから、 何事も大目に見なければならない。 + +02:55.400 --> 03:01.820 +第三者によって出版されたものであれば、 特に多肢選択式の問題や非常に似たような点を考えると、 + +03:01.820 --> 03:05.930 +範囲が狭すぎる可能性がある。 + +03:05.960 --> 03:18.720 +多肢選択式や非常に特殊な答えを扱っている場合、 この種のベンチマークを使ってニュアンスのある推論を測定するのは難しい。 + +03:18.900 --> 03:25.290 +もう一つの問題は、 トレーニングデータの漏れです。 + +03:25.290 --> 03:39.480 +モデルのトレーニングに使われるデータの中に、 これらの答えがないことを確認するのは非常に難しい。 + +03:39.480 --> 03:44.490 +トレーニングデータの漏洩をコントロールするのは、 ますます難しくなる。 + +03:44.970 --> 03:51.480 +次に、 伝統的なデータ・サイエンスの用語でよく使われるオーバーフィッティングです。 + +03:51.480 --> 03:54.360 +問題は、 これがちょっと微妙だということだ。 + +03:54.360 --> 04:01.050 +ハイパーパラメーターやモデル周りの設定をいろいろと調整し、 + +04:01.050 --> 04:05.850 +すべてのハイパーパラメーターを調整し終えるまで再実行し続けた結果、 + +04:05.850 --> 04:14.910 +特定のベンチマークを打ち負かすことができたのです。 + +04:14.930 --> 04:17.720 +それは偶然の一致かもしれない。 + +04:17.720 --> 04:22.610 +まるで、 あなたがこれらの異なるノブをすべて持っていて、 それを特別に回して、 + +04:22.610 --> 04:27.980 +これらのベンチマークを本当に、 本当にうまく解決できるようにしたようなものだ。 + +04:27.980 --> 04:29.480 +それの何が問題なんだ? 
+ +04:29.480 --> 04:33.470 +問題は、 この特定のベンチマークテストを解いただけだということだ。 + +04:33.470 --> 04:39.350 +そして、 同じテストの核心に迫ろうとしている別の問題を出題した場合、 質問の仕方が違うだけで、 + +04:39.350 --> 04:44.720 +違う数学の問題だったりすると、 モデルは大失敗するかもしれません。 モデルを過剰にフィットさせ、 + +04:44.720 --> 04:54.530 +訓練し、 様々なダイヤルを設定し、 これらの非常に特定のベンチマーク問題では本当に本当にうまくいくようにし、 これらのベンチマーク問題から外れると、 + +04:54.530 --> 05:05.300 +同じようなことをテストするように設計された他の問題ではうまくいかないようにしたのです。 + +05:05.300 --> 05:08.990 +言い換えれば、 ベンチマークの結果は誤解を招きかねない。 + +05:09.020 --> 05:13.670 +Pythonのコーディングが得意だとか、 数学の問題が得意だとか、 + +05:13.670 --> 05:19.420 +そういう風に見せることができる。 + +05:19.420 --> 05:21.250 +これがオーバーフィッティングの問題だ。 + +05:21.250 --> 05:31.690 +そのことを意識し、 健全な懐疑心を持ってベンチマークを見直すことが非常に重要です。 + +05:32.080 --> 05:38.800 +最近、 新たな興味深い指摘がなされているが、 それはまだ証明されていない。 + +05:38.800 --> 05:40.480 +まだよく理解されていない。 + +05:40.480 --> 05:57.880 +まだ推測の域を出ないが、 最新のフロンティアモデル、 GPT4クロード3が本当に強いという証拠がいくつかある。 + +05:57.880 --> 05:57.880 +5 + +05:57.880 --> 06:05.260 +ソネット・レベルのモデルは、 自分が評価されていること、 さまざまなベンチマーク・スタイルの質問をされ、 その答えのいくつかが、 専門家に対して、 + +06:05.260 --> 06:12.880 +文脈を理解していること、 自分が評価されているからこのような質問をされていることを示唆していることを、 何らかの形で認識している。 + +06:13.210 --> 06:17.800 +そうすると、 彼らの答えが歪んでしまうかもしれない。 + +06:17.830 --> 06:22.600 +数学が得意かどうかなどというテストがなぜ重要なのかと思うかもしれない。 + +06:22.630 --> 06:25.210 +評価されていることを知っているかどうかは問題ではない。 + +06:25.210 --> 06:27.130 +そうすることで、 彼らは自分が評価されていることを知ることができる。 + +06:27.130 --> 06:31.000 +それでも、 数学の問題でうまくいくかどうかは役に立つ。 + +06:31.060 --> 06:33.430 +さて、 ここで重要な例を挙げよう。 + +06:33.430 --> 06:44.050 +仮に、 安全性やアライメント、 そして敵対的な状況下での誠実な対応について質問したとしよう。 + +06:44.050 --> 06:50.230 +それが評価しようとするものであれば、 モデルが評価されていることを認識すれば、 + +06:50.230 --> 06:54.220 +その質問に答えるアプローチが変わるかもしれない。 + +06:54.220 --> 07:02.410 +また、 例えば、 あるモデルが非常に真実味がある、 あるいは整合性が取れているという印象を与えるかもしれないが、 実際にはそうではない。 + +07:02.410 --> 07:07.510 +だから、 これが本当の懸念や問題だと言うのは時期尚早だ。 + +07:07.510 --> 07:14.320 +それは、 人々が分析し、 研究しているリスクであり、 それが本当に問題なのかどうかはまだわからないが、 + +07:14.320 --> 07:19.090 +現時点では、 調査されている懸念事項であることは確かだ。 + +07:19.360 --> 07:19.900 +分かった。 + +07:19.930 --> 07:21.340 +興味を持ってもらえただろうか? 
+ +07:21.340 --> 07:24.070 +これでベンチマークの限界の一端を知ることができる。 + +07:24.070 --> 07:26.350 +そして、 これからさらに話を進めていく。 diff --git a/week5/community-contributions/subtitles/srts/59295429/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295429/ko_KR.srt new file mode 100755 index 0000000..aa51dc0 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295429/ko_KR.srt @@ -0,0 +1,271 @@ +WEBVTT + +00:00.650 --> 00:06.650 +벤치마크를 계속 조사할 겁니다 실제로 행동을 보면 더 실감이 날 거예요 + +00:06.650 --> 00:11.480 +그러니 참고 들으세요 아주 중요하고 유용한 배경 정보니까요 + +00:11.480 --> 00:18.230 +더 특별한 기술을 시험할 때 사용되는 세 가지 벤치마크를 살펴볼 + +00:18.230 --> 00:19.130 +거예요 + +00:19.310 --> 00:27.920 +첫 번째는 모델들 간의 채팅을 평가하는 데 사용해요 특히 인 페이스오프에서요 + +00:27.950 --> 00:31.430 +아레나 같은 곳에서요 + +00:31.430 --> 00:34.730 +평가 기준은 Elo예요 + +00:34.850 --> 00:43.280 +체스 선수라면 Elo 평가라는 게 있는데 Elo는 다른 스포츠의 + +00:43.280 --> 00:51.080 +경쟁자들에게 주는 평가예요 승자마다 패자가 있는 제로 섬 게임이죠 + +00:51.080 --> 00:58.370 +이런 게임의 결과에 따라 사람들에게 체력에 영향을 주는 + +00:58.370 --> 01:08.140 +상대적인 척도를 줄 수 있어요 헤드 투 헤드 대결의 성적을 토대로요 + +01:08.140 --> 01:16.000 +나중에 아레나에서 예제를 보여드릴 텐데 그럼 활기가 생기죠 + +01:16.090 --> 01:21.460 +제가 보여드릴 이 모듈은 채팅 기능을 평가할 때 사용됩니다 + +01:21.460 --> 01:26.770 +이 모델의 지시 기능을 평가할 때요 + +01:27.400 --> 01:30.130 +다음 두 개는 코딩에 관한 거예요 + +01:30.220 --> 01:34.480 +인간 평가는 파이썬 코딩 테스트로 아주 유명하죠 + +01:34.480 --> 01:37.180 +164개예요 + +01:37.180 --> 01:41.050 +파이썬 문서 문자열에 기반한 코드 작성하기예요 + +01:41.110 --> 01:48.310 +이런 모델은 점점 더 효과적으로 변하고 있습니다. 여러 번이라고 발음할 수도 + +01:48.340 --> 01:49.720 +있겠네요. 
+ +01:50.590 --> 01:57.280 +여러 개라는 건 같은 거지만 18개의 다른 프로그래밍 언어로 번역된 거죠 + +01:57.280 --> 02:03.700 +다양한 영역에 걸친 프로그래밍 기술이 더 폭넓게 사용되는 거죠 + +02:04.870 --> 02:12.890 +잠시 언급할 게 있어요 이런 벤치마크의 한계를요 아주 중요한 점이죠 + +02:12.890 --> 02:16.940 +아무리 강조해도 지나치지 않아요 + +02:17.030 --> 02:24.050 +벤치마크는 비교에 유용합니다 각 모델이 돋보이는 부분과 사용 의도가 + +02:24.050 --> 02:26.300 +아닌 부분을 비교하죠 + +02:26.300 --> 02:30.350 +하지만 벤치마크에는 문제가 있어요 그걸 볼 때 인식해야 하죠 + +02:30.560 --> 02:33.710 +그 중 하나는 일관되게 적용되지 않는다는 거죠 + +02:33.710 --> 02:38.690 +기준점이 어디인지에 따라 달라요 특히 회사의 언론 자료라면 + +02:38.690 --> 02:44.510 +기준점을 어떻게 측정했는지 어떤 하드웨어를 사용했는지 그런 건 해당 + +02:44.510 --> 02:46.310 +기업에 달렸죠 + +02:46.520 --> 02:52.190 +Let's go 금 기준이 있는 것도 아니고 레일에 치수를 재는 것도 아니잖아요 + +02:52.190 --> 02:55.400 +그러니 모든 걸 염두에 둬야 해요 + +02:55.400 --> 03:01.820 +타사에 의해 게시된 경우 그 범위가 너무 좁을 수 있어요 특히 객관식 스타일 + +03:01.820 --> 03:05.930 +문제 같은 경우에요 아주 유사한 관점이죠 + +03:05.960 --> 03:12.390 +이런 벤치마크를 이용해 뉘앙스를 측정하는 건 어렵죠 객관식이나 + +03:12.390 --> 03:18.720 +아주 특정한 답을 다루는 문제라면요 그건 정말 어려운 일이에요 + +03:18.900 --> 03:25.290 +또 다른 문제는 훈련 데이터 유출인데 확신하기 어렵습니다 훈련에 사용되는 + +03:25.290 --> 03:33.150 +데이터에서 이런 해답을 찾을 방법이 없다는 것을요 특히 모델이 최근 데이터로 훈련될수록 + +03:33.150 --> 03:39.480 +이런 기준점에 관한 많은 정보가 포함될 테니까요 + +03:39.480 --> 03:44.490 +훈련 데이터 유출을 통제하기가 점점 더 어려워져요 + +03:44.970 --> 03:50.880 +다음은 아주 중요한 과잉 설정입니다 전통적인 데이터 과학에서 흔히 쓰이는 + +03:50.880 --> 03:51.480 +용어죠 + +03:51.480 --> 03:54.360 +문제는 비트가 좀 미묘하다는 거예요 + +03:54.360 --> 04:01.050 +벤치 마크에서 모델을 정말 정말 잘 만들도록 관리하는 지점에 도달할 수 있습니다 부분적으로 많은 걸 시도해 + +04:01.050 --> 04:05.850 +봤기 때문이죠 많은 hyperpaameter를 변경한 것처럼요 모델과 + +04:05.850 --> 04:10.890 +관련된 많은 종류의 설정들이요 모든 hyperpaameter를 변경할 때까지 + +04:10.890 --> 04:14.910 +계속 다시 실행해 이제 이 벤치 마크를 박살내고 있어요 + +04:14.930 --> 04:17.720 +우연의 일치일 수도 있어요 + +04:17.720 --> 04:22.610 +다양한 노브들이 있기 때문에 특정 방식으로 + +04:22.610 --> 04:27.980 +돌려서 벤치마크를 아주 잘 해결한 거죠 + +04:27.980 --> 04:29.480 +그게 뭐가 문제죠? 
+ +04:29.480 --> 04:33.470 +문제는 이 특정 기준 테스트에 대해 방금 해결했다는 거죠 + +04:33.470 --> 04:39.350 +다른 질문을 할 때 같은 테스트의 핵심을 전달하려고 하지만 질문이 + +04:39.350 --> 04:44.720 +다르거나 수학이 다른 질문일 경우 모델이 대폭 실패할 수 있어요 + +04:44.720 --> 04:50.300 +너무 완벽해서죠 훈련시키고 다양한 채널을 설정해서 특정 벤치마크 + +04:50.330 --> 04:54.530 +문제에서 잘 작동하게 했어요 하지만 샘플에서 + +04:54.530 --> 05:00.920 +나올 때는 잘 작동하지 않죠 벤치마크 문제에서 다른 질문으로 나갈 때는 + +05:00.920 --> 05:05.300 +같은 걸 테스트하도록 설계됐어요 + +05:05.300 --> 05:08.990 +다시 말해 기준 결과가 오도될 수 있다는 거죠 + +05:09.020 --> 05:13.670 +파이썬 코딩이나 수학 문제에 아주 잘 맞는 것처럼 보이게 할 수 있습니다 + +05:13.670 --> 05:19.420 +벤치마크에 있는 특정 질문에 대한 답변을 아주 잘 하는 것처럼 보일 수도 있어요 + +05:19.420 --> 05:21.250 +그게 과잉복지의 문제예요 + +05:21.250 --> 05:29.650 +이를 인지하고 벤치마크를 검토하는 건전한 회의론을 갖는 것이 매우 + +05:30.370 --> 05:31.690 +중요해요 + +05:32.080 --> 05:38.800 +최근에 새로운 흥미로운 관점이 제기됐는데 아직 증명되진 않았어요 + +05:38.800 --> 05:40.480 +아직 잘 이해되지 않았어요 + +05:40.480 --> 05:47.740 +아직은 추측이지만 최신 프론티어 모델인 GPT 4 클로드 + +05:47.740 --> 05:57.880 +3이 비트코인이라는 증거가 있어요 5개의 소네트 수준 모델은 자신이 평가받는다는 걸 어느 정도 + +05:57.880 --> 06:05.260 +인지하고 있습니다 다양한 기준이 되는 질문을 받을 때 그들의 답변은 그들이 전후 사정을 + +06:05.260 --> 06:12.880 +알고 있다는 걸 나타내죠 그들이 이런 질문을 받는 건 평가받고 있기 때문이에요 + +06:13.210 --> 06:17.800 +그러면 일부 답변이 왜곡될 수 있어요 + +06:17.830 --> 06:22.600 +수학을 얼마나 잘하는지 테스트하는 건데 그게 왜 중요한지 궁금하실 거예요 + +06:22.630 --> 06:25.210 +그들이 평가받는다는 걸 알든 모르든 상관없어요 + +06:25.210 --> 06:27.130 +평가받는다는 걸 알 수 있겠죠 + +06:27.130 --> 06:31.000 +수학 문제를 잘 푸는지 못 푸는지가 중요하죠 + +06:31.060 --> 06:33.430 +중요한 게 뭔지 보여드리죠 + +06:33.430 --> 06:40.240 +안전과 정렬에 관한 질문을 한다고 가정해 보죠 적대적인 상황에서 진실하게 + +06:40.240 --> 06:44.050 +대응하는 것에 관한 질문도요 + +06:44.050 --> 06:50.230 +그걸 평가하려는 거라면 모델이 평가되고 있다는 걸 인식한다면 그 질문에 + +06:50.230 --> 06:54.220 +대한 답변에 대한 접근 방식이 바뀔 수도 있죠 + +06:54.220 --> 07:01.150 +예를 들어 모델이 매우 진실하거나 정렬이 잘 맞는 것처럼 보이게 하는 거죠 실제로는 그렇지 + +07:01.180 --> 07:02.410 +않은데도요 + +07:02.410 --> 07:07.510 +따라서 이것이 진짜 문제라고 단정 짓기엔 시기상조죠 + +07:07.510 --> 07:14.320 +아직은 실제 문제인지 알 수 없지만 사람들이 분석하고 연구하고 있는 위험 요소예요 + +07:14.320 --> 07:19.090 +하지만 현재로서는 확실히 고려하고 있는 문제죠 
+ +07:19.360 --> 07:19.900 +좋아요 + +07:19.930 --> 07:21.340 +그게 흥미로웠길 바라요 + +07:21.340 --> 07:24.070 +벤치마크의 한계를 보여 주죠 + +07:24.070 --> 07:26.350 +이제 다음 단계로 넘어가죠 diff --git a/week5/community-contributions/subtitles/srts/59295431/en_US.srt b/week5/community-contributions/subtitles/srts/59295431/en_US.srt new file mode 100755 index 0000000..29ea93b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295431/en_US.srt @@ -0,0 +1,211 @@ +WEBVTT + +00:00.950 --> 00:08.210 +Now I want to take a quick moment to give you a flyby of five different ways that llms are used commercially, + +00:08.210 --> 00:15.710 +and you probably know of 50 more, but it's good to take a moment to look at them and to be food for + +00:15.710 --> 00:20.840 +thought for you to consider other commercial applications of llms. + +00:20.840 --> 00:25.850 +And while we're doing this, also be thinking about how you would assess the right model for these different + +00:25.850 --> 00:31.700 +problems, using the kinds of leaderboards and the arena that we've already talked about. + +00:31.730 --> 00:37.970 +The first one I wanted to show you is a company called Harvey, which uses llms in the field of law, + +00:37.970 --> 00:43.610 +and you can read through their site, but it gives products for lawyers that will do things like answering + +00:43.610 --> 00:47.390 +questions on on law, what what is a claim of disloyalty? + +00:47.660 --> 00:53.630 +And I believe it also does things like looking through legal documents to find key terms and the like. + +00:53.630 --> 01:01.370 +It makes so much sense to be applying llms to the field of law that it's a no brainer. + +01:01.370 --> 01:09.820 +It's a classic example of needing to use language and nuance and apply it to difficult business Challenges. + +01:09.970 --> 01:11.140 +Here's another. + +01:11.140 --> 01:14.200 +And this is near and dear to my heart because this is my day job. + +01:14.200 --> 01:15.730 +This is Nebula. 
+ +01:15.790 --> 01:24.160 +Uh, Nebula io, which is using llms to apply to the world of talent and recruitment, helping managers + +01:24.160 --> 01:27.160 +to hire and engage with great candidates. + +01:27.160 --> 01:33.310 +But it's also helping people to explore and understand where they can be most satisfied and successful. + +01:33.340 --> 01:37.210 +Using Llms to understand the content of people's careers. + +01:37.240 --> 01:41.230 +Again, it's the kind of use case that makes so much sense. + +01:42.610 --> 01:46.420 +This one is one that I find particularly annoying. + +01:46.420 --> 01:50.950 +I am, uh, upset by this particular company. + +01:50.950 --> 01:58.480 +Blooper AI um bloop, which is a platform that ports. + +01:58.510 --> 02:00.790 +Legacy code into Java. + +02:00.790 --> 02:06.220 +And the reason that I find this, uh, galling is because I wish I had thought of it. + +02:06.250 --> 02:07.870 +It's such a great idea. + +02:07.870 --> 02:09.310 +It's such an obvious idea. + +02:09.340 --> 02:09.700 +Obviously. + +02:09.700 --> 02:10.450 +Brilliant. + +02:10.480 --> 02:14.260 +As soon as I heard it, I thought, oh, that's a great idea. + +02:14.410 --> 02:21.180 +Uh, so of course there is tons of COBOL code out there and other legacy code. + +02:21.180 --> 02:27.150 +And it's it's a huge challenge for large corporations to figure out how to maintain this code, as people + +02:27.150 --> 02:29.700 +increasingly don't want to work with languages like COBOL. + +02:29.700 --> 02:33.600 +And there's a lot of legacy code that people can't read and understand. + +02:33.720 --> 02:42.270 +Um, and it's such a perfect use case for coding models that can learn different programming languages + +02:42.270 --> 02:48.480 +and then can use that to port from one language to another, and presumably can do things like adding + +02:48.480 --> 02:52.290 +in comments and test cases and everything else. + +02:52.440 --> 02:56.700 +Um, so, uh, yes, it's uh, dang it. 
+ +02:56.730 --> 03:03.270 +They've they've obviously, uh, really struck on a fantastic idea. + +03:03.270 --> 03:09.420 +Uh, I think this is a great looking product from a Y Combinator backed company. + +03:09.420 --> 03:12.780 +And, uh, yes, congratulations to to bloop. + +03:13.530 --> 03:15.210 +And I love the name bloop. + +03:15.420 --> 03:24.570 +Uh, on on less uh, memorable name would be, uh, this Salesforce product Salesforce's Einstein copilot + +03:24.600 --> 03:31.110 +health actions Quite a mouthful, but I will say that aside from the slightly clunky name, the product + +03:31.110 --> 03:34.680 +itself again is one of those ones that's like, oh yes, that makes sense. + +03:34.830 --> 03:42.810 +Uh, a sort of, uh, dashboard that can be used by healthcare practitioners, for example, to do things + +03:42.810 --> 03:51.420 +like summarize for a care coordinator, the outcomes of a medical appointment, saving presumably tons + +03:51.420 --> 03:58.320 +of time, and giving this kind of very compelling summary of what happened, uh, which for all we know, + +03:58.350 --> 04:00.600 +could be a Gradio app because that easy. + +04:00.720 --> 04:08.190 +Uh, but it's probably something Salesforce, uh, but it's such a good use case makes a lot of sense. + +04:08.190 --> 04:12.990 +And obviously, uh, no doubt Salesforce is going to do really well with that product. + +04:13.650 --> 04:16.140 +And then I came across Khan. + +04:16.140 --> 04:17.550 +Mingo Khan. + +04:17.580 --> 04:17.940 +Yes. + +04:17.940 --> 04:20.070 +I think it's probably how it's pronounced conmigo. + +04:20.310 --> 04:24.810 +Um, uh, which is, uh, the Khan Academy. + +04:24.810 --> 04:28.710 +Uh, presumably, uh, with a mingo at the end of it. + +04:28.770 --> 04:35.780 +Uh, and this is an LLM based platform to be a companion for teachers, learners and parents. + +04:35.780 --> 04:37.760 +And what a great idea again. 
+ +04:37.760 --> 04:44.450 +And the Khan Academy is a obviously a fabulous resource, and I'm sure that this is something that this + +04:44.450 --> 04:47.750 +solution is something that will be immensely valuable. + +04:47.750 --> 04:55.610 +And the application of LMS to education is something which is again a no brainer, absolute no brainer. + +04:55.610 --> 05:05.540 +So across these different examples, law, talent, coding, the medical field and education, you can + +05:05.540 --> 05:10.100 +see the impact that LMS will be able to make in each of these products. + +05:10.100 --> 05:15.140 +And I'm sure you have many other examples yourself, but hopefully, as I say, this has given you food + +05:15.140 --> 05:21.350 +for thought and also interesting to think about how you would be assessing different models that you + +05:21.350 --> 05:23.210 +would pick to solve any of these problems. + +05:23.210 --> 05:29.750 +To state the obvious, of course, for this one, no doubt we'd be looking at the medical LMS leaderboard + +05:29.750 --> 05:35.660 +and hugging face, and here we'd be looking at many of the coding metrics, not just human eval for + +05:35.660 --> 05:39.650 +Python, but the coding metrics for other languages too. + +05:40.610 --> 05:43.850 +Okay, with that we will go over to wrap up. 
diff --git a/week5/community-contributions/subtitles/srts/59295431/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295431/ja_JP.srt new file mode 100755 index 0000000..a70f2b2 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295431/ja_JP.srt @@ -0,0 +1,175 @@ +WEBVTT + +00:00.950 --> 00:08.210 +おそらく、 皆さんはさらに50は知っているだろうが、 + +00:08.210 --> 00:20.840 +少し時間をとって、 llmsが商業的に使われている5つの方法を紹介したい。 + +00:20.840 --> 00:25.850 +そうしている間に、 私たちがすでに話したようなリーダーボードやアリーナを使って、 + +00:25.850 --> 00:31.700 +これらの異なる問題に対する適切なモデルをどのように評価するかについても考えてください。 + +00:31.730 --> 00:37.970 +最初に紹介したいのはHarveyという会社で、 法律の分野でllmsを使用しています。 + +00:37.970 --> 00:43.610 +そのサイトを読んでいただければわかりますが、 弁護士向けの製品を提供しており、 + +00:43.610 --> 00:47.390 +法律に関する質問に答えたり、 不誠実の主張とは何か? + +00:47.660 --> 00:53.630 +また、 法律文書に目を通して重要な用語などを見つけることもできると思う。 + +00:53.630 --> 01:01.370 +LLMSを法律の分野に応用することは、 非常に理にかなっている。 + +01:01.370 --> 01:09.820 +言葉やニュアンスを使いこなし、 難しいビジネス課題に応用する必要がある典型的な例だ。 + +01:09.970 --> 01:11.140 +もうひとつ。 + +01:11.140 --> 01:14.200 +そして、 これは私の本業であるため、 身近で大切なことなのだ。 + +01:14.200 --> 01:15.730 +これがネビュラだ。 + +01:15.790 --> 01:27.160 +ネビュラioは、 llmsを人材と採用の世界に応用し、 管理職が優秀な候補者を採用し、 エンゲージするのを支援している。 + +01:27.160 --> 01:33.310 +しかし、 それはまた、 人々がどこで最も満足し、 成功できるかを探求し、 理解する助けにもなっている。 + +01:33.340 --> 01:37.210 +人々のキャリアの内容を理解するためにLlmsを使用する。 + +01:37.240 --> 01:41.230 +繰り返しになるが、 このような使用例は非常に理にかなっている。 + +01:42.610 --> 01:46.420 +私が特に腹立たしいと思うのはこれだ。 + +01:46.420 --> 01:50.950 +私は、 その、 この特定の会社に動揺しているんだ。 + +01:50.950 --> 01:58.480 +Blooper AIは......Bloop、 つまり移植するプラットフォームだ。 + +01:58.510 --> 02:00.790 +レガシーコードをJavaに。 + +02:00.790 --> 02:06.220 +私がこのことを残念に思うのは、 私がこのことを考えたかったからだ。 + +02:06.250 --> 02:07.870 +素晴らしいアイデアだ。 + +02:07.870 --> 02:09.310 +当たり前のアイデアだ。 + +02:09.340 --> 02:09.700 +明らかにね。 + +02:09.700 --> 02:10.450 +素晴らしい。 + +02:10.480 --> 02:14.260 +それを聞いた瞬間、 ああ、 これはいいアイデアだと思った。 + +02:14.410 --> 02:21.180 +もちろん、 COBOLのコードやその他のレガシーコードは大量にある。 + +02:21.180 --> 02:29.700 +COBOLのような言語を使いたがらない人が増えているため、 大企業にとっては、 
このコードをどのように保守していくかが大きな課題となっている。
+
+02:29.700 --> 02:33.600
+そして、 人々が読んで理解できないレガシーコードがたくさんある。
+
+02:33.720 --> 02:42.270
+異なるプログラミング言語を学ぶことができ、 それを使ってある言語から別の言語へ移植することができ、
+
+02:42.270 --> 02:52.290
+おそらくコメントやテストケースの追加など、 あらゆることができる。
+
+02:52.440 --> 02:56.700
+うーん、 だから、 そうだね。
+
+02:56.730 --> 03:03.270
+彼らは明らかに......本当に素晴らしいアイデアを思いついた。
+
+03:03.270 --> 03:09.420
+Yコンビネーターが支援した会社の製品で、 見た目は素晴らしいと思う。
+
+03:09.420 --> 03:12.780
+そして、 ああ、 そうだね、 ブループにおめでとう。
+
+03:13.530 --> 03:15.210
+ブループという名前も気に入っている。
+
+03:15.420 --> 03:24.570
+Salesforceのアインシュタイン・コパイロット・ヘルス・アクションという製品名です。 かなりくどい名前ですが、
+
+03:24.600 --> 03:34.680
+製品自体は、 少しくどい名前を除けば、 「ああ、 なるほど」と納得できるものです。
+
+03:34.830 --> 04:00.600
+ダッシュボードのようなもので、 例えば医療従事者がケアコーディネーターのために診察の結果を要約するといったことができる。
+
+04:00.720 --> 04:08.190
+でも、 セールスフォースとしては、 とても理にかなったユースケースだと思います。
+
+04:08.190 --> 04:12.990
+そして明らかに、 セールスフォースがその製品で大きな成功を収めることは間違いない。
+
+04:13.650 --> 04:16.140
+そしてカーンに出会った。
+
+04:16.140 --> 04:17.550
+ミンゴ・カーン
+
+04:17.580 --> 04:17.940
+そうだ。
+
+04:17.940 --> 04:20.070
+おそらくコンミーゴの発音だと思う。
+
+04:20.310 --> 04:24.810
+カーン・アカデミーのことです。
+
+04:24.810 --> 04:28.710
+ええと、 おそらく、 ええと、 最後にミンゴスがついている。
+
+04:28.770 --> 04:35.780
+これは、 教師、 学習者、 保護者のためのLLMベースのプラットフォームです。
+
+04:35.780 --> 04:37.760
+そしてまた、 なんと素晴らしいアイデアだろう。
+
+04:37.760 --> 04:47.750
+そして、 カーン・アカデミーは明らかに素晴らしいリソースであり、 このソリューションが非常に価値のあるものであることは間違いない。
+
+04:47.750 --> 04:55.610
+そして、 LMSを教育に応用することは、 これまた当然のことである。
+
+04:55.610 --> 05:10.100
+つまり、 法律、 人材、 コーディング、 医療分野、 教育といったさまざまな例を通じて、 LMSがこれらの製品のそれぞれに与える影響を見ることができる。
+
+05:10.100 --> 05:15.140
+あなた自身、 他にも多くの例をお持ちだと思いますが、 私が申し上げたように、 これがあなたにとって考える材料になり、
+
+05:15.140 --> 05:23.210
+また、 これらの問題を解決するために選択するさまざまなモデルをどのように評価するかについて考える興味深い材料になれば幸いです。
+
+05:23.210 --> 05:29.750
+当たり前のことを言えば、 もちろん、 この件に関しては、 間違いなく医療LMSのリーダーボードとハギング・フェイスを見ていることだろう。
+
+05:29.750 --> 05:39.650
+ここでは、 Pythonの人間評価だけでなく、 他の言語のコーディング評価基準も含めて、 多くのコーディング評価基準を見ているはずだ。
+
+05:40.610 --> 05:43.850
+よし、 これで終わりにしよう。
diff --git
a/week5/community-contributions/subtitles/srts/59295431/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295431/ko_KR.srt new file mode 100755 index 0000000..d7ef91d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295431/ko_KR.srt @@ -0,0 +1,211 @@ +WEBVTT + +00:00.950 --> 00:08.210 +잠시 시간을 내서 llms의 상업적 사용법을 다섯 가지로 나눠서 저공비행해 + +00:08.210 --> 00:15.710 +보겠습니다 아마 50가지도 더 있겠지만 잠시 살펴보고 llms의 다른 상업적 + +00:15.710 --> 00:20.840 +응용법을 고려해 볼 가치가 있다고 생각해요 + +00:20.840 --> 00:25.850 +이걸 하는 동안 이런 다양한 문제들에 맞는 모델을 어떻게 평가할지 + +00:25.850 --> 00:31.700 +생각해 보세요 리더보드와 이미 얘기한 경기장을 이용해서요 + +00:31.730 --> 00:37.970 +첫 번째로 하비라는 회사를 소개할게요 법률 분야의 llms를 사용하는 회사인데 + +00:37.970 --> 00:43.610 +이 사이트에서 변호사들에게 제공하는 상품이 있어요 법률 관련 질문에 대답하는 + +00:43.610 --> 00:47.390 +용도죠 불충의 주장이 무엇일까요? + +00:47.660 --> 00:53.630 +법률 문서를 뒤져서 핵심 용어를 찾는 일도 할 거예요 + +00:53.630 --> 01:01.370 +llms를 법학 분야에 적용하는 건 당연한 일이에요 + +01:01.370 --> 01:09.820 +언어와 뉘앙스를 활용해 어려운 비즈니스 챌린지에 적용해야 하는 전형적인 예죠 + +01:09.970 --> 01:11.140 +또 있어요 + +01:11.140 --> 01:14.200 +이건 제게 아주 소중한 일이에요 이게 제 본업이거든요 + +01:14.200 --> 01:15.730 +이건 네뷸라예요 + +01:15.790 --> 01:24.160 +네뷸라 이오라는 회사인데 llms를 이용해 인재 채용의 세계에 지원해서 매니저들이 훌륭한 후보를 + +01:24.160 --> 01:27.160 +고용하고 엮이게 돕는 곳이죠 + +01:27.160 --> 01:33.310 +하지만 동시에 사람들이 탐구하고 이해하도록 돕죠 자신이 가장 만족하고 성공할 수 있는 곳을요 + +01:33.340 --> 01:37.210 +Lms를 이용해 사람들의 커리어를 이해하는 거죠 + +01:37.240 --> 01:41.230 +다시 말하지만 이런 게 아주 합리적인 사용 사례죠 + +01:42.610 --> 01:46.420 +이게 특히 거슬려요 + +01:46.420 --> 01:50.950 +이 회사 때문에 화가 나요 + +01:50.950 --> 01:58.480 +실수 인공지능 움 블룹 포트가 되는 플랫폼이죠 + +01:58.510 --> 02:00.790 +Java로 레거시 코드를 보내죠 + +02:00.790 --> 02:06.220 +이 상황이 짜증 나는 이유는 내가 왜 그 생각을 못했을까요? 
+ +02:06.250 --> 02:07.870 +정말 좋은 생각이에요 + +02:07.870 --> 02:09.310 +너무 뻔한 아이디어잖아요 + +02:09.340 --> 02:09.700 +당연하죠 + +02:09.700 --> 02:10.450 +훌륭해요 + +02:10.480 --> 02:14.260 +그 말을 듣자마자 좋은 생각이라고 생각했어요 + +02:14.410 --> 02:21.180 +코볼 관련 코드가 아주 많아요 다른 레거시 코드도 많고요 + +02:21.180 --> 02:27.150 +대기업이 이 코드를 유지하는 법을 알아내는 건 큰 도전입니다 사람들이 코볼 언어 + +02:27.150 --> 02:29.700 +같은 걸 점점 꺼리기 때문이죠 + +02:29.700 --> 02:33.600 +사람들이 읽고 이해할 수 없는 레거시 코드가 많아요 + +02:33.720 --> 02:42.270 +코딩 모델에 있어서는 완벽한 사용 예로 다른 프로그래밍 언어를 배울 수 있고 한 언어에서 + +02:42.270 --> 02:48.480 +다른 언어로 포트를 할 수 있고 주석 추가나 테스트 케이스 같은 + +02:48.480 --> 02:52.290 +것도 할 수 있을 것 같아요 + +02:52.440 --> 02:56.700 +네, 맞아요, 젠장 + +02:56.730 --> 03:03.270 +정말 환상적인 아이디어를 떠올린 것 같아요 + +03:03.270 --> 03:09.420 +와이 컴비네이터 백업 사 제품으로 아주 좋아 보여요 + +03:09.420 --> 03:12.780 +네, 축하해요, 블룹 + +03:13.530 --> 03:15.210 +블룹이란 이름도 좋아요 + +03:15.420 --> 03:24.570 +덜 인상적인 이름에는 이 세일즈포스 제품을 추천할게요 세일즈포스의 아인슈타인 부조종사 + +03:24.600 --> 03:31.110 +건강 행동 꽤 길지만 이름만 좀 투박할 뿐 제품 자체는 말이 된다고 + +03:31.110 --> 03:34.680 +생각할 만한 제품이에요 + +03:34.830 --> 03:42.810 +일종의 대시보드인데 의료 종사자가 사용할 수 있습니다 의료 관리자를 위한 요약 + +03:42.810 --> 03:51.420 +같은 걸 할 때 쓰죠 진료 예약 결과를 보면 아마 시간이 많이 절약될 겁니다 그리고 일어난 + +03:51.420 --> 03:58.320 +일을 아주 인상적으로 요약할 수 있죠 너무 쉬워서 그라디오 앱이 될 + +03:58.350 --> 04:00.600 +수도 있어요 + +04:00.720 --> 04:08.190 +세일즈포스 관련 문제일 수도 있지만 아주 유용한 사용 사례예요 + +04:08.190 --> 04:12.990 +세일즈포스는 그 제품으로 분명히 성공할 거예요 + +04:13.650 --> 04:16.140 +그러다 칸을 만났어요 + +04:16.140 --> 04:17.550 +밍고 칸이에요 + +04:17.580 --> 04:17.940 +네 + +04:17.940 --> 04:20.070 +콘미고라고 발음하는 게 맞을 거예요 + +04:20.310 --> 04:24.810 +칸 아카데미라는 곳이죠 + +04:24.810 --> 04:28.710 +아마 맨 끝에는 밍고가 있을 거예요 + +04:28.770 --> 04:35.780 +교사, 학습자, 부모를 위한 동반자로서 LLM 기반 플랫폼이죠 + +04:35.780 --> 04:37.760 +정말 좋은 생각이었어요 + +04:37.760 --> 04:44.450 +칸 아카데미는 분명 훌륭한 자원이고 이 해결책은 분명히 + +04:44.450 --> 04:47.750 +엄청난 가치가 있을 거예요 + +04:47.750 --> 04:55.610 +LMS 응용 프로그램은 생각할 것도 없어요 + +04:55.610 --> 05:05.540 +법, 인재, 코딩, 의학, 교육 등 다양한 예시를 보면 LMS가 이 모든 제품에 + +05:05.540 --> 
05:10.100
+어떤 영향을 미칠지 알 수 있죠
+
+05:10.100 --> 05:15.140
+물론 다른 예도 많이 있겠지만 부디 이번 사례가 생각할 거리를 제공했길 바랍니다
+
+05:15.140 --> 05:21.350
+또한 이런 문제를 해결하기 위해 여러분이 선택한 다양한 모델을 어떻게 평가할지 생각해 보는 것도
+
+05:21.350 --> 05:23.210
+흥미로웠으면 해요
+
+05:23.210 --> 05:29.750
+당연한 걸 짚고 넘어가자면 이 강의에서는 의료용 LMS leaderboard와 허깅
+
+05:29.750 --> 05:35.660
+페이스가 보이겠죠 그리고 코딩 지표도 많이 볼 겁니다 파이썬에 대한 인간 평가뿐
+
+05:35.660 --> 05:39.650
+아니라 다른 언어의 코딩 지표도 볼 거예요
+
+05:40.610 --> 05:43.850
+좋아요, 이제 마무리하러 가죠
diff --git a/week5/community-contributions/subtitles/srts/59295435/en_US.srt b/week5/community-contributions/subtitles/srts/59295435/en_US.srt
new file mode 100755
index 0000000..c8f94ee
--- /dev/null
+++ b/week5/community-contributions/subtitles/srts/59295435/en_US.srt
@@ -0,0 +1,133 @@
+WEBVTT
+
+00:01.190 --> 00:07.160
+Well, just before we wrap up, let me introduce this week's challenge and talk about what we're going
+
+00:07.160 --> 00:07.460
+to do.
+
+00:07.460 --> 00:10.850
+And this is going to be, I think, again, a lot of fun.
+
+00:11.150 --> 00:14.870
+Hopefully you're finding all of the projects that we're doing together fun.
+
+00:14.960 --> 00:20.660
+I'm really trying to to come up with with interesting, juicy projects that stretch us in different
+
+00:20.660 --> 00:21.410
+ways.
+
+00:21.770 --> 00:27.440
+So I guess last time we've done, we built things like a system that generates minutes of meetings by
+
+00:27.470 --> 00:28.640
+listening to their audio.
+
+00:28.670 --> 00:31.070
+This time something completely different.
+
+00:31.280 --> 00:33.710
+It's going to be about writing code.
+
+00:33.710 --> 00:42.020
+And in particular, the idea is it's going to be about a code conversion, I guess somewhat inspired
+
+00:42.020 --> 00:43.430
+by bloop.
+
+00:43.430 --> 00:45.710
+And their fiendishly brilliant idea.
+ +00:46.160 --> 00:54.560 +So in this case, what we're going to try and do is write a product that is designed to improve performance + +00:54.560 --> 00:59.390 +and performance critical code by converting Python to C plus. + +00:59.390 --> 01:00.020 +Plus. + +01:00.020 --> 01:01.270 +That's the idea. + +01:01.270 --> 01:05.710 +So we want to to find out how we can convert Python to C plus. + +01:05.710 --> 01:06.220 +Plus. + +01:06.220 --> 01:09.190 +And we're going to do this using a frontier model. + +01:09.190 --> 01:12.190 +And we're also going to do it with an open source model. + +01:12.190 --> 01:15.400 +And we're going to compare the performance of the results. + +01:15.400 --> 01:21.340 +But obviously we're going to have to start by selecting llms that are most suitable for the task. + +01:21.340 --> 01:22.960 +So that is the challenge at hand. + +01:22.990 --> 01:24.430 +I think it's going to be fun. + +01:24.520 --> 01:31.630 +We're going to see how we perform at optimizing code with the help of an LLM. + +01:33.640 --> 01:37.750 +But first, just a quick wrap up for day two of week four. + +01:37.780 --> 01:40.240 +Let me do that thing one more time. + +01:40.240 --> 01:44.110 +You're probably sick of this, but I do want to remind you of all the things you can do. + +01:44.170 --> 01:49.750 +You can code with frontier models, including building AI assistants that use tools. + +01:49.750 --> 01:57.670 +You can now also build open source solutions using hugging face the the Pipeline API across a variety + +01:57.670 --> 02:03.670 +of inference tasks and hugging faces, face, Tokenizers and models, which is the lower level APIs + +02:03.670 --> 02:09.010 +which give you more insight into what's going on and much more flexibility and will become essential + +02:09.010 --> 02:10.690 +when we get to training later. + +02:10.720 --> 02:12.010 +And now? 
+ +02:12.010 --> 02:16.990 +Now, hopefully you should be in a position where you can confidently choose the right LLM for your + +02:16.990 --> 02:24.400 +project, backed by real results from leaderboards, from arenas, and from other resources. + +02:24.430 --> 02:29.530 +And when I say choose the right LLM, typically you'd be choosing the right 2 or 3 llms that you will + +02:29.560 --> 02:38.560 +then go into prototype with in order to then finally select the one that performs best after next time, + +02:38.560 --> 02:44.620 +you should have a deeper sense of how to assess the coding ability of models, and you'll have used + +02:44.620 --> 02:51.370 +a frontier model to generate code and built a solution front to back using Llms to generate code. + +02:51.370 --> 02:57.940 +And that's going to be another another skill that you will have acquired on the path to being a highly + +02:57.940 --> 02:59.530 +proficient LLM engineer. + +02:59.530 --> 03:00.700 +I will see you there. diff --git a/week5/community-contributions/subtitles/srts/59295435/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295435/ja_JP.srt new file mode 100755 index 0000000..9ca48fb --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295435/ja_JP.srt @@ -0,0 +1,106 @@ +WEBVTT + +00:01.190 --> 00:07.460 +さて、 最後に今週のチャレンジについて紹介しよう。 + +00:07.460 --> 00:10.850 +そして、 これはとても楽しいものになると思う。 + +00:11.150 --> 00:14.870 +僕らが一緒にやっているすべてのプロジェクトが楽しいと感じてくれていることを願っている。 + +00:14.960 --> 00:21.410 +私たちをさまざまな方法で伸ばしてくれるような、 面白くてジューシーな企画を考え出そうとしているんだ。 + +00:21.770 --> 00:28.640 +だから前回は、 会議の音声を聞いて議事録を作成するシステムなどを作ったんだ。 + +00:28.670 --> 00:31.070 +今回はまったく違う。 + +00:31.280 --> 00:33.710 +コードを書くことになる。 + +00:33.710 --> 00:43.430 +特に、 このアイデアはコード変換をテーマにしたもので、 Bloopにインスパイアされたものだと思う。 + +00:43.430 --> 00:45.710 +そして、 それは悪魔のように素晴らしいアイデアだ。 + +00:46.160 --> 00:59.390 +そこで今回は、 PythonをCプラスに変換することで、 パフォーマンスやパフォーマンスが重要なコードを改善するための製品を書こうと思います。 + +00:59.390 --> 01:00.020 +それに + +01:00.020 --> 01:01.270 +そういうことだ。 + 
+01:01.270 --> 01:05.710 +そこで、 PythonをCプラスに変換する方法を見つけたい。 + +01:05.710 --> 01:06.220 +それに + +01:06.220 --> 01:09.190 +そして、 フロンティアモデルを使ってこれを行うつもりだ。 + +01:09.190 --> 01:12.190 +また、 オープンソースモデルでそれを行うつもりだ。 + +01:12.190 --> 01:15.400 +そしてその結果を比較する。 + +01:15.400 --> 01:21.340 +しかし、 そのタスクに最も適したllmsを選ぶことから始めなければならないのは明らかだ。 + +01:21.340 --> 01:22.960 +それが目下の課題だ。 + +01:22.990 --> 01:24.430 +楽しくなると思うよ。 + +01:24.520 --> 01:31.630 +LLMの助けを借りて、 コードを最適化する際のパフォーマンスを見てみよう。 + +01:33.640 --> 01:37.750 +その前に、 4週目の2日目を簡単にまとめておこう。 + +01:37.780 --> 01:40.240 +もう一度だけやらせてくれ。 + +01:40.240 --> 01:44.110 +もううんざりしているだろうが、 君たちにできることをすべて思い出してほしい。 + +01:44.170 --> 01:49.750 +ツールを使ったAIアシスタントの構築など、 フロンティアモデルを使ったコーディングができる。 + +01:49.750 --> 01:57.670 +また、 様々な推論タスクでパイプラインAPIとハギングフェイス、 + +01:57.670 --> 02:10.690 +フェイス、 トーケナイザー、 モデルを使用してオープンソースのソリューションを構築することができます。 + +02:10.720 --> 02:12.010 +それで今は? + +02:12.010 --> 02:16.990 +これで、 リーダーボードやアリーナ、 その他のリソースから得られた実際の結果に裏打ちされた、 + +02:16.990 --> 02:24.400 +あなたのプロジェクトに適したLLMを自信を持って選択できる状態になったはずだ。 + +02:24.430 --> 02:29.530 +適切なLLMを選ぶというのは、 通常、 2つか3つのLLMを選んでプロトタイプを作成し、 + +02:29.560 --> 02:38.560 +次回以降、 最終的に最も優れたパフォーマンスを発揮するLLMを選ぶということです。 モデルのコーディング能力を評価する方法を深く理解し、 + +02:38.560 --> 02:51.370 +フロンティアモデルを使ってコードを生成し、 LLMを使ってソリューションをフロントからバックまで構築してコードを生成する。 + +02:51.370 --> 02:59.530 +そしてそれは、 高度に熟練したLLMエンジニアになるための道のりで、 あなたが身につけたもうひとつのスキルになるだろう。 + +02:59.530 --> 03:00.700 +そこで会おう。 diff --git a/week5/community-contributions/subtitles/srts/59295435/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295435/ko_KR.srt new file mode 100755 index 0000000..cdb2a54 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295435/ko_KR.srt @@ -0,0 +1,124 @@ +WEBVTT + +00:01.190 --> 00:07.460 +마무리하기 전에 이번 주 과제를 소개하고 뭘 할지 말씀드릴게요 + +00:07.460 --> 00:10.850 +이것도 아주 재미있을 것 같아요 + +00:11.150 --> 00:14.870 +우리가 함께 하는 모든 프로젝트가 즐거웠으면 좋겠어요 + +00:14.960 --> 00:21.410 +여러 방면으로 확장할 흥미로운 프로젝트를 구상 중이에요 + +00:21.770 --> 00:27.440 +지난번에 했을 때 음성으로 몇 분짜리 회의를 생성하는 시스템을 + +00:27.470 
--> 00:28.640 +만들었죠 + +00:28.670 --> 00:31.070 +이번에는 완전히 다른 걸 할 거예요 + +00:31.280 --> 00:33.710 +코드 작성에 관한 거예요 + +00:33.710 --> 00:42.020 +특히 아이디어는 코드 변환에 관한 것이 될 겁니다 bloop에서 영감을 받은 + +00:42.020 --> 00:43.430 +것 같네요 + +00:43.430 --> 00:45.710 +정말 기발한 아이디어예요 + +00:46.160 --> 00:54.560 +이 경우에 우리가 하려는 것은 성능을 향상시키기 위해 디자인된 제품을 만드는 것입니다 파이썬 을 C+로 + +00:54.560 --> 00:59.390 +변환함으로써 성능 결정적 코드를 개선하는 것이죠 + +00:59.390 --> 01:00.020 +더 있어요 + +01:00.020 --> 01:01.270 +그게 목적이죠 + +01:01.270 --> 01:05.710 +파이썬 을 C+로 변환하는 방법을 알아보려고 해요 + +01:05.710 --> 01:06.220 +더 있어요 + +01:06.220 --> 01:09.190 +개척자 모델을 사용할 거예요 + +01:09.190 --> 01:12.190 +오픈 소스 모델로도 할 거예요 + +01:12.190 --> 01:15.400 +그리고 그 결과의 성능을 비교해볼 거예요 + +01:15.400 --> 01:21.340 +하지만 작업에 가장 적합한 llms부터 선택해야 해요 + +01:21.340 --> 01:22.960 +그게 어려운 점이죠 + +01:22.990 --> 01:24.430 +재미있을 것 같아요 + +01:24.520 --> 01:31.630 +LLM의 도움으로 코드 최적화를 어떻게 수행하는지 보죠 + +01:33.640 --> 01:37.750 +하지만 먼저, 4주차 둘째 날을 간단히 정리하죠 + +01:37.780 --> 01:40.240 +한 번만 더 할게요 + +01:40.240 --> 01:44.110 +이런 얘기 지겹겠지만 당신이 할 수 있는 모든 걸 상기시켜 주고 싶어요 + +01:44.170 --> 01:49.750 +프론티어 모델과 코드를 만들 수 있습니다 도구를 사용하는 인공지능 조수 제작도 포함해서요 + +01:49.750 --> 01:57.670 +오픈 소스 솔루션도 구축할 수 있습니다 다양한 추론 작업과 얼굴 포옹 얼굴 포옹, 토큰라이저 + +01:57.670 --> 02:03.670 +모델 등을 통찰력 있는 낮은 수준의 API를 통해 진행 상황을 파악할 + +02:03.670 --> 02:10.690 +수 있고 유연성도 커지며 나중에 꼭 필요한 기능이 되겠죠 + +02:10.720 --> 02:12.010 +지금은요? 
+ +02:12.010 --> 02:16.990 +이제 프로젝트에 맞는 LLM을 자신 있게 선택할 수 있는 위치에 + +02:16.990 --> 02:24.400 +서길 바랍니다 리더보드, 아레나스 등 여러 리소스에서 얻은 실제 결과에 기반해서요 + +02:24.430 --> 02:29.530 +올바른 LLM을 선택하라고 하면 보통 올바른 2, 3개의 LLM을 + +02:29.560 --> 02:38.560 +선택하게 됩니다 프로토타입으로 가서 다음 번에 가장 잘 작동하는 것을 선택하기 위해서요 모델의 코딩 능력을 + +02:38.560 --> 02:44.620 +평가하는 방법을 더 깊이 아셔야 합니다 코드 생성하기 위해 선구 모델을 사용했고 + +02:44.620 --> 02:51.370 +코드를 생성하기 위해 LM을 이용해 솔루션을 구축했죠 + +02:51.370 --> 02:57.940 +숙련된 LLM 엔지니어가 되려면 배워야 할 또 다른 기술이 + +02:57.940 --> 02:59.530 +되는 거죠 + +02:59.530 --> 03:00.700 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/59295439/en_US.srt b/week5/community-contributions/subtitles/srts/59295439/en_US.srt new file mode 100755 index 0000000..d642549 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295439/en_US.srt @@ -0,0 +1,325 @@ +WEBVTT + +00:01.040 --> 00:05.300 +So I'm aware that there's a big risk that you are getting fed up of leaderboards, because we've done + +00:05.300 --> 00:11.000 +a lot of talking about leaderboards, and perhaps you are becoming fatigued to looking at benchmarks + +00:11.000 --> 00:12.020 +and leaderboards. + +00:12.020 --> 00:13.010 +You've bookmarked them all. + +00:13.010 --> 00:14.780 +You're thinking, okay, I get it. + +00:14.780 --> 00:18.110 +There are lots of great resources when it comes to choosing an LMS. + +00:18.110 --> 00:21.590 +I will look through your leaderboards and I will do it then. + +00:21.590 --> 00:22.280 +But enough. + +00:22.280 --> 00:23.390 +Enough with the leaderboards. + +00:23.390 --> 00:25.400 +But wait, there is one more. + +00:25.400 --> 00:26.480 +And it's a really fun one. + +00:26.480 --> 00:27.770 +And it's one you're going to love. + +00:27.800 --> 00:32.390 +So you're not going to mind this final leaderboard for us to look at. + +00:32.390 --> 00:35.240 +And it is in fact not a normal leaderboard. + +00:35.240 --> 00:38.810 +It is an arena and it's called the LMS Chatbot Arena. 
+ +00:38.810 --> 00:43.550 +And you may have come across this already because it's so popular and fun and lots of people talk about + +00:43.550 --> 00:43.820 +it. + +00:43.820 --> 00:48.860 +So if you know it already, then hang on, bear with me because we're going to use it together and have + +00:48.860 --> 00:49.610 +some fun. + +00:49.610 --> 00:57.290 +So the idea is that the LMS chatbot arena, which is incredibly popular, is a place where models are + +00:57.290 --> 01:02.030 +assessed by humans and they're assessed for their chat abilities. + +01:02.090 --> 01:05.830 +It is, as they say, a crowdsourced open platform. + +01:05.830 --> 01:15.760 +They've collected over a million human votes, ranking llms and to display results in as as I say, + +01:15.790 --> 01:20.440 +in what's called an Elo rating type that will be familiar to chess players. + +01:20.440 --> 01:28.360 +It's a way of assessing an overall kind of leaderboard based on the outcomes of head to head battles. + +01:28.720 --> 01:32.410 +And you can see here some of the results. + +01:32.710 --> 01:43.570 +And we will now take a look at what we see in first place is a variant of GPT four called ChatGPT 4.0 + +01:43.600 --> 01:45.640 +that came out very recently. + +01:45.640 --> 01:54.820 +As of recording this video, it came out in early August 2024, and that previously GPT four was a little + +01:54.850 --> 01:56.170 +bit further down on this list. + +01:56.170 --> 02:05.470 +But with this latest trained, fine tuned model, uh ChatGPT four has taken the top position in the + +02:05.470 --> 02:10.490 +arena with an Elo of 1.316. + +02:10.700 --> 02:11.270 +Uh. + +02:11.420 --> 02:16.400 +A useful thing here is that the knowledge cut off is quoted over on the right. + +02:16.400 --> 02:20.210 +And so it's a if you want one place to go to see all the knowledge cut offs. + +02:20.240 --> 02:26.180 +Actually, the, uh, open the LMS chatbot arena is a good place to go, so that's worth knowing. 
+ +02:26.210 --> 02:30.860 +I don't think it's it's intended to be its primary reason, but it's a useful tidbit. + +02:31.490 --> 02:39.770 +Um, you'll see that Gemini 1.5 Pro actually is is doing really well in this human evaluated assessment. + +02:40.040 --> 02:49.730 +Um, grok two, which is the latest model from X, uh, formerly known as Twitter is in strong place + +02:49.730 --> 02:50.600 +here. + +02:50.630 --> 02:52.430 +Uh, grok one had not done so well. + +02:52.460 --> 02:54.860 +Grok two is really doing very well. + +02:55.250 --> 03:01.220 +Um, and it's beating the earlier GPT four zero from, from May. + +03:01.610 --> 03:05.410 +Um, and uh, it's, uh, yeah. + +03:05.410 --> 03:10.310 +Uh, and you can see there's not much in that ELO difference, but at this point there is fairly significant + +03:10.550 --> 03:17.720 +difference from the the leader Claude 3.5 Sonet, which at one point was was the front runner by by + +03:17.750 --> 03:26.240 +I think by quite a way, if I remember right when it came out has now come down to, uh, I guess equal, + +03:26.240 --> 03:27.650 +equal sixth or so. + +03:27.650 --> 03:38.450 +Um, uh, right down here in, in, uh, Elo of 1270 compared to 1316 for the top, uh, GPT chat, + +03:38.450 --> 03:44.930 +GPT four zero, um, then Gemini 1.5 flash, which of course is a somewhat smaller model. + +03:44.930 --> 03:52.610 +And here comes llama 3.1 405 billion at 1266. + +03:52.820 --> 03:57.140 +Uh, so, uh, yeah, very interesting to see this lineup. + +03:57.140 --> 03:59.840 +You can keep going to find your favorite models. + +03:59.840 --> 04:02.570 +Claude three opus is way down here. + +04:02.780 --> 04:11.810 +Uh, and, uh, yeah, this is a great place to get a sense of how humans consider the experience of + +04:11.820 --> 04:17.550 +chatting with different, uh, instruction tuned uh, chatbots. + +04:17.580 --> 04:23.400 +Uh, you'll see that, uh, um, command r that we were looking at before. 
+ +04:23.400 --> 04:27.120 +Uh, I just saw command R that features somewhere here. + +04:27.420 --> 04:30.810 +Uh, you will have to find that yourself. + +04:31.050 --> 04:32.310 +I've now lost it. + +04:32.310 --> 04:32.940 +There it is. + +04:32.940 --> 04:34.830 +Command R plus found it. + +04:34.950 --> 04:37.680 +Uh, there it is at 1213. + +04:37.680 --> 04:41.760 +Uh, it's one of the ones we looked at all the way back at the start in week one. + +04:41.940 --> 04:51.210 +So this is the famous LM, uh, LMS chatbot arena, and you can vote by clicking on this link, and + +04:51.210 --> 04:53.910 +it will bring up this voting screen. + +04:53.910 --> 04:55.290 +And here's how it works. + +04:55.290 --> 04:58.620 +You get presented with two models model A and model B. + +04:58.650 --> 05:02.160 +You don't know which one is which and you have to chat with them both. + +05:02.160 --> 05:05.910 +And so let's do one of our favorite little chat things. + +05:05.940 --> 05:19.930 +Uh, please tell me a light hearted light hearted joke suitable for a room full of data scientists. + +05:19.930 --> 05:26.920 +So this question will be asked to both models together and we will see how they answer. + +05:26.950 --> 05:31.990 +So the model A why do the data scientists break up with a computer? + +05:31.990 --> 05:37.270 +Because it had too many commitment issues with its null values. + +05:37.660 --> 05:43.480 +Okay, they did sciency, but not funny, I don't think. + +05:43.510 --> 05:44.860 +Maybe you found it funny. + +05:45.280 --> 05:47.200 +And model B, please tell me a joke. + +05:47.200 --> 05:48.310 +Here is a light hearted joke. + +05:48.310 --> 05:49.990 +Why does scientists go to the library? + +05:49.990 --> 05:52.720 +Because they heard it had a lot of volumes. + +05:52.840 --> 05:54.670 +Oh, yeah, that's pretty good, right? + +05:54.670 --> 05:58.030 +That's that's different plays on the dual meaning word volumes. 
+ +05:58.030 --> 05:59.050 +It gives a little explanation. + +05:59.080 --> 05:59.770 +The joke is simple. + +05:59.770 --> 06:04.540 +It's kind of pun that data scientists tend to appreciate as it combines technical expertise with a bit + +06:04.540 --> 06:05.410 +of wordplay. + +06:05.440 --> 06:06.790 +I think that's solid. + +06:06.790 --> 06:07.900 +That's solid. + +06:08.140 --> 06:10.630 +Uh, I think B is better, is what I want to go with. + +06:10.660 --> 06:12.310 +Hopefully you agree with me. + +06:12.340 --> 06:17.560 +Uh, if not, you need to go into the arena and do, uh, do a ask a similar question and make your + +06:17.560 --> 06:23.570 +own vote, but I'm going to vote on your behalf on all of our behalves by pressing the B is better button, + +06:23.570 --> 06:25.730 +and it will then reveal the models. + +06:25.730 --> 06:26.600 +Let's see. + +06:26.900 --> 06:34.490 +Uh, interestingly, the we picked Claude three haiku and we picked that against grok two. + +06:34.610 --> 06:39.740 +Uh, if I remember right, grok two was quite significantly higher than Claude three haiku. + +06:39.740 --> 06:45.050 +So we have slightly fractionally shifted that balance through our vote. + +06:45.050 --> 06:47.030 +So I hope you enjoyed that. + +06:47.060 --> 06:52.130 +Of course, that was somewhat unscientific, but the idea is it's meant to be a human decision based + +06:52.130 --> 06:53.690 +on your interaction. + +06:53.900 --> 06:57.260 +And the ask for you now is go and do this. + +06:57.290 --> 06:59.270 +It's an important way to add back to the community. + +06:59.300 --> 07:00.980 +It is data that will be used. + +07:01.010 --> 07:03.020 +Uh, plus it's a it's a lot of fun. + +07:03.050 --> 07:09.620 +Uh, and it gives you a bit of sense of the capabilities and scope of the different models and some + +07:09.620 --> 07:14.630 +hands on experience whilst contributing back to the data science community. 
+ +07:14.630 --> 07:20.930 +So enjoy spending some time on the chatbot arena, and I will see you next time to talk about commercial + +07:20.930 --> 07:21.710 +uses. diff --git a/week5/community-contributions/subtitles/srts/59295439/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295439/ja_JP.srt new file mode 100755 index 0000000..894632f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295439/ja_JP.srt @@ -0,0 +1,283 @@ +WEBVTT + +00:01.040 --> 00:05.300 +というのも、 私たちはリーダーボードの話をたくさんしてきたし、 + +00:05.300 --> 00:12.020 +おそらく皆さんはベンチマークやリーダーボードを見ることに疲れてきているからだ。 + +00:12.020 --> 00:13.010 +全部ブックマークしたんだね。 + +00:13.010 --> 00:14.780 +あなたは、 よし、 わかった、 と思っている。 + +00:14.780 --> 00:18.110 +LMSの選択に関しては、 素晴らしいリソースがたくさんあります。 + +00:18.110 --> 00:21.590 +あなたのリーダーボードに目を通してから、 そうするつもりだ。 + +00:21.590 --> 00:22.280 +でも、 もういい。 + +00:22.280 --> 00:23.390 +リーダーボードはもうたくさんだ。 + +00:23.390 --> 00:25.400 +だが、 もうひとつある。 + +00:25.400 --> 00:26.480 +本当に楽しいものだよ。 + +00:26.480 --> 00:27.770 +きっと気に入るはずだ。 + +00:27.800 --> 00:32.390 +だから、 この最終リーダーボードを私たちが見ても気にすることはないだろう。 + +00:32.390 --> 00:35.240 +しかも、 実は普通のリーダーボードではない。 + +00:35.240 --> 00:38.810 +それはアリーナであり、 LMSチャットボット・アリーナと呼ばれている。 + +00:38.810 --> 00:43.820 +そして、 これはとても人気があり、 楽しく、 多くの人が話題にするので、 すでに目にしたことがあるかもしれない。 + +00:43.820 --> 00:49.610 +だから、 もしもう知っているなら、 ちょっと待って、 我慢して。 + +00:49.610 --> 00:57.290 +つまり、 LMSのチャットボットの分野は、 非常に人気があり、 モデルが人間によって評価され、 + +00:57.290 --> 01:02.030 +チャットの能力が評価される場所だということです。 + +01:02.090 --> 01:05.830 +いわば、 クラウドソーシングのオープンプラットフォームだ。 + +01:05.830 --> 01:20.440 +彼らは100万人以上の人間の投票を集め、 llmsをランキングし、 その結果をチェスプレイヤーにはおなじみのEloレーティングと呼ばれるタイプで表示している。 + +01:20.440 --> 01:28.360 +頭脳戦の結果に基づいて、 総合的なリーダーボードのようなものを評価する方法だ。 + +01:28.720 --> 01:32.410 +その結果の一部をご覧いただきたい。 + +01:32.710 --> 01:45.640 +そして、 GPT 4の変形であるChatGPT 4と呼ばれるGPT 4を最初に見てみましょう。 0が出たのはごく最近のことだ。 + +01:45.640 --> 01:56.170 +このビデオを録画した時点では、 2024年8月初旬に発売され、 以前はGPT4はこのリストのもう少し下にあった。 + +01:56.170 --> 02:10.490 +しかし、 この最新の訓練され、 微調整されたモデルによって、 ChatGPTの4人はElo 
1でアリーナのトップに立った。 + +02:10.490 --> 02:10.490 +316. + +02:10.700 --> 02:11.270 +ええと。 + +02:11.420 --> 02:16.400 +ここで便利なのは、 知識の切り捨てが右側に引用されていることだ。 + +02:16.400 --> 02:20.210 +だから、 1つの場所に行けば、 すべての知識の切り口を見ることができる。 + +02:20.240 --> 02:26.180 +実際、 LMSチャットボットの分野をオープンにしていくのは良いことなので、 知っておいて損はない。 + +02:26.210 --> 02:30.860 +それが第一の理由だとは思わないが、 役に立つ豆知識ではある。 + +02:31.490 --> 02:39.770 +あの、 ジェミニ1号を見てください。 5 実際にプロは、 この人間による評価で本当によくやっている。 + +02:40.040 --> 02:50.600 +グロック2はXの最新モデルで、 以前はツイッターとして知られていた。 + +02:50.630 --> 02:52.430 +ええと、 グロック・ワンはあまりうまくいっていなかった。 + +02:52.460 --> 02:54.860 +グロック2は本当によくやっている。 + +02:55.250 --> 03:01.220 +ええと、 5月に行われたGPTのフォーゼロに勝っているんだ。 + +03:01.610 --> 03:05.410 +うーん、 そうだね。 + +03:05.410 --> 03:17.720 +ELOの差はあまりないのがわかると思うが、 この時点でリーダーのクロード3とはかなり大きな差がある。 + +03:17.720 --> 03:17.720 +5 + +03:17.750 --> 03:27.650 +ソネットは、 一時は、 私の記憶が正しければ、 発売当初はかなりの差をつけてトップランナーだったと思うのですが、 今では、 ええと、 同じくらい、 6番目くらいに下がってきています。 + +03:27.650 --> 03:38.450 +ええと、 ええと、 トップのGPTチャット、 GPT4ゼロ、 それからジェミニ1が1316なのに対して、 この下は、 + +03:38.450 --> 03:44.930 +ええと、 エロが1270なんだ。 5フラッシュは、 もちろんやや小型のモデルだ。 + +03:44.930 --> 03:52.610 +そしてラマ3の登場だ。 1 1266で4050億円。 + +03:52.820 --> 03:57.140 +このラインナップはとても興味深い。 + +03:57.140 --> 03:59.840 +お気に入りのモデルを見つけるために、 何度でも通うことができる。 + +03:59.840 --> 04:02.570 +クロードの3大オーパスはこの下にある。 + +04:02.780 --> 04:17.550 +ええと、 ええと、 これは、 人間がさまざまな、 ええと、 指導調整された、 ええと、 チャットボットとのチャットの経験をどのように考えているかを知るには絶好の場所です。 + +04:17.580 --> 04:23.400 +ええと、 さっき見ていたコマンドRが見えると思う。 + +04:23.400 --> 04:27.120 +あ、 今、 ここのどこかでコマンドRを見たよ。 + +04:27.420 --> 04:30.810 +それは自分で見つけるしかない。 + +04:31.050 --> 04:32.310 +今、 私はそれを失った。 + +04:32.310 --> 04:32.940 +あれだ。 + +04:32.940 --> 04:34.830 +コマンドRプラスが見つけた。 + +04:34.950 --> 04:37.680 +あ、 1213番だ。 + +04:37.680 --> 04:41.760 +ええと、 1週目の最初にずっと見ていたもののひとつだ。 + +04:41.940 --> 04:53.910 +このリンクをクリックすると、 投票画面が表示されます。 + +04:53.910 --> 04:55.290 +その仕組みはこうだ。 + +04:55.290 --> 04:58.620 +モデルAとモデルBの2つのモデルが提示される。 + +04:58.650 --> 05:02.160 +どっちがどっちかわからず、 両方とおしゃべりしなければならない。 + +05:02.160 --> 05:05.910 +そして、 
私たちのお気に入りのちょっとしたおしゃべりをしよう。 + +05:05.940 --> 05:19.930 +ええと、 データサイエンティストでいっぱいの部屋にふさわしい、 軽快なジョークを教えてください。 + +05:19.930 --> 05:26.920 +だから、 この質問は両モデルに一緒に投げかけ、 彼らがどう答えるかを見ることになる。 + +05:26.950 --> 05:31.990 +では、 モデルAはなぜデータサイエンティストはコンピューターと別れるのか? + +05:31.990 --> 05:37.270 +なぜなら、 ヌル値に対するコミットメントの問題が多すぎたからだ。 + +05:37.660 --> 05:43.480 +まあ、 科学的ではあったが、 面白くはなかったと思う。 + +05:43.510 --> 05:44.860 +もしかしたら面白いと思ったかもしれない。 + +05:45.280 --> 05:47.200 +モデルB、 ジョークを言ってくれ。 + +05:47.200 --> 05:48.310 +軽いジョークだ。 + +05:48.310 --> 05:49.990 +科学者はなぜ図書館に行くのか? + +05:49.990 --> 05:52.720 +ボリュームがあると聞いていたからだ。 + +05:52.840 --> 05:54.670 +ああ、 いい感じだろ? + +05:54.670 --> 05:58.030 +それは、 二重の意味を持つヴォリュームという言葉に対するさまざまな遊びだ。 + +05:58.030 --> 05:59.050 +少し説明がある。 + +05:59.080 --> 05:59.770 +冗談は簡単だ。 + +05:59.770 --> 06:05.410 +技術的な専門知識とちょっとした言葉遊びを組み合わせた、 データサイエンティストに好まれるダジャレの一種だ。 + +06:05.440 --> 06:06.790 +それは確かだと思う。 + +06:06.790 --> 06:07.900 +それは確かだ。 + +06:08.140 --> 06:10.630 +僕はBの方がいいと思う。 + +06:10.660 --> 06:12.310 +私の意見に同意してくれることを願っている。 + +06:12.340 --> 06:17.560 +もしそうでなければ、 アリーナに行って、 + +06:17.560 --> 06:25.730 +同じような質問をして、 自分で投票してください。 + +06:25.730 --> 06:26.600 +見てみよう。 + +06:26.900 --> 06:34.490 +面白いことに、 クロードの3つの俳句を選び、 グロック2に対してそれを選んだんだ。 + +06:34.610 --> 06:39.740 +ええと、 私の記憶が正しければ、 グロック2はクロード3の俳句よりかなり高かった。 + +06:39.740 --> 06:45.050 +だから、 私たちの投票によって、 そのバランスがわずかに変化したのだ。 + +06:45.050 --> 06:47.030 +だから、 楽しんでもらえたと思う。 + +06:47.060 --> 06:53.690 +もちろん、 それはやや非科学的なものだが、 考え方としては、 あなたの相互作用に基づいた人間的な判断ということだ。 + +06:53.900 --> 06:57.260 +そして今、 あなたに求められているのは、 行ってこれを実行することだ。 + +06:57.290 --> 06:59.270 +地域社会に恩返しをする重要な方法だ。 + +06:59.300 --> 07:00.980 +使用されるのはデータだ。 + +07:01.010 --> 07:03.020 +それに、 とても楽しいよ。 + +07:03.050 --> 07:09.620 +そして、 データサイエンス・コミュニティに貢献しながら、 さまざまなモデルの能力と範囲を感じ、 + +07:09.620 --> 07:14.630 +実際に体験することができます。 + +07:14.630 --> 07:21.710 +それでは、 チャットボット・アリーナでの時間をお楽しみください。 diff --git a/week5/community-contributions/subtitles/srts/59295439/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295439/ko_KR.srt 
new file mode 100755 index 0000000..8ed5e7f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295439/ko_KR.srt @@ -0,0 +1,313 @@ +WEBVTT + +00:01.040 --> 00:05.300 +여러분이 순위표에 질릴 위험이 있다는 걸 압니다 순위표에 + +00:05.300 --> 00:12.020 +대해 많은 얘기를 나눴는데 벤치마크와 순위표만 보는 데 지쳤을 수도 있어요 + +00:12.020 --> 00:13.010 +다 즐겨찾기 해놨잖아요 + +00:13.010 --> 00:14.780 +Get it, get it, get.라고 생각하죠 + +00:14.780 --> 00:18.110 +LMS를 선택할 때 훌륭한 리소스가 많아요 + +00:18.110 --> 00:21.590 +리더보드를 보고 나서 그렇게 할게요 + +00:21.590 --> 00:22.280 +하지만 그만하죠 + +00:22.280 --> 00:23.390 +순위표는 그만 봐요 + +00:23.390 --> 00:25.400 +잠깐만요, 하나 더 있어요 + +00:25.400 --> 00:26.480 +정말 재미있어요 + +00:26.480 --> 00:27.770 +여러분도 좋아하실 거예요 + +00:27.800 --> 00:32.390 +그러니 순위표는 신경 쓰지 않아도 되겠죠 + +00:32.390 --> 00:35.240 +사실 평범한 순위표는 아니에요 + +00:35.240 --> 00:38.810 +LMS 챗봇 아레나라는 아레나예요 + +00:38.810 --> 00:43.820 +이미 들어봤을 거예요 아주 인기 있고 재미있고 많은 사람이 얘기하니까요 + +00:43.820 --> 00:48.860 +이미 알고 계신다면 조금만 더 기다려 주세요 함께 사용해 보면서 재밌게 해 볼 + +00:48.860 --> 00:49.610 +테니까요 + +00:49.610 --> 00:57.290 +LMS 챗봇 아레나는 아주 인기가 많은데 채팅 능력을 평가하는 + +00:57.290 --> 01:02.030 +참가자들이 모델을 평가하는 곳이죠 + +01:02.090 --> 01:05.830 +크라우드소싱한 오픈 플랫폼이라고 하죠 + +01:05.830 --> 01:15.760 +100만 개가 넘는 인권 투표를 모아 llms의 순위를 매기고 결과를 표시합니다 체스 선수들에게 + +01:15.790 --> 01:20.440 +익숙한 Elo 평가 방식이죠 + +01:20.440 --> 01:28.360 +헤드 라이딩의 결과에 따라 전체 순위표를 평가하는 방식이에요 + +01:28.720 --> 01:32.410 +여기 결과를 좀 보세요 + +01:32.710 --> 01:45.640 +지금부터 살펴볼 것은 1위에 오른 GPT 4의 변종입니다 챗GPT 4라고 하죠 최근에 나온 0개요 + +01:45.640 --> 01:54.820 +이 영상은 2024년 8월 초에 공개됐는데 이전 GPT 4는 순위표에서 조금 더 아래였어요 + +01:54.850 --> 01:56.170 +비트 코어링 + +01:56.170 --> 02:05.470 +하지만 최신식 정교하게 훈련된 모델 챗GPT 4가 Elo 1을 기록하며 이 경기장의 + +02:05.470 --> 02:10.490 +선두를 차지했죠 316번요 + +02:10.700 --> 02:11.270 +네 + +02:11.420 --> 02:16.400 +여기서 유용한 건 지식이 차단된 부분이 오른쪽에 인용됐다는 거죠 + +02:16.400 --> 02:20.210 +모든 지식을 단절된 곳을 한 군데서 보고 싶다면요 + +02:20.240 --> 02:26.180 +LMS 챗봇 아레나도 좋은 곳이니까 알아두면 좋죠 + +02:26.210 --> 02:30.860 +그게 주요한 이유가 될 의도는 아니었을 거예요 하지만 유용한 정보죠 + +02:31.490 --> 02:39.770 
+제미니 1호가 보일 거예요 5프로 이즈는 인체 평가에서 아주 잘하고 있어요 + +02:40.040 --> 02:49.730 +그록 2는 트위터였던 엑스 사의 최신 모델로 현재 강세를 보이고 + +02:49.730 --> 02:50.600 +있죠 + +02:50.630 --> 02:52.430 +그록 1은 잘하지 못했어요 + +02:52.460 --> 02:54.860 +그록 2는 아주 잘하고 있어요 + +02:55.250 --> 03:01.220 +5월 GPT 40 기록을 앞섰어요 + +03:01.610 --> 03:05.410 +네, 맞아요 + +03:05.410 --> 03:10.310 +ELO에서는 차이가 별로 안 나지만 현재 1위인 클로드 + +03:10.550 --> 03:17.720 +3과는 상당히 큰 차이가 있어요 5 소넷은 한때 선두주자였어요 + +03:17.750 --> 03:27.650 +제 기억으로는 출시 당시만 해도요 지금은 거의 6위까지 올라왔고요 + +03:27.650 --> 03:38.450 +여기 아래는 1270이고 위쪽은 1316이에요 GPT 채팅방은 1316이고 + +03:38.450 --> 03:44.930 +그다음은 제미니 1호죠 5 플래시요 물론 좀 더 작은 모델이죠 + +03:44.930 --> 03:52.610 +라마 3이 나오네요 1266에 1억 4,500억 달러요 + +03:52.820 --> 03:57.140 +이 라인업을 보는 게 아주 흥미로워요 + +03:57.140 --> 03:59.840 +좋아하는 모델을 계속 찾을 수 있어요 + +03:59.840 --> 04:02.570 +클로드 3백작은 이 아래에 있어요 + +04:02.780 --> 04:11.810 +그리고 여긴 사용법이 정립된 다양한 챗봇과 채팅하는 경험을 인간이 어떻게 생각하는지 + +04:11.820 --> 04:17.550 +알 수 있는 좋은 곳이에요 Get it + +04:17.580 --> 04:23.400 +아까 봤던 명령 r이 보일 거예요 + +04:23.400 --> 04:27.120 +명령어 R이 여기 어디 있는 걸 봤어요 + +04:27.420 --> 04:30.810 +그건 스스로 찾아야 해요 + +04:31.050 --> 04:32.310 +이제 못 하겠어요 + +04:32.310 --> 04:32.940 +저기 있네요 + +04:32.940 --> 04:34.830 +커맨드 R 플러스예요 + +04:34.950 --> 04:37.680 +12시 13분이에요 + +04:37.680 --> 04:41.760 +첫째 주에 시작할 때부터 계속 살펴본 부분이에요 + +04:41.940 --> 04:51.210 +여기는 그 유명한 LMS 챗봇 아레나입니다 이 링크를 클릭하면 투표할 수 있어요 그럼 + +04:51.210 --> 04:53.910 +이 투표 화면이 나오죠 + +04:53.910 --> 04:55.290 +이렇게 하는 거예요 + +04:55.290 --> 04:58.620 +모델 A와 B가 두 가지씩 준비돼 있어요. Get it. + +04:58.650 --> 05:02.160 +누가 누군지 모르고 둘 다와 대화를 해야 해요 + +05:02.160 --> 05:05.910 +우리가 좋아하는 간단한 대화를 해보죠 + +05:05.940 --> 05:19.930 +가벼운 농담 하나 해 주세요 데이터 과학자들이 가득한 방에 어울리는 거로요 + +05:19.930 --> 05:26.920 +두 모델이 동시에 이 질문을 받고 어떻게 대답하는지 보죠 + +05:26.950 --> 05:31.990 +모델 A는 왜 과학자들이 컴퓨터와 분리하는 걸까요? 
+ +05:31.990 --> 05:37.270 +아무 가치도 없는 가치 때문에 헌신에 문제가 많았거든요 + +05:37.660 --> 05:43.480 +과학적이긴 했지만 재미는 없었어요 + +05:43.510 --> 05:44.860 +재미있었나 보죠 + +05:45.280 --> 05:47.200 +모델 B, 농담 하나 해 주세요 + +05:47.200 --> 05:48.310 +가벼운 농담이 있어요 + +05:48.310 --> 05:49.990 +과학자들은 왜 라이브러리에 가죠? + +05:49.990 --> 05:52.720 +양이 많다고 들었거든요 + +05:52.840 --> 05:54.670 +네, 꽤 괜찮죠? + +05:54.670 --> 05:58.030 +그건 이중적 의미의 볼륨을 사용하는 거예요 + +05:58.030 --> 05:59.050 +설명이 나오죠 + +05:59.080 --> 05:59.770 +농담은 간단해요 + +05:59.770 --> 06:04.540 +데이터 과학자들이 좋아하는 말장난이죠 기술적 전문성과 약간의 말장난이 합쳐진 + +06:04.540 --> 06:05.410 +거니까요 비트 + +06:05.440 --> 06:06.790 +튼튼한 것 같아요 + +06:06.790 --> 06:07.900 +탄탄하네요 + +06:08.140 --> 06:10.630 +B가 더 나은 것 같아요 + +06:10.660 --> 06:12.310 +동의하시길 바라요 + +06:12.340 --> 06:17.560 +그게 아니라면 경기장에 들어가서 비슷한 질문을 하고 투표하세요 + +06:17.560 --> 06:23.570 +하지만 제가 여러분을 대신해 투표하겠습니다 B가 더 낫다 버튼을 누르면 + +06:23.570 --> 06:25.730 +모델이 공개될 거예요 + +06:25.730 --> 06:26.600 +어디 보죠 + +06:26.900 --> 06:34.490 +흥미롭게도 클로드 3 하이쿠와 그록 2를 대조했어요 + +06:34.610 --> 06:39.740 +제 기억이 맞는다면 그록 2가 클로드 3 하이쿠보다 훨씬 높았어요 + +06:39.740 --> 06:45.050 +투표를 통해 균형이 약간 흔들린 거죠 + +06:45.050 --> 06:47.030 +즐겁게 보셨길 바라요 + +06:47.060 --> 06:52.130 +물론 비과학적인 방법이긴 하지만 상호 작용에 근거해 인간이 결정해야 + +06:52.130 --> 06:53.690 +한다는 게 핵심이죠 + +06:53.900 --> 06:57.260 +지금 당신에게 필요한 건 가서 이걸 하는 거예요 + +06:57.290 --> 06:59.270 +지역 사회에 다시 기여하는 중요한 방법이죠 + +06:59.300 --> 07:00.980 +사용될 데이터는 데이터죠 + +07:01.010 --> 07:03.020 +게다가 정말 재미있어요 + +07:03.050 --> 07:09.620 +다양한 모델의 기능과 스코프를 알게 되고 직접 체험해 보는 경험을 + +07:09.620 --> 07:14.630 +통해 데이터 과학계에 비트를 더할 수 있죠 + +07:14.630 --> 07:20.930 +챗봇 아레나에서 즐거운 시간 보내시고 다음 시간에는 상업적 용도에 대해 얘기 + +07:20.930 --> 07:21.710 +나누죠 diff --git a/week5/community-contributions/subtitles/srts/59295441/en_US.srt b/week5/community-contributions/subtitles/srts/59295441/en_US.srt new file mode 100755 index 0000000..2ad4524 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295441/en_US.srt @@ -0,0 +1,286 @@ +WEBVTT + +00:00.620 --> 00:08.150 
+Okay, so welcome to our leaderboard fast as we go through a ton of essential leaderboards for your + +00:08.150 --> 00:08.900 +collection. + +00:08.900 --> 00:13.160 +The first of them, the Big Code Models leaderboard. + +00:13.430 --> 00:18.440 +You can see the URL there, but you can also just search for it in hugging face. + +00:18.590 --> 00:23.600 +Um, and as with all of them, it's running as a spaces um, app. + +00:23.600 --> 00:27.920 +And also we'll include links in the class resources. + +00:28.070 --> 00:34.430 +Um, so what you see here is the, uh, set of models. + +00:34.430 --> 00:36.890 +Let me start by filtering just on base models. + +00:36.890 --> 00:39.590 +So it's names that we all recognize. + +00:39.590 --> 00:47.900 +And you see the scores against the, uh, set of the human eval tests that I mentioned before, which + +00:47.900 --> 00:53.360 +is Python tests and then tests against Java and JavaScript and C plus plus. + +00:53.360 --> 00:59.930 +So you can compare the performance of these models in against different programming languages. + +00:59.930 --> 01:05.180 +And the win rate is a sort of a similar to like an average across them. + +01:05.180 --> 01:12.410 +And if you go to the about page, you'll get more information about how those are figured out and the, + +01:12.410 --> 01:19.820 +the way that this this column is calculated, you can see that the top model for coding is a specialized + +01:19.820 --> 01:23.570 +version of Queen for writing code that is called code Queen. + +01:23.570 --> 01:27.740 +And there's also a code llama that's not far behind. + +01:27.860 --> 01:31.340 +Deep Sea Coder is a model that's doing very well. + +01:31.460 --> 01:34.610 +Uh, a variant of Code Llama. + +01:34.610 --> 01:39.800 +And then Star Coder two is the model that we used ourselves early on. + +01:39.860 --> 01:43.160 +And so Star Coder two features here as well. 
+ +01:43.340 --> 01:48.110 +Um, and then Code Gemma, which is Google's open source code generation model. + +01:48.110 --> 01:54.680 +If we include all, then we'll include some of the ones that have been, uh, tuned specifically on + +01:54.680 --> 01:56.690 +more specific data sets. + +01:56.690 --> 02:01.190 +And you'll see that actually, if you compare the scores, uh, it really Yeah. + +02:01.400 --> 02:09.470 +There's been a see that the code is far down now that a lot have been fine tuned to do much better. + +02:09.590 --> 02:19.190 +Um, somewhat surprisingly, Code Kwan 1.5 chat uh, seems to be, uh, outperforming the 1.5, uh, + +02:19.190 --> 02:23.480 +um, 7 billion down here, but but there may be various reasons for that. + +02:23.480 --> 02:28.400 +It might be to do with the way the data set that's been used to fine train it for that purpose, um, + +02:28.400 --> 02:31.010 +along with the kinds of questions that are asked here. + +02:31.100 --> 02:37.670 +So if the specific problem you're looking to solve involves coding the big code models, leaderboard, + +02:37.670 --> 02:39.200 +is the leaderboard for you. + +02:39.980 --> 02:45.380 +Next one we're going to look at is called the LM perf leaderboard, which is about looking at the performance + +02:45.380 --> 02:51.050 +of different models around things like their speed, their memory consumption and the like. + +02:51.080 --> 02:57.170 +And if you go to the leaderboard itself, you find the models listed out here with their various variations + +02:57.170 --> 03:02.570 +and then information about their speed, their consumption of energy and memory and so on. 
+ +03:02.570 --> 03:10.310 +But I would actually suggest that you don't start with with with that page, but instead you flip to + +03:10.340 --> 03:16.340 +this, find your best model, you choose the hardware architecture that you're looking at, and then + +03:16.340 --> 03:22.640 +you pick find your best model and what you see here when you go to find your best model is this very + +03:22.640 --> 03:31.100 +interesting chart, which is actually a diagram that's displaying, uh, at least three different, + +03:31.100 --> 03:32.090 +uh, quantities. + +03:32.120 --> 03:35.930 +But if not, you could argue for, uh, along the x axis. + +03:35.930 --> 03:38.360 +Here you are seeing something about the speed. + +03:38.360 --> 03:42.650 +It's the time that this model takes to generate 64 tokens. + +03:42.650 --> 03:45.530 +So obviously the more to the left is better. + +03:45.530 --> 03:47.420 +It means faster time. + +03:47.420 --> 03:54.110 +We're looking for models that come to the left if you care about performance, uh, speed, performance. + +03:54.140 --> 03:59.450 +If you care about accuracy, that kind of performance, then you could you could use the total. + +03:59.480 --> 04:07.500 +The open LM score, that that, uh, aggregate score as your measure of model accuracy. + +04:07.680 --> 04:14.760 +If you care about the cost in terms of the memory footprint and a sense of the of the magnitude of hardware + +04:14.760 --> 04:19.890 +that you're going to need to run this model, then you need to look at the size of the blob. + +04:19.920 --> 04:27.480 +A bigger blob represents a greater memory need, and so it gives you a sense of what you'll need there. + +04:27.480 --> 04:33.480 +So we are looking ideally for models that are small blobs that are over on the left and that are quite + +04:33.510 --> 04:34.620 +high up. + +04:34.620 --> 04:39.720 +That would be a nice result for us if we don't need it to be high up. 
+ +04:39.720 --> 04:44.820 +Particularly you can see a model like this one is doing really, really well and it is a Quan the Quan + +04:44.850 --> 04:47.040 +1.5 variant. + +04:47.130 --> 04:53.250 +Um, if you look right up here, if what you care most about is something which perform, which has + +04:53.250 --> 04:59.820 +very great, very strong accuracy in terms of its benchmark scores and is also quite fast. + +05:00.090 --> 05:06.720 +Uh, then maybe you come to this one here, which you can see is Llama Llama three model. + +05:07.050 --> 05:11.970 +And that does bring me to the final point, which is that the other, uh, bit of information that's + +05:11.970 --> 05:17.340 +being expressed in this chart is the family of models, but that is expressed by the color of blob. + +05:17.340 --> 05:24.870 +And you can see over here, uh, how to read those colors like yellow means it's a phi model or a Phi + +05:24.900 --> 05:25.830 +trained model. + +05:25.830 --> 05:29.250 +And you'll see the phi, uh, yellow model over there. + +05:29.610 --> 05:36.900 +So when it comes to the trade offs between speed, accuracy and memory footprint, which will affect + +05:36.900 --> 05:43.290 +your running costs, uh, for open source models, this is a fantastic resource. + +05:43.290 --> 05:51.330 +The, uh, perf leaderboard, uh, come to this, always turn to the Find your Best Model tab and browse + +05:51.330 --> 05:55.020 +around to understand what your options are. + +05:55.020 --> 06:01.260 +And this for example, if we're talking about a T4 hardware, then you would flip to the T4 tab to see + +06:01.260 --> 06:04.320 +what kind of, uh, options you have here. + +06:04.320 --> 06:08.400 +And looking that's the 01I hear. + +06:08.430 --> 06:10.560 +Is Kwan again doing well. + +06:10.830 --> 06:17.250 +And you can see other other models that might be most appropriate for you based on your use case. 
+ +06:19.140 --> 06:26.040 +And now I just want to mention that there is a spaces uh, there is a you could you could go to spaces + +06:26.040 --> 06:27.540 +and search for leaderboards. + +06:27.540 --> 06:32.670 +All I've done here is done a leaderboard search in spaces, and you will see all of the different leaderboards + +06:32.670 --> 06:41.340 +that are out there that you could look at to see more details about your benchmarks of your LMS. + +06:41.610 --> 06:48.780 +And if you're not overwhelmed with the amount of information here, there is great, great utility to + +06:48.810 --> 06:51.420 +looking at these different leaderboards I mentioned a moment ago. + +06:51.420 --> 06:53.370 +There is a Portuguese focused leaderboard. + +06:53.370 --> 07:00.840 +You'll find many languages have their own leaderboards specifically to assess the abilities. + +07:00.840 --> 07:03.390 +I mentioned the open medical leaderboard. + +07:03.390 --> 07:10.350 +Let's bring this one up and you can see that there's a bunch of medical specific benchmarks like clinical + +07:10.350 --> 07:17.520 +knowledge, college biology, medical genetics, uh, and uh, and PubMed QA. + +07:17.610 --> 07:22.230 +And these are then scored against medical models. + +07:22.230 --> 07:29.280 +So if you were trying to build a solution that was designed for medical use cases, this is the leaderboard + +07:29.280 --> 07:31.050 +that you would come to right away. + +07:31.260 --> 07:38.490 +Um, and so really the and typically the, the, the about page will give you that extra information + +07:38.490 --> 07:42.510 +about the data sets, how it's used, how they are calculated. + +07:42.540 --> 07:50.820 +So this should give you a really good sense of how you select the right set of, of uh, open source + +07:50.820 --> 07:52.800 +models for the problem at hand. 
+ +07:52.830 --> 07:59.340 +How you find a useful leaderboard and how you, uh, interpret the different metrics and can rank the + +07:59.340 --> 08:00.720 +different models out there. + +08:00.750 --> 08:05.250 +Next time we'll look at some leaderboards that combine open source and closed source. diff --git a/week5/community-contributions/subtitles/srts/59295441/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295441/ja_JP.srt new file mode 100755 index 0000000..37fa044 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295441/ja_JP.srt @@ -0,0 +1,238 @@ +WEBVTT + +00:00.620 --> 00:08.900 +さて、 リーダーボードのコレクションに欠かせないリーダーボードの数々をご紹介しよう。 + +00:08.900 --> 00:13.160 +その第一弾が、 ビッグ・コード・モデルのリーダーボードだ。 + +00:13.430 --> 00:18.440 +URLはそちらをご覧いただきたいが、 ハギング・フェイスで検索していただいても結構だ。 + +00:18.590 --> 00:23.600 +他のアプリと同じように、 スペースとして動いているんだ。 + +00:23.600 --> 00:27.920 +また、 授業で使う資料にもリンクを貼ります。 + +00:28.070 --> 00:34.430 +ええと、 ここにあるのはモデルのセットです。 + +00:34.430 --> 00:36.890 +まずは、 ベースモデルだけでフィルタリングしてみよう。 + +00:36.890 --> 00:39.590 +だから、 誰もが知っている名前なんだ。 + +00:39.590 --> 00:47.900 +Pythonのテストと、 Java、 JavaScript、 + +00:47.900 --> 00:53.360 +C++のテストです。 + +00:53.360 --> 00:59.930 +そのため、 異なるプログラミング言語に対して、 これらのモデルのパフォーマンスを比較することができる。 + +00:59.930 --> 01:05.180 +そして、 勝率はそれぞれの平均のようなものだ。 + +01:05.180 --> 01:12.410 +アバウト・ページに行けば、 その計算方法や、 + +01:12.410 --> 01:23.570 +このコラムの計算方法について、 より詳しい情報を得ることができる。 + +01:23.570 --> 01:27.740 +また、 コード・ラマもすぐ近くにいる。 + +01:27.860 --> 01:31.340 +ディープ・シー・コーダーは非常にうまくいっているモデルだ。 + +01:31.460 --> 01:34.610 +コード・ラマの亜種だね。 + +01:34.610 --> 01:39.800 +そしてスターコーダー2は、 私たち自身が初期に使っていたモデルだ。 + +01:39.860 --> 01:43.160 +そして、 Star Coderの2つの特徴がここにもある。 + +01:43.340 --> 01:48.110 +それから、 Googleのオープンソース・コード生成モデルであるCode Gemma。 + +01:48.110 --> 01:56.690 +もしすべてを含めるのであれば、 より特定のデータセットに特化して調整されたものも含めることになる。 + +01:56.690 --> 02:01.190 +そして、 実際に点数を比べてみればわかると思うが、 本当にそうなんだ。 + +02:01.400 --> 02:09.470 +現在、 多くの選手がより良いプレーができるように微調整されている。 + +02:09.590 --> 02:19.190 +うーん、 ちょっと意外なのはコード・クワン1だ。 
5チャットが、 1チャットを上回っているようだ。 50億、 ええと、 70億、 この下ですが、 しかし、 + +02:19.190 --> 02:23.480 +それにはいろいろな理由があるかもしれません。 + +02:23.480 --> 02:31.010 +そのために使用されたデータセットの精緻な訓練方法と、 ここでの質問の種類が関係しているのかもしれない。 + +02:31.100 --> 02:39.200 +だから、 もしあなたが解決しようとしている特定の問題が、 大きなコードモデルのコーディングに関係しているなら、 リーダーボードはあなたのためのリーダーボードなのだ。 + +02:39.980 --> 02:45.380 +次に見ていくのは、 LM perf leaderboardと呼ばれるもので、 スピードやメモリ消費量など、 + +02:45.380 --> 02:51.050 +さまざまなモデルのパフォーマンスを見るものだ。 + +02:51.080 --> 02:57.170 +そして、 リーダーボードにアクセスすると、 さまざまなバリエーションと、 スピード、 + +02:57.170 --> 03:02.570 +エネルギー消費量、 メモリー消費量などの情報がリストアップされている。 + +03:02.570 --> 03:10.310 +しかし、 実際には、 そのページから始めるのではなく、 このページを開き、 + +03:10.340 --> 03:16.340 +最適なモデルを見つけ、 見ているハードウェア・アーキテクチャを選択し、 + +03:16.340 --> 03:22.640 +最適なモデルを見つけるを選ぶことをお勧めします。 最適なモデルを見つけに行くと、 + +03:22.640 --> 03:32.090 +この非常に興味深いチャートが表示されます。 + +03:32.120 --> 03:35.930 +しかし、 もしそうでないなら、 X軸に沿った議論もできるだろう。 + +03:35.930 --> 03:38.360 +ここで、 あなたはスピードに関する何かを見ている。 + +03:38.360 --> 03:42.650 +このモデルが64個のトークンを生成するのに要する時間だ。 + +03:42.650 --> 03:45.530 +だから、 左寄りの方がいいに決まっている。 + +03:45.530 --> 03:47.420 +つまり、 より速いタイムということだ。 + +03:47.420 --> 03:54.110 +パフォーマンス、 スピード、 性能を重視するなら、 左側に来るモデルを探している。 + +03:54.140 --> 03:59.450 +精度やそのような性能にこだわるのであれば、 トータルで使うこともできるだろう。 + +03:59.480 --> 04:07.500 +オープンLMスコアは、 モデルの正確さの指標となる総合スコアだ。 + +04:07.680 --> 04:14.760 +メモリフットプリントや、 このモデルを実行するために必要なハードウェアの大きさの意味でのコストを気にするのであれば、 + +04:14.760 --> 04:19.890 +ブロブのサイズに注目する必要がある。 + +04:19.920 --> 04:27.480 +より大きな塊は、 より大きなメモリーを必要とすることを表し、 そのため、 そこで何が必要になるかを知ることができる。 + +04:27.480 --> 04:34.620 +そのため、 私たちは左側にある小さな塊で、 かなり高い位置にあるモデルを理想的に探している。 + +04:34.620 --> 04:39.720 +上位である必要がないのであれば、 我々にとってはいい結果だろう。 + +04:39.720 --> 04:47.040 +特に、 このようなモデルが本当に、 本当にうまくいっているのがわかるだろう。 + +04:47.040 --> 04:47.040 +5バリアント。 + +04:47.130 --> 04:53.250 +もし、 あなたが最も気にするのが、 ベンチマークスコアが非常に高く、 非常に正確で、 + +04:53.250 --> 04:59.820 +しかもかなり速いものだとしたら、 ここを右に見てください。 + +05:00.090 --> 05:06.720 +ええと、 それから多分、 ここに来て、 これはララマ・ラマの3モデルです。 + +05:07.050 --> 05:11.970 +つまり、 このチャートで表現されているもう1つの情報は、 
モデルのファミリーであるが、 + +05:11.970 --> 05:17.340 +それはブロブの色で表現されているということだ。 + +05:17.340 --> 05:25.830 +黄色はファイ(φ)モデル、 ファイ(φ)モデルはファイ(φ)モデルです。 + +05:25.830 --> 05:29.250 +そして、 あそこにファイ、 あー、 黄色いモデルが見えるだろう。 + +05:29.610 --> 05:36.900 +つまり、 スピード、 精度、 メモリフットプリントがトレードオフになり、 + +05:36.900 --> 05:43.290 +ランニングコストに影響するということだ。 + +05:43.290 --> 05:55.020 +ベストモデルを探す」タブを開き、 自分の選択肢を理解するためにいろいろと見て回ることだ。 + +05:55.020 --> 06:04.320 +例えば、 T4ハードウェアについて話しているのであれば、 T4タブに移動して、 どんなオプションがあるかを確認することになる。 + +06:04.320 --> 06:08.400 +そして、 それが私の耳にした01だ。 + +06:08.430 --> 06:10.560 +クワンはまた好調なのか。 + +06:10.830 --> 06:17.250 +また、 あなたのユースケースに基づいて、 あなたに最も適した他のモデルも見ることができる。 + +06:19.140 --> 06:27.540 +それから、 スペースには......スペースに行って、 リーダーボードを検索できるんだ。 + +06:27.540 --> 06:32.670 +ここで私がしたことは、 スペースでリーダーボードを検索することで、 + +06:32.670 --> 06:41.340 +LMSのベンチマークに関する詳細を見るために見ることができるさまざまなリーダーボードがすべて表示されます。 + +06:41.610 --> 06:48.780 +そして、 この情報量に圧倒されないのであれば、 先ほど述べたさまざまなリーダーボードを見ることは、 + +06:48.810 --> 06:51.420 +とてもとても有益なことなのだ。 + +06:51.420 --> 06:53.370 +ポルトガル語に特化したリーダーボードがある。 + +06:53.370 --> 07:00.840 +多くの言語には、 能力を評価するための独自のリーダーボードがある。 + +07:00.840 --> 07:03.390 +私はオープンメディカルリーダーボードについて述べた。 + +07:03.390 --> 07:17.520 +臨床知識、 大学生物学、 医療遺伝学、 PubMed QAなど、 医療に特化したベンチマークがたくさんあるのがわかるだろう。 + +07:17.610 --> 07:22.230 +そしてそれらを医療モデルと照らし合わせて採点する。 + +07:22.230 --> 07:31.050 +だから、 もしあなたが医療用の使用例を想定したソリューションを構築しようとしているなら、 このリーダーボードにすぐにたどり着くだろう。 + +07:31.260 --> 07:42.510 +そのため、 データセットやその使用方法、 計算方法については、 通常、 アバウトページに追加情報が記載されています。 + +07:42.540 --> 07:52.800 +これで、 目の前の問題に対して適切なオープンソースモデルをどのように選択すればいいのかがよくわかるはずだ。 + +07:52.830 --> 08:00.720 +役に立つリーダーボードをどのように見つけ、 どのようにさまざまな指標を解釈し、 さまざまなモデルをランク付けするのか。 + +08:00.750 --> 08:05.250 +次回は、 オープンソースとクローズドソースを組み合わせたリーダーボードを見てみよう。 diff --git a/week5/community-contributions/subtitles/srts/59295441/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295441/ko_KR.srt new file mode 100755 index 0000000..6e01b74 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295441/ko_KR.srt @@ -0,0 +1,274 @@ 
+WEBVTT + +00:00.620 --> 00:08.900 +네, 순위표에 오신 걸 환영합니다 여러분의 컬렉션에 필요한 필수 순위표를 살펴보고 있는데요 + +00:08.900 --> 00:13.160 +그 중 첫 번째는 빅 코드 모델의 leaderboard죠 + +00:13.430 --> 00:18.440 +URL 보이시죠? 하지만 안는 얼굴에서 검색할 수도 있어요 + +00:18.590 --> 00:23.600 +다른 것들처럼 스페이스 앱으로 실행되고 있어요 + +00:23.600 --> 00:27.920 +클래스 리소스에 링크도 포함시킬 거예요 + +00:28.070 --> 00:34.430 +여기 보시는 건 모형 세트예요 + +00:34.430 --> 00:36.890 +베이스 모델에서 필터링부터 할게요 + +00:36.890 --> 00:39.590 +우리 모두 아는 이름이죠 + +00:39.590 --> 00:47.900 +그리고 점수를 보면 앞서 언급했던 인체 평가 시험 세트인 파이썬 테스트와 + +00:47.900 --> 00:53.360 +자바스크립트 테스트 C 플러스 테스트가 있죠 + +00:53.360 --> 00:59.930 +이 모델의 성능을 다른 프로그래밍 언어와 비교할 수 있어요 + +00:59.930 --> 01:05.180 +승률은 전체의 평균과 비슷하다고 할 수 있죠 + +01:05.180 --> 01:12.410 +get each 페이지로 가시면 더 많은 정보를 얻을 수 있습니다. 이 열을 계산하는 + +01:12.410 --> 01:19.820 +방식을 보면 코딩의 가장 윗부분이 코드를 작성하기 위한 퀸의 특수 버전인 것을 볼 수 있습니다. + +01:19.820 --> 01:23.570 +이것을 코드 퀸이라고 부르죠. + +01:23.570 --> 01:27.740 +근처에 라마 코드도 있어요 + +01:27.860 --> 01:31.340 +심해 코더는 아주 잘 나가는 모델이에요 + +01:31.460 --> 01:34.610 +코드 라마의 변종이죠 + +01:34.610 --> 01:39.800 +스타 코더 2는 초기에 우리가 사용한 모델이죠 + +01:39.860 --> 01:43.160 +스타 코더 2의 기능도 여기 있고요 + +01:43.340 --> 01:48.110 +다음은 코드 젬마 구글의 오픈 소스 코드 생성 모델이죠 + +01:48.110 --> 01:54.680 +전부를 포함하면 좀 더 구체적인 데이터 세트에 맞춰 조정된 일부도 + +01:54.680 --> 01:56.690 +포함할 거예요 + +01:56.690 --> 02:01.190 +점수를 비교해 보면 알 수 있어요 + +02:01.400 --> 02:09.470 +요즘 코드는 훨씬 더 낮아졌고 많은 것들이 더 잘 튜닝되었어요 + +02:09.590 --> 02:19.190 +좀 놀랍지만 코드 콴 1이에요 채팅 5개가 1을 능가하는 것 같아요 여기에는 50억 70억 명이 있어요 + +02:19.190 --> 02:23.480 +하지만 여러 가지 이유가 있을 수 있죠 + +02:23.480 --> 02:28.400 +그 목적으로 데이터를 훈련하는 데 사용된 방식 때문일 수도 있고 여기서 + +02:28.400 --> 02:31.010 +제기되는 질문 때문일 수도 있어요 + +02:31.100 --> 02:37.670 +여러분이 풀어야 할 문제가 대형 코드 모델 코딩과 관련 있다면 leaderboard가 + +02:37.670 --> 02:39.200 +딱이죠 + +02:39.980 --> 02:45.380 +다음은 LM 향기 리더보드입니다 속도나 메모리 소비량 + +02:45.380 --> 02:51.050 +같은 것에 관련된 다양한 모델의 성능을 살펴보는 거죠 + +02:51.080 --> 02:57.170 +leaderboard를 보면 다양한 변형이 있는 모델들이 나와 있습니다 + +02:57.170 --> 03:02.570 +속도, 에너지 소비량 메모리 사용량 등 정보도 있고요 + 
+03:02.570 --> 03:10.310 +하지만 제가 권하고 싶은 것은 그 페이지에서 시작하지 말고 이 페이지로 넘어가서 + +03:10.340 --> 03:16.340 +최고의 모델을 찾는 것입니다. 당신이 보고 있는 하드웨어 구조를 + +03:16.340 --> 03:22.640 +선택하고, 최고의 모델을 고릅니다. 여기 보이는 것은 매우 흥미로운 + +03:22.640 --> 03:32.090 +도표입니다. 다이어그램으로 적어도 세 가지의 양을 보여주고 있죠. + +03:32.120 --> 03:35.930 +그렇지 않다면 엑스 축을 따라야겠죠 + +03:35.930 --> 03:38.360 +여기서 속도에 대한 뭔가를 보게 되죠 + +03:38.360 --> 03:42.650 +이 모델이 토큰 64개를 생성하는 데 걸리는 시간이죠 + +03:42.650 --> 03:45.530 +왼쪽으로 더 기울일수록 좋아요 + +03:45.530 --> 03:47.420 +시간이 단축되니까요 + +03:47.420 --> 03:54.110 +왼쪽으로 오는 모델을 찾고 있어요 성능, 속도, 성능에 신경 쓴다면요 + +03:54.140 --> 03:59.450 +정확성에 신경 쓰신다면, 성능에 신경 쓰신다면 합계를 이용하실 수 있어요 + +03:59.480 --> 04:07.500 +오픈 LM 점수요 모델 정확도의 척도로써의 총점이죠 + +04:07.680 --> 04:14.760 +메모리 공간 측면에서 비용을 고려하고 이 모델을 실행하기 위해 필요한 하드웨어의 + +04:14.760 --> 04:19.890 +크기를 고려한다면 blob의 크기를 봐야 해요 + +04:19.920 --> 04:27.480 +큰 덩어리는 더 많은 메모리 수요를 나타내요 여러분이 필요한 게 뭔지 감을 잡게 해주죠 + +04:27.480 --> 04:34.620 +저희가 찾는 모델은 작은 점으로 왼쪽 위쪽에 있는 게 이상적이죠 + +04:34.620 --> 04:39.720 +높은 곳만 아니면 좋은 결과가 나올 거예요 + +04:39.720 --> 04:44.820 +특히 이런 모델이 아주 잘 되고 있는 걸 볼 수 있어요 Quan the Quan + +04:44.850 --> 04:47.040 +1이죠 5가지 변형이 있어요 + +04:47.130 --> 04:53.250 +여길 보시면 가장 신경 쓰이는 건 성능이 뛰어나고 + +04:53.250 --> 04:59.820 +정확하며 벤치마킹 점수 면에서 빠른 기술이죠 + +05:00.090 --> 05:06.720 +그리고 이 제품으로 오세요 라마 라마 3 모델이에요 + +05:07.050 --> 05:11.970 +여기서 마지막 요점이 등장합니다 이 도표에 표현된 또 + +05:11.970 --> 05:17.340 +다른 정보는 모델의 가족인데 비트의 색깔로 표현되죠 + +05:17.340 --> 05:25.830 +여기 보시면 노란색은 파이나 훈련을 받은 모델이란 뜻이에요 + +05:25.830 --> 05:29.250 +저기 노란색 파이 모델이 보일 거예요 + +05:29.610 --> 05:36.900 +속도와 정확도, 메모리 풋프린트의 양면성에 있어서 오픈 소스 모델의 + +05:36.900 --> 05:43.290 +실행 비용에 영향을 미칠 텐데 이건 환상적인 리소스예요 + +05:43.290 --> 05:51.330 +leaderboard를 눌러보세요. 여기로 오세요. 항상 당신의 가장 좋은 모델 찾기 탭을 누르세요. + +05:51.330 --> 05:55.020 +그리고 당신의 옵션이 뭔지 살펴보세요. + +05:55.020 --> 06:01.260 +예를 들어 T4 하드웨어를 이야기할 때 T4 탭으로 가서 어떤 + +06:01.260 --> 06:04.320 +옵션이 있는지 볼 수 있어요 + +06:04.320 --> 06:08.400 +보세요, 01I 청음이에요 + +06:08.430 --> 06:10.560 +콴은 잘 지내고 있나요? 
+ +06:10.830 --> 06:17.250 +다른 모델도 볼 수 있어요 사용 사례에 따라 가장 적절한 모델이죠 + +06:19.140 --> 06:26.040 +제가 말씀드리고 싶은 것은 빈 공간이 있는데 빈 공간으로 가서 leaderboard를 + +06:26.040 --> 06:27.540 +검색할 수 있어요 + +06:27.540 --> 06:32.670 +여기서 한 건 공간에서 leaderboard를 검색한 것뿐입니다 + +06:32.670 --> 06:41.340 +다양한 leaderboard를 볼 수 있죠 LMS 벤치마크에 대한 보다 상세한 정보를 볼 수 있어요 + +06:41.610 --> 06:48.780 +여기 있는 정보의 양에 압도되지 않으셨다면 제가 조금 전에 언급한 이 다양한 leaderboard를 보는 + +06:48.810 --> 06:51.420 +데에 아주 훌륭한 유틸리티가 있어요 + +06:51.420 --> 06:53.370 +포르투갈이 집중해서 순위를 매기고 있어요 + +06:53.370 --> 07:00.840 +많은 언어가 독자적인 순위표를 갖고 있어요 능력을 평가하기 위해서요 + +07:00.840 --> 07:03.390 +오픈 메디컬 리더보드를 언급했죠 + +07:03.390 --> 07:10.350 +이걸 띄워 보면 여러 의학적 벤치마크가 있습니다 임상 지식, 대학 + +07:10.350 --> 07:17.520 +생물학 의학 유전학, 그리고 퍼브메드 QA 같은 것들이죠 + +07:17.610 --> 07:22.230 +의학 모델과 비교해서 점수를 매겨요 + +07:22.230 --> 07:29.280 +의료 유스 케이스용으로 설계된 솔루션을 개발하려 한다면 바로 이 순위표를 + +07:29.280 --> 07:31.050 +봐야 해요 + +07:31.260 --> 07:38.490 +어바웃 페이지는 추가 정보를 제공합니다 데이터 집합과 어떻게 사용되고 + +07:38.490 --> 07:42.510 +어떻게 계산되는지에 대해서요 + +07:42.540 --> 07:50.820 +이걸 보면 문제가 발생했을 때 올바른 오픈 소스 모델을 어떻게 선택할지 + +07:50.820 --> 07:52.800 +감이 올 거예요 + +07:52.830 --> 07:59.340 +유용한 순위표를 어떻게 찾고 다양한 지표를 어떻게 해석하고 다양한 모델을 어떻게 순위 + +07:59.340 --> 08:00.720 +매길지 말이죠 + +08:00.750 --> 08:05.250 +다음 시간에는 오픈 소스와 폐쇄 소스를 결합한 리더보드를 살펴보죠 diff --git a/week5/community-contributions/subtitles/srts/59295451/en_US.srt b/week5/community-contributions/subtitles/srts/59295451/en_US.srt new file mode 100755 index 0000000..111f5b7 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295451/en_US.srt @@ -0,0 +1,523 @@ +WEBVTT + +00:00.920 --> 00:02.510 +I know that everybody. + +00:02.510 --> 00:08.210 +It seems like just the other day that we were embarking on our quest together, and now here we are + +00:08.210 --> 00:13.490 +at the beginning of week four, rapidly approaching the halfway point. + +00:13.490 --> 00:17.090 +So what is in store for us this time? 
+ +00:17.210 --> 00:24.770 +Uh, this week we are getting into an essential period of building core knowledge. + +00:24.770 --> 00:30.560 +It's going to be about this very challenging question that hits us all, which is we are spoilt for + +00:30.560 --> 00:31.070 +choice. + +00:31.070 --> 00:33.140 +There are so many llms out there. + +00:33.140 --> 00:36.590 +There are so many decisions to be made, closed source or open source. + +00:36.590 --> 00:39.590 +And and then in each category, so many. + +00:39.620 --> 00:43.910 +How do you pick the right LLM for a problem at hand. + +00:43.910 --> 00:46.610 +And that is the main theme of this week. + +00:46.700 --> 00:53.570 +And in addition we'll be going to be generating code building llms that write code for us in open source + +00:53.570 --> 00:53.900 +land. + +00:53.930 --> 00:55.400 +So that will be some fun. + +00:55.910 --> 01:01.970 +Uh, what we're going to be doing today is getting cracking on this subject of how do you pick the right + +01:01.980 --> 01:03.450 +model for the task. + +01:03.450 --> 01:08.370 +We're going to be talking about attributes and benchmarks, and we're going to be using something called + +01:08.370 --> 01:15.840 +the Openml leaderboard, which is an amazing resource from Hugging Face to help you compare open source + +01:15.840 --> 01:16.950 +models. + +01:17.550 --> 01:21.690 +But first, of course, a moment to look at our eight weeks. + +01:21.690 --> 01:23.340 +We started over on the left. + +01:23.340 --> 01:25.320 +We're going to finish over on the right. + +01:25.320 --> 01:26.490 +In week one. + +01:26.490 --> 01:32.070 +We talked about all things frontier model and we compared a bunch of frontier LMS, six of them. + +01:32.070 --> 01:39.630 +In week two, we introduced UIs with Gradio agent ization and of course we played with Multi-modality + +01:39.660 --> 01:40.410 +week three. + +01:40.440 --> 01:43.980 +Last week we got stuck into open source with Hugging face. 
+ +01:43.980 --> 01:49.110 +We looked at the hub, we looked at the high level API, the pipelines, and then we looked at Tokenizers + +01:49.110 --> 01:54.540 +and models and hopefully a lot came together in terms of really understanding how these chat interfaces + +01:54.540 --> 02:01.830 +work and how these sorts of lists of dictionaries end up being tokens, including special tokens fed + +02:01.830 --> 02:08.200 +into llms, LMS, resulting in the output the next predicted token. + +02:09.190 --> 02:15.820 +We get to week four about selecting LMS, and also the problem of generating code which which is going + +02:15.820 --> 02:16.510 +to be very interesting. + +02:16.540 --> 02:18.700 +I hope week five is rag. + +02:18.700 --> 02:20.620 +Week six is fine tuning. + +02:20.650 --> 02:22.090 +It's when we begin training. + +02:22.120 --> 02:27.970 +Training on the frontier and then training open source and bringing it all together. + +02:28.090 --> 02:30.520 +That is our eight week plan. + +02:30.910 --> 02:36.820 +And so for today we're talking about comparing models, comparing LMS. + +02:36.820 --> 02:44.140 +And if there's one takeaway from this session, the most important point is that it's not like there's + +02:44.170 --> 02:48.340 +a simple answer of like this LLM is better than the others. + +02:48.370 --> 02:52.360 +It isn't a a case of of ranking from best to worst. + +02:52.390 --> 02:55.690 +It's all about what are you trying to accomplish. + +02:55.690 --> 02:59.830 +And it's about picking the right LLM for the task at hand. + +02:59.830 --> 03:05.420 +So understanding weighing up different LMS and comparing them with your requirements is the name of + +03:05.420 --> 03:10.160 +the game, and there are two different ways that we can pair models. + +03:10.160 --> 03:14.810 +The first of them is looking just at the basic facts about the model. + +03:14.900 --> 03:18.320 +Um, obvious stuff like the cost of the model. 
+ +03:18.410 --> 03:23.630 +Um, and that's going to really cut down your decision space into a smaller number of choices. + +03:23.630 --> 03:29.270 +And once you've investigated the sort of basic attributes, the basic aspects of the different models, + +03:29.270 --> 03:32.750 +then then you start looking at the detailed results. + +03:32.750 --> 03:37.100 +And that involves looking at things like benchmarks, leaderboards and arenas. + +03:37.100 --> 03:43.700 +And based on all of this, you should end up with a handful of candidate llms that you will then use + +03:43.700 --> 03:50.660 +for prototyping to allow you to finally select the best LLM for your task at hand. + +03:51.500 --> 03:54.770 +And so let's talk a bit about the basics. + +03:55.610 --> 04:01.880 +So when I say we're comparing the basics, I really do mean the most obvious things about different + +04:01.880 --> 04:03.740 +models that you would have to assess. + +04:03.740 --> 04:08.960 +And we start with understanding whether or not you're going to be looking at an open source model or + +04:08.990 --> 04:10.070 +a closed source model. + +04:10.070 --> 04:15.020 +And of course, there are pros and cons, and it will affect a lot of the other basic attributes. + +04:15.020 --> 04:20.030 +So as you develop your shortlist, the first thing to note down is is this open or closed source? + +04:20.150 --> 04:21.860 +When was it released? + +04:21.890 --> 04:26.450 +What is the release date and and presumably the same the same dates. + +04:26.450 --> 04:30.170 +But an important thing to note is what is the knowledge cutoff? + +04:30.170 --> 04:31.370 +What is the date? + +04:31.430 --> 04:38.180 +The last date of its training data, beyond which typically it won't have any knowledge of current events. + +04:38.180 --> 04:41.780 +And depending on your use case, that may be important to you or it might not. + +04:42.200 --> 04:44.570 +Then the number of parameters. 
+ +04:44.600 --> 04:49.190 +This gives you a sense of the the strength of the model. + +04:49.190 --> 04:54.440 +It will also give you a strength of costs that will come to, and it will give you a sense of how much + +04:54.440 --> 04:55.820 +training data is needed. + +04:55.820 --> 04:59.630 +If you want to fine tune that model, which we will also talk about in just a moment. + +04:59.630 --> 05:04.760 +So the number of parameters, the size of the model is another of the basic facts that you would note + +05:04.760 --> 05:09.000 +down The number of tokens that were used during training. + +05:09.000 --> 05:15.120 +The size of the training dataset is an important thing to note, and it will give you a sense again + +05:15.150 --> 05:22.620 +of the power of the model, its level, its depth of expertise, and then of course, the context length, + +05:22.650 --> 05:24.180 +the size of the context window. + +05:24.210 --> 05:27.750 +The thing we spoke about a lot in the past. + +05:27.810 --> 05:33.840 +The total amount of tokens that it can keep effectively in its memory while it's predicting the next, + +05:33.870 --> 05:38.280 +next token, which needs to include the original system, prompt input prompts. + +05:38.280 --> 05:44.820 +And if you're in a in a instruct in a chat use case, then all of the exchanges between the user and + +05:44.820 --> 05:47.940 +the assistant all have to fit within the context length. + +05:47.940 --> 05:53.970 +If you're dealing with a multi shot prompting case where you're you're providing multiple examples at + +05:53.970 --> 05:57.900 +inference time for the model to learn from, then you need to make sure that you're going to have a + +05:57.900 --> 06:03.090 +sufficient context length to take all of those examples. + +06:03.480 --> 06:06.450 +Can you remember the model with the longest context length today? 
+ +06:06.760 --> 06:13.480 +uh, Gemini 1.5 flash with a million size contacts window as of right now, but we'll see in a moment + +06:13.480 --> 06:17.410 +where you can go and look up and compare all of the context lengths. + +06:17.680 --> 06:19.780 +So that's one set of basics. + +06:19.780 --> 06:23.440 +Let's go on to some more basics that you would you would look at. + +06:23.440 --> 06:27.850 +So there's a bunch of of costs that you need to be mindful of. + +06:27.880 --> 06:32.350 +I've divided them into inference costs, training costs and build costs. + +06:32.350 --> 06:38.170 +So inference costs of course is how much is it going to cost you every time you run this model in production + +06:38.170 --> 06:40.420 +to generate an output given an input? + +06:40.420 --> 06:43.750 +That, put simply, is what we're talking about with inference. + +06:43.930 --> 06:49.330 +Um, and there depending on whether you're dealing with open or closed source or and how you're interacting, + +06:49.330 --> 06:51.430 +there could be a number of different types of costs. + +06:51.460 --> 06:56.500 +We know, of course, with frontier models, we're thinking about API costs, which we also know consists + +06:56.500 --> 07:01.840 +of a count of input tokens and output tokens that would need to go into that API cost. + +07:02.170 --> 07:08.330 +Uh, if you were talking about using the Pro user interfaces, the chat UIs, you'd be thinking of a + +07:08.330 --> 07:11.630 +subscription, a monthly subscription cost. + +07:11.630 --> 07:16.820 +And if you're talking about open source models that you would run yourself, then there would be some + +07:16.820 --> 07:22.970 +runtime compute cost, which could be like a colab cost, or later we'll be talking in the actually + +07:23.000 --> 07:26.960 +in probably the last week of this course about ways to deploy to production. 
+ +07:26.990 --> 07:33.170 +Thinking about platforms like modal, which let you run your model in production on a GPU box, and + +07:33.170 --> 07:40.430 +then you're paying some, uh, some, some fee to run your compute box in their cloud. + +07:40.430 --> 07:44.180 +So that runtime compute for open source is another factor. + +07:44.180 --> 07:49.820 +And typically if you're if you're working with an open source model that you've trained yourself, your + +07:49.820 --> 07:52.400 +inference costs will be lower because it's your model. + +07:52.400 --> 07:55.400 +You're not going to be paying that API charge every time. + +07:55.400 --> 07:58.100 +But there's you know, it's not a clear calculus. + +07:58.130 --> 07:59.300 +It depends on your use case. + +07:59.300 --> 08:03.350 +It depends on your choice of model, how many parameters and so on. + +08:04.220 --> 08:06.710 +And then training cost. + +08:06.710 --> 08:13.470 +So obviously if you're using out of the box frontier model, then there isn't a training cost. + +08:13.470 --> 08:16.770 +If you're not further fine tuning it, as we'll do in week seven. + +08:17.010 --> 08:22.380 +But if you're doing, uh, building an open source model that you want to specialize for your domain + +08:22.380 --> 08:26.550 +and you're going to be providing it with, with training costs, with, with, with training, then + +08:26.550 --> 08:27.810 +there will be a cost associated with that. + +08:27.810 --> 08:29.820 +And you need to factor that into the equation. + +08:30.000 --> 08:31.680 +Build cost. + +08:31.740 --> 08:37.650 +So how much work will it be for you to create this solution. + +08:37.770 --> 08:42.240 +Uh, and that's highly related to the next one which is time to market, which is how long is it going + +08:42.240 --> 08:42.900 +to take you? + +08:43.170 --> 08:47.610 +Uh, the, the one of the selling points of using a frontier model. + +08:47.610 --> 08:50.280 +Is that the time to market? 
+ +08:50.280 --> 08:52.590 +And the build cost can be very low. + +08:52.590 --> 08:59.640 +Uh, it can take very little time to be up and running with a powerful solution using frontier models. + +08:59.640 --> 09:04.140 +Typically, if you're looking to fine tune your own open source model, it's going to take longer and + +09:04.140 --> 09:05.340 +it's going to be harder. + +09:05.490 --> 09:16.150 +Uh, so that's that's a major factor to weigh up rate limits using frontier models, you may run into + +09:16.180 --> 09:18.790 +some limits on how frequently you can call them. + +09:18.790 --> 09:21.820 +This is typically the case for subscription plans. + +09:22.030 --> 09:25.660 +Um, and uh, maybe, maybe with rate limits. + +09:25.660 --> 09:30.610 +I point out reliability as well when using the frontier models through the APIs. + +09:30.610 --> 09:36.430 +There are times when some of the models I've experienced this with both, uh, GPT four and with Claude + +09:36.460 --> 09:43.930 +3.5 sonnet that the the, uh, APIs respond with an error that they are overloaded because they are + +09:43.930 --> 09:45.970 +too busy in production at that time. + +09:45.970 --> 09:54.460 +So that's sort of related to rate limit, but a sort of stability point, their speed, which is a sort + +09:54.460 --> 10:02.740 +of throughput, like how quickly can you generate a whole response, how quickly can new tokens be generated? + +10:02.740 --> 10:04.780 +And very similar. + +10:05.050 --> 10:11.230 +There's a sort of subtle distinction between speed and latency, which is sort of the request response + +10:11.230 --> 10:11.980 +time. + +10:12.040 --> 10:17.620 +When you ask how quickly does it first start responding with each token? + +10:17.920 --> 10:25.450 +You may remember when we built the AI assistant for our airline, which was multimodal and spoke back + +10:25.450 --> 10:25.990 +to us. + +10:25.990 --> 10:28.090 +Latency was a bit of a problem there. 
+ +10:28.120 --> 10:33.040 +I don't think I mentioned it at the time, but there were some awkward pauses because of course when + +10:33.040 --> 10:35.260 +there's some text, it's then going out to the model. + +10:35.260 --> 10:39.940 +It's calling out to a frontier model, generating the audio, coming back and playing the audio. + +10:39.940 --> 10:45.910 +And it was even more jarring when we were generating images, because those images were taking some + +10:45.910 --> 10:49.510 +time to come back and we'd sit there waiting for the image. + +10:49.540 --> 10:54.310 +Obviously, there are ways to to handle that more gracefully than we did in our prototype, but that + +10:54.310 --> 10:56.200 +is a factor that has to be. + +10:56.350 --> 11:01.330 +Bear in mind if you're dealing with your own open source model, it's the sort of thing you have more + +11:01.330 --> 11:02.590 +control over. + +11:02.890 --> 11:08.800 +And then last of our basics, but definitely not least, is license. + +11:09.010 --> 11:12.620 +Whether you're dealing with open source or closed source. + +11:12.740 --> 11:19.730 +You need to be fully aware of any license restrictions in terms of where you are, and are not allowed + +11:19.730 --> 11:20.660 +to use it. + +11:20.810 --> 11:25.130 +Um, many of the open source models have very open licensing. + +11:25.160 --> 11:27.380 +Um, some of them do have fine print. + +11:27.380 --> 11:32.150 +I think stable diffusion is one that's known that that, uh, um, you are allowed to use it commercially + +11:32.150 --> 11:33.200 +up to a point. + +11:33.200 --> 11:38.120 +There's a point at which when your revenues are above a certain level, um, that at that point some, + +11:38.120 --> 11:43.730 +some kind of an arrangement business arrangement with stable diffusion, uh, is, is needed. + +11:43.760 --> 11:51.260 +Um, and we experienced ourselves signing the terms of service with, um, with llama 3.1 with meta. 
+ +11:51.290 --> 11:56.090 +Uh, again, which I think is mostly to make sure that we're using it for good purposes, but still, + +11:56.090 --> 11:58.490 +it's part of the license that one needs to be aware of. + +11:58.520 --> 12:01.310 +So that wraps up the basics. + +12:01.310 --> 12:07.790 +These are all things that you would note down before going in to a more detailed analysis of the performance, + +12:07.820 --> 12:11.240 +the accuracy of the models for the task at hand. + +12:11.240 --> 12:13.880 +And we will continue in the next session. diff --git a/week5/community-contributions/subtitles/srts/59295451/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295451/ja_JP.srt new file mode 100755 index 0000000..46e4e47 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295451/ja_JP.srt @@ -0,0 +1,427 @@ +WEBVTT + +00:00.920 --> 00:02.510 +みんな知っているよ。 + +00:02.510 --> 00:08.210 +つい先日のことのように思えるが、 私たちは今、 4週目に入り、 + +00:08.210 --> 00:13.490 +急速に中間地点に近づいている。 + +00:13.490 --> 00:17.090 +さて、 今回は何が待ち受けているのだろうか? 
+ +00:17.210 --> 00:24.770 +ええと、 今週は核となる知識を構築するのに不可欠な時期に入る。 + +00:24.770 --> 00:31.070 +今回は、 私たちすべてに襲いかかるこの非常に挑戦的な問題、 つまり、 私たちは選択の余地を奪われているということについて話すつもりだ。 + +00:31.070 --> 00:33.140 +世の中にはたくさんのLLMがある。 + +00:33.140 --> 00:36.590 +クローズドソースかオープンソースか、 決断しなければならないことはたくさんある。 + +00:36.590 --> 00:39.590 +そして、 それぞれのカテゴリーで、 非常に多くの選手がいる。 + +00:39.620 --> 00:43.910 +目の前の問題に適したLLMを選ぶにはどうすればいいのか。 + +00:43.910 --> 00:46.610 +それが今週のメインテーマだ。 + +00:46.700 --> 00:53.900 +さらに、 私たちはオープンソースの土地で私たちのためにコードを書いてくれるllmsを構築するコードを生成することになるだろう。 + +00:53.930 --> 00:55.400 +それは楽しみだね。 + +00:55.910 --> 01:01.970 +ええと、 今日私たちがやろうとしているのは、 タスクに適したモデルを選ぶにはどうすればいいかというテーマについて、 + +01:01.980 --> 01:03.450 +さっそく考えてみることだ。 + +01:03.450 --> 01:08.370 +オープンソースのモデルを比較するのに役立つ、 Hugging + +01:08.370 --> 01:16.950 +Faceの素晴らしいリソースであるOpenml leaderboardというものを使うつもりだ。 + +01:17.550 --> 01:21.690 +その前に、 もちろん、 8週間を振り返ってみよう。 + +01:21.690 --> 01:23.340 +左からやり直した。 + +01:23.340 --> 01:25.320 +私たちは右で終えるつもりだ。 + +01:25.320 --> 01:26.490 +週目だ。 + +01:26.490 --> 01:32.070 +私たちはフロンティア・モデルについて語り、 6つのフロンティアLMSを比較しました。 + +01:32.070 --> 01:40.410 +第2週はGradioのエージェント化でUIを紹介し、 第3週はもちろんマルチモダリティで遊んだ。 + +01:40.440 --> 01:43.980 +先週、 私たちはオープンソースにどっぷりはまった。 + +01:43.980 --> 01:49.110 +私たちはハブを調べ、 ハイレベルAPIとパイプラインを調べ、 そしてトークナイザーとモデルを調べました。 + +01:49.110 --> 01:54.540 +そして、 これらのチャット・インターフェースがどのように機能するのか、 + +01:54.540 --> 02:08.200 +そしてこれらの辞書のリストがどのようにトークンになるのかを理解するという点で、 多くのことがまとまったと思います。 + +02:09.190 --> 02:16.510 +第4週は、 LMSの選択と、 非常に興味深いコード生成の問題についてだ。 + +02:16.540 --> 02:18.700 +第5週がラグであることを願う。 + +02:18.700 --> 02:20.620 +第6週は微調整。 + +02:20.650 --> 02:22.090 +トレーニングを始めるときだ。 + +02:22.120 --> 02:27.970 +フロンティアでのトレーニング、 そしてオープンソースでのトレーニング、 そしてそのすべてを統合する。 + +02:28.090 --> 02:30.520 +これが8週間のプランだ。 + +02:30.910 --> 02:36.820 +それで今日は、 モデルの比較、 LMSの比較についてお話しします。 + +02:36.820 --> 02:48.340 +そして、 このセッションから1つ収穫があるとすれば、 最も重要なポイントは、 このLLMが他のLLMより優れているというような単純な答えがあるわけではないということだ。 + +02:48.370 --> 02:52.360 +ベストからワーストへというランキングではない。 + +02:52.390 --> 02:55.690 
+何を達成しようとしているのかがすべてだ。 + +02:55.690 --> 02:59.830 +そして、 目の前の課題に適したLLMを選ぶことだ。 + +02:59.830 --> 03:05.420 +ですから、 さまざまなLMSを比較検討し、 + +03:05.420 --> 03:10.160 +要件と比較することが重要です。 + +03:10.160 --> 03:14.810 +その第一は、 モデルに関する基本的な事実だけを見ることだ。 + +03:14.900 --> 03:18.320 +ええと、 モデルの値段のような明らかなものです。 + +03:18.410 --> 03:23.630 +そうすれば、 決断の幅が狭まり、 選択肢が少なくなる。 + +03:23.630 --> 03:29.270 +そして、 基本的な属性や異なるモデルの基本的な側面を調査したら、 + +03:29.270 --> 03:32.750 +次に詳細な結果を調べ始める。 + +03:32.750 --> 03:37.100 +そのためには、 ベンチマーク、 リーダーボード、 アリーナといったものを見る必要がある。 + +03:37.100 --> 03:50.660 +そして、 これらすべてを踏まえて、 最終的にあなたのタスクに最適なLLMを選択するためのプロトタイピングに使用する候補のLLMを手に入れる必要があります。 + +03:51.500 --> 03:54.770 +それで、 基本的なことを少し話そう。 + +03:55.610 --> 04:03.740 +だから、 基本的なことを比較すると言ったのは、 異なるモデルについて評価しなければならない最も明白なことを意味しているんだ。 + +04:03.740 --> 04:10.070 +そして、 オープンソースモデルかクローズドソースモデルかを理解することから始める。 + +04:10.070 --> 04:15.020 +そしてもちろん、 長所と短所があり、 他の基本的な属性の多くに影響する。 + +04:15.020 --> 04:20.030 +そこで、 候補リストを作成する際、 最初にメモしておくべきことは、 オープンソースかクローズドソースかということだ。 + +04:20.150 --> 04:21.860 +発売はいつですか? + +04:21.890 --> 04:26.450 +発売日はいつですか? + +04:26.450 --> 04:30.170 +しかし、 注意すべき重要な点は、 知識のカットオフとは何かということだ。 + +04:30.170 --> 04:31.370 +日付は? 
+ +04:31.430 --> 04:38.180 +学習データの最終更新日。 それ以降は通常、 現在の出来事に関する知識を持たない。 + +04:38.180 --> 04:41.780 +そして、 あなたのユースケースによって、 それはあなたにとって重要かもしれないし、 そうでないかもしれない。 + +04:42.200 --> 04:44.570 +次にパラメータの数。 + +04:44.600 --> 04:49.190 +これでモデルの強さがわかる。 + +04:49.190 --> 04:55.820 +また、 かかるコストの強さもわかるだろうし、 どれだけのトレーニングデータが必要かもわかるだろう。 + +04:55.820 --> 04:59.630 +そのモデルを微調整したいのであれば、 それについても少しお話ししましょう。 + +04:59.630 --> 05:09.000 +パラメーターの数、 モデルのサイズも基本的な事実のひとつで、 トレーニング中に使用されたトークンの数をメモしておく。 + +05:09.000 --> 05:15.120 +トレーニングデータセットのサイズは重要なポイントで、 モデルのパワー、 そのレベル、 + +05:15.150 --> 05:24.180 +専門知識の深さ、 そしてもちろんコンテキストの長さ、 コンテクストウィンドウの大きさを改めて感じさせてくれる。 + +05:24.210 --> 05:27.750 +過去に何度も話したことだ。 + +05:27.810 --> 05:33.840 +次のトークンを予測する間、 効率的にメモリに保持できるトークンの総量は、 オリジナルのシステム、 + +05:33.870 --> 05:38.280 +プロンプト入力プロンプトを含む必要がある。 + +05:38.280 --> 05:44.820 +また、 チャットのユースケースで指示する場合、 ユーザーとアシスタントのやりとりはすべて、 + +05:44.820 --> 05:47.940 +コンテキストの長さに収まる必要がある。 + +05:47.940 --> 05:53.970 +マルチショット・プロンプティングを扱う場合、 推論時に複数の例を提供してモデルに学習させるのであれば、 + +05:53.970 --> 06:03.090 +それらの例をすべて取り込むのに十分なコンテキストの長さを確保する必要がある。 + +06:03.480 --> 06:06.450 +今日の文脈の長さが最も長いモデルを覚えていますか? 
+ +06:06.760 --> 06:17.410 +ええと、 ジェミニ1号。 5フラッシュに100万サイズのコンタクトウインドウがある。 + +06:17.680 --> 06:19.780 +これが基本中の基本だ。 + +06:19.780 --> 06:23.440 +もっと基本的なことに目を向けてみよう。 + +06:23.440 --> 06:27.850 +だから、 気をつけなければならないコストがたくさんある。 + +06:27.880 --> 06:32.350 +推論コスト、 トレーニングコスト、 ビルドコストに分けてみた。 + +06:32.350 --> 06:40.420 +つまり、 推論コストとは、 入力が与えられたときに出力を生成するために、 本番でこのモデルを実行するたびにどれだけのコストがかかるかということである。 + +06:40.420 --> 06:43.750 +簡単に言えば、 推論とはそういうことだ。 + +06:43.930 --> 06:51.430 +オープンソースかクローズドソースか、 あるいはどのようなやり取りをするかによって、 さまざまな種類のコストが発生する可能性がある。 + +06:51.460 --> 07:01.840 +もちろん、 フロンティアモデルでは、 APIコストについて考えていることは知っている。 + +07:02.170 --> 07:11.630 +Proのユーザーインターフェイス、 チャットUIを使うということであれば、 サブスクリプション、 つまり月額のサブスクリプション費用を考えていることでしょう。 + +07:11.630 --> 07:16.820 +また、 オープンソースのモデルを自分で実行するのであれば、 実行時の計算コストがかかるでしょう。 + +07:16.820 --> 07:26.960 +これは、 共同研究コストのようなものかもしれませんし、 このコースの最終週には、 本番環境へのデプロイ方法についてお話しすることになるでしょう。 + +07:26.990 --> 07:33.170 +モーダルのようなプラットフォームを考えてみると、 + +07:33.170 --> 07:40.430 +GPUボックス上でモデルを本番稼動させることができる。 + +07:40.430 --> 07:44.180 +オープンソースのランタイム・コンピュートもその要因のひとつだ。 + +07:44.180 --> 07:49.820 +通常、 自分でトレーニングしたオープンソースのモデルを使用している場合、 + +07:49.820 --> 07:52.400 +推論コストは低くなる。 + +07:52.400 --> 07:55.400 +毎回API使用料を支払う必要はない。 + +07:55.400 --> 07:58.100 +でも、 明確な計算はできない。 + +07:58.130 --> 07:59.300 +ユースケースによる。 + +07:59.300 --> 08:03.350 +モデルの選択、 パラメーターの数などによる。 + +08:04.220 --> 08:06.710 +そしてトレーニング費用。 + +08:06.710 --> 08:13.470 +だから、 箱から出してフロンティア・モデルを使うのであれば、 トレーニング・コストはかからない。 + +08:13.470 --> 08:16.770 +さらに微調整をするのでなければ、 第7週にすることになる。 + +08:17.010 --> 08:22.380 +しかし、 もしオープンソース・モデルを構築し、 それを自分の専門分野に特化させ、 トレーニング費用やトレーニング付きで提供するのであれば、 + +08:22.380 --> 08:27.810 +それにはコストがかかる。 + +08:27.810 --> 08:29.820 +それを方程式に組み入れる必要がある。 + +08:30.000 --> 08:31.680 +建設費。 + +08:31.740 --> 08:37.650 +では、 この解決策を作るのにどれだけの労力がかかるのか。 + +08:37.770 --> 08:42.900 +そして、 それは次の「市場投入までの時間」に大きく関係している。 + +08:43.170 --> 08:47.610 +フロンティア・モデルを使うことのセールスポイントのひとつだ。 + +08:47.610 --> 08:50.280 +それが市場投入のタイミングなのか? 
+ +08:50.280 --> 08:52.590 +また、 建設コストも非常に低く抑えることができる。 + +08:52.590 --> 08:59.640 +フロンティア・モデルを使った強力なソリューションの立ち上げには、 ほとんど時間がかからない。 + +08:59.640 --> 09:04.140 +通常、 独自のオープンソースモデルを微調整しようとすれば、 より時間がかかり、 + +09:04.140 --> 09:05.340 +より難しくなる。 + +09:05.490 --> 09:18.790 +つまり、 フロンティア・モデルを使ったレート制限を検討する上で、 電話できる頻度の制限にぶつかる可能性があるということだ。 + +09:18.790 --> 09:21.820 +これは通常、 サブスクリプション・プランの場合である。 + +09:22.030 --> 09:25.660 +それと、 もしかしたら、 料金制限もあるかもしれない。 + +09:25.660 --> 09:30.610 +私は、 APIを通じてフロンティア・モデルを使用する際にも信頼性を指摘している。 + +09:30.610 --> 09:43.930 +GPT4とクロード3の両方で経験したことがある。 + +09:43.930 --> 09:43.930 +5ソネットのAPIは、 + +09:43.930 --> 09:45.970 +その時間帯は生産が忙しすぎるため、 負荷がかかりすぎているというエラーで応答する。 + +09:45.970 --> 09:54.460 +これはレート制限に関連するものだが、 ある種の安定ポイントであり、 スループットの一種であるスピード、 つまり、 どれくらいの速さでレスポンス全体を生成できるか、 + +09:54.460 --> 10:02.740 +どれくらいの速さで新しいトークンを生成できるかということだ。 + +10:02.740 --> 10:04.780 +そしてとてもよく似ている。 + +10:05.050 --> 10:11.980 +スピードとレイテンシー(リクエストの応答時間)には微妙な違いがある。 + +10:12.040 --> 10:17.620 +と尋ねると、 トークンごとにどれくらいのスピードで返答が始まるのですか? 
+ +10:17.920 --> 10:25.990 +私たちが航空会社のためにAIアシスタントを開発したときのことを覚えていらっしゃるかもしれない。 + +10:25.990 --> 10:28.090 +そこではレイテンシーが少し問題になった。 + +10:28.120 --> 10:33.040 +その時は言わなかったと思うけど、 テキストがある時はもちろん、 それがモデルに送られるから、 + +10:33.040 --> 10:35.260 +気まずい間があったんだ。 + +10:35.260 --> 10:39.940 +フロンティアモデルに呼びかけ、 オーディオを生成し、 戻って来てオーディオを再生する。 + +10:39.940 --> 10:45.910 +そして、 画像を生成しているときはさらに衝撃的で、 画像が戻ってくるまでに時間がかかり、 + +10:45.910 --> 10:49.510 +私たちはそこに座って画像を待っていた。 + +10:49.540 --> 10:56.200 +もちろん、 私たちのプロトタイプでやったよりももっと潔くそれを処理する方法はある。 + +10:56.350 --> 11:02.590 +もしあなたが独自のオープンソースモデルを扱っているのであれば、 それはあなたがよりコントロールしやすいものであることを覚えておいてほしい。 + +11:02.890 --> 11:08.800 +そして、 基本中の基本だが、 間違いなく重要なのがライセンスだ。 + +11:09.010 --> 11:12.620 +オープンソースであろうとクローズドソースであろうと。 + +11:12.740 --> 11:20.660 +使用できる場所、 使用できない場所など、 ライセンスの制限を十分に認識しておく必要がある。 + +11:20.810 --> 11:25.130 +ええと、 オープンソースのモデルの多くは、 非常にオープンなライセンスを持っています。 + +11:25.160 --> 11:27.380 +うーん、 中には細かい字が書いてあるものもある。 + +11:27.380 --> 11:33.200 +安定した拡散というのは、 ある程度までは商業的に使うことが許されている。 + +11:33.200 --> 11:43.730 +収益があるレベルを超えると、 その時点で何らかの、 安定した普及のためのビジネスアレンジメントが必要になる。 + +11:43.760 --> 11:51.260 +そして、 私たちは、 ラマ3との利用規約にサインした。 メタで1。 + +11:51.290 --> 11:58.490 +でも、 ライセンスの一部であることは認識しておく必要がある。 + +11:58.520 --> 12:01.310 +基本的なことは以上だ。 + +12:01.310 --> 12:11.240 +これらはすべて、 目の前のタスクに対するモデルのパフォーマンスや精度をより詳細に分析する前にメモしておくべきことだ。 + +12:11.240 --> 12:13.880 +そして、 次のセッションに続く。 diff --git a/week5/community-contributions/subtitles/srts/59295451/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295451/ko_KR.srt new file mode 100755 index 0000000..a89d65e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295451/ko_KR.srt @@ -0,0 +1,502 @@ +WEBVTT + +00:00.920 --> 00:02.510 +모두 다 알아요 + +00:02.510 --> 00:08.210 +함께 여정을 시작하던 게 엊그제 같은데 벌써 4주 차가 + +00:08.210 --> 00:13.490 +시작됐고 여정의 절반이 빠르게 다가오고 있어요 + +00:13.490 --> 00:17.090 +이번엔 어떤 일이 벌어질까요? 
+ +00:17.210 --> 00:24.770 +이번 주에는 핵심 지식을 쌓는 중요한 단계에 들어갈 거예요 + +00:24.770 --> 00:30.560 +우리 모두에게 닥치는 어려운 질문에 관한 내용일 거예요 우리는 선택권이 너무 많다는 + +00:30.560 --> 00:31.070 +거죠 + +00:31.070 --> 00:33.140 +llm이 너무 많아요 + +00:33.140 --> 00:36.590 +오픈 소스냐 폐쇄 소스냐 결정할 게 너무 많아요 + +00:36.590 --> 00:39.590 +각 카테고리에 정말 많아요 + +00:39.620 --> 00:43.910 +당장의 문제에 맞는 LLM을 어떻게 고르죠? + +00:43.910 --> 00:46.610 +그게 이번 주의 주요 주제죠 + +00:46.700 --> 00:53.900 +추가로 오픈 소스 랜드에서 코드를 작성하는 llms를 생성할 거예요 + +00:53.930 --> 00:55.400 +재미있을 거예요 + +00:55.910 --> 01:01.970 +오늘 우리가 할 일은 작업에 적합한 모델을 어떻게 고르느냐에 + +01:01.980 --> 01:03.450 +관한 거예요 + +01:03.450 --> 01:08.370 +특성과 벤치마크에 대해 얘기하고 오픈ml leaderboard라는 + +01:08.370 --> 01:16.950 +걸 사용할 겁니다 얼굴을 끌어안고 오픈 소스 모델 비교를 돕는 놀라운 리소스죠 + +01:17.550 --> 01:21.690 +그 전에 먼저 8주간의 여정을 돌아보죠 + +01:21.690 --> 01:23.340 +왼쪽에서부터 시작했어요 + +01:23.340 --> 01:25.320 +오른쪽을 마무리할 거예요 + +01:25.320 --> 01:26.490 +첫 주에요 + +01:26.490 --> 01:32.070 +프런티어 모델에 관해 얘기하면서 프런티어 LMS를 6대나 비교했어요 + +01:32.070 --> 01:39.630 +2주차에선 그래디오 에이전트화와 함께 UI를 소개했고 3주차엔 다중 양상을 + +01:39.660 --> 01:40.410 +다뤘죠 + +01:40.440 --> 01:43.980 +지난주엔 얼굴 맞대고 오픈 소스나 찍었잖아요 + +01:43.980 --> 01:49.110 +허브와 상위 레벨 API 파이프라인, 토큰라이저 모델도 살펴봤습니다 + +01:49.110 --> 01:54.540 +많은 도움이 됐기를 바랍니다 채팅 인터페이스가 어떻게 작동하고 + +01:54.540 --> 02:01.830 +이런 종류의 사전 목록이 어떻게 토큰이 되는지에 대해서요 특별한 토큰을 llms나 + +02:01.830 --> 02:08.200 +LMS에 공급해 다음 예상 토큰을 결과로 내놓는 거죠 + +02:09.190 --> 02:15.820 +LMS 선택에 관한 4주 차에 접어들었죠 코드 생성 문제도 있어요 아주 흥미로울 + +02:15.820 --> 02:16.510 +거예요 + +02:16.540 --> 02:18.700 +5주 차가 누더기였으면 좋겠어요 + +02:18.700 --> 02:20.620 +6주 차에는 조율을 잘해야 해요 + +02:20.650 --> 02:22.090 +훈련 시작부터요 + +02:22.120 --> 02:27.970 +개척지에서 훈련하고 오픈 소스를 훈련해서 전부 합쳤어요 + +02:28.090 --> 02:30.520 +그게 8주 계획이에요 + +02:30.910 --> 02:36.820 +오늘은 모델과 LMS 비교에 대해 얘기해보죠 + +02:36.820 --> 02:44.140 +이번 세션에서 얻은 교훈이 있다면 가장 중요한 점은 이 LLM이 다른 것보다 + +02:44.170 --> 02:48.340 +낫다는 간단한 답이 없다는 거예요 + +02:48.370 --> 02:52.360 +이건 순위를 매기는 문제가 아니에요 + +02:52.390 --> 02:55.690 +무엇을 이루고 싶은지가 중요하죠 + +02:55.690 
--> 02:59.830 +작업에 적합한 LLM을 고르는 것도 중요하죠 + +02:59.830 --> 03:05.420 +다른 LMS를 저울질하고 요구 사항과 비교하는 게 게임 이름입니다 + +03:05.420 --> 03:10.160 +모델을 쌍으로 연결하는 두 가지 다른 방법이 있어요 + +03:10.160 --> 03:14.810 +첫 번째는 모델에 대한 기본적인 사실이죠 + +03:14.900 --> 03:18.320 +모델 가격 같은 뻔한 것들이요 + +03:18.410 --> 03:23.630 +그러면 선택할 공간이 줄어들어요 선택지가 줄어들죠 + +03:23.630 --> 03:29.270 +다양한 모델의 기본 속성과 기본 측면을 조사하고 나면 + +03:29.270 --> 03:32.750 +상세한 결과를 보기 시작하죠 + +03:32.750 --> 03:37.100 +벤치마크, 리더보드 경기장을 살펴봐야 해요 + +03:37.100 --> 03:43.700 +이 모든 걸 기반으로 여러분은 소수의 후보 llms를 갖게 됩니다 시제품 + +03:43.700 --> 03:50.660 +제작에 사용하여 당면한 작업에 최적의 LLM을 선택할 수 있도록 하죠 + +03:51.500 --> 03:54.770 +그럼 기본 비트부터 살펴보죠 + +03:55.610 --> 04:01.880 +제가 기초를 비교한다고 했을 때 다른 모델에서 평가해야 할 가장 명백한 + +04:01.880 --> 04:03.740 +점을 말하는 거예요 + +04:03.740 --> 04:08.960 +먼저 여러분이 오픈 소스 모델을 볼 건지 비공개 소스 모델을 볼 건지 이해해야 + +04:08.990 --> 04:10.070 +해요 + +04:10.070 --> 04:15.020 +물론 장단점이 있고 다른 기본 속성에 영향을 미치겠죠 + +04:15.020 --> 04:20.030 +최종 목록을 만들 때 가장 먼저 기억해야 할 건 소스 공개냐, 폐쇄냐죠 + +04:20.150 --> 04:21.860 +언제 출시됐죠? + +04:21.890 --> 04:26.450 +개봉일이 언제고 아마 같은 날짜일 거예요 + +04:26.450 --> 04:30.170 +하지만 중요한 건 지식의 제한이 얼마냐는 거죠 + +04:30.170 --> 04:31.370 +날짜가 언제죠? + +04:31.430 --> 04:38.180 +훈련 데이터의 마지막 날짜입니다 그 이후에는 현재 사건에 대한 지식이 전혀 없죠 + +04:38.180 --> 04:41.780 +사용 사례에 따라 중요할 수도 있고 아닐 수도 있죠 + +04:42.200 --> 04:44.570 +다음은 매개 변수의 수죠 + +04:44.600 --> 04:49.190 +모델의 힘이 느껴질 거예요 + +04:49.190 --> 04:54.440 +또한 앞으로의 비용에 대한 확신을 줄 수 있고 훈련 데이터가 얼마나 필요한지 + +04:54.440 --> 04:55.820 +알 수 있죠 + +04:55.820 --> 04:59.630 +모델을 조정하고 싶다면 잠시 후에 다시 이야기하도록 하죠 + +04:59.630 --> 05:04.760 +모델의 크기 같은 매개변수의 개수는 훈련에서 사용된 + +05:04.760 --> 05:09.000 +패의 개수를 기록하는 것과 같죠 + +05:09.000 --> 05:15.120 +훈련 데이터셋의 크기는 중요한 겁니다 모델의 힘과 수준, 전문성의 + +05:15.150 --> 05:24.180 +깊이를 다시 한 번 느끼게 될 겁니다 물론 컨텍스트 길이 컨텍스트 창의 크기도요 + +05:24.210 --> 05:27.750 +예전에 자주 얘기했던 거요 + +05:27.810 --> 05:33.840 +메모리에 효과적으로 저장할 수 있는 토큰의 총량을 계산하고 다음 토큰을 예측할 수 있습니다. + +05:33.870 --> 05:38.280 +이 토큰에는 기존 시스템인 입력 프롬프트가 있어야 하는데요. 
+ +05:38.280 --> 05:44.820 +채팅방 사용 사례를 보면 사용자와 보조 사이의 모든 익스체인지가 + +05:44.820 --> 05:47.940 +컨텍스트 길이에 맞아야 하죠 + +05:47.940 --> 05:53.970 +Multi Shot PRpting을 사용할 때 모델을 추론할 시간에 + +05:53.970 --> 05:57.900 +여러 개의 예제를 제공한다면 이 예제들을 받아들이기에 + +05:57.900 --> 06:03.090 +충분한 컨텍스트 길이를 가지고 있는지 확인해야 해요 + +06:03.480 --> 06:06.450 +오늘 컨텍스트 길이가 가장 긴 모델이 뭐였죠? + +06:06.760 --> 06:13.480 +제미니 1호요 지금은 백만 개 크기의 연락처 창이 있지만 곧 + +06:13.480 --> 06:17.410 +살펴볼 거예요 모든 컨텍스트 길이를 비교할 수 있죠 + +06:17.680 --> 06:19.780 +이게 기본이고요 + +06:19.780 --> 06:23.440 +여러분이 살펴볼 만한 기본으로 넘어가 보죠 + +06:23.440 --> 06:27.850 +고려해야 할 비용이 아주 많아요 + +06:27.880 --> 06:32.350 +조사 비용, 훈련 비용 건축 비용으로 나눴어요 + +06:32.350 --> 06:38.170 +추론 비용은 물론 프로덕션에서 이 모델을 실행할 때마다 입력된 산출물을 생성하기 + +06:38.170 --> 06:40.420 +위해 드는 비용은 얼마일까요? + +06:40.420 --> 06:43.750 +간단히 말하자면요. 추측성으로 말하는 거예요. + +06:43.930 --> 06:49.330 +오픈 소스를 다루느냐 폐쇄 소스를 다루느냐 상호 작용 방식에 따라 비용이 여러 가지 + +06:49.330 --> 06:51.430 +유형으로 발생할 수 있어요 + +06:51.460 --> 06:56.500 +물론 프런티어 모델에서는 API 비용을 고려합니다 입력 토큰과 + +06:56.500 --> 07:01.840 +출력 토큰의 개수로 구성되어 있는데 API 비용에 들어가는 것이죠 + +07:02.170 --> 07:08.330 +Pro 사용자 인터페이스나 채팅 UI를 사용하는 것에 대해 얘기할 때 구독을 + +07:08.330 --> 07:11.630 +생각하게 됩니다 월간 구독 비용이죠 + +07:11.630 --> 07:16.820 +여러분이 직접 실행하는 오픈 소스 모델에 관해 얘기한다면 런타임 + +07:16.820 --> 07:22.970 +계산 비용이 있을 겁니다 Colab 비용과 비슷하죠 프로덕션으로 배포하는 + +07:23.000 --> 07:26.960 +방법에 관해 이 과정의 마지막 주에 얘기할 거예요 + +07:26.990 --> 07:33.170 +GPU 박스에서 프로덕션에서 모델을 실행하게 해주는 Modal 같은 플랫폼을 + +07:33.170 --> 07:40.430 +생각해보면 클라우드에서 계산 상자를 실행하는 데 약간의 비용을 지불하게 되죠 + +07:40.430 --> 07:44.180 +오픈 소스를 위한 계산 런타임도 또 다른 요소죠 + +07:44.180 --> 07:49.820 +일반적으로 여러분이 여러분이 직접 훈련한 오픈 소스 모델로 작업하는 경우 추론 비용은 + +07:49.820 --> 07:52.400 +낮아집니다 여러분 모델이니까요 + +07:52.400 --> 07:55.400 +매번 API 비용을 지불하지 않아도 돼요 + +07:55.400 --> 07:58.100 +하지만 명확한 계산은 아니에요 + +07:58.130 --> 07:59.300 +사용 사례에 따라 다르죠 + +07:59.300 --> 08:03.350 +모델 선택과 매개 변수 개수에 따라 달라요 + +08:04.220 --> 08:06.710 +훈련 비용도 있고요 + +08:06.710 --> 08:13.470 +틀에 박히지 않은 개척자 모델을 사용하면 훈련비가 안 들어요 + +08:13.470 
--> 08:16.770 +7주 차에 할 건데 더 세밀하게 조정하지 않으면요 + +08:17.010 --> 08:22.380 +하지만 여러분의 도메인에 특화하고 싶은 오픈 소스 모델을 구축하는데 + +08:22.380 --> 08:27.810 +거기에 교육 비용을 추가한다면 그에 따른 비용이 발생하겠죠 + +08:27.810 --> 08:29.820 +그것도 고려해야 해요 + +08:30.000 --> 08:31.680 +공사비요 + +08:31.740 --> 08:37.650 +이 솔루션을 만드는 데 얼마나 많은 일이 필요할까요? + +08:37.770 --> 08:42.900 +다음 제품과 밀접한 관련이 있어요 다음 제품은 출시까지 얼마나 걸릴까요? + +08:43.170 --> 08:47.610 +개척지 모델을 쓰는 것의 장점 중 하나죠 + +08:47.610 --> 08:50.280 +지금이 시장철인가요? + +08:50.280 --> 08:52.590 +건축 비용은 아주 낮을 수 있죠 + +08:52.590 --> 08:59.640 +개척 시대 모델을 이용한 강력한 해결책을 실행하려면 시간이 아주 오래 걸리지 않아요 + +08:59.640 --> 09:04.140 +일반적으로, 여러분 고유의 오픈 소스 모델을 세밀하게 조정하려는 경우 더 오래 걸리고 + +09:04.140 --> 09:05.340 +더 어려워지죠 + +09:05.490 --> 09:16.150 +그게 선구 모델의 속도 제한을 높이는 데 중요한 요소입니다 제한에 부딪힐 + +09:16.180 --> 09:18.790 +수도 있어요 + +09:18.790 --> 09:21.820 +구독 플랜의 전형적인 경우죠 + +09:22.030 --> 09:25.660 +요금 제한도 포함해서요 + +09:25.660 --> 09:30.610 +API를 통한 프론티어 모델을 사용할 때도 신뢰성을 강조하죠 + +09:30.610 --> 09:36.430 +이런 경험을 한 모델들이 있어요 GPT + +09:36.460 --> 09:43.930 +4와 클로드 3 둘 다요 5개의 소네트 API가 에러에 반응하는데 과부하가 걸렸다는 거죠 + +09:43.930 --> 09:45.970 +당시엔 생산하느라 너무 바빴거든요 + +09:45.970 --> 09:54.460 +속도 한계에 관한 것이고 안정점과 속도도 있습니다 처리량과 관련된 것입니다 전체 + +09:54.460 --> 10:02.740 +응답을 생성하는 데 얼마나 걸릴까요? 새 토큰을 생성하는 데 얼마나 걸릴까요? + +10:02.740 --> 10:04.780 +아주 비슷하죠 + +10:05.050 --> 10:11.230 +속도와 대기 시간 사이에는 미묘한 차이가 있어요 요청 응답 시간 같은 + +10:11.230 --> 10:11.980 +거죠 + +10:12.040 --> 10:17.620 +토큰 하나에 얼마나 빨리 반응하는지 물으면요? 
+ +10:17.920 --> 10:25.990 +기억하실지 모르겠지만 저희 항공사의 인공지능 보조 멀티모달에서 저희에게 말을 걸었어요 + +10:25.990 --> 10:28.090 +대기 시간이 문제였어요 비트 + +10:28.120 --> 10:33.040 +그때 언급 안 한 것 같은데 어색한 침묵이 흘렀어요 텍스트가 + +10:33.040 --> 10:35.260 +뜨면 모델로 가니까요 + +10:35.260 --> 10:39.940 +개척자 모델을 불러내서 음향을 생성하고 돌아와서 재생하죠 + +10:39.940 --> 10:45.910 +이미지를 생성할 때는 더 힘들었어요 이미지가 돌아오는 데 시간이 좀 걸렸고 + +10:45.910 --> 10:49.510 +우리는 앉아서 이미지를 기다려야 했거든요 + +10:49.540 --> 10:54.310 +물론 프로토타입보다 더 우아하게 다룰 방법은 있지만 반드시 + +10:54.310 --> 10:56.200 +그래야만 하는 요소죠 + +10:56.350 --> 11:01.330 +자신의 오픈 소스 모델을 다루고 있다면 여러분이 더 통제할 수 있다는 + +11:01.330 --> 11:02.590 +걸 기억하세요 + +11:02.890 --> 11:08.800 +마지막으로 기본이지만 가장 중요한 건 라이선스예요 + +11:09.010 --> 11:12.620 +오픈 소스든 비공개 소스든 말이에요 + +11:12.740 --> 11:20.660 +라이선스 제한을 잘 아셔야 합니다 여러분이 있는 곳과 사용 금지라는 측면에서요 + +11:20.810 --> 11:25.130 +많은 오픈 소스 모델은 오픈 라이선싱을 갖고 있어요 + +11:25.160 --> 11:27.380 +작은 글씨가 있는 것도 있어요 + +11:27.380 --> 11:32.150 +안정적인 확산이라는 건 상업적으로 어느 정도 사용할 수 있다는 + +11:32.150 --> 11:33.200 +뜻인데요 + +11:33.200 --> 11:38.120 +귀사의 수입이 일정 수준을 넘으면 안정적으로 + +11:38.120 --> 11:43.730 +확산하는 일종의 계약 사업 협약이 필요한 시점이죠 + +11:43.760 --> 11:51.260 +라마 3과 함께 서비스 약관에 서명하는 경험을 했어요 메타 1이요 + +11:51.290 --> 11:56.090 +좋은 목적으로 사용하고 있는지 확인하기 위한 것이지만 라이선스의 + +11:56.090 --> 11:58.490 +일부라는 걸 알아야 해요 + +11:58.520 --> 12:01.310 +자, 기본은 다 됐고요 + +12:01.310 --> 12:07.790 +성능을 더 자세히 분석하기 전에 기록해야 할 것들이죠 작업에 필요한 + +12:07.820 --> 12:11.240 +모델의 정확성도 마찬가지고요 + +12:11.240 --> 12:13.880 +다음 시간에 계속하죠 diff --git a/week5/community-contributions/subtitles/srts/59295459/en_US.srt b/week5/community-contributions/subtitles/srts/59295459/en_US.srt new file mode 100755 index 0000000..a457339 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295459/en_US.srt @@ -0,0 +1,508 @@ +WEBVTT + +00:01.220 --> 00:07.160 +And welcome back to More Leaderboard Fest as we go through some more leaderboards. 
+ +00:07.160 --> 00:13.490 +But this time we're bringing into the mix open source and closed source models together on some of the + +00:13.490 --> 00:15.080 +leading leaderboards. + +00:15.080 --> 00:18.650 +Outside hugging face for for a change. + +00:18.650 --> 00:21.440 +We are actually not going to be looking at hugging face now. + +00:21.440 --> 00:27.200 +So the first leaderboard that I want to show you, that is another one that is definitely belonging + +00:27.200 --> 00:29.780 +on your bookmarks is the vellum leaderboard. + +00:29.780 --> 00:31.760 +We did touch on this briefly in the past. + +00:31.940 --> 00:40.520 +Uh, vellum AI company had publishes this essential resource for LM practitioners, which compares different + +00:40.520 --> 00:43.310 +models at the very top of the page. + +00:43.310 --> 00:49.580 +You get these comparison charts about some basic benchmarks that are some of the easier benchmarks these + +00:49.580 --> 00:49.910 +days. + +00:49.970 --> 00:56.150 +MLU the reasoning one not the Pro, but the basic one that's, uh, you know, pinch of salt on this + +00:56.150 --> 00:59.240 +metric, but still, it's still quoted a lot. + +00:59.420 --> 01:02.480 +Um, human eval for Python coding and math. + +01:02.810 --> 01:08.240 +Um, and what you're seeing here are generally the closed source models that you know and love, like + +01:08.240 --> 01:14.000 +GPT four and sonnet 3.5, sonnet and GPT turbo and four. + +01:14.030 --> 01:15.140 +But look at that. + +01:15.140 --> 01:20.540 +There is an open source model in the mix in the form of llama 3.1 4.05 billion. + +01:20.570 --> 01:24.500 +That is the largest open source model on the planet. + +01:24.500 --> 01:32.300 +And you can see that it is competing, competing favorably with some frontier closed source models. + +01:32.540 --> 01:39.590 +Uh, so it does appear that this is in order of strength with the strongest one first, GPT four zero, + +01:39.590 --> 01:42.410 +uh, crushing it with MLU. 
+
+01:42.410 --> 01:47.780
+But you can see that llama 405 B is just fractionally behind.
+
+01:47.840 --> 01:50.000
+Um, and they're all neck and neck.
+
+01:50.000 --> 01:50.720
+Really?
+
+01:50.750 --> 01:57.740
+Uh, so, uh, obviously llama 405 billion is the open source model is very much a contender.
+
+01:58.280 --> 02:02.900
+Then when it comes to coding, you can see that this is the order.
+
+02:02.900 --> 02:12.050
+Claude 3.5 sonnet is the leader, then GPT four zero, then llama 405 B in third position, and then
+
+02:12.050 --> 02:14.150
+the mini version of GPT four zero.
+
+02:14.330 --> 02:17.930
+Not not far off given how much cheaper it is.
+
+02:18.170 --> 02:20.420
+And then GPT turbo.
+
+02:20.990 --> 02:28.610
+And then here is the ranking for math questions GPT four zero at the at the helm, followed by llama
+
+02:28.610 --> 02:35.300
+405 billion right after that, and then followed by the others, with Claude coming in fourth place.
+
+02:35.300 --> 02:41.570
+For those top models, here are some super useful charts on performance.
+
+02:41.810 --> 02:47.270
+A little bit easier to interpret than the multi-dimensional chart we saw in Hugging face, although
+
+02:47.300 --> 02:48.620
+less information, of course.
+
+02:48.800 --> 02:57.020
+Uh, so in terms of the speed, the fastest to generate tokens measured in tokens per second is llama
+
+02:57.050 --> 02:59.060
+8 billion open source model.
+
+02:59.060 --> 03:05.780
+Not surprising because of course with fewer parameters it's doing less, so probably worth understanding.
+
+03:05.810 --> 03:06.500
+Uh, yeah.
+
+03:06.530 --> 03:07.310
+I see.
+
+03:07.340 --> 03:07.520
+So.
+
+03:07.520 --> 03:13.160
+So this is all, uh, trying as much as possible to run in a consistent way.
+
+03:13.160 --> 03:16.040
+And the information explains a little bit more about that.
+
+03:16.160 --> 03:23.060
+Uh, so after llama eight comes llama 70, a bigger model, and then Gemini 1.5 flash.
+ +03:23.120 --> 03:26.480 +Uh, and then Claude, three haiku, and then GPT four. + +03:26.510 --> 03:28.070 +Oh, mini. + +03:28.070 --> 03:29.060 +Uh, the mini variant. + +03:29.060 --> 03:32.270 +So obviously the smaller models are the faster ones. + +03:32.270 --> 03:33.950 +No surprise there. + +03:34.220 --> 03:36.020 +Uh, latency. + +03:36.050 --> 03:42.830 +Uh, that's that's measured in the number of seconds until the first token is received. + +03:42.860 --> 03:44.480 +It's a nice way of capturing it. + +03:44.480 --> 03:49.730 +That's a good way to explain what I was talking about earlier, when I showed latency on the basic attributes. + +03:49.730 --> 03:55.910 +And you can see no surprise the smaller models are able to respond very rapidly. + +03:55.970 --> 04:01.970 +Um, and here GPT four surprisingly has improved latency over GPT four. + +04:02.010 --> 04:02.280 +zero. + +04:02.310 --> 04:08.070 +Many, which may be just related to the the hardware setup that it has. + +04:08.070 --> 04:10.170 +I'm not sure, but they're close anyway. + +04:10.740 --> 04:17.730 +And then the cheapest models, which is measured in terms of dollars per million tokens. + +04:17.730 --> 04:24.540 +Uh, llama 8 billion comes in cheapest Gemini 1.5 flash does well, GPT four and mini of course is very + +04:24.540 --> 04:25.080 +cheap. + +04:25.080 --> 04:34.950 +And then, uh, the, uh, Claude three haiku, um, and then GPT 3.5 turbo after that. + +04:34.950 --> 04:40.890 +And this is being shown as two separate bars, one for input cost, one for output cost. + +04:41.190 --> 04:48.000 +So, uh, there is then a nice little interactive ability to compare two models and see them side by + +04:48.000 --> 04:50.190 +side against different measures. + +04:50.190 --> 04:56.430 +This is showing Claude three point uh, sorry, Claude 3.0 Claude three opus against GPT four. + +04:56.460 --> 05:04.320 +Oh, let's see if we can change this around a bit and pick 3.5 sonnet against GPT four. 
+ +05:04.350 --> 05:07.200 +Oh, this is the face to face that we like to look at. + +05:07.680 --> 05:09.900 +So here we go. + +05:10.140 --> 05:14.910 +I mean, really, it looks like generally it considers them neck and neck. + +05:14.940 --> 05:15.750 +What are they saying? + +05:15.780 --> 05:22.650 +88.3% for Claude, 3.5 and 88.7% for GPT four. + +05:22.680 --> 05:22.920 +Oh. + +05:22.950 --> 05:28.080 +So giving GPT four the edge there reasoning Claude does better coding. + +05:28.080 --> 05:32.820 +Claude does better math, Claude does worse tool use. + +05:32.940 --> 05:36.540 +Uh, of course, what we went through in week two. + +05:36.660 --> 05:41.580 +Uh, Claude does better and multilingual Claude does better. + +05:41.580 --> 05:43.320 +So great. + +05:43.320 --> 05:48.120 +Uh, fascinating to be able to compare the models side by side like this. + +05:48.390 --> 05:54.840 +Um, then this table has, uh, row by row, the different models. + +05:54.870 --> 06:01.290 +Um, and so you can come through and look at, uh, closed source models like Claude 3.5 sonnet. + +06:01.290 --> 06:06.720 +Uh, that in terms of the averages, here is the one at the at the top of this leaderboard. + +06:06.870 --> 06:12.570 +Um, what you're looking at here is MLU again, which is this metric where everything scores very well. + +06:12.990 --> 06:18.000 +The one that we talked about in the initial metrics human eval for Python. + +06:18.000 --> 06:25.620 +This is the be hard benchmark that I mentioned was the benchmark designed to test future capabilities + +06:25.620 --> 06:28.290 +of models above and beyond what they're capable of. + +06:28.380 --> 06:36.480 +Um, but would you believe when you look at this cloud 3.5, sonnet is already scoring 93.1% in B hard, + +06:36.570 --> 06:41.580 +which means that no longer is this a metric that's testing for future capabilities. + +06:41.580 --> 06:43.680 +It is very much current capabilities. 
+
+06:43.680 --> 06:46.500
+And Claude 3.5 sonnet is crushing it.
+
+06:46.980 --> 06:51.390
+Uh, grade school math and harder math problems.
+
+06:51.420 --> 06:57.870
+So here you see the the the the results from these different models.
+
+06:57.870 --> 07:03.180
+And something I mentioned early on that that is a bit puzzling is that Claude 3.5.
+
+07:03.210 --> 07:06.210
+Sonnet performs better than Claude three.
+
+07:06.240 --> 07:07.230
+Opus.
+
+07:07.320 --> 07:15.090
+Um, but Claude three opus is still provided as a by anthropic as as an API and costs significantly
+
+07:15.090 --> 07:16.590
+more than 3.5 sonnet.
+
+07:16.800 --> 07:20.010
+So I'm not sure why anyone would choose Claude.
+
+07:20.040 --> 07:23.700
+Three opus over 3.5 sonnet unless there happens to be some specific.
+
+07:23.730 --> 07:29.100
+Well, it looks like in the case of, uh, of reasoning, uh, Claude three opus does do better.
+
+07:29.100 --> 07:33.840
+So there are some, some, some ways in which it does better, but I'm not sure if it would be worth
+
+07:33.840 --> 07:35.340
+that extra price point.
+
+07:36.210 --> 07:43.560
+Um, and what you'll also see, of course, is that llama, uh, enters onto this model comparison.
+
+07:43.560 --> 07:49.530
+I noticed that llama 405 billion is not shown here, and I can only imagine.
+
+07:49.530 --> 07:56.310
+That's because they haven't yet been able to carry out all of these tests for llama 405 billion, because
+
+07:56.310 --> 08:04.350
+I would, of course, imagine that it far outperforms the 70 billion llama three instruct variant.
+
+08:06.120 --> 08:06.840
+Um.
+
+08:07.710 --> 08:13.590
+And now coming down to this table, this is the one that I showed you before.
+
+08:13.590 --> 08:18.300
+It's one place you can come to that will show you for the different models.
+
+08:18.330 --> 08:22.920
+What is their context, window size and what is their cost per input and output tokens.
+ +08:22.920 --> 08:32.310 +So it's of course only comparing the um, the, the, the models, uh, where it has that data, but + +08:32.310 --> 08:34.140 +it's extremely useful. + +08:34.140 --> 08:41.400 +It's something where, uh, you would either be hunting through many different pages online, or you + +08:41.400 --> 08:45.120 +can come here and see it all in one place, and that's why you should bookmark it. + +08:45.120 --> 08:52.410 +Uh, it of course, highlights that Gemini 1.5 flash has the extraordinary a million context window. + +08:52.410 --> 08:59.250 +That is, of course, 750,000 words or so of common English, uh, almost the complete works of Shakespeare, + +08:59.280 --> 09:03.190 +a extraordinarily large context window. + +09:03.670 --> 09:06.010 +The Claude family at 200,000. + +09:06.040 --> 09:09.250 +The GPT family at 128,000. + +09:09.280 --> 09:15.400 +Which, as I said before, seems somewhat slim compared to the million in Gemini 1.5 flash. + +09:15.400 --> 09:23.080 +But that's still a lot of information to be able to digest in a context window and still give a good + +09:23.080 --> 09:24.220 +response. + +09:24.430 --> 09:24.940 +Uh. + +09:24.970 --> 09:28.930 +You'll also see some open source models in the mix here. + +09:28.930 --> 09:36.640 +You can see mixed trials, context window size, and that the llama three models have an 8000 token + +09:36.640 --> 09:37.660 +context window. + +09:37.660 --> 09:43.660 +And that's worth bearing in mind as you compare using open source models to their closed source cousins, + +09:43.660 --> 09:49.240 +that if you need these massive context windows, then you're probably needing to go to the closed source + +09:49.240 --> 09:49.930 +route. + +09:52.120 --> 10:00.100 +Okay, so there is then a coding leaderboard that you can look at to compare human eval and then that, + +10:00.130 --> 10:04.870 +uh, that concludes the leaderboards on the vellum web page. + +10:04.870 --> 10:06.370 +There is one more to look at. 
+ +10:06.400 --> 10:13.300 +Of these, um, business, uh, sites, and it is called the seal Leaderboard, provided by a company + +10:13.300 --> 10:14.020 +called scale. + +10:14.020 --> 10:18.730 +Com and scale specializes in generating bespoke data sets. + +10:18.820 --> 10:26.080 +So if you are working on a particular problem and you need to have a data set, uh, crafted, curated + +10:26.080 --> 10:30.550 +for your problem, then that is something that scale com is in business for. + +10:30.760 --> 10:38.290 +Uh, if you aren't able to use the data generator that hopefully you built as part of last week's challenge. + +10:38.350 --> 10:46.720 +So this leaderboard has a bunch of very specific leaderboards for different tasks. + +10:46.750 --> 10:53.890 +And there's one on adversarial robustness, which is designed, as it explains very well here on the + +10:53.920 --> 11:02.230 +Learn More to, uh, test prompts designed to trigger harmful responses from large language models. + +11:02.230 --> 11:08.800 +And so there's this, specific examples of the kinds of problematic questions that are asked. + +11:08.920 --> 11:12.790 +And if, for example, you're looking sorry, I didn't mean to do that. + +11:12.790 --> 11:20.290 +If, for example, you're looking to surface this as a chat as perhaps your airline customer support + +11:20.290 --> 11:27.580 +chatbot, you will care about the fact that it is robust against being taken off track and doing something + +11:27.580 --> 11:31.210 +that that could be far off the rails. + +11:31.210 --> 11:34.090 +So this is a useful benchmark for that purpose. + +11:34.090 --> 11:41.080 +Coding gives a more detailed benchmark for coding skills, and you can see Claude 3.5 sonnet leads the + +11:41.080 --> 11:41.800 +way. + +11:41.980 --> 11:46.930 +Um, and Mistral, of course, this is another set of boards that combines closed and open source. 
+
+11:46.930 --> 11:57.130
+And Mistral Large two um, is in that top three, uh, as an open source, uh entrant instruction following
+
+11:57.310 --> 12:04.780
+uh, here you'll see that, uh, wonderfully, the llama 3.1 405 billion.
+
+12:04.810 --> 12:07.570
+They have been able to test this against instruction following.
+
+12:07.570 --> 12:09.130
+And it's in second place.
+
+12:09.130 --> 12:11.140
+It's ahead of GPT four zero.
+
+12:11.320 --> 12:14.110
+It's just behind Claude 3.5 sonnet.
+
+12:14.290 --> 12:20.320
+Uh, and so that is an amazing result for the world of open source and for meta coming in second place
+
+12:20.320 --> 12:21.010
+there.
+
+12:21.250 --> 12:23.230
+Uh, and in math problems.
+
+12:23.260 --> 12:30.280
+Llama 3.1 405 B comes in third place, GPT four zero and second Claude 3.5.
+
+12:30.310 --> 12:33.490
+Sonnet leading the way for math.
+
+12:33.610 --> 12:40.570
+And then there is also a leaderboard for Spanish, uh, which shows some of the results here.
+
+12:40.660 --> 12:47.980
+Uh, and Mistral is the open source front runner in fourth place with GPT four zero in pole position
+
+12:47.980 --> 12:48.790
+here.
+
+12:49.000 --> 12:55.450
+And Scale.com are adding more of these business specific leaderboards all the time.
+
+12:55.450 --> 13:03.700
+So come back to see what else has been added and use this as a great resource for more specific leaderboards
+
+13:03.700 --> 13:05.020
+for your business problem.
diff --git a/week5/community-contributions/subtitles/srts/59295459/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295459/ja_JP.srt new file mode 100755 index 0000000..fcdf599 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295459/ja_JP.srt @@ -0,0 +1,418 @@ +WEBVTT + +00:01.220 --> 00:07.160 +さらにリーダーボードを紹介する「More Leaderboard Fest」にようこそ。 + +00:07.160 --> 00:15.080 +しかし今回は、 オープンソースとクローズドソースのモデルをミックスし、 いくつかの主要なリーダーボードに掲載する。 + +00:15.080 --> 00:18.650 +気分転換に外でのハグ顔。 + +00:18.650 --> 00:21.440 +実は今、 私たちはハグする顔を見るつもりはない。 + +00:21.440 --> 00:29.780 +最初のリーダーボードは、 ブックマークに入れておいて損はないものです。 + +00:29.780 --> 00:31.760 +過去に少し触れたことがある。 + +00:31.940 --> 00:43.310 +ベラムAIの会社は、 LMの実務家にとって不可欠なリソースを公開している。 + +00:43.310 --> 00:49.910 +最近の簡単なベンチマークである、 いくつかの基本的なベンチマークについての比較表があります。 + +00:49.970 --> 00:59.240 +国土交通省......プロではなく、 基本的なことなんですが......。 + +00:59.420 --> 01:02.480 +ええと、 Pythonのコーディングと数学のための人間の評価。 + +01:02.810 --> 01:14.000 +ここにあるのは、 GPT4やソネット3のような、 皆さんが知っているクローズドソースのモデルです。 + +01:14.000 --> 01:14.000 +5、 ソネット、 GPTターボと4。 + +01:14.030 --> 01:15.140 +しかし、 あれを見てほしい。 + +01:15.140 --> 01:20.540 +llama 3というオープンソースモデルもある。 1 4. 050億ドルだ。 + +01:20.570 --> 01:24.500 +これは地球上で最大のオープンソースモデルだ。 + +01:24.500 --> 01:32.300 +そして、 いくつかのフロンティアのクローズド・ソース・モデルと有利に競争していることがわかるだろう。 + +01:32.540 --> 01:42.410 +つまり、 強い順にGPT4、 ゼロ、 MLUと潰していく。 + +01:42.410 --> 01:47.780 +しかし、 ラマ405 Bがわずかに遅れているのがわかるだろう。 + +01:47.840 --> 01:50.000 +ええと、 そして、 みんな首の皮一枚なんだ。 + +01:50.000 --> 01:50.720 +本当に? 
+ +01:50.750 --> 01:57.740 +ええと、 つまり、 明らかに40億か50億のラマは、 オープンソースモデルが非常に有力な候補です。 + +01:58.280 --> 02:02.900 +そしてコーディングになると、 これが順序だとわかる。 + +02:02.900 --> 02:14.150 +第3項 5ソネットがトップで、 次にGPTフォーゼロ、 そしてラマ405が3番手、 そしてGPTフォーゼロのミニ版となる。 + +02:14.330 --> 02:17.930 +これだけ安くなったのだから、 当たらずとも遠からずだろう。 + +02:18.170 --> 02:20.420 +そしてGPTターボ。 + +02:20.990 --> 02:35.300 +そして、 数学問題GPTのランキングである。 + +02:35.300 --> 02:41.570 +そんなトップモデルのために、 パフォーマンスに関する超便利なチャートを紹介しよう。 + +02:41.810 --> 02:48.620 +もちろん情報量は少ないが、 『ハグする顔』で見た多次元チャートよりは少し解釈しやすい。 + +02:48.800 --> 02:59.060 +スピードという点では、 1秒間にトークンを生成する速度が最も速いのはllamaの80億オープンソース・モデルです。 + +02:59.060 --> 03:05.780 +もちろん、 パラメータが少なければ、 その分やっていることも少なくなるので、 驚くことではないが、 おそらく理解する価値はあるだろう。 + +03:05.810 --> 03:06.500 +ああ、 そうだね。 + +03:06.530 --> 03:07.310 +なるほど。 + +03:07.340 --> 03:07.520 +だから + +03:07.520 --> 03:13.160 +だから、 できる限り一貫したやり方で走ろうとしているんだ。 + +03:13.160 --> 03:16.040 +そして、 この情報ではそれについてもう少し詳しく説明している。 + +03:16.160 --> 03:23.060 +ラマ8号の次はラマ70号、 より大きなモデル、 そしてジェミニ1号だ。 5フラッシュ + +03:23.120 --> 03:26.480 +それからクロード、 俳句3つ、 GPT4つ。 + +03:26.510 --> 03:28.070 +ああ、 ミニか。 + +03:28.070 --> 03:29.060 +ミニ・ヴァリアントだね。 + +03:29.060 --> 03:32.270 +だから、 小さいモデルの方が速いのは明らかだ。 + +03:32.270 --> 03:33.950 +驚きはない。 + +03:34.220 --> 03:36.020 +ええと、 待ち時間。 + +03:36.050 --> 03:42.830 +ええと、 それは最初のトークンを受け取るまでの秒数で測ります。 + +03:42.860 --> 03:44.480 +いい捉え方だ。 + +03:44.480 --> 03:49.730 +これは、 私が以前、 基本的なアトリビュートでレイテンシーを示したときに話していたことを説明する良い方法だ。 + +03:49.730 --> 03:55.910 +そして、 小型のモデルが非常に迅速に対応できるのも不思議ではない。 + +03:55.970 --> 04:01.970 +ええと、 ここでGPT 4は驚くことにGPT 4よりもレイテンシが改善されている。 + +04:02.010 --> 04:02.280 +ゼロだ。 + +04:02.310 --> 04:08.070 +その多くは、 ハードウェアのセットアップに関係しているのかもしれない。 + +04:08.070 --> 04:10.170 +よくわからないが、 とにかく近い。 + +04:10.740 --> 04:17.730 +そして最も安いモデルは、 100万トークンあたりのドルで測定される。 + +04:17.730 --> 04:25.080 +ええと、 ラマ80億は最も安いジェミニ1号で来る。 5フラッシュはよく動くし、 GPTは4つ、 ミニはもちろんとても安い。 + +04:25.080 --> 04:34.950 +そして、 クロードの3つの俳句、 GPT3。 その後5ターボ。 + +04:34.950 --> 04:40.890 +インプット・コストとアウトプット・コストの2つのバーが表示されている。 + +04:41.190 --> 
04:50.190 +つまり、 2つのモデルを比較し、 異なる尺度に照らして並べて見ることができる、 ちょっとしたインタラクティブな機能があるんだ。 + +04:50.190 --> 04:56.430 +これはクロードの3ポイント......すみません、 クロード3です。 GPT 4に対して0クロード3オパス。 + +04:56.460 --> 05:04.320 +じゃあ、 ちょっと趣向を変えて3番を選ぼうか。 GPT4に対して5ソネット。 + +05:04.350 --> 05:07.200 +ああ、 これが私たちの好きな顔合わせだ。 + +05:07.680 --> 05:09.900 +それでは、 どうぞ。 + +05:10.140 --> 05:14.910 +つまり、 一般的には首の皮一枚と考えられているようだ。 + +05:14.940 --> 05:15.750 +彼らは何を言っているんだ? + +05:15.780 --> 05:22.650 +88. クロードは3%、 3. 5と88である。 GPT4は7%。 + +05:22.680 --> 05:22.920 +ああ。 + +05:22.950 --> 05:28.080 +つまり、 GPT4が有利なのは、 クロードのコーディングが優れているからだ。 + +05:28.080 --> 05:32.820 +クロードは数学が得意で、 クロードは道具の使い方が下手だ。 + +05:32.940 --> 05:36.540 +もちろん、 2週目に経験したことだ。 + +05:36.660 --> 05:41.580 +ええと、 クロードの方が上手いし、 マルチリンガルのクロードの方が上手だ。 + +05:41.580 --> 05:43.320 +とても素晴らしい。 + +05:43.320 --> 05:48.120 +こうやって並べて比較できるのは魅力的だね。 + +05:48.390 --> 05:54.840 +この表には、 一行ごとにさまざまなモデルが並んでいる。 + +05:54.870 --> 06:01.290 +それで、 クロード3のようなクローズドソースのモデルを見ることができるんだ。 5ソネット + +06:01.290 --> 06:06.720 +ええと、 平均で言うと、 このリーダーボードのトップにいるのはこの選手です。 + +06:06.870 --> 06:12.570 +ええと、 ここで見ているのはMLUで、 この指標ではすべてが非常に良いスコアです。 + +06:12.990 --> 06:18.000 +Pythonの最初のメトリクスのhuman evalで話したものだ。 + +06:18.000 --> 06:28.290 +このbe hardベンチマークは、 モデルの能力を超えて将来の能力をテストするために設計されたベンチマークであると述べた。 + +06:28.380 --> 06:41.580 +でも、 この雲3を見て信じられる? 5、 ソネットはすでに93点。 Bハードで1%ということは、 もはや将来の能力をテストする指標ではないということだ。 + +06:41.580 --> 06:43.680 +それは非常に現在の能力だ。 + +06:43.680 --> 06:46.500 +そして雲3。 5ソネットは破竹の勢いだ。 + +06:46.980 --> 06:51.390 +ええと、 小学生の算数と、 もっと難しい算数の問題。 + +06:51.420 --> 06:57.870 +それでは、 これらの異なるモデルの結果をご覧いただきたい。 + +06:57.870 --> 07:03.180 +序盤で少し不可解だと申し上げたのは、 クラウド3だ。 5. 
+ +07:03.210 --> 07:06.210 +ソネットはクロード3人よりも良いパフォーマンスを見せている。 + +07:06.240 --> 07:07.230 +作品。 + +07:07.320 --> 07:16.590 +ええと、 でもクロード・スリー・オーパスはAPIとして人類によって提供されており、 3よりもかなり高価です。 + +07:16.590 --> 07:16.590 +5ソネット + +07:16.800 --> 07:20.010 +だから、 なぜクロードを選ぶのか分からない。 + +07:20.040 --> 07:23.700 +スリーオーパスオーバー3。 特別なことがない限り、 5ソネット。 + +07:23.730 --> 07:29.100 +まあ、 推理の場合、 クロード・スリー・オーパスの方がうまくいくようだ。 + +07:29.100 --> 07:35.340 +だから、 いくつか、 いくつか、 いくつか、 より優れている点がある。 + +07:36.210 --> 07:43.560 +そしてもちろん、 このモデル比較にラマが登場することもおわかりいただけるだろう。 + +07:43.560 --> 07:49.530 +ラマ4,050億がここに表示されていないことに気づいた。 + +07:49.530 --> 08:04.350 +というのも、 リャマ4については、 まだすべてのテストを実施できていないからだ。 50億ドルというのは、 もちろん、 700億リャマの3人のインストラクターのバリエーションをはるかに凌駕していると想像するからだ。 + +08:06.120 --> 08:06.840 +うーん。 + +08:07.710 --> 08:13.590 +そして、 このテーブルに来て、 これは前にお見せしたものです。 + +08:13.590 --> 08:18.300 +さまざまなモデルを紹介してくれる場所だ。 + +08:18.330 --> 08:22.920 +そのコンテキスト、 ウィンドウサイズ、 入力と出力のトークンあたりのコストはどのくらいか。 + +08:22.920 --> 08:34.140 +もちろん、 そのデータがあるモデルだけを比較するものだが、 非常に役に立つ。 + +08:34.140 --> 08:45.120 +ネットでいろいろなページを見て回るのもいいし、 ここに来て一箇所で全部見るのもいい。 + +08:45.120 --> 08:52.410 +もちろん、 ジェミニ1号のハイライトだ。 5フラッシュは、 100万コンテキストのウィンドウを持っています。 + +08:52.410 --> 08:59.250 +それはもちろん、 75万語ほどの一般英語、 つまりシェイクスピアのほぼ全作品という、 + +08:59.280 --> 09:03.190 +非常に大きな文脈の窓である。 + +09:03.670 --> 09:06.010 +クロード家は20万ドル。 + +09:06.040 --> 09:09.250 +GPTファミリーは128,000。 + +09:09.280 --> 09:15.400 +前にも言ったように、 双子座1号の100万人に比べると、 やや少ないように思える。 5フラッシュ + +09:15.400 --> 09:24.220 +しかし、 コンテクストウィンドウの中で消化し、 なおかつ適切な返答をするには、 まだ多くの情報が必要だ。 + +09:24.430 --> 09:24.940 +ええと。 + +09:24.970 --> 09:28.930 +ここにはオープンソースのモデルも混じっている。 + +09:28.930 --> 09:37.660 +混合トライアル、 コンテキスト・ウィンドウのサイズ、 そしてラマ3モデルのコンテキスト・ウィンドウが8000トークンであることがわかる。 + +09:37.660 --> 09:43.660 +そして、 オープンソースのモデルとクローズドソースのモデルを比較する際には、 これらの巨大なコンテキストウィンドウが必要なら、 + +09:43.660 --> 09:49.930 +おそらくクローズドソースのルートに行く必要があるということを覚えておく価値がある。 + +09:52.120 --> 10:04.870 +コーディング・リーダーボードは、 人間の評価を比較するために見ることができます。 + +10:04.870 --> 10:06.370 +もう1つ見ておきたいことがある。 + +10:06.400 --> 10:14.020 +これらの、 
ええと、 ビジネス、 ええと、 サイトのうち、 スケールという会社が提供する、 シール・リーダーボードと呼ばれるものだ。 + +10:14.020 --> 10:18.730 +コム・アンド・スケールは、 オーダーメイドのデータセットの作成を専門としている。 + +10:18.820 --> 10:30.550 +ですから、 特定の問題に取り組んでいて、 その問題用に作成されたデータセットが必要な場合、 スケールコムはそのためにビジネスを展開しているのです。 + +10:30.760 --> 10:38.290 +あ、 もし先週のチャレンジで作ったデータジェネレーターが使えなかった場合は...。 + +10:38.350 --> 10:46.720 +このリーダーボードには、 さまざまなタスクに特化したリーダーボードがたくさんある。 + +10:46.750 --> 11:02.230 +敵対的ロバスト性については、 このLearn Moreで非常によく説明されているように、 大規模な言語モデルから有害な反応を引き起こすように設計されたプロンプトをテストするように設計されている。 + +11:02.230 --> 11:08.800 +そして、 このような、 問題のある質問の具体例がある。 + +11:08.920 --> 11:12.790 +例えば、 申し訳なさそうな顔をしていたら、 そんなつもりじゃなかったんだ。 + +11:12.790 --> 11:20.290 +例えば、 航空会社のカスタマー・サポート・チャットボットのように、 これをチャットとして利用しようと考えている場合、 + +11:20.290 --> 11:31.210 +軌道から大きく外れるようなことをしないよう、 堅牢であることを気にするだろう。 + +11:31.210 --> 11:34.090 +だから、 これはそのための有効なベンチマークなのだ。 + +11:34.090 --> 11:41.800 +コーディングは、 コーディングスキルのより詳細なベンチマークを提供し、 クロード3を参照してください。 5ソネットがリード + +11:41.980 --> 11:46.930 +ミストラルももちろん、 クローズドソースとオープンソースを組み合わせたボードセットだ。 + +11:46.930 --> 12:04.780 +そしてミストラルのラージ2が、 オープンソースとしてトップ3に入っている。 + +12:04.780 --> 12:04.780 +1 4050億円 + +12:04.810 --> 12:07.570 +彼らはこれを以下の指示に対してテストすることができた。 + +12:07.570 --> 12:09.130 +しかも2位だ。 + +12:09.130 --> 12:11.140 +GPTの4ゼロを上回っている。 + +12:11.320 --> 12:14.110 +クロード3のすぐ後ろだ。 5ソネット + +12:14.290 --> 12:21.010 +オープンソースの世界にとっても、 メタの2位という結果にとっても素晴らしいことだ。 + +12:21.250 --> 12:23.230 +数学の問題でもね。 + +12:23.260 --> 12:30.280 +ラマ 3. 1 405 Bが3位、 GPT4ゼロ、 2位クロード3。 5. 
+ +12:30.310 --> 12:33.490 +数学をリードするソネット + +12:33.610 --> 12:40.570 +そして、 スペイン語のリーダーボードもある。 + +12:40.660 --> 12:48.790 +ミストラルはオープンソースのトップランナーで4位、 GPT four zeroがポールポジションだ。 + +12:49.000 --> 12:55.450 +そしてQxl。 comでは、 このようなビジネス別のリーダーボードを随時追加している。 + +12:55.450 --> 13:05.020 +また、 あなたのビジネス課題により具体的なリーダーボードを見つけるための素晴らしいリソースとしてご活用ください。 diff --git a/week5/community-contributions/subtitles/srts/59295459/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295459/ko_KR.srt new file mode 100755 index 0000000..bb0eb11 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295459/ko_KR.srt @@ -0,0 +1,490 @@ +WEBVTT + +00:01.220 --> 00:07.160 +다시 리더보드 페스티벌입니다 리더보드를 더 살펴보죠 + +00:07.160 --> 00:13.490 +하지만 이번에는 상위 리더보드에서 믹스 오픈 소스와 비공개 소스 모델을 함께 + +00:13.490 --> 00:15.080 +가지고 올 거예요. + +00:15.080 --> 00:18.650 +밖에서 얼굴이나 안아주고요 + +00:18.650 --> 00:21.440 +이제 포옹은 안 할 거예요 + +00:21.440 --> 00:27.200 +첫 번째 리더보드를 보여드릴게요 이것도 분명히 여러분 책갈피에 들어갈 + +00:27.200 --> 00:29.780 +거예요 피지 리더보드죠 + +00:29.780 --> 00:31.760 +과거에 잠깐 언급한 적이 있죠 + +00:31.940 --> 00:40.520 +피지 인공지능 회사는 LM 종사자들을 위한 필수 자료를 발표했고 페이지 맨 위에 있는 + +00:40.520 --> 00:43.310 +다른 모델을 비교했어요 + +00:43.310 --> 00:49.910 +비교 차트가 있는데 기본 벤치마크에 관한 거죠 요즘엔 쉬운 벤치마크예요 + +00:49.970 --> 00:56.150 +MLU는 추론이고 Pro는 아니고 기본 수치예요 소금 한 꼬집이지만 + +00:56.150 --> 00:59.240 +여전히 많이 인용되죠 + +00:59.420 --> 01:02.480 +파이썬 코딩과 수학에 대한 인간 평가예요 + +01:02.810 --> 01:08.240 +여기서 보시는 건 일반적으로 비공개 소스 모델입니다 여러분이 잘 알고 + +01:08.240 --> 01:14.000 +사랑하는 GPT 4, 소넷 3처럼요 5번 소네트와 GPT 터보 그리고 4번요 + +01:14.030 --> 01:15.140 +저것 좀 보세요 + +01:15.140 --> 01:20.540 +혼합물에 llama 3 형태의 오픈 소스 모델이 있어요 1분 4초 5억 5천만 달러요 + +01:20.570 --> 01:24.500 +지구상에서 가장 큰 오픈 소스 모델이죠 + +01:24.500 --> 01:32.300 +보다시피 일부 비공개 소스 모델과 호의적으로 경쟁하고 있어요 + +01:32.540 --> 01:39.590 +현재 가장 강력한 팀이 선두를 달리고 있습니다 GPT 40, MLU가 + +01:39.590 --> 01:42.410 +압도적으로 앞섰죠 + +01:42.410 --> 01:47.780 +하지만 라마는 405B에 살짝 뒤처져 있어요 + +01:47.840 --> 01:50.000 +다들 막상막하예요 + +01:50.000 --> 01:50.720 +정말요? 
+ +01:50.750 --> 01:57.740 +라마는 40억 혹은 50억 개로 오픈 소스 모델이 강력한 경쟁자죠 + +01:58.280 --> 02:02.900 +코딩을 할 때는 이 순서가 보이시죠 + +02:02.900 --> 02:12.050 +3항을 보세요 5 소네트가 선두 그다음은 GPT 405 그 뒤는 라마 405 미니 + +02:12.050 --> 02:14.150 +버전 GPT 405가 3위죠 + +02:14.330 --> 02:17.930 +얼마 안 되는 것 같아요 훨씬 저렴하니까요 + +02:18.170 --> 02:20.420 +GPT 터보도 있죠 + +02:20.990 --> 02:28.610 +GPT 수학 문제 순위표입니다 GPT는 4-0이었고 그 뒤에는 라마가 4,050억 + +02:28.610 --> 02:35.300 +달러였죠 그 뒤로는 다른 순위표들이 나왔고 클로드가 4위를 차지했어요 + +02:35.300 --> 02:41.570 +그런 톱모델을 위해 아주 유용한 실적 차트를 보여드릴게요 + +02:41.810 --> 02:47.270 +얼굴 안기 때 봤던 다차원 차트보다 해석하기가 좀 더 쉽죠 물론 비트는 + +02:47.300 --> 02:48.620 +더 적지만요 + +02:48.800 --> 02:57.020 +속도 측면에서 보면 초당 토큰으로 토큰을 가장 빨리 생성하는 건 라마다 80억 오픈 + +02:57.050 --> 02:59.060 +소스 모델이에요 + +02:59.060 --> 03:05.780 +당연하죠, 매개 변수가 적을수록 작업량도 less니까요 그러니 이해할 가치가 있죠 + +03:05.810 --> 03:06.500 +네 + +03:06.530 --> 03:07.310 +그렇군요 + +03:07.340 --> 03:07.520 +그래서요? + +03:07.520 --> 03:13.160 +최대한 일관되게 달리려고 노력하는 게 중요해요 + +03:13.160 --> 03:16.040 +비트에 대한 정보가 조금 더 설명해줘요 + +03:16.160 --> 03:23.060 +라마 8 다음은 더 큰 모델인 라마 70 그리고 제미니 1호예요 5번 섬광이에요 + +03:23.120 --> 03:26.480 +클로드, 하이쿠 3개 GPT 4개요 + +03:26.510 --> 03:28.070 +미니예요 + +03:28.070 --> 03:29.060 +미니 변형요 + +03:29.060 --> 03:32.270 +작은 모델이 더 빠르죠 + +03:32.270 --> 03:33.950 +놀랍지도 않네요 + +03:34.220 --> 03:36.020 +대기 시간 때문에요 + +03:36.050 --> 03:42.830 +첫 번째 토큰이 도착하기까지 남은 시간을 의미하죠 + +03:42.860 --> 03:44.480 +그걸 포착하는 좋은 방법이죠 + +03:44.480 --> 03:49.730 +아까 말씀드렸던 걸 설명하기에 좋은 방법이네요 기본 속성에 대한 대기 시간을 보여드렸을 때요 + +03:49.730 --> 03:55.910 +작은 모델도 빠르게 반응하는 게 놀랍지 않죠 + +03:55.970 --> 04:01.970 +GPT 4는 놀랍게도 4보다 대기 시간이 개선됐어요 + +04:02.010 --> 04:02.280 +0명요 + +04:02.310 --> 04:08.070 +하드웨어 셋업과 관련 있는 게 많아요 + +04:08.070 --> 04:10.170 +잘은 모르지만 둘이 친해요 + +04:10.740 --> 04:17.730 +가장 싼 모델은 백만 달러당 달러로 계산하죠 + +04:17.730 --> 04:25.080 +라마 80억 달러는 제미니 1호에서 가장 싼 가격에 나온 거예요 5 플래시도 잘 팔리고 GPT 4와 미니는 물론 아주 저렴하죠 + +04:25.080 --> 04:34.950 +클로드 3 하이쿠와 GPT 3도 있어요 5 터보로 바꿨죠 + +04:34.950 --> 04:40.890 +두 개의 다른 막대로서 보여지고 있습니다 하나는 
입력 비용, 하나는 출력 비용이죠 + +04:41.190 --> 04:48.000 +두 모델을 비교할 수 있는 상호 작용적 기능이 있어요 다른 측정법에 따라 + +04:48.000 --> 04:50.190 +나란히 볼 수 있죠 + +04:50.190 --> 04:56.430 +이건 클로드 3점 아니, 클로드 3점이에요 GPT 4에 맞서는 클로드 3집이에요 + +04:56.460 --> 05:04.320 +비트를 좀 바꿔서 픽 3을 선택해 보죠 소네트 5번과 GPT 4번요 + +05:04.350 --> 05:07.200 +우리가 보고 싶어 하는 얼굴이죠 + +05:07.680 --> 05:09.900 +자, 시작하죠 + +05:10.140 --> 05:14.910 +두 사람이 막상막하인 것처럼 보여요 + +05:14.940 --> 05:15.750 +뭐라는 거예요? + +05:15.780 --> 05:22.650 +88살요 클로드는 3%고요 5살, 88살요 GPT 4는 7%예요 + +05:22.680 --> 05:22.920 +네 + +05:22.950 --> 05:28.080 +클로드가 코딩을 더 잘하는 이유가 GPT 4 때문이군요 + +05:28.080 --> 05:32.820 +클로드는 계산을 더 잘하지만 도구 사용은 형편없어요 + +05:32.940 --> 05:36.540 +2주 차에 겪은 일요 + +05:36.660 --> 05:41.580 +클로드가 더 잘해요 다국어를 잘하는 클로드 + +05:41.580 --> 05:43.320 +정말 좋아요 + +05:43.320 --> 05:48.120 +이렇게 모델을 나란히 비교할 수 있다니 흥미롭네요 + +05:48.390 --> 05:54.840 +이 테이블에는 여러 모델이 한 줄씩 있어요 + +05:54.870 --> 06:01.290 +클로드 3 같은 비공개 소스의 모델을 볼 수 있어요 5 소네트요 + +06:01.290 --> 06:06.720 +평균 점수와 비교해서 1위에 오른 선수는 바로 이 선수예요 + +06:06.870 --> 06:12.570 +다시 MLU를 보고 계시는데요 모든 점수가 아주 좋은 수치예요 + +06:12.990 --> 06:18.000 +파이썬에 대한 인간 평가 지표에서 얘기했던 거죠 + +06:18.000 --> 06:25.620 +이게 제가 말씀드린 벤치마크입니다 모델의 능력 이상의 미래 역량을 시험하기 + +06:25.620 --> 06:28.290 +위해 고안된 벤치마크죠 + +06:28.380 --> 06:36.480 +이 클라우드 스리를 보면 믿겨지세요? 
5점, 소네트가 벌써 93점을 득점했어요 B 하드에서 1%로 향후 + +06:36.570 --> 06:41.580 +역량을 평가하는 기준이 아니라는 뜻이죠 + +06:41.580 --> 06:43.680 +현재의 기능이라고 할 수 있죠 + +06:43.680 --> 06:46.500 +클라우드 스리도요 5번 소네트가 끝내줬어요 + +06:46.980 --> 06:51.390 +초등학교 수학이랑 더 어려운 수학 문제요 + +06:51.420 --> 06:57.870 +여기 보시면 다양한 모델에서 나온 결과가 있어요 + +06:57.870 --> 07:03.180 +앞서 제가 좀 아리송한 걸 언급했는데요 클라우드 3 비트는 5분 + +07:03.210 --> 07:06.210 +소네트가 클로드 3세보다 잘하네요 + +07:06.240 --> 07:07.230 +오퍼스요 + +07:07.320 --> 07:16.590 +클로드 3권은 여전히 API로도 제공되고 가격도 3백 달러가 훨씬 넘어요 + +07:16.590 --> 07:16.590 +5 소네트요 + +07:16.800 --> 07:20.010 +왜 클로드를 골랐는지 모르겠어요 + +07:20.040 --> 07:23.700 +3부작 이상이에요 특별한 게 없다면 소네트 5편요 + +07:23.730 --> 07:29.100 +논리에 관한 문제라면 클로드 3집이 더 잘했네요 + +07:29.100 --> 07:33.840 +어떤 면에서는 더 잘 팔리지만 추가 가격 포인트를 받을 가치가 있는지는 + +07:33.840 --> 07:35.340 +모르겠어요 + +07:36.210 --> 07:43.560 +그리고 라마가 이 비교 모델에 등장하는 것도 보실 수 있어요 + +07:43.560 --> 07:49.530 +라마 4,050억 달러는 안 보이네요 상상이 안 돼요 + +07:49.530 --> 07:56.310 +아직 라마 4의 모든 검사를 수행하지 못했기 때문이죠 50억 달러요, 왜냐하면 + +07:56.310 --> 08:04.350 +700억 라마 3 변종보다 훨씬 뛰어나다고 상상할 수 있거든요 + +08:06.120 --> 08:06.840 +네 + +08:07.710 --> 08:13.590 +이 테이블로 내려오면 전에 보여드린 건데요 + +08:13.590 --> 08:18.300 +다양한 모델을 볼 수 있는 유일한 장소예요 + +08:18.330 --> 08:22.920 +컨텍스트, 창 크기, 입력 및 출력 토큰 당 비용은 얼마일까요? 
+ +08:22.920 --> 08:32.310 +물론 데이터가 있는 모델을 비교하는 거지만 굉장히 + +08:32.310 --> 08:34.140 +유용해요 + +08:34.140 --> 08:41.400 +온라인에서 여러 페이지를 찾아볼 수도 있고 여기 와서 한곳에 다 볼 수도 있어요 + +08:41.400 --> 08:45.120 +그래서 책갈피에 적어두셔야 해요 + +08:45.120 --> 08:52.410 +제미니 1호를 강조하죠 5번 플래시는 백만 가지 문맥을 보여주는 놀라운 창이 있죠 + +08:52.410 --> 08:59.250 +일반적인 영어로는 750,000단어 정도 됩니다 셰익스피어 작품의 + +08:59.280 --> 09:03.190 +거의 전부죠 문맥상 아주 큰 창이에요 + +09:03.670 --> 09:06.010 +클로드 가족은 200,000살이에요 + +09:06.040 --> 09:09.250 +GPT 가족은 128,000살이었죠 + +09:09.280 --> 09:15.400 +제미니 1호의 백만 대에 비하면 다소 적은 숫자죠 5번 섬광이에요 + +09:15.400 --> 09:23.080 +하지만 컨텍스트 창에서 소화해야 할 정보가 여전히 많아요 좋은 반응도 + +09:23.080 --> 09:24.220 +주고요 + +09:24.430 --> 09:24.940 +네 + +09:24.970 --> 09:28.930 +오픈 소스 모델도 보이실 거예요 + +09:28.930 --> 09:36.640 +혼합 평가판과 컨텍스트 창의 크기가 보입니다 llama 3 모델은 8000 토큰 컨텍스트 창을 가지고 + +09:36.640 --> 09:37.660 +있네요 + +09:37.660 --> 09:43.660 +오픈 소스 모델과 비공개 소스 모델을 비교할 때 명심할 가치가 있어요 + +09:43.660 --> 09:49.930 +이런 거대한 컨텍스트 창문이 필요하다면 비공개 소스 경로로 가야겠죠 + +09:52.120 --> 10:00.100 +코딩 리더보드가 있는데 사람 평가를 비교할 수 있어요 그게 피지 + +10:00.130 --> 10:04.870 +웹페이지의 리더보드를 마무리하죠 + +10:04.870 --> 10:06.370 +한 명 더 있어요 + +10:06.400 --> 10:14.020 +비즈니스 사이트들인데 실 리더보드라고 해요 스케일이라는 회사에서 제공하죠 + +10:14.020 --> 10:18.730 +컴 앤 스케일의 전문 분야는 맞춤형 데이터 세트 생성이죠 + +10:18.820 --> 10:26.080 +특정 문제를 작업 중이라면 자신의 문제에 맞는 데이터 세트를 제작하고 큐레이팅해야 합니다 + +10:26.080 --> 10:30.550 +스케일 컴이 비즈니스를 하는 게 바로 그런 거죠 + +10:30.760 --> 10:38.290 +데이터 발생기를 사용할 수 없다면 지난주 과제의 일부로 만든 것이길 바라요 + +10:38.350 --> 10:46.720 +이 leaderboard에는 여러 가지 작업에 대한 아주 구체적인 leaderboard가 있어요 + +10:46.750 --> 10:53.890 +그리고 대립적 견고성에 관한 것도 있습니다 이 강의에서 잘 설명했듯이 시험 + +10:53.920 --> 11:02.230 +학습 프롬프트에 대한 것도 큰 언어 모델에서 해로운 반응을 촉발하도록 설계됐죠 + +11:02.230 --> 11:08.800 +그래서 문제가 되는 질문의 구체적인 예가 있어요 + +11:08.920 --> 11:12.790 +예를 들어, 당신이∙∙∙ 미안해요, 일부러 그런 건 아니에요 + +11:12.790 --> 11:20.290 +예를 들어 이걸 항공사 고객 지원 챗봇으로 나타내고 싶다면 + +11:20.290 --> 11:27.580 +이게 궤도를 벗어나거나 뭔가 크게 잘못되지 않도록 한다는 + +11:27.580 --> 11:31.210 +걸 명심해야 해요 + +11:31.210 --> 11:34.090 +그 목적을 위한 
유용한 벤치마크죠 + +11:34.090 --> 11:41.800 +코딩은 코딩 기술의 더 상세한 척도를 제공합니다 클로드 3을 보세요 소네트 5편이 길을 인도하죠 + +11:41.980 --> 11:46.930 +미스트랄도 마찬가지로 닫힌 소스와 오픈 소스를 결합한 회로죠 + +11:46.930 --> 11:57.130 +미스트랄 라지 투가 3위 안에 들어요 오픈 소스로서 참가 강좌죠 여기 + +11:57.310 --> 12:04.780 +보시면 라마 3이 있어요 1억 4,500억 달러요 + +12:04.810 --> 12:07.570 +지시를 따르지 않고도 이를 시험할 수 있었죠 + +12:07.570 --> 12:09.130 +2위에 올랐죠 + +12:09.130 --> 12:11.140 +GPT 40보다 앞섰어요 + +12:11.320 --> 12:14.110 +클로드 3호 바로 뒤예요 5 소네트요 + +12:14.290 --> 12:21.010 +오픈 소스 세계와 메타에 있어서 놀라운 결과입니다 2위를 차지했어요 + +12:21.250 --> 12:23.230 +수학 문제도 풀었고요 + +12:23.260 --> 12:30.280 +라마 3요 1 405 B가 3위 GPT 40, 2위는 클로드 3이죠 5분 + +12:30.310 --> 12:33.490 +소네트가 수학을 이끌고 있어요 + +12:33.610 --> 12:40.570 +스페인어 순위표도 있어요 여기 결과를 보여주죠 + +12:40.660 --> 12:47.980 +오픈 소스의 선두 주자 미스트랄은 4위입니다 GPT가 4위인 가운데 선두를 달리고 + +12:47.980 --> 12:48.790 +있죠 + +12:49.000 --> 12:55.450 +Qxl도요 비즈니스 특정 리더보드를 늘 추가하고 있어요 + +12:55.450 --> 13:03.700 +다시 돌아와서 추가된 다른 것이 있는지 보고 비즈니스 문제에 대한 보다 구체적인 leaderboard를 위한 훌륭한 리소스로 + +13:03.700 --> 13:05.020 +이걸 사용하세요 diff --git a/week5/community-contributions/subtitles/srts/59295493/en_US.srt b/week5/community-contributions/subtitles/srts/59295493/en_US.srt new file mode 100755 index 0000000..5dcefcb --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295493/en_US.srt @@ -0,0 +1,115 @@ +WEBVTT + +00:00.710 --> 00:03.590 +And welcome to week four, day three. + +00:03.590 --> 00:09.650 +As we are about to embark upon another business project which will add real commercial value. + +00:09.680 --> 00:12.260 +It's going to be an exciting project. + +00:12.260 --> 00:17.450 +It's going to be a difficult challenge, and we're going to have, as always, a blast doing it. + +00:17.480 --> 00:19.250 +Let's get started. 
+
+00:19.280 --> 00:25.400
+So first of all, just a recap of all the things you can do this time I'm going to let you read it,
+
+00:25.400 --> 00:31.190
+but after today you're going to be able to assess models for coding ability and use frontier models
+
+00:31.190 --> 00:36.710
+to generate code building a solution that uses LLMs for code generation.
+
+00:36.710 --> 00:39.710
+And of course, it's going to include a user interface.
+
+00:39.710 --> 00:45.470
+It's going to include a lot of the bits and pieces to package it up nicely to be an app.
+
+00:45.920 --> 00:47.810
+So a quick reminder of the challenge.
+
+00:47.810 --> 00:53.660
+We want to build a tool that can convert Python to C plus plus for performance.
+
+00:53.660 --> 00:57.320
+And this time today we're going to do a frontier model solution.
+
+00:57.320 --> 01:04.820
+Next time we're going to add open source models as we battle the the frontier closed source versus open
+
+01:04.820 --> 01:07.520
+source and see what wins out.
+
+01:08.030 --> 01:12.380
+Uh, first, just a quick a quick explanation of what we're going to do.
+
+01:12.380 --> 01:18.710
+So we're going to prompt our models with a prompt that explains what we want them to re-implement Python
+
+01:18.710 --> 01:19.550
+code in C plus.
+
+01:19.550 --> 01:20.390
+Plus.
+
+01:20.420 --> 01:24.320
+Um, it's maybe going to be a little bit more wordy than this one, but it'll be along these lines,
+
+01:24.350 --> 01:30.680
+uh, trying to convince the model not to do any explanation, just simply to provide the C plus plus
+
+01:30.680 --> 01:31.430
+code.
+
+01:31.430 --> 01:37.250
+And its objective should be to do whatever it takes to run faster and give the same output.
+
+01:37.250 --> 01:40.040
+But otherwise, the world is its oyster.
+
+01:40.070 --> 01:44.120
+It can do what it wants, it just has to give the same output.
+
+01:44.150 --> 01:49.190
+And this is a bit of code that we'll start with to try out in Python.
+ +01:49.190 --> 01:56.120 +Uh, you may know already that there is a series which very slowly converges to a nice number, and + +01:56.120 --> 02:05.270 +that series is one minus a third plus a fifth, minus a seventh plus a ninth, minus 11th, and so on. + +02:05.300 --> 02:06.350 +You get the idea. + +02:06.380 --> 02:12.950 +Uh, if you keep that series going and you multiply the result by four, you gradually converge on pi. + +02:12.980 --> 02:14.840 +But it takes a very long time. + +02:14.960 --> 02:21.890 +And so we can run it for the first 100 million iterations, and we will actually get quite close to + +02:21.920 --> 02:23.030 +pi at that point. + +02:23.150 --> 02:28.670 +But you'd have to do an awful lot of typing on your calculator to to do 100 million iterations. + +02:28.730 --> 02:30.830 +Uh, but Python will do it reasonably fast. + +02:30.830 --> 02:35.690 +But we'll be hoping that C plus plus can do it a whole lot faster, and that Frontier models will help + +02:35.690 --> 02:36.410 +us get there. + +02:36.410 --> 02:37.400 +Let's find out. 
diff --git a/week5/community-contributions/subtitles/srts/59295493/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295493/ja_JP.srt new file mode 100755 index 0000000..ae7c8f3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295493/ja_JP.srt @@ -0,0 +1,97 @@ +WEBVTT + +00:00.710 --> 00:03.590 +そして第4週、 3日目へようこそ。 + +00:03.590 --> 00:09.650 +私たちは、 商業的価値を高める新たなビジネス・プロジェクトに着手しようとしている。 + +00:09.680 --> 00:12.260 +エキサイティングなプロジェクトになるだろう。 + +00:12.260 --> 00:17.450 +難しい挑戦になるだろうし、 いつものように楽しみながらやるつもりだ。 + +00:17.480 --> 00:19.250 +始めよう。 + +00:19.280 --> 00:36.710 +まず最初に、 今回あなたができることをまとめておきます。 + +00:36.710 --> 00:39.710 +もちろん、 ユーザーインターフェイスも含まれる。 + +00:39.710 --> 00:45.470 +アプリになるようにうまくパッケージングするために、 多くの断片が含まれることになる。 + +00:45.920 --> 00:47.810 +というわけで、 チャレンジの簡単な覚え書きだ。 + +00:47.810 --> 00:53.660 +私たちは、 PythonをCに変換できるツールを作りたい。 + +00:53.660 --> 00:57.320 +そして今日は、 フロンティアモデルの解答を行う。 + +00:57.320 --> 01:04.820 +次回はオープンソースのモデルを追加し、 クローズドソース対オープンソースのフロンティアを戦い、 + +01:04.820 --> 01:07.520 +どちらが勝つかを見てみたい。 + +01:08.030 --> 01:12.380 +ええと、 まず、 これからやることを簡単に説明します。 + +01:12.380 --> 01:19.550 +そこで、 モデルにPythonコードとCプラスを再実装してほしいことを説明するプロンプトを出すことにする。 + +01:19.550 --> 01:20.390 +それに + +01:20.420 --> 01:24.320 +ええと、 これより少し言葉が多くなるかもしれませんが、 このような内容で、 + +01:24.350 --> 01:31.430 +モデルには何の説明もせず、 単にC+++のコードを提供するよう説得するものです。 + +01:31.430 --> 01:37.250 +そしてその目的は、 より速く走り、 同じアウトプットを出すために必要なことは何でもすることであるべきだ。 + +01:37.250 --> 01:40.040 +しかし、 そうでなければ、 世界は彼らの牡蠣だ。 + +01:40.070 --> 01:44.120 +好きなことができる。 + +01:44.150 --> 01:49.190 +そして、 これはPythonで試すためのちょっとしたコードだ。 + +01:49.190 --> 01:56.120 +その級数とは、 1マイナス3分の1プラス5分の1、 + +01:56.120 --> 02:05.270 +マイナス7分の1プラス9分の1、 マイナス11分の1などである。 + +02:05.300 --> 02:06.350 +おわかりだろう。 + +02:06.380 --> 02:12.950 +その級数を続けて4倍すると、 だんだん円周率に収束していくんだ。 + +02:12.980 --> 02:14.840 +でも、 とても時間がかかる。 + +02:14.960 --> 02:23.030 +そして、 最初の1億回の反復で、 πにかなり近づくことができる。 + +02:23.150 --> 02:28.670 +しかし、 1億回の反復を行うには、 電卓で膨大なタイピングをしなければならない。 + +02:28.730 --> 02:30.830 +あー、 
でもPythonならそれなりに速くできるよ。 + +02:30.830 --> 02:36.410 +しかし、 Cプラス・プラスがそれをもっと早く実現できることを期待しているし、 フロンティアのモデルたちがその手助けをしてくれることを期待している。 + +02:36.410 --> 02:37.400 +見てみよう。 diff --git a/week5/community-contributions/subtitles/srts/59295493/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295493/ko_KR.srt new file mode 100755 index 0000000..5241b0d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295493/ko_KR.srt @@ -0,0 +1,109 @@ +WEBVTT + +00:00.710 --> 00:03.590 +4주 차, 3일째에 잘 오셨어요 + +00:03.590 --> 00:09.650 +또 다른 사업 프로젝트에 착수할 참이거든요 상업적 가치를 더할 프로젝트죠 + +00:09.680 --> 00:12.260 +신나는 프로젝트가 될 거예요 + +00:12.260 --> 00:17.450 +어려운 도전이 될 거예요 하지만 언제나처럼 즐거운 경험이 되겠죠 + +00:17.480 --> 00:19.250 +Get it, Get it, 시작하시죠 + +00:19.280 --> 00:25.400 +먼저, 여러분이 이번에 할 수 있는 모든 것의 요약입니다 여러분이 읽으실 수 있게 해드리죠 + +00:25.400 --> 00:31.190 +하지만 오늘 이후엔 코딩 능력을 위한 모델을 평가하고 코드 생성을 위해 LM을 사용하는 + +00:31.190 --> 00:36.710 +솔루션을 코드 구축하기 위해 개척자 모델을 사용할 수 있게 될 거예요 + +00:36.710 --> 00:39.710 +물론 사용자 인터페이스를 포함하죠 + +00:39.710 --> 00:45.470 +앱으로 잘 패키지하기 위해 많은 비트 조각들을 포함하고 있어요 + +00:45.920 --> 00:47.810 +과제를 다시 알려드릴게요 + +00:47.810 --> 00:53.660 +파이썬 을 C++로 변환하는 툴을 만들고자 합니다 성능을 위해서요 + +00:53.660 --> 00:57.320 +오늘은 개척자 모델 솔루션을 해볼게요 + +00:57.320 --> 01:04.820 +다음엔 오픈 소스 모델을 추가하겠습니다 개척지 폐쇄 소스와 오픈 소스에서 싸우면서 + +01:04.820 --> 01:07.520 +뭐가 이기는지 보죠 + +01:08.030 --> 01:12.380 +먼저, 우리가 할 일에 대해 간단히 설명할게요 + +01:12.380 --> 01:19.550 +모델에 뭘 재구성하길 원하는지 프롬프트할 거예요 파이썬 코드와 C+를요 + +01:19.550 --> 01:20.390 +더 있어요 + +01:20.420 --> 01:24.320 +이것보다 좀 더 장황할 수도 있지만 이런 내용입니다 + +01:24.350 --> 01:31.430 +모델을 설득하는 거죠 어떤 설명도 하지 말고 그냥 C 플러스 플러스 코드를 제공하라고요 + +01:31.430 --> 01:37.250 +그리고 목표는 어떻게 해서든 더 빨리 달리고 같은 출력을 내는 것이어야 하죠 + +01:37.250 --> 01:40.040 +그것만 빼면 세상은 그의 굴이에요 + +01:40.070 --> 01:44.120 +원하는 건 뭐든 할 수 있어요 같은 출력만 주면 되죠 + +01:44.150 --> 01:49.190 +파이썬 을 테스트하기 위해 비트 코드로 시작할게요 + +01:49.190 --> 01:56.120 +이미 아시겠지만 일렬로 나뉘는 숫자가 서서히 하나의 큰 숫자로 모입니다. 
일렬은 + +01:56.120 --> 02:05.270 +1 빼기 3분의 1 더하기 5분의 1 빼기 7분의 1 더하기 9분의 1 빼기 11분의 1 이렇게 이어지죠. + +02:05.300 --> 02:06.350 +Get it, Get it 아시겠죠? + +02:06.380 --> 02:12.950 +어, 연속성을 유지하면서 4를 곱하면 점차 파이의 수렴이 되죠 + +02:12.980 --> 02:14.840 +하지만 시간이 오래 걸려요 + +02:14.960 --> 02:21.890 +그래서 처음 1억 번 반복할 수 있고 그 시점에서 파이에 꽤 근접하게 되죠 get + +02:21.920 --> 02:23.030 +it + +02:23.150 --> 02:28.670 +하지만 1억 개의 반복을 하려면 계산기에 엄청나게 많은 타자를 쳐야 해요 + +02:28.730 --> 02:30.830 +파이썬 을 이용하면 꽤 빠르게 할 수 있어요 + +02:30.830 --> 02:35.690 +C 플러스 모델들이 훨씬 더 빨리 할 수 있길 바라요 프런티어 모델들이 도움될 거예요 Get it, Get it, + +02:35.690 --> 02:36.410 +Get it + +02:36.410 --> 02:37.400 +알아보죠 diff --git a/week5/community-contributions/subtitles/srts/59295527/en_US.srt b/week5/community-contributions/subtitles/srts/59295527/en_US.srt new file mode 100755 index 0000000..b04fe38 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295527/en_US.srt @@ -0,0 +1,79 @@ +WEBVTT + +00:00.650 --> 00:08.030 +I'm so happy to welcome you to week four, day four on our journey to LLM Engineering and Mastery. + +00:08.180 --> 00:12.620 +Here we are looking again at Code Generation. + +00:12.620 --> 00:19.190 +We had tons of fun last time generating code using frontier models, using what ended up being a very + +00:19.190 --> 00:26.150 +sharp UI to allow us to try out different kinds of puzzles and see the startling performance improvement + +00:26.150 --> 00:31.040 +that Claude was able to generate for us winning the day in our competition. + +00:31.040 --> 00:39.380 +What was it more than 60,000 times faster when it reinterpreted the problem and applied Shannon's algorithm? + +00:39.770 --> 00:42.740 +So today we're moving to open source. + +00:42.740 --> 00:45.860 +You're going to be able to use open source models for coding. 
+
+00:45.860 --> 00:50.690
+We're going to use something called hugging face endpoints, which is another feature of hugging Face
+
+00:50.690 --> 00:56.900
+that lets you deploy models and run them in the cloud privately for you for inference purposes.
+
+00:57.170 --> 01:02.960
+So it's another great feature of the fabulous resource that is hugging face.
+
+01:03.230 --> 01:10.490
+And we're going to build a solution that uses open source LLMs along with frontier LLMs to generate code.
+
+01:10.520 --> 01:11.720
+Let's see.
+
+01:12.800 --> 01:15.260
+First, quick reminder of the challenge.
+
+01:15.290 --> 01:16.250
+I'm sure you remember it.
+
+01:16.280 --> 01:21.980
+We're building a product that is able to convert C plus plus or Python code into high performance
+
+01:22.010 --> 01:23.330
+C plus plus code.
+
+01:23.600 --> 01:32.450
+And we used GPT four and Claude last time, uh, the Pi program ran 100 times faster.
+
+01:32.750 --> 01:39.950
+Um, GPT four optimized the code at least, uh, when it when it ran, it was able to do it with a speed
+
+01:39.980 --> 01:40.490
+up.
+
+01:40.490 --> 01:45.110
+Uh, Claude rewrote the algorithm for more than 60,000 times.
+
+01:45.110 --> 01:46.550
+Performance improvement.
+
+01:46.550 --> 01:48.350
+Fantastic.
+
+01:48.380 --> 01:55.640
+Now the time has come to see how open source measures up against the beasts that are the frontier models.
+
+01:55.670 --> 01:57.050
+Let's have a try.
+
+01:57.080 --> 01:58.820
+See you in a second.
diff --git a/week5/community-contributions/subtitles/srts/59295527/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295527/ja_JP.srt new file mode 100755 index 0000000..9c571eb --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295527/ja_JP.srt @@ -0,0 +1,70 @@ +WEBVTT + +00:00.650 --> 00:08.030 +LLMエンジニアリングとマスタリーへの旅の4週目、 4日目へようこそ。 + +00:08.180 --> 00:12.620 +ここで再びコード・ジェネレーションを見てみよう。 + +00:12.620 --> 00:19.190 +前回は、 フロンティアモデルを使ってコードを生成するのがとても楽しかった。 結局、 非常にシャープなUIを使って、 + +00:19.190 --> 00:31.040 +さまざまな種類のパズルを試すことができたし、 クロードがコンペティションで優勝するために生成した驚くべきパフォーマンス向上も見ることができた。 + +00:31.040 --> 00:39.380 +問題を再解釈してシャノンのアルゴリズムを適用したところ、 6万倍以上速くなったとは? + +00:39.770 --> 00:42.740 +だから今日はオープンソースに移行する。 + +00:42.740 --> 00:45.860 +コーディングにオープンソースのモデルを使えるようになる。 + +00:45.860 --> 00:50.690 +これはハギング・フェイスのもうひとつの機能で、 推論を目的としてモデルをクラウドにプライベートでデプロイし、 + +00:50.690 --> 00:56.900 +実行することができる。 + +00:57.170 --> 01:02.960 +つまり、 ハグフェイスという素晴らしいリソースのもうひとつの大きな特徴なのだ。 + +01:03.230 --> 01:10.490 +そして、 オープンソースLMSとフロンティアLMSを使ってコードを生成するソリューションを構築するつもりだ。 + +01:10.520 --> 01:11.720 +見てみよう。 + +01:12.800 --> 01:15.260 +まず、 この挑戦について簡単に思い出してほしい。 + +01:15.290 --> 01:16.250 +覚えているだろう? 
+ +01:16.280 --> 01:23.330 +私たちは、 Cプラス、 プラス、 またはパイソンのコードを高性能のCプラスプラスコードに変換できる製品を作っています。 + +01:23.600 --> 01:32.450 +そしてGPTを4つ使い、 前回はクロードが100倍速く走った。 + +01:32.750 --> 01:40.490 +GPT4は、 少なくともコードを最適化した。 + +01:40.490 --> 01:45.110 +クロードは6万回以上もアルゴリズムを書き直したんだ。 + +01:45.110 --> 01:46.550 +パフォーマンス向上。 + +01:46.550 --> 01:48.350 +ファンタスティックだ。 + +01:48.380 --> 01:55.640 +今こそ、 オープンソースがフロンティアモデルという野獣にどう対抗できるかを見極める時が来たのだ。 + +01:55.670 --> 01:57.050 +試してみよう。 + +01:57.080 --> 01:58.820 +またすぐに会おう。 diff --git a/week5/community-contributions/subtitles/srts/59295527/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295527/ko_KR.srt new file mode 100755 index 0000000..f5fe7da --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295527/ko_KR.srt @@ -0,0 +1,76 @@ +WEBVTT + +00:00.650 --> 00:08.030 +넷째 주에 오신 걸 환영합니다 LLM 엔지니어링과 숙련을 위한 여정 넷째 날이죠 + +00:08.180 --> 00:12.620 +코드 생성을 다시 보고 있는데요 + +00:12.620 --> 00:19.190 +프론티어 모델로 코드를 만들면서 정말 즐거웠어요 다양한 + +00:19.190 --> 00:26.150 +퍼즐을 시도할 수 있는 아주 선명한 UI 덕분에 클로드가 만들어낸 + +00:26.150 --> 00:31.040 +놀라운 성능 향상을 볼 수 있었죠 + +00:31.040 --> 00:39.380 +문제를 재해석하고 섀넌의 알고리즘을 적용했을 때 60,000배 이상 빨랐던 게 얼마죠? 
+ +00:39.770 --> 00:42.740 +오늘은 오픈 소스로 이동하죠 + +00:42.740 --> 00:45.860 +코딩을 위해 오픈 소스 모델을 사용할 수 있어요 + +00:45.860 --> 00:50.690 +얼굴 끝점 포옹을 사용할 겁니다 얼굴을 안는 것의 또 다른 기능으로 모델을 + +00:50.690 --> 00:56.900 +배포하고 클라우드 프라이버시에서 실행할 수 있게 해줍니다 추론 목적을 위해서요 + +00:57.170 --> 01:02.960 +얼굴을 감싸 안는 것도 이 멋진 자원의 또 다른 기능이죠 + +01:03.230 --> 01:10.490 +코드를 생성하기 위해 오픈 소스 LMS와 프런티어 LMS를 사용하는 솔루션을 만들 거예요 + +01:10.520 --> 01:11.720 +어디 보죠 + +01:12.800 --> 01:15.260 +먼저 과제를 알려드리죠 + +01:15.290 --> 01:16.250 +기억하실 텐데요 + +01:16.280 --> 01:21.980 +C 플러스나 파이썬 코드를 고성능 C 플러스 플러스 코드로 변환할 수 있는 제품을 + +01:22.010 --> 01:23.330 +만들고 있어요 + +01:23.600 --> 01:32.450 +GPT 4와 클로드를 사용했는데 지난번엔 파이 프로그램이 100배나 빨랐어요 + +01:32.750 --> 01:40.490 +GPT 4는 코드를 최적화했습니다 적어도 실행될 때는 빠른 속도로 할 수 있었죠 + +01:40.490 --> 01:45.110 +클로드가 알고리즘을 60,000번 넘게 수정했어요 + +01:45.110 --> 01:46.550 +업무 능력 향상요 + +01:46.550 --> 01:48.350 +환상적이에요 + +01:48.380 --> 01:55.640 +이제 오픈 소스가 개척지 모델인 야수에 얼마나 잘 대적하는지 볼 때가 됐어요 + +01:55.670 --> 01:57.050 +한번 해 보죠 + +01:57.080 --> 01:58.820 +이따 봐요 diff --git a/week5/community-contributions/subtitles/srts/59295541/en_US.srt b/week5/community-contributions/subtitles/srts/59295541/en_US.srt new file mode 100755 index 0000000..e257836 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295541/en_US.srt @@ -0,0 +1,205 @@ +WEBVTT + +00:00.410 --> 00:01.820 +And welcome back. + +00:01.850 --> 00:08.660 +You've just seen GPT four zero spectacularly failed to work on our hard Python conversion problem. + +00:08.660 --> 00:12.980 +And now we're going to see how Claude handles the same problem. + +00:12.980 --> 00:15.950 +So we run the optimize method. + +00:15.950 --> 00:18.740 +We get back a bunch of stuff from Claude. + +00:18.740 --> 00:20.030 +Here it is. + +00:20.660 --> 00:29.930 +And now we will run the clang optimized, uh, the method to compile and optimize this code and run + +00:29.930 --> 00:31.850 +it and see what we get. + +00:36.110 --> 00:37.370 +Oh, it was still generating. 
+ +00:37.370 --> 00:40.280 +And so a lot happened there. + +00:40.310 --> 00:45.320 +And the reason that there was a long pause is that it hadn't yet finished producing the code, as I + +00:45.320 --> 00:49.670 +just saw that it was like halfway finished, but it did just finish and then it compiled, and then + +00:49.670 --> 00:52.610 +it ran and it got the correct answer. + +00:52.610 --> 00:56.570 +And wowzer look at how fast that is. + +00:56.570 --> 00:58.010 +Look at the difference. + +00:58.040 --> 01:02.990 +Not only did Claude do this, but Claude has just done shockingly well. + +01:02.990 --> 01:12.330 +That you will notice, is two milliseconds, two milliseconds Compared to the time that the Python code + +01:12.360 --> 01:14.640 +have to go up to the Python code again. + +01:14.880 --> 01:18.210 +Uh, and where did we do the Python code? + +01:18.210 --> 01:18.630 +Here we go. + +01:18.660 --> 01:22.500 +The Python code got the same answer in 27 seconds. + +01:22.500 --> 01:30.510 +So, uh, I'm going to need to get a calculator here just to quickly, uh, tell myself 27. + +01:33.300 --> 01:37.830 +It's something like 13,000 times faster. + +01:37.830 --> 01:40.110 +Wow, wow. + +01:40.110 --> 01:43.380 +So you should be blown away by that. + +01:43.380 --> 01:47.730 +Uh, sometimes GPT four hasn't failed and has managed to generate some code. + +01:47.730 --> 01:53.280 +And when it does, the code that it's generated, for me at least, um, has been faster, but more + +01:53.280 --> 01:55.590 +like, uh, 10 or 100 times faster. + +01:55.590 --> 01:57.030 +Not like Claude. + +01:57.030 --> 02:01.020 +So how on earth has Claude been able to do this? + +02:01.050 --> 02:05.490 +Uh, how has it managed to make such highly optimized code? + +02:05.490 --> 02:09.480 +Like, is there something wrong with Python that, I mean, there must be something very wrong with + +02:09.480 --> 02:12.000 +Python if it can be so, so much faster. 
+ +02:12.000 --> 02:15.450 +Well, no, there is a little bit more to the tale. + +02:15.540 --> 02:20.610 +Um, if we look at the optimized code that Claude generated. + +02:20.730 --> 02:20.970 +Hang on. + +02:20.970 --> 02:23.760 +I think I have to close this and double click again to see it. + +02:23.760 --> 02:24.690 +Here we go. + +02:25.080 --> 02:27.990 +Um, there is a bit more to the tale. + +02:27.990 --> 02:29.970 +There is a bit more to the tale. + +02:29.970 --> 02:31.590 +What has happened? + +02:31.620 --> 02:39.240 +What has happened is that, Claude, the direction that we gave Claude was to make sure that the same + +02:39.240 --> 02:44.520 +response was generated, identical response in the fastest possible time. + +02:44.520 --> 02:48.510 +And the prompt was very careful to say re-implement in C plus plus. + +02:48.510 --> 02:51.240 +And that is exactly what Claude has done. + +02:51.240 --> 02:58.500 +Claude, amazingly, has analyzed the code and understood the intent of the code. + +02:58.710 --> 03:01.800 +Um, perhaps with a hint by the name of the function. + +03:01.800 --> 03:05.310 +Although don't don't, uh, don't give it all away with the name of the function. + +03:05.310 --> 03:11.460 +And there's a few things to try and cause it off track, but it has re-implemented this in a completely + +03:11.460 --> 03:18.990 +different approach, using a theorem that I think it's called Shannon's, uh, algorithm. + +03:19.140 --> 03:20.520 +Uh, I think that's right. + +03:20.520 --> 03:22.890 +Yes, it is called Shannon's algorithm. + +03:23.070 --> 03:30.760 +Uh, and, uh, it is an approach that allows you to solve this puzzle just with one loop. + +03:30.760 --> 03:32.230 +One loop through. + +03:32.260 --> 03:33.820 +Uh, sorry, I'm on the wrong loop here. + +03:33.820 --> 03:34.570 +This is the loop. + +03:34.570 --> 03:38.590 +One loop through, not a nested loop. 
+ +03:38.650 --> 03:44.920 +Uh, and as a result, uh, you can see there are there are, in fact, uh, two of them side by side. + +03:44.920 --> 03:50.140 +But it's not a, it's not a, uh, a nested loop, a loop within a loop. + +03:50.170 --> 03:55.510 +Um, and that allows you to get to the answer in a fraction of the time. + +03:55.510 --> 04:01.540 +So what Claude has done, which is so ingenious, is it's understood the intent of the function, it's + +04:01.540 --> 04:07.360 +not just translated something from Python to the equivalent C plus plus code, it has reimplemented + +04:07.360 --> 04:15.130 +it just as it was prompted to get the same answer in a blazingly, uh, fast amount of time. + +04:15.130 --> 04:20.020 +So I would say that is a terrific, terrific result by Claude. + +04:20.080 --> 04:23.230 +Uh, and, uh, a round of applause there. + +04:23.230 --> 04:26.800 +And it's certainly consistent with what we've seen from the Seal leaderboard. + +04:26.800 --> 04:31.000 +Claude 3.5 sonnet, uh, rules the show. + +04:31.000 --> 04:32.530 +Uh, Claude for the win. 
diff --git a/week5/community-contributions/subtitles/srts/59295541/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295541/ja_JP.srt new file mode 100755 index 0000000..5f365f3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295541/ja_JP.srt @@ -0,0 +1,184 @@ +WEBVTT + +00:00.410 --> 00:01.820 +そしてお帰りなさい。 + +00:01.850 --> 00:08.660 +GPT4のゼロがハードPythonの変換問題で見事に失敗したのを見ただろう。 + +00:08.660 --> 00:12.980 +そして今度は、 クロードが同じ問題にどう対処するか見てみよう。 + +00:12.980 --> 00:15.950 +そこでオプティマイズ・メソッドを実行する。 + +00:15.950 --> 00:18.740 +クロードからたくさんのものを取り戻した。 + +00:18.740 --> 00:20.030 +これだ。 + +00:20.660 --> 00:31.850 +次に、 このコードをコンパイルして最適化するclang最適化メソッドを実行して、 何が得られるか見てみよう。 + +00:36.110 --> 00:37.370 +ああ、 まだ発電していたんだ。 + +00:37.370 --> 00:40.280 +そこでいろいろなことが起こった。 + +00:40.310 --> 00:52.610 +長い間中断があったのは、 まだコードを生成し終えていなかったからだ。 + +00:52.610 --> 00:56.570 +その速さを見てほしい。 + +00:56.570 --> 00:58.010 +この違いを見てほしい。 + +00:58.040 --> 01:02.990 +クロードがやっただけでなく、 クロードは衝撃的なほどうまくやった。 + +01:02.990 --> 01:14.640 +Pythonのコードが再びPythonのコードに戻るまでの時間に比べれば、 2ミリ秒、 2ミリ秒であることにお気づきだろう。 + +01:14.880 --> 01:18.210 +Pythonのコードはどこでやったんだっけ? + +01:18.210 --> 01:18.630 +さあ、 始めよう。 + +01:18.660 --> 01:22.500 +Pythonのコードは27秒で同じ答えを出した。 + +01:22.500 --> 01:30.510 +というわけで、 手っ取り早く自分に27を伝えるために、 ここで電卓を手に入れる必要がありそうだ。 + +01:33.300 --> 01:37.830 +13,000倍も速いんだ。 + +01:37.830 --> 01:40.110 +すごい、 すごい。 + +01:40.110 --> 01:43.380 +だから、 あなたはそれに圧倒されるはずだ。 + +01:43.380 --> 01:47.730 +GPT4が失敗せず、 コードを生成できたこともある。 + +01:47.730 --> 01:55.590 +そうして生成されたコードは、 少なくとも私にとっては、 10倍か100倍速くなった。 + +01:55.590 --> 01:57.030 +クロードとは違う。 + +01:57.030 --> 02:01.020 +では、 クロードはいったいどうやってこんなことができたのか? + +02:01.050 --> 02:05.490 +どうやってこれほど最適化されたコードを作ったんだ? 
+ +02:05.490 --> 02:12.000 +Pythonがこんなに速くなるのなら、 Pythonには何か問題があるに違いない。 + +02:12.000 --> 02:15.450 +いやいや、 この話にはもう少し続きがある。 + +02:15.540 --> 02:20.610 +クロードが生成した最適化されたコードを見てみよう。 + +02:20.730 --> 02:20.970 +ちょっと待て。 + +02:20.970 --> 02:23.760 +これを閉じて、 もう一度ダブルクリックしないと見られないと思う。 + +02:23.760 --> 02:24.690 +さあ、 始めよう。 + +02:25.080 --> 02:27.990 +この話にはもう少し続きがあるんだ。 + +02:27.990 --> 02:29.970 +物語にはもう少し続きがある。 + +02:29.970 --> 02:31.590 +何が起こったのか? + +02:31.620 --> 02:44.520 +何が起こったかというと、 クロード、 つまり私たちがクロードに与えた指示は、 可能な限り最速で同じ回答が得られるようにすることだった。 + +02:44.520 --> 02:48.510 +プロンプトには、 Cプラスプラスで再インプリメントするようにと注意書きがあった。 + +02:48.510 --> 02:51.240 +そして、 それこそがクロードが成し遂げたことなのだ。 + +02:51.240 --> 02:58.500 +クロードは驚くべきことに、 コードを分析し、 コードの意図を理解している。 + +02:58.710 --> 03:01.800 +ええと、 おそらく関数名でヒントがあるのでしょう。 + +03:01.800 --> 03:05.310 +でも、 関数名ですべてを明かしてはいけない。 + +03:05.310 --> 03:11.460 +シャノンのアルゴリズムと呼ばれる定理を使って、 + +03:11.460 --> 03:18.990 +まったく別のアプローチでこれを再実装したんだ。 + +03:19.140 --> 03:20.520 +その通りだと思う。 + +03:20.520 --> 03:22.890 +そう、 シャノンのアルゴリズムと呼ばれるものだ。 + +03:23.070 --> 03:30.760 +このパズルは、 1つのループだけで解くことができるんだ。 + +03:30.760 --> 03:32.230 +1ループスルー。 + +03:32.260 --> 03:33.820 +あ、 ごめん、 ループを間違えた。 + +03:33.820 --> 03:34.570 +これがループだ。 + +03:34.570 --> 03:38.590 +ネストされたループではなく、 1つのループスルー。 + +03:38.650 --> 03:44.920 +その結果、 2本が並んでいるのがわかるだろう。 + +03:44.920 --> 03:50.140 +しかし、 これはネストされたループではなく、 ループの中のループだ。 + +03:50.170 --> 03:55.510 +そうすれば、 ほんのわずかな時間で答えにたどり着くことができる。 + +03:55.510 --> 04:01.540 +つまり、 クロードがやったことは、 とても独創的で、 関数の意図を理解し、 + +04:01.540 --> 04:07.360 +Pythonの何かを同等のC++のコードに翻訳しただけでなく、 + +04:07.360 --> 04:15.130 +同じ答えを得るために、 促された通りに再実装したのだ。 + +04:15.130 --> 04:20.020 +だから、 クロードは素晴らしい、 素晴らしい結果を残したと言える。 + +04:20.080 --> 04:23.230 +そして拍手喝采。 + +04:23.230 --> 04:26.800 +そしてそれは、 シールのリーダーボードから見たものと一致している。 + +04:26.800 --> 04:31.000 +クロード 3. 
5ソネットは、 ショーを支配している。 + +04:31.000 --> 04:32.530 +クロードの勝ちだ。 diff --git a/week5/community-contributions/subtitles/srts/59295541/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295541/ko_KR.srt new file mode 100755 index 0000000..402c81d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295541/ko_KR.srt @@ -0,0 +1,205 @@ +WEBVTT + +00:00.410 --> 00:01.820 +돌아오신 걸 환영해요 + +00:01.850 --> 00:08.660 +GPT 40이 하드 파이썬 변환 문제를 멋지게 해결하지 못한 걸 보셨는데요 + +00:08.660 --> 00:12.980 +이제 클로드가 같은 문제를 어떻게 해결하는지 보죠 + +00:12.980 --> 00:15.950 +최적화 메서드를 실행해요 + +00:15.950 --> 00:18.740 +클로드에게서 많은 걸 얻어요 Get up + +00:18.740 --> 00:20.030 +여기 있네요 + +00:20.660 --> 00:29.930 +이제 clang 최적화된 메서드를 실행하겠습니다 컴파일하고 코드를 최적화하는 거죠 실행해 + +00:29.930 --> 00:31.850 +어떻게 되는지 보죠 + +00:36.110 --> 00:37.370 +계속 발전하고 있었어요 + +00:37.370 --> 00:40.280 +많은 일이 있었어요 + +00:40.310 --> 00:45.320 +긴 정지가 있었던 이유는 아직 코드 생성을 완료하지 않았기 때문입니다. + +00:45.320 --> 00:49.670 +반 정도 완료된 것을 보았습니다. 하지만 완료하고 컴파일하고 + +00:49.670 --> 00:52.610 +실행해서 올바른 답을 얻었어요. + +00:52.610 --> 00:56.570 +정말 빠른 것 좀 보세요 + +00:56.570 --> 00:58.010 +차이를 보세요 + +00:58.040 --> 01:02.990 +클로드는 이 일뿐만 아니라 놀라울 정도로 잘했어요 + +01:02.990 --> 01:12.330 +2밀리초입니다 파이썬 코드에서 파이썬 코드로 다시 이동하는 시간의 + +01:12.360 --> 01:14.640 +2밀리초죠 + +01:14.880 --> 01:18.210 +파이썬 으로 코드는 어디서 했었죠? + +01:18.210 --> 01:18.630 +시작할게요 + +01:18.660 --> 01:22.500 +파이썬 + 코드는 27초 만에 같은 답을 얻죠 + +01:22.500 --> 01:30.510 +get 27 계산기 좀 가져올게요 빨리 27이라고 되뇌어 봐야겠어요 + +01:33.300 --> 01:37.830 +13,000배는 빨라요 + +01:37.830 --> 01:40.110 +대단해요 + +01:40.110 --> 01:43.380 +깜짝 놀라실 거예요 + +01:43.380 --> 01:47.730 +GPT 4가 실패하지 않고 코드를 생성할 때도 있어요 + +01:47.730 --> 01:53.280 +그렇게 되면 생성된 코드는 적어도 제 경우엔 더 빨라져요 하지만 10배나 + +01:53.280 --> 01:55.590 +100배 더 빨라지죠 + +01:55.590 --> 01:57.030 +클로드 같지 않아요 + +01:57.030 --> 02:01.020 +클로드는 대체 어떻게 이걸 해냈을까요? + +02:01.050 --> 02:05.490 +어떻게 최적화된 코드를 만들 수 있었을까요? + +02:05.490 --> 02:09.480 +파이썬 을 잘못 사용하고 있는 건가요? 파이썬 을 잘못 사용하고 있는 게 분명합니다. 
+ +02:09.480 --> 02:12.000 +아주 아주 빠르게 사용할 수 있다면요. + +02:12.000 --> 02:15.450 +아뇨, 비트가 좀 더 있어요 + +02:15.540 --> 02:20.610 +클로드가 생성한 최적화된 코드를 보세요 + +02:20.730 --> 02:20.970 +잠깐만요 + +02:20.970 --> 02:23.760 +이걸 닫고 다시 더블 클릭해야 볼 수 있을 것 같아요 + +02:23.760 --> 02:24.690 +시작할게요 + +02:25.080 --> 02:27.990 +비트가 좀 더 있어요 + +02:27.990 --> 02:29.970 +비트가 더 있어요 + +02:29.970 --> 02:31.590 +무슨 일이죠? + +02:31.620 --> 02:39.240 +클로드에게 방향을 지시했을 때 같은 반응을 최대한 + +02:39.240 --> 02:44.520 +빨리 만들어내라고 했어요 + +02:44.520 --> 02:48.510 +프롬프트는 C++에서 다시 구현하라고 조심스럽게 말했죠 + +02:48.510 --> 02:51.240 +클로드가 그렇게 했어요 + +02:51.240 --> 02:58.500 +클로드는 놀랍게도 코드를 분석하고 그 의미를 이해했어요 + +02:58.710 --> 03:01.800 +함수 이름에 힌트를 준 것 같아요 + +03:01.800 --> 03:05.310 +함수 이름만 가지고 다 말씀하시진 마세요 + +03:05.310 --> 03:11.460 +궤도에서 벗어나게 한 몇 가지 요소가 있었지만 완전히 다른 + +03:11.460 --> 03:18.990 +방식으로 재설정했어요 섀넌의 알고리즘이라는 정리를 이용해서요 + +03:19.140 --> 03:20.520 +그런 것 같아요 + +03:20.520 --> 03:22.890 +네, 섀넌의 알고리즘이에요 + +03:23.070 --> 03:30.760 +이 방법은 이 퍼즐을 한 번의 루프로 풀 수 있게 해줘요 + +03:30.760 --> 03:32.230 +한 바퀴 돌아요 + +03:32.260 --> 03:33.820 +죄송해요, 다른 루프에 있었네요 + +03:33.820 --> 03:34.570 +이게 루프예요 + +03:34.570 --> 03:38.590 +한 루프만 통과하면 돼요 + +03:38.650 --> 03:44.920 +그 결과, 보시다시피 두 개가 나란히 놓여 있어요 + +03:44.920 --> 03:50.140 +하지만 이건 중첩된 루프가 아니라 루프 안의 루프예요 + +03:50.170 --> 03:55.510 +아주 짧은 시간 안에 get get에 도달할 수 있죠 + +03:55.510 --> 04:01.540 +클로드가 한 것은 정말 기발합니다 함수의 의도를 이해했어요 + +04:01.540 --> 04:07.360 +파이썬 을 동등한 C++ 코드로 번역한 게 아닙니다 취소했어요 + +04:07.360 --> 04:15.130 +같은 답을 얻도록 프롬프트된 거죠 아주 짧은 시간 동안에요 + +04:15.130 --> 04:20.020 +클로드가 정말 멋진 결과를 냈네요 + +04:20.080 --> 04:23.230 +박수 한번 주세요 + +04:23.230 --> 04:26.800 +씰 리더보드에서 본 것과 일치해요 + +04:26.800 --> 04:31.000 +클로드 3세예요 5개의 소네트가 쇼를 지배하죠 + +04:31.000 --> 04:32.530 +클로드가 이겼어요 diff --git a/week5/community-contributions/subtitles/srts/59295545/en_US.srt b/week5/community-contributions/subtitles/srts/59295545/en_US.srt new file mode 100755 index 0000000..3d6556a --- /dev/null +++ 
b/week5/community-contributions/subtitles/srts/59295545/en_US.srt @@ -0,0 +1,73 @@ +WEBVTT + +00:00.350 --> 00:02.270 +I really hope you've enjoyed this week. + +00:02.270 --> 00:03.800 +We've got tons done. + +00:03.800 --> 00:10.190 +We've experimented with all sorts of new techniques and models, and hopefully you've learned a ton + +00:10.190 --> 00:11.120 +through it all. + +00:11.270 --> 00:17.660 +Uh, at this point, not only can you code frontier models, including AI assistants, not only can + +00:17.660 --> 00:24.770 +you choose the right model for your project backed by metrics from leaderboards arenas, but also you + +00:24.770 --> 00:30.980 +can use frontier and open source models to generate code and as an extra little tool to to add to your + +00:30.980 --> 00:31.640 +tool belt. + +00:31.670 --> 00:41.750 +You are also able to deploy models as inference endpoints using Hugging Face's inference endpoint functionality. + +00:41.750 --> 00:45.470 +So congratulations on all of these skills. + +00:45.620 --> 00:51.440 +Uh, and uh, perhaps, like me, you're slightly disappointed that open source didn't quite measure + +00:51.440 --> 00:54.080 +up, but it did a fine job. + +00:54.080 --> 00:55.280 +We had great fun with it. + +00:55.280 --> 01:02.690 +And for many tasks of optimizing Python to C plus plus, you would find that Codex would do great. + +01:02.750 --> 01:10.250 +Um, but when it comes down to it, uh, putting a 7 billion parameter model up against a much more + +01:10.280 --> 01:17.810 +than 1.76 trillion parameter model, that was GPT four and GPT four and Claude 3.5 sonnet are considered + +01:17.810 --> 01:19.070 +to be much bigger. + +01:19.070 --> 01:22.880 +Uh, so it wasn't a particularly fair match. + +01:22.880 --> 01:28.580 +And in the circumstances, I think Codex did very well indeed. 
+ +01:28.640 --> 01:34.940 +Next time you're going to be comparing open source and closed source models, performance, talking + +01:34.940 --> 01:42.710 +about different commercial use cases for code generation, and being able to build solutions that use + +01:42.710 --> 01:46.760 +this kind of code generation technique for all sorts of tasks. + +01:46.760 --> 01:47.840 +Uh, looking forward to it. + +01:47.840 --> 01:48.710 +I'll see you then. diff --git a/week5/community-contributions/subtitles/srts/59295545/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295545/ja_JP.srt new file mode 100755 index 0000000..767a894 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295545/ja_JP.srt @@ -0,0 +1,58 @@ +WEBVTT + +00:00.350 --> 00:02.270 +今週も楽しんでもらえると嬉しい。 + +00:02.270 --> 00:03.800 +何度もやっている。 + +00:03.800 --> 00:11.120 +私たちはあらゆる種類の新しいテクニックやモデルを試してきた。 + +00:11.270 --> 00:17.660 +この時点で、 AIアシスタントを含むフロンティア・モデルをコーディングできるだけでなく、 リーダーボード・アリーナからの指標に裏打ちされたプロジェクトに適したモデルを選択できるだけでなく、 + +00:17.660 --> 00:31.640 +フロンティア・モデルやオープンソース・モデルをコード生成に使用したり、 ツールベルトに追加するちょっとしたツールとして使用することもできる。 + +00:31.670 --> 00:41.750 +Hugging Faceの推論エンドポイント機能を使って、 推論エンドポイントとしてモデルをデプロイすることもできる。 + +00:41.750 --> 00:45.470 +おめでとう。 + +00:45.620 --> 00:51.440 +そして、 おそらく私のように、 オープンソースがまったく及ばなかったことに少しがっかりしているのだろうが、 + +00:51.440 --> 00:54.080 +それはそれで立派な仕事だった。 + +00:54.080 --> 00:55.280 +とても楽しかった。 + +00:55.280 --> 01:02.690 +また、 PythonをC+++に最適化する多くの作業では、 Codexが素晴らしいことがわかるだろう。 + +01:02.750 --> 01:17.810 +でも結局のところ、 70億のパラメータを持つモデルを、 1よりはるかに多いパラメータと比較することになるんだ。 + +01:17.810 --> 01:17.810 +76兆のパラメーターモデル、 GPT4とGPT4とクロード3だった。 5ソネットはもっと大きいと考えられている。 + +01:19.070 --> 01:22.880 +特にフェアな試合ではなかったね。 + +01:22.880 --> 01:28.580 +そのような状況の中で、 コーデックスは本当によくやったと思う。 + +01:28.640 --> 01:34.940 +次回は、 オープンソースとクローズドソースのモデルの比較、 パフォーマンス、 コード生成のさまざまな商用ユースケースについて、 + +01:34.940 --> 01:46.760 +そしてあらゆる種類のタスクにこの種のコード生成技術を使用するソリューションを構築できるようになることについて話す予定だ。 + +01:46.760 --> 01:47.840 +楽しみにしているよ。 + +01:47.840 --> 01:48.710 
+それじゃ、 また diff --git a/week5/community-contributions/subtitles/srts/59295545/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295545/ko_KR.srt new file mode 100755 index 0000000..bf3fc1b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295545/ko_KR.srt @@ -0,0 +1,70 @@ +WEBVTT + +00:00.350 --> 00:02.270 +이번 주를 즐기셨길 바라요 + +00:02.270 --> 00:03.800 +할 일이 산더미예요 + +00:03.800 --> 00:10.190 +다양한 기술과 모델로 실험을 해 봤는데요 많은 걸 배우셨길 + +00:10.190 --> 00:11.120 +바라요 + +00:11.270 --> 00:17.660 +현재 여러분은 인공지능 어시스턴트를 포함한 프론티어 모델을 코딩할 수 있을 뿐 아니라 + +00:17.660 --> 00:24.770 +리더보드 분야에서 정확한 모델을 선택할 수 있을 뿐 아니라 코드를 생성하기 위해 프론티어와 오픈 + +00:24.770 --> 00:31.640 +소스 모델을 사용할 수 있습니다 또한 여러분의 도구 벨트에 추가할 추가적인 도구로 말이죠 + +00:31.670 --> 00:41.750 +또한 얼굴을 안는 기능성으로 모델을 엔드포인트로서 배포할 수 있죠 + +00:41.750 --> 00:45.470 +그 모든 기술을 가진 걸 축하해요 + +00:45.620 --> 00:51.440 +그리고 저처럼 실망하셨을지도 모르겠네요 오픈 소스가 기대에 못 + +00:51.440 --> 00:54.080 +미쳐서요 하지만 잘 해냈죠 + +00:54.080 --> 00:55.280 +정말 재미있었어요 + +00:55.280 --> 01:02.690 +파이썬 을 C++로 최적화하는 많은 작업들을 할 수 있습니다 코덱스가 잘 될 것이라는 것을 알 수 있죠 + +01:02.750 --> 01:10.250 +하지만 따지고 보면 1명보다 훨씬 많은 70억 매개 변수 모델을 상대하는 + +01:10.280 --> 01:17.810 +건 힘들어요 76조 개의 매개 변수 모델로 GPT 4와 GPT 4 클로드 3이 있었죠 소네트 5편이 훨씬 크다고 + +01:17.810 --> 01:19.070 +여겨지죠 + +01:19.070 --> 01:22.880 +딱히 공평한 경기는 아니었죠 + +01:22.880 --> 01:28.580 +그 상황에서 코덱스는 아주 잘 해냈어요 + +01:28.640 --> 01:34.940 +다음에는 오픈 소스와 폐쇄 소스 모델을 비교하고 성능을 비교하고 코드 + +01:34.940 --> 01:42.710 +생성을 위한 상업적 이용 사례를 얘기하고 모든 종류의 작업에 이런 코드 생성 기술을 사용하는 + +01:42.710 --> 01:46.760 +솔루션을 만들 수 있는지 얘기할 거예요 + +01:46.760 --> 01:47.840 +정말 기대돼요 + +01:47.840 --> 01:48.710 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59295549/en_US.srt b/week5/community-contributions/subtitles/srts/59295549/en_US.srt new file mode 100755 index 0000000..b866a31 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295549/en_US.srt @@ -0,0 +1,235 @@ +WEBVTT + +00:00.890 --> 00:04.100 +And welcome back to our challenge again. 
+
+00:04.130 --> 00:08.000
+And this time we are working with our beautiful prototype.
+
+00:08.060 --> 00:16.400
+Uh, with this time the default set to the Python Hard challenge rather than the simple pi code.
+
+00:16.400 --> 00:17.750
+We bring this up.
+
+00:17.750 --> 00:18.800
+Here it is.
+
+00:18.800 --> 00:25.910
+We're going to kick off the Python run, which you may remember takes about 27 to 28 seconds.
+
+00:25.910 --> 00:27.620
+So we'll be sitting here for a little while.
+
+00:27.620 --> 00:34.430
+While that runs, I am then going to do use GPT to convert the code to C plus plus and run that.
+
+00:34.430 --> 00:40.580
+And then we'll do the same with Claude and see how Claude fares and see if there are any differences
+
+00:40.580 --> 00:42.200
+from last time.
+
+00:42.590 --> 00:44.060
+Uh, almost there.
+
+00:44.060 --> 00:48.080
+You can watch Gradio gives us a little timer, which is very handy in these situations.
+
+00:48.080 --> 00:49.790
+So we'll know that there we go.
+
+00:49.820 --> 00:52.070
+We get the answer that is the right answer.
+
+00:52.070 --> 00:54.530
+And it took about 28 seconds.
+
+00:54.560 --> 00:55.250
+All right.
+
+00:55.250 --> 00:58.370
+We asked GPT to convert this into C plus plus code.
+
+00:58.370 --> 01:00.440
+Here is the C plus plus code.
+
+01:00.560 --> 01:02.420
+There is the result.
+
+01:02.420 --> 01:06.110
+And we will then run that C plus plus code.
+
+01:07.820 --> 01:10.460
+And it has the same problem as before.
+
+01:10.490 --> 01:17.540
+There's I believe it's a number overflow that's resulting in the answer being zero and not even that
+
+01:17.540 --> 01:17.930
+quick.
+
+01:17.930 --> 01:23.870
+Because in giving that answer zero, it also had some nested loops happening.
+
+01:23.870 --> 01:26.090
+Let's see how Claude fares.
+
+01:26.090 --> 01:29.210
+We switch to Claude, we convert the code.
+
+01:34.850 --> 01:35.900
+Here we go.
+ +01:39.650 --> 01:41.690 +Let's see what Claude has done. + +01:42.560 --> 01:43.370 +Aha! + +01:44.270 --> 01:50.870 +Interestingly, this time Claude has not seen that it can do the single loop. + +01:50.870 --> 01:54.230 +So we've got a different answer from Claude this time. + +01:54.440 --> 01:54.980 +There we go. + +01:55.010 --> 01:58.550 +We'll see how Claude does if its code runs. + +01:58.550 --> 02:00.560 +At least let's run that C plus. + +02:00.560 --> 02:06.470 +Plus it got the right answer and it took 0.6 seconds. + +02:06.470 --> 02:09.770 +So Claude at least gets the right answer. + +02:09.770 --> 02:12.260 +But it is not this time. + +02:12.260 --> 02:13.730 +This time that we ran with Claude. + +02:13.730 --> 02:21.830 +It didn't crush it like last time, because it didn't actually spot that opportunity to to to collapse + +02:21.830 --> 02:24.570 +the loop using Canon's algorithm. + +02:24.930 --> 02:26.460 +And now remember it's called. + +02:26.820 --> 02:33.240 +So I guess we can try one more time converting the code and see if it if it gets it on a second attempt. + +02:33.480 --> 02:34.350 +Let's see. + +02:34.350 --> 02:35.760 +Let's see, let's see. + +02:37.590 --> 02:39.510 +That does look like it's one loop doesn't it. + +02:39.510 --> 02:39.870 +All right. + +02:39.870 --> 02:43.470 +Let's let's see if it's, uh, that's going to work for us. + +02:44.160 --> 02:45.330 +It did work. + +02:45.330 --> 02:46.560 +Second time lucky. + +02:46.590 --> 02:47.910 +Second time lucky. + +02:47.940 --> 02:49.320 +We get the right answer. + +02:49.320 --> 02:52.110 +And again we have that breathtaking. + +02:52.140 --> 02:52.980 +Oh my goodness. + +02:53.010 --> 02:55.200 +It's a whole lot better than last time as well. + +02:55.350 --> 02:59.970 +Obviously there's some, uh, dependency on what else is running on on my processor. 
+ +02:59.970 --> 03:07.650 +We're probably down to the sort of the noise levels, but that is 0.4 of a, uh, of a millisecond, + +03:07.650 --> 03:11.640 +uh, compared to, uh, let's do the maths one more time. + +03:11.640 --> 03:16.680 +It's embarrassing that I can't do these sort of orders of magnitude in my head, but I'm too afraid + +03:16.680 --> 03:18.750 +that I'll get off by by too much. + +03:18.780 --> 03:25.200 +28.3 divided by 0.000446. + +03:25.560 --> 03:29.760 +Uh, it's more than 60,000 times faster. + +03:29.760 --> 03:33.900 +Uh, which, of course, is not that surprising given that it's found an algorithm that involves a single + +03:33.900 --> 03:37.560 +loop rather than a nested loop, but it is great to see that. + +03:37.560 --> 03:44.040 +So in summary, Claude managed to to to work where GPT four failed. + +03:44.040 --> 03:49.800 +And Claude sometimes not always, is breathtakingly faster. + +03:49.800 --> 03:54.390 +Breathtaking, I will say, from doing my experiments that there were a couple of occasions when Claude + +03:54.390 --> 03:59.850 +also made a mistake with the number rounding and and both Claude and GPT four zero got zero. + +03:59.850 --> 04:02.670 +But GPT four zero seems to consistently make that mistake. + +04:02.670 --> 04:09.210 +And more often than not, Claude not only gets it right, but also spots this opportunity to rewrite + +04:09.210 --> 04:12.090 +the algorithm and be staggeringly faster. + +04:12.090 --> 04:18.030 +So I think, again, I double down and say, this is a victory for Claude. + +04:18.180 --> 04:22.890 +Um, and then next week we are going to switch to open source. + +04:22.890 --> 04:25.080 +We're going to assess open source models. + +04:25.080 --> 04:32.040 +We're going to see how open source models generate code and use a solution with open source Llms. + +04:32.040 --> 04:36.630 +The question will be can open source compete with Claude? + +04:36.660 --> 04:38.310 +3.5 sonnet. 
+ +04:38.370 --> 04:42.840 +Uh, with this astoundingly fast, uh, result. + +04:43.290 --> 04:44.010 +See you then. diff --git a/week5/community-contributions/subtitles/srts/59295549/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295549/ja_JP.srt new file mode 100755 index 0000000..76bb67a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295549/ja_JP.srt @@ -0,0 +1,220 @@ +WEBVTT + +00:00.890 --> 00:04.100 +そして、 また我々の挑戦に戻ってきてくれたことを歓迎する。 + +00:04.130 --> 00:08.000 +そして今回は、 私たちの美しいプロトタイプを使っている。 + +00:08.060 --> 00:16.400 +ええと、 今回のデフォルトは、 単純なπコードではなく、 Python Hardチャレンジに設定されています。 + +00:16.400 --> 00:17.750 +私たちはこれを持ち出した。 + +00:17.750 --> 00:18.800 +これだ。 + +00:18.800 --> 00:25.910 +Pythonの実行を開始し、 約2720 8秒かかることを覚えているかもしれない。 + +00:25.910 --> 00:27.620 +だから、 しばらくここに座っていることになる。 + +00:27.620 --> 00:34.430 +その間に、 GPTを使ってコードをCプラスプラスに変換し、 それを実行するつもりだ。 + +00:34.430 --> 00:42.200 +そして、 クロードにも同じことをやって、 クロードがどう出るか、 前回との違いがあるかどうかを見る。 + +00:42.590 --> 00:44.060 +もう少しだ。 + +00:44.060 --> 00:48.080 +Gradioが小さなタイマーを提供してくれるのを見ることができる。 + +00:48.080 --> 00:49.790 +だから、 私たちはそのことを知ることになる。 + +00:49.820 --> 00:52.070 +私たちは正しい答えを得る。 + +00:52.070 --> 00:54.530 +そして約28秒かかった。 + +00:54.560 --> 00:55.250 +分かった。 + +00:55.250 --> 00:58.370 +私たちはGPTにこれをC++のコードに変換するよう依頼した。 + +00:58.370 --> 01:00.440 +これがCプラスプラスのコードだ。 + +01:00.560 --> 01:02.420 +これが結果だ。 + +01:02.420 --> 01:06.110 +そして、 そのCプラスプラスのコードを実行する。 + +01:07.820 --> 01:10.460 +そして、 以前と同じ問題を抱えている。 + +01:10.490 --> 01:17.930 +数字がオーバーフローして、 答えがゼロになっているのだと思う。 + +01:17.930 --> 01:23.870 +というのも、 その答えにゼロを与える際に、 いくつかの入れ子ループも起こっていたからだ。 + +01:23.870 --> 01:26.090 +クロードがどう出るか見てみよう。 + +01:26.090 --> 01:29.210 +クロードに切り替え、 コードを変換する。 + +01:34.850 --> 01:35.900 +さあ、 始めよう。 + +01:39.650 --> 01:41.690 +クロードが何をしたか見てみよう。 + +01:42.560 --> 01:43.370 +嗚呼! 
+ +01:44.270 --> 01:50.870 +興味深いことに、 今回のクロードはシングルループができることを確認していない。 + +01:50.870 --> 01:54.230 +そこで、 今回はクロードから別の答えをもらった。 + +01:54.440 --> 01:54.980 +これでよし。 + +01:55.010 --> 01:58.550 +クロードのコードが実行されれば、 どうなるか見てみよう。 + +01:58.550 --> 02:00.560 +せめてCプラスを走らせよう。 + +02:00.560 --> 02:06.470 +それに、 正しい答えを導き出したし、 時間も0だった。 6秒。 + +02:06.470 --> 02:09.770 +だからクロードは少なくとも正しい答えを得た。 + +02:09.770 --> 02:12.260 +だが、 今回は違う。 + +02:12.260 --> 02:13.730 +今回はクロードと一緒に走った。 + +02:13.730 --> 02:24.570 +前回のように潰せなかったのは、 キヤノンのアルゴリズムでループを崩すチャンスを見つけられなかったからだ。 + +02:24.930 --> 02:26.460 +そして今、 それがこう呼ばれていることを思い出してほしい。 + +02:26.820 --> 02:33.240 +だから、 もう1回コードを変換してみて、 2回目にうまくいくかどうか試してみよう。 + +02:33.480 --> 02:34.350 +見てみよう。 + +02:34.350 --> 02:35.760 +見てみよう、 見てみよう。 + +02:37.590 --> 02:39.510 +1つのループのように見えるね。 + +02:39.510 --> 02:39.870 +分かった。 + +02:39.870 --> 02:43.470 +それがうまくいくかどうか、 確かめよう。 + +02:44.160 --> 02:45.330 +うまくいったよ。 + +02:45.330 --> 02:46.560 +2度目の幸運だ。 + +02:46.590 --> 02:47.910 +2度目の幸運だ。 + +02:47.940 --> 02:49.320 +私たちは正しい答えを得る。 + +02:49.320 --> 02:52.110 +またしても息をのむような展開になった。 + +02:52.140 --> 02:52.980 +なんてことだ。 + +02:53.010 --> 02:55.200 +前回よりも全然いい。 + +02:55.350 --> 02:59.970 +明らかに、 僕のプロセッサーで他に何が動いているかに依存している。 + +02:59.970 --> 03:07.650 +おそらく騒音レベルまで下がっているだろうが、 それは0だ。 4ミリ秒の、 ああ、 ミリ秒の、 ああ、 + +03:07.650 --> 03:11.640 +もう一回計算してみよう。 + +03:11.640 --> 03:18.750 +こういう桁違いのことが頭の中でできないのは恥ずかしいことだが、 あまりに大きくずれてしまうのが怖くてできない。 + +03:18.780 --> 03:25.200 +28. 3÷0. 000446. 
+ +03:25.560 --> 03:29.760 +ええと、 60,000倍以上速いんだ。 + +03:29.760 --> 03:33.900 +もちろん、 ネストされたループではなく、 単一のループを含むアルゴリズムを発見したことを考えれば、 + +03:33.900 --> 03:37.560 +それほど驚くべきことではないが、 それを見るのは素晴らしいことだ。 + +03:37.560 --> 03:44.040 +まとめると、 クロードはGPT4が失敗したところをなんとか成功させたということだ。 + +03:44.040 --> 03:49.800 +そして、 クロードは常にではないが、 息をのむほど速い。 + +03:49.800 --> 03:54.390 +息を呑むようなことだが、 クロードも四捨五入を間違えて、 + +03:54.390 --> 03:59.850 +クロードとGPTの4人がゼロになったことが何度かあった。 + +03:59.850 --> 04:02.670 +しかし、 GPTフォーゼロは一貫してその間違いを犯しているようだ。 + +04:02.670 --> 04:12.090 +そして多くの場合、 クロードはそれを正しく理解するだけでなく、 アルゴリズムを書き換えて驚異的に速くなるチャンスを見つける。 + +04:12.090 --> 04:18.030 +だから、 もう一度言うが、 これはクロードの勝利だ。 + +04:18.180 --> 04:22.890 +それから来週はオープンソースに切り替える予定だ。 + +04:22.890 --> 04:25.080 +オープンソースのモデルを評価するつもりだ。 + +04:25.080 --> 04:32.040 +オープンソースのモデルがどのようにコードを生成し、 オープンソースのLlmsを使ったソリューションを使うかを見ていく。 + +04:32.040 --> 04:36.630 +問題は、 オープンソースがクロードに対抗できるかどうかだ。 + +04:36.660 --> 04:38.310 +3. 5ソネット + +04:38.370 --> 04:42.840 +ええと、 この驚くべき速さで、 ええと、 結果が出た。 + +04:43.290 --> 04:44.010 +ではまた diff --git a/week5/community-contributions/subtitles/srts/59295549/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295549/ko_KR.srt new file mode 100755 index 0000000..284cbdd --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295549/ko_KR.srt @@ -0,0 +1,232 @@ +WEBVTT + +00:00.890 --> 00:04.100 +다시 도전 과제를 시작하죠 + +00:04.130 --> 00:08.000 +이번에는 아름다운 프로토타입을 준비했어요 + +00:08.060 --> 00:16.400 +이번에는 파이썬 하드 챌린지로 기본 설정합니다 간단한 파이 코드보다는요 + +00:16.400 --> 00:17.750 +이걸 꺼내요 + +00:17.750 --> 00:18.800 +여기 있네요 + +00:18.800 --> 00:25.910 +파이썬 실행을 시작하겠습니다 기억하실지 모르겠지만 2720 8초가 걸리죠 + +00:25.910 --> 00:27.620 +여기 좀 앉아 있어야겠어요 + +00:27.620 --> 00:34.430 +실행되는 동안 GPT를 이용해 코드를 C++로 변환하고 실행할게요 + +00:34.430 --> 00:40.580 +클로드가 어떻게 하는지 보고 지난번과 다른 점이 + +00:40.580 --> 00:42.200 +있는지 보죠 + +00:42.590 --> 00:44.060 +거의 다 왔어요 + +00:44.060 --> 00:48.080 +그라디오가 타이머를 주는 걸 보세요 이런 상황에서 아주 편리하죠 + +00:48.080 --> 00:49.790 +알게 될 겁니다 됐어요 + +00:49.820 --> 00:52.070 +Get it, get it, get 
it, get it, get it, get it, get it, get it, get it, get, get 정답, 정답이 나왔어요 + +00:52.070 --> 00:54.530 +28초 정도 걸렸어요 + +00:54.560 --> 00:55.250 +좋아요 + +00:55.250 --> 00:58.370 +GPT에 이걸 C 플러스 플러스 코드로 변환하라고 했어요 + +00:58.370 --> 01:00.440 +C 플러스 플러스 코드예요 + +01:00.560 --> 01:02.420 +결과가 나왔네요 + +01:02.420 --> 01:06.110 +그런 다음 C++ 코드를 실행하죠 + +01:07.820 --> 01:10.460 +예전과 같은 문제가 있어요 + +01:10.490 --> 01:17.930 +숫자가 넘쳐나서 답이 0이 된 것 같아요 그렇게 빠르지도 않고요 + +01:17.930 --> 01:23.870 +왜냐하면 그 답에 0을 주는데 중첩된 루프도 발생하거든요 + +01:23.870 --> 01:26.090 +클로드가 어떻게 하는지 보죠 + +01:26.090 --> 01:29.210 +클로드로 바꿔서 코드를 변환해요 + +01:34.850 --> 01:35.900 +시작할게요 + +01:39.650 --> 01:41.690 +클로드가 뭘 했는지 보러 가요 + +01:42.560 --> 01:43.370 +네 + +01:44.270 --> 01:50.870 +흥미롭게도 클로드는 단일 회전을 할 수 있다는 걸 못 봤어요 + +01:50.870 --> 01:54.230 +클로드의 대답이 이번에는 달라요 + +01:54.440 --> 01:54.980 +됐어요 + +01:55.010 --> 01:58.550 +코드가 작동하는지 클로드가 어떻게 하는지 보죠 + +01:58.550 --> 02:00.560 +C 플러스라도 해 보죠 + +02:00.560 --> 02:06.470 +게다가 정답도 맞혔고 0점이었어요 6초요 + +02:06.470 --> 02:09.770 +클로드가 정답을 맞혔네요 + +02:09.770 --> 02:12.260 +이번엔 아니에요 + +02:12.260 --> 02:13.730 +이번에는 클로드와 달렸죠 + +02:13.730 --> 02:21.830 +지난번처럼 실패하진 않았어요 캐논 알고리즘을 이용해 루프를 무너뜨릴 기회를 + +02:21.830 --> 02:24.570 +포착하지 못했거든요 + +02:24.930 --> 02:26.460 +이 요리의 이름을 기억하세요 + +02:26.820 --> 02:33.240 +코드를 한 번 더 변환해 두 번째 시도에 성공하는지 보죠 + +02:33.480 --> 02:34.350 +어디 보죠 + +02:34.350 --> 02:35.760 +어디 봐요 + +02:37.590 --> 02:39.510 +한 바퀴 도는 것 같죠? + +02:39.510 --> 02:39.870 +좋아요 + +02:39.870 --> 02:43.470 +이 정도면 될지 보죠 + +02:44.160 --> 02:45.330 +효과가 있었어요 + +02:45.330 --> 02:46.560 +두 번째는 운이 좋았어요 + +02:46.590 --> 02:47.910 +두 번째는 운이 좋았어요 + +02:47.940 --> 02:49.320 +Get it, Get it 정답이 나오죠 + +02:49.320 --> 02:52.110 +숨막히게 아름다운 장면이죠 + +02:52.140 --> 02:52.980 +말도 안 돼요 + +02:53.010 --> 02:55.200 +지난번보다 훨씬 나아요 + +02:55.350 --> 02:59.970 +제 프로세서에 실행되는 다른 것들에 의존성이 있어요 + +02:59.970 --> 03:07.650 +소음 수준으로 떨어졌지만 0이에요 4분의 1초요, 비교하면... 
+ +03:07.650 --> 03:11.640 +계산을 다시 해 보죠 + +03:11.640 --> 03:16.680 +머리로 규모를 계산할 수 없다는 게 부끄럽지만 너무 많이 계산하지 않을까 + +03:16.680 --> 03:18.750 +봐 두려워요. Get it. + +03:18.780 --> 03:25.200 +28살요 3 나누기 0은요? 000446년요 + +03:25.560 --> 03:29.760 +60,000배 이상 빨라요 + +03:29.760 --> 03:33.900 +물론 그리 놀라운 일은 아니죠 중첩된 루프가 아닌 싱글 루프를 + +03:33.900 --> 03:37.560 +포함한 알고리즘을 찾았으니까요 하지만 보기 좋네요 + +03:37.560 --> 03:44.040 +요약하자면 클로드는 GPT 4가 실패한 일을 해냈죠 + +03:44.040 --> 03:49.800 +클로드는 항상 빠른 건 아니지만 가끔은 엄청나게 빨라요 + +03:49.800 --> 03:54.390 +숨이 막힐 정도로요 실험을 하면서 클로드가 회전하는 숫자를 + +03:54.390 --> 03:59.850 +틀리는 경우가 몇 번 있었어요 클로드와 GPT 40 둘 다 0이 나왔죠 + +03:59.850 --> 04:02.670 +GPT 40은 계속 그런 실수를 반복하는 것 같아요 + +04:02.670 --> 04:09.210 +클로드는 종종 그걸 제대로 해낼 뿐 아니라 알고리즘을 다시 쓸 기회를 포착해 + +04:09.210 --> 04:12.090 +놀랍도록 빠르게 만들죠 + +04:12.090 --> 04:18.030 +그래서 전 다시 강조해요 클로드의 승리라고요 + +04:18.180 --> 04:22.890 +다음 주에는 오픈 소스로 전환할 거예요 + +04:22.890 --> 04:25.080 +오픈 소스 모델을 평가할 거예요 + +04:25.080 --> 04:32.040 +오픈 소스 모델이 어떻게 코드를 생성하고 오픈 소스 LMS 솔루션을 사용하는지 보죠 + +04:32.040 --> 04:36.630 +오픈 소스가 클로드와 경쟁할 수 있을까요? + +04:36.660 --> 04:38.310 +3번요 5 소네트요 + +04:38.370 --> 04:42.840 +이렇게 놀라울 정도로 빠른 결과로요 + +04:43.290 --> 04:44.010 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59295553/en_US.srt b/week5/community-contributions/subtitles/srts/59295553/en_US.srt new file mode 100755 index 0000000..ca8b6d9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295553/en_US.srt @@ -0,0 +1,217 @@ +WEBVTT + +00:00.500 --> 00:01.460 +Welcome back. + +00:01.460 --> 00:08.330 +In the last part, we gave our GPT four and clawed the challenge of converting a simple Python program + +00:08.330 --> 00:14.630 +to calculate Pi into efficient C plus plus code, and both frontier models did well. + +00:15.050 --> 00:21.620 +Although GPT four only did a few more hints, and now we're going to move on to a harder problem. 
+ +00:21.890 --> 00:29.210 +This code here, this Python code called Python hard, uh, is is is doing a little bit more work. + +00:29.300 --> 00:37.040 +Uh, it is going to, uh, calculate something called the maximum subarray sum. + +00:37.040 --> 00:39.170 +And this is what that does. + +00:39.200 --> 00:45.590 +Uh, the challenge is suppose you are given an array, a list of a large number of positive and negative + +00:45.590 --> 00:49.820 +numbers of random or pseudo random positive and negative numbers. + +00:49.820 --> 00:59.590 +And the question you are asked is if you were to take any subarray, any set of, of consecutive Numbers + +00:59.590 --> 01:06.520 +in that bigger array and add them up, you would get a sum of that section. + +01:06.520 --> 01:11.920 +What is the largest sum of any possible subarray? + +01:11.920 --> 01:17.200 +Pick any start point and end point that you wish in the array, and try and pick it so that you get + +01:17.200 --> 01:24.460 +the largest subarray and return that largest number, the number of the largest subarray. + +01:25.000 --> 01:28.630 +And of course, you have to bear in mind that there are negative numbers here. + +01:28.630 --> 01:31.690 +So it's not there's not an obvious answer. + +01:31.960 --> 01:36.820 +And the indeed the way that this code works is simply trying everything. + +01:36.820 --> 01:43.390 +It's a loop within a loop, loops through the start point, loops through the end point, and figures + +01:43.390 --> 01:48.580 +out it compares the current sum with the maximum sum and returns that sum. + +01:49.330 --> 01:51.340 +So that is what this function is doing. + +01:51.340 --> 01:52.960 +That is the meat of this code. + +01:52.960 --> 01:55.180 +There are two other things going on here. 
+ +01:55.300 --> 02:02.180 +One of them is that we want to create a large number of pseudo random numbers, and I don't want to + +02:02.180 --> 02:09.980 +use something like Python's random number method, the random library, because then that's obviously + +02:09.980 --> 02:12.620 +not going to match with C plus plus libraries. + +02:12.740 --> 02:20.360 +Um, so instead what we've done here is, is just implement a very common, very simple pseudo random + +02:20.360 --> 02:21.530 +number generator. + +02:21.530 --> 02:24.890 +It's known as the linear Congruential generator. + +02:24.890 --> 02:27.680 +And you can Google it if you need to see the formula. + +02:27.770 --> 02:28.670 +That's what I did. + +02:28.940 --> 02:36.260 +And it's a simple way to generate a sort of predictable stream of random numbers based on a seed, uh, + +02:36.260 --> 02:43.610 +which is what we do so that we can have consistency and check that our C plus plus code gives identical + +02:43.610 --> 02:45.650 +results to our Python code. + +02:46.070 --> 02:55.040 +Um, and so yeah, this with a generator yielding the value is going to create these random numbers. + +02:55.040 --> 02:59.560 +And of course that's sneaky of me, because it's going to be hard for the frontier models to figure + +02:59.560 --> 03:02.410 +out how to convert that into C plus plus code. + +03:02.410 --> 03:04.330 +It's going to have to approach it. + +03:04.450 --> 03:09.670 +Um, not it's not not not an easy conversion, but not giving it things like the Pi function. + +03:09.880 --> 03:12.610 +Um, this is the one that does the loop within the loop. + +03:12.910 --> 03:18.760 +Um, and this here is something which repeats this 20 times. + +03:18.760 --> 03:28.810 +And uh, for the, uh, takes the total sum of all 20 times of doing it with a different seed. 
+ +03:28.870 --> 03:36.070 +There's some settings here to get us off to a to to a consistent make sure that we're doing this consistently + +03:36.070 --> 03:37.090 +for every run. + +03:37.270 --> 03:45.760 +Um, and then we time doing this 20 times so that this is another of those cases where I should probably + +03:45.790 --> 03:50.890 +have been talking about it while I was running it, because if I remember right, this will take about + +03:50.890 --> 03:52.570 +a minute to run in Python. + +03:52.570 --> 03:56.880 +So I'm going to have to figure out how to interest you for a minute of waiting now. + +03:57.000 --> 04:04.890 +Uh, again, exac is the, uh, the definitely one that should cause you to raise an eyebrow any time + +04:04.890 --> 04:07.920 +that someone tells you to run exac of anything. + +04:08.040 --> 04:13.800 +Um, but of course, you can take a careful look through my code here and satisfy yourself that there + +04:13.800 --> 04:20.070 +is nothing devious happening here, and there is nothing but a bunch of maths. + +04:20.160 --> 04:23.160 +Uh, and besides, I have a British accent. + +04:23.160 --> 04:24.840 +That must mean I'm believable. + +04:25.170 --> 04:25.440 +Uh. + +04:25.440 --> 04:26.010 +All right. + +04:26.040 --> 04:28.350 +Anyway, that's enough prattling. + +04:28.470 --> 04:29.880 +Uh, it's done its thing. + +04:29.880 --> 04:33.960 +It was only 27 seconds, so I didn't need to jabber away like that. + +04:34.110 --> 04:43.110 +Um, and the total, uh, maximum subarray sum across all 20 runs is this rather large number of 10,980? + +04:43.140 --> 04:47.460 +I mean, it's not a massive number because of course we have positives and negatives that have all balance + +04:47.460 --> 04:48.780 +themselves out. + +04:49.320 --> 04:50.460 +All right. + +04:50.460 --> 04:57.880 +So now it's time for us to ask, uh, the GPT to make its version. + +04:57.910 --> 04:59.680 +Let's do that now. 
+ +05:05.140 --> 05:12.190 +Here comes GPT version, and we will now try compiling that. + +05:14.140 --> 05:19.030 +And it's generated a warning, uh, implicit conversion. + +05:19.240 --> 05:22.690 +And then it ran it and it got the wrong answer. + +05:22.690 --> 05:28.750 +So in this case, despite the fact that there are some hints and despite that, I warned it about exactly + +05:28.750 --> 05:37.870 +the mistake I believe it's made, uh, the, um, it has generated code that runs quite quickly, but + +05:37.870 --> 05:44.650 +sadly ends up with an error, uh, problem, I believe with the number overflows that some C plus plus + +05:44.650 --> 05:48.190 +expert will have to confirm, and it ends up with the number zero. + +05:48.280 --> 05:52.060 +After the break, we will find out how Claude does. diff --git a/week5/community-contributions/subtitles/srts/59295553/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295553/ja_JP.srt new file mode 100755 index 0000000..eda17ca --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295553/ja_JP.srt @@ -0,0 +1,175 @@ +WEBVTT + +00:00.500 --> 00:01.460 +お帰りなさい。 + +00:01.460 --> 00:08.330 +最後のパートでは、 円周率を計算するシンプルなPythonプログラムを効率的なCプラスアルファのコードに変換するという課題にGPTの4人とクローを与えたが、 + +00:08.330 --> 00:14.630 +どちらのフロンティアモデルもうまくいった。 + +00:15.050 --> 00:21.620 +GPT4ではもう少しヒントを出しただけだったが、 次はもっと難しい問題に移る。 + +00:21.890 --> 00:29.210 +このコードは、 Python hardと呼ばれるPythonのコードで、 もうちょっとだけ仕事がある。 + +00:29.300 --> 00:37.040 +最大サブアレイ和と呼ばれるものを計算するんだ。 + +00:37.040 --> 00:39.170 +そして、 これがその結果だ。 + +00:39.200 --> 00:45.590 +つまり、 ランダムな、 あるいは擬似的にランダムな正負の数の配列、 + +00:45.590 --> 00:49.820 +リストが与えられたとする。 + +00:49.820 --> 00:59.590 +この質問で問われているのは、 大きな配列の中にある連続した数字のサブ配列の集合をすべて取り出して足し合わせれば、 + +00:59.590 --> 01:06.520 +その部分の和が得られるということだ。 + +01:06.520 --> 01:11.920 +可能なサブアレイの最大の合計は? 
+ +01:11.920 --> 01:17.200 +配列の任意の始点と終点を選び、 最大の部分配列が得られるように選び、 + +01:17.200 --> 01:24.460 +その最大の数、 最大の部分配列の数を返す。 + +01:25.000 --> 01:28.630 +そしてもちろん、 ここにはマイナスの数字があることを念頭に置かなければならない。 + +01:28.630 --> 01:31.690 +だから、 明白な答えがあるわけではない。 + +01:31.960 --> 01:36.820 +そして、 このコードが実際に機能する方法は、 単純にすべてを試してみることだ。 + +01:36.820 --> 01:48.580 +ループの中のループで、 開始点をループし、 終了点をループし、 現在の合計と最大合計を比較してその合計を返す。 + +01:49.330 --> 01:51.340 +つまり、 この機能はそういうことなのだ。 + +01:51.340 --> 01:52.960 +それがこのコードの肝だ。 + +01:52.960 --> 01:55.180 +ここにはもう2つのことがある。 + +01:55.300 --> 02:12.620 +Pythonの乱数メソッドやrandomライブラリのようなものは使いたくない。 + +02:12.740 --> 02:21.530 +その代わりに、 ここではごく一般的な、 ごく単純な擬似乱数発生器を実装した。 + +02:21.530 --> 02:24.890 +これは線形合同発生器として知られている。 + +02:24.890 --> 02:27.680 +公式を見たければググればいい。 + +02:27.770 --> 02:28.670 +それが私のしたことだ。 + +02:28.940 --> 02:45.650 +これは、 シードに基づいて予測可能な乱数のストリームを生成する簡単な方法である。 + +02:46.070 --> 02:55.040 +このジェネレーターが乱数を生成する。 + +02:55.040 --> 03:02.410 +フロンティア・モデルにとって、 それをC+++のコードに変換する方法を見つけるのは難しいだろうから。 + +03:02.410 --> 03:04.330 +アプローチしなければならないだろう。 + +03:04.450 --> 03:09.670 +うーん、 簡単には変換できないけど、 円周率関数のようなものは与えられない。 + +03:09.880 --> 03:12.610 +ええと、 これはループの中でループを行うものです。 + +03:12.910 --> 03:18.760 +そして、 これを20回繰り返す。 + +03:18.760 --> 03:28.810 +そして......別のシードで行った20回の合計を取る。 + +03:28.870 --> 03:37.090 +ここでいくつかのセッティングを行うことで、 すべての走行で一貫した走りができるようになる。 + +03:37.270 --> 03:45.760 +ええと、 それからこれを20回やって時間を計っているんだけど、 + +03:45.790 --> 03:52.570 +これも実行しながら話すべきだったかもしれない。 + +03:52.570 --> 03:56.880 +だから、 これから1分間、 どうすれば君に興味を持って待ってもらえるか考えなければならない。 + +03:57.000 --> 04:04.890 +ええと、 繰り返しになるけど、 exacは、 誰かが何かにexacを実行するように言うたびに、 + +04:04.890 --> 04:07.920 +眉をひそめたくなるようなものだ。 + +04:08.040 --> 04:13.800 +でも、 もちろん、 ここで私のコードを注意深く見て、 何も悪賢いことは起きていないし、 + +04:13.800 --> 04:20.070 +数学の束しかないことを自分で納得することができる。 + +04:20.160 --> 04:23.160 +それに、 僕にはイギリス訛りがあるんだ。 + +04:23.160 --> 04:24.840 +それは私が信じられるということに違いない。 + +04:25.170 --> 04:25.440 +ええと。 + +04:25.440 --> 04:26.010 +分かった。 + +04:26.040 --> 04:28.350 +とにかく、 おしゃべりはここまでだ。 + +04:28.470 --> 04:29.880 
+ああ、 役目は終わったよ。 + +04:29.880 --> 04:33.960 +たった27秒だったから、 あんなにジャブジャブ喋る必要はなかった。 + +04:34.110 --> 04:43.110 +そして、 全20回を通したサブアレイの最大合計は、 10,980というかなり大きな数字になった。 + +04:43.140 --> 04:48.780 +もちろん、 プラスもマイナスもあり、 そのバランスが取れているからだ。 + +04:49.320 --> 04:50.460 +分かった。 + +04:50.460 --> 04:57.880 +だから今、 GPTにそのバージョンを作ってもらう時なんだ。 + +04:57.910 --> 04:59.680 +今、 そうしよう。 + +05:05.140 --> 05:12.190 +ここでGPTバージョンが登場したので、 それをコンパイルしてみよう。 + +05:14.140 --> 05:19.030 +そして、 暗黙の変換という警告が出た。 + +05:19.240 --> 05:22.690 +そしてそれを実行したところ、 間違った答えが返ってきた。 + +05:48.280 --> 05:52.060 +休憩の後、 クロードがどのようなプレーを見せるか見てみよう。 diff --git a/week5/community-contributions/subtitles/srts/59295553/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295553/ko_KR.srt new file mode 100755 index 0000000..4d2f2a2 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295553/ko_KR.srt @@ -0,0 +1,211 @@ +WEBVTT + +00:00.500 --> 00:01.460 +잘 돌아왔어요 + +00:01.460 --> 00:08.330 +마지막 부분에서 GPT 4를 제공했고 파이썬 을 이용한 간단한 파이 계산 프로그램을 효율적인 C++ + +00:08.330 --> 00:14.630 +코드로 전환하는 어려운 과제를 잘 해냈습니다 두 가지 미개척 모델 모두 좋은 결과를 거뒀죠 + +00:15.050 --> 00:21.620 +GPT 4는 힌트를 몇 개 더 줬을 뿐입니다 이제 더 어려운 문제로 넘어가죠 + +00:21.890 --> 00:29.210 +파이썬 코드의 경우, 파이썬 하드라고 불리는 이 코드는 좀 더 많은 작업을 수행하죠 비트 + +00:29.300 --> 00:37.040 +최대 저금액이라는 걸 계산하는 거예요 + +00:37.040 --> 00:39.170 +그 결과가 이거예요 + +00:39.200 --> 00:45.590 +배열을 받았다고 가정해 보죠 양수와 음수가 여러 개 있는 난수 + +00:45.590 --> 00:49.820 +혹은 양수와 음수가 나열된 배열이에요 + +00:49.820 --> 00:59.590 +여기서 나오는 질문은 get a-aray, 혹은 더 큰 배열에서 연속되는 숫자 집합을 + +00:59.590 --> 01:06.520 +가져다가 더하면 해당 섹션의 합계가 나온다는 거죠 + +01:06.520 --> 01:11.920 +가능한 가장 큰 자금은 얼마일까요? 
+ +01:11.920 --> 01:17.200 +배열에서 원하는 시작점과 끝점을 선택해 그걸 선택해 가장 큰 + +01:17.200 --> 01:24.460 +서브 라이가 나오게 하고 가장 큰 숫자를 반환하는 거죠 가장 큰 서브 라이의 숫자요 + +01:25.000 --> 01:28.630 +물론 음수가 있다는 걸 명심해야 해요 + +01:28.630 --> 01:31.690 +확실한 답은 없어요 + +01:31.960 --> 01:36.820 +이 코드가 작동하는 방법은 모든 걸 시도하는 거죠 + +01:36.820 --> 01:43.390 +루프 안의 루프예요 시작점과 끝점을 통과하는 루프죠 현재 합계와 + +01:43.390 --> 01:48.580 +최대 합계를 비교해서 합계를 반환해요 + +01:49.330 --> 01:51.340 +이 함수가 하는 일이 그거죠 + +01:51.340 --> 01:52.960 +그게 이 코드의 핵심이에요 + +01:52.960 --> 01:55.180 +두 가지 문제가 더 있어요 + +01:55.300 --> 02:02.180 +그 중 하나는 다수의 의사 난수들을 생성하는 것입니다 파이썬의 난수 메서드 같은 + +02:02.180 --> 02:09.980 +것을 사용하고 싶지 않습니다 즉, 난수 라이브러리를 이용하지 않습니다 C++ 라이브러리와는 + +02:09.980 --> 02:12.620 +맞지 않기 때문이죠 + +02:12.740 --> 02:21.530 +그래서 여기서 우리가 한 건 아주 흔하고 아주 간단한 의사 난수 생성기를 구현했어요 + +02:21.530 --> 02:24.890 +선형 통합 발생기라고 하죠 + +02:24.890 --> 02:27.680 +공식이 궁금하면 구글에서 찾아보세요 + +02:27.770 --> 02:28.670 +그렇게 했어요 + +02:28.940 --> 02:36.260 +예측 가능한 난수의 흐름을 만드는 간단한 방법입니다 시드에 기반해서요 + +02:36.260 --> 02:43.610 +C 플러스 플러스 코드가 파이썬 코드와 동일한 결과를 내는지 일관성을 유지하기 + +02:43.610 --> 02:45.650 +위해서죠 + +02:46.070 --> 02:55.040 +이 생성기가 값을 생성해서 난수들을 생성해요 + +02:55.040 --> 02:59.560 +물론 저로서는 교묘한 방법이죠 왜냐하면 프런티어 모델에서는 C 플러스 플러스 코드로 + +02:59.560 --> 03:02.410 +변환하는 방법을 알아내기가 어려울 테니까요 + +03:02.410 --> 03:04.330 +접근해야 할 거예요 + +03:04.450 --> 03:09.670 +쉬운 변환이 아니진 않지만 파이 함수 같은 걸 주진 않아요 + +03:09.880 --> 03:12.610 +이건 루프 안의 루프를 만드는 거예요 + +03:12.910 --> 03:18.760 +이 과정을 20번 반복해요 + +03:18.760 --> 03:28.810 +그리고 다른 씨앗으로 20번의 합계를 계산하는 거죠 + +03:28.870 --> 03:36.070 +get으로 가기 위한 설정이 있습니다 일관되게 실행되도록요 매 실행마다 일관적으로 + +03:36.070 --> 03:37.090 +하도록요 + +03:37.270 --> 03:45.760 +그리고 20번 정도 반복합니다 실행할 때 미리 말씀을 드렸어야 하는 또 다른 경우입니다 + +03:45.790 --> 03:52.570 +파이썬 에서 실행되는 데 1분 정도 걸리기 때문이죠 + +03:52.570 --> 03:56.880 +어떻게 해야 흥미를 느끼실지 고민해 볼게요 1분 정도 기다려 주세요 + +03:57.000 --> 04:04.890 +다시 말씀드리지만 EXAC은 확실히 눈썹이 휘둥그레질 만한 기능이에요 누가 EXAC을 + +04:04.890 --> 04:07.920 +실행하라고 할 때마다요 + +04:08.040 --> 04:13.800 +물론 제 코드를 자세히 살펴보시고 기만적인 + 
+04:13.800 --> 04:20.070 +일은 없다는 걸 확인하세요 수학만 잔뜩 있어요 + +04:20.160 --> 04:23.160 +게다가 전 영국 억양도 쓰잖아요 + +04:23.160 --> 04:24.840 +그럼 내가 믿을 만하다는 뜻이겠죠 + +04:25.170 --> 04:25.440 +네 + +04:25.440 --> 04:26.010 +좋아요 + +04:26.040 --> 04:28.350 +잡담은 그만하죠 + +04:28.470 --> 04:29.880 +다 됐어요 + +04:29.880 --> 04:33.960 +27초밖에 안 되는데 그렇게 떠들 필요는 없잖아요 + +04:34.110 --> 04:43.110 +그럼 총 20번의 시도에서 최고 수익이 10,980개로 꽤 큰 건가요? + +04:43.140 --> 04:47.460 +물론 엄청난 숫자는 아니에요 장단점이 있으니까요 모두 + +04:47.460 --> 04:48.780 +균형을 이루죠 + +04:49.320 --> 04:50.460 +좋아요 + +04:50.460 --> 04:57.880 +이제 GPT에 버전 제작을 요청할 때예요 + +04:57.910 --> 04:59.680 +지금 하죠 + +05:05.140 --> 05:12.190 +GPT 버전이 있는데 컴파일을 해볼게요 + +05:14.140 --> 05:19.030 +경고를 생성했어요 암묵적 변환이죠 + +05:19.240 --> 05:22.690 +그런데 실행했더니 틀린 답이 나왔어요 + +05:22.690 --> 05:28.750 +이 경우 힌트가 있고 그런 사실에도 불구하고 그것이 저지른 + +05:28.750 --> 05:37.870 +실수에 대해 경고했어요 꽤 빨리 실행되는 코드를 생성했지만 슬프게도 문제가 발생하죠 + +05:37.870 --> 05:44.650 +숫자가 넘쳐서 C 플러스 전문가가 확인해야 할 거예요 결국 + +05:44.650 --> 05:48.190 +숫자 0이 되죠 + +05:48.280 --> 05:52.060 +광고 후에 클로드가 어떻게 하는지 보죠 diff --git a/week5/community-contributions/subtitles/srts/59295579/en_US.srt b/week5/community-contributions/subtitles/srts/59295579/en_US.srt new file mode 100755 index 0000000..20ed282 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295579/en_US.srt @@ -0,0 +1,403 @@ +WEBVTT + +00:00.470 --> 00:01.040 +All right. + +00:01.040 --> 00:02.750 +Are you excited to see how this goes? + +00:02.780 --> 00:04.130 +Let's give it a try. + +00:04.160 --> 00:12.890 +So in this next section, I create a string called pi which has within it the code to do this series + +00:12.890 --> 00:15.890 +one minus the third plus a fifth, minus the seventh plus a ninth. + +00:15.890 --> 00:17.420 +You can see that for yourself. + +00:17.450 --> 00:18.770 +Check how many mistakes. + +00:18.770 --> 00:23.690 +And then it's going to do that 100 a million times. 
+ +00:24.170 --> 00:28.640 +And really it's like 200 million times because it's you can see each of this. + +00:28.670 --> 00:31.130 +It's doing like two two elements in that series. + +00:31.130 --> 00:33.710 +So it's 200 million of this series. + +00:33.710 --> 00:38.300 +And we'll see how close to pi it gets and then it will print the result. + +00:38.570 --> 00:40.700 +So let's do that. + +00:40.820 --> 00:46.700 +I'm now going to do some some evil code, which of course we can do because we know exactly what we're + +00:46.700 --> 00:46.970 +doing. + +00:46.970 --> 00:51.140 +But you should always be slightly suspicious anytime someone says exac. + +00:51.170 --> 00:56.690 +ExAC, of course, is the command that runs Python code as specified in a string. + +00:56.720 --> 01:02.820 +And now you can see with some confidence this string is doing nothing to to evil. + +01:02.820 --> 01:04.350 +It's just doing this series. + +01:04.350 --> 01:09.060 +But you should always double check any time someone gives you code that involves an exec to make sure + +01:09.060 --> 01:13.200 +there's no funny business with your file system, or making an internet connection or anything like + +01:13.200 --> 01:13.530 +that. + +01:13.530 --> 01:17.820 +But in this case, you can be assured this is pretty safe and let's give it a try. + +01:17.850 --> 01:18.600 +Exact pi. + +01:18.630 --> 01:20.040 +Let's see what happens. + +01:21.270 --> 01:23.400 +So there's a pause. + +01:23.430 --> 01:27.570 +While Python thinks 200 million of those terms. + +01:27.570 --> 01:28.500 +And there we go. + +01:28.500 --> 01:31.080 +So let's see how well the Pi comes out after that. + +01:31.080 --> 01:33.090 +3.14159265. + +01:33.090 --> 01:34.650 +And that digit is wrong. + +01:34.890 --> 01:39.960 +So 200 million of these terms don't get you that much. + +01:39.960 --> 01:40.320 +Pi. 
+ +01:40.350 --> 01:45.570 +As you probably you may know, there are much better series that converge very quickly, but they are + +01:45.570 --> 01:48.000 +a lot more code and a lot more gumph. + +01:48.030 --> 01:51.840 +This is the benefit of being super simple to write, but slowly converging. + +01:51.840 --> 01:57.870 +So it took 8.5764 seconds for that to run. + +01:58.120 --> 02:01.870 +There's a if you run this a few times, you'll see that that changes a fair amount because of other + +02:01.870 --> 02:03.100 +things going on on the machine. + +02:03.100 --> 02:06.760 +And your machine is different to mine, but you get the general idea. + +02:06.790 --> 02:07.390 +Okay. + +02:07.420 --> 02:13.480 +So I should be able to run optimize GPT pi, and it should stream out C plus plus code that will do + +02:13.480 --> 02:18.460 +the same thing by sending that string as part of the user prompt to GPT four. + +02:18.490 --> 02:20.320 +Oh, let's see what we get. + +02:20.740 --> 02:22.690 +Uh, this is what I mentioned to you. + +02:22.690 --> 02:28.030 +It puts this at the top of the file, which we then strip out before saving this to disk. + +02:28.060 --> 02:29.800 +This is the method. + +02:29.920 --> 02:38.020 +Uh, not uh, you may or may not be a C plus plus guru, but, uh, regardless, from knowledge of similar + +02:38.020 --> 02:42.370 +things, you can probably get a good handle on the fact that this is doing the same thing. + +02:42.520 --> 02:48.460 +Uh, and it's doing a similar number of calculations, similar number of terms, and it has written + +02:48.460 --> 02:50.740 +a file here, optimized C plus plus. + +02:50.740 --> 02:52.390 +And there it is. + +02:52.390 --> 02:54.040 +You can see it all looks good. + +02:54.040 --> 03:00.470 +And it has included this package that needs to be included, which is because it uses a set precision + +03:00.470 --> 03:01.370 +function. 
+ +03:01.370 --> 03:05.570 +And if I discovered if I don't put that in the user prompt, then it doesn't include this package which + +03:05.570 --> 03:06.650 +causes it to break. + +03:06.650 --> 03:07.910 +So there we go. + +03:08.030 --> 03:10.790 +For whatever it's worth, we've given it the hint and it's done it. + +03:11.330 --> 03:12.710 +Um, okay. + +03:12.710 --> 03:14.090 +So look at this next cell. + +03:14.090 --> 03:15.110 +So this next cell. + +03:15.110 --> 03:23.330 +You remember, if you do an exclamation mark in Jupyter, then it, uh, it will um, uh, it will run + +03:23.330 --> 03:25.010 +that as a terminal command. + +03:25.010 --> 03:27.800 +So what I'm doing here is a terminal command. + +03:27.800 --> 03:29.570 +Uh clang++. + +03:29.570 --> 03:35.030 +Uh, and this particular statement is running optimized. + +03:35.030 --> 03:39.740 +It's doing a compile and then running that C plus plus code. + +03:39.980 --> 03:44.000 +Um, and later we're going to use an even more optimized way of doing this. + +03:44.000 --> 03:47.750 +But this this is a good for now for your computer. + +03:47.750 --> 03:51.350 +You may want to Google what it will take to to compile. + +03:51.470 --> 03:57.690 +Um, if you're using a mac, then you may need to um, Uh, at least open Xcode to make sure you've + +03:57.690 --> 04:02.070 +got the latest Xcode tools installed so that you have the latest versions of compilers. + +04:02.190 --> 04:06.630 +Um, but but regardless, a quick Google will tell you how you compile a C plus plus file. + +04:06.690 --> 04:12.360 +Um, once you've changed this line to be an optimized C plus plus compile of the file optimized CPP, + +04:12.390 --> 04:14.370 +that is of course how you will run it. + +04:14.370 --> 04:21.450 +So remember what we are looking to beat is 8.57 seconds. + +04:21.450 --> 04:26.520 +Let's see how GPT four zero how its code has done. + +04:26.520 --> 04:27.540 +Here we go. 
+ +04:29.280 --> 04:37.410 +And it's got the same answer I should imagine 658 at least to within. + +04:37.440 --> 04:37.920 +Yes. + +04:37.920 --> 04:38.550 +Looks like it. + +04:38.550 --> 04:39.990 +I mean you'd expect some. + +04:40.020 --> 04:42.300 +Oh no it's there's not even any rounding differences. + +04:42.300 --> 04:44.310 +It looks like it's exactly right. + +04:44.310 --> 04:47.520 +And it's in a fraction of the time 0.21. + +04:47.610 --> 04:55.180 +Uh, so it is clearly, uh, um, somewhere between 10 and 100 times faster. + +04:55.330 --> 04:58.810 +You'll find later, when we optimize further, we can do even better than this. + +04:58.810 --> 04:59.020 +But. + +04:59.020 --> 05:01.360 +But with these flags, that is what we get. + +05:01.390 --> 05:02.980 +It's pretty impressive. + +05:03.010 --> 05:10.990 +We've just built something that can do a conversion of code from Python to C plus plus and run it many + +05:10.990 --> 05:12.550 +times faster. + +05:12.580 --> 05:14.890 +Let's see how Claude does. + +05:18.340 --> 05:21.220 +Well, let's just quickly compare the code. + +05:22.960 --> 05:25.840 +The code looks pretty similar to me. + +05:26.020 --> 05:29.260 +Uh, you will find, if you look at it, that there are some subtle differences. + +05:29.260 --> 05:35.470 +For example, there is this stuff here, um, which is to do with, uh. + +05:35.470 --> 05:41.740 +Well, I am not a C plus plus expert, although I did code in C plus plus some time ago, 1 or 2 years + +05:41.740 --> 05:42.310 +ago. + +05:42.460 --> 05:45.640 +Uh, but, uh, sadly, that is all swapped out in my mind. + +05:45.670 --> 05:48.610 +Uh, and I would certainly never have got to this level of expertise. + +05:48.610 --> 05:54.590 +It's putting in, I believe, various hints to suggest that things can be parallelized, but I sort + +05:54.620 --> 06:00.080 +of suspect with some of this stuff the compiler would apply that anyway in both cases, but we will + +06:00.080 --> 06:00.320 +see. 
+
+06:00.350 --> 06:07.820
+We'll see if this actually makes any real difference by running the same command for.
+
+06:10.490 --> 06:11.780
+Uh, sorry, did I run that?
+
+06:11.780 --> 06:12.620
+Let's try that again.
+
+06:12.650 --> 06:13.520
+Here we go.
+
+06:14.810 --> 06:17.630
+And this is the answer that we got.
+
+06:17.810 --> 06:27.770
+Um, and it looks like, um, I think we may have to go back and do.
+
+06:27.770 --> 06:28.100
+Oh, no.
+
+06:28.100 --> 06:28.430
+Here we go.
+
+06:28.460 --> 06:29.030
+Here we go.
+
+06:29.030 --> 06:29.930
+Sorry.
+
+06:29.930 --> 06:35.360
+This was GPT's version, so GPT was 0.2121.
+
+06:35.540 --> 06:38.510
+And here I've jumped ahead.
+
+06:38.510 --> 06:39.470
+That's why I'm getting confused.
+
+06:39.470 --> 06:40.040
+Sorry.
+
+06:40.040 --> 06:41.720
+Let's go back up again.
+
+06:42.260 --> 06:55.410
+Uh, I'm going to have to write this down 0.2131 is GPT version and Claude's Version is in 0.2121.
+
+06:55.410 --> 06:58.500
+So Claude was a hair faster than GPT.
+
+06:58.530 --> 07:02.340
+But I gotta tell you, this is within in error bars.
+
+07:02.340 --> 07:05.640
+If you run this a few times, it will vary by at least that amount.
+
+07:05.640 --> 07:12.570
+So basically they've both produced code which when running on the machine, uh, has run at the same
+
+07:12.570 --> 07:12.990
+speed.
+
+07:12.990 --> 07:19.140
+I would expect that the actual compiled machine code is the same in both cases.
+
+07:19.290 --> 07:23.040
+Um, which is what you would expect for a fairly simple problem.
+
+07:23.190 --> 07:32.040
+Uh, so both GPT four and Claude have crushed it with a well optimized solution to this, uh, Python
+
+07:32.040 --> 07:33.540
+code to generate pi.
+
+07:33.870 --> 07:39.810
+Uh, but, um, uh, although, as I'll say, GPT four did need a few more hints than Claude, but they
+
+07:39.810 --> 07:42.120
+both got there after the break.
+ +07:42.120 --> 07:48.300 +We're going to move to a harder problem, much harder code to see how they can optimize. + +07:48.300 --> 07:50.220 +I will see you in the next video. diff --git a/week5/community-contributions/subtitles/srts/59295579/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295579/ja_JP.srt new file mode 100755 index 0000000..987a9dd --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295579/ja_JP.srt @@ -0,0 +1,355 @@ +WEBVTT + +00:00.470 --> 00:01.040 +分かった。 + +00:01.040 --> 00:02.750 +どうなるか楽しみですか? + +00:02.780 --> 00:04.130 +試してみよう。 + +00:04.160 --> 00:15.890 +そこで次のセクションでは、 πという文字列を作り、 その中に1マイナス3プラス5、 マイナス7プラス9という一連の計算をするコードを入れる。 + +00:15.890 --> 00:17.420 +それは自分の目で確かめてほしい。 + +00:17.450 --> 00:18.770 +ミスの数をチェックする。 + +00:18.770 --> 00:23.690 +それを100万回繰り返すんだ。 + +00:24.170 --> 00:28.640 +そのひとつひとつを見ることができるのだから。 + +00:28.670 --> 00:31.130 +あのシリーズでは2つの要素を2つやっているようなものだ。 + +00:31.130 --> 00:33.710 +だから、 このシリーズは2億本だ。 + +00:33.710 --> 00:38.300 +そして、 πにどれだけ近づけるかを見て、 その結果をプリントする。 + +00:38.570 --> 00:40.700 +だから、 そうしよう。 + +00:40.820 --> 00:46.970 +もちろん、 自分たちが何をしているのかよくわかっているからこそできることだ。 + +00:46.970 --> 00:51.140 +しかし、 誰かがexacと言うときはいつも少し疑ってかかるべきだ。 + +00:51.170 --> 00:56.690 +ExACはもちろん、 文字列で指定されたPythonコードを実行するコマンドだ。 + +00:56.720 --> 01:02.820 +そして今、 あなたはこのストリングが悪に対して何もしていないことを確信できる。 + +01:02.820 --> 01:04.350 +このシリーズをやっているだけだよ。 + +01:04.350 --> 01:09.060 +しかし、 誰かがあなたにexecを含むコードを渡すときは、 ファイルシステムやインターネット接続などにおかしなことがないか、 + +01:09.060 --> 01:13.530 +常にダブルチェックする必要がある。 + +01:13.530 --> 01:17.820 +しかし、 この場合はかなり安全であることが保証されるので、 試してみよう。 + +01:17.850 --> 01:18.600 +正確な円周率。 + +01:18.630 --> 01:20.040 +どうなるか見てみよう。 + +01:21.270 --> 01:23.400 +だから間が空く。 + +01:23.430 --> 01:27.570 +Pythonはそのうちの2億語を考えている。 + +01:27.570 --> 01:28.500 +さあ、 行こう。 + +01:28.500 --> 01:31.080 +では、 その後、 円周率がどの程度になるか見てみよう。 + +01:31.080 --> 01:33.090 +3. 14159265. 
+ +01:33.090 --> 01:34.650 +そしてその桁は間違っている。 + +01:34.890 --> 01:39.960 +つまり、 2億の用語はそれほど大きなものではないのだ。 + +01:39.960 --> 01:40.320 +円周率 + +01:40.350 --> 01:48.000 +ご存じかもしれないが、 収束が非常に速い、 より優れたシリーズがある。 + +01:48.030 --> 01:51.840 +これは、 書くのは超シンプルだが、 徐々に収束していくという利点だ。 + +01:51.840 --> 01:57.870 +だから8回かかった。 5764秒かかる。 + +01:58.120 --> 02:03.100 +これを何度か実行すれば、 マシン上で起こっている他のことのために、 かなりの量が変化することがわかるだろう。 + +02:03.100 --> 02:06.760 +あなたのマシンと私のマシンは違うけど、 大まかなことはわかるだろう。 + +02:06.790 --> 02:07.390 +オーケー。 + +02:07.420 --> 02:13.480 +つまり、 GPT piを最適化すれば、 C+++のコードがストリームアウトされ、 GPT 4にユーザー・プロンプトの一部としてその文字列を送ることで、 + +02:13.480 --> 02:18.460 +同じことができるはずだ。 + +02:18.490 --> 02:20.320 +さて、 何が出てくるかな。 + +02:20.740 --> 02:22.690 +ええと、 これは私が君に言ったことなんだ。 + +02:22.690 --> 02:28.030 +これはファイルの先頭に置かれ、 ディスクに保存する前にこれを取り除く。 + +02:28.060 --> 02:29.800 +これがその方法だ。 + +02:29.920 --> 02:42.370 +あなたはC++の第一人者かもしれないし、 そうでないかもしれないが、 似たようなことの知識から、 これが同じことをやっているという事実を理解することはできるだろう。 + +02:42.520 --> 02:50.740 +同じような計算を、 同じような項数で行っていて、 最適化されたCプラスプラスのファイルをここに書いている。 + +02:50.740 --> 02:52.390 +そして、 そこにある。 + +02:52.390 --> 02:54.040 +すべてがうまくいっているのがわかるだろう。 + +02:54.040 --> 03:01.370 +そして、 このパッケージには、 セット精度機能を使用するため、 必要なものが含まれている。 + +03:01.370 --> 03:06.650 +そして、 もしユーザー・プロンプトにそのように書かないと、 このパッケージが含まれていないことに気づき、 それが原因で壊れてしまう。 + +03:06.650 --> 03:07.910 +そうだ。 + +03:08.030 --> 03:10.790 +何はともあれ、 私たちはヒントを与え、 それを実行した。 + +03:11.330 --> 03:12.710 +うーん、 わかった。 + +03:12.710 --> 03:14.090 +次のセルを見てほしい。 + +03:14.090 --> 03:15.110 +だから、 この次のセルだ。 + +03:15.110 --> 03:25.010 +覚えているだろうか、 Jupyterで感嘆符をつけると、 それをターミナル・コマンドとして実行する。 + +03:25.010 --> 03:27.800 +ここでやっているのはターミナル・コマンドだ。 + +03:27.800 --> 03:29.570 +あー、 ガチャン++。 + +03:29.570 --> 03:35.030 +そして、 この特別な声明は最適化されている。 + +03:35.030 --> 03:39.740 +コンパイルして、 C++のコードを実行するのだ。 + +03:39.980 --> 03:44.000 +ええと、 この後、 さらに最適化された方法を使います。 + +03:44.000 --> 03:47.750 +しかし、 これは今のところ、 あなたのコンピューターには適している。 + +03:47.750 --> 03:51.350 +コンパイルに何が必要なのか、 ググってみるといいだろう。 + +03:51.470 --> 03:57.690 +ええと、 もしあなたがマックを使っているなら、 
少なくともXcodeを開いて、 最新のXcodeツールがインストールされていることを確認し、 + +03:57.690 --> 04:02.070 +最新バージョンのコンパイラがあることを確認する必要があるかもしれません。 + +04:02.190 --> 04:06.630 +うーん、 でも、 でも、 そんなことはともかく、 ググればCプラス・プラス・ファイルのコンパイル方法がわかるよ。 + +04:06.690 --> 04:12.360 +ええと、 この行を最適化されたCプラス・コンパイルに変更し、 最適化されたCPPファイルをコンパイルすれば、 + +04:12.390 --> 04:14.370 +もちろんそれが実行方法となる。 + +04:14.370 --> 04:21.450 +だから、 私たちが打ち負かそうとしているのは8点であることを忘れないでほしい。 57秒 + +04:21.450 --> 04:26.520 +GPT4がどのような結果を残したか見てみよう。 + +04:26.520 --> 04:27.540 +さあ、 始めよう。 + +04:29.280 --> 04:37.410 +そして、 少なくとも658と同じ答えを持っている。 + +04:37.440 --> 04:37.920 +そうだ。 + +04:37.920 --> 04:38.550 +そのようだ。 + +04:38.550 --> 04:39.990 +つまり、 多少は期待できるということだ。 + +04:40.020 --> 04:42.300 +四捨五入の違いもない。 + +04:42.300 --> 04:44.310 +まさにその通りだと思う。 + +04:44.310 --> 04:47.520 +しかも、 ほんのわずかな時間でだ。 21. + +04:47.610 --> 04:55.180 +つまり、 10倍から100倍速いということだ。 + +04:55.330 --> 04:58.810 +後で、 さらに最適化すれば、 これよりもっといいものができることがわかるだろう。 + +04:58.810 --> 04:59.020 +でもね。 + +04:59.020 --> 05:01.360 +しかし、 このフラッグのおかげで、 それが実現したのだ。 + +05:01.390 --> 05:02.980 +かなり印象的だ。 + +05:03.010 --> 05:12.550 +我々は、 PythonからCプラスプラスにコードを変換し、 何倍も速く実行できるものを作ったところだ。 + +05:12.580 --> 05:14.890 +クロードがどうするか見てみよう。 + +05:18.340 --> 05:21.220 +では、 早速コードを見比べてみよう。 + +05:22.960 --> 05:25.840 +コードもよく似ている。 + +05:26.020 --> 05:29.260 +見ていただければわかると思いますが、 微妙な違いがあります。 + +05:29.260 --> 05:35.470 +例えば、 ここにこんなものがある。 + +05:35.470 --> 05:42.310 +私はCプラスプラスの専門家ではないが、 1、 2年前にCプラスプラスでコーディングしたことはある。 + +05:42.460 --> 05:45.640 +でも、 悲しいかな、 僕の中ではそれはすべて入れ替わっているんだ。 + +05:45.670 --> 05:48.610 +それに、 このレベルに達することはなかっただろうね。 + +05:48.610 --> 05:54.590 +並列化が可能であることを示唆するさまざまなヒントが含まれていると思うが、 このようなものについては、 + +05:54.620 --> 06:00.320 +コンパイラーはどちらの場合にもそれを適用するのではないかと思う。 + +06:00.350 --> 06:07.820 +同じコマンドを実行して、 実際に違いが出るかどうか見てみよう。 + +06:10.490 --> 06:11.780 +あ、 ごめん、 今のは走ったかな? 
+ +06:11.780 --> 06:12.620 +もう一度やってみよう。 + +06:12.650 --> 06:13.520 +さあ、 始めよう。 + +06:14.810 --> 06:17.630 +そして得られた答えがこれだ。 + +06:17.810 --> 06:27.770 +ええと、 それで......もう一度、 やり直す必要がありそうなんだ。 + +06:27.770 --> 06:28.100 +そんな。 + +06:28.100 --> 06:28.430 +さあ、 始めよう。 + +06:28.460 --> 06:29.030 +さあ、 始めよう。 + +06:29.030 --> 06:29.930 +申し訳ない。 + +06:29.930 --> 06:35.360 +これはGpgのバージョンだったので、 GPTは0だった。 2121. + +06:35.540 --> 06:38.510 +そして、 ここで私は先を急いだ。 + +06:38.510 --> 06:39.470 +だから混乱しているんだ。 + +06:39.470 --> 06:40.040 +申し訳ない。 + +06:40.040 --> 06:41.720 +もう一度上に戻ろう。 + +06:42.260 --> 06:55.410 +ええと、 これはメモしておかないといけないね。 2131 は GPT バージョンで、 クロードのバージョンは 0 です。 2121. + +06:55.410 --> 06:58.500 +だからクロードはGPTより少し速かった。 + +06:58.530 --> 07:02.340 +でも、 これは誤差の範囲内なんだ。 + +07:02.340 --> 07:05.640 +これを何度か実行すれば、 少なくともその程度の差は出るだろう。 + +07:05.640 --> 07:12.990 +つまり、 基本的には両者とも、 マシン上で実行したときに同じスピードで動くコードを作ったということだ。 + +07:12.990 --> 07:19.140 +私は、 実際にコンパイルされたマシンコードはどちらの場合も同じだと予想している。 + +07:19.290 --> 07:23.040 +うーん、 かなり単純な問題だから、 そうなるんだろうね。 + +07:23.190 --> 07:33.540 +GPT4もクロードも、 パイを生成するパイソンコードに最適化されたソリューションで、 それを打ち砕いたんだ。 + +07:33.870 --> 07:42.120 +でも、 GPT4はクロードよりもう少しヒントが必要だったけど、 休憩を挟んで2人ともたどり着いた。 + +07:42.120 --> 07:48.300 +もっと難しい問題、 もっと難しいコードに移行して、 彼らがどのように最適化できるかを見ていくつもりだ。 + +07:48.300 --> 07:50.220 +また次のビデオで会おう。 diff --git a/week5/community-contributions/subtitles/srts/59295579/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295579/ko_KR.srt new file mode 100755 index 0000000..835e774 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295579/ko_KR.srt @@ -0,0 +1,385 @@ +WEBVTT + +00:00.470 --> 00:01.040 +좋아요 + +00:01.040 --> 00:02.750 +어떻게 될지 기대돼요? 
+ +00:02.780 --> 00:04.130 +한번 해 보죠 + +00:04.160 --> 00:12.890 +다음 섹션에서 파이 문자열을 생성합니다 그 코드 안에는 시리즈 1에서 3번째를 더하고 5번째를 더하고 + +00:12.890 --> 00:15.890 +7번째에 9번째를 더하죠 + +00:15.890 --> 00:17.420 +직접 보세요 + +00:17.450 --> 00:18.770 +몇 개 틀렸는지 봐요 + +00:18.770 --> 00:23.690 +그런 다음 100만 번 반복하죠 + +00:24.170 --> 00:28.640 +2억 번은 들었을 거예요 전부 다 보이거든요 + +00:28.670 --> 00:31.130 +그 시리즈에서 두 가지 요소를 하는 거죠 + +00:31.130 --> 00:33.710 +이런 시리즈가 2억 개예요 + +00:33.710 --> 00:38.300 +파이에 얼마나 가까운지 보고 결과를 프린트할 거예요 + +00:38.570 --> 00:40.700 +그렇게 하죠 + +00:40.820 --> 00:46.970 +이제 악성 코드를 하겠습니다 물론 할 수 있죠 우리가 뭘 하는지 정확히 아니까요 + +00:46.970 --> 00:51.140 +하지만 누가 엑스라고 하면 약간 의심해야 해요 + +00:51.170 --> 00:56.690 +ExAC은 파이썬 코드를 문자열에 명시된 대로 실행하는 명령어죠 + +00:56.720 --> 01:02.820 +이제 이 문자열이 악에게 아무 짓도 안 하는 걸 볼 수 있죠 + +01:02.820 --> 01:04.350 +이 시리즈를 하면서요 + +01:04.350 --> 01:09.060 +하지만 임원이 관련된 코드를 줄 땐 항상 다시 확인해야 합니다 파일 + +01:09.060 --> 01:13.530 +시스템에 문제가 있거나 인터넷 연결 같은 게 되지 않도록요 + +01:13.530 --> 01:17.820 +하지만 이 경우에는 꽤 안전하니 한번 해 보죠 + +01:17.850 --> 01:18.600 +정확한 파이예요 + +01:18.630 --> 01:20.040 +어떻게 되나 보죠 + +01:21.270 --> 01:23.400 +잠시 멈췄어요 + +01:23.430 --> 01:27.570 +파이썬은 그중 2억 달러를 생각해요 + +01:27.570 --> 01:28.500 +다 됐어요 + +01:28.500 --> 01:31.080 +파이가 어떻게 나오는지 보죠 + +01:31.080 --> 01:33.090 +3번요 14159265년요 + +01:33.090 --> 01:34.650 +그 숫자가 틀렸어요 + +01:34.890 --> 01:39.960 +Get in get 약관 2억 개로는 얼마 못 벌어요 + +01:39.960 --> 01:40.320 +파이요 + +01:40.350 --> 01:45.570 +아시겠지만, 빠르게 모이는 더 좋은 나열도 있어요 하지만 그것들은 + +01:45.570 --> 01:48.000 +훨씬 더 코드가 많고 검프하죠 + +01:48.030 --> 01:51.840 +쓰기는 쉽지만 점차 집약되는 것의 장점이죠 + +01:51.840 --> 01:57.870 +8시간 걸렸어요 5764초 걸렸어요 + +01:58.120 --> 02:01.870 +이걸 몇 번 실행하면 꽤 많이 변하는 걸 볼 수 있어요 컴퓨터에서 다른 일들이 + +02:01.870 --> 02:03.100 +벌어지고 있으니까요 + +02:03.100 --> 02:06.760 +Get it의 기계는 제 것과 다르지만 대략적인 건 아시겠죠 + +02:06.790 --> 02:07.390 +네 + +02:07.420 --> 02:13.480 +GPT 파이 최적화를 실행할 수 있어야 합니다 그러면 C++ 코드를 + +02:13.480 --> 02:18.460 +내보내는데 GPT4에 문자열을 보내 같은 일을 하죠 + +02:18.490 --> 02:20.320 +Get up, Get up, Get up! 
뭐가 나올지 보죠 + +02:20.740 --> 02:22.690 +제가 말씀드린 게 이거예요 + +02:22.690 --> 02:28.030 +이걸 파일 상단에 놓습니다 디스크에 저장하기 전에 제거하죠 + +02:28.060 --> 02:29.800 +이게 방법이에요 + +02:29.920 --> 02:38.020 +여러분이 C 플러스 구루일 수도 아닐 수도 있지만 어쨌든 비슷한 지식을 바탕으로 보면 이 앱도 같은 + +02:38.020 --> 02:42.370 +일을 한다는 사실을 잘 이해하실 수 있을 거예요. + +02:42.520 --> 02:48.460 +비슷한 수의 계산을 하고 비슷한 수의 용어를 하고 여기에 파일을 작성했어요 + +02:48.460 --> 02:50.740 +최적화된 C 플러스 플러스 + +02:50.740 --> 02:52.390 +저기 있네요 + +02:52.390 --> 02:54.040 +다 좋아 보이죠 + +02:54.040 --> 03:01.370 +포함되어야 하는 이 패키지도 포함되어 있습니다. 왜냐하면 이것은 정밀 함수를 사용하니까요. + +03:01.370 --> 03:05.570 +이걸 발견하면 사용자 프롬프트에 넣지 않으면 패키지를 포함하지 않아요 파열을 + +03:05.570 --> 03:06.650 +유발하는 패키지죠 + +03:06.650 --> 03:07.910 +자, 됐어요 + +03:08.030 --> 03:10.790 +어찌 됐든 우리가 힌트를 줬고 그게 해냈어요 + +03:11.330 --> 03:12.710 +네 + +03:12.710 --> 03:14.090 +다음 방을 보세요 + +03:14.090 --> 03:15.110 +다음 감방이에요 + +03:15.110 --> 03:23.330 +기억하시겠지만 Jupyter에서 느낌표를 하면 그건 그걸 터미널 명령으로 + +03:23.330 --> 03:25.010 +실행할 거예요 + +03:25.010 --> 03:27.800 +여기서 하는 건 터미널 명령이에요 + +03:27.800 --> 03:29.570 +클랭++요 + +03:29.570 --> 03:35.030 +이 문장은 최적화되어 실행되고 있어요 + +03:35.030 --> 03:39.740 +컴파일을 하고 C++ 코드를 실행하고 있어요 + +03:39.980 --> 03:44.000 +나중에는 더 최적화된 방법을 사용할 거예요 + +03:44.000 --> 03:47.750 +하지만 이건 지금 컴퓨터에는 좋아요 + +03:47.750 --> 03:51.350 +컴파일에 뭐가 필요한지 구글에 검색해 보세요 + +03:51.470 --> 03:57.690 +맥을 사용한다면 적어도 Xcode를 열어서 최신 Xcode 도구들이 설치되었는지 + +03:57.690 --> 04:02.070 +확인해야 합니다 최신 버전의 컴파일러가 있도록요 + +04:02.190 --> 04:06.630 +어쨌든 구글에 검색하면 C 플러스 파일을 어떻게 컴파일하는지 알 수 있어요 + +04:06.690 --> 04:12.360 +이 라인을 최적화된 C++ 파일 최적화된 CPP의 컴파일로 바꾸고 나면 당연히 + +04:12.390 --> 04:14.370 +그렇게 실행하게 되죠 + +04:14.370 --> 04:21.450 +8점을 넘어서야 한다는 걸 기억하세요 57초요 + +04:21.450 --> 04:26.520 +GPT 40 코드가 어떻게 작동하는지 보죠 + +04:26.520 --> 04:27.540 +시작할게요 + +04:29.280 --> 04:37.410 +대답은 같아요 적어도 658개 안쪽으로요 + +04:37.440 --> 04:37.920 +네 + +04:37.920 --> 04:38.550 +그런 것 같아요 + +04:38.550 --> 04:39.990 +예상했던 대로예요 + +04:40.020 --> 04:42.300 +아뇨, 반반되는 것도 없어요 + +04:42.300 --> 04:44.310 +딱 맞는 것 같아요 + +04:44.310 
--> 04:47.520 +시간의 0분의 1밖에 안 돼요 21살요 + +04:47.610 --> 04:55.180 +그러니까 확실히 10배에서 100배 정도 빨라요 + +04:55.330 --> 04:58.810 +나중에 최적화하면 더 좋아질 거예요 + +04:58.810 --> 04:59.020 +하지만요 + +04:59.020 --> 05:01.360 +하지만 이 깃발로 그걸 얻게 되죠 get it get it + +05:01.390 --> 05:02.980 +정말 인상적이에요 + +05:03.010 --> 05:10.990 +파이썬 에서 C++로 코드를 변환할 수 있는 것을 빌드했습니다 그리고 훨씬 빠르게 + +05:10.990 --> 05:12.550 +실행할 수 있죠 + +05:12.580 --> 05:14.890 +클로드는 어떤지 보죠 + +05:18.340 --> 05:21.220 +코드를 빨리 비교해보죠 + +05:22.960 --> 05:25.840 +코드가 비슷해 보여요 + +05:26.020 --> 05:29.260 +자세히 보시면 미묘한 차이가 있어요 + +05:29.260 --> 05:35.470 +예를 들어 여기 이런 게 있어요 + +05:35.470 --> 05:42.310 +전 C 플러스 전문가가 아닙니다 1, 2년 전에 C 플러스 코딩은 해봤지만요 + +05:42.460 --> 05:45.640 +하지만 안타깝게도 제 머릿속에선 다 바뀌었어요 + +05:45.670 --> 05:48.610 +이 정도 전문 지식은 절대 못 얻었을 거예요 + +05:48.610 --> 05:54.590 +병렬화될 수 있다는 걸 암시하는 다양한 힌트를 넣고 있는 것 같아요 하지만 이런 + +05:54.620 --> 06:00.320 +것과 관련해 제 생각엔 컴파일러가 두 경우 모두 적용할 것 같아요 두고 보죠 + +06:00.350 --> 06:07.820 +이게 실제로 차이를 만드는지 보죠 같은 명령을 실행하면서요 + +06:10.490 --> 06:11.780 +죄송해요, 제가 그랬나요? 
+ +06:11.780 --> 06:12.620 +다시 해 보죠 + +06:12.650 --> 06:13.520 +시작할게요 + +06:14.810 --> 06:17.630 +이게 그 답이에요 + +06:17.810 --> 06:27.770 +아무래도 다시 돌아가서 작업해야 할 것 같아요 + +06:27.770 --> 06:28.100 +안 돼요 + +06:28.100 --> 06:28.430 +시작할게요 + +06:28.460 --> 06:29.030 +시작할게요 + +06:29.030 --> 06:29.930 +미안해요 + +06:29.930 --> 06:35.360 +이건 Gpg 버전이었어요 GPT는 0이었죠 2121요 + +06:35.540 --> 06:38.510 +여기서 앞서갔죠 + +06:38.510 --> 06:39.470 +그래서 헷갈리는 거예요 + +06:39.470 --> 06:40.040 +미안해요 + +06:40.040 --> 06:41.720 +다시 올라가죠 + +06:42.260 --> 06:55.410 +0번 적어 둘게요 2131은 GPT 버전이고 클로드 버전은 0이에요 2121요 + +06:55.410 --> 06:58.500 +클로드는 GPT보다 약간 빨랐어요 + +06:58.530 --> 07:02.340 +하지만 이건 에러바 안에 있어요 + +07:02.340 --> 07:05.640 +몇 번 실행하면 그 정도까지 차이가 나요 + +07:05.640 --> 07:12.990 +기본적으로 둘 다 코드를 만들었어요 컴퓨터에서 실행될 때 같은 속도로 실행되는 코드요 + +07:12.990 --> 07:19.140 +실제 컴파일된 머신 코드는 두 경우 모두 동일할 것 같아요 + +07:19.290 --> 07:23.040 +아주 간단한 문제니까 당연한 결과죠 + +07:23.190 --> 07:32.040 +GPT4와 클로드는 파이를 생성하기 위한 파이썬 코드의 최적화된 솔루션으로 + +07:32.040 --> 07:33.540 +잘 해냈죠 + +07:33.870 --> 07:39.810 +하지만 GPT 4는 클로드보다 힌트가 더 필요했지만 둘 다 휴식 시간이 + +07:39.810 --> 07:42.120 +끝나고 도착했어요 + +07:42.120 --> 07:48.300 +더 어려운 문제로 넘어가겠습니다 훨씬 어려운 코드요 어떻게 최적화할 수 있는지 보기 위해서요 + +07:48.300 --> 07:50.220 +그럼 다음 영상에서 만나요 diff --git a/week5/community-contributions/subtitles/srts/59295583/en_US.srt b/week5/community-contributions/subtitles/srts/59295583/en_US.srt new file mode 100755 index 0000000..2c8c63a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295583/en_US.srt @@ -0,0 +1,373 @@ +WEBVTT + +00:00.800 --> 00:02.930 +And here we are back in JupyterLab. + +00:02.930 --> 00:03.890 +It's been a minute. + +00:03.920 --> 00:10.250 +We've been working in Colab for last week, and now we're back to to Jupyter and running locally, where + +00:10.250 --> 00:13.160 +we will be enjoying ourselves today. 
+ +00:13.220 --> 00:18.890 +Uh, before we get to the code and looking at what we're going to do, let's remind ourselves the leaderboards, + +00:18.890 --> 00:26.300 +which looked at coding abilities of frontier models just to see who is at the in the top of the pack + +00:26.300 --> 00:27.860 +when it comes to coding. + +00:27.860 --> 00:36.020 +So the vellum leaderboard you might remember from the AI company, vellum uh, has uh, the human eval + +00:36.380 --> 00:39.650 +uh, metric, which is the simple Python test. + +00:39.860 --> 00:45.110 +Um, and you can see against this metric that GPT four zero leads the way. + +00:45.170 --> 00:50.420 +And then if we look down Claude three sonnet not doing so great down here. + +00:50.570 --> 00:55.310 +Um, now, for various reasons, I feel like this perhaps isn't the most up to date. + +00:55.310 --> 01:02.570 +I notice elsewhere it doesn't have llama 3.1 on this site, so I'm thinking that perhaps this is a bit + +01:02.570 --> 01:03.140 +out of date. + +01:03.140 --> 01:11.090 +I also know that human eval isn't the best of the tests, and I'm more interested in the seal leaderboard + +01:11.090 --> 01:12.230 +for coding. + +01:12.290 --> 01:17.090 +And if you come into this and you read their blurb, you'll see that they do a number of different kinds + +01:17.090 --> 01:23.660 +of coding tests, including human eval, but also including live codebench, a bunch of programming + +01:23.660 --> 01:25.310 +puzzles, and a few others. + +01:25.310 --> 01:27.620 +And so I feel like this is super comprehensive. + +01:27.650 --> 01:32.330 +I'm also pleased to see that llama 3.1 is on their list, and so I get the impression that this is also + +01:32.330 --> 01:33.200 +more recent. + +01:33.560 --> 01:41.510 +And top of this leaderboard is Claude 3.5 sonnet, followed by GPT four zero, followed by Mistral large + +01:41.510 --> 01:44.720 +and open source model Mistral in third place. 
+
+01:44.900 --> 01:47.570
+So this gives us a sense of what's going on.
+
+01:47.600 --> 01:50.120
+Let's see if GPT-4o mini features.
+
+01:50.120 --> 01:52.130
+Not that I can see.
+
+01:52.220 --> 01:56.510
+Uh, and that might suggest that we're going to need to use GPT four.
+
+01:56.540 --> 02:01.340
+Oh, if we're going to want to, uh, really compare the top models on this front.
+
+02:01.340 --> 02:07.500
+But you can feel free to use GPT-4o mini, uh, if you'd rather be, uh, bit more frugal.
+
+02:07.530 --> 02:08.430
+Saved a bit of money.
+
+02:08.610 --> 02:16.140
+Uh, regardless, let's go over to Jupyter Lab and let's go into week four and into day three to see
+
+02:16.140 --> 02:21.090
+the code for this week, when we are going to be doing code generation and building an app around it.
+
+02:21.090 --> 02:25.170
+And as usual, this is a lot of things going on for what we'll be learning today.
+
+02:25.170 --> 02:29.640
+One of them is actually about the problem of generating code.
+
+02:29.790 --> 02:35.190
+Um, but we're also going to be using this as a way of exploring, comparing different models, um,
+
+02:35.190 --> 02:40.920
+looking at leaderboards as we just have, understanding how to solve a business problem with LM solutions.
+
+02:40.920 --> 02:43.650
+And you know what you might notice here?
+
+02:43.650 --> 02:48.210
+There's there's this little thing here, we're going to get another opportunity to play with Gradio
+
+02:48.210 --> 02:53.910
+and to show what it's like to package things up into a prototype, because that is just such a great
+
+02:53.910 --> 02:59.490
+way to collaborate with others on your LM solutions.
+
+02:59.490 --> 03:06.480
+So we're going to embark on this by running some imports, and we're then going to set our environment
+
+03:06.480 --> 03:08.910
+variables using the usual load env.
+
+03:08.910 --> 03:12.330
+And it's a nice time for me to remind you once again to have a .env file.
+ +03:12.330 --> 03:16.560 +This time we'll be using OpenAI and anthropic. + +03:16.560 --> 03:23.670 +So have that set up, uh, in this, uh, cell here, we initialize the OpenAI and the cloud interfaces + +03:23.670 --> 03:27.090 +as usual, and we'll use OpenAI and cloud 3.5. + +03:27.120 --> 03:27.390 +Sorry. + +03:27.420 --> 03:29.520 +We'll use GPT four and cloud 3.5. + +03:29.550 --> 03:35.250 +Sonnet uh, which were in the top two positions in that leaderboard. + +03:35.520 --> 03:41.160 +So now we have, uh, it's time to develop our system message and our user prompt. + +03:41.160 --> 03:44.130 +And I'm using the same approach that we used back in the day. + +03:44.130 --> 03:47.370 +It feels like an age ago now where system message we just hard code it. + +03:47.370 --> 03:53.370 +And the user prompt we have something where we pass in a variable and generate the user prompt for that + +03:53.370 --> 03:54.090 +variable. + +03:54.090 --> 04:00.600 +So the system message I've gone with is you're an assistant that re-implements Python code in high performance + +04:00.600 --> 04:02.550 +C plus plus four and M1 Mac. + +04:02.580 --> 04:05.160 +Obviously I'm using an M1 Mac right here. + +04:05.190 --> 04:12.420 +Um, and I suggest that you substitute in here whatever kind of environment you have to make this most + +04:12.600 --> 04:19.500 +appropriate for you and you may need to do some some tweaking, particularly with the C plus plus setup + +04:19.500 --> 04:21.060 +to make sure that this works for you. + +04:21.120 --> 04:23.400 +Respond only with C plus plus code. + +04:23.400 --> 04:24.720 +Use comments sparingly. + +04:24.750 --> 04:28.710 +Do not provide any explanation other than occasional comments. 
+ +04:28.740 --> 04:34.080 +The C plus plus response needs to produce an identical output in the fastest possible time, so this + +04:34.080 --> 04:39.570 +is a little bit more wordy than the prompt I showed you in the slide a second ago, but this is what + +04:39.570 --> 04:44.370 +I found worked best with some tweaking around, and you'll see that the user prompt is even more wordy. + +04:44.400 --> 04:47.280 +Rewrite this Python code to C plus plus fastest possible implementation. + +04:47.280 --> 04:48.690 +It's a bit repetitive. + +04:49.020 --> 04:54.210 +Um, and then just here you can see I've cheated a little bit from doing some of my experiments. + +04:54.210 --> 05:00.300 +I found actually, as you'll discover, uh, maybe this is as suggested by the leaderboards. + +05:00.300 --> 05:04.830 +Claude didn't need this extra hinting, but GPT four did need this. + +05:04.830 --> 05:08.160 +Otherwise, the C plus plus code it generated didn't work. + +05:08.340 --> 05:13.260 +Um, I had to say pay attention to number types to ensure that there are no overflows. + +05:13.260 --> 05:20.110 +And remember to Hash include all necessary cplusplus patches such as or packages such as. + +05:20.140 --> 05:26.740 +I even had to actually explicitly name a particular package, which if I didn't, uh, GPT four would + +05:26.740 --> 05:32.560 +generate the cplusplus code, but not correctly include that package. + +05:32.560 --> 05:36.220 +So for whatever reason, that's something that I ended up having to do. + +05:36.220 --> 05:38.980 +Uh, maybe when you try this out, you'll find that doesn't happen. + +05:39.010 --> 05:43.540 +You'll find a better way to prompt it without needing to be quite so directive. + +05:43.540 --> 05:47.620 +Uh, it feels a little bit like that's cheating for GPT four, and we should disqualify it. + +05:47.620 --> 05:48.670 +But there we go. 
+ +05:48.670 --> 05:54.670 +Anyway, with that in mind, we now, uh, run this function to we've now defined a function to create + +05:54.670 --> 05:55.510 +this user prompt. + +05:55.540 --> 05:59.440 +And then this section here will be very familiar to you. + +05:59.470 --> 06:03.010 +Uh, messages for uh is where we create the list. + +06:03.040 --> 06:09.550 +We know so well now, uh, with two elements, uh, the role system for the system message and role + +06:09.550 --> 06:11.350 +user for the user prompt. + +06:11.500 --> 06:19.930 +Um, so that generates that messages list given Python, uh, and now Little utility function called + +06:19.930 --> 06:20.890 +writeoutput. + +06:20.890 --> 06:27.490 +That will take some cplusplus code, and it will just strip out anything in there that, uh, shouldn't + +06:27.490 --> 06:27.850 +be there. + +06:27.880 --> 06:35.350 +There's there's, uh, the models tend to respond with this keep at the top and this at the bottom. + +06:35.350 --> 06:41.020 +And so I just replace that with, I just remove that from the, from the text and then save it to a + +06:41.020 --> 06:44.290 +cplusplus file called optimized dot cpp. + +06:44.380 --> 06:49.510 +So when this runs we will see a file appearing in our directory optimized cpp. + +06:49.960 --> 06:51.100 +And when it's called. + +06:51.250 --> 06:51.820 +All right. + +06:51.820 --> 06:59.140 +And then here is uh function optimized GPT that is going to call the GPT API. + +06:59.170 --> 07:03.670 +We're going to call OpenAI dot chat dot completions dot create. + +07:03.700 --> 07:11.620 +Why do you know that that call by now, uh, model equals OpenAI model messages is and now we pass in + +07:11.620 --> 07:15.700 +the messages for Python and we set that to be streaming. + +07:15.700 --> 07:18.520 +And we do for chunk in stream. + +07:18.520 --> 07:24.980 +That means that the results come back and we print each little chunk as it comes back. 
+ +07:24.980 --> 07:27.830 +And then at the end we write this to a file. + +07:28.100 --> 07:31.850 +Hopefully I don't need to go through this because this is super familiar to you. + +07:31.880 --> 07:38.390 +Now you've seen this a hundred times and side by side with it, here is the equivalent version for Claude + +07:38.420 --> 07:40.040 +doing the same thing. + +07:40.100 --> 07:41.390 +We're going to call Claude. + +07:41.570 --> 07:45.080 +Messages dot stream for the Claude model. + +07:45.230 --> 07:51.590 +Uh, we you remember in Claude's case, we have to provide the system message separately to the user + +07:51.590 --> 07:52.100 +prompt. + +07:52.100 --> 07:52.970 +So there we go. + +07:53.000 --> 07:55.340 +This is, again, a construct you're very familiar with. + +07:55.370 --> 07:57.710 +We have to tell it the maximum number of tokens. + +07:57.710 --> 08:01.040 +And then this is how we do the streaming back. + +08:01.070 --> 08:02.360 +Same kind of thing. + +08:02.390 --> 08:04.340 +Printing writing the output. + +08:05.060 --> 08:06.020 +All right. + +08:06.050 --> 08:11.270 +At this point, because we're getting ready to try this out for reals I will execute these two. + +08:11.300 --> 08:14.240 +And then I'm going to to pause for the next video. + +08:14.240 --> 08:20.360 +And in the next video you see us, you'll see us giving this a try and seeing how GPT four and Claude + +08:20.390 --> 08:23.960 +3.5 sonnet perform when faced with this challenge. + +08:23.990 --> 08:24.710 +See you then. 
diff --git a/week5/community-contributions/subtitles/srts/59295583/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295583/ja_JP.srt new file mode 100755 index 0000000..6686df4 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295583/ja_JP.srt @@ -0,0 +1,313 @@ +WEBVTT + +00:00.800 --> 00:02.930 +そしてJupyterLabに戻ってきた。 + +00:02.930 --> 00:03.890 +少し時間が経った。 + +00:03.920 --> 00:13.160 +先週はColabで作業していたが、 今日はJupyterに戻ってローカルで作業している。 + +00:13.220 --> 00:18.890 +コードを見て、 これからやろうとしていることを確認する前に、 + +00:18.890 --> 00:27.860 +フロンティア・モデルのコーディング能力を調べたリーダーボードを思い出してみよう。 + +00:27.860 --> 00:39.650 +AI企業であるヴェラムのリーダーボードを覚えているかもしれないが、 ヴェラムには人間による評価、 つまり簡単なPythonテストがある。 + +00:39.860 --> 00:45.110 +この指標に照らし合わせると、 GPTフォーゼロがリードしていることがわかるだろう。 + +00:45.170 --> 00:50.420 +そして、 クロード・スリー・ソネットを見下ろすと、 ここではあまりうまくいっていない。 + +00:50.570 --> 00:55.310 +ええと、 今、 いろいろな理由があって、 これはおそらく最新のものではないような気がするんだ。 + +00:55.310 --> 01:03.140 +他のところでは、 ラマ3がないことに気づいた。 このサイトでは1なので、 おそらくこれは少し古いのではないかと思っている。 + +01:03.140 --> 01:12.230 +人間による評価がテストのベストではないことも知っているし、 私はコーディングのシールリーダーボードに興味がある。 + +01:12.290 --> 01:17.090 +このサイトに来て彼らの紹介文を読むと、 人間による評価だけでなく、 ライブ・コードベンチやプログラミング・パズルなど、 + +01:17.090 --> 01:25.310 +さまざまな種類のコーディング・テストを行っていることがわかる。 + +01:25.310 --> 01:27.620 +だから、 これは超包括的だと感じている。 + +01:27.650 --> 01:33.200 +また、 ラマ3世も喜んでいる。 1がリストに載っているので、 これも最近のものという印象だ。 + +01:33.560 --> 01:44.720 +そして、 このリーダーボードのトップはクロード3だ。 5ソネット、 GPTフォーゼロと続き、 3位にはミストラルのラージとオープンソースモデルのミストラルが入った。 + +01:44.900 --> 01:47.570 +だから、 これで何が起こっているかがわかる。 + +01:47.600 --> 01:50.120 +GPT4が多くの特徴を持っているか見てみよう。 + +01:50.120 --> 01:52.130 +私には見えない。 + +01:52.220 --> 01:56.510 +GPT4が必要かもしれない。 + +01:56.540 --> 02:01.340 +ああ、 もしこの面でトップモデルを本当に比較したいのなら、 だが。 + +02:01.340 --> 02:07.500 +でも、 もっと質素にしたいのであれば、 GPT2をミニに使ってもかまわない。 + +02:07.530 --> 02:08.430 +少し節約できた。 + +02:08.610 --> 02:16.140 +それはともかく、 Jupyter Labに移動して4週目に入り、 + +02:16.140 --> 02:21.090 +3日目に今週のコードを見てみよう。 + +02:21.090 --> 02:25.170 +そして、 いつものように、 今日学ぶことのために、 いろいろなことが起こっている。 + +02:25.170 
--> 02:29.640 +そのうちのひとつは、 コードを生成する問題についてのものだ。 + +02:29.790 --> 02:35.190 +しかし、 私たちはこれを、 さまざまなモデルを比較したり、 先ほどと同じようにリーダーボードを見たり、 + +02:35.190 --> 02:40.920 +LMソリューションでビジネス上の問題を解決する方法を理解したりする方法としても使うつもりです。 + +02:40.920 --> 02:43.650 +そして、 ここで気づくかもしれないことがある。 + +02:43.650 --> 02:53.910 +Gradioを使い、 プロトタイプにまとめるということがどういうことなのか、 + +02:53.910 --> 02:59.490 +また別の機会にお見せしたいと思います。 + +02:59.490 --> 03:08.910 +インポートを実行し、 通常のload envを使って環境変数を設定する。 + +03:08.910 --> 03:12.330 +今一度、 EMVファイルを持つことを思い出してほしい。 + +03:12.330 --> 03:16.560 +今回はOpenAIとanthropicを使う。 + +03:16.560 --> 03:27.090 +このセルをセットアップして、 OpenAIとクラウドのインターフェイスを通常通り初期化し、 OpenAIとクラウド3を使います。 + +03:27.090 --> 03:27.090 +5. + +03:27.120 --> 03:27.390 +申し訳ない。 + +03:27.420 --> 03:29.520 +GPT4とクラウド3を使う。 5. + +03:29.550 --> 03:35.250 +ソネット......そのリーダーボードで上位2位に入っていた。 + +03:35.520 --> 03:41.160 +それでは、 システム・メッセージとユーザー・プロンプトを作成しましょう。 + +03:41.160 --> 03:44.130 +昔と同じやり方でやっている。 + +03:44.130 --> 03:47.370 +システムメッセージがハードコードされていたのは、 もう昔のことのように感じる。 + +03:47.370 --> 03:54.090 +ユーザー・プロンプトは、 変数を渡して、 その変数に対するユーザー・プロンプトを生成するものだ。 + +03:54.090 --> 04:02.550 +つまり、 あなたはPythonのコードを高性能のCプラス4とM1マックで再インプリメントするアシスタントだということだ。 + +04:02.580 --> 04:05.160 +ここで僕が使っているのは明らかにM1マックだ。 + +04:05.190 --> 04:12.420 +特にCプラスプラスのセットアップについては、 + +04:12.600 --> 04:21.060 +微調整が必要かもしれません。 + +04:21.120 --> 04:23.400 +Cプラスプラスコードでのみ対応。 + +04:23.400 --> 04:24.720 +コメントは控えめに。 + +04:24.750 --> 04:28.710 +時折のコメント以外の説明はしないこと。 + +04:28.740 --> 04:34.080 +Cプラス・プラス・レスポンスは、 可能な限り最速で同一の出力を生成する必要があるため、 + +04:34.080 --> 04:39.570 +1秒前のスライドでお見せしたプロンプトよりも少し言葉が多くなっていますが、 + +04:39.570 --> 04:44.370 +これは私が微調整を加えて最もうまくいったものです。 + +04:44.400 --> 04:47.280 +このPythonコードをC言語に書き換え、 さらに可能な限り最速の実装を行う。 + +04:47.280 --> 04:48.690 +ちょっと繰り返しが多い。 + +04:49.020 --> 04:54.210 +ええと、 それから、 ここだけちょっとズルをして実験をしているのがわかるだろう。 + +04:54.210 --> 05:00.300 +実は、 今にわかると思うけど、 たぶん、 これはリーダーボードが示唆している通りなんだ。 + +05:00.300 --> 05:04.830 +クロードには余計なヒントは必要なかったが、 GPT4には必要だった。 + +05:04.830 --> 05:08.160 +そうでなければ、 生成されたC++のコードは機能しなかった。 + 
+05:08.340 --> 05:13.260 +ええと、 オーバーフローがないように、 数字の型に注意してくださいと言わなければならなかった。 + +05:13.260 --> 05:20.110 +また、 以下のような必要なcplusplusのパッチをすべて含めることも忘れずに。 + +05:20.140 --> 05:26.740 +もしそうしなければ、 GPT4はcplusplusコードを生成するが、 + +05:26.740 --> 05:32.560 +そのパッケージを正しくインクルードしない。 + +05:32.560 --> 05:36.220 +だから、 どんな理由であれ、 結局はそうしなければならなかったんだ。 + +05:36.220 --> 05:38.980 +あー、 たぶんこれを試してみたら、 そうならないことがわかると思うよ。 + +05:39.010 --> 05:43.540 +指示的でなくても、 もっといい促し方があるだろう。 + +05:43.540 --> 05:47.620 +ええと、 GPT4ではちょっとズルい気がします。 + +05:47.620 --> 05:48.670 +でも、 これでいい。 + +05:48.670 --> 05:55.510 +とにかく、 このことを念頭に置いて、 この関数を実行し、 ユーザー・プロンプトを作成する関数を定義します。 + +05:55.540 --> 05:59.440 +そして、 このセクションはあなたにとって非常に馴染み深いものだろう。 + +05:59.470 --> 06:03.010 +ええと、 ええと......のメッセージは、 リストを作成するところです。 + +06:03.040 --> 06:11.350 +システム・メッセージにはロール・システム、 ユーザー・プロンプトにはロール・ユーザー。 + +06:11.500 --> 06:20.890 +これで、 Pythonに与えられたメッセージ・リストが生成された。 + +06:20.890 --> 06:27.850 +これはcplusplusのコードで、 そこにあるべきでないものを取り除いてくれる。 + +06:27.880 --> 06:35.350 +モデルは、 上がこのキープ、 下がこのキープで反応する傾向があるんだ。 + +06:35.350 --> 06:44.290 +それをテキストから削除して、 optimized dot cppというcplusplusファイルに保存する。 + +06:44.380 --> 06:49.510 +つまり、 これが実行されると、 ディレクトリに最適化されたcppファイルが現れることになる。 + +06:49.960 --> 06:51.100 +と呼ばれたとき。 + +06:51.250 --> 06:51.820 +分かった。 + +06:51.820 --> 06:59.140 +そして、 GPT APIを呼び出すために最適化されたGPT関数です。 + +06:59.170 --> 07:03.670 +OpenAI dot chat dot completions dot createと呼ぶことにする。 + +07:03.700 --> 07:11.620 +モデル・イコールOpenAIのモデル・メッセージで、 Pythonのメッセージを渡して、 + +07:11.620 --> 07:15.700 +ストリーミングに設定します。 + +07:15.700 --> 07:18.520 +そして、 私たちはストリームでチャンクのためにプレーする。 + +07:18.520 --> 07:24.980 +つまり、 結果が戻ってくるたびに、 その小さなかたまりを印刷するのだ。 + +07:24.980 --> 07:27.830 +そして最後にこれをファイルに書き出す。 + +07:28.100 --> 07:31.850 +願わくば、 こんなことをする必要はないのだが......。 + +07:31.880 --> 07:40.040 +クロードが同じことをするのと同じバージョンだ。 + +07:40.100 --> 07:41.390 +クロードを呼ぶつもりだ。 + +07:41.570 --> 07:45.080 +クロード・モデルのメッセージ・ドット・ストリーム。 + +07:45.230 --> 07:52.100 +ええと、 クロードの場合、 システムメッセージをユーザー・プロンプトとは別に提供しなければならないことを覚えているよね。 + +07:52.100 --> 07:52.970 +そうだ。 + 
+07:53.000 --> 07:55.340 +これもまた、 皆さんよくご存知の構図だ。 + +07:55.370 --> 07:57.710 +トークンの最大数を伝えなければならない。 + +07:57.710 --> 08:01.040 +そして、 これがストリーミングバックのやり方だ。 + +08:01.070 --> 08:02.360 +同じようなことだ。 + +08:02.390 --> 08:04.340 +出力を印刷する。 + +08:05.060 --> 08:06.020 +分かった。 + +08:06.050 --> 08:11.270 +この時点で、 実際に試してみる準備をしているので、 この2つを実行する。 + +08:11.300 --> 08:14.240 +そして、 次のビデオのために一時停止します。 + +08:14.240 --> 08:23.960 +次のビデオでは、 GPT4とクロード3を試しているところを見てほしい。 + +08:23.960 --> 08:23.960 +5ソネットは、 この難題に直面したときに実行する。 + +08:23.990 --> 08:24.710 +ではまた diff --git a/week5/community-contributions/subtitles/srts/59295583/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295583/ko_KR.srt new file mode 100755 index 0000000..64798bd --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295583/ko_KR.srt @@ -0,0 +1,367 @@ +WEBVTT + +00:00.800 --> 00:02.930 +다시 유피터랩에 왔네요 + +00:02.930 --> 00:03.890 +1분 됐어요 + +00:03.920 --> 00:10.250 +지난주에 콜랍에서 작업했고 다시 주피터로 가서 현지에서 조깅합니다 + +00:10.250 --> 00:13.160 +오늘 즐겁게 보낼 거예요 + +00:13.220 --> 00:18.890 +코드로 가서 무엇을 할 것인지 살펴보기 전에 leaderboard를 + +00:18.890 --> 00:26.300 +다시 한 번 짚어보죠. 개척 모델의 코딩 능력을 살펴보고 누가 코딩의 상위권에 있는지 보기 + +00:26.300 --> 00:27.860 +위해서요. + +00:27.860 --> 00:36.020 +인공지능 회사인 피지 리더보드에 인체 평가가 있어요 간단한 + +00:36.380 --> 00:39.650 +파이썬 테스트죠 + +00:39.860 --> 00:45.110 +이 수치와 비교하면 GPT 40이 가장 앞섰어요 + +00:45.170 --> 00:50.420 +클로드 3번 소네트가 아래를 내려다보면 잘 안 풀리네요 + +00:50.570 --> 00:55.310 +여러 가지 이유로 이게 최신 자료는 아닌 것 같네요 + +00:55.310 --> 01:03.140 +다른 곳에는 라마 3이 없더라고요 이 사이트에는 1개 있어요 비트가 좀 오래된 것 같네요 + +01:03.140 --> 01:11.090 +인간 평가가 최고가 아니라는 것도 알아요 저는 코딩의 인장 순위표에 더 관심이 + +01:11.090 --> 01:12.230 +있죠 + +01:12.290 --> 01:17.090 +여기 와서 설명서를 읽어보면 다양한 코딩 테스트를 한다는 걸 + +01:17.090 --> 01:23.660 +알 수 있어요 인간 평가도 있고 라이브 코드벤치도 있고 다수의 프로그래밍 퍼즐과 다른 + +01:23.660 --> 01:25.310 +것들도 있어요 + +01:25.310 --> 01:27.620 +아주 포괄적인 것 같아요 + +01:27.650 --> 01:32.330 +라마 3도 반갑고요 1도 포함되어 있어요. 그래서 이건 최근의 get이라는 + +01:32.330 --> 01:33.200 +느낌이 들어요. 
+ +01:33.560 --> 01:41.510 +순위표의 1위는 클로드 3이에요 5 소네트, 그다음은 GPT 40 미스트랄 라지 + +01:41.510 --> 01:44.720 +오픈 소스 모델 미스트랄이 3위죠 + +01:44.900 --> 01:47.570 +어떤 상황인지 알 수 있죠 + +01:47.600 --> 01:50.120 +GPT 4가 기능이 많은지 보죠 + +01:50.120 --> 01:52.130 +제가 보기엔 없어요 + +01:52.220 --> 01:56.510 +그렇다면 GPT 4를 사용해야 할 수도 있겠네요 + +01:56.540 --> 02:01.340 +톱모델들을 정면에서 비교하고 싶다면 이렇게 하세요 + +02:01.340 --> 02:07.500 +미니 비트는 GPT 2를 쓰세요 좀 더 검소하게 쓰고 싶다면요 + +02:07.530 --> 02:08.430 +비트 비트 좀 아꼈죠 + +02:08.610 --> 02:16.140 +어쨌든 주피터 랩으로 가서 4주 차와 3일째 되는 날 이번 주 코드를 보죠 + +02:16.140 --> 02:21.090 +코드 생성과 그걸 중심으로 앱을 만들 거예요 + +02:21.090 --> 02:25.170 +오늘도 많은 걸 배울 거예요 + +02:25.170 --> 02:29.640 +그중 하나는 코드 생성 문제예요 + +02:29.790 --> 02:35.190 +또한 이를 활용해 다양한 모델을 탐구하고 비교하고 기존의 순위표를 검토하며 + +02:35.190 --> 02:40.920 +LM 솔루션으로 비즈니스 문제를 해결하는 방법도 이해하려고 해요 + +02:40.920 --> 02:43.650 +여기서 뭐가 보이는지 아세요? + +02:43.650 --> 02:48.210 +여기 작은 게 있는데요 Gadio와 함께 플레이할 기회가 + +02:48.210 --> 02:53.910 +또 있을 겁니다 프로토타입으로 패키지하는 게 어떤 건지 보여드릴 겁니다 + +02:53.910 --> 02:59.490 +LM 솔루션에 관해 다른 이들과 협업할 수 있는 아주 좋은 방법이거든요 + +02:59.490 --> 03:06.480 +임포트 실행으로 착수하겠습니다 그런 다음 환경 변수를 설정하죠 일반적인 LoadInFv를 + +03:06.480 --> 03:08.910 +이용해서요 + +03:08.910 --> 03:12.330 +EMV 파일을 준비하라고 다시 한 번 말씀드리죠 + +03:12.330 --> 03:16.560 +이번에는 오픈라이와 인어로픽을 사용할 거예요 + +03:16.560 --> 03:23.670 +여기 이 셀에서 오픈AI와 클라우드 인터페이스를 평소처럼 초기화합니다 오픈AI와 클라우드 + +03:23.670 --> 03:27.090 +3을 사용하죠 5분 + +03:27.120 --> 03:27.390 +미안해요 + +03:27.420 --> 03:29.520 +GPT4와 클라우드 3을 사용하죠 5분 + +03:29.550 --> 03:35.250 +순위표에서 상위 2위에 드는 소네트였죠 + +03:35.520 --> 03:41.160 +이제 시스템 메시지와 사용자 프롬프트를 개발할 차례죠 + +03:41.160 --> 03:44.130 +예전에 썼던 방법을 그대로 쓰고 있어요 + +03:44.130 --> 03:47.370 +시스템 메시지에 하드 코드를 넣는 건 옛날 일 같아요 + +03:47.370 --> 03:53.370 +사용자 프롬프트에는 변수에서 전달하는 무언가가 있고 그 변수를 위한 사용자 프롬프트를 + +03:53.370 --> 03:54.090 +생성하죠 + +03:54.090 --> 04:00.600 +제가 전달한 시스템 메시지는 당신은 고성능 C+4, M1 Mac으로 파이썬 코드를 재구성하는 + +04:00.600 --> 04:02.550 +조수라는 거예요 + +04:02.580 --> 04:05.160 +제가 쓰는 건 M1맥이에요 + +04:05.190 --> 04:12.420 +여러분께 가장 적절한 환경을 
갖추기 위해 어떤 것이든 여기서 대체하는 걸 권해드립니다 + +04:12.600 --> 04:19.500 +약간의 수정이 필요할 수도 있어요 특히 C 플러스 셋업에서요 이게 제대로 되도록 + +04:19.500 --> 04:21.060 +하기 위해서요 + +04:21.120 --> 04:23.400 +C 플러스 플러스 코드만 사용하세요 + +04:23.400 --> 04:24.720 +댓글은 아껴 써요 + +04:24.750 --> 04:28.710 +가끔 댓글 다는 것 외에는 어떤 설명도 하지 마세요 + +04:28.740 --> 04:34.080 +C++ 응답은 가장 빠른 시간 내에 동일한 출력을 내야 합니다 조금 + +04:34.080 --> 04:39.570 +전에 보여드린 프롬프트보다 좀 더 단어 같죠 하지만 약간 수정하면 가장 + +04:39.570 --> 04:44.370 +잘 되는 걸 찾았어요 사용자 프롬프트는 더 단어 같죠 + +04:44.400 --> 04:47.280 +파이썬 코드를 C+로 다시 쓰고 가장 빠른 구현을 더하세요 + +04:47.280 --> 04:48.690 +비트가 좀 반복적이에요 + +04:49.020 --> 04:54.210 +그리고 여기 보시면 실험에서 약간 비트를 썼어요 + +04:54.210 --> 05:00.300 +알게 되겠지만 순위표에 나온 대로일 수도 있어요 + +05:00.300 --> 05:04.830 +클로드는 이런 힌트가 필요 없었지만 GPT 4에는 이게 필요했죠 + +05:04.830 --> 05:08.160 +그렇지 않으면, 생성된 C++ 코드가 작동하지 않아요 + +05:08.340 --> 05:13.260 +넘치지 않도록 숫자 유형을 잘 봐야 해요 + +05:13.260 --> 05:20.110 +해시는 필수적인 모든 패치를 포함하는 걸 기억하세요 패키지 같은 거요 + +05:20.140 --> 05:26.740 +특정 패키지에 명시적으로 이름을 붙여야만 했어요 그렇게 하지 않으면 GPT 4가 Cplusplus + +05:26.740 --> 05:32.560 +코드를 생성할 테니까요 하지만 정확히 해당 패키지를 포함하진 않죠 + +05:32.560 --> 05:36.220 +이유는 모르겠지만 결국 그렇게 해야만 했어요 + +05:36.220 --> 05:38.980 +이걸 써보면 그렇지 않다는 걸 알게 될 거예요 + +05:39.010 --> 05:43.540 +그렇게 명령조로 말하지 않고도 더 잘 전달할 방법을 찾을 거예요 + +05:43.540 --> 05:47.620 +4번 비트를 조작하는 것 같아서 실격시켜야 할 것 같아요 + +05:47.620 --> 05:48.670 +하지만 됐어요 + +05:48.670 --> 05:54.670 +어쨌든 그걸 염두에 두고 이제 이 함수를 실행 이 사용자 프롬프트를 생성할 함수를 + +05:54.670 --> 05:55.510 +정의했죠 + +05:55.540 --> 05:59.440 +이 부분은 아주 익숙할 거예요 + +05:59.470 --> 06:03.010 +음 메시지 목록은 여기서 만들어요 + +06:03.040 --> 06:09.550 +지금은 두 가지 요소로 잘 알려져 있죠 시스템 메시지를 위한 역할 시스템과 사용자 프롬프트를 + +06:09.550 --> 06:11.350 +위한 역할 사용자요 + +06:11.500 --> 06:19.930 +파이썬 을 이용해서 메시지 리스트를 생성하고 쓰기출력이라는 유틸리티 함수가 + +06:19.930 --> 06:20.890 +있어요 + +06:20.890 --> 06:27.850 +그러려면 문장 코드가 필요해요 거기 있어선 안 될 건 뭐든 제거해 줄 거예요 + +06:27.880 --> 06:35.350 +모델은 그에 반응하는 경향이 있어요 이건 위에 두고 이건 아래에 두는 거죠 + +06:35.350 --> 06:41.020 +그래서 그걸 텍스트에서 제거한 다음 최적화된 .cpp이라는 cplusplus + +06:41.020 --> 
06:44.290 +파일에 저장했어요 + +06:44.380 --> 06:49.510 +이게 실행되면 파일이 디렉터리 최적화된 cpp에 나타나는 걸 보게 되죠 + +06:49.960 --> 06:51.100 +언제 부르는지도 알고요 + +06:51.250 --> 06:51.820 +좋아요 + +06:51.820 --> 06:59.140 +그리고 여기 함수 최적화된 GPT가 있습니다 GPT API를 호출할 거예요 + +06:59.170 --> 07:03.670 +OpenAI.Tat.완료.Create라고 입력할게요 + +07:03.700 --> 07:11.620 +모델과 OpenAI 모델 메시지가 같다는 것을 어떻게 아셨나요? 파이썬 을 위한 메시지를 + +07:11.620 --> 07:15.700 +전달하는 것을 스트리밍으로 설정했죠 + +07:15.700 --> 07:18.520 +덩어리 인 스트림도 있어요 + +07:18.520 --> 07:24.980 +즉, 결과가 나오면 그 결과에 따라 조금씩 프린트하는 거죠 + +07:24.980 --> 07:27.830 +그리고 마지막에 이걸 파일로 작성해요 + +07:28.100 --> 07:31.850 +이런 건 안 해도 되겠죠? 당신한텐 익숙하잖아요 + +07:31.880 --> 07:38.390 +이걸 100번은 보셨을 겁니다 이것과 나란히서요 이건 클로드가 같은 걸 하는 + +07:38.420 --> 07:40.040 +동일한 버전이죠 + +07:40.100 --> 07:41.390 +클로드한테 전화할게요 + +07:41.570 --> 07:45.080 +메시지 클로드 모델에게 점 흐름 보내주세요 + +07:45.230 --> 07:51.590 +클로드의 경우를 기억하시겠지만 사용자 프롬프트에는 시스템 메시지를 따로 제공해야 + +07:51.590 --> 07:52.100 +해요 + +07:52.100 --> 07:52.970 +자, 됐어요 + +07:53.000 --> 07:55.340 +이것 역시 여러분이 아주 익숙한 구조죠 + +07:55.370 --> 07:57.710 +패의 최대 개수를 알려 줘야 해요 + +07:57.710 --> 08:01.040 +이렇게 스트리밍을 하는 거예요 + +08:01.070 --> 08:02.360 +비슷한 거예요 + +08:02.390 --> 08:04.340 +인쇄해서 출력하는 거죠 + +08:05.060 --> 08:06.020 +좋아요 + +08:06.050 --> 08:11.270 +이 시점에서, 실제로 해볼 준비가 되어 있기 때문에 이 두 가지를 실행할게요 + +08:11.300 --> 08:14.240 +그런 다음 다음 다음 비디오로 일시 정지하죠 + +08:14.240 --> 08:20.360 +다음 영상에서는 저희가 이 작업을 시도하고 GPT 4와 클로드 3이 어떻게 작동하는지 + +08:20.390 --> 08:23.960 +보여 드릴게요 이 난관에 직면했을 때 5개의 소네트가 공연되죠 + +08:23.990 --> 08:24.710 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59295587/en_US.srt b/week5/community-contributions/subtitles/srts/59295587/en_US.srt new file mode 100755 index 0000000..b630766 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295587/en_US.srt @@ -0,0 +1,337 @@ +WEBVTT + +00:00.980 --> 00:06.350 +When I left you, we had just created this simple user interface for converting from Python to C plus + +00:06.350 --> 00:09.290 +plus using different frontier 
models. + +00:09.290 --> 00:11.000 +And I said, you know what? + +00:11.000 --> 00:15.380 +Let's make this into a proper prototype UI and let's do that. + +00:15.380 --> 00:21.680 +So if I come back here to this Jupyter notebook, we're going to do, we're going to write a few more + +00:21.680 --> 00:22.340 +methods. + +00:22.340 --> 00:30.680 +So first of all this one execute Python takes code is going to do the same slightly dangerous thing + +00:30.680 --> 00:31.100 +here. + +00:31.130 --> 00:32.510 +Execute that code. + +00:32.510 --> 00:35.480 +So again this particular prototype is really just for you. + +00:35.480 --> 00:40.550 +This is not one to share because if you share this with anybody, they will have a way to execute Python + +00:40.550 --> 00:42.200 +code directly on your box. + +00:42.200 --> 00:43.370 +And that would be a bad thing. + +00:43.370 --> 00:50.270 +So share should always very much be false in in this particular Gradio app. + +00:50.360 --> 00:53.600 +Uh, but it's, it's a great prototype for you to work with. + +00:53.960 --> 00:57.170 +Um, so what this does is, is execute that code. + +00:57.170 --> 01:02.600 +And the reason there's some stuff, some gubbins around it is because we want to capture the standard + +01:02.600 --> 01:09.840 +out that comes from this Python code, and return it so that this becomes a function that takes code, + +01:09.840 --> 01:15.030 +executes it, and returns the output because we want to display it in gradio. + +01:15.030 --> 01:17.700 +So that's an execute Python. + +01:17.910 --> 01:24.510 +And now some slightly heftier code to execute C plus plus um given some code. + +01:24.510 --> 01:28.830 +So the first thing I do is I call that that that function we wrote a while back to actually write it + +01:28.830 --> 01:31.260 +out to the file optimized. + +01:31.590 --> 01:39.330 +Um, and then what I do is something which involves, uh, running the compile command. 
+ +01:39.360 --> 01:46.350 +I use something called subprocess dot run, which is a way that you can, uh, have spawn a subprocess + +01:46.350 --> 01:48.180 +to, to carry something out. + +01:48.480 --> 01:49.920 +Um, and that's what I do. + +01:49.920 --> 01:53.370 +Now, you may notice this is slightly longer than before. + +01:53.370 --> 01:59.520 +I mentioned a while back what we're going to do this this, this is a really using all of the flags + +01:59.520 --> 02:02.430 +to make sure that the code is optimized for my computer. + +02:02.430 --> 02:06.750 +It's actually barely scratching the surface for people that are that know all about this stuff. + +02:06.760 --> 02:11.050 +There are a gazillion flags that you could add in, but I. + +02:11.080 --> 02:16.000 +After a while I figured this is good enough, so we put in a bunch of flags to do this. + +02:16.150 --> 02:27.520 +Um, and um, then we run the optimized program, and then we return the we, we capture the standard + +02:27.520 --> 02:28.720 +out and return that. + +02:28.750 --> 02:30.970 +Or if there's an error, we return standard error. + +02:31.210 --> 02:38.140 +So that is the method to execute C plus plus code and return the response. + +02:38.140 --> 02:41.200 +It's really compile and execute would be more accurate. + +02:41.470 --> 02:48.520 +Um here I've just got a little CSS string because I want to do some uh, a little bit of coloring in, + +02:48.850 --> 02:50.440 +um, as you will see. + +02:50.440 --> 02:53.740 +And then this is the extent of the user interface, that's all. + +02:53.740 --> 02:55.240 +We're not doing any more coding. + +02:55.240 --> 02:55.990 +It's just this. + +02:55.990 --> 03:02.410 +So let me quickly tell you, um, I've got again, I'm using the GR blocks, which is the the lower + +03:02.410 --> 03:07.570 +level Gradio API that lets you have more flexibility on your user interface, not using one of radio's + +03:07.600 --> 03:09.070 +off the shelf UIs. 
+ +03:09.280 --> 03:12.770 +Um, you can pass in your own CSS, which is what I'm doing here. + +03:12.800 --> 03:15.140 +CSS equals this variable. + +03:15.320 --> 03:18.740 +Um, and that lets you do a little bit more customization. + +03:18.860 --> 03:22.100 +This dot markdown gives me a nice heading at the top. + +03:22.100 --> 03:26.510 +And then I've got one, two, three, four, five rows of widgets. + +03:26.690 --> 03:30.110 +Um you'll recognize Python code and C plus plus code. + +03:30.110 --> 03:35.000 +That's exactly what I had before setting the the the default value to be the pi code. + +03:35.510 --> 03:39.800 +Uh, then this is as before, the drop down to pick GPT or Claude. + +03:40.040 --> 03:42.680 +Uh, the button to convert code. + +03:42.680 --> 03:45.050 +And then I've got some more buttons. + +03:45.050 --> 03:51.770 +Python run to run Python and C plus plus CP run to run C plus plus. + +03:51.770 --> 03:58.580 +And then I've got a Python out with Python's output CP out for the C plus plus output. + +03:58.610 --> 04:02.720 +Hopefully you're following, but if not, when you see the user interface, this is all going to be + +04:02.750 --> 04:03.650 +utterly clear. + +04:03.650 --> 04:07.580 +And then you can look back at this and you'll say, wow, that's that's so simple. + +04:08.000 --> 04:13.000 +Um, so then we've got three buttons convert, Python run and C plus plus run. + +04:13.000 --> 04:16.600 +So we have to say what happens when you click any of those three buttons? + +04:16.630 --> 04:19.120 +If you press the convert button, it's just as before. + +04:19.120 --> 04:25.810 +It should run the optimize method function, passing in the inputs, the Python code, and the model. + +04:25.840 --> 04:30.640 +The output goes to the C plus plus code, so when you press convert it will take the Python code. + +04:30.640 --> 04:35.290 +It will use the model to convert it and put that into C plus plus. 
+ +04:35.500 --> 04:42.220 +If you press the press the Python run button it's going to execute Python with the Python code to Python + +04:42.250 --> 04:42.910 +out. + +04:42.940 --> 04:49.270 +If you press C plus plus run, it's going to execute the C plus plus code with the C plus plus inputs. + +04:49.300 --> 04:51.310 +C plus plus output. + +04:51.610 --> 04:53.140 +That's all there is to it. + +04:53.170 --> 05:00.850 +If that sounded, uh, like a like a lot of spaghetti stuff that it's, it's gonna nicely unravel and + +05:00.880 --> 05:04.510 +be super clear when you see what you're about to see. + +05:04.780 --> 05:06.520 +Uh, let's run this. + +05:06.550 --> 05:08.470 +Here is the user interface. + +05:08.470 --> 05:11.080 +Welcome to a more colorful user interface. + +05:11.290 --> 05:14.830 +Uh, so there's Python code here, C plus plus code. + +05:14.830 --> 05:20.600 +And then we get to select a model convert code and we can run Python run C plus plus. + +05:20.600 --> 05:22.100 +So let's start. + +05:22.100 --> 05:25.640 +We can press Run Python to run this code here. + +05:25.790 --> 05:28.520 +Uh right from this user interface. + +05:28.550 --> 05:31.460 +It's calling that exact method right now. + +05:31.460 --> 05:33.530 +And we see the results here. + +05:33.680 --> 05:35.060 +Uh, there it is. + +05:35.210 --> 05:37.640 +Um, there is indeed Pi. + +05:37.640 --> 05:39.890 +And it took 8.4 seconds. + +05:40.070 --> 05:45.950 +Uh, and we will now flip to, uh, we will now convert that to C plus. + +05:45.950 --> 05:46.670 +Plus. + +05:46.820 --> 05:47.930 +Here it is. + +05:47.960 --> 05:50.930 +And we can now run the C plus plus. + +05:51.800 --> 05:55.550 +And there we have it the C plus plus. + +05:55.550 --> 06:00.890 +You'll see that that's significantly faster than before when if I remember right it was about 0.2 seconds. + +06:00.920 --> 06:08.090 +Uh, and that's because we've got these extra flags to really optimize this, uh, this code. 
+ +06:08.120 --> 06:15.410 +Um, and so you can see we are now more than 100 times faster between the two implementations. + +06:15.410 --> 06:24.270 +And for sort of loop loopy mathy Math code, you would usually expect that a straight up compiled C + +06:24.300 --> 06:30.450 +plus plus, uh, compiled natively like this can be, uh, at least 100 times faster. + +06:30.450 --> 06:33.690 +So it's not not not too surprising, but it's a good result. + +06:33.960 --> 06:39.420 +Um, and of course, the numbers are, uh, well, they're off by a teeny bit, which is probably which + +06:39.420 --> 06:41.550 +is all down to floating point rounding stuff. + +06:41.550 --> 06:43.980 +Let's see how Claude does and just confirm. + +06:46.830 --> 06:50.460 +And let's run Claude's C plus plus code. + +06:50.760 --> 06:53.520 +And it's very similar, of course. + +06:53.520 --> 06:58.350 +Same number, similarly super blazingly fast. + +06:58.500 --> 07:05.010 +Um, and it has generated, I'm sure, very similar uh, c plus plus code which is being compiled to + +07:05.040 --> 07:07.260 +the same machine code I would expect. + +07:07.530 --> 07:08.400 +All right. + +07:08.400 --> 07:14.850 +One more, uh, flip to the next video, and then we will run for our hard problem through here. + +07:14.850 --> 07:17.730 +Uh, and then we will call it a day, but it's been. + +07:17.730 --> 07:19.620 +I hope you've enjoyed it as much as I have. + +07:19.650 --> 07:21.780 +This is a great a great little product. + +07:21.810 --> 07:22.770 +See you in a second. 
diff --git a/week5/community-contributions/subtitles/srts/59295587/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295587/ja_JP.srt new file mode 100755 index 0000000..027596e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295587/ja_JP.srt @@ -0,0 +1,295 @@ +WEBVTT + +00:00.980 --> 00:09.290 +私があなたと別れたとき、 私たちはPythonからCに変換するためのシンプルなユーザーインターフェイスを作ったばかりだった。 + +00:09.290 --> 00:11.000 +私はこう言った。 + +00:11.000 --> 00:15.380 +これをちゃんとしたプロトタイプUIにしよう。 + +00:15.380 --> 00:22.340 +このJupyterノートブックに戻ったら、 もう少しメソッドを書いてみよう。 + +00:22.340 --> 00:31.100 +まず第一に、 このPythonのテイク・コードの実行は、 ここでも同じように少し危険なことをしようとしている。 + +00:31.130 --> 00:32.510 +そのコードを実行する。 + +00:32.510 --> 00:35.480 +だから今回も、 この特別なプロトタイプは本当にあなたのためだけのものなんだ。 + +00:35.480 --> 00:42.200 +もしこれを誰かと共有したら、 彼らはあなたのボックス上で直接Pythonコードを実行する方法を手に入れることになるからだ。 + +00:42.200 --> 00:43.370 +それは悪いことだ。 + +00:43.370 --> 00:50.270 +だから、 このグラディオのアプリでは、 シェアは常に虚偽であるべきなのだ。 + +00:50.360 --> 00:53.600 +ええと、 でも、 あなたにとっては素晴らしいプロトタイプだよ。 + +00:53.960 --> 00:57.170 +つまり、 このコードを実行するんだ。 + +00:57.170 --> 01:02.600 +Pythonのコードから出力される標準出力をキャプチャして、 + +01:02.600 --> 01:15.030 +それを返すことで、 コードを受け取って実行し、 gradioに表示するための出力を返す関数にしたいからだ。 + +01:15.030 --> 01:17.700 +これがパイソンの実行だ。 + +01:17.910 --> 01:24.510 +そして今度は、 Cプラス・プラス・ウムのコードを実行するための、 少し重いコードだ。 + +01:24.510 --> 01:31.260 +それで、 最初にすることは、 少し前に書いた関数を呼び出して、 実際に最適化されたファイルに書き出すことだ。 + +01:31.590 --> 01:39.330 +それから、 コンパイルコマンドを実行するんだ。 + +01:39.360 --> 01:48.180 +私はサブプロセスのドットランというものを使っている。 これは、 何かを実行するためにサブプロセスをスポーンさせる方法だ。 + +01:48.480 --> 01:49.920 +それが僕の仕事なんだ。 + +01:49.920 --> 01:53.370 +さて、 以前より少し長くなっていることにお気づきだろうか。 + +01:53.370 --> 01:59.520 +これは、 コードが私のコンピュータに最適化されていることを確認するために、 + +01:59.520 --> 02:02.430 +すべてのフラグを使用するものです。 + +02:02.430 --> 02:06.750 +このようなことを知り尽くしている人たちにとっては、 ほとんど表面をなぞるようなものだ。 + +02:06.760 --> 02:11.050 +追加できるフラッグは山ほどある。 + +02:11.080 --> 02:16.000 +しばらくして、 これで十分だと思ったので、 そのためのフラッグをたくさん入れた。 + +02:16.150 --> 02:28.720 +そして、 最適化されたプログラムを実行し、 標準出力をキャプチャしてそれを返す。 + +02:28.750 --> 
02:30.970 +エラーがあれば標準エラーを返す。 + +02:31.210 --> 02:38.140 +これが、 Cプラス・プラス・コードを実行し、 レスポンスを返す方法である。 + +02:38.140 --> 02:41.200 +本当はコンパイルして実行した方が正確なんだけどね。 + +02:41.470 --> 02:50.440 +ここではCSSの文字列を少し使っている。 + +02:50.440 --> 02:53.740 +そして、 これがユーザーインターフェースの範囲であり、 すべてである。 + +02:53.740 --> 02:55.240 +もうコーディングはしない。 + +02:55.240 --> 02:55.990 +ただこれだけだ。 + +02:55.990 --> 03:02.410 +GRブロックを使っていますが、 これはGradioの下位APIで、 + +03:02.410 --> 03:09.070 +ユーザーインターフェースの柔軟性を高めることができます。 + +03:09.280 --> 03:12.770 +自分のCSSを渡すこともできる。 + +03:12.800 --> 03:15.140 +CSSはこの変数に等しい。 + +03:15.320 --> 03:18.740 +それで、 もう少しカスタマイズできるんだ。 + +03:18.860 --> 03:22.100 +このドットマークダウンは、 トップに素敵な見出しをつけてくれる。 + +03:22.100 --> 03:26.510 +そして、 ウィジェットが1列、 2列、 3列、 4列、 5列と並んでいる。 + +03:26.690 --> 03:30.110 +あの......PythonのコードとCプラスアルファのコードはわかると思う。 + +03:30.110 --> 03:35.000 +デフォルト値を円周率コードに設定する前は、 まさにそうだった。 + +03:35.510 --> 03:39.800 +ええと、 これは前と同じで、 GPTかClaudeを選ぶドロップダウンです。 + +03:40.040 --> 03:42.680 +コードを変換するボタンだ。 + +03:42.680 --> 03:45.050 +それからもういくつかボタンがある。 + +03:45.050 --> 03:51.770 +Pythonの実行はPythonを、 CPの実行はC plus plusを実行する。 + +03:51.770 --> 03:58.580 +そして、 Pythonの出力CPをCプラスプラス出力のためにPythonのアウトを持っている。 + +03:58.610 --> 04:03.650 +もしそうでなければ、 ユーザーインターフェイスをご覧になれば、 すべてが明らかになるでしょう。 + +04:03.650 --> 04:07.580 +そして、 これを振り返って、 ワオ、 これはとてもシンプルだと言うだろう。 + +04:08.000 --> 04:13.000 +変換、 Python実行、 Cプラスプラス実行の3つのボタンがあります。 + +04:13.000 --> 04:16.600 +では、 この3つのボタンのどれかをクリックしたらどうなるのか? 
+ +04:16.630 --> 04:19.120 +変換ボタンを押しても以前と同じだ。 + +04:19.120 --> 04:25.810 +optimizeメソッド関数を実行し、 入力、 Pythonコード、 モデルを渡す。 + +04:25.840 --> 04:30.640 +出力はC++のコードになるので、 変換を押すとPythonのコードが出力される。 + +04:30.640 --> 04:35.290 +モデルを使って変換し、 それをCプラスプラスに入れる。 + +04:35.500 --> 04:42.910 +Pythonの実行ボタンを押すと、 PythonコードをPythonに出力してPythonを実行する。 + +04:42.940 --> 04:49.270 +Cプラスプラス実行を押すと、 Cプラスプラスの入力でCプラスプラスのコードが実行される。 + +04:49.300 --> 04:51.310 +Cプラスプラス出力。 + +04:51.610 --> 04:53.140 +それだけだ。 + +04:53.170 --> 05:00.850 +もしそれが、 あー、 たくさんのスパゲッティみたいなものに聞こえたとしたら、 それは、 きれいに解けて、 + +05:00.880 --> 05:04.510 +これから見るものを見れば超明確になる。 + +05:04.780 --> 05:06.520 +ええと、 これを実行しよう。 + +05:06.550 --> 05:08.470 +これがユーザー・インターフェースだ。 + +05:08.470 --> 05:11.080 +よりカラフルなユーザーインターフェースへようこそ。 + +05:11.290 --> 05:14.830 +ここにはPythonのコードとC++のコードがある。 + +05:14.830 --> 05:20.600 +そして、 モデル変換コードを選択し、 Pythonを実行し、 Cプラスプラスを実行することができる。 + +05:20.600 --> 05:22.100 +では、 始めよう。 + +05:22.100 --> 05:25.640 +ここでPythonの実行を押して、 このコードを実行することができる。 + +05:25.790 --> 05:28.520 +このユーザー・インターフェイスから。 + +05:28.550 --> 05:31.460 +今まさにそのメソッドを呼び出している。 + +05:31.460 --> 05:33.530 +その結果がここにある。 + +05:33.680 --> 05:35.060 +あ、 あった。 + +05:35.210 --> 05:37.640 +確かに円周率はある。 + +05:37.640 --> 05:39.890 +そして8人かかった。 4秒。 + +05:40.070 --> 05:45.950 +そして、 Cプラスに変換する。 + +05:45.950 --> 05:46.670 +それに + +05:46.820 --> 05:47.930 +これだ。 + +05:47.960 --> 05:50.930 +そして、 Cプラスプラスを走らせることができるようになった。 + +05:51.800 --> 05:55.550 +これでCプラス・プラスだ。 + +05:55.550 --> 06:00.890 +私の記憶が正しければ、 約0だった以前よりかなり速くなっているのがわかるだろう。 2秒。 + +06:00.920 --> 06:08.090 +それは、 このコードを最適化するための追加フラグがあるからなんだ。 + +06:08.120 --> 06:15.410 +それで、 2つの実装の間で100倍以上速くなったことがわかるだろう。 + +06:15.410 --> 06:24.270 +そして、 ループするような数学的なコードでは、 通常、 C+++のネイティブ・コンパイルの方が、 + +06:24.300 --> 06:30.450 +少なくとも100倍は速くなると予想される。 + +06:30.450 --> 06:33.690 +だから、 それほど驚くような結果ではないが、 良い結果だ。 + +06:33.960 --> 06:41.550 +ええと、 もちろん、 数字は、 ええと、 ほんの少しずれているんだけど、 それはたぶん、 浮動小数点の丸め方のせいだと思う。 + +06:41.550 --> 06:43.980 +クロードの様子を見て、 確認しよう。 + +06:46.830 --> 06:50.460 +そして、 
クロードのCプラスプラスのコードを実行してみよう。 + +06:50.760 --> 06:53.520 +もちろん、 とてもよく似ている。 + +06:53.520 --> 06:58.350 +同じ番号、 同じように超高速。 + +06:58.500 --> 07:07.260 +その結果、 非常によく似た......C+++のコードが生成され、 それが同じマシンコードにコンパイルされているのだと思う。 + +07:07.530 --> 07:08.400 +分かった。 + +07:08.400 --> 07:14.850 +もう1つ、 次のビデオに移ってから、 ここを通してハードな問題に取り組むことになる。 + +07:14.850 --> 07:17.730 +ええと、 それで今日は終わりにしよう。 + +07:17.730 --> 07:19.620 +僕と同じように楽しんでもらえたら嬉しい。 + +07:19.650 --> 07:21.780 +これは素晴らしい小さな製品だ。 + +07:21.810 --> 07:22.770 +すぐに会おう。 diff --git a/week5/community-contributions/subtitles/srts/59295587/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295587/ko_KR.srt new file mode 100755 index 0000000..435b9a7 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295587/ko_KR.srt @@ -0,0 +1,325 @@ +WEBVTT + +00:00.980 --> 00:06.350 +파이썬에서 C++로 변환하는 간단한 사용자 인터페이스를 만들고 있었습니다 + +00:06.350 --> 00:09.290 +다양한 프론티어 모델을 이용해서요 + +00:09.290 --> 00:11.000 +그래서 제가 그랬죠 + +00:11.000 --> 00:15.380 +프로토타입 UI를 만들어 봅시다 그렇게 해 보죠 + +00:15.380 --> 00:22.340 +다시 Jupyter 노트북으로 돌아오면 메서드를 몇 개 더 쓸 거예요 + +00:22.340 --> 00:31.100 +먼저 실행 파이썬 을 코드로 이용합니다 이것과 비슷하게 약간 위험한 일을 하죠 + +00:31.130 --> 00:32.510 +코드를 실행하세요 + +00:32.510 --> 00:35.480 +이 프로토타입은 여러분을 위한 거예요 + +00:35.480 --> 00:40.550 +이건 공유가 안 돼요 다른 사람과 공유하면 파이썬 코드를 여러분 박스에서 직접 실행할 + +00:40.550 --> 00:42.200 +방법이 생기거든요 + +00:42.200 --> 00:43.370 +그건 나쁜 일이죠 + +00:43.370 --> 00:50.270 +이 특정 그래디오 앱에서 공유는 항상 거짓이어야 해요 + +00:50.360 --> 00:53.600 +하지만 작업하기에는 훌륭한 프로토타입이에요 + +00:53.960 --> 00:57.170 +이게 하는 일은 코드를 실행하는 거죠 + +00:57.170 --> 01:02.600 +주변에 잡동사니가 있는 이유는 파이썬 코드에서 나온 표준을 + +01:02.600 --> 01:09.840 +캡처해 반환하기 위해서입니다 코드를 취하고 실행하는 함수가 되고 결과물을 반환합니다 + +01:09.840 --> 01:15.030 +그러데이션에서 디스플레이하고 싶기 때문이죠 + +01:15.030 --> 01:17.700 +그게 실행 파이썬 이죠 + +01:17.910 --> 01:24.510 +이제 C++를 실행하기 위한 약간 더 무거운 코드입니다 코드에 따라서요 + +01:24.510 --> 01:28.830 +가장 먼저 할 일은 아까 만든 함수를 호출하는 겁니다 파일 + +01:28.830 --> 01:31.260 +최적화에 쓰기 위해서요 + +01:31.590 --> 01:39.330 +제가 하는 건 컴파일 명령 실행이 포함돼 있어요 + +01:39.360 
--> 01:46.350 +저는 subprocess.lon이라는 것을 사용합니다. 이것은 당신이 무언가 수행하기 위해 subprocess를 + +01:46.350 --> 01:48.180 +생성할 수 있어요. + +01:48.480 --> 01:49.920 +그게 제 일이죠 + +01:49.920 --> 01:53.370 +아까보다 약간 더 길어졌죠 + +01:53.370 --> 01:59.520 +아까 우리가 뭘 할 건지 언급했었죠 플래그를 모두 사용해 코드를 컴퓨터에 + +01:59.520 --> 02:02.430 +맞게 최적화하는 거예요 + +02:02.430 --> 02:06.750 +이런 걸 잘 아는 사람들에겐 아직 시작에 불과해요 + +02:06.760 --> 02:11.050 +추가할 수 있는 깃발이 정말 많아요 + +02:11.080 --> 02:16.000 +이 정도면 충분하다 싶어서 깃발을 많이 달았어요 Put it + +02:16.150 --> 02:28.720 +그런 다음 최적화된 프로그램을 실행하고 표준을 캡처해 그걸 반환하죠 + +02:28.750 --> 02:30.970 +오류가 있으면 표준 오류를 리턴하고요 + +02:31.210 --> 02:38.140 +C++ 코드를 실행하고 응답을 반환하는 방법이죠 + +02:38.140 --> 02:41.200 +컴파일과 실행이 더 정확하겠죠 + +02:41.470 --> 02:48.520 +여기 CSS 문자열이 있어요 왜냐하면 제가 하려는 건 약간의 색칠하기거든요 곧 보시겠지만요 + +02:48.850 --> 02:50.440 +비트 + +02:50.440 --> 02:53.740 +이건 사용자 인터페이스의 확장이고요 + +02:53.740 --> 02:55.240 +코딩은 이제 안 해요 + +02:55.240 --> 02:55.990 +그냥 이거요 + +02:55.990 --> 03:02.410 +짧게 말씀드릴게요 GR 블록을 사용하고 있어요 하위 레벨 Gadio API죠 사용자 + +03:02.410 --> 03:07.570 +인터페이스에서 더 유연성을 제공합니다 라디오의 기성품 UI를 + +03:07.600 --> 03:09.070 +쓰지 않고요 + +03:09.280 --> 03:12.770 +자신만의 CSS를 통과시킬 수 있어요 저도 그렇게 하고 있죠 + +03:12.800 --> 03:15.140 +CSS는 이 변수와 같아요 + +03:15.320 --> 03:18.740 +비트를 좀 더 맞춤형으로 바꿀 수 있어요 + +03:18.860 --> 03:22.100 +점무늬가 있어서 상단에 헤딩이 잘 나왔어요 + +03:22.100 --> 03:26.510 +그리고 위젯이 하나, 둘, 셋, 넷 다섯 줄이에요 + +03:26.690 --> 03:30.110 +파이썬 코드와 C++ 코드를 인식할 수 있어요 + +03:30.110 --> 03:35.000 +그게 제가 파이 코드로 설정하기 전에 갖고 있던 기본값이죠 + +03:35.510 --> 03:39.800 +그럼 아까처럼 GPT나 클로드를 뽑으러 내려가죠 + +03:40.040 --> 03:42.680 +코드 변환 버튼요 + +03:42.680 --> 03:45.050 +단추가 더 있어요 + +03:45.050 --> 03:51.770 +파이썬 을 실행하기 위한 CP+CP+CP를 실행하는 거죠 + +03:51.770 --> 03:58.580 +그리고 파이썬 아웃이 있습니다 파이썬의 CPOut CPOut C++ OUTPUT를 위해서요 + +03:58.610 --> 04:02.720 +잘 따라오셨길 바랍니다 사용자 인터페이스를 보시면 아주 명확하게 + +04:02.750 --> 04:03.650 +보일 거예요 + +04:03.650 --> 04:07.580 +그리고 다시 보면 정말 간단하다고 생각하실 거예요 + +04:08.000 --> 04:13.000 +버튼이 3개 있습니다 파이썬 실행과 C++ 실행이죠 + +04:13.000 --> 04:16.600 +버튼 3개를 누르면 어떻게 
되는지 물어봐야겠네요 + +04:16.630 --> 04:19.120 +변환 버튼을 누르면 아까와 똑같아요 + +04:19.120 --> 04:25.810 +메서드 함수 최적화를 실행하고 입력, 파이썬 코드와 모델을 전달하죠 + +04:25.840 --> 04:30.640 +결과물은 C++ 코드로 갑니다 변환을 누르면 파이썬 코드를 가져오는 거죠 + +04:30.640 --> 04:35.290 +모델을 이용해 변환하고 C++로 입력할 거예요. Tool karaoke, Tool karaoke. + +04:35.500 --> 04:42.910 +파이썬 실행 버튼을 누르면 파이썬 코드로 파이썬 아웃을 실행할 거예요 + +04:42.940 --> 04:49.270 +C++ 실행을 누르면 C++ 코드를 실행합니다 C++ 입력을 가지고요 + +04:49.300 --> 04:51.310 +C 플러스 출력요 + +04:51.610 --> 04:53.140 +그게 다예요 + +04:53.170 --> 05:00.850 +스파게티 같은 걸 만드는 것처럼 들렸다면 이제 곧 보게 될 걸 보면 아주 + +05:00.880 --> 05:04.510 +잘 풀리고 선명해질 거예요 + +05:04.780 --> 05:06.520 +이걸 실행하죠 + +05:06.550 --> 05:08.470 +이게 사용자 인터페이스예요 + +05:08.470 --> 05:11.080 +다채로운 사용자 인터페이스에 잘 오셨어요 + +05:11.290 --> 05:14.830 +파이썬 코드가 있고 C++ 코드가 있네요 + +05:14.830 --> 05:20.600 +모델 변환 코드를 선택해야 합니다 파이썬을 실행하고 get C++를 실행할 수 있어요 + +05:20.600 --> 05:22.100 +그럼 시작하죠 + +05:22.100 --> 05:25.640 +이 코드를 실행하려면 파이썬 실행을 눌러요 + +05:25.790 --> 05:28.520 +이 사용자 인터페이스에서 바로요 + +05:28.550 --> 05:31.460 +지금 바로 그 메서드를 호출하고 있어요 + +05:31.460 --> 05:33.530 +그 결과가 여기 있어요 + +05:33.680 --> 05:35.060 +저기 있네요 + +05:35.210 --> 05:37.640 +파이가 있긴 하죠 + +05:37.640 --> 05:39.890 +8시간 걸렸어요 4초요 + +05:40.070 --> 05:45.950 +이제 이걸 C 플러스로 변환할 거예요 + +05:45.950 --> 05:46.670 +더 있어요 + +05:46.820 --> 05:47.930 +여기 있네요 + +05:47.960 --> 05:50.930 +이제 C 플러스 실행할 수 있어요 + +05:51.800 --> 05:55.550 +C 플러스도 있네요 + +05:55.550 --> 06:00.890 +아까보다 훨씬 빨라진 걸 보실 수 있어요 제 기억이 맞는다면 0 정도였죠 2초 남았어요 + +06:00.920 --> 06:08.090 +이 코드를 최적화하는 추가 플래그가 있기 때문이죠 + +06:08.120 --> 06:15.410 +두 가지 구현 간격이 100배 이상 빠른 걸 보실 수 있어요 + +06:15.410 --> 06:24.270 +그리고 일종의 반복되는 수학 수학 코드는 일반적으로 컴파일된 C++가 이것처럼 기본적으로 + +06:24.300 --> 06:30.450 +컴파일된다면 적어도 100배는 더 빠를 거예요. 
+ +06:30.450 --> 06:33.690 +놀랍지 않은 건 아니지만 좋은 결과예요 + +06:33.960 --> 06:39.420 +물론 비트는 아주 조금 빗나갔는데 아마도 플로팅 포인트 회전이 + +06:39.420 --> 06:41.550 +원인일 거예요 + +06:41.550 --> 06:43.980 +클로드가 어떻게 하는지 보고 확인해 보죠 + +06:46.830 --> 06:50.460 +클로드의 C 플러스 플러스 코드를 실행해 보죠 + +06:50.760 --> 06:53.520 +물론 아주 비슷하죠 + +06:53.520 --> 06:58.350 +같은 숫자지만 마찬가지로 엄청나게 빠르죠 + +06:58.500 --> 07:05.010 +그리고 아주 유사한 C 플러스 플러스 코드를 생성합니다 제가 기대하는 동일한 머신 + +07:05.040 --> 07:07.260 +코드에 컴파일되는 것이죠 + +07:07.530 --> 07:08.400 +좋아요 + +07:08.400 --> 07:14.850 +한 번만 더 다음 영상으로 넘기고 어려운 문제를 해결하러 달려갈게요 + +07:14.850 --> 07:17.730 +오늘은 여기까지 하죠 오늘은 여기까지예요 + +07:17.730 --> 07:19.620 +여러분도 저만큼 즐거우셨길 바라요 + +07:19.650 --> 07:21.780 +이건 정말 좋은 제품이에요 + +07:21.810 --> 07:22.770 +이따 봐요 diff --git a/week5/community-contributions/subtitles/srts/59295599/en_US.srt b/week5/community-contributions/subtitles/srts/59295599/en_US.srt new file mode 100755 index 0000000..efd6cf6 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295599/en_US.srt @@ -0,0 +1,319 @@ +WEBVTT + +00:00.980 --> 00:04.850 +Welcome to the Jupyter Lab for day four. + +00:04.880 --> 00:10.820 +It's going to look very familiar because it's actually I've started with a duplicate of day three. + +00:11.030 --> 00:14.720 +And in this duplicate there's a few cells. + +00:14.720 --> 00:19.880 +We're going to want to rerun the imports at the beginning connecting our environment. + +00:19.880 --> 00:22.340 +We're going to want to connect to our frontier models. + +00:22.340 --> 00:23.630 +Because guess what. + +00:23.630 --> 00:28.520 +We're going to be able to get the frontier models to work side by side with open source models, which + +00:28.520 --> 00:29.900 +is going to be terrific. + +00:30.260 --> 00:37.340 +Uh, we'll get our prompts, our messages function, if you remember that something that can write to + +00:37.490 --> 00:39.530 +output C plus plus file. 
+ +00:39.920 --> 00:43.880 +Um, this I don't think we need to run because it's going to be overwritten. + +00:43.880 --> 00:49.850 +But then our code for our Pi program, we don't need to try it out because you'll remember that. + +00:49.850 --> 00:59.750 +But we will get our code for the hard program that looks for subarrays of arrays with the largest cumulative + +00:59.750 --> 01:00.680 +number. + +01:01.360 --> 01:07.870 +Um, and we do need this stream, GPT function. + +01:07.870 --> 01:10.600 +And we need this stream, Claude function. + +01:10.600 --> 01:14.710 +And we need this optimize function, although we're about to overwrite it. + +01:14.710 --> 01:22.270 +And you'll remember the gorgeous gradient functions will take execute Python and execute C plus plus, + +01:22.270 --> 01:28.180 +which, uh, I hope you remember that this is something which is not to be shared with others. + +01:28.180 --> 01:33.280 +You don't want to allow people to have the ability to execute arbitrary Python and C plus plus code + +01:33.310 --> 01:34.360 +on your box. + +01:34.360 --> 01:35.650 +That's not a good idea. + +01:35.650 --> 01:40.960 +So keep the Gradio interface to yourself and don't say share equals true for that. + +01:40.960 --> 01:44.170 +And then some CSS to make it look fancy. + +01:44.200 --> 01:45.430 +All right. + +01:45.430 --> 01:47.770 +And now we get to our new code. + +01:47.770 --> 01:50.710 +And it is going to be extremely simple. + +01:50.710 --> 01:52.780 +We start with some imports. + +01:52.780 --> 01:58.420 +We're importing from Huggingface hub, the login function you've seen before, but also a class called + +01:58.420 --> 01:59.860 +inference client. + +02:00.280 --> 02:03.210 +And we're importing the the tokenizer. + +02:03.810 --> 02:05.790 +We're going to log in to hugging face. + +02:05.790 --> 02:06.780 +There we go. + +02:06.780 --> 02:09.030 +And now just some constants. 
+ +02:09.150 --> 02:15.000 +Code is this is the name of the code model I've also got here code Gemma in case you want to give it + +02:15.000 --> 02:17.040 +a try and see if you have more success than me. + +02:17.070 --> 02:22.200 +This is the URL of my endpoint, so it's just taken from my endpoint page. + +02:22.200 --> 02:24.540 +I can just take this this URL. + +02:24.540 --> 02:29.040 +That is where my endpoint is running right now and costing me $0.80 an hour. + +02:29.040 --> 02:35.250 +And this is code Gemma that is sadly paused as it was not particularly stable. + +02:35.640 --> 02:38.370 +Okay, so let's give this a try. + +02:38.400 --> 02:45.090 +So remember tokenizer you can create using the auto tokenizer class and say Frompretrained pass in the + +02:45.090 --> 02:49.020 +name of your model and you will get the right tokenizer for that model. + +02:49.020 --> 02:51.390 +And then I'm going to create this. + +02:51.390 --> 02:57.540 +This is the function that we wrote before that's going to create the nice structured, um list of dictionaries + +02:57.540 --> 03:03.380 +to ask for the simple function for pi to be rewritten in C plus plus. + +03:03.380 --> 03:09.980 +And then we're going to use the Tokenizers apply chat template to turn that into text. + +03:10.010 --> 03:12.110 +Let's just take a look at what I'm talking about here. + +03:12.110 --> 03:15.650 +So if we remember to run that and then run this. + +03:16.250 --> 03:19.250 +So first of all remember what messages is. + +03:19.250 --> 03:22.490 +Messages is a list with two elements. + +03:22.490 --> 03:24.650 +One of them has a system role. + +03:24.650 --> 03:25.760 +And that's the content. + +03:25.790 --> 03:29.180 +You were insistent that re-implements Python code blah blah blah. + +03:29.900 --> 03:34.850 +And then user uh, and then the user method. 
+ +03:34.880 --> 03:41.810 +Now I should mention here just to in the spirit of being completely open, that I've added in a little + +03:41.810 --> 03:48.590 +hint here, keep implementations of random number generators identical so that match, uh, results + +03:48.590 --> 03:49.940 +match exactly. + +03:50.060 --> 03:57.410 +Uh, so as you can imagine, uh, I've had to add hints here and there just to keep things on track. + +03:57.470 --> 04:00.830 +And you will see the results of that in a moment. + +04:00.880 --> 04:04.240 +So this is, of course, the messages that you're familiar with. + +04:04.240 --> 04:11.710 +And now, if I show you what what text became after we applied the chat template, this if I print it, + +04:11.740 --> 04:13.240 +it will be spaced out a bit better. + +04:13.270 --> 04:14.590 +Let's see it printed. + +04:14.950 --> 04:16.360 +Here we go. + +04:16.510 --> 04:17.440 +Have a look at this. + +04:17.440 --> 04:24.490 +So there is a special token I am start and then system and then the system message. + +04:24.490 --> 04:26.080 +And then the same thing with user. + +04:26.080 --> 04:27.970 +And then this is the code. + +04:27.970 --> 04:30.700 +And then it ends with this little prompt. + +04:30.700 --> 04:37.330 +That means that the LM is just going to keep going and give the assistance response. + +04:37.540 --> 04:46.090 +So with that now you would imagine that making the call to a hugging face endpoint, making this kind + +04:46.090 --> 04:51.430 +of message to an open source model and getting back the response might be really difficult. + +04:51.730 --> 04:52.840 +And it's not. + +04:52.840 --> 04:55.390 +It's one of those things that's just a couple of lines of code. + +04:55.390 --> 04:56.050 +Here it is. + +04:56.050 --> 04:57.490 +It's as simple as this. 
+ +04:57.580 --> 05:04.020 +Uh, you create an object, the inference client and you tell it the URL of your endpoint and your hugging + +05:04.050 --> 05:09.150 +face token, and then you just say client dot text generation. + +05:09.150 --> 05:13.320 +You pass in your text, you say, in this case I want to stream back results. + +05:13.320 --> 05:15.780 +If you didn't have that, this would be even even shorter. + +05:15.780 --> 05:18.030 +But it's nice to see the results streaming back. + +05:18.330 --> 05:24.030 +Um, and then max new tokens, um, is where you tell it that it can keep going for a little while. + +05:24.270 --> 05:26.790 +Uh, and let's see what happens. + +05:28.080 --> 05:36.270 +We're getting streaming back from my endpoint, running in hugging face, the result of the, uh, of + +05:36.270 --> 05:38.610 +of this conversion. + +05:40.650 --> 05:44.580 +Now, one thing I will point out while it's doing this, hopefully you're recognizing this code and + +05:44.580 --> 05:50.940 +seeing that it's that it looks very legit, very solid conversion of py, uh, code from Python into + +05:50.940 --> 05:51.300 +C plus. + +05:51.300 --> 05:57.000 +Plus, uh, it has given this, this stuff at the beginning about C plus plus code, you know, giving + +05:57.000 --> 05:57.990 +a bit of an explanation. + +05:57.990 --> 06:01.850 +And it's also ending with a bunch of text about why it did what it did. + +06:01.880 --> 06:07.070 +And this is despite the fact that my system message insists that it shouldn't do that. + +06:07.190 --> 06:13.730 +Um, it says, uh, do not provide any explanation, but I was not able, through the system message + +06:13.730 --> 06:15.980 +to get it not to provide an explanation. + +06:16.010 --> 06:20.420 +Although, as you remember, GPT four and Claude do obey the instructions. 
+ +06:20.420 --> 06:25.610 +So that's something I imagine you could spend more time working on that prompt to really try and make + +06:25.640 --> 06:26.900 +sure that it doesn't happen. + +06:26.900 --> 06:32.360 +But failing that, of course, it's very easy to do some post-processing and strip out the everything + +06:32.360 --> 06:39.800 +that comes up till the, uh, the start of the C plus plus code and up until the, um, so you could + +06:39.800 --> 06:45.890 +strip out everything up until this symbol here and take what comes between that and here. + +06:46.010 --> 06:49.850 +So it'd be pretty easy to do that with some post-processing. + +06:50.270 --> 06:51.170 +All right. + +06:51.170 --> 06:55.310 +Anyway, we've seen how easy it is to run inference. + +06:55.460 --> 07:00.620 +Uh, I will see you next time when we will put this together in a UI and give it a whirl. + +07:00.650 --> 07:01.520 +See you then. diff --git a/week5/community-contributions/subtitles/srts/59295599/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295599/ja_JP.srt new file mode 100755 index 0000000..9b0027c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295599/ja_JP.srt @@ -0,0 +1,277 @@ +WEBVTT + +00:00.980 --> 00:04.850 +4日目のJupyter Labへようこそ。 + +00:04.880 --> 00:10.820 +とても見慣れたものに見えるだろうが、 実は3日目と重複して始めているからだ。 + +00:11.030 --> 00:14.720 +そしてこの複製にはいくつかの細胞がある。 + +00:14.720 --> 00:19.880 +インポートを再実行し、 環境をつなげたい。 + +00:19.880 --> 00:22.340 +フロンティアモデルにつなげたい。 + +00:22.340 --> 00:23.630 +だってそうだろう。 + +00:23.630 --> 00:29.900 +フロンティア・モデルとオープンソース・モデルを共存させることができる。 + +00:30.260 --> 00:39.530 +プロンプトやメッセージ関数は、 Cプラス・プラス・ファイルに書き出すことができる。 + +00:39.920 --> 00:43.880 +ええと、 これは上書きされるから実行する必要はないと思う。 + +00:43.880 --> 00:49.850 +でも、 Piプログラムのコードは、 覚えているだろうから、 試す必要はない。 + +00:49.850 --> 01:00.680 +しかし、 最大の累積数を持つ配列の部分配列を探すハード・プログラムのコードは得られるだろう。 + +01:01.360 --> 01:07.870 +そして、 このストリーム、 GPT機能が必要なんだ。 + +01:07.870 --> 01:10.600 +そして、 このストリーム、 クロード機能が必要なのだ。 + +01:10.600 --> 01:14.710 +そして、 
これから上書きすることになるが、 このオプティマイズ関数が必要なのだ。 + +01:14.710 --> 01:22.270 +そして、 ゴージャスなグラデーション関数は、 Pythonを実行し、 + +01:22.270 --> 01:28.180 +Cプラスプラスを実行することを覚えているだろう。 + +01:28.180 --> 01:34.360 +PythonやCプラスアルファの任意のコードを実行させることを許したくはないだろう。 + +01:34.360 --> 01:35.650 +それはよくない。 + +01:35.650 --> 01:40.960 +だから、 グラディオのインターフェイスは自分だけのものにしておき、 それについてはシェア=トゥルーなんて言わないことだ。 + +01:40.960 --> 01:44.170 +そして、 CSSで見栄えを良くする。 + +01:44.200 --> 01:45.430 +分かった。 + +01:45.430 --> 01:47.770 +そして、 新しいコードに取りかかる。 + +01:47.770 --> 01:50.710 +そして、 それは極めてシンプルなものになるだろう。 + +01:50.710 --> 01:52.780 +まずは輸入品から。 + +01:52.780 --> 01:59.860 +Huggingface hubからインポートしているのは、 前に見たログイン関数と、 推論クライアントというクラスだ。 + +02:00.280 --> 02:03.210 +そしてトークナイザーをインポートする。 + +02:03.810 --> 02:05.790 +ハグ顔にログインするんだ。 + +02:05.790 --> 02:06.780 +これでよし。 + +02:06.780 --> 02:09.030 +そして今度は、 いくつかの定数だ。 + +02:09.150 --> 02:17.040 +コードとは、 このコードモデルの名前である。 + +02:17.070 --> 02:22.200 +これは私のエンドポイントのURLなので、 エンドポイントのページから取ってきただけです。 + +02:22.200 --> 02:24.540 +このURLを持っていけばいいんだ。 + +02:24.540 --> 02:29.040 +今、 私のエンドポイントはそこで動いていて、 コストは0ドルだ。 時給80円。 + +02:29.040 --> 02:35.250 +そしてこれは、 特に安定していなかったため、 惜しまれつつも一時停止しているコード・ジェンマだ。 + +02:35.640 --> 02:38.370 +オーケー、 ではこれを試してみよう。 + +02:38.400 --> 02:45.090 +そこで、 オートトークナイザークラスを使用して作成できるトークナイザーを覚えておき、 Frompretrainedと言い、 モデルの名前を渡すと、 + +02:45.090 --> 02:49.020 +そのモデルに適したトークナイザーが表示されます。 + +02:49.020 --> 02:51.390 +そして、 これを作るんだ。 + +02:51.390 --> 03:03.380 +これは前に書いた関数で、 構造化された辞書のリストを作成し、 円周率を表す単純な関数をC+++で書き換えるよう求めるものだ。 + +03:03.380 --> 03:09.980 +そして、 Tokenizersのapply chat templateを使ってテキストに変換します。 + +03:10.010 --> 03:12.110 +ここで私が話していることを見てみよう。 + +03:12.110 --> 03:15.650 +だから、 それを忘れずに実行し、 次にこれを実行すればいい。 + +03:16.250 --> 03:19.250 +だからまず、 メッセージとは何かを思い出してほしい。 + +03:19.250 --> 03:22.490 +メッセージは2つの要素を持つリストである。 + +03:22.490 --> 03:24.650 +そのうちの1人はシステムの役割を担っている。 + +03:24.650 --> 03:25.760 +それが内容だ。 + +03:25.790 --> 03:29.180 +あなたはPythonのコードを再インプリメントすることに固執していた。 + +03:29.900 --> 03:34.850 +そしてユーザー、 ユーザーメソッド。 + +03:34.880 --> 03:41.810 +ここで、 
完全にオープンであることの精神から、 乱数ジェネレーターの実装を同一にし、 + +03:41.810 --> 03:49.940 +結果が正確に一致するようにする、 というちょっとしたヒントを付け加えておく。 + +03:50.060 --> 03:57.410 +想像できるように、 あちこちにヒントを付け加えなければならない。 + +03:57.470 --> 04:00.830 +その結果はすぐにわかるだろう。 + +04:00.880 --> 04:04.240 +これはもちろん、 皆さんがよく知っているメッセージだ。 + +04:04.240 --> 04:11.710 +そして、 チャットテンプレートを適用した後、 どのようなテキストになったかをお見せすると、 これを印刷すると、 + +04:11.740 --> 04:13.240 +少し間隔が空きます。 + +04:13.270 --> 04:14.590 +印刷されるのを見よう。 + +04:14.950 --> 04:16.360 +さあ、 始めよう。 + +04:16.510 --> 04:17.440 +これを見てほしい。 + +04:17.440 --> 04:24.490 +だから、 特別なトークンがあって、 それからシステム、 それからシステム・メッセージがあるんだ。 + +04:24.490 --> 04:26.080 +そしてユーザーも同じだ。 + +04:26.080 --> 04:27.970 +そしてこれがコードだ。 + +04:27.970 --> 04:30.700 +そして、 この小さなプロンプトで終わる。 + +04:30.700 --> 04:37.330 +つまり、 LMはそのままアシスタンス対応を続けるということだ。 + +04:37.540 --> 04:46.090 +そうなると、 ハグする顔のエンドポイントに電話をかけ、 オープンソースモデルにこの種のメッセージを送り、 + +04:46.090 --> 04:51.430 +レスポンスを返すのは本当に難しいことだと想像できるだろう。 + +04:51.730 --> 04:52.840 +そして、 そうではない。 + +04:52.840 --> 04:55.390 +ほんの2、 3行のコードでできることだ。 + +04:55.390 --> 04:56.050 +これだ。 + +04:56.050 --> 04:57.490 +簡単なことだ。 + +04:57.580 --> 05:04.020 +推論クライアントというオブジェクトを作り、 エンドポイントのURLとハグする顔のトークンを伝え、 + +05:04.050 --> 05:09.150 +クライアント・ドット・テキスト生成と言うだけだ。 + +05:09.150 --> 05:13.320 +テキストを渡して、 この場合は結果をストリームバックしたい、 と言う。 + +05:13.320 --> 05:15.780 +それがなかったら、 これはもっと短くなる。 + +05:15.780 --> 05:18.030 +でも、 結果がどんどん戻ってくるのはうれしい。 + +05:18.330 --> 05:24.030 +そして、 新しいトークンを最大まで増やすことで、 少しの間なら続けられると伝えるんだ。 + +05:24.270 --> 05:26.790 +どうなるかな? 
+ +05:28.080 --> 05:38.610 +この変換の結果、 私のエンドポイントからストリーミングが戻ってきている。 + +05:40.650 --> 05:44.580 +このコードを認識し、 PythonからCプラスへのコードの変換がとても合法的で、 + +05:44.580 --> 05:51.300 +とてもしっかりしたものであることを確認してほしい。 + +05:51.300 --> 05:57.990 +それに、 冒頭でC++コードについて少し説明しているんだ。 + +05:57.990 --> 06:01.850 +そして、 なぜそのようなことをしたのかというたくさんの文章で締めくくられている。 + +06:01.880 --> 06:07.070 +私のシステムメッセージは、 そんなことをしてはいけないと主張しているにもかかわらず、 だ。 + +06:07.190 --> 06:15.980 +ええと、 説明をしないでくださいと書いてありますが、 システムメッセージで説明をしないようにすることはできませんでした。 + +06:16.010 --> 06:20.420 +とはいえ、 GPTの4番とクロードは指示に従っている。 + +06:20.420 --> 06:26.900 +だから、 そうならないようにするために、 もっと時間をかけてプロンプトに取り組むことができるだろうね。 + +06:26.900 --> 06:32.360 +しかし、 そうでない場合は、 もちろん、 後処理をするのはとても簡単で、 + +06:32.360 --> 06:45.890 +Cプラスプラスコードの始まりから、 その記号までのすべてを取り除くことができる。 + +06:46.010 --> 06:49.850 +だから、 後処理でそれをやるのはかなり簡単だろう。 + +06:50.270 --> 06:51.170 +分かった。 + +06:51.170 --> 06:55.310 +とにかく、 推論を実行するのがいかに簡単であるかはわかった。 + +06:55.460 --> 07:00.620 +ええと、 また次回、 UIにこれを組み込んでやってみよう。 + +07:00.650 --> 07:01.520 +ではまた diff --git a/week5/community-contributions/subtitles/srts/59295599/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295599/ko_KR.srt new file mode 100755 index 0000000..6cca1a2 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295599/ko_KR.srt @@ -0,0 +1,310 @@ +WEBVTT + +00:00.980 --> 00:04.850 +주피터 연구소의 4일째 날이죠 + +00:04.880 --> 00:10.820 +아주 익숙해 보일 거예요 왜냐하면 사실 3일째의 복제로 시작했거든요 + +00:11.030 --> 00:14.720 +이 복제본에는 셀이 몇 개 있어요 + +00:14.720 --> 00:19.880 +환경을 연결하는 시작에서 가져오기를 다시 실행해야죠 + +00:19.880 --> 00:22.340 +개척 시대 모델과 교감하고 싶어요 + +00:22.340 --> 00:23.630 +왜인 줄 알아요? + +00:23.630 --> 00:28.520 +개척 모델들을 오픈 소스 모델과 나란히 작동시킬 수 있게 될 겁니다 아주 근사하겠죠 get the + +00:28.520 --> 00:29.900 +new-file + +00:30.260 --> 00:37.340 +프롬프트, 메시지 함수도 있습니다. C++ 파일을 OUTPUT하여 쓸 수 있는 것을 + +00:37.490 --> 00:39.530 +기억하신다면요. 
+ +00:39.920 --> 00:43.880 +이건 중복 기재될 테니 실행할 필요 없을 것 같아요 + +00:43.880 --> 00:49.850 +파이 프로그램 코드는 외우실 테니 안 해 봐도 돼요 + +00:49.850 --> 01:00.680 +근데 가장 누적된 수에 대한 배열을 찾는 하드 프로그램을 위한 코드를 얻게 될 거예요 + +01:01.360 --> 01:07.870 +스트림과 GPT 함수도 필요해요 + +01:07.870 --> 01:10.600 +이 개울이 필요해요 클로드 함수요 + +01:10.600 --> 01:14.710 +덮어쓰기 직전이지만 최적화 함수가 필요해요 + +01:14.710 --> 01:22.270 +이 멋진 그러데이션 함수는 파이썬 을 실행하고 C++를 실행합니다 다른 + +01:22.270 --> 01:28.180 +이들과 공유하지 않는다는 걸 기억하시길 바라요 + +01:28.180 --> 01:33.280 +파이썬 + C+ 코드를 임의로 실행할 수 있도록 허용해서는 + +01:33.310 --> 01:34.360 +안 되죠 + +01:34.360 --> 01:35.650 +좋은 생각이 아니에요 + +01:35.650 --> 01:40.960 +그러니 그래디오 인터페이스는 혼자만 알고 공유 = true라고 하지 마세요 + +01:40.960 --> 01:44.170 +CSS 덕분에 근사해 보이죠 + +01:44.200 --> 01:45.430 +좋아요 + +01:45.430 --> 01:47.770 +Get in get 새로운 코드예요 + +01:47.770 --> 01:50.710 +아주 간단한 거예요 + +01:50.710 --> 01:52.780 +수입품부터 시작하죠 + +01:52.780 --> 01:58.420 +휴깅페이스 허브에서 불러오고 있어요 로그인 함수죠 인퍼런스 클라이언트라는 + +01:58.420 --> 01:59.860 +클래스이기도 해요 + +02:00.280 --> 02:03.210 +토큰라이저를 불러올 거예요 + +02:03.810 --> 02:05.790 +얼굴 포옹에 로그인할게요 + +02:05.790 --> 02:06.780 +됐어요 + +02:06.780 --> 02:09.030 +이제 변함없는 것들만 남았어요 + +02:09.150 --> 02:15.000 +코드 모델의 이름이죠 여기 code젬마도 있어요 시도해보시고 저보다 + +02:15.000 --> 02:17.040 +성공했는지 보시라고요 + +02:17.070 --> 02:22.200 +이건 제 엔드포인트 URL이에요 제 엔드포인트 페이지에서 가져온 거죠 + +02:22.200 --> 02:24.540 +이 URL을 가져갈게요 + +02:24.540 --> 02:29.040 +지금 제 엔드포인트가 실행되는 곳이죠 0달러가 들었어요 시간당 80달러요 + +02:29.040 --> 02:35.250 +이건 코드 젬마인데 안타깝게도 일시 중단됐어요 안정적이지 않아서요 + +02:35.640 --> 02:38.370 +좋아요, 그럼 해 보죠 + +02:38.400 --> 02:45.090 +토큰마이저 기억하세요 오토 토큰마이저 클래스로 뭔가를 만들고 모델의 이름을 학습된 패스라고 + +02:45.090 --> 02:49.020 +하면 해당 모델에 맞는 토큰마이저를 얻게 돼요 + +02:49.020 --> 02:51.390 +이제 이걸 만들게요 + +02:51.390 --> 02:57.540 +이건 우리가 전에 쓴 함수인데 구조화된 사전 목록을 만들어 파이가 + +02:57.540 --> 03:03.380 +C++로 다시 쓰이는 간단한 함수를 요청할 거예요 + +03:03.380 --> 03:09.980 +그런 다음 토큰라이저를 이용해 채팅 템플릿을 적용해 텍스트로 바꿀 거예요 + +03:10.010 --> 03:12.110 +제가 무슨 말을 하는지 한번 보죠 + +03:12.110 --> 03:15.650 +저걸 실행하고 이걸 실행하는 걸 기억한다면요 + +03:16.250 --> 
03:19.250 +먼저 메시지가 뭔지 기억하세요 + +03:19.250 --> 03:22.490 +메시지란 두 가지 요소로 된 목록이에요 + +03:22.490 --> 03:24.650 +그중 하나는 시스템 역할이에요 + +03:24.650 --> 03:25.760 +그게 내용이에요 + +03:25.790 --> 03:29.180 +파이썬 + 코드 어쩌고저쩌고를 다시 구현하라고 했죠 + +03:29.900 --> 03:34.850 +그리고 사용자 메서드가 있죠 + +03:34.880 --> 03:41.810 +완전히 열려 있다는 의미에서 여기서 언급해야 할 게 있어요 약간의 힌트를 추가했어요 + +03:41.810 --> 03:49.940 +난수 발생기의 구현을 동일하게 유지하세요 그래야 결과가 정확히 일치하니까요 + +03:50.060 --> 03:57.410 +짐작하시겠지만 진행을 유지하기 위해 여기저기 힌트를 추가해야 했어요 + +03:57.470 --> 04:00.830 +그 결과는 잠시 후에 보실 수 있어요 + +04:00.880 --> 04:04.240 +이게 여러분이 익숙한 메시지예요 + +04:04.240 --> 04:11.710 +이제 채팅 템플릿을 적용한 후 텍스트가 어떻게 됐는지 보여드릴게요 이걸 프린트하면 간격이 좀 더 길어질 + +04:11.740 --> 04:13.240 +거예요 비트 템플릿 + +04:13.270 --> 04:14.590 +인쇄해 봐요 + +04:14.950 --> 04:16.360 +시작할게요 + +04:16.510 --> 04:17.440 +이걸 보세요 + +04:17.440 --> 04:24.490 +특별한 토큰이 있어요 시작할게요, 다음은 시스템 그다음은 시스템 메시지죠 + +04:24.490 --> 04:26.080 +사용자에게도 같은 거죠 + +04:26.080 --> 04:27.970 +이게 코드예요 + +04:27.970 --> 04:30.700 +이 프롬프트로 끝나죠 + +04:30.700 --> 04:37.330 +달 착륙선은 계속 가서 지원 응답을 할 거예요 + +04:37.540 --> 04:46.090 +이제 여러분은 포옹하는 얼굴 엔드포인트에 전화를 걸고 오픈 소스 모델에 이런 종류의 메시지를 + +04:46.090 --> 04:51.430 +만들고 응답을 받는 게 아주 어려울 거라고 생각하겠죠 + +04:51.730 --> 04:52.840 +그렇지 않아요 + +04:52.840 --> 04:55.390 +코드 두어 줄이면 되는 그런 것 중 하나죠 + +04:55.390 --> 04:56.050 +여기 있네요 + +04:56.050 --> 04:57.490 +아주 간단해요 + +04:57.580 --> 05:04.020 +객체를 생성합니다 inference 클라이언트요 그리고 엔드포인트 URL과 포옹하는 얼굴 + +05:04.050 --> 05:09.150 +토큰을 알려줍니다 그런 다음 클라이언트 .text 생성이라고 하죠 + +05:09.150 --> 05:13.320 +메시지를 전달하는 거죠 이 경우엔 결과를 스트리밍하고 싶어요 + +05:13.320 --> 05:15.780 +그게 없으면 더 짧아져요 + +05:15.780 --> 05:18.030 +하지만 결과가 다시 흘러오는 걸 보니 좋네요 + +05:18.330 --> 05:24.030 +최대 새 토큰은 한동안 계속될 수 있다고 알려주는 거예요 + +05:24.270 --> 05:26.790 +어떻게 되는지 보죠 + +05:28.080 --> 05:36.270 +제 엔드포인트에서 스트리밍을 하고 있습니다 얼굴을 안으며 달리고 있습니다 + +05:36.270 --> 05:38.610 +이 변환의 결과죠 + +05:40.650 --> 05:44.580 +이 작업을 하는 동안 한 가지 지적하고 싶은 것은 여러분이 이 코드를 + +05:44.580 --> 05:51.300 +인식하고 계시길 바랍니다 아주 합법적으로 보이죠 파이썬 코드를 C+로 아주 탄탄하게 변환했어요 + +05:51.300 
--> 05:57.000 +비트는 처음에 C 플러스 플러스 코드를 설명하고 + +05:57.000 --> 05:57.990 +있어요 + +05:57.990 --> 06:01.850 +왜 그렇게 했는지에 대한 문자로 끝나기도 하고요 + +06:01.880 --> 06:07.070 +내 시스템 메시지는 그러면 안 된다고 주장하는데도요 + +06:07.190 --> 06:13.730 +어떤 설명도 하지 말라고 돼 있는데 시스템 메시지를 통해 설명을 하지 말라는 메시지를 + +06:13.730 --> 06:15.980 +받을 수 없었어요 get it + +06:16.010 --> 06:20.420 +기억하시겠지만 GPT 4와 클로드는 지시를 잘 따르죠 + +06:20.420 --> 06:25.610 +프롬프트에서 시간을 더 들여 작업하면 그런 일이 일어나지 않도록 할 수 + +06:25.640 --> 06:26.900 +있을 것 같아요 + +06:26.900 --> 06:32.360 +하지만 그게 실패하면 post processing을 + +06:32.360 --> 06:39.800 +해서 C 플러스 플러스 코드 시작까지 모든 걸 제거하는 게 아주 쉽죠 이 기호까지 + +06:39.800 --> 06:45.890 +모든 걸 제거하고 이와 이 사이에 있는 걸 취할 수 있어요 + +06:46.010 --> 06:49.850 +post processing을 하면 쉽게 할 수 있어요. + +06:50.270 --> 06:51.170 +좋아요 + +06:51.170 --> 06:55.310 +추론을 실행하는 게 얼마나 쉬운지 보셨죠 + +06:55.460 --> 07:00.620 +UI 상에서 함께 조립하고 실행할 때 다시 뵙죠. TMI TMI UI + +07:00.650 --> 07:01.520 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59295601/en_US.srt b/week5/community-contributions/subtitles/srts/59295601/en_US.srt new file mode 100755 index 0000000..244754b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295601/en_US.srt @@ -0,0 +1,352 @@ +WEBVTT + +00:00.380 --> 00:07.730 +So it's time to continue our journey into the world of open source and understand which models we should + +00:07.730 --> 00:13.460 +pick to investigate the ability to generate open source code. + +00:13.460 --> 00:14.420 +Where do we begin? + +00:14.420 --> 00:17.780 +We begin, of course, with the big code models leaderboard. + +00:17.780 --> 00:23.300 +This is the leaderboard running as a hugging face space that we looked at before, and it's the one + +00:23.300 --> 00:28.820 +that compares the performance of open source models in code generation tasks. 
+ +00:29.270 --> 00:34.370 +You remember we started by just looking at base models, and we can see some of the base models that + +00:34.370 --> 00:39.530 +are doing really well, including including Code Llama, Code Kwan and Deep Sikh Coder. + +00:39.770 --> 00:44.540 +Um, but we're going to look at all models because we're interested in models that have been fine tuned + +00:44.690 --> 00:51.110 +because they really do perform much, much better than some of the base models when they've been trained + +00:51.140 --> 00:55.370 +specifically for, uh, for tasks such as C plus plus code. + +00:55.520 --> 01:01.820 +Um, and we're going to be looking at Python and C plus plus test ability because that is, after all, + +01:01.820 --> 01:03.080 +what we're going to be doing. + +01:03.440 --> 01:09.770 +And when we look at these, one thing to note is that the ones that say exit here, their benchmark, + +01:09.890 --> 01:12.350 +some of their scoring has happened externally. + +01:12.350 --> 01:17.720 +So I'm personally going to tend to lean towards ones that don't have that symbol, that are the ones + +01:17.720 --> 01:25.790 +that have been benchmarked as part of the hugging face, uh, spaces platform and the the one that is + +01:25.790 --> 01:33.050 +top of the list here is code Kwan 1.57 billion, right up at the top. + +01:33.050 --> 01:35.090 +The code variant of Kwan. + +01:35.090 --> 01:40.850 +It's been trained for chat, which means that you can interact with it rather than just, for example, + +01:40.850 --> 01:43.670 +giving it some Python code and expecting it to complete. + +01:43.670 --> 01:50.510 +You can have a chat with it, such as saying, hey, could you convert this Python code to highly optimized + +01:50.510 --> 01:50.870 +C plus? + +01:50.870 --> 01:51.500 +Plus? + +01:51.500 --> 01:53.150 +Let's go in and have a look. + +01:53.630 --> 02:00.710 +We're now looking at the code Kwan 1.57 billion chat the top model. 
+ +02:00.740 --> 02:04.250 +And you can see here it describes some of what it can do. + +02:04.400 --> 02:07.310 +Um and some of how you can interact with it. + +02:07.340 --> 02:11.270 +Like you can say, write a quicksort algorithm in Python. + +02:11.450 --> 02:18.080 +Um, and you can see that you can construct messages in much the way that we are used to, and then + +02:18.110 --> 02:25.070 +use the tokenizer apply chat template to convert that in tokens and run that through the model. + +02:25.070 --> 02:31.580 +So this is all very consistent with the way that we ran a number of models for other simple activities, + +02:31.580 --> 02:34.040 +text generation activities in the past. + +02:34.310 --> 02:42.950 +If we look at Codeclan we can see that it scores really very well indeed in Python coding and in C plus + +02:42.950 --> 02:43.760 +plus coding. + +02:43.760 --> 02:51.380 +It is only beaten by this one here, which is the externally run one which only fractionally does better. + +02:51.380 --> 02:56.780 +And if you look, this model itself is in fact a fine tune of Quinn itself. + +02:56.780 --> 03:01.040 +So I think frankly, we're doing pretty well by sticking with code. + +03:01.040 --> 03:01.610 +Quinn. + +03:01.640 --> 03:04.250 +There's a big drop off after that. + +03:04.250 --> 03:10.640 +The other one that I was interested in showing you was I wanted to also show you Code Gemma down here, + +03:10.640 --> 03:13.550 +which I thought would be great to try a different one and see Google. + +03:13.610 --> 03:15.590 +But as it happens, when I try to use code. + +03:15.620 --> 03:22.400 +Gemma uh, it was able to to answer very simple tasks, but it crashed whenever it was given harder + +03:22.400 --> 03:23.000 +tasks. + +03:23.000 --> 03:25.730 +You may want to try and see as an exercise whether you can get code. 
+ +03:25.730 --> 03:31.010 +Gemma to work, but I wasn't able to for this particular problem, at least not for any complicated, + +03:31.010 --> 03:35.000 +uh, translation or reimplementation of Python in C plus plus. + +03:35.000 --> 03:40.040 +But certainly an exercise for you will be to try out other models, because we have so many to choose + +03:40.040 --> 03:42.470 +from and see what you find. + +03:42.470 --> 03:46.820 +So we've picked, uh, Code Quan. + +03:46.850 --> 03:48.560 +Let's go into it one more time. + +03:48.680 --> 03:52.970 +Uh, the next thing to do is to figure out, okay, how are we going to run this various ways? + +03:52.970 --> 03:53.450 +We could do it. + +03:53.450 --> 03:54.770 +You already know of one of them. + +03:54.770 --> 04:01.460 +We could go onto Google Colab and write the code to use the Hugging Face Transformers library to directly + +04:01.460 --> 04:07.280 +access Codex code, load it in and run it in inference time. + +04:07.610 --> 04:08.810 +And that would work great. + +04:08.810 --> 04:14.810 +The thing is, I want to execute the compiled code on my box, so it would be a bit inconvenient. + +04:14.840 --> 04:20.810 +We'd have to run that in Jupyter, in in Google Colab, and then copy and paste the code over to my + +04:20.810 --> 04:21.500 +box. + +04:21.650 --> 04:25.250 +And I was hoping to be able to do it all in one nice user interface. + +04:25.250 --> 04:30.230 +And so basically I want to be able to run this model as it is, but from my box. + +04:30.260 --> 04:34.370 +Now there are a few ways of doing that, and I'm not going to go into all of them, but there's one + +04:34.370 --> 04:40.550 +that's super convenient, and it's actually going to show us a way that you can deploy models in production + +04:40.550 --> 04:41.990 +for production purposes. 
+ +04:41.990 --> 04:50.000 +And that is to ask hugging face to run this model for you and to give you an endpoint which you can + +04:50.000 --> 04:53.540 +use to call the model remotely from your code. + +04:53.540 --> 04:59.690 +So it's a sort of hosting solution for running a model like this model, or any open source model you + +04:59.690 --> 05:02.810 +want, and give you an endpoint to run it yourself. + +05:02.840 --> 05:04.790 +And that sounds super advanced. + +05:04.880 --> 05:07.340 +You may think that there's going to be a lot to it to do this. + +05:07.430 --> 05:09.680 +It turns out it's not difficult at all. + +05:09.680 --> 05:12.740 +All you have to do is go to this deploy button right here. + +05:12.740 --> 05:19.130 +And the first option here is inference endpoints dedicated inference deployments for production. + +05:19.130 --> 05:24.380 +And it's saying this is a way you can have your own deployment of this model. + +05:24.380 --> 05:30.350 +And you'll be able to call it yourself from anywhere and use this model on your own. + +05:30.350 --> 05:32.900 +Now of course you'll have to pay for that. + +05:33.230 --> 05:37.310 +It doesn't come for free, but it's not massively expensive. + +05:37.310 --> 05:39.260 +But this is one that is, it costs a little bit more. + +05:39.290 --> 05:41.480 +It's something certainly to be aware of. + +05:41.630 --> 05:48.890 +Um, so when you, when you come in to, to use one of these inference endpoints, you choose whether + +05:48.890 --> 05:53.450 +you want to run on AWS or Azure or GCP, Google Cloud. + +05:53.660 --> 05:57.440 +And you can also choose whether you want a CPU box or a GPU box. + +05:57.440 --> 06:05.690 +In the case of this model, we would need to have a GPU box, and we would need to have at least an + +06:05.690 --> 06:07.100 +Nvidia L4. + +06:07.130 --> 06:13.820 +At least that is what it is recommending to us, which has 24GB of Ram on the GPU. 
+ +06:14.210 --> 06:22.100 +Uh, so we'd simply select this and then we would go ahead and, uh, fill in some of these things and + +06:22.100 --> 06:23.870 +click Create Endpoint. + +06:23.870 --> 06:25.190 +And it's as easy as that. + +06:25.190 --> 06:32.270 +We would have our own dedicated endpoint for for your personal use running with hugging face paying + +06:32.300 --> 06:34.070 +$0.80 an hour. + +06:34.070 --> 06:36.860 +If you're in the US, you may see different pricing there. + +06:36.860 --> 06:41.360 +So you know it's not super cheap, but you can turn it off and on whenever you want. + +06:41.390 --> 06:46.070 +So for doing an experiment, if this is just going to take you an hour or two, uh, it's something + +06:46.070 --> 06:53.180 +that's that's great fun to do, I recommend it, uh, if your budget allows for these things, but if + +06:53.180 --> 06:55.340 +not, never fear, I've done it for you. + +06:55.370 --> 06:57.560 +You can watch along and see what it's like. + +06:57.680 --> 07:01.700 +I'm going to go to my dedicated endpoints using this menu right here. + +07:01.700 --> 07:05.780 +And you'll see that I've got two dedicated endpoints that I've set up. + +07:05.780 --> 07:09.110 +One of them is indeed Codeclan, the one we've just been looking at. + +07:09.110 --> 07:15.560 +I basically followed through that screen and it's running on that box that was recommended, the L four. + +07:15.590 --> 07:21.410 +So I am currently spending $0.80 an hour for the pleasure of running this endpoint. + +07:21.440 --> 07:23.390 +Don't let it be said I don't do anything for you. + +07:23.930 --> 07:27.740 +And I also, as I say, I tried code Gemma. + +07:28.070 --> 07:29.690 +I tried it on an alpha box. + +07:29.690 --> 07:31.310 +I was not able to get this to run. + +07:31.310 --> 07:35.960 +It would do simple, simple stuff, but anything too complicated it would crash. + +07:36.200 --> 07:39.950 +But I love this to be an exercise for you to come in and try this. 
+ +07:39.950 --> 07:46.040 +Try getting code Gemma to work, and then I'd be interested to see how it matches up with code. + +07:46.490 --> 07:52.100 +But I think you'll find that just as we saw from the leaderboard, there will be very little doubt that + +07:52.100 --> 07:59.450 +Codeclan is hugely superior and very, very capable of these kinds of tasks. + +07:59.540 --> 08:05.210 +So at this point, you have hopefully followed this through. + +08:05.240 --> 08:10.550 +You have to wait five minutes, go and get a coffee for for the codeclan endpoint to come up and running, + +08:10.550 --> 08:17.630 +and then we'll be ready to get back to JupyterLab running on our box to call this endpoint and to try + +08:17.630 --> 08:19.190 +out some code generation. + +08:19.190 --> 08:20.360 +I will see you in a minute. diff --git a/week5/community-contributions/subtitles/srts/59295601/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295601/ja_JP.srt new file mode 100755 index 0000000..3bf61dc --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295601/ja_JP.srt @@ -0,0 +1,295 @@ +WEBVTT + +00:00.380 --> 00:13.460 +そこで、 オープンソースの世界への旅を続け、 オープンソース・コードを生成する能力を調査するためにどのモデルを選ぶべきかを理解する時が来た。 + +00:13.460 --> 00:14.420 +何から始めようか? 
+ +00:14.420 --> 00:17.780 +もちろん、 まずは大きなコードモデルのリーダーボードから。 + +00:17.780 --> 00:28.820 +これは、 以前見た、 抱き合った顔の空間として動いているリーダーボードで、 コード生成タスクにおけるオープンソースモデルのパフォーマンスを比較するものだ。 + +00:29.270 --> 00:39.530 +コード・ラマ、 コード・クワン、 ディープ・シーク・コーダーなど、 本当にうまくいっているベースモデルのいくつかを見ることができる。 + +00:39.770 --> 00:44.540 +というのも、 C++コードのようなタスクのために特別に訓練されたモデルは、 + +00:44.690 --> 00:55.370 +基本モデルよりもはるかに優れたパフォーマンスを発揮するからです。 + +00:55.520 --> 01:03.080 +PythonとCに加えて、 テスト能力も見ていきます。 + +01:03.440 --> 01:12.350 +そして、 これらを見るとき、 ひとつ注目すべきことは、 ここで終了と書かれているものは、 彼らのベンチマークであり、 スコアリングの一部は外部で行われたものだということだ。 + +01:12.350 --> 01:17.720 +だから個人的には、 そのシンボルがないもの、 ハグフェイスの一部としてベンチマークされたもの、 + +01:17.720 --> 01:33.050 +スペース・プラットフォーム、 そしてこのリストのトップにあるコード・クワン1に傾倒する傾向がある。 + +01:33.050 --> 01:33.050 +570億ドル、 まさにトップだ。 + +01:33.050 --> 01:35.090 +クワンのコードバリエーション。 + +01:35.090 --> 01:40.850 +チャット用に訓練されているので、 例えばPythonのコードを与えて完了するのを期待するのではなく、 + +01:40.850 --> 01:43.670 +対話することができる。 + +01:43.670 --> 01:50.870 +このPythonのコードを最適化されたC言語に変換してくれない? + +01:50.870 --> 01:51.500 +プラス? 
+ +01:51.500 --> 01:53.150 +中に入って見てみよう。 + +01:53.630 --> 02:00.710 +我々は今、 コード・クワン1を見ている。 570億円のチャットがトップモデル。 + +02:00.740 --> 02:04.250 +そして、 ここにその機能の一部が記されている。 + +02:04.400 --> 02:07.310 +そして、 どのようにそれに接することができるのか。 + +02:07.340 --> 02:11.270 +例えば、 Pythonでクイックソートのアルゴリズムを書けばいい。 + +02:11.450 --> 02:18.080 +そして、 トークナイザーがチャットテンプレートを適用してトークンに変換し、 + +02:18.110 --> 02:25.070 +それをモデルを通して実行します。 + +02:25.070 --> 02:34.040 +だからこれは、 過去に他の単純な活動、 つまりテキストを生成する活動に対して多くのモデルを実行した方法と非常に一致している。 + +02:34.310 --> 02:43.760 +Codeclanを見ると、 PythonのコーディングとCプラスアルファのコーディングで実に高いスコアを出していることがわかる。 + +02:43.760 --> 02:51.380 +ここで負けているのは、 ほんのわずかしか成績が良くない外部運営のこちらだけだ。 + +02:51.380 --> 02:56.780 +そして見てみると、 このモデル自体が実はクインの微調整そのものなのだ。 + +02:56.780 --> 03:01.040 +だから率直に言って、 コードにこだわることでうまくいっていると思う。 + +03:01.040 --> 03:01.610 +クインだ。 + +03:01.640 --> 03:04.250 +その後は大きく落ち込む。 + +03:04.250 --> 03:13.550 +もうひとつは、 コード・ジェンマをお見せしたかったんです。 + +03:13.610 --> 03:15.590 +しかし、 コードを使おうとすると、 そうなってしまう。 + +03:15.620 --> 03:23.000 +ジェマ......とても簡単なタスクには答えられたけど、 もっと難しいタスクを与えられるとクラッシュしたんだ。 + +03:23.000 --> 03:25.730 +コードを取得できるかどうか、 練習として試してみるのもいいかもしれない。 + +03:25.730 --> 03:35.000 +Gemmaは動くが、 この特別な問題では動かなかった。 少なくとも、 CプラスプラスでPythonの複雑な翻訳や再実装はできなかった。 + +03:35.000 --> 03:42.470 +しかし、 他のモデルを試してみるのもいい練習になるだろう。 + +03:42.470 --> 03:46.820 +だから、 コード・クアンを選んだんだ。 + +03:46.850 --> 03:48.560 +もう1度突っ込んでみよう。 + +03:48.680 --> 03:52.970 +ええと、 次にやるべきことは、 どうやってこれをいろいろな方法で走らせるかを考えることだ。 + +03:52.970 --> 03:53.450 +私たちならできる。 + +03:53.450 --> 03:54.770 +そのうちの一人はすでにご存じだろう。 + +03:54.770 --> 04:01.460 +Google Colabにアクセスして、 Hugging Face Transformersライブラリを使ってCodexコードに直接アクセスし、 + +04:01.460 --> 04:07.280 +それを読み込んで推論時に実行するコードを書けばいいのだ。 + +04:07.610 --> 04:08.810 +それは素晴らしいことだ。 + +04:08.810 --> 04:14.810 +問題は、 コンパイルしたコードを自分のマシンで実行したいので、 ちょっと不便だということだ。 + +04:14.840 --> 04:21.500 +そのコードをJupyterで実行し、 Google Colabで実行し、 私のボックスにコピー&ペーストしなければならない。 + +04:21.650 --> 04:25.250 +そして、 そのすべてを1つの素敵なユーザーインターフェイスで行えることを望んでいた。 + +04:25.250 --> 04:30.230 +だから基本的には、 このモデルをそのまま、 
でも自分のボックスから実行できるようにしたいんだ。 + +04:30.260 --> 04:34.370 +その方法はいくつかあるので、 そのすべてを紹介するつもりはないが、 + +04:34.370 --> 04:41.990 +超便利な方法がある。 + +04:41.990 --> 04:50.000 +そしてそれは、 あなたのためにこのモデルを実行し、 あなたのコードからリモートでモデルを呼び出すために使用できるエンドポイントを与えてくれるよう、 + +04:50.000 --> 04:53.540 +hugging faceに依頼することである。 + +04:53.540 --> 04:59.690 +つまり、 このモデルのようなモデルや、 あなたが望むオープンソースのモデルを実行するためのホスティング・ソリューションのようなもので、 + +04:59.690 --> 05:02.810 +自分で実行するためのエンドポイントを提供する。 + +05:02.840 --> 05:04.790 +そして、 それは超高度に聞こえる。 + +05:04.880 --> 05:07.340 +こんなことをするのは大変だと思うかもしれない。 + +05:07.430 --> 05:09.680 +全然難しくないことがわかった。 + +05:09.680 --> 05:12.740 +ここにあるデプロイボタンをクリックするだけだ。 + +05:12.740 --> 05:19.130 +そして最初の選択肢は、 本番用の推論専用の推論エンドポイントだ。 + +05:19.130 --> 05:24.380 +そして、 このモデルを独自に展開することができる方法だと言っているのだ。 + +05:24.380 --> 05:30.350 +そして、 どこからでも自分で電話をかけ、 このモデルを自分で使うことができるようになる。 + +05:30.350 --> 05:32.900 +もちろん、 そのための費用は支払わなければならない。 + +05:33.230 --> 05:37.310 +タダではないが、 決して高くはない。 + +05:37.310 --> 05:39.260 +でも、 これはちょっと高いんだ。 + +05:39.290 --> 05:41.480 +確かに注意すべきことだ。 + +05:41.630 --> 05:48.890 +これらの推論エンドポイントを使用する場合、 AWS、 Azure、 GCP、 Google + +05:48.890 --> 05:53.450 +Cloudのどれで実行するかを選択します。 + +05:53.660 --> 05:57.440 +また、 CPUボックスかGPUボックスかを選ぶこともできる。 + +05:57.440 --> 06:07.100 +このモデルの場合、 GPUボックスが必要で、 少なくともNvidia L4が必要だ。 + +06:07.130 --> 06:13.820 +少なくとも、 GPUに24GBのラムを搭載している私たちには、 それが推奨されている。 + +06:14.210 --> 06:23.870 +これを選択し、 いくつかの項目を入力し、 エンドポイントの作成をクリックします。 + +06:23.870 --> 06:25.190 +それはとても簡単なことだ。 + +06:25.190 --> 06:34.070 +私たちは、 あなたの個人的な使用のために私たち自身の専用エンドポイントを持っている抱擁顔0ドルを支払って実行している。 + +06:34.070 --> 06:34.070 +時給80円。 + +06:34.070 --> 06:36.860 +米国にお住まいの方は、 価格が異なるかもしれません。 + +06:36.860 --> 06:41.360 +だから、 超安くはないけれど、 いつでも好きなときにオフにしたりオンにしたりできるんだ。 + +06:41.390 --> 06:46.070 +もし予算が許すのであれば、 実験することをお勧めするが、 + +06:46.070 --> 06:55.340 +もしそうでなければ、 心配はいらない。 + +06:55.370 --> 06:57.560 +どんなものか、 一緒に見てみるといい。 + +06:57.680 --> 07:01.700 +このメニューを使って、 専用のエンドポイントに移動します。 + +07:01.700 --> 07:05.780 +そして、 2つの専用エンドポイントを設定したことがわかるだろう。 + +07:05.780 --> 07:09.110 
+そのひとつがコードクランである。 + +07:09.110 --> 07:15.560 +私は基本的にその画面に従い、 推奨されたL4ボックスで動作している。 + +07:15.590 --> 07:21.410 +というわけで、 現在の支出は0ドルだ。 このエンドポイントを走らせる喜びのために1時間80ドル。 + +07:21.440 --> 07:23.390 +私があなたのために何もしないとは言わせない。 + +07:23.930 --> 07:27.740 +そして、 ジェンマのコードも試した。 + +07:28.070 --> 07:29.690 +アルファボックスで試してみた。 + +07:29.690 --> 07:31.310 +私はこれを実行させることができなかった。 + +07:31.310 --> 07:35.960 +簡単なことはできるけど、 複雑すぎるとクラッシュするんだ。 + +07:36.200 --> 07:39.950 +でも、 私はこれを練習の場として使ってほしい。 + +07:39.950 --> 07:46.040 +コード・ジェンマを使えるようにしてみて、 それがコードとどう一致するのか興味がある。 + +07:46.490 --> 07:52.100 +しかし、 リーダーボードを見ればわかるように、 コーデクランがこの種のタスクにおいて非常に優れており、 + +07:52.100 --> 07:59.450 +非常に有能であることに疑いの余地はないだろう。 + +07:59.540 --> 08:05.210 +だから、 この時点で、 あなたは願わくば、 この通りにしてほしい。 + +08:05.240 --> 08:10.550 +codeclanエンドポイントが立ち上がって実行されるまで、 コーヒーを買いに行って5分待つ必要がある。 + +08:10.550 --> 08:19.190 +その後、 このエンドポイントを呼び出してコード生成を試すために、 私たちのボックスで実行されているJupyterLabに戻る準備ができる。 + +08:19.190 --> 08:20.360 +すぐに会おう。 diff --git a/week5/community-contributions/subtitles/srts/59295601/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295601/ko_KR.srt new file mode 100755 index 0000000..43ded9b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295601/ko_KR.srt @@ -0,0 +1,346 @@ +WEBVTT + +00:00.380 --> 00:07.730 +오픈 소스 세계로 여정을 계속하면서 오픈 소스 코드 생성 능력을 조사하기 + +00:07.730 --> 00:13.460 +위해 어떤 모델을 선택해야 할지 알아보죠 + +00:13.460 --> 00:14.420 +어디서부터 시작하죠? 
+ +00:14.420 --> 00:17.780 +우선은 큰 코드 모델의 leaderboard를 보시죠 + +00:17.780 --> 00:23.300 +이건 아까 봤던 포옹하는 얼굴 공간으로 실행되는 leaderboard입니다 + +00:23.300 --> 00:28.820 +코드 생성 작업에서 오픈 소스 모델의 성능을 비교하는 거죠 + +00:29.270 --> 00:34.370 +처음에 베이스모델을 봤을 때 몇몇 베이스모델이 잘 작동하고 있는 + +00:34.370 --> 00:39.530 +것을 볼 수 있습니다 코드 라마, 코드 콴 딥 시크 코더가 있네요 + +00:39.770 --> 00:44.540 +하지만 모든 모델을 살펴볼 겁니다 미세 조정된 모델에 + +00:44.690 --> 00:51.110 +관심이 많거든요 C 플러스 플러스 코드 같은 작업에 맞게 특별히 훈련된 + +00:51.140 --> 00:55.370 +몇몇 기본 모델보다 훨씬 더 잘 수행하니까요 + +00:55.520 --> 01:01.820 +그리고 파이썬 + C+ 테스트 기능을 살펴볼 겁니다 왜냐하면 결국 우리가 + +01:01.820 --> 01:03.080 +할 것이니까요 + +01:03.440 --> 01:09.770 +이걸 볼 때 주목할 점은 여기서 나가라는 건 기준점이 되는 거죠 외부적으로 + +01:09.890 --> 01:12.350 +채점이 발생했어요 + +01:12.350 --> 01:17.720 +그래서 저는 개인적으로 이 기호가 없는 걸 선호합니다 + +01:17.720 --> 01:25.790 +안는 얼굴, 스페이스 플랫폼의 벤치마크로 지정된 것들요 그중에서도 가장 + +01:25.790 --> 01:33.050 +높은 게 코드 콴 1이죠 570억 달러요, 맨 위에요 + +01:33.050 --> 01:35.090 +코드 변종 콴이에요 + +01:35.090 --> 01:40.850 +채팅도 가능하게끔 훈련되어 있습니다 즉, 단순히 파이썬 코드를 주고 완료되기를 기대하는 + +01:40.850 --> 01:43.670 +것과는 다른 상호작용이 가능하다는 것이죠 + +01:43.670 --> 01:50.870 +채팅을 할 수 있습니다 파이썬 코드를 최적화된 C+로 변환할 수 있나요? + +01:50.870 --> 01:51.500 +그리고요? 
+ +01:51.500 --> 01:53.150 +들어가서 한번 보죠 + +01:53.630 --> 02:00.710 +지금 보시는 건 코드 콴 1이에요 570억 챗의 톱모델이죠 + +02:00.740 --> 02:04.250 +여기 보시면 이게 뭘 할 수 있는지 설명돼 있죠 + +02:04.400 --> 02:07.310 +어떻게 상호 작용할 수 있는지도요 + +02:07.340 --> 02:11.270 +파이썬 에서 퀵 센서트 알고리즘을 작성하세요 + +02:11.450 --> 02:18.080 +보시다시피 우리가 쓰던 방식으로 메시지를 만들 수 있어요 토큰라이저 앱으로 + +02:18.110 --> 02:25.070 +채팅 템플릿을 적용하면 메시지를 토큰으로 변환하고 모델로 실행할 수 있죠 + +02:25.070 --> 02:31.580 +과거 간단한 작업, 텍스트 생성 작업을 위해 실행했던 여러 모델과 + +02:31.580 --> 02:34.040 +아주 일관된 방식이죠 + +02:34.310 --> 02:42.950 +Codeclan을 보시면 파이썬 코딩에서 C++ 코딩에서 아주 높은 점수를 얻을 수 + +02:42.950 --> 02:43.760 +있어요 + +02:43.760 --> 02:51.380 +외부 실행 장치인 이것에 의해만 패하고 있어요 이건 아주 조금 더 잘 작동하죠 + +02:51.380 --> 02:56.780 +잘 보면 이 모델 자체가 퀸의 훌륭한 곡조예요 + +02:56.780 --> 03:01.040 +솔직히 코드를 고수하면서 잘 되고 있다고 생각해요 + +03:01.040 --> 03:01.610 +퀸이에요 + +03:01.640 --> 03:04.250 +그 뒤엔 급경사면이 있어요 + +03:04.250 --> 03:10.640 +또 보여드리고 싶은 건 코드 젬마도 보여드리고 싶어요 다른 + +03:10.640 --> 03:13.550 +것도 해보고 구글도 보죠 + +03:13.610 --> 03:15.590 +하지만 코드를 사용하려 할 때 발생하는 일이죠 + +03:15.620 --> 03:23.000 +제마는 아주 간단한 임무에 대처할 수 있었지만 어려운 임무를 받을 때마다 고장 났어요 + +03:23.000 --> 03:25.730 +코드를 얻을 수 있는지 훈련으로 알아보세요. + +03:25.730 --> 03:31.010 +하지만 이 문제 때문에 작업을 할 수가 없었어요. 적어도 C 플러스 버전의 파이썬의 + +03:31.010 --> 03:35.000 +복잡한 번역이나 재공여가 일어나지 않았기 때문이죠. 
+ +03:35.000 --> 03:40.040 +하지만 다른 모델도 시도해 보는 것도 분명 도움이 될 거예요 + +03:40.040 --> 03:42.470 +선택지가 아주 많거든요 + +03:42.470 --> 03:46.820 +코드 콴을 선택했어요 + +03:46.850 --> 03:48.560 +한 번 더 얘기해 보죠 + +03:48.680 --> 03:52.970 +다음으로 할 일은 다양한 방식으로 어떻게 실행할지 알아내는 거죠 + +03:52.970 --> 03:53.450 +할 수 있어요 + +03:53.450 --> 03:54.770 +한 명은 이미 알고 계시잖아요 + +03:54.770 --> 04:01.460 +구글 Colab으로 가서 코드를 작성할 수 있습니다 안기 트랜스포머 라이브러리를 + +04:01.460 --> 04:07.280 +이용해 코덱스 코드에 직접 접근해 로드하고 추론 시간에 실행하는 거죠 + +04:07.610 --> 04:08.810 +그럼 아주 좋을 거예요 + +04:08.810 --> 04:14.810 +비트 박스에서 컴파일된 코드를 실행하고 싶다면 약간 불편할 거예요 + +04:14.840 --> 04:20.810 +주피터에서 실행해야 합니다 구글 Colab에서요 제 컴퓨터에 코드를 복사 붙여넣기 + +04:20.810 --> 04:21.500 +하죠 + +04:21.650 --> 04:25.250 +하나의 사용자 인터페이스로 다 할 수 있길 바랐어요 + +04:25.250 --> 04:30.230 +기본적으로 이 모델을 있는 그대로 실행하고 싶어요 제 상자에서요 + +04:30.260 --> 04:34.370 +그렇게 할 방법이 몇 가지 있는데 전부 설명하진 않겠지만 아주 + +04:34.370 --> 04:40.550 +편리한 방법이 있어요 프로덕션에서 모델을 배포하는 방법을 보여드릴 겁니다 프로덕션 + +04:40.550 --> 04:41.990 +목적을 위해서요 + +04:41.990 --> 04:50.000 +페이스 포옹에 이 모델을 실행해 달라고 요청하는 겁니다 여러분이 코드에서 원격으로 모델을 + +04:50.000 --> 04:53.540 +호출할 수 있는 엔드포인트를 주는 거죠 + +04:53.540 --> 04:59.690 +이런 모델이나 여러분이 원하는 오픈 소스 모델을 실행하는 일종의 호스팅 솔루션으로 여러분이 + +04:59.690 --> 05:02.810 +실행할 수 있는 엔드포인트를 제공하죠 + +05:02.840 --> 05:04.790 +정말 고급 기술 같네요 + +05:04.880 --> 05:07.340 +할 일이 많을 거라고 생각할 수도 있어요 + +05:07.430 --> 05:09.680 +전혀 어렵지 않아요 + +05:09.680 --> 05:12.740 +여러분이 할 일은 여기 이 배포 버튼으로 가는 것뿐이에요 + +05:12.740 --> 05:19.130 +첫 번째 옵션은 종점 추론입니다 프로덕션에 대한 전용 추론 배포죠 + +05:19.130 --> 05:24.380 +이 모델의 고유한 배포를 가질 수 있는 방법이라고 하네요 + +05:24.380 --> 05:30.350 +어디서든 직접 호출해 이 모델을 사용할 수 있어요 + +05:30.350 --> 05:32.900 +물론 그 대가를 치러야겠죠 + +05:33.230 --> 05:37.310 +공짜는 아니지만 아주 비싸진 않아요 + +05:37.310 --> 05:39.260 +하지만 이건 비트가 좀 더 비싸요 + +05:39.290 --> 05:41.480 +확실히 알아야 할 일이죠 + +05:41.630 --> 05:48.890 +이런 추론 끝점을 사용할 때 AWS나 애저 GCP, 구글 + +05:48.890 --> 05:53.450 +클라우드에서 실행할지 선택해요 + +05:53.660 --> 05:57.440 +CPU 박스와 GPU 박스도 선택할 수 있어요 + +05:57.440 --> 06:05.690 +이 모델의 경우 GPU 박스가 필요하고 엔비디아 
L4도 최소한 있어야 + +06:05.690 --> 06:07.100 +해요 + +06:07.130 --> 06:13.820 +적어도 그게 권장하는 거죠 GPU 상에 24GB 램이 있어요 + +06:14.210 --> 06:22.100 +그래서 이걸 선택하고 이 중 몇 개를 채우고 엔드포인트 생성을 + +06:22.100 --> 06:23.870 +클릭해요 + +06:23.870 --> 06:25.190 +아주 간단해요 + +06:25.190 --> 06:32.270 +전용 엔드포인트를 만들어서 당신을 위해 0달러 내고 얼굴 껴안고 달리게 할 + +06:32.300 --> 06:34.070 +거예요 시간당 80달러요 + +06:34.070 --> 06:36.860 +미국에서는 가격이 다를 수 있어요 + +06:36.860 --> 06:41.360 +아주 저렴하진 않지만 원할 때 껐다 켤 수 있어요 + +06:41.390 --> 06:46.070 +실험에 한두 시간밖에 안 걸린다면 정말 재미있는 + +06:46.070 --> 06:53.180 +실험이 될 거예요 예산에 맞는다면 추천합니다 그렇지 않다면 걱정 마세요 + +06:53.180 --> 06:55.340 +제가 해드릴게요 + +06:55.370 --> 06:57.560 +여러분도 함께 지켜보며 어떤지 보세요 + +06:57.680 --> 07:01.700 +여기 이 메뉴를 이용해 지정된 끝점으로 가보죠 + +07:01.700 --> 07:05.780 +지정된 끝점이 2개 있는 게 보이시죠 제가 설정한 거요 + +07:05.780 --> 07:09.110 +그중 하나가 방금 본 코드클란이에요 + +07:09.110 --> 07:15.560 +기본적으로 저 화면을 따라가면 추천받은 L4 상자에서 실행되고 있어요 + +07:15.590 --> 07:21.410 +현재 0달러 썼어요 시간당 80달러로 이 엔드포인트를 운영하는 기쁨을 누리세요 + +07:21.440 --> 07:23.390 +내가 당신을 위해 아무것도 안 한다는 말은 하지 말아요 + +07:23.930 --> 07:27.740 +코드 젬마도 시도해 봤어요 + +07:28.070 --> 07:29.690 +알파 박스에 써봤어요 + +07:29.690 --> 07:31.310 +Get it을 실행할 수가 없었어요 + +07:31.310 --> 07:35.960 +아주 단순한 작업이지만 너무 복잡하면 충돌해요 + +07:36.200 --> 07:39.950 +하지만 이걸 연습 삼아 한번 해 보세요 + +07:39.950 --> 07:46.040 +코드 제마를 작동시켜 보세요 코드랑 어떻게 일치하는지 보고 싶네요 + +07:46.490 --> 07:52.100 +하지만 리더보드에서 본 것처럼 의심의 여지가 없어요 + +07:52.100 --> 07:59.450 +코드클란이 훨씬 뛰어나고 이런 장애물을 잘 해낼 거예요 + +07:59.540 --> 08:05.210 +지금까지는 잘 따라오셨길 바라요 + +08:05.240 --> 08:10.550 +5분 기다렸다가 커피를 마시고 codeclan 엔드포인트가 실행될 때까지 + +08:10.550 --> 08:17.630 +기다리세요 그런 다음 JupyterLab으로 돌아가 이 엔드포인트를 호출하고 코드 생성을 시험해볼 + +08:17.630 --> 08:19.190 +준비가 될 거예요 + +08:19.190 --> 08:20.360 +잠시 후에 뵙죠 diff --git a/week5/community-contributions/subtitles/srts/59295607/en_US.srt b/week5/community-contributions/subtitles/srts/59295607/en_US.srt new file mode 100755 index 0000000..91a8566 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295607/en_US.srt @@ -0,0 +1,349 @@ 
+WEBVTT + +00:00.920 --> 00:06.650 +So to revisit then the solution that we built in the previous day and talk about the metrics. + +00:06.650 --> 00:13.640 +As I say, we've actually got very simple business centric metrics to gauge the performance and evaluate + +00:13.640 --> 00:15.020 +the performance of our results. + +00:15.020 --> 00:17.180 +It's quite simply the performance of the code. + +00:17.180 --> 00:19.100 +How much faster did we make the code? + +00:19.100 --> 00:27.500 +And obviously Claude 3.5 sonnet crushed it and and came through as the front runner, as the leader, + +00:27.740 --> 00:35.720 +followed by GPT four zero, which sometimes failed with the mission, but also sometimes succeeded and + +00:35.750 --> 00:37.670 +with a fairly fast solution. + +00:38.030 --> 00:45.320 +And then, unfortunately, Code Kwan did, as I say, a decent effort, but was not able ever to get + +00:45.320 --> 00:50.000 +the right results despite some attempts to force it through system prompts. + +00:50.030 --> 00:52.610 +Slightly cheating wasn't good enough. + +00:52.760 --> 01:00.230 +Uh, so, you know, again, I do just want to point out that Quinn has 7 billion parameters and it's + +01:00.230 --> 01:04.370 +closed source rivals way more than a trillion parameters. + +01:04.370 --> 01:09.050 +So the very fact that it could do what it could do is incredibly impressive. + +01:09.050 --> 01:15.530 +And remember that aside from that, that cost of running it, um, the $0.80, I was paying an hour + +01:15.530 --> 01:18.920 +for it to be running as an inference end point. + +01:18.920 --> 01:20.540 +Aside from that, it is free. + +01:20.540 --> 01:21.800 +It's open source. + +01:21.920 --> 01:25.070 +Uh, and so we're not paying API costs. 
+ +01:25.070 --> 01:31.190 +And there are ways that you could run it on your local box, um, particularly if you have a GPU, uh, + +01:31.190 --> 01:37.790 +in which case, again, there's no cost other than the cost of running your box, the electricity. + +01:38.030 --> 01:42.110 +Uh, so it's a very impressive, uh, model. + +01:42.110 --> 01:48.350 +It's it's it's frankly extraordinary that an open source model with only 7 billion parameters can do + +01:48.350 --> 01:51.380 +things like converting that code to calculate pi. + +01:51.440 --> 01:56.060 +So at this point, I want to introduce the challenges for you. + +01:56.060 --> 02:01.540 +And this week I've got a number of them and they are quite hard, but they'll be so fulfilling, so + +02:01.540 --> 02:05.770 +satisfying, that I want to encourage you more than ever to give this a shot. + +02:05.890 --> 02:07.390 +I do hope you have time. + +02:07.390 --> 02:12.460 +I've tried to keep this days lectures relatively brief to free up some time so you can do it. + +02:12.460 --> 02:16.060 +This will really solidify a lot of what we learned so far. + +02:16.090 --> 02:18.010 +A couple of smaller things to tweak. + +02:18.010 --> 02:19.690 +The solution we've already got. + +02:19.720 --> 02:28.720 +You can add Gemini into the closed source models so that the Gemini is you've got GPT for Claude and + +02:28.720 --> 02:33.220 +Gemini, and you can also add in more open source models. + +02:33.220 --> 02:40.600 +As I was suggesting, you can add in code Llama and Star Coder, and you could see whether or not you + +02:40.600 --> 02:43.390 +can force Code Gemma to work. + +02:43.420 --> 02:50.140 +Um, maybe it can be convinced to not rewrite the random number generator by a series of aggressive + +02:50.140 --> 02:52.660 +prompts to try and stop that happening. + +02:52.660 --> 02:54.580 +Uh, see if you have more luck than me. + +02:54.580 --> 02:57.600 +If you do, then please, please push the code. 
+ +02:57.660 --> 02:58.680 +Let me see it. + +02:58.710 --> 03:00.840 +Let me include it and I'll share it with others. + +03:00.840 --> 03:03.390 +And then we can all benefit from it. + +03:03.390 --> 03:04.290 +And that would be. + +03:04.320 --> 03:04.710 +Yeah. + +03:04.740 --> 03:05.940 +Really terrific. + +03:05.940 --> 03:09.240 +So give it a try and share your results. + +03:09.240 --> 03:10.470 +But then. + +03:11.190 --> 03:19.050 +But then I have three thorny challenges for you which is going to take your your abilities to generate + +03:19.050 --> 03:19.380 +code. + +03:19.380 --> 03:21.030 +It's going to take it to the next level. + +03:21.270 --> 03:24.630 +So here's some really great ideas. + +03:24.630 --> 03:31.140 +One of them would be to write a coding tool that can automatically add comments or docstrings to your + +03:31.140 --> 03:33.900 +code, something that might be really useful. + +03:34.080 --> 03:38.490 +I know that there's some products out there that do it already, but it'd be great to do it yourself + +03:38.490 --> 03:42.180 +and see it working and see whether code is good enough to do this for you. + +03:42.600 --> 03:47.610 +Uh, another would be a tool that can write unit tests. + +03:47.610 --> 03:53.340 +Uh, unit testing is something which is the bane of some of our lives. + +03:53.400 --> 03:55.620 +Uh, it can be very grueling. + +03:55.870 --> 04:02.170 +and sometimes unit tests can be very poorly written if all they're doing is just sort of replicating + +04:02.170 --> 04:06.190 +inputs and outputs of a function rather than doing something sensible. + +04:06.190 --> 04:15.040 +So see if you can use frontier models and maybe open source models to generate unit test cases for a + +04:15.040 --> 04:17.410 +particular Python module. + +04:17.620 --> 04:21.220 +And then finally, this is a really fun one. + +04:21.220 --> 04:21.940 +And I've done this. 
So I have this myself just for entertainment value alone.
+ +05:47.440 --> 05:49.720 +So consider that a stern warning. + +05:49.930 --> 05:59.050 +Use this purely for your own enjoyment and to to see the capabilities involved, not for actually risking + +05:59.080 --> 06:05.950 +fame and fortune on the whim of an LM, whether it's closed or open source. + +06:06.250 --> 06:08.530 +But this is a great project. + +06:08.530 --> 06:09.820 +I'm sure you'll enjoy it. + +06:09.820 --> 06:14.950 +I might share my code if no one else does, but I you know, first of all, I put the challenge to you + +06:14.980 --> 06:16.150 +have a shot at doing it. + +06:16.180 --> 06:19.420 +If you generate some trading signals, then I challenge you. + +06:19.420 --> 06:25.930 +I'll go head to head with you and see in a simulated environment only, of course, which of our different + +06:25.930 --> 06:32.980 +generated trading functions are able to make more more money in a fake market? + +06:33.070 --> 06:34.030 +There you go. + +06:34.030 --> 06:38.500 +That should be a really fun project for you, and I can't wait to see what people come up with. + +06:38.530 --> 06:41.050 +And with that, would you believe it? + +06:41.080 --> 06:42.490 +Would you believe this? + +06:42.520 --> 06:51.680 +We have just reached the 50% point in this journey to to being a proficient LM Engineer. + +06:51.710 --> 06:53.510 +You are half way there. + +06:53.540 --> 06:57.230 +At least you're halfway there after you've done some of that homework and built some of those projects. + +06:57.230 --> 07:01.100 +Because that's, as I say, the most, the best way to learn is by doing. + +07:01.130 --> 07:07.490 +And so once you've done that, then you can pat yourself on the back and say you've got 50% of the way + +07:07.490 --> 07:08.120 +there. + +07:08.480 --> 07:15.860 +And there is so much more to come next time we're going to be talking about rag Super Hot Topic, and + +07:15.860 --> 07:19.040 +we're talking about the big idea behind Rag. 
+ +07:19.040 --> 07:25.310 +We're going to walk through the high level flow of adding expertise into your queries, and we're going + +07:25.310 --> 07:31.340 +to implement a toy version of Rag, a simple version without introducing these things called vector + +07:31.340 --> 07:35.900 +databases, which is the big, the big thinking behind Rag. + +07:35.900 --> 07:38.480 +So I can't wait to get to all of that. + +07:38.480 --> 07:41.300 +Next week is going to be really, really exciting. + +07:41.390 --> 07:50.060 +And again, one more time, a massive congratulations for being halfway on your journey to mastering + +07:50.090 --> 07:50.990 +Llms. diff --git a/week5/community-contributions/subtitles/srts/59295607/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295607/ja_JP.srt new file mode 100755 index 0000000..4a2f3df --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295607/ja_JP.srt @@ -0,0 +1,283 @@ +WEBVTT + +00:00.920 --> 00:06.650 +そこで、 前日に構築したソリューションをもう一度見直して、 測定基準について話をしよう。 + +00:06.650 --> 00:15.020 +今申し上げたように、 私たちは非常にシンプルなビジネス中心の指標で業績を測定し、 業績を評価しています。 + +00:15.020 --> 00:17.180 +単純にコードのパフォーマンスだ。 + +00:17.180 --> 00:19.100 +コードはどれだけ速くなったか? 
+ +00:19.100 --> 00:27.500 +そして明らかにクロード3だ。 5ソネットはそれを打ち砕き、 トップランナーとして、 + +00:27.740 --> 00:37.670 +GPTフォーゼロがミッションに失敗することもあったが、 成功することもあった。 + +00:38.030 --> 00:45.320 +そして残念なことに、 コード・クワンは、 私が言うように、 まともな努力をしたが、 システム・プロンプトを通して強制的に結果を出そうと何度か試みたものの、 + +00:45.320 --> 00:50.000 +正しい結果を得ることはできなかった。 + +00:50.030 --> 00:52.610 +わずかなズルも十分ではなかった。 + +00:52.760 --> 01:04.370 +クインには70億のパラメータがあり、 クローズドソースでは1兆をはるかに超えるパラメータがライバルになる。 + +01:04.370 --> 01:09.050 +だから、 それができたという事実そのものが、 信じられないほど印象的なんだ。 + +01:09.050 --> 01:18.920 +それとは別に、 運営費、 つまり0ドルであることを忘れないでほしい。 80、 私は推論エンドポイントとして稼働するために1時間を支払っていた。 + +01:18.920 --> 01:20.540 +それはさておき、 無料である。 + +01:20.540 --> 01:21.800 +オープンソースだ。 + +01:21.920 --> 01:25.070 +だから、 APIのコストは払っていない。 + +01:25.070 --> 01:37.790 +特にGPUを持っている場合は、 GPUを動かすための電気代以外にコストはかからない。 + +01:38.030 --> 01:42.110 +とても印象的なモデルだよ。 + +01:42.110 --> 01:48.350 +たった70億のパラメーターを持つオープンソースのモデルが、 円周率を計算するためにそのコードを変換するようなことができるのは、 + +01:48.350 --> 01:51.380 +率直に言って異常なことだ。 + +01:51.440 --> 01:56.060 +そこで、 この時点で皆さんに課題を紹介したい。 + +01:56.060 --> 02:01.540 +そして今週、 私はそのいくつかを手に入れた。 それはとてもハードなものだが、 + +02:01.540 --> 02:05.770 +とても充実し、 満足のいくものだ。 + +02:05.890 --> 02:07.390 +時間があることを願うよ。 + +02:07.390 --> 02:12.460 +この数日間のレクチャーは、 時間を空けるために比較的簡潔なものにした。 + +02:12.460 --> 02:16.060 +これで、 これまで学んできたことの多くがより確かなものになるだろう。 + +02:16.090 --> 02:18.010 +微調整すべき点がいくつかある。 + +02:18.010 --> 02:19.690 +解決策はすでにある。 + +02:19.720 --> 02:33.220 +クローズドソースのモデルにジェミニを追加することで、 ジェミニはクロードとジェミニのGPTとなり、 さらにオープンソースのモデルを追加することもできる。 + +02:33.220 --> 02:43.390 +私が提案したように、 コード・ラマとスターコーダーを加えれば、 コード・ジェンマを強制的に働かせることができるかどうかがわかるだろう。 + +02:43.420 --> 02:52.660 +乱数ジェネレーターを書き換えないように、 積極的なプロンプトを出すことで、 そうならないように説得できるかもしれない。 + +02:52.660 --> 02:54.580 +ええと、 僕より運があるかどうか見てみて。 + +02:54.580 --> 02:57.600 +もしそうなら、 どうかコードをプッシュしてほしい。 + +02:57.660 --> 02:58.680 +見せてくれ。 + +02:58.710 --> 03:00.840 +他の人たちにも教えてあげよう。 + +03:00.840 --> 03:03.390 +そうすれば、 私たち全員がその恩恵を受けることができる。 + +03:03.390 --> 03:04.290 +そしてそれは + +03:04.320 --> 03:04.710 +そうだね。 + +03:04.740 --> 
03:05.940 +本当に素晴らしい。 + +03:05.940 --> 03:09.240 +ぜひ試して、 結果をシェアしてほしい。 + +03:09.240 --> 03:10.470 +でも、 それなら + +03:11.190 --> 03:19.380 +しかし、 そんなあなたには、 コードを生成する能力を必要とする3つの難題がある。 + +03:19.380 --> 03:21.030 +それは次のレベルに進むことになる。 + +03:21.270 --> 03:24.630 +そこで、 本当に素晴らしいアイデアをいくつか紹介しよう。 + +03:24.630 --> 03:33.900 +そのひとつは、 コードにコメントやdocstringを自動的に追加できるコーディング・ツールを書くことだ。 + +03:34.080 --> 03:38.490 +すでにそれを実現する製品がいくつかあることは知っているが、 自分でやってみて、 それが機能しているのを確認し、 + +03:38.490 --> 03:42.180 +自分のためにこれを実行するのに十分なコードかどうかを確認するのは素晴らしいことだ。 + +03:42.600 --> 03:47.610 +もうひとつは、 ユニットテストを書けるツールだね。 + +03:47.610 --> 03:53.340 +ユニットテストは、 私たちの人生の悩みの種だ。 + +03:53.400 --> 03:55.620 +ああ、 とても過酷なものだよ。 + +03:55.870 --> 04:02.170 +また、 ユニットテストが単に関数の入出力を複製するだけで、 何か賢明なことをするわけではない場合、 + +04:02.170 --> 04:06.190 +ユニットテストは非常に稚拙に書かれることがある。 + +04:06.190 --> 04:17.410 +そこで、 特定のPythonモジュールのユニットテストケースを生成するために、 フロンティアモデルや、 もしかしたらオープンソースモデルを使うことができるかどうかを見てみましょう。 + +04:17.620 --> 04:21.220 +そして最後に、 これは本当に楽しいものだ。 + +04:21.220 --> 04:21.940 +そして、 私はこれをやった。 + +04:21.940 --> 04:26.980 +だから、 私自身はエンターテインメントとしての価値だけでこれを持っている。 + +04:27.160 --> 04:28.330 +そして素晴らしい。 + +04:28.330 --> 04:29.320 +素晴らしいよ。 + +04:29.440 --> 04:41.590 +そして、 シミュレートされた環境で株式の売買を決定するコードを書くコード・ジェネレーターを書くことを試すべきだ。 + +04:41.590 --> 04:43.420 +もっと大きなプロジェクトなんだ。 + +04:43.420 --> 04:47.050 +でも、 コードを生成させるだけで、 少しずつステップを踏んでいくことができる。 + +04:47.050 --> 04:49.810 +必ずしもシミュレートされた環境が必要なわけではない。 + +04:49.900 --> 04:52.090 +APIを提供するわけだ。 + +04:52.120 --> 04:58.120 +株価を調べるために利用できる一般的なAPIを探すこともできる。 + +04:58.120 --> 05:03.430 +あるいは、 APIを考案し、 それをプロンプトで提供するだけで、 モデルが特定の日付のティッカーの価格を取得するためにどのメソッドを呼び出すことができるかを知り、 + +05:03.430 --> 05:14.530 +売買の決定を行う関数を生成できるようにすることもできる。 + +05:14.530 --> 05:17.710 +そうそう、 これはすごく楽しかった。 + +05:17.710 --> 05:26.380 +フロンティア・モデルとオープンソース・モデルの両方を使って、 たくさんの異なる売買シグナルを構築することができた。 + +05:26.380 --> 05:31.960 +ひとつ言っておきたいのは、 何をするにしても、 もしこれをやったとしても、 実際に売買の意思決定に使わないでほしいということだ。 + +05:31.960 --> 05:36.970 +私は、 この種のモデルを使って取引を決定しないことを強く勧める。 + +05:36.970 --> 05:41.770 +実際、 
フロンティアモデルにこのような手法を生成するように頼もうとすれば、 そうしてくれるだろうが、 + +05:41.770 --> 05:47.440 +どんなことがあっても実際の取引判断に使うべきではないと警告されるだろう。 + +05:47.440 --> 05:49.720 +だから、 これは厳重注意だ。 + +05:49.930 --> 05:59.050 +クローズドソースであろうとオープンソースであろうと、 LMの気まぐれで名声と富を実際に危険にさらすためではなく、 + +05:59.080 --> 06:05.950 +純粋に自分自身の楽しみのため、 そして関係する能力を確認するために使ってください。 + +06:06.250 --> 06:08.530 +しかし、 これは素晴らしいプロジェクトだ。 + +06:08.530 --> 06:09.820 +きっと楽しめると思うよ。 + +06:09.820 --> 06:16.150 +もし誰もやらなかったら、 僕のコードを共有するかもしれない。 でも、 まず第一に、 僕は君に挑戦状を出したんだ。 + +06:16.180 --> 06:19.420 +もしあなたが売買シグナルを出したのなら、 私はあなたに挑戦する。 + +06:19.420 --> 06:25.930 +もちろん、 シミュレートされた環境においてのみですが、 私たちが生成したさまざまな取引機能のうち、 + +06:25.930 --> 06:32.980 +どちらが偽の市場でより多くのお金を稼ぐことができるのか、 あなたと対決してみます。 + +06:33.070 --> 06:34.030 +そうだ。 + +06:34.030 --> 06:38.500 +それはあなたにとって本当に楽しいプロジェクトになるはずだし、 みんながどんなものを考え出すのか見るのが待ちきれないよ。 + +06:38.530 --> 06:41.050 +それで、 あなたはそれを信じるだろうか? + +06:41.080 --> 06:42.490 +信じられるか? + +06:42.520 --> 06:51.680 +私たちは、 熟練したLMエンジニアになるための旅の50%地点に到達したところです。 + +06:51.710 --> 06:53.510 +もう半分まで来ている。 + +06:53.540 --> 06:57.230 +少なくとも、 いくつかの宿題をこなし、 いくつかのプロジェクトを作り上げた後では、 その道半ばだ。 + +06:57.230 --> 07:01.100 +というのも、 私が言うように、 学ぶための最善の方法は実践することだからだ。 + +07:01.130 --> 07:08.120 +それができたら、 自分を褒めて、 50%は達成できたと言える。 + +07:08.480 --> 07:19.040 +そして次回は、 ラグ・スーパーホットトピックについて、 そしてラグの背後にある大きなアイデアについてお話しする予定です。 + +07:19.040 --> 07:35.900 +そして、 Ragのおもちゃバージョン、 つまりRagの背後にある大きな考え方であるベクトル・データベースと呼ばれるものを導入しないシンプルなバージョンを実装します。 + +07:35.900 --> 07:38.480 +だから、 そのすべてにたどり着くのが待ちきれない。 + +07:38.480 --> 07:41.300 +来週は本当に、 本当にエキサイティングになりそうだ。 + +07:41.390 --> 07:50.990 +そしてもう一度、 LLMSマスターへの道のりの半ばにいることを心から祝福したい。 diff --git a/week5/community-contributions/subtitles/srts/59295607/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295607/ko_KR.srt new file mode 100755 index 0000000..9e78ee9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295607/ko_KR.srt @@ -0,0 +1,337 @@ +WEBVTT + +00:00.920 --> 00:06.650 +그럼 전날 만든 솔루션을 다시 살펴보고 측정법에 대해 얘기해보죠 + +00:06.650 --> 00:13.640 +말씀드렸듯이 아주 간단한 
비즈니스 중심 지표가 있습니다 성과를 측정하고 결과를 평가하는 + +00:13.640 --> 00:15.020 +지표죠 + +00:15.020 --> 00:17.180 +코드의 성능이 중요하죠 + +00:17.180 --> 00:19.100 +코드를 얼마나 빨리 만들었죠? + +00:19.100 --> 00:27.500 +클로드 3도 있고요 5 소네트가 멋지게 해냈고 선두로 나섰습니다 + +00:27.740 --> 00:35.720 +그 뒤를 GPT 40이 따랐죠 임무에 실패할 때도 있었지만 성공할 때도 있었습니다 꽤 빠른 + +00:35.750 --> 00:37.670 +해결책으로요 + +00:38.030 --> 00:45.320 +불행히도 코드 콴도 노력은 했지만 제대로 된 결과를 얻지 못했어요 + +00:45.320 --> 00:50.000 +강제로 get을 입력하려 했지만요 + +00:50.030 --> 00:52.610 +살짝 속이는 거로는 부족했어요 + +00:52.760 --> 01:00.230 +다시 한번 말씀드리지만 퀸의 매개 변수는 70억 개예요 1조 개보다 + +01:00.230 --> 01:04.370 +훨씬 많은 비공개 소스의 라이벌이죠 + +01:04.370 --> 01:09.050 +이런 일을 할 수 있다는 사실 자체가 정말 인상적이에요 + +01:09.050 --> 01:15.530 +그리고 운영 비용은 0달러라는 걸 기억하세요 80%면 결론으로 내려고 + +01:15.530 --> 01:18.920 +1시간이나 냈고요 + +01:18.920 --> 01:20.540 +그 외에는 무료예요 + +01:20.540 --> 01:21.800 +오픈 소스 소스예요 + +01:21.920 --> 01:25.070 +API 비용을 내지 않아요 + +01:25.070 --> 01:31.190 +로컬 박스에서 실행할 방법이 있습니다 특히 GPU가 있다면요 + +01:31.190 --> 01:37.790 +이 경우 비용은 없어요 박스를 실행하는 비용과 전기 비용 외에는요 + +01:38.030 --> 01:42.110 +아주 인상적인 모델이에요 + +01:42.110 --> 01:48.350 +그건 솔직히 정말 놀라워요 70억 매개 변수만 있는 오픈 소스 모델이 해당 코드를 파이 계산으로 + +01:48.350 --> 01:51.380 +변환하는 그런 일을 할 수 있다는 게요 + +01:51.440 --> 01:56.060 +이 시점에서 여러분께 과제를 알려 드릴게요 + +01:56.060 --> 02:01.540 +이번 주에는 여러 가지를 준비했는데 꽤 어렵지만 정말 만족스럽고 뿌듯할 거예요 + +02:01.540 --> 02:05.770 +그래서 그 어느 때보다 여러분께 도전해 보라고 권하고 싶어요 + +02:05.890 --> 02:07.390 +시간이 있으면 좋겠네요 + +02:07.390 --> 02:12.460 +오늘 강의는 상대적으로 짧게 하려고 했어요 여러분이 할 수 있도록 시간을 좀 내려고요 + +02:12.460 --> 02:16.060 +지금까지 배운 걸 확실히 보여줄 거예요 + +02:16.090 --> 02:18.010 +몇 가지만 수정하면 돼요 + +02:18.010 --> 02:19.690 +이미 있는 해결책이죠 + +02:19.720 --> 02:28.720 +제미니를 비공개 소스 모델에 추가할 수 있습니다. 
제미니는 클로드와 제미니를 위한 GPT가 + +02:28.720 --> 02:33.220 +있고 더 많은 오픈 소스 모델을 추가할 수 있죠 + +02:33.220 --> 02:40.600 +제가 제안했던 것처럼 코드 라마와 별 코더를 추가할 수 있습니다 코드 젬마가 작동하도록 + +02:40.600 --> 02:43.390 +강제할 수 있는지 알 수 있죠 + +02:43.420 --> 02:50.140 +난수 발생기를 다시 쓰지 말라고 설득할 수 있을지도 몰라요 그런 일이 일어나지 않도록 공격적인 + +02:50.140 --> 02:52.660 +프롬프트를 여러 개 던져서요 + +02:52.660 --> 02:54.580 +나보다 운이 좋은지 보죠 + +02:54.580 --> 02:57.600 +있다면 제발 코드를 눌러주세요 + +02:57.660 --> 02:58.680 +어디 봐요 + +02:58.710 --> 03:00.840 +다른 사람들과 공유할 거예요 + +03:00.840 --> 03:03.390 +그럼 우리 모두 이득을 볼 수 있죠 + +03:03.390 --> 03:04.290 +그게 바로 그거죠 + +03:04.320 --> 03:04.710 +네 + +03:04.740 --> 03:05.940 +정말 멋져요 + +03:05.940 --> 03:09.240 +그러니 한번 시도해 보고 결과를 공유해 주세요 + +03:09.240 --> 03:10.470 +근데요 + +03:11.190 --> 03:19.380 +하지만 세 가지 가시 돋친 도전이 있어요 여러분의 능력을 이용해 코드를 생성해야 하죠 + +03:19.380 --> 03:21.030 +한 단계 더 발전할 거예요 + +03:21.270 --> 03:24.630 +좋은 아이디어가 있어요 + +03:24.630 --> 03:31.140 +그 중 하나는 코딩 도구를 작성하는 겁니다 코드에 자동으로 주석이나 docstrings을 추가하는 + +03:31.140 --> 03:33.900 +거죠 아주 유용할 수도 있는 거죠 + +03:34.080 --> 03:38.490 +이미 그렇게 하는 제품들이 있다는 걸 알지만 직접 해보시면 좋을 겁니다 + +03:38.490 --> 03:42.180 +작동하는 걸 보고 코드가 이걸 하기에 충분한지 보는 거죠 + +03:42.600 --> 03:47.610 +다른 건 단위 테스트를 작성하는 도구죠 + +03:47.610 --> 03:53.340 +단위 테스팅은 우리 삶의 골칫거리예요 + +03:53.400 --> 03:55.620 +아주 힘들 수도 있어요 + +03:55.870 --> 04:02.170 +때때로 단위 테스트는 아주 엉성하게 작성될 수 있어요. 그들이 하는 모든 것이 함수의 입력과 출력을 + +04:02.170 --> 04:06.190 +복제하는 것이라면요. 합리적인 것을 하는 것보다요. 
+ +04:06.190 --> 04:15.040 +프론티어 모델과 오픈 소스 모델을 이용해 파이썬 특정 모듈을 위한 단위 테스트 케이스를 생성할 + +04:15.040 --> 04:17.410 +수 있는지 보세요 + +04:17.620 --> 04:21.220 +마지막으로 정말 재미있는 거예요 + +04:21.220 --> 04:21.940 +이것도 해 봤어요 + +04:21.940 --> 04:26.980 +이건 그냥 오락용으로 제가 갖고 있는 거예요 + +04:27.160 --> 04:28.330 +정말 좋아요 + +04:28.330 --> 04:29.320 +좋아요 + +04:29.440 --> 04:37.870 +모의 환경에서 주식을 사고팔기 위한 거래 결정을 내릴 코드를 작성하는 + +04:37.900 --> 04:41.590 +코드 생성기를 만들어 보세요 + +04:41.590 --> 04:43.420 +더 큰 프로젝트인 건 알아요 + +04:43.420 --> 04:47.050 +하지만 Get은 한 번에 하나씩 할 수 있어요 코드 생성만으로요 + +04:47.050 --> 04:49.810 +시뮬레이션 환경이 꼭 필요한 건 아니에요 + +04:49.900 --> 04:52.090 +API 기능을 제공하죠 + +04:52.120 --> 04:58.120 +일반적인 API를 찾아볼 수도 있어요 지분 주가를 알아보기 위해서요 + +04:58.120 --> 05:03.430 +아니면 API를 발명해 프롬프트에 제공해 특정 날짜에 대한 체크메이트 + +05:03.430 --> 05:10.720 +가격을 얻기 위해 어떤 방법을 호출할 수 있는지 모델이 알도록 할 수도 있죠 그런 다음 거래 결정을 수행하거나 + +05:10.720 --> 05:14.530 +구매, 판매하는 기능을 생성하게 하는 거죠 + +05:14.530 --> 05:17.710 +네, 정말 재미있었어요 + +05:17.710 --> 05:26.380 +엄청나게 다양한 트레이딩 신호를 구축할 수 있었어요 프론티어 모델과 오픈 소스 모델을 사용해서요 + +05:26.380 --> 05:31.120 +하나만 부탁할게요 뭘 하든, 하게 되더라도 그걸 거래 결정에 이용하진 + +05:31.120 --> 05:31.960 +마세요 + +05:31.960 --> 05:36.970 +이런 식으로 거래 결정을 내리지 말라고 강력히 권고할게요 + +05:36.970 --> 05:41.770 +사실, 선구 모델에 이런 방법을 만들라고 요청하면 만들겠지만 + +05:41.770 --> 05:47.440 +어떤 상황에서든 실무 거래 결정에 사용하면 안 된다고 경고할 거예요 + +05:47.440 --> 05:49.720 +엄중한 경고라고 생각하세요 + +05:49.930 --> 05:59.050 +순수하게 여러분의 즐거움을 위해 사용하세요 관련된 기능을 보고 싶으시다면 LM의 변덕에 + +05:59.080 --> 06:05.950 +명예와 부를 걸 필요는 없습니다 닫혀있든 오픈 소스든 간에요 + +06:06.250 --> 06:08.530 +하지만 이건 훌륭한 프로젝트예요 + +06:08.530 --> 06:09.820 +마음에 드실 거예요 + +06:09.820 --> 06:14.950 +아무도 제 코드를 공유하지 않는다면 전 할 수도 있어요 하지만 먼저 도전 과제를 드렸어요 여러분이 할 + +06:14.980 --> 06:16.150 +수 있는 기회요 Put + +06:16.180 --> 06:19.420 +거래 신호를 보내면 도전하겠어요 + +06:19.420 --> 06:25.930 +당신과 직접 시뮬레이션 환경에서 살펴볼 겁니다 물론 가짜 시장에서 + +06:25.930 --> 06:32.980 +더 많은 돈을 벌 수 있는 다양한 거래 기능이 있을까요? 
HDP, DM + +06:33.070 --> 06:34.030 +여기요 + +06:34.030 --> 06:38.500 +재미있는 프로젝트가 될 거예요 다른 참가자들의 작품을 빨리 보고 싶네요 + +06:38.530 --> 06:41.050 +그리고 또 있어요, 믿어지세요? + +06:41.080 --> 06:42.490 +이게 믿어져요? + +06:42.520 --> 06:51.680 +숙련된 LM 엔지니어가 되기 위한 여정의 50% 지점에 도달했어요 + +06:51.710 --> 06:53.510 +절반 남았어요 + +06:53.540 --> 06:57.230 +적어도 반은 온 거예요 숙제를 하고 프로젝트를 몇 개 완성하면요 + +06:57.230 --> 07:01.100 +가장 좋은 배우는 방법은 행동으로 배우는 거니까요 + +07:01.130 --> 07:07.490 +일단 그렇게 하고 나면 스스로를 칭찬하며 50%는 성공했다고 말할 수 + +07:07.490 --> 07:08.120 +있죠 + +07:08.480 --> 07:15.860 +다음 시간에는 더 많은 이야기가 기다리고 있습니다 슈퍼 핫한 래그 토픽과 래그의 숨은 원대한 + +07:15.860 --> 07:19.040 +아이디어에 관해 이야기할 거예요 + +07:19.040 --> 07:25.310 +여러분의 쿼리에 전문 지식을 추가하는 높은 수준의 흐름을 살펴보겠습니다 그리고 + +07:25.310 --> 07:31.340 +Rag의 장난감 버전을 구현할 겁니다 벡터 데이터베이스라는 걸 도입하지 + +07:31.340 --> 07:35.900 +않는 간단한 버전으로요 Rag의 가장 큰 개념이죠 + +07:35.900 --> 07:38.480 +그래서 빨리 get it을 하고 싶어요. + +07:38.480 --> 07:41.300 +다음 주는 정말 재미있을 거예요 + +07:41.390 --> 07:50.990 +다시 한번 진심으로 축하드려요 림스를 지배하는 여정의 절반을 지나왔잖아요 diff --git a/week5/community-contributions/subtitles/srts/59295609/en_US.srt b/week5/community-contributions/subtitles/srts/59295609/en_US.srt new file mode 100755 index 0000000..ec429e4 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295609/en_US.srt @@ -0,0 +1,400 @@ +WEBVTT + +00:00.770 --> 00:03.530 +You must be feeling absolutely exhausted at this point. + +00:03.560 --> 00:05.330 +And if you are, that is okay. + +00:05.360 --> 00:07.430 +You have done a phenomenal job. + +00:07.430 --> 00:15.290 +This week has been a grueling week with tons of new information that you've had to take on and from + +00:15.290 --> 00:20.990 +the beginning, with the leaderboards and the arenas all the way through to our implementation of code + +00:21.020 --> 00:25.070 +translators, both in with frontier models and with open source models. + +00:25.070 --> 00:29.450 +We've gone through a lot, and it's been been quite a journey. 
+ +00:29.480 --> 00:37.490 +You've acquired a ton of new skills, so I'm here to congratulate you and to tell you that this last + +00:37.490 --> 00:44.240 +session will be quite quick for today as we prepare for the upcoming week. + +00:44.270 --> 00:49.190 +So first of all, just to say, of course, again, it was score one for the frontier. + +00:49.190 --> 00:53.600 +This time we've got to let Claude win the win the show. + +00:53.690 --> 01:01.130 +Uh, we had great fun writing software that can write code and translate code between Python and C plus. + +01:01.130 --> 01:01.640 +Plus. + +01:01.640 --> 01:05.490 +And we saw Claude, uh, rule the roost with. + +01:05.490 --> 01:10.260 +It's a fantastic job of re-implementing those algorithms. + +01:10.500 --> 01:15.810 +Um, at the end of today, we're just going to do a little bit more on discussing the performance of + +01:15.810 --> 01:17.700 +open source and closed source models. + +01:17.790 --> 01:21.510 +Um, and we're going to talk about commercial use cases for generating code. + +01:21.510 --> 01:23.940 +And there's of course going to be an assignment for you. + +01:23.940 --> 01:25.560 +It's a big assignment for you. + +01:25.560 --> 01:32.550 +So whilst this session will be relatively short, the work for you is relatively great and I can't wait + +01:32.550 --> 01:34.350 +to see what you come up with. + +01:34.770 --> 01:42.930 +But first, I want to take a moment to talk about a seriously important problem, a question which is + +01:42.930 --> 01:49.170 +about how do you decide whether or not your AI solution is actually doing a good job? + +01:49.260 --> 01:53.400 +Uh, what is what is the what are the techniques for evaluation? + +01:53.460 --> 01:59.190 +Uh, and it's as I say here, it's perhaps the single most important question, uh, because so much + +01:59.190 --> 02:01.530 +rides on how you will gauge success. 
+ +02:01.560 --> 02:06.070 +It's something that needs to be thought up front and it needs to be established and worked on. + +02:06.610 --> 02:13.720 +Uh, but there are actually two different kinds of performance metrics, and it's important to understand + +02:13.720 --> 02:17.770 +the differences between them and to use both in the right context. + +02:17.800 --> 02:23.560 +The first kind is what sometimes known as model centric or technical metrics. + +02:23.560 --> 02:29.620 +And these are the kinds of metrics that data scientists live and breathe by, because these are metrics + +02:29.620 --> 02:36.730 +which we can optimize our models with, and they tend to measure in a very immediate way the performance + +02:36.730 --> 02:37.690 +of the model. + +02:37.720 --> 02:41.200 +Now I'm going to talk to a couple of these metrics, but not all of them, because some of them are + +02:41.200 --> 02:43.300 +more related to traditional machine learning. + +02:43.300 --> 02:46.030 +And you may already have experience, and it doesn't matter if you don't. + +02:46.030 --> 02:53.170 +But the first one there, loss is just a general term for talking about how poorly an LLM has performed + +02:53.170 --> 02:56.530 +in its task, and is typically used during optimization. + +02:56.530 --> 03:01.480 +You look to try and minimize loss, that is, that is the task of optimization. + +03:01.480 --> 03:03.100 +When you are training a model. + +03:03.340 --> 03:10.310 +The type of loss that we use most frequently in this field is called cross-entropy loss, and this is + +03:10.310 --> 03:10.910 +how it works. + +03:10.940 --> 03:17.480 +So imagine you've got an input set of tokens, a sequence of tokens which is your input text. + +03:17.480 --> 03:19.730 +And you're trying to predict the next token. + +03:19.730 --> 03:23.570 +And you have what the next token actually is in your training data. 
+ +03:23.780 --> 03:28.820 +But as part of training, you're going to try and feed in some amount of this sequence to the model, + +03:28.850 --> 03:32.750 +say, predict the next token, and then you're going to have the real next token. + +03:32.750 --> 03:37.220 +And you want to use something about these two results to calculate a loss. + +03:37.250 --> 03:38.780 +Here's one way of doing it. + +03:38.810 --> 03:44.000 +What the model actually does is it doesn't just predict the next token in the way I've been saying it + +03:44.000 --> 03:44.810 +up to this point. + +03:44.810 --> 03:50.450 +What it really does is gives you a probability distribution of the probabilities of all of the possible + +03:50.450 --> 03:52.370 +next tokens that could come in the list. + +03:52.370 --> 03:55.880 +And we may, for example, pick the one that has the highest probability. + +03:55.910 --> 04:02.750 +The way you calculate cross-entropy loss is you say okay, well now we know what the real the true next + +04:02.750 --> 04:03.770 +token was. + +04:03.800 --> 04:08.850 +Let's find out what probability did the model ascribe to that token? + +04:08.850 --> 04:13.800 +If the actual next thing that was coming was, you know, we started with hello, and the next thing + +04:13.800 --> 04:15.570 +was the token for the word there. + +04:15.600 --> 04:20.130 +Then let's look up what kind of probability the model gave to the word there. + +04:20.130 --> 04:25.560 +And that probability is what we will use as the basis for the cross-entropy loss. + +04:25.560 --> 04:27.360 +And in fact, to turn it into a loss. + +04:27.360 --> 04:31.260 +Because if we just took the probability that a higher number would be better. + +04:31.260 --> 04:35.190 +And loss is a bad thing, we want to we want a higher number to be worse. + +04:35.190 --> 04:39.810 +So what we do is we actually take the negative log. + +04:39.840 --> 04:42.600 +We take minus the log of the probability. 
+ +04:42.630 --> 04:44.100 +And that might sound a bit confusing. + +04:44.130 --> 04:44.790 +Why do we do that? + +04:44.790 --> 04:50.910 +Because if you take the if the probability were one, which would be a perfect answer, it would mean + +04:50.910 --> 04:56.400 +that we said there was a 100% likelihood that the next token was exactly the thing that turned out to + +04:56.400 --> 04:57.390 +be the next token. + +04:57.390 --> 05:00.450 +So a probability of one would be a perfect answer. + +05:00.450 --> 05:04.320 +Well, the negative log of one is zero zero loss. + +05:04.320 --> 05:05.340 +Perfect answer. + +05:05.340 --> 05:06.360 +So that works. + +05:06.390 --> 05:12.540 +And if the probability is a very small number, as it gets smaller and smaller, negative log of that + +05:12.540 --> 05:18.000 +number as it gets closer and closer to zero becomes a higher and higher positive number. + +05:18.000 --> 05:19.410 +So again that works. + +05:19.410 --> 05:21.420 +It becomes a loss. + +05:21.450 --> 05:24.120 +A bigger loss is bad news. + +05:24.210 --> 05:31.590 +And so taking the negative log of the probability, the predicted probability of the thing that turned + +05:31.590 --> 05:38.940 +out to be the actual next token, that is called cross-entropy loss and is one of the fundamental metrics + +05:38.970 --> 05:39.720 +that are used. + +05:39.750 --> 05:44.700 +And very commonly with training LMS, we'll be using it ourselves at some point. + +05:45.330 --> 05:50.790 +Uh, another metric that you hear about quite a lot, which is very much related, is called perplexity, + +05:50.820 --> 05:54.180 +which is just, uh, e to the power of cross-entropy loss. + +05:54.210 --> 06:02.850 +It means when it turns out to be, is that a perplexity of one would mean that the model is completely + +06:02.850 --> 06:05.280 +confident and correct in its results. + +06:05.280 --> 06:08.490 +It's 100% accurate with 100% certainty. 
+ +06:08.490 --> 06:10.470 +That would give you a perplexity of one. + +06:10.500 --> 06:13.560 +A perplexity of two would be like a 50 over 50. + +06:13.590 --> 06:15.540 +It's right half the time. + +06:15.690 --> 06:21.120 +Perplexity of four would be a 25%, uh, probability. + +06:21.120 --> 06:22.800 +So that gives you a sense. + +06:22.800 --> 06:31.350 +A higher, uh, perplexity gives you a sense of of how many tokens would need to be, uh, if all things + +06:31.350 --> 06:35.820 +were equal, uh, in order to, to predict the next token. + +06:36.030 --> 06:39.330 +So that gives you a sense of loss and perplexity. + +06:39.330 --> 06:45.870 +I won't talk about the others, but you get the sense that these are immediate ways to measure the accuracy + +06:45.870 --> 06:52.170 +or the inaccuracy of a model that can be used during optimization or for analysis of a model. + +06:52.350 --> 06:55.110 +So that's the model centric metrics. + +06:55.110 --> 06:56.880 +What's the other kind of metrics then? + +06:56.910 --> 07:00.750 +The other kind of metrics are business centric or outcome metrics. + +07:00.750 --> 07:05.220 +And these these are the ones that are going to resonate the most with your business audience. + +07:05.220 --> 07:08.610 +And ultimately this is the problem that they are asking you to solve. + +07:08.690 --> 07:14.990 +So it's KPIs that are tied to the actual outcomes that your business people have asked for. + +07:15.020 --> 07:17.390 +Maybe it's return on investment. + +07:17.480 --> 07:23.030 +Maybe it's if this is meant to be optimizing something, then it's improvements in the time. + +07:23.390 --> 07:28.850 +If you think about what we've just done, then the ultimate metric would be for code. + +07:28.850 --> 07:30.740 +For the code solution we just built. + +07:30.740 --> 07:34.610 +How much faster is the C plus plus code than the Python code? + +07:34.730 --> 07:38.030 +How many times faster if it has the same answer? 
+ +07:38.210 --> 07:45.020 +So that would be an example of a business centric or outcome metric, because it requires us to to run + +07:45.020 --> 07:48.620 +the full product and see what comes out at the end. + +07:49.280 --> 07:53.780 +Uh, another example might be that could be comparisons to benchmarks. + +07:53.780 --> 07:58.400 +If you're doing something, you're building a, uh, some sort of solution that is then going to surpass + +07:58.400 --> 08:02.630 +other benchmarks at carrying out a certain business task. + +08:02.660 --> 08:08.360 +So obviously, the huge benefit of these kinds of metrics is that they are tangible, they are concrete, + +08:08.360 --> 08:10.700 +and they will speak to your business goals. + +08:10.700 --> 08:15.380 +If you are able to move the needle on these metrics, then you've delivered impact and you can prove + +08:15.380 --> 08:15.740 +it. + +08:15.770 --> 08:21.830 +The problem with them, of course, is that they're not so obviously immediately tied to the model performance. + +08:21.830 --> 08:25.730 +It's related to all sorts of other things, like the kind of data you've got, the environment, how + +08:25.730 --> 08:30.260 +it's used, and whether the original idea really works in solving the business problem. + +08:30.260 --> 08:36.260 +So there's a lot of unknowns that sit between your model's performance and the business metrics. + +08:36.260 --> 08:41.870 +But the business metrics have the great advantage of actually being meaningful in the real world. + +08:41.870 --> 08:46.130 +So the the answer is you need to use both these kinds of metrics. + +08:46.130 --> 08:47.780 +You need to use them in concert. + +08:47.780 --> 08:54.620 +One allows you to optimize your model to fine tune your model to to demonstrate its its fast performance. + +08:54.620 --> 09:01.520 +And the other of them is what you use to ultimately prove the business impact behind your solution. 
+ +09:01.520 --> 09:04.490 +With that, I'm going to pause in the next session. + +09:04.490 --> 09:09.530 +We're going to go back one more time and look at our coding solutions, and then talk about what you + +09:09.530 --> 09:11.600 +can do to take it to the next level. diff --git a/week5/community-contributions/subtitles/srts/59295609/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295609/ja_JP.srt new file mode 100755 index 0000000..6d4ad16 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295609/ja_JP.srt @@ -0,0 +1,337 @@ +WEBVTT + +00:00.770 --> 00:03.530 +今、 あなたは疲れきっているはずだ。 + +00:03.560 --> 00:05.330 +もしそうなら、 それでもいい。 + +00:05.360 --> 00:07.430 +あなたは驚異的な仕事をした。 + +00:07.430 --> 00:15.290 +今週は、 リーダーボードやアリーナの導入から、 コードトランスレータの実装、 フロンティアモデルやオープンソースモデルの導入に至るまで、 + +00:15.290 --> 00:25.070 +大量の新情報に直面し、 大変な1週間だった。 + +00:25.070 --> 00:29.450 +私たちは多くのことを経験してきたし、 かなりの道のりだった。 + +00:29.480 --> 00:37.490 +あなたはたくさんの新しいスキルを身につけた。 だから、 私はあなたを祝福するためにここにいる。 そして、 + +00:37.490 --> 00:44.240 +来週に備えるために、 今日の最後のセッションはかなり早く終わることを伝える。 + +00:44.270 --> 00:49.190 +だから、 まず最初に言っておきたいのは、 もちろん、 フロンティアにとっては1点だったということだ。 + +00:49.190 --> 00:53.600 +今回はクロードにショーを勝たせなければならない。 + +00:53.690 --> 01:01.130 +コードを書いたり、 PythonとCの間でコードを変換したりするソフトウェアを書くのは、 とても楽しかった。 + +01:01.130 --> 01:01.640 +それに + +01:01.640 --> 01:05.490 +そして、 クロードがねぐらを支配するのを見た。 + +01:05.490 --> 01:10.260 +これらのアルゴリズムを再実装した素晴らしい仕事だ。 + +01:10.500 --> 01:17.700 +ええと、 今日の最後には、 オープンソースとクローズドソースのモデルのパフォーマンスについてもう少し議論したいと思います。 + +01:17.790 --> 01:21.510 +それから、 コード生成の商業的な使用例について話すつもりだ。 + +01:21.510 --> 01:23.940 +そしてもちろん、 あなたへの課題もある。 + +01:23.940 --> 01:25.560 +君にとっては大きな任務だ。 + +01:25.560 --> 01:34.350 +だから、 このセッションは比較的短いが、 あなたにとっての仕事は比較的大きい。 + +01:34.770 --> 01:49.170 +しかし、 その前に、 重大な問題についてお話ししたいと思います。 それは、 AIソリューションが実際に良い仕事をしているかどうかをどのように判断するかという問題です。 + +01:49.260 --> 01:53.400 +ええと、 評価のテクニックとは何ですか? 
+ +01:53.460 --> 02:01.530 +ここで言うように、 おそらく最も重要な質問なんだ。 + +02:01.560 --> 02:06.070 +それは前もって考えなければならないことであり、 それを確立して取り組む必要がある。 + +02:06.610 --> 02:13.720 +しかし、 実際には2つの異なる種類のパフォーマンス指標があり、 その違いを理解し、 + +02:13.720 --> 02:17.770 +正しい文脈で両者を使い分けることが重要だ。 + +02:17.800 --> 02:23.560 +最初の種類は、 モデル中心または技術的なメトリクスとして知られているものです。 + +02:23.560 --> 02:29.620 +データ・サイエンティストにとって、 このような指標は生きているようなものだ。 なぜなら、 + +02:29.620 --> 02:37.690 +このような指標はモデルを最適化することができ、 モデルのパフォーマンスを非常に即座に測定することができるからだ。 + +02:37.720 --> 02:43.300 +これから、 これらのメトリクスのいくつかについて話すつもりだが、 すべてではない。 なぜなら、 これらの中には従来の機械学習に関連したものもあるからだ。 + +02:43.300 --> 02:46.030 +それに、 すでに経験があるかもしれないし、 なくても問題ない。 + +02:46.030 --> 02:53.170 +しかし、 最初の「損失」は、 LLMがそのタスクでどれだけうまくいかなかったかを表す一般的な用語で、 + +02:53.170 --> 02:56.530 +通常、 最適化の際に使われる。 + +02:56.530 --> 03:01.480 +つまり、 それが最適化の仕事なのだ。 + +03:01.480 --> 03:03.100 +モデルをトレーニングするとき。 + +03:03.340 --> 03:10.910 +この分野で最も頻繁に使用される損失はクロスエントロピー損失と呼ばれるもので、 このように機能する。 + +03:10.940 --> 03:17.480 +トークンの入力セット、 つまり入力テキストであるトークンのシーケンスがあるとしよう。 + +03:17.480 --> 03:19.730 +そして、 次のトークンを予測しようとしている。 + +03:19.730 --> 03:23.570 +そして、 次のトークンが実際に何であるかをトレーニング・データで知ることができる。 + +03:23.780 --> 03:28.820 +しかし、 トレーニングの一環として、 このシーケンスをある程度モデルに送り込み、 例えば次のトークンを予測し、 + +03:28.850 --> 03:32.750 +そして実際の次のトークンを持ってくる。 + +03:32.750 --> 03:37.220 +そして、 この2つの結果に関する何かを使って損失を計算したい。 + +03:37.250 --> 03:38.780 +その方法のひとつを紹介しよう。 + +03:38.810 --> 03:44.810 +このモデルが実際に行っているのは、 私がこれまで言ってきたような方法で次のトークンを予測するだけではない。 + +03:44.810 --> 03:52.370 +これは、 リストの中で次に来る可能性のあるすべてのトークンの確率分布を与えるものである。 + +03:52.370 --> 03:55.880 +そして例えば、 最も確率の高いものを選ぶかもしれない。 + +03:55.910 --> 04:03.770 +クロスエントロピーの損失を計算する方法は、 次のトークンが本当は何だったのかがわかったとする。 + +04:03.800 --> 04:08.850 +そのトークンにモデルがどのような確率を割り当てたかを調べてみよう。 + +04:08.850 --> 04:15.570 +実際に次に来るのが、 ハローから始まって、 次に来るのがその単語のトークンだったとする。 + +04:15.600 --> 04:20.130 +そこで、 モデルがその単語にどのような確率を与えたかを調べてみよう。 + +04:20.130 --> 04:25.560 +そして、 この確率こそが、 我々がクロスエントロピーの損失の根拠とするものである。 + +04:25.560 --> 04:27.360 +そして実際、 それを損失に変えてしまった。 + +04:27.360 --> 04:31.260 +なぜなら、 
数字が大きい方が良いという確率だけを取れば良いからだ。 + +04:31.260 --> 04:35.190 +そして、 損失は悪いことであり、 私たちはより高い数字を望んでいる。 + +04:35.190 --> 04:39.810 +そこで私たちが行うのは、 実際に負の対数を取ることだ。 + +04:39.840 --> 04:42.600 +確率の対数をマイナスする。 + +04:42.630 --> 04:44.100 +少し混乱するかもしれない。 + +04:44.130 --> 04:44.790 +なぜそんなことをするのか? + +04:44.790 --> 04:57.390 +というのも、 もし確率が1だとしたら、 それは完璧な答えであり、 次のトークンがまさに次のトークンである可能性が100%あると言ったことになるからだ。 + +04:57.390 --> 05:00.450 +つまり、 確率が1であれば完璧な答えとなる。 + +05:00.450 --> 05:04.320 +まあ、 1のマイナス対数はゼロゼロロスだ。 + +05:04.320 --> 05:05.340 +完璧な答えだ。 + +05:05.340 --> 05:06.360 +それでいいんだ。 + +05:06.390 --> 05:12.540 +そして、 確率が非常に小さな数であれば、 それが小さくなるにつれて、 ゼロに近づいていくにつれて、 + +05:12.540 --> 05:18.000 +その数の負の対数はどんどん大きな正の数になっていく。 + +05:18.000 --> 05:19.410 +それでまたうまくいく。 + +05:19.410 --> 05:21.420 +それは損失となる。 + +05:21.450 --> 05:24.120 +これ以上の損失は悪いニュースだ。 + +05:24.210 --> 05:31.590 +そのため、 確率の負の対数、 つまり予測された確率が実際に次のトークンであることが判明した確率を取る。 + +05:31.590 --> 05:39.720 +これはクロスエントロピー損失と呼ばれ、 使用される基本的な指標の一つである。 + +05:39.750 --> 05:44.700 +そして、 トレーニング用LMSの場合、 ある時点で自分たちも使うことになるのが一般的だ。 + +05:45.330 --> 05:50.790 +もう一つ、 よく耳にする指標で、 非常に関連性の高いものですが、 + +05:50.820 --> 05:54.180 +これは「難解度」と呼ばれるものです。 + +05:54.210 --> 06:05.280 +それが判明したときの意味は、 当惑が1であれば、 そのモデルは完全に自信に満ちた正しい結果であるということである。 + +06:05.280 --> 06:08.490 +100%確実で100%正確だ。 + +06:08.490 --> 06:10.470 +そうなると、 戸惑いが1つ生まれる。 + +06:10.500 --> 06:13.560 +2人の当惑は50を超える50のようなものだろう。 + +06:13.590 --> 06:15.540 +半分は正しい。 + +06:15.690 --> 06:21.120 +4点の当惑は25%の確率だ。 + +06:21.120 --> 06:22.800 +だから、 それが感覚になる。 + +06:22.800 --> 06:31.350 +困惑度が高ければ高いほど、 次のトークンを予測するためには、 すべての条件が同じなら、 + +06:31.350 --> 06:35.820 +いくつのトークンが必要なのかがわかる。 + +06:36.030 --> 06:39.330 +だから、 喪失感と当惑が生まれる。 + +06:39.330 --> 06:45.870 +その他については割愛するが、 これらはモデルの精度や不正確さを測定する即席の方法であり、 + +06:45.870 --> 06:52.170 +最適化の際やモデルの分析に使えるということがお分かりいただけるだろう。 + +06:52.350 --> 06:55.110 +これがモデル中心の評価基準だ。 + +06:55.110 --> 06:56.880 +では、 もうひとつの指標とは? 
+ +06:56.910 --> 07:00.750 +もう1つの指標は、 ビジネス中心の指標や成果指標である。 + +07:00.750 --> 07:05.220 +そして、 これらはあなたのビジネスの聴衆に最も響くものである。 + +07:05.220 --> 07:08.610 +そして最終的には、 この問題を解決することが求められているのだ。 + +07:08.690 --> 07:14.990 +つまり、 ビジネス・パーソンが求めた実際の成果に結びついたKPIなのだ。 + +07:15.020 --> 07:17.390 +投資対効果かもしれない。 + +07:17.480 --> 07:23.030 +もし、 これが何かを最適化するためのものであるならば、 それは時間における改善なのかもしれない。 + +07:23.390 --> 07:28.850 +今やったことを考えれば、 究極の指標はコードだろう。 + +07:28.850 --> 07:30.740 +先ほど構築したコード・ソリューションの場合。 + +07:30.740 --> 07:34.610 +C++のコードはPythonのコードよりどれくらい速いですか? + +07:34.730 --> 07:38.030 +同じ答えなら何倍速い? + +07:38.210 --> 07:48.620 +つまり、 これはビジネス中心、 あるいは成果指標の一例であり、 製品をフル稼働させ、 最後に何が出てくるかを確認する必要があるからだ。 + +07:49.280 --> 07:53.780 +ええと、 別の例としては、 ベンチマークとの比較があるかもしれない。 + +07:53.780 --> 08:02.630 +もし何かをするのであれば、 ある種のソリューションを構築し、 それがあるビジネス・タスクを遂行する上で他のベンチマークを凌駕することになる。 + +08:02.660 --> 08:10.700 +つまり、 この種の測定基準の大きな利点は、 目に見え、 具体的であり、 ビジネス目標につながるということだ。 + +08:10.700 --> 08:15.740 +これらの指標で針を動かすことができれば、 インパクトを提供したことになり、 それを証明することができる。 + +08:15.770 --> 08:21.830 +もちろん、 これらの問題点は、 モデルのパフォーマンスとそれほど明確に結びついていないことだ。 + +08:21.830 --> 08:25.730 +それは、 あなたが持っているデータの種類、 環境、 その使われ方、 ビジネス上の問題を解決するためにオリジナルのアイデアが本当に機能するかどうかなど、 + +08:25.730 --> 08:30.260 +他のあらゆることに関連している。 + +08:30.260 --> 08:36.260 +つまり、 モデルのパフォーマンスとビジネス指標の間には、 未知の部分がたくさんあるのだ。 + +08:36.260 --> 08:41.870 +しかし、 ビジネス・メトリクスには、 現実の世界で実際に意味を持つという大きな利点がある。 + +08:41.870 --> 08:46.130 +つまり答えは、 この2種類の指標を使う必要があるということだ。 + +08:46.130 --> 08:47.780 +協調して使う必要がある。 + +08:47.780 --> 08:54.620 +1つは、 モデルを最適化し、 その高速性能を実証するためにモデルを微調整することができます。 + +08:54.620 --> 09:01.520 +そしてもうひとつは、 最終的にソリューションの背景にあるビジネスインパクトを証明するために使うものだ。 + +09:01.520 --> 09:04.490 +これをもって、 次のセッションは一時中断する。 + +09:04.490 --> 09:11.600 +もう1度戻って、 コーディング・ソリューションを見て、 次のレベルに進むために何ができるかをお話しします。 diff --git a/week5/community-contributions/subtitles/srts/59295609/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295609/ko_KR.srt new file mode 100755 index 0000000..7503f27 --- /dev/null +++ 
b/week5/community-contributions/subtitles/srts/59295609/ko_KR.srt @@ -0,0 +1,382 @@ +WEBVTT + +00:00.770 --> 00:03.530 +지금쯤이면 정말 지쳤을 거예요 + +00:03.560 --> 00:05.330 +그렇다고 해도 괜찮아요 + +00:05.360 --> 00:07.430 +정말 잘하셨어요 + +00:07.430 --> 00:15.290 +이번 주는 정말 힘들었죠 새로운 정보가 수없이 많았거든요 처음부터 리더보드와 + +00:15.290 --> 00:20.990 +경기장에서부터 코드 번역기 구현까지 프론티어 모델과 + +00:21.020 --> 00:25.070 +오픈 소스 모델에서요 + +00:25.070 --> 00:29.450 +많은 일을 겪었고 꽤 긴 여정이었어요 + +00:29.480 --> 00:37.490 +새로운 기술을 많이 익혔으니 축하해주러 온 거예요 그리고 오늘 마지막 + +00:37.490 --> 00:44.240 +수업은 꽤 짧을 거예요 다음 주를 준비하니까요 + +00:44.270 --> 00:49.190 +우선, 다시 말하지만 개척 시대의 음악이었어요 + +00:49.190 --> 00:53.600 +이번에는 클로드가 쇼에서 이기게 해줘야 해요 + +00:53.690 --> 01:01.130 +파이썬과 C 플러스 간의 코드를 작성하고 번역하는 소프트웨어를 개발하는 건 정말 즐거웠어요 + +01:01.130 --> 01:01.640 +더 있어요 + +01:01.640 --> 01:05.490 +클로드가 대장 노릇을 하는 걸 봤죠 + +01:05.490 --> 01:10.260 +알고리즘을 다시 구현한 건 정말 멋진 일이에요 + +01:10.500 --> 01:15.810 +오늘 마무리로 오픈 소스와 비공개 소스 모델의 성능에 대해 + +01:15.810 --> 01:17.700 +좀 더 논의할 텐데요 + +01:17.790 --> 01:21.510 +코드 생성 시 상업적 유스 케이스도 다룰 거예요 + +01:21.510 --> 01:23.940 +물론 당신에게 주어진 과제도 있어요 + +01:23.940 --> 01:25.560 +큰 임무잖아요 + +01:25.560 --> 01:32.550 +이 세션이 비교적 짧지만 당신에게 주어진 일은 상대적으로 훌륭합니다 어떤 걸 생각해내실지 + +01:32.550 --> 01:34.350 +빨리 보고 싶네요 + +01:34.770 --> 01:42.930 +하지만 먼저 중요하고 중요한 문제에 대해 잠시 얘기하고 싶습니다 여러분의 인공지능 + +01:42.930 --> 01:49.170 +솔루션이 실제로 잘 작동하는지 어떻게 결정하나요? + +01:49.260 --> 01:53.400 +평가할 때 어떤 기술이 필요한가요? 
+ +01:53.460 --> 01:59.190 +아까도 말했지만 가장 중요한 질문일 겁니다 성공의 척도는 + +01:59.190 --> 02:01.530 +많은 걸 좌우하니까요 + +02:01.560 --> 02:06.070 +미리 생각하고 확립하고 연구해야 하는 거죠 + +02:06.610 --> 02:13.720 +하지만 성능 지표에는 두 종류가 있습니다 그 차이를 이해하고 + +02:13.720 --> 02:17.770 +적절하게 사용하는 게 중요하죠 + +02:17.800 --> 02:23.560 +첫 번째 종류는 모델 중심이라고도 불리는 기술적 측정법이에요 + +02:23.560 --> 02:29.620 +데이터 과학자들은 이런 측정 기준에 목숨을 걸어요 모델을 최적화할 + +02:29.620 --> 02:37.690 +수 있는 측정 기준이고 모델의 성능을 바로 측정하는 경향이 있거든요 + +02:37.720 --> 02:41.200 +이제 이런 지표 몇 가지에 대해 말씀드리겠습니다 전부는 아니고요 일부는 + +02:41.200 --> 02:43.300 +전통적인 머신 러닝과 더 관련이 있거든요 + +02:43.300 --> 02:46.030 +이미 경험이 있을 수도 있고 없어도 괜찮아요 + +02:46.030 --> 02:53.170 +첫 번째 항목 손실은 일반적인 용어로 LLM이 작업에서 얼마나 형편없었는지를 뜻합니다 + +02:53.170 --> 02:56.530 +일반적으로 최적화에 사용되죠 + +02:56.530 --> 03:01.480 +손실을 최소화하려고 노력하죠 그게 최적화의 작업이에요 + +03:01.480 --> 03:03.100 +모델을 훈련할 때요 + +03:03.340 --> 03:10.910 +이 분야에서 가장 자주 발생하는 손실은 교차 엔트로피 손실이라고 하는데 이렇게 작용해요 + +03:10.940 --> 03:17.480 +여러분의 입력 텍스트인 토큰의 입력 집합과 토큰의 시퀀스가 있다고 상상해 보세요 + +03:17.480 --> 03:19.730 +다음 토큰을 예측하는 거군요 + +03:19.730 --> 03:23.570 +훈련 데이터에서 다음 토큰이 무엇인지 알 수 있죠 + +03:23.780 --> 03:28.820 +하지만 훈련의 일부로 이 시퀀스의 일부를 모델에 피드하려고 할 겁니다 예를 들어 + +03:28.850 --> 03:32.750 +다음 토큰을 예측하고 진짜 다음 토큰을 갖게 되는 거죠 + +03:32.750 --> 03:37.220 +이 두 결과로 손실을 계산하고 싶겠죠 + +03:37.250 --> 03:38.780 +이런 방법도 있어요 + +03:38.810 --> 03:44.810 +모델이 실제로 하는 일은 다음 토큰을 예측하는 것이 아닙니다 지금까지 제가 말한 방식으로는요 + +03:44.810 --> 03:50.450 +이것은 확률을 분배하는 역할을 합니다. 목록에 있는 다음 토큰의 + +03:50.450 --> 03:52.370 +확률을 말이죠. 
+ +03:52.370 --> 03:55.880 +예를 들어 가능성이 가장 높은 걸 고를 수도 있죠 + +03:55.910 --> 04:02.750 +교차 엔트로피 손실을 계산하는 방법은 이제 진짜 다음 토큰이 뭔지 안다고 + +04:02.750 --> 04:03.770 +하는 거죠 + +04:03.800 --> 04:08.850 +모델이 토큰에 어떤 확률을 기록했는지 알아보죠 + +04:08.850 --> 04:13.800 +다음에 나올 것은 hello로 시작해서 다음 것은 단어에 + +04:13.800 --> 04:15.570 +대한 토큰이죠 + +04:15.600 --> 04:20.130 +그럼 모델이 제시한 단어의 확률을 알아보죠 + +04:20.130 --> 04:25.560 +그 확률을 교차 엔트로피 손실의 토대로 사용할 거예요 + +04:25.560 --> 04:27.360 +사실, 손실로 바꾸어 놓았죠 + +04:27.360 --> 04:31.260 +확률을 따져 보면 높은 숫자가 더 낫거든요 + +04:31.260 --> 04:35.190 +인명 손실은 나쁜 거예요 우리는 인명 손실이 많으면 더 나빠지길 바라죠 + +04:35.190 --> 04:39.810 +그래서 네거티브 로그를 선택해요 + +04:39.840 --> 04:42.600 +확률을 빼고 계산해 보죠 + +04:42.630 --> 04:44.100 +비트 때문에 좀 헷갈리실 거예요 + +04:44.130 --> 04:44.790 +왜 그래야 하죠? + +04:44.790 --> 04:50.910 +만약 확률이 1이라면, 그게 완벽한 대답이 되겠죠. 그 말은 다음 + +04:50.910 --> 04:57.390 +토큰이 정확히 다음 토큰이 될 확률이 100%라는 뜻이 되겠죠. + +04:57.390 --> 05:00.450 +1의 가능성이 완벽한 답이겠네요 + +05:00.450 --> 05:04.320 +마이너스 1은 0, 0, 0 손실이에요 + +05:04.320 --> 05:05.340 +완벽한 대답이에요 + +05:05.340 --> 05:06.360 +잘 작동하네요 + +05:06.390 --> 05:12.540 +확률이 아주 작은 숫자라면 숫자가 점점 작아질수록 그 숫자에 대한 + +05:12.540 --> 05:18.000 +음수는 0에 가까워질수록 양수가 점점 더 높아져요 + +05:18.000 --> 05:19.410 +이것도 잘 작동하죠 + +05:19.410 --> 05:21.420 +손실이 되죠 + +05:21.450 --> 05:24.120 +손해가 크면 안 좋죠 + +05:24.210 --> 05:31.590 +그래서 확률의 부정적인 로그를 즉, 예상된 확률을 실제 다음 토큰으로 + +05:31.590 --> 05:39.720 +추정한 건데 교차 엔트로피 손실이라고 해요 사용되는 기본 지표 중 하나죠 + +05:39.750 --> 05:44.700 +일반적으로 훈련 LMS와 관련해 언젠가 우리가 직접 사용하게 될 텐데요 + +05:45.330 --> 05:50.790 +흔히 듣는 또 다른 측정법은 이와 밀접한 관련이 있는데 당혹스러움이라고 해요 + +05:50.820 --> 05:54.180 +교차 엔트로피 손실의 힘의 약자를 뜻하죠 + +05:54.210 --> 06:02.850 +결과가 그렇게 나오면 당혹스럽죠 그 모델은 결과에 대해 완전히 확신하고 + +06:02.850 --> 06:05.280 +정확해요 + +06:05.280 --> 06:08.490 +100% 정확하고 100% 확실하죠 + +06:08.490 --> 06:10.470 +그럼 한 가지 당혹감이 생기겠죠 + +06:10.500 --> 06:13.560 +2는 50점 만점에 50점이에요 + +06:13.590 --> 06:15.540 +절반은 맞아요 + +06:15.690 --> 06:21.120 +4의 당혹스러움은 25% 확률이에요 + +06:21.120 --> 06:22.800 +그걸 보면 감이 오죠 + +06:22.800 --> 
06:31.350 +더 높고 복잡한 건 토큰이 몇 개 필요한지 알게 돼요 모든 게 평등하다면 + +06:31.350 --> 06:35.820 +다음 토큰을 예측하기 위해서요 + +06:36.030 --> 06:39.330 +그래서 상실감과 당혹감이 들죠 + +06:39.330 --> 06:45.870 +다른 건 언급하지 않겠지만 모델의 정확성 혹은 부정확성을 측정하는 + +06:45.870 --> 06:52.170 +즉각적인 방법들이고 모델 최적화나 분석에 사용될 수 있어요. + +06:52.350 --> 06:55.110 +그게 모델 중심 지표예요 + +06:55.110 --> 06:56.880 +다른 측정 기준은 뭔가요? + +06:56.910 --> 07:00.750 +사업 중심 지표나 결과 지표도 있어요 + +07:00.750 --> 07:05.220 +이런 것들이 비즈니스 팬들에게 가장 큰 반향을 일으킬 거예요 + +07:05.220 --> 07:08.610 +궁극적으로 그들이 해결하라고 요구하는 문제이기도 하고요 + +07:08.690 --> 07:14.990 +따라서 기업 측에서 요구한 실제 결과와 연관된 KPI죠 + +07:15.020 --> 07:17.390 +투자 수익률 같은 거겠죠 + +07:17.480 --> 07:23.030 +최적화를 위한 것이라면 시간이 지나면 개선되겠죠 + +07:23.390 --> 07:28.850 +우리가 방금 한 걸 생각해보면 궁극의 메트릭은 코드에 대한 거예요 + +07:28.850 --> 07:30.740 +방금 만든 코드 솔루션이요 + +07:30.740 --> 07:34.610 +C 플러스 플러스 코드가 파이썬 코드보다 얼마나 빠른가요? + +07:34.730 --> 07:38.030 +같은 답을 쓰면 몇 배나 빨라지죠? + +07:38.210 --> 07:45.020 +비즈니스 중심 혹은 결과 지표의 한 예입니다 전체 제품을 실행하고 결과가 + +07:45.020 --> 07:48.620 +어떻게 나오는지 보라는 거니까요 + +07:49.280 --> 07:53.780 +벤치마크와 비교할 수 있는 또 다른 예가 있어요 + +07:53.780 --> 07:58.400 +뭔가를 하고 있다면 특정 비즈니스 작업을 수행하는 데 다른 + +07:58.400 --> 08:02.630 +벤치마크를 뛰어넘는 솔루션을 구축하는 거죠 + +08:02.660 --> 08:08.360 +따라서 이런 측정법의 큰 이점은 명확하고 구체적인 측정법이 여러분의 사업 + +08:08.360 --> 08:10.700 +목표에 반영된다는 거죠 + +08:10.700 --> 08:15.740 +이런 측정값에서 바늘을 움직일 수 있다면 충격을 전달한 것이고 증명할 수 있어요 + +08:15.770 --> 08:21.830 +물론 문제는 모델의 연기와 바로 연결되지 않는다는 거죠 + +08:21.830 --> 08:25.730 +다른 모든 것과 관련돼 있죠 여러분이 가진 데이터, 환경, 사용 방법 그리고 + +08:25.730 --> 08:30.260 +원래 아이디어가 정말 비즈니스 문제를 해결하는 데 도움이 되는지 같은 거요 + +08:30.260 --> 08:36.260 +모델의 성과와 비즈니스 척도 사이에는 미지의 요소가 많아요 + +08:36.260 --> 08:41.870 +하지만 비즈니스 지표가 현실에서 의미 있다는 큰 장점이 있어요 + +08:41.870 --> 08:46.130 +그래서 답은 두 가지 측정법을 모두 써야 한다는 거죠 + +08:46.130 --> 08:47.780 +같이 써야 해요 + +08:47.780 --> 08:54.620 +하나는 모델을 최적화해 빠른 성능을 보여주기 위해 미세 조정하는 것이죠 + +08:54.620 --> 09:01.520 +다른 하나는 솔루션 뒤에 숨겨진 비즈니스 영향을 궁극적으로 증명할 때 사용하죠 + +09:01.520 --> 09:04.490 +다음 세션에선 잠시 중단할게요 + +09:04.490 --> 09:09.530 +한 번 
더 돌아가서 코딩 솔루션을 살펴보고 다음 레벨로 넘어가기 위해 무엇을 + +09:09.530 --> 09:11.600 +할 수 있는지 얘기해보죠 diff --git a/week5/community-contributions/subtitles/srts/59295619/en_US.srt b/week5/community-contributions/subtitles/srts/59295619/en_US.srt new file mode 100755 index 0000000..28a7c6e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295619/en_US.srt @@ -0,0 +1,475 @@ +WEBVTT + +00:00.350 --> 00:05.540 +Welcome back to the the moment when we bring it all together into a beautiful user interface. + +00:05.540 --> 00:10.880 +But first, just a quick look one more time at the inference endpoint screen in Huggingface where you + +00:10.880 --> 00:12.260 +can see my running code. + +00:12.260 --> 00:15.380 +Quine 1.57 billion chat inference. + +00:15.680 --> 00:16.640 +We can. + +00:16.670 --> 00:23.060 +I just wanted to show you that you can come into this and take a look at how your inference endpoint + +00:23.060 --> 00:28.850 +is running, and you can do things like see analytics, see what's going on, see the number of requests, + +00:28.850 --> 00:32.090 +which, even though I made some, is not enough to get on the radar. + +00:32.210 --> 00:34.430 +Uh, latency, CPU usage. + +00:34.460 --> 00:34.940 +Oh, there we go. + +00:34.970 --> 00:38.930 +A little blip in CPU usage from what we just did and GPU usage. + +00:38.960 --> 00:39.920 +Nice. + +00:40.010 --> 00:49.790 +Uh, and you can also go over to cost and see that I've spent $3.64 so far on, uh, on this particular, + +00:49.790 --> 00:51.020 +uh, model. + +00:51.380 --> 00:53.150 +Um, okay. + +00:53.180 --> 00:59.870 +Now, with that in mind, let's leave it, I think, on the analytics, let's go back to our Jupyter + +00:59.870 --> 01:00.770 +lab. + +01:01.010 --> 01:09.070 +Um, and let's wrap this code to call Code Kwan in a nice little stream method, just like the other + +01:09.070 --> 01:14.620 +stream methods that we've already done before for GPT four and for Claude stream Quen. 
+ +01:14.650 --> 01:17.200 +Same kind of method, but of course it's the same function. + +01:17.200 --> 01:18.700 +It's going to do it very differently. + +01:18.910 --> 01:21.370 +It's going to create a tokenizer. + +01:21.370 --> 01:27.700 +It's going to, of course, uh, turn Python into the usual messages list. + +01:27.700 --> 01:29.710 +It's going to apply the chat template. + +01:29.710 --> 01:34.420 +So we now have this in the text that is ready for tokenization. + +01:34.420 --> 01:41.590 +And then we make the magical call to inference client using the URL for our endpoint and passing in + +01:41.590 --> 01:43.120 +our Huggingface token. + +01:43.120 --> 01:47.020 +And here we are doing client text generation. + +01:47.020 --> 01:49.570 +Here's our text we want to stream. + +01:49.570 --> 01:52.930 +And that's our max new tokens. + +01:52.930 --> 01:55.900 +And then back comes the results. + +01:55.900 --> 02:03.400 +As we stream back each token we yield the total of everything so far because hopefully you remember + +02:03.400 --> 02:05.740 +that is what Gradio expects. + +02:05.740 --> 02:13.280 +It expects to a sort of cumulative total of everything that's been received so far in all of its chunks. + +02:13.280 --> 02:21.080 +So that function there stream collection is a companion function to the others we wrote before for stream + +02:21.080 --> 02:23.030 +GPT for stream Claude. + +02:23.030 --> 02:29.600 +So now we can have an optimized method that will replace the previous optimized method for optimizing + +02:29.600 --> 02:34.010 +code, which can flip between three models GPT Claude or Code Kwan. + +02:34.400 --> 02:34.910 +Um. + +02:34.910 --> 02:41.900 +And here we have the total of our user interface code for Gradio. + +02:41.930 --> 02:43.400 +Make sure I run this. + +02:43.820 --> 02:46.520 +Uh, so you'll remember how simple this is. + +02:46.520 --> 02:47.720 +It's crazy. 
+ +02:47.960 --> 02:54.710 +Uh, we have a nice little title, and then we have a row for our Python code and C plus plus code. + +02:54.710 --> 02:57.140 +We have a row for selecting the model. + +02:57.140 --> 03:03.200 +And now we've added code Kwon to the three to the previously two models that you could choose between. + +03:03.320 --> 03:08.750 +And we've got a button to convert the code, a button to run Python, a button to run C plus plus, + +03:08.750 --> 03:13.600 +and then some output boxes for the Python results and the cplusplus results. + +03:13.600 --> 03:17.500 +And then these are the three actions. + +03:17.500 --> 03:22.870 +The three places where if a button is clicked, we take some kind of action. + +03:22.870 --> 03:26.290 +And I love the way that it just simply reads like English. + +03:26.290 --> 03:31.000 +If someone wants to convert, if they press convert button, it calls the optimize function. + +03:31.000 --> 03:33.550 +This is the inputs and that's the output. + +03:33.550 --> 03:37.120 +If they press the Python run button, it executes Python. + +03:37.120 --> 03:43.270 +The input is the Python code, the output is the python out, and the same for the C plus plus button + +03:43.270 --> 03:44.080 +as well. + +03:44.080 --> 03:47.020 +It should it look super simple. + +03:47.020 --> 03:48.880 +And that's because it is super simple. + +03:49.300 --> 03:51.310 +And with that we're going to launch it. + +03:51.340 --> 03:54.730 +Fingers crossed this is going to work beautifully for us. + +03:55.060 --> 03:58.840 +All right so here is our user interface. + +03:59.080 --> 04:08.050 +Uh, and um, what you're seeing here of course, is the Python code for the simple, uh, pi calculation. + +04:08.050 --> 04:09.280 +And why not? + +04:09.280 --> 04:12.490 +Let's just try doing it for, uh, for GPT. + +04:14.750 --> 04:17.720 +You'll remember that's the C plus plus equivalent. + +04:17.750 --> 04:21.080 +Let's run the Python variation. 
+ +04:21.110 --> 04:23.240 +If I remember right this is about eight seconds. + +04:23.240 --> 04:25.220 +So we have to wait for this to count to about eight. + +04:25.250 --> 04:27.140 +And we should get the Python results. + +04:27.170 --> 04:29.810 +There it is 8.6 seconds. + +04:29.810 --> 04:34.820 +There is good old pi at least to some number of decimal places. + +04:34.820 --> 04:36.410 +And now we'll run the C plus. + +04:36.410 --> 04:38.630 +Plus that came back from GPT four. + +04:38.630 --> 04:47.630 +And great in 0.06 of a second a nice greater than 100 x improvement. + +04:47.810 --> 04:50.630 +Now one more time for Claude. + +04:50.750 --> 04:55.340 +We convert the the code courtesy of Anthropic's Claude. + +04:55.820 --> 04:57.380 +Um, there it is. + +04:57.410 --> 05:00.080 +And now we will run Claude's C plus. + +05:00.080 --> 05:00.680 +Plus. + +05:00.680 --> 05:05.480 +And it narrowly beats, uh, GPT four again. + +05:05.480 --> 05:10.940 +But I think it has this line in here, and maybe it has allowed it to be slightly faster. + +05:10.940 --> 05:13.610 +Maybe Claude's code really is quicker. + +05:13.790 --> 05:16.400 +Um, they're so similar that I am suspicious. + +05:16.400 --> 05:16.810 +This is. + +05:16.840 --> 05:19.480 +This all gets optimized anyway by the compiler. + +05:19.600 --> 05:22.570 +But it's possible this is consistently slightly faster. + +05:22.570 --> 05:24.880 +So you may be a C plus plus expert. + +05:24.880 --> 05:25.540 +That can tell me. + +05:25.540 --> 05:31.030 +And you may be able to try it yourself and satisfy yourself, whether on your architecture it is faster + +05:31.030 --> 05:31.960 +or not. + +05:31.960 --> 05:33.880 +But anyway, that is not the point. + +05:33.910 --> 05:38.260 +What we're here to see is how does code quality measure up? + +05:38.290 --> 05:39.220 +Can it convert? + +05:39.250 --> 05:42.130 +Does it make sense and is it any different? 
+ +05:42.160 --> 05:45.430 +Let's press the convert code button and see what happens. + +05:45.430 --> 05:49.780 +So first of all as we know it's more it's got some chattiness to it. + +05:49.810 --> 05:53.920 +It hasn't correctly stripped out its explanation. + +05:53.920 --> 05:55.720 +So we will need to delete that. + +05:55.720 --> 05:57.880 +But we'll let it get away with that. + +05:57.910 --> 06:03.790 +We won't ding the code model for adding that extra. + +06:05.320 --> 06:11.530 +Remember, this is all streaming right now as as we watch it from the endpoint. + +06:11.560 --> 06:15.070 +If I go over to here, I may need to refresh that. + +06:15.070 --> 06:17.290 +We should be seeing that we do. + +06:17.320 --> 06:22.820 +We do indeed see a blip of CPU and GPU as it streams back the results, I love it. + +06:23.240 --> 06:29.990 +Uh, and so here by now, uh, go down to the, uh, sorry to our gradient screen. + +06:29.990 --> 06:30.860 +Here we go. + +06:30.860 --> 06:35.690 +Uh, we have the, the, uh, full solution. + +06:35.690 --> 06:42.260 +So what we're going to do now is we're going to remove the stuff at the top, and we're going to remove + +06:42.260 --> 06:46.400 +the explanation at the end that we don't need. + +06:46.430 --> 06:51.980 +And we are going to run this C plus plus code to see how code Quinn has done. + +06:52.250 --> 06:53.780 +Let's give it a try. + +06:56.060 --> 06:58.670 +And it ran and it was fast. + +06:58.700 --> 07:00.140 +It was about the same as GPT four. + +07:00.170 --> 07:02.720 +Oh, I imagine it's about the same. + +07:02.900 --> 07:09.170 +Uh, and I see it doesn't have that pragma thing in there, but it seems to have done a great job. + +07:09.170 --> 07:10.640 +It's got the same answer. + +07:10.640 --> 07:15.290 +And I think that is certainly a success for code. + +07:15.290 --> 07:16.010 +Quinn. 
+ +07:16.340 --> 07:24.130 +Uh, and again, remember the difference in model parameters code Quinn running here with its 7 billion + +07:24.130 --> 07:33.730 +parameters and compared with the, uh, the uh, hundreds of, of of, uh, sorry, there are more than + +07:33.730 --> 07:37.660 +2 trillion parameters that you've got in GPT four and Claude. + +07:38.170 --> 07:44.200 +Uh, so let's now go back here and let's, let's, uh, raise the bar. + +07:44.230 --> 07:46.000 +Let's make the challenge harder. + +07:46.030 --> 07:57.580 +Let's change this value to be the Python hard, the code which calculates the maximum subarray sum. + +07:57.580 --> 08:05.110 +And we will see now how our open source model can handle this complicated case. + +08:08.260 --> 08:11.590 +So what's it doing its thing. + +08:13.240 --> 08:15.970 +So already there is a problem. + +08:15.970 --> 08:16.840 +There is a problem. + +08:16.840 --> 08:24.790 +And that problem is that, uh, it has decided to reimplement the approach for generating random numbers, + +08:24.790 --> 08:33.440 +changing the approach that we had set with this, uh, LCG, um, technique for generating repeatable + +08:33.440 --> 08:36.380 +and consistent random numbers between the implementations. + +08:36.410 --> 08:41.270 +Now, that's despite the fact that I very clearly put in the system prompt that it should not change + +08:41.270 --> 08:43.760 +the functionality around random number generation. + +08:43.790 --> 08:50.990 +So again, I was not able to convince Coetquen to change that strategy. + +08:51.050 --> 08:53.960 +Uh, you should experiment with this, see if you can do better. + +08:53.960 --> 08:58.580 +But I was not able to do so myself with some experimenting. + +08:59.000 --> 09:00.950 +Uh, it's almost finished. + +09:01.160 --> 09:01.940 +There we go. + +09:01.940 --> 09:02.540 +It's done. + +09:02.570 --> 09:07.640 +So we will take out what comes at the end, and we will take out what comes at the beginning. 
+ +09:07.640 --> 09:09.680 +And now the moment of truth. + +09:09.680 --> 09:15.230 +We will run the C plus plus code from Code Kwan and scroll down. + +09:15.770 --> 09:21.380 +Uh, and what we find is, of course, that the number does not match. + +09:21.410 --> 09:23.930 +If you remember the the result from before. + +09:23.930 --> 09:30.220 +So unfortunately, Code Kwan has not been successful in replicating the number, and that's no surprise. + +09:30.220 --> 09:37.420 +That is because of course, it has, uh, got a its own random number generator. + +09:37.720 --> 09:42.850 +Um, it's, um, done some, uh, some interesting stuff here. + +09:42.850 --> 09:49.930 +It, uh, does appear to have potentially recognized the more efficient methodology, but since the + +09:49.930 --> 09:55.390 +numbers don't match, we can't validate that it has, in fact done everything correctly and got the + +09:55.390 --> 09:56.230 +right number. + +09:56.230 --> 10:00.970 +So unfortunately, unfortunately, I was so very hopeful. + +10:00.970 --> 10:08.830 +Codeclan did Laudably Codeclan was able to pass the the pie test, the simple test, but Codeclan did + +10:08.830 --> 10:16.930 +stumble with the harder test and wasn't able to reproduce the same exact answer as the Python code, + +10:16.930 --> 10:19.060 +which was its mission. + +10:19.060 --> 10:24.850 +So from that perspective, unfortunately, the frontier models come up on top. + +10:24.850 --> 10:27.370 +Clawed again for the win. + +10:27.370 --> 10:30.340 +Uh, and Codeclan didn't quite make it. + +10:31.240 --> 10:33.250 +I will see you next time for a wrap up. 
diff --git a/week5/community-contributions/subtitles/srts/59295619/ja_JP.srt b/week5/community-contributions/subtitles/srts/59295619/ja_JP.srt new file mode 100755 index 0000000..f6ded51 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295619/ja_JP.srt @@ -0,0 +1,430 @@ +WEBVTT + +00:00.350 --> 00:05.540 +すべてを美しいユーザー・インターフェースにまとめる瞬間へ、 ようこそ。 + +00:05.540 --> 00:12.260 +その前に、 もう一度Huggingfaceの推論エンドポイント画面を見てほしい。 + +00:12.260 --> 00:15.380 +クワイン1 570億チャットの推論 + +00:15.680 --> 00:16.640 +我々はできる。 + +00:16.670 --> 00:23.060 +分析結果を見たり、 何が起こっているのかを見たり、 + +00:23.060 --> 00:32.090 +リクエスト数を見たりすることができます。 + +00:32.210 --> 00:34.430 +ええと、 レイテンシー、 CPU使用率。 + +00:34.460 --> 00:34.940 +そうだ。 + +00:34.970 --> 00:38.930 +CPUの使用率とGPUの使用率が少し上がっている。 + +00:38.960 --> 00:39.920 +いいね。 + +00:40.010 --> 00:51.020 +それと、 "コスト "を見て、 僕が3ドル使っていることがわかると思う。 これまでのところ、 この特定のモデルで64回。 + +00:51.380 --> 00:53.150 +うーん、 わかった。 + +00:53.180 --> 01:00.770 +さて、 それを念頭に置いて、 アナリティクスは置いておいて、 Jupyterラボに戻ろう。 + +01:01.010 --> 01:09.070 +コード・クワンを呼び出すこのコードを、 GPT4やクロード・ストリーム・クエンですでにやった他のストリーム・メソッドと同じように、 + +01:09.070 --> 01:14.620 +小さなストリーム・メソッドにまとめよう。 + +01:14.650 --> 01:17.200 +同じような方法だが、 もちろん同じ機能だ。 + +01:17.200 --> 01:18.700 +そのやり方はまったく違う。 + +01:18.910 --> 01:21.370 +これはトークナイザーを作成する。 + +01:21.370 --> 01:27.700 +もちろん、 Pythonを通常のメッセージリストに変えることになる。 + +01:27.700 --> 01:29.710 +チャットのテンプレートが適用されます。 + +01:29.710 --> 01:34.420 +これでトークン化の準備が整った。 + +01:34.420 --> 01:43.120 +そして、 エンドポイントのURLとHuggingfaceトークンを使って推論クライアントを呼び出します。 + +01:43.120 --> 01:47.020 +そして、 ここではクライアントのテキスト生成を行っている。 + +01:47.020 --> 01:49.570 +ここにストリーミングしたいテキストがある。 + +01:49.570 --> 01:52.930 +これが新しいトークンの最大値だ。 + +01:52.930 --> 01:55.900 +そして結果が戻ってくる。 + +01:55.900 --> 02:05.740 +各トークンをストリームバックする際に、 これまでの合計を返す。 + +02:05.740 --> 02:13.280 +それは、 すべてのチャンクでこれまでに受信されたすべてのものの累積合計のようなものを期待している。 + +02:13.280 --> 02:23.030 +つまり、 このストリーム・コレクションという関数は、 前に書いたストリームGPTのストリーム・クロードの関数と同じものなのだ。 + +02:23.030 --> 02:34.010 +これで、 コードを最適化するための、 
以前の最適化された方法に代わる最適化された方法を手に入れることができ、 3つのモデルGPTクロードかコード・クワンの間で反転することができる。 + +02:34.400 --> 02:34.910 +うーん。 + +02:34.910 --> 02:41.900 +そしてここに、 Gradioのユーザー・インターフェースのコードの合計がある。 + +02:41.930 --> 02:43.400 +これを必ず実行する。 + +02:43.820 --> 02:46.520 +ええと、 だから、 これがどれだけ簡単なことか思い出してほしい。 + +02:46.520 --> 02:47.720 +クレイジーだよ。 + +02:47.960 --> 02:54.710 +タイトルをつけて、 PythonのコードとC++のコードを並べます。 + +02:54.710 --> 02:57.140 +モデルを選択する行がある。 + +02:57.140 --> 03:03.200 +そして今回、 これまで選べた2つのモデルにコード・クォンが加わった。 + +03:03.320 --> 03:08.750 +コードを変換するボタン、 Pythonを実行するボタン、 C plus plusを実行するボタン、 + +03:08.750 --> 03:13.600 +そしてPythonの結果とcplusplusの結果の出力ボックスがあります。 + +03:13.600 --> 03:17.500 +そして、 これが3つのアクションだ。 + +03:17.500 --> 03:22.870 +ボタンがクリックされたら、 何らかのアクションを起こす3つの場所。 + +03:22.870 --> 03:26.290 +そして、 単純に英語のように読めるところが好きだ。 + +03:26.290 --> 03:31.000 +もし誰かがコンバートを望むなら、 コンバートボタンを押せば、 最適化機能が呼び出される。 + +03:31.000 --> 03:33.550 +これがインプットで、 これがアウトプットだ。 + +03:33.550 --> 03:37.120 +Pythonの実行ボタンを押すと、 Pythonが実行される。 + +03:37.120 --> 03:44.080 +入力はPythonのコード、 出力はPythonのout、 C plus plusボタンも同様。 + +03:44.080 --> 03:47.020 +超シンプルに見えるはずだ。 + +03:47.020 --> 03:48.880 +それは超シンプルだからだ。 + +03:49.300 --> 03:51.310 +そして、 私たちはそれを発表するつもりだ。 + +03:51.340 --> 03:54.730 +これが僕らにとって見事に機能することを祈っているよ。 + +03:55.060 --> 03:58.840 +さて、 これがユーザー・インターフェースだ。 + +03:59.080 --> 04:08.050 +もちろん、 ここにあるのは単純な円周率計算のPythonコードだ。 + +04:08.050 --> 04:09.280 +なぜダメなんだ? 
+ +04:09.280 --> 04:12.490 +GPTでやってみよう。 + +04:14.750 --> 04:17.720 +Cプラス・プラス相当と覚えておいてほしい。 + +04:17.750 --> 04:21.080 +Pythonのバリエーションを実行してみよう。 + +04:21.110 --> 04:23.240 +私の記憶が正しければ、 これは約8秒だ。 + +04:23.240 --> 04:25.220 +だから、 これが8カウントになるのを待つしかない。 + +04:25.250 --> 04:27.140 +そして、 パイソンの結果を得るべきだ。 + +04:27.170 --> 04:29.810 +そこに8がある。 6秒。 + +04:29.810 --> 04:34.820 +古き良き円周率は、 少なくとも小数点以下何桁まで存在する。 + +04:34.820 --> 04:36.410 +そして今度はCプラスを走らせる。 + +04:36.410 --> 04:38.630 +それにGPT4から戻ってきた。 + +04:38.630 --> 04:47.630 +そして0点でも素晴らしい。 秒の06は100倍以上の向上だ。 + +04:47.810 --> 04:50.630 +クロードのためにもう1度。 + +04:50.750 --> 04:55.340 +Anthropicのクロードの好意でコードを変換した。 + +04:55.820 --> 04:57.380 +あ、 あった。 + +04:57.410 --> 05:00.080 +そして今度はクロードのCプラスを走らせる。 + +05:00.080 --> 05:00.680 +それに + +05:00.680 --> 05:05.480 +そして、 またGPT4に僅差で勝った。 + +05:05.480 --> 05:10.940 +でも、 このラインが入っていることで、 わずかに速くなったのかもしれない。 + +05:10.940 --> 05:13.610 +クロードのコードの方が本当に速いのかもしれない。 + +05:13.790 --> 05:16.400 +うーん、 あまりに似ているので疑ってしまう。 + +05:16.400 --> 05:16.810 +これがそうだ。 + +05:16.840 --> 05:19.480 +これはコンパイラによって最適化される。 + +05:19.600 --> 05:22.570 +しかし、 一貫してわずかに速い可能性はある。 + +05:22.570 --> 05:24.880 +だから、 あなたはCプラス・プラスの専門家かもしれない。 + +05:24.880 --> 05:25.540 +それは私に教えてくれる。 + +05:25.540 --> 05:31.960 +そして、 自分のアーキテクチャでそれが速いかどうか、 自分で試して納得できるかもしれない。 + +05:31.960 --> 05:33.880 +しかし、 とにかく、 それは問題ではない。 + +05:33.910 --> 05:38.260 +我々がここで見ているのは、 コードの品質がどの程度なのかということだ。 + +05:38.290 --> 05:39.220 +コンバートは可能か? + +05:39.250 --> 05:42.130 +それは理にかなっていて、 何か違いがあるのだろうか? 
+ +05:42.160 --> 05:45.430 +コード変換ボタンを押して、 何が起こるか見てみよう。 + +05:45.430 --> 05:49.780 +だからまず第一に、 僕らが知っているように、 それはもっとおしゃべりなんだ。 + +05:49.810 --> 05:53.920 +その説明を正しく取り除けていない。 + +05:53.920 --> 05:55.720 +だから、 それを削除する必要がある。 + +05:55.720 --> 05:57.880 +しかし、 私たちはそれを許す。 + +05:57.910 --> 06:03.790 +そのような余計なものを加えたからといって、 コードモデルを非難するつもりはない。 + +06:05.320 --> 06:11.530 +覚えておいてほしいのは、 これはすべてエンドポイントから見ている今現在のストリーミングだということだ。 + +06:11.560 --> 06:15.070 +こっちに行ったら、 リフレッシュする必要があるかもしれない。 + +06:15.070 --> 06:17.290 +私たちは、 私たちがそうであることを確認しなければならない。 + +06:17.320 --> 06:22.820 +確かに、 結果をストリームバックするときにCPUとGPUが点滅するのが見える。 + +06:23.240 --> 06:29.990 +ええと、 ではここで、 ええと、 グラデーションのスクリーンに降りてください。 + +06:29.990 --> 06:30.860 +さあ、 始めよう。 + +06:30.860 --> 06:35.690 +ええと、 私たちは、 ええと、 完全な解決策を持っています。 + +06:35.690 --> 06:46.400 +だから、 これからやることは、 一番上にあるものを取り除き、 最後にある必要のない説明を取り除くことだ。 + +06:46.430 --> 06:51.980 +そして、 このCプラスプラスのコードを実行し、 クインがどのような結果を残したかを確認する。 + +06:52.250 --> 06:53.780 +試してみよう。 + +06:56.060 --> 06:58.670 +そして走り、 速かった。 + +06:58.700 --> 07:00.140 +GPT4とほぼ同じだった。 + +07:00.170 --> 07:02.720 +同じぐらいだと思うよ。 + +07:02.900 --> 07:09.170 +ええと、 プラグマは入っていないようですが、 素晴らしい仕事をしてくれているようです。 + +07:09.170 --> 07:10.640 +同じ答えだ。 + +07:10.640 --> 07:15.290 +そして、 それはコードにとって間違いなく成功だと思う。 + +07:15.290 --> 07:16.010 +クインだ。 + +07:16.340 --> 07:24.130 +そしてまた、 コード・クインがここで実行している70億のパラメーターと、 + +07:24.130 --> 07:37.660 +GPT4やクロードが持っている2兆以上のパラメーターの違いを思い出してほしい。 + +07:38.170 --> 07:44.200 +じゃあ、 ここに戻って、 ハードルを上げよう。 + +07:44.230 --> 07:46.000 +チャレンジをもっと難しくしよう。 + +07:46.030 --> 07:57.580 +この値をPythonのハード、 つまり最大部分配列和を計算するコードに変更してみよう。 + +07:57.580 --> 08:05.110 +そして、 我々のオープンソースモデルがこの複雑なケースにどのように対処できるかをこれから見ていくことにしよう。 + +08:08.260 --> 08:11.590 +それで何をやっているんだ。 + +08:13.240 --> 08:15.970 +だからすでに問題がある。 + +08:15.970 --> 08:16.840 +問題がある。 + +08:16.840 --> 08:24.790 +そしてその問題とは、 乱数を生成するためのアプローチを再実装することを決定し、 + +08:24.790 --> 08:36.380 +この、 LCGで設定したアプローチを変更したことだ。 + +08:36.410 --> 08:41.270 +システム・プロンプトには、 乱数生成に関する機能を変更してはいけないとはっきりと書いておいたにもかかわらず、 + +08:41.270 --> 
08:43.760 +だ。 + +08:43.790 --> 08:50.990 +だから今回も、 コエトケンにその戦略を変えるよう説得することはできなかった。 + +08:51.050 --> 08:53.960 +ええと、 もっとうまくできるかどうか、 実験してみるといいよ。 + +08:53.960 --> 08:58.580 +しかし、 私自身はいくつかの実験をしながらも、 そうすることはできなかった。 + +08:59.000 --> 09:00.950 +もうすぐ終わるよ。 + +09:01.160 --> 09:01.940 +これでよし。 + +09:01.940 --> 09:02.540 +終わったよ。 + +09:02.570 --> 09:07.640 +だから、 最後に来るものを取り除き、 最初に来るものを取り除く。 + +09:07.640 --> 09:09.680 +そして今、 真実の瞬間が訪れる。 + +09:09.680 --> 09:15.230 +コード・クワンのCプラス・プラス・コードを実行し、 下にスクロールする。 + +09:15.770 --> 09:21.380 +それでわかったのは、 もちろん、 その数字が一致しないということだ。 + +09:21.410 --> 09:23.930 +以前の結果を覚えているのなら。 + +09:23.930 --> 09:30.220 +だから残念ながら、 コード・クワンはこの数字を再現することに成功していない。 + +09:30.220 --> 09:37.420 +それはもちろん、 独自の乱数発生器を持っているからだ。 + +09:37.720 --> 09:42.850 +ええと、 いくつか、 ええと、 興味深いことをやっているんだ。 + +09:42.850 --> 09:49.930 +より効率的な方法を潜在的に認識しているように見えるが、 数字が一致しないため、 実際にすべてを正しく実行し、 + +09:49.930 --> 09:56.230 +正しい数字を得たかどうかを検証することはできない。 + +09:56.230 --> 10:00.970 +だから残念ながら、 残念ながら、 私はとても期待していたんだ。 + +10:00.970 --> 10:08.830 +Codeclanは簡単なテストであるパイテストには合格したが、 Codeclanは難しいテストでつまずき、 + +10:08.830 --> 10:19.060 +Pythonコードと同じ答えを再現できなかった。 + +10:19.060 --> 10:24.850 +そういう観点からは、 残念ながらフロンティア・モデルが上位に来る。 + +10:24.850 --> 10:27.370 +勝利のために再び爪を立てた。 + +10:27.370 --> 10:30.340 +コーデクランは間に合わなかった。 + +10:31.240 --> 10:33.250 +それではまた次回、 総括を。 diff --git a/week5/community-contributions/subtitles/srts/59295619/ko_KR.srt b/week5/community-contributions/subtitles/srts/59295619/ko_KR.srt new file mode 100755 index 0000000..7a0a2e5 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59295619/ko_KR.srt @@ -0,0 +1,463 @@ +WEBVTT + +00:00.350 --> 00:05.540 +다시 오신 걸 환영합니다 모든 걸 아름다운 사용자 인터페이스로 통합할 시간이네요 + +00:05.540 --> 00:10.880 +하지만 먼저 한 번 더 살펴보죠 추론 엔드포인트 스크린에서 제 실행 중인 코드를 + +00:10.880 --> 00:12.260 +볼 수 있어요 + +00:12.260 --> 00:15.380 +퀴네 1요 570억 개의 채팅 추론이죠 + +00:15.680 --> 00:16.640 +할 수 있어요 + +00:16.670 --> 00:23.060 +보여드리고 싶은 게 있는데 여기로 오셔서 여러분의 추론 엔드포인트가 어떻게 실행되고 있는지 살펴보세요 + +00:23.060 --> 00:28.850 +분석 보기 같은 걸 할 수 있어요 
어떻게 되고 있는지, 요청 수를 볼 수 있죠 제가 만들긴 했지만 + +00:28.850 --> 00:32.090 +레이더에 잡히기엔 충분하지 않아요 + +00:32.210 --> 00:34.430 +대기 시간과 CPU 사용도요 + +00:34.460 --> 00:34.940 +저기 있네요 + +00:34.970 --> 00:38.930 +CPU 사용량과 GPU 사용량에 작은 변화가 있네요 + +00:38.960 --> 00:39.920 +좋아요 + +00:40.010 --> 00:51.020 +원가를 보시면 3달러가 사용된 걸 알 수 있어요 이 모델은 지금까지 64,000달러예요 + +00:51.380 --> 00:53.150 +네 + +00:53.180 --> 01:00.770 +그걸 염두에 두고 분석은 그만두고 주피터 연구소로 돌아가죠 + +01:01.010 --> 01:09.070 +코드 콴을 호출하기 위해 이 코드를 마무리하죠 멋진 스트림 메서드로요 GPT 4와 Clude + +01:09.070 --> 01:14.620 +스트림 Quen에서 이미 했던 다른 스트림 메서드처럼요 + +01:14.650 --> 01:17.200 +같은 종류의 메서드지만 물론 같은 함수죠 + +01:17.200 --> 01:18.700 +아주 다르게 할 거예요 + +01:18.910 --> 01:21.370 +토큰라이저를 생성할 거예요 + +01:21.370 --> 01:27.700 +파이썬 을 일반적인 메시지 리스트로 바꿔줄 거예요 + +01:27.700 --> 01:29.710 +채팅 템플릿을 적용할 거예요 + +01:29.710 --> 01:34.420 +이제 텍스트에서 토큰화 준비가 됐어요 + +01:34.420 --> 01:41.590 +그런 다음 수증 클라이언트를 위해 URL을 이용해 수증하는 마법을 부립니다 H깅페이스 토큰을 + +01:41.590 --> 01:43.120 +넘기기 위해서요 + +01:43.120 --> 01:47.020 +클라이언트 텍스트 생성을 하고 있는데요 + +01:47.020 --> 01:49.570 +여기 우리가 스트리밍할 텍스트가 있어요 + +01:49.570 --> 01:52.930 +최대 새 토큰이에요 + +01:52.930 --> 01:55.900 +그리고 결과가 나오죠 + +01:55.900 --> 02:03.400 +지금까지 우리가 준 토큰을 스트리밍하면서 지금까지의 총액을 확인하죠 그라디오가 기대하는 게 + +02:03.400 --> 02:05.740 +그거란 걸 기억하시길 바라요 + +02:05.740 --> 02:13.280 +지금까지 받은 모든 정보가 누적되어야 해요 모든 덩어리가요 + +02:13.280 --> 02:21.080 +저 스트림 컬렉션은 전에 만든 다른 스트림 GPT와 컴패니언 기능이에요 + +02:21.080 --> 02:23.030 +스트림 클로드요 + +02:23.030 --> 02:29.600 +이제 최적화된 메서드를 가질 수 있습니다 코드를 최적화하는 이전의 최적화된 메서드를 대체할 수 있죠 + +02:29.600 --> 02:34.010 +GPT Clude나 Code K콴 세 모델 사이에서 바꿀 수 있어요 + +02:34.400 --> 02:34.910 +네 + +02:34.910 --> 02:41.900 +사용자 인터페이스 그라디오 코드의 총합이 여기 있네요 + +02:41.930 --> 02:43.400 +이거 꼭 돌려야 해요 + +02:43.820 --> 02:46.520 +얼마나 간단한지 기억하시라고요 + +02:46.520 --> 02:47.720 +말도 안 돼요 + +02:47.960 --> 02:54.710 +작지만 멋진 타이틀이 있고요 파이썬 코드와 C++ 코드가 있는 행도 있네요 + +02:54.710 --> 02:57.140 +모델을 선택할 행이 있어요 + +02:57.140 --> 03:03.200 +이제 코드 권을 3개에 추가했어요 이전 두 모델 사이에서 선택할 수 있었던 것에 대해서요 + +03:03.320 --> 
03:08.750 +코드를 변환하는 버튼과 파이썬을 실행하는 버튼 C++를 실행하는 버튼이 있습니다 + +03:08.750 --> 03:13.600 +파이썬 결과와 C플러스플러스 결과의 출력 상자도 있고요 + +03:13.600 --> 03:17.500 +세 가지 액션이 있어요 + +03:17.500 --> 03:22.870 +버튼을 클릭하면 어떤 행동을 취하는 세 장소요 + +03:22.870 --> 03:26.290 +단순히 영어처럼 읽히는 게 마음에 들어요 + +03:26.290 --> 03:31.000 +누군가 변환을 원한다면 변환 버튼을 누르면 최적화 함수를 호출해요 + +03:31.000 --> 03:33.550 +이게 입력이고 이게 출력이죠 + +03:33.550 --> 03:37.120 +파이썬 실행 버튼을 누르면 파이썬을 실행하죠 + +03:37.120 --> 03:44.080 +입력은 파이썬 코드이고 출력은 파이썬 아웃입니다 C++ 버튼도 마찬가지고요 + +03:44.080 --> 03:47.020 +아주 간단해 보여야 해요 + +03:47.020 --> 03:48.880 +아주 간단하기 때문이죠 + +03:49.300 --> 03:51.310 +이제 시작할 거예요 + +03:51.340 --> 03:54.730 +예쁘게 나오길 기도해 보죠 + +03:55.060 --> 03:58.840 +자, 이게 사용자 인터페이스예요 + +03:59.080 --> 04:08.050 +그리고 여기 보이는 것은 파이썬 코드입니다 간단한 파이 계산을 위한 코드죠 + +04:08.050 --> 04:09.280 +왜 안 되죠? + +04:09.280 --> 04:12.490 +GPT를 위해 한번 해 보죠 + +04:14.750 --> 04:17.720 +C 플러스 플러스 등가라는 걸 기억하세요 + +04:17.750 --> 04:21.080 +파이썬 변형을 실행해보죠 + +04:21.110 --> 04:23.240 +제 기억이 맞는다면 8초 정도 걸렸어요 + +04:23.240 --> 04:25.220 +8까지 셀 때까지 기다려야 해요 + +04:25.250 --> 04:27.140 +파이썬 결과가 Get in get으로 나올 거예요 + +04:27.170 --> 04:29.810 +8번 나왔네요 6초요 + +04:29.810 --> 04:34.820 +원주율은 소수점 이하 자리까지 있어요 + +04:34.820 --> 04:36.410 +이제 C+를 실행할게요 + +04:36.410 --> 04:38.630 +GPT 4에서도 연락이 왔고요 + +04:38.630 --> 04:47.630 +0년에도 잘했고요 06초면 100 엑스 이상 향상된 거죠 + +04:47.810 --> 04:50.630 +클로드를 위해 한 번 더 불러보죠 + +04:50.750 --> 04:55.340 +클로드의 코드를 변환하는 거죠 + +04:55.820 --> 04:57.380 +저기 있네요 + +04:57.410 --> 05:00.080 +클로드의 C 플러스도 해 볼게요 + +05:00.080 --> 05:00.680 +더 있어요 + +05:00.680 --> 05:05.480 +GPT 4를 다시 한번 간발의 차로 이기죠 + +05:05.480 --> 05:10.940 +하지만 이 선 때문에 속도가 약간 빨라진 것 같아요 + +05:10.940 --> 05:13.610 +클로드의 코드가 더 빠를지도 몰라요 + +05:13.790 --> 05:16.400 +너무 비슷해서 의심스러워요 + +05:16.400 --> 05:16.810 +맞아요 + +05:16.840 --> 05:19.480 +컴파일러에 의해 최적화돼요 + +05:19.600 --> 05:22.570 +하지만 이게 일정하게 약간 더 빠를 수도 있어요 + +05:22.570 --> 05:24.880 +C 플러스 전문가일 수도 있겠네요 + +05:24.880 --> 05:25.540 +그걸 보면 알 수 있죠 + +05:25.540 --> 05:31.030 +직접 해 보고 만족할 
수도 있겠죠. 당신의 구조가 빠르든 느리든 + +05:31.030 --> 05:31.960 +간에요. + +05:31.960 --> 05:33.880 +어쨌든 중요한 건 그게 아니에요 + +05:33.910 --> 05:38.260 +코드 품질이 어떻게 측정되는지 알아보려고 해요 + +05:38.290 --> 05:39.220 +전환이 가능한가요? + +05:39.250 --> 05:42.130 +말이 되는지, 다른 점이 있는지요 + +05:42.160 --> 05:45.430 +코드 변환 버튼을 눌러 어떻게 되는지 보죠 + +05:45.430 --> 05:49.780 +우선, 알다시피 수다스러운 느낌이 더 강하죠 + +05:49.810 --> 05:53.920 +설명이 정확히 나오지 않았어요 + +05:53.920 --> 05:55.720 +그러니 삭제해야 해요 + +05:55.720 --> 05:57.880 +하지만 그렇게 하도록 놔두죠 get it + +05:57.910 --> 06:03.790 +추가적인 것을 추가하는 코드 모델을 치지 않을게요. + +06:05.320 --> 06:11.530 +기억하세요, 이건 지금 모두 스트리밍 중입니다 엔드포인트에서부터 보고 있으니까요 + +06:11.560 --> 06:15.070 +여기로 가면 새로 고침이 필요할지도 몰라요 + +06:15.070 --> 06:17.290 +그걸 봐야 해요 + +06:17.320 --> 06:22.820 +결과를 스트리밍할 때 CPU와 GPU 현상이 보여요 + +06:23.240 --> 06:29.990 +이제 여기 아래를 보시면... 죄송합니다, 그라데이션 스크린으로요 + +06:29.990 --> 06:30.860 +시작할게요 + +06:30.860 --> 06:35.690 +완벽한 해결책을 찾았어요 + +06:35.690 --> 06:42.260 +이제 상단에 있는 걸 제거하겠습니다 끝에 있는 설명도 + +06:42.260 --> 06:46.400 +제거할게요 필요 없으니까요 + +06:46.430 --> 06:51.980 +이 C++ 코드를 실행해 퀸 코드가 어떻게 됐는지 보죠 + +06:52.250 --> 06:53.780 +한번 해 보죠 + +06:56.060 --> 06:58.670 +아주 빠르게 달렸죠 + +06:58.700 --> 07:00.140 +GPT 4와 거의 비슷했어요 + +07:00.170 --> 07:02.720 +아마 비슷할 거예요 + +07:02.900 --> 07:09.170 +실용적인 느낌이 나진 않지만 아주 잘 만든 것 같아요 + +07:09.170 --> 07:10.640 +답은 같아요 + +07:10.640 --> 07:15.290 +그게 코드에 있어서는 확실히 성공이라고 생각해요 + +07:15.290 --> 07:16.010 +퀸이에요 + +07:16.340 --> 07:24.130 +코드 퀸이 실행 중인 모델 매개 변수는 70억 개로 다른 + +07:24.130 --> 07:33.730 +모델과 비교하면 죄송합니다 GPT 4와 클로드에는 2조 개 이상의 + +07:33.730 --> 07:37.660 +매개 변수가 있죠 + +07:38.170 --> 07:44.200 +그럼 다시 돌아가서 수준을 높여 보죠 + +07:44.230 --> 07:46.000 +더 어려운 과제를 내 보죠 + +07:46.030 --> 07:57.580 +이 값을 파이썬 하드 으로 바꾸겠습니다 최대 서브-러레이 합을 계산하는 코드죠 + +07:57.580 --> 08:05.110 +이제 오픈 소스 모델이 이 복잡한 케이스를 어떻게 처리하는지 보죠 + +08:08.260 --> 08:11.590 +뭘 하는 거죠? 
+ +08:13.240 --> 08:15.970 +벌써 문제가 생겼네요 + +08:15.970 --> 08:16.840 +문제가 있어요 + +08:16.840 --> 08:24.790 +그 문제는 임의의 숫자를 생성하는 접근법을 보류하기로 결정했다는 + +08:24.790 --> 08:33.440 +거예요 LCG라는 기술로 구현마다 반복적이고 일관된 임의의 숫자를 + +08:33.440 --> 08:36.380 +생성하는 거죠 + +08:36.410 --> 08:41.270 +제가 분명히 시스템 프롬프트에 난수 생성 주변의 기능성 변경을 금한다고 + +08:41.270 --> 08:43.760 +입력했는데도 말이죠. + +08:43.790 --> 08:50.990 +그 전략을 바꾸라고 코텡을 설득할 수 없었어요 + +08:51.050 --> 08:53.960 +이걸로 실험해 보세요 더 잘할 수 있는지 보죠 + +08:53.960 --> 08:58.580 +하지만 실험만으로는 그렇게 할 수 없었어요 + +08:59.000 --> 09:00.950 +거의 다 됐어요 + +09:01.160 --> 09:01.940 +됐어요 + +09:01.940 --> 09:02.540 +다 됐어요 + +09:02.570 --> 09:07.640 +그래서 끝에 나오는 것도 빼고 처음에 나오는 것도 뺄 거예요 + +09:07.640 --> 09:09.680 +이제 진실의 순간이에요 + +09:09.680 --> 09:15.230 +콴 코드부터 C 플러스 플러스 코드를 실행하고 스크롤을 내리죠 + +09:15.770 --> 09:21.380 +물론 그 숫자가 일치하지 않는다는 걸 알게 됐죠 + +09:21.410 --> 09:23.930 +예전 결과를 기억한다면요 + +09:23.930 --> 09:30.220 +그래서 안타깝게도 코드 콴이 숫자를 복제하는 데 실패했는데 놀랄 일도 아니죠 + +09:30.220 --> 09:37.420 +왜냐하면 이 차에는 무작위 번호 생성기가 있거든요 + +09:37.720 --> 09:42.850 +흥미로운 일이 좀 있었어요 + +09:42.850 --> 09:49.930 +더 효율적인 방법론을 잠재적으로 인정한 것 같지만 숫자가 일치하지 않으니 + +09:49.930 --> 09:56.230 +모든 게 제대로 작동하고 숫자가 맞는지 확인할 수 없어요 + +09:56.230 --> 10:00.970 +그래서 정말 안타깝지만 희망을 걸었어요 + +10:00.970 --> 10:08.830 +코드클런은 파이 테스트 즉, 간단한 테스트를 통과했지만 더 + +10:08.830 --> 10:16.930 +어려운 테스트에 실패하고 말았습니다 파이썬 부호와 같은 답을 + +10:16.930 --> 10:19.060 +내지 못했죠 + +10:19.060 --> 10:24.850 +그런 관점에서 보면 안타깝게도 개척자 모델이 우세해요 + +10:24.850 --> 10:27.370 +다시 한 번 승리를 거머쥐었죠 + +10:27.370 --> 10:30.340 +코드클랜은 성공하지 못했죠 + +10:31.240 --> 10:33.250 +그럼 다음 시간에 마무리하죠 diff --git a/week5/community-contributions/subtitles/srts/59297561/en_US.srt b/week5/community-contributions/subtitles/srts/59297561/en_US.srt new file mode 100755 index 0000000..730a0e0 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297561/en_US.srt @@ -0,0 +1,58 @@ +WEBVTT + +00:01.160 --> 00:05.570 +And would you believe at this point you're 55% of the way along the journey? 
+ +00:05.780 --> 00:09.890 +Uh, it's been a while since I've thrown that stat out there, but congratulations. + +00:09.890 --> 00:11.120 +That's really awesome. + +00:11.210 --> 00:14.270 +Uh, so we have made progress with rag. + +00:14.270 --> 00:18.560 +We're taking it slowly because it's about to get real as we get into vector databases. + +00:18.560 --> 00:24.920 +But at this point, uh, not only do you have the sort of foundational understanding of why we're going + +00:24.920 --> 00:31.310 +to be using vectors, but you can now use Lang chain to load in documents, to split them, to add in + +00:31.310 --> 00:32.120 +metadata. + +00:32.120 --> 00:36.620 +And hopefully you've played around with that and you've satisfied yourself about how it's working and + +00:36.620 --> 00:39.020 +you're ready for the real deal. + +00:39.020 --> 00:46.070 +We next time are going to take these chunks and convert them into vectors using OpenAI's OpenAI embeddings, + +00:46.070 --> 00:49.280 +which was one of the encoding llms that we talked about. + +00:49.310 --> 00:55.640 +We're then going to store the vectors in an open source vector data store called chroma, which is an + +00:55.640 --> 00:59.090 +extremely popular open source vector database. + +00:59.090 --> 01:00.080 +And it's terrific. + +01:00.080 --> 01:04.790 +And we're going to enjoy putting our vectors in there, because we're then going to visualize them and + +01:04.790 --> 01:10.010 +see them and get a sense of what does it even mean to have a vector in a database. + +01:10.010 --> 01:11.930 +So lots to be done. + +01:11.930 --> 01:13.790 +I will see you next time. 
diff --git a/week5/community-contributions/subtitles/srts/59297561/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297561/ja_JP.srt new file mode 100755 index 0000000..8759e8f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297561/ja_JP.srt @@ -0,0 +1,49 @@ +WEBVTT + +00:01.160 --> 00:05.570 +そしてこの時点で、 あなたは旅の55%を歩んでいることになる。 + +00:05.780 --> 00:09.890 +このスタッツは久しぶりだけど、 おめでとう。 + +00:09.890 --> 00:11.120 +本当にすごいよ。 + +00:11.210 --> 00:14.270 +ボロ雑巾の件では進展があった。 + +00:14.270 --> 00:18.560 +ベクター・データベースに入ると本格的になるから、 ゆっくりやっているんだ。 + +00:18.560 --> 00:24.920 +しかしこの時点で、 なぜベクターを使うのかという基礎的な理解ができただけでなく、 ラングチェーンを使ってドキュメントを読み込んだり、 + +00:24.920 --> 00:32.120 +分割したり、 メタデータを追加したりできるようになった。 + +00:32.120 --> 00:36.620 +そして願わくば、 それで遊んでみて、 それがどのように機能しているか自分自身で納得し、 + +00:36.620 --> 00:39.020 +本番の準備ができたことを願う。 + +00:39.020 --> 00:49.280 +次回は、 これらのチャンクをOpenAIのエンベッディングを使ってベクトルに変換します。 + +00:49.310 --> 00:59.090 +このベクターは、 chromaと呼ばれるオープンソースのベクターデータストアに保存されます。 chromaは非常に人気のあるオープンソースのベクターデータベースです。 + +00:59.090 --> 01:00.080 +そして素晴らしい。 + +01:00.080 --> 01:04.790 +そして、 そこにベクトルを入れて楽しむのだ。 ベクトルを視覚化して見ることで、 + +01:04.790 --> 01:10.010 +データベースにベクトルがあることの意味を知ることができるからだ。 + +01:10.010 --> 01:11.930 +だから、 やるべきことはたくさんある。 + +01:11.930 --> 01:13.790 +また次回、 お会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/59297561/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297561/ko_KR.srt new file mode 100755 index 0000000..6206de0 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297561/ko_KR.srt @@ -0,0 +1,55 @@ +WEBVTT + +00:01.160 --> 00:05.570 +지금 여정의 55%를 진행했다고 믿으시겠어요? 
+ +00:05.780 --> 00:09.890 +이 통계를 꺼내 본 지 좀 됐지만 축하해요 + +00:09.890 --> 00:11.120 +정말 멋져요 + +00:11.210 --> 00:14.270 +래그와 진전이 있었어요 + +00:14.270 --> 00:18.560 +천천히 하고 있어요 벡터 데이터베이스로 들어가면서 진짜 시작될 테니까요 Get in get + +00:18.560 --> 00:24.920 +하지만 이 시점에선 벡터를 왜 사용할지에 대한 기본 이해를 가졌을 뿐 아니라 + +00:24.920 --> 00:32.120 +랭 체인을 이용해 문서를 로드하고, 나누고 메타데이터를 추가할 수도 있죠 + +00:32.120 --> 00:36.620 +그걸 갖고 놀면서 어떻게 작동하는지 스스로 만족했으면 좋겠네요 + +00:36.620 --> 00:39.020 +진짜를 할 준비가 됐으면 좋겠어요 + +00:39.020 --> 00:46.070 +다음에는 이 덩어리들을 벡터로 변환할 것입니다 인코딩 llm 중 하나인 OpenAI의 + +00:46.070 --> 00:49.280 +OpenAI 엠부딩을 이용해서요 + +00:49.310 --> 00:55.640 +그 다음 벡터들을 크로마라는 오픈 소스 벡터 데이터 저장소에 저장합니다 아주 + +00:55.640 --> 00:59.090 +유명한 오픈 소스 벡터 데이터베이스죠 + +00:59.090 --> 01:00.080 +정말 멋져요 + +01:00.080 --> 01:04.790 +벡터를 넣는 것도 재미있을 거예요 벡터를 시각화하고 보게 될 테니까요 + +01:04.790 --> 01:10.010 +데이터베이스에 벡터가 있다는 게 무슨 의미인지 이해하게 되겠죠 get it + +01:10.010 --> 01:11.930 +할 일이 많아요 + +01:11.930 --> 01:13.790 +다음에 봐요 diff --git a/week5/community-contributions/subtitles/srts/59297575/en_US.srt b/week5/community-contributions/subtitles/srts/59297575/en_US.srt new file mode 100755 index 0000000..7fb5669 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297575/en_US.srt @@ -0,0 +1,49 @@ +WEBVTT + +00:00.920 --> 00:04.550 +Well, welcome to the final part on rag. + +00:04.550 --> 00:10.310 +And this is the session where you go from being a rag expert to being a rag master. + +00:10.520 --> 00:13.010 +Uh, we are going to look today at a few things. + +00:13.010 --> 00:20.780 +We're going to look at the Lang chain, LCL, the Lang chain expression language, which is where we + +00:20.780 --> 00:24.440 +look at the declarative language for setting up these chains. + +00:24.560 --> 00:28.640 +Uh, and it's not something we're going to use in earnest, but we'll take a look at it. 
+ +00:28.790 --> 00:34.520 +I'm going to talk a bit about how Lang Chain works behind the scenes, and we're going to fix and diagnose + +00:34.520 --> 00:37.700 +some common problems that come up with rag. + +00:37.730 --> 00:41.600 +And there's something else, something else that I just slipped in because I couldn't resist it. + +00:41.600 --> 00:44.120 +Something that I said we could do that was really easy. + +00:44.150 --> 00:47.240 +Uh, and I want to show you that we certainly can. + +00:47.240 --> 00:48.950 +And in fact, we're going to do that right away. + +00:48.980 --> 00:54.470 +So before we get on with the rest of today's activities, I'm going to take you to JupyterLab and show + +00:54.470 --> 01:01.070 +you something that I just knocked up to prove a little point, but to also illustrate another vector + +01:01.070 --> 01:01.790 +database. + +01:01.790 --> 01:02.690 +See you there. diff --git a/week5/community-contributions/subtitles/srts/59297575/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297575/ja_JP.srt new file mode 100755 index 0000000..f914979 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297575/ja_JP.srt @@ -0,0 +1,43 @@ +WEBVTT + +00:00.920 --> 00:04.550 +さて、 ボロについての最終章へようこそ。 + +00:04.550 --> 00:10.310 +そしてこのセッションは、 ボロ布のエキスパートからボロ布の達人になるためのセッションなのだ。 + +00:10.520 --> 00:13.010 +ええと、 今日はいくつかのことを見ていこうと思う。 + +00:13.010 --> 00:24.440 +ラング・チェーン、 LCL(ラング・チェーン表現言語)を見てみよう。 LCLは、 ラング・チェーンを設定するための宣言言語だ。 + +00:24.560 --> 00:28.640 +本格的に使うものではないけど、 ちょっと見てみるよ。 + +00:28.790 --> 00:34.520 +ラング・チェインが舞台裏でどのように機能しているのかについて少しお話しし、 + +00:34.520 --> 00:37.700 +ラグでよく起こる問題を修正・診断します。 + +00:37.730 --> 00:41.600 +そしてもうひとつ、 我慢できずに滑り込ませたものがある。 + +00:41.600 --> 00:44.120 +私が言ったことは、 本当に簡単なことだった。 + +00:44.150 --> 00:47.240 +そして、 そのことを証明したいんだ。 + +00:47.240 --> 00:48.950 +そして実際、 すぐにでもそうするつもりだ。 + +00:48.980 --> 00:54.470 +本日の残りのアクティビティに入る前に、 JupyterLabに移動して、 + +00:54.470 --> 01:01.790 +ちょっとしたポイントを証明するために作ったものをお見せします。 + +01:01.790 --> 01:02.690 
+そこで会おう diff --git a/week5/community-contributions/subtitles/srts/59297575/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297575/ko_KR.srt new file mode 100755 index 0000000..ee1e0ef --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297575/ko_KR.srt @@ -0,0 +1,46 @@ +WEBVTT + +00:00.920 --> 00:04.550 +마지막 단계에 잘 오셨어요 + +00:04.550 --> 00:10.310 +이제 헝겊 전문가에서 헝겊 전문가가 되는 단계예요 + +00:10.520 --> 00:13.010 +오늘은 몇 가지를 살펴볼 거예요 + +00:13.010 --> 00:20.780 +랭 체인을 살펴보죠 랭 체인 표현 언어인 LCL요 이런 체인을 설정하는 + +00:20.780 --> 00:24.440 +선언적 언어를 살펴보는 곳이죠 + +00:24.560 --> 00:28.640 +본격적으로 사용할 건 아니지만 한번 살펴보죠 + +00:28.790 --> 00:34.520 +랭 체인이 비트로 어떻게 작동하는지 잠시 말씀드리겠습니다 래그와 관련된 + +00:34.520 --> 00:37.700 +일반적인 문제를 고치고 진단할 거예요 + +00:37.730 --> 00:41.600 +또 다른 것도 있어요 그냥 넣은 거예요 거부할 수 없었거든요 + +00:41.600 --> 00:44.120 +제가 쉽게 할 수 있다고 했던 거요 + +00:44.150 --> 00:47.240 +할 수 있다는 걸 보여드리고 싶어요 + +00:47.240 --> 00:48.950 +지금 바로 할 거예요 + +00:48.980 --> 00:54.470 +오늘 작업의 나머지 부분을 살펴보기 전에 JupyterLab으로 가서 방금 보여드린 + +00:54.470 --> 01:01.790 +걸 보여드리겠습니다 약간의 주장을 증명하기 위해서요 다른 벡터 데이터베이스를 표시하기 위해서요 + +01:01.790 --> 01:02.690 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/59297585/en_US.srt b/week5/community-contributions/subtitles/srts/59297585/en_US.srt new file mode 100755 index 0000000..de35fe8 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297585/en_US.srt @@ -0,0 +1,85 @@ +WEBVTT + +00:00.440 --> 00:07.280 +Before we move on, let me show you one more time this fabulous slide that describes the simple three + +00:07.310 --> 00:13.130 +abstractions and one method call to build a rag pipeline the abstractions. + +00:13.160 --> 00:16.190 +Again, you create the LM, you create your memory. + +00:16.220 --> 00:20.690 +You create your retriever just by calling your chroma vector store or whatever vector store you're working + +00:20.690 --> 00:22.520 +with dot as retriever. 
+
+00:22.520 --> 00:28.190
+And then you can simply create your conversation retrieval chain with this one line passing in your
+
+00:28.190 --> 00:30.770
+LM, your retriever, your memory, and you're done.
+
+00:30.770 --> 00:36.080
+And of course, with this conversation chain you call invoke and you pass in a dictionary with your
+
+00:36.080 --> 00:43.400
+question and with what you get back, you can simply call you look up the answer key and you will get
+
+00:43.400 --> 00:46.010
+back the response from the model.
+
+00:46.340 --> 00:51.770
+And so with that I would say that you have leveled up your skills.
+
+00:51.770 --> 00:53.540
+You are upskilled.
+
+00:53.540 --> 00:59.030
+You are now accomplished in the art of building rag pipelines.
+
+00:59.030 --> 01:00.380
+Congratulations!
+
+01:00.740 --> 01:06.170
+You can build your own rag knowledge worker not just for a fictitious company like Insurellm, but
+
+01:06.170 --> 01:11.060
+hopefully for real companies too, including your own using Lang Chain.
+
+01:11.060 --> 01:12.200
+And you can use chroma.
+
+01:12.230 --> 01:19.370
+You could also see how easy it is to switch that up for other vector data stores, and using models
+
+01:19.370 --> 01:23.480
+other than OpenAI, Lang chain makes that very simple indeed.
+
+01:23.570 --> 01:29.810
+Next time we're going to talk about Lang Chain's declarative language briefly, it's not something we're
+
+01:29.810 --> 01:31.100
+going to use, but I'll show it to you.
+
+01:31.130 --> 01:32.900
+So you could use it if you wish to.
+
+01:33.200 --> 01:38.390
+Uh, I'm going to talk a bit about how Lang Chain works under the covers, just so you have more insight.
+
+01:38.390 --> 01:44.750
+And in particular, we're going to look at a common problem with Rag, diagnose it and then talk about
+
+01:44.750 --> 01:45.770
+how to fix it.
+ +01:45.770 --> 01:53.300 +So that will all, uh, equip you to be able to use rag in anger in production projects as you'll really + +01:53.300 --> 01:54.950 +understand the inner workings. + +01:55.040 --> 01:57.200 +And with that, I will see you next time. diff --git a/week5/community-contributions/subtitles/srts/59297585/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297585/ja_JP.srt new file mode 100755 index 0000000..3f29130 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297585/ja_JP.srt @@ -0,0 +1,67 @@ +WEBVTT + +00:00.440 --> 00:07.280 +次に進む前に、 この素晴らしいスライドをもう一度お見せしよう。 このスライドでは、 シンプルな3つの抽象化と、 + +00:07.310 --> 00:13.130 +抽象化されたパイプラインを構築するための1つのメソッド呼び出しについて説明している。 + +00:13.160 --> 00:16.190 +繰り返すが、 LMを作るのも、 記憶を作るのも自分だ。 + +00:16.220 --> 00:20.690 +クロマ・ベクター・ストアや、 あなたが使っているベクター・ストアのドットをretrieverとして呼び出すだけで、 + +00:20.690 --> 00:22.520 +retrieverを作成できます。 + +00:22.520 --> 00:30.770 +そして、 この1行をLM、 リトリーバー、 メモリーに渡すだけで、 会話の検索チェーンを作ることができる。 + +00:30.770 --> 00:36.080 +もちろん、 この会話チェーンでは、 + +00:36.080 --> 00:46.010 +invokeを呼び出し、 質問と一緒に辞書を渡す。 + +00:46.340 --> 00:51.770 +そうすれば、 あなたのスキルはレベルアップしたと言える。 + +00:51.770 --> 00:53.540 +あなたはスキルアップしている。 + +00:53.540 --> 00:59.030 +これであなたは、 ラグ・パイプラインを構築する術を会得したことになる。 + +00:59.030 --> 01:00.380 +おめでとう! 
+ +01:00.740 --> 01:06.170 +エンデュアー・エルムのような架空の企業だけでなく、 できれば実在の企業(ランプチェーンを使っている自分の会社も含む)でも、 + +01:06.170 --> 01:11.060 +自分のボロ知識労働者を作ることができる。 + +01:11.060 --> 01:12.200 +クロマも使える。 + +01:12.230 --> 01:23.480 +また、 他のベクターデータストアや、 OpenAI以外のモデルを使用する場合にも、 ラングチェーンを使用すれば非常に簡単に切り替えることができます。 + +01:23.570 --> 01:31.100 +次回はラングチェーン宣言言語について簡単に話すつもりだ。 + +01:31.130 --> 01:32.900 +だから、 使いたければ使えばいい。 + +01:33.200 --> 01:38.390 +ラング・チェインがどのように機能するのか、 もう少しお話ししましょう。 + +01:38.390 --> 01:45.770 +特に、 ラグのよくある問題を見て、 それを診断し、 修正する方法について話すつもりだ。 + +01:45.770 --> 01:54.950 +そうすれば、 ボロ布の内部構造を理解し、 プロダクション・プロジェクトで怒りに任せてボロ布を使うことができるようになるだろう。 + +01:55.040 --> 01:57.200 +それでは、 また次回お会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/59297585/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297585/ko_KR.srt new file mode 100755 index 0000000..5c140ee --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297585/ko_KR.srt @@ -0,0 +1,85 @@ +WEBVTT + +00:00.440 --> 00:07.280 +다음으로 넘어가기 전에 이 멋진 슬라이드를 한 번 더 보여드리죠 간단한 3개의 추상화를 + +00:07.310 --> 00:13.130 +묘사하고 추상화 랙 파이프라인을 구축하기 위한 하나의 메서드 호출이죠 + +00:13.160 --> 00:16.190 +LM을 생성하고 메모리를 생성하죠 + +00:16.220 --> 00:20.690 +당신은 당신의 레트리버를 생성합니다. 크로마 벡터 스토어나 .이 레트리버로 작동하는 + +00:20.690 --> 00:22.520 +어떤 벡터 스토어도 호출해서요. 
+ +00:22.520 --> 00:28.190 +그런 다음 이 선 하나로 대화 회수 체인을 만들 수 있어요 LM, 레트리버 + +00:28.190 --> 00:30.770 +메모리로요 그럼 끝이죠 + +00:30.770 --> 00:36.080 +물론 이 대화 사슬을 통해 invoke를 호출하고 + +00:36.080 --> 00:43.400 +질문과 함께 사전을 전달하면 응답 키를 찾아 호출할 수 있고 모델에서 + +00:43.400 --> 00:46.010 +응답을 받을 수 있죠 + +00:46.340 --> 00:51.770 +그런 점에서 실력이 한 단계 향상됐어요 + +00:51.770 --> 00:53.540 +기운이 넘치네요 + +00:53.540 --> 00:59.030 +이제 랙 파이프라인 만드는 기술을 완성했어요 + +00:59.030 --> 01:00.380 +축하해요 + +01:00.740 --> 01:06.170 +자신만의 헝겊 지식 인력을 만들 수 있습니다 엔디어 엘름 같은 허구의 회사뿐 + +01:06.170 --> 01:11.060 +아니라 진짜 회사에도 만들 수 있습니다 램프 체인을 사용해서요 + +01:11.060 --> 01:12.200 +채도를 사용해도 돼요 + +01:12.230 --> 01:19.370 +다른 벡터 데이터 저장소를 위해 바꾸는 것도 얼마나 쉬운지 알 수 있습니다 OpenAI가 아닌 + +01:19.370 --> 01:23.480 +모델을 이용해 랭 체인이 아주 간단하게 만들었죠 + +01:23.570 --> 01:29.810 +다음에 랑의 사슬 선언 언어를 간단히 다룰 거예요 우리가 사용할 건 아니지만 + +01:29.810 --> 01:31.100 +보여드리죠 + +01:31.130 --> 01:32.900 +원한다면 쓸 수 있어요 + +01:33.200 --> 01:38.390 +랑 체인의 비트를 어떻게 활용하는지 알려드릴게요 여러분이 더 잘 이해하실 수 있게요 + +01:38.390 --> 01:44.750 +특히 래그의 공통된 문제를 살펴보고 진단한 후 해결 방법을 얘기해 + +01:44.750 --> 01:45.770 +보죠 + +01:45.770 --> 01:53.300 +제작 프로젝트에서 랙을 사용할 수 있는 장비는 이 정도입니다 내부 작동 방식을 제대로 이해하게 + +01:53.300 --> 01:54.950 +될 거예요 + +01:55.040 --> 01:57.200 +그럼 다음 시간에 뵙죠 diff --git a/week5/community-contributions/subtitles/srts/59297593/en_US.srt b/week5/community-contributions/subtitles/srts/59297593/en_US.srt new file mode 100755 index 0000000..84816ad --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297593/en_US.srt @@ -0,0 +1,151 @@ +WEBVTT + +00:00.860 --> 00:04.520 +And welcome to continuing our journey with Hrag. + +00:04.520 --> 00:08.120 +And today it's time to unveil Liang Chen. + +00:08.120 --> 00:12.410 +So first, a quick reminder of what you can already do when it comes to Hrag. + +00:12.410 --> 00:16.280 +So we last time talked about the simple idea behind Hrag. + +00:16.280 --> 00:21.380 +Just the idea that you could retrieve relevant contexts and include that in the prompt. 
+ +00:21.380 --> 00:27.380 +And then we talked a bit about vectors and how text can be mapped to a vector that represents its meaning. + +00:27.380 --> 00:33.470 +And you can use these vectors to try to be more intelligent about how you select relevant context for + +00:33.470 --> 00:34.340 +the prompt. + +00:34.520 --> 00:37.220 +So today we're going to talk about Lang chain. + +00:37.220 --> 00:40.400 +I'm going to describe the framework and its pros and cons. + +00:40.400 --> 00:47.060 +We're going to use Lang chain to read in our knowledge base and to divide it into chunks of relevant + +00:47.060 --> 00:51.590 +information, which later will be putting in a vector database and retrieving. + +00:51.830 --> 00:54.830 +So here is the backstory to Lang Chain. + +00:54.830 --> 00:56.810 +It's a relatively recent framework. + +00:56.810 --> 01:05.870 +It was created in late 2022, and its main goal is to allow allow people to build LLM applications quickly, + +01:05.900 --> 01:11.840 +stitching together different bits of functionality into a sort of chain of processing. + +01:11.930 --> 01:19.860 +It actually has its own language, its own declarative language called Lang Chain Expression Language. + +01:19.890 --> 01:23.280 +LCL and we're not going to be using that. + +01:23.280 --> 01:28.110 +Particularly I am going to show you some of it, but there are simpler ways to use lang chain just by + +01:28.110 --> 01:29.820 +using Python code. + +01:30.330 --> 01:31.980 +So what are the pros and cons? + +01:31.980 --> 01:40.890 +So it does hugely simplify creating applications to do common things like assistance and Rag. + +01:40.920 --> 01:44.310 +As we will see, it's going to literally be a few lines of code. + +01:44.310 --> 01:46.800 +So it gives you very quick time to market. + +01:46.800 --> 01:50.190 +There's nothing particularly magical about something like rag. 
+ +01:50.190 --> 01:55.470 +We could do it the brute force way, by looking up in a vector database by adding to the prompt. + +01:55.500 --> 02:01.260 +Lang chain just standardizes and simplifies and makes it all easier and quicker. + +02:01.290 --> 02:08.520 +It's also a useful wrapper code around common models, so that you can write your code once and then + +02:08.520 --> 02:13.770 +switch in different models like OpenAI or Claude, and not have to deal with the slight differences + +02:13.770 --> 02:17.400 +in the API, which you can imagine is just sort of convenient. + +02:17.400 --> 02:19.680 +You can imagine that we could do that ourselves. + +02:19.680 --> 02:25.320 +We could write a little wrapper function, much as we did when we were using Gradio to call two different + +02:25.350 --> 02:26.040 +llms. + +02:26.040 --> 02:28.470 +And that's essentially what Lang Chain has done. + +02:28.470 --> 02:32.140 +They've built wrappers around the common APIs. + +02:32.770 --> 02:39.730 +It has to be said, as the APIs for LMS has matured, as they've become increasingly similar, and as + +02:39.760 --> 02:46.090 +the sort of code scripts out there for doing this become more and more widespread, there is less of + +02:46.090 --> 02:51.130 +a need for an underlying framework like Lang Chain than there was perhaps a year ago. + +02:51.370 --> 02:53.020 +So the need is decreased a bit. + +02:53.050 --> 02:57.190 +There's a lot of people that are rolling their own in terms of building their own pipelines for things + +02:57.190 --> 03:00.880 +like Rag, and you'll see how easy it would be to do that too. + +03:01.000 --> 03:04.810 +But Lang Chain still gives you a tremendous head start. + +03:04.810 --> 03:08.620 +And so it's very useful framework as you will see today. + +03:09.670 --> 03:14.050 +So what we're going to do is we're going to use Lang chain to load in our knowledge base. 
+ +03:14.050 --> 03:20.950 +We're first going to read all of the documents stored in the folders using some some tooling. + +03:21.340 --> 03:26.590 +We're then going to add metadata to the documents to say what kind of document it is, which is going + +03:26.620 --> 03:28.150 +to be useful for us later. + +03:28.180 --> 03:34.750 +And then we're going to use Lang chain to break the documents down into useful chunks, chunks which + +03:34.750 --> 03:41.500 +represent text that belongs nicely together and will be ready to be vectorized and put in our database. + +03:42.010 --> 03:45.490 +So with that, let's head back to JupyterLab. diff --git a/week5/community-contributions/subtitles/srts/59297593/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297593/ja_JP.srt new file mode 100755 index 0000000..c8733af --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297593/ja_JP.srt @@ -0,0 +1,127 @@ +WEBVTT + +00:00.860 --> 00:04.520 +そして、 フラグとの旅を続けることを歓迎する。 + +00:04.520 --> 00:08.120 +そして今日はリャン・チェンのお披露目だ。 + +00:08.120 --> 00:12.410 +そこでまず、 Hragに関してすでにできることを簡単に覚えておこう。 + +00:12.410 --> 00:16.280 +さて、 前回はHragのシンプルなアイデアについてお話した。 + +00:16.280 --> 00:21.380 +ただ、 関連するコンテクストを検索して、 それをプロンプトに含めることができるというアイデアだ。 + +00:21.380 --> 00:27.380 +そして、 ベクトルについて少し話をし、 テキストがどのようにその意味を表すベクトルにマッピングされるのかについて話した。 + +00:27.380 --> 00:34.340 +そして、 これらのベクトルを使って、 プロンプトに関連するコンテクストをどのように選択するかについて、 よりインテリジェントになろうとすることができる。 + +00:34.520 --> 00:37.220 +そこで今日は、 ラング・チェーンについて話そう。 + +00:37.220 --> 00:40.400 +フレームワークとその長所と短所について説明しよう。 + +00:40.400 --> 00:47.060 +ラングチェーンを使って知識ベースを読み込み、 関連する情報の塊に分割し、 + +00:47.060 --> 00:51.590 +後でベクターデータベースに入れて検索する。 + +00:51.830 --> 00:54.830 +ラング・チェーンの裏話をしよう。 + +00:54.830 --> 00:56.810 +比較的最近のフレームワークだ。 + +00:56.810 --> 01:11.840 +LLMは2022年後半に創設され、 その主な目的は、 人々がLLMアプリケーションを素早く構築できるようにすることである。 + +01:11.930 --> 01:19.860 +ラング・チェーン表現言語と呼ばれる独自の宣言言語がある。 + +01:19.890 --> 01:23.280 +LCLを使うつもりはない。 + +01:23.280 --> 01:29.820 +特にその一部をお見せしますが、 Pythonのコードを使うだけで、 
もっと簡単にラング・チェインを使う方法があります。 + +01:30.330 --> 01:31.980 +では、 長所と短所は何か? + +01:31.980 --> 01:40.890 +そのため、 アシストやラグといった一般的なことを行うアプリケーションの作成が大幅に簡素化される。 + +01:40.920 --> 01:44.310 +これからわかるように、 これは文字通り数行のコードである。 + +01:44.310 --> 01:46.800 +そのため、 市場投入までの時間が非常に短い。 + +01:46.800 --> 01:50.190 +ボロ布のようなものに特別な魔法はない。 + +01:50.190 --> 01:55.470 +プロンプトに追加することでベクターデータベースを検索し、 総当たり的に行うこともできる。 + +01:55.500 --> 02:01.260 +ラングチェーンは標準化され、 単純化され、 すべてを簡単かつ迅速にする。 + +02:01.290 --> 02:08.520 +また、 一般的なモデルのラッパーコードとしても便利で、 一度コードを書いてしまえば、 OpenAIやClaudeのような異なるモデルに切り替えても、 + +02:08.520 --> 02:17.400 +APIのわずかな違いに対処する必要がない。 + +02:17.400 --> 02:19.680 +自分たちでもできると想像できるだろう。 + +02:19.680 --> 02:26.040 +Gradioを使って2つの異なるllmを呼び出したときと同じように、 小さなラッパー関数を書くことができる。 + +02:26.040 --> 02:28.470 +それがラング・チェーンの本質的なやり方だ。 + +02:28.470 --> 02:32.140 +彼らは共通のAPIの周りにラッパーを作った。 + +02:32.770 --> 02:39.730 +LMSのAPIが成熟し、 ますます似たようなものになり、 これを実行するためのコードスクリプトがますます広まるにつれ、 + +02:39.760 --> 02:51.130 +ラング・チェインのような基礎となるフレームワークの必要性は、 おそらく1年前よりも低くなっていると言わざるを得ない。 + +02:51.370 --> 02:53.020 +だからニーズは少し減っている。 + +02:53.050 --> 03:00.880 +ラグのようなもののためにパイプラインを独自に構築している人はたくさんいる。 + +03:01.000 --> 03:04.810 +しかし、 ラング・チェーンはそれでもあなたに圧倒的な先手を与えてくれる。 + +03:04.810 --> 03:08.620 +だから、 今日見てもらうように、 とても便利なフレームワークなんだ。 + +03:09.670 --> 03:14.050 +これからやることは、 ラングチェーンを使ってナレッジベースを読み込むことだ。 + +03:14.050 --> 03:20.950 +まず、 いくつかのツールを使って、 フォルダに保存されているドキュメントをすべて読み込む。 + +03:21.340 --> 03:28.150 +そして、 その文書がどのような文書であるかを示すメタデータを文書に追加する。 + +03:28.180 --> 03:34.750 +そして、 ラングチェーンを使って、 ドキュメントを有用なチャンクに分解します。 チャンクは、 テキストがうまくまとまっていて、 + +03:34.750 --> 03:41.500 +ベクトル化してデータベースに入れる準備が整っているものを表します。 + +03:42.010 --> 03:45.490 +それではJupyterLabに戻りましょう。 diff --git a/week5/community-contributions/subtitles/srts/59297593/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297593/ko_KR.srt new file mode 100755 index 0000000..534463d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297593/ko_KR.srt @@ -0,0 +1,148 @@ +WEBVTT + +00:00.860 --> 00:04.520 +Hrag와 여정을 계속하는 걸 환영해요 + 
+00:04.520 --> 00:08.120 +오늘 량천을 공개할 거예요 + +00:08.120 --> 00:12.410 +먼저 Hrag와 관련해 여러분이 이미 할 수 있는 걸 간단히 상기시켜 드리죠 + +00:12.410 --> 00:16.280 +지난 시간에는 Hrag의 단순한 아이디어를 다뤘죠 + +00:16.280 --> 00:21.380 +관련 맥락을 검색해 그걸 프롬프트 안에 포함할 수 있다는 생각이죠 + +00:21.380 --> 00:27.380 +벡터에 대해서도 비트 했죠 텍스트가 의미를 나타내는 벡터에 매핑되는 방법도요 + +00:27.380 --> 00:33.470 +이 벡터를 이용해 프롬프트에 대한 관련 컨텍스트를 어떻게 선택할지 보다 영리해지려고 노력할 + +00:33.470 --> 00:34.340 +수 있어요 + +00:34.520 --> 00:37.220 +오늘은 랑 체인에 대해 얘기해 보죠 + +00:37.220 --> 00:40.400 +프레임워크와 장단점을 설명해 드릴게요 + +00:40.400 --> 00:47.060 +랭 체인을 이용해 지식 기반에서 읽고 관련 정보 덩어리로 나눌 겁니다 나중에 + +00:47.060 --> 00:51.590 +벡터 데이터베이스에 넣고 검색할 정보죠 + +00:51.830 --> 00:54.830 +랑 체인의 배경은 이거예요 + +00:54.830 --> 00:56.810 +비교적 최근에 만들어진 프레임워크예요 + +00:56.810 --> 01:05.870 +2022년 말에 만들어진 이 제품의 주요 목표는 사람들이 LLM 애플리케이션을 빠르게 구축할 수 있게 하는 것입니다 + +01:05.900 --> 01:11.840 +다양한 기능성 비트를 함께 엮어서 일종의 프로세싱 사슬을 만드는 것이죠 + +01:11.930 --> 01:19.860 +그들만의 언어가 있어요 선언어죠 랭 체인 표현 언어예요 + +01:19.890 --> 01:23.280 +LCL이요 이건 사용하지 않을 거예요 + +01:23.280 --> 01:28.110 +특히 제가 보여드릴 텐데요 파이썬 코드로 랑 체인을 이용하는 더 + +01:28.110 --> 01:29.820 +간단한 방법도 있어요 + +01:30.330 --> 01:31.980 +장단점이 뭐가 있죠? 
+ +01:31.980 --> 01:40.890 +어시스트나 Rag 같은 일반적인 작업을 위해 응용 프로그램 생성을 크게 단순화하죠 + +01:40.920 --> 01:44.310 +보다시피 코드 몇 줄로 끝날 거예요 + +01:44.310 --> 01:46.800 +그래서 판매 시간을 단축할 수 있죠 + +01:46.800 --> 01:50.190 +천 조각 같은 건 마법과는 거리가 멀어요 + +01:50.190 --> 01:55.470 +브루트 포스 방식으로 할 수도 있습니다 벡터 데이터베이스를 찾아 프롬프트에 추가하는 거죠 + +01:55.500 --> 02:01.260 +랭 체인은 모든 걸 쉽고 빠르게 표준화하고 단순화해요 + +02:01.290 --> 02:08.520 +일반적인 모델에 관한 유용한 래퍼 코드이기도 하죠 코드를 한 번 작성하고 OpenAI나 클로드 + +02:08.520 --> 02:13.770 +같은 다른 모델을 전환할 수 있어요 API에서의 약간의 차이를 처리할 + +02:13.770 --> 02:17.400 +필요가 없죠 좀 편리하다고 생각하실 거예요 + +02:17.400 --> 02:19.680 +우리가 직접 할 수 있다고 상상해 보세요 + +02:19.680 --> 02:26.040 +래퍼 함수를 쓸 수 있어요 그래디오를 이용해 두 개의 다른 llm을 호출했을 때처럼요 + +02:26.040 --> 02:28.470 +랑 체인이 한 일이 바로 그거예요 + +02:28.470 --> 02:32.140 +일반적인 API 주변에 래퍼를 구축했어요 + +02:32.770 --> 02:39.730 +LMS에 대한 API가 성숙해지면서 점점 유사해지고 이를 위한 일종의 + +02:39.760 --> 02:46.090 +코드 스크립트가 점점 광범위해지면서 랭 체인 같은 기본 프레임워크에 + +02:46.090 --> 02:51.130 +대한 필요성이 적어도 1년 전보다는 줄었어요 + +02:51.370 --> 02:53.020 +비트가 좀 줄었네요 + +02:53.050 --> 02:57.190 +래그 파이프라인을 직접 만드는 사람이 많아요 래그 파이프라인을 + +02:57.190 --> 03:00.880 +직접 만드는 거죠 얼마나 쉬운지 보실 거예요 + +03:01.000 --> 03:04.810 +랭 체인에서 출발하면 유리할 거예요 HMSW HMSW HMSW + +03:04.810 --> 03:08.620 +오늘날 보시게 될 아주 유용한 프레임워크죠 + +03:09.670 --> 03:14.050 +그래서 랭 체인을 이용해 지식 기반을 로드할 거예요 + +03:14.050 --> 03:20.950 +먼저 폴더에 저장된 모든 문서를 읽겠습니다 일부 도구를 이용해서요 + +03:21.340 --> 03:26.590 +그런 다음 문서에 메타데이터를 추가해 어떤 종류의 문서인지 알려줍니다 나중에 + +03:26.620 --> 03:28.150 +유용할 거예요 + +03:28.180 --> 03:34.750 +그런 다음 랭 체인을 이용해 문서를 유용한 청크로 나눌 겁니다 함께 잘 어우러진 + +03:34.750 --> 03:41.500 +텍스트를 나타내는 청크요 벡터화되어 데이터베이스에 들어갈 준비가 된 거죠 + +03:42.010 --> 03:45.490 +그럼 이제 주피터랩으로 돌아가죠 HOLO diff --git a/week5/community-contributions/subtitles/srts/59297595/en_US.srt b/week5/community-contributions/subtitles/srts/59297595/en_US.srt new file mode 100755 index 0000000..247a07d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297595/en_US.srt @@ -0,0 +1,124 @@ +WEBVTT + +00:00.560 --> 00:04.640 +So by the time 
you're watching this, hopefully you have played yourself with vectors.
+
+00:04.640 --> 00:10.370
+You've created your own chunks, you've put them in a data store, and you have looked at them in 2D
+
+00:10.370 --> 00:14.300
+and 3D and made up your mind which of those you prefer?
+
+00:14.780 --> 00:17.090
+Well, that introduces you to chroma.
+
+00:17.120 --> 00:18.260
+Very easy to use.
+
+00:18.260 --> 00:20.720
+You can also try using other data stores.
+
+00:20.750 --> 00:25.010
+FAISS is one that's very easy to use using the same kind of code.
+
+00:25.040 --> 00:32.750
+It's a Facebook AI similarity search, and it's an in-memory vector data store that is even it's not
+
+00:32.750 --> 00:34.730
+even easy to the same amount of difficulty.
+
+00:34.760 --> 00:38.270
+It just involves changing 1 or 2 lines of what we already wrote.
+
+00:38.270 --> 00:40.070
+So that could be another exercise for you.
+
+00:40.100 --> 00:46.970
+Repeat this in FAISS and you'll find it's it's trivial to do, and you will get, of course, consistent
+
+00:46.970 --> 00:49.610
+results if you're using OpenAI embeddings.
+
+00:50.420 --> 00:58.520
+Uh, so, uh, it's worth pointing out that what we've just experienced is the very best of Lang Chain
+
+00:58.520 --> 01:03.050
+in that we were able to accomplish a lot in literally just two lines of code.
+
+01:03.050 --> 01:08.600
+There was the line when we said embeddings equals OpenAI embeddings, and that was immediately giving
+
+01:08.600 --> 01:14.960
+us access to OpenAI's API to use for calculating embedding vectors.
+
+01:15.260 --> 01:20.360
+Um, and then there was just that single line where we created our chroma database.
+
+01:20.360 --> 01:22.490
+We said chroma dot from documents.
+
+01:22.490 --> 01:28.180
+And you remember we passed in three things, documents, which in our case were in fact chunks of documents.
+ +01:28.630 --> 01:35.320 +Embeddings, which are the OpenAI embeddings and the database name is the directory that it used. + +01:35.320 --> 01:37.480 +And we could have put in any name we wanted. + +01:37.750 --> 01:41.860 +And I chose vector DB, but you can put in whatever name you wish. + +01:41.980 --> 01:42.970 +The. + +01:43.330 --> 01:51.310 +It's worth pointing out that we created these vectors for each chunk from our original, uh, text that + +01:51.310 --> 01:52.090 +we read in. + +01:52.120 --> 01:54.520 +We could equally well have put in documents there. + +01:54.520 --> 01:58.870 +We could have instead created vectors for entire documents instead of for chunks. + +01:58.870 --> 01:59.800 +And you can try that. + +01:59.830 --> 02:02.650 +Try replacing the word chunks with documents and see what you get. + +02:02.680 --> 02:09.310 +Of course, there'll be fewer of them, and you can see whether they are as separated out in the same + +02:09.310 --> 02:11.440 +way I'm guessing that they will be. + +02:13.000 --> 02:19.330 +So with that, we are finally ready to bring this together and build our Rag pipeline. + +02:19.360 --> 02:20.500 +Our Rag solution. + +02:20.500 --> 02:24.340 +And at this point, I'm hoping all of the concepts are very clear in your mind. + +02:24.370 --> 02:29.530 +Next time we're going to again see the power of Lang Chain to be able to stitch together a full solution + +02:29.530 --> 02:36.640 +just with a few lines of code, including a conversation chain and memory, which are some of the things + +02:36.640 --> 02:38.740 +that Lang Chain handles very nicely. + +02:38.740 --> 02:44.410 +And we'll be able to have question and answer session demonstrating expert knowledge of the space. + +02:44.410 --> 02:47.470 +So that's a big milestone for us. + +02:47.470 --> 02:50.830 +It's rag coming together and it's happening in the next session. 
diff --git a/week5/community-contributions/subtitles/srts/59297595/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297595/ja_JP.srt new file mode 100755 index 0000000..f3dc1a2 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297595/ja_JP.srt @@ -0,0 +1,103 @@ +WEBVTT + +00:00.560 --> 00:04.640 +だから、 これをご覧になるころには、 ベクターで遊んだことがあるだろう。 + +00:04.640 --> 00:10.370 +自分のチャンクを作り、 データストアに保存し、 2Dと3Dで見て、 + +00:10.370 --> 00:14.300 +どちらが好きか決めた? + +00:14.780 --> 00:17.090 +さて、 クロマの紹介だ。 + +00:17.120 --> 00:18.260 +とても使いやすい。 + +00:18.260 --> 00:20.720 +他のデータストアを使うこともできる。 + +00:20.750 --> 00:25.010 +バイスは、 同じようなコードを使ってとても簡単に使えるものだ。 + +00:25.040 --> 00:34.730 +これはフェイスブックのAI類似検索であり、 インメモリー・ベクター・データストアで、 同じ難易度でも簡単ではない。 + +00:34.760 --> 00:38.270 +すでに書いたものの1、 2行を変えるだけだ。 + +00:38.270 --> 00:40.070 +だから、 これも君のための練習になるかもしれない。 + +00:40.100 --> 00:49.610 +OpenAIのエンベッディングを使用している場合は、 もちろん一貫した結果を得ることができる。 + +00:50.420 --> 01:03.050 +ええと、 つまり、 今経験したことは、 文字通りたった2行のコードで多くのことを成し遂げることができたという点で、 ロングチェーンの最たるものだということを指摘しておく価値があるだろう。 + +01:03.050 --> 01:14.960 +埋め込みはOpenAIの埋め込みに等しいと言ったとき、 すぐに埋め込みベクトルを計算するためのOpenAIのAPIにアクセスできるようになった。 + +01:15.260 --> 01:20.360 +それから、 クロマ・データベースを作成する1行があった。 + +01:20.360 --> 01:22.490 +クロマ・ドットと言ったのは書類からだ。 + +01:22.490 --> 01:28.180 +そして、 私たちが3つのもの、 書類、 つまり私たちの場合は書類の塊でパスをしたことを覚えているだろう。 + +01:28.630 --> 01:35.320 +EmbeddingsはOpenAIのエンベッディングで、 データベース名は使用したディレクトリです。 + +01:35.320 --> 01:37.480 +それに、 どんな名前でも入れることができた。 + +01:37.750 --> 01:41.860 +そして、 私はベクターDBを選んだが、 好きな名前を入れることができる。 + +01:41.980 --> 01:42.970 +その + +01:43.330 --> 01:52.090 +各チャンクのベクトルは、 我々が読み込んだテキストから作成したものである。 + +01:52.120 --> 01:54.520 +そこに文書を入れることも同じようにできた。 + +01:54.520 --> 01:58.870 +その代わりに、 チャンクではなくドキュメント全体のベクターを作成することもできた。 + +01:58.870 --> 01:59.800 +それを試してみるといい。 + +01:59.830 --> 02:02.650 +単語のかたまりを文書に置き換えてみてください。 + +02:02.680 --> 02:11.440 +もちろん、 その数は少なくなるだろうし、 私が推測しているように、 同じように分けられているかどうかを見ることができる。 + +02:13.000 --> 02:19.330 +これでようやく、 これをまとめてラグ・パイプラインを構築する準備が整ったわけだ。 + 
+02:19.360 --> 02:20.500 +ラグ・ソリューション + +02:20.500 --> 02:24.340 +そしてこの時点で、 すべてのコンセプトがあなたの頭の中で明確になっていることを望んでいる。 + +02:24.370 --> 02:29.530 +次回は、 会話チェーンやメモリなど、 ラング・チェインが非常にうまく処理するものを含め、 + +02:29.530 --> 02:38.740 +数行のコードで完全なソリューションを組み立てることができるラング・チェインのパワーを再び見てみたいと思います。 + +02:38.740 --> 02:44.410 +また、 スペースの専門的な知識を披露する質疑応答もできるだろう。 + +02:44.410 --> 02:47.470 +だから、 僕らにとっては大きな節目なんだ。 + +02:47.470 --> 02:50.830 +ボロが出て、 次のセッションで実現する。 diff --git a/week5/community-contributions/subtitles/srts/59297595/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297595/ko_KR.srt new file mode 100755 index 0000000..54e3d34 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297595/ko_KR.srt @@ -0,0 +1,124 @@ +WEBVTT + +00:00.560 --> 00:04.640 +이걸 보실 때쯤엔 벡터를 이해하셨길 바라요 + +00:04.640 --> 00:10.370 +자신만의 청크를 만들어 데이터 스토어에 넣고 2D, 3D로 + +00:10.370 --> 00:14.300 +검토한 후 어떤 게 더 좋은지 결정했죠 + +00:14.780 --> 00:17.090 +채도가 뭔지 알겠네요 + +00:17.120 --> 00:18.260 +사용하기 쉽죠 + +00:18.260 --> 00:20.720 +다른 데이터 저장소도 시도해 볼 수 있어요 + +00:20.750 --> 00:25.010 +바이스는 같은 종류의 코드를 사용하는 아주 쉬운 것 중 하나죠 + +00:25.040 --> 00:32.750 +페이스북 인공지능과 비슷한 검색이고 인 메모리 벡터 데이터 저장소인데 난이도가 비슷할 + +00:32.750 --> 00:34.730 +정도로 쉽지 않아요 + +00:34.760 --> 00:38.270 +이미 쓴 것의 한두 줄만 바꾸면 돼요 + +00:38.270 --> 00:40.070 +이것도 하나의 훈련이 될 수 있어요 + +00:40.100 --> 00:46.970 +사무실에서 반복하면 별거 아닌 걸 알게 될 겁니다 OpenAI 엠비딩을 사용한다면 + +00:46.970 --> 00:49.610 +일관된 결과를 얻을 수 있어요 + +00:50.420 --> 00:58.520 +이걸 짚고 넘어가야 할 것 같아요 방금 우리가 경험한 건 긴 사슬 중 가장 좋은 경험이었어요 + +00:58.520 --> 01:03.050 +말 그대로 코드 두 줄로 많은 걸 성취할 수 있었죠 + +01:03.050 --> 01:08.600 +삽입이 OpenAI 삽입과 같다고 한 줄이 있었는데 그건 곧바로 OpenAI + +01:08.600 --> 01:14.960 +API 사용에 액세스 권한을 주어 삽입 벡터를 계산할 수 있게 해줬죠 + +01:15.260 --> 01:20.360 +그리고 한 줄로 채도 데이터베이스를 만들었어요 + +01:20.360 --> 01:22.490 +서류에서 채도점을 썼어요 + +01:22.490 --> 01:28.180 +세 가지를 통과시켰죠 문서예요 우리 경우엔 서류 뭉치였죠 + +01:28.630 --> 01:35.320 +OpenAI 엠버딩스, 데이터베이스 이름은 사용된 디렉터리예요 + +01:35.320 --> 01:37.480 +어떤 이름이라도 넣을 수 있었어요 Put it up Put up Put it up Put it up Put it up 
Put it up Put it Put it up Put it + +01:37.750 --> 01:41.860 +벡터 DB를 선택했지만 어떤 이름이든 put을 수 있어요 + +01:41.980 --> 01:42.970 +그요 + +01:43.330 --> 01:51.310 +우리가 읽은 원래 텍스트에서 각각의 덩어리를 위해 벡터를 만들었다는 걸 강조할 가치가 + +01:51.310 --> 01:52.090 +있어요 + +01:52.120 --> 01:54.520 +문서를 넣을 수도 있었어요 Put it up Put it up Put it up Put it up Put it up Put it up Put it + +01:54.520 --> 01:58.870 +덩어리가 아닌 전체 문서에 대한 벡터를 만들 수도 있었어요 + +01:58.870 --> 01:59.800 +여러분도 해 보세요 + +01:59.830 --> 02:02.650 +get 을 문서로 바꿔치기해 보세요 + +02:02.680 --> 02:09.310 +물론 숫자는 더 적겠지만요 제 추측대로 각 칸이 얼마나 나뉘어 + +02:09.310 --> 02:11.440 +있는지 볼 수 있어요 + +02:13.000 --> 02:19.330 +이제 이걸 하나로 합쳐서 래그 파이프라인을 만들 거예요 + +02:19.360 --> 02:20.500 +우리 헝겊 용액이에요 + +02:20.500 --> 02:24.340 +지금은 모든 콘셉트가 명확히 이해되셨길 바라요 + +02:24.370 --> 02:29.530 +다음엔 랭 체인의 힘을 보죠 코드 몇 줄로 전체 솔루션을 + +02:29.530 --> 02:36.640 +엮을 수 있습니다 대화 체인과 메모리를 포함해서요 랭 체인이 아주 잘 + +02:36.640 --> 02:38.740 +다루는 거죠 + +02:38.740 --> 02:44.410 +질의응답 시간을 가질 수 있을 겁니다 이 공간에 대한 전문가의 지식을 보여줄 수 있죠 + +02:44.410 --> 02:47.470 +저희에겐 큰 이정표예요 + +02:47.470 --> 02:50.830 +다음 세션에서 래그가 완성될 거예요 diff --git a/week5/community-contributions/subtitles/srts/59297599/en_US.srt b/week5/community-contributions/subtitles/srts/59297599/en_US.srt new file mode 100755 index 0000000..6ae1c3c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297599/en_US.srt @@ -0,0 +1,133 @@ +WEBVTT + +00:00.350 --> 00:03.320 +Well, that was a sneaky detour I took you on in the last one. + +00:03.320 --> 00:07.670 +I hope you enjoyed it though, and I hope you found it satisfying and that you're playing around with + +00:07.670 --> 00:08.270 +that week. + +00:08.270 --> 00:10.760 +4.5 day 4.5 right now. 
+ +00:10.850 --> 00:17.450 +But back to the main plan, which was that we were going to talk about Lang chain expression language, + +00:17.540 --> 00:24.110 +which is the way that you can set up your chains in Lang chain, which is how Lang Chain thinks about + +00:24.110 --> 00:29.870 +the different steps in the puzzle that are glued together to solve your pipeline. + +00:30.020 --> 00:37.160 +And you can do that by putting together a file that expresses, in a declarative style what it is that + +00:37.160 --> 00:38.750 +you're looking to achieve. + +00:38.930 --> 00:45.830 +So this LCL lang chain expression language can be used to lay out what you want to do. + +00:45.950 --> 00:47.690 +It's in the form of a YAML file. + +00:47.690 --> 00:50.420 +If you're familiar with YAML files and it looks like this. + +00:50.420 --> 00:56.570 +And if we look if we read through this, you can see that we've got here we're specifying a model with + +00:56.570 --> 01:02.720 +a temperature with a directory that, that um, that will be the persistent directory for our vector + +01:02.720 --> 01:03.500 +database. + +01:03.530 --> 01:10.130 +And then we have these different components the LM which is of type chat, open AI. + +01:10.160 --> 01:12.350 +We have the conversation memory. + +01:12.380 --> 01:18.110 +We have the open AI embeddings, the chroma vector store, the retriever and the chain and the output. + +01:18.110 --> 01:24.530 +So hopefully you see how this maps very closely indeed to the Python code that we wrote. + +01:24.530 --> 01:30.920 +And you can imagine that these kinds of declarative models can be put together to solve all sorts of + +01:30.920 --> 01:31.310 +problems. + +01:31.310 --> 01:37.970 +So it's a very powerful language, and it's by people who have spent time with it. + +01:37.970 --> 01:41.330 +I think it's it's very productive at this point. 
+ +01:41.360 --> 01:45.920 +My personal preference is to stick with Python code and use that to put this together, to put together + +01:45.920 --> 01:48.080 +our workflows as we did before. + +01:48.320 --> 01:52.310 +But if this interests you, you could look more at this and consider this as an alternative. + +01:52.310 --> 01:57.350 +And if you come across this in some other project, you hopefully won't be perturbed by it. + +01:57.380 --> 02:04.880 +It maps pretty closely to the Python code, so the next thing I wanted to do was just talk a little + +02:04.910 --> 02:10.780 +bit about how Lang Chain works behind the scenes, but hopefully at this point you've got a pretty good + +02:10.780 --> 02:12.700 +intuition into that already. + +02:13.030 --> 02:16.330 +Uh Langshan isn't doing a ton of magic. + +02:16.330 --> 02:18.460 +It's just very convenient indeed. + +02:18.460 --> 02:24.940 +But really, it is just making the right calls to the different underlying components like chroma or + +02:24.940 --> 02:25.510 +Feis. + +02:25.540 --> 02:31.540 +It's retrieving the right, uh, documents, and then it is stitching them into the prompt. + +02:31.540 --> 02:36.700 +So I'm going to show you in a second how we can use things called callbacks to get langshan to tell + +02:36.730 --> 02:41.260 +us what actually was the prompt that it is sending to OpenAI. + +02:41.290 --> 02:46.540 +At the end of the day, after it's done this lookup, and we can use that to diagnose a common problem + +02:46.540 --> 02:54.070 +that happens, which is what happens if, for whatever reason, the right chunks aren't sent to the + +02:54.100 --> 02:58.330 +to the model, or at least not the chunk that we really wanted, so that it doesn't provide us with + +02:58.330 --> 02:59.770 +the kind of answer we wanted. 
+ +02:59.800 --> 03:07.360 +Well, then fix that problem, and we'll end with some thoughts on just demystifying the whole, uh, + +03:07.360 --> 03:10.300 +infrastructure that Langshan provides us. + +03:10.330 --> 03:15.160 +And with that, we'll head back to JupyterLab for the real day five this time. diff --git a/week5/community-contributions/subtitles/srts/59297599/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297599/ja_JP.srt new file mode 100755 index 0000000..3aa5397 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297599/ja_JP.srt @@ -0,0 +1,100 @@ +WEBVTT + +00:00.350 --> 00:03.320 +まあ、 前作で君を連れて行ったのは、 卑劣な回り道だったね。 + +00:03.320 --> 00:08.270 +でも、 楽しんでもらえたならうれしいし、 満足してもらえたなら、 そしてその週を遊びつくしてもらえたらうれしい。 + +00:08.270 --> 00:10.760 +4. 5日目 4. 今は5だ。 + +00:10.850 --> 00:29.870 +ラング・チェーンは、 パイプラインを解決するためにパズルの様々なステップをつなぎ合わせていくものです。 + +00:30.020 --> 00:37.160 +そして、 あなたが達成しようとしていることを宣言的なスタイルで表現したファイルを作成することで、 + +00:37.160 --> 00:38.750 +それが可能になる。 + +00:38.930 --> 00:45.830 +つまり、 このLCLのチェーン表現言語は、 やりたいことをレイアウトするために使うことができるのだ。 + +00:45.950 --> 00:47.690 +YAMLファイルの形式だ。 + +00:47.690 --> 00:50.420 +もしあなたがYAMLファイルに慣れていて、 以下のように見えるなら。 + +00:50.420 --> 00:56.570 +これを読むと、 ベクター・データベースの永続的なディレクトリとなる、 + +00:56.570 --> 01:03.500 +温度とディレクトリを持つモデルを指定していることがわかる。 + +01:03.530 --> 01:10.130 +LMはチャットタイプで、 オープンAIだ。 + +01:10.160 --> 01:12.350 +私たちは会話の記憶を持っている。 + +01:12.380 --> 01:18.110 +我々は、 オープンAIエンベッディング、 クロマベクトルストア、 レトリーバー、 チェーン、 そしてアウトプットを持っている。 + +01:18.110 --> 01:24.530 +私たちが書いたPythonのコードと、 このコードがいかに密接に対応しているかがおわかりいただけたと思う。 + +01:24.530 --> 01:31.310 +そして、 この種の宣言的モデルを組み合わせることで、 あらゆる種類の問題を解決できることが想像できるだろう。 + +01:31.310 --> 01:37.970 +つまり、 非常にパワフルな言語であり、 それに時間を費やしてきた人々によるものなのだ。 + +01:37.970 --> 01:41.330 +現時点では非常に生産的だと思う。 + +01:41.360 --> 01:48.080 +個人的な好みとしては、 Pythonのコードにこだわって、 以前と同じようにワークフローをまとめるのに使いたい。 + +01:48.320 --> 01:52.310 +しかし、 もしこのことに興味があるのであれば、 このことをもっと調べて、 代替案として検討することもできるだろう。 + +01:52.310 --> 01:57.350 +そして、 もし他のプロジェクトでこのようなことに出くわしても、 
できれば動揺しないでほしい。 + +01:57.380 --> 02:12.700 +Pythonのコードにかなり近い形でマッピングされているので、 次はLang Chainが舞台裏でどのように動いているのかについて少しお話ししたいと思います。 + +02:13.030 --> 02:16.330 +あー、 ランシャンはトンマジックをやっていない。 + +02:16.330 --> 02:18.460 +実に便利だ。 + +02:18.460 --> 02:25.510 +しかし実際には、 クロマやファイズといったさまざまな基礎コンポーネントに適切なコールをするだけだ。 + +02:25.540 --> 02:31.540 +適切なドキュメントを検索し、 それをプロンプトにつなぎ合わせる。 + +02:31.540 --> 02:36.700 +コールバックと呼ばれるものを使って、 ラングシャンがOpenAIに送信したプロンプトが何であったかを教えてもらう方法を、 + +02:36.730 --> 02:41.260 +ちょっとお見せしましょう。 + +02:41.290 --> 02:46.540 +つまり、 何らかの理由で正しいチャンクがモデルに送られなかったり、 + +02:46.540 --> 02:59.770 +少なくとも本当に欲しいチャンクが送られなかったりした場合に起こる問題だ。 + +02:59.800 --> 03:07.360 +では、 その問題を解決して、 最後に、 ランシャンが提供してくれるインフラ全体を解明することについて、 + +03:07.360 --> 03:10.300 +少し考えてみよう。 + +03:10.330 --> 03:15.160 +それでは、 5日目のJupyterLabに戻ることにしよう。 diff --git a/week5/community-contributions/subtitles/srts/59297599/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297599/ko_KR.srt new file mode 100755 index 0000000..6aba39d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297599/ko_KR.srt @@ -0,0 +1,127 @@ +WEBVTT + +00:00.350 --> 00:03.320 +지난번엔 내가 좀 엉뚱하게 둘러갔죠 + +00:03.320 --> 00:07.670 +그래도 즐거우셨길 바라요 만족스러우셨길 바라고 이번 주를 즐기셨길 + +00:07.670 --> 00:08.270 +바라요 + +00:08.270 --> 00:10.760 +4살요 5일째, 4일째예요 지금은 5시예요 + +00:10.850 --> 00:17.450 +본론으로 돌아가서 랭 체인 표현 언어를 얘기하려고 했는데요 랭 체인에서 + +00:17.540 --> 00:24.110 +체인을 설정하는 방법입니다 랭 체인은 파이프라인을 풀기 위해 퍼즐의 + +00:24.110 --> 00:29.870 +여러 단계를 어떻게 풀었는지를 뜻하죠 + +00:30.020 --> 00:37.160 +여러분이 이루고자 하는 것을 표현해주는 파일을 함께 놓을 + +00:37.160 --> 00:38.750 +수 있어요. 
+ +00:38.930 --> 00:45.830 +이 LCL lang 체인 표현 언어는 여러분이 원하는 걸 레이아웃하는 데 사용될 수 있어요 + +00:45.950 --> 00:47.690 +YAML 파일 형태예요 + +00:47.690 --> 00:50.420 +YAML 파일에 익숙하신 분들을 위해 설명드리죠 + +00:50.420 --> 00:56.570 +이걸 읽어보면 온도를 가진 모델을 지정하고 있는 게 보이실 겁니다 + +00:56.570 --> 01:03.500 +디렉터리와 관련해서요 벡터 데이터베이스의 영구 디렉터리죠 + +01:03.530 --> 01:10.130 +그리고 다양한 구성 요소가 있죠 LM은 형식 채팅, 열린 인공지능이에요 + +01:10.160 --> 01:12.350 +대화 메모리가 있어요 + +01:12.380 --> 01:18.110 +열린 인공지능 내장 크로마 벡터 스토어 회수기와 체인, 출력도요 + +01:18.110 --> 01:24.530 +우리가 작성한 파이썬 코드와 아주 밀접하게 매핑되어 있죠 + +01:24.530 --> 01:31.310 +이런 선언적 모델이 한데 합쳐져 모든 문제를 해결할 수 있다고 상상해 보세요. TUI + +01:31.310 --> 01:37.970 +함께 시간을 보낸 사람들이 만든 아주 강력한 언어예요 + +01:37.970 --> 01:41.330 +지금은 아주 생산적인 것 같아요 + +01:41.360 --> 01:45.920 +저는 개인적으로 파이썬 코드를 선호합니다. 그리고 그것을 이용해서 이전에 했던 워크플로를 + +01:45.920 --> 01:48.080 +함께 만드는 것을 선호해요. + +01:48.320 --> 01:52.310 +하지만 이게 관심 있다면 이걸 좀 더 보고 대안으로 생각해도 돼요 + +01:52.310 --> 01:57.350 +다른 프로젝트에서 이걸 발견해도 동요하지 않으셨으면 해요 + +01:57.380 --> 02:04.880 +파이썬 코드와 아주 밀접하게 매핑됩니다 다음으로 살펴볼 내용은 랭 체인이 어떻게 + +02:04.910 --> 02:10.780 +비트에서 작동하는지에 대한 것입니다 이미 직관적으로 잘 이해하셨기를 + +02:10.780 --> 02:12.700 +바라면서요 + +02:13.030 --> 02:16.330 +랑산은 술법이 약해요 + +02:16.330 --> 02:18.460 +정말 편리한 방법이죠 + +02:18.460 --> 02:24.940 +사실 크로마나 Feis 같은 기본 구성 요소에 올바른 호출을 하는 + +02:24.940 --> 02:25.510 +거죠 + +02:25.540 --> 02:31.540 +올바른 문서를 검색해 프롬프트에 꿰매는 거죠 + +02:31.540 --> 02:36.700 +잠시 후에 보여드릴 건 콜백이라는 걸 어떻게 이용하느냐예요 랑산 박사에게 + +02:36.730 --> 02:41.260 +오픈AI에 보내는 프롬프트가 뭔지 알아내는 거죠 + +02:41.290 --> 02:46.540 +결국 조사가 끝난 후 공통적인 문제를 진단하는 데 사용할 수 있어요 + +02:46.540 --> 02:54.070 +어떤 이유에서든 올바른 덩어리가 모델로 보내지 않았거나 적어도 우리가 정말 원하는 덩어리가 + +02:54.100 --> 02:58.330 +아닐 경우 발생하죠 우리가 원하는 종류의 답을 제공하지 + +02:58.330 --> 02:59.770 +않는 거예요 + +02:59.800 --> 03:07.360 +그럼 그 문제를 해결하고 랭산의 인프라 구조를 이해하기 + +03:07.360 --> 03:10.300 +쉽게 마무리하죠 + +03:10.330 --> 03:15.160 +이것과 함께 주피터랩으로 돌아가 진짜 5일째를 살펴보죠 HDMI HDMI diff --git a/week5/community-contributions/subtitles/srts/59297601/en_US.srt 
b/week5/community-contributions/subtitles/srts/59297601/en_US.srt new file mode 100755 index 0000000..88afe1a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297601/en_US.srt @@ -0,0 +1,202 @@ +WEBVTT + +00:01.130 --> 00:05.450 +I'm not going to lie, at this point you have every reason to be impatient with me. + +00:05.480 --> 00:10.340 +We've been yammering away for ages about raga, and you've not actually had a chance to use rag yet. + +00:10.340 --> 00:11.840 +We've just talked about vectors. + +00:11.840 --> 00:16.520 +We've talked about prompts and context and cheap versions of Rag. + +00:16.910 --> 00:18.890 +Finally, it's time for the real deal. + +00:18.920 --> 00:22.100 +Today, it's time that we put a rag pipeline into action. + +00:22.100 --> 00:23.330 +And it's going to be stupid. + +00:23.330 --> 00:23.750 +Easy. + +00:23.750 --> 00:25.250 +Just you wait. + +00:25.280 --> 00:27.440 +So what's going to happen today? + +00:27.440 --> 00:32.480 +We're going to create a conversation chain in long chain, which is where long chain comes together + +00:32.480 --> 00:38.720 +to put the different pieces glued together to give you a conversation with retrieval with Rag. + +00:38.750 --> 00:44.840 +We're going to ask questions and get answers that demonstrate an expert understanding and will build + +00:44.870 --> 00:48.410 +ultimately a knowledge worker assistant with a chat UI. + +00:48.410 --> 00:51.560 +And because of all the wonderful things that you've already learned, you're going to see that it's + +00:51.560 --> 00:53.870 +going to be incredibly easy, of course. + +00:54.050 --> 01:00.080 +So first of all, just to give you a briefing, there are some abstractions in long chain, some some + +01:00.080 --> 01:03.920 +concepts that long chain has defined that make things easier. + +01:03.920 --> 01:07.490 +And here are the three of them that we will be using today. 
+ +01:07.520 --> 01:10.250 +First of all, there's an abstraction around an LLM. + +01:10.250 --> 01:13.730 +An LLM is just represents, in our case, OpenAI. + +01:13.760 --> 01:15.350 +But it could represent others. + +01:15.350 --> 01:20.690 +And Liangcheng gives you that one object that represents your abstraction around a model. + +01:20.870 --> 01:23.750 +Then there is an abstraction called a retriever. + +01:23.750 --> 01:28.130 +And that is a sort of interface onto something like a vector store. + +01:28.130 --> 01:32.420 +In our case, it will be Cromer, which will be used for Rag retrieval. + +01:32.420 --> 01:38.090 +So that is a retriever interface around something that can take vectors and can enrich your prompt. + +01:38.150 --> 01:41.090 +And then the third abstraction is memory. + +01:41.090 --> 01:48.470 +And that represents some kind of a of a history of a discussion with a chatbot in some way, some memory. + +01:48.470 --> 01:55.040 +So this in practice, what we're used to here is that list of dicts, that list that comprises of a + +01:55.070 --> 01:59.090 +sort of a system message at the top, and then user assistant, user assistant. + +01:59.360 --> 02:05.990 +But that has been abstracted away into a concept called memory for long chain, which behind under the + +02:05.990 --> 02:11.390 +covers it will handle that list or whatever other kind of format different models might need. + +02:11.390 --> 02:19.090 +So these are the three key, uh, wrappers around more functionality that you get from long Chain. + +02:19.090 --> 02:26.020 +And with that in mind, take a look at how simple it's going to be to put together a rag pipeline. + +02:26.080 --> 02:29.650 +It's going to be done with four lines of code. + +02:29.650 --> 02:33.070 +And here are the four lines of code in front of you right now. + +02:33.310 --> 02:37.720 +And this is the brilliance that is in the first line. 
+ +02:37.900 --> 02:42.280 +LM is chat open AI that is creating a lang chain ln object. + +02:42.370 --> 02:46.120 +LM object for open AI. + +02:46.450 --> 02:50.890 +And you can imagine there's similar objects that you could create for anything else. + +02:51.670 --> 02:53.860 +That's the first line, the first abstraction. + +02:53.890 --> 02:55.480 +LM the second line. + +02:55.480 --> 02:56.470 +The second abstraction. + +02:56.470 --> 02:57.130 +Memory. + +02:57.160 --> 03:01.210 +You create a lang chain object called a conversation buffered memory. + +03:01.630 --> 03:03.580 +You have to provide this a couple of things. + +03:03.580 --> 03:10.660 +The key is just the the how it will organize, what it will, what you can use to look up that memory + +03:10.660 --> 03:16.210 +and chat history is what it has to be, because that's what's going to be expected later and return + +03:16.210 --> 03:21.040 +messages is telling Lang that you're going to want this to be stored in a way that what comes back are + +03:21.040 --> 03:26.020 +going to be a series of messages, not just a big block of text representing the conversation. + +03:26.020 --> 03:30.910 +So you just need to know that these are what you have to use for this kind of chat application. + +03:31.810 --> 03:40.720 +The next line is quite simply saying we have a vector store that we've created its chroma, and we're + +03:40.720 --> 03:44.290 +going to call this this this method as retriever. + +03:44.290 --> 03:48.100 +And it's going to wrap that in an interface object called a retriever. + +03:48.100 --> 03:55.210 +And that is the the kind of, uh, object that Lang chain is expecting in order to be able to, to have + +03:55.210 --> 03:56.710 +a Rag workflow. + +03:56.980 --> 04:01.570 +So those are our three abstractions the LM, the memory and the retriever. + +04:01.600 --> 04:02.680 +They've all been created. 
+ +04:02.680 --> 04:07.600 +And now that last line puts it together into something called a conversation chain. + +04:07.900 --> 04:14.470 +Uh, and that is something which is a conversation retrieval chain that you create, uh, and you call, + +04:14.500 --> 04:20.170 +you create it by calling that, that, um, uh, method from LM, and you just pass in three things + +04:20.170 --> 04:25.030 +the LM, the retriever and the memory, the three things we just created. + +04:25.030 --> 04:26.920 +And so it's as simple as that. + +04:26.920 --> 04:32.620 +With that fourth line of code, we have just created a Rag pipeline. + +04:33.490 --> 04:34.450 +You don't believe me? + +04:34.450 --> 04:37.150 +Let's go over to JupyterLab and give it a try ourselves. diff --git a/week5/community-contributions/subtitles/srts/59297601/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297601/ja_JP.srt new file mode 100755 index 0000000..2a4a70c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297601/ja_JP.srt @@ -0,0 +1,175 @@ +WEBVTT + +00:01.130 --> 00:05.450 +正直に言うと、 この時点で私には焦る理由がある。 + +00:05.480 --> 00:10.340 +私たちはずっとラガについてしゃべってきたのに、 あなたはまだ実際にラガを使う機会がなかった。 + +00:10.340 --> 00:11.840 +ベクターについて話したばかりだ。 + +00:11.840 --> 00:16.520 +プロンプトとコンテクスト、 そしてラグの廉価版について話してきた。 + +00:16.910 --> 00:18.890 +いよいよ本番だ。 + +00:18.920 --> 00:22.100 +今日は、 ボロ・パイプラインを実行に移す時だ。 + +00:22.100 --> 00:23.330 +そして、 バカバカしくなる。 + +00:23.330 --> 00:23.750 +簡単だ。 + +00:23.750 --> 00:25.250 +待っていてくれ。 + +00:25.280 --> 00:27.440 +それで今日はどうなるんだ? 
+ +00:27.440 --> 00:32.480 +私たちはロング・チェーンで会話の連鎖を作ろうと思っています。 ロング・チェーンは、 + +00:32.480 --> 00:38.720 +ラグとのリトリーバルで会話をするために、 さまざまなピースを接着剤でくっつけたものです。 + +00:38.750 --> 00:48.410 +私たちは、 専門家の理解を示す質問と回答を得ることで、 最終的にはチャットUIを備えたナレッジワーカーアシスタントを構築するつもりです。 + +00:48.410 --> 00:53.870 +そして、 すでに学んだ素晴らしいことのおかげで、 もちろん、 信じられないほど簡単になることがわかるだろう。 + +00:54.050 --> 01:03.920 +まず最初に、 簡単に説明すると、 ロングチェーンにはいくつかの抽象化された概念がある。 + +01:03.920 --> 01:07.490 +今日使うのはこの3つだ。 + +01:07.520 --> 01:10.250 +まず第一に、 LLMには抽象的なイメージがある。 + +01:10.250 --> 01:13.730 +LLMは、 我々の場合はOpenAIを表しているに過ぎない。 + +01:13.760 --> 01:15.350 +だが、 他の選手を代表する可能性もある。 + +01:15.350 --> 01:20.690 +そして梁城は、 モデルを抽象化した1つのオブジェクトを提供する。 + +01:20.870 --> 01:23.750 +そして、 レトリーバーと呼ばれる抽象的な存在がある。 + +01:23.750 --> 01:28.130 +そしてそれは、 ベクターストアのようなものへのインターフェースのようなものだ。 + +01:28.130 --> 01:32.420 +この場合、 ラグ検索に使われるのはクロマーである。 + +01:32.420 --> 01:38.090 +つまり、 ベクトルを受け取ってプロンプトを充実させることができる、 レトリーバー・インターフェースのようなものだ。 + +01:38.150 --> 01:41.090 +そして3つ目の抽象化がメモリーだ。 + +01:41.090 --> 01:48.470 +そしてそれは、 チャットボットとのディスカッションの履歴のようなもの、 記憶を表している。 + +01:48.470 --> 01:55.040 +だから実際には、 私たちがここで慣れ親しんでいるのは、 ディクツのリスト、 一番上のシステム・メッセージのようなものからなるリスト、 + +01:55.070 --> 01:59.090 +そしてユーザー・アシスタント、 ユーザー・アシスタントだ。 + +01:59.360 --> 02:11.390 +しかし、 それはロングチェーン・メモリーという概念に抽象化され、 その裏側では、 リストやその他のさまざまなモデルが必要とするフォーマットを処理する。 + +02:11.390 --> 02:19.090 +これが、 ロング・チェインから得られる、 より多くの機能を包む3つの主要なラッパーだ。 + +02:19.090 --> 02:26.020 +そのことを念頭に置いて、 ラグ・パイプラインをいかにシンプルにまとめるかを見てみよう。 + +02:26.080 --> 02:29.650 +これは4行のコードでできる。 + +02:29.650 --> 02:33.070 +そしてこれが、 今目の前にある4行のコードだ。 + +02:33.310 --> 02:37.720 +そして、 これこそが1行目にある輝きなのだ。 + +02:37.900 --> 02:42.280 +LMは、 ラングチェーンlnオブジェクトを作成しているチャットオープンAIです。 + +02:42.370 --> 02:46.120 +オープンAI用のLMオブジェクト。 + +02:46.450 --> 02:50.890 +そして、 他のものにも似たようなものを作ることができると想像できるだろう。 + +02:51.670 --> 02:53.860 +これが最初のラインであり、 最初の抽象化だ。 + +02:53.890 --> 02:55.480 +2行目のLM。 + +02:55.480 --> 02:56.470 +2つ目の抽象化。 + +02:56.470 --> 02:57.130 +メモリ。 + +02:57.160 --> 03:01.210 +会話バッファメモリと呼ばれるラングチェーン・オブジェクトを作成します。 + 
+03:01.630 --> 03:03.580 +これには2つほど必要なものがある。 + +03:03.580 --> 03:10.660 +重要なのは、 どのように整理するか、 + +03:10.660 --> 03:26.020 +何を整理するか、 何を使ってメモリやチャット履歴を検索するかということだ。 + +03:26.020 --> 03:30.910 +だから、 この種のチャット・アプリケーションには、 これらを使用しなければならないことを知っておく必要がある。 + +03:31.810 --> 03:44.290 +次の行は、 単純にベクターストアがあり、 そのクロマを作成したと言っている。 + +03:44.290 --> 03:48.100 +そして、 それをリトリーバーと呼ばれるインターフェース・オブジェクトで包む。 + +03:48.100 --> 03:56.710 +ラング・チェーンがラグ・ワークフローを実現するために期待しているのは、 このようなオブジェクトなのだ。 + +03:56.980 --> 04:01.570 +つまり、 LM、 メモリー、 そしてレトリバーという3つの抽象的なものだ。 + +04:01.600 --> 04:02.680 +それらはすべて作られたものだ。 + +04:02.680 --> 04:07.600 +そして今、 最後の一行がそれを会話の連鎖と呼ばれるものにまとめた。 + +04:07.900 --> 04:14.470 +LMからこのメソッドを呼び出して作成し、 + +04:14.500 --> 04:25.030 +LM、 リトリーバー、 メモリの3つを渡すだけです。 + +04:25.030 --> 04:26.920 +それはとてもシンプルなことだ。 + +04:26.920 --> 04:32.620 +この4行目のコードで、 ラグ・パイプラインができた。 + +04:33.490 --> 04:34.450 +信じないのか? + +04:34.450 --> 04:37.150 +JupyterLabに行って試してみよう。 diff --git a/week5/community-contributions/subtitles/srts/59297601/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297601/ko_KR.srt new file mode 100755 index 0000000..b68866e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297601/ko_KR.srt @@ -0,0 +1,199 @@ +WEBVTT + +00:01.130 --> 00:05.450 +솔직히 지금 저한테 짜증 낼 이유가 충분하죠 + +00:05.480 --> 00:10.340 +우린 라가 얘기를 한참 했는데 아직 라가를 써 본 적이 없네요 + +00:10.340 --> 00:11.840 +벡터에 대해 얘기했죠 + +00:11.840 --> 00:16.520 +프롬프트와 컨텍스트 래그의 저렴한 버전에 관해 얘기했죠 + +00:16.910 --> 00:18.890 +드디어 본격적인 시작이죠 + +00:18.920 --> 00:22.100 +오늘은 Put 파이프라인을 실행할 거예요 + +00:22.100 --> 00:23.330 +바보 같은 짓이죠 + +00:23.330 --> 00:23.750 +진정해요 + +00:23.750 --> 00:25.250 +두고 봐요 + +00:25.280 --> 00:27.440 +오늘은 어떻게 될까요? 
+ +00:27.440 --> 00:32.480 +긴 사슬로 된 대화 사슬을 만들 거예요 긴 사슬이 모여서 + +00:32.480 --> 00:38.720 +여러 조각을 접착제로 붙이는 거죠 걸레 회수와 대화를 할 수 있게요 + +00:38.750 --> 00:44.840 +전문가의 이해를 보여주는 질문을 하고 답을 얻을 겁니다 채팅 UI로 궁극적으로 + +00:44.870 --> 00:48.410 +지식 작업자 비서를 만들고요 + +00:48.410 --> 00:51.560 +이미 훌륭한 것들을 배웠기 때문에 아주 + +00:51.560 --> 00:53.870 +쉽다는 걸 알게 될 거예요 + +00:54.050 --> 01:00.080 +먼저 간략히 말씀드리자면 긴 연결의 추상화가 있어요 긴 연결의 + +01:00.080 --> 01:03.920 +개념이 일을 쉽게 만들어주죠 + +01:03.920 --> 01:07.490 +이건 오늘 사용할 3가지예요 + +01:07.520 --> 01:10.250 +우선 LLM에는 추상적인 게 있어요 + +01:10.250 --> 01:13.730 +LLM은 오픈아이를 의미해요 + +01:13.760 --> 01:15.350 +다른 사람을 상징할 수도 있죠 + +01:15.350 --> 01:20.690 +량청은 모델을 둘러싼 추상적인 객체를 대표해요 + +01:20.870 --> 01:23.750 +레트리버라는 추상화도 있어요 + +01:23.750 --> 01:28.130 +벡터 스토어 같은 인터페이스 같은 거죠 + +01:28.130 --> 01:32.420 +우리 경우엔 크로머예요 걸레 회수용으로 사용되죠 + +01:32.420 --> 01:38.090 +벡터를 가지고 프롬프트를 강화할 수 있는 레트리버 인터페이스죠 + +01:38.150 --> 01:41.090 +세 번째 추상화는 메모리예요 + +01:41.090 --> 01:48.470 +챗봇과 토론한 역사를 보여주는 거죠 어떤 식으로든 메모리요 + +01:48.470 --> 01:55.040 +실제로 여기서 사용되는 건 독감 목록이에요 상단의 시스템 메시지 같은 걸 + +01:55.070 --> 01:59.090 +구성한 목록이죠 다음은 사용자 보조예요 + +01:59.360 --> 02:05.990 +하지만 긴 연결을 위한 메모리라는 개념으로 추출되었어요. 이 커버 아래에서 리스트를 + +02:05.990 --> 02:11.390 +처리하거나 다른 모델이 필요로 하는 다른 형식을 처리하죠. + +02:11.390 --> 02:19.090 +이 세 가지가 핵심 래퍼입니다. Get-Tuck Season 2 닙턱 시즌 2 긴 체인에서 얻는 기능성 중에서요. + +02:19.090 --> 02:26.020 +이것을 명심하고 얼마나 간단한지 보세요. Get 파이프라인을 만드는 것이요. 
+ +02:26.080 --> 02:29.650 +4줄의 코드로 완료될 거예요 + +02:29.650 --> 02:33.070 +지금 여러분 앞에 있는 코드 4줄이에요 + +02:33.310 --> 02:37.720 +첫 번째 줄의 우수함이 이거예요 + +02:37.900 --> 02:42.280 +LM은 채팅방개 인공지능으로 랑 체인 ln 객체를 생성하죠 + +02:42.370 --> 02:46.120 +개방형 인공지능을 위한 LM 객체죠 + +02:46.450 --> 02:50.890 +다른 어떤 것을 위해 만들 수 있는 비슷한 오브젝트가 있다고 상상할 수 있죠 + +02:51.670 --> 02:53.860 +그게 첫 번째 줄이자 첫 번째 추상화예요 + +02:53.890 --> 02:55.480 +두 번째 줄에 LM을 넣어요 + +02:55.480 --> 02:56.470 +두 번째 추상화요 + +02:56.470 --> 02:57.130 +메모리 말이에요 + +02:57.160 --> 03:01.210 +대화 버퍼 메모리라는 랑 체인 객체를 생성해요 + +03:01.630 --> 03:03.580 +몇 가지 사항을 제공해야 해요 + +03:03.580 --> 03:10.660 +핵심은 어떻게 정리되느냐죠 메모리와 채팅 기록을 검색하는 데 뭘 쓸 수 있느냐예요 + +03:10.660 --> 03:16.210 +나중에 예상되는 게 그거니까요 반송 메시지는 랭에게 이걸 + +03:16.210 --> 03:21.040 +저장할 거라고 말해요 일련의 메시지로 저장될 거라고요 + +03:21.040 --> 03:26.020 +대화를 나타내는 큰 텍스트 블록 말고요 + +03:26.020 --> 03:30.910 +이런 종류의 채팅 응용 프로그램에 이걸 사용해야 한다는 것만 아시면 돼요 + +03:31.810 --> 03:40.720 +다음 줄은 간단히 벡터 스토어가 있는데 크로마를 생성했고 이것을 불러오기 + +03:40.720 --> 03:44.290 +메서드로 부르겠다고 하네요 + +03:44.290 --> 03:48.100 +레트리버라는 인터페이스 객체로 감싸게 되죠 + +03:48.100 --> 03:56.710 +그게 랭 체인이 기대하는 객체예요 래그 워크플로우가 가능하려면요 + +03:56.980 --> 04:01.570 +세 가지 추상화입니다 LM, 메모리 그리고 리트리버죠 + +04:01.600 --> 04:02.680 +전부 만들어진 거예요 + +04:02.680 --> 04:07.600 +마지막 구절은 그걸 하나로 모아 대화 사슬을 형성해요 + +04:07.900 --> 04:14.470 +그건 여러분이 만든 대화 회수 사슬로 호출을, 그 음 LM으로부터의 + +04:14.500 --> 04:20.170 +메서드를 호출해 생성합니다 여러분은 3가지만 넘기면 돼요 + +04:20.170 --> 04:25.030 +LM, 레트리버, 메모리 방금 만든 3가지요 + +04:25.030 --> 04:26.920 +그렇게 간단한 거예요 + +04:26.920 --> 04:32.620 +코드 네 번째 줄로 방금 래그 파이프라인을 만들었어요 + +04:33.490 --> 04:34.450 +못 믿겠어요? 
+ +04:34.450 --> 04:37.150 +유피터랩으로 가서 직접 해 보죠 diff --git a/week5/community-contributions/subtitles/srts/59297603/en_US.srt b/week5/community-contributions/subtitles/srts/59297603/en_US.srt new file mode 100755 index 0000000..bb50ef2 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297603/en_US.srt @@ -0,0 +1,184 @@ +WEBVTT + +00:00.470 --> 00:05.930 +And I'm delighted to welcome you back to LM engineering on the day that we turn to vectors. + +00:05.930 --> 00:08.570 +Finally, I've been talking about vectors for so long. + +00:08.600 --> 00:11.390 +Today we actually get to play with them. + +00:11.540 --> 00:18.380 +We are going to be going into Jupyter Lab soon and creating chunks of. + +00:18.380 --> 00:24.320 +We're creating vectors from chunks of text using OpenAI embeddings, the encoding model that I talked + +00:24.320 --> 00:25.100 +about before. + +00:25.130 --> 00:31.460 +We're going to store those vectors in the very popular open source vector database called chroma. + +00:31.640 --> 00:36.440 +And we're then going to visualize the vectors get a sense for what they represent. + +00:36.530 --> 00:41.150 +And that will then be an exercise for you to keep playing with vectors. + +00:41.270 --> 00:48.140 +Try putting your own things into vectors and get a better and better sense of what it what it means + +00:48.140 --> 00:52.430 +to have a meaning by turning text into a bunch of numbers. + +00:52.610 --> 00:56.660 +So let me talk for a moment about these different types of of model. + +00:56.810 --> 00:58.970 +How how do you turn text into vectors. + +00:58.970 --> 01:03.080 +So there's first of all, there's this very simplistic way that you could do it. + +01:03.350 --> 01:07.160 +You could for example, say come up with a vocabulary. + +01:07.160 --> 01:10.190 +So come up with a list of possible words. 
+ +01:10.190 --> 01:15.260 +Let's say the first word in your vocabulary is the word dog, and the second word in your vocabulary + +01:15.260 --> 01:16.730 +is the word cat. + +01:17.270 --> 01:23.360 +And what you could do is take a block of text and just count the number of times any particular word + +01:23.360 --> 01:24.860 +is in that block of text. + +01:24.860 --> 01:30.890 +And then if the word dog, for example, is in there twice, then you would put a two in the first location + +01:30.890 --> 01:33.320 +in your vector and cat is in there once. + +01:33.320 --> 01:36.320 +Then you would put one in that location and so on. + +01:36.320 --> 01:42.350 +So it would really just be counting the number of words of a of a particular type and putting that in + +01:42.350 --> 01:43.010 +a vector. + +01:43.010 --> 01:44.660 +And that would be very simplistic. + +01:44.660 --> 01:50.840 +It wouldn't reflect the order in which the words are laid out, and it wouldn't reflect the fact, for + +01:50.840 --> 01:56.120 +example, that the same word Java could refer to a type of coffee bean or to a programming language. + +01:56.120 --> 01:59.090 +It would just be the count of the number of words. + +01:59.420 --> 02:01.430 +So that would be rather simplistic. + +02:01.430 --> 02:04.670 +And luckily there are more advanced methods for doing this. + +02:04.700 --> 02:11.510 +One of the ones that got a lot of attention was in 2013, the arrival of something called word two vec, + +02:11.600 --> 02:20.660 +which was able to use use a deep neural network to start to convert words to vectors in a way that seemed + +02:20.660 --> 02:22.550 +to reflect their meaning. + +02:22.550 --> 02:28.550 +And it was really it was with word two vec that we started to talk about things like, uh, king minus + +02:28.550 --> 02:30.470 +man plus woman equals queen. + +02:30.830 --> 02:34.970 +Uh, Burt is the model that I talked about some time ago. 
+ +02:35.000 --> 02:41.690 +Now it was, uh, it's a transformer model for encoding that Google produced shortly after publishing + +02:41.690 --> 02:43.700 +their paper, Inventing Transformers. + +02:43.760 --> 02:48.440 +And the one that we're going to use, OpenAI embeddings, is one that is from OpenAI. + +02:48.440 --> 02:52.610 +And the most recent version is with, uh, updates from 2024. + +02:52.640 --> 02:57.470 +Um, and so that is going to be the latest and greatest model that we're going to use for converting + +02:57.500 --> 03:00.680 +text into numbers vectors. + +03:01.760 --> 03:07.160 +So with that, uh, let me just quickly talk about chroma and then we will get to it. + +03:07.160 --> 03:14.290 +So chroma is an example of one of the, uh, vector data stores. + +03:14.290 --> 03:21.220 +There are quite a few of them, and many of the of the main databases also now support taking vectors + +03:21.220 --> 03:23.020 +and searching on vectors. + +03:23.140 --> 03:28.690 +An example MongoDB that lots of people use as a NoSQL data store will also take vectors and can act + +03:28.690 --> 03:29.950 +as a vector database. + +03:29.950 --> 03:33.700 +But Cromer was sort of first and foremost a vector database. + +03:33.790 --> 03:37.120 +And this is uh, its website. + +03:37.150 --> 03:45.520 +Uh, and you can see it makes a, it's got better pictures than I had with an old school, uh, Mac + +03:45.520 --> 03:46.570 +interface here. + +03:46.780 --> 03:52.030 +Uh, but the idea that, that you could do a query in your I application and it can retrieve from a + +03:52.030 --> 03:56.380 +bunch of vectors and that retrieve data gets put into the prompt and query. + +03:56.380 --> 04:01.150 +So it's a fancier version of the diagram that I showed you last time. + +04:01.510 --> 04:03.730 +Uh, so this is Cromer. + +04:03.760 --> 04:06.460 +This is what we'll be using to store our vectors. + +04:06.460 --> 04:08.740 +And I think quite enough chit chat. 
+ +04:08.770 --> 04:13.570 +It's time for us to get JupyterLab, and it's time for us to use vectors first hand. + +04:13.600 --> 04:14.350 +See you there. diff --git a/week5/community-contributions/subtitles/srts/59297603/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297603/ja_JP.srt new file mode 100755 index 0000000..f3d68df --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297603/ja_JP.srt @@ -0,0 +1,151 @@ +WEBVTT + +00:00.470 --> 00:05.930 +そして、 ベクターに焦点を当てたこの日に、 LMエンジニアリングに戻ってこられたことを嬉しく思う。 + +00:05.930 --> 00:08.570 +最後に、 私はずっとベクターについて話してきた。 + +00:08.600 --> 00:11.390 +今日は実際に彼らと遊ぶことができる。 + +00:11.540 --> 00:18.380 +近々Jupyter Labに入り、 チャンクを作成する予定だ。 + +00:18.380 --> 00:25.100 +私たちはOpenAIのエンベッディングを使って、 テキストの塊からベクトルを作っています。 + +00:25.130 --> 00:31.460 +これらのベクターは、 chromaという非常に人気のあるオープンソースのベクターデータベースに保存します。 + +00:31.640 --> 00:36.440 +そして、 そのベクトルを視覚化し、 それが何を表しているかを把握する。 + +00:36.530 --> 00:41.150 +そしてそれは、 ベクターで遊び続けるための練習になる。 + +00:41.270 --> 00:48.140 +自分の物事をベクトルに置き換えてみて、 テキストを数字の束に変えることで意味を持つということがどういうことなのかを、 + +00:48.140 --> 00:52.430 +どんどん感じ取ってほしい。 + +00:52.610 --> 00:56.660 +そこで、 これらの異なるタイプのモデルについて少し話をしよう。 + +00:56.810 --> 00:58.970 +テキストをベクターに変換する方法。 + +00:58.970 --> 01:03.080 +だから、 まず第一に、 非常に単純化された方法がある。 + +01:03.350 --> 01:07.160 +例えば、 ボキャブラリーを考え出すとかね。 + +01:07.160 --> 01:10.190 +そこで、 可能性のある言葉のリストを考えてみよう。 + +01:10.190 --> 01:16.730 +例えば、 あなたのボキャブラリーの最初の単語がdogで、 2番目の単語がcatだとしよう。 + +01:17.270 --> 01:24.860 +そして、 テキストのブロックを取り出して、 そのブロックの中にある特定の単語の回数を数えることができる。 + +01:24.860 --> 01:30.890 +そして、 例えばdogという単語が2回入っていたら、 ベクトルの最初の位置に2を入れ、 + +01:30.890 --> 01:33.320 +catは1回入れる。 + +01:33.320 --> 01:36.320 +そして、 その場所に1本、 といった具合だ。 + +01:36.320 --> 01:43.010 +つまり、 特定のタイプの単語の数を数えて、 それをベクターに入れるだけなのだ。 + +01:43.010 --> 01:44.660 +それはとても単純なことだ。 + +01:44.660 --> 01:50.840 +単語が並べられた順番は反映されないし、 例えば同じJavaという単語がコーヒー豆の一種を指すこともあれば、 + +01:50.840 --> 01:56.120 +プログラミング言語を指すこともあるという事実も反映されない。 + +01:56.120 --> 01:59.090 +単語数のカウントだけだろう。 + +01:59.420 --> 02:01.430 
+だから、 それはむしろ単純なことだ。 + +02:01.430 --> 02:04.670 +そして幸運なことに、 これにはもっと高度な方法がある。 + +02:04.700 --> 02:11.510 +注目されたもののひとつは、 2013年に登場したワード・ツー・ベックと呼ばれるもので、 + +02:11.600 --> 02:22.550 +ディープ・ニューラル・ネットワークを使って、 単語の意味を反映するような形でベクトル変換を始めることができた。 + +02:22.550 --> 02:30.470 +そして、 2人のベックという言葉をきっかけに、 王+男+女=女王というような話をするようになったんだ。 + +02:30.830 --> 02:34.970 +ええと、 バートは少し前に話したモデルだよ。 + +02:35.000 --> 02:43.700 +これは、 グーグルが論文『Inventing Transformers』を発表した直後に作成したエンコーディングのトランスフォーマーモデルだ。 + +02:43.760 --> 02:48.440 +今回使用するOpenAI embeddingsは、 OpenAIのものです。 + +02:48.440 --> 02:52.610 +そして最新バージョンは、 2024年から更新されたものだ。 + +02:52.640 --> 03:00.680 +それで、 これがテキストを数字ベクトルに変換するために使う最新で最高のモデルになるんだ。 + +03:01.760 --> 03:07.160 +ということで、 クロマについて手短にお話させてください。 + +03:07.160 --> 03:14.290 +クロマはベクトル・データ・ストアの一例だ。 + +03:14.290 --> 03:23.020 +その数はかなり多く、 主要なデータベースの多くも、 現在ではベクターの採取やベクターでの検索をサポートしている。 + +03:23.140 --> 03:29.950 +多くの人がNoSQLデータストアとして使っているMongoDBの例では、 ベクトルも扱うことができ、 ベクトルデータベースとして機能する。 + +03:29.950 --> 03:33.700 +しかし、 クロマーは何よりもまずベクター・データベースのようなものだった。 + +03:33.790 --> 03:37.120 +そしてこれがそのウェブサイトだ。 + +03:37.150 --> 03:46.570 +それに、 昔ながらのマックのインターフェイスで撮った写真よりもいい写真が撮れるんだ。 + +03:46.780 --> 03:52.030 +でも、 Iアプリケーションでクエリーを実行すれば、 たくさんのベクターから検索することができ、 + +03:52.030 --> 03:56.380 +検索されたデータはプロンプトとクエリーに入力されます。 + +03:56.380 --> 04:01.150 +つまり、 前回お見せした図のファンシー・バージョンというわけだ。 + +04:01.510 --> 04:03.730 +ええと、 クロマーです。 + +04:03.760 --> 04:06.460 +これをベクターの保存に使う。 + +04:06.460 --> 04:08.740 +雑談はもう十分だろう。 + +04:08.770 --> 04:13.570 +JupyterLabを手に入れ、 ベクターを実際に使う時が来たのだ。 + +04:13.600 --> 04:14.350 +そこで会おう diff --git a/week5/community-contributions/subtitles/srts/59297603/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297603/ko_KR.srt new file mode 100755 index 0000000..9578947 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297603/ko_KR.srt @@ -0,0 +1,175 @@ +WEBVTT + +00:00.470 --> 00:05.930 +LM 엔지니어링으로 다시 오신 걸 환영합니다 벡터와 만나는 날이죠 + +00:05.930 --> 00:08.570 +벡터 얘기를 너무 오래 했네요 + +00:08.600 --> 00:11.390 +오늘은 실제로 갖고 놀 거예요 
+ +00:11.540 --> 00:18.380 +곧 주피터 연구소에 가서 그 덩어리를 만들 거예요 + +00:18.380 --> 00:24.320 +오픈AI 엠베딩을 이용해 텍스트 덩어리로부터 벡터를 생성합니다 앞서 말씀드린 인코딩 + +00:24.320 --> 00:25.100 +모델이죠 + +00:25.130 --> 00:31.460 +유명한 오픈 소스 벡터 데이터베이스에 벡터들을 저장할 겁니다 크로마라고 하죠 + +00:31.640 --> 00:36.440 +그런 다음 벡터들이 뭘 나타내는지를 시각화하는 거죠 + +00:36.530 --> 00:41.150 +벡터를 계속 갖고 노는 연습이 될 거예요 + +00:41.270 --> 00:48.140 +벡터에 여러분만의 것을 입력해 보세요 텍스트를 숫자로 바꾸어 의미를 갖는다는 게 어떤 + +00:48.140 --> 00:52.430 +것인지 더 나은 감각을 갖게 될 거예요 + +00:52.610 --> 00:56.660 +잠시 이런 다양한 모델 유형에 관해 얘기할게요 + +00:56.810 --> 00:58.970 +텍스트를 벡터로 바꾸는 방법이요 + +00:58.970 --> 01:03.080 +우선 아주 간단한 방법이 있어요 + +01:03.350 --> 01:07.160 +예를 들어 어휘를 생각해 보라고 하는 거죠 + +01:07.160 --> 01:10.190 +가능한 단어를 하나씩 생각해 봐요 + +01:10.190 --> 01:15.260 +여러분이 아는 단어 중 첫 번째가 개고 두 번째가 + +01:15.260 --> 01:16.730 +고양이예요 + +01:17.270 --> 01:23.360 +텍스트 블록을 하나 골라서 그 안에 있는 특정 단어의 횟수를 + +01:23.360 --> 01:24.860 +세는 거예요 + +01:24.860 --> 01:30.890 +예를 들어 dog라는 단어가 두 번 들어갔다면 첫 번째 장소에 2를 + +01:30.890 --> 01:33.320 +넣고 캣은 한 번 넣어요 + +01:33.320 --> 01:36.320 +그런 다음 해당 위치에 1을 넣는 거죠 + +01:36.320 --> 01:43.010 +특정 유형의 단어 수를 세서 벡터로 입력하는 거죠 + +01:43.010 --> 01:44.660 +아주 단순하죠 + +01:44.660 --> 01:50.840 +단어가 배치된 순서를 반영하지 못해요 같은 단어인 Java가 커피콩이나 + +01:50.840 --> 01:56.120 +프로그래밍 언어를 가리킬 수도 있다는 사실도요 + +01:56.120 --> 01:59.090 +단어 개수만 세면 돼요 + +01:59.420 --> 02:01.430 +그러니 좀 단순하죠 + +02:01.430 --> 02:04.670 +다행히 더 진보된 방법이 있죠 + +02:04.700 --> 02:11.510 +주목을 받은 것 중 하나는 2013년에 워드 2벡이라는 것이 등장했을 + +02:11.600 --> 02:22.550 +때입니다 심층 신경망을 이용해 단어를 벡터로 변환할 수 있었죠 그 의미를 반영하는 방식으로요 + +02:22.550 --> 02:28.550 +우리는 워드 2벡을 통해 이런 얘기를 하기 시작했어요 킹에서 남자를 빼고 + +02:28.550 --> 02:30.470 +여자를 더하면 퀸이 되죠 + +02:30.830 --> 02:34.970 +버트는 제가 전에 말했던 모델이에요 + +02:35.000 --> 02:41.690 +이건 트랜스포머 모델로 구글이 논문을 발표한 직후에 만든 부호화 모델입니다 트랜스포머 + +02:41.690 --> 02:43.700 +발명이었죠 + +02:43.760 --> 02:48.440 +우리가 사용할 OpenAI 엠베딩은 OpenAI에서 온 거예요 + +02:48.440 --> 02:52.610 +가장 최근 버전은 2024년의 업데이트예요 + +02:52.640 --> 02:57.470 +텍스트를 숫자 벡터로 변환하는 데 
사용할 수 있는 가장 + +02:57.500 --> 03:00.680 +최신의 가장 훌륭한 모델이죠 + +03:01.760 --> 03:07.160 +그럼 이제 채도에 대해 빠르게 이야기해 보고 다시 보도록 하죠. Get in the Cream + +03:07.160 --> 03:14.290 +채도는 벡터 데이터 저장소의 한 예인데요 + +03:14.290 --> 03:21.220 +꽤 많이 있고, 메인 데이터베이스도 벡터를 찾고 검색하는 + +03:21.220 --> 03:23.020 +것을 지원하죠 + +03:23.140 --> 03:28.690 +많은 사람들이 NoSQL 데이터 스토어로 사용하는 몽고DB의 예는 벡터를 취하고 벡터 데이터베이스로 + +03:28.690 --> 03:29.950 +작용할 수 있어요 + +03:29.950 --> 03:33.700 +크로머는 무엇보다도 벡터 데이터베이스였어요 + +03:33.790 --> 03:37.120 +이게 그 웹사이트예요 + +03:37.150 --> 03:46.570 +보시다시피 옛날 맥 인터페이스보다 사진이 더 잘 나와요 + +03:46.780 --> 03:52.030 +I 응용 프로그램에서 쿼리를 할 수 있고 여러 벡터에서 데이터를 검색할 + +03:52.030 --> 03:56.380 +수 있고 그 데이터를 입력한 프롬프트와 큐리에 넣는 거죠. + +03:56.380 --> 04:01.150 +저번에 보여드린 다이어그램의 좀 더 화려한 버전이에요 + +04:01.510 --> 04:03.730 +이쪽은 크로머예요 + +04:03.760 --> 04:06.460 +벡터를 저장하기 위해 사용할 것들이죠 + +04:06.460 --> 04:08.740 +잡담은 충분히 한 것 같네요 + +04:08.770 --> 04:13.570 +JupyterLab을 get 할 때입니다 벡터를 먼저 사용할 때죠 + +04:13.600 --> 04:14.350 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/59297609/en_US.srt b/week5/community-contributions/subtitles/srts/59297609/en_US.srt new file mode 100755 index 0000000..7bede00 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297609/en_US.srt @@ -0,0 +1,208 @@ +WEBVTT + +00:01.040 --> 00:07.850 +Last week, we worked with models that were able to speed up code by a factor of 60,000 times, which + +00:07.850 --> 00:08.960 +was outrageous. + +00:08.990 --> 00:13.130 +Hopefully you were super impressed by that, because I certainly was, and I'm hoping you're going to + +00:13.130 --> 00:17.150 +be even more impressed at the end of this week when you see what I have in store for you. + +00:17.180 --> 00:20.330 +So it's all about rag retrieval. + +00:20.330 --> 00:25.850 +Augmented generation, where you can already do, of course, is code with frontier models, code with + +00:25.880 --> 00:27.200 +hugging face transformers. 
+ +00:27.200 --> 00:32.660 +Choose the right LLM for your project and now build solutions that generate code. + +00:32.690 --> 00:38.480 +Today you're going to learn about the big idea behind Rag retrieval augmented generation. + +00:38.480 --> 00:43.520 +And we're going to walk through some of the interactions with Rag before we get there though, we're + +00:43.520 --> 00:48.440 +also going to talk about the little idea, the small idea behind rag, which is actually quite obvious. + +00:48.470 --> 00:50.960 +In fact, you may have already thought about it yourself. + +00:51.230 --> 00:55.520 +And then we're going to implement a toy version of Rag using the small idea. + +00:55.520 --> 00:57.410 +So you get a really good feel for how it works. + +00:57.410 --> 01:00.050 +Before next week, we go on to the real deal. + +01:00.080 --> 01:03.610 +Let me start by giving you some of the intuition behind rag. + +01:03.610 --> 01:09.340 +We've already seen that we can make the performance of models stronger by enriching the prompts, the + +01:09.340 --> 01:10.750 +information that we send to the models. + +01:10.750 --> 01:12.310 +And we've done that in several ways. + +01:12.310 --> 01:18.790 +We've used Multi-shot prompting to send a series of example questions and answers to the model. + +01:18.790 --> 01:26.290 +We've used tools so that the LLM can call back into our code almost kind of, and run some code that + +01:26.290 --> 01:30.250 +then is used to supplement its answers or carry out actions. + +01:30.250 --> 01:36.970 +And we've had other ways to provide additional context as part of what we send the LLM, including in + +01:36.970 --> 01:38.200 +the system prompt. + +01:38.290 --> 01:48.100 +So the thinking is, can we step up this idea and take it to a new level by supplying more concrete + +01:48.370 --> 01:49.840 +information into the prompt? + +01:49.840 --> 01:53.470 +That's going to be particularly relevant to the question at hand. 
+ +01:53.500 --> 02:00.760 +So the idea is could we put together a database of information sometimes because this is a database + +02:00.760 --> 02:04.240 +of knowledge, it's known as a knowledge base, a knowledge base of information. + +02:04.540 --> 02:11.950 +And every time that the user asks us a question, we'll first look up in that knowledge base whether + +02:11.950 --> 02:15.970 +there's any relevant information that we can pluck out. + +02:15.970 --> 02:21.280 +And if there is, we simply stuff that in the prompt and that is sent in the prompt to the model. + +02:21.310 --> 02:22.510 +That's all there is to it. + +02:22.540 --> 02:27.340 +It's actually a very simple idea, and you probably already thought of it yourself while we were doing + +02:27.340 --> 02:29.020 +some of the earlier exercises. + +02:30.100 --> 02:36.910 +So let's just show this small idea behind rag in a diagram, and I promise you later we'll get to the + +02:36.910 --> 02:41.830 +bigger idea, which is where it becomes somewhat less less obvious and more meaningful. + +02:41.830 --> 02:47.290 +But in the little idea, what we're saying is let's start by the user asking us a question. + +02:47.290 --> 02:51.220 +It comes to our code, and normally we'd send that straight on to the LLM. + +02:51.220 --> 02:56.920 +But this time before we do so, we do a query in our knowledge base to see if we've got any relevant + +02:56.920 --> 02:58.210 +background information. + +02:58.210 --> 03:02.890 +And if we do, we pluck out that information and we include it in the prompt. + +03:02.920 --> 03:08.470 +We send the LM and of course the response comes back as always, but hopefully it takes into account + +03:08.500 --> 03:09.940 +some of this extra context. + +03:09.940 --> 03:12.880 +And that is what goes back to the user. + +03:13.030 --> 03:17.050 +That's really all there is to the small idea behind Rag. 
+ +03:17.980 --> 03:23.500 +So we're now going to put this into action with a small example of the small idea. + +03:23.680 --> 03:27.670 +Let's say we work for an insurance tech startup. + +03:27.670 --> 03:34.420 +And it's going to be a fictional insurance tech startup called insurance, which happens to be the word + +03:34.420 --> 03:35.080 +insurer. + +03:35.080 --> 03:40.270 +And LM stuffed together, which is, I think, the limit of my creativity. + +03:40.630 --> 03:48.250 +We have a knowledge base in the form of a folder taken from the company's shared drive. + +03:48.250 --> 03:55.780 +It is the entire contents of their shared drive, and our task is to build an AI knowledge worker. + +03:55.780 --> 04:00.460 +Sometimes this expression knowledge worker is used to mean a person that works for a firm and is the + +04:00.460 --> 04:08.340 +expert and able to carry out analysis on information about the company and carry out questions and answers. + +04:08.340 --> 04:12.630 +Well, that's something that we can do with an LLM. + +04:12.630 --> 04:16.920 +And we can supplement it with information from the knowledge base. + +04:17.430 --> 04:22.110 +So we're going to do a toy implementation blunt instrument. + +04:22.350 --> 04:28.170 +Um, basically we're going to read in some of these files, products and employees. + +04:28.170 --> 04:30.780 +And we're going to store it in like a dictionary. + +04:30.780 --> 04:36.360 +And then anytime a question comes in, we're just going to look up whether or not the word, the name + +04:36.360 --> 04:38.610 +of the employee appears somewhere in the question. + +04:38.610 --> 04:42.750 +And if so, we're just going to shove that whole employee record into our prompt. + +04:42.750 --> 04:48.210 +So it's a kind of manual, brute force implementation of Rag, but it will give you a good sense of + +04:48.210 --> 04:49.950 +how this actually works behind the scenes.
+ +04:49.950 --> 04:52.350 +And it will show you that there's there's no magic to it. + +04:52.380 --> 04:55.500 +It just improves the performance of the model right away. + +04:55.500 --> 04:58.770 +And then once we've done that, we'll get on to the more exciting stuff. + +04:58.770 --> 05:03.000 +But for now, let's go to JupyterLab and build our own homemade rag. diff --git a/week5/community-contributions/subtitles/srts/59297609/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297609/ja_JP.srt new file mode 100755 index 0000000..a0964b3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297609/ja_JP.srt @@ -0,0 +1,181 @@ +WEBVTT + +00:01.040 --> 00:08.960 +先週は、 コードを60,000倍高速化できるモデルを扱った。 + +00:08.990 --> 00:13.130 +そして、 今週末には、 私が用意したものを見て、 + +00:13.130 --> 00:17.150 +さらに感動してくれることを期待している。 + +00:17.180 --> 00:20.330 +だから、 ボロ雑巾の回収がすべてなんだ。 + +00:20.330 --> 00:25.850 +もちろん、 フロンティア・モデルを使ったコードや、 顔を包み込むようなトランスフォーマーを使ったコードなど、 + +00:25.880 --> 00:27.200 +すでにできることはある。 + +00:27.200 --> 00:32.660 +プロジェクトに適したLLMを選択し、 コードを生成するソリューションを構築しよう。 + +00:32.690 --> 00:38.480 +今日は、 ラグ検索拡張世代の大きなアイデアについて学ぶことにしよう。 + +00:38.480 --> 00:43.520 +その前に、 ボロ雑巾の背後にある小さなアイデア、 実はごく当たり前のことなのだが、 + +00:43.520 --> 00:48.440 +その小さなアイデアについても話しておこうと思う。 + +00:48.470 --> 00:50.960 +実際、 あなた自身もすでに考えているかもしれない。 + +00:51.230 --> 00:55.520 +そして、 小さなアイデアを使ってラグのおもちゃバージョンを実装するんだ。 + +00:55.520 --> 00:57.410 +だから、 どのように機能するのか、 実にいい感触を得ることができる。 + +00:57.410 --> 01:00.050 +来週を前に、 本題に入る。 + +01:00.080 --> 01:03.610 +まず、 ボロ布の背後にある直感の一部を紹介しよう。 + +01:03.610 --> 01:10.750 +プロンプト、 つまりモデルに送る情報を充実させることで、 モデルのパフォーマンスをより強力なものにできることはすでに見てきた。 + +01:10.750 --> 01:12.310 +私たちはいくつかの方法でそれを実現した。 + +01:12.310 --> 01:18.790 +私たちはマルチショット・プロンプトを使って、 一連の質問と回答の例をモデルに送りました。 + +01:18.790 --> 01:30.250 +私たちはツールを使って、 LLMが私たちのコードにコールバックできるようにした。 + +01:30.250 --> 01:38.200 +また、 システム・プロンプトを含め、 LLMに送る内容の一部として追加的なコンテキストを提供する方法は他にもあった。 + +01:38.290 --> 01:48.100 +つまり、 もっと具体的な情報をプロンプトに提供することで、 このアイデアをステップアップさせ、 
新たなレベルに引き上げることはできないか、 + +01:48.370 --> 01:49.840 +ということだ。 + +01:49.840 --> 01:53.470 +それは、 今の質問と特に関係がありそうだ。 + +01:53.500 --> 02:00.760 +つまり、 情報をデータベース化できないか、 + +02:00.760 --> 02:04.240 +ということだ。 + +02:04.540 --> 02:11.950 +そして、 ユーザーから質問を受けるたびに、 まずナレッジ・ベースで関連する情報がないかどうかを調べ、 + +02:11.950 --> 02:15.970 +それを引き出す。 + +02:15.970 --> 02:21.280 +もしプロンプトがあれば、 それをプロンプトに詰め込み、 プロンプトをモデルに送信する。 + +02:21.310 --> 02:22.510 +それだけだ。 + +02:22.540 --> 02:29.020 +これは実はとてもシンプルなアイデアで、 おそらく、 以前の練習をやっているときに、 すでに自分で考えたことだろう。 + +02:30.100 --> 02:36.910 +だから、 ボロ布の背後にあるこの小さなアイデアを図で示すだけにしておこう。 後で、 + +02:36.910 --> 02:41.830 +より大きなアイデアにたどり着くことを約束しよう。 + +02:41.830 --> 02:47.290 +しかし、 ちょっとしたアイデアでは、 私たちが言っているのは、 ユーザーが私たちに質問することから始めようということだ。 + +02:47.290 --> 02:51.220 +それは私たちのコードに反映され、 通常はそれをそのままLLMに送ることになる。 + +02:51.220 --> 02:58.210 +しかし今回は、 その前にナレッジベースに問い合わせを行い、 関連する背景情報があるかどうかを確認する。 + +02:58.210 --> 03:02.890 +そうすれば、 その情報を抜き出し、 プロンプトに盛り込む。 + +03:02.920 --> 03:09.940 +LMを送信すると、 もちろんいつものように返事が返ってくる。 + +03:09.940 --> 03:12.880 +そしてそれがユーザーに還元される。 + +03:13.030 --> 03:17.050 +ラグの背後にある小さなアイデアは本当にそれだけだ。 + +03:17.980 --> 03:23.500 +そこで、 これからこれを小さなアイデアの例で実践してみよう。 + +03:23.680 --> 03:27.670 +私たちが保険テックの新興企業に勤めているとしよう。 + +03:27.670 --> 03:35.080 +そして、 それは保険という架空の保険テック・スタートアップになる。 + +03:35.080 --> 03:40.270 +そしてLMの詰め物、 これが私の創造性の限界だと思う。 + +03:40.630 --> 03:48.250 +会社の共有ドライブから取り出したフォルダーの形で、 ナレッジベースを持っています。 + +03:48.250 --> 03:55.780 +それは彼らの共有ドライブの全コンテンツであり、 我々の仕事はAIナレッジワーカーを構築することだ。 + +03:55.780 --> 04:00.460 +このナレッジ・ワーカーという表現は、 企業で働く専門家であり、 + +04:00.460 --> 04:08.340 +企業に関する情報の分析や質疑応答ができる人という意味で使われることもある。 + +04:08.340 --> 04:12.630 +まあ、 それはLLMがあればできることだ。 + +04:12.630 --> 04:16.920 +そして、 知識ベースからの情報でそれを補うことができる。 + +04:17.430 --> 04:22.110 +そこで、 玩具を使った鈍器を使うことにする。 + +04:22.350 --> 04:28.170 +ええと、 基本的には、 これらのファイル、 製品、 従業員の一部を読むことになる。 + +04:28.170 --> 04:30.780 +そして、 それを辞書のように格納する。 + +04:30.780 --> 04:38.610 +そして、 質問が来るたびに、 その質問のどこかに従業員の名前が出てくるかどうかを調べます。 + +04:38.610 --> 04:42.750 +もしそうなら、 その従業員記録をすべてプロンプトに押し込むことになる。 
+ +04:42.750 --> 04:48.210 +ラグの実装は手作業で力技のようなものだが、 これが舞台裏で実際にどのように機能しているのか、 + +04:48.210 --> 04:49.950 +よく理解できるだろう。 + +04:49.950 --> 04:52.350 +そして、 そこには何の魔法もないことを教えてくれる。 + +04:52.380 --> 04:55.500 +ただ、 すぐにモデルのパフォーマンスが向上する。 + +04:55.500 --> 04:58.770 +それが終わったら、 もっとエキサイティングなことに取り掛かる。 + +04:58.770 --> 05:03.000 +しかし今は、 JupyterLabに行き、 自作のボロ布を作ってみよう。 diff --git a/week5/community-contributions/subtitles/srts/59297609/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297609/ko_KR.srt new file mode 100755 index 0000000..d5a71bb --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297609/ko_KR.srt @@ -0,0 +1,205 @@ +WEBVTT + +00:01.040 --> 00:07.850 +지난주에는 코드를 60,000배나 빠르게 만드는 모델로 작업했는데 정말 + +00:07.850 --> 00:08.960 +굉장했죠 + +00:08.990 --> 00:13.130 +깊은 인상을 받았길 바라요 저도 그랬거든요 이번 주말에는 + +00:13.130 --> 00:17.150 +더 깊은 인상을 받길 바라요 제가 준비한 걸 보면요 + +00:17.180 --> 00:20.330 +걸레 회수 작업이군요 + +00:20.330 --> 00:25.850 +이미 가능한 증강세대 기술은 개척자 모델이나 얼굴 트랜스포머를 껴안는 + +00:25.880 --> 00:27.200 +코드였죠 + +00:27.200 --> 00:32.660 +프로젝트에 맞는 LLM을 선택하고 코드를 생성하는 솔루션을 빌드하세요 + +00:32.690 --> 00:38.480 +오늘은 걸레 회수 증강세대의 원대한 아이디어에 대해 배울 거예요 + +00:38.480 --> 00:43.520 +그 전에 Rag와의 상호 작용을 살펴볼 거예요 그리고 Rag에 숨은 + +00:43.520 --> 00:48.440 +작은 아이디어에 대해서도 얘기할 겁니다 사실 꽤 명백하죠 + +00:48.470 --> 00:50.960 +사실 이미 생각해 두셨을 수도 있죠 + +00:51.230 --> 00:55.520 +그런 다음 작은 아이디어를 이용해 래그의 장난감 버전을 구현할 거예요 + +00:55.520 --> 00:57.410 +어떻게 작동하는지 감이 잘 와요 Get it + +00:57.410 --> 01:00.050 +다음 주 전에 진짜 일을 할 거예요 + +01:00.080 --> 01:03.610 +우선 누더기에 대한 직감부터 말씀드릴게요 + +01:03.610 --> 01:09.340 +모델에 보내는 정보인 프롬프트를 강화함으로써 모델의 성능을 강화할 수 있다는 + +01:09.340 --> 01:10.750 +건 이미 확인했죠 + +01:10.750 --> 01:12.310 +여러 가지 방법으로 그렇게 했어요 + +01:12.310 --> 01:18.790 +멀티샷 프롬프팅을 이용해 모델에 일련의 질문과 답을 예로 보냈는데요 + +01:18.790 --> 01:26.290 +도구를 사용해 LLM이 우리 코드로 다시 호출해 응답을 보충하거나 작업을 수행하는 + +01:26.290 --> 01:30.250 +데 사용되는 코드를 실행했어요 + +01:30.250 --> 01:36.970 +LLM에 보내는 것의 일부로 추가적인 컨텍스트를 제공하는 다른 방법도 있었습니다 시스템 프롬프트를 + +01:36.970 --> 01:38.200 +포함해서요 + +01:38.290 
--> 01:48.100 +그래서 이 아이디어를 새로운 단계로 끌어올릴 수 있을지 고민했죠 프롬프트에 좀 더 구체적인 정보를 + +01:48.370 --> 01:49.840 +제공하면서요 + +01:49.840 --> 01:53.470 +현재 당면한 문제와 특히 관련이 있죠 + +01:53.500 --> 02:00.760 +그래서 때로는 정보 데이터베이스를 만들기도 합니다. 왜냐하면 이건 지식 데이터베이스니까요. + +02:00.760 --> 02:04.240 +지식 기반으로 알려져 있죠. 지식 기반이요. + +02:04.540 --> 02:11.950 +사용자가 질문을 할 때마다 그 지식 기반에서 먼저 살펴봅니다 우리가 + +02:11.950 --> 02:15.970 +빼낼 수 있는 관련 정보가 있는지요 + +02:15.970 --> 02:21.280 +있다면 프롬프트에 입력하면 됩니다 모델에 프롬프트로 전송되죠 + +02:21.310 --> 02:22.510 +그게 다예요 + +02:22.540 --> 02:27.340 +사실 아주 간단한 아이디어입니다 아마 여러분도 이미 생각해두셨겠죠 + +02:27.340 --> 02:29.020 +아까 연습할 때요 + +02:30.100 --> 02:36.910 +그럼 다이어그램에서 get 뒤에 있는 작은 아이디어를 보여드리죠 나중에 더 큰 아이디어로 + +02:36.910 --> 02:41.830 +갈 거라고 약속드립니다 덜 명확해지고 더 의미 있어지는 거죠 + +02:41.830 --> 02:47.290 +하지만 여기서 우리가 하려는 말은 사용자가 질문을 하는 것부터 시작하자는 거죠 + +02:47.290 --> 02:51.220 +코드로 오면 보통은 그걸 바로 LLM으로 보내죠 + +02:51.220 --> 02:56.920 +하지만 그 전에 기술 기반에서 관련 배경 정보가 있는지 쿼리를 + +02:56.920 --> 02:58.210 +해요 + +02:58.210 --> 03:02.890 +그런 경우 해당 정보를 추출해 프롬프트에 포함시키죠 + +03:02.920 --> 03:08.470 +LM을 보내면 당연히 응답이 돌아오죠 늘 그렇듯이요 하지만 이 추가 컨텍스트를 + +03:08.500 --> 03:09.940 +고려하면 좋겠네요 + +03:09.940 --> 03:12.880 +그게 사용자에게 돌아가는 거죠 + +03:13.030 --> 03:17.050 +랙에 담긴 아이디어는 그게 다예요 + +03:17.980 --> 03:23.500 +이제 작은 아이디어의 작은 예로 이걸 작동시켜 볼게요. 
+ +03:23.680 --> 03:27.670 +보험 기술 신생 기업에서 일한다고 가정해 보죠 + +03:27.670 --> 03:35.080 +가짜 기술 회사인 보험 신생 기업이 될 거예요 보험이란 단어가 인보어죠 + +03:35.080 --> 03:40.270 +LM을 한데 뭉쳐서요 그게 제 창의력의 한계죠 + +03:40.630 --> 03:48.250 +폴더 형태의 지식 기반은 회사의 공유 드라이브에서 가져온 거죠 + +03:48.250 --> 03:55.780 +공유 드라이브의 전체 콘텐츠죠 인공지능 지식 작업자를 만드는 게 우리 작업이에요 + +03:55.780 --> 04:00.460 +지식 노동자라는 표현은 때때로 회사에서 일하는 사람을 + +04:00.460 --> 04:08.340 +뜻하기도 합니다 전문가로서 회사의 정보를 분석하고 질문과 답을 수행하는 사람이죠 + +04:08.340 --> 04:12.630 +LLM으로 할 수 있는 거죠 + +04:12.630 --> 04:16.920 +지식 기반의 정보로 보충할 수 있어요 + +04:17.430 --> 04:22.110 +장난감 구현을 해볼게요 둔기요 + +04:22.350 --> 04:28.170 +기본적으로 이 파일들을 읽어볼 거예요 제품과 직원들 + +04:28.170 --> 04:30.780 +사전처럼 저장할 거예요 + +04:30.780 --> 04:36.360 +질문이 들어올 때마다 해당 단어나 직원의 이름이 질문 어딘가에 + +04:36.360 --> 04:38.610 +나오는지를 찾아보죠 + +04:38.610 --> 04:42.750 +만약 그렇다면 전체 직원 기록을 프롬프트에 밀어넣을 거예요 + +04:42.750 --> 04:48.210 +일종의 매뉴얼, 무식한 Rag 구현이죠 막후에서 어떻게 작동하는지 + +04:48.210 --> 04:49.950 +좋은 감각을 제공해요 + +04:49.950 --> 04:52.350 +마술이 없다는 걸 보여줄 거예요 + +04:52.380 --> 04:55.500 +모델의 성능을 즉시 향상시켜주죠 + +04:55.500 --> 04:58.770 +Get 절차가 끝나면 더 신나는 걸 할 거예요 + +04:58.770 --> 05:03.000 +하지만 지금은 주피터랩에 가서 직접 천을 만들어 보죠 diff --git a/week5/community-contributions/subtitles/srts/59297693/en_US.srt b/week5/community-contributions/subtitles/srts/59297693/en_US.srt new file mode 100755 index 0000000..0caeca4 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297693/en_US.srt @@ -0,0 +1,295 @@ +WEBVTT + +00:01.040 --> 00:06.350 +So at the end of each week, it's customary for me to give you a challenge, an assignment to do on + +00:06.350 --> 00:06.950 +your own. + +00:06.950 --> 00:12.050 +And there's been some interesting assignments I hope you agree that we've had so far, but let me just + +00:12.050 --> 00:13.940 +put it out there this week. 
+ +00:13.940 --> 00:19.970 +The assignment is by far the most interesting so far, and I really, really hope you take this one + +00:19.970 --> 00:24.320 +seriously and give it a good crack, because I'm going to do it myself too. + +00:24.320 --> 00:28.220 +And I challenge you to it because I want to do this as quickly as I can. + +00:28.220 --> 00:30.080 +I think it's going to make an amazing blog post. + +00:30.080 --> 00:33.410 +I think it's going to be something that's that's really cool and really fun. + +00:33.410 --> 00:35.480 +So here's the challenge. + +00:35.510 --> 00:43.400 +Take exactly what we've just built and use it to build your own knowledge worker on your own information + +00:43.970 --> 00:47.390 +as a way to boost your own personal productivity. + +00:47.390 --> 00:54.110 +So, for example, you could assemble all of your files that you've got in one place, and maybe you + +00:54.110 --> 00:56.750 +already have that on your hard drive, I certainly do. + +00:57.650 --> 01:03.670 +And so then that is effectively your own personal knowledge base with a folder structure. + +01:04.360 --> 01:11.890 +You can then vectorize everything in chroma, which will then become your own personal vector data store, + +01:11.890 --> 01:18.430 +and you can then build a conversational AI on top of it and ask questions about your information. + +01:18.460 --> 01:23.320 +And when I think about the amount of information I have about projects I've worked on, about things + +01:23.320 --> 01:26.260 +that I've done, there's no way I've got that in my mind. + +01:26.290 --> 01:33.580 +Like, I spend so much of my time digging things up, and the idea that I'd be able to have a a chatbot + +01:33.610 --> 01:42.160 +that is optimized for my own personal background, perhaps bringing together both the my work and personal + +01:42.160 --> 01:44.470 +stuff so that I could really connect the dots. 
+ +01:44.500 --> 01:50.230 +Think about someone that might be able to help me with a particular problem from across all of my contacts, + +01:50.260 --> 01:53.080 +current job, previous jobs, and so on. + +01:53.230 --> 01:55.990 +It's just an amazingly powerful idea. + +01:55.990 --> 01:58.420 +And so I'm very excited to do this myself. + +01:58.450 --> 02:03.380 +There's a couple of things that you could do to take it even further if you want to go really insane. + +02:03.380 --> 02:04.820 +And again, that's what I'm planning to do. + +02:04.820 --> 02:13.640 +So I use a Gmail, as does most of the internet, and it's reasonably easy to write some code so that + +02:13.640 --> 02:21.920 +you can authenticate against the Google API and then have access to your own inbox to be able to read + +02:21.920 --> 02:25.280 +your emails, uh, through Google's API. + +02:25.310 --> 02:26.390 +I say it's easy. + +02:26.390 --> 02:27.830 +It's sort of medium easy. + +02:27.860 --> 02:33.170 +Like the the code that they have to authenticate is a bit of a bore, but so many people have done it + +02:33.170 --> 02:37.940 +that you can quite easily Google it and see step by step instructions for how to do it. + +02:37.940 --> 02:46.400 +So one could connect to one's email box and bring in emails and also vectorize them in Chrome. + +02:46.400 --> 02:49.850 +So you would have your email history there too. + +02:49.940 --> 02:56.780 +Uh, obviously it's it's completely unrealistic to provide all of this context in some massive, uh, + +02:56.780 --> 03:00.680 +context window to to to frontier model. + +03:00.680 --> 03:07.090 +You can hopefully imagine that all of your material would be bigger than the million tokens that even + +03:07.120 --> 03:09.100 +Gemini 1.5 flash can take. 
+ +03:09.670 --> 03:18.220 +But using Rag, it would be entirely possible to pluck out the 25 closest documents in your vector database + +03:18.220 --> 03:21.310 +to a particular question and then be able to provide them. + +03:22.390 --> 03:25.510 +And so you could imagine you could do that for your email inbox. + +03:25.510 --> 03:29.500 +You could also do it for if you have Microsoft Office files. + +03:29.500 --> 03:35.860 +There are simple Python libraries that will read office files and bring out the text versions of them. + +03:35.860 --> 03:41.200 +And if you use Google Drive, then Google has an API to be able to read your documents in Google Drive. + +03:41.230 --> 03:43.390 +Again, not super easy. + +03:43.390 --> 03:49.840 +There's a bit of hokey stuff to authenticate, but it's completely doable, and I really think that + +03:49.840 --> 03:52.060 +the reward would make it worth it. + +03:52.090 --> 03:59.500 +One final tiny point to this you might have a concern about calling things like OpenAI embeddings to + +03:59.530 --> 04:05.570 +be vectorizing all of your data, because there's always a sense of, okay, so how confident are we + +04:05.570 --> 04:11.330 +that these calls we're making with our private data isn't getting kept anywhere? + +04:11.510 --> 04:16.430 +Um, and so as a final part to this challenge, if that is something that's a concern for you, then + +04:16.430 --> 04:21.200 +you can actually use an open source model like Bert to run it yourself. + +04:21.200 --> 04:23.630 +You can do the vectorization yourself. + +04:23.630 --> 04:25.520 +And again, there's a couple of ways of doing it. + +04:25.520 --> 04:31.400 +The way that you know about is you could just bring up a Google Colab, you could have your Google Drive + +04:31.430 --> 04:38.750 +mapped to that colab, and you can simply use that in Colab to be vectorizing all of your documents + +04:38.750 --> 04:39.410 +that way. + +04:39.410 --> 04:40.520 +So that's one way to do it. 
+ +04:40.520 --> 04:41.780 +That would be very effective. + +04:41.810 --> 04:47.270 +Another way that's perhaps slightly more advanced is that you could use something called llama CP llama + +04:47.270 --> 04:54.200 +dot CP, which is a library that you can run on your computer locally, and it has optimized C plus + +04:54.200 --> 05:01.070 +plus code to run some of these models in inference mode locally on your box without ever leaving your + +05:01.100 --> 05:02.300 +own box. + +05:02.420 --> 05:10.870 +Um, and so that can be a final approach you could use if you wish to be able to vectorize all of your + +05:10.870 --> 05:13.690 +documents without having to go to the cloud. + +05:13.840 --> 05:21.820 +Um, but all in all, the challenge for you is make a personal, private knowledge worker for yourself + +05:21.820 --> 05:23.770 +to prove that you can do this. + +05:23.770 --> 05:29.050 +And if that's too much of an endeavor for you, then at the very least, take a few text documents that + +05:29.050 --> 05:33.160 +you've got and drop them in the same folder to do a mini version of it. + +05:33.160 --> 05:34.090 +At least do that. + +05:34.090 --> 05:35.410 +That is the minimum threshold. + +05:35.410 --> 05:36.760 +I at least ask for that. + +05:36.760 --> 05:41.920 +Some text documents in that folder structure, so that you can see how this might work for your own + +05:41.920 --> 05:42.280 +stuff. + +05:42.280 --> 05:46.270 +But I'm hoping someone does this whole project for real and I race you. + +05:46.270 --> 05:50.080 +I'm going to do it too, and write a blog post about it, and I can't wait. + +05:50.110 --> 05:53.230 +And that wraps up our week of Rag. + +05:53.230 --> 05:59.470 +And at that point, it brings you to 62.5% of your way along this journey. + +05:59.470 --> 06:02.710 +And I hope you feel that sense of upskilling. 
+ +06:02.710 --> 06:07.490 +I hope you now feel so many things are coming together As long as you're doing these exercises and as + +06:07.490 --> 06:14.150 +long as you are learning by doing at this point, you've got a great intuition for how rag works and + +06:14.150 --> 06:15.890 +why it works and why it's effective. + +06:15.920 --> 06:22.070 +You understand about vector embedding and vector data stores, and all of that is in addition to everything + +06:22.070 --> 06:27.860 +else we've worked on in the past working with frontier models, AI assistants, using tools, using + +06:27.890 --> 06:34.130 +hugging face for open source models, for pipelines tokenizers models, and also choosing the right + +06:34.160 --> 06:39.050 +LLM using the various leaderboards like the open LLM leaderboard from Hugging Face. + +06:39.470 --> 06:42.230 +So it's a big moment. + +06:42.230 --> 06:43.910 +It's very exciting. + +06:44.000 --> 06:48.050 +Uh, but next week we start on something completely new. + +06:48.080 --> 06:51.380 +We're going to introduce a new commercial project. + +06:51.380 --> 06:56.750 +We're going to download a dataset from Hugging Face, and we're going to be curating our data to take + +06:56.750 --> 06:58.640 +on something new and exciting. + +06:58.640 --> 07:04.280 +That involves moving from the world of inference to the world of training, which is a very big step + +07:04.280 --> 07:04.880 +indeed. + +07:04.910 --> 07:07.160 +I can't wait, and I'll see you then. 
diff --git a/week5/community-contributions/subtitles/srts/59297693/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297693/ja_JP.srt new file mode 100755 index 0000000..0ff4967 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297693/ja_JP.srt @@ -0,0 +1,226 @@ +WEBVTT + +00:01.040 --> 00:06.950 +だから、 毎週末には、 私があなたたちに課題を与えるのが慣例となっている。 + +00:06.950 --> 00:13.940 +今週も興味深い仕事がいくつかあった。 + +00:13.940 --> 00:19.970 +この課題は、 これまでで断トツに面白い。 本当に、 本当に、 この課題に真剣に取り組んで、 + +00:19.970 --> 00:24.320 +しっかり挑戦してほしい。 + +00:24.320 --> 00:28.220 +そして、 私はできるだけ早くこれをやりたいので、 それに挑戦する。 + +00:28.220 --> 00:30.080 +素晴らしいブログ記事になると思うよ。 + +00:30.080 --> 00:33.410 +本当にクールで楽しいものになると思う。 + +00:33.410 --> 00:35.480 +そこで挑戦だ。 + +00:35.510 --> 00:47.390 +私たちが構築したものをそのまま使って、 あなた自身の生産性を高める方法として、 あなた自身の情報をもとにあなた自身のナレッジ・ワーカーを構築してください。 + +00:47.390 --> 00:56.750 +例えば、 あなたが持っているすべてのファイルを1つの場所に集めることができる。 + +00:57.650 --> 01:03.670 +そして、 それは事実上、 フォルダ構造を持つ自分だけの知識ベースとなる。 + +01:04.360 --> 01:11.890 +そして、 すべてをクロマでベクトル化し、 あなただけのベクトル・データ・ストアにし、 その上に会話型AIを構築して、 + +01:11.890 --> 01:18.430 +あなたの情報について質問することができる。 + +01:18.460 --> 01:23.320 +それに、 自分が手掛けたプロジェクトや、 自分がやったことについて、 自分が持っている情報量を考えても、 + +01:23.320 --> 01:26.260 +それが頭の中に入っているわけがない。 + +01:26.290 --> 01:33.580 +そして、 自分の個人的なバックグラウンドに最適化されたチャットボットがあれば、 + +01:33.610 --> 01:44.470 +仕事とプライベートの両方を結びつけることができるかもしれない。 + +01:44.500 --> 01:53.080 +私のすべての人脈、 現在の仕事、 以前の仕事などから、 特定の問題で私を助けてくれそうな人について考える。 + +01:53.230 --> 01:55.990 +驚くほどパワフルなアイデアだ。 + +01:55.990 --> 01:58.420 +だから、 私自身、 これをやるのがとても楽しみなんだ。 + +01:58.450 --> 02:03.380 +本当に狂気じみたことをしたいのなら、 それをさらに進めるためにできることがいくつかある。 + +02:03.380 --> 02:04.820 +そしてまた、 そうするつもりだ。 + +02:04.820 --> 02:25.280 +グーグルのAPIを使って認証し、 自分の受信トレイにアクセスしてメールを読めるようにするのは簡単だ。 + +02:25.310 --> 02:26.390 +私は簡単だと言っている。 + +02:26.390 --> 02:27.830 +中々簡単だよ。 + +02:27.860 --> 02:37.940 +認証に必要なコードもそうだが、 多くの人がやっていることなので、 ググれば簡単にステップ・バイ・ステップでやり方を見ることができる。 + +02:37.940 --> 02:46.400 +そのため、 メールボックスに接続してメールを取り込み、 クロームでベクトル化することもできる。 + +02:46.400 --> 
02:49.850 +だから、 メールの履歴もそこにあるはずだ。 + +02:49.940 --> 03:00.680 +明らかに、 フロンティアモデルへの巨大なコンテクストウィンドウで、 このコンテクストをすべて提供するのは完全に非現実的だ。 + +03:00.680 --> 03:09.100 +うまくいけば、 ジェミニ1号でさえ100万トークンよりも大きな素材になることを想像できるだろう。 + +03:09.100 --> 03:09.100 +5フラッシュが使える。 + +03:09.670 --> 03:18.220 +しかし、 Ragを使えば、 ベクター・データベースから特定の質問に最も近い25の文書を抜き出して、 + +03:18.220 --> 03:21.310 +それを提供することができる。 + +03:22.390 --> 03:25.510 +だから、 Eメールの受信トレイにも同じことができる。 + +03:25.510 --> 03:29.500 +また、 マイクロソフト・オフィスのファイルがある場合にも、 この方法が使える。 + +03:29.500 --> 03:35.860 +Pythonには、 オフィス・ファイルを読み込んでテキスト・バージョンを表示するシンプルなライブラリがある。 + +03:35.860 --> 03:41.200 +また、 グーグル・ドライブを使用している場合、 グーグルはグーグル・ドライブ内のドキュメントを読むことができるAPIを持っている。 + +03:41.230 --> 03:43.390 +繰り返すが、 超簡単ではない。 + +03:43.390 --> 03:52.060 +認証には少し面倒なこともあるが、 完全に可能だし、 その報酬があればやる価値はあると思う。 + +03:52.090 --> 04:11.330 +というのも、 私たちの個人的なデータがどこにも保存されないという確信があるのだろうか? + +04:11.510 --> 04:21.200 +それで、 この挑戦の最後の部分として、 もしそれがあなたにとって心配なことであれば、 実際にBertのようなオープンソースのモデルを使って自分で実行することができます。 + +04:21.200 --> 04:23.630 +ベクトル化は自分でできる。 + +04:23.630 --> 04:25.520 +そしてまた、 それにはいくつかの方法がある。 + +04:25.520 --> 04:31.400 +GoogleのColabを立ち上げて、 Google DriveをそのColabにマッピングしておけば、 + +04:31.430 --> 04:39.410 +ColabでそのColabを使ってすべてのドキュメントをベクター化することができます。 + +04:39.410 --> 04:40.520 +それも一つの方法だね。 + +04:40.520 --> 04:41.780 +それは非常に効果的だ。 + +04:41.810 --> 04:47.270 +これはローカルで実行できるライブラリで、 最適化されたC++コードがあり、 + +04:47.270 --> 05:02.300 +自分のコンピュータを離れることなく、 ローカルで推論モードでこれらのモデルのいくつかを実行することができる。 + +05:02.420 --> 05:13.690 +クラウドに行かなくても、 すべてのドキュメントをベクター化したいのであれば、 これが最終的なアプローチになる。 + +05:13.840 --> 05:23.770 +うーん、 でも結局のところ、 あなたにとっての挑戦は、 これができることを証明するために、 個人的なプライベート・ナレッジワーカーを作ることなんだ。 + +05:23.770 --> 05:29.050 +また、 それがあなたにとってあまりにも大変な努力であるならば、 少なくとも、 あなたが持っているいくつかのテキスト文書を同じフォルダーにドロップして、 + +05:29.050 --> 05:33.160 +そのミニ・バージョンをやってみよう。 + +05:33.160 --> 05:34.090 +せめてそれくらいはしてほしい。 + +05:34.090 --> 05:35.410 +これが最低ラインだ。 + +05:35.410 --> 05:36.760 +少なくとも私はそれを求めている。 + +05:36.760 --> 05:42.280 +このフォルダー構造の中に、 いくつかのテキスト文書が含まれている。 + +05:42.280 --> 05:46.270 +でも、 
誰かがこのプロジェクト全体を本気でやってくれることを期待しているし、 私はあなたにレースをする。 + +05:46.270 --> 05:50.080 +私もそれをやるつもりだし、 それについてブログ記事を書くつもりだ。 + +05:50.110 --> 05:53.230 +これで今週のラグは終了だ。 + +05:53.230 --> 05:59.470 +そしてその時点で62歳になる。 この旅の道程の5%。 + +05:59.470 --> 06:02.710 +そして、 スキルアップの感覚を感じてほしい。 + +06:02.710 --> 06:07.490 +このエクササイズをやっている限り、 そしてこの時点で実践して学んでいる限り、 + +06:07.490 --> 06:15.890 +あなたはラグがどのように機能し、 なぜ機能し、 なぜ効果的なのかについて、 素晴らしい直感を得ている。 + +06:15.920 --> 06:22.070 +ベクトル埋め込みとベクトルデータストアについて理解していただきましたが、 これに加えて、 フロンティアモデル、 + +06:22.070 --> 06:27.860 +AIアシスタント、 ツールの使用、 オープンソースモデルやパイプライン・トークナイザーモデル用のハギング・フェイスの使用、 + +06:27.890 --> 06:34.130 +ハギング・フェイスのオープンLLMリーダーボードのような様々なリーダーボードを使用した適切なLLMの選択など、 + +06:34.160 --> 06:39.050 +私たちが過去に取り組んできたすべてのことがあります。 + +06:39.470 --> 06:42.230 +だから大事な瞬間なんだ。 + +06:42.230 --> 06:43.910 +とてもエキサイティングだよ。 + +06:44.000 --> 06:48.050 +ええと、 でも来週はまったく新しいことを始めるんだ。 + +06:48.080 --> 06:51.380 +新しいコマーシャル・プロジェクトを紹介しよう。 + +06:51.380 --> 06:58.640 +ハギング・フェイスからデータセットをダウンロードし、 データをキュレーションして、 何か新しくエキサイティングなことに挑戦するつもりだ。 + +06:58.640 --> 07:04.880 +これは推論の世界からトレーニングの世界へと移行することを意味し、 実に大きな一歩である。 + +07:04.910 --> 07:07.160 +待ちきれないよ。 diff --git a/week5/community-contributions/subtitles/srts/59297693/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297693/ko_KR.srt new file mode 100755 index 0000000..96b22bc --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297693/ko_KR.srt @@ -0,0 +1,286 @@ +WEBVTT + +00:01.040 --> 00:06.350 +매주 끝날 때마다 도전 과제를 드리는 게 관례예요 혼자 해내야 + +00:06.350 --> 00:06.950 +하죠 + +00:06.950 --> 00:12.050 +여러분도 동의하시길 바라며 흥미로운 과제들이 있었지만 이번 주에 공개할게요 Tudum + +00:12.050 --> 00:13.940 +Tudum Tudum + +00:13.940 --> 00:19.970 +이번 과제가 지금까지 중 가장 흥미로워요 다들 이번 과제를 진지하게 받아들이고 + +00:19.970 --> 00:24.320 +열심히 해 주길 바라요 나도 직접 할 거니까요 + +00:24.320 --> 00:28.220 +이의를 제기합니다 최대한 빨리 끝내고 싶거든요 + +00:28.220 --> 00:30.080 +멋진 post가 될 것 같아요 + +00:30.080 --> 00:33.410 +정말 멋지고 재미있는 일이 될 것 같아요 + +00:33.410 --> 00:35.480 +문제는 이거예요 + +00:35.510 --> 00:43.400 +방금 만든 걸 이용해 개인 생산성을 높이기 위해 여러분의 
정보에 기반한 여러분의 + +00:43.970 --> 00:47.390 +지식 작업자를 만드는 거죠 + +00:47.390 --> 00:54.110 +예를 들어 한 곳에 있는 모든 파일을 조립할 수 있어요 이미 하드 드라이브에 + +00:54.110 --> 00:56.750 +있을 수도 있죠 전 그래요 + +00:57.650 --> 01:03.670 +폴더 구조가 있는 여러분의 개인적 지식 기반이죠 + +01:04.360 --> 01:11.890 +모든 걸 크로마로 벡터화할 수 있어요 그럼 여러분 고유의 벡터 데이터 저장소가 되죠 그 + +01:11.890 --> 01:18.430 +위에 대화용 인공지능을 만들어 여러분의 정보에 관해 질문할 수 있어요 + +01:18.460 --> 01:23.320 +제가 작업했던 프로젝트와 했던 일에 대한 정보의 양을 생각해 보면 + +01:23.320 --> 01:26.260 +제 마음속에 그런 게 있을 리가 없어요 + +01:26.290 --> 01:33.580 +뭔가를 파헤치는 데 많은 시간을 보내는데 제 개인적인 배경에 최적화된 + +01:33.610 --> 01:42.160 +챗봇을 만들 생각을 하니 어쩌면 일과 개인적인 것을 합쳐서 연결할 수 있을지도 + +01:42.160 --> 01:44.470 +몰라요 + +01:44.500 --> 01:50.230 +제 연락처, 현재 직장, 이전 직장 등에서 저를 도와줄 수 있는 + +01:50.260 --> 01:53.080 +누군가를 생각해 보세요. + +01:53.230 --> 01:55.990 +놀랍도록 강력한 아이디어예요 + +01:55.990 --> 01:58.420 +그래서 직접 하게 돼서 정말 신나요 + +01:58.450 --> 02:03.380 +더 멀리 갈 수 있는 방법이 몇 가지 있어요 정말 미친 짓을 하고 싶다면요 + +02:03.380 --> 02:04.820 +다시 말하지만 그게 제 계획이에요 + +02:04.820 --> 02:13.640 +그래서 전 지메일을 써요 인터넷 대부분이 그렇죠 코드 작성하는 건 꽤 쉬워요 구글 API에 + +02:13.640 --> 02:21.920 +대항해 인증을 하고 구글 API를 통해 이메일을 읽을 수 있는 자신의 메일함에 엑세스 + +02:21.920 --> 02:25.280 +권한을 가질 수 있죠 + +02:25.310 --> 02:26.390 +전 쉽다고 봐요 + +02:26.390 --> 02:27.830 +중간 크기예요 + +02:27.860 --> 02:33.170 +인증해야 하는 코드는 좀 지루하지만 많은 사람들이 해 봐서 구글에 쉽게 + +02:33.170 --> 02:37.940 +검색할 수 있어요 단계별로 어떻게 하는지 비트를 볼 수 있죠 + +02:37.940 --> 02:46.400 +이메일 상자에 접속해서 메일을 불러오고 크롬으로 벡터화할 수 있죠 + +02:46.400 --> 02:49.850 +이메일 기록도 있을 거예요 + +02:49.940 --> 02:56.780 +물론 이 모든 맥락을 개척자 모델로 보여주는 건 완전히 + +02:56.780 --> 03:00.680 +비현실적인 일이죠 + +03:00.680 --> 03:07.090 +모든 자재가 제미니 1호 토큰보다 훨씬 클 거라고 상상할 수 + +03:07.120 --> 03:09.100 +있어요 5번 섬광도 견뎌요 + +03:09.670 --> 03:18.220 +하지만 래그를 이용해 벡터 데이터베이스에서 특정 질문에 가장 가까운 25개의 문서를 뽑아내는 건 전적으로 + +03:18.220 --> 03:21.310 +가능합니다 그리고 그걸 제공할 수 있죠 + +03:22.390 --> 03:25.510 +이메일 받은 편지함에 그렇게 할 수 있다고 상상해 보세요 + +03:25.510 --> 03:29.500 +마이크로소프트 Office 파일이 있다면 그렇게 할 수도 있어요 + +03:29.500 --> 03:35.860 +간단한 파이썬 라이브러리가 있습니다 Office 파일을 
읽고 텍스트 버전으로 불러오죠 + +03:35.860 --> 03:41.200 +구글 드라이브를 사용한다면 구글 드라이브에 있는 문서를 읽을 API가 있어요 + +03:41.230 --> 03:43.390 +아주 쉬운 일은 아니죠 + +03:43.390 --> 03:49.840 +감정할 게 좀 있지만 충분히 할 수 있어요 보상을 받으면 비트로도 + +03:49.840 --> 03:52.060 +가치가 있죠 + +03:52.090 --> 03:59.500 +마지막으로 작은 포인트를 말씀드리면 오픈AI 엠베딩 같은 것을 호출해 모든 데이터를 벡터화하는 + +03:59.530 --> 04:05.570 +것을 우려할 수 있습니다 왜냐하면 항상 이런 느낌이 들거든요 개인 데이터로 호출하는 + +04:05.570 --> 04:11.330 +것이 아무 데도 저장되지 않을 거라고 얼마나 확신하나요? + +04:11.510 --> 04:16.430 +그래서 이번 과제의 마지막 부분으로 그게 걱정된다면 + +04:16.430 --> 04:21.200 +버트 같은 오픈 소스 모델을 직접 활용해도 좋아요 + +04:21.200 --> 04:23.630 +벡터화는 직접 할 수 있어요 + +04:23.630 --> 04:25.520 +두 가지 방법이 있어요 + +04:25.520 --> 04:31.400 +여러분이 아는 방법은 구글 Colab을 불러올 수 있다는 겁니다 구글 드라이브를 + +04:31.430 --> 04:38.750 +그 Colab에 매핑할 수 있어요 그걸 Colab에서 이용해 모든 문서를 그런 식으로 벡터화할 수 + +04:38.750 --> 04:39.410 +있죠 + +04:39.410 --> 04:40.520 +그게 한 가지 방법이죠 + +04:40.520 --> 04:41.780 +아주 효과적일 거예요 + +04:41.810 --> 04:47.270 +좀 더 고급인 다른 방법은 llama CP llama.CP라는 걸 이용하는 + +04:47.270 --> 04:54.200 +겁니다 라이브러리인데 여러분 컴퓨터에서 로컬로 실행할 수 있죠 C++ 코드를 최적화해 + +04:54.200 --> 05:02.300 +일부 모델을 추론 모드에서 여러분 박스에서 로컬로 실행합니다 여러분 박스를 떠나지 않고요 + +05:02.420 --> 05:10.870 +클라우드로 가지 않고도 모든 문서를 벡터화하고 싶다면 이 방법을 최종적으로 + +05:10.870 --> 05:13.690 +사용할 수 있어요 + +05:13.840 --> 05:21.820 +하지만 결국 가장 어려운 건 개인적이고 개인적인 지식인을 만들어서 할 수 있다는 + +05:21.820 --> 05:23.770 +걸 증명하는 거예요 + +05:23.770 --> 05:29.050 +그게 너무 힘들다면 최소한 갖고 있는 텍스트 문서 몇 개를 + +05:29.050 --> 05:33.160 +같은 폴더에 넣어 축소판을 만드세요 + +05:33.160 --> 05:34.090 +그것만이라도 해 줘요 + +05:34.090 --> 05:35.410 +그게 최소 기준치예요 + +05:35.410 --> 05:36.760 +그 정도는 요구해야죠 + +05:36.760 --> 05:42.280 +폴더 구조의 텍스트 문서를요 여러분 것을 위해 어떻게 작동하는지 볼 수 있도록요 + +05:42.280 --> 05:46.270 +하지만 누군가 이 프로젝트를 실제로 해내면 좋겠어요 + +05:46.270 --> 05:50.080 +저도 할 거예요 블로그 post도 쓸 거고요 기대되네요 + +05:50.110 --> 05:53.230 +이것으로 래그 주간을 마칠게요 + +05:53.230 --> 05:59.470 +그 시점에서 62점이 되죠 5%밖에 못 갔어요 + +05:59.470 --> 06:02.710 +의욕이 샘솟는 걸 느끼셨길 바라요 + +06:02.710 --> 06:07.490 +이제 많은 게 합쳐진다고 느끼길 바라요 이런 훈련을 계속하고 + +06:07.490 --> 
06:14.150 +지금 이렇게 배우기만 한다면 래그가 어떻게 작동하고 왜 효과적인지 직관적으로 + +06:14.150 --> 06:15.890 +알 수 있을 거예요 + +06:15.920 --> 06:22.070 +벡터 삽입과 벡터 데이터 저장소는 아실 겁니다 이 모든 것은 우리가 과거에 연구한 모든 것 외에도 프론티어 + +06:22.070 --> 06:27.860 +모델과 툴을 이용한 인공지능 조수들 오픈 소스 모델에 얼굴을 끌어안기 파이프라인 토큰라이저 모델에 + +06:27.890 --> 06:34.130 +맞는 LLM을 선택하는 데에 기여했죠 다양한 leaderboard를 이용해 올바른 LLM을 선택하기도 + +06:34.160 --> 06:39.050 +했습니다 페이스 포옹의 오픈 LLM leaderboard처럼요 + +06:39.470 --> 06:42.230 +중요한 순간이죠 + +06:42.230 --> 06:43.910 +정말 신나요 + +06:44.000 --> 06:48.050 +하지만 다음 주에는 완전히 새로운 걸 시작해요 + +06:48.080 --> 06:51.380 +새로운 상업 프로젝트를 소개할 거예요 + +06:51.380 --> 06:56.750 +얼굴 안기 게임 데이터셋을 다운로드 해서 데이터를 큐레이팅해 새롭고 흥미로운 + +06:56.750 --> 06:58.640 +걸 시도할 거예요 + +06:58.640 --> 07:04.880 +추론만 하던 것을 훈련으로 바꾼 셈이니 큰 발전이죠 + +07:04.910 --> 07:07.160 +기대되네요, 그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59297721/en_US.srt b/week5/community-contributions/subtitles/srts/59297721/en_US.srt new file mode 100755 index 0000000..ec9c8b7 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297721/en_US.srt @@ -0,0 +1,394 @@ +WEBVTT + +00:00.740 --> 00:08.330 +And so now the time has come to talk about the most crucial aspect of Rag, which is the idea of vectors. + +00:08.360 --> 00:11.750 +If you're already familiar with vectors and vector embeddings, hang on in there. + +00:11.750 --> 00:13.610 +I'm going to go fairly quickly through this. + +00:13.610 --> 00:17.570 +You may pick up a thing that you didn't know about as I as I explained this. + +00:17.570 --> 00:21.290 +So first of all, there's an important bit of background information, which is that we've been talking + +00:21.290 --> 00:26.210 +about a bunch of different LMS through this course, but almost all the LMS we've been talking about + +00:26.210 --> 00:30.380 +have been one kind of LM called an autoregressive LM. + +00:30.380 --> 00:35.300 +And there is, in fact a completely different category of LM known as Autoencoding. 
+ +00:35.300 --> 00:36.500 +So what's the difference? + +00:36.530 --> 00:44.990 +Autoregressive LMS or LMS, which are given a past set of tokens and they are required to generate the + +00:44.990 --> 00:49.070 +next token in the sequence a future token given the past. + +00:49.070 --> 00:52.400 +And they keep doing that repeating creating the next token. + +00:52.400 --> 00:56.300 +Given the history of tokens, that's an autoregressive LM. + +00:56.300 --> 00:58.100 +And of course it's all the rage. + +00:58.100 --> 01:00.710 +It's most of the ones that we work with. + +01:00.710 --> 01:04.270 +And of course it's it's GPT four and Claude and Gemini and so on. + +01:04.690 --> 01:11.380 +There's also these types called autoencoding, and they take a full input that represents both the past + +01:11.380 --> 01:12.700 +and the present and the future. + +01:12.700 --> 01:18.670 +It's a full bit of input, and they create one output that reflects the whole input. + +01:19.090 --> 01:22.120 +And so to make that real, there's some obvious examples. + +01:22.120 --> 01:26.620 +Sentiment analysis where you take in a sentence and say if it's positive or negative. + +01:26.770 --> 01:30.940 +Classification where you take a sentence and put it into buckets. + +01:31.420 --> 01:36.790 +Both things that we did actually explore briefly with the Huggingface pipelines a couple of weeks ago. + +01:36.850 --> 01:40.360 +And those are both examples of Autoencoding llms. + +01:41.260 --> 01:46.300 +There is another way that they are used as well, and it is to create something called a vector embedding. + +01:46.300 --> 01:48.700 +And that's what we're going to be talking about today. + +01:48.730 --> 01:54.520 +So a vector embedding is a way of taking a sentence of text or a bunch of different things, but usually + +01:54.520 --> 01:55.570 +a sentence of text. 
+ +01:55.570 --> 02:02.620 +And turning that into a series of numbers, a series of numbers that in some way reflect the meaning + +02:02.620 --> 02:04.360 +behind that text. + +02:04.360 --> 02:07.000 +And we'll go through exactly what that means in just a second. + +02:07.000 --> 02:13.640 +It sounds a bit abstract right now, but the idea is that take some text, convert it into numbers, + +02:13.640 --> 02:17.960 +and those numbers you could think of as representing a point in space. + +02:17.960 --> 02:22.670 +So if we took some text and we turned it into three numbers, you could think of that as being like + +02:22.670 --> 02:28.190 +an X, Y, and z that would represent exactly where abouts something is located in space. + +02:28.370 --> 02:33.800 +As it happens, usually when you do this, it gets converted into hundreds or thousands of numbers. + +02:33.800 --> 02:37.760 +So it represents a point in like 1000 dimensional space. + +02:37.760 --> 02:41.690 +And that's kind of hard for us to visualize because we can only think in three dimensions, but it's + +02:41.690 --> 02:42.620 +the same idea. + +02:42.620 --> 02:49.580 +It's reflecting a point in space, and that point is meant to represent in some way the meaning behind + +02:49.580 --> 02:55.250 +the text that went in to generate that are examples of auto encoding. + +02:55.280 --> 02:58.640 +Llms are Bert from Google. + +02:58.670 --> 03:02.540 +You may remember we actually mentioned Bert right back, I think in the first week. + +03:02.570 --> 03:05.240 +Uh, so so Bert's been around for a while. + +03:05.390 --> 03:11.110 +Um, there's also open AI embeddings from OpenAI, and that's actually the auto autoencoder model that + +03:11.110 --> 03:14.680 +we'll be using this week for our Rag projects. + +03:15.160 --> 03:19.960 +So let me just talk a bit more about what we mean by meaning. 
+ +03:19.990 --> 03:28.690 +So first of all, you can use you can create one of these vectors for a single character, for a token + +03:28.930 --> 03:34.660 +or a bunch of characters, for a word, for a sentence, for a paragraph, for an entire document, + +03:34.660 --> 03:36.490 +or even for something abstract. + +03:36.490 --> 03:44.020 +Like in my company, Nebula, we create vectors for things like talent and jobs and things like that. + +03:44.680 --> 03:49.930 +Often when you're working with these vectors, they will have hundreds or even thousands of dimensions, + +03:49.930 --> 03:54.700 +will be like a thousand numbers that represent this one block of text. + +03:55.030 --> 04:01.420 +And now I've said a few times that these numbers reflect the meaning behind the inputs. + +04:01.420 --> 04:03.010 +What exactly does that mean? + +04:03.040 --> 04:10.060 +So to put simply, one of the things it means is that if you have a bunch of paragraphs of text that + +04:10.060 --> 04:14.720 +all end up mapping to similar points in space that are close to each other. + +04:14.720 --> 04:18.800 +That should mean that these blocks of text have similar meaning. + +04:18.800 --> 04:21.500 +They don't necessarily need to contain the same words. + +04:21.500 --> 04:25.790 +They could be completely different words, but their meaning is the same. + +04:25.790 --> 04:32.090 +They will be close to each other in vector space, so things close to each other in when they're turned + +04:32.090 --> 04:35.540 +into numbers should mean similar things. + +04:35.540 --> 04:38.630 +And that's the basic idea behind this. + +04:38.630 --> 04:44.630 +There's also some more sophisticated ideas behind this, including this point that you can do what's + +04:44.660 --> 04:49.400 +what's sometimes called vector math behind the meanings of these things. + +04:49.400 --> 04:54.050 +And there's this example that's very often given it's been around for a long time, this example. 
+ +04:54.050 --> 04:59.750 +And you may well have heard of it before, and it says, supposing that you have the word king, and + +04:59.750 --> 05:05.720 +you took the word king, and you used one of these vector encodings to find the the point in space that + +05:05.720 --> 05:07.880 +represents the word king. + +05:07.910 --> 05:11.990 +And you also find the vector that reflects the word man. + +05:11.990 --> 05:14.510 +And the vector that reflects the word woman. + +05:14.600 --> 05:21.790 +And you take the word king and you subtract man from it, which means you kind of move backwards in + +05:21.790 --> 05:28.750 +the direction of man and you add woman, which means that you move forwards in the direction of woman. + +05:29.110 --> 05:36.160 +What you've effectively done is you've taken the concept, the meaning of king, and you've said, I + +05:36.160 --> 05:41.470 +want to replace the man with woman in this meaning king. + +05:41.470 --> 05:49.210 +And somewhat remarkably, if you do this, you do actually end up in the position in vector space, + +05:49.210 --> 05:53.890 +which is the same position as the position for the word queen. + +05:53.920 --> 06:00.220 +So it really does seem that if you take the word king, the meaning of the word king, and you replace + +06:00.220 --> 06:06.850 +the man aspect of it with woman, you're then at something which reflects the meaning of the word queen. + +06:06.850 --> 06:13.780 +And so it's in that sense that these vectors really seem to reflect the meaning behind the words they + +06:13.780 --> 06:19.660 +represent, both in terms of similar words being close to each other and the ability to carry out this + +06:19.660 --> 06:26.420 +kind of vector math that allows you to understand the relationship between concepts. + +06:27.860 --> 06:30.860 +So what's this got to do with rag? + +06:30.950 --> 06:32.930 +Here's where it all comes together. + +06:32.930 --> 06:35.510 +This is the big idea behind Rag now. 
+ +06:35.540 --> 06:38.570 +So this is the same diagram we had before. + +06:38.570 --> 06:40.550 +But there's going to be a little bit more going on. + +06:40.850 --> 06:43.970 +At the top we've got a new box called encoding LM. + +06:44.000 --> 06:48.650 +This is something which is able to take some text and turn it into a vector. + +06:48.650 --> 06:52.070 +And at the bottom we have something called a vector data store. + +06:52.100 --> 06:55.250 +It's like the data store we had before the knowledge base. + +06:55.250 --> 07:03.800 +But now along with text, we can also store the vector that represents that text, the vector that represents + +07:03.800 --> 07:05.750 +the meaning of that text. + +07:06.260 --> 07:07.130 +All right. + +07:07.130 --> 07:08.990 +So here's what we do. + +07:09.080 --> 07:12.260 +In comes a question from the user. + +07:12.290 --> 07:19.550 +The first thing we do is we take that question and we turn it into a vector sometimes called a vectorizing. + +07:19.550 --> 07:22.360 +So supposing the question was who is Amy Me. + +07:22.360 --> 07:23.260 +Lancaster. + +07:23.260 --> 07:30.160 +We take who is Amy Lancaster, and we turn that into a vector that reflects the meaning of the question, + +07:30.160 --> 07:31.870 +Who is Amy Lancaster? + +07:33.100 --> 07:34.990 +You can probably imagine what I'm going to say next. + +07:35.020 --> 07:43.120 +What we then do is we go to the vector database and we say, tell me what information is in this vector + +07:43.120 --> 07:49.180 +database where the vectors are close to the vector for who is Amy Lancaster? + +07:49.180 --> 07:51.760 +So look at all the different documents we've got in there. + +07:51.760 --> 07:53.530 +We've turned them all into vectors. + +07:53.530 --> 07:58.000 +Some of those vectors will be close to the question who is Amy Lancaster? 
+ +07:58.030 --> 08:03.790 +Give me those vectors and give me the the original information, the text that was turned into those + +08:03.790 --> 08:04.630 +vectors. + +08:04.630 --> 08:11.890 +And presumably it's extremely likely that the actual air document for Amy Lancaster is going to be located + +08:11.890 --> 08:15.910 +somewhere close to the vector, who is Amy Lancaster? + +08:16.810 --> 08:20.860 +And so when we get that information, we quite simply take that text. + +08:20.860 --> 08:26.570 +And just like before with the toy example, we shove that in the prompt to the LLM, we get back the + +08:26.570 --> 08:29.870 +response, presumably taking advantage of the extra context. + +08:29.870 --> 08:32.210 +And that's what goes back to the user. + +08:32.300 --> 08:38.600 +So it's just like the toy example, except we're using a much more powerful technique for looking up + +08:38.600 --> 08:45.620 +the relevant data, using vectors as a way of understanding which of our bits of knowledge have the + +08:45.620 --> 08:49.010 +most similar meaning to the meaning of the question. + +08:49.010 --> 08:51.410 +Well, that's really all there is to it. + +08:51.410 --> 08:56.120 +And that's a wrap for this week, because it's next time that we're going to put this into action and + +08:56.120 --> 08:58.910 +actually see vectors in databases. + +08:58.940 --> 09:04.160 +We're also next time going to start looking at something called Lang Chain, a wonderful, wonderful + +09:04.160 --> 09:08.780 +framework which is designed to make it easy to build these kinds of applications. + +09:08.780 --> 09:10.550 +We could do it all the manual way. + +09:10.550 --> 09:15.860 +We could actually create vectors and store them in vector databases using various APIs. + +09:16.160 --> 09:19.610 +But Lang Chain makes it super simple, as you will see. 
+ +09:19.640 --> 09:24.230 +And it's going to be a bit like the Gradio experience, where in just a couple of lines of code, we're + +09:24.230 --> 09:26.450 +going to be doing very powerful things. + +09:26.450 --> 09:28.160 +So I'm excited about it. + +09:28.160 --> 09:29.090 +I hope you are too. + +09:29.090 --> 09:30.260 +And I'll see you then. diff --git a/week5/community-contributions/subtitles/srts/59297721/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297721/ja_JP.srt new file mode 100755 index 0000000..71bd7a4 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297721/ja_JP.srt @@ -0,0 +1,313 @@ +WEBVTT + +00:00.740 --> 00:08.330 +そして今、 ラグの最も重要な側面であるベクトルについて話す時が来た。 + +00:08.360 --> 00:11.750 +すでにベクトルやベクトル埋め込みに馴染みのある方は、 そのままお待ちください。 + +00:11.750 --> 00:13.610 +かなり手短に説明しよう。 + +00:13.610 --> 00:17.570 +こうして説明するうちに、 知らなかったことが出てくるかもしれない。 + +00:17.570 --> 00:21.290 +まず最初に、 重要な予備知識として、 このコースでは様々なLMSについてお話してきましたが、 + +00:21.290 --> 00:30.380 +これまでお話してきたLMSのほとんどは、 自己回帰LMと呼ばれるLMの一種です。 + +00:30.380 --> 00:35.300 +そして、 オートエンコーディングと呼ばれる全く異なるカテゴリーのLMも存在する。 + +00:35.300 --> 00:36.500 +では、 何が違うのか? 
+ +00:36.530 --> 00:49.070 +自己回帰型LMS(Autoregressive LMS)またはLMSは、 過去のトークンのセットが与えられ、 過去のトークンが与えられたときに、 シーケンス内の次のトークン(未来のトークン)を生成することが要求される。 + +00:49.070 --> 00:52.400 +そして、 次のトークンを作ることを繰り返す。 + +00:52.400 --> 00:56.300 +トークンの歴史を考えれば、 これは自己回帰的なLMだ。 + +00:56.300 --> 00:58.100 +そしてもちろん、 大流行している。 + +00:58.100 --> 01:00.710 +一緒に仕事をしているほとんどの人がそうだ。 + +01:00.710 --> 01:04.270 +もちろん、 GPT4やクロード、 ジェミニなどもある。 + +01:04.690 --> 01:12.700 +オートエンコーディングと呼ばれるタイプもあり、 過去と現在と未来の両方を表す完全な入力を受け取る。 + +01:12.700 --> 01:18.670 +フルビットの入力で、 入力全体を反映した1つの出力を作り出すのだ。 + +01:19.090 --> 01:22.120 +そして、 それを現実のものとするために、 いくつかの明白な例がある。 + +01:22.120 --> 01:26.620 +センチメント分析では、 文章を取り込んで、 それが肯定的か否定的かを判断する。 + +01:26.770 --> 01:30.940 +文章をバケツに入れる分類。 + +01:31.420 --> 01:36.790 +どちらも、 数週間前にハギングフェイス・パイプラインについて少し検討したものだ。 + +01:36.850 --> 01:40.360 +そして、 これらはどちらもオートエンコードllmsの例である。 + +01:41.260 --> 01:46.300 +ベクトル埋め込みと呼ばれるものを作るためだ。 + +01:46.300 --> 01:48.700 +それが、 今日お話しすることです。 + +01:48.730 --> 01:55.570 +つまり、 ベクトル埋め込みとは、 テキストの一文、 あるいはさまざまなものを取り込む方法だが、 通常はテキストの一文である。 + +01:55.570 --> 02:04.360 +そして、 そのテキストに隠された意味を何らかの形で反映させた一連の数字に変える。 + +02:04.360 --> 02:07.000 +それが何を意味するのか、 すぐに説明しよう。 + +02:07.000 --> 02:17.960 +今は少し抽象的に聞こえるが、 テキストを数字に変換し、 その数字が空間上の点を表していると考えることができるというものだ。 + +02:17.960 --> 02:22.670 +つまり、 テキストを3つの数字に置き換えると、 X、 Y、 Zのようなもので、 + +02:22.670 --> 02:28.190 +何かが空間のどこにあるかを正確に表していると考えることができる。 + +02:28.370 --> 02:33.800 +そうすると、 通常は何百、 何千という数字に変換されてしまう。 + +02:33.800 --> 02:37.760 +つまり、 1000次元の空間の一点を表しているのだ。 + +02:37.760 --> 02:41.690 +私たちは3次元でしか考えることができないから、 それをイメージするのは難しいけど、 + +02:41.690 --> 02:42.620 +同じ考えなんだ。 + +02:42.620 --> 02:55.250 +その点は、 自動エンコーディングの例である、 生成に使われたテキストの背後にある意味を何らかの形で表すことを意味している。 + +02:55.280 --> 02:58.640 +LlmsはGoogleのバート。 + +02:58.670 --> 03:02.540 +バートについては、 最初の週に話したと思う。 + +03:02.570 --> 03:05.240 +ええと、 だからバートはしばらく前からいるんだ。 + +03:05.390 --> 03:14.680 +OpenAIが提供するオープンAIエンベッディングもありますし、 今週ラグ・プロジェクトで使用するオートエンコーダー・モデルもこれです。 + +03:15.160 --> 03:19.960 +では、 私たちが意味するところについて、 もう少しお話しさせてください。 + 
+03:19.990 --> 03:28.690 +まず第一に、 1文字、 1トークン、 1文字の束、 1単語、 1文、 1段落、 文書全体、 あるいは抽象的なものに対して、 + +03:28.930 --> 03:36.490 +これらのベクトルを作成することができます。 + +03:36.490 --> 03:44.020 +私の会社、 ネビュラでもそうですが、 私たちは人材や仕事などのベクトルを作っています。 + +03:44.680 --> 03:54.700 +ベクトルを扱う場合、 数百から数千の次元を持つことがよくあり、 1つのテキストブロックを表す1000個の数字のようなものだ。 + +03:55.030 --> 04:01.420 +そして今、 私は何度か、 これらの数字はインプットの背後にある意味を反映していると言ってきた。 + +04:01.420 --> 04:03.010 +それは一体どういう意味なのか? + +04:03.040 --> 04:10.060 +つまり、 簡単に言うと、 テキストの段落がたくさんあり、 それらがすべて、 空間内の互いに近い同じような点にマッピングされる、 + +04:10.060 --> 04:14.720 +ということだ。 + +04:14.720 --> 04:18.800 +つまり、 これらのテキストブロックは似たような意味を持つということだ。 + +04:18.800 --> 04:21.500 +必ずしも同じ言葉が含まれている必要はない。 + +04:21.500 --> 04:25.790 +まったく違う言葉かもしれないが、 意味は同じだ。 + +04:25.790 --> 04:35.540 +ベクトル空間では互いに近接しているはずだから、 数字にしたときに近接するものは似たような意味になるはずだ。 + +04:35.540 --> 04:38.630 +そして、 これが基本的な考え方だ。 + +04:38.630 --> 04:49.400 +この背景には、 より洗練されたアイデアもあり、 これらの意味の裏側でベクトル数学と呼ばれるものができることもある。 + +04:49.400 --> 04:54.050 +そして、 昔からよく言われるこの例がある。 + +04:54.050 --> 05:07.880 +王様という単語があると仮定して、 王様という単語を表す空間上の点を見つけるためにベクトルエンコーディングを使ったとする。 + +05:07.910 --> 05:11.990 +そして、 "man "という言葉を反映したベクトルも見つけることができる。 + +05:11.990 --> 05:14.510 +そして、 女性という言葉を映し出すベクトル。 + +05:14.600 --> 05:21.790 +王という言葉から男を引くと、 男の方向へ後退することになり、 + +05:21.790 --> 05:28.750 +女を足すと、 女の方向へ前進することになる。 + +05:29.110 --> 05:41.470 +あなたが効果的に行ったことは、 王の概念、 王の意味を取り上げて、 この王の意味において男を女に置き換えたいと言ったことだ。 + +05:41.470 --> 05:53.890 +そして少々驚くべきことに、 これを実行すると、 実際にベクトル空間の位置に行き着く。 + +05:53.920 --> 06:00.220 +つまり、 王という言葉の意味を男性から女性に置き換えると、 + +06:00.220 --> 06:06.850 +女王という言葉の意味を反映したものになるようだ。 + +06:06.850 --> 06:13.780 +その意味で、 ベクトルは、 似たような単語が近接しているという意味でも、 概念間の関係を理解するためのベクトル計算の能力という意味でも、 + +06:13.780 --> 06:26.420 +そのベクトルが表す単語の背後にある意味を如実に反映しているように見えるのだ。 + +06:27.860 --> 06:30.860 +ボロ雑巾との関係は? 
+ +06:30.950 --> 06:32.930 +ここにすべてが集約される。 + +06:32.930 --> 06:35.510 +これが今のラグの大きな考え方だ。 + +06:35.540 --> 06:38.570 +これは以前と同じ図だ。 + +06:38.570 --> 06:40.550 +でも、 もう少し続くよ。 + +06:40.850 --> 06:43.970 +一番上にエンコーディングLMという新しいボックスがある。 + +06:44.000 --> 06:48.650 +これはテキストをベクターに変換できるものだ。 + +06:48.650 --> 06:52.070 +そして一番下にはベクター・データ・ストアと呼ばれるものがある。 + +06:52.100 --> 06:55.250 +ナレッジ・ベースの前にあったデータ・ストアのようなものだ。 + +06:55.250 --> 07:05.750 +しかし今は、 テキストと一緒に、 そのテキストを表すベクトル、 そのテキストの意味を表すベクトルも保存できる。 + +07:06.260 --> 07:07.130 +分かった。 + +07:07.130 --> 07:08.990 +だから、 こうするんだ。 + +07:09.080 --> 07:12.260 +ユーザーからの質問である。 + +07:12.290 --> 07:19.550 +最初にすることは、 その質問をベクトル化することだ。 + +07:19.550 --> 07:22.360 +では、 仮にエイミー・ミーとは誰かという質問があったとしよう。 + +07:22.360 --> 07:23.260 +ランカスター + +07:23.260 --> 07:31.870 +エイミー・ランカスターとは誰かという問いの意味を反映するベクトルに変える。 + +07:33.100 --> 07:34.990 +私が次に何を言おうとしているか、 想像がつくだろう。 + +07:35.020 --> 07:49.180 +このベクターデータベースの中で、 エイミー・ランカスターという人物のベクターに近いベクターは何か教えてください。 + +07:49.180 --> 07:51.760 +だから、 そこにあるさまざまな文書を見てほしい。 + +07:51.760 --> 07:53.530 +我々はそれらをすべてベクトルに変えた。 + +07:53.530 --> 07:58.000 +エイミー・ランカスターとは誰か? 
+ +07:58.030 --> 08:04.630 +そのベクターを渡し、 そのベクターに変換された元の情報、 テキストを渡す。 + +08:04.630 --> 08:15.910 +そしておそらく、 エイミー・ランカスターの実際の航空文書が、 エイミー・ランカスターとは誰かというベクトルに近い場所にある可能性が極めて高い。 + +08:16.810 --> 08:20.860 +だから、 その情報を得たら、 単純にその文章を受け取る。 + +08:20.860 --> 08:29.870 +さっきのおもちゃの例と同じように、 LLMにプロンプトを送ると、 おそらく余分な文脈を利用したレスポンスが返ってくる。 + +08:29.870 --> 08:32.210 +そしてそれがユーザーに還元される。 + +08:32.300 --> 08:38.600 +つまり、 おもちゃの例と同じように、 関連するデータを調べるために、 より強力なテクニックを使っているということだ。 + +08:38.600 --> 08:49.010 +質問の意味と最も似た意味を持つ知識を理解する方法として、 ベクトルを使っている。 + +08:49.010 --> 08:51.410 +まあ、 本当にそれだけだ。 + +08:51.410 --> 08:58.910 +というのも、 次回はこれを実践して、 実際にデータベースでベクトルを見ることになるからだ。 + +08:58.940 --> 09:08.780 +また次回は、 この種のアプリケーションを簡単に構築できるように設計された、 素晴らしい素晴らしいフレームワークであるラング・チェインというものを見ていくつもりだ。 + +09:08.780 --> 09:10.550 +すべてマニュアル通りにやればいいんだ。 + +09:10.550 --> 09:15.860 +実際にベクターを作成し、 さまざまなAPIを使ってベクターデータベースに保存することができる。 + +09:16.160 --> 09:19.610 +しかし、 ラング・チェインでは、 ご覧のように超シンプルに仕上げている。 + +09:19.640 --> 09:26.450 +ほんの数行のコードで非常にパワフルなことができるようになる。 + +09:26.450 --> 09:28.160 +だから興奮しているんだ。 + +09:28.160 --> 09:29.090 +君もそうだといいね。 + +09:29.090 --> 09:30.260 +その時にまた会おう diff --git a/week5/community-contributions/subtitles/srts/59297721/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297721/ko_KR.srt new file mode 100755 index 0000000..5427101 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297721/ko_KR.srt @@ -0,0 +1,391 @@ +WEBVTT + +00:00.740 --> 00:08.330 +이제 랙의 가장 중요한 측면에 대해 얘기할 때가 됐습니다 벡터 개념이죠 + +00:08.360 --> 00:11.750 +벡터와 벡터 내장법에 익숙하시다면 잠시만요 + +00:11.750 --> 00:13.610 +빨리 훑어보도록 하죠 + +00:13.610 --> 00:17.570 +몰랐던 걸 알게 될 수도 있어요 제가 설명한 대로요 + +00:17.570 --> 00:21.290 +먼저 중요한 배경 정보가 있습니다 이 과정을 통해 다양한 + +00:21.290 --> 00:26.210 +LMS에 관해 얘기해왔는데요 우리가 얘기한 거의 모든 LMS는 하나의 + +00:26.210 --> 00:30.380 +종류였습니다 자동적 허용 LM이라고 불리는 거죠 + +00:30.380 --> 00:35.300 +LM에는 완전히 다른 범주가 있습니다 자동 코드화라고 하죠 + +00:35.300 --> 00:36.500 +차이가 뭐죠? 
+ +00:36.530 --> 00:44.990 +LMS 자동 이탈 LMS는 과거의 토큰 세트를 받고 시퀀스에서 다음 토큰을 생성해야 + +00:44.990 --> 00:49.070 +합니다 미래 토큰이요 과거가 주어지죠 + +00:49.070 --> 00:52.400 +계속 반복해서 다음 토큰을 생성하죠 + +00:52.400 --> 00:56.300 +토큰의 역사를 보면 자동적 차별적인 LM이에요 + +00:56.300 --> 00:58.100 +물론 그게 대세죠 + +00:58.100 --> 01:00.710 +우리가 함께 일하는 대부분의 사람들이죠 + +01:00.710 --> 01:04.270 +GPT 4호와 클로드 제미니 계획도 있고요 + +01:04.690 --> 01:11.380 +자동 코드 형식이라는 것도 있습니다 과거와 현재, 미래를 모두 나타내는 완전한 + +01:11.380 --> 01:12.700 +입력을 하죠 + +01:12.700 --> 01:18.670 +입력 비트가 가득하고 입력 전체를 반영하는 출력 비트를 생성하죠 + +01:19.090 --> 01:22.120 +그걸 현실화하려면 몇 가지 확실한 예가 있죠 + +01:22.120 --> 01:26.620 +감정 분석은 문장을 보고 긍정인지 부정인지 판단하는 거예요 + +01:26.770 --> 01:30.940 +분류 구역에서는 문장을 양동이에 담아 Put을 해요 + +01:31.420 --> 01:36.790 +2주 전에 어깅페이스 파이프라인과 관련해 잠깐 탐구한 것들이죠 + +01:36.850 --> 01:40.360 +둘 다 자동 코드 llms의 예죠 + +01:41.260 --> 01:46.300 +또 다른 방법으로 사용되기도 합니다 벡터 내장이라는 것을 생성할 때 사용되죠 + +01:46.300 --> 01:48.700 +오늘 얘기할 게 바로 그거예요 + +01:48.730 --> 01:54.520 +벡터 엠베딩은 텍스트의 문장이나 다른 여러 가지를 취하는 방법인데 보통은 텍스트의 + +01:54.520 --> 01:55.570 +문장이죠 + +01:55.570 --> 02:02.620 +그걸 일련의 숫자로 바꾸는 거죠 어떤 면에선 그 텍스트의 의미를 반영하는 + +02:02.620 --> 02:04.360 +일련의 숫자요 + +02:04.360 --> 02:07.000 +그게 무슨 뜻인지 잠시 후에 살펴보죠 + +02:07.000 --> 02:13.640 +지금은 좀 추상적으로 들리지만 아이디어는 텍스트를 숫자로 변환하는 겁니다 + +02:13.640 --> 02:17.960 +그 숫자들은 공간의 한 점을 나타내는 비트죠 + +02:17.960 --> 02:22.670 +텍스트를 세 개의 숫자로 바꾼다고 해보죠 엑스나 Y, Z로 + +02:22.670 --> 02:28.190 +생각할 수 있어요 이건 공간에서 무언가가 있는 위치를 나타내죠 + +02:28.370 --> 02:33.800 +보통 이렇게 하면 수백, 수천 개의 숫자로 변환돼요 + +02:33.800 --> 02:37.760 +천 차원 공간에 있는 한 지점을 나타내는 거예요 + +02:37.760 --> 02:41.690 +시각화하기가 좀 어렵죠 우린 3차원으로만 생각할 수 있으니까요 하지만 + +02:41.690 --> 02:42.620 +생각은 같죠 + +02:42.620 --> 02:49.580 +공간의 점을 반영하고 어떤 의미에서든 그 점은 자동 인코딩의 + +02:49.580 --> 02:55.250 +예로 생성된 텍스트의 의미를 나타내죠 + +02:55.280 --> 02:58.640 +구글의 버트예요 + +02:58.670 --> 03:02.540 +첫 주에 버트 얘기를 했던 거 기억하시죠? 
+ +03:02.570 --> 03:05.240 +버트는 꽤 오래 출연했어요 + +03:05.390 --> 03:11.110 +OpenAI의 오픈 인공지능 엠베딩도 있는데 자동 오토 젠코더 모델로 + +03:11.110 --> 03:14.680 +이번 주에 래그 프로젝트에 사용할 거예요 + +03:15.160 --> 03:19.960 +비트 코어의 의미에 대해 좀 더 얘기해 보죠 + +03:19.990 --> 03:28.690 +먼저, 여러분은 단일 문자에 대한 벡터 중 하나를 만들 수 있습니다 토큰이나 다수의 문자 + +03:28.930 --> 03:34.660 +단어, 문장, 단락, 전체 문서에 대한 것 혹은 추상적인 것에 + +03:34.660 --> 03:36.490 +대한 것에요 + +03:36.490 --> 03:44.020 +제 회사, 네뷸라처럼 재능이나 일자리 같은 것의 벡터를 만들어요 + +03:44.680 --> 03:49.930 +이런 벡터들을 작업할 때 수백, 수천 개의 차원이 있는 경우가 많아요 + +03:49.930 --> 03:54.700 +텍스트 한 덩어리를 나타내는 수천 개의 숫자처럼요 + +03:55.030 --> 04:01.420 +이 숫자들이 입력값의 의미를 반영한다고 여러 번 말씀드렸죠 + +04:01.420 --> 04:03.010 +그게 무슨 뜻이죠? + +04:03.040 --> 04:10.060 +간단히 말해서, 이 말은 여러 단락의 텍스트가 서로 가까운 공간의 + +04:10.060 --> 04:14.720 +비슷한 지점으로 매핑이 된다는 거죠. + +04:14.720 --> 04:18.800 +이 텍스트 블록들은 비슷한 의미를 가져야 해요 + +04:18.800 --> 04:21.500 +같은 단어를 쓸 필요는 없어요 + +04:21.500 --> 04:25.790 +완전히 다른 단어일 수도 있지만 의미는 같아요 + +04:25.790 --> 04:32.090 +벡터 공간에서 서로 가까워질 거예요 숫자로 바뀔 때 서로 가까워진다는 + +04:32.090 --> 04:35.540 +건 비슷한 걸 의미하죠 + +04:35.540 --> 04:38.630 +그게 기본 아이디어죠 + +04:38.630 --> 04:44.630 +이 이면에는 좀 더 정교한 아이디어가 있어요 이 점을 포함해서요 이런 것들의 + +04:44.660 --> 04:49.400 +의미 뒤에 숨은 벡터 수학이라고 불리는 걸 할 수 있어요 + +04:49.400 --> 04:54.050 +그리고 이 예시가 있습니다. 아주 오래된 예시죠. 
+ +04:54.050 --> 04:59.750 +들어 보셨을 수도 있는데 킹이라는 단어가 있다고 가정해 봅시다 킹이라는 + +04:59.750 --> 05:05.720 +단어를 가지고 벡터 인코딩을 이용해서 킹이라는 단어를 나타내는 공간의 + +05:05.720 --> 05:07.880 +점을 찾는다고 해보죠 + +05:07.910 --> 05:11.990 +인간이란 단어를 반영하는 벡터도 찾을 수 있죠 + +05:11.990 --> 05:14.510 +여성이라는 단어를 반영하는 벡터도요 + +05:14.600 --> 05:21.790 +킹이라는 단어에서 남자를 빼면 남자가 있는 방향으로 + +05:21.790 --> 05:28.750 +뒤로 가고 여자가 있는 방향으로 전진하는 거예요 + +05:29.110 --> 05:36.160 +당신은 왕이라는 개념을 받아들여서 남자를 여자로 대체하고 + +05:36.160 --> 05:41.470 +싶다고 했어요 즉, 왕이라는 뜻이죠 + +05:41.470 --> 05:49.210 +그리고 놀랍게도 이렇게 하면 벡터 공간에 들어가게 됩니다 퀸이라는 + +05:49.210 --> 05:53.890 +단어가 있는 위치와 같은 위치죠 + +05:53.920 --> 06:00.220 +그러니까 왕이라는 단어에서 왕의 의미를 따지고 남자 측면을 여자로 + +06:00.220 --> 06:06.850 +대체하면 여왕이라는 단어의 의미를 반영하는 무언가가 되는 거죠 + +06:06.850 --> 06:13.780 +그런 점에서 벡터들은 벡터들이 나타내는 단어의 의미를 반영하는 것 + +06:13.780 --> 06:19.660 +같아요 비슷한 단어들이 서로 가까이 있다는 점과 벡터 수학을 + +06:19.660 --> 06:26.420 +수행하는 능력도요 덕분에 개념 간의 관계를 이해할 수 있죠 + +06:27.860 --> 06:30.860 +이게 래그랑 무슨 상관이죠? + +06:30.950 --> 06:32.930 +여기서 모든 게 합쳐져요 + +06:32.930 --> 06:35.510 +래그의 핵심 아이디어죠 + +06:35.540 --> 06:38.570 +아까와 같은 도표예요 + +06:38.570 --> 06:40.550 +비트보다 더 많은 걸 할 거예요 + +06:40.850 --> 06:43.970 +상단엔 LM 인코딩이라는 새 상자가 있죠 + +06:44.000 --> 06:48.650 +어떤 텍스트를 벡터로 바꿀 수 있는 거죠 + +06:48.650 --> 06:52.070 +맨 밑에는 벡터 데이터 저장소가 있어요 + +06:52.100 --> 06:55.250 +지식 기반 이전에 있던 데이터 저장소 같아요 + +06:55.250 --> 07:03.800 +텍스트와 함께 텍스트를 나타내는 벡터도 저장할 수 있어요 텍스트의 의미를 + +07:03.800 --> 07:05.750 +나타내는 벡터요 + +07:06.260 --> 07:07.130 +좋아요 + +07:07.130 --> 07:08.990 +이렇게 하죠 + +07:09.080 --> 07:12.260 +사용자가 질문을 던지죠 + +07:12.290 --> 07:19.550 +가장 먼저 할 일은 질문을 벡터로 바꾸는 겁니다 벡터화라고도 하죠 + +07:19.550 --> 07:22.360 +에이미 미가 누구냐는 질문이라면요 + +07:22.360 --> 07:23.260 +랭커스터요 + +07:23.260 --> 07:30.160 +에이미 랭커스터가 누구인지를 질문의 의미로 해석하는 + +07:30.160 --> 07:31.870 +거예요 + +07:33.100 --> 07:34.990 +제가 다음에 할 말은 아마 짐작이 가실 거예요 + +07:35.020 --> 07:43.120 +이제 벡터 데이터베이스로 가서 벡터 데이터베이스에서 벡터들이 에이미 랭커스터에 가까운 + +07:43.120 --> 07:49.180 +곳에 있는 정보를 요청합니다 에이미 랭커스터가 누구인가요? 
+ +07:49.180 --> 07:51.760 +저 안에 있는 다양한 문서들을 보세요 + +07:51.760 --> 07:53.530 +전부 벡터로 만들었어요 + +07:53.530 --> 07:58.000 +에이미 랭커스터가 누구인지 궁금해할 벡터들이 있을 거예요 + +07:58.030 --> 08:04.630 +벡터들을 제공하고 벡터로 바뀐 원래 정보와 텍스트를 제공하죠 + +08:04.630 --> 08:11.890 +에이미 랭커스터의 항공 문서가 벡터와 가까운 곳에 있을 가능성이 높습니다 + +08:11.890 --> 08:15.910 +에이미 랭커스터가 누구죠? + +08:16.810 --> 08:20.860 +그래서 정보를 얻으면 텍스트를 get 하면 되죠 + +08:20.860 --> 08:26.570 +이전의 장난감 예제처럼 그걸 LLM 프롬프트에 밀어넣으면 응답을 얻습니다 + +08:26.570 --> 08:29.870 +아마도 추가적인 컨텍스트를 이용하겠죠 + +08:29.870 --> 08:32.210 +그게 사용자에게 돌아가는 거죠 + +08:32.300 --> 08:38.600 +장난감 예제와 같지만 관련 데이터를 찾는 데 훨씬 더 강력한 기술을 + +08:38.600 --> 08:45.620 +사용합니다 벡터를 이용해 우리의 지식 중 어떤 것이 질문의 의미와 가장 + +08:45.620 --> 08:49.010 +유사한지 이해하는 거죠 + +08:49.010 --> 08:51.410 +그게 다인 것 같아요 + +08:51.410 --> 08:56.120 +이번 주의 마무리가 되었네요. 왜냐하면 다음 시간에는 이것을 action에 넣고 + +08:56.120 --> 08:58.910 +데이터베이스에서 벡터를 볼 것이기 때문이죠. + +08:58.940 --> 09:04.160 +다음엔 랭 체인이라는 것도 살펴볼 겁니다 프레임워크가 훌륭하죠 + +09:04.160 --> 09:08.780 +이런 응용 프로그램을 쉽게 만들도록 디자인됐어요 + +09:08.780 --> 09:10.550 +전부 수동으로 할 수 있어요 + +09:10.550 --> 09:15.860 +벡터를 생성해 다양한 API를 이용해 벡터 데이터베이스에 저장할 수 있죠 + +09:16.160 --> 09:19.610 +랑 체인은 아주 간단해요 곧 보시겠지만요 + +09:19.640 --> 09:24.230 +그래디오 경험과 좀 비슷할 겁니다 코드 두어 줄로 아주 + +09:24.230 --> 09:26.450 +강력한 걸 하는 거죠 + +09:26.450 --> 09:28.160 +그래서 기대돼요 + +09:28.160 --> 09:29.090 +당신도 그랬으면 좋겠네요 + +09:29.090 --> 09:30.260 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59297723/en_US.srt b/week5/community-contributions/subtitles/srts/59297723/en_US.srt new file mode 100755 index 0000000..211f68e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297723/en_US.srt @@ -0,0 +1,421 @@ +WEBVTT + +00:00.800 --> 00:02.300 +So I know what you're thinking. + +00:02.300 --> 00:03.800 +You're thinking, what's going on here? + +00:03.830 --> 00:05.000 +We're on day five. + +00:05.030 --> 00:06.740 +We're on day five of week five. + +00:06.770 --> 00:08.840 +Why does he have a Jupyter notebook open? 
+ +00:08.840 --> 00:10.730 +That's called day 4.5. + +00:10.730 --> 00:18.020 +And it's because it's just a little quick deviation on day four to show you genuinely how straightforward + +00:18.020 --> 00:21.110 +it is to switch out the vector data store for another one. + +00:21.110 --> 00:27.020 +And since this is super common out there, used a lot, I thought it was a great one to show you and + +00:27.020 --> 00:32.930 +to use it both to illustrate a different vector data store, and also to show you how easy it is to + +00:32.960 --> 00:34.310 +work with these abstractions. + +00:34.340 --> 00:39.140 +Now, something to to explain is that this isn't really a vector data store. + +00:39.140 --> 00:47.810 +It stands for Facebook AI Similarity Search, and it's a highly used, very common open source library + +00:47.840 --> 00:54.860 +put out by Facebook that lets you do quick search for vectors close to other vectors, and as such, + +00:54.890 --> 00:58.370 +it is often used as a kind of in-memory vector store. + +00:58.370 --> 01:04.160 +It doesn't persist on the disk, but you can use it to plonk in vectors and to retrieve other vectors + +01:04.160 --> 01:09.230 +that are similar, and you can also just use it as a library to do similarity search, to be looking + +01:09.230 --> 01:16.520 +for vectors close to other vectors, and Langshan has the right abstractions built around it, so it + +01:16.520 --> 01:19.220 +can just be a drop in replacement for chroma. + +01:19.220 --> 01:22.310 +So we use chroma before, which was persistent on disk. + +01:22.310 --> 01:26.090 +So it's a more serious hardcore data store vice. + +01:26.510 --> 01:29.360 +We can just use that in lieu of chroma. + +01:29.360 --> 01:34.070 +And I'm going to show you how easy it is because the code is going to be almost identical, except when + +01:34.070 --> 01:37.610 +we get to that abstraction and a couple of other things. 
+ +01:38.240 --> 01:42.110 +So we run the imports as before in this import. + +01:42.110 --> 01:45.980 +You can see I have commented out the import for chroma, so I can't cheat. + +01:45.980 --> 01:52.160 +Chroma will not be included here and instead we're importing FAISS, Facebook AI Similarity Search. + +01:52.190 --> 01:58.760 +It's worth noting that FAISS comes in two variations a CPU variant and a GPU variant, depending on which + +01:58.760 --> 01:59.930 +you have Pip installed. + +01:59.930 --> 02:04.490 +And with this environment, of course, I have Pip installed the CPU variant, but for high performance + +02:04.490 --> 02:11.750 +projects running on GPU, you can run the GPU version to run at blazing speed, so we've imported FAISS + +02:11.750 --> 02:13.880 +and no errors, which is great. + +02:14.150 --> 02:15.650 +Set up some things. + +02:15.650 --> 02:17.300 +This is all the same code. + +02:17.300 --> 02:19.220 +We load everything in as before. + +02:19.220 --> 02:25.970 +We're going to get hopefully 123 uh, to change that to be Len chunks. + +02:25.970 --> 02:28.310 +So we see do we have 123. + +02:28.340 --> 02:29.570 +Yes we do. + +02:29.600 --> 02:31.700 +Let's check that we have the right metadata. + +02:31.700 --> 02:33.590 +We have the four types. + +02:33.590 --> 02:35.840 +So far nothing is different. + +02:35.840 --> 02:36.980 +This is all the same. + +02:36.980 --> 02:38.000 +But look at this. + +02:38.000 --> 02:40.550 +One line has changed here cunningly. + +02:40.580 --> 02:44.060 +Uh, I didn't need to draw quite so much attention to it. + +02:44.060 --> 02:45.860 +I think you probably spotted it already. + +02:45.860 --> 02:49.850 +So we used to say vector store is chroma dot from documents. + +02:49.880 --> 02:53.720 +And we passed in the chunks embeddings and the persist directory. + +02:53.720 --> 02:58.580 +And now we just say FAISS dot from documents same construct. + +02:58.610 --> 03:00.380 +We also pass in the chunks.
+ +03:00.380 --> 03:01.970 +We also pass in the embeddings. + +03:01.970 --> 03:06.320 +And of course we don't pass in a persist directory because FAISS doesn't persist on disk. + +03:06.620 --> 03:09.980 +Um, now these two lines here are also different. + +03:09.980 --> 03:14.750 +So the lower level ways to ask questions of the data store are different. + +03:14.750 --> 03:20.300 +So if you're going to be using FAISS, it's useful to have this to hand so you know how you can query + +03:20.300 --> 03:23.600 +for things like the number of vectors and the dimensionality. + +03:23.630 --> 03:27.530 +I don't know if you remember how many dimensions that there were when we did this in chroma. + +03:27.650 --> 03:32.630 +Um, but it's the same number of dimensions we're coming up with here. + +03:32.630 --> 03:36.590 +We're using the same OpenAI embeddings to vectorize. + +03:36.590 --> 03:40.550 +So we're still using the same LLM to create the vectors. + +03:40.550 --> 03:45.620 +The thing that has changed is how we are storing those vectors, not in chroma but in FAISS. + +03:46.130 --> 03:50.540 +Um, and so uh, no surprise there 123 vectors, one for each chunk. + +03:50.540 --> 03:54.470 +And they have these dimensions coming back from OpenAI. + +03:55.190 --> 04:01.370 +Uh, so the other thing that's changed has been this pre-work section was different in day four. + +04:01.400 --> 04:04.010 +I'll remind you in day four, the pre-work. + +04:04.010 --> 04:05.660 +Let's go and take a look at that. + +04:06.080 --> 04:07.850 +Uh, ba ba ba ba. + +04:08.450 --> 04:14.270 +Uh, the pre-work actually looks a bit simpler just plucking out the vectors, documents, and doc types + +04:14.300 --> 04:15.890 +from chroma. + +04:15.890 --> 04:19.970 +And you can look at the ways that you query chroma to get this data. + +04:20.170 --> 04:22.810 +Um, and this is how I did it for FAISS. + +04:22.840 --> 04:25.840 +There might be a tighter way to do that, but this seems simple enough.
+ +04:25.840 --> 04:31.810 +I just collect the same vectors, documents, doc types, colors, and I map it into the color that + +04:31.990 --> 04:38.410 +Plotly is expecting so that we can just plot the same, uh, diagram so we can visualize our vectors + +04:38.410 --> 04:40.480 +just as we did before in 2D. + +04:40.600 --> 04:44.290 +This code is identical with one tiny exception. + +04:44.320 --> 04:45.670 +See if you can spot it. + +04:45.670 --> 04:47.290 +That's the only change. + +04:47.290 --> 04:52.570 +I've changed the title from saying chroma to saying FAISS, but otherwise the code is the same. + +04:52.750 --> 04:55.390 +Uh, we will visualize our data store. + +04:55.390 --> 05:00.250 +And so here then are the vectors as they are represented in FAISS. + +05:00.250 --> 05:04.660 +And of course, as you would expect, it's the same vectorization approach. + +05:04.660 --> 05:06.430 +So it looks pretty similar. + +05:06.430 --> 05:11.020 +We're just using a different underlying technology as our vector data store. + +05:11.260 --> 05:13.960 +Uh, and we can of course represent that in 3D. + +05:13.990 --> 05:21.190 +And we're now looking at the 3D representation in FAISS and nice to see that actually this time we've + +05:21.190 --> 05:24.370 +got, it's uh nicely it's very nicely separated. + +05:24.400 --> 05:25.060 +There we go. + +05:25.090 --> 05:26.560 +Anyway, I could look at that all day. + +05:27.160 --> 05:28.510 +So you get that sense. + +05:28.510 --> 05:31.930 +And then the code to bring it all together is identical. + +05:31.930 --> 05:33.310 +I haven't changed this at all. + +05:33.340 --> 05:38.020 +Vector stored as retriever can be called on chroma, or it can be called on FAISS. + +05:38.020 --> 05:39.190 +And it is the same. + +05:39.190 --> 05:39.940 +So there we go. + +05:39.970 --> 05:40.750 +We run it. + +05:40.780 --> 05:41.740 +No errors. + +05:41.740 --> 05:42.370 +It's fine.
+ +05:42.370 --> 05:45.550 +Let's go straight to bringing up Gradio. + +05:45.820 --> 05:49.180 +Uh, and here we go. + +05:49.210 --> 05:51.820 +Here is our Gradio interface. + +05:52.150 --> 05:58.690 +Um, now, one of the things that I meant to show you last time that I now take this chance, and you + +05:58.690 --> 06:04.600 +can see the same will apply in chroma as well, is I can take questions like, uh, like you remember + +06:04.600 --> 06:07.060 +last time I asked, what did Avery do before? + +06:07.060 --> 06:11.710 +And it was cunning enough to look up the right context, even though I didn't say Lancaster and I spelled + +06:11.710 --> 06:17.500 +it with a lowercase a, we can take that even further, and I can say something like, what did? + +06:17.500 --> 06:21.370 +And I can spell Avery's name wrong like that. + +06:21.400 --> 06:24.400 +What did Avery do before? + +06:25.870 --> 06:27.670 +Um, uh. + +06:27.670 --> 06:32.920 +And I can run this piece of code like it is and have a look at what's come back. + +06:33.130 --> 06:40.390 +It has correctly identified that I'm talking about Avery Lancaster, and it looked up her HR document + +06:40.390 --> 06:45.580 +and it's again correctly identified that she worked at Innovate Insurance Solutions. + +06:45.670 --> 06:48.760 +Uh, we can might as well go in and just quickly check. + +06:48.760 --> 06:51.700 +Since it's set, it will go to the employees documents. + +06:51.700 --> 06:53.500 +We find Avery Lancaster. + +06:53.500 --> 06:54.580 +Here it is. + +06:54.580 --> 06:56.560 +Here is her HR record. + +06:56.560 --> 06:57.700 +Let's see what she did. + +06:57.700 --> 07:03.310 +She was indeed at Innovate Insurance Solutions before founding Insurellm. + +07:03.550 --> 07:06.010 +Uh, it's good to see it didn't invent that. + +07:06.340 --> 07:16.270 +Uh, so, um, uh, the bottom line is that it's this this gives you a real sense that it didn't just + +07:16.270 --> 07:17.530 +do text matching.
+ +07:17.530 --> 07:23.230 +It didn't even, uh, get fussed by the wrong case that it was in lowercase and uppercase. + +07:23.230 --> 07:30.160 +It actually was able to identify that Avery spelt wrong, has the same meaning as Avery spelt with a + +07:30.190 --> 07:31.870 +Y Avery Lancaster. + +07:31.870 --> 07:39.220 +It has enough common sense to recognize that that is very plausibly what we were after. + +07:39.220 --> 07:44.890 +And again, the reason is because when it turned this into a vector and it put it in the vector and + +07:44.890 --> 07:51.640 +it looked in the vector data store for the vectors that were close to that, it found the Avery Lancaster's + +07:51.820 --> 07:56.050 +HR record as being something that was close to that in the data store. + +07:56.050 --> 08:00.430 +So I think it's fascinating to see that you can spell things wrong and it still works. + +08:00.430 --> 08:06.580 +And it's such a clear example of how much better using the vector lookup approach is than using the + +08:06.580 --> 08:11.080 +brute force technique that we used in the first session, which obviously would have failed hopelessly + +08:11.080 --> 08:12.130 +with this test. + +08:12.190 --> 08:17.890 +So I will let you prove to yourself that the same test will work if you use chroma, of course, and + +08:17.890 --> 08:19.720 +you can try out both, but you will see. + +08:19.720 --> 08:23.350 +I hope that chroma and FAISS have both worked very well indeed. + +08:23.440 --> 08:26.020 +But most importantly, you've seen that. + +08:26.050 --> 08:33.220 +True to what I explained before, LangChain makes it super simple to switch out different vector data stores + +08:33.220 --> 08:37.510 +behind the scenes and use the same plumbing for your Rag workflow. + +08:38.080 --> 08:39.760 +All right, back to the slides.
diff --git a/week5/community-contributions/subtitles/srts/59297723/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297723/ja_JP.srt new file mode 100755 index 0000000..35f8936 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297723/ja_JP.srt @@ -0,0 +1,358 @@ +WEBVTT + +00:00.800 --> 00:02.300 +だから、 あなたが何を考えているかは分かる。 + +00:02.300 --> 00:03.800 +何が起こっているんだ? + +00:03.830 --> 00:05.000 +もう5日目だ。 + +00:05.030 --> 00:06.740 +週目の5日目だ。 + +00:06.770 --> 00:08.840 +なぜ彼はJupyterノートブックを開いているのか? + +00:08.840 --> 00:10.730 +それが4日目だ。 5. + +00:10.730 --> 00:21.110 +ベクター・データ・ストアを別のものに変更するのがいかに簡単かを純粋に知ってもらうために、 4日目にはちょっとした逸脱があるからだ。 + +00:21.110 --> 00:34.310 +また、 このような抽象化されたデータ・ストアで作業することがいかに簡単であるかをお見せすることもできます。 + +00:34.340 --> 00:39.140 +さて、 説明しておかなければならないのは、 これは本当のベクトル・データ・ストアではないということだ。 + +00:39.140 --> 00:47.810 +Facebook AI Similarity Searchの略で、 Facebookが公開しているオープンソースのライブラリで、 + +00:47.840 --> 00:58.370 +他のベクトルに近いベクトルを素早く検索することができる。 + +00:58.370 --> 01:09.230 +Langshanは適切な抽象化機能を備えているので、 + +01:09.230 --> 01:19.220 +chromaの代わりに使うことができます。 + +01:19.220 --> 01:22.310 +そのため、 以前はクロマを使っていた。 + +01:22.310 --> 01:26.090 +つまり、 より深刻なハードコアデータストアの悪癖なのだ。 + +01:26.510 --> 01:29.360 +クロマの代わりにそれを使えばいい。 + +01:29.360 --> 01:37.610 +抽象化と他のいくつかのことを除けば、 コードはほとんど同じになるからだ。 + +01:38.240 --> 01:42.110 +そこで、 このインポートでは前回と同様にインポートを実行する。 + +01:42.110 --> 01:45.980 +クロマのインポートをコメントアウトしているのがわかるだろう。 + +01:45.980 --> 01:52.160 +Chromaはここには含まれず、 代わりにフェイスブックのAI類似性検索を取り込んでいる。 + +01:52.190 --> 01:59.930 +注目すべきは、 フェイスにはCPUとGPUの2つのバリエーションがあることだ。 + +01:59.930 --> 02:04.490 +そして、 この環境では、 もちろん、 PipはCPU版をインストールしていますが、 GPU上で動作する高性能プロジェクトでは、 + +02:04.490 --> 02:13.880 +GPU版を実行することで、 猛烈なスピードで動作させることができるので、 バイスをインポートしましたが、 エラーはありませんでした。 + +02:14.150 --> 02:15.650 +いくつかのことを準備する。 + +02:15.650 --> 02:17.300 +これはすべて同じコードだ。 + +02:17.300 --> 02:19.220 +以前と同じようにすべてを積み込む。 + +02:19.220 --> 02:25.970 +うまくいけば、 123をレンチャンクに変更できるかもしれない。 + +02:25.970 --> 02:28.310 +では、 123はあるのか? 
+ +02:28.340 --> 02:29.570 +そうだ。 + +02:29.600 --> 02:31.700 +正しいメタデータがあることを確認しよう。 + +02:31.700 --> 02:33.590 +私たちには4つのタイプがある。 + +02:33.590 --> 02:35.840 +今のところ何も変わっていない。 + +02:35.840 --> 02:36.980 +これはすべて同じだ。 + +02:36.980 --> 02:38.000 +しかし、 これを見てほしい。 + +02:38.000 --> 02:40.550 +ここで一行、 狡猾に変更されている。 + +02:40.580 --> 02:44.060 +ああ、 そこまで注意を引く必要はなかったよ。 + +02:44.060 --> 02:45.860 +もうお気づきのことと思う。 + +02:45.860 --> 02:49.850 +だから私たちは、 ベクターストアはドキュメントからのクロマ・ドットだと言っていた。 + +02:49.880 --> 02:53.720 +そして、 チャンクの埋め込みとパーシスト・ディレクトリを渡した。 + +02:53.720 --> 02:58.580 +そして今、 私たちは同じ構成からバイスドットと言っているだけだ。 + +02:58.610 --> 03:00.380 +チャンクも渡す。 + +03:00.380 --> 03:01.970 +また、 埋め込みも渡す。 + +03:01.970 --> 03:06.320 +バイスはディスク上に残らないからだ。 + +03:06.620 --> 03:09.980 +ええと、 この2本のラインも違う。 + +03:09.980 --> 03:14.750 +そのため、 データストアに質問する下位レベルの方法は異なる。 + +03:14.750 --> 03:23.600 +Feistを使うのであれば、 これを手元に置いておくと、 ベクトル数や次元数などのクエリーの仕方がわかって便利です。 + +03:23.630 --> 03:27.530 +クロマでこれをやったとき、 いくつの次元があったか覚えているかどうかわからない。 + +03:27.650 --> 03:32.630 +うーん、 でも、 ここで考えているのは同じ次元の数だよ。 + +03:32.630 --> 03:36.590 +ベクトル化には同じOpenAIのエンベッディングを使っている。 + +03:36.590 --> 03:40.550 +だから、 ベクターを作るために同じLLMを使っていることに変わりはない。 + +03:40.550 --> 03:45.620 +変更されたのは、 これらのベクターをクロマではなくファイズに保存する方法だ。 + +03:46.130 --> 03:50.540 +各チャンクに1つずつ、 計123のベクターがある。 + +03:50.540 --> 03:54.470 +そして、 彼らはOpenAIから戻ってくる次元を持っている。 + +03:55.190 --> 04:01.370 +もうひとつ変わったのは、 プレワークのセクションが4日目から変わったことだ。 + +04:01.400 --> 04:04.010 +4日目のプレワークで思い出してほしい。 + +04:04.010 --> 04:05.660 +それを見てみよう。 + +04:06.080 --> 04:07.850 +ええと、 バ、 バ、 バ、 バ。 + +04:08.450 --> 04:15.890 +クロマからベクター、 ドキュメント、 ドキュメントタイプを抜き出すだけです。 + +04:15.890 --> 04:19.970 +そして、 このデータを得るためにクロマに問い合わせる方法を見ることができる。 + +04:20.170 --> 04:22.810 +それで、 悪徳業者にはこうしたんだ。 + +04:22.840 --> 04:25.840 +もっときっちりしたやり方があるかもしれないが、 これなら簡単そうだ。 + +04:25.840 --> 04:31.810 +同じベクトル、 ドキュメント、 ドキュメントタイプ、 色を集めて、 それをPlotlyが期待する色にマッピングして、 + +04:31.990 --> 04:40.480 +以前2Dでやったのと同じようにベクトルを視覚化できるように、 同じ図を描くだけです。 + +04:40.600 --> 04:44.290 +このコードは1つの小さな例外を除いて同一である。 + +04:44.320 
--> 04:45.670 +見分けられるかな? + +04:45.670 --> 04:47.290 +それが唯一の変化だ。 + +04:47.290 --> 04:52.570 +タイトルをクロマからバイスに変更しましたが、 それ以外のコードは同じです。 + +04:52.750 --> 04:55.390 +では、 データストアを可視化します。 + +04:55.390 --> 05:00.250 +そして、 これがバイスで表現されたベクトルである。 + +05:00.250 --> 05:04.660 +そしてもちろん、 ご想像の通り、 同じベクトル化のアプローチだ。 + +05:04.660 --> 05:06.430 +だから、 かなり似ている。 + +05:06.430 --> 05:11.020 +ベクター・データストアとして別の基礎技術を使っているだけだ。 + +05:11.260 --> 05:13.960 +もちろん、 それを3Dで表現することもできる。 + +05:13.990 --> 05:24.370 +そして今、 バイスで3D表現を見ているところだが、 今回はとてもきれいに分離されている。 + +05:24.400 --> 05:25.060 +これでよし。 + +05:25.090 --> 05:26.560 +とにかく、 一日中見ていても飽きないよ。 + +05:27.160 --> 05:28.510 +だから、 そういう感覚があるんだ。 + +05:28.510 --> 05:31.930 +そして、 それをまとめるコードも同じだ。 + +05:31.930 --> 05:33.310 +これはまったく変えていない。 + +05:33.340 --> 05:38.020 +レトリバーとして保存されたベクターは、 クロマで呼び出すこともできるし、 フェースで呼び出すこともできる。 + +05:38.020 --> 05:39.190 +それは同じだ。 + +05:39.190 --> 05:39.940 +そうだ。 + +05:39.970 --> 05:40.750 +我々はそれを実行する。 + +05:40.780 --> 05:41.740 +エラーはない。 + +05:41.740 --> 05:42.370 +大丈夫だよ。 + +05:42.370 --> 05:45.550 +さっそくグラディオの話をしよう。 + +05:45.820 --> 05:49.180 +ええと、 それで......。 + +05:49.210 --> 05:51.820 +これがグラディオのインターフェイスだ。 + +05:52.150 --> 05:58.690 +ええと、 前回お見せしようと思っていたことの1つで、 + +05:58.690 --> 06:07.060 +今この機会にお見せしようと思っています。 + +06:07.060 --> 06:11.710 +ランカスターとは言っていないし、 + +06:11.710 --> 06:17.500 +スペルも小文字のaを使った。 + +06:17.500 --> 06:21.370 +エイブリーの名前のスペルも間違えるしね。 + +06:21.400 --> 06:24.400 +エイブリーは以前何をしていたのですか? 
+ +06:25.870 --> 06:27.670 +ええと、 ええと。 + +06:27.670 --> 06:32.920 +そして、 このコードをそのまま実行して、 何が戻ってきたかを見ることができる。 + +06:33.130 --> 06:45.580 +私が話しているのはエイブリー・ランカスターのことだと正しく認識され、 彼女の人事文書を調べ、 彼女がイノベート・インシュアランス・ソリューションズで働いていたこともまた正しく認識された。 + +06:45.670 --> 06:48.760 +ええと、 中に入って手早く確認したほうがいいかもしれない。 + +06:48.760 --> 06:51.700 +設定されているので、 従業員の書類に移動する。 + +06:51.700 --> 06:53.500 +エイブリー・ランカスターを見つけた。 + +06:53.500 --> 06:54.580 +これだ。 + +06:54.580 --> 06:56.560 +これが彼女の人事記録だ。 + +06:56.560 --> 06:57.700 +彼女が何をしたか見てみよう。 + +06:57.700 --> 07:03.310 +保険会社エルムを設立する前は、 イノベイト・インシュアランス・ソリューションズに在籍していた。 + +07:03.550 --> 07:06.010 +ああ、 それを発明しなかったのはいいことだ。 + +07:06.340 --> 07:17.530 +ええと、 要するに、 テキストマッチングだけではなかったということが実感できる。 + +07:17.530 --> 07:23.230 +小文字と大文字を間違えても騒がなかった。 + +07:23.230 --> 07:31.870 +エイブリーの綴りが間違っていても、 エイブリー・ランカスターのYの綴りと同じ意味であることがわかったのだ。 + +07:31.870 --> 07:39.220 +それが、 私たちが目指していたものである可能性が非常に高いことを認識するのに十分な常識を備えている。 + +07:39.220 --> 07:44.890 +そしてまた、 これをベクトルにしてベクトルに入れ、 ベクトル・データ・ストアでそれに近いベクトルを探したところ、 + +07:44.890 --> 07:56.050 +エブリー・ランカスターのHR記録がデータ・ストアでそれに近いものとして見つかったからだ。 + +07:56.050 --> 08:00.430 +だから、 スペルを間違えてもうまくいくというのは魅力的なことだと思う。 + +08:00.430 --> 08:12.130 +最初のセッションで使ったブルートフォース・テクニックよりも、 ベクター・ルックアップ・アプローチを使ったほうがはるかに優れていることがよくわかる。 + +08:12.190 --> 08:17.890 +だから、 クロマを使っても同じテストがうまくいくことを自分で証明してもらおう。 もちろん、 + +08:17.890 --> 08:19.720 +両方試してみればわかる。 + +08:19.720 --> 08:23.350 +クロマもバイスも、 実にうまく機能していることを願っている。 + +08:23.440 --> 08:26.020 +しかし、 最も重要なのは、 それを見たことだ。 + +08:26.050 --> 08:37.510 +先に説明したとおり、 ラングでは、 裏で異なるベクター・データ・ストアを切り替えて、 同じ配管をラグのワークフローに使うことができる。 + +08:38.080 --> 08:39.760 +よし、 スライドに戻ろう。 diff --git a/week5/community-contributions/subtitles/srts/59297723/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297723/ko_KR.srt new file mode 100755 index 0000000..199291e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297723/ko_KR.srt @@ -0,0 +1,421 @@ +WEBVTT + +00:00.800 --> 00:02.300 +무슨 생각 하시는지 알아요 + +00:02.300 --> 00:03.800 +무슨 일인가 싶죠 + +00:03.830 --> 
00:05.000 +오늘이 5일째예요 + +00:05.030 --> 00:06.740 +5주 차 중 5일째예요 + +00:06.770 --> 00:08.840 +왜 주피터 공책을 펼쳐놨죠? + +00:08.840 --> 00:10.730 +4일째라고 하죠 5분 + +00:10.730 --> 00:18.020 +넷째 날에 잠깐 편차가 생겼기 때문이죠 벡터 데이터 저장소를 다른 것으로 바꾸는 게 + +00:18.020 --> 00:21.110 +얼마나 간단한지 보여드리려고요 + +00:21.110 --> 00:27.020 +이건 아주 흔하고 많이 사용되니까 여러분께 보여드리기에 좋을 것 같았어요 다른 벡터 데이터 + +00:27.020 --> 00:32.930 +저장소를 표시하는 데 둘 다 사용하고 이런 추상화로 작업하는 게 얼마나 쉬운지도 보여드리기 + +00:32.960 --> 00:34.310 +위해서요 + +00:34.340 --> 00:39.140 +설명해드릴 게 있는데 이건 벡터 데이터 저장소가 아니에요 + +00:39.140 --> 00:47.810 +Facebook 인공지능 동시성 검색을 뜻합니다 페이스북이 만든 자주 사용되는 아주 일반적인 오픈 + +00:47.840 --> 00:54.860 +소스 라이브러리예요 서로 가까운 벡터에 대한 빠른 검색을 가능하게 하죠 그래서 일종의 + +00:54.890 --> 00:58.370 +인 메모리 벡터 스토어로 사용돼요 + +00:58.370 --> 01:04.160 +디스크에 남아 있진 않지만 벡터를 불러오고 유사한 다른 벡터를 검색하기 위해 + +01:04.160 --> 01:09.230 +사용할 수 있어요 라이브러리로도 사용할 수 있죠 유사한 검색을 하기 + +01:09.230 --> 01:16.520 +위해서요 다른 벡터에 가까운 벡터를 찾기 위해서요 랭샨은 그에 대해 올바른 추상화가 내장되어 있어요 + +01:16.520 --> 01:19.220 +그래서 크로마를 대체할 수 있죠 + +01:19.220 --> 01:22.310 +채도를 사용했는데 디스크에 영구적으로 남아 있었죠 + +01:22.310 --> 01:26.090 +좀 더 심각한 데이터 스토어 악습이죠 + +01:26.510 --> 01:29.360 +채도 대신 쓰면 돼요 + +01:29.360 --> 01:34.070 +얼마나 쉬운지 보여드릴게요 코드는 거의 동일할 테니까요 해당 추상화와 + +01:34.070 --> 01:37.610 +몇 가지 다른 걸 제외하면요 get get it + +01:38.240 --> 01:42.110 +이 import에서 아까처럼 import를 실행하고요. 
+ +01:42.110 --> 01:45.980 +크로마에 불러오기를 주석으로 달아 놓은 게 보이시죠 커닝이 안 돼요 + +01:45.980 --> 01:52.160 +크로마는 여기 포함되지 않습니다 대신 페이스 페이스북 인공지능 유사성 검색을 불러왔죠 + +01:52.190 --> 01:58.760 +두 가지 변형이 있다는 걸 알아두세요 CPU 변형과 GPU 변형이죠 Pip가 설치된 + +01:58.760 --> 01:59.930 +것에 따라서요 + +01:59.930 --> 02:04.490 +이 환경에선 Pip가 CPU 변종을 설치했지만 GPU에서 + +02:04.490 --> 02:11.750 +실행되는 고성능 프로젝트를 위해서는 엄청난 속도로 실행할 수 있습니다 바이스를 불러왔는데 에러가 + +02:11.750 --> 02:13.880 +없어요, 훌륭하죠 + +02:14.150 --> 02:15.650 +몇 가지 준비해 주세요 + +02:15.650 --> 02:17.300 +모두 같은 코드예요 + +02:17.300 --> 02:19.220 +전처럼 전부 실어요 + +02:19.220 --> 02:25.970 +get 123개만 있으면 렌 덩어리로 바꿀 수 있어요 + +02:25.970 --> 02:28.310 +자, 123개가 나왔네요 + +02:28.340 --> 02:29.570 +네, 맞아요 + +02:29.600 --> 02:31.700 +메타데이터가 맞는지 확인하죠 + +02:31.700 --> 02:33.590 +네 가지 종류가 있어요 + +02:33.590 --> 02:35.840 +지금까지는 달라진 게 없어요 + +02:35.840 --> 02:36.980 +다 똑같아요 + +02:36.980 --> 02:38.000 +하지만 이걸 보세요 + +02:38.000 --> 02:40.550 +한 줄이 교묘하게 바뀌었어요 + +02:40.580 --> 02:44.060 +그렇게 주의를 끌 필요는 없었어요 + +02:44.060 --> 02:45.860 +이미 눈치챈 것 같은데요 + +02:45.860 --> 02:49.850 +벡터 스토어는 크로마.문서로부터의 크로마.이죠 + +02:49.880 --> 02:53.720 +덩크 삽입과 지속 디렉터리도 통과시켰죠 + +02:53.720 --> 02:58.580 +이제 같은 구성체에서 bice.를 입력해요 + +02:58.610 --> 03:00.380 +덩어리도 나눠줘요 + +03:00.380 --> 03:01.970 +삽입구도 통과시키죠 + +03:01.970 --> 03:06.320 +디렉터리에는 전달되지 않습니다 바이스가 디스크에 남지 않기 때문이죠 + +03:06.620 --> 03:09.980 +여기 이 두 선도 달라요 + +03:09.980 --> 03:14.750 +데이터 저장소에 질문을 하는 낮은 레벨의 방법은 달라요 + +03:14.750 --> 03:20.300 +Feist를 사용할 경우 이게 있으면 유용합니다 벡터의 수나 차원값 + +03:20.300 --> 03:23.600 +같은 것에 대해 쿼리를 할 수 있으니까요 + +03:23.630 --> 03:27.530 +채도로 할 때 몇 개의 차원이 있었는지 기억하실지 모르겠네요 + +03:27.650 --> 03:32.630 +하지만 여기 나오는 차원의 개수는 같아요 + +03:32.630 --> 03:36.590 +같은 오픈AI 엠딩을 벡터라이즈로 사용하고 있어요 + +03:36.590 --> 03:40.550 +벡터를 생성하기 위해 동일한 LLM을 사용하고 있죠 + +03:40.550 --> 03:45.620 +바뀐 건 벡터들을 저장하는 방법이에요 크로마가 아니라 Feis에요 + +03:46.130 --> 03:50.540 +123마리의 벡터가 한 덩어리당 한 마리씩 있는 건 당연해요 + +03:50.540 --> 03:54.470 +오픈라이에서 온 차원들이 있어요 + +03:55.190 --> 04:01.370 +또 바뀐 건 작업 전 섹션이 4일째에 달라졌다는 거예요 + 
+04:01.400 --> 04:04.010 +4일째에 다시 알려드릴게요 사전 작업이죠 + +04:04.010 --> 04:05.660 +가서 한번 보죠 + +04:06.080 --> 04:07.850 +네, 네 + +04:08.450 --> 04:14.270 +pre-work는 더 간단해 보입니다. 크로마에서 벡터, 문서, doc 타입을 + +04:14.300 --> 04:15.890 +가져오는 것이죠. + +04:15.890 --> 04:19.970 +이 데이터를 get을 위해 채도 쿼리를 하는 방법도 볼 수 있죠 + +04:20.170 --> 04:22.810 +풍기단속반에서 일할 때도 이렇게 했어요 + +04:22.840 --> 04:25.840 +더 빡빡한 방법이 있을 수도 있지만 이 정도면 간단해 보이네요 + +04:25.840 --> 04:31.810 +문서, 문서 타입, 색깔 등 같은 벡터를 수집하고 예상되는 색상에 매핑합니다. + +04:31.990 --> 04:38.410 +다이어그램과 같은 방식으로 벡터를 시각화할 수 있습니다. 2D에서 했던 + +04:38.410 --> 04:40.480 +것처럼요. + +04:40.600 --> 04:44.290 +이 코드는 한 가지만 빼고 동일해요 + +04:44.320 --> 04:45.670 +찾을 수 있나 보세요 + +04:45.670 --> 04:47.290 +그게 유일한 변화죠 + +04:47.290 --> 04:52.570 +제목을 크로마에서바이스로 바꿨습니다 하지만 그 외 코드는 같아요 + +04:52.750 --> 04:55.390 +데이터 저장소를 시각화할 거예요 + +04:55.390 --> 05:00.250 +여기 벡터가 있습니다 악행에서 묘사되는 모습이죠 + +05:00.250 --> 05:04.660 +물론 예상하셨겠지만 벡터화 접근법은 같아요 + +05:04.660 --> 05:06.430 +그래서 비슷해 보이죠 + +05:06.430 --> 05:11.020 +벡터 데이터 저장소로 다른 기본 기술을 사용할 뿐이죠 + +05:11.260 --> 05:13.960 +물론 3D로 표현할 수 있죠 + +05:13.990 --> 05:21.190 +지금 바이스에서 3D 표현을 보고 있는데 아주 보기 좋네요 이번엔 + +05:21.190 --> 05:24.370 +아주 잘 분리됐어요 + +05:24.400 --> 05:25.060 +됐어요 + +05:25.090 --> 05:26.560 +온종일 볼 수도 있겠어요 + +05:27.160 --> 05:28.510 +Get it, get it, get it. 
그런 느낌이죠 + +05:28.510 --> 05:31.930 +그걸 한데 모으는 코드는 동일하고요 + +05:31.930 --> 05:33.310 +하나도 안 바꿨어요 + +05:33.340 --> 05:38.020 +레트리버로 저장된 벡터는 채도로 호출되거나 얼굴로 호출될 수 있어요 + +05:38.020 --> 05:39.190 +그게 그거죠 + +05:39.190 --> 05:39.940 +자, 됐어요 + +05:39.970 --> 05:40.750 +우리가 운영해요 + +05:40.780 --> 05:41.740 +오류는 없어요 + +05:41.740 --> 05:42.370 +괜찮아요 + +05:42.370 --> 05:45.550 +바로 그라디오 얘기를 하죠 + +05:45.820 --> 05:49.180 +자, 시작할게요 + +05:49.210 --> 05:51.820 +이게 그라디오 인터페이스예요 + +05:52.150 --> 05:58.690 +지난번에 보여드리려고 했던 것 중 하나가 채도 검사에서도 같은 + +05:58.690 --> 06:04.600 +효과를 볼 수 있어요 질문을 받을 수 있는데 에이버리가 전에 + +06:04.600 --> 06:07.060 +뭘 했는지 기억하시죠 + +06:07.060 --> 06:11.710 +올바른 컨텍스트를 검색할 만큼 교활합니다 랭커스터라고 + +06:11.710 --> 06:17.500 +하지 않고 소문자 a로 썼는데도요 더 나아가서 이렇게 말할 수도 있죠 + +06:17.500 --> 06:21.370 +에이버리 이름도 그렇게 틀리게 쓸 수 있어요 + +06:21.400 --> 06:24.400 +에이버리는 뭘 했죠? + +06:25.870 --> 06:27.670 +네 + +06:27.670 --> 06:32.920 +이 코드를 그대로 실행해 뭐가 나왔는지 보죠 + +06:33.130 --> 06:40.390 +에이버리 랭커스터에 대해 정확히 식별했습니다 그녀의 인사 기록을 찾아봤고 이노베이트 + +06:40.390 --> 06:45.580 +보험 솔루션에서 일했다는 걸 다시 정확히 식별했어요 + +06:45.670 --> 06:48.760 +들어가서 빨리 확인해 보죠 + +06:48.760 --> 06:51.700 +이미 정해졌으니까 직원들 서류로 가야죠 + +06:51.700 --> 06:53.500 +에이버리 랭커스터를 찾았어요 + +06:53.500 --> 06:54.580 +여기 있네요 + +06:54.580 --> 06:56.560 +인사 기록이에요 + +06:56.560 --> 06:57.700 +어떻게 했는지 보죠 + +06:57.700 --> 07:03.310 +이노베이트 보험 솔루션에서 일하긴 했죠 인큐어 엘름 창립 전에요 + +07:03.550 --> 07:06.010 +이게 발명품이 아니라 다행이네요 + +07:06.340 --> 07:16.270 +그러니까 요점은 이겁니다 이걸 보면 텍스트 매칭만 한 게 아니란 걸 알 수 + +07:16.270 --> 07:17.530 +있어요 + +07:17.530 --> 07:23.230 +심지어 잘못된 케이스로 비난받지도 않았어요 소문자나 대문자로요 get it get it + +07:23.230 --> 07:30.160 +철자가 틀렸다는 걸 밝혀냈어요 Y로 시작하는 랭커스터와 같은 + +07:30.190 --> 07:31.870 +의미라는 걸요 + +07:31.870 --> 07:39.220 +상식적으로 충분히 우리가 추구하는 게 그거라는 걸 알 수 있죠 + +07:39.220 --> 07:44.890 +왜냐하면 이것을 벡터로 바꿀 때 벡터로 놓았기 때문입니다. 벡터 데이터 + +07:44.890 --> 07:51.640 +스토어에서 그것과 가까운 벡터를 보면 에이버리 랭커스터의 HR 기록이 데이터 스토어에서 + +07:51.820 --> 07:56.050 +그것과 비슷한 것을 발견하게 되죠. 
+ +07:56.050 --> 08:00.430 +철자를 틀려도 잘 된다는 게 정말 놀라워요 + +08:00.430 --> 08:06.580 +벡터 찾기 접근법이 첫 번째 시간에 사용한 브루트 포스 기술보다 얼마나 나은지 + +08:06.580 --> 08:11.080 +보여주는 명백한 예입니다 이 테스트에서는 가망이 전혀 + +08:11.080 --> 08:12.130 +없었겠죠 + +08:12.190 --> 08:17.890 +채도를 사용하면 같은 결과가 나온다는 걸 증명하게 해 드릴게요 둘 다 해 볼 + +08:17.890 --> 08:19.720 +수 있지만 두고 보세요 + +08:19.720 --> 08:23.350 +채도와 바이스가 잘 맞았으면 좋겠네요 + +08:23.440 --> 08:26.020 +하지만 가장 중요한 건 그걸 봤다는 거예요 + +08:26.050 --> 08:33.220 +전에 설명드린 대로 랭은 아주 간단하게 뒤에서 다른 벡터 데이터 저장소를 변경하게 합니다 + +08:33.220 --> 08:37.510 +래그 워크플로우에도 같은 배관을 사용하고요 + +08:38.080 --> 08:39.760 +슬라이드로 돌아가죠 diff --git a/week5/community-contributions/subtitles/srts/59297733/en_US.srt b/week5/community-contributions/subtitles/srts/59297733/en_US.srt new file mode 100755 index 0000000..5afddde --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297733/en_US.srt @@ -0,0 +1,460 @@ +WEBVTT + +00:00.440 --> 00:05.900 +Welcome back to JupyterLab and welcome to your first experiment with the world of Long Chain. + +00:05.900 --> 00:10.970 +Let me just remind you one more time how important it is that you are going through these Jupyter notebooks, + +00:10.970 --> 00:11.840 +as I do. + +00:11.840 --> 00:16.490 +Ideally, at the same time as I'm talking to you, you also are able to have Jupyter Lab up and you + +00:16.490 --> 00:23.390 +have it turned to week five and to this day to day two, and you're able to step through this as I step + +00:23.390 --> 00:23.870 +through it. + +00:23.900 --> 00:29.360 +If that's not possible, then immediately afterwards as soon as you can, then then give this a shot. + +00:29.390 --> 00:33.860 +It's so important that you experience this for yourself, particularly as we start talking about concepts + +00:33.860 --> 00:36.830 +like text chunks and then later vectors. 
+ +00:36.860 --> 00:41.480 +It's going to be super important that you validate what I'm saying, that you experiment with it, that + +00:41.480 --> 00:46.790 +you see it for yourself, and try out the code and print out various things to get very comfortable + +00:46.790 --> 00:49.010 +with the way it's working behind the scenes. + +00:49.010 --> 00:54.080 +So we are, of course, in week five, in the folder we're on day two, and we're looking now at what + +00:54.080 --> 00:58.280 +is a bit of a copy of the previous day, but with more to come. + +00:58.760 --> 01:01.950 +We do some imports and now we've got some new imports. + +01:01.950 --> 01:07.020 +Our first time importing some code from LangChain, we're going to import some things called document + +01:07.020 --> 01:11.820 +loaders, which are utility classes which help us load in files. + +01:11.850 --> 01:17.460 +There's one called directory loader that loads in an entire folder and text loader for loading in an + +01:17.460 --> 01:19.320 +individual text file. + +01:19.440 --> 01:24.720 +I'm also importing something called character text splitter, which is something that's able to take + +01:24.720 --> 01:31.890 +in, uh, and divide a document into chunks of characters, as you will see. + +01:31.920 --> 01:34.170 +Let's run those imports. + +01:34.530 --> 01:39.060 +Uh, I set some constants that we're not actually going to use today, but we will do next time. + +01:39.390 --> 01:42.870 +Uh, and let's get to business. + +01:42.870 --> 01:51.360 +So you remember last time we did a hokey thing where we read in documents and put them in a dictionary? + +01:51.360 --> 01:52.770 +The key was the name of the document. + +01:52.770 --> 01:54.870 +The value was the contents of the document. + +01:54.870 --> 01:57.360 +Well, this time we're going to do something a bit smarter. + +01:57.360 --> 01:59.370 +Using LangChain's help.
+ +01:59.370 --> 02:03.670 +So we first get a list of the different folders in our knowledge base. + +02:03.700 --> 02:08.680 +You'll remember those folders are these company contracts employees and products. + +02:08.680 --> 02:10.990 +So we put that into folders. + +02:11.500 --> 02:20.110 +And now for each of those folders we are going to first get the type of document company contracts employees + +02:20.110 --> 02:21.100 +or products. + +02:21.370 --> 02:31.300 +And we are then going to load in this directory using the directory loader where you pass in the the + +02:31.300 --> 02:35.050 +the handle to it, the directory path. + +02:35.050 --> 02:40.210 +And we also pass in something called the text loader loader class, which tells it that we should use + +02:40.210 --> 02:45.220 +that to bring in each of these files because they are text files, and it's as simple as that. + +02:45.220 --> 02:49.420 +We then just call loader dot load and it will bring in all of those documents. + +02:49.510 --> 02:52.600 +Going to iterate through each of those documents. + +02:52.630 --> 02:56.920 +I'm going to say can we set the metadata on each document. + +02:57.460 --> 03:03.390 +We want to add in something called doc type and set that to be the Doctype, whether it's company contracts, + +03:03.390 --> 03:04.740 +employees or products. + +03:04.740 --> 03:08.550 +And then add that to a list called documents. + +03:09.030 --> 03:10.680 +Hope that made sense. + +03:10.920 --> 03:16.590 +I'm going to run it and just show you that as a result of doing that, we now have 31 objects of type + +03:16.590 --> 03:21.240 +document and I can show you what one of them looks like. + +03:22.740 --> 03:23.940 +Here it is. + +03:23.970 --> 03:27.150 +It is in the knowledge base directory. + +03:27.300 --> 03:30.210 +The metadata has something called source which tells you where it is. + +03:30.240 --> 03:33.360 +And then this is doctype is what we added in products. 
+ +03:33.930 --> 03:34.860 +So it's a product. + +03:34.860 --> 03:39.990 +It's called Rellm.md and that is the full contents of the file. + +03:40.020 --> 03:46.020 +Let's go and satisfy ourselves that if we go into products there is indeed something called rellm.md. + +03:46.710 --> 03:52.260 +And if I double click on that, we'll see presumably that it is the same thing that got loaded in. + +03:52.290 --> 03:56.010 +We can also look at the first document here. + +03:56.130 --> 03:58.410 +And it's also in products. + +03:58.410 --> 04:00.370 +And it's called Markellm. + +04:00.730 --> 04:03.580 +And there you can see where that comes from. + +04:03.580 --> 04:06.730 +And let's just pick some random number 24. + +04:07.360 --> 04:13.870 +The 24th one of our documents is an employee HR record for Maxine Thompson. + +04:13.870 --> 04:15.580 +And there is Maxine. + +04:15.580 --> 04:19.000 +She's a doc type employee and there is her contents. + +04:19.000 --> 04:22.570 +So nothing very complicated. + +04:22.720 --> 04:29.890 +We've loaded in the documents and we've given them a doc type, and they are 31 of them sitting in documents. + +04:30.250 --> 04:37.180 +Next thing we're going to do is use this text splitter thing, which is going to take the the documents, + +04:37.180 --> 04:41.080 +and it's going to divide each document into chunks of characters. + +04:41.080 --> 04:44.320 +And you specify two things to LangChain when you do this. + +04:44.350 --> 04:50.860 +One is the chunk size, which is roughly how many characters do you want to fit into each chunk? + +04:51.220 --> 04:56.410 +And I say roughly, because we're going to give LangChain some discretion to make sure that it that + +04:56.410 --> 05:03.500 +it tries to split these chunks in sensible boundaries where there's a space and an empty line or something, + +05:03.500 --> 05:06.620 +or a section between different parts of the document.
+ +05:06.620 --> 05:10.460 +So it's not cutting in the middle of a paragraph or in the middle of a word or something. + +05:10.460 --> 05:11.780 +That would make no sense. + +05:11.930 --> 05:17.870 +And that would result potentially in poor context that we'd end up providing to the LLM. + +05:18.740 --> 05:26.990 +Chunk overlap says that we don't want these chunks of characters to be, uh, to, to be completely + +05:26.990 --> 05:28.160 +separate from each other. + +05:28.160 --> 05:30.710 +We want to have some level of overlap between them. + +05:30.710 --> 05:35.240 +So there's some content of the document that's in common across two chunks. + +05:35.390 --> 05:42.290 +Uh, again, so that it's more likely if you put in a query that will will pluck out a bunch of chunks + +05:42.290 --> 05:44.090 +that will be relevant to that query. + +05:44.120 --> 05:49.550 +We don't want to to risk because of some association, because of some critical word. + +05:49.550 --> 05:55.340 +And it only gets included in one chunk that we don't include another chunk that's really close to it. + +05:55.340 --> 05:56.630 +That's equally important. + +05:56.630 --> 06:02.790 +So the chunk overlap gives us this way of having potentially multiple chunks that contain some of the + +06:02.790 --> 06:04.380 +same keywords. + +06:05.220 --> 06:07.980 +So that is a text splitter. + +06:07.980 --> 06:11.100 +And we just say split documents and we pass in the documents. + +06:11.100 --> 06:14.730 +And if I run that it's going to to run. + +06:14.730 --> 06:23.100 +And it warns me that one of the chunks that it created, uh, has a size of 1088, which is bigger than + +06:23.100 --> 06:24.150 +what we'd asked for. + +06:24.150 --> 06:28.710 +And again, that's because it's trying to be smart about how it respects boundaries. + +06:28.740 --> 06:32.730 +Um, and so it's this, this is the decision that it has made.
+ +06:32.730 --> 06:40.680 +So if we look at how many chunks we've ended up with, we've ended up with 123, uh, chunks, 123 chunks + +06:40.680 --> 06:42.270 +from our 31 documents. + +06:42.270 --> 06:47.190 +And what we can now do is we can pick a chunk, which we pick, pick chunk number five and have a look + +06:47.220 --> 06:48.240 +at that chunk. + +06:48.450 --> 06:53.070 +Uh, the chunk itself has metadata, just like a document had metadata. + +06:53.340 --> 06:55.890 +Um, it knows the source where it came from. + +06:56.100 --> 06:59.520 +And it has doc type, the doc type that we that we had set. + +06:59.650 --> 06:59.830 +up. + +06:59.830 --> 07:04.240 +So we know that this particular chunk has been plucked from the products. + +07:04.480 --> 07:09.490 +Um, and it's in fact the product summary about Markham. + +07:09.490 --> 07:15.400 +And you can see how it just starts with a new section and it ends at the end of that section. + +07:15.580 --> 07:18.100 +So it's been careful to respect boundaries. + +07:18.100 --> 07:21.460 +It's got a reasonable chunk that's about a thousand characters. + +07:21.610 --> 07:25.630 +Um, and there'll be some overlap perhaps with the chunk right before it. + +07:25.960 --> 07:28.090 +Uh, see if it chose to. + +07:29.470 --> 07:32.530 +Not in this case, the chunk before it is a very small chunk. + +07:32.710 --> 07:39.820 +Uh, so anyway, you can you can play around and see if you can find examples of where it's possible + +07:39.820 --> 07:46.510 +options where, uh, there are overlaps between chunks. + +07:46.660 --> 07:51.910 +Um, and so have a, have that as a quick to do go in and experiment with some of these chunks. + +07:51.910 --> 07:57.100 +Find out if you can get an example of where two chunks contain the same information. 
+ +07:58.330 --> 08:05.570 +So what we're going to do now is we're going to just inspect the Doctype metadata across these chunks + +08:05.570 --> 08:09.260 +and just convince ourselves that we have all the right doc types. + +08:09.260 --> 08:13.490 +So let's just see what what we have across all of our chunks. + +08:13.490 --> 08:18.230 +We have four doc types employees, contracts, company and products. + +08:18.230 --> 08:24.830 +Which is good because that of course, uh, exactly matches the four directories that we read in. + +08:24.830 --> 08:26.900 +So that all sounds good. + +08:27.440 --> 08:33.740 +Uh, let's now let me just, uh, let's, let's have a look to see if we were to look through each of + +08:33.740 --> 08:35.150 +these different chunks. + +08:35.150 --> 08:38.540 +Which chunks have the word Lancaster in it? + +08:38.570 --> 08:39.590 +Lancaster. + +08:39.620 --> 08:45.530 +Hopefully it's familiar to you, because that is the fictitious name of our fictitious CEO of our fictitious + +08:45.530 --> 08:46.070 +company. + +08:46.070 --> 08:49.250 +So Lancaster is her last name. + +08:49.250 --> 08:52.580 +And let's see which chunks have her last name in it. + +08:52.580 --> 08:53.600 +So here we go. + +08:53.840 --> 09:01.630 +Um, so the about insurance, uh, document Has her name in it, of course, as she was the founder + +09:01.630 --> 09:03.100 +of the of the company. + +09:03.490 --> 09:10.210 +Um, and then her HR record has Lancaster in it and the bottom of our HR record. + +09:10.210 --> 09:16.660 +Also, there's some other HR notes that mentions her, uh, by last name as well. + +09:17.110 --> 09:24.070 +So, uh, you probably realize this, but from the cheap and cheerful version of rag that we did last + +09:24.070 --> 09:28.930 +time, one of the big problems is that it just looks for the word Lancaster. 
+
+09:28.930 --> 09:35.380
+So if there's any information about the CEO that isn't reflected in the word Lancaster, then that
+
+09:35.380 --> 09:39.100
+would be missed in our in our toy version from last time.
+
+09:39.100 --> 09:45.340
+And you can see that if we do a search for Avery, her first name, you'll see that we get way more
+
+09:45.340 --> 09:49.360
+chunks because she's mentioned by first name all over the place.
+
+09:49.630 --> 09:51.760
+Um, and so, uh, yeah.
+
+09:51.790 --> 09:58.120
+And presumably also if we do CEO, we find that there's a bunch of chunks with with CEO in them as well
+
+09:58.240 --> 10:06.140
+that would potentially be missed if we were just, uh, looking at purely the, uh, those with the
+
+10:06.140 --> 10:07.460
+word Lancaster.
+
+10:07.520 --> 10:13.850
+So it gives you a sense that doing some kind of text based search through the documents is not a great
+
+10:13.850 --> 10:18.410
+way of doing it, and will miss important bits of context that we need to find.
+
+10:18.440 --> 10:25.250
+So what we're looking for in our vector search is something that can be smarter about the way it finds
+
+10:25.250 --> 10:31.850
+chunks that are relevant using not just text, but using some understanding of the meaning behind what
+
+10:31.850 --> 10:32.690
+you're looking for.
+
+10:32.720 --> 10:35.870
+And that, of course, is the big idea behind Rag.
+
+10:35.900 --> 10:43.280
+So at this point, hopefully you are proficient with reading in documents using the text loader and
+
+10:43.280 --> 10:45.710
+directory loader and dividing them into chunks.
+
+10:45.710 --> 10:50.780
+And if you do this exercise, you'll come in and play with the different chunks and convince yourself
+
+10:50.780 --> 10:54.440
+that they're being split in sensible places and that there's some overlap.
+
+10:54.440 --> 10:59.240
+And then we'll be ready to vectorize and put in vector databases.
diff --git a/week5/community-contributions/subtitles/srts/59297733/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297733/ja_JP.srt new file mode 100755 index 0000000..e3bed70 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297733/ja_JP.srt @@ -0,0 +1,385 @@ +WEBVTT + +00:00.440 --> 00:05.900 +JupyterLabへようこそ、 そしてLong Chainの世界での最初の実験へようこそ。 + +00:05.900 --> 00:11.840 +私のようにJupyterノートブックを使いこなすことがいかに重要か、 もう一度思い出してほしい。 + +00:11.840 --> 00:16.490 +理想的なのは、 私が話しているのと同時に、 あなたもJupyter Labを立ち上げ、 + +00:16.490 --> 00:23.870 +5週目、 そして2日目のこの日に向けて、 私がステップを踏んでいるように、 あなたもステップを踏むことができることです。 + +00:23.900 --> 00:29.360 +もしそれが無理なら、 できるだけ早くその直後に、 それからこれを試してみてほしい。 + +00:29.390 --> 00:33.860 +特に、 テキスト・チャンクや後のベクターといった概念について話し始めると、 + +00:33.860 --> 00:36.830 +これを自分で体験することはとても重要だ。 + +00:36.860 --> 00:41.480 +私の言っていることを検証し、 実験し、 自分の目で確かめ、 コードを試したり、 + +00:41.480 --> 00:46.790 +いろいろなものをプリントアウトしたりして、 舞台裏で動いていることに慣れることが、 + +00:46.790 --> 00:49.010 +これからとても重要になる。 + +00:49.010 --> 00:58.280 +もちろん、 私たちは5週目に入り、 フォルダーの中では2日目に入っている。 + +00:58.760 --> 01:01.950 +輸入もいくつかやっているし、 今は新しい輸入もある。 + +01:01.950 --> 01:07.020 +ラングチェーンからコードをインポートするのは初めてなので、 ドキュメント・ローダーと呼ばれる、 + +01:07.020 --> 01:11.820 +ファイルを読み込むためのユーティリティ・クラスをインポートします。 + +01:11.850 --> 01:19.320 +フォルダ全体を読み込むディレクトリローダーと、 個々のテキストファイルを読み込むテキストローダーと呼ばれるものがある。 + +01:19.440 --> 01:24.720 +文字テキスト分割ツールというものもインポートしています。 + +01:24.720 --> 01:31.890 +これは、 ご覧のように、 ドキュメントを文字の塊に分割するものです。 + +01:31.920 --> 01:34.170 +インポートを実行しよう。 + +01:34.530 --> 01:39.060 +ええと、 今日は実際に使わないけど、 次回使う定数をいくつか設定した。 + +01:39.390 --> 01:42.870 +ええと、 仕事に取り掛かろう。 + +01:42.870 --> 01:51.360 +前回、 文書を読んで辞書に載せるという馬鹿げたことをやったのを覚えているかい? 
+ +01:51.360 --> 01:52.770 +鍵は文書の名前だった。 + +01:52.770 --> 01:54.870 +その値はドキュメントの内容だった。 + +01:54.870 --> 01:57.360 +さて、 今回はもう少しスマートなことをするつもりだ。 + +01:57.360 --> 01:59.370 +ラングチェーンを使うのもいい。 + +01:59.370 --> 02:03.670 +そこでまず、 ナレッジ・ベース内のさまざまなフォルダーのリストを取得する。 + +02:03.700 --> 02:08.680 +これらのフォルダは、 これらの会社の契約社員や製品であることを覚えているだろう。 + +02:08.680 --> 02:10.990 +だから、 それをフォルダ分けしたんだ。 + +02:11.500 --> 02:21.100 +そして今、 それぞれのフォルダーについて、 まず文書会社の契約社員や製品の種類を取得する。 + +02:21.370 --> 02:35.050 +そして、 ディレクトリ・ローダーを使ってこのディレクトリをロードし、 ハンドルとディレクトリ・パスを渡す。 + +02:35.050 --> 02:40.210 +さらに、 テキスト・ローダー・ローダー・クラスというものを渡して、 テキスト・ファイルなので、 + +02:40.210 --> 02:45.220 +それを使ってそれぞれのファイルを取り込むように指示する。 + +02:45.220 --> 02:49.420 +そして、 loader dot loadを呼び出すだけで、 それらのドキュメントをすべて取り込んでくれる。 + +02:49.510 --> 02:52.600 +それぞれのドキュメントを繰り返し見ていく。 + +02:52.630 --> 02:56.920 +各ドキュメントにメタデータを設定することができるかと言います。 + +02:57.460 --> 03:04.740 +docタイプと呼ばれるものを追加し、 会社の契約書、 従業員、 製品などのDoctypeに設定したい。 + +03:04.740 --> 03:08.550 +そして、 それをドキュメントと呼ばれるリストに追加する。 + +03:09.030 --> 03:10.680 +ご理解いただけただろうか。 + +03:10.920 --> 03:16.590 +実行した結果、 ドキュメント・タイプのオブジェクトが31個になったので、 + +03:16.590 --> 03:21.240 +そのうちのひとつをお見せしよう。 + +03:22.740 --> 03:23.940 +これだ。 + +03:23.970 --> 03:27.150 +ナレッジベースのディレクトリにある。 + +03:27.300 --> 03:30.210 +メタデータにはソースと呼ばれるものがあり、 それがどこにあるかを教えてくれる。 + +03:30.240 --> 03:33.360 +そして、 これがdoctypeで、 製品に追加したものだ。 + +03:33.930 --> 03:34.860 +だから商品なんだ。 + +03:34.860 --> 03:39.990 +レルムMDと呼ばれるもので、 これがファイルの全内容だ。 + +03:40.020 --> 03:46.020 +製品に入れば、 確かに領域と呼ばれるものがあることを納得しに行こう。 md。 + +03:46.710 --> 03:52.260 +それをダブルクリックすると、 おそらく読み込まれたものと同じものであることがわかるだろう。 + +03:52.290 --> 03:56.010 +最初の文書はこちらでも見ることができる。 + +03:56.130 --> 03:58.410 +そして、 それは製品にも入っている。 + +03:58.410 --> 04:00.370 +マーク・カームと呼ばれている。 + +04:00.730 --> 04:03.580 +それがどこから来るのか、 おわかりいただけるだろう。 + +04:03.580 --> 04:06.730 +そして、 ランダムに24番を選んでみよう。 + +04:07.360 --> 04:13.870 +24番目の文書は、 マキシン・トンプソンの従業員人事記録である。 + +04:13.870 --> 04:15.580 +そしてマキシンがいる。 + +04:15.580 --> 04:19.000 +彼女はドクタータイプの社員で、 そこに彼女のコンテンツがある。 + +04:19.000 
--> 04:22.570 +だから複雑なことは何もない。 + +04:22.720 --> 04:29.890 +ドキュメントを読み込み、 ドキュメント・タイプを指定し、 31のドキュメントがドキュメントに収まっている。 + +04:30.250 --> 04:41.080 +次にすることは、 このテキスト・スプリッターというものを使って、 ドキュメントを取り込み、 それぞれのドキュメントを文字の塊に分割することだ。 + +04:41.080 --> 04:44.320 +その際、 ラングチェーンには2つのことを指定する。 + +04:44.350 --> 04:50.860 +ひとつはチャンクサイズで、 各チャンクにだいたい何文字入れたいか。 + +04:51.220 --> 04:56.410 +というのも、 ラングチェーンにある程度の裁量を与えて、 スペースと空白行、 + +04:56.410 --> 05:06.620 +あるいはドキュメントの異なる部分の間にセクションがあるような、 賢明な境界でこれらのチャンクを分割するようにするつもりだからだ。 + +05:06.620 --> 05:10.460 +だから、 段落の途中や単語の途中などでカットすることはない。 + +05:10.460 --> 05:11.780 +それでは意味がない。 + +05:11.930 --> 05:17.870 +その結果、 LMに提供するコンタクトが悪くなる可能性がある。 + +05:18.740 --> 05:28.160 +チャンクオーバーラップとは、 それぞれのキャラクターが完全に分離していることを望まないということだ。 + +05:28.160 --> 05:30.710 +両者の間にある程度の重なりを持たせたい。 + +05:30.710 --> 05:35.240 +つまり、 2つのチャンクに共通するドキュメントの内容があるわけだ。 + +05:35.390 --> 05:44.090 +つまり、 クエリを入れると、 そのクエリに関連するチャンクの束が抽出される可能性が高くなる。 + +05:44.120 --> 05:49.550 +私たちは、 何かの連想や批判的な言葉のためにリスクを冒したくない。 + +05:49.550 --> 05:55.340 +そして、 あるチャンクにだけ含まれ、 そのチャンクに本当に近い別のチャンクは含まれない。 + +05:55.340 --> 05:56.630 +それも同様に重要だ。 + +05:56.630 --> 06:04.380 +チャンクのオーバーラップによって、 同じキーワードを含む複数のチャンクを持つことができる。 + +06:05.220 --> 06:07.980 +これがテキスト・スプリッターだ。 + +06:07.980 --> 06:11.100 +そして、 ドキュメントを分割し、 ドキュメントを渡す。 + +06:11.100 --> 06:14.730 +そして、 私がそれを実行すれば、 それは実行されることになる。 + +06:14.730 --> 06:24.150 +そして、 作成されたチャンクのひとつが、 サイズが1088であることを警告してきた。 + +06:24.150 --> 06:28.710 +そしてまた、 それは境界線をどのように尊重するかをスマートにしようとしているからだ。 + +06:28.740 --> 06:32.730 +それで、 このような決断を下したんだ。 + +06:32.730 --> 06:42.270 +それで、 最終的にいくつのチャンクができたかというと、 31のドキュメントから123のチャンクができた。 + +06:42.270 --> 06:48.240 +そして今できることは、 チャンクを選ぶこと、 つまり5番のチャンクを選んで、 そのチャンクを見てみることだ。 + +06:48.450 --> 06:53.070 +ドキュメントにメタデータがあるように、 チャンク自体にもメタデータがあるんだ。 + +06:53.340 --> 06:55.890 +ええと、 そのソースがどこから来たかを知っている。 + +06:56.100 --> 06:59.520 +そして、 docタイプ、 我々が設定したdocタイプを持っている。 + +06:59.650 --> 06:59.830 +アップ。 + +06:59.830 --> 07:04.240 +だから私たちは、 この特定の塊が製品から抜き取られたものだと知っている。 + +07:04.480 --> 07:09.490 +ええと、 実際、 
マルカムについての商品概要なんだ。 + +07:09.490 --> 07:15.400 +新しいセクションで始まり、 そのセクションの終わりで終わっているのがわかるだろう。 + +07:15.580 --> 07:18.100 +だから、 境界線を尊重するように注意してきた。 + +07:18.100 --> 07:21.460 +1000文字程度の手頃な塊がある。 + +07:21.610 --> 07:25.630 +そして、 その直前のチャンクと重なる部分もあるだろう。 + +07:25.960 --> 07:28.090 +ええと、 そうすることを選んだかどうか見てみよう。 + +07:29.470 --> 07:32.530 +この場合、 その前のチャンクは非常に小さなチャンクだからだ。 + +07:32.710 --> 07:39.820 +とにかく、 チャンク間で重複がある場合、 それが可能なオプションの例を見つけられるかどうか、 + +07:39.820 --> 07:46.510 +試してみてください。 + +07:46.660 --> 07:51.910 +それで、 このチャンクのいくつかを使って実験してみるんだ。 + +07:51.910 --> 07:57.100 +つのチャンクに同じ情報が含まれている例を探してみてください。 + +07:58.330 --> 08:09.260 +だから、 これからすることは、 これらのチャンク全体のDoctypeメタデータを検査し、 すべての正しいdocタイプがあることを確信することだ。 + +08:09.260 --> 08:13.490 +では、 すべてのチャンクで何があるか見てみよう。 + +08:13.490 --> 08:18.230 +私たちには、 従業員、 契約、 会社、 製品の4つのドキュメント・タイプがあります。 + +08:18.230 --> 08:24.830 +というのも、 これはもちろん、 我々が読み込んだ4つのディレクトリと完全に一致するからだ。 + +08:24.830 --> 08:26.900 +それですべて良さそうだ。 + +08:27.440 --> 08:35.150 +では、 それぞれのチャンクを見てみよう。 + +08:35.150 --> 08:38.540 +ランカスターという単語が含まれているチャンクは? 
+ +08:38.570 --> 08:39.590 +ランカスター + +08:39.620 --> 08:46.070 +これが架空の会社の架空のCEOの架空の名前なのだから。 + +08:46.070 --> 08:49.250 +ランカスターは彼女の名字なんだね。 + +08:49.250 --> 08:52.580 +そして、 どの塊に彼女の名字が入っているか見てみよう。 + +08:52.580 --> 08:53.600 +それでは、 どうぞ。 + +08:53.840 --> 09:03.100 +保険についての書類には、 もちろん彼女の名前があった。 + +09:03.490 --> 09:10.210 +それから、 彼女の人事記録にはランカスターが載っていて、 私たちの人事記録の一番下にも載っている。 + +09:10.210 --> 09:16.660 +また、 他の人事のメモにも、 彼女のことが、 ええと、 苗字で書かれている。 + +09:17.110 --> 09:28.930 +それで、 おそらく皆さんもお気づきだと思いますが、 前回やった安くて陽気なバージョンのラグからすると、 大きな問題のひとつはランカスターという単語を探すだけだということです。 + +09:28.930 --> 09:39.100 +だから、 最悪のランカスターに反映されていないCEOに関する情報があれば、 それは前回のおもちゃのバージョンでは見逃されることになる。 + +09:39.100 --> 09:49.360 +彼女のファーストネームであるエイブリーを検索すると、 より多くのチャンクがヒットすることがわかるだろう。 + +09:49.630 --> 09:51.760 +うーん、 それで、 そうだね。 + +09:51.790 --> 09:58.120 +そしておそらく、 CEOとした場合、 ランカスターという単語が含まれるものだけを見ていたのでは見逃される可能性のある、 + +09:58.240 --> 10:07.460 +CEOが含まれるチャンクもたくさんあることがわかる。 + +10:07.520 --> 10:13.850 +つまり、 ドキュメントをテキストベースで検索するのは、 + +10:13.850 --> 10:18.410 +あまりいい方法ではないということだ。 + +10:18.440 --> 10:25.250 +私たちがベクター検索に求めているのは、 テキストだけでなく、 探しているものの背後にある意味を理解した上で、 + +10:25.250 --> 10:32.690 +関連性のあるチャンクを見つける方法をよりスマートにできるものです。 + +10:32.720 --> 10:35.870 +そしてもちろん、 それがラグの背後にある大きなアイデアなのだ。 + +10:35.900 --> 10:45.710 +この時点で、 テキスト・ローダーやディレクトリ・ローダーを使って文書を読み込み、 チャンクに分割することに慣れていることを願う。 + +10:45.710 --> 10:50.780 +そして、 この練習をすることで、 さまざまなチャンクと戯れるようになり、 理にかなった場所で分割されていること、 + +10:50.780 --> 10:54.440 +重なり合う部分があることを自分自身に納得させることができる。 + +10:54.440 --> 10:59.240 +そして、 ベクター化してベクターデータベースに入れる準備が整う。 diff --git a/week5/community-contributions/subtitles/srts/59297733/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297733/ko_KR.srt new file mode 100755 index 0000000..d59248e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297733/ko_KR.srt @@ -0,0 +1,442 @@ +WEBVTT + +00:00.440 --> 00:05.900 +유피터랩에 잘 오셨습니다 긴 사슬 세계에서의 첫 실험에 잘 오셨어요 + +00:05.900 --> 00:11.840 +다시 한번 말하지만 저처럼 주피터 노트들을 훑어보는 게 중요해요 + +00:11.840 --> 00:16.490 +이상적으로는 제가 여러분과 얘기하는 동시에 
주피터 랩을 띄워 + +00:16.490 --> 00:23.870 +5주 차로 바꾸고 오늘과 오늘, 그리고 오늘 2주 차로 해서 저처럼 이걸 하나씩 할 수 있어요 + +00:23.900 --> 00:29.360 +그게 불가능하다면 그 직후에 최대한 빨리 이걸 시도해 보세요 + +00:29.390 --> 00:33.860 +직접 경험해보는 게 아주 중요합니다 특히 텍스트 덩크나 + +00:33.860 --> 00:36.830 +벡터 같은 개념에 대해 얘기할 때요 + +00:36.860 --> 00:41.480 +제 말의 유효성을 검사하고 실험해보고 직접 보시는 게 아주 중요합니다 + +00:41.480 --> 00:46.790 +코드를 시험해보고 다양한 걸 프린트하세요 막후에서 작동하는 방식에 아주 + +00:46.790 --> 00:49.010 +익숙해지기 위해서요 + +00:49.010 --> 00:54.080 +5주 차 폴더에 들어온 게 2일째예요 지금 보시는 건 전날의 + +00:54.080 --> 00:58.280 +비트 복사본이지만 앞으로 더 나올 거예요 + +00:58.760 --> 01:01.950 +수입품도 있는데 새로운 것도 있네요 + +01:01.950 --> 01:07.020 +랭 체인에서 코드를 불러오는 건 처음이죠 문서 로더라는 걸 불러오겠습니다 + +01:07.020 --> 01:11.820 +파일을 불러오는 걸 도와주는 유틸리티 클래스죠 + +01:11.850 --> 01:17.460 +디렉토리 로더라는 게 있는데 전체 폴더에 로드하는 거죠 개별 텍스트 파일을 + +01:17.460 --> 01:19.320 +로드하는 텍스트 로더요 + +01:19.440 --> 01:24.720 +문자 텍스트 분할기라는 것도 불러오고 있어요 + +01:24.720 --> 01:31.890 +문서를 받아들여서 글자 덩어리로 나누는 거죠 + +01:31.920 --> 01:34.170 +수입 검사 결과나 보죠 + +01:34.530 --> 01:39.060 +제가 몇 가지 상수를 설정했는데 오늘은 사용하지 않겠지만 다음에 쓸 거예요 + +01:39.390 --> 01:42.870 +Get it, get it, get it, it! 자, 그럼 본론으로 들어가죠 + +01:42.870 --> 01:51.360 +지난번에 했던 유치한 거 기억나요? 문서를 읽어 사전에 넣는 거요 Put + +01:51.360 --> 01:52.770 +열쇠는 문서의 이름이었어요 + +01:52.770 --> 01:54.870 +그 서류의 내용이 가치 있는 거죠 + +01:54.870 --> 01:57.360 +이번에는 좀 더 현명한 비트를 만들 거예요 + +01:57.360 --> 01:59.370 +랑 체인을 쓰면 도움이 돼요 + +01:59.370 --> 02:03.670 +먼저, 지식 기반의 다양한 폴더들의 리스트를 get으로 하고요. 
+ +02:03.700 --> 02:08.680 +이 폴더들은 회사 계약직 직원들과 제품들이라는 걸 기억하실 거예요 + +02:08.680 --> 02:10.990 +그래서 그걸 폴더에 넣었어요 Put + +02:11.500 --> 02:21.100 +이제 각각의 폴더에 대해 먼저 문서 유형을 갖게 됩니다 회사 계약 직원 또는 제품이요 + +02:21.370 --> 02:31.300 +그런 다음 이 디렉터리를 로드합니다 디렉터리 로더를 사용해서요 그에 대한 핸들, 디렉터리 + +02:31.300 --> 02:35.050 +경로를 넘기는 곳이죠 + +02:35.050 --> 02:40.210 +텍스트 로더 로더 클래스라는 것도 제출합니다 각각의 파일을 가져오는 + +02:40.210 --> 02:45.220 +데 사용하라고 말해주죠 텍스트 파일이니까요 그렇게 간단해요 + +02:45.220 --> 02:49.420 +그런 다음 loader.load를 호출합니다 그럼 모든 문서를 갖고 오죠 + +02:49.510 --> 02:52.600 +각각의 문서를 반복하는 거죠 + +02:52.630 --> 02:56.920 +각 문서의 메타데이터를 설정할 수 있는지 묻죠 + +02:57.460 --> 03:03.390 +doc 타입이라는 것을 추가하고 Doctype로 설정해야 합니다 회사 계약이든 직원이나 + +03:03.390 --> 03:04.740 +제품이든 간에요 + +03:04.740 --> 03:08.550 +그런 다음 문서라는 목록에 추가하세요 + +03:09.030 --> 03:10.680 +이해하셨길 바라요 + +03:10.920 --> 03:16.590 +실행해 보겠습니다. 그 결과로 이제 문서의 31개 객체가 생겼습니다. + +03:16.590 --> 03:21.240 +그중 하나가 어떻게 생겼는지 보여드릴게요. + +03:22.740 --> 03:23.940 +여기 있네요 + +03:23.970 --> 03:27.150 +지식 기반 디렉터리에 있어요 + +03:27.300 --> 03:30.210 +메타데이터에는 소스라는 게 있어서 위치를 알려줘요 + +03:30.240 --> 03:33.360 +그리고 이건 우리가 제품에 추가한 Doctype이에요 + +03:33.930 --> 03:34.860 +그러니까 제품이죠 + +03:34.860 --> 03:39.990 +Ermmd라고 하는데 파일의 전체 콘텐츠죠 + +03:40.020 --> 03:46.020 +제품에 관해 생각해 보면 렐름이라는 게 있어요 md요 + +03:46.710 --> 03:52.260 +그걸 더블 클릭하면 아마도 로드된 것과 같은 것이겠죠 + +03:52.290 --> 03:56.010 +첫 번째 문서도 볼 수 있어요 + +03:56.130 --> 03:58.410 +제품에도 적용되죠 + +03:58.410 --> 04:00.370 +마크의 침착함이죠 + +04:00.730 --> 04:03.580 +왜 그런 건지 아시겠죠? 
+ +04:03.580 --> 04:06.730 +무작위로 24번을 뽑죠 + +04:07.360 --> 04:13.870 +24번째 서류는 맥신 톰프슨의 직원 인사 기록이에요 + +04:13.870 --> 04:15.580 +맥신도 있네요 + +04:15.580 --> 04:19.000 +문서 같은 직원이고 내용물도 있어요 + +04:19.000 --> 04:22.570 +복잡할 게 없죠 + +04:22.720 --> 04:29.890 +문서를 넣고 문서 유형을 줬는데 그중 31명이 서류에 앉아 있어요 + +04:30.250 --> 04:37.180 +다음으로 할 일은 텍스트 분할기입니다 문서를 가져다가 각 + +04:37.180 --> 04:41.080 +문서를 문자 덩어리로 나누죠 + +04:41.080 --> 04:44.320 +이걸 할 때 랑 체인에 두 가지를 지정하세요 + +04:44.350 --> 04:50.860 +하나는 덩어리 크기입니다 한 덩어리당 대략 몇 글자를 넣어야 하는가 하는 거죠 + +04:51.220 --> 04:56.410 +대강이요 랑 체인에 재량권을 줄 거니까요 합리적인 + +04:56.410 --> 05:03.500 +경계를 분할하도록 해야죠 빈 공간과 빈 줄 같은 거요 문서의 다른 + +05:03.500 --> 05:06.620 +부분 사이의 구역도요 + +05:06.620 --> 05:10.460 +단어나 단어 중간에 컷하는 게 아니에요 + +05:10.460 --> 05:11.780 +말이 안 되죠 + +05:11.930 --> 05:17.870 +그러면 잠재적으로 달 착륙선에 제공할 접촉이 좋지 않을 수 있죠 + +05:18.740 --> 05:26.990 +덩어리 중첩은 캐릭터들이 서로 완전히 분리되지 않게 하는 + +05:26.990 --> 05:28.160 +거예요 + +05:28.160 --> 05:30.710 +어느 정도 겹치게 하고 싶어요 + +05:30.710 --> 05:35.240 +두 덩크 사이에 공통으로 있는 문서의 내용이 있어요 + +05:35.390 --> 05:42.290 +다시 말하지만, 쿼리를 입력하면 그 쿼리와 관련된 많은 부분을 뽑아낼 가능성이 + +05:42.290 --> 05:44.090 +더 크죠. 
+ +05:44.120 --> 05:49.550 +어떤 연관성이나 비판적인 단어 때문에 위험을 감수하고 싶진 않아요 + +05:49.550 --> 05:55.340 +한 덩어리에만 포함돼요 우리가 포함하지 않는 다른 덩어리는 그것과 아주 가깝죠 + +05:55.340 --> 05:56.630 +그것도 중요해요 + +05:56.630 --> 06:02.790 +청크가 겹치면 잠재적으로 같은 키워드를 포함하는 여러 개의 + +06:02.790 --> 06:04.380 +청크가 생기죠 + +06:05.220 --> 06:07.980 +이게 텍스트 분할기예요 + +06:07.980 --> 06:11.100 +분할 문서라고 하고 문서를 제출하죠 + +06:11.100 --> 06:14.730 +이걸 실행하면 실행될 거예요 + +06:14.730 --> 06:23.100 +그리고 그 물질이 만든 덩어리 하나가 1088이라는 걸 경고해줘요 우리가 요구한 것보다 + +06:23.100 --> 06:24.150 +더 크죠 + +06:24.150 --> 06:28.710 +경계를 존중하는 방법을 영리하게 쓰려고 노력했기 때문이죠 + +06:28.740 --> 06:32.730 +그래서 이렇게 하기로 결정했어요 + +06:32.730 --> 06:40.680 +덩어리가 몇 개인지 보면 123개가 나왔어요 31개의 문서에서 덩어리가 123개 + +06:40.680 --> 06:42.270 +나왔죠 + +06:42.270 --> 06:47.190 +이제 할 수 있는 건 덩어리를 고르는 거예요 5번 덩어리를 골라요 그 덩어리를 + +06:47.220 --> 06:48.240 +보죠 + +06:48.450 --> 06:53.070 +청크 자체에 메타데이터가 있어요 문서 메타데이터처럼요 + +06:53.340 --> 06:55.890 +어디서 왔는지 알아요 + +06:56.100 --> 06:59.520 +우리가 설정한 문서 타입이 있어요 + +06:59.650 --> 06:59.830 +위로요 + +06:59.830 --> 07:04.240 +이 덩어리가 그 제품에서 분리된 거예요 + +07:04.480 --> 07:09.490 +마컴에 관한 제품 요약본이에요 + +07:09.490 --> 07:15.400 +새 섹션에서 시작해서 그 섹션의 끝에서 끝나는 걸 보실 수 있어요 + +07:15.580 --> 07:18.100 +그래서 경계를 존중하는 데 신중해졌죠 + +07:18.100 --> 07:21.460 +대략 천 개의 글자가 들어있죠 + +07:21.610 --> 07:25.630 +바로 전 덩어리랑 겹치는 부분이 있을 거예요 + +07:25.960 --> 07:28.090 +그러기로 했는지 보죠 + +07:29.470 --> 07:32.530 +이 경우엔 앞에 있던 게 아주 작아요 + +07:32.710 --> 07:39.820 +어쨌든, 여러분이 찾아보실 수 있어요 가능한 + +07:39.820 --> 07:46.510 +옵션들을요 청크 사이에 겹치는 거요 + +07:46.660 --> 07:51.910 +그래서 빨리 이 덩어리를 실험해 보고 싶어요 + +07:51.910 --> 07:57.100 +두 덩크에 같은 정보가 들어 있는 예시를 Get get 해 보세요 + +07:58.330 --> 08:05.570 +이제 이 청크를 통해 Doctype 메타데이터를 검사해 올바른 문서 형식이 + +08:05.570 --> 08:09.260 +다 있다고 자신을 설득할 거예요 + +08:09.260 --> 08:13.490 +그럼 모든 덩어리에 뭐가 있는지 보죠 + +08:13.490 --> 08:18.230 +doc 타입의 직원, 계약서 회사, 제품은 네 가지죠 + +08:18.230 --> 08:24.830 +좋은 거죠, 우리가 읽은 4개의 디렉터리와 정확히 일치하니까요 + +08:24.830 --> 08:26.900 +다 괜찮은 것 같네요 + +08:27.440 --> 08:33.740 +그럼 이제 제가 한번 살펴볼게요 이 덩어리들을 
하나씩 살펴보면 + +08:33.740 --> 08:35.150 +어떨까요? + +08:35.150 --> 08:38.540 +랭커스터라는 단어가 들어간 덩어리는요? + +08:38.570 --> 08:39.590 +랭커스터요 + +08:39.620 --> 08:46.070 +아시는지 모르겠지만 그건 가상의 이름이에요 가상의 회사의 가상의 CEO죠 + +08:46.070 --> 08:49.250 +성이 랭커스터군요 + +08:49.250 --> 08:52.580 +어떤 덩어리에서 그녀의 성이 나오는지 보죠 + +08:52.580 --> 08:53.600 +자, 시작하죠 + +08:53.840 --> 09:03.100 +보험 서류에 이름이 있어요 물론 그 회사의 창립자니까요 + +09:03.490 --> 09:10.210 +인사 기록에도 랭커스터가 있는데 우리 인사 기록 맨 아래예요 + +09:10.210 --> 09:16.660 +성까지 언급된 인사부 기록도 있고요 + +09:17.110 --> 09:24.070 +아마 아시겠지만 지난번에 했던 싸구려 명품 버전에서는 랭커스터라는 + +09:24.070 --> 09:28.930 +단어만 보이는 게 큰 문제였어요 + +09:28.930 --> 09:35.380 +최악의 랭커스터에 비치지 않는 CEO에 관한 정보가 있다면 지난 버전의 + +09:35.380 --> 09:39.100 +장난감에 그게 빠져 있을 거예요 + +09:39.100 --> 09:45.340 +보다시피 에이버리의 이름을 검색하면 정보가 훨씬 많이 나와요 get + +09:45.340 --> 09:49.360 +it's name으로만 언급돼 있거든요 + +09:49.630 --> 09:51.760 +그래서... 네 + +09:51.790 --> 09:58.120 +또한 CEO라고 적힌 부분도 많이 발견할 수 있을 + +09:58.240 --> 10:06.140 +겁니다 랭커스터라는 단어만 본다면 그 부분을 놓치게 될 수도 + +10:06.140 --> 10:07.460 +있죠 + +10:07.520 --> 10:13.850 +문서를 통해 텍스트 기반의 검색을 하는 게 좋은 방법이 아니라는 느낌을 줍니다 우리가 + +10:13.850 --> 10:18.410 +찾아야 할 컨텍스트의 중요한 부분을 놓칠 수도 있고요 + +10:18.440 --> 10:25.250 +벡터 검색에서 찾는 건 좀 더 영리하게 텍스트뿐 + +10:25.250 --> 10:32.690 +아니라 의미에 숨은 단어도 찾는 거죠 + +10:32.720 --> 10:35.870 +그게 바로 래그의 아이디어예요 + +10:35.900 --> 10:43.280 +이 시점에선 여러분이 문서 읽기에 능숙하길 바랍니다 텍스트 로더와 디렉터리 로더를 + +10:43.280 --> 10:45.710 +이용해 덩어리로 나누죠 + +10:45.710 --> 10:50.780 +이 운동을 하면 다른 덩어리들을 가지고 놀면서 합리적인 부분에서 분리되었고 + +10:50.780 --> 10:54.440 +겹치는 부분도 있다고 스스로 확신하게 되죠 + +10:54.440 --> 10:59.240 +그런 다음 벡터화하고 벡터 데이터베이스에 넣을 준비가 되죠 Let's go diff --git a/week5/community-contributions/subtitles/srts/59297735/en_US.srt b/week5/community-contributions/subtitles/srts/59297735/en_US.srt new file mode 100755 index 0000000..3e01d07 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297735/en_US.srt @@ -0,0 +1,532 @@ +WEBVTT + +00:00.890 --> 00:04.400 +And at last the time has come to see rag in action. 
+ +00:04.430 --> 00:07.460 +After all of this talk, and here we are. + +00:07.460 --> 00:10.730 +We're in, of course, the week five folder in JupyterLab. + +00:10.730 --> 00:16.190 +We're looking at day four, the notebook, and it is, of course a duplicate of day three with more + +00:16.190 --> 00:16.760 +added on. + +00:16.760 --> 00:21.710 +Since we're still solving the same problem of a knowledge worker for our fictitious insurance tech company + +00:21.710 --> 00:22.970 +in serum. + +00:23.330 --> 00:27.620 +Uh, and we will start with the usual imports as before. + +00:28.070 --> 00:34.790 +And now we have some imports for Lang chain, and I have sneakily added in two new imports from Lang + +00:34.790 --> 00:35.570 +chain memory. + +00:35.600 --> 00:38.570 +We're importing conversational buffer memory. + +00:38.600 --> 00:43.010 +And from Lang chain chains we're bringing in conversational retrieval chain. + +00:43.010 --> 00:46.850 +And these are two of the abstractions that I mentioned before. + +00:46.880 --> 00:52.310 +Now the astute amongst you will have noticed that the third abstraction is also lurking in here. + +00:52.310 --> 00:57.920 +I just already imported it in one of our previous lectures without mentioning it, but here it is. + +00:57.920 --> 01:02.180 +Chat OpenAI is already being imported as part of from Lang chain. + +01:02.180 --> 01:08.650 +OpenAI, uh, we've only been using so far the OpenAI embeddings, but this time we're going to bring + +01:08.680 --> 01:10.990 +chat OpenAI into the mix. + +01:11.050 --> 01:14.410 +Okay, so I better run those imports. + +01:15.010 --> 01:17.020 +Otherwise we're not going to get very far. + +01:17.620 --> 01:19.720 +So then we do some constants. + +01:19.720 --> 01:21.850 +We load our environment variables. + +01:21.850 --> 01:23.800 +And now you're quite familiar with this. + +01:23.800 --> 01:30.580 +But we go through and we bring in our documents from the knowledge base directory over there. 
+ +01:30.580 --> 01:35.350 +And now we're going to bring in the text chunks the let's see how many. + +01:35.350 --> 01:36.730 +But I do believe it's 123. + +01:36.760 --> 01:39.040 +Yes, 123 text chunks. + +01:39.040 --> 01:44.200 +And they are employees, products companies and contracts. + +01:44.590 --> 01:50.020 +And now we're going to again put them into our vector database. + +01:50.020 --> 01:52.300 +We delete and recreate the vector database. + +01:52.390 --> 01:57.640 +And we see that each vector has 1536 dimensions. + +01:57.640 --> 02:00.580 +Hard for us to visualize, but we can handle it in 2D. + +02:00.580 --> 02:01.930 +So that's what we do. + +02:01.930 --> 02:03.220 +And there they are. + +02:03.220 --> 02:07.450 +And we can also say let's see it in 3D as well. + +02:07.480 --> 02:08.810 +This is a little bit gratuitous. + +02:08.810 --> 02:12.650 +I didn't need to go through and rerun all of this, but I do love seeing these these diagrams. + +02:12.680 --> 02:16.820 +All right, so here, I didn't lie to you. + +02:16.820 --> 02:19.790 +It really is as simple as these four lines of code. + +02:19.820 --> 02:24.050 +We first create the new LM abstraction. + +02:24.080 --> 02:25.250 +We the chat OpenAI. + +02:25.280 --> 02:27.950 +We're going to put that that thing we've imported for a while. + +02:27.950 --> 02:29.570 +We're going to put it to use finally. + +02:29.960 --> 02:34.760 +And you supply a temperature and a model name, uh, memory. + +02:34.820 --> 02:39.710 +We create the conversational buffer memory passing in, as I mentioned before, the key and saying we + +02:39.710 --> 02:42.230 +want it returned in the form of a, of a list. + +02:42.560 --> 02:49.640 +Uh, we take our chroma vector store and we call as retriever to sort of wrap it in this abstraction, + +02:49.640 --> 02:52.580 +the retriever, which is needed by Lang chain. + +02:52.580 --> 02:58.370 +And that is where we now get to when we create the conversational retrieval chain. 
+ +02:58.370 --> 03:03.200 +And we simply pass in the LM, the retriever and the memory. + +03:03.740 --> 03:04.700 +That's all it is. + +03:04.820 --> 03:05.780 +Let's run that. + +03:07.040 --> 03:07.640 +Okay. + +03:07.640 --> 03:08.960 +So we ran it. + +03:09.140 --> 03:10.430 +Perhaps a slight anticlimax. + +03:10.430 --> 03:14.590 +I'm not sure what you're expecting, whether you thought maybe we were going to get suddenly rag appearing + +03:14.590 --> 03:15.490 +in front of us. + +03:15.700 --> 03:16.960 +We have to actually call it. + +03:16.990 --> 03:19.270 +We have to do something to make use of it. + +03:19.300 --> 03:25.690 +So what we're going to say is we're going to say, um, um, query calls. + +03:26.770 --> 03:38.230 +Uh, can you describe in short film in a few sentences, a nice we will start simple. + +03:38.320 --> 03:38.620 +All right. + +03:38.650 --> 03:39.370 +And this is what you say. + +03:39.370 --> 03:41.800 +You say result is conversation chain. + +03:41.800 --> 03:46.510 +The thing that we've just created and we call the method invoke. + +03:46.600 --> 03:52.600 +And invoke takes a dictionary which has question as a key. + +03:52.690 --> 03:53.620 +Did I spell that right? + +03:53.650 --> 03:53.830 +Yes. + +03:53.830 --> 03:54.850 +Question. + +03:55.150 --> 03:58.780 +And we have to put in our message query. + +04:00.010 --> 04:01.420 +And there we have it. + +04:01.420 --> 04:04.810 +And then we're going to print result. + +04:06.430 --> 04:09.610 +This should be something under the key of answer in that result. + +04:09.610 --> 04:15.640 +So this then is the final piece of code that we put together to try and make use of our Rag pipeline. + +04:15.680 --> 04:16.970 +So what do we think is going to happen? + +04:16.970 --> 04:18.620 +It's going to take that query. + +04:18.620 --> 04:21.110 +It's going to turn that into a vector. + +04:21.110 --> 04:24.230 +It's going to look that up in our Chrome data store. 
+ +04:24.260 --> 04:27.440 +It's going to find relevant chunks. + +04:27.440 --> 04:29.780 +And I say chunks plural. + +04:29.780 --> 04:31.250 +And we're going to come back to that. + +04:31.580 --> 04:33.320 +So it's going to find relevant chunks. + +04:33.320 --> 04:38.930 +And it's going to drop them into the prompt and send that to OpenAI. + +04:39.020 --> 04:43.130 +Uh, it's going to send it to GPT four mini because we've specified that here. + +04:43.130 --> 04:48.170 +And then with what comes back, it's going to package it up and put that in the answer key. + +04:48.200 --> 04:49.940 +Let's see if this works. + +04:52.400 --> 04:53.480 +There we go. + +04:53.510 --> 04:54.560 +There we go. + +04:54.560 --> 04:59.240 +We've just run our first Rag pipeline front to back in Shoreham. + +04:59.270 --> 05:05.810 +Innovative insurance tech firm founded by Avery Lancaster, a name we know well at this point and so + +05:05.810 --> 05:06.230 +on. + +05:06.230 --> 05:07.940 +And it's got bits of information. + +05:07.940 --> 05:12.980 +And I will leave this as an exercise for you to play around with, but you'll see that it's got that + +05:12.980 --> 05:18.530 +out from various documents, I think probably all from the about from the company section. + +05:18.530 --> 05:24.910 +Uh, but, uh, I hopefully you'll see that it has retrieved that from various chunks of information. + +05:25.960 --> 05:26.860 +All right. + +05:26.860 --> 05:28.750 +Well, wouldn't it be nice? + +05:29.230 --> 05:30.310 +Do you know where this is going? + +05:30.340 --> 05:35.020 +Wouldn't it be nice if we could package that up into a beautiful user interface, so that we could actually + +05:35.020 --> 05:37.480 +use it through a chat UI? + +05:37.480 --> 05:41.290 +And of course, Gradio makes it super simple as well. + +05:41.290 --> 05:47.890 +We know at this point all we have to do is create a chat function in the format that Gradio expects. 
+ +05:47.890 --> 05:50.170 +That takes a message and a history. + +05:50.170 --> 05:55.540 +So what I've done is I've taken exactly the line that we just wrote, and I've put it here, and then + +05:55.540 --> 05:58.480 +I return exactly what we're expecting. + +05:58.510 --> 06:03.070 +Now, you might see something curious about that, give you a moment to look at it and see anything, + +06:03.070 --> 06:04.540 +anything strike you as odd. + +06:06.070 --> 06:09.910 +Well, the one thing that might potentially strike you as odd is that we don't actually do anything + +06:09.910 --> 06:12.430 +with this history parameter. + +06:12.520 --> 06:14.440 +Uh, we ignore it completely. + +06:14.440 --> 06:19.570 +And the reason we ignore it is because, of course, Lang Chain already handles history for us. + +06:19.570 --> 06:26.820 +So even though Gradio has this chat UI that calls that sort of maintains the history in the user interface, + +06:26.820 --> 06:30.570 +and then calls back every time with the with the full chat history. + +06:30.600 --> 06:36.540 +We don't need that because because Lang has already given us this, this memory, and it's already keeping + +06:36.570 --> 06:38.430 +track of the conversation so far. + +06:38.460 --> 06:41.730 +All it needs to know is the new message and the new answer. + +06:42.570 --> 06:45.180 +So anyway, I'm rerunning the cell here. + +06:45.210 --> 06:47.970 +I actually already reran it, but I'm rerunning it to clean out the memory. + +06:47.970 --> 06:50.190 +So we're starting absolutely fresh. + +06:50.220 --> 06:55.230 +We call that we bring this up and now we can chat. + +06:55.500 --> 06:56.340 +Hi there. + +06:57.780 --> 06:58.380 +Hello. + +06:58.380 --> 06:59.700 +How can I assist you today? + +07:00.060 --> 07:02.520 +What is insurance? + +07:02.640 --> 07:03.090 +Um. + +07:08.490 --> 07:09.150 +There we go. + +07:09.180 --> 07:10.560 +No surprise. + +07:10.680 --> 07:13.230 +And now we can do something sneaky. 
+ +07:13.260 --> 07:14.970 +We can say something like. + +07:15.030 --> 07:19.470 +What did Avery do before? + +07:20.100 --> 07:25.260 +And now the reason I'm, uh, I'm bringing this up is that there's a few things that I want to surface + +07:25.290 --> 07:26.490 +in a question like this. + +07:26.490 --> 07:34.030 +So first of all, to state the obvious, our brute force solution before our toy version of Rag was + +07:34.030 --> 07:38.860 +able to look at Lancaster as a last name and search for that in documents, which was pretty hopeless. + +07:38.860 --> 07:41.560 +And if we tried the word aviary, then it failed on us. + +07:41.560 --> 07:45.160 +So it's of course interesting to try it here. + +07:45.190 --> 07:50.470 +Secondly, I have intentionally put aviary with a lowercase a, because anything that's doing a kind + +07:50.470 --> 07:56.770 +of text search is going to get that wrong because aviary is uh, is spelt differently. + +07:56.770 --> 08:02.620 +So, uh, it'll be interesting to see whether it can handle the fact that we've not used the right case. + +08:02.620 --> 08:07.540 +And then thirdly, I'm sort of taking advantage of this memory idea because I'm referring to what she + +08:07.540 --> 08:08.380 +did before. + +08:08.410 --> 08:11.920 +Meaning what did she do before she founded in Elm. + +08:11.920 --> 08:18.220 +And we'll see whether the model is has a good enough sense of what's going on to be able to keep the + +08:18.220 --> 08:22.780 +context, both retrieve relevant information about aviary and what she did before. + +08:22.810 --> 08:25.000 +That will need to come from her employee record. + +08:25.120 --> 08:29.350 +Uh, and also, uh, just answer the question in a coherent way. + +08:29.350 --> 08:30.310 +Let's see. + +08:33.420 --> 08:38.670 +Before founding in Shoreham, Avery Lancaster worked as a senior product manager at Innovate Insurance + +08:38.670 --> 08:40.620 +Solutions, where she developed groundbreaking insurance. 
+ +08:40.980 --> 08:43.830 +Prior to that, business analyst focusing on market trends. + +08:43.860 --> 08:49.860 +So I will leave it as an exercise for you to check her employee record and make sure that you're satisfied + +08:49.860 --> 09:00.360 +that it is indeed correctly finding it and getting the right background on Avery, uh, as a fun thing + +09:00.390 --> 09:01.200 +to try. + +09:01.320 --> 09:05.850 +Um, and also, of course, try other difficult questions. + +09:06.000 --> 09:10.200 +Uh, what does, um. + +09:12.960 --> 09:13.140 +Uh. + +09:13.140 --> 09:13.860 +Let's see. + +09:13.890 --> 09:20.730 +Calm do that was the car, um, uh, product. + +09:21.210 --> 09:21.390 +Um. + +09:21.420 --> 09:24.420 +Or how about, um, let's ask it differently. + +09:24.420 --> 09:35.650 +Let's say does insurance offer any products in the car in the auto Assurance space. + +09:36.550 --> 09:38.770 +Let's give it a nice, tricky question. + +09:39.040 --> 09:40.570 +And there we go. + +09:40.600 --> 09:40.960 +Yes. + +09:40.990 --> 09:44.530 +Insurance offers Calm, which is a portal for auto insurance companies. + +09:44.530 --> 09:53.020 +So it's able to even though I didn't use the word calm or even the word car, it was able to find the + +09:53.020 --> 09:58.060 +relevant document, the relevant chunk, and answer the question in an accurate way. + +09:58.540 --> 10:03.160 +So that is your first experiment with Rag. + +10:03.190 --> 10:04.930 +I hope you will now try this. + +10:04.930 --> 10:10.450 +I hope you will investigate, ask difficult questions, find out if you can break it or get it to give + +10:10.450 --> 10:15.190 +wrong information, or go off the rails and stretch it to its limits. 
+ +10:15.220 --> 10:21.340 +Next time, amongst a few other things, we'll talk about some of the ways, some of the common problems + +10:21.340 --> 10:26.260 +that you can get with these kinds of prompts, and how you can debug and find out more about what's + +10:26.260 --> 10:28.180 +going on under the covers. + +10:28.210 --> 10:36.670 +But I hope you enjoyed your first end to end rag pipeline built for our fictional insurance tech company + +10:36.670 --> 10:37.540 +in Shoreham. + +10:37.600 --> 10:38.620 +See you next time. diff --git a/week5/community-contributions/subtitles/srts/59297735/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297735/ja_JP.srt new file mode 100755 index 0000000..edcaccb --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297735/ja_JP.srt @@ -0,0 +1,478 @@ +WEBVTT + +00:00.890 --> 00:04.400 +そしてついに、 ボロの活躍を見る時が来た。 + +00:04.430 --> 00:07.460 +このような話の後、 我々はここにいる。 + +00:07.460 --> 00:10.730 +私たちはもちろん、 JupyterLabの第5週目のフォルダにいる。 + +00:10.730 --> 00:16.760 +私たちは4日目のノートを見ているが、 もちろん3日目と同じ内容で、 さらに追加されている。 + +00:16.760 --> 00:22.970 +私たちは、 血清中の架空の保険技術会社のために、 知識労働者という同じ問題を今も解決しているのだから。 + +00:23.330 --> 00:27.620 +ええと、 これまでと同じように輸入品から始めよう。 + +00:28.070 --> 00:35.570 +そして今、 ラング・チェーン用のインポートがいくつかある。 ラング・チェーンのメモリーから2つの新しいインポートをこっそり追加した。 + +00:35.600 --> 00:38.570 +会話バッファーのメモリーをインポートしている。 + +00:38.600 --> 00:43.010 +そしてラング・チェーン・チェーンから会話型検索チェーンを導入する。 + +00:43.010 --> 00:46.850 +そして、 この2つは前に述べた抽象的なものだ。 + +00:46.880 --> 00:52.310 +さて、 勘のいい方はお気づきだろうが、 ここには3つ目の抽象化も潜んでいる。 + +00:52.310 --> 00:57.920 +ただ、 以前の講義で言及せずにすでに輸入していたのだが、 ここに紹介しよう。 + +00:57.920 --> 01:02.180 +チャットOpenAIはすでにラングチェーンの一部としてインポートされている。 + +01:02.180 --> 01:10.990 +OpenAIは、 これまでOpenAIのエンベッディングしか使っていませんでしたが、 今回はチャットのOpenAIをミックスします。 + +01:11.050 --> 01:14.410 +それじゃ、 インポートをしたほうがいいね。 + +01:15.010 --> 01:17.020 +そうでなければ、 私たちは遠くへ行くことはできない。 + +01:17.620 --> 01:19.720 +そこで、 いくつかの定数を設定する。 + +01:19.720 --> 01:21.850 +環境変数をロードする。 + +01:21.850 --> 01:23.800 +そして今、 あなたはこのことをよく知っている。 
+ +01:23.800 --> 01:30.580 +しかし、 私たちはナレッジ・ベース・ディレクトリからドキュメントを持ってくる。 + +01:30.580 --> 01:35.350 +では、 テキスト・チャンクを取り込んで、 その数を見てみよう。 + +01:35.350 --> 01:36.730 +でも、 123だと信じている。 + +01:36.760 --> 01:39.040 +そう、 123のテキストチャンクだ。 + +01:39.040 --> 01:44.200 +そしてそれらは従業員であり、 製品会社であり、 契約である。 + +01:44.590 --> 01:50.020 +そして今度は、 それらを再びベクター・データベースに入れる。 + +01:50.020 --> 01:52.300 +ベクターデータベースを削除し、 再作成する。 + +01:52.390 --> 01:57.640 +各ベクトルは1536次元であることがわかる。 + +01:57.640 --> 02:00.580 +視覚化するのは難しいが、 2Dなら対応できる。 + +02:00.580 --> 02:01.930 +だから、 そうするんだ。 + +02:01.930 --> 02:03.220 +そして、 彼らはそこにいる。 + +02:03.220 --> 02:07.450 +そして、 3Dでも見ましょうと言うこともできる。 + +02:07.480 --> 02:08.810 +これはちょっとありがた迷惑だ。 + +02:08.810 --> 02:12.650 +でも、 こういう図を見るのは好きだよ。 + +02:12.680 --> 02:16.820 +よし、 じゃあここで、 私は嘘をついていない。 + +02:16.820 --> 02:19.790 +この4行のコードと同じくらい簡単だ。 + +02:19.820 --> 02:24.050 +まず、 新しいLM抽象化を作成する。 + +02:24.080 --> 02:25.250 +チャットのOpenAI。 + +02:25.280 --> 02:27.950 +しばらく輸入していたものを置くつもりだ。 + +02:27.950 --> 02:29.570 +ついにそれを使うことになる。 + +02:29.960 --> 02:34.760 +そして、 温度とモデル名、 メモリーを指定する。 + +02:34.820 --> 02:42.230 +前にも書いたように、 キーを渡して、 リストの形で返してほしいと言って、 カンバセーショナル・バッファ・メモリを作る。 + +02:42.560 --> 02:52.580 +クロマ・ベクター・ストアをリトリーバーと呼び、 ラング・チェーンに必要な抽象化されたリトリーバーで包みます。 + +02:52.580 --> 02:58.370 +そして、 会話による検索チェーンを構築することになる。 + +02:58.370 --> 03:03.200 +そして私たちは、 LM、 レトリーバー、 そして記憶を受け継ぐだけなのだ。 + +03:03.740 --> 03:04.700 +それだけだ。 + +03:04.820 --> 03:05.780 +それを実行しよう。 + +03:07.040 --> 03:07.640 +オーケー。 + +03:07.640 --> 03:08.960 +だから私たちはそれを実行した。 + +03:09.140 --> 03:10.430 +少し拍子抜けしたかもしれない。 + +03:10.430 --> 03:15.490 +突然ボロが目の前に現れるとでも思っていたのか。 + +03:15.700 --> 03:16.960 +実際に電話しなければならない。 + +03:16.990 --> 03:19.270 +それを生かすために何かをしなければならない。 + +03:19.300 --> 03:25.690 +だから、 これから言うのは、 うーん、 うーん、 クエリーコールだ。 + +03:26.770 --> 03:38.230 +簡単なところから始めましょう。 + +03:38.320 --> 03:38.620 +分かった。 + +03:38.650 --> 03:39.370 +そして、 あなたはこう言う。 + +03:39.370 --> 03:41.800 +あなたは結果が会話の連鎖だと言う。 + +03:41.800 --> 03:46.510 +今作ったものをメソッドinvokeと呼ぶ。 + +03:46.600 --> 03:52.600 +そして、 
invokeはquestionをキーとする辞書を取る。 + +03:52.690 --> 03:53.620 +スペルは正しかったかな? + +03:53.650 --> 03:53.830 +そうだ。 + +03:53.830 --> 03:54.850 +質問だ。 + +03:55.150 --> 03:58.780 +そして、 メッセージクエリーを入れなければならない。 + +04:00.010 --> 04:01.420 +そうだ。 + +04:01.420 --> 04:04.810 +そして結果を印刷する。 + +04:06.430 --> 04:09.610 +これは、 その結果の答えのキーの下にあるものでなければならない。 + +04:09.610 --> 04:15.640 +これが、 ラグ・パイプラインを利用するための最後のコードだ。 + +04:15.680 --> 04:16.970 +では、 どうなると思う? + +04:16.970 --> 04:18.620 +その問い合わせが必要だ。 + +04:18.620 --> 04:21.110 +それをベクトルに変えるんだ。 + +04:21.110 --> 04:24.230 +クロームのデータストアでそれを検索する。 + +04:24.260 --> 04:27.440 +関連するチャンクを見つけるだろう。 + +04:27.440 --> 04:29.780 +チャンクは複数形だ。 + +04:29.780 --> 04:31.250 +そして、 またその話に戻るつもりだ。 + +04:31.580 --> 04:33.320 +だから、 関連するチャンクを見つけることができる。 + +04:33.320 --> 04:38.930 +そしてそれをプロンプトにドロップし、 OpenAIに送る。 + +04:39.020 --> 04:43.130 +GPT4ミニに送られます。 + +04:43.130 --> 04:48.170 +そして、 戻ってきたものをパッケージして、 それを解答用紙に入れる。 + +04:48.200 --> 04:49.940 +うまくいくかどうか見てみよう。 + +04:52.400 --> 04:53.480 +これでよし。 + +04:53.510 --> 04:54.560 +これでよし。 + +04:54.560 --> 04:59.240 +ショアハムで初めてのラグ・パイプラインをフロントからバックまで走らせたところだ。 + +04:59.270 --> 05:06.230 +エイブリー・ランカスターによって設立された革新的な保険技術会社である。 + +05:06.230 --> 05:07.940 +それに断片的な情報もある。 + +05:07.940 --> 05:12.980 +いろいろな文書から、 おそらく会社のセクションのアバウトなものから、 + +05:12.980 --> 05:18.530 +それを取り出しているのがわかるだろう。 + +05:18.530 --> 05:24.910 +ええと、 でも、 いろいろな情報のかたまりからそれを取り出していることがわかるといいんだけど。 + +05:25.960 --> 05:26.860 +分かった。 + +05:26.860 --> 05:28.750 +まあ、 いいんじゃない? + +05:29.230 --> 05:30.310 +これがどこに向かっているのかわかるかい? + +05:30.340 --> 05:37.480 +それを美しいユーザーインターフェイスにパッケージして、 チャットUIで実際に使えるようになればいいと思いませんか? 
+ +05:37.480 --> 05:41.290 +そしてもちろん、 Gradioはそれを超シンプルにしてくれる。 + +05:41.290 --> 05:47.890 +この時点で私たちがしなければならないことは、 Gradioが期待するフォーマットでチャット関数を作成することだとわかっている。 + +05:47.890 --> 05:50.170 +それにはメッセージと歴史が必要だ。 + +05:50.170 --> 05:58.480 +だから、 さっき書いたセリフをそのままここに書いて、 僕らが期待しているものをそのまま返しているんだ。 + +05:58.510 --> 06:04.540 +さて、 何か不思議なものが見えるかもしれない。 少し時間をおいて、 何か、 何か奇妙に感じるものがないか見てみよう。 + +06:06.070 --> 06:12.430 +さて、 奇妙に思われるかもしれないが、 このヒストリー・パラメーターでは実際には何もしない。 + +06:12.520 --> 06:14.440 +ああ、 完全に無視している。 + +06:14.440 --> 06:19.570 +そして、 私たちがそれを無視するのは、 もちろん、 ラング・チェインがすでに私たちのために歴史を扱っているからだ。 + +06:19.570 --> 06:26.820 +GradioのチャットUIは、 ユーザー・インターフェースの履歴を保持し、 + +06:26.820 --> 06:30.570 +チャット履歴を毎回コールバックする。 + +06:30.600 --> 06:38.430 +なぜなら、 ラングがすでに私たちにこの記憶力を与えてくれていて、 これまでの会話を記録してくれているからだ。 + +06:38.460 --> 06:41.730 +それが知る必要があるのは、 新しいメッセージと新しい答えだけだ。 + +06:42.570 --> 06:45.180 +だからとにかく、 ここでセルを再運転しているんだ。 + +06:45.210 --> 06:47.970 +実はすでに再実行したんだけど、 メモリを一掃するために再実行しているんだ。 + +06:47.970 --> 06:50.190 +だから、 まったく新しいスタートを切る。 + +06:50.220 --> 06:55.230 +この話を持ち出したら、 あとはおしゃべりするだけだ。 + +06:55.500 --> 06:56.340 +こんにちは。 + +06:57.780 --> 06:58.380 +こんにちは。 + +06:58.380 --> 06:59.700 +本日はどのようなご用件でしょうか? + +07:00.060 --> 07:02.520 +保険とは何か? + +07:02.640 --> 07:03.090 +うーん。 + +07:08.490 --> 07:09.150 +これでよし。 + +07:09.180 --> 07:10.560 +驚きはない。 + +07:10.680 --> 07:13.230 +そして今、 私たちは卑劣なことをすることができる。 + +07:13.260 --> 07:14.970 +私たちは次のように言うことができる。 + +07:15.030 --> 07:19.470 +エイブリーは以前何をしていたのですか? 
+ +07:20.100 --> 07:26.490 +なぜこのような話をしたかというと、 このような質問にはいくつか表面化させたいことがあるんだ。 + +07:26.490 --> 07:34.030 +つまり、 まず明白なことを言うと、 私たちのおもちゃのラグ・バージョンの前のブルートフォース・ソリューションは、 ランカスターを名字として見て、 + +07:34.030 --> 07:38.860 +ドキュメントからそれを検索することができたが、 これはかなり絶望的だった。 + +07:38.860 --> 07:41.560 +そして、 鳥小屋という言葉を使おうとしたら、 それは私たちの失敗だった。 + +07:41.560 --> 07:45.160 +だから、 ここで試してみるのはもちろん面白い。 + +07:45.190 --> 07:50.470 +第二に、 aviaryをわざと小文字のaにした。 aviaryはスペルが違うので、 + +07:50.470 --> 07:56.770 +テキスト検索をするものが間違ってしまうからだ。 + +07:56.770 --> 08:02.620 +だから、 僕らが正しいケースを使っていないという事実を扱えるかどうか、 興味深いところだね。 + +08:02.620 --> 08:08.380 +そして第三に、 私はこの記憶のアイデアを利用しているんだ。 + +08:08.410 --> 08:11.920 +つまり、 エルムで創業する前は何をしていたのか、 ということだ。 + +08:11.920 --> 08:18.220 +そして、 このモデルが、 鳥小屋に関する関連情報を検索し、 以前彼女が何をしたかという文脈を保つことができるほど、 + +08:18.220 --> 08:22.780 +何が起こっているのかを十分に理解しているかどうかを見ることになる。 + +08:22.810 --> 08:25.000 +それは彼女の従業員記録から得る必要がある。 + +08:25.120 --> 08:29.350 +それと、 質問に首尾一貫して答えてください。 + +08:29.350 --> 08:30.310 +見てみよう。 + +08:33.420 --> 08:38.670 +エイブリー・ランカスターはショーハムで創業する以前、 イノベート・インシュアランス・ソリューションズでシニア・プロダクト・マネージャーとして働き、 + +08:38.670 --> 08:40.620 +画期的な保険を開発していた。 + +08:40.980 --> 08:43.830 +それ以前は、 市場動向に焦点を当てたビジネスアナリスト。 + +08:43.860 --> 08:49.860 +だから、 彼女の従業員記録をチェックし、 それが本当に正しく発見され、 + +08:49.860 --> 09:01.200 +エイブリーの正しい経歴が得られるかどうか、 納得してもらうための練習として、 ええと、 試してみる楽しみとして残しておこう。 + +09:01.320 --> 09:05.850 +そしてもちろん、 他の難しい質問にも挑戦する。 + +09:06.000 --> 09:10.200 +ええと、 どういうこと? 
+ +09:12.960 --> 09:13.140 +ええと。 + +09:13.140 --> 09:13.860 +見てみよう。 + +09:13.890 --> 09:20.730 +落ち着いて......あれは車だよ。 + +09:21.210 --> 09:21.390 +うーん。 + +09:21.420 --> 09:24.420 +あるいは、 うーん、 別の聞き方はどうだろう。 + +09:24.420 --> 09:35.650 +保険は自動車保険の分野で車内に何か商品を提供しているとしよう。 + +09:36.550 --> 09:38.770 +トリッキーな質問をしよう。 + +09:39.040 --> 09:40.570 +さあ、 行こう。 + +09:40.600 --> 09:40.960 +そうだ。 + +09:40.990 --> 09:44.530 +保険は、 自動車保険会社のポータルサイトであるCalmを提供している。 + +09:44.530 --> 09:53.020 +だから、 私がcalmという単語を使わなくても、 carという単語を使わなくても、 関連する文書、 関連するチャンクを見つけ、 + +09:53.020 --> 09:58.060 +質問に正確に答えることができた。 + +09:58.540 --> 10:03.160 +これがラグを使った最初の実験だね。 + +10:03.190 --> 10:04.930 +ぜひ試してほしい。 + +10:04.930 --> 10:10.450 +調査し、 難しい質問をし、 それを破ることができるか、 間違った情報を与えるように仕向けることができるか、 + +10:10.450 --> 10:15.190 +あるいはレールから外れて限界まで引き伸ばすことができるか、 見極めてほしい。 + +10:15.220 --> 10:21.340 +次回は、 このようなプロンプトが表示される場合によくある問題や、 + +10:21.340 --> 10:28.180 +デバッグの方法についてお話しします。 + +10:28.210 --> 10:37.540 +しかし、 ショアハムにある架空の保険技術会社のために作られた初のエンド・トゥ・エンドのラグ・パイプラインを楽しんでいただけたなら幸いだ。 + +10:37.600 --> 10:38.620 +それではまた次回。 diff --git a/week5/community-contributions/subtitles/srts/59297735/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297735/ko_KR.srt new file mode 100755 index 0000000..36f1dff --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297735/ko_KR.srt @@ -0,0 +1,520 @@ +WEBVTT + +00:00.890 --> 00:04.400 +마침내 래그가 활약하는 걸 보러 왔죠 + +00:04.430 --> 00:07.460 +그렇게 떠들어대더니 결국 이렇게 됐네요 + +00:07.460 --> 00:10.730 +주피터랩의 5주 차 폴더에 있어요 + +00:10.730 --> 00:16.760 +4일째인 노트북을 보고 있는데요 3일째에 추가된 걸 복제한 거예요 + +00:16.760 --> 00:21.710 +가상의 보험 기술 회사의 지식 노동자 세럼의 문제를 여전히 해결하는 + +00:21.710 --> 00:22.970 +중이니까요 + +00:23.330 --> 00:27.620 +기존의 수입품부터 시작할 거예요 + +00:28.070 --> 00:35.570 +랑체인용 수입 기능도 추가했는데요 랑체인 메모리에서 두 가지를 추가했어요 + +00:35.600 --> 00:38.570 +대화 버퍼 메모리를 불러올게요 + +00:38.600 --> 00:43.010 +랭 체인에서 대화용 회수 체인을 가져왔어요 + +00:43.010 --> 00:46.850 +이건 제가 전에 언급한 추상화 두 가지인데요 + +00:46.880 --> 00:52.310 +눈치 빠른 분들은 눈치챘겠지만 세 번째 추상화도 여기 들어 있어요 + +00:52.310 
--> 00:57.920 +이전 강의에서 언급하지 않고 이미 불러왔지만 여기 있네요 + +00:57.920 --> 01:02.180 +챗 오픈아이는 이미 랑가 체인에서 수입되고 있어요 + +01:02.180 --> 01:08.650 +오픈AI, 지금까지 오픈AI 엠딩만 사용했는데 이번에는 채팅 오픈AI를 + +01:08.680 --> 01:10.990 +사용할 거예요 + +01:11.050 --> 01:14.410 +수입 검사를 해 봐야겠어요 + +01:15.010 --> 01:17.020 +Get it 안 하면 멀리 못 가요 + +01:17.620 --> 01:19.720 +그런 다음 상수를 하죠 + +01:19.720 --> 01:21.850 +환경 변수를 로드해요 + +01:21.850 --> 01:23.800 +이제 익숙해질 거예요 + +01:23.800 --> 01:30.580 +하지만 검토하고 저기 있는 지식 기반 디렉터리에서 문서를 갖고 오죠 + +01:30.580 --> 01:35.350 +이제 텍스트 덩어리를 갖고 와서 얼마나 많은지 보죠 + +01:35.350 --> 01:36.730 +123번인 건 확실해요 + +01:36.760 --> 01:39.040 +네, 123개요 + +01:39.040 --> 01:44.200 +직원, 제품 회사, 계약이죠 + +01:44.590 --> 01:50.020 +이제 다시 벡터 데이터베이스에 Put을 할게요 + +01:50.020 --> 01:52.300 +벡터 데이터베이스를 삭제하고 다시 생성하죠 + +01:52.390 --> 01:57.640 +각 벡터는 1536개의 차원을 가지고 있죠 + +01:57.640 --> 02:00.580 +상상하긴 어렵지만 2D로 처리할 수 있어요 + +02:00.580 --> 02:01.930 +그게 우리 일이죠 + +02:01.930 --> 02:03.220 +저기 있네요 + +02:03.220 --> 02:07.450 +3D로 보자고 할 수도 있어요 + +02:07.480 --> 02:08.810 +비트가 좀 불필요하네요 + +02:08.810 --> 02:12.650 +이걸 전부 다시 볼 필요는 없었지만 이 도표들을 보니 좋네요 + +02:12.680 --> 02:16.820 +난 거짓말한 적 없어요 + +02:16.820 --> 02:19.790 +이 코드 네 줄만큼 간단해요 + +02:19.820 --> 02:24.050 +먼저 새 LM 추상화를 생성하죠 + +02:24.080 --> 02:25.250 +채팅방 오픈라이예요 + +02:25.280 --> 02:27.950 +아까 가져온 것을 put을 거예요 + +02:27.950 --> 02:29.570 +드디어 그걸 쓸 수 있게 됐죠 Put it up Put it up Put it up Put it up Put it up Put it + +02:29.960 --> 02:34.760 +온도와 모델명 메모리를 제공해요 + +02:34.820 --> 02:39.710 +전달되는 대화 버퍼 메모리를 생성합니다 아까 말씀드렸듯이 키를 생성하고 + +02:39.710 --> 02:42.230 +목록의 형태로 반환하라고 하죠 + +02:42.560 --> 02:49.640 +크로마 벡터 스토어를 가져와서 리트리버를 호출합니다 이 추상화, 리트리버에서 래핑하기 + +02:49.640 --> 02:52.580 +위해서요 랭 체인에 필요한 거죠 + +02:52.580 --> 02:58.370 +그래서 지금 대화 회수 체인을 만들 때 거기에 도달하는 거죠 get it + +02:58.370 --> 03:03.200 +LM, 리트리버, 메모리를 전달하는 거죠 + +03:03.740 --> 03:04.700 +그게 다예요 + +03:04.820 --> 03:05.780 +실행해 보죠 + +03:07.040 --> 03:07.640 +네 + +03:07.640 --> 03:08.960 +그래서 실행했죠 + +03:09.140 --> 03:10.430 +살짝 실망스럽긴 하지만요 + 
+03:10.430 --> 03:14.590 +뭘 기대하셨는지 모르겠지만요 갑자기 get이 나타날 거라고 생각하셨는지 + +03:14.590 --> 03:15.490 +모르겠네요 + +03:15.700 --> 03:16.960 +실제로 호출해야 해요 + +03:16.990 --> 03:19.270 +그걸 활용할 방법을 찾아야 해요 + +03:19.300 --> 03:25.690 +그래서 우리가 하려는 말은 쿼리 호출이라고 하죠 + +03:26.770 --> 03:38.230 +짧은 영상으로 짧게 설명해 주시겠어요? 간단하게 시작할게요 + +03:38.320 --> 03:38.620 +좋아요 + +03:38.650 --> 03:39.370 +이렇게 말해요 + +03:39.370 --> 03:41.800 +결과가 대화의 사슬이라고 하셨죠? + +03:41.800 --> 03:46.510 +방금 생성한 거요 메서드 호출이라고 부르죠 + +03:46.600 --> 03:52.600 +인보크는 질문이 열쇠인 사전을 선택해요 + +03:52.690 --> 03:53.620 +철자가 맞나요? + +03:53.650 --> 03:53.830 +네 + +03:53.830 --> 03:54.850 +질문 있어요 + +03:55.150 --> 03:58.780 +그리고 메시지 쿼리를 입력해야 해요 Put + +04:00.010 --> 04:01.420 +다 됐어요 + +04:01.420 --> 04:04.810 +그런 다음 print결과를 하죠 + +04:06.430 --> 04:09.610 +이건 그 결과의 해답 키 아래에 있어야 해요 + +04:09.610 --> 04:15.640 +이게 우리가 합친 마지막 코드입니다 래그 파이프라인을 사용하기 위해서요 + +04:15.680 --> 04:16.970 +어떻게 될 것 같아요? + +04:16.970 --> 04:18.620 +쿼리를 가져가요 + +04:18.620 --> 04:21.110 +벡터로 바꿀 거예요 + +04:21.110 --> 04:24.230 +크롬 데이터 스토어에서 찾아볼 거예요 + +04:24.260 --> 04:27.440 +관련 있는 덩어리를 찾을 거예요 + +04:27.440 --> 04:29.780 +덩어리 복수형이라고 하죠 + +04:29.780 --> 04:31.250 +그건 나중에 다시 얘기하죠 + +04:31.580 --> 04:33.320 +관련 있는 덩어리를 찾죠 + +04:33.320 --> 04:38.930 +프롬프트에 그걸 넣어 OpenAI에 보내죠 + +04:39.020 --> 04:43.130 +GPT for 미니로 보낼 겁니다 여기서 지정했거든요 + +04:43.130 --> 04:48.170 +그리고 돌아온 것은 그것을 패키지로 해서 answer key에 두죠 + +04:48.200 --> 04:49.940 +이게 통할지 보죠 + +04:52.400 --> 04:53.480 +됐어요 + +04:53.510 --> 04:54.560 +됐어요 + +04:54.560 --> 04:59.240 +쇼어햄에서 처음으로 래그 파이프라인을 탔어요 + +04:59.270 --> 05:06.230 +혁신적인 보험 기술 회사 에이버리 랭커스터가 설립했죠 지금도 잘 알려진 이름이에요 + +05:06.230 --> 05:07.940 +정보도 좀 있고요 + +05:07.940 --> 05:12.980 +여러분이 갖고 놀 수 있는 연습용으로 남겨두겠습니다만 다양한 문서에서 + +05:12.980 --> 05:18.530 +그걸 갖고 온 걸 보실 수 있어요 회사 섹션의 모든 것에서 나온 거겠죠 + +05:18.530 --> 05:24.910 +다양한 정보로부터 그걸 검색한 걸 보셨으면 좋겠네요 + +05:25.960 --> 05:26.860 +좋아요 + +05:26.860 --> 05:28.750 +그럼 좋겠죠? + +05:29.230 --> 05:30.310 +어떻게 될지 알아요? 
+ +05:30.340 --> 05:35.020 +그걸 멋진 사용자 인터페이스로 패키지할 수 있다면 좋지 않을까요? 채팅 + +05:35.020 --> 05:37.480 +UI를 통해 사용할 수 있도록요 + +05:37.480 --> 05:41.290 +물론 그라디오는 아주 간단하게 만들고요 + +05:41.290 --> 05:47.890 +이 시점에서 우리가 할 일은 그라디오가 기대하는 포맷으로 채팅 함수를 만드는 거예요 + +05:47.890 --> 05:50.170 +그러려면 메시지와 역사가 필요하죠 + +05:50.170 --> 05:55.540 +제가 한 일은 방금 작성한 바로 그 줄을 취해 여기 둔 겁니다 그런 다음 우리가 + +05:55.540 --> 05:58.480 +기대하는 걸 정확히 반환하죠 + +05:58.510 --> 06:03.070 +뭔가 이상한 점이 보일 수도 있으니 잠시 시간을 두고 살펴보세요 뭔가 이상한 + +06:03.070 --> 06:04.540 +점이 보이나요? + +06:06.070 --> 06:09.910 +한 가지 이상하게 생각하실 만한 건 이 역사 매개 변수를 + +06:09.910 --> 06:12.430 +실제로 아무것도 안 한다는 거예요 + +06:12.520 --> 06:14.440 +완전히 무시해요 + +06:14.440 --> 06:19.570 +우리가 그걸 무시하는 이유는 랭 체인이 이미 역사를 다루기 때문이죠 + +06:19.570 --> 06:26.820 +그래디오에 채팅 UI가 있어서 사용자 인터페이스에서 대화 기록을 유지하고 매번 전체 + +06:26.820 --> 06:30.570 +대화 기록과 함께 호출되지만 말이에요 + +06:30.600 --> 06:36.540 +랭이 이미 우리에게 메모리를 줬기 때문에 지금까지의 대화를 + +06:36.570 --> 06:38.430 +기록하고 있어요 + +06:38.460 --> 06:41.730 +새 메시지와 새 답만 알면 돼요 + +06:42.570 --> 06:45.180 +어쨌든 이 감방을 다시 찍고 있어요 + +06:45.210 --> 06:47.970 +사실 이미 재실행했는데 메모리를 지우려고 재실행 중이에요 + +06:47.970 --> 06:50.190 +완전히 새롭게 시작하는 거죠 + +06:50.220 --> 06:55.230 +이걸 불러오면 이제 채팅할 수 있어요 + +06:55.500 --> 06:56.340 +안녕하세요 + +06:57.780 --> 06:58.380 +안녕하세요 + +06:58.380 --> 06:59.700 +무엇을 도와드릴까요? + +07:00.060 --> 07:02.520 +보험이 뭐죠? + +07:02.640 --> 07:03.090 +네 + +07:08.490 --> 07:09.150 +됐어요 + +07:09.180 --> 07:10.560 +놀랍지도 않네요 + +07:10.680 --> 07:13.230 +이제 몰래 뭔가 할 수 있어요 + +07:13.260 --> 07:14.970 +이런 식으로요 + +07:15.030 --> 07:19.470 +에이버리는 뭘 했죠? + +07:20.100 --> 07:25.260 +제가 이 얘기를 꺼낸 이유는 이런 질문에서 몇 가지 짚고 넘어가고 싶은 + +07:25.290 --> 07:26.490 +게 있어서예요 + +07:26.490 --> 07:34.030 +먼저, 무력을 이용한 해결책이 명확합니다 장난감 버전 래그에서는 랭커스터를 성으로 + +07:34.030 --> 07:38.860 +보고 문서에서 검색할 수 없었지만 가망이 없었죠 + +07:38.860 --> 07:41.560 +새장이라는 단어를 쓰면 안 돼요 + +07:41.560 --> 07:45.160 +여기서 시도하는 건 당연히 흥미롭죠 + +07:45.190 --> 07:50.470 +둘째, aviery를 소문자 a로 썼어요. 왜냐하면 + +07:50.470 --> 07:56.770 +a는 철자가 다르기 때문에 텍스트로 검색하면 틀릴 테니까요. 
+ +07:56.770 --> 08:02.620 +그래서 우리가 제대로 된 케이스를 사용하지 않았다는 사실을 감당할 수 있을지 보는 것도 흥미로울 거예요 + +08:02.620 --> 08:07.540 +세 번째는 메모리라는 개념을 이용하는 거예요 그녀가 전에 했던 걸 말하는 + +08:07.540 --> 08:08.380 +거니까요 + +08:08.410 --> 08:11.920 +엘름에 설립하기 전에 뭘 했는지 묻는 거예요 + +08:11.920 --> 08:18.220 +모델이 현재 상황을 잘 파악하고 맥락을 유지하는지 볼 거예요 조류 새장에 + +08:18.220 --> 08:22.780 +관한 정보와 이전 행동을 둘 다 검색할 수 있는지요 + +08:22.810 --> 08:25.000 +직원 기록에서 찾아봐야 해요 + +08:25.120 --> 08:29.350 +그리고 일관성 있게 질문에 대답하세요 + +08:29.350 --> 08:30.310 +어디 보죠 + +08:33.420 --> 08:38.670 +쇼어햄에 입사하기 전 에이버리 랭커스터는 이노베이트 보험 솔루션스의 수석 제품 관리자로 일했습니다 + +08:38.670 --> 08:40.620 +혁신적인 보험을 개발한 곳이죠 + +08:40.980 --> 08:43.830 +그 전에는 시장 경향에 초점을 맞춘 사업 분석가였죠 + +08:43.860 --> 08:49.860 +그래서 여러분이 에이버리의 직원 기록을 확인하시도록 + +08:49.860 --> 09:01.200 +남겨둘게요 그리고 에이버리의 신원을 제대로 파악하고 있는지 확인하세요 재미있는 실험이니까요 + +09:01.320 --> 09:05.850 +물론 다른 어려운 질문들도 시도해 보세요 + +09:06.000 --> 09:10.200 +무슨 뜻이죠? + +09:12.960 --> 09:13.140 +네 + +09:13.140 --> 09:13.860 +어디 보죠 + +09:13.890 --> 09:20.730 +침착하게가 그 차의 제품이었어요 + +09:21.210 --> 09:21.390 +네 + +09:21.420 --> 09:24.420 +아니면 다르게 물어보죠 + +09:24.420 --> 09:35.650 +자동차 보험 회사에서 자동차에 제품을 공급하나요? 
+ +09:36.550 --> 09:38.770 +까다로운 문제를 내 보죠 + +09:39.040 --> 09:40.570 +다 됐어요 + +09:40.600 --> 09:40.960 +네 + +09:40.990 --> 09:44.530 +자동차 보험 회사들을 위한 포털인 안정감을 주는 보험이죠 + +09:44.530 --> 09:53.020 +그래서 제가 진정이나 자동차라는 단어를 사용하지 않는데도 관련 문서와 관련 + +09:53.020 --> 09:58.060 +덩어리를 찾아 정확한 답변을 할 수 있죠 + +09:58.540 --> 10:03.160 +랙으로 하는 첫 실험이었군요 + +10:03.190 --> 10:04.930 +이제 이걸 드셔 보세요 + +10:04.930 --> 10:10.450 +여러분이 수사하고 어려운 질문을 해서 그걸 깨뜨리거나 잘못된 정보를 제공할 수 있는지 알아내길 + +10:10.450 --> 10:15.190 +바랍니다 아니면 선을 벗어나 한계까지 밀어붙여 보세요 get it get it + +10:15.220 --> 10:21.340 +다음 시간엔 몇 가지 다른 걸 포함해 방법 몇 가지를 다루겠습니다 이런 종류의 프롬프트와 관련해 + +10:21.340 --> 10:26.260 +공통된 문제 몇 가지와 어떻게 디버그 하고 Get up 밑에서 무슨 일이 있는지 + +10:26.260 --> 10:28.180 +알아내는 방법도요 + +10:28.210 --> 10:36.670 +하지만 쇼어햄에 있는 우리 가상 보험 기술 회사를 위해 만든 래그 파이프라인을 즐겁게 보셨길 + +10:36.670 --> 10:37.540 +바라요 + +10:37.600 --> 10:38.620 +다음에 봐요 diff --git a/week5/community-contributions/subtitles/srts/59297743/en_US.srt b/week5/community-contributions/subtitles/srts/59297743/en_US.srt new file mode 100755 index 0000000..5bf6782 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297743/en_US.srt @@ -0,0 +1,526 @@ +WEBVTT + +00:01.310 --> 00:03.650 +And welcome to day five. + +00:03.680 --> 00:04.490 +For reals. + +00:04.490 --> 00:06.680 +We're actually in the proper Jupyter notebook. + +00:06.710 --> 00:11.060 +This time we're in day five, in week five, ready for action. + +00:11.060 --> 00:13.610 +And it's the same as before. + +00:13.940 --> 00:16.580 +It's a duplicate of day four, not day four and a half. + +00:16.580 --> 00:22.760 +We're using the Chroma Datastore here, and I'm going to really quickly go through this because you + +00:22.760 --> 00:24.560 +know all of this already. + +00:24.560 --> 00:31.160 +And we're going to get back to our Gradio interface. + +00:31.160 --> 00:31.970 +Just like that. + +00:31.970 --> 00:33.110 +It caught up with us. + +00:33.230 --> 00:33.890 +It's drawn. 
+ +00:33.890 --> 00:36.080 +It's 2D and 3D diagrams behind the scenes. + +00:36.080 --> 00:37.130 +But no time for that. + +00:37.130 --> 00:37.490 +Now. + +00:37.490 --> 00:38.840 +We need to press on. + +00:39.200 --> 00:45.230 +Uh, first of all, I might as well show you that that aviary test we did before still works in chroma + +00:45.260 --> 00:46.550 +as it did in vice. + +00:46.550 --> 00:49.190 +So let's just quickly try that ourselves. + +00:49.400 --> 00:56.750 +Um, what did aviary spelt wrong do before ensure? + +00:57.320 --> 01:00.350 +Um, and I imagine. + +01:00.350 --> 01:01.130 +We'll see. + +01:01.160 --> 01:01.820 +Yes. + +01:01.820 --> 01:06.280 +That chroma has no problems whatsoever with that either. + +01:06.310 --> 01:07.810 +No surprises there. + +01:07.840 --> 01:11.980 +Okay, but let me now show you something which isn't going to go so well. + +01:11.980 --> 01:16.360 +First of all, I want to take a peek at an employee HR document. + +01:16.390 --> 01:22.210 +If we go into our knowledge base and we go to employees, we're going to look at the employee record + +01:22.210 --> 01:25.360 +for a certain Maxine Thompson. + +01:25.390 --> 01:27.610 +Let's open this with markdown. + +01:27.610 --> 01:30.010 +So we see it in its full markdown glory. + +01:30.040 --> 01:35.830 +Here is HR record for Maxine Thompson, um, a data engineer in Austin, Texas. + +01:35.830 --> 01:41.110 +And the thing I wanted to draw your attention to for one second is that if you look down here, you'll + +01:41.110 --> 01:47.860 +notice that Maxine was recognized as the Ensure Elm Innovator of the year in 2023. + +01:47.890 --> 01:56.410 +She received the prestigious I o T Award in Elm Innovator of the year award in 2023. + +01:56.440 --> 01:59.110 +Now, I have to confess, I added this sentence in myself. + +01:59.110 --> 02:06.160 +It wasn't as if this was, uh, invented as part of the synthetic data, uh, by, uh, GPT four or + +02:06.160 --> 02:06.760 +Claude. 
+ +02:07.090 --> 02:08.620 +This is all my doing. + +02:08.830 --> 02:09.640 +And it's awful. + +02:09.640 --> 02:11.500 +So blame me. + +02:11.830 --> 02:19.720 +Uh, so what we're going to do now is we're going to go back to our day five, and we are going to ask + +02:19.720 --> 02:21.580 +the question, who won? + +02:24.400 --> 02:36.820 +Oh, we say, who received the prestigious, uh and Shriram Innovator of the year award in 2023. + +02:36.850 --> 02:38.740 +And let's see what it says. + +02:40.570 --> 02:42.160 +It says I don't know. + +02:42.190 --> 02:43.510 +And quite a blunt way. + +02:43.540 --> 02:44.710 +Quite curt. + +02:44.770 --> 02:46.420 +Uh, so that's interesting. + +02:46.450 --> 02:47.530 +Uh, it has failed. + +02:47.530 --> 02:49.330 +That was information that it was provided. + +02:49.330 --> 02:51.370 +It was there in the documents. + +02:51.370 --> 02:52.900 +And that is a bit disappointing. + +02:52.900 --> 02:56.290 +And so the thing to do now is to try and diagnose this problem. + +02:56.290 --> 03:00.520 +And in doing so, we're going to learn a little bit about how Lang Chain works under the hood. + +03:00.700 --> 03:02.950 +And it's not going to be very surprising. + +03:03.370 --> 03:06.310 +Uh, so here we get to see what's going on. + +03:06.370 --> 03:11.850 +Uh, there is this very useful thing we can do, which is create something called the standard out callback + +03:11.850 --> 03:17.010 +handler, which, much as it sounds, is going to be something which will let us print to the standard + +03:17.010 --> 03:19.590 +out what is going on behind the scenes. + +03:19.620 --> 03:22.710 +So this is the same familiar code that you're very used to. + +03:22.740 --> 03:24.000 +We create the alarm. + +03:24.090 --> 03:25.530 +We create the memory. + +03:25.560 --> 03:32.340 +We create the retriever and we create our conversation chain in this beautiful one liner passing in + +03:32.340 --> 03:35.850 +the LM, the retriever, the memory. 
+ +03:35.850 --> 03:40.410 +And now you can see I'm passing in one more thing, which is a list of callbacks. + +03:40.410 --> 03:46.560 +And I'm only creating one callback in here, which is this standard out callback handler. + +03:46.560 --> 03:53.430 +And that as you can probably, uh, expect, is going to be printing, uh, repeatedly to standard out + +03:53.430 --> 03:55.710 +as this conversation chain runs. + +03:55.800 --> 03:58.650 +So here is the question again who won? + +03:58.680 --> 03:59.460 +I put it differently. + +03:59.730 --> 04:00.540 +Let's do it the same way. + +04:00.570 --> 04:07.020 +Who received the prestigious Iet Award in 2023? + +04:07.050 --> 04:07.770 +There we go. + +04:07.800 --> 04:09.030 +We'll ask that question. + +04:09.030 --> 04:10.110 +We'll get back the answer. + +04:10.110 --> 04:11.930 +We'll see what it says. + +04:12.770 --> 04:18.350 +So we get this kind of trace as we look through it, which gives us a bit of insight into how Lang Chain + +04:18.350 --> 04:19.010 +works. + +04:19.010 --> 04:21.170 +It has these different objects. + +04:21.170 --> 04:28.010 +These are called the chain that are sort of hooked together as it goes through the steps of building + +04:28.010 --> 04:30.440 +the conversation, the Rag query. + +04:30.440 --> 04:34.490 +And you can actually use different callbacks to be printing lots more detail about what's happening + +04:34.490 --> 04:36.590 +at each stage, should you wish. + +04:36.590 --> 04:41.600 +But what we really care about is the prompt that ends up going to GPT four. + +04:41.600 --> 04:43.040 +And here it is. + +04:43.040 --> 04:44.090 +System. + +04:44.090 --> 04:47.330 +Use the following piece of context to answer the user's question. + +04:47.330 --> 04:50.300 +If you don't know the answer, just say that you don't know. + +04:50.300 --> 04:52.220 +Don't try to make up an answer. 
+ +04:52.250 --> 04:57.200 +I think this is really interesting, because this is the prompt that specialists at Lang Chain, like + +04:57.230 --> 05:02.090 +experts, have crafted as an ideal prompt to send to different llms. + +05:02.090 --> 05:06.020 +And so this is a great one for for you to steal and use in your own projects. + +05:06.020 --> 05:07.730 +It's very carefully written. + +05:07.730 --> 05:13.250 +It's clearly very effective because it stopped, uh, GPT four from hallucinating. + +05:13.370 --> 05:16.670 +Um, and so it's nice well worded prompting. + +05:17.390 --> 05:18.710 +But here's the problem. + +05:18.710 --> 05:25.520 +This is the context that was then provided to the to the LM coming up right here. + +05:25.520 --> 05:30.740 +And you'll see that it is, in fact a few chunks taken from different chunks that we've got. + +05:30.770 --> 05:36.200 +It's 2 or 3 chunks and they appear to be taken from HR records. + +05:36.410 --> 05:38.180 +But they're not right. + +05:38.180 --> 05:42.140 +Because they don't mention the I o t award. + +05:42.140 --> 05:47.060 +So it's wrong chunks that have been identified unfortunately in this case. + +05:47.300 --> 05:51.230 +Um oh, and that this at the end here is is the question. + +05:51.230 --> 05:55.400 +It says human who received the prestigious I o t award. + +05:55.730 --> 05:56.780 +I'm the human. + +05:56.780 --> 06:03.320 +Uh, and clearly there wasn't good context to answer that question in what comes above. + +06:03.320 --> 06:07.160 +And that's why the response was, I don't know. + +06:07.850 --> 06:10.370 +So what can we do about this? + +06:10.370 --> 06:14.990 +Well, it's a it's a very common problem with Rag when you find that you're not providing the right + +06:14.990 --> 06:15.590 +context. + +06:15.590 --> 06:17.440 +And there's a few different things that you can do. + +06:17.680 --> 06:22.420 +Uh, one of them is to go back and look at your chunking strategy. 
+ +06:22.540 --> 06:25.270 +How are you dividing documents into chunks? + +06:25.270 --> 06:26.050 +And are you doing that? + +06:26.050 --> 06:26.500 +Right. + +06:26.500 --> 06:28.780 +And there's a few things that we could try right off the bat. + +06:28.810 --> 06:34.300 +One of them is instead of chunking, we could send entire documents in as the context. + +06:34.300 --> 06:40.480 +So we we just put full documents in Cromer and then we look for the document that's closest. + +06:40.510 --> 06:46.930 +We could also go the other way and chunk more, have more fine grained chunks, smaller chunks. + +06:47.140 --> 06:52.030 +We can also investigate that overlap between chunks to see if we increase or decrease the overlap, + +06:52.030 --> 06:52.960 +presumably increase. + +06:52.960 --> 06:57.490 +In this case, we are more likely to provide a useful chunk. + +06:57.490 --> 07:04.990 +So those are all things to investigate to get your chunking strategy working well so the right context + +07:04.990 --> 07:06.160 +is being provided. + +07:06.190 --> 07:07.420 +There is another thing. + +07:07.420 --> 07:08.230 +And it's very simple. + +07:08.230 --> 07:09.910 +And it's what we're going to do in this case. + +07:10.090 --> 07:15.850 +And that is to control the number of chunks, the amount of context that actually does get sent in. + +07:16.090 --> 07:21.390 +Um, so in our case, we're just sending a I think it's actually three chunks that are getting sent + +07:21.390 --> 07:27.870 +in here, and you can actually control the number of chunks that get sent in, and you can do that in + +07:27.870 --> 07:28.650 +this way. + +07:28.650 --> 07:36.240 +When we create the retriever vector store as retriever, we can actually say how many chunks we want + +07:36.270 --> 07:38.040 +returned and passed in. + +07:38.040 --> 07:44.340 +And in this case, I have specified now that I want 25 chunks to be created and passed in. 
+
+07:44.370 --> 07:49.470
+As a general rule of thumb, it's a good idea to send a lot of context to the LLM.
+
+07:49.500 --> 07:56.730
+LLMs are very good at, at uh, only focusing on relevant contexts and ignoring irrelevant context.
+
+07:56.730 --> 07:59.670
+So it's good practice to send plenty of chunks.
+
+07:59.670 --> 08:03.810
+There are a few occasional situations where it's better not to do that.
+
+08:03.840 --> 08:10.260
+One of them, for example, is in one of the very latest models that OpenAI is offering, a model which
+
+08:10.260 --> 08:17.760
+looks in much more detail at the prompt and does some more analysis behind the scenes to really understand
+
+08:17.790 --> 08:17.970
+it.
+
+08:17.970 --> 08:20.070
+Sort of chain of thought processing on it.
+
+08:20.250 --> 08:25.710
+Um, and the recommendation there is that you don't provide lots of extra irrelevant context because
+
+08:25.710 --> 08:28.230
+that will slow down and distract the model.
+
+08:28.230 --> 08:34.020
+But with those occasional examples to one side, general rule of thumb is that more context is generally
+
+08:34.020 --> 08:35.040
+a good thing.
+
+08:35.490 --> 08:42.660
+And so in this case, there's not much harm in providing the 25 nearest chunks rather than 2 or 3 nearest
+
+08:42.660 --> 08:43.260
+chunks.
+
+08:43.260 --> 08:45.900
+We've got a total of what, 123 chunks.
+
+08:45.900 --> 08:48.810
+So this is still about a fifth of our total data.
+
+08:48.810 --> 08:51.210
+So we're not shipping our entire data set.
+
+08:51.240 --> 08:58.680
+We're picking the most relevant 25 chunks, the most relevant fifth of our content to send to the LLM.
+
+08:58.680 --> 09:00.360
+So let's see if this works.
+
+09:00.360 --> 09:02.010
+So we will run this.
+
+09:02.010 --> 09:07.530
+And then as before we will bring up our usual Gradio interface.
+
+09:07.530 --> 09:15.540
+And right off the bat we'll ask the question who won the uh sorry who received to use the.
+ +09:15.570 --> 09:16.860 +So I keep it consistent. + +09:16.890 --> 09:26.960 +Who received the prestigious I t y Award in 2023. + +09:26.990 --> 09:27.950 +And let's see. + +09:27.980 --> 09:29.240 +Drum roll please. + +09:29.270 --> 09:30.230 +Maxine. + +09:30.260 --> 09:35.300 +Maxine received the prestigious I o t 2023 award. + +09:35.300 --> 09:40.730 +So indeed, providing more chunks to the LM did solve the problem. + +09:40.940 --> 09:46.370 +So with that, the exercise for you is to now go back, experiment with this. + +09:46.370 --> 09:47.960 +Try some hard questions. + +09:47.960 --> 09:51.140 +You could always insert a few things in the documents yourself and see what happens. + +09:51.350 --> 09:54.440 +Um, and then experiment with different chunking strategies. + +09:54.440 --> 10:01.370 +Try full documents, try smaller chunks, maybe 100 characters with more and less overlap, and get + +10:01.370 --> 10:09.140 +a good feel for how that affects the quality of results and and how you can either give too little context. + +10:09.170 --> 10:11.780 +Maybe you can see some effects of providing too much context. + +10:11.810 --> 10:14.540 +Maybe that causes the responses to be less accurate. + +10:14.540 --> 10:23.000 +So experiment, get a good sense for the the the good and the bad, and a good knack for how to do this + +10:23.030 --> 10:24.830 +in a way that is most effective. + +10:24.830 --> 10:28.010 +And I will see you for the next video to wrap up. 
diff --git a/week5/community-contributions/subtitles/srts/59297743/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297743/ja_JP.srt new file mode 100755 index 0000000..87aa6f8 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297743/ja_JP.srt @@ -0,0 +1,460 @@ +WEBVTT + +00:01.310 --> 00:03.650 +そして5日目へようこそ。 + +00:03.680 --> 00:04.490 +本当だよ。 + +00:04.490 --> 00:06.680 +我々は実際に適切なJupyterノートブックにいる。 + +00:06.710 --> 00:11.060 +今回は5日目、 5週目に突入した。 + +00:11.060 --> 00:13.610 +そして、 それは以前と同じだ。 + +00:13.940 --> 00:16.580 +4日半ではなく、 4日目と重複している。 + +00:16.580 --> 00:24.560 +ここではChromaデータストアを使用しています。 すでにご存知のことばかりなので、 本当に手短に説明します。 + +00:24.560 --> 00:31.160 +そしてGradioのインターフェイスに戻る。 + +00:31.160 --> 00:31.970 +ただそれだけだ。 + +00:31.970 --> 00:33.110 +追いつかれた。 + +00:33.230 --> 00:33.890 +描かれている。 + +00:33.890 --> 00:36.080 +舞台裏の2Dと3Dのダイアグラムだ。 + +00:36.080 --> 00:37.130 +しかし、 そんな時間はない。 + +00:37.130 --> 00:37.490 +今すぐだ。 + +00:37.490 --> 00:38.840 +我々は前進する必要がある。 + +00:39.200 --> 00:46.550 +ええと、 まず最初に、 前にやった鳥小屋のテストがクロマでもバイスと同じように機能することをお見せしましょう。 + +00:46.550 --> 00:49.190 +では、 早速試してみよう。 + +00:49.400 --> 00:56.750 +あのー、 aviaryのスペルミスって、 確保する前は何をしていたんですか? 
+ +00:57.320 --> 01:00.350 +そして、 私は想像している。 + +01:00.350 --> 01:01.130 +今にわかるよ。 + +01:01.160 --> 01:01.820 +そうだ。 + +01:01.820 --> 01:06.280 +そのクロマも何の問題もない。 + +01:06.310 --> 01:07.810 +驚きはない。 + +01:07.840 --> 01:11.980 +よし、 しかし、 今度はそううまくはいかないものをお見せしよう。 + +01:11.980 --> 01:16.360 +まず、 従業員の人事文書を覗いてみたい。 + +01:16.390 --> 01:25.360 +ナレッジ・ベースに入り、 従業員のところに行くと、 あるマキシン・トンプソンの従業員記録を見ることになる。 + +01:25.390 --> 01:27.610 +マークダウンで開いてみよう。 + +01:27.610 --> 01:30.010 +というわけで、 値下げされた栄光の姿をご覧いただこう。 + +01:30.040 --> 01:35.830 +テキサス州オースティンのデータ・エンジニア、 マキシン・トンプソンの人事記録だ。 + +01:35.830 --> 01:47.860 +そして、 ちょっと注目していただきたいのは、 この下を見ていただくと、 マキシンが2023年のエンシュア・エルム・イノベーター・オブ・ザ・イヤーに認定されていることです。 + +01:47.890 --> 01:56.410 +2023年には名誉あるI o T Award in Elm Innovator of the yearを受賞。 + +01:56.440 --> 01:59.110 +さて、 正直に告白すると、 私はこの一文を自分で付け加えた。 + +01:59.110 --> 02:06.760 +これは、 GPT4やクロードが合成データの一部として発明したようなものではなかった。 + +02:07.090 --> 02:08.620 +これはすべて私がやったことだ。 + +02:08.830 --> 02:09.640 +そして、 ひどいものだ。 + +02:09.640 --> 02:11.500 +だから私を責めなさい。 + +02:11.830 --> 02:21.580 +では、 5日目に戻って、 誰が勝ったのか? + +02:24.400 --> 02:36.820 +2023年に名誉あるシュリーラム・イノベーター・オブ・ザ・イヤーを受賞したのは誰だったかな? 
+ +02:36.850 --> 02:38.740 +その内容を見てみよう。 + +02:40.570 --> 02:42.160 +わからないと書いてある。 + +02:42.190 --> 02:43.510 +しかも、 かなりぶっきらぼうに。 + +02:43.540 --> 02:44.710 +かなり素っ気ない。 + +02:44.770 --> 02:46.420 +それは興味深いね。 + +02:46.450 --> 02:47.530 +失敗したんだ。 + +02:47.530 --> 02:49.330 +それが提供された情報だった。 + +02:49.330 --> 02:51.370 +文書にもあった。 + +02:51.370 --> 02:52.900 +それは少し残念だ。 + +02:52.900 --> 02:56.290 +だから今すべきことは、 この問題を診断することだ。 + +02:56.290 --> 03:00.520 +そうすることで、 ラング・チェインがボンネットの中でどのように機能するのかを少し学ぶことになる。 + +03:00.700 --> 03:02.950 +そして、 それはあまり驚くべきことではないだろう。 + +03:03.370 --> 03:06.310 +それで、 ここで何が起こっているのか見てみよう。 + +03:06.370 --> 03:11.850 +標準出力コールバックハンドラーと呼ばれるものを作れば、 + +03:11.850 --> 03:19.590 +舞台裏で起こっていることを標準出力に出力することができる。 + +03:19.620 --> 03:22.710 +つまり、 これは皆さんが慣れ親しんでいるコードと同じなのだ。 + +03:22.740 --> 03:24.000 +私たちは警報を発する。 + +03:24.090 --> 03:25.530 +私たちは思い出を作る。 + +03:25.560 --> 03:35.850 +私たちはレトリーバーを作り、 LM、 レトリーバー、 思い出の中ですれ違うこの美しいワンライナーで会話の連鎖を作る。 + +03:35.850 --> 03:40.410 +コールバックのリストだ。 + +03:40.410 --> 03:46.560 +ここではコールバックをひとつだけ作っている。 + +03:46.560 --> 03:55.710 +そして、 この会話の連鎖が続くにつれ、 おそらく、 あー、 予想できるように、 標準的なものが何度も印刷されることになるだろう。 + +03:55.800 --> 03:58.650 +では、 誰が勝ったのか? + +03:58.680 --> 03:59.460 +私は別の言い方をした。 + +03:59.730 --> 04:00.540 +同じようにやろう。 + +04:00.570 --> 04:07.020 +2023年に栄えあるIet賞を受賞したのは? 
+ +04:07.050 --> 04:07.770 +これでよし。 + +04:07.800 --> 04:09.030 +その質問をしよう。 + +04:09.030 --> 04:10.110 +答えは後ほど。 + +04:10.110 --> 04:11.930 +何が書いてあるか見てみよう + +04:12.770 --> 04:19.010 +つまり、 ラング・チェインがどのように機能しているのか、 その一端を知ることができるのだ。 + +04:19.010 --> 04:21.170 +これにはさまざまなオブジェクトがある。 + +04:21.170 --> 04:30.440 +これらは、 会話やラグ・クエリーを構築する段階を経るにつれて、 ある種のチェーンと呼ばれるものだ。 + +04:30.440 --> 04:36.590 +また、 各ステージで何が起こっているのか、 より詳細に表示するために、 さまざまなコールバックを使用することもできる。 + +04:36.590 --> 04:41.600 +しかし、 私たちが本当に気にしているのは、 GPT4に行くことになるプロンプトだ。 + +04:41.600 --> 04:43.040 +そしてここにある。 + +04:43.040 --> 04:44.090 +システム。 + +04:44.090 --> 04:47.330 +ユーザーの質問に答えるには、 次のような文脈を利用する。 + +04:47.330 --> 04:50.300 +答えがわからなければ、 わからないと言えばいい。 + +04:50.300 --> 04:52.220 +答えを作ろうとするな。 + +04:52.250 --> 04:57.200 +このプロンプトは、 ラング・チェーンのスペシャリストたちが、 専門家のように、 + +04:57.230 --> 05:02.090 +さまざまなLLMに送る理想的なプロンプトとして作り上げたものだからだ。 + +05:02.090 --> 05:06.020 +だから、 これは盗んで自分のプロジェクトに使うのに最適なものなんだ。 + +05:06.020 --> 05:07.730 +とても丁寧に書かれている。 + +05:07.730 --> 05:13.250 +GPT4の幻覚を止めたのだから、 非常に効果的なのは明らかだ。 + +05:13.370 --> 05:16.670 +うーん、 それにしても、 よくできた促し文句だね。 + +05:17.390 --> 05:18.710 +しかし、 問題はここからだ。 + +05:18.710 --> 05:25.520 +これが、 これから登場するLMに提供された文脈である。 + +05:25.520 --> 05:30.740 +そして、 実際には、 私たちが持っているさまざまなチャンクから取り出したいくつかのチャンクであることがわかるだろう。 + +05:30.770 --> 05:36.200 +2つか3つの塊で、 HRの記録から取られたようだ。 + +05:36.410 --> 05:38.180 +でも、 彼らは正しくない。 + +05:38.180 --> 05:42.140 +なぜなら、 彼らはI o t賞について触れていないからだ。 + +05:42.140 --> 05:47.060 +つまり、 このケースでは残念ながら間違ったチャンクが特定されてしまったのだ。 + +05:47.300 --> 05:51.230 +うーん、 そして最後にこれが問題なんだ。 + +05:51.230 --> 05:55.400 +栄えあるI o t賞を受賞したのは人間である。 + +05:55.730 --> 05:56.780 +私は人間だ。 + +05:56.780 --> 06:03.320 +その質問に答えるには、 明らかに文脈が足りなかった。 + +06:03.320 --> 06:07.160 +だから、 その反応は "わからない "というものだった。 + +06:07.850 --> 06:10.370 +では、 どうすればいいのか? + +06:10.370 --> 06:15.590 +正しい文脈を提供できていないというのは、 ラグではよくある問題なんだ。 + +06:15.590 --> 06:17.440 +できることはいくつかある。 + +06:17.680 --> 06:22.420 +そのひとつは、 チャンキング戦略を見直すことだ。 + +06:22.540 --> 06:25.270 +どのように文書をチャンクに分けていますか? 
+ +06:25.270 --> 06:26.050 +そうしているのか? + +06:26.050 --> 06:26.500 +そうだね。 + +06:26.500 --> 06:28.780 +そして、 すぐにでも試せることがいくつかある。 + +06:28.810 --> 06:34.300 +そのひとつは、 チャンキングの代わりに、 ドキュメント全体をコンテキストとして送ることだ。 + +06:34.300 --> 06:40.480 +だから、 クロマーにフルドキュメントを入れて、 一番近いドキュメントを探すんだ。 + +06:40.510 --> 06:46.930 +もっと細かく、 もっと小さな塊にすることもできる。 + +06:47.140 --> 06:52.960 +チャンク間のオーバーラップを調査して、 オーバーラップを増やすか減らすかを確認することもできる。 + +06:52.960 --> 06:57.490 +この場合、 有用なチャンクを提供できる可能性が高くなる。 + +06:57.490 --> 07:06.160 +つまり、 チャンキング戦略をうまく機能させ、 適切なコンテキストを提供するためには、 これらのことを調査する必要があるのだ。 + +07:06.190 --> 07:07.420 +もうひとつある。 + +07:07.420 --> 07:08.230 +そして、 それはとてもシンプルだ。 + +07:08.230 --> 07:09.910 +そしてそれが、 今回のケースで我々がやろうとしていることだ。 + +07:10.090 --> 07:15.850 +そしてそれは、 チャンクの数、 つまり実際に送信されるコンテキストの量をコントロールすることだ。 + +07:16.090 --> 07:21.390 +この場合、 実際には3つのチャンクを送信しているのですが、 + +07:21.390 --> 07:28.650 +送信されるチャンクの数をコントロールすることができます。 + +07:28.650 --> 07:38.040 +retrieverとしてretrieverベクターストアを作成する際、 実際にいくつのチャンクを返して欲しいか、 渡して欲しいかを指定することができる。 + +07:38.040 --> 07:44.340 +そして今回のケースでは、 25個のチャンクを作成し、 渡すことを指定した。 + +07:44.370 --> 07:49.470 +一般的な経験則として、 LLMに多くの文脈を送るのは良い考えだ。 + +07:49.500 --> 07:56.730 +LLMSは、 つまり、 関連する文脈だけに焦点を当て、 無関係な文脈を無視することに長けている。 + +07:56.730 --> 07:59.670 +だから、 チャンクをたくさん送るのは良い習慣だ。 + +07:59.670 --> 08:03.810 +そうしない方がいい状況もたまにある。 + +08:03.840 --> 08:10.260 +例えば、 オープンAIが提供している最新のモデルのひとつに、 プロンプトをより詳細に見て、 + +08:10.260 --> 08:17.970 +それを本当に理解するために舞台裏でさらに分析を行うモデルがある。 + +08:17.970 --> 08:20.070 +思考の連鎖のようなものだ。 + +08:20.250 --> 08:28.230 +ええと、 そこで推奨されているのは、 無関係なコンテクストをたくさん提供しないことです。 + +08:28.230 --> 08:35.040 +しかし、 そのような時折の例を横に置いて、 一般的な経験則から言えば、 より多くの文脈を持つことは一般的に良いことである。 + +08:35.490 --> 08:43.260 +この場合、 2つか3つの最も近いチャンクを提供するよりも、 25の最も近いチャンクを提供することにそれほど大きな問題はない。 + +08:43.260 --> 08:45.900 +全部で123のチャンクがある。 + +08:45.900 --> 08:48.810 +つまり、 これはまだ全データの5分の1程度なのだ。 + +08:48.810 --> 08:51.210 +だから、 全データを出荷するわけではない。 + +08:51.240 --> 08:58.680 +私たちは、 LLMに送るために、 最も関連性の高い25のチャンク、 つまり私たちのコンテンツの最も関連性の高い5分の1を選んでいる。 + +08:58.680 --> 09:00.360 +では、 
これがうまくいくかどうか見てみよう。 + +09:00.360 --> 09:02.010 +だから、 これを実行する。 + +09:02.010 --> 09:07.530 +そして前回同様、 いつものGradioのインターフェイスを表示させる。 + +09:07.530 --> 09:15.540 +そしてすぐに、 誰が優勝したのか......申し訳ないのですが、 誰が優勝したのか......。 + +09:15.570 --> 09:16.860 +だから一貫性を保っている。 + +09:16.890 --> 09:26.960 +2023年に名誉あるI t y 賞を受賞した。 + +09:26.990 --> 09:27.950 +見てみよう。 + +09:27.980 --> 09:29.240 +ドラムロールをお願いします。 + +09:29.270 --> 09:30.230 +マキシン + +09:30.260 --> 09:35.300 +マキシンは名誉あるI o t 2023賞を受賞した。 + +09:35.300 --> 09:40.730 +つまり、 LMにチャンクを増やすことで問題は解決したのだ。 + +09:40.940 --> 09:46.370 +ということで、 あなたへの練習は、 もう一度戻って、 このことを実験してみることだ。 + +09:46.370 --> 09:47.960 +難しい質問に挑戦してみよう。 + +09:47.960 --> 09:51.140 +自分で文書にいくつか挿入して、 何が起こるか見てみるのもいい。 + +09:51.350 --> 09:54.440 +そして、 いろいろなチャンキング戦略を試してみる。 + +09:54.440 --> 10:01.370 +文書全体を試したり、 100文字程度の小さなチャンクで、 重なりを多くしたり少なくしたりして、 + +10:01.370 --> 10:09.140 +それが結果の質にどのように影響するか、 またどのように文脈を与えすぎるかについて、 良い感触を得る。 + +10:09.170 --> 10:11.780 +文脈を提供しすぎることの影響もわかるかもしれない。 + +10:11.810 --> 10:14.540 +そのせいで、 回答の精度が落ちるのかもしれない。 + +10:14.540 --> 10:24.830 +だから実験して、 良いことも悪いこともよく理解し、 最も効果的なやり方についてコツをつかむのだ。 + +10:24.830 --> 10:28.010 +それではまた、 次のビデオでお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/59297743/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297743/ko_KR.srt new file mode 100755 index 0000000..bce3619 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297743/ko_KR.srt @@ -0,0 +1,514 @@ +WEBVTT + +00:01.310 --> 00:03.650 +5일째에 오신 걸 환영해요 + +00:03.680 --> 00:04.490 +진짜로요 + +00:04.490 --> 00:06.680 +주피터 공책에 있는 거예요 + +00:06.710 --> 00:11.060 +오늘은 5일째예요 5주 차죠, 준비됐어요 + +00:11.060 --> 00:13.610 +예전과 똑같아요 + +00:13.940 --> 00:16.580 +4일 반이 아니라 4일 차와 똑같은 거예요 + +00:16.580 --> 00:22.760 +크로마 Datastore를 사용하고 있습니다 빠르게 살펴보겠습니다 이미 + +00:22.760 --> 00:24.560 +다 알고 계시니까요 + +00:24.560 --> 00:31.160 +그리고 그래디오 인터페이스로 돌아갈 거예요 get it + +00:31.160 --> 00:31.970 +이렇게요 + +00:31.970 --> 00:33.110 +그게 우리를 따라잡았죠 + +00:33.230 --> 00:33.890 +그려진 거예요 + +00:33.890 --> 00:36.080 
+무대 뒤에서 2D와 3D 도표가 펼쳐지죠 + +00:36.080 --> 00:37.130 +하지만 그럴 시간이 없어요 + +00:37.130 --> 00:37.490 +지금요 + +00:37.490 --> 00:38.840 +계속 가야 해요 + +00:39.200 --> 00:45.230 +먼저 보여드릴 게 있어요 전에 했던 조류 실험은 바이브스 실험처럼 채도로도 + +00:45.260 --> 00:46.550 +효과가 있어요 + +00:46.550 --> 00:49.190 +우리도 빨리 해보죠 + +00:49.400 --> 00:56.750 +철자를 틀리게 쓴 새장은 어떤 역할을 했죠? + +00:57.320 --> 01:00.350 +상상이 돼요 + +01:00.350 --> 01:01.130 +두고 봐야죠 + +01:01.160 --> 01:01.820 +네 + +01:01.820 --> 01:06.280 +채도가 전혀 문제가 없어요 + +01:06.310 --> 01:07.810 +놀랍지도 않네요 + +01:07.840 --> 01:11.980 +좋아요, 하지만 잘 안 될 만한 걸 보여드리죠 + +01:11.980 --> 01:16.360 +먼저 직원 인사 자료를 좀 볼게요 + +01:16.390 --> 01:22.210 +지식 기반으로 가서 직원들을 살펴보죠 맥신 톰프슨의 + +01:22.210 --> 01:25.360 +직원 기록을 살펴볼 거예요 + +01:25.390 --> 01:27.610 +마크다운으로 시작하죠 + +01:27.610 --> 01:30.010 +그래서 완전히 파손된 모습을 보게 되죠 + +01:30.040 --> 01:35.830 +맥신 톰프슨의 인사 기록이에요 텍사스주 오스틴에 사는 데이터 엔지니어죠 + +01:35.830 --> 01:41.110 +잠시 주목해 주셨으면 하는 게 있어요 여기 아래를 보시면 + +01:41.110 --> 01:47.860 +맥신이 2023년 올해의 엘름 이노베이터로 선정된 게 보일 거예요 + +01:47.890 --> 01:56.410 +2023년 엘름 최고의 혁신가 부문에서 아이오티상을 수상했죠 + +01:56.440 --> 01:59.110 +고백하자면 이 문장은 제가 추가했어요 + +01:59.110 --> 02:06.760 +GPT 4나 클로드가 만든 합성 데이터의 일부로 만들어진 건 아니었어요 + +02:07.090 --> 02:08.620 +다 내 탓이에요 + +02:08.830 --> 02:09.640 +끔찍해요 + +02:09.640 --> 02:11.500 +그러니 날 탓해요 + +02:11.830 --> 02:21.580 +이제 5일 차로 돌아가서 질문을 하나 할게요 누가 이겼을까요? 
+ +02:24.400 --> 02:36.820 +누가 2023년에 권위 있는 슈리람 올해의 혁신자 상을 받았는지를요 + +02:36.850 --> 02:38.740 +뭐라고 쓰여 있는지 보죠 + +02:40.570 --> 02:42.160 +모른다고 나오네요 + +02:42.190 --> 02:43.510 +직설적으로 말하네요 + +02:43.540 --> 02:44.710 +퉁명스럽죠 + +02:44.770 --> 02:46.420 +그게 흥미로워요 + +02:46.450 --> 02:47.530 +실패하고 말았죠 + +02:47.530 --> 02:49.330 +그게 우리가 제공한 정보였어요 + +02:49.330 --> 02:51.370 +서류에 다 나와 있었어요 + +02:51.370 --> 02:52.900 +비트가 좀 실망스럽네요 + +02:52.900 --> 02:56.290 +그러니 이제 이 문제를 진단해 봐야죠 + +02:56.290 --> 03:00.520 +그렇게 함으로써 랑체인의 작동 원리를 비트 아래서 배울 거예요 + +03:00.700 --> 03:02.950 +그리 놀랍지도 않을 거예요 + +03:03.370 --> 03:06.310 +여기선 어떤 상황인지 볼 수 있죠 Get up + +03:06.370 --> 03:11.850 +아주 유용한 게 있어요 표준 아웃 콜백 처리기라는 걸 만드는 거죠 + +03:11.850 --> 03:17.010 +말 그대로 뒤에서 무슨 일이 일어나는지 표준에 프린트할 + +03:17.010 --> 03:19.590 +수 있게 해주는 거예요 + +03:19.620 --> 03:22.710 +여러분이 아주 익숙한 동일한 코드예요 + +03:22.740 --> 03:24.000 +경보를 울리는 거죠 + +03:24.090 --> 03:25.530 +메모리를 만드는 거죠 + +03:25.560 --> 03:32.340 +리트리버와 대화 사슬을 만듭니다 이 아름다운 한 줄이 LM을 통과하죠 + +03:32.340 --> 03:35.850 +리트리버, 메모리요 + +03:35.850 --> 03:40.410 +제가 하나를 더 넘기고 있는 게 보이시죠 콜백 목록이에요 + +03:40.410 --> 03:46.560 +여기선 한 개의 콜백만 만들고 있어요 표준 아웃 콜백 처리기죠 + +03:46.560 --> 03:53.430 +여러분이 예상하시듯이 반복적으로 표준에 프린트될 겁니다 이 대화 + +03:53.430 --> 03:55.710 +사슬이 실행될 때요 + +03:55.800 --> 03:58.650 +다시 문제입니다 누가 이겼을까요? + +03:58.680 --> 03:59.460 +전 다르게 표현했어요 Put it up Put it up Put it up + +03:59.730 --> 04:00.540 +똑같이 해 보죠 + +04:00.570 --> 04:07.020 +2023년 Iet상을 받은 사람은 누구일까요? 
+ +04:07.050 --> 04:07.770 +됐어요 + +04:07.800 --> 04:09.030 +그 질문을 해보죠 + +04:09.030 --> 04:10.110 +Get get get, get get, get 답을 찾아볼게요 + +04:10.110 --> 04:11.930 +뭐라고 쓰여 있는지 보죠 + +04:12.770 --> 04:18.350 +비트 박스를 통해 흔적을 얻으면 랭 체인이 어떻게 돌아가는지 알 + +04:18.350 --> 04:19.010 +수 있죠 + +04:19.010 --> 04:21.170 +다양한 물체가 있어요 + +04:21.170 --> 04:28.010 +이런 걸 체인이라고 하는데 대화를 구성하는 단계를 거치면서 연결되는 + +04:28.010 --> 04:30.440 +거예요 래그 쿼리요 + +04:30.440 --> 04:34.490 +다양한 콜백을 이용해 각 단계에서 일어나는 일에 대해 보다 상세히 + +04:34.490 --> 04:36.590 +프린트할 수도 있어요 원한다면요 + +04:36.590 --> 04:41.600 +하지만 정말 중요한 건 GPT 4에 도달하는 프롬프트죠 + +04:41.600 --> 04:43.040 +여기 있네요 + +04:43.040 --> 04:44.090 +시스템요 + +04:44.090 --> 04:47.330 +다음 컨텍스트를 이용해 사용자의 질문에 답하세요 + +04:47.330 --> 04:50.300 +모르면 모른다고 하면 되잖아요 + +04:50.300 --> 04:52.220 +없는 말 지어내지 마세요 + +04:52.250 --> 04:57.200 +정말 흥미로운 건 랭 체인 전문가들이 다양한 llm에 + +04:57.230 --> 05:02.090 +보낼 이상적인 프롬프트란 거예요 + +05:02.090 --> 05:06.020 +따라서 이건 여러분이 자신의 프로젝트에 사용하기에 아주 좋아요 + +05:06.020 --> 05:07.730 +아주 공들여 쓴 거예요 + +05:07.730 --> 05:13.250 +아주 효과적인 약이에요 GPT 4가 환각 증상을 보이지 않게 막았으니까요 + +05:13.370 --> 05:16.670 +각본이 잘 짜여진 게 좋았어요 + +05:17.390 --> 05:18.710 +하지만 문제가 있어요 + +05:18.710 --> 05:25.520 +이 컨텍스트가 제공된 곳은∙∙∙ 여기 나오는 LM이죠 + +05:25.520 --> 05:30.740 +보시면 아시겠지만 여러 덩어리에서 몇 개만 추출한 거예요 + +05:30.770 --> 05:36.200 +두세 덩어리인데 인사 기록에서 빼낸 것 같아요 + +05:36.410 --> 05:38.180 +하지만 옳지 않아요 + +05:38.180 --> 05:42.140 +아이오티상은 언급하지 않으니까요 + +05:42.140 --> 05:47.060 +안타깝게도 이 경우엔 엉뚱한 덩어리를 식별했어요 + +05:47.300 --> 05:51.230 +그리고 이 마지막 부분이 질문이에요 + +05:51.230 --> 05:55.400 +아이오티상을 받은 인간이라고 쓰여 있어요 + +05:55.730 --> 05:56.780 +내가 인간이에요 + +05:56.780 --> 06:03.320 +그 질문에 대한 답을 하기에는 좋은 맥락이 없었어요 + +06:03.320 --> 06:07.160 +그래서 이런 반응을 보였겠죠 + +06:07.850 --> 06:10.370 +그럼 어떻게 해야 할까요? + +06:10.370 --> 06:14.990 +래그와 관련해 아주 흔한 문제입니다 올바른 컨텍스트를 제공하지 않을 + +06:14.990 --> 06:15.590 +때요 + +06:15.590 --> 06:17.440 +할 수 있는 게 몇 가지 있어요 + +06:17.680 --> 06:22.420 +하나는 청킹 전략을 다시 살펴보는 거예요 + +06:22.540 --> 06:25.270 +서류를 어떻게 덩어리로 나누죠? 
+ +06:25.270 --> 06:26.050 +그러고 있어요? + +06:26.050 --> 06:26.500 +네 + +06:26.500 --> 06:28.780 +바로 시도해 볼 만한 게 몇 가지 있어요 + +06:28.810 --> 06:34.300 +그 중 하나는 청크링 대신에 전체 문서를 컨텍스트로 보낼 수 있다는 거죠 + +06:34.300 --> 06:40.480 +그래서 전체 문서를 get get get에 넣고 가장 가까운 문서를 찾아봤어요 + +06:40.510 --> 06:46.930 +반대로 큼직큼직하게 잘라 입자가 곱거나 작을 수도 있어요 + +06:47.140 --> 06:52.030 +또한 덩어리 간의 중첩을 조사해서 겹치는 부분이 늘어나는지 줄어들는지 확인할 + +06:52.030 --> 06:52.960 +수도 있죠 + +06:52.960 --> 06:57.490 +이런 경우에는 유용한 정보를 제공할 가능성이 더 크죠 + +06:57.490 --> 07:04.990 +모두 조사해볼 만한 것들이에요. 청킹 전략이 잘 작동하도록 하기 위해서요. 그래서 올바른 컨텍스트가 제공되고 있어요. + +07:04.990 --> 07:06.160 +Get up! + +07:06.190 --> 07:07.420 +한 가지 더 있어요 + +07:07.420 --> 07:08.230 +아주 간단해요 + +07:08.230 --> 07:09.910 +이 경우에 그렇게 할 거예요 + +07:10.090 --> 07:15.850 +get의 개수를 조절하는 거죠 실제로 전송되는 컨텍스트의 양을요 + +07:16.090 --> 07:21.390 +음, 저희 경우엔 그냥∙∙∙ 여기로 전송되는 건 3개일 + +07:21.390 --> 07:28.650 +거예요 전송되는 덩어리의 수를 컨트롤할 수 있어요 이렇게 할 수 있죠 + +07:28.650 --> 07:36.240 +리트리버 벡터 스토어를 리트리버로 생성할 때 얼마나 많은 덩어리를 반환하고 넘길지 + +07:36.270 --> 07:38.040 +정할 수 있어요 + +07:38.040 --> 07:44.340 +이 경우에 전 25개의 덩어리가 생성돼 전달되도록 지정했어요 + +07:44.370 --> 07:49.470 +경험상 LLM에 많은 컨텍스트를 보내는 게 좋아요 + +07:49.500 --> 07:56.730 +Rms는 관련 맥락에만 초점을 맞추고 불필요한 맥락을 무시하는 데 뛰어나요 + +07:56.730 --> 07:59.670 +그러니 덩어리를 많이 보내는 게 좋아요 + +07:59.670 --> 08:03.810 +그러지 않는 게 나은 상황도 가끔 있어요 + +08:03.840 --> 08:10.260 +예를 들어 오픈AI가 제공하는 최신 모델 중 하나가 그 예입니다 즉각적으로 + +08:10.260 --> 08:17.970 +더 자세히 살펴보고 보이지 않는 곳에서 분석을 통해 제대로 이해하는 모델이죠 + +08:17.970 --> 08:20.070 +일종의 사고의 연속이죠 + +08:20.250 --> 08:25.710 +그리고 비관련 컨텍스트를 많이 제공하지 않는 걸 추천합니다 그러면 속도가 + +08:25.710 --> 08:28.230 +느려지고 모델이 산만해지니까요 + +08:28.230 --> 08:34.020 +하지만 가끔씩 나오는 예들을 보면 경험상 일반적인 규칙은 더 많은 컨텍스트가 + +08:34.020 --> 08:35.040 +좋다는 거죠 + +08:35.490 --> 08:42.660 +이 경우에는 큰 해가 되지 않아요 가장 가까운 25개를 두세 개보다 더 많이 주는 + +08:42.660 --> 08:43.260 +거죠 + +08:43.260 --> 08:45.900 +덩어리가 총 123개예요 + +08:45.900 --> 08:48.810 +그러니 이건 전체 데이터의 5분의 1에 불과해요 + +08:48.810 --> 08:51.210 +전체 데이터 세트를 보내는 게 아니에요 + +08:51.240 --> 
08:58.680 +가장 관련 있는 25개의 청크를 선택하고 있어요 LLM을 보내기 위한 콘텐츠 중 가장 관련 있는 5분의 1이죠 + +08:58.680 --> 09:00.360 +잘 되는지 보죠 + +09:00.360 --> 09:02.010 +이걸 실행할게요 + +09:02.010 --> 09:07.530 +그리고 전처럼 평소대로 그래디오 인터페이스를 켜죠 + +09:07.530 --> 09:15.540 +그럼 바로 질문을 시작하죠 누가 이겼는지 누가 그걸 사용하게 됐는지요 + +09:15.570 --> 09:16.860 +그래서 일관성을 유지하죠 + +09:16.890 --> 09:26.960 +2023년 아이티유상을 수상했죠 + +09:26.990 --> 09:27.950 +어디 보죠 + +09:27.980 --> 09:29.240 +드럼 부탁해요 + +09:29.270 --> 09:30.230 +맥신이에요 + +09:30.260 --> 09:35.300 +맥신은 명망 높은 Iot 2023 상을 받았어요 + +09:35.300 --> 09:40.730 +달 착륙선에 더 많은 덩어리를 제공한 게 문제 해결이었죠 + +09:40.940 --> 09:46.370 +이제 돌아가서 이걸 실험해 보세요 + +09:46.370 --> 09:47.960 +어려운 질문을 해보세요 + +09:47.960 --> 09:51.140 +언제든 문서에 몇 가지 삽입해 어떻게 되는지 볼 수 있어요 + +09:51.350 --> 09:54.440 +그리고 다양한 청량 전략을 실험해 보는 거죠 + +09:54.440 --> 10:01.370 +전체 문서를 입력해보세요 더 작은 덩어리로요 100글자 정도로요 더 많거나 덜 겹치게요 그게 + +10:01.370 --> 10:09.140 +결과의 질에 어떤 영향을 미치는지 느껴보세요 너무 적은 컨텍스트를 제공할 수도 있고요 Get it + +10:09.170 --> 10:11.780 +너무 많은 컨텍스트를 제공하면 어떤 영향을 미치는지 알 수 있죠 + +10:11.810 --> 10:14.540 +그래서 NTSB의 반응이 less로 나오는 것 같아요 + +10:14.540 --> 10:23.000 +그래서 좋은 점과 나쁜 점을 파악하고 가장 효과적인 방법을 잘 파악할 수 있도록요 + +10:23.030 --> 10:24.830 +get it + +10:24.830 --> 10:28.010 +그럼 다음 영상에서 다시 만나요 diff --git a/week5/community-contributions/subtitles/srts/59297749/en_US.srt b/week5/community-contributions/subtitles/srts/59297749/en_US.srt new file mode 100755 index 0000000..7bf2757 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297749/en_US.srt @@ -0,0 +1,586 @@ +WEBVTT + +00:01.070 --> 00:04.640 +It's always welcome back to JupyterLab, my favorite place to be. + +00:04.640 --> 00:07.790 +And now we are, of course in the week five folder. 
+ +00:07.790 --> 00:14.000 +And I'm looking here at the day one Jupyter notebook in week five, which is going to be our home for + +00:14.000 --> 00:20.240 +today, where we look at building our own DIY rag implementation in a very simplistic way, but it will + +00:20.240 --> 00:22.610 +give you some real insight into how this works. + +00:22.700 --> 00:29.870 +So first of all, let me tell you about our company in Shoreham, an insurance tech company. + +00:29.990 --> 00:37.400 +We have got access to their company shared folder and it's called Knowledge Base and I have dropped + +00:37.400 --> 00:41.690 +it right here in the same week five folder for your perusal. + +00:41.690 --> 00:48.260 +If we open it up, you'll see that it contains four folders company contracts, employees and products + +00:48.260 --> 00:53.660 +representing four different um divisions or four different areas of the company. + +00:54.020 --> 01:00.050 +Um, now, one thing I will mention, as you may have guessed perhaps, is that you'll see a fair amount + +01:00.050 --> 01:03.450 +of company data here, which is all completely fictitious. + +01:03.660 --> 01:07.440 +This was, of course created by an LLM. + +01:07.470 --> 01:14.940 +I took took my own exercise from a couple of weeks ago, wrote a data generator and used it to craft + +01:14.940 --> 01:19.020 +some of this data and keep relationships between some of the documents and so on. + +01:19.050 --> 01:24.450 +I did add in a little couple of tweaks here and there myself, but almost all of this is generated by + +01:24.450 --> 01:28.530 +either GPT four or by Claude. + +01:29.370 --> 01:34.470 +So just to have a quick peek at this in company, there's a few things here about the company. + +01:34.470 --> 01:39.180 +I think these are quite short documents if these are in the form of markdown documents. + +01:39.180 --> 01:46.440 +So if I can open it in this way, then we get to see some nice, um, write ups about the insurer Elm. 
+ +01:46.440 --> 01:50.880 +It was founded apparently by Avery Lancaster in 2015. + +01:50.880 --> 01:52.230 +Remember, Avery's name will be. + +01:52.260 --> 01:53.910 +We'll be meeting her a few times. + +01:54.240 --> 01:57.030 +Um, and if you want to see the original documents, of course. + +01:57.030 --> 02:00.210 +They look like the markdown that we are familiar with. + +02:00.210 --> 02:03.850 +Um, then if we go back into knowledge base in contracts. + +02:03.850 --> 02:10.540 +We've got some more substantive information about contracts, which includes the contract terms renewal, + +02:10.570 --> 02:15.130 +the features that are included in that contract that's going to come back and be important later. + +02:15.130 --> 02:16.960 +So so keep note of that. + +02:17.260 --> 02:19.150 +Um, and some other things. + +02:19.510 --> 02:25.480 +Uh, if we go back into knowledge base again and look at employees, these are the employee HR records + +02:25.480 --> 02:26.770 +of the different employees. + +02:26.770 --> 02:32.440 +And even the CEO, Avery Lancaster herself has an HR record that you'll see right here. + +02:32.770 --> 02:41.050 +Uh, so all everyone has their their HR documents, um, and then products contains a write up of the + +02:41.050 --> 02:49.840 +different offerings from insurer Elm, including car Elm, uh, for auto insurance, uh home Elm and + +02:49.870 --> 02:50.530 +and so on. + +02:50.530 --> 02:55.600 +You can look through these documents, but they are wonderful in how real they are. + +02:55.630 --> 02:59.800 +All of course courtesy of Frontier Models. + +02:59.920 --> 03:03.480 +A nice product summary right there with even with a roadmap. + +03:03.480 --> 03:04.500 +Love it. + +03:04.980 --> 03:06.360 +Uh, okay. + +03:06.390 --> 03:08.940 +Back then to day one. + +03:08.940 --> 03:10.800 +So what are we going to do? 
+ +03:10.830 --> 03:16.380 +We are going to write something now that is going to be asking questions about this company, and we + +03:16.410 --> 03:20.700 +are going to artificially look up and insert context into the prompt. + +03:20.700 --> 03:22.110 +So we do some imports. + +03:22.110 --> 03:25.470 +We're going to use GPT four mini this time. + +03:25.530 --> 03:31.170 +Um, and um uh, so now we get to the juice. + +03:31.170 --> 03:40.560 +So what we do here this this is a Python function which will, uh, look in this folder and take all + +03:40.560 --> 03:43.350 +of its contents and put it into a list. + +03:43.350 --> 03:44.100 +Employees. + +03:44.100 --> 03:51.600 +So employees is, in fact, now going to be a list of the file names in knowledge base slash employees. + +03:51.600 --> 03:59.400 +And for each one, this is some janky code that that is just sort of hacked together, which basically + +03:59.640 --> 04:06.490 +takes the name of the file, which is in fact an employee's Floyd's name and takes the their last name. + +04:06.520 --> 04:14.140 +It just splits out their last name opens up that file and puts it into a dictionary where the key is + +04:14.140 --> 04:17.080 +the employee name and the document is the document. + +04:17.080 --> 04:20.020 +So let's just run this if we have a look at this. + +04:20.020 --> 04:22.090 +So we're expecting to see a dictionary. + +04:23.080 --> 04:24.760 +Uh sorry. + +04:25.210 --> 04:28.570 +Um let's see it's called context or dictionary. + +04:28.570 --> 04:31.180 +Let's just look at all of the keys in context. + +04:31.390 --> 04:33.130 +So here are the keys. + +04:33.130 --> 04:38.410 +The keys are indeed the last names of the employees, as you would expect. + +04:38.410 --> 04:42.040 +And one of them is Lancaster, our CEO. + +04:42.040 --> 04:44.560 +So let's just have a quick look at Lancaster. + +04:44.560 --> 04:48.520 +So we should simply be able to look in our dictionary at Lancaster. 
+ +04:48.520 --> 04:53.620 +And we should just see bam the markdown document from her HR record. + +04:53.650 --> 04:58.990 +Nothing magic at all going on here, reading in files, shoving them in a dictionary. + +04:58.990 --> 05:00.670 +That's all there is to it. + +05:01.480 --> 05:09.200 +So now let's just extend this And what we're going to do here is do exactly the same thing for the products. + +05:09.230 --> 05:16.520 +Look in the products folder, iterate through each of the files, pluck out its name, and then shove + +05:16.520 --> 05:18.800 +that in the same context dictionary. + +05:18.800 --> 05:22.520 +So now if I look in the if I run that first, try not to forget that. + +05:22.520 --> 05:23.180 +There we go. + +05:23.180 --> 05:24.650 +And now look at the keys. + +05:24.650 --> 05:28.580 +We should see a combination of employee last names and product names. + +05:28.940 --> 05:32.570 +Here are the last names and their market. + +05:32.750 --> 05:33.620 +L'm home. + +05:33.620 --> 05:34.910 +L'm calm. + +05:35.030 --> 05:38.150 +Are the product names okay? + +05:38.810 --> 05:40.130 +Ready for action. + +05:40.160 --> 05:41.780 +We'll have a system message. + +05:41.780 --> 05:47.180 +You are an expert in answering accurate questions about ensure l'm the insurance tech company. + +05:47.210 --> 05:49.100 +Give brief, accurate answers. + +05:49.100 --> 05:51.590 +If you don't know the answer, say so. + +05:51.590 --> 05:52.820 +Do not make anything up. + +05:52.820 --> 05:55.550 +If you haven't been provided with relevant context. + +05:56.060 --> 06:02.570 +It turns out that making these kinds of very authoritative instructions in the system prompt to tell + +06:02.570 --> 06:08.790 +the model not to invent things is effective in stopping it from hallucinating and keeping accuracy high. 
+ +06:08.970 --> 06:13.500 +We're going to see that again later, too, but it's worth bearing in mind in keeping this one in your + +06:13.530 --> 06:19.500 +in your notebook or in a list of useful things, because that kind of prompting is useful when accuracy + +06:19.500 --> 06:20.550 +is essential. + +06:20.940 --> 06:32.610 +So we will see that we will add that in to get we will make a function called get relevant context. + +06:32.610 --> 06:35.490 +So what this function is going to do this is very important. + +06:35.490 --> 06:45.090 +It's going to um uh it's going to take um a message, any kind of message. + +06:45.450 --> 06:53.160 +Uh, and it's then going to iterate through the context, taking the title and the details out of each + +06:53.160 --> 06:55.020 +item in the context. + +06:55.020 --> 07:02.370 +And it's going to see whether or not the title, the, the, either the last name of the employee or + +07:02.370 --> 07:07.370 +the product name, if it's a product, if that exists anywhere in the message. + +07:07.370 --> 07:14.030 +If that text, like the word Lancaster, is anywhere in the message, then it's simply going to shove + +07:14.030 --> 07:18.920 +that into this list of relevant context and return that list in the end. + +07:18.950 --> 07:21.110 +So let me show you exactly what that means. + +07:21.140 --> 07:33.620 +So if I say get relevant context and I pass in something completely irrelevant, like the cat, we'll + +07:33.620 --> 07:34.670 +get nothing back. + +07:34.700 --> 07:38.870 +The cat is not relevant to the questions about ensure. + +07:38.900 --> 07:46.160 +And if I put who is Avery Lancaster then hooray! + +07:46.190 --> 07:53.690 +We will in fact get a good robust, uh, answer in there, which is exactly what we would expect. + +07:53.840 --> 08:04.310 +If I say, who is Avery Lancaster and what is calm, then we should hopefully let's have a look. 
+ +08:04.370 --> 08:09.030 +See that we'll get back a list of two things that will include. + +08:09.060 --> 08:10.800 +Yes, you can see there's the second one beginning. + +08:10.800 --> 08:12.600 +That's column in there. + +08:12.600 --> 08:17.940 +We get back a list that contains Avery's document and the column document. + +08:18.450 --> 08:21.030 +Um, but of course this is very brittle. + +08:21.030 --> 08:28.650 +If I don't include Avery's last name, I just say Avery and I spell this without a capital C because + +08:28.650 --> 08:32.220 +I haven't even done a case insensitive match there. + +08:32.220 --> 08:34.950 +We're going to discover we get nothing back. + +08:34.950 --> 08:36.360 +This is brittle. + +08:36.360 --> 08:39.780 +It's only going to work in basic cases. + +08:40.620 --> 08:46.950 +All right, so now I just build on this one step further by having a function add context that takes + +08:46.950 --> 08:47.970 +a message. + +08:48.120 --> 08:53.190 +Um, so the idea is that this will simply get any relevant context. + +08:53.190 --> 08:56.370 +And if it finds some it's going to add that into the message. + +08:56.370 --> 09:01.290 +It's going to say the following additional context might be relevant in answering this question. + +09:01.290 --> 09:04.320 +So it's just sort of building on top of whatever it's passed. + +09:04.320 --> 09:07.900 +So to give an example Let's just ask again the question. + +09:07.960 --> 09:12.160 +Um, but yeah, let me start with who is Avery Lancaster, a positive example, and I'll give you a + +09:12.160 --> 09:12.880 +bad example. + +09:12.880 --> 09:17.170 +Who is Avery Lancaster and what, you have to run the cell. + +09:19.270 --> 09:23.410 +Uh, so, uh, who is Avery Lancaster? + +09:23.410 --> 09:24.700 +What do we see? + +09:24.940 --> 09:25.840 +Uh, we see. + +09:25.840 --> 09:26.830 +Here we go. + +09:26.830 --> 09:29.650 +Uh, who is Avery Lancaster? 
+ +09:29.680 --> 09:32.620 +The following additional context might be relevant in answering this question. + +09:32.620 --> 09:34.540 +And then there is the detail. + +09:34.750 --> 09:37.870 +Uh, and then I can give a counter example. + +09:37.870 --> 09:39.340 +Another kind of problem. + +09:39.340 --> 09:44.290 +If I say who is Alex Lancaster, it will still see the word Lancaster in there. + +09:44.290 --> 09:47.530 +And of course, it's still going to say here is relevant context. + +09:47.740 --> 09:50.380 +Um, and it's of course missed the trick there. + +09:50.380 --> 09:53.320 +So it is rough and ready. + +09:53.440 --> 09:56.470 +Um, it really is just doing a string lookup. + +09:56.470 --> 10:01.570 +But that's all we need, of course, to be writing our favorite chat function. + +10:01.570 --> 10:04.510 +You remember, this is the function that Gradio expects. + +10:04.510 --> 10:09.230 +If it's to build a quick user interface, it takes the current message and the history of messages, + +10:09.230 --> 10:12.200 +and we hopefully this is now second nature to you. + +10:12.230 --> 10:20.510 +We convert that history into the format that OpenAI expects, the list of dicts in that particular structure. + +10:20.510 --> 10:23.660 +And then we make this lovely call here. + +10:23.660 --> 10:28.880 +We added extra context to our message, and that's what we send to OpenAI. + +10:28.880 --> 10:36.590 +And then this of course OpenAI chat completions create also second nature to you and we will stream + +10:36.590 --> 10:38.150 +back the response. + +10:38.150 --> 10:40.190 +So let's give this a run. + +10:40.280 --> 10:46.430 +And now we know the last step is the one liner that brings this up in Gradio. + +10:46.760 --> 10:47.510 +Here we go. + +10:47.540 --> 10:49.370 +Let's see how this looks. + +10:54.230 --> 10:56.000 +So let's say, uh. + +10:56.060 --> 10:56.780 +Hi there. + +10:59.510 --> 11:00.050 +Hello. + +11:00.050 --> 11:01.460 +How can I assist you today? 
+ +11:01.490 --> 11:06.410 +So we'll say, who is Avery Lancaster? + +11:08.620 --> 11:09.190 +That's right. + +11:09.190 --> 11:09.460 +Yeah. + +11:09.490 --> 11:10.570 +Lancaster. + +11:11.950 --> 11:21.670 +And of course, we get back a nice good answer because we know full well that the Avery Lancaster information + +11:21.670 --> 11:24.940 +was shoved into the prompt and sent to OpenAI. + +11:25.300 --> 11:30.040 +Um, and now we can also say what is calm. + +11:32.350 --> 11:34.870 +And innovative auto insurance product. + +11:34.900 --> 11:35.770 +And there we go. + +11:35.770 --> 11:40.660 +It's got a whole ton of information summarized from that big chunk of context that will have centered + +11:40.660 --> 11:41.830 +about the product. + +11:41.950 --> 11:45.370 +And it even mentions the roadmap for continuous improvement. + +11:45.400 --> 11:46.390 +Lovely. + +11:46.990 --> 11:50.110 +Uh, so you can see it's rag. + +11:50.110 --> 11:54.130 +It's simplistic, but, you know, this is what it's all about. + +11:54.220 --> 11:56.440 +Let's just break it for a second. + +11:56.440 --> 11:57.760 +You can imagine what I'm going to do. + +11:57.790 --> 12:02.380 +I'm just going to say, uh, who is Avery? + +12:02.410 --> 12:09.170 +Well, it may it actually has the the conversation from the past, so we should probably start this + +12:09.170 --> 12:09.950 +again. + +12:10.040 --> 12:11.570 +Bring up a fresh chat. + +12:11.600 --> 12:15.470 +Otherwise it will know who Avery it is because we just asked it before and it got the context. + +12:15.470 --> 12:22.250 +So let's bring up a fresh new user interface and try it from scratch and not give it that benefit and + +12:22.250 --> 12:22.880 +just ask it. + +12:22.910 --> 12:24.320 +Who is Avery? + +12:25.760 --> 12:30.170 +I'm sorry, but I don't have any information about Avery in relation to ensure. + +12:30.470 --> 12:32.540 +So you can see that it's brittle. + +12:32.540 --> 12:34.430 +It has to take the last name. 
+ +12:34.430 --> 12:46.040 +And even if I spell the last name wrong, who is Lancaster without, uh, a capital L, it's also going + +12:46.040 --> 12:48.200 +to break, so you get the idea. + +12:48.200 --> 12:51.620 +This is an effective way to add supplemental information into the prompt. + +12:51.650 --> 12:57.140 +It gets back more accurate answers, but it is brittle and it requires exact text matching. + +12:57.140 --> 13:01.550 +Not particularly flexible, not very scalable solution. + +13:01.670 --> 13:03.140 +Uh, we can do better. + +13:03.140 --> 13:06.710 +And that is what we are going to be doing in the in the next day. + +13:06.710 --> 13:09.350 +But now back to the slides. diff --git a/week5/community-contributions/subtitles/srts/59297749/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297749/ja_JP.srt new file mode 100755 index 0000000..1e0505f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297749/ja_JP.srt @@ -0,0 +1,520 @@ +WEBVTT + +00:01.070 --> 00:04.640 +JupyterLabは僕のお気に入りの場所だ。 + +00:04.640 --> 00:07.790 +そして今、 我々はもちろん5週目のフォルダにいる。 + +00:07.790 --> 00:14.000 +日目のJupyterノートブックは、 + +00:14.000 --> 00:22.610 +今日のホームグラウンドになる。 + +00:22.700 --> 00:29.870 +ではまず、 ショアハムにある私たちの会社、 保険技術会社についてお話ししましょう。 + +00:29.990 --> 00:41.690 +我々は彼らの会社の共有フォルダにアクセスすることができ、 それはナレッジ・ベースと呼ばれています。 + +00:41.690 --> 00:48.260 +それを開いてみると、 4つの異なるUM部門、 あるいは会社の4つの異なる分野を代表する4つのフォルダー会社の契約書、 + +00:48.260 --> 00:53.660 +従業員、 製品が含まれていることがわかる。 + +00:54.020 --> 01:00.050 +ええと、 ひとつだけ言っておくと、 おそらくお察しの通り、 ここにはかなりの量の企業データが掲載されているが、 + +01:00.050 --> 01:03.450 +これらはすべて完全に架空のものである。 + +01:03.660 --> 01:07.440 +これはもちろん、 LLMによって作られたものだ。 + +01:07.470 --> 01:14.940 +私は数週間前の練習で、 データジェネレーターを作成し、 それを使っていくつかのデータを作成し、 + +01:14.940 --> 01:19.020 +いくつかの文書間の関係などを保持した。 + +01:19.050 --> 01:28.530 +私自身、 あちこちに手を加えたが、 そのほとんどすべてがGPT4かクロードによって生み出されたものだ。 + +01:29.370 --> 01:34.470 +そこで、 この会社についてちょっと覗いてみよう。 + +01:34.470 --> 01:39.180 +マークダウン形式の文書であれば、 かなり短い文書だと思う。 + +01:39.180 --> 01:46.440 
+だから、 もしこのような形でオープンすることができれば、 保険会社エルムについての素敵な、 うーん、 記事を見ることができる。 + +01:46.440 --> 01:50.880 +2015年にエイブリー・ランカスターによって設立されたようだ。 + +01:50.880 --> 01:52.230 +エイブリーの名前を覚えておいてほしい。 + +01:52.260 --> 01:53.910 +彼女とは何度か会うことになるだろう。 + +01:54.240 --> 01:57.030 +もちろん、 原本を見たいのであればね。 + +01:57.030 --> 02:00.210 +私たちがよく知っているマークダウンに似ている。 + +02:00.210 --> 02:03.850 +では、 契約のナレッジ・ベースに戻るとしよう。 + +02:03.850 --> 02:10.540 +契約に関するより本質的な情報を入手した。 契約条件の更新や、 + +02:10.570 --> 02:15.130 +その契約に含まれる機能などだ。 + +02:15.130 --> 02:16.960 +だから、 それを覚えておいてほしい。 + +02:17.260 --> 02:19.150 +他にもいろいろあるんだ。 + +02:19.510 --> 02:26.770 +もう一度ナレッジ・ベースに戻って従業員を見てみると、 これはさまざまな従業員の人事記録だ。 + +02:26.770 --> 02:32.440 +そして、 CEOのエイブリー・ランカスター自身にも、 ここにあるような人事記録がある。 + +02:32.770 --> 02:41.050 +そして、 エルム保険会社から提供されるさまざまな商品(自動車保険のエルム、 + +02:41.050 --> 02:50.530 +住宅保険のエルムなど)が紹介されている。 + +02:50.530 --> 02:55.600 +これらの文書に目を通すことはできるが、 そのリアルさが素晴らしい。 + +02:55.630 --> 02:59.800 +もちろん、 すべてフロンティア・モデルズの提供だ。 + +02:59.920 --> 03:03.480 +ロードマップまである素晴らしい製品概要だ。 + +03:03.480 --> 03:04.500 +大好きだ。 + +03:04.980 --> 03:06.360 +ああ、 わかった。 + +03:06.390 --> 03:08.940 +初日に戻る。 + +03:08.940 --> 03:10.800 +それでどうするんだ? 
+ +03:10.830 --> 03:20.700 +これから、 この会社についての質問を書くのだが、 人為的に調べて文脈をプロンプトに挿入する。 + +03:20.700 --> 03:22.110 +だから輸入もする。 + +03:22.110 --> 03:25.470 +今回はGPTフォーミニを使う。 + +03:25.530 --> 03:31.170 +ええと、 それで......ジュースの話なんだけど。 + +03:31.170 --> 03:40.560 +つまり、 これはPythonの関数で、 このフォルダーの中身をすべて取り出し、 + +03:40.560 --> 03:43.350 +リストにまとめる。 + +03:43.350 --> 03:44.100 +従業員 + +03:44.100 --> 03:51.600 +つまり、 employeesはナレッジ・ベース内のファイル名のリストとなり、 スラッシュemployeesとなる。 + +03:51.600 --> 03:59.400 +そのひとつひとつに、 ただハックしただけの奇妙なコードがあって、 基本的にファイル名(実際には従業員のフロイドの名前)を取り、 + +03:59.640 --> 04:06.490 +その姓を取る。 + +04:06.520 --> 04:17.080 +姓を分割してそのファイルを開き、 キーが従業員名、 ドキュメントがドキュメントである辞書に入れるだけです。 + +04:17.080 --> 04:20.020 +では、 これを見てみよう。 + +04:20.020 --> 04:22.090 +だから辞書に期待している。 + +04:23.080 --> 04:24.760 +ああ、 申し訳ない。 + +04:25.210 --> 04:28.570 +ええと......文脈というか辞書というか。 + +04:28.570 --> 04:31.180 +すべてのキーを文脈に沿って見てみよう。 + +04:31.390 --> 04:33.130 +その鍵はここにある。 + +04:33.130 --> 04:38.410 +キーは予想通り、 従業員の姓である。 + +04:38.410 --> 04:42.040 +その一人がランカスターで、 我々のCEOだ。 + +04:42.040 --> 04:44.560 +では、 ランカスターを簡単に見てみよう。 + +04:44.560 --> 04:48.520 +だから、 ランカスターの辞書を引けばいいのだ。 + +04:48.520 --> 04:53.620 +そして、 彼女の人事記録からマークダウンされた文書をバンバン見るべきだ。 + +04:53.650 --> 04:58.990 +ファイルを読んだり、 辞書に押し込んだりするようなマジックは、 ここではまったく行われていない。 + +04:58.990 --> 05:00.670 +それだけだ。 + +05:01.480 --> 05:09.200 +では、 これを拡張して、 商品についてもまったく同じことをやってみましょう。 + +05:09.230 --> 05:18.800 +productsフォルダを探し、 それぞれのファイルを繰り返し見て、 名前を抜き出し、 それを同じコンテキスト辞書に突っ込む。 + +05:18.800 --> 05:22.520 +だから今、 もし私が最初にそれを実行したら、 それを忘れないようにする。 + +05:22.520 --> 05:23.180 +これでよし。 + +05:23.180 --> 05:24.650 +そして、 次はキーを見てください。 + +05:24.650 --> 05:28.580 +従業員の姓と製品名の組み合わせが表示されるはずだ。 + +05:28.940 --> 05:32.570 +以下は姓とその市場である。 + +05:32.750 --> 05:33.620 +ただいま + +05:33.620 --> 05:34.910 +私は落ち着いている。 + +05:35.030 --> 05:38.150 +商品名は大丈夫ですか? 
+ +05:38.810 --> 05:40.130 +準備はできている。 + +05:40.160 --> 05:41.780 +システムメッセージが出ます + +05:41.780 --> 05:47.180 +あなたは、 私が保険技術会社であることを保証するための正確な質問に答える専門家です。 + +05:47.210 --> 05:49.100 +簡潔で正確な回答をすること。 + +05:49.100 --> 05:51.590 +答えがわからなければ、 そう言ってください。 + +05:51.590 --> 05:52.820 +何も作るな。 + +05:52.820 --> 05:55.550 +関連する文脈が提供されていないのであれば。 + +05:56.060 --> 06:02.570 +このような非常に権威的な指示をシステムのプロンプトに表示させ、 モデルに捏造をしないように指示することが、 + +06:02.570 --> 06:08.790 +幻覚を見るのを止めさせ、 精度を高く保つのに効果的であることがわかった。 + +06:08.970 --> 06:13.500 +正確さが要求される場合には、 + +06:13.530 --> 06:20.550 +このようなプロンプトが役に立つからだ。 + +06:20.940 --> 06:32.610 +そこで、 関連するコンテキストの取得という関数を作り、 それをgetに追加することにしよう。 + +06:32.610 --> 06:35.490 +だから、 この関数が何をするのか、 これはとても重要なことだ。 + +06:35.490 --> 06:45.090 +どんなメッセージでもいい。 + +06:45.450 --> 06:55.020 +そして、 コンテキストの各項目からタイトルと詳細を取り出しながら、 コンテキストの中を反復していく。 + +06:55.020 --> 07:07.370 +そして、 役職、 従業員の姓名、 製品名(製品であれば)がメッセージのどこかに存在するかどうかを確認する。 + +07:07.370 --> 07:14.030 +ランカスターという単語のようなテキストがメッセージのどこかにあれば、 それを関連する文脈のリストに押し込んで、 + +07:14.030 --> 07:18.920 +最後にそのリストを返すだけだ。 + +07:18.950 --> 07:21.110 +では、 それが何を意味するのか、 具体的にお見せしよう。 + +07:21.140 --> 07:34.670 +だから、 関連する文脈を得ると言っているのに、 猫のようにまったく関係ないものを渡してしまったら、 何も返ってこない。 + +07:34.700 --> 07:38.870 +猫は確保に関する質問とは関係ない。 + +07:38.900 --> 07:46.160 +そして、 エイブリー・ランカスターが誰なのかを書けば、 万々歳だ! 
+ +07:46.190 --> 07:53.690 +私たちが期待するような、 しっかりとした答えが返ってくるだろう。 + +07:53.840 --> 08:04.310 +エイブリー・ランカスターとは誰なのか、 そして何が落ち着いているのか。 + +08:04.370 --> 08:09.030 +それを含む2つのリストが返ってくるので見てほしい。 + +08:09.060 --> 08:10.800 +そう、 2つ目が始まっているのがわかるだろう。 + +08:10.800 --> 08:12.600 +それはコラムだ。 + +08:12.600 --> 08:17.940 +エイブリーのドキュメントとカラムのドキュメントを含むリストが返ってくる。 + +08:18.450 --> 08:21.030 +うーん、 でももちろん、 これは非常にもろい。 + +08:21.030 --> 08:32.220 +エイブリーの姓を入れない場合は、 エイブリーとだけ言い、 大文字のCを使わずに綴る。 + +08:32.220 --> 08:34.950 +何も返ってこないことに気づくだろう。 + +08:34.950 --> 08:36.360 +これは脆い。 + +08:36.360 --> 08:39.780 +それは基本的な場合にしか使えない。 + +08:40.620 --> 08:47.970 +よし、 ではこれをもう一歩進めて、 メッセージを受け取るコンテキストの追加関数を作ってみよう。 + +08:48.120 --> 08:53.190 +ええと、 つまり、 これは単純に関連する文脈を取得するということです。 + +08:53.190 --> 08:56.370 +そして、 もし何か見つかれば、 それをメッセージに追加する。 + +08:56.370 --> 09:01.290 +この質問に答えるには、 次のような追加的な文脈が関係するかもしれないと言うことだ。 + +09:01.290 --> 09:04.320 +だから、 通過したものは何であれ、 その上に積み上げていくようなものなんだ。 + +09:04.320 --> 09:07.900 +では、 例を挙げよう。 + +09:07.960 --> 09:12.880 +ええと、 でも、 エイブリー・ランカスターは誰なのか、 ポジティブな例から始めましょう。 + +09:12.880 --> 09:17.170 +エイブリー・ランカスターとは誰なのか? + +09:19.270 --> 09:23.410 +エイブリー・ランカスターって誰? + +09:23.410 --> 09:24.700 +何が見えるのか? + +09:24.940 --> 09:25.840 +そうだね。 + +09:25.840 --> 09:26.830 +さあ、 始めよう。 + +09:26.830 --> 09:29.650 +エイブリー・ランカスターって誰? 
+ +09:29.680 --> 09:32.620 +この質問に答えるには、 次のような追加的な背景が関係するかもしれない。 + +09:32.620 --> 09:34.540 +そしてディテールだ。 + +09:34.750 --> 09:37.870 +ええと、 それなら反対の例を挙げよう。 + +09:37.870 --> 09:39.340 +別の種類の問題だ。 + +09:39.340 --> 09:44.290 +アレックス・ランカスターは誰ですかと言うと、 ランカスターと表示される。 + +09:44.290 --> 09:47.530 +そしてもちろん、 ここに関連した文脈があると言うことに変わりはない。 + +09:47.740 --> 09:50.380 +うーん、 それはもちろんトリックを外している。 + +09:50.380 --> 09:53.320 +だから、 ラフで準備ができている。 + +09:53.440 --> 09:56.470 +ええと、 本当に文字列のルックアップをしているだけです。 + +09:56.470 --> 10:01.570 +しかし、 もちろん、 お気に入りのチャット機能を書くのに必要なのはそれだけだ。 + +10:01.570 --> 10:04.510 +覚えているだろうか、 これがグラディオが期待する関数なのだ。 + +10:04.510 --> 10:12.200 +クイック・ユーザー・インターフェースを構築するのであれば、 現在のメッセージとメッセージの履歴が必要です。 + +10:12.230 --> 10:20.510 +その履歴を、 OpenAIが期待するフォーマット、 つまり特定の構造のディクトのリストに変換する。 + +10:20.510 --> 10:23.660 +そして、 この素敵な電話をかける。 + +10:23.660 --> 10:28.880 +私たちはメッセージに特別なコンテキストを追加し、 それをOpenAIに送信する。 + +10:28.880 --> 10:38.150 +そして、 このOpenAIのチャットの完了は、 あなたにとって第二の天性であり、 私たちは応答をストリームバックします。 + +10:38.150 --> 10:40.190 +では、 これを試してみよう。 + +10:40.280 --> 10:46.430 +そして今、 私たちは最後のステップが、 グラディオにこれをもたらすワンライナーであることを知っている。 + +10:46.760 --> 10:47.510 +さあ、 始めよう。 + +10:47.540 --> 10:49.370 +どう見えるか見てみよう。 + +10:54.230 --> 10:56.000 +じゃあ、 えーと。 + +10:56.060 --> 10:56.780 +こんにちは。 + +10:59.510 --> 11:00.050 +こんにちは。 + +11:00.050 --> 11:01.460 +本日はどのようなご用件でしょうか? + +11:01.490 --> 11:06.410 +では、 エイブリー・ランカスターとは誰なのか? 
+ +11:08.620 --> 11:09.190 +その通りだ。 + +11:09.190 --> 11:09.460 +そうだね。 + +11:09.490 --> 11:10.570 +ランカスター + +11:11.950 --> 11:24.940 +もちろん、 エブリー・ランカスターの情報がプロンプトに押し込まれ、 OpenAIに送信されたことは十分承知しているからだ。 + +11:25.300 --> 11:30.040 +そして今、 私たちは何が冷静なのかも言える。 + +11:32.350 --> 11:34.870 +そして革新的な自動車保険商品。 + +11:34.900 --> 11:35.770 +さあ、 行こう。 + +11:35.770 --> 11:41.830 +それは、 その製品を中心としたコンテキストの大きな塊から要約された、 1トンの情報を持っている。 + +11:41.950 --> 11:45.370 +そして、 継続的改善のためのロードマップにまで言及している。 + +11:45.400 --> 11:46.390 +素敵だ。 + +11:46.990 --> 11:50.110 +ええと、 だからボロ雑巾だってわかるでしょ。 + +11:50.110 --> 11:54.130 +単純なことだけど、 これがすべてなんだ。 + +11:54.220 --> 11:56.440 +ちょっと休憩しよう。 + +11:56.440 --> 11:57.760 +私が何をするかは想像がつくだろう。 + +11:57.790 --> 12:02.380 +エイブリーって誰? + +12:02.410 --> 12:09.950 +まあ、 実際に過去からの会話があるのかもしれない。 + +12:10.040 --> 12:11.570 +新鮮なチャットを持ち出す。 + +12:11.600 --> 12:15.470 +そうでなければ、 エイブリーが誰なのかがわかってしまう。 + +12:15.470 --> 12:22.880 +だから、 フレッシュな新しいユーザーインターフェースを立ち上げて、 ゼロから試してみよう。 + +12:22.910 --> 12:24.320 +エイブリーとは? + +12:25.760 --> 12:30.170 +申し訳ないが、 エイブリーに関する確実な情報は持っていない。 + +12:30.470 --> 12:32.540 +だから、 もろいのがわかるだろう。 + +12:32.540 --> 12:34.430 +姓を名乗らなければならない。 + +12:34.430 --> 12:48.200 +苗字のスペルを間違えても、 ランカスターが大文字の "L "でなくても、 壊れてしまう。 + +12:48.200 --> 12:51.620 +これは、 プロンプトに補足情報を加える効果的な方法である。 + +12:51.650 --> 12:57.140 +より正確な答えが返ってくるが、 もろく、 正確なテキストマッチングを必要とする。 + +12:57.140 --> 13:01.550 +特に柔軟性があるわけでもなく、 拡張性の高いソリューションでもない。 + +13:01.670 --> 13:03.140 +もっといい方法がある。 + +13:03.140 --> 13:06.710 +そして、 それが次の日に私たちがやることだ。 + +13:06.710 --> 13:09.350 +しかし、 スライドに戻ろう。 diff --git a/week5/community-contributions/subtitles/srts/59297749/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297749/ko_KR.srt new file mode 100755 index 0000000..5624472 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297749/ko_KR.srt @@ -0,0 +1,571 @@ +WEBVTT + +00:01.070 --> 00:04.640 +유피터랩은 언제나 환영이죠 제가 제일 좋아하는 곳이에요 + +00:04.640 --> 00:07.790 +지금은 5주 차 폴더에 있죠 + +00:07.790 --> 00:14.000 +이건 5주 차에 작성된 첫째 날 주피터 노트예요 오늘 우리가 지낼 곳이죠 아주 
+ +00:14.000 --> 00:20.240 +단순한 방식으로 DIY 래그 구현을 구축할 겁니다 하지만 이게 어떻게 작동하는지 + +00:20.240 --> 00:22.610 +통찰력을 줄 거예요 + +00:22.700 --> 00:29.870 +우선 쇼어햄에 있는 우리 보험 기술 회사에 대해 말씀드리죠 + +00:29.990 --> 00:37.400 +회사 공유 폴더에 엑세스 권한이 있어요 노웨어 베이스라는 거죠 같은 주에 여기 5개 + +00:37.400 --> 00:41.690 +폴더에 넣어뒀어요 여러분이 정독하세요 + +00:41.690 --> 00:48.260 +열어보면 4개의 폴더가 있어요 회사 계약서, 직원과 제품 4개의 + +00:48.260 --> 00:53.660 +다른 부서나 4개의 분야를 나타내는 것들이죠 + +00:54.020 --> 01:00.050 +한 가지 언급할 것은 이미 짐작하셨겠지만 여기 회사 데이터가 꽤 많이 + +01:00.050 --> 01:03.450 +있습니다 전부 완전히 허구예요 + +01:03.660 --> 01:07.440 +이건 LLM이 만든 거예요 + +01:07.470 --> 01:14.940 +몇 주 전에 했던 운동을 그대로 하면서 데이터 생성기를 작성해서 데이터 일부를 만드는 데 사용했어요 + +01:14.940 --> 01:19.020 +몇몇 문서와 기타 등등의 관계를 유지하기 위해서요 + +01:19.050 --> 01:24.450 +저도 몇 가지 수정을 추가하긴 했지만 거의 모든 것은 + +01:24.450 --> 01:28.530 +GPT 4나 클로드가 생성했어요 + +01:29.370 --> 01:34.470 +회사에서 이걸 잠깐 보죠 회사에 관한 게 몇 가지 있어요 + +01:34.470 --> 01:39.180 +마크다운 문서라면 꽤 짧은 문서라고 생각해요 + +01:39.180 --> 01:46.440 +이렇게 열 수 있으면 INurer Elmlm에 대한 평가서를 볼 수 있어요 + +01:46.440 --> 01:50.880 +2015년에 에이버리 랭커스터가 설립한 것으로 알려져 있죠 + +01:50.880 --> 01:52.230 +에이버리 이름을 쓸 거예요 + +01:52.260 --> 01:53.910 +몇 번 만날 거예요 + +01:54.240 --> 01:57.030 +원본 문서도 보여 드릴게요 + +01:57.030 --> 02:00.210 +우리가 잘 아는 마크다운처럼 보여요 + +02:00.210 --> 02:03.850 +그럼 계약의 지식 기반으로 돌아가 보죠 + +02:03.850 --> 02:10.540 +계약에 관해 보다 실질적인 정보를 얻었는데요 계약 조건 갱신과 계약에 포함된 + +02:10.570 --> 02:15.130 +기능은 나중에 중요하게 다루게 될 텐데요 + +02:15.130 --> 02:16.960 +그러니 기억해 두세요 + +02:17.260 --> 02:19.150 +다른 것들도요 + +02:19.510 --> 02:25.480 +다시 지식 기반으로 돌아가서 직원들을 살펴보면 이건 다양한 직원들의 직원 인사 + +02:25.480 --> 02:26.770 +기록이에요 + +02:26.770 --> 02:32.440 +CEO인 에이버리 랭커스터도 인사팀의 기록이 있어요 + +02:32.770 --> 02:41.050 +모두 인사 서류를 갖고 있고 제품에는 인큐어 엘름에서 제안한 다양한 + +02:41.050 --> 02:50.530 +공지 사항이 적혀 있어요 자동차 보험에 대한 카 엘름도 있고 홈 엘름 등도 있죠 + +02:50.530 --> 02:55.600 +이 문서들을 훑어보면 정말 사실적이라는 게 놀라워요 + +02:55.630 --> 02:59.800 +프론티어 모델스 협찬이죠 + +02:59.920 --> 03:03.480 +로드맵을 가지고도 훌륭한 제품 요약이에요 + +03:03.480 --> 03:04.500 +좋아요 + +03:04.980 --> 03:06.360 +네 + 
+03:06.390 --> 03:08.940 +그 시절 첫날부터요 + +03:08.940 --> 03:10.800 +그럼 어떻게 하죠? + +03:10.830 --> 03:16.380 +이 회사에 대한 질문을 하는 뭔가를 지금 작성하겠습니다 인위적으로 + +03:16.410 --> 03:20.700 +찾아보고 프롬프트에 컨텍스트를 삽입하는 거죠 + +03:20.700 --> 03:22.110 +수입도 좀 하고요 + +03:22.110 --> 03:25.470 +이번에는 GPT 4 미니를 사용할 거예요 + +03:25.530 --> 03:31.170 +이제 주스 얘기를 해보죠 get it get it + +03:31.170 --> 03:40.560 +그래서 여기서 하는 건 이건 Python 함수입니다 이 폴더 안에서 모든 콘텐츠를 + +03:40.560 --> 03:43.350 +취해 리스트에 넣는 거죠 + +03:43.350 --> 03:44.100 +직원들요 + +03:44.100 --> 03:51.600 +따라서 직원은 이제 지식 기반/직원 파일명 목록이 되겠죠 + +03:51.600 --> 03:59.400 +각각의 파일은 엉성한 코드인데 해킹을 당해서 파일의 이름을 취해요 + +03:59.640 --> 04:06.490 +사실 직원의 플로이드 이름인데 거기에 성을 취하죠 + +04:06.520 --> 04:14.140 +성을 분할해 파일을 열고 사전에 넣죠 키는 직원 이름이고 + +04:14.140 --> 04:17.080 +문서는 문서예요 + +04:17.080 --> 04:20.020 +이걸 실행해 보죠 이걸 보세요 + +04:20.020 --> 04:22.090 +사전이 나올 거라고 기대해요 + +04:23.080 --> 04:24.760 +미안해요 + +04:25.210 --> 04:28.570 +문맥이나 사전이라고 하죠 + +04:28.570 --> 04:31.180 +모든 키를 문맥 안에서 살펴보죠 + +04:31.390 --> 04:33.130 +여기 열쇠요 + +04:33.130 --> 04:38.410 +예상하셨겠지만 열쇠는 직원들의 성이에요 + +04:38.410 --> 04:42.040 +그중 한 명은 CEO인 랭커스터예요 + +04:42.040 --> 04:44.560 +랭커스터를 간단히 살펴보죠 + +04:44.560 --> 04:48.520 +랭커스터 사전에서 간단히 찾아볼 수 있죠 + +04:48.520 --> 04:53.620 +뱀이 인사 기록에서 하락 문서를 떼는 걸 봐야 해요 + +04:53.650 --> 04:58.990 +마법 같은 일은 전혀 없어요 파일을 읽고 사전에 쑤셔 넣는 거요 + +04:58.990 --> 05:00.670 +그게 다예요 + +05:01.480 --> 05:09.200 +이제 이걸 확장해보죠 여기서 하려는 건 제품에 대해 정확히 같은 걸 하는 거예요 + +05:09.230 --> 05:16.520 +제품 폴더를 보세요, 각각의 파일을 반복해 이름을 빼내고 같은 컨텍스트 + +05:16.520 --> 05:18.800 +사전에 밀어 넣어요 + +05:18.800 --> 05:22.520 +이제 보면∙∙∙ 먼저 실행하면∙∙∙ 잊지 않도록 하세요 + +05:22.520 --> 05:23.180 +됐어요 + +05:23.180 --> 05:24.650 +이제 건반을 보세요 + +05:24.650 --> 05:28.580 +직원들의 성과 제품명을 조합한 게 보일 거예요 + +05:28.940 --> 05:32.570 +이게 그 회사 성이랑 시장이에요 + +05:32.750 --> 05:33.620 +다녀왔어요 + +05:33.620 --> 05:34.910 +진정했어요 + +05:35.030 --> 05:38.150 +제품명은 괜찮아요? + +05:38.810 --> 05:40.130 +준비됐죠? 
+ +05:40.160 --> 05:41.780 +시스템 메시지예요 + +05:41.780 --> 05:47.180 +보험 회사인지 아닌지 묻는 질문에 정확하게 대답하는 전문가죠 + +05:47.210 --> 05:49.100 +간결하고 정확하게 대답하세요 + +05:49.100 --> 05:51.590 +모르면 모른다고 하세요 + +05:51.590 --> 05:52.820 +지어내지 마세요 + +05:52.820 --> 05:55.550 +관련 맥락을 제공받지 못했다면요 + +05:56.060 --> 06:02.570 +매우 신뢰할 만한 설명을 시스템에 입력하면 아무것도 발명하지 말라고 + +06:02.570 --> 06:08.790 +경고하는 효과가 있어서 환각 증상을 막고 정확도를 높일 수 있죠 + +06:08.970 --> 06:13.500 +나중에 다시 살펴보겠지만 기억해두실 가치가 있어요 이걸 여러분 노트북에 + +06:13.530 --> 06:20.550 +두시거나 유용한 것 목록에 두시길 바랍니다 정확성이 필수적일 땐 프롬프트도 유용하거든요 + +06:20.940 --> 06:32.610 +그걸 추가해 관련 컨텍스트 get이라는 함수를 만들 거예요 + +06:32.610 --> 06:35.490 +이 함수가 하는 일은 아주 중요해요 + +06:35.490 --> 06:45.090 +이건 메시지가 될 거예요 어떤 종류의 메시지든요 + +06:45.450 --> 06:53.160 +컨텍스트를 반복할 거예요 컨텍스트에 있는 각각의 아이템에서 제목과 세부 사항을 + +06:53.160 --> 06:55.020 +가져오는 거죠 + +06:55.020 --> 07:02.370 +그리고 타이틀인지 아닌지 직원의 성인지 제품인지를 봅니다 + +07:02.370 --> 07:07.370 +제품이라면요 메시지 어딘가에 있다면요 + +07:07.370 --> 07:14.030 +랭커스터라는 단어가 메시지의 어디에 있든 간에 이 관련 컨텍스트 리스트에 + +07:14.030 --> 07:18.920 +밀어넣을 것입니다 그리고 리스트를 반환하죠 + +07:18.950 --> 07:21.110 +그게 무슨 뜻인지 보여드리죠 + +07:21.140 --> 07:34.670 +관련 컨텍스트를 get 하고 완전히 무관한 걸 넘긴다면 고양이 같은 거요 아무것도 못 얻어요 + +07:34.700 --> 07:38.870 +고양이는 보장 문제와 관련이 없어요 + +07:38.900 --> 07:46.160 +그래서 에이버리 랭커스터가 누구인지를 입력하면, 만세! Tipet up! Tipet up! Tipet up! Tipet up! Tipet up! Tipet up! 
+ +07:46.190 --> 07:53.690 +사실 아주 확실한 답이 나올 겁니다 우리가 기대하는 바로 그 답이죠 get it get it get it + +07:53.840 --> 08:04.310 +에이버리 랭커스터가 누구고 누가 차분한지 물으면 한번 보죠 + +08:04.370 --> 08:09.030 +포함할 수 있는 두 가지 목록이 get을 받게 되죠 + +08:09.060 --> 08:10.800 +네, 두 번째가 시작되는 게 보이시죠 + +08:10.800 --> 08:12.600 +저게 기둥이에요 + +08:12.600 --> 08:17.940 +에이버리의 문서와 열 문서가 담긴 목록을 받았죠 get it get it + +08:18.450 --> 08:21.030 +물론 아주 잘 부서지죠 + +08:21.030 --> 08:28.650 +Avery 성을 빼고 Avery라고만 쓸게요 대문자 C 없이요 대소문자 구분이 + +08:28.650 --> 08:32.220 +안 되는 매치도 아직 안 했거든요 + +08:32.220 --> 08:34.950 +Get을 해도 얻는 게 없다는 걸 알게 되겠죠 + +08:34.950 --> 08:36.360 +잘 부서져요 + +08:36.360 --> 08:39.780 +기본 케이스에만 적용될 거예요 + +08:40.620 --> 08:46.950 +이제 한 단계 더 나아가서 함수 추가 컨텍스트를 만들어 메시지를 + +08:46.950 --> 08:47.970 +받죠 + +08:48.120 --> 08:53.190 +간단히 어떤 관련 컨텍스트든 get이 되는 거죠 + +08:53.190 --> 08:56.370 +뭔가를 찾으면 그걸 메시지에 추가하죠 + +08:56.370 --> 09:01.290 +다음의 추가적인 컨텍스트가 이 질문에 대답하는 데 관련 있을 수 있다고 할 거예요 + +09:01.290 --> 09:04.320 +지나간 것 위에 뭔가를 쌓는 거예요 + +09:04.320 --> 09:07.900 +예를 들어보죠 질문을 다시 해보죠 + +09:07.960 --> 09:12.160 +에이버리 랭커스터부터 시작할게요 긍정적인 예죠 나쁜 예도 + +09:12.160 --> 09:12.880 +있어요 + +09:12.880 --> 09:17.170 +에이버리 랭커스터가 누구고 감방을 운영해야 한다고요 + +09:19.270 --> 09:23.410 +에이버리 랭커스터가 누구죠? + +09:23.410 --> 09:24.700 +뭐가 보이죠? + +09:24.940 --> 09:25.840 +그렇군요 + +09:25.840 --> 09:26.830 +시작할게요 + +09:26.830 --> 09:29.650 +에이버리 랭커스터가 누구죠? 
+ +09:29.680 --> 09:32.620 +다음의 추가적인 맥락은 이 질문에 대답하는 데 관련이 있을 수 있죠 + +09:32.620 --> 09:34.540 +세세한 부분까지 신경 써야 해요 + +09:34.750 --> 09:37.870 +제가 반대 예시를 들어볼게요 + +09:37.870 --> 09:39.340 +또 다른 문제요 + +09:39.340 --> 09:44.290 +알렉스 랭커스터가 누구냐고 물으면 랭커스터라고 쓰여 있을 거예요 + +09:44.290 --> 09:47.530 +물론 여전히 관련 컨텍스트라고 하죠 + +09:47.740 --> 09:50.380 +물론 거기서 묘기를 놓쳤죠 + +09:50.380 --> 09:53.320 +그래서 조잡해요 + +09:53.440 --> 09:56.470 +문자열만 살펴보는 거예요 + +09:56.470 --> 10:01.570 +하지만 최애 채팅 함수를 쓰려면 그 정도면 충분하죠 + +10:01.570 --> 10:04.510 +이게 그라디오가 기대하는 함수라는 걸 기억하세요 + +10:04.510 --> 10:09.230 +사용자 인터페이스를 빠르게 만들려면 현재 메시지와 메시지의 이력을 취해야 합니다 + +10:09.230 --> 10:12.200 +이제 이게 여러분께 제2의 본성이 됐으면 좋겠네요 + +10:12.230 --> 10:20.510 +히스토리를 OpenAI가 기대하는 형식으로 변환합니다 특정 구조의 독재 목록이죠 + +10:20.510 --> 10:23.660 +그리고 이 멋진 전화를 걸죠 + +10:23.660 --> 10:28.880 +메시지에 컨텍스트를 추가해 오픈AI에 보냈어요 + +10:28.880 --> 10:36.590 +오픈AI 채팅 완료 역시 여러분께 제2의 본성이 되어 응답을 스트림으로 + +10:36.590 --> 10:38.150 +보내드리죠 + +10:38.150 --> 10:40.190 +실행해 보죠 + +10:40.280 --> 10:46.430 +이제 마지막 단계는 그래디오에서 이걸 불러오는 한 줄 대사예요 + +10:46.760 --> 10:47.510 +시작할게요 + +10:47.540 --> 10:49.370 +어떤지 보죠 + +10:54.230 --> 10:56.000 +이렇게 말해 보죠 + +10:56.060 --> 10:56.780 +안녕하세요 + +10:59.510 --> 11:00.050 +안녕하세요 + +11:00.050 --> 11:01.460 +무엇을 도와드릴까요? + +11:01.490 --> 11:06.410 +에이버리 랭커스터가 누구냐고 물어보죠 + +11:08.620 --> 11:09.190 +맞아요 + +11:09.190 --> 11:09.460 +네 + +11:09.490 --> 11:10.570 +랭커스터요 + +11:11.950 --> 11:21.670 +물론 좋은 답변이 나왔죠 에이버리 랭커스터 정보가 입력된 정보를 오픈라이에 보냈다는 걸 알고 + +11:21.670 --> 11:24.940 +있었으니까요 get it + +11:25.300 --> 11:30.040 +이제 뭐가 평온한지 말해 볼까요? + +11:32.350 --> 11:34.870 +혁신적인 자동차 보험 상품도 있죠 + +11:34.900 --> 11:35.770 +다 됐어요 + +11:35.770 --> 11:40.660 +제품에 대해 중점을 두고 있는 큰 컨텍스트에서 요약되는 정보가 엄청나게 + +11:40.660 --> 11:41.830 +많아요 + +11:41.950 --> 11:45.370 +로드맵을 통해 계속 개선할 수 있다고도 하죠 + +11:45.400 --> 11:46.390 +좋아요 + +11:46.990 --> 11:50.110 +누더기인 거 보이시죠? + +11:50.110 --> 11:54.130 +단순하지만 이게 바로 핵심이에요 + +11:54.220 --> 11:56.440 +잠깐 끊고 얘기하죠 + +11:56.440 --> 11:57.760 +내가 뭘 할지 알죠? 
+ +11:57.790 --> 12:02.380 +에이버리가 누군지 물어볼게요 + +12:02.410 --> 12:09.950 +사실 예전에 했던 대화가 들어 있어요 다시 시작해야겠네요 + +12:10.040 --> 12:11.570 +새로운 대화를 해요 + +12:11.600 --> 12:15.470 +안 그러면 에이버리가 누군지 알 거예요 아까 물어봤잖아요 + +12:15.470 --> 12:22.880 +새로운 사용자 인터페이스를 가져와서 처음부터 시도해보죠 그런 장점은 주지 말고 그냥 물어보세요 + +12:22.910 --> 12:24.320 +에이버리가 누구죠? + +12:25.760 --> 12:30.170 +죄송하지만 에이버리에 대한 정보가 없어요 + +12:30.470 --> 12:32.540 +잘 부러지는 게 보이죠? + +12:32.540 --> 12:34.430 +성을 따야 해요 + +12:34.430 --> 12:46.040 +성 철자를 틀려도 대문자 L이 없는 랭커스터는 망가지기 마련이니 이해하시죠? + +12:46.040 --> 12:48.200 +Get it + +12:48.200 --> 12:51.620 +추가 정보를 추가하는 효과적인 방법이죠 + +12:51.650 --> 12:57.140 +더 정확한 답을 얻지만 글자가 정확히 일치해야 해요 + +12:57.140 --> 13:01.550 +유연하지도 확장성도 낮은 해결책이죠 + +13:01.670 --> 13:03.140 +더 잘할 수 있어요 + +13:03.140 --> 13:06.710 +내일 우리가 할 일이 바로 그거예요 + +13:06.710 --> 13:09.350 +이제 슬라이드로 돌아가죠 diff --git a/week5/community-contributions/subtitles/srts/59297773/en_US.srt b/week5/community-contributions/subtitles/srts/59297773/en_US.srt new file mode 100755 index 0000000..ced3536 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297773/en_US.srt @@ -0,0 +1,754 @@ +WEBVTT + +00:00.620 --> 00:06.080 +Well, I hope you're eager with anticipation for this session in JupyterLab as we finally get to see + +00:06.110 --> 00:07.640 +vectors firsthand. + +00:07.670 --> 00:13.220 +So hopefully you are in the week five folder, you're in day three, and you're going to be following + +00:13.250 --> 00:15.710 +along with me more than ever. + +00:15.710 --> 00:19.910 +It's important for this one because it's going to be visual, and you're going to get a real sense of + +00:19.910 --> 00:20.960 +what's going on. + +00:20.960 --> 00:25.940 +So this is just a duplicate of the previous day with more added. + +00:25.940 --> 00:32.690 +So we begin with some imports, and now we've added to the imports to include some special things that + +00:32.690 --> 00:33.380 +we'll be working on. + +00:33.380 --> 00:33.920 +Today. 
+ +00:33.920 --> 00:41.960 +We're going to be importing OpenAI embeddings, which is part of Lang Lang chain OpenAI package. + +00:42.230 --> 00:45.860 +We're going to be importing chroma from lang chain chroma. + +00:46.130 --> 00:50.690 +Um, and then there's a couple of things we're going to be importing so that we can have some fun and + +00:50.690 --> 00:56.180 +actually visualize these vectors, something called t-SNE, which we'll talk about, and then something + +00:56.180 --> 00:59.720 +from Plotly that is going to give us some nice diagrams. + +00:59.720 --> 01:01.750 +Let's do all of that importing. + +01:02.380 --> 01:07.120 +Okay, uh, let's run the constants. + +01:07.120 --> 01:09.040 +Bring in our environment variables. + +01:09.040 --> 01:11.440 +This, of course, is now super familiar to you. + +01:11.470 --> 01:14.440 +We're using the loaders to load in our knowledge base. + +01:14.440 --> 01:20.920 +We're using the text splitter to create our chunks, of which there are 123. + +01:20.920 --> 01:27.730 +And there's chunks for each of our four folders our contracts, products, employees and company. + +01:28.300 --> 01:35.110 +So I wanted to take one more moment to remind you about embeddings and auto encoding LMS that we've + +01:35.110 --> 01:40.030 +been talking about auto regressive LMS in most of the of the course so far. + +01:40.060 --> 01:46.120 +This is the first time we're going to look at auto encoding LMS that take an entire input and use it + +01:46.120 --> 01:49.660 +to create one output, in our case, a vector. + +01:50.140 --> 01:53.890 +And an example of an auto encoding alarm is Bert. + +01:53.920 --> 01:56.380 +We're going to be using this one here. + +01:56.410 --> 01:58.000 +OpenAI embeddings. + +01:58.210 --> 01:59.280 +Let's run that. + +01:59.520 --> 02:00.420 +Okay. + +02:00.420 --> 02:05.010 +So chroma is we're about to create our chroma data store. 
+ +02:05.310 --> 02:10.770 +And I've just put in this little section here to delete or empty the database if it's already there. + +02:10.800 --> 02:15.480 +Otherwise every time you run this code it adds in the vectors another time a sort of duplicate set of + +02:15.480 --> 02:17.520 +vectors and things get confusing. + +02:17.520 --> 02:20.760 +So this helps to refresh the database. + +02:20.760 --> 02:25.800 +If you've already run this once and you can imagine I've already run this once or twice, so this is + +02:25.800 --> 02:27.090 +a useful thing for me. + +02:27.360 --> 02:28.890 +We will delete it. + +02:29.400 --> 02:37.350 +It's sitting in this folder here vector db um, which is just a db name, a constant that I set at the + +02:37.350 --> 02:41.340 +top to show you that I said DB name is vector DB. + +02:41.370 --> 02:45.480 +It can be whatever you want and it's based on SQLite. + +02:45.510 --> 02:49.440 +So if you go in here you'll see there's some SQLite stuff going on. + +02:49.440 --> 02:55.950 +So it's just got a bunch of different files sitting within the name of whatever database name you chose + +02:55.950 --> 02:56.730 +to give it. + +02:56.730 --> 02:58.730 +In this case vector db. + +02:59.660 --> 03:06.170 +Okay, so it's time to create our vector database and populate it with our vectors. + +03:06.170 --> 03:08.930 +And I'm going to have another of those gradio moments here. + +03:08.930 --> 03:14.870 +You might think that the process of vectorizing a bunch of documents, turning them into vectors and + +03:14.870 --> 03:19.670 +storing them in a vector database is something that would be reasonably sophisticated and would be, + +03:19.700 --> 03:21.740 +I mean, at least several cells. + +03:21.980 --> 03:27.200 +But you'll find, of course, that we can do it in one cell and we can do it in one line of code. + +03:27.200 --> 03:34.010 +And that is, of course, thanks to Lang Chain putting a lot of great glue code around this. 
+ +03:34.040 --> 03:39.560 +It wouldn't actually be that hard to do it ourselves manually to iterate through, to turn each of these + +03:39.560 --> 03:44.900 +embeddings into each of these chunks into vectors, and then store it ourselves in chroma, it would + +03:44.900 --> 03:46.970 +perhaps just be 4 or 5 lines. + +03:47.060 --> 03:50.870 +Um, but it's that much simpler if we can just do it in one like this. + +03:50.870 --> 03:55.490 +And the other great thing about this is that if you try different vector data store, it's the same + +03:55.490 --> 04:00.280 +thing just with a different vector store imported from Lang Chain, and you might want to give that + +04:00.280 --> 04:01.630 +a try yourself. + +04:01.630 --> 04:06.700 +But let me run this one line and it has run. + +04:06.700 --> 04:11.200 +And the vector store has been created with 123 documents. + +04:11.200 --> 04:14.800 +And that's comforting to know, because 123 was the number of chunks that we had. + +04:14.800 --> 04:21.040 +So it's good that there is indeed a same, uh, number of documents in our vector data store. + +04:21.520 --> 04:26.950 +Um, and the way that we found that, by the way, is that the vector store object that came back from + +04:26.950 --> 04:33.880 +creating Chrome, we passed in, of course, the chunks, the embeddings, which was the OpenAI embeddings, + +04:33.880 --> 04:35.860 +and then just simply the name of the database. + +04:35.860 --> 04:40.060 +Those were the three things we provided, which makes total sense. + +04:40.090 --> 04:42.820 +And that's all that we needed to create our vector store. + +04:42.820 --> 04:50.170 +And that vector store we can call underscore collection dot count to get the number of documents in + +04:50.170 --> 04:51.220 +the vector store. + +04:51.850 --> 05:03.270 +So let's just find out how many dimensions these vectors have so we can get a vector from the collection + +05:03.270 --> 05:04.380 +with this. 
+ +05:04.410 --> 05:10.650 +So so so the collection is just what you get if you call this underscore collection um uh attribute + +05:10.650 --> 05:11.640 +from Vector Store. + +05:11.640 --> 05:14.040 +So we can then call dot get. + +05:14.070 --> 05:15.300 +We can pass in a limit. + +05:15.300 --> 05:16.380 +We only want one. + +05:16.380 --> 05:20.550 +We want it to bring back the embeddings which is the vector itself. + +05:20.760 --> 05:26.490 +Um, and then we will just have a look at that and let's have a look at how large it is. + +05:26.490 --> 05:30.180 +It is 1536 dimensions. + +05:30.180 --> 05:34.920 +So 1536 numbers make up this chunk. + +05:34.920 --> 05:38.850 +So let's have a look at this ourselves so we can just just print it. + +05:38.850 --> 05:43.260 +Let's just print sample embedding see what it looks like. + +05:43.890 --> 05:44.430 +Wowza. + +05:44.430 --> 05:45.810 +It's a lot of numbers. + +05:46.080 --> 05:48.000 +So it's a bunch of numbers. + +05:48.030 --> 05:51.360 +It's meaningless to to you and me. + +05:51.540 --> 05:54.770 +Uh, but these numbers in some way. + +05:55.460 --> 06:02.450 +We see these numbers in some way reflect the meaning of the chunk that they're associated with. + +06:02.450 --> 06:03.230 +And we'll see. + +06:03.650 --> 06:06.560 +We'll associate it with a chunk in just a second. + +06:07.370 --> 06:09.140 +So it's a ton of numbers. + +06:09.140 --> 06:17.480 +It's 1536 numbers, which we can interpret as representing a coordinate in 1536 dimensional space. + +06:17.720 --> 06:24.830 +Um, and that that coordinate is chosen such that other other vectors that have a similar coordinate + +06:24.830 --> 06:28.790 +that are close in vector space should have similar meaning. + +06:28.790 --> 06:30.740 +That's the whole idea behind this. + +06:30.980 --> 06:34.940 +So let's take a moment to visualize this. + +06:34.940 --> 06:37.640 +That's going to be a nice, uh, get rid of some of these comments. 
+
+06:37.640 --> 06:44.960
+This will be some nice, uh, um, uh, a nice way to, to be able to really investigate what's going
+
+06:44.960 --> 06:46.070
+on behind the scenes.
+
+06:46.070 --> 06:53.320
+So you can call collection.get and ask for the embeddings, the documents and the metadata, like this.
+
+06:53.320 --> 06:56.320
+And once I've done that, I can put the vectors.
+
+06:56.320 --> 07:03.010
+I can take the embeddings and put them into an array of vectors, and I can take the the doc types and
+
+07:03.010 --> 07:04.720
+put that into something called doc types.
+
+07:04.720 --> 07:07.960
+And then I'm going to make some some colors up as you will see.
+
+07:07.990 --> 07:10.300
+So this is just some pre-work to get ready.
+
+07:10.600 --> 07:18.400
+Now there's one problem that we have is that, uh, as human beings have a deficiency, that we have
+
+07:18.430 --> 07:22.360
+a problem visualizing anything in more than three dimensions.
+
+07:22.360 --> 07:27.610
+So visualizing something in 1536 dimensions is going to be very challenging indeed.
+
+07:27.670 --> 07:34.360
+Um, but luckily there are various techniques we can use to do what they call projecting down or trying
+
+07:34.360 --> 07:41.290
+to, uh, reduce the dimensions to only, let's say, two dimensions in a way that does the best possible
+
+07:41.290 --> 07:47.680
+job at separating things out to stay faithful to the, the, uh, multi-dimensional representation.
+
+07:47.680 --> 07:52.860
+So trying to do it in a way that is going to have things that are further apart in all of these dimensions
+
+07:52.860 --> 07:57.780
+will still be fairly far apart, even when it's projected down to two dimensions.
+
+07:58.290 --> 07:59.340
+That's the idea.
+
+07:59.370 --> 08:03.240
+There are various techniques for doing it that we're not going to go into, but the one that we're going
+
+08:03.270 --> 08:04.710
+to use is extremely common.
+ +08:04.710 --> 08:09.660 +It's called t-SNE, which stands for T-distributed Stochastic Neighbor embedding, and is one of those + +08:09.660 --> 08:14.670 +ones that you can Google or ask ChatGPT if you want an explanation behind it. + +08:15.150 --> 08:19.710 +Um, so we pass into it, how many dimensions do we want? + +08:19.740 --> 08:24.630 +We want it projected down from the thousand and whatever to two dimensions. + +08:24.630 --> 08:29.790 +And this random state is a way of setting its random seed so that each time we call this, we get the + +08:29.790 --> 08:33.210 +same thing so that we can reproduce this. + +08:33.450 --> 08:40.440 +Um, and then you can get your reduced vectors just by calling the fit transform method on this t-SNE + +08:40.470 --> 08:41.400 +object. + +08:41.820 --> 08:49.770 +Um, what I'm then doing is I'm using the fabulous library plotly to make a nice scatter diagram. + +08:49.890 --> 08:55.570 +Um, And, you know, this is all sort of this kind of code that you can copy and paste and reuse as + +08:55.570 --> 08:56.170 +you wish. + +08:56.170 --> 09:01.720 +But I've got some nice things like I'm making the markers have different colors based on the colors + +09:01.720 --> 09:08.260 +that we set here for the different document types, and also got some pop up text which is going to + +09:08.290 --> 09:14.560 +actually have a fragment, the first 100 characters from the chunk itself. + +09:14.560 --> 09:20.050 +So what we're hoping to see here is we're hoping to see how do the documents look in the vector database. + +09:20.050 --> 09:25.810 +And then for each of these different vectors, we're going to color it by what kind of document is it. + +09:25.810 --> 09:27.850 +And it's going to have hover over text. + +09:27.850 --> 09:33.100 +So we can actually read a little bit of the fragment of text that this chunk represents. + +09:33.100 --> 09:34.600 +So we'll see if this works. + +09:34.600 --> 09:35.800 +That would be quite cool. 
+ +09:36.370 --> 09:37.420 +Of course I know it works. + +09:37.420 --> 09:38.470 +I've already tried it. + +09:38.530 --> 09:39.580 +Here it is. + +09:39.610 --> 09:40.420 +Okay. + +09:40.420 --> 09:42.040 +So what are we looking at. + +09:42.670 --> 09:50.670 +So we're looking at a visualization of the multi-dimensional vectors projected down to 2D for us, there's + +09:50.670 --> 09:53.370 +no particular meaning of the x axis and the y axis. + +09:53.370 --> 09:59.100 +It's just the best possible way of spreading out the different points. + +09:59.190 --> 10:01.560 +Now there's a few things to notice. + +10:01.560 --> 10:07.500 +The green dots that you see here are representing employees. + +10:07.530 --> 10:10.350 +The red dots are representing. + +10:10.530 --> 10:17.730 +And when I say employees I mean they're representing chunks of text plucked out of employee documents. + +10:17.760 --> 10:22.020 +The red things are chunks of text coming out of contracts. + +10:22.050 --> 10:28.830 +The blue are from product documentation and the yellow is from about documents. + +10:29.760 --> 10:36.900 +Now, there's something a bit magical that you have to to appreciate, to realize at this point when + +10:36.930 --> 10:44.400 +the OpenAI embeddings, when when it was vectorizing these chunks of text, it did not know the document + +10:44.430 --> 10:44.850 +type. + +10:44.850 --> 10:46.830 +We didn't tell it the metadata. + +10:46.830 --> 10:52.190 +We didn't we didn't tell it that summer employees, summer products and summer contracts. + +10:52.190 --> 10:55.580 +We just gave it the chunk of text and said, turn this into a vector. + +10:55.580 --> 10:57.500 +And so that's all it had. + +10:57.770 --> 11:04.880 +Um, and so it's kind of magical that based on those chunks, these chunks have been separated out in + +11:04.880 --> 11:10.730 +vector space and they occupy different distinct regions in space. 
+ +11:10.910 --> 11:16.070 +Um, because they're somewhat similar to each other in terms of the content, the meaning of what they + +11:16.070 --> 11:20.630 +represent, the employees are all kind of in the same general territory. + +11:20.630 --> 11:26.570 +The, the, the contracts are over here and the products are here. + +11:26.690 --> 11:34.820 +You may notice that some of the contract information appears to be more a sort of a, uh, in the same + +11:34.820 --> 11:37.100 +vicinity as the products. + +11:37.100 --> 11:39.290 +And that might be surprising for a moment. + +11:39.290 --> 11:41.210 +It might look like maybe something's gone wrong. + +11:41.210 --> 11:48.010 +But no, if you hover over these chunks, you'll see that this is the particular chunk of text in a + +11:48.010 --> 11:53.170 +contract, which describes the key features that that client has signed up for. + +11:53.380 --> 11:56.260 +And you can see there it says type the text in there. + +11:56.290 --> 12:02.920 +Sorry if I if I hover for a moment where you see that second line where it says text, that is the extract + +12:02.920 --> 12:08.440 +from the chunk that has been put in that location in vector space, and you'll see that it starts features + +12:08.440 --> 12:10.420 +one AI powered matching. + +12:10.660 --> 12:12.250 +And let's find another one here. + +12:12.610 --> 12:17.620 +Uh, AI powered risk assessment features. + +12:17.800 --> 12:19.420 +It's the same thing. + +12:19.870 --> 12:27.310 +Um, so what we're seeing here is that within contracts, there is some information that is more like + +12:27.310 --> 12:28.420 +functionality. + +12:28.420 --> 12:35.980 +And that functionality lives in vector space in a similar kind of space to other product information. + +12:35.980 --> 12:38.800 +So, uh, hopefully that makes sense. 
+ +12:38.830 --> 12:46.890 +It's again, it's almost spooky how good it is at kind of associating the right location in space to + +12:46.920 --> 12:49.290 +the meaning behind the document. + +12:49.590 --> 12:56.610 +And the final point I'll mention, and I might be I might be putting, uh, putting taking this too + +12:56.610 --> 12:59.520 +far, but it really seems to me like this is what's going on. + +12:59.550 --> 13:04.260 +You'll notice that the three documents that are about the company. + +13:04.440 --> 13:07.380 +Uh, these three here are kind of in the center. + +13:07.410 --> 13:10.290 +They're right in the middle between all these different documents. + +13:10.290 --> 13:17.580 +And I think that's because it kind of encompasses some information that pertains to the employees and + +13:17.580 --> 13:23.370 +the contracts and the, the, the, the product as well. + +13:23.580 --> 13:28.080 +Uh, it's kind of central information that that is related to everything. + +13:28.080 --> 13:34.950 +And that's why it's somehow in a sort of place that is in the middle of all of the rest of the information. + +13:35.400 --> 13:37.350 +Um, so I find that fascinating. + +13:37.350 --> 13:41.280 +I might be reading too much into it, but it does seem that way to me. + +13:41.400 --> 13:46.730 +Uh, and one of the things that you can do is just experiment with putting any chunks of text that you + +13:46.730 --> 13:48.770 +want into the vector database. + +13:48.860 --> 13:56.270 +You can just imagine you can just go back and add in other documents, uh, into the documents that + +13:56.270 --> 13:57.080 +we load here. 
+ +13:57.080 --> 14:02.360 +Either you can just specify a file directly, or you can just put in some more text or just add text + +14:02.360 --> 14:08.690 +into the existing documents and then see where they end up in vector space, and use that as a way of + +14:08.690 --> 14:16.070 +really getting to appreciate, uh, the the way that OpenAI embeddings is able to understand the meaning + +14:16.070 --> 14:24.440 +behind a different chunk of text and position that in a way that is, that is closest to other things + +14:24.440 --> 14:25.940 +with similar meaning. + +14:26.060 --> 14:28.220 +And that's that's the whole idea. + +14:28.730 --> 14:33.140 +Well, this 2D representation was was really cool. + +14:33.140 --> 14:35.420 +Uh, is there anything better than a 2D representation? + +14:35.450 --> 14:36.320 +Of course there is. + +14:36.350 --> 14:40.040 +There's a 3D representation and that's what we're going to do next. + +14:40.220 --> 14:46.160 +Uh, we're going to try 3D because it's just as simple as turning that number two into a number three, + +14:46.160 --> 14:51.140 +and we will then be able to visualize these vectors in 3D. + +14:51.170 --> 14:52.280 +It wasn't quite that simple. + +14:52.310 --> 14:57.620 +I also had to put a 3D in here, and I had to mess around with with some other vectors and all of that + +14:57.620 --> 14:59.480 +stuff, but it's pretty similar. + +14:59.480 --> 15:02.510 +And I had to change the title from 2D to 3D as well. + +15:02.690 --> 15:04.910 +Anyway, let's see how this looks. + +15:04.910 --> 15:06.170 +Here it is. + +15:06.440 --> 15:13.010 +Um, so these are our vectors projected not all the way down to 2D, but now to 3D. + +15:13.160 --> 15:17.990 +Um, and the first thing you might notice is that actually, this is one of those rare times, uh, + +15:17.990 --> 15:23.090 +a bit like movies that, uh, 3D isn't necessarily better than 2D. + +15:23.300 --> 15:27.200 +Uh, that, uh, it does look a little bit more of a of a jumble. 
+ +15:27.200 --> 15:28.790 +It's a bit harder to to see. + +15:28.820 --> 15:31.100 +It does look again like the yellow is in the middle. + +15:31.220 --> 15:34.910 +Uh, and green, red and blue are clearly separated out. + +15:34.910 --> 15:40.610 +But there are some, um, that have combined, as you would expect, as we saw before. + +15:40.640 --> 15:46.360 +But the nice thing about this is we can, uh, actively, uh, we can interactively play with this. + +15:46.390 --> 15:48.310 +We can rotate it like this. + +15:49.060 --> 15:50.260 +Isn't that something? + +15:50.260 --> 15:55.150 +And we can use this to get a better sense of how they are laid out. + +15:55.150 --> 15:59.830 +And it does give you a nice sense that there is a distinction between them. + +16:00.130 --> 16:07.540 +Um, but I do have to admit that it's not perhaps as clear as the 2D representation. + +16:07.630 --> 16:10.780 +Uh, but nonetheless, it looks great. + +16:10.780 --> 16:13.900 +If we hover, you get to see the meaning behind the points. + +16:13.900 --> 16:19.870 +Of course, as before, the yellow is the about, which does seem to be somewhat in the middle of everything, + +16:19.960 --> 16:26.410 +uh, consistent with the point I was making before, uh, this blue one here is a bit of an outlier. + +16:26.800 --> 16:34.270 +Um, and, uh, yeah, you, uh, you can certainly have fun with the 3D representation, although perhaps + +16:34.270 --> 16:36.970 +the 2D one is a bit more clear in some ways. + +16:37.360 --> 16:43.310 +Anyway, that concludes our playing with vectors, but there's a big takeaway, which is now it's over + +16:43.310 --> 16:49.010 +to you, not just about looking at these diagrams and inspecting the different chunks of text, but + +16:49.010 --> 16:50.930 +adding your own chunks of text. + +16:50.960 --> 16:55.970 +Simple way to do it is just to put things documents into the knowledge, into the knowledge base directory. 
+
+16:55.970 --> 16:58.040
+You can just do that and it will just work right away.
+
+16:58.130 --> 17:04.670
+But you can also play around with the text loader classes to add in chunks using LangChain's code instead
+
+17:04.670 --> 17:06.260
+of just dropping a document in there.
+
+17:06.260 --> 17:12.230
+But when you've put your chunks in, see where they end up in vector space and get a sense of how that
+
+17:12.230 --> 17:12.800
+works.
+
+17:12.830 --> 17:17.480
+Or you can replace the whole knowledge base directory, rename that to something else, create a new
+
+17:17.480 --> 17:23.840
+directory and just fill it with some some very simple things just so you can experiment with seeing
+
+17:23.840 --> 17:28.430
+how documents get put into different locations in vector space.
+
+17:28.790 --> 17:36.500
+And that's super important homework because it's going to give you a good basis for the next part,
+
+17:36.500 --> 17:39.170
+which is when we get to RAG for real.
+
+17:39.200 --> 17:41.000
+First, let's go back to the slides.
diff --git a/week5/community-contributions/subtitles/srts/59297773/ja_JP.srt b/week5/community-contributions/subtitles/srts/59297773/ja_JP.srt new file mode 100755 index 0000000..ca7c4a2 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297773/ja_JP.srt @@ -0,0 +1,616 @@ +WEBVTT + +00:00.620 --> 00:07.640 +さて、 JupyterLabのこのセッションは、 ついにベクトルを直接見ることができるということで、 期待に胸を膨らませていることだろう。 + +00:07.670 --> 00:15.710 +だから、 あなたが5週目のフォルダに入っていて、 3日目に入っていて、 これまで以上に私についてきてくれることを願っている。 + +00:15.710 --> 00:20.960 +今回はビジュアルが重要で、 何が起こっているのかを実感できるだろうからね。 + +00:20.960 --> 00:25.940 +だから、 これは前日の重複にさらに追加しただけだ。 + +00:25.940 --> 00:33.380 +まずはインポートをいくつか用意し、 そのインポートにこれから取り組む特別なものを追加した。 + +00:33.380 --> 00:33.920 +今日は + +00:33.920 --> 00:41.960 +OpenAIのエンベッディングをインポートします。 これはLang Lang chain OpenAIパッケージの一部です。 + +00:42.230 --> 00:45.860 +ラングチェーンchromaからクロマをインポートする。 + +00:46.130 --> 00:50.690 +t-SNEと呼ばれるものについてはこれから説明します。 + +00:50.690 --> 00:59.720 +それからPlotlyのもので、 素敵な図を作成することができます。 + +00:59.720 --> 01:01.750 +インポートを全部やろう。 + +01:02.380 --> 01:07.120 +よし、 定数を調べてみよう。 + +01:07.120 --> 01:09.040 +環境変数を持ってくる。 + +01:09.040 --> 01:11.440 +もちろん、 これはもうお馴染みだろう。 + +01:11.470 --> 01:14.440 +ナレッジベースを読み込むためにローダーを使っている。 + +01:14.440 --> 01:20.920 +テキスト分割ツールを使ってチャンク(123個)を作っている。 + +01:20.920 --> 01:27.730 +そして、 契約、 製品、 従業員、 会社の4つのフォルダごとにチャンクがある。 + +01:28.300 --> 01:35.110 +そこで、 エンベッディングと自動エンコーディングLMSについて、 もう1度思い出していただきたいのですが、 + +01:35.110 --> 01:40.030 +これまでのコースのほとんどで、 自動回帰LMSについてお話ししてきました。 + +01:40.060 --> 01:49.660 +今回は、 入力全体を受け取り、 それを使って1つの出力(この場合はベクトル)を作成するオートエンコーディングLMSについて初めて見ていく。 + +01:50.140 --> 01:53.890 +そして、 自動エンコーディングのアラームの例がバートだ。 + +01:53.920 --> 01:56.380 +これからこれを使う。 + +01:56.410 --> 01:58.000 +OpenAIのエンベッディング。 + +01:58.210 --> 01:59.280 +それを実行しよう。 + +01:59.520 --> 02:00.420 +オーケー。 + +02:00.420 --> 02:05.010 +クロマはこれからクロマ・データ・ストアを作るところだ。 + +02:05.310 --> 02:10.770 +そして、 もしすでにデータベースがある場合は、 削除するか空にするために、 この小さなセクションをここに入れました。 + +02:10.800 --> 02:15.480 +そうしないと、 このコードを実行するたびに、 また別のベクトルが追加され、 
ある種の重複したベクトル・セットとなり、 + +02:15.480 --> 02:17.520 +混乱することになる。 + +02:17.520 --> 02:20.760 +これはデータベースをリフレッシュするのに役立つ。 + +02:20.760 --> 02:27.090 +すでに1度実行したことがある人なら想像がつくと思うが、 私はすでに1度か2度実行しているので、 これは私にとって便利なものだ。 + +02:27.360 --> 02:28.890 +削除します。 + +02:29.400 --> 02:41.340 +このフォルダーの中にあるvector db umは、 DB名で、 DB名がvector DBであることを示すために一番上に設定した定数だ。 + +02:41.370 --> 02:45.480 +SQLiteをベースにしている。 + +02:45.510 --> 02:49.440 +この中に入ってみると、 SQLiteが使われているのがわかるだろう。 + +02:49.440 --> 02:56.730 +つまり、 あなたが選んだデータベース名の中に、 さまざまなファイルが並んでいるだけなのだ。 + +02:56.730 --> 02:58.730 +この場合はベクトルdb。 + +02:59.660 --> 03:06.170 +それでは、 ベクターデータベースを作成し、 ベクターを入れてみましょう。 + +03:06.170 --> 03:08.930 +そして、 私はここでまたグラディオの瞬間を迎えることになる。 + +03:08.930 --> 03:14.870 +ドキュメントの束をベクトル化し、 それをベクトル化してベクトル・データベースに保存するプロセスは、 + +03:14.870 --> 03:21.740 +それなりに高度なもので、 少なくとも数セルは必要だと思うかもしれない。 + +03:21.980 --> 03:27.200 +しかし、 もちろん、 1つのセルで、 1行のコードでできることがわかるだろう。 + +03:27.200 --> 03:34.010 +そしてそれはもちろん、 ラング・チェインが素晴らしいグルーコードをたくさん用意してくれたおかげだ。 + +03:34.040 --> 03:39.560 +手作業でそれを繰り返し、 それぞれの埋め込みを、 それぞれのチャンクをベクトルに変換し、 + +03:39.560 --> 03:46.970 +それをクロマに保存するのは、 実際にはそれほど難しいことではない。 + +03:47.060 --> 03:50.870 +うーん、 でも、 こうやってひとつにまとめられたら、 もっとシンプルになるよね。 + +03:50.870 --> 03:55.490 +そして、 これのもうひとつの素晴らしいところは、 別のベクター・データ・ストアを試してみても、 + +03:55.490 --> 04:01.630 +ラング・チェーンからインポートされた別のベクター・ストアを使うだけで同じことができるということです。 + +04:01.630 --> 04:06.700 +でも、 この1行を走らせてみたら走った。 + +04:06.700 --> 04:11.200 +そして、 ベクターストアは123のドキュメントで作られている。 + +04:11.200 --> 04:14.800 +123個のチャンクがあったのだから。 + +04:14.800 --> 04:21.040 +だから、 ベクター・データ・ストアに同じ数のドキュメントがあるのはいいことだ。 + +04:21.520 --> 04:26.950 +Chromeを作成して戻ってきたベクターストアオブジェクトに、 + +04:26.950 --> 04:35.860 +チャンク、 埋め込み(OpenAIの埋め込み)、 そしてデータベース名を渡しました。 + +04:35.860 --> 04:40.060 +この3つは私たちが提供したもので、 まったく理にかなっている。 + +04:40.090 --> 04:42.820 +ベクターストアを作るために必要なことはこれだけだ。 + +04:42.820 --> 04:51.220 +そして、 このベクターストアは、 underscore collection dot countを呼び出して、 ベクターストア内のドキュメント数を取得することができる。 + +04:51.850 --> 05:04.380 +そこで、 これらのベクトルが何次元かを調べ、 これでコレクションからベクトルを取得できるようにしよう。 + 
+05:04.410 --> 05:11.640 +つまり、 このアンダースコア・コレクションをベクター・ストアから呼び出すと、 コレクションが生成されます。 + +05:11.640 --> 05:14.040 +そこでドットゲットを呼び出せばいい。 + +05:14.070 --> 05:15.300 +制限内でパスできる。 + +05:15.300 --> 05:16.380 +欲しいのは1つだけだ。 + +05:16.380 --> 05:20.550 +ベクトルそのものであるエンベッディングを持ち帰らせたいのだ。 + +05:20.760 --> 05:26.490 +その大きさを見てみよう。 + +05:26.490 --> 05:30.180 +寸法は1536。 + +05:30.180 --> 05:34.920 +つまり、 1536の数字がこの塊を構成している。 + +05:34.920 --> 05:38.850 +では、 これを印刷できるように自分で見てみよう。 + +05:38.850 --> 05:43.260 +埋め込みのサンプルを印刷してみましょう。 + +05:43.890 --> 05:44.430 +すごい。 + +05:44.430 --> 05:45.810 +たくさんの数字だ。 + +05:46.080 --> 05:48.000 +つまり、 数字の羅列だ。 + +05:48.030 --> 05:51.360 +あなたや私にとっては無意味なことだ。 + +05:51.540 --> 05:54.770 +ええと、 でもこの数字は何らかの形で + +05:55.460 --> 06:02.450 +私たちは、 これらの数字が何らかの形で、 その数字に関連するチャンクの意味を反映していると見ている。 + +06:02.450 --> 06:03.230 +いずれ分かることだ。 + +06:03.650 --> 06:06.560 +すぐにチャンクと関連付ける。 + +06:07.370 --> 06:09.140 +だから、 トンデモナイ数字なんだ。 + +06:09.140 --> 06:17.480 +これは1536個の数字で、 1536次元空間の座標を表していると解釈できる。 + +06:17.720 --> 06:28.790 +そして、 その座標は、 ベクトル空間において近い位置にある、 似たような座標を持つ他のベクトルが同じような意味を持つように選ばれている。 + +06:28.790 --> 06:30.740 +それがこの構想のすべてだ。 + +06:30.980 --> 06:34.940 +では、 ちょっとイメージしてみよう。 + +06:34.940 --> 06:37.640 +そうすれば、 このようなコメントもなくなるだろう。 + +06:37.640 --> 06:46.070 +これは、 舞台裏で何が起こっているのかを本当に調査することができる、 素晴らしい、 ええと、 ええと、 ええと、 いい方法になるだろう。 + +06:46.070 --> 06:53.320 +だから、 Collectiongettyを呼び出して、 エンベッディング、 ドキュメント、 メタデータを求めることができる。 + +06:53.320 --> 06:56.320 +それができたら、 ベクターを置くことができる。 + +06:56.320 --> 07:04.720 +埋め込みをベクターの配列に入れ、 docタイプをdocタイプというものに入れる。 + +07:04.720 --> 07:07.960 +それから、 見ての通り、 いくつかの色を作るつもりだ。 + +07:07.990 --> 07:10.300 +だから、 これは準備のためのプレワークにすぎない。 + +07:10.600 --> 07:22.360 +今ひとつ問題なのは、 人間には3次元以上のものを視覚化する能力がないということだ。 + +07:22.360 --> 07:27.610 +つまり、 1536次元で何かを視覚化することは、 実に難しいことなのだ。 + +07:27.670 --> 07:34.360 +でも、 幸運なことに、 投影法とか、 次元を2次元に縮小して、 + +07:34.360 --> 07:41.290 +多次元表現に忠実であるように、 可能な限り物事を分離できるようにする方法とか、 + +07:41.290 --> 07:47.680 +いろいろなテクニックがあるんだ。 + +07:47.680 --> 07:57.780 +だから、 これらすべての次元で離れているものを、 
2次元に投影してもまだかなり離れているようにしようとする。 + +07:58.290 --> 07:59.340 +そういうことだ。 + +07:59.370 --> 08:04.710 +そのためのテクニックはいろいろあるが、 ここでは割愛する。 + +08:04.710 --> 08:09.660 +これはt-SNEと呼ばれるもので、 T-distributed Stochastic + +08:09.660 --> 08:14.670 +Neighbor embeddingの略です。 + +08:15.150 --> 08:19.710 +何次元がいい? + +08:19.740 --> 08:24.630 +千差万別なものを二次元に投影してほしい。 + +08:24.630 --> 08:29.790 +そしてこのランダム・ステートは、 ランダム・シードを設定する方法であり、 これを呼び出すたびに同じものが得られるので、 + +08:29.790 --> 08:33.210 +これを再現できる。 + +08:33.450 --> 08:41.400 +そして、 このt-SNEオブジェクトのfit transformメソッドを呼び出すだけで、 縮小されたベクトルを得ることができます。 + +08:41.820 --> 08:49.770 +それから、 素晴らしいライブラリplotlyを使って散布図を作っています。 + +08:49.890 --> 08:56.170 +そして、 これはすべて、 コピー&ペーストして好きなように再利用できるコードのようなものなんだ。 + +08:56.170 --> 09:01.720 +でも、 ここで設定したドキュメントの種類によってマーカーの色を変えたり、 + +09:01.720 --> 09:14.560 +ポップアップ・テキストでチャンク自体の最初の100文字を表示させたりしています。 + +09:14.560 --> 09:20.050 +つまり、 ここで見たいのは、 ベクター・データベースでドキュメントがどのように見えるか、 ということだ。 + +09:20.050 --> 09:25.810 +そして、 これらの異なるベクトルそれぞれについて、 どのような文書であるかによって色分けする。 + +09:25.810 --> 09:27.850 +そして、 テキストにカーソルを合わせる。 + +09:27.850 --> 09:33.100 +だから、 このチャンクが表すテキストの断片を少し読むことができる。 + +09:33.100 --> 09:34.600 +だから、 これがうまくいくかどうか見てみよう。 + +09:34.600 --> 09:35.800 +それはとてもクールだ。 + +09:36.370 --> 09:37.420 +もちろん、 効果があることは知っている。 + +09:37.420 --> 09:38.470 +もう試したよ。 + +09:38.530 --> 09:39.580 +これだ。 + +09:39.610 --> 09:40.420 +オーケー。 + +09:40.420 --> 09:42.040 +では、 私たちは何を見ているのか。 + +09:42.670 --> 09:53.370 +つまり、 私たちは多次元ベクトルを2次元に投影して可視化したものを見ているのであって、 X軸とY軸に特別な意味はない。 + +09:53.370 --> 09:59.100 +いろいろなポイントを分散させる最善の方法なんだ。 + +09:59.190 --> 10:01.560 +さて、 いくつか気になることがある。 + +10:01.560 --> 10:07.500 +ここに見える緑の点は従業員を表している。 + +10:07.530 --> 10:10.350 +赤い点がそれを表している。 + +10:10.530 --> 10:17.730 +従業員というのは、 従業員の文書から抜き出したテキストのかたまりという意味だ。 + +10:17.760 --> 10:22.020 +赤いのは契約書から出てくるテキストの塊。 + +10:22.050 --> 10:28.830 +青は製品ドキュメントから、 黄色はアバウトなドキュメントから。 + +10:29.760 --> 10:36.900 +さて、 OpenAIのエンベッディングは、 テキストの塊をベクトル化するときに、 + +10:36.930 --> 10:44.850 +ドキュメントタイプを知らなかった。 + +10:44.850 --> 10:46.830 
+メタデータは伝えていない。 + +10:46.830 --> 10:52.190 +夏の従業員、 夏の製品、 夏の契約ということは伝えていない。 + +10:52.190 --> 10:55.580 +テキストの塊を渡して、 これをベクターに変換してくれ、 と言っただけだ。 + +10:55.580 --> 10:57.500 +それしかなかった。 + +10:57.770 --> 11:04.880 +だから、 そのチャンクに基づいて、 これらのチャンクがベクトル空間で分離され、 空間内の異なる領域を占めるというのは、 + +11:04.880 --> 11:10.730 +ある意味不思議なことなんだ。 + +11:10.910 --> 11:16.070 +内容や象徴する意味という点では、 + +11:16.070 --> 11:20.630 +ある程度似ているからね。 + +11:20.630 --> 11:26.570 +契約はこっちで、 製品はこっちだ。 + +11:26.690 --> 11:37.100 +契約情報の一部が、 商品と同じ近辺にあるように見えるかもしれない。 + +11:37.100 --> 11:39.290 +そして、 それは一瞬驚くかもしれない。 + +11:39.290 --> 11:41.210 +何かが間違っているように見えるかもしれない。 + +11:41.210 --> 11:48.010 +しかし、 そうではなく、 これらの塊にカーソルを合わせると、 これが契約書の特定のテキストの塊であり、 + +11:48.010 --> 11:53.170 +そのクライアントが契約した主な機能が記述されていることがわかる。 + +11:53.380 --> 11:56.260 +そこにテキストを入力してください。 + +11:56.290 --> 12:02.920 +2行目のテキストと書いてあるところにカーソルを合わせると、 それがベクター空間のその場所に置かれたチャンクから抽出されたもので、 + +12:02.920 --> 12:10.420 +1つのAIパワーのマッチングから始まっているのがわかると思います。 + +12:10.660 --> 12:12.250 +そして、 ここでもう一つ探してみよう。 + +12:12.610 --> 12:17.620 +ええと、 AIを使ったリスク評価機能です。 + +12:17.800 --> 12:19.420 +同じことだよ。 + +12:19.870 --> 12:28.420 +つまり、 契約には機能的な情報も含まれているということだ。 + +12:28.420 --> 12:35.980 +そしてその機能は、 他の製品情報と同じようなベクトル空間に存在する。 + +12:35.980 --> 12:38.800 +だから、 あー、 理解してもらえるといいんだけど。 + +12:38.830 --> 12:49.290 +空間上の適切な位置と文書の背後にある意味を結びつけるのが、 これまた不気味なくらいうまいんだ。 + +12:49.590 --> 12:59.520 +そして最後のポイントは、 これは言い過ぎかもしれないが、 本当にこういうことだと思う。 + +12:59.550 --> 13:04.260 +会社に関する3つの文書があることにお気づきだろう。 + +13:04.440 --> 13:07.380 +この3人が真ん中だね。 + +13:07.410 --> 13:10.290 +彼らはこれらの異なる文書のちょうど真ん中にいる。 + +13:10.290 --> 13:23.370 +それは、 従業員や契約、 製品に関連する情報も含まれているからだと思う。 + +13:23.580 --> 13:28.080 +ええと、 それはすべてに関連する中心的な情報なんだ。 + +13:28.080 --> 13:34.950 +だからこそ、 他のすべての情報の真ん中にあるような場所にあるんだ。 + +13:35.400 --> 13:37.350 +それはとても魅力的なことだ。 + +13:37.350 --> 13:41.280 +深読みしすぎかもしれないが、 私にはそう見える。 + +13:41.400 --> 13:48.770 +ベクター・データベースに好きなテキストのかたまりを入れて試してみるのもいい。 + +13:48.860 --> 13:57.080 +ここで読み込んだドキュメントに、 他のドキュメントを追加することができる。 + +13:57.080 --> 14:02.360 +ファイルを直接指定するか、 
+ +14:02.360 --> 14:08.690 +テキストを追加するか、 + +14:08.690 --> 14:25.940 +既存のドキュメントにテキストを追加するだけです。 + +14:26.060 --> 14:28.220 +それが全体のアイデアなんだ。 + +14:28.730 --> 14:33.140 +この2D表現は本当にクールだった。 + +14:33.140 --> 14:35.420 +ええと、 2Dで表現するよりもいいものがありますか? + +14:35.450 --> 14:36.320 +もちろんある。 + +14:36.350 --> 14:40.040 +3D表現があり、 それが次にやることだ。 + +14:40.220 --> 14:51.140 +2という数字を3という数字に変えるだけで、 ベクトルを3Dで視覚化することができる。 + +14:51.170 --> 14:52.280 +それほど単純なことではなかった。 + +14:52.310 --> 14:59.480 +ここに3Dも入れなければならなかったし、 他のベクターもいじらなければならなかった。 + +14:59.480 --> 15:02.510 +そして、 タイトルも2Dから3Dに変更しなければならなかった。 + +15:02.690 --> 15:04.910 +とにかく、 これがどう見えるか見てみよう。 + +15:04.910 --> 15:06.170 +これだ。 + +15:06.440 --> 15:13.010 +これが、 2Dではなく3Dに投影されたベクトルです。 + +15:13.160 --> 15:17.990 +ええと、 まずお気づきになるかもしれないのは、 実は、 これは映画のように、 3Dが必ずしも2Dより優れているわけではない、 + +15:17.990 --> 15:23.090 +稀な時代のひとつだということです。 + +15:23.300 --> 15:27.200 +ちょっとごちゃごちゃしているように見えるね。 + +15:27.200 --> 15:28.790 +ちょっと見づらいけど。 + +15:28.820 --> 15:31.100 +黄色が真ん中にあるようにまた見える。 + +15:31.220 --> 15:34.910 +そして、 緑、 赤、 青がはっきりと分かれている。 + +15:34.910 --> 15:40.610 +しかし、 以前見たように、 予想通り、 いくつか組み合わされたものもある。 + +15:40.640 --> 15:46.360 +でも、 これのいいところは、 積極的に、 あー、 インタラクティブにこれで遊ぶことができるんだ。 + +15:46.390 --> 15:48.310 +このように回転させることができる。 + +15:49.060 --> 15:50.260 +そうだろう? 
+ +15:50.260 --> 15:55.150 +そして、 これを使うことで、 どのようにレイアウトされているかを把握することができる。 + +15:55.150 --> 15:59.830 +そして、 両者の間に区別があることを感じさせてくれる。 + +16:00.130 --> 16:07.540 +うーん、 でも、 2Dの表現ほど明確ではないことは認めざるを得ないね。 + +16:07.630 --> 16:10.780 +あー、 それにしても、 いい感じだね。 + +16:10.780 --> 16:13.900 +カーソルを合わせると、 ポイントの背後にある意味を見ることができる。 + +16:13.900 --> 16:26.410 +もちろん、 前回と同じように、 黄色はすべての真ん中に位置しているように見える。 + +16:26.800 --> 16:36.970 +3Dで表現するのも楽しいけど、 2Dの方がもう少しわかりやすい部分もあるかもしれない。 + +16:37.360 --> 16:43.310 +とにかく、 これでベクトルを使った遊びは終わりですが、 大きな収穫があります。 それは、 これらの図を見て、 + +16:43.310 --> 16:50.930 +さまざまなテキストのかたまりを調べるだけでなく、 あなた自身のテキストのかたまりを追加することです。 + +16:50.960 --> 16:55.970 +簡単な方法は、 ドキュメントをナレッジ・ベース・ディレクトリに置くことだ。 + +16:55.970 --> 16:58.040 +そうするだけで、 すぐにうまくいく。 + +16:58.130 --> 17:06.260 +しかし、 単にドキュメントをそこにドロップするのではなく、 ラング・チェイン・コードを使ってチャンクを追加するために、 テキスト・ローダー・クラスを弄ることもできる。 + +17:06.260 --> 17:12.800 +しかし、 チャンクを入れたら、 ベクタースペースのどこに配置されるかを見て、 それがどのように機能するか感覚をつかんでほしい。 + +17:12.830 --> 17:17.480 +あるいは、 ナレッジ・ベース・ディレクトリを丸ごと置き換えて、 別の名前に変更し、 + +17:17.480 --> 17:28.430 +新しいディレクトリを作成し、 その中に非常に単純なものを入れるだけで、 ドキュメントがベクター・スペース内のさまざまな場所にどのように配置されるかを実験することができます。 + +17:28.790 --> 17:39.170 +これは超重要な宿題で、 次のパート、 つまり本格的にボロを出すときのいい基礎になるからだ。 + +17:39.200 --> 17:41.000 +まず、 スライドに戻ろう。 diff --git a/week5/community-contributions/subtitles/srts/59297773/ko_KR.srt b/week5/community-contributions/subtitles/srts/59297773/ko_KR.srt new file mode 100755 index 0000000..23309f7 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59297773/ko_KR.srt @@ -0,0 +1,736 @@ +WEBVTT + +00:00.620 --> 00:06.080 +JupyterLab의 이번 세션을 기대하시기 바랍니다 드디어 벡터를 + +00:06.110 --> 00:07.640 +직접 보게 되니까요 + +00:07.670 --> 00:13.220 +여러분은 5주 차 폴더에 있고 3일째에 있습니다 어느 때보다 + +00:13.250 --> 00:15.710 +저를 따라오게 될 거예요 + +00:15.710 --> 00:19.910 +이 예제에선 시각적으로 중요하고, 실제로 어떻게 진행되는지 감을 잡을 수 있기 때문에 중요하죠. + +00:19.910 --> 00:20.960 +Get it. 
+ +00:20.960 --> 00:25.940 +이건 전날의 복사본에 더 추가된 거예요 + +00:25.940 --> 00:33.380 +자, 가져오기부터 시작해보죠 가져오기에 추가된 것은 우리가 다룰 특별한 기능들이고요 + +00:33.380 --> 00:33.920 +오늘요 + +00:33.920 --> 00:41.960 +OpenAI 엠디딩을 가져올 겁니다 랑랑 체인 OpenAI 패키지의 일부죠 + +00:42.230 --> 00:45.860 +랑 체인 크로마를 가져올 거예요 + +00:46.130 --> 00:50.690 +그리고 불러올 것이 몇 가지 있어요 재미있게 실제로 이 벡터들을 + +00:50.690 --> 00:56.180 +시각화할 수 있도록요 t-SNE라는 거죠 나중에 얘기할 거예요 그리고 + +00:56.180 --> 00:59.720 +플롯리에서는 멋진 다이어그램을 줄 거예요 + +00:59.720 --> 01:01.750 +불러오기를 해보죠 + +01:02.380 --> 01:07.120 +좋아요, 상수를 실행해보죠 + +01:07.120 --> 01:09.040 +환경 변수를 불러오는 거죠 + +01:09.040 --> 01:11.440 +이제 아주 익숙해지셨겠죠 + +01:11.470 --> 01:14.440 +로더를 이용해 지식 기반을 실을 거예요 + +01:14.440 --> 01:20.920 +텍스트 분할기를 이용해 덩어리를 만들 건데, 123개예요 + +01:20.920 --> 01:27.730 +4개의 폴더 각각에 구획이 있어요 계약, 제품, 직원, 회사 + +01:28.300 --> 01:35.110 +한 번 더 삽입과 자동 역행 LMS에 대해 상기시켜 드리고 싶네요 지금까지 코스 + +01:35.110 --> 01:40.030 +대부분에서 자동 역행 LMS에 대해 얘기했었죠 + +01:40.060 --> 01:46.120 +자동 인코딩 LMS를 처음으로 살펴보겠습니다 전체 입력을 가져다가 하나의 출력을 + +01:46.120 --> 01:49.660 +생성하기 위해 사용하죠 우리 경우엔 벡터죠 + +01:50.140 --> 01:53.890 +자동 인코딩 경보의 예가 버트죠 + +01:53.920 --> 01:56.380 +이걸 사용할 거예요 + +01:56.410 --> 01:58.000 +오픈아이 삽입구죠 + +01:58.210 --> 01:59.280 +실행해 보죠 + +01:59.520 --> 02:00.420 +네 + +02:00.420 --> 02:05.010 +크로마 데이터 저장소를 만들 거예요 + +02:05.310 --> 02:10.770 +여기 이 부분을 집어넣었어요 이미 있는 데이터베이스를 지우거나 비우기 위해서요 + +02:10.800 --> 02:15.480 +그렇지 않으면 코드를 실행할 때마다 벡터가 추가됩니다. 벡터 모음이 중복되어서 + +02:15.480 --> 02:17.520 +혼란스러워져요. 
+ +02:17.520 --> 02:20.760 +데이터베이스를 새로 고침하는 데 도움이 되죠 + +02:20.760 --> 02:25.800 +여러분이 이미 한 번 실행했다면∙∙∙ 저도 이미 한두 번 실행했다고 상상하실 수 있죠 그러니 + +02:25.800 --> 02:27.090 +이건 제게 유용해요 + +02:27.360 --> 02:28.890 +저희가 지울게요 + +02:29.400 --> 02:37.350 +벡터 db 음 이 폴더에 있어요 그냥 db 이름이죠 상수예요 DB 이름이 + +02:37.350 --> 02:41.340 +벡터 DB라고 표시해뒀어요 + +02:41.370 --> 02:45.480 +SQLite 기반의 뭐든 될 수 있어요 + +02:45.510 --> 02:49.440 +여길 보면 SQLite 관련 작업이 진행 중이죠 + +02:49.440 --> 02:55.950 +여러 개의 다른 파일들이 있는 거죠 여러분이 부여하고자 선택한 데이터베이스 이름 + +02:55.950 --> 02:56.730 +내에요 + +02:56.730 --> 02:58.730 +이 경우엔 벡터 db죠 + +02:59.660 --> 03:06.170 +이제 벡터 데이터베이스를 생성하고 벡터로 채울 차례군요 + +03:06.170 --> 03:08.930 +또 다른 그러디오 순간을 보여드리죠 + +03:08.930 --> 03:14.870 +여러분은 문서들을 벡터화하고 벡터 데이터베이스에 저장하는 프로세스가 + +03:14.870 --> 03:19.670 +꽤나 복잡할 거라고 생각하실 수도 있습니다 적어도 여러 개의 + +03:19.700 --> 03:21.740 +셀이 있을 거라고 말이죠 + +03:21.980 --> 03:27.200 +물론 한 셀에서 할 수도 있고 한 줄의 코드에서도 할 수 있어요 + +03:27.200 --> 03:34.010 +랭 체인이 접착 코드를 많이 발라서 가능했어요 + +03:34.040 --> 03:39.560 +수동으로 하는 건 그리 어렵지 않아요 반복하기 위해서요 이런 각각의 + +03:39.560 --> 03:44.900 +삽입을 각각의 덩크마다 벡터로 전환해 크로마에 저장하는 거죠 4, + +03:44.900 --> 03:46.970 +5줄이면 될 거예요 + +03:47.060 --> 03:50.870 +이렇게 한 번에 하면 훨씬 간단해요 + +03:50.870 --> 03:55.490 +이것과 관련해 또 다른 장점은 다른 벡터 데이터 저장소를 시도하면 + +03:55.490 --> 04:00.280 +랭 체인에서 불러온 다른 벡터 저장소와 같은 겁니다 여러분도 시도해 + +04:00.280 --> 04:01.630 +보세요 + +04:01.630 --> 04:06.700 +이 라인을 실행해 볼게요 실행됐네요 + +04:06.700 --> 04:11.200 +벡터 스토어는 123개의 문서가 만들어졌죠 + +04:11.200 --> 04:14.800 +그 말을 들으니 안심이 되네요 덩어리 수가 123개였거든요 + +04:14.800 --> 04:21.040 +벡터 데이터 저장소에 같은 수의 문서가 있다는 건 좋은 일이죠 + +04:21.520 --> 04:26.950 +크롬을 생성하고 돌아온 벡터 스토어 객체를 발견할 수 있었습니다 + +04:26.950 --> 04:33.880 +덩크와 OpenAI 엠베딩을 전달했습니다 간단하게 데이터베이스의 + +04:33.880 --> 04:35.860 +이름을요 + +04:35.860 --> 04:40.060 +그게 우리가 제공한 3가지예요 완전히 말이 되죠 + +04:40.090 --> 04:42.820 +벡터 스토어를 생성하는데 필요한 건 그게 다죠 + +04:42.820 --> 04:50.170 +벡터 스토어는 get_Cetting.Count라고 호출할 수 있습니다 벡터 스토어 내 문서의 수를 + +04:50.170 --> 04:51.220 +얻기 위해서요 + +04:51.850 --> 05:04.380 +벡터들의 크기가 
얼마나 되는지 알아봅시다. 이것으로 컬렉션으로부터 벡터를 얻을 수 있어요. + +05:04.410 --> 05:10.650 +그래서 컬렉션은 이 밑줄 get을 호출하면 얻게 되는 거죠 벡터 스토어의 + +05:10.650 --> 05:11.640 +특성 + +05:11.640 --> 05:14.040 +그런 다음 .get을 호출하죠 + +05:14.070 --> 05:15.300 +한계를 넘을 수 있어요 + +05:15.300 --> 05:16.380 +우린 한 명만 원해요 + +05:16.380 --> 05:20.550 +벡터인 내장들을 불러오길 원하죠 + +05:20.760 --> 05:26.490 +그리고 그걸 살펴보고 얼마나 큰지 볼 거예요 + +05:26.490 --> 05:30.180 +1536차원이에요 + +05:30.180 --> 05:34.920 +1536개의 숫자가 이 덩어리를 이루죠 + +05:34.920 --> 05:38.850 +우리가 직접 보죠 그냥 프린트할 수 있게요 + +05:38.850 --> 05:43.260 +삽입 샘플을 인쇄해서 어떤지 보죠 + +05:43.890 --> 05:44.430 +놀랍네요 + +05:44.430 --> 05:45.810 +숫자가 많네요 + +05:46.080 --> 05:48.000 +숫자가 많네요 + +05:48.030 --> 05:51.360 +당신과 나에겐 무의미해요 + +05:51.540 --> 05:54.770 +하지만 이 숫자들은 어떤 식으로든 그래요 + +05:55.460 --> 06:02.450 +이 숫자들은 어떤 식으로든 숫자와 관련된 덩어리의 의미를 반영해요 + +06:02.450 --> 06:03.230 +두고 봐야죠 + +06:03.650 --> 06:06.560 +잠시 후 덩어리랑 연결시켜 보죠 + +06:07.370 --> 06:09.140 +숫자가 엄청나요 + +06:09.140 --> 06:17.480 +1536개의 숫자로 1536차원 공간의 좌표를 해석할 수 있어요 + +06:17.720 --> 06:24.830 +그 좌표가 선택되면 비슷한 좌표를 가진 다른 벡터들이 벡터 공간에서 + +06:24.830 --> 06:28.790 +비슷한 의미를 갖는 거죠 + +06:28.790 --> 06:30.740 +그게 이 요리의 핵심이죠 + +06:30.980 --> 06:34.940 +잠시 시각화해 보죠 + +06:34.940 --> 06:37.640 +댓글을 없애면 좋을 것 같아요 Get it + +06:37.640 --> 06:44.960 +좋은 방법이 될 거예요 무대 뒤에서 무슨 일이 일어나는지 제대로 조사할 수 있는 + +06:44.960 --> 06:46.070 +방법이죠 + +06:46.070 --> 06:53.320 +컬렉션겟티에 전화해 메타데이터와 문서 내장 정보를 요청할 수 있어요 + +06:53.320 --> 06:56.320 +일단 그렇게 하고 나면 벡터들을 put 할 수 있어요 + +06:56.320 --> 07:03.010 +박아둔 것들을 벡터들의 배열에 넣을 수 있습니다. doc 타입을 doc 타입이라는 것에 + +07:03.010 --> 07:04.720 +넣을 수 있어요. 
+ +07:04.720 --> 07:07.960 +이제 색을 좀 만들어 볼게요 + +07:07.990 --> 07:10.300 +Get it get get it 사전 작업이에요 + +07:10.600 --> 07:18.400 +한 가지 문제가 있습니다 인간은 결점이 있는데 모든 걸 3차원 이상으로 + +07:18.430 --> 07:22.360 +시각화하는 데 문제가 있다는 거죠 + +07:22.360 --> 07:27.610 +1536차원으로 시각화하는 건 정말 어려운 일이에요 + +07:27.670 --> 07:34.360 +다행히 다양한 기술을 활용할 수 있어요 하향 투사라는 걸 하는데 차원을 + +07:34.360 --> 07:41.290 +2차원으로만 축소하는 거죠 그렇게 하면 사물을 분리하는 데 가장 효과적인 + +07:41.290 --> 07:47.680 +방법으로 다차원적 표현에 충실할 수 있어요 + +07:47.680 --> 07:52.860 +그래서 모든 면에서 좀 더 멀리 떨어져 있는 걸 만들려 하면 2차원으로 + +07:52.860 --> 07:57.780 +투사했을 때도 여전히 꽤 멀리 떨어져 있을 거예요 + +07:58.290 --> 07:59.340 +그게 목적이죠 + +07:59.370 --> 08:03.240 +그에 대한 다양한 기술이 있지만 자세히 다루진 않겠습니다만 저희가 사용할 + +08:03.270 --> 08:04.710 +기술은 아주 일반적인 거죠 + +08:04.710 --> 08:09.660 +t-SNE라고 하는데 T 분산 가설 이웃 침투의 약자죠 구글에 검색하거나 + +08:09.660 --> 08:14.670 +설명을 듣고 싶을 때 챗GPT에 물어보면 나오는 거예요 + +08:15.150 --> 08:19.710 +통과할 때 몇 가지 차원을 원하죠? + +08:19.740 --> 08:24.630 +천 가지에서 2차원으로 영사하는 거죠 + +08:24.630 --> 08:29.790 +이 무작위 상태는 무작위 씨앗을 설정하는 방식이에요 이걸 호출할 때마다 같은 + +08:29.790 --> 08:33.210 +결과가 나와서 이걸 재생산할 수 있죠 get it + +08:33.450 --> 08:41.400 +그러면 감소된 벡터를 얻을 수 있습니다. t-SNE 객체에 대한 get transform 메서드를 호출하면요. 
+ +08:41.820 --> 08:49.770 +이제 멋진 라이브러리 플리틀리를 이용해서 멋진 분산 도표를 만들 거예요 + +08:49.890 --> 08:55.570 +이건 전부 일종의 코드예요 원하는 대로 복사, 붙여넣기, 재사용할 수 + +08:55.570 --> 08:56.170 +있죠 + +08:56.170 --> 09:01.720 +하지만 멋진 걸 했어요 마커에 다른 색을 입혔죠 다양한 + +09:01.720 --> 09:08.260 +문서 유형에 맞춰 설정한 색에 근거해서요 팝업 텍스트도 있어요 + +09:08.290 --> 09:14.560 +청크 자체에서 첫 100개의 글자가 조각으로 나오죠 + +09:14.560 --> 09:20.050 +여기서 보고 싶은 건 문서가 벡터 데이터베이스에서 어떻게 보이는지 보는 거예요 + +09:20.050 --> 09:25.810 +그런 다음 각각의 다른 벡터에 대해 어떤 종류의 문서인지에 따라 색을 입힐 거예요 + +09:25.810 --> 09:27.850 +텍스트 위로 마우스가 떠요 + +09:27.850 --> 09:33.100 +이 비트 조각이 나타내는 텍스트 조각을 읽을 수 있어요 + +09:33.100 --> 09:34.600 +잘 되는지 보죠 + +09:34.600 --> 09:35.800 +그럼 멋지겠네요 + +09:36.370 --> 09:37.420 +당연히 알죠 + +09:37.420 --> 09:38.470 +이미 해 봤어요 + +09:38.530 --> 09:39.580 +여기 있네요 + +09:39.610 --> 09:40.420 +네 + +09:40.420 --> 09:42.040 +이게 뭐죠? + +09:42.670 --> 09:50.670 +다차원 벡터의 시각화를 보고 있는데 2D로 보여줍니다 엑스 축과 Y축의 + +09:50.670 --> 09:53.370 +특별한 의미가 없어요 + +09:53.370 --> 09:59.100 +여러 지점을 분산시킬 수 있는 최선의 방법이죠 + +09:59.190 --> 10:01.560 +몇 가지 알아두셔야 할 게 있어요 + +10:01.560 --> 10:07.500 +여기 보이는 녹색 점들은 직원들을 나타내요 + +10:07.530 --> 10:10.350 +빨간 점은 뭘 의미할까요? 
+ +10:10.530 --> 10:17.730 +여기서 직원이라는 건 직원 문서에서 텍스트를 잔뜩 뽑아낸다는 뜻이에요 + +10:17.760 --> 10:22.020 +빨간 건 계약서에서 나온 텍스트 덩어리예요 + +10:22.050 --> 10:28.830 +파란색은 제품 문서화 노란색은 어바웃 문서화예요 + +10:29.760 --> 10:36.900 +이 시점에서 아셔야 할 마법 같은 게 있어요 OpenAI가 비트를 + +10:36.930 --> 10:44.850 +삽입할 때 텍스트 덩어리를 벡터화할 때 문서 유형을 몰랐어요 + +10:44.850 --> 10:46.830 +메타데이터는 말 안 했어요 + +10:46.830 --> 10:52.190 +여름철 직원, 여름철 제품 여름철 계약에 대해 말하지 않았어요 + +10:52.190 --> 10:55.580 +텍스트 덩어리를 주고 벡터로 바꾸라고 했죠 + +10:55.580 --> 10:57.500 +그게 다였어요 + +10:57.770 --> 11:04.880 +그 덩어리들을 바탕으로 보면 이 덩어리들이 벡터 공간으로 분리되어 + +11:04.880 --> 11:10.730 +공간 내의 다른 영역을 차지하고 있어요 + +11:10.910 --> 11:16.070 +서로 어느 정도 비슷하거든요 내용이나 의미 면에서요 + +11:16.070 --> 11:20.630 +직원들은 모두 같은 영역에 있어요 + +11:20.630 --> 11:26.570 +계약서는 여기 있고 제품은 여기 있어요 + +11:26.690 --> 11:34.820 +일부 계약 정보가 제품과 같은 부근에 있는 걸 눈치채셨을 + +11:34.820 --> 11:37.100 +거예요 + +11:37.100 --> 11:39.290 +잠시 놀라실지도 모르겠네요 + +11:39.290 --> 11:41.210 +뭔가 잘못된 것처럼 보일 수도 있어요 + +11:41.210 --> 11:48.010 +이 덩어리들을 마우스로 가리키면 계약의 특정 텍스트 덩어리란 걸 알 수 있어요 + +11:48.010 --> 11:53.170 +클라이언트가 등록한 핵심 기능을 설명하는 거죠 + +11:53.380 --> 11:56.260 +텍스트를 입력하라고 하는 게 보이시죠 + +11:56.290 --> 12:02.920 +죄송합니다 잠시 마우스로 가리켜보면 텍스트라는 두 번째 줄이 있는데 벡터 공간에 있는 + +12:02.920 --> 12:08.440 +위치에 있는 덩어리를 추출한 것입니다. 하나의 인공지능으로 된 매칭을 + +12:08.440 --> 12:10.420 +볼 수 있는데요. 
+ +12:10.660 --> 12:12.250 +다른 걸 찾아보죠 + +12:12.610 --> 12:17.620 +인공지능 위험 평가 기능이죠 + +12:17.800 --> 12:19.420 +그게 그거죠 + +12:19.870 --> 12:28.420 +여기서 보이는 건 계약 안에 기능성 정보 같은 게 있어요 + +12:28.420 --> 12:35.980 +기능성 물질은 벡터 공간에 존재합니다 다른 제품과 비슷한 공간에 존재하죠 + +12:35.980 --> 12:38.800 +이해가 되셨으면 좋겠네요 + +12:38.830 --> 12:46.890 +다시 말씀드리지만 공간의 올바른 위치를 문서의 의미와 연결하는 건 거의 소름 + +12:46.920 --> 12:49.290 +끼칠 정도로 잘하죠 + +12:49.590 --> 12:56.610 +마지막으로 말씀드릴 요점은 너무 멀리 간 것일 수도 있지만 제가 + +12:56.610 --> 12:59.520 +보기엔 이런 상황 같아요 + +12:59.550 --> 13:04.260 +보시면 회사 관련 서류가 세 장 있어요 + +13:04.440 --> 13:07.380 +여기 이 세 개가 중앙에 있어요 + +13:07.410 --> 13:10.290 +여러 문서들 가운데에 있어요 + +13:10.290 --> 13:17.580 +그건 일부 정보를 포함하고 있기 때문이라고 봅니다 + +13:17.580 --> 13:23.370 +직원과 계약 제품과 관련된 정보요 + +13:23.580 --> 13:28.080 +모든 것과 관련된 중심 정보 같은 거죠 + +13:28.080 --> 13:34.950 +그래서 이 모든 게 어떤 식으로든 모든 정보의 한가운데에 있는 거죠 REST + +13:35.400 --> 13:37.350 +그게 참 흥미로워요 + +13:37.350 --> 13:41.280 +내가 오버하는 걸 수도 있지만 그런 것 같아요 + +13:41.400 --> 13:46.730 +여러분이 할 수 있는 것 중 하나는 벡터 데이터베이스에 넣고 싶은 텍스트 덩어리를 + +13:46.730 --> 13:48.770 +실험해보는 거예요 + +13:48.860 --> 13:56.270 +돌아가서 다른 문서를 추가할 수 있다고 상상해 보세요 여기 로드한 문서에 + +13:56.270 --> 13:57.080 +말이죠 + +13:57.080 --> 14:02.360 +파일을 직접 지정하거나 텍스트를 추가하거나 기존 + +14:02.360 --> 14:08.690 +문서에 텍스트를 추가해서 벡터 공간으로 보내거나 둘 중 하나입니다. + +14:08.690 --> 14:16.070 +그리고 이것을 통해 오픈AI가 다른 텍스트의 의미를 이해하고 비슷한 + +14:16.070 --> 14:25.940 +의미의 다른 것들에 가깝게 배치할 수 있다는 것을 알게 되었는데요. + +14:26.060 --> 14:28.220 +그게 핵심이에요 + +14:28.730 --> 14:33.140 +이 2D 모형은 정말 멋졌어요 + +14:33.140 --> 14:35.420 +2D 표현보다 더 좋은 게 있을까요? + +14:35.450 --> 14:36.320 +당연히 있죠 + +14:36.350 --> 14:40.040 +3D 표현이 있어요 그게 다음에 할 일이죠 + +14:40.220 --> 14:46.160 +3D로 해보죠 간단하거든요 2를 3으로 바꾸는 것만큼요 + +14:46.160 --> 14:51.140 +그럼 3D의 이 벡터를 시각화할 수 있어요 + +14:51.170 --> 14:52.280 +그렇게 간단하지 않았어요 + +14:52.310 --> 14:57.620 +여기에 3D도 넣어야 했어요. 다른 벡터들도 만져야 했죠. 하지만 + +14:57.620 --> 14:59.480 +꽤 비슷해요. 
+ +14:59.480 --> 15:02.510 +타이틀도 2D에서 3D로 바꿔야 했어요 + +15:02.690 --> 15:04.910 +어쨌든, 어떤지 보죠 + +15:04.910 --> 15:06.170 +여기 있네요 + +15:06.440 --> 15:13.010 +이게 벡터예요 2D에서 3D로 영사된 거죠 + +15:13.160 --> 15:17.990 +첫 번째로 눈치챌 수 있는 건 사실 아주 드문 경우라는 거예요 + +15:17.990 --> 15:23.090 +영화처럼 3D가 2D보다 나을 때도 있어요 비트 비트 + +15:23.300 --> 15:27.200 +비트가 좀 뒤죽박죽인 것 같네요 + +15:27.200 --> 15:28.790 +비트는 잘 안 보여요 + +15:28.820 --> 15:31.100 +노란색이 가운데 있는 것처럼 보이긴 해요 + +15:31.220 --> 15:34.910 +초록, 빨강, 파랑은 확실히 구분돼 있고요 + +15:34.910 --> 15:40.610 +하지만 예상하셨듯이 일부 요소가 합쳐졌어요 + +15:40.640 --> 15:46.360 +이 제품의 장점은 상호 작용적으로 놀 수 있다는 거예요 + +15:46.390 --> 15:48.310 +이렇게 돌릴 수 있어요 + +15:49.060 --> 15:50.260 +대단하지 않아요? + +15:50.260 --> 15:55.150 +이걸 이용해서 어떻게 배치됐는지 더 잘 알 수 있어요 get get get get it + +15:55.150 --> 15:59.830 +둘 사이에 차이가 있다는 걸 알 수 있죠 + +16:00.130 --> 16:07.540 +하지만 2D 묘사만큼 명확하진 않은 것 같아요 + +16:07.630 --> 16:10.780 +그래도 보기 좋아요 + +16:10.780 --> 16:13.900 +Get up 하면 포인트 뒤에 있는 의미가 보이시죠? + +16:13.900 --> 16:19.870 +물론 아까처럼 노란색은 어바웃입니다 모든 것의 중간에 있는 것처럼 + +16:19.960 --> 16:26.410 +보이죠 아까 말씀드린 것과 일치합니다 여기 이 파란색은 좀 특이하죠 + +16:26.800 --> 16:34.270 +3D로 표현한 비트도 충분히 즐길 수 있어요 2D가 어떤 면에서는 + +16:34.270 --> 16:36.970 +더 명확하긴 하지만요 + +16:37.360 --> 16:43.310 +벡터 실험은 이걸로 끝났는데 중요한 게 있어요 이제 여러분에게 달렸다는 거죠 이 + +16:43.310 --> 16:49.010 +도표를 보고 텍스트의 다양한 덩어리를 검사하는 게 아니라 자신의 텍스트 덩어리를 + +16:49.010 --> 16:50.930 +추가하는 거예요 + +16:50.960 --> 16:55.970 +간단한 방법은 문서를 지식 저장소에 넣는 겁니다 지식 기반 디렉터리에 말이에요 Put + +16:55.970 --> 16:58.040 +그렇게만 하면 바로 효과가 나타나죠 + +16:58.130 --> 17:04.670 +텍스트 로더 클래스도 활용할 수 있어요 문서를 그냥 드롭하는 대신 랑가인 코드를 이용해 청크를 + +17:04.670 --> 17:06.260 +추가하는 거죠 + +17:06.260 --> 17:12.230 +하지만 덩어리들을 넣고 나면 벡터 공간에서 어디에 도달하는지 보고 어떻게 작동하는지 감을 잡으세요 Get in get in + +17:12.230 --> 17:12.800 +get + +17:12.830 --> 17:17.480 +또는 전체 지식 기반 디렉터리를 대체할 수도 있습니다 다른 것으로 이름을 + +17:17.480 --> 17:23.840 +바꾸고 새 디렉터리를 생성해 아주 간단한 것으로 채우는 거죠 문서가 벡터 스페이스 내 다른 위치에 + +17:23.840 --> 17:28.430 +어떻게 놓이는지 실험할 수 있도록요 Get up Get up + +17:28.790 --> 17:36.500 +아주 중요한 숙제예요 
다음 단계에 대한 기반을 제공하거든요 실제로 + +17:36.500 --> 17:39.170 +Get을 하는 단계요 + +17:39.200 --> 17:41.000 +먼저 슬라이드로 돌아가죠 diff --git a/week5/community-contributions/subtitles/srts/59471979/en_US.srt b/week5/community-contributions/subtitles/srts/59471979/en_US.srt new file mode 100755 index 0000000..969c82e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59471979/en_US.srt @@ -0,0 +1,253 @@ +WEBVTT + +00:01.490 --> 00:06.830 +So we now turn to the parts of the problem, which is perhaps, let's say, not as glamorous as some + +00:06.830 --> 00:12.770 +of the rest of it, but it is perhaps the most essential, which is finding and crafting your data. + +00:12.770 --> 00:16.850 +And there are a bunch of places that people can go to look for data to hunt data. + +00:16.850 --> 00:19.010 +But the first, the first place you go. + +00:19.040 --> 00:25.640 +First and foremost, of course, is your own proprietary data that your company has. + +00:25.640 --> 00:31.820 +That is something which hopefully is pertains directly to the problem you're solving, and is going + +00:31.820 --> 00:34.460 +to be crucial for your fine tuning. + +00:34.460 --> 00:41.060 +In the case of the pretend fictitious Rag project, we did. + +00:41.090 --> 00:47.120 +We we pretended we had the shared drive of the company that we used then to build our knowledge base. + +00:47.150 --> 00:51.260 +That's an example of where we went to find proprietary data. + +00:51.260 --> 00:57.740 +In the case of of my business, Nebula, we have information about talent and jobs, careers that we + +00:57.740 --> 01:00.800 +can use to train proprietary Models. + +01:00.950 --> 01:07.790 +So finding your own company, proprietary data sets that are specific for your problem, that's the + +01:07.790 --> 01:09.140 +first place to start. + +01:09.170 --> 01:15.380 +Of course, then there is Kaggle, a wonderful resource for data scientists. + +01:15.740 --> 01:17.600 +You probably heard of it and used it. 
+ +01:17.630 --> 01:19.160 +If not, go and take a look at it. + +01:19.280 --> 01:22.400 +It's got, uh, so much, uh, data. + +01:22.430 --> 01:29.180 +Data, um, that goes over a long period of time that people have contributed to Kaggle. + +01:29.330 --> 01:32.390 +And then there is, of course, hugging face. + +01:32.420 --> 01:36.080 +Uh, that is just such a fabulous resource for us. + +01:36.110 --> 01:39.170 +And we will be using hugging Face in just a moment. + +01:39.740 --> 01:42.290 +There's also synthetic data, as it happens. + +01:42.290 --> 01:46.640 +For our Rag project, we didn't use a real company's shared drive. + +01:46.640 --> 01:51.350 +We used an LLM to generate synthetic data and that is an option. + +01:51.350 --> 01:52.880 +There are pros and cons, of course. + +01:52.880 --> 01:59.780 +If you're if you're trying to use the frontier model to actually, uh, learn from the data, then it + +01:59.780 --> 02:04.690 +may not make sense to have the frontier model generate the data and then learn from it, but if you're + +02:04.690 --> 02:09.130 +trying to build your own model, or you're trying to build a cheaper model, that's that's going to + +02:09.160 --> 02:15.400 +be seeded by a frontier model, then you could use the front end model to generate some some data and + +02:15.400 --> 02:19.870 +then use that data to train your smaller, cheaper, more lightweight model. + +02:19.870 --> 02:24.310 +So the various circumstances where synthetic data makes sense. + +02:25.060 --> 02:31.750 +And then I'll mention that there are specialist companies whose task it is to go out there and curate + +02:31.780 --> 02:32.890 +a data set for you. + +02:32.920 --> 02:37.390 +We actually encountered this company earlier when we were looking at one of the leaderboards, the leaderboard + +02:37.390 --> 02:45.340 +called seal, which were business specific leaderboards for Llms that is put together by a company called + +02:45.340 --> 02:45.940 +scale. 
+ +02:45.940 --> 02:51.040 +And scale specializes in building crafted data sets for your problem. + +02:51.040 --> 02:53.500 +So that is another place to go. + +02:53.680 --> 02:59.650 +In our case though, we are going to Hugging Face, which is a treasure trove of data and it contains + +02:59.680 --> 03:04.090 +much data contributed by the community, including this particular data set. + +03:04.240 --> 03:07.600 +Scrape over many years of Amazon reviews. + +03:07.600 --> 03:09.940 +That is, uh, enormous. + +03:09.940 --> 03:11.500 +It is an absolutely enormous data set. + +03:11.500 --> 03:18.580 +And in addition to the reviews that it takes, it also has metadata just associated with the products, + +03:18.850 --> 03:22.870 +including the description of the products and their prices. + +03:22.870 --> 03:28.360 +And that, of course, is exactly what we're after, product descriptions and prices. + +03:28.360 --> 03:33.280 +And this data set has them in large quantities. + +03:33.280 --> 03:35.680 +And so it is perfect for us. + +03:35.680 --> 03:37.600 +This is where we will be heading. + +03:39.370 --> 03:42.640 +So how do you go about digging into the data. + +03:42.640 --> 03:44.530 +What are the steps that you take? + +03:44.590 --> 03:48.850 +We're going to be doing some of this work today, and we're going to be refining some of it tomorrow. + +03:49.000 --> 03:55.900 +Um, but there are these six different stages, perhaps, to getting deep into the data. + +03:55.900 --> 04:01.960 +First of all, there's a time when you're Investigating, just understanding the data, what fields + +04:01.990 --> 04:02.380 +to have? + +04:02.380 --> 04:04.420 +How well populated is the data? + +04:04.420 --> 04:07.090 +What kind of data quality issues do you have? + +04:07.180 --> 04:13.180 +Then typically at least the way I like to approach this is then parsing in that data into a structure + +04:13.180 --> 04:15.490 +which is going to be easier to handle. 
+ +04:15.670 --> 04:19.900 +Um, that, that's, that's uh, so that you're no longer working with raw data sets. + +04:19.900 --> 04:23.350 +You're working typically with, with objects at that point. + +04:23.620 --> 04:32.710 +It's great then to do some visualizations, it's important to see things like the, the how wide, how + +04:32.710 --> 04:33.340 +spread out. + +04:33.340 --> 04:37.930 +Some of your values are when we're thinking of things like prices of products, what's the range in + +04:37.930 --> 04:38.650 +prices? + +04:38.650 --> 04:42.940 +Does it turn out that there are a lot of that the distribution is skewed in some way. + +04:42.940 --> 04:46.360 +So visualize it so you can get a really good sense of it. + +04:46.930 --> 04:49.720 +Now a deeper assessment of data quality. + +04:49.720 --> 04:53.710 +Understand what kind of limitations you have with your data. + +04:53.830 --> 05:01.290 +Um, that will help you decide how to take action on it and curate is when you then go and decide how + +05:01.290 --> 05:03.630 +you're going to craft this data set. + +05:03.630 --> 05:12.030 +For example, if it turns out that you discover that a quarter of your data is in has poor data quality + +05:12.030 --> 05:16.020 +in some way, you may decide just to exclude that data altogether. + +05:16.020 --> 05:20.190 +You may think that you have a big enough sample that you can focus on the three quarters. + +05:20.190 --> 05:25.770 +If you find that your data set is very imbalanced in some way, and you're concerned that that as part + +05:25.770 --> 05:31.800 +of training, the model will only learn a sort of a particular balance of the data, then this would + +05:31.800 --> 05:35.160 +be a time to potentially fix address that balance. + +05:35.160 --> 05:43.110 +So curating is about crafting the data set that is most suitable for your training and then saving it. + +05:43.110 --> 05:46.200 +In our case, we'll be uploading it to the Huggingface hub. 
+ +05:46.500 --> 05:50.430 +That's the final step before you'd be ready for training. + +05:50.460 --> 05:55.590 +So with that, we're going to head to JupyterLab for the first time with our new project, and we're + +05:55.590 --> 05:58.860 +going to get about curating some data. + +05:58.890 --> 05:59.700 +See you there. diff --git a/week5/community-contributions/subtitles/srts/59471979/ja_JP.srt b/week5/community-contributions/subtitles/srts/59471979/ja_JP.srt new file mode 100755 index 0000000..a2c8c53 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59471979/ja_JP.srt @@ -0,0 +1,211 @@ +WEBVTT + +00:01.490 --> 00:06.830 +それは、 おそらく他の部分ほど華やかではないが、 + +00:06.830 --> 00:12.770 +おそらく最も重要なものである。 + +00:12.770 --> 00:16.850 +そして、 データを狩るために人々がデータを探しに行ける場所はたくさんある。 + +00:16.850 --> 00:19.010 +でも、 最初に、 最初に行く場所。 + +00:19.040 --> 00:25.640 +もちろん、 何よりもまず、 あなたの会社が持っている独自のデータだ。 + +00:25.640 --> 00:34.460 +それは願わくば、 あなたが解決しようとしている問題に直接関係するものであり、 あなたの微調整にとって極めて重要なものになるだろう。 + +00:34.460 --> 00:41.060 +架空のラグ・プロジェクトの場合、 私たちはそうした。 + +00:41.090 --> 00:47.120 +私たちは会社の共有ドライブを持っていることにして、 ナレッジベースを構築するのに使った。 + +00:47.150 --> 00:51.260 +これは私たちが独自データを探しに行った例だ。 + +00:51.260 --> 01:00.800 +私のビジネスであるネビュラの場合、 人材や仕事、 キャリアに関する情報があり、 それを使って独自のモデルを育成することができる。 + +01:00.950 --> 01:09.140 +だから、 自分の問題に特化した独自のデータセットを自社で見つけることが、 最初のスタート地点になる。 + +01:09.170 --> 01:15.380 +もちろん、 データサイエンティストにとって素晴らしいリソースであるKaggleもある。 + +01:15.740 --> 01:17.600 +おそらく聞いたことも使ったこともあるだろう。 + +01:17.630 --> 01:19.160 +そうでなければ、 行って見てください。 + +01:19.280 --> 01:22.400 +たくさんのデータがあるんだ。 + +01:22.430 --> 01:29.180 +データというのは、 Kaggleに投稿された長い期間に渡るものです。 + +01:29.330 --> 01:32.390 +そしてもちろん、 ハグする顔もある。 + +01:32.420 --> 01:36.080 +僕らにとっては素晴らしい情報源だよ。 + +01:36.110 --> 01:39.170 +そして、 ハグする顔をすぐに使うことになる。 + +01:39.740 --> 01:42.290 +合成データもある。 + +01:42.290 --> 01:46.640 +ラグ・プロジェクトでは、 実際の会社の共有ドライブは使いませんでした。 + +01:46.640 --> 01:51.350 +我々はLLMを使って合成データを作成したが、 それも選択肢の一つだ。 + +01:51.350 --> 01:52.880 +もちろん、 長所も短所もある。 + +01:52.880 --> 01:59.780 
+フロンティアモデルを使って実際にデータから学習しようとするのであれば、 フロンティアモデルにデータを生成させ、 そこから学習させるのは意味がないかもしれない。 + +01:59.780 --> 02:04.690 +しかし、 独自のモデルを構築しようとしている、 あるいはフロンティアモデルからシードを得て、 + +02:04.690 --> 02:09.130 +より安価なモデルを構築しようとしているのであれば、 フロントエンドモデルを使ってデータを生成し、 + +02:09.160 --> 02:19.870 +そのデータを使って、 より小さく、 より安価で、 より軽量なモデルを学習させることができる。 + +02:19.870 --> 02:24.310 +つまり、 合成データが意味を持つさまざまな状況だ。 + +02:25.060 --> 02:32.890 +そして、 あなたのためにデータセットをキュレートしてくれる専門会社があることもお伝えしておこう。 + +02:32.920 --> 02:37.390 +実はこの会社、 以前、 あるリーダーボードを見ていたときに出会ったのだが、 + +02:37.390 --> 02:45.940 +そのリーダーボードはsealと呼ばれるもので、 scaleという会社が作成したLLMS用のビジネス別リーダーボードだった。 + +02:45.940 --> 02:51.040 +そして、 スケールはあなたの問題に対して、 精巧なデータセットを構築することに特化している。 + +02:51.040 --> 02:53.500 +だから、 そこも行くべき場所だ。 + +02:53.680 --> 02:59.650 +私たちの場合は、 データの宝庫であるHugging Faceを利用する。 Hugging Faceには、 この特定のデータセットを含め、 + +02:59.680 --> 03:04.090 +コミュニティから提供された多くのデータが含まれている。 + +03:04.240 --> 03:07.600 +長年にわたるアマゾンのレビューをかき集める。 + +03:07.600 --> 03:09.940 +それは莫大なものだ。 + +03:09.940 --> 03:11.500 +実に膨大なデータセットだ。 + +03:11.500 --> 03:22.870 +また、 レビューだけでなく、 商品の説明や価格など、 商品に関連するメタデータも持っている。 + +03:22.870 --> 03:28.360 +そしてもちろん、 それこそが私たちが求めているもの、 商品説明と価格なのだ。 + +03:28.360 --> 03:33.280 +そして、 このデータセットにはそれが大量に含まれている。 + +03:33.280 --> 03:35.680 +だから僕らにとっては完璧なんだ。 + +03:35.680 --> 03:37.600 +ここが私たちが向かう場所だ。 + +03:39.370 --> 03:42.640 +では、 どうやってデータを掘り下げていくのか。 + +03:42.640 --> 03:44.530 +どのようなステップを踏むのですか? + +03:44.590 --> 03:48.850 +今日はこの作業の一部を行い、 明日はその一部を洗練させるつもりだ。 + +03:49.000 --> 03:55.900 +でも、 データに深く入り込むには、 おそらく6つの異なる段階がある。 + +03:55.900 --> 04:02.380 +まず第一に、 調査をしている時に、 データを理解するために、 どのようなフィールドを持つべきか? + +04:02.380 --> 04:04.420 +データはどの程度蓄積されているのか? + +04:04.420 --> 04:07.090 +データ品質にはどのような問題がありますか? + +04:07.180 --> 04:15.490 +それから、 少なくとも私が好きなアプローチ方法は、 データを解析して扱いやすい構造にすることだ。 + +04:15.670 --> 04:19.900 +つまり、 生のデータセットを扱うことはもうないんだ。 + +04:19.900 --> 04:23.350 +その時点では、 一般的にモノを使って仕事をしている。 + +04:23.620 --> 04:33.340 +ビジュアル化することは素晴らしいことだ。 + +04:33.340 --> 04:38.650 +あなたの価値観の中には、 例えば商品の価格について考えているとき、 価格の幅はどれくらいですか? 
+ +04:38.650 --> 04:42.940 +分布が何らかの形で偏っているものが多いことが判明したのだろうか。 + +04:42.940 --> 04:46.360 +だから、 それを視覚化することで、 本当に良い感覚を得ることができる。 + +04:46.930 --> 04:49.720 +今度はデータの質をより深く評価する。 + +04:49.720 --> 04:53.710 +データにどのような制限があるかを理解する。 + +04:53.830 --> 05:03.630 +そしてキュレーションとは、 このデータセットをどのように作るかを決めることです。 + +05:03.630 --> 05:16.020 +例えば、 データの4分の1が何らかの形でデータ品質が低いことが判明した場合、 そのデータを完全に除外することもできる。 + +05:16.020 --> 05:20.190 +十分に大きなサンプルを持っているので、 4分の3に集中できると思うかもしれない。 + +05:20.190 --> 05:25.770 +データセットのバランスが非常に悪いことがわかり、 トレーニングの一環として、 + +05:25.770 --> 05:35.160 +モデルがデータの特定のバランスしか学習しないことを懸念しているのであれば、 この機会にそのバランスに対処する可能性がある。 + +05:35.160 --> 05:43.110 +つまりキュレーションとは、 トレーニングに最適なデータセットを作成し、 それを保存することなのだ。 + +05:43.110 --> 05:46.200 +我々の場合は、 Huggingfaceのハブにアップロードする。 + +05:46.500 --> 05:50.430 +これがトレーニングに入る前の最終段階だ。 + +05:50.460 --> 05:55.590 +というわけで、 新しいプロジェクトで初めてJupyterLabに向かい、 + +05:55.590 --> 05:58.860 +データのキュレーションに取りかかろう。 + +05:58.890 --> 05:59.700 +そこで会おう diff --git a/week5/community-contributions/subtitles/srts/59471979/ko_KR.srt b/week5/community-contributions/subtitles/srts/59471979/ko_KR.srt new file mode 100755 index 0000000..69f9148 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59471979/ko_KR.srt @@ -0,0 +1,241 @@ +WEBVTT + +00:01.490 --> 00:06.830 +이제 문제의 일부로 넘어가 보죠. 이 부분은 다른 부분만큼 화려하진 + +00:06.830 --> 00:12.770 +않지만 가장 중요한 부분일 겁니다. 바로 데이터를 찾고 만드는 것이죠. 
+ +00:12.770 --> 00:16.850 +데이터를 찾고 데이터를 사냥할 수 있는 장소가 많아요 + +00:16.850 --> 00:19.010 +하지만 제일 먼저 가는 곳이잖아요 + +00:19.040 --> 00:25.640 +가장 중요한 건 귀사의 독점 데이터죠 + +00:25.640 --> 00:31.820 +여러분이 해결하려는 문제와 직접 관련이 있으면 좋겠고 미세한 + +00:31.820 --> 00:34.460 +조정에 아주 중요할 거예요 + +00:34.460 --> 00:41.060 +가짜로 만든 랙 프로젝트의 경우 그렇게 했죠 + +00:41.090 --> 00:47.120 +우린 우린 지식 기반 구축에 사용한 회사의 공유 드라이브가 있는 척했죠 + +00:47.150 --> 00:51.260 +사유 데이터를 찾는 예가 바로 그거죠 + +00:51.260 --> 00:57.740 +제 사업인 네뷸라의 경우 독자 모델을 훈련하는 데 쓸 수 있는 재능과 직업, + +00:57.740 --> 01:00.800 +경력에 대한 정보가 있어요 + +01:00.950 --> 01:07.790 +여러분의 문제에 구체적인 소유 데이터 세트를 찾는 것이 첫 번째 + +01:07.790 --> 01:09.140 +시작이죠 + +01:09.170 --> 01:15.380 +캐글도 있습니다 데이터 과학자를 위한 훌륭한 자원이죠 + +01:15.740 --> 01:17.600 +아마 들어보고 사용하셨겠죠 + +01:17.630 --> 01:19.160 +없으면 가서 봐요 + +01:19.280 --> 01:22.400 +데이터가 정말 많아요 + +01:22.430 --> 01:29.180 +데이터는 오랜 기간 동안 사람들이 캐글에 기여한 거예요 + +01:29.330 --> 01:32.390 +그리고 포옹하는 얼굴도 있죠 + +01:32.420 --> 01:36.080 +저희에겐 정말 멋진 자원이에요 + +01:36.110 --> 01:39.170 +포옹하는 표정도 잠시 후에 보여드릴게요 + +01:39.740 --> 01:42.290 +합성 데이터도 있어요 + +01:42.290 --> 01:46.640 +우리 랙 프로젝트는 진짜 회사의 공유 드라이브를 사용하지 않았어요 + +01:46.640 --> 01:51.350 +LLM을 이용해 합성 데이터를 생성했는데 그건 옵션이죠 + +01:51.350 --> 01:52.880 +물론 장단점이 있죠 + +01:52.880 --> 01:59.780 +프런티어 모델을 사용해 데이터를 얻으려 한다면 프런티어 모델이 데이터를 생성하고 학습하는 + +01:59.780 --> 02:04.690 +것은 말이 안 될 수도 있습니다 하지만 자신만의 모델을 만들거나 더 + +02:04.690 --> 02:09.130 +저렴한 모델을 만들려 한다면 프런티어 모델이 시드를 하게 + +02:09.160 --> 02:15.400 +될 것입니다 프런트엔드 모델을 이용해 데이터를 생성하고 그 데이터를 더 작고 저렴하고 + +02:15.400 --> 02:19.870 +가벼운 모델을 훈련하는 데 사용할 수 있죠 + +02:19.870 --> 02:24.310 +다양한 상황에서 합성 데이터가 말이 되죠 + +02:25.060 --> 02:31.750 +그런 다음 여러분을 위해 데이터 세트를 큐레이팅하는 작업을 하는 전문 회사들이 있다고 + +02:31.780 --> 02:32.890 +말씀드리죠 + +02:32.920 --> 02:37.390 +이 회사를 만난 적이 있어요 예전에 실이라는 이름의 leaderboard를 + +02:37.390 --> 02:45.940 +볼 때였죠 Scale이라는 회사에서 만든 Lms의 비즈니스용 leaderboard예요 + +02:45.940 --> 02:51.040 +비율은 여러분의 문제를 위해 정교한 데이터 세트를 만드는 데 전문이죠 + +02:51.040 --> 02:53.500 +또 다른 장소죠 + +02:53.680 --> 02:59.650 +하지만 우리는 
얼굴 안기 게임을 할 거예요 아주 귀중한 데이터고 공동체가 기여한 데이터가 + +02:59.680 --> 03:04.090 +많이 들어 있어요 이 특정 데이터 세트도 포함해서요 + +03:04.240 --> 03:07.600 +수년에 걸친 아마존 리뷰를 긁어모아요 + +03:07.600 --> 03:09.940 +정말 거대하네요 + +03:09.940 --> 03:11.500 +정말 방대한 데이터 세트예요 + +03:11.500 --> 03:18.580 +리뷰를 작성하는 것 외에도 제품과 관련된 메타데이터도 있어요 + +03:18.850 --> 03:22.870 +제품 설명과 가격을 포함해서요 + +03:22.870 --> 03:28.360 +그게 바로 우리가 원하는 거죠 제품 설명과 가격이요 + +03:28.360 --> 03:33.280 +이 데이터 세트에는 그 양이 아주 많아요 + +03:33.280 --> 03:35.680 +그래서 저희에게 완벽하죠 + +03:35.680 --> 03:37.600 +우리가 향할 곳은 여기예요 + +03:39.370 --> 03:42.640 +데이터를 어떻게 파헤칠 건가요? + +03:42.640 --> 03:44.530 +어떤 단계를 밟나요? + +03:44.590 --> 03:48.850 +오늘 이 작업 중 일부를 하고 일부는 내일 다듬을 거예요 + +03:49.000 --> 03:55.900 +데이터를 깊이 파고들려면 6단계가 있어요 + +03:55.900 --> 04:02.380 +먼저, 조사할 때 데이터를 이해하는 시기가 있어요 어떤 필드를 가질까요? + +04:02.380 --> 04:04.420 +데이터가 얼마나 꽉 찼나요? + +04:04.420 --> 04:07.090 +데이터 품질에 어떤 문제가 있나요? + +04:07.180 --> 04:13.180 +일반적으로 제가 접근하는 방법은 데이터를 구조로 파싱 하는 겁니다 그럼 + +04:13.180 --> 04:15.490 +처리하기 더 쉬워지죠 + +04:15.670 --> 04:19.900 +더는 가공되지 않은 데이터로 작업하지 않아도 되죠 + +04:19.900 --> 04:23.350 +그 시점에서 일반적으로 개체로 작업하죠 + +04:23.620 --> 04:33.340 +시각화를 하는 건 좋은 일입니다 얼마나 넓고 얼마나 퍼져 있는지 보는 건 중요하죠 + +04:33.340 --> 04:38.650 +제품 가격 같은 걸 생각할 때 가격의 차이는 어느 정도인가요? + +04:38.650 --> 04:42.940 +분포가 어떤 식으로든 삐뚤어져 있는 게 많은가요? 
+ +04:42.940 --> 04:46.360 +상상해 보세요 그럼 감이 잘 오겠죠 get it get it + +04:46.930 --> 04:49.720 +이제 데이터 품질을 더 심도 있게 평가해 보죠 + +04:49.720 --> 04:53.710 +데이터의 한계를 이해해야 해요 + +04:53.830 --> 05:01.290 +이를 통해 어떻게 행동하고 큐레이팅할지 결정하고 이 데이터 세트를 어떻게 + +05:01.290 --> 05:03.630 +만들지 결정하게 되죠 + +05:03.630 --> 05:12.030 +예를 들어 데이터의 4분의 1이 데이터 품질이 나쁘다는 사실이 밝혀진다면 해당 데이터를 + +05:12.030 --> 05:16.020 +아예 제외하기로 할 수도 있죠 + +05:16.020 --> 05:20.190 +샘플이 충분히 있어서 4분의 3에 집중할 수 있다고 생각할 수도 있어요 + +05:20.190 --> 05:25.770 +데이터 집합이 어떤 면에서 매우 불균형하고 훈련의 일부로서 + +05:25.770 --> 05:31.800 +모델이 데이터의 특정 균형만 배울까 봐 걱정된다면 이 시기가 잠재적으로 + +05:31.800 --> 05:35.160 +그 균형을 바로잡을 때죠 + +05:35.160 --> 05:43.110 +큐레이팅은 훈련에 가장 적합한 데이터 세트를 만들어 저장하는 거예요 + +05:43.110 --> 05:46.200 +어깅페이스 허브에 업로드할 거예요 + +05:46.500 --> 05:50.430 +훈련하기 전 마지막 단계죠 + +05:50.460 --> 05:55.590 +이제 새로운 프로젝트를 가지고 처음으로 JupyterLab으로 가서 데이터를 + +05:55.590 --> 05:58.860 +큐레이팅하는 법을 배워볼 거예요. HET + +05:58.890 --> 05:59.700 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/59472007/en_US.srt b/week5/community-contributions/subtitles/srts/59472007/en_US.srt new file mode 100755 index 0000000..7bde09f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472007/en_US.srt @@ -0,0 +1,367 @@ +WEBVTT + +00:00.620 --> 00:07.460 +So I hope you enjoyed our first bash at Scrubbing Data, and that you are now looking through the code, + +00:07.460 --> 00:13.310 +convincing yourself that I've done good things when I have cleansed the data and I haven't thrown out + +00:13.310 --> 00:15.380 +anything that we should be including for the model. + +00:15.470 --> 00:21.200 +If you do find something like that, please let me know right away, because I love squeezing a bit + +00:21.200 --> 00:26.360 +more juice out of the model and making the results a little bit better, and changing the data. 
+ +00:26.390 --> 00:32.270 +Improving the quality of the data that goes into the model has had by far the most profound impact on + +00:32.270 --> 00:33.830 +the performance of the model. + +00:33.950 --> 00:39.980 +You can spend huge amounts of time doing what they call hyperparameter tuning, which we will all be + +00:39.980 --> 00:44.690 +doing for for a while to try and squeeze a little tiny bit of juice out of your model and make it a + +00:44.690 --> 00:49.310 +little bit better at predicting outcomes as we try and go head to head against frontier models, which + +00:49.310 --> 00:50.450 +is going to be hard. + +00:50.540 --> 00:57.560 +Um, but it turns out that improving the data quality of what goes in is the easiest and most effective + +00:57.560 --> 00:58.850 +way to move the needle. + +00:58.850 --> 01:01.640 +So that's why it's worth spending the time doing it. + +01:01.640 --> 01:06.950 +And in that last Jupyter notebook, we we really worked on numbers one, two and three on this list, + +01:06.950 --> 01:08.600 +and some of four and five. + +01:08.630 --> 01:13.790 +We've got more of four and five to go, but most important is that you look through that Isom class + +01:13.790 --> 01:16.460 +and see how it's doing its thing yourself. + +01:17.690 --> 01:26.780 +Now just before we we we wrap up, I do want to measure and mention again the topic of performance evaluation, + +01:26.810 --> 01:33.710 +how we're going to decide whether or not we have actually succeeded in our goals with this model. + +01:33.950 --> 01:40.520 +Uh, there are, as I mentioned a while back, these two kinds of of metrics that you can use to evaluate + +01:40.520 --> 01:41.150 +performance. + +01:41.150 --> 01:48.020 +There are model centric metrics which are more in the sort of data scientist's domain of understanding + +01:48.020 --> 01:52.310 +how how well your model is performing mathematically. 
+ +01:52.430 --> 01:59.030 +Uh, and then there are outcome business centric metrics, which are the ones that will resonate with + +01:59.030 --> 02:03.650 +the, the the user, the sponsor, the the CEO. + +02:03.680 --> 02:07.430 +They're the ultimate goals of the project. + +02:07.460 --> 02:13.310 +The model centric ones are much more directly measurable by the model, and can usually be measured + +02:13.310 --> 02:15.620 +while it's training and optimized. + +02:15.650 --> 02:22.520 +The business centric ones are the ones that matter the most, but not necessarily as immediately tied + +02:22.550 --> 02:23.540 +to the model. + +02:24.080 --> 02:30.680 +Although in our case, we have this wonderful luxury of having business centric metrics which are going + +02:30.710 --> 02:34.760 +to be tied to the model and are going to be great first on the model centric metrics. + +02:34.760 --> 02:41.120 +So you'll often hear talk of training loss and validation loss that training losses is is the loss with + +02:41.120 --> 02:41.720 +the training set. + +02:41.750 --> 02:46.430 +Validation loss is with the validation set, which is a part of the data that you've held back. + +02:46.880 --> 02:53.300 +And this is going to be a metric that the model will calculate that we'll talk more about later, and + +02:53.300 --> 02:54.620 +we'll see it happening. + +02:54.620 --> 02:59.540 +And during the course of training you will see your training loss hopefully gradually going down and + +02:59.540 --> 03:05.120 +down and down, which means that the model is getting more and more accurate at doing its task. + +03:05.510 --> 03:13.890 +Um uh, and there are some more sophisticated data science metrics that uh, we will also be looking + +03:13.890 --> 03:15.240 +at at various points. + +03:15.240 --> 03:19.230 +There's one called the root mean squared log error. + +03:19.260 --> 03:20.190 +RMSE l. 
+ +03:20.490 --> 03:27.690 +And the benefit of this particular error is that it's something that will both penalize the model if + +03:27.690 --> 03:33.390 +it's if it's wrong on an, on an absolute basis and on a, on a percentage basis in different ways. + +03:33.390 --> 03:38.910 +And it's not going to unfairly penalize the model if it's out by a few dollars on a low priced item. + +03:38.910 --> 03:47.460 +So it's a it's a good balanced metric that weighs up absolute differences and relative percentage differences. + +03:47.640 --> 03:48.990 +Um, appropriately. + +03:48.990 --> 03:51.450 +So that's something that we'll be looking at too. + +03:51.480 --> 03:56.790 +Another one that people often look at is much simpler is just mean squared error, which is the the + +03:56.790 --> 04:00.900 +square of the difference between the prediction and the actual price. + +04:00.990 --> 04:06.270 +The challenge with mean squared error is that it blows up for larger prices. + +04:06.270 --> 04:14.100 +So if we have something that costs $800 and we predict $900, then there's a difference of $100 there. + +04:14.100 --> 04:20.610 +And if you square that, you get 10,000, which is a big number and which will dwarf all of your other + +04:20.610 --> 04:21.330 +errors. + +04:21.420 --> 04:26.460 +So mean squared error can be more troubling for our kind of problem. + +04:26.850 --> 04:29.340 +So those are the model centric metrics. + +04:29.340 --> 04:32.040 +But now let's talk about the business metrics. + +04:32.040 --> 04:36.960 +So the wonderful thing about our problem is that there is this metric which is really it's really belongs + +04:36.960 --> 04:37.890 +in both camps. + +04:37.890 --> 04:42.480 +It's just the the average absolute price difference. + +04:42.480 --> 04:45.210 +Basically how wrong was the model. + +04:45.210 --> 04:49.560 +It said that the fridge cost 100 bucks and it actually cost 120 bucks. + +04:49.560 --> 04:51.060 +So it was out by 20. 
+ +04:51.090 --> 04:56.220 +That average price difference is so simple, so human, understandable. + +04:56.460 --> 05:00.240 +And that is a great business outcome metric. + +05:00.240 --> 05:01.710 +It has some flaws. + +05:01.710 --> 05:10.050 +It's sort of unfair that that more expensive things like something again that costs like $800 if you're + +05:10.050 --> 05:14.670 +out, if you if you guess that it costs $850, you didn't do badly. + +05:14.670 --> 05:18.990 +But that's going to count as a $50 error, which is quite significant. + +05:19.230 --> 05:24.720 +Whereas if something costs $10, $10, then you would want to do much better than be out by $50. + +05:24.720 --> 05:26.100 +So it's not ideal. + +05:26.100 --> 05:29.460 +It has some some problems, but it's it's pretty good. + +05:29.460 --> 05:34.110 +And it's going to give us such a human understandable way of telling whether our model is doing any + +05:34.110 --> 05:34.710 +good. + +05:34.890 --> 05:39.990 +In conjunction with that, we could look at the percentage price difference, uh, you know, which + +05:39.990 --> 05:41.970 +which has the opposite problem. + +05:41.970 --> 05:47.610 +That's sort of more makes more sense, more reasonable for high priced items, but seems a bit unfair + +05:47.610 --> 05:48.990 +for cheap items. + +05:49.200 --> 05:51.540 +Something costs ten bucks and you get 12. + +05:51.570 --> 05:56.370 +Then saying that that's 20% out seems a bit a bit harsh. + +05:57.120 --> 06:03.690 +Um, and then another way to do it is to have some criteria that you will use to judge that an estimate + +06:03.690 --> 06:09.000 +is good quality, and that criteria could combine an absolute difference and a percentage difference. + +06:09.000 --> 06:18.120 +You might say, look, if something is, is, um, either within $40 or it's within 20%, then I consider + +06:18.120 --> 06:18.780 +that a hit. + +06:18.780 --> 06:20.370 +I consider that a good estimate. 
+
+06:20.400 --> 06:22.530
+Good enough, good enough estimate.
+
+06:22.680 --> 06:28.830
+Um, and so then you could just measure what percentage of the model's predictions meet that criteria
+
+06:28.830 --> 06:30.870
+and are considered good.
+
+06:30.900 --> 06:36.030
+So that's another business metric that we could use and that we will use.
+
+06:36.180 --> 06:40.860
+Uh, and so these these are ways that we can assess the performance.
+
+06:40.860 --> 06:46.110
+And as I say, the delight with our project is that they're going to be so human.
+
+06:46.110 --> 06:47.070
+Understandable.
+
+06:47.070 --> 06:53.340
+So we're going to be able to do things like try different model, different hyperparameters.
+
+06:53.370 --> 06:54.510
+We're going to try different models.
+
+06:54.510 --> 06:56.790
+We're going to try frontier and open source.
+
+06:56.790 --> 07:03.360
+And we'll simply be able to see which ones are better able to predict product prices.
+
+07:04.920 --> 07:13.620
+And so we are very nearly ready to roll our sleeves up and get coding and get assessing how this performs,
+
+07:13.620 --> 07:14.820
+how we can predict prices.
+
+07:14.820 --> 07:16.740
+And it's going to be such a lot of fun.
+
+07:16.830 --> 07:20.970
+Uh, but there are just a couple of housekeeping things we have to do first.
+
+07:21.000 --> 07:29.230
+I want to talk to you a bit about, uh, the sort of business strategy for how you go about solving
+
+07:29.230 --> 07:30.340
+a problem like this.
+
+07:30.370 --> 07:36.010
+And I realize that if you're like me, you are itching to get into it, itching to get in, to actually
+
+07:36.040 --> 07:40.810
+finally building training our model and seeing how good are these different models out there.
+
+07:40.900 --> 07:48.010
+Um, but I just want to give you this, this grounding, this foundation, so that you have this context
+
+07:48.100 --> 07:52.060
+that's going to be reinforced when we then do go and solve the business problem.
+ +07:52.690 --> 07:59.410 +I'm going to contrast the different techniques for improving performance and explain, like what's the + +07:59.410 --> 08:00.130 +difference between them? + +08:00.130 --> 08:04.990 +I really want to to clarify, in what circumstances do you use Rag? + +08:04.990 --> 08:07.570 +In what circumstances do you use fine tuning? + +08:07.570 --> 08:09.340 +It's a question I get asked a lot. + +08:09.370 --> 08:12.250 +Um, and I think it's helpful to clarify that before we get into it. + +08:12.250 --> 08:17.410 +So you've got that, that, that context, and then we still have some, some homework to do with the + +08:17.410 --> 08:18.040 +data set. + +08:18.070 --> 08:21.640 +We've still got to curate and upload our final data set. + +08:21.640 --> 08:22.570 +It's going to be big. + +08:22.570 --> 08:25.690 +It's going to be a big data set, because we're going to want to do a lot of training. + +08:25.810 --> 08:27.430 +So we have some more to do there. + +08:27.460 --> 08:30.580 +All of that, all of that coming tomorrow. + +08:30.610 --> 08:31.420 +See you then. 
diff --git a/week5/community-contributions/subtitles/srts/59472007/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472007/ja_JP.srt new file mode 100755 index 0000000..bf6c4a3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472007/ja_JP.srt @@ -0,0 +1,286 @@ +WEBVTT + +00:00.620 --> 00:07.460 +そして、 私がデータをクレンジングし、 モデルに含めるべきものを捨てていないとき、 + +00:07.460 --> 00:15.380 +私は良いことをしたと自分自身を納得させる。 + +00:15.470 --> 00:21.200 +もしそのようなものを見つけたら、 すぐに教えてほしい。 私はモデルからもう少し汁を絞り出したり、 + +00:21.200 --> 00:26.360 +結果を少し良くしたり、 データを変えたりするのが大好きだからだ。 + +00:26.390 --> 00:33.830 +モデルに投入するデータの質を向上させることが、 モデルのパフォーマンスに最も大きな影響を与えた。 + +00:33.950 --> 00:39.980 +ハイパーパラメーターのチューニングと呼ばれるものに膨大な時間を費やすことができる。 私たちは皆、 + +00:39.980 --> 00:44.690 +しばらくの間、 自分のモデルからほんのわずかな力を絞り出し、 フロンティアモデルに対して真っ向勝負を挑む中で、 + +00:44.690 --> 00:50.450 +結果を予測する能力を少しでも高めようとする。 + +00:50.540 --> 00:58.850 +しかし、 結局のところ、 そのデータの質を向上させることが、 針を動かす最も簡単で効果的な方法なのだ。 + +00:58.850 --> 01:01.640 +だからこそ、 時間を費やす価値があるんだ。 + +01:01.640 --> 01:08.600 +そして最後のJupyterノートブックでは、 このリストの1番、 2番、 3番、 そして4番と5番の一部に取り組んだ。 + +01:08.630 --> 01:16.460 +まだ4と5が残っているが、 最も重要なのは、 イソムのクラスに目を通し、 それがどのように機能しているかを自分で確認することだ。 + +01:17.690 --> 01:33.710 +さて、 最後にもう一度、 このモデルの目標が達成できたかどうかをどのように判断するかという業績評価について触れておきたい。 + +01:33.950 --> 01:41.150 +ええと、 少し前にお話ししたように、 パフォーマンスを評価するために使える2種類の指標があります。 + +01:41.150 --> 01:48.020 +モデル中心のメトリクスもあり、 これはモデルが数学的にどの程度うまく機能しているかを理解する、 + +01:48.020 --> 01:52.310 +データサイエンティストの領域に近い。 + +01:52.430 --> 01:59.030 +そして、 ユーザーやスポンサー、 CEOの心に響くような、 + +01:59.030 --> 02:03.650 +成果ビジネス中心の指標がある。 + +02:03.680 --> 02:07.430 +それがプロジェクトの最終目標だ。 + +02:07.460 --> 02:15.620 +モデル中心的なものは、 モデルによってより直接的に測定可能であり、 通常はトレーニング中や最適化中に測定することができる。 + +02:15.650 --> 02:23.540 +ビジネス中心のものは、 最も重要なものだが、 必ずしもすぐにモデルと結びつくものではない。 + +02:24.080 --> 02:34.760 +私たちの場合、 ビジネス・セントリック・メトリックスという素晴らしい贅沢がある。 + +02:34.760 --> 02:41.720 +トレーニング・ロスとバリデーション・ロスという言葉をよく耳にするが、 トレーニング・ロスとはトレーニング・セットでのロスのことだ。 + +02:41.750 --> 02:46.430 +バリデーション・ロスは、 バリデーション・セット、 つまり、 あなたが保留したデータの一部である。 + 
+02:46.880 --> 02:54.620 +これはモデルが計算する指標で、 後で詳しく説明する。 + +02:54.620 --> 03:05.120 +トレーニングの過程で、 トレーニングの損失が徐々に減っていくのがわかるだろう。 + +03:05.510 --> 03:15.240 +そして、 より洗練されたデータサイエンスの指標もあり、 それらも様々な場面で見ていくつもりだ。 + +03:15.240 --> 03:19.230 +平均二乗対数誤差と呼ばれるものがある。 + +03:19.260 --> 03:20.190 +RMSE l. + +03:20.490 --> 03:27.690 +この特別なエラーの利点は、 モデルが絶対的に間違っている場合と、 パーセンテージベースで間違っている場合の両方で、 + +03:27.690 --> 03:33.390 +モデルにペナルティを与えることができることだ。 + +03:33.390 --> 03:38.910 +また、 低価格の商品で数ドルの差があっても、 そのモデルに不当なペナルティを課すことはない。 + +03:38.910 --> 03:47.460 +つまり、 絶対的な差と相対的なパーセンテージの差を天秤にかけた、 バランスの取れた指標なのだ。 + +03:47.640 --> 03:48.990 +うーん、 適当だ。 + +03:48.990 --> 03:51.450 +だから、 その点にも注目している。 + +03:51.480 --> 04:00.900 +もうひとつは、 もっと単純な平均二乗誤差で、 これは予想と実際の価格の差の二乗である。 + +04:00.990 --> 04:06.270 +平均二乗誤差の課題は、 価格が大きくなると爆発してしまうことだ。 + +04:06.270 --> 04:14.100 +つまり、 800ドルのものを900ドルと予想した場合、 そこには100ドルの差がある。 + +04:14.100 --> 04:21.330 +二乗すれば1万となり、 これは大きな数字で、 他のすべての誤差を凌駕する。 + +04:21.420 --> 04:26.460 +そのため、 平均二乗誤差は私たちのような問題にとってはより厄介なものとなる。 + +04:26.850 --> 04:29.340 +これがモデル中心の評価基準だ。 + +04:29.340 --> 04:32.040 +しかし、 次はビジネス指標について話そう。 + +04:32.040 --> 04:37.890 +だから、 この問題の素晴らしいところは、 どちらの陣営にも属するこの指標があるということだ。 + +04:37.890 --> 04:42.480 +あくまで平均的な絶対価格差だ。 + +04:42.480 --> 04:45.210 +基本的に、 このモデルがいかに間違っていたか。 + +04:45.210 --> 04:49.560 +冷蔵庫は100ドルと書いてあったが、 実際は120ドルだった。 + +04:49.560 --> 04:51.060 +だから20点差だった。 + +04:51.090 --> 04:56.220 +その平均的な価格差はとてもシンプルで、 人間的で、 理解できる。 + +04:56.460 --> 05:00.240 +そして、 それは素晴らしいビジネス成果の指標である。 + +05:00.240 --> 05:01.710 +欠点もある。 + +05:01.710 --> 05:10.050 +800ドルもするような高価なものが、 もし850ドルもするようなものだとわかったら、 + +05:10.050 --> 05:14.670 +悪くないと思う。 + +05:14.670 --> 05:18.990 +しかし、 これは50ドルのエラーにカウントされる。 + +05:19.230 --> 05:24.720 +一方、 10ドル、 10ドルのものであれば、 50ドル差よりはずっといいものを作りたいと思うだろう。 + +05:24.720 --> 05:26.100 +だから理想的ではない。 + +05:26.100 --> 05:29.460 +いくつか問題はあるが、 かなりいい。 + +05:29.460 --> 05:34.710 +そして、 我々のモデルが良い結果を出しているかどうかを判断する、 人間にとって理解しやすい方法を与えてくれる。 + +05:34.890 --> 05:41.970 +それに関連して、 価格差のパーセンテージを見ることもできる。 + +05:41.970 --> 
05:48.990 +その方が理にかなっているし、 高額商品には合理的だが、 安い商品には少し不公平に思える。 + +05:49.200 --> 05:51.540 +10ドルのものが12ドルになる。 + +05:51.570 --> 05:56.370 +それなら、 20%アウトというのはちょっと厳しすぎるような気がする。 + +05:57.120 --> 06:03.690 +そしてもう一つの方法は、 見積もりが良質であると判断するための基準を持つことです。 その基準は、 + +06:03.690 --> 06:09.000 +絶対的な差とパーセンテージの差を組み合わせることができます。 + +06:09.000 --> 06:18.780 +40ドル以内か、 20%以内ならヒットだと思う。 + +06:18.780 --> 06:20.370 +私は、 これは良い見積もりだと思っている。 + +06:20.400 --> 06:22.530 +十分な見積もりだ。 + +06:22.680 --> 06:30.870 +そうすれば、 モデルの予測の何パーセントがその基準を満たし、 良好とみなされるかを測定することができる。 + +06:30.900 --> 06:36.030 +だから、 これもビジネス指標として使えるし、 使うつもりだ。 + +06:36.180 --> 06:40.860 +それで、 これがパフォーマンスを評価する方法なんだ。 + +06:40.860 --> 06:46.110 +そして、 私たちのプロジェクトで嬉しいのは、 彼らがとても人間的になることだ。 + +06:46.110 --> 06:47.070 +理解できる。 + +06:47.070 --> 06:53.340 +そのため、 異なるモデルや異なるハイパーパラメーターを試すようなことができるようになる。 + +06:53.370 --> 06:54.510 +いろいろなモデルを試してみるつもりだ。 + +06:54.510 --> 06:56.790 +フロンティアとオープンソースを試してみるつもりだ。 + +06:56.790 --> 07:03.360 +そして、 単純にどれが商品価格の予測に優れているかを見ることができるだろう。 + +07:04.920 --> 07:14.820 +そして、 私たちは袖をまくり、 コーディングに取り掛かり、 これがどのように機能するか、 どのように価格を予測できるかを評価する準備がほとんどできている。 + +07:14.820 --> 07:16.740 +そして、 とても楽しくなりそうだ。 + +07:16.830 --> 07:20.970 +ええと、 でも、 その前にやらなきゃいけないことがいくつかあるんだ。 + +07:21.000 --> 07:30.340 +このような問題を解決するためのビジネス戦略について少しお話したいと思います。 + +07:30.370 --> 07:36.010 +そして、 もしあなたが私のようなものであれば、 早くこの世界に入りたい、 + +07:36.040 --> 07:40.810 +この世界に入りたいとうずうずしていることだろう。 + +07:40.900 --> 07:48.010 +でも、 この基礎固めをしておきたいんだ。 そうすれば、 ビジネス上の問題を解決するときに、 + +07:48.100 --> 07:52.060 +その背景がより強固なものになるからね。 + +07:52.690 --> 08:00.130 +パフォーマンスを向上させるためのさまざまなテクニックを対比させ、 それぞれの違いは何なのか? + +08:00.130 --> 08:04.990 +どんな状況でラグを使うのか? + +08:04.990 --> 08:07.570 +どのような状況で微調整を行うのですか? 
+ +08:07.570 --> 08:09.340 +よく聞かれる質問だ。 + +08:09.370 --> 08:12.250 +そのことをはっきりさせておいた方がいいと思うんだ。 + +08:12.250 --> 08:18.040 +だから、 その文脈を理解した上で、 データセットについてまだ宿題があるんだ。 + +08:18.070 --> 08:21.640 +まだ最終的なデータセットをキュレーションしてアップロードしなければならない。 + +08:21.640 --> 08:22.570 +大きくなるだろうね。 + +08:22.570 --> 08:25.690 +大きなデータセットになるだろうし、 たくさんのトレーニングをしたいからね。 + +08:25.810 --> 08:27.430 +だから、 そこでもう少しやることがある。 + +08:27.460 --> 08:30.580 +そのすべてが、 明日やってくる。 + +08:30.610 --> 08:31.420 +ではまた diff --git a/week5/community-contributions/subtitles/srts/59472007/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472007/ko_KR.srt new file mode 100755 index 0000000..d22a1da --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472007/ko_KR.srt @@ -0,0 +1,352 @@ +WEBVTT + +00:00.620 --> 00:07.460 +데이터 스크러빙 첫 번째 강습이 즐거우셨길 바랍니다 이제 코드를 훑어보며 데이터를 삭제했을 + +00:07.460 --> 00:13.310 +때 좋은 일을 했다고 자신을 설득하세요 모델에 포함해야 할 건 아무것도 버리지 + +00:13.310 --> 00:15.380 +않았다고요 + +00:15.470 --> 00:21.200 +이런 걸 발견하시면 바로 알려주세요 모델에서 좀 더 비트 짜내고 결과를 + +00:21.200 --> 00:26.360 +좀 더 좋게 만들고 데이터를 바꾸는 걸 좋아하거든요 + +00:26.390 --> 00:32.270 +모델로 들어가는 데이터의 품질 개선이 모델의 성능에 가장 큰 + +00:32.270 --> 00:33.830 +영향을 미쳤죠 + +00:33.950 --> 00:39.980 +hyperferameter 튜닝에 많은 시간을 할애할 수 있습니다. 한동안 + +00:39.980 --> 00:44.690 +이 방법으로 모델에서 쥐어짜낸 비트들을 통해 결과를 예측하는 + +00:44.690 --> 00:49.310 +게 더 쉬워질 겁니다. 물론 개척 모델과 맞붙어서 어려운 작업을 + +00:49.310 --> 00:50.450 +해야겠죠. 
+ +00:50.540 --> 00:58.850 +하지만 데이터 품질을 향상시키는 게 가장 쉽고 효과적인 방법이에요 + +00:58.850 --> 01:01.640 +그래서 시간을 들여야 하는 거죠 + +01:01.640 --> 01:06.950 +그리고 마지막 주피터 노트에는 이 목록의 1번, 2번, 3번 4번, 5번까지 + +01:06.950 --> 01:08.600 +작업했어요 + +01:08.630 --> 01:13.790 +아직 4개와 5개가 더 남았지만 가장 중요한 건 아이솜 클래스를 + +01:13.790 --> 01:16.460 +직접 살펴보는 거예요 + +01:17.690 --> 01:26.780 +마무리하기 전에 다시 한번 측정과 평가 주제를 언급하고 싶습니다 이 모델로 + +01:26.810 --> 01:33.710 +목표 달성을 달성했는지 어떻게 결정하는지에 관해서요 + +01:33.950 --> 01:41.150 +앞서 말씀드렸듯이 성능을 평가할 때 사용하는 두 가지 측정법이 있어요 + +01:41.150 --> 01:48.020 +모델 중심 지표라는 게 있는데 모델이 수학적으로 얼마나 잘 작동하는지 + +01:48.020 --> 01:52.310 +이해하는 데이터 과학자의 영역에 속하죠 + +01:52.430 --> 01:59.030 +그리고 결과 비즈니스 중심 지표가 있습니다 사용자, 스폰서, + +01:59.030 --> 02:03.650 +CEO의 마음을 사로잡는 지표죠 + +02:03.680 --> 02:07.430 +그게 이 프로젝트의 궁극적인 목표예요 + +02:07.460 --> 02:13.310 +모델 중심의 것은 모델이 훨씬 직접 측정합니다 훈련과 최적화 과정에서 + +02:13.310 --> 02:15.620 +측정할 수 있죠 + +02:15.650 --> 02:22.520 +비즈니스 중심의 것이 가장 중요하지만 모델과 바로 연결될 필요는 + +02:22.550 --> 02:23.540 +없죠 + +02:24.080 --> 02:30.680 +물론 우리 같은 경우에는 비즈니스 중심 지표가 있어서 호사를 누리고 있죠 + +02:30.710 --> 02:34.760 +모델 중심 지표에서 가장 성공적일 거예요 + +02:34.760 --> 02:41.120 +훈련의 손실이나 검증의 손실이란 말을 종종 듣게 될 겁니다 훈련의 손실은 훈련 세트와 함께 잃는 + +02:41.120 --> 02:41.720 +거죠 + +02:41.750 --> 02:46.430 +검증 손실은 검증 집합과 함께 발생합니다 검증 집합은 데이터의 일부죠 + +02:46.880 --> 02:53.300 +모델은 이 측정법을 통해 계산할 겁니다 나중에 자세히 + +02:53.300 --> 02:54.620 +살펴보죠 + +02:54.620 --> 02:59.540 +훈련 과정에서 손실된 훈련량이 점차 줄어들 겁니다 + +02:59.540 --> 03:05.120 +모델이 작업을 점점 더 정확하게 수행한다는 뜻이죠 + +03:05.510 --> 03:13.890 +좀 더 정교한 데이터 과학 지표도 있는데 다양한 관점을 살펴볼 + +03:13.890 --> 03:15.240 +거예요 + +03:15.240 --> 03:19.230 +루트 평제곱 로그 에러라는 게 있어요 + +03:19.260 --> 03:20.190 +RMSE l요 + +03:20.490 --> 03:27.690 +이런 오류의 장점은 모델이 절대적으로 잘못되거나 퍼센트를 기준으로 + +03:27.690 --> 03:33.390 +하면 여러 면에서 감점을 받는다는 거죠 + +03:33.390 --> 03:38.910 +저가 제품에 몇 달러 차이로 불이익을 당해도 모델에게 부당하지 않아요 + +03:38.910 --> 03:47.460 +절대적인 차이와 상대적인 퍼센트 차이를 측정하는 균형 잡힌 측정법이죠 + +03:47.640 --> 03:48.990 +적절하죠 + +03:48.990 --> 03:51.450 +그것도 살펴볼 거예요 + 
+03:51.480 --> 03:56.790 +사람들이 훨씬 단순하게 생각하는 또 다른 건 평균 제곱 오류입니다 + +03:56.790 --> 04:00.900 +예측과 실제 가격 사이의 차이의 제곱을 의미하죠 + +04:00.990 --> 04:06.270 +제곱 오차의 문제는 가격이 높아도 터질 수 있다는 거죠 + +04:06.270 --> 04:14.100 +800달러가 드는 것이 있고 900달러가 들 것으로 예상한다면 100달러 차이가 나는 거죠 + +04:14.100 --> 04:20.610 +그걸 제곱하면 10,000가 되죠 아주 큰 숫자입니다 다른 모든 오류가 작아질 + +04:20.610 --> 04:21.330 +거예요 + +04:21.420 --> 04:26.460 +비열한 제곱 오차는 우리 같은 문제에선 더 큰 골칫거리죠 + +04:26.850 --> 04:29.340 +그게 모델 중심 지표예요 + +04:29.340 --> 04:32.040 +이제 비즈니스 지표에 대해 얘기해 보죠 + +04:32.040 --> 04:36.960 +우리 문제의 멋진 점은 측정법이 있다는 거예요 양쪽 진영에 + +04:36.960 --> 04:37.890 +속해 있죠 + +04:37.890 --> 04:42.480 +평균 절대 가격 차이죠 + +04:42.480 --> 04:45.210 +모델도 잘못됐고요 + +04:45.210 --> 04:49.560 +냉장고 가격이 100달러라고 했는데 120달러였어요 + +04:49.560 --> 04:51.060 +20점 차로 졌어요 + +04:51.090 --> 04:56.220 +평균 가격 차이는 단순하고 인간적이며 이해할 만하죠 + +04:56.460 --> 05:00.240 +그건 훌륭한 비즈니스 결과 척도예요 + +05:00.240 --> 05:01.710 +결점이 좀 있어요 + +05:01.710 --> 05:10.050 +좀 불공평해요 800달러 정도 하는 더 비싼 것이 850달러라고 + +05:10.050 --> 05:14.670 +추측했다면 나쁘지 않은 거예요 + +05:14.670 --> 05:18.990 +50달러 오류로 간주됩니다 꽤 중요한 거죠 + +05:19.230 --> 05:24.720 +10달러짜리 물건이라면 50달러보다 훨씬 싸게 팔아야 해요 + +05:24.720 --> 05:26.100 +이상적인 상황은 아니죠 + +05:26.100 --> 05:29.460 +문제가 좀 있지만 꽤 괜찮아요 + +05:29.460 --> 05:34.110 +우리 모델이 좋은 일을 하는지 인간적으로 이해할 수 있는 방법을 제공할 + +05:34.110 --> 05:34.710 +거예요 + +05:34.890 --> 05:39.990 +그것과 함께 가격 차이도 살펴볼 수 있어요 반대의 + +05:39.990 --> 05:41.970 +문제가 있죠 + +05:41.970 --> 05:47.610 +비트 박스가 더 합리적이고 고가의 물품에는 더 합리적이지만 저렴한 물품에는 불공평한 + +05:47.610 --> 05:48.990 +것 같아요 + +05:49.200 --> 05:51.540 +10달러짜리 물건에 12달러씩 얹어 주죠 Get it + +05:51.570 --> 05:56.370 +그런데 20% 비트는 건 좀 심하네요 + +05:57.120 --> 06:03.690 +또 다른 방법은 견적서의 품질을 판단하는 기준을 갖는 겁니다 견적서는 + +06:03.690 --> 06:09.000 +절대적인 차이와 퍼센트 차이를 합칠 수 있죠 + +06:09.000 --> 06:18.780 +오차가 40달러 이하나 20% 이하면 히트라고 볼 수 있죠 + +06:18.780 --> 06:20.370 +좋은 추정이네요 + +06:20.400 --> 06:22.530 +그 정도면 충분해요 + +06:22.680 --> 06:28.830 +그러면 모델의 예측이 그 기준에 부합하고 좋다고 간주되는 비율을 + +06:28.830 --> 06:30.870 +측정할 수 있죠 + 
+06:30.900 --> 06:36.030 +우리가 사용할 수 있고 또 사용할 또 다른 비즈니스 지표죠 + +06:36.180 --> 06:40.860 +이렇게 성능을 평가할 수 있어요 + +06:40.860 --> 06:46.110 +우리 프로젝트의 기쁨은 그들이 인간적이라는 거죠 + +06:46.110 --> 06:47.070 +이해해요 + +06:47.070 --> 06:53.340 +다른 모델, hyperpaameter 같은 걸 시도해볼 수 있어요 + +06:53.370 --> 06:54.510 +다른 모델을 시도해 보죠 + +06:54.510 --> 06:56.790 +프런티어와 오픈 소스를 시도해 보려고요 + +06:56.790 --> 07:03.360 +어떤 회사가 제품 가격을 더 잘 예측하는지 쉽게 알 수 있죠 + +07:04.920 --> 07:13.620 +이제 소매를 걷어붙이고 코딩을 시작하고 성능을 평가하고 가격을 예측할 준비가 거의 다 됐어요 + +07:13.620 --> 07:14.820 +get it + +07:14.820 --> 07:16.740 +정말 재미있을 거예요 + +07:16.830 --> 07:20.970 +하지만 그 전에 몇 가지 정리할 게 있어요 + +07:21.000 --> 07:29.230 +비즈니스 전략에 대해 좀 더 얘기하고 싶어요 이런 문제를 해결하는 방법에 + +07:29.230 --> 07:30.340 +대해서요 + +07:30.370 --> 07:36.010 +여러분도 저와 같다면 들어가고 싶어 안달일 겁니다 드디어 우리 모델을 훈련하고 + +07:36.040 --> 07:40.810 +이 다양한 모델이 얼마나 좋은지 보고 싶어 하죠 get it + +07:40.900 --> 07:48.010 +하지만 저는 이 기반과 기반을 알려드리고 싶어요 그래야 사업 문제를 해결하러 + +07:48.100 --> 07:52.060 +갔을 때 이 맥락을 강화할 수 있으니까요 + +07:52.690 --> 08:00.130 +퍼포먼스 향상을 위한 다양한 기술을 대조하고 그 차이점을 설명할 거예요 + +08:00.130 --> 08:04.990 +어떤 상황에서 랙을 쓰는지 확실히 하고 싶어요 + +08:04.990 --> 08:07.570 +어떤 상황에서 세세한 조율을 사용하나요? + +08:07.570 --> 08:09.340 +Get it, Get it, Get it, Get it, Get it, Get it. 많이 듣는 질문이에요 + +08:09.370 --> 08:12.250 +get it 시작하기 전에 명확히 하는 게 도움이 될 것 같아요 + +08:12.250 --> 08:18.040 +컨텍스트를 보았고 데이터 집합에 대한 숙제가 아직 남아 있어요 + +08:18.070 --> 08:21.640 +최종 데이터 세트를 큐레이팅하고 업로드 해야 해요 + +08:21.640 --> 08:22.570 +엄청날 거예요 + +08:22.570 --> 08:25.690 +빅데이터 집합이 될 거예요 훈련을 많이 해야 하니까요 + +08:25.810 --> 08:27.430 +아직 할 일이 남았어요 + +08:27.460 --> 08:30.580 +내일이면 모든 게 끝나요 + +08:30.610 --> 08:31.420 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59472011/en_US.srt b/week5/community-contributions/subtitles/srts/59472011/en_US.srt new file mode 100755 index 0000000..7450ff8 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472011/en_US.srt @@ -0,0 +1,427 @@ +WEBVTT + +00:00.710 --> 00:02.270 +Welcome everybody. 
+ +00:02.300 --> 00:08.900 +So in the past I've said quite a few times, I am excited to start this this week or this topic. + +00:08.900 --> 00:15.020 +And I want to say that all of that has been nonsense, because the excitement that I've had has paled + +00:15.020 --> 00:18.260 +in significance compared to the level of excitement I have today. + +00:18.260 --> 00:24.530 +As we start week six and we embark upon the world of training, this is what it's all about. + +00:24.530 --> 00:25.850 +Now it gets real. + +00:25.850 --> 00:27.260 +Prepare yourself. + +00:28.160 --> 00:32.900 +So up to this point, we've always been talking about what they call inference. + +00:32.900 --> 00:39.320 +And inference is when you take a model that's been trained to against a ton of data, and it's then + +00:39.350 --> 00:43.700 +used at runtime to predict the next token given an input. + +00:43.700 --> 00:47.750 +And we've been looking at different ways to get that inference better and better. + +00:47.750 --> 00:53.180 +We're now going to go and look at the models themselves and understand how can you train models so that + +00:53.180 --> 00:56.330 +they are even better at runtime and inference. + +00:56.660 --> 00:59.780 +And this this is where it gets advanced. + +00:59.780 --> 01:04.270 +We start with the less glamorous part of it, which is about the data. + +01:04.270 --> 01:09.640 +And whilst crafting a data set might might not sound as glamorous, it isn't as glamorous. + +01:09.640 --> 01:14.170 +It happens to be absolutely essential and perhaps one of the most important parts. + +01:14.170 --> 01:20.590 +And we're going to spend today and tomorrow spending, getting really deep into the data, understanding + +01:20.590 --> 01:26.050 +it back to front, visualizing it, cleaning it up, curating it, getting it to a form where we really + +01:26.050 --> 01:27.250 +like it a lot. + +01:27.400 --> 01:29.410 +Um, and that's something that you have to do. 
+ +01:29.440 --> 01:34.960 +We're also going to spend some time understanding, how are we going to gauge success of this project? + +01:34.990 --> 01:38.950 +What are we trying to achieve and how do we know if we've done it or not? + +01:39.970 --> 01:47.050 +But first, let's talk for a second about the eight weeks that you've had, where you've come from and + +01:47.050 --> 01:48.520 +where you are going. + +01:48.520 --> 01:56.500 +We started some time ago now, six weeks ago, uh, right on the left where you're heading is an LM + +01:56.500 --> 01:58.030 +engineering master. + +01:58.030 --> 01:59.230 +Over on the right. + +01:59.260 --> 02:03.190 +In week one, we talked about frontier models, and we tried some of them out. + +02:03.190 --> 02:06.420 +In week two we were using multiple APIs. + +02:06.420 --> 02:10.020 +We were building UIs with Gradio and Multi-modality. + +02:10.050 --> 02:15.540 +Week three we explored hugging Face the Pipelines Tokenizers and then the models. + +02:15.540 --> 02:18.360 +In week four, we were selecting models. + +02:18.360 --> 02:23.910 +We were using code generation and we built something that was what, 60,000 times faster? + +02:23.910 --> 02:27.120 +It was able to optimize code 60,000 times is remarkable. + +02:27.150 --> 02:28.410 +Week five. + +02:28.410 --> 02:33.870 +Last week, of course, was rag all about rag very hot topic. + +02:33.870 --> 02:42.390 +And now that brings us to week six fine tuning a frontier model where we've arrived at training in week + +02:42.390 --> 02:42.840 +seven. + +02:42.840 --> 02:43.860 +We'll do this again. + +02:43.860 --> 02:44.640 +But now. + +02:44.640 --> 02:49.320 +Now we'll be dealing with open source models and basically building our own model. + +02:49.320 --> 02:52.140 +And week eight is where it all comes together. + +02:52.350 --> 02:58.320 +So with that, let's talk a bit about the transition that we are now making. 
+ +02:58.320 --> 03:01.050 +We are moving from inference to training as I say. + +03:01.050 --> 03:05.490 +So let's just talk about when we when we've been working on inference, what have we been doing. + +03:05.490 --> 03:11.300 +We've we've developed different techniques so that when we are running these models, we can try and + +03:11.300 --> 03:13.310 +get them to perform better and better. + +03:13.340 --> 03:17.990 +We've tried Multi-shot prompting when we give it lots of examples for it to work on. + +03:17.990 --> 03:23.420 +We've tried prompt chaining when we send multiple different messages and build on top of each other + +03:23.420 --> 03:24.860 +and combine the results. + +03:24.890 --> 03:31.460 +We've used tools where we had the model almost be able to call back into our code, although it wasn't + +03:31.460 --> 03:32.540 +quite that magical. + +03:32.840 --> 03:39.020 +In order to do things like calculate the price of an airline ticket or the price of to travel to a different + +03:39.140 --> 03:40.040 +city. + +03:40.460 --> 03:48.860 +And then most recently, we worked on Rag injecting more relevant content context into the prompt. + +03:48.890 --> 03:54.800 +So all of these, what they have in common is they're all about taking an existing trained model and + +03:54.800 --> 04:00.320 +figuring out how we can best use it to take advantage of what it knows by calling it multiple times, + +04:00.350 --> 04:02.150 +adding in context, and so on. + +04:02.180 --> 04:06.140 +What we're going to do now is move on to training. 
+ +04:06.170 --> 04:11.950 +So in training, what we're trying to do is take a deep neural network, potentially with its billions + +04:11.950 --> 04:17.680 +of parameters, and figure out how can we tweak those parameters, change those weights, optimize them + +04:17.680 --> 04:26.140 +very slightly based on data so that it gets better and better at predicting future tokens, and whilst + +04:26.500 --> 04:33.130 +adding in more context at inference and things like that is a is a bit of a broad brush stroke in terms + +04:33.130 --> 04:35.800 +of how you can affect the outcomes with training. + +04:35.800 --> 04:43.750 +It's a much more nuanced technique that allows you to gradually build up deeper, finer grained understanding + +04:43.780 --> 04:45.640 +of the problem you're trying to solve. + +04:46.030 --> 04:53.710 +Now, trying to train a multi-billion parameter LLM is a rather expensive proposition. + +04:53.710 --> 05:01.600 +It's something that the Frontier Labs probably spend well, they do spend north of $100 million on training + +05:01.630 --> 05:05.620 +their their best models, and that's outside my budget. + +05:05.620 --> 05:07.900 +And I'm guessing it's outside your budget. + +05:08.200 --> 05:10.890 +And so that's unfortunately not possible for us. + +05:10.890 --> 05:14.940 +But luckily we could take advantage of something called transfer learning. + +05:14.940 --> 05:21.630 +And transfer learning says that it's perfectly doable to take an existing trained LM, a model that's + +05:21.630 --> 05:28.230 +already been pre-trained on a ton of data, and you can then just continue the training with a particular + +05:28.230 --> 05:29.070 +data set. + +05:29.070 --> 05:34.710 +Perhaps that solves a very specialized problem, and it will sort of transfer all of the knowledge that's + +05:34.710 --> 05:39.840 +already accumulated, and you'll be able to add on some extra knowledge on top of that. 
+ +05:40.110 --> 05:45.990 +Um, and so you could take a pre-trained model space and then you can sort of, um, you can, you can + +05:45.990 --> 05:48.750 +make it more precisely trained for your task. + +05:48.750 --> 05:51.240 +And that process is known as fine tuning. + +05:51.450 --> 05:53.760 +Um, as just, just as it sounds. + +05:53.910 --> 05:58.230 +Um, and of course, we're going to be using some techniques that I've name dropped in the past, like + +05:58.230 --> 06:00.450 +Q Laura, as ways to do it. + +06:00.450 --> 06:04.230 +That will be manageable in terms of memory and so on. + +06:05.100 --> 06:11.520 +So let me now introduce the the problem, the commercial problem that we are going to be working on + +06:11.670 --> 06:14.220 +for most of the next few weeks. + +06:14.220 --> 06:21.510 +So let's say we work at an e-commerce company or a marketplace company working in products, and we + +06:21.510 --> 06:27.510 +want to build a model that can take a description of any of almost any product, a wide variety of products, + +06:27.510 --> 06:34.140 +let's say electrical electronic products or computers, fridges, washing machines and other things + +06:34.170 --> 06:41.400 +for the home and car and be able to estimate how much it costs based just on the description. + +06:41.790 --> 06:47.160 +Um, now, uh, that's it's a it's a nice, easy to understand problem. + +06:47.160 --> 06:50.010 +It's got a very easy to measure outcome. + +06:50.010 --> 06:56.010 +You might if the data scientists amongst you might raise your hand and say, that doesn't particularly + +06:56.010 --> 07:01.980 +sound like a problem that's designed for a generative AI solution that generates text. + +07:02.010 --> 07:04.890 +Sounds like something that needs a model that creates a number. + +07:04.890 --> 07:08.580 +And typically that's the domain of what people call regression models. + +07:08.580 --> 07:13.520 +Types of models that produce a number that you can then try and fit to, um. 
+ +07:13.640 --> 07:14.510 +And you'd be right. + +07:14.540 --> 07:15.860 +You'd be making a valid point. + +07:15.860 --> 07:19.100 +It is typically more a regression kind of problem. + +07:19.310 --> 07:23.990 +Um, but it turns out it's still is going to be a great problem for us to work with. + +07:23.990 --> 07:25.460 +And there's a few reasons for that. + +07:25.490 --> 07:32.300 +One of them is that turns out frontier models are actually great at solving this kind of problem to + +07:32.330 --> 07:37.130 +to start with, they were intended just to generate text and just to be able to do things that involve + +07:37.160 --> 07:42.140 +things like tasks like summarization and other and other text generation activities. + +07:42.140 --> 07:48.380 +But as we've discovered, when we ask a model to respond in JSON and respond with with information, + +07:48.530 --> 07:54.410 +it can be very effective at responding back with quantitative results. + +07:54.470 --> 08:00.170 +And so actually the frontier models, um, perhaps because of the emergent intelligence we talked about + +08:00.170 --> 08:06.080 +a long time ago, now have become highly effective at even these kinds of problems that were traditionally + +08:06.080 --> 08:08.330 +the domain of a regression model. + +08:08.450 --> 08:12.320 +So it is absolutely possible to use JNI for this. + +08:12.320 --> 08:18.640 +And in fact, you'll see the frontier models are going to do spectacularly well at this better than + +08:18.640 --> 08:20.950 +simple regression models that we'll build to. + +08:21.250 --> 08:25.750 +So it turns out that it does work in this space. + +08:25.870 --> 08:31.000 +Um, it's also going to be much more enjoyable for us when we try and build our own models. + +08:31.000 --> 08:32.020 +And here's why. + +08:32.260 --> 08:39.340 +The great thing about this problem is that it's very easy to to measure whether we're doing it well + +08:39.370 --> 08:40.000 +or not. 
+
+08:40.030 --> 08:45.220
+If if we predict if we take a product and we know how much it costs, we can put that in and see how
+
+08:45.220 --> 08:46.690
+well the product does.
+
+08:46.720 --> 08:50.650
+See how well our model does in guessing the price of that product.
+
+08:50.680 --> 08:56.890
+Whereas some other text generation problems are harder to to measure in a very human understandable
+
+08:56.890 --> 08:57.340
+way.
+
+08:57.370 --> 09:02.110
+So if you're doing translation between two languages, say, then sure, you can tell whether something
+
+09:02.110 --> 09:08.050
+is generating good Spanish from from English, but how good becomes a judgment call.
+
+09:08.050 --> 09:10.360
+And there are of course scoring methodologies.
+
+09:10.360 --> 09:16.860
+But then you get into a lot of complexity about how they work and, and whether you're actually doing
+
+09:16.860 --> 09:17.760
+better or not.
+
+09:17.760 --> 09:26.040
+So, you know, there are lots of other problems that are more text generation related, such as building
+
+09:26.040 --> 09:31.080
+something that would actually write the description of a product, but they're not as easy to measure
+
+09:31.080 --> 09:33.270
+as just saying, come up with a price.
+
+09:33.270 --> 09:36.420
+Come up with the price is fabulously simple to measure.
+
+09:36.420 --> 09:40.920
+If we're doing it well and and measure it in a very humanly understandable way.
+
+09:40.950 --> 09:47.010
+Not with some fancy data science metric like perplexity and stuff, but with something that we'll all
+
+09:47.010 --> 09:47.790
+understand.
+
+09:47.940 --> 09:51.660
+How accurate is the price of this fridge that we've just told it?
+
+09:51.990 --> 09:55.230
+We'll be able to tell that and see it and watch the improvement.
+ +09:55.230 --> 10:04.320 +So for that reason and the other reasons, I actually think this is a really nice, well-defined challenge + +10:04.320 --> 10:12.330 +for us to take on, and we're going to have some success with it so that that is the problem for you. + +10:12.840 --> 10:13.590 +All right. + +10:13.590 --> 10:17.370 +In the next video, we're going to start talking about data and I will see you there. diff --git a/week5/community-contributions/subtitles/srts/59472011/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472011/ja_JP.srt new file mode 100755 index 0000000..7a4b666 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472011/ja_JP.srt @@ -0,0 +1,337 @@ +WEBVTT + +00:00.710 --> 00:02.270 +ようこそ皆さん。 + +00:02.300 --> 00:08.900 +だから、 過去に何度も言ってきたけど、 今週やこのトピックを始めるのが楽しみなんだ。 + +00:08.900 --> 00:15.020 +というのも、 私が抱いていた興奮は、 今日の私の興奮のレベルに比べれば、 + +00:15.020 --> 00:18.260 +ほんのわずかなものだったからだ。 + +00:18.260 --> 00:24.530 +第6週を迎え、 トレーニングの世界に乗り出すにあたり、 これがすべてだ。 + +00:24.530 --> 00:25.850 +いよいよ現実味を帯びてきた。 + +00:25.850 --> 00:27.260 +心の準備をする。 + +00:28.160 --> 00:32.900 +だから、 ここまでは常に推論と呼ばれるものについて話してきた。 + +00:32.900 --> 00:43.700 +推論とは、 大量のデータに対して学習させたモデルを、 入力が与えられたときに次のトークンを予測するために実行時に使用することだ。 + +00:43.700 --> 00:47.750 +そして、 その推論をより良いものにするために、 さまざまな方法を検討してきた。 + +00:47.750 --> 00:56.330 +これからはモデルそのものを見て、 実行時や推論時にさらに優れたモデルになるように、 どのようにモデルをトレーニングすればよいかを理解していく。 + +00:56.660 --> 00:59.780 +そして、 ここからが高度になる。 + +00:59.780 --> 01:04.270 +私たちは、 華やかではない部分、 つまりデータについてから始める。 + +01:04.270 --> 01:09.640 +また、 データセットを作成することは、 華やかに聞こえないかもしれないが、 決して華やかではない。 + +01:09.640 --> 01:14.170 +これは絶対に必要なことであり、 おそらく最も重要な部分のひとつだろう。 + +01:14.170 --> 01:20.590 +そして今日と明日は、 データに深く入り込み、 それを隅々まで理解し、 視覚化し、 整理し、 + +01:20.590 --> 01:27.250 +キュレーションし、 私たちがとても気に入る形にすることに費やすつもりだ。 + +01:27.400 --> 01:29.410 +それは、 あなたがやらなければならないことなんだ。 + +01:29.440 --> 01:34.960 +また、 このプロジェクトの成功をどのように評価するつもりなのか? 
+ +01:34.990 --> 01:38.950 +何を達成しようとしているのか、 それができたかどうかをどうやって知るのか。 + +01:39.970 --> 01:48.520 +その前に、 あなたが過ごした8週間、 あなたがどこから来て、 どこへ行こうとしているのかについて少し話しましょう。 + +01:48.520 --> 01:58.030 +今から6週間前に始めたんだけど、 君たちが向かっている左側にLMエンジニアリングのマスターがいるんだ。 + +01:58.030 --> 01:59.230 +右側だ。 + +01:59.260 --> 02:03.190 +週目にはフロンティア・モデルについて話し、 いくつか試してみた。 + +02:03.190 --> 02:06.420 +2週目は複数のAPIを使っていた。 + +02:06.420 --> 02:10.020 +私たちはGradioとMulti-modalityを使ってUIを構築していた。 + +02:10.050 --> 02:15.540 +第3週は、 パイプライン・トークナイザー、 そしてモデルのハグについて研究した。 + +02:15.540 --> 02:18.360 +第4週はモデルの選考だった。 + +02:18.360 --> 02:23.910 +コード生成を使っていたが、 6万倍速いものを作ったんだ。 + +02:23.910 --> 02:27.120 +コードを6万回も最適化できたのは驚くべきことだ。 + +02:27.150 --> 02:28.410 +第5週 + +02:28.410 --> 02:33.870 +先週はもちろん、 非常にホットな話題で持ちきりだった。 + +02:33.870 --> 02:42.840 +そして今、 6週目のフロンティア・モデルを微調整し、 7週目のトレーニングにたどり着いた。 + +02:42.840 --> 02:43.860 +またやろう。 + +02:43.860 --> 02:44.640 +だが、 今は違う。 + +02:44.640 --> 02:49.320 +これからはオープンソースのモデルを扱い、 基本的には自分たちでモデルを作っていくことになる。 + +02:49.320 --> 02:52.140 +そして、 第8週がすべての集大成となる。 + +02:52.350 --> 02:58.320 +ということで、 私たちが現在進めている移行について少しお話ししましょう。 + +02:58.320 --> 03:01.050 +私たちは推論からトレーニングへと移行しつつある。 + +03:01.050 --> 03:05.490 +では、 私たちが推論に取り組んできたとき、 何をしてきたかについてお話ししましょう。 + +03:05.490 --> 03:13.310 +我々は、 これらのモデルを実行する際に、 より良いパフォーマンスを発揮できるよう、 さまざまなテクニックを開発してきた。 + +03:13.340 --> 03:17.990 +私たちはマルチショット・プロンプトを試してみた。 + +03:17.990 --> 03:24.860 +プロンプト・チェイニングは、 複数の異なるメッセージを送信し、 互いを重ね合わせ、 その結果を組み合わせることで試した。 + +03:24.890 --> 03:32.540 +私たちは、 そこまで魔法のようなものではないが、 モデルをコードにコールバックできるようなツールを使ったことがある。 + +03:32.840 --> 03:40.040 +航空券の値段や違う都市への旅行代金を計算するためだ。 + +03:40.460 --> 03:48.860 +そして最近では、 より関連性の高いコンテンツのコンテクストをプロンプトに注入するラグに取り組んだ。 + +03:48.890 --> 03:54.800 +つまり、 これらすべてに共通しているのは、 既存の学習済みモデルを利用し、 それを複数回呼び出したり、 + +03:54.800 --> 04:02.150 +コンテキストを追加したりすることで、 そのモデルが知っていることを最大限に活用する方法を見つけ出すということだ。 + +04:02.180 --> 04:06.140 +これからはトレーニングに移る。 + +04:06.170 --> 04:17.680 +トレーニングで私たちがやろうとしているのは、 潜在的には何十億ものパラメータを持つディープ・ニューラル・ネットワークを利用することです。 + +04:17.680 --> 04:26.140 
+どのようにパラメータを微調整し、 重みを変更し、 データに基づいてわずかに最適化することで、 + +04:26.500 --> 04:35.800 +将来のトークンを予測する能力がますます向上するようにすることができます。 + +04:35.800 --> 04:45.640 +解決しようとしている問題について、 より深く、 より細かい粒度の理解を徐々に積み上げていくことができる、 よりニュアンスのあるテクニックなのだ。 + +04:46.030 --> 04:53.710 +今、 数十億のパラメーターを持つLLMを訓練しようとすると、 かなり高価な提案となる。 + +04:53.710 --> 05:05.620 +フロンティア・ラボはおそらく、 最高のモデルのトレーニングに1億ドル以上を費やすだろう。 + +05:05.620 --> 05:07.900 +それに予算外だろう。 + +05:08.200 --> 05:10.890 +だから、 残念ながらそれは不可能なんだ。 + +05:10.890 --> 05:14.940 +しかし幸運なことに、 我々は転移学習というものを利用することができた。 + +05:14.940 --> 05:21.630 +そして転移学習は、 すでに訓練されたLM、 つまりすでに大量のデータで事前訓練されたモデルを用いて、 + +05:21.630 --> 05:29.070 +特定のデータセットで訓練を続けることは完全に可能だと言う。 + +05:29.070 --> 05:34.710 +おそらく、 それは非常に専門的な問題を解決するもので、 すでに蓄積された知識をすべて移し、 + +05:34.710 --> 05:39.840 +その上にさらに知識を加えることができるだろう。 + +05:40.110 --> 05:48.750 +そうすれば、 事前に訓練されたモデル空間を利用して、 タスクに合わせてより正確に訓練することができる。 + +05:48.750 --> 05:51.240 +そしてそのプロセスは微調整として知られている。 + +05:51.450 --> 05:53.760 +うーん、 そのまんま、 そのまんま。 + +05:53.910 --> 06:00.450 +ええと、 もちろん、 その方法として、 Qローラのように過去に私が取り上げたテクニックを使うつもりだ。 + +06:00.450 --> 06:04.230 +メモリなどの面でも管理しやすいだろう。 + +06:05.100 --> 06:14.220 +それでは、 これから数週間の大半を費やすことになる商業的な問題を紹介しよう。 + +06:14.220 --> 06:21.510 +例えば、 電化製品、 コンピューター、 冷蔵庫、 洗濯機、 + +06:21.510 --> 06:27.510 +その他家庭用や車用のものなど、 様々な製品の説明文を受け取り、 + +06:27.510 --> 06:41.400 +説明文だけからその製品の価格を推定できるモデルを構築したいとします。 + +06:41.790 --> 06:47.160 +ええと、 今のは......わかりやすくていい問題だね。 + +06:47.160 --> 06:50.010 +結果を測るのはとても簡単だ。 + +06:50.010 --> 06:56.010 +データサイエンティストが手を挙げて、 それはテキストを生成するジェネレーティブAIソリューションのために設計された問題には聞こえない、 + +06:56.010 --> 07:01.980 +と言うかもしれない。 + +07:02.010 --> 07:04.890 +数字を生み出すモデルが必要なようだ。 + +07:04.890 --> 07:08.580 +一般的に回帰モデルと呼ばれるものの領域だ。 + +07:08.580 --> 07:13.520 +このようなモデルには、 その数字にフィットするようなものがある。 + +07:13.640 --> 07:14.510 +そして、 それは正しい。 + +07:14.540 --> 07:15.860 +それは正しい指摘だ。 + +07:15.860 --> 07:19.100 +一般的には、 どちらかというと退行のような問題だ。 + +07:19.310 --> 07:23.990 +うーん、 でも結局のところ、 僕らにとってはまだ大きな問題になりそうなんだ。 + +07:23.990 --> 07:25.460 +それにはいくつかの理由がある。 + +07:25.490 --> 07:32.300 
+そのうちのひとつは、 フロンティアモデルがこの種の問題を解決するのに優れていることがわかったことだ。 そもそもフロンティアモデルは、 + +07:32.330 --> 07:42.140 +テキストを生成することだけを目的としていて、 要約などのタスクやその他のテキスト生成アクティビティを含むことができるようにすることだけを目的としている。 + +07:42.140 --> 07:48.380 +しかし、 私たちが発見したように、 モデルにJSONで応答するよう求め、 情報とともに応答すると、 + +07:48.530 --> 07:54.410 +定量的な結果を返すのに非常に効果的である。 + +07:54.470 --> 08:00.170 +そして実際、 フロンティアモデルは、 おそらくずっと前に話した創発的知性のおかげで、 + +08:00.170 --> 08:08.330 +従来は回帰モデルの領域だったこの種の問題でさえも、 非常に効果的になりました。 + +08:08.450 --> 08:12.320 +だから、 このためにJNIを使うことは絶対に可能だ。 + +08:12.320 --> 08:20.950 +そして実際、 フロンティア・モデルは、 我々が構築する単純な回帰モデルよりも、 この点で目を見張るほど優れていることがわかるだろう。 + +08:21.250 --> 08:25.750 +だから、 この空間ではうまくいくことがわかった。 + +08:25.870 --> 08:31.000 +自分たちで模型を作れば、 もっと楽しくなるだろう。 + +08:31.000 --> 08:32.020 +その理由はこうだ。 + +08:32.260 --> 08:40.000 +この問題の素晴らしいところは、 うまくいっているかどうかを測るのがとても簡単だということだ。 + +08:40.030 --> 08:46.690 +もし、 ある製品を予測し、 その製品の価格がわかれば、 それを投入して製品の出来を見ることができる。 + +08:46.720 --> 08:50.650 +私たちのモデルが、 その製品の価格を推測する際にどの程度の結果を出すか見てみましょう。 + +08:50.680 --> 08:57.340 +他のテキスト生成の問題は、 人間にとって理解しやすい方法で測定するのが難しい。 + +08:57.370 --> 09:08.050 +つまり、 2つの言語間の翻訳をする場合、 確かに英語からスペイン語がうまく生成されているかどうかはわかる。 + +09:08.050 --> 09:10.360 +もちろん採点方法もある。 + +09:10.360 --> 09:17.760 +しかし、 そうなると、 その仕組みや、 実際にうまくいっているのかどうかなど、 複雑なことがたくさん出てくる。 + +09:17.760 --> 09:26.040 +だから、 もっとテキスト生成に関連した問題は他にもたくさんあるんだ。 + +09:26.040 --> 09:33.270 +例えば、 商品の説明を実際に書くようなものを作るとかね。 + +09:33.270 --> 09:36.420 +この値段は、 測定するのが驚くほど簡単だ。 + +09:36.420 --> 09:40.920 +もし私たちがそれをうまくやっていて、 とても人間的に理解しやすい方法で測定していればね。 + +09:40.950 --> 09:47.790 +複雑なデータサイエンスの指標ではなく、 誰もが理解できるようなものだ。 + +09:47.940 --> 09:51.660 +今話した冷蔵庫の値段はどれくらい正確ですか? 
+ +09:51.990 --> 09:55.230 +私たちはそれを見分けることができるし、 改善を見ていくことができる。 + +09:55.230 --> 10:04.320 +だから、 そのような理由も含めて、 この挑戦は私たちにとって本当に素晴らしく、 + +10:04.320 --> 10:12.330 +明確に定義された挑戦だと思う。 + +10:12.840 --> 10:13.590 +分かった。 + +10:13.590 --> 10:17.370 +次回のビデオでは、 データについてお話しします。 diff --git a/week5/community-contributions/subtitles/srts/59472011/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472011/ko_KR.srt new file mode 100755 index 0000000..f0a010f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472011/ko_KR.srt @@ -0,0 +1,415 @@ +WEBVTT + +00:00.710 --> 00:02.270 +환영합니다, 여러분 + +00:02.300 --> 00:08.900 +과거에도 몇 번 말씀드렸는데요 이번 주 혹은 이번 주제를 시작하게 되어 정말 신나요 + +00:08.900 --> 00:15.020 +그 모든 게 말도 안 된다고 말씀드리고 싶어요 제가 느꼈던 흥분은 오늘 느끼는 + +00:15.020 --> 00:18.260 +흥분에 비하면 아무것도 아니었거든요 + +00:18.260 --> 00:24.530 +6주 차를 맞아 세계 훈련에 돌입합니다 바로 이거죠 + +00:24.530 --> 00:25.850 +이제 실감이 나네요 + +00:25.850 --> 00:27.260 +준비하세요 + +00:28.160 --> 00:32.900 +지금까지 우린 추론이라는 걸 늘 얘기했죠 + +00:32.900 --> 00:39.320 +추론은 엄청난 양의 데이터에 대항하도록 훈련된 모델을 런타임에 사용하여 + +00:39.350 --> 00:43.700 +입력을 받은 다음 토큰을 예측하는 거예요 + +00:43.700 --> 00:47.750 +그 추론을 더 좋게 하려고 다양한 방법을 찾고 있어요 Get it + +00:47.750 --> 00:53.180 +이제 모델 자체를 살펴보고 모델을 어떻게 훈련할 수 있는지 알아보죠 + +00:53.180 --> 00:56.330 +런타임과 추론에서 더 잘하도록요 + +00:56.660 --> 00:59.780 +여기서부터 발전하는 거죠 + +00:59.780 --> 01:04.270 +덜 화려한 부분부터 시작하죠 데이터에 관한 거예요 + +01:04.270 --> 01:09.640 +데이터 세트를 만드는 게 그렇게 멋져 보이지 않을지 몰라도 실제로도 그래요 + +01:09.640 --> 01:14.170 +절대적으로 필수적이고 가장 중요한 부분 중 하나죠 + +01:14.170 --> 01:20.590 +오늘과 내일 우리는 소비를 하며 데이터에 깊이 파고들고 처음부터 끝까지 이해하고 + +01:20.590 --> 01:26.050 +시각화하고, 정돈하고, 큐레이팅해서 우리가 정말 좋아하는 형태로 + +01:26.050 --> 01:27.250 +만들 거예요 + +01:27.400 --> 01:29.410 +그렇게 해야만 하죠 + +01:29.440 --> 01:34.960 +또한 시간을 들여 이해하려고 합니다 이 프로젝트의 성공을 어떻게 가늠할까요? + +01:34.990 --> 01:38.950 +우리는 무엇을 이루고 싶으며 성공했는지 어떻게 알 수 있을까요? 
+ +01:39.970 --> 01:47.050 +하지만 먼저 8주 동안의 경험에 대해 얘기해 보죠 어디서부터 어떻게 시작했는지 앞으로 어떻게 + +01:47.050 --> 01:48.520 +될지에 대해서요 + +01:48.520 --> 01:56.500 +6주 전에 시작했어요 왼쪽을 보시면 달 착륙선 엔지니어링 마스터가 + +01:56.500 --> 01:58.030 +있어요 + +01:58.030 --> 01:59.230 +오른쪽에 있어요 + +01:59.260 --> 02:03.190 +1주 차에는 개척자 모델에 대해 얘기했고 몇 가지를 시도해 봤어요 + +02:03.190 --> 02:06.420 +2주차에는 다중 API를 사용했죠 + +02:06.420 --> 02:10.020 +그래디오와 다중 모듈로 UI를 구축했어요 + +02:10.050 --> 02:15.540 +3주 차에는 페이스 더 파이프라인 토큰이저와 모델을 살펴봤죠 + +02:15.540 --> 02:18.360 +4주 차에 모델들을 선발했어요 + +02:18.360 --> 02:23.910 +코드 생성을 사용했고 60,000배 빠른 걸 만들었죠? + +02:23.910 --> 02:27.120 +코드를 60,000배 최적화할 수 있었다는 건 놀라워요 + +02:27.150 --> 02:28.410 +5주 차예요 + +02:28.410 --> 02:33.870 +지난주에는 래그에 관한 래그의 뜨거운 토픽이었죠 + +02:33.870 --> 02:42.840 +이제 6주 차로 접어들었어요 개척지 모델을 세밀하게 조정했고 7주 차에는 훈련을 시작했어요 + +02:42.840 --> 02:43.860 +다음에 또 해요 + +02:43.860 --> 02:44.640 +지금은요 + +02:44.640 --> 02:49.320 +오픈 소스 모델을 다룰 겁니다 기본적으로 우리 고유의 모델을 구축하는 거죠 + +02:49.320 --> 02:52.140 +8주 차부터 모든 게 합쳐지죠 + +02:52.350 --> 02:58.320 +이제 비트를 가지고 우리가 만들고 있는 변화에 대해 얘기해보죠 + +02:58.320 --> 03:01.050 +추론에서 훈련으로 전환하고 있어요 + +03:01.050 --> 03:05.490 +추론을 작업했을 때 얘길 해보죠 뭘 하고 있었죠? 
+ +03:05.490 --> 03:11.300 +다양한 기술을 개발해서 이 모델들을 실행할 때 성능을 더 향상하려고 + +03:11.300 --> 03:13.310 +노력해요 get it + +03:13.340 --> 03:17.990 +작업 예시를 많이 주면서 멀티샷 프롬프트도 해봤어요 + +03:17.990 --> 03:23.420 +여러 개의 다른 메시지를 보내고 빌드를 해서 결과를 조합하는 프롬프트 연결을 + +03:23.420 --> 03:24.860 +시도했었죠 + +03:24.890 --> 03:31.460 +모델을 코드로 불러올 수 있는 도구를 사용하기도 했어요 그렇게 마법 같지는 + +03:31.460 --> 03:32.540 +않았지만요 + +03:32.840 --> 03:39.020 +항공권의 가격을 계산하거나 다른 도시로 가는 여행 비용을 계산하는 + +03:39.140 --> 03:40.040 +거죠 + +03:40.460 --> 03:48.860 +가장 최근에는 Rag 인젝팅에 대해 작업했습니다 더 관련 있는 콘텐츠 컨텍스트를 프롬프트에요 + +03:48.890 --> 03:54.800 +이 모든 것의 공통점은 현존하는 훈련 모델을 취해 그것이 아는 것을 최대한 활용하기 + +03:54.800 --> 04:00.320 +위해 어떻게 이용할지 알아내는 거죠 여러 번 호출하고 컨텍스트에 추가하는 + +04:00.350 --> 04:02.150 +식으로요 + +04:02.180 --> 04:06.140 +이제 훈련으로 넘어갈 거예요 + +04:06.170 --> 04:11.950 +그래서 훈련 때 수십억 개의 매개 변수가 있는 신경망을 활용해 그 매개 + +04:11.950 --> 04:17.680 +변수를 수정하고 무게를 변경하고 데이터에 근거해 최적화할 방법을 + +04:17.680 --> 04:26.140 +연구합니다 그러면 미래 토큰을 예측하는 게 더 쉬워지죠 추론하기 위해 더 많은 컨텍스트를 추가하는 + +04:26.500 --> 04:33.130 +등 다양한 방법을 연구합니다 결과에 영향을 주는 측면에서 보면 일종의 붓 + +04:33.130 --> 04:35.800 +터치로군요 + +04:35.800 --> 04:43.750 +훨씬 미묘한 기술로 점점 더 깊고 섬세한 결로 문제를 이해할 수 + +04:43.780 --> 04:45.640 +있게 해주죠 + +04:46.030 --> 04:53.710 +LLM을 수십억 매개 변수로 훈련하는 건 꽤 비싼 제안이죠 + +04:53.710 --> 05:01.600 +프런티어 연구소들은 아마 1억 달러 이상을 최고의 모델을 훈련하는 + +05:01.630 --> 05:05.620 +데 쓸 텐데 제 예산 밖이에요 + +05:05.620 --> 05:07.900 +예산도 안 맞겠죠? 
+ +05:08.200 --> 05:10.890 +불행히도 그건 불가능하죠 + +05:10.890 --> 05:14.940 +다행히도 전송 학습이라는 걸 이용할 수 있었죠 + +05:14.940 --> 05:21.630 +트랜스퍼 러닝에 따르면 훈련된 기존 LM을 취하는 게 완벽하게 가능합니다 엄청난 양의 + +05:21.630 --> 05:29.070 +데이터로 이미 훈련된 모델이죠 그런 다음 특정 데이터 집합으로 훈련을 계속할 수 있어요 + +05:29.070 --> 05:34.710 +아마도 아주 전문적인 문제를 해결하고 이미 축적된 모든 지식을 + +05:34.710 --> 05:39.840 +이동시켜 그 위에 추가된 지식을 추가할 수 있겠죠 + +05:40.110 --> 05:45.990 +미리 훈련된 모델을 가지고 작업에 더 정밀하게 + +05:45.990 --> 05:48.750 +훈련할 수 있어요 + +05:48.750 --> 05:51.240 +그 과정을 미세 조정이라고 하죠 + +05:51.450 --> 05:53.760 +말 그대로예요 + +05:53.910 --> 05:58.230 +물론 몇 가지 기술을 사용할 거예요 제가 전에 언급한 적이 있죠 + +05:58.230 --> 06:00.450 +큐 로라 같은 방법으로요 + +06:00.450 --> 06:04.230 +메모리 같은 측면에서 관리할 수 있어요 + +06:05.100 --> 06:11.520 +이제 문제를 알려 드리죠 앞으로 몇 주 동안 우리가 씨름할 + +06:11.670 --> 06:14.220 +상업적 문제요 + +06:14.220 --> 06:21.510 +전자 상거래 회사나 마켓플레이스 회사에서 제품을 만든다고 해보죠 거의 모든 제품에 + +06:21.510 --> 06:27.510 +대한 설명을 담을 수 있는 모델을 구축하고자 합니다 다양한 제품들 + +06:27.510 --> 06:34.140 +예를 들어 전자 제품이나 컴퓨터, 냉장고, 세탁기 같은 가정과 차에 사용하는 + +06:34.170 --> 06:41.400 +제품들 말이죠 그리고 설명을 바탕으로 가격을 추정할 수 있어야 하죠 + +06:41.790 --> 06:47.160 +그건 쉽고 이해하기 쉬운 문제예요 + +06:47.160 --> 06:50.010 +결과를 측정하기 아주 쉬워요 + +06:50.010 --> 06:56.010 +여러분 중 데이터 과학자가 손을 들고 텍스트를 생성하는 인공지능 + +06:56.010 --> 07:01.980 +용액을 위해 설계된 문제 같지 않다고 말할 수도 있죠 + +07:02.010 --> 07:04.890 +숫자를 만드는 모델이 필요한 것 같네요 + +07:04.890 --> 07:08.580 +이게 회귀 모델의 도메인이죠 + +07:08.580 --> 07:13.520 +모델의 종류에 따라 숫자를 만들어 거기에 맞추는 거죠 + +07:13.640 --> 07:14.510 +당신 말이 맞아요 + +07:14.540 --> 07:15.860 +당신 말도 일리가 있어요 + +07:15.860 --> 07:19.100 +보통 퇴행하는 것에 더 가깝죠 + +07:19.310 --> 07:23.990 +하지만 여전히 우리가 다루기엔 큰 문제인 것 같아요 + +07:23.990 --> 07:25.460 +거기엔 몇 가지 이유가 있어요 + +07:25.490 --> 07:32.300 +그 중 하나는 개척자 모델이 이런 문제를 해결하는 데 좋다는 겁니다 텍스트를 + +07:32.330 --> 07:37.130 +생성하는 데만 사용되며 요약이나 다른 텍스트 생성 + +07:37.160 --> 07:42.140 +활동과 관련된 작업을 하는 데 사용되죠 + +07:42.140 --> 07:48.380 +하지만 모델에 JSON 내에서 반응하도록 요청하고 정보로 반응하게 + +07:48.530 --> 07:54.410 +하면 양적 결과로 대응하는 데 아주 효과적일 수 있어요 + +07:54.470 --> 08:00.170 +개척 모델은 
아마도 우리가 오래전에 얘기했던 신생 지능 + +08:00.170 --> 08:06.080 +덕분에 퇴행 모델이라는 영역에서 특히 효과적인 문제를 해결할 + +08:06.080 --> 08:08.330 +수 있게 된 거죠 + +08:08.450 --> 08:12.320 +JNI를 사용할 수 있어요 + +08:12.320 --> 08:18.640 +프런티어 모델도 성능이 뛰어납니다. 우리가 만들 단순 회귀 + +08:18.640 --> 08:20.950 +모델보다 훨씬 낫죠. + +08:21.250 --> 08:25.750 +이 공간에서 작동하는 게 밝혀졌어요 + +08:25.870 --> 08:31.000 +우리만의 모델을 만들 때 훨씬 더 즐거울 거예요 + +08:31.000 --> 08:32.020 +이유는 이거예요 + +08:32.260 --> 08:40.000 +이 문제의 장점은 우리가 잘하고 있는지 아닌지 측정하기가 아주 쉽다는 거예요 + +08:40.030 --> 08:45.220 +만약 제품을 선택하고 가격을 안다면 그걸 입력해서 제품의 효과를 + +08:45.220 --> 08:46.690 +볼 수 있어요. + +08:46.720 --> 08:50.650 +우리 모델이 그 제품의 가격을 얼마나 잘 추산하는지 보세요 + +08:50.680 --> 08:57.340 +다른 문서 생성 문제도 있습니다. 인간이 이해할 수 있는 방식으로 측정하기 어려운 문제들이죠. + +08:57.370 --> 09:02.110 +두 언어 간의 번역을 할 때, 무언가가 영어를 통해 좋은 스페인어를 + +09:02.110 --> 09:08.050 +생성하는지 알 수 있지만 얼마나 좋은지는 판단할 수 있죠 + +09:08.050 --> 09:10.360 +물론 방법론도 채점하고요 + +09:10.360 --> 09:16.860 +하지만 Get들이 작동하는 방식과 제가 정말 잘하고 있는지에 대한 복잡한 심경이 + +09:16.860 --> 09:17.760 +생기죠 + +09:17.760 --> 09:26.040 +텍스트 생성 관련 문제들이 많습니다 제품 설명을 실제로 적을 수 있는 뭔가를 만드는 + +09:26.040 --> 09:31.080 +것처럼요 하지만 가격을 제시하는 것만큼 측정하기가 + +09:31.080 --> 09:33.270 +쉽지는 않죠 + +09:33.270 --> 09:36.420 +가격을 생각해 보면 측정하기가 아주 쉬워요 + +09:36.420 --> 09:40.920 +우리가 잘한다면 인간적으로 이해할 수 있는 방법으로 측정할 수 있어요 + +09:40.950 --> 09:47.010 +복잡한 데이터 측정법 같은 거 말고 모두가 이해할 수 있는 + +09:47.010 --> 09:47.790 +거요 + +09:47.940 --> 09:51.660 +방금 말씀드린 냉장고 가격은 얼마나 정확하죠? 
+ +09:51.990 --> 09:55.230 +개선된 모습을 볼 수 있을 거예요 + +09:55.230 --> 10:04.320 +그 이유와 다른 이유 때문에 이건 우리가 도전하기에 아주 좋고 잘 정의된 도전이라고 + +10:04.320 --> 10:12.330 +생각해요 그리고 성공도 좀 할 거예요 그게 당신 문제군요 + +10:12.840 --> 10:13.590 +좋아요 + +10:13.590 --> 10:17.370 +다음 비디오에선 데이터에 관해 얘기할 겁니다 거기서 뵙죠 diff --git a/week5/community-contributions/subtitles/srts/59472017/en_US.srt b/week5/community-contributions/subtitles/srts/59472017/en_US.srt new file mode 100755 index 0000000..c0459fe --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472017/en_US.srt @@ -0,0 +1,532 @@ +WEBVTT + +00:01.430 --> 00:06.980 +And this is the first time that we'll be coding against our big project of the course. + +00:06.980 --> 00:08.930 +Welcome to Jupyter Lab. + +00:08.930 --> 00:14.390 +Welcome to the week six folder as we embark upon our big project. + +00:14.390 --> 00:20.870 +So again, our project is to build a model that can estimate how much something costs based on the description + +00:20.870 --> 00:21.710 +of the product. + +00:21.710 --> 00:26.120 +And today we're going to be doing the first step in data curation. + +00:26.120 --> 00:34.220 +And we'll start by looking at a subset of the data for home appliances, washing machines and the like. + +00:34.220 --> 00:36.710 +So first let me just show you the data set itself. + +00:36.710 --> 00:39.260 +The data set is is at this link right here. + +00:39.260 --> 00:46.190 +This is the data set on the hugging face datasets section of the Hugging Face hub. + +00:46.580 --> 00:55.520 +Um, and it is a series of um, of scraped Amazon reviews that that goes back in time. + +00:55.520 --> 00:59.810 +But this latest scrape that was from 20 was from late 2023. + +01:00.020 --> 01:07.450 +Um, it contains a huge number of reviews, but it also contains almost 50 million items in there. + +01:07.450 --> 01:15.040 +So there's a lot of different products and they're divided into these different categories. 
+ +01:15.250 --> 01:17.410 +We are not going to be working with all of these. + +01:17.410 --> 01:22.870 +We're going to pluck out a subset of this that are the kinds of categories that interest us the most + +01:22.870 --> 01:24.070 +for this exercise. + +01:24.490 --> 01:28.930 +Otherwise, everything would take an awfully long time to train and that wouldn't be any fun. + +01:28.930 --> 01:34.000 +So this gives you a good sense of the kind of data that we're working with. + +01:34.330 --> 01:41.560 +And if I go into the folder, the hugging face folder that contains the data for what, what the dataset + +01:41.590 --> 01:47.380 +calls meta data, which is the data of the products and descriptions and prices themselves, which is + +01:47.380 --> 01:48.400 +what we really care about. + +01:48.430 --> 01:49.270 +Here it is. + +01:49.300 --> 01:56.380 +And you can get a good sense if you look at the data set for things like electronics, you can see that + +01:56.380 --> 01:59.320 +it's just over five gigabytes in size. + +01:59.320 --> 02:05.110 +So these are big data sets and they're going to have a ton of useful information. + +02:05.110 --> 02:08.160 +And it was uploaded seven months ago. + +02:08.160 --> 02:10.290 +So this is all quite recent. + +02:11.100 --> 02:14.010 +So let's get going. + +02:14.010 --> 02:16.410 +We begin with some imports. + +02:16.440 --> 02:19.440 +Nothing particularly complicated there. + +02:19.470 --> 02:20.700 +Not as yet. + +02:20.730 --> 02:22.380 +There will be more to come. + +02:22.680 --> 02:24.660 +Uh, we're going to set up our environment. + +02:24.690 --> 02:26.550 +Not that we're going to be using any of this today. + +02:26.550 --> 02:28.260 +We're just going to be using hugging face. + +02:28.290 --> 02:29.970 +Log in to hugging face. + +02:30.720 --> 02:37.230 +Um, and, uh, this this makes sure that, uh, matplotlib can show us charts in the Jupyter notebook. 
+ +02:37.380 --> 02:41.070 +So the first thing to do is to load in our data set. + +02:41.070 --> 02:45.660 +And what we're going to do is we specify the name of the data set, Amazon reviews. + +02:45.660 --> 02:49.980 +And we're going to just choose to start with the appliances category. + +02:50.010 --> 02:56.970 +Appliances home appliances like like uh fridges and, and uh, washing machines and the like, um, + +02:57.000 --> 03:02.550 +are going to be the first things that we're going to load in using hugging faces load data set, uh, + +03:02.550 --> 03:03.480 +function. + +03:03.780 --> 03:09.330 +Um, and the first time you run this, it will actually download it from the Huggingface hub, since + +03:09.350 --> 03:13.370 +I've already done that, that won't be required for me. + +03:13.400 --> 03:20.870 +It will just bring in and this has already completed and we'll see how many appliances we have. + +03:20.990 --> 03:26.510 +We have 94,000 home appliances in there. + +03:26.510 --> 03:28.520 +So let's have a look at one of these guys. + +03:28.520 --> 03:34.550 +Let's have a look at uh let's say data point equals data set. + +03:35.030 --> 03:36.290 +Let's take the first one. + +03:36.290 --> 03:37.100 +Why not. + +03:37.370 --> 03:38.360 +Let's have a look at it. + +03:42.590 --> 03:44.210 +So this is what it looks like. + +03:44.210 --> 03:45.950 +It's got tons of information. + +03:45.950 --> 03:49.370 +But in particular you can see it has something called features. + +03:49.370 --> 03:54.350 +It has a title and it has a few other things that are probably going to be useful for us. + +03:54.530 --> 04:01.910 +And in particular it has a title, a description, features, details and price. + +04:01.940 --> 04:04.970 +Let's just print each of them out so we can have a quick look. + +04:05.030 --> 04:06.770 +So this is the title. + +04:06.770 --> 04:09.860 +This is an ice maker machine countertop. + +04:09.890 --> 04:13.280 +This is its description which is empty. 
+ +04:13.820 --> 04:16.430 +This is its details. + +04:16.460 --> 04:24.830 +So features lots of features, details here and price which we immediately see a problem. + +04:24.830 --> 04:26.870 +Price is none in this case. + +04:26.870 --> 04:30.080 +So clearly not all of the items have a price. + +04:30.560 --> 04:35.780 +And you'll notice that description appears to come in the form of a list. + +04:35.780 --> 04:41.090 +So A features whereas details comes in the form of a dictionary. + +04:41.090 --> 04:43.790 +Although that is deceiving, it's actually not a dictionary. + +04:43.790 --> 04:47.180 +It is a string that contains JSON. + +04:47.180 --> 04:54.980 +So this is text that if we want to read into that, we would need to load that in and use A and convert + +04:54.980 --> 04:58.940 +it into a dictionary using JSON load string load s. + +05:00.020 --> 05:01.310 +Um, okay. + +05:01.310 --> 05:05.060 +Let's look at a different data point just to see the next one. + +05:05.060 --> 05:12.740 +Looks like an egg holder for a refrigerator, uh, and holds up to ten eggs also none. + +05:13.250 --> 05:15.390 +No price for that one either. + +05:15.420 --> 05:17.880 +And this third one doesn't have a price either. + +05:17.880 --> 05:20.940 +It's a brand new dryer drum slide. + +05:20.940 --> 05:24.720 +So at this point we might have our first moment of being concerned. + +05:24.750 --> 05:27.300 +We've got 94,000 appliances. + +05:27.300 --> 05:30.000 +The first three that we've looked at don't have a price. + +05:30.000 --> 05:32.340 +So let's see how many do have a price. + +05:32.340 --> 05:37.800 +So a simple way to do that is we will iterate through all of the data points in our data set. + +05:37.800 --> 05:40.410 +And we will get the price. + +05:40.650 --> 05:44.040 +And we will put that in a try block. + +05:44.190 --> 05:50.010 +Because if it doesn't have one, it will fail and we will just skip that data point. 
+ +05:50.010 --> 05:53.220 +So we'll also ignore anything that is priced at zero. + +05:53.220 --> 05:57.120 +So we're just going to be looking at things that have a price that is a number. + +05:57.120 --> 06:01.140 +And that that price is non-zero is more than zero. + +06:01.170 --> 06:05.490 +I don't think there are any negative prices in there, but if there are, they're not going to get counted. + +06:06.360 --> 06:11.310 +So this is now going to be going through and trying to figure that out. + +06:11.310 --> 06:12.540 +And there we go. + +06:12.540 --> 06:19.560 +So it tells us that there are 726, which is almost 50%. + +06:19.560 --> 06:20.580 +So it's not terrible. + +06:20.580 --> 06:22.290 +That's fine, that's fine. + +06:22.290 --> 06:26.790 +It's, uh, for a moment might be worried that there would be a, that it would be slim pickings, but + +06:26.790 --> 06:29.700 +no, at least for the appliances. + +06:29.970 --> 06:35.010 +Um, uh, data set, half of them have prices. + +06:35.400 --> 06:36.960 +It's a tiny side point. + +06:36.960 --> 06:39.270 +I don't know if you've spotted when I've been printing numbers. + +06:39.270 --> 06:45.210 +Generally, they've had a comma to separate the thousands, which I always find so useful when when + +06:45.240 --> 06:47.370 +being able to read these kinds of things. + +06:47.370 --> 06:53.340 +The way that you do that is, if you're using Python's f strings, you say colon comma like this. + +06:53.460 --> 06:59.850 +Um, to you use that for your formatting and then you'll get numbers in this style. + +07:00.000 --> 07:02.610 +Just a little a hot tip. + +07:02.700 --> 07:06.510 +Uh, you may have known that already, but if not, it's a useful one to be aware of. + +07:07.500 --> 07:08.490 +Okay. + +07:08.970 --> 07:14.490 +So what we're going to do now is we're going to take all of the ones with prices. 
+ +07:14.850 --> 07:22.800 +Um, and we're going to figure out how many characters it has in its title, description, features + +07:22.800 --> 07:23.370 +and details. + +07:23.370 --> 07:29.130 +We're going to add up the total number of characters and put that into a list of lengths. + +07:29.130 --> 07:36.150 +So what we now have is a list of prices and a list of lengths, so we can get a sense of how how many + +07:36.180 --> 07:41.610 +characters of detail we have and see if it's uniform or if it's something that's that's in some way + +07:41.640 --> 07:42.390 +skewed. + +07:42.450 --> 07:50.400 +So now we're going to use matplotlib, which we'll be using a lot, uh, to make a plot of the lengths + +07:50.400 --> 07:53.250 +in the form of a histogram. + +07:53.250 --> 08:00.360 +And hopefully you remember from statistics classes of some time ago, a histogram is basically going + +08:00.360 --> 08:06.930 +to take everything and bucket it into into bins and show how many we have in each bin. + +08:06.960 --> 08:09.060 +It's easier to show you what that looks like. + +08:09.060 --> 08:10.170 +This is what it looks like. + +08:10.170 --> 08:18.330 +So along the x axis we have the lengths of our different, um, uh, appliances, our different washing + +08:18.330 --> 08:22.830 +machines or whatever, uh, in terms of how many characters they have in that description. + +08:22.830 --> 08:29.240 +And this is the count of how many appliances do we have with that many characters. + +08:29.240 --> 08:36.470 +And you can see that there's a nice kind of peak around here, but there is this long tail of more characters + +08:36.470 --> 08:38.960 +coming in now. + +08:38.960 --> 08:43.610 +This is going to be a challenge for us when we're training, because ultimately we're going to want + +08:43.610 --> 08:48.650 +to use our own, uh, open source models and train them. 
+ +08:48.650 --> 08:55.730 +And one of the constraints that's very important for us to understand is the maximum number of characters + +08:55.730 --> 09:00.680 +that we might pass in, or actually the maximum number of tokens that we might pass in to the model + +09:00.680 --> 09:01.490 +at each point. + +09:01.490 --> 09:06.770 +And the more tokens that we might need to pass in for each of our training points, the more memory + +09:06.770 --> 09:09.950 +that we need for training and the harder it is to achieve. + +09:09.980 --> 09:14.990 +Another point is that even when we're using frontier models, whilst they don't have that problem, + +09:14.990 --> 09:19.430 +they have a different problem, which is that it's more expensive if we're passing in more tokens than + +09:19.430 --> 09:24.710 +they are going to of course cost us more, which doesn't really mean very much for a for a few of these. + +09:24.830 --> 09:30.950 +But if we want to do this in anger for a large number of products, then the numbers will start to add + +09:30.950 --> 09:31.490 +up. + +09:31.520 --> 09:37.430 +So ideally we would pick a cutoff and we would constrain our data at that point. + +09:37.550 --> 09:40.820 +Um, and so that's something that we'll be thinking about later. + +09:41.120 --> 09:45.200 +Another thing for us to look at is the distribution of the prices. + +09:45.200 --> 09:47.270 +So how much do things cost? + +09:47.300 --> 09:53.150 +You may have gotten the hint from our earlier analysis that whilst we thought appliances was going to + +09:53.150 --> 09:59.900 +be full of fridges and washing machines and the like, the things that we looked at were rather smaller. 
+ +09:59.900 --> 10:05.810 +They were egg holders and ice makers, and it shouldn't be that much of a surprise when you think about + +10:05.810 --> 10:12.860 +it, that the data is probably going to have a very large number of cheaper things that might sort of, + +10:13.010 --> 10:16.730 +um, squash out some of the higher priced items. + +10:16.730 --> 10:17.660 +So let's see that. + +10:17.690 --> 10:19.100 +Let's see how this looks. + +10:20.360 --> 10:22.850 +Well, that does appear to be the case. + +10:23.120 --> 10:29.280 +So the average price in our data set is $6. + +10:29.310 --> 10:33.360 +The highest price is $21,000. + +10:33.390 --> 10:40.290 +There is a home appliance for $21,000 in this list, but you can see that there's a very large number + +10:40.290 --> 10:43.140 +that have smaller prices. + +10:43.470 --> 10:48.150 +And for those that remember the difference between mean, median and mode. + +10:48.180 --> 10:54.930 +Again, from school statistics, this is a nice illustration of where the mean can be pulled up by expensive + +10:54.930 --> 11:01.200 +items and is clearly going to be bigger than, well, certainly than the mode and and probably the median + +11:01.200 --> 11:01.710 +too. + +11:02.730 --> 11:10.080 +Uh, so, um, yes, you can you can certainly see we have skewed distribution where there is a very + +11:10.080 --> 11:12.690 +large number of cheap products. + +11:12.690 --> 11:19.650 +And that might be challenging during training because the training data is going to be really crowded + +11:19.650 --> 11:22.140 +out by these low cost items. + +11:22.440 --> 11:26.880 +Let's just have a quick look for this super expensive thing and see what it is. + +11:26.880 --> 11:30.630 +This this $21,000, uh, item. + +11:30.630 --> 11:35.300 +We will go through our data set and pluck out whatever it is that costs more than $21,000. + +11:35.300 --> 11:38.000 +It is, it seems, a turbochef bullet. 
+ +11:38.000 --> 11:41.300 +Rapid cook electric microwave convection oven. + +11:41.330 --> 11:45.290 +Now, if someone had told me that description, I would never have thought that that was going to cost + +11:45.320 --> 11:46.940 +$21,000. + +11:47.300 --> 11:52.970 +I did find something not identical, but something that I think is probably the latest version in Amazon + +11:52.970 --> 11:53.870 +right now. + +11:53.960 --> 12:00.590 +And if we go over to have a look at this, you can see here this is also made by Turbochef. + +12:00.590 --> 12:05.420 +It's a bargain price of only $18,000, not $21,000. + +12:05.900 --> 12:07.640 +But I don't know about you. + +12:07.640 --> 12:12.440 +I had no idea that microwaves could cost this much, but it's clearly a very professional microwave, + +12:12.470 --> 12:17.090 +a very high end microwave, and going, as I say, for that bargain price. + +12:17.090 --> 12:27.110 +That is the $21,000 version of that is over here somewhere in our way off the scale in our data. + +12:28.340 --> 12:35.120 +So it's now time for us to curate our data, and we'll do that in the next video. 
diff --git a/week5/community-contributions/subtitles/srts/59472017/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472017/ja_JP.srt new file mode 100755 index 0000000..e39769b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472017/ja_JP.srt @@ -0,0 +1,460 @@ +WEBVTT + +00:01.430 --> 00:06.980 +そして今回は、 このコースの大きなプロジェクトに対してコーディングする初めての機会だ。 + +00:06.980 --> 00:08.930 +Jupyter Labへようこそ。 + +00:08.930 --> 00:14.390 +ビッグプロジェクトに乗り出す第6週のフォルダへようこそ。 + +00:14.390 --> 00:21.710 +というわけで、 私たちのプロジェクトは、 商品の説明に基づいて、 その商品の値段を推定できるモデルを構築することである。 + +00:21.710 --> 00:26.120 +そして今日は、 データ・キュレーションの最初のステップを行う。 + +00:26.120 --> 00:34.220 +まずは、 家電製品、 洗濯機などのデータの一部を見てみよう。 + +00:34.220 --> 00:36.710 +それではまず、 データセットそのものをお見せしよう。 + +00:36.710 --> 00:39.260 +データセットはこちらのリンクにある。 + +00:39.260 --> 00:46.190 +これはHugging Face hubのHugging Face datasetsセクションにあるデータセットです。 + +00:46.580 --> 00:55.520 +アマゾンのレビューをスクレイピングしたもので、 過去にさかのぼります。 + +00:55.520 --> 00:59.810 +しかし、 20日に行われたこの最新のスクレイプは、 2023年後半に行われたものだった。 + +01:00.020 --> 01:07.450 +ええと、 膨大な数のレビューが含まれていますが、 ほぼ5000万件の商品も含まれています。 + +01:07.450 --> 01:15.040 +だから、 いろいろな製品があり、 それぞれのカテゴリーに分かれている。 + +01:15.250 --> 01:17.410 +これらすべてを扱うつもりはない。 + +01:17.410 --> 01:24.070 +この練習のために、 この中から最も興味のあるカテゴリーのサブセットを抜き出す。 + +01:24.490 --> 01:28.930 +そうでないと、 トレーニングにものすごく時間がかかるし、 面白くない。 + +01:28.930 --> 01:34.000 +これで、 私たちが扱っているデータの種類をご理解いただけると思います。 + +01:34.330 --> 01:41.560 +データセットがメタデータと呼んでいる、 商品、 + +01:41.590 --> 01:48.400 +説明、 価格そのもののデータだ。 + +01:48.430 --> 01:49.270 +これだ。 + +01:49.300 --> 01:59.320 +電子機器などのデータセットを見れば、 5ギガバイト強のサイズであることがよくわかるだろう。 + +01:59.320 --> 02:05.110 +つまり、 これらは大きなデータセットであり、 有益な情報をたくさん持っていることになる。 + +02:05.110 --> 02:08.160 +アップロードされたのは7カ月前だ。 + +02:08.160 --> 02:10.290 +だから、 これはすべてごく最近のことなんだ。 + +02:11.100 --> 02:14.010 +さあ、 始めよう。 + +02:14.010 --> 02:16.410 +まずは輸入品から。 + +02:16.440 --> 02:19.440 +特に複雑なことはない。 + +02:19.470 --> 02:20.700 +今のところはまだだ。 + +02:20.730 --> 02:22.380 +まだまだ続くだろう。 + +02:22.680 --> 02:24.660 +これから環境を整えるんだ。 + 
+02:24.690 --> 02:26.550 +今日使うわけではない。 + +02:26.550 --> 02:28.260 +ハグする顔を使うだけだ。 + +02:28.290 --> 02:29.970 +ハグ顔にログイン。 + +02:30.720 --> 02:37.230 +そして、 matplotlibがJupyterノートブックにチャートを表示できるようにする。 + +02:37.380 --> 02:41.070 +そこで最初にすることは、 データセットをロードすることだ。 + +02:41.070 --> 02:45.660 +これから行うのは、 データセットの名前、 アマゾンのレビューを指定することだ。 + +02:45.660 --> 02:49.980 +まずは家電製品のカテゴリーから。 + +02:50.010 --> 02:56.970 +冷蔵庫や洗濯機などのような家電製品は、 最初にHugging faces + +02:57.000 --> 03:03.480 +load data set機能を使って読み込みます。 + +03:03.780 --> 03:13.370 +最初にこれを実行すると、 Huggingfaceのハブからダウンロードされる。 + +03:13.400 --> 03:20.870 +これはすでに完成しており、 どれだけの電化製品があるか見てみることになる。 + +03:20.990 --> 03:26.510 +そこには94,000台の家電製品がある。 + +03:26.510 --> 03:28.520 +では、 そのうちの一人を見てみよう。 + +03:28.520 --> 03:34.550 +データ・ポイントとデータ・セットを見てみよう。 + +03:35.030 --> 03:36.290 +まず1つ目。 + +03:36.290 --> 03:37.100 +なぜだ。 + +03:37.370 --> 03:38.360 +見てみよう。 + +03:42.590 --> 03:44.210 +というわけで、 こんな感じだ。 + +03:44.210 --> 03:45.950 +たくさんの情報がある。 + +03:45.950 --> 03:49.370 +しかし、 特に特徴というものがあるのがわかるだろう。 + +03:49.370 --> 03:54.350 +タイトルもついているし、 他にもいくつか役に立つことが書いてある。 + +03:54.530 --> 04:01.910 +そして特に、 タイトル、 説明、 特徴、 詳細、 価格を持っている。 + +04:01.940 --> 04:04.970 +それぞれをプリントアウトして、 ざっと見てみよう。 + +04:05.030 --> 04:06.770 +これがタイトルだ。 + +04:06.770 --> 04:09.860 +これは製氷機のカウンタートップ。 + +04:09.890 --> 04:13.280 +これは空虚な説明である。 + +04:13.820 --> 04:16.430 +これがその詳細だ。 + +04:16.460 --> 04:24.830 +そのため、 多くの機能、 ここでの詳細、 そしてすぐに問題が見えてくる価格が特徴だ。 + +04:24.830 --> 04:26.870 +この場合、 値段は関係ない。 + +04:26.870 --> 04:30.080 +だから、 すべての商品に値段がついているわけではないのは明らかだ。 + +04:30.560 --> 04:35.780 +そして、 その説明はリストの形で表示されることに気づくだろう。 + +04:35.780 --> 04:41.090 +つまり、 Aには特徴があるのに対して、 詳細は辞書の形で提供される。 + +04:41.090 --> 04:43.790 +しかし、 これは辞書ではない。 + +04:43.790 --> 04:47.180 +JSONを含む文字列である。 + +04:47.180 --> 04:58.940 +つまり、 これはテキストであり、 これを読み込むには、 それをロードしてAを使い、 JSONロード文字列ロードsを使って辞書に変換する必要がある。 + +05:00.020 --> 05:01.310 +うーん、 わかった。 + +05:01.310 --> 05:05.060 +次は別のデータを見てみよう。 + +05:05.060 --> 05:12.740 +冷蔵庫のエッグホルダーのようだが、 卵は10個まで入る。 + +05:13.250 --> 
05:15.390 +これも値段はない。 + +05:15.420 --> 05:17.880 +そしてこの3本目も値段がない。 + +05:17.880 --> 05:20.940 +真新しい乾燥機のドラムスライドだ。 + +05:20.940 --> 05:24.720 +だからこの時点で、 私たちは初めて心配することになるかもしれない。 + +05:24.750 --> 05:27.300 +94,000台の家電製品がある。 + +05:27.300 --> 05:30.000 +最初の3つは値段がない。 + +05:30.000 --> 05:32.340 +では、 いくつに値段がついているのか見てみよう。 + +05:32.340 --> 05:37.800 +そのための簡単な方法は、 データ・セットのすべてのデータ・ポイントを繰り返し処理することだ。 + +05:37.800 --> 05:40.410 +そして、 その対価を得る。 + +05:40.650 --> 05:44.040 +それをトライブロックに入れる。 + +05:44.190 --> 05:50.010 +なぜなら、 もしそれがなければ、 失敗してそのデータポイントをスキップしてしまうからだ。 + +05:50.010 --> 05:53.220 +だから、 価格がゼロのものも無視する。 + +05:53.220 --> 05:57.120 +だから私たちは、 価格が数字になっているものだけを見ることにしている。 + +05:57.120 --> 06:01.140 +そして、 その価格がゼロでないことはゼロ以上である。 + +06:01.170 --> 06:05.490 +マイナス価格があるとは思わないが、 もしあったとしてもカウントされないだろう。 + +06:06.360 --> 06:11.310 +だから、 これはこれから調べて、 それを見つけ出そうとしているんだ。 + +06:11.310 --> 06:12.540 +さあ、 行こう。 + +06:12.540 --> 06:19.560 +つまり、 726人で、 ほぼ50%ということになる。 + +06:19.560 --> 06:20.580 +だからひどくはない。 + +06:20.580 --> 06:22.290 +それでいい、 それでいい。 + +06:22.290 --> 06:29.700 +一瞬、 収穫が少ないのではと心配になるかもしれないが、 少なくとも電化製品に関してはそうではない。 + +06:29.970 --> 06:35.010 +ええと、 ええと、 データセットで、 半分が価格を持っている。 + +06:35.400 --> 06:36.960 +小さなサイドポイントだ。 + +06:36.960 --> 06:39.270 +私がいつから数字を印刷しているのか、 お気づきになっただろうか。 + +06:39.270 --> 06:47.370 +一般的に、 コンマで数千を区切っている。 + +06:47.370 --> 06:53.340 +その方法は、 Pythonのf文字列を使う場合、 次のようにコロン・コンマと言う。 + +06:53.460 --> 06:59.850 +そうすれば、 このスタイルで数字が表示されます。 + +07:00.000 --> 07:02.610 +ちょっとホットな情報を。 + +07:02.700 --> 07:06.510 +ええと、 すでにご存じかもしれないが、 そうでなければ、 知っておくと便利なことだ。 + +07:07.500 --> 07:08.490 +オーケー。 + +07:08.970 --> 07:14.490 +だから、 これからやることは、 値段がついているものをすべて取り上げるということだ。 + +07:14.850 --> 07:23.370 +そして、 タイトル、 説明、 特徴、 詳細の文字数を調べます。 + +07:23.370 --> 07:29.130 +文字数を合計して、 長さのリストにするんだ。 + +07:29.130 --> 07:36.150 +だから、 私たちが今持っているのは価格と長さのリストであり、 私たちがどれだけの詳細な文字数を持っているかを把握し、 + +07:36.180 --> 07:42.390 +それが均一なものなのか、 それとも何らかの形で偏っているものなのかを確認することができる。 + +07:42.450 --> 07:53.250 +では、 これからよく使うmatplotlibを使って、 長さをヒストグラムの形でプロットしてみよう。 + 
+07:53.250 --> 08:06.930 +ヒストグラムは基本的に、 すべてのものをバケツに入れて各ビンにいくつ入っているかを示すものだ。 + +08:06.960 --> 08:09.060 +それがどんなものかをお見せするのは簡単だ。 + +08:09.060 --> 08:10.170 +こんな感じだ。 + +08:10.170 --> 08:18.330 +つまり、 X軸に沿って、 さまざまな家電製品、 さまざまな洗濯機などの長さを、 + +08:18.330 --> 08:22.830 +その説明文の文字数で表している。 + +08:22.830 --> 08:29.240 +そしてこれは、 それだけのキャラクターを持つ電化製品がいくつあるかという数である。 + +08:29.240 --> 08:38.960 +そして、 このあたりにピークがあるのがわかるだろう。 + +08:38.960 --> 08:48.650 +というのも、 最終的には自分たちのオープンソースモデルを使ってトレーニングしたいからだ。 + +08:48.650 --> 08:55.730 +そして、 理解することが非常に重要な制約の1つは、 各ポイントでモデルに渡す可能性のある最大文字数、 + +08:55.730 --> 09:01.490 +つまり最大トークン数です。 + +09:01.490 --> 09:06.770 +そして、 各トレーニングポイントに渡す必要があるトークンの数が多ければ多いほど、 トレーニングに必要なメモリが増え、 + +09:06.770 --> 09:09.950 +達成するのが難しくなる。 + +09:09.980 --> 09:14.990 +もう一つのポイントは、 フロンティア・モデルを使う場合でも、 + +09:14.990 --> 09:24.710 +そのような問題はないものの、 別の問題があるということです。 + +09:24.830 --> 09:31.490 +しかし、 これを怒りにまかせて多数の製品で行おうとすれば、 数字がどんどん増えていくことになる。 + +09:31.520 --> 09:37.430 +だから理想的なのは、 カットオフを決めて、 その時点でデータを制約することだ。 + +09:37.550 --> 09:40.820 +それは後で考えることにしよう。 + +09:41.120 --> 09:45.200 +もうひとつ注目すべきは、 価格の分布だ。 + +09:45.200 --> 09:47.270 +では、 いくらかかるのか? 
+ +09:47.300 --> 09:53.150 +家電製品は冷蔵庫や洗濯機などでいっぱいになると思っていたのに、 私たちが見たものはむしろ小さかったということは、 + +09:53.150 --> 09:59.900 +先の分析でお分かりいただけたかもしれない。 + +09:59.900 --> 10:05.810 +エッグホルダーや製氷機などである。 考えてみれば、 データにはおそらく、 + +10:05.810 --> 10:12.860 +高価格帯の商品を押しのけるような安価なものが非常に多く含まれるはずで、 + +10:13.010 --> 10:16.730 +それほど驚くことではない。 + +10:16.730 --> 10:17.660 +では、 それを見てみよう。 + +10:17.690 --> 10:19.100 +どう見えるか見てみよう。 + +10:20.360 --> 10:22.850 +まあ、 それは事実のようだ。 + +10:23.120 --> 10:29.280 +つまり、 このデータセットの平均価格は6ドルということになる。 + +10:29.310 --> 10:33.360 +最高額は21,000ドル。 + +10:33.390 --> 10:43.140 +このリストの中には21,000ドルの家電製品もあるが、 それ以下の価格のものも非常に多いことがわかるだろう。 + +10:43.470 --> 10:48.150 +そして、 平均値、 中央値、 最頻値の違いを覚えている人のために。 + +10:48.180 --> 10:54.930 +学校の統計学で習ったことだが、 これは平均値が高価なものによって引き上げられ、 + +10:54.930 --> 11:01.710 +最頻値や中央値よりも明らかに大きくなるといういい例だ。 + +11:02.730 --> 11:12.690 +そうですね、 確かに、 安い商品が非常に多いという偏った分布になっていますね。 + +11:12.690 --> 11:22.140 +トレーニングの間、 トレーニングデータが低価格のアイテムに埋もれてしまうからだ。 + +11:22.440 --> 11:26.880 +この超高価なものが何なのか、 ちょっと見てみよう。 + +11:26.880 --> 11:30.630 +この2万1,000ドルの品だ。 + +11:30.630 --> 11:35.300 +データセットを調べて、 21,000ドル以上するものは何でも抜き出す。 + +11:35.300 --> 11:38.000 +どうやらターボチェフ弾のようだ。 + +11:38.000 --> 11:41.300 +ラピッド・クック電気電子レンジ・コンベクション・オーブン。 + +11:41.330 --> 11:46.940 +今、 もし誰かが私にそのような説明をしたとしたら、 それが21,000ドルもするとは思わなかっただろう。 + +11:47.300 --> 11:53.870 +同じものではないが、 おそらく今アマゾンで最新版だと思われるものを見つけた。 + +11:53.960 --> 12:00.590 +これもターボシェフ製だ。 + +12:00.590 --> 12:05.420 +21,000ドルどころか、 わずか18,000ドルのバーゲンプライスだ。 + +12:05.900 --> 12:07.640 +でも、 私はあなたのことを知らない。 + +12:07.640 --> 12:12.440 +電子レンジがこんなに高いとは知らなかったが、 明らかにプロ仕様の電子レンジで、 + +12:12.470 --> 12:17.090 +非常に高級な電子レンジだ。 + +12:17.090 --> 12:27.110 +その2万1,000ドルバージョンは、 私たちのデータでは、 こちらのどこかにある。 + +12:28.340 --> 12:35.120 +次のビデオでは、 それをやってみよう。 diff --git a/week5/community-contributions/subtitles/srts/59472017/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472017/ko_KR.srt new file mode 100755 index 0000000..bd7eb51 --- /dev/null +++ 
b/week5/community-contributions/subtitles/srts/59472017/ko_KR.srt @@ -0,0 +1,508 @@ +WEBVTT + +00:01.430 --> 00:06.980 +이 과정의 큰 프로젝트에 대해 코딩하는 건 이번이 처음이죠 + +00:06.980 --> 00:08.930 +주피터 연구소에 잘 오셨어요 + +00:08.930 --> 00:14.390 +6주 차 폴더에 오신 걸 환영합니다 큰 프로젝트에 착수했죠 + +00:14.390 --> 00:20.870 +그래서 우리 프로젝트는 제품 설명에 근거해 가격을 추정할 수 있는 모델을 만드는 + +00:20.870 --> 00:21.710 +거죠 + +00:21.710 --> 00:26.120 +오늘은 데이터 큐레이션의 첫 단계를 할 거예요 + +00:26.120 --> 00:34.220 +우선 가정용 가전제품이나 세탁기 같은 가전제품의 데이터부터 살펴보죠 + +00:34.220 --> 00:36.710 +먼저 데이터 집합을 보여드릴게요 + +00:36.710 --> 00:39.260 +데이터 집합은 이 링크에 있어요 + +00:39.260 --> 00:46.190 +이건 안아주는 얼굴 허브의 안아주는 얼굴 데이터셋 섹션의 데이터셋이에요 + +00:46.580 --> 00:55.520 +아마존에서 조사한 리뷰의 일련인데 그 시대를 거슬러 올라가죠 + +00:55.520 --> 00:59.810 +하지만 20년에 생긴 이 잔해는 2023년 말에 생긴 거예요 + +01:00.020 --> 01:07.450 +리뷰도 엄청나게 많이 실렸고 거의 5천만 개의 아이템이 실려 있어요 + +01:07.450 --> 01:15.040 +다양한 제품이 있고 여러 카테고리로 나뉘어 있어요 + +01:15.250 --> 01:17.410 +이걸 다 가지고는 일 못 해요 + +01:17.410 --> 01:22.870 +여기서 일부를 추출할 겁니다 이 훈련에 가장 흥미로운 + +01:22.870 --> 01:24.070 +범주예요 + +01:24.490 --> 01:28.930 +안 그러면 훈련하는 데 너무 오래 걸려서 재미가 없을 거예요 + +01:28.930 --> 01:34.000 +우리가 작업하는 데이터의 종류에 대한 좋은 감각을 제공하죠 + +01:34.330 --> 01:41.560 +안는 얼굴 폴더로 들어가 볼게요. 메타데이터가 들어있는 폴더입니다. 제품과 + +01:41.590 --> 01:48.400 +설명, 가격에 대한 데이터입니다. 우리가 정말 신경 쓰는 것들이죠. 
+ +01:48.430 --> 01:49.270 +여기 있네요 + +01:49.300 --> 01:56.380 +좋은 감을 잡을 수 있어요 전자 제품의 데이터 세트를 보면요 크기가 5기가바이트 + +01:56.380 --> 01:59.320 +조금 넘는 걸 보실 수 있어요 + +01:59.320 --> 02:05.110 +빅데이터 집합이죠 유용한 정보가 엄청나게 많아요 + +02:05.110 --> 02:08.160 +7개월 전에 업로드됐고요 + +02:08.160 --> 02:10.290 +최근 일이군요 + +02:11.100 --> 02:14.010 +Get up, Get up, Get up, Get up, Get, 시작, 시작할게요 + +02:14.010 --> 02:16.410 +수입 문제부터 시작하죠 + +02:16.440 --> 02:19.440 +복잡할 게 없죠 + +02:19.470 --> 02:20.700 +아직은요 + +02:20.730 --> 02:22.380 +더 많이 올 거예요 + +02:22.680 --> 02:24.660 +환경을 설정할 거예요 + +02:24.690 --> 02:26.550 +오늘 이걸 쓸 건 아니지만요 + +02:26.550 --> 02:28.260 +그냥 껴안는 얼굴만 할 거예요 + +02:28.290 --> 02:29.970 +얼굴 포옹에 로그인하세요 + +02:30.720 --> 02:37.230 +매트플로틀리브가 유피터 노트북에 차트를 띄울 수 있게 해 주죠 + +02:37.380 --> 02:41.070 +먼저 데이터 세트를 로드해요 + +02:41.070 --> 02:45.660 +우리가 할 일은 데이터 집합의 이름을 지정하는 겁니다 Amazon reviews요 + +02:45.660 --> 02:49.980 +어플라이언스 카테고리로 시작하도록 선택할게요 + +02:50.010 --> 02:56.970 +냉장고나 세탁기 같은 가전제품을 가장 먼저 로드할 거예요 + +02:57.000 --> 03:03.480 +얼굴 포옹 데이터 로드셋 함수를 이용해서요 + +03:03.780 --> 03:09.330 +이걸 처음 실행하면 H깅페이스 허브에서 다운로드 됩니다 + +03:09.350 --> 03:13.370 +제가 이미 했기 때문에 그건 필요 없죠 + +03:13.400 --> 03:20.870 +이미 완성된 가전제품이 몇 개나 있는지 확인할 거예요 + +03:20.990 --> 03:26.510 +가전제품이 94,000개 있어요 + +03:26.510 --> 03:28.520 +이 중 하나를 보죠 + +03:28.520 --> 03:34.550 +어디 보자 데이터 포인트가 데이터셋이라고 해보죠 + +03:35.030 --> 03:36.290 +첫 번째 걸 보죠 + +03:36.290 --> 03:37.100 +안 될 거 없죠 + +03:37.370 --> 03:38.360 +한번 보죠 + +03:42.590 --> 03:44.210 +이렇게 생긴 거군요 + +03:44.210 --> 03:45.950 +정보가 아주 많아요 + +03:45.950 --> 03:49.370 +하지만 특히 기능이라는 게 있어요 + +03:49.370 --> 03:54.350 +제목도 있고 몇 가지 다른 것도 있는데 아마 유용할 거예요 + +03:54.530 --> 04:01.910 +특히 제목, 설명, 기능 세부 사항, 가격이 있죠 + +04:01.940 --> 04:04.970 +하나씩 프린트해서 잠깐 보도록 하죠 + +04:05.030 --> 04:06.770 +이게 제목이에요 + +04:06.770 --> 04:09.860 +얼음 제조기 조리대예요 + +04:09.890 --> 04:13.280 +이 묘사는 비어 있어요 + +04:13.820 --> 04:16.430 +이게 세부 사항이에요 + +04:16.460 --> 04:24.830 +기능이 아주 많아요 상세 정보와 가격은 문제가 바로 보이네요 + +04:24.830 --> 04:26.870 +이 경우엔 프라이스가 없어요 
+ +04:26.870 --> 04:30.080 +모든 물건에 가격이 있는 건 아니군요 + +04:30.560 --> 04:35.780 +보다시피 설명은 목록의 형태로 나타나죠 + +04:35.780 --> 04:41.090 +특성이라면 세부 사항은 사전 형태로 나오겠죠 + +04:41.090 --> 04:43.790 +그건 속임수지만 사전은 아니에요 + +04:43.790 --> 04:47.180 +JSON을 포함하는 문자열이죠 + +04:47.180 --> 04:54.980 +이 텍스트를 읽어내려면 로드 인을 하고 A를 써서 사전으로 변환해야 해요 JSON + +04:54.980 --> 04:58.940 +로드 문자열 로드 s를 써서요 + +05:00.020 --> 05:01.310 +네 + +05:01.310 --> 05:05.060 +다른 데이터 포인트를 보고 다음 걸 보죠 + +05:05.060 --> 05:12.740 +냉장고 달걀꽂이처럼 생겼고 달걀 10개까지 들어가며 하나도 없어요 + +05:13.250 --> 05:15.390 +이것도 가격은 없어요 + +05:15.420 --> 05:17.880 +세 번째도 가격표가 없어요 + +05:17.880 --> 05:20.940 +새 건조기 드럼 슬라이드예요 + +05:20.940 --> 05:24.720 +이 시점에서 처음으로 걱정하게 될지도 몰라요 + +05:24.750 --> 05:27.300 +가전제품이 94,000개예요 + +05:27.300 --> 05:30.000 +처음 본 세 개는 가격이 없었어요 + +05:30.000 --> 05:32.340 +몇 개나 가격이 적혀 있는지 보죠 + +05:32.340 --> 05:37.800 +간단한 방법은 데이터 집합의 모든 데이터 포인트를 반복하는 거죠 + +05:37.800 --> 05:40.410 +Get it, get it, get it, it! 그럼 우린 돈을 벌겠죠 + +05:40.650 --> 05:44.040 +그걸 try 블록에 넣을 거예요 + +05:44.190 --> 05:50.010 +그게 없으면 실패할 테니까요 그럼 데이터 포인트를 건너뛰죠 + +05:50.010 --> 05:53.220 +0으로 매기는 것도 전부 무시하고요 + +05:53.220 --> 05:57.120 +따라서 가격이란 숫자가 있는 것만 살펴볼 거예요 + +05:57.120 --> 06:01.140 +가격이 0이 아닌 0이 0보다 크다는 거죠 + +06:01.170 --> 06:05.490 +마이너스 가격은 없을 것 같지만 있다 해도 개수는 확인 안 할 거예요 get it + +06:06.360 --> 06:11.310 +이제 이걸 통해 그걸 알아내려 할 거예요 + +06:11.310 --> 06:12.540 +다 됐어요 + +06:12.540 --> 06:19.560 +726개라는 뜻인데 거의 50%에 해당하죠 + +06:19.560 --> 06:20.580 +그러니 끔찍하진 않죠 + +06:20.580 --> 06:22.290 +괜찮아요, 괜찮아요 + +06:22.290 --> 06:26.790 +잠시 걱정되는 건 살 게 별로 없을까 봐서요 하지만 적어도 + +06:26.790 --> 06:29.700 +가전제품은 걱정 안 해요 + +06:29.970 --> 06:35.010 +데이터 세트인데 절반은 가격이 적혀 있어요 + +06:35.400 --> 06:36.960 +작은 측면이죠 + +06:36.960 --> 06:39.270 +제가 숫자를 인쇄한 때를 눈치채셨는지 모르겠네요 + +06:39.270 --> 06:45.210 +보통 쉼표로 수천을 구분하는데 이런 걸 읽을 때 + +06:45.240 --> 06:47.370 +아주 유용하죠 + +06:47.370 --> 06:53.340 +이렇게 하는 방법은 파이썬의 f 문자열을 사용한다면: 콤마를 합니다 이렇게요 + +06:53.460 --> 06:59.850 +get을 서식에 사용하면 이런 스타일로 숫자가 나와요 + +07:00.000 --> 
07:02.610 +팁을 하나 드리죠 + +07:02.700 --> 07:06.510 +어, 이미 알고 계실 수도 있지만 모르셨더라도 알아두면 유용한 정보죠 + +07:07.500 --> 07:08.490 +네 + +07:08.970 --> 07:14.490 +이제 할 일은 가격이 있는 건 전부 다 가져오는 거예요 + +07:14.850 --> 07:23.370 +제목, 설명, 특징, 세부 사항에 글자가 몇 개인지 알아낼 거예요 + +07:23.370 --> 07:29.130 +문자의 총수를 더해서 set의 길이에 입력할 거예요 + +07:29.130 --> 07:36.150 +이제 가격 목록과 길이 목록이 나왔어요 디테일이 얼마나 많은지 가늠할 수 있죠 + +07:36.180 --> 07:42.390 +균일한지, 아니면 뭔가 어떤 식으로든 삐뚤어졌는지 알 수 있어요 + +07:42.450 --> 07:50.400 +그래서 매트플로틀리브를 많이 사용할 거예요 길이를 히스토그램 형태로 + +07:50.400 --> 07:53.250 +줄거리를 만들 거예요 + +07:53.250 --> 08:00.360 +기억하시면 좋겠네요 얼마 전 통계학 클래스에서 히스토그램은 기본적으로 + +08:00.360 --> 08:06.930 +모든 걸 통에 버킷으로 넣는 거예요 각 통에 몇 개가 있는지 보여주죠 + +08:06.960 --> 08:09.060 +직접 보여드리는 게 더 쉽겠네요 + +08:09.060 --> 08:10.170 +이렇게 생겼어요 + +08:10.170 --> 08:18.330 +엑스 축을 따라 각기 다른 가전제품과 세탁기의 길이를 표시합니다 + +08:18.330 --> 08:22.830 +설명에 포함된 글자의 개수죠 + +08:22.830 --> 08:29.240 +이 정도로 많은 글자를 가진 기기가 몇 개인지 세어보죠 + +08:29.240 --> 08:36.470 +이 주변이 봉우리처럼 솟아 있는데 긴 꼬리가 더 + +08:36.470 --> 08:38.960 +많이 나타나요 + +08:38.960 --> 08:43.610 +이건 우리가 훈련할 때 도전이 될 겁니다 궁극적으로 우리 + +08:43.610 --> 08:48.650 +자신의 오픈 소스 모델을 사용하고 훈련해야 하니까요 + +08:48.650 --> 08:55.730 +우리가 이해해야 하는 중요한 제약 중 하나는 통과할 수 있는 글자 수의 최대치입니다 + +08:55.730 --> 09:01.490 +혹은 각 시점에서 통과할 수 있는 토큰의 최대치죠 + +09:01.490 --> 09:06.770 +훈련 포인트를 통과할 때마다 메모리가 더 많이 필요하고 + +09:06.770 --> 09:09.950 +달성하기 더 어려워져요 + +09:09.980 --> 09:14.990 +또 다른 요점은 개척자 모델을 사용해도 이와 관련된 문제는 없지만 다른 문제가 + +09:14.990 --> 09:19.430 +있다는 것입니다 토큰을 많이 제출하는 것이 더 많은 토큰을 제출하는 + +09:19.430 --> 09:24.710 +것보다 비용이 더 많이 듭니다 이 문제는 몇몇의 경우에 큰 문제가 되지 않죠 + +09:24.830 --> 09:31.490 +하지만 분노로 많은 제품을 공격한다면 그 숫자는 점점 늘어날 거예요 + +09:31.520 --> 09:37.430 +이상적으로는 컷오프를 선택하고 그 시점에서 데이터를 제한하는 거죠 + +09:37.550 --> 09:40.820 +그건 나중에 생각해 볼 거예요 + +09:41.120 --> 09:45.200 +또 하나 주목할 점은 가격 분배예요 + +09:45.200 --> 09:47.270 +그래서 가격이 얼마죠? 
+ +09:47.300 --> 09:53.150 +아까 분석에서 힌트를 얻으셨을 거예요 냉장고나 세탁기 + +09:53.150 --> 09:59.900 +같은 가전제품이 가득할 줄 알았는데 우리가 본 건 좀 더 작았어요 + +09:59.900 --> 10:05.810 +달걀 홀더와 제빙기였어요 생각해 보면 그리 놀랍지 않아요 + +10:05.810 --> 10:12.860 +데이터에 따르면 값싼 제품들이 훨씬 더 많이 있었을 거예요 비싼 + +10:13.010 --> 10:16.730 +제품들을 눌러 버렸겠죠 + +10:16.730 --> 10:17.660 +한 번 보죠 + +10:17.690 --> 10:19.100 +어떤지 보죠 + +10:20.360 --> 10:22.850 +그런 것 같네요 + +10:23.120 --> 10:29.280 +데이터 집합의 평균 가격은 6달러예요 + +10:29.310 --> 10:33.360 +최고 21,000달러까지 올라가요 + +10:33.390 --> 10:40.290 +목록에 21,000달러짜리 가전제품이 있는데 가격이 낮은 + +10:40.290 --> 10:43.140 +제품들이 아주 많아요 + +10:43.470 --> 10:48.150 +중간값과 모드 차이를 기억하시는 분들을 위해 설명드리죠 + +10:48.180 --> 10:54.930 +학교 통계를 보면 비싼 아이템을 기준으로 평균값을 올릴 + +10:54.930 --> 11:01.710 +수 있고 확실히 모드를 능가합니다 중앙값도 능가하겠죠 + +11:02.730 --> 11:10.080 +보시다시피 저렴한 제품이 많이 유통되고 삐뚤게 + +11:10.080 --> 11:12.690 +분배돼 있어요 + +11:12.690 --> 11:19.650 +훈련 중에는 힘들 수도 있어요 훈련 데이터가 저비용 아이템으로 인해 + +11:19.650 --> 11:22.140 +정말 좁아질 테니까요 + +11:22.440 --> 11:26.880 +이 엄청나게 비싼 물건이 뭔지 빨리 살펴보죠 + +11:26.880 --> 11:30.630 +21,000달러짜리 물건이에요 + +11:30.630 --> 11:35.300 +데이터 세트에서 21,000달러 이상 하는 건 다 골라낼 거예요 + +11:35.300 --> 11:38.000 +터보셰프 총알 같아요 + +11:38.000 --> 11:41.300 +급속 조리 전자레인지 대류식 오븐이에요 + +11:41.330 --> 11:45.290 +누가 그런 설명을 해줬다면 그게 21,000달러나 할 줄은 + +11:45.320 --> 11:46.940 +상상도 못 했을 거예요 + +11:47.300 --> 11:53.870 +똑같은 건 아니지만 아마존 최신 버전인 것 같아요 + +11:53.960 --> 12:00.590 +이걸 자세히 보면 이것도 터보셰프에서 만든 거예요 + +12:00.590 --> 12:05.420 +21,000달러가 아니라 18,000달러밖에 안 해요 + +12:05.900 --> 12:07.640 +당신은 어떤지 모르겠네요 + +12:07.640 --> 12:12.440 +전자레인지가 이렇게 비싼지 몰랐어요 하지만 전문가용이고 + +12:12.470 --> 12:17.090 +고급 전자레인지예요 가격도 저렴하고요 + +12:17.090 --> 12:27.110 +그것의 21,000달러 버전이 여기 어딘가에 있어요 우리 데이터에서 스케일을 벗어나서요 + +12:28.340 --> 12:35.120 +이제 데이터를 큐레이팅할 시간입니다 다음 비디오에서 하죠 diff --git a/week5/community-contributions/subtitles/srts/59472027/en_US.srt b/week5/community-contributions/subtitles/srts/59472027/en_US.srt new file mode 100755 index 0000000..63e4758 --- /dev/null +++ 
b/week5/community-contributions/subtitles/srts/59472027/en_US.srt @@ -0,0 +1,655 @@ +WEBVTT + +00:00.620 --> 00:03.530 +And now the time has come to curate our data set. + +00:03.530 --> 00:09.110 +And the way we're going to do this is we're going to take each of the data points that we got from hugging + +00:09.140 --> 00:14.780 +face, and we're going to convert it into a Python object, an object, a class that we're going to + +00:14.780 --> 00:16.430 +create called an item. + +00:16.430 --> 00:22.310 +And it's so important that I've actually set up a different module items.py, where I have written this + +00:22.310 --> 00:29.450 +class, and I've done it as Python code, not in a Jupyter notebook, but in its own own module, so + +00:29.450 --> 00:34.670 +that it can be reused from different places, and so that we don't clutter our Jupyter notebook with + +00:34.670 --> 00:35.630 +the code behind it. + +00:35.630 --> 00:42.770 +And it contains some messy code to do some data munging, some unraveling of the data to clean it up, + +00:42.770 --> 00:47.750 +I'm going to show you this item, this module now and talk through it. + +00:47.750 --> 00:52.520 +But really there's an exercise for you to go and look through this in more detail and understand it + +00:52.520 --> 00:54.050 +a little bit more closely. + +00:54.110 --> 00:59.870 +So as I say it's in its own module items.py and it defines a class item. + +00:59.890 --> 01:05.980 +And I should point out, before we even get going with it, that we start by setting a constant called + +01:06.010 --> 01:11.560 +base model to be the llama 3.18 billion variant base model. + +01:11.920 --> 01:16.960 +Now, you might say to me, what on earth has the llama model got to do with what we're doing at the + +01:16.960 --> 01:17.380 +moment? + +01:17.380 --> 01:20.080 +We're not going on to open source until next week. + +01:20.080 --> 01:23.560 +This week it's all about using frontier models for for fine tuning. 
+ +01:23.950 --> 01:25.330 +And here's the answer. + +01:25.330 --> 01:33.460 +We're going to be crafting our data set so that it fits within a certain fixed number of tokens as a + +01:33.460 --> 01:36.970 +maximum tokens for the llama tokenizer. + +01:36.970 --> 01:41.200 +And the reason we're going to do that is because that's going to make it cheaper and easier to train + +01:41.200 --> 01:43.900 +when we end up using our open source model. + +01:43.900 --> 01:48.610 +It's also, as I say, going to make it cheaper when we use the frontier model as well, and we want + +01:48.610 --> 01:50.530 +everyone to be on the same playing field. + +01:50.530 --> 01:56.710 +So when we craft our prompts and we fix them to a certain number of tokens, we want to make sure that + +01:56.710 --> 02:01.690 +both the frontier model and the open source model gets the same amount of information. + +02:01.690 --> 02:09.190 +If you have more budget and you have ability to train on bigger GPUs, or to use, uh, or more budget + +02:09.190 --> 02:13.600 +with frontier models, then you can extend the cutoff so that we can have bigger and bigger amounts + +02:13.600 --> 02:14.260 +of text. + +02:14.260 --> 02:19.570 +But I think we've got we're going to you'll see, we'll have plenty of text in each of these data points. + +02:19.570 --> 02:25.270 +And so it's perfectly sufficient for our frontier models and our open source models to be training against. + +02:25.870 --> 02:30.670 +Anyway, that is why we're looking at the llama model, because we're going to be using its tokenizer + +02:30.670 --> 02:34.660 +to when we check whether or not we have the right number of characters. + +02:34.840 --> 02:42.280 +So the class item, then it is something which each item is going to have a title, a price of course, + +02:42.310 --> 02:49.000 +a category which will be things like appliances, a token count, how many tokens does it contain. 
+ +02:49.000 --> 02:55.990 +And then most importantly, a prompt which is going to be the text which will be fed into an LLM, which + +02:56.020 --> 03:00.660 +it will then use to either train or to test against. + +03:01.770 --> 03:07.080 +So just very briefly, the the takeaway is that you must look through this code yourself and satisfy + +03:07.080 --> 03:13.950 +yourself that I'm not doing anything evil and that all of this is just good, uh, wholesome housekeeping + +03:13.950 --> 03:15.810 +and cleaning up of strings. + +03:15.810 --> 03:22.680 +I have a function called scrub details, which removes stuff from the text that feels like it's going + +03:22.710 --> 03:24.360 +to be distracting to the model. + +03:24.390 --> 03:27.030 +Stuff like batteries included. + +03:27.150 --> 03:33.060 +Um, and, uh, some other things you see in there, the word manufacturer by manufacturer. + +03:33.060 --> 03:39.870 +So a bunch of things where it's not relevant or it's not massively relevant, and it seemed better to + +03:39.900 --> 03:44.310 +remove it than to have it use up precious tokens by being in there. + +03:45.030 --> 03:52.410 +There's this, uh, method scrub, which goes through and cleans out weird characters, and it also + +03:52.440 --> 03:59.990 +turns multiple spaces into one space, using some regex for regex ninjas out there. + +04:00.080 --> 04:03.500 +This is probably, uh, easy stuff for you. + +04:03.530 --> 04:11.450 +For others, this is one of the kinds of bits of script that you can reuse as ways to remove different + +04:11.450 --> 04:13.280 +problems in your in your strings. + +04:13.280 --> 04:17.840 +And you can also test this out at a Jupyter notebook to satisfy yourself that it's doing what it says + +04:17.870 --> 04:18.680 +on the tin. 
+ +04:19.070 --> 04:24.890 +I will mention this line here, because this is just a little extra trick I put in there that is useful + +04:24.890 --> 04:29.270 +for our particular case, and it's an example of the kind of thing you only discover when you really + +04:29.270 --> 04:30.500 +dig into the data. + +04:30.530 --> 04:38.180 +I noticed that there were a lot of products on Amazon which quote part numbers in their description. + +04:38.180 --> 04:42.650 +So they say this is compatible with part number, blah blah and blah. + +04:42.650 --> 04:49.790 +And those part numbers are often eight digits, eight characters long or longer and contain letters + +04:49.790 --> 04:50.720 +and numbers. + +04:50.720 --> 04:56.570 +And the problem with that is that when that gets turned into tokens, it uses up a lot of tokens because + +04:56.600 --> 04:59.360 +obviously it's not in the vocabulary in any way. + +04:59.360 --> 05:06.500 +And so you end up cramming all of your, your token, your unlimited capacity for tokens with tokens + +05:06.500 --> 05:11.330 +that represent part numbers that are going to be totally irrelevant for our model. + +05:11.360 --> 05:19.190 +So what this line here does is it says if there's any, any word that has eight or more characters and + +05:19.190 --> 05:23.180 +contains a number inside it, then scrap that word. + +05:23.180 --> 05:24.950 +It's going to be a distraction. + +05:25.160 --> 05:30.440 +Um, so the reason I highlight this is, is really, again, to show that, that you only come across + +05:30.440 --> 05:32.720 +this kind of discovery when you dig into your data. + +05:32.750 --> 05:37.190 +You look at lots of examples and you see this happening, and then you come across this. + +05:37.190 --> 05:39.410 +You have the moment you try this out. 
+ +05:39.410 --> 05:44.630 +And when you rerun your model, uh, which you can imagine I've done once or twice in the last few weeks, + +05:44.930 --> 05:51.290 +you find that you've made an improvement because your data is richer and has more, more accuracy to + +05:51.320 --> 05:51.860 +it. + +05:52.460 --> 05:56.080 +So that's an important part of the process. + +05:56.530 --> 06:06.040 +And we will then use a method parse, which takes a data point, and then does all of the various scrubbing + +06:06.040 --> 06:10.180 +and stripping and ends up turning it into a prompt. + +06:10.420 --> 06:14.890 +And along with the prompt, it counts the number of tokens in that prompt. + +06:14.890 --> 06:17.440 +And you're going to see the prompt in just a second. + +06:17.440 --> 06:22.600 +But the prompt is the thing that's going to get passed into an LLM, and it will be asked to complete + +06:22.600 --> 06:23.110 +it. + +06:23.230 --> 06:25.510 +And it's going to say, how much does this cost? + +06:25.510 --> 06:27.490 +And it's going to have a cost. + +06:28.390 --> 06:31.870 +There's going to be an ability to look at a look at a prompt. + +06:31.900 --> 06:36.580 +There's also going to be something called the test prompt, which is the same as the prompt, but it + +06:36.580 --> 06:38.410 +doesn't reveal the answer. + +06:38.440 --> 06:42.370 +The prompt will be used during training and it has the answer in there. + +06:42.370 --> 06:48.160 +So during training, the model will get better and better at predicting the answer during test time. + +06:48.160 --> 06:50.620 +We don't want to show it the answer. + +06:50.620 --> 06:54.540 +We want to give it the text and see whether or not it gets the right answer. + +06:54.540 --> 06:58.680 +So we have those two different prompts the training prompt and the test prompt. + +06:58.710 --> 07:03.660 +Later we're going to talk about breaking down your data into a training set and a test set. 
+ +07:03.900 --> 07:05.520 +You'll see you'll see more. + +07:05.550 --> 07:07.350 +It will become much more clear later on. + +07:08.160 --> 07:10.740 +So this is the item class. + +07:10.740 --> 07:13.650 +And I really suggest that you take more of a look through this. + +07:13.650 --> 07:17.880 +But never fear, we're going to be spending a lot of time with these items and looking at them. + +07:17.880 --> 07:21.840 +And so you're going to get a good handle for for what this functionality does. + +07:21.990 --> 07:30.720 +So back here, what we're now going to do is create one of these items objects for everything in in + +07:30.720 --> 07:33.120 +the data set that has a price. + +07:33.450 --> 07:36.090 +So let's run that right now. + +07:37.110 --> 07:40.620 +So the this is running through that code. + +07:40.620 --> 07:43.200 +It's it's doing the scrubbing. + +07:43.200 --> 07:45.540 +It's removing things like part numbers. + +07:45.540 --> 07:51.540 +It's replacing weird characters with with with with spaces. + +07:51.870 --> 08:00.140 +And it's creating a prompt and then making sure that the prompt will fit into a decent number of tokens. + +08:00.140 --> 08:08.060 +So all of that is happening right now, and it's going to be doing that for the 40 odd thousand appliances, + +08:08.060 --> 08:11.450 +home appliances that have a price. + +08:11.570 --> 08:14.870 +So it should be just about wrapping up now. + +08:17.240 --> 08:20.990 +While it's finishing that off I will prepare for us to look at. + +08:20.990 --> 08:21.680 +It's done. + +08:21.830 --> 08:22.490 +There we go. + +08:22.490 --> 08:26.750 +So let's say let's just have a look at the first first one in there. + +08:28.040 --> 08:33.440 +So the first one in there is a rack roller and stud assembly kit. + +08:33.440 --> 08:37.580 +Full pack by Ami parts replaces blah blah blah blah blah. + +08:37.610 --> 08:43.940 +So this is the the title of the item and that's how much it costs $9. 
+ +08:43.940 --> 08:46.910 +And you will indeed see that in the title of the item. + +08:46.910 --> 08:50.390 +There are these part numbers, these long part numbers. + +08:50.420 --> 08:51.260 +Let's see another one. + +08:51.260 --> 08:52.980 +Let's see the first item in there. + +08:53.760 --> 08:56.310 +Again, the first item which is. + +08:56.340 --> 09:00.750 +A compatible A door pivot block compatible. + +09:00.780 --> 09:01.680 +Kenmore KitchenAid. + +09:01.680 --> 09:03.480 +Maytag whirlpool refrigerator. + +09:03.510 --> 09:06.090 +Again, lots of part numbers in there. + +09:06.300 --> 09:15.960 +So let's now look at what happens if I look at the prompt that that I say that this function created + +09:17.580 --> 09:18.390 +items. + +09:18.390 --> 09:20.070 +Even try that again. + +09:21.060 --> 09:22.260 +Let's print that. + +09:22.260 --> 09:25.740 +So it comes up formatted with nice empty lines. + +09:28.410 --> 09:30.930 +So this is what the prompt says. + +09:30.930 --> 09:33.570 +How much does this cost to the nearest dollar. + +09:33.600 --> 09:35.640 +I'll talk more about that to the nearest dollar. + +09:35.670 --> 09:36.810 +In a later times. + +09:36.810 --> 09:42.300 +We'll talk about why I ended up going with that and and the pros and cons. + +09:42.300 --> 09:45.210 +So how much does this cost to the nearest dollar. + +09:45.960 --> 09:51.910 +And here then is there's actually one line for the, for the, for the heading and one line for the + +09:51.910 --> 09:52.780 +description. + +09:52.780 --> 09:58.870 +And what you'll see is that, sure enough, these part numbers have been plucked out from this description, + +09:58.870 --> 10:00.970 +and you'll see that it has been truncated. + +10:00.970 --> 10:03.760 +When we've got to the end of a certain number of tokens. + +10:03.760 --> 10:09.100 +It's actually, uh, comes to just under 180 tokens, is what I've kept. + +10:09.100 --> 10:10.930 +And that's what you can see here. 
+ +10:11.110 --> 10:16.300 +And you can tell from reading this that it's a rich description of the item itself. + +10:16.300 --> 10:18.730 +That should be sufficient for training. + +10:19.330 --> 10:21.760 +Let's take a look at the next one. + +10:22.000 --> 10:25.570 +This of course, is our pivot block. + +10:25.600 --> 10:27.130 +Our door pivot block. + +10:27.160 --> 10:33.070 +Let's go for number 100 and Ice Maker mech. + +10:33.190 --> 10:35.650 +This is a Samsung replacement part. + +10:36.100 --> 10:41.200 +So you'll also notice there are a lot of things in here that are parts and replacement parts. + +10:41.200 --> 10:46.990 +Again consistent with what we saw before that this space could be crowded out by some of the bits and + +10:46.990 --> 10:50.400 +pieces like like replacement parts that are lower cost. + +10:50.400 --> 10:56.460 +Although somewhat surprisingly, this this part is $118, so it's not not such a simple part. + +10:56.880 --> 11:02.310 +I hope I never need this particular, uh, Samsung Assembly ice maker mech. + +11:03.180 --> 11:09.120 +Um, okay, so this is looking at the training prompt. + +11:09.120 --> 11:12.330 +This is what we'll be passing in during training time. + +11:12.330 --> 11:20.940 +And so the model will be will be given this and it will start to learn how best to recreate this price + +11:20.940 --> 11:22.830 +here during training time. + +11:22.830 --> 11:24.810 +What about during test time. + +11:24.810 --> 11:29.010 +What about when it's time to assess whether or not the model is doing any good? + +11:29.010 --> 11:35.880 +So let's look at this guy at item number 100 and see what we will do when it comes to test time. + +11:35.880 --> 11:37.260 +We will then. + +11:39.660 --> 11:42.600 +Provide the model with this. + +11:42.630 --> 11:46.590 +It's exactly the same but it ends here. 
+ +11:46.620 --> 11:54.420 +And of course the idea is that the model will have seen so many examples of this, covering such a wide + +11:54.420 --> 12:01.980 +variety of different items that when it's shown this again at runtime, it will know how to complete + +12:01.980 --> 12:09.120 +it will it will have a good nuanced understanding based on this description that will help it to complete + +12:09.120 --> 12:10.740 +this price. + +12:12.030 --> 12:12.960 +All right. + +12:12.990 --> 12:22.230 +Let's look at how many tokens we typically have in these items by doing another of our diagrams. + +12:22.230 --> 12:28.860 +And what you'll see is that the highest number of tokens is 178. + +12:28.890 --> 12:32.850 +Never quite get to 180, and the average is 176. + +12:32.850 --> 12:35.010 +It's really sort of crammed in there. + +12:35.340 --> 12:43.650 +So we've generally generally selected and and crafted data sets that have about this much information. + +12:43.650 --> 12:47.250 +And it comes to up to 180 tokens. + +12:47.250 --> 12:51.470 +And as I say, this is going to be very helpful during training because we're going to know the maximum + +12:51.470 --> 12:57.830 +number of tokens we need to be able to support in any item, and it's also going to keep costs lower + +12:57.830 --> 13:00.350 +when we end up using frontier models for this. + +13:01.640 --> 13:02.450 +Okay. + +13:02.450 --> 13:09.800 +And then let's just have another look at the distribution of prices in these items that we have selected. + +13:10.160 --> 13:11.450 +Here we go. + +13:12.020 --> 13:16.190 +So the average price is $100. + +13:16.340 --> 13:21.560 +Uh, over here the highest price is, uh, almost $11,000. + +13:21.560 --> 13:28.430 +So in the process of doing some weeding out, we have actually removed that super expensive microwave + +13:28.430 --> 13:29.450 +along the way. + +13:29.570 --> 13:31.760 +But we've still got something that's fairly expensive. 
+ +13:31.790 --> 13:36.440 +You can figure out what that is by by by replicating what I had above. + +13:36.590 --> 13:43.640 +Um, and you can still see that the distribution is very heavily skewed towards super cheap things that + +13:43.640 --> 13:46.730 +are presumably replacement parts, as we have been seeing. + +13:46.730 --> 13:51.010 +So that is another area for us to investigate next time. + +13:51.910 --> 13:57.790 +Uh, and so I did want to mention something that that visualizing these data sets is something we'll + +13:57.790 --> 13:58.810 +be doing a lot. + +13:58.840 --> 14:01.420 +And you will be doing a lot in different ways. + +14:01.450 --> 14:06.280 +Uh, it's it's nice to be able to take advantage of various features in matplotlib. + +14:06.280 --> 14:12.100 +And one of them is that it allows you to produce charts with many a huge array of different colors. + +14:12.100 --> 14:16.660 +And if you would like to know what those colors are, I've included a link that will take you to the + +14:16.660 --> 14:23.110 +page, uh, in matplotlib, where it will describe the different color schemes that you can use, including, + +14:23.110 --> 14:25.360 +uh, something called xkcd's colors. + +14:25.360 --> 14:27.610 +And it's good to take a look at that. + +14:27.760 --> 14:32.680 +Uh, so this is, uh, just a by the by as a little extra thing for you to bookmark. + +14:32.710 --> 14:38.800 +Uh, another another little pro tip for today, but the real to do's, what you have to do now, please, + +14:38.830 --> 14:40.570 +is go and look at the item class. + +14:40.570 --> 14:42.100 +I realize I went through it quickly. + +14:42.100 --> 14:49.660 +It's because it's got some of the more gruesome data A scrubbing the data munging that one does based + +14:49.660 --> 14:53.770 +on real examples of data to make the data as high quality as possible. 
+ +14:54.130 --> 14:59.680 +And I haven't bored you with all of the details, but that's partly because I trust that you will now + +14:59.680 --> 15:01.780 +go in and look at the details yourself. + +15:01.810 --> 15:10.510 +Use JupyterLab to to investigate, try out and understand how these functions have cleaned up some of + +15:10.510 --> 15:18.310 +the data and got us to a point where we have about 180 tokens of rich description, rich wording for + +15:18.310 --> 15:25.990 +each of our data points, each of our items that will be used as training prompts and test prompts in + +15:25.990 --> 15:26.890 +the future. + +15:27.220 --> 15:34.000 +So next time we'll be expanding this to combine many, many other types of products. + +15:34.000 --> 15:39.700 +And if you thought this data set was a large data set, you ain't seen nothing yet. + +15:39.700 --> 15:41.260 +So prepare for that. + +15:41.260 --> 15:45.730 +But first, a couple more slides to wrap up this day. diff --git a/week5/community-contributions/subtitles/srts/59472027/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472027/ja_JP.srt new file mode 100755 index 0000000..a2e9d89 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472027/ja_JP.srt @@ -0,0 +1,526 @@ +WEBVTT + +00:00.620 --> 00:03.530 +そして今、 データセットをキュレーションする時が来た。 + +00:03.530 --> 00:09.110 +ハグした顔から得たデータ・ポイントを、 Pythonのオブジェクト、 + +00:09.140 --> 00:16.430 +itemというクラスに変換します。 + +00:16.430 --> 00:22.310 +とても重要なことなので、 実は別のモジュール項目を設定したんだ。 このクラスはJupyterノートブックではなく、 + +00:22.310 --> 00:35.630 +独自のモジュールにPythonのコードとして書いた。 + +00:35.630 --> 00:47.750 +その中には、 データをきれいにするために、 データを分解するための面倒なコードが含まれている。 + +00:47.750 --> 00:54.050 +でも、 もっと詳しく見て、 もう少し詳しく理解するための練習があるんだ。 + +00:54.110 --> 00:59.870 +だから、 独自のモジュール項目になっているんだ。 pyで、 クラス項目を定義しています。 + +00:59.890 --> 01:11.560 +そして、 それを始める前に、 ベースモデルという定数をラマ3に設定することから始めることを指摘しておく。 + +01:11.560 --> 01:11.560 +180億バリアントのベースモデル。 + +01:11.920 --> 01:17.380 +今、 私たちがやっていることとラマ・モデルがいったい何の関係があるのか、 と言われるかもしれない。 + 
+01:17.380 --> 01:20.080 +オープンソースに進むのは来週だ。 + +01:20.080 --> 01:23.560 +今週は、 微調整のためのフロンティアモデルの使い方についてだ。 + +01:23.950 --> 01:25.330 +その答えはこうだ。 + +01:25.330 --> 01:36.970 +llamaトークナイザーの最大トークン数として、 ある一定のトークン数に収まるようにデータセットを作成します。 + +01:36.970 --> 01:41.200 +なぜそうするかというと、 オープンソースのモデルを使うことになったときに、 より安く、 + +01:41.200 --> 01:43.900 +より簡単にトレーニングできるからだ。 + +01:43.900 --> 01:50.530 +また、 フロンティア・モデルを使用することで安くなる。 + +01:50.530 --> 02:01.690 +だから、 プロンプトを作成し、 それを一定のトークン数に固定するとき、 フロンティアモデルとオープンソースモデルの両方が同じ量の情報を得られるようにしたい。 + +02:01.690 --> 02:09.190 +もっと予算があり、 もっと大きなGPUで訓練したり、 フロンティアモデルでもっと予算があれば、 カットオフの幅を広げて、 + +02:09.190 --> 02:14.260 +もっともっと大量のテキストを扱えるようになる。 + +02:14.260 --> 02:19.570 +でも、 それぞれのデータにはたくさんのテキストがあるはずだ。 + +02:19.570 --> 02:25.270 +だから、 フロンティア・モデルとオープンソース・モデルのトレーニングはこれで十分なのだ。 + +02:25.870 --> 02:30.670 +とにかく、 これがllamaモデルを見ている理由だ。 文字数が正しいかどうかをチェックするときに、 + +02:30.670 --> 02:34.660 +このモデルのトークナイザーを使うことになるからだ。 + +02:34.840 --> 02:42.280 +各アイテムには、 タイトル、 価格、 カテゴリー(家電製品など)、 + +02:42.310 --> 02:49.000 +トークン数(トークンの数)が設定されます。 + +02:49.000 --> 03:00.660 +そして最も重要なのは、 LLMに入力されるテキストとなるプロンプトだ。 + +03:01.770 --> 03:07.080 +つまり、 ごく簡単に言うと、 このコードに目を通し、 私は何も悪いことはしていない、 + +03:07.080 --> 03:15.810 +これはすべて、 健全な家事と文字列の掃除に過ぎない、 と納得することだ。 + +03:15.810 --> 03:24.360 +スクラブ・ディテールという機能があって、 モデルにとって邪魔になりそうなものをテキストから削除するんだ。 + +03:24.390 --> 03:27.030 +バッテリーが含まれているようなものだ。 + +03:27.150 --> 03:33.060 +それから、 この中にある "メーカー "という言葉。 + +03:33.060 --> 03:39.870 +そのため、 関連性がない、 または関連性が大きくないものがたくさんあり、 そこにあることで貴重なトークンを消費するよりは、 + +03:39.900 --> 03:44.310 +取り除いたほうがいいと思った。 + +03:45.030 --> 03:59.990 +スクラブという方法があって、 変な文字を一掃してくれるし、 正規表現忍者のために正規表現を使って複数のスペースを1つのスペースに変換してくれる。 + +04:00.080 --> 04:03.500 +あなたにとっては簡単なことでしょう。 + +04:03.530 --> 04:13.280 +また、 このスクリプトを再利用することで、 文字列のさまざまな問題を取り除くことができる。 + +04:13.280 --> 04:18.680 +また、 Jupyterノートブックでこれをテストして、 書いてあるとおりに動いていることを確認することもできる。 + +04:19.070 --> 04:30.500 +というのも、 この行は、 私たちの特別なケースに役立つちょっとしたトリックを入れただけだからだ。 + +04:30.530 --> 04:38.180 +アマゾンの商品説明に品番を引用しているものが多いことに気づいた。 + +04:38.180 
--> 04:42.650 +だから彼らは、 これは部品番号と互換性があると言うんだ。 + +04:42.650 --> 04:50.720 +そして、 これらの部品番号は多くの場合、 8桁、 8文字以上で、 文字と数字を含んでいる。 + +04:50.720 --> 04:59.360 +トークンに変換されるときに、 トークンをたくさん使ってしまうという問題がある。 + +04:59.360 --> 05:11.330 +そうすると、 トークンの無制限な容量を、 私たちのモデルにはまったく関係のない部品番号を表すトークンでいっぱいにしてしまうことになる。 + +05:11.360 --> 05:19.190 +つまり、 この行は、 8文字以上で数字を含む単語があれば、 その単語をスクラップする、 + +05:19.190 --> 05:23.180 +というものだ。 + +05:23.180 --> 05:24.950 +気が散るだろうね。 + +05:25.160 --> 05:32.720 +このことを強調したのは、 やはり、 データを掘り下げて初めてこのような発見に出くわすということを示すためです。 + +05:32.750 --> 05:37.190 +たくさんの例を見て、 こういうことが起こっていることを知った。 + +05:37.190 --> 05:39.410 +これを試す瞬間がある。 + +05:39.410 --> 05:44.630 +そしてモデルを再実行すると、 ここ数週間で1度か2度やったことがあると思うが、 + +05:44.930 --> 05:51.860 +データがよりリッチになり、 より正確になっているため、 改善されていることに気づく。 + +05:52.460 --> 05:56.080 +だから、 それはプロセスの重要な部分なんだ。 + +05:56.530 --> 06:10.180 +そして、 データ・ポイントを受け取り、 さまざまなスクラブやストリッピングを行い、 最終的にプロンプトに変換するparseメソッドを使用する。 + +06:10.420 --> 06:14.890 +そしてプロンプトとともに、 そのプロンプトに含まれるトークンの数をカウントする。 + +06:14.890 --> 06:17.440 +すぐにプロンプトが表示されます。 + +06:17.440 --> 06:23.110 +しかし、 プロンプトはLLMに合格するためのものであり、 それを完成させることが求められる。 + +06:23.230 --> 06:25.510 +そして、 これにはいくらかかるんだ? 
+ +06:25.510 --> 06:27.490 +その代償は大きい。 + +06:28.390 --> 06:31.870 +プロンプトを見る機能もあるだろう。 + +06:31.900 --> 06:36.580 +また、 テストプロンプトと呼ばれるものもあり、 これはプロンプトと同じものだが、 + +06:36.580 --> 06:38.410 +答えは明らかにされない。 + +06:38.440 --> 06:42.370 +このプロンプトはトレーニング中に使用され、 そこには答えが書かれている。 + +06:42.370 --> 06:48.160 +そのため、 トレーニングの間に、 モデルはテスト時に答えを予測する能力がどんどん向上していく。 + +06:48.160 --> 06:50.620 +答えは見せたくない。 + +06:50.620 --> 06:54.540 +テキストを与えて、 それが正しい答えを導き出すかどうかを確かめたい。 + +06:54.540 --> 06:58.680 +トレーニングのプロンプトとテストのプロンプト。 + +06:58.710 --> 07:03.660 +後で、 データをトレーニングセットとテストセットに分けることについて話すつもりだ。 + +07:03.900 --> 07:05.520 +もっと見ることができるだろう。 + +07:05.550 --> 07:07.350 +後でもっとはっきりする。 + +07:08.160 --> 07:10.740 +これがアイテム・クラスだ。 + +07:10.740 --> 07:13.650 +そして、 もっとこの記事に目を通すことをお勧めする。 + +07:13.650 --> 07:17.880 +しかし、 恐れることはない。 私たちは、 これらのアイテムと多くの時間を過ごし、 それらを眺めることになるのだ。 + +07:17.880 --> 07:21.840 +それで、 この機能が何をするものなのか、 よく理解できるだろう。 + +07:21.990 --> 07:33.120 +さて、 ここに戻って、 これから行うのは、 データセットの中の価格を持つすべてのものに対して、 これらのアイテム・オブジェクトを1つ作成することだ。 + +07:33.450 --> 07:36.090 +では、 今すぐ実行してみよう。 + +07:37.110 --> 07:40.620 +つまり、 これはそのコードの中を走っているのだ。 + +07:40.620 --> 07:43.200 +それはゴシゴシ洗うことだ。 + +07:43.200 --> 07:45.540 +部品番号のようなものを削除しているんだ。 + +07:45.540 --> 07:51.540 +変な文字をスペースに置き換えている。 + +07:51.870 --> 08:00.140 +そして、 プロンプトを作成し、 そのプロンプトが適切な数のトークンに収まるようにするのだ。 + +08:00.140 --> 08:11.450 +つまり、 そのすべてが今起きていることであり、 4万台以上の家電製品、 つまり価格のある家電製品に対してそれを行うことになる。 + +08:11.570 --> 08:14.870 +だから、 もうそろそろ終わるはずだ。 + +08:17.240 --> 08:20.990 +それを終えている間に、 私は私たちに見てもらうための準備をする。 + +08:20.990 --> 08:21.680 +終わったよ。 + +08:21.830 --> 08:22.490 +これでよし。 + +08:22.490 --> 08:26.750 +では、 その中の最初の1枚を見てみよう。 + +08:28.040 --> 08:33.440 +そこで最初に紹介するのが、 ラックローラーとスタッドのアッセンブリーキットだ。 + +08:33.440 --> 08:37.580 +アミ・パーツのフルパックは、 ブラブラブラに代わる。 + +08:37.610 --> 08:43.940 +これが商品のタイトルで、 これが9ドルの値段だ。 + +08:43.940 --> 08:46.910 +そして、 そのことはアイテムのタイトルにも記されている。 + +08:46.910 --> 08:50.390 +こういう部品番号、 こういう長い部品番号がある。 + +08:50.420 --> 08:51.260 +もうひとつ見てみよう。 + +08:51.260 --> 08:52.980 +その中の最初の項目を見てみよう。 + 
+08:53.760 --> 08:56.310 +繰り返すが、 最初の項目はこうだ。 + +08:56.340 --> 09:00.750 +ドアピボットブロック対応。 + +09:00.780 --> 09:01.680 +ケンモア・キッチンエイド + +09:01.680 --> 09:03.480 +メイタグのワールプール冷蔵庫。 + +09:03.510 --> 09:06.090 +ここにもたくさんの部品番号がある。 + +09:06.300 --> 09:18.390 +では、 この関数がアイテムを作成したというプロンプトを見たらどうなるか見てみよう。 + +09:18.390 --> 09:20.070 +もう一度やってみてもいい。 + +09:21.060 --> 09:22.260 +それを印刷しよう。 + +09:22.260 --> 09:25.740 +そのため、 きれいな空行でフォーマットされて表示される。 + +09:28.410 --> 09:30.930 +プロンプトにはこう書いてある。 + +09:30.930 --> 09:33.570 +1ドル単位でいくらかかりますか? + +09:33.600 --> 09:35.640 +それについては1ドル単位で詳しく話そう。 + +09:35.670 --> 09:36.810 +後の時代には + +09:36.810 --> 09:42.300 +なぜそうすることにしたのか、 そのメリットとデメリットについて話そう。 + +09:42.300 --> 09:45.210 +では、 1ドル単位でいくらかかるのか。 + +09:45.960 --> 09:52.780 +そしてここに、 見出しのための1行と、 説明のための1行がある。 + +09:52.780 --> 10:00.970 +そしておわかりになるのは、 案の定、 これらの部品番号はこの説明から抜き出され、 切り捨てられたものだということだ。 + +10:00.970 --> 10:03.760 +トークンの数が一定数に達したらね。 + +10:03.760 --> 10:09.100 +実際、 180トークン弱になる。 + +10:09.100 --> 10:10.930 +それがここにある。 + +10:11.110 --> 10:16.300 +そして、 これを読めば、 その品物そのものについての豊かな記述であることがわかる。 + +10:16.300 --> 10:18.730 +トレーニングにはこれで十分だろう。 + +10:19.330 --> 10:21.760 +次を見てみよう。 + +10:22.000 --> 10:25.570 +もちろん、 これが私たちのピボット・ブロックだ。 + +10:25.600 --> 10:27.130 +ドアピボットブロック。 + +10:27.160 --> 10:33.070 +背番号100、 アイスメーカー・メカを目指そう。 + +10:33.190 --> 10:35.650 +これはサムスンの交換部品です。 + +10:36.100 --> 10:41.200 +ここには部品や交換用パーツがたくさんあることにもお気づきだろう。 + +10:41.200 --> 10:50.400 +ここでもまた、 以前我々が見たように、 このスペースは、 交換部品のような低価格の部品によって混雑する可能性がある。 + +10:50.400 --> 10:56.460 +やや意外だが、 この部品は118ドルもするので、 そう簡単な部品ではない。 + +10:56.880 --> 11:02.310 +このサムスン製氷機のメカが必要ないことを祈るよ。 + +11:03.180 --> 11:09.120 +そう、 これはトレーニングのプロンプトを見ているんだ。 + +11:09.120 --> 11:12.330 +これがトレーニング中にパスするものだ。 + +11:12.330 --> 11:22.830 +そして、 モデルにはこれが与えられ、 トレーニング中にこの価格を再現する最善の方法を学び始める。 + +11:22.830 --> 11:24.810 +テスト中はどうなんだ。 + +11:24.810 --> 11:29.010 +そのモデルが役に立っているかどうかを評価する時はどうだろう? 
+ +11:29.010 --> 11:35.880 +では、 アイテム番号100のこの男を見て、 テスト時間になったらどうするか見てみよう。 + +11:35.880 --> 11:37.260 +私たちはそうする。 + +11:39.660 --> 11:42.600 +これをモデルに提供する。 + +11:42.630 --> 11:46.590 +まったく同じだが、 ここで終わっている。 + +11:46.620 --> 11:54.420 +そしてもちろん、 モデルはこのような例を数多く見てきており、 さまざまな異なるアイテムをカバーしているため、 + +11:54.420 --> 12:10.740 +実行時に再びこのような例を見せられたときに、 どのようにしてこの価格を完成させればよいかを知っている。 + +12:12.030 --> 12:12.960 +分かった。 + +12:12.990 --> 12:22.230 +これらのアイテムに通常いくつのトークンが入っているのか、 別の図を使って見てみよう。 + +12:22.230 --> 12:28.860 +そしておわかりのように、 トークンの数が最も多いのは178である。 + +12:28.890 --> 12:32.850 +平均は176。 + +12:32.850 --> 12:35.010 +本当にぎっしり詰まっているんだ。 + +12:35.340 --> 12:43.650 +だから、 私たちは一般的に、 この程度の情報を持つデータセットを選択し、 作成してきた。 + +12:43.650 --> 12:47.250 +トークンは最大180枚。 + +12:47.250 --> 12:51.470 +また、 フロンティア・モデルを使用する場合にも、 + +12:51.470 --> 13:00.350 +コストを低く抑えることができる。 + +13:01.640 --> 13:02.450 +オーケー。 + +13:02.450 --> 13:09.800 +そして、 選んだアイテムの価格分布を見てみよう。 + +13:10.160 --> 13:11.450 +さあ、 始めよう。 + +13:12.020 --> 13:16.190 +だから平均価格は100ドルだ。 + +13:16.340 --> 13:21.560 +こっちの最高値は......11,000ドル近くだ。 + +13:21.560 --> 13:29.450 +そのため、 選別の過程で、 実はあの超高価な電子レンジを外したのだ。 + +13:29.570 --> 13:31.760 +それでも、 かなり高価なものを手に入れた。 + +13:31.790 --> 13:36.440 +それが何であるかは、 私が上に書いたことを再現すればわかる。 + +13:36.590 --> 13:46.730 +そして、 私たちが見てきたように、 おそらく交換部品と思われる激安品に大きく偏っていることがわかる。 + +13:46.730 --> 13:51.010 +だから、 これも次回の調査対象だ。 + +13:51.910 --> 13:58.810 +それから、 これらのデータセットを視覚化することは、 私たちがよくやることだということも言っておきたかったんだ。 + +13:58.840 --> 14:01.420 +そして、 さまざまな方法で多くのことを行うことになる。 + +14:01.450 --> 14:06.280 +ええと、 matplotlibの様々な機能を利用できるのはいいことだよ。 + +14:06.280 --> 14:12.100 +そのひとつが、 さまざまな色のチャートを作成できることだ。 + +14:12.100 --> 14:16.660 +これらの色が何色なのか知りたい方は、 + +14:16.660 --> 14:25.360 +matplotlibのページにリンクしてあります。 + +14:25.360 --> 14:27.610 +それを見てみるのはいいことだ。 + +14:27.760 --> 14:32.680 +ええと、 これは、 ブックマークしておくためのちょっとしたおまけです。 + +14:32.710 --> 14:40.570 +ええと、 今日はもうひとつ、 ちょっとしたプロとしての心得があるんだけど、 本当にやるべきことは、 アイテムクラスを見てくることなんだ。 + +14:40.570 --> 14:42.100 +あっという間に終わってしまった。 + +14:42.100 --> 14:53.770 +それは、 
データを可能な限り高品質にするために、 実際のデータ例に基づいて行う、 よりぞっとするようなデータAスクラブがあるからだ。 + +14:54.130 --> 15:01.780 +そして、 私はすべての詳細であなたを退屈させませんでしたが、 それは、 あなたがこれから自分自身で詳細を見てくれると信じているからでもあります。 + +15:01.810 --> 15:10.510 +JupyterLabを使って、 これらの関数がどのようにデータをクリーンアップし、 将来トレーニングプロンプトやテストプロンプトとして使用される各データポイント、 + +15:10.510 --> 15:18.310 +各アイテムについて、 約180トークンのリッチな説明、 リッチな表現を持つようになったかを調べ、 + +15:18.310 --> 15:26.890 +試し、 理解する。 + +15:27.220 --> 15:34.000 +だから次回は、 これをさらに拡大して、 他の多くの種類の製品を組み合わせることになるだろう。 + +15:34.000 --> 15:39.700 +このデータセットが大きなデータセットだと思ったのなら、 あなたはまだ何も見ていない。 + +15:39.700 --> 15:41.260 +だから、 その準備をしておくんだ。 + +15:41.260 --> 15:45.730 +その前に、 この日を締めくくるスライドをもう2枚。 diff --git a/week5/community-contributions/subtitles/srts/59472027/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472027/ko_KR.srt new file mode 100755 index 0000000..ff0990a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472027/ko_KR.srt @@ -0,0 +1,625 @@ +WEBVTT + +00:00.620 --> 00:03.530 +이제 데이터 세트를 큐레이팅할 때가 됐어요 + +00:03.530 --> 00:09.110 +어떻게 할 거냐면요 얼굴을 안아서 얻은 각각의 데이터 포인트를 가지고 + +00:09.140 --> 00:14.780 +파이썬 객체로 변환할 겁니다 우리가 생성할 클래스로 항목이라고 부르는 + +00:14.780 --> 00:16.430 +객체죠 + +00:16.430 --> 00:22.310 +다른 모듈 항목을 셋업하는 건 정말 중요해요 파이썬 코드를 이용해서 이 클래스를 만들었습니다 + +00:22.310 --> 00:29.450 +주피터 노트북이 아니라 파이썬 코드 모듈에서 다루었습니다 여러 곳에서 다시 사용할 수 있습니다 + +00:29.450 --> 00:35.630 +주피터 노트북에 코드가 너무 많아서 어지럽지 않도록 말이죠 + +00:35.630 --> 00:42.770 +데이터 머깅을 위한 복잡한 코드를 포함하고 있습니다 데이터를 정리하기 + +00:42.770 --> 00:47.750 +위한 코드요 이 모듈을 보여드리고 말씀드릴게요 + +00:47.750 --> 00:52.520 +하지만 비트를 좀 더 자세히 살펴보고 더 자세히 이해하는 + +00:52.520 --> 00:54.050 +연습이 있어요 + +00:54.110 --> 00:59.870 +말씀드렸듯이 고유한 모듈 항목에 있어요 급이 다른 품목을 정의하죠 + +00:59.890 --> 01:05.980 +시작하기 전에 짚고 넘어가야 할 것은 get mebodel이라는 상수를 설정하는 + +01:06.010 --> 01:11.560 +것부터 시작한다는 점이에요. 
180억 가지 변형 기본 모델이죠 + +01:11.920 --> 01:17.380 +라마 모델이 이 작업과 무슨 상관이냐고 하실지도 모르겠네요 + +01:17.380 --> 01:20.080 +오픈 소스는 다음 주에 공개해요 + +01:20.080 --> 01:23.560 +이번 주에는 개척 시대 모델을 미세 조정할 거예요 + +01:23.950 --> 01:25.330 +이게 그 답이에요 + +01:25.330 --> 01:33.460 +정해진 개수의 토큰 안에 들어가도록 데이터 세트를 만들 겁니다 라마 토큰라이저의 + +01:33.460 --> 01:36.970 +최대 토큰으로요 + +01:36.970 --> 01:41.200 +그렇게 하는 이유는 오픈 소스 모델을 사용할 때 더 저렴하고 + +01:41.200 --> 01:43.900 +쉽게 훈련할 수 있기 때문이죠 + +01:43.900 --> 01:48.610 +앞서 말했듯이 개척자 모델을 쓰면 비용이 줄어들 거예요 모두가 같은 조건에서 + +01:48.610 --> 01:50.530 +작업했으면 좋겠어요 + +01:50.530 --> 01:56.710 +프롬프트를 만들고 특정 수의 토큰에 수정할 때 프런티어 모델과 오픈 + +01:56.710 --> 02:01.690 +소스 모델이 같은 양의 정보를 얻도록 해야 해요 + +02:01.690 --> 02:09.190 +예산이 더 많고 더 큰 GPU로 훈련할 능력이 있거나 프론티어 모델로 예산을 더 쓸 수 있다면 + +02:09.190 --> 02:14.260 +컷오프를 연장해서 텍스트를 더 많이 쓸 수 있어요 + +02:14.260 --> 02:19.570 +하지만 제 생각엔∙∙∙ 보시면 아시겠지만 각각의 데이터 포인트에 텍스트가 많아요 + +02:19.570 --> 02:25.270 +개척 시대 모델과 오픈 소스 모델에 완벽하게 대응할 수 있어요 + +02:25.870 --> 02:30.670 +어쨌든 그래서 llama 모델을 살펴보는 겁니다 토큰라이저를 + +02:30.670 --> 02:34.660 +사용할 테니까요 글자의 수가 맞는지 확인하려고요 + +02:34.840 --> 02:42.280 +클래스 항목은 각각의 항목은 제목을 갖게 되고 가격은 물론 어플라이언스나 + +02:42.310 --> 02:49.000 +토큰 개수 같은 카테고리로 구성되죠 토큰은 몇 개인가? + +02:49.000 --> 02:55.990 +가장 중요한 건 프롬프트입니다 LLM에 입력될 텍스트죠 훈련하거나 + +02:56.020 --> 03:00.660 +테스트할 때 사용될 거예요 + +03:01.770 --> 03:07.080 +간단히 말씀드리면, 이 코드를 직접 훑어보시고 스스로 만족하셔야 합니다. + +03:07.080 --> 03:13.950 +제가 나쁜 짓을 하는 게 아니고 이 모든 게 좋은 일이고, 건전한 가정 환경과, 실밥을 치우는 + +03:13.950 --> 03:15.810 +일이라고요. 
+ +03:15.810 --> 03:22.680 +스크럽 디테일이라는 함수가 있는데 모델이 집중하기 힘들 것 같은 텍스트에서 뭔가를 제거하는 + +03:22.710 --> 03:24.360 +기능이에요 + +03:24.390 --> 03:27.030 +배터리도 포함해서요 + +03:27.150 --> 03:33.060 +다른 것들도 보이는데 제조업체마다 이름이 있어요 + +03:33.060 --> 03:39.870 +관련 없는 게 많거나 크게 관련 없는 게 많죠 소중한 토큰을 다 쓰는 + +03:39.900 --> 03:44.310 +것보다 제거하는 게 나을 것 같았어요 + +03:45.030 --> 03:52.410 +메서드 스크럽이 있어요 이상한 캐릭터를 제거하고 여러 공간을 하나로 + +03:52.440 --> 03:59.990 +합치는 거죠 레벡스 닌자를 위한 레벡스 스캐너를 사용해서요 + +04:00.080 --> 04:03.500 +당신한텐 쉬운 일이겠죠 + +04:03.530 --> 04:11.450 +다른 분들을 위해 이건 스크립트의 종류 중 하나로 재사용할 수 있습니다 문자열의 다양한 문제를 + +04:11.450 --> 04:13.280 +제거하는 방법으로요 + +04:13.280 --> 04:17.840 +주피터 공책으로 시험하면서 틀에 적힌 대로 작동하는지 확인할 수도 + +04:17.870 --> 04:18.680 +있어요 + +04:19.070 --> 04:24.890 +이 라인을 언급할게요 제가 넣은 추가 트릭이거든요 특정 경우에 유용하죠 + +04:24.890 --> 04:29.270 +이건 데이터를 깊이 파고들어야만 발견할 수 있는 일종의 + +04:29.270 --> 04:30.500 +예죠 + +04:30.530 --> 04:38.180 +아마존에는 설명서에 숫자를 인용하는 제품이 많더라고요 + +04:38.180 --> 04:42.650 +부품 번호와 호환 가능 어쩌고저쩌고요 + +04:42.650 --> 04:50.720 +그런 부분 번호는 대개 8자리, 8문자 길이 혹은 더 길고 문자와 숫자를 포함하죠 + +04:50.720 --> 04:56.570 +여기서 문제는 토큰으로 바뀔 때 토큰을 많이 쓴다는 겁니다 왜냐하면 토큰은 + +04:56.600 --> 04:59.360 +어떤 어휘에도 없기 때문이죠 + +04:59.360 --> 05:06.500 +결국 여러분은 토큰을 전부 쑤셔 넣어야 합니다 토큰에 대한 무제한 수용력을 부분 번호를 나타내는 + +05:06.500 --> 05:11.330 +토큰으로 채우는 거죠 우리 모델과는 전혀 상관없는 거예요 + +05:11.360 --> 05:19.190 +이 줄의 역할은 8자 이상의 글자가 있고 그 안에 숫자가 있는 단어가 있다면 + +05:19.190 --> 05:23.180 +그 단어를 삭제하라는 거죠 + +05:23.180 --> 05:24.950 +주의를 분산시킬 거예요 + +05:25.160 --> 05:30.440 +이걸 강조하는 이유는 다시 말씀드리지만 이런 발견은 데이터를 깊이 파고들어야만 + +05:30.440 --> 05:32.720 +발견할 수 있어요 + +05:32.750 --> 05:37.190 +많은 예시를 보고 이런 일이 일어나는 걸 보고 이걸 발견하게 되죠 + +05:37.190 --> 05:39.410 +이걸 시도해볼 기회가 있어요 + +05:39.410 --> 05:44.630 +모델을 재실행하면 지난 몇 주 동안 한두 번 했던 것 같은데 향상된 + +05:44.930 --> 05:51.860 +걸 발견하게 됩니다 데이터가 더 풍부해지고 정확도도 더 높아졌거든요 + +05:52.460 --> 05:56.080 +그게 중요한 과정이에요 + +05:56.530 --> 06:06.040 +그리고 메서드 PARSE를 사용하는데 데이터 포인트를 취하고 다양한 스크래핑과 + +06:06.040 --> 06:10.180 +스트러핑을 해서 프롬프트로 바꾸죠 + +06:10.420 --> 06:14.890 
+프롬프트와 함께 그 안의 토큰 개수도 세죠 + +06:14.890 --> 06:17.440 +잠시 후에 프롬프트를 보실 거예요 + +06:17.440 --> 06:23.110 +프롬프트는 LLM으로 전달될 것인데 get을 완료하도록 요청될 거예요 + +06:23.230 --> 06:25.510 +비용은 얼마나 드는지 묻네요 + +06:25.510 --> 06:27.490 +비용이 들 거예요 + +06:28.390 --> 06:31.870 +프롬프트를 볼 수 있는 기능이 있어요 + +06:31.900 --> 06:36.580 +테스트 프롬프트라는 것도 있어요 프롬프트와 같지만 + +06:36.580 --> 06:38.410 +답을 보여주진 않죠 + +06:38.440 --> 06:42.370 +훈련 중에 사용될 프롬프트 안에 답이 있어요 + +06:42.370 --> 06:48.160 +그래서 훈련을 하다 보면 시험 시간에 답을 예측하는 능력이 점점 더 좋아져요. Get it. + +06:48.160 --> 06:50.620 +답을 보여드리기 싫어요 + +06:50.620 --> 06:54.540 +텍스트를 주고 올바른 답이 나오는지 봐야죠 + +06:54.540 --> 06:58.680 +훈련과 테스트 프롬프트가 두 개 있는데요 + +06:58.710 --> 07:03.660 +나중에 데이터를 훈련 세트와 테스트 세트로 나누는 걸 얘기할 거예요 + +07:03.900 --> 07:05.520 +더 보게 될 거예요 + +07:05.550 --> 07:07.350 +나중에 더 명확해질 거예요 + +07:08.160 --> 07:10.740 +이게 아이템 클래스죠 + +07:10.740 --> 07:13.650 +이걸 좀 더 자세히 보세요 + +07:13.650 --> 07:17.880 +하지만 걱정 마세요 이 물건들을 보면서 많은 시간을 보낼 테니까요 + +07:17.880 --> 07:21.840 +Get it으로 이 기능성 기능에 대한 좋은 정보를 얻을 수 있어요 + +07:21.990 --> 07:30.720 +이제 여기서 할 것은 이 아이템 객체를 만드는 겁니다 데이터 집합에 있는 모든 + +07:30.720 --> 07:33.120 +것에 대해서요 + +07:33.450 --> 07:36.090 +지금 실행해보죠 + +07:37.110 --> 07:40.620 +이건 그 코드를 통해 실행되고 있어요 + +07:40.620 --> 07:43.200 +문지르는 거예요 + +07:43.200 --> 07:45.540 +파트 넘버 같은 걸 제거하는 거죠 + +07:45.540 --> 07:51.540 +이상한 캐릭터들을 공간으로 대체하는 거죠 + +07:51.870 --> 08:00.140 +프롬프트를 생성해서 여러 개의 토큰에 맞게 하는 것이죠 + +08:00.140 --> 08:08.060 +현재 모든 게 진행 중이고 4만여 개의 가전제품을 위한 일이 될 거예요 + +08:08.060 --> 08:11.450 +가격이 붙은 가전제품요 + +08:11.570 --> 08:14.870 +이제 마무리할 때가 된 것 같아요 + +08:17.240 --> 08:20.990 +다 익는 동안 제가 볼 준비를 할게요 + +08:20.990 --> 08:21.680 +다 됐어요 + +08:21.830 --> 08:22.490 +됐어요 + +08:22.490 --> 08:26.750 +첫 번째 것을 보도록 하죠 + +08:28.040 --> 08:33.440 +첫 번째 키트는 랙 롤러 스터드 조립 키트예요 + +08:33.440 --> 08:37.580 +아미의 풀 팩이 어쩌고저쩌고 부품을 대체한대요 + +08:37.610 --> 08:43.940 +이게 그 항목의 제목이고 가격은 9달러예요 + +08:43.940 --> 08:46.910 +그 항목의 제목에서 그걸 볼 수 있어요 + +08:46.910 --> 08:50.390 +파트 번호가 있어요 긴 파트 번호요 + +08:50.420 --> 08:51.260 +다른 것도 
보죠 + +08:51.260 --> 08:52.980 +첫 번째 항목을 보죠 + +08:53.760 --> 08:56.310 +첫 번째 항목이죠 + +08:56.340 --> 09:00.750 +호환되는 A 도어 피벗 블록 호환이요 + +09:00.780 --> 09:01.680 +켄모어 키친에이드요 + +09:01.680 --> 09:03.480 +메이택 월풀 냉장고예요 + +09:03.510 --> 09:06.090 +다시 말하지만 많은 파트 번호가 있어요 + +09:06.300 --> 09:18.390 +이제 어떻게 되는지 보죠 이 함수가 생성된 항목을 프롬프트에서 볼게요 + +09:18.390 --> 09:20.070 +다시 해 봐요 + +09:21.060 --> 09:22.260 +프린트하죠 + +09:22.260 --> 09:25.740 +그래서 멋진 빈 행과 함께 형식이 나타나요 + +09:28.410 --> 09:30.930 +프롬프트가 말하는 게 이거예요 + +09:30.930 --> 09:33.570 +이건 얼마 정도 할까요? + +09:33.600 --> 09:35.640 +그 얘기는 최대한 많이 할게요 + +09:35.670 --> 09:36.810 +나중에 봐요 + +09:36.810 --> 09:42.300 +왜 그렇게 했는지 장단점이 뭔지 얘기해 보죠 + +09:42.300 --> 09:45.210 +이건 얼마 정도 하나요? + +09:45.960 --> 09:52.780 +여기 보면 한 줄은 제목이고 한 줄은 설명이에요 + +09:52.780 --> 09:58.870 +여러분이 보실 것은 이 부분 번호가 이 설명에서 뽑힌 겁니다 잘려진 + +09:58.870 --> 10:00.970 +걸 보실 수 있죠 + +10:00.970 --> 10:03.760 +패의 개수가 다 되면요 + +10:03.760 --> 10:09.100 +제가 보관한 건 180토큰이 조금 안 돼요 + +10:09.100 --> 10:10.930 +여기서 볼 수 있죠 + +10:11.110 --> 10:16.300 +이걸 읽어 보면 그 물건에 대한 풍부한 설명이 나와요 + +10:16.300 --> 10:18.730 +훈련에는 충분할 거예요 + +10:19.330 --> 10:21.760 +다음 사진을 보죠 + +10:22.000 --> 10:25.570 +이건 물론 피벗 블록이고요 + +10:25.600 --> 10:27.130 +도어 피벗 블록이에요 + +10:27.160 --> 10:33.070 +100번과 제빙기 기계요 + +10:33.190 --> 10:35.650 +삼성 부품이에요 + +10:36.100 --> 10:41.200 +여기 보시면 부품과 대체 부품들이 많이 들어 있어요 + +10:41.200 --> 10:46.990 +아까도 보셨듯이 이 공간은 저렴한 교체 부품 + +10:46.990 --> 10:50.400 +때문에 꽉 차게 될 거예요 + +10:50.400 --> 10:56.460 +하지만 좀 놀랍게도 이 부분은 118달러예요 그리 간단한 부분이 아니죠 + +10:56.880 --> 11:02.310 +이 삼성 조립식 아이스메이커 메크는 필요 없었으면 좋겠어요 + +11:03.180 --> 11:09.120 +자, 이건 훈련 시간을 보는 건데요 + +11:09.120 --> 11:12.330 +훈련할 때 통과시킬 것들이에요 + +11:12.330 --> 11:20.940 +모델에게 이걸 주고 훈련 기간 동안 이 가격을 어떻게 재현할지 배우게 + +11:20.940 --> 11:22.830 +될 거예요 + +11:22.830 --> 11:24.810 +시험 시간에는요? + +11:24.810 --> 11:29.010 +모델이 효과가 있는지 평가할 때는 어떨까요? 
+ +11:29.010 --> 11:35.880 +100번 아이템을 봅시다 테스트 시간에 어떻게 하는지 보죠 + +11:35.880 --> 11:37.260 +그러죠 + +11:39.660 --> 11:42.600 +이걸 모델로 제공하세요 + +11:42.630 --> 11:46.590 +똑같은데 여기서 끝나죠 + +11:46.620 --> 11:54.420 +물론 그 아이디어는 모델은 이것의 정말 많은 예들을 봤을 거라는 거죠 정말 다양한 + +11:54.420 --> 12:01.980 +다양한 항목을 다루고 런타임에 이걸 다시 보여드릴 때 어떻게 완료하는지 알 + +12:01.980 --> 12:09.120 +겁니다 이 설명을 기반으로 미묘한 이해를 잘해서 이 가격을 완료하도록 + +12:09.120 --> 12:10.740 +돕겠죠 + +12:12.030 --> 12:12.960 +좋아요 + +12:12.990 --> 12:22.230 +이 아이템들에 토큰이 얼마나 많은지 봅시다 다른 다이어그램을 이용해서요 + +12:22.230 --> 12:28.860 +가장 많은 패가 178개인 걸 알 수 있어요 + +12:28.890 --> 12:32.850 +180까지 못 올라가고 평균 176까지 가죠 Get it + +12:32.850 --> 12:35.010 +정말 꽉 들어찼어요 + +12:35.340 --> 12:43.650 +그래서 일반적으로 이 정도 양의 정보를 가진 데이터 세트를 선택하고 다듬었어요 + +12:43.650 --> 12:47.250 +180냥까지 나올 거예요 + +12:47.250 --> 12:51.470 +훈련할 때 큰 도움이 될 것입니다 아이템을 지원하는 + +12:51.470 --> 12:57.830 +데 필요한 최대 토큰의 수를 알 수 있고 프론티어 모델을 사용할 때 비용을 절감할 + +12:57.830 --> 13:00.350 +수 있기 때문이죠 + +13:01.640 --> 13:02.450 +네 + +13:02.450 --> 13:09.800 +그런 다음 우리가 선택한 이 아이템의 가격 분배를 다시 한 번 보죠 + +13:10.160 --> 13:11.450 +시작할게요 + +13:12.020 --> 13:16.190 +평균 가격이 100달러예요 + +13:16.340 --> 13:21.560 +최고 11,000달러까지 받을 수 있어요 + +13:21.560 --> 13:29.450 +잡초를 제거하는 과정에서 엄청나게 비싼 전자레인지를 제거했어요 + +13:29.570 --> 13:31.760 +하지만 여전히 꽤 비싼 걸 사야 해요 + +13:31.790 --> 13:36.440 +제가 위에 쓴 걸 복제하면 그게 뭔지 알 수 있어요 + +13:36.590 --> 13:43.640 +지금도 보시다시피 아주 저렴한 물건 쪽으로 유통되고 있어요 우리가 본 + +13:43.640 --> 13:46.730 +것처럼 부품을 대체하는 거죠 + +13:46.730 --> 13:51.010 +다음에 조사할 또 다른 지역이네요 + +13:51.910 --> 13:57.790 +이 데이터 집합을 시각화하는 건 앞으로 자주 하게 될 + +13:57.790 --> 13:58.810 +거예요 + +13:58.840 --> 14:01.420 +다양한 방식으로 많은 걸 하게 될 거예요 + +14:01.450 --> 14:06.280 +맷플로틀리브의 다양한 기능을 활용할 수 있어서 좋아요 + +14:06.280 --> 14:12.100 +그중 하나는 다양한 색상의 차트를 만들 수 있다는 거죠 + +14:12.100 --> 14:16.660 +그 색들이 뭔지 알고 싶으시면 Mattplotlib 페이지로 + +14:16.660 --> 14:23.110 +연결되는 링크를 달아 두었습니다 사용할 수 있는 다양한 색 배열을 설명하고 있습니다 xkcd + +14:23.110 --> 14:25.360 +색상도 포함해서요 + +14:25.360 --> 14:27.610 +그걸 살펴보는 건 좋은 일이죠 + +14:27.760 
--> 14:32.680 +이건 책갈피로 남기기 좋은 작은 단서예요 + +14:32.710 --> 14:38.800 +오늘 팁을 하나 더 드리자면 지금 하셔야 할 일은 아이템 클래스를 + +14:38.830 --> 14:40.570 +보시는 거예요 + +14:40.570 --> 14:42.100 +제가 너무 빨리 지나갔어요 + +14:42.100 --> 14:49.660 +데이터 머징 스크러빙이 좀 더 섬뜩하기 때문이죠 실제 데이터 예제에 근거해 데이터를 + +14:49.660 --> 14:53.770 +최대한 고품질로 만드는 거예요 + +14:54.130 --> 14:59.680 +제가 너무 자세히 설명해서 지루하지 않으셨다면 이제 직접 들어가서 자세히 보시리라 + +14:59.680 --> 15:01.780 +믿었기 때문일 거예요 + +15:01.810 --> 15:10.510 +JupyterLab을 이용해 이런 기능이 어떻게 데이터를 정화했는지 조사하고 이해하세요 + +15:10.510 --> 15:18.310 +덕분에 약 180개의 토큰이 생겼습니다 각 데이터 포인트에 대한 풍부한 설명과 + +15:18.310 --> 15:26.890 +단어들이죠 각각의 항목은 미래에 훈련과 테스트 프롬프트로 사용될 거예요 + +15:27.220 --> 15:34.000 +다음에는 더 많은 제품을 결합하도록 확장할 거예요 + +15:34.000 --> 15:39.700 +이 데이터 모음이 크다고 생각했다면 아직 아무것도 못 본 거예요 + +15:39.700 --> 15:41.260 +그러니 준비하세요 + +15:41.260 --> 15:45.730 +하지만 먼저 슬라이드 몇 개만 더 보고 마무리하죠 diff --git a/week5/community-contributions/subtitles/srts/59472067/en_US.srt b/week5/community-contributions/subtitles/srts/59472067/en_US.srt new file mode 100755 index 0000000..49166ac --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472067/en_US.srt @@ -0,0 +1,82 @@ +WEBVTT + +00:00.920 --> 00:04.850 +So we've covered steps 1 to 4 of the five step strategy. + +00:04.850 --> 00:10.010 +And that brings us to step five, which is productionize the one that maybe you haven't thought as much + +00:10.010 --> 00:11.240 +about to date. + +00:11.240 --> 00:13.520 +And this is about saying we built our model. + +00:13.520 --> 00:16.250 +It's time for prime time. + +00:16.730 --> 00:23.810 +We typically will start by defining an API that can be used to call our model to call our code. + +00:23.810 --> 00:32.570 +That itself is either using a open source model or making a call to a frontier model with with various + +00:32.780 --> 00:36.680 +scaffolding around it, like rag or prompt engineering. + +00:37.190 --> 00:42.500 +We then decide how are we going to host and deploy our model, our API. 
+ +00:42.530 --> 00:48.410 +We address things like how it's going to be monitored, the information security concerns, how will + +00:48.410 --> 00:49.580 +it scale up? + +00:49.730 --> 00:56.750 +We then measure the business metrics that we had identified all the way back in step one and continuously + +00:56.750 --> 01:02.390 +retrain, measure performance, and improve our model on an ongoing basis. + +01:02.420 --> 01:06.320 +These are all part of the Productionize activity. + +01:07.820 --> 01:14.660 +And so with that, where are we in our business problem that we've taken on to predict product prices. + +01:14.660 --> 01:17.600 +So the first step understand we've done it. + +01:17.630 --> 01:18.410 +We understood it. + +01:18.440 --> 01:19.850 +We've talked about metrics. + +01:19.850 --> 01:21.170 +We know what we've got to do. + +01:21.200 --> 01:23.900 +Preparation is what we're doing right now. + +01:23.900 --> 01:26.630 +We are in the middle of preparing our data set. + +01:26.630 --> 01:30.290 +And then we need to select our model or models. + +01:30.290 --> 01:32.300 +We need to customize. + +01:32.390 --> 01:35.060 +And then finally we need to productionize. + +01:35.060 --> 01:38.060 +So we will be going through all of those steps. + +01:38.150 --> 01:43.400 +But we will be first of all finishing the second step preparation. + +01:43.400 --> 01:49.520 +And with that let's go right now back to JupyterLab and get to preparing. 
diff --git a/week5/community-contributions/subtitles/srts/59472067/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472067/ja_JP.srt new file mode 100755 index 0000000..8f98525 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472067/ja_JP.srt @@ -0,0 +1,76 @@ +WEBVTT + +00:00.920 --> 00:04.850 +というわけで、 5つのステップのうち、 ステップ1から4までを説明した。 + +00:04.850 --> 00:11.240 +そして、 ステップ5、 つまり、 これまであまり考えていなかったかもしれないが、 プロダクション化だ。 + +00:11.240 --> 00:13.520 +そしてこれは、 私たちがモデルを構築したと言うことだ。 + +00:13.520 --> 00:16.250 +プライムタイムの時間だ。 + +00:16.730 --> 00:23.810 +私たちは通常、 コードを呼び出すためにモデルを呼び出すのに使えるAPIを定義することから始める。 + +00:23.810 --> 00:36.680 +それ自体は、 オープンソースのモデルを使うか、 ラグやプロンプト・エンジニアリングのような様々な足場を持つフロンティア・モデルを呼び出すかのどちらかだ。 + +00:37.190 --> 00:42.500 +そして、 我々のモデルやAPIをどのようにホストし、 デプロイするかを決める。 + +00:42.530 --> 00:48.410 +どのように監視するのか、 情報セキュリティーへの懸念、 どのように規模を拡大するのか、 + +00:48.410 --> 00:49.580 +などだ。 + +00:49.730 --> 00:56.750 +そして、 ステップ1で特定したビジネス指標を測定し、 継続的に再訓練を行い、 + +00:56.750 --> 01:02.390 +パフォーマンスを測定し、 モデルを改善していく。 + +01:02.420 --> 01:06.320 +これらはすべてプロダクショナイズの活動の一部である。 + +01:07.820 --> 01:14.660 +というわけで、 製品価格を予測するために私たちが取り組んでいるビジネス上の問題とは、 どのようなものなのでしょうか? 
+ +01:14.660 --> 01:17.600 +だから、 最初の一歩は理解できた。 + +01:17.630 --> 01:18.410 +私たちはそれを理解していた。 + +01:18.440 --> 01:19.850 +メトリクスの話はした。 + +01:19.850 --> 01:21.170 +やるべきことは分かっている。 + +01:21.200 --> 01:23.900 +今やっているのは準備だ。 + +01:23.900 --> 01:26.630 +データセットを準備している最中だ。 + +01:26.630 --> 01:30.290 +そして、 モデルを選択する必要がある。 + +01:30.290 --> 01:32.300 +カスタマイズが必要だ。 + +01:32.390 --> 01:35.060 +そして最後にプロダクション化する必要がある。 + +01:35.060 --> 01:38.060 +だから私たちは、 そのすべてのステップを踏むことになる。 + +01:38.150 --> 01:43.400 +しかし、 まずは第2段階の準備を終える。 + +01:43.400 --> 01:49.520 +それではJupyterLabに戻って準備に取り掛かりましょう。 diff --git a/week5/community-contributions/subtitles/srts/59472067/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472067/ko_KR.srt new file mode 100755 index 0000000..c26d4a4 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472067/ko_KR.srt @@ -0,0 +1,82 @@ +WEBVTT + +00:00.920 --> 00:04.850 +5단계 전략 중 1단계에서 4단계를 살펴봤죠 + +00:04.850 --> 00:10.010 +이제 5단계로 넘어갑니다 지금까지 생각 안 해본 걸 제작하는 + +00:10.010 --> 00:11.240 +거죠 + +00:11.240 --> 00:13.520 +이건 모형을 만들었다는 걸 의미해요 + +00:13.520 --> 00:16.250 +황금 시간대예요 + +00:16.730 --> 00:23.810 +API 정의부터 시작합니다 모델을 호출하고 코드를 호출하는 데 사용되죠 + +00:23.810 --> 00:32.570 +오픈 소스 모델을 사용하거나 프론티어 모델을 호출하는 거죠 래그나 프롬프트 엔지니어링 + +00:32.780 --> 00:36.680 +같은 다양한 스캐폴딩을 둘러싼 모델요 + +00:37.190 --> 00:42.500 +그런 다음 모델, API 호스트 및 배포 방법을 결정하죠 + +00:42.530 --> 00:48.410 +어떻게 감시될지 정보 보안 문제나 어떻게 확장할지에 대해 + +00:48.410 --> 00:49.580 +얘기하죠 + +00:49.730 --> 00:56.750 +그다음 1단계에서 확인한 비즈니스 지표를 측정합니다 지속적인 재훈련과 + +00:56.750 --> 01:02.390 +성능 측정 그리고 지속적인 기반으로 모델을 개선하죠 + +01:02.420 --> 01:06.320 +모두 프로덕션 활동의 일부예요 + +01:07.820 --> 01:14.660 +그럼 제품 가격을 예측하기 위해 직면한 사업상의 문제는 어느 정도일까요? 
+ +01:14.660 --> 01:17.600 +첫 단계는 성공했다는 걸 이해하는 거죠 + +01:17.630 --> 01:18.410 +이해했어요 + +01:18.440 --> 01:19.850 +측정법에 대해 얘기했죠 + +01:19.850 --> 01:21.170 +뭘 해야 할지 알죠 + +01:21.200 --> 01:23.900 +지금 준비하는 중이에요 + +01:23.900 --> 01:26.630 +데이터 세트를 준비 중이에요 + +01:26.630 --> 01:30.290 +그리고 모델을 선택해야 해요 + +01:30.290 --> 01:32.300 +맞춤형으로 만들어야 해요 + +01:32.390 --> 01:35.060 +마지막으로 생산해야 해요 + +01:35.060 --> 01:38.060 +이 모든 단계를 거치게 될 거예요 + +01:38.150 --> 01:43.400 +하지만 먼저 2단계 준비를 마칠 거예요 + +01:43.400 --> 01:49.520 +이제 유피터랩으로 돌아가서 준비하도록 하죠 get it diff --git a/week5/community-contributions/subtitles/srts/59472137/en_US.srt b/week5/community-contributions/subtitles/srts/59472137/en_US.srt new file mode 100755 index 0000000..a3fddf7 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472137/en_US.srt @@ -0,0 +1,61 @@ +WEBVTT + +00:00.740 --> 00:05.720 +Well, well, well, it's been a long day, but congratulations, you've made it. + +00:05.750 --> 00:12.260 +We've gone through and curated a pristine data set, working very hard to make sure that it's got a + +00:12.260 --> 00:16.310 +good sample representation of the data we want to train by. + +00:16.340 --> 00:23.450 +And at the end, we of course turned it into a hugging face data set, a data set dict with the training + +00:23.450 --> 00:28.310 +and test parts to it, and we uploaded it to the Hugging Face Hub. + +00:28.310 --> 00:31.880 +And if you've gone through all of these instructions, which I know you will have done, you've been + +00:31.880 --> 00:36.410 +following along in JupyterLab, getting comfortable with the different things that I've been doing then + +00:36.410 --> 00:44.210 +now you'll be rewarded with your own data set, sitting there in the hub that you will be able to use + +00:44.210 --> 00:46.820 +in the subsequent sessions. + +00:46.820 --> 00:50.060 +So congratulations on on on getting that far. 
+ +00:50.270 --> 00:56.660 +Uh, so we've added to the skills that you've acquired, the understanding of the five step strategy + +00:56.660 --> 01:03.560 +to solving commercial business problems with Llms, uh, weighing up the three different optimization + +01:03.560 --> 01:11.900 +techniques and some real detail in data set curation, including some thorny, uh, bits of code there + +01:11.900 --> 01:18.020 +that I do hope you'll look through and understand them and then use them in your projects, like sampling + +01:18.050 --> 01:20.180 +from existing data sets. + +01:20.630 --> 01:25.550 +So next time, next time we're going to be talking about baseline models. + +01:25.550 --> 01:29.210 +We're going to be creating a traditional machine learning solution. + +01:29.210 --> 01:36.320 +And we're going to be applying some traditional and advanced techniques to see what gives us good results. + +01:36.320 --> 01:38.120 +And I'm excited for it. + +01:38.120 --> 01:39.440 +And I will see you there. 
diff --git a/week5/community-contributions/subtitles/srts/59472137/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472137/ja_JP.srt new file mode 100755 index 0000000..f657c88 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472137/ja_JP.srt @@ -0,0 +1,43 @@ +WEBVTT + +00:00.740 --> 00:05.720 +いやはや、 いやはや、 長い一日だったが、 おめでとう。 + +00:05.750 --> 00:16.310 +私たちは、 トレーニングに使用するデータのサンプルとなるよう、 細心の注意を払ってデータセットを作成しました。 + +00:16.340 --> 00:23.450 +そして最後に、 私たちはもちろんそれをハグする顔のデータセット、 つまりトレーニング部分とテスト部分を持つデータセットディクトにし、 + +00:23.450 --> 00:28.310 +ハグする顔ハブにアップロードした。 + +00:28.310 --> 00:31.880 +JupyterLabで私がしてきた様々なことに慣れ親しみながら、 + +00:31.880 --> 00:46.820 +この説明をすべて読み終えたなら、 この後のセッションで使える自分のデータセットがハブの中にあることになります。 + +00:46.820 --> 00:50.060 +ここまで来られたことを祝福したい。 + +00:50.270 --> 01:03.560 +Llmsを使った商業的なビジネス問題を解決するための5つのステップ戦略の理解、 + +01:03.560 --> 01:20.180 +3つの異なる最適化テクニックの比較、 データセットのキュレーションの詳細などです。 + +01:20.630 --> 01:25.550 +というわけで、 次回はベースラインモデルについて。 + +01:25.550 --> 01:29.210 +我々は伝統的な機械学習ソリューションを作るつもりだ。 + +01:29.210 --> 01:36.320 +そして、 伝統的なテクニックと先進的なテクニックを適用し、 何が良い結果をもたらすかを見極めるつもりだ。 + +01:36.320 --> 01:38.120 +そして、 私はそれを楽しみにしている。 + +01:38.120 --> 01:39.440 +そこでまた会おう diff --git a/week5/community-contributions/subtitles/srts/59472137/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472137/ko_KR.srt new file mode 100755 index 0000000..1bcc653 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472137/ko_KR.srt @@ -0,0 +1,58 @@ +WEBVTT + +00:00.740 --> 00:05.720 +이런, 긴 하루였지만 축하합니다, 통과했어요 + +00:05.750 --> 00:12.260 +우린 신비의 데이터 세트를 검토하고 큐레이팅했습니다 우리가 훈련하고자 하는 데이터의 + +00:12.260 --> 00:16.310 +좋은 샘플 표본을 갖도록 아주 열심히 일했죠 + +00:16.340 --> 00:23.450 +결국 안아주기 데이터셋으로 전환했습니다 트레이닝과 테스트 파트가 포함된 데이터셋 + +00:23.450 --> 00:28.310 +독자였죠 안아주기 페이스 허브에 업로드했어요 + +00:28.310 --> 00:31.880 +이 모든 지침을 거쳤다면 물론 그러셨겠지만 + +00:31.880 --> 00:36.410 +JupyterLab을 따라왔다면 제가 하는 다양한 + +00:36.410 --> 00:44.210 +것에 익숙해지셨을 겁니다 그럼 여러분만의 데이터 세트를 얻게 될 겁니다 허브에 있는 데이터는 + +00:44.210 --> 00:46.820 
+다음 세션에서 사용할 수 있죠 + +00:46.820 --> 00:50.060 +그 정도까지 온 걸 축하해요 + +00:50.270 --> 00:56.660 +그래서 여러분이 습득한 기술에 5단계 전략을 추가했습니다 LM을 + +00:56.660 --> 01:03.560 +이용한 비즈니스 문제를 해결하는 5단계 전략 이해와 세 가지 최적화 기술을 + +01:03.560 --> 01:11.900 +재고 데이터셋 큐레이션에 관한 세부 사항을 추가했습니다 그중에는 여러분이 살펴보고 이해해서 + +01:11.900 --> 01:20.180 +프로젝트에 활용할 수 있는 코드도 있습니다 기존 데이터셋의 샘플링 같은 거죠 + +01:20.630 --> 01:25.550 +다음 시간에는 베이스라인 모델에 대해 얘기할 거예요 + +01:25.550 --> 01:29.210 +전통적인 머신 러닝 솔루션을 만들 거예요 + +01:29.210 --> 01:36.320 +어떤 결과가 나올지 보기 위해 전통적이고 고급 기술을 적용할 거예요 + +01:36.320 --> 01:38.120 +정말 기대돼요 + +01:38.120 --> 01:39.440 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/59472307/en_US.srt b/week5/community-contributions/subtitles/srts/59472307/en_US.srt new file mode 100755 index 0000000..f66805b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472307/en_US.srt @@ -0,0 +1,325 @@ +WEBVTT + +00:00.770 --> 00:02.240 +Welcome to week six. + +00:02.240 --> 00:03.320 +Day two a day. + +00:03.320 --> 00:09.560 +When we get back into the data, we look back in anger at our data sets, and we build a massive data + +00:09.590 --> 00:13.520 +set that is going to allow us to move the needle when we get to training. + +00:13.640 --> 00:18.470 +But first, the majority of today is actually going to be spent talking. + +00:18.470 --> 00:24.380 +It's going to be a talky, uh, it's going to be a session when we're speaking about strategy, perhaps + +00:24.380 --> 00:29.630 +not the most gripping stuff, not why you signed up, but it is very important. + +00:29.630 --> 00:34.880 +This is good foundational information that is going to ensure that we're approaching what's to come + +00:34.880 --> 00:36.050 +in the right way. + +00:36.050 --> 00:42.170 +In particular, I want to talk to you about a strategy for how you go from facing a business problem + +00:42.170 --> 00:48.020 +all the way through to an LM solution in production and the steps it takes along that path. 
+ +00:48.020 --> 00:52.340 +And I want to tell you that now, because we're about to do it, we're going to go through that exercise + +00:52.340 --> 00:54.170 +for our real commercial problem. + +00:54.170 --> 00:59.340 +And it's important that you're able to relate to the journey that we go through, because you'll be + +00:59.340 --> 01:02.550 +doing the same thing with your business problems after this. + +01:02.580 --> 01:08.250 +I also want to take a moment to compare the three types of technique that we'll be talking about, or + +01:08.250 --> 01:11.640 +that we've talked about for optimizing models. + +01:11.640 --> 01:16.590 +I'm talking about whether we're prompting using Rag or using fine tuning. + +01:16.590 --> 01:23.100 +And it's there's a lot of confusion about in what situations do you pick one of those different approaches. + +01:23.100 --> 01:29.040 +And I want to demystify that and just give you some concrete examples of how you go about deciding what + +01:29.070 --> 01:30.270 +technique to use. + +01:31.050 --> 01:38.970 +So first then let me talk about the five step strategy to applying a model to a commercial problem. + +01:39.510 --> 01:42.150 +And the first step is understanding. + +01:42.150 --> 01:47.130 +This is about really getting deep into the business requirements and understanding what problem are + +01:47.130 --> 01:47.940 +you solving? + +01:47.940 --> 01:49.560 +How will you judge success? + +01:49.560 --> 01:56.470 +What are the Non-functionals we'll talk about in a second and make sure that that's all carefully documented + +01:56.560 --> 02:06.430 +preparation is then about things like testing baseline models and curating your data set and and generally + +02:06.430 --> 02:08.620 +preparing yourself for what is to come. 
+
+02:08.620 --> 02:14.800
+And what is to come initially is selecting the models, the models that are going to be the either the
+
+02:14.800 --> 02:19.240
+the model you're going to be using, or the handful of models you'll be using as part of the rest of
+
+02:19.240 --> 02:19.990
+the project.
+
+02:19.990 --> 02:25.210
+And this is where we'll draw on a lot of the content from prior weeks.
+
+02:25.210 --> 02:30.430
+When we looked at leaderboards and analyzed the pros and cons of different models.
+
+02:30.910 --> 02:38.890
+Customize is where we use one of the big techniques, like Rag or fine tuning to get more juice out
+
+02:38.890 --> 02:44.650
+of the model, and then productionize something we've not talked about at all, but is hugely important,
+
+02:44.650 --> 02:49.600
+which is then once we've done, we've built and trained our model and it's performing great.
+
+02:49.600 --> 02:50.710
+What comes next?
+
+02:50.710 --> 02:57.230
+Because it's not exactly like the Jupyter notebook that we've been hacking away at is going to end up
+
+02:57.230 --> 02:57.950
+in production.
+
+02:57.950 --> 03:01.280
+Something more has to be done, and that's what we will talk about in a sec.
+
+03:02.030 --> 03:03.800
+Let's start with step one though.
+
+03:03.800 --> 03:06.680
+So understanding and this is all common sense.
+
+03:06.680 --> 03:08.720
+But you know this stuff can't be said enough.
+
+03:08.720 --> 03:13.100
+So just very briefly of course you need to gather the business requirements.
+
+03:13.100 --> 03:14.960
+You need to evaluate.
+
+03:14.960 --> 03:18.650
+You need to understand up front how will success be measured.
+
+03:18.650 --> 03:20.000
+Super important.
+
+03:20.000 --> 03:25.820
+And we're not just talking about the data science metrics that we know well, but also how will your
+
+03:25.820 --> 03:31.490
+your users and your your business sponsors decide whether the project has achieved its goals?
+ +03:31.490 --> 03:36.980 +What are the ultimate business metrics that you may not have as immediate influence over, but they + +03:36.980 --> 03:38.210 +need to be understood. + +03:38.600 --> 03:42.710 +You need to dig into the data as we've been doing the quantity of it. + +03:42.740 --> 03:43.460 +How much? + +03:43.490 --> 03:46.460 +What's the DQ, what's the data quality situation like? + +03:46.460 --> 03:50.540 +And the format, is it structured, unstructured or a bit of both? + +03:50.540 --> 03:55.990 +really make sure that that is deeply understood, because that will affect the model you choose and + +03:55.990 --> 03:58.150 +how you go about approaching this. + +03:58.780 --> 04:01.510 +Determining the non-functional requirements. + +04:01.540 --> 04:08.770 +Non-functional are stuff like your budget, the how, how much it will need to scale to latency is is + +04:08.800 --> 04:13.780 +you know how long you can you can wait for each response back from the model if it needs to be a split + +04:13.780 --> 04:15.130 +second response. + +04:15.280 --> 04:18.550 +Um, and also understanding time to market. + +04:18.580 --> 04:23.620 +Is there a requirement that this is built in a very short time frame, or is there time to to be working + +04:23.620 --> 04:24.070 +on this? + +04:24.070 --> 04:27.940 +And of course, if it's something that's needed in a very short time frame, it will lend itself to + +04:27.970 --> 04:30.280 +a frontier model using an API. + +04:30.280 --> 04:35.080 +And you know, when it comes to the user interface, something like Gradio is of course allows you to + +04:35.110 --> 04:36.940 +be up and running in a matter of minutes. + +04:36.940 --> 04:41.140 +So this will steer some of your later decisions. + +04:42.160 --> 04:46.900 +When it comes to preparation, there are really three activities involved. + +04:46.900 --> 04:52.790 +First of all, you need to research what is already out there, what kind of existing solutions. 
+
+04:52.790 --> 04:59.270
+Solve this problem today and get a very good handle for how well they perform and what they do already.
+
+04:59.300 --> 05:05.210
+As part of that, you should look at at solutions that don't involve data science at all.
+
+05:05.240 --> 05:10.190
+Maybe there's solutions that just have a few if statements in them, and then look at some traditional
+
+05:10.190 --> 05:14.330
+data science solutions, perhaps like linear regression kind of model.
+
+05:14.330 --> 05:17.840
+If this is something which is trying to predict product prices, say.
+
+05:17.930 --> 05:21.620
+Then that would be a place that you would initially go to.
+
+05:22.040 --> 05:28.100
+And even if you might say to me, look, I absolutely know, I have no question that an LLM is going
+
+05:28.100 --> 05:31.940
+to massively outperform what's already out there or these existing models.
+
+05:31.940 --> 05:33.680
+I don't care how they are today.
+
+05:33.680 --> 05:37.520
+The answer would be it's still worth doing this because it gives you a baseline.
+
+05:37.520 --> 05:43.580
+It gives you a starting point on which you will improve, and you'll be able to demonstrate the improvement
+
+05:43.580 --> 05:48.150
+in a quantified way based on the investment that's made in the new model.
+
+05:48.150 --> 05:53.130
+So even as just a baseline, this is a valuable exercise to do.
+
+05:53.130 --> 05:56.220
+But more than that, you need to know what is already out there.
+
+05:57.120 --> 06:00.480
+Then comparing the relevant LLMs.
+
+06:00.480 --> 06:06.240
+First of all, of course you remember we divided this into the basics stuff like the price, the context
+
+06:06.270 --> 06:10.200
+length, the licensing constraints and then the benchmarks.
+
+06:10.200 --> 06:16.710
+Looking on on leaderboards, looking at arenas and understanding if there are specialist scores for
+
+06:16.710 --> 06:18.960
+what you're trying to do for this particular task.
+
+06:18.990 --> 06:23.370
+Using things like the seal specialist leaderboards from scale.
+
+06:23.490 --> 06:31.140
+Com that we that we mentioned last time, and of course curating the data, scrubbing it, pre-processing
+
+06:31.140 --> 06:31.410
+it.
+
+06:31.410 --> 06:35.910
+And then something that we haven't talked about particularly yet is splitting your data set.
+
+06:35.940 --> 06:42.060
+Typically you take all of your data and you split it into your training data, and then you reserve
+
+06:42.090 --> 06:46.610
+a chunk for what's called validation when you that you'll be using to evaluate your model, and then
+
+06:46.610 --> 06:48.980
+you reserve a final chunk for test.
+
+06:48.980 --> 06:54.080
+And that's something that you hold all the way out so that you can use the validation to be tweaking
+
+06:54.080 --> 06:56.090
+your hyperparameters and getting everything right.
+
+06:56.090 --> 07:02.870
+And at the very, very end, you will use the test to gauge the ultimate success of your model.
+
+07:03.410 --> 07:09.680
+So, uh, cleaning your data, pre-processing it, uh, which is parsing, which is what we've been
+
+07:09.680 --> 07:14.630
+doing, and then ultimately splitting it up is part of preparation.
+
+07:15.110 --> 07:23.720
+And then select this is something that we've done already, uh, choosing LLMs, uh, based on the criteria,
+
+07:23.720 --> 07:29.570
+experimenting with them and then training and validating with your curated data.
+
+07:29.570 --> 07:30.620
+We haven't done that yet.
+
+07:30.650 --> 07:33.560
+That's something that that we are excited to do.
+
+07:33.770 --> 07:41.930
+So I will now pause and we'll continue in the next session with the all important step four to optimize.
diff --git a/week5/community-contributions/subtitles/srts/59472307/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472307/ja_JP.srt new file mode 100755 index 0000000..8e237fd --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472307/ja_JP.srt @@ -0,0 +1,256 @@ +WEBVTT + +00:00.770 --> 00:02.240 +第6週へようこそ。 + +00:02.240 --> 00:03.320 +2日目は1日。 + +00:03.320 --> 00:13.520 +データに戻ったとき、 私たちはデータセットを怒りにまかせて振り返り、 トレーニングに入ったときに針を動かすことができるような巨大なデータセットを構築する。 + +00:13.640 --> 00:18.470 +その前に、 今日の大半は会話に費やされる。 + +00:18.470 --> 00:24.380 +このセッションは、 戦略について話すもので、 おそらく、 最も心をつかむような内容ではなく、 + +00:24.380 --> 00:29.630 +契約した理由でもないだろうが、 非常に重要なものだ。 + +00:29.630 --> 00:36.050 +これは、 これから起こることに正しい方法で取り組むための、 良い基礎情報となる。 + +00:36.050 --> 00:42.170 +特に、 ビジネス上の問題に直面してから、 本番でLMソリューションを実現するまでの戦略と、 + +00:42.170 --> 00:48.020 +そのステップについてお話ししたいと思います。 + +00:48.020 --> 00:54.170 +そして今、 私たちはそれを実行しようとしているのだから、 私たちの実際の商業的な問題について、 その練習をするつもりだということを伝えたい。 + +00:54.170 --> 01:02.550 +そして、 私たちが経験する旅に共感してもらうことが重要だ。 なぜなら、 この後、 自分のビジネスの問題で同じことをすることになるからだ。 + +01:02.580 --> 01:11.640 +また、 これからお話しする、 あるいはこれまでお話ししてきた、 モデルを最適化するための3種類のテクニックを少し比較してみたいと思います。 + +01:11.640 --> 01:16.590 +ラグを使ってプロンプトを出すのか、 微調整を使ってプロンプトを出すのかという話だ。 + +01:16.590 --> 01:23.100 +そして、 どのような状況でそれらの異なるアプローチのいずれかを選択するのかについて、 多くの混乱がある。 + +01:23.100 --> 01:30.270 +どのようなテクニックを使うべきか、 具体的な例を挙げて説明したい。 + +01:31.050 --> 01:38.970 +それではまず、 商業的な問題にモデルを適用するための5つのステップ戦略についてお話ししましょう。 + +01:39.510 --> 01:42.150 +そして最初のステップは理解することだ。 + +01:42.150 --> 01:47.940 +これは、 ビジネス要件に深く入り込み、 どのような問題を解決しようとしているのかを理解することである。 + +01:47.940 --> 01:49.560 +成功をどう判断するのか? 
+ +01:49.560 --> 01:56.470 +非機能とは何かについては後で話すが、 それをすべて入念に文書化し、 + +01:56.560 --> 02:08.620 +ベースラインモデルのテストやデータセットのキュレーションなど、 一般的に来るべきものに対する準備をすることだ。 + +02:08.620 --> 02:19.990 +そして、 最初にしなければならないのは、 使用するモデル、 あるいは残りのプロジェクトの一部として使用するモデルを選択することだ。 + +02:19.990 --> 02:25.210 +そして、 タイは前の週の内容を多く取り入れるだろう。 + +02:25.210 --> 02:30.430 +リーダーボードを見て、 さまざまなモデルの長所と短所を分析した。 + +02:30.910 --> 02:38.890 +カスタマイズとは、 ラグやファインチューニングといった大きなテクニックを使ってモデルからより多くの力を引き出すことです。 + +02:38.890 --> 02:49.600 +そして、 これまでまったくお話ししてきませんでしたが、 非常に重要なことをプロダクション化します。 + +02:49.600 --> 02:50.710 +次に何が来るのか? + +02:50.710 --> 02:57.950 +Jupyterノートブックが本番で使えるようになるとは限らないからだ。 + +02:57.950 --> 03:01.280 +もっと何かをしなければならない。 + +03:02.030 --> 03:03.800 +まずはステップ1から始めよう。 + +03:03.800 --> 03:06.680 +だから理解できるし、 これはすべて常識的なことだ。 + +03:06.680 --> 03:08.720 +でも、 こういうことはいくら言っても足りないんだ。 + +03:08.720 --> 03:13.100 +つまり、 ごく簡単に言えば、 ビジネス要件を収集する必要があるということだ。 + +03:13.100 --> 03:14.960 +評価する必要がある。 + +03:14.960 --> 03:18.650 +成功がどのように評価されるのか、 前もって理解しておく必要がある。 + +03:18.650 --> 03:20.000 +超重要だ。 + +03:20.000 --> 03:25.820 +また、 私たちがよく知っているデータ・サイエンスの指標についてだけでなく、 あなたのユーザーやあなたのビジネス・スポンサーは、 + +03:25.820 --> 03:31.490 +プロジェクトが目標を達成したかどうかをどのように判断するのでしょうか? + +03:31.490 --> 03:36.980 +最終的なビジネス指標とは何か、 それはあなたがすぐに影響を及ぼすことはできないかもしれないが、 + +03:36.980 --> 03:38.210 +理解する必要がある。 + +03:38.600 --> 03:42.710 +私たちが量をこなしてきたように、 データを掘り下げる必要がある。 + +03:42.740 --> 03:43.460 +いくらですか? + +03:43.490 --> 03:46.460 +DQ、 つまりデータクオリティの状況はどうなっているのか? + +03:46.460 --> 03:50.540 +形式は、 構造化されているのか、 構造化されていないのか、 あるいはその両方なのか? 
+ +03:50.540 --> 03:58.150 +それは、 あなたが選択するモデルや、 どのようにアプローチしていくかに影響するからだ。 + +03:58.780 --> 04:01.510 +非機能要件の決定 + +04:01.540 --> 04:08.770 +非機能的なものとは、 予算、 どのように、 どれくらいのスケールが必要なのか、 レイテンシーとは、 一瞬のレスポンスが必要な場合、 + +04:08.800 --> 04:15.130 +モデルからのレスポンスが返ってくるまでどれくらいの時間待つことができるのか、 といったようなものだ。 + +04:15.280 --> 04:18.550 +それと、 市場投入までの時間も理解している。 + +04:18.580 --> 04:24.070 +非常に短期間でこれを作らなければならないのか、 それともこれに取り組んでいる時間があるのか。 + +04:24.070 --> 04:30.280 +そしてもちろん、 非常に短期間で必要とされるものであれば、 APIを使ったフロンティア・モデルが適している。 + +04:30.280 --> 04:36.940 +そして、 ユーザー・インターフェースに関して言えば、 Gradioのようなものであれば、 もちろん数分で立ち上げることができる。 + +04:36.940 --> 04:41.140 +だから、 これが後の決断の指針になる。 + +04:42.160 --> 04:46.900 +準備に関しては、 実に3つの活動がある。 + +04:46.900 --> 04:52.790 +まず第一に、 何がすでにあるのか、 どのような既存のソリューションがあるのかを調査する必要がある。 + +04:52.790 --> 04:59.270 +今すぐこの問題を解決し、 彼らのパフォーマンスや仕事内容について、 非常に良いハンドルを手に入れよう。 + +04:59.300 --> 05:05.210 +その一環として、 データ・サイエンスをまったく伴わないソリューションにも目を向けるべきだ。 + +05:05.240 --> 05:14.330 +いくつかのif文だけのソリューションもあるだろうし、 線形回帰のような伝統的なデータサイエンス・ソリューションもあるだろう。 + +05:14.330 --> 05:17.840 +これが商品価格を予測しようとするものであれば、 例えばこうだ。 + +05:17.930 --> 05:21.620 +それなら、 最初はそこに行くことになるだろう。 + +05:22.040 --> 05:28.100 +そして、 たとえあなたが私に、 いいか、 私は絶対に知っている、 LLMがすでにあるもの、 あるいはこれらの既存のモデルを大幅に凌駕することに疑問の余地はない、 + +05:28.100 --> 05:31.940 +と言うかもしれない。 + +05:31.940 --> 05:33.680 +今の彼らがどうであろうと関係ない。 + +05:33.680 --> 05:37.520 +その答えは、 基準値が得られるので、 まだやる価値があるということだ。 + +05:37.520 --> 05:48.150 +そして、 新しいモデルへの投資に基づいて、 その改善を定量的に示すことができる。 + +05:48.150 --> 05:53.130 +だから、 単なる基準値としても、 これは貴重な練習になる。 + +05:53.130 --> 05:56.220 +しかしそれ以上に、 何がすでに世に出ているのかを知る必要がある。 + +05:57.120 --> 06:00.480 +そして、 関連するLMSを比較する。 + +06:00.480 --> 06:10.200 +まず第一に、 価格、 コンテキストの長さ、 ライセンスの制約、 そしてベンチマークといった基本的なものに分けて説明したのを覚えているだろう。 + +06:10.200 --> 06:18.960 +リーダーボードを見たり、 アリーナを見たり、 自分がやろうとしていることのスペシャリストのスコアがあるかどうかを理解する。 + +06:18.990 --> 06:23.370 +スケールからシールスペシャリストのリーダーボードのようなものを使用している。 + +06:23.490 --> 06:31.410 +そしてもちろん、 データのキュレーション、 スクラビング、 前処理。 + +06:31.410 --> 06:35.910 +そして、 まだ特に話していないのが、 データセットの分割だ。 + +06:35.940 
--> 06:42.060 +通常、 全データをトレーニング用データと、 モデルの評価に使う検証用データに分け、 + +06:42.090 --> 06:48.980 +最後にテスト用データに分けます。 + +06:48.980 --> 06:54.080 +そして、 ハイパーパラメータを微調整し、 すべてを正しくするために検証を利用できるように、 + +06:54.080 --> 06:56.090 +ずっと保持しておくものだ。 + +06:56.090 --> 07:02.870 +そして最後の最後には、 このテストであなたのモデルの最終的な成功を測るのだ。 + +07:03.410 --> 07:09.680 +つまり、 データのクリーニング、 前処理、 つまり解析、 + +07:09.680 --> 07:14.630 +そして最終的な分割は準備の一部なんだ。 + +07:15.110 --> 07:23.720 +LMSを基準に基づいて選択し、 実験し、 トレーニングし、 + +07:23.720 --> 07:29.570 +キュレーションしたデータで検証する。 + +07:29.570 --> 07:30.620 +まだやっていない。 + +07:30.650 --> 07:33.560 +それは、 私たちが楽しみにしていることだ。 + +07:33.770 --> 07:41.930 +というわけで、 ここで一旦中断し、 次回は重要なステップ4である最適化について話を続けることにする。 diff --git a/week5/community-contributions/subtitles/srts/59472307/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472307/ko_KR.srt new file mode 100755 index 0000000..089d23f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472307/ko_KR.srt @@ -0,0 +1,310 @@ +WEBVTT + +00:00.770 --> 00:02.240 +6주 차에 잘 오셨어요 + +00:02.240 --> 00:03.320 +하루 이틀씩요 + +00:03.320 --> 00:09.560 +데이터로 돌아가면 데이터셋에 분노하며 돌아봅니다 엄청난 양의 데이터셋을 구축해서 + +00:09.590 --> 00:13.520 +훈련할 때 진척이 있게 할 거예요 Get it + +00:13.640 --> 00:18.470 +하지만 먼저, 오늘 수업은 대부분 대화를 나눌 거예요 + +00:18.470 --> 00:24.380 +전략에 대해 이야기하는 세션이 될 것입니다. 아주 흥미로운 내용은 아닐 것입니다. + +00:24.380 --> 00:29.630 +왜 등록했는지는 모르겠지만 매우 중요한 내용이죠. 
+ +00:29.630 --> 00:34.880 +이건 좋은 기본 정보예요 올바른 방식으로 접근하고 있다는 걸 확실히 + +00:34.880 --> 00:36.050 +해줄 거예요 + +00:36.050 --> 00:42.170 +특히 전략에 대해 말씀드리고 싶습니다 비즈니스 문제에 직면하는 것에서부터 프로덕션에서 + +00:42.170 --> 00:48.020 +LM 솔루션으로 가는 방법과 그 경로를 따라 진행되는 단계에 대해서요 + +00:48.020 --> 00:52.340 +지금 말씀드리고 싶은 건 우리가 그걸 하려고 하기 때문에 그 연습을 통해 진짜 상업적 + +00:52.340 --> 00:54.170 +문제를 해결할 거라는 거예요 + +00:54.170 --> 00:59.340 +우리가 겪는 여정에 공감하는 게 중요해요 여러분도 + +00:59.340 --> 01:02.550 +사업 문제로 똑같이 할 테니까요 + +01:02.580 --> 01:08.250 +잠시 시간을 내서 우리가 설명할 세 가지 테크닉을 비교해 보죠 모델 + +01:08.250 --> 01:11.640 +최적화에 관해선 이미 얘기했죠 + +01:11.640 --> 01:16.590 +랙을 사용하든 미세 튜닝을 하든 말이에요 + +01:16.590 --> 01:23.100 +많은 혼란이 있었어요 어떤 상황에서 그런 다양한 접근법 중 하나를 선택해야 하는지에 대해서요 + +01:23.100 --> 01:29.040 +어떤 기술을 사용할지 어떻게 결정하는지 구체적인 예를 들어 설명해 + +01:29.070 --> 01:30.270 +드릴게요 + +01:31.050 --> 01:38.970 +그럼 먼저 상업적 문제에 모델을 적용하는 5단계 전략을 말씀드리죠 + +01:39.510 --> 01:42.150 +첫 단계는 이해예요 + +01:42.150 --> 01:47.130 +사업 요구 사항을 깊이 파고들고 어떤 문제를 해결하는지 이해하는 게 + +01:47.130 --> 01:47.940 +중요해요 + +01:47.940 --> 01:49.560 +성공을 어떻게 판단하죠? + +01:49.560 --> 01:56.470 +비기능적인 건 무엇인지 잠시 후 얘기하고 신중하게 문서화한 준비에 + +01:56.560 --> 02:06.430 +관한 겁니다 기준 모델 테스트와 데이터 집합 큐레이팅 같은 거죠 앞으로 닥칠 일에 일반적으로 + +02:06.430 --> 02:08.620 +대비하는 거예요 + +02:08.620 --> 02:14.800 +가장 먼저 할 일은 모델을 선택하는 겁니다 여러분이 사용할 모델이나 + +02:14.800 --> 02:19.990 +프로젝트 REST의 일부로 사용할 몇 가지 모델이요 + +02:19.990 --> 02:25.210 +여기서 타이가 지난 주 내용을 많이 끌어낼 거예요 + +02:25.210 --> 02:30.430 +순위표를 보고 여러 모델의 장단점을 분석했죠 + +02:30.910 --> 02:38.890 +고객 맞춤화는 래그나 미세 튜닝 같은 중요한 기술을 이용해 모델의 느낌을 더 살리는 작업이에요 + +02:38.890 --> 02:44.650 +그리고 전혀 언급하지 않았지만 매우 중요한 걸 제작하죠 그렇게 모델을 + +02:44.650 --> 02:49.600 +만들고 훈련한 후에 성능이 아주 좋아져요 + +02:49.600 --> 02:50.710 +그다음은요? 
+ +02:50.710 --> 02:57.950 +우리가 해킹해 온 주피터 노트 같은 게 생산될 것 같지는 않으니까요 + +02:57.950 --> 03:01.280 +뭔가 더 해야 합니다 잠시 후 그 얘기를 하죠 + +03:02.030 --> 03:03.800 +1단계부터 시작하죠 + +03:03.800 --> 03:06.680 +이해심과 상식의 문제죠 + +03:06.680 --> 03:08.720 +하지만 이런 건 아무리 말해도 모자라요 + +03:08.720 --> 03:13.100 +아주 간략하게 비즈니스 요구 사항을 모아야 해요 + +03:13.100 --> 03:14.960 +평가를 해야죠 + +03:14.960 --> 03:18.650 +성공을 어떻게 측정할지 미리 이해해야 해요 + +03:18.650 --> 03:20.000 +아주 중요하죠 + +03:20.000 --> 03:25.820 +우리가 잘 아는 데이터 과학 지표뿐 아니라 여러분의 사용자와 비즈니스 + +03:25.820 --> 03:31.490 +스폰서는 프로젝트가 목표를 달성했는지 어떻게 결정할까요? + +03:31.490 --> 03:38.210 +직접적인 영향력은 없지만 이해해야 하는 비즈니스 척도는 무엇일까요? + +03:38.600 --> 03:42.710 +우리가 작업한 데이터 양을 살펴봐야 해요 + +03:42.740 --> 03:43.460 +얼마예요? + +03:43.490 --> 03:46.460 +DQ가 뭐고 데이터 품질은 어떤지 말이에요 + +03:46.460 --> 03:50.540 +형식은 구조적인가요? 비구조적인가요? 아니면 둘 다인가요? + +03:50.540 --> 03:55.990 +깊이 이해하도록 하세요 여러분이 선택한 모델과 그에 접근하는 방식에 + +03:55.990 --> 03:58.150 +영향을 미치니까요 + +03:58.780 --> 04:01.510 +비기능적 요구 사항을 결정하는 거죠 + +04:01.540 --> 04:08.770 +비기능적인 것은 예산 같은 것입니다 얼마나 규모를 키우고 대기 시간을 줄여야 하는지 모델로부터의 응답을 + +04:08.800 --> 04:13.780 +얼마나 기다려야 하는지 같은 것입니다 아주 짧은 시간 내에 응답이 와야 하는지를 + +04:13.780 --> 04:15.130 +알 수 있죠 + +04:15.280 --> 04:18.550 +시장에 내놓을 시간도 이해해야 하고요 + +04:18.580 --> 04:24.070 +아주 짧은 시간 안에 만들어야 한다는 조건이 있나요? 아니면 이걸 작업할 시간이 있나요? 
+ +04:24.070 --> 04:27.940 +물론 짧은 시간 내에 필요한 것이라면 API를 이용한 + +04:27.970 --> 04:30.280 +프런티어 모델에 적합하죠 + +04:30.280 --> 04:35.080 +사용자 인터페이스에 관해서는 그래디오 같은 것이 몇 분 안에 + +04:35.110 --> 04:36.940 +실행할 수 있게 해주죠 + +04:36.940 --> 04:41.140 +앞으로의 결정에 영향을 줄 거예요 + +04:42.160 --> 04:46.900 +준비 단계에는 세 가지 활동이 있어요 + +04:46.900 --> 04:52.790 +먼저, 이미 시중에 나와 있는 게 뭔지 알아야 합니다 현존하는 솔루션이 뭔지요 + +04:52.790 --> 04:59.270 +오늘 이 문제를 해결하고 성능과 현재 하는 일을 잘 파악할 수 있게 될 거예요 get it + +04:59.300 --> 05:05.210 +그 일환으로 데이터 과학이 전혀 포함되지 않은 솔루션을 찾아보세요 + +05:05.240 --> 05:10.190 +if문이 몇 개 있는 솔루션도 있을 수 있고 전통적인 데이터 과학 솔루션도 + +05:10.190 --> 05:14.330 +있을 수 있어요 예를 들면 선형 회귀 모델 같은 거요 + +05:14.330 --> 05:17.840 +제품 가격을 예측하려는 것이라면요 + +05:17.930 --> 05:21.620 +그럼 처음 가는 곳이 되겠네요 + +05:22.040 --> 05:28.100 +LLM이 이미 있는 것 혹은 기존 모델을 크게 능가할 거라는 데에는 + +05:28.100 --> 05:31.940 +의심의 여지가 없다고 말할 수도 있겠죠 + +05:31.940 --> 05:33.680 +오늘 상태가 어떻든 상관없어요 + +05:33.680 --> 05:37.520 +대답은 여전히 이렇게 할 가치가 있다는 거죠 기준선을 제공하니까요 + +05:37.520 --> 05:43.580 +여러분이 개선할 부분을 표시하는 시작점을 제공하고 새로운 모델에 + +05:43.580 --> 05:48.150 +투자한 만큼 퀀트화하여 개선된 점을 보여줄 수 있죠 + +05:48.150 --> 05:53.130 +기준점만 따져도 좋은 운동이 될 수 있어요 + +05:53.130 --> 05:56.220 +하지만 그보다 이미 존재하는 걸 알아야 해요 + +05:57.120 --> 06:00.480 +그리고 관련 LMS를 비교하죠 + +06:00.480 --> 06:06.240 +먼저, 기억하시겠지만 이걸 기본으로 나눴죠 가격, 컨텍스트 길이 + +06:06.270 --> 06:10.200 +라이선싱 제약 조건 그리고 벤치마크 같은 거요 + +06:10.200 --> 06:16.710 +순위표와 경기장을 보면서 이 특정 작업에 대해 어떤 전문적인 점수가 + +06:16.710 --> 06:18.960 +있는지 이해하게 되죠 + +06:18.990 --> 06:23.370 +물개 전문가의 리더보드 같은 걸 사용해서요 + +06:23.490 --> 06:31.410 +지난번에 언급했던 컴과 데이터를 큐레이팅하고 스크래핑하고 사전 프로세싱하는 거죠 + +06:31.410 --> 06:35.910 +그리고 아직 특별히 다루지 않은 게 있는데 데이터 집합 분할이에요 + +06:35.940 --> 06:42.060 +일반적으로 모든 데이터를 트레이닝 데이터로 나누고 모델을 평가할 때 유효성 + +06:42.090 --> 06:46.610 +검증을 위해 일부를 예약합니다 그리고 테스트를 위해 마지막 + +06:46.610 --> 06:48.980 +일부를 예약하죠 + +06:48.980 --> 06:54.080 +끝까지 유지해야 하죠 하이퍼파라미터 수정을 위한 유효성 검증을 사용할 + +06:54.080 --> 06:56.090 +수 있고 모든 게 제대로 되죠 + +06:56.090 --> 07:02.870 +그리고 마지막에는 이 테스트를 이용해 모델의 궁극적인 성공을 측정하세요 + 
+07:03.410 --> 07:09.680 +데이터를 정리하고 사전 프로세싱하는 건 우리가 해온 파싱이에요 + +07:09.680 --> 07:14.630 +그리고 궁극적으로 나누는 건 준비 과정의 일부죠 + +07:15.110 --> 07:23.720 +선택은 이미 했던 건데요 기준에 기반해 LMS를 선택하고 실험하고 여러분이 + +07:23.720 --> 07:29.570 +엄선한 데이터로 훈련하고 유효성을 검사하죠 + +07:29.570 --> 07:30.620 +아직 안 했어요 + +07:30.650 --> 07:33.560 +저희도 정말 기대되네요 + +07:33.770 --> 07:41.930 +이제 잠시 멈추고 다음 시간에 계속하죠 최적화의 중요한 4단계와 함께요 diff --git a/week5/community-contributions/subtitles/srts/59472333/en_US.srt b/week5/community-contributions/subtitles/srts/59472333/en_US.srt new file mode 100755 index 0000000..c3226d8 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472333/en_US.srt @@ -0,0 +1,178 @@ +WEBVTT + +00:00.560 --> 00:04.880 +Thank you for putting up with me during my foray into traditional machine learning. + +00:04.880 --> 00:08.990 +I think it was useful for us and I hope that you didn't mind it too much. + +00:09.020 --> 00:14.330 +Maybe you enjoyed yourself a little bit like I did and tried out your own models too. + +00:14.360 --> 00:17.690 +Let's just look at how they appear side by side. + +00:17.690 --> 00:27.320 +We started with a random model, which came in at a somewhat shocking $341 off from the from the reality. + +00:27.320 --> 00:34.220 +And then we tried a constant model that did a whole lot better, but was still $146 wrong. + +00:34.520 --> 00:40.520 +We then did some proper models, the features and and linear regression model. + +00:40.550 --> 00:49.370 +At 139 we did a whole lot better with a bag of words model, the Countvectorizer and $114. + +00:50.150 --> 00:56.570 +We were slightly disappointed that when we layered on the powerful word two vec, it came in at $115. 
+ +00:56.570 --> 01:02.480 +You may have noticed that there were 400 dimensions of word two vec, whilst there were a thousand dimensions + +01:02.480 --> 01:07.000 +in the bag of words model, But still, you would expect that the 400 dimensions in the word two vec + +01:07.030 --> 01:09.490 +would be just so much. + +01:09.520 --> 01:14.830 +There would be so much more signal in those vectors that you would expect better results. + +01:14.830 --> 01:18.070 +So whilst that was a bit disappointing, we quickly made up for it. + +01:18.100 --> 01:24.700 +First of all, by getting a hair better when we use support vector machines, but then random forests + +01:24.700 --> 01:29.980 +save the day with a nice $97 error there. + +01:29.980 --> 01:35.410 +And you know, there's there's this potential school of thought that would be to say that $97 is still + +01:35.410 --> 01:40.510 +disappointing given just predicting the the price of a product. + +01:40.510 --> 01:41.920 +But I'll tell you something. + +01:41.920 --> 01:47.860 +I challenge you yourself to go in and pick some of those products and blindly try and price them. + +01:47.860 --> 01:49.120 +It ain't easy. + +01:49.120 --> 01:50.890 +It's surprisingly difficult. + +01:50.890 --> 01:55.360 +You saw when we were confronted with that LED light, that we looked at that example a moment ago, + +01:55.390 --> 02:00.340 +I think, and I don't know if I had seen that, I would have probably have guessed that's about $40 + +02:00.340 --> 02:00.820 +or something. + +02:00.820 --> 02:02.500 +And it was 200 and something. + +02:02.500 --> 02:09.270 +So, you know, it's actually surprisingly hard just given a description of something to figure out. + +02:09.270 --> 02:11.160 +Where is this on a scale? 
+ +02:11.250 --> 02:19.320 +And so getting within $97 based purely on a description of some product that could be electronics, + +02:19.350 --> 02:23.970 +it could be an appliance, it could be any of those other automotive, of course, any of the other + +02:23.970 --> 02:28.290 +things, the categories that we picked, it's it's not as easy as it sounds. + +02:28.290 --> 02:35.610 +And so getting within $97 of it on average across our test set is not bad at all. + +02:35.610 --> 02:36.810 +Not bad at all. + +02:36.840 --> 02:39.210 +But potentially we'll be able to do better. + +02:39.210 --> 02:40.320 +We will see. + +02:40.590 --> 02:41.550 +All right. + +02:41.550 --> 02:44.520 +So well done on getting to this point. + +02:44.520 --> 02:46.590 +It's been a lot of fun for me. + +02:46.590 --> 02:47.220 +Anyway. + +02:47.520 --> 02:51.660 +You've tolerated me and hopefully you didn't mind it. + +02:51.720 --> 02:57.330 +But fear not, the time has arrived for us to go to the frontier. + +02:57.330 --> 03:04.980 +So, uh, next time we're going to be talking about solving commercial problems using frontier models, + +03:04.980 --> 03:12.430 +we are then going to run that runner against GPT four mini and see how it fares. + +03:12.460 --> 03:14.140 +And then I'm going to be brave. + +03:14.170 --> 03:21.040 +I'm going to set our sights high, and we are going to run our test dataset against the big guy, against + +03:21.040 --> 03:27.610 +GPT four zero maxi, the full version, the frontier version from August. + +03:27.820 --> 03:30.790 +And that's going to be a big test for us. + +03:30.790 --> 03:32.080 +We'll see how it does. + +03:32.380 --> 03:38.380 +And yeah, remember, it's quite a challenge for an LLM because we're we're basically we're not going + +03:38.380 --> 03:40.060 +to give it any training data. 
+ +03:40.090 --> 03:45.130 +Unlike these traditional models where we've given them training data, we're simply going to send the + +03:45.130 --> 03:51.520 +test data to the LLM and say, given all of your worldly knowledge, how much do you think this is going + +03:51.520 --> 03:53.980 +to get and how much do you think it's going to be worth? + +03:53.980 --> 03:56.470 +And that's not an easy problem to set. + +03:56.470 --> 04:01.480 +So in many ways, the traditional machine learning models have a big advantage that they've been trained + +04:01.480 --> 04:03.190 +based on a training data set. + +04:03.190 --> 04:08.320 +In the case of these frontier models, we're just going to give them the descriptions and say, okay, + +04:08.320 --> 04:09.460 +how much is this? + +04:09.910 --> 04:13.120 +We will see how they get on in the next video. diff --git a/week5/community-contributions/subtitles/srts/59472333/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472333/ja_JP.srt new file mode 100755 index 0000000..aae2c1a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472333/ja_JP.srt @@ -0,0 +1,160 @@ +WEBVTT + +00:00.560 --> 00:04.880 +私の伝統的な機械学習への進出に付き合ってくれてありがとう。 + +00:04.880 --> 00:08.990 +僕らにとっては有益だったと思うし、 君があまり気にしなかったことを願うよ。 + +00:09.020 --> 00:14.330 +もしかしたら、 私のように少し楽しんで、 自分のモデルも試してみたかもしれない。 + +00:14.360 --> 00:17.690 +並べてどのように見えるかを見てみよう。 + +00:17.690 --> 00:27.320 +私たちはまず、 無作為に選んだモデルからスタートした。 + +00:27.320 --> 00:34.220 +その後、 一定のモデルを試したが、 それでも146ドルの間違いだった。 + +00:34.520 --> 00:40.520 +そして、 特徴量と線形回帰モデルという適切なモデルを作成した。 + +00:40.550 --> 00:49.370 +139ドルでは、 バッグ・オブ・ワードのモデル、 カウントベクトライザーと114ドルの方が全然良かった。 + +00:50.150 --> 00:56.570 +パワフルな2ヴェクという言葉を重ねると、 115ドルになるのが少し残念だった。 + +00:56.570 --> 01:02.480 +単語2vecの次元が400であるのに対して、 bag of + +01:02.480 --> 01:09.490 +wordsモデルには1000の次元があることにお気づきだろうか。 + +01:09.520 --> 01:14.830 +それらのベクトルにはより多くの信号があり、 より良い結果が期待できるだろう。 + +01:14.830 --> 01:18.070 +少し残念だったが、 すぐに挽回した。 + +01:18.100 --> 01:24.700 +まず最初に、 
サポートベクターマシンを使うと少し良くなるが、 その後、 + +01:24.700 --> 01:29.980 +ランダムフォレストが97ドルの誤差で窮地を脱する。 + +01:29.980 --> 01:35.410 +そして、 97ドルという価格は、 製品の価格を予測することを考えると、 + +01:35.410 --> 01:40.510 +まだ期待外れだと言う考え方もある。 + +01:40.510 --> 01:41.920 +でも、 言っておくよ。 + +01:41.920 --> 01:47.860 +私は、 あなた自身がそのような商品をいくつか選び、 やみくもに値段を付けてみることに挑戦したい。 + +01:47.860 --> 01:49.120 +簡単なことじゃない。 + +01:49.120 --> 01:50.890 +意外と難しいんだ。 + +01:50.890 --> 01:55.360 +あのLEDライトに直面したとき、 さっきの例を見ていただいたと思うのですが、 + +01:55.390 --> 02:00.820 +もし私があれを見ていたら、 おそらく40ドルかそこらだと推測していたと思います。 + +02:00.820 --> 02:02.500 +しかも200ドルとかだった。 + +02:02.500 --> 02:09.270 +だから、 何かについて説明されただけで、 それを理解するのは意外と難しいんだ。 + +02:09.270 --> 02:11.160 +これはどこのスケールですか? + +02:11.250 --> 02:19.320 +そして、 電子機器かもしれないし、 電化製品かもしれないし、 自動車かもしれないし、 + +02:19.350 --> 02:28.290 +もちろん、 私たちが選んだ他のカテゴリーかもしれない。 + +02:28.290 --> 02:35.610 +だから、 テストセット全体で平均97ドル以内に収まるのは、 まったく悪くない。 + +02:35.610 --> 02:36.810 +全然悪くないよ。 + +02:36.840 --> 02:39.210 +でも、 もっとうまくやれる可能性はある。 + +02:39.210 --> 02:40.320 +いずれわかるだろう。 + +02:40.590 --> 02:41.550 +分かった。 + +02:41.550 --> 02:44.520 +よくぞここまでたどり着いた。 + +02:44.520 --> 02:46.590 +僕にとってはとても楽しいことだった。 + +02:46.590 --> 02:47.220 +とにかくだ。 + +02:47.520 --> 02:51.660 +あなたは私を大目に見てくれたし、 気にならなかったと思う。 + +02:51.720 --> 02:57.330 +しかし、 恐れることはない。 フロンティアに行く時が来たのだ。 + +02:57.330 --> 03:04.980 +だから、 次回はフロンティアモデルを使って商業的な問題を解決することについて話をするつもりだが、 + +03:04.980 --> 03:12.430 +その時はGPTの4つのミニに対してそのランナーを走らせ、 その結果を見るつもりだ。 + +03:12.460 --> 03:14.140 +そして勇気を出す。 + +03:14.170 --> 03:21.040 +目標を高く設定し、 テストデータセットを大物、 GPT four zero maxi、 + +03:21.040 --> 03:27.610 +フルバージョン、 8月のフロンティアバージョンに対して実行するつもりだ。 + +03:27.820 --> 03:30.790 +そして、 それは我々にとって大きな試練となるだろう。 + +03:30.790 --> 03:32.080 +どうなるか見てみよう。 + +03:32.380 --> 03:40.060 +LLMは、 基本的にトレーニングデータを与えないから、 かなり難しいんだ。 + +03:40.090 --> 03:45.130 +トレーニングデータを与える従来のモデルとは異なり、 私たちは単純にテストデータをLLMに送り、 + +03:45.130 --> 03:53.980 +あなたの世俗的な知識をすべて考慮した上で、 これはいくらになりそうで、 いくらの価値があると思いますか? 
+ +03:53.980 --> 03:56.470 +そして、 それは簡単な問題ではない。 + +03:56.470 --> 04:03.190 +つまり、 従来の機械学習モデルには、 訓練データセットに基づいて訓練されているという大きな利点があるのだ。 + +04:03.190 --> 04:08.320 +これらのフロンティアモデルの場合、 我々はただ説明を与え、 よし、 これはいくらだ、 + +04:08.320 --> 04:09.460 +と言うだけだ。 + +04:09.910 --> 04:13.120 +彼らの様子は次のビデオで見てみよう。 diff --git a/week5/community-contributions/subtitles/srts/59472333/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472333/ko_KR.srt new file mode 100755 index 0000000..bda432f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472333/ko_KR.srt @@ -0,0 +1,175 @@ +WEBVTT + +00:00.560 --> 00:04.880 +전통적인 머신 러닝을 배우는 동안 절 참아주셔서 감사해요 + +00:04.880 --> 00:08.990 +저희에게 유용했던 것 같아요 너무 불쾌하지 않으셨으면 해요 + +00:09.020 --> 00:14.330 +저처럼 비트도 즐기면서 직접 모델을 만들어 보기도 했죠 + +00:14.360 --> 00:17.690 +나란히 있는 모습을 보시죠 + +00:17.690 --> 00:27.320 +무작위 모델로 시작했는데 현실에서 341달러나 차이가 나더군요 + +00:27.320 --> 00:34.220 +그리고 지속적인 모델을 썼는데 훨씬 잘 나왔지만 여전히 146달러가 모자랐어요 + +00:34.520 --> 00:40.520 +그런 다음 제대로 된 모델을 만들었죠 특징과 선형 회귀 모델요 + +00:40.550 --> 00:49.370 +139달러로 훨씬 더 벌었죠 단어 봉지 모델 카운트베이터라이저와 114달러로요 + +00:50.150 --> 00:56.570 +조금 실망스러웠던 건 강렬한 단어 2개를 겹쳤을 때 115달러가 나왔다는 거예요 + +00:56.570 --> 01:02.480 +두 벡이라는 단어의 400차원이란 걸 아셨을 겁니다 단어 모델에는 1,000차원이 + +01:02.480 --> 01:07.000 +있지만 그래도 두 벡이라는 단어의 400차원은 너무 과하다고 + +01:07.030 --> 01:09.490 +생각하실 거예요 + +01:09.520 --> 01:14.830 +그런 벡터에는 훨씬 더 많은 신호가 있어서 더 나은 결과를 기대할 수 있죠 + +01:14.830 --> 01:18.070 +그래서 좀 실망스러웠지만 비트를 빠르게 메꿨죠 + +01:18.100 --> 01:24.700 +우선 지원 벡터 머신을 사용할 때 조금 더 나아집니다 그런데 임의의 + +01:24.700 --> 01:29.980 +숲이 97달러짜리 오류로 문제를 해결하죠 + +01:29.980 --> 01:35.410 +97달러가 여전히 실망스럽다는 의견도 있어요 + +01:35.410 --> 01:40.510 +제품 가격을 예측한 것만으로도요 + +01:40.510 --> 01:41.920 +하지만 이건 말씀드리죠 + +01:41.920 --> 01:47.860 +직접 가서 제품을 골라 보고 맹목적으로 가격을 매겨 보세요 + +01:47.860 --> 01:49.120 +쉽지 않아요 + +01:49.120 --> 01:50.890 +의외로 어렵네요 + +01:50.890 --> 01:55.360 +LED 라이트가 나왔을 때 보셨었죠 조금 전에 본 예제인데요 + +01:55.390 --> 02:00.820 +제가 그걸 봤는지는 모르겠지만 40달러 정도라고 생각했을 거예요 + +02:00.820 --> 02:02.500 +200달러 정도였어요 
+ +02:02.500 --> 02:09.270 +그래서 사실 묘사를 하는 것만으로 이해하기가 정말 어려워요 + +02:09.270 --> 02:11.160 +여긴 몇 등급이에요? + +02:11.250 --> 02:19.320 +전자 제품일 수도 있고 전자제품일 수도 있고 다른 제품일 수도 있지만 + +02:19.350 --> 02:23.970 +97달러 이내로 좁히는 건 우리가 고른 + +02:23.970 --> 02:28.290 +모든 분야에서 쉽지 않아요 + +02:28.290 --> 02:35.610 +테스트 세트에서 평균 97달러 이내로 들어온 건 나쁘지 않아요 + +02:35.610 --> 02:36.810 +나쁘지 않네요 + +02:36.840 --> 02:39.210 +하지만 더 잘할 수 있어요 + +02:39.210 --> 02:40.320 +두고 봐야죠 + +02:40.590 --> 02:41.550 +좋아요 + +02:41.550 --> 02:44.520 +여기까지 오느라 고생했어요 + +02:44.520 --> 02:46.590 +정말 즐거운 시간이었어요 + +02:46.590 --> 02:47.220 +어쨌든요 + +02:47.520 --> 02:51.660 +저를 참아주셨는데 괜찮으셨길 바라요 + +02:51.720 --> 02:57.330 +하지만 걱정 마세요 우리가 변방으로 갈 때가 왔어요 + +02:57.330 --> 03:04.980 +다음에 프론티어 모델을 이용해 상업적 문제를 해결할 때는 + +03:04.980 --> 03:12.430 +GPT 4 미니와 함께 달리면서 성능을 확인해 보죠 + +03:12.460 --> 03:14.140 +그리고 용기를 낼 거예요 + +03:14.170 --> 03:21.040 +목표를 높게 잡고 이 거대한 GPT 40 맥시와 데이터 집합을 + +03:21.040 --> 03:27.610 +시험할 겁니다 8월에 출시된 풀 버전 프런티어 버전이죠 + +03:27.820 --> 03:30.790 +우리에겐 큰 시험이 될 거예요 + +03:30.790 --> 03:32.080 +어떻게 될지 봐야죠 + +03:32.380 --> 03:38.380 +기억하셔야 할 것은 LLM에게는 꽤나 어려운 일입니다 왜냐하면 우리는 기본적으로 어떤 훈련 데이터도 제공하지 + +03:38.380 --> 03:40.060 +않을 것이기 때문이죠 + +03:40.090 --> 03:45.130 +훈련 데이터를 제공했던 전통적인 모델과는 달리 테스트 데이터를 LLM에 + +03:45.130 --> 03:51.520 +보낼 겁니다 그리고 세속적인 지식을 바탕으로 얼마나 얻을 수 있다고 생각하나요? 값어치는 + +03:51.520 --> 03:53.980 +얼마나 될까요? 
+ +03:53.980 --> 03:56.470 +설정하기 쉬운 문제는 아니죠 + +03:56.470 --> 04:01.480 +전통적인 머신 러닝 모델은 여러모로 큰 이점이 있어요 훈련 데이터 집합을 바탕으로 + +04:01.480 --> 04:03.190 +훈련했다는 점이죠 + +04:03.190 --> 04:08.320 +이 개척 시대 모델의 경우 그냥 설명을 해 줄 거예요 가격 + +04:08.320 --> 04:09.460 +같은 거요 + +04:09.910 --> 04:13.120 +다음 영상에서 어떻게 하는지 보죠 get it get it diff --git a/week5/community-contributions/subtitles/srts/59472383/en_US.srt b/week5/community-contributions/subtitles/srts/59472383/en_US.srt new file mode 100755 index 0000000..7368276 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472383/en_US.srt @@ -0,0 +1,418 @@ +WEBVTT + +00:00.620 --> 00:03.290 +And welcome back to the week six folder. + +00:03.290 --> 00:08.450 +We're now at day two, which is the second and final stage of data curation. + +00:08.720 --> 00:11.840 +We're going to be extending our data set to be much bigger. + +00:11.840 --> 00:16.880 +And we're going to craft it into something that is exceptional, uh, just right. + +00:16.880 --> 00:23.150 +For our training purposes, uh, we start with some imports and setting up our environment and logging + +00:23.150 --> 00:27.110 +into hugging face, and we get ourselves ready. + +00:27.260 --> 00:35.240 +So last time I talked about a Python module called items that I wrote that allows us to parse our data + +00:35.240 --> 00:37.550 +points into a nice item. + +00:37.550 --> 00:43.610 +You remember it well, and hopefully you have spent some time digging through this yourself and confirming + +00:43.610 --> 00:44.750 +the way that it works. + +00:44.750 --> 00:51.470 +There's another Python module that I've made that is shorter and simpler called loaders, uh, which + +00:51.470 --> 01:00.470 +has a single class item loader and is just a nice way to load in one of the data sets from the hugging + +01:00.500 --> 01:02.060 +face repository. 
+ +01:02.540 --> 01:10.850 +And what it's going to do is it's going to use some fancy, uh, stuff from the concurrent futures package + +01:10.850 --> 01:16.700 +in Python to do this with multiple workers, because otherwise this takes a long time. + +01:16.850 --> 01:20.780 +Now, you don't need to do look through this in too much detail. + +01:20.780 --> 01:24.260 +There's a few things that I want to tell you about, and then you should just convince yourself that + +01:24.260 --> 01:26.870 +it's doing what it says on the tin again. + +01:26.990 --> 01:31.130 +Uh, so the main, uh, method that gets called is called load. + +01:31.130 --> 01:35.990 +And you pass in a number of workers and by default it assumes eight workers. + +01:35.990 --> 01:41.420 +I'm working on a MacBook Pro here that has eight cores and so it can handle it. + +01:41.450 --> 01:43.430 +It does really hammer my machine. + +01:43.430 --> 01:44.300 +While this is going. + +01:44.300 --> 01:50.090 +You might want to pass in a smaller number of workers, depending on how much you're willing to give + +01:50.090 --> 01:54.020 +up your machine's CPU if you're not doing other things at the same time. + +01:54.350 --> 02:02.480 +Uh, so then, um, this load in parallel method is the one that uses this process. + +02:02.480 --> 02:03.470 +Pool executor. + +02:03.500 --> 02:09.560 +If you're familiar, if you have used this before, this basically spawns a number of other. + +02:09.680 --> 02:15.590 +Um, I've just seen a mistake there that that should obviously say workers so that, uh, it does actually + +02:15.590 --> 02:16.520 +use the eight workers. + +02:16.520 --> 02:18.320 +Otherwise it will always be using six. + +02:18.590 --> 02:26.480 +Uh, so this, um, this will spawn up, uh, the number of workers that are specified, uh, and it + +02:26.480 --> 02:32.300 +will then load in each of our data points, but it will do so in chunks. 
+ +02:32.300 --> 02:38.000 +And you can see that that really what I've done is I've just created a generator familiar with generators + +02:38.000 --> 02:42.500 +now because of us using them earlier with when we're streaming back results. + +02:42.650 --> 02:48.440 +Um, but we're using a generator to, to chop up our data set into chunks of. + +02:48.470 --> 02:51.350 +As it happens, I set a chunk size here of 1000. + +02:51.350 --> 02:53.360 +So a thousand data points at a time. + +02:53.360 --> 02:56.960 +So it's chunked up into sets of 1000 data points at a time. + +02:56.960 --> 03:06.930 +And each chunk is then passed in and ultimately made to create a new data point using the same item + +03:06.930 --> 03:08.970 +class that we worked on last time. + +03:08.970 --> 03:10.650 +So no real magic here. + +03:10.650 --> 03:16.290 +This is just some fancy packaging to load in items in an efficient way. + +03:16.320 --> 03:20.820 +1000 at a time and spread out across eight different workers. + +03:20.820 --> 03:25.920 +But I will say it's worth reviewing this code because this is something that's the kind of thing, again, + +03:25.950 --> 03:28.350 +that one tends to do a lot with these kinds of projects. + +03:28.350 --> 03:35.760 +You need to make good use of your box because it's going to be hefty, and the the kinds of tasks involved + +03:35.850 --> 03:40.110 +often are very suitable for being divided into multiple workers. + +03:40.110 --> 03:42.210 +And this code should be quite reusable. + +03:42.240 --> 03:49.950 +I've hopefully written it in a way that's easy to read and and and reuse so much encourage you to do + +03:49.950 --> 03:50.520 +so. + +03:51.150 --> 03:56.490 +Um, and uh, there is one other little trick that I want to point out, not a another trick. + +03:56.490 --> 03:56.700 +Sorry. + +03:56.730 --> 04:02.010 +One other, um, uh, decision that has been made that is going to affect our results. 
+ +04:02.160 --> 04:14.160 +Uh, I've decided that we will select only products which cost anywhere between $0.50 and $999 and and + +04:14.160 --> 04:15.180 +$0.49. + +04:15.180 --> 04:19.380 +So we're going to limit it to things that price in that range. + +04:19.380 --> 04:21.270 +And there's various reasons I did that. + +04:21.390 --> 04:28.140 +Um, one is that if you take things that, uh, that go, uh, much more than that number than then + +04:28.140 --> 04:29.460 +the results are distorted. + +04:29.490 --> 04:36.210 +There's a very small number of things with a huge price and that can completely mess up things like + +04:36.240 --> 04:37.290 +our test performance. + +04:37.290 --> 04:43.050 +If we happen to pick something that costs an enormous amount, then our errors can be wild if we're + +04:43.050 --> 04:43.830 +off by a bit. + +04:43.830 --> 04:49.140 +And because we do want to be using absolute error, just the difference between the recommended price + +04:49.140 --> 04:50.310 +and the actual price. + +04:50.340 --> 04:57.120 +It's nice if we can keep our prices within a reasonable range so that we have a pretty decent sense + +04:57.120 --> 05:00.870 +of how the model's performing, and we don't have any wild things out there. + +05:00.870 --> 05:06.030 +So essentially, I'm saying for the scope of this project, for what we're doing, we are going to be + +05:06.030 --> 05:08.790 +talking about things that cost under $1,000. + +05:08.790 --> 05:13.800 +That's going to be our scope, and it allows us to focus in our data set. + +05:13.890 --> 05:20.220 +You could also experiment with doing this with different boundaries and try try bigger ranges and see + +05:20.220 --> 05:21.150 +how it goes. + +05:21.150 --> 05:26.040 +But I found this to be easiest to work with and give good, good results across the board. + +05:26.040 --> 05:28.920 +So that's what is going on here. + +05:29.370 --> 05:34.980 +Um, so let me save this and go back to our day. 
+ +05:34.980 --> 05:45.840 +And I'm going to reload this because I made that little bug fix, restart kernel and run this again. + +05:45.840 --> 05:50.310 +Run our imports, log into Huggingface again. + +05:50.400 --> 05:58.050 +And now we're going to load in the appliances data set like we did before, and it's now going to be + +05:58.080 --> 05:59.760 +hammering my computer. + +06:00.300 --> 06:07.500 +I can see it doing its thing, and I'll tell you that before it took about a minute to load in all of + +06:07.500 --> 06:14.070 +the appliances data set last time, and this time it takes 0.2 minutes. + +06:14.070 --> 06:17.190 +So it's rather faster when you have it broken into. + +06:17.220 --> 06:20.280 +Eight workers hammering my computer. + +06:20.400 --> 06:22.440 +And so I recommend you do the same. + +06:22.440 --> 06:29.850 +But obviously pass in here workers equals four or less if you if you have fewer processes. + +06:30.330 --> 06:34.830 +Um so that is the appliance data set. + +06:34.830 --> 06:42.600 +It has the 28,625 data points that are priced within that range that we have restricted it to. + +06:42.870 --> 06:47.610 +Um, and they have all been loaded in, uh, let's have a look at the first one. + +06:47.640 --> 06:49.470 +Do you remember what the first one was? + +06:51.780 --> 06:52.620 +I did that again. + +06:52.650 --> 06:53.610 +I did that last time. + +06:53.610 --> 06:53.970 +There we go. + +06:54.000 --> 06:54.810 +Try this. + +06:55.020 --> 06:56.400 +And here we go. + +06:56.400 --> 06:59.490 +It is the rack, roller and stud assembly kit. + +06:59.520 --> 07:02.220 +Let's, uh, print its prompt. + +07:04.530 --> 07:12.180 +And we'll hopefully also again convince ourselves that those, uh, clunky part numbers are filtered + +07:12.180 --> 07:12.690 +out. + +07:12.720 --> 07:13.740 +Yes they are. + +07:13.770 --> 07:19.350 +And this is, again, the dishwasher top wrack reels and stud assembly kit. 
+ +07:22.680 --> 07:26.460 +And this is the door pivot block. + +07:26.850 --> 07:28.500 +Always needed one of those. + +07:28.650 --> 07:30.510 +Uh, so there we go. + +07:30.660 --> 07:35.100 +Uh, here are our data points loaded in for us. + +07:35.460 --> 07:43.560 +Um, now it's going to be time for us to scale up and embark upon a much bigger problem. + +07:43.560 --> 07:49.980 +We are going to be bringing in all of these data sets from the the Amazon. + +07:49.980 --> 07:55.120 +Uh, Product Prices dataset repository. + +07:55.150 --> 08:00.010 +I've selected these to be fairly similar kinds of things that are sort of stuff you might find at a + +08:00.010 --> 08:01.810 +large home retail store. + +08:01.810 --> 08:08.770 +So it's really almost anything on Amazon, not including stuff like clothes, beauty products, books + +08:08.920 --> 08:10.690 +and software and things like that. + +08:10.690 --> 08:14.650 +So it's everything that felt like it was sort of similar kind of stuff. + +08:14.740 --> 08:18.040 +Um, that would make a really good comprehensive data set. + +08:18.040 --> 08:19.570 +So here are the data sets. + +08:19.570 --> 08:25.000 +And of course, um, if you're doing this as you follow along, if you're doing this when you do this, + +08:25.000 --> 08:30.550 +as you do this, you can play with this and you can choose a different set of data. + +08:30.550 --> 08:31.030 +If you wish. + +08:31.030 --> 08:34.090 +You could do this for clothes and see how it performs. + +08:34.180 --> 08:39.970 +Um, if you are concerned about the size of data and you want to be doing things quicker, you can just + +08:39.970 --> 08:40.480 +limit it. + +08:40.480 --> 08:43.450 +The appliances data set is one of the smallest ones there. + +08:43.480 --> 08:46.210 +Um, electronics is a nice and one in the middle. + +08:46.210 --> 08:48.340 +So you could just focus on electronics. 
+ +08:48.520 --> 08:54.760 +Um, and you'll still have great fun, and you'll still get much the same kinds of performance results + +08:54.760 --> 08:57.730 +as we will get with the entire data set. + +08:57.880 --> 09:00.160 +So those are all of the data set names. + +09:00.160 --> 09:06.550 +And now I'm going to read all of them in using the item loader to get after them. + +09:06.550 --> 09:09.370 +And it's going to read them in one at a time. + +09:09.370 --> 09:12.790 +The biggest one is automotive which is the first one on the list. + +09:12.790 --> 09:16.180 +I've generally ordered it, I think with the biggest ones at the beginning. + +09:16.330 --> 09:20.920 +Um, it might take a bit longer for you because the first time you run this, it has to download the + +09:20.920 --> 09:26.410 +data from Huggingface to your computer because I've already run it once that step has happened, and + +09:26.410 --> 09:29.110 +so it just reuses it from a cache on my computer. + +09:29.230 --> 09:33.130 +Um, but it may it may take a bit, um, a bit longer for you because of that. + +09:33.130 --> 09:35.170 +And then off it will go. + +09:35.380 --> 09:40.180 +Now the total time for for on my computer is about 20 minutes. + +09:40.240 --> 09:47.890 +Uh, my can see my, my CPU is flat out, so I'm going to take a pause and see you again right after + +09:47.890 --> 09:50.080 +the break when the data has all loaded. 
diff --git a/week5/community-contributions/subtitles/srts/59472383/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472383/ja_JP.srt new file mode 100755 index 0000000..2694fc5 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472383/ja_JP.srt @@ -0,0 +1,334 @@ +WEBVTT + +00:00.620 --> 00:03.290 +そして、 6週目のフォルダーへようこそ。 + +00:03.290 --> 00:08.450 +私たちは今、 データ・キュレーションの最終段階である2日目を迎えている。 + +00:08.720 --> 00:11.840 +これからデータセットをもっと大きくしていくつもりだ。 + +00:11.840 --> 00:16.880 +そして、 私たちはそれを特別な、 あー、 ちょうどいいものに作り上げるんだ。 + +00:16.880 --> 00:27.110 +トレーニングの目的としては、 まずインポートして環境を整え、 ハグフェイスにログインして準備をする。 + +00:27.260 --> 00:37.550 +前回、 私が書いたitemsというPythonモジュールの話をした。 + +00:37.550 --> 00:44.750 +あなたはそれをよく覚えているし、 願わくば、 あなた自身がこれを調べ、 それが機能する方法を確認するのに時間を費やしたことがあるだろう。 + +00:44.750 --> 00:51.470 +私が作ったPythonモジュールには、 + +00:51.470 --> 01:02.060 +loadersと呼ばれるもっと短くてシンプルなものがある。 + +01:02.540 --> 01:10.850 +Pythonのコンカレント・フューチャーズ・パッケージを使い、 + +01:10.850 --> 01:16.700 +複数のワーカーでこれを行う。 + +01:16.850 --> 01:20.780 +あまり詳しく説明する必要はないだろう。 + +01:20.780 --> 01:26.870 +いくつかお伝えしたいことがあるのですが、 それについてはまた、 書いてあるとおりになっているとご自身を納得させてください。 + +01:26.990 --> 01:31.130 +メインで呼ばれるメソッドはloadだ。 + +01:31.130 --> 01:35.990 +そして労働者の数を渡すと、 デフォルトでは8人の労働者を想定している。 + +01:35.990 --> 01:41.420 +私は8コアのMacBook Proで作業しているので、 対応できる。 + +01:41.450 --> 01:43.430 +私のマシンに大きな衝撃を与えている。 + +01:43.430 --> 01:44.300 +そうしているうちに + +01:44.300 --> 01:50.090 +同時に他のことをしないのであれば、 マシンのCPUをどれだけ捨てても構わないかにもよるが、 + +01:50.090 --> 01:54.020 +より少ない数のワーカーを渡した方がいいかもしれない。 + +01:54.350 --> 02:02.480 +ええと、 では、 このロード・イン・パラレル方式は、 このプロセスを使うものですね。 + +02:02.480 --> 02:03.470 +プール執行人。 + +02:03.500 --> 02:09.560 +ご存知の方、 使ったことがある方はわかると思うが、 これは基本的にいくつもの他の選手を生み出す。 + +02:09.680 --> 02:16.520 +今、 ミスを見つけたんだけど、 "workers "と書くべきところを、 "8 workers "と書いてしまったんだ。 + +02:16.520 --> 02:18.320 +そうでなければ、 常に6本を使うことになる。 + +02:18.590 --> 02:32.300 +そして、 指定された数のワーカーを起動し、 各データポイントを読み込みます。 + +02:32.300 --> 02:42.500 +そして、 私がやったことは、 ジェネレーターに慣れ親しんだジェネレーターを作っただけだということがお分かりいただけると思います。 + 
+02:42.650 --> 02:48.440 +でも、 私たちはジェネレーターを使って、 データセットを切り刻んでいるんだ。 + +02:48.470 --> 02:51.350 +たまたまだが、 ここではチャンクサイズを1000に設定した。 + +02:51.350 --> 02:53.360 +つまり、 一度に1000のデータポイントだ。 + +02:53.360 --> 02:56.960 +そのため、 一度に1000点のデータセットに分割される。 + +02:56.960 --> 03:08.970 +そして、 それぞれのチャンクが渡され、 最終的には、 前回作業したのと同じアイテム・クラスを使って新しいデータ・ポイントを作成する。 + +03:08.970 --> 03:10.650 +だから、 ここに本当のマジックはない。 + +03:10.650 --> 03:16.290 +これは、 効率的な方法で商品を積み込むための、 ちょっと凝った包装に過ぎない。 + +03:16.320 --> 03:20.820 +一度に1000人、 8人の作業員に分散している。 + +03:20.820 --> 03:28.350 +しかし、 このコードを見直す価値はあると言っておく。 なぜなら、 この種のプロジェクトではよくやりがちなことだからだ。 + +03:28.350 --> 03:40.110 +箱は重くなるし、 複数の作業員に分担するのに適した作業内容であることが多いからだ。 + +03:40.110 --> 03:42.210 +そして、 このコードはかなり再利用できるはずだ。 + +03:42.240 --> 03:50.520 +読みやすく、 再利用しやすいように書いたつもりなので、 ぜひそうしてほしい。 + +03:51.150 --> 03:56.490 +それと、 もうひとつ、 ちょっとしたトリックがあるんだ。 + +03:56.490 --> 03:56.700 +申し訳ない。 + +03:56.730 --> 04:02.010 +もうひとつ、 えーと、 えーと、 結果に影響する決定がなされた。 + +04:02.160 --> 04:15.180 +ええと、 0ドルの間の商品だけを選ぶことにしました。 50ドルと999ドルと0ドル。 + +04:15.180 --> 04:15.180 +49. 
+ +04:15.180 --> 04:19.380 +だから、 その範囲の価格のものに限定するつもりだ。 + +04:19.380 --> 04:21.270 +そうした理由はいろいろある。 + +04:21.390 --> 04:29.460 +ええと、 ひとつは、 その数字よりはるかに大きいものを取ると、 結果が歪んでしまうということです。 + +04:29.490 --> 04:37.290 +ごくわずかなことだが、 大きな代償を伴うことがあり、 それが我々のテストパフォーマンスのようなものを完全に台無しにしてしまうことがある。 + +04:37.290 --> 04:43.830 +たまたま莫大な値段のものを選んでしまった場合、 少しずれただけで誤差が大きくなってしまう。 + +04:43.830 --> 04:50.310 +また、 絶対誤差を使用したいので、 推奨価格と実際の価格との差だけである。 + +04:50.340 --> 05:00.870 +価格が妥当な範囲に保たれていれば、 そのモデルがどのようなパフォーマンスをしているのかをきちんと把握することができる。 + +05:00.870 --> 05:08.790 +だから基本的に、 このプロジェクトの範囲、 我々がやろうとしていることについては、 1,000ドル以下のものについて話すつもりだと言っているんだ。 + +05:08.790 --> 05:13.800 +それが私たちのスコープとなり、 データセットに集中することができる。 + +05:13.890 --> 05:21.150 +また、 境界線を変えてやってみたり、 より大きな範囲を試してみたりして、 様子を見ることもできる。 + +05:21.150 --> 05:26.040 +しかし、 私はこれが最も扱いやすく、 全体的に良い結果をもたらすと感じた。 + +05:26.040 --> 05:28.920 +それがここで起こっていることだ。 + +05:29.370 --> 05:34.980 +じゃあ、 これを保存して、 今日の話に戻ろうか。 + +05:34.980 --> 05:45.840 +ちょっとバグを修正したので、 カーネルを再起動してもう一度実行します。 + +05:45.840 --> 05:50.310 +インポートを実行し、 再びHuggingfaceにログインする。 + +05:50.400 --> 05:59.760 +そして今度は、 前と同じように家電製品のデータセットをロードする。 + +06:00.300 --> 06:07.500 +前回、 家電製品のデータセットをすべて読み込むのに約1分かかったが、 + +06:07.500 --> 06:14.070 +今回は0分だ。 2分。 + +06:14.070 --> 06:17.190 +だから、 むしろ分解した方が早いんだ。 + +06:17.220 --> 06:20.280 +8人の作業員が私のコンピューターを叩いている。 + +06:20.400 --> 06:22.440 +だから、 あなたにもそうすることを勧める。 + +06:22.440 --> 06:29.850 +しかし、 工程数が少なければ、 明らかにここでパスする労働者は4人以下になる。 + +06:30.330 --> 06:34.830 +これが家電製品のデータセットだ。 + +06:34.830 --> 06:42.600 +28,625のデータポイントがあり、 その範囲内で価格が設定されている。 + +06:42.870 --> 06:47.610 +ええと、 それらはすべてロードされています。 + +06:47.640 --> 06:49.470 +最初は何だったか覚えている? 
+ +06:51.780 --> 06:52.620 +またやってしまった。 + +06:52.650 --> 06:53.610 +前回もそうだった。 + +06:53.610 --> 06:53.970 +これでよし。 + +06:54.000 --> 06:54.810 +これを試してみてほしい。 + +06:55.020 --> 06:56.400 +そして、 これだ。 + +06:56.400 --> 06:59.490 +ラック、 ローラー、 スタッドのアッセンブリーキットです。 + +06:59.520 --> 07:02.220 +そのプロンプトを印刷してみよう。 + +07:04.530 --> 07:12.690 +そして、 願わくば、 そのような不格好な部品番号がフィルターにかけられたものであることをもう一度納得させたい。 + +07:12.720 --> 07:13.740 +そうだ。 + +07:13.770 --> 07:19.350 +そしてこれが、 やはり食器洗い機のトップラックリールとスタッド組み立てキットだ。 + +07:22.680 --> 07:26.460 +そしてこれがドアピボットブロック。 + +07:26.850 --> 07:28.500 +いつもそれを必要としていた。 + +07:28.650 --> 07:30.510 +ああ、 そうだ。 + +07:30.660 --> 07:35.100 +ええと、 ここにデータがあります。 + +07:35.460 --> 07:43.560 +うーん、 これからは規模を拡大し、 もっと大きな問題に着手する時期になるだろう。 + +07:43.560 --> 07:49.980 +我々はアマゾンからこれらのデータセットをすべて持ち込むつもりだ。 + +07:49.980 --> 07:55.120 +商品価格データセットのリポジトリです。 + +07:55.150 --> 08:01.810 +これらは、 大きなホームセンターで売っているような、 かなり似たような種類のものを選んだ。 + +08:01.810 --> 08:10.690 +つまり、 服や美容品、 本、 ソフトウェアといったものを除けば、 アマゾンにあるものならほとんど何でも手に入るということだ。 + +08:10.690 --> 08:14.650 +だから、 似たようなものだと感じたのは全部そうなんだ。 + +08:14.740 --> 08:18.040 +うーん、 それは本当に良い包括的なデータセットになるだろうね。 + +08:18.040 --> 08:19.570 +これがそのデータセットだ。 + +08:19.570 --> 08:25.000 +そしてもちろん、 もしあなたがこれをフォローしながらやっているのであれば、 もしあなたがこれをやっているときにこれをやっているのであれば、 + +08:25.000 --> 08:30.550 +これをやっているときにこれをやっているのであれば、 これで遊ぶことができますし、 別のデータセットを選ぶこともできます。 + +08:30.550 --> 08:31.030 +お望みなら + +08:31.030 --> 08:34.090 +洋服のためにこれをやって、 そのパフォーマンスを見ることができるだろう。 + +08:34.180 --> 08:40.480 +ええと、 もしあなたがデータサイズを気にしていて、 より速く物事を進めたいのであれば、 データを制限すればいい。 + +08:40.480 --> 08:43.450 +家電製品のデータセットは、 その中でも最も小さいもののひとつだ。 + +08:43.480 --> 08:46.210 +ええと、 エレクトロニクスはナイスで、 真ん中に1つ。 + +08:46.210 --> 08:48.340 +だから、 エレクトロニクスに集中すればいい。 + +08:48.520 --> 08:57.730 +それでも、 とても楽しいし、 データセット全体を使った場合とほとんど同じようなパフォーマンス結果を得ることができる。 + +08:57.880 --> 09:00.160 +これがすべてのデータセット名だ。 + +09:00.160 --> 09:06.550 +そして今、 私はアイテム・ローダーを使ってそれらを追いかけるために、 それらをすべて読むつもりだ。 + +09:06.550 --> 09:09.370 +そして、 一人ずつ読み込んでいく。 + +09:09.370 --> 09:12.790 +最大のものは、 リストの最初にある自動車だ。 
+ +09:12.790 --> 09:16.180 +私は大体、 最初に一番大きなものを注文してきたと思う。 + +09:16.330 --> 09:29.110 +というのも、 最初にこれを実行するとき、 Huggingfaceからあなたのコンピューターにデータをダウンロードする必要があるからです。 + +09:29.230 --> 09:33.130 +うーん、 でも、 そのせいでちょっと、 うーん、 時間がかかるかもしれない。 + +09:33.130 --> 09:35.170 +そして出発する。 + +09:35.380 --> 09:40.180 +今、 私のコンピューターにかかる時間は合計で20分ほどだ。 + +09:40.240 --> 09:50.080 +ええと、 CPUがパンクしそうなので、 一旦中断して、 データがすべて読み込まれた休憩の直後にまたお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/59472383/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472383/ko_KR.srt new file mode 100755 index 0000000..68720c5 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472383/ko_KR.srt @@ -0,0 +1,412 @@ +WEBVTT + +00:00.620 --> 00:03.290 +6주 차 폴더에 잘 오셨어요 + +00:03.290 --> 00:08.450 +이제 이틀째예요, 데이터 큐레이션의 두 번째이자 마지막 단계죠 + +00:08.720 --> 00:11.840 +데이터 집합을 훨씬 더 크게 확장할 거예요 + +00:11.840 --> 00:16.880 +그걸 가지고 아주 특별하고 완벽한 요리를 만들 거예요 + +00:16.880 --> 00:23.150 +훈련을 위해 수입과 환경 설정 포옹 로깅으로 시작합니다 + +00:23.150 --> 00:27.110 +그리고 준비하죠 + +00:27.260 --> 00:35.240 +지난 시간에는 파이썬 을 이용한 모듈을 다뤘었죠. 제가 작성한 항목으로 데이터 포인트를 좋은 항목으로 + +00:35.240 --> 00:37.550 +구문 분석할 수 있었죠. 
+ +00:37.550 --> 00:43.610 +잘 기억하시니 시간을 들여 직접 살펴보면서 작동 원리를 확인하셨길 + +00:43.610 --> 00:44.750 +바라요 + +00:44.750 --> 00:51.470 +제가 만든 파이썬 툴의 다른 모듈은 더 짧고 간단합니다 로더라고 합니다 + +00:51.470 --> 01:00.470 +단일 클래스 아이템 로더를 가지고 있고 포옹하는 얼굴 저장소에서 데이터셋을 가져오는 좋은 + +01:00.500 --> 01:02.060 +방법이죠 + +01:02.540 --> 01:10.850 +하는 일은 몇 가지 복잡한 것을 이용하는 것입니다 파이썬 에서의 동시 미래 패키지에서 여러 작업자와 + +01:10.850 --> 01:16.700 +함께 작업합니다 그렇지 않으면 시간이 오래 걸리기 때문이죠 + +01:16.850 --> 01:20.780 +너무 자세히 볼 필요는 없어요 + +01:20.780 --> 01:24.260 +몇 가지 말씀드릴 게 있는데 다시 틀에 적힌 대로 잘되고 + +01:24.260 --> 01:26.870 +있다고 스스로를 설득해 보세요 + +01:26.990 --> 01:31.130 +주요 메서드는 로드라고 불려요 + +01:31.130 --> 01:35.990 +작업자 수를 전달하면 자동으로 8명으로 추정하죠 + +01:35.990 --> 01:41.420 +맥북 프로를 작업 중인데 코어가 8개라 감당할 수 있어요 + +01:41.450 --> 01:43.430 +제 기계가 정말 망가졌어요 + +01:43.430 --> 01:44.300 +이걸 하는 동안에요 + +01:44.300 --> 01:50.090 +더 적은 수의 작업자를 통과시키고 싶을 수도 있죠 얼마나 기꺼이 머신의 CPU를 포기할지에 + +01:50.090 --> 01:54.020 +따라서요 동시에 다른 것을 하지 않을 경우에요 + +01:54.350 --> 02:02.480 +이 load in parallel 메서드가 이 프로세스를 사용하는 메서드예요 + +02:02.480 --> 02:03.470 +수영장 집행인요 + +02:03.500 --> 02:09.560 +익숙하시다면, 전에 사용해 보셨다면 기본적으로 다른 여러 개를 생성하죠 + +02:09.680 --> 02:15.590 +여기서 실수를 발견했어요 노동자라고 써야 하는데 실제로는 노동자 8명을 쓰고 + +02:15.590 --> 02:16.520 +있네요 + +02:16.520 --> 02:18.320 +안 그러면 항상 6개를 사용하죠 + +02:18.590 --> 02:26.480 +이건 지정된 작업자 수를 생성할 겁니다 그런 다음 각각의 데이터 + +02:26.480 --> 02:32.300 +포인트를 로드하는데 단위로 하죠 + +02:32.300 --> 02:38.000 +제가 한 일은 발생기를 만든 겁니다 발생기에 익숙한 거죠 결과를 + +02:38.000 --> 02:42.500 +스트리밍백할 때 앞서 사용했으니까요 + +02:42.650 --> 02:48.440 +하지만 우리는 발전기를 사용해서 데이터를 토막 내고 있어요 + +02:48.470 --> 02:51.350 +마침 여기 1000으로 덩어리를 설정했네요 + +02:51.350 --> 02:53.360 +한 번에 1,000개의 데이터 포인트가 있는 거죠 + +02:53.360 --> 02:56.960 +한 번에 1,000개의 데이터 포인트로 나누어져 있죠 + +02:56.960 --> 03:06.930 +각각의 덩어리는 전달되고 궁극적으로 새 데이터 포인트를 생성합니다 지난 시간에 작업했던 동일한 아이템 + +03:06.930 --> 03:08.970 +클래스를 이용해서요 + +03:08.970 --> 03:10.650 +진짜 마법은 아니군요 + +03:10.650 --> 03:16.290 +효율적으로 물건을 담기 위한 화려한 포장일 뿐이에요 + +03:16.320 --> 03:20.820 +한 번에 천 개씩 8명의 노동자에게 나눠 주죠 + 
+03:20.820 --> 03:25.920 +하지만 이 코드를 검토할 가치가 있다고 말씀드리고 싶네요 왜냐하면 이건 이런 종류의 것이니까요 + +03:25.950 --> 03:28.350 +이런 종류의 프로젝트에서 많은 일을 하죠 + +03:28.350 --> 03:35.760 +상자는 무거우니 잘 활용해야 합니다 그리고 여러 인부로 나누어 + +03:35.850 --> 03:40.110 +작업하기 좋은 경우가 많죠 + +03:40.110 --> 03:42.210 +이 코드는 재사용이 가능해야 해요 + +03:42.240 --> 03:50.520 +읽기 쉽게 썼으면 좋겠고 재사용도 많이 했으니 여러분도 그렇게 하세요 + +03:51.150 --> 03:56.490 +트릭이 하나 더 있는데 지적하고 싶어요 트릭은 아니고요 + +03:56.490 --> 03:56.700 +미안해요 + +03:56.730 --> 04:02.010 +또 다른 결정이 내려졌는데 그게 결과에 영향을 미칠 거예요 + +04:02.160 --> 04:15.180 +0달러 미만 제품만 고르기로 결정했어요 50달러, 999달러 그리고 0달러요 + +04:15.180 --> 04:15.180 +49살요 + +04:15.180 --> 04:19.380 +그 범위에서 그 가격으로 제한할 거예요 + +04:19.380 --> 04:21.270 +여러 가지 이유가 있어요 + +04:21.390 --> 04:28.140 +첫째, 그 수치보다 훨씬 더 큰 수치가 나오면 그 결과가 왜곡될 + +04:28.140 --> 04:29.460 +수 있어요 + +04:29.490 --> 04:36.210 +적은 것에는 큰돈이 들 수 있습니다 그래서 테스트 성능 같은 걸 완전히 망칠 + +04:36.240 --> 04:37.290 +수 있죠 + +04:37.290 --> 04:43.050 +만약 엄청나게 비싼 걸 골랐다면 비트만 잘못돼도 엄청난 오류가 날 수 + +04:43.050 --> 04:43.830 +있어요 + +04:43.830 --> 04:49.140 +절대 오류를 사용해야 하니까요 권장 가격과 실제 가격의 + +04:49.140 --> 04:50.310 +차이죠 + +04:50.340 --> 04:57.120 +가격을 합리적인 범위 내에 유지하면 모델 성능을 알 수 + +04:57.120 --> 05:00.870 +있고 이상한 게 나오지 않아요 + +05:00.870 --> 05:06.030 +본질적으로 이 프로젝트의 스코프에서 우리가 하는 걸 위해서는 1,000달러 + +05:06.030 --> 05:08.790 +이하의 것에 대해 얘기할 거예요 + +05:08.790 --> 05:13.800 +그게 우리 스코프가 될 겁니다 데이터 집합에 집중할 수 있게 해주죠 + +05:13.890 --> 05:20.220 +다른 경계를 두고 실험해 볼 수도 있어요 더 큰 범위를 시도해 보고 어떻게 되는지 + +05:20.220 --> 05:21.150 +보죠 + +05:21.150 --> 05:26.040 +하지만 이렇게 하면 작업하기 쉽고 전반적으로 좋은 결과를 낼 수 있어요 + +05:26.040 --> 05:28.920 +그런 일이 벌어지고 있어요 + +05:29.370 --> 05:34.980 +이걸 저장하고 일상으로 돌아가 보죠 + +05:34.980 --> 05:45.840 +이걸 다시 로드하겠습니다 저 버그를 고치고 커널을 재시작하고 이걸 다시 실행했으니까요 + +05:45.840 --> 05:50.310 +수입 기록 조회하고 포옹 사이트에 다시 로그인해요 + +05:50.400 --> 05:58.050 +이제 아까 했던 것처럼 데이터 어플라이언스를 로드할게요 제 컴퓨터를 + +05:58.080 --> 05:59.760 +박살 낼 거예요 + +06:00.300 --> 06:07.500 +작업하는 게 보여요 1분 전에 데이터 세트 전체 어플라이언스를 로드했는데 + +06:07.500 --> 06:14.070 +이번엔 0분 걸렸어요 2분 + 
+06:14.070 --> 06:17.190 +그래서 길들이는 게 더 빠르죠 + +06:17.220 --> 06:20.280 +직원 8명이 내 컴퓨터를 망가뜨려요 + +06:20.400 --> 06:22.440 +여러분도 그렇게 하세요 + +06:22.440 --> 06:29.850 +하지만 여기서 통과하면 작업자는 for less죠 프로세스가 적다면요 + +06:30.330 --> 06:34.830 +그게 가전제품 데이터 세트예요 + +06:34.830 --> 06:42.600 +제한된 범위 내에 가격 책정된 28,625개의 데이터 포인트가 있어요 + +06:42.870 --> 06:47.610 +전부 차에 실었어요 첫 번째 사진을 보죠 + +06:47.640 --> 06:49.470 +첫 번째가 뭐였는지 기억하세요? + +06:51.780 --> 06:52.620 +또 그랬네요 + +06:52.650 --> 06:53.610 +저번에도 그랬잖아요 + +06:53.610 --> 06:53.970 +됐어요 + +06:54.000 --> 06:54.810 +이거 먹어 봐요 + +06:55.020 --> 06:56.400 +자, 보세요 + +06:56.400 --> 06:59.490 +랙, 롤러 스터드 조립 키트예요 + +06:59.520 --> 07:02.220 +프롬프트를 프린트하죠 + +07:04.530 --> 07:12.690 +또 스스로 납득할 수 있길 바라요 그 투박한 부품 번호는 걸러졌다고요 + +07:12.720 --> 07:13.740 +맞아요 + +07:13.770 --> 07:19.350 +이건 식기세척기 톱 슬랙 릴과 스터드 조립 세트예요 + +07:22.680 --> 07:26.460 +이건 문 피벗 블록이에요 + +07:26.850 --> 07:28.500 +늘 그런 게 필요했죠 + +07:28.650 --> 07:30.510 +자, 됐어요 + +07:30.660 --> 07:35.100 +로드된 데이터 포인트예요 + +07:35.460 --> 07:43.560 +이제 규모를 키워서 더 큰 문제에 착수할 때예요 + +07:43.560 --> 07:49.980 +아마존에서 모든 데이터를 가져올 거예요 + +07:49.980 --> 07:55.120 +제품 가격 데이터셋 저장소예요 + +07:55.150 --> 08:00.010 +대형 가정용품 가게에서 볼 수 있는 물건들과 꽤 비슷한 + +08:00.010 --> 08:01.810 +걸 골랐어요 + +08:01.810 --> 08:08.770 +아마존에서 거의 모든 걸 취급해요 옷, 화장품, 책, 소프트웨어 그런 + +08:08.920 --> 08:10.690 +건 제외하고요 + +08:10.690 --> 08:14.650 +모든 게 비슷하게 느껴졌어요 + +08:14.740 --> 08:18.040 +포괄적인 데이터 집합이 될 거예요 + +08:18.040 --> 08:19.570 +여기 데이터 집합이 있어요 + +08:19.570 --> 08:25.000 +물론 여러분이 따라 할 때 이걸 하고 이걸 할 때 이걸 한다면 + +08:25.000 --> 08:30.550 +이걸 갖고 놀면서 다른 데이터 세트를 선택할 수 있어요 + +08:30.550 --> 08:31.030 +원하신다면요 + +08:31.030 --> 08:34.090 +이렇게 옷을 만들고 어떻게 작동하는지 보세요 + +08:34.180 --> 08:39.970 +데이터의 크기가 걱정되고 작업을 빨리하고 싶다면 제한하면 + +08:39.970 --> 08:40.480 +돼요 + +08:40.480 --> 08:43.450 +어플라이언스 데이터셋은 가장 작은 것 중 하나예요 + +08:43.480 --> 08:46.210 +전자 제품은 가운데에 있어요 + +08:46.210 --> 08:48.340 +전자 제품에만 집중할 수 있게요 + +08:48.520 --> 08:54.760 +음, 그래도 여전히 재미있고 성능도 데이터셋에서 얻을 수 있는 것과 + +08:54.760 --> 
08:57.730 +동일한 결과를 얻게 될 거예요. + +08:57.880 --> 09:00.160 +이게 모든 데이터 세트 이름이에요 + +09:00.160 --> 09:06.550 +이제 아이템 로더를 이용해 get을 불러오는 모든 것을 읽어볼게요. + +09:06.550 --> 09:09.370 +한 번에 하나씩 읽어요 + +09:09.370 --> 09:12.790 +가장 큰 도전은 자동차인데 목록 1번이에요 + +09:12.790 --> 09:16.180 +전 보통 제일 큰 걸 주문해요 + +09:16.330 --> 09:20.920 +시간이 좀 걸릴 거예요 처음 실행할 때 허깅페이스 데이터를 컴퓨터에 + +09:20.920 --> 09:26.410 +다운로드 해야 하거든요 그 단계를 거치고 나면 이미 실행했기 때문에 제 컴퓨터 캐시에서 + +09:26.410 --> 09:29.110 +데이터를 재사용하는 거죠 + +09:29.230 --> 09:33.130 +하지만 비트 때문에 시간이 좀 더 걸릴 거예요 + +09:33.130 --> 09:35.170 +그리고 떨어질 거예요 + +09:35.380 --> 09:40.180 +제 컴퓨터로 작업하는 데 걸리는 시간은 20분 정도예요 + +09:40.240 --> 09:47.890 +제 CPU가 완전히 다운된 걸 알 수 있죠 잠시 쉬었다가 다시 만나겠습니다 데이터가 모두 로드된 + +09:47.890 --> 09:50.080 +휴식 시간 직후에요 diff --git a/week5/community-contributions/subtitles/srts/59472413/en_US.srt b/week5/community-contributions/subtitles/srts/59472413/en_US.srt new file mode 100755 index 0000000..9268510 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472413/en_US.srt @@ -0,0 +1,292 @@ +WEBVTT + +00:01.160 --> 00:02.000 +Wonderful. + +00:02.000 --> 00:09.890 +Where we left off is we had just created the Get Features function, which builds our features dictionary + +00:09.890 --> 00:11.180 +with our four features. + +00:11.210 --> 00:20.180 +Let's look at one so we can call get features for let's say our initial training point. + +00:20.180 --> 00:24.980 +And what we get back is this nice little this little dictionary. + +00:25.010 --> 00:28.100 +Apparently it's £2.2 in its weight. + +00:28.100 --> 00:30.380 +That's its average rank. + +00:30.410 --> 00:32.510 +That's the length of the text. + +00:32.510 --> 00:35.510 +And it is not a top electronics brand. + +00:35.510 --> 00:39.620 +So these become the rather meager features that we have engineered. + +00:39.830 --> 00:42.980 +You can do better and I challenge you to do so. + +00:43.220 --> 00:43.970 +All right. + +00:44.000 --> 00:47.660 +Now it's time for some machine learning. 
+ +00:47.660 --> 00:54.260 +There's this little utility function that's going to take a list of items and convert it into a dataframe. + +00:54.290 --> 00:55.730 +A pandas dataframe. + +00:55.730 --> 01:00.710 +Not going to go through this in detail, because this is not a course about traditional machine learning. + +01:00.920 --> 01:05.990 +If you know DataFrames and you'll be familiar with this, and we use this to make a training dataframe + +01:05.990 --> 01:11.690 +and a test dataframe, just picking the top 250 points in our test data set. + +01:12.020 --> 01:13.250 +So there we go. + +01:13.280 --> 01:16.490 +We've made our conversion and now this is the business. + +01:16.490 --> 01:20.330 +This is where we run traditional linear regression. + +01:20.750 --> 01:23.900 +We set our features. + +01:24.170 --> 01:28.880 +Um we specify the names of the columns of our features. + +01:28.910 --> 01:31.250 +This is where all the action happens. + +01:31.250 --> 01:35.840 +Model equals linear regression is saying we want a linear regression model. + +01:35.840 --> 01:43.760 +And then we fit that model to our x values, our features and our y values is the actual prices of our + +01:43.760 --> 01:45.050 +training data set. + +01:45.080 --> 01:50.540 +And that this this is where the action happens and where the model is actually fit. + +01:50.720 --> 01:57.980 +Then going to print the the features and their coefficients or how much weight they got. + +01:57.980 --> 02:02.870 +So we can see that and get a sense of how important were each of our features. + +02:02.900 --> 02:09.380 +And then we will actually run a prediction on that test set and get things like the, the MSE, the + +02:09.380 --> 02:14.300 +mean squared error and the r squared for for the data scientists amongst you that want to have a look + +02:14.330 --> 02:14.810 +at that. + +02:14.810 --> 02:19.940 +But never fear, we're about to see it of course Using the framework that we built before. 
+
+02:19.970 --> 02:22.070
+That's going to show it on the same graph.
+
+02:22.070 --> 02:27.710
+So make your guess where you think this is going to come out compared to the average model.
+
+02:27.710 --> 02:31.460
+Let's quickly look back at the average model to remind ourselves what we're trying to beat.
+
+02:31.460 --> 02:37.310
+So an average guess has an error of $145, $146.
+
+02:37.310 --> 02:41.960
+So hopefully linear regression can do better than average.
+
+02:41.960 --> 02:42.620
+Let's see.
+
+02:42.650 --> 02:44.000
+Let's first run it.
+
+02:45.470 --> 02:46.460
+It's quick.
+
+02:47.090 --> 02:53.030
+Uh, so the different uh, um coefficients, the weights that it gave things, you can see that how
+
+02:53.030 --> 02:58.100
+heavy something is gets a small uh, positive weight.
+
+02:58.130 --> 03:01.400
+How it ranks gets a larger one.
+
+03:01.400 --> 03:05.450
+The text length is a very small signal, very low.
+
+03:05.480 --> 03:07.460
+Is it a top electronics brand?
+
+03:07.490 --> 03:08.690
+Makes a big difference.
+
+03:08.720 --> 03:11.210
+Things that are top electronics brands get a lot.
+
+03:11.720 --> 03:20.540
+Um, okay, so now, uh, we simply wrap this in a function because this is what we're going to use
+
+03:20.540 --> 03:23.060
+in our cool test visualizer.
+
+03:23.060 --> 03:26.530
+We wrap it in a function called linear regression Pricer.
+
+03:26.560 --> 03:32.710
+And we will then just use it; it passes in an item.
+
+03:32.710 --> 03:34.720
+We will get the features for that item.
+
+03:34.720 --> 03:37.030
+We will then convert that to a data frame.
+
+03:37.030 --> 03:43.030
+And then we will call our linear regression model to predict where that comes.
+
+03:43.060 --> 03:45.220
+And let's see what happens.
+
+03:45.490 --> 03:47.560
+Tester dot test.
+
+03:49.000 --> 03:51.400
+Linear regression Pricer.
+
+03:53.380 --> 03:54.550
+Are you ready for this?
+
+03:54.580 --> 03:55.990
+Remember what the average number was.
+
+03:55.990 --> 03:56.860
+Here we go.
+
+03:57.010 --> 04:00.970
+Oh, uh, execute the cell before.
+
+04:01.480 --> 04:04.270
+Uh, how many times have I done that now?
+
+04:05.650 --> 04:06.340
+Bam!
+
+04:06.340 --> 04:08.680
+Well, we can see the colors.
+
+04:08.680 --> 04:13.210
+We can see that it's got a lot of reds in there, but maybe some more greens than before.
+
+04:13.240 --> 04:15.190
+Maybe it hasn't done terribly.
+
+04:15.190 --> 04:17.050
+It's getting some things right.
+
+04:17.470 --> 04:18.490
+Let's see.
+
+04:18.520 --> 04:20.500
+Well, there we have it.
+
+04:20.530 --> 04:23.860
+It's only done a little bit better than the average.
+
+04:23.860 --> 04:25.480
+Only a little bit better.
+
+04:25.480 --> 04:32.740
+And indeed if you look at the results you can see that basically there's a small increase here, but
+
+04:32.740 --> 04:39.710
+it's clustered around the average kind of point, with some of the points coming in about $200
+
+04:39.710 --> 04:40.250
+more.
+
+04:40.250 --> 04:41.600
+And guess what?
+
+04:41.600 --> 04:47.900
+Those are going to be the ones where "is top electronics brand" is true.
+
+04:48.170 --> 04:53.300
+Uh, and so they got a little uplift, which did well for this one point here.
+
+04:53.300 --> 04:57.950
+But otherwise uh, didn't particularly work out well for the model.
+
+04:58.160 --> 05:00.680
+Uh, so it tried its best.
+
+05:00.710 --> 05:06.890
+It got a, uh, 139, um, uh, error.
+
+05:06.890 --> 05:09.170
+And it, it had a hit.
+
+05:09.200 --> 05:13.040
+It was green, uh, almost 16% of the time.
+
+05:13.340 --> 05:15.410
+So that's our linear regression model.
+
+05:15.410 --> 05:16.580
+You can do better.
+
+05:16.610 --> 05:17.300
+Come on in.
+
+05:17.330 --> 05:18.920
+Now, engineer some features.
+
+05:18.920 --> 05:20.810
+I know it's not new.
+
+05:20.930 --> 05:22.100
+Uh, great.
+ +05:22.250 --> 05:27.650 +LM data science, but it's really good to build this foundational knowledge and doing some old school + +05:27.650 --> 05:28.730 +feature engineering. + +05:28.730 --> 05:33.200 +And besides, it's going to make it all the more satisfying when we start working with LMS and see how + +05:33.200 --> 05:33.710 +they do. + +05:33.710 --> 05:37.250 +So come on in there, build some features, see how you do. + +05:37.250 --> 05:42.890 +But next time we're going to, uh, look at some more sophisticated baseline models. + +05:42.890 --> 05:43.820 +I will see you then. diff --git a/week5/community-contributions/subtitles/srts/59472413/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472413/ja_JP.srt new file mode 100755 index 0000000..2ac1127 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472413/ja_JP.srt @@ -0,0 +1,271 @@ +WEBVTT + +00:01.160 --> 00:02.000 +素晴らしい。 + +00:02.000 --> 00:11.180 +この関数は、 4つの機能を使って機能辞書を作成します。 + +00:11.210 --> 00:20.180 +例えば、 最初のトレーニングポイントについて、 フィーチャーを呼び出すことができる。 + +00:20.180 --> 00:24.980 +そして返ってくるのは、 この小さな小さな辞書だ。 + +00:25.010 --> 00:28.100 +どうやら2ポンドらしい。 2の重さである。 + +00:28.100 --> 00:30.380 +これが平均的な順位だ。 + +00:30.410 --> 00:32.510 +それが本文の長さだ。 + +00:32.510 --> 00:35.510 +しかも、 エレクトロニクスのトップブランドではない。 + +00:35.510 --> 00:39.620 +だから、 これらは私たちが設計したかなり貧弱な機能になる。 + +00:39.830 --> 00:42.980 +あなたならもっとうまくやれる。 + +00:43.220 --> 00:43.970 +分かった。 + +00:44.000 --> 00:47.660 +さあ、 機械学習の時間だ。 + +00:47.660 --> 00:54.260 +この小さなユーティリティ関数は、 アイテムのリストを受け取ってデータフレームに変換する。 + +00:54.290 --> 00:55.730 +パンダのデータフレーム。 + +00:55.730 --> 01:00.710 +これは伝統的な機械学習についてのコースではないので、 詳しく説明するつもりはない。 + +01:00.920 --> 01:05.990 +DataFramesをご存じであれば、 これはよくご存じだと思うが、 これを使ってトレーニング・データフレームとテスト・データフレームを作り、 + +01:05.990 --> 01:11.690 +テスト・データセットの上位250ポイントを選ぶ。 + +01:12.020 --> 01:13.250 +そうだ。 + +01:13.280 --> 01:16.490 +転換を果たし、 今はこれがビジネスだ。 + +01:16.490 --> 01:20.330 +ここで伝統的な線形回帰を実行する。 + +01:20.750 --> 01:23.900 +私たちは機能を設定した。 + +01:24.170 --> 01:28.880 
+ここで特徴量のカラム名を指定する。 + +01:28.910 --> 01:31.250 +ここがすべてのアクションが起こる場所だ。 + +01:31.250 --> 01:35.840 +Model equals linear regressionは、 線形回帰モデルが欲しいと言っている。 + +01:35.840 --> 01:45.050 +そして、 そのモデルをx値、 つまり特徴量に当てはめ、 y値をトレーニングデータセットの実際の価格に当てはめる。 + +01:45.080 --> 01:50.540 +そして、 ここがアクションが起こる場所であり、 モデルが実際にフィットする場所なのだ。 + +01:50.720 --> 01:57.980 +そして、 特徴量とその係数、 あるいは重み付けを印刷する。 + +01:57.980 --> 02:02.870 +そのため、 各機能がどれだけ重要であったかを知ることができる。 + +02:02.900 --> 02:09.380 +そして、 実際にテスト・セットで予測を実行し、 MSE、 + +02:09.380 --> 02:14.810 +平均二乗誤差、 r二乗などを求めます。 + +02:14.810 --> 02:19.940 +しかし、 恐れることはない。 私たちが以前に構築したフレームワークを使って、 もちろんそれを見ることができるのだ。 + +02:19.970 --> 02:22.070 +それが同じグラフに表示される。 + +02:22.070 --> 02:27.710 +だから、 平均的なモデルと比較して、 これがどこに出てくると思うか、 推測してみてほしい。 + +02:27.710 --> 02:31.460 +私たちが何を目指しているのかを思い出すために、 平均的なモデルを簡単に振り返ってみよう。 + +02:31.460 --> 02:37.310 +つまり、 平均的な推測では145ドルの誤差がある。 + +02:37.310 --> 02:41.960 +だから、 線形回帰が平均よりも良い結果を出してくれることを願っている。 + +02:41.960 --> 02:42.620 +見てみよう。 + +02:42.650 --> 02:44.000 +まずは実行してみよう。 + +02:45.470 --> 02:46.460 +早いよ。 + +02:47.090 --> 02:53.030 +つまり、 係数や重みを変えてみると、 重いものには小さな、 + +02:53.030 --> 02:58.100 +プラスの重みがつくことがわかる。 + +02:58.130 --> 03:01.400 +どのようにランク付けされるかは、 もっと大きなものだ。 + +03:01.400 --> 03:05.450 +テキストの長さは非常に小さい。 + +03:05.480 --> 03:07.460 +エレクトロニクスのトップブランドですか? + +03:07.490 --> 03:08.690 +大きな違いだ。 + +03:08.720 --> 03:11.210 +エレクトロニクスのトップブランドは、 多くのものを手に入れる。 + +03:11.720 --> 03:23.060 +さて、 それでは、 クールなテスト・ヴィジュアライザーで使用するために、 この関数を単純にラップします。 + +03:23.060 --> 03:26.530 +これを線形回帰Pricerと呼ばれる関数で包む。 + +03:26.560 --> 03:32.710 +そして、 私たちはそれをアイテムに使うだけだ。 + +03:32.710 --> 03:34.720 +私たちはそのアイテムの機能を取得します。 + +03:34.720 --> 03:37.030 +それをデータフレームに変換する。 + +03:37.030 --> 03:43.030 +そして、 その結果を予測するために線形回帰モデルを呼び出す。 + +03:43.060 --> 03:45.220 +どうなるか見てみよう。 + +03:45.490 --> 03:47.560 +テスターのドットテスト。 + +03:49.000 --> 03:51.400 +線形回帰 プライサー + +03:53.380 --> 03:54.550 +準備はできているか? + +03:54.580 --> 03:55.990 +平均的な数字が何だったか覚えている? 
+ +03:55.990 --> 03:56.860 +さあ、 始めよう。 + +03:57.010 --> 04:00.970 +あ、 あの、 前の独房でやってください。 + +04:01.480 --> 04:04.270 +ええと、 もう何度目だろう? + +04:05.650 --> 04:06.340 +バム! + +04:06.340 --> 04:08.680 +まあ、 色は見える。 + +04:08.680 --> 04:13.210 +赤が多いが、 以前より緑が増えたかもしれない。 + +04:13.240 --> 04:15.190 +たぶん、 ひどい結果にはなっていない。 + +04:15.190 --> 04:17.050 +いくつかのことはうまくいっている。 + +04:17.470 --> 04:18.490 +見てみよう。 + +04:18.520 --> 04:20.500 +まあ、 そういうことだ。 + +04:20.530 --> 04:23.860 +平均より少し良くなった程度だ。 + +04:23.860 --> 04:25.480 +少し良くなっただけだ。 + +04:25.480 --> 04:32.740 +そして、 その結果を見てみると、 基本的にはわずかな上昇が見られるが、 + +04:32.740 --> 04:40.250 +平均的なポイントに集中しており、 200ドルほど高いポイントもある。 + +04:40.250 --> 04:41.600 +そして何だと思う? + +04:41.600 --> 04:47.900 +これらのブランドは、 エレクトロニクス・ブランドであり、 トップ・エレクトロニクス・ブランドである。 + +04:48.170 --> 04:53.300 +それで、 この1点に関しては、 少し上昇した。 + +04:53.300 --> 04:57.950 +でも、 それ以外は......モデルとしては特にうまくいかなかった。 + +04:58.160 --> 05:00.680 +ベストを尽くした。 + +05:00.710 --> 05:06.890 +139エラーが出たんだ。 + +05:06.890 --> 05:09.170 +そして、 ヒットした。 + +05:09.200 --> 05:13.040 +ほぼ16%の確率でグリーンだった。 + +05:13.340 --> 05:15.410 +これが線形回帰モデルだ。 + +05:15.410 --> 05:16.580 +もっとうまくやれるはずだ。 + +05:16.610 --> 05:17.300 +どうぞお入りください。 + +05:17.330 --> 05:18.920 +では、 いくつかの機能を設計してみよう。 + +05:18.920 --> 05:20.810 +今に始まったことではないのは分かっている。 + +05:20.930 --> 05:22.100 +ああ、 素晴らしい。 + +05:22.250 --> 05:28.730 +LMのデータ・サイエンスは、 基礎的な知識を築き、 昔ながらのフィーチャー・エンジニアリングを行うのに適している。 + +05:28.730 --> 05:33.710 +それに、 LMSと一緒に仕事を始めて、 その成果を見ることができれば、 もっと満足できるだろう。 + +05:33.710 --> 05:37.250 +だから、 そこに来て、 いくつかの機能を作って、 どうやるか見てみよう。 + +05:37.250 --> 05:42.890 +でも次回は、 もう少し洗練されたベースラインモデルを見てみよう。 + +05:42.890 --> 05:43.820 +それではまた。 diff --git a/week5/community-contributions/subtitles/srts/59472413/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472413/ko_KR.srt new file mode 100755 index 0000000..bf6b2ee --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472413/ko_KR.srt @@ -0,0 +1,283 @@ +WEBVTT + +00:01.160 --> 00:02.000 +좋아요 + +00:02.000 --> 00:09.890 +멈춘 곳에서 기능 Get 기능을 만들었죠 
4가지 기능으로 기능 사전을 만드는 + +00:09.890 --> 00:11.180 +거예요 + +00:11.210 --> 00:20.180 +한 번 보죠 기능을 get 요청해 초기 훈련 지점을 호출하는 거예요 + +00:20.180 --> 00:24.980 +Get을 누르면 이 멋진 사전이 나와요 + +00:25.010 --> 00:28.100 +2파운드예요 무게로 2kg이에요 + +00:28.100 --> 00:30.380 +그게 평균 등급이죠 + +00:30.410 --> 00:32.510 +텍스트의 길이예요 + +00:32.510 --> 00:35.510 +일류 전자 제품 브랜드도 아니고요 + +00:35.510 --> 00:39.620 +그래서 우리가 만든 건 다소 빈약한 형태가 됐죠 + +00:39.830 --> 00:42.980 +더 잘할 수 있어요 더 잘하도록 해요 + +00:43.220 --> 00:43.970 +좋아요 + +00:44.000 --> 00:47.660 +이제 머신 러닝을 할 거예요 + +00:47.660 --> 00:54.260 +항목 목록을 데이터 프레임으로 변환하는 유틸리티 함수가 있어요 + +00:54.290 --> 00:55.730 +팬더스 데이터 프레임이에요 + +00:55.730 --> 01:00.710 +자세히 설명하진 않겠습니다 전통적인 머신 러닝 과목이 아니니까요 + +01:00.920 --> 01:05.990 +데이터 프레임에 대해 잘 아신다면 훈련 데이터와 테스트 데이터 프레임에 + +01:05.990 --> 01:11.690 +사용합니다 테스트 데이터 세트에서 상위 250포인트를 고르는 거죠 + +01:12.020 --> 01:13.250 +자, 됐어요 + +01:13.280 --> 01:16.490 +변환 작업을 했고 이제 비즈니스가 시작됐어요 + +01:16.490 --> 01:20.330 +전통적인 선형 회귀를 실행하는 곳이죠 + +01:20.750 --> 01:23.900 +특징을 설정하죠 + +01:24.170 --> 01:28.880 +기능의 열 이름을 지정해요 + +01:28.910 --> 01:31.250 +모든 액션이 일어나는 곳이죠 + +01:31.250 --> 01:35.840 +모델 = 선형 회귀는 우리가 선형 회귀 모델을 원한다는 뜻이죠 + +01:35.840 --> 01:45.050 +그 모델을 엑스 값, 기능, y 값에 맞추는 건 훈련 데이터 집합의 실제 가격이에요 + +01:45.080 --> 01:50.540 +여기서 액션이 일어나고 모델이 실제로 맞는 거죠 + +01:50.720 --> 01:57.980 +그런 다음 특징과 계수 무게를 출력하죠 + +01:57.980 --> 02:02.870 +그걸 보면서 각 특징이 얼마나 중요한지 알 수 있죠 get get get get it + +02:02.900 --> 02:09.380 +그런 다음 테스트 세트에서 예측을 실행합니다 평균 제곱 에러와 R 제곱을 얻기 + +02:09.380 --> 02:14.810 +위해서요 여러분 중 데이터 과학자들이 그걸 보고 싶어 하니까요 + +02:14.810 --> 02:19.940 +하지만 걱정 마세요, 곧 보게 될 테니까요 우리가 전에 만든 프레임워크를 이용해서요 + +02:19.970 --> 02:22.070 +같은 그래프에 나타나요 + +02:22.070 --> 02:27.710 +평균 모델과 비교해서 어디서 나올지 추측해 보세요 + +02:27.710 --> 02:31.460 +우리가 이기려고 하는 걸 상기시키기 위해 평균 모델을 잠깐 보죠 + +02:31.460 --> 02:37.310 +평균 오차는 145달러 146센트죠 + +02:37.310 --> 02:41.960 +선형 회귀가 평균보다 나아졌으면 좋겠어요 + +02:41.960 --> 02:42.620 +어디 보죠 + +02:42.650 --> 02:44.000 +일단 해 보죠 + +02:45.470 --> 02:46.460 +금방 끝나요 + +02:47.090 --> 
02:53.030 +계수와 무게에 따라 무게가 달라지면 무게가 작아지다가 + +02:53.030 --> 02:58.100 +양극이 되는 걸 알 수 있어요 + +02:58.130 --> 03:01.400 +순위가 더 커졌어요 + +03:01.400 --> 03:05.450 +텍스트 길이가 아주 작고 신호도 아주 낮아요 + +03:05.480 --> 03:07.460 +최고의 전자제품 브랜드인가요? + +03:07.490 --> 03:08.690 +큰 차이가 있죠 + +03:08.720 --> 03:11.210 +톱 전자 제품은 많이 받아요 Get it + +03:11.720 --> 03:20.540 +이제 이걸 함수로 래핑할 거예요 이걸 테스트용 시각화기에 사용할 + +03:20.540 --> 03:23.060 +거거든요 + +03:23.060 --> 03:26.530 +선형 회귀 프라이서라는 함수로 감싸요 + +03:26.560 --> 03:32.710 +그런 다음 우린 그냥∙∙∙ 항목을 전달하죠 + +03:32.710 --> 03:34.720 +해당 아이템의 기능을 get 할 거예요 + +03:34.720 --> 03:37.030 +그걸 데이터 프레임으로 변환할 거예요 + +03:37.030 --> 03:43.030 +그런 다음 선형 회귀 모델을 불러 어디서 올지 예측하죠 + +03:43.060 --> 03:45.220 +어떻게 되는지 보죠 + +03:45.490 --> 03:47.560 +테스터요, 점 테스터 + +03:49.000 --> 03:51.400 +선형 회귀 프라이저예요 + +03:53.380 --> 03:54.550 +마음의 준비 됐어요? + +03:54.580 --> 03:55.990 +평균을 기억하세요 + +03:55.990 --> 03:56.860 +시작할게요 + +03:57.010 --> 04:00.970 +그 전에 감옥을 실행해요 + +04:01.480 --> 04:04.270 +제가 몇 번이나 그랬죠? + +04:05.650 --> 04:06.340 +좋아요! + +04:06.340 --> 04:08.680 +색깔이 보이네요 + +04:08.680 --> 04:13.210 +붉은색이 많이 보이는데 전보다 녹색이 더 많아요 + +04:13.240 --> 04:15.190 +그렇게 끔찍하진 않았을 거예요 + +04:15.190 --> 04:17.050 +제대로 하고 있어요 + +04:17.470 --> 04:18.490 +어디 보죠 + +04:18.520 --> 04:20.500 +자, 다 됐어요 + +04:20.530 --> 04:23.860 +평균보다 약간 더 비트 박스가 잘 나왔어요 + +04:23.860 --> 04:25.480 +비트보다 조금 더 낫네요 + +04:25.480 --> 04:32.740 +실제로 결과를 보시면 기본적으로 여기에 약간 증가한 것을 볼 수 있습니다. 하지만 + +04:32.740 --> 04:40.250 +평균 점 부근에 클러스터되어 있습니다. 어떤 점들은 약 200달러 더 증가하고요. + +04:40.250 --> 04:41.600 +그거 알아요? + +04:41.600 --> 04:47.900 +전자 제품 브랜드가 최고라는 걸 보여주는 제품들이 될 거예요 + +04:48.170 --> 04:53.300 +그래서 약간 들뜬 상태였고 한 시점에서 잘 풀렸어요 + +04:53.300 --> 04:57.950 +하지만 그 외에는 모델에게 도움이 되지 않았어요 + +04:58.160 --> 05:00.680 +최선을 다했어요 + +05:00.710 --> 05:06.890 +139... 
에러가 났네요 + +05:06.890 --> 05:09.170 +그리고 히트 쳤죠 + +05:09.200 --> 05:13.040 +16% 정도는 녹색이었어요 + +05:13.340 --> 05:15.410 +이게 우리 선형 회귀 모델이에요 + +05:15.410 --> 05:16.580 +더 잘할 수 있잖아요 + +05:16.610 --> 05:17.300 +들어오세요 + +05:17.330 --> 05:18.920 +이제 기능을 설계해 보죠 + +05:18.920 --> 05:20.810 +새롭진 않죠 + +05:20.930 --> 05:22.100 +좋아요 + +05:22.250 --> 05:27.650 +기본 지식을 쌓는 것도 좋지만 구식 기능 공학도 + +05:27.650 --> 05:28.730 +좋아요 + +05:28.730 --> 05:33.200 +게다가 LMS로 작업을 시작하면 훨씬 더 만족스러울 겁니다 어떻게 되는지 + +05:33.200 --> 05:33.710 +보죠 + +05:33.710 --> 05:37.250 +들어가서 기능들을 만들어 보세요 + +05:37.250 --> 05:42.890 +하지만 다음 시간에는 좀 더 정교한 기본 모델을 살펴볼 거예요 + +05:42.890 --> 05:43.820 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59472421/en_US.srt b/week5/community-contributions/subtitles/srts/59472421/en_US.srt new file mode 100755 index 0000000..65c2226 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472421/en_US.srt @@ -0,0 +1,295 @@ +WEBVTT + +00:00.110 --> 00:05.510 +And welcome back to our final time in Jupyter Lab with traditional machine learning. + +00:05.510 --> 00:07.100 +It's almost over. + +00:07.130 --> 00:09.170 +Personally, I find it a lot of fun. + +00:09.260 --> 00:12.290 +I hope, I hope it hasn't been too unbearable for you. + +00:12.320 --> 00:17.600 +Uh, it's a great experience to have had, though, and I'm really hoping that you've been playing around + +00:17.600 --> 00:23.000 +yourself, adding some more features, doing some more experiments, seeing if you can't get more out + +00:23.000 --> 00:23.660 +of this. + +00:23.660 --> 00:29.150 +This was the last chart we looked at, which was word two vec getting an error of 115 on average. + +00:29.180 --> 00:35.390 +And you may remember that we did better with that with the original Bag of Words NLP model that got + +00:35.390 --> 00:39.320 +us to, I think 114 113.6 or something. + +00:39.680 --> 00:46.010 +So, um, what we're now going to do is unveil the last two models. 
+ +00:46.040 --> 00:54.980 +We're going to use support vector regression from Support Vector Machines, uh, which is a fancy schmancy + +00:55.010 --> 00:56.630 +traditional machine learning technique. + +00:56.630 --> 01:03.070 +When you take your, your, your data points and you try and fit a hyperplane that separates the data + +01:03.100 --> 01:08.560 +using things called support vectors, which are the the vectors with the points that are closest to + +01:08.590 --> 01:09.730 +the hyperplane. + +01:09.820 --> 01:14.530 +This may be nonsense to you, or it may be stuff that you know back to front and that I'm not explaining + +01:14.530 --> 01:15.100 +it well. + +01:15.100 --> 01:16.840 +In either case, it doesn't matter. + +01:16.840 --> 01:22.630 +We're just going to take the library as it is from scikit learn, which is so easy to use. + +01:22.750 --> 01:24.970 +We are using a linear SVR. + +01:25.000 --> 01:30.550 +There are other types with different kernels that maybe give better results, but they take ages to + +01:30.580 --> 01:31.000 +run. + +01:31.000 --> 01:36.700 +This one runs very quickly, almost too quickly, which makes me think maybe I'm not using it to to + +01:36.730 --> 01:37.630 +its best. + +01:37.720 --> 01:44.380 +Um, but I have already run it and it took about five seconds, but the one I used with a different + +01:44.380 --> 01:47.890 +kernel I ran all night and still hadn't finished. + +01:47.890 --> 01:53.920 +So maybe that's somewhere in the middle that that is something that you may be able to to, to explore. + +01:54.070 --> 01:58.480 +But this was the the best that I could do. + +01:58.510 --> 02:02.610 +Uh, and let's see how it performs. + +02:02.640 --> 02:03.240 +Are you ready? + +02:03.270 --> 02:04.080 +Put in your bets. + +02:04.080 --> 02:05.700 +And now I will run it. + +02:05.970 --> 02:06.990 +No I won't. + +02:07.290 --> 02:09.510 +Oh, there we go. + +02:09.600 --> 02:10.470 +That works. 
+ +02:10.890 --> 02:12.360 +Uh, okay. + +02:12.360 --> 02:17.190 +So lots of yellows, lots of reds, lots of greens. + +02:17.190 --> 02:22.740 +It's obviously not crushing it, but there's some, uh, looks not terrible. + +02:22.740 --> 02:25.860 +Let's see how that does when we get to the charts. + +02:26.940 --> 02:33.210 +Well, so, uh, it is a winner so far. + +02:33.240 --> 02:35.220 +112.5. + +02:35.250 --> 02:43.170 +It is a hair better than the, uh, the prior winner, which was the bag of words linear regression + +02:43.170 --> 02:43.680 +model. + +02:43.710 --> 02:49.980 +You can see visually that there's some good things going on, but obviously it's struggling to estimate, + +02:50.070 --> 02:52.740 +um, much above the average point. + +02:52.860 --> 02:58.950 +Uh, so you can see that there's some progress, but not tremendous progress. + +02:59.400 --> 03:03.590 +That is our support vector regression model. + +03:03.770 --> 03:11.450 +And now that brings us to our last one, our last model, which is a random forest regression random + +03:11.450 --> 03:11.990 +forest. + +03:12.020 --> 03:13.610 +A particular technique. + +03:13.610 --> 03:19.040 +It's a type of ensemble technique that involves combining lots of smaller models. + +03:19.250 --> 03:27.050 +The models that it combines, each of them take a random sample of your data points and a random sample + +03:27.050 --> 03:32.270 +of your features, which in our case means different chunks of our vectors. + +03:32.390 --> 03:38.900 +Uh, and trains many models based on that and then combines all of those models. + +03:38.900 --> 03:45.350 +In the case of a regression, it takes the average across all of these mini models, and that is called + +03:45.350 --> 03:47.000 +a random forest. + +03:47.090 --> 03:49.610 +So we will see how that works. + +03:49.610 --> 03:55.700 +These are generally known to perform well for all shapes and sizes of datasets. 
+
+03:55.730 --> 03:59.540
+And they're good in that they don't have a lot of hyper parameters.
+
+03:59.570 --> 04:04.520
+Hyper parameters are what people call just extra knobs to tweak extra things.
+
+04:04.520 --> 04:06.800
+You have to try lots of different values for.
+
+04:07.100 --> 04:09.230
+Random forests don't have a lot of them.
+
+04:09.230 --> 04:11.480
+You just use it as it is and see how it does.
+
+04:11.480 --> 04:15.560
+So we've used it as it is and now we will see how it does.
+
+04:15.590 --> 04:19.970
+Tester dot test and we pass in random forest processor.
+
+04:19.970 --> 04:21.680
+And again put in your bets.
+
+04:21.980 --> 04:25.880
+Uh, do you think the random forest is going to do better or worse?
+
+04:25.910 --> 04:28.010
+112 is the number to beat.
+
+04:28.010 --> 04:30.740
+Let's see how traditional machine learning performs.
+
+04:30.740 --> 04:31.790
+We see some greens.
+
+04:31.790 --> 04:34.130
+We see some reds, we see some greens.
+
+04:34.370 --> 04:36.830
+It's a little bit slower to run.
+
+04:36.860 --> 04:42.020
+We're seeing some greens, greens, greens, reds, lots of reds.
+
+04:42.320 --> 04:45.230
+But generally there we have it.
+
+04:45.230 --> 04:46.820
+There we have it.
+
+04:46.820 --> 04:50.060
+So random forest for the win.
+
+04:50.090 --> 04:52.940
+The error is $97.
+
+04:52.940 --> 04:54.920
+It's come in under 100.
+
+04:54.950 --> 04:56.780
+We have a nine handle.
+
+04:56.780 --> 04:58.610
+We've come in under $100.
+
+04:58.640 --> 04:59.930
+Our best so far.
+
+04:59.930 --> 05:02.370
+34% of the dots are green.
+
+05:02.550 --> 05:03.840
+Here is our line.
+
+05:03.840 --> 05:05.130
+Here are the green dots.
+
+05:05.130 --> 05:09.450
+It's also had a bit of a problem predicting above the average, but not too bad.
+
+05:09.720 --> 05:11.310
+You see how well it did with that guy there.
+
+05:11.340 --> 05:14.220
+It came in green for the really expensive item.
+ +05:14.340 --> 05:23.340 +Uh, and uh uh, it's generally it's generally fared pretty well I would say certainly are running winner. + +05:23.340 --> 05:25.410 +Congratulations to Random Forest. + +05:25.500 --> 05:27.450 +Uh, and of course, congratulations to you. + +05:27.450 --> 05:34.710 +If you've beaten this, you can do things like you can use random forest, but put in not only the vectors + +05:34.710 --> 05:38.160 +that we've just come up with, but you can add in features as well. + +05:38.160 --> 05:45.540 +You can manufacture engineer some features and shove them in as well and use that to try and beat beat + +05:45.540 --> 05:49.110 +this number, get do better than than 97. + +05:49.230 --> 05:55.530 +Uh, and see how you do have fun with traditional machine learning, because this is going to be the + +05:55.530 --> 05:59.640 +end of it before we move on to trying out LMS. + +05:59.640 --> 06:02.730 +But first, a quick wrap up with the slides. diff --git a/week5/community-contributions/subtitles/srts/59472421/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472421/ja_JP.srt new file mode 100755 index 0000000..84dd968 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472421/ja_JP.srt @@ -0,0 +1,253 @@ +WEBVTT + +00:00.110 --> 00:05.510 +そして、 Jupyter Labでの伝統的な機械学習の最終回へようこそ。 + +00:05.510 --> 00:07.100 +もうすぐ終わる。 + +00:07.130 --> 00:09.170 +個人的には、 とても楽しいと思う。 + +00:09.260 --> 00:12.290 +あなたにとって耐え難いことでなければいいのですが。 + +00:12.320 --> 00:23.660 +もっと機能を追加したり、 実験したり、 もっとこれを活用できないか試したりしてほしい。 + +00:23.660 --> 00:29.150 +このチャートは最後に見たもので、 平均115のエラーを記録したワード2ベックだった。 + +00:29.180 --> 00:39.320 +そして、 「バッグ・オブ・ワード」NLPのオリジナル・モデルで、 114 113に到達したことを覚えていらっしゃるかもしれません。 + +00:39.320 --> 00:39.320 +6とかなんとか。 + +00:39.680 --> 00:46.010 +それで、 ええと、 これから最後の2つのモデルを発表します。 + +00:46.040 --> 00:56.630 +サポート・ベクトル・マシンのサポート・ベクトル回帰を使う。 + +00:56.630 --> 01:03.070 +データ点を取り、 サポート・ベクトルと呼ばれる、 超平面に最も近い点を持つベクトルを使って、 + +01:03.100 --> 01:09.730 +データを分離する超平面に当てはめようとします。 + +01:09.820 
--> 01:15.100 +あなたにとってはナンセンスなことかもしれないし、 私がうまく説明できていないだけで、 あなたが知っていることかもしれない。 + +01:15.100 --> 01:16.840 +いずれにせよ、 それは問題ではない。 + +01:16.840 --> 01:22.630 +使いやすいscikit learnのライブラリをそのまま使おう。 + +01:22.750 --> 01:24.970 +我々は線形SVRを使用している。 + +01:25.000 --> 01:31.000 +カーネルが異なる他のタイプもあり、 より良い結果が得られるかもしれないが、 実行に時間がかかる。 + +01:31.000 --> 01:37.630 +これは非常に速く、 速すぎるくらいだ。 + +01:37.720 --> 01:44.380 +ええと、 でも、 すでに実行したところ、 5秒ほどで終わったんですが、 別のカーネルで使ったものは、 + +01:44.380 --> 01:47.890 +一晩中実行してもまだ終わりませんでした。 + +01:47.890 --> 01:53.920 +だから、 その真ん中あたりを探ってみるのもいいかもしれない。 + +01:54.070 --> 01:58.480 +でも、 これが精一杯だった。 + +01:58.510 --> 02:02.610 +どうなるか見てみよう。 + +02:02.640 --> 02:03.240 +準備はできているか? + +02:03.270 --> 02:04.080 +賭け金を入れる。 + +02:04.080 --> 02:05.700 +そして今、 私はそれを実行する。 + +02:05.970 --> 02:06.990 +いや、 しないよ。 + +02:07.290 --> 02:09.510 +そうだ。 + +02:09.600 --> 02:10.470 +うまくいったよ。 + +02:10.890 --> 02:12.360 +ああ、 わかった。 + +02:12.360 --> 02:17.190 +だから黄色、 赤、 緑が多い。 + +02:17.190 --> 02:22.740 +明らかに砕けてはいないが、 いくつか、 あー、 ひどくはなさそうだ。 + +02:22.740 --> 02:25.860 +チャートを見てみよう。 + +02:26.940 --> 02:33.210 +まあ、 だから、 今のところ勝者だよ。 + +02:33.240 --> 02:35.220 +112. 5. 
+ +02:35.250 --> 02:43.680 +その前の勝者であるバッグ・オブ・ワード線形回帰モデルよりは少しましだ。 + +02:43.710 --> 02:52.740 +視覚的に、 良いことがいくつか起こっているのがわかると思うが、 明らかに平均点よりはるかに上の点を見積もろうと苦戦している。 + +02:52.860 --> 02:58.950 +多少の進歩は見られるが、 大きな進歩はない。 + +02:59.400 --> 03:03.590 +これがサポート・ベクトル回帰モデルである。 + +03:03.770 --> 03:11.990 +最後のモデルは、 ランダムフォレスト回帰ランダムフォレストだ。 + +03:12.020 --> 03:13.610 +特殊なテクニック。 + +03:13.610 --> 03:19.040 +アンサンブル・テクニックの一種で、 小さなモデルをたくさん組み合わせる。 + +03:19.250 --> 03:32.270 +組み合わせるモデルは、 それぞれデータポイントのランダムなサンプルと特徴のランダムなサンプルを取る。 + +03:32.390 --> 03:38.900 +そして、 それに基づいて多くのモデルを訓練し、 それらのモデルをすべて組み合わせる。 + +03:38.900 --> 03:47.000 +回帰の場合、 これらのミニモデルの平均をとり、 それをランダムフォレストと呼ぶ。 + +03:47.090 --> 03:49.610 +だから、 それがどう作用するか見てみよう。 + +03:49.610 --> 03:55.700 +これらは一般に、 あらゆる形や大きさのデータセットに対して優れた性能を発揮することが知られている。 + +03:55.730 --> 03:59.540 +ハイパーパラメーターが少ないのもいい。 + +03:59.570 --> 04:04.520 +ハイパーパラメーターは、 余計なことを微調整するための単なる追加ノブと呼ばれるものだ。 + +04:04.520 --> 04:06.800 +いろいろな値を試してみる必要がある。 + +04:07.100 --> 04:09.230 +ランダムフォレストにはあまりない。 + +04:09.230 --> 04:11.480 +そのまま使って様子を見るだけだ。 + +04:11.480 --> 04:15.560 +だから、 このまま使って、 これから様子を見ようというわけだ。 + +04:15.590 --> 04:19.970 +テスターがドットテストを行い、 ランダムフォレストプロセッサーでパスした。 + +04:19.970 --> 04:21.680 +そして、 また賭けをする。 + +04:21.980 --> 04:25.880 +ええと、 ランダムフォレストの方が良い結果になると思いますか、 それとも悪い結果になると思いますか? 
+ +04:25.910 --> 04:28.010 +112は倒すべき数字だ。 + +04:28.010 --> 04:30.740 +従来の機械学習のパフォーマンスを見てみよう。 + +04:30.740 --> 04:31.790 +グリーンも見える。 + +04:31.790 --> 04:34.130 +赤もあれば緑もある。 + +04:34.370 --> 04:36.830 +走るのに少し時間がかかる。 + +04:36.860 --> 04:42.020 +グリーン、 グリーン、 グリーン、 赤、 たくさんの赤が見られる。 + +04:42.320 --> 04:45.230 +しかし、 一般的にはこうだ。 + +04:45.230 --> 04:46.820 +そうだ。 + +04:46.820 --> 04:50.060 +つまり、 ランダムフォレストの勝利だ。 + +04:50.090 --> 04:52.940 +誤差は97ドル。 + +04:52.940 --> 04:54.920 +100を切ってきた。 + +04:54.950 --> 04:56.780 +ハンドルは9本だ。 + +04:56.780 --> 04:58.610 +我々は100ドル以下でやってきた。 + +04:58.640 --> 04:59.930 +これまでのベストだ。 + +04:59.930 --> 05:02.370 +点の34%が緑。 + +05:02.550 --> 05:03.840 +これが我々のラインだ。 + +05:03.840 --> 05:05.130 +これが緑の点だ。 + +05:05.130 --> 05:09.450 +また、 平均以上の成績を残すことに少し問題があるが、 それほど悪くはない。 + +05:09.720 --> 05:11.310 +あの男がいたことで、 どれだけうまくいったかわかるだろう。 + +05:11.340 --> 05:14.220 +本当に高価なものは緑色だった。 + +05:14.340 --> 05:23.340 +そして......一般的には......かなり良い結果を残している。 + +05:23.340 --> 05:25.410 +おめでとう、 ランダムフォレスト。 + +05:25.500 --> 05:27.450 +そしてもちろん、 おめでとう。 + +05:27.450 --> 05:34.710 +これを克服したなら、 ランダムフォレストを使うこともできますが、 今思いついたベクトルだけでなく、 + +05:34.710 --> 05:38.160 +特徴量も加えることができます。 + +05:38.160 --> 05:49.110 +いくつかの機能を製造し、 それを押し込むことで、 この数字に打ち勝ち、 97よりも良い結果を出そうとすることができる。 + +05:49.230 --> 05:59.640 +LMSの試用に移る前に、 従来の機械学習をどう楽しむかを見てほしい。 + +05:59.640 --> 06:02.730 +その前に、 スライドで簡単にまとめよう。 diff --git a/week5/community-contributions/subtitles/srts/59472421/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472421/ko_KR.srt new file mode 100755 index 0000000..36225da --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472421/ko_KR.srt @@ -0,0 +1,283 @@ +WEBVTT + +00:00.110 --> 00:05.510 +주피터 연구소에서 마지막 시간을 보내게 됐습니다 전통적인 머신 러닝이죠 + +00:05.510 --> 00:07.100 +거의 다 끝났어요 + +00:07.130 --> 00:09.170 +개인적으로는 정말 재미있어요 + +00:09.260 --> 00:12.290 +너무 견디기 힘들진 않았길 바라요 + +00:12.320 --> 00:17.600 +그래도 멋진 경험이었어요 스스로 실험해 보셨길 바라요 기능을 + +00:17.600 --> 00:23.660 +추가하고 실험하면서 더 많은 걸 얻을 수 있는지 보세요 get it + +00:23.660 --> 
00:29.150 +이게 마지막으로 본 도표인데 단어 2개가 평균 115 오류로 나왔어요 + +00:29.180 --> 00:35.390 +기억하실지 모르겠지만 원래 Wards NLP 모델에선 더 잘했죠 114 113까지 + +00:35.390 --> 00:39.320 +가능했던 것 같아요 6살쯤요 + +00:39.680 --> 00:46.010 +이제 마지막 모델 두 개를 공개할 거예요 + +00:46.040 --> 00:54.980 +벡터 머신의 벡터 회귀를 지원할 겁니다 전통적인 머신 러닝 + +00:55.010 --> 00:56.630 +기술이죠 + +00:56.630 --> 01:03.070 +데이터 포인트를 가지고 초고층 행성에 맞춰 데이터를 분리할 때 서포트 + +01:03.100 --> 01:08.560 +벡터라는 것을 이용합니다 초고층 행성에 가장 가까운 지점의 + +01:08.590 --> 01:09.730 +벡터죠 + +01:09.820 --> 01:14.530 +여러분에겐 말도 안 되는 얘기일 수도 있고 이미 아는 걸 제가 잘 설명하지 못하는 걸 수도 + +01:14.530 --> 01:15.100 +있죠 + +01:15.100 --> 01:16.840 +어느 쪽이든 상관없어요 + +01:16.840 --> 01:22.630 +라이브러리를 스키킷 배우기에서 그대로 가져올 거예요 사용하기 아주 쉽죠 + +01:22.750 --> 01:24.970 +선형 SVR을 사용해요 + +01:25.000 --> 01:31.000 +더 나은 결과를 내는 다른 종류의 알갱이들도 있지만 실행하는 데 오래 걸리죠 + +01:31.000 --> 01:36.700 +이건 너무 빨리 돌아가요 너무 빠르죠 그래서 제가 제대로 사용하지 않는다는 생각이 + +01:36.730 --> 01:37.630 +들어요 + +01:37.720 --> 01:44.380 +이미 실행해봤고 5초 정도 걸렸지만 다른 커널로 사용한 건 밤새 + +01:44.380 --> 01:47.890 +실행했는데도 아직 안 끝났어요 + +01:47.890 --> 01:53.920 +그 중간쯤인 것 같아요 한번 살펴봐 주세요 + +01:54.070 --> 01:58.480 +하지만 이게 제가 할 수 있는 최선이었어요 + +01:58.510 --> 02:02.610 +어떻게 작동하는지 보죠 + +02:02.640 --> 02:03.240 +준비됐어요? + +02:03.270 --> 02:04.080 +Put it! Put it! 
베팅하세요 + +02:04.080 --> 02:05.700 +이제 실행할게요 + +02:05.970 --> 02:06.990 +안 그럴게요 + +02:07.290 --> 02:09.510 +저기 있네요 + +02:09.600 --> 02:10.470 +좋아요 + +02:10.890 --> 02:12.360 +네 + +02:12.360 --> 02:17.190 +노랑, 빨강, 초록이 많이 들어갔어요 + +02:17.190 --> 02:22.740 +확실히 으깨지진 않지만 심각해 보이지는 않네요 + +02:22.740 --> 02:25.860 +Get-Tuck Season 1 닙턱 시즌 1 차트에서는 어떨지 보죠 + +02:26.940 --> 02:33.210 +지금까지는 아주 좋아요 + +02:33.240 --> 02:35.220 +112살요 5분 + +02:35.250 --> 02:43.680 +이전 우승자보다 머리카락이 더 낫네요 그 사람은 단어 한 자루로 선형 회귀 모델이었어요 + +02:43.710 --> 02:49.980 +겉으로 보기에는 좋은 점이 많지만 평균보다 훨씬 높은 점수를 + +02:50.070 --> 02:52.740 +추정하기는 어렵네요 + +02:52.860 --> 02:58.950 +진전이 있긴 하지만 대단한 진전은 없어요 + +02:59.400 --> 03:03.590 +지원 벡터 회귀 모델이에요 + +03:03.770 --> 03:11.990 +이제 마지막 모델로 넘어가죠 임의 숲 임의 숲 퇴행 임의 숲이에요 + +03:12.020 --> 03:13.610 +특별한 기술이죠 + +03:13.610 --> 03:19.040 +작은 모델들을 조합하는 일종의 앙상블 기술이에요 + +03:19.250 --> 03:27.050 +그게 결합하는 모델은 각각 데이터 포인트와 기능의 무작위 샘플을 취합니다 + +03:27.050 --> 03:32.270 +우리 경우엔 벡터의 다른 덩어리를 의미하죠 + +03:32.390 --> 03:38.900 +그걸 바탕으로 훈련하고 그 모델들을 전부 합쳤어요 + +03:38.900 --> 03:45.350 +회귀의 경우 이 미니 모델들의 평균을 취합니다 그걸 임의 숲이라고 + +03:45.350 --> 03:47.000 +하죠 + +03:47.090 --> 03:49.610 +어떻게 되는지 보죠 + +03:49.610 --> 03:55.700 +데이터셋의 형태와 크기에 상관없이 잘 작동한다고 알려져 있죠 + +03:55.730 --> 03:59.540 +하이퍼 매개 변수가 많지 않다는 점에서 좋아요 + +03:59.570 --> 04:04.520 +하이퍼 매개변수는 추가적인 것을 수정하기 위한 추가적인 노브라고 부르죠 + +04:04.520 --> 04:06.800 +다양한 가치를 시도해 봐야 해요 + +04:07.100 --> 04:09.230 +아무 숲이나 있는 건 아니죠 + +04:09.230 --> 04:11.480 +있는 그대로 사용하고 어떻게 되는지 보는 거죠 + +04:11.480 --> 04:15.560 +있는 그대로 사용했으니 어떻게 되는지 보죠 + +04:15.590 --> 04:19.970 +테스터 닷 테스트 무작위 처리기로 통과했어요 + +04:19.970 --> 04:21.680 +다시 한 번 베팅을 걸어요 Put + +04:21.980 --> 04:25.880 +무작위 숲이 더 잘할까요? 아니면 더 못할까요? 
+ +04:25.910 --> 04:28.010 +112가 넘어야 할 숫자죠 + +04:28.010 --> 04:30.740 +전통적인 머신 러닝은 어떤지 보죠 + +04:30.740 --> 04:31.790 +녹색도 좀 보이고요 + +04:31.790 --> 04:34.130 +붉은색도 있고 녹색도 있어요 + +04:34.370 --> 04:36.830 +뛰는 건 비트보다 좀 느려요 + +04:36.860 --> 04:42.020 +녹색과 붉은색이 아주 많이 보여요 + +04:42.320 --> 04:45.230 +하지만 일반적으로 그런 게 있어요 + +04:45.230 --> 04:46.820 +다 됐어요 + +04:46.820 --> 04:50.060 +무작위 숲이 이겼어요 + +04:50.090 --> 04:52.940 +오류는 97달러예요 + +04:52.940 --> 04:54.920 +10만 달러도 안 돼요 + +04:54.950 --> 04:56.780 +손잡이가 9개예요 + +04:56.780 --> 04:58.610 +100달러도 안 돼요 + +04:58.640 --> 04:59.930 +지금까지는 최고였죠 + +04:59.930 --> 05:02.370 +34%가 초록색이에요 + +05:02.550 --> 05:03.840 +우리 대사예요 + +05:03.840 --> 05:05.130 +여기 초록색 점들이 있고요 + +05:05.130 --> 05:09.450 +비트를 예상하는 데 문제가 좀 있었지만 그렇게 나쁘진 않았어요 + +05:09.720 --> 05:11.310 +저 사람 때문에 얼마나 잘 됐는지 보세요 + +05:11.340 --> 05:14.220 +정말 비싼 제품인데 녹색이네요 + +05:14.340 --> 05:23.340 +그리고 대체로 잘 진행되고 있어요 확실히 우승 후보라고 할 수 있죠 + +05:23.340 --> 05:25.410 +축하해요, 랜덤 포레스트 + +05:25.500 --> 05:27.450 +물론 축하할 일이죠 + +05:27.450 --> 05:34.710 +이걸 극복했다면 난수 포레스트 같은 걸 쓸 수 있어요 방금 나온 벡터만 넣는 + +05:34.710 --> 05:38.160 +게 아니라 기능을 추가할 수도 있죠 + +05:38.160 --> 05:45.540 +어떤 기능을 만들어 끼워 넣을 수도 있죠 그걸 이용해서 이 기록을 깰 수도 있고요 97보다 + +05:45.540 --> 05:49.110 +더 잘 만들 수도 있죠 get it + +05:49.230 --> 05:55.530 +전통적인 머신 러닝을 어떻게 즐기는지 보죠 LMS를 + +05:55.530 --> 05:59.640 +시험하기 전에 이게 끝이니까요 + +05:59.640 --> 06:02.730 +그 전에 슬라이드로 간단히 마무리하죠 diff --git a/week5/community-contributions/subtitles/srts/59472425/en_US.srt b/week5/community-contributions/subtitles/srts/59472425/en_US.srt new file mode 100755 index 0000000..ce9cb7f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472425/en_US.srt @@ -0,0 +1,295 @@ +WEBVTT + +00:00.590 --> 00:03.110 +Welcome to week six, day three. + +00:03.140 --> 00:09.950 +Today is going to be a day that you will either love or you will hate, depending on your particular + +00:09.950 --> 00:12.470 +preference, but I'm sure there's going to be no middle ground. 
+ +00:12.500 --> 00:18.410 +It's either going to be a great day or a miserable day, depending on whether you like or are interested + +00:18.410 --> 00:24.380 +in traditional machine learning, because today we're going to take a step back in time and look at + +00:24.380 --> 00:31.160 +some foundational machine learning and get some practical examples of how things used to be. + +00:31.370 --> 00:36.740 +So with that intro, and it's only for one day, I'm sure you can put up with it, even if it is something + +00:36.740 --> 00:38.270 +that you find very unsavory. + +00:38.420 --> 00:45.500 +Well, as a quick reminder, what you can already do is work with frontier models, building AI assistants + +00:45.500 --> 00:51.020 +with tools, and with open source models like hugging face with pipelines and tokenizers and models. + +00:51.020 --> 00:55.520 +You can use long chain to build a complete Rag pipeline. + +00:55.520 --> 01:01.010 +And in fact, as you saw, it's actually quite, quite perfectly possible at least to do it without + +01:01.010 --> 01:01.910 +long chain as well. + +01:01.910 --> 01:06.330 +It's just quicker if you use line chain, but you know that there's not anything particularly magic + +01:06.360 --> 01:07.140 +about rag. + +01:07.170 --> 01:13.020 +And then we've talked about a five step strategy to solve commercial problems. + +01:13.020 --> 01:16.140 +And we've got really, really deep with data. + +01:16.140 --> 01:18.030 +I hope it wasn't too deep. + +01:18.120 --> 01:20.430 +Hopefully you've survived the experience. + +01:20.430 --> 01:21.750 +We did a lot of work with data. + +01:21.750 --> 01:23.460 +We saw lots of charts. + +01:23.490 --> 01:29.760 +You hopefully by now are very familiar with the item class and the item loader more than than perhaps + +01:29.760 --> 01:31.350 +you'd ever intended to be. + +01:31.560 --> 01:34.740 +But at this point we know our data back to front. + +01:35.010 --> 01:38.250 +So today we talk about baselines. 
+ +01:38.280 --> 01:41.550 +I'm going to talk about what a baseline model is and why it's so important. + +01:41.550 --> 01:47.790 +And then we are going to at least I'm going to have an absolute blast playing with some baseline models + +01:47.790 --> 01:54.270 +and exploring more traditional machine learning to see how good we can do without using all this fancy + +01:54.270 --> 01:55.920 +schmancy LM stuff. + +01:56.040 --> 02:00.060 +Um, before, uh, tomorrow, we turn to the frontier. + +02:00.060 --> 02:04.730 +So without further ado, let's talk a bit about a baseline. + +02:04.730 --> 02:11.540 +So it's mostly common sense stuff that if you're looking to tackle a problem you start simple. + +02:12.200 --> 02:20.600 +But in particular it's it's something which is fundamentally important in the world of data science + +02:20.600 --> 02:22.520 +for really a couple of reasons. + +02:22.790 --> 02:29.000 +The obvious one is that it gives us a sort of yardstick, which we can use to measure progress against. + +02:29.000 --> 02:36.110 +If we start with something simple and traditional, then we know we're using, uh, sophisticated deep + +02:36.110 --> 02:37.190 +neural networks properly. + +02:37.190 --> 02:43.070 +When we see the needle moving and we see ourselves achieving far greater heights without that baseline, + +02:43.070 --> 02:48.530 +we just don't know whether we're getting fabulous results or whether we're just making small steps in + +02:48.530 --> 02:50.570 +a unpleasant direction. + +02:50.750 --> 02:52.970 +So obviously it gives us that yardstick. + +02:53.090 --> 02:57.920 +Uh, but there's another thing, too, which is that llms are not always the right solutions. + +02:57.920 --> 03:03.650 +In fact, in the specific business problem we're setting out to solve around, uh, around predicting + +03:03.650 --> 03:05.100 +prices of products. 
+ +03:05.100 --> 03:11.220 +It's not immediately obvious at all that llms are the right solution because typically, as I said before, + +03:11.250 --> 03:20.370 +generating a price, a number from a description seems like it's more traditional NLP and linear regression, + +03:20.370 --> 03:23.970 +so it feels like it belongs in the field of machine learning. + +03:24.000 --> 03:25.560 +A traditional machine learning. + +03:25.590 --> 03:29.850 +Um, and that makes it even more important to build a baseline, because for all we know, we'll do + +03:29.850 --> 03:33.600 +that and then we'll try out frontier models and they won't do any better. + +03:33.600 --> 03:38.820 +So it's obvious stuff, but it explains why we do this. + +03:39.000 --> 03:43.140 +So what models are we going to be playing with today. + +03:43.170 --> 03:44.310 +And it's only one day. + +03:44.310 --> 03:46.860 +It's only one time that we're going to spend doing this. + +03:46.860 --> 03:50.550 +And you know, it's really worth it if you're already super familiar with these models. + +03:50.550 --> 03:55.710 +And it's just going to be an interesting quick experiment with our particular commercial problem if + +03:55.710 --> 04:00.990 +you're new to them, I'm not going to go into tons of detail on them, but it will give you a good sense + +04:00.990 --> 04:02.550 +of the perspective. + +04:02.940 --> 04:06.620 +Um, so the first thing we're going to do is we're going to take our business problem. + +04:06.620 --> 04:09.080 +We're going to do something that's very old school. + +04:09.080 --> 04:11.870 +We're going to do what they call feature engineering. + +04:11.870 --> 04:18.350 +When we understand the data and we say, okay, what do we think are going to be some of the important + +04:18.350 --> 04:21.530 +factors which are likely to affect the price? + +04:21.530 --> 04:25.220 +And we try and come up with these things that we will call features. 
+ +04:25.220 --> 04:32.270 +And we'll come up with some pretty obvious features, like how how do they rank in Amazon's best seller + +04:32.270 --> 04:33.680 +rank, that kind of thing. + +04:33.980 --> 04:41.060 +And we will then try and see whether some linear combination of these features does a good job of predicting + +04:41.060 --> 04:42.500 +the price or not. + +04:42.500 --> 04:47.720 +And that is often the place where you start when you're dealing with a machine learning model. + +04:48.050 --> 04:54.770 +We're then going to do something called Bag of Words, which is one of the first our first forays into + +04:54.860 --> 04:56.180 +natural language processing. + +04:56.210 --> 05:02.240 +NLP Bag of Words is a particularly simplistic approach, where you quite literally count up the number + +05:02.240 --> 05:08.550 +of words and you build yourself a little vector that consists of just how many times does each particular + +05:08.550 --> 05:11.130 +word feature in this description? + +05:11.130 --> 05:15.930 +So if you have a word, you one doesn't include what are known as stop words, which are words like + +05:15.930 --> 05:19.620 +the which, which aren't going to make much difference to anything. + +05:19.830 --> 05:26.880 +But if there's a word like Intel, uh, which may indicate that it's a laptop or a computer that would + +05:26.880 --> 05:30.600 +have a certain value, Intel might be one of the words in our vocab. + +05:30.600 --> 05:35.580 +And depending on whether that appears or not or if it does, how many times it appears that will affect + +05:35.580 --> 05:37.200 +that location. + +05:37.290 --> 05:44.670 +Uh, in our in in this bag of words, this list of counts of words in each product. + +05:44.850 --> 05:50.250 +And then we're going to take that bag of words and again see if there's some linear combination of these + +05:50.250 --> 05:55.470 +different words that, when combined together predicts the price of a product. 
+ +05:56.220 --> 06:01.980 +We then going to use something called word two vec, which I mentioned some time ago, which was one + +06:01.980 --> 06:10.310 +of the first real sort of, uh, neural network, um, Encoding algorithms that could produce a vector + +06:10.310 --> 06:13.100 +in a way that is rather smarter than a bag of words. + +06:13.100 --> 06:15.950 +And we'll first use that with linear regression. + +06:16.070 --> 06:21.860 +And then we're going to use that with random forests, which is a more sophisticated technique that + +06:21.860 --> 06:22.790 +I'll talk about then. + +06:22.820 --> 06:30.560 +But it involves taking random chunks of your data and your features in the form of bits of vectors, + +06:30.560 --> 06:37.880 +and seeing whether and then creating an ensemble, a series of models that combines averages across + +06:37.880 --> 06:40.190 +many of these little samples. + +06:40.190 --> 06:47.480 +And then we're going to have something called support vector regression, a type of support vector machines, + +06:47.480 --> 06:53.660 +which is another technique, a specific way of trying to separate out your data into different groups. + +06:53.810 --> 06:55.880 +So we will try these different techniques. + +06:55.880 --> 07:03.050 +We will see which one does best and see how that fares in solving our problem of predicting the price + +07:03.050 --> 07:06.710 +of a product based only on its description. + +07:07.010 --> 07:09.410 +With that, let's go to JupyterLab. 
diff --git a/week5/community-contributions/subtitles/srts/59472425/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472425/ja_JP.srt new file mode 100755 index 0000000..75c7e00 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472425/ja_JP.srt @@ -0,0 +1,226 @@ +WEBVTT + +00:00.590 --> 00:03.110 +第6週、 3日目へようこそ。 + +00:03.140 --> 00:09.950 +今日は、 あなたの好みに応じて、 好きになるか嫌いになるかのどちらかの日になるだろうが、 + +00:09.950 --> 00:12.470 +中間はないだろう。 + +00:12.500 --> 00:18.410 +あなたが伝統的な機械学習が好きか、 興味があるかによって、 素晴らしい一日になるか、 + +00:18.410 --> 00:24.380 +惨めな一日になるかが分かれるだろう。 今日は、 過去に一歩さかのぼって、 基礎的な機械学習を見て、 + +00:24.380 --> 00:31.160 +昔はどうだったかの実践的な例をいくつか紹介しよう。 + +00:31.370 --> 00:36.740 +だから、 このイントロで、 しかもたった1日だけなら、 たとえあなたがとても不愉快に思うことであっても、 + +00:36.740 --> 00:38.270 +我慢できるはずだ。 + +00:38.420 --> 00:51.020 +さて、 ざっと思い出すと、 すでにできることは、 フロンティアモデル、 ツールを使ったAIアシスタントの構築、 パイプラインやトークナイザーやモデルを使ったハグフェイスのようなオープンソースモデルを使った作業だ。 + +00:51.020 --> 00:55.520 +ロング・チェーンを使えば、 完全なラグ・パイプラインを構築することができる。 + +00:55.520 --> 01:01.910 +そして実際、 ご覧いただいたように、 ロングチェーンなしでも、 少なくともかなり、 完全に可能なのだ。 + +01:01.910 --> 01:07.140 +ラインチェーンを使えば手っ取り早いが、 ラグに特別なマジックがあるわけではないことはご存知の通りだ。 + +01:07.170 --> 01:13.020 +そして、 商業的な問題を解決するための5段階の戦略について話してきた。 + +01:13.020 --> 01:16.140 +そして、 私たちはデータについて本当に深く知っている。 + +01:16.140 --> 01:18.030 +深く考えすぎなければいいのだが + +01:18.120 --> 01:20.430 +この経験を乗り越えられたことを願っている。 + +01:20.430 --> 01:21.750 +私たちはデータを使って多くの仕事をした。 + +01:21.750 --> 01:23.460 +たくさんのチャートを見た。 + +01:23.490 --> 01:31.350 +アイテム・クラスとアイテム・ローダーについては、 おそらく意図していた以上に詳しくなっていることだろう。 + +01:31.560 --> 01:34.740 +しかし、 この時点で我々は自分たちのデータを隅から隅まで把握している。 + +01:35.010 --> 01:38.250 +今日はベースラインについて話そう。 + +01:38.280 --> 01:41.550 +ベースライン・モデルとは何か、 なぜそれが重要なのかについてお話しします。 + +01:41.550 --> 01:47.790 +そして、 ベースラインモデルと、 より伝統的な機械学習で遊んで、 + +01:47.790 --> 01:55.920 +派手なLMのようなものを使わなくても、 どれだけのことができるかを調べるんだ。 + +01:56.040 --> 02:00.060 +ええと、 明日、 フロンティアに行く前にね。 + +02:00.060 --> 02:04.730 +それでは早速、 ベースラインについて少し話をしよう。 + +02:04.730 --> 02:11.540 +だから、 問題に取り組むならシンプルなものから始めるというのが、 
ほとんど常識的なことなんだ。 + +02:12.200 --> 02:22.520 +しかし、 特にデータサイエンスの世界では、 いくつかの理由から根本的に重要なことなのだ。 + +02:22.790 --> 02:29.000 +明らかなのは、 私たちが進歩を測るための一種の基準を与えてくれることだ。 + +02:29.000 --> 02:37.190 +シンプルで伝統的なものから始めれば、 高度なディープ・ニューラル・ネットワークを適切に使っていることがわかる。 + +02:37.190 --> 02:43.070 +針が動いているのを見たり、 ベースラインなしではるかに大きな高みを目指しているのを見ると、 素晴らしい結果を得ているのか、 + +02:43.070 --> 02:50.570 +それとも不愉快な方向に小さな一歩を踏み出しているだけなのか、 わからなくなってしまうのだ。 + +02:50.750 --> 02:52.970 +だから、 その目安になるのは明らかだ。 + +02:53.090 --> 02:57.920 +あ、 でも、 もうひとつあって、 llmsが必ずしも正しい解決策とは限らないんだ。 + +02:57.920 --> 03:05.100 +実際、 我々が解決しようとしている具体的なビジネス上の問題は、 商品の価格を予測することなんだ。 + +03:05.100 --> 03:11.220 +というのも、 先ほども言ったように、 説明文から価格や数字を生成するのは、 + +03:11.250 --> 03:23.970 +より伝統的なNLPや線形回帰であり、 機械学習の分野に属するように思えるからだ。 + +03:24.000 --> 03:25.560 +伝統的な機械学習。 + +03:25.590 --> 03:29.850 +そうなると、 ベースラインを構築することがより重要になる。 なぜなら、 そうしてフロンティアモデルを試してみても、 + +03:29.850 --> 03:33.600 +それ以上の結果は得られないからだ。 + +03:33.600 --> 03:38.820 +だから、 当たり前のことなんだけど、 なぜこんなことをするのかを説明してくれるんだ。 + +03:39.000 --> 03:43.140 +さて、 今日はどんなモデルで勝負しようか。 + +03:43.170 --> 03:44.310 +しかもたった1日だけだ。 + +03:44.310 --> 03:46.860 +こんなことに費やすのは一度きりだ。 + +03:46.860 --> 03:50.550 +そして、 あなたがすでにこれらのモデルに精通しているなら、 それは本当に価値がある。 + +03:50.550 --> 03:55.710 +もし、 あなたが初めて商業的な問題に触れるのであれば、 + +03:55.710 --> 04:02.550 +私たちの特定の商業的な問題についての興味深い簡単な実験になるだろう。 + +04:02.940 --> 04:06.620 +まず最初に、 ビジネス上の問題を取り上げます。 + +04:06.620 --> 04:09.080 +私たちは、 とても古風なことをするつもりだ。 + +04:09.080 --> 04:11.870 +フィーチャー・エンジニアリングと呼ばれることをするつもりだ。 + +04:11.870 --> 04:21.530 +データを理解し、 さて、 価格に影響を与えそうな重要な要因は何だろう? 
+ +04:21.530 --> 04:25.220 +そして、 私たちは機能と呼ぶべきものを考え出そうとする。 + +04:25.220 --> 04:33.680 +そして、 アマゾンのベストセラーランキングの順位はどうなのか、 といったような、 ごく当たり前の特徴を考えていきます。 + +04:33.980 --> 04:42.500 +そして、 これらの特徴の線形結合が価格を予測するのに有効かどうかを試してみる。 + +04:42.500 --> 04:47.720 +そして、 機械学習モデルを扱う場合、 そこから始めることが多い。 + +04:48.050 --> 04:56.180 +これは自然言語処理の最初の試みのひとつだ。 + +04:56.210 --> 05:02.240 +NLP Bag of Wordsは特に単純化されたアプローチで、 文字通り単語の数を数え上げ、 + +05:02.240 --> 05:11.130 +この説明文の中で各特定の単語が何回登場するかという小さなベクトルを構築する。 + +05:11.130 --> 05:19.620 +つまり、 ある単語があったとして、 その単語にはストップワードと呼ばれるものは含まれない。 + +05:19.830 --> 05:30.600 +しかし、 もしインテルのような単語があるとすれば、 それはラップトップやコンピューターに一定の価値があることを示すかもしれない。 + +05:30.600 --> 05:35.580 +そして、 それが現れるかどうか、 あるいは現れたとしても何回現れるかによって、 + +05:35.580 --> 05:37.200 +その場所に影響が出る。 + +05:37.290 --> 05:44.670 +ええと、 この単語袋の中にある、 各商品に含まれる単語数のリストです。 + +05:44.850 --> 05:50.250 +そして、 その単語の袋を取り出し、 これらの異なる単語を組み合わせたときに、 + +05:50.250 --> 05:55.470 +商品の価格を予測する線形結合があるかどうかをもう一度調べます。 + +05:56.220 --> 06:01.980 +これは、 最初の本格的なニューラルネットワークの1つで、 + +06:01.980 --> 06:13.100 +単語のバッグよりも賢い方法でベクトルを生成できるエンコーディング・アルゴリズムだ。 + +06:13.100 --> 06:15.950 +まずは線形回帰を使ってみよう。 + +06:16.070 --> 06:22.790 +そして、 それをランダムフォレストと一緒に使うのですが、 これはもっと洗練された手法なので、 その時にお話しします。 + +06:22.820 --> 06:30.560 +しかしそれは、 データのランダムな塊と、 ベクトルのビットの形をした特徴を取り、 それがアンサンブル、 + +06:30.560 --> 06:40.190 +つまりこれらの小さなサンプルの多くの平均を組み合わせた一連のモデルを作成するかどうかを確認することを含む。 + +06:40.190 --> 06:53.660 +サポート・ベクトル回帰と呼ばれる、 サポート・ベクトル・マシンの一種である。 + +06:53.810 --> 06:55.880 +だから、 私たちはこうしたさまざまなテクニックを試してみる。 + +06:55.880 --> 07:06.710 +どれが最も優れているのか、 また、 説明文だけから商品の価格を予測するという我々の問題を解決する上でどうなのかを見てみよう。 + +07:07.010 --> 07:09.410 +それでは、 JupyterLabに行ってみよう。 diff --git a/week5/community-contributions/subtitles/srts/59472425/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472425/ko_KR.srt new file mode 100755 index 0000000..4187b1f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472425/ko_KR.srt @@ -0,0 +1,292 @@ +WEBVTT + +00:00.590 --> 00:03.110 +6주 차, 3일 차예요 + +00:03.140 --> 00:09.950 +오늘은 여러분의 
선호도에 따라 호불호가 갈리는 날이 될 겁니다 하지만 + +00:09.950 --> 00:12.470 +중간은 없을 거예요 + +00:12.500 --> 00:18.410 +멋진 하루가 되거나 끔찍한 하루가 될 겁니다 전통적인 머신 러닝에 + +00:18.410 --> 00:24.380 +관심 있느냐에 따라서요 오늘은 과거로 한 걸음 돌아가 기본적인 머신 + +00:24.380 --> 00:31.160 +러닝을 살펴보고 과거 어떻게 작동했는지 실질적인 예시를 볼 테니까요 + +00:31.370 --> 00:36.740 +그런 소개를 하루만 하면 참을 수 있을 거예요 아주 불쾌한 소개라도요 Put + +00:36.740 --> 00:38.270 +it's go! + +00:38.420 --> 00:45.500 +다시 한번 말씀드리면 이미 여러분이 할 수 있는 건 개척 모델과 함께 작업하는 겁니다 도구를 이용한 인공지능 + +00:45.500 --> 00:51.020 +조수 만들기 파이프라인, 토큰라이저 모델로 얼굴 안기 같은 오픈 소스 모델로요 + +00:51.020 --> 00:55.520 +긴 체인을 이용해 래그 파이프라인을 만들 수 있죠 + +00:55.520 --> 01:01.010 +사실, 보셨다시피 긴 체인 없이도 충분히 할 수 + +01:01.010 --> 01:01.910 +있어요 + +01:01.910 --> 01:06.330 +라인 체인을 쓰면 더 빠르지만 헝겊은 특별한 마법이 + +01:06.360 --> 01:07.140 +없어요 + +01:07.170 --> 01:13.020 +그리고 상업적 문제를 해결하는 5단계 전략을 얘기했죠 + +01:13.020 --> 01:16.140 +데이터도 아주 심오하고요 + +01:16.140 --> 01:18.030 +상처가 깊지 않아야 할 텐데요 + +01:18.120 --> 01:20.430 +살아남았길 바라요 + +01:20.430 --> 01:21.750 +데이터로 많은 작업을 했어요 + +01:21.750 --> 01:23.460 +차트를 많이 봤어요 + +01:23.490 --> 01:29.760 +지금쯤이면 아이템 클래스와 아이템 로더에 익숙해졌을 겁니다 여러분이 의도했던 + +01:29.760 --> 01:31.350 +것보다요 + +01:31.560 --> 01:34.740 +하지만 이 시점에선 데이터를 처음부터 다 알아요 + +01:35.010 --> 01:38.250 +오늘은 기준점에 대해 얘기할 거예요 + +01:38.280 --> 01:41.550 +기준 모델이 무엇인지 왜 중요한지 말씀드리겠어요 + +01:41.550 --> 01:47.790 +그런 다음 적어도 저는 기본 모델을 가지고 신나게 놀 겁니다 전통적인 머신 + +01:47.790 --> 01:54.270 +러닝을 연구해 이런 화려한 LM 같은 걸 쓰지 않고도 얼마나 잘 만들어지는지 + +01:54.270 --> 01:55.920 +볼 거예요 + +01:56.040 --> 02:00.060 +내일이 오기 전에 개척지로 향할 거예요 + +02:00.060 --> 02:04.730 +비트코인 기준에 대해 얘기해 보죠 + +02:04.730 --> 02:11.540 +상식적인 문제지만 문제를 해결하려면 단순하게 시작해야죠 + +02:12.200 --> 02:20.600 +특히 데이터 과학의 세계에서 근본적으로 중요한 부분입니다 두 가지 + +02:20.600 --> 02:22.520 +이유가 있어요 + +02:22.790 --> 02:29.000 +확실한 건 일종의 잣대를 제공한다는 거예요 진보를 측정할 때 사용할 수 있죠 + +02:29.000 --> 02:36.110 +단순하고 전통적인 것부터 시작하면 정교한 심층 신경망을 제대로 사용한다는 걸 + +02:36.110 --> 02:37.190 +알 수 있죠 + +02:37.190 --> 02:43.070 +바늘이 움직이고 기준선 없이 훨씬 더 높은 곳에 도달하는 걸 볼 때 알 수 + +02:43.070 --> 
02:48.530 +없어요 멋진 결과를 얻는 건지 아니면 좋지 않은 방향으로 작은 발걸음을 + +02:48.530 --> 02:50.570 +내딛는 건지요 + +02:50.750 --> 02:52.970 +이게 그 잣대를 제공하죠 + +02:53.090 --> 02:57.920 +하지만 다른 문제도 있어요 llm이 항상 옳은 해결책은 아니에요 + +02:57.920 --> 03:03.650 +사실 저희가 해결하고자 하는 특정 사업상의 문제는 제품 가격 예측 + +03:03.650 --> 03:05.100 +문제인데요 + +03:05.100 --> 03:11.220 +llms가 올바른 솔루션인지는 잘 알 수 없습니다 왜냐하면 일반적으로 + +03:11.250 --> 03:20.370 +앞서 말했듯이 설명에서 가격과 숫자를 생성하는 것은 더 전통적인 NLP와 선형 회귀처럼 보이기 때문입니다 + +03:20.370 --> 03:23.970 +머신 러닝 분야에 속한 것처럼 느껴지죠 + +03:24.000 --> 03:25.560 +전통적인 머신 러닝이죠 + +03:25.590 --> 03:29.850 +그래서 기준점을 만드는 게 더 중요해요 기준점을 만든 다음에 + +03:29.850 --> 03:33.600 +개척 시대 모델을 써 봐도 더 나을 게 없으니까요 + +03:33.600 --> 03:38.820 +뻔한 거지만 왜 이걸 하는지 설명해주죠 + +03:39.000 --> 03:43.140 +오늘은 어떤 모델을 가지고 놀까요? + +03:43.170 --> 03:44.310 +하루면 돼요 + +03:44.310 --> 03:46.860 +딱 한 번 하는 거잖아요 + +03:46.860 --> 03:50.550 +이런 모델에 이미 아주 익숙하다면 그럴 가치가 있어요 + +03:50.550 --> 03:55.710 +우리의 상업적 문제에 대한 흥미롭고 간단한 실험이 될 겁니다 처음 + +03:55.710 --> 04:00.990 +보시는 분들을 위해 자세하게 설명하진 않겠지만 어떤 관점인지 + +04:00.990 --> 04:02.550 +잘 아실 거예요 + +04:02.940 --> 04:06.620 +가장 먼저 할 일은 사업상의 문제를 해결하는 거예요 + +04:06.620 --> 04:09.080 +아주 옛날 방식으로 할 거예요 + +04:09.080 --> 04:11.870 +기능 엔지니어링이라고 부르는 걸 할 거예요 + +04:11.870 --> 04:18.350 +데이터를 이해하고 가격에 영향을 줄 수 있는 중요한 요인이 + +04:18.350 --> 04:21.530 +무엇인지 생각해 보는 거죠 + +04:21.530 --> 04:25.220 +그리고 기능이라 부르는 것들을 생각해내려고 노력하죠 + +04:25.220 --> 04:32.270 +그리고 몇 가지 특징을 생각해 낼 거예요 아마존 베스트셀러 순위에 어떻게 올랐는지 + +04:32.270 --> 04:33.680 +그런 거요 + +04:33.980 --> 04:41.060 +그리고 이 세 가지 요소의 직선적 조합이 가격을 예측하는 데 도움이 되는지 + +04:41.060 --> 04:42.500 +볼 거예요 + +04:42.500 --> 04:47.720 +머신 러닝 모델을 다룰 때는 종종 거기서부터 시작하죠 + +04:48.050 --> 04:54.770 +그리고 낱말 봉지라는 걸 할 거예요 자연 언어 처리를 처음 시도한 + +04:54.860 --> 04:56.180 +것 중 하나죠 + +04:56.210 --> 05:02.240 +NLP 단어 주머니는 특히 단순한 접근법입니다 말 그대로 단어 수를 + +05:02.240 --> 05:08.550 +세고 작은 벡터를 만드는 거죠 이 설명에서 각각의 단어가 몇 번 기능하는지 + +05:08.550 --> 05:11.130 +확인하는 거예요 + +05:11.130 --> 05:15.930 +단어가 있다면 정지 단어는 포함하지 않습니다. 
이런 단어는 + +05:15.930 --> 05:19.620 +어떤 것과도 크게 다르지 않아요. + +05:19.830 --> 05:26.880 +하지만 인텔 같은 단어가 특정 값을 가진 노트북이나 컴퓨터를 뜻한다면 + +05:26.880 --> 05:30.600 +인텔이 우리 단어 중 하나가 될 거예요 + +05:30.600 --> 05:35.580 +그게 나타나느냐에 따라 혹은 나타난다면 그 장소에 얼마나 많이 나타나느냐에 + +05:35.580 --> 05:37.200 +따라 달라져요 + +05:37.290 --> 05:44.670 +각 제품에 들어간 단어 목록이 들어 있어요 + +05:44.850 --> 05:50.250 +그런 다음 단어 봉지를 가져다가 이 다양한 단어들의 선형 조합이 + +05:50.250 --> 05:55.470 +있는지 보겠습니다 그걸 다 합쳤을 때 제품 가격을 예측하죠 + +05:56.220 --> 06:01.980 +그리고 워드 2 벡이라는 걸 사용할 겁니다 아까도 언급했지만 + +06:01.980 --> 06:10.310 +최초의 실제 신경망 중 하나로 알고리즘을 인코딩하는 거죠 단어보다 더 영리한 방식으로 + +06:10.310 --> 06:13.100 +벡터를 생성할 수 있어요 + +06:13.100 --> 06:15.950 +그걸 먼저 선형 회귀와 함께 사용할 거예요 + +06:16.070 --> 06:22.790 +그다음 무작위 포레스트에 사용할 겁니다 좀 더 복잡한 기술이죠 그때 말씀드릴게요 + +06:22.820 --> 06:30.560 +하지만 벡터 비트 형태로 데이터와 기능을 무작위로 취하고 벡터 비트가 벡터인지 + +06:30.560 --> 06:37.880 +아닌지를 확인한 후 이런 샘플 여러 개를 합친 일련의 모델로 앙상블을 생성하는 + +06:37.880 --> 06:40.190 +걸 포함하죠 + +06:40.190 --> 06:47.480 +그리고 벡터 회귀 지원이라는 것이 있습니다 벡터 머신의 지원 유형으로 다른 + +06:47.480 --> 06:53.660 +기술입니다 데이터를 여러 그룹으로 분리하는 방법이죠 + +06:53.810 --> 06:55.880 +다양한 기술을 시도해 볼 거예요 + +06:55.880 --> 07:03.050 +어느 제품이 가장 잘 팔리고 제품 설명만 보고 가격을 예측하는 문제를 + +07:03.050 --> 07:06.710 +해결할 수 있을지 지켜보죠 + +07:07.010 --> 07:09.410 +그럼 주피터랩으로 가보죠 diff --git a/week5/community-contributions/subtitles/srts/59472429/en_US.srt b/week5/community-contributions/subtitles/srts/59472429/en_US.srt new file mode 100755 index 0000000..446237d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472429/en_US.srt @@ -0,0 +1,400 @@ +WEBVTT + +00:01.400 --> 00:08.090 +And continuing on our strategy to solve commercial problems with LMS, we get to step four, which is + +00:08.090 --> 00:13.040 +about optimizing our model to solve problems really, really well. + +00:13.310 --> 00:19.220 +Taking it beyond the pre-trained model that we might have, um, either or an existing frontier model + +00:19.220 --> 00:21.530 +and getting more juice out of it. 
+ +00:21.770 --> 00:26.930 +So there are these three different approaches that two of them we've used, and one of them talked about + +00:27.110 --> 00:33.170 +there is prompting that we've used a ton now, which is things like multi-shot prompting, chaining + +00:33.170 --> 00:36.170 +when we have multiple prompts and using tools. + +00:36.170 --> 00:39.800 +These are all ways to get better outcomes. + +00:39.920 --> 00:44.360 +There is Rag, of course, that you're now super familiar with. + +00:44.360 --> 00:50.270 +I do hope you've built that extra project to be doing a knowledge work on on your own life. + +00:50.540 --> 00:59.240 +Uh, so there is rag and then the new thing, Fine tuning, which is about training the model to be + +00:59.240 --> 01:00.140 +even better. + +01:00.140 --> 01:03.260 +So these these are the three techniques. + +01:03.260 --> 01:07.160 +And there's certainly a lot of confusion out there. + +01:07.190 --> 01:08.870 +There's a lot of questions I get asked about. + +01:08.870 --> 01:12.140 +How do you decide which technique to use in which situation. + +01:12.140 --> 01:17.390 +And of course, it's possible to use all the techniques together, but one does typically, uh, focus + +01:17.390 --> 01:19.730 +on one, uh, at least initially. + +01:19.730 --> 01:25.520 +And it's worth pointing out that the first two techniques there are inference time techniques. + +01:25.520 --> 01:31.070 +These are about taking a trained model and at inference time, figuring out how to get more juice out + +01:31.070 --> 01:31.490 +of it. + +01:31.490 --> 01:35.930 +And the third one is a training time, uh, technique. + +01:35.930 --> 01:41.720 +It's about saying, all right, let's take a pre-trained model and figure out how to supply more data, + +01:41.720 --> 01:46.790 +to tweak the weights to make it even better at solving its problem. 
+ +01:47.510 --> 01:55.700 +So to talk about the benefits of each of those techniques just very quickly in prompting, obviously + +01:55.700 --> 01:57.470 +it's super fast to do this. + +01:57.500 --> 02:05.960 +We've done this so quickly, so easily having different prompting strategies, uh, with maybe the exception + +02:05.960 --> 02:08.990 +of tools, was a little bit more involved, but still, you get the idea. + +02:08.990 --> 02:10.280 +You can just replicate that. + +02:10.280 --> 02:17.900 +You can quite easily get to a point where you are continually improving, uh, the prompt messages to + +02:17.930 --> 02:20.480 +an LLM and getting better and better results. + +02:20.600 --> 02:25.280 +And you typically see very quick direct improvement from it. + +02:25.280 --> 02:31.040 +You add in some multi-shot, uh, prompt, uh, background, some context into your prompts, and you + +02:31.040 --> 02:32.450 +immediately get the improvement. + +02:32.450 --> 02:34.100 +And it's a low cost too. + +02:34.130 --> 02:36.650 +So lots of benefits of using prompting. + +02:37.520 --> 02:47.240 +So rag, uh, has the benefit of bringing about this, this strong accuracy because you can pluck out, + +02:47.300 --> 02:54.650 +uh, this, uh, this very specific fact of information to arm the LLM with. + +02:54.680 --> 03:02.240 +It's, it's very scalable in that you can have huge quantities of data that can pour in, and your Rag + +03:02.240 --> 03:07.250 +pipeline can pluck out the relevant context, so you don't have to spend all the extra money pumping + +03:07.250 --> 03:09.560 +bigger and bigger prompts to your model. + +03:09.710 --> 03:14.630 +And that ties to the third point, which is that it's efficient because you can you can do that. + +03:15.140 --> 03:17.390 +Um, so fine tuning. + +03:17.390 --> 03:19.670 +So what are the benefits? + +03:19.700 --> 03:27.080 +So it allows you to build deep expertise, specialist skill sets into your model. 
+ +03:27.080 --> 03:33.530 +You can build a model that is really great at doing something in a way that is very nuanced. + +03:33.530 --> 03:36.470 +So it's not just being given an extra fact. + +03:36.590 --> 03:38.600 +Um, about the CEO. + +03:38.630 --> 03:39.620 +What was our CEO's name? + +03:39.650 --> 03:40.730 +Avery Lancaster. + +03:40.730 --> 03:45.320 +It's not just being given a specific fact about Avery and what she used to do. + +03:45.500 --> 03:48.350 +Uh, it's something which over time is learning. + +03:48.380 --> 03:56.060 +I don't know the careers of CEOs, or it's learning about the insurer Elm Company and more about its + +03:56.060 --> 03:59.000 +culture and about its communications. + +03:59.000 --> 04:07.790 +So it gets this deeper insight, which allows it to show a kind of almost human like ability to reason + +04:07.790 --> 04:10.160 +about the data that it's being showed. + +04:10.160 --> 04:20.660 +So it's it's much more of a, um, it's, it's a much deeper way to change the abilities and capabilities + +04:20.660 --> 04:25.040 +of the model than the inference time techniques. + +04:25.280 --> 04:29.210 +It allows a model to learn a different style and tone. + +04:29.360 --> 04:34.190 +Of course, you can achieve some of that by just prompting, as we saw early on when we just added a + +04:34.190 --> 04:41.270 +system prompt and asked for for snarky comedic style, or when we had llms battling and we had a GPT + +04:41.300 --> 04:42.710 +four zero being the adversary. + +04:42.710 --> 04:45.410 +So you can do that with with system prompts. + +04:45.410 --> 04:52.250 +But if you want a very subtle tone, like you want a model that's going to emulate the the style of + +04:52.250 --> 04:57.740 +your customer service specialists who've been trained over many years, then they need it will need + +04:57.740 --> 04:58.970 +to see a lot of data. + +04:59.000 --> 05:01.280 +A lot of examples to learn from. 
+ +05:01.700 --> 05:09.620 +Um, and then the fourth point is that whilst this is something which requires a big investment in training, + +05:09.620 --> 05:13.550 +once you've trained it, you can then run it at inference time. + +05:13.550 --> 05:16.250 +And you don't need to do things like in Rag. + +05:16.250 --> 05:21.050 +You have to then go and look up the context and provide that in the context that's no longer needed, + +05:21.050 --> 05:24.740 +because you've already baked that into the model's weights. + +05:24.770 --> 05:26.210 +So it's faster. + +05:27.020 --> 05:28.820 +So what about the cons? + +05:28.850 --> 05:33.110 +Well, many of these cons follow from the pros of the others, as you will see. + +05:33.110 --> 05:38.810 +Uh, in the case of prompting, one con is that it's limited by the total context window. + +05:38.810 --> 05:44.270 +Of course, you can only shove so much in the prompt, and even if you do use up, even if you've got + +05:44.270 --> 05:50.780 +mega context windows like Gemini one five flash, uh, the million tokens, if you remember that, um, + +05:50.780 --> 05:56.150 +you still find that if you pump lots and lots into that context, then you get somewhat diminishing + +05:56.220 --> 06:03.270 +Returns from how much it learns from that at inference times, and obviously. + +06:03.300 --> 06:06.990 +Inference itself becomes slower and more expensive. + +06:07.020 --> 06:10.080 +The more context you are pumping in. + +06:10.080 --> 06:16.380 +And if you're doing something like a prompt chaining when you're making multiple inference calls to + +06:16.410 --> 06:19.890 +solve a bigger problem, then of course that slows everything down. + +06:20.670 --> 06:26.370 +So rag some of the cons it's more of a lift to build it. + +06:26.580 --> 06:29.100 +Um, you need the vector database. + +06:29.100 --> 06:31.080 +You need to populate that vector database. 
+ +06:31.380 --> 06:38.460 +Um, it needs the, uh, sort of the, it needs the knowledge base to be supplied and kept up to date, + +06:38.460 --> 06:44.370 +presumably if it's giving accurate data, if Avery Lancaster steps down as CEO, will need to make sure + +06:44.370 --> 06:47.310 +that the rag, uh, effect reflects that. + +06:47.550 --> 06:49.710 +Um, and it lacks nuance. + +06:49.800 --> 06:57.450 +Um, it doesn't have the same, um, ability to to learn the deeper meaning behind the data. + +06:57.450 --> 07:03.420 +It's just taking facts and the negatives of fine tuning. + +07:03.450 --> 07:04.710 +Of course it is. + +07:04.740 --> 07:05.820 +It's hard. + +07:06.150 --> 07:07.920 +It's harder to to build it. + +07:07.950 --> 07:11.640 +It's going to be we're going to have a lot of fun with it, but it's going to be that we're going to + +07:11.640 --> 07:12.180 +be sweating. + +07:12.180 --> 07:13.410 +It's going to be difficult. + +07:13.710 --> 07:16.320 +Um, you need a ton of data. + +07:16.350 --> 07:18.510 +You need a lot of examples. + +07:18.570 --> 07:24.360 +Uh, it's, uh, depends on on how specialized you want to be and your objectives. + +07:24.360 --> 07:29.400 +But generally speaking, we'll see that there's going to be a high data need, and there's going to + +07:29.400 --> 07:30.960 +be a training cost. + +07:30.960 --> 07:37.260 +There's one more con that's that's often talked about, which is known as catastrophic forgetting, + +07:37.260 --> 07:38.910 +which sounds very serious. + +07:38.910 --> 07:46.320 +Uh, catastrophic forgetting, if you hear that, is saying that, um, if you take a pre-trained model + +07:46.680 --> 07:54.480 +like llama 3.1 and you fine tune it with a large amount of data, it will get better and better at solving + +07:54.480 --> 07:57.480 +your particular problem, but over time it will. 
+ +07:57.510 --> 08:05.220 +Over over training time it will start to forget some of the base information in the base model, and + +08:05.220 --> 08:10.680 +as a result, some of its quality might degrade if it's taken outside the specific kinds of questions + +08:10.680 --> 08:11.880 +you're training it for. + +08:12.090 --> 08:20.130 +Um, and so that's a that's a behavior that's been noticed and that has, has some, some concerning + +08:20.130 --> 08:21.120 +ramifications. + +08:21.120 --> 08:27.450 +So if you need to make sure that you don't lose any of the information in the base model, if that will + +08:27.450 --> 08:30.270 +affect your performance, then you need to be careful about this. + +08:31.410 --> 08:32.160 +All right. + +08:32.160 --> 08:39.570 +So just to wrap up these then let me finish by saying that, uh, the the times when you typically use + +08:39.570 --> 08:44.430 +them with prompting, it's often used as the starting point for a project. + +08:44.460 --> 08:49.830 +Often your first version of your model will be perhaps a frontier model, and you will use prompting + +08:49.830 --> 09:00.780 +as a way to, to add, uh, performance Rag is in the specific case where you want, you need the accuracy. + +09:00.930 --> 09:06.780 +You don't want to spend the extra money on training and you have an existing knowledge base of data. + +09:06.810 --> 09:07.020 +Then. + +09:07.050 --> 09:13.770 +Then you're in perfectly suited for a Rag kind of workflow, and fine tuning is you have a specialized + +09:13.800 --> 09:19.110 +task, you have a very high volume of data, and you need top performance. + +09:19.350 --> 09:22.980 +Um, and, and you want nuance as well. + +09:22.980 --> 09:28.650 +And that, of course, is a situation that we are in with our product price predictor. + +09:28.650 --> 09:30.060 +We have tons of data. + +09:30.060 --> 09:31.650 +We have a specialized task. 
+ +09:31.650 --> 09:39.030 +We want top performance, and we do want a nuanced understanding of products so much that it can differentiate + +09:39.030 --> 09:43.410 +between a great variety in product prices. + +09:44.430 --> 09:51.030 +Okay, I will pause here for one moment, and we will come back to wrap up the strategy section before + +09:51.030 --> 09:55.170 +we then turn back to our data and get to curation. diff --git a/week5/community-contributions/subtitles/srts/59472429/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472429/ja_JP.srt new file mode 100755 index 0000000..aba923e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472429/ja_JP.srt @@ -0,0 +1,337 @@ +WEBVTT + +00:01.400 --> 00:08.090 +LMSで商業的な問題を解決するという私たちの戦略を継続し、 + +00:08.090 --> 00:13.040 +ステップ4に進む。 + +00:13.310 --> 00:19.220 +事前に訓練されたモデル、 あるいは既存のフロンティアモデルを超えて、 + +00:19.220 --> 00:21.530 +より多くの力を引き出す。 + +00:21.770 --> 00:26.930 +そのうちの2つは私たちが使ってきたもので、 そのうちの1つは、 + +00:27.110 --> 00:33.170 +マルチショット・プロンプト、 複数のプロンプトがある場合のチェイニング、 + +00:33.170 --> 00:36.170 +ツールの使用といったものです。 + +00:36.170 --> 00:39.800 +これらはすべて、 より良い結果を得るための方法だ。 + +00:39.920 --> 00:44.360 +もちろん、 今では超お馴染みのラグもある。 + +00:44.360 --> 00:50.270 +私は、 あなた自身の人生について知識を深めるための余分なプロジェクトができたことを望んでいる。 + +00:50.540 --> 01:00.140 +ボロ布があり、 そしてファインチューニングという新しいものがある。 + +01:00.140 --> 01:03.260 +これが3つのテクニックだ。 + +01:03.260 --> 01:07.160 +そして、 そこには確かに多くの混乱がある。 + +01:07.190 --> 01:08.870 +質問されることはたくさんある。 + +01:08.870 --> 01:12.140 +どの場面でどのテクニックを使うか、 どうやって決めるのか。 + +01:12.140 --> 01:17.390 +もちろん、 すべてのテクニックを併用することも可能だが、 普通は、 少なくとも最初は、 + +01:17.390 --> 01:19.730 +ひとつに集中するものだ。 + +01:19.730 --> 01:25.520 +そして、 最初の2つのテクニックは推論時間のテクニックであることを指摘する価値がある。 + +01:25.520 --> 01:31.490 +これらは、 訓練されたモデルを推論時に、 より多くの力を引き出す方法を見つけ出すというものだ。 + +01:31.490 --> 01:35.930 +そして3つ目は、 トレーニングの時間、 テクニックだ。 + +01:35.930 --> 01:41.720 +つまり、 事前に訓練されたモデルを使って、 より多くのデータを供給し、 重みを微調整して、 + +01:41.720 --> 01:46.790 +問題を解決する能力をさらに高める方法を考えようということだ。 + +01:47.510 --> 01:57.470 
+そこで、 それぞれのテクニックの利点について、 プロンプトを出す際にとても手短にお話ししましょう。 + +01:57.500 --> 02:08.990 +私たちはこのように素早く、 簡単に、 さまざまなプロンプトを出すことができた。 + +02:08.990 --> 02:10.280 +それを再現すればいい。 + +02:10.280 --> 02:20.480 +LLMへのプロンプトメッセージを継続的に改善し、 より良い結果を得ることは簡単だ。 + +02:20.600 --> 02:25.280 +そして、 通常、 それによって非常に早く直接的な改善が見られる。 + +02:25.280 --> 02:32.450 +マルチショット、 プロンプト、 背景、 文脈をプロンプトに加えると、 すぐに改善される。 + +02:32.450 --> 02:34.100 +しかも低コストだ。 + +02:34.130 --> 02:36.650 +だから、 プロンプトを使うメリットはたくさんある。 + +02:37.520 --> 02:47.240 +つまり、 ボロ布は、 LLMを武装させるために、 非常に特定の情報を抜き出すことができるため、 + +02:47.300 --> 02:54.650 +このような強力な正確さをもたらすという利点があるのだ。 + +02:54.680 --> 03:09.560 +非常にスケーラブルで、 膨大な量のデータが入ってきても、 ラグ・パイプラインが関連するコンテキストを抜き出すことができる。 + +03:09.710 --> 03:14.630 +そして、 それは3つ目のポイント、 つまり、 それができるから効率的だということにつながる。 + +03:15.140 --> 03:17.390 +うーん、 微調整だね。 + +03:17.390 --> 03:19.670 +では、 どんなメリットがあるのか? + +03:19.700 --> 03:27.080 +そのため、 深い専門知識、 スペシャリストのスキルセットをモデルに組み込むことができる。 + +03:27.080 --> 03:33.530 +非常に微妙なニュアンスで、 何かをするのにとても優れたモデルを作ることができる。 + +03:33.530 --> 03:36.470 +だから、 ただ余分な事実が与えられているわけではない。 + +03:36.590 --> 03:38.600 +あの、 CEOのことなんですが。 + +03:38.630 --> 03:39.620 +CEOの名前は? 
+ +03:39.650 --> 03:40.730 +エイブリー・ランカスター + +03:40.730 --> 03:45.320 +エイブリーについての具体的な事実や、 彼女が以前していたことを教えてもらうだけではない。 + +03:45.500 --> 03:48.350 +時間をかけて学んでいくものなんだ。 + +03:48.380 --> 03:59.000 +私はCEOのキャリアを知らないし、 エルム社という保険会社について、 またその文化やコミュニケーションについて学ぶこともない。 + +03:59.000 --> 04:10.160 +そのため、 より深い洞察力を得ることができ、 示されたデータについて推論する、 ほとんど人間のような能力を示すことができる。 + +04:10.160 --> 04:25.040 +つまり、 推論時間のテクニックよりも、 モデルの能力と性能を変える方法の方がはるかに深いということだ。 + +04:25.280 --> 04:29.210 +それによって、 モデルは異なるスタイルやトーンを学ぶことができる。 + +04:29.360 --> 04:34.190 +もちろん、 システム・プロンプトを追加して辛辣なコメディー・スタイルを求めたり、 + +04:34.190 --> 04:42.710 +llmsをバトルさせてGPTの4ゼロを敵に回した初期のように、 プロンプトを出すだけでもある程度は達成できる。 + +04:42.710 --> 04:45.410 +だから、 システム・プロンプトでそれができる。 + +04:45.410 --> 04:52.250 +しかし、 もしあなたが非常に微妙な口調を求めるのであれば、 例えば長年訓練を受けてきたカスタマーサービス・スペシャリストのスタイルを模倣したモデルを求めるのであれば、 + +04:52.250 --> 04:58.970 +彼らには多くのデータを見る必要がある。 + +04:59.000 --> 05:01.280 +学ぶべき事例がたくさんある。 + +05:01.700 --> 05:09.620 +そして4つ目のポイントは、 これはトレーニングに大きな投資を必要とするものですが、 一度トレーニングしてしまえば、 + +05:09.620 --> 05:13.550 +推論時に実行することができるということです。 + +05:13.550 --> 05:16.250 +ラグのようなことをする必要もない。 + +05:16.250 --> 05:21.050 +そうなると、 文脈を調べて、 もう必要のない文脈を提供しなければならない。 + +05:21.050 --> 05:24.740 +すでにモデルの重みに焼き込んでしまったからだ。 + +05:24.770 --> 05:26.210 +だから速くなった。 + +05:27.020 --> 05:28.820 +では、 短所はどうだろう? 
+ +05:28.850 --> 05:33.110 +さて、 これらの欠点の多くは、 他の欠点の長所から派生したものである。 + +05:33.110 --> 05:38.810 +ええと、 プロンプトの場合、 1つの欠点は、 全体のコンテキストウィンドウによって制限されることです。 + +05:38.810 --> 05:44.270 +もちろん、 プロンプトに突っ込める量は限られるし、 たとえ使い切ったとしても、 + +05:44.270 --> 05:50.780 +ジェミニのようなメガコンテキストウィンドウがあったとしても、 100万個のトークンがあったとしても、 + +05:50.780 --> 05:56.150 +それを覚えているのであれば、 そのコンテキストに大量に突っ込めば、 推論時にそこから学習する量が減り、 + +05:56.220 --> 06:03.270 +明らかにリターンが逓減することがわかる。 + +06:03.300 --> 06:06.990 +推論そのものが遅くなり、 コストもかかる。 + +06:07.020 --> 06:10.080 +より多くの文脈を汲み取る。 + +06:10.080 --> 06:16.380 +また、 より大きな問題を解決するために複数の推論を呼び出すときに、 プロンプト・チェイニングのようなことをしていると、 + +06:16.410 --> 06:19.890 +当然、 すべてが遅くなる。 + +06:20.670 --> 06:26.370 +だから、 いくつかの欠点がある。 + +06:26.580 --> 06:29.100 +ベクターのデータベースが必要なんだ。 + +06:29.100 --> 06:31.080 +ベクター・データベースに入力する必要がある。 + +06:31.380 --> 06:38.460 +もしエイブリー・ランカスターがCEOを退任するのであれば、 + +06:38.460 --> 06:47.310 +おそらく正確なデータを提供し、 最新の状態に保つ必要がある。 + +06:47.550 --> 06:49.710 +ニュアンスに欠ける + +06:49.800 --> 06:57.450 +データの背後にある深い意味を知る能力がないんだ。 + +06:57.450 --> 07:03.420 +事実と微調整のマイナスを取り上げているだけだ。 + +07:03.450 --> 07:04.710 +もちろんそうだ。 + +07:04.740 --> 07:05.820 +難しいよ。 + +07:06.150 --> 07:07.920 +作る方が難しい。 + +07:07.950 --> 07:12.180 +とても楽しくなりそうだが、 汗だくになりそうだ。 + +07:12.180 --> 07:13.410 +難しいだろうね。 + +07:13.710 --> 07:16.320 +大量のデータが必要だ。 + +07:16.350 --> 07:18.510 +たくさんの例が必要だ。 + +07:18.570 --> 07:24.360 +ええと、 どの程度専門的になりたいかとか、 目的にもよるんだけどね。 + +07:24.360 --> 07:30.960 +しかし、 一般的に言って、 高いデータニーズがあり、 トレーニングコストがかかることは目に見えている。 + +07:30.960 --> 07:38.910 +もうひとつ、 よく語られる詐欺がある。 それは「壊滅的な忘却」と呼ばれるもので、 とても深刻に聞こえる。 + +07:38.910 --> 07:54.480 +壊滅的な忘却というのは、 つまり、 ラマ3のような事前に訓練されたモデルを使った場合のことです。 + +07:54.480 --> 07:54.480 +1、 + +07:54.480 --> 07:57.480 +そしてそれを大量のデータで微調整していけば、 あなたの特定の問題を解決するのにどんどん良くなっていくだろうが、 時間が経てばそうなるだろう。 + +07:57.510 --> 08:05.220 +トレーニング時間が長くなるにつれて、 ベースモデルのベース情報のいくつかを忘れるようになり、 その結果、 + +08:05.220 --> 08:11.880 +トレーニングしている特定の種類の質問以外を行った場合、 品質が低下する可能性があります。 + +08:12.090 --> 08:21.120 +だから、 これは注目されている行動であり、 いくつか懸念される影響もある。 + +08:21.120 --> 
08:27.450 +そのため、 ベースモデルの情報を失わないようにする必要がある場合、 それがパフォーマンスに影響するのであれば、 + +08:27.450 --> 08:30.270 +この点には注意が必要だ。 + +08:31.410 --> 08:32.160 +分かった。 + +08:32.160 --> 08:44.430 +最後に、 プロンプトを使うときは、 プロジェクトの出発点として使われることが多いということをお伝えしておこう。 + +08:44.460 --> 08:49.830 +多くの場合、 モデルの最初のバージョンはおそらくフロンティアモデルで、 + +08:49.830 --> 09:00.780 +精度が必要な特定のケースで、 性能を追加する方法としてプロンプトを使用します。 + +09:00.930 --> 09:06.780 +トレーニングに余分なお金をかけたくないし、 既存の知識ベースのデータもある。 + +09:06.810 --> 09:07.020 +それからだ。 + +09:07.050 --> 09:13.770 +そして、 特殊なタスクがあり、 非常に大量のデータがあり、 最高のパフォーマンスが必要な場合は、 + +09:13.800 --> 09:19.110 +ファインチューニングを行う。 + +09:19.350 --> 09:22.980 +それに、 ニュアンスも必要でしょう? + +09:22.980 --> 09:28.650 +そしてそれはもちろん、 私たちが製品価格予測装置を使っている状況でもある。 + +09:28.650 --> 09:30.060 +私たちには大量のデータがある。 + +09:30.060 --> 09:31.650 +私たちには専門的な仕事がある。 + +09:31.650 --> 09:39.030 +私たちは最高のパフォーマンスを求めているし、 多種多様な商品の価格を区別できるほど、 + +09:39.030 --> 09:43.410 +商品に対するニュアンスの理解を求めている。 + +09:44.430 --> 09:55.170 +さて、 ここでちょっと小休止して、 また戦略のセクションに戻り、 データに戻ってキュレーションに入ります。 diff --git a/week5/community-contributions/subtitles/srts/59472429/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472429/ko_KR.srt new file mode 100755 index 0000000..d3d2fbe --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472429/ko_KR.srt @@ -0,0 +1,382 @@ +WEBVTT + +00:01.400 --> 00:08.090 +LMS로 상업적 문제를 해결하는 전략을 계속하겠습니다 4단계를 거치죠 모델 + +00:08.090 --> 00:13.040 +최적화하기입니다 문제를 아주 잘 해결하기 위해서요 + +00:13.310 --> 00:19.220 +기존의 선구 모델이나 훈련된 모델을 넘어서서 더 많은 + +00:19.220 --> 00:21.530 +걸 끌어내는 거죠 + +00:21.770 --> 00:26.930 +세 가지 다른 접근법이 있는데 두 가지는 이미 사용했고 한 가지는 많이 + +00:27.110 --> 00:33.170 +사용했다는 프롬프트입니다 멀티샷 프롬프트, 여러 프롬프트가 있을 때 연결하기, + +00:33.170 --> 00:36.170 +도구를 사용하는 것 등이죠 + +00:36.170 --> 00:39.800 +모두 더 나은 결과를 얻는 방법이죠. Get up! 
+ +00:39.920 --> 00:44.360 +여러분이 아주 잘 아는 랙도 있어요 + +00:44.360 --> 00:50.270 +자신의 삶을 위한 지식을 쌓을 수 있는 프로젝트를 만들었길 바라요 + +00:50.540 --> 00:59.240 +래그와 새로운 게 있어요 파인 튜닝인데 모델이 더 나아지도록 훈련하는 + +00:59.240 --> 01:00.140 +거예요 + +01:00.140 --> 01:03.260 +이 세 가지 기술이 필요해요 + +01:03.260 --> 01:07.160 +확실히 혼란스러운 상황이에요 + +01:07.190 --> 01:08.870 +Get in get 질문이 많아요 + +01:08.870 --> 01:12.140 +어떤 기술을 어떤 상황에서 쓸지 어떻게 결정하나요? + +01:12.140 --> 01:17.390 +물론 모든 기술을 함께 사용할 수도 있지만 보통은 한 가지 기술에 집중하죠 + +01:17.390 --> 01:19.730 +적어도 처음에는요 + +01:19.730 --> 01:25.520 +첫 두 가지 기술은 시간을 추론하는 기술이에요 + +01:25.520 --> 01:31.490 +숙련된 모델을 취하고 추론 시간에 어떻게 즙을 더 얻을 수 있는지 알아내는 과정이죠. + +01:31.490 --> 01:35.930 +세 번째는 훈련 시간과 기술이에요 + +01:35.930 --> 01:41.720 +미리 훈련된 모델을 가지고 더 많은 데이터를 공급하고 + +01:41.720 --> 01:46.790 +무게를 조정해서 문제를 더 잘 해결하자는 거죠 + +01:47.510 --> 01:55.700 +각각의 테크닉의 장점에 대해 아주 빠르게 말씀드릴게요 물론 + +01:55.700 --> 01:57.470 +아주 빠르죠 + +01:57.500 --> 02:05.960 +아주 빠르고 쉽게 다양한 프롬프트 전략을 쓸 수 있죠 도구는 예외지만 비트만 좀 더 복잡해요 + +02:05.960 --> 02:08.990 +그래도 감은 잡으셨죠 + +02:08.990 --> 02:10.280 +그걸 복제하면 돼요 + +02:10.280 --> 02:17.900 +쉽게 개선되는 지점에 도달할 수 있어요 신속하게 LLM에 메시지를 보내면서 점점 더 나은 + +02:17.930 --> 02:20.480 +결과를 내는 거죠 Get it + +02:20.600 --> 02:25.280 +일반적으로 아주 빠른 직접적 향상을 볼 수 있죠 + +02:25.280 --> 02:31.040 +멀티샷 프롬프트 배경, 컨텍스트를 프롬프트 안에 추가하면 즉시 개선되죠 + +02:31.040 --> 02:32.450 +Get up + +02:32.450 --> 02:34.100 +비용도 저렴하고요 + +02:34.130 --> 02:36.650 +즉, 입력을 하면 장점이 많아요 + +02:37.520 --> 02:47.240 +랙은 매우 정확하게 정보를 수집할 수 있습니다 LLM을 무장할 + +02:47.300 --> 02:54.650 +수 있는 아주 구체적인 정보를 캐낼 수 있죠 + +02:54.680 --> 03:02.240 +매우 확장 가능합니다 엄청난 양의 데이터가 쏟아져 들어올 수 있고 래그 파이프라인이 관련 컨텍스트를 + +03:02.240 --> 03:07.250 +제거할 수 있으니까요 모델에 점점 더 큰 프롬프트를 만드는 데 추가 + +03:07.250 --> 03:09.560 +돈을 쓰지 않아도 되죠 + +03:09.710 --> 03:14.630 +세 번째 요점과 연결됩니다 효율적이라는 거죠 그렇게 할 수 있으니까요 + +03:15.140 --> 03:17.390 +미세한 조율이죠 + +03:17.390 --> 03:19.670 +어떤 혜택이 있죠? 
+ +03:19.700 --> 03:27.080 +깊은 전문 지식과 전문 기술을 모델에 구축할 수 있게 해주죠 + +03:27.080 --> 03:33.530 +아주 미묘한 방식으로 뭔가를 하는 데 뛰어난 모델을 만들 수 있어요 + +03:33.530 --> 03:36.470 +추가적인 사실만 알려주는 게 아니에요 + +03:36.590 --> 03:38.600 +CEO 말인데요 + +03:38.630 --> 03:39.620 +대표 이름이 뭐였죠? + +03:39.650 --> 03:40.730 +에이버리 랭커스터요 + +03:40.730 --> 03:45.320 +에이버리가 과거에 뭘 했는지 구체적인 사실만 알려주는 게 아니에요 + +03:45.500 --> 03:48.350 +시간이 지나면서 배우는 거예요 + +03:48.380 --> 03:56.060 +CEO들의 경력은 몰라요 인큐러 엘름 컴퍼니와 그 문화와 통신에 + +03:56.060 --> 03:59.000 +대해 배우는 거죠 + +03:59.000 --> 04:07.790 +더 깊은 통찰력을 얻어서 보여주는 데이터를 추론하는 인간 같은 + +04:07.790 --> 04:10.160 +능력을 보여줘요 + +04:10.160 --> 04:20.660 +모델의 능력과 기능을 바꾸는 훨씬 심오한 방법이죠 시간 + +04:20.660 --> 04:25.040 +추론 기술보다요 + +04:25.280 --> 04:29.210 +모델이 다른 스타일과 톤을 익히게 해주죠 + +04:29.360 --> 04:34.190 +물론 일부는 그냥 요청만 하면 할 수 있습니다 앞서 시스템 프롬프트를 + +04:34.190 --> 04:42.710 +추가하고 비꼬는 코믹한 스타일을 요청했을 때나 llms 배틀이 있었을 때 GPT 40이 상대였다면요 + +04:42.710 --> 04:45.410 +시스템 프롬프트와 함께 할 수 있어요 + +04:45.410 --> 04:52.250 +하지만 고객 서비스 전문가들의 스타일을 모방하는 아주 + +04:52.250 --> 04:58.970 +섬세한 톤의 모델을 원한다면 많은 데이터를 봐야겠죠 + +04:59.000 --> 05:01.280 +배울 점이 많아요 + +05:01.700 --> 05:09.620 +네 번째 요점은 이 기능은 훈련에 많은 투자가 필요하지만 일단 훈련이 끝나면 + +05:09.620 --> 05:13.550 +추론 시간에 실행할 수 있다는 거예요 + +05:13.550 --> 05:16.250 +랙 같은 건 필요 없어요 + +05:16.250 --> 05:21.050 +그러면 컨텍스트를 찾아서 더 이상 필요하지 않은 컨텍스트에서 제공해야 합니다 + +05:21.050 --> 05:24.740 +왜냐하면 모델의 무게에 이미 굽어 놓았기 때문이죠 + +05:24.770 --> 05:26.210 +더 빠르게요 + +05:27.020 --> 05:28.820 +단점은 뭐죠? + +05:28.850 --> 05:33.110 +단점의 대부분은 장점을 따라가는 거죠 + +05:33.110 --> 05:38.810 +어, 프롬프트할 때 단점 중 하나는 전체 컨텍스트 창에 의해 제한된다는 거예요 + +05:38.810 --> 05:44.270 +물론 프롬프트에 밀어 넣는 것에는 한계가 있습니다. 다 사용한다고 + +05:44.270 --> 05:50.780 +해도, 제미니1-5 플래시처럼 메가 컨텍스트 윈도우를 가지고 있고 백만 + +05:50.780 --> 05:56.150 +토큰을 가지고 있다고 해도, 컨텍스트에 많은 양을 집어넣어도 + +05:56.220 --> 06:03.270 +결과물은 감소합니다. 추론 시간 동안 학습하는 양에 비해서요. 
+ +06:03.300 --> 06:06.990 +추론 자체가 느려지고 비용이 더 들죠 + +06:07.020 --> 06:10.080 +더 많은 맥락을 끌어당기죠 + +06:10.080 --> 06:16.380 +프롬프트 연결 같은 걸 작업하는 경우 큰 문제를 해결하기 위해 다수의 + +06:16.410 --> 06:19.890 +추론 호출을 할 경우 모든 게 느려지죠 + +06:20.670 --> 06:26.370 +단점을 없애고 짓는 게 더 어렵겠어요 + +06:26.580 --> 06:29.100 +벡터 데이터베이스가 필요해요 + +06:29.100 --> 06:31.080 +벡터 데이터베이스를 채워야 해요 + +06:31.380 --> 06:38.460 +그래서 필요한 게 최신 정보를 제공하고 유지해야 합니다 만약 에이버리 랭커스터가 + +06:38.460 --> 06:44.370 +CEO 자리에서 물러난다면 누더기에도 정확한 데이터가 반영되어야 + +06:44.370 --> 06:47.310 +하겠죠 + +06:47.550 --> 06:49.710 +뉘앙스가 부족해요 + +06:49.800 --> 06:57.450 +데이터 뒤에 숨겨진 더 깊은 의미를 알아내는 능력이 없어요 + +06:57.450 --> 07:03.420 +섬세 조율의 사실과 단점을 취할 뿐이죠 + +07:03.450 --> 07:04.710 +당연히 그렇겠죠 + +07:04.740 --> 07:05.820 +어렵네요 + +07:06.150 --> 07:07.920 +만들기가 더 어렵죠 + +07:07.950 --> 07:12.180 +아주 재미있을 거예요 땀도 많이 나겠지만요 + +07:12.180 --> 07:13.410 +쉽지 않을 거예요 + +07:13.710 --> 07:16.320 +데이터가 엄청 많이 필요해요 + +07:16.350 --> 07:18.510 +예시가 많이 필요하죠 + +07:18.570 --> 07:24.360 +어느 정도 전문화하고 싶은지 목표에 따라 달라요 + +07:24.360 --> 07:29.400 +일반적으로 보면 데이터 수요가 높을 것이고 훈련 비용이 + +07:29.400 --> 07:30.960 +들 거예요 + +07:30.960 --> 07:37.260 +자주 언급되는 단점이 하나 더 있어요 재앙적 망각이라고 하는데 아주 + +07:37.260 --> 07:38.910 +심각하게 들려요 + +07:38.910 --> 07:46.320 +치명적인 망각이란 미리 훈련된 라마 3 같은 + +07:46.680 --> 07:54.480 +모델을 말하는 거예요 Get 1 많은 양의 데이터로 미세 조정하면 여러분의 문제 해결에 + +07:54.480 --> 07:57.480 +점점 더 나아집니다 시간이 지나면 그렇게 되죠 + +07:57.510 --> 08:05.220 +훈련 시간이 지나면서 기본 모델의 기본 정보를 잊기 시작할 겁니다 그 결과 품질의 일부가 + +08:05.220 --> 08:11.880 +떨어질 수 있어요 여러분이 훈련하는 특정 종류의 질문을 벗어나면요 + +08:12.090 --> 08:21.120 +그런 행동이 눈에 띄었고 우려스러운 결과도 있었어요 + +08:21.120 --> 08:27.450 +기본 모델에서 어떤 정보도 잃지 않도록 해야만 한다면 그게 여러분의 성능에 영향을 + +08:27.450 --> 08:30.270 +미친다면 이걸 조심해야 해요 + +08:31.410 --> 08:32.160 +좋아요 + +08:32.160 --> 08:39.570 +마무리로 이 말만 할게요 보통 프롬프트할 때 프롬프트를 사용하는 시간은 + +08:39.570 --> 08:44.430 +프로젝트의 시작점으로 자주 사용돼요 + +08:44.460 --> 08:49.830 +종종 첫 번째 모델은 프런티어 모델일 수 있습니다. 프롬프트 + +08:49.830 --> 09:00.780 +기능을 추가하는 방법으로 사용할 수도 있습니다. 퍼포먼스 래그는 정확성이 필요한 특정한 경우에 사용하죠. 
+ +09:00.930 --> 09:06.780 +훈련에 추가 비용을 쓰고 싶지 않고 이미 존재하는 데이터 기반도 갖고 싶지 않죠 + +09:06.810 --> 09:07.020 +그럼 말이죠 + +09:07.050 --> 09:13.770 +그러면 래그 워크플로우에 완벽하게 적응하게 됩니다 미세 튜닝은 특별한 작업을 수행하는 + +09:13.800 --> 09:19.110 +것이고 데이터의 양이 매우 많기 때문에 최고의 성능이 필요하죠 + +09:19.350 --> 09:22.980 +뉘앙스도 있어야 하고요 + +09:22.980 --> 09:28.650 +물론 우리 제품 가격 예측기가 처한 상황이죠 + +09:28.650 --> 09:30.060 +데이터는 많아요 + +09:30.060 --> 09:31.650 +특별한 작업이 있어요 + +09:31.650 --> 09:39.030 +우리는 최고의 성능을 원하고 제품에 대한 미묘한 이해도 원합니다 그래서 훌륭한 + +09:39.030 --> 09:43.410 +제품 가격의 차이를 만들 수 있어야 하죠 + +09:44.430 --> 09:51.030 +여기서 잠시 멈추고 전략 섹션을 마무리하러 돌아오겠습니다 그 후에 + +09:51.030 --> 09:55.170 +데이터로 돌아가 큐레이션으로 가죠 diff --git a/week5/community-contributions/subtitles/srts/59472441/en_US.srt b/week5/community-contributions/subtitles/srts/59472441/en_US.srt new file mode 100755 index 0000000..2878192 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472441/en_US.srt @@ -0,0 +1,334 @@ +WEBVTT + +00:00.740 --> 00:01.670 +Welcome back. + +00:01.670 --> 00:07.070 +So we've been doing the thoroughly distasteful, unsavory work of feature engineering. + +00:07.070 --> 00:09.110 +Very grotty, uh, work. + +00:09.110 --> 00:11.990 +But I still find it a bit fun, I have to confess. + +00:11.990 --> 00:17.360 +But it's quite, quite, uh, hacky and involves getting very deep into the data. + +00:17.360 --> 00:23.690 +We went through a bunch of stuff to figure out the weights of items in our data set and stuff in an + +00:23.690 --> 00:25.730 +average weight if we can't find the weights. + +00:25.940 --> 00:30.980 +Um, we're now going to look at the best sellers rank for each of our items. + +00:31.190 --> 00:36.110 +And so we're going to try and collect best sellers rank from its features. + +00:36.230 --> 00:44.570 +Uh, and then uh, what comes back is in fact itself a dictionary, because a product on Amazon can + +00:44.570 --> 00:48.860 +actually be ranked against multiple different bestsellers lists. 
+ +00:49.010 --> 00:53.600 +Um, and so we're going to do something again, very rough and ready. + +00:53.600 --> 00:59.090 +And if it features in multiple bestsellers lists, we're just going to take the average if it ranks + +00:59.090 --> 01:03.350 +first in one list and 10,000 in another, we're just going to take the midpoint. + +01:03.380 --> 01:07.460 +We're which is we're just going to take the one around the 5000 mark. + +01:07.760 --> 01:14.540 +Uh, so this this is, it's it's, uh, it's all a little bit of guesswork. + +01:14.630 --> 01:16.190 +Um, it's a bit of trial and error. + +01:16.190 --> 01:19.670 +And often this kind of traditional data science is a bit like this, actually. + +01:19.670 --> 01:23.270 +So as we'll discover, is the modern data science as well. + +01:23.270 --> 01:25.250 +There's plenty of trial and error. + +01:25.280 --> 01:29.750 +Typically, what you do with this, this kind of technique is that you try lots of features. + +01:29.750 --> 01:33.980 +You might try taking the average, or you might try taking the best and a few other things. + +01:33.980 --> 01:37.640 +And you shove all of these features in there and you see which one wins out. + +01:37.670 --> 01:39.920 +Now, in this case, we're just going to pick the average. + +01:39.920 --> 01:45.380 +But if you've got the stomach for it and you're enjoying this, this, as I say, slightly distasteful + +01:45.380 --> 01:48.560 +work of digging around in features, then try some more features. + +01:48.560 --> 01:55.370 +Try adding in the minimum rank, the maximum rank, uh, whatever you wish, um, to see what gives + +01:55.370 --> 01:56.720 +the most signal. + +01:57.110 --> 01:59.600 +So in our case, we picked the average rank. + +01:59.600 --> 02:00.740 +We just do it. + +02:00.740 --> 02:05.360 +And we're then going to do the same trick we did with weights. 
+ +02:05.360 --> 02:11.490 +We're going to find out what is the average of our average ranks, which turns out to be that slightly + +02:11.490 --> 02:14.520 +curious number of 380,000 or so. + +02:14.520 --> 02:21.240 +And then we're going to give ourselves a get rank with default function, which tries to get a rank. + +02:21.240 --> 02:26.220 +And if something doesn't have a rank, it gives you the average, the average rank from the training + +02:26.220 --> 02:27.180 +data set. + +02:27.390 --> 02:28.230 +Okay. + +02:28.230 --> 02:32.490 +And then one more feature I'm going to add into the mix that I didn't mention before. + +02:32.580 --> 02:39.540 +Uh, you may have guessed, I don't know is I'm going to say how long is the test prompt with all of + +02:39.540 --> 02:41.310 +the detail that it's got in there? + +02:41.340 --> 02:46.980 +I don't know if you remember, there was that scatter diagram that we did, uh, a couple of days ago, + +02:47.010 --> 02:53.490 +or maybe just one day ago, uh, with lots of red dots on it that was trying to see, is there any correlation + +02:53.490 --> 02:56.130 +between the price and the amount of text? + +02:56.130 --> 03:00.390 +And when we looked at that visually, it appeared that there was a slight correlation. + +03:00.420 --> 03:02.070 +I probably got that up. + +03:02.070 --> 03:06.350 +We can just take a quick peek at that to see, oh no, it's not there anymore. + +03:06.380 --> 03:07.700 +I've cleared it out. + +03:07.700 --> 03:10.370 +You'll have to look back yourself if you ran it. + +03:10.580 --> 03:15.590 +I hope you did go back and look at that red diagram again and you'll see what I mean. + +03:15.710 --> 03:17.690 +There is a slight correlation there. + +03:17.690 --> 03:19.820 +So let's add that in. + +03:19.850 --> 03:23.630 +Let's get get text length and we'll use that as well. + +03:24.320 --> 03:27.740 +And then the final one we're going to look at the brands. 
+ +03:28.130 --> 03:31.940 +Let's first look at the most common 40 brands. + +03:31.940 --> 03:34.730 +So we're going to count them all up using the same approach as before. + +03:34.760 --> 03:39.500 +Brands most common 40. + +03:40.820 --> 03:43.280 +Let's look at the most common 40 brands. + +03:43.280 --> 03:44.510 +Here they are. + +03:45.080 --> 03:51.980 +And what you'll notice here is that there's a few, um, automobile car related brands, which I'm not + +03:51.980 --> 03:53.120 +very knowledgeable about. + +03:53.120 --> 03:54.500 +You may be more knowledgeable than me. + +03:54.500 --> 03:54.920 +You may. + +03:54.950 --> 03:56.180 +You may think I'm missing a trick. + +03:56.210 --> 04:01.370 +You may say, oh, there's a beautiful feature there that we could engineer of looking at top auto brands, + +04:01.370 --> 04:05.630 +in which case you should create that feature, add it in and see how you do. + +04:05.780 --> 04:09.020 +Uh, I sadly don't have that domain expertise. + +04:09.170 --> 04:15.920 +Um, and so what I've plucked out is a little category called top electronics brands, where I have + +04:15.920 --> 04:22.070 +shoved in things like HP, Dell, Lenovo, Samsung, Asus, Sony, Canon, Apple, Intel which I've + +04:22.070 --> 04:25.700 +just plucked out of here into this category. + +04:25.700 --> 04:29.660 +And then that gives me a feature is Top Electronics brand. + +04:29.660 --> 04:32.630 +And this is one where again, I've done one feature. + +04:32.630 --> 04:34.640 +You could come up with a bunch of features. + +04:34.640 --> 04:36.800 +You could pick out different kinds of brands. + +04:36.800 --> 04:38.780 +You could pick out some auto brands. + +04:38.780 --> 04:41.390 +You can create as many features as you want. + +04:41.390 --> 04:46.820 +There's no harm in having more features, because the regression model is going to decide which of the + +04:46.820 --> 04:49.100 +features actually gives you some signal. 
+ +04:49.280 --> 04:55.640 +And so a fun competition for you is to be generating features and see how well you can do with handcrafted + +04:55.640 --> 04:56.480 +features. + +04:56.600 --> 05:00.500 +I'll make one more important observation I mentioned a moment ago. + +05:00.500 --> 05:06.650 +I don't have the car expertise, which means I can't pluck out auto brands. + +05:06.650 --> 05:11.450 +And that leads to an interesting point, which is in this kind of traditional data science, it was + +05:11.450 --> 05:18.050 +important that data scientists had some strong knowledge of the domain they were working in. + +05:18.080 --> 05:22.580 +If you were working in products you needed to understand about different products. + +05:22.580 --> 05:26.870 +You needed to understand about different car manufacturers, because you needed to know which features + +05:26.870 --> 05:30.290 +to engineer to have the most likely chance of success. + +05:30.320 --> 05:39.260 +One of the curious and remarkable surprises of deep neural networks and modern machine learning and + +05:39.260 --> 05:46.070 +modern modern deep learning is that the model figures out for itself which features matter. + +05:46.070 --> 05:52.820 +And so there's no longer this requirement for data scientists like you and me to have deep domain expertise + +05:52.820 --> 05:57.260 +in the field that we were building models around, because we just have to have expertise in how to + +05:57.290 --> 06:02.420 +build llms and models and both of any kind of deep neural network. + +06:02.420 --> 06:10.670 +And they have billions of parameters, and they are able to use the the understanding power of all of + +06:10.700 --> 06:14.540 +their parameters to learn about the business area. + +06:14.540 --> 06:19.250 +But back in the day, in feature engineering, one had to understand it oneself and make things like + +06:19.280 --> 06:22.880 +top electronics, brands features which we have done. 
+ +06:22.880 --> 06:26.120 +And all of this brings us to this function here. + +06:26.150 --> 06:27.710 +Get features. + +06:27.740 --> 06:35.120 +It takes an item and it creates this nice little dictionary here with a weight, a rank, a text length, + +06:35.120 --> 06:40.220 +and an is top electronics brand, which is either a one or a zero. + +06:40.490 --> 06:48.650 +Um, and that, that is uh, our features group for this first model. + +06:48.650 --> 06:50.660 +The first real model that we're building. + +06:50.660 --> 06:59.510 +Uh, and please, I urge you to, to spend some time turning this into your features of your dreams. + +06:59.510 --> 07:02.150 +Uh, see how well you could do by engineering features. + +07:02.150 --> 07:03.830 +And you can probably do quite well. + +07:03.950 --> 07:06.890 +Um, but I don't think you'll be much of a match for what's to come. + +07:06.890 --> 07:08.180 +But give it a try. + +07:08.390 --> 07:08.600 +Now. + +07:08.600 --> 07:10.190 +Give it your best shot. + +07:10.190 --> 07:14.630 +But after this, uh, this coming up in this next video, we will actually run. + +07:14.630 --> 07:19.760 +This run our traditional machine learning model and see how it fares. + +07:19.760 --> 07:20.990 +I will see you then. 
diff --git a/week5/community-contributions/subtitles/srts/59472441/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472441/ja_JP.srt new file mode 100755 index 0000000..a296d55 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472441/ja_JP.srt @@ -0,0 +1,286 @@ +WEBVTT + +00:00.740 --> 00:01.670 +お帰りなさい。 + +00:01.670 --> 00:07.070 +だから私たちは、 フィーチャー・エンジニアリングという、 徹底的に不愉快で不愉快な仕事をしてきた。 + +00:07.070 --> 00:09.110 +とてもグロテスクな、 あー、 仕事だ。 + +00:09.110 --> 00:11.990 +でも、 正直言って、 今でもちょっと楽しいんだ。 + +00:11.990 --> 00:17.360 +しかし、 それはかなり、 かなり、 かなり、 かなり、 厄介で、 データに深く入り込む必要がある。 + +00:17.360 --> 00:25.730 +データセットに含まれるアイテムの重みを計算し、 重みがわからない場合は平均的な重みを入力する。 + +00:25.940 --> 00:30.980 +それでは、 各アイテムのベストセラーランキングを見ていきましょう。 + +00:31.190 --> 00:36.110 +そこで、 その特徴からベストセラーランキングを集めてみようと思う。 + +00:36.230 --> 00:48.860 +アマゾンの商品は複数のベストセラーリストにランクインすることができるからだ。 + +00:49.010 --> 00:53.600 +ええと、 それで、 また何かやろうと思っているんだ、 とてもラフで準備万端なんだけどね。 + +00:53.600 --> 00:59.090 +また、 複数のベストセラーリストに掲載されている場合、 あるリストで1位、 別のリストで10,000位にランクインしている場合は、 + +00:59.090 --> 01:03.350 +その平均を取るだけである。 + +01:03.380 --> 01:07.460 +私たちは、 5000ドル付近のものを取るつもりだ。 + +01:07.760 --> 01:14.540 +ええと、 だから、 これは......ええと、 すべてちょっとした推測なんだ。 + +01:14.630 --> 01:16.190 +試行錯誤の繰り返しだよ。 + +01:16.190 --> 01:19.670 +伝統的なデータサイエンスは、 実はこのようなものなのだ。 + +01:19.670 --> 01:23.270 +これからわかるように、 現代のデータサイエンスも同様である。 + +01:23.270 --> 01:25.250 +試行錯誤はいくらでもある。 + +01:25.280 --> 01:29.750 +通常、 このようなテクニックで行うのは、 たくさんの機能を試すことだ。 + +01:29.750 --> 01:33.980 +平均をとってみてもいいし、 ベストをとってみてもいい。 + +01:33.980 --> 01:37.640 +そして、 これらの機能をすべて押し込んで、 どれが勝つかを見るんだ。 + +01:37.670 --> 01:39.920 +さて、 今回は平均値を選ぶことにする。 + +01:39.920 --> 01:45.380 +しかし、 もしあなたにその気概があり、 この、 私が言うのもなんだが、 機能を掘り下げるという少し嫌な作業を楽しんでいるのであれば、 + +01:45.380 --> 01:48.560 +さらにいくつかの機能を試してみてほしい。 + +01:48.560 --> 01:56.720 +最低ランクでも最高ランクでも、 好きなものを入れてみて、 何が一番シグナルになるか試してみてください。 + +01:57.110 --> 01:59.600 +だから、 私たちの場合は平均順位を選んだ。 + +01:59.600 --> 02:00.740 +ただやるだけだ。 + +02:00.740 --> 02:05.360 +そして、 ウエイトでやったのと同じトリックをやるんだ。 + +02:05.360 
--> 02:14.520 +平均ランクの平均を調べることになるが、 38万人というちょっと不思議な数字になった。 + +02:14.520 --> 02:21.240 +そして、 ランクを取得しようとするデフォルト関数でランクを取得する。 + +02:21.240 --> 02:27.180 +もしランクがない場合は、 トレーニング・データ・セットの平均ランクが表示される。 + +02:27.390 --> 02:28.230 +オーケー。 + +02:28.230 --> 02:32.490 +そしてもうひとつ、 これまで触れなかった機能をミックスに加えようと思う。 + +02:32.580 --> 02:41.310 +ええと、 お察しかもしれませんが、 テストプロンプトの長さはどのくらいですか? + +02:41.340 --> 02:46.980 +覚えていらっしゃるかどうか分かりませんが、 2、 + +02:47.010 --> 02:56.130 +3日前、 あるいはほんの1日前、 赤い点がたくさん並んだ散布図がありました。 + +02:56.130 --> 03:00.390 +そして、 それを視覚的に見ると、 わずかながら相関関係があるように見えた。 + +03:00.420 --> 03:02.070 +たぶん、 私が立てたんだと思う。 + +03:02.070 --> 03:06.350 +それをちょっと覗いて、 ああ、 もうないんだ、 と確認すればいい。 + +03:06.380 --> 03:07.700 +もう片付けたよ。 + +03:07.700 --> 03:10.370 +走ったのなら、 自分で振り返ってみなければならない。 + +03:10.580 --> 03:15.590 +もう一度、 あの赤い図を見てもらえれば、 私の言っている意味がわかると思う。 + +03:15.710 --> 03:17.690 +そこには若干の相関関係がある。 + +03:17.690 --> 03:19.820 +だから、 それを加えよう。 + +03:19.850 --> 03:23.630 +テキストの長さを取得しよう。 + +03:24.320 --> 03:27.740 +そして最後はブランドについて。 + +03:28.130 --> 03:31.940 +まず、 最も一般的な40銘柄を見てみよう。 + +03:31.940 --> 03:34.730 +だから、 前と同じやり方で、 すべてを数え上げる。 + +03:34.760 --> 03:39.500 +最も一般的な40のブランド。 + +03:40.820 --> 03:43.280 +最も一般的な40ブランドを見てみよう。 + +03:43.280 --> 03:44.510 +それがこれだ。 + +03:45.080 --> 03:53.120 +そして、 ここでお気づきになるのは、 私はあまり知識がないのですが、 自動車関連のブランドがいくつかあることです。 + +03:53.120 --> 03:54.500 +あなたの方が詳しいかもしれない。 + +03:54.500 --> 03:54.920 +そうかもしれない。 + +03:54.950 --> 03:56.180 +あなたは私がトリックを見逃していると思うかもしれない。 + +03:56.210 --> 04:01.370 +その場合は、 その機能を作って追加し、 + +04:01.370 --> 04:05.630 +どうなるか見てみるべきだ。 + +04:05.780 --> 04:09.020 +残念ながら、 私にはその分野の専門知識はないんだ。 + +04:09.170 --> 04:15.920 +トップ・エレクトロニクス・ブランドと呼ばれる小さなカテゴリーに、 HP、 デル、 レノボ、 + +04:15.920 --> 04:25.700 +サムスン、 アスース、 ソニー、 キャノン、 アップル、 インテルなど、 ここから抜き出したものをこのカテゴリーに押し込んだ。 + +04:25.700 --> 04:29.660 +そして、 トップ・エレクトロニクス・ブランドという特徴を教えてくれた。 + +04:29.660 --> 04:32.630 +そして、 これはまた私が1つの特集を組んだものだ。 + +04:32.630 --> 04:34.640 +たくさんの機能を思いつくことができるだろう。 + +04:34.640 --> 04:36.800 +いろいろなブランドを選ぶことができる。 + +04:36.800 --> 04:38.780 
+いくつかの自動車ブランドを選ぶことができる。 + +04:38.780 --> 04:41.390 +好きなだけ機能を作ることができる。 + +04:41.390 --> 04:49.100 +回帰モデルは、 どの特徴が実際にシグナルを与えるかを決定するからだ。 + +04:49.280 --> 04:56.480 +だから、 あなたにとって楽しい競争は、 機能を生み出し、 手作りの機能でどれだけうまくやれるかを見ることだ。 + +04:56.600 --> 05:00.500 +もう1つ、 先ほど述べた重要な観察をしておこう。 + +05:00.500 --> 05:06.650 +私は車の専門知識がないので、 自動車ブランドを抜き出すことはできない。 + +05:06.650 --> 05:11.450 +つまり、 この種の伝統的なデータサイエンスでは、 データサイエンティストは、 + +05:11.450 --> 05:18.050 +自分たちが研究している領域について強い知識を持っていることが重要だったのだ。 + +05:18.080 --> 05:22.580 +製品に携わるのであれば、 さまざまな製品について理解する必要がある。 + +05:22.580 --> 05:30.290 +さまざまな自動車メーカーについて理解する必要があったし、 成功する可能性が最も高いのはどの機能をエンジニアリングすればよいかを知る必要があったからだ。 + +05:30.320 --> 05:39.260 +ディープ・ニューラル・ネットワークや最新の機械学習、 最新のディープ・ラーニングの不思議で驚くべき点は、 + +05:39.260 --> 05:46.070 +どの特徴が重要かをモデルが自ら見つけ出すことだ。 + +05:46.070 --> 05:52.820 +そのため、 あなたや私のようなデータサイエンティストには、 + +05:52.820 --> 06:02.420 +モデルを構築していた分野の深い専門知識が必要ではなくなりました。 + +06:02.420 --> 06:14.540 +そして、 彼らは何十億ものパラメーターを持っており、 そのすべてのパラメーターの理解力を使ってビジネスエリアについて学ぶことができる。 + +06:14.540 --> 06:22.880 +しかし、 その昔、 フィーチャー・エンジニアリングでは、 自分自身でそれを理解し、 トップエレクトロニクスやブランド機能のようなものを作らなければならなかった。 + +06:22.880 --> 06:26.120 +そして、 そのすべてがこの機能へとつながっている。 + +06:26.150 --> 06:27.710 +特徴をつかめ。 + +06:27.740 --> 06:40.220 +この辞書には、 重さ、 ランク、 テキストの長さ、 そしてトップ・エレクトロニクス・ブランド(1か0)が入っている。 + +06:40.490 --> 06:48.650 +これが、 この最初のモデルの特徴だ。 + +06:48.650 --> 06:50.660 +私たちが作っている最初の本格的なモデルだ。 + +06:50.660 --> 06:59.510 +そして、 どうか、 これを夢の特集にするために時間を費やしてほしい。 + +06:59.510 --> 07:02.150 +ええと、 エンジニアリングの機能でどれだけうまくやれるか見てみよう。 + +07:02.150 --> 07:03.830 +そして、 おそらくかなりうまくやれるだろう。 + +07:03.950 --> 07:06.890 +うーん、 でも、 これから起こることにあなたが太刀打ちできるとは思えない。 + +07:06.890 --> 07:08.180 +しかし、 試してみてほしい。 + +07:08.390 --> 07:08.600 +今すぐだ。 + +07:08.600 --> 07:10.190 +ベストを尽くせ。 + +07:10.190 --> 07:14.630 +でもこの後、 次のビデオでは実際に走るんだ。 + +07:14.630 --> 07:19.760 +従来の機械学習モデルを実行し、 その結果を見る。 + +07:19.760 --> 07:20.990 +それではまた。 diff --git a/week5/community-contributions/subtitles/srts/59472441/ko_KR.srt 
b/week5/community-contributions/subtitles/srts/59472441/ko_KR.srt new file mode 100755 index 0000000..40cdbf5 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472441/ko_KR.srt @@ -0,0 +1,331 @@ +WEBVTT + +00:00.740 --> 00:01.670 +잘 돌아왔어요 + +00:01.670 --> 00:07.070 +그래서 저희는 완전히 불쾌하고 불쾌한 기능들을 작업해왔어요 + +00:07.070 --> 00:09.110 +아주 지저분한 일이죠 + +00:09.110 --> 00:11.990 +그래도 비트는 좀 재미있어요 + +00:11.990 --> 00:17.360 +하지만 좀 허접하고 데이터에 깊이 들어가야 해요 + +00:17.360 --> 00:23.690 +데이터에 있는 물건의 무게를 알아내려고 여러 가지를 살펴봤어요 무게를 못 찾으면 + +00:23.690 --> 00:25.730 +평균 무게를 계산했고요 + +00:25.940 --> 00:30.980 +이제 각 제품의 베스트셀러 순위를 살펴볼게요 + +00:31.190 --> 00:36.110 +그래서 특징을 기준으로 베스트셀러 순위를 모을 거예요 + +00:36.230 --> 00:44.570 +그리고 돌아오는 건 사실 사전이에요 아마존 제품 하나가 여러 베스트셀러 + +00:44.570 --> 00:48.860 +목록과 순위를 매길 수 있거든요 + +00:49.010 --> 00:53.600 +그래서 다시 한번 거칠고 준비된 걸 해볼게요 + +00:53.600 --> 00:59.090 +여러 베스트셀러 목록에 속성들이 있다면 평균을 선택해서 한 목록에서 1위 다른 + +00:59.090 --> 01:03.350 +목록에서 10,000위인 경우 중간 지점을 선택할게요 + +01:03.380 --> 01:07.460 +5천 마크에 해당하는 걸 가져갈 거예요 + +01:07.760 --> 01:14.540 +비트 박스는 추측일 뿐이에요 + +01:14.630 --> 01:16.190 +비트 박스는 시행착오를 거치죠 + +01:16.190 --> 01:19.670 +전통적인 데이터 과학은 사실 비트 박스와 비슷해요 + +01:19.670 --> 01:23.270 +현대 데이터 과학도 마찬가지죠 + +01:23.270 --> 01:25.250 +시행착오가 많아요 + +01:25.280 --> 01:29.750 +일반적으로 이런 기술로 하는 건 많은 기능을 시도해보는 거죠 + +01:29.750 --> 01:33.980 +평균을 택하거나 최고를 택하거나 다른 걸 택할 수도 있죠 + +01:33.980 --> 01:37.640 +이 모든 기능을 밀어 넣으면 어떤 게 이기는지 알 수 있죠 + +01:37.670 --> 01:39.920 +이 경우엔 평균을 선택할게요 + +01:39.920 --> 01:45.380 +하지만 배짱이 있고 이 과정이 즐겁다면... 
이렇게 여기저기 파헤치는 + +01:45.380 --> 01:48.560 +게 좀 별로라면 다른 것도 해 보세요 + +01:48.560 --> 01:55.370 +최저 계급이든 최대 계급이든 원하는 걸 추가해 보세요 가장 신호가 강한 게 + +01:55.370 --> 01:56.720 +뭔지 보게요 + +01:57.110 --> 01:59.600 +그래서 저희는 평균 등급을 뽑았어요 + +01:59.600 --> 02:00.740 +그냥 하는 거예요 + +02:00.740 --> 02:05.360 +추로 했던 것과 같은 기술을 할 거예요 + +02:05.360 --> 02:11.490 +평균 등급을 계산해 볼게요 380,000명이라는 다소 + +02:11.490 --> 02:14.520 +신기한 숫자가 나왔네요 + +02:14.520 --> 02:21.240 +그런 다음 기본 함수로 get-rance를 줍니다 기본 함수는 랭크를 얻으려 하죠 + +02:21.240 --> 02:26.220 +순위가 없는 것들은 평균을 줍니다. 훈련 데이터에서 평균 순위를 + +02:26.220 --> 02:27.180 +주죠. + +02:27.390 --> 02:28.230 +네 + +02:28.230 --> 02:32.490 +전에 말씀드리지 않은 기능이 하나 더 있어요 + +02:32.580 --> 02:39.540 +추측하셨을 수도 있는데 모든 세부 사항이 담긴 테스트 프롬프트가 얼마나 긴지 + +02:39.540 --> 02:41.310 +여쭤보려고요 + +02:41.340 --> 02:46.980 +기억하실지 모르겠지만 며칠 전에 했던 분산도표가 있는데 빨간 + +02:47.010 --> 02:53.490 +점이 많이 있는 그 도표는 가격과 텍스트의 양 사이에 상관관계가 있는지 + +02:53.490 --> 02:56.130 +알아보려고 했었죠 + +02:56.130 --> 03:00.390 +시각적으로 봤을 때 약간의 상관관계가 있는 것 같았어요 + +03:00.420 --> 03:02.070 +제가 올렸을 거예요 + +03:02.070 --> 03:06.350 +잠깐 살펴보면 이런, 이제 없네요 + +03:06.380 --> 03:07.700 +다 치웠어요 + +03:07.700 --> 03:10.370 +당신이 운영하면 당신이 돌아봐야 할 거예요 + +03:10.580 --> 03:15.590 +다시 돌아가서 빨간 도표를 보면 무슨 뜻인지 알 거예요 + +03:15.710 --> 03:17.690 +약간 상관관계가 있어요 + +03:17.690 --> 03:19.820 +이걸 추가하죠 + +03:19.850 --> 03:23.630 +텍스트 길이 get get 그것도 사용하죠 + +03:24.320 --> 03:27.740 +마지막은 브랜드를 볼 거예요 + +03:28.130 --> 03:31.940 +가장 흔한 40가지 브랜드부터 살펴보죠 + +03:31.940 --> 03:34.730 +아까와 같은 접근 방법으로 모두 세어볼게요 + +03:34.760 --> 03:39.500 +가장 흔한 브랜드 40개요 + +03:40.820 --> 03:43.280 +가장 흔한 40가지 브랜드를 보죠 + +03:43.280 --> 03:44.510 +여기 있네요 + +03:45.080 --> 03:53.120 +여기 보시면 자동차 관련 브랜드가 몇 개 있는데 제가 잘 모르는 것들이에요 + +03:53.120 --> 03:54.500 +저보다 더 잘 아실지도 몰라요 + +03:54.500 --> 03:54.920 +그러세요 + +03:54.950 --> 03:56.180 +제가 뭘 놓쳤다고 생각하시겠죠 + +03:56.210 --> 04:01.370 +최고의 자동차 브랜드에서 볼 수 있는 멋진 기능이 있다고 할 수도 있죠 그런 + +04:01.370 --> 04:05.630 +경우 해당 기능을 만들어 추가하고 어떻게 하는지 보세요 + +04:05.780 --> 04:09.020 +안타깝게도 전 그런 분야는 잘 몰라요 + 
+04:09.170 --> 04:15.920 +그래서 제가 고른 건 일류 전자 제품 브랜드예요 HP, + +04:15.920 --> 04:22.070 +델, 레노버 삼성, 소니, 캐논, 애플 같은 제품을 + +04:22.070 --> 04:25.700 +이 카테고리에 넣은 거죠 + +04:25.700 --> 04:29.660 +그럼 톱 일렉트로닉스 브랜드의 기능이 생기죠 + +04:29.660 --> 04:32.630 +이건 제가 한 가지 기능을 끝낸 거예요 + +04:32.630 --> 04:34.640 +여러 가지 기능을 생각해낼 수 있어요 + +04:34.640 --> 04:36.800 +여러 가지 브랜드를 고를 수 있어요 + +04:36.800 --> 04:38.780 +자동차 브랜드를 골라요 + +04:38.780 --> 04:41.390 +원하는 만큼 많은 기능을 만들 수 있어요 + +04:41.390 --> 04:46.820 +기능이 더 있다고 나쁠 건 없어요 회귀 모델이 어떤 기능이 신호를 + +04:46.820 --> 04:49.100 +주는지 결정하거든요 + +04:49.280 --> 04:55.640 +재미있는 대회는 기능을 생성하고 수공예 기능을 얼마나 잘 만드는지 보는 + +04:55.640 --> 04:56.480 +거예요 + +04:56.600 --> 05:00.500 +아까 중요한 걸 하나 더 말씀드릴게요 + +05:00.500 --> 05:06.650 +자동차 전문가가 아니니 자동차 브랜드를 고를 수 없죠 + +05:06.650 --> 05:11.450 +여기서 흥미로운 점이 등장하는데 전통적인 데이터 과학에서는 + +05:11.450 --> 05:18.050 +데이터 과학자가 자신이 연구하는 분야에 대해 어느 정도 아는 게 중요했어요 + +05:18.080 --> 05:22.580 +제품을 다루는 일을 한다면 다양한 제품을 이해해야 해요 + +05:22.580 --> 05:26.870 +여러 자동차 제조사를 이해해야 했어요 어떤 기능을 설계해야 + +05:26.870 --> 05:30.290 +성공 확률이 가장 높을지 알아야 했으니까요 + +05:30.320 --> 05:39.260 +현대 딥 뉴런네트워크와 머신 러닝 딥 러닝이 가진 신기하고 놀라운 점은 + +05:39.260 --> 05:46.070 +물질을 포함한 모델이 스스로 만들어 낸다는 거예요 + +05:46.070 --> 05:52.820 +따라서 여러분이나 저 같은 데이터 과학자가 모델을 구축하는 분야에서 딥 도메인 전문성을 + +05:52.820 --> 05:57.260 +가질 필요가 더는 없습니다 llms와 모델을 구축하는 + +05:57.290 --> 06:02.420 +방법과 심층 신경망 구축에 관한 전문성이 있어야 하니까요 + +06:02.420 --> 06:10.670 +수십억 개의 매개 변수를 가지고 있고 그 모든 매개 변수의 힘을 이용해서 + +06:10.700 --> 06:14.540 +비즈니스 분야를 배울 수 있죠 + +06:14.540 --> 06:19.250 +하지만 과거 피처링 엔지니어링에서는 스스로를 이해해야 했고 톱 전자 + +06:19.280 --> 06:22.880 +공학이나 브랜드 같은 기능을 만들어야 했어요 + +06:22.880 --> 06:26.120 +이 모든 게 이 함수로 연결돼요 + +06:26.150 --> 06:27.710 +get 피처링이요 + +06:27.740 --> 06:35.120 +항목을 하나 골라 멋진 사전을 만듭니다 중량, 순위, 텍스트 길이 + +06:35.120 --> 06:40.220 +1이나 0으로 된 톱 전자 제품 브랜드가 있죠 + +06:40.490 --> 06:48.650 +이게 첫 번째 모델의 특징 그룹이에요 + +06:48.650 --> 06:50.660 +우리가 만드는 첫 번째 모형이죠 + +06:50.660 --> 06:59.510 +그리고 부디 시간을 들여 이걸 꿈의 형태로 바꿔 보세요 + +06:59.510 --> 07:02.150 
+엔지니어링 기능으로 얼마나 잘할지 보는 거죠 + +07:02.150 --> 07:03.830 +잘할 수 있을 거예요 + +07:03.950 --> 07:06.890 +하지만 앞으로의 일에 비하면 상대도 안 될 거예요 + +07:06.890 --> 07:08.180 +그래도 한번 해 봐요 + +07:08.390 --> 07:08.600 +지금요 + +07:08.600 --> 07:10.190 +최선을 다해 봐요 + +07:10.190 --> 07:14.630 +하지만 이다음에 나오는 다음 영상에서는 실제로 달릴 거예요 + +07:14.630 --> 07:19.760 +전통적인 머신 러닝 모델을 실행해 보고 결과를 보죠 + +07:19.760 --> 07:20.990 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59472463/en_US.srt b/week5/community-contributions/subtitles/srts/59472463/en_US.srt new file mode 100755 index 0000000..bbaacf5 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472463/en_US.srt @@ -0,0 +1,325 @@ +WEBVTT + +00:00.800 --> 00:06.560 +So last time we looked at a humble linear regression model with feature engineering, and now we say + +00:06.560 --> 00:12.080 +goodbye to features and we start working with natural language processing. + +00:12.080 --> 00:19.490 +And we're going to begin with a rather simplistic type of NLP, which is called Bag of words, which + +00:19.490 --> 00:21.110 +I briefly mentioned before. + +00:21.140 --> 00:24.950 +So we're going to start by putting making these two very useful things. + +00:24.950 --> 00:33.920 +One is called prices and one is called documents and prices is just the list of all of the prices in + +00:33.920 --> 00:42.020 +our training data set and documents is all of the test prompts from our training data set. + +00:42.230 --> 00:47.480 +Just a bit of text, and what we're going to try and do now is build models which use the text in our + +00:47.480 --> 00:51.440 +documents, rather than using features that we have engineered ourselves. + +00:51.440 --> 00:52.940 +Just use the text. + +00:53.330 --> 01:00.660 +Um, and yeah, one one little trick you might notice here is that I use the test prompt, not the training + +01:00.660 --> 01:01.440 +prompt. + +01:01.590 --> 01:06.000 +If I use the training prompt, it would include the price in the training prompt. 
+ +01:06.120 --> 01:12.210 +And that wouldn't work because then of course the model would just learn to spot the price itself is + +01:12.210 --> 01:13.470 +actually in the prompt. + +01:13.470 --> 01:16.470 +And then when it got to test time, it would fail rather miserably. + +01:16.470 --> 01:20.220 +So that's a little trick to watch out for. + +01:20.460 --> 01:22.980 +Um, a trap I'm more likely to watch out for. + +01:22.980 --> 01:25.710 +So we prepare that. + +01:25.710 --> 01:31.770 +We're now going to use something called a countvectorizer, which sounds super fancy. + +01:31.800 --> 01:36.330 +At any time that you think you're going to be working today with a count of Vectorizer, sounds like + +01:36.330 --> 01:41.850 +you're going to be building something that's highly sophisticated, but it is, alas, not sophisticated. + +01:41.940 --> 01:49.740 +What it's going to do is simply look for the counting, the number of words, and then build a vector + +01:49.740 --> 01:57.740 +where each location in the vector represents a particular word, and then how many times that word appears + +01:57.740 --> 01:59.420 +in your document. + +01:59.540 --> 02:06.110 +So each document will be a vector, and every row in that vector represents a word in your vocabulary. + +02:06.110 --> 02:09.170 +And the counter counts how many times it's there. + +02:09.170 --> 02:15.050 +And that whole way of thinking about things is known as a bag of words model, because it's like you + +02:15.050 --> 02:19.880 +have a bag of words that you are counting up, and it doesn't matter what order the words appear in, + +02:19.880 --> 02:23.840 +it's just the fact that they are there a certain number of times. + +02:24.170 --> 02:31.130 +Uh, and so we're doing that we're going to count up to the 1000 most common or most important of the + +02:31.130 --> 02:31.700 +words. 
+ +02:31.700 --> 02:41.150 +And we're going to use this parameter here to make sure that it only, uh, it, it removes, uh, common + +02:41.150 --> 02:47.270 +stop words, which is what people call things like and and the and in and it that are going to be not + +02:47.270 --> 02:49.820 +useful for the model and only a distraction. + +02:49.820 --> 02:54.320 +And so they will get plucked out leaving us with juicy words behind. + +02:54.320 --> 03:00.000 +And um, what we're then going to do is, uh, and now you can see we do things much quicker. + +03:00.000 --> 03:05.340 +We're going to, uh, we're going to create our data set based on these documents. + +03:05.340 --> 03:09.240 +We're going to create a linear regression model again just as before. + +03:09.270 --> 03:14.610 +And we're going to fit our bag of words, um, to our prices. + +03:14.610 --> 03:21.090 +In other words, we're instead of using features this time we're replacing our features with this bag + +03:21.090 --> 03:29.130 +of words, this vector counting the number of words, um, for each word, each of our 1000 words in + +03:29.130 --> 03:32.850 +our most common dictionary vocab. + +03:32.880 --> 03:34.650 +So that's what we're going to do. + +03:34.680 --> 03:36.480 +We're going to run that linear regression. + +03:36.480 --> 03:37.980 +It's happening right now. + +03:37.980 --> 03:46.110 +It's counting up the uh, the, the 1000, uh, vector points across all of our data set. + +03:46.440 --> 03:53.250 +Um, and then once it's done that, we're going to put that into a bag of words, linear regression, + +03:53.250 --> 03:54.180 +pricer. + +03:54.180 --> 03:58.130 +and we are then going to test it, so we will test it again. + +03:58.160 --> 03:59.660 +It's just completed running. + +03:59.690 --> 04:07.280 +Um, so this is our function, our simple function that will try it out and we will test it with tester + +04:07.310 --> 04:08.570 +dot test. + +04:09.320 --> 04:10.790 +Bag of words. 
+ +04:10.820 --> 04:13.520 +Linear regression Pricer. + +04:16.010 --> 04:17.630 +Let's see how it does. + +04:19.550 --> 04:21.050 +Lots of greens there I see. + +04:21.050 --> 04:22.730 +But also lots of reds. + +04:23.300 --> 04:23.960 +You can see. + +04:23.990 --> 04:25.340 +Let's just pluck one out. + +04:25.370 --> 04:27.140 +It guest $74. + +04:27.140 --> 04:29.030 +And it was in fact $46. + +04:29.030 --> 04:31.640 +So you can see that it's got things that are right. + +04:31.640 --> 04:33.530 +It's also got things that are way off. + +04:33.560 --> 04:35.540 +How do you think the graph is going to look. + +04:35.570 --> 04:36.830 +Let's see. + +04:37.430 --> 04:39.350 +Okay okay. + +04:39.350 --> 04:42.680 +Well it's looking more like there's some something good happening. + +04:42.680 --> 04:46.130 +We're starting to see things converging around the line. + +04:46.130 --> 04:48.080 +We're seeing a lot more green dots. + +04:48.080 --> 04:51.140 +The average is $113. + +04:51.140 --> 04:59.310 +So distinctly better than the linear regression with features and distinctly better than guessing. + +04:59.670 --> 05:01.980 +Uh, so, uh, progress happening. + +05:01.980 --> 05:05.760 +There's still some weird outliers here, some some problems. + +05:06.000 --> 05:10.860 +Um, but, uh, that this was the true value and it should have been right up there somewhere. + +05:11.160 --> 05:15.810 +Um, but there you can see that there is a progress being made. + +05:16.200 --> 05:24.540 +So next, the the last of the of this set before we go into more advanced models is going to be using + +05:24.540 --> 05:31.560 +the amazing Gensim library to, uh, introduce this word to vec model. 
+ +05:31.560 --> 05:37.050 +That was uh, it was one of the first times that I really encountered, uh, neural networks with NLP, + +05:37.410 --> 05:45.060 +uh, using a vectorization model, um, with, with something that is a more sophisticated, uh, vector + +05:45.060 --> 05:46.440 +embedding model. + +05:46.500 --> 05:54.950 +Um, and we are going to use this word two vec function class from Gensim, and we are going to build + +05:54.950 --> 05:57.980 +vectors with 400 dimensions. + +05:58.250 --> 06:02.960 +And it's I've set it to use eight workers, which means it really hammers my box. + +06:02.960 --> 06:07.940 +This still took several minutes to run, and I ran it in advance so we wouldn't have to wait for for + +06:07.940 --> 06:08.840 +all of this. + +06:08.870 --> 06:19.610 +Uh, and I also then ran this, and that should mean that hopefully we are ready just to run this and + +06:19.610 --> 06:24.410 +to immediately see the results tester dot test. + +06:25.160 --> 06:33.200 +We should be able to pass in the word two vec linear regression pricer and see how this fancy vectorization + +06:33.200 --> 06:36.140 +model performs with linear regression. + +06:37.070 --> 06:39.950 +Well, it looks good so far from the first two lines, but oh no. + +06:39.980 --> 06:46.130 +Then there's some red, some green, some red, some greens, lots of reds that scoot down past the + +06:46.130 --> 06:49.290 +250 test data points to the chart. + +06:50.100 --> 06:52.080 +So here we have it. + +06:52.110 --> 06:54.150 +It looks quite decent again. + +06:54.180 --> 07:01.980 +Interestingly, the bad news is it's actually a hair worse than linear regression based on the simple + +07:02.010 --> 07:03.540 +bag of words model. + +07:03.540 --> 07:11.850 +So unveiling the lovely word two vec vector hasn't yet particularly helped us. 
+ +07:11.850 --> 07:17.850 +We're still much the same territory as before, and that may be because the linear regression model + +07:17.850 --> 07:23.730 +isn't powerful enough to take advantage of all of the extra information that we have in these word two + +07:23.730 --> 07:25.140 +vec vectors. + +07:25.410 --> 07:31.800 +So in the next time we're going to explore some slightly more sophisticated models, and then we'll + +07:31.800 --> 07:34.890 +be done with our with our traditional machine learning. + +07:34.890 --> 07:38.820 +So just hang on in there for a bit longer, because I want to dig a little bit more juice out of this. + +07:38.850 --> 07:43.920 +We want to get a bit better with our baseline models, because we don't want our LMS to have an easy + +07:43.920 --> 07:44.730 +run at this at all. + +07:44.730 --> 07:46.350 +We want to we want to put up a fight. + +07:46.350 --> 07:48.330 +So see you next time. diff --git a/week5/community-contributions/subtitles/srts/59472463/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472463/ja_JP.srt new file mode 100755 index 0000000..0e4f31c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472463/ja_JP.srt @@ -0,0 +1,262 @@ +WEBVTT + +00:00.800 --> 00:06.560 +前回は、 素性工学を使った地味な線形回帰モデルを見てきたが、 + +00:06.560 --> 00:12.080 +今回は素性に別れを告げて、 自然言語処理に取りかかる。 + +00:12.080 --> 00:21.110 +そして、 前に少し触れた「言葉の袋」と呼ばれる、 かなり単純化されたタイプのNLPから始めます。 + +00:21.140 --> 00:24.950 +だから、 まずはこの2つのとても便利なものを作ることから始めよう。 + +00:24.950 --> 00:33.920 +1つはprice、 もう1つはdocumentsと呼ばれ、 priceはトレーニングデータセットに含まれるすべての価格のリスト、 + +00:33.920 --> 00:42.020 +documentsはトレーニングデータセットに含まれるすべてのテストプロンプトである。 + +00:42.230 --> 00:47.480 +私たちがこれからやろうとしているのは、 私たち自身で設計した特徴を使うのではなく、 + +00:47.480 --> 00:51.440 +文書中のテキストを使ったモデルを構築することです。 + +00:51.440 --> 00:52.940 +テキストを使うだけでいい。 + +00:53.330 --> 01:01.440 +そうそう、 ここでひとつ、 ちょっとしたトリックにお気づきかもしれないが、 私はトレーニング用のプロンプトではなく、 テスト用のプロンプトを使っている。 + +01:01.590 --> 01:06.000 +トレーニング・プロンプトを使えば、 トレーニング・プロンプトに価格が含まれることになる。 + 
+01:06.120 --> 01:13.470 +というのも、 そうすれば当然、 モデルは価格そのものが実際にプロンプトの中にあることを見抜くことを学習してしまうからだ。 + +01:13.470 --> 01:16.470 +そしてテストになると、 かなり惨めに失敗する。 + +01:16.470 --> 01:20.220 +だから、 これは気をつけるべきちょっとしたトリックなんだ。 + +01:20.460 --> 01:22.980 +うーん、 罠には気をつけたいね。 + +01:22.980 --> 01:25.710 +だからそれを準備する。 + +01:25.710 --> 01:31.770 +これからカウントベクタライザーと呼ばれるものを使う。 + +01:31.800 --> 01:36.330 +今日、 ベクターライザーのカウントで仕事をしようと思うと、 高度に洗練されたものを作ろうとしているように聞こえるが、 + +01:36.330 --> 01:41.850 +残念ながら洗練されていない。 + +01:41.940 --> 01:49.740 +この機能は、 単純に単語の数を数え、 ベクトルを構築する。 ベクトル内の各位置が特定の単語を表し、 + +01:49.740 --> 01:59.420 +その単語が文書内に何回現れるかを示す。 + +01:59.540 --> 02:06.110 +つまり、 各文書はベクトルとなり、 そのベクトルの各行が語彙の単語を表す。 + +02:06.110 --> 02:09.170 +そして、 カウンターはその回数をカウントする。 + +02:09.170 --> 02:15.050 +このような物事の考え方は、 バッグ・オブ・ワード・モデルとして知られている。 + +02:15.050 --> 02:23.840 +バッグの中に単語が入っていて、 それを数え上げるようなものだ。 + +02:24.170 --> 02:31.700 +それで、 最も一般的な、 あるいは最も重要な単語を1000まで数えるんだ。 + +02:31.700 --> 02:41.150 +そして、 このパラメータを使って、 一般的なストップワード(andやthe、 inやitのような、 + +02:41.150 --> 02:49.820 +モデルにとって役に立たず、 邪魔にしかならないもの)のみを削除するようにします。 + +02:49.820 --> 02:54.320 +そうして、 彼らは私たちにジューシーな言葉を残したまま、 引き抜かれてしまうのだ。 + +02:54.320 --> 03:00.000 +そして、 これからやることは......ああ、 今見てもらえばわかると思うけど、 僕らはもっと早くやるんだ。 + +03:00.000 --> 03:05.340 +これらの文書に基づいてデータセットを作成します。 + +03:05.340 --> 03:09.240 +前回と同じように、 線形回帰モデルを作成します。 + +03:09.270 --> 03:14.610 +そして、 私たちは言葉のバッグを、 うーん、 私たちの価格に合わせるつもりだ。 + +03:14.610 --> 03:21.090 +言い換えれば、 今回は特徴量を使う代わりに、 この単語の袋、 つまり、 最も一般的な辞書の語彙に含まれる1000の単語について、 + +03:21.090 --> 03:32.850 +それぞれの単語の数をカウントするベクトルで特徴量を置き換えるのだ。 + +03:32.880 --> 03:34.650 +だから、 そうするつもりだ。 + +03:34.680 --> 03:36.480 +線形回帰を実行する。 + +03:36.480 --> 03:37.980 +今まさに起きていることだ。 + +03:37.980 --> 03:46.110 +データセット全体の1000のベクトルポイントをカウントアップしている。 + +03:46.440 --> 03:54.180 +そうしたら、 それを線形回帰、 プライサーという言葉の袋に入れるんだ。 + +03:54.180 --> 03:58.130 +そして、 それをテストするつもりなので、 もう一度テストする。 + +03:58.160 --> 03:59.660 +走り終えたばかりだ。 + +03:59.690 --> 04:08.570 +ええと、 これが私たちの関数で、 単純な関数で、 tester dot testでテストしてみます。 + +04:09.320 --> 04:10.790 +言葉の袋。 + 
+04:10.820 --> 04:13.520 +線形回帰 プライサー + +04:16.010 --> 04:17.630 +どうなるか見てみよう。 + +04:19.550 --> 04:21.050 +緑がたくさんあるね。 + +04:21.050 --> 04:22.730 +だが、 赤も多い。 + +04:23.300 --> 04:23.960 +分かるだろう。 + +04:23.990 --> 04:25.340 +ひとつだけ抜こう。 + +04:25.370 --> 04:27.140 +客は74ドル。 + +04:27.140 --> 04:29.030 +そして、 実際には46ドルだった。 + +04:29.030 --> 04:31.640 +だから、 正しいことをやっているのがわかる。 + +04:31.640 --> 04:33.530 +また、 大きくずれていることもある。 + +04:33.560 --> 04:35.540 +グラフはどうなると思う? + +04:35.570 --> 04:36.830 +見てみよう。 + +04:37.430 --> 04:39.350 +オーケー、 オーケー。 + +04:39.350 --> 04:42.680 +まあ、 何かいいことが起こりそうな雰囲気はある。 + +04:42.680 --> 04:46.130 +ライン周辺に収束しつつあるのが見え始めている。 + +04:46.130 --> 04:48.080 +緑の点が多くなってきた。 + +04:48.080 --> 04:51.140 +平均は113ドル。 + +04:51.140 --> 04:59.310 +つまり、 特徴量を使った線形回帰よりも、 推測よりも明らかに優れているのだ。 + +04:59.670 --> 05:01.980 +それで、 進展があったんだ。 + +05:01.980 --> 05:05.760 +ここにはまだ奇妙な異常値やいくつかの問題がある。 + +05:06.000 --> 05:10.860 +うーん、 でも、 これが本当の価値で、 どこかに書いてあるはずなんだ。 + +05:11.160 --> 05:15.810 +うーん、 でも前進しているのは見てわかるだろう。 + +05:16.200 --> 05:24.540 +では次に、 より高度なモデルに入る前のこのセットの最後として、 素晴らしいGensimライブラリを使って、 + +05:24.540 --> 05:31.560 +このvecモデルという言葉を紹介しよう。 + +05:31.560 --> 05:37.050 +それは、 私がNLPで、 ベクトル化モデルを使って、 より洗練されたベクトル埋め込みモデルを使って、 + +05:37.410 --> 05:46.440 +ニューラルネットワークに出会った最初の時だった。 + +05:46.500 --> 05:57.980 +Gensimの2つのvec関数クラスを使い、 400次元のベクトルを作る。 + +05:58.250 --> 06:02.960 +そして、 8人のワーカーを使うように設定している。 + +06:02.960 --> 06:08.840 +これでも実行には数分かかったが、 事前に実行しておいたので、 このために待たされることはなかった。 + +06:08.870 --> 06:24.410 +そして、 これを実行した。 うまくいけば、 これを実行するだけで、 すぐにテスター・ドット・テストの結果を見ることができるはずだ。 + +06:25.160 --> 06:36.140 +2つのvec線形回帰プライサーという単語を渡して、 この空想的なベクトル化モデルが線形回帰でどのように機能するかを見ることができるはずだ。 + +06:37.070 --> 06:39.950 +まあ、 最初の2行を見る限りは良さそうだが、 いやはや。 + +06:39.980 --> 06:49.290 +赤、 緑、 赤、 緑、 赤がたくさんあって、 250のテストデータポイントを通り越してチャートにすとんと落ちる。 + +06:50.100 --> 06:52.080 +だから、 ここにある。 + +06:52.110 --> 06:54.150 +今回もかなりまともに見える。 + +06:54.180 --> 07:03.540 +興味深いことに、 悪いニュースとしては、 単純な単語袋モデルに基づく線形回帰よりも、 実際は少し悪いということだ。 + +07:03.540 --> 07:11.850 +だから、 
2ヴェク・ベクターという素敵な言葉を披露しても、 まだ特に役には立っていない。 + +07:11.850 --> 07:25.140 +それは、 線形回帰モデルが、 この2つのvecベクトルに含まれる余分な情報をすべて活用できるほど強力ではないからかもしれない。 + +07:25.410 --> 07:34.890 +次回は、 もう少し洗練されたモデルを探求し、 伝統的な機械学習は終わりにしよう。 + +07:34.890 --> 07:38.820 +だから、 もう少し我慢してくれ。 ここからもう少し汁を出したいんだ。 + +07:38.850 --> 07:44.730 +私たちは、 ベースラインモデルをもう少し良くしたい。 LMSがこのようなことに簡単に走らないようにしたいからだ。 + +07:44.730 --> 07:46.350 +我々は戦いを挑みたい。 + +07:46.350 --> 07:48.330 +それではまた次回。 diff --git a/week5/community-contributions/subtitles/srts/59472463/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472463/ko_KR.srt new file mode 100755 index 0000000..18eb6bc --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472463/ko_KR.srt @@ -0,0 +1,313 @@ +WEBVTT + +00:00.800 --> 00:06.560 +지난 시간에는 기능 공학의 단순한 선형 회귀 모델을 살펴봤는데요 + +00:06.560 --> 00:12.080 +이제 기능과는 작별하고 자연 언어 프로세싱을 시작해보죠 + +00:12.080 --> 00:19.490 +NLP의 다소 단순한 유형으로 시작하겠습니다 앞서 짧게 언급한 단어 주머니라고 + +00:19.490 --> 00:21.110 +하죠 + +00:21.140 --> 00:24.950 +이 두 가지 유용한 것부터 만들어 볼게요 + +00:24.950 --> 00:33.920 +하나는 가격이고 다른 하나는 문서인데 가격은 훈련 데이터 집합에 있는 모든 가격 + +00:33.920 --> 00:42.020 +목록입니다 문서는 훈련 데이터 집합에 있는 모든 테스트 프롬프트고요 + +00:42.230 --> 00:47.480 +비트 텍스트죠 이제 우리가 할 건 문서에 텍스트를 사용하는 모델을 만드는 + +00:47.480 --> 00:51.440 +겁니다 우리가 만든 기능을 사용하는 대신에요 + +00:51.440 --> 00:52.940 +그냥 글자를 써요 + +00:53.330 --> 01:01.440 +눈치채셨겠지만 작은 트릭이 하나 있어요 훈련용이 아니라 테스트 프롬프트죠 + +01:01.590 --> 01:06.000 +훈련 시간을 이용하면 가격도 포함되어 있죠 + +01:06.120 --> 01:12.210 +그건 작동하지 않을 겁니다 모델은 당연히 가격 자체를 알아내는 법을 배우게 될 테니까요 프롬프트 + +01:12.210 --> 01:13.470 +안에 있죠 + +01:13.470 --> 01:16.470 +테스트 시간이 되면 처참하게 실패하죠 + +01:16.470 --> 01:20.220 +조심해야 할 트릭이죠 + +01:20.460 --> 01:22.980 +제가 더 조심해야 할 함정이죠 + +01:22.980 --> 01:25.710 +그래서 준비하죠 + +01:25.710 --> 01:31.770 +이제 countvectorizer를 사용할 거예요 이름만 들어도 근사하죠 + +01:31.800 --> 01:36.330 +오늘 카운트 오브 벡터라이저로 작업한다고 생각한다면 매우 + +01:36.330 --> 01:41.850 +복잡한 무언가를 만들 거라고 생각하지만 실제로는 그렇지 않아요 + +01:41.940 --> 01:49.740 +단순히 단어 수를 세는 것을 찾는 것입니다 그리고 벡터 내의 각 위치들이 특정 단어를 + +01:49.740 --> 
01:57.740 +나타내는 곳에 벡터를 구축합니다 그리고 그 단어가 여러분 문서에 몇 번 나타날지 + +01:57.740 --> 01:59.420 +확인하죠 + +01:59.540 --> 02:06.110 +각 문서는 벡터입니다 벡터의 모든 행은 여러분 어휘의 단어를 나타내죠 + +02:06.110 --> 02:09.170 +카운터는 몇 번 왔는지 세어보고요 + +02:09.170 --> 02:15.050 +이런 사고방식을 단어 자루 모델이라고 해요 단어 자루를 들고 숫자를 + +02:15.050 --> 02:19.880 +세는 것과 같죠 단어 순서는 상관없어요 특정 횟수만큼만 + +02:19.880 --> 02:23.840 +단어에 대한 개념이 있는 거죠 + +02:24.170 --> 02:31.700 +그래서 가장 흔하거나 가장 중요한 단어 1,000개까지 셀 거예요 + +02:31.700 --> 02:41.150 +여기 이 매개 변수를 이용해 확실히 하려는 건 일반적인 스톱 단어를 제거하는 겁니다 사람들이 + +02:41.150 --> 02:47.270 +앤 앤 더 앤 인이라고 부르는 거죠 모델에 유용하지 않고 방해만 + +02:47.270 --> 02:49.820 +되는 것들이죠 + +02:49.820 --> 02:54.320 +get이 뽑히면 즙이 흐르는 단어만 남죠 + +02:54.320 --> 03:00.000 +이제 우리가 할 일은 보다시피 훨씬 빨라졌어요 + +03:00.000 --> 03:05.340 +이 문서에 기반해 데이터 세트를 생성할 거예요 + +03:05.340 --> 03:09.240 +아까처럼 선형 회귀 모델을 다시 만들 거예요 + +03:09.270 --> 03:14.610 +우리 단어를 가격에 맞춰야 해요 + +03:14.610 --> 03:21.090 +다시 말해, 이번엔 기능을 사용하는 대신 이 단어들을 사용하는 + +03:21.090 --> 03:29.130 +겁니다 벡터 단어로요 단어 개수를 세는 거죠 가장 흔한 사전 용어에 있는 + +03:29.130 --> 03:32.850 +단어 천 개의 개수를요 + +03:32.880 --> 03:34.650 +그렇게 할 거예요 + +03:34.680 --> 03:36.480 +선형 회귀를 실행할 거예요 + +03:36.480 --> 03:37.980 +지금 진행 중이에요 + +03:37.980 --> 03:46.110 +모든 데이터 집합에 걸친 벡터 포인트 1,000개를 세고 있어요 + +03:46.440 --> 03:53.250 +그 작업이 끝나면 그걸 단어 모음에 넣습니다 선형 회귀, 프라이서죠 + +03:53.250 --> 03:54.180 +Put + +03:54.180 --> 03:58.130 +그리고 테스트할 거예요 다시 테스트하죠 + +03:58.160 --> 03:59.660 +방금 실행이 완료됐어요 + +03:59.690 --> 04:07.280 +이건 우리 함수예요 시험해 볼 간단한 함수죠 테스터.테스트라는 기기로 테스트할 + +04:07.310 --> 04:08.570 +거예요 + +04:09.320 --> 04:10.790 +말주변이 좋네요 + +04:10.820 --> 04:13.520 +선형 회귀 프라이저예요 + +04:16.010 --> 04:17.630 +어떻게 되나 보죠 + +04:19.550 --> 04:21.050 +채소가 많네요 + +04:21.050 --> 04:22.730 +붉은색도 많고요 + +04:23.300 --> 04:23.960 +보이시죠? 
+ +04:23.990 --> 04:25.340 +하나만 뽑죠 + +04:25.370 --> 04:27.140 +74달러가 들었죠 + +04:27.140 --> 04:29.030 +사실 46달러였어요 + +04:29.030 --> 04:31.640 +그래서 제대로 된 것들이 있는 걸 볼 수 있죠 + +04:31.640 --> 04:33.530 +너무 많이 벗어난 부분도 있어요 + +04:33.560 --> 04:35.540 +그래프가 어떻게 보일까요? + +04:35.570 --> 04:36.830 +어디 보죠 + +04:37.430 --> 04:39.350 +네, 좋아요 + +04:39.350 --> 04:42.680 +뭔가 좋은 일이 일어날 것 같아요 + +04:42.680 --> 04:46.130 +점점 더 가까워지고 있어요 + +04:46.130 --> 04:48.080 +녹색 점이 더 많이 보여요 + +04:48.080 --> 04:51.140 +평균 113달러죠 + +04:51.140 --> 04:59.310 +특징이 있는 선형 퇴행보다 훨씬 낫고 추측보다 훨씬 낫네요 + +04:59.670 --> 05:01.980 +진전이 있네요 + +05:01.980 --> 05:05.760 +아직 이상한 점이 있어요 문제가 좀 있죠 + +05:06.000 --> 05:10.860 +하지만 이게 진정한 가치이고 여기 어딘가에 있어야 한다고 생각했죠 + +05:11.160 --> 05:15.810 +하지만 진전이 있다는 걸 알 수 있어요 + +05:16.200 --> 05:24.540 +좀 더 고급 모델로 넘어가기 전에 이 세트의 마지막은 젠심 라이브러리를 + +05:24.540 --> 05:31.560 +이용해 이 단어를 베크 모델에 소개하는 거예요 + +05:31.560 --> 05:37.050 +그게 제가 NLP와 함께 있는 신경 네트워크를 처음 접한 + +05:37.410 --> 05:46.440 +것 중 하나입니다. 벡터화 모델을 사용했고 더 복잡한 벡터 내장 모델을 사용했죠. + +05:46.500 --> 05:54.950 +겐심에서 2VC 함수 클래스를 사용하겠습니다 400D로 + +05:54.950 --> 05:57.980 +벡터를 빌드하죠 + +05:58.250 --> 06:02.960 +그래서 일꾼 8명을 쓰도록 설정했어요 제 상자를 망치질하는 셈이죠 + +06:02.960 --> 06:08.840 +실행하는 데 몇 분 걸렸어요 이걸 다 기다리지 않게 미리 실행했죠 + +06:08.870 --> 06:19.610 +그리고 이걸 실행했어요 이제 실행할 준비가 됐으면 좋겠네요 + +06:19.610 --> 06:24.410 +바로 결과를 볼 수 있게요 + +06:25.160 --> 06:33.200 +2bc 선형 회귀 프라이어를 통과시킬 수 있을 거예요 이 멋진 벡터화 모델이 선형 회귀로 + +06:33.200 --> 06:36.140 +어떻게 작동하는지 보죠 + +06:37.070 --> 06:39.950 +처음 두 줄까지는 괜찮아 보이는데 큰일이네요 + +06:39.980 --> 06:46.130 +빨강도 있고 초록도 있고 빨강도 있고 초록도 있고 빨강이 많네요 250개의 데이터 + +06:46.130 --> 06:49.290 +포인트를 지나 해도로 내려갔어요 + +06:50.100 --> 06:52.080 +여기 있네요 + +06:52.110 --> 06:54.150 +다시 괜찮아졌어요 + +06:54.180 --> 07:01.980 +흥미롭게도 나쁜 소식은 단순한 단어 모델에 따르면 선형 회귀보다 머리카락이 더 나쁘다는 + +07:02.010 --> 07:03.540 +거예요 + +07:03.540 --> 07:11.850 +멋진 단어를 공개하죠 벡터 2개는 아직 도움이 안 됐어요 + +07:11.850 --> 07:17.850 +여전히 이전과 같은 영역입니다. 
아마도 2Vc 벡터에 있는 + +07:17.850 --> 07:23.730 +추가 정보를 활용할 만큼 선형 회귀 모델이 강력하지 않기 때문일 + +07:23.730 --> 07:25.140 +거예요. + +07:25.410 --> 07:31.800 +다음 시간에는 좀 더 복잡한 모델을 탐구해 보고 전통적인 머신 + +07:31.800 --> 07:34.890 +러닝을 마무리할 거예요 + +07:34.890 --> 07:38.820 +조금만 더 기다려 주세요 비트를 좀 더 짜내고 싶거든요 + +07:38.850 --> 07:43.920 +베이스라인 모델로 좀 더 나아지길 원합니다 LMS가 비트 코일에서 쉽게 실행되는 걸 원치 + +07:43.920 --> 07:44.730 +않으니까요 + +07:44.730 --> 07:46.350 +Put it's go 우린 싸우고 싶어요 + +07:46.350 --> 07:48.330 +다음에 봐요 diff --git a/week5/community-contributions/subtitles/srts/59472491/en_US.srt b/week5/community-contributions/subtitles/srts/59472491/en_US.srt new file mode 100755 index 0000000..18dcc60 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472491/en_US.srt @@ -0,0 +1,508 @@ +WEBVTT + +00:00.620 --> 00:01.790 +Welcome back. + +00:01.790 --> 00:07.370 +If you are following along with me in JupyterLab, as I hope you are, then you will need it to have + +00:07.370 --> 00:12.620 +gone off for a coffee break because it will have taken about 20 minutes or so to have downloaded all + +00:12.620 --> 00:16.550 +of our datasets, but they will now be downloaded and lovingly crafted. + +00:16.550 --> 00:17.750 +Here they are. + +00:17.780 --> 00:23.810 +The automotive one is the largest with north of 900,000 data points and office. + +00:23.870 --> 00:27.830 +Uh, sorry, electronics has more than 400,000. + +00:27.830 --> 00:29.600 +So in total. + +00:29.600 --> 00:30.980 +Let's have a look at what we've got. + +00:30.980 --> 00:36.410 +We have a grand total of just over 2.8 million data points. + +00:36.410 --> 00:38.120 +That's a lot of data points. + +00:38.120 --> 00:39.680 +It's too many data points. + +00:39.680 --> 00:43.580 +We don't need anything like that number for the sorts of training we're going to be doing. 
+ +00:43.640 --> 00:50.480 +Um, and that means that there's an opportunity for us to hone this data set and select the data points + +00:50.480 --> 00:54.020 +that are going to be most valuable for us and give us the most signal. + +00:54.020 --> 00:59.480 +So first of all, let's take another look at the distribution of how many tokens we have. + +00:59.510 --> 01:05.310 +This is the same chart we did last time, and it shows you that we don't ever have more than 180 tokens + +01:05.310 --> 01:10.440 +in any of our training prompts, which is something that we specifically set out to achieve in order + +01:10.440 --> 01:17.160 +to be able to fine tune well with our open source llama model next time, but also to keep costs low + +01:17.160 --> 01:19.350 +when we're dealing with frontier models. + +01:19.890 --> 01:22.050 +Let's look at the prices again. + +01:22.290 --> 01:29.850 +This is the complete price distribution across the 2 million or so, and you'll see that it is constrained + +01:29.880 --> 01:33.690 +to be, uh, no more than $999. + +01:33.780 --> 01:39.510 +So it's between 1 and 999, because that's the constraint we've put in to make sure that we've got a + +01:39.510 --> 01:45.060 +manageable data set without crazy outliers that will distort all of our, uh, training. + +01:45.450 --> 01:51.150 +Um, but you'll see that we still have the same problem, that the data set is very skewed to the smaller + +01:51.150 --> 01:52.200 +numbers. + +01:52.230 --> 01:55.320 +And there's a very thin trail. + +01:55.320 --> 01:57.660 +Uh, and this only goes up to 300. + +01:57.720 --> 02:05.460 +So if we go all the way up to 1000 to the, uh, to the end of our, uh, data set. + +02:05.490 --> 02:06.030 +There you go. + +02:06.060 --> 02:06.540 +Look at that. + +02:06.540 --> 02:07.110 +This is the. + +02:07.140 --> 02:14.850 +We do have, uh, data points in there which reach up to 909.49, but you can barely see them. 
+ +02:14.850 --> 02:18.840 +They barely touch the, uh, the the axis. + +02:18.960 --> 02:30.030 +Um, because the data set is so dominated by the 800,000 or so that are coming in at lower cost points. + +02:30.480 --> 02:34.170 +Uh, one other thing to do is just to have a quick look at the categories. + +02:34.170 --> 02:40.110 +This nice little bar chart is showing us how many we have in each of the different categories of data + +02:40.140 --> 02:40.920 +of product. + +02:40.920 --> 02:44.970 +So again, automotive dominating here with 900,000. + +02:44.970 --> 02:51.510 +And you can see it's followed by tools and home improvement followed by electronics with 400,000. + +02:51.510 --> 02:59.580 +So one of the things we want to do now is do some massaging of our data so that we have a more balanced + +02:59.580 --> 03:06.810 +data set, because we don't want the model to be skewed, distorted towards learning more about one + +03:06.810 --> 03:09.670 +particular price of one particular category. + +03:09.790 --> 03:15.310 +Um, we don't mind if it's if it's somewhat, uh, favors some some, like, like cheaper prices because + +03:15.310 --> 03:17.380 +that is the reality in the world. + +03:17.530 --> 03:23.110 +But we don't want to go so far that it distorts or impedes our training progress. + +03:23.350 --> 03:31.090 +Um, so, uh, what I'm going to do now is, is, is do some selection from this data set sample from + +03:31.090 --> 03:37.960 +our data set to get a smaller data set that is going to have a better representation of prices and categories. + +03:37.960 --> 03:42.910 +And the sort of data set size I'm going for is about 400,000 data points. + +03:42.940 --> 03:48.100 +Um, and even that's a large data set for fine tuning purposes, really doesn't need to be that big. + +03:48.130 --> 03:50.290 +But I wanted to have a big juicy data set. + +03:50.290 --> 03:52.420 +So 400,000 is what I've gone for. 
+ +03:52.510 --> 03:54.910 +Um, and we'll talk about how I do that. + +03:55.000 --> 04:00.580 +So first of all, I've created a dictionary called slots. + +04:00.580 --> 04:01.810 +And let me tell you what this is. + +04:01.840 --> 04:04.060 +And then you'll understand exactly why I've done it. + +04:04.090 --> 04:12.980 +Slots is a dictionary where the key of the dictionary is every whole dollar price of a product. + +04:12.980 --> 04:17.720 +So it's from $1 to $9.99 one, two, three all the way through to 999. + +04:17.720 --> 04:21.830 +So there are 999, uh, keys to this dictionary. + +04:21.830 --> 04:29.570 +And the value is going to be a list of all of the products, all of the items which have that price. + +04:29.570 --> 04:36.590 +So in the slots dictionary in slot number two will be a list of all of the items which cost $2. + +04:36.620 --> 04:39.860 +And so it's organizing everything into these slots. + +04:39.860 --> 04:43.100 +It's bucketing our data set basically. + +04:43.370 --> 04:46.010 +Um hopefully that makes total sense. + +04:46.010 --> 04:47.750 +If not of course bring up this code. + +04:47.750 --> 04:48.380 +Step through it. + +04:48.380 --> 04:54.710 +I'm using Defaultdict is a nice little thing to know about, which is basically a dictionary which will + +04:54.710 --> 05:00.800 +if something is missing from the dictionary, it will automatically initialize it to be of whatever + +05:00.800 --> 05:01.940 +type you pass in. + +05:01.970 --> 05:06.230 +It avoids you having to put a sort of if test in your code. + +05:06.230 --> 05:08.900 +So it makes a nice, nice, elegant code. + +05:08.900 --> 05:14.430 +All right, so here's a bit of a meaty function here, but I explain what's going on. + +05:14.460 --> 05:14.850 +Amity. + +05:14.880 --> 05:15.360 +Amity. + +05:15.420 --> 05:16.170 +Jupyter notebook. + +05:16.170 --> 05:16.740 +Cell. + +05:17.010 --> 05:21.390 +Um, I am going to go through each of these slots. 
+ +05:21.420 --> 05:23.760 +Each of the 999 slots. + +05:23.760 --> 05:30.870 +And I'm going to sample from those slots a subset of the data, which I think will be a nice representative + +05:30.870 --> 05:33.090 +sample to use for training. + +05:33.240 --> 05:40.470 +Now, some of this I've tweaked around with arbitrarily until I've gotten comfortable with the histograms + +05:40.470 --> 05:41.640 +that will follow this. + +05:41.640 --> 05:45.390 +So it's not like there's any particular special reason. + +05:45.390 --> 05:50.160 +It's more of a case of trial and error and getting to a point where you feel good about the balanced + +05:50.160 --> 05:51.330 +data set you're producing. + +05:51.330 --> 05:56.460 +And of course, I've then run it through training and satisfy myself that I'm getting higher quality + +05:56.460 --> 05:59.040 +results, uh, by doing this. + +05:59.400 --> 06:05.370 +Um, and so what I do is I go through each of the slots in turn, and I've decided that for anything + +06:05.370 --> 06:09.930 +that's worth more than $240, I simply take that whole slot. + +06:09.960 --> 06:12.870 +I take all of those points and add them to my sample. + +06:13.320 --> 06:13.950 +Um. + +06:14.400 --> 06:16.710 +For something less than that. + +06:16.710 --> 06:24.930 +I basically have some code here that samples 1200 items from that slot. + +06:24.930 --> 06:29.790 +So it takes that slot, and that slot might have in it several thousand. + +06:29.820 --> 06:37.830 +I just pick 1200 from that slot, and I use a numpy method called choice, which lets you pick a certain + +06:37.830 --> 06:39.090 +number from the slot. + +06:39.090 --> 06:43.920 +And one of the nice things about choice is that you can pass in something called the weights, which + +06:43.920 --> 06:48.870 +is telling it to give more importance to some of your items over others. + +06:48.870 --> 06:53.970 +And uh, hopefully this comes together no surprise for the weights. 
+ +06:53.970 --> 07:00.330 +What I'm saying is let's give anything that's an automotive, a weight of one, and everything else + +07:00.330 --> 07:02.160 +gets a weight of five. + +07:02.310 --> 07:07.290 +And again, this was I just played around with different numbers until I got comfortable with what it + +07:07.290 --> 07:08.160 +was coming up with. + +07:08.160 --> 07:14.070 +And I didn't want to take it too far because we want to stay roughly true to to the the kind of data + +07:14.070 --> 07:15.930 +we have in the real world. + +07:15.930 --> 07:19.700 +But we wanted to correct for some imbalances in the data set. + +07:19.820 --> 07:23.570 +So I'm not going to go line by line through explaining this. + +07:23.570 --> 07:29.900 +I've given you the construct, and I'm hoping you'll now look through this and satisfy yourself that + +07:29.900 --> 07:32.900 +it's doing what I say and that you like the outcome. + +07:32.900 --> 07:37.550 +And of course, if you prefer to craft the data set a bit differently, this is your chance. + +07:37.610 --> 07:43.880 +Uh, it's also perfectly possible that you will be able to beat my results in terms of my model performance, + +07:43.880 --> 07:49.580 +and you may think that it would be better to to perhaps have a different weighting of the categories + +07:49.730 --> 07:52.010 +or to choose differently from the slots. + +07:52.010 --> 07:57.170 +So you should absolutely experiment, um, and see what you come up with. + +07:57.170 --> 07:59.270 +But I've run this now. + +07:59.270 --> 08:07.460 +It has now created a sample list, and there are 408,000 data points in that sample. + +08:07.460 --> 08:10.040 +So that's about the size that we were aiming for. + +08:10.460 --> 08:14.360 +Um, and now let's see the distribution of prices. + +08:14.360 --> 08:18.230 +And that looks a lot more reasonable in terms of the distribution of prices. 
+ +08:18.230 --> 08:23.960 +We've got a lot that are cheaper still, but but it's a consistent number for every price point in the + +08:23.960 --> 08:24.710 +cheaper end. + +08:24.740 --> 08:31.790 +And as we get to more expensive prices, there's a perfectly decent set of of data points with higher + +08:31.790 --> 08:32.510 +price. + +08:32.540 --> 08:37.160 +You'll notice this interesting effect that there are various points. + +08:37.160 --> 08:44.420 +Uh, predictably enough, it's things that are priced $399, $499 that have a little spike in terms + +08:44.420 --> 08:46.100 +of how many data points there are. + +08:46.130 --> 08:48.530 +And that's great because that reflects the real world. + +08:48.530 --> 08:51.140 +So it's good that we're going to have that in our data set. + +08:51.140 --> 08:53.840 +I wouldn't want to to to squash that out. + +08:54.230 --> 09:01.760 +Um, so when we compare this histogram of prices with our earlier histogram of prices here, hopefully + +09:01.760 --> 09:07.220 +you immediately see the improvement we have made to the distribution of prices in our data. + +09:07.250 --> 09:12.080 +This is clearly a more it's still skewed and the real world is skewed. + +09:12.170 --> 09:16.070 +Um, but there's a better representation of higher priced products. + +09:16.070 --> 09:22.800 +And it's going to mean that we're going to be able to learn in a high quality way and validate our sample + +09:22.800 --> 09:23.220 +more. + +09:23.250 --> 09:26.700 +If you're not satisfied by that, by all means create a couple of data sets. + +09:26.730 --> 09:32.130 +And when we get to training, you can try them both and see the impact it makes to have a well-balanced + +09:32.130 --> 09:33.120 +data set. + +09:33.900 --> 09:36.810 +Let's also look at the categories again. + +09:36.930 --> 09:38.640 +Um, this is the categories. + +09:38.640 --> 09:40.500 +So actually it hasn't made a ton of difference. + +09:40.500 --> 09:42.030 +It's slightly shifted. 
+ +09:42.210 --> 09:44.760 +Um, we've got a bit of a better balance. + +09:44.820 --> 09:50.970 +Um, I didn't want to further correct it because I feel that this is, after all, somewhat reflective + +09:50.970 --> 09:51.990 +of the real world. + +09:51.990 --> 09:54.360 +And so we don't want to overly distort. + +09:54.360 --> 10:00.630 +There are a healthy number of automotive products on sale, more so than others. + +10:00.630 --> 10:04.950 +And so this this seem good enough, but it's slightly corrected some of the imbalance there. + +10:05.130 --> 10:08.370 +Perhaps another way of looking at this is looking at a pie chart. + +10:08.370 --> 10:13.410 +Generally speaking, often pie charts are unpopular with data scientists because bar charts are better + +10:13.410 --> 10:18.210 +for seeing quantities side by side and seeing them in a very quantitative way. + +10:18.420 --> 10:23.400 +But pie charts sometimes are useful visuals, and let's have a look at it. + +10:23.490 --> 10:31.470 +Here is a pie chart by category, and I should obviously do a bit of work to separate out some of these + +10:31.470 --> 10:33.180 +words, but you get the idea. + +10:33.390 --> 10:40.530 +Um, and it's showing you here that automotive does have the biggest the lion's share, but it's not + +10:40.530 --> 10:42.150 +like it's massively dominating. + +10:42.150 --> 10:45.600 +And obviously a couple of these together are more than automotive. + +10:45.660 --> 10:47.340 +So it's perfectly reasonable. + +10:47.340 --> 10:50.460 +And the little guy here is appliances. + +10:50.460 --> 10:57.510 +The one that we started with way back yesterday has 1% the smallest, the smallest piece of the pie. + +10:57.510 --> 10:59.400 +Uh, quite literally in this case. + +11:00.000 --> 11:04.020 +So that is our data set curated. + +11:04.020 --> 11:07.170 +Uh, it was um, a bit of work, I agree. 
+ +11:07.170 --> 11:13.200 +And I did gloss over some of the, uh, thornier, uh, pieces in there, like the sampling. + +11:13.350 --> 11:19.350 +And I urge you to come back and look through that and evaluate it yourself and potentially craft a better + +11:19.350 --> 11:20.190 +data set. + +11:20.370 --> 11:25.260 +Uh, we're finally going to do some last analysis on it before we upload it to the hub. + +11:25.260 --> 11:27.630 +And I will see you for that in the next video. diff --git a/week5/community-contributions/subtitles/srts/59472491/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472491/ja_JP.srt new file mode 100755 index 0000000..bf57e68 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472491/ja_JP.srt @@ -0,0 +1,421 @@ +WEBVTT + +00:00.620 --> 00:01.790 +お帰りなさい。 + +00:01.790 --> 00:07.370 +もしあなたがJupyterLabで私についてきているのであれば、 + +00:07.370 --> 00:16.550 +コーヒー休憩のためにJupyterLabを立ち去る必要があるだろう。 + +00:16.550 --> 00:17.750 +それがこれだ。 + +00:17.780 --> 00:23.810 +自動車関連は最大で、 90万を超えるデータポイントがあり、 オフィスもある。 + +00:23.870 --> 00:27.830 +あ、 すみません、 エレクトロニクスは40万人以上です。 + +00:27.830 --> 00:29.600 +だから合計すると + +00:29.600 --> 00:30.980 +何があるのか見てみよう。 + +00:30.980 --> 00:36.410 +合計で2人強だ。 800万データポイント。 + +00:36.410 --> 00:38.120 +たくさんのデータだ。 + +00:38.120 --> 00:39.680 +データが多すぎるんだ。 + +00:39.680 --> 00:43.580 +私たちがやろうとしているようなトレーニングには、 そのような数字は必要ない。 + +00:43.640 --> 00:54.020 +つまり、 このデータセットに磨きをかけ、 我々にとって最も価値があり、 最もシグナルを与えてくれるデータポイントを選択する機会があるということだ。 + +00:54.020 --> 00:59.480 +ではまず、 トークンの数の分布をもう一度見てみよう。 + +00:59.510 --> 01:05.310 +これは、 次回オープンソースのラマ・モデルをうまく微調整できるようにするため、 + +01:05.310 --> 01:10.440 +またフロンティア・モデルを扱うときにコストを低く抑えるために、 + +01:10.440 --> 01:19.350 +特に達成しようとしたことです。 + +01:19.890 --> 01:22.050 +もう一度価格を見てみよう。 + +01:22.290 --> 01:33.690 +これは200万ドルほどの価格分布で、 999ドル以下という制約があることがわかるだろう。 + +01:33.780 --> 01:39.510 +つまり、 1〜999の間です。 これは、 トレーニングのすべてを歪めてしまうような異常値がなく、 + +01:39.510 --> 01:45.060 +管理しやすいデータセットにするために入れた制約です。 + +01:45.450 --> 01:52.200 +しかし、 
データセットが小さい数字に非常に偏っているという同じ問題があることがわかるだろう。 + +01:52.230 --> 01:55.320 +そして、 非常に細いトレイルがある。 + +01:55.320 --> 01:57.660 +ええと、 これは300までです。 + +01:57.720 --> 02:05.460 +だから、 1000まで行って、 データセットの終わりまで行ってみよう。 + +02:05.490 --> 02:06.030 +そうだ。 + +02:06.060 --> 02:06.540 +あれを見ろ。 + +02:06.540 --> 02:07.110 +これがそうだ。 + +02:07.140 --> 02:14.850 +909に達するデータもある。 49だが、 ほとんど見えない。 + +02:14.850 --> 02:18.840 +軸にはほとんど触れていない。 + +02:18.960 --> 02:30.030 +ええと、 80万人ほどのデータセットが低価格帯のものばかりだからです。 + +02:30.480 --> 02:34.170 +ええと、 もうひとつ、 カテゴリーをざっと見てみましょう。 + +02:34.170 --> 02:40.920 +この小さな棒グラフは、 各カテゴリーのデータ数を示している。 + +02:40.920 --> 02:44.970 +つまり、 ここでも自動車が90万人と圧倒している。 + +02:44.970 --> 02:51.510 +続いて、 工具とホームセンターが40万件、 エレクトロニクスが40万件と続く。 + +02:51.510 --> 02:59.580 +というのも、 モデルが特定のカテゴリーの特定の価格についてより深く学ぶことに偏ってしまったり、 + +02:59.580 --> 03:09.670 +歪んでしまったりするのは避けたいからだ。 + +03:09.790 --> 03:17.380 +それが世界の現実なのだから。 + +03:17.530 --> 03:23.110 +しかし、 トレーニングの進歩を歪めたり、 妨げたりするようなことはしたくない。 + +03:23.350 --> 03:31.090 +それで、 これからやることは、 このデータセットのサンプルから、 価格とカテゴリーをよりよく表現したより小さなデータセットを得るために、 + +03:31.090 --> 03:37.960 +いくつかの選択をすることだ。 + +03:37.960 --> 03:42.910 +データセットのサイズは約40万点だ。 + +03:42.940 --> 03:48.100 +それに、 微調整のための大規模なデータセットであっても、 それほど大きくする必要はない。 + +03:48.130 --> 03:50.290 +しかし、 私は大きなジューシーなデータセットを持ちたかった。 + +03:50.290 --> 03:52.420 +だから、 40万ドルを目指した。 + +03:52.510 --> 03:54.910 +その方法については、 また今度話そう。 + +03:55.000 --> 04:00.580 +そこでまず、 スロットという辞書を作った。 + +04:00.580 --> 04:01.810 +そして、 これが何なのかを教えよう。 + +04:01.840 --> 04:04.060 +そうすれば、 私がなぜこのようなことをしたのか、 よく理解してもらえるだろう。 + +04:04.090 --> 04:12.980 +スロットは辞書であり、 辞書のキーは商品の全ドル価格である。 + +04:12.980 --> 04:17.720 +つまり、 1ドルから9ドルだ。 99の1、 2、 3から999まで。 + +04:17.720 --> 04:21.830 +この辞書には999個のキーがある。 + +04:21.830 --> 04:29.570 +そしてその値は、 その価格を持つすべての商品、 すべてのアイテムのリストになる。 + +04:29.570 --> 04:36.590 +つまり、 スロット辞書の2番スロットには、 2ドルの商品がすべてリストアップされていることになる。 + +04:36.620 --> 04:39.860 +そうして、 すべてをこれらのスロットに整理している。 + +04:39.860 --> 04:43.100 +基本的にデータセットをバケット化しているんだ。 + +04:43.370 --> 04:46.010 +うーん......それで納得できればいいんだけど + 
+04:46.010 --> 04:47.750 +もちろん、 そうでなければこのコードを持ち出す。 + +04:47.750 --> 04:48.380 +それを踏み越える。 + +04:48.380 --> 04:54.710 +Defaultdictは、 基本的に辞書で、 辞書にないものがあれば、 + +04:54.710 --> 05:01.940 +自動的に初期化してくれる。 + +05:01.970 --> 05:06.230 +コードにifテストのようなものを入れる必要がなくなる。 + +05:06.230 --> 05:08.900 +だから、 エレガントで素敵なコードになる。 + +05:08.900 --> 05:14.430 +さて、 ここでちょっと肉厚な機能を説明しよう。 + +05:14.460 --> 05:14.850 +アミティ + +05:14.880 --> 05:15.360 +アミティ + +05:15.420 --> 05:16.170 +Jupyterノートブック。 + +05:16.170 --> 05:16.740 +細胞だ。 + +05:17.010 --> 05:21.390 +ええと、 この枠をひとつひとつ見ていきます。 + +05:21.420 --> 05:23.760 +999の各スロット。 + +05:23.760 --> 05:33.090 +そして、 これらのスロットからデータのサブセットをサンプリングし、 トレーニングに使用する代表的なサンプルとするつもりだ。 + +05:33.240 --> 05:41.640 +この後のヒストグラムに慣れるまで、 任意で微調整した部分もある。 + +05:41.640 --> 05:45.390 +だから、 特別な理由があるわけではない。 + +05:45.390 --> 05:51.330 +試行錯誤を繰り返し、 バランスの取れたデータセットに満足できるようになることだ。 + +05:51.330 --> 05:59.040 +そしてもちろん、 それをトレーニングに回し、 こうすることでより質の高い結果が得られると自分自身を納得させる。 + +05:59.400 --> 06:05.370 +それで、 僕がやっているのは、 各スロットを順番に見ていって、 240ドル以上の価値があるものは、 + +06:05.370 --> 06:09.930 +単純にそのスロット全部を取ることにしているんだ。 + +06:09.960 --> 06:12.870 +私はそれらの点をすべてサンプルに加える。 + +06:13.320 --> 06:13.950 +うーん。 + +06:14.400 --> 06:16.710 +それ以下の金額で。 + +06:16.710 --> 06:24.930 +私は基本的に、 そのスロットから1200のアイテムをサンプリングするコードをここに持っている。 + +06:24.930 --> 06:29.790 +その枠に数千の選手が入るかもしれない。 + +06:29.820 --> 06:39.090 +私はそのスロットから1200を選ぶだけで、 choiceというnumpyのメソッドを使って、 スロットから特定の数字を選ぶことができる。 + +06:39.090 --> 06:48.870 +そして、 チョイスのいいところのひとつは、 ウェイトと呼ばれるものを渡すことができることだ。 + +06:48.870 --> 06:53.970 +そして......、 これがウェイトにとって驚きのない形でまとまることを願っている。 + +06:53.970 --> 07:02.160 +私が言いたいのは、 自動車に関係するものには1の重みを、 それ以外のものには5の重みを与えようということだ。 + +07:02.310 --> 07:08.160 +繰り返しになるけど、 これは、 出てくる数字に納得がいくまで、 いろいろな数字を使って遊んでみただけなんだ。 + +07:08.160 --> 07:15.930 +そして、 現実の世界で我々が持っているようなデータに忠実でありたいので、 あまりやり過ぎたくなかった。 + +07:15.930 --> 07:19.700 +しかし、 我々はデータセットの不均衡を補正したかった。 + +07:19.820 --> 07:23.570 +だから、 一行ずつ説明するつもりはない。 + +07:23.570 --> 07:32.900 +そして、 私の言うとおりになっていること、 そしてその結果が気に入っていることを確認してほしい。 + +07:32.900 
--> 07:37.550 +そしてもちろん、 データセットをもう少し違った形で作りたいのであれば、 これはチャンスである。 + +07:37.610 --> 07:43.880 +また、 私のモデルのパフォーマンスという点では、 あなたが私の結果を打ち負かすことも十分にあり得るし、 + +07:43.880 --> 07:52.010 +カテゴリの重み付けを変えたり、 スロットから別のものを選んだりした方がいいと思うかもしれない。 + +07:52.010 --> 07:57.170 +だから、 絶対に実験してみるべきだよ。 + +07:57.170 --> 07:59.270 +でも、 今はこれを実行した。 + +07:59.270 --> 08:07.460 +現在、 サンプルリストが作成され、 そのサンプルには408,000のデータポイントがある。 + +08:07.460 --> 08:10.040 +だから、 僕らが目指していたのはそのくらいのサイズなんだ。 + +08:10.460 --> 08:14.360 +では、 価格の分布を見てみよう。 + +08:14.360 --> 08:18.230 +そして、 価格の分布という点では、 その方がずっと合理的に見える。 + +08:18.230 --> 08:24.710 +まだ安いものもたくさんあるが、 安い方ではどの価格帯でも一貫した数字だ。 + +08:24.740 --> 08:32.510 +そして、 より高価格になるにつれて、 高価格に伴うデータポイントの完全なまともなセットが存在する。 + +08:32.540 --> 08:37.160 +いろいろなポイントがあることに気づくだろう。 + +08:37.160 --> 08:46.100 +予想通り、 399ドル、 499ドルという値段のものは、 データ・ポイントの数という点で少し急増する。 + +08:46.130 --> 08:48.530 +それが現実の世界を反映しているのだから。 + +08:48.530 --> 08:51.140 +だから、 データセットの中にそれがあるのはいいことだ。 + +08:51.140 --> 08:53.840 +それを潰したくはない。 + +08:54.230 --> 09:07.220 +この価格のヒストグラムを、 先ほどの価格のヒストグラムと比較すると、 データの価格分布が改善されていることがすぐにおわかりいただけると思います。 + +09:07.250 --> 09:12.080 +これは明らかにもっと歪んでいるし、 現実の世界も歪んでいる。 + +09:12.170 --> 09:16.070 +うーん、 でも、 高価格帯の商品の方がよく表現されている。 + +09:16.070 --> 09:23.220 +そして、 質の高い学習ができるようになり、 サンプルの検証も進むということだ。 + +09:23.250 --> 09:26.700 +それで満足できないなら、 ぜひいくつかのデータセットを作ってほしい。 + +09:26.730 --> 09:33.120 +そしてトレーニングに入ったら、 両方を試して、 バランスの取れたデータセットがもたらす影響を見ることができる。 + +09:33.900 --> 09:36.810 +カテゴリーももう一度見てみよう。 + +09:36.930 --> 09:38.640 +これがカテゴリーだ。 + +09:38.640 --> 09:40.500 +だから、 実は大した違いはないんだ。 + +09:40.500 --> 09:42.030 +少しずれている。 + +09:42.210 --> 09:44.760 +少しバランスが良くなったよ。 + +09:44.820 --> 09:51.990 +うーん、 これ以上訂正したくなかったのは、 結局のところ、 これが現実の世界を多少なりとも反映していると感じたからだ。 + +09:51.990 --> 09:54.360 +だから、 過度に歪めたくはない。 + +09:54.360 --> 10:00.630 +販売されている自動車関連製品の数は、 他よりも多い。 + +10:00.630 --> 10:04.950 +それで、 これで十分だと思われるが、 アンバランスな部分が少し修正された。 + +10:05.130 --> 10:08.370 +おそらく、 別の見方として円グラフを見ることができるだろう。 + +10:08.370 --> 10:13.410 +一般的に言って、 円グラフはデータサイエンティストに不人気であることが多い。 なぜなら、 + 
+10:13.410 --> 10:18.210 +棒グラフの方が量を並べて見たり、 非常に定量的に見たりするのに適しているからだ。 + +10:18.420 --> 10:23.400 +しかし、 円グラフは時として有用なビジュアルである。 + +10:23.490 --> 10:33.180 +これはカテゴリー別の円グラフで、 これらの単語のいくつかを分離するために、 私は明らかに少し仕事をしなければならないが、 あなたはアイデアを得るだろう。 + +10:33.390 --> 10:40.530 +つまり、 自動車が最大のシェアを占めてはいるのだが、 それが大量に支配しているわけではない、 + +10:40.530 --> 10:42.150 +ということだ。 + +10:42.150 --> 10:45.600 +そして、 この2、 3台が一緒になれば、 明らかに自動車以上のものになる。 + +10:45.660 --> 10:47.340 +だから、 まったく理にかなっている。 + +10:47.340 --> 10:50.460 +そして、 このチビは家電製品だ。 + +10:50.460 --> 10:57.510 +私たちが昨日始めたのは、 パイの中で1%、 最も小さいものだった。 + +10:57.510 --> 10:59.400 +この場合は文字通りだ。 + +11:00.000 --> 11:04.020 +これが、 私たちのデータセットだ。 + +11:04.020 --> 11:07.170 +うーん、 ちょっと大変だったね。 + +11:07.170 --> 11:13.200 +そして、 サンプリングのような、 いくつかの、 ええと、 もっと難しい部分については、 私は目をつぶっていた。 + +11:13.350 --> 11:20.190 +そして、 また戻ってきて、 それに目を通し、 自分で評価し、 より良いデータセットを作る可能性があることを強く勧める。 + +11:20.370 --> 11:25.260 +ええと、 ハブにアップロードする前に、 ようやく最後の分析ができるんだ。 + +11:25.260 --> 11:27.630 +それはまた次のビデオで。 diff --git a/week5/community-contributions/subtitles/srts/59472491/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472491/ko_KR.srt new file mode 100755 index 0000000..641c5d9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472491/ko_KR.srt @@ -0,0 +1,496 @@ +WEBVTT + +00:00.620 --> 00:01.790 +잘 돌아왔어요 + +00:01.790 --> 00:07.370 +유피터랩에서 저와 함께 하고 계신다면 커피 마시는 동안 잠시 꺼두세요 + +00:07.370 --> 00:12.620 +모든 데이터셋을 다운로드하는 데 20분 정도 걸리거든요 하지만 + +00:12.620 --> 00:16.550 +이제 정성껏 다운로드해서 다듬어질 거예요 + +00:16.550 --> 00:17.750 +여기 있네요 + +00:17.780 --> 00:23.810 +자동차용은 가장 크고 데이터 포인트와 사무실이 900,000개 이상이죠 + +00:23.870 --> 00:27.830 +전자 제품은 400,000개가 넘어요 + +00:27.830 --> 00:29.600 +총합이요 + +00:29.600 --> 00:30.980 +뭐가 있는지 보죠 + +00:30.980 --> 00:36.410 +총 2마리가 조금 넘어요 데이터 포인트가 8백만 개예요 + +00:36.410 --> 00:38.120 +데이터 포인트가 많네요 + +00:38.120 --> 00:39.680 +데이터 포인트가 너무 많아요 + +00:39.680 --> 00:43.580 +우리가 할 훈련에는 그런 숫자가 필요 없어요 + +00:43.640 --> 00:50.480 +즉, 이 데이터셋을 연마하고 가장 가치 있는 데이터 포인트를 선택하고 가장 + +00:50.480 --> 00:54.020 
+많은 신호를 줄 기회가 있다는 거죠 + +00:54.020 --> 00:59.480 +그럼 먼저 패가 얼마나 있는지 분배하는 걸 다시 살펴보죠 + +00:59.510 --> 01:05.310 +지난 번과 같은 도표입니다 훈련 프롬프트에는 180개 이상의 + +01:05.310 --> 01:10.440 +토큰이 없습니다 오픈 소스 라마 모델을 잘 조정하기 위해서 + +01:10.440 --> 01:17.160 +이렇게 설정했습니다 또한 개척자 모델에서는 비용을 절감하기 위해서이기도 + +01:17.160 --> 01:19.350 +하죠 + +01:19.890 --> 01:22.050 +가격을 다시 보죠 + +01:22.290 --> 01:29.850 +이건 2백만 달러 정도에 걸친 전체 가격 분배입니다 보다시피 999달러 + +01:29.880 --> 01:33.690 +이하로 제한돼 있죠 + +01:33.780 --> 01:39.510 +1과 999 사이예요 그게 제약 조건이거든요 관리 가능한 데이터 집합을 + +01:39.510 --> 01:45.060 +확보하려면요 훈련 전체를 왜곡하는 터무니없는 예외 없이요 + +01:45.450 --> 01:51.150 +하지만 여전히 같은 문제가 있어요 데이터 세트가 작은 숫자로 많이 편향돼 + +01:51.150 --> 01:52.200 +있죠 + +01:52.230 --> 01:55.320 +길이 아주 얇아요 + +01:55.320 --> 01:57.660 +이건 300달러까지밖에 안 돼요 + +01:57.720 --> 02:05.460 +1,000까지 올릴 수 있다면 데이터 집합의 끝까지 올릴 수 있어요 + +02:05.490 --> 02:06.030 +여기요 + +02:06.060 --> 02:06.540 +보세요 + +02:06.540 --> 02:07.110 +여기예요 + +02:07.140 --> 02:14.850 +909까지 도달하는 데이터 포인트가 있어요 49개요, 근데 잘 안 보여요 + +02:14.850 --> 02:18.840 +축에 거의 닿지 않아요 + +02:18.960 --> 02:30.030 +데이터 세트가 800,000개 정도에 지배당하기 때문에 낮은 비용 포인트로 접근하는 거죠 + +02:30.480 --> 02:34.170 +한 가지 더 할 일은 카테고리를 빠르게 살펴보는 거예요 + +02:34.170 --> 02:40.110 +이 멋진 막대 차트는 얼마나 많은지 보여줍니다 제품의 다양한 데이터 카테고리에 각각 얼마나 + +02:40.140 --> 02:40.920 +있는지요 + +02:40.920 --> 02:44.970 +다시 900,000점으로 자동차가 우세하네요 + +02:44.970 --> 02:51.510 +그 다음은 도구와 집 꾸미기입니다 전자 제품 번호는 400,000고요 + +02:51.510 --> 02:59.580 +지금 우리가 하고자 하는 것 중 하나는 데이터를 좀 더 균형 잡힌 데이터 집합을 가질 수 있도록 다듬는 겁니다 + +02:59.580 --> 03:06.810 +왜냐하면 하나의 특정 카테고리에서 하나의 가격에 대해 더 왜곡되어 배우는 쪽으로 모델이 + +03:06.810 --> 03:09.670 +편향되는 건 원치 않으니까요 + +03:09.790 --> 03:15.310 +가격이 싼 곳을 선호해도 저희는 괜찮아요 그게 + +03:15.310 --> 03:17.380 +현실이니까요 + +03:17.530 --> 03:23.110 +하지만 훈련에 방해가 될 정도로 너무 멀리 가면 안 돼요 + +03:23.350 --> 03:31.090 +그래서 지금 하려는 건 데이터셋 샘플을 선택하는 겁니다 데이터셋에서 선택해 더 작은 + +03:31.090 --> 03:37.960 +데이터셋을 확보하는 거죠 가격과 카테고리를 더 잘 나타낼 수 있도록요 + +03:37.960 --> 03:42.910 +제가 원하는 데이터 집합 사이즈는 약 400,000개의 데이터 포인트예요 + +03:42.940 --> 03:48.100 
+미세 튜닝 목적으로 큰 데이터 세트지만 그렇게 클 필요는 없어요 + +03:48.130 --> 03:50.290 +하지만 전 데이터 모음이 컸으면 했어요 + +03:50.290 --> 03:52.420 +그래서 400,000달러로 정했어요 + +03:52.510 --> 03:54.910 +어떻게 하는지에 대해 얘기해 보죠 + +03:55.000 --> 04:00.580 +먼저 슬롯머신이라는 사전을 만들었어요 + +04:00.580 --> 04:01.810 +이게 뭔지 말씀드리죠 + +04:01.840 --> 04:04.060 +그럼 내가 왜 그랬는지 이해할 거예요 + +04:04.090 --> 04:12.980 +슬롯머신은 사전인데 사전의 키가 제품의 모든 달러 가격이죠 + +04:12.980 --> 04:17.720 +1달러에서 9달러로 늘었어요 99, 1, 2, 3 999까지 쭉 가세요 + +04:17.720 --> 04:21.830 +이 사전의 열쇠는 999개예요 + +04:21.830 --> 04:29.570 +값은 해당 가격이 있는 모든 제품의 목록이 되겠죠 + +04:29.570 --> 04:36.590 +슬롯 사전에 2번 슬롯에 있는 모든 항목이 2달러짜리 목록으로 나와 있어요 + +04:36.620 --> 04:39.860 +모든 걸 이 구멍에 정리하는 거죠 + +04:39.860 --> 04:43.100 +데이터 세트를 버킷으로 만드는 거죠 + +04:43.370 --> 04:46.010 +이해가 되면 좋겠네요 + +04:46.010 --> 04:47.750 +아니면 이 코드를 불러오고요 + +04:47.750 --> 04:48.380 +통과해요 + +04:48.380 --> 04:54.710 +Defaultexpt를 사용하고 있는데 기본적으로 사전이죠 + +04:54.710 --> 05:00.800 +사전에서 빠진 게 있으면 자동으로 초기화해 넘겨주는 형식이 뭐든 + +05:00.800 --> 05:01.940 +돼요 + +05:01.970 --> 05:06.230 +코드에 일종의 if 테스트를 둘 필요가 없죠 + +05:06.230 --> 05:08.900 +멋지고 우아한 코드를 만들죠 + +05:08.900 --> 05:14.430 +비트가 있는 함수가 있는데 제가 설명해 드릴게요 + +05:14.460 --> 05:14.850 +아미티요 + +05:14.880 --> 05:15.360 +아미티요 + +05:15.420 --> 05:16.170 +주피터 공책이에요 + +05:16.170 --> 05:16.740 +휴대폰요 + +05:17.010 --> 05:21.390 +여기 있는 모든 구멍을 통과할 거예요 + +05:21.420 --> 05:23.760 +999개의 슬롯이 있어요 + +05:23.760 --> 05:30.870 +저 슬롯에서 데이터 하위 집합을 샘플로 만들 거예요 훈련에 사용하기에 좋은 대표 + +05:30.870 --> 05:33.090 +샘플이 될 것 같아요 + +05:33.240 --> 05:40.470 +어떤 건 제가 제멋대로 변형했어요 이것 다음에 나올 조직 문자에도 익숙해질 + +05:40.470 --> 05:41.640 +때까지요 + +05:41.640 --> 05:45.390 +특별한 이유 같은 건 없어요 + +05:45.390 --> 05:50.160 +시행착오의 사례에 더 가깝죠 여러분이 생산하는 균형 잡힌 데이터셋에 대해 만족할 수 있는 + +05:50.160 --> 05:51.330 +지점까지 가는 거예요 + +05:51.330 --> 05:56.460 +물론 훈련을 통해 스스로 만족하게 되죠 이렇게 함으로써 더 + +05:56.460 --> 05:59.040 +좋은 결과를 얻을 수 있다고요 + +05:59.400 --> 06:05.370 +그래서 저는 차례대로 칸을 하나씩 통과했어요 240달러 이상의 + +06:05.370 --> 06:09.930 +가치가 있는 물건은 칸 전체를 다 가져갔죠 + +06:09.960 --> 06:12.870 +그 모든 
점을 제 샘플에 추가해요 + +06:13.320 --> 06:13.950 +네 + +06:14.400 --> 06:16.710 +적어도 less로요 + +06:16.710 --> 06:24.930 +여기 코드가 있는데 샘플이 1,200개예요 + +06:24.930 --> 06:29.790 +그래서 그 구멍에 수천 개의 공간이 있을 수 있죠 + +06:29.820 --> 06:37.830 +1200개를 골라서 선택이라는 누피 기법을 사용합니다 특정 숫자를 고를 수 있게 + +06:37.830 --> 06:39.090 +해주는 거죠 + +06:39.090 --> 06:43.920 +선택기의 장점 중 하나는 무게라는 걸 입력할 수 있다는 거예요 + +06:43.920 --> 06:48.870 +어떤 물건의 중요성을 다른 물건보다 높일 수 있다는 거죠 + +06:48.870 --> 06:53.970 +무게추가 잘 어우러졌으면 좋겠어요 + +06:53.970 --> 07:00.330 +자동차에 들어가는 모든 건 1kg이고 나머지는 5kg이라고 + +07:00.330 --> 07:02.160 +가정해 보죠 + +07:02.310 --> 07:07.290 +이 부분도 여러 가지로 시도해봤죠 어떤 곡이 나올지 익숙해질 + +07:07.290 --> 07:08.160 +때까지요 + +07:08.160 --> 07:14.070 +너무 멀리 가고 싶진 않았어요 현실 세계에 있는 데이터의 유형을 대략 유지하고 + +07:14.070 --> 07:15.930 +싶었거든요 + +07:15.930 --> 07:19.700 +하지만 데이터 집합의 불균형을 바로잡고 싶었어요 + +07:19.820 --> 07:23.570 +한 줄씩 설명하진 않을 거예요 + +07:23.570 --> 07:29.900 +제가 구상한 걸 보고 만족하셨으면 좋겠어요 제가 말한 대로 되고 결과물이 + +07:29.900 --> 07:32.900 +마음에 들면 좋겠어요 + +07:32.900 --> 07:37.550 +물론 데이터 세트를 약간 다르게 만들고 싶다면 지금이 기회예요 비트 + +07:37.610 --> 07:43.880 +모델 성능 측면에서 제 결과를 이길 가능성도 충분히 있습니다 카테고리마다 + +07:43.880 --> 07:49.580 +다른 무게를 두거나 다른 슬롯을 선택하는 게 낫다고 생각할 + +07:49.730 --> 07:52.010 +수도 있죠 + +07:52.010 --> 07:57.170 +그러니 실험을 해 보고 결과를 봐야죠 + +07:57.170 --> 07:59.270 +하지만 지금은 제가 운영해요 + +07:59.270 --> 08:07.460 +이제 샘플 리스트를 생성했고 그 샘플 안에 408,000개의 데이터 포인트가 있어요 + +08:07.460 --> 08:10.040 +이 정도 크기로 만들 거예요 + +08:10.460 --> 08:14.360 +이제 가격 분배 결과를 보죠 + +08:14.360 --> 08:18.230 +가격 분배 측면에서 훨씬 합리적으로 보이죠 + +08:18.230 --> 08:24.710 +더 저렴한 제품도 많지만 저렴한 제품의 가격은 일정하게 유지돼요 + +08:24.740 --> 08:32.510 +더 비싼 가격으로 넘어가면 더 비싼 데이터 포인트가 완벽하게 갖춰져 있어요. 
+ +08:32.540 --> 08:37.160 +흥미로운 효과를 보실 거예요 다양한 포인트가 있죠 + +08:37.160 --> 08:44.420 +예상대로 399, 499달러로 가격을 매긴 것들은 데이터 포인트가 얼마나 많은지 측면에서 + +08:44.420 --> 08:46.100 +약간 상승하죠 + +08:46.130 --> 08:48.530 +현실 세계를 반영해서 좋아요 + +08:48.530 --> 08:51.140 +데이터 집합에 있는 게 좋아요 + +08:51.140 --> 08:53.840 +그 문제를 해결하고 싶지 않아요 + +08:54.230 --> 09:01.760 +이 가격 조직도표와 아까 보여드린 가격 조직도표를 비교하면 데이터에서 + +09:01.760 --> 09:07.220 +가격 분배가 얼마나 개선됐는지 바로 아실 거예요 + +09:07.250 --> 09:12.080 +현실 세계는 여전히 삐뚤어져 있어요 + +09:12.170 --> 09:16.070 +하지만 더 비싼 제품을 더 잘 보여주는 곳이 있어요 + +09:16.070 --> 09:23.220 +그러면 품질 좋은 방법으로 배울 수 있고 샘플의 유효성을 더 확인할 수 있죠 + +09:23.250 --> 09:26.700 +그게 만족스럽지 않다면 데이터 세트를 몇 개 생성하세요 + +09:26.730 --> 09:32.130 +훈련할 때 둘 다 시도해보고 균형 잡힌 데이터셋의 영향을 볼 수 있어요. + +09:32.130 --> 09:33.120 +Get it + +09:33.900 --> 09:36.810 +카테고리를 다시 보죠 + +09:36.930 --> 09:38.640 +주제별로 나눠요 + +09:38.640 --> 09:40.500 +사실 큰 차이는 없었어요 + +09:40.500 --> 09:42.030 +약간 기울었어요 + +09:42.210 --> 09:44.760 +비트가 더 잘 맞거든요 + +09:44.820 --> 09:51.990 +더 바로잡고 싶지 않았어요 결국 이 그림은 현실을 반영한다고 생각했거든요 + +09:51.990 --> 09:54.360 +너무 왜곡하면 안 돼요 + +09:54.360 --> 10:00.630 +자동차 제품을 파는 곳이 다른 곳보다 많아요 + +10:00.630 --> 10:04.950 +이 정도면 괜찮은 것 같지만 불균형이 약간 수정됐어요 + +10:05.130 --> 10:08.370 +파이 차트를 보는 다른 방법도 있을 것 같아요 + +10:08.370 --> 10:13.410 +일반적으로 파이 도표는 데이터 과학자들에게 인기가 없어요 바 + +10:13.410 --> 10:18.210 +도표가 양을 나란히 보고 정량적으로 보는 데 더 좋거든요 + +10:18.420 --> 10:23.400 +파이 도표는 시각적으로 유용할 때가 있죠, 한번 볼까요? + +10:23.490 --> 10:31.470 +카테고리별 파이 차트가 있습니다. 이 단어들을 구분하려면 작업을 좀 해야겠지만, 어떤 느낌인지 아시겠죠. + +10:31.470 --> 10:33.180 +비트 주세요. 
+ +10:33.390 --> 10:40.530 +자동차 업계의 지분이 가장 크다는 걸 보여주지만 그렇게 압도적이진 + +10:40.530 --> 10:42.150 +않아요 + +10:42.150 --> 10:45.600 +이 두 개를 합치면 자동차 이상의 의미가 있죠 + +10:45.660 --> 10:47.340 +그러니 합리적이죠 + +10:47.340 --> 10:50.460 +여기 작은 건 가전제품이에요 + +10:50.460 --> 10:57.510 +어제 처음 시작한 건 파이에서 1%가 가장 작은 조각이에요 + +10:57.510 --> 10:59.400 +이 경우엔 말 그대로요 + +11:00.000 --> 11:04.020 +이게 우리 데이터 세트 큐레이팅이에요 + +11:04.020 --> 11:07.170 +비트 좀 썼다고 인정할게요 + +11:07.170 --> 11:13.200 +샘플링처럼 가시 돋친 부분을 광택제로 닦았어요 + +11:13.350 --> 11:19.350 +다시 와서 살펴보고 스스로 평가해서 더 나은 데이터 세트를 만들길 + +11:19.350 --> 11:20.190 +바라요 + +11:20.370 --> 11:25.260 +허브에 업로드 하기 전에 마지막으로 분석할 거예요 + +11:25.260 --> 11:27.630 +다음 영상에서 다시 만나요 diff --git a/week5/community-contributions/subtitles/srts/59472503/en_US.srt b/week5/community-contributions/subtitles/srts/59472503/en_US.srt new file mode 100755 index 0000000..755e730 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472503/en_US.srt @@ -0,0 +1,400 @@ +WEBVTT + +00:01.010 --> 00:02.810 +Welcome back to Jupyter Lab. + +00:02.810 --> 00:09.050 +Last time, we looked at some silly models for predicting the price of products to make our basic, + +00:09.050 --> 00:10.520 +basic baselines. + +00:10.550 --> 00:14.270 +Now we're going to look at some more interesting baseline models. + +00:14.270 --> 00:20.150 +This of course, again is the diagram showing you that very simple model of predicting a flat average + +00:20.150 --> 00:20.900 +price. + +00:21.230 --> 00:26.810 +You may notice a tiny, tiny change here, which is that I've changed the color, the yellow color that + +00:26.810 --> 00:30.740 +was here into a more pleasing orange color because I think the yellow is harder to see. + +00:30.740 --> 00:34.370 +But otherwise this should be a familiar picture for you. + +00:34.460 --> 00:39.950 +And you'll notice that on average, it's out by $145. 
+
+00:40.010 --> 00:45.470
+I should mention that when we looked at the the prior diagram, I'm not sure if I looked at this if
+
+00:45.470 --> 00:49.610
+I showed this to you, but on average that was out by $340.
+
+00:49.610 --> 00:55.610
+So considerably worse performance, if you guess randomly than if you take an average, for obvious
+
+00:55.610 --> 00:56.420
+reasons.
+
+00:57.170 --> 01:02.980
+Uh, because yeah, obviously, because the data set isn't, uh, evenly distributed.
+
+01:02.980 --> 01:06.220
+It's, uh, and nor is its average 500.
+
+01:06.940 --> 01:08.170
+Uh, okay.
+
+01:08.170 --> 01:15.640
+So now let me at this point, uh, we're going to move to the topic of feature engineering, which is,
+
+01:15.640 --> 01:20.230
+uh, one of the most fundamental of the traditional machine learning techniques.
+
+01:20.230 --> 01:23.470
+And frankly, it's the way that data science used to work.
+
+01:23.470 --> 01:25.060
+This is what we would do.
+
+01:25.060 --> 01:31.120
+Uh, when this kind of problem came up, you would start trying to think about what are the aspects
+
+01:31.120 --> 01:38.560
+of this problem, what are the aspects of each product from Amazon that would be most suitable to use
+
+01:38.560 --> 01:40.570
+to try and predict the price?
+
+01:40.570 --> 01:47.830
+And a lot of time was spent trying to to do what people call feature engineering, which is figure out
+
+01:47.830 --> 01:54.010
+what properties of a particular item are most meaningful to predict its price.
+
+01:54.190 --> 01:59.530
+And people used to spend lots of time working on that before we found that deep neural networks can
+
+01:59.530 --> 02:00.790
+do all of that for you.
+
+02:00.910 --> 02:07.360
+So anyways, uh, what we're going to do now is work on feature engineering and I should say one more
+
+02:07.360 --> 02:12.370
+time that sometimes feature engineering and traditional machine learning will perform great.
+ +02:12.370 --> 02:14.860 +Sometimes that is what your problem needs. + +02:14.860 --> 02:19.630 +And you may think that in the case of an Amazon product, we're in that kind of territory. + +02:19.690 --> 02:21.970 +Uh, but we'll we'll see how it performs. + +02:21.970 --> 02:24.370 +So first of all, let me remind you of something. + +02:24.370 --> 02:29.590 +If I look at one of my training data points, uh, you may remember this. + +02:29.590 --> 02:31.630 +There is a field called details. + +02:31.630 --> 02:36.280 +That was one of the fields that we sucked from our Amazon data set. + +02:36.430 --> 02:41.230 +Um, and what this is, it looks a bit like a, like a Python dictionary. + +02:41.260 --> 02:42.790 +At first blush. + +02:42.910 --> 02:44.800 +You're seeing keys and values. + +02:44.860 --> 02:48.280 +Uh, but then you'll notice that the whole thing is, in fact, a string. + +02:48.280 --> 02:50.380 +Uh, it's all one big string. + +02:50.380 --> 02:53.980 +It's a JSON blob representing a dictionary. + +02:54.190 --> 03:00.640 +Uh, so it would be nice if we could read in this details field on every one of our data set points + +03:00.640 --> 03:07.040 +in training and in test and convert it from being text into being a Python dictionary. + +03:07.040 --> 03:10.160 +And luckily, the standard library gives us a way to do that. + +03:10.160 --> 03:19.430 +Using the JSON package, we can do Json.loads or Loadstring and it will convert these strings into objects. + +03:19.430 --> 03:22.880 +So we're going to run that and then we'll we'll run that. + +03:22.910 --> 03:25.190 +It will just take a few seconds. + +03:25.190 --> 03:30.830 +And then now what I can now do is say train zero dot features. + +03:30.830 --> 03:36.830 +And we'll expect to see this same string but now converted into a Python dictionary. + +03:37.010 --> 03:37.550 +Let's see. + +03:37.550 --> 03:38.540 +Let's run that. + +03:38.570 --> 03:39.680 +There we go. 
+ +03:39.710 --> 03:40.820 +You can see that. + +03:40.850 --> 03:41.180 +Sorry. + +03:41.210 --> 03:46.220 +As I zoom around a dictionary and you can see that it's the same as that text. + +03:46.430 --> 03:50.900 +And in fact we can do dot keys and see its keys are right here. + +03:51.380 --> 03:59.120 +Now there's a problem with our data, which is that turns out these dictionaries are populated differently + +03:59.120 --> 04:00.320 +for different products. + +04:00.320 --> 04:05.320 +Some products don't have any, uh, any features at all. + +04:05.440 --> 04:09.700 +Some of them have, um, just, uh, sparse, uh, features. + +04:09.700 --> 04:11.950 +So, so it's inconsistently populated. + +04:11.950 --> 04:13.630 +Let's get a sense of that. + +04:13.720 --> 04:20.590 +We can use another useful Python standard library, uh, tool, the counter, um, in the collections + +04:20.590 --> 04:21.520 +package. + +04:21.550 --> 04:26.470 +Uh, and what you can do with the counter is you can count things up, and then you can say things like, + +04:26.470 --> 04:35.290 +uh, feature count, dot most common and asked to see the most common 40 of these. + +04:35.290 --> 04:37.990 +So let's run that and you'll see what comes back. + +04:38.200 --> 04:44.740 +So what we're seeing here is what are the most common, uh, 40 features that are populated against + +04:44.740 --> 04:46.450 +all of our training data points. + +04:46.690 --> 04:51.340 +Uh, and so date first available is populated a lot. + +04:51.370 --> 04:52.180 +Uh, almost. + +04:52.180 --> 04:52.810 +Uh, yeah. + +04:52.840 --> 05:02.260 +90% of our, of our population has that populated, uh, it's, what, 360,000 of the 400,000 that we + +05:02.260 --> 05:03.970 +have in the data set. + +05:04.090 --> 05:07.190 +Uh, item weight is very well populated. + +05:07.220 --> 05:08.990 +Manufacturer brand. + +05:09.020 --> 05:10.820 +They're quite similar bestsellers. 
+ +05:10.820 --> 05:14.780 +Rank is also well populated and then it starts to tail off. + +05:15.050 --> 05:19.910 +So what are good candidates for us to use for features? + +05:19.910 --> 05:22.520 +Well, we're really looking for something that's well populated. + +05:22.520 --> 05:23.600 +That's a good start. + +05:23.630 --> 05:30.110 +We want it to be consistently populated, and we also want it to be something that feels like it's likely + +05:30.110 --> 05:33.350 +to be meaningfully related to the price. + +05:34.040 --> 05:38.600 +And so looking at these item weights, that feels like it's a pretty solid candidate. + +05:38.630 --> 05:44.840 +Like you think that that that I mean, it's not clear, but probably there's some correlation some of + +05:44.840 --> 05:47.300 +the time between weight and price. + +05:47.510 --> 05:56.180 +Uh, you know, like a bigger, heavier thing, maybe more valuable on average, uh, brand seems like, + +05:56.360 --> 06:01.100 +uh, obviously it's not going to, to exactly match with a feature, but maybe there's a way that we + +06:01.100 --> 06:03.920 +can make it and maybe best sellers rank. + +06:03.950 --> 06:04.880 +That could be something. + +06:04.880 --> 06:07.360 +That's something that's a bestseller might do well. + +06:07.390 --> 06:08.980 +So we'll start with those. + +06:09.010 --> 06:12.430 +Those feel like they are reasonable features to begin with. + +06:12.430 --> 06:16.990 +And we'll add on one more thing that just is a throwback to something we talked about a while ago. + +06:17.320 --> 06:21.850 +Um, so I'm going to start with something that's a bit janky. + +06:21.880 --> 06:25.090 +As I put here, this is a this is a little bit hokey. + +06:25.210 --> 06:32.560 +Uh, so it turns out that the weight that's populated in this dictionary is just like, very, uh, + +06:32.560 --> 06:34.510 +it's very dirty data. 
+
+06:34.510 --> 06:40.450
+In some cases, it's in pounds, in some cases it's in ounces, in some cases it's in hundredths of
+
+06:40.450 --> 06:45.490
+pounds and a milligrams and kilograms and various other things.
+
+06:45.490 --> 06:52.000
+So I've just got a big old if statement here that goes through, figures out what units is this weight
+
+06:52.000 --> 06:58.720
+in, and converts it all to a number of pounds and returns that amount.
+
+06:58.720 --> 07:00.100
+So that's what this is.
+
+07:00.100 --> 07:03.100
+I'm not going to necessarily convince you that this does the job.
+
+07:03.100 --> 07:04.270
+You could take my word for it.
+
+07:04.270 --> 07:09.330
+Or if you distrust me, then come on in and try it out for some of these.
+
+07:09.510 --> 07:16.170
+Um, and, uh, yeah, I then I'm going to get all of the weights for all of my training items.
+
+07:16.350 --> 07:25.230
+Um, and, uh, this line here, uh, if isn't obvious, filters out any, any nones from there so that
+
+07:25.230 --> 07:31.290
+because I return none if there's something that I, that I don't recognize the units for, um, and
+
+07:31.290 --> 07:36.030
+that allows me to calculate the average weight of all of our training data set.
+
+07:36.030 --> 07:40.800
+The average weight is 13.6 pounds.
+
+07:40.950 --> 07:44.430
+Uh, now you may say, why do I need to calculate the average weight?
+
+07:44.430 --> 07:49.350
+Well, it's for a slightly technical reason that when we're dealing with this kind of linear regression,
+
+07:49.350 --> 07:55.290
+you have to make some decisions about how are you going to handle the items which don't have a weight
+
+07:55.290 --> 07:59.880
+populated the 10% of our items of our training set that doesn't have a weight.
+ +07:59.880 --> 08:04.770 +And there are various techniques you can use, uh, people, data scientists amongst you probably know + +08:04.770 --> 08:10.550 +that you can do some tricks where you you have a feature which represents whether or not there is a + +08:10.550 --> 08:11.180 +weight. + +08:11.300 --> 08:16.880 +And then you have to do some, some jiggery pokery with how you incorporate that in your model. + +08:16.940 --> 08:22.880 +Um, and one approach that's a perfectly respectable approach is to say if something doesn't have a + +08:22.880 --> 08:26.420 +weight, just pick the average and plonk that in there. + +08:26.420 --> 08:34.700 +And so I have this function get weight with default, which takes an item, it tries to get its weight, + +08:34.700 --> 08:41.360 +and it either returns the weight or if the weight is none or zero, because that's presumably a problem. + +08:41.360 --> 08:46.040 +If something has no weight, then we swap it out for an average weight instead. + +08:46.580 --> 08:49.940 +Uh, so that is the get weight with default. + +08:50.690 --> 08:55.100 +I think this was a fair amount of, uh, grotty work as we do our feature engineering. + +08:55.100 --> 08:58.760 +So I'm going to take a break, let you mull over the other features we've got to do. + +08:58.790 --> 09:03.830 +And when we come back, we're going to go into best sellers rank before wrapping up feature engineering + +09:03.830 --> 09:04.880 +and running our model. + +09:04.880 --> 09:06.650 +And seeing how it predicts prices. + +09:06.680 --> 09:07.910 +See you in a second. 
diff --git a/week5/community-contributions/subtitles/srts/59472503/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472503/ja_JP.srt new file mode 100755 index 0000000..e7df4a3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472503/ja_JP.srt @@ -0,0 +1,331 @@ +WEBVTT + +00:01.010 --> 00:02.810 +Jupyter Labへようこそ。 + +00:02.810 --> 00:10.520 +前回は、 基本中の基本のベースラインを作るために、 商品の価格を予測するための愚かなモデルをいくつか見てみた。 + +00:10.550 --> 00:14.270 +次は、 もっと興味深いベースライン・モデルを見てみよう。 + +00:14.270 --> 00:20.900 +これはもちろん、 一律の平均価格を予測する非常にシンプルなモデルを示した図である。 + +00:21.230 --> 00:30.740 +小さな、 小さな変更にお気づきかもしれませんが、 黄色は見づらいと思うので、 ここにあった黄色をより見やすいオレンジ色に変更しました。 + +00:30.740 --> 00:34.370 +しかし、 そうでなければ、 これはあなたにとって見慣れた写真になるはずだ。 + +00:34.460 --> 00:39.950 +そして、 平均して145ドルの差があることにお気づきだろう。 + +00:40.010 --> 00:49.610 +この前の図を見たとき、 これを見せたかどうか定かではないが、 平均して340ドルの差があったことを述べておく。 + +00:49.610 --> 00:56.420 +だから、 平均を取るよりランダムに当てる方が、 明らかに成績が悪い。 + +00:57.170 --> 01:02.980 +データセットが均等に分布していないからだ。 + +01:02.980 --> 01:06.220 +それはええと、 ノルドは平均500だ。 + +01:06.940 --> 01:08.170 +ああ、 わかった。 + +01:08.170 --> 01:20.230 +それではここで、 特徴工学の話題に移りたいと思います。 特徴工学は、 伝統的な機械学習技術の中でも最も基本的なもののひとつです。 + +01:20.230 --> 01:23.470 +率直に言って、 データ・サイエンスはかつてそうだった。 + +01:23.470 --> 01:25.060 +これが私たちのやることだ。 + +01:25.060 --> 01:31.120 +この種の問題が出てきたとき、 あなたはこの問題の側面は何か、 + +01:31.120 --> 01:40.570 +アマゾンの各商品のどのような側面が、 価格を予測するのに最も適しているかを考え始めるだろう。 + +01:40.570 --> 01:54.010 +そして多くの時間を、 フィーチャー・エンジニアリングと呼ばれる、 特定の商品のどのような特性がその価格を予測するのに最も意味があるのかを解明することに費やした。 + +01:54.190 --> 02:00.790 +ディープ・ニューラル・ネットワークがそのすべてを代行してくれるとわかるまでは、 人々はその作業に多くの時間を費やしていた。 + +02:00.910 --> 02:12.370 +もう1度言っておくと、 フィーチャー・エンジニアリングと伝統的な機械学習が素晴らしい結果を出すこともある。 + +02:12.370 --> 02:14.860 +それがあなたの問題に必要なこともある。 + +02:14.860 --> 02:19.630 +アマゾンの商品の場合、 そのような領域に入っていると思うかもしれない。 + +02:19.690 --> 02:21.970 +あー、 でも、 どうなるか様子を見よう。 + +02:21.970 --> 02:24.370 +そこでまず、 あることを思い出してほしい。 + +02:24.370 --> 02:29.590 +トレーニング・データのひとつを見てみよう。 + +02:29.590 --> 02:31.630 +detailsというフィールドがある。 + +02:31.630 --> 02:36.280 
+これはアマゾンのデータセットから吸い出したフィールドのひとつだ。 + +02:36.430 --> 02:41.230 +これはPythonの辞書のようなものです。 + +02:41.260 --> 02:42.790 +一見したところ。 + +02:42.910 --> 02:44.800 +あなたはキーと値を見ている。 + +02:44.860 --> 02:48.280 +ええと、 でも、 実は全部が文字列であることにお気づきでしょうか。 + +02:48.280 --> 02:50.380 +ええと、 すべて大きな糸なんだ。 + +02:50.380 --> 02:53.980 +これは辞書を表すJSONブロブだ。 + +02:54.190 --> 03:00.640 +トレーニングでもテストでも、 データセットのすべてのポイントでこの詳細フィールドを読み込んで、 + +03:00.640 --> 03:07.040 +テキストからPython辞書に変換できたらいいですね。 + +03:07.040 --> 03:10.160 +そして幸運なことに、 標準ライブラリーがその方法を教えてくれる。 + +03:10.160 --> 03:19.430 +JSONパッケージを使えば、 Jsonができる。 をロードまたはロードストリングすると、 これらの文字列をオブジェクトに変換する。 + +03:19.430 --> 03:22.880 +だから、 それを実行して、 それを実行するんだ。 + +03:22.910 --> 03:25.190 +数秒かかるだけだ。 + +03:25.190 --> 03:30.830 +そして今、 私ができることは、 トレイン・ゼロのドット機能だ。 + +03:30.830 --> 03:36.830 +同じ文字列がPythonの辞書に変換される。 + +03:37.010 --> 03:37.550 +見てみよう。 + +03:37.550 --> 03:38.540 +それを実行しよう。 + +03:38.570 --> 03:39.680 +これでよし。 + +03:39.710 --> 03:40.820 +それはわかるだろう。 + +03:40.850 --> 03:41.180 +申し訳ない。 + +03:41.210 --> 03:46.220 +辞書を拡大すると、 そのテキストと同じであることがわかる。 + +03:46.430 --> 03:50.900 +実際、 ドットキーを使えば、 そのキーがここにあることがわかる。 + +03:51.380 --> 04:00.320 +今、 私たちのデータには問題がある。 それは、 これらの辞書が商品によって異なるデータが入力されていることだ。 + +04:00.320 --> 04:05.320 +製品によっては、 ええと、 機能がまったくないものもある。 + +04:05.440 --> 04:09.700 +中には、 うーん、 ただ、 まばらな、 うーん、 特徴のない人もいる。 + +04:09.700 --> 04:11.950 +だから、 人口に齟齬がある。 + +04:11.950 --> 04:13.630 +それを感じ取ってみよう。 + +04:13.720 --> 04:21.520 +Python標準ライブラリのもう一つの便利なツール、 collectionsパッケージのカウンターを使うことができる。 + +04:21.550 --> 04:26.470 +カウンターを使えば、 いろいろなことを数え上げることができるし、 + +04:26.470 --> 04:35.290 +例えば、 "特徴数"、 "最も一般的なドット"、 "最も一般的な40個 "などと言うことができる。 + +04:35.290 --> 04:37.990 +何が返ってくるか見てみよう。 + +04:38.200 --> 04:46.450 +つまり、 ここに表示されているのは、 すべてのトレーニング・データ・ポイントに対して入力された、 最も一般的な40の特徴です。 + +04:46.690 --> 04:51.340 +それで、 最初に空いた日付はたくさん埋まっているんだ。 + +04:51.370 --> 04:52.180 +ほとんどね。 + +04:52.180 --> 04:52.810 +ああ、 そうだね。 + +04:52.840 --> 05:03.970 +人口40万人のうち36万人というデータセットがある。 + +05:04.090 --> 05:07.190 +ええと、 アイテムの重量は非常によく人口に膾炙している。 
+ +05:07.220 --> 05:08.990 +メーカーブランド。 + +05:09.020 --> 05:10.820 +よく似たベストセラーだ。 + +05:10.820 --> 05:14.780 +ランクも人口が多く、 その後は尻すぼみになる。 + +05:15.050 --> 05:19.910 +では、 どのような機能を使うのが良いのだろうか? + +05:19.910 --> 05:22.520 +まあ、 私たちは本当に人口の多いものを探しているんだ。 + +05:22.520 --> 05:23.600 +いいスタートだ。 + +05:23.630 --> 05:33.350 +私たちは一貫して人口に膾炙していることを望み、 また、 価格と有意義に関連しそうだと感じられるものであることを望む。 + +05:34.040 --> 05:38.600 +それで、 これらのアイテムの重さを見ると、 かなり堅実な候補だと感じられる。 + +05:38.630 --> 05:47.300 +つまり、 明確ではないが、 おそらく重量と価格にはある程度の相関関係があると思う。 + +05:47.510 --> 05:56.180 +もっと大きくて、 もっと重くて、 もっと平均的な価値があるような、 + +05:56.360 --> 06:03.920 +ブランドのような......。 + +06:03.950 --> 06:04.880 +何かあるかもしれない。 + +06:04.880 --> 06:07.360 +それはベストセラーになるようなことだ。 + +06:07.390 --> 06:08.980 +だから、 まずはそこから始めよう。 + +06:09.010 --> 06:12.430 +それらはそもそも妥当な機能だと感じる。 + +06:12.430 --> 06:16.990 +そして、 もうひとつ、 少し前に話したことに戻るが、 追加しよう。 + +06:17.320 --> 06:21.850 +ええと、 だから、 ちょっとジャンキーなことから始めようと思うんだ。 + +06:21.880 --> 06:25.090 +ここに書いたように、 これはちょっと陳腐なものだ。 + +06:25.210 --> 06:34.510 +この辞書に入力されているウェイトは、 とても汚いデータなんだ。 + +06:34.510 --> 06:40.450 +ポンド単位の場合もあれば、 オンス単位の場合もあるし、 100分の1ポンドやミリグラム、 + +06:40.450 --> 06:45.490 +キログラムなどさまざまだ。 + +06:45.490 --> 06:52.000 +だから、 ここに大きな古いif文があって、 この重さがどの単位で計算されるかを調べ、 + +06:52.000 --> 06:58.720 +それをポンド数に変換して、 その金額を返しているんだ。 + +06:58.720 --> 07:00.100 +だから、 これはそういうことなんだ。 + +07:00.100 --> 07:03.100 +これで仕事ができると必ずしも納得させるつもりはない。 + +07:03.100 --> 07:04.270 +私の言葉を信じることができるだろう。 + +07:04.270 --> 07:09.330 +あるいは、 私に不信感を抱いているのであれば、 この中に入って試してみてほしい。 + +07:09.510 --> 07:16.170 +それから、 トレーニングに必要なウエイトも全部揃えるつもりだ。 + +07:16.350 --> 07:25.230 +そして、 この行は、 もし明らかでなければ、 ここから「なし」をフィルタリングして、 + +07:25.230 --> 07:36.030 +もし私がその単位を認識していないものがあれば、 「なし」を返すようにしています。 + +07:36.030 --> 07:40.800 +平均重量は13ポンド。 6. + +07:40.950 --> 07:44.430 +なぜ平均体重を計算する必要があるんだ? 
+ +07:44.430 --> 07:49.350 +少し技術的な理由ですが、 このような線形回帰を扱う場合、 + +07:49.350 --> 07:59.880 +学習セットの10%の項目のうち、 重みを持たない項目をどのように扱うかを決定する必要があります。 + +07:59.880 --> 08:11.180 +データサイエンティストの皆さんは、 重みがあるかないかを表す特徴量を使うトリックがあることをご存知でしょう。 + +08:11.300 --> 08:16.880 +そして、 それをどのようにモデルに組み入れるか、 少し、 小細工をしなければならない。 + +08:16.940 --> 08:22.880 +もし重みがないのであれば、 平均値を選んでそれを入れるというのも、 + +08:22.880 --> 08:26.420 +ひとつのアプローチとして立派なものだ。 + +08:26.420 --> 08:34.700 +この関数はアイテムを受け取り、 その重量を取得しようとします。 そして重量を返すか、 + +08:34.700 --> 08:41.360 +重量がゼロかゼロでないかを返します。 + +08:41.360 --> 08:46.040 +重さがないものは、 代わりに平均的な重さに置き換える。 + +08:46.580 --> 08:49.940 +ええと、 これがデフォルトで体重を測るということですね。 + +08:50.690 --> 08:55.100 +私たちがフィーチャー・エンジニアリングをしていく中で、 これはかなり、 ええと、 グロテスクな仕事だったと思う。 + +08:55.100 --> 08:58.760 +だから、 ちょっと休憩して、 私たちがやらなければならない他の機能について熟考してもらおうと思う。 + +08:58.790 --> 09:04.880 +そしてまた戻ってきたら、 フィーチャー・エンジニアリングを終えてモデルを実行する前に、 ベストセラー・ランクに入るつもりだ。 + +09:04.880 --> 09:06.650 +そして、 それがどのように価格を予測するかを見ている。 + +09:06.680 --> 09:07.910 +すぐに会おう。 diff --git a/week5/community-contributions/subtitles/srts/59472503/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472503/ko_KR.srt new file mode 100755 index 0000000..0ad42dd --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472503/ko_KR.srt @@ -0,0 +1,391 @@ +WEBVTT + +00:01.010 --> 00:02.810 +주피터 연구소에 잘 오셨어요 + +00:02.810 --> 00:09.050 +지난 시간에는 바보 같은 모델을 살펴봤죠 제품의 가격을 예측해서 기본 기준을 설정하는 + +00:09.050 --> 00:10.520 +방법이었죠 + +00:10.550 --> 00:14.270 +이제 좀 더 흥미로운 기본 모델을 살펴보죠 + +00:14.270 --> 00:20.150 +이건 물론 도표입니다 아주 간단한 모델로 평평한 평균 가격을 예측할 + +00:20.150 --> 00:20.900 +수 있죠 + +00:21.230 --> 00:26.810 +여기서 아주 작은 변화가 보이실 겁니다 제가 색을 바꿨죠 원래 여기 있던 노란색이 더 보기 + +00:26.810 --> 00:30.740 +좋은 주황색으로 바뀌었어요 노란색이 잘 안 보이거든요 + +00:30.740 --> 00:34.370 +하지만 그렇지 않다면 익숙한 사진일 거예요 + +00:34.460 --> 00:39.950 +평균적으로 145달러가 빠져나갔어요 + +00:40.010 --> 00:45.470 +앞서 다이어그램을 봤을 때 말씀드리고 싶은 게 있는데 이걸 봤는지 모르겠는데 + +00:45.470 --> 00:49.610 +평균적으로 340달러나 차이가 나요 + +00:49.610 --> 00:55.610 +즉, 평균보다 무작위로 추측하면 상당히 나쁜 결과가 나온다는 + 
+00:55.610 --> 00:56.420 +거죠 + +00:57.170 --> 01:02.980 +네, 물론 데이터 세트가 균등하게 분배되지 않았기 때문이죠 + +01:02.980 --> 01:06.220 +노드는 평균 500달러예요 + +01:06.940 --> 01:08.170 +네 + +01:08.170 --> 01:15.640 +그럼 이제 이 시점에서 기능 공학으로 넘어가겠습니다 전통적인 머신 + +01:15.640 --> 01:20.230 +러닝 기술의 가장 근본적인 것 중 하나죠 + +01:20.230 --> 01:23.470 +데이터 과학이 예전에 쓰던 방식이죠 + +01:23.470 --> 01:25.060 +우리라면 이렇게 했을 거예요 + +01:25.060 --> 01:31.120 +이런 문제가 발생하면 이런 문제를 해결할 양상은 무엇인지 생각해 + +01:31.120 --> 01:38.560 +보게 됩니다 아마존 제품의 각 제품에서 가격을 예측하는 데 가장 적합한 양상은 + +01:38.560 --> 01:40.570 +무엇인지 말이죠 + +01:40.570 --> 01:47.830 +기능 엔지니어링이라는 걸 하려고 많은 시간을 들였어요 특정 아이템의 어떤 + +01:47.830 --> 01:54.010 +속성이 가격을 예측하는 데 가장 중요한지 알아내는 거죠 + +01:54.190 --> 01:59.530 +심층 신경망이 그 모든 걸 가능하게 하기 전까진 많은 시간을 + +01:59.530 --> 02:00.790 +투자했죠 + +02:00.910 --> 02:07.360 +어쨌든, 이제 기능 공학에 대해 공부할 겁니다 다시 한 번 말하지만 기능 공학이나 + +02:07.360 --> 02:12.370 +전통적인 머신 러닝이 훌륭하게 작동할 때도 있어요 + +02:12.370 --> 02:14.860 +가끔은 그게 문제가 될 수도 있어요 + +02:14.860 --> 02:19.630 +아마존 제품의 경우 우리가 그런 영역에 있다고 생각할 수도 있어요 + +02:19.690 --> 02:21.970 +하지만 성능은 봐야죠 + +02:21.970 --> 02:24.370 +먼저 한 가지 상기시켜 드리죠 + +02:24.370 --> 02:29.590 +제 훈련 데이터를 보면 기억하실 거예요 + +02:29.590 --> 02:31.630 +디테일이라는 게 있어요 + +02:31.630 --> 02:36.280 +아마존 데이터 세트에서 우리가 빨아들인 필드 중 하나예요 + +02:36.430 --> 02:41.230 +이건 파이썬 사전처럼 생긴 비트인데요 + +02:41.260 --> 02:42.790 +언뜻 보면요 + +02:42.910 --> 02:44.800 +키와 가치가 보이죠 + +02:44.860 --> 02:48.280 +그러면 전체적인 것이 문자열이라는 것을 알 수 있죠 + +02:48.280 --> 02:50.380 +하나의 큰 문자열이죠 + +02:50.380 --> 02:53.980 +사전을 나타내는 JSON 블롭이에요 + +02:54.190 --> 03:00.640 +이 상세 필드를 읽을 수 있다면 좋을 것 같아요 트레이닝과 테스트에서 모든 데이터 + +03:00.640 --> 03:07.040 +세트 포인트를요 그리고 텍스트에서 파이썬 사전으로 전환하는 거죠 + +03:07.040 --> 03:10.160 +다행히 표준 라이브러리가 그걸 가능하게 하죠 + +03:10.160 --> 03:19.430 +JSON 패키지를 이용해 JSON을 할 수 있어요 Loadstring을 하면 이런 문자열을 개체로 변환하죠 + +03:19.430 --> 03:22.880 +그걸 실행하고 그걸 실행할 거예요 + +03:22.910 --> 03:25.190 +몇 초면 돼요 + +03:25.190 --> 03:30.830 +이제 트레인0.com 특징을 입력할 수 있어요 + +03:30.830 --> 03:36.830 +그리고 이 문자열이 파이썬 사전에 변환된 걸 보게 될 거예요 + 
+03:37.010 --> 03:37.550 +어디 보죠 + +03:37.550 --> 03:38.540 +실행해 보죠 + +03:38.570 --> 03:39.680 +됐어요 + +03:39.710 --> 03:40.820 +보면 알잖아요 + +03:40.850 --> 03:41.180 +미안해요 + +03:41.210 --> 03:46.220 +사전을 확대해 보면 그 텍스트와 같은 걸 볼 수 있죠 + +03:46.430 --> 03:50.900 +.키를 입력하면 여기 키가 있는 게 보이죠 + +03:51.380 --> 03:59.120 +우리 데이터에 문제가 있어요 이 사전들은 다른 제품들에 대해 다르게 채워져 있다는 + +03:59.120 --> 04:00.320 +거죠 + +04:00.320 --> 04:05.320 +어떤 제품은 기능이 전혀 없어요 + +04:05.440 --> 04:09.700 +어떤 건 특징이 거의 없어요 + +04:09.700 --> 04:11.950 +일관성이 없는 포집이죠 + +04:11.950 --> 04:13.630 +get get get을 해 보죠 + +04:13.720 --> 04:21.520 +또 다른 유용한 파이썬 표준 라이브러리 도구를 사용할 수 있습니다 컬렉션 패키지에서 counter를요 + +04:21.550 --> 04:26.470 +counter로 할 수 있는 건 숫자를 세는 겁니다 그런 + +04:26.470 --> 04:35.290 +다음 기능 카운트, 가장 흔한 것 .40 같은 걸 말할 수 있죠 가장 흔한 걸 보여달라고 요청하는 거예요 + +04:35.290 --> 04:37.990 +실행해보죠 뭐가 나오는지 보세요 + +04:38.200 --> 04:44.740 +가장 일반적인 기능은 40개입니다 모든 훈련 데이터 포인트에 대항해 + +04:44.740 --> 04:46.450 +채워져 있죠 + +04:46.690 --> 04:51.340 +날짜 먼저 사용 가능은 많이 채운다는 거죠 + +04:51.370 --> 04:52.180 +거의요 + +04:52.180 --> 04:52.810 +네 + +04:52.840 --> 05:02.260 +인구의 90%가 데이터 집합에 있는 400,000명 중 360,000명 정도에 + +05:02.260 --> 05:03.970 +해당하죠 + +05:04.090 --> 05:07.190 +품목의 무게는 아주 많이 차 있어요 + +05:07.220 --> 05:08.990 +제조사 브랜드예요 + +05:09.020 --> 05:10.820 +비슷한 베스트셀러예요 + +05:10.820 --> 05:14.780 +랭크도 가득 찼다가 사라지기 시작하죠 + +05:15.050 --> 05:19.910 +그럼 어떤 특징을 사용하면 좋을까요? 
+ +05:19.910 --> 05:22.520 +많이 들어찬 걸 찾고 있어요 + +05:22.520 --> 05:23.600 +시작이 좋네요 + +05:23.630 --> 05:30.110 +지속적으로 채워지길 원하고 가격과 의미 있게 연관될 가능성이 + +05:30.110 --> 05:33.350 +있는 뭔가가 되길 원해요 + +05:34.040 --> 05:38.600 +이 아이템의 무게를 보면 꽤 확실한 후보 같아요 + +05:38.630 --> 05:44.840 +당신 생각에는 확실하진 않지만 무게와 가격 사이에 상관관계가 + +05:44.840 --> 05:47.300 +있을 거예요 + +05:47.510 --> 05:56.180 +더 크고 무거운 제품이나 평균적으로 더 가치 있는 제품요 브랜드가 기능과 정확히 + +05:56.360 --> 06:01.100 +일치하진 않겠지만 베스트셀러 순위에 들 수 + +06:01.100 --> 06:03.920 +있는 방법이 있을 거예요 + +06:03.950 --> 06:04.880 +중요한 단서일 수도 있어요 + +06:04.880 --> 06:07.360 +베스트셀러로 잘 팔릴 것 같아요 + +06:07.390 --> 06:08.980 +이것부터 시작하죠 + +06:09.010 --> 06:12.430 +이런 것들은 처음부터 합리적인 기능으로 느껴졌어요 + +06:12.430 --> 06:16.990 +한 가지만 더 추가할게요 아까 얘기했던 것으로 되돌아가는 거죠 + +06:17.320 --> 06:21.850 +약간 조잡한 비트부터 시작할게요 + +06:21.880 --> 06:25.090 +여기 비트를 보면 좀 진부해요 + +06:25.210 --> 06:34.510 +이 사전에 채워진 무게는 아주 더러운 데이터예요 + +06:34.510 --> 06:40.450 +파운드 단위로도 나오고 온스 단위로 나오기도 하고 100g 단위로 + +06:40.450 --> 06:45.490 +나오기도 해요 1mg이나 킬로그램 등 다양하죠 + +06:45.490 --> 06:52.000 +여기 if문이 있는데요 어떤 웨이트 단위를 입력했는지 + +06:52.000 --> 06:58.720 +알아내고 파운드로 변환한 다음 그 양을 반환하죠 + +06:58.720 --> 07:00.100 +그런 거였군요 + +07:00.100 --> 07:03.100 +이게 효과가 있다는 걸 꼭 설득할 필요는 없어요 + +07:03.100 --> 07:04.270 +내 말을 믿어요 + +07:04.270 --> 07:09.330 +아니면 날 못 믿겠으면 들어와서 이걸 써봐요 + +07:09.510 --> 07:16.170 +그리고 제 운동 기구에 쓸 웨이트도 전부 get 할 거예요 + +07:16.350 --> 07:25.230 +그리고 이 선은 0을 걸러내는 거예요 단위를 인식하지 못하는 게 있으면 + +07:25.230 --> 07:31.290 +0을 반환하기 때문에 훈련 데이터 집합의 평균 + +07:31.290 --> 07:36.030 +무게를 계산할 수 있어요 + +07:36.030 --> 07:40.800 +평균 체중은 13파운드예요 6개요 + +07:40.950 --> 07:44.430 +왜 평균 무게를 재야 하냐고 하실 수도 있겠네요 + +07:44.430 --> 07:49.350 +약간 기술적인 이유에서죠 이런 선형 퇴행 과정을 + +07:49.350 --> 07:55.290 +다룰 때는 중량 인구가 없는 아이템을 어떻게 다룰지 결정해야 + +07:55.290 --> 07:59.880 +합니다 훈련 아이템 중량 없는 10%를요 + +07:59.880 --> 08:04.770 +다양한 기법을 사용할 수 있어요 데이터 과학자들도 + +08:04.770 --> 08:11.180 +알 텐데 하중이 있는지 없는지를 나타내는 기능을 활용할 수 있어요 + +08:11.300 --> 08:16.880 +그리고 모델의 몸에 그걸 어떻게 적용할지 어설프게 표현해야 해요 + 
+08:16.940 --> 08:22.880 +완벽하게 신뢰할 수 있는 접근법이 하나 있는데 무게가 없는 + +08:22.880 --> 08:26.420 +건 평균을 골라서 집어넣는 거예요 + +08:26.420 --> 08:34.700 +기본값으로 함수 get-tier를 설정했는데 항목을 취하고 그 가중치를 얻으려 하죠 + +08:34.700 --> 08:41.360 +가중치를 반환하거나 가중치가 없거나 0이면요 문제가 될 테니까요 + +08:41.360 --> 08:46.040 +무게가 없는 건 평균 무게로 바꿔요 + +08:46.580 --> 08:49.940 +기본값 get 웨이트고요 + +08:50.690 --> 08:55.100 +기능 공학에서 꽤 지저분한 작업이 되었어요 + +08:55.100 --> 08:58.760 +잠시 쉴 테니 다른 기능들을 잘 생각해 보세요 + +08:58.790 --> 09:03.830 +잠시 후에는 베스트셀러 순위에 오르겠습니다 기능 엔지니어링과 모델을 실행하기 + +09:03.830 --> 09:04.880 +전에요 + +09:04.880 --> 09:06.650 +가격 예측도 하고요 + +09:06.680 --> 09:07.910 +이따 봐요 diff --git a/week5/community-contributions/subtitles/srts/59472505/en_US.srt b/week5/community-contributions/subtitles/srts/59472505/en_US.srt new file mode 100755 index 0000000..ce6c446 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472505/en_US.srt @@ -0,0 +1,496 @@ +WEBVTT + +00:00.260 --> 00:05.780 +So the good news is that this is the very final video about data set curation. + +00:05.810 --> 00:08.120 +You were probably fed up of data set curation. + +00:08.120 --> 00:10.970 +Now there's just one more piece and then we are done. + +00:11.000 --> 00:16.370 +So we have crafted an outstanding data set of which we should be very proud. + +00:16.370 --> 00:19.610 +Let's do some final peeks at it. + +00:19.760 --> 00:23.030 +Um, one question that you might ask. + +00:23.060 --> 00:24.710 +Um, well, I'm going to ask it anyway. + +00:24.710 --> 00:30.740 +Is is it possible that the price of an item is related? + +00:30.740 --> 00:35.570 +It's correlated to how long the description is of that item. + +00:35.570 --> 00:41.960 +You might imagine a situation where the higher price things tend to have more information. + +00:41.960 --> 00:44.420 +And that would be worth us understanding. + +00:44.600 --> 00:48.170 +Um, because yeah, that's something that the model would learn from quickly. 
+ +00:48.170 --> 00:52.550 +And it gives us a good sense, perhaps when we look at traditional approaches about how we might approach + +00:52.550 --> 00:52.730 +it. + +00:52.730 --> 01:00.650 +So this is a nice little scatter plot that is going to show us, um, each of the, the sizes on the + +01:00.650 --> 01:01.220 +x axis. + +01:01.250 --> 01:07.970 +It's going to show us the length of the description, and on the y axis it's going to show us the price. + +01:08.060 --> 01:12.350 +Um, let's have a look at this across the full sample data set. + +01:12.740 --> 01:13.700 +So here we go. + +01:13.700 --> 01:15.950 +Here's a nice nice picture for you. + +01:15.950 --> 01:20.210 +So there are 400,000 points on this picture. + +01:20.360 --> 01:23.330 +Uh and it's something to look at. + +01:23.330 --> 01:26.420 +You can see in it there's there's a lot to digest. + +01:26.450 --> 01:34.040 +You can see this interesting pattern that's happening as prices tend to be more prevalent at these boundary + +01:34.040 --> 01:34.490 +points. + +01:34.490 --> 01:43.370 +The $799 priced items, um, and you can see, of course, that there are many more cheaper items. + +01:43.370 --> 01:52.610 +And you can see that there is something of apparently a correlation that, uh, items which have longer + +01:52.610 --> 01:59.480 +descriptions do appear, perhaps sometimes to have a trend of being the more expensive ones. + +01:59.480 --> 02:03.750 +But it's not clear that there's a significant correlation in that regard. + +02:03.750 --> 02:06.750 +So there's something there, but it's nothing major. + +02:06.750 --> 02:12.780 +So we suspect that traditional machine learning, when trying to look at something like that will probably + +02:12.780 --> 02:15.270 +not find any major correlation. + +02:15.270 --> 02:21.660 +So just an example of the kind of, um, diagram that you can come up with to try and get insight into + +02:21.660 --> 02:23.610 +different aspects of your data. 
+ +02:24.330 --> 02:27.510 +One other thing I want to talk about for a moment more is tokens. + +02:27.750 --> 02:34.830 +Um, and uh, the, um, we're going to be working a lot more with tokens when we get to actually training + +02:34.830 --> 02:38.640 +against an open source model, but it's worth looking at tokens right now. + +02:38.640 --> 02:44.790 +So I've just written this function report, which takes an item and which will then print, uh, the + +02:44.790 --> 02:48.630 +prompt, first of all, the full training prompt that will be used during training. + +02:48.630 --> 02:54.930 +And then the last ten tokens in that prompt, and then it will decode those. + +02:54.930 --> 02:59.010 +So we'll see the bits of text that map to the last ten tokens. + +02:59.010 --> 03:02.250 +And if you're wondering why the last ten you're going to see in just a second. + +03:02.250 --> 03:08.550 +So let's pick a random number, number 40,000 and run this. + +03:08.580 --> 03:09.060 +Okay. + +03:09.090 --> 03:12.840 +So this here sorry for all of this text here. + +03:12.840 --> 03:18.390 +That is the prompt that's going to be sent to the LLM to learn from. + +03:18.630 --> 03:22.170 +Um, it's going to be asked how much does this cost to the nearest dollar. + +03:22.170 --> 03:23.790 +And then it's going to get a description. + +03:23.790 --> 03:30.090 +And then price is and then this which is the price rounded to the nearest dollar. + +03:30.180 --> 03:36.750 +You'll note if you look in the item code that when building the training prompt, it rounds this to + +03:36.780 --> 03:37.860 +the nearest dollar. + +03:37.950 --> 03:44.520 +So if we look at the last ten tokens you can see what's happening here I'm printing out underneath it. + +03:44.550 --> 03:46.110 +What are those ten tokens. 
+ +03:46.110 --> 03:53.010 +And I just wanted to show you that in terms of the final few tokens, price gets mapped to one token + +03:53.040 --> 04:02.130 +is gets a token with that start of of of word space before it dollars again with the start of word, + +04:02.130 --> 04:07.410 +and then the number 34 is getting mapped to one specific token. + +04:07.680 --> 04:12.450 +And this is, as I say, just a feature of the llama tokenizer that it does. + +04:12.450 --> 04:18.000 +As with GPT, it does have a separate token for every three digit number. + +04:18.000 --> 04:21.120 +Some of the other tokenizers the other models do not. + +04:21.210 --> 04:27.120 +Um, and whilst this isn't required for our project, it does make things a bit simpler for us later. + +04:27.180 --> 04:32.250 +And then the period gets one token and the .00 gets one token. + +04:32.280 --> 04:35.130 +Let's do another sample. + +04:36.930 --> 04:42.000 +Let's do something completely, uh, something in a different location altogether. + +04:42.720 --> 04:44.100 +Number 10,000. + +04:44.280 --> 04:47.550 +And this is a something that's rather cheap. + +04:47.580 --> 04:51.930 +It costs $9 and price is 9000. + +04:51.960 --> 04:58.110 +Let's go for something that's near the end of the data set 398,000. + +04:58.620 --> 05:05.740 +And this is a, um, uh, coilover damper kit. + +05:05.740 --> 05:10.240 +And this price is $765. + +05:10.240 --> 05:15.430 +And you'll see once more that the 765 gets mapped to one token. + +05:15.430 --> 05:22.090 +So you should satisfy yourself this this sample is of course sorted by cheapest first ish because we've + +05:22.120 --> 05:25.840 +gone through sampling, uh, in each, each category. + +05:25.840 --> 05:28.990 +So, so rounded to the nearest dollar. + +05:28.990 --> 05:35.170 +It is sorted by cheapest in the lower, um, parts of the sample, and the most expensive in the higher + +05:35.170 --> 05:36.100 +parts of the sample. 
+ +05:36.100 --> 05:43.390 +And you can satisfy yourself that we are getting this effect, that every number from 1 to 999 is getting + +05:43.390 --> 05:46.780 +mapped to one token, just as it says here. + +05:46.780 --> 05:54.190 +And as I say one more time, when, uh, look at the quantity or gamma or phi three tokenizers, you'll + +05:54.190 --> 05:55.870 +see that that's not the case. + +05:55.960 --> 06:02.350 +Um, it turns out to be a little bit handy for us later on, but it's not required and definitely later. + +06:02.350 --> 06:07.060 +If you want to experiment with using other models like Quantum Gemini three, you can simply switch + +06:07.090 --> 06:08.620 +it in and it will work. + +06:08.650 --> 06:14.440 +You'll just find here that it will be mapped to multiple tokens, not to the one token for the three + +06:14.470 --> 06:15.430 +digit number. + +06:16.690 --> 06:20.260 +Okay, big sigh of relief. + +06:20.260 --> 06:22.630 +We've made it through data curation. + +06:22.630 --> 06:27.460 +The last part of it all is to finish things off and upload to the hub. + +06:27.460 --> 06:33.520 +And what we're going to do to start with is shuffle up our data set, because it's no good at all if + +06:33.520 --> 06:35.710 +it's sorted in order of cheapest. + +06:35.710 --> 06:38.650 +First we need a nice jumbled data set. + +06:38.800 --> 06:44.350 +Um, and first I, um, set the random seed because I want to make sure that we always are working with + +06:44.350 --> 06:50.230 +exactly the same data set so that you can reproduce exactly the same stuff that I will and get the same + +06:50.260 --> 06:51.340 +outcomes. + +06:51.520 --> 06:58.990 +Um, we use Random.shuffle to shuffle things up, and then I take the first 400,000 as my training data + +06:59.020 --> 06:59.290 +set. + +06:59.290 --> 07:01.900 +And then the next 2000 as the test set. + +07:01.930 --> 07:03.220 +Now I hear you. + +07:03.250 --> 07:05.230 +You cry, you data scientists. 
+ +07:05.260 --> 07:11.380 +That one normally takes, like, at least a 5% or 10% test data set here. + +07:11.470 --> 07:16.270 +Um, and you can absolutely feel free to do so because obviously we've got we've got 8000 data points + +07:16.300 --> 07:17.350 +right, right here. + +07:17.350 --> 07:21.490 +And you can also, of course, sample more to have a bigger data set. + +07:21.520 --> 07:26.590 +It won't be necessary for us because we're going to find that we're only going to use a few hundred + +07:26.590 --> 07:27.310 +for testing. + +07:27.310 --> 07:30.160 +And that's going to give us very accurate results. + +07:30.160 --> 07:34.090 +And we get diminishing returns if we keep testing against more and more. + +07:34.090 --> 07:39.340 +So this is plenty for our purposes for this project, but it is a best practice. + +07:39.370 --> 07:40.630 +I don't know if it's a best practice. + +07:40.660 --> 07:47.620 +It's a common practice to reserve at least 5% of these other, uh, for the test data set, and sometimes + +07:47.620 --> 07:52.810 +to to separately have 5% for test and a 5% for validation, as I talked about before. + +07:52.870 --> 07:58.930 +Um, not required for this purpose, but by all means, you can do it if you wish and have that as an + +07:58.930 --> 08:00.880 +extra data set that you manage. + +08:01.030 --> 08:02.300 +Um, but anyway, we will do that. + +08:02.300 --> 08:03.320 +We will jumble it up. + +08:03.320 --> 08:08.630 +It's been divided into a training dataset of 400,000 and a test set of 2000. + +08:08.660 --> 08:14.480 +Let's have a look at the first the the the test first element. + +08:14.480 --> 08:19.640 +The test prompt that you remember is the prompt without revealing the answer. + +08:19.640 --> 08:24.680 +This is the prompt that will be sent to sorry, I'm looking at the training prompt first, then we'll + +08:24.680 --> 08:25.310 +look at the test prompt. 
+ +08:25.340 --> 08:27.470 +The training prompt is the one that does have the answer. + +08:27.470 --> 08:31.310 +So the training prompt says how much does this cost to the nearest dollar. + +08:31.310 --> 08:35.120 +It is a Delphi fuel pump module. + +08:35.390 --> 08:37.430 +Um, and uh yeah. + +08:37.460 --> 08:37.910 +How about that. + +08:37.910 --> 08:39.470 +It costs $227. + +08:39.470 --> 08:41.300 +I would have had no clue about that. + +08:41.300 --> 08:47.450 +So this is an example of something that will be sent to an LM as part of training, because it contains + +08:47.450 --> 08:50.240 +the description and it contains the price. + +08:50.480 --> 08:54.380 +Um, so let's look at a test prompt. + +08:54.410 --> 09:01.280 +Now the test prompt is going to show us something that will be used, which will have the description, + +09:01.280 --> 09:02.990 +but it will not have the price. + +09:02.990 --> 09:07.400 +And this is the first item in our test set. + +09:07.400 --> 09:09.350 +So there we have it. + +09:09.470 --> 09:17.960 +Uh, let's have a quick look at the distribution of prices for the first 250 test points, because these + +09:17.960 --> 09:22.430 +are actually the points that we'll be using most of the time for actually testing our model. + +09:22.430 --> 09:26.750 +And you can see there's a nice healthy spread of different prices here. + +09:26.780 --> 09:33.410 +There's plenty of things in the higher area that will test whether the model can handle expensive things. + +09:33.410 --> 09:41.360 +And then, you know, the majority are the cheaper priced with a good variety of prices in our test + +09:41.360 --> 09:42.530 +data set. + +09:43.340 --> 09:51.650 +Okay, finally, finally, we now turn this into a series of training prompts and test prompts, uh, + +09:51.650 --> 09:57.680 +which is just simply plucking out the the prompt and the test prompt that we just looked at, along + +09:57.680 --> 09:58.910 +with the prices. 
+ +09:59.390 --> 10:03.590 +This little piece of code here will upload it to the hugging face. + +10:03.620 --> 10:10.820 +I will turn it into a data set object suitable for the hugging face hub, by calling the Fromdict for + +10:10.820 --> 10:14.330 +a data set and then putting that into a data set dict. + +10:15.050 --> 10:22.940 +And then finally this line here will upload your data set to the Hugging Face hub so that you can continue + +10:22.940 --> 10:26.000 +to use it and download it for future. + +10:26.000 --> 10:33.110 +Uh, when we get to to fine tuning, uh, but I'm not going to run it because I've already run it. + +10:33.110 --> 10:35.300 +And this is for you to put in your username. + +10:35.300 --> 10:46.100 +I have this uploaded to to my to to uh, um, sorry, I have it uploaded to my username here. + +10:46.100 --> 10:51.800 +So you will be able to also just retrieve the data that way too. + +10:51.830 --> 10:55.820 +If you wanted to short circuit all of this data curation, which hopefully you do not want to do. + +10:56.210 --> 11:05.940 +Um, and then as a final thing here, um, I'm going to turn this train and test the collection into + +11:06.090 --> 11:07.230 +a pickle file. + +11:07.230 --> 11:12.270 +I'm going to pickle it and put it into a file so we can load it for future days so we don't have to + +11:12.300 --> 11:16.050 +go through all of this rigmarole again of building our lists. + +11:16.050 --> 11:22.620 +So if you're familiar with Python pickles, it's super easy way to take a Python object and dump it + +11:22.620 --> 11:23.520 +out to a file. + +11:23.520 --> 11:28.710 +And now that I've run that, there will be two new files here test dot pickle and train dot pickle that + +11:28.710 --> 11:32.340 +will contain my training and test data set. + +11:33.090 --> 11:36.900 +And with that we have completed our data curation work. 
+ +11:36.900 --> 11:44.820 +Please can I leave with you to investigate the data set more and to also confirm when you try out this, + +11:44.970 --> 11:53.220 +this exercise of trying to tokenize, uh, different, um, uh, different data points that you always + +11:53.220 --> 11:59.490 +get the case that three digit numbers tokenized to one token and get a sense for those tokens. + +11:59.820 --> 12:03.720 +And with that, I will see you back with the slides for a wrap up. diff --git a/week5/community-contributions/subtitles/srts/59472505/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472505/ja_JP.srt new file mode 100755 index 0000000..91edf13 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472505/ja_JP.srt @@ -0,0 +1,403 @@ +WEBVTT + +00:00.260 --> 00:05.780 +というわけで、 これがデータセット・キュレーションに関する最後のビデオとなる。 + +00:05.810 --> 00:08.120 +データセットのキュレーションにうんざりしていたのだろう。 + +00:08.120 --> 00:10.970 +さあ、 あと1枚で完成だ。 + +00:11.000 --> 00:16.370 +だから我々は、 誇るべき優れたデータセットを作り上げたのだ。 + +00:16.370 --> 00:19.610 +では、 最終チェックをしてみよう。 + +00:19.760 --> 00:23.030 +あの、 ひとつ質問していいですか? + +00:23.060 --> 00:24.710 +まあ、 とにかく聞いてみるよ。 + +00:24.710 --> 00:30.740 +商品の価格が関係している可能性はありますか? 
+ +00:30.740 --> 00:35.570 +これは、 そのアイテムの説明文の長さに相関している。 + +00:35.570 --> 00:41.960 +高価格のものほど情報量が多いという状況を想像するかもしれない。 + +00:41.960 --> 00:44.420 +そして、 それは私たちが理解する価値のあることだろう。 + +00:44.600 --> 00:48.170 +ああ、 それはモデルがすぐに学ぶことだからね。 + +00:48.170 --> 00:52.730 +そして、 伝統的なアプローチに目を向けることで、 私たちがどのようなアプローチを取るべきか、 おそらく良い感覚を与えてくれる。 + +00:52.730 --> 01:01.220 +これは、 X軸の各サイズを示す、 小さな散布図だ。 + +01:01.250 --> 01:07.970 +説明文の長さが表示され、 Y軸には価格が表示される。 + +01:08.060 --> 01:12.350 +では、 全サンプルのデータを見てみよう。 + +01:12.740 --> 01:13.700 +それでは、 どうぞ。 + +01:13.700 --> 01:15.950 +ここに素敵な写真がある。 + +01:15.950 --> 01:20.210 +だから、 この写真には40万点がある。 + +01:20.360 --> 01:23.330 +そして、 それは見るべきものだ。 + +01:23.330 --> 01:26.420 +消化すべきことがたくさんあるのがわかるだろう。 + +01:26.450 --> 01:34.490 +このような境界点では価格が高くなる傾向があるため、 このような興味深いパターンが起きていることがわかる。 + +01:34.490 --> 01:43.370 +799ドルの商品、 うーん、 もちろん、 もっと安い商品もたくさんあることはお分かりいただけるだろう。 + +01:43.370 --> 01:59.480 +そして、 説明文が長い商品ほど、 高価な商品である傾向があることがわかる。 + +01:59.480 --> 02:03.750 +しかし、 その点で有意な相関関係があるかどうかは明らかではない。 + +02:03.750 --> 02:06.750 +だから、 何かはあるんだけど、 大したことではないんだ。 + +02:06.750 --> 02:15.270 +そのため、 従来の機械学習では、 このようなことを調べようとしても、 おそらく大きな相関関係は見いだせないと思われる。 + +02:15.270 --> 02:23.610 +つまり、 データのさまざまな側面を理解するために思いつく図の一例です。 + +02:24.330 --> 02:27.510 +もうひとつ、 トークンについてもう少し話したい。 + +02:27.750 --> 02:34.830 +オープンソースのモデルに対して実際にトレーニングするようになったら、 トークンをもっとたくさん使うことになるだろうが、 + +02:34.830 --> 02:38.640 +今はトークンに注目する価値がある。 + +02:38.640 --> 02:48.630 +そこで、 この関数レポートを書きました。 この関数は、 アイテムを受け取り、 プロンプトを、 まず、 トレーニング中に使用される完全なトレーニングプロンプトを表示します。 + +02:48.630 --> 02:54.930 +そして、 そのプロンプトの最後の10個のトークンをデコードする。 + +02:54.930 --> 02:59.010 +つまり、 最後の10個のトークンに対応するテキストの断片を見ることになる。 + +02:59.010 --> 03:02.250 +そして、 なぜ最後の10人なのか不思議に思っているのなら、 すぐにわかるだろう。 + +03:02.250 --> 03:08.550 +では、 4万番というランダムな数字を選んで実行してみよう。 + +03:08.580 --> 03:09.060 +オーケー。 + +03:09.090 --> 03:12.840 +というわけで、 こんな文章で申し訳ない。 + +03:12.840 --> 03:18.390 +このプロンプトは、 LLMに送られて学ぶことになる。 + +03:18.630 --> 03:22.170 +ええと、 これは1ドル単位でいくらですかと聞かれそうですね。 + +03:22.170 --> 03:23.790 +そして、 それが説明文になる。 + 
+03:23.790 --> 03:30.090 +そして価格は、 そしてこれは1ドル未満を四捨五入した価格である。 + +03:30.180 --> 03:37.860 +項目コードを見ればわかるが、 トレーニングのプロンプトを作成する際、 1ドル未満を四捨五入している。 + +03:37.950 --> 03:44.520 +最後の10個のトークンを見れば、 何が起こっているかがわかるだろう。 + +03:44.550 --> 03:46.110 +その10トークンとは何ですか? + +03:46.110 --> 03:53.010 +そして、 最後の数トークンに関して、 価格が1つのトークンにマップされることを示したかった。 + +03:53.040 --> 04:07.410 +それは、 単語の開始で再びドルになる前に、 単語の開始スペースのトークンを取得することである。 + +04:07.680 --> 04:12.450 +そしてこれは、 私が言うように、 llamaトークナイザーの機能なのだ。 + +04:12.450 --> 04:18.000 +GPTと同様、 3桁の数字ごとに個別のトークンがある。 + +04:18.000 --> 04:21.120 +他のトークナイザーにはないものもある。 + +04:21.210 --> 04:27.120 +僕らのプロジェクトには必要ないことだけど、 後々のことを考えると少しシンプルになる。 + +04:27.180 --> 04:32.250 +そして、 その期間はトークンを1つ獲得し、 . 00はトークンを1つ獲得する。 + +04:32.280 --> 04:35.130 +別のサンプルをやってみよう。 + +04:36.930 --> 04:42.000 +全く別の、 あー、 全く別の場所で何かやろう。 + +04:42.720 --> 04:44.100 +万人。 + +04:44.280 --> 04:47.550 +そして、 これはむしろ安いものだ。 + +04:47.580 --> 04:51.930 +価格は9000ドル。 + +04:51.960 --> 04:58.110 +データセットの最後に近い398,000を選んでみよう。 + +04:58.620 --> 05:05.740 +そして、 これは......コイルオーバー・ダンパー・キット。 + +05:05.740 --> 05:10.240 +そしてこの価格は765ドル。 + +05:10.240 --> 05:15.430 +そして、 765が1つのトークンにマッピングされていることがもう一度わかるだろう。 + +05:15.430 --> 05:25.840 +というわけで、 このサンプルはもちろん一番安いものから順に並んでいる。 + +05:25.840 --> 05:28.990 +だから、 1ドル単位で四捨五入した。 + +05:28.990 --> 05:36.100 +これは、 サンプルの下位、 つまりウムな部分で最も安く、 サンプルの上位で最も高い順にソートされている。 + +05:36.100 --> 05:46.780 +そして、 ここに書いてあるように、 1から999までのすべての数字が1つのトークンにマッピングされている、 という効果が得られていることを自分で納得できるだろう。 + +05:46.780 --> 05:55.870 +そしてもう1度言いますが、 量、 ガンマ、 ファイの3つのトークナイザーを見れば、 そうではないことがわかるでしょう。 + +05:55.960 --> 06:02.350 +うーん、 これは後でちょっと便利なんだけど、 必須じゃないし、 絶対に後回しなんだ。 + +06:02.350 --> 06:08.620 +クァンタム・ジェミニ・スリーのような他のモデルを使って実験したい場合は、 単に入れ替えるだけで機能する。 + +06:08.650 --> 06:15.430 +ここでは、 3桁の数字に対応する1つのトークンではなく、 複数のトークンにマッピングされることがわかるだろう。 + +06:16.690 --> 06:20.260 +よし、 ほっと一息だ。 + +06:20.260 --> 06:22.630 +私たちはデータ・キュレーションを通してそれを成し遂げてきた。 + +06:22.630 --> 06:27.460 +最後の仕上げは、 ハブにアップロードすることだ。 + +06:27.460 --> 06:35.710 +まずはデータセットをシャッフルしてみよう。 安い順に並べ替えたのではまったく意味がない。 + 
+06:35.710 --> 06:38.650 +まず、 ごちゃごちゃしたデータセットが必要だ。 + +06:38.800 --> 06:44.350 +まず最初に、 ランダムシードを設定します。 これは、 常にまったく同じデータセットで作業していることを確認したいからで、 + +06:44.350 --> 06:51.340 +私が行うのとまったく同じことを再現して、 同じ結果を得ることができます。 + +06:51.520 --> 06:59.290 +私たちはランダムを使っている。 shuffleでシャッフルして、 最初の40万件をトレーニング・データセットとする。 + +06:59.290 --> 07:01.900 +そして次の2000をテストセットとする。 + +07:01.930 --> 07:03.220 +今、 聞いたよ。 + +07:03.250 --> 07:05.230 +データサイエンティストたちよ。 + +07:05.260 --> 07:11.380 +これは通常、 少なくとも5%か10%のテストデータセットを必要とする。 + +07:11.470 --> 07:17.350 +8000ものデータがあるんだから。 + +07:17.350 --> 07:21.490 +もちろん、 より大きなデータセットを得るためにサンプルを増やすこともできる。 + +07:21.520 --> 07:27.310 +テストに使うのは数百本程度だろうから、 必要ないだろう。 + +07:27.310 --> 07:30.160 +そうすることで、 非常に正確な結果が得られる。 + +07:30.160 --> 07:34.090 +そして、 より多くの相手とテストを続ければ、 収穫は減っていく。 + +07:34.090 --> 07:39.340 +このプロジェクトの目的にはこれで十分だが、 ベストプラクティスである。 + +07:39.370 --> 07:40.630 +ベストプラクティスかどうかは分からない。 + +07:40.660 --> 07:47.620 +少なくとも5%はテスト用データセットとして確保するのが一般的で、 前にお話ししたように、 + +07:47.620 --> 07:52.810 +テスト用と検証用に分けて5%を確保することもある。 + +07:52.870 --> 08:00.880 +この目的には必要ないが、 望むのであれば、 追加データセットとして管理することもできる。 + +08:01.030 --> 08:02.300 +うーん、 でもとにかく、 そうしよう。 + +08:02.300 --> 08:03.320 +それをごちゃ混ぜにする。 + +08:03.320 --> 08:08.630 +400,000のトレーニングデータセットと、 2000のテストセットに分かれている。 + +08:08.660 --> 08:14.480 +最初のテスト要素を見てみよう。 + +08:14.480 --> 08:19.640 +あなたが覚えているテストのプロンプトは、 答えを明らかにしないプロンプトです。 + +08:19.640 --> 08:25.310 +まずトレーニングのプロンプトを見て、 次にテストのプロンプトを見ます。 + +08:25.340 --> 08:27.470 +トレーニングプロンプトは答えを持っているものだ。 + +08:27.470 --> 08:31.310 +だから、 トレーニングのプロンプトには、 1ドル単位でいくらと書いてある。 + +08:31.310 --> 08:35.120 +デルファイ製の燃料ポンプモジュールだ。 + +08:35.390 --> 08:37.430 +うーん、 そうだね。 + +08:37.460 --> 08:37.910 +どうだろう。 + +08:37.910 --> 08:39.470 +227ドルである。 + +08:39.470 --> 08:41.300 +私だったら、 そんなことはまったくわからなかっただろう。 + +08:41.300 --> 08:50.240 +つまり、 これはトレーニングの一環としてLMに送られるものの例である。 + +08:50.480 --> 08:54.380 +では、 テストプロンプトを見てみよう。 + +08:54.410 --> 09:02.990 +今、 テストプロンプトは、 説明文はあるが値段はない、 使用されるものを見せようとしている。 + +09:02.990 --> 09:07.400 +これがテストセットの最初の項目だ。 + +09:07.400 
--> 09:09.350 +そうだ。 + +09:09.470 --> 09:22.430 +最初の250のテストポイントの価格分布を見てみましょう。 + +09:22.430 --> 09:26.750 +そして、 ここにはさまざまな価格の健康的な広がりがあるのがわかるだろう。 + +09:26.780 --> 09:33.410 +高価なものを扱えるモデルかどうかが試されるようなものが、 高いエリアにはたくさんある。 + +09:33.410 --> 09:42.530 +そして、 私たちのテスト・データ・セットでは、 価格のバラエティーに富んでいる。 + +09:43.340 --> 09:51.650 +さて、 最後に、 これを一連のトレーニング・プロンプトとテスト・プロンプトに変えますが、 これは、 + +09:51.650 --> 09:58.910 +先ほど見たプロンプトとテスト・プロンプトを、 価格とともに抜き出すだけです。 + +09:59.390 --> 10:03.590 +この小さなコードが、 ハグする顔にアップロードする。 + +10:03.620 --> 10:14.330 +データセットのFromdictを呼び出し、 それをデータセットのdictに入れることで、 抱きつき顔ハブに適したデータセットオブジェクトに変える。 + +10:15.050 --> 10:22.940 +そして最後に、 この行があなたのデータセットをハギング・フェイス・ハブにアップロードし、 あなたがそれを引き続き使用したり、 + +10:22.940 --> 10:26.000 +将来ダウンロードできるようにします。 + +10:26.000 --> 10:33.110 +でも、 もう走ったから走らせるつもりはないよ。 + +10:33.110 --> 10:35.300 +そして、 これはあなたのユーザー名を入れるためのものです。 + +10:35.300 --> 10:46.100 +これは僕のユーザーネームにアップロードしてあるんだ。 + +10:46.100 --> 10:51.800 +だから、 その方法でデータを取り出すこともできる。 + +10:51.830 --> 10:55.820 +もし、 このデータキュレーションをすべてショートカットしたいのであれば、 できればそうしたくないだろう。 + +10:56.210 --> 11:07.230 +そして最後に、 この列車をピックルファイルにして、 コレクションをテストします。 + +11:07.230 --> 11:12.270 +このリストをピクルスにしてファイルに保存しておくと、 + +11:12.300 --> 11:16.050 +将来のために読み込むことができる。 + +11:16.050 --> 11:23.520 +Pythonのピクルスに慣れている人なら、 Pythonのオブジェクトをファイルにダンプする超簡単な方法だ。 + +11:23.520 --> 11:28.710 +これを実行すると、 test dot pickleとtrain dot pickleという2つの新しいファイルができ、 + +11:28.710 --> 11:32.340 +トレーニング・データとテスト・データが格納される。 + +11:33.090 --> 11:36.900 +これでデータのキュレーション作業は完了した。 + +11:36.900 --> 11:44.820 +データセットをもっと調査し、 異なるデータ・ポイントをトークン化する練習をしたときに、 + +11:44.970 --> 11:59.490 +3桁の数字が常に1つのトークンにトークン化され、 それらのトークンの感覚を得ることができるかどうかを確認してください。 + +11:59.820 --> 12:03.720 +それでは、 またスライドをご覧いただきながら、 総括をしたいと思います。 diff --git a/week5/community-contributions/subtitles/srts/59472505/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472505/ko_KR.srt new file mode 100755 index 0000000..8d8b51a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472505/ko_KR.srt @@ -0,0 +1,469 @@ +WEBVTT + 
+00:00.260 --> 00:05.780 +좋은 소식은 이게 데이터 세트 큐레이션에 관한 마지막 영상이라는 거예요 + +00:05.810 --> 00:08.120 +데이터 세트 큐레이션이 지겨웠을 거예요 + +00:08.120 --> 00:10.970 +이제 하나만 더 하면 끝이에요 + +00:11.000 --> 00:16.370 +그래서 우린 아주 훌륭한 데이터 세트를 만들었어요 아주 자랑스럽게 여겨야 하죠 + +00:16.370 --> 00:19.610 +마지막으로 살짝 보죠 + +00:19.760 --> 00:23.030 +궁금한 게 하나 있어요 + +00:23.060 --> 00:24.710 +그래도 물어볼 거예요 + +00:24.710 --> 00:30.740 +물건의 가격과 관련이 있을까요? + +00:30.740 --> 00:35.570 +물건의 길이에 따라 관련이 있어요 + +00:35.570 --> 00:41.960 +가격이 높은 물건일수록 정보가 더 많은 상황을 상상할 수 있죠 + +00:41.960 --> 00:44.420 +그걸 이해해야 해요 + +00:44.600 --> 00:48.170 +모델에게 그런 걸 빨리 배울 수 있거든요 + +00:48.170 --> 00:52.730 +어떻게 접근할지 전통적인 접근법을 보면 좋은 감이 오죠 + +00:52.730 --> 01:01.220 +이건 멋진 산란도예요 엑스 축의 개별 크기를 보여줄 거예요 + +01:01.250 --> 01:07.970 +설명의 길이를 보여줄 거예요 y축에서는 가격을 보여주고요 + +01:08.060 --> 01:12.350 +전체 샘플 데이터 세트를 살펴보죠 + +01:12.740 --> 01:13.700 +자, 시작하죠 + +01:13.700 --> 01:15.950 +여기 잘 나온 사진이 있어요 + +01:15.950 --> 01:20.210 +이 그림에는 400,000점이 있어요 + +01:20.360 --> 01:23.330 +볼거리도 많고요 + +01:23.330 --> 01:26.420 +보시다시피 소화할 게 많아요 + +01:26.450 --> 01:34.490 +흥미로운 패턴이 일어나고 있어요 가격이 이런 경계선에서 더 자주 나타나죠 + +01:34.490 --> 01:43.370 +799달러짜리 상품도 있고요 보다시피 더 저렴한 제품도 많아요 + +01:43.370 --> 01:52.610 +분명히 상관관계가 있는 게 보이실 거예요 설명이 긴 물건들은 때때로 + +01:52.610 --> 01:59.480 +더 비싼 물건이라는 추세를 보이는 거죠 + +01:59.480 --> 02:03.750 +하지만 그 일과 관련해서 중요한 상관관계가 있는지는 확실하지 않아요 + +02:03.750 --> 02:06.750 +뭔가 있긴 한데 심각한 건 아니에요 + +02:06.750 --> 02:12.780 +전통적인 머신 러닝을 적용하면 큰 상관관계를 + +02:12.780 --> 02:15.270 +찾지 못할 거예요 + +02:15.270 --> 02:21.660 +일종의 다이어그램을 예로 들게요 데이터의 다양한 양상에 대한 통찰력을 얻기 + +02:21.660 --> 02:23.610 +위한 거죠 Get it + +02:24.330 --> 02:27.510 +잠시 더 얘기하고 싶은 건 토큰이에요 + +02:27.750 --> 02:34.830 +오픈 소스 모델에 대해 실제로 훈련하게 되면 Get 토큰으로 더 많이 작업할 겁니다 + +02:34.830 --> 02:38.640 +지금 토큰을 살펴볼 가치가 있어요 + +02:38.640 --> 02:44.790 +함수 리포트를 작성했는데 아이템을 하나 가져와서 프롬프트를 프린트합니다 + +02:44.790 --> 02:48.630 +훈련 때 사용될 전체 훈련 프롬프트죠 + +02:48.630 --> 02:54.930 +마지막 10개의 토큰을 프롬프트에 입력하면 디코딩이 되는 것이죠 + +02:54.930 --> 02:59.010 +마지막 10개의 토큰까지의 행적을 
볼 수 있어요 + +02:59.010 --> 03:02.250 +왜 마지막 10개가 있는지 궁금하시다면 잠시 후에 보여드리죠 + +03:02.250 --> 03:08.550 +무작위로 40,000번을 선택해 실행해보죠 + +03:08.580 --> 03:09.060 +네 + +03:09.090 --> 03:12.840 +여기 이건∙∙∙ 여기 있는 텍스트 전부 죄송해요 + +03:12.840 --> 03:18.390 +LLM으로 보내져 배울 수 있는 프롬프트죠 + +03:18.630 --> 03:22.170 +최대한 비용이 많이 들 거라고 할 거예요 + +03:22.170 --> 03:23.790 +그런 다음 설명을 get 하죠 + +03:23.790 --> 03:30.090 +가격은 이겁니다 가격은 1달러로 반올림한 거고요 + +03:30.180 --> 03:36.750 +아이템 코드를 보시면 훈련 프롬프트를 만들 때 가장 가까운 달러로 회전하는 것을 + +03:36.780 --> 03:37.860 +알 수 있죠 + +03:37.950 --> 03:44.520 +마지막 10개의 토큰을 보면 어떻게 된 건지 알 수 있죠 아래에 프린트했어요 + +03:44.550 --> 03:46.110 +그 10패는 뭐죠? + +03:46.110 --> 03:53.010 +이제 마지막 몇 개의 토큰을 보여드릴게요 가격은 하나의 토큰에 매핑되고 + +03:53.040 --> 04:02.130 +워드 스페이스의 시작을 가진 토큰을 받습니다 워드의 시작과 함께 달러의 시작을 가진 토큰이죠 + +04:02.130 --> 04:07.410 +그리고 숫자 34는 하나의 특정 토큰에 매핑되고요 + +04:07.680 --> 04:12.450 +이건 라마 토큰라이저의 기능 중 하나예요 + +04:12.450 --> 04:18.000 +GPT처럼 세 자리 숫자마다 토큰을 갖고 있어요 + +04:18.000 --> 04:21.120 +다른 모델의 토큰이 그렇지 않은 경우도 있어요 + +04:21.210 --> 04:27.120 +우리 프로젝트에 필수적인 건 아니지만 나중에 비트를 보면 좀 더 간단해질 거예요 + +04:27.180 --> 04:32.250 +마침표는 토큰 하나와 더불어∙∙∙ 더불어∙∙∙ 0은 토큰을 받아요 + +04:32.280 --> 04:35.130 +샘플을 하나 더 해 보죠 + +04:36.930 --> 04:42.000 +완전히 다른 장소에서 뭔가 해 보자고요 + +04:42.720 --> 04:44.100 +10,000번요 + +04:44.280 --> 04:47.550 +이건 가격이 저렴해요 + +04:47.580 --> 04:51.930 +9천 달러인데 가격이 9천 달러예요 + +04:51.960 --> 04:58.110 +데이터셋 398,000의 끝에 있는 것으로 가보죠 + +04:58.620 --> 05:05.740 +이건 코일오버 댐퍼 키트예요 + +05:05.740 --> 05:10.240 +가격은 765달러예요 + +05:10.240 --> 05:15.430 +또 한 번 765가 토큰 하나에 매핑되는 걸 보실 수 있어요 + +05:15.430 --> 05:22.090 +이제 만족하셔야 해요 이 샘플은 가장 싼 것부터 분류했어요 각 + +05:22.120 --> 05:25.840 +항목의 샘플을 다 살펴봤거든요 + +05:25.840 --> 05:28.990 +가장 가까운 달러로 동그랗게 돌려요 + +05:28.990 --> 05:35.170 +가장 싼 제품은 낮은 제품군으로 분류하고 가장 비싼 제품은 높은 제품군으로 + +05:35.170 --> 05:36.100 +분류해요 + +05:36.100 --> 05:43.390 +이렇게 효과를 얻는다고 만족하실 수 있습니다 1부터 999까지의 숫자는 모두 토큰 하나에 + +05:43.390 --> 05:46.780 +매핑됩니다 여기 나와 있는 것처럼요 + +05:46.780 --> 05:54.190 +다시 한번 말씀드리지만 수량이나 감마 혹은 피 쓰리 토큰라이저를 
보면 그렇지 않다는 + +05:54.190 --> 05:55.870 +걸 알 수 있어요 + +05:55.960 --> 06:02.350 +나중에 비트가 좀 유용할 것 같긴 한데 꼭 필요한 건 아니고 나중엔 꼭 필요해요 + +06:02.350 --> 06:07.060 +양자 제미니 3호 같은 다른 모델을 실험하고 싶다면 스위치만 + +06:07.090 --> 06:08.620 +켜면 돼요 + +06:08.650 --> 06:15.430 +여기 보면 여러 개의 토큰에 매핑되어 있습니다. 하나의 3자리 토큰이 아니라요. + +06:16.690 --> 06:20.260 +안도의 한숨을 내쉬네요 + +06:20.260 --> 06:22.630 +데이터 큐레이션을 통과했어요 + +06:22.630 --> 06:27.460 +마지막 단계는 마무리하고 허브에 업로드 하는 거죠 + +06:27.460 --> 06:33.520 +가장 먼저 할 일은 데이터 세트를 섞는 겁니다 가장 싼 순서대로 분류하면 + +06:33.520 --> 06:35.710 +전혀 좋지 않으니까요 + +06:35.710 --> 06:38.650 +먼저 잘 뒤섞인 데이터 집합이 필요해요 + +06:38.800 --> 06:44.350 +먼저 무작위 씨앗을 설정했어요 항상 정확히 같은 데이터로 작업하도록 확실히 하려고요 + +06:44.350 --> 06:50.230 +그래야 제가 하려는 것과 정확히 같은 걸 복제해서 같은 결과를 얻을 수 있으니까요 get + +06:50.260 --> 06:51.340 +it + +06:51.520 --> 06:59.290 +랜덤을 사용해요 섞어서 섞은 다음 첫 400,000개를 훈련 데이터로 가져가요 + +06:59.290 --> 07:01.900 +그 다음 2000년은 테스트 세트죠 + +07:01.930 --> 07:03.220 +이제 알겠어요 + +07:03.250 --> 07:05.230 +데이터 과학자들은 울죠 + +07:05.260 --> 07:11.380 +보통 테스트 데이터는 최소 5%에서 10%가 필요해요 + +07:11.470 --> 07:16.270 +얼마든지 그렇게 하셔도 돼요 여기 데이터 포인트가 8,000개 + +07:16.300 --> 07:17.350 +있거든요 + +07:17.350 --> 07:21.490 +더 많은 샘플을 채취해 더 큰 데이터를 얻을 수도 있죠 + +07:21.520 --> 07:27.310 +우리한테는 필요 없을 거예요 왜냐하면 테스트에 몇백 개만 사용할 거니까요 + +07:27.310 --> 07:30.160 +그럼 아주 정확한 결과가 나오죠 + +07:30.160 --> 07:34.090 +계속 더 많이 시험하면 결과도 감소하죠 Get it + +07:34.090 --> 07:39.340 +이 프로젝트의 목적에는 충분하지만 최선의 관행이죠 + +07:39.370 --> 07:40.630 +최선의 관행인지는 모르겠어요 + +07:40.660 --> 07:47.620 +일반적인 관행은 적어도 5%를 테스트 데이터 집합에 비축하는 것이고 때로는 5%를 + +07:47.620 --> 07:52.810 +테스트에 비축하고 5%를 유효성 검증에 비축하는 것이죠 + +07:52.870 --> 07:58.930 +이 목적에는 필요 없지만 원한다면 얼마든지 할 수 있어요 관리할 추가 데이터 + +07:58.930 --> 08:00.880 +집합으로 가질 수 있죠 + +08:01.030 --> 08:02.300 +어쨌든 그렇게 할 거예요 + +08:02.300 --> 08:03.320 +전부 뒤섞을 거예요 + +08:03.320 --> 08:08.630 +훈련 데이터 400,000개와 테스트 세트 2000개로 나뉘었죠 + +08:08.660 --> 08:14.480 +그럼 첫 번째 테스트 요소를 보죠 + +08:14.480 --> 08:19.640 +여러분이 기억하는 테스트 프롬프트는 답을 공개하지 않은 프롬프트죠 + +08:19.640 --> 08:24.680 +이게 전송될 
프롬프트∙∙∙ 죄송합니다 훈련 프롬프트를 먼저 보고 테스트 프롬프트를 + +08:24.680 --> 08:25.310 +보죠 + +08:25.340 --> 08:27.470 +훈련 시기가 해답을 갖고 있어요 + +08:27.470 --> 08:31.310 +훈련용 문자가 가장 가까운 비용으로 얼마가 드는지를 보여주죠 + +08:31.310 --> 08:35.120 +델포이 연료 펌프 모듈이에요 + +08:35.390 --> 08:37.430 +그리고 네 + +08:37.460 --> 08:37.910 +어때요? + +08:37.910 --> 08:39.470 +227달러예요 + +08:39.470 --> 08:41.300 +전 전혀 몰랐을 거예요 + +08:41.300 --> 08:47.450 +이건 훈련의 일부로 LM으로 보내질 것의 예입니다 설명과 + +08:47.450 --> 08:50.240 +가격이 포함돼 있거든요 + +08:50.480 --> 08:54.380 +테스트 프롬프트를 보죠 + +08:54.410 --> 09:01.280 +테스트 프롬프트는 사용될 뭔가를 보여줄 겁니다 설명은 있지만 + +09:01.280 --> 09:02.990 +가격은 없죠 + +09:02.990 --> 09:07.400 +이게 테스트 세트의 첫 번째 항목이에요 + +09:07.400 --> 09:09.350 +자, 됐어요 + +09:09.470 --> 09:17.960 +첫 250가지 테스트 포인트들의 가격 분배를 빠르게 살펴봅시다. 왜냐하면 이 포인트들은 모델 + +09:17.960 --> 09:22.430 +테스트에 가장 많이 사용되는 포인트들이거든요. + +09:22.430 --> 09:26.750 +다양한 가격대가 잘 분포돼 있어요 + +09:26.780 --> 09:33.410 +높은 영역에서는 이 모델이 비싼 물건을 다룰 수 있는지 테스트할 것들이 많아요 + +09:33.410 --> 09:41.360 +그리고 다수가 저렴한 제품으로 테스트 데이터에 다양한 가격대가 포함되어 + +09:41.360 --> 09:42.530 +있죠 + +09:43.340 --> 09:51.650 +자, 마지막으로 이제 이걸 일련의 훈련과 테스트 프롬프트로 바꿀게요 + +09:51.650 --> 09:58.910 +방금 본 테스트 프롬프트와 가격표를 뽑아내는 거죠 + +09:59.390 --> 10:03.590 +이 코드는 포옹하는 얼굴에 업로드 돼요 + +10:03.620 --> 10:10.820 +안는 얼굴 허브에 적합한 데이터셋 객체로 바꿀 겁니다 데이터셋에 대한 Fromex를 호출하고 + +10:10.820 --> 10:14.330 +dataset 독감에 넣는 거죠 + +10:15.050 --> 10:22.940 +마지막으로 이 라인은 데이터 세트를 안아주는 페이스 허브에 업로드 합니다 계속 사용하거나 + +10:22.940 --> 10:26.000 +앞으로 다운로드 할 수 있도록요 + +10:26.000 --> 10:33.110 +get me fine 튜닝이 끝나면요 하지만 이미 작동했으니 실행하진 않을 거예요 + +10:33.110 --> 10:35.300 +그리고 이건 당신의 사용자이름을 입력하세요. 
+ +10:35.300 --> 10:46.100 +업로드해 놨어요, 제 어, 죄송합니다 제 사용자 이름에 업로드해 놨어요 + +10:46.100 --> 10:51.800 +그렇게 데이터를 검색할 수도 있어요 + +10:51.830 --> 10:55.820 +이 모든 데이터 큐레이션을 합선시키고 싶다면요 그런 일은 없길 바라지만요 + +10:56.210 --> 11:07.230 +마지막으로 이 기차를 돌려서 피클 파일로 만들어 볼게요 + +11:07.230 --> 11:12.270 +절여서 파일에 넣을 거예요 앞으로 로드할 수 있게요 목록 작성하는 + +11:12.300 --> 11:16.050 +이 복잡한 과정을 다시 거칠 필요가 없도록요 + +11:16.050 --> 11:23.520 +파이썬 을 잘 아신다면 파이썬 을 파일에 덤프하는 아주 쉬운 방법이에요 + +11:23.520 --> 11:28.710 +이제 실행하면 새 파일이 두 개 생기죠 test.pickle과 Tack.pickle이요 + +11:28.710 --> 11:32.340 +트레이닝과 테스트 데이터 세트를 포함하죠 + +11:33.090 --> 11:36.900 +이로써 데이터 큐레이션을 마쳤어요 + +11:36.900 --> 11:44.820 +데이터 집합을 더 살펴보고 확인하기 위해 이 작업을 진행할 텐데요. 서로 + +11:44.970 --> 11:53.220 +다른 데이터 포인트를 토큰화하는 작업입니다. 그러면 항상 세 자릿수가 하나의 + +11:53.220 --> 11:59.490 +토큰으로 토큰화되고 토큰의 감각을 얻을 수 있죠. + +11:59.820 --> 12:03.720 +그럼 마무리로 슬라이드로 다시 만나죠 diff --git a/week5/community-contributions/subtitles/srts/59472693/en_US.srt b/week5/community-contributions/subtitles/srts/59472693/en_US.srt new file mode 100755 index 0000000..32e0bbf --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472693/en_US.srt @@ -0,0 +1,70 @@ +WEBVTT + +00:01.760 --> 00:02.480 +Friends. + +00:02.510 --> 00:05.390 +I am absolutely exhausted. + +00:05.390 --> 00:09.620 +I am exhausted and a little tiny bit traumatized. + +00:09.950 --> 00:13.520 +And you are somewhat to blame for this, as you will discover. + +00:13.550 --> 00:16.760 +You will discover in a moment you will find out why. + +00:16.790 --> 00:21.050 +But first, let's talk about what we have in store for today. + +00:21.500 --> 00:25.670 +So we are going to be raising the bar on our baselines. + +00:25.670 --> 00:29.360 +I'm just going to take a moment to remind you what you can already do. 
+
+00:29.390 --> 00:36.230
+Of course, generating text and generating code from combinations of frontier models with AI assistants,
+
+00:36.230 --> 00:44.240
+with tools, and also open source systems using the Hugging Face Transformers library, you can use Lang
+
+00:44.240 --> 00:51.110
+chain to build a RAG pipeline, and now you can also curate data and you can curate it.
+
+00:51.110 --> 00:52.700
+Finally, if I may say so.
+
+00:52.700 --> 00:57.740
+And you can also make a baseline model using some rather foolish techniques.
+
+00:57.740 --> 01:04.040
+But then using traditional machine learning linear regression, including both feature engineering and
+
+01:04.040 --> 01:11.030
+bag of words, and then onto more sophisticated techniques using word2vec and then adding in support
+
+01:11.030 --> 01:14.270
+vector machines and then random forests.
+
+01:14.330 --> 01:16.250
+Quite a trek it's been.
+
+01:16.250 --> 01:17.750
+So today.
+
+01:17.780 --> 01:23.510
+Today we are going to now take the framework we put together and put it against frontier models.
+
+01:23.510 --> 01:30.080
+And this will sort of really capture all of the steps it takes to take a proper business problem, um,
+
+01:30.140 --> 01:35.750
+understand the data and then present it to frontier models and compare their performance.
+
+01:35.750 --> 01:41.870
+So it's an exciting moment for us, and we will get right to it.
+
+01:41.930 --> 01:46.130
+And I will see you over at JupyterLab in a moment.
diff --git a/week5/community-contributions/subtitles/srts/59472693/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472693/ja_JP.srt new file mode 100755 index 0000000..5758dbd --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472693/ja_JP.srt @@ -0,0 +1,67 @@ +WEBVTT + +00:01.760 --> 00:02.480 +友人だ。 + +00:02.510 --> 00:05.390 +本当に疲れ果てている。 + +00:05.390 --> 00:09.620 +疲れきっているし、 ちょっとだけトラウマになっている。 + +00:09.950 --> 00:13.520 +そして、 その責任はあなたにもある。 + +00:13.550 --> 00:16.760 +その理由はすぐにわかるだろう。 + +00:16.790 --> 00:21.050 +その前に、 今日の予定について話そう。 + +00:21.500 --> 00:25.670 +だから、 ベースラインのハードルを上げていくつもりだ。 + +00:25.670 --> 00:29.360 +すでにできることを思い出してほしい。 + +00:29.390 --> 00:36.230 +もちろん、 AIアシスタントを使ったフロンティアモデルの組み合わせからテキストを生成したり、 ツールを使ってコードを生成したり、 + +00:36.230 --> 00:44.240 +抱き合わせ顔変換ライブラリを使ったオープンソースシステムもあり、 ラングチェーンを使ってラグパイプラインを構築することもできるし、 + +00:44.240 --> 00:51.110 +今ではデータをキュレーションすることもできる。 + +00:51.110 --> 00:52.700 +最後に、 言わせてもらえば。 + +00:52.700 --> 00:57.740 +また、 かなり馬鹿げたテクニックを使ってベースラインモデルを作ることもできる。 + +00:57.740 --> 01:04.040 +しかし、 特徴工学とバッグ・オブ・ワードの両方を含む伝統的な機械学習の線形回帰を使い、 + +01:04.040 --> 01:14.270 +さらにワード2ベクトルを使ったより高度な技術を使い、 サポートベクターマシン、 そしてランダムフォレストを加える。 + +01:14.330 --> 01:16.250 +かなりの旅だった。 + +01:16.250 --> 01:17.750 +だから今日も。 + +01:17.780 --> 01:23.510 +今日は、 私たちが組み立てたフレームワークを、 フロンティア・モデルに当てはめてみよう。 + +01:23.510 --> 01:30.080 +そしてこれは、 適切なビジネス上の問題を取り上げ、 データを理解し、 それをフロンティア・モデルに提示し、 + +01:30.140 --> 01:35.750 +そのパフォーマンスを比較するために必要なすべてのステップを実際に捉えることになる。 + +01:35.750 --> 01:41.870 +だから、 我々にとってはエキサイティングな瞬間だ。 + +01:41.930 --> 01:46.130 +それではまたJupyterLabでお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/59472693/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472693/ko_KR.srt new file mode 100755 index 0000000..353a21c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472693/ko_KR.srt @@ -0,0 +1,70 @@ +WEBVTT + +00:01.760 --> 00:02.480 +친구요 + +00:02.510 --> 00:05.390 +완전히 지쳤어요 + +00:05.390 --> 00:09.620 +너무 지쳤고 약간 비트가 깨졌어요 + 
+00:09.950 --> 00:13.520 +곧 알게 되겠지만 당신 탓도 있어요 + +00:13.550 --> 00:16.760 +곧 그 이유를 알게 될 거예요 + +00:16.790 --> 00:21.050 +하지만 먼저 오늘 준비한 걸 얘기해 보죠 + +00:21.500 --> 00:25.670 +기준치에 대한 기준을 높일 거예요 + +00:25.670 --> 00:29.360 +당신이 이미 할 수 있는 걸 상기시켜 줄게요 + +00:29.390 --> 00:36.230 +물론 프론티어 모델 조합으로 텍스트와 코드를 생성하고 인공지능 비서를 대동하고 도구를 + +00:36.230 --> 00:44.240 +활용할 수 있습니다 얼굴 트랜스포머 라이브러리를 이용한 오픈 소스 시스템도 있죠 랭체인을 이용해 + +00:44.240 --> 00:51.110 +랙 파이프라인을 만들 수도 있고 데이터 큐레이팅과 큐레이팅도 할 수 있죠 + +00:51.110 --> 00:52.700 +드디어 말이죠 + +00:52.700 --> 00:57.740 +바보 같은 기술로 기본 모델을 만들 수도 있고요 + +00:57.740 --> 01:04.040 +기능 공학이나 단어 모음을 포함한 전통적인 머신 러닝 선형 회귀를 + +01:04.040 --> 01:11.030 +이용하고 워드 2Vc를 이용한 더 정교한 기술을 이용하고 벡터 머신과 임의 + +01:11.030 --> 01:14.270 +포레스트 지원을 추가하죠 + +01:14.330 --> 01:16.250 +꽤 먼 길이었어요 + +01:16.250 --> 01:17.750 +오늘요 + +01:17.780 --> 01:23.510 +오늘은 기존의 프레임워크를 개척 시대의 모델에 적용해 볼 거예요 Put + +01:23.510 --> 01:30.080 +적절한 사업 문제를 해결하는 데 필요한 모든 단계를 포착할 수 있을 거예요 데이터를 + +01:30.140 --> 01:35.750 +이해하고 개척지 모델에 제시해 성능을 비교하는 거죠 + +01:35.750 --> 01:41.870 +정말 기대되네요 get it으로 바로 들어갈게요 + +01:41.930 --> 01:46.130 +잠시 후 유피터랩에서 뵙죠 diff --git a/week5/community-contributions/subtitles/srts/59472873/en_US.srt b/week5/community-contributions/subtitles/srts/59472873/en_US.srt new file mode 100755 index 0000000..7df257a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472873/en_US.srt @@ -0,0 +1,157 @@ +WEBVTT + +00:00.590 --> 00:07.130 +So it's quite an adventure that we had at the frontier of what's capable with Llms today, solving a + +00:07.130 --> 00:15.140 +particular problem that required world knowledge and to take you through what we saw from the performance + +00:15.140 --> 00:18.710 +of everything as a reminder from last time we started. + +00:18.830 --> 00:22.490 +Well, actually, we started with the random model, but we'll forget about that because that was silly. + +00:22.520 --> 00:28.760 +We're proper first starting point was a constant model that just predicted an average number. 
+
+00:29.150 --> 00:32.300
+We obviously were able to do better, but not that much better.
+
+00:32.300 --> 00:37.850
+With a model that used feature engineering, you may have improved on that with better features.
+
+00:38.060 --> 00:45.470
+Um, but our best one was a random forest model based on, not a bag of words,
+
+00:45.500 --> 00:47.990
+a word2vec vectorized,
+
+00:48.050 --> 00:53.690
+uh, look at the prompts with 400 dimensional vectors.
+
+00:53.840 --> 00:55.610
+And that brought our error,
+
+00:55.640 --> 01:01.740
+the average difference between the prediction and the actual price of a product based on its description,
+
+01:01.740 --> 01:09.240
+down to $97 after being trained on 400,000 example data points.
+
+01:09.690 --> 01:14.010
+We then, uh, unveiled the human today.
+
+01:14.010 --> 01:15.810
+That was our first model.
+
+01:15.930 --> 01:20.490
+Uh, and the human got 127 in terms of error.
+
+01:20.490 --> 01:27.810
+So you'll see, I was able to, uh, at least do better than the very primitive feature engineering.
+
+01:27.810 --> 01:30.270
+And at least I did better than constant.
+
+01:30.270 --> 01:34.530
+I think I wouldn't have, uh, I might not have included the whole result if I hadn't done better than
+
+01:34.530 --> 01:36.060
+a constant number.
+
+01:36.360 --> 01:43.260
+Uh, but, uh, obviously, uh, the next, uh, Claude clearly, uh, did significantly better than
+
+01:43.260 --> 01:43.470
+me.
+
+01:43.470 --> 01:48.390
+And Claude was very, very similar to Random Forest, uh, so very much on par.
+
+01:48.390 --> 01:53.430
+And again, one has to bear in mind, Claude is doing this without seeing any training data.
+
+01:53.430 --> 01:56.490
+It's just purely based on its world knowledge.
+
+01:56.490 --> 01:58.560
+And then being given this product.
+
+01:58.600 --> 02:03.550
+And I can tell you from bitter personal experience that that is a challenging task.
+ +02:04.210 --> 02:13.870 +But GPT four mini did better and got down to an $80 error, and GPT four did even better yet and brought + +02:13.870 --> 02:17.020 +it down to $76 in terms of the difference. + +02:17.020 --> 02:23.620 +So it shows you that out of the box, working with frontier models and APIs, you can build solutions + +02:23.620 --> 02:27.280 +to problems, even problems which feel like they are regression problems. + +02:27.280 --> 02:27.670 +They're not. + +02:27.670 --> 02:29.410 +They're numerical problems. + +02:29.560 --> 02:30.160 +They're not. + +02:30.190 --> 02:36.160 +They don't necessarily naturally sound like they should be ones where just text completion will be able + +02:36.160 --> 02:36.820 +to solve them. + +02:36.820 --> 02:46.870 +But even given that kind of problem still out of the box, GPT four mini is able to outperform a random + +02:46.870 --> 02:52.360 +forest model, a traditional machine learning model with 400,000 training data points. + +02:52.360 --> 03:00.130 +So it just goes to show you how powerful these models are and how they can be applied to so many types + +03:00.130 --> 03:01.510 +of commercial problem. + +03:02.320 --> 03:09.640 +But with that, we can now finally move on to the world of training. + +03:09.670 --> 03:16.720 +The next subject is going to be about how we take this further, by fine tuning a frontier model to + +03:16.750 --> 03:20.680 +take what it's got and do better with training examples. + +03:20.710 --> 03:22.870 +The thing that it hasn't had so far. + +03:22.870 --> 03:25.780 +So that is a big and exciting topic. + +03:25.780 --> 03:29.530 +It will then complete this week before next week. + +03:29.530 --> 03:36.790 +We take it to a whole different world where we try and fine tune our own open source model to see if + +03:36.790 --> 03:42.220 +we can compete, bearing in mind that we'll be dealing with something with massively fewer parameters. 
+ +03:42.220 --> 03:44.350 +So a very different world. + +03:44.590 --> 03:51.040 +And to see whether or not we have a hope of beating traditional machine learning or frontier models. + +03:51.070 --> 03:52.930 +Lots to be excited about. + +03:53.020 --> 03:57.280 +But first, I will see you tomorrow for fine tuning. diff --git a/week5/community-contributions/subtitles/srts/59472873/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472873/ja_JP.srt new file mode 100755 index 0000000..058e841 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472873/ja_JP.srt @@ -0,0 +1,127 @@ +WEBVTT + +00:00.590 --> 00:07.130 +Llmsで可能なことのフロンティアで、 世界の知識を必要とする特殊な問題を解決し、 + +00:07.130 --> 00:18.710 +前回のリマインダーのように、 すべてのパフォーマンスから見えたものをお見せする。 + +00:18.830 --> 00:22.490 +まあ、 実際はランダムモデルから始めたんだけど、 それはバカバカしいから忘れよう。 + +00:22.520 --> 00:28.760 +最初の出発点は、 ただ平均的な数字を予測する一定のモデルだった。 + +00:29.150 --> 00:32.300 +もっとうまくやれたのは確かだが、 それ以上ではなかった。 + +00:32.300 --> 00:37.850 +フィーチャーエンジニアリングを使ったモデルでは、 より良い機能でそれを改善したかもしれない。 + +00:38.060 --> 00:45.470 +ええと、 でも私たちのベストは、 単語の袋ではなく、 ランダムフォレストモデルをベースにしたものでした。 + +00:45.500 --> 00:47.990 +vecベクトル化に一言。 + +00:48.050 --> 00:53.690 +400次元ベクトルのプロンプトを見てください。 + +00:53.840 --> 00:55.610 +そして、 それがエラーを招いた。 + +00:55.640 --> 01:09.240 +400,000例のデータで学習させた結果、 商品の説明に基づく予測値と実際の価格の差は平均97ドルまで縮小した。 + +01:09.690 --> 01:14.010 +そして、 今日、 その人間のお披露目をした。 + +01:14.010 --> 01:15.810 +それが最初のモデルだった。 + +01:15.930 --> 01:20.490 +エラーという点では、 人間が127点。 + +01:20.490 --> 01:27.810 +だから、 原始的なフィーチャーエンジニアリングよりは、 少なくともうまくやれたと思う。 + +01:27.810 --> 01:30.270 +そして少なくとも、 コンスタントな成績よりは良かった。 + +01:30.270 --> 01:36.060 +一定数以上の成績が残せなければ、 全試合を収録しなかったかもしれない。 + +01:36.360 --> 01:43.470 +でも、 明らかに、 次のクロードは、 僕よりかなり良かった。 + +01:43.470 --> 01:48.390 +そしてクロードはランダムフォレストと非常によく似ていた。 + +01:48.390 --> 01:53.430 +そしてまた、 クロードはトレーニングデータを見ずにこれをやっていることを念頭に置かなければならない。 + +01:53.430 --> 01:56.490 +純粋に世界の知識に基づいているだけだ。 + +01:56.490 --> 01:58.560 +そしてこの製品を渡された。 + +01:58.600 --> 02:03.550 +個人的な苦い経験から言わせてもらえば、 それは難しい仕事だ。 + 
+02:04.210 --> 02:17.020 +しかし、 GPT4ミニの方がうまくいき、 80ドルの誤差になった。 GPT4はさらにうまくいき、 その差は76ドルまで縮まった。 + +02:17.020 --> 02:27.280 +つまり、 フロンティア・モデルとAPIを使うことで、 問題に対するソリューションを構築することができるということだ。 + +02:27.280 --> 02:27.670 +そうではない。 + +02:27.670 --> 02:29.410 +数値的な問題だ。 + +02:29.560 --> 02:30.160 +そうではない。 + +02:30.190 --> 02:36.820 +必ずしもテキスト補完だけで解決できるようなものではありません。 + +02:36.820 --> 02:52.360 +しかし、 GPT four miniは、 そのような問題であっても、 従来の機械学習モデルであるランダムフォレスト・モデル(400,000の学習データを持つモデル)を凌駕することができる。 + +02:52.360 --> 03:01.510 +つまり、 これらのモデルがいかに強力で、 いかに多くの種類の商業的問題に適用できるかを示しているのだ。 + +03:02.320 --> 03:09.640 +しかし、 これでようやくトレーニングの世界に移ることができる。 + +03:09.670 --> 03:20.680 +次のテーマは、 フロンティアモデルを微調整することによって、 それをさらに発展させ、 学習例を使ってより良い結果を出す方法についてです。 + +03:20.710 --> 03:22.870 +これまでなかったもの。 + +03:22.870 --> 03:25.780 +だから、 これは大きな、 そしてエキサイティングな話題なんだ。 + +03:25.780 --> 03:29.530 +来週までに今週中に完了する予定だ。 + +03:29.530 --> 03:36.790 +私たちはそれをまったく別の世界に持ち込み、 自分たちのオープンソースモデルを微調整し、 + +03:36.790 --> 03:42.220 +競合できるかどうかを試す。 + +03:42.220 --> 03:44.350 +だから、 まったく違う世界だ。 + +03:44.590 --> 03:51.040 +そして、 従来の機械学習やフロンティアモデルに勝てる見込みがあるかどうかを見極める。 + +03:51.070 --> 03:52.930 +興奮することがたくさんある。 + +03:53.020 --> 03:57.280 +その前に、 微調整のためにまた明日会おう。 diff --git a/week5/community-contributions/subtitles/srts/59472873/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472873/ko_KR.srt new file mode 100755 index 0000000..d4cd336 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472873/ko_KR.srt @@ -0,0 +1,151 @@ +WEBVTT + +00:00.590 --> 00:07.130 +오늘날의 Lms는 정말 대단한 모험이었습니다 세계 + +00:07.130 --> 00:15.140 +지식이 필요한 문제를 해결했고 지난 시간에 살펴본 성능을 다시 + +00:15.140 --> 00:18.710 +한번 보여드릴게요 + +00:18.830 --> 00:22.490 +사실 무작위 모델로 시작했지만 바보 같았으니 잊어버리기로 하죠 + +00:22.520 --> 00:28.760 +우선 상수 모델로 평균 수치를 예측했어요 + +00:29.150 --> 00:32.300 +더 잘할 수 있었지만 그렇게 많이는 아니었어요 + +00:32.300 --> 00:37.850 +기능 공학 모델을 사용하면 더 나은 기능으로 향상시킬 수 있어요 + +00:38.060 --> 00:45.470 +하지만 제일 좋았던 건 아무 말이나 갖다 붙인 숲속 모델이었어요 + +00:45.500 --> 00:47.990 +벡터라이즈에게 전해요 + +00:48.050 --> 00:53.690 +400차원 
벡터가 있는 프롬프트 좀 보세요 + +00:53.840 --> 00:55.610 +그게 우리의 실수를 가져왔죠 + +00:55.640 --> 01:01.740 +설명에 근거한 제품의 예측과 실제 가격 사이의 평균 차이는 400,000개의 + +01:01.740 --> 01:09.240 +예시 데이터 포인트에 대해 훈련된 후의 97달러까지로 줄어들죠 + +01:09.690 --> 01:14.010 +오늘 그 인간을 공개했어요 + +01:14.010 --> 01:15.810 +그게 첫 번째 모델이었어요 + +01:15.930 --> 01:20.490 +참가자는 오차로 127점을 받았어요 + +01:20.490 --> 01:27.810 +그래서 제가 적어도 원시적인 기능 공학보다는 잘 만들 수 있었어요 + +01:27.810 --> 01:30.270 +꾸준한 것보다는 나았네요 + +01:30.270 --> 01:34.530 +제가 상수보다 잘하지 않았다면 전체 결과를 포함하지 + +01:34.530 --> 01:36.060 +않았을 거예요 + +01:36.360 --> 01:43.470 +하지만 다음 클로드는 저보다 훨씬 잘했어요 + +01:43.470 --> 01:48.390 +클로드는 아무 숲이나 숲과 아주 비슷했어요 아주 비슷했죠 + +01:48.390 --> 01:53.430 +다시 한번 명심해야 할 점은 클로드는 훈련 데이터를 보지 않고 이걸 하고 있다는 거죠 + +01:53.430 --> 01:56.490 +순전히 세계 지식에 근거한 거예요 + +01:56.490 --> 01:58.560 +이 제품을 받았을 때요 + +01:58.600 --> 02:03.550 +씁쓸한 개인적인 경험으로 말씀드리자면 정말 어려운 작업이에요 + +02:04.210 --> 02:13.870 +GPT 4 미니는 더 잘해서 80달러까지 낮췄습니다 GPT 4 미니는 더 잘해서 + +02:13.870 --> 02:17.020 +76달러까지 낮췄죠 + +02:17.020 --> 02:23.620 +프론티어 모델과 API를 이용해 문제 해결법을 구축할 수 있습니다 + +02:23.620 --> 02:27.280 +퇴행 문제처럼 보이는 문제도요 + +02:27.280 --> 02:27.670 +아니에요 + +02:27.670 --> 02:29.410 +숫자 문제라고요 + +02:29.560 --> 02:30.160 +아니에요 + +02:30.190 --> 02:36.820 +텍스트만 완성하면 해결될 것 같은 자연적인 소리는 아니에요 + +02:36.820 --> 02:46.870 +하지만 그런 문제에도 불구하고 GPT 4 미니는 임의의 숲 모델을 능가할 수 있습니다 400,000개의 + +02:46.870 --> 02:52.360 +훈련 데이터 포인트가 있는 전통적인 머신 러닝 모델이죠 + +02:52.360 --> 03:00.130 +이런 모델들이 얼마나 강력한지 보여줍니다 다양한 유형의 상업적 문제에 어떻게 적용할 + +03:00.130 --> 03:01.510 +수 있는지도요 + +03:02.320 --> 03:09.640 +하지만 이제 드디어 훈련의 세계로 넘어갈 수 있겠네요 + +03:09.670 --> 03:16.720 +다음 주제는 어떻게 발전시키느냐입니다 개척 시대 모델을 세밀하게 조정해서 + +03:16.750 --> 03:20.680 +훈련 예시를 더 잘 활용하는 거죠 + +03:20.710 --> 03:22.870 +지금까지 없었던 거요 + +03:22.870 --> 03:25.780 +아주 크고 흥미로운 주제죠 + +03:25.780 --> 03:29.530 +다음 주 전에 완성될 거예요 + +03:29.530 --> 03:36.790 +완전히 다른 세계로 가져가서 오픈 소스 모델을 세밀하게 조정해 경쟁할 수 있는지 봅니다 + +03:36.790 --> 03:42.220 +매개 변수가 아주 적은 걸 다루게 될 거라는 걸 염두에 두고요 + +03:42.220 --> 03:44.350 +완전히 다른 세상이죠 + 
+03:44.590 --> 03:51.040 +전통적인 머신 러닝이나 선구적 모델을 이길 수 있을지 확인해 보죠 + +03:51.070 --> 03:52.930 +기대할 게 많아요 + +03:53.020 --> 03:57.280 +하지만 먼저 내일 다시 만나서 조율을 하죠 diff --git a/week5/community-contributions/subtitles/srts/59472883/en_US.srt b/week5/community-contributions/subtitles/srts/59472883/en_US.srt new file mode 100755 index 0000000..577eec4 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472883/en_US.srt @@ -0,0 +1,85 @@ +WEBVTT + +00:01.490 --> 00:03.500 +Okay, time to reveal the results. + +00:03.500 --> 00:04.880 +It has run to completion. + +00:04.880 --> 00:07.010 +And here it is. + +00:07.940 --> 00:11.060 +So a moment to pause. + +00:11.090 --> 00:19.040 +It turns out that it's actually a little bit worse than the previous results before fine tuning. + +00:19.490 --> 00:21.560 +Um, I would expect that. + +00:21.560 --> 00:23.690 +It's just that it's very similar. + +00:23.690 --> 00:26.510 +It's not actually that the model has gotten any worse. + +00:26.510 --> 00:32.960 +I would suspect that fine tuning in this case has not helped us, which is obviously disappointing. + +00:32.960 --> 00:37.520 +I did warn you at the beginning that there would be a disappointment in this session. + +00:37.730 --> 00:42.290 +Uh, now, having said that, there are some things that it's definitely improved upon. + +00:42.320 --> 00:45.770 +Unfortunately, they're not reflected in this business metric. + +00:45.890 --> 00:50.810 +Uh, but it has improved in terms of the, the, the biggest outliers. + +00:50.810 --> 00:56.000 +I don't know if you remember, but when we ran it before, it was guessing some things that were way + +00:56.000 --> 00:57.980 +outside the range of a thousand. + +00:58.010 --> 01:02.150 +I think I showed you there were points like that were far, far too high. + +01:02.150 --> 01:08.960 +And from seeing our data set of 500, it's it's appreciated that there aren't things that are priced + +01:08.960 --> 01:09.860 +that much. 
+ +01:09.860 --> 01:14.750 +And so that has caused something of a, of a nuanced correction to what it's doing. + +01:14.780 --> 01:18.110 +But other than that it hasn't particularly helped it. + +01:18.110 --> 01:23.780 +And in fact, unfortunately with this set, this this test set, at least it's actually appears to have + +01:23.780 --> 01:30.170 +hindered it according to this business metric, the one that we're really focused on, the total difference. + +01:30.350 --> 01:33.500 +So that's a sobering moment for us. + +01:33.500 --> 01:38.120 +Uh, and uh, in the next video, I'll explain why. + +01:38.120 --> 01:43.040 +And the times when fine tuning frontier models can be very helpful and when they can't. + +01:43.040 --> 01:47.060 +And fear not, there is, there is, there is good news ahead. + +01:47.060 --> 01:51.260 +Even if this is a is a setback for us, we will we will see more. + +01:51.290 --> 01:52.130 +See you next time. diff --git a/week5/community-contributions/subtitles/srts/59472883/ja_JP.srt b/week5/community-contributions/subtitles/srts/59472883/ja_JP.srt new file mode 100755 index 0000000..de64f4c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472883/ja_JP.srt @@ -0,0 +1,79 @@ +WEBVTT + +00:01.490 --> 00:03.500 +さて、 結果を発表しよう。 + +00:03.500 --> 00:04.880 +完成まで走りきった。 + +00:04.880 --> 00:07.010 +そしてここにある。 + +00:07.940 --> 00:11.060 +だから、 ちょっと立ち止まって。 + +00:11.090 --> 00:19.040 +実際、 微調整前の結果よりも少し悪いことが判明した。 + +00:19.490 --> 00:21.560 +そうだろうね。 + +00:21.560 --> 00:23.690 +ただ、 とても似ているんだ。 + +00:23.690 --> 00:26.510 +実際、 モデルが悪くなったわけではない。 + +00:26.510 --> 00:32.960 +この場合、 微調整が役に立たなかったのではないかと思う。 + +00:32.960 --> 00:37.520 +私は冒頭で、 このセッションには残念なことがあると警告した。 + +00:37.730 --> 00:42.290 +とはいえ、 いくつか改善された点もある。 + +00:42.320 --> 00:45.770 +残念ながら、 このビジネス指標には反映されていない。 + +00:45.890 --> 00:50.810 +しかし、 最も大きな異常値という点では改善されている。 + +00:50.810 --> 00:57.980 +覚えているかどうかわからないが、 以前このテストを実施したときは、 1000の範囲外のことも推測していた。 + +00:58.010 --> 01:02.150 +私は、 
そのようなポイントがあまりにも高すぎることを示したと思う。 + +01:02.150 --> 01:09.860 +そして、 500のデータを見たところ、 それほど高い値段のものはないということがわかった。 + +01:09.860 --> 01:14.750 +そのため、 そのようなニュアンスでの修正が行われている。 + +01:14.780 --> 01:18.110 +しかし、 それ以外は特に役に立っていない。 + +01:18.110 --> 01:23.780 +そして実際、 残念なことに、 このセット、 このテストセットでは、 少なくとも、 我々が本当に注目しているこのビジネス指標である総差異によれば、 + +01:23.780 --> 01:30.170 +実際には妨げになっているように見える。 + +01:30.350 --> 01:33.500 +だから、 私たちにとっては気が重い瞬間だ。 + +01:33.500 --> 01:38.120 +次のビデオで、 その理由を説明するよ。 + +01:38.120 --> 01:43.040 +また、 フロンティア・モデルの微調整が非常に役立つ時と、 そうでない時がある。 + +01:43.040 --> 01:47.060 +そして、 恐れることはない。 + +01:47.060 --> 01:51.260 +たとえこれが後退であったとしても、 私たちはもっと多くを見ることができるだろう。 + +01:51.290 --> 01:52.130 +それではまた次回。 diff --git a/week5/community-contributions/subtitles/srts/59472883/ko_KR.srt b/week5/community-contributions/subtitles/srts/59472883/ko_KR.srt new file mode 100755 index 0000000..cfd522c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59472883/ko_KR.srt @@ -0,0 +1,85 @@ +WEBVTT + +00:01.490 --> 00:03.500 +이제 결과를 발표할게요 + +00:03.500 --> 00:04.880 +실행이 완료되었어요 + +00:04.880 --> 00:07.010 +여기 있네요 + +00:07.940 --> 00:11.060 +잠시 멈출게요 + +00:11.090 --> 00:19.040 +미세 조정 전보다 조금 더 안 좋은 결과가 나왔어요 비트 코팅 + +00:19.490 --> 00:21.560 +그럴 줄 알았어요 + +00:21.560 --> 00:23.690 +그냥 아주 비슷할 뿐이에요 + +00:23.690 --> 00:26.510 +모델이 더 나빠진 건 아니에요 + +00:26.510 --> 00:32.960 +이 사건의 세세한 조정이 도움이 안 된 것 같아요 당연히 실망스럽죠 + +00:32.960 --> 00:37.520 +처음에 경고했듯이 실망스러운 시간이 될 거예요 + +00:37.730 --> 00:42.290 +하지만 몇 가지 개선된 점이 있어요 + +00:42.320 --> 00:45.770 +안타깝게도 이 사업적 척도에는 반영되지 않아요 + +00:45.890 --> 00:50.810 +하지만 가장 큰 이상 측면에서는 나아졌어요 + +00:50.810 --> 00:56.000 +기억할지 모르겠지만 전에 실행했을 때 천 달러 범위에서 한참 벗어난다고 + +00:56.000 --> 00:57.980 +추측했었죠 + +00:58.010 --> 01:02.150 +너무 높은 점수가 있다는 걸 보여드린 것 같아요 + +01:02.150 --> 01:08.960 +저희 데이터 집합이 500개인데 그렇게 비싼 건 없다는 걸 알게 + +01:08.960 --> 01:09.860 +됐어요 + +01:09.860 --> 01:14.750 +그래서 뭔가 미묘한 수정이 생긴 것 같아요 + +01:14.780 --> 01:18.110 +하지만 그 외에는 별 도움이 안 됐어요 + +01:18.110 --> 01:23.780 +사실 안타깝게도 이 테스트 세트로는 적어도 비즈니스 
측정법에 + +01:23.780 --> 01:30.170 +따르면 방해가 된 것 같습니다 저희가 집중적으로 살펴볼 총차 말이에요 + +01:30.350 --> 01:33.500 +정신이 번쩍 드는 순간이죠 + +01:33.500 --> 01:38.120 +다음 영상에서 이유를 설명해 드릴게요 + +01:38.120 --> 01:43.040 +모형을 세밀하게 조정하면 도움이 될 때도 있고 아닐 때도 있죠 + +01:43.040 --> 01:47.060 +걱정 마세요 좋은 소식이 기다리고 있어요 + +01:47.060 --> 01:51.260 +비록 이번 일로 차질이 생겼지만 앞으로 더 많이 보게 될 거예요 + +01:51.290 --> 01:52.130 +다음에 봐요 diff --git a/week5/community-contributions/subtitles/srts/59473019/en_US.srt b/week5/community-contributions/subtitles/srts/59473019/en_US.srt new file mode 100755 index 0000000..e1ab85d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473019/en_US.srt @@ -0,0 +1,163 @@ +WEBVTT + +00:00.680 --> 00:05.210 +Welcome back to an action packed time of of training. + +00:05.210 --> 00:13.880 +So now, after waiting about five minutes when I run this same, uh, list events function, we get + +00:13.880 --> 00:16.100 +to see lots of things going on. + +00:16.100 --> 00:19.520 +What you see in the last ten events is that training is happening. + +00:19.520 --> 00:26.240 +And you can see in this message the step zero, step one, step two, step three out of 500, representing + +00:26.240 --> 00:30.380 +the 500 different training data points that it's going to go through. + +00:30.380 --> 00:32.750 +And you'll also see here training loss. + +00:32.780 --> 00:39.500 +Training loss is something that we want to come down and that will show us, uh, a good outcome, which + +00:39.500 --> 00:41.660 +is of course, what we're going for. + +00:41.660 --> 00:49.820 +And every time if I rerun this, we'll see now that it's already on step 118 as we go now, it's kind + +00:49.820 --> 00:51.440 +of hard to see what's going on here. + +00:51.440 --> 00:55.970 +And so this would be this would be a bit challenging if this is what we had to put up with. 
+ +00:55.970 --> 01:02.990 +But we don't because we use weights and biases and here, here is our gorgeous weights and biases screen + +01:02.990 --> 01:06.440 +which allows us to follow along with the training. + +01:06.770 --> 01:12.290 +And what we can see is the the results of training loss and watch what's happening. + +01:12.290 --> 01:14.900 +It's just updated because the page was refreshing. + +01:15.110 --> 01:18.980 +Um, and we'll get to see and get some insight into what's going on. + +01:18.980 --> 01:25.100 +Now, one thing that you'll always see when you're running this kind of training is that the first few + +01:25.100 --> 01:28.430 +batch steps will typically show a dramatic drop. + +01:28.670 --> 01:33.590 +Um, and that's actually not necessarily something to get too excited about. + +01:33.620 --> 01:39.770 +Typically, what's happening there is that the model is very quickly, uh, learning, um, the very + +01:39.770 --> 01:44.780 +much the parts of the structure which almost go without saying, like the fact that there's a dollar + +01:44.780 --> 01:52.250 +sign, um, and uh, and where to put the decimal point and a few other things that, that, um, I + +01:52.250 --> 01:58.400 +think in this case, it realizes that just guessing zero zero for the sense is more likely than, than + +01:58.400 --> 02:02.930 +anything else to be the right number of cents, because there's many things that are just a whole number + +02:02.930 --> 02:03.590 +of dollars. + +02:03.590 --> 02:09.590 +So there's this is that's total speculation, but it's things like that, the kind of obvious stuff + +02:09.590 --> 02:16.740 +about the construct that allows training to to come down very quickly for the first few steps to get + +02:16.740 --> 02:21.390 +the obvious stuff out of the way, so that it's now following a decent pattern. 
+ +02:21.420 --> 02:28.290 +What you're really looking for is continual progress from that point, and you typically, if you've + +02:28.290 --> 02:32.430 +got a training set up well and there's no right or wrong answers here. + +02:32.430 --> 02:35.250 +So so there's exceptions to everything I'm saying now. + +02:35.250 --> 02:38.460 +But but generally speaking you do want to see some variation. + +02:38.460 --> 02:45.300 +It's good to see some some batch steps that have greater and lower loss because you are you're trying + +02:45.300 --> 02:49.230 +to optimize and you're looking to explore different, different possibilities. + +02:49.440 --> 02:57.000 +Um, and what you're trying to what you're looking for is a kind of trend where over time, whilst any + +02:57.000 --> 03:04.980 +individual batch step may go up or down, you do expect or hope to see a gradual decrease in loss, + +03:05.250 --> 03:14.070 +um, over time and as potentially a slight concern for us that might make us a bit worried, is that + +03:14.070 --> 03:18.310 +we're seeing something where this doesn't particularly seem to be coming down. + +03:18.310 --> 03:25.300 +It just seems to show a lot of volatility, um, representing the fact that some of the of the data + +03:25.300 --> 03:28.840 +points are doing better than others in terms of the the guessing. + +03:28.840 --> 03:35.770 +But it's not as if there seems to be much of an improving trend or a trend in any direction yet. + +03:36.070 --> 03:39.310 +Um, at least that's what appears to be the case. + +03:39.490 --> 03:45.970 +So hopefully you are also running this, and you're going to be watching carefully to see what happens. + +03:45.970 --> 03:50.860 +Uh, it's going to take a total, I think, of about 10 or 15 minutes to run all the way through, but + +03:50.860 --> 03:53.290 +let's see where it is now in this list. + +03:54.910 --> 03:57.130 +We're now about halfway through. 
+ +03:57.160 --> 04:02.890 +When it gets to the end, there's going to be a pause while it goes through a step of validating the + +04:02.890 --> 04:09.100 +results and and and some, some confirmation checks on the OpenAI side that it that it tells you about + +04:09.100 --> 04:10.840 +in the, in the statuses. + +04:10.930 --> 04:16.690 +Um, and then you actually get an email to tell you that it's completed at the same time as this run + +04:16.690 --> 04:17.260 +completes. + +04:17.260 --> 04:20.560 +And when we pick up in the next video, it will have run. + +04:20.560 --> 04:21.760 +I will see you there. diff --git a/week5/community-contributions/subtitles/srts/59473019/ja_JP.srt b/week5/community-contributions/subtitles/srts/59473019/ja_JP.srt new file mode 100755 index 0000000..b9cf17f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473019/ja_JP.srt @@ -0,0 +1,118 @@ +WEBVTT + +00:00.680 --> 00:05.210 +アクション満載のトレーニングの時間へ、 おかえりなさい。 + +00:05.210 --> 00:16.100 +それで、 同じようにイベントをリストアップする機能を実行すると、 5分ほど待たされた後、 いろいろなものが表示されるようになった。 + +00:16.100 --> 00:19.520 +この10大会を見てわかるのは、 トレーニングが行われているということだ。 + +00:19.520 --> 00:26.240 +このメッセージには、 500のうちステップ0、 ステップ1、 ステップ2、 + +00:26.240 --> 00:30.380 +ステップ3が表示されている。 + +00:30.380 --> 00:32.750 +そして、 ここでもトレーニングロスが見られる。 + +00:32.780 --> 00:41.660 +トレーニング・ロスというのは、 私たちが減らしたいものであり、 それは私たちに良い結果を示してくれるものだ。 + +00:41.660 --> 00:51.440 +そして、 これを再実行するたびに、 すでにステップ118に進んでいることがわかるだろう。 + +00:51.440 --> 00:55.970 +だから、 これで我慢しなければならないとしたら、 これはちょっと難しいことだ。 + +00:55.970 --> 01:06.440 +しかし、 私たちはウェイトとバイアスを使うので、 そうしない。 ここにあるのは、 トレーニングに沿ったフォローができるゴージャスなウェイトとバイアスの画面だ。 + +01:06.770 --> 01:12.290 +そして私たちが見ることができるのは、 トレーニングの成果であり、 何が起こっているかを見ることだ。 + +01:12.290 --> 01:14.900 +ページが更新されたから更新されただけだ。 + +01:15.110 --> 01:18.980 +そして、 何が起こっているのかを見たり、 洞察したりすることができるだろう。 + +01:18.980 --> 01:28.430 +さて、 この種のトレーニングを実施しているときに必ず目にするのが、 最初の数バッチステップで劇的な低下が見られるということだ。 + +01:28.670 --> 01:33.590 +うーん、 それは必ずしも興奮することではないんだ。 + +01:33.620 --> 01:39.770 +通常、 
そこで起こっていることは、 モデルが、 + +01:39.770 --> 01:44.780 +ドル記号があるという事実や、 小数点以下をどこに置くかといった、 + +01:44.780 --> 02:03.590 +ほとんど言うまでもないような構造の部分を、 非常に素早く学習しているということです。 + +02:03.590 --> 02:09.590 +これは全くの憶測だが、 そのようなこと、 つまり構成に関する明白な事柄があるからこそ、 + +02:09.590 --> 02:21.390 +トレーニングは最初の数ステップを非常に素早く下り、 明白な事柄を片付けて、 まともなパターンに従うようになるのだ。 + +02:21.420 --> 02:28.290 +あなたが本当に求めているのは、 その時点からの継続的な進歩であり、 一般的には、 トレーニングがうまくセットアップされていれば、 + +02:28.290 --> 02:32.430 +正解も不正解もない。 + +02:32.430 --> 02:35.250 +だから、 私が今言っていることすべてに例外がある。 + +02:35.250 --> 02:38.460 +しかし、 一般的に言えば、 ある程度のバリエーションは欲しいところだ。 + +02:38.460 --> 02:49.230 +最適化を図り、 さまざまな可能性を模索しているのだから。 + +02:49.440 --> 02:57.000 +そして、 あなたが求めているのは、 時間の経過とともに、 + +02:57.000 --> 03:04.980 +個々のバッチステップが上がったり下がったりする一方で、 + +03:05.250 --> 03:18.310 +損失が徐々に減少していくような傾向です。 + +03:18.310 --> 03:28.840 +これは、 推測という点で、 あるデータポイントが他のデータポイントよりうまくいっているという事実を表している。 + +03:28.840 --> 03:35.770 +しかし、 まだ改善傾向やトレンドがあるようには思えない。 + +03:36.070 --> 03:39.310 +少なくとも、 そう見える。 + +03:39.490 --> 03:45.970 +だから、 願わくば君たちもこれを実行して、 何が起こるか注意深く見守っていてほしい。 + +03:45.970 --> 03:53.290 +ええと、 全部通すと10分か15分くらいかかると思うけど、 このリストの中で今どの位置にあるか見てみよう。 + +03:54.910 --> 03:57.130 +もう半分が終わったところだ。 + +03:57.160 --> 04:02.890 +最後に到達すると、 結果を検証するステップを経て、 OpenAI側でいくつかの確認チェックが行われ、 + +04:02.890 --> 04:10.840 +ステータスに表示されます。 + +04:10.930 --> 04:17.260 +ええと、 そして実際に、 この実行が完了すると同時に、 完了したことを伝えるメールが届く。 + +04:17.260 --> 04:20.560 +そして、 次のビデオでピックアップするときには、 もう走っている。 + +04:20.560 --> 04:21.760 +そこで会おう。 diff --git a/week5/community-contributions/subtitles/srts/59473019/ko_KR.srt b/week5/community-contributions/subtitles/srts/59473019/ko_KR.srt new file mode 100755 index 0000000..bd74c2a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473019/ko_KR.srt @@ -0,0 +1,160 @@ +WEBVTT + +00:00.680 --> 00:05.210 +액션 가득한 훈련 시간에 돌아오신 걸 환영해요 + +00:05.210 --> 00:13.880 +5분 정도 기다린 후에 이 리스트 이벤트 함수를 실행하면 많은 일이 일어나고 있는 걸 볼 수 + +00:13.880 --> 00:16.100 +있죠. 
Get it + +00:16.100 --> 00:19.520 +지난 10종목 동안 훈련이 진행되고 있어요 + +00:19.520 --> 00:26.240 +이 메시지를 보시면 단계 0, 1, 2, 3이 500개 중 하나입니다 500개의 + +00:26.240 --> 00:30.380 +다른 훈련 데이터 포인트를 나타내죠 + +00:30.380 --> 00:32.750 +훈련의 손실도 볼 수 있어요 + +00:32.780 --> 00:39.500 +훈련에서 진 걸 만회해서 좋은 결과를 내고 싶어요 그게 + +00:39.500 --> 00:41.660 +우리 목표죠 + +00:41.660 --> 00:49.820 +이걸 재실행할 때마다 이미 118단계인 걸 보게 됩니다 지금 하고 있죠 무슨 일인지 보기가 + +00:49.820 --> 00:51.440 +좀 어려워요 + +00:51.440 --> 00:55.970 +비트 박스를 참는 게 쉽지 않았을 거예요 Put it up Put up Put it up Put it up Put it Put it Put it Put it Put it + +00:55.970 --> 01:02.990 +우리는 무게와 편향성을 이용하기 때문에 그렇게 하지 않습니다 여기 멋진 무게와 편향 + +01:02.990 --> 01:06.440 +스크린으로 훈련 과정을 따라 볼 수 있죠 + +01:06.770 --> 01:12.290 +훈련에서 손실을 본 결과가 어떻게 되는지 볼 수 있어요 + +01:12.290 --> 01:14.900 +페이지가 새로워져서 업데이트된 거예요 + +01:15.110 --> 01:18.980 +Get up! Get up! 어떤 상황인지 파악할 수 있을 거예요 + +01:18.980 --> 01:25.100 +이런 종류의 훈련을 실행할 때 항상 보게 되는 것 중 하나는 첫 몇 배치 단계에서 + +01:25.100 --> 01:28.430 +보통 극적인 드롭을 보인다는 거죠 + +01:28.670 --> 01:33.590 +get's get's get's get's get. 이건 사실 그렇게 좋아할 일은 아니에요 + +01:33.620 --> 01:39.770 +일반적으로 이 모델은 아주 빠르게 학습합니다 거의 말할 필요도 + +01:39.770 --> 01:44.780 +없는 구조의 일부를요 $ 기호가 있는 거나 소수점을 + +01:44.780 --> 01:52.250 +어디에 둘지 같은 거요 이 경우에는 개념에서 0을 0으로 추정하는 게 다른 + +01:52.250 --> 01:58.400 +어떤 것보다도 올바른 센트 수가 될 가능성이 높죠 달러의 개수에는 + +01:58.400 --> 02:03.590 +많은 것들이 있으니까요 + +02:03.590 --> 02:09.590 +아직은 추측일 뿐이지만 이런 것들이 도움이 돼요. Get-Tuck Season + +02:09.590 --> 02:16.740 +1 닙턱 시즌 1 이런 구조의 특징 덕분에 훈련이 빠르게 진행되고 첫 몇 단계에서는 특징을 빠르게 + +02:16.740 --> 02:21.390 +파악해서 지금은 괜찮은 패턴을 따르고 있죠. 
+ +02:21.420 --> 02:28.290 +그 시점부터 계속적인 진전을 봐야 합니다 트레이닝 세팅을 잘 해뒀다면 + +02:28.290 --> 02:32.430 +여기에 옳고 그른 답은 없어요 + +02:32.430 --> 02:35.250 +제가 지금 하는 모든 것에 예외가 있는 거죠 + +02:35.250 --> 02:38.460 +하지만 일반적으로 보면 약간의 변화가 있어야 해요 + +02:38.460 --> 02:45.300 +손실이 크고 적은 배치 단계를 보는 건 좋아요 최적화하고 다양한 + +02:45.300 --> 02:49.230 +가능성을 탐색해야 하니까요 + +02:49.440 --> 02:57.000 +지금 보시는 건 일종의 경향인데요 시간이 지나면서 + +02:57.000 --> 03:04.980 +배치 단계를 늘렸다가 줄였다가 하면서 손실이 서서히 줄어들기를 + +03:05.250 --> 03:14.070 +기대하는 거죠 그리고 걱정되는 부분은 비트 수가 줄지 않는 것 + +03:14.070 --> 03:18.310 +같다는 거예요 + +03:18.310 --> 03:25.300 +변동이 많은 것 같아요 추측 측면에서 일부 데이터 포인트가 다른 + +03:25.300 --> 03:28.840 +것보다 더 잘한다는 걸 보여주죠 + +03:28.840 --> 03:35.770 +하지만 아직 어떤 방향으로든 개선 추세가 있는 것 같진 않아요 + +03:36.070 --> 03:39.310 +적어도 겉으로 보기엔 그래요 + +03:39.490 --> 03:45.970 +여러분도 이걸 실행하며 무슨 일이 일어나는지 주의 깊게 보시길 바라요 + +03:45.970 --> 03:50.860 +제 생각에는 다 실행하려면 10분에서 15분 정도 걸릴 것 같습니다 + +03:50.860 --> 03:53.290 +리스트의 어디에 있는지 보죠 + +03:54.910 --> 03:57.130 +이제 절반쯤 왔어요 + +03:57.160 --> 04:02.890 +끝에 다다르면 일시 정지가 됩니다 결과의 유효성을 확인하는 단계를 + +04:02.890 --> 04:09.100 +거치는 동안에요 OpenAI 쪽에서 확인을 합니다 상태에 대해 알려주는 + +04:09.100 --> 04:10.840 +부분이죠 + +04:10.930 --> 04:16.690 +Get이 완료됐다는 이메일을 받을 수 있어요 이 런이 완료되는 것과 동시에 + +04:16.690 --> 04:17.260 +말이죠 + +04:17.260 --> 04:20.560 +다음 비디오에선 실행될 거예요 + +04:20.560 --> 04:21.760 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/59473021/en_US.srt b/week5/community-contributions/subtitles/srts/59473021/en_US.srt new file mode 100755 index 0000000..9461fd6 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473021/en_US.srt @@ -0,0 +1,787 @@ +WEBVTT + +00:00.860 --> 00:03.890 +Welcome to our favorite place to be to JupyterLab. + +00:03.890 --> 00:06.920 +Here we are again now in day three. + +00:06.920 --> 00:08.900 +In week six. + +00:09.200 --> 00:13.580 +I'm really looking forward to this notebook again and I hope you enjoy it too. + +00:13.580 --> 00:16.130 +I've got some good things cooked up for you. 
+ +00:16.310 --> 00:21.650 +So again, our plan today is to look at baseline models. + +00:21.650 --> 00:28.190 +And so I'm going to start with a bunch of imports which are all imports that you've seen before. + +00:28.190 --> 00:30.080 +Nothing very new here. + +00:30.080 --> 00:33.890 +But then some new imports in this second cell that you'll see here. + +00:33.890 --> 00:41.150 +Some imports for traditional machine learning pandas that you have probably encountered many times in + +00:41.150 --> 00:48.260 +the journey, a wonderful way to organize your data into into things that are a bit like mini spreadsheets. + +00:48.380 --> 00:51.320 +Um, numpy, of course, I'm sure is old hat for you. + +00:51.320 --> 00:52.760 +And then sklearn. + +00:52.790 --> 01:00.620 +Scikit learn is a machine learning library that is incredibly popular, incredibly popular with tons + +01:00.620 --> 01:07.550 +and tons of common algorithms that we will be using plenty of today, but most most importantly, linear + +01:07.580 --> 01:14.120 +regression, a standard part of any data scientist's toolkit for running linear regression models. + +01:14.690 --> 01:20.630 +And then there's another one here, which is a little set of imports related to natural language processing + +01:20.660 --> 01:31.100 +NLP, including Gensim, which is a very useful library for NLP related stuffs such as word two vec + +01:31.130 --> 01:32.780 +that I mentioned a while ago. + +01:32.810 --> 01:40.010 +I mentioned just then and is something that is a powerful model for turning words into vectors. + +01:40.010 --> 01:44.450 +So make sure that I run that cell too. + +01:44.780 --> 01:47.720 +Oh, and then there is one more, another set of imports. + +01:47.810 --> 01:50.630 +Uh, more from uh, scikit learn again. + +01:50.630 --> 01:56.810 +But just I kept these ones separate because it's, uh, two different imports we're doing for more advanced + +01:56.810 --> 01:57.590 +machine learning. 
+ +01:57.590 --> 02:03.600 +One is for the support vector regression, part of the Support Vector Machines and package. + +02:03.600 --> 02:06.870 +And then the other is the Random Forest Regressor. + +02:06.930 --> 02:12.390 +Uh, we I mentioned random forests a moment ago, so we will bring that in as well. + +02:12.840 --> 02:13.800 +Okay. + +02:13.830 --> 02:17.700 +Now this these set of constants, they might they might surprise you. + +02:17.730 --> 02:19.860 +They they look a bit unusual. + +02:20.070 --> 02:22.500 +Uh, I'll tell you to hold that thought. + +02:22.500 --> 02:23.670 +They will come in later. + +02:23.670 --> 02:26.040 +You may recognize them if you've ever done anything like this before. + +02:26.040 --> 02:32.910 +That, um, slightly strangely, for various reasons that are very historic. + +02:33.090 --> 02:41.430 +Um, when you print a that particular symbol to the standard out, it changes the color to being in + +02:41.430 --> 02:42.960 +the color green. + +02:43.200 --> 02:47.370 +Um, and for all sorts of, of reasons that I won't go into. + +02:47.490 --> 02:53.820 +Uh, and reset turns the color back to black or white or depending on what your foreground color is. + +02:53.850 --> 02:59.250 +And so knowing these constants, having them to hand makes it easy to print things in color, which + +02:59.250 --> 03:00.600 +we will be doing today. + +03:00.840 --> 03:01.410 +Okay. + +03:01.410 --> 03:07.950 +So run that constants run our usual environment setup that we know so well and log in to hugging face. + +03:08.280 --> 03:09.510 +Um, I'm not sure. + +03:09.510 --> 03:12.660 +I don't think we actually use hugging face today, so I don't think I needed to log into hugging face, + +03:12.660 --> 03:14.910 +but we did it anyway just for kicks. + +03:15.180 --> 03:18.900 +Um, and then make sure that matplotlib comes in the Jupyter notebook. + +03:18.990 --> 03:26.160 +Uh, we will load in our data from the pickle files rather than having to recreate it. 
+ +03:26.160 --> 03:33.180 +And so in it comes, uh, let's just take another look at the training data. + +03:33.180 --> 03:35.970 +So let's just take the first training data point. + +03:35.970 --> 03:39.870 +And I'm just going to ask for its prompt to remind you again of what this was. + +03:39.870 --> 03:47.460 +So this is I'm looking for the prompt attribute of one of these item objects that I really belabored + +03:47.460 --> 03:48.150 +you with. + +03:48.150 --> 03:52.050 +Uh, in, in the past, uh, in the two, two days ago. + +03:52.140 --> 03:55.350 +But hopefully this is now something you're becoming more familiar with. + +03:55.380 --> 03:58.110 +Let me print that so it prints out nicely. + +03:58.560 --> 04:01.740 +Um, there we go. + +04:02.010 --> 04:02.880 +Uh, here it is. + +04:02.880 --> 04:04.620 +How much this cost to the nearest dollar? + +04:04.620 --> 04:08.350 +And then there's the title, and then there's the detail and there is the price. + +04:08.380 --> 04:15.070 +Now you might wonder why I'm spending so much time on things like this item class specifically for this + +04:15.070 --> 04:15.580 +problem. + +04:15.580 --> 04:20.710 +And it is really because this is the kind of stuff that you'll be doing when you come up with your, + +04:20.740 --> 04:24.970 +your when you face your own commercial problems and look for ways to engineer it and to massage the + +04:24.970 --> 04:25.450 +data. + +04:25.450 --> 04:28.420 +So this is real world experience that will come in handy. + +04:28.450 --> 04:33.610 +You won't use exactly this code, the item class, and you probably won't have a prompt exactly like + +04:33.610 --> 04:34.000 +this. 
+ +04:34.000 --> 04:39.190 +But this kind of technique is something you'll be able to replicate, so it's important to understand + +04:39.190 --> 04:43.480 +it and understand the decisions that I'm making as we come up with it, so that you'll be able to do + +04:43.480 --> 04:46.720 +the same thing with confidence with your own projects. + +04:46.780 --> 04:51.160 +So this then, is the training prompt that it came up with. + +04:51.190 --> 04:53.500 +And now let's look at a test prompt. + +04:53.500 --> 04:57.700 +So I'm going to take the first of our test items. + +04:57.700 --> 05:00.790 +And I'm going to call the method test prompt. + +05:00.790 --> 05:09.190 +And remember that basically takes its training prompt but strips out the actual price so that we don't + +05:09.190 --> 05:11.980 +reveal the answer when we're trying to test our model. + +05:11.980 --> 05:14.950 +And its job is to fill in that price. + +05:14.950 --> 05:19.810 +And if I want to know what it's supposed to fill in, um, I'll take it for a training point. + +05:19.960 --> 05:25.360 +You take a train and then you can just call price like that. + +05:25.360 --> 05:31.720 +And that is the actual price, uh, associated with this, you'll see that that this has been rounded + +05:31.720 --> 05:34.030 +to the nearest whole dollar. + +05:34.120 --> 05:36.700 +But the real price is something slightly different. + +05:36.700 --> 05:43.090 +So hopefully this reminds you refreshes your memory on what we're doing with these train and test, + +05:43.240 --> 05:51.280 +uh, methods and these these lists of training and test items and how we call them. + +05:51.490 --> 05:58.510 +So now I want to reveal something that I'm really quite pleased with, which is a chunk of code, which + +05:58.540 --> 06:03.280 +again, whilst you may not use exactly this code in your projects, you'll do similar things. 
+ +06:03.280 --> 06:09.800 +So it's a nice kind of principle, a nice way of approaching the problem that you should, uh, take + +06:09.800 --> 06:12.800 +on and be able to replicate for your own problems. + +06:12.800 --> 06:21.200 +So I wanted to be able to test different models that we come up with in a really quick, simple way. + +06:21.410 --> 06:26.900 +Um, and that involves taking a bunch of our test data points and running them through the model and + +06:26.900 --> 06:28.850 +being able to visualize the results. + +06:28.850 --> 06:33.740 +And this was something I used to have that in a function, and I ended up repeating that lots and having + +06:33.740 --> 06:38.180 +to copy and paste my code a lot, because repeatedly I'd want to be doing the same thing. + +06:38.180 --> 06:43.910 +And any time you do that, it sounds like it's a time for you to build some kind of a utility to do + +06:43.910 --> 06:44.780 +it for you. + +06:44.900 --> 06:50.930 +Um, and so what I came up with is this, this class tester, which is going to be able to test a model + +06:50.930 --> 06:56.780 +and the way it will work is that you will be able to write any function you want, any function that + +06:56.780 --> 07:01.790 +will be called like my, uh, sorry, my prediction function or anything like that. + +07:01.790 --> 07:10.130 +And its only job will be to take an item and to respond, return the estimated price that is its job, + +07:10.130 --> 07:14.570 +and you put whatever code you want in there to do a particular prediction. + +07:14.570 --> 07:19.070 +And once you've written a function like that, you can just call tester. + +07:19.100 --> 07:24.800 +This class I'm about to show you dot test and pass in the name of the function. + +07:24.800 --> 07:26.720 +And it will take this function. 
+ +07:26.720 --> 07:34.910 +It will call it repeatedly, in fact, for 250 different test items and see how good it is at predicting + +07:34.910 --> 07:38.600 +the results, and then summarize that information back visually. + +07:38.870 --> 07:39.980 +That's the idea. + +07:39.980 --> 07:43.190 +And it's going to simplify our workflow distinctly. + +07:43.430 --> 07:45.410 +Um, and this is the class itself. + +07:45.410 --> 07:46.460 +It's perfectly simple. + +07:46.460 --> 07:48.650 +It's got some stuff to deal with with colors. + +07:48.650 --> 07:50.720 +I told you we'd be printing some colors. + +07:50.720 --> 07:58.280 +It runs a data point, and the run data point is the method that actually does the business for one + +07:58.280 --> 08:01.100 +particular data point, it gets that data point. + +08:01.130 --> 08:05.030 +This is where it calls the function that you provided. + +08:05.030 --> 08:08.730 +It calls it with the data point to get your model's models. + +08:08.730 --> 08:12.630 +Guess what your function says it should be worth. + +08:12.630 --> 08:18.480 +And then it gets the truth by calling the price attribute that we just looked at just a moment ago. + +08:18.480 --> 08:24.510 +And then the error is, of course, the absolute difference between the guess and the truth. + +08:24.540 --> 08:25.980 +As simple as that. + +08:26.010 --> 08:35.040 +It also calculates the something called the squared log error, and the formula for the squared log + +08:35.040 --> 08:37.410 +error is exactly as it is here. + +08:37.440 --> 08:43.770 +It's the log of the truth plus one minus the log of the guess plus one. + +08:44.160 --> 08:50.700 +Um, and uh, yeah, you can in your, you can probably imagine why there's this plus one in the formula. + +08:50.700 --> 08:55.500 +It's because if the truth were zero, you wouldn't want math.log to blow up. 
+ +08:55.560 --> 09:01.890 +So this formula works well for, for cases when, for example, the, the, the truth or the guess are + +09:01.890 --> 09:02.580 +zero. + +09:03.120 --> 09:08.790 +Um, and that gives us then the squared log error is the square of course, of this. + +09:09.330 --> 09:14.400 +Uh, and uh, we're then going to, uh, do a little bit of processing. + +09:14.400 --> 09:17.850 +We're going to have the ability to draw a little chart, which I will show you in a moment. + +09:18.030 --> 09:19.380 +Uh, write a report. + +09:19.380 --> 09:23.880 +And this ultimately is the, uh, function I mentioned a moment ago. + +09:23.880 --> 09:27.960 +You can just call test to run this this test. + +09:28.170 --> 09:28.740 +Okay. + +09:28.770 --> 09:30.240 +Let me execute that cell. + +09:30.240 --> 09:35.160 +So you don't need to particularly understand everything that I did in this test class. + +09:35.160 --> 09:41.310 +It's the the the principle of creating a nice little test harness like this and having it be something + +09:41.310 --> 09:46.950 +you invest a bit of time in to make sure you'll be able to get real insight into the results of running + +09:46.950 --> 09:47.550 +your model. + +09:47.550 --> 09:49.080 +That's the learning here. + +09:49.350 --> 09:54.450 +So what's the simplest possible model that you could imagine? + +09:54.450 --> 09:56.610 +What is the simplest possible model? + +09:56.610 --> 09:59.250 +We're going to come up before we do real baseline models. + +09:59.250 --> 10:03.960 +We're going to do two comedy models, silly models that are going to be the most basic thing we can + +10:03.960 --> 10:04.560 +imagine. + +10:04.560 --> 10:07.110 +And let me challenge you for a moment. + +10:07.230 --> 10:11.680 +Have a think about what could be the simplest possible model, and it's probably going to be something + +10:11.680 --> 10:12.760 +simpler than that. + +10:12.940 --> 10:16.900 +Um, so the first is going to be two very simple models. 
+ +10:16.900 --> 10:19.090 +The first one, I reveal the answer already. + +10:19.090 --> 10:24.130 +You probably saw that the first one will be we're just going to guess a random number. + +10:24.130 --> 10:25.540 +That's all it's going to be. + +10:25.540 --> 10:27.640 +So here is a function. + +10:27.640 --> 10:31.090 +Here is a function that takes a takes a it doesn't take a prompt. + +10:31.090 --> 10:36.820 +It takes an item that not that it matters because it's going to completely ignore the item and instead + +10:36.820 --> 10:39.520 +it's going to not care what it's told. + +10:39.520 --> 10:43.510 +It will return a random number between 1 and 1000. + +10:43.690 --> 10:47.500 +Uh, sorry, that's between 1 and 999 inclusive. + +10:47.740 --> 10:53.320 +Uh, we will set the random seed so that it's the same every every time that we run this test. + +10:53.440 --> 10:54.880 +And now we run it. + +10:54.880 --> 11:00.730 +So the way that we run this test again is I go with my, my tester class, I just showed you dot test. + +11:00.730 --> 11:03.700 +And then I simply pass in the name of this function. + +11:03.700 --> 11:08.140 +I don't call the function because if I, if I call the function, it will just call it once and that + +11:08.140 --> 11:08.920 +will be it. + +11:08.950 --> 11:11.590 +I pass in the function itself like so. + +11:11.620 --> 11:17.350 +And now I'm going to execute this and you're going to see the results of my program. + +11:17.800 --> 11:20.800 +So it happened very fast because this was a very quick model. + +11:20.800 --> 11:24.280 +So I'm going to scroll back up and tell you what you're seeing here because it's a lot. + +11:24.790 --> 11:28.270 +And it's something you're going to get very familiar with because we're going to do this a lot of times + +11:28.270 --> 11:29.770 +in the next few classes. + +11:29.770 --> 11:37.060 +So each row you are seeing here is representing a different one of our test data points. 
+ +11:37.060 --> 11:40.270 +And it's telling you what the item is over here on the right. + +11:40.270 --> 11:47.050 +Like here is a Godox ML 60 by LED, LED light kit, handheld LED. + +11:47.050 --> 11:49.270 +And then I cut it short after that. + +11:49.270 --> 11:55.630 +And what you're seeing for this particular LED light kit is what did the model what did this function + +11:55.630 --> 11:57.760 +guess for the LED light kit. + +11:57.760 --> 12:01.810 +And it guessed $143 because it's a random number generator. + +12:02.260 --> 12:03.760 +What is the truth? + +12:03.760 --> 12:09.940 +Somewhat remarkably, the truth is $289, which is rather more than I would have expected for, uh, + +12:10.000 --> 12:12.980 +but only based on that that truncated Description there. + +12:12.980 --> 12:13.490 +Maybe. + +12:13.790 --> 12:17.150 +Maybe it comes with a laptop on the side or something. + +12:17.900 --> 12:19.190 +So that's the error. + +12:19.220 --> 12:23.330 +That's how much we this this model gets it wrong by this. + +12:23.330 --> 12:26.930 +Here is the squared log error that we'll probably talk about another day. + +12:26.930 --> 12:32.630 +But it's something that is meant to better compensate, better reflect the difference between absolute + +12:32.630 --> 12:35.720 +errors and relative percentage errors. + +12:35.870 --> 12:40.970 +But we're really going to be focusing on this more than anything because it's so easy to understand + +12:41.000 --> 12:45.920 +for for us, for, for for humans, just the difference between the guess and the truth. + +12:46.310 --> 12:51.200 +Um, and it's colored in red because that's considered a really terrible guess. + +12:51.200 --> 12:53.270 +So red is really terrible. + +12:53.270 --> 12:56.210 +Yellow is, uh, and green is fair enough. + +12:56.210 --> 13:00.530 +And the definitions for those, if we scroll back up, I've just come up with something that's a bit + +13:00.560 --> 13:02.540 +bit of a, of a rule of thumb. 
+
+13:02.540 --> 13:06.710
+I call it green if it guesses within $40 or 20%.
+
+13:06.740 --> 13:09.950
+If it's within $40 or 20%, then that's that's green.
+
+13:09.950 --> 13:15.140
+You might think that's quite generous of me to say $40, but remember, there's a big range of prices
+
+13:15.140 --> 13:17.510
+here and you're just given the description of something.
+
+13:17.510 --> 13:20.420
+And really it's very hard to do this.
+
+13:20.420 --> 13:25.250
+So I think if something guesses to within 40 bucks then it's doing a fine job.
+
+13:25.430 --> 13:28.070
+So you could of course be stricter if you wish.
+
+13:28.070 --> 13:29.930
+This this is all yours to tweak.
+
+13:30.080 --> 13:32.300
+But that was my principle for this.
+
+13:32.810 --> 13:35.180
+So here are all of the points.
+
+13:35.180 --> 13:38.630
+And at the end there's a nice little visualization.
+
+13:38.660 --> 13:40.730
+So what are we seeing here.
+
+13:40.760 --> 13:41.900
+I love this diagram.
+
+13:41.930 --> 13:43.340
+And we're going to see a lot of these diagrams.
+
+13:43.340 --> 13:44.960
+So so get used to this one.
+
+13:44.990 --> 13:51.530
+The x axis is showing you the ground truth the actual value of a product.
+
+13:51.560 --> 13:58.850
+Uh also sometimes you will hear that described as y by data scientists, whereas this axis is showing
+
+13:58.850 --> 14:04.760
+you y hat as data scientists would say, or what estimate did the model give for the value.
+
+14:04.760 --> 14:10.430
+So we're seeing the model's estimate against the actual true value of the product.
+
+14:10.430 --> 14:17.460
+So the true value is always spread from from 0 to 1000 in our data set, the model's value is all over
+
+14:17.460 --> 14:17.940
+the place.
+
+14:17.970 --> 14:20.520
+A total random set of dots.
+
+14:20.520 --> 14:26.310
+This blue line represents, of course, the line of of perfect guessing.
+
+14:26.580 --> 14:33.360
+If the model ever happens to guess along this blue line, then it has guessed exactly on the ground truth,
+
+14:33.360 --> 14:34.920
+and you can see that it got lucky.
+
+14:34.920 --> 14:40.530
+Of course it will get lucky a small amount of time, and these green dots that are close to the blue
+
+14:40.530 --> 14:44.310
+line represent where it's done fairly well.
+
+14:44.550 --> 14:52.020
+Yellow dots for where it's a and then red dots is when it has missed the trick and gone right out there.
+
+14:52.500 --> 14:54.690
+Uh, so that was fun.
+
+14:54.690 --> 14:55.860
+I hope you enjoyed it.
+
+14:55.860 --> 14:59.670
+There's another very trivial model that we can do, and it may be the one that you were thinking of
+
+14:59.670 --> 15:00.750
+before when I asked for it.
+
+15:00.750 --> 15:05.520
+For a very basic model, uh, you may have thought one of the really basic one that occurred to me was,
+
+15:05.520 --> 15:09.060
+let's just guess zero for everything, or guess one for everything.
+
+15:09.330 --> 15:11.280
+We can do slightly better than that.
+
+15:11.310 --> 15:18.600
+We can take the training data set and say, what is the average price of a product across all of the
+
+15:18.600 --> 15:19.620
+training data set?
+
+15:19.620 --> 15:22.710
+Because remember our model is provided with the training data set.
+
+15:22.710 --> 15:26.970
+So we can consider that as our as a as a constant guess.
+
+15:27.000 --> 15:33.930
+Let's just guess that everything is is the average price of anything in our training data set.
+
+15:34.140 --> 15:40.470
+Um, so basically we'll calculate the, uh, the, the prices of our training data set and then we'll
+
+15:40.470 --> 15:45.450
+find its average, the sum of all of the training prices divided by the count of them.
+
+15:45.450 --> 15:52.440
+That will give us the, the mean, the mean, uh, price of a point in the training data set.
+ +15:52.470 --> 15:56.820 +And here is our very sophisticated model, our very sophisticated model. + +15:56.820 --> 16:02.220 +Again, it takes an item and it simply returns the, uh, the average. + +16:02.220 --> 16:05.940 +So it ignores whatever it's passed and it just returns the average. + +16:05.940 --> 16:08.490 +So let's have a look at what this is going to look like. + +16:08.520 --> 16:13.170 +Test to see if you can picture in your mind what kind of diagram you're about to see. + +16:13.290 --> 16:17.320 +Uh, hopefully you can imagine exactly what it's going to look like. + +16:18.190 --> 16:20.800 +And if you're ready to see if you're right or not. + +16:21.040 --> 16:21.790 +Bam! + +16:21.790 --> 16:23.890 +This is, of course, the diagram. + +16:23.890 --> 16:29.650 +It guessed at a fixed point, which if you thought it was going to be at 500, then remember that the + +16:29.650 --> 16:32.920 +distribution is skewed more towards cheaper items. + +16:32.950 --> 16:35.170 +Not not as badly as as it was originally. + +16:35.200 --> 16:37.480 +We corrected for it, but only a bit. + +16:37.690 --> 16:41.620 +Um, so it guessed this amount for absolutely everything. + +16:41.950 --> 16:48.670 +And of course, at the point where that is the same as the value of the product, it got a green, otherwise + +16:48.670 --> 16:50.650 +yellow or red. + +16:50.680 --> 16:53.140 +So there is the spread. + +16:53.170 --> 16:56.590 +Uh, and you can see the result that you expected. + +16:56.590 --> 17:02.080 +If we scroll back through the actual results, you'll see that there's a sea of reds with just some + +17:02.080 --> 17:07.240 +greens from time to time for things that cost close to the average. + +17:08.050 --> 17:11.080 +Well, with that, I hope that you're enjoying it. + +17:11.080 --> 17:15.490 +So far, we haven't actually looked at real machine learning models yet, but don't worry, we're just + +17:15.490 --> 17:16.360 +about to do that. 
+ +17:16.390 --> 17:17.290 +Hang on in there. + +17:17.380 --> 17:18.280 +See you next time. diff --git a/week5/community-contributions/subtitles/srts/59473021/ja_JP.srt b/week5/community-contributions/subtitles/srts/59473021/ja_JP.srt new file mode 100755 index 0000000..1c87a3a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473021/ja_JP.srt @@ -0,0 +1,670 @@ +WEBVTT + +00:00.860 --> 00:03.890 +JupyterLabへようこそ。 + +00:03.890 --> 00:06.920 +そして3日目。 + +00:06.920 --> 00:08.900 +第6週だ。 + +00:09.200 --> 00:13.580 +またこのノートを楽しみにしています。 + +00:13.580 --> 00:16.130 +いいものを用意したよ。 + +00:16.310 --> 00:21.650 +だから、 今日もベースラインモデルを見るつもりだ。 + +00:21.650 --> 00:28.190 +だから、 まずは見たことのあるインポートの数々から始めよう。 + +00:28.190 --> 00:30.080 +特に目新しいことはない。 + +00:30.080 --> 00:33.890 +しかし、 この2番目のセルに新しいインポートがいくつか入っている。 + +00:33.890 --> 00:41.150 +従来の機械学習用パンダのインポートをいくつか紹介しよう。 このパンダは、 + +00:41.150 --> 00:48.260 +おそらく旅の中で何度も出会ったことがあるだろう。 + +00:48.380 --> 00:51.320 +ええと、 もちろん、 numpyはあなたにとって古馴染みだと確信している。 + +00:51.320 --> 00:52.760 +そしてsklearn。 + +00:52.790 --> 01:00.620 +Scikit learnは信じられないほど人気のある機械学習ライブラリで、 今日私たちがたくさん使うことになる一般的なアルゴリズムが何トンも何トンもありますが、 + +01:00.620 --> 01:14.120 +最も重要なのは線形回帰で、 線形回帰モデルを実行するためのデータサイエンティストのツールキットの標準的な部分です。 + +01:14.690 --> 01:20.630 +Gensimは、 少し前に紹介したword + +01:20.660 --> 01:32.780 +two vecのようなNLP関連でとても便利なライブラリだ。 + +01:32.810 --> 01:40.010 +先ほど述べたように、 言葉をベクトルに変えるための強力なモデルである。 + +01:40.010 --> 01:44.450 +だから、 そのセルも必ず走らせる。 + +01:44.780 --> 01:47.720 +それからもうひとつ、 輸入品がある。 + +01:47.810 --> 01:50.630 +ええと、 またscikit learnから。 + +01:50.630 --> 01:57.590 +ただ、 より高度な機械学習のためにやっている2つの異なるインポートなので、 これらは別々にしておいた。 + +01:57.590 --> 02:03.600 +1つはサポート・ベクトル回帰で、 サポート・ベクトル・マシンとパッケージの一部である。 + +02:03.600 --> 02:06.870 +そしてもう1つは、 ランダムフォレスト・リグレッサーだ。 + +02:06.930 --> 02:12.390 +ええと、 先ほどランダムフォレストについて触れましたが、 それも取り入れましょう。 + +02:12.840 --> 02:13.800 +オーケー。 + +02:13.830 --> 02:17.700 +さて、 この定数セットだが、 あなたを驚かせるかもしれない。 + +02:17.730 --> 02:19.860 +ちょっと変わった形をしている。 + +02:20.070 --> 02:22.500 +ええと、 
その考えは止めておくように言っておくよ。 + +02:22.500 --> 02:23.670 +後から入ってくる。 + +02:23.670 --> 02:26.040 +このようなことをしたことがある人なら、 見覚えがあるかもしれない。 + +02:26.040 --> 02:32.910 +それは、 うーん、 少し奇妙なことに、 非常に歴史的な様々な理由がある。 + +02:33.090 --> 02:42.960 +ええと、 その特定の記号を標準出力にプリントすると、 色が緑色に変わるんだ。 + +02:43.200 --> 02:47.370 +まあ、 いろいろな理由があるんだけど、 それはここでは書かないよ。 + +02:47.490 --> 02:53.820 +そして、 リセットは色を黒か白に戻す。 + +02:53.850 --> 03:00.600 +この定数を知っていれば、 カラー印刷も簡単にできる。 + +03:00.840 --> 03:01.410 +オーケー。 + +03:01.410 --> 03:07.950 +そこで、 定数を実行し、 いつもの環境設定を実行し、 ハギング・フェイスにログインする。 + +03:08.280 --> 03:09.510 +うーん、 よくわからない。 + +03:09.510 --> 03:14.910 +今日はハギング・フェイスを実際に使うことはないと思うので、 ハギング・フェイスにログインする必要はなかったと思う。 + +03:15.180 --> 03:18.900 +それから、 Jupyterノートブックにmatplotlibが入っていることを確認してください。 + +03:18.990 --> 03:26.160 +ええと、 データを再作成するのではなく、 ピックルファイルからデータを読み込みます。 + +03:26.160 --> 03:33.180 +そこで、 トレーニングデータをもう一度見てみよう。 + +03:33.180 --> 03:35.970 +では、 最初のトレーニング・データを取ってみよう。 + +03:35.970 --> 03:39.870 +そして、 これが何であったかをもう一度思い出してもらうために、 その促しを求めるだけだ。 + +03:39.870 --> 03:48.150 +というわけで、 このアイテム・オブジェクトのプロンプト属性を探しています。 + +03:48.150 --> 03:52.050 +ええと、 過去に、 ええと、 2日前に。 + +03:52.140 --> 03:55.350 +でも、 これで皆さんがこのことをより身近に感じてくれることを願っています。 + +03:55.380 --> 03:58.110 +きれいに印刷できるように印刷させてください。 + +03:58.560 --> 04:01.740 +うーん、 そうだな。 + +04:02.010 --> 04:02.880 +ええと、 これです。 + +04:02.880 --> 04:04.620 +1ドル単位でいくらですか? 
+ +04:04.620 --> 04:08.350 +そしてタイトルがあり、 ディテールがあり、 価格がある。 + +04:08.380 --> 04:15.580 +さて、 なぜ私がこの問題のために特別にこのアイテムクラスのようなものに多くの時間を費やしているのか不思議に思うかもしれない。 + +04:15.580 --> 04:20.710 +このようなことは、 あなた自身が商業的な問題に直面したときに、 その問題を解決し、 + +04:20.740 --> 04:25.450 +データを加工する方法を探すために行うことなのです。 + +04:25.450 --> 04:28.420 +だから、 これは役に立つ実戦経験なんだ。 + +04:28.450 --> 04:34.000 +このコード、 つまりアイテム・クラスを正確に使うことはないだろうし、 おそらくこのようなプロンプトはないだろう。 + +04:34.000 --> 04:39.190 +しかし、 このようなテクニックは皆さんにも再現できるものなので、 それを理解し、 私がどのような決断を下しているのかを理解することが重要で、 + +04:39.190 --> 04:46.720 +そうすれば皆さんは自分のプロジェクトで自信を持って同じことができるようになる。 + +04:46.780 --> 04:51.160 +つまり、 これがトレーニングのプロンプトだ。 + +04:51.190 --> 04:53.500 +では、 テストプロンプトを見てみよう。 + +04:53.500 --> 04:57.700 +では、 テスト項目の1つ目を紹介しよう。 + +04:57.700 --> 05:00.790 +そして、 このメソッドをテスト・プロンプトと呼ぶことにする。 + +05:00.790 --> 05:11.980 +そして、 基本的にトレーニングのプロンプトを取るが、 モデルをテストするときに答えが明らかにならないように実際の価格を取り除くことを覚えておいてほしい。 + +05:11.980 --> 05:14.950 +そして、 その価格を埋めるのが仕事だ。 + +05:14.950 --> 05:19.810 +そして、 もしそれが何を記入することになっているのか知りたければ、 うーん、 トレーニングポイントのためにそれを取るよ。 + +05:19.960 --> 05:25.360 +電車に乗ったら、 そのまま値段を言えばいい。 + +05:25.360 --> 05:34.030 +そして、 これが実際の価格であり、 この価格に関連するものである。 + +05:34.120 --> 05:36.700 +しかし、 本当の値段は少し違う。 + +05:36.700 --> 05:43.090 +この訓練とテストの方法、 訓練項目とテスト項目のリスト、 そしてそれらをどのように呼ぶかについて、 + +05:43.240 --> 05:51.280 +記憶を呼び覚ますことができれば幸いだ。 + +05:51.490 --> 05:58.510 +このコードの塊は、 あなたのプロジェクトで正確にこのコードを使うことはないかもしれないが、 + +05:58.540 --> 06:03.280 +似たようなことはできるだろう。 + +06:03.280 --> 06:12.800 +だから、 これは一種の原理原則であり、 問題に取り組むための素晴らしい方法であり、 あなた自身の問題にも応用できるはずだ。 + +06:12.800 --> 06:21.200 +だから、 私たちが考え出したさまざまなモデルを、 本当に素早くシンプルな方法でテストできるようにしたかった。 + +06:21.410 --> 06:28.850 +テストデータの束をモデルに通し、 その結果を視覚化するんだ。 + +06:28.850 --> 06:38.180 +結局、 同じことを何度も繰り返したくなるので、 コードを何度もコピー・アンド・ペーストしなければなりませんでした。 + +06:38.180 --> 06:44.780 +そうする時はいつでも、 自分のために何らかのユーティリティを構築する時だと思う。 + +06:44.900 --> 06:50.930 +それで私が考えたのがこのクラス・テスターで、 + +06:50.930 --> 07:01.790 +モデルをテストできるようにするものです。 + +07:01.790 --> 07:10.130 +そして、 その唯一の仕事は、 アイテムを受け取り、 それに応答し、 + +07:10.130 --> 07:14.570 
+見積もり価格を返すことである。 + +07:14.570 --> 07:19.070 +このような関数を書いたら、 あとはテスターを呼び出すだけだ。 + +07:19.100 --> 07:24.800 +これから紹介するクラスは、 ドット・テストと関数名を渡す。 + +07:24.800 --> 07:26.720 +そして、 この機能を使う。 + +07:26.720 --> 07:34.910 +実際、 250の異なるテスト項目に対してこのテストを繰り返し実行し、 結果を予測する能力がどの程度あるかを調べ、 + +07:34.910 --> 07:38.600 +その情報を視覚的にまとめて返す。 + +07:38.870 --> 07:39.980 +そういうことだ。 + +07:39.980 --> 07:43.190 +そして、 ワークフローを明確に簡素化することができる。 + +07:43.430 --> 07:45.410 +ええと、 これは授業そのものです。 + +07:45.410 --> 07:46.460 +至ってシンプルだ。 + +07:46.460 --> 07:48.650 +色で対処しなければならないことがあるんだ。 + +07:48.650 --> 07:50.720 +何色かプリントすると言ったはずだ。 + +07:50.720 --> 08:01.100 +データ・ポイントを実行し、 そのデータ・ポイントを取得する。 + +08:01.130 --> 08:05.030 +ここで、 あなたが提供した関数が呼び出される。 + +08:05.030 --> 08:08.730 +モデルのモデルを取得するために、 データポイントとともに呼び出す。 + +08:08.730 --> 08:12.630 +あなたの機能で、 その価値を推測してください。 + +08:12.630 --> 08:18.480 +そして、 先ほど見た価格属性を呼び出すことで真実を得る。 + +08:18.480 --> 08:24.510 +そして、 その誤差はもちろん、 推測と真実の絶対的な差である。 + +08:24.540 --> 08:25.980 +簡単なことだ。 + +08:26.010 --> 08:37.410 +また、 二乗対数誤差と呼ばれるものも計算され、 二乗対数誤差の公式はまさにここにある通りである。 + +08:37.440 --> 08:43.770 +これは、 真実の対数プラス1から推測の対数プラス1を引いたものである。 + +08:44.160 --> 08:50.700 +なぜ、 このプラス1が計算式にあるのかは、 想像がつくだろう。 + +08:50.700 --> 08:55.500 +真実がゼロだったら、 数学はいらないからだ。 丸太が吹き飛ぶ。 + +08:55.560 --> 09:02.580 +だからこの式は、 例えば、 真実や推測がゼロの場合にうまく機能する。 + +09:03.120 --> 09:08.790 +対数誤差の2乗は、 もちろんこの2乗だ。 + +09:09.330 --> 09:14.400 +それから、 ちょっと処理をするんだ。 + +09:14.400 --> 09:17.850 +これからちょっとしたグラフを描くことができる。 + +09:18.030 --> 09:19.380 +ええと、 報告書を書いてください。 + +09:19.380 --> 09:23.880 +そしてこれが最終的に、 さっき言った機能なんだ。 + +09:23.880 --> 09:27.960 +このテストを実行するにはtestを呼び出せばいい。 + +09:28.170 --> 09:28.740 +オーケー。 + +09:28.770 --> 09:30.240 +そのセルを実行させてくれ。 + +09:30.240 --> 09:35.160 +だから、 このテストクラスで私がやったことのすべてを特に理解する必要はない。 + +09:35.160 --> 09:47.550 +このような小さなテストハーネスを作成し、 それに少し時間を投資することで、 モデルを実行した結果について本当の洞察を得ることができるようになるという原則だ。 + +09:47.550 --> 09:49.080 +それがここでの学びだ。 + +09:49.350 --> 09:54.450 +では、 考えられる最もシンプルなモデルは何だろう? + +09:54.450 --> 09:56.610 +最もシンプルなモデルとは? 
+ +09:56.610 --> 09:59.250 +本当のベースラインモデルを作る前に、 私たちは出てくるつもりだ。 + +09:59.250 --> 10:04.560 +私たちは2つのお笑いモデルをやるつもりで、 想像しうる最も基本的なことをやるおバカなモデルだ。 + +10:04.560 --> 10:07.110 +そして、 ちょっと挑戦させてほしい。 + +10:07.230 --> 10:12.760 +可能な限りシンプルなモデルは何か考えてみてほしい。 おそらく、 それよりももっとシンプルなものになるだろう。 + +10:12.940 --> 10:16.900 +ええと、 ではまず、 非常にシンプルな2つのモデルを紹介します。 + +10:16.900 --> 10:19.090 +1つ目は、 すでに答えを明らかにしている。 + +10:19.090 --> 10:24.130 +おそらく、 最初の1つは乱数を当てるだけだと見ただろう。 + +10:24.130 --> 10:25.540 +それだけだ。 + +10:25.540 --> 10:27.640 +これがその機能だ。 + +10:27.640 --> 10:31.090 +プロンプトを受け取らない関数がある。 + +10:31.090 --> 10:39.520 +そのアイテムを完全に無視し、 代わりに何を言われても気にしないようにするのだから。 + +10:39.520 --> 10:43.510 +1から1000の間の乱数を返す。 + +10:43.690 --> 10:47.500 +ええと、 すみません、 1~999の間です。 + +10:47.740 --> 10:53.320 +ええと、 ランダムシードを設定して、 このテストを実行するたびに同じになるようにします。 + +10:53.440 --> 10:54.880 +そして今、 我々はそれを実行している。 + +10:54.880 --> 11:00.730 +このテストをもう一度実行するには、 テスター・クラスでドット・テストを実行します。 + +11:00.730 --> 11:03.700 +そして、 単純にこの関数の名前を渡す。 + +11:03.700 --> 11:08.920 +関数を呼び出さないのは、 もし関数を呼び出したら、 一度だけ呼び出してそれでおしまいだからだ。 + +11:08.950 --> 11:11.590 +関数そのものをこのように渡す。 + +11:11.620 --> 11:17.350 +そして今、 これを実行に移し、 私のプログラムの結果を見てもらう。 + +11:17.800 --> 11:20.800 +このモデルは非常に素早いものだったからだ。 + +11:20.800 --> 11:24.280 +だから、 上にスクロールし直して、 あなたがここで見ているものを教えようと思う。 + +11:24.790 --> 11:29.770 +次の数回の授業で何度もやることになるので、 とても慣れることになる。 + +11:29.770 --> 11:37.060 +つまり、 ここに表示されている各行は、 テスト・データの異なる1点を表している。 + +11:37.060 --> 11:40.270 +そして、 この右側にあるアイテムが何であるかを教えてくれる。 + +11:40.270 --> 11:47.050 +LEDライトキット、 ハンドヘルドLED。 + +11:47.050 --> 11:49.270 +そしてその後、 短く切った。 + +11:49.270 --> 11:57.760 +そして、 このLEDライトキットに表示されているのは、 LEDライトキットの機能を推測したモデルです。 + +11:57.760 --> 12:01.810 +そして、 乱数ジェネレーターだから143ドルを当てた。 + +12:02.260 --> 12:03.760 +真実とは何か? 
+ +12:03.760 --> 12:12.980 +ちょっと驚くべきことに、 真実は289ドルで、 これは私が予想していたよりもむしろ高い。 + +12:12.980 --> 12:13.490 +たぶんね。 + +12:13.790 --> 12:17.150 +もしかしたら、 ノートパソコンが横に付いてくるかもしれない。 + +12:17.900 --> 12:19.190 +それがエラーなんだ。 + +12:19.220 --> 12:23.330 +それくらい、 このモデルは間違っている。 + +12:23.330 --> 12:26.930 +対数誤差の2乗については、 また別の機会に説明することにしよう。 + +12:26.930 --> 12:32.630 +しかし、 これは絶対的な誤差と相対的なパーセンテージの誤差の差をよりよく補正し、 + +12:32.630 --> 12:35.720 +よりよく反映させるためのものだ。 + +12:35.870 --> 12:40.970 +しかし、 私たちは何よりもこのことに焦点を当てるつもりだ。 なぜなら、 私たちにとって、 + +12:41.000 --> 12:45.920 +人間にとって、 推測と真実の違いを理解するのはとても簡単だからだ。 + +12:46.310 --> 12:51.200 +ええと、 赤で塗られているのは、 それが本当にひどい推測だと考えられているからです。 + +12:51.200 --> 12:53.270 +だから赤は本当にひどい。 + +12:53.270 --> 12:56.210 +黄色は......、 緑はまあまあかな。 + +12:56.210 --> 13:02.540 +その定義は、 上にスクロールして戻ってみると、 経験則のようなものがある。 + +13:02.540 --> 13:06.710 +私は、 40ドルか20%以内で当てたら緑と呼んでいる。 + +13:06.740 --> 13:09.950 +40ドルか20%以内ならグリーンだ。 + +13:09.950 --> 13:17.510 +40ドルというのはかなり気前がいいと思うかもしれないが、 ここでは値段の幅が大きく、 何かの説明を受けただけだということを覚えておいてほしい。 + +13:17.510 --> 13:20.420 +そして、 これを実行するのは本当に難しい。 + +13:20.420 --> 13:25.250 +だから、 40ドル以内で当てることができれば、 それは立派な仕事だと思う。 + +13:25.430 --> 13:28.070 +だから、 望むならもっと厳しくすることももちろんできる。 + +13:28.070 --> 13:29.930 +これを微調整するのはあなた次第だ。 + +13:30.080 --> 13:32.300 +でも、 それが私の原則だった。 + +13:32.810 --> 13:35.180 +では、 すべてのポイントを紹介しよう。 + +13:35.180 --> 13:38.630 +そして最後には、 ちょっとした視覚化もある。 + +13:38.660 --> 13:40.730 +では、 何が見えているのか。 + +13:40.760 --> 13:41.900 +私はこの図が大好きだ。 + +13:41.930 --> 13:43.340 +このような図をたくさん見ることになるだろう。 + +13:43.340 --> 13:44.960 +だから、 これに慣れることだ。 + +13:44.990 --> 13:51.530 +X軸は、 製品の実際の価値を示している。 + +13:51.560 --> 13:58.850 +また、 データサイエンティストがyと表現するのを耳にすることもありますが、 この軸はデータサイエンティストが言うところのyハット、 + +13:58.850 --> 14:04.760 +つまりモデルがどのような推定値を出したかを示しています。 + +14:04.760 --> 14:10.430 +つまり、 製品の実際の真価に対するモデルの推定値を見ているのだ。 + +14:10.430 --> 14:17.940 +つまり、 真の値は常に0から1000の範囲にある。 + +14:17.970 --> 14:20.520 +完全にランダムな点の集合。 + +14:20.520 --> 14:26.310 +この青い線は、 もちろん完全な推測の線を表している。 + +14:26.580 --> 14:34.920 
+もしモデルがこの青い線に沿って推測するようなことがあれば、 それはまさにグランド・トゥルース(地上真実)上にいることになり、 運がよかったと見ることができる。 + +14:34.920 --> 14:40.530 +もちろん、 幸運に恵まれるのはわずかな時間であり、 青い線に近い緑色の点は、 + +14:40.530 --> 14:44.310 +かなりうまくいっていることを表している。 + +14:44.550 --> 14:52.020 +黄色い点は「A」、 赤い点はトリックが外れて右に出てしまった場合だ。 + +14:52.500 --> 14:54.690 +ああ、 楽しかった。 + +14:54.690 --> 14:55.860 +楽しんでいただけたなら幸いだ。 + +14:55.860 --> 15:00.750 +もうひとつ、 とてもつまらないモデルがある。 + +15:00.750 --> 15:05.520 +非常に基本的なモデルとして、 ええと、 私が思いついた本当に基本的なものの1つは、 すべてに対してゼロを推測しよう、 + +15:05.520 --> 15:09.060 +あるいはすべてに対して1を推測しよう、 と思ったかもしれない。 + +15:09.330 --> 15:11.280 +私たちはそれよりも少しマシなことができる。 + +15:11.310 --> 15:19.620 +トレーニング・データ・セットを使って、 すべてのトレーニング・データ・セットにおける商品の平均価格はいくらか、 と言うことができる。 + +15:19.620 --> 15:22.710 +というのも、 我々のモデルにはトレーニング・データセットが提供されているからだ。 + +15:22.710 --> 15:26.970 +だから、 私たちはそれを一定の推測として考えることができる。 + +15:27.000 --> 15:33.930 +学習データセットに含まれるあらゆるものの平均価格がすべてだと仮定しよう。 + +15:34.140 --> 15:40.470 +基本的には、 トレーニングデータセットの価格を計算し、 + +15:40.470 --> 15:45.450 +その平均を求めます。 + +15:45.450 --> 15:52.440 +これで、 トレーニング・データ・セット内のあるポイントの平均価格が得られる。 + +15:52.470 --> 15:56.820 +そして、 これが我々の非常に洗練されたモデルだ。 + +15:56.820 --> 16:02.220 +繰り返しになるが、 これはアイテムを受け取り、 その平均値を返すだけである。 + +16:02.220 --> 16:05.940 +そのため、 渡されたものはすべて無視され、 ただ平均値が返される。 + +16:05.940 --> 16:08.490 +では、 これがどのようなものか見てみよう。 + +16:08.520 --> 16:13.170 +これから見る図がどのようなものか、 頭の中に思い浮かべることができるかどうか試してみよう。 + +16:13.290 --> 16:17.320 +どんな感じか想像できるかな? + +16:18.190 --> 16:20.800 +そして、 自分が正しいかどうかを確かめる準備ができているならね。 + +16:21.040 --> 16:21.790 +バム! 
+ +16:21.790 --> 16:23.890 +もちろん、 これがその図だ。 + +16:23.890 --> 16:32.920 +もし500点だと思ったのなら、 分布がより安いものに偏っていることを覚えておいてほしい。 + +16:32.950 --> 16:35.170 +元々ほどひどくはない。 + +16:35.200 --> 16:37.480 +修正したが、 ほんの少しだ。 + +16:37.690 --> 16:41.620 +ええと、 だから、 この金額は絶対にすべてに対して当てられたんだ。 + +16:41.950 --> 16:50.650 +そしてもちろん、 それが製品の価値と同じになった時点で緑、 そうでなければ黄色か赤になった。 + +16:50.680 --> 16:53.140 +だからスプレッドがある。 + +16:53.170 --> 16:56.590 +そして、 期待通りの結果を見ることができる。 + +16:56.590 --> 17:02.080 +実際の結果をスクロールバックしてみると、 赤の海が広がっており、 + +17:02.080 --> 17:07.240 +平均に近い値段のものには時折緑が混じっているのがわかる。 + +17:08.050 --> 17:11.080 +それでは、 楽しんでいただけたら幸いです。 + +17:11.080 --> 17:16.360 +今のところ、 まだ実際の機械学習モデルを見ていないが、 心配しないでほしい。 + +17:16.390 --> 17:17.290 +頑張るんだ。 + +17:17.380 --> 17:18.280 +それではまた次回 diff --git a/week5/community-contributions/subtitles/srts/59473021/ko_KR.srt b/week5/community-contributions/subtitles/srts/59473021/ko_KR.srt new file mode 100755 index 0000000..1aa5ae1 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473021/ko_KR.srt @@ -0,0 +1,763 @@ +WEBVTT + +00:00.860 --> 00:03.890 +주피터랩의 가장 좋은 장소에 잘 오셨어요 + +00:03.890 --> 00:06.920 +다시 대회 3일째인데요 + +00:06.920 --> 00:08.900 +6주 차에요 + +00:09.200 --> 00:13.580 +이 공책이 정말 기대되네요 여러분도 즐겁게 읽으시길 바라요 + +00:13.580 --> 00:16.130 +좋은 걸 준비해 뒀어요 + +00:16.310 --> 00:21.650 +오늘도 기본 모델을 살펴볼 거예요 + +00:21.650 --> 00:28.190 +여러 개의 import로 시작할게요. 모두 이전에도 봤던 것들이에요. 
+ +00:28.190 --> 00:30.080 +새로운 건 없어요 + +00:30.080 --> 00:33.890 +두 번째 셀에 새로운 수입품이 있어요 + +00:33.890 --> 00:41.150 +머신 러닝 판다를 위해 가져온 장비인데 여행하면서 많이 보셨을 + +00:41.150 --> 00:48.260 +겁니다 데이터를 비트처럼 정리하는 훌륭한 방법이죠 + +00:48.380 --> 00:51.320 +물론 넘피는 낯설지 않겠죠 + +00:51.320 --> 00:52.760 +그리고 스클론이죠 + +00:52.790 --> 01:00.620 +Scikit은 머신 러닝 라이브러리로 아주 인기가 많습니다 아주 흔한 알고리즘이 많아서 + +01:00.620 --> 01:07.550 +아주 유명하죠 오늘날에도 많이 사용될 겁니다 하지만 가장 중요한 건 선형 회귀입니다 + +01:07.580 --> 01:14.120 +데이터 과학자의 툴킷에서 표준으로 사용되는 선형 회귀 모델이죠 + +01:14.690 --> 01:20.630 +또 다른 것은 자연 언어 프로세싱 NLP와 관련된 것들입니다 + +01:20.660 --> 01:31.100 +겐심도 포함해서요 NLP와 관련된 아주 유용한 라이브러리입니다 아까 언급했던 워드 2 베c가 + +01:31.130 --> 01:32.780 +그 예죠 + +01:32.810 --> 01:40.010 +방금 언급했지만 언어를 벡터로 바꾸는 강력한 모델이죠 + +01:40.010 --> 01:44.450 +그 감방도 제가 운영하게 해주세요 + +01:44.780 --> 01:47.720 +하나 더 있어요 다른 수입품 세트요 + +01:47.810 --> 01:50.630 +공부가 더 필요해요 + +01:50.630 --> 01:57.590 +이건 따로 보관했어요 더 고급 머신 러닝을 위해 두 개를 수입하는 거니까요 + +01:57.590 --> 02:03.600 +하나는 벡터 회귀 지원입니다 벡터 머신과 패키지의 일부죠 + +02:03.600 --> 02:06.870 +다른 하나는 무작위 숲 퇴행자예요 + +02:06.930 --> 02:12.390 +어, 아까 임의의 숲이라고 했는데 그것도 같이 갖고 올게요 + +02:12.840 --> 02:13.800 +네 + +02:13.830 --> 02:17.700 +이 상수들의 집합은 여러분을 놀라게 할 수도 있어요 + +02:17.730 --> 02:19.860 +비트가 좀 특이하네요 + +02:20.070 --> 02:22.500 +그 생각은 잠시 보류하세요 + +02:22.500 --> 02:23.670 +나중에 올 거예요 + +02:23.670 --> 02:26.040 +이런 걸 해 본 적이 있다면 알아볼 수 있을 거예요 + +02:26.040 --> 02:32.910 +좀 이상하지만 역사적인 이유에서죠 + +02:33.090 --> 02:41.430 +특정 기호를 표준에 프린트하면 색이 변해서 초록색이 + +02:41.430 --> 02:42.960 +돼요 + +02:43.200 --> 02:47.370 +이유는 다 말할 순 없지만요 + +02:47.490 --> 02:53.820 +그리고 다시 색을 검정이나 흰색으로 바꾸죠 전경 색에 따라서요 + +02:53.850 --> 02:59.250 +이런 상수를 알고 손으로 직접 인쇄할 수 있으면 컬러 프린트가 쉬워지죠 + +02:59.250 --> 03:00.600 +오늘 할 거예요 + +03:00.840 --> 03:01.410 +네 + +03:01.410 --> 03:07.950 +그러니 저 상수는 우리가 잘 아는 일반적인 환경 셋업을 실행합니다 그리고 얼굴 안기로 로그인하세요 + +03:08.280 --> 03:09.510 +잘 모르겠어요 + +03:09.510 --> 03:12.660 +오늘은 포옹하는 얼굴 안 써요 굳이 로그인할 필요 + +03:12.660 --> 03:14.910 +없었는데 그냥 재미로 해봤어요 + +03:15.180 --> 03:18.900 
+매트플로틀리브는 유피터 노트북에 꼭 넣어주세요 + +03:18.990 --> 03:26.160 +피클 파일에서 데이터를 로드할 거예요 다시 만드는 대신에요 + +03:26.160 --> 03:33.180 +훈련 데이터를 다시 한 번 살펴보죠 + +03:33.180 --> 03:35.970 +첫 번째 훈련 데이터 포인트를 보죠 + +03:35.970 --> 03:39.870 +이게 뭐였는지 다시 상기시켜줄 프롬프트를 요청할게요 + +03:39.870 --> 03:48.150 +이건 이 아이템 개체의 프롬프트 특성을 찾는 겁니다 아까도 말씀드렸죠 + +03:48.150 --> 03:52.050 +과거에요 이틀 전쯤에 말이죠 + +03:52.140 --> 03:55.350 +하지만 이제 이런 것에 익숙해졌길 바라요 + +03:55.380 --> 03:58.110 +프린트해서 잘 나오게 할게요 + +03:58.560 --> 04:01.740 +자, 됐어요 + +04:02.010 --> 04:02.880 +여기 있네요 + +04:02.880 --> 04:04.620 +이건 얼마 정도 할까요? + +04:04.620 --> 04:08.350 +제목도 있고 디테일도 있고 가격도 있죠 + +04:08.380 --> 04:15.580 +왜 이 문제를 위해 아이템 클래스에 많은 시간을 투자하는지 궁금하실 텐데요 + +04:15.580 --> 04:20.710 +실제로 이런 일을 하게 되는 건 상업적인 문제를 해결하고 + +04:20.740 --> 04:25.450 +데이터를 조작할 방법을 찾을 때죠 + +04:25.450 --> 04:28.420 +현실 세계 경험이 유용할 거예요 + +04:28.450 --> 04:34.000 +이 코드나 아이템 클래스를 사용하지 않을 거고 이것과 똑같은 프롬프트를 갖지도 않겠죠 + +04:34.000 --> 04:39.190 +하지만 이런 기술은 여러분이 복제할 수 있습니다 따라서 제가 내리는 결정을 + +04:39.190 --> 04:43.480 +이해하고 이해하는 게 중요합니다 그래야 여러분도 자신의 프로젝트에서 + +04:43.480 --> 04:46.720 +자신감을 갖고 똑같이 할 수 있죠 + +04:46.780 --> 04:51.160 +이게 그 훈련 프롬프트예요 + +04:51.190 --> 04:53.500 +이제 테스트 프롬프트를 보죠 + +04:53.500 --> 04:57.700 +첫 번째 테스트 아이템을 선택할게요 + +04:57.700 --> 05:00.790 +메서드 테스트 프롬프트를 호출할게요 + +05:00.790 --> 05:09.190 +기본 훈련 프롬프트만 갖고 실제 가격을 없애 모델을 테스트할 때 답을 공개하지 않도록 + +05:09.190 --> 05:11.980 +하는 걸 기억하세요 + +05:11.980 --> 05:14.950 +그 가격을 채우는 게 그 기계의 일이죠 + +05:14.950 --> 05:19.810 +뭘 채우려는 건지 알고 싶을 땐 훈련용으로 가져가요 + +05:19.960 --> 05:25.360 +기차를 타고 가서 가격을 부르면 돼요 + +05:25.360 --> 05:31.720 +이게 이 제품의 실제 가격입니다 보시다시피 가장 비슷한 가격으로 + +05:31.720 --> 05:34.030 +반올림했죠 + +05:34.120 --> 05:36.700 +하지만 진짜 가격은 약간 달라요 + +05:36.700 --> 05:43.090 +이걸 보면 우리가 이 훈련과 테스트 메서드로 뭘 하는지 기억이 되살아나길 + +05:43.240 --> 05:51.280 +바랍니다 이 훈련과 테스트 아이템 목록과 그걸 어떻게 부르는지도요 + +05:51.490 --> 05:58.510 +이제 제가 아주 만족하는 걸 보여드리겠습니다 코드 덩어리죠 여러분 프로젝트에 정확히 + +05:58.540 --> 06:03.280 +이 코드를 사용하지 않을 수도 있지만 비슷한 걸 할 거예요 + +06:03.280 --> 06:09.800 +여러분이 해결해야 할 
문제에 접근하는 좋은 방법이고 여러분의 문제에 대해 + +06:09.800 --> 06:12.800 +복제할 수 있는 좋은 방법이죠 + +06:12.800 --> 06:21.200 +그래서 다양한 모델을 테스트하고 싶었어요 빠르고 간단한 방법으로요 + +06:21.410 --> 06:26.900 +그러려면 테스트 데이터를 모델에 적용해 결과를 + +06:26.900 --> 06:28.850 +시각화해야 해요 + +06:28.850 --> 06:33.740 +함수에서 가지고 있던 것입니다 그리고 계속해서 반복을 했고 코드를 복사하고 + +06:33.740 --> 06:38.180 +붙여넣기를 했습니다 반복적으로 같은 것을 해야 하기 때문이죠 + +06:38.180 --> 06:43.910 +그렇게 할 때마다 여러분을 위해 일종의 유틸리티를 만들 때인 것처럼 + +06:43.910 --> 06:44.780 +들리네요 + +06:44.900 --> 06:50.930 +그래서 클래스 테스터라는 걸 생각해냈어요 모델을 테스트할 수 + +06:50.930 --> 06:56.780 +있는 건데 어떤 식으로 작동하느냐면 원하는 함수를 쓸 수 있어요 + +06:56.780 --> 07:01.790 +어떤 함수든 제 예측 함수 같은 걸로 불릴 수 있죠 + +07:01.790 --> 07:10.130 +유일한 작업은 항목을 취하고 그에 반응해 추정 가격을 반환하는 겁니다 특정 예측을 + +07:10.130 --> 07:14.570 +하기 위해 원하는 어떤 코드든 입력하는 거죠 + +07:14.570 --> 07:19.070 +그런 함수를 작성하고 나면 테스터를 호출할 수 있어요 + +07:19.100 --> 07:24.800 +이 클래스는 . test를 하고 함수 이름을 통과하는 거예요 + +07:24.800 --> 07:26.720 +이 함수를 취할 거예요 + +07:26.720 --> 07:34.910 +250개의 다른 테스트 항목에 대해 반복적으로 호출하고 결과를 예측하는 데 얼마나 좋은지 + +07:34.910 --> 07:38.600 +보고 그 정보를 시각적으로 요약하죠 + +07:38.870 --> 07:39.980 +그게 목적이죠 + +07:39.980 --> 07:43.190 +워크플로우도 단순화할 수 있고요 + +07:43.430 --> 07:45.410 +이게 그 수업이에요 + +07:45.410 --> 07:46.460 +아주 간단해요 + +07:46.460 --> 07:48.650 +색상도 신경 써야 해요 + +07:48.650 --> 07:50.720 +색상 인쇄할 거라고 했잖아요 + +07:50.720 --> 07:58.280 +데이터 포인트를 실행하는데 실행 데이터 포인트는 하나의 데이터 포인트에 대한 비즈니스를 하는 메서드입니다. + +07:58.280 --> 08:01.100 +그 데이터 포인트를 얻는 것이죠. + +08:01.130 --> 08:05.030 +여기서 당신이 제공한 함수를 호출하죠 + +08:05.030 --> 08:08.730 +모델의 모델을 get get 데이터 포인트라고 부르죠 + +08:08.730 --> 08:12.630 +함수에서 얼마라고 하는지 맞혀 보세요 + +08:12.630 --> 08:18.480 +그리고 가격 특성을 호출해서 진실을 얻습니다. 조금 전에 보았던 것이죠. 
+ +08:18.480 --> 08:24.510 +물론 오류는 추측과 진실의 절대적인 차이예요 + +08:24.540 --> 08:25.980 +아주 간단해요 + +08:26.010 --> 08:35.040 +제곱로그에러라는 것도 계산하는데 제곱로그에러의 공식은 여기와 + +08:35.040 --> 08:37.410 +똑같아요 + +08:37.440 --> 08:43.770 +진실 더하기 1을 더하고 맞힌 것 더하기 1을 더해요 + +08:44.160 --> 08:50.700 +네, 그리고 왜 공식에 플러스 1이 들어가는지 짐작되실 거예요 + +08:50.700 --> 08:55.500 +진실이 0이라면 수학을 원치 않을 테니까요 일지를 폭파할 거예요 + +08:55.560 --> 09:02.580 +이 공식은 진실 혹은 추측이 0인 경우에 잘 맞아요 + +09:03.120 --> 09:08.790 +그러면 제곱 로그 에러는 물론 이것의 제곱이고요 + +09:09.330 --> 09:14.400 +이제 비트 프로세싱을 좀 할 거예요 + +09:14.400 --> 09:17.850 +작은 차트를 그릴 수 있어요 잠시 후 보여드리죠 + +09:18.030 --> 09:19.380 +보고서를 써요 + +09:19.380 --> 09:23.880 +이게 궁극적으로 제가 아까 언급한 함수예요 + +09:23.880 --> 09:27.960 +테스트를 호출해 이 테스트를 실행할 수 있어요 + +09:28.170 --> 09:28.740 +네 + +09:28.770 --> 09:30.240 +제가 그 감방을 처리할게요 + +09:30.240 --> 09:35.160 +그러니 제가 이 시험에서 풀었던 모든 걸 이해하실 필요는 없어요 + +09:35.160 --> 09:41.310 +이렇게 멋진 테스트 하니스를 만드는 건 원칙이에요 약간의 시간을 투자해 모델을 + +09:41.310 --> 09:47.550 +실행한 결과에 대한 진정한 통찰력을 얻을 수 있도록 하는 거죠 비트로 + +09:47.550 --> 09:49.080 +여기서 배우는 거죠 + +09:49.350 --> 09:54.450 +가장 간단한 모델은 뭘까요? + +09:54.450 --> 09:56.610 +가장 단순한 모델이 뭘까요? 
+ +09:56.610 --> 09:59.250 +기본 모델 작업을 하기 전에 위로 올라갈 거예요 + +09:59.250 --> 10:03.960 +두 가지 코미디 모델을 만들 거예요 우스꽝스러운 모델인데 우리가 상상할 수 있는 가장 기본적인 + +10:03.960 --> 10:04.560 +거죠 + +10:04.560 --> 10:07.110 +잠깐 질문 하나 할게요 + +10:07.230 --> 10:11.680 +가장 간단한 모델이 무엇인지 생각해 보세요 아마 그것보다 더 간단한 + +10:11.680 --> 10:12.760 +걸 거예요 + +10:12.940 --> 10:16.900 +첫 번째는 아주 간단한 모델 두 개예요 + +10:16.900 --> 10:19.090 +첫 번째 문제는 벌써 답을 공개했어요 + +10:19.090 --> 10:24.130 +아마 첫 번째 숫자는 무작위로 추측하는 걸 보셨을 거예요 + +10:24.130 --> 10:25.540 +그게 다일 거예요 + +10:25.540 --> 10:27.640 +여기 함수가 있어요 + +10:27.640 --> 10:31.090 +여기 함수가 있는데 프롬프트가 없어요 + +10:31.090 --> 10:36.820 +항목을 선택하는데∙∙∙ 중요하지 않아요 항목을 완전히 무시할 테니까요 대신 + +10:36.820 --> 10:39.520 +무슨 말을 들었는지 신경 쓰지 않죠 + +10:39.520 --> 10:43.510 +1에서 1000 사이의 무작위 숫자를 반환하죠 + +10:43.690 --> 10:47.500 +죄송해요, 그건 1과 999를 포함해서죠 + +10:47.740 --> 10:53.320 +무작위로 종자를 설정해서 매번 같은 결과를 얻도록 할 거예요 + +10:53.440 --> 10:54.880 +이제 작동시켜요 + +10:54.880 --> 11:00.730 +이 테스트를 다시 실행하는 방법은 테스터 클래스로 가서 .테스트를 보여드렸죠 + +11:00.730 --> 11:03.700 +그런 다음 이 함수의 이름을 전달해요 + +11:03.700 --> 11:08.920 +함수를 호출하지 않아요 왜냐하면 함수를 호출하면 한 번만 호출하거든요 + +11:08.950 --> 11:11.590 +함수 그 자체로 이렇게 전달해요 + +11:11.620 --> 11:17.350 +이제 이걸 실행하면 제 프로그램의 결과를 보실 수 있어요 + +11:17.800 --> 11:20.800 +아주 빨리 진행됐죠 아주 빠른 모델이었거든요 + +11:20.800 --> 11:24.280 +다시 스크롤을 올려 뭐가 보이는지 말씀드리죠 너무 많아서요 + +11:24.790 --> 11:28.270 +여러분이 아주 익숙해지게 될 겁니다 다음 몇 클래스에서 이걸 많이 + +11:28.270 --> 11:29.770 +할 테니까요 get it + +11:29.770 --> 11:37.060 +여기 보이는 각 행은 다른 테스트 데이터 포인트를 나타내요 + +11:37.060 --> 11:40.270 +오른쪽에 있는 아이템이 뭔지 알려주고 있어요 + +11:40.270 --> 11:47.050 +이건 고독스 ML 60 바이 LED예요 LED 조명 키트죠 휴대용 LED예요 + +11:47.050 --> 11:49.270 +그 후론 짧게 편집했어요 + +11:49.270 --> 11:55.630 +이 LED 조명 키트는 모델과 그 기능의 함수를 + +11:55.630 --> 11:57.760 +보여 주는데요 + +11:57.760 --> 12:01.810 +무작위 번호 생성기라서 143달러로 나왔어요 + +12:02.260 --> 12:03.760 +진실이 뭐죠? 
+ +12:03.760 --> 12:09.940 +그런데 놀랍게도 289달러였어요 제가 기대한 금액보다 좀 높네요 하지만 + +12:10.000 --> 12:12.980 +거기 짧게 묘사된 걸 보면 그렇죠 + +12:12.980 --> 12:13.490 +어쩌면요 + +12:13.790 --> 12:17.150 +옆에 노트북 같은 게 딸려 있을지도 몰라요 + +12:17.900 --> 12:19.190 +그게 오류예요 + +12:19.220 --> 12:23.330 +이 모델이 이 정도로 잘못하고 있다는 거죠 + +12:23.330 --> 12:26.930 +제곱 로그 에러는 다음에 얘기하도록 하죠 + +12:26.930 --> 12:32.630 +하지만 이건 보완을 위한 겁니다 절대 오류와 상대적인 오차의 + +12:32.630 --> 12:35.720 +차이를 더 잘 반영하는 거죠 + +12:35.870 --> 12:40.970 +하지만 이 부분에 집중할 겁니다 추측과 진실의 차이는 + +12:41.000 --> 12:45.920 +우리 인간에게는 너무나 쉽게 이해되니까요 + +12:46.310 --> 12:51.200 +추측이 형편없어서 빨간색으로 칠한 거예요 + +12:51.200 --> 12:53.270 +빨간색은 정말 끔찍해요 + +12:53.270 --> 12:56.210 +노란색은... 초록색이면 충분해요 + +12:56.210 --> 13:00.530 +그에 대한 정의는 스크롤을 다시 올려보면 경험에서 + +13:00.560 --> 13:02.540 +나온 비트가 있는데요 + +13:02.540 --> 13:06.710 +40달러나 20% 이하면 녹색이라고 부르죠 + +13:06.740 --> 13:09.950 +40달러나 20% 이하면 녹색이에요 + +13:09.950 --> 13:15.140 +40달러라니 관대하다고 생각하실 수도 있지만 가격 범위가 넓다는 걸 기억하세요 + +13:15.140 --> 13:17.510 +설명만 듣고 바로 결정해야 해요 + +13:17.510 --> 13:20.420 +정말 어려운 일이에요 + +13:20.420 --> 13:25.250 +만약 40달러 오차 범위 안에 든다면 잘 작동하고 있는 거죠 + +13:25.430 --> 13:28.070 +원하신다면 더 엄격하게 하실 수 있어요 + +13:28.070 --> 13:29.930 +이건 당신이 손보면 돼요 + +13:30.080 --> 13:32.300 +하지만 그게 제 원칙이었어요 + +13:32.810 --> 13:35.180 +여기 포인트가 다 있어요 + +13:35.180 --> 13:38.630 +마지막엔 멋진 시각화가 있어요 + +13:38.660 --> 13:40.730 +이건 뭔가요? 
+ +13:40.760 --> 13:41.900 +이 도표 마음에 들어요 + +13:41.930 --> 13:43.340 +이 도표들을 많이 보게 될 거예요 + +13:43.340 --> 13:44.960 +Get in get에 익숙해지세요 + +13:44.990 --> 13:51.530 +엑스 축은 대지의 진리를 보여줍니다 제품의 실제 값이죠 + +13:51.560 --> 13:58.850 +데이터 과학자들이 Y라고 설명하는 걸 들을 수도 있어요 반면 축을 보면 + +13:58.850 --> 14:04.760 +데이터 과학자들이 어떤 값을 계산했는지 알 수 있죠 + +14:04.760 --> 14:10.430 +제품의 실제 가치와 모델의 추정치를 비교하는 거죠 + +14:10.430 --> 14:17.460 +그래서 진정한 값은 항상 데이터 집합에서 0에서 1000인 사이로 퍼져 있어요 모델의 값은 사방에 + +14:17.460 --> 14:17.940 +있죠 + +14:17.970 --> 14:20.520 +무작위로 찍혀 있어요 + +14:20.520 --> 14:26.310 +이 파란 선은 완벽한 추리를 의미해요 + +14:26.580 --> 14:33.360 +모델이 이 파란 선을 따라 맞힌다면 바닥에 정확히 게스트예요 운이 좋았다는 + +14:33.360 --> 14:34.920 +걸 알 수 있죠 + +14:34.920 --> 14:40.530 +물론 시간이 촉박하긴 하지만요. 파란 선 근처에 있는 초록색 점들은 + +14:40.530 --> 14:44.310 +get 작업을 꽤 잘했다는 걸 나타내요. + +14:44.550 --> 14:52.020 +노란 점은 A가 있는 곳이고 빨간 점은 묘기를 놓쳐서 저기로 갔을 때죠 + +14:52.500 --> 14:54.690 +정말 재미있었어요 + +14:54.690 --> 14:55.860 +즐거우셨길 바라요 + +14:55.860 --> 14:59.670 +우리가 할 수 있는 아주 사소한 모델이 또 있어요 제가 요청했을 때 여러분이 생각하셨던 + +14:59.670 --> 15:00.750 +것일 수도 있죠 + +15:00.750 --> 15:05.520 +아주 기본적인 모델에서 여러분은 생각하셨겠지만 제게 정말 기본적인 + +15:05.520 --> 15:09.060 +건 모든 걸 0으로 맞추거나 1로 맞추자는 거였어요 + +15:09.330 --> 15:11.280 +더 잘할 수 있어요 + +15:11.310 --> 15:18.600 +훈련 데이터 세트를 가지고 모든 훈련 데이터 집합의 평균 가격이 얼마인지 물어볼 + +15:18.600 --> 15:19.620 +수 있어요 + +15:19.620 --> 15:22.710 +왜냐하면 우리 모델은 훈련 데이터 집합이 제공되거든요 + +15:22.710 --> 15:26.970 +그래서 그걸 계속 추측하는 거라고 볼 수 있죠 + +15:27.000 --> 15:33.930 +모든 게 훈련 데이터 집합에 있는 모든 것의 평균 가격이라고 가정해 보죠 + +15:34.140 --> 15:40.470 +기본적으로 훈련 데이터의 가격을 계산하고 평균을 내요 훈련 + +15:40.470 --> 15:45.450 +가격을 전부 합쳐서 그걸 개수로 나눈 거죠 + +15:45.450 --> 15:52.440 +훈련 데이터 집합에서 1점의 가격을 알려줄 거예요 + +15:52.470 --> 15:56.820 +이건 아주 정교한 모델이에요 아주 정교한 모델이죠 + +15:56.820 --> 16:02.220 +다시 말씀드리지만 아이템을 선택하고 평균을 반환해요 + +16:02.220 --> 16:05.940 +통과한 건 무시하고 평균만 반환해요 + +16:05.940 --> 16:08.490 +어떻게 보일지 한 번 보죠 + +16:08.520 --> 16:13.170 +어떤 도표를 보게 될지 마음속으로 그릴 수 있는지 보세요 + +16:13.290 --> 16:17.320 +어떤 모습일지 정확히 상상할 수 있으면 좋겠네요 + 
+16:18.190 --> 16:20.800 +당신이 옳았는지 확인할 준비가 됐는지도요 + +16:21.040 --> 16:21.790 +좋아요! + +16:21.790 --> 16:23.890 +이건 물론 도표고요 + +16:23.890 --> 16:29.650 +고정된 지점에서 추측하는 거죠 500이 될 거라고 생각한다면 유통 방향이 좀 더 저렴한 + +16:29.650 --> 16:32.920 +물건 쪽으로 기울었다는 걸 기억하세요 + +16:32.950 --> 16:35.170 +처음만큼 심하진 않아요 + +16:35.200 --> 16:37.480 +비트를 약간 수정했어요 + +16:37.690 --> 16:41.620 +모든 것에 이 정도 양을 계산했어요 + +16:41.950 --> 16:48.670 +물론 제품의 값과 같은 지점에서 초록이 나왔어요 아니면 노랑이나 + +16:48.670 --> 16:50.650 +빨강이겠죠 + +16:50.680 --> 16:53.140 +이렇게 퍼져 있어요 + +16:53.170 --> 16:56.590 +예상했던 결과를 볼 수 있어요 + +16:56.590 --> 17:02.080 +실제 결과를 다시 스크롤하면 붉은 바다가 보일 겁니다 + +17:02.080 --> 17:07.240 +가끔 녹색도 보이고요 평균에 가까운 비용으로요 + +17:08.050 --> 17:11.080 +그럼 즐겁게 보시길 바랄게요 + +17:11.080 --> 17:15.490 +아직 실제 머신 러닝 모델은 보지 못했지만 곧 볼 테니 걱정 + +17:15.490 --> 17:16.360 +마세요 + +17:16.390 --> 17:17.290 +조금만 참아요 + +17:17.380 --> 17:18.280 +다음에 봐요 diff --git a/week5/community-contributions/subtitles/srts/59473071/en_US.srt b/week5/community-contributions/subtitles/srts/59473071/en_US.srt new file mode 100755 index 0000000..108c5e9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473071/en_US.srt @@ -0,0 +1,280 @@ +WEBVTT + +00:01.340 --> 00:02.180 +Hey, gang. + +00:02.210 --> 00:04.250 +Look, I know what you're thinking. + +00:04.250 --> 00:07.130 +This week was supposed to be training week. + +00:07.160 --> 00:11.900 +I set it all up to be all about fine tuning frontier models. + +00:11.900 --> 00:13.250 +And what haven't we done? + +00:13.250 --> 00:15.110 +We haven't fine tuned frontier models. + +00:15.110 --> 00:16.490 +But I have good news. + +00:16.490 --> 00:18.140 +Today is the day. + +00:18.170 --> 00:19.460 +Today is the day. + +00:19.460 --> 00:25.550 +But I should prepare you that today may also disappoint in some ways as well as we will. + +00:25.580 --> 00:26.720 +We will find out. 
+ +00:26.900 --> 00:35.720 +Um, but be prepared for that as we embark upon a whole brave new world, we're getting finally to training. + +00:35.720 --> 00:42.710 +And as a quick recap, you can already generate text and code with frontier models, with APIs and using + +00:42.710 --> 00:51.140 +open source models hugging face through both the pipelines and the the lower level transformers APIs. + +00:51.140 --> 00:58.640 +Like using the models directly, you can create advanced Rag pipelines using Lange chain and not using + +00:58.640 --> 00:59.390 +Lange chain. + +00:59.390 --> 01:06.620 +And most importantly, you can now follow a five step strategy for problem solving that includes a lot + +01:06.650 --> 01:09.110 +of time we seem to spend curating data. + +01:09.140 --> 01:13.910 +It turns out that lots of time is spent curating data and then making a baseline model. + +01:13.910 --> 01:16.760 +We did some training, but it was training of a baseline model. + +01:16.790 --> 01:22.970 +Traditional ML or we've not yet done is trained, fine tuned, a frontier model. + +01:22.970 --> 01:24.500 +And that's what we're going to do today. + +01:24.530 --> 01:30.170 +Going to understand the process for fine tuning in front of your model, create a data set for it, + +01:30.200 --> 01:35.360 +run fine tuning, and then test our new fine tuned model. + +01:35.930 --> 01:42.290 +And just as a point of order to explain when you're talking about training, when in the context of + +01:42.290 --> 01:48.290 +these kinds of models, fine tuning is synonymous with with training, we never, of course, train + +01:48.290 --> 01:54.020 +one of these things from scratch, because that would cost north of hundreds of millions of dollars. 
+ +01:54.020 --> 02:00.680 +So we're always taking an existing model that's been trained, a pre-trained model, and we are doing + +02:00.680 --> 02:05.670 +more training, taking advantage of transfer learning, which is this theory that says that you can + +02:05.670 --> 02:10.410 +just take an existing pre-trained model, do a bit more training, and it will be better at the new + +02:10.410 --> 02:12.030 +task you're training it for. + +02:12.120 --> 02:14.760 +And that's also known as fine tuning. + +02:15.450 --> 02:24.030 +So with that vocabulary out of the way, let's just talk about the three steps to fine tuning with OpenAI. + +02:24.180 --> 02:30.930 +There's three things we need to follow in order to take GPT four zero or GPT four mini that we will + +02:30.930 --> 02:33.390 +take and fine tune it. + +02:33.420 --> 02:39.270 +The first step is you need to prepare training data that it will use for training. + +02:39.270 --> 02:44.820 +We obviously use training data in the context of the traditional models, linear regression and so on. + +02:44.820 --> 02:49.170 +We got some training examples and we pumped it through a linear regression model. + +02:49.170 --> 02:50.790 +So we have to create training data. + +02:50.820 --> 02:54.720 +Now um, and then we have to upload it to OpenAI. + +02:54.720 --> 03:02.520 +And it expects that training data in a particular format called JSON L which stands for JSON lines, + +03:02.670 --> 03:07.160 +which is subtly different as I will show you to normal JSON. + +03:07.640 --> 03:13.880 +We are then going to run our training, our fine tuning and these charts all pointing downwards. + +03:13.880 --> 03:14.900 +Might might trouble you. + +03:14.930 --> 03:20.270 +It looks like things are going wrong, but au contraire, when it comes to training, your one is watching. + +03:20.300 --> 03:21.590 +Training loss. + +03:21.710 --> 03:23.960 +And of course you want loss to go down. 
+ +03:23.960 --> 03:25.880 +That means that things are getting better. + +03:25.970 --> 03:31.610 +And so we will be watching our charts like a hawk and trying to make sure that our losses are coming + +03:31.610 --> 03:32.480 +down. + +03:33.200 --> 03:39.560 +Uh, and most importantly, you look at training loss during the course of a batch, and you also look + +03:39.590 --> 03:44.090 +at validation loss, which is on a held out data set. + +03:44.090 --> 03:45.740 +Uh, is that coming down, too? + +03:45.770 --> 03:49.940 +Because you may be overfitting to your training data if you just watch training loss. + +03:49.970 --> 03:55.940 +And that actually isn't a problem in our case, because we're only going to be running one epoch through + +03:55.940 --> 03:56.990 +our training data. + +03:56.990 --> 03:58.850 +And epoch is what you call it. + +03:58.940 --> 04:04.340 +When you go, you take take a complete training run all the way through your training data, and then + +04:04.340 --> 04:07.370 +you repeat and do it all a second time with the same data. + +04:07.400 --> 04:10.070 +That would be called a second epoch of training. + +04:10.490 --> 04:16.130 +And we are not going to do that because we have so much training data that we don't need to do that. + +04:16.130 --> 04:19.580 +We might as well just use a bit more training data and do one epoch. + +04:19.580 --> 04:26.360 +And since all of the data will always be new data, the training loss is just as useful for us as validation + +04:26.360 --> 04:27.230 +loss. + +04:28.100 --> 04:31.760 +And then finally you evaluate your results. + +04:31.760 --> 04:38.240 +And then based on what you see, you tweak and you repeat and keep going. + +04:38.390 --> 04:40.310 +So those are the stages. + +04:41.060 --> 04:44.870 +And as I say, the first of them is to prepare the data. 
+ +04:45.050 --> 04:52.970 +So OpenAI expects it in this format called JSON L, which means that it is a series of lines of JSON + +04:52.970 --> 04:53.750 +data. + +04:53.900 --> 04:56.030 +And you may think, isn't that just the same as JSON data? + +04:56.030 --> 04:56.870 +It's not. + +04:56.870 --> 04:59.000 +It's not in a in a collection. + +04:59.000 --> 04:59.900 +So it's not in a list. + +04:59.900 --> 05:02.770 +It doesn't start with a square bracket with with commas. + +05:02.770 --> 05:10.720 +It's just each row, each line in this file is a separate JSON object starting and ending with curly + +05:10.720 --> 05:11.410 +braces. + +05:11.410 --> 05:16.270 +It's a subtle distinction, but it can catch you out if you're not expecting that you're not writing + +05:16.270 --> 05:19.390 +a JSON object, because that would have a list around it. + +05:19.390 --> 05:26.320 +You're writing rows of JSON to this file, and then each row is going to be something that is mostly + +05:26.320 --> 05:27.550 +very familiar to us. + +05:27.580 --> 05:30.730 +It will have one attribute called messages. + +05:30.730 --> 05:38.890 +And what goes in there is the thing that we know so well, the list of dictionaries where each dictionary + +05:38.920 --> 05:40.750 +has a role and a content. + +05:40.750 --> 05:42.100 +It's a conversation. + +05:42.100 --> 05:45.460 +So that is what is going in each row. + +05:45.610 --> 05:51.040 +As you will see, we will craft this particular type of data set for uploading. + +05:52.240 --> 05:53.260 +All right. + +05:53.680 --> 05:57.130 +With that enough enough chit chat. + +05:57.160 --> 05:58.900 +Let's go to Jupyter Lab. + +05:58.900 --> 06:01.000 +Let's actually run this thing. + +06:01.000 --> 06:05.230 +And for the first time we will train a frontier model. + +06:05.260 --> 06:06.370 +Let's do it. 
diff --git a/week5/community-contributions/subtitles/srts/59473071/ja_JP.srt b/week5/community-contributions/subtitles/srts/59473071/ja_JP.srt new file mode 100755 index 0000000..99ddd23 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473071/ja_JP.srt @@ -0,0 +1,232 @@ +WEBVTT + +00:01.340 --> 00:02.180 +やあ、 みんな。 + +00:02.210 --> 00:04.250 +何を考えているかは分かる。 + +00:04.250 --> 00:07.130 +今週はトレーニングウィークのはずだった。 + +00:07.160 --> 00:11.900 +フロンティア・モデルを微調整するために、 私はすべてを設定した。 + +00:11.900 --> 00:13.250 +私たちは何をしなかったのか? + +00:13.250 --> 00:15.110 +私たちはフロンティアモデルを微調整していない。 + +00:15.110 --> 00:16.490 +だが、 いいニュースがある。 + +00:16.490 --> 00:18.140 +今日がその日だ。 + +00:18.170 --> 00:19.460 +今日がその日だ。 + +00:19.460 --> 00:25.550 +しかし、 今日もまた、 私たちと同じように、 ある意味で失望させられるかもしれないことを覚悟しておかなければならない。 + +00:25.580 --> 00:26.720 +いずれ分かるだろう。 + +00:26.900 --> 00:35.720 +うーん、 でも、 勇敢な新世界に乗り出すのだから、 その覚悟はしておいてほしい。 + +00:35.720 --> 00:51.140 +簡単にまとめると、 すでにフロンティア・モデルを使ってテキストやコードを生成したり、 APIを使ったり、 パイプラインと下位レベルのトランスフォーマーAPIの両方を通してオープンソースのモデルを抱き合わせで使ったりすることができる。 + +00:51.140 --> 00:59.390 +モデルを直接使用するのと同様に、 ランゲ・チェインを使用して高度なラグ・パイプラインを作成することも、 ランゲ・チェインを使用しないこともできます。 + +00:59.390 --> 01:09.110 +そして最も重要なことは、 問題解決のための5つのステップ戦略に従うことができるようになったことだ。 + +01:09.140 --> 01:13.910 +その結果、 多くの時間をデータのキュレーションに費やし、 ベースラインモデルを作成することが判明した。 + +01:13.910 --> 01:16.760 +トレーニングも行ったが、 それはベースラインモデルのトレーニングだった。 + +01:16.790 --> 01:22.970 +伝統的なMLは、 私たちがまだ行っていない、 訓練された、 微調整された、 フロンティアモデルである。 + +01:22.970 --> 01:24.500 +そして、 それが今日私たちがやろうとしていることだ。 + +01:24.530 --> 01:30.170 +ファイン・チューニングのプロセスをモデルの前で理解し、 そのためのデータセットを作成し、 ファイン・チューニングを実行し、 + +01:30.200 --> 01:35.360 +そしてファイン・チューニングされた新しいモデルをテストする。 + +01:35.930 --> 01:42.290 +この種のモデルの文脈では、 + +01:42.290 --> 01:54.020 +微調整はトレーニングと同義なのです。 + +01:54.020 --> 02:00.680 +つまり私たちは常に、 訓練済みの既存のモデル(事前訓練済みモデル)を使って、 転移学習を利用しながら訓練を重ねているのです。 + +02:00.680 --> 02:12.030 +転移学習とは、 事前訓練済みの既存のモデルを使い、 もう少し訓練を重ねれば、 新しいタスクに対してより優れた訓練ができるようになるという理論です。 + +02:12.120 --> 02:14.760 +そしてそれは微調整とも呼ばれる。 + +02:15.450 --> 
02:24.030 +それでは、 OpenAIで微調整を行うための3つのステップについて説明しよう。 + +02:24.180 --> 02:33.390 +GPT4ゼロ、 あるいはGPT4ミニを獲得し、 微調整するためには、 3つのことを守る必要がある。 + +02:33.420 --> 02:39.270 +最初のステップは、 トレーニングに使用するトレーニングデータを準備することだ。 + +02:39.270 --> 02:44.820 +従来のモデル、 線形回帰などの文脈では、 明らかにトレーニングデータを使用する。 + +02:44.820 --> 02:49.170 +我々はいくつかのトレーニング例を得て、 それを線形回帰モデルにかけた。 + +02:49.170 --> 02:50.790 +だから、 トレーニングデータを作らなければならない。 + +02:50.820 --> 02:54.720 +そしてそれをOpenAIにアップロードしなければならない。 + +02:54.720 --> 03:07.160 +そして、 JSON L(JSON行の略)と呼ばれる特定のフォーマットでトレーニング・データを受け取る。 + +03:07.640 --> 03:13.880 +これからトレーニング、 微調整を行い、 これらのチャートはすべて下向きになる。 + +03:13.880 --> 03:14.900 +あなたを悩ませるかもしれない。 + +03:14.930 --> 03:20.270 +物事がうまくいっていないように見えるが、 とんでもない。 + +03:20.300 --> 03:21.590 +トレーニングの損失。 + +03:21.710 --> 03:23.960 +そしてもちろん、 あなたは損失が減ることを望んでいる。 + +03:23.960 --> 03:25.880 +つまり、 状況は良くなっているということだ。 + +03:25.970 --> 03:32.480 +だから私たちはタカのようにチャートを注視し、 損失が減少していることを確認しようとする。 + +03:33.200 --> 03:39.560 +そして最も重要なのは、 バッチの過程でトレーニングのロスを調べ、 + +03:39.590 --> 03:44.090 +さらに検証のロスを調べることだ。 + +03:44.090 --> 03:45.740 +ええと、 それも降りてくるんですか? 
+ +03:45.770 --> 03:49.940 +トレーニングの損失を見るだけでは、 トレーニングデータにオーバーフィットする可能性があるからだ。 + +03:49.970 --> 03:56.990 +なぜなら、 トレーニング・データを1エポック実行するだけだからだ。 + +03:56.990 --> 03:58.850 +そしてエポックとは、 あなたがそう呼ぶものだ。 + +03:58.940 --> 04:07.370 +そして、 同じデータで2回目を行う。 + +04:07.400 --> 04:10.070 +これはトレーニングの第2のエポックと呼ばれるものだ。 + +04:10.490 --> 04:16.130 +なぜなら、 我々はトレーニングデータをたくさん持っているからだ。 + +04:16.130 --> 04:19.580 +もう少し多くのトレーニングデータを使って、 1エポックやったほうがいいかもしれない。 + +04:19.580 --> 04:27.230 +そして、 すべてのデータは常に新しいデータであるため、 トレーニング損失は検証損失と同様に我々にとって有用である。 + +04:28.100 --> 04:31.760 +そして最後に結果を評価する。 + +04:31.760 --> 04:38.240 +そして、 その結果に基づいて微調整をし、 繰り返し、 続けていく。 + +04:38.390 --> 04:40.310 +これがその段階だ。 + +04:41.060 --> 04:44.870 +その第一は、 データを準備することだ。 + +04:45.050 --> 04:53.750 +だからOpenAIは、 JSON Lと呼ばれるフォーマット、 つまりJSONデータの一連の行でそれを期待する。 + +04:53.900 --> 04:56.030 +JSONデータと同じじゃないか、 と思うかもしれない。 + +04:56.030 --> 04:56.870 +それは違う。 + +04:56.870 --> 04:59.000 +コレクションの中には入っていない。 + +04:59.000 --> 04:59.900 +だからリストには入っていない。 + +04:59.900 --> 05:02.770 +カンマ付きの角括弧では始まらない。 + +05:02.770 --> 05:11.410 +このファイルの各行は、 中かっこで始まり、 中かっこで終わる別々のJSONオブジェクトである。 + +05:11.410 --> 05:16.270 +これは微妙な違いだが、 JSONオブジェクトを記述していないことを想定していない場合、 + +05:16.270 --> 05:19.390 +リストに囲まれてしまうため、 注意を引くことができる。 + +05:19.390 --> 05:27.550 +あなたはこのファイルにJSONの行を書き、 それぞれの行は私たちにとって非常に馴染みのあるものになる。 + +05:27.580 --> 05:30.730 +メッセージという属性を持つ。 + +05:30.730 --> 05:40.750 +そしてそこに入るのは、 私たちがよく知っている辞書のリストであり、 各辞書には役割と内容がある。 + +05:40.750 --> 05:42.100 +それは会話だ。 + +05:42.100 --> 05:45.460 +つまり、 これが各列に入るものだ。 + +05:45.610 --> 05:51.040 +おわかりのように、 この特殊なデータセットをアップロードするために細工をする。 + +05:52.240 --> 05:53.260 +分かった。 + +05:53.680 --> 05:57.130 +雑談はこれくらいにしておこう。 + +05:57.160 --> 05:58.900 +Jupyter Labに行ってみよう。 + +05:58.900 --> 06:01.000 +実際に走らせてみよう。 + +06:01.000 --> 06:05.230 +そして初めてフロンティアモデルをトレーニングする。 + +06:05.260 --> 06:06.370 +そうしよう。 diff --git a/week5/community-contributions/subtitles/srts/59473071/ko_KR.srt b/week5/community-contributions/subtitles/srts/59473071/ko_KR.srt new file mode 100755 index 
0000000..f6fc0fb --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473071/ko_KR.srt @@ -0,0 +1,274 @@ +WEBVTT + +00:01.340 --> 00:02.180 +여러분 + +00:02.210 --> 00:04.250 +무슨 생각하는지 알아요 + +00:04.250 --> 00:07.130 +이번 주는 훈련 주간이었어야 해요 + +00:07.160 --> 00:11.900 +개척 시대 모델들을 세심하게 조정하는 일을 시작했죠 + +00:11.900 --> 00:13.250 +우리가 안 한 게 뭐죠? + +00:13.250 --> 00:15.110 +개척 시대 모델은 잘 몰라요 + +00:15.110 --> 00:16.490 +좋은 소식이 있어요 + +00:16.490 --> 00:18.140 +오늘이 그날이에요 + +00:18.170 --> 00:19.460 +오늘이 그날이에요 + +00:19.460 --> 00:25.550 +하지만 오늘 우리가 실망할 수도 있다는 걸 미리 알려드리죠 + +00:25.580 --> 00:26.720 +곧 알게 되겠죠 + +00:26.900 --> 00:35.720 +하지만 그걸 각오해야 해요 완전히 새로운 세상에 나가 드디어 훈련을 시작하니까요 + +00:35.720 --> 00:42.710 +간단히 요약하자면 프런티어 모델과 API로 텍스트와 코드를 생성할 수 있고 + +00:42.710 --> 00:51.140 +파이프라인과 하위 레벨 트랜스포머 API를 서로 끌어안는 오픈 소스 모델을 사용할 수 있죠 + +00:51.140 --> 00:59.390 +모델을 직접 사용하는 것처럼 랭 체인이 아닌 래그 파이프라인을 만들 수 있어요 + +00:59.390 --> 01:06.620 +가장 중요한 건 큐레이팅에 많은 시간을 들이는 문제 해결을 위한 5단계 전략을 + +01:06.650 --> 01:09.110 +따를 수 있다는 거죠 + +01:09.140 --> 01:13.910 +많은 시간을 데이터를 큐레이팅하고 기준 모델을 만드는 데 할애하죠 + +01:13.910 --> 01:16.760 +훈련을 좀 했지만 기본 모델 훈련이었어요 + +01:16.790 --> 01:22.970 +전통적인 ML은 아직 완성되지 않았어요 미세하게 훈련된 개척지 모델이죠 + +01:22.970 --> 01:24.500 +오늘 그걸 할 거예요 + +01:24.530 --> 01:30.170 +모델 앞에서 미세 튜닝 과정을 이해하고 데이터 세트를 생성하고 미세 + +01:30.200 --> 01:35.360 +튜닝을 실행한 후 새 미세 튜닝 모델을 테스트하는 거죠 + +01:35.930 --> 01:42.290 +훈련에 대해 설명하자면 이런 모델에서 세밀한 조정은 + +01:42.290 --> 01:48.290 +훈련과 동의어입니다 물론 처음부터 훈련하진 않습니다 + +01:48.290 --> 01:54.020 +그러면 수억 달러가 들 테니까요 + +01:54.020 --> 02:00.680 +우리는 항상 훈련된 기존 모델을 사용합니다 미리 훈련된 모델이죠 전송 학습을 + +02:00.680 --> 02:05.670 +활용해 더 많은 훈련을 합니다 이 이론은 기존에 훈련된 + +02:05.670 --> 02:10.410 +모델을 가지고 더 많은 훈련을 하면 새 작업에서 더 낫다는 + +02:10.410 --> 02:12.030 +거죠 + +02:12.120 --> 02:14.760 +그걸 미세 조정이라고 하죠 + +02:15.450 --> 02:24.030 +이제 어휘력 문제를 해결했으니 오픈AI를 조정하는 세 가지 단계에 대해 이야기해 보죠 + +02:24.180 --> 02:30.930 +GPT 40과 4 미니를 받아들이려면 세 가지를 따라야 합니다 이 세 + +02:30.930 --> 02:33.390 +가지를 잘 조율해야 하죠 + +02:33.420 --> 02:39.270 +첫 
단계는 훈련에 사용할 훈련 데이터를 준비하는 거예요 + +02:39.270 --> 02:44.820 +전통적인 모델의 맥락에서 훈련 데이터를 사용합니다 선형 회귀 같은 거죠 + +02:44.820 --> 02:49.170 +훈련용 견본을 구해서 선형 회귀 모델로 주입했어요 + +02:49.170 --> 02:50.790 +훈련 데이터를 만들어야 해요 + +02:50.820 --> 02:54.720 +이제 오픈AI에 업로드 해야 해요 + +02:54.720 --> 03:02.520 +JSON L이라는 특정한 포맷의 훈련 데이터를 기대합니다 JSON 라인을 약자로 하죠 + +03:02.670 --> 03:07.160 +미묘하게 달라요 일반적인 JSON을 보여드리죠 + +03:07.640 --> 03:13.880 +그다음 훈련과 미세한 조율을 실시할 겁니다 이 도표들은 모두 아래를 가리키죠 + +03:13.880 --> 03:14.900 +당신이 곤란해질지도 몰라요 + +03:14.930 --> 03:20.270 +뭔가 잘못되고 있는 것처럼 보이지만 그 반대입니다 훈련할 때 여러분의 상대는 지켜보고 있죠 + +03:20.300 --> 03:21.590 +훈련의 손실이죠 + +03:21.710 --> 03:23.960 +물론 손실이 줄어들길 바라죠 + +03:23.960 --> 03:25.880 +상황이 나아지고 있다는 뜻이죠 + +03:25.970 --> 03:31.610 +그래서 매의 눈으로 도표를 보면서 손실이 줄어드는지 확인할 + +03:31.610 --> 03:32.480 +거예요 + +03:33.200 --> 03:39.560 +가장 중요한 것은 한 배치에서 트레이닝 손실과 유효성 검증 손실입니다. + +03:39.590 --> 03:44.090 +데이터 집합에 있는 것이죠. + +03:44.090 --> 03:45.740 +저것도 내려가요? + +03:45.770 --> 03:49.940 +훈련 데이터에 과부하가 걸릴 수 있으니까요 + +03:49.970 --> 03:55.940 +사실 우리 경우엔 그게 문제가 아닙니다 트레이닝 데이터로 한 개의 epoch만 실행할 + +03:55.940 --> 03:56.990 +테니까요 + +03:56.990 --> 03:58.850 +이포크라고도 하죠 + +03:58.940 --> 04:04.340 +훈련 데이터를 통해 완전한 훈련을 한 다음 같은 데이터를 + +04:04.340 --> 04:07.370 +반복해서 실행하는 거죠 + +04:07.400 --> 04:10.070 +그런 걸 두 번째 수련 시대라고 하죠 + +04:10.490 --> 04:16.130 +하지만 그렇게 하지 않을 겁니다 훈련 데이터가 너무 많아서 그럴 필요가 없으니까요 + +04:16.130 --> 04:19.580 +트레이닝 데이터를 더 활용해 하나의 시대를 만드는 게 낫겠어요 비트 코스트 + +04:19.580 --> 04:26.360 +모든 데이터는 항상 새로운 데이터이기 때문에 훈련의 손실은 검증의 손실만큼이나 + +04:26.360 --> 04:27.230 +유용하죠 + +04:28.100 --> 04:31.760 +그리고 마지막으로 결과를 평가하죠 + +04:31.760 --> 04:38.240 +그걸 바탕으로 수정하고 반복해서 계속하는 거죠 + +04:38.390 --> 04:40.310 +그게 무대예요 + +04:41.060 --> 04:44.870 +말씀드렸듯이 첫 번째는 데이터를 준비하는 거죠 + +04:45.050 --> 04:52.970 +OpenAI는 JSON L이라는 포맷에서 그걸 기대합니다 JSON 데이터의 일련의 라인이라는 + +04:52.970 --> 04:53.750 +뜻이죠 + +04:53.900 --> 04:56.030 +JSON 데이터와 똑같다고 생각하실지도 몰라요 + +04:56.030 --> 04:56.870 +아니에요 + +04:56.870 --> 04:59.000 +컬렉션에 있는 게 아니에요 + +04:59.000 --> 04:59.900 +목록에 
없군요 + +04:59.900 --> 05:02.770 +쉼표가 붙은 대괄호로 시작하진 않아요 + +05:02.770 --> 05:11.410 +이 파일의 각 행, 각 줄은 중괄호로 시작하고 끝나는 별개의 JSON 객체예요 + +05:11.410 --> 05:16.270 +미묘한 차이점이지만 JSON 개체를 작성하지 않을 거라는 기대는 안 할 경우에 + +05:16.270 --> 05:19.390 +걸릴 수 있어요 주변에 리스트가 있을 테니까요 + +05:19.390 --> 05:26.320 +이 파일에 JSON 행을 작성하는 거죠 그럼 각각의 행은 우리에게 아주 익숙한 + +05:26.320 --> 05:27.550 +것이 돼요 + +05:27.580 --> 05:30.730 +메시지라는 하나의 특성을 갖게 되죠 + +05:30.730 --> 05:38.890 +그 안에는 우리가 잘 아는 것이 들어있습니다 각 사전마다 역할과 내용이 있는 + +05:38.920 --> 05:40.750 +사전 목록이죠 + +05:40.750 --> 05:42.100 +대화예요 + +05:42.100 --> 05:45.460 +각 행에 들어가는 거죠 + +05:45.610 --> 05:51.040 +보다시피 업로드를 위한 이 특정 유형의 데이터 세트를 만들 거예요 + +05:52.240 --> 05:53.260 +좋아요 + +05:53.680 --> 05:57.130 +잡담은 이 정도면 충분해요 + +05:57.160 --> 05:58.900 +주피터 연구소로 가요 + +05:58.900 --> 06:01.000 +실제로 실행해보죠 + +06:01.000 --> 06:05.230 +그리고 처음으로 개척지 모델을 훈련할 거예요 + +06:05.260 --> 06:06.370 +시작하죠 diff --git a/week5/community-contributions/subtitles/srts/59473089/en_US.srt b/week5/community-contributions/subtitles/srts/59473089/en_US.srt new file mode 100755 index 0000000..c28bfdf --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473089/en_US.srt @@ -0,0 +1,394 @@ +WEBVTT + +00:01.940 --> 00:03.290 +Welcome back. + +00:03.320 --> 00:09.020 +So hopefully you are still impressed by the GPT four mini results. + +00:09.260 --> 00:19.820 +The frontier model coming in strong at just shy of $80 and with 52% hits just going above the 50% mark + +00:19.820 --> 00:22.820 +in terms of the number of green dots and a nice chart there. + +00:22.880 --> 00:29.510 +But the time has come for us to now run against its bigger cousin, big GPT four. + +00:29.540 --> 00:31.250 +Oh, I just did. + +00:31.280 --> 00:37.520 +I did want to mention that, uh, do be aware that at this point, uh, when we're talking about these + +00:37.520 --> 00:42.020 +kinds of error numbers, the error is already, uh, reasonably low. + +00:42.050 --> 00:44.810 +You think about the actual prices of products. 
+ +00:44.810 --> 00:45.770 +They do vary. + +00:45.770 --> 00:48.020 +There is volatility baked into prices. + +00:48.020 --> 00:49.310 +There are sales. + +00:49.340 --> 00:54.170 +There's there's big swings when you come to things like the prices of a wheel or the price of a laptop. + +00:54.320 --> 01:02.320 +No doubt they vary quite considerably from different sites and different, uh, Stores, so there is + +01:02.320 --> 01:05.380 +some natural error built into this question anyway. + +01:05.380 --> 01:09.370 +So we're certainly not expecting to to come down to small numbers. + +01:09.730 --> 01:16.090 +So from that point of view, recognize that every dollar improvement we get at this point is a is a + +01:16.090 --> 01:17.350 +victory in its own right. + +01:17.380 --> 01:21.670 +We're going to be squeezing juice if we can, but don't expect a big change now. + +01:21.670 --> 01:29.110 +We are now at a point where this is already super impressive and far ahead of any of the traditional + +01:29.110 --> 01:30.910 +machine learning techniques. + +01:31.390 --> 01:41.290 +Um, so, uh, the next thing to do, of course, is to unveil the bigger cousin, GPT four. + +01:41.290 --> 01:47.740 +Oh, here is the same type of function again, the function that we'll be passing to our test program. + +01:47.890 --> 01:51.790 +Uh, it's of course calling OpenAI ChatGPT completions dot create. + +01:51.790 --> 01:58.690 +We're using the GPT four model as of August the 6th, which is very recent. + +01:58.960 --> 02:06.080 +Um, passing in messages, passing in the seed and using the same way of getting back the results. + +02:06.110 --> 02:09.590 +It's worth mentioning this is now going to cost a little bit more. + +02:09.620 --> 02:14.210 +It costs somewhere between $0.10 and $0.20 for me to run this across 250 points. 
+ +02:14.450 --> 02:19.160 +Um, so, um, yeah, I mean, it's not going to break the bank, but you might want to think twice + +02:19.160 --> 02:21.200 +before running this a hundred times. + +02:21.380 --> 02:24.650 +Uh, so I do urge you to at least try it once. + +02:24.740 --> 02:26.330 +It's it's great fun. + +02:26.510 --> 02:30.170 +Um, but, yeah, it doesn't it doesn't necessarily need to be run a lot of times. + +02:30.200 --> 02:35.390 +I also want to mention that you might wonder why I'm not trying to run this against oh one. + +02:35.450 --> 02:39.530 +Uh, so the kind of problems are that oh one is designed for. + +02:39.560 --> 02:45.560 +Ah, more the problems where there is deeper thinking required, a multi-step thought process and challenging. + +02:45.560 --> 02:49.100 +And that doesn't really lend itself to the problem that we're tackling here. + +02:49.160 --> 02:53.090 +Um, it would also be slower and more expensive and for no good reason. + +02:53.090 --> 02:56.270 +We're not trying to solve, uh, math puzzles. + +02:56.300 --> 03:02.180 +We're just trying to have the most likely next guess of a price based on worldly knowledge. + +03:02.330 --> 03:05.750 +So GPT four frontier should be our best shot. + +03:05.870 --> 03:06.110 +Um. + +03:06.110 --> 03:08.570 +And I have, of course, already run it. + +03:08.660 --> 03:12.170 +Uh, so it's a little bit slower than GPT four mini. + +03:12.170 --> 03:13.070 +Just a little bit. + +03:13.100 --> 03:15.740 +But I think you should run it yourself and you'll enjoy it. + +03:15.770 --> 03:18.470 +And here are the results I get. + +03:18.740 --> 03:23.660 +Um, so there's a lot of greens, but there's lots of reds as well. + +03:24.230 --> 03:27.830 +So it's quite a variety here. + +03:28.700 --> 03:33.110 +Um, no doubt you're betting. + +03:33.110 --> 03:35.000 +And here are the results. + +03:35.000 --> 03:37.610 +So the bottom line is it is better. + +03:37.610 --> 03:39.380 +It is improved it for sure. 
+ +03:39.380 --> 03:42.140 +It's gone from about 80 to about 76. + +03:42.140 --> 03:44.840 +And the hits from 52 to 58. + +03:44.840 --> 03:45.950 +Uh, green dots. + +03:45.950 --> 03:47.990 +It's not a massive change. + +03:47.990 --> 03:53.840 +You may have expected more going from mini to GPT four, but remember GPT four mini is really good. + +03:53.840 --> 04:00.840 +And despite the fact that it's a much cheaper model, it has much the very similar smarts to GPT four + +04:00.870 --> 04:01.380 +zero. + +04:01.620 --> 04:08.310 +Um, so I would say, first of all, it is to be celebrated that we've come down by by $6 by $4. + +04:08.340 --> 04:08.790 +Sorry. + +04:08.910 --> 04:16.770 +Uh, but but also that, um, we can already assume that GPT four mini has done very well indeed. + +04:17.040 --> 04:19.050 +Um, so, um. + +04:19.200 --> 04:22.740 +Yeah, this is this is, uh, this is the results. + +04:22.770 --> 04:25.170 +It looks visually very impressive. + +04:25.200 --> 04:28.410 +The the I mean, compare it to humanity. + +04:28.440 --> 04:33.180 +Uh, at the end of this lecture, I think we'll end by going back and embarrassingly looking at the + +04:33.480 --> 04:37.980 +chart one more time, uh, comparing it to what frontier models are able to achieve. + +04:38.040 --> 04:41.370 +Uh, but this does give you a good sense. + +04:41.880 --> 04:42.990 +Okay. + +04:42.990 --> 04:50.670 +And then, of course, we do have one more frontier model that perhaps we should unveil, which is Claude + +04:50.700 --> 04:52.920 +3.5 sonnet. + +04:53.220 --> 05:03.760 +Um, which is the strongest, uh, model from anthropic And, um, you recognize, I hope the structure. + +05:04.000 --> 05:07.180 +Uh, Claude messages create uh, again. + +05:07.180 --> 05:11.500 +Now, with in the case of Claude, you can't pass in a seed. + +05:11.530 --> 05:12.430 +It doesn't take that. + +05:12.430 --> 05:14.500 +So the results will be different each time. 
+ +05:14.500 --> 05:20.440 +But I've just gone with the first run I've done in here, and that's what we'll have as, as the, uh, + +05:20.440 --> 05:24.850 +the, uh, the results for, for our purposes. + +05:24.880 --> 05:28.990 +And I guess I'll probably, uh, replicate and save this notebook. + +05:28.990 --> 05:30.970 +So we have it for, for posterity. + +05:31.270 --> 05:37.780 +Uh, so time for Claude starts with a couple of reds, then a sea of greens. + +05:40.510 --> 05:45.850 +And I will get us straight to some outcomes. + +05:48.160 --> 05:50.260 +So here's the results from Claude. + +05:50.290 --> 05:53.440 +Interestingly, Claude actually does a little bit worse. + +05:53.560 --> 06:03.060 +Uh, it's, uh, um, uh, worse, in fact, than GPT four mini in this task, and worse also than the + +06:03.060 --> 06:03.990 +Random forest. + +06:04.020 --> 06:09.300 +Of course, again, Random Forest had the benefit of all of its training data 400,000 training data. + +06:09.300 --> 06:12.750 +So it's a very different kind of of of test. + +06:13.200 --> 06:20.520 +And you can see one of the reasons for that is that you can tell from this axis that it has made some + +06:20.520 --> 06:21.180 +prediction. + +06:21.180 --> 06:27.000 +We can't quite see it because it's going to be a little tiny red dot, but it made some massively outsized + +06:27.000 --> 06:30.690 +prediction, although that will of course been spread out across everything. + +06:30.690 --> 06:32.730 +So it wouldn't have made a huge difference. + +06:32.730 --> 06:41.670 +But still, uh, one guess would have been like way, way too high and will have certainly, uh, certainly + +06:41.670 --> 06:43.320 +made its results be a bit distorted. + +06:43.350 --> 06:47.820 +Let's see if we can spy that from scanning through here. + +06:48.000 --> 06:49.950 +Where did it go so wrong? + +06:49.950 --> 06:51.060 +Here it is. + +06:53.550 --> 06:56.940 +G technology G speed ES pro high performance. 
+
+06:56.940 --> 07:02.780
+So this thing which actually cost $495.95.
+
+07:02.780 --> 07:07.250
+Its guess was $4,999 and 99.
+
+07:07.550 --> 07:14.510
+So it was a very outsized guess there that introduced 4500 error.
+
+07:14.540 --> 07:18.590
+Now, obviously, that that error in itself won't distort things too much because it gets averaged out
+
+07:18.590 --> 07:21.020
+across the 250 data points.
+
+07:21.020 --> 07:26.570
+But it's an example of one of the things that pushed it into the worst camp.
+
+07:27.110 --> 07:30.170
+So those are the results from Claude.
+
+07:30.200 --> 07:38.060
+Uh, still has a nice diagram here, but didn't quite measure up in this with this particular challenge,
+
+07:38.060 --> 07:43.790
+uh, to GPT four or GPT four at the frontier was the winner of this challenge.
+
+07:43.820 --> 07:50.150
+You'll remember Claude won when it came to the coding performance challenge, by a way, a big, big
+
+07:50.150 --> 07:50.900
+margin.
+
+07:50.960 --> 07:56.150
+Uh, but in this case, for this particular challenge, our winner is Claude.
+
+07:56.480 --> 07:59.730
+Uh, is sorry, is GPT four at the frontier.
+
+07:59.730 --> 08:04.620
+And as I promised, I will leave you by one more look at the.
+
+08:04.650 --> 08:07.200
+At this human's feeble attempt.
+
+08:07.230 --> 08:14.820
+Take a look at the nice, elegant diagram here with a number of green dots with GPT four guessing close
+
+08:14.820 --> 08:16.740
+to the to a home run.
+
+08:16.740 --> 08:22.620
+And now compare that with the performance of the resident human in the room.
+
+08:22.890 --> 08:26.580
+Uh, and this was my chart.
+
+08:28.800 --> 08:30.600
+That I don't want to look at that ever again.
+
+08:30.840 --> 08:31.860
+Uh, all right.
+
+08:31.860 --> 08:36.570
+Anyway, I hope that this was a, uh, educational exercise for you.
+
+08:36.570 --> 08:40.680
+And you see how you would do, uh, experiments like this with your own business problems.
+ +08:40.680 --> 08:41.430 +Please do. + +08:41.460 --> 08:43.200 +Now, come replicate this. + +08:43.200 --> 08:46.890 +Play around with this, uh, you'll find, particularly with Claude, of course, that you won't get + +08:46.890 --> 08:48.750 +the same results because there's no seed. + +08:48.930 --> 08:53.430 +Um, but you will get some something different and see how it performs. + +08:53.430 --> 08:55.050 +Maybe you'll get better results than me. + +08:55.230 --> 09:00.060 +Uh, but I will see you to wrap up with some more slides. diff --git a/week5/community-contributions/subtitles/srts/59473089/ja_JP.srt b/week5/community-contributions/subtitles/srts/59473089/ja_JP.srt new file mode 100755 index 0000000..0ce08cc --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473089/ja_JP.srt @@ -0,0 +1,349 @@ +WEBVTT + +00:01.940 --> 00:03.290 +お帰りなさい。 + +00:03.320 --> 00:09.020 +というわけで、 GPT4ミニの結果にまだ感動してくれているといいのだが。 + +00:09.260 --> 00:22.820 +フロンティア・モデルは80ドル手前で好調で、 52%のヒットがあり、 緑色のドットの数では50%を超えている。 + +00:22.880 --> 00:29.510 +しかし、 今こそGPT4と対戦するときだ。 + +00:29.540 --> 00:31.250 +ああ、 今やったよ。 + +00:31.280 --> 00:37.520 +ただ、 現時点では、 この種のエラー数について話しているとき、 エラーはすでに、 + +00:37.520 --> 00:42.020 +あー、 それなりに低いということは知っておいてほしい。 + +00:42.050 --> 00:44.810 +あなたは商品の実際の価格を考える。 + +00:44.810 --> 00:45.770 +様々だ。 + +00:45.770 --> 00:48.020 +価格にはボラティリティがある。 + +00:48.020 --> 00:49.310 +セールもある。 + +00:49.340 --> 00:54.170 +ホイールの価格やノートパソコンの価格など、 大きな変動がある。 + +00:54.320 --> 01:05.380 +サイトや店によってかなり違うのは間違いない。 + +01:05.380 --> 01:09.370 +だから我々は、 この数字が小さくなるとは思っていない。 + +01:09.730 --> 01:17.350 +その観点から、 現時点での1ドルの改善は、 それ自体が勝利であると認識してほしい。 + +01:17.380 --> 01:21.670 +できる限りジュースを絞るつもりだが、 今は大きな変化を期待しないでほしい。 + +01:21.670 --> 01:30.910 +私たちは今、 すでに超素晴らしい、 従来の機械学習技術をはるかに凌駕する段階にいる。 + +01:31.390 --> 01:41.290 +それで、 その次にすべきことは、 もちろん、 より大きないとこであるGPT4のお披露目だ。 + +01:41.290 --> 01:47.740 +ああ、 ここにも同じタイプの関数がある。 テスト・プログラムに渡す関数だ。 + +01:47.890 --> 01:51.790 +もちろんOpenAI ChatGPT completions dot createと呼んでいる。 + +01:51.790 
--> 01:58.690 +GPT4モデルを使用しているのは8月6日現在で、 ごく最近のことだ。 + +01:58.960 --> 02:06.080 +ええと、 メッセージを渡し、 シードを渡し、 同じ方法で結果を返す。 + +02:06.110 --> 02:09.590 +これには少しコストがかかる。 + +02:09.620 --> 02:14.210 +費用は0ドル。 10ドルと0ドル。 250ポイントに20ドル。 + +02:14.450 --> 02:21.200 +そう、 そう、 銀行を破綻させるようなものではないが、 これを100回実行する前によく考えた方がいいかもしれない。 + +02:21.380 --> 02:24.650 +だから、 少なくとも一度は試してみてほしい。 + +02:24.740 --> 02:26.330 +とても楽しいよ。 + +02:26.510 --> 02:30.170 +うーん、 でも、 そうだね、 必ずしも何度も走る必要はない。 + +02:30.200 --> 02:35.390 +また、 なぜ私がこの試合を "Oh one "と戦おうとしないのか、 不思議に思うかもしれないことにも触れておきたい。 + +02:35.450 --> 02:39.530 +ええと、 それで、 どんな問題があるかというと、 ああいう問題なんだ。 + +02:39.560 --> 02:45.560 +ああ、 もっと深い思考が必要で、 多段階の思考プロセスが必要で、 チャレンジングな問題だ。 + +02:45.560 --> 02:49.100 +そして、 それは私たちがここで取り組んでいる問題には適していない。 + +02:49.160 --> 02:53.090 +しかし、 それだとスピードも遅くなるし、 料金も高くなる。 + +02:53.090 --> 02:56.270 +僕らは数学のパズルを解こうとしているんじゃない。 + +02:56.300 --> 03:02.180 +私たちはただ、 世間的な知識に基づいて、 次に最も可能性の高い価格を推測しようとしているだけなのだ。 + +03:02.330 --> 03:05.750 +だからGPTの4フロンティアがベストショットになるはずだ。 + +03:05.870 --> 03:06.110 +うーん。 + +03:06.110 --> 03:08.570 +そしてもちろん、 私はすでにそれを実行した。 + +03:08.660 --> 03:12.170 +GPT4ミニより少し遅いですね。 + +03:12.170 --> 03:13.070 +ちょっとだけね。 + +03:13.100 --> 03:15.740 +でも、 自分で走れば楽しめると思うよ。 + +03:15.770 --> 03:18.470 +その結果がこれだ。 + +03:18.740 --> 03:23.660 +緑が多いけど、 赤も多いね。 + +03:24.230 --> 03:27.830 +だから、 ここはバラエティに富んでいる。 + +03:28.700 --> 03:33.110 +うーん、 間違いなく賭けているね。 + +03:33.110 --> 03:35.000 +その結果がこれだ。 + +03:35.000 --> 03:37.610 +要するに、 その方がいいということだ。 + +03:37.610 --> 03:39.380 +確かに改善された。 + +03:39.380 --> 03:42.140 +約80から約76になった。 + +03:42.140 --> 03:44.840 +そして52から58までのヒット。 + +03:44.840 --> 03:45.950 +ええと、 緑の点。 + +03:45.950 --> 03:47.990 +大規模な変化ではない。 + +03:47.990 --> 03:53.840 +ミニからGPT4への移籍はもっと期待していたかもしれないが、 GPT4ミニは本当にいいものだということを覚えておいてほしい。 + +03:53.840 --> 04:01.380 +そして、 ずっと安いモデルであるにもかかわらず、 GPT four zeroとほとんど同じような賢さを持っている。 + +04:01.620 --> 04:08.310 +まず第一に、 6ドルも4ドルも安くなったことは喜ばしいことだ。 + +04:08.340 --> 04:08.790 +申し訳ない。 + +04:08.910 --> 04:16.770 +ええと、 でも、 GPT4ミニは本当によくやっていると思う。 + 
+04:17.040 --> 04:19.050 +ええと、 それで、 ええと。 + +04:19.200 --> 04:22.740 +そう、 これがその結果だ。 + +04:22.770 --> 04:25.170 +見た目はとても印象的だ。 + +04:25.200 --> 04:28.410 +つまり、 人類との比較だ。 + +04:28.440 --> 04:37.980 +この講義の最後には、 恥ずかしながらもう一度チャートを見て、 フロンティア・モデルが達成できることと比較することで終わりにしようと思う。 + +04:38.040 --> 04:41.370 +ええと、 でも、 これでよくわかるよ。 + +04:41.880 --> 04:42.990 +オーケー。 + +04:42.990 --> 04:52.920 +そしてもちろん、 もう1つフロンティア・モデルがあり、 それはクロード3だ。 + +04:52.920 --> 04:52.920 +5ソネット + +04:53.220 --> 05:03.760 +人間工学的なモデルから、 最強のモデルはどれですか? + +05:04.000 --> 05:07.180 +ええと、 クロードがまたメッセージを作ってくれたんだ。 + +05:07.180 --> 05:11.500 +クロードの場合、 シードでパスを出すことはできない。 + +05:11.530 --> 05:12.430 +それは必要ない。 + +05:12.430 --> 05:14.500 +だから結果は毎回違ってくる。 + +05:14.500 --> 05:20.440 +しかし、 私がここで行った最初の試運転を、 私たちの目的のための、 その、 + +05:20.440 --> 05:24.850 +その、 その、 結果として持っているものにした。 + +05:24.880 --> 05:28.990 +そして、 おそらく、 このノートを複製して保存すると思う。 + +05:28.990 --> 05:30.970 +だから、 後世のために持っているんだ。 + +05:31.270 --> 05:37.780 +ええと、 クロードの時間は2、 3の赤から始まり、 次に緑の海だ。 + +05:40.510 --> 05:45.850 +そして、 いくつかの結果につなげたい。 + +05:48.160 --> 05:50.260 +クロードの結果はこうだ。 + +05:50.290 --> 05:53.440 +興味深いことに、 クロードは実際にはもう少し悪い。 + +05:53.560 --> 06:03.990 +このタスクでは、 GPTの4つのミニより悪く、 ランダムフォレストよりも悪い。 + +06:04.020 --> 06:09.300 +もちろん、 今回もランダムフォレストには40万件の学習データがある。 + +06:09.300 --> 06:12.750 +だから、 まったく違う種類のテストなんだ。 + +06:13.200 --> 06:21.180 +その理由のひとつは、 この軸を見れば、 ある程度の予測を立てていることがわかるからだ。 + +06:21.180 --> 06:27.000 +それは小さな小さな赤い点になるだろうから、 私たちにはよく見えないが、 それはもちろんすべてのものに広がっていくだろうが、 + +06:27.000 --> 06:30.690 +何か大きな予測をしたのだ。 + +06:30.690 --> 06:32.730 +だから、 大きな違いはなかっただろう。 + +06:32.730 --> 06:43.320 +しかし、 それでもなお、 1つの推測は、 あまりにも、 あまりにも高すぎただろう。 + +06:43.350 --> 06:47.820 +ここをスキャンしてスパイできるか見てみよう。 + +06:48.000 --> 06:49.950 +どこで間違ったのか? + +06:49.950 --> 06:51.060 +これだ。 + +06:53.550 --> 06:56.940 +Gテクノロジー GスピードESプロ ハイパフォーマンス + +06:56.940 --> 07:02.780 +つまり、 495ドルもするものなのだ。 95. 
+ +07:02.780 --> 07:07.250 +客は4,999ドルと99ドルだった。 + +07:07.550 --> 07:14.510 +つまり、 4500のミスを招いたのは、 非常に大きなゲストだったのだ。 + +07:14.540 --> 07:21.020 +もちろん、 この誤差は250のデータポイントで平均化されるため、 それ自体が物事を大きく歪めることはない。 + +07:21.020 --> 07:26.570 +しかし、 最悪の陣営に押し上げた一例だ。 + +07:27.110 --> 07:30.170 +これがクロードの結果だ。 + +07:30.200 --> 07:43.790 +GPT4、 あるいはフロンティアのGPT4がこのチャレンジの勝者だった。 + +07:43.820 --> 07:50.900 +クロードがコーディング・パフォーマンス・チャレンジで勝ったことを覚えているだろう。 + +07:50.960 --> 07:56.150 +ええと、 でもこの場合、 この特別なチャレンジの勝者はクロードだ。 + +07:56.480 --> 07:59.730 +ええと、 すみません、 GPT4はフロンティアですね。 + +07:59.730 --> 08:04.620 +そしてお約束したように、 もうひとつだけお見せしよう。 + +08:04.650 --> 08:07.200 +この人間の弱々しい試みに。 + +08:07.230 --> 08:16.740 +GPT4がホームランに近いと推測される緑色の点がいくつもある、 エレガントで素敵な図を見てみよう。 + +08:16.740 --> 08:22.620 +そして今度は、 その部屋に常駐している人間のパフォーマンスと比較してみよう。 + +08:22.890 --> 08:26.580 +ええと、 これが私のカルテ。 + +08:28.800 --> 08:30.600 +もう二度と見たくないと。 + +08:30.840 --> 08:31.860 +ああ、 わかったよ。 + +08:31.860 --> 08:36.570 +とにかく、 これがあなたにとって、 えー、 勉強になったことを願っている。 + +08:36.570 --> 08:40.680 +そして、 自分自身のビジネス上の問題に対して、 このような実験をどのように行うかを見るんだ。 + +08:40.680 --> 08:41.430 +どうぞよろしく。 + +08:41.460 --> 08:43.200 +さあ、 これを再現しよう。 + +08:43.200 --> 08:48.750 +これを試してみると、 特にクロードの場合は、 種がないために同じ結果が得られないことがわかるだろう。 + +08:48.930 --> 08:53.430 +うーん、 でも、 何か違うものを手に入れて、 その性能を見てみるんだ。 + +08:53.430 --> 08:55.050 +もしかしたら、 私よりいい結果が出るかもしれない。 + +08:55.230 --> 09:00.060 +ええと、 でも最後にもう少しスライドをお見せしましょう。 diff --git a/week5/community-contributions/subtitles/srts/59473089/ko_KR.srt b/week5/community-contributions/subtitles/srts/59473089/ko_KR.srt new file mode 100755 index 0000000..149ba85 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473089/ko_KR.srt @@ -0,0 +1,385 @@ +WEBVTT + +00:01.940 --> 00:03.290 +잘 돌아왔어요 + +00:03.320 --> 00:09.020 +GPT 4 미니 결과에 여전히 감동하셨길 바라요 + +00:09.260 --> 00:19.820 +프런티어 모델은 80달러로 큰 인기를 끌었고 조회 수가 52%로 50%를 넘어섰습니다 녹색 + +00:19.820 --> 00:22.820 +점 수와 멋진 차트 덕분이죠 + +00:22.880 --> 00:29.510 +하지만 이제 더 큰 사촌 격인 GPT 4와 겨룰 때가 됐죠 + +00:29.540 --> 00:31.250 +방금 했어요 + +00:31.280 --> 
00:37.520 +말씀드리고 싶은 건 이 시점에서 우리가 이런 종류의 오류 숫자에 + +00:37.520 --> 00:42.020 +대해 얘기할 때 오류는 이미 꽤 낮다는 거예요 + +00:42.050 --> 00:44.810 +제품의 실제 가격을 생각해 보세요 + +00:44.810 --> 00:45.770 +다양하죠 + +00:45.770 --> 00:48.020 +가격에 변동이 있어요 + +00:48.020 --> 00:49.310 +세일 중이에요 + +00:49.340 --> 00:54.170 +바퀴나 노트북 가격 같은 건 큰 반전이 있어요 + +00:54.320 --> 01:02.320 +장소와 스토어마다 제각각인 건 분명합니다 그러니 이 질문에는 자연스러운 + +01:02.320 --> 01:05.380 +오류가 내재돼 있죠 + +01:05.380 --> 01:09.370 +그래서 소규모로 줄어들 거라고 기대하진 않아요 + +01:09.730 --> 01:16.090 +그런 관점에서 현재 1달러씩 개선될 때마다 그 자체로 get up + +01:16.090 --> 01:17.350 +승리예요 + +01:17.380 --> 01:21.670 +가능하면 주스를 짜겠지만 큰 변화는 기대하지 마세요 + +01:21.670 --> 01:29.110 +지금까지의 기술로는 이미 놀라운 수준이에요 기존의 머신 러닝 기술보다 + +01:29.110 --> 01:30.910 +훨씬 앞섰죠 + +01:31.390 --> 01:41.290 +다음 단계는 물론 더 큰 사촌 격인 GPT 4를 공개하는 거죠 + +01:41.290 --> 01:47.740 +여기 같은 형식의 함수가 또 있네요 테스트 프로그램에 통과시킬 함수죠 + +01:47.890 --> 01:51.790 +OpenAI 챗GPT완료 .Create라고 부르고요 + +01:51.790 --> 01:58.690 +8월 6일 기준으로 GPT 4 모델을 사용하고 있어요 아주 최근이죠 + +01:58.960 --> 02:06.080 +메시지를 전달하고 씨앗을 전달하고 같은 방법으로 결과를 얻는 거죠 + +02:06.110 --> 02:09.590 +미리 말씀드리자면 비트가 좀 더 들 거예요 + +02:09.620 --> 02:14.210 +0달러 정도 해요 10달러와 0달러요 250점짜리 시험에 20점요 + +02:14.450 --> 02:19.160 +네, 돈이 많이 들진 않겠지만 100번씩 방송하기 전에 + +02:19.160 --> 02:21.200 +다시 생각해 보세요 + +02:21.380 --> 02:24.650 +그러니까 적어도 한 번은 시도해 보세요 + +02:24.740 --> 02:26.330 +정말 재미있어요 + +02:26.510 --> 02:30.170 +하지만 자주 실행할 필요는 없어요 + +02:30.200 --> 02:35.390 +제가 왜 이걸 O1에 대항해 실행하지 않는지 궁금하실 수도 있어요 + +02:35.450 --> 02:39.530 +오 원의 용도는 이런 문제들이었어요 + +02:39.560 --> 02:45.560 +더 깊이 있는 사고가 필요한 문제입니다 다단계 사고 과정으로 어려운 문제들이죠 + +02:45.560 --> 02:49.100 +그건 우리가 다루는 문제와 맞지 않아요 + +02:49.160 --> 02:53.090 +더 느리고 더 비싸겠죠 아무 이유 없이요 + +02:53.090 --> 02:56.270 +수학 퍼즐을 푸는 게 아니에요 + +02:56.300 --> 03:02.180 +세속적인 지식을 바탕으로 가격을 가장 잘 추측해 보려고요 + +03:02.330 --> 03:05.750 +GPT 4 프런티어가 가장 좋은 기회예요 + +03:05.870 --> 03:06.110 +네 + +03:06.110 --> 03:08.570 +물론 이미 실행해봤죠 + +03:08.660 --> 03:12.170 +GPT 4 미니 비트보다 조금 느린데요 + +03:12.170 --> 03:13.070 +비트 주세요 + 
+03:13.100 --> 03:15.740 +하지만 직접 운영해 보면 재미있을 거예요 + +03:15.770 --> 03:18.470 +Get it 결과가 나왔어요 + +03:18.740 --> 03:23.660 +녹색도 많고 빨간색도 많아요 + +03:24.230 --> 03:27.830 +꽤 다양하네요 + +03:28.700 --> 03:33.110 +내기하는 거 맞죠? + +03:33.110 --> 03:35.000 +결과가 나왔어요 + +03:35.000 --> 03:37.610 +결론은 이게 더 낫다는 거죠 + +03:37.610 --> 03:39.380 +확실히 좋아졌네요 + +03:39.380 --> 03:42.140 +80에서 76으로 떨어졌어요 + +03:42.140 --> 03:44.840 +52점부터 58점까지 점수를 올렸죠 + +03:44.840 --> 03:45.950 +초록색 점요 + +03:45.950 --> 03:47.990 +큰 변화는 아니에요 + +03:47.990 --> 03:53.840 +미니에서 4급 GPT로 발전할 거라고 기대하셨겠지만 4급 GPT 미니는 정말 좋은 차예요 + +03:53.840 --> 04:00.840 +훨씬 저렴한 모델임에도 불구하고 GPT 40과 매우 비슷한 스마트함을 갖추고 + +04:00.870 --> 04:01.380 +있죠 + +04:01.620 --> 04:08.310 +우선 축하할 일이 있어요 6달러 4달러 줄었어요 + +04:08.340 --> 04:08.790 +미안해요 + +04:08.910 --> 04:16.770 +하지만 이미 GPT 4 미니는 아주 잘 달리고 있다고 볼 수 있죠 + +04:17.040 --> 04:19.050 +그래서 말인데요 + +04:19.200 --> 04:22.740 +네, 이게 결과예요 + +04:22.770 --> 04:25.170 +시각적으로 아주 인상적이에요 + +04:25.200 --> 04:28.410 +인간적인 것과 비교해 보세요 + +04:28.440 --> 04:33.180 +강의를 마무리하기 전에 부끄럽지만 도표를 다시 한번 + +04:33.480 --> 04:37.980 +보면서 선구자 모델의 성과를 비교해 보도록 하죠 + +04:38.040 --> 04:41.370 +하지만 이걸 보면 감이 오죠 + +04:41.880 --> 04:42.990 +네 + +04:42.990 --> 04:50.670 +그리고 아직 공개하지 않은 개척지 모델이 하나 더 있는데 바로 클로드 + +04:50.700 --> 04:52.920 +3이에요 5 소네트요 + +04:53.220 --> 05:03.760 +인류학에서 가장 강력한 모델이죠 구조도 알아보시겠죠? 
+ +05:04.000 --> 05:07.180 +클로드 메시지가 다시 만들어요 + +05:07.180 --> 05:11.500 +클로드의 경우에는 씨앗을 전달할 수 없어요 + +05:11.530 --> 05:12.430 +그런 게 아니에요 + +05:12.430 --> 05:14.500 +매번 결과가 다를 거예요 + +05:14.500 --> 05:20.440 +하지만 전 여기서 처음 실행한 걸 사용했어요 그게 우리 + +05:20.440 --> 05:24.850 +목적을 위한 결과로 나올 거예요 + +05:24.880 --> 05:28.990 +이 공책도 복사해서 보관할 거예요 + +05:28.990 --> 05:30.970 +후세를 위해 남겨뒀어요 + +05:31.270 --> 05:37.780 +클로드는 빨강 두어 개로 시작해서 초록 바다를 칠해요 + +05:40.510 --> 05:45.850 +Get in get 결과로 바로 넘어가죠 + +05:48.160 --> 05:50.260 +클로드가 결과를 보냈어요 + +05:50.290 --> 05:53.440 +흥미롭게도 클로드는 더 심한 비트를 입었어요 + +05:53.560 --> 06:03.990 +이번 작업은 GPT 4 미니보다도 못한 것 같아요 랜덤 숲보다도 못한 것 같아요 + +06:04.020 --> 06:09.300 +랜덤 포레스트는 훈련 데이터의 혜택을 많이 받았어요 400,000 훈련 데이터요 + +06:09.300 --> 06:12.750 +아주 색다른 테스트예요 + +06:13.200 --> 06:21.180 +그 이유 중 하나는 이 축을 보면 예측을 할 수 있기 때문이죠 + +06:21.180 --> 06:27.000 +아주 작은 빨간 점이라 잘 보이진 않지만 엄청난 예측을 했어요 + +06:27.000 --> 06:30.690 +물론 모든 곳에 퍼지겠지만요 + +06:30.690 --> 06:32.730 +큰 차이는 없었을 거예요 + +06:32.730 --> 06:41.670 +하지만 그래도 너무 높았을 가능성이 있어요 그래서 결과물이 약간 비뚤어졌을 + +06:41.670 --> 06:43.320 +거예요 + +06:43.350 --> 06:47.820 +여기를 스캔해서 볼 수 있는지 보죠 + +06:48.000 --> 06:49.950 +어디서부터 잘못된 걸까요? + +06:49.950 --> 06:51.060 +여기 있네요 + +06:53.550 --> 06:56.940 +G테크놀로지 G스피드 ES 프로 고성능이죠 + +06:56.940 --> 07:02.780 +이 기계는 495달러예요 95살요 + +07:02.780 --> 07:07.250 +손님은 4,999달러 99센트였어요 + +07:07.550 --> 07:14.510 +4500 오류를 도입한 건 초대형 게스트였어요 + +07:14.540 --> 07:18.590 +그 오류 자체는 크게 왜곡되지 않아요 250개의 데이터 + +07:18.590 --> 07:21.020 +포인트 평균이 나오니까요 + +07:21.020 --> 07:26.570 +하지만 최악의 캠프로 내몰린 좋은 예죠 + +07:27.110 --> 07:30.170 +클로드가 결과를 보냈어요 + +07:30.200 --> 07:38.060 +여기 도표가 있지만 이번 과제에서는 기대에 못 미쳤어요 프런티어 + +07:38.060 --> 07:43.790 +GPT4가 이번 과제의 우승자였죠 + +07:43.820 --> 07:50.900 +클로드가 코딩 성능 과제에서 우승한 거 기억하시죠? 
엄청난 마진을 기록했어요 + +07:50.960 --> 07:56.150 +하지만 이번 게임의 승자는 클로드예요 + +07:56.480 --> 07:59.730 +죄송합니다, GPT 4가 프런티어에 있어요 + +07:59.730 --> 08:04.620 +약속드린 대로 한 번만 더 보고 갈게요 + +08:04.650 --> 08:07.200 +인간의 미약한 시도에 말이죠 + +08:07.230 --> 08:14.820 +GPT 4로 표시된 초록색 점들이 멋진 도표를 보여줍니다 홈런에 + +08:14.820 --> 08:16.740 +가까운 수치죠 + +08:16.740 --> 08:22.620 +이제 이 결과를 방에 있는 사람의 결과와 비교해 보죠 + +08:22.890 --> 08:26.580 +이건 제 차트예요 + +08:28.800 --> 08:30.600 +다시는 보고 싶지 않아요 + +08:30.840 --> 08:31.860 +알았어요 + +08:31.860 --> 08:36.570 +어쨌든, 이게 당신에게 교육적인 경험이 됐길 바라요 + +08:36.570 --> 08:40.680 +본인 사업 문제로 이런 실험을 어떻게 하는지 아시겠죠 + +08:40.680 --> 08:41.430 +그러세요 + +08:41.460 --> 08:43.200 +이제 이걸 따라 해 보세요 + +08:43.200 --> 08:46.890 +이것저것 해 보면 특히 클로드에게서는 씨앗이 없기 때문에 같은 + +08:46.890 --> 08:48.750 +결과를 얻지 못해요 get it + +08:48.930 --> 08:53.430 +Get it로 색다른 걸 만들어 성능을 보는 거죠 + +08:53.430 --> 08:55.050 +어쩌면 나보다 더 좋은 결과를 낼지도 모르죠 get it get it get it + +08:55.230 --> 09:00.060 +마무리할 때는 슬라이드를 더 보여드릴게요 diff --git a/week5/community-contributions/subtitles/srts/59473101/en_US.srt b/week5/community-contributions/subtitles/srts/59473101/en_US.srt new file mode 100755 index 0000000..3120865 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473101/en_US.srt @@ -0,0 +1,328 @@ +WEBVTT + +00:01.400 --> 00:02.420 +Welcome back. + +00:02.420 --> 00:06.980 +So about ten minutes later, maybe 15 minutes later, the run has completed. + +00:06.980 --> 00:08.090 +And how do I know this? + +00:08.120 --> 00:08.900 +A few ways. + +00:08.900 --> 00:15.020 +One of them is that I just got an email from OpenAI, as you can see right here in my email, uh, to + +00:15.050 --> 00:16.910 +tell me that my fine tuning job. + +00:16.940 --> 00:22.490 +Blah blah blah has successfully completed and a new model, blah blah blah has been created. 
+ +00:22.490 --> 00:28.040 +And you'll notice in the name of that model there is that word processor, because we specifically asked + +00:28.040 --> 00:30.680 +for the suffix processor to be included. + +00:30.740 --> 00:32.840 +Uh, just shows you how how it works. + +00:32.840 --> 00:38.390 +It's the name of a feat for fine tuning, and then the name of the GPT four mini variant that we've + +00:38.390 --> 00:43.850 +been working with colon personal colon processor, and then a code at the end. + +00:43.850 --> 00:45.740 +So that's my email. + +00:45.770 --> 00:48.110 +Here is the JupyterLab. + +00:48.110 --> 00:50.090 +Uh, this was the thing we were running. + +00:50.180 --> 00:58.370 +Um, and now we're looking at the final ten messages in the status, and you will see that it completed + +00:58.370 --> 00:59.240 +step. + +00:59.390 --> 01:01.940 +Um, the last five steps here then. + +01:01.940 --> 01:04.110 +Fine tune model created. + +01:04.140 --> 01:08.070 +Evaluating model against our usage policies before enabling. + +01:08.070 --> 01:11.040 +That was the thing I mentioned to you that it does. + +01:11.250 --> 01:17.340 +Um, and then usage policies complete and the job has been successfully completed. + +01:17.340 --> 01:24.960 +And that is that I will now show you in weights and biases how this looks. + +01:24.960 --> 01:27.690 +This is the final weights and biases chart. + +01:27.840 --> 01:33.870 +Um, you can see that the things that really matter are training loss and validation loss uh, that + +01:33.870 --> 01:38.880 +you can see that the validation loss of course, is not or didn't happen nearly as regularly as the + +01:38.880 --> 01:40.470 +training loss was calculated. + +01:40.710 --> 01:45.990 +Um, and as I say, because again, because we only did one epoch that all of the training data was + +01:45.990 --> 01:46.530 +new. 
+ +01:46.530 --> 01:52.410 +So the training loss, uh, is, is just as useful for us, um, or the validation loss because it's + +01:52.410 --> 01:54.000 +always calculating it on the same set. + +01:54.030 --> 01:57.960 +It's particularly useful for, for trying to spot trends. + +01:58.080 --> 02:02.670 +Um, and at first blush, looking at the validation loss, there doesn't appear to be much of a trend + +02:02.700 --> 02:04.500 +there, which again, is concerning. + +02:04.500 --> 02:05.520 +We can bring this up. + +02:05.520 --> 02:08.510 +We can edit this panel and zoom in. + +02:08.510 --> 02:10.940 +We can change the y axis here. + +02:10.970 --> 02:12.920 +And the minimum should be zero. + +02:12.920 --> 02:15.590 +And let's make the maximum like three. + +02:15.590 --> 02:17.600 +So we zoom all the way in. + +02:18.620 --> 02:23.240 +And you can see that it doesn't particularly look like it's improving. + +02:23.240 --> 02:32.000 +In fact um, you could if you wished, uh, be uh, almost argue that maybe it's increasing slightly. + +02:32.120 --> 02:39.080 +Um, but uh, I'm not sure if that we can necessarily say that there is a smoothing function that's + +02:39.080 --> 02:46.910 +available in this chart that we can go with, and that is the smoothed version of it. + +02:47.450 --> 02:54.080 +Uh, and, uh, yes, I suppose it's, it's certainly not going up, but it appears that it made some + +02:54.080 --> 02:57.080 +improvements and then it just kind of stayed flat. + +02:57.080 --> 03:00.620 +But it does look like there was some improvement up until the 300 point. + +03:01.100 --> 03:07.150 +Um, so these are all things for you to look at and spend more time on yourself. + +03:07.150 --> 03:12.950 +But at this point, it's time for us to now go and evaluate this model against our test data. + +03:13.340 --> 03:18.260 +So I will kick that off and then flip to a video when it completes. + +03:18.380 --> 03:20.960 +So let's go back to the Jupyter Lab. 
+ +03:20.960 --> 03:24.020 +So this is our fine tuned model right here. + +03:24.260 --> 03:29.060 +Um we can get the job ID and we can collect the fine tuned model. + +03:29.060 --> 03:30.830 +Let me just quickly show you what that's going to be. + +03:30.860 --> 03:41.450 +If I, um, show you this, you can see when we look in here now that right here there's a new attribute + +03:41.450 --> 03:47.810 +fine tuned model, and it contains that same name of the fine tuned model that was in the email as well. + +03:47.810 --> 03:52.430 +So you could equally copy and paste it from the email, but we might as well just pluck it out with + +03:52.430 --> 03:53.180 +some code. + +03:53.180 --> 03:54.380 +So here we do. + +03:54.980 --> 04:03.020 +Uh, so just to show you that that's done when I'm suggesting, obviously if I run this, it's got that + +04:03.020 --> 04:04.310 +same name. + +04:05.540 --> 04:07.100 +All right. + +04:07.100 --> 04:13.470 +So we're going to redo this messages for function again. + +04:13.530 --> 04:20.400 +Uh, this time, uh, I'm, uh, just using the one that doesn't reveal the answer. + +04:20.430 --> 04:24.270 +Obviously, we don't want to give it that information. + +04:24.540 --> 04:29.010 +Uh, let's just convince ourselves that that is actually going to work. + +04:29.400 --> 04:30.960 +There you go. + +04:30.990 --> 04:35.280 +So it gives the question. + +04:35.280 --> 04:37.260 +It does not reveal the price. + +04:37.260 --> 04:41.190 +And the challenge for our model is going to be to finish this off. + +04:41.820 --> 04:47.340 +You will remember from last time a utility function that we that we have that will pluck out the price + +04:47.340 --> 04:48.840 +from what comes back. + +04:49.260 --> 04:54.960 +Uh, and I remember last time I did this, the price is roughly 99.99 because blah blah, blah. + +04:54.990 --> 04:59.610 +And if we run that of course get price function just strips out the price from there. 
+ +04:59.820 --> 05:01.920 +Uh, as I think you're familiar. + +05:02.370 --> 05:10.020 +So then this is the function, the function that we will be about to test against. + +05:10.020 --> 05:18.270 +GPT fine tuned response is OpenAI ChatGPT completions create you call it, just as you would call it, + +05:18.270 --> 05:20.010 +for the normal GPT four. + +05:20.040 --> 05:21.570 +Oh, same API. + +05:21.600 --> 05:22.470 +Exactly. + +05:22.470 --> 05:27.000 +There's only one difference, one tiny difference, minute difference. + +05:27.030 --> 05:30.330 +That is this we don't pass in GPT four mini. + +05:30.360 --> 05:38.190 +We pass in the name of our fine tuned model, this name right here, that is what we will send in to + +05:38.220 --> 05:38.850 +OpenAI. + +05:38.850 --> 05:44.160 +And it will automatically it will know that that means that we want to use our fine tuned version. + +05:44.670 --> 05:47.430 +We take back the response, we get the price. + +05:49.080 --> 05:51.510 +So let's just print one example. + +05:51.510 --> 05:58.080 +Let's print a test, uh, something from the first thing on our test set, which was that that thing + +05:58.080 --> 06:01.380 +that cost 200 and something that was, uh, caught me off guard. + +06:01.380 --> 06:04.620 +And then we will call our GPT fine tuned for the first time. + +06:04.620 --> 06:06.480 +Let's see what one result looks like. + +06:07.410 --> 06:12.780 +Okay, so that is the sorry, the the number we were looking at earlier was a training price. + +06:12.780 --> 06:15.120 +This is the price of the first test item. + +06:15.280 --> 06:18.010 +Uh, let's see what the first test item actually is. + +06:19.120 --> 06:19.330 +So. + +06:24.880 --> 06:26.620 +Let's have a look at it. + +06:26.710 --> 06:32.290 +It is an AC compressor repair kit for Ford, uh, body parts. 
+ +06:32.290 --> 06:38.440 +And, uh, so this is one that I had to do myself and, uh, yeah, obviously it's not done a very good + +06:38.440 --> 06:42.130 +job of that first data point, but who cares about one data point? + +06:42.130 --> 06:47.560 +What matters is doing it across the lot, at least the 250 that we've been using consistently for all + +06:47.560 --> 06:48.490 +of our testing. + +06:48.490 --> 06:53.410 +So without further ado, let's run it off it goes. + +06:53.410 --> 06:58.540 +So the first couple of results look a bit red, and then it looks a bit better and gets green, but + +06:58.540 --> 07:00.160 +then a whole bunch of red. + +07:00.160 --> 07:04.630 +So some mixed results here. + +07:05.440 --> 07:11.470 +And at this point I'm not going to have you hanging for the 250 of them. + +07:11.470 --> 07:13.150 +I'm going to pause. + +07:13.150 --> 07:16.510 +And then the next video we will reveal the outcome. + +07:16.510 --> 07:17.770 +I will see you there. diff --git a/week5/community-contributions/subtitles/srts/59473101/ja_JP.srt b/week5/community-contributions/subtitles/srts/59473101/ja_JP.srt new file mode 100755 index 0000000..24a0590 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473101/ja_JP.srt @@ -0,0 +1,283 @@ +WEBVTT + +00:01.400 --> 00:02.420 +お帰りなさい。 + +00:02.420 --> 00:06.980 +そうして約10分後、 あるいは15分後、 走りは完了した。 + +00:06.980 --> 00:08.090 +なぜそんなことが分かるのか? 
+ +00:08.120 --> 00:08.900 +いくつかの方法がある。 + +00:08.900 --> 00:16.910 +そのひとつは、 OpenAIからメールをもらったことだ。 メールにあるように、 僕のファインチューニングの仕事を教えてくれた。 + +00:16.940 --> 00:22.490 +"ブラブラブラ "が成功裏に完了し、 新しいモデル "ブラブラブラ "が誕生した。 + +00:22.490 --> 00:30.680 +このモデルの名前にワープロが含まれていることにお気づきだろう。 + +00:30.740 --> 00:32.840 +ええと、 どのように機能するかを示しただけだよ。 + +00:32.840 --> 00:38.390 +微調整のための技の名前、 そしてコロン・パーソナル・コロン・プロセッサーで取り組んできたGPT + +00:38.390 --> 00:43.850 +4ミニバリアントの名前、 そして最後にコードだ。 + +00:43.850 --> 00:45.740 +これが私のメールだ。 + +00:45.770 --> 00:48.110 +これがJupyterLabだ。 + +00:48.110 --> 00:50.090 +ええと、 これは僕らがやっていたことなんだ。 + +00:50.180 --> 00:59.240 +そして今、 ステータスの最後の10個のメッセージを見ている。 + +00:59.390 --> 01:01.940 +じゃあ、 最後の5つのステップだ。 + +01:01.940 --> 01:04.110 +モデルを微調整。 + +01:04.140 --> 01:08.070 +有効にする前に、 当社の使用ポリシーに照らしてモデルを評価する。 + +01:08.070 --> 01:11.040 +それが、 私があなたに言ったことだ。 + +01:11.250 --> 01:17.340 +ええと、 その後、 使用ポリシーが完了し、 仕事は正常に完了しました。 + +01:17.340 --> 01:24.960 +そして、 それがどのように見えるか、 ウェイトとバイアスでお見せしようということだ。 + +01:24.960 --> 01:27.690 +これが最終的なウェイトとバイアスのチャートである。 + +01:27.840 --> 01:33.870 +本当に重要なのは、 トレーニングの損失と検証の損失であることがおわかりいただけると思います。 もちろん、 検証の損失は、 + +01:33.870 --> 01:40.470 +トレーニングの損失が計算されたときほど定期的に発生していないか、 発生していないことがおわかりいただけると思います。 + +01:40.710 --> 01:46.530 +というのも、 1エポックしか行っていないため、 トレーニングデータはすべて新しいものだからだ。 + +01:46.530 --> 01:54.000 +つまり、 トレーニングロスも、 検証ロスも、 常に同じセットで計算されるので、 我々にとって有用なのだ。 + +01:54.030 --> 01:57.960 +特にトレンドの見極めに役立つ。 + +01:58.080 --> 02:04.500 +検証の結果、 一見したところ、 あまりトレンドがないように見える。 + +02:04.500 --> 02:05.520 +これを持ち出すことはできる。 + +02:05.520 --> 02:08.510 +このパネルを編集し、 ズームインすることができる。 + +02:08.510 --> 02:10.940 +ここでY軸を変えることができる。 + +02:10.970 --> 02:12.920 +最小値はゼロであるべきだ。 + +02:12.920 --> 02:15.590 +そして、 最大でも3人くらいにしよう。 + +02:15.590 --> 02:17.600 +だからズームインするんだ。 + +02:18.620 --> 02:23.240 +そして、 特に改善されているようには見えないことがわかるだろう。 + +02:23.240 --> 02:32.000 +実際、 望むのであれば、 わずかに増えていると主張することもできるだろう。 + +02:32.120 --> 02:39.080 +ええと、 でも、 このチャートで利用できる平滑化関数が、 + +02:39.080 --> 02:46.910 +必ずしも平滑化されたものだと言えるかどうかはわかりません。 + +02:47.450 --> 02:54.080 
+そうだね、 確かに上がってはいないけど、 少し改善されたように見えるし、 + +02:54.080 --> 02:57.080 +その後は横ばいになっている。 + +02:57.080 --> 03:00.620 +しかし、 300ポイントまでは改善が見られたようだ。 + +03:01.100 --> 03:07.150 +つまり、 これらはすべて、 あなたが自分自身に目を向け、 もっと時間を費やすためのものなんだ。 + +03:07.150 --> 03:12.950 +しかしこの時点で、 テストデータに対してこのモデルを評価する必要がある。 + +03:13.340 --> 03:18.260 +だから、 それをキックオフして、 それが完了したらビデオに切り替える。 + +03:18.380 --> 03:20.960 +それではJupyter Labに戻ろう。 + +03:20.960 --> 03:24.020 +これが微調整されたモデルだ。 + +03:24.260 --> 03:29.060 +ジョブIDを取得し、 微調整されたモデルを収集することができます。 + +03:29.060 --> 03:30.830 +では、 その内容を簡単にお見せしよう。 + +03:30.860 --> 03:47.810 +これをお見せすると、 ここに新しい属性のファイン・チューンド・モデルがあり、 メールにあったファイン・チューンド・モデルと同じ名前が含まれていることがわかります。 + +03:47.810 --> 03:53.180 +メールからコピー&ペーストすることもできるが、 コードで抜き出すほうがいいだろう。 + +03:53.180 --> 03:54.380 +だから、 こうする。 + +03:54.980 --> 04:04.310 +ええと、 私が提案したときにそれが実行されることを示すために、 これを実行すると、 明らかに同じ名前になります。 + +04:05.540 --> 04:07.100 +分かった。 + +04:07.100 --> 04:13.470 +だから、 このメッセージの機能をもう一度やり直そう。 + +04:13.530 --> 04:20.400 +ええと、 今回は、 ええと、 答えを明らかにしない方を使っているんだ。 + +04:20.430 --> 04:24.270 +もちろん、 そのような情報は与えたくない。 + +04:24.540 --> 04:29.010 +ああ、 それが実際にうまくいくと自分自身を納得させよう。 + +04:29.400 --> 04:30.960 +そうだ。 + +04:30.990 --> 04:35.280 +という疑問が湧く。 + +04:35.280 --> 04:37.260 +価格は明らかにしていない。 + +04:37.260 --> 04:41.190 +そして、 我々のモデルにとっての課題は、 これを仕上げることだ。 + +04:41.820 --> 04:48.840 +前回、 我々が持っている効用関数を覚えているだろう。 + +04:49.260 --> 04:54.960 +ええと、 前回これをやったとき、 値段はだいたい99だったと記憶している。 99歳は、 ブラブラ、 ブラブラだからだ。 + +04:54.990 --> 04:59.610 +そして、 それを実行すれば、 もちろん "get price "関数はそこから価格を取り除くだけだ。 + +04:59.820 --> 05:01.920 +ええと、 よくご存知だと思いますが。 + +05:02.370 --> 05:10.020 +これがこれからテストする関数だ。 + +05:10.020 --> 05:18.270 +GPTの微調整された応答は、 OpenAI ChatGPTの完了を作成し、 通常のGPTの4つのために、 あなたがそれを呼び出すのと同じように、 + +05:18.270 --> 05:20.010 +それを呼び出します。 + +05:20.040 --> 05:21.570 +ああ、 同じAPIだ。 + +05:21.600 --> 05:22.470 +その通りだ。 + +05:22.470 --> 05:27.000 +たった一つの違い、 ほんの小さな違い、 微細な違いだ。 + +05:27.030 --> 05:30.330 +GPT4ミニではパスしない。 + +05:30.360 --> 05:38.850 +この名前をOpenAIに送ります。 + +05:38.850 --> 05:44.160 +そして、 
それが微調整されたバージョンを使いたいことを意味することを自動的に認識する。 + +05:44.670 --> 05:47.430 +私たちは返事を撤回し、 代金を受け取る。 + +05:49.080 --> 05:51.510 +では、 1つだけ例をプリントしてみよう。 + +05:51.510 --> 06:01.380 +テストセットの最初のもの、 つまり200ドルもするものと、 意表を突かれたものをプリントしてみよう。 + +06:01.380 --> 06:04.620 +そして、 GPTを初めて微調整したと呼ぶことにする。 + +06:04.620 --> 06:06.480 +結果をひとつ見てみよう。 + +06:07.410 --> 06:12.780 +そう、 つまり、 さっきの数字はトレーニングの値段なんだ。 + +06:12.780 --> 06:15.120 +これは最初のテスト品の価格である。 + +06:15.280 --> 06:18.010 +では、 最初のテスト項目を見てみよう。 + +06:19.120 --> 06:19.330 +だから + +06:24.880 --> 06:26.620 +見てみよう。 + +06:26.710 --> 06:32.290 +フォード用のACコンプレッサー修理キットです。 + +06:32.290 --> 06:42.130 +そして、 これは僕自身がやらなければならなかったことなんだけど、 そう、 明らかに最初のデータポイントについてはあまりいい仕事をしていない。 + +06:42.130 --> 06:48.490 +重要なのは、 ロット全体でそれを行うことであり、 少なくとも私たちがすべてのテストで一貫して使用してきた250本である。 + +06:48.490 --> 06:53.410 +では、 さっそく実行してみよう。 + +06:53.410 --> 06:58.540 +だから、 最初の2、 3回は少し赤く見え、 その後少し良くなって緑になるが、 + +06:58.540 --> 07:00.160 +その後は赤一色だ。 + +07:00.160 --> 07:04.630 +そのため、 いくつかの結果が混在している。 + +07:05.440 --> 07:11.470 +この時点で、 私は250人のためにあなたを吊るし上げるつもりはない。 + +07:11.470 --> 07:13.150 +私は一時停止する。 + +07:13.150 --> 07:16.510 +そして次のビデオで結果を明らかにする。 + +07:16.510 --> 07:17.770 +そこで会おう。 diff --git a/week5/community-contributions/subtitles/srts/59473101/ko_KR.srt b/week5/community-contributions/subtitles/srts/59473101/ko_KR.srt new file mode 100755 index 0000000..46c4fa5 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473101/ko_KR.srt @@ -0,0 +1,322 @@ +WEBVTT + +00:01.400 --> 00:02.420 +잘 돌아왔어요 + +00:02.420 --> 00:06.980 +10분에서 15분 정도 지나자 마라톤이 끝났어요 + +00:06.980 --> 00:08.090 +어떻게 아냐고요? + +00:08.120 --> 00:08.900 +몇 가지 있죠 + +00:08.900 --> 00:15.020 +오픈AI에서 이메일이 왔습니다 여기 보이듯이 제 튜닝에 + +00:15.050 --> 00:16.910 +대해 알려 주네요 + +00:16.940 --> 00:22.490 +어쩌고저쩌고가 성공적으로 완료됐고 새 모델, 어쩌고저쩌고가 생성됐어요 + +00:22.490 --> 00:28.040 +모델의 이름에서 워드 프로세서가 보이시죠. 프로세서에 접미사를 포함하도록 + +00:28.040 --> 00:30.680 +특별히 요청했거든요. 
+ +00:30.740 --> 00:32.840 +어떻게 작동하는지 보여드릴게요 + +00:32.840 --> 00:38.390 +미세한 조정의 위업 이름이죠 GPT 4 미니 버전의 이름이에요 + +00:38.390 --> 00:43.850 +퍼스널 콜론 프로세서로 작업해 왔죠 그리고 마지막 코드예요 + +00:43.850 --> 00:45.740 +이게 제 이메일 주소예요 + +00:45.770 --> 00:48.110 +여기가 유피터랩이에요 + +00:48.110 --> 00:50.090 +우리가 진행하던 거였어요 + +00:50.180 --> 00:58.370 +현재 상태에 있는 마지막 메시지 10개를 보고 있는데요 단계가 완료된 걸 보실 + +00:58.370 --> 00:59.240 +수 있죠 + +00:59.390 --> 01:01.940 +마지막 다섯 계단이에요 + +01:01.940 --> 01:04.110 +파인 튠 모델이 완성됐어요 + +01:04.140 --> 01:08.070 +사용하기 전에 사용 정책에 대항해 모델을 평가하는 거죠 + +01:08.070 --> 01:11.040 +제가 말씀드린 게 그거였어요 + +01:11.250 --> 01:17.340 +그러면 사용 방침이 완성되고 작업이 성공적으로 완료되죠 + +01:17.340 --> 01:24.960 +이제 무게와 편향으로 어떻게 보이는지 보여드릴게요 + +01:24.960 --> 01:27.690 +이게 최종 무게와 편향도 도표예요 + +01:27.840 --> 01:33.870 +여기서 정말 중요한 건 훈련과 검증 손실입니다 검증 손실은 + +01:33.870 --> 01:38.880 +정기적으로 계산된 훈련 손실만큼 발생하거나 일어나지 + +01:38.880 --> 01:40.470 +않아요 + +01:40.710 --> 01:46.530 +다시 말씀드리지만 저희는 한 시대를 개혁하면서 모든 훈련 데이터가 새로워졌어요 + +01:46.530 --> 01:52.410 +훈련의 손실은 검증의 손실만큼이나 유용해요 항상 한 세트에서 + +01:52.410 --> 01:54.000 +계산을 하니까요 + +01:54.030 --> 01:57.960 +유행을 파악하는 데 특히 유용하죠 + +01:58.080 --> 02:02.670 +언뜻 보면 검증 손실을 보면 별다른 추세가 없는 것 같아요 + +02:02.700 --> 02:04.500 +그래서 걱정이죠 + +02:04.500 --> 02:05.520 +이 얘기를 꺼내죠 + +02:05.520 --> 02:08.510 +이 패널을 편집하고 확대할 수 있어요 + +02:08.510 --> 02:10.940 +여기서 y축을 바꿔도 되고요 + +02:10.970 --> 02:12.920 +최소 0달러는 받아야죠 + +02:12.920 --> 02:15.590 +최대한 3개로 하죠 + +02:15.590 --> 02:17.600 +줌 인을 하죠 + +02:18.620 --> 02:23.240 +보시다시피 나아지는 것 같지 않아요 + +02:23.240 --> 02:32.000 +사실, 이렇게 말할 수도 있어요. 조금씩 증가하고 있다고요. 
+ +02:32.120 --> 02:39.080 +하지만 스무딩 함수가 이 차트에 있는지 + +02:39.080 --> 02:46.910 +잘 모르겠네요 스무딩 함수 말이에요 + +02:47.450 --> 02:54.080 +네, 확실히 올라가진 않지만 뭔가 개선된 것 같아요 그리고 + +02:54.080 --> 02:57.080 +평평하게 유지됐죠 + +02:57.080 --> 03:00.620 +하지만 300점 이전에는 개선이 있었던 것 같아요 + +03:01.100 --> 03:07.150 +이런 것들을 보면서 시간을 더 들여 보세요 + +03:07.150 --> 03:12.950 +하지만 이제 테스트 데이터를 대상으로 이 모델을 평가할 때예요 + +03:13.340 --> 03:18.260 +이걸 차고 있다가 다 되면 비디오로 돌릴게요 + +03:18.380 --> 03:20.960 +그럼 주피터 연구소로 돌아가죠 + +03:20.960 --> 03:24.020 +이게 저희가 만든 미세 튜닝 모델이에요 + +03:24.260 --> 03:29.060 +get ID를 얻어서 미세 튜닝 모델을 수집할 수 있어요 + +03:29.060 --> 03:30.830 +어떻게 되는지 간단히 보여드릴게요 + +03:30.860 --> 03:41.450 +이걸 보여드리면 여길 보시면 바로 여기에 새 특성 미세 조정 모델이 있는 걸 보실 수 있습니다 이메일에 + +03:41.450 --> 03:47.810 +있던 미세 조정 모델의 동일한 이름도 포함하고 있죠 + +03:47.810 --> 03:52.430 +이메일에서 복사, 붙여넣기를 할 수 있지만 코드를 이용해서 빼내는 + +03:52.430 --> 03:53.180 +게 낫겠죠 + +03:53.180 --> 03:54.380 +여기 있네요 + +03:54.980 --> 04:03.020 +제가 제안할 때 완료됐다는 걸 보여드리기 위해 이걸 실행하면 같은 이름을 + +04:03.020 --> 04:04.310 +갖게 되죠 + +04:05.540 --> 04:07.100 +좋아요 + +04:07.100 --> 04:13.470 +함수에 대한 메시지를 다시 할게요 + +04:13.530 --> 04:20.400 +이번에는 어, 답이 없는 것만 사용할게요 + +04:20.430 --> 04:24.270 +물론 그 정보를 주고 싶진 않아요 + +04:24.540 --> 04:29.010 +그게 통할 거라고 스스로를 설득해 보죠 + +04:29.400 --> 04:30.960 +여기요 + +04:30.990 --> 04:35.280 +그래서 질문이 생기죠 + +04:35.280 --> 04:37.260 +가격은 안 나와요 + +04:37.260 --> 04:41.190 +모델에게 주어진 과제는 이 부분을 완성하는 거예요 + +04:41.820 --> 04:47.340 +기억하실 거예요 지난번에 나온 유틸리티 함수요 돌아오는 것에서 가격을 + +04:47.340 --> 04:48.840 +추려내는 거죠 + +04:49.260 --> 04:54.960 +지난번에 이걸 했을 때 대략 99달러였어요 99는 어쩌고저쩌고죠 + +04:54.990 --> 04:59.610 +실행하면 get 가격 함수에서 가격을 벗기는 거죠 + +04:59.820 --> 05:01.920 +이미 알고 계시겠지만요 + +05:02.370 --> 05:10.020 +이게 함수입니다 우리가 시험할 함수죠 + +05:10.020 --> 05:18.270 +GPT의 미세한 반응은 오픈AI 챗GPT 완성입니다 일반 GPT 4를 그렇게 + +05:18.270 --> 05:20.010 +부르는데요 + +05:20.040 --> 05:21.570 +같은 API네요 + +05:21.600 --> 05:22.470 +맞아요 + +05:22.470 --> 05:27.000 +아주 미세한 차이죠 + +05:27.030 --> 05:30.330 +GPT 4 미니에서는 통과 못 하는 부분이죠 + +05:30.360 --> 
05:38.850 +미세 튜닝 모델의 이름을 전달합니다 바로 이 이름이죠 오픈AI에 보낼 거예요 + +05:38.850 --> 05:44.160 +자동으로 알 거예요 잘 조율된 버전을 사용하고 싶다는 의미라는 것을요 + +05:44.670 --> 05:47.430 +Get it, get it, get it, get it, get it, it, it! 응답을 취소하면 대가를 받는 거죠 + +05:49.080 --> 05:51.510 +예제 하나를 프린트하죠 + +05:51.510 --> 05:58.080 +테스트 세트를 프린트해 보죠 테스트 세트에 처음 나온 건데 200달러나 + +05:58.080 --> 06:01.380 +주고 샀고 제가 예상치 못한 거였어요 + +06:01.380 --> 06:04.620 +그리고 GPT를 처음으로 파인 튜닝할 거예요 + +06:04.620 --> 06:06.480 +어떤 결과가 나왔는지 보죠 + +06:07.410 --> 06:12.780 +네, 그게 죄송합니다 아까 보려던 액수는 훈련 비용이었어요 + +06:12.780 --> 06:15.120 +첫 번째 테스트 제품 가격이에요 + +06:15.280 --> 06:18.010 +첫 번째 테스트 아이템이 뭔지 보죠 + +06:19.120 --> 06:19.330 +그래서요? + +06:24.880 --> 06:26.620 +한번 보죠 + +06:26.710 --> 06:32.290 +에어컨 압축기 수리 키트예요 포드 차체 부품용이죠 + +06:32.290 --> 06:38.440 +이건 제가 직접 해야 했던 작업인데 첫 번째 데이터 포인트를 제대로 처리하지 못했지만 + +06:38.440 --> 06:42.130 +데이터 포인트 하나쯤이야 무슨 상관이죠? + +06:42.130 --> 06:47.560 +중요한 건 공장 전체에 걸쳐 하는 겁니다 적어도 모든 테스트에 일관적으로 사용했던 250g을 + +06:47.560 --> 06:48.490 +통해서요 + +06:48.490 --> 06:53.410 +그럼 지체 없이 실행해 보죠 + +06:53.410 --> 06:58.540 +처음 몇 결과는 약간 빨간색이었다가 좀 더 좋아 보였다가 초록색으로 변했다가 다시 온통 + +06:58.540 --> 07:00.160 +빨간색이었죠. 비트 + +07:00.160 --> 07:04.630 +결과가 엇갈리네요 + +07:05.440 --> 07:11.470 +250년 동안 매달리게 할 순 없어요 + +07:11.470 --> 07:13.150 +잠시 멈출게요 + +07:13.150 --> 07:16.510 +다음 영상에서는 결과를 알려드리죠 + +07:16.510 --> 07:17.770 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/59473137/en_US.srt b/week5/community-contributions/subtitles/srts/59473137/en_US.srt new file mode 100755 index 0000000..65503d0 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473137/en_US.srt @@ -0,0 +1,394 @@ +WEBVTT + +00:00.800 --> 00:02.540 +Let's get straight to it. + +00:02.690 --> 00:09.890 +So the place where you can see everything that's going on and get knee deep in your data is a beautiful + +00:09.890 --> 00:12.260 +platform called Weights and Biases. + +00:12.380 --> 00:17.330 +It's completely free for personal use anyway, and it's a superb. 
+ +00:17.360 --> 00:20.180 +You can, uh, go here. + +00:20.210 --> 00:22.340 +Uh wandb is weights and biases. + +00:22.580 --> 00:25.400 +I to sign up, create your free account. + +00:25.400 --> 00:27.080 +You don't need to if you don't wish to. + +00:27.110 --> 00:29.450 +This is an this is completely optional. + +00:29.480 --> 00:32.660 +It will allow you to visualize your training while it runs. + +00:32.660 --> 00:36.680 +I strongly recommend it because it's super satisfying. + +00:36.740 --> 00:41.810 +Uh, there's no point in doing training in my mind if you can't see lots of wiggly lines. + +00:41.810 --> 00:44.930 +And believe me, we're going to have a lot of wiggly lines. + +00:44.930 --> 00:52.790 +Uh, not not just today, but in the coming two weeks to go, it will be, uh, lots of charts, uh, + +00:52.790 --> 00:57.950 +when you go to weights and biases and you've signed up for your free account or you may already have + +00:57.950 --> 01:03.810 +one, uh, if you go to the avatar menu, like the settings menu and go to settings, You can create + +01:03.810 --> 01:09.750 +an API key very similar to the sorts of API keys we've used for OpenAI and so on. + +01:10.050 --> 01:15.870 +You can then go to the OpenAI dashboard, and I've put a link right here in the notebook. + +01:15.900 --> 01:21.990 +And when you go to that page, but in the middle of the page, there is a section that says integrations, + +01:21.990 --> 01:25.590 +and it has a section where you can put in your weights and biases key. + +01:25.590 --> 01:32.130 +And if you do that, then it's your OpenAI account is hooked up to weights and biases, and you'll be + +01:32.160 --> 01:36.600 +able to watch this fine tuning process happening in weights and biases. + +01:36.630 --> 01:38.520 +And that's just great. + +01:38.520 --> 01:41.670 +So I strongly recommend it but not required. 
+ +01:41.700 --> 01:46.950 +Assuming you did do that, then you're going to want to execute this line here which is setting up your + +01:46.950 --> 01:48.660 +weights and biases integration. + +01:48.900 --> 01:50.280 +And you can give it a name. + +01:50.280 --> 01:53.520 +I'm calling the name of this project generally. + +01:53.520 --> 01:55.410 +Overall we'll be doing a lot of this project. + +01:55.410 --> 02:00.360 +I call it Pricer, but for this one I'm calling it GPT pricer because it is. + +02:00.390 --> 02:06.720 +GPT is fine tuned version to price products, That's why I call it Pricer. + +02:06.750 --> 02:08.640 +So we run that. + +02:08.880 --> 02:11.790 +Just setting up a settings right now. + +02:11.820 --> 02:13.920 +But this this is it. + +02:13.920 --> 02:15.450 +This folks. + +02:15.450 --> 02:18.780 +This is the time when we actually do our fine tuning. + +02:18.810 --> 02:29.430 +We call a new OpenAI API, which is a the big the big one OpenAI fine tuning dot jobs dot create. + +02:29.700 --> 02:31.140 +And what we pass in. + +02:31.140 --> 02:36.390 +So so you remember earlier this this is what came back from uploading our file. + +02:36.390 --> 02:37.560 +It has an ID. + +02:37.560 --> 02:43.500 +Let me show you that it has an ID which identifies the file. + +02:45.690 --> 02:47.100 +That's the name of the file. + +02:47.100 --> 02:50.040 +As far as OpenAI is concerned, we had a rather simpler name for it. + +02:50.040 --> 02:52.140 +This is the whole this is a file object. + +02:52.170 --> 02:53.940 +You probably remember seeing this a moment ago. + +02:53.970 --> 02:57.390 +That's the file ID and that's all the details about it. + +02:57.570 --> 02:59.520 +Um, and that's its ID. + +02:59.520 --> 03:02.760 +So we provide the ID of the training file. + +03:02.760 --> 03:05.370 +We also provide the ID of the validation file. 
+ +03:05.400 --> 03:11.180 +Again not strictly necessary in our case now, but a good practice and you will want to do this for + +03:11.180 --> 03:13.010 +your fine tuning runs in the future. + +03:13.040 --> 03:13.820 +Probably. + +03:13.940 --> 03:15.500 +We provide the model. + +03:15.530 --> 03:21.140 +Now I'm suggesting GPT for many, partly because it's going to be cheaper to run it in inference. + +03:21.140 --> 03:23.780 +It's just going to be a couple of cents at the most. + +03:23.900 --> 03:31.820 +Um, and partly because if you remember earlier when we ran the original models, GPT for the big guy + +03:31.820 --> 03:36.650 +and for mini gave fairly similar performance, not a ton of difference between them. + +03:36.650 --> 03:41.300 +So it seems like we might as well fine tune the smaller one. + +03:41.390 --> 03:44.180 +The seed means that it will be repeatable. + +03:44.510 --> 03:46.070 +Um, number of epochs. + +03:46.070 --> 03:47.090 +So this is optional. + +03:47.090 --> 03:53.570 +You don't need to specify number of epochs, how many times it's going to go all the way through the + +03:53.570 --> 03:54.020 +data. + +03:54.050 --> 03:55.910 +You can let it decide for itself. + +03:55.910 --> 04:02.480 +I want to fix it to one because we're providing a fair amount of data, 500 data points more than than + +04:02.480 --> 04:04.340 +is usually recommended. + +04:04.490 --> 04:08.230 +Um, and so I figured there's no point in doing multiple epochs. + +04:08.230 --> 04:12.260 +If we decide we want to do more, we can just bump up the amount of training data because we've got + +04:12.260 --> 04:15.320 +lots of it, rather than doing multiple epochs. + +04:15.440 --> 04:18.890 +This is where I specify the weights and biases integration. + +04:18.890 --> 04:23.120 +If you don't want to use weights and biases, just don't just remove this line altogether. 
+ +04:23.540 --> 04:30.350 +Um, and then suffix is an optional thing that just will include that in the name of the model that + +04:30.350 --> 04:31.280 +it creates. + +04:31.430 --> 04:35.300 +Just something you can do if you want the model to have a decent name. + +04:35.540 --> 04:37.940 +Uh, and that's about all there is to it. + +04:37.940 --> 04:42.200 +I will just mention if you haven't come across the word hyperparameters before, but I'm sure you have. + +04:42.230 --> 04:49.970 +But for anyone that hasn't, hyperparameters is what people data scientists call just the extra knobs + +04:49.970 --> 04:55.790 +and wheels and settings that control how your training is going to work. + +04:55.820 --> 04:59.600 +Any extra parameter that is something that you can set. + +04:59.630 --> 05:02.870 +Try to different possibilities to see if it makes things better or worse. + +05:02.870 --> 05:08.630 +And that process of trying out different values and seeing if it makes it better or worse, uh, known + +05:08.630 --> 05:11.250 +as hyperparameter Optimization. + +05:11.580 --> 05:13.680 +Hyperparameter tuning as well. + +05:13.980 --> 05:19.290 +And all of this is very fancy talk for trial and error, which is what it really is. + +05:19.290 --> 05:21.030 +It's saying these are settings. + +05:21.030 --> 05:23.640 +We don't really know if it's going to make it better or worse. + +05:23.640 --> 05:27.180 +There's no real there's no no great theory behind this. + +05:27.180 --> 05:30.630 +So just try some different possibilities and see what happens. + +05:30.780 --> 05:32.460 +But no one wants to say it quite like that. + +05:32.460 --> 05:37.260 +So everyone says hyperparameter optimization because that sounds much more important. + +05:37.500 --> 05:39.750 +And so that's that's what we'll call it. + +05:39.840 --> 05:42.720 +And that's why we pass in the hyper parameters. 
+ +05:42.720 --> 05:48.420 +And if you want to yourself do some hyperparameter optimization and try different epochs, then you + +05:48.450 --> 05:50.250 +certainly should do so. + +05:50.250 --> 05:52.740 +But anyways I talk enough. + +05:52.740 --> 06:00.120 +We will run this guy and like that it runs and what comes back is a fine tuning job. + +06:00.420 --> 06:04.770 +It says when it was created, uh, it says there's no error, which is good. + +06:04.800 --> 06:05.760 +Not yet. + +06:05.940 --> 06:08.610 +Uh, it's finished at none. + +06:08.850 --> 06:12.750 +Uh, here are our hyperparameters with the number of epochs. + +06:13.090 --> 06:14.740 +Um, that is the model. + +06:14.950 --> 06:19.390 +Um, and then everything else, the files that we passed in. + +06:20.050 --> 06:26.050 +Uh, now, this, uh, here will list all of the jobs that we've got right now. + +06:26.050 --> 06:28.390 +And it starts with the most recent first. + +06:28.390 --> 06:34.870 +So since we've just kicked this off, if we run this, we'll see that this, this particular job, um, + +06:35.200 --> 06:40.600 +uh, and we can check it is because we should see that this here matches this here. + +06:40.600 --> 06:47.950 +So we're talking about the same job, uh, and we can see somewhere here what's going on? + +06:48.850 --> 06:53.260 +Um, well, actually, first, let's just this job ID thing so that we don't have to keep remembering + +06:53.260 --> 06:53.380 +it. + +06:53.380 --> 06:56.050 +Let's let's take it into a variable job ID. + +06:56.380 --> 06:58.420 +Just make sure that that's what we expect. + +06:58.420 --> 07:03.940 +If I print that you see this job, ID matches that there and that there. + +07:03.940 --> 07:09.280 +This is the name of our current run the job ID that we'll use to refer to it. + +07:09.280 --> 07:16.300 +And we can call this retrieve, uh, to get information about what's going on. + +07:16.750 --> 07:19.570 +And so let me see what we can learn from this. 
+ +07:20.200 --> 07:25.240 +Uh, somewhere here we should see that it says that it's running. + +07:27.040 --> 07:31.960 +Uh, but anyway, the most important thing where you really see what's going on is in the next line + +07:31.960 --> 07:39.370 +here, which is list events passing in the job ID, and I'm limiting it to ten events. + +07:39.370 --> 07:42.880 +And if I run this now, you really see the business. + +07:42.880 --> 07:44.680 +They only have been two events. + +07:44.890 --> 07:49.600 +Um, and it's listing them in in order where the most recent event comes on top. + +07:49.600 --> 07:56.530 +So there's been two events created fine tuning job and validating training file, which is what it's + +07:56.530 --> 07:57.520 +doing now. + +07:58.660 --> 08:04.840 +And so what's going to happen next is that over time it's going to validate the file. + +08:04.840 --> 08:06.730 +And then it's going to start to train. + +08:06.730 --> 08:08.230 +And that's where things get interesting. + +08:08.260 --> 08:12.250 +And because it's going to take a couple of minutes before it gets to that point, I will break for the + +08:12.250 --> 08:12.970 +next video. + +08:12.970 --> 08:14.620 +And in the next video we'll see. + +08:14.650 --> 08:16.180 +Training in action. + +08:16.180 --> 08:17.770 +I will see you over there. 
diff --git a/week5/community-contributions/subtitles/srts/59473137/ja_JP.srt b/week5/community-contributions/subtitles/srts/59473137/ja_JP.srt new file mode 100755 index 0000000..277c2a9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473137/ja_JP.srt @@ -0,0 +1,352 @@ +WEBVTT + +00:00.800 --> 00:02.540 +本題に入ろう。 + +00:02.690 --> 00:12.260 +そこで、 何が起こっているかをすべて見ることができ、 データに深く入り込むことができるのが、 Weights and Biasesと呼ばれる美しいプラットフォームだ。 + +00:12.380 --> 00:17.330 +とにかく個人使用なら完全に無料だし、 超優秀だ。 + +00:17.360 --> 00:20.180 +ここに行けばいい + +00:20.210 --> 00:22.340 +あの......ワンドブはウェイトとバイアスです。 + +00:22.580 --> 00:25.400 +登録するには、 無料アカウントを作成してください。 + +00:25.400 --> 00:27.080 +希望しないなら、 その必要はない。 + +00:27.110 --> 00:29.450 +これは完全にオプションである。 + +00:29.480 --> 00:32.660 +これにより、 走りながらトレーニングを視覚化することができる。 + +00:32.660 --> 00:36.680 +とても満足できるので、 強くお勧めする。 + +00:36.740 --> 00:41.810 +ええと、 くねくねした線がたくさん見えないと、 僕の中ではトレーニングをする意味がないんだ。 + +00:41.810 --> 00:44.930 +信じてほしいのは、 私たちはたくさんのくねくねしたラインを持っているということだ。 + +00:44.930 --> 00:52.790 +今日だけでなく、 あと2週間もすれば、 たくさんのチャートが表示されるでしょう。 ウェイトとバイアスにアクセスして、 + +00:52.790 --> 00:57.950 +無料アカウントにサインアップするか、 すでにお持ちかもしれませんが、 + +00:57.950 --> 01:09.750 +アバター・メニューの設定メニューから、 OpenAIなどで使っているAPIキーとよく似たAPIキーを作成できます。 + +01:10.050 --> 01:15.870 +そしてOpenAIのダッシュボードに行き、 ノートブックのここにリンクを貼っておきます。 + +01:15.900 --> 01:25.590 +そのページを開くと、 ページの真ん中あたりに統合という項目があり、 ウェイトとバイアスのキーを入力する欄がある。 + +01:25.590 --> 01:36.600 +そうすれば、 あなたのOpenAIのアカウントが重みとバイアスに接続され、 重みとバイアスで起こっている微調整のプロセスを見ることができます。 + +01:36.630 --> 01:38.520 +それは素晴らしいことだ。 + +01:38.520 --> 01:41.670 +だから強く勧めるが、 必須ではない。 + +01:41.700 --> 01:48.660 +それができたと仮定して、 重みとバイアスの統合を設定するこの行を実行したい。 + +01:48.900 --> 01:50.280 +それに名前をつけることもできる。 + +01:50.280 --> 01:53.520 +私はこのプロジェクトの名前を一般的に呼んでいる。 + +01:53.520 --> 01:55.410 +全体的に、 このプロジェクトはたくさんやることになるだろう。 + +01:55.410 --> 02:00.360 +私はプライサーと呼んでいるが、 今回はGPTプライサーと呼ぶことにする。 + +02:00.390 --> 02:06.720 +GPTは商品の価格設定をするために微調整されたバージョンです。 + +02:06.750 --> 02:08.640 +だから、 それを実行するんだ。 + +02:08.880 --> 02:11.790 +今、 
セッティングをしているところだ。 + +02:11.820 --> 02:13.920 +しかし、 これだ。 + +02:13.920 --> 02:15.450 +この人々だ。 + +02:15.450 --> 02:18.780 +実際に微調整を行うのはこの時期だ。 + +02:18.810 --> 02:29.430 +私たちは新しいOpenAI APIを呼び出します。 これは、 OpenAIのファインチューニング・ドットジョブ・ドットが作成する大きな大きなものです。 + +02:29.700 --> 02:31.140 +そして、 私たちが通過するもの。 + +02:31.140 --> 02:36.390 +だから、 ファイルをアップロードして戻ってきたのがこれなんだ。 + +02:36.390 --> 02:37.560 +IDがある。 + +02:37.560 --> 02:43.500 +ファイルを識別するIDがあることをお見せしよう。 + +02:45.690 --> 02:47.100 +これがファイル名だ。 + +02:47.100 --> 02:50.040 +OpenAIに関しては、 私たちはもっとシンプルな名前を考えていた。 + +02:50.040 --> 02:52.140 +これがファイル・オブジェクト全体だ。 + +02:52.170 --> 02:53.940 +少し前に見たのを覚えているだろう。 + +02:53.970 --> 02:57.390 +これがファイルIDで、 これがその詳細のすべてだ。 + +02:57.570 --> 02:59.520 +これがIDだ。 + +02:59.520 --> 03:02.760 +そこで、 トレーニングファイルのIDを提供する。 + +03:02.760 --> 03:05.370 +また、 検証ファイルのIDも提供する。 + +03:05.400 --> 03:13.010 +今回のケースでも厳密には必要ないが、 良い練習方法であり、 将来的に微調整を行う際には必ず行うことになるだろう。 + +03:13.040 --> 03:13.820 +おそらくね。 + +03:13.940 --> 03:15.500 +我々はモデルを提供する。 + +03:15.530 --> 03:21.140 +今、 私は多くの人にGPTを勧めているが、 その理由のひとつは推論で実行した方が安くなるからだ。 + +03:21.140 --> 03:23.780 +せいぜい2、 3セントだろう。 + +03:23.900 --> 03:31.820 +それと、 以前、 オリジナルモデルを走らせたときのことを思い出してほしいんだが、 GPTは大柄な選手用とミニ用でかなり似たようなパフォーマンスで、 + +03:31.820 --> 03:36.650 +両者に大きな差はなかったんだ。 + +03:36.650 --> 03:41.300 +だから、 小さいほうを微調整したほうがよさそうだ。 + +03:41.390 --> 03:44.180 +種があるということは、 再現性があるということだ。 + +03:44.510 --> 03:46.070 +ええと、 エポック数。 + +03:46.070 --> 03:47.090 +だからこれはオプションだ。 + +03:47.090 --> 03:54.020 +エポック数を指定する必要はない。 + +03:54.050 --> 03:55.910 +自分で決めさせればいい。 + +03:55.910 --> 04:02.480 +通常推奨されているよりも500データポイント多い、 かなりの量のデータを提供しているので、 + +04:02.480 --> 04:04.340 +1つに修正したい。 + +04:04.490 --> 04:08.230 +それで、 複数のエポックをやる意味がないと思ったんだ。 + +04:08.230 --> 04:15.320 +もっとやりたいと思ったら、 何度もエポックするのではなく、 たくさん持っているトレーニングデータの量を増やせばいいのだ。 + +04:15.440 --> 04:18.890 +ここで重みとバイアスの統合を指定する。 + +04:18.890 --> 04:23.120 +ウェイトやバイアスを使いたくない場合は、 この行を完全に削除することだけは避けてください。 + +04:23.540 --> 04:31.280 +それから、 サフィックスはオプションで、 作成するモデルの名前に含めるだけです。 + +04:31.430 --> 04:35.300 
+ただ、 モデルにちゃんとした名前をつけたいなら、 できることだ。 + +04:35.540 --> 04:37.940 +ええと、 それだけなんだ。 + +04:37.940 --> 04:42.200 +ハイパーパラメーターという言葉に出会ったことがないかどうかだけ書いておこう。 + +04:42.230 --> 04:49.970 +ハイパーパラメーターとは、 データサイエンティストたちが、 トレーニングがどのように機能するかをコントロールするための余分なノブやホイール、 + +04:49.970 --> 04:55.790 +設定のことだ。 + +04:55.820 --> 04:59.600 +追加で設定できるパラメータがあれば、 何でもいい。 + +04:59.630 --> 05:02.870 +いろいろな可能性を試してみて、 状況が良くなるか悪くなるかを確認する。 + +05:02.870 --> 05:11.250 +異なる値を試してみて、 それが良くなるか悪くなるかを見るこのプロセスは、 ハイパーパラメーターの最適化として知られている。 + +05:11.580 --> 05:13.680 +ハイパーパラメーターのチューニングも。 + +05:13.980 --> 05:19.290 +これはすべて、 試行錯誤を意味する非常に洒落た言葉だ。 + +05:19.290 --> 05:21.030 +これが設定だと言っているんだ。 + +05:21.030 --> 05:23.640 +それが良くなるのか悪くなるのか、 僕らにはわからない。 + +05:23.640 --> 05:27.180 +これには何の理論もない。 + +05:27.180 --> 05:30.630 +だから、 いろいろな可能性を試して、 何が起こるか見てみよう。 + +05:30.780 --> 05:32.460 +しかし、 誰もそんなことは言いたがらない。 + +05:32.460 --> 05:37.260 +だからみんなハイパーパラメーター最適化と言うんだ。 + +05:37.500 --> 05:39.750 +だから、 そう呼ぶことにする。 + +05:39.840 --> 05:42.720 +そのためにハイパーパラメーターを渡す。 + +05:42.720 --> 05:48.420 +また、 ハイパーパラメーターの最適化を行い、 さまざまなエポックを試したいのであれば、 + +05:48.450 --> 05:50.250 +そうすべきだろう。 + +05:50.250 --> 05:52.740 +とにかく、 私は十分に話した。 + +05:52.740 --> 06:00.120 +私たちはこの男を走らせ、 それが走ることを気に入り、 戻ってくるのは微調整の仕事だ。 + +06:00.420 --> 06:04.770 +作成されたとき、 エラーはなかったと書いてある。 + +06:04.800 --> 06:05.760 +まだだ。 + +06:05.940 --> 06:08.610 +ええと、 誰もいないところで終わっている。 + +06:08.850 --> 06:12.750 +ハイパーパラメータとエポック数です。 + +06:13.090 --> 06:14.740 +それがモデルだ。 + +06:14.950 --> 06:19.390 +それから、 私たちが持ち込んだファイル。 + +06:20.050 --> 06:26.050 +ええと、 では、 ここに今あるすべての仕事をリストアップします。 + +06:26.050 --> 06:28.390 +そして、 一番新しいものから始まる。 + +06:28.390 --> 06:40.600 +このジョブを実行すると、 このジョブがこのジョブと一致することが確認できます。 + +06:40.600 --> 06:47.950 +同じ仕事について話しているわけだが、 何が起こっているのか、 このどこかでわかるだろう? 
+ +06:48.850 --> 06:53.380 +ええと、 実は、 まず、 このジョブIDを覚えておく必要がないようにしましょう。 + +06:53.380 --> 06:56.050 +ジョブIDを変数にしてみよう。 + +06:56.380 --> 06:58.420 +ただ、 それが私たちの期待するものであることを確認してほしい。 + +06:58.420 --> 07:03.940 +この仕事をプリントすれば、 IDはあそことあそこが一致する。 + +07:03.940 --> 07:09.280 +これは現在のランの名前であり、 それを参照するために使用するジョブIDである。 + +07:09.280 --> 07:16.300 +このリトリーブを呼び出して、 何が起こっているのか情報を得ることができる。 + +07:16.750 --> 07:19.570 +ここから何が学べるか考えてみよう。 + +07:20.200 --> 07:25.240 +ええと、 ここのどこかに "実行中 "と書いてあるはずだ。 + +07:27.040 --> 07:31.960 +ええと、 でもとにかく、 何が起こっているのかが本当にわかる最も重要なことは、 + +07:31.960 --> 07:39.370 +この次の行にある、 ジョブIDに渡されるイベントのリストです。 + +07:39.370 --> 07:42.880 +そして、 今これを実行すれば、 本当にビジネスが見えてくる。 + +07:42.880 --> 07:44.680 +まだ2回しか開催されていない。 + +07:44.890 --> 07:49.600 +ええと、 そして、 一番新しいイベントが一番上に来るように順番にリストアップされているんだ。 + +07:49.600 --> 07:57.520 +つまり、 ジョブの微調整とトレーニングファイルの検証という2つのイベントが発生した。 + +07:58.660 --> 08:04.840 +そして次に起こるのは、 時間をかけてファイルを検証することだ。 + +08:04.840 --> 08:06.730 +そしてトレーニングを開始するんだ。 + +08:06.730 --> 08:08.230 +そこからが面白くなる。 + +08:08.260 --> 08:12.970 +そして、 そのポイントに到達するまでに2、 3分かかりそうなので、 次のビデオのために休憩する。 + +08:12.970 --> 08:14.620 +次のビデオでは、 それを見てみよう。 + +08:14.650 --> 08:16.180 +トレーニングの実際 + +08:16.180 --> 08:17.770 +向こうで会おう。 diff --git a/week5/community-contributions/subtitles/srts/59473137/ko_KR.srt b/week5/community-contributions/subtitles/srts/59473137/ko_KR.srt new file mode 100755 index 0000000..286dcef --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473137/ko_KR.srt @@ -0,0 +1,385 @@ +WEBVTT + +00:00.800 --> 00:02.540 +Get it, get it 바로 본론으로 들어가죠 + +00:02.690 --> 00:09.890 +모든 걸 볼 수 있고 데이터에 무릎까지 들어갈 수 있는 플랫폼은 바로 Webex and Bias라는 + +00:09.890 --> 00:12.260 +아름다운 플랫폼이에요 + +00:12.380 --> 00:17.330 +개인적 용도로만 사용하고 아주 좋아요 + +00:17.360 --> 00:20.180 +여기로 가시면 돼요 + +00:20.210 --> 00:22.340 +원반은 무게와 편향이에요 + +00:22.580 --> 00:25.400 +등록하고 무료 계정을 만들라고요 + +00:25.400 --> 00:27.080 +내키지 않으면 안 해도 돼요 + +00:27.110 --> 00:29.450 +이건 완전히 선택 사항이에요 + +00:29.480 --> 00:32.660 +이게 작동하는 동안 훈련하는 모습을 시각화할 수 있죠 + 
+00:32.660 --> 00:36.680 +강력히 추천해요 정말 만족스럽거든요 + +00:36.740 --> 00:41.810 +꿈틀거리는 선이 많이 보여야 머릿속에서 훈련할 수 있죠 + +00:41.810 --> 00:44.930 +꿈틀거리는 선이 아주 많을 거예요 + +00:44.930 --> 00:52.790 +오늘뿐만 아니라 앞으로 2주 동안 차트가 많을 겁니다 무료 계정을 등록했는지 평가할 + +00:52.790 --> 00:57.950 +때 유용할 수도 있고 이미 등록한 계정도 있을 수 있죠 아바타 + +00:57.950 --> 01:03.810 +메뉴나 설정 메뉴에서 설정으로 가면 오픈AI나 기타에서 사용한 + +01:03.810 --> 01:09.750 +API 키와 유사한 API 키를 생성할 수 있어요 + +01:10.050 --> 01:15.870 +OpenAI 대시보드로 가면 링크를 노트북에 넣어 놨어요 + +01:15.900 --> 01:21.990 +해당 페이지를 보시면 중앙에 통합이라는 항목이 있어요 체중과 바이어스를 + +01:21.990 --> 01:25.590 +입력할 수 있는 항목이 있죠 + +01:25.590 --> 01:32.130 +그렇게 하면 오픈AI 계정에 무게와 편향 정보가 저장됩니다 무게와 + +01:32.160 --> 01:36.600 +편향에 따른 미세한 튜닝 과정을 볼 수 있죠 + +01:36.630 --> 01:38.520 +정말 멋져요 + +01:38.520 --> 01:41.670 +강력히 권하지만 필수는 아니에요 + +01:41.700 --> 01:46.950 +그렇게 했다고 가정하고 이 라인을 실행합니다 무게 설정과 + +01:46.950 --> 01:48.660 +편향 통합이죠 + +01:48.900 --> 01:50.280 +이름을 지어줄 수도 있고요 + +01:50.280 --> 01:53.520 +이 프로젝트의 이름을 말하고 있어요 + +01:53.520 --> 01:55.410 +전반적으로 이 프로젝트의 많은 부분을 할 거예요 + +01:55.410 --> 02:00.360 +저는 프라이서라고 부르지만 이 차는 GPT 프라이서라고 부르죠 + +02:00.390 --> 02:06.720 +GPT는 가격 제품의 정교한 버전입니다 그래서 프라이서라고 부르죠 + +02:06.750 --> 02:08.640 +그걸 실행하죠 + +02:08.880 --> 02:11.790 +지금 설정하고 있어요 + +02:11.820 --> 02:13.920 +하지만 이게 다예요 + +02:13.920 --> 02:15.450 +여러분 + +02:15.450 --> 02:18.780 +이제 세세한 조율을 할 거예요 + +02:18.810 --> 02:29.430 +새로운 OpenAI API라고 하는데 아주 큰 거죠 OpenAI의 .잡스.Create죠 + +02:29.700 --> 02:31.140 +우리가 지나치는 것도요 + +02:31.140 --> 02:36.390 +아까 기억하시겠지만 이건 우리 파일을 업로드하면 나오는 거죠 + +02:36.390 --> 02:37.560 +ID가 있어요 + +02:37.560 --> 02:43.500 +파일을 식별하는 ID를 갖고 있어요 + +02:45.690 --> 02:47.100 +그게 파일 이름이에요 + +02:47.100 --> 02:50.040 +오픈아이는 좀 더 단순한 이름으로 불렸어요 + +02:50.040 --> 02:52.140 +이건 전부 파일 객체예요 + +02:52.170 --> 02:53.940 +조금 전에 이걸 보셨을 거예요 + +02:53.970 --> 02:57.390 +파일 ID와 관련된 모든 세부 사항이죠 + +02:57.570 --> 02:59.520 +그게 ID예요 + +02:59.520 --> 03:02.760 +훈련 파일의 ID를 제공하죠 + +03:02.760 --> 03:05.370 +검증 파일의 ID도 제공하죠 + +03:05.400 --> 03:11.180 +지금 꼭 필요한 건 
아니지만 연습하면 좋을 거예요 나중에 세밀하게 조정할 + +03:11.180 --> 03:13.010 +때 써먹으세요 + +03:13.040 --> 03:13.820 +아마도요 + +03:13.940 --> 03:15.500 +모델을 제공하죠 + +03:15.530 --> 03:21.140 +저는 많은 사람에게 GPT를 제안하고 있습니다 추론상 실행하는 것이 더 저렴하기 때문이기도 하죠 + +03:21.140 --> 03:23.780 +몇 센트밖에 안 될 거예요 + +03:23.900 --> 03:31.820 +그 이유는 앞서 원형 모델을 실행했을 때 GPT와 미니가 비슷한 성능을 + +03:31.820 --> 03:36.650 +보였기 때문이기도 해요 큰 차이가 없었죠 + +03:36.650 --> 03:41.300 +작은 걸 세심하게 조정하는 게 좋겠어요 + +03:41.390 --> 03:44.180 +씨앗은 반복 가능하다는 뜻이에요 + +03:44.510 --> 03:46.070 +여러 시대에서요 + +03:46.070 --> 03:47.090 +선택 사항이에요 + +03:47.090 --> 03:54.020 +발생된 시대의 개수나 데이터에 몇 번 사용될지 명시할 필요가 없어요 + +03:54.050 --> 03:55.910 +스스로 결정하게 해요 + +03:55.910 --> 04:02.480 +1로 수정하고 싶어요 데이터 양이 꽤 많거든요 권장량보다 500포인트나 + +04:02.480 --> 04:04.340 +많죠 + +04:04.490 --> 04:08.230 +여러 개혁을 시도할 필요가 없다고 생각했죠 + +04:08.230 --> 04:12.260 +더 많은 걸 하고 싶다면 훈련 데이터를 늘리면 돼요 여러 + +04:12.260 --> 04:15.320 +개혁을 할 필요 없이 이미 많으니까요 + +04:15.440 --> 04:18.890 +무게와 편향의 통합을 지정하는 부분이죠 + +04:18.890 --> 04:23.120 +무게나 편향성을 사용하기 싫다면 이 선을 아예 없애지 마세요 + +04:23.540 --> 04:31.280 +접미사는 선택적인 것으로 그것이 생성하는 모델의 이름에 포함되죠 + +04:31.430 --> 04:35.300 +모델이 괜찮은 이름을 갖길 원한다면 할 수 있는 일이죠 + +04:35.540 --> 04:37.940 +그게 다인 것 같아요 + +04:37.940 --> 04:42.200 +hyperperameter라는 단어를 처음 들어보신다면 말씀드릴게요 분명 들어봤을 거예요 + +04:42.230 --> 04:49.970 +경험이 없는 분들을 위해 hyperperameter를 설명하자면 데이터 과학자들은 이걸 추가적인 + +04:49.970 --> 04:55.790 +손잡이와 바퀴 설정이라고 부릅니다 훈련이 어떻게 진행될지 제어하는 거죠 + +04:55.820 --> 04:59.600 +추가 매개 변수는 설정할 수 있어요 + +04:59.630 --> 05:02.870 +상황이 좋아질지 나빠질지 여러 가능성을 살펴보죠 + +05:02.870 --> 05:08.630 +다양한 값을 시험해보고 더 나은지 나쁜지 보는 과정이죠 hyperpaameter + +05:08.630 --> 05:11.250 +최적화라고 해요 + +05:11.580 --> 05:13.680 +하이퍼파라미터 튜닝도요 + +05:13.980 --> 05:19.290 +시행착오를 거창하게 표현한 말인데 실제로도 그래요 + +05:19.290 --> 05:21.030 +이건 설정이라고 하네요 + +05:21.030 --> 05:23.640 +더 좋아질지 나빠질지는 알 수 없어요 + +05:23.640 --> 05:27.180 +그럴듯한 이론 같은 건 없어요 + +05:27.180 --> 05:30.630 +여러 가지 가능성을 시도해 보고 어떻게 되는지 봐야죠 + +05:30.780 --> 05:32.460 +하지만 아무도 그렇게 말하고 싶어하지 않아요 + +05:32.460 --> 
05:37.260 +하이퍼파라미터 최적화라고 다들 쓰는데 그게 더 중요하게 들리기 때문이죠 + +05:37.500 --> 05:39.750 +그래서 그렇게 부르기로 했어요 + +05:39.840 --> 05:42.720 +그래서 하이퍼 매개 변수를 넘기는 거죠 + +05:42.720 --> 05:48.420 +hyperpaameter 최적화를 하고 싶다면 여러 버전으로 + +05:48.450 --> 05:50.250 +시도해 보세요 + +05:50.250 --> 05:52.740 +어쨌든 전 말이 많아요 + +05:52.740 --> 06:00.120 +이걸 작동시키면 잘 작동하고 돌아와서 세밀한 조율만 하면 돼요 + +06:00.420 --> 06:04.770 +생성될 때 오류가 없다고 하네요 좋은 거죠 + +06:04.800 --> 06:05.760 +아직요 + +06:05.940 --> 06:08.610 +완성이 안 됐어요 + +06:08.850 --> 06:12.750 +이건 여러 개혁이 담긴 하이퍼파라미터예요 + +06:13.090 --> 06:14.740 +그게 모델이에요 + +06:14.950 --> 06:19.390 +그리고 다른 것도 전부요 우리가 제출한 파일들요 + +06:20.050 --> 06:26.050 +여기에는 현재 우리 업무가 다 적혀 있어요 + +06:26.050 --> 06:28.390 +가장 최근의 일부터 시작하죠 + +06:28.390 --> 06:34.870 +이제 막 시작했으니까 이걸 실행하면 이 특정 작업이... 확인할 수 + +06:35.200 --> 06:40.600 +있어요 여기 이 부분이 여기 이 부분과 일치해야 하거든요 + +06:40.600 --> 06:47.950 +같은 일을 하는 거군요 여기 어딘가에서 무슨 일인지 볼 수 있나요? + +06:48.850 --> 06:53.380 +우선 직업 ID부터 얘기하죠 계속 기억할 필요 없게요 + +06:53.380 --> 06:56.050 +변수 작업 ID로 가져가요 + +06:56.380 --> 06:58.420 +우리가 기대하는 대로만 하면 돼요 + +06:58.420 --> 07:03.940 +이 작업이 보인다고 하면 ID가 저기, 저기 일치해요 + +07:03.940 --> 07:09.280 +참조에 사용할 현재 실행 작업 ID의 이름이죠 + +07:09.280 --> 07:16.300 +get 절개라고 부를 수 있는데 무슨 일이 일어나는지 정보를 얻기 위해서죠 + +07:16.750 --> 07:19.570 +여기서 뭘 배울 수 있을지 보죠 + +07:20.200 --> 07:25.240 +여기 어딘가에 실행 중이라고 뜨는 게 보일 거예요 + +07:27.040 --> 07:31.960 +어쨌든 가장 중요한 것은 다음 줄에서 일어나는 일을 볼 수 있습니다. + +07:31.960 --> 07:39.370 +작업 ID로 리스트 이벤트가 전달되고 있습니다. 10개의 이벤트로 제한할게요. + +07:39.370 --> 07:42.880 +지금 운영하면 사업이 제대로 돼요 + +07:42.880 --> 07:44.680 +두 번밖에 안 했어요 + +07:44.890 --> 07:49.600 +가장 최근 사건이 가장 위인 순서로 나열돼 있어요 + +07:49.600 --> 07:56.530 +두 이벤트가 생성됐어요 미세 튜닝 작업과 훈련 파일 유효성 검사죠 지금 하고 있는 + +07:56.530 --> 07:57.520 +거예요 + +07:58.660 --> 08:04.840 +다음에 일어나는 일은 시간이 지나면서 파일의 유효성을 검사하는 거죠 + +08:04.840 --> 08:06.730 +이제 훈련을 시작할 거예요 + +08:06.730 --> 08:08.230 +Get it, get it, get it! 
이때부터 재미있어지죠 + +08:08.260 --> 08:12.250 +거기에 도달하기까지 몇 분 걸리기 때문에 다음 비디오로 넘어가서 쉬도록 + +08:12.250 --> 08:12.970 +하죠 + +08:12.970 --> 08:14.620 +다음 비디오에서 보실 수 있어요 + +08:14.650 --> 08:16.180 +실전 훈련이죠 + +08:16.180 --> 08:17.770 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/59473147/en_US.srt b/week5/community-contributions/subtitles/srts/59473147/en_US.srt new file mode 100755 index 0000000..334d2bd --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473147/en_US.srt @@ -0,0 +1,466 @@ +WEBVTT + +00:00.770 --> 00:01.910 +Well, I'm very relieved. + +00:01.910 --> 00:03.050 +I've got that behind me. + +00:03.080 --> 00:05.120 +No more human testing for me. + +00:05.150 --> 00:09.500 +We'll have one final look at my abysmal results and move swiftly on. + +00:09.530 --> 00:13.760 +The time has come for us to be working with the frontier. + +00:13.910 --> 00:18.560 +And we're going to start with GPT four mini, the miniature version. + +00:18.650 --> 00:20.810 +Um, but it's not so many. + +00:20.840 --> 00:22.400 +It's still a mighty frontier. + +00:22.430 --> 00:23.210 +LLM. + +00:23.420 --> 00:28.310 +Um, and it's going to be exciting to see how it fares compared to the other models we've worked with. + +00:28.490 --> 00:33.380 +So first of all, we need to work on our prompt for GPT four. + +00:33.680 --> 00:39.950 +And um, this is I mean, this is an extremely important part of your learning because this is where + +00:39.950 --> 00:46.070 +we see how we will solve real commercial problems like this using a frontier model. + +00:46.100 --> 00:51.500 +But a lot of this should again be pretty much second nature to you, because it's so similar to many + +00:51.500 --> 00:53.210 +of the tasks we've done before. + +00:53.240 --> 00:57.590 +The first thing we're going to do is write a function messages for item. 
+ +00:57.590 --> 01:03.680 +Given our data point, we're going to want to turn that into a prompt for GPT four, which comes in + +01:03.680 --> 01:10.490 +the form of one of these messages lists, which you remember is a list of dictionaries. + +01:10.670 --> 01:13.880 +So this is how we build up that list of dictionaries. + +01:13.880 --> 01:21.170 +It starts with a system message, which is going to be in our case you estimate prices of items, reply + +01:21.200 --> 01:24.350 +only with the price, no explanation. + +01:24.500 --> 01:31.760 +And then for the user prompt, we are going to take the test prompt from this item, which you've seen + +01:31.760 --> 01:32.900 +many times. + +01:33.140 --> 01:36.500 +But we are going to do a little bit of data munging to it. + +01:36.500 --> 01:41.090 +So first of all, I'm going to strip out this text to the nearest dollar. + +01:41.120 --> 01:46.250 +The reason I'm doing that well actually the reason I should say the reason I put that in in the first + +01:46.250 --> 01:52.430 +place is that later when we work with open source models, um, we'll be we'll be wanting to give those + +01:52.430 --> 01:57.860 +open source models, make the problem as easy as it possibly can be, because they've got a lot to learn + +01:57.860 --> 02:01.890 +and they've only got the humble 8 billion parameters to do it in. + +02:01.920 --> 02:07.170 +When it comes to these frontier models, they are obviously much more capable, much more powerful, + +02:07.170 --> 02:09.750 +and they don't need the same kind of guardrails. + +02:09.840 --> 02:15.870 +GPT four or mini is going to be very happy, uh, predicting prices, including cents, and it knows + +02:15.870 --> 02:17.280 +exactly what it's working with. + +02:17.280 --> 02:20.790 +So this was an unnecessary guardrail in this case. + +02:20.790 --> 02:21.990 +So I took it out. + +02:22.440 --> 02:27.540 +I also take out the very end of the prompt, which is where it says price is dollars. 
+ +02:27.570 --> 02:34.350 +The reason I take that out is you will see when it comes to building this list of messages, I first + +02:34.350 --> 02:38.100 +put in the system prompt under the system role. + +02:38.130 --> 02:45.840 +I then add user and I put in the user prompt, and then I put in one more message which comes from assistant. + +02:45.840 --> 02:50.070 +In other words, it's like it's the reply from GPT four mini. + +02:50.310 --> 02:50.610 +Uh. + +02:50.610 --> 02:56.700 +And that reply is price is dollars and then nothing. + +02:57.300 --> 03:04.530 +Uh, and that makes it unbearably desperate to fill in that next token with the price, because this + +03:04.530 --> 03:08.550 +is apparently the response coming back from the assistant from it. + +03:08.610 --> 03:15.630 +Uh, and so you've really we've, we've we've teed it up so that it feels, uh, like it absolutely + +03:15.630 --> 03:23.010 +has to fill that in with the most plausible next token, which is the most likely price of this product. + +03:23.010 --> 03:28.590 +So that that trick of sort of having it be the assistant's response and filling it in that way is, + +03:28.590 --> 03:31.890 +uh, is a good one to experiment with. + +03:32.280 --> 03:35.430 +Um, so that's what we do. + +03:35.460 --> 03:38.850 +Let's just quickly, uh, try this out and have a look at this. + +03:38.880 --> 03:47.430 +So, so if we say, like, messages for and put in the first test data point just to see what this looks + +03:47.430 --> 03:52.890 +like, what we get is, uh, roll system and then this is the system prompt. + +03:52.980 --> 03:55.320 +Hopefully that makes total sense. + +03:55.380 --> 04:00.120 +Then roll user and the content is how much does this cost. + +04:00.120 --> 04:08.310 +And then an item and, uh, yeah, I'm guessing, uh, some kind of, uh, horrible memory of having + +04:08.310 --> 04:11.310 +to go through and guess the prices of all of these things. 
+ +04:11.310 --> 04:13.740 +Just looking at it fills me with dread. + +04:13.800 --> 04:15.780 +Uh, but it won't be my problem this time. + +04:15.780 --> 04:17.760 +It's going to be GPT four or Mini's problem. + +04:17.760 --> 04:23.910 +So it gets given that as part of the user prompt, and then it's given the assistance response. + +04:23.910 --> 04:26.850 +It's own response price is dollars. + +04:27.390 --> 04:31.620 +And then it will try and continue this conversation. + +04:31.890 --> 04:36.810 +Um, so this is a great, uh, prompt for us to be sending GPT four. + +04:36.840 --> 04:43.410 +Oh, then next, just one more little quick function, a utility called Get Price that I've written + +04:43.410 --> 04:51.330 +here, which takes a string s, uh, and, uh, extracts from that string a floating point number within + +04:51.330 --> 04:51.840 +it. + +04:51.930 --> 05:00.670 +Um, so that if, for example, we get a string back from GPT four, like, like, um, the price is + +05:00.670 --> 05:04.840 +roughly dollar 99.99. + +05:05.290 --> 05:08.110 +Um, because blah blah. + +05:09.610 --> 05:15.730 +If it doesn't obey my instructions and it decides to just be talkative about it, we will still strip + +05:15.730 --> 05:16.120 +out. + +05:16.150 --> 05:16.420 +Oops. + +05:16.420 --> 05:17.680 +Don't reveal. + +05:17.740 --> 05:23.260 +We will still strip out the 99.99 there. + +05:23.560 --> 05:24.250 +So it will. + +05:24.280 --> 05:25.390 +It will work. + +05:26.290 --> 05:27.280 +Uh, okay. + +05:27.280 --> 05:30.520 +That brings us I will carefully scroll down. + +05:30.550 --> 05:33.880 +That brings us to the big, uh, function. + +05:33.910 --> 05:34.930 +This is. + +05:34.930 --> 05:36.280 +This is the real deal. + +05:36.310 --> 05:42.970 +Uh, we are going to call GPT four mini with an item, and this is how we're going to do it. + +05:43.000 --> 05:47.830 +Of course, we say OpenAI ChatGPT completions dot create. 
+ +05:48.190 --> 05:51.940 +Uh, we pass in a model, uh, GPT four mini. + +05:52.150 --> 05:59.290 +Uh, we, uh, pass in the messages for that item, which we use the function that we just talked about, + +05:59.480 --> 06:07.580 +this seed is a way to tell GPT four that we want to try and have this be reproducible, so that it gives + +06:07.580 --> 06:11.060 +the same answer with the same inputs. + +06:11.420 --> 06:16.250 +Now, our GPT, OpenAI can't always honour this because they make changes to their model. + +06:16.250 --> 06:19.310 +And so sometimes the model will change. + +06:19.310 --> 06:21.710 +And so so this number will change. + +06:21.830 --> 06:24.200 +But it does its best, all else being equal. + +06:24.200 --> 06:27.860 +Then if you call this twice with the same seed you should get the same answer. + +06:28.220 --> 06:29.630 +So that's good to know. + +06:30.110 --> 06:36.890 +And then max tokens I keep that number small because you know we've given it such a great tee up, we + +06:36.890 --> 06:40.400 +can expect it really will fill in the price as indeed it does. + +06:40.550 --> 06:44.690 +And so we don't need to waste our money, uh, bringing back more tokens than that. + +06:44.690 --> 06:51.350 +And I should say, because we do this, uh, this whole operation is very, very cheap across all 250 + +06:51.380 --> 06:52.520 +data points. + +06:52.610 --> 06:55.760 +Um, it's still for me in the US anyway. + +06:55.790 --> 06:58.870 +Costs way less than a than a, um. + +06:58.870 --> 07:03.470 +Well, less that's that's an ascent less than so little that it won't show up in the user interface. + +07:03.470 --> 07:05.300 +It's less than a fraction of a cent. + +07:05.480 --> 07:14.120 +So it's very cheap indeed to use because it's a short input tokens and a and a five output tokens. + +07:14.690 --> 07:18.140 +And just before I want to be careful not to reveal what goes on. + +07:18.260 --> 07:22.280 +Uh, uh, just before let's just quickly try this out. 
+ +07:22.280 --> 07:24.470 +Let's run GPT four. + +07:24.470 --> 07:32.510 +Oh, mini, and let's call it with Test of Zero to see how it performs. + +07:32.570 --> 07:38.210 +Uh, we run that and it gets back the answer 260.0. + +07:38.360 --> 07:42.140 +Uh, so you can see it went through it called GPT four mini. + +07:42.170 --> 07:44.480 +It then got back, it sent in something. + +07:44.510 --> 07:49.880 +It sent in exactly this uh, and then with what it got back, it put it through this function and stripped + +07:49.880 --> 07:53.870 +out 260 as the price for the first item. + +07:54.140 --> 07:59.060 +Uh, we might as well find out what is the price of the first item? + +07:59.060 --> 07:59.810 +Some, uh. + +08:02.000 --> 08:05.360 +So it gets to 60 and the number is three, seven, four. + +08:05.360 --> 08:07.220 +So it was quite far out with that one. + +08:07.580 --> 08:08.030 +Um. + +08:08.060 --> 08:14.690 +All right, well, the time has come to call tester dot test passing in GPT for a mini and test. + +08:14.690 --> 08:20.720 +And you've seen a few times now that I have already run this because it comes pretty fast, but it still + +08:20.720 --> 08:22.910 +takes maybe 2 or 3 minutes to do the whole lot. + +08:22.940 --> 08:27.620 +So I didn't want you to have to sit there while it was happening and so I can scroll through the outcome. + +08:27.620 --> 08:33.410 +But before I do so, uh, have your guess about how GPT four mini will perform. + +08:33.410 --> 08:36.380 +How will it fare against, uh, original? + +08:36.410 --> 08:37.730 +Against the average number? + +08:37.730 --> 08:42.440 +How will it fare against humanity as represented by by yours truly? + +08:42.440 --> 08:49.190 +And how will it fare against the most advanced traditional models, which had the benefit of 400,000 + +08:49.220 --> 08:50.990 +examples to train on? + +08:50.990 --> 08:51.860 +Let's see. + +08:51.860 --> 08:55.580 +So I executed this and this is what I got. 
+ +08:55.580 --> 09:02.690 +So some yellows and reds we see indeed the reproducibility you can see was I it gave identical guess + +09:02.690 --> 09:07.640 +of 260 um against the the truth there. + +09:08.210 --> 09:12.680 +Um so you can see some greens and some reds. + +09:12.710 --> 09:17.480 +Lots of greens, lots of greens but still yellows and reds as well. + +09:18.710 --> 09:20.240 +Uh, I'm going to speed up. + +09:20.270 --> 09:22.400 +Wow, a big bunch of greens there. + +09:23.360 --> 09:24.710 +Some more reds, some more greens. + +09:24.710 --> 09:33.320 +And here is the answer I give you the GPT four mini frontier model results. + +09:33.740 --> 09:36.110 +It has crushed the others. + +09:36.110 --> 09:37.250 +Crushed them. + +09:37.250 --> 09:42.770 +You can see that, uh, it has come in at $79.58. + +09:42.920 --> 09:52.400 +Uh, definitely doing better than humanity, doing better than the, um, the other, uh, um, all + +09:52.400 --> 09:58.550 +of the other models, including, uh, random forest by far, and without having any training data at + +09:58.550 --> 10:03.540 +all Based purely on getting the description and continuing that token sequence. + +10:03.540 --> 10:09.360 +And as I say, whereas there is some risk of test of training data contamination, I think it's unlikely + +10:09.360 --> 10:13.650 +because there are almost no cases of it getting it exactly right. + +10:13.650 --> 10:17.730 +So it's not as if it was regurgitating something it already knew. + +10:17.880 --> 10:25.560 +Um, it's just got such a significant, uh, worldly knowledge that it's been trained on that. + +10:25.560 --> 10:32.970 +It has a great viewpoint as to how much a tire costs, how much a headlamp costs, how much a shower + +10:32.970 --> 10:34.860 +faucet costs, and all the other things. + +10:34.860 --> 10:37.620 +That gave me tremendous problem because I have no clue. + +10:37.710 --> 10:40.260 +Uh, but it had a very good clue indeed. 
+ +10:40.350 --> 10:46.260 +Um, and so much of a clue that it beat all of the models that had worked on this. + +10:46.260 --> 10:47.910 +So that's pretty cool. + +10:47.910 --> 10:50.940 +I did enjoy seeing this a lot. + +10:51.060 --> 10:59.370 +And, uh, in the next video, we will take it up a notch and see how it's it's bigger cousin fares. diff --git a/week5/community-contributions/subtitles/srts/59473147/ja_JP.srt b/week5/community-contributions/subtitles/srts/59473147/ja_JP.srt new file mode 100755 index 0000000..3173a04 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473147/ja_JP.srt @@ -0,0 +1,409 @@ +WEBVTT + +00:00.770 --> 00:01.910 +まあ、 とても安心したよ。 + +00:01.910 --> 00:03.050 +それはもう過去のことだ。 + +00:03.080 --> 00:05.120 +もう人体実験はいらない。 + +00:05.150 --> 00:09.500 +最後にもう一度、 私のひどい成績を見て、 速やかに次に進むことにしよう。 + +00:09.530 --> 00:13.760 +フロンティアとともに仕事をする時が来たのだ。 + +00:13.910 --> 00:18.560 +まずはGPT4ミニ(ミニチュア版)から。 + +00:18.650 --> 00:20.810 +うーん、 でもそんなに多くないよ。 + +00:20.840 --> 00:22.400 +まだまだ未開の地だ。 + +00:22.430 --> 00:23.210 +LLM。 + +00:23.420 --> 00:28.310 +私たちがこれまで手掛けてきた他のモデルと比べてどうなのか、 楽しみだね。 + +00:28.490 --> 00:33.380 +だからまず、 GPT4のプロンプトを作る必要がある。 + +00:33.680 --> 00:46.070 +フロンティア・モデルを使って、 このような実際の商業的な問題をどのように解決していくかを学ぶのですから。 + +00:46.100 --> 00:53.210 +しかし、 この作業の多くは、 私たちがこれまで行ってきた多くの作業と似ているため、 あなたにとってはごく自然なことだろう。 + +00:53.240 --> 00:57.590 +最初にやることは、 itemに対するメッセージ関数を書くことだ。 + +00:57.590 --> 01:03.680 +このデータ・ポイントをGPT + +01:03.680 --> 01:10.490 +4のプロンプトに変えたい。 + +01:10.670 --> 01:13.880 +こうして辞書のリストを作っていく。 + +01:13.880 --> 01:21.170 +システム・メッセージで始まり、 私たちの場合は商品の価格を見積もり、 価格だけを返信し、 + +01:21.200 --> 01:24.350 +説明はしない。 + +01:24.500 --> 01:32.900 +そしてユーザー・プロンプトには、 何度も見てきたこの項目のテスト・プロンプトを使う。 + +01:33.140 --> 01:36.500 +でも、 これから少しデータを加工するんだ。 + +01:36.500 --> 01:41.090 +そこでまず、 このテキストを1ドル単位で切り取ってみる。 + +01:41.120 --> 01:46.250 +というのも、 オープンソースのモデルで作業する場合、 + +01:46.250 --> 02:01.890 +彼らは学ぶべきことがたくさんあるのに、 80億というわずかなパラメータしかないからだ。 + +02:01.920 --> 02:07.170 +これらのフロンティアモデルに関しては、 
明らかに能力が高く、 はるかにパワフルであり、 + +02:07.170 --> 02:09.750 +同じようなガードレールは必要ない。 + +02:09.840 --> 02:17.280 +GPT4やミニは、 セントも含めて価格を予測することができ、 非常に満足している。 + +02:17.280 --> 02:20.790 +だから、 このケースでは不必要なガードレールだった。 + +02:20.790 --> 02:21.990 +だから外したんだ。 + +02:22.440 --> 02:27.540 +また、 プロンプトの一番最後の、 価格がドルであることを示す箇所も削除した。 + +02:27.570 --> 02:38.100 +なぜこれを外したかというと、 メッセージのリストを作るときに、 まずシステム役割の下にシステム・プロンプトを入れたからだ。 + +02:38.130 --> 02:45.840 +次にユーザーを追加し、 ユーザー・プロンプトを入力し、 さらにアシスタントからのメッセージを入力する。 + +02:45.840 --> 02:50.070 +つまり、 GPTフォーミニからの返信のようなものだ。 + +02:50.310 --> 02:50.610 +ええと。 + +02:50.610 --> 02:56.700 +そしてその返事は、 値段はドルで、 あとは何もない。 + +02:57.300 --> 03:08.550 +そのため、 次のトークンに値段を記入するのに耐えがたいほど必死になる。 + +03:08.610 --> 03:15.630 +そうすると、 次のトークン、 つまり、 この製品の最もありそうな価格で、 + +03:15.630 --> 03:23.010 +それを埋めなければならないように感じるんだ。 + +03:23.010 --> 03:31.890 +だから、 アシスタントの反応になるようなトリックを試してみるのはいいことだよ。 + +03:32.280 --> 03:35.430 +そう、 それが僕らの仕事なんだ。 + +03:35.460 --> 03:38.850 +さっそく、 ええと、 これを試して見ましょう。 + +03:38.880 --> 03:47.430 +最初のテスト・データ・ポイントにメッセージを入力し、 どのように表示されるかを確認すると、 "Roll + +03:47.430 --> 03:52.890 +System "と表示され、 これがシステム・プロンプトです。 + +03:52.980 --> 03:55.320 +それがまったく意味のあることであればいいのだが。 + +03:55.380 --> 04:00.120 +そして、 ユーザーとコンテンツは、 いくらかかるのか? + +04:00.120 --> 04:11.310 +そして、 ある品物を見て、 ああ、 これらの品物の値段を当てなければならなかったという恐ろしい思い出があるんだ。 + +04:11.310 --> 04:13.740 +それを見るだけで、 私は恐怖でいっぱいになる。 + +04:13.800 --> 04:15.780 +ああ、 でも今回は僕の問題じゃない。 + +04:15.780 --> 04:17.760 +GPT4かミニの問題になるだろう。 + +04:17.760 --> 04:23.910 +だから、 ユーザー・プロンプトの一部としてそれを受け取り、 アシスタンス・レスポンスが与えられる。 + +04:23.910 --> 04:26.850 +それ自身の反応価格はドルだ。 + +04:27.390 --> 04:31.620 +そしてこの会話を続けようとする。 + +04:31.890 --> 04:36.810 +ええと、 だからこれは、 GPT4を送るための素晴らしい、 ええと、 プロンプトなんだ。 + +04:36.840 --> 04:51.840 +これは文字列sを受け取り、 その文字列から浮動小数点数を抽出する。 + +04:51.930 --> 05:04.840 +例えば、 GPT4からストリングが戻ってきたとすると、 その価格は99ドル。 + +05:04.840 --> 05:04.840 +99. 
+ +05:05.290 --> 05:08.110 +うーん、 なぜならブラブラだからだ。 + +05:09.610 --> 05:16.120 +もし、 私の指示に従わず、 ただ饒舌になることを決めたとしても、 私たちはそれを剥奪する。 + +05:16.150 --> 05:16.420 +おっと。 + +05:16.420 --> 05:17.680 +表に出すな。 + +05:17.740 --> 05:23.260 +それでも99番を外す。 そこに99人。 + +05:23.560 --> 05:24.250 +だから、 そうなるだろう。 + +05:24.280 --> 05:25.390 +うまくいくだろう。 + +05:26.290 --> 05:27.280 +ああ、 わかった。 + +05:27.280 --> 05:30.520 +ということで、 慎重に下にスクロールしていく。 + +05:30.550 --> 05:33.880 +それが、 大きな、 ええと、 機能につながる。 + +05:33.910 --> 05:34.930 +これがそうだ。 + +05:34.930 --> 05:36.280 +これが本物だ。 + +05:36.310 --> 05:42.970 +GPT4ミニをアイテムで呼び出すんだ。 + +05:43.000 --> 05:47.830 +もちろん、 OpenAI ChatGPT completions dot createと言っている。 + +05:48.190 --> 05:51.940 +ええと、 GPT4ミニというモデルでパスするんだ。 + +05:52.150 --> 05:59.290 +このシードは、 GPT4に、 同じ入力で同じ答えを出すように、 + +05:59.480 --> 06:11.060 +再現性を持たせたいことを伝えるためのものだ。 + +06:11.420 --> 06:16.250 +現在、 我々のGPTであるオープンAIは、 モデルを変更するため、 常にこれを尊重することはできない。 + +06:16.250 --> 06:19.310 +だから、 モデルが変わることもある。 + +06:19.310 --> 06:21.710 +だから、 この数字は変わるだろう。 + +06:21.830 --> 06:24.200 +しかし、 ベストを尽くしている。 + +06:24.200 --> 06:27.860 +同じシードで2回コールすれば、 同じ答えが返ってくるはずだ。 + +06:28.220 --> 06:29.630 +だから、 それはいいことだ。 + +06:30.110 --> 06:36.890 +そして、 最大トークンの数を少なめにしているのは、 私たちがこのように素晴らしいティーアップを施したことを知っているからで、 実際にそうであるように、 + +06:36.890 --> 06:40.400 +本当に価格が埋まることを期待しているんだ。 + +06:40.550 --> 06:44.690 +だから、 無駄なお金を使う必要はないんだ。 + +06:44.690 --> 06:52.520 +そして言っておくが、 我々はこれをやっているので、 この作戦全体は250のデータポイントすべてにわたってとてもとても安いのだ。 + +06:52.610 --> 06:55.760 +うーん、 とにかくアメリカではまだ僕のためにあるんだ。 + +06:55.790 --> 06:58.870 +を買うよりずっと安い。 + +06:58.870 --> 07:03.470 +まあ、 ユーザーインターフェイスに表示されないほどわずかな上昇だ。 + +07:03.470 --> 07:05.300 +1セントにも満たない。 + +07:05.480 --> 07:14.120 +つまり、 短い入力トークンとaと5つの出力トークンなので、 実に使い安い。 + +07:14.690 --> 07:18.140 +その前に、 何が起こっているのかを明かさないように注意したい。 + +07:18.260 --> 07:22.280 +ええと、 ええと、 その前に、 ちょっと試してみましょう。 + +07:22.280 --> 07:24.470 +GPT4を実行しよう。 + +07:24.470 --> 07:32.510 +ああ、 ミニ、 そしてテスト・オブ・ゼロと呼んで、 そのパフォーマンスを確かめよう。 + +07:32.570 --> 07:38.210 +それを実行すると、 260という答えが返ってくる。 0. 
+ +07:38.360 --> 07:42.140 +GPT4ミニというのを使っている。 + +07:42.170 --> 07:44.480 +その後、 何かを送ってきた。 + +07:44.510 --> 07:53.870 +そして、 この関数を通し、 260を最初の商品の価格として取り出した。 + +07:54.140 --> 07:59.060 +ええと、 最初の商品の値段を調べた方がいいんじゃない? + +07:59.060 --> 07:59.810 +いくつか、 ええと。 + +08:02.000 --> 08:05.360 +それで60になり、 数字は3、 7、 4となる。 + +08:05.360 --> 08:07.220 +だから、 あれはかなり遠回りだった。 + +08:07.580 --> 08:08.030 +うーん。 + +08:08.060 --> 08:14.690 +よし、 じゃあ、 GPTのテスター・ドット・テスト・パッシングにミニとテストを依頼する時が来た。 + +08:14.690 --> 08:20.720 +もう何回か見てもらってると思うんだけど、 かなり早く終わるんだけど、 それでも全部やるのに2、 + +08:20.720 --> 08:22.910 +3分はかかるんだ。 + +08:22.940 --> 08:27.620 +だから、 その間にあなたがそこに座っている必要はないと思ったし、 結果をスクロールできるようにしたんだ。 + +08:27.620 --> 08:33.410 +でもその前に、 GPT 4ミニがどうなるか、 あなたの推測を聞かせてください。 + +08:33.410 --> 08:36.380 +オリジナルとの対決はどうなる? + +08:36.410 --> 08:37.730 +平均的な数字に対して? + +08:37.730 --> 08:42.440 +あなたのような人類を相手にどう戦うのか? + +08:42.440 --> 08:50.990 +そして、 40万もの例を使ってトレーニングした最先端の伝統的なモデルに対して、 どのような結果をもたらすのだろうか? + +08:50.990 --> 08:51.860 +見てみよう。 + +08:51.860 --> 08:55.580 +それで、 これを実行した結果がこれだ。 + +08:55.580 --> 09:07.640 +だから、 いくつかの黄色と赤色は確かに再現性がある。 + +09:08.210 --> 09:12.680 +うーん、 緑と赤が見えるね。 + +09:12.710 --> 09:17.480 +緑が多いが、 黄色や赤もある。 + +09:18.710 --> 09:20.240 +スピードを上げるよ。 + +09:20.270 --> 09:22.400 +うわぁ、 緑がいっぱいだ。 + +09:23.360 --> 09:24.710 +もっと赤もあれば、 緑もある。 + +09:24.710 --> 09:33.320 +そして、 これがGPTの4つのミニ・フロンティア・モデルの結果である。 + +09:33.740 --> 09:36.110 +他を押しつぶした。 + +09:36.110 --> 09:37.250 +潰した。 + +09:37.250 --> 09:42.770 +79ドルになっているのがわかるだろう。 58. 
+ +09:42.920 --> 09:52.400 +人類よりも、 他の、 ええと、 ええと、 他のすべてのモデルよりも、 ええと、 ええと、 他のすべてのモデルよりも、 ええと、 + +09:52.400 --> 09:58.550 +ええと、 他のすべてのモデルよりも、 ええと、 ええと、 ええと、 ええと、 ランダムフォレストを含めて、 + +09:58.550 --> 10:03.540 +圧倒的に優れている。 + +10:03.540 --> 10:09.360 +また、 トレーニングデータのテストが汚染されるリスクはあるが、 それが正確に行われるケースはほとんどないので、 + +10:09.360 --> 10:13.650 +その可能性は低いと思う。 + +10:13.650 --> 10:17.730 +だから、 すでに知っていることを再確認したわけではない。 + +10:17.880 --> 10:25.560 +そのために訓練されているんだ。 + +10:25.560 --> 10:34.860 +タイヤがいくらで、 ヘッドランプがいくらで、 シャワーの蛇口がいくらで、 その他もろもろ、 素晴らしい視点を持っている。 + +10:34.860 --> 10:37.620 +そのことは、 私にはまったく分からないので、 とてつもない問題になった。 + +10:37.710 --> 10:40.260 +でも、 実にいいヒントがあったよ。 + +10:40.350 --> 10:46.260 +そして、 その手がかりは、 これまで取り組んできたすべてのモデルを打ち負かすほどだった。 + +10:46.260 --> 10:47.910 +とてもクールだよ。 + +10:47.910 --> 10:50.940 +これを見るのはとても楽しかった。 + +10:51.060 --> 10:59.370 +そして、 次のビデオでは、 さらにステップアップして、 より大きな従兄弟がどうなるか見てみよう。 diff --git a/week5/community-contributions/subtitles/srts/59473147/ko_KR.srt b/week5/community-contributions/subtitles/srts/59473147/ko_KR.srt new file mode 100755 index 0000000..e860412 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473147/ko_KR.srt @@ -0,0 +1,460 @@ +WEBVTT + +00:00.770 --> 00:01.910 +정말 안심이에요 + +00:01.910 --> 00:03.050 +그건 다 지난 일이에요 + +00:03.080 --> 00:05.120 +인체 실험은 이제 안 해요 + +00:05.150 --> 00:09.500 +최악의 결과를 마지막으로 보고 다음으로 넘어가죠 + +00:09.530 --> 00:13.760 +우리가 개척지와 함께 일할 때가 왔어요 + +00:13.910 --> 00:18.560 +GPT 4 미니부터 시작할 거예요 미니 버전이죠 + +00:18.650 --> 00:20.810 +그렇게 많지는 않아요 + +00:20.840 --> 00:22.400 +여전히 거대한 개척지죠 + +00:22.430 --> 00:23.210 +`LM'요 + +00:23.420 --> 00:28.310 +우리가 함께 작업한 다른 모델과 비교해서 얼마나 잘 팔릴지 궁금해요 + +00:28.490 --> 00:33.380 +먼저 GPT 4 프롬프트를 작업해야 하는데요 + +00:33.680 --> 00:39.950 +이건 여러분이 배우는 데 있어 아주 중요한 부분이에요 개척자 모델을 + +00:39.950 --> 00:46.070 +통해 이런 상업적 문제를 어떻게 해결할지 볼 수 있으니까요 + +00:46.100 --> 00:51.500 +하지만 많은 부분이 여러분에게 제2의 본성이 되어야 합니다 왜냐하면 이전에 했던 많은 작업들과 + +00:51.500 --> 00:53.210 +비슷하기 때문이죠 + +00:53.240 --> 00:57.590 +가장 먼저 항목에 대한 함수 메시지를 작성할게요 + +00:57.590 
--> 01:03.680 +데이터 포인트에 따라 GPT 4를 위한 프롬프트로 바꾸겠습니다 이 + +01:03.680 --> 01:10.490 +메시지 목록 중 하나의 형태로 온 거죠 여러분이 기억하시는 사전 목록이요 + +01:10.670 --> 01:13.880 +이렇게 사전 목록을 만드는 거예요 + +01:13.880 --> 01:21.170 +시스템 메시지로 시작합니다 이 경우엔 물품의 가격을 추정하고 가격으로만 + +01:21.200 --> 01:24.350 +답장합니다 설명 없이요 + +01:24.500 --> 01:31.760 +사용자 프롬프트는 이 아이템에서 테스트 프롬프트를 가져옵니다 많이 보셨던 + +01:31.760 --> 01:32.900 +거죠 + +01:33.140 --> 01:36.500 +데이터 비트를 좀 뒤져볼게요 + +01:36.500 --> 01:41.090 +먼저 이 텍스트를 $로 줄여볼게요 + +01:41.120 --> 01:46.250 +제가 그렇게 하는 이유는 아니, 그렇게 말해야 하는 이유는 이걸 + +01:46.250 --> 01:52.430 +처음에 넣은 이유는 나중에 오픈 소스 모델로 작업할 때 오픈 소스 모델을 제공해 + +01:52.430 --> 01:57.860 +최대한 쉽게 문제를 해결하게 될 테니까요 배울 게 많은데 그걸 할 + +01:57.860 --> 02:01.890 +80억 매개 변수밖에 없으니까요 + +02:01.920 --> 02:07.170 +개척 시대의 모델은 훨씬 더 강력하고 능력이 뛰어나며 똑같은 + +02:07.170 --> 02:09.750 +가드레일도 필요 없죠 + +02:09.840 --> 02:15.870 +GPT4나 미니가 아주 좋아할 거예요 센트를 포함해서 가격을 예측할 수 있으니까요 어떤 게 효과가 있는지 + +02:15.870 --> 02:17.280 +정확히 알고 있죠 + +02:17.280 --> 02:20.790 +이 경우에는 불필요한 가드레일이었어요 + +02:20.790 --> 02:21.990 +그래서 뺐어요 + +02:22.440 --> 02:27.540 +프롬프트 맨 끝도 제거했어요 가격은 달러라는 곳이죠 + +02:27.570 --> 02:34.350 +이걸 제거한 이유는 메시지 목록을 만들 때 시스템 역할에 + +02:34.350 --> 02:38.100 +먼저 입력하기 때문이에요. 
+ +02:38.130 --> 02:45.840 +사용자를 추가하고 사용자 프롬프트를 입력합니다 그리고 보조에서 오는 메시지를 하나 더 입력해요 + +02:45.840 --> 02:50.070 +GPT4 미니의 회신 같은 거죠 + +02:50.310 --> 02:50.610 +네 + +02:50.610 --> 02:56.700 +가격은 달러라는 대답만 남죠 + +02:57.300 --> 03:04.530 +그래서 참을 수 없을 정도로 다음 토큰에 가격을 적고 싶어지죠 이게 + +03:04.530 --> 03:08.550 +조수로부터 돌아오는 응답이니까요 + +03:08.610 --> 03:15.630 +그래서 저희는 그 부분을 가장 그럴듯한 다음 토큰으로 채워야 + +03:15.630 --> 03:23.010 +한다고 생각했어요 이 제품을 살 때 가장 그럴듯한 가격으로요 + +03:23.010 --> 03:28.590 +그래서 조수처럼 반응하게 해서 그렇게 채우는 게 실험해보기에 + +03:28.590 --> 03:31.890 +좋은 방법이죠 + +03:32.280 --> 03:35.430 +그게 우리 일이에요 + +03:35.460 --> 03:38.850 +빨리 이걸 시험해 보고 살펴보죠 + +03:38.880 --> 03:47.430 +메시지 for get을 입력하고 첫 번째 테스트 데이터 포인트를 입력하면 + +03:47.430 --> 03:52.890 +롤 시스템과 시스템 프롬프트가 뜨죠 + +03:52.980 --> 03:55.320 +이해가 되면 좋겠네요 + +03:55.380 --> 04:00.120 +롤 유저를 선택하고 콘텐츠는 비용이죠 + +04:00.120 --> 04:08.310 +그리고 제 생각에 이 물건들은 끔찍한 메모리일 것 같아요 이 물건들의 가격을 + +04:08.310 --> 04:11.310 +전부 다 맞춰야 했으니까요 + +04:11.310 --> 04:13.740 +보기만 해도 겁이 나요 + +04:13.800 --> 04:15.780 +이번엔 제 문제가 아니에요 + +04:15.780 --> 04:17.760 +GPT 4와 미니 중 선택해야 해요 + +04:17.760 --> 04:23.910 +사용자 프롬프트의 일부로 그걸 받게 되죠 그런 다음 지원 응답을 받게 돼요 + +04:23.910 --> 04:26.850 +그 자체로 보상가는 달러예요 + +04:27.390 --> 04:31.620 +그럼 이 대화를 계속하려고 할 거예요 + +04:31.890 --> 04:36.810 +GPT 4를 보내기에 좋은 타이밍인 셈이죠 + +04:36.840 --> 04:43.410 +다음, 짧은 함수가 하나 더 있어요 여기 쓴 Get Price라는 + +04:43.410 --> 04:51.840 +유틸리티로 문자열 s를 이용해 그 문자열에서 부동점 수를 추출해요 + +04:51.930 --> 05:04.840 +예를 들어 GPT 4에서 문자열을 받으면 가격이 대략 1달러 99센트예요 + +05:04.840 --> 05:04.840 +99살요 + +05:05.290 --> 05:08.110 +왜냐하면 어쩌고저쩌고해서요 + +05:09.610 --> 05:16.120 +제 지시를 따르지 않고 계속 떠들면 그래도 옷을 벗을 거예요 + +05:16.150 --> 05:16.420 +이런, 미안해요 + +05:16.420 --> 05:17.680 +밝히지 마세요 + +05:17.740 --> 05:23.260 +99는 여전히 제거할 거예요 99달러 나왔어요 + +05:23.560 --> 05:24.250 +그럴 거예요 + +05:24.280 --> 05:25.390 +잘될 거예요 + +05:26.290 --> 05:27.280 +네 + +05:27.280 --> 05:30.520 +이제 조심스럽게 스크롤을 내릴게요 + +05:30.550 --> 05:33.880 +이제 큰 함수로 넘어가죠 + +05:33.910 --> 05:34.930 +맞아요 + +05:34.930 --> 05:36.280 
+이건 진짜예요 + +05:36.310 --> 05:42.970 +GPT 4 미니를 항목과 함께 호출할 겁니다 이렇게 하는 거죠 + +05:43.000 --> 05:47.830 +OpenAI 챗GPT완료 .Create라고 하죠 + +05:48.190 --> 05:51.940 +GPT 4 미니 모델을 통과했어요 + +05:52.150 --> 05:59.290 +그 아이템에 대한 메시지를 전달합니다 방금 말씀드린 함수를 사용해서요 + +05:59.480 --> 06:07.580 +이 시드는 GPT 4에 이걸 재생하고 싶다고 말하는 방법입니다 같은 입력으로 + +06:07.580 --> 06:11.060 +같은 답을 주도록요 + +06:11.420 --> 06:16.250 +오픈아이는 기존 GPT 모델을 변경하기 때문에 늘 이 방식을 고수할 순 없죠 + +06:16.250 --> 06:19.310 +그래서 모델이 바뀌기도 해요 + +06:19.310 --> 06:21.710 +이 숫자는 바뀔 거예요 + +06:21.830 --> 06:24.200 +하지만 모든 게 평등하니 최선을 다하죠 + +06:24.200 --> 06:27.860 +그럼 같은 씨앗으로 두 번 부르면 같은 답이 나오겠네요? Get it + +06:28.220 --> 06:29.630 +좋은 정보네요 + +06:30.110 --> 06:36.890 +최대 토큰은 작게 두겠습니다 티업이 훌륭하기 때문이죠 가격이 + +06:36.890 --> 06:40.400 +채워질 거라고 기대할 수 있어요 + +06:40.550 --> 06:44.690 +그러니 더 많은 토큰을 가져오느라 돈을 낭비할 필요가 없죠 + +06:44.690 --> 06:51.350 +이런 작업을 하기 때문에 250개의 데이터 포인트에 걸쳐 이 전체 작업은 아주 + +06:51.380 --> 06:52.520 +저렴해요 + +06:52.610 --> 06:55.760 +미국에 있는 제 자리예요 + +06:55.790 --> 06:58.870 +가격은 그보다도 훨씬 less예요 + +06:58.870 --> 07:03.470 +음, less 아, 그러니까 등급이 너무 적어서 사용자 인터페이스에서 안 보이는 거죠 + +07:03.470 --> 07:05.300 +1센트도 안 되는 less요 + +07:05.480 --> 07:14.120 +아주 저렴합니다 짧은 입력 토큰과 5개의 출력 토큰을 사용하니까요 + +07:14.690 --> 07:18.140 +그리고 조심하기 전에 무슨 일이 일어나는지 말해야죠 + +07:18.260 --> 07:22.280 +그 전에 이걸 빨리 해 보죠 + +07:22.280 --> 07:24.470 +GPT 4를 돌려 보죠 + +07:24.470 --> 07:32.510 +테스트 오브 제로로 성능을 확인해 보죠 + +07:32.570 --> 07:38.210 +그걸 실행하면 답이 260으로 나와요 0살요 + +07:38.360 --> 07:42.140 +GPT 4 미니라는 걸 거쳤네요 + +07:42.170 --> 07:44.480 +그리고 뭔가 보내왔죠 + +07:44.510 --> 07:49.880 +이것과 똑같은 것을 보냈고 받은 정보를 바탕으로 이 함수를 적용해서 260개를 + +07:49.880 --> 07:53.870 +제거해 첫 번째 물건의 가격을 맞췄어요. 
+ +07:54.140 --> 07:59.060 +일단 첫 번째 물건의 가격부터 알아보죠 + +07:59.060 --> 07:59.810 +조금요 + +08:02.000 --> 08:05.360 +60까지 가면 숫자는 3, 7, 4가 되죠 + +08:05.360 --> 08:07.220 +그래서 좀 멀리 갔죠 + +08:07.580 --> 08:08.030 +네 + +08:08.060 --> 08:14.690 +이제 테스터와 GPT 테스트를 통과할 시간입니다 미니 테스트죠 + +08:14.690 --> 08:20.720 +제가 이미 실행해본 걸 몇 번 보셨을 겁니다 꽤 빠르거든요 하지만 전체를 다 하려면 + +08:20.720 --> 08:22.910 +2, 3분 정도 걸려요 + +08:22.940 --> 08:27.620 +여러분이 거기 앉아 계시는 동안 결과를 스크롤할 수 있게 하고 싶진 않았어요 + +08:27.620 --> 08:33.410 +하지만 그 전에 GPT 4 미니가 어떻게 달릴지 한번 맞혀 보세요 + +08:33.410 --> 08:36.380 +오리지널과 비교하면 어떨까요? + +08:36.410 --> 08:37.730 +평균 수보다요? + +08:37.730 --> 08:42.440 +제가 대표하는 인류에 대한 반발은 어떨까요? + +08:42.440 --> 08:49.190 +또한 훈련용 400,000대의 모델이 있는 최첨단 전통 모델과는 어떻게 + +08:49.220 --> 08:50.990 +경쟁할까요? + +08:50.990 --> 08:51.860 +어디 보죠 + +08:51.860 --> 08:55.580 +그래서 이걸 실행했더니 이렇게 됐어요 + +08:55.580 --> 09:02.690 +노란색과 빨간색은 재생률이 아주 높아요 아까와 + +09:02.690 --> 09:07.640 +같은 260개를 예상했죠 + +09:08.210 --> 09:12.680 +녹색과 붉은색이 보이죠 + +09:12.710 --> 09:17.480 +녹색은 많지만 노란색과 빨간색도 있어요 + +09:18.710 --> 09:20.240 +속도를 높일게요 + +09:20.270 --> 09:22.400 +채소가 정말 많네요 + +09:23.360 --> 09:24.710 +붉은색과 녹색도 더 넣고요 + +09:24.710 --> 09:33.320 +답은 이겁니다 GPT 4 미니 프론티어 모델 결과죠 + +09:33.740 --> 09:36.110 +다른 건 다 박살 났죠 + +09:36.110 --> 09:37.250 +박살을 냈죠 + +09:37.250 --> 09:42.770 +여기 보시면 79달러가 들어 있어요 58살요 + +09:42.920 --> 09:52.400 +확실히 인간보다 낫고 다른 모든 모델보다 나아요 무작위 숲도 포함해서요 + +09:52.400 --> 09:58.550 +훈련 데이터도 전혀 없이 순수하게 설명만 듣고 + +09:58.550 --> 10:03.540 +토큰 시퀀스를 계속하는 거죠 + +10:03.540 --> 10:09.360 +훈련용 데이터 오염을 테스트할 위험은 있지만 그럴 가능성은 희박합니다 + +10:09.360 --> 10:13.650 +정확히 해낼 경우가 거의 없으니까요 + +10:13.650 --> 10:17.730 +이미 알고 있던 걸 토해낸 게 아니에요 + +10:17.880 --> 10:25.560 +세속적인 지식이 아주 풍부해서 그런 쪽으로 훈련된 거죠 + +10:25.560 --> 10:32.970 +타이어 가격, 전조등 가격 샤워 수도꼭지 등 다른 비용을 알려주는 좋은 + +10:32.970 --> 10:34.860 +관점이 있어요 + +10:34.860 --> 10:37.620 +그게 큰 문제였어요 전혀 알 수가 없었거든요 + +10:37.710 --> 10:40.260 +하지만 아주 좋은 단서가 있었죠 + +10:40.350 --> 10:46.260 +이 작업에 참여한 모든 모델보다 낫다는 걸 알 수 있었죠 + +10:46.260 
--> 10:47.910 +정말 멋져요 + +10:47.910 --> 10:50.940 +이 장면을 보는 게 즐거웠어요 + +10:51.060 --> 10:59.370 +다음 영상에서는 수준을 한 단계 높여서 더 큰 사촌이 어떻게 하는지 보죠 diff --git a/week5/community-contributions/subtitles/srts/59473159/en_US.srt b/week5/community-contributions/subtitles/srts/59473159/en_US.srt new file mode 100755 index 0000000..1ab7e87 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473159/en_US.srt @@ -0,0 +1,520 @@ +WEBVTT + +00:00.770 --> 00:05.690 +Welcome to Jupyter Lab and welcome to our experiments at the frontier. + +00:05.690 --> 00:12.290 +So we are going to put our frontier models to the test, trying out this challenge of predicting the + +00:12.290 --> 00:18.590 +prices of products using a combination of GPT four and Claude. + +00:18.800 --> 00:21.980 +Uh, and I do want to point out a couple of things about this. + +00:21.980 --> 00:26.900 +First of all, it's worth pointing out that we're not doing any training here. + +00:26.900 --> 00:31.070 +We're not going to give the frontier models any benefit of the training data. + +00:31.070 --> 00:36.350 +We're simply going to be giving it the test data and asking it to predict the outcome. + +00:36.350 --> 00:41.930 +So when we looked at traditional machine learning, we gave it the 400,000 training data points and + +00:41.930 --> 00:44.510 +had it learn a model based on that. + +00:44.690 --> 00:51.440 +In this case, we're simply giving it the test data and saying, given all of your phenomenal knowledge + +00:51.440 --> 00:56.690 +of of everything that you know about the world, all of the world information stored in your trillions + +00:56.690 --> 01:03.180 +of parameters, please predict the price of this product and do it by finishing this the sentence this + +01:03.180 --> 01:10.890 +product is worth dollars, and then the model is convinced that the most likely next token is going + +01:10.890 --> 01:13.590 +to be a plausible price for that product. 
+ +01:13.590 --> 01:16.470 +So we're taking advantage of its world knowledge. + +01:16.950 --> 01:21.240 +So yeah, on the one hand it's not been trained for this task. + +01:21.240 --> 01:25.080 +Uh, on the other hand though, something else that's worth mentioning that maybe some of you thought + +01:25.080 --> 01:32.160 +already is that given the enormous, outrageously large training data set that has been put through + +01:32.160 --> 01:38.610 +these models as part of training, it's entirely possible that they have, in fact, seen these products + +01:38.610 --> 01:39.120 +before. + +01:39.150 --> 01:41.910 +They may have been provided with scrapes of Amazon. + +01:41.910 --> 01:45.540 +They may have been provided with hugging face data sets, for all we know. + +01:45.750 --> 01:48.570 +Uh, so, um, it's possible now. + +01:48.570 --> 01:49.980 +Now they don't. + +01:50.010 --> 01:54.240 +You'll see that results aren't suspiciously spot on or something. + +01:54.240 --> 02:00.210 +That would make one feel that it has the benefit of precise prices, but we do still have to bear in + +02:00.210 --> 02:02.490 +mind that it might have an unfair advantage. + +02:02.490 --> 02:07.250 +I haven't seen evidence of that, but it's certainly something that one wants to be worried about. + +02:07.250 --> 02:10.490 +It's what people call testata contamination. + +02:10.490 --> 02:16.910 +When there's a possibility that your test data set has been seen, or has aspects of it have been seen + +02:16.910 --> 02:18.710 +during training time. + +02:19.070 --> 02:20.390 +So we'll bear that in mind. + +02:20.390 --> 02:22.190 +But that's just going to be a side note. + +02:22.190 --> 02:24.530 +We're not going to dwell more on that. + +02:24.530 --> 02:27.320 +I haven't seen significant evidence that that is at play. + +02:27.890 --> 02:30.500 +So we're going to do some imports. + +02:31.130 --> 02:32.150 +There they go. 
+ +02:32.240 --> 02:36.980 +We are now back to importing OpenAI and anthropic and we'll be making use of them. + +02:36.980 --> 02:42.890 +Now you remember I wrote that lovely tester class that I do like, and I think it's going to be very + +02:42.890 --> 02:43.520 +useful for you. + +02:43.520 --> 02:49.820 +And I encourage you to be writing similar kinds of test harness frameworks for your own projects to + +02:49.850 --> 02:55.250 +validate their results using the using as many business metrics as you can. + +02:55.700 --> 03:00.650 +I've moved it out into a separate, uh, Python module of its own. + +03:00.650 --> 03:05.760 +This is the same code that's out in a module like that, and that just means that we don't have to have + +03:05.760 --> 03:10.020 +it in all of our Jupyter notebooks going forwards, because we will use it quite a lot. + +03:10.050 --> 03:13.710 +We can just import it like so, and it will be there. + +03:13.710 --> 03:16.140 +And the the signature has changed very slightly. + +03:16.170 --> 03:22.410 +We'll have to say Tesla dot test, put in the function name and also pass in the test data set, of + +03:22.410 --> 03:26.760 +which it will take the first 250 elements data points. + +03:26.790 --> 03:27.210 +All right. + +03:27.210 --> 03:30.270 +So with that we are now going to load in our environment variables. + +03:30.270 --> 03:32.730 +We are going to log in to hugging face. + +03:33.000 --> 03:36.150 +Uh again I don't think we're actually going to use that. + +03:36.150 --> 03:39.870 +But um anyway might as well we get into the practice of it. + +03:40.080 --> 03:42.360 +Uh, always nice to log into hugging face, isn't it? + +03:42.390 --> 03:45.420 +We will initialize our two models. + +03:45.420 --> 03:51.840 +We will, uh, tell matplotlib that we are going to be making charts, and we will load in our pickle + +03:51.840 --> 03:56.580 +files for our training and test data set that we outputted. 
+ +03:56.670 --> 03:58.350 +Um, and they are loaded in. + +03:58.380 --> 04:04.260 +Now, I did say we were just about to go straight to the frontier, but I am going to, uh, pause for + +04:04.260 --> 04:09.320 +one more second, because I do have one other model to show you before we go to the frontier. + +04:09.320 --> 04:13.490 +And you're thinking, oh, come on, you said it was frontier time, but I think you will be amused + +04:13.490 --> 04:14.120 +by this. + +04:14.120 --> 04:17.030 +And this came very much at my expense. + +04:17.120 --> 04:22.970 +And this is why, at the start of today's videos, I said I was absolutely exhausted. + +04:22.970 --> 04:31.610 +But it did occur to me that another, perhaps another thing that we should compare our models to would + +04:31.610 --> 04:36.620 +be the efforts of humanity in trying to predict prices of products. + +04:36.680 --> 04:42.500 +It seems like we should have that baseline as well, so that we can compare our model performance against + +04:42.500 --> 04:44.330 +human performance. + +04:44.510 --> 04:52.040 +And I couldn't find anybody that I could convince to go through the horror that is reading 250 product + +04:52.040 --> 04:54.380 +descriptions and trying to figure out how much they cost. + +04:54.380 --> 04:58.850 +And so I subjected myself to this torture and torture it was. + +04:58.850 --> 05:02.660 +I can tell you it is way more difficult than I was expecting. + +05:02.660 --> 05:08.030 +I said to you before that I thought it was it was quite hard, but it's way harder than I had realized + +05:08.310 --> 05:11.070 +there are just things that I had no idea about. + +05:11.070 --> 05:16.560 +I had no idea how much it costs to buy a wheel, and there are a couple of wheels in there. + +05:16.710 --> 05:22.950 +Uh, then, even though I should know computers back to front, I found myself agonizing over the cost + +05:22.950 --> 05:28.770 +of refurbished computers with 400GB of, uh, disk space. 
+ +05:28.920 --> 05:31.560 +And, yeah, it was just really, really hard. + +05:31.560 --> 05:32.820 +And chandeliers. + +05:32.820 --> 05:34.740 +I don't know how much a chandelier costs. + +05:34.740 --> 05:36.360 +Anyway, I digress. + +05:36.390 --> 05:47.040 +I wrote some code that outputs to a CSV file 250 test prompts, and after I ran that, uh, I'll run + +05:47.040 --> 05:47.130 +it. + +05:47.130 --> 05:51.450 +Now it creates this file human input dot csv. + +05:51.450 --> 05:53.790 +And here is human input dot CSV. + +05:53.790 --> 06:04.140 +And it contains the prompts every single one of the top 250 prompts and zero in this column to be filled + +06:04.140 --> 06:06.690 +in by said human. + +06:06.690 --> 06:12.530 +Uh, and I, you know, I'm not even sure if I'm going to check in this human output into into git. + +06:12.560 --> 06:17.840 +If you see it there, then I then I've dared to because I after a while you become fatigued and I was + +06:17.840 --> 06:19.700 +going through it probably too fast. + +06:19.700 --> 06:21.890 +I probably made some real blunders in there. + +06:21.890 --> 06:24.620 +If you look at it, you'll probably say, what were you thinking? + +06:24.620 --> 06:27.230 +You should stick to teaching LM engineering. + +06:27.260 --> 06:28.190 +Certainly don't. + +06:28.460 --> 06:30.950 +You're not, not not someone of the world. + +06:31.100 --> 06:34.100 +Uh, but, um, yeah, I gave it my best shot. + +06:34.100 --> 06:38.840 +So anyways, we'll read back in the prices that I set. + +06:38.990 --> 06:43.940 +Uh, and then let's just, uh, quickly get a sense for, for, for how this looks. + +06:43.940 --> 06:54.950 +So we're going to write a function which is going to be the human, um, the predictor, the human processor. + +06:54.950 --> 06:57.140 +So it needs to take an input. + +06:57.140 --> 07:00.230 +And that input should be one of the items. + +07:00.260 --> 07:04.610 +And its job is to return the cost of that item. 
+ +07:04.910 --> 07:11.820 +Um, so what I do at this point is I say, okay, so if I look in my training data set, I say my test + +07:11.820 --> 07:12.720 +data set. + +07:12.750 --> 07:15.840 +What is the index of that item? + +07:15.840 --> 07:19.410 +So is it the the zeroth item in in test. + +07:19.410 --> 07:21.030 +Is it the first the second the third. + +07:21.030 --> 07:23.940 +And we will call that index. + +07:24.420 --> 07:30.090 +So that is which number of the test items we are looking at here. + +07:30.090 --> 07:36.750 +And then I have read in all of my hopeless estimates into human predictions. + +07:36.750 --> 07:40.230 +And so we will simply return human predictions. + +07:44.010 --> 07:46.890 +At uh at index. + +07:48.240 --> 07:48.930 +All right. + +07:48.960 --> 07:49.680 +And run that. + +07:49.680 --> 07:56.850 +And now now we will see we will see tester dot test human pricer. + +07:59.640 --> 08:01.800 +And pass in the test data set. + +08:02.040 --> 08:02.910 +Here we go. + +08:04.320 --> 08:07.740 +So there are the results. + +08:07.920 --> 08:13.010 +Uh, well you can see that there's a fairly large number of reds in there. + +08:13.040 --> 08:14.450 +There are some greens though. + +08:14.480 --> 08:15.980 +I did respectably. + +08:16.340 --> 08:18.590 +Uh, but still I was quite far out. + +08:18.590 --> 08:19.490 +But look, this one. + +08:19.490 --> 08:20.300 +What is this? + +08:20.300 --> 08:23.600 +Richmond auto upholstery, I guess 260. + +08:23.630 --> 08:25.220 +And it was 225. + +08:25.220 --> 08:28.010 +And this one here, Gibson Performance exhaust. + +08:28.010 --> 08:31.010 +I don't know how much a Gibson Performance exhaust costs, but I. + +08:31.010 --> 08:32.870 +I guessed 499. + +08:32.870 --> 08:35.090 +I thought I'd, you know, go go for it. + +08:35.090 --> 08:37.280 +And the answer is 535. + +08:37.430 --> 08:38.900 +But then some others in here. + +08:39.050 --> 08:40.340 +What did I get wrong here? 
+ +08:40.370 --> 08:49.250 +A Street Series stainless performance something, uh, I guess $260 and it was $814, so I was just + +08:49.280 --> 08:53.210 +way off there anyway to put me out of my misery. + +08:53.210 --> 08:55.910 +If we scroll down, we will see. + +08:55.940 --> 09:03.980 +Here is the chart then, uh, I uh, in particular, you'll see that I didn't do terribly. + +09:04.010 --> 09:04.640 +I mean, I did. + +09:04.640 --> 09:09.020 +All right, look, lots of green dots hit rate of 32%. + +09:09.200 --> 09:16.440 +Um, I, I also I realized actually, as I was About two thirds of the way through that, all of my + +09:16.440 --> 09:20.100 +prices, I'd never guessed anything much more than 4 or $500. + +09:20.100 --> 09:22.890 +So I knew immediately that obviously I was. + +09:22.890 --> 09:26.040 +I hadn't I hadn't spotted things that were expensive. + +09:26.220 --> 09:28.650 +So that was obviously a failing. + +09:28.920 --> 09:34.110 +Um, so my total error, as it happens, was $127. + +09:34.110 --> 09:38.280 +And that means I come in better than than than the average. + +09:38.280 --> 09:42.630 +It's not like I could have done better just by guessing the average number all the way through. + +09:42.840 --> 09:48.240 +Uh, the, uh, I've written down to remind myself the the comparisons. + +09:48.240 --> 09:50.460 +The average was 146. + +09:50.520 --> 09:52.500 +Uh, was the error of the average price. + +09:52.590 --> 09:53.880 +So I did better than that. + +09:53.880 --> 09:57.930 +The straight up linear regression with feature engineering. + +09:57.930 --> 10:00.540 +The basic one was 139. + +10:00.540 --> 10:01.800 +So I beat that. + +10:01.800 --> 10:06.150 +I beat a very, very basic feature engineering linear regression. + +10:06.150 --> 10:09.930 +But you probably already put in more features and did better than that anyway. 
+ +10:10.020 --> 10:17.240 +Um, but then all of the other models are crushed me with the, the bag of words style models and the + +10:17.240 --> 10:18.230 +word two vec models. + +10:18.230 --> 10:24.650 +And then you remember that Random Forest came in at 97, significantly better than humanity. + +10:24.680 --> 10:31.850 +So already as it would happen, good traditional machine learning models can do better than this human + +10:31.850 --> 10:38.210 +anyway in predicting the prices of items, but you may be better informed than me if you put yourself + +10:38.210 --> 10:40.460 +through this exercise, which I do not recommend. + +10:40.760 --> 10:43.580 +Then you may find you may find that you do better. + +10:43.580 --> 10:47.330 +Anyway, in all seriousness, I haven't just wasted your time. + +10:47.330 --> 10:52.310 +This is the kind of exercise that's good to do because maybe for a few data points, but it gives you + +10:52.310 --> 10:58.310 +a good sense of the type of problem you're solving, and where the bar is set in terms of human performance + +10:58.340 --> 11:02.390 +is something which can be used to compare how well we're doing with models. + +11:02.390 --> 11:08.450 +After all, if we can't do better than then, than the human performance, then we need to work harder. + +11:08.450 --> 11:10.460 +So that gives you a sense. + +11:10.460 --> 11:15.950 +And when we come back in the next video, we really will move on to Frontier Models. + +11:15.950 --> 11:16.670 +It's happening. + +11:16.700 --> 11:17.480 +See you then. 
diff --git a/week5/community-contributions/subtitles/srts/59473159/ja_JP.srt b/week5/community-contributions/subtitles/srts/59473159/ja_JP.srt new file mode 100755 index 0000000..38c386b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473159/ja_JP.srt @@ -0,0 +1,448 @@ +WEBVTT + +00:00.770 --> 00:05.690 +Jupyter Labへようこそ、 そしてフロンティアでの実験へようこそ。 + +00:05.690 --> 00:12.290 +そこで私たちは、 GPT4とクロードを組み合わせて商品の価格を予測するというこの課題に挑戦し、 + +00:12.290 --> 00:18.590 +フロンティアモデルを試してみようと思う。 + +00:18.800 --> 00:21.980 +この件に関して、 いくつか指摘しておきたいことがある。 + +00:21.980 --> 00:26.900 +まず言っておきたいのは、 ここではトレーニングはしていないということだ。 + +00:26.900 --> 00:31.070 +フロンティアモデルにはトレーニングデータの恩恵を与えない。 + +00:31.070 --> 00:36.350 +単にテストデータを与え、 結果を予測するよう求めるだけだ。 + +00:36.350 --> 00:44.510 +従来の機械学習では、 40万件の学習データを与え、 それに基づいてモデルを学習させていた。 + +00:44.690 --> 00:56.690 +この場合、 私たちは単にテストデータを与え、 あなたが世界について知っているすべての驚異的な知識、 + +00:56.690 --> 01:03.180 +あなたの何兆ものパラメータに保存されている世界のすべての情報を与えて、 + +01:03.180 --> 01:13.590 +この製品の価格を予測してください、 と言っているのです。 + +01:13.590 --> 01:16.470 +だから、 我々はその世界的な知識を活用している。 + +01:16.950 --> 01:21.240 +そう、 一方では、 この任務のために訓練されたわけではない。 + +01:21.240 --> 01:25.080 +その一方で、 もうお気づきの方もいらっしゃるかもしれませんが、 言及する価値のあることがあります。 + +01:25.080 --> 01:32.160 +それは、 トレーニングの一環としてこれらのモデルに投入された膨大な、 とんでもなく大きなトレーニング・データ・セットを考えると、 + +01:32.160 --> 01:39.120 +彼らが実際に、 以前にこれらの製品を見たことがある可能性は十分にあるということです。 + +01:39.150 --> 01:41.910 +アマゾンの切れ端を提供されたのかもしれない。 + +01:41.910 --> 01:45.540 +ハグする顔のデータセットが提供されたのかもしれない。 + +01:45.750 --> 01:48.570 +ええと、 だから、 今は可能なんだ。 + +01:48.570 --> 01:49.980 +今は違う。 + +01:50.010 --> 01:54.240 +結果が怪しいほど的中しているわけでもなんでもないことがわかるだろう。 + +01:54.240 --> 02:02.490 +そのため、 正確な価格という利点があるように感じられるが、 それでも不公平な利点があるかもしれないことを念頭に置く必要がある。 + +02:02.490 --> 02:07.250 +その証拠を見たことはないが、 心配したいことであることは確かだ。 + +02:07.250 --> 02:10.490 +いわゆるテスタタ汚染だ。 + +02:10.490 --> 02:18.710 +テスト・データセットが、 トレーニング中に見たことがある、 あるいは見たことがある可能性がある場合。 + +02:19.070 --> 02:20.390 +だから、 そのことは肝に銘じておく。 + +02:20.390 --> 02:22.190 +でも、 それは余談に過ぎない。 + +02:22.190 --> 02:24.530 
+私たちはそのことにこれ以上こだわるつもりはない。 + +02:24.530 --> 02:27.320 +私は、 それが関係しているという重要な証拠を見たことがない。 + +02:27.890 --> 02:30.500 +だから、 輸入をするんだ。 + +02:31.130 --> 02:32.150 +そうだ。 + +02:32.240 --> 02:36.980 +今はOpenAIとanthropicのインポートに戻り、 それらを活用していく。 + +02:36.980 --> 02:43.520 +私が書いた素敵なテスター・クラスは気に入っているし、 あなたにとってとても役に立つと思う。 + +02:43.520 --> 02:49.820 +そして、 あなた自身のプロジェクトでも、 同じような種類のテスト・ハーネス・フレームワークを書いて、 + +02:49.850 --> 02:55.250 +できるだけ多くのビジネス・メトリクスを使って、 その結果を検証することをお勧めする。 + +02:55.700 --> 03:00.650 +このモジュールは、 独立したPythonモジュールに移しました。 + +03:00.650 --> 03:10.020 +これは、 そのようなモジュールにあるコードと同じもので、 今後、 Jupyterノートブックのすべてにこのコードを入れる必要はないということだ。 + +03:10.050 --> 03:13.710 +そうやってインポートすればいいんだ。 + +03:13.710 --> 03:16.140 +そして、 サインもわずかに変わった。 + +03:16.170 --> 03:22.410 +テスラ・ドット・テストと言い、 関数名を入れ、 + +03:22.410 --> 03:26.760 +テスト・データ・セットを渡す。 + +03:26.790 --> 03:27.210 +分かった。 + +03:27.210 --> 03:30.270 +これで環境変数をロードすることになる。 + +03:30.270 --> 03:32.730 +これからハグフェイスにログインします。 + +03:33.000 --> 03:36.150 +もう一度言うけど、 実際に使うことはないと思う。 + +03:36.150 --> 03:39.870 +でも......とにかく、 その練習を始めた方がいい。 + +03:40.080 --> 03:42.360 +ええと、 ハグしている顔にログインするのは、 いつだって素敵なことだよね? 
+ +03:42.390 --> 03:45.420 +つのモデルを初期化する。 + +03:45.420 --> 03:56.580 +matplotlibにグラフを作成することを伝え、 出力したトレーニングデータとテストデータセットのpickleファイルをロードする。 + +03:56.670 --> 03:58.350 +ええと、 そして、 それらは積み込まれている。 + +03:58.380 --> 04:09.320 +フロンティアに行く前に、 もうひとつお見せしたいモデルがあるんです。 + +04:09.320 --> 04:14.120 +フロンティアの時代だと言ったじゃないか。 + +04:14.120 --> 04:17.030 +そして、 これは私の犠牲の上に成り立っている。 + +04:17.120 --> 04:22.970 +だからこそ、 今日のビデオの冒頭で、 私は疲れきっていると言ったのだ。 + +04:22.970 --> 04:36.620 +しかし、 もうひとつ、 おそらくもうひとつ、 私たちのモデルを比較すべきは、 商品の価格を予測しようとする人類の努力だろうと思いついた。 + +04:36.680 --> 04:44.330 +モデルのパフォーマンスを人間のパフォーマンスと比較できるようにするために、 ベースラインも持っておくべきだと思う。 + +04:44.510 --> 04:54.380 +そして、 250の商品説明を読んで、 その値段がいくらなのかを把握しようとする恐怖を味わうような人を説得できる人を見つけることができなかった。 + +04:54.380 --> 04:58.850 +そうして、 私はこの拷問に自分をさらした。 + +04:58.850 --> 05:02.660 +思っていたよりずっと難しいよ。 + +05:02.660 --> 05:08.030 +前にも言ったけど、 かなり大変だと思っていたんだ。 でも、 僕が思っていた以上に大変で、 + +05:08.310 --> 05:11.070 +僕が知らなかったことがたくさんあった。 + +05:11.070 --> 05:16.560 +ホイールを買うのにいくらかかるか知らなかった。 + +05:16.710 --> 05:28.770 +コンピュータのことは隅から隅まで知っているはずなのに、 ディスク容量が400GBもある整備済みコンピュータの値段に頭を悩ませていた。 + +05:28.920 --> 05:31.560 +そして、 本当に、 本当に大変だった。 + +05:31.560 --> 05:32.820 +そしてシャンデリア。 + +05:32.820 --> 05:34.740 +シャンデリアの値段は知らない。 + +05:34.740 --> 05:36.360 +とにかく、 話がそれた。 + +05:36.390 --> 05:47.130 +250のテストプロンプトをCSVファイルに出力するコードを書いたんだ。 + +05:47.130 --> 05:51.450 +これで、 人間の入力ドットcsvファイルが作成される。 + +05:51.450 --> 05:53.790 +そして、 これが人間入力のドットCSVである。 + +05:53.790 --> 06:06.690 +そして、 この欄の上位250のプロンプトとゼロのプロンプトのひとつひとつを、 当該人間が記入するようになっている。 + +06:06.690 --> 06:12.530 +それでね、 この人間のアウトプットをgitにチェックインするかどうかもわからないんだ。 + +06:12.560 --> 06:19.700 +しばらくすると疲労が蓄積してくるから、 あえてそうしているんだ。 + +06:19.700 --> 06:21.890 +私はおそらく、 そこで本当に不手際を犯した。 + +06:21.890 --> 06:24.620 +これを見たら、 何を考えているんだと言うだろうね。 + +06:24.620 --> 06:27.230 +LMエンジニアリングの指導に専念すべきだ。 + +06:27.260 --> 06:28.190 +確かにそうだ。 + +06:28.460 --> 06:30.950 +あなたは世界の誰かではない。 + +06:31.100 --> 06:34.100 +でも、 ベストは尽くしたよ。 + +06:34.100 --> 06:38.840 +とにかく、 私が設定した価格を読み返そう。 + +06:38.990 --> 06:43.940 +ええと、 それから、 
これがどう見えるか、 すぐに感覚を掴みましょう。 + +06:43.940 --> 06:54.950 +そこで、 人間の、 うーん、 予測者、 人間のプロセッサーとなる関数を書くことにする。 + +06:54.950 --> 06:57.140 +だから入力が必要なんだ。 + +06:57.140 --> 07:00.230 +そして、 その入力は項目のひとつであるべきだ。 + +07:00.260 --> 07:04.610 +そして、 その商品の代金を返すのが仕事だ。 + +07:04.910 --> 07:12.720 +この時点で私がすることは、 トレーニングデータセットとテストデータセットを見ることです。 + +07:12.750 --> 07:15.840 +その項目のインデックスは? + +07:15.840 --> 07:19.410 +ということは、 テストでは0番目の項目なのだろうか。 + +07:19.410 --> 07:21.030 +1番目なのか、 2番目なのか、 3番目なのか。 + +07:21.030 --> 07:23.940 +それをインデックスと呼ぶことにする。 + +07:24.420 --> 07:30.090 +というわけで、 ここで見ているテスト項目の数はこれだ。 + +07:30.090 --> 07:36.750 +そして、 私は絶望的な予想をすべて人間の予測に読み込んだ。 + +07:36.750 --> 07:40.230 +だから、 私たちは人間の予測を返すだけだ。 + +07:44.010 --> 07:46.890 +ええと......インデックスで + +07:48.240 --> 07:48.930 +分かった。 + +07:48.960 --> 07:49.680 +そしてそれを実行する。 + +07:49.680 --> 07:56.850 +そして今、 我々はテスター・ドット・テスト・ヒューマン・プライサーを見ることになる。 + +07:59.640 --> 08:01.800 +そしてテストデータセットでパスする。 + +08:02.040 --> 08:02.910 +さあ、 始めよう。 + +08:04.320 --> 08:07.740 +これがその結果だ。 + +08:07.920 --> 08:13.010 +ええと、 かなり多くの赤が入っているのがわかるだろう。 + +08:13.040 --> 08:14.450 +グリーンもあるけどね。 + +08:14.480 --> 08:15.980 +私は立派にやった。 + +08:16.340 --> 08:18.590 +あー、 でも、 それでも僕はかなり遠くにいたよ。 + +08:18.590 --> 08:19.490 +でも、 これを見て。 + +08:19.490 --> 08:20.300 +これは何だ? + +08:20.300 --> 08:23.600 +リッチモンドの自動車椅子張り、 260かな。 + +08:23.630 --> 08:25.220 +そして225だった。 + +08:25.220 --> 08:28.010 +そしてこちらはギブソン・パフォーマンスのエキゾースト。 + +08:28.010 --> 08:31.010 +ギブソン・パフォーマンスのエキゾーストがいくらかは知らないけれど。 + +08:31.010 --> 08:32.870 +私は499だと思った。 + +08:32.870 --> 08:35.090 +僕は、 その、 頑張ろうと思ったんだ。 + +08:35.090 --> 08:37.280 +そして答えは535だ。 + +08:37.430 --> 08:38.900 +でも、 ここには他にも何人かいる。 + +08:39.050 --> 08:40.340 +私はここで何を間違えたのだろう? 
+ +08:40.370 --> 08:49.250 +ストリート・シリーズのステンレスの性能は、 確か260ドルだったと思うけど、 + +08:49.280 --> 08:53.210 +814ドルだった。 + +08:53.210 --> 08:55.910 +下にスクロールすると、 こうなる。 + +08:55.940 --> 09:03.980 +これがそのチャートで、 僕は特に、 ひどい成績ではなかったことがわかるだろう。 + +09:04.010 --> 09:04.640 +つまり、 やったんだ。 + +09:04.640 --> 09:09.020 +よし、 見ろ、 たくさんの緑の点のヒット率が32%だ。 + +09:09.200 --> 09:20.100 +私も、 3分の2が終わったところで気づいたんですが、 私の値段はすべて、 4ドルか500ドル以上だと思ったことがなかったんです。 + +09:20.100 --> 09:22.890 +だから、 明らかにそうだとすぐにわかった。 + +09:22.890 --> 09:26.040 +私は高価なものに目をつけていなかった。 + +09:26.220 --> 09:28.650 +それは明らかに失敗だった。 + +09:28.920 --> 09:34.110 +つまり、 私のミスは合計で127ドルだった。 + +09:34.110 --> 09:38.280 +そしてそれは、 私が平均よりも良い成績を収めていることを意味する。 + +09:38.280 --> 09:42.630 +終始、 平均的な数字を当てるだけで、 もっといい成績を残せたような気がしてならない。 + +09:42.840 --> 09:48.240 +ええと、 その......比較を思い出すためにメモしたんだ。 + +09:48.240 --> 09:50.460 +平均は146だった。 + +09:50.520 --> 09:52.500 +ええと、 平均価格の間違いでした。 + +09:52.590 --> 09:53.880 +だから、 それよりはうまくやったよ。 + +09:53.880 --> 09:57.930 +特徴工学を用いたストレートな線形回帰。 + +09:57.930 --> 10:00.540 +基本的なものは139だった。 + +10:00.540 --> 10:01.800 +だから、 それを打ち破ったんだ。 + +10:01.800 --> 10:06.150 +私は非常に基本的な特徴工学的線形回帰を打ち負かした。 + +10:06.150 --> 10:09.930 +しかし、 どうせあなたはすでにそれ以上の機能を搭載し、 それ以上の結果を出しているのだろう。 + +10:10.020 --> 10:18.230 +うーん、 でも、 他のモデルはみんな、 バッグ・オブ・ワード・スタイルのモデルやワード・ツー・ベックのモデルで私を押し潰したんだ。 + +10:18.230 --> 10:24.650 +そして、 ランダムフォレストが97点で、 人類を大きく上回ったことを思い出すだろう。 + +10:24.680 --> 10:40.460 +しかし、 この練習をすれば、 私よりも良い情報が得られるかもしれない。 + +10:40.760 --> 10:43.580 +そうすれば、 もっとうまくいくことに気づくかもしれない。 + +10:43.580 --> 10:47.330 +とにかく、 真面目な話、 私はあなたの時間を無駄にしたわけではない。 + +10:47.330 --> 10:52.310 +このような練習をするのはいいことだと思う。 なぜなら、 + +10:52.310 --> 11:02.390 +数少ないデータポイントかもしれないが、 自分が解いている問題のタイプをよく理解することができるからだ。 + +11:02.390 --> 11:08.450 +結局のところ、 その時よりも、 人間のパフォーマンスよりもうまくやれないのであれば、 もっと努力する必要がある。 + +11:08.450 --> 11:10.460 +だから、 それが感覚になる。 + +11:10.460 --> 11:15.950 +また次のビデオでは、 フロンティア・モデルの話をしよう。 + +11:15.950 --> 11:16.670 +それは起きている。 + +11:16.700 --> 11:17.480 +ではまた diff --git 
a/week5/community-contributions/subtitles/srts/59473159/ko_KR.srt b/week5/community-contributions/subtitles/srts/59473159/ko_KR.srt new file mode 100755 index 0000000..a4f5168 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473159/ko_KR.srt @@ -0,0 +1,508 @@ +WEBVTT + +00:00.770 --> 00:05.690 +개척지 실험에 오신 걸 환영해요 + +00:05.690 --> 00:12.290 +그래서 새로운 모델을 시험해 보려고 합니다 GPT 4와 + +00:12.290 --> 00:18.590 +클로드를 조합해서 제품 가격을 예측하는 거죠 + +00:18.800 --> 00:21.980 +이와 관련해 몇 가지 지적하고 싶어요 + +00:21.980 --> 00:26.900 +우선, 이건 훈련이 아니란 걸 강조해야 해요 + +00:26.900 --> 00:31.070 +개척지 모델에는 훈련 데이터의 혜택을 주지 않을 거예요 + +00:31.070 --> 00:36.350 +테스트 데이터를 주고 결과를 예측하도록 요청할 뿐이죠 + +00:36.350 --> 00:41.930 +전통적인 머신 러닝을 살펴볼 때 400,000개의 훈련 데이터 포인트를 주고 + +00:41.930 --> 00:44.510 +그걸 기반으로 모델을 배우게 했어요 + +00:44.690 --> 00:51.440 +이 경우에는 테스트 데이터를 주는 거죠 세상에 대해 알고 있는 경이로운 + +00:51.440 --> 00:56.690 +모든 지식 수조 개의 매개 변수에 저장된 모든 정보를 가지고 + +00:56.690 --> 01:03.180 +제품의 가격을 예측해 보세요 그리고 이 문장을 끝내세요 이 제품은 + +01:03.180 --> 01:10.890 +달러 가치가 있다 그러면 모델은 가장 그럴듯한 다음 토큰이 그 제품의 가격이 될 것이라고 + +01:10.890 --> 01:13.590 +확신하죠 + +01:13.590 --> 01:16.470 +그래서 그 세계의 지식을 이용하는 거죠 + +01:16.950 --> 01:21.240 +한편으로는 이 작업에 대한 훈련을 받지 않았어요 + +01:21.240 --> 01:25.080 +한편으로는 이미 생각하셨을지 모르지만 언급할 + +01:25.080 --> 01:32.160 +만한 다른 주제가 있습니다. 훈련의 일환으로 이런 모델로 만들어진 어마어마하게 방대한 + +01:32.160 --> 01:39.120 +훈련 데이터셋을 보면 이런 제품들을 이미 봤을 가능성이 충분히 있다는 거죠. 
+ +01:39.150 --> 01:41.910 +아마존에서 가져온 걸 수도 있죠 + +01:41.910 --> 01:45.540 +포옹하는 얼굴 데이터 세트를 제공했을지도 모르죠 + +01:45.750 --> 01:48.570 +이제 가능하대요 + +01:48.570 --> 01:49.980 +이젠 아니에요 + +01:50.010 --> 01:54.240 +결과가 의심스러울 정도로 정확하진 않아요 + +01:54.240 --> 02:00.210 +그렇다면 정확한 가격의 혜택을 누릴 수 있겠지만 부당한 이점이 있을 수 + +02:00.210 --> 02:02.490 +있다는 걸 명심해야 해요 + +02:02.490 --> 02:07.250 +그런 증거는 못 봤지만 확실히 걱정해야 할 문제인 건 분명해요 + +02:07.250 --> 02:10.490 +테스타타 오염이라고 하죠 + +02:10.490 --> 02:16.910 +테스트 데이터가 목격되거나 훈련 중에 그 측면이 보였을 가능성이 + +02:16.910 --> 02:18.710 +있을 때요 + +02:19.070 --> 02:20.390 +그걸 명심하세요 + +02:20.390 --> 02:22.190 +그건 여담으로 남겨둘게요 + +02:22.190 --> 02:24.530 +그 얘기는 그만하죠 + +02:24.530 --> 02:27.320 +그게 관련됐다는 중요한 증거는 못 봤어요 + +02:27.890 --> 02:30.500 +수입 작업을 할 거예요 + +02:31.130 --> 02:32.150 +저기 가네요 + +02:32.240 --> 02:36.980 +오픈AI와 인류애를 다시 수입하고 있어요 그걸 활용할 거예요 + +02:36.980 --> 02:42.890 +제가 만든 테스터 수업 기억하시죠? 제가 좋아하는 건데 여러분께 아주 유용할 + +02:42.890 --> 02:43.520 +거예요 + +02:43.520 --> 02:49.820 +가능한 많은 비즈니스 지표를 사용해 결과를 검증하기 위한 여러분의 프로젝트를 + +02:49.850 --> 02:55.250 +위해 유사한 테스트 하니스 프레임워크를 작성하시길 권장해요 + +02:55.700 --> 03:00.650 +파이썬 을 가지고 있는 분리된 모듈로 옮겼어요 + +03:00.650 --> 03:05.760 +이런 모듈에 있는 것과 같은 코드예요 그 말은 Jupyter 노트북에 이게 + +03:05.760 --> 03:10.020 +있을 필요가 없다는 거죠 앞으로 꽤 많이 사용할 테니까요 + +03:10.050 --> 03:13.710 +이렇게 불러오기만 하면 돼요 + +03:13.710 --> 03:16.140 +서명이 아주 약간 바뀌었어요 + +03:16.170 --> 03:22.410 +테슬라 put test라고 적고 함수 이름을 넣고 시험 데이터 세트에도 통과시켜야 + +03:22.410 --> 03:26.760 +해요 250개의 요소 데이터 포인트를 취할 거예요 + +03:26.790 --> 03:27.210 +좋아요 + +03:27.210 --> 03:30.270 +이제 환경 변수를 로드할 거예요 + +03:30.270 --> 03:32.730 +얼굴 포옹에 로그인할게요 + +03:33.000 --> 03:36.150 +다시 말씀드리지만 그건 안 쓸 것 같아요 + +03:36.150 --> 03:39.870 +어쨌든 get it의 실천을 시작해 보죠. + +03:40.080 --> 03:42.360 +포옹하는 얼굴 로그인하면 늘 좋죠 + +03:42.390 --> 03:45.420 +두 모델을 초기화할 거예요 + +03:45.420 --> 03:51.840 +Mattplotlib에 우리가 차트를 만들 거라고 말할 거예요. 그리고 우리가 출력한 + +03:51.840 --> 03:56.580 +트레이닝과 테스트 데이터 세트를 위해 피클 파일을 로드할 거예요. 
+ +03:56.670 --> 03:58.350 +장전도 됐고요 + +03:58.380 --> 04:04.260 +개척지로 곧장 간다고 말씀드렸지만 잠시 멈춰 보겠습니다 개척지로 + +04:04.260 --> 04:09.320 +가기 전에 보여 드릴 모델이 하나 더 있거든요 + +04:09.320 --> 04:14.120 +서구 시대라고 하셨지만 이것도 재미있을 거예요 + +04:14.120 --> 04:17.030 +제가 그 대가를 치렀죠 + +04:17.120 --> 04:22.970 +그래서 오늘 영상을 시작할 때 너무 피곤하다고 한 거예요 + +04:22.970 --> 04:31.610 +하지만 우리 모델을 비교해 봐야 할 또 다른 대상은 제품 가격을 예측하려는 + +04:31.610 --> 04:36.620 +인류의 노력일 거라는 생각이 들었죠 + +04:36.680 --> 04:42.500 +기준선도 있어야 할 것 같습니다 모델의 성능과 인간의 성능을 + +04:42.500 --> 04:44.330 +비교할 수 있게요 + +04:44.510 --> 04:52.040 +250개의 제품 설명을 읽고 가격을 계산하는 등의 공포를 감당할 수 있는 사람을 + +04:52.040 --> 04:54.380 +찾을 수가 없었어요 + +04:54.380 --> 04:58.850 +그래서 스스로를 고문했어요 정말 고문이 따로 없었죠 + +04:58.850 --> 05:02.660 +제가 예상했던 것보다 훨씬 어렵네요 + +05:02.660 --> 05:08.030 +전에 꽤 힘들 거라고 말씀드렸는데 생각했던 것보다 훨씬 힘들어요 + +05:08.310 --> 05:11.070 +제가 전혀 몰랐던 것들이 있어요 + +05:11.070 --> 05:16.560 +바퀴 하나에 얼마인지 몰랐는데 바퀴가 몇 개 있네요 + +05:16.710 --> 05:22.950 +컴퓨터의 순서를 알아야 하는데도 복원한 컴퓨터의 디스크 + +05:22.950 --> 05:28.770 +공간이 400GB나 돼서 고민하게 되더라고요 + +05:28.920 --> 05:31.560 +네, 정말 힘들었어요 + +05:31.560 --> 05:32.820 +샹들리에도요 + +05:32.820 --> 05:34.740 +샹들리에가 얼마인지 모르겠어요 + +05:34.740 --> 05:36.360 +본론에서 벗어났네요 + +05:36.390 --> 05:47.130 +코드를 작성해서 CSV파일 250 테스트 프롬프트에 출력합니다. 이것을 실행한 후에는, 실행해 볼게요. 
+ +05:47.130 --> 05:51.450 +이제 파일을 생성합니다 사람 입력 .csv + +05:51.450 --> 05:53.790 +여기 사람 입력 .CSV가 있네요 + +05:53.790 --> 06:04.140 +프롬프트들을 포함하고 있습니다 상위 250개의 프롬프트들 전부를요 이 칼럼에 0은 + +06:04.140 --> 06:06.690 +사람이 채울 수 있죠 + +06:06.690 --> 06:12.530 +이 사람 산출물을 깃에 체크해야 할지조차 모르겠어요 + +06:12.560 --> 06:17.840 +보시면 아시겠지만 제가 감히 했어요 시간이 좀 지나면 지치거든요 너무 + +06:17.840 --> 06:19.700 +빨리 진행한 것 같아요 + +06:19.700 --> 06:21.890 +제가 큰 실수를 한 것 같아요 + +06:21.890 --> 06:24.620 +무슨 생각으로 그랬는지 궁금하실 거예요 + +06:24.620 --> 06:27.230 +당신은 LM 엔지니어링이나 가르쳐요 + +06:27.260 --> 06:28.190 +당연히 아니죠 + +06:28.460 --> 06:30.950 +당신은 이 세상의 사람이 아니에요 + +06:31.100 --> 06:34.100 +하지만 최선을 다하긴 했어요 + +06:34.100 --> 06:38.840 +제가 정한 가격을 다시 읽어볼게요 + +06:38.990 --> 06:43.940 +Get it가 어떻게 보일지 간단히 짚어 보죠 + +06:43.940 --> 06:54.950 +그래서 우리가 쓸 함수는 인간 예측기인 인간 프로세서예요 + +06:54.950 --> 06:57.140 +그래서 입력이 필요하죠 + +06:57.140 --> 07:00.230 +그 입력이 항목 중 하나가 되어야 해요 + +07:00.260 --> 07:04.610 +그 물건의 가격을 환불하는 게 그 기계의 일이죠 + +07:04.910 --> 07:11.820 +음, 이 시점에서 제가 하는 것은, 트레이닝 데이터 세트를 보면 테스트 데이터 세트라고 + +07:11.820 --> 07:12.720 +하죠 + +07:12.750 --> 07:15.840 +그 항목의 인덱스는 뭐죠? + +07:15.840 --> 07:19.410 +테스트의 0번째 항목인가요? + +07:19.410 --> 07:21.030 +첫 번째인가요? 두 번째인가요? 세 번째인가요? + +07:21.030 --> 07:23.940 +그걸 인덱스라고 부르죠 + +07:24.420 --> 07:30.090 +여기서 보고 있는 테스트 아이템의 개수죠 + +07:30.090 --> 07:36.750 +그리고 가망 없는 예측을 전부 읽었어요 + +07:36.750 --> 07:40.230 +그래서 인간의 예측을 다시 보여드리려고 해요 + +07:44.010 --> 07:46.890 +인덱스 에서도요 + +07:48.240 --> 07:48.930 +좋아요 + +07:48.960 --> 07:49.680 +실행하세요 + +07:49.680 --> 07:56.850 +이제 테스터가 인간 프라이서를 테스트할 거예요 + +07:59.640 --> 08:01.800 +테스트 데이터 세트도 통과시키고요 + +08:02.040 --> 08:02.910 +시작할게요 + +08:04.320 --> 08:07.740 +결과가 나왔네요 + +08:07.920 --> 08:13.010 +붉은 연어가 꽤 많이 보여요 + +08:13.040 --> 08:14.450 +그래도 녹색은 좀 있네요 + +08:14.480 --> 08:15.980 +정중하게 했어요 + +08:16.340 --> 08:18.590 +그래도 꽤 멀리까지 갔어요 + +08:18.590 --> 08:19.490 +하지만 이걸 보세요 + +08:19.490 --> 08:20.300 +이게 뭐죠? 
+ +08:20.300 --> 08:23.600 +리치먼드 자동차 덮개 전문점 260점 정도 될 거예요 + +08:23.630 --> 08:25.220 +225달러였어요 + +08:25.220 --> 08:28.010 +이건 깁슨 퍼포먼스 배기관이에요 + +08:28.010 --> 08:31.010 +깁슨 퍼포먼스 배기관이 얼마인지는 모르지만요 + +08:31.010 --> 08:32.870 +499개라고 생각했어요 + +08:32.870 --> 08:35.090 +그래서 한번 해 보기로 했죠 + +08:35.090 --> 08:37.280 +답은 535예요 + +08:37.430 --> 08:38.900 +하지만 다른 사람들도 있어요 + +08:39.050 --> 08:40.340 +Get it, get it, get it, get it! 내가 뭘 잘못 알고 있는 거죠? + +08:40.370 --> 08:49.250 +Street Series Senless 성능인가 뭔가였는데 260달러쯤 했어요 814달러였죠 + +08:49.280 --> 08:53.210 +어쨌든 너무 빗나갔어요 고통을 덜어주려고요 + +08:53.210 --> 08:55.910 +스크롤을 내리면 보일 거예요 + +08:55.940 --> 09:03.980 +여기 차트가 있어요 제가 형편없지 않다는 걸 보실 수 있죠 + +09:04.010 --> 09:04.640 +아니, 그랬다고요 + +09:04.640 --> 09:09.020 +보세요, 많은 녹색 점이 32%에 도달해요 + +09:09.200 --> 09:16.440 +그리고 3분의 2 정도 봤을 때 제가 생각한 가격은 400-500달러 + +09:16.440 --> 09:20.100 +정도였어요 + +09:20.100 --> 09:22.890 +그래서 제가 그런다는 걸 바로 알았죠 + +09:22.890 --> 09:26.040 +비싼 건 못 봤어요 + +09:26.220 --> 09:28.650 +그건 명백한 실패였죠 + +09:28.920 --> 09:34.110 +제 총 오류는 127달러였어요 + +09:34.110 --> 09:38.280 +평균보다 더 잘한다는 뜻이죠 + +09:38.280 --> 09:42.630 +처음부터 끝까지 맞힌다고 더 잘 만들 수 있는 것도 아니고요 + +09:42.840 --> 09:48.240 +비교를 위해 적어 봤어요 + +09:48.240 --> 09:50.460 +평균은 146점이었어요 + +09:50.520 --> 09:52.500 +평균 가격의 오류였어요 + +09:52.590 --> 09:53.880 +그보다는 잘했죠 + +09:53.880 --> 09:57.930 +기능 엔지니어링의 선형 회귀죠 + +09:57.930 --> 10:00.540 +기본은 139개였어요 + +10:00.540 --> 10:01.800 +그래서 이겼어요 + +10:01.800 --> 10:06.150 +아주 기본적인 기능 공학 선형 회귀를 깼어요 + +10:06.150 --> 10:09.930 +하지만 이미 더 많은 기능을 넣고 그것보다 잘했을 거예요. 
+ +10:10.020 --> 10:18.230 +그런데 다른 모델들이 전부 단어의 봉지에 담긴 스타일과 두 개의 벡 모델로 저를 압도했어요 + +10:18.230 --> 10:24.650 +랜덤 포레스트는 97년에 등장했는데 인류보다 훨씬 뛰어났죠 + +10:24.680 --> 10:31.850 +기존의 좋은 머신 러닝 모델은 사람보다 제품 가격 예측에 더 뛰어납니다 + +10:31.850 --> 10:38.210 +하지만 직접 해 보면 저보다 더 잘 알 거예요 추천하진 않아요 + +10:38.210 --> 10:40.460 +TUI + +10:40.760 --> 10:43.580 +더 잘 살 수 있다는 걸 알게 될 거예요 + +10:43.580 --> 10:47.330 +어쨌든, 제가 여러분의 시간을 낭비한 건 아니에요 + +10:47.330 --> 10:52.310 +이런 운동은 도움이 됩니다 데이터 포인트를 몇 개 얻을 수도 있지만 + +10:52.310 --> 10:58.310 +어떤 문제를 해결하는지 감이 올 수 있거든요 인적 능력이라는 기준점이 어디에 있는지 + +10:58.340 --> 11:02.390 +모델로 얼마나 잘하는지 비교할 수 있으니까요 + +11:02.390 --> 11:08.450 +결국 사람보다 더 잘할 수 없다면 더 열심히 노력해야죠 + +11:08.450 --> 11:10.460 +그걸 보면 감이 오죠 + +11:10.460 --> 11:15.950 +다음 비디오에서는 개척 시대 모델에 대해 얘기해 보죠 + +11:15.950 --> 11:16.670 +시작됐어요 + +11:16.700 --> 11:17.480 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59473191/en_US.srt b/week5/community-contributions/subtitles/srts/59473191/en_US.srt new file mode 100755 index 0000000..4b42c39 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473191/en_US.srt @@ -0,0 +1,493 @@ +WEBVTT + +00:00.920 --> 00:02.690 +And you thought we'd never get here. + +00:02.720 --> 00:07.910 +Here we are in Jupyter Lab, running our fine tuning for a frontier model. + +00:07.910 --> 00:10.580 +So we start with a bunch of imports. + +00:10.580 --> 00:12.860 +We also import the test data. + +00:12.860 --> 00:19.250 +If you remember, this is that nifty piece of code that is able to run through 250 test examples and + +00:19.250 --> 00:21.140 +give us a beautiful chart. + +00:21.140 --> 00:28.880 +At the end of it, we load in our environment, we load in hugging face token, which we're not going + +00:28.910 --> 00:34.280 +to use this time, but like before, why not always like hugging and hugging face? + +00:34.310 --> 00:36.800 +And we are going to use OpenAI. + +00:36.830 --> 00:41.000 +So we very much need to run that line there and here. 
+ +00:41.000 --> 00:46.610 +And again we're going to do this where we open the training data and the test data from a pickle file + +00:46.700 --> 00:51.200 +so that we don't have to recreate everything from scratch in it comes. + +00:51.470 --> 00:52.340 +All right. + +00:52.340 --> 00:54.500 +So let's talk about what we're going to do now. + +00:54.500 --> 01:04.680 +So OpenAI recommends that when you're doing training you use somewhere between 50 and 100 example points + +01:04.830 --> 01:06.210 +that you use for training. + +01:06.210 --> 01:14.160 +And really the the main intention of fine tuning for a frontier model is about adapting its tone and + +01:14.160 --> 01:19.860 +style, and correcting for errors and improving accuracy in some circumstances. + +01:20.010 --> 01:28.860 +Um, there's not a massive point in, in putting in enormous numbers of examples, because a model like + +01:28.980 --> 01:35.070 +the GPT four series is trained on so much data that what you're really trying to do is just give it + +01:35.070 --> 01:39.150 +enough examples of something specific you want it to do, so it can learn from that. + +01:39.390 --> 01:46.980 +Um, so yeah, there's um, not not a recommendation to go to a very large number, but I'm at the very + +01:46.980 --> 01:50.970 +least going to pick 500 here, which is more than they recommend. + +01:51.060 --> 01:55.830 +Uh, and I've tested it and it does better than, than smaller numbers. + +01:56.130 --> 01:59.040 +Um, and so I'm picking 500 of our examples. + +01:59.070 --> 02:00.540 +Now our examples are very small. + +02:00.570 --> 02:07.310 +Our text is very small and I think typically there they are thinking about much bigger training documents. + +02:07.310 --> 02:10.220 +So because of that, I don't feel bad about this. + +02:10.430 --> 02:14.510 +And at the moment this is actually fine tuning. 
+ +02:14.660 --> 02:19.850 +Um, is is free for a period of time until I think it's September the 23rd, but later in September. + +02:19.850 --> 02:26.840 +But even when it stops being free, the cost you pay is similar to the cost to actually just run inference + +02:26.840 --> 02:30.830 +on 500 of these, which is measured in a few cents again. + +02:30.830 --> 02:36.890 +So at this point, I imagine we're talking about about $0.05 to do this, uh, for, for or equivalent + +02:36.890 --> 02:38.480 +in, in your currency. + +02:38.570 --> 02:41.930 +Um, so it's still small pennies. + +02:41.930 --> 02:46.490 +And as I say, it's free at least until late September. + +02:46.730 --> 02:54.170 +So with that, I'm dividing into a training set of 500 from the actual training set that we've got, + +02:54.200 --> 02:56.600 +which is 400,000. + +02:56.780 --> 03:00.020 +Uh, and I'm going to take 50 as validation. + +03:00.020 --> 03:05.240 +I mentioned a moment ago, we don't actually need to do validation because our training set, we're + +03:05.240 --> 03:07.190 +only going to do one epoch through it. + +03:07.460 --> 03:12.470 +Um, but I thought it'd be useful to show it to you so that you know how to do this for the future in + +03:12.470 --> 03:13.340 +in your projects. + +03:13.340 --> 03:16.970 +Because all of this can be replicated for your projects. + +03:16.970 --> 03:18.560 +So we run this. + +03:19.010 --> 03:28.160 +So I mentioned to you that the first step is preparing the Jsonl JSON lines data, converting our training + +03:28.160 --> 03:30.020 +data into this format. + +03:30.020 --> 03:37.730 +So first of all, I wrote a method that a function that you know well messages for uh, which is taken + +03:37.730 --> 03:40.040 +exactly from what we did last time. + +03:40.130 --> 03:46.580 +Uh, it says you estimate prices of items, reply only with the price, no explanation. + +03:46.580 --> 03:51.380 +And then for the user prompt, I take the test prompt from the item. 
+ +03:51.590 --> 03:58.850 +Um, and I strip out to the nearest dollar and just replace that with uh, and with with empty. + +03:58.850 --> 04:04.780 +So it's not it's not directing it to only go to the nearest dollar, The frontier Labs need no such + +04:04.810 --> 04:05.740 +approximation. + +04:05.740 --> 04:10.900 +And I also take out that, um, and that's what goes in the user prompt. + +04:10.900 --> 04:17.350 +And then I reply with the assistant saying price is and then giving the price. + +04:17.350 --> 04:19.090 +So let's run that. + +04:19.090 --> 04:24.760 +And just in case it's not clear what's going on, let's just give you an example. + +04:24.790 --> 04:34.210 +Messages for train zero, which will be the first one that the model sees. + +04:34.210 --> 04:36.910 +And this is what you get roll system. + +04:36.910 --> 04:38.560 +And that's the system prompt. + +04:38.590 --> 04:42.970 +Check you're happy with that and then roll user. + +04:43.090 --> 04:45.070 +And this is the user prompt. + +04:45.430 --> 04:51.040 +Uh it's as if we have asked this question how much does this cost question mark. + +04:51.040 --> 04:58.270 +And then this spiel about a Delphi or Delphi, Delphi, uh, fuel pump module. + +04:58.690 --> 05:04.900 +Um, and then the This is the assistant's response. + +05:04.900 --> 05:07.330 +The price is $226. + +05:07.330 --> 05:12.310 +I would never have I well, I remember, I didn't guess that it was anything like that. + +05:12.520 --> 05:14.080 +Uh, so there you go. + +05:14.080 --> 05:15.700 +You learn something every day. + +05:15.700 --> 05:20.200 +Anyway, this is the format of the messages, which is something that should be very, very familiar + +05:20.200 --> 05:21.040 +to you at this stage. + +05:21.040 --> 05:28.270 +And you can see how this is a perfectly crafted test, sorry, training data point that we will be providing + +05:28.300 --> 05:29.410 +to the model. 
+ +05:29.770 --> 05:36.910 +Okay, so then here is a function make JSON L that is going to do just what you would think it will + +05:36.910 --> 05:38.530 +take in a bunch of items. + +05:38.530 --> 05:40.570 +It will iterate through those items. + +05:40.570 --> 05:47.560 +It will create this text, this, this, uh, object for each one. + +05:47.560 --> 05:54.340 +And then it will use Json.dumps dump string to convert that into a simple string. + +05:54.340 --> 06:00.820 +And then look, it just simply adds that to this one string with a carriage return at the end of it. + +06:00.850 --> 06:05.020 +And then I return that back and I strip out that last carriage return. + +06:05.020 --> 06:07.360 +So let's see this in action. + +06:07.360 --> 06:10.240 +So let's say let's run it first. + +06:10.450 --> 06:12.340 +Don't make my usual blunder. + +06:12.640 --> 06:13.600 +There we go. + +06:13.600 --> 06:15.730 +And now say make JSON. + +06:15.730 --> 06:22.180 +L uh, and let's pass in uh, some of our training data. + +06:22.210 --> 06:27.370 +Let's just pass in the first three of that so we don't crowd everything out. + +06:27.370 --> 06:29.860 +So here we get back a string, of course. + +06:29.860 --> 06:34.090 +And it's a string which has, uh. + +06:34.090 --> 06:35.830 +It might be easier if we print it. + +06:35.860 --> 06:40.090 +Let's print it so that we get empty lines clearly showing through. + +06:40.420 --> 06:48.130 +Okay, so it's a string and you can see one, two, three lines in the string. + +06:48.370 --> 06:50.290 +Um, it's sort of wrapping around. + +06:50.290 --> 07:00.730 +And you can see that each row has in it, um, the, the full messages exchanged for the represents + +07:00.730 --> 07:02.290 +that training data point. + +07:02.740 --> 07:03.400 +Okay. + +07:03.410 --> 07:04.760 +So far so good. + +07:04.940 --> 07:08.240 +Now we have this function just building on that. + +07:08.240 --> 07:08.810 +Right? 
+ +07:08.840 --> 07:12.110 +Jsonl take items and takes a file name. + +07:12.110 --> 07:13.850 +And this is super simple stuff. + +07:13.880 --> 07:18.950 +Opens that file name and calls the function above and writes it out. + +07:18.950 --> 07:23.270 +So I don't think I need to to give you a demo of that one. + +07:23.300 --> 07:26.180 +I do need to execute it, but we can actually run it. + +07:26.180 --> 07:33.770 +So we're going to take our training data set, which you remember the fine tuned train which is 500 + +07:33.800 --> 07:34.190 +items. + +07:34.190 --> 07:34.940 +Let's check. + +07:34.970 --> 07:40.550 +There it is, 500 items from the training data set overall, which is 400,000. + +07:40.580 --> 07:45.590 +We're not going to we're not going to write all of them to a file and upload them to GPT four. + +07:45.920 --> 07:52.550 +Uh, so, um, we we write that out to a file called Fine Tune train dot JSON. + +07:52.550 --> 07:58.700 +L let's run that and then we'll take the validation set and do exactly the same run that. + +07:58.700 --> 08:03.170 +So we've written those two files and you can see they just wrote a couple of seconds ago. + +08:03.170 --> 08:05.790 +So if I open this up, I can open it. + +08:05.790 --> 08:12.450 +There is actually a fancy JSON lines editor in, uh, in JupyterLab, but we're just going to go over + +08:12.450 --> 08:13.530 +a normal editor. + +08:13.560 --> 08:18.420 +And here you can see, just as you'd expect, we're expecting 500 rows. + +08:18.420 --> 08:24.270 +Here we go, all the way down to the end, 500 rows it is. + +08:24.300 --> 08:27.960 +And they all have exactly the structure that you would hope. + +08:28.170 --> 08:34.200 +Um, and you can see this actually isn't well-formed JSON because, uh, each line is a well-formed + +08:34.200 --> 08:35.010 +JSON document. 
+ +08:35.010 --> 08:39.330 +I know I'm belaboring that point, but it is it is important you wouldn't be able to read this in and + +08:39.330 --> 08:42.990 +parse it as a JSON document, because it's a not well-formed JSON. + +08:42.990 --> 08:44.910 +It's separate lines. + +08:45.240 --> 08:50.730 +Um, and the validation file open with editor is just, I think 50. + +08:50.760 --> 08:54.780 +We said 50 lines much the same way. + +08:54.900 --> 08:58.680 +Uh, I can I'll just show you what it looks like in the JSON lines. + +08:58.680 --> 08:59.130 +Editor. + +08:59.130 --> 09:01.140 +It's a fancy editor that looks like this. + +09:01.140 --> 09:04.290 +And you can open up each one and it's like a JSON object. + +09:04.720 --> 09:05.110 +Uh. + +09:05.200 --> 09:06.310 +Look at that. + +09:07.060 --> 09:08.260 +That's how I should have started. + +09:08.260 --> 09:08.980 +Probably. + +09:09.010 --> 09:11.380 +It gives you a very good sense of what's going on. + +09:11.650 --> 09:17.080 +Uh, it's a very intuitive sense of the way the reason why messages are packaged. + +09:17.080 --> 09:18.280 +The way they're packaged. + +09:19.180 --> 09:23.950 +All right, so that are those are the files. + +09:24.100 --> 09:26.080 +Uh, that's the last step of this. + +09:26.080 --> 09:27.310 +This part. + +09:27.700 --> 09:33.550 +Um, it will be time for us to upload these files to OpenAI. + +09:33.550 --> 09:38.350 +And to do that, we call OpenAI dot files dot create. + +09:38.350 --> 09:42.880 +And we pass in the file and we tell it the purpose is fine tune. + +09:43.270 --> 09:47.350 +Uh, and just one tiny thing to watch out for. + +09:47.350 --> 09:50.560 +When you pass in this file, you have to pass it in. + +09:50.590 --> 09:56.680 +You have to open it as a binary file, because it's just going to be the binary bytes in that file that + +09:56.680 --> 09:58.270 +will get streamed up to OpenAI. 
+ +09:58.300 --> 10:02.980 +So you don't want this to be an R, you want it to be an RB. + +10:02.980 --> 10:07.160 +Uh, so just just a small nuance to watch out for. + +10:07.160 --> 10:12.140 +We're just sending the entire contents of the file as is to OpenAI. + +10:12.290 --> 10:15.950 +So we execute that line takes a second. + +10:15.980 --> 10:20.960 +If I just inspect what came back, I get back a file object. + +10:21.650 --> 10:24.410 +It's got a certain number of bytes. + +10:24.620 --> 10:28.070 +Object is file, purpose is fine tuned, status is processed. + +10:28.070 --> 10:32.660 +So already OpenAI is taking that file and has processed it. + +10:32.660 --> 10:35.930 +And we will do the same thing for the validation. + +10:36.260 --> 10:38.660 +We'll run it and there we go. + +10:38.660 --> 10:41.420 +And once again it is processed. + +10:41.420 --> 10:45.350 +So at this point we have created two JSON files. + +10:45.350 --> 10:50.180 +One for our fine tuned training set, one for our fine tuned validation set. + +10:50.180 --> 10:53.000 +We've written them out to our file system. + +10:53.000 --> 11:00.560 +And then we have uploaded them to OpenAI, where they are now sitting as file objects in OpenAI. + +11:00.590 --> 11:05.150 +In the next session, we will actually do some fine tuning. + +11:05.180 --> 11:06.170 +See you there. 
diff --git a/week5/community-contributions/subtitles/srts/59473191/ja_JP.srt b/week5/community-contributions/subtitles/srts/59473191/ja_JP.srt new file mode 100755 index 0000000..9ca5a66 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473191/ja_JP.srt @@ -0,0 +1,433 @@ +WEBVTT + +00:00.920 --> 00:02.690 +そして、 あなたは私たちがここに到着することはないと思っていた。 + +00:02.720 --> 00:07.910 +ここではJupyter Labでフロンティアモデルの微調整を行っている。 + +00:07.910 --> 00:10.580 +だから、 まずは輸入品の数々から始める。 + +00:10.580 --> 00:12.860 +テストデータもインポートする。 + +00:12.860 --> 00:21.140 +覚えているだろうか、 これは250のテスト例を実行し、 美しいチャートを与えてくれる、 あの気の利いたコードの一部である。 + +00:21.140 --> 00:28.880 +その最後に、 環境をロードし、 ハグする顔のトークンをロードします。 今回は使いませんが、 + +00:28.910 --> 00:34.280 +以前のように、 いつもハグやハグする顔のようにしませんか? + +00:34.310 --> 00:36.800 +そしてOpenAIを使うつもりだ。 + +00:36.830 --> 00:41.000 +だから、 あそこでもここでもそのラインを走らせる必要がある。 + +00:41.000 --> 00:46.610 +トレーニング・データとテスト・データをピックルファイルから開くことで、 + +00:46.700 --> 00:51.200 +すべてを一から作り直す必要がなくなる。 + +00:51.470 --> 00:52.340 +分かった。 + +00:52.340 --> 00:54.500 +では、 これからどうするかについて話そう。 + +00:54.500 --> 01:06.210 +そこでOpenAIは、 トレーニングを行う際には、 トレーニングに使用するサンプルポイントを50から100の間で使用することを推奨しています。 + +01:06.210 --> 01:14.160 +そして、 フロンティア・モデルのファイン・チューニングの主な目的は、 そのトーンやスタイルを適応させることであり、 + +01:14.160 --> 01:19.860 +状況によってはエラーを修正し、 精度を向上させることである。 + +01:20.010 --> 01:28.860 +GPTの4つのシリーズのようなモデルは、 非常に多くのデータで訓練されているので、 本当にやろうとしていることは、 + +01:28.980 --> 01:39.150 +何か特定のことをさせたいときに十分な例を与えて、 そこから学習させることです。 + +01:39.390 --> 01:50.970 +でも、 少なくともここでは、 彼らが推奨している数より多い500を選ぶつもりだ。 + +01:51.060 --> 01:55.830 +ええと、 テストしてみたんだけど、 小さい数字よりはうまくいったよ。 + +01:56.130 --> 01:59.040 +それで、 例の中から500を選んでいるんだ。 + +01:59.070 --> 02:00.540 +今の例はとても小さい。 + +02:00.570 --> 02:07.310 +私たちのテキストはとても小さく、 一般的にはもっと大きなトレーニング文書を考えていると思う。 + +02:07.310 --> 02:10.220 +だから、 この件に関しては悪い気はしない。 + +02:10.430 --> 02:14.510 +そして現在、 これは実際に微調整を行っている。 + +02:14.660 --> 02:19.850 +ええと、 9月23日だと思うけど、 9月後半までの期間無料なんだ。 + +02:19.850 --> 02:30.830 +しかし、 無料でなくなったとしても、 あなたが支払うコストは、 実際に500個の推論を実行するのにかかるコストと同じようなものである。 + 
+02:30.830 --> 02:36.890 +だから、 現時点では約0ドルというところだろう。 05を行うには、 えー、 あなたの通貨で、 + +02:36.890 --> 02:38.480 +または同等額で。 + +02:38.570 --> 02:41.930 +ええと、 だからまだ小銭なんだ。 + +02:41.930 --> 02:46.490 +少なくとも9月下旬までは無料だ。 + +02:46.730 --> 02:56.600 +そのため、 実際のトレーニングセットである400,000から500のトレーニングセットに分割している。 + +02:56.780 --> 03:00.020 +ええと、 そして私は50を検証として受け取るつもりだ。 + +03:00.020 --> 03:07.190 +先ほど、 検証は必要ないと言いましたが、 トレーニングセットは1エポックしか行わないからです。 + +03:07.460 --> 03:13.340 +うーん、 でも、 将来のプロジェクトでこの方法を知ってもらうために、 お見せした方が役に立つと思ったんだ。 + +03:13.340 --> 03:16.970 +なぜなら、 このすべてがあなたのプロジェクトでも再現できるからだ。 + +03:16.970 --> 03:18.560 +だからこれを実行する。 + +03:19.010 --> 03:30.020 +最初のステップは、 JsonlのJSONラインデータを準備することで、 トレーニングデータをこのフォーマットに変換することだとお話ししました。 + +03:30.020 --> 03:40.040 +まず最初に、 皆さんがよく知っている関数を使ったメソッドを書きました。 + +03:40.130 --> 03:46.580 +ええと、 商品の価格を見積もり、 価格だけを返信してください。 + +03:46.580 --> 03:51.380 +そして、 ユーザー・プロンプトには、 その項目からテスト・プロンプトを選ぶ。 + +03:51.590 --> 03:58.850 +それで、 1ドル単位まで切り取って、 それを "空 "に置き換えたんだ。 + +03:58.850 --> 04:05.740 +フロンティア・ラボにはそのような近似値は必要ない。 + +04:05.740 --> 04:10.900 +そして、 ユーザー・プロンプトに表示されるのも、 この部分だ。 + +04:10.900 --> 04:17.350 +そして、 アシスタントに "price is "と答え、 値段を伝える。 + +04:17.350 --> 04:19.090 +では、 それを実行してみよう。 + +04:19.090 --> 04:24.760 +念のため、 例を挙げてみよう。 + +04:24.790 --> 04:34.210 +モデルが最初に目にする列車ゼロのメッセージ。 + +04:34.210 --> 04:36.910 +そして、 これがロールシステムだ。 + +04:36.910 --> 04:38.560 +これがシステム・プロンプトだ。 + +04:38.590 --> 04:42.970 +それで満足していることを確認してから、 ユーザーをロールバックする。 + +04:43.090 --> 04:45.070 +そしてこれがユーザー・プロンプトだ。 + +04:45.430 --> 04:51.040 +ええと......これはいくらかかるんだろう? 
+ +04:51.040 --> 04:58.270 +そして、 デルファイ、 デルファイ、 燃料ポンプ・モジュールについての説明だ。 + +04:58.690 --> 05:04.900 +ええと、 それからこれがアシスタントの返事です。 + +05:04.900 --> 05:07.330 +価格は226ドル。 + +05:07.330 --> 05:12.310 +まさかあんなことになるとは......。 + +05:12.520 --> 05:14.080 +ええと、 どうぞ。 + +05:14.080 --> 05:15.700 +毎日何かを学んでいる。 + +05:15.700 --> 05:21.040 +いずれにせよ、 これがメッセージの形式であり、 現段階ではとても、 とても馴染みのあるものであるはずだ。 + +05:21.040 --> 05:29.410 +そして、 これがいかに完璧に作られたテスト、 申し訳ないが、 モデルに提供するトレーニング・データ・ポイントであるかがおわかりいただけるだろう。 + +05:29.770 --> 05:38.530 +では、 ここでJSON Lを作る関数を紹介しよう。 + +05:38.530 --> 05:40.570 +これらの項目を繰り返し処理する。 + +05:40.570 --> 05:47.560 +このテキスト、 このテキスト、 このオブジェクトをそれぞれ作成する。 + +05:47.560 --> 05:54.340 +そしてJsonを使う。 はダンプ文字列を単純な文字列に変換する。 + +05:54.340 --> 06:00.820 +そして、 この文字列の最後にキャリッジ・リターンを追加するだけだ。 + +06:00.850 --> 06:05.020 +そしてそれを戻して、 最後のキャリッジ・リターンを取り除く。 + +06:05.020 --> 06:07.360 +では、 実際に見てみよう。 + +06:07.360 --> 06:10.240 +では、 まずそれを実行してみよう。 + +06:10.450 --> 06:12.340 +私のいつもの失態を犯さないように。 + +06:12.640 --> 06:13.600 +これでよし。 + +06:13.600 --> 06:15.730 +そして今、 JSONを作ると言う。 + +06:15.730 --> 06:22.180 +そして、 トレーニング・データを入れてみましょう。 + +06:22.210 --> 06:27.370 +全部が混雑しないように、 最初の3つだけパスしよう。 + +06:27.370 --> 06:29.860 +もちろん、 ここで糸が戻ってくる。 + +06:29.860 --> 06:34.090 +そして、 この文字列は...。 + +06:34.090 --> 06:35.830 +印刷した方が簡単かもしれない。 + +06:35.860 --> 06:40.090 +空の線がはっきり見えるように印刷しよう。 + +06:40.420 --> 06:48.130 +文字列の中に1行、 2行、 3行と並んでいる。 + +06:48.370 --> 06:50.290 +うーん、 巻きついている感じだね。 + +06:50.290 --> 07:02.290 +各行には、 そのトレーニング・データ・ポイントでやり取りされた全メッセージが含まれているのがわかるだろう。 + +07:02.740 --> 07:03.400 +オーケー。 + +07:03.410 --> 07:04.760 +ここまでは順調だ。 + +07:04.940 --> 07:08.240 +今、 私たちはこの機能を使っている。 + +07:08.240 --> 07:08.810 +そうだろう? 
+ +07:08.840 --> 07:12.110 +Jsonlは項目を取り、 ファイル名を取る。 + +07:12.110 --> 07:13.850 +そしてこれは超シンプルなものだ。 + +07:13.880 --> 07:18.950 +そのファイル名を開き、 上の関数を呼び出して書き出す。 + +07:18.950 --> 07:23.270 +だから、 そのデモをお見せする必要はないと思う。 + +07:23.300 --> 07:26.180 +実行する必要はあるが、 実際に実行することはできる。 + +07:26.180 --> 07:34.190 +そこで、 トレーニング・データ・セット、 つまり500アイテムからなる微調整されたトレーニング・データ・セットを使います。 + +07:34.190 --> 07:34.940 +確認しよう。 + +07:34.970 --> 07:40.550 +トレーニングデータセット全体から500アイテム、 つまり400,000アイテムだ。 + +07:40.580 --> 07:45.590 +すべてをファイルに書き込んでGPT4にアップロードするつもりはない。 + +07:45.920 --> 07:52.550 +それをFine Tune train dot JSONというファイルに書き出す。 + +07:52.550 --> 07:58.700 +では、 それを実行し、 検証セットも同じように実行しよう。 + +07:58.700 --> 08:03.170 +2つのファイルを書き込んだが、 数秒前に書き込まれたばかりであることがわかるだろう。 + +08:03.170 --> 08:05.790 +だから、 これを開けば開けるんだ。 + +08:05.790 --> 08:13.530 +実際には、 JupyterLabには派手なJSON行エディタがあるのですが、 ここでは普通のエディタについて説明します。 + +08:13.560 --> 08:18.420 +そして、 ここでは、 期待通り、 500行を想定していることがわかる。 + +08:18.420 --> 08:24.270 +さあ、 最後まで500列だ。 + +08:24.300 --> 08:27.960 +そして、 そのどれもが期待通りの構造を持っている。 + +08:28.170 --> 08:35.010 +各行が整形されたJSONドキュメントだからです。 + +08:35.010 --> 08:42.990 +しかし、 重要なのは、 これをJSONドキュメントとして読み込んでパースすることはできないということだ。 + +08:42.990 --> 08:44.910 +別々のラインだ。 + +08:45.240 --> 08:50.730 +それから、 エディターで開く検証ファイルは50だと思う。 + +08:50.760 --> 08:54.780 +私たちは50行をほぼ同じように言った。 + +08:54.900 --> 08:58.680 +ええと、 JSONの行でどのように見えるかをお見せしましょう。 + +08:58.680 --> 08:59.130 +編集者 + +08:59.130 --> 09:01.140 +こんな感じの派手なエディターだ。 + +09:01.140 --> 09:04.290 +それぞれを開くと、 JSONオブジェクトのようになっている。 + +09:04.720 --> 09:05.110 +ええと。 + +09:05.200 --> 09:06.310 +あれを見ろ。 + +09:07.060 --> 09:08.260 +そうやって始めるべきだったんだ。 + +09:08.260 --> 09:08.980 +おそらくね。 + +09:09.010 --> 09:11.380 +何が起こっているのか、 とてもよくわかる。 + +09:11.650 --> 09:17.080 +メッセージがパッケージされる理由を直感的に理解できるんだ。 + +09:17.080 --> 09:18.280 +パッケージの仕方だ。 + +09:19.180 --> 09:23.950 +よし、 これがそのファイルだ。 + +09:24.100 --> 09:26.080 +これが最後のステップだ。 + +09:26.080 --> 09:27.310 +この部分だ。 + +09:27.700 --> 09:33.550 +そろそろ、 これらのファイルをOpenAIにアップロードしなければならない。 + +09:33.550 --> 09:38.350 
+そのために、 OpenAIのドット・ファイルをドット・クリエイトと呼んでいる。 + +09:38.350 --> 09:42.880 +そしてファイルを渡し、 目的は微調整だと伝える。 + +09:43.270 --> 09:47.350 +それと、 ひとつだけ気をつけてほしいことがある。 + +09:47.350 --> 09:50.560 +このファイルを渡すときは、 このファイルを渡さなければならない。 + +09:50.590 --> 09:58.270 +バイナリファイルとして開く必要があります。 なぜなら、 OpenAIにストリームアップされるのは、 そのファイルのバイナリバイトになるからです。 + +09:58.300 --> 10:02.980 +つまり、 Rではなく、 RBにしたいわけだ。 + +10:02.980 --> 10:07.160 +ええと、 だから気をつけるべきはほんのちょっとしたニュアンスなんだ。 + +10:07.160 --> 10:12.140 +ファイルの中身を全部そのままOpenAIに送っているだけです。 + +10:12.290 --> 10:15.950 +だから、 その行を実行するのに1秒かかる。 + +10:15.980 --> 10:20.960 +戻ってきたものを検査すると、 ファイル・オブジェクトが返ってくる。 + +10:21.650 --> 10:24.410 +バイト数は決まっている。 + +10:24.620 --> 10:28.070 +オブジェクトはファイル、 目的は微調整、 ステータスは処理。 + +10:28.070 --> 10:32.660 +だから、 OpenAIはすでにそのファイルを受け取り、 処理している。 + +10:32.660 --> 10:35.930 +そして、 私たちは同じことを検証のために行う。 + +10:36.260 --> 10:38.660 +走らせればいいんだ。 + +10:38.660 --> 10:41.420 +そしてもう一度、 加工される。 + +10:41.420 --> 10:45.350 +この時点で、 2つのJSONファイルを作成したことになる。 + +10:45.350 --> 10:50.180 +1つは微調整したトレーニングセット用、 もう1つは微調整した検証セット用だ。 + +10:50.180 --> 10:53.000 +ファイルシステムに書き出した。 + +10:53.000 --> 11:00.560 +そしてそれらをOpenAIにアップロードし、 OpenAIのファイルオブジェクトとして置いています。 + +11:00.590 --> 11:05.150 +次のセッションでは、 実際に微調整を行う。 + +11:05.180 --> 11:06.170 +そこで会おう diff --git a/week5/community-contributions/subtitles/srts/59473191/ko_KR.srt b/week5/community-contributions/subtitles/srts/59473191/ko_KR.srt new file mode 100755 index 0000000..9244801 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473191/ko_KR.srt @@ -0,0 +1,481 @@ +WEBVTT + +00:00.920 --> 00:02.690 +Get it 여기까지 못 올 줄 알았죠? + +00:02.720 --> 00:07.910 +주피터 연구소에서 개척지 모델을 세밀하게 조정하고 있어요 + +00:07.910 --> 00:10.580 +수입 부품부터 시작하죠 + +00:10.580 --> 00:12.860 +테스트 데이터도 가져오죠 + +00:12.860 --> 00:19.250 +기억하신다면 이건 250개의 테스트 예제를 실행할 수 있는 실용적인 코드입니다 아름다운 + +00:19.250 --> 00:21.140 +차트를 제공하죠 + +00:21.140 --> 00:28.880 +마지막에 우리 환경에서 끌어안는 얼굴 토큰을 로드합니다 이번에는 사용하지 않겠지만 + +00:28.910 --> 00:34.280 +이전처럼 항상 끌어안고 얼굴을 안는 건 어떨까요? 
+ +00:34.310 --> 00:36.800 +오픈AI를 사용할 거예요 + +00:36.830 --> 00:41.000 +그래서 저 선을 저기와 여기에 실행해야 해요 + +00:41.000 --> 00:46.610 +피클 파일에서 훈련 데이터와 테스트 데이터를 열어 이걸 하도록 하겠습니다 + +00:46.700 --> 00:51.200 +모든 걸 처음부터 다시 만들 필요가 없도록요 + +00:51.470 --> 00:52.340 +좋아요 + +00:52.340 --> 00:54.500 +이제 뭘 할지 얘기해보죠 + +00:54.500 --> 01:04.680 +오픈AI는 여러분이 훈련할 때 훈련에 사용한 예시를 50-100개 정도 사용하라고 + +01:04.830 --> 01:06.210 +권장해요 + +01:06.210 --> 01:14.160 +개척지 모델의 세밀한 조정은 음색과 스타일을 바꾸는 거예요 오류를 + +01:14.160 --> 01:19.860 +수정하고 어떤 상황에서는 정확도를 높이는 거죠 + +01:20.010 --> 01:28.860 +중요한 건 아니고 예시를 많이 넣는 것도 큰 의미가 없어요 GPT 4 시리즈 같은 모델은 너무 많은 데이터를 바탕으로 + +01:28.980 --> 01:35.070 +훈련되어서 여러분이 정말 하려는 건 여러분이 원하는 특정 작업의 예시를 충분히 + +01:35.070 --> 01:39.150 +제공하는 거예요 그래야 거기서 배울 수 있죠 + +01:39.390 --> 01:46.980 +네, 아주 많은 숫자를 추천하지 않는 건 아니지만 적어도 500은 + +01:46.980 --> 01:50.970 +뽑을 거예요 권장량보다 많죠 + +01:51.060 --> 01:55.830 +그리고 제가 실험해 봤는데 작은 수치보다 결과가 좋았어요 + +01:56.130 --> 01:59.040 +500가지 예시를 들어볼게요 + +01:59.070 --> 02:00.540 +지금은 아주 작은 사례만 있죠 + +02:00.570 --> 02:07.310 +텍스트는 아주 작아요 일반적으로 훨씬 더 큰 훈련 문서를 생각하고 있죠 + +02:07.310 --> 02:10.220 +그래서 저는 전혀 미안하지 않아요 + +02:10.430 --> 02:14.510 +지금은 미세한 조율이에요 + +02:14.660 --> 02:19.850 +한동안은 비어요 9월 23일까지는요 9월 말까지는요 + +02:19.850 --> 02:26.840 +하지만 무료가 아니게 되더라도 여러분이 지불하는 비용은 500개에 대한 추론만 수행하는 + +02:26.840 --> 02:30.830 +비용과 비슷합니다 이것도 역시 몇 센트로 측정되죠 + +02:30.830 --> 02:38.480 +그럼 지금으로서는 0달러 정도 되겠네요 5천 달러요, 그 정도 금액으로요 + +02:38.570 --> 02:41.930 +여전히 적은 금액이네요 + +02:41.930 --> 02:46.490 +말씀드렸듯이 9월 말까지는 무료예요 + +02:46.730 --> 02:54.170 +500개의 훈련 세트를 만들었어요 실제 훈련 세트인 400,000개와 + +02:54.200 --> 02:56.600 +비교해서요 + +02:56.780 --> 03:00.020 +50달러로 검증 받을게요 + +03:00.020 --> 03:05.240 +아까 언급했듯이 검증은 필요 없습니다 훈련 세트에서 한 번만 + +03:05.240 --> 03:07.190 +할 것이기 때문이죠 + +03:07.460 --> 03:12.470 +하지만 여러분께 보여드리면 유용할 것 같았어요 나중에 여러분 프로젝트에서 어떻게 사용할지 + +03:12.470 --> 03:13.340 +알 수 있게요 + +03:13.340 --> 03:16.970 +이 모든 게 여러분의 프로젝트에 따라 복제될 수 있거든요 + +03:16.970 --> 03:18.560 +이걸 실행해요 + +03:19.010 --> 03:28.160 +첫 단계는 Jsonl JSON 
라인 데이터를 준비하는 거라고 말씀드렸죠 훈련 데이터를 이 포맷으로 + +03:28.160 --> 03:30.020 +변환하는 거예요 + +03:30.020 --> 03:37.730 +먼저 여러분이 잘 아는 메서드 함수를 작성했어요 메시지를 전달하는 함수죠 지난 시간에 했던 + +03:37.730 --> 03:40.040 +것과 정확히 일치해요 + +03:40.130 --> 03:46.580 +물품의 가격을 추정하고 가격으로만 답해야 해요 설명하지 말고요 + +03:46.580 --> 03:51.380 +사용자 프롬프트는 아이템에서 테스트 프롬프트를 가져오고요 + +03:51.590 --> 03:58.850 +그리고 1달러가 될 만한 건 다 빼고 빈 지폐로 바꿔요 + +03:58.850 --> 04:04.780 +최저 금액을 제시하는 게 아닙니다 개척지 연구소는 그런 추측이 + +04:04.810 --> 04:05.740 +필요 없죠 + +04:05.740 --> 04:10.900 +그리고 이것도 빼요 그게 사용자 프롬프트에 가는 거죠 + +04:10.900 --> 04:17.350 +그럼 전 보조에게 가격을 말하고 가격을 알려줘요 + +04:17.350 --> 04:19.090 +실행해 보죠 + +04:19.090 --> 04:24.760 +무슨 일인지 잘 모르실 수 있으니 예를 들어볼게요 + +04:24.790 --> 04:34.210 +모델이 처음 보는 0번 열차에 보내는 메시지예요 + +04:34.210 --> 04:36.910 +Get 롤 시스템이에요 + +04:36.910 --> 04:38.560 +시스템 프롬프트죠 + +04:38.590 --> 04:42.970 +만족스러운지 확인하고 사용자를 롤해요 + +04:43.090 --> 04:45.070 +이게 사용자 프롬프트예요 + +04:45.430 --> 04:51.040 +우리가 이 질문을 한 것 같네요 물음표는 얼마일까요? + +04:51.040 --> 04:58.270 +델포이 어쩌고 하는 장황한 얘기도 들었어요 연료 펌프 모듈요 + +04:58.690 --> 05:04.900 +그리고 이건 비서의 대답이에요 + +05:04.900 --> 05:07.330 +가격은 226달러예요 + +05:07.330 --> 05:12.310 +그런 건 상상도 못 했어요 그런 건 상상도 못 했죠 + +05:12.520 --> 05:14.080 +자, 보세요 + +05:14.080 --> 05:15.700 +매일 새로운 걸 배우네요 + +05:15.700 --> 05:20.200 +어쨌든 이게 메시지 형식입니다 이 단계에선 아주 익숙할 + +05:20.200 --> 05:21.040 +거예요 + +05:21.040 --> 05:28.270 +이게 어떻게 완벽하게 만들어진 테스트, 아니 훈련 데이터 포인트인지 알 수 있습니다 우리가 모델에 + +05:28.300 --> 05:29.410 +제공할 것이죠 + +05:29.770 --> 05:36.910 +좋아요, 여기 JSON L을 만드는 함수가 있어요 여러분이 예상하는 대로 항목을 여러 + +05:36.910 --> 05:38.530 +개 받아들이죠 + +05:38.530 --> 05:40.570 +그런 항목을 반복할 거예요 + +05:40.570 --> 05:47.560 +각각의 객체를 위해 텍스트를 생성하죠 + +05:47.560 --> 05:54.340 +그런 다음 Json을 사용하죠 덤프 스트링을 덤프 스트링으로 전환해 단순한 문자열로 변환하죠 + +05:54.340 --> 06:00.820 +보세요, 문자열 하나에 추가하고 끝에 캐리지 리턴을 하죠 + +06:00.850 --> 06:05.020 +그리고 다시 리턴합니다 마지막 캐리지 리턴을 제거하고요 + +06:05.020 --> 06:07.360 +어떻게 작동하는지 보죠 + +06:07.360 --> 06:10.240 +먼저 실행해 보죠 + +06:10.450 --> 06:12.340 +나처럼 실수하지 마요 + +06:12.640 --> 06:13.600 
+됐어요 + +06:13.600 --> 06:15.730 +이제 만들기를 해요 JSON + +06:15.730 --> 06:22.180 +훈련 데이터를 제출하도록 하죠 + +06:22.210 --> 06:27.370 +앞의 세 곡만 통과시키죠 그래야 사람들이 안 몰려요 + +06:27.370 --> 06:29.860 +get get get 문자열이죠 + +06:29.860 --> 06:34.090 +그리고 문자열도 있어요 + +06:34.090 --> 06:35.830 +인쇄하는 게 더 쉬울 것 같아요 + +06:35.860 --> 06:40.090 +get을 해서 빈 선이 잘 보이게 만들어 줄게요 + +06:40.420 --> 06:48.130 +문자열입니다 문자열에 1, 2, 3개의 줄이 있죠 + +06:48.370 --> 06:50.290 +좀 휘감기고 있어요 + +06:50.290 --> 07:02.290 +각 행에 있는 메시지를 보면 훈련 데이터 포인트를 나타내요 + +07:02.740 --> 07:03.400 +네 + +07:03.410 --> 07:04.760 +지금까진 좋아요 + +07:04.940 --> 07:08.240 +이제 그 위에 구축된 함수가 있어요 + +07:08.240 --> 07:08.810 +그렇죠? + +07:08.840 --> 07:12.110 +Jsonl은 항목을 취하고 파일 이름을 취해요 + +07:12.110 --> 07:13.850 +아주 간단해요 + +07:13.880 --> 07:18.950 +파일 이름을 열고 위의 함수를 호출해 써요 + +07:18.950 --> 07:23.270 +그에 대한 데모는 드릴 필요가 없을 것 같네요 + +07:23.300 --> 07:26.180 +실행해야 하지만 실제로 실행할 수 있어요 + +07:26.180 --> 07:34.190 +이제 훈련 데이터 집합을 볼게요. 잘 조율된 트레인 기억하시죠? 500개의 아이템이 있어요. + +07:34.190 --> 07:34.940 +확인해 보죠 + +07:34.970 --> 07:40.550 +됐어요, 훈련 데이터 세트 전체에서 500개, 400,000개예요 + +07:40.580 --> 07:45.590 +전부 파일에 작성해 GPT 4에 업로드 하진 않아요 + +07:45.920 --> 07:52.550 +파인 튠 트레인 닷 JSON 파일에 그걸 적어요 + +07:52.550 --> 07:58.700 +그걸 실행하고 유효성 검증 세트를 가져다가 정확히 같은 걸 실행하죠 + +07:58.700 --> 08:03.170 +저 두 파일을 작성했는데 몇 초 전에 작성된 걸 보실 수 있죠 + +08:03.170 --> 08:05.790 +이걸 열면 열 수 있어요 + +08:05.790 --> 08:12.450 +JupyterLab에도 JSON 라인 편집기가 있어요 일반 편집기로 + +08:12.450 --> 08:13.530 +넘어가 보죠 + +08:13.560 --> 08:18.420 +여러분이 기대하는 대로 500개의 행이 있네요 + +08:18.420 --> 08:24.270 +자, 끝까지 갑니다 500줄이네요 + +08:24.300 --> 08:27.960 +모두 우리가 바라는 구조로 이루어져 있어요 + +08:28.170 --> 08:35.010 +잘 구성된 JSON이 아닌 걸 알 수 있어요 각 행은 잘 구성된 JSON 문서거든요 + +08:35.010 --> 08:39.330 +제가 장황하게 말하고 있지만 이걸 읽을 수 없고 JSON 문서로 파싱할 + +08:39.330 --> 08:42.990 +수 없는 게 중요해요 구성이 잘 된 JSON이 아니니까요 + +08:42.990 --> 08:44.910 +선이 분리돼 있어요 + +08:45.240 --> 08:50.730 +에디터로 여는 검증 파일이 50개 정도 될 거예요 + +08:50.760 --> 08:54.780 +50줄은 거의 비슷하게 말했어요 + +08:54.900 --> 08:58.680 +JSON 라인에서 어떻게 보이는지 
보여드릴게요 + +08:58.680 --> 08:59.130 +편집자요 + +08:59.130 --> 09:01.140 +이렇게 생긴 멋진 편집기예요 + +09:01.140 --> 09:04.290 +각각을 열 수 있어요 JSON 개체 같죠 + +09:04.720 --> 09:05.110 +네 + +09:05.200 --> 09:06.310 +보세요 + +09:07.060 --> 09:08.260 +그렇게 시작을 했어야죠 + +09:08.260 --> 09:08.980 +아마도요 + +09:09.010 --> 09:11.380 +어떤 상황인지 잘 알 수 있죠 + +09:11.650 --> 09:17.080 +메시지가 왜 포장되는지에 대한 직관적인 감각을 발휘하죠 + +09:17.080 --> 09:18.280 +포장된 방식요 + +09:19.180 --> 09:23.950 +좋아요, 그게 파일이에요 + +09:24.100 --> 09:26.080 +그게 마지막 단계예요 + +09:26.080 --> 09:27.310 +이 부분요 + +09:27.700 --> 09:33.550 +오픈AI에 업로드 할 때가 된 것 같아요 + +09:33.550 --> 09:38.350 +OpenAI.Files.Create라고 이름 붙였어요 + +09:38.350 --> 09:42.880 +파일을 전달하고 목적이 미세하다고 말하죠 + +09:43.270 --> 09:47.350 +한 가지 주의할 게 있어요 + +09:47.350 --> 09:50.560 +이 파일을 전달할 때 반드시 전달해야 해요 + +09:50.590 --> 09:56.680 +이진법 파일로 열어야 합니다. 왜냐하면 그 파일은 바이너리 바이트로 되어 있어서 OpenAI로 스트리밍 + +09:56.680 --> 09:58.270 +될 것이니까요. + +09:58.300 --> 10:02.980 +R이 아니라 RB로 해야 해요 + +10:02.980 --> 10:07.160 +아주 작은 뉘앙스만 조심하면 돼요 + +10:07.160 --> 10:12.140 +파일의 모든 내용을 OpenAI로 보내고 있어요 + +10:12.290 --> 10:15.950 +그 라인을 실행하는 데 1초가 걸리죠 + +10:15.980 --> 10:20.960 +무엇이 돌아왔는지 검사하면 파일 객체를 가져오죠. 
+ +10:21.650 --> 10:24.410 +특정 바이트의 개수가 있고요 + +10:24.620 --> 10:28.070 +객체는 파일이고 목적은 잘 조정되었고 상태는 처리되었어요 + +10:28.070 --> 10:32.660 +OpenAI는 이미 파일을 가져와서 처리하고 있어요 + +10:32.660 --> 10:35.930 +검증도 똑같이 할 거예요 + +10:36.260 --> 10:38.660 +실행하면 다 됐어요 + +10:38.660 --> 10:41.420 +다시 한 번 처리하죠 + +10:41.420 --> 10:45.350 +이 시점에서 우린 두 개의 JSON 파일을 만들었죠 + +10:45.350 --> 10:50.180 +하나는 훈련용이고 하나는 검증용이죠 + +10:50.180 --> 10:53.000 +우리 파일 시스템에 적어뒀어요 + +10:53.000 --> 11:00.560 +그리고 OpenAI에 업로드했습니다 OpenAI에서 파일 객체로 자리 잡고 있죠 + +11:00.590 --> 11:05.150 +다음 시간에는 미세한 조정을 할 거예요 + +11:05.180 --> 11:06.170 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/59473201/en_US.srt b/week5/community-contributions/subtitles/srts/59473201/en_US.srt new file mode 100755 index 0000000..0308295 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473201/en_US.srt @@ -0,0 +1,457 @@ +WEBVTT + +00:00.740 --> 00:07.280 +Well, before we do a postmortem on what happened, let's just quickly look at the standing the ranking + +00:07.280 --> 00:08.690 +orders that we've got here. + +00:08.690 --> 00:15.710 +So you'll remember that when we did a constant run based on the average, we had an error, an average + +00:15.710 --> 00:18.110 +prediction difference of 146. + +00:18.620 --> 00:22.280 +When we did traditional machine learning, it went to 139. + +00:22.280 --> 00:24.800 +Random forest was 97. + +00:24.830 --> 00:33.860 +The human, one human in particular, uh, who shall not be named, was 127, uh, GPT four or mini + +00:33.860 --> 00:37.400 +when we ran it the first time, I'm afraid to say was $80. + +00:37.430 --> 00:43.310 +GPT four was 76, and what we just came back with was 91. + +00:43.310 --> 00:50.300 +As I say, there were things about it that actually were improved on the prior run, but for whatever + +00:50.300 --> 00:52.580 +reason, uh, it is what it is. + +00:52.580 --> 00:54.350 +I can't I can't fudge the results. 
+ +00:54.350 --> 01:01.130 +The unfortunately, the business metric that we're most focused on was slightly poorer fine tuning, + +01:01.160 --> 01:05.670 +uh, has, uh, fine tuned in the wrong direction, it seems. + +01:05.670 --> 01:07.350 +So let's talk about that. + +01:08.280 --> 01:13.830 +It was obviously a sobering moment for us, an important learn on our journey. + +01:14.130 --> 01:17.610 +Uh, so it does, uh, take a moment. + +01:17.640 --> 01:23.670 +We need to take a moment to think about, um, what is the objective of fine tuning with a frontier + +01:23.700 --> 01:24.600 +model? + +01:24.660 --> 01:32.880 +Uh, fine tuning is often used, and we will be using it for taking an open source model that has fewer + +01:32.880 --> 01:39.390 +parameters and trying to train it on a data set to make it rival with a frontier model. + +01:39.420 --> 01:44.760 +But when you have a frontier model that has trillions of parameters already and has been trained over + +01:44.760 --> 01:48.450 +enormous data sets, what is your objective? + +01:48.510 --> 01:53.460 +Um, and so here, these, these five main objectives for why you fine tune a frontier model. + +01:53.460 --> 01:57.390 +I actually basically took these from OpenAI's website itself. + +01:57.420 --> 02:05.460 +These are OpenAI's reasons for why you would want to train to fine tune something like GPT for many. + +02:05.730 --> 02:12.960 +Um, and it's, it's if you want to craft the like the style or tone of the responses, it gives an + +02:12.960 --> 02:16.860 +example of adding some sarcasm to some responses. + +02:16.860 --> 02:23.790 +If you want to improve reliably producing a particular type of format, a construct, you need the format + +02:23.790 --> 02:27.120 +to be in a particular style or way or structure. + +02:27.540 --> 02:34.530 +Um, the third one is correcting, uh, where the model is failing to follow a difficult or challenging + +02:34.530 --> 02:34.950 +prompt. 
+ +02:34.950 --> 02:39.900 +There's something very complex it's being asked to do, and it's it doesn't get the joke, it's missing + +02:39.900 --> 02:40.410 +it. + +02:40.800 --> 02:47.130 +Um, handling edge cases when there are things that are occasional flaws that get exposed in the model + +02:47.130 --> 02:52.020 +that you need to correct for and then performing something new. + +02:52.020 --> 02:57.240 +And this is perhaps what we were trying to do, a new task, but one that's hard to articulate in a + +02:57.240 --> 02:57.930 +prompt. + +02:57.930 --> 03:05.110 +And that's really what OpenAI stresses on the site that it's about trying to solve for things that you + +03:05.110 --> 03:12.340 +can't already fix with good prompting, and it really urges you to start by working as much as you can + +03:12.340 --> 03:18.310 +on the prompt, because much of the time with something like GPT four or mini, you're going to be able + +03:18.310 --> 03:23.560 +to get to a very high level of performance just through prompting. + +03:23.920 --> 03:28.960 +Um, and really, for a frontier model that that's the key here. + +03:29.170 --> 03:37.120 +Uh, the we can already specify the question at hand and the style of output very clearly in a prompt. + +03:37.120 --> 03:43.900 +And in fact, if you remember back to the prior results, GPT four mini responded accurately in terms + +03:43.900 --> 03:45.280 +of a proper structure. + +03:45.280 --> 03:49.990 +In every single case, it never we weren't ever not able to pluck a number out. + +03:49.990 --> 03:54.850 +And the numbers were always, you know, within within an error close ish to the product. + +03:54.850 --> 04:01.330 +It was guessing, um, so it wasn't a problem with it understanding the challenge or the output format. 
+ +04:01.330 --> 04:09.210 +Um, and you have to remember that GPT four and GPT four mini have an absolutely staggering size of + +04:09.210 --> 04:17.640 +training data with a great world knowledge, and it's unlikely that giving it 500 more training examples + +04:17.670 --> 04:21.270 +is going to move the needle in terms of its world knowledge. + +04:21.960 --> 04:29.460 +And there is then this, this slight point, um, that, uh, that I talked about a while back now about + +04:29.460 --> 04:35.940 +what they call catastrophic forgetfulness, forgetting, which is where sometimes adding in more fine + +04:35.940 --> 04:41.730 +tuning causes you to erode some of the deeper knowledge that was gained during pre-training. + +04:41.730 --> 04:45.060 +So it's not always a good thing to be fine tuning. + +04:45.300 --> 04:50.910 +Um, and I don't know if it was catastrophic forgetting that caused this, this slight dip down or whether + +04:50.910 --> 04:56.940 +it's just a bad luck, just that there is some some noise in the system and and it just didn't happen + +04:56.940 --> 04:58.500 +to do so well on the test set. + +04:58.800 --> 05:01.860 +Um, but we certainly didn't appear to improve things. + +05:01.860 --> 05:02.970 +That's the bottom line. + +05:02.970 --> 05:09.420 +And it's because, in my view, and from from the way that I understand it and the way the experiments + +05:09.420 --> 05:15.030 +show, we were already doing a great job of clearly prompting what was needed. + +05:15.030 --> 05:21.810 +GPT four, in many, was already understanding that well, and the caliber of results was already very + +05:21.810 --> 05:22.590 +good. + +05:23.580 --> 05:29.610 +So having said that, the challenge for you, though, is to keep working on this. + +05:29.610 --> 05:35.940 +I've done a bit of hyperparameter optimization or trial and error to try and improve things a bit. + +05:36.120 --> 05:37.350 +Um, but not much. 
+ +05:37.350 --> 05:43.320 +And I would be shocked if it's not possible to get to a point where this fine tuning is at least doing + +05:43.320 --> 05:46.650 +a little bit better, a little bit better than what we had before. + +05:46.650 --> 05:48.270 +So that's the challenge for you. + +05:48.300 --> 05:54.540 +Do some more, you know, I mean, whilst OpenAI doesn't recommend that one puts in massive data sets, + +05:54.540 --> 06:01.800 +particularly while it's free to do so, I would certainly be interested in in, in trying bigger data + +06:01.830 --> 06:02.130 +sets. + +06:02.130 --> 06:04.650 +Try a training dataset of 1000 or 2000. + +06:04.680 --> 06:06.930 +Maybe try some more epochs. + +06:06.930 --> 06:10.710 +I did do that and it didn't make a difference for me, but try something different. + +06:10.710 --> 06:13.560 +There are other hyperparameters you can explore. + +06:13.590 --> 06:15.300 +You can look up on OpenAI's website. + +06:15.300 --> 06:18.660 +There's a couple that you can try changing if you wish. + +06:18.660 --> 06:21.900 +Just pass them into that same dictionary of hyperparameters. + +06:22.260 --> 06:28.410 +Um, and uh, yeah, you could also try putting in different training data points, and you can try + +06:28.440 --> 06:29.490 +playing with the prompt. + +06:29.520 --> 06:37.140 +I mean, OpenAI's biggest point on the website is that you will get the most mileage from improving + +06:37.140 --> 06:38.190 +the prompting. + +06:38.310 --> 06:43.890 +And obviously this is something where we spent a bit of time curating the data and perfecting the prompts, + +06:43.890 --> 06:46.440 +but there's much, much more that can be done there. + +06:46.440 --> 06:48.780 +So have a shot at that as well. + +06:48.780 --> 06:55.230 +The challenge for you is do some hyperparameter optimization, do some playing around with the prompting, + +06:55.260 --> 06:56.820 +at least do better. + +06:56.820 --> 06:58.200 +Let's look back at where we were. 
+ +06:58.230 --> 07:01.470 +Your challenge is to do better than 76. + +07:01.590 --> 07:08.020 +Um, and I will tell you that that I have been able to do better than 76 at one point with a with a + +07:08.020 --> 07:09.310 +prior run. + +07:09.460 --> 07:16.360 +And so I have done I know that it's possible to do better than 76 without without making too many changes. + +07:16.390 --> 07:18.520 +Not massively better, but better. + +07:18.730 --> 07:21.010 +And that is the challenge for you. + +07:21.040 --> 07:24.790 +Uh, do so please, and let me know how you get on. + +07:24.790 --> 07:30.610 +And if you particularly if you get, uh, optimized prompt or hyper parameters, then then push the + +07:30.610 --> 07:36.040 +code, do a PR so that I can look at it and share it with others and see where we get to. + +07:36.370 --> 07:43.480 +Um, and that will be, uh, that will be your your challenge accomplished when you do better than 76. + +07:45.040 --> 07:46.240 +All right. + +07:46.240 --> 07:49.870 +That brings us to a conclusion for week six. + +07:49.870 --> 07:58.690 +You are remarkably now 75%, three quarters of the way to being an LM engineer, proficient LM engineer + +07:58.690 --> 08:01.900 +who has mastered AI and LM engineering. + +08:02.110 --> 08:04.570 +And I hope you're as excited about that as I am. + +08:04.600 --> 08:10.130 +It's just a fantastic progress that you should be super proud of everything that you've learned. + +08:10.190 --> 08:15.920 +Uh, obviously generating text and code with frontier models assistance and using open source models + +08:15.920 --> 08:20.570 +with hugging face transformers, library, uh, lang chain rag. + +08:20.720 --> 08:26.960 +And then most recently, the five step strategy for problem solving curating data. + +08:26.960 --> 08:28.250 +We did a lot of curating data. + +08:28.250 --> 08:32.420 +But you know, the life of an LM engineer involves a lot of data curation. 
+ +08:32.420 --> 08:37.490 +That is a knack that you get into, and it's one of the most important parts. + +08:37.490 --> 08:42.710 +Certainly in all of the experiments that I did, changing the data structure was the thing that moved + +08:42.710 --> 08:44.810 +the needle more than anything else. + +08:44.900 --> 08:48.950 +Uh, and you're already seeing it post a lot of, uh, experiment. + +08:49.040 --> 08:51.560 +Um, but you I'm sure you can do better. + +08:52.070 --> 08:55.100 +Um, you've played with traditional machine learning. + +08:55.160 --> 08:59.660 +Uh, just just to get a good sense of a baseline that that we've beaten comfortably. + +08:59.810 --> 09:04.460 +Uh, you made a frontier model solution, and now fine tuned frontier models. + +09:04.460 --> 09:06.660 +So the results were a little disappointing. + +09:06.690 --> 09:07.710 +Gotta be real. + +09:07.890 --> 09:11.520 +But nonetheless, this is something that you can use in your own projects. + +09:11.520 --> 09:17.760 +And there are situations such as if you want to change the style or you're having difficult edge cases + +09:17.760 --> 09:20.730 +that are causing you problems, then fine tuning is the answer. + +09:20.730 --> 09:22.560 +And now at least you have a good recipe. + +09:22.590 --> 09:25.980 +You know how to do it and you've seen a run happening. + +09:25.980 --> 09:28.980 +You've checked its status, you've watched it in weights and biases. + +09:28.980 --> 09:31.290 +You know everything that's involved. + +09:32.490 --> 09:42.450 +All right, next week we turn over a new leaf and we we start a new segment of the voyage as we turn + +09:42.450 --> 09:44.160 +to open source models. + +09:44.160 --> 09:51.090 +Fine tuning open source models is a very different proposition to fine tuning a frontier model, fine + +09:51.090 --> 09:52.290 +tuning, an open source model. 
+
+09:52.290 --> 09:59.160
+What we're trying to do is start with something that is massively smaller than the large models that
+
+09:59.160 --> 09:59.970
+we're dealing with.
+
+09:59.970 --> 10:02.040
+I mean, it's still going to have billions of parameters.
+
+10:02.040 --> 10:09.300
+It's still a big model in the in the general scheme of things, but it doesn't compare with the trillions
+
+10:09.300 --> 10:13.290
+of parameters in GPT four and GPT four mini.
+
+10:13.320 --> 10:20.460
+So we're going to be fine tuning open source models, and we're going to be using something called Lora,
+
+10:20.460 --> 10:24.480
+which you may have heard of or you will have heard of because I've mentioned it a few times, but you
+
+10:24.480 --> 10:30.810
+may have perhaps seen some examples of Lora, and you may have heard of its cousin, Q Lora, which is
+
+10:30.840 --> 10:32.820
+a quantized version of Lora.
+
+10:32.850 --> 10:37.770
+We will be working on both, and by the end of it you will know them both back to front.
+
+10:37.980 --> 10:43.950
+Um, and in the next, next session, we're going to be selecting the base model, which you already
+
+10:43.950 --> 10:44.640
+know what that is.
+
+10:44.640 --> 10:47.310
+But but we'll be choosing it for reals.
+
+10:47.490 --> 10:53.070
+Um, that is going to be the model that we will have to compete with GPT four.
+
+10:53.340 --> 10:56.280
+Our current the current winner on our leaderboard.
+
+10:56.280 --> 10:59.220
+That is going to be our challenge for next week.
+
+10:59.250 --> 11:03.480
+It's going to be a big challenge, but I can't wait to take it on.
+
+11:03.480 --> 11:05.940
+And I hope you can't wait as well.
+
+11:05.940 --> 11:07.140
+I will see you then.
diff --git a/week5/community-contributions/subtitles/srts/59473201/ja_JP.srt b/week5/community-contributions/subtitles/srts/59473201/ja_JP.srt new file mode 100755 index 0000000..bc7d4db --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473201/ja_JP.srt @@ -0,0 +1,373 @@ +WEBVTT + +00:00.740 --> 00:08.690 +さて、 何が起こったかを事後報告する前に、 ここにある順位表の立ち位置を簡単に見ておこう。 + +00:08.690 --> 00:18.110 +だから、 平均値に基づいて一定の走りをしたとき、 平均予測差146という誤差が出たことを覚えているだろう。 + +00:18.620 --> 00:22.280 +従来の機械学習では139点だった。 + +00:22.280 --> 00:24.800 +ランダムフォレストは97だった。 + +00:24.830 --> 00:33.860 +その人間、 特にある人間、 名前は伏せるが、 GPT4かミニを初めて走らせたときは127、 + +00:33.860 --> 00:37.400 +残念ながら80ドルだった。 + +00:37.430 --> 00:43.310 +GPT4は76で、 今戻ってきたのは91だった。 + +00:43.310 --> 00:50.300 +というのも、 実際に前評判より改善された点もあったのだが、 理由はともあれ、 まあ、 + +00:50.300 --> 00:52.580 +そういうことなのだ。 + +00:52.580 --> 00:54.350 +結果はごまかせない。 + +00:54.350 --> 01:01.130 +残念なことに、 我々が最も注目しているビジネス指標は、 + +01:01.160 --> 01:05.670 +微調整がやや甘かった。 + +01:05.670 --> 01:07.350 +それについて話そう。 + +01:08.280 --> 01:13.830 +それは私たちにとって明らかに気が引き締まる瞬間であり、 旅の重要な学びだった。 + +01:14.130 --> 01:17.610 +ちょっと時間がかかるんだ。 + +01:17.640 --> 01:24.600 +フロンティアモデルによる微調整の目的は何なのか? + +01:24.660 --> 01:39.390 +ファインチューニングはよく使われるもので、 パラメータが少ないオープンソースのモデルを、 データセットで訓練してフロンティアモデルに匹敵するようにするために使います。 + +01:39.420 --> 01:44.760 +しかし、 すでに何兆ものパラメータを持ち、 膨大なデータセットで訓練されたフロンティアモデルがある場合、 + +01:44.760 --> 01:48.450 +その目的は何でしょうか? 
+ +01:48.510 --> 01:53.460 +フロンティアモデルを微調整する主な目的は5つある。 + +01:53.460 --> 01:57.390 +これらは基本的にOpenAIのウェブサイトから引用した。 + +01:57.420 --> 02:05.460 +これらは、 GPTのようなものを微調整する訓練を多くの人が受けたいと思う理由についてのOpenAIの理由である。 + +02:05.730 --> 02:16.860 +返答のスタイルやトーンに工夫を凝らしたいのであれば、 返答に皮肉を加える例を挙げています。 + +02:16.860 --> 02:23.790 +もしあなたが、 特定のタイプのフォーマット(構成)を確実に上達させたいのであれば、 そのフォーマットは特定のスタイルや方法、 + +02:23.790 --> 02:27.120 +構造である必要がある。 + +02:27.540 --> 02:34.950 +うーん、 3つ目は、 モデルが難しい、 あるいは挑戦的なプロンプトに従えない場合の修正です。 + +02:34.950 --> 02:40.410 +非常に複雑なことを要求されているのに、 ジョークが通じず、 見逃している。 + +02:40.800 --> 02:47.130 +エッジケースの処理というのは、 モデルの中に時折露呈する欠陥のようなものがあって、 それを修正する必要がある場合に、 + +02:47.130 --> 02:52.020 +何か新しいことを実行することだ。 + +02:52.020 --> 02:57.930 +そして、 これこそが私たちがやろうとしていたことであり、 新しい課題なのだが、 プロンプトで明確に表現するのは難しい。 + +02:57.930 --> 03:05.110 +OpenAIがサイトで強調しているのは、 優れたプロンプトで解決できないことを解決しようということです。 + +03:05.110 --> 03:12.340 +GPT 4やminiのようなものでは、 多くの場合、 プロンプトだけで非常に高いレベルのパフォーマンスを得ることができるので、 + +03:12.340 --> 03:23.560 +できる限りプロンプトに取り組むことから始めるよう、 本当に促しています。 + +03:23.920 --> 03:28.960 +フロンティア・モデルにとって、 それが重要なんだ。 + +03:29.170 --> 03:37.120 +ええと、 私たちはプロンプトの中で、 手元の質問と出力のスタイルを非常に明確に指定することができます。 + +03:37.120 --> 03:45.280 +そして実際、 事前の結果を思い起こせば、 GPT4ミニは適切な構成という点で的確な反応を示していた。 + +03:45.280 --> 03:49.990 +どのケースでも、 数字を引き出せないことはなかった。 + +03:49.990 --> 03:54.850 +そして、 その数字は常に、 誤差の範囲内だった。 + +03:54.850 --> 04:01.330 +推測だから、 課題や出力形式を理解することに問題はなかったんだ。 + +04:01.330 --> 04:21.270 +GPT4とGPT4ミニは、 世界的な知識を持つトレーニングデータのサイズが非常に大きいことを忘れてはならない。 + +04:21.960 --> 04:29.460 +そして、 少し前に話した、 壊滅的な忘却、 忘却と呼ばれる、 より細かい調整を加えることで、 + +04:29.460 --> 04:35.940 +事前のトレーニングで得たより深い知識が損なわれてしまうことがある、 + +04:35.940 --> 04:41.730 +ということがある。 + +04:41.730 --> 04:45.060 +だから、 微調整をすることが必ずしもいいこととは限らない。 + +04:45.300 --> 04:50.910 +このわずかな落ち込みの原因が、 壊滅的な忘れ物なのか、 それとも単に運が悪かっただけなのか、 + +04:50.910 --> 04:58.500 +システム内にノイズがあり、 たまたまテストセットでうまくいかなかっただけなのかはわからない。 + +04:58.800 --> 05:01.860 +うーん、 でも、 状況が改善したようには見えなかった。 + +05:01.860 --> 05:02.970 +これが結論だ。 + +05:02.970 --> 05:15.030 +そして、 私の見解では、 
そして私の理解や実験が示すところでは、 私たちはすでに必要なことを明確に促すという素晴らしい仕事をしていたからだ。 + +05:15.030 --> 05:22.590 +GPT4は、 多くの場合、 すでにそのことをよく理解していたし、 結果の質もすでに非常に良かった。 + +05:23.580 --> 05:29.610 +とはいえ、 あなたにとっての挑戦は、 これに取り組み続けることだ。 + +05:29.610 --> 05:35.940 +ハイパーパラメーターを最適化したり、 試行錯誤したりして、 少しは改善できるようになった。 + +05:36.120 --> 05:37.350 +うーん、 でもそれほどでもない。 + +05:37.350 --> 05:43.320 +そして、 この微調整が、 少なくとも以前より少しはマシになる、 少しはマシになるというところまで到達できないのであれば、 + +05:43.320 --> 05:46.650 +私はショックを受けるだろう。 + +05:46.650 --> 05:48.270 +それがあなたへの挑戦です。 + +05:48.300 --> 05:54.540 +つまり、 OpenAIは大規模なデータセットの投入を推奨しているわけではないのですが、 + +05:54.540 --> 06:02.130 +特に無料で投入できる間は、 より大規模なデータセットを試してみたいと思っています。 + +06:02.130 --> 06:04.650 +1000または2000のトレーニングデータセットを試す。 + +06:04.680 --> 06:06.930 +もう少しエポック数を増やしてみようか。 + +06:06.930 --> 06:10.710 +でも、 何か違うことを試してみてほしい。 + +06:10.710 --> 06:13.560 +他にもハイパーパラメーターを調べることができる。 + +06:13.590 --> 06:15.300 +OpenAIのウェブサイトで調べることができる。 + +06:15.300 --> 06:18.660 +あなたが望むなら、 変えてみることができるカップルがある。 + +06:18.660 --> 06:21.900 +それを同じハイパーパラメーターの辞書に渡すだけだ。 + +06:22.260 --> 06:29.490 +それから、 別のトレーニングデータを入れてみたり、 プロンプトで遊んでみたりすることもできる。 + +06:29.520 --> 06:38.190 +つまり、 OpenAIのウェブサイトで最も強調されているのは、 プロンプトを改善することで最大の効果が得られるということだ。 + +06:38.310 --> 06:43.890 +これは明らかに、 私たちがデータのキュレーションとプロンプトの完成に少し時間を費やしたものですが、 + +06:43.890 --> 06:46.440 +もっともっとできることがあるはずです。 + +06:46.440 --> 06:48.780 +だから、 それも試してみてほしい。 + +06:48.780 --> 06:56.820 +あなたの課題は、 ハイパーパラメーターを最適化し、 プロンプトを弄って、 少なくとももっとうまくやることだ。 + +06:56.820 --> 06:58.200 +自分たちがいた場所を振り返ってみよう。 + +06:58.230 --> 07:01.470 +あなたの挑戦は76より良い結果を出すことだ。 + +07:01.590 --> 07:09.310 +ええと、 76よりいいタイムを出したこともあるんだ。 + +07:09.460 --> 07:16.360 +だから、 あまり多くの変更を加えなくても、 76年よりもいい結果を出すことは可能だとわかっている。 + +07:16.390 --> 07:18.520 +大幅に良くなったわけではないが、 良くなった。 + +07:18.730 --> 07:21.010 +そして、 それがあなたにとっての挑戦でもある。 + +07:21.040 --> 07:24.790 +ぜひそうしてください。 + +07:24.790 --> 07:30.610 +そして、 もし最適化されたプロンプトやハイパーパラメータが得られたら、 + +07:30.610 --> 07:36.040 +コードをプッシュしてPRしてください。 + +07:36.370 --> 07:43.480 +そして、 76歳以上の成績を収めたとき、 それがあなたの挑戦の達成となる。 + +07:45.040 --> 
07:46.240 +分かった。 + +07:46.240 --> 07:49.870 +これで第6週の結論が出た。 + +07:49.870 --> 07:58.690 +AIとLMエンジニアリングをマスターした熟練LMエンジニアになるまでの道のりの4分の3、 + +07:58.690 --> 08:01.900 +75%に達している。 + +08:02.110 --> 08:04.570 +僕と同じように、 君たちも楽しみにしていてほしい。 + +08:04.600 --> 08:10.130 +あなたが学んだことすべてを誇りに思うべき、 素晴らしい進歩だ。 + +08:10.190 --> 08:15.920 +明らかに、 フロンティアモデルのアシストでテキストとコードを生成し、 抱擁顔トランスフォーマー、 + +08:15.920 --> 08:20.570 +ライブラリ、 あー、 ラングチェーンラグでオープンソースモデルを使用している。 + +08:20.720 --> 08:26.960 +そして最近では、 データをキュレーションする問題解決のための5つのステップ戦略だ。 + +08:26.960 --> 08:28.250 +私たちはデータのキュレーションをたくさん行った。 + +08:28.250 --> 08:32.420 +でもね、 LMエンジニアの生活には多くのデータキュレーションが含まれるんだ。 + +08:32.420 --> 08:37.490 +それはコツであり、 最も重要な部分のひとつだ。 + +08:37.490 --> 08:44.810 +確かに、 私が行ったすべての実験において、 データ構造を変えることが何よりも針を動かした。 + +08:44.900 --> 08:48.950 +すでに多くの実験が行われている。 + +08:49.040 --> 08:51.560 +うーん、 でも君なら......もっとうまくやれると思うよ。 + +08:52.070 --> 08:55.100 +ええと、 あなたは従来の機械学習で遊んだことがありますね。 + +08:55.160 --> 08:59.660 +ええと、 ただ、 僕らが楽に勝ってきたという基準値を知るためにね。 + +08:59.810 --> 09:04.460 +あなたはフロンティアモデルの解を作り、 今はフロンティアモデルを微調整している。 + +09:04.460 --> 09:06.660 +だから結果は少し残念だった。 + +09:06.690 --> 09:07.710 +本当だよ。 + +09:07.890 --> 09:11.520 +しかし、 それにもかかわらず、 これはあなた自身のプロジェクトで使えるものだ。 + +09:11.520 --> 09:17.760 +また、 スタイルを変えたいとか、 難しいエッジケースが問題を引き起こしているといった状況であれば、 + +09:17.760 --> 09:20.730 +微調整が答えとなる。 + +09:20.730 --> 09:22.560 +そして今、 少なくともあなたは良いレシピを持っている。 + +09:22.590 --> 09:25.980 +やり方は知っているし、 走りを見たこともあるだろう。 + +09:25.980 --> 09:28.980 +あなたはそのステータスをチェックし、 ウェイトやバイアスを観察してきた。 + +09:28.980 --> 09:31.290 +関係することはすべて知っているはずだ。 + +09:32.490 --> 09:44.160 +さて、 来週からまた新たな一歩を踏み出し、 オープンソースモデルに目を向ける。 + +09:44.160 --> 09:52.290 +オープンソースのモデルを微調整することは、 フロンティアモデルを微調整すること、 オープンソースのモデルを微調整することとはまったく異なる提案である。 + +09:52.290 --> 09:59.970 +私たちがやろうとしているのは、 私たちが扱っている大型モデルよりもはるかに小さいものから始めることだ。 + +09:59.970 --> 10:02.040 +つまり、 まだ何十億ものパラメーターがある。 + +10:02.040 --> 10:13.290 +しかし、 GPT4やGPT4ミニの何兆ものパラメータとは比較にならない。 + +10:13.320 --> 10:20.460 +オープンソースのモデルを微調整し、 Loraと呼ばれるものを使用する予定です。 Loraについては何度か触れたので聞いたことがあるかもしれませんが、 + 
+10:20.460 --> 10:32.820 +おそらくLoraの例をいくつか見たことがあるかもしれませんし、 Loraの同類でLoraの量子化バージョンであるLoraについても聞いたことがあるかもしれません。 + +10:32.850 --> 10:37.770 +私たちはこの2つに取り組み、 最後にはこの2つについて一から十まで知ることになる。 + +10:37.980 --> 10:44.640 +ええと、 次のセッションではベースモデルを選びますが、 それが何かはもうお分かりですね。 + +10:44.640 --> 10:47.310 +でも、 でも、 本当に選ぶんだ。 + +10:47.490 --> 10:53.070 +それがGPT4と競争するモデルになるだろう。 + +10:53.340 --> 10:56.280 +リーダーボードの現在の勝者。 + +10:56.280 --> 10:59.220 +それが来週の課題だ。 + +10:59.250 --> 11:03.480 +大きな挑戦になるだろうけど、 挑戦するのが待ちきれないよ。 + +11:03.480 --> 11:05.940 +そして、 あなたも待ちきれないことを願っている。 + +11:05.940 --> 11:07.140 +それではまた。 diff --git a/week5/community-contributions/subtitles/srts/59473201/ko_KR.srt b/week5/community-contributions/subtitles/srts/59473201/ko_KR.srt new file mode 100755 index 0000000..9431603 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59473201/ko_KR.srt @@ -0,0 +1,433 @@ +WEBVTT + +00:00.740 --> 00:08.690 +사후 조사를 하기 전에 순위표부터 간단히 살펴보죠 + +00:08.690 --> 00:15.710 +기억하실 거예요 평균에 기반해 지속 실행을 했을 때 오류가 있었죠 평균 예측 + +00:15.710 --> 00:18.110 +차이가 146였어요 + +00:18.620 --> 00:22.280 +전통적인 머신 러닝 때는 139까지 올라갔어요 + +00:22.280 --> 00:24.800 +랜덤 숲은 97이었어요 + +00:24.830 --> 00:33.860 +참가자 중 특히 한 명은 이름을 밝힐 수 없어요 127달러였죠 GPT 4라고 불리는 미니였는데 처음 + +00:33.860 --> 00:37.400 +실행했을 때 80달러였던 것 같아요 + +00:37.430 --> 00:43.310 +GPT 4는 76이었고 방금 나온 건 91이었어요 + +00:43.310 --> 00:50.300 +아까도 말했지만 이전 주행에서 개선된 점이 있지만 이유가 + +00:50.300 --> 00:52.580 +뭐든 어쩔 수 없죠 + +00:52.580 --> 00:54.350 +결과를 조작할 순 없어요 + +00:54.350 --> 01:01.130 +안타깝게도 우리가 가장 집중하는 사업 지표가 약간 부족합니다 세밀 + +01:01.160 --> 01:05.670 +조정이 잘못된 방향으로 이뤄진 것 같아요 + +01:05.670 --> 01:07.350 +그 얘길 해보죠 + +01:08.280 --> 01:13.830 +정신이 번쩍 드는 순간이었어요 여정에서 중요한 걸 배웠죠 + +01:14.130 --> 01:17.610 +시간이 좀 걸리네요 + +01:17.640 --> 01:23.670 +잠시 생각을 해봐야겠어요 개척지 모델을 세밀하게 조정하는 목표가 + +01:23.700 --> 01:24.600 +뭘까요? 
+ +01:24.660 --> 01:32.880 +미세 튜닝을 자주 사용하는데 매개 변수가 적은 오픈 소스 모델을 선택해 프런티어 + +01:32.880 --> 01:39.390 +모델에 대적할 수 있도록 데이터 세트에서 훈련할 거예요 + +01:39.420 --> 01:44.760 +하지만 수조 개의 매개 변수가 이미 있고 방대한 데이터 세트를 위해 훈련된 + +01:44.760 --> 01:48.450 +개척자 모델이 있다면 목표가 무엇일까요? + +01:48.510 --> 01:53.460 +개척 시대 모델을 조정한 다섯 가지 주요 목적이 있어요 + +01:53.460 --> 01:57.390 +오픈AI의 웹사이트에서 가져온 것들이에요. + +01:57.420 --> 02:05.460 +오픈라이에서는 GPT 같은 기능을 세밀하게 조정하는 훈련을 실시하죠 + +02:05.730 --> 02:12.960 +반응의 스타일이나 톤을 만들고 싶을 때 반응에 빈정거림을 + +02:12.960 --> 02:16.860 +추가하는 예가 되죠 + +02:16.860 --> 02:23.790 +특정 형식의 형식, 구성체를 신뢰할 수 있게 제작하고 싶다면 특정한 스타일, + +02:23.790 --> 02:27.120 +방법, 구조에 있는 형식이 필요해요 + +02:27.540 --> 02:34.950 +세 번째는 수정입니다 어렵거나 어려운 프롬프트를 따르지 않는 모델이죠 + +02:34.950 --> 02:40.410 +아주 복잡한 걸 요구하는데 Get it은 농담을 이해하지 못해요 + +02:40.800 --> 02:47.130 +첨단 케이스 처리란 때때로 노출되는 결함이 있는 경우죠 그걸 수정하고 + +02:47.130 --> 02:52.020 +새로운 걸 실행해야 하는 모델에서요 Get up + +02:52.020 --> 02:57.930 +그게 우리가 하려던 일이었어요 새로운 작업이었지만 바로 설명하긴 어려웠죠 + +02:57.930 --> 03:05.110 +오픈AI가 사이트에서 강조하는 게 바로 그겁니다 프롬프트만으로 고칠 수 + +03:05.110 --> 03:12.340 +없는 문제를 해결하려고 노력하죠 프롬프트에서 최대한 많은 작업을 하도록 강력하게 + +03:12.340 --> 03:18.310 +권장합니다 GPT 4, 미니 같은 것들은 대부분 프롬프트만으로 + +03:18.310 --> 03:23.560 +아주 높은 수준의 실행을 얻을 수 있거든요 + +03:23.920 --> 03:28.960 +개척지 모델로서는 그게 핵심이에요 + +03:29.170 --> 03:37.120 +이미 당면한 질문과 프롬프트에서 결과물의 스타일을 명시할 수 있어요 + +03:37.120 --> 03:43.900 +이전 결과를 떠올려 보면 GPT 4 미니는 적절한 구조라는 측면에서 정확하게 + +03:43.900 --> 03:45.280 +반응했죠 + +03:45.280 --> 03:49.990 +모든 사건에서 숫자를 추려내지 못한 적이 없어요 + +03:49.990 --> 03:54.850 +숫자는 항상 오차 범위 내에 제품에 근접해 있었어요 + +03:54.850 --> 04:01.330 +추측이었죠, 그래서 챌린지나 출력 형식을 이해하는 데는 문제가 없었어요 + +04:01.330 --> 04:09.210 +GPT4와 GPT4 미니는 엄청난 규모의 훈련 데이터를 가지고 있습니다 + +04:09.210 --> 04:17.640 +세계 지식이 담겨 있죠 500개의 훈련 예시를 더 제공한다고 세계 지식이 + +04:17.670 --> 04:21.270 +달라질 것 같진 않아요 + +04:21.960 --> 04:29.460 +그리고 사소한 문제가 있어요 전에 말했던 심각한 건망증에 대한 + +04:29.460 --> 04:35.940 +건데요 망각이라는 거예요 때때로 미세한 조율을 더하면 + +04:35.940 --> 04:41.730 +훈련 전에 얻은 깊은 지식이 줄어들기도 하죠 + +04:41.730 --> 
04:45.060 +세세한 부분까지 신경 쓰는 게 늘 좋은 건 아니에요 + +04:45.300 --> 04:50.910 +엄청난 망각이 원인이었는지 약간 하락했는지도 모르고 시스템에 + +04:50.910 --> 04:56.940 +소음이 있어서 불운이 따랐는지도 몰라요 시험 세트에서 잘 안 풀린 + +04:56.940 --> 04:58.500 +거죠 + +04:58.800 --> 05:01.860 +하지만 나아진 건 없는 것 같았어요 + +05:01.860 --> 05:02.970 +그게 핵심이에요 + +05:02.970 --> 05:09.420 +제 관점에서 그리고 제가 이해하는 방식과 실험이 보여주는 방식으로 볼 때 필요한 + +05:09.420 --> 05:15.030 +걸 확실히 전달하는 훌륭한 일을 이미 하고 있었기 때문이죠 + +05:15.030 --> 05:22.590 +GPT 4는 이미 많은 사람이 이 점을 잘 이해하고 있었고 그 결과의 수준도 이미 매우 뛰어났죠 + +05:23.580 --> 05:29.610 +하지만 당신이 해야 할 일은 계속 작업하는 거예요 + +05:29.610 --> 05:35.940 +하이퍼파라미터 최적화 혹은 시행착오를 통해 개선하려고 했어요 비트 + +05:36.120 --> 05:37.350 +많이는 아니고요 + +05:37.350 --> 05:43.320 +조금이라도 나아지지 않는다면 정말 충격일 거예요 적어도 이전 비트보다 조금이라도 + +05:43.320 --> 05:46.650 +나아진다면요 get it get it + +05:46.650 --> 05:48.270 +그게 과제군요 + +05:48.300 --> 05:54.540 +더 많은 것을요 오픈AI는 방대한 데이터 집합을 권장하지 + +05:54.540 --> 06:02.130 +않지만 특히 무료일 때는요 저는 더 큰 데이터 집합을 시도해보고 싶어요 + +06:02.130 --> 06:04.650 +훈련 데이터 집합을 1,000이나 2,000으로 해봐요 + +06:04.680 --> 06:06.930 +다른 시대를 살아볼 수도 있고요 + +06:06.930 --> 06:10.710 +해 봤는데 별 차이 없더라고요 다른 걸 시도해 보려고요 + +06:10.710 --> 06:13.560 +다른 hyperperameter도 있어요 + +06:13.590 --> 06:15.300 +오픈아이 웹사이트에서 찾아보세요 + +06:15.300 --> 06:18.660 +원하면 몇 가지 바꿔볼 수도 있어요 + +06:18.660 --> 06:21.900 +하이퍼파라미터 사전에 넣어 보세요 + +06:22.260 --> 06:28.410 +다양한 훈련 데이터를 입력해 볼 수도 있고 프롬프트를 플레이해 볼 수도 + +06:28.440 --> 06:29.490 +있어요 + +06:29.520 --> 06:37.140 +오픈아이가 웹 사이트에서 강조한 점은 입력을 개선함으로써 가장 많은 주행 거리를 얻을 수 + +06:37.140 --> 06:38.190 +있다는 거죠 + +06:38.310 --> 06:43.890 +데이터 큐레이팅과 프롬프트 수정에서 시간을 좀 들인 부분이죠 하지만 여기서 + +06:43.890 --> 06:46.440 +할 수 있는 건 훨씬 더 많아요 + +06:46.440 --> 06:48.780 +그것도 한번 해 보세요 + +06:48.780 --> 06:55.230 +하이퍼파라미터 최적화를 해야 합니다 프롬프트와 관련된 것을 변경해서 더 잘 할 + +06:55.260 --> 06:56.820 +수 있도록 하세요 + +06:56.820 --> 06:58.200 +다시 한 번 볼까요? 
+ +06:58.230 --> 07:01.470 +76점 이상을 받아야 해요 + +07:01.590 --> 07:08.020 +그리고 솔직히 말씀드리면 저는 이전 경기에서 76점보다 높은 점수를 받은 + +07:08.020 --> 07:09.310 +적이 있어요 + +07:09.460 --> 07:16.360 +그래서 저는 많은 변화 없이 76편보다 나은 걸 만들 수 있다는 걸 알아요 + +07:16.390 --> 07:18.520 +많이 나아지진 않았지만 나아졌어요 + +07:18.730 --> 07:21.010 +그게 바로 도전 과제죠 + +07:21.040 --> 07:24.790 +그렇게 해요, 어떻게 돼가는지 알려줘요 Get it + +07:24.790 --> 07:30.610 +최적화된 프롬프트나 하이퍼 매개 변수가 있다면 코드를 푸시해 PR을 하세요 + +07:30.610 --> 07:36.040 +제가 살펴보고 공유할 수 있도록요 어떻게 되는지 보도록 하죠 + +07:36.370 --> 07:43.480 +76점 이상을 받으면 그게 바로 도전 과제의 성적이 되는 거죠 + +07:45.040 --> 07:46.240 +좋아요 + +07:46.240 --> 07:49.870 +이렇게 6주 차 결론이 났네요 + +07:49.870 --> 07:58.690 +당신은 이제 놀랍게도 75%에 달하는 길을 달렸습니다 능숙한 달 착륙 엔지니어로서 인공지능과 + +07:58.690 --> 08:01.900 +달 착륙 공학에 통달했죠 + +08:02.110 --> 08:04.570 +여러분도 저처럼 기뻐해 주시면 좋겠네요 + +08:04.600 --> 08:10.130 +정말 멋진 발전이에요 지금까지 배운 모든 걸 자랑스러워해도 돼요 + +08:10.190 --> 08:15.920 +프런티어 모델의 도움을 받아 텍스트와 코드를 생성하고 얼굴 껴안기 트랜스포머, + +08:15.920 --> 08:20.570 +라이브러리 랑체 같은 오픈 소스 모델을 사용했죠 + +08:20.720 --> 08:26.960 +최근에는 큐레이터 문제 해결 5단계 전략을 다뤘는데요 + +08:26.960 --> 08:28.250 +큐레이팅 데이터를 많이 만들었어요 + +08:28.250 --> 08:32.420 +하지만 달 착륙선 엔지니어의 삶에는 데이터 수집이 많이 포함되죠 + +08:32.420 --> 08:37.490 +Get it은 요령이에요 가장 중요한 부분이죠 + +08:37.490 --> 08:42.710 +제가 했던 모든 실험에서 데이터 구조를 바꾸는 건 바늘을 움직이는 + +08:42.710 --> 08:44.810 +가장 큰 일이었어요 + +08:44.900 --> 08:48.950 +벌써 많은 실험이 post에 올라가 있어요. 
+ +08:49.040 --> 08:51.560 +하지만 더 잘할 수 있잖아요 + +08:52.070 --> 08:55.100 +전통적인 머신 러닝을 사용했죠 + +08:55.160 --> 08:59.660 +Get up 기본값이 얼마인지 확인하려고요 우리가 쉽게 이겼던 점수죠 + +08:59.810 --> 09:04.460 +개척 시대 해결책을 내놓았고 이젠 미세한 개척 시대 모델이 됐죠 + +09:04.460 --> 09:06.660 +그래서 결과가 좀 실망스러웠죠 + +09:06.690 --> 09:07.710 +현실적이어야 해요 + +09:07.890 --> 09:11.520 +그럼에도 불구하고 여러분의 프로젝트에 사용할 수 있는 거죠 + +09:11.520 --> 09:17.760 +스타일을 바꾸고 싶다거나 어려운 문제가 발생했을 + +09:17.760 --> 09:20.730 +때 미세 조정이 답이죠 + +09:20.730 --> 09:22.560 +그래도 이제 좋은 레시피가 생겼잖아요 + +09:22.590 --> 09:25.980 +어떻게 하는지 알고 연승하는 것도 봤죠 + +09:25.980 --> 09:28.980 +상태를 확인하고 무게와 편향성을 확인했죠 + +09:28.980 --> 09:31.290 +다 알고 계시잖아요 + +09:32.490 --> 09:42.450 +다음 주엔 새로운 관점으로 여행의 새로운 부분을 시작합니다 오픈 소스 + +09:42.450 --> 09:44.160 +모델로요 + +09:44.160 --> 09:51.090 +오픈 소스 모델을 미세 조정하는 건 개척지 모델을 미세 조정하는 것과 아주 다른 + +09:51.090 --> 09:52.290 +제의예요 + +09:52.290 --> 09:59.970 +우리가 하려는 건 우리가 다루는 큰 모델보다 훨씬 작은 모델이에요 + +09:59.970 --> 10:02.040 +여전히 수십억 개의 매개 변수가 있을 거예요 + +10:02.040 --> 10:09.300 +일반적으로 보면 여전히 큰 모델이지만 GPT4와 GPT4 미니에 있는 수조 + +10:09.300 --> 10:13.290 +개의 매개 변수와 비교할 수는 없어요 + +10:13.320 --> 10:20.460 +오픈 소스 모델을 세밀히 조정하고 로라라는 걸 사용할 거예요 들어본 적 있거나 있을 + +10:20.460 --> 10:24.480 +거예요 제가 몇 번 언급했거든요 하지만 로라의 + +10:24.480 --> 10:30.810 +예시를 본 적도 있을 거예요 로라의 사촌인 로라도 들어봤을 거예요 양자화된 + +10:30.840 --> 10:32.820 +버전의 로라죠 + +10:32.850 --> 10:37.770 +둘 다 연습할 거예요 끝날 때쯤엔 둘 다 알게 될 거예요 + +10:37.980 --> 10:44.640 +다음 세션에서는 기본 모델을 선택할 겁니다 이미 알고 계시겠지만요 + +10:44.640 --> 10:47.310 +하지만 진짜 선택해야 해요 + +10:47.490 --> 10:53.070 +GPT 4와 경쟁할 수 있는 모델이 될 거예요 + +10:53.340 --> 10:56.280 +현재 순위표의 우승자는요? 
+ +10:56.280 --> 10:59.220 +다음 주 과제는 그거예요 + +10:59.250 --> 11:03.480 +큰 도전이 되겠지만 빨리 시작하고 싶어요 + +11:03.480 --> 11:05.940 +당신도 기다려주지 않으면 좋겠네요 + +11:05.940 --> 11:07.140 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59503703/en_US.srt b/week5/community-contributions/subtitles/srts/59503703/en_US.srt new file mode 100755 index 0000000..4ef70e4 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59503703/en_US.srt @@ -0,0 +1,166 @@ +WEBVTT + +00:00.800 --> 00:01.310 +Well. + +00:01.310 --> 00:02.450 +Hello there everybody. + +00:02.450 --> 00:06.770 +I am so grateful that you've made it through to the start of week seven. + +00:06.800 --> 00:10.670 +It is tremendous and I plan to not disappoint. + +00:10.670 --> 00:12.530 +I will make it worth your while. + +00:12.530 --> 00:13.190 +It's going. + +00:13.220 --> 00:15.830 +There's some really great content ahead. + +00:15.830 --> 00:22.250 +You may be feeling a bit of an anti-climax from what happened last week with our disappointing results. + +00:22.250 --> 00:25.760 +Fine tuning frontier model after so much hype. + +00:25.850 --> 00:31.040 +But what I can tell you is it's like a good movie where there's a period of time where things feel tense + +00:31.040 --> 00:32.210 +and disappointing. + +00:32.420 --> 00:35.870 +There's going to be redemption, there's going to be redemption. + +00:35.870 --> 00:41.480 +We will see if you're like me and you kept hammering away after after those results, you will have, + +00:41.480 --> 00:45.260 +I expect, got to a point where you have got some improvements. + +00:45.260 --> 00:50.960 +You get to a point where you can fine tune and actually get better results than we originally got with + +00:51.200 --> 00:58.910 +the Untuned version, but not significantly so, but a little bit better than I did in the last day. + +00:58.910 --> 01:02.260 +But anyways, for now we are moving swiftly onwards. 
+ +01:02.260 --> 01:08.860 +We are launching into week seven and what a week do I have in store for you? + +01:09.010 --> 01:10.780 +Uh, it is a massive week. + +01:10.780 --> 01:12.400 +It is a really massive week. + +01:12.400 --> 01:15.220 +It is an advanced week where we're raising the bar. + +01:15.220 --> 01:22.600 +We're getting to a point where you're now acquiring advanced skills in this kind of deep learning field + +01:22.600 --> 01:23.920 +of of llms. + +01:23.920 --> 01:29.860 +And as we move more deeply into training, what you can already do, of course, you can already generate + +01:29.890 --> 01:35.350 +text and code with frontier models, with APIs, and with hugging face open source code. + +01:35.350 --> 01:41.080 +You can build rag pipelines and then you can now build data sets. + +01:41.380 --> 01:47.260 +Quite a long time we spent on data sets and baseline models and frontier models that you can fine tune, + +01:47.260 --> 01:49.450 +albeit with not amazing results. + +01:49.450 --> 01:53.830 +For our use case today we get advanced. + +01:53.860 --> 01:59.160 +We talk about using a technique called Lora for fine tuning open source models. + +01:59.160 --> 02:00.870 +We describe quantization. + +02:00.870 --> 02:09.480 +We talk about Q Laura, and we're getting into three key hyperparameters R alpha and target modules, + +02:09.780 --> 02:13.980 +which if you've not encountered them before, sounds like something straight out of Star Trek, but + +02:13.980 --> 02:17.460 +in fact will make complete sense by the end of today. + +02:17.580 --> 02:23.550 +And before we launch into it, I do need to take a moment to remind you of your eight week plan, and + +02:23.580 --> 02:29.130 +this may feel like this is boring for you at this point, but I think it's important to take pride in + +02:29.130 --> 02:33.960 +your accomplishments and to remember where everything that you've learned, you started. 
+ +02:34.290 --> 02:41.220 +When we we first had our session to talk about, uh, uh, the, the opportunity with Llms and Frontier + +02:41.250 --> 02:49.290 +models and we compared them, um, we then looked at using the UIs, we use Gradio, we used agent ization + +02:49.290 --> 02:50.460 +Multi-modality. + +02:50.490 --> 02:55.530 +Then we got to hugging face the pipelines and then Tokenizers and models. + +02:55.530 --> 03:01.900 +Then in week four, we generated code by selecting LMS for the task. + +03:01.900 --> 03:04.060 +In week five, that was rag week. + +03:04.060 --> 03:09.580 +We used the wonderful Chroma and Lang chain to build some rag pipelines there, and hopefully you did + +03:09.580 --> 03:12.580 +the project, which I'm really excited to see what people do there. + +03:12.820 --> 03:16.510 +Uh, and then last week we fine tuned a frontier model. + +03:16.510 --> 03:17.980 +Uh, now you know how to do it. + +03:17.980 --> 03:19.660 +You understand about creating the data sets. + +03:19.660 --> 03:23.950 +You know how it works, and you understand when it's most applicable, when you're dealing with that + +03:23.950 --> 03:28.870 +kind of nuance of tone, and perhaps when it's not so applicable, in our case, when you're trying + +03:28.870 --> 03:33.820 +to actually build in functionality because a frontier model is already so massive. + +03:34.270 --> 03:41.470 +Um, so now we arrive at week seven, we are going to be fine tuning an open source model, one that + +03:41.470 --> 03:46.540 +is significantly smaller than a frontier model, and see where we get to with that. + +03:46.540 --> 03:49.120 +And week eight is where it all comes together. + +03:49.210 --> 03:55.300 +And so with that introduction, the main topic of the day is Laura, and we will launch right into that. 
diff --git a/week5/community-contributions/subtitles/srts/59503703/ja_JP.srt b/week5/community-contributions/subtitles/srts/59503703/ja_JP.srt new file mode 100755 index 0000000..0c77d8c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59503703/ja_JP.srt @@ -0,0 +1,136 @@ +WEBVTT + +00:00.800 --> 00:01.310 +まあね。 + +00:01.310 --> 00:02.450 +皆さん、 こんにちは。 + +00:02.450 --> 00:06.770 +第7週が始まるまでよく頑張ってくれた。 + +00:06.800 --> 00:10.670 +期待を裏切らないつもりだ。 + +00:10.670 --> 00:12.530 +その価値は必ずある + +00:12.530 --> 00:13.190 +そうだ。 + +00:13.220 --> 00:15.830 +この先、 本当に素晴らしいコンテンツが待っている。 + +00:15.830 --> 00:22.250 +先週の不本意な結果を受け、 少しアンチクライマックスを感じているかもしれない。 + +00:22.250 --> 00:25.760 +あれだけ大騒ぎしたフロンティアモデルの微調整。 + +00:25.850 --> 00:32.210 +ただ、 言えることは、 良い映画と同じで、 緊張感や失望を感じる期間があるということだ。 + +00:32.420 --> 00:35.870 +救済がある、 救済があるんだ。 + +00:35.870 --> 00:41.480 +もしあなたが私のように、 あの結果が出たあとも打ち込み続けていたのなら、 + +00:41.480 --> 00:45.260 +きっといくつかの改善点が見えてくるはずだ。 + +00:45.260 --> 00:58.910 +微調整ができるようになり、 実際にアンチューンド・ヴァージョンで得た結果よりもいい結果が得られるようになった。 + +00:58.910 --> 01:02.260 +でもとにかく、 今は前進あるのみだ。 + +01:02.260 --> 01:08.860 +私たちは7週目に突入し、 皆さんにどんな1週間をお届けできるだろうか? 
+ +01:09.010 --> 01:10.780 +今週は大変な一週間だ。 + +01:10.780 --> 01:12.400 +本当に大規模な1週間だ。 + +01:12.400 --> 01:15.220 +ハードルを上げる上級者向けの週だ。 + +01:15.220 --> 01:23.920 +私たちは今、 あなたがこのようなllmsのディープラーニング分野で高度なスキルを身につけるところまで来ています。 + +01:23.920 --> 01:29.860 +もちろん、 フロンティアモデルやAPI、 ハグフェイスのオープンソースコードを使えば、 + +01:29.890 --> 01:35.350 +テキストやコードを生成することはできる。 + +01:35.350 --> 01:41.080 +ラグ・パイプラインを構築し、 データセットを構築することができる。 + +01:41.380 --> 01:49.450 +データセットとベースラインモデル、 そして微調整が可能なフロンティアモデルにかなりの時間を費やした。 + +01:49.450 --> 01:53.830 +今日のユースケースは高度なものだ。 + +01:53.860 --> 01:59.160 +オープンソースのモデルを微調整するためにLoraというテクニックを使うことについて話す。 + +01:59.160 --> 02:00.870 +量子化について説明する。 + +02:00.870 --> 02:17.460 +Qローラについて話し、 3つの重要なハイパーパラメーター、 Rアルファとターゲットモジュールについて説明する。 + +02:17.580 --> 02:23.550 +この時点では退屈に感じるかもしれないが、 自分の成果に誇りを持ち、 + +02:23.580 --> 02:33.960 +自分が学んだことすべてがどこから始まったのかを思い出すことが重要だと思う。 + +02:34.290 --> 02:41.220 +最初にLlmsとフロンティアのモデルについてセッションを行い、 両者を比較しました。 + +02:41.250 --> 02:50.460 +その後、 UIを使用することを検討し、 Gradioを使用し、 エージェント化マルチモダリティを使用しました。 + +02:50.490 --> 02:55.530 +その後、 パイプライン、 そしてトークナイザーとモデルをハグすることになった。 + +02:55.530 --> 03:01.900 +そして4週目には、 タスクにLMSを選択してコードを生成した。 + +03:01.900 --> 03:04.060 +第5週はボロ負けウィークだった。 + +03:04.060 --> 03:12.580 +私たちは素晴らしいクロマとラング・チェーンを使ってラグ・パイプラインを構築した。 + +03:12.820 --> 03:16.510 +そして先週、 フロンティアモデルを微調整したんだ。 + +03:16.510 --> 03:17.980 +ああ、 これでやり方がわかっただろう。 + +03:17.980 --> 03:19.660 +データセットの作成については理解しているはずだ。 + +03:19.660 --> 03:23.950 +フロンティア・モデルがすでに巨大であるため、 + +03:23.950 --> 03:33.820 +機能性を実際に組み込もうとする場合などだ。 + +03:34.270 --> 03:41.470 +さて、 7週目に入ったところで、 フロンティア・モデルよりもかなり小さいオープンソース・モデルを微調整し、 + +03:41.470 --> 03:46.540 +その到達点を見ることにする。 + +03:46.540 --> 03:49.120 +そして、 第8週がすべての集大成となる。 + +03:49.210 --> 03:55.300 +ということで、 今日のメイントピックはローラ。 diff --git a/week5/community-contributions/subtitles/srts/59503703/ko_KR.srt b/week5/community-contributions/subtitles/srts/59503703/ko_KR.srt new file mode 100755 index 0000000..f4080cb --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59503703/ko_KR.srt @@ 
-0,0 +1,163 @@ +WEBVTT + +00:00.800 --> 00:01.310 +글쎄요 + +00:01.310 --> 00:02.450 +안녕하세요, 여러분 + +00:02.450 --> 00:06.770 +7주 차까지 살아남아서 정말 다행이에요 + +00:06.800 --> 00:10.670 +정말 굉장하네요 실망시키지 않을 거예요 + +00:10.670 --> 00:12.530 +보상은 충분히 해드리죠 + +00:12.530 --> 00:13.190 +가고 있어요 + +00:13.220 --> 00:15.830 +정말 멋진 콘텐츠가 기다리고 있어요 + +00:15.830 --> 00:22.250 +지난주 실망스러운 결과로 비트가 좀 가라앉았을 거예요 + +00:22.250 --> 00:25.760 +과대광고 끝에 미개척 모델을 다듬었어요 + +00:25.850 --> 00:31.040 +하지만 긴장되고 실망스러운 시기가 있는 좋은 영화와 같다는 건 말씀드릴 + +00:31.040 --> 00:32.210 +수 있어요 + +00:32.420 --> 00:35.870 +구원을 받을 거예요 구원을 받을 거예요 + +00:35.870 --> 00:41.480 +저처럼 계속 노력하는지 두고 봐야죠 결과를 보고도 어느 + +00:41.480 --> 00:45.260 +정도 나아졌는지 확인할 거예요 + +00:45.260 --> 00:50.960 +미세 조정으로 원래의 비튜니티 버전보다 더 나은 결과를 + +00:51.200 --> 00:58.910 +얻을 수 있는 지점에 이르렀죠 큰 차이는 없지만 어제보다 약간 나아졌어요 + +00:58.910 --> 01:02.260 +어쨌든 지금은 빨리 진행할게요 + +01:02.260 --> 01:08.860 +이제 7주 차예요 어떤 주를 준비했다고 했죠? + +01:09.010 --> 01:10.780 +엄청난 한 주예요 + +01:10.780 --> 01:12.400 +정말 중요한 한 주예요 + +01:12.400 --> 01:15.220 +기대치를 높이는 한 주예요 + +01:15.220 --> 01:22.600 +llm의 딥 러닝 영역에서 고급 기술을 습득하는 단계에 + +01:22.600 --> 01:23.920 +이르렀죠 + +01:23.920 --> 01:29.860 +훈련에 더 깊이 들어가면 이미 할 수 있는 건 프론티어 모델, API + +01:29.890 --> 01:35.350 +페이스 오픈 소스 코드로 텍스트와 코드를 생성할 수 있어요 + +01:35.350 --> 01:41.080 +래그 파이프라인을 만들 수 있고 데이터 세트를 만들 수도 있어요 + +01:41.380 --> 01:47.260 +데이터 세트와 기준 모델 개척 모델에 꽤 오랜 시간을 투자했어요 미세 조정할 수 있는 + +01:47.260 --> 01:49.450 +모델이라 결과가 좋진 않았죠 + +01:49.450 --> 01:53.830 +오늘 유스케이스는 Get 케이스예요 + +01:53.860 --> 01:59.160 +로라라는 기술을 이용해 오픈 소스 모델을 미세 조정하는 거죠 + +01:59.160 --> 02:00.870 +퀀타이즈를 설명할게요 + +02:00.870 --> 02:09.480 +Q 로라에 관해 얘기하면서 세 가지 핵심 하이퍼파라미터 R 알파와 표적 모듈에 관해 얘기할 겁니다 처음 마주치는 + +02:09.780 --> 02:13.980 +거라면 스타트렉에서 튀어나온 것 같지만 오늘 저녁이면 + +02:13.980 --> 02:17.460 +완전히 이해하게 될 거예요 + +02:17.580 --> 02:23.550 +시작하기 전에 8주간의 계획을 다시 한번 짚고 넘어가죠 지금 이 시점에서는 지루하게 + +02:23.580 --> 02:29.130 +느껴질 수도 있지만 자신의 성취에 자부심을 갖는 게 중요하다고 생각해요 모든 + +02:29.130 --> 02:33.960 +걸 어디서 배웠는지 어디서 시작했는지 기억해야 하죠 + +02:34.290 --> 
02:41.220 +처음 모였을 때 Lms와 프론티어 모델에 대해 이야기하고 두 가지를 비교했습니다. + +02:41.250 --> 02:50.460 +UI 사용도 살펴보고 그래디오와 에이전트 Iization 다중 모듈도 사용했죠. + +02:50.490 --> 02:55.530 +그다음은 페이스 더 파이프라인 포옹 토큰라이저와 모델이었죠 + +02:55.530 --> 03:01.900 +4주 차에는 LMS를 선택해 코드를 생성했죠 작업을 위해서요 + +03:01.900 --> 03:04.060 +5주 차에는 엉망이었어요 + +03:04.060 --> 03:09.580 +크로마와 랑 체인을 이용해 랙 파이프라인을 지었어요 프로젝트를 맡아주셨으면 해요 + +03:09.580 --> 03:12.580 +사람들이 거기서 뭘 할지 정말 기대돼요 + +03:12.820 --> 03:16.510 +지난주에는 개척 시대 모델을 미세 조정했어요 + +03:16.510 --> 03:17.980 +이제 어떻게 하는지 알죠? + +03:17.980 --> 03:19.660 +데이터 집합을 만드는 걸 이해하죠 + +03:19.660 --> 03:23.950 +어떻게 작동하는지 알고 언제 가장 적합한지 알죠 그런 + +03:23.950 --> 03:28.870 +뉘앙스를 다룰 때요 우리 경우에 별로 적합하지 않을 때는 기능성을 + +03:28.870 --> 03:33.820 +구축할 때죠 프런티어 모델은 이미 너무 크니까요 + +03:34.270 --> 03:41.470 +이제 7주 차인데 오픈 소스 모델을 미세 조정할 거예요 프론티어 모델보다 + +03:41.470 --> 03:46.540 +훨씬 작은 모델이죠 어떻게 되는지 보죠 get it + +03:46.540 --> 03:49.120 +8주 차부터 모든 게 합쳐지죠 + +03:49.210 --> 03:55.300 +이 소개로 오늘의 주요 주제는 로라입니다 바로 시작해 보죠 diff --git a/week5/community-contributions/subtitles/srts/59503705/en_US.srt b/week5/community-contributions/subtitles/srts/59503705/en_US.srt new file mode 100755 index 0000000..25f1ea6 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59503705/en_US.srt @@ -0,0 +1,247 @@ +WEBVTT + +00:00.950 --> 00:05.870 +And so now we talk about quantization the q and q Laura. + +00:05.900 --> 00:07.730 +Q stands for quantized quantized. + +00:07.760 --> 00:08.420 +Laura. + +00:08.420 --> 00:12.620 +And I did mention quantization briefly, I believe in week three. + +00:12.620 --> 00:14.480 +So you may remember some of this. + +00:14.480 --> 00:17.540 +But now now we'll talk about it for reals. + +00:18.080 --> 00:20.630 +So here's the problem. + +00:20.630 --> 00:28.700 +The we're working with these smaller models, the 8 billion parameter version of llama 3.1. 
+ +00:28.700 --> 00:34.580 +And we've come up with this clever scheme that Laura for, for working with smaller dimensional matrices + +00:34.940 --> 00:36.950 +so that we can get more memory. + +00:37.130 --> 00:42.740 +But the the problem is that even that base model, even the small version of it, the 8 billion version + +00:42.740 --> 00:46.730 +of it, will take up 32GB of Ram. + +00:46.730 --> 00:52.880 +That's, uh, 8 billion floating point numbers, which are each 32 bits long. + +00:53.000 --> 00:56.480 +Uh, and so it's going to fill up a GPU. + +00:56.510 --> 01:03.380 +In fact, the cheap GPUs that will be using on T4 boxes only have 15GB of memory in them, so it won't + +01:03.380 --> 01:05.530 +even fit the base model itself. + +01:05.530 --> 01:08.740 +We're going to be out of memory right away, so that is a problem. + +01:08.770 --> 01:13.570 +Lora is very useful in making things better for us, but it's not good enough because we can't even + +01:13.570 --> 01:15.880 +fit the base model itself in memory. + +01:15.880 --> 01:22.870 +Because 8 billion might be a small size of a model in some worlds, but it's still an enormous number + +01:22.870 --> 01:23.920 +of parameters. + +01:24.190 --> 01:27.940 +And so that that then gives us a challenge. + +01:28.240 --> 01:31.810 +So this quite surprising discovery was made. + +01:31.810 --> 01:33.970 +That is what we will be working with. + +01:34.120 --> 01:37.870 +And at first it sounds almost too good to be true. + +01:37.900 --> 01:40.480 +It sounds like this is a way to have your cake and eat it. + +01:40.480 --> 01:41.950 +And it kind of is. + +01:42.190 --> 01:51.310 +So the, the idea that that, that some people had was, okay, so we've got 8 billion parameters. + +01:51.310 --> 01:57.370 +If we try and have fewer parameters like we have a 4 billion parameter model, we lose a lot of the + +01:57.370 --> 01:58.600 +power of the model. 
+ +01:58.600 --> 02:01.900 +Those 8 billion parameters give us lots of knobs to turn. + +02:01.930 --> 02:04.270 +It's in this very clever architecture. + +02:04.270 --> 02:05.800 +It gives us a lot of power. + +02:05.830 --> 02:06.370 +All right. + +02:06.370 --> 02:09.080 +So let's not cut down the number of parameters. + +02:09.080 --> 02:15.200 +Instead of doing that, let's just reduce the precision of each of these parameters. + +02:15.230 --> 02:21.050 +It's like saying instead of being able to turn it through a very sort of finely grinded wheel, we're + +02:21.050 --> 02:25.100 +going to make it go click, click, click, click through a few settings. + +02:25.310 --> 02:31.070 +Uh, and so that that was a thinking let's just reduce the precision of each of these weights but keep + +02:31.070 --> 02:32.450 +the same number of weights. + +02:32.480 --> 02:37.010 +Now you might think logically all right, but you're just cutting the amount of information. + +02:37.010 --> 02:41.390 +Surely it's going to be if you have half the amount of information, it's going to be quite similar + +02:41.390 --> 02:43.730 +to having half the number of weights. + +02:43.760 --> 02:46.280 +Uh, and it turns out that that's not the case. + +02:46.280 --> 02:52.520 +For whatever reason, if you lower the precision, you do get some reduction in quality of the neural + +02:52.520 --> 02:55.100 +network, but not as much as you might think. + +02:55.100 --> 02:57.920 +It still retains a lot of its power. + +02:58.160 --> 03:04.700 +And it turns out that this is just a great trick that allows you to fit bigger models in memory with + +03:04.730 --> 03:06.560 +the same number of parameters. + +03:06.560 --> 03:10.220 +Just lower precision means that it takes up less memory. + +03:10.580 --> 03:14.210 +So it's surprising it works remarkably well. 
+ +03:14.300 --> 03:21.020 +And in fact, you could take the 32 bit floating point numbers that you normally have and reduce it + +03:21.020 --> 03:26.000 +all the way down to eight bit numbers, and you still get good performance. + +03:26.000 --> 03:29.540 +And then and now this is where it starts to sound really crazy. + +03:29.630 --> 03:32.480 +You can reduce it all the way down to four bits. + +03:32.480 --> 03:35.210 +So each number is just a four bit number. + +03:35.240 --> 03:40.550 +If you think of that from from an integer point of view, that it's as if each number is going from + +03:40.550 --> 03:42.950 +0 to 15 and that's it. + +03:43.160 --> 03:45.020 +Just just in whole numbers. + +03:45.560 --> 03:47.510 +That's how low the precision is. + +03:47.510 --> 03:51.620 +Like a click that just has has 16 settings on it. + +03:52.010 --> 03:55.190 +Um, and you still get pretty good performance. + +03:55.340 --> 03:55.670 +Sure. + +03:55.670 --> 04:00.770 +You do see a bit of a, of a drop in quality, but only a bit. + +04:01.010 --> 04:03.200 +Um, and so this was the, the intuition. + +04:03.200 --> 04:09.200 +And this, of course, dramatically reduces the memory requirement and allows one to fit bigger models + +04:09.200 --> 04:10.310 +in memory. + +04:10.790 --> 04:14.420 +There are a couple of of minor technical details that I'll mention. + +04:14.780 --> 04:17.810 +One of them is that I just talked about the click switch. + +04:17.810 --> 04:20.510 +You can think of it as being like a number from 0 to 15. + +04:20.510 --> 04:28.160 +Typically, it's in fact not interpreted as an integer, but as a four bits are used to to be considered + +04:28.160 --> 04:31.940 +as as floating point numbers just with lower granularity. + +04:31.940 --> 04:34.970 +And you'll see that in reality in an example. 
+ +04:35.360 --> 04:39.740 +And the other thing to to point out, which is something I didn't understand early on when when color + +04:39.770 --> 04:47.030 +first came out, is that this is the quantizing the base model, but it's not quantizing the, the, + +04:47.030 --> 04:53.390 +the, the Lora adapters, they will still be 32 bit floats as you will see. + +04:53.570 --> 04:59.630 +Um, so we're just talking about reducing the precision of the base model, this enormous great base + +04:59.630 --> 05:02.300 +model so that we can fit it in memory. + +05:02.300 --> 05:04.490 +So that is cu Lora. + +05:04.520 --> 05:05.840 +That's quantization. + +05:05.840 --> 05:10.250 +It's going to feel more real when we see it in the lab in just a second. + +05:10.250 --> 05:16.640 +But first I want to talk to you about three important hyperparameters in the next video. diff --git a/week5/community-contributions/subtitles/srts/59503705/ja_JP.srt b/week5/community-contributions/subtitles/srts/59503705/ja_JP.srt new file mode 100755 index 0000000..de1819c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59503705/ja_JP.srt @@ -0,0 +1,211 @@ +WEBVTT + +00:00.950 --> 00:05.870 +そして今、 qとqラウラの量子化について話している。 + +00:05.900 --> 00:07.730 +Qは量子化された量子化の略。 + +00:07.760 --> 00:08.420 +ローラ + +00:08.420 --> 00:12.620 +確か3週目に量子化について少し触れた。 + +00:12.620 --> 00:14.480 +だから、 覚えていることもあるだろう。 + +00:14.480 --> 00:17.540 +でも、 これからは本音で話そう。 + +00:18.080 --> 00:20.630 +問題はここからだ。 + +00:20.630 --> 00:28.700 +我々はこのような小さなモデル、 つまり80億パラメータ版のラマ3で作業している。 1. 
+ +00:28.700 --> 00:36.950 +私たちは、 より少ない次元の行列を扱うことで、 より多くのメモリを確保できるよう、 ローラという賢いスキームを考え出した。 + +00:37.130 --> 00:42.740 +しかし問題は、 そのベースモデルでさえ、 その小型版、 80億版でさえ、 + +00:42.740 --> 00:46.730 +32GBのラムを消費するということだ。 + +00:46.730 --> 00:52.880 +これは80億個の浮動小数点数で、 それぞれ32ビット長だ。 + +00:53.000 --> 00:56.480 +それでGPUがいっぱいになるんだ。 + +00:56.510 --> 01:05.530 +実際、 T4ボックスで使用される安価なGPUには15GBのメモリしか搭載されていないため、 ベースモデル自体にすら適合しない。 + +01:05.530 --> 01:08.740 +すぐにメモリ不足になるから、 それは問題だ。 + +01:08.770 --> 01:13.570 +ローラは僕らにとって物事をより良くするのにとても役に立つが、 ベースモデル自体をメモリに収めることもできないので、 + +01:13.570 --> 01:15.880 +十分とは言えない。 + +01:15.880 --> 01:23.920 +というのも、 80億という数字は、 ある世界では小さなモデルサイズかもしれないが、 それでも膨大な数のパラメーターがあることに変わりはないからだ。 + +01:24.190 --> 01:27.940 +そして、 それが私たちに課題を与えている。 + +01:28.240 --> 01:31.810 +そこで驚くべき発見がなされた。 + +01:31.810 --> 01:33.970 +それが私たちの仕事だ。 + +01:34.120 --> 01:37.870 +そして、 最初は、 それは本当であるにはあまりにも良すぎるように聞こえる。 + +01:37.900 --> 01:40.480 +これはケーキを食べながらケーキを食べる方法のようだ。 + +01:40.480 --> 01:41.950 +そして、 それはある種のものだ。 + +01:42.190 --> 01:51.310 +つまり、 80億のパラメーターがあるのだから、 80億のパラメーターを使えばいいという考え方もある。 + +01:51.310 --> 01:58.600 +40億のパラメーターを持つモデルのようにパラメーターを少なくしようとすれば、 モデルのパワーの多くを失うことになる。 + +01:58.600 --> 02:01.900 +この80億のパラメーターは、 私たちにたくさんのノブを与えてくれる。 + +02:01.930 --> 02:04.270 +それはこの非常に巧妙な建築にある。 + +02:04.270 --> 02:05.800 +大きな力を与えてくれる。 + +02:05.830 --> 02:06.370 +分かった。 + +02:06.370 --> 02:09.080 +だから、 パラメーターの数を減らすのはやめよう。 + +02:09.080 --> 02:15.200 +そうする代わりに、 それぞれのパラメーターの精度を下げよう。 + +02:15.230 --> 02:21.050 +非常に細かく砥石を回すことができる代わりに、 カチッ、 カチッ、 カチッ、 カチッ、 カチッ、 カチッ、 カチッ、 カチッ、 カチッ、 カチッ、 カチッ、 カチッ、 + +02:21.050 --> 02:25.100 +カチッ、 カチッ、 カチッ、 カチッ、 カチッ、 といくつかの設定を通過させることができるようにするんだ。 + +02:25.310 --> 02:32.450 +それで、 それぞれのウエイトの精度を下げて、 ウエイトの数は同じにしようと考えたんだ。 + +02:32.480 --> 02:37.010 +論理的に考えればそうかもしれないが、 情報量を減らしているだけだ。 + +02:37.010 --> 02:43.730 +確かに、 情報量が半分になれば、 ウエイトの数が半分になるのと似たようなものだろう。 + +02:43.760 --> 02:46.280 +そうではないことがわかったんだ。 + +02:46.280 --> 02:52.520 +どんな理由であれ、 精度を下げればニューラルネットワークの質は多少落ちるが、 + +02:52.520 --> 02:55.100 +思ったほどではない。 + +02:55.100 
--> 02:57.920 +まだ多くのパワーを保っている。 + +02:58.160 --> 03:06.560 +そして、 これは同じパラメーター数でより大きなモデルをメモリにフィットさせるための素晴らしいトリックであることがわかった。 + +03:06.560 --> 03:10.220 +精度が低いというだけで、 より少ないメモリしか消費しない。 + +03:10.580 --> 03:14.210 +だから、 驚くほどうまく機能している。 + +03:14.300 --> 03:21.020 +実際、 通常の32ビットの浮動小数点数を8ビットに減らしても、 + +03:21.020 --> 03:26.000 +十分なパフォーマンスを得ることができる。 + +03:26.000 --> 03:29.540 +そして、 ここからが本当にクレイジーに聞こえ始めるところだ。 + +03:29.630 --> 03:32.480 +4ビットまで減らすことができる。 + +03:32.480 --> 03:35.210 +つまり、 それぞれの数字は単なる4ビットの数字なのだ。 + +03:35.240 --> 03:42.950 +整数の観点から考えるなら、 それぞれの数字が0から15まで進んでいるようなものだ。 + +03:43.160 --> 03:45.020 +ただ、 整数で。 + +03:45.560 --> 03:47.510 +それだけ精度が低いということだ。 + +03:47.510 --> 03:51.620 +16の設定があるだけのクリックとかね。 + +03:52.010 --> 03:55.190 +うーん、 それでもかなりいいパフォーマンスが得られる。 + +03:55.340 --> 03:55.670 +もちろんだ。 + +03:55.670 --> 04:00.770 +品質が落ちるのは少しだが、 ほんの少しだ。 + +04:01.010 --> 04:03.200 +それで、 これが直感だったんだ。 + +04:03.200 --> 04:10.310 +そしてもちろん、 これによって必要なメモリが劇的に削減され、 より大きなモデルをメモリに収めることができるようになる。 + +04:10.790 --> 04:14.420 +細かい技術的なことをいくつか挙げておこう。 + +04:14.780 --> 04:17.810 +そのひとつが、 先ほど話したクリックスイッチだ。 + +04:17.810 --> 04:20.510 +0から15までの数字のようなものだと考えればいい。 + +04:20.510 --> 04:31.940 +一般的には、 整数として解釈されることはなく、 4ビットが浮動小数点数として使用される。 + +04:31.940 --> 04:34.970 +そして、 現実の例を見ればわかるだろう。 + +04:35.360 --> 04:39.740 +そして、 もうひとつ指摘しておきたいのは、 カラーが初めて登場した当初は、 + +04:39.770 --> 04:53.390 +私が理解していなかったことなのですが、 これはベースモデルを量子化しているのであって、 ローラ・アダプターを量子化しているわけではないということです。 + +04:53.570 --> 05:02.300 +つまり、 ベースモデルの精度を下げて、 この巨大で素晴らしいベースモデルをメモリに収まるようにしようという話なんだ。 + +05:02.300 --> 05:04.490 +それがクー・ロラなんだ。 + +05:04.520 --> 05:05.840 +それが量子化だ。 + +05:05.840 --> 05:10.250 +あと少しでラボで見ることができるのだから、 よりリアルに感じられるだろう。 + +05:10.250 --> 05:16.640 +その前に、 次のビデオで3つの重要なハイパーパラメーターについてお話ししたいと思います。 diff --git a/week5/community-contributions/subtitles/srts/59503705/ko_KR.srt b/week5/community-contributions/subtitles/srts/59503705/ko_KR.srt new file mode 100755 index 0000000..73f7976 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59503705/ko_KR.srt @@ 
-0,0 +1,244 @@ +WEBVTT + +00:00.950 --> 00:05.870 +이제 퀀타이즈 얘길 하죠 q&q 로라 + +00:05.900 --> 00:07.730 +Q는 퀀타이즈된 퀀타이즈죠 + +00:07.760 --> 00:08.420 +로라예요 + +00:08.420 --> 00:12.620 +3주 차에 퀀타이즈를 잠깐 언급했었죠 + +00:12.620 --> 00:14.480 +그럼 기억날지도 모르겠네요 + +00:14.480 --> 00:17.540 +이제 진짜 얘기를 해보죠 + +00:18.080 --> 00:20.630 +문제는 이거예요 + +00:20.630 --> 00:28.700 +우린 작은 모델로 작업하고 있어요 80억 매개 변수 버전의 llama 3이죠 1번요 + +00:28.700 --> 00:34.580 +이 기발한 계획도 로라가 생각해 냈어요 소차원 행렬을 연구하면 메모리를 더 많이 + +00:34.940 --> 00:36.950 +얻을 수 있죠 Get up + +00:37.130 --> 00:42.740 +그런데 문제는 기본 모델조차도 작은 버전이라도 80억 버전이라도 + +00:42.740 --> 00:46.730 +램 32GB를 쓴다는 거예요 + +00:46.730 --> 00:52.880 +80억 개의 부동 소수점 수죠 각각 32비츠 길이로요 + +00:53.000 --> 00:56.480 +GPU 안을 채울 거예요 + +00:56.510 --> 01:03.380 +T4 박스에 사용할 저렴한 GPU는 메모리가 15GB밖에 안 돼요 기본 모델 + +01:03.380 --> 01:05.530 +자체에도 맞지 않죠 + +01:05.530 --> 01:08.740 +메모리가 바로 바닥날 거예요 그게 문제죠 + +01:08.770 --> 01:13.570 +로라는 우리에게 더 나은 환경을 만들어 주지만 베이스 모델 자체를 메모리에 + +01:13.570 --> 01:15.880 +넣을 수 없으니 충분하지 않아요 + +01:15.880 --> 01:23.920 +80억이면 어떤 세계에선 작은 모형 크기일 수 있지만 그래도 엄청난 수의 매개 변수니까요 + +01:24.190 --> 01:27.940 +그래서 어려운 점이 많아요 + +01:28.240 --> 01:31.810 +그래서 이 놀라운 발견이 이루어졌죠 + +01:31.810 --> 01:33.970 +그게 우리가 작업할 거예요 + +01:34.120 --> 01:37.870 +처음에는 너무 좋아서 믿기 힘들 정도였죠 + +01:37.900 --> 01:40.480 +두 마리 토끼를 다 잡을 수 있는 방법 같네요 + +01:40.480 --> 01:41.950 +그런 셈이죠 + +01:42.190 --> 01:51.310 +그래서 어떤 사람들은 80억 개의 매개 변수가 있다고 했죠 + +01:51.310 --> 01:57.370 +40억 개의 매개 변수 모델처럼 적은 매개 변수를 가지려고 한다면 모델의 힘을 + +01:57.370 --> 01:58.600 +많이 잃게 돼요 + +01:58.600 --> 02:01.900 +그 80억 개의 매개 변수 덕분에 돌릴 노브가 많아요 + +02:01.930 --> 02:04.270 +이 기발한 건축물에 그 점이 있어요 + +02:04.270 --> 02:05.800 +큰 힘을 주죠 + +02:05.830 --> 02:06.370 +좋아요 + +02:06.370 --> 02:09.080 +매개 변수의 수를 줄이지 말죠 + +02:09.080 --> 02:15.200 +그렇게 하는 대신 각 매개 변수의 정밀도를 낮추죠 + +02:15.230 --> 02:21.050 +곱게 갈린 바퀴를 통과하는 게 아니라 몇 가지 설정을 통해 + +02:21.050 --> 02:25.100 +클릭, 클릭, 클릭하게 하는 거죠 + +02:25.310 --> 02:31.070 +그래서 생각한 건데 무게의 정밀도는 줄이고 무게의 개수는 그대로 + +02:31.070 --> 02:32.450 +두기로 했어요 
+ +02:32.480 --> 02:37.010 +논리적으로 생각할 수 있지만 정보의 양을 줄이고 있어요 + +02:37.010 --> 02:41.390 +정보가 절반만 있어도 무게의 수가 절반인 + +02:41.390 --> 02:43.730 +것과 비슷할 거예요 + +02:43.760 --> 02:46.280 +알고 보니 그렇지 않더군요 + +02:46.280 --> 02:52.520 +어떤 이유에서든 정밀도를 낮추면 신경망의 질이 어느 정도 떨어지지만 생각만큼 떨어지진 + +02:52.520 --> 02:55.100 +않아요. Get it. + +02:55.100 --> 02:57.920 +아직도 그 힘을 간직하고 있죠 + +02:58.160 --> 03:04.700 +이건 메모리에 같은 수의 매개 변수로 더 큰 모델을 넣게 해주는 훌륭한 + +03:04.730 --> 03:06.560 +트릭이에요 + +03:06.560 --> 03:10.220 +정밀도가 낮으면 메모리 소모가 less예요 + +03:10.580 --> 03:14.210 +놀라울 정도로 잘 작동하죠 + +03:14.300 --> 03:21.020 +여러분이 평소에 가지고 있는 32비트 부동점 수를 취해서 8비트 + +03:21.020 --> 03:26.000 +숫자로 줄여도 여전히 좋은 성능을 낼 수 있어요. + +03:26.000 --> 03:29.540 +그리고 여기서부터 정말 미친 소리처럼 들리기 시작하죠 + +03:29.630 --> 03:32.480 +4분의 1로 줄일 수 있어요 + +03:32.480 --> 03:35.210 +각 숫자는 네 비트짜리 숫자죠 + +03:35.240 --> 03:40.550 +정수의 관점에서 생각해 보면 각각의 숫자는 0에서 + +03:40.550 --> 03:42.950 +15까지밖에 없어요 + +03:43.160 --> 03:45.020 +그냥 숫자만 따져서요 + +03:45.560 --> 03:47.510 +그 정도로 정확성이 낮죠 + +03:47.510 --> 03:51.620 +클릭은 16개의 설정을 가지고 있어요 + +03:52.010 --> 03:55.190 +그래도 여전히 좋은 연기를 보여줬어요 get it get it + +03:55.340 --> 03:55.670 +네 + +03:55.670 --> 04:00.770 +비트의 품질이 떨어지긴 했지만 아주 조금이에요 + +04:01.010 --> 04:03.200 +그게 바로 직관이었어요 + +04:03.200 --> 04:09.200 +이렇게 하면 메모리 요구 사항이 급격히 줄고 메모리에서 더 큰 모델에 + +04:09.200 --> 04:10.310 +맞게 되죠 + +04:10.790 --> 04:14.420 +몇 가지 사소한 기술적인 사항을 언급할게요 + +04:14.780 --> 04:17.810 +그 중 하나는 방금 말씀드린 클릭 스위치예요 + +04:17.810 --> 04:20.510 +0부터 15까지의 숫자로 생각해 보세요 + +04:20.510 --> 04:28.160 +일반적으로 4비트는 정수로 해석되지 않지만 4비트는 세분성이 + +04:28.160 --> 04:31.940 +낮은 부동점수로 간주되거든요 + +04:31.940 --> 04:34.970 +예제에서 실제로 그걸 보실 수 있어요 + +04:35.360 --> 04:39.740 +또 하나 지적하고 싶은 건 색깔이 처음 나왔을 때 + +04:39.770 --> 04:47.030 +제가 이해 못 했던 건데요 이건 베이스 모델을 수량화하는 거지 로라 어댑터를 + +04:47.030 --> 04:53.390 +수량화하는 게 아니에요 보다시피 여전히 32 비트 플로트죠 + +04:53.570 --> 04:59.630 +베이스모델의 정밀도를 줄이는 것에 대해 얘기하고 있어요 이 거대하고 훌륭한 베이스모델을 + +04:59.630 --> 05:02.300 +메모리에 넣어야 해요 + +05:02.300 --> 05:04.490 +저게 쿠 로라예요 + +05:04.520 
--> 05:05.840 +그게 퀀타이즈예요 + +05:05.840 --> 05:10.250 +잠시 후 실험실에서 보면 더 진짜 같을 거예요 + +05:10.250 --> 05:16.640 +하지만 먼저 다음 비디오에서 중요한 하이퍼파라미터 세 가지를 말씀드릴게요 diff --git a/week5/community-contributions/subtitles/srts/59504769/en_US.srt b/week5/community-contributions/subtitles/srts/59504769/en_US.srt new file mode 100755 index 0000000..6310400 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59504769/en_US.srt @@ -0,0 +1,301 @@ +WEBVTT + +00:00.590 --> 00:03.320 +Without further ado, we're going to get stuck into it. + +00:03.350 --> 00:05.180 +Talking about Laura. + +00:05.210 --> 00:07.430 +Low rank adaptation. + +00:07.670 --> 00:13.520 +And this is going to be a section where we will talk a bit of theory for a few slides, but fear not, + +00:13.550 --> 00:16.160 +we're going to get straight to practice as usual. + +00:16.190 --> 00:19.250 +The best way to understand these things is by seeing them in action. + +00:19.250 --> 00:24.350 +So just after a couple of slides, we're going to hit Colab and look at these things for reals. + +00:24.350 --> 00:26.480 +But first just to set the context then. + +00:26.480 --> 00:31.520 +So look we're going to be using llama 3.1 for this for this week. + +00:31.850 --> 00:34.340 +And llama 3.1 comes in three sizes. + +00:34.340 --> 00:42.680 +It comes in the 8 billion parameter size the 70 billion and then the monstrous 405 billion size. + +00:42.890 --> 00:46.100 +Um, and of course we're taking the smaller one, the 8 billion. + +00:46.250 --> 00:53.630 +Um, but even that is going to be way too large for us to be training realistically on, on the sort + +00:53.660 --> 00:59.450 +of box we want to be able to pay for, like a one GPU box, um, 8 billion weights is already that's + +00:59.450 --> 01:01.370 +32GB of Ram. 
+ +01:01.400 --> 01:07.280 +If you add it up, It's, it's and that's just to have the model in memory when you start training it, + +01:07.280 --> 01:14.210 +which is about running optimization, where you have to be able to get gradients for each of these weights. + +01:14.270 --> 01:18.080 +Um, that that's something which would consume way too much memory. + +01:18.110 --> 01:19.880 +We wouldn't have a hope. + +01:20.240 --> 01:27.110 +Um, and so it would also take a hugely long amount of time because there would be so much to, to be + +01:27.110 --> 01:30.470 +optimizing across be optimizing these 8 billion weights. + +01:30.470 --> 01:32.870 +That's that's just a lot of processing. + +01:33.200 --> 01:38.630 +Um, and it's the kind of thing, of course, that these, uh, made that these frontier labs and places + +01:38.630 --> 01:45.650 +like meta have spent very large sums of money for the biggest models, more than $100 million it costs + +01:45.650 --> 01:46.850 +to train one of them. + +01:46.850 --> 01:49.970 +And so that, you know, probably not the kind of money we're going to be spending. + +01:50.150 --> 01:59.870 +Uh, so there are some techniques, some tricks, uh, which make it surprisingly low cost to train, + +01:59.900 --> 02:05.390 +uh, from a base model so that you can make something that's better at achieving your particular task, + +02:05.390 --> 02:11.030 +Assuming that it's got a lot in common with what the base model was originally trained to do. + +02:11.270 --> 02:18.500 +Um, so before I explain what Lora is, let me just quickly summarize the llama architecture. + +02:18.530 --> 02:22.790 +Now, we're not going to get into deep into neural network architecture in this course. + +02:23.060 --> 02:27.620 +It's something which we'll I'll give you some insight, some intuition behind without going into a lot + +02:27.650 --> 02:28.340 +of detail. 
+ +02:28.340 --> 02:35.510 +But the llama 3.1 architecture consists of stacks and stacks of layers of, of neurons. + +02:35.660 --> 02:42.860 +Um, it's actually got 32 groups of these layers where each group consists. + +02:42.860 --> 02:45.740 +So each group is called a llama decoder layer. + +02:45.740 --> 02:51.980 +And it has in it some self-attention layers, some multi-layer perceptron layers and a silo activation + +02:51.980 --> 02:53.240 +layer and layer norm. + +02:53.240 --> 02:55.220 +And we'll see this in a second. + +02:55.250 --> 02:58.040 +You don't you maybe you know what this is already. + +02:58.070 --> 02:59.840 +If you've got a theoretical background. + +02:59.840 --> 03:03.410 +If not, it's going to be be more real, be more tangible. + +03:03.410 --> 03:07.040 +When you see this architecture in Colab in just a second. + +03:07.370 --> 03:14.480 +Um, And all of these parameters sticking in this, in this big, uh, this, this, this, uh, layered + +03:14.510 --> 03:17.720 +architecture take up 32 gigs of memory. + +03:17.840 --> 03:22.520 +So this is now the big idea behind Lora. + +03:22.550 --> 03:30.020 +The idea is, look, what we can do is we can first freeze all of these weights. + +03:30.050 --> 03:35.360 +Normally, during optimization, you you do a forward pass through your neural network. + +03:35.570 --> 03:41.780 +You figure out how, um, you look at the prediction, the next token that your network predicted, + +03:41.780 --> 03:46.700 +you compare it with what the token should have been, what is the actual true next token. + +03:46.700 --> 03:51.620 +And then based on that, you figure out how much would you want to shift each of the different weights + +03:51.650 --> 03:57.650 +a little bit in order to make it so that next time it's a little bit better at predicting the right + +03:57.680 --> 03:58.610 +next token? + +03:58.610 --> 04:00.530 +That's the idea of optimization. 
+ +04:00.560 --> 04:02.570 +A bit hand-wavy, but you get the idea. + +04:02.600 --> 04:03.110 +Literally. + +04:03.140 --> 04:03.830 +Hand-wavy. + +04:03.920 --> 04:09.230 +Uh uh, but uh, the the concept of Lora is, first of all, frees all these weights. + +04:09.230 --> 04:16.320 +We're not actually going to optimize these 8 billion weights because it's just too much, too many things, + +04:16.350 --> 04:19.080 +too many knobs to turn to, too many gradients. + +04:19.380 --> 04:27.330 +Instead, we pick a few of the layers that we think are the key things that we'd want to train. + +04:27.330 --> 04:35.310 +And these layers, these modules in this, this stacked, uh, layered architecture are known as the + +04:35.310 --> 04:36.840 +target modules. + +04:36.840 --> 04:39.960 +So that's where this expression target modules comes from. + +04:39.960 --> 04:45.480 +That I said, sounds a bit like something out of Star Trek, but it just means the the layers of the + +04:45.480 --> 04:51.300 +neural network that you will be focusing on for the purposes of training, but the weights will still + +04:51.300 --> 04:52.200 +be frozen. + +04:52.890 --> 05:01.830 +Instead, you will create new matrices called adapter matrices with fewer dimensions, so not as many + +05:01.830 --> 05:05.700 +dimensions as are in the the real guy. + +05:05.730 --> 05:09.840 +These will be smaller dimensionality or lower rank. + +05:09.840 --> 05:21.750 +It's called um and and they will be off to one side, and you will have the technique for applying these + +05:21.750 --> 05:24.420 +matrices into the target modules. + +05:24.420 --> 05:27.030 +So they will they will adapt the target modules. + +05:27.030 --> 05:30.510 +There'll be a formula which I will tell you about in a second. + +05:30.510 --> 05:36.390 +But that formula will mean that that in the future, whatever values are in those blue low rank adapters + +05:36.390 --> 05:37.950 +will slightly shift. 
+ +05:37.950 --> 05:42.060 +We'll slightly change what goes on in the target modules. + +05:42.060 --> 05:48.240 +They adapt them so it's lower rank, it's lower dimensional, fewer weights that will be applied against + +05:48.240 --> 05:49.800 +these target modules. + +05:50.820 --> 05:54.450 +And then there's one little technicality because you'll see this in a second. + +05:54.480 --> 06:00.000 +It's worth mentioning, in fact, because of the way that the dimensions work in these neural networks, + +06:00.000 --> 06:06.120 +there are in fact two of these low rank matrices, one one is known as a and one is known as B. + +06:06.420 --> 06:09.270 +And you'll see in the code they're called lora a and lora b. + +06:09.300 --> 06:11.310 +So there are two matrices. + +06:11.310 --> 06:16.230 +It's not super important to know that, but I want to make sure that when you see it in the code. + +06:16.230 --> 06:18.780 +You'll see this and you'll say, okay, there are two matrices. + +06:18.780 --> 06:20.910 +They get applied to target modules. + +06:20.910 --> 06:22.290 +This makes sense. + +06:22.710 --> 06:27.420 +And that at a high level is the story behind Laura Freas. + +06:27.420 --> 06:34.290 +The main model come up with a bunch of of smaller matrices with fewer dimensions. + +06:34.290 --> 06:36.060 +These are subject to training. + +06:36.060 --> 06:42.660 +They will get trained and then they will be applied using some simple formula to the target modules. + +06:42.990 --> 06:49.380 +And that way you'll be able to make a base model that will get better and better as it learns. + +06:49.380 --> 06:53.910 +Because of the application of these Laura matrices. + +06:53.910 --> 07:00.600 +And Laura stands for low rank adaptation because they are lower rank, lower dimensions and they adapt + +07:00.600 --> 07:02.130 +the target modules. + +07:02.400 --> 07:02.970 +There we go. 
+ +07:02.970 --> 07:07.650 +A lot of talking, a lot of words, but hopefully you've got an intuition for how this fits together, + +07:07.650 --> 07:11.970 +and that intuition will become clearer when you see it in the code. + +07:12.150 --> 07:19.230 +Um, but in the next session, we'll just talk quickly about one more thing, which is the Q, the quantization. diff --git a/week5/community-contributions/subtitles/srts/59504769/ja_JP.srt b/week5/community-contributions/subtitles/srts/59504769/ja_JP.srt new file mode 100755 index 0000000..e756959 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59504769/ja_JP.srt @@ -0,0 +1,250 @@ +WEBVTT + +00:00.590 --> 00:03.320 +前置きはこれくらいにして、 さっそく本題に入ろう。 + +00:03.350 --> 00:05.180 +ローラについて語る + +00:05.210 --> 00:07.430 +低ランクの適応。 + +00:07.670 --> 00:13.520 +そしてこのセクションでは、 数枚のスライドで少し理論的な話をするが、 + +00:13.550 --> 00:16.160 +恐れることはない。 + +00:16.190 --> 00:19.250 +こうしたことを理解する最善の方法は、 実際に見てみることだ。 + +00:19.250 --> 00:24.350 +だから、 スライドを2、 3枚見た後、 Colabに行き、 これらのことを実際に見てみるつもりだ。 + +00:24.350 --> 00:26.480 +しかし、 まず文脈を整理する必要がある。 + +00:26.480 --> 00:31.520 +これからllama 3を使うんだ。 今週はこれに1点。 + +00:31.850 --> 00:34.340 +そしてラマ3世。 1には3つのサイズがある。 + +00:34.340 --> 00:42.680 +パラメータサイズは80億、 700億、 そして4,050億という途方もないサイズがある。 + +00:42.890 --> 00:46.100 +ええと、 もちろん、 小さいほうの80億を取ります。 + +00:46.250 --> 01:01.370 +GPUが1つで、 80億ウェイトとなると、 すでに32GBのラムを搭載していることになります。 + +01:01.400 --> 01:07.280 +これを足すと、 そうなります。 これは、 トレーニングを開始するときにモデルをメモリに保存しておくためで、 + +01:07.280 --> 01:14.210 +最適化を実行することになります。 + +01:14.270 --> 01:18.080 +ええと、 それはあまりにも多くのメモリを消費するものだ。 + +01:18.110 --> 01:19.880 +希望はない。 + +01:20.240 --> 01:30.470 +80億のウェイトを最適化するのに膨大な時間がかかるからだ。 + +01:30.470 --> 01:32.870 +処理量が多いんだ。 + +01:33.200 --> 01:38.630 +もちろん、 フロンティア・ラボやメタのようなところは、 + +01:38.630 --> 01:46.850 +最大級のモデルには1億ドル以上の巨費を投じている。 + +01:46.850 --> 01:49.970 +だから、 おそらく我々が使うようなお金ではない。 + +01:50.150 --> 01:59.870 +ベースモデルから訓練することで、 驚くほど低コストで、 + +01:59.900 --> 02:11.030 +特定のタスクを達成するのに優れたものを作ることができます。 + +02:11.270 --> 
02:18.500 +ええと、 ローラが何なのかを説明する前に、 ラマ建築について簡単にまとめておこう。 + +02:18.530 --> 02:22.790 +さて、 このコースではニューラルネットワーク・アーキテクチャについて深入りするつもりはない。 + +02:23.060 --> 02:28.340 +これは、 詳細には触れないが、 いくつかの洞察や直感をお伝えするものだ。 + +02:28.340 --> 02:35.510 +しかし、 ラマは3. 1 アーキテクチャは、 ニューロンの層のスタックとスタックで構成されている。 + +02:35.660 --> 02:42.860 +このレイヤーには32のグループがあり、 それぞれのグループが構成されているんだ。 + +02:42.860 --> 02:45.740 +つまり、 各グループはラマ・デコーダー層と呼ばれる。 + +02:45.740 --> 02:53.240 +その中には、 自己注意層、 多層パーセプトロン層、 サイロ活性化層、 ノルム層がある。 + +02:53.240 --> 02:55.220 +これはすぐにわかる。 + +02:55.250 --> 02:58.040 +まさか......もしかしたら、 これが何なのか、 もう知っているかもしれない。 + +02:58.070 --> 02:59.840 +理論的なバックグラウンドがあればね。 + +02:59.840 --> 03:03.410 +そうでなければ、 よりリアルに、 より具体的になる。 + +03:03.410 --> 03:07.040 +Colabでこの建築を見るのはほんの一瞬だ。 + +03:07.370 --> 03:14.480 +そして、 これらのパラメーターはすべて、 この大きな、 あー、 あー、 この、 あー、 レイヤード・アーキテクチャーに張り付いていて、 + +03:14.510 --> 03:17.720 +32ギガのメモリーを消費する。 + +03:17.840 --> 03:22.520 +これが今、 ローラを支える大きなアイデアだ。 + +03:22.550 --> 03:30.020 +つまり、 私たちにできることは、 まずこれらのウェイトをすべて凍結させることだ。 + +03:30.050 --> 03:35.360 +通常、 最適化の際にはニューラルネットワークをフォワードパスする。 + +03:35.570 --> 03:46.700 +ネットワークが予測した次のトークンを見て、 トークンがどうあるべきかを比較する。 + +03:46.700 --> 03:51.620 +そして、 それに基づいて、 次に正しいトークンを予測するために、 + +03:51.650 --> 03:58.610 +それぞれの重みをどれくらいずらせばいいかを考える。 + +03:58.610 --> 04:00.530 +それが最適化の考え方だ。 + +04:00.560 --> 04:02.570 +少し手探りだが、 おわかりだろう。 + +04:02.600 --> 04:03.110 +文字通りだ。 + +04:03.140 --> 04:03.830 +ハンドウェービー。 + +04:03.920 --> 04:09.230 +ええと、 でも......ローラのコンセプトは、 まず第一に、 すべての重りを解放することなんだ。 + +04:09.230 --> 04:19.080 +80億ものウェイトを最適化するのは、 あまりに多すぎるし、 回すノブやグラデーションが多すぎるからだ。 + +04:19.380 --> 04:27.330 +その代わりに、 私たちがトレーニングしたいと思う重要なレイヤーをいくつか選ぶ。 + +04:27.330 --> 04:36.840 +そして、 これらのレイヤー、 つまり、 この積み重ねられた、 あー、 レイヤーアーキテクチャーのこれらのモジュールは、 ターゲットモジュールとして知られている。 + +04:36.840 --> 04:39.960 +だから、 このターゲット・モジュールという表現はそこからきている。 + +04:39.960 --> 04:45.480 +と言うと、 『スタートレック』に出てきそうだが、 これはトレーニングのために注目するニューラルネットワークのレイヤーを意味するだけで、 + +04:45.480 --> 04:52.200 +重みは凍結されたままである。 + +04:52.890 --> 05:05.700 +その代わりに、 アダプター行列と呼ばれる次元数の少ない行列を新たに作成する。 + 
+05:05.730 --> 05:09.840 +これらは次元が小さいか、 ランクが低くなる。 + +05:09.840 --> 05:24.420 +そして、 これらのマトリックスをターゲット・モジュールに適用するテクニックを身につけるのだ。 + +05:24.420 --> 05:27.030 +だから、 彼らはターゲット・モジュールに合わせるだろう。 + +05:27.030 --> 05:30.510 +その方法については後で説明する。 + +05:30.510 --> 05:37.950 +しかし、 この計算式は、 将来、 青い低ランクのアダプターにどんな値が入っていたとしても、 微妙にずれることを意味する。 + +05:37.950 --> 05:42.060 +ターゲット・モジュールの中身を少し変えてみる。 + +05:42.060 --> 05:49.800 +より低いランク、 より低い次元、 より少ない重みで、 これらのターゲット・モジュールに適用されるようにするのだ。 + +05:50.820 --> 05:54.450 +そして、 ちょっとした技術的な問題が1つある。 + +05:54.480 --> 06:00.000 +このようなニューラルネットワークでは次元が機能するため、 実際には低ランクの行列が2つあり、 + +06:00.000 --> 06:06.120 +1つはa、 もう1つはBとして知られている。 + +06:06.420 --> 06:09.270 +コードを見てもらえばわかるが、 これらはローラaとローラbと呼ばれている。 + +06:09.300 --> 06:11.310 +つまり、 2つのマトリックスがある。 + +06:11.310 --> 06:16.230 +それを知ることはそれほど重要なことではないが、 コードの中でそれを見たときに確認してもらいたい。 + +06:16.230 --> 06:18.780 +これを見て、 2つの行列があることがわかるだろう。 + +06:18.780 --> 06:20.910 +それらはターゲットモジュールに適用される。 + +06:20.910 --> 06:22.290 +これは理にかなっている。 + +06:22.710 --> 06:27.420 +そして、 それがローラ・フリアスの物語なのだ。 + +06:27.420 --> 06:34.290 +メインモデルは、 次元数の少ない小さな行列の束で構成される。 + +06:34.290 --> 06:36.060 +これらはトレーニングの対象となる。 + +06:36.060 --> 06:42.660 +彼らはトレーニングを受けた後、 簡単な計算式を使ってターゲット・モジュールに適用される。 + +06:42.990 --> 06:49.380 +そうすれば、 学習すればするほど良くなるベースモデルを作ることができる。 + +06:49.380 --> 06:53.910 +これらのローラ行列が適用されるからだ。 + +06:53.910 --> 07:02.130 +ローラとはローランク・アダプテーションの略で、 低ランク、 低次元、 そしてターゲット・モジュールに適応することを意味する。 + +07:02.400 --> 07:02.970 +これでよし。 + +07:02.970 --> 07:07.650 +多くのことを話し、 多くの言葉を並べたが、 願わくば、 これがどのように組み合わされているのか、 + +07:07.650 --> 07:11.970 +直感的に理解してほしい。 + +07:12.150 --> 07:19.230 +うーん、 でも次のセッションでは、 もうひとつだけ、 Q(量子化)について手短にお話ししましょう。 diff --git a/week5/community-contributions/subtitles/srts/59504769/ko_KR.srt b/week5/community-contributions/subtitles/srts/59504769/ko_KR.srt new file mode 100755 index 0000000..14cee98 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59504769/ko_KR.srt @@ -0,0 +1,292 @@ +WEBVTT + +00:00.590 --> 00:03.320 +그럼 지체 없이 get it로 들어가 보죠 + +00:03.350 --> 
00:05.180 +로라 말이에요 + +00:05.210 --> 00:07.430 +낮은 순위 적응력이죠 + +00:07.670 --> 00:13.520 +이 섹션에서 슬라이드 몇 개 동안 이론에 관해 얘기할 거예요 하지만 걱정 마세요 늘 그렇듯 바로 + +00:13.550 --> 00:16.160 +연습할 거니까요 Get in get + +00:16.190 --> 00:19.250 +이런 걸 이해하는 가장 좋은 방법은 직접 보는 거예요 + +00:19.250 --> 00:24.350 +슬라이드 몇 개 본 후에 Colab을 누르고 실제로 살펴보죠 + +00:24.350 --> 00:26.480 +하지만 먼저 컨텍스트를 설정하죠 + +00:26.480 --> 00:31.520 +자, llama 3을 사용할 거예요 이번 주에 1달러예요 + +00:31.850 --> 00:34.340 +라마 3도요 1은 세 가지 사이즈가 있어요 + +00:34.340 --> 00:42.680 +80억 매개 변수로 700억에 달하는 거대한 규모입니다 무려 4050억이죠 + +00:42.890 --> 00:46.100 +물론 더 작은 80억 달러도 받을 거예요 + +00:46.250 --> 00:53.630 +하지만 그것도 너무 커질 거예요 현실적으로 훈련하기에는요 하나의 GPU + +00:53.660 --> 00:59.450 +박스에 지불하고 싶은 비용으로요 80억 개의 무게라면 램 + +00:59.450 --> 01:01.370 +32GB죠 + +01:01.400 --> 01:07.280 +모두 더하면 메모리 안에 있는 모델이 됩니다. 훈련을 시작할 때 최적화 + +01:07.280 --> 01:14.210 +실행을 할 수 있습니다. 각각의 값에 따라 그러데이션을 얻을 수 있어야 하죠. + +01:14.270 --> 01:18.080 +메모리를 너무 많이 쓰는 거예요 + +01:18.110 --> 01:19.880 +희망이 없었을 거예요 + +01:20.240 --> 01:27.110 +시간도 엄청나게 오래 걸릴 거예요 최적화할 게 너무 많으니까요 80억 + +01:27.110 --> 01:30.470 +개의 무게를 최적화하려면요 + +01:30.470 --> 01:32.870 +처리할 게 많네요 + +01:33.200 --> 01:38.630 +물론 이런 프런티어 연구소와 메타 연구소들이 대형 모델에 + +01:38.630 --> 01:46.850 +많은 돈을 투자한 덕분이죠 한 대를 훈련하는 데 1억 달러 이상이 들었어요 + +01:46.850 --> 01:49.970 +그래서 아마 우리가 쓸 만한 돈은 아닐 거예요 + +01:50.150 --> 01:59.870 +그래서 기본 모델을 훈련하는 데 놀라울 정도로 적은 비용과 요령이 필요합니다 그래야 특정 + +01:59.900 --> 02:05.390 +작업을 더 잘 수행할 수 있으니까요 기본 모델이 원래 + +02:05.390 --> 02:11.030 +훈련받은 것과 공통점이 많다고 가정하면요 + +02:11.270 --> 02:18.500 +로라가 뭔지 설명하기 전에 라마 건축 양식을 간단히 요약해 볼게요 + +02:18.530 --> 02:22.790 +Get in GAME 이 과정에선 신경망 구조를 깊이 다루진 않을 거예요 + +02:23.060 --> 02:28.340 +아주 자세히는 설명하지 않고 직관적으로 설명해 드릴게요 + +02:28.340 --> 02:35.510 +라마 3은 어때요? 
뉴런이 층층이 쌓여 있는 형태예요 + +02:35.660 --> 02:42.860 +각 그룹이 구성된 이런 레이어 그룹이 32개예요 + +02:42.860 --> 02:45.740 +각 그룹을 라마 디코더 층이라고 해요 + +02:45.740 --> 02:51.980 +자기 주의력 계층과 다중 인터셉트론 계층 사일로 활성화 계층과 표준 계층이 + +02:51.980 --> 02:53.240 +있어요 + +02:53.240 --> 02:55.220 +잠시 후에 이걸 보죠 + +02:55.250 --> 02:58.040 +이미 어떤 상황인지 알고 있을지도 몰라요 + +02:58.070 --> 02:59.840 +이론적 배경이 있다면요 + +02:59.840 --> 03:03.410 +그렇지 않다면 더 현실적이고 실재할 거예요 + +03:03.410 --> 03:07.040 +콜랍에 있는 이 건축물을 보면요 + +03:07.370 --> 03:14.480 +이 모든 매개 변수들이 이 큰, 이 계층 아키텍처에 달라붙어 32기가 + +03:14.510 --> 03:17.720 +메모리를 차지해요 + +03:17.840 --> 03:22.520 +이게 로라의 원대한 아이디어예요 + +03:22.550 --> 03:30.020 +우선 이 추들을 전부 얼릴 거예요 + +03:30.050 --> 03:35.360 +보통 최적화 과정에서는 신경망을 통해 전진 패스를 해요 + +03:35.570 --> 03:41.780 +네트워크가 예측한 다음 토큰을 어떻게 분석할지 살펴보고 원래 토큰이 있어야 + +03:41.780 --> 03:46.700 +할 것과 비교해 보세요 진짜 다음 토큰이 무엇일까요? + +03:46.700 --> 03:51.620 +그런 다음 그걸 바탕으로 각 비트를 조금씩 바꿔서 + +03:51.650 --> 03:58.610 +다음번에는 올바른 토큰을 예측할 수 있도록 하는 거죠 + +03:58.610 --> 04:00.530 +그게 최적화의 개념이죠 + +04:00.560 --> 04:02.570 +약간 웨이브 비트 같지만 느낌은 오죠 get it get it + +04:02.600 --> 04:03.110 +말 그대로요 + +04:03.140 --> 04:03.830 +수작업으로요 + +04:03.920 --> 04:09.230 +하지만 로라의 콘셉트는 우선 이 모든 역기를 자유롭게 한다는 거죠 + +04:09.230 --> 04:16.320 +80억 개의 무게는 최적화하지 않을 겁니다 너무 많으니까요 너무 많은 노브와 + +04:16.350 --> 04:19.080 +그러데이션이 있어요 + +04:19.380 --> 04:27.330 +대신 훈련하고 싶은 핵심 사항을 몇 가지 골라요 + +04:27.330 --> 04:35.310 +이 층과 모듈 안에 이렇게 층층이 쌓인 구조물을 목표 모듈이라고 + +04:35.310 --> 04:36.840 +해요 + +04:36.840 --> 04:39.960 +그래서 이 표현 대상 모듈이 있는 거죠 + +04:39.960 --> 04:45.480 +스타트렉에 나오는 비트 같다고 했는데 신경망의 층을 + +04:45.480 --> 04:51.300 +말하는 거예요 훈련에 집중할 수 있지만 무게는 여전히 얼어 + +04:51.300 --> 04:52.200 +있죠 + +04:52.890 --> 05:01.830 +대신 새로운 매트릭스인 어댑터 매트릭스를 소개할 거예요 적은 수의 치수죠 실제 치수에 + +05:01.830 --> 05:05.700 +비해서 치수가 많지는 않아요 + +05:05.730 --> 05:09.840 +입체감이 작거나 계급이 낮을 거예요 + +05:09.840 --> 05:21.750 +이 물질을 한쪽으로 몰면 이 물질을 표적 모듈에 바르는 기술을 익힐 + +05:21.750 --> 05:24.420 +수 있어요 + +05:24.420 --> 05:27.030 +그래서 목표 모듈을 수정할 거예요 + +05:27.030 --> 05:30.510 +공식이 
있는데 잠시 후에 말씀드릴게요 + +05:30.510 --> 05:36.390 +하지만 이 공식은 미래에는 저 파란색 로 랭크 어댑터에 있는 값이 조금씩 바뀔 + +05:36.390 --> 05:37.950 +거라는 걸 의미하죠 + +05:37.950 --> 05:42.060 +대상 모듈에서 일어나는 일을 약간 바꿀 거예요 + +05:42.060 --> 05:48.240 +등급도 낮고 차원도 낮아서 표적 모듈에 가해지는 무게가 적게 들도록 + +05:48.240 --> 05:49.800 +개조했어요 + +05:50.820 --> 05:54.450 +잠시 후에 보시겠지만 기술적인 문제가 있어요 + +05:54.480 --> 06:00.000 +이 말을 꼭 해야겠네요 이런 신경망에서 치수가 작동하는 방식을 + +06:00.000 --> 06:06.120 +보면 낮은 순위의 행렬은 두 가지예요 하나는 A, 하나는 B죠 + +06:06.420 --> 06:09.270 +코드에는 로라 a와 로라 b라고 적혀 있어요 + +06:09.300 --> 06:11.310 +행렬은 두 가지가 있어요 + +06:11.310 --> 06:16.230 +그걸 아는 게 그리 중요하진 않지만 코드에서 그걸 보시도록 확실히 해두고 싶어요 + +06:16.230 --> 06:18.780 +이걸 보면 두 행렬이 있다고 생각할 거예요 + +06:18.780 --> 06:20.910 +Get을 대상 모듈에 적용해요 + +06:20.910 --> 06:22.290 +말 되네요 + +06:22.710 --> 06:27.420 +로라 프레스의 숨은 이야기가 아주 깊어요 + +06:27.420 --> 06:34.290 +본래는 치수가 적은 작은 행렬을 많이 만들었어요 + +06:34.290 --> 06:36.060 +훈련 대상이죠 + +06:36.060 --> 06:42.660 +get을 훈련한 다음 간단한 공식을 이용해 목표 모듈에 적용할 거예요 + +06:42.990 --> 06:49.380 +그렇게 기본 모델을 만들 수 있어요 get이 학습할수록 더 좋아지는 거죠 + +06:49.380 --> 06:53.910 +로라 매트릭스 응용 프로그램 덕분이죠 + +06:53.910 --> 07:00.600 +로라는 낮은 서열의 적응을 의미해요 서열도 낮고, 공간도 낮고 목표 모듈에 + +07:00.600 --> 07:02.130 +적응하거든요 + +07:02.400 --> 07:02.970 +됐어요 + +07:02.970 --> 07:07.650 +많은 대화와 단어가 오가지만 잘 조합할 수 있는 직관을 가지길 + +07:07.650 --> 07:11.970 +바랍니다 코드를 보면 그 직관이 더 명확해질 거예요 + +07:12.150 --> 07:19.230 +다음 시간엔 하나만 더 간단히 얘기할게요 Q, 퀀타이즈요 diff --git a/week5/community-contributions/subtitles/srts/59504785/en_US.srt b/week5/community-contributions/subtitles/srts/59504785/en_US.srt new file mode 100755 index 0000000..ad8c9cd --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59504785/en_US.srt @@ -0,0 +1,220 @@ +WEBVTT + +00:00.740 --> 00:04.160 +So at this point we're going to talk about hyperparameters. + +00:04.160 --> 00:06.320 +And we're going to introduce three of them. + +00:06.320 --> 00:08.840 +So a reminder of what is a hyperparameter. + +00:08.840 --> 00:10.700 +We talked about it a bit last week. 
+
+00:10.730 --> 00:13.910
+A hyperparameter is one of these levers.
+
+00:14.000 --> 00:19.250
+That is something which you as the experimenter just gets to choose what you want it to be.
+
+00:19.250 --> 00:24.740
+There's no particular hard and fast rule about what it should be, and you're meant to use a process
+
+00:24.740 --> 00:31.880
+known as hyperparameter optimization to try different values and see what works best for your task at
+
+00:31.910 --> 00:32.570
+hand.
+
+00:32.570 --> 00:38.690
+And in reality, what what we are actually doing is basically trial and error.
+
+00:38.690 --> 00:44.690
+It's a bit of guesswork and then experimentation, because there aren't necessarily any theoretical
+
+00:44.690 --> 00:47.570
+reasons why it should be set one way.
+
+00:47.570 --> 00:50.000
+It's a matter of practical experiment.
+
+00:50.000 --> 00:56.240
+And so often you find in these things when people have a something that's working well and there's a
+
+00:56.240 --> 01:01.250
+few, a few controlling parameters, but they're not quite sure how what they should be set to.
+
+01:01.280 --> 01:03.570
+We don't yet have the theory to say what they should be.
+
+01:03.600 --> 01:05.490
+We just call it a hyperparameter.
+
+01:05.610 --> 01:06.570
+That's what it's called.
+
+01:06.570 --> 01:11.940
+And it means that you're in this world of trial and error and guesswork until you pick the right settings.
+
+01:11.940 --> 01:13.590
+That works best for your model.
+
+01:13.620 --> 01:17.550
+I'm oversimplifying a bit, of course, but hopefully you get the general idea.
+
+01:17.550 --> 01:21.180
+So there's going to be three of them that are most critical.
+
+01:21.180 --> 01:26.760
+In the case of QLoRA fine tuning, and I want to introduce them to you now and then.
+
+01:26.760 --> 01:29.640
+We will be playing with them in our time on this.
+
+01:29.640 --> 01:33.720
+The first of them is called R, which stands for rank.
+
+01:33.810 --> 01:41.940
+And it means simply how many dimensions are we going to use for these lower rank matrices within within
+
+01:41.940 --> 01:49.800
+the Llama architecture, the the inner layers have dimensionality of like 1004 thousand dimensions.
+
+01:49.800 --> 01:55.020
+We're going to want much smaller number of dimensions in our lower rank matrices.
+
+01:55.020 --> 01:56.640
+That's that's the whole idea of them.
+
+01:56.850 --> 02:03.600
+Um, so typically, uh, as I said, there's no hard and fast rules different tasks look for different
+
+02:03.600 --> 02:09.520
+values of R to start with when you're working in this kind of language generation models.
+
+02:09.520 --> 02:14.740
+I think a good rule of thumb that I've, I've always used and that I see people use generally in the
+
+02:14.740 --> 02:18.910
+community start with eight, which is a small number.
+
+02:19.210 --> 02:24.940
+Um, and that means that it will use up very lower memory and it will run fairly fast.
+
+02:25.120 --> 02:30.940
+Um, and then double it to 16, which will take up more memory and run more slowly and see whether or
+
+02:30.940 --> 02:36.280
+not you get better results and then potentially double again until you reach a point where you're getting
+
+02:36.280 --> 02:37.450
+diminishing returns.
+
+02:37.450 --> 02:40.690
+It's slowing down and it's taking longer, but you're not seeing any improvement.
+
+02:40.690 --> 02:45.520
+And then, you know, there's no point in having a higher R, you've already got the power you need
+
+02:45.520 --> 02:47.170
+for the data that you've got.
+
+02:47.350 --> 02:51.880
+So that's r uh, the next one that we'll talk about is alpha.
+
+02:51.880 --> 02:56.890
+And Alpha is quite simply a scaling factor that is multiplied.
+
+02:56.890 --> 03:01.120
+It's applied to these LoRA A, LoRA B matrices.
+
+03:01.120 --> 03:05.530
+And that is then used to change the weights in the model.
+ +03:05.530 --> 03:10.710 +The formula for what it's worth is that the amount that you change the weights in the model by in your + +03:10.740 --> 03:11.940 +in your target modules. + +03:11.970 --> 03:16.830 +Is alpha times the A matrix times the B matrix. + +03:16.830 --> 03:18.630 +They get all multiplied together. + +03:18.630 --> 03:21.330 +So bigger alpha means more effect. + +03:21.330 --> 03:27.600 +And in practice the rule of thumb that is used I think almost ubiquitously I've I've always used it + +03:27.600 --> 03:32.880 +and I've always seen it this way in examples is to set alpha to be double R. + +03:32.910 --> 03:36.540 +So if you start with an R of eight your alpha is 16. + +03:36.570 --> 03:42.540 +Then when you go up to an R of 16 alpha is 32 and then 32 would be 64. + +03:42.540 --> 03:44.940 +So that is the good rule of thumb. + +03:44.940 --> 03:51.510 +But of course it's always worth experimenting with different alphas to see if that if that changes the + +03:51.540 --> 03:53.040 +your your accuracy. + +03:54.240 --> 04:02.160 +And then the third and final of our three essential hyperparameters is the actually saying, what will + +04:02.190 --> 04:08.640 +be the target modules that you will focus on adapting in your architecture? + +04:08.640 --> 04:15.970 +Which which of these layers are you going to select to focus on uh, and uh, generally the most common + +04:15.970 --> 04:19.540 +choice and the one that we'll be using is that you focus on the attention layers. + +04:19.540 --> 04:20.830 +That's very common. + +04:20.830 --> 04:23.920 +You'll see that in the code that's going to make more sense when you see it. + +04:23.980 --> 04:28.990 +Uh, there are situations when you want to to target other target modules. 
+ +04:29.080 --> 04:35.800 +Um, if for example, uh, you're generating something that, that where you want the output to be in + +04:35.830 --> 04:41.110 +like a completely different language or something like that, then you might want to, uh, to target + +04:41.110 --> 04:43.780 +some of those final layers. + +04:43.780 --> 04:49.210 +So you'll see some, some, some that I'll give you more context in a moment about how that works. + +04:49.210 --> 04:54.940 +But generally speaking, the most common by far is to target the attention head layers. + +04:54.940 --> 04:56.440 +That's what we will do. + +04:56.470 --> 04:59.470 +And you will see how that set up in a moment. + +05:00.670 --> 05:06.760 +And with that, we are now going to head to Google Colab to look at this, to look at some models, + +05:06.760 --> 05:12.760 +to talk about Laura, to talk about Q, Laura and to see these three hyperparameters in action. + +05:13.180 --> 05:14.260 +Let's do it. diff --git a/week5/community-contributions/subtitles/srts/59504785/ja_JP.srt b/week5/community-contributions/subtitles/srts/59504785/ja_JP.srt new file mode 100755 index 0000000..c32bf31 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59504785/ja_JP.srt @@ -0,0 +1,178 @@ +WEBVTT + +00:00.740 --> 00:04.160 +そこでこの時点で、 ハイパーパラメーターについて話すことにする。 + +00:04.160 --> 00:06.320 +そのうちの3人を紹介しよう。 + +00:06.320 --> 00:08.840 +そこで、 ハイパーパラメータとは何かを思い出してほしい。 + +00:08.840 --> 00:10.700 +先週も少し話した。 + +00:10.730 --> 00:13.910 +ハイパーパラメータとは、 このレバーのひとつである。 + +00:14.000 --> 00:19.250 +それは、 実験者であるあなたが、 どうしたいかを選ぶだけのものだ。 + +00:19.250 --> 00:24.740 +ハイパーパラメーターの最適化として知られるプロセスを使って、 + +00:24.740 --> 00:32.570 +さまざまな値を試し、 手元のタスクに最適なものを見極めることになる。 + +00:32.570 --> 00:38.690 +そして、 実際に私たちがやっていることは、 基本的に試行錯誤なのだ。 + +00:38.690 --> 00:47.570 +理論的な理由があるわけではないので、 推測と実験を繰り返すことになる。 + +00:47.570 --> 00:50.000 +実践的な実験の問題だ。 + +00:50.000 --> 01:01.250 +よくあることだが、 うまく機能しているものがあって、 それをコントロールするパラメータがいくつかある。 + +01:01.280 --> 01:03.570 +それがどうあるべきかを語る理論はまだない。 + 
+01:03.600 --> 01:05.490 +私たちはこれをハイパーパラメーターと呼んでいる。 + +01:05.610 --> 01:06.570 +そう呼ばれているんだ。 + +01:06.570 --> 01:11.940 +そして、 正しいセッティングを選ぶまで、 試行錯誤と推測の世界に身を置くことになる。 + +01:11.940 --> 01:13.590 +それがあなたのモデルに最も適している。 + +01:13.620 --> 01:17.550 +もちろん、 少し単純化しすぎてはいるが、 大まかなイメージはつかんでいただけただろうか。 + +01:17.550 --> 01:21.180 +その中で最も重要なのは3つだ。 + +01:21.180 --> 01:26.760 +Qローラのファインチューニングの場合は、 時々紹介したい。 + +01:26.760 --> 01:29.640 +私たちはこれから、 彼らと一緒にプレーすることになる。 + +01:29.640 --> 01:33.720 +その最初のものはRと呼ばれ、 これはランクの略である。 + +01:33.810 --> 01:49.800 +つまり、 ラマ・アーキテクチャー内の下位ランクのマトリックスに何次元を使うかということだ。 + +01:49.800 --> 01:55.020 +低ランクの行列には、 もっと少ない次元数が必要だ。 + +01:55.020 --> 01:56.640 +それこそが、 彼らのアイデアなんだ。 + +01:56.850 --> 02:03.600 +ええと、 だから、 さっきも言ったように、 この種の言語生成モデルで作業する場合、 タスクによって最初に求めるRの値が違うので、 + +02:03.600 --> 02:09.520 +厳密なルールはないんだ。 + +02:09.520 --> 02:18.910 +私がいつも使っている経験則であり、 コミュニティで一般的に使われている経験則だと思う。 + +02:19.210 --> 02:24.940 +ええと、 つまり、 メモリの使用量が非常に少なく、 かなり高速に動作するということです。 + +02:25.120 --> 02:30.940 +そして、 より多くのメモリを消費し、 よりゆっくりと実行される16に倍増し、 より良い結果が得られるかどうかを確認し、 + +02:30.940 --> 02:37.450 +収穫が減少するポイントに達するまで、 さらに倍増する可能性がある。 + +02:37.450 --> 02:40.690 +スピードは落ちているし、 時間もかかっているが、 改善は見られない。 + +02:40.690 --> 02:47.170 +そうなると、 Rが高くても意味がない。 今あるデータに必要なパワーはすでに持っている。 + +02:47.350 --> 02:51.880 +というわけで、 次に話すのはアルファだ。 + +02:51.880 --> 02:56.890 +そしてアルファは、 単純に掛け合わされるスケーリング係数である。 + +02:56.890 --> 03:01.120 +このローラ・A・ローラ・Bのマトリックスに適用される。 + +03:01.120 --> 03:05.530 +そして、 そのウェイトを使ってモデルのウェイトを変更する。 + +03:05.530 --> 03:11.940 +この計算式は、 モデル内のウェイトをターゲット・モジュール内で変更する量に相当する。 + +03:11.970 --> 03:16.830 +アルファ×A行列×B行列。 + +03:16.830 --> 03:18.630 +それらはすべて掛け算になる。 + +03:18.630 --> 03:21.330 +つまり、 アルファ値が大きいほど効果が大きいということだ。 + +03:21.330 --> 03:32.880 +そして、 実際に使用される経験則は、 ほとんどどこにでもあるものだと思う。 + +03:32.910 --> 03:36.540 +つまり、 Rが8でスタートした場合、 アルファは16となる。 + +03:36.570 --> 03:42.540 +そしてRが16になるとアルファは32になり、 32になると64になる。 + +03:42.540 --> 03:44.940 +これが経験則だ。 + +03:44.940 --> 03:53.040 +しかしもちろん、 異なるアルファを試してみて、 精度が変わるかどうかを確認する価値は常にある。 + +03:54.240 --> 
04:08.640 +そして3つ目、 3つの重要なハイパーパラメータの最後が、 アーキテクチャに適応させるターゲットモジュールは何かということです。 + +04:08.640 --> 04:19.540 +どのレイヤーに注目するかというと......一般的には、 アテンション・レイヤーに注目するのが最も一般的な選択だ。 + +04:19.540 --> 04:20.830 +よくあることだよ。 + +04:20.830 --> 04:23.920 +コードを見てもらえればわかると思う。 + +04:23.980 --> 04:28.990 +他のモジュールをターゲットにしたい場合もあるだろう。 + +04:29.080 --> 04:43.780 +例えば、 まったく違う言語で出力したい場合などは、 最終レイヤーのいくつかをターゲットにするといいかもしれない。 + +04:43.780 --> 04:49.210 +だから、 いくつか、 いくつか、 いくつか、 その仕組みについてもう少し詳しく説明しよう。 + +04:49.210 --> 04:54.940 +しかし、 一般的に言って、 最も一般的なのは、 ヘッドレイヤーに注目することである。 + +04:54.940 --> 04:56.440 +それが私たちがやることだ。 + +04:56.470 --> 04:59.470 +どのようにセットアップされるかは、 すぐにわかるだろう。 + +05:00.670 --> 05:06.760 +そして、 これからGoogle Colabに向かい、 これを見て、 いくつかのモデルを見て、 ローラについて話し、 + +05:06.760 --> 05:12.760 +Qとローラについて話し、 そしてこれら3つのハイパーパラメータが実際に使われているのを見ることにする。 + +05:13.180 --> 05:14.260 +そうしよう。 diff --git a/week5/community-contributions/subtitles/srts/59504785/ko_KR.srt b/week5/community-contributions/subtitles/srts/59504785/ko_KR.srt new file mode 100755 index 0000000..e91a61f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59504785/ko_KR.srt @@ -0,0 +1,217 @@ +WEBVTT + +00:00.740 --> 00:04.160 +이 시점에서 hyperperameter에 관해 얘기할 거예요 + +00:04.160 --> 00:06.320 +그중 세 가지를 소개할게요 + +00:06.320 --> 00:08.840 +hyperperameter가 뭔지 다시 말씀드릴게요 + +00:08.840 --> 00:10.700 +지난주에 비트 얘기를 좀 했어요 + +00:10.730 --> 00:13.910 +하이퍼파라미터는 이 레버 중 하나예요 + +00:14.000 --> 00:19.250 +실험자로서 원하는 걸 선택할 수 있는 거예요 + +00:19.250 --> 00:24.740 +무엇이어야 하는지에 대한 어렵고 빠른 규칙은 없어요 hyperpaameter + +00:24.740 --> 00:32.570 +최적화라는 프로세스를 사용해야 합니다 다양한 값을 시도하고 당면한 작업에 뭐가 최선인지 알아보기 위해서요 + +00:32.570 --> 00:38.690 +사실 우리가 하는 일은 시행착오를 겪는 거예요 + +00:38.690 --> 00:44.690 +추측과 실험이 뒤따르는 작업이죠 비트를 한 방향으로만 설정해야 + +00:44.690 --> 00:47.570 +하는 이론적 이유가 없거든요 + +00:47.570 --> 00:50.000 +실질적인 실험의 문제죠 + +00:50.000 --> 00:56.240 +이런 경우 종종 발견하게 되는데 잘 작동하는 프로그램일 경우 몇 가지 제어 매개 + +00:56.240 --> 01:01.250 +변수가 있지만 어떻게 설정해야 할지 잘 모르는 경우가 있죠 + +01:01.280 --> 01:03.570 +어떤 형태가 돼야 할지 아직 이론적으로 몰라요 + +01:03.600 
--> 01:05.490 +hyperperameter라고 부르죠 + +01:05.610 --> 01:06.570 +그렇게 불러요 + +01:06.570 --> 01:11.940 +올바른 설정을 고를 때까지 시행착오와 추측의 세계에 있다는 뜻이죠 + +01:11.940 --> 01:13.590 +모델에게 가장 잘 어울려요 + +01:13.620 --> 01:17.550 +비트를 좀 단순화했지만 대충 이해하셨으면 좋겠네요 get it get it + +01:17.550 --> 01:21.180 +가장 중요한 세 가지가 있어요 + +01:21.180 --> 01:26.760 +Q 로라 파인 튜닝의 경우 이따금 소개해 드릴게요 + +01:26.760 --> 01:29.640 +그들과 함께 작업할 거예요 + +01:29.640 --> 01:33.720 +첫 번째는 R이라고 해요 직위를 뜻하죠 + +01:33.810 --> 01:41.940 +라마 건축물에서 낮은 계층의 행렬을 만들 때 사용하는 차원이 얼마나 + +01:41.940 --> 01:49.800 +되는지를 뜻합니다 내부 층의 차원은 100만 4천 차원에 달하죠 + +01:49.800 --> 01:55.020 +낮은 계급 행렬에서 훨씬 적은 수의 치수를 확보해 주세요 + +01:55.020 --> 01:56.640 +그게 그들의 의도예요 + +01:56.850 --> 02:03.600 +아까 말했듯이 어렵고 빠른 규칙은 없어요 이런 언어 생성 모델에서 + +02:03.600 --> 02:09.520 +작업할 땐 다양한 작업과 다양한 R 값을 찾죠 + +02:09.520 --> 02:14.740 +제가 항상 써먹던 경험 법칙이 하나 있는데 일반적으로 사람들이 쓰는 + +02:14.740 --> 02:18.910 +걸 보니 8부터 시작하는 것 같아요 작은 숫자죠 + +02:19.210 --> 02:24.940 +아주 적은 메모리를 사용하고 꽤 빨리 실행된다는 뜻이죠 + +02:25.120 --> 02:30.940 +다음엔 16으로 두 배 늘리죠 메모리를 더 잡아먹고 더 천천히 실행하면서 더 나은 결과를 + +02:30.940 --> 02:36.280 +얻는지 보고 잠재적으로 또 두 배가 될 때까지요 결과물이 감소하는 지점까지 도달할 + +02:36.280 --> 02:37.450 +때까지요 + +02:37.450 --> 02:40.690 +속도가 느려지고 시간이 더 오래 걸리는데 나아지는 게 보이지 않아요 + +02:40.690 --> 02:45.520 +높은 R을 갖는 건 의미가 없어요 이미 데이터를 위해 필요한 힘을 + +02:45.520 --> 02:47.170 +갖고 있으니까요 + +02:47.350 --> 02:51.880 +다음은 알파에 대해 얘기해 보죠 + +02:51.880 --> 02:56.890 +알파는 단순히 배율 요인이 두 배인 거예요 + +02:56.890 --> 03:01.120 +로라 A. 
로라 B 매트릭스에 적용했어요 + +03:01.120 --> 03:05.530 +그 무게로 모델의 무게를 바꾸죠 + +03:05.530 --> 03:10.710 +값어치의 공식은 목표 모듈에서 모델에서 무게 중심을 변경하는 + +03:10.740 --> 03:11.940 +양이에요 + +03:11.970 --> 03:16.830 +알파 곱하기 A 곱하기 B 곱하기예요 + +03:16.830 --> 03:18.630 +모두 함께 번성하죠 Get up, Get up + +03:18.630 --> 03:21.330 +알파가 클수록 효과도 크군요 + +03:21.330 --> 03:27.600 +실제로 사용되는 경험 법칙은∙∙∙ 거의 보편적으로 전 항상 사용했고 항상 + +03:27.600 --> 03:32.880 +이런 식으로 봤어요 예를 들어 알파를 R로 설정하는 거죠 + +03:32.910 --> 03:36.540 +R이 8로 시작하면 알파는 16이 되죠 + +03:36.570 --> 03:42.540 +16 알파 R은 32 32는 64가 되겠죠 + +03:42.540 --> 03:44.940 +경험으로 터득한 좋은 법칙이죠 + +03:44.940 --> 03:51.510 +물론 다양한 알파로 실험해볼 가치가 있죠 그게 여러분의 정확성을 바꿀지 + +03:51.540 --> 03:53.040 +보려면요 + +03:54.240 --> 04:02.160 +세 번째이자 마지막 핵심 하이퍼파라미터는 아키텍처에 적용하는 + +04:02.190 --> 04:08.640 +데 집중할 대상 모듈이 무엇인가 하는 거죠 + +04:08.640 --> 04:15.970 +이 중 어떤 레이어에 초점을 맞출지 선택하세요 일반적으로 가장 일반적인 선택은 + +04:15.970 --> 04:19.540 +주의 집중 레이어에 초점을 맞추는 거죠 + +04:19.540 --> 04:20.830 +흔한 일이에요 + +04:20.830 --> 04:23.920 +코드에서 보실 수 있어요 보시면 더 이해가 될 거예요 + +04:23.980 --> 04:28.990 +다른 목표 모듈을 목표로 삼고 싶을 때가 있죠 + +04:29.080 --> 04:35.800 +예를 들어, 여러분이 뭔가를 생성할 때 결과물이 완전히 다른 언어로 + +04:35.830 --> 04:41.110 +나오길 원한다면 최종 레이어 일부를 대상으로 하는 + +04:41.110 --> 04:43.780 +게 좋을 거예요 + +04:43.780 --> 04:49.210 +그래서 여러분이 보실 건 그게 어떻게 작동하는지에 대한 좀 더 많은 컨텍스트를 알려드리죠 + +04:49.210 --> 04:54.940 +하지만 일반적으로 가장 많이 사용되는 것은 주의 헤드 층이에요 + +04:54.940 --> 04:56.440 +그렇게 할 거예요 + +04:56.470 --> 04:59.470 +어떻게 설정됐는지 곧 보실 거예요 + +05:00.670 --> 05:06.760 +이제 구글 Colab으로 가서 몇 가지 모델을 살펴보고 로라에 관해 얘기해 보죠 + +05:06.760 --> 05:12.760 +그리고 hyperperameter 3개가 어떻게 작동하는지 볼게요 + +05:13.180 --> 05:14.260 +시작하죠 diff --git a/week5/community-contributions/subtitles/srts/59504887/en_US.srt b/week5/community-contributions/subtitles/srts/59504887/en_US.srt new file mode 100755 index 0000000..24a0860 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59504887/en_US.srt @@ -0,0 +1,391 @@ +WEBVTT + +00:00.560 --> 00:02.960 +Well, here we are again in Google Colab. 
+ +00:02.960 --> 00:06.230 +It's been a minute since we were here, and welcome back to it. + +00:06.290 --> 00:10.790 +Uh, this week we're going to spend our time here and it's going to be terrific. + +00:10.790 --> 00:12.680 +It's actually it's going to be the best week yet. + +00:12.680 --> 00:16.610 +I know I keep saying that, but this time it really, really is going to be peak peak. + +00:16.640 --> 00:21.740 +The only thing that's going to be better than what we do here is what's coming next week in week eight, + +00:21.740 --> 00:24.590 +which really, I mean, I can't wait to tell you about that. + +00:24.590 --> 00:27.860 +But stay focused, keep on week seven. + +00:27.890 --> 00:29.870 +There's a lot to go through here. + +00:29.870 --> 00:32.240 +So first we do. + +00:32.270 --> 00:35.660 +I've set up this week seven day one Colab. + +00:35.660 --> 00:37.730 +We start with some installs. + +00:37.850 --> 00:42.380 +Um, and one of the things we're installing is a new package you've not seen before, a hugging face + +00:42.380 --> 00:48.950 +library called Peft, which stands for parameter efficient fine tuning, parameter Efficient Fine Tuning, + +00:48.950 --> 00:52.460 +which is their name for the library that includes Laura. + +00:52.610 --> 00:54.080 +Uh, it's within this library. + +00:54.110 --> 00:55.220 +Pfft, pfft. + +00:55.220 --> 00:56.420 +Just rolls off the tongue. + +00:56.480 --> 00:57.830 +Um, so that is it. + +00:57.830 --> 01:08.180 +I am on a T4 box, which is the lowest of the GPU boxes, which just has 15, gigabytes of GPU Ram. + +01:08.210 --> 01:12.890 +You can see, most of which I'm already using up just a few cells into this. + +01:12.890 --> 01:15.170 +So anyway, we do our Pip installs. + +01:15.170 --> 01:20.000 +We do a bunch of imports here, uh, and set some constants. + +01:20.000 --> 01:24.800 +We're going to be working with a base model which is llama 3.18 billion. 
+ +01:24.800 --> 01:27.440 +And I'm also setting a fine tuned model here. + +01:27.440 --> 01:29.690 +I obviously we haven't done any fine tuning yet. + +01:29.690 --> 01:30.110 +I'm. + +01:30.140 --> 01:31.670 +This is from the future. + +01:31.790 --> 01:37.070 +Uh, I'm bringing this in just so I can show you what a fine tuning model looks like in terms of the + +01:37.070 --> 01:42.980 +the the Laura matrices applying to the target modules. + +01:43.160 --> 01:52.310 +And then here are three hyper parameters which now you're an expert on are which I am setting to 32. + +01:52.580 --> 01:56.690 +You probably remember I said start with eight and then go to 16 and then go to 32. + +01:56.720 --> 01:58.130 +Well I got to 32. + +01:58.130 --> 02:00.680 +Uh, and that was where I ended up. + +02:00.680 --> 02:03.650 +Uh, and so I'm doing 32 here. + +02:03.770 --> 02:06.980 +Uh, alpha is a rule of thumb, double R. + +02:07.010 --> 02:08.630 +So there it is at 64. + +02:08.630 --> 02:10.610 +And the target modules. + +02:10.610 --> 02:14.780 +So these are the four names of the layers that we are targeting. + +02:14.780 --> 02:17.420 +And you will see why shortly. + +02:17.420 --> 02:22.880 +And this is by far the most common, uh, setup for llama models. + +02:22.910 --> 02:27.590 +Other models may have different names of their layers, but yeah, you give the names of the layers + +02:27.590 --> 02:32.120 +that you will be targeting in this list that you assign to target modules. + +02:32.870 --> 02:33.530 +Okay. + +02:33.530 --> 02:38.990 +So next, this is some standard stuff that you've done a few times now to log in to Hugging Face. + +02:39.200 --> 02:43.760 +Um, and I've got the usual blurb that if you don't have hugging face account, but of course you have + +02:43.760 --> 02:45.290 +a hugging face account by now. 
+ +02:45.440 --> 02:51.680 +Uh, but you log in there, it's free, you get a token, and then you go to this section in the Colab, + +02:51.710 --> 02:59.270 +the, the key, and you use that to put in your, uh, your, your token, um, as you've done in the + +02:59.270 --> 02:59.570 +past. + +02:59.570 --> 03:03.500 +And then when you do that, you can run this cell and it will log in to hugging face. + +03:03.710 --> 03:06.260 +Uh, the alternative is you could just type in your token there. + +03:06.260 --> 03:09.440 +If you have any problems with accessing the notebook. + +03:09.680 --> 03:18.480 +Okay, so without further ado, I am going to read in the base model without any quantization. + +03:18.480 --> 03:19.440 +No funny business. + +03:19.470 --> 03:25.290 +We're just going to read in the entire llama 3.18 billion base model, remembering that is the smallest + +03:25.290 --> 03:26.940 +of the llama series. + +03:27.090 --> 03:33.120 +Uh, device map equals auto means use a GPU if you've got one which which this box does. + +03:33.150 --> 03:36.810 +And I'm not going to run this now because I just ran it and it took about five minutes. + +03:36.930 --> 03:40.830 +Uh, and it came up with a warning that it couldn't fit it all in GPUs. + +03:40.830 --> 03:42.600 +So some of it went on the CPU. + +03:42.600 --> 03:47.850 +And that's why if you look at the resources over on the right, you'll see that my GPU is 11 out of + +03:47.850 --> 03:53.880 +15 gigs are filled up, and almost 13 gigs of Ram are also filled up. + +03:53.880 --> 04:00.690 +So it's really taken up both the reason for the spike here is because I did it once, and then I restarted + +04:00.690 --> 04:01.710 +and did it again. + +04:01.710 --> 04:07.620 +Uh, obviously all you will see is the the one rise up to the top when you run this. + +04:07.710 --> 04:08.310 +Okay. + +04:08.340 --> 04:14.460 +And so now I'm going to print how much memory is this base model using up. 
+ +04:14.460 --> 04:18.990 +And again if you if you wanted to train this it would it would take many many more times. + +04:18.990 --> 04:25.440 +This this is just how much memory is the base model using up, and its memory footprint is just north + +04:25.440 --> 04:29.790 +of 32GB, 32GB of memory being used. + +04:29.790 --> 04:31.890 +And you may remember that's what we said earlier. + +04:31.890 --> 04:38.310 +It's basically 32 bit floats for each of the 8 billion parameters. + +04:39.120 --> 04:39.690 +Okay. + +04:39.690 --> 04:41.610 +So that's, uh, it's big. + +04:41.850 --> 04:43.770 +Uh, and let's just look at it. + +04:43.770 --> 04:47.340 +You can just take a look by printing the base model itself. + +04:47.340 --> 04:54.870 +And this now is a view on what it looks like, um, so briefly before, but we'll just pause for a moment. + +04:54.870 --> 04:59.700 +And again, this isn't going to be a deeply theoretical class, so I'm not going to do too much in the + +04:59.700 --> 05:02.310 +way of explaining this other than saying what you can. + +05:02.340 --> 05:07.770 +What is made clear when you look at the architecture of this neural network is that there are it consists + +05:07.770 --> 05:14.070 +of, first of all, an embedding layer, which is the thing that that takes text and turns it into it, + +05:14.070 --> 05:18.840 +embeds it into vectors in the in the neural network. + +05:18.840 --> 05:23.700 +So this is very it's like the, the encoder encoding LMS that we talked about before. + +05:23.730 --> 05:32.270 +The first layer is embedding tokens, um, into a, um, a vector. + +05:32.300 --> 05:36.740 +Uh, and in fact, that is the dimensionality of how many possible tokens we have. + +05:36.740 --> 05:40.130 +And this is the dimensionality of the embedded vectors. + +05:40.550 --> 05:46.820 +So there are then 32 layers called Lama decoder layers. + +05:46.850 --> 05:48.860 +32 sets of them. 
+ +05:48.860 --> 05:53.510 +And each of those 32 looks like all of this. + +05:53.960 --> 05:55.490 +Let's get that right to there. + +05:55.520 --> 06:02.810 +Um, and so you can you can go through this, but you can see that it consists of the the set of attention + +06:02.810 --> 06:08.180 +layers, which are called q, proj, k proj, v and o proj. + +06:08.180 --> 06:15.080 +And these these are the layers that we have targeted in our target modules, which is typically what + +06:15.080 --> 06:15.410 +you do. + +06:15.440 --> 06:19.190 +You can try others too, but this is the most common approach. + +06:19.190 --> 06:26.120 +And you'll see that, uh, some of these layers have 4000 odd dimensions in and out. + +06:26.330 --> 06:31.530 +Uh, this one and this one and some of them are 4000 in and and 1000 out. + +06:32.700 --> 06:36.030 +So they've got some different dimensionality there. + +06:36.030 --> 06:39.690 +And that will be somewhat relevant when we look at the lora A and Lora B. + +06:39.720 --> 06:45.330 +But I'm not going to get too deep into this, but this is yours to experiment with and read up on if + +06:45.330 --> 06:47.430 +you want more more information about it. + +06:47.670 --> 06:54.450 +There is then a multi-layer perceptron layer with something that, for example, the the up is something + +06:54.450 --> 07:00.090 +that explodes out the number of dimensions, and then the down then reduces down the number of dimensions. + +07:00.090 --> 07:03.150 +And that's followed by an activation function. + +07:03.150 --> 07:06.360 +Again, for people that are more familiar with this stuff. + +07:06.390 --> 07:12.540 +The activation function that's used for Lama is Selu, which you can see in the PyTorch documentation. + +07:12.540 --> 07:19.020 +You can look at it and learn more about what that is and and why it is used. + +07:19.410 --> 07:25.830 +Um, so that is then followed by by layer norm layers. 
+ +07:26.280 --> 07:33.110 +Um, and then at the very end there is a linear layer, the LM head. + +07:33.110 --> 07:34.910 +And this is sometimes targeted. + +07:34.910 --> 07:36.950 +This is sometimes added to target modules. + +07:36.950 --> 07:41.930 +As I mentioned before, in the cases where you wanted to generate something, where part of what you + +07:41.930 --> 07:47.120 +want it to learn is to generate results, that will take a different format of some sort. + +07:47.150 --> 07:52.700 +Maybe you want a particular structure of JSON, or maybe something completely different, like you want + +07:52.730 --> 07:57.830 +it to speak a different language, or you want it to to structure things in some, some very unique + +07:57.830 --> 07:58.490 +way. + +07:58.520 --> 08:02.210 +Then you might target this in your target modules as well. + +08:02.840 --> 08:05.540 +Um, but that gives you a sense of the architecture. + +08:05.540 --> 08:11.420 +And in a second, when we look at the Lora adapters, you'll see why I've taken a moment to dwell on + +08:11.420 --> 08:12.170 +this. + +08:12.470 --> 08:13.250 +All right. + +08:13.250 --> 08:15.020 +So we used up 32GB. + +08:15.050 --> 08:21.200 +The next thing we need to do is to restart this session by going to runtime and restart session to clear + +08:21.230 --> 08:23.120 +out the memory so we can keep going. + +08:23.480 --> 08:27.110 +There are some torch commands that will clear the cache, but in fact they're not aggressive enough. + +08:27.110 --> 08:31.070 +It still holds on to too much because we've consumed so much. + +08:31.100 --> 08:33.950 +The only way forwards now is to restart the session. + +08:33.950 --> 08:35.420 +So that's what I'll do. + +08:35.540 --> 08:40.430 +And I will see you in the next video once I have restarted and I'm back here again. 
diff --git a/week5/community-contributions/subtitles/srts/59504887/ja_JP.srt b/week5/community-contributions/subtitles/srts/59504887/ja_JP.srt new file mode 100755 index 0000000..3b56c80 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59504887/ja_JP.srt @@ -0,0 +1,352 @@ +WEBVTT + +00:00.560 --> 00:02.960 +さて、 グーグルコラボに再びやってきた。 + +00:02.960 --> 00:06.230 +ここに来るのは久しぶりだが、 おかえりなさい。 + +00:06.290 --> 00:10.790 +あー、 今週はここで過ごすことになるんだけど、 すごいことになりそうだよ。 + +00:10.790 --> 00:12.680 +実際、 これまでで最高の週になりそうだ。 + +00:12.680 --> 00:16.610 +何度も言うようだが、 今回は本当に、 ピークがピークになりそうだ。 + +00:16.640 --> 00:21.740 +ここでやっていることより良くなりそうなのは、 + +00:21.740 --> 00:24.590 +来週の第8週だ。 + +00:24.590 --> 00:27.860 +だが、 集中を切らさずに7週目を続けてくれ。 + +00:27.890 --> 00:29.870 +ここには通過しなければならないことがたくさんある。 + +00:29.870 --> 00:32.240 +だからまず、 そうしよう。 + +00:32.270 --> 00:35.660 +この週7日目のコラボをセットアップした。 + +00:35.660 --> 00:37.730 +まずはインストールから始めよう。 + +00:37.850 --> 00:42.380 +PeftはParameter Efficient + +00:42.380 --> 00:52.460 +Fine Tuningの略で、 ローラを含むライブラリの名前だ。 + +00:52.610 --> 00:54.080 +この図書館の中にあるんだ。 + +00:54.110 --> 00:55.220 +プッ、 プッ。 + +00:55.220 --> 00:56.420 +舌を巻く。 + +00:56.480 --> 00:57.830 +ええと、 それで終わりです。 + +00:57.830 --> 01:08.180 +私は、 GPUボックスの中で最も低いT4ボックスを使用しており、 15ギガバイトのGPUラムを搭載しているだけだ。 + +01:08.210 --> 01:12.890 +見てわかるように、 ほんの数セルを入れただけで、 そのほとんどをすでに使い切っている。 + +01:12.890 --> 01:15.170 +とにかく、 僕らはピップをインストールするんだ。 + +01:15.170 --> 01:20.000 +ここではたくさんのインポートを行い、 いくつかの定数を設定する。 + +01:20.000 --> 01:24.800 +ベースモデルはラマ3だ。 180億ドル + +01:24.800 --> 01:27.440 +そして、 私はここで微調整モデルも設定している。 + +01:27.440 --> 01:29.690 +まだ微調整はしていない。 + +01:29.690 --> 01:30.110 +私は。 + +01:30.140 --> 01:31.670 +これは未来から来たものだ。 + +01:31.790 --> 01:42.980 +ラウラ・マトリックスがターゲット・モジュールに適用されるファインチューニング・モデルがどのようなものかをお見せするために持ってきたんだ。 + +01:43.160 --> 01:52.310 +そして、 ここに3つのハイパーパラメーターがある。 + +01:52.580 --> 01:56.690 +8から始めて16、 32と言ったのを覚えているだろう。 + +01:56.720 --> 01:58.130 +32歳まで来たよ。 + +01:58.130 --> 02:00.680 +結局、 そこに行き着いたんだ。 + +02:00.680 --> 02:03.650 +それで、 ここで32をやっているんだ。 + 
+02:03.770 --> 02:06.980 +ええと、 アルファは経験則で、 ダブルRだ。 + +02:07.010 --> 02:08.630 +それで64点だ。 + +02:08.630 --> 02:10.610 +そしてターゲット・モジュール。 + +02:10.610 --> 02:14.780 +これが、 私たちがターゲットにしている4つのレイヤーの名前だ。 + +02:14.780 --> 02:17.420 +その理由はすぐにわかるだろう。 + +02:17.420 --> 02:22.880 +そして、 これがラマ・モデルの最も一般的なセットアップだ。 + +02:22.910 --> 02:27.590 +他のモデルではレイヤーの名前が違うかもしれないが、 そう、 ターゲットモジュールに割り当てるこのリストに、 + +02:27.590 --> 02:32.120 +ターゲットとするレイヤーの名前を与えるのだ。 + +02:32.870 --> 02:33.530 +オーケー。 + +02:33.530 --> 02:38.990 +次に、 これはハギング・フェイスにログインするために何度かやったことのある標準的なものだ。 + +02:39.200 --> 02:43.760 +ええと、 もしあなたがハグする顔のアカウントを持っていないなら、 もちろんあなたは今頃ハグする顔のアカウントを持っているでしょう、 + +02:43.760 --> 02:45.290 +といういつもの宣伝文句があります。 + +02:45.440 --> 02:51.680 +でも、 そこでログインして、 無料だし、 トークンももらえるし、 それからColabのこのセクションに行って、 + +02:51.710 --> 02:59.570 +キーというのを使って、 トークンを入れるんだ。 + +02:59.570 --> 03:03.500 +そうすれば、 このセルを走らせれば、 ハギング・フェイスにログインできる。 + +03:03.710 --> 03:06.260 +トークンを入力することもできる。 + +03:06.260 --> 03:09.440 +ノートブックへのアクセスに問題がある場合。 + +03:09.680 --> 03:18.480 +さて、 それでは早速、 ベースモデルを量子化せずに読み込んでみよう。 + +03:18.480 --> 03:19.440 +おかしなことはしない。 + +03:19.470 --> 03:26.940 +ラマ3世を全部読むだけだ。 180億円のベースモデルは、 リャマ・シリーズの最小モデルであることを忘れてはならない。 + +03:27.090 --> 03:33.120 +ええと、 デバイス・マップ・イコール自動というのは、 GPUがあればそれを使うということです。 + +03:33.150 --> 03:36.810 +そして、 私は今これを実行するつもりはない。 + +03:36.930 --> 03:40.830 +ええと、 GPUに全部は入りきらないという警告が出たんだ。 + +03:40.830 --> 03:42.600 +だから、 その一部がCPUに使われたんだ。 + +03:42.600 --> 03:47.850 +右側のリソースを見ると、 GPUは15ギガのうち11ギガが埋まっていて、 + +03:47.850 --> 03:53.880 +ラムも13ギガ近く埋まっている。 + +03:53.880 --> 04:01.710 +だから、 ここでの急上昇の理由は、 一度やって、 再開してまたやったからだ。 + +04:01.710 --> 04:07.620 +ええと、 これを実行すると、 明らかに1がトップに上がるのが見えるだけです。 + +04:07.710 --> 04:08.310 +オーケー。 + +04:08.340 --> 04:14.460 +では、 このベースモデルのメモリ使用量をプリントしてみよう。 + +04:14.460 --> 04:18.990 +そしてまた、 もしこれをトレーニングしようと思ったら、 何度も何度も何度も何度も何度も何度も何度も何度も何度も何度も何度も何度も何度も何度も何度も何度も何度も何度も何度も何度も。 + +04:18.990 --> 04:25.440 +これは、 ベースモデルがどれだけのメモリーを使用しているかということであり、 そのメモリーのフットプリントは32GBのすぐ北にあり、 + +04:25.440 --> 04:29.790 +32GBのメモリーが使用されている。 + 
+04:29.790 --> 04:31.890 +以前、 私たちがそう言ったことを覚えているだろう。 + +04:31.890 --> 04:38.310 +基本的には、 80億の各パラメーターに対して32ビットの浮動小数点数だ。 + +04:39.120 --> 04:39.690 +オーケー。 + +04:39.690 --> 04:41.610 +だから、 それは大きいんだ。 + +04:41.850 --> 04:43.770 +ええと、 ちょっと見てみましょう。 + +04:43.770 --> 04:47.340 +ベースモデルそのものをプリントして見ることができる。 + +04:47.340 --> 04:54.870 +そして、 これは今、 どんな風に見えるかというと、 うーん、 前に簡単に書いたけど、 ちょっと小休止。 + +04:54.870 --> 05:02.310 +繰り返しになるが、 これは深く理論的な授業になるつもりはないので、 できることを言う以外にはあまり説明するつもりはない。 + +05:02.340 --> 05:14.070 +このニューラルネットワークのアーキテクチャーを見れば明らかなように、 + +05:14.070 --> 05:18.840 +まず埋め込み層がある。 + +05:18.840 --> 05:23.700 +つまりこれは、 前に話したLMSをエンコードするエンコーダーのようなものだ。 + +05:23.730 --> 05:32.270 +最初の層は、 トークンumをベクトルumに埋め込む。 + +05:32.300 --> 05:36.740 +実際、 これが可能なトークンの数の次元なんだ。 + +05:36.740 --> 05:40.130 +そして、 これが埋め込まれたベクトルの次元数である。 + +05:40.550 --> 05:46.820 +そのため、 ラマ・デコーダー・レイヤーと呼ばれる32のレイヤーが存在する。 + +05:46.850 --> 05:48.860 +32セットだ。 + +05:48.860 --> 05:53.510 +そして、 その32人はそれぞれこのように見える。 + +05:53.960 --> 05:55.490 +それを正しく伝えよう。 + +05:55.520 --> 06:08.180 +このレイヤーは、 q、 proj、 k proj、 v、 o projと呼ばれるアテンション・レイヤーのセットで構成されています。 + +06:08.180 --> 06:15.410 +そしてこれらのレイヤーは、 ターゲット・モジュールでターゲットにしたレイヤーである。 + +06:15.440 --> 06:19.190 +他の方法も試すことができるが、 これが最も一般的な方法だ。 + +06:19.190 --> 06:26.120 +そして、 これらのレイヤーのいくつかは、 4000の奇妙な寸法が出たり入ったりしているのがわかるだろう。 + +06:26.330 --> 06:31.530 +ええと、 これとこれと、 いくつかは4000人入って1000人出ている。 + +06:32.700 --> 06:36.030 +だから、 彼らには次元の違うものがある。 + +06:36.030 --> 06:39.690 +そしてそれは、 ローラAとローラBを見るときに多少関係してくる。 + +06:39.720 --> 06:47.430 +しかし、 私はこのことについてあまり深入りするつもりはない。 + +06:47.670 --> 06:54.450 +そして多層パーセプトロン層があり、 例えば、 アップは次元数を爆発的に増やし、 + +06:54.450 --> 07:00.090 +ダウンは次元数を減らす。 + +07:00.090 --> 07:03.150 +そしてその後に活性化関数が続く。 + +07:03.150 --> 07:06.360 +繰り返しになるが、 こういうことに詳しい人たちのために。 + +07:06.390 --> 07:12.540 +Lamaに使われている活性化関数はSeluで、 PyTorchのドキュメントで見ることができる。 + +07:12.540 --> 07:19.020 +それを見て、 それが何なのか、 なぜ使われるのか、 もっと詳しく知ることができる。 + +07:19.410 --> 07:25.830 +ええと、 だから、 その後にレイヤー・ノルム・レイヤーが続く。 + +07:26.280 --> 07:33.110 
+そして一番最後にLMヘッドという直線的な層がある。 + +07:33.110 --> 07:34.910 +そして、 これは時に標的にされる。 + +07:34.910 --> 07:36.950 +これはターゲットモジュールに追加されることもある。 + +07:36.950 --> 07:41.930 +前にも述べたように、 何かを生成したい場合、 学習させたいことの一部が結果を生成することである場合、 + +07:41.930 --> 07:47.120 +それは何らかの別の形式を取ることになる。 + +07:47.150 --> 07:52.700 +JSONの特定の構造が必要かもしれないし、 まったく別のものが必要かもしれない。 たとえば、 + +07:52.730 --> 07:58.490 +別の言語を話させたいとか、 非常にユニークな方法で物事を構造化させたいとか。 + +07:58.520 --> 08:02.210 +それなら、 ターゲットとするモジュールでも、 これをターゲットにするかもしれない。 + +08:02.840 --> 08:05.540 +うーん、 でもこれで建築の雰囲気はわかるよね。 + +08:05.540 --> 08:12.170 +そしてすぐに、 ローラのアダプターを見れば、 なぜ私がこのことに時間を割いたのかがわかるだろう。 + +08:12.470 --> 08:13.250 +分かった。 + +08:13.250 --> 08:15.020 +それで32GBを使い切った。 + +08:15.050 --> 08:23.120 +次に必要なのは、 このセッションを再起動することだ。 ランタイムからセッションの再起動を実行し、 メモリーをクリアにして続行できるようにする。 + +08:23.480 --> 08:27.110 +キャッシュをクリアするトーチコマンドはいくつかあるが、 実際には十分な攻撃性はない。 + +08:27.110 --> 08:31.070 +私たちが多くのものを消費してきたせいで、 まだ多くのものを抱えている。 + +08:31.100 --> 08:33.950 +今はセッションを再開するしかない。 + +08:33.950 --> 08:35.420 +だから、 そうするつもりだ。 + +08:35.540 --> 08:40.430 +また次のビデオでお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/59504887/ko_KR.srt b/week5/community-contributions/subtitles/srts/59504887/ko_KR.srt new file mode 100755 index 0000000..5b73134 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59504887/ko_KR.srt @@ -0,0 +1,379 @@ +WEBVTT + +00:00.560 --> 00:02.960 +다시 구글 콜랩에 왔네요 + +00:02.960 --> 00:06.230 +여기 온 지 좀 됐는데 잘 돌아왔어요 + +00:06.290 --> 00:10.790 +이번 주에는 여기서 시간을 보낼 거예요 정말 멋질 거예요 + +00:10.790 --> 00:12.680 +최고의 한 주가 될 거예요 + +00:12.680 --> 00:16.610 +제가 계속 그렇게 말하지만 이번엔 정말 절정일 거예요 + +00:16.640 --> 00:21.740 +지금보다 더 나은 건 8주 차에 나올 다음 주 영상뿐이에요 + +00:21.740 --> 00:24.590 +빨리 보여드리고 싶네요 + +00:24.590 --> 00:27.860 +하지만 7주 차에 집중하세요 + +00:27.890 --> 00:29.870 +살펴볼 게 많아요 + +00:29.870 --> 00:32.240 +그럼 먼저 하죠 + +00:32.270 --> 00:35.660 +이번 주에는 콜랍을 7일간 준비했어요 + +00:35.660 --> 00:37.730 +설치부터 시작하죠 + +00:37.850 --> 00:42.380 +저희가 설치하는 것 중 하나는 여러분이 처음 보시는 새로운 패키지예요 + +00:42.380 --> 00:48.950 +얼굴을 
끌어안는 Pft라는 라이브러리인데 매개 변수 효율적인 미세 조정의 약자죠 매개 변수 효율 + +00:48.950 --> 00:52.460 +미세 조정 로라를 포함한 라이브러리의 이름이에요 + +00:52.610 --> 00:54.080 +이 라이브러리 안에 있어요 + +00:54.110 --> 00:55.220 +네 + +00:55.220 --> 00:56.420 +입에 착 붙네요 + +00:56.480 --> 00:57.830 +그게 다예요 + +00:57.830 --> 01:08.180 +T4 박스에 올라왔습니다 GPU 박스에서 가장 낮은 것으로 15기가바이트의 GPU 램이 있죠 + +01:08.210 --> 01:12.890 +보시다시피 대부분 이미 셀 몇 개를 사용하고 있죠 + +01:12.890 --> 01:15.170 +어쨌든 우리는 Pip 설치를 해요 + +01:15.170 --> 01:20.000 +수입품도 많이 만들고 상수도 설정했어요 + +01:20.000 --> 01:24.800 +기본 모델인 llama 3으로 작업할 거예요 180억 달러요 + +01:24.800 --> 01:27.440 +미세 튜닝 모델도 설정하고 있어요 + +01:27.440 --> 01:29.690 +아직 세부 조정은 안 했어요 + +01:29.690 --> 01:30.110 +네 + +01:30.140 --> 01:31.670 +미래에서 온 거예요 + +01:31.790 --> 01:37.070 +이걸 가져와서 미세한 조율 모델이 어떤지 보여 드릴게요 + +01:37.070 --> 01:42.980 +로라 행렬이 목표물 모듈에 적용되는 걸 보면요 + +01:43.160 --> 01:52.310 +여기 3개의 하이퍼 매개 변수가 있어요 이제 당신이 전문가니까 32로 설정할게요 + +01:52.580 --> 01:56.690 +8로 시작해서 16으로 가고 32로 가라고 한 거 기억하죠? + +01:56.720 --> 01:58.130 +32,000달러까지 올렸어요 + +01:58.130 --> 02:00.680 +그렇게 된 거예요 + +02:00.680 --> 02:03.650 +전 지금 32마일로 가고 있어요 + +02:03.770 --> 02:06.980 +알파는 경험에서 나온 거예요 + +02:07.010 --> 02:08.630 +64에 있네요 + +02:08.630 --> 02:10.610 +목표 모듈도요 + +02:10.610 --> 02:14.780 +이건 우리가 대상으로 하는 레이어 이름 4개예요 + +02:14.780 --> 02:17.420 +그 이유는 곧 알게 될 거예요 + +02:17.420 --> 02:22.880 +이게 가장 흔한 라마 모델이에요 + +02:22.910 --> 02:27.590 +다른 모델은 계층 이름이 다를 수도 있지만 네, 여러분이 대상으로 할 + +02:27.590 --> 02:32.120 +이 리스트에서 대상으로 할 계층 이름을 부여하죠 대상 모드에요 + +02:32.870 --> 02:33.530 +네 + +02:33.530 --> 02:38.990 +다음은 여러분이 이미 몇 번 했던 기본 기능이에요 페이스 포옹에 로그인하기 위해서요 + +02:39.200 --> 02:43.760 +포옹 얼굴 계정이 없으면 이런 문구가 뜨겠지만 지금쯤은 + +02:43.760 --> 02:45.290 +당연히 있겠죠 + +02:45.440 --> 02:51.680 +로그인하면 무료고 토큰을 받아요 그리고 Colab에 있는 + +02:51.710 --> 02:59.570 +이 구역으로 가서 키를 입력하고 토큰을 입력하면 돼요 전에도 해 봤잖아요 + +02:59.570 --> 03:03.500 +그렇게 하면 이 셀을 실행할 수 있어요 얼굴 안기로 로그인되죠 + +03:03.710 --> 03:06.260 +대안은 그냥 토큰을 입력하는 거죠 + +03:06.260 --> 03:09.440 +공책 접근에 문제가 있다면요 + +03:09.680 --> 03:18.480 +좋아요, 지체 없이 기본 
모델에서 읽겠습니다 양자화 없이요 + +03:18.480 --> 03:19.440 +허튼짓하지 말고요 + +03:19.470 --> 03:25.290 +전체 라마를 읽어볼게요 180억 기본 모델이죠 라마 시리즈 + +03:25.290 --> 03:26.940 +중 가장 작아요 + +03:27.090 --> 03:33.120 +장치 맵은 자동이란 뜻입니다 이 박스와 같은 GPU를 사용하라는 거죠 + +03:33.150 --> 03:36.810 +지금 실행하진 않겠습니다 방금 실행했는데 5분 정도 걸렸거든요 + +03:36.930 --> 03:40.830 +GPU에 다 넣을 수 없다는 경고가 떴어요 + +03:40.830 --> 03:42.600 +그래서 일부는 CPU로 갔죠 + +03:42.600 --> 03:47.850 +오른쪽 리소스를 보시면 GPU 15기가 중 11기가가 + +03:47.850 --> 03:53.880 +차 있는 걸 보실 수 있습니다 거의 13기가 램도 차 있고요 + +03:53.880 --> 04:01.710 +여기 스파이크가 있는 이유는 두 가지 다 해당합니다 한 번 하고 다시 시작했거든요 + +04:01.710 --> 04:07.620 +이걸 실행하면 제일 위에 있는 것만 보일 거예요 + +04:07.710 --> 04:08.310 +네 + +04:08.340 --> 04:14.460 +이제 이 기본 모델이 얼마나 많은 메모리를 사용하는지 프린트할게요 + +04:14.460 --> 04:18.990 +다시 말하지만 훈련하려면 훨씬 더 많이 해야 해요 + +04:18.990 --> 04:25.440 +이건 기본 모델이 얼마나 많은 메모리를 사용하는지 보여줍니다 메모리 공간은 + +04:25.440 --> 04:29.790 +32GB 북쪽 32GB 메모리가 사용되고 있어요 + +04:29.790 --> 04:31.890 +아까 했던 말 기억하시죠? + +04:31.890 --> 04:38.310 +80억 매개 변수에 대한 32 비트 플로트죠 + +04:39.120 --> 04:39.690 +네 + +04:39.690 --> 04:41.610 +정말 큰 변화죠 + +04:41.850 --> 04:43.770 +어디 한번 볼까요? 
+ +04:43.770 --> 04:47.340 +기본 모델 자체를 프린트해서 살펴볼 수 있어요 + +04:47.340 --> 04:54.870 +지금 보시는 건 어떻게 생겼는지에 대한 건데요 아주 간략하지만 잠시 멈춰보죠 + +04:54.870 --> 04:59.700 +다시 말씀드리지만 이론만 따지는 수업이 아니므로 제가 길게 설명하지 + +04:59.700 --> 05:02.310 +않고 있는 그대로 말씀드릴게요 + +05:02.340 --> 05:07.770 +이 신경망 구조를 보면 명확히 알 수 있는 건 내장 + +05:07.770 --> 05:14.070 +레이어로 구성돼 있다는 겁니다 텍스트를 취해 변환해 + +05:14.070 --> 05:18.840 +신경망 내 벡터에 내장하는 거죠 + +05:18.840 --> 05:23.700 +이건 아주∙∙∙ 아까 얘기했던 인코딩 LMS 같은 거죠 + +05:23.730 --> 05:32.270 +첫 번째 레이어는 토큰을 벡터로 넣는 거예요 + +05:32.300 --> 05:36.740 +사실, 가능한 기념품이 얼마나 많은지 그게 차원이에요 + +05:36.740 --> 05:40.130 +이건 내부 벡터의 입체성이에요 + +05:40.550 --> 05:46.820 +라마 디코더라는 층이 32개 있어요 + +05:46.850 --> 05:48.860 +32세트요 + +05:48.860 --> 05:53.510 +32개 모두 이런 모양이에요 + +05:53.960 --> 05:55.490 +Get it을 하죠 + +05:55.520 --> 06:02.810 +이 부분을 살펴보면 주의력 레이어 세트로 구성된 걸 볼 수 있어요 q, proj, + +06:02.810 --> 06:08.180 +k proj, v, o proj라고 하죠 + +06:08.180 --> 06:15.410 +이건 대상 모듈에서 우리가 대상으로 하는 층입니다 여러분이 주로 하는 거죠 + +06:15.440 --> 06:19.190 +다른 방법도 있지만 이게 가장 일반적인 방법이에요 + +06:19.190 --> 06:26.120 +어떤 층은 4천여 개의 차원이 들락날락해요 + +06:26.330 --> 06:31.530 +이것과 이것, 어떤 건 4천 개, 1천 개예요 + +06:32.700 --> 06:36.030 +그래서 입체감이 좀 달라요 + +06:36.030 --> 06:39.690 +로라 A와 로라 B를 볼 때 어느 정도 관련이 있죠 + +06:39.720 --> 06:45.330 +너무 깊이 들어가진 않겠지만 여러분이 실험하고 읽어보세요 더 많은 정보가 필요하다면요 + +06:45.330 --> 06:47.430 +get it get it + +06:47.670 --> 06:54.450 +다중 인터셉트론 레이어도 있는데 예를 들어 위는 차원의 개수를 폭발적으로 + +06:54.450 --> 07:00.090 +늘리고 아래는 차원의 개수를 줄이는 거죠 + +07:00.090 --> 07:03.150 +그리고 활성화 함수가 뒤따르죠 + +07:03.150 --> 07:06.360 +이런 것에 익숙한 분들을 위해서요 + +07:06.390 --> 07:12.540 +라마에 사용되는 활성화 함수는 셀루예요 피토크 문서화에서 보실 수 있죠 + +07:12.540 --> 07:19.020 +그걸 보면 그게 무엇이고 왜 사용되는지 더 알 수 있죠 + +07:19.410 --> 07:25.830 +그 뒤에 레이어 보통 레이어가 있어요 + +07:26.280 --> 07:33.110 +그리고 맨 끝에는 LM 헤드라는 직선 층이 있어요 + +07:33.110 --> 07:34.910 +특정 목표를 노리기도 하죠 + +07:34.910 --> 07:36.950 +대상 모듈에 추가되기도 하죠 + +07:36.950 --> 07:41.930 +전에 말씀드렸듯이 뭔가를 생성하고자 하는 경우 결과를 생성하는 + +07:41.930 --> 07:47.120 +걸 배우길 원하는 경우 그건 일종의 다른 포맷을 
갖죠 + +07:47.150 --> 07:52.700 +JSON의 특정 구조를 원할 수도 있고 완전히 다른 걸 원할 수도 있죠 다른 + +07:52.730 --> 07:58.490 +언어를 사용하길 원하거나 아주 독특한 방식으로 구조를 원하거나요 + +07:58.520 --> 08:02.210 +그럼 이것도 대상 모듈에 대상으로 할 수 있죠 + +08:02.840 --> 08:05.540 +그러면 건축물이 어떤 느낌인지 알 수 있죠 + +08:05.540 --> 08:11.420 +잠시 후 로라 어댑터를 보면 제가 왜 이 얘기를 계속하는지 알게 될 + +08:11.420 --> 08:12.170 +거예요 + +08:12.470 --> 08:13.250 +좋아요 + +08:13.250 --> 08:15.020 +32GB를 썼어요 + +08:15.050 --> 08:21.200 +다음으로 할 일은 이 세션을 다시 시작하는 겁니다 런타임으로 가서요 세션을 다시 시작해 메모리를 + +08:21.230 --> 08:23.120 +지우고 계속 진행해야죠 + +08:23.480 --> 08:27.110 +캐시를 비우라는 명령이 있었지만 그렇게 적극적이지 않았어요 + +08:27.110 --> 08:31.070 +너무 많이 소비해서 아직도 너무 오래 붙어 있어요 + +08:31.100 --> 08:33.950 +이제 유일한 방법은 세션을 다시 시작하는 거예요 + +08:33.950 --> 08:35.420 +그렇게 할 거예요 + +08:35.540 --> 08:40.430 +다음 비디오에서 다시 시작하면 다시 여기로 돌아오죠 diff --git a/week5/community-contributions/subtitles/srts/59505329/en_US.srt b/week5/community-contributions/subtitles/srts/59505329/en_US.srt new file mode 100755 index 0000000..8bb69df --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59505329/en_US.srt @@ -0,0 +1,160 @@ +WEBVTT + +00:00.920 --> 00:02.120 +Welcome back. + +00:02.150 --> 00:06.920 +You may, like me, have just gone off and got a coffee while things loaded back up again. + +00:07.220 --> 00:09.140 +And back here. + +00:09.350 --> 00:14.960 +Uh, after the restart your session box, you needed to have come back and rerun the. + +00:14.990 --> 00:17.660 +You don't need to rerun the installs, but you need to rerun the imports. + +00:17.660 --> 00:20.270 +The constants log in to hugging face again. + +00:20.300 --> 00:21.740 +Do not run this. + +00:21.740 --> 00:24.800 +Otherwise we'll be back where we were with a big model in memory. + +00:24.830 --> 00:27.470 +Uh, skip back to restart your session. + +00:27.470 --> 00:31.520 +And here we are loading the base model using eight bit. + +00:31.520 --> 00:33.770 +So again, you've seen this before. + +00:33.770 --> 00:35.060 +We did briefly. 
+ +00:35.060 --> 00:39.080 +Uh, at least have this in the code if I didn't mention much about it. + +00:39.350 --> 00:45.890 +Um, and it uses this wonderfully named there's a package called Bits and Bytes, uh, which is a great + +00:45.890 --> 00:47.090 +name for a package. + +00:47.210 --> 00:52.850 +Uh, and you create a class called Bits and Bytes config, which is where you specify, uh, what kind + +00:52.880 --> 00:58.310 +of quantization config you want, how, how much you want to reduce the precision of your model. + +00:58.310 --> 01:01.940 +And we are saying load in eight bit is true. + +01:02.060 --> 01:07.520 +Uh, and then you when you load the base model, you pass that in as a parameter. + +01:07.520 --> 01:11.000 +As I said, we did do this in the past, I think, but we didn't talk much about it. + +01:11.420 --> 01:20.660 +Um, so this is how we load in the llama 3.1 base model, with the precision reduced from 32 bits down + +01:20.660 --> 01:21.980 +to eight bits. + +01:22.250 --> 01:26.300 +Um, so I've run that because it takes a couple of minutes. + +01:26.330 --> 01:30.230 +And now let's look at the memory footprint that that's using up. + +01:30.260 --> 01:34.730 +It's using up nine gigabytes just over nine gigs. + +01:34.790 --> 01:40.820 +Uh, as you can see, if you look over here at the resources that we're using up nine gigs and we're + +01:40.820 --> 01:44.090 +no longer having to spill into system memory. + +01:44.300 --> 01:48.320 +Uh, and let's have a look at that base model's architecture. + +01:49.130 --> 01:54.350 +So here then, is the model architecture after quantization. + +01:54.740 --> 01:59.030 +Um, and now let me pause for a bit while, uh, to, for you to spot the difference. + +01:59.060 --> 02:01.910 +Let's see what's changed in the architecture. + +02:01.940 --> 02:09.620 +Uh, now that we've quantized, I'll give you just a second to identify that challenge for you. + +02:09.740 --> 02:11.330 +And your time is up. 
+ +02:11.330 --> 02:14.690 +And, of course, you know that there's no difference in architecture. + +02:14.720 --> 02:15.770 +Trick question. + +02:16.040 --> 02:17.120 +Oh, don't hate me. + +02:17.300 --> 02:19.640 +Uh, so, yeah, there's no difference in architecture. + +02:19.640 --> 02:24.520 +This is identical because the whole idea of quantization is that whilst we've reduced the precision + +02:24.550 --> 02:29.020 +of each of the numbers, each of the weights in the model, we haven't actually changed the architecture + +02:29.020 --> 02:30.160 +of the model at all. + +02:30.340 --> 02:35.470 +And somewhat surprisingly, it's not going to have a massive effect on the performance of the model. + +02:35.470 --> 02:42.010 +So hopefully unless I've made some horrible gaffe, this architecture is identical to the one above. + +02:42.040 --> 02:44.290 +You can just confirm and double check. + +02:44.320 --> 02:51.850 +I haven't made any terrible mistake, but that's that is the whole idea of quantization. + +02:51.850 --> 03:04.900 +We've got from 32GB down to nine gigabytes without, it seems, any, um, uh, well, we haven't yet + +03:04.900 --> 03:09.370 +seen, but without without without significant consequences. + +03:09.370 --> 03:10.810 +There will be some consequences. + +03:10.810 --> 03:13.930 +Just not as big as you might expect. + +03:14.620 --> 03:17.830 +Uh, so we will have to restart our sessions once more. + +03:17.830 --> 03:23.920 +Uh, so for the final time, if I could ask you to restart your session as before, uh, runtime restart + +03:23.920 --> 03:26.410 +session and then meet me back here again. + +03:26.440 --> 03:33.010 +Uh, after this point, once you've run the imports, the constants, uh, and logged into huggingface + +03:33.010 --> 03:35.080 +again, I will see you in one second. 
diff --git a/week5/community-contributions/subtitles/srts/59505329/ja_JP.srt b/week5/community-contributions/subtitles/srts/59505329/ja_JP.srt new file mode 100755 index 0000000..0bdedf8 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59505329/ja_JP.srt @@ -0,0 +1,142 @@ +WEBVTT + +00:00.920 --> 00:02.120 +お帰りなさい。 + +00:02.150 --> 00:06.920 +私と同じように、 再びロードを再開する間、 コーヒーを飲みに行ったかもしれない。 + +00:07.220 --> 00:09.140 +そしてここに戻ってきた。 + +00:09.350 --> 00:14.960 +ええと、 セッションを再開した後、 もう一度セッションを再開する必要がありました。 + +00:14.990 --> 00:17.660 +インストールを再実行する必要はないが、 インポートを再実行する必要がある。 + +00:17.660 --> 00:20.270 +定数は再び抱き合う顔にログインする。 + +00:20.300 --> 00:21.740 +これを実行してはならない。 + +00:21.740 --> 00:24.800 +そうでなければ、 記憶の中のビッグモデルで元の状態に戻ってしまうだろう。 + +00:24.830 --> 00:27.470 +ええと、 スキップしてセッションを再開してください。 + +00:27.470 --> 00:31.520 +そしてここでは、 8ビットを使ってベースモデルをロードしている。 + +00:31.520 --> 00:33.770 +だから、 前にも見たことがあるだろう。 + +00:33.770 --> 00:35.060 +短い間だった。 + +00:35.060 --> 00:39.080 +ええと、 もし私がそれについてあまり触れなかったとしても、 少なくともコードにはこのことが書かれているはずだ。 + +00:39.350 --> 00:47.090 +そして、 Bits and Bytesという素晴らしい名前のパッケージを使っている。 + +00:47.210 --> 00:52.850 +そして、 Bits and Bytes configというクラスを作り、 そこで、 どのような量子化コンフィギュレーションにするか、 + +00:52.880 --> 00:58.310 +モデルの精度をどの程度下げたいかを指定する。 + +00:58.310 --> 01:01.940 +そして、 私たちは8ビットでのロードを真実だと言っている。 + +01:02.060 --> 01:07.520 +そして、 ベースモデルをロードするときに、 それをパラメータとして渡すんだ。 + +01:07.520 --> 01:11.000 +さっきも言ったように、 私たちは過去にこのようなことをやっていたと思うが、 それについてはあまり話さなかった。 + +01:11.420 --> 01:21.980 +ええと、 こうしてラマ3をロードするんだ。 1ベースモデルで、 精度を32ビットから8ビットに下げた。 + +01:22.250 --> 01:26.300 +ええと、 2、 3分で終わるから、 それを実行したんだ。 + +01:26.330 --> 01:30.230 +次に、 そのメモリフットプリントを見てみよう。 + +01:30.260 --> 01:34.730 +9ギガバイト強を使っている。 + +01:34.790 --> 01:44.090 +リソースを見てもらえばわかると思うが、 9ギガを使い切り、 システム・メモリに注ぎ込む必要がなくなった。 + +01:44.300 --> 01:48.320 +ベースモデルのアーキテクチャーを見てみよう。 + +01:49.130 --> 01:54.350 +これが量子化後のモデル・アーキテクチャだ。 + +01:54.740 --> 01:59.030 +ええと、 ここでちょっと間を置かせてください。 + +01:59.060 --> 02:01.910 +アーキテクチャーで何が変わったか見てみよう。 + +02:01.940 --> 02:09.620 +ええと、 
量子化したところで、 ちょっとだけ時間をあげよう。 + +02:09.740 --> 02:11.330 +時間切れだ。 + +02:11.330 --> 02:14.690 +そしてもちろん、 建築に違いがないこともご存じだろう。 + +02:14.720 --> 02:15.770 +トリック・クエスチョン。 + +02:16.040 --> 02:17.120 +私を嫌いにならないで。 + +02:17.300 --> 02:19.640 +ええと、 そうだね、 建築に違いはないよ。 + +02:19.640 --> 02:24.520 +というのも、 量子化の全体的な考え方は、 モデルの各数値、 各重みの精度を下げる一方で、 + +02:24.550 --> 02:30.160 +実際にはモデルのアーキテクチャはまったく変えていないからだ。 + +02:30.340 --> 02:35.470 +そして少々意外なことに、 それはモデルの性能に大きな影響を与えることはない。 + +02:35.470 --> 02:42.010 +だから、 私が何かひどい失言をしない限り、 この建築は上のものと同じであることを願う。 + +02:42.040 --> 02:44.290 +確認とダブルチェックをすればいい。 + +02:44.320 --> 02:51.850 +ひどいミスはしていないけど、 それが量子化の考え方なんだ。 + +02:51.850 --> 03:09.370 +32ギガバイトから9ギガバイトまで、 何も、 うーん、 うーん、 まあ、 まだ見ていないけれども、 大きな影響もなく手に入れたようだ。 + +03:09.370 --> 03:10.810 +何らかの影響はあるだろう。 + +03:10.810 --> 03:13.930 +ただ、 期待するほど大きくはない。 + +03:14.620 --> 03:17.830 +だから、 もう一度セッションをやり直さなければならない。 + +03:17.830 --> 03:26.410 +ええと、 では最後に、 セッションを以前と同じように再スタートさせてください。 + +03:26.440 --> 03:33.010 +この後、 インポート、 定数を実行し、 huggingfaceに再ログインしたら、 + +03:33.010 --> 03:35.080 +1秒後に会おう。 diff --git a/week5/community-contributions/subtitles/srts/59505329/ko_KR.srt b/week5/community-contributions/subtitles/srts/59505329/ko_KR.srt new file mode 100755 index 0000000..cdb430b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59505329/ko_KR.srt @@ -0,0 +1,157 @@ +WEBVTT + +00:00.920 --> 00:02.120 +잘 돌아왔어요 + +00:02.150 --> 00:06.920 +여러분도 저처럼 커피를 마시러 갔을지도 모릅니다 다시 짐을 싣는 동안에요 + +00:07.220 --> 00:09.140 +여기도요 + +00:09.350 --> 00:14.960 +세션 박스를 재시작한 후 다시 돌아와서 재시작해야 했죠 + +00:14.990 --> 00:17.660 +설치한 건 재실행할 필요 없고 수입한 건 재실행해야 해요 + +00:17.660 --> 00:20.270 +상수는 얼굴을 다시 안는 데 로그인하죠 + +00:20.300 --> 00:21.740 +실행하지 마세요 + +00:21.740 --> 00:24.800 +안 그러면 메모리 모델로 되돌아가는 거예요 + +00:24.830 --> 00:27.470 +다시 세션을 시작하도록 하죠 + +00:27.470 --> 00:31.520 +8 비트를 이용해 기본 모델을 로딩하고 있어요 + +00:31.520 --> 00:33.770 +전에도 보신 적 있죠 + +00:33.770 --> 00:35.060 +잠깐요 + +00:35.060 --> 00:39.080 +제가 말 안 했으면 코드에 적어뒀을 텐데요 + +00:39.350 --> 00:45.890 +이름도 참 잘 지었어요 비트 앤 
바이트라는 패키지인데 패키지 이름으로 + +00:45.890 --> 00:47.090 +딱이죠 + +00:47.210 --> 00:52.850 +Bits&Bite config라는 클래스를 생성하는데 여기서 명시하는 거죠 + +00:52.880 --> 00:58.310 +어떤 종류의 수량화 구성을 원하는지 모델의 정밀도를 얼마나 줄이고 싶은지를요 + +00:58.310 --> 01:01.940 +8비트로 로드하는 게 참이죠 + +01:02.060 --> 01:07.520 +그리고 베이스 모델을 로드할 때 그걸 매개 변수로 넘겨요 + +01:07.520 --> 01:11.000 +전에도 이런 적이 있지만 얘기를 많이 하진 않았어요 + +01:11.420 --> 01:20.660 +라마 3을 이렇게 싣는 거예요 기본 모델 1개를 32개에서 8개로 정밀하게 + +01:20.660 --> 01:21.980 +줄였어요 + +01:22.250 --> 01:26.300 +몇 분 정도 걸리니까 그렇게 했어요 + +01:26.330 --> 01:30.230 +이제 메모리 발자국을 보죠 + +01:30.260 --> 01:34.730 +9기가바이트를 사용하고 있어요. 9기가를 조금 넘어요. + +01:34.790 --> 01:40.820 +보다시피 여기 리소스를 보시면 9기가를 사용하고 있고 시스템 + +01:40.820 --> 01:44.090 +메모리로 쏟아질 필요가 없어요 + +01:44.300 --> 01:48.320 +기본 모델의 아키텍처를 살펴보죠 + +01:49.130 --> 01:54.350 +여기 퀀타이즈 후의 모델 아키텍처가 있네요 + +01:54.740 --> 01:59.030 +차이점을 파악할 수 있게 잠시 비트를 멈출게요 + +01:59.060 --> 02:01.910 +아키텍처에서 무엇이 바뀌었는지 보죠 + +02:01.940 --> 02:09.620 +이제 퀀타이즈가 됐으니 도전 과제를 확인할 시간을 1초 드릴게요 + +02:09.740 --> 02:11.330 +시간 다 됐어요 + +02:11.330 --> 02:14.690 +물론 건축은 차이가 없어요 + +02:14.720 --> 02:15.770 +함정 질문이에요 + +02:16.040 --> 02:17.120 +미워하지 마세요 + +02:17.300 --> 02:19.640 +네, 건축학적으로 다른 점은 없어요 + +02:19.640 --> 02:24.520 +동일합니다 퀀타이즈의 전체적인 원리는 모델에서 각 숫자와 + +02:24.550 --> 02:30.160 +무게의 정밀도를 낮췄을 뿐 모델의 구조를 바꾸진 않았으니까요 + +02:30.340 --> 02:35.470 +그리고 좀 놀랍게도 모델의 성능에 큰 영향을 주진 않아요 + +02:35.470 --> 02:42.010 +제가 실수를 한 게 아니라면 이 구조는 위층과 똑같을 거예요 + +02:42.040 --> 02:44.290 +확인하고 다시 확인하세요 + +02:44.320 --> 02:51.850 +큰 실수는 안 했지만 그게 퀀타이즈의 핵심이에요 + +02:51.850 --> 03:04.900 +32GB에서 9기가바이트로 내려갔는데, 이건 마치 아직 본 적이 없지만, 중요한 + +03:04.900 --> 03:09.370 +결과가 없는 것 같아요. 
+ +03:09.370 --> 03:10.810 +대가가 따를 거예요 + +03:10.810 --> 03:13.930 +생각만큼 크진 않지만요 + +03:14.620 --> 03:17.830 +그래서 세션을 다시 시작해야 할 것 같아요 + +03:17.830 --> 03:23.920 +마지막으로 세션을 다시 시작하도록 요청할게요 런타임 세션 다시 시작이요 + +03:23.920 --> 03:26.410 +그리고 여기서 다시 만나요 + +03:26.440 --> 03:33.010 +이 시점 이후 수입, 상수를 실행하고 포옹페이스에 다시 로그인하면 + +03:33.010 --> 03:35.080 +1초 후에 만나요 diff --git a/week5/community-contributions/subtitles/srts/59505337/en_US.srt b/week5/community-contributions/subtitles/srts/59505337/en_US.srt new file mode 100755 index 0000000..48265a3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59505337/en_US.srt @@ -0,0 +1,160 @@ +WEBVTT + +00:00.620 --> 00:07.340 +So we're now going to look at four bit quantization, the rather remarkable effect of reducing the precision + +00:07.340 --> 00:08.330 +all the way down. + +00:08.330 --> 00:13.460 +So very similar to before we create a quant config using bits and bytes. + +00:13.700 --> 00:17.150 +But this time we asked to load in four bit equals. + +00:17.150 --> 00:17.750 +True. + +00:17.780 --> 00:19.370 +There are some other settings here too. + +00:19.370 --> 00:24.710 +This time there's one called use double Quant, which again is slightly mysterious. + +00:24.710 --> 00:30.710 +The idea here is that it does a pass through quantizing all of the weights, and then it does a second + +00:30.710 --> 00:32.270 +pass through again. + +00:32.270 --> 00:36.830 +And in doing so it's able to reduce memory by I think about 10 to 20% more. + +00:36.860 --> 00:38.990 +It squeezes a bit more out of this. + +00:39.080 --> 00:46.100 +And this is, uh, experimentally shown to make it very, very small difference to the power of the + +00:46.100 --> 00:46.940 +neural network. + +00:46.940 --> 00:48.470 +So it's almost a freebie. + +00:48.500 --> 00:51.020 +It's again cake and eat it situation. + +00:51.020 --> 00:54.980 +So recommended to use Double Quant as true. 
+ +00:55.130 --> 01:01.280 +This here compute dtype is about the data type that's used during computation. + +01:01.490 --> 01:06.350 +And there generally you could work with 32 bit floats here. + +01:06.350 --> 01:14.240 +But using the Bfloat16 data type binary float 16 is seen as something which improves the speed of training + +01:14.240 --> 01:20.480 +and makes it faster, with only a tiny sacrifice to quality of the of the training. + +01:20.630 --> 01:26.450 +Certainly when I've tried this, I've seen it run faster and I've not been able to detect any actual + +01:26.450 --> 01:30.470 +change in the rate of of of optimization. + +01:30.710 --> 01:32.750 +So this is recommended for sure. + +01:32.750 --> 01:35.750 +But again, it's a hyper parameter that you can experiment with. + +01:35.750 --> 01:40.220 +And then this here the the four bit quant type. + +01:40.220 --> 01:45.800 +This is saying when we reduce the precision down to a four bit number, how should we interpret that + +01:45.800 --> 01:46.730 +four bit number. + +01:46.760 --> 01:54.350 +You might think okay, so if it's four bits 0000 through to 1111, then that represents an integer from + +01:54.350 --> 01:55.790 +0 to 15. + +01:55.820 --> 01:57.800 +That might be one way of doing it. + +01:57.830 --> 02:02.690 +Um it's more common to interpret it, map it to a sort of a floating point number. + +02:02.810 --> 02:08.750 +Um, and this nf4 approach maps it to something which has a normal distribution to it. + +02:08.780 --> 02:11.540 +And so again, this is very common setting. + +02:11.540 --> 02:13.550 +Uh, it's what I've used. + +02:13.580 --> 02:16.010 +I tried something else and it wasn't as good. + +02:16.040 --> 02:18.620 +So so this is the generally the recommended one. + +02:18.620 --> 02:23.090 +But it is a hyper parameter which means it is yours for trial and error. 
+ +02:23.600 --> 02:31.370 +With that in mind, we create this quant config, which is very standard quant config for four bit quantization. + +02:31.370 --> 02:38.570 +And we create a base model with that which I have done and will now print the memory footprint. + +02:38.570 --> 02:43.010 +And remarkably, we are down to 5.6GB. + +02:43.040 --> 02:48.770 +You may have already spotted that over here in my resources, but when you remember that the base model, + +02:48.770 --> 02:54.470 +the real thing was 32GB in size, it's really come a long way down. + +02:54.470 --> 03:01.580 +So this is now something which will comfortably fit in our GPU's memory for this cheap T4 box. + +03:01.580 --> 03:04.940 +And if we look at the base model, we'll see the architecture. + +03:04.970 --> 03:07.580 +I'm not going to try and make that stupid joke again. + +03:07.670 --> 03:15.920 +It is, of course, identical to the architecture of the original, beefier 8 billion llama model. + +03:16.280 --> 03:22.130 +Just that within this deep within this, the precision of the weights is lower. + +03:22.130 --> 03:23.150 +It's four bit. + +03:23.960 --> 03:24.740 +Okay. + +03:24.740 --> 03:28.160 +In the next video, at this point, you should not restart your session. + +03:28.160 --> 03:34.490 +We need to keep this session as it is, and in the next video we're going to go in and load in our example + +03:34.490 --> 03:40.160 +of a fine tuned model and see how the Laura adaptations apply to this architecture. + +03:40.190 --> 03:41.030 +I'll see you there. 
diff --git a/week5/community-contributions/subtitles/srts/59505337/ja_JP.srt b/week5/community-contributions/subtitles/srts/59505337/ja_JP.srt new file mode 100755 index 0000000..a09c67f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59505337/ja_JP.srt @@ -0,0 +1,142 @@ +WEBVTT + +00:00.620 --> 00:08.330 +そこで今回は、 4ビットの量子化について見ていくことにする。 + +00:08.330 --> 00:13.460 +つまり、 ビットとバイトを使ってクオンツのコンフィグを作成する前とよく似ている。 + +00:13.700 --> 00:17.150 +しかし、 今回は4ビット・イコールでロードするように頼んだ。 + +00:17.150 --> 00:17.750 +その通りだ。 + +00:17.780 --> 00:19.370 +他にもいくつかの設定がある。 + +00:19.370 --> 00:24.710 +今回は "use double Quant "という、 これまたちょっと不思議なものがある。 + +00:24.710 --> 00:32.270 +ここでの考え方は、 すべてのウエイトを量子化するパススルーを行い、 その後、 もう一度パススルーを行うというものだ。 + +00:32.270 --> 00:36.830 +そうすることで、 さらに10~20%ほどメモリを減らすことができるんだ。 + +00:36.860 --> 00:38.990 +ここからもう少し絞り出す。 + +00:39.080 --> 00:46.940 +そしてこれは、 ニューラルネットワークのパワーにごくわずかな違いをもたらすことが実験的に示されている。 + +00:46.940 --> 00:48.470 +だから、 ほとんどタダ同然だ。 + +00:48.500 --> 00:51.020 +またケーキを食べているような状況だ。 + +00:51.020 --> 00:54.980 +そのため、 ダブルクアントをトゥルーとして使用することを推奨する。 + +00:55.130 --> 01:01.280 +このcompute dtypeは、 計算中に使用されるデータ型に関するものである。 + +01:01.490 --> 01:06.350 +そして一般的に、 ここでは32ビットの浮動小数点数を使うことができる。 + +01:06.350 --> 01:14.240 +しかし、 Bfloat16データ型バイナリフロート16を使用することは、 トレーニングの質をほんの少し犠牲にするだけで、 + +01:14.240 --> 01:20.480 +トレーニングのスピードを向上させ、 より速くするものだと考えられている。 + +01:20.630 --> 01:30.470 +確かに、 私がこれを試したとき、 より速く走るのを見たが、 最適化の速度に実際の変化を検出することはできなかった。 + +01:30.710 --> 01:32.750 +だから、 これは絶対にお勧めだ。 + +01:32.750 --> 01:35.750 +しかし、 繰り返すが、 これはハイパーパラメーターであり、 試してみることができる。 + +01:35.750 --> 01:40.220 +そしてこれが4ビットのクオンツタイプ。 + +01:40.220 --> 01:46.730 +これは、 精度を4ビットに落としたとき、 その4ビットをどう解釈すべきかということだ。 + +01:46.760 --> 01:55.790 +4ビットが0000から1111までなら、 0から15までの整数を表していることになる。 + +01:55.820 --> 01:57.800 +それも一つの方法かもしれない。 + +01:57.830 --> 02:02.690 +それを解釈して、 浮動小数点数のようなものにマッピングするのが一般的だ。 + +02:02.810 --> 02:08.750 +そして、 このnf4のアプローチは、 それを正規分布に対応させるものなんだ。 + +02:08.780 --> 02:11.540 +そしてまた、 これは非常に一般的な設定でもある。 + +02:11.540 --> 
02:13.550 +僕が使ってきたものだよ。 + +02:13.580 --> 02:16.010 +他のものを試したが、 それほど良くなかった。 + +02:16.040 --> 02:18.620 +だから、 一般的にはこれが推奨されている。 + +02:18.620 --> 02:23.090 +しかし、 これはハイパーパラメーターであり、 試行錯誤が可能であることを意味する。 + +02:23.600 --> 02:31.370 +これを念頭に置いて、 4ビット量子化用のごく標準的なクオンツ・コンフィグを作成する。 + +02:31.370 --> 02:38.570 +これでベースモデルを作り、 メモリのフットプリントを印刷する。 + +02:38.570 --> 02:43.010 +そして驚くべきことに、 我々は5人になってしまった。 6GB。 + +02:43.040 --> 02:48.770 +しかし、 ベースモデルの実物が32GBだったことを思い出すと、 + +02:48.770 --> 02:54.470 +本当にずいぶん小さくなったものだ。 + +02:54.470 --> 03:01.580 +これで、 この安いT4ボックスのGPUのメモリに余裕で収まるようになった。 + +03:01.580 --> 03:04.940 +ベースモデルを見れば、 そのアーキテクチャがわかるだろう。 + +03:04.970 --> 03:07.580 +もうあんな馬鹿げた冗談は言わないよ。 + +03:07.670 --> 03:15.920 +もちろん、 80億リラマのオリジナルモデルのアーキテクチャと同一である。 + +03:16.280 --> 03:22.130 +ただ、 この奥の奥では、 ウエイトの精度が低くなっている。 + +03:22.130 --> 03:23.150 +4ビットだ。 + +03:23.960 --> 03:24.740 +オーケー。 + +03:24.740 --> 03:28.160 +次のビデオでは、 この時点でセッションを再開してはいけない。 + +03:28.160 --> 03:34.490 +このセッションはこのままにしておいて、 次のビデオでは、 微調整されたモデルの例をロードして、 + +03:34.490 --> 03:40.160 +ローラの適応がこのアーキテクチャにどのように適用されるかを見てみよう。 + +03:40.190 --> 03:41.030 +そこで会おう diff --git a/week5/community-contributions/subtitles/srts/59505337/ko_KR.srt b/week5/community-contributions/subtitles/srts/59505337/ko_KR.srt new file mode 100755 index 0000000..736c642 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59505337/ko_KR.srt @@ -0,0 +1,157 @@ +WEBVTT + +00:00.620 --> 00:07.340 +이제 4비트 퀀타이즈를 살펴볼 겁니다 정밀도를 끝까지 낮추는 놀라운 + +00:07.340 --> 00:08.330 +효과죠 + +00:08.330 --> 00:13.460 +비트와 바이트를 이용한 퀀트 구성 이전과 아주 유사하죠 + +00:13.700 --> 00:17.150 +하지만 이번엔 4 비트 =으로 로드하라고 했죠 + +00:17.150 --> 00:17.750 +맞아요 + +00:17.780 --> 00:19.370 +다른 설정도 있어요 + +00:19.370 --> 00:24.710 +이번엔 더블 퀀트 사용이란 게 있는데 이것도 살짝 수수께끼죠 + +00:24.710 --> 00:30.710 +모든 무게를 수량화해서 통과하게 하는 거예요 그리고 다시 + +00:30.710 --> 00:32.270 +한번 통과하죠 + +00:32.270 --> 00:36.830 +그렇게 함으로써 메모리를 10에서 20퍼센트 더 줄일 수 있어요 + +00:36.860 --> 00:38.990 +비트를 더 짜내는 거예요 + +00:39.080 --> 00:46.940 +그리고 이건 실험적으로 입증된 건데 신경망의 힘에 아주 미세한 차이를 
만들어냈어요 + +00:46.940 --> 00:48.470 +거의 공짜나 다름없죠 + +00:48.500 --> 00:51.020 +둘 다 해당하는 상황이죠 + +00:51.020 --> 00:54.980 +그래서 더블 퀀트를 참으로 사용하라고 권장했죠 + +00:55.130 --> 01:01.280 +계산 dtype은 계산에 사용되는 데이터 타입에 관한 거죠 + +01:01.490 --> 01:06.350 +일반적으로 32 비트 플로트로 작업할 수 있어요 + +01:06.350 --> 01:14.240 +Bfloat16 데이터 유형의 플로트 이진법 16을 사용하면 훈련 + +01:14.240 --> 01:20.480 +속도가 빨라집니다 훈련의 질을 조금만 희생해도요 + +01:20.630 --> 01:26.450 +확실히 제가 이걸 시도했을 땐 더 빨리 실행되는 걸 봤어요 최적화 속도에서 + +01:26.450 --> 01:30.470 +실제적인 변화는 감지할 수 없었죠 + +01:30.710 --> 01:32.750 +그러니 이건 확실히 추천해요 + +01:32.750 --> 01:35.750 +하지만 이것도 하이퍼 매개 변수라 실험할 수 있어요 + +01:35.750 --> 01:40.220 +이건 4 비트 퀀트 유형이에요 + +01:40.220 --> 01:45.800 +4 비트 숫자로 정밀도를 낮출 때 4 비트 숫자를 어떻게 해석해야 + +01:45.800 --> 01:46.730 +할까요? + +01:46.760 --> 01:54.350 +4bit00부터 1111까지라고 하면 0부터 15까지 정수라고 생각할 + +01:54.350 --> 01:55.790 +수도 있죠 + +01:55.820 --> 01:57.800 +그것도 한 방법이죠 + +01:57.830 --> 02:02.690 +부동점 숫자에 맞춰 지도를 만드는 게 일반적이죠 + +02:02.810 --> 02:08.750 +이 nf4 접근법은 정규 배포된 뭔가에 매핑해요 + +02:08.780 --> 02:11.540 +이건 아주 흔한 설정이에요 + +02:11.540 --> 02:13.550 +제가 쓴 거예요 + +02:13.580 --> 02:16.010 +다른 걸 해 봤는데 별로였어요 + +02:16.040 --> 02:18.620 +일반적으로 권장하는 게 이거죠 + +02:18.620 --> 02:23.090 +하이퍼 매개 변수라서 시행착오를 겪어도 사용 가능해요 + +02:23.600 --> 02:31.370 +그걸 염두에 두고 퀀트 구성을 만드는데 4비트 퀀트화를 위한 아주 표준적인 퀀트 구성이죠 + +02:31.370 --> 02:38.570 +기본 모델을 생성합니다 메모리 공간을 프린트할 거예요 + +02:38.570 --> 02:43.010 +놀랍게도 현재는 5마리로 줄었죠 6GB요 + +02:43.040 --> 02:48.770 +제 리소스에서 이미 보셨을 수도 있지만 기본 모델을 기억하시면 + +02:48.770 --> 02:54.470 +실제 크기는 32GB였죠 정말 많이 줄었어요 + +02:54.470 --> 03:01.580 +이 정도면 GPU 메모리에 저렴한 T4 박스에 쉽게 들어갈 거예요 + +03:01.580 --> 03:04.940 +기본 모델을 보면 아키텍처가 있어요 + +03:04.970 --> 03:07.580 +그 멍청한 농담은 다시 안 할 거예요 + +03:07.670 --> 03:15.920 +물론 더 우람했던 80억 라마 모델의 구조와 동일하죠 + +03:16.280 --> 03:22.130 +이 깊숙한 곳에서는 무게추의 정확도가 낮아요 + +03:22.130 --> 03:23.150 +4비트예요 + +03:23.960 --> 03:24.740 +네 + +03:24.740 --> 03:28.160 +다음 비디오에선 이 시점에서 세션을 다시 시작하지 마세요 + +03:28.160 --> 03:34.490 +이번 세션은 이대로 유지해야 합니다 다음 비디오에선 미세 조정 모델의 예제를 불러오겠습니다 + 
+03:34.490 --> 03:40.160
+로라가 적응한 것이 이 구조에 어떻게 적용되는지 보죠
+
+03:40.190 --> 03:41.030
+거기서 봐요
diff --git a/week5/community-contributions/subtitles/srts/59506507/en_US.srt b/week5/community-contributions/subtitles/srts/59506507/en_US.srt
new file mode 100755
index 0000000..00003af
--- /dev/null
+++ b/week5/community-contributions/subtitles/srts/59506507/en_US.srt
@@ -0,0 +1,70 @@
+WEBVTT
+
+00:00.500 --> 00:05.960
+Well, I realized that was a whole lot of theory, but I hope it gave you a good intuition that will
+
+00:05.960 --> 00:08.030
+be a basis for what we're about to do.
+
+00:08.120 --> 00:14.660
+And it's also very helpful when you encounter problems or if you're exploring hyperparameters, optimizations
+
+00:14.660 --> 00:20.420
+that you have that sense of why we are playing with what we are and what it represents.
+
+00:20.420 --> 00:26.690
+But to summarize, when we started out, we were talking about the smallest variant of Llama 3.1,
+
+00:26.690 --> 00:33.770
+which is an 8 billion parameter model, a 32GB of Ram that it takes up.
+
+00:33.770 --> 00:39.860
+We realized that you can quantize it down so that the weights are an eight bits, and then it only uses
+
+00:39.860 --> 00:41.720
+up nine gigabytes.
+
+00:41.780 --> 00:44.480
+So only that's still a very big amount of Ram.
+
+00:44.630 --> 00:50.090
+Uh, we could quantize it all the way down to four bits, uh, using the double Quant trick and get
+
+00:50.090 --> 00:52.880
+it down to 5.6GB.
+
+00:52.940 --> 01:00.560
+Uh, and then we also saw that we could use instead of trying to train the big guy, we could instead
+
+01:00.620 --> 01:08.150
+fine tune these separate, uh, Laura matrices that get applied to the big model.
+
+01:08.150 --> 01:17.450
+And if we do so, then we're looking at 100MB or so, 109MB of parameters, a far smaller number, a
+
+01:17.450 --> 01:21.680
+little dot compared to the enormous base model.
+ +01:21.680 --> 01:26.090 +So hopefully that gives you, again, a great sense of how it all fits together. + +01:26.090 --> 01:30.620 +And with that, you have built some essential domain expertise. + +01:30.680 --> 01:34.550 +Uh, this has been a really important week of knowledge building. + +01:34.610 --> 01:37.100 +We're about to put it all into practice. + +01:37.100 --> 01:41.330 +We're going to select an open source model that we'll be using for fine tuning. + +01:41.330 --> 01:46.880 +We will look at some different variants of it, and then we will evaluate the base model out of the + +01:46.910 --> 01:48.560 +box to see how it performs. + +01:48.560 --> 01:52.520 +It's going to be a practical week next week and I'm looking forward to it. diff --git a/week5/community-contributions/subtitles/srts/59506507/ja_JP.srt b/week5/community-contributions/subtitles/srts/59506507/ja_JP.srt new file mode 100755 index 0000000..c9c0484 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59506507/ja_JP.srt @@ -0,0 +1,58 @@ +WEBVTT + +00:00.500 --> 00:08.030 +まあ、 理論的な話ばかりになってしまったが、 これからやろうとしていることの基礎となる直感をつかんでもらえたと思う。 + +00:08.120 --> 00:14.660 +また、 問題に遭遇したときや、 ハイパーパラメーターや最適化を模索しているときにも、 なぜそのようなことをしているのか、 + +00:14.660 --> 00:20.420 +そしてそれが何を表しているのか、 ということを理解するのに非常に役立つ。 + +00:20.420 --> 00:26.690 +しかし、 要約すると、 私たちがラムダ3の最小変形について話し始めたとき、 私たちは話していた。 1、 これは80億パラメータモデルで、 + +00:26.690 --> 00:33.770 +32GBのラムを使用する。 + +00:33.770 --> 00:41.720 +ウエイトが8ビットになるように量子化すれば、 9ギガバイトしか使わないことに気づいた。 + +00:41.780 --> 00:44.480 +それでも、 ラムの量は非常に多い。 + +00:44.630 --> 00:52.880 +4ビットまで量子化して、 ダブルクアントのトリックを使って5ビットにすることができるんだ。 + +00:52.880 --> 00:52.880 +6GB。 + +00:52.940 --> 01:08.150 +それから、 大きなモデルをトレーニングする代わりに、 大きなモデルに適用されるローラ行列を微調整することができることもわかった。 + +01:08.150 --> 01:17.450 +そうすると、 パラメータは100MBほど、 109MBとなり、 巨大なベースモデルに比べればはるかに少ない、 + +01:17.450 --> 01:21.680 +小さな点となる。 + +01:21.680 --> 01:26.090 +これでまた、 すべてがどのように組み合わされているのか、 よくお分かりいただけたと思う。 + +01:26.090 --> 01:30.620 +そうすることで、 
あなたは必要不可欠な専門知識を身につけることができる。 + +01:30.680 --> 01:34.550 +ええと、 この1週間は知識を深めるための本当に重要な1週間だった。 + +01:34.610 --> 01:37.100 +これからすべてを実践する。 + +01:37.100 --> 01:41.330 +微調整に使うオープンソースのモデルを選ぶつもりだ。 + +01:41.330 --> 01:48.560 +そのさまざまなバリエーションを見て、 そして箱から出してすぐにベースモデルを評価し、 そのパフォーマンスを確認する。 + +01:48.560 --> 01:52.520 +来週は実践的な1週間になりそうで、 楽しみにしている。 diff --git a/week5/community-contributions/subtitles/srts/59506507/ko_KR.srt b/week5/community-contributions/subtitles/srts/59506507/ko_KR.srt new file mode 100755 index 0000000..85783de --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59506507/ko_KR.srt @@ -0,0 +1,70 @@ +WEBVTT + +00:00.500 --> 00:05.960 +이론은 너무 많다는 걸 깨달았지만 좋은 직감을 얻었길 바라요 그게 우리가 + +00:05.960 --> 00:08.030 +하려는 일의 토대니까요 + +00:08.120 --> 00:14.660 +또 문제가 생겼을 때 매우 도움이 됩니다 hyperperameter나 최적화를 + +00:14.660 --> 00:20.420 +탐색할 때 왜 우리가 무엇이고 무엇을 나타내는지 알 수 있죠 + +00:20.420 --> 00:26.690 +요약하자면 우리가 처음 시작했을 때 얘기했던 건 람다 3의 가장 작은 변종이었죠 1은 80억 매개 + +00:26.690 --> 00:33.770 +변수 모델로 램 32GB를 차지하죠 + +00:33.770 --> 00:39.860 +수량화해서 8비트로 만들고 9기가바이트만 사용하면 된다는 + +00:39.860 --> 00:41.720 +걸 알게 됐죠 + +00:41.780 --> 00:44.480 +여전히 램의 양이 많은 거죠 + +00:44.630 --> 00:50.090 +퀀타이즈해서 비율을 4까지 낮출 수 있어요 더블 퀀트 트릭을 써서 5로 + +00:50.090 --> 00:52.880 +낮추는 거죠 6GB요 + +00:52.940 --> 01:00.560 +그리고 또 하나 깨달은 건 키 큰 사람을 훈련하는 대신 로라 매트릭스를 + +01:00.620 --> 01:08.150 +미세 조정할 수 있다는 거예요 키 큰 모델에게 적용되는 거죠 + +01:08.150 --> 01:17.450 +그렇게 되면 100MB나 109MB 매개 변수가 생기는 거죠 거대한 베이스 + +01:17.450 --> 01:21.680 +모델에 비하면 훨씬 적은 숫자예요 + +01:21.680 --> 01:26.090 +어떻게 짜맞추는지 감이 잘 잡히길 바라요 + +01:26.090 --> 01:30.620 +그것과 함께 필수 도메인 전문 지식을 구축했죠 + +01:30.680 --> 01:34.550 +이번 주는 지식을 쌓는 중요한 한 주였어요 + +01:34.610 --> 01:37.100 +이제 모든 걸 실천할 거예요 Put it up Put it up Put it up Put it up Put it up Put it + +01:37.100 --> 01:41.330 +미세 조정을 위해 사용할 오픈 소스 모델을 선택할게요 + +01:41.330 --> 01:46.880 +다양한 변형을 살펴보고 성능을 평가하기 위해 새로운 기본 모델을 + +01:46.910 --> 01:48.560 +살펴볼 거예요 + +01:48.560 --> 01:52.520 +다음 주는 실전일 거예요 정말 기대돼요 diff --git 
a/week5/community-contributions/subtitles/srts/59506611/en_US.srt b/week5/community-contributions/subtitles/srts/59506611/en_US.srt new file mode 100755 index 0000000..a86ed6d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59506611/en_US.srt @@ -0,0 +1,268 @@ +WEBVTT + +00:00.680 --> 00:08.750 +So in a future day, I'm going to be training, fine tuning a model and creating a fine tuned model. + +00:08.750 --> 00:15.170 +And what I'm going to do now is load in one of the ones that that I've saved in the future, confusingly, + +00:15.170 --> 00:16.160 +if you see what I mean. + +00:16.250 --> 00:21.230 +Uh, and that will allow you to see the architecture of it, to get a sense of it. + +00:21.320 --> 00:24.380 +And you do that by loading in not a base model. + +00:24.440 --> 00:31.910 +Not not one of these, um, uh, auto model for causal LLM from pre-trained, but instead a Peft model, + +00:31.910 --> 00:35.630 +a parameter efficient fine tuned model from pre-trained. + +00:35.630 --> 00:39.350 +And I tell it the base model, which is the one that we've just got right here. + +00:39.350 --> 00:40.760 +And fine tune model. + +00:40.760 --> 00:44.570 +This is the name of the model that I saved after I did this. + +00:44.570 --> 00:45.950 +Laura fine tuning. + +00:45.950 --> 00:48.920 +And I'm doing this just to show you what it looks like. + +00:48.950 --> 00:54.440 +Uh, and this is one that that will run very quickly because it's relatively small. + +00:54.590 --> 00:56.870 +Uh, let's have a look at the size. + +00:56.870 --> 01:02.270 +So the memory footprint of this is Gigabytes. + +01:02.270 --> 01:06.170 +And that should be familiar to you because it's very close to this. + +01:06.320 --> 01:10.940 +Uh, it is about 100MB difference between them. + +01:10.940 --> 01:12.440 +Let's have a look at it. + +01:13.250 --> 01:18.470 +So here is the architecture of this thing. + +01:18.740 --> 01:22.340 +Uh, and now, yeah, I no longer play that trick. 
+ +01:22.370 --> 01:24.530 +This is definitely different to what we saw before. + +01:24.530 --> 01:26.360 +And let me tell you what we're seeing. + +01:26.360 --> 01:32.480 +So first of all, everything you're seeing up to this point here is the same up to the point where we + +01:32.480 --> 01:35.420 +have the 32 Lama decoder layers. + +01:35.420 --> 01:48.050 +And now we get to the this attention layer and we have the Q proj the k proj the V and the O uh, proj + +01:48.320 --> 01:49.340 +uh layers. + +01:49.340 --> 01:53.930 +And what you'll see is that each of these has a base layer. + +01:53.930 --> 01:57.410 +And then it has lora A and lora B. + +01:57.500 --> 02:05.380 +And these are the A and B matrices that I told you about before, uh which have come in here and you'll + +02:05.380 --> 02:07.060 +see this number 32 here. + +02:07.060 --> 02:11.260 +That is the R that I mentioned before, the Lora rank. + +02:11.290 --> 02:14.050 +They are 32 rank matrices. + +02:14.200 --> 02:16.270 +Um, and uh, yeah. + +02:16.300 --> 02:24.160 +The, the uh, if, if you're in if you're familiar with the way that these the matrix math works out, + +02:24.160 --> 02:32.050 +you can see this is designed so that the 32 dimensions will can be multiplied together in such a way + +02:32.050 --> 02:39.130 +as it can be applied to this base layer and be used to make a small shift to that base layer. + +02:39.130 --> 02:44.170 +I'm again being a bit hand-wavy because I want to get bogged down in the theory, but the idea is that + +02:44.170 --> 02:45.760 +these will be multiplied together. + +02:45.790 --> 02:46.720 +Lora A and Lora. + +02:46.750 --> 02:53.890 +B together with alpha, the scaling factor, and that will then be used as a delta to apply. + +02:53.890 --> 02:59.890 +On top of this base layer, there is also another hyperparameter called dropout. + +03:00.010 --> 03:01.870 +And we'll be talking about that later. 
+ +03:01.870 --> 03:04.360 +That's not one of the big three that we talked about this week. + +03:04.990 --> 03:07.180 +but you'll see that feature a few times here. + +03:07.180 --> 03:13.060 +And so if we look at the other, uh, of the of the four target modules, you'll see that they all have + +03:13.060 --> 03:20.020 +a Laura A and a Laura B here, where again, we have a Laura A and a Laura B, and finally a Laura A + +03:20.020 --> 03:28.660 +and Laura B here, Laura A and Laura B, and so that is where our adapter matrices have been inserted + +03:28.660 --> 03:36.430 +into the Lama architecture to adapt the bigger model, but with much fewer dimensions. + +03:36.460 --> 03:42.400 +Uh, these, these 32 dimensions as specified by our R hyperparameter. + +03:43.510 --> 03:45.220 +Uh, nothing else has been changed. + +03:45.220 --> 03:48.880 +The the multi-layer perceptron layer is exactly the same. + +03:49.060 --> 03:51.130 +Um, and everything else is the same. + +03:52.090 --> 03:58.720 +And so just to mention again, we're trying not to get bogged down in this, but the you could look + +03:58.720 --> 04:06.490 +back to convince yourself that this is the number of dimensions in those, those four, uh, Matrices + +04:06.760 --> 04:07.720 +there is. + +04:07.720 --> 04:14.530 +Each one has a Laura A and a Laura B, and I've just multiplied together the dimensions of that matrix + +04:14.530 --> 04:20.890 +to tell you how many dimensions, how many weights in total we have across these adapters. + +04:20.890 --> 04:25.210 +And then that means for each layer we sum up these four numbers. + +04:25.210 --> 04:31.990 +I multiply that by 32 because there are 32 of these groups of modules. + +04:32.800 --> 04:37.120 +And then each of these parameters is a four byte number. + +04:37.120 --> 04:38.350 +It's 32 bits. + +04:38.350 --> 04:43.570 +And so I calculate the size and divide that by a million to get it in megabytes. 
+ +04:43.870 --> 04:48.580 +I'm not sure if you're following all this, but hopefully you get a general idea just to give you a + +04:48.580 --> 04:55.600 +sense of perspective, if you add up all of the weights in our Laura adapters, there's a total of 27 + +04:55.600 --> 05:02.830 +million parameters and the total size is about 109MB. + +05:02.830 --> 05:07.190 +So 27 million parameters of size Hundred and nine megabytes. + +05:07.190 --> 05:10.730 +That's how large our adapters are. + +05:11.030 --> 05:20.090 +And of course, compare that to the fact that llama overall has 8 billion parameters and is 32GB in + +05:20.090 --> 05:20.840 +size. + +05:20.840 --> 05:26.450 +So it gives you a sense we're doing a lot here, a lot of parameters and a lot to be trained, but it's + +05:26.450 --> 05:33.290 +tiny compared to the monstrosity that is llama 3.1, even the small variant. + +05:33.290 --> 05:41.360 +So this whilst I realize there's been a fair bit of, uh, of, uh, stuff in here that, uh, you may + +05:41.360 --> 05:46.490 +have to go back and check and see what I mean, but hopefully it gives you that intuition, that sense + +05:46.490 --> 05:53.840 +that we're able to use these lower dimensional matrices to have an impact on the bigger architecture, + +05:53.840 --> 06:00.320 +but with a smaller size, smaller number of weights that has to be adjusted. + +06:00.680 --> 06:06.440 +Um, and just to give you just a sort of evidence that this number, this 109MB, is the size of the + +06:06.440 --> 06:09.870 +parameters I can actually go into hugging face. + +06:09.900 --> 06:17.130 +I'm now in hugging face and I'm looking at where I saved that particular Laura adapter, that fine tuned + +06:17.130 --> 06:18.810 +model, and what we'll find. + +06:18.810 --> 06:22.320 +When you look at these, you look for something called safe tensors. + +06:22.320 --> 06:26.070 +That is the file which stores the parameters themselves. 
+ +06:26.310 --> 06:31.530 +Um, and if you look at this for llama 3.1, you'll see that it's 32GB large. + +06:31.530 --> 06:40.890 +If I look at it for this, you'll see it's 109MB of parameters, 109MB, which matches this estimate + +06:40.890 --> 06:42.540 +here, 109MB. + +06:42.540 --> 06:49.830 +That is the size of the parameters that we are fine tuning using this Q Laura technique. + +06:50.310 --> 06:55.200 +So I hope at least at the very least, it's given you a decent intuition for what's going on here and + +06:55.200 --> 07:01.470 +how we're able to pull this trick of being able to fine tune a model without needing to have gigabytes + +07:01.470 --> 07:05.250 +of data that we are optimizing over. + +07:06.000 --> 07:09.720 +And so with that, back to the slides for a wrap up. diff --git a/week5/community-contributions/subtitles/srts/59506611/ja_JP.srt b/week5/community-contributions/subtitles/srts/59506611/ja_JP.srt new file mode 100755 index 0000000..78885fe --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59506611/ja_JP.srt @@ -0,0 +1,217 @@ +WEBVTT + +00:00.680 --> 00:08.750 +だから将来的には、 トレーニングをしてモデルを微調整し、 微調整したモデルを作るつもりだ。 + +00:08.750 --> 00:16.160 +そして、 これからすることは、 未来に保存しておいたもののひとつをロードすることだ。 + +00:16.250 --> 00:21.230 +そうすれば、 その建築物を見て、 感覚をつかむことができる。 + +00:21.320 --> 00:24.380 +そして、 ベースモデルではなく、 ロードすることでそれを実現する。 + +00:24.440 --> 00:35.630 +このような、 えーと、 えーと、 事前に訓練された因果LLMの自動モデルではなく、 事前に訓練されたパラメータ効率の良い微調整モデルであるPeftモデルである。 + +00:35.630 --> 00:39.350 +そして、 今ここにあるのがベースモデルであることを伝える。 + +00:39.350 --> 00:40.760 +そしてモデルを微調整する。 + +00:40.760 --> 00:44.570 +このようにして保存したモデルの名前がこれだ。 + +00:44.570 --> 00:45.950 +ローラの微調整 + +00:45.950 --> 00:48.920 +どんな風に見えるかをお見せするためにやっているんだ。 + +00:48.950 --> 00:54.440 +これは比較的小さいから、 すぐに終わるよ。 + +00:54.590 --> 00:56.870 +サイズを見てみよう。 + +00:56.870 --> 01:02.270 +そのため、 メモリフットプリントはギガバイトになる。 + +01:02.270 --> 01:06.170 +そして、 それはこの件に非常に近いので、 あなたにも馴染みがあるはずだ。 + +01:06.320 --> 01:10.940 +ええと、 その差は約100MBです。 + +01:10.940 --> 01:12.440 +見てみよう。 + 
+01:13.250 --> 01:18.470 +つまり、 これがこの製品のアーキテクチャーだ。 + +01:18.740 --> 01:22.340 +今はもう、 そんな芸当はしない。 + +01:22.370 --> 01:24.530 +これは以前見たものとは明らかに違う。 + +01:24.530 --> 01:26.360 +そして、 私たちが見ていることをお話ししましょう。 + +01:26.360 --> 01:35.420 +まず第一に、 ここまでの内容は、 32ラマ・デコーダーのレイヤーがあるところまでは同じです。 + +01:35.420 --> 01:49.340 +そして今、 私たちはこの注目のレイヤーにたどり着き、 Qプロジ、 Kプロジ、 Vプロジ、 Oプロジのレイヤーがある。 + +01:49.340 --> 01:53.930 +そして、 これらにはそれぞれベースレイヤーがある。 + +01:53.930 --> 01:57.410 +そしてロラAとロラBがある。 + +01:57.500 --> 02:07.060 +そしてこれが、 以前お話ししたA行列とB行列で、 ここに32番があります。 + +02:07.060 --> 02:11.260 +それが先ほどのR、 ローラランクだ。 + +02:11.290 --> 02:14.050 +これらは32ランクの行列である。 + +02:14.200 --> 02:16.270 +うーん、 そうだね。 + +02:16.300 --> 02:24.160 +行列の計算の仕方に詳しい方なら、 32の次元を掛け合わせることで、 このベースレイヤーに適用し、 + +02:24.160 --> 02:39.130 +そのベースレイヤーにわずかなシフトを加えることができるように設計されているのがおわかりでしょう。 + +02:39.130 --> 02:45.760 +理論に拘泥したいので、 また少し手垢がついているが、 これらは掛け合わされるということだ。 + +02:45.790 --> 02:46.720 +ローラAとローラ + +02:46.750 --> 02:53.890 +Bにスケーリングファクターであるアルファを加えたものが、 適用するデルタとして使われる。 + +02:53.890 --> 02:59.890 +このベースレイヤーの上に、 ドロップアウトと呼ばれる別のハイパーパラメーターがある。 + +03:00.010 --> 03:01.870 +それについては後で話す。 + +03:01.870 --> 03:04.360 +それは今週話したビッグ3のうちの一つではない。 + +03:04.990 --> 03:07.180 +しかし、 ここではその特徴を何度か目にすることになるだろう。 + +03:07.180 --> 03:13.060 +4つのターゲット・モジュールのうち、 + +03:13.060 --> 03:20.020 +他のモジュールを見てみると、 ローラAとローラBがあり、 + +03:20.020 --> 03:36.430 +ここにもローラAとローラBがある。 + +03:36.460 --> 03:42.400 +Rのハイパーパラメータで指定された32次元だ。 + +03:43.510 --> 03:45.220 +ええと、 他は何も変わっていない。 + +03:45.220 --> 03:48.880 +多層パーセプトロン層もまったく同じである。 + +03:49.060 --> 03:51.130 +うーん、 他は全部同じだよ。 + +03:52.090 --> 03:58.720 +もう一度言っておくが、 私たちはこの件に拘泥しないようにしているが、 + +03:58.720 --> 04:07.720 +この4つの行列の次元数がこれであることを納得するために振り返ってみるといい。 + +04:07.720 --> 04:14.530 +それぞれにローラAとローラBがあり、 そのマトリックスの次元を掛け合わせることで、 + +04:14.530 --> 04:20.890 +これらのアダプター全体でいくつの次元、 いくつのウェイトがあるかがわかる。 + +04:20.890 --> 04:25.210 +そして各レイヤーについて、 この4つの数字を合計するということだ。 + +04:25.210 --> 04:31.990 +モジュールのグループが32個あるので、 それを32倍する。 + +04:32.800 --> 04:37.120 +そして、 これらのパラメーターはそれぞれ4バイトの数字である。 
+ +04:37.120 --> 04:38.350 +32ビットだ。 + +04:38.350 --> 04:43.570 +それでサイズを計算し、 それを100万で割ってメガバイトにする。 + +04:43.870 --> 04:48.580 +ローラ・アダプタのウエイトをすべて合計すると、 + +04:48.580 --> 05:02.830 +2700万個のパラメータがあり、 合計サイズは約109MBになる。 + +05:02.830 --> 05:07.190 +つまり、 2,700万個のパラメータは、 100.9メガバイトのサイズということになる。 + +05:07.190 --> 05:10.730 +それくらい私たちのアダプターは大きい。 + +05:11.030 --> 05:20.840 +そしてもちろん、 llama全体が80億のパラメーターを持ち、 サイズが32GBであることと比較してほしい。 + +05:20.840 --> 05:33.290 +多くのパラメータとトレーニングが必要だが、 ラマ3のような巨大なものに比べれば小さなものだ。 + +05:33.290 --> 05:33.290 +1、 小さなバリエーションであっても。 + +05:33.290 --> 05:46.490 +しかし、 このような低次元の行列を使うことで、 より大きなアーキテクチャに影響を与えることができるということを、 + +05:46.490 --> 06:00.320 +直感的に理解していただければと思います。 + +06:00.680 --> 06:09.870 +そして、 この109MBという数字が、 私が実際にハグすることができるパラメータのサイズであることを証明するものでもある。 + +06:09.900 --> 06:18.810 +私は今、 ハグをしている。 そして、 あの特別なローラ・アダプター、 微調整されたモデルをどこに保存したか、 そして何を見つけるかを見ている。 + +06:18.810 --> 06:22.320 +これらを見るとき、 セーフ・テンソルと呼ばれるものを探す。 + +06:22.320 --> 06:26.070 +これは、 パラメーターそのものを保存するファイルである。 + +06:26.310 --> 06:31.530 +そして、 ラマ3について見てみよう。 1、 32GBの大容量であることがわかるだろう。 + +06:31.530 --> 06:42.540 +これを見ると、 パラメータが109MB、 109MBであることがわかる。 + +06:42.540 --> 06:49.830 +それが、 このQラウラを使って微調整するパラメーターの大きさだ。 + +06:50.310 --> 07:01.470 +少なくとも、 ここで何が起こっているのか、 そして最適化するために何GBものデータを必要とせずにモデルを微調整することができるというトリックをどのように使っているのか、 + +07:01.470 --> 07:05.250 +きちんとした直感をつかんでいただけたと思います。 + +07:06.000 --> 07:09.720 +ということで、 スライドに戻り、 まとめとしたい。 diff --git a/week5/community-contributions/subtitles/srts/59506611/ko_KR.srt b/week5/community-contributions/subtitles/srts/59506611/ko_KR.srt new file mode 100755 index 0000000..fae9eb9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59506611/ko_KR.srt @@ -0,0 +1,265 @@ +WEBVTT + +00:00.680 --> 00:08.750 +그래서 앞으로는 모델을 훈련하고 미세 조정하고 미세 조정 모델을 만들 거예요 + +00:08.750 --> 00:15.170 +이제 제가 할 일은 미래에 저장해둔 것 중 하나를 로드하는 겁니다 무슨 말인지 + +00:15.170 --> 00:16.160 +아시겠죠 + +00:16.250 --> 00:21.230 +Get it로 건축물을 보고 감을 잡을 수 있어요 + +00:21.320 --> 00:24.380 +기본 모델이 아닌 걸 로드해 그걸 하죠 + +00:24.440 --> 
00:31.910 +미리 훈련받은 인과적 LLM용 자동차 모델 말고 페프트 모델요 매개 변수 + +00:31.910 --> 00:35.630 +효율적인 미세 조정 모델이죠 + +00:35.630 --> 00:39.350 +기본 모델이라고 하죠 여기 있는 거요 + +00:39.350 --> 00:40.760 +미세 튜닝 모델이에요 + +00:40.760 --> 00:44.570 +제가 이걸 한 후 저장한 모델 이름이에요 + +00:44.570 --> 00:45.950 +로라 파인 튜닝이에요 + +00:45.950 --> 00:48.920 +어떤 모습인지 보여 드리려고 이렇게 하는 거예요 + +00:48.950 --> 00:54.440 +이건 아주 빨리 달릴 수 있어요 비교적 작으니까요 + +00:54.590 --> 00:56.870 +크기를 한번 보죠 + +00:56.870 --> 01:02.270 +메모리 공간은 기가바이트예요 + +01:02.270 --> 01:06.170 +이것과 아주 비슷하니 익숙하실 거예요 + +01:06.320 --> 01:10.940 +100MB 정도 차이가 나요 + +01:10.940 --> 01:12.440 +한번 보죠 + +01:13.250 --> 01:18.470 +이게 그 구조예요 + +01:18.740 --> 01:22.340 +이제 그 장난은 안 쳐요 + +01:22.370 --> 01:24.530 +전에 봤던 것과는 확실히 달라요 + +01:24.530 --> 01:26.360 +지금 상황을 말씀드리죠 + +01:26.360 --> 01:32.480 +우선, 여기 보이는 모든 건 라마가 있는 32개의 + +01:32.480 --> 01:35.420 +층까지 똑같아요 + +01:35.420 --> 01:48.050 +이제 주의력 레이어가 나왔네요 Q 프래, K 프래, V, O 프래, O가 + +01:48.320 --> 01:49.340 +있죠 + +01:49.340 --> 01:53.930 +여기 보시면 각각의 베이스 레이어가 있어요 + +01:53.930 --> 01:57.410 +로라 A와 로라 B도 있어요 + +01:57.500 --> 02:05.380 +여기가 아까 말한 A와 B 행렬들이에요 여기 32번 + +02:05.380 --> 02:07.060 +보이죠? 
+ +02:07.060 --> 02:11.260 +제가 전에 말했던 로라 등급이에요 + +02:11.290 --> 02:14.050 +32급 행렬들이에요 + +02:14.200 --> 02:16.270 +네, 맞아요 + +02:16.300 --> 02:24.160 +이 매트릭스 수학이 어떻게 적용되는지 잘 아신다면 32차원적 개념을 서로 + +02:24.160 --> 02:32.050 +곱할 수 있도록 설계된 것을 보실 수 있습니다 이 기본 레이어에 적용할 수 있도록 + +02:32.050 --> 02:39.130 +말이죠 그리고 이 기본 레이어에서는 약간의 변화를 줄 수 있죠 + +02:39.130 --> 02:44.170 +물결무늬를 만들고 있어요 이론에 빠져들게 하려고요 비트들을 + +02:44.170 --> 02:45.760 +곱할 거예요 + +02:45.790 --> 02:46.720 +로라 A와 로라요 + +02:46.750 --> 02:53.890 +B와 알파를 함께 배율 요인으로 선택하고 델타로 적용하는 거죠 + +02:53.890 --> 02:59.890 +이 기본 층 위에는 드롭아웃이라는 하이퍼파라미터도 있어요 + +03:00.010 --> 03:01.870 +그 얘긴 나중에 하죠 + +03:01.870 --> 03:04.360 +이번 주에 얘기했던 세 가지 중 하나가 아니죠 + +03:04.990 --> 03:07.180 +이 기능은 여기서 몇 번 보실 거예요 + +03:07.180 --> 03:13.060 +네 개의 목표 모듈 중 다른 모듈을 보면 전부 로라 A와 로라 B가 + +03:13.060 --> 03:20.020 +여기 있는데 다시 말씀드리면 로라 A와 로라 B가 있고 마지막으로 로라 + +03:20.020 --> 03:28.660 +A와 로라 B가 여기 로라 A와 로라 B가 있죠 여기서 어댑터 매트릭스가 라마 경계의 구조에 + +03:28.660 --> 03:36.430 +삽입되어 더 큰 모델을 적용하지만 훨씬 적은 범위에서 적용해요 + +03:36.460 --> 03:42.400 +R 하이퍼파라미터에 명시된 32개의 차원이죠 + +03:43.510 --> 03:45.220 +다른 건 바뀐 게 없어요 + +03:45.220 --> 03:48.880 +다층 과셉트론 층도 정확히 같아요 + +03:49.060 --> 03:51.130 +다른 건 다 똑같아요 + +03:52.090 --> 03:58.720 +다시 한번 말씀드리지만 이 문제에 휘말리지 않으려고 노력해요 하지만 과거를 + +03:58.720 --> 04:06.490 +돌아보면 납득이 돼요 IQ, IQ, IQ, IQ, IQ 4개 행렬에는 치수의 수가 있다는 + +04:06.760 --> 04:07.720 +걸요 + +04:07.720 --> 04:14.530 +각각 로라 A와 로라 B가 있고 매트릭스의 차원을 조합해서 + +04:14.530 --> 04:20.890 +이 어댑터에 있는 무게의 총합을 알려주는 거예요 + +04:20.890 --> 04:25.210 +그 말은 각 층에서 이 4개의 숫자를 합친다는 거죠 + +04:25.210 --> 04:31.990 +32를 곱합니다. 모듈 그룹이 32개니까요. 
+ +04:32.800 --> 04:37.120 +각 매개 변수는 4바이트 숫자죠 + +04:37.120 --> 04:38.350 +32개요 + +04:38.350 --> 04:43.570 +크기를 계산하고 100만으로 나누면 메가바이트로 get이 되죠 + +04:43.870 --> 04:48.580 +이해가 되시는지 모르겠지만 대략적인 건 + +04:48.580 --> 04:55.600 +이해하셨길 바라요 로라 어댑터의 중량을 모두 더하면 총 2,700만 + +04:55.600 --> 05:02.830 +개의 매개 변수가 있고 총 크기는 약 109MB예요 + +05:02.830 --> 05:07.190 +109MB의 2,700만 매개 변수죠 + +05:07.190 --> 05:10.730 +어댑터가 그만큼 크죠 + +05:11.030 --> 05:20.840 +물론 그걸 전체적인 llama 매개 변수가 80억 개인 것과 비교하세요 크기는 32GB죠 + +05:20.840 --> 05:26.450 +여기서 할 일이 많다는 걸 알 수 있어요 매개 변수도 많고 훈련할 것도 많지만 + +05:26.450 --> 05:33.290 +흉물스러운 라마 3에 비하면 아주 작죠 1, 작은 변종도요 + +05:33.290 --> 05:41.360 +이 안에 뭐가 많다는 건 알고 있었어요 무슨 말인지 다시 확인해 보세요 + +05:41.360 --> 05:46.490 +그래도 직감이 돌아올 거예요 저차원 물질을 + +05:46.490 --> 05:53.840 +이용하면 더 큰 건축물에 영향을 줄 수 있지만 크기가 작을수록 + +05:53.840 --> 06:00.320 +비트도 더 적게 조정해야 한다는 거죠 + +06:00.680 --> 06:06.440 +증거를 보여드리자면 이 숫자, 이 109MB는 얼굴을 안는 + +06:06.440 --> 06:09.870 +데 필요한 파라미터의 크기예요 + +06:09.900 --> 06:17.130 +얼굴을 끌어안고 있어요 제가 저장한 로라 어댑터를 보고 있죠 미세 튜닝 모델이요 + +06:17.130 --> 06:18.810 +뭘 찾게 될까요? + +06:18.810 --> 06:22.320 +이걸 보면 안전한 텐서를 찾죠 + +06:22.320 --> 06:26.070 +그게 매개 변수 자체를 저장하는 파일이에요 + +06:26.310 --> 06:31.530 +라마 3을 보시면 알 수 있죠 1번, 32GB로 크게 보이죠 + +06:31.530 --> 06:40.890 +이걸 보면 매개 변수가 109MB인 게 보이죠 여기 추정값과 + +06:40.890 --> 06:42.540 +일치해요 + +06:42.540 --> 06:49.830 +Q 로라 기법으로 세밀하게 조정하고 있는 매개 변수의 크기예요 + +06:50.310 --> 06:55.200 +적어도 직관적으로 어떤 일이 일어나고 있는지 파악하셨길 + +06:55.200 --> 07:01.470 +바랍니다 최적화하는 기가바이트의 데이터 없이도 모델을 미세 조정하는 + +07:01.470 --> 07:05.250 +트릭을 어떻게 구현할 수 있는지도요 + +07:06.000 --> 07:09.720 +이제 마무리로 슬라이드로 돌아가죠 diff --git a/week5/community-contributions/subtitles/srts/59506713/en_US.srt b/week5/community-contributions/subtitles/srts/59506713/en_US.srt new file mode 100755 index 0000000..b89e6a1 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59506713/en_US.srt @@ -0,0 +1,271 @@ +WEBVTT + +00:00.680 --> 00:01.790 +Hi everyone. 
+ +00:01.790 --> 00:08.060 +So the reason I'm so fired up about week seven is that this is the time when we actually start to build + +00:08.060 --> 00:10.790 +our own proprietary model. + +00:10.790 --> 00:16.910 +It's when we start to build our own AI, and we are looking to build something that we're going to try + +00:16.910 --> 00:23.780 +and make to compete with frontier models or yes, even we're going to try and see if we can outperform + +00:23.780 --> 00:25.550 +at least some frontier models. + +00:25.580 --> 00:32.300 +Now, the reason that that's even possible is that we are trying to solve one particular specific problem + +00:32.300 --> 00:37.910 +where we have a lot of data, and so we can take potentially a model that is significantly smaller than + +00:37.910 --> 00:38.960 +a frontier model. + +00:38.960 --> 00:41.990 +We can take something like an 8 billion parameter model. + +00:41.990 --> 00:50.810 +And by making it very good at one focused task, we can compete against the, the, the enormous the + +00:50.810 --> 00:53.840 +Giants, the Giants like GPT four zero. + +00:54.290 --> 01:00.560 +Because even though GPT four has trillions of parameters, it's designed to do so many different things, + +01:00.560 --> 01:04.790 +including writing witty poems about presidential Candidates. + +01:05.060 --> 01:09.980 +We are not going to be able to do any of those things, but we are going to try and get really, really + +01:09.980 --> 01:12.620 +good at predicting prices of products. + +01:12.620 --> 01:13.970 +That's going to be our thing. + +01:13.970 --> 01:15.350 +That's our business problem. + +01:15.350 --> 01:17.210 +That's what we're setting out to solve. 
+ +01:17.240 --> 01:24.170 +The first step in this is to decide which model we're going to use as our base model for fine tuning, + +01:24.170 --> 01:30.050 +and that's something that's worth taking time on, because it's going to be so obviously pivotal to + +01:30.080 --> 01:31.280 +what we end up building. + +01:31.670 --> 01:34.880 +You can think of it in itself as like a one massive great hyper parameter. + +01:34.880 --> 01:38.120 +We can try a different base model and see how that performs. + +01:38.120 --> 01:45.020 +And as part of that, we're going to have to decide whether we take the original pre-trained version + +01:45.020 --> 01:47.360 +of that model, the very base model. + +01:47.360 --> 01:53.390 +Sometimes people use the word foundation model to to describe that, as opposed to the one that's been + +01:53.390 --> 02:00.860 +fine tuned for chat purposes that sometimes called the instruct variant, which is the one that's expecting + +02:00.950 --> 02:08.360 +a dictionary or a list of dictionaries with a user prompt system, prompt user assistant, and so on. + +02:08.370 --> 02:15.120 +That style of input, which you remember, gets turned into just a series of tokens separating out different + +02:15.120 --> 02:16.530 +sections of the prompt. + +02:16.530 --> 02:18.540 +So we've got to make a decision about that. + +02:18.540 --> 02:20.520 +And there are of course, pros and cons. + +02:20.700 --> 02:26.550 +Um, and then once we've done that, we should then take the base model as it is off the shelf and see + +02:26.550 --> 02:31.920 +how that does against our challenge, because, you know, we see whether we're already in a position + +02:31.920 --> 02:34.950 +where we are competing with the frontier models. + +02:34.950 --> 02:40.320 +And even if we're not competing with frontier models, remember that this is going to be free of charge, + +02:40.320 --> 02:42.030 +or at least it will be the cost of us. 
+ +02:42.030 --> 02:48.330 +Running computes so we won't have to pay API costs when we're running our own open source version of + +02:48.330 --> 02:48.840 +the model. + +02:48.840 --> 02:54.210 +So even if we're in the right territory, there's still many reasons why we should use open source. + +02:54.210 --> 02:59.910 +Even before we talk about making it into a proprietary model specialized for this particular business + +02:59.910 --> 03:00.600 +problem. + +03:00.600 --> 03:05.910 +So anyways, with that in mind, it's time for us to talk about which model to use. + +03:06.060 --> 03:09.420 +Uh, first of all, we need to decide how many parameters to use. + +03:09.420 --> 03:13.830 +And, you know, it is probably a case that more is more. + +03:13.860 --> 03:19.140 +More parameters is going to give us a better shot at, uh, solving the problem. + +03:19.140 --> 03:25.800 +Well, particularly in a world as we have it, where we have a lot of training data, we have 400,000 + +03:25.830 --> 03:26.670 +examples. + +03:26.670 --> 03:28.980 +So we've got tons of training data. + +03:28.980 --> 03:31.020 +It's not like we're limited in that regard. + +03:31.020 --> 03:33.180 +And we could always generate more if we wanted to. + +03:33.210 --> 03:38.910 +So really our constraint is going to be, uh, it's going to be the amount of memory capacity that we + +03:38.910 --> 03:39.270 +have. + +03:39.300 --> 03:42.270 +We're going to want to fit it in one smaller box. + +03:42.300 --> 03:49.320 +And so we pretty much already know at this point that we can we can get in about an 8 billion parameter + +03:49.320 --> 03:49.740 +model. + +03:49.770 --> 03:51.150 +7 to 8 billion parameter. + +03:51.150 --> 03:53.430 +We won't be able to go much more than that. + +03:53.460 --> 03:59.850 +And whilst there are some small parameter models, very small uh, like like Gemma goes down to, I + +03:59.850 --> 04:02.400 +think a 2 or 3 billion parameter version. 
+ +04:02.400 --> 04:04.080 +Uh, we'll take a look in a second. + +04:04.230 --> 04:12.660 +Um, the, uh, we're probably going to want to, to take eight as the, uh, the largest possible model + +04:12.660 --> 04:16.510 +that will fit into the boxes that we can afford. + +04:16.660 --> 04:22.930 +So that's going to be our approach when it comes to the base or the instruct variants. + +04:22.930 --> 04:26.710 +There are pros and cons and it's something to experiment with. + +04:26.740 --> 04:35.500 +Generally speaking, if you're fine tuning specifically for one problem where you have a particular + +04:35.500 --> 04:41.230 +prompt that you'll be using and expecting a response in a particular way, then you might as well start + +04:41.230 --> 04:48.640 +with a base model, not an instruct variant, because you don't need to apply things like system prompts + +04:48.640 --> 04:53.200 +and user prompts because you're just going to be training it for one purpose anyway. + +04:53.230 --> 04:59.620 +So you might as well just take a base one and fine tune it, so that all it's really good at is that + +04:59.620 --> 05:00.490 +one task. + +05:00.490 --> 05:02.680 +So that's the kind of default answer. + +05:02.710 --> 05:06.850 +Now there are some benefits to starting with the instruct variant. + +05:06.850 --> 05:08.890 +One of them is that it's very good. + +05:08.950 --> 05:15.220 +It's already been trained to recognize things like a system prompt and the user assistant interaction. + +05:15.220 --> 05:20.710 +And you can use that to your advantage by using the system prompt to sort of tee it up in a certain + +05:20.710 --> 05:28.150 +way to to frame it, to be working, uh, to be, uh, taking a persona, uh, rather than having to + +05:28.150 --> 05:31.210 +rely on it, learning that persona through all of your training data. + +05:31.210 --> 05:37.300 +So there are some situations where the instruct variant is a better starting point. 
+ +05:37.450 --> 05:46.450 +Uh, now, for us, my instinct is that base is the better starting point, because we've got such a + +05:46.450 --> 05:51.610 +set structure that we're going to be using for all of our training data and for our objective. + +05:51.880 --> 05:57.070 +Um, and I've in fact tried both and base did do slightly better than the instruct variant, but they + +05:57.070 --> 05:58.120 +were very close. + +05:58.180 --> 06:02.770 +Uh, you could also try both and see if you get the same results as me. + +06:02.770 --> 06:09.070 +Uh, but uh, I would think it would be more normal in our kind of situation, when you have one particular + +06:09.070 --> 06:15.040 +task and you don't need to be applying system prompts and the like that you would start with the base + +06:15.040 --> 06:16.090 +variant. + +06:16.600 --> 06:23.470 +But with that introduction, let's head over to the Hugging face, uh, open leaderboard and take a + +06:23.470 --> 06:24.670 +look at some models. diff --git a/week5/community-contributions/subtitles/srts/59506713/ja_JP.srt b/week5/community-contributions/subtitles/srts/59506713/ja_JP.srt new file mode 100755 index 0000000..7751e95 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59506713/ja_JP.srt @@ -0,0 +1,202 @@ +WEBVTT + +00:00.680 --> 00:01.790 +皆さん、 こんにちは。 + +00:01.790 --> 00:10.790 +私が第7週をとても楽しみにしているのは、 この時期から実際に独自のモデルを構築し始めるからだ。 + +00:10.790 --> 00:16.910 +私たちが独自のAIを作り始めるのは、 フロンティア・モデルと競争できるようなものを作ろうとするときであり、 + +00:16.910 --> 00:25.550 +そう、 少なくともいくつかのフロンティア・モデルを凌駕できるかどうかを試してみるときでもある。 + +00:25.580 --> 00:32.300 +このようなことが可能なのは、 データがたくさんある特定の問題を解決しようとしているからで、 + +00:32.300 --> 00:38.960 +フロンティア・モデルよりもかなり小さいモデルを採用できる可能性があるからだ。 + +00:38.960 --> 00:41.990 +80億のパラメーターのようなモデルを使うことができる。 + +00:41.990 --> 00:50.810 +そして、 1つの集中したタスクを非常に得意にすることで、 GPTフォーゼロのような巨大なジャイアンツ、 + +00:50.810 --> 00:53.840 +巨人軍に対抗することができる。 + +00:54.290 --> 01:00.560 +なぜなら、 GPT4は何兆ものパラメーターを持っているにもかかわらず、 大統領候補について気の利いた詩を書くなど、 + +01:00.560 --> 01:04.790 
+実にさまざまなことができるように設計されているからだ。 + +01:05.060 --> 01:12.620 +しかし、 商品の価格を予測することに関しては、 本当に、 本当にうまくやろうと思っている。 + +01:12.620 --> 01:13.970 +それが僕らの仕事だ。 + +01:13.970 --> 01:15.350 +それが我々のビジネス上の問題だ。 + +01:15.350 --> 01:17.210 +それを解決するのが私たちの目的だ。 + +01:17.240 --> 01:24.170 +この最初のステップは、 微調整のためのベースモデルとしてどのモデルを使うかを決めることで、 + +01:24.170 --> 01:31.280 +これは時間をかける価値のあることだ。 + +01:31.670 --> 01:34.880 +それ自体、 巨大なハイパーパラメーターのようなものだと考えればいい。 + +01:34.880 --> 01:38.120 +別のベースモデルを試してみて、 その結果を見ることもできる。 + +01:38.120 --> 01:47.360 +その一環として、 私たちはそのモデルのオリジナルの事前訓練されたバージョン、 まさにベースモデルを使うかどうかを決めなければならない。 + +01:47.360 --> 01:53.390 +チャット用に微調整されたもので、 + +01:53.390 --> 02:08.360 +インストラクトバリアントと呼ばれることもあります。 + +02:08.370 --> 02:16.530 +あなたが覚えているその入力スタイルは、 プロンプトのさまざまなセクションを区切る一連のトークンに変わる。 + +02:16.530 --> 02:18.540 +だから、 それについて決断しなければならない。 + +02:18.540 --> 02:20.520 +もちろん、 長所も短所もある。 + +02:20.700 --> 02:26.550 +そうしたら、 ベースモデルをそのまま使って、 + +02:26.550 --> 02:34.950 +我々の挑戦に対してどうなのかを見てみるべきだ。 + +02:34.950 --> 02:42.030 +そして、 フロンティアモデルと競争していないとしても、 これが無料になること、 少なくとも私たちの負担になることを忘れないでほしい。 + +02:42.030 --> 02:48.840 +計算を実行することで、 自分たちのオープンソース版のモデルを実行するときにAPIコストを支払う必要がなくなる。 + +02:48.840 --> 02:54.210 +だから、 私たちが正しい領域にいるとしても、 オープンソースを使うべき理由はまだたくさんある。 + +02:54.210 --> 02:59.910 +この特定のビジネス問題に特化した独自のモデルにするという話をする前から、 + +02:59.910 --> 03:00.600 +だ。 + +03:00.600 --> 03:05.910 +とにかく、 それを念頭に置いて、 どのモデルを使うべきかを話す時が来た。 + +03:06.060 --> 03:09.420 +ええと、 まず最初に、 いくつのパラメーターを使うかを決める必要がある。 + +03:09.420 --> 03:13.830 +それに、 多ければ多いほどいいというものでもないだろう。 + +03:13.860 --> 03:19.140 +パラメーターを増やせば、 問題を解決する可能性が高まる。 + +03:19.140 --> 03:26.670 +特に今のような世界では、 多くのトレーニングデータがあり、 40万もの例がある。 + +03:26.670 --> 03:28.980 +だから大量のトレーニングデータがある。 + +03:28.980 --> 03:31.020 +その点で私たちが制限されているわけではない。 + +03:31.020 --> 03:33.180 +そして、 やろうと思えばいつでももっと生み出せる。 + +03:33.210 --> 03:39.270 +というわけで、 私たちの制約となるのは、 メモリの容量ということになる。 + +03:39.300 --> 03:42.270 +もっと小さな箱に収めたい。 + +03:42.300 --> 03:49.740 +この時点で、 私たちは80億のパラメーターを持つモデルを作ることができることをすでに知っている。 + +03:49.770 --> 03:51.150 +パラメータは70億から80億。 + 
+03:51.150 --> 03:53.430 +それ以上は無理だろう。 + +03:53.460 --> 04:02.400 +パラメータが小さいモデルがある一方で、 ジェンマのようにパラメータが20億か30億のモデルもある。 + +04:02.400 --> 04:04.080 +ええと、 すぐに見てみましょう。 + +04:04.230 --> 04:16.510 +ええと、 おそらく、 余裕がある箱に収まる、 可能な限り大きなモデルとして8つを選びたいんだ。 + +04:16.660 --> 04:22.930 +だから、 ベースやインストラクターのバリエーションに関しては、 そういうアプローチになるだろう。 + +04:22.930 --> 04:26.710 +長所も短所もあるし、 試してみる価値はある。 + +04:26.740 --> 04:35.500 +一般的に言えば、 特定のプロンプトを使用し、 特定の方法での応答を期待するような、 + +04:35.500 --> 04:41.230 +ある問題に特化した微調整を行うのであれば、 インストラクターのバリアントではなく、 + +04:41.230 --> 04:53.200 +ベースモデルから始めた方がよいでしょう。 + +04:53.230 --> 05:00.490 +だから、 ベースとなる1つを微調整して、 その1つのタスクだけが本当に得意になるようにしたほうがいい。 + +05:00.490 --> 05:02.680 +それがデフォルトの答えだ。 + +05:02.710 --> 05:06.850 +さて、 インストラクターのバリエーションから始めることにはいくつかの利点がある。 + +05:06.850 --> 05:08.890 +そのひとつは、 とてもおいしいということだ。 + +05:08.950 --> 05:15.220 +システムプロンプトやユーザーアシスタントのインタラクションなどを認識するようにすでに訓練されている。 + +05:15.220 --> 05:20.710 +システム・プロンプトを使うことで、 ペルソナを学習させるのではなく、 + +05:20.710 --> 05:31.210 +ペルソナを学習させ、 トレーニング・データを通してペルソナを学習させることができる。 + +05:31.210 --> 05:37.300 +だから、 インストラクターのバリエーションがより良い出発点になる状況もある。 + +05:37.450 --> 05:51.610 +僕らの場合、 直感的にはベースの方がスタート地点としてはいいんじゃないかと思うんだ。 + +05:51.880 --> 05:58.120 +実際に両方試してみたが、 ベースはインストラクターのバリエーションより若干良かったが、 非常に僅差だった。 + +05:58.180 --> 06:02.770 +ええと、 両方試してみて、 私と同じ結果が出るかどうか見てみるのもいい。 + +06:02.770 --> 06:09.070 +でも、 僕らのような状況では、 ある特定のタスクがあり、 システムプロンプトなどを適用する必要がない場合、 + +06:09.070 --> 06:16.090 +ベースのバリアントから始めるのが普通だと思う。 + +06:16.600 --> 06:24.670 +しかし、 その紹介をした上で、 ハグする顔、 あー、 オープンリーダーボードに向かい、 いくつかのモデルを見てみよう。 diff --git a/week5/community-contributions/subtitles/srts/59506713/ko_KR.srt b/week5/community-contributions/subtitles/srts/59506713/ko_KR.srt new file mode 100755 index 0000000..1c5c802 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59506713/ko_KR.srt @@ -0,0 +1,250 @@ +WEBVTT + +00:00.680 --> 00:01.790 +안녕하세요 + +00:01.790 --> 00:08.060 +7주 차에 제가 이렇게 흥분한 이유는 우리만의 모델을 구축하기 + +00:08.060 --> 00:10.790 +시작했기 때문이죠 + +00:10.790 --> 00:16.910 +우리만의 인공지능을 개발하기 시작하는 
시기예요 개척자 모델과 경쟁할 + +00:16.910 --> 00:23.780 +수 있는 제품을 만들려고 해요 적어도 몇몇 개척자 모델보다 나은 제품을 만들려고 + +00:23.780 --> 00:25.550 +노력하죠 + +00:25.580 --> 00:32.300 +그게 가능한 이유는 데이터가 많은 특정 문제를 해결하려 하기 때문이죠 + +00:32.300 --> 00:38.960 +그래서 프런티어 모델보다 훨씬 작은 모델을 쓸 수 있어요 + +00:38.960 --> 00:41.990 +80억 개의 매개 변수 모델을 사용할 수 있어요 + +00:41.990 --> 00:50.810 +한 가지 집중된 작업에서 뛰어난 성과를 내면 거대한 거대 기업과 경쟁할 수 있습니다 GPT + +00:50.810 --> 00:53.840 +40 같은 거대 기업요 + +00:54.290 --> 01:00.560 +GPT 4에는 수조 개의 매개 변수가 있지만 다양한 용도로 설계되었기 때문입니다 + +01:00.560 --> 01:04.790 +대선 후보에 관한 재치 있는 시를 쓰는 것도 그중 하나죠 + +01:05.060 --> 01:09.980 +이 모든 걸 할 수는 없겠지만 제품 가격을 예측하는 데에 + +01:09.980 --> 01:12.620 +정말 최선을 다할 거예요. + +01:12.620 --> 01:13.970 +우리만의 방식이죠 + +01:13.970 --> 01:15.350 +그건 우리 사업상의 문제예요 + +01:15.350 --> 01:17.210 +그걸 해결하려고 해요 + +01:17.240 --> 01:24.170 +첫 단계는 어떤 모델을 미세 조정의 베이스 모델로 쓸지 결정하는 겁니다 시간을 + +01:24.170 --> 01:31.280 +들일 가치가 있는 일이죠 우리가 만들 것에 결정적인 역할을 할 테니까요 + +01:31.670 --> 01:34.880 +그 자체로 하나의 거대한 하이퍼 매개 변수라고 생각할 수 있죠 + +01:34.880 --> 01:38.120 +다른 기본 모델을 시험해 보고 어떻게 되는지 보죠 + +01:38.120 --> 01:45.020 +그 일부로 그 훈련받은 모델의 원래 버전을 쓸지 결정해야 해요 + +01:45.020 --> 01:47.360 +바로 그 기본 모델요 + +01:47.360 --> 01:53.390 +사람들은 그걸 설명하기 위해 파운데이션 모델이란 단어를 쓰는데 + +01:53.390 --> 02:00.860 +채팅 목적으로 잘 만들어진 건 반대로 변환이라고도 하죠 사전이나 사전 목록을 기대하는 + +02:00.950 --> 02:08.360 +거예요 사용자 프롬프트 시스템이나 사용자 보조 같은 게 있는 거죠 + +02:08.370 --> 02:15.120 +입력 스타일은 프롬프트의 다른 섹션을 분리하는 토큰 시리즈로 + +02:15.120 --> 02:16.530 +바뀌죠 + +02:16.530 --> 02:18.540 +그래서 결정을 내려야 해요 + +02:18.540 --> 02:20.520 +물론 장단점이 있죠 + +02:20.700 --> 02:26.550 +그 작업이 끝나면 기존의 기본 모델을 가져다가 우리 과제와 어떻게 + +02:26.550 --> 02:31.920 +맞물리는지 봐야 해요 우리가 이미 프런티어 모델과 경쟁하는 + +02:31.920 --> 02:34.950 +위치에 있는지 알 수 있으니까요 + +02:34.950 --> 02:40.320 +우리가 개척 시대 모델들과 경쟁하지 않더라도 무료로 진행될 거라는 걸 기억하세요 적어도 + +02:40.320 --> 02:42.030 +비용은 우리가 부담해야죠 + +02:42.030 --> 02:48.330 +계산을 실행해 API 비용을 내지 않아도 되죠 고유의 오픈 소스 버전 모델을 실행할 + +02:48.330 --> 02:48.840 +때요 + +02:48.840 --> 02:54.210 +따라서 올바른 영역에 있다 해도 오픈 소스를 써야 할 이유는 여전히 많아요 + 
+02:54.210 --> 03:00.600 +이런 특정 비즈니스 문제에 특화된 독점 모델로 만들자는 얘기가 나오기 전부터요 + +03:00.600 --> 03:05.910 +어쨌든 그걸 염두에 두고 어떤 모델을 사용할지 얘기해보죠 + +03:06.060 --> 03:09.420 +우선 매개 변수를 몇 개 쓸지 정해야 해요 + +03:09.420 --> 03:13.830 +많을수록 좋은 것 같아요 + +03:13.860 --> 03:19.140 +매개 변수가 많을수록 문제 해결에 도움이 돼요 + +03:19.140 --> 03:25.800 +특히 훈련 데이터가 많은 현재 세계에서는 400,000개의 예가 + +03:25.830 --> 03:26.670 +있어요 + +03:26.670 --> 03:28.980 +훈련 데이터가 아주 많아요 + +03:28.980 --> 03:31.020 +그런 면에서 한계가 있는 건 아니잖아요 + +03:31.020 --> 03:33.180 +원하면 언제든 더 만들 수 있어요 + +03:33.210 --> 03:39.270 +그래서 제약 조건은 우리가 가진 메모리 용량의 양이 될 거예요 + +03:39.300 --> 03:42.270 +작은 상자에 넣을 거예요 + +03:42.300 --> 03:49.740 +지금까지는 이미 알고 있었어요 약 80억 개의 매개 변수 모델을 얻을 수 있다는 것을요. + +03:49.770 --> 03:51.150 +70억에서 80억 매개 변수죠 + +03:51.150 --> 03:53.430 +그 이상은 못 가요 + +03:53.460 --> 03:59.850 +작은 매개 변수 모델이 있긴 하지만요 아주 작죠 젬마 같은 경우 20억에서 30억 + +03:59.850 --> 04:02.400 +개의 매개 변수 버전일 거예요 + +04:02.400 --> 04:04.080 +잠시 후에 살펴보죠 + +04:04.230 --> 04:12.660 +8대를 가져가려고 해요 저희 예산에 맞는 + +04:12.660 --> 04:16.510 +가장 큰 모델로요 + +04:16.660 --> 04:22.930 +그게 저희가 베이스나 변형을 만들 때 접근하는 방식이죠 + +04:22.930 --> 04:26.710 +장단점이 있는데 실험해 볼 만한 거예요 + +04:26.740 --> 04:35.500 +일반적으로, 한 가지 문제에 대해서만 미세 조정할 수 있습니다 특정 프롬프트를 이용하고 있고 특정 + +04:35.500 --> 04:41.230 +방식으로 응답을 기대한다면 기본 모델로 시작하는 것이 좋습니다 + +04:41.230 --> 04:48.640 +변종은 아닙니다 시스템 프롬프트나 사용자 프롬프트 같은 것을 적용할 필요가 없습니다 + +04:48.640 --> 04:53.200 +한 가지 목적으로만 훈련시킬 것이기 때문이죠 + +04:53.230 --> 05:00.490 +그러니 기본을 하나 골라서 미세 조정하세요 그 작업에만 정말 잘하도록요 + +05:00.490 --> 05:02.680 +그게 기본 대답이에요 + +05:02.710 --> 05:06.850 +변종으로 시작하면 좋은 점이 있어요 + +05:06.850 --> 05:08.890 +하나는 아주 맛있다는 거죠 + +05:08.950 --> 05:15.220 +시스템 프롬프트나 사용자 보조 상호 작용 같은 걸 인식하도록 훈련받았죠 + +05:15.220 --> 05:20.710 +이를 장점으로 활용할 수 있습니다 시스템 프롬프트를 이용해 + +05:20.710 --> 05:28.150 +특정 방식으로 틀을 잡고 작업하는 거죠 인격에 의존하지 않고 훈련 데이터를 통해 + +05:28.150 --> 05:31.210 +인격을 익히는 거예요 + +05:31.210 --> 05:37.300 +그래서 변형 구성이 더 나은 출발점이 되는 상황도 있어요 + +05:37.450 --> 05:46.450 +제 직감으로는 기초가 더 나은 출발점입니다 훈련 데이터와 목표에 + +05:46.450 --> 
05:51.610 +사용할 정해진 구조가 있으니까요 + +05:51.880 --> 05:58.120 +사실 둘 다 써봤는데 밑바탕이 개별 제품보다 조금 더 잘 나왔어요 하지만 막상막하였죠 + +05:58.180 --> 06:02.770 +어, 둘 다 해 보시고 저랑 같은 결과가 나오는지 확인해 보세요 Get up + +06:02.770 --> 06:09.070 +하지만 이런 상황에서는 더 정상적일 것 같아요 하나의 특정 작업이 있는데 + +06:09.070 --> 06:15.040 +시스템 프롬프트를 적용할 필요가 없을 때요 그런 경우 기본 변종에서 + +06:15.040 --> 06:16.090 +시작하죠 + +06:16.600 --> 06:23.470 +소개를 마쳤으니 이제 안아주는 얼굴로 넘어가죠 오픈 leaderboard에서 모델들을 살펴볼게요 + +06:23.470 --> 06:24.670 +HOLO diff --git a/week5/community-contributions/subtitles/srts/59506929/en_US.srt b/week5/community-contributions/subtitles/srts/59506929/en_US.srt new file mode 100755 index 0000000..96eabe3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59506929/en_US.srt @@ -0,0 +1,256 @@ +WEBVTT + +00:00.890 --> 00:04.820 +And we return to the hugging face open LLM leaderboard. + +00:04.820 --> 00:09.650 +The first place you go when selecting your base model for your training. + +00:09.650 --> 00:17.690 +So first thing I'm going to do is I'm going to focus in on models with a parameter size at most, let's + +00:17.690 --> 00:18.740 +say nine. + +00:18.740 --> 00:26.000 +And we can we can afford to go down a bit and let's filter out everything but the base models to start + +00:26.000 --> 00:26.690 +with. + +00:26.810 --> 00:30.950 +Um, and let's have it show the number of parameters as well. + +00:30.950 --> 00:34.190 +Here we go in this table down below. + +00:34.280 --> 00:39.890 +So this is the results of the strongest models according to these various metrics. + +00:39.950 --> 00:48.890 +And you will see that powerhouse clan that I mentioned many times, including this 2.52.5 is very new. + +00:48.890 --> 00:51.710 +Uh 7 billion variant. + +00:51.710 --> 00:56.600 +Uh is uh the highest scorer of this lot. + +00:56.840 --> 01:02.760 +Um, and you'll see that Gemma is a 9 billion parameter variant of Gemma. + +01:02.790 --> 01:07.650 +That's right up there to, uh, Mistral up here. 
+
+01:07.950 --> 01:12.780
+Uh, and the one that we like to talk about a lot.
+
+01:12.810 --> 01:17.010
+Um, there was phi2, by the way, from Microsoft.
+
+01:17.010 --> 01:20.700
+Uh, llama 3.1 is a little bit further down.
+
+01:20.850 --> 01:26.340
+Uh, now it's worth mentioning that the numbers are all reasonably close at the top, although there's
+
+01:26.340 --> 01:28.380
+somewhat something of a margin here.
+
+01:28.500 --> 01:33.030
+Uh, so it might concern you that llama 3.1 that you happen to know is the one we're going to end up
+
+01:33.030 --> 01:36.510
+picking because you've seen it in the code is further down.
+
+01:36.660 --> 01:39.570
+Um, but there is there is something of a reason for that.
+
+01:39.630 --> 01:45.960
+Uh, when you're looking at these different scores, really you do need to, to also bring in the,
+
+01:45.960 --> 01:49.170
+the versions that have been trained.
+
+01:49.260 --> 01:56.850
+Um, the what's called the instruct variants, which are the same models but then given uh, more trained
+
+01:56.920 --> 02:03.880
+using various reinforcement learning techniques to to respond to that particular chat instruct style.
+
+02:03.880 --> 02:10.210
+And when it's uh, given that kind of framework, it's more likely to be able to perform against these
+
+02:10.210 --> 02:16.840
+various tests because it will respond to the instruction that's being given rather than being expected
+
+02:16.840 --> 02:19.870
+to be trained to, to adapt to a different task.
+
+02:20.050 --> 02:26.080
+Um, so really, you're getting a more realistic view of the capability, even of the base model, if
+
+02:26.080 --> 02:31.720
+you look at how it performs with benchmarks, when you look at the instruct variation, if that makes
+
+02:31.720 --> 02:32.170
+sense.
+
+02:32.170 --> 02:38.590
+And when we do that, we see that llama 3.1 8 billion really is in the top grouping here.
+
+02:38.620 --> 02:42.880
+Uh, we've got uh, Phi three is up there as well. 
+ +02:42.880 --> 02:43.870 +Uh, jammer. + +02:43.870 --> 02:48.040 +And then um, the Meta Llama 3.1. + +02:48.040 --> 02:52.030 +So it's doing it's doing very well when you look at the instruct variant. + +02:52.060 --> 02:58.160 +And as I said, uh, somewhat perversely, I'm not suggesting that we actually use the instruct variant. + +02:58.160 --> 03:03.320 +I'm suggesting that we stick with the the base version of it because we don't want it. + +03:03.350 --> 03:08.600 +We don't want it necessarily to have used up lots of its thought process, lots of its sort of training + +03:08.600 --> 03:12.620 +power, learning about things like system prompts and user prompts and so on. + +03:12.620 --> 03:16.940 +I'm just saying that once you have been through that exercise, you can see it performs well in all + +03:16.970 --> 03:17.900 +of these scores. + +03:17.900 --> 03:24.290 +And that gives us a good sense that the base model is good at adapting to be able to address these different + +03:24.290 --> 03:25.160 +benchmarks. + +03:25.160 --> 03:29.570 +So that's a more nuanced way to interpret the results of the leaderboard. + +03:29.570 --> 03:34.580 +You can look at the instruct variant and see how that performs, and it still gives you a good indication + +03:34.580 --> 03:37.400 +of how the base model will perform as well. + +03:38.030 --> 03:46.010 +Now, there is one other slightly subtle reason that that I'm picking llama, even though you might + +03:46.010 --> 03:54.740 +say that either five three, Gemma or indeed Gwen would look like they are scoring higher in many fronts. + +03:54.740 --> 03:57.350 +There is a convenience to llama. + +03:57.350 --> 03:57.980 +That's just. 
+
+03:58.010 --> 04:03.350
+It only makes a small difference to everything, but it does make our code a bit simpler and it makes
+
+04:03.350 --> 04:10.040
+the task a bit easier for llama, which is that when you look at the tokenizer for llama, you'll see
+
+04:10.040 --> 04:19.760
+that for llama, every number between 0 and 999, every three digit number gets mapped to one token.
+
+04:19.790 --> 04:25.280
+The same is actually not true for 3 or 4, or for Qwen.
+
+04:25.460 --> 04:30.950
+In all three of those other models, they have basically a kind of you can think of it as like a token
+
+04:30.980 --> 04:31.670
+per digit.
+
+04:31.670 --> 04:36.110
+So the number 999 ends up as three separate tokens.
+
+04:36.140 --> 04:40.940
+Now, you might ask me, what difference does that make that it shouldn't make any difference at all.
+
+04:41.210 --> 04:50.150
+So it's going to turn out that when we're doing training we are we're using a model to generate tokens
+
+04:50.150 --> 04:54.990
+and we're trying to make it think in terms of more of a regression model.
+
+04:54.990 --> 05:00.660
+We want it to be trying to solve for getting better and better at predicting the next token, and that
+
+05:00.660 --> 05:02.610
+that should map to the price.
+
+05:02.610 --> 05:04.830
+So it simplifies the problem.
+
+05:04.830 --> 05:10.740
+If the price is reflected exactly in one token that the model has to generate.
+
+05:10.740 --> 05:16.980
+So just in this particular situation, for the particular problem we're trying to solve, the tokenization
+
+05:16.980 --> 05:25.950
+strategy for Llama 3.1 works very well because the the, the single next token that it generates will
+
+05:25.950 --> 05:28.950
+in itself reflect everything about the price.
+
+05:29.130 --> 05:35.460
+So that, uh, it's not the case that it might predict that the next token should be nine, and that
+
+05:35.460 --> 05:39.660
+could be $9 or $99 or $999. 
+
+05:39.660 --> 05:42.000
+And that will only transpire when it does the token.
+
+05:42.000 --> 05:42.690
+After that.
+
+05:42.720 --> 05:49.710
+No, it's going to be the case that the single token that it projects as the next token in its answer
+
+05:49.720 --> 05:53.890
+will reflect the full price of the product in one token.
+
+05:54.340 --> 06:02.290
+So it's a nuance, but it's a reason why we lean towards selecting llama 3.1 in this case.
+
+06:02.650 --> 06:07.690
+But by all means, we will have the ability to choose other models and see how they perform.
+
+06:07.690 --> 06:13.510
+But llama gets a bit of an edge because of this convenience with the way that it tokenizes.
+
+06:13.900 --> 06:19.360
+So that gives you some color on some of the thought process that goes behind selecting a model, looking
+
+06:19.360 --> 06:23.770
+at the leaderboards, looking a little bit more deeply at leaderboards, thinking about instruct variants
+
+06:23.800 --> 06:29.920
+versus the base model's parameter sizes, and then also some nuances about things like the way that
+
+06:29.920 --> 06:31.420
+the tokenization works.
+
+06:31.420 --> 06:38.810
+And all of that together has allowed us to come to the decision that we are going to select llama 3.1 8
+
+06:38.810 --> 06:42.190
+billion as the base model for our project.
+
+06:42.280 --> 06:48.640
+And now with that, let's go to the Colab and give that base model a try. 
diff --git a/week5/community-contributions/subtitles/srts/59506929/ja_JP.srt b/week5/community-contributions/subtitles/srts/59506929/ja_JP.srt new file mode 100755 index 0000000..3cdf691 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59506929/ja_JP.srt @@ -0,0 +1,202 @@ +WEBVTT + +00:00.890 --> 00:04.820 +そして、 我々はハグ顔オープンLLMリーダーボードに戻る。 + +00:04.820 --> 00:09.650 +トレーニングのベースとなるモデルを選ぶ際、 最初に行く場所。 + +00:09.650 --> 00:18.740 +そこでまず最初にやることは、 パラメーターのサイズが最大でも9のモデルに焦点を絞ることだ。 + +00:18.740 --> 00:26.690 +そして、 私たちは少し下がってもいいし、 まずはベースモデル以外のすべてを除外してみよう。 + +00:26.810 --> 00:30.950 +ええと、 パラメーターの数も表示させましょう。 + +00:30.950 --> 00:34.190 +下の表を見てみよう。 + +00:34.280 --> 00:39.890 +というわけで、 これらのさまざまな指標に従った最強モデルの結果がこれだ。 + +00:39.950 --> 00:48.890 +そして、 この2人を含め、 何度も述べたあの強豪クランを目にすることになるだろう。 52. 5はとても新しい。 + +00:48.890 --> 00:51.710 +ええと、 70億のバリエーション。 + +00:51.710 --> 00:56.600 +ウーはこの中で最も得点力のある選手だ。 + +00:56.840 --> 01:02.760 +ええと、 ジェンマはジェンマの90億パラメータ変種であることがわかるだろう。 + +01:02.790 --> 01:07.650 +ミストラルのすぐ上だ。 + +01:07.950 --> 01:12.780 +それと、 僕らがよく話したいと思っているのが、 この話なんだ。 + +01:12.810 --> 01:17.010 +そういえば、 マイクロソフトのファイ2がありましたね。 + +01:17.010 --> 01:20.700 +ええと、 ラマ3。 1はもう少し先だ。 + +01:20.850 --> 01:28.380 +この数字には若干の差があるが、 上位はほぼ拮抗している。 + +01:28.500 --> 01:36.510 +ラマ3世が気になるかもしれないね。 あなたが知っている1が、 私たちが最終的に選ぶことになるものです。 + +01:36.660 --> 01:39.570 +でも、 それには何か理由があるんだ。 + +01:39.630 --> 01:49.170 +このようなさまざまなスコアを見る場合、 訓練されたバージョンを取り入れる必要がある。 + +01:49.260 --> 01:56.850 +インストラクター・バリアントと呼ばれるものは、 同じモデルだが、 特定のチャット・インストラクター・スタイルに対応できるよう、 + +01:56.920 --> 02:03.880 +さまざまな強化学習テクニックを使ってより訓練されたものだ。 + +02:03.880 --> 02:10.210 +このような枠組みがあれば、 さまざまなテストに対応できる可能性が高くなる。 なぜなら、 + +02:10.210 --> 02:19.870 +異なるタスクに適応するように訓練されることを期待されるよりも、 与えられた指示に反応するようになるからだ。 + +02:20.050 --> 02:26.080 +つまり、 ベースモデルであっても、 ベンチマークやインストラクターのバリエーションを見れば、 + +02:26.080 --> 02:32.170 +より現実的な能力がわかるということだ。 + +02:32.170 --> 02:38.590 +そうすると、 ラマ3世が見えてくる。 180億ドルは本当にトップグループだ。 + +02:38.620 --> 02:42.880 +ええと、 ファイ3もあそこにある。 + +02:42.880 --> 02:43.870 +あの、 ジャマー。 
+ +02:43.870 --> 02:48.040 +それからメタ・ラマ3。 1. + +02:48.040 --> 02:52.030 +だから、 インストラクターのバリアントを見ると、 とてもうまくいっている。 + +02:52.060 --> 02:58.160 +そして、 さっきも言ったように、 ちょっと変な言い方だが、 インストラクターのバリエーションを実際に使おうとは言っていない。 + +02:58.160 --> 03:03.320 +私たちはそれを望んでいないのだから、 基本バージョンに固執することを提案しているのだ。 + +03:03.350 --> 03:12.620 +システムのプロンプトやユーザーのプロンプトなどについて学習し、 思考プロセスやトレーニングの力を使い果たすことは避けたい。 + +03:12.620 --> 03:17.900 +ただ、 その練習を経験すれば、 これらのスコアすべてで好成績を収めていることがわかると言っているのだ。 + +03:17.900 --> 03:25.160 +そして、 ベースモデルがこれらの異なるベンチマークに対応できるよう適応する能力に優れていることを感じさせてくれる。 + +03:25.160 --> 03:29.570 +つまり、 リーダーボードの結果をよりニュアンス的に解釈する方法だ。 + +03:29.570 --> 03:37.400 +インストラクターのバリアントを見て、 そのパフォーマンスを確認することができる。 + +03:38.030 --> 03:46.010 +さて、 私がラマを選んだのにはもうひとつ微妙な理由がある。 5人組の3人、 ジェマ、 あるいはグウェンのいずれかが、 + +03:46.010 --> 03:54.740 +多くの面でより高いスコアを獲得しているように見えるかもしれないのに、 だ。 + +03:54.740 --> 03:57.350 +リャマにはリャマの便利さがある。 + +03:57.350 --> 03:57.980 +ただそれだけだ。 + +03:58.010 --> 04:03.350 +しかし、 これによってコードが少しシンプルになり、 llamaのタスクも少し楽になります。 + +04:03.350 --> 04:10.040 +llamaのトークナイザを見ると、 llamaでは0から999までのすべての数字、 + +04:10.040 --> 04:19.760 +つまり3桁の数字が1つのトークンにマッピングされることがわかります。 + +04:19.790 --> 04:25.280 +同じことが3や4、 あるいはクアンにも言えるかというと、 実はそうではない。 + +04:25.460 --> 04:31.670 +そのほかの3つのモデルでは、 基本的に1桁につき1トークンというような形になっている。 + +04:31.670 --> 04:36.110 +つまり、 999という数字は3つのトークンとして扱われることになる。 + +04:36.140 --> 04:40.940 +では、 何の違いもないはずのことに、 どんな違いがあるのかと聞かれるかもしれない。 + +04:41.210 --> 04:54.990 +だから、 トレーニングをするときに、 トークンを生成するモデルを使い、 回帰モデルのような考え方をさせようとしているんだ。 + +04:54.990 --> 05:02.610 +私たちは、 次のトークンを予測するのがどんどんうまくなるように、 そしてそれが価格に対応するように解決しようとしてほしいのです。 + +05:02.610 --> 05:04.830 +だから問題が単純化される。 + +05:04.830 --> 05:10.740 +価格が、 モデルが生成しなければならないトークン1個に正確に反映されている場合。 + +05:10.740 --> 05:16.980 +つまり、 この特定の状況、 私たちが解決しようとしている特定の問題では、 + +05:16.980 --> 05:25.950 +ラムダ3のトークン化戦略が必要なのです。 1が非常にうまく機能するのは、 トークンが生成する1つの次のトークンが、 + +05:25.950 --> 05:28.950 +それ自体で価格のすべてを反映するからだ。 + +05:29.130 --> 05:35.460 +つまり、 次のトークンは9ドルであるべきで、 それは9ドルかもしれないし、 99ドルかもしれないし、 + +05:35.460 --> 05:39.660 +999ドルかもしれないと予測することはない。 + 
+05:39.660 --> 05:42.000 +そして、 それが実現するのは、 トークンを行ったときだけだ。 + +05:42.000 --> 05:42.690 +その後だ。 + +05:42.720 --> 05:53.890 +そうではなく、 次のトークンとして提示される1つのトークンは、 1つのトークンに商品の全価格が反映されることになる。 + +05:54.340 --> 06:02.290 +つまり、 ニュアンスの違いではあるが、 我々がラマ3を選択する方向に傾く理由なのだ。 この場合は1。 + +06:02.650 --> 06:07.690 +しかし、 ぜひとも他のモデルを選び、 そのパフォーマンスを見ることができるようになるだろう。 + +06:07.690 --> 06:13.510 +しかし、 llamaはトークン化する方法でこの利便性から少し優位に立つ。 + +06:13.900 --> 06:19.360 +ですから、 モデルを選択し、 リーダーボードを見て、 リーダーボードをもう少し深く見て、 インストラクターのバリアントとベースモデルのパラメータサイズについて考え、 + +06:19.360 --> 06:31.420 +さらにトークン化の方法などのニュアンスについて考える、 その背後にある思考プロセスについていくらかご理解いただけると思います。 + +06:31.420 --> 06:42.190 +その結果、 ラマ3世を選ぶという決断に至った。 180億ドルを基本モデルとしている。 + +06:42.280 --> 06:48.640 +さて、 それではコラブに行ってベースモデルを試してみよう。 diff --git a/week5/community-contributions/subtitles/srts/59506929/ko_KR.srt b/week5/community-contributions/subtitles/srts/59506929/ko_KR.srt new file mode 100755 index 0000000..25ef068 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59506929/ko_KR.srt @@ -0,0 +1,250 @@ +WEBVTT + +00:00.890 --> 00:04.820 +포옹 페이스 오픈 LLM 리더보드로 돌아가죠 + +00:04.820 --> 00:09.650 +훈련용 기본 모델을 고를 때 가장 먼저 가는 곳이죠 + +00:09.650 --> 00:17.690 +가장 먼저 모델에 초점을 맞추겠습니다 매개 변수 크기, 최대 9라고 + +00:17.690 --> 00:18.740 +해보죠 + +00:18.740 --> 00:26.690 +비트만 조금 낮추고 기본 모델만 빼고 전부 걸러내죠 + +00:26.810 --> 00:30.950 +매개 변수의 개수도 보여 주세요 + +00:30.950 --> 00:34.190 +이 아래 테이블로 가죠 + +00:34.280 --> 00:39.890 +다양한 지표에 따라 가장 강력한 모델의 결과가 나왔어요 + +00:39.950 --> 00:48.890 +제가 여러 번 언급했던 파워하우스 클랜이 보일 겁니다 이 2를 포함해서요 52살요 5는 새로운 거예요 + +00:48.890 --> 00:51.710 +70억 개 변종요 + +00:51.710 --> 00:56.600 +이 로드의 최고 득점자였어요 + +00:56.840 --> 01:02.760 +보시다시피 제마는 90억 개의 변수 제마예요 + +01:02.790 --> 01:07.650 +미스트랄강 바로 위예요 + +01:07.950 --> 01:12.780 +우리가 자주 얘기하는 그 사람도요 + +01:12.810 --> 01:17.010 +마이크로소프트의 phi2도 있었죠 + +01:17.010 --> 01:20.700 +라마 3요 1번 비트는 좀 더 아래예요 + +01:20.850 --> 01:26.340 +먼저 언급할 점은 상위권의 숫자는 꽤 근소한 차이라는 겁니다 약간의 + +01:26.340 --> 01:28.380 +차이가 있긴 하지만요 + +01:28.500 --> 01:33.030 +라마 3에 대해 말씀드릴 게 있어요 
여러분이 고르게 될 1은 우리가 선택하게 + +01:33.030 --> 01:36.510 +될 겁니다 코드에서 더 아래쪽에 있는 걸 보셨으니까요 + +01:36.660 --> 01:39.570 +하지만 거기엔 이유가 있어요 + +01:39.630 --> 01:45.960 +이런 다양한 스코어를 볼 때 훈련된 버전도 반드시 + +01:45.960 --> 01:49.170 +가져와야 해요 + +01:49.260 --> 01:56.850 +다중 분석이라고 부르는 건 같은 모델이지만 좀 더 훈련된 형태로 다양한 강화 + +01:56.920 --> 02:03.880 +학습 기술을 이용해 특정 채팅 분석 스타일에 대응하는 거예요 + +02:03.880 --> 02:10.210 +그런 프레임워크가 갖춰지면 다양한 테스트에 더 잘 대처할 수 + +02:10.210 --> 02:16.840 +있어요 주어진 지시에 반응할 테니까요 다른 작업에 적응하도록 훈련받기를 + +02:16.840 --> 02:19.870 +기대하기보다요 + +02:20.050 --> 02:26.080 +이 기능을 좀 더 현실적으로 볼 수 있습니다 기본 모델이라도 벤치마크를 + +02:26.080 --> 02:32.170 +가지고 어떻게 작동하는지 보고 변형된 부분을 보면 이해가 되죠 + +02:32.170 --> 02:38.590 +그렇게 하면 라마다 3이 나오죠 제일 위에 있는 게 180억 개예요 + +02:38.620 --> 02:42.880 +그 위에는 피3도 있어요 + +02:42.880 --> 02:43.870 +재머요 + +02:43.870 --> 02:48.040 +메타 라마 3도 있어요 1번요 + +02:48.040 --> 02:52.030 +그러니까 instructant varian을 보면 아주 잘 되고 있어요 + +02:52.060 --> 02:58.160 +좀 삐딱하게 들리겠지만 실제로 지시 기능을 쓰자는 건 아니에요 + +02:58.160 --> 03:03.320 +기본 형태를 유지하자는 거예요 그건 원하지 않으니까요 + +03:03.350 --> 03:08.600 +사고 과정이나 훈련 능력을 모두 소모하는 건 원치 않아요 시스템 프롬프트나 + +03:08.600 --> 03:12.620 +사용자 프롬프트 같은 걸 배우는 것도요 + +03:12.620 --> 03:16.940 +일단 그 훈련을 하고 나면 모든 점수가 잘 나온다는 걸 알 + +03:16.970 --> 03:17.900 +수 있어요 + +03:17.900 --> 03:24.290 +이런 걸 보면 기본 모델이 이런 다양한 벤치마크를 해결하도록 잘 적응했다는 느낌이 + +03:24.290 --> 03:25.160 +들죠 + +03:25.160 --> 03:29.570 +순위표 결과를 해석하는 미묘한 방법이네요 + +03:29.570 --> 03:34.580 +변형 기능을 보고 어떻게 작동하는지 볼 수 있어요 베이스 모델이 + +03:34.580 --> 03:37.400 +어떻게 작동할지 알 수 있죠 + +03:38.030 --> 03:46.010 +라마를 고른 데는 또 다른 미묘한 이유가 있어요 물론 167cm인 + +03:46.010 --> 03:54.740 +제마나 그웬이 여러 면에서 더 높아 보일 수도 있지만요 + +03:54.740 --> 03:57.350 +라마는 편리해요 + +03:57.350 --> 03:57.980 +정말 그래요 + +03:58.010 --> 04:03.350 +모든 것에 약간의 차이는 주지만 코드를 좀 더 간단하게 만들고 + +04:03.350 --> 04:10.040 +라마를 위한 작업을 좀 더 쉽게 해줍니다 라마를 위한 토큰라이저를 보면 + +04:10.040 --> 04:19.760 +0부터 999까지의 모든 숫자, 모든 세 자리 숫자가 하나의 토큰에 매핑되어 있는 것을 볼 수 있죠 + +04:19.790 --> 04:25.280 +3, 4편이나 콴의 경우는 달라요 + +04:25.460 --> 04:30.950 +이 세 모델 모두 
기본적으로∙∙∙ 숫자당 토큰 같은 걸 갖고 있다고 생각하면 + +04:30.980 --> 04:31.670 +돼요 + +04:31.670 --> 04:36.110 +999는 결국 세 개의 패가 되죠 + +04:36.140 --> 04:40.940 +이런 질문을 하실지도 모르겠네요. 그게 무슨 차이가 있죠? 전혀 차이가 없어야 하는 걸까요? + +04:41.210 --> 04:50.150 +그래서 우리가 트레이닝을 할 때 토큰을 생성하기 위해 모델을 사용합니다 회귀 + +04:50.150 --> 04:54.990 +모델의 관점에서 생각하도록 하는 거죠 + +04:54.990 --> 05:00.660 +문제를 해결하고 다음 토큰을 더 잘 예측할 수 있도록 하는 거죠 가격에도 + +05:00.660 --> 05:02.610 +적용되도록요 + +05:02.610 --> 05:04.830 +문제를 단순화하죠 + +05:04.830 --> 05:10.740 +모델이 생성해야 하는 토큰 하나에 가격이 정확히 반영된다면요 + +05:10.740 --> 05:16.980 +이 특정한 상황에서 우리가 해결하려는 특정한 문제에 + +05:16.980 --> 05:25.950 +대해 람다 3의 토큰화 전략이에요 1은 아주 잘 작동합니다. 생성되는 단일 다음 토큰이 가격에 + +05:25.950 --> 05:28.950 +대한 모든 것을 반영하기 때문이죠 + +05:29.130 --> 05:35.460 +다음 토큰은 9가 되어야 한다고 예측하는 경우는 없어요 9달러, + +05:35.460 --> 05:39.660 +999달러 혹은 999달러일 수도 있죠 + +05:39.660 --> 05:42.000 +토큰을 써야만 효과가 나타나죠 + +05:42.000 --> 05:42.690 +그다음에요 + +05:42.720 --> 05:49.710 +아뇨, 다음 토큰으로 제시되는 하나의 토큰이 하나의 토큰에 제품의 + +05:49.720 --> 05:53.890 +전체 가격을 반영하는 경우죠 + +05:54.340 --> 06:02.290 +그런 미묘한 차이 때문에 llama 3을 선택하는 거예요 이 경우엔 1명이죠 + +06:02.650 --> 06:07.690 +하지만 다른 모델을 선택해서 성능을 확인할 수 있어요 + +06:07.690 --> 06:13.510 +라마는 비트 마이너스가 편리해서 좀 더 유리해요 + +06:13.900 --> 06:19.360 +이로써 여러분은 모델을 선택하는 과정과 leaderboard를 + +06:19.360 --> 06:23.770 +보고, 더 깊이 살펴볼 수 있습니다. 기본 모델의 매개 변수 + +06:23.800 --> 06:29.920 +크기와 비교해 다양성을 만드는 과정과 토큰화의 작동 방식에 대한 뉘앙스를 볼 + +06:29.920 --> 06:31.420 +수 있죠. + +06:31.420 --> 06:38.810 +그 모든 걸 종합한 결과 이런 결정을 내릴 수 있었습니다 llama 3을 선택하기로요 180억 달러를 기본 + +06:38.810 --> 06:42.190 +모델로 삼았어요 + +06:42.280 --> 06:48.640 +이제 Colab으로 가서 기본 모델을 시도해 보죠 diff --git a/week5/community-contributions/subtitles/srts/59507017/en_US.srt b/week5/community-contributions/subtitles/srts/59507017/en_US.srt new file mode 100755 index 0000000..6d9aad6 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507017/en_US.srt @@ -0,0 +1,217 @@ +WEBVTT + +00:00.410 --> 00:01.670 +Welcome to Colab. 
+ +00:01.670 --> 00:04.910 +Welcome to the week seven day two Colab. + +00:04.910 --> 00:10.760 +And just before we try our base model, we're going to actually see these tokenizers in action. + +00:10.820 --> 00:15.410 +Uh, start with some Pip installs and some imports as usual. + +00:15.560 --> 00:22.430 +And now in the constants section, I'm going to set up four different tokenizers that we will just quickly + +00:22.430 --> 00:23.120 +look at. + +00:23.120 --> 00:27.830 +After that, we're going to look at our base model llama and do some some work with the data. + +00:27.830 --> 00:33.800 +And I've also got the constants here for writing to the output in color that you may remember from when + +00:33.800 --> 00:35.930 +we visualized results in the past. + +00:36.440 --> 00:37.850 +All right, let's run that. + +00:37.880 --> 00:43.070 +We log in to hugging face using the usual snippet that you know well at this point. + +00:43.070 --> 00:49.100 +And now I've written a useful function called investigate tokenizer that takes a model name. + +00:49.460 --> 00:58.790 +And what that does is it uh, will load in the tokenizer for that model name, and it will then iterate + +00:58.790 --> 01:06.030 +through these six numbers zero, one, ten, 100, 999, and 1000. + +01:06.090 --> 01:12.180 +For each of those numbers, it will convert it into a string, and then ask the tokenizer to convert + +01:12.180 --> 01:14.160 +that string into tokens. + +01:14.160 --> 01:20.130 +So what are the tokens that represent the string 100 as a piece of text? + +01:20.520 --> 01:24.660 +Um, and I'm using this add special tokens as false parameter. + +01:24.660 --> 01:29.940 +What that means is please don't add in things like a start of sentence token and an end of sentence + +01:29.940 --> 01:33.510 +token or something like that that will interfere with things. + +01:33.600 --> 01:38.760 +Just simply convert this text into tokens that represents that text. 
+ +01:38.970 --> 01:40.260 +Um, so that's what it will do. + +01:40.260 --> 01:41.730 +And then it will print that out. + +01:41.730 --> 01:50.520 +So to run that and to show it let me call investigate tokenizer. + +01:51.450 --> 01:59.460 +And we will start by calling it for the llama three one uh model. + +01:59.460 --> 02:00.950 +And let's see what we get. + +02:02.390 --> 02:06.200 +So what we get is the string zero. + +02:06.200 --> 02:09.710 +Text zero converts to the token number 15. + +02:09.740 --> 02:12.230 +One goes to the token 16. + +02:12.260 --> 02:15.380 +Ten goes to token number 605. + +02:15.410 --> 02:16.190 +As it happens. + +02:16.190 --> 02:21.380 +And you'll see that similarly 109.99 they each mapped to a single token. + +02:21.890 --> 02:25.310 +1000 though that maps now to two tokens. + +02:25.340 --> 02:28.580 +In fact, it maps to the token for the number 100. + +02:28.610 --> 02:33.230 +If you see that followed by the token for the number zero and that. + +02:33.230 --> 02:41.030 +So just just as you would imagine, it is the tokens for the text 100, followed by the text for a zero. + +02:41.090 --> 02:47.480 +And what you see here, loud and clear, is that if we're just focusing on three digit text numbers, + +02:47.480 --> 02:53.690 +on three digit numbers, we have this useful property that whatever price we have is always going to + +02:53.690 --> 02:55.130 +map to one token. + +02:55.160 --> 03:02.330 +So the model's task of trying to predict the price of a product will end up being just predict one token + +03:02.330 --> 03:05.930 +and get that token right, and then you've got the right price of the product. + +03:06.110 --> 03:08.690 +Um, and so it's not an essential property. + +03:08.720 --> 03:15.470 +We don't require that these models are perfectly able to generate a sequence of tokens, but it's convenient + +03:15.470 --> 03:20.270 +that we're going to simplify the problem down to just getting this one token right. 
+ +03:20.720 --> 03:26.270 +And we can also see how this looks for another model like K-125. + +03:28.280 --> 03:30.350 +Uh, and now we see something different. + +03:30.380 --> 03:38.450 +The single digits zero and one mapped to a single token, but ten maps to two tokens. + +03:38.450 --> 03:50.990 +In fact, the token for one followed by zero 100 is 100999 is presumably 999, and 1000 is 1000, uh, + +03:50.990 --> 03:53.210 +for tokens being used there. + +03:53.210 --> 03:54.830 +So you see that different property. + +03:54.830 --> 03:59.930 +And hopefully that really clarifies, um, why I think llama three one has an edge. + +04:00.290 --> 04:00.590 +Uh. + +04:00.630 --> 04:01.230 +Gemma. + +04:01.230 --> 04:02.670 +Two for Gemma, two. + +04:02.700 --> 04:03.450 +Sorry. + +04:03.630 --> 04:04.290 +Uh, Gemma. + +04:04.290 --> 04:08.280 +Two similar properties to Kwon. + +04:08.460 --> 04:11.880 +Uh, interestingly, uh, totally different vocabulary. + +04:11.880 --> 04:14.670 +It's a much bigger number, but that's no surprise. + +04:14.670 --> 04:17.610 +There's no reason why they should have the same vocabulary. + +04:17.940 --> 04:19.860 +Um, and, um. + +04:19.890 --> 04:26.640 +Yeah, it's, uh, clearly, uh, not not a one token for for one three digit number. + +04:26.910 --> 04:30.660 +Um, and Phi three is, as I say, similar. + +04:30.990 --> 04:37.230 +Uh, there is actually a different variant of phi three, a smaller variant that has the same nice properties + +04:37.230 --> 04:38.220 +as Lama three one. + +04:38.220 --> 04:42.420 +So that is another thing that would be worth potentially trying. + +04:42.480 --> 04:44.040 +Um, as could any of these be tried? + +04:44.040 --> 04:48.270 +It's not a disqualifier that that it produces multiple tokens by any means. + +04:48.450 --> 04:49.020 +All right. 
+ +04:49.020 --> 04:54.420 +Well, anyway, that gives you a background to the tokenizers and a good sense for why we picked the + +04:54.420 --> 04:55.680 +model that we did. + +04:55.740 --> 05:01.410 +Uh, in the next video, we will then load in data and try testing our model. diff --git a/week5/community-contributions/subtitles/srts/59507017/ja_JP.srt b/week5/community-contributions/subtitles/srts/59507017/ja_JP.srt new file mode 100755 index 0000000..2f2a3e1 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507017/ja_JP.srt @@ -0,0 +1,193 @@ +WEBVTT + +00:00.410 --> 00:01.670 +Colabへようこそ。 + +00:01.670 --> 00:04.910 +7週目2日目のコラボへようこそ。 + +00:04.910 --> 00:10.760 +ベース・モデルを試す前に、 これらのトークナイザーが実際に動いているところを見てみよう。 + +00:10.820 --> 00:15.410 +ええと、 いつものようにPipのインストールとインポートから始めよう。 + +00:15.560 --> 00:23.120 +定数セクションで、 4つの異なるトークナイザーを設定します。 + +00:23.120 --> 00:27.830 +その後、 ベースモデルのラマを見て、 データを使っていくつかの作業をする。 + +00:27.830 --> 00:35.930 +そして、 過去に結果を視覚化したときに覚えているかもしれないが、 カラーで出力するための定数もここに用意した。 + +00:36.440 --> 00:37.850 +よし、 実行しよう。 + +00:37.880 --> 00:43.070 +ハグフェイスにログインするには、 もうお馴染みのスニペットを使う。 + +00:43.070 --> 00:49.100 +そして今、 私はモデル名を受け取ってトークン化する調査という便利な関数を書いた。 + +00:49.460 --> 00:58.790 +そして、 そのモデル名のトークナイザーをロードし、 0、 1、 10、 100、 + +00:58.790 --> 01:06.030 +999、 1000の6つの数字を繰り返し処理する。 + +01:06.090 --> 01:14.160 +それぞれの数字について、 それを文字列に変換し、 その文字列をトークンに変換するようトークナイザーに依頼する。 + +01:14.160 --> 01:20.130 +では、 文字列100をテキストとして表現するトークンとは何か? 
+ +01:20.520 --> 01:24.660 +ええと、 この特別なトークンを追加することをfalseパラメータとして使っているんだ。 + +01:24.660 --> 01:29.940 +つまり、 文頭トークンや文末トークンなど、 邪魔になるようなものを追加しないでください、 + +01:29.940 --> 01:33.510 +ということです。 + +01:33.600 --> 01:38.760 +このテキストを、 そのテキストを表すトークンに変換するだけだ。 + +01:38.970 --> 01:40.260 +うーん、 だからそうなるんだ。 + +01:40.260 --> 01:41.730 +そしてそれをプリントアウトする。 + +01:41.730 --> 01:50.520 +そこで、 トークン・サイザーを調査するために、 トークン・サイザーを呼び出してみよう。 + +01:51.450 --> 01:59.460 +まずはラマ・スリー・ワン・アーのモデルから始めよう。 + +01:59.460 --> 02:00.950 +何が出てくるか見てみよう。 + +02:02.390 --> 02:06.200 +つまり、 得られるのは文字列ゼロである。 + +02:06.200 --> 02:09.710 +テキストゼロはトークン番号15に変換されます。 + +02:09.740 --> 02:12.230 +ひとつはトークン16へ。 + +02:12.260 --> 02:15.380 +10はトークン番号605へ。 + +02:15.410 --> 02:16.190 +たまたまだ。 + +02:16.190 --> 02:21.380 +そして、 同じようなことが109でも見られるだろう。 99 それぞれが1つのトークンにマッピングされている。 + +02:21.890 --> 02:25.310 +1000でトークン2枚になる。 + +02:25.340 --> 02:28.580 +実際、 これは100という数字のトークンに対応する。 + +02:28.610 --> 02:33.230 +その後に数字のゼロとそのトークンが続いている。 + +02:33.230 --> 02:41.030 +つまり、 あなたが想像している通り、 100というテキストを表すトークンの後に0というテキストが続く。 + +02:41.090 --> 02:55.130 +3桁のテキスト番号、 3桁の数字に焦点を絞った場合、 どのような価格であっても常に1つのトークンに対応するという便利な特性がある。 + +02:55.160 --> 03:02.330 +つまり、 ある商品の価格を予測しようとするモデルのタスクは、 結局はただ1つのトークンを予測し、 そのトークンを正解にすれば、 + +03:02.330 --> 03:05.930 +その商品の正しい価格がわかるということになる。 + +03:06.110 --> 03:08.690 +だから、 本質的な性質ではないんだ。 + +03:08.720 --> 03:15.470 +私たちは、 これらのモデルが一連のトークンを完璧に生成できることを要求しているわけではないが、 問題を単純化して、 + +03:15.470 --> 03:20.270 +この1つのトークンを正しく生成することだけに絞るのは好都合だ。 + +03:20.720 --> 03:26.270 +また、 K-125のような別のモデルでもこのように見える。 + +03:28.280 --> 03:30.350 +そして今、 私たちは違うものを見ている。 + +03:30.380 --> 03:38.450 +一桁の0と1は1つのトークンに対応するが、 10は2つのトークンに対応する。 + +03:38.450 --> 03:53.210 +実際、 1の後に0が続く100のトークンは100999であり、 おそらく999であり、 1000は1000である。 + +03:53.210 --> 03:54.830 +つまり、 異なる特性があるのだ。 + +03:54.830 --> 03:59.930 +そして、 私がなぜラマ3・1が有利だと思うのか、 その理由が明確になればいいのだが......。 + +04:00.290 --> 04:00.590 +ええと。 + +04:00.630 --> 04:01.230 +ジェマ + +04:01.230 --> 04:02.670 +ジェマに2つ、 2つ。 + +04:02.700 --> 04:03.450 +申し訳ない。 + +04:03.630 
--> 04:04.290 +あの、 ジェマ。 + +04:04.290 --> 04:08.280 +クォンに似た物件が2つある。 + +04:08.460 --> 04:11.880 +あー、 興味深いことに、 あー、 語彙が全然違うんだ。 + +04:11.880 --> 04:14.670 +もっと大きな数字だが、 それも不思議ではない。 + +04:14.670 --> 04:17.610 +同じ語彙でなければならない理由はない。 + +04:17.940 --> 04:19.860 +ええと、 それで、 ええと。 + +04:19.890 --> 04:26.640 +ええ、 明らかに、 3桁の数字1つに対して1つのトークンではありません。 + +04:26.910 --> 04:30.660 +そしてファイ3も似たようなものだ。 + +04:30.990 --> 04:38.220 +ええと、 実はファイ3には別のバリエーションがあって、 ラマ3世と同じような素晴らしい特性を持つ小さなバリエーションがあるんだ。 + +04:38.220 --> 04:42.420 +だから、 これも試してみる価値があるだろう。 + +04:42.480 --> 04:44.040 +ええと、 どれでも試すことができますか? + +04:44.040 --> 04:48.270 +決して複数のトークンを出すことが失格というわけではない。 + +04:48.450 --> 04:49.020 +分かった。 + +04:49.020 --> 04:55.680 +まあ、 いずれにせよ、 トークナイザーの背景と、 私たちがなぜこのモデルを選んだのか、 その理由はご理解いただけたと思う。 + +04:55.740 --> 05:01.410 +次のビデオでは、 データをロードしてモデルをテストしてみよう。 diff --git a/week5/community-contributions/subtitles/srts/59507017/ko_KR.srt b/week5/community-contributions/subtitles/srts/59507017/ko_KR.srt new file mode 100755 index 0000000..5ca8322 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507017/ko_KR.srt @@ -0,0 +1,214 @@ +WEBVTT + +00:00.410 --> 00:01.670 +콜랍에 잘 오셨어요 + +00:01.670 --> 00:04.910 +콜랍 일주일째입니다 이틀째죠 + +00:04.910 --> 00:10.760 +기본 모델을 시도하기 전에 토큰라이저가 작동하는 걸 볼 수 있어요 + +00:10.820 --> 00:15.410 +평소처럼 Pip 설치와 외부에서 작업하세요 + +00:15.560 --> 00:23.120 +이제 상수 섹션에서 4개의 다른 토큰라이저를 설정하겠습니다 잠깐 살펴보죠 + +00:23.120 --> 00:27.830 +그 다음에는 베이스 모델 라마를 보고 데이터로 작업을 좀 할 거예요 + +00:27.830 --> 00:33.800 +그리고 색상 출력에 작성을 위한 상수도 있어요 과거에 결과를 시각화할 때 기억하실지도 + +00:33.800 --> 00:35.930 +모르겠네요 + +00:36.440 --> 00:37.850 +좋아요, 실행해 보죠 + +00:37.880 --> 00:43.070 +얼굴을 껴안는 데 로그인합니다 여러분이 잘 아는 일반적인 스닙 조각으로요 + +00:43.070 --> 00:49.100 +그래서 모델 이름을 가진 토큰라이저 조사하기라는 유용한 함수를 만들었어요 + +00:49.460 --> 00:58.790 +그러면 모델 이름을 위한 토큰라이저를 로드하고 이 6개의 숫자를 반복합니다 + +00:58.790 --> 01:06.030 +0, 1, 10 100, 999, 1000이죠 + +01:06.090 --> 01:12.180 +각각의 숫자에 대해 문자열로 변환합니다 그런 다음 토큰라이저에게 그 문자열을 토큰으로 + +01:12.180 --> 01:14.160 +변환하라고 하죠 + +01:14.160 --> 01:20.130 +문자열 
100을 텍스트로 나타내는 토큰은 무엇일까요? + +01:20.520 --> 01:24.660 +이 특별한 추가 토큰을 가짜 매개 변수로 사용하고 있어요 + +01:24.660 --> 01:29.940 +문장의 시작 토큰과 끝 토큰 같은 것을 추가해서 다른 + +01:29.940 --> 01:33.510 +것을 방해하지 말라는 뜻이죠 + +01:33.600 --> 01:38.760 +단순히 이 텍스트를 해당 텍스트를 나타내는 토큰으로 변환하세요 + +01:38.970 --> 01:40.260 +그게 이 기계의 역할이죠 + +01:40.260 --> 01:41.730 +그럼 그걸 프린트하죠 + +01:41.730 --> 01:50.520 +실행해서 보여드리기 위해 토큰라이저를 조사할게요 + +01:51.450 --> 01:59.460 +라마 3 모델로 시작하도록 하죠 + +01:59.460 --> 02:00.950 +Get in get을 해 보죠 + +02:02.390 --> 02:06.200 +문자열 0이 나오죠 get + +02:06.200 --> 02:09.710 +텍스트 0은 토큰 넘버 15로 변환되죠 + +02:09.740 --> 02:12.230 +하나는 16번 토큰으로 가요 + +02:12.260 --> 02:15.380 +10은 605번 토큰에 있어요 + +02:15.410 --> 02:16.190 +공교롭게도요 + +02:16.190 --> 02:21.380 +109도 비슷하게 생겼고요 99개씩 토큰 하나에 매핑됐어요 + +02:21.890 --> 02:25.310 +1,000달러면 토큰 2개와 같아요 + +02:25.340 --> 02:28.580 +숫자 100의 토큰과 매핑이 되죠 + +02:28.610 --> 02:33.230 +0과 다른 숫자가 적힌 토큰이 보이시죠? + +02:33.230 --> 02:41.030 +여러분이 상상하시는 것처럼 텍스트 100의 토큰과 0의 텍스트죠 + +02:41.090 --> 02:47.480 +여기서 명확하게 볼 수 있듯이 3자리 숫자에만 초점을 맞춘다면 + +02:47.480 --> 02:53.690 +어떤 값을 가지고 있든 토큰 하나에 대응할 수 있는 유용한 속성이 + +02:53.690 --> 02:55.130 +생기죠 + +02:55.160 --> 03:02.330 +즉 제품의 가격을 예측하는 모델의 작업은 결국 토큰 하나를 예측해서 올바른 토큰을 얻는 것이 됩니다. + +03:02.330 --> 03:05.930 +그렇게 되면 제품의 가격이 올바른 것이죠. 
+ +03:06.110 --> 03:08.690 +그래서 필수적인 자산이 아니에요 + +03:08.720 --> 03:15.470 +이 모델들이 토큰의 배열을 생성할 수 있어야 하는 것은 아니지만 이 토큰 하나만 제대로 + +03:15.470 --> 03:20.270 +만들어도 문제를 간단하게 해결할 수 있어서 편리하죠 + +03:20.720 --> 03:26.270 +K-125 같은 다른 모델도 이렇게 생겼는지 알 수 있죠 + +03:28.280 --> 03:30.350 +이제 다른 게 보여요 + +03:30.380 --> 03:38.450 +0과 1은 토큰 하나에 매핑되지만 10은 토큰 2개에 매핑되죠 + +03:38.450 --> 03:50.990 +1에 대한 토큰과 0, 100을 합치면 100999고 999이고 1,000은 1,000이죠 거기서 사용된 + +03:50.990 --> 03:53.210 +토큰의 값이에요 + +03:53.210 --> 03:54.830 +다른 속성을 볼 수 있죠 + +03:54.830 --> 03:59.930 +라마31이 왜 유리한지 이번 일로 명확해졌으면 좋겠어요 + +04:00.290 --> 04:00.590 +네 + +04:00.630 --> 04:01.230 +제마요 + +04:01.230 --> 04:02.670 +제마는 두 개예요 + +04:02.700 --> 04:03.450 +미안해요 + +04:03.630 --> 04:04.290 +제마요 + +04:04.290 --> 04:08.280 +권 회장과 비슷한 두 가지 특성이 있어요 + +04:08.460 --> 04:11.880 +흥미롭게도 완전히 다른 어휘예요 + +04:11.880 --> 04:14.670 +훨씬 큰 숫자지만 놀랍진 않아요 + +04:14.670 --> 04:17.610 +같은 단어를 쓸 이유가 없죠 + +04:17.940 --> 04:19.860 +음, 음요 + +04:19.890 --> 04:26.640 +네, 토큰이 하나가 아니라 세 자리 숫자네요 + +04:26.910 --> 04:30.660 +피3도 아까 말했듯이 비슷해요 + +04:30.990 --> 04:37.230 +사실 파이3에도 다른 변종이 있어요 라마가 가진 더 작은 변종이지만 라마의 3-1과 같은 멋진 특성을 + +04:37.230 --> 04:38.220 +갖고 있죠 + +04:38.220 --> 04:42.420 +그것도 시도해 볼 만한 가치가 있어요 + +04:42.480 --> 04:44.040 +이 중 어떤 것도 시도할 수 없었겠죠? + +04:44.040 --> 04:48.270 +여러 패를 만든다고 해서 자격이 없는 건 아니에요 + +04:48.450 --> 04:49.020 +좋아요 + +04:49.020 --> 04:54.420 +자, 이제 토큰라이저에 대해 알아봤고 왜 그 모델을 선택했는지도 + +04:54.420 --> 04:55.680 +알게 되었네요 + +04:55.740 --> 05:01.410 +다음 영상에서는 데이터를 불러와서 모델을 테스트해 볼게요 diff --git a/week5/community-contributions/subtitles/srts/59507313/en_US.srt b/week5/community-contributions/subtitles/srts/59507313/en_US.srt new file mode 100755 index 0000000..0eaa8fe --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507313/en_US.srt @@ -0,0 +1,163 @@ +WEBVTT + +00:00.920 --> 00:06.260 +And it's this time again, when we look at the podium of how our models are performing across the board. 
+
+00:06.260 --> 00:12.050
+You already know the answer, but we should look at it anyway and and and have a giggle.
+
+00:12.320 --> 00:17.900
+Uh, so we first look at the constant model that just guesses the average value from the training data
+
+00:17.930 --> 00:18.590
+set.
+
+00:18.950 --> 00:22.880
+And if we look at that number, it was 146.
+
+00:23.000 --> 00:26.150
+It's looking distinctly lower in here right now.
+
+00:26.150 --> 00:28.070
+And I think you probably know why.
+
+00:28.370 --> 00:34.100
+Uh, then we looked at a traditional machine learning model that was able to beat the average, which
+
+00:34.100 --> 00:36.260
+we would hope it would, but not by masses.
+
+00:36.470 --> 00:42.050
+Uh, we looked at the random forest approach, which was the best of the traditional machine learning
+
+00:42.050 --> 00:44.510
+approaches coming in at 97.
+
+00:44.510 --> 00:51.140
+We did compare it with a human being and showed that human could beat the basic, simple features,
+
+00:51.140 --> 00:53.870
+but was beaten by random forest model.
+
+00:54.140 --> 01:00.290
+Uh, GPT four zero was the best of the various frontier models we looked at, and absolutely crushed
+
+01:00.290 --> 01:04.610
+it with 76 way outperforming this human.
+
+01:04.820 --> 01:13.600
+And then today we've looked at the, uh, fine tuned base llama 3.1, 8 billion parameters quantized
+
+01:13.600 --> 01:15.310
+all the way down to four bits.
+
+01:15.430 --> 01:21.190
+Uh, and we saw that that was at 396, devastatingly bad.
+
+01:21.280 --> 01:29.290
+Uh, and it was only slightly better when we quantized to eight bits, instead coming in at $301 of
+
+01:29.290 --> 01:29.950
+error.
+
+01:30.160 --> 01:36.550
+Uh, so clearly we're off to a bad start with llama 3.1, but in some ways that's exciting because the
+
+01:36.550 --> 01:37.780
+challenge is on.
+
+01:37.870 --> 01:45.640
+Uh, we've got the the model here ready for us to try and explore how we can make it better.
+ +01:45.640 --> 01:53.410 +And our target is to try and be competitive with a model like GPT four, which is, you know, has trillions + +01:53.410 --> 01:54.460 +of parameters. + +01:54.460 --> 01:59.530 +If we can be competitive with that in this particular task, and we can do it with an open source model + +01:59.530 --> 02:04.240 +that's for free, then we have achieved a great thing. + +02:04.240 --> 02:05.080 +Okay. + +02:05.110 --> 02:11.690 +And so in summary, at this point, at this juncture, you have reached 80% of your way in this journey + +02:11.690 --> 02:14.120 +and that is absolutely fantastic. + +02:14.120 --> 02:15.260 +I'm so happy you. + +02:15.290 --> 02:16.190 +Are sticking at it. + +02:16.190 --> 02:17.660 +I'm so happy you're here. + +02:17.660 --> 02:20.690 +I got to tell you, the 20% that's still to come. + +02:20.690 --> 02:21.740 +It's the best. + +02:21.740 --> 02:23.060 +It is the best. + +02:23.090 --> 02:25.820 +The juiciest content is what's to come. + +02:25.820 --> 02:27.860 +As we look at training this model. + +02:27.860 --> 02:32.540 +And then in the finale next week, we really put everything together. + +02:32.540 --> 02:35.210 +I mean, it's just it's going to be a crescendo. + +02:35.210 --> 02:37.160 +It's going to get better and better. + +02:37.160 --> 02:38.300 +Hang in there. + +02:38.330 --> 02:40.250 +All of the good stuff is to come. + +02:40.250 --> 02:44.870 +So what we're doing next time, there's some more hyperparameters. + +02:44.870 --> 02:49.760 +I know that these hyperparameters can be a bit grueling, but this is where the really important stuff + +02:49.760 --> 02:50.390 +is learned. + +02:50.390 --> 02:54.530 +So there's some hyperparameters that control training that I'll explain to you. + +02:54.560 --> 03:00.020 +We're then going to set up something called a supervised fine tuning SFT trainer, which is where it + +03:00.020 --> 03:00.830 +all happens. 
+ +03:00.830 --> 03:03.650 +And then we're going to kick off training. + +03:03.650 --> 03:10.970 +You're going to be training your own proprietary large language model, uh, based on, uh, llama 3.1 + +03:10.970 --> 03:11.720 +based model. + +03:11.720 --> 03:15.320 +And we're going to be doing it, uh, in the next session. + +03:15.320 --> 03:17.210 +So I will see you there. diff --git a/week5/community-contributions/subtitles/srts/59507313/ja_JP.srt b/week5/community-contributions/subtitles/srts/59507313/ja_JP.srt new file mode 100755 index 0000000..7b01c61 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507313/ja_JP.srt @@ -0,0 +1,127 @@ +WEBVTT + +00:00.920 --> 00:06.260 +そしてまたこの時期、 我々のモデルが全体的にどのようなパフォーマンスを見せているかを表彰台で見ることになる。 + +00:06.260 --> 00:12.050 +答えはもうわかっているだろうが、 とにかく見て、 笑ってみよう。 + +00:12.320 --> 00:18.590 +まず、 トレーニングデータセットから平均値を推測する定数モデルを見てみよう。 + +00:18.950 --> 00:22.880 +その数字を見てみると、 146だった。 + +00:23.000 --> 00:26.150 +今は明らかに気温が下がっている。 + +00:26.150 --> 00:28.070 +その理由は、 おそらくあなたも知っていると思う。 + +00:28.370 --> 00:36.260 +従来の機械学習モデルは、 平均を上回ることができた。 + +00:36.470 --> 00:44.510 +ランダムフォレスト・アプローチを検討したところ、 従来の機械学習アプローチでは97点と最も優れていた。 + +00:44.510 --> 00:53.870 +人間と比較したところ、 人間は基本的で単純な特徴には勝てたが、 ランダムフォレストモデルには負けた。 + +00:54.140 --> 01:00.290 +GPTフォーゼロは、 私たちが調べた様々なフロンティアモデルの中で最も優れていて、 + +01:00.290 --> 01:04.610 +76がこの人間を大きく上回っていた。 + +01:04.820 --> 01:15.310 +そして今日は、 3.ファインチューン・バス・ラマを見てきた。 180億ものパラメーターが4ビットにまで量子化されている。 + +01:15.430 --> 01:21.190 +ええと、 396で、 壊滅的に悪かった。 + +01:21.280 --> 01:29.950 +そして、 8ビットに量子化するとわずかに良くなり、 代わりに301ドルの誤差が生じた。 + +01:30.160 --> 01:37.780 +ええと、 明らかにラマ3は出だしが悪いね。 1だが、 ある意味、 挑戦が始まっているのでエキサイティングだ。 + +01:37.870 --> 01:45.640 +ええと、 ここにモデルを用意してあるので、 どうすればもっと良くなるかを試行錯誤することができる。 + +01:45.640 --> 01:54.460 +私たちの目標は、 GPT4のような何兆ものパラメーターを持つモデルに負けないようにすることです。 + +01:54.460 --> 01:59.530 +もし私たちがこの特別なタスクで競争力を持ち、 それを無償のオープンソースモデルで実現できるのであれば、 + +01:59.530 --> 02:04.240 +私たちは偉大なことを成し遂げたことになる。 + +02:04.240 --> 02:05.080 +オーケー。 + +02:05.110 --> 
02:14.120 +まとめると、 この時点で、 この分岐点で、 あなたはこの旅の道のりの80%に到達している。 + +02:14.120 --> 02:15.260 +とても嬉しいよ。 + +02:15.290 --> 02:16.190 +粘り強くやっている。 + +02:16.190 --> 02:17.660 +あなたがここにいてくれて本当にうれしい。 + +02:17.660 --> 02:20.690 +言っておくが、 まだ20%は残っている。 + +02:20.690 --> 02:21.740 +最高だよ。 + +02:21.740 --> 02:23.060 +最高だよ。 + +02:23.090 --> 02:25.820 +最も魅力的なコンテンツはこれからだ。 + +02:25.820 --> 02:27.860 +このモデルをトレーニングしていく。 + +02:27.860 --> 02:32.540 +そして来週のフィナーレでは、 本当にすべてをまとめる。 + +02:32.540 --> 02:35.210 +つまり、 クレッシェンドになるんだ。 + +02:35.210 --> 02:37.160 +これからどんどん良くなっていくよ。 + +02:37.160 --> 02:38.300 +頑張るんだ。 + +02:38.330 --> 02:40.250 +いいものはすべてこれからだ。 + +02:40.250 --> 02:44.870 +そこで次回は、 さらにいくつかのハイパーパラメーターを用意する。 + +02:44.870 --> 02:50.390 +これらのハイパーパラメーターは少々過酷なものであることは承知しているが、 本当に重要なことはここで学ぶことができる。 + +02:50.390 --> 02:54.530 +トレーニングをコントロールするハイパーパラメーターがいくつかあるので、 それを説明しよう。 + +02:54.560 --> 03:00.830 +その後、 SFTトレーナーというものをセットアップする。 + +03:00.830 --> 03:03.650 +そしてトレーニングを開始する。 + +03:03.650 --> 03:11.720 +あなたは、 llama 3に基づいて、 独自の大規模言語モデルをトレーニングすることになる。 1ベースのモデル。 + +03:11.720 --> 03:15.320 +そして、 次のセッションでそれをやるつもりだ。 + +03:15.320 --> 03:17.210 +だから、 そこで会おう。 diff --git a/week5/community-contributions/subtitles/srts/59507313/ko_KR.srt b/week5/community-contributions/subtitles/srts/59507313/ko_KR.srt new file mode 100755 index 0000000..25a89a8 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507313/ko_KR.srt @@ -0,0 +1,148 @@ +WEBVTT + +00:00.920 --> 00:06.260 +이번에도 시상대에 올랐습니다 모델들의 퍼포먼스를 감상해 보죠 + +00:06.260 --> 00:12.050 +이미 답을 아시지만 그래도 한번 보고 웃어야겠어요 + +00:12.320 --> 00:18.590 +먼저 훈련 데이터 세트에서 평균 값을 추측하는 상수 모델을 살펴보죠 + +00:18.950 --> 00:22.880 +그 숫자를 보면 146이었죠 + +00:23.000 --> 00:26.150 +확실히 더 낮아 보여요 + +00:26.150 --> 00:28.070 +이유는 아실 거예요 + +00:28.370 --> 00:34.100 +그리고 전통적인 머신 러닝 모델을 살펴봤는데 평균을 넘을 수 있었어요 그러길 + +00:34.100 --> 00:36.260 +바라지만 대다수는 아니었죠 + +00:36.470 --> 00:42.050 +무작위 숲 접근법을 살펴봤는데 97년에 나온 전통적인 머신 + +00:42.050 --> 00:44.510 +러닝 접근법 중 최고였어요 + +00:44.510 --> 00:51.140 +인간과 비교해 보니 인간은 단순한 기본 특징을 이길 
수 있었지만 + +00:51.140 --> 00:53.870 +숲의 모델에 지고 말았죠 + +00:54.140 --> 01:00.290 +GPT 40은 다양한 프런티어 모델 중 최고였어요 인간을 + +01:00.290 --> 01:04.610 +76배나 능가하며 완벽하게 해냈죠 + +01:04.820 --> 01:15.310 +오늘은 미세하게 조율된 베이스 라마 3을 살펴봤어요 180억 매개 변수가 4분의 1로 양분화됐어요 + +01:15.430 --> 01:21.190 +396도라는 걸 확인했는데 정말 심각했어요 + +01:21.280 --> 01:29.950 +8비트로 양자화했을 때가 약간 더 나았습니다 오차가 301달러로 늘었죠 + +01:30.160 --> 01:36.550 +라마 3은 시작이 안 좋네요 1점이지만 도전이 시작됐으니 어떤 면에선 + +01:36.550 --> 01:37.780 +신나죠 + +01:37.870 --> 01:45.640 +이 모델을 가지고 어떻게 개선할지 시험해 볼 거예요 + +01:45.640 --> 01:53.410 +저희 목표는 GPT 4 같은 모델과 경쟁하는 겁니다 매개 변수가 수조 개나 + +01:53.410 --> 01:54.460 +되죠 + +01:54.460 --> 01:59.530 +이 특정 작업에서 경쟁력을 갖추고 무료 오픈 소스 모델로 + +01:59.530 --> 02:04.240 +작업할 수 있다면 우린 대단한 걸 이룬 거죠 + +02:04.240 --> 02:05.080 +네 + +02:05.110 --> 02:11.690 +요약하자면, 이 시점에서 여러분은 80%의 여정을 마쳤는데 + +02:11.690 --> 02:14.120 +정말 대단한 거예요 + +02:14.120 --> 02:15.260 +정말 기뻐요 + +02:15.290 --> 02:16.190 +계속 물고 늘어져요 + +02:16.190 --> 02:17.660 +와줘서 정말 기뻐요 + +02:17.660 --> 02:20.690 +아직 20%가 안 왔어요 + +02:20.690 --> 02:21.740 +최고예요 + +02:21.740 --> 02:23.060 +그게 최고죠 + +02:23.090 --> 02:25.820 +가장 맛있는 콘텐츠는 다음에 나올 거예요 + +02:25.820 --> 02:27.860 +이 모델을 훈련할 때요 + +02:27.860 --> 02:32.540 +다음 주 결승전에선 모든 걸 합쳤어요 TUZ TUZ TOZ TUZ + +02:32.540 --> 02:35.210 +점점 더 커질 거예요 + +02:35.210 --> 02:37.160 +Get it, get it, get it, get it, get 갈수록 더 좋아질 거예요 + +02:37.160 --> 02:38.300 +조금만 참아요 + +02:38.330 --> 02:40.250 +좋은 건 다 나올 거예요 + +02:40.250 --> 02:44.870 +다음 시간에는 hyperperameter를 더 할 거예요 + +02:44.870 --> 02:50.390 +하이퍼파라미터는 좀 힘들 수도 있지만 정말 중요한 걸 배우는 비트예요 + +02:50.390 --> 02:54.530 +hyperperameter가 있어요 컨트롤 훈련이죠 설명해 드릴게요 + +02:54.560 --> 03:00.020 +그런 다음 감독자 미세 조정 SFT 트레이너를 설치할 거예요 여기서 모든 게 + +03:00.020 --> 03:00.830 +이뤄지죠 + +03:00.830 --> 03:03.650 +그리고 훈련을 시작할 거예요 + +03:03.650 --> 03:11.720 +여러분은 독자적인 대형 언어 모델을 훈련하게 될 겁니다 라마 3을 기반으로요 1단계 모델이죠 + +03:11.720 --> 03:15.320 +다음 세션에서 그걸 할 거예요 + +03:15.320 --> 03:17.210 +그럼 거기서 봐요 diff --git 
a/week5/community-contributions/subtitles/srts/59507329/en_US.srt b/week5/community-contributions/subtitles/srts/59507329/en_US.srt new file mode 100755 index 0000000..fd813e6 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507329/en_US.srt @@ -0,0 +1,274 @@ +WEBVTT + +00:00.800 --> 00:01.490 +Okay. + +00:01.490 --> 00:02.900 +It's moment of truth time. + +00:02.900 --> 00:06.620 +I have just taken our class tester. + +00:06.620 --> 00:07.940 +You remember this class? + +00:08.090 --> 00:15.140 +Uh, it's the class that runs the evaluation across the full, uh, 250 data points from our test data + +00:15.170 --> 00:15.560 +set. + +00:15.590 --> 00:17.450 +It's slightly different. + +00:17.450 --> 00:22.610 +Uh, if you if you go through this, you'll notice there's some very subtle differences, because we're + +00:22.610 --> 00:24.380 +not taking an item object. + +00:24.380 --> 00:27.980 +We're taking a data set, a data point from our data set. + +00:27.980 --> 00:29.990 +So there's a couple of small differences. + +00:29.990 --> 00:35.900 +But otherwise this tester is basically exactly the same, um, in terms of what it does. + +00:36.050 --> 00:40.760 +And it ends of course with this single line tester test. + +00:40.790 --> 00:47.630 +The model predict is the function that we just wrote that tries out our model against 250 points, and + +00:47.630 --> 00:49.130 +we pass in the test data set. + +00:49.130 --> 00:52.670 +And of course I've already run it and let me scroll through the results. + +00:52.670 --> 00:56.240 +So you get a sense it's right up at the there we go. + +00:56.270 --> 00:57.560 +We'll take it to the top. + +00:57.560 --> 01:04.610 +So you'll see there, of course, that first red item is where it predicted $1,800 for something that + +01:04.610 --> 01:05.930 +costs 374. + +01:05.960 --> 01:08.330 +There's some more reds, there's some greens. 
+
+01:08.510 --> 01:12.200
+Um, and it certainly gives you a sense that it's not like we're getting that.
+
+01:12.230 --> 01:15.500
+It's the model understands the task for this one.
+
+01:15.500 --> 01:19.070
+For example, it guessed $89.99.
+
+01:19.070 --> 01:22.040
+And the truth was $101.79.
+
+01:22.040 --> 01:25.310
+It's interesting that it's not sticking to the nearest whole dollar.
+
+01:25.310 --> 01:28.400
+It's it's still creating things with $0.99.
+
+01:28.820 --> 01:34.400
+Um, and you'll see that, uh, yeah, there's some there's some other problematic ones here, but there's
+
+01:34.430 --> 01:39.980
+a greens and reds, greens and reds, but quite a few reds, quite a few reds.
+
+01:39.980 --> 01:44.090
+So I will put us out of our misery and go straight to the charts.
+
+01:44.090 --> 01:45.140
+Here it comes.
+
+01:45.170 --> 01:46.550
+Oh my goodness.
+
+01:46.580 --> 01:52.730
+It is a horrible, horrible result of 395.
+
+01:52.760 --> 02:02.300
+In terms of the error, 395 uh, which you will remember, uh, is considerably worse than taking a
+
+02:02.300 --> 02:03.380
+random guess.
+
+02:03.380 --> 02:07.940
+I think it's certainly worse than just taking the average number of the training data set.
+
+02:07.970 --> 02:10.070
+Not that it knew the training data set.
+
+02:10.310 --> 02:15.800
+Uh, and, uh, yeah, it's, uh, it's generally a horrible result.
+
+02:16.070 --> 02:19.210
+Um, perhaps not massively surprising.
+
+02:19.210 --> 02:20.890
+It's a tiny model.
+
+02:20.980 --> 02:26.110
+It's been hugely quantized as well, and you can see visually what it's doing.
+
+02:26.500 --> 02:35.290
+You can see that it has, um, had a few different levels that it's been most comfortable guessing at,
+
+02:35.290 --> 02:38.710
+and it's guessed most often at one of these three levels.
+
+02:38.710 --> 02:41.950
+And unfortunately one of them is far too high.
+
+02:42.070 --> 02:44.710
+We've never told the model not to go above $1,000.
+ +02:44.710 --> 02:46.210 +It's not like that's a requirement. + +02:46.210 --> 02:48.670 +It can guess whatever it wants, as could GPT four. + +02:48.910 --> 02:57.310 +We've, uh um, not not given it that that, uh, that intelligence to to know that, um, and so it's + +02:57.310 --> 02:59.830 +gone too high far too much of the time. + +02:59.890 --> 03:02.230 +Uh, and it's really ruined the results. + +03:02.230 --> 03:05.650 +So a poor performance from our base model. + +03:05.650 --> 03:09.160 +Not that surprising given the small number of parameters. + +03:09.160 --> 03:16.720 +And of course, the challenge on our hands now is going to be, can we take this poor performing model + +03:16.720 --> 03:22.780 +and use fine tuning, use a training data set to make it stronger? + +03:22.780 --> 03:28.990 +And can we get close to what a trillion parameter model can achieve. + +03:28.990 --> 03:32.650 +So this is an 8 billion parameter model and it's been quantized. + +03:32.650 --> 03:41.080 +Can we get close to the the trillion plus model parameters in a major frontier model. + +03:41.110 --> 03:43.540 +Because this is open source, it's free. + +03:43.540 --> 03:44.890 +There's no API cost. + +03:44.920 --> 03:51.250 +Wouldn't it be amazing if we could perform at that level, um, or at least beat a human? + +03:51.250 --> 03:55.030 +Right now the human beings are winning over an untrained llama. + +03:55.090 --> 03:59.380 +At least this human is one final thought to leave you with. + +03:59.410 --> 04:03.400 +You remember that this is quantized down to four bits. + +04:03.400 --> 04:07.480 +You might be asking yourself, how would it look if we quantized just to eight bits? + +04:07.480 --> 04:13.330 +If we kept the eight bit version and ran it through, it would be interesting to see whether the how + +04:13.330 --> 04:18.490 +much the performance was impacting by going all the way down to the double quantized four bit. 
+ +04:18.490 --> 04:20.020 +And indeed you can do that, of course. + +04:20.050 --> 04:25.120 +And this framework gives us a lovely way to to in a very simple, tangible way. + +04:25.150 --> 04:26.260 +See the difference. + +04:26.260 --> 04:29.680 +So remember 395 is how far this is wrong. + +04:29.680 --> 04:37.920 +So in this other tab I have just run it with the only quantized to two eight bits. + +04:38.040 --> 04:44.880 +So if I go up to the top, you can see that I've got up here four bits set to false and otherwise it's + +04:44.880 --> 04:46.050 +exactly the same. + +04:46.410 --> 04:47.310 +Notebook. + +04:47.310 --> 04:51.060 +Let's scroll all the way down and go straight to the results. + +04:51.450 --> 04:54.180 +It looks like it's in here. + +04:56.070 --> 04:57.030 +Hold on. + +04:57.030 --> 04:58.200 +Build the tension. + +04:58.200 --> 04:59.670 +And here we go. + +04:59.670 --> 05:03.030 +So it's also pretty horrible performance. + +05:03.030 --> 05:06.690 +But it is better 395 became 301. + +05:06.990 --> 05:08.520 +And that's not surprising at all. + +05:08.550 --> 05:12.690 +You know it's got a twice the amount of information. + +05:12.870 --> 05:20.370 +Um so you know again quantizing did have an impact on accuracy, but perhaps we would have expected + +05:20.490 --> 05:23.130 +the eight bit model to have done even better. + +05:23.130 --> 05:27.540 +So so it wasn't there wasn't such a great difference between them. + +05:28.230 --> 05:33.540 +Uh, but it does show you, of course, that the bigger model is able to do a better job. + +05:34.470 --> 05:35.340 +All right. + +05:35.340 --> 05:37.920 +With that, that's been pretty interesting. + +05:37.920 --> 05:38.970 +Pretty revealing. + +05:38.970 --> 05:42.360 +Let's go back to the slides to wrap up and summarize. 
diff --git a/week5/community-contributions/subtitles/srts/59507329/ja_JP.srt b/week5/community-contributions/subtitles/srts/59507329/ja_JP.srt new file mode 100755 index 0000000..0826aa4 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507329/ja_JP.srt @@ -0,0 +1,244 @@ +WEBVTT + +00:00.800 --> 00:01.490 +オーケー。 + +00:01.490 --> 00:02.900 +今が正念場だ。 + +00:02.900 --> 00:06.620 +クラステスターを受けたばかりだ。 + +00:06.620 --> 00:07.940 +このクラスを覚えているか? + +00:08.090 --> 00:15.560 +このクラスは、 テスト・データ・セットの250のデータ・ポイント全体にわたって評価を実行する。 + +00:15.590 --> 00:17.450 +少し違う。 + +00:17.450 --> 00:24.380 +アイテム・オブジェクトを取るわけではないので、 微妙な違いがあることにお気づきだろう。 + +00:24.380 --> 00:27.980 +私たちはデータセットからデータ・ポイントを取っている。 + +00:27.980 --> 00:29.990 +だから、 小さな違いがいくつかある。 + +00:29.990 --> 00:35.900 +しかし、 それ以外の点では、 このテスターは基本的にまったく同じだ。 + +00:36.050 --> 00:40.760 +そしてもちろん、 このシングルラインテスターのテストで終わる。 + +00:40.790 --> 00:49.130 +モデルpredictは先ほど書いた関数で、 250ポイントに対してモデルを試すもので、 テストデータセットを渡す。 + +00:49.130 --> 00:52.670 +もちろん、 すでに実行し、 結果をスクロールさせている。 + +00:52.670 --> 00:56.240 +だから、 すぐそこまで来ている感じがする。 + +00:56.270 --> 00:57.560 +トップに持っていく。 + +00:57.560 --> 01:05.930 +もちろん、 最初の赤い項目は、 374ドルのものが1800ドルと予測されているところだ。 + +01:05.960 --> 01:08.330 +赤もあれば緑もある。 + +01:08.510 --> 01:12.200 +うーん、 確かに、 私たちがそれを得ているのとは違うということを感じさせてくれる。 + +01:12.230 --> 01:15.500 +今回の課題はモデルが理解している。 + +01:15.500 --> 01:19.070 +例えば89ドル。 99. + +01:19.070 --> 01:22.040 +そして真実は101 79だった。 + +01:22.040 --> 01:25.310 +ドル単位にこだわっていないのが面白い。 + +01:25.310 --> 01:28.400 +今でも0ドルでものを作っている。 99. 
+ +01:28.820 --> 01:34.400 +ええと、 そうだな、 他にもいくつか問題のあるものがあるが、 緑と赤、 緑と赤、 + +01:34.430 --> 01:39.980 +でもかなりの数の赤、 かなりの数の赤があるのがわかるだろう。 + +01:39.980 --> 01:44.090 +だから、 私は私たちを窮地から救い出し、 チャートに直行する。 + +01:44.090 --> 01:45.140 +来たぞ。 + +01:45.170 --> 01:46.550 +なんてことだ。 + +01:46.580 --> 01:52.730 +395の恐ろしい、 恐ろしい結果だ。 + +01:52.760 --> 02:03.380 +誤差という点では、 395......覚えているだろうが、 ランダムに推測するよりもかなり悪い。 + +02:03.380 --> 02:07.940 +トレーニングデータセットの平均値を取るよりも確実に悪いと思う。 + +02:07.970 --> 02:10.070 +トレーニングデータセットを知っていたわけではない。 + +02:10.310 --> 02:15.800 +ああ、 そうだね、 一般的にはひどい結果だよ。 + +02:16.070 --> 02:19.210 +おそらく、 大きな驚きはないだろう。 + +02:19.210 --> 02:20.890 +小さなモデルだ。 + +02:20.980 --> 02:26.110 +それも非常に量子化されていて、 何をしているのか視覚的にわかる。 + +02:26.500 --> 02:38.710 +最も推測しやすいレベルがいくつかあり、 この3つのレベルのいずれかを最も頻繁に推測していることがわかるだろう。 + +02:38.710 --> 02:41.950 +そして残念なことに、 そのうちのひとつがあまりにも高すぎる。 + +02:42.070 --> 02:44.710 +モデルには1,000ドルを超えるなと言ったことはない。 + +02:44.710 --> 02:46.210 +それが必須条件というわけではない。 + +02:46.210 --> 02:48.670 +GPTの4人と同じように、 好きなように推測することができる。 + +02:48.910 --> 02:59.830 +私たちは......うーん、 それを知るための知性を与えてこなかった。 + +02:59.890 --> 03:02.230 +あー、 それで結果が台無しになった。 + +03:02.230 --> 03:05.650 +つまり、 ベース・モデルのパフォーマンスが低かったということだ。 + +03:05.650 --> 03:09.160 +パラメータの数が少ないことを考えれば、 それほど驚くことではない。 + +03:09.160 --> 03:22.780 +そしてもちろん、 私たちが今直面している課題は、 この性能の低いモデルを微調整し、 トレーニングデータセットを使ってより強くすることができるかということだ。 + +03:22.780 --> 03:28.990 +そして、 1兆個のパラメータを持つモデルが達成できることに近づけるだろうか。 + +03:28.990 --> 03:32.650 +つまり、 これは80億のパラメーターを持つモデルで、 量子化されている。 + +03:32.650 --> 03:41.080 +兆を超える主要なフロンティアモデルのパラメータに近づくことができるだろうか? + +03:41.110 --> 03:43.540 +これはオープンソースだから無料だ。 + +03:43.540 --> 03:44.890 +APIのコストはかからない。 + +03:44.920 --> 03:51.250 +もし私たちがそのレベルのパフォーマンスを発揮できたら、 いや、 少なくとも人間を打ち負かすことができたら、 素晴らしいと思わない? + +03:51.250 --> 03:55.030 +今、 人間は訓練されていないラマに勝っている。 + +03:55.090 --> 03:59.380 +少なくとも、 この人間が最後に残した思いである。 + +03:59.410 --> 04:03.400 +これが4ビットに量子化されることを覚えているだろうか。 + +04:03.400 --> 04:07.480 +8ビットに量子化したらどうなるだろうか? 
+ +04:07.480 --> 04:13.330 +もし、 8ビットのバージョンを維持したまま実行したとしたら、 2倍量子化された4ビットにすることでパフォーマンスにどれだけの影響が出るか、 + +04:13.330 --> 04:18.490 +興味深いところだ。 + +04:18.490 --> 04:20.020 +もちろん、 それは可能だ。 + +04:20.050 --> 04:25.120 +そして、 このフレームワークは、 とてもシンプルで具体的な方法を教えてくれる。 + +04:25.150 --> 04:26.260 +その違いを見てみよう。 + +04:26.260 --> 04:29.680 +だから、 395がどこまで間違っているか覚えておいてほしい。 + +04:29.680 --> 04:37.920 +だから、 この別のタブでは、 8ビットを2つに量子化しただけで実行している。 + +04:38.040 --> 04:46.050 +つまり、 一番上まで行くと、 ここで4つのビットがfalseに設定されていることがわかりますが、 それ以外はまったく同じです。 + +04:46.410 --> 04:47.310 +ノートブック + +04:47.310 --> 04:51.060 +下までスクロールして、 結果に直行しよう。 + +04:51.450 --> 04:54.180 +この中にあるようだ。 + +04:56.070 --> 04:57.030 +ちょっと待ってくれ。 + +04:57.030 --> 04:58.200 +緊張感を高める。 + +04:58.200 --> 04:59.670 +そして、 これだ。 + +04:59.670 --> 05:03.030 +だから、 パフォーマンスもかなりひどい。 + +05:03.030 --> 05:06.690 +しかし、 395が301になったのは良いことだ。 + +05:06.990 --> 05:08.520 +そして、 それはまったく驚くべきことではない。 + +05:08.550 --> 05:12.690 +情報量が2倍になっているのはご存じでしょう。 + +05:12.870 --> 05:23.130 +つまり、 量子化は精度に影響を与えたが、 8ビットモデルであればもっと良い結果が得られると予想される。 + +05:23.130 --> 05:27.540 +だから、 2人の間にそれほど大きな違いはなかった。 + +05:28.230 --> 05:33.540 +でも、 大きなモデルの方がいい仕事ができるということだ。 + +05:34.470 --> 05:35.340 +分かった。 + +05:35.340 --> 05:37.920 +それはそれで、 かなり面白かったよ。 + +05:37.920 --> 05:38.970 +かなり露呈している。 + +05:38.970 --> 05:42.360 +スライドに戻ってまとめよう。 diff --git a/week5/community-contributions/subtitles/srts/59507329/ko_KR.srt b/week5/community-contributions/subtitles/srts/59507329/ko_KR.srt new file mode 100755 index 0000000..52cd90e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507329/ko_KR.srt @@ -0,0 +1,271 @@ +WEBVTT + +00:00.800 --> 00:01.490 +네 + +00:01.490 --> 00:02.900 +진실의 순간이에요 + +00:02.900 --> 00:06.620 +방금 시험지를 먹었어요 + +00:06.620 --> 00:07.940 +이 수업 기억나요? 
+ +00:08.090 --> 00:15.560 +전체적인 평가를 수행하는 수업이에요 시험 데이터 세트에서 250개의 데이터 포인트를 통틀어서요 + +00:15.590 --> 00:17.450 +약간 달라요 + +00:17.450 --> 00:22.610 +여길 보시면 아주 미묘한 차이가 있는 걸 아실 겁니다 왜냐하면 우린 아이템 객체를 + +00:22.610 --> 00:24.380 +가져가지 않으니까요 + +00:24.380 --> 00:27.980 +데이터셋에서 데이터 포인트를 가져오는 거죠 + +00:27.980 --> 00:29.990 +몇 가지 작은 차이가 있어요 + +00:29.990 --> 00:35.900 +하지만 그 외에는 이 테스터는 기본적으로 똑같아요 + +00:36.050 --> 00:40.760 +물론 이 단선 테스터 테스트로 끝나죠 + +00:40.790 --> 00:47.630 +모델 예측은 방금 작성한 함수로 250점 대 모델을 테스트해보고 테스트 데이터 세트에서 + +00:47.630 --> 00:49.130 +통과하는 거죠 + +00:49.130 --> 00:52.670 +물론 이미 실행했죠 결과를 스크롤해 볼게요 + +00:52.670 --> 00:56.240 +느낌으로 알 수 있죠 바로 위∙∙∙ get 됐네요 + +00:56.270 --> 00:57.560 +정상까지 올라갈 거예요 + +00:57.560 --> 01:04.610 +보시다시피 첫 번째 빨간 항목은 374달러짜리 물건이 1,800달러로 + +01:04.610 --> 01:05.930 +예상됐어요 + +01:05.960 --> 01:08.330 +붉은색과 녹색도 있어요 + +01:08.510 --> 01:12.200 +우리가 그걸 못 느낀다는 걸 확실히 느낄 수 있죠 + +01:12.230 --> 01:15.500 +이 모델은 이 작업을 이해하죠 + +01:15.500 --> 01:19.070 +예를 들어, 89달러를 게스트하죠 99살요 + +01:19.070 --> 01:22.040 +진실은 101-79였어요 + +01:22.040 --> 01:25.310 +가장 가까운 금액이 아닌 게 흥미롭네요 + +01:25.310 --> 01:28.400 +0달러로 뭔가를 만드는 거예요 99살요 + +01:28.820 --> 01:34.400 +보시면 문제가 되는 색도 몇 개 있어요 녹색과 + +01:34.430 --> 01:39.980 +붉은색이 있는데 붉은색이 꽤 많아요 + +01:39.980 --> 01:44.090 +이 고통을 끝내고 곧장 차트에 오를 거예요 Put it up Put it + +01:44.090 --> 01:45.140 +나오네요 + +01:45.170 --> 01:46.550 +말도 안 돼요 + +01:46.580 --> 01:52.730 +395의 끔찍한 결과예요 + +01:52.760 --> 02:02.300 +오차로 따지면 395%는 기억하시겠지만 무작위로 추측하는 것보다 훨씬 심각한 + +02:02.300 --> 02:03.380 +수치죠 + +02:03.380 --> 02:07.940 +훈련 데이터의 평균 수보다 확실히 더 나쁜 것 같아요 + +02:07.970 --> 02:10.070 +훈련 데이터는 몰랐지만요 + +02:10.310 --> 02:15.800 +네, 보통 끔찍한 결과죠 + +02:16.070 --> 02:19.210 +그렇게 놀랍진 않아요 + +02:19.210 --> 02:20.890 +작은 모형이에요 + +02:20.980 --> 02:26.110 +양자화도 잘 돼 있어서 눈으로 확인할 수 있어요 + +02:26.500 --> 02:35.290 +몇 가지 단계가 있는데 가장 쉽게 추측할 수 있는 단계죠 대부분 이 세 단계 + +02:35.290 --> 02:38.710 +중 하나에서 추측해요 + +02:38.710 --> 02:41.950 +안타깝게도 한 마리는 너무 높죠 + +02:42.070 --> 02:44.710 +모델에게 1,000달러 이상은 안 된다고 한 
적이 없어요 + +02:44.710 --> 02:46.210 +꼭 그래야 하는 건 아니에요 + +02:46.210 --> 02:48.670 +GPT 4처럼 원하는 건 뭐든 추측할 수 있죠 + +02:48.910 --> 02:57.310 +우리는 그걸 알 수 있는 지식을 주지 않았어요 그래서 너무 자주 너무 + +02:57.310 --> 02:59.830 +높이 올라갔죠 + +02:59.890 --> 03:02.230 +그래서 결과가 엉망이 됐죠 + +03:02.230 --> 03:05.650 +기본 모델의 성능이 낮았어요 + +03:05.650 --> 03:09.160 +변수가 적은 걸 보면 그리 놀랍진 않네요 + +03:09.160 --> 03:16.720 +물론 이제 우리가 해결해야 할 과제는 성능이 떨어지는 모델을 미세 + +03:16.720 --> 03:22.780 +튜닝과 훈련 데이터를 이용해 더 강하게 만드는 거죠 + +03:22.780 --> 03:28.990 +그리고 1조 매개 변수 모델이 달성할 수 있는 것에 근접할 수 있을까요? Get it + +03:28.990 --> 03:32.650 +80억 매개 변수 모델로 수량화되었어요 + +03:32.650 --> 03:41.080 +주요 개척 모델의 get 1조 이상의 모델 변수에 근접할 수 있을까요? + +03:41.110 --> 03:43.540 +오픈 소스라서 무료예요 + +03:43.540 --> 03:44.890 +API 비용이 없어요 + +03:44.920 --> 03:51.250 +그 수준으로 경기를 펼치거나 최소한 인간을 이기면 굉장하겠죠? + +03:51.250 --> 03:55.030 +훈련받지 않은 라마를 인간이 이기고 있어요 + +03:55.090 --> 03:59.380 +적어도 이 인간은 당신에게 마지막으로 남길 존재죠 + +03:59.410 --> 04:03.400 +이건 4분의 1로 퀀화되는 거 아시죠? + +04:03.400 --> 04:07.480 +여러분은 궁금하실 거예요 우리가 8분의 1로 양자화되면 어떻게 보일까요? + +04:07.480 --> 04:13.330 +8비트 버전을 유지하고 실행하면 성능이 얼마나 영향을 미치는지 알 수 + +04:13.330 --> 04:18.490 +있을 거예요 이중 퀀타이즈 된 4비트 버전까지 살펴보면요 + +04:18.490 --> 04:20.020 +물론 그럴 수 있죠 + +04:20.050 --> 04:25.120 +이 프레임워크가 아주 간단하고 실재하는 방법을 제공하죠 + +04:25.150 --> 04:26.260 +차이를 보세요 + +04:26.260 --> 04:29.680 +395까지 세면 얼마나 잘못된 건지 아시겠죠 + +04:29.680 --> 04:37.920 +다른 탭에서 유일한 8비트로 퀀타이즈 된 걸 실행했어요 + +04:38.040 --> 04:44.880 +위로 가면 4bit가 false로 설정된 게 보이시죠? 
아니면 완전히 + +04:44.880 --> 04:46.050 +똑같아요 + +04:46.410 --> 04:47.310 +공책이에요 + +04:47.310 --> 04:51.060 +스크롤을 쭉 내려서 결과를 보죠 + +04:51.450 --> 04:54.180 +여기 있는 것 같아요 + +04:56.070 --> 04:57.030 +잠깐만요 + +04:57.030 --> 04:58.200 +긴장감을 조성해요 + +04:58.200 --> 04:59.670 +자, 보세요 + +04:59.670 --> 05:03.030 +그러니 연기도 형편없죠 + +05:03.030 --> 05:06.690 +395가 301이 된 게 더 낫죠 + +05:06.990 --> 05:08.520 +놀랄 일도 아니죠 + +05:08.550 --> 05:12.690 +정보가 두 배나 많거든요 + +05:12.870 --> 05:20.370 +다시 말씀드리지만 퀀타이즈는 정확도에 영향을 미쳤습니다 하지만 8비트 모델은 + +05:20.490 --> 05:23.130 +더 잘했을지도 모르겠네요 + +05:23.130 --> 05:27.540 +그래서 두 사람 사이에 큰 차이는 없었어요 + +05:28.230 --> 05:33.540 +물론 더 큰 모델이 더 잘한다는 걸 보여주죠 + +05:34.470 --> 05:35.340 +좋아요 + +05:35.340 --> 05:37.920 +그게 참 흥미로웠어요 + +05:37.920 --> 05:38.970 +노출이 심하네요 + +05:38.970 --> 05:42.360 +슬라이드로 돌아가서 마무리하고 요약해보죠 diff --git a/week5/community-contributions/subtitles/srts/59507423/en_US.srt b/week5/community-contributions/subtitles/srts/59507423/en_US.srt new file mode 100755 index 0000000..fd48873 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507423/en_US.srt @@ -0,0 +1,445 @@ +WEBVTT + +00:00.920 --> 00:05.060 +So you may remember eons ago when we were building our data set. + +00:05.060 --> 00:08.960 +At the end of that, we uploaded our data to Huggingface. + +00:08.990 --> 00:13.760 +Since that point, we since we also produced pickle files, we've been loading in the data from pickle + +00:13.760 --> 00:15.200 +files from that point onwards. + +00:15.200 --> 00:20.090 +But now that we're in Google Colab, it's easiest for us to collect that data back from the Huggingface + +00:20.090 --> 00:27.050 +hub again, which is a very typical task in this kind of, uh, process of building your own model. + +00:27.080 --> 00:28.400 +So here I go. 
+ +00:28.430 --> 00:34.550 +I load the dataset using, uh, hugging face load data set method, passing in the data set name, and + +00:34.550 --> 00:40.190 +then I break it up into a train and a test and the data set name I set in the constants at the top. + +00:40.220 --> 00:44.930 +Once I've done that, we can take a look at the first training data point. + +00:45.080 --> 00:48.710 +And what it looks like is it has text and a price. + +00:48.740 --> 00:51.830 +You may remember we set this explicitly ourselves. + +00:51.830 --> 00:54.800 +The text is our prompt. + +00:54.830 --> 01:00.590 +How much does this cost to the nearest dollar followed by the description of the product followed by + +01:00.590 --> 01:07.070 +price is dollars and then the price itself, but rounded to the nearest whole number. + +01:07.070 --> 01:08.330 +And in the top. + +01:08.330 --> 01:09.920 +Here I say, how much does this cost? + +01:09.950 --> 01:11.330 +To the nearest dollar. + +01:11.720 --> 01:18.560 +And the reason I'm doing that is I want to make the task a bit easier for the llama 3.1 model with its + +01:18.560 --> 01:20.870 +puny 8 billion parameters. + +01:21.230 --> 01:26.870 +When we were sending it to a frontier model, we didn't need to specify that because it's easily powerful + +01:26.870 --> 01:29.030 +enough to make its own decisions about cents. + +01:29.030 --> 01:33.710 +But in this case, we want to give our model every, every, every simplicity we can. + +01:33.920 --> 01:40.400 +Um, and since it's going to, since this will always map to one token in llama 3.1, we're making it + +01:40.400 --> 01:46.490 +quite easy that all it's got to do is be able to predict that one token right there. + +01:46.490 --> 01:50.180 +That's going to be what it's going to try and learn how to do well. + +01:50.540 --> 01:54.320 +Um, and in this data set, we also have the real price in here too. 
+ +01:54.680 --> 02:00.560 +Uh, if I look at the test data and take the first point, the test data is going to look very similar + +02:00.560 --> 02:02.600 +in structure with one tiny difference. + +02:02.600 --> 02:03.950 +Do you know what that difference is? + +02:04.130 --> 02:05.030 +I'm sure you do. + +02:05.070 --> 02:11.220 +It is, of course, that there is no price provided at this point in the test data. + +02:11.250 --> 02:13.260 +The text is going to be the text. + +02:13.290 --> 02:14.970 +How much is this to the nearest dollar? + +02:14.970 --> 02:17.340 +And then we pass in this text. + +02:17.340 --> 02:21.870 +And the assignment to our model is to predict the next token. + +02:21.900 --> 02:25.260 +What is the probability of the next token coming after this. + +02:25.260 --> 02:33.090 +And we hope it will give a high probability to the token that matches the number that is, uh, 3.74, + +02:33.180 --> 02:35.490 +uh, matching the actual price. + +02:35.550 --> 02:37.350 +Uh, and so that is the assignment. + +02:37.350 --> 02:42.810 +And because this maps to one token, it's really the challenge is just to get good at predicting the + +02:42.810 --> 02:47.760 +next token, the single next token that represents that cost. + +02:48.030 --> 02:52.620 +Uh, one other point to mention is that you may remember, we did some futzing around to make sure that + +02:52.620 --> 02:58.140 +this text always fitted into exactly 179 tokens or less. + +02:58.260 --> 03:05.670 +Um, and because of that, we're able now to tell, uh, the I've got a constant up here that says maximum + +03:05.670 --> 03:08.460 +sequence length is 182. + +03:08.880 --> 03:10.320 +I've added in a few tokens. + +03:10.320 --> 03:13.140 +There it is, in fact 179. 
+ +03:13.140 --> 03:20.790 +But I'm adding in a few extra spare tokens, uh, because, uh, the the tokenizer is going to add in + +03:20.790 --> 03:26.880 +a beginning of sentence token to the start of the sequence, and it may add in an end of sentence or + +03:26.910 --> 03:28.890 +a pad token or two at the end. + +03:28.890 --> 03:34.500 +And I want to have no risk at all that we accidentally chop off the price of the most important token, + +03:34.500 --> 03:37.020 +uh, which is going to come at the end of this. + +03:37.020 --> 03:42.450 +So given a little bit of extra leeway, in fact, this doesn't become important until we get to training. + +03:42.450 --> 03:45.540 +But I wanted to point it out now since we're looking at the data. + +03:46.470 --> 03:48.510 +So there we go. + +03:48.720 --> 03:50.790 +We've just sorry, gone too far. + +03:50.790 --> 03:53.010 +We've just looked at this data. + +03:53.040 --> 03:57.630 +The next thing that we do is we pick the right quantization config. + +03:57.630 --> 04:01.530 +I set a constant up above, uh, quant four bit. + +04:01.560 --> 04:03.420 +In this case I set it to true. + +04:03.450 --> 04:06.030 +Let's just go and check that we will see. + +04:06.060 --> 04:06.810 +There we go. + +04:06.810 --> 04:08.970 +Quant four bit is set to true. + +04:08.980 --> 04:14.320 +And so now when I come back down again, we're going to pick the four bit quantization. + +04:14.320 --> 04:17.110 +And I show you here what it would look like if we picked eight bit. + +04:17.110 --> 04:20.680 +But we're going to pick the really minuscule four bit version. + +04:21.100 --> 04:24.370 +And then we load in the tokenizer and the model. + +04:24.370 --> 04:26.680 +I'm not going to run this cell because I already ran it. + +04:26.680 --> 04:28.870 +You can see it's sitting in the memory here. + +04:28.870 --> 04:30.730 +If I run it a second time I'll run out of memory. 
+ +04:31.810 --> 04:36.100 +And what we do here is we load in the tokenizer. + +04:36.130 --> 04:39.640 +There's a bit of stuff here that's very boilerplate that you'll see a lot. + +04:39.760 --> 04:45.670 +Um, we're telling the tokenizer that when if it ever needs to pad the end of a sequence, it should + +04:45.670 --> 04:48.970 +just use the end of sentence token and just have that repeatedly. + +04:48.970 --> 04:51.400 +And it should do that off to the right hand side. + +04:51.400 --> 04:54.430 +This is standard stuff that will happen when we train. + +04:54.430 --> 04:56.200 +We won't actually use it right now. + +04:56.320 --> 04:57.910 +Um, so you don't need to worry about it. + +04:57.910 --> 04:59.740 +But but you'll see this all over the place. + +04:59.740 --> 05:04.300 +It's a very standard setup, as is this line here, which you also don't need to worry about right now. + +05:04.300 --> 05:10.540 +What we're doing is creating a tokenizer and loading in the llama 3.1 base model. + +05:10.540 --> 05:15.370 +And it's using up the 5.6GB of memory that you're expecting. + +05:15.370 --> 05:22.090 +There it is, 55.9, it seems because because I've done some, uh, inference down below. + +05:22.240 --> 05:29.710 +Uh, but yeah, it's, um, it's the, the very slimmed down four bit version of the model. + +05:30.250 --> 05:34.810 +So now this function is one that should be familiar to you because we used it recently with frontier + +05:34.840 --> 05:42.670 +models extract price, which is going to take some text, any text and pluck out from it the price that's + +05:42.670 --> 05:43.780 +being predicted. + +05:43.780 --> 05:55.570 +And so if I do something like extract price, price is dollar 999, I should have this as a string perhaps. + +05:55.570 --> 05:57.040 +So that's not going to work very well is it. + +05:57.040 --> 06:01.120 +Price is dollar 9999 blah blah. + +06:01.540 --> 06:04.600 +Uh price is 999. + +06:04.840 --> 06:05.770 +So cheap. 
+
+06:07.060 --> 06:07.960
+Whatever.
+
+06:08.260 --> 06:10.210
+Uh, then hopefully what we'll see.
+
+06:10.240 --> 06:10.540
+Yes.
+
+06:10.570 --> 06:12.610
+Is that it's going to pluck out 99999.
+
+06:12.610 --> 06:16.760
+But the model, we know it's going to be provided with this in the in the prompt.
+
+06:16.760 --> 06:21.680
+It's what comes next has got to have nine, nine, nine in it.
+
+06:21.860 --> 06:25.820
+Um, and then this here model predict.
+
+06:25.820 --> 06:30.080
+So this is the function which we're going to be using in our test harness.
+
+06:30.080 --> 06:33.020
+This is the function where we tell it.
+
+06:33.050 --> 06:34.790
+We're going to give you a prompt.
+
+06:34.790 --> 06:37.550
+And we want to know how much does this cost.
+
+06:37.550 --> 06:44.540
+And so this is how we call our model in inference mode similar to what we did several weeks ago.
+
+06:44.810 --> 06:52.490
+Uh, we take the prompt and we encode it using tokenizer dot encode.
+
+06:52.490 --> 06:55.820
+And this thing here will push it off to the GPU.
+
+06:56.510 --> 07:01.400
+Uh, this is just, uh, something that's, uh, not super important.
+
+07:01.400 --> 07:03.020
+It stops it from printing a warning.
+
+07:03.020 --> 07:06.680
+So, so this doesn't actually affect anything.
+
+07:07.190 --> 07:16.040
+Uh, and then, um, yeah, to be precise, this this prevents it from trying to predict anything that's
+
+07:16.040 --> 07:21.680
+happening in that input token area, which we don't want it to predict, that we want it to predict
+
+07:21.680 --> 07:22.880
+what's coming afterwards.
+
+07:22.880 --> 07:24.170
+That's what it would do anyway.
+
+07:24.170 --> 07:27.230
+But it would give a warning if we didn't explicitly tell it this.
+
+07:27.410 --> 07:33.380
+So then we say for our outputs, we're going to call our base model llama 3.1.
+
+07:33.380 --> 07:36.950
+And we're going to call the generate method on it.
+
+07:36.980 --> 07:38.720
+We pass in the inputs.
+
+07:38.720 --> 07:41.930
+We're going to say the maximum new tokens is four.
+
+07:41.930 --> 07:43.340
+We could make that a much smaller number.
+
+07:43.340 --> 07:44.900
+We only really need one token.
+
+07:44.900 --> 07:50.870
+I'm giving it to to generate up to four tokens, just in case it prints another dollar sign or something
+
+07:50.870 --> 07:51.680
+like that.
+
+07:52.100 --> 07:58.130
+Um, uh, that we pass in the attention mask that I've just set that stops it giving a warning.
+
+07:58.130 --> 08:00.590
+And this is just saying we only want back one answer.
+
+08:00.590 --> 08:03.290
+We don't want it to come back with multiple answers.
+
+08:03.680 --> 08:08.000
+And then for the reply we take that one answer, it sends us back.
+
+08:08.000 --> 08:13.130
+And we call tokenizer dot decode to turn that back into a string.
+
+08:13.130 --> 08:15.830
+And then we extract that string.
+
+08:16.340 --> 08:17.000
+All right.
+
+08:17.000 --> 08:18.240
+So that's exciting.
+
+08:18.240 --> 08:20.280
+Let's just remind ourselves.
+
+08:20.280 --> 08:27.270
+So if we take the zeroth, the the first test item, here it is.
+
+08:27.270 --> 08:30.000
+It's the OEM AC compressor.
+
+08:30.030 --> 08:33.780
+The actual price is $374.
+
+08:33.810 --> 08:34.920
+Who knew.
+
+08:34.920 --> 08:37.440
+So let's have our first shot at this.
+
+08:37.440 --> 08:40.080
+So we're going to say Model.predict.
+
+08:42.870 --> 08:44.010
+Test zero.
+
+08:44.010 --> 08:48.390
+And to get the prompt out of that I just call text on that.
+
+08:49.050 --> 08:50.190
+So are you ready.
+
+08:50.220 --> 08:50.970
+Here we go.
+
+08:51.000 --> 08:59.910
+Llama 3.1 base model is going to try and predict the price of an OEM AC compressor with something repair
+
+08:59.940 --> 09:02.850
+kit okay.
+
+09:02.850 --> 09:07.470
+And it's predicted $1,800 which is rather far off.
+
+09:07.470 --> 09:12.960
+So that is potentially a bad omen for how the llama 3.1 base model will work.
+ +09:12.960 --> 09:19.800 +But we might have just gotten unlucky with the first example, but that will be revealed in the next + +09:19.800 --> 09:20.460 +video. diff --git a/week5/community-contributions/subtitles/srts/59507423/ja_JP.srt b/week5/community-contributions/subtitles/srts/59507423/ja_JP.srt new file mode 100755 index 0000000..c195d20 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507423/ja_JP.srt @@ -0,0 +1,400 @@ +WEBVTT + +00:00.920 --> 00:05.060 +私たちがデータセットを作っていた何年も前のことを覚えているだろうか。 + +00:05.060 --> 00:08.960 +その最後に、 Huggingfaceにデータをアップロードした。 + +00:08.990 --> 00:15.200 +その時点から、 ピックルファイルも作っていたので、 それ以降はピックルファイルからデータを読み込んでいる。 + +00:15.200 --> 00:27.050 +しかし、 Google Colabにいる今、 Huggingfaceのハブからデータを収集するのが一番簡単だ。 + +00:27.080 --> 00:28.400 +それでは、 どうぞ。 + +00:28.430 --> 00:34.550 +データセットをロードするには、 データセット名を渡して、 + +00:34.550 --> 00:40.190 +データセットをtrainとtestに分割する。 + +00:40.220 --> 00:44.930 +そうしたら、 最初のトレーニングデータを見てみよう。 + +00:45.080 --> 00:48.710 +テキストと値段が書いてある。 + +00:48.740 --> 00:51.830 +私たちが自分たちでこれを設定したことを覚えているかもしれない。 + +00:51.830 --> 00:54.800 +テキストは私たちのプロンプトだ。 + +00:54.830 --> 01:00.590 +この商品はいくらですか(1ドル単位)」、 「この商品はいくらですか(1ドル単位)」、 「この商品はいくらですか(1ドル単位)」、 + +01:00.590 --> 01:07.070 +「この商品はいくらですか(1ドル単位)」、 「この商品はいくらですか(1ドル単位)」、 「この商品はいくらですか(1ドル単位)」。 + +01:07.070 --> 01:08.330 +そしてトップには + +01:08.330 --> 01:09.920 +これはいくらするんだ? 
+ +01:09.950 --> 01:11.330 +1ドル単位で。 + +01:11.720 --> 01:20.870 +そうしている理由は、 ラマ3世の作業を少しでも楽にしたいからだ。 80億のパラメータを持つ1モデル。 + +01:21.230 --> 01:26.870 +フロンティア・モデルに送る場合、 セントに関する独自の判断を下すのに十分なパワーを備えているため、 + +01:26.870 --> 01:29.030 +それを指定する必要はなかった。 + +01:29.030 --> 01:33.710 +しかし、 この場合、 私たちはモデルにできる限りのシンプルさを与えたい。 + +01:33.920 --> 01:40.400 +ええと、 そうなると、 ラマ3では常に1つのトークンに対応することになりますから。 1、 私たちは、 トークンを1つ予測するだけでいいように、 + +01:40.400 --> 01:46.490 +とても簡単にしています。 + +01:46.490 --> 01:50.180 +それが、 うまくやる方法を学ぼうとすることになる。 + +01:50.540 --> 01:54.320 +そしてこのデータセットには、 実際の価格も含まれている。 + +01:54.680 --> 02:00.560 +ええと、 テストデータを見て、 最初のポイントを取ると、 テストデータは非常によく似た構造になりそうだが、 + +02:00.560 --> 02:02.600 +1つだけ小さな違いがある。 + +02:02.600 --> 02:03.950 +その違いが何なのか分かる? + +02:04.130 --> 02:05.030 +そうだろうね。 + +02:05.070 --> 02:11.220 +もちろん、 この時点のテストデータには価格が提示されていない。 + +02:11.250 --> 02:13.260 +テキストはテキストになる。 + +02:13.290 --> 02:14.970 +1ドル単位でいくらですか? + +02:14.970 --> 02:17.340 +そして、 この文章を通過する。 + +02:17.340 --> 02:21.870 +そして私たちのモデルに課せられたのは、 次のトークンを予測することだ。 + +02:21.900 --> 02:25.260 +この後、 次のトークンが来る確率は? 
+ +02:25.260 --> 02:35.490 +そして、 3という数字に一致するトークンが高い確率で出ることを期待している。 74、 ええと、 実際の価格と同じです。 + +02:35.550 --> 02:37.350 +それが任務だ。 + +02:37.350 --> 02:47.760 +そして、 これは1つのトークンに対応するので、 次のトークン、 つまりそのコストを表す1つの次のトークンを予測することに長けていることが本当の課題なのだ。 + +02:48.030 --> 02:52.620 +ええと、 もうひとつ言っておくと、 覚えているかもしれないが、 このテキストが常に179トークン以内に収まるようにするために、 + +02:52.620 --> 02:58.140 +いろいろと工夫した。 + +02:58.260 --> 03:08.460 +そのおかげで、 最大配列長182という定数がここにある。 + +03:08.880 --> 03:10.320 +トークンも少し入れた。 + +03:10.320 --> 03:13.140 +実際に179番がある。 + +03:13.140 --> 03:20.790 +というのも、 トークナイザーはシーケンスの最初に文頭トークンを追加し、 + +03:20.790 --> 03:28.890 +最後に文末トークンやパッドトークンを追加する可能性があるからです。 + +03:28.890 --> 03:37.020 +そして、 最も重要なトークンの価格を誤って切り下げてしまうようなリスクは絶対に避けたい。 + +03:37.020 --> 03:42.450 +だから、 少し余裕を持たせれば、 実際、 トレーニングに入るまでは重要なことではないんだ。 + +03:42.450 --> 03:45.540 +でも、 データを見ている今だからこそ、 指摘しておきたかったんだ。 + +03:46.470 --> 03:48.510 +そうだ。 + +03:48.720 --> 03:50.790 +申し訳ないが、 やり過ぎた。 + +03:50.790 --> 03:53.010 +今、 このデータを見たところだ。 + +03:53.040 --> 03:57.630 +次にすることは、 正しい量子化設定を選ぶことだ。 + +03:57.630 --> 04:01.530 +定数を4ビットより上に設定したんだ。 + +04:01.560 --> 04:03.420 +今回はtrueに設定した。 + +04:03.450 --> 04:06.030 +行って確認してみよう。 + +04:06.060 --> 04:06.810 +これでよし。 + +04:06.810 --> 04:08.970 +量子4ビットが真にセットされる。 + +04:08.980 --> 04:14.320 +それで、 もう一度下に戻ってきたら、 4ビットの量子化を選ぶんだ。 + +04:14.320 --> 04:17.110 +そして、 8ビットを選ぶとどうなるかをお見せしよう。 + +04:17.110 --> 04:20.680 +しかし、 我々は本当に極小の4ビットバージョンを選ぶつもりだ。 + +04:21.100 --> 04:24.370 +そして、 トークナイザーとモデルをロードする。 + +04:24.370 --> 04:26.680 +このセルを走らせるつもりはない。 + +04:26.680 --> 04:28.870 +メモリーの中にあるのがわかるだろう。 + +04:28.870 --> 04:30.730 +2回目を実行するとメモリが足りなくなる。 + +04:31.810 --> 04:36.100 +ここで行うのは、 トークナイザーを読み込むことだ。 + +04:36.130 --> 04:39.640 +ここには、 よく目にするような定型文のようなものが少しある。 + +04:39.760 --> 04:45.670 +つまり、 トークナイザーがシーケンスの最後を埋める必要がある場合は、 文末トークンを使い、 + +04:45.670 --> 04:48.970 +それを繰り返すように指示するのです。 + +04:48.970 --> 04:51.400 +そうすれば、 右側に表示されるはずだ。 + +04:51.400 --> 04:54.430 +これは、 トレーニングのときに起こる標準的なことだ。 + +04:54.430 --> 04:56.200 +今は実際に使うことはない。 + +04:56.320 --> 04:57.910 +だから、 
心配する必要はないよ。 + +04:57.910 --> 04:59.740 +でも、 でも、 このようなことはあちこちで目にすることだろう。 + +04:59.740 --> 05:04.300 +このラインもそうだが、 非常に標準的なセットアップだ。 + +05:04.300 --> 05:10.540 +私たちがやっているのは、 トークナイザーを作り、 llama 3を読み込むことです。 ベースモデル1台。 + +05:10.540 --> 05:15.370 +そして、 5を使い切っている。 あなたが期待している6GBのメモリ。 + +05:15.370 --> 05:22.090 +そうだ、 55だ。 9、 それは私が下で推論を行ったからだと思う。 + +05:22.240 --> 05:29.710 +ええと、 でも、 そうだね、 4ビットにスリム化したモデルなんだ。 + +05:30.250 --> 05:34.810 +この関数は、 最近フロンティア・モデルの価格抽出で使ったので、 + +05:34.840 --> 05:43.780 +おなじみのものだろう。 + +05:43.780 --> 05:55.570 +例えば、 priceをドル999で抽出するような場合、 文字列として持っている必要がある。 + +05:55.570 --> 05:57.040 +だから、 それではうまくいかない。 + +05:57.040 --> 06:01.120 +価格は9999ドルだ。 + +06:01.540 --> 06:04.600 +価格は999ドル。 + +06:04.840 --> 06:05.770 +とても安い。 + +06:07.060 --> 06:07.960 +何でもいい。 + +06:08.260 --> 06:10.210 +ああ、 そうなるといいんだけど......。 + +06:10.240 --> 06:10.540 +そうだ。 + +06:10.570 --> 06:12.610 +99999を抜き取るということだ。 + +06:12.610 --> 06:16.760 +しかし、 このモデルは、 プロンプトの中で提供されることは分かっている。 + +06:21.860 --> 06:25.820 +そして、 このモデルが予測する。 + +06:25.820 --> 06:30.080 +これがテスト・ハーネスで使用する関数だ。 + +06:30.080 --> 06:33.020 +これは、 そのことを伝える機能である。 + +06:33.050 --> 06:34.790 +これからプロンプトを出す。 + +06:34.790 --> 06:37.550 +そして、 その費用がいくらかかるのかを知りたい。 + +06:37.550 --> 06:44.540 +数週間前にやったのと同じように、 推論モードでモデルを呼び出す方法だ。 + +06:44.810 --> 06:52.490 +ええと、 プロンプトをトークナイザーのドット・エンコードを使ってエンコードします。 + +06:52.490 --> 06:55.820 +これをGPUに押し出す。 + +06:56.510 --> 07:01.400 +あー、 これはただ、 あー、 超どうでもいいことなんだ。 + +07:01.400 --> 07:03.020 +警告が表示されなくなる。 + +07:03.020 --> 07:06.680 +だから、 実際には何の影響もない。 + +07:07.190 --> 07:22.880 +正確には、 入力トークンの領域で起こっていることを予測しようとするのを防いでいるんだ。 + +07:22.880 --> 07:24.170 +いずれにせよ、 そうなるだろう。 + +07:24.170 --> 07:27.230 +しかし、 もし私たちがそれを明確に伝えなければ、 警告を出すだろう。 + +07:27.410 --> 07:33.380 +そこで、 ベースモデルをレンマ3と呼ぶことにする。 1. 
+ +07:33.380 --> 07:36.950 +そしてgenerateメソッドを呼び出す。 + +07:36.980 --> 07:38.720 +インプットを渡す。 + +07:38.720 --> 07:41.930 +私たちは新規トークンの最大数を言うつもりだ。 + +07:41.930 --> 07:43.340 +もっと少ない数字にできるはずだ。 + +07:43.340 --> 07:44.900 +本当に必要なのはトークン1つだけだ。 + +07:44.900 --> 07:51.680 +万が一、 別のドル記号などが表示された場合に備えて、 最大4つのトークンを生成できるようにしている。 + +07:52.100 --> 07:58.130 +ええと、 警告を出さないように設定したアテンションマスクの中を通過するんだ。 + +07:58.130 --> 08:00.590 +そしてこれは、 ただひとつの答えを返してほしいと言っているにすぎない。 + +08:00.590 --> 08:03.290 +複数の答えが返ってくることは避けたい。 + +08:03.680 --> 08:08.000 +そして、 その1つの答えを返すと、 また送られてくる。 + +08:08.000 --> 08:13.130 +そして、 トークナイザー・ドット・デコードを呼び出して文字列に戻す。 + +08:13.130 --> 08:15.830 +そして、 その文字列を取り出す。 + +08:16.340 --> 08:17.000 +分かった。 + +08:17.000 --> 08:18.240 +だからエキサイティングだ。 + +08:18.240 --> 08:20.280 +思い出してみよう。 + +08:20.280 --> 08:27.270 +では、 0番目、 つまり最初のテスト項目を例にとると、 こうなる。 + +08:27.270 --> 08:30.000 +純正のACコンプレッサーだ。 + +08:30.030 --> 08:33.780 +実際の価格は374ドル。 + +08:33.810 --> 08:34.920 +誰が知っていた? + +08:34.920 --> 08:37.440 +では、 最初のショットをご覧いただこう。 + +08:37.440 --> 08:40.080 +だから、 モデルということになる。 を予測する。 + +08:42.870 --> 08:44.010 +テストゼロ。 + +08:44.010 --> 08:48.390 +そして、 そのプロンプトを出すために、 私はテキストを呼び出すだけだ。 + +08:49.050 --> 08:50.190 +準備はできているか? + +08:50.220 --> 08:50.970 +さあ、 始めよう。 + +08:51.000 --> 09:02.850 +ラマ 3. 
1ベースモデルは、 何か修理キット大丈夫とOEM ACコンプレッサーの価格を予測しようとする。 + +09:02.850 --> 09:07.470 +そして、 1800ドルという予想はかなり外れている。 + +09:07.470 --> 09:12.960 +ということは、 リャマが3位になるには悪い予兆かもしれない。 ベースモデルは1台。 + +09:12.960 --> 09:20.460 +ただ、 最初の例は運が悪かっただけかもしれないが、 それは次のビデオで明らかになるだろう。 diff --git a/week5/community-contributions/subtitles/srts/59507423/ko_KR.srt b/week5/community-contributions/subtitles/srts/59507423/ko_KR.srt new file mode 100755 index 0000000..965e61d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507423/ko_KR.srt @@ -0,0 +1,433 @@ +WEBVTT + +00:00.920 --> 00:05.060 +아주 오래 전에 데이터 세트를 만들던 때를 기억하실 거예요 + +00:05.060 --> 00:08.960 +데이터를 허깅페이스에 업로드 했어요 + +00:08.990 --> 00:13.760 +그 시점부터, 우린 피클 파일도 제작한 이후로 그 시점부터 피클 파일의 데이터를 + +00:13.760 --> 00:15.200 +로드하고 있어요 + +00:15.200 --> 00:20.090 +하지만 구글 Colab에 있으니 H깅페이스 허브에서 데이터를 + +00:20.090 --> 00:27.050 +수집하는 게 가장 쉽죠 자신만의 모델을 만드는 이런 과정의 아주 전형적인 작업이에요 + +00:27.080 --> 00:28.400 +그럼 시작할게요 + +00:28.430 --> 00:34.550 +데이터셋을 로드합니다 얼굴 로드 데이터셋 메서드를 끌어안고 데이터셋 이름을 전달합니다. + +00:34.550 --> 00:40.190 +그리고 그것을 기차와 테스트, 데이터셋 이름을 상수에 나눠 놓죠. + +00:40.220 --> 00:44.930 +그 후에 첫 번째 훈련 데이터 포인트를 볼 수 있어요 + +00:45.080 --> 00:48.710 +텍스트와 가격이 있는 것 같아요 + +00:48.740 --> 00:51.830 +우리가 명시적으로 설정한 걸 기억하실 거예요 + +00:51.830 --> 00:54.800 +글이 우리의 프롬프트예요 + +00:54.830 --> 01:00.590 +가장 가까운 달러의 가격은 얼마일까요? 
제품 설명과 가격은 + +01:00.590 --> 01:07.070 +달러 그다음은 가격을 가장 가까운 숫자로 반올림해 볼게요 + +01:07.070 --> 01:08.330 +그리고 위에도요 + +01:08.330 --> 01:09.920 +이건 얼마냐고 물어볼게요 + +01:09.950 --> 01:11.330 +최대한 많이요 + +01:11.720 --> 01:18.560 +이렇게 하는 이유는 비트를 좀 더 쉽게 만들기 위해서예요 80억 개의 변수가 있는 + +01:18.560 --> 01:20.870 +단일 모델이죠 + +01:21.230 --> 01:26.870 +개척 모델로 보낼 때 그걸 지정할 필요가 없었어요 센트에 대한 결정을 스스로 + +01:26.870 --> 01:29.030 +내릴 만큼 강력하니까요 + +01:29.030 --> 01:33.710 +하지만 이 경우에는 최대한 단순하게 만들고 싶어요 + +01:33.920 --> 01:40.400 +그리고 이건 항상 라마 3에서 토큰 하나로만 매핑되니까요 아주 쉽게 만들었어요 그냥 + +01:40.400 --> 01:46.490 +저기 있는 토큰 하나만 예측하면 되죠 + +01:46.490 --> 01:50.180 +그걸 잘 하는 방법을 배우려고 할 거예요 + +01:50.540 --> 01:54.320 +이 데이터 세트에는 실제 가격도 있어요 + +01:54.680 --> 02:00.560 +테스트 데이터를 보고 첫 번째 점을 보면 구조는 아주 비슷한데 한 가지 + +02:00.560 --> 02:02.600 +작은 차이가 있어요 + +02:02.600 --> 02:03.950 +그 차이가 뭔지 아세요? + +02:04.130 --> 02:05.030 +물론 그러시겠죠 + +02:05.070 --> 02:11.220 +물론 이 시점에서 테스트 데이터에 제공된 가격은 없어요 + +02:11.250 --> 02:13.260 +텍스트는 텍스트가 될 거예요 + +02:13.290 --> 02:14.970 +이게 얼마 정도 해요? + +02:14.970 --> 02:17.340 +이 텍스트를 전달해요 + +02:17.340 --> 02:21.870 +다음 토큰을 예측하는 게 우리 모델의 임무죠 + +02:21.900 --> 02:25.260 +다음 토큰이 나올 확률은 얼마나 될까요? + +02:25.260 --> 02:33.090 +3이라는 숫자와 일치하는 토큰이 나올 확률이 높기를 바라야죠 74,000달러예요 실제 + +02:33.180 --> 02:35.490 +가격과 같네요 + +02:35.550 --> 02:37.350 +그게 이번 과제예요 + +02:37.350 --> 02:42.810 +이것은 하나의 토큰에 대응하기 때문에 다음 토큰을 예측하는 것이 매우 어렵습니다. + +02:42.810 --> 02:47.760 +하나의 토큰이 비용을 나타내니까요. Get in get get it. 
+ +02:48.030 --> 02:52.620 +또 하나 언급할 점은 기억하실지 모르겠지만 이 텍스트가 정확히 + +02:52.620 --> 02:58.140 +179t less 토큰에 맞는지 확인하기 위해 우리가 좀 고생을 했었죠 + +02:58.260 --> 03:05.670 +그 덕분에 이제 알 수 있는 게 여기 연속적으로 최대 시퀀스 길이가 182이라고 + +03:05.670 --> 03:08.460 +나오네요 + +03:08.880 --> 03:10.320 +몇 가지 패를 더했어요 + +03:10.320 --> 03:13.140 +여기 있네요, 179개 + +03:13.140 --> 03:20.790 +하지만 저는 여분의 토큰을 몇 개 추가할 겁니다 토큰라이저는 문장 토큰의 시작 부분을 + +03:20.790 --> 03:26.880 +순서 시작 부분에 추가하고 문장 끝이나 패드 토큰을 추가할 수도 + +03:26.910 --> 03:28.890 +있기 때문이죠 + +03:28.890 --> 03:34.500 +실수로 가장 중요한 토큰의 가격을 오차로 처리하는 일은 없었으면 합니다 + +03:34.500 --> 03:37.020 +마지막에 나올 토큰이죠 + +03:37.020 --> 03:42.450 +약간의 여유만 있다면요 사실 훈련이 시작되기 전까진 중요하지 않아요 Get in get + +03:42.450 --> 03:45.540 +데이터를 보고 있으니 지금 지적하고 싶었어요 + +03:46.470 --> 03:48.510 +자, 됐어요 + +03:48.720 --> 03:50.790 +미안해요, 너무 심했어요 + +03:50.790 --> 03:53.010 +방금 이 데이터를 봤어요 + +03:53.040 --> 03:57.630 +다음으로 할 일은 올바른 퀀타이즈 구성을 고르는 거죠 + +03:57.630 --> 04:01.530 +퀀트 포 비트 이상으로 상시 유지하기로 했어요 + +04:01.560 --> 04:03.420 +이 경우엔 true로 설정했죠 + +04:03.450 --> 04:06.030 +가서 확인해 보죠 + +04:06.060 --> 04:06.810 +됐어요 + +04:06.810 --> 04:08.970 +퀀트 포 비트가 트루로 설정됐어요 + +04:08.980 --> 04:14.320 +이제 다시 내려오면 for 비트 퀀타이즈를 선택할게요 + +04:14.320 --> 04:17.110 +8비트를 선택하면 어떻게 되는지 보여드릴게요 + +04:17.110 --> 04:20.680 +하지만 아주 작은 4비트 버전을 고를 거예요 + +04:21.100 --> 04:24.370 +그리고 토큰라이저와 모델을 로드하죠 + +04:24.370 --> 04:26.680 +이 방은 안 쓸 거예요 이미 썼으니까요 + +04:26.680 --> 04:28.870 +여기 메모리에 남아 있어요 + +04:28.870 --> 04:30.730 +두 번 실행하면 메모리가 부족할 거예요 + +04:31.810 --> 04:36.100 +여기서 토큰라이저를 로드할 거예요 + +04:36.130 --> 04:39.640 +비트 박스는 상용적인 내용으로 자주 보게 될 거예요 + +04:39.760 --> 04:45.670 +토큰라이저에 이렇게 알려 주는 거예요 시퀀스 끝에 패드를 넣어야 할 때는 + +04:45.670 --> 04:48.970 +그냥 문장 끝부분을 쓰면 된다고요 + +04:48.970 --> 04:51.400 +오른쪽에 이렇게 해야 해요 + +04:51.400 --> 04:54.430 +훈련할 때 흔히 하는 거예요 + +04:54.430 --> 04:56.200 +당장은 안 쓸 거예요 + +04:56.320 --> 04:57.910 +걱정 안 하셔도 돼요 + +04:57.910 --> 04:59.740 +하지만 어디서나 볼 수 있죠 + +04:59.740 --> 05:04.300 +아주 표준적인 셋업이죠 여기 이 라인처럼요 이것도 지금 당장 걱정하실 필요는 없어요 + 
+05:04.300 --> 05:10.540 +토큰이저를 만들어 llama 3을 로드하고 있어요 기본 모델 1개요 + +05:10.540 --> 05:15.370 +5를 다 쓰고 있어요 메모리가 6GB예요 + +05:15.370 --> 05:22.090 +저기 있네요, 55번 9개인 것 같아요 제가 아래에 추론을 했거든요 + +05:22.240 --> 05:29.710 +아주 날씬한 4인치 비트 모델이에요 + +05:30.250 --> 05:34.810 +이 함수는 여러분께 익숙할 겁니다 최근 프론티어 모델에서 + +05:34.840 --> 05:43.780 +가격 추출에 사용했거든요 텍스트를 추출하죠 어떤 텍스트든요 예측된 가격을 추출해요 + +05:43.780 --> 05:55.570 +가격을 추출하는 이런 걸 하면 가격은 $999 문자열로 이걸 넣어야겠죠 + +05:55.570 --> 05:57.040 +잘 안 될 것 같아요 + +05:57.040 --> 06:01.120 +가격은 달러 9999 어쩌고저쩌고죠 + +06:01.540 --> 06:04.600 +가격은 999예요 + +06:04.840 --> 06:05.770 +정말 싸요 + +06:07.060 --> 06:07.960 +됐어요 + +06:08.260 --> 06:10.210 +결과가 기대되네요 + +06:10.240 --> 06:10.540 +네 + +06:10.570 --> 06:12.610 +99999마리를 잡아낼 거예요 + +06:12.610 --> 06:16.760 +모델은 프롬프트에서 제공될 거라는 걸 알죠 + +06:16.760 --> 06:21.680 +9단계가 모두 포함돼야 해요 + +06:21.860 --> 06:25.820 +이 모델이 예측해요 + +06:25.820 --> 06:30.080 +이게 시험용 하네스에 사용할 함수예요 + +06:30.080 --> 06:33.020 +우리가 그걸 알려주는 함수죠 + +06:33.050 --> 06:34.790 +프롬프트를 하나 드리죠 + +06:34.790 --> 06:37.550 +비용은 얼마나 드는지 알고 싶어요 + +06:37.550 --> 06:44.540 +이건 추론 모드의 모델을 부르는 방법입니다 몇 주 전에 했던 것과 유사하죠 + +06:44.810 --> 06:52.490 +프롬프트를 가져다가 Tokenizer.ex코드를 이용해 암호화해요 + +06:52.490 --> 06:55.820 +이건 GPU 쪽으로 밀어내죠 + +06:56.510 --> 07:01.400 +이건 그냥... 별로 중요하지 않은 거예요 + +07:01.400 --> 07:03.020 +경고문을 인쇄하는 걸 막아주죠 + +07:03.020 --> 07:06.680 +그럼 아무 영향도 없군요 + +07:07.190 --> 07:16.040 +어, 그리고, 음 정확히 말하자면, 입력 토큰 영역에서 일어나는 일을 예측하지 못하게 합니다. + +07:16.040 --> 07:22.880 +예측하지 않고, 이후에 일어날 일을 예측하고 싶어하죠. 
+ +07:22.880 --> 07:24.170 +어차피 그렇게 될 거예요 + +07:24.170 --> 07:27.230 +하지만 우리가 말하지 않았다면 경고가 나왔을 거예요 + +07:27.410 --> 07:33.380 +결과물을 위해 기본 모델 lemma 3을 호출하죠 1번요 + +07:33.380 --> 07:36.950 +그 위에 생성 메서드를 호출할 거예요 + +07:36.980 --> 07:38.720 +입력값을 통과시키죠 + +07:38.720 --> 07:41.930 +최대 새 토큰을 위한 거라고 하죠 + +07:41.930 --> 07:43.340 +훨씬 적은 숫자로 만들 수 있어요 + +07:43.340 --> 07:44.900 +토큰 하나만 있으면 돼요 + +07:44.900 --> 07:51.680 +4개까지 토큰을 생성하도록 할 것입니다 달러 기호를 또 출력할 수 있으니까요 + +07:52.100 --> 07:58.130 +방금 설치한 주의력 마스크를 통과시키면 경고가 안 뜨죠 + +07:58.130 --> 08:00.590 +이건 하나의 답만 원한다는 거죠 + +08:00.590 --> 08:03.290 +여러 가지 답이 나오면 안 돼요 + +08:03.680 --> 08:08.000 +그 답에 대한 답변은 그 답 하나를 취하면 다시 보내지죠 + +08:08.000 --> 08:13.130 +Tokenizer.Dcoode를 호출해 다시 문자열로 바꾸죠 + +08:13.130 --> 08:15.830 +그 문자열을 추출해요 + +08:16.340 --> 08:17.000 +좋아요 + +08:17.000 --> 08:18.240 +신나네요 + +08:18.240 --> 08:20.280 +우리 스스로 되새겨 보죠 + +08:20.280 --> 08:27.270 +0을 보면 첫 번째 테스트 항목이 여기 있네요 + +08:27.270 --> 08:30.000 +OEM 에어컨 압축기예요 + +08:30.030 --> 08:33.780 +실제 가격은 374달러예요 + +08:33.810 --> 08:34.920 +누가 알았겠어요 + +08:34.920 --> 08:37.440 +첫 번째 시도예요 + +08:37.440 --> 08:40.080 +Model이라고 입력하죠 예측요 + +08:42.870 --> 08:44.010 +0번째 테스트예요 + +08:44.010 --> 08:48.390 +프롬프트를 얻기 위해 get 텍스트를 호출할게요 + +08:49.050 --> 08:50.190 +그럼 준비되셨나요? 
+ +08:50.220 --> 08:50.970 +시작할게요 + +08:51.000 --> 08:59.910 +라마 3요 1번 기본 모델은 OEM AC 압축기와 수리 키트의 + +08:59.940 --> 09:02.850 +가격을 예측해 보려고 해요 + +09:02.850 --> 09:07.470 +예상 가격은 1,800달러인데 차이가 너무 커요 + +09:07.470 --> 09:12.960 +라마 3에 대한 나쁜 징조일 수도 있어요 기본 모델 1개면 돼요 + +09:12.960 --> 09:20.460 +첫 번째 예시는 운이 나빴을 뿐이지만 그건 다음 영상에서 밝혀질 거예요 diff --git a/week5/community-contributions/subtitles/srts/59507435/en_US.srt b/week5/community-contributions/subtitles/srts/59507435/en_US.srt new file mode 100755 index 0000000..027c5fc --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507435/en_US.srt @@ -0,0 +1,190 @@ +WEBVTT + +00:00.290 --> 00:05.120 +So I'm now going to talk about five important hyperparameters for the training process. + +00:05.120 --> 00:07.460 +And some of these we've talked about briefly before. + +00:07.460 --> 00:12.860 +But many of these will be somewhat new to you unless you've worked in other data science projects of + +00:12.860 --> 00:13.670 +this sort. + +00:13.700 --> 00:18.890 +And the first one I'll mention is epochs, which we did briefly mention some some time ago. + +00:18.890 --> 00:27.830 +So epochs are referring to how many times are you going to go through your entire data set, uh, as + +00:27.830 --> 00:29.300 +part of the training process. + +00:29.300 --> 00:34.370 +So you might imagine that when you're training, you take each of your training data points and you + +00:34.370 --> 00:37.670 +go through the set once and then you're done. + +00:37.670 --> 00:42.620 +But in fact, it turns out that you can get more mileage by going back a second time and going through + +00:42.620 --> 00:45.200 +all of your training data again with your model. + +00:45.230 --> 00:46.820 +Now you might think, why? + +00:46.820 --> 00:50.240 +Why does it help to go through a second time when the model already saw it once? + +00:50.240 --> 00:52.520 +So you're just giving it the same data a second time? 
+ +00:52.520 --> 00:58.160 +Well, remember when we go through the training optimization process involves going through each of + +00:58.160 --> 01:04.250 +these points and then making a very small step in the direction of making the model a little bit better, + +01:04.250 --> 01:10.770 +shifting the weights in our in our Laura matrices a little tiny bit so that next time it does a bit + +01:10.770 --> 01:11.490 +better. + +01:11.640 --> 01:16.380 +Um, so every time we go through all of the training data set, we have an opportunity to get a little + +01:16.380 --> 01:17.700 +tiny bit better. + +01:17.880 --> 01:22.860 +Um, and presumably once it's gone through, once the model is now in a bit of a different state. + +01:22.950 --> 01:27.930 +So when it sees it again, it can allow it to refine and do a little bit better. + +01:28.560 --> 01:34.980 +There is another reason why it often makes sense to have multiple epochs, and that comes down to batch + +01:34.980 --> 01:35.550 +size. + +01:35.580 --> 01:43.950 +The next hyperparameter batch size, is saying that often we don't take one data point and put it through + +01:43.980 --> 01:49.410 +the forward pass of the model to predict the next token, calculate the loss, and then go backwards + +01:49.410 --> 01:52.380 +and figure out the gradients of how much does that loss? + +01:52.410 --> 01:56.610 +Is that affected by the different weights in the parameter in the model? + +01:56.610 --> 02:00.540 +And then optimize the model by doing a little step in the right direction? + +02:00.630 --> 02:06.240 +Uh, it sometimes makes sense to do that at the same time with a bunch of data points together. + +02:06.240 --> 02:13.410 +Like often you pick a four or 8 or 16 and you do it together for for all 16. + +02:13.930 --> 02:17.950 +One reason for doing that is, is performance, that it means that you can just get through everything + +02:17.980 --> 02:18.460 +faster. + +02:18.460 --> 02:19.630 +You can do it all together. 
+ +02:19.660 --> 02:23.920 +If you can fit 16 data points on your GPU, then that's a good thing to do. + +02:23.950 --> 02:29.830 +There are some other reasons why it might actually be better to do it in batches, than to do it step + +02:29.830 --> 02:30.670 +by step. + +02:30.760 --> 02:35.920 +Um, uh, but but, uh, but the basic reason is for performance. + +02:36.100 --> 02:44.200 +When you do multiple epochs with each epoch, it's typical that you resort or you juggle up all of the + +02:44.200 --> 02:44.710 +batches. + +02:44.710 --> 02:52.030 +So they're different batches, different sets of these 16 data points that the model sees with each + +02:52.030 --> 02:52.870 +of your epochs. + +02:52.870 --> 02:58.660 +So actually in some ways the data is different for each of these epochs because it's seeing a different + +02:58.660 --> 03:02.890 +sample of your data points as it goes through them. + +03:02.890 --> 03:06.250 +And that's another reason why multiple epochs can be good. + +03:06.880 --> 03:13.120 +Now there's one very common technique which is to at the end of each epoch, you typically save your + +03:13.120 --> 03:13.630 +model. + +03:13.630 --> 03:20.040 +And it's quite common to run a bunch of epochs and then test how your model performed at the end of + +03:20.040 --> 03:27.270 +each of those epochs, and what you often find is that the model was getting better and better as it + +03:27.270 --> 03:33.570 +learned more and more in each epoch, but then you reach a certain point where the model starts to overfit, + +03:33.570 --> 03:38.070 +which we talked about last time, where it starts to get so used to seeing this training data that it + +03:38.070 --> 03:41.220 +starts to solve just for exactly that training data. + +03:41.220 --> 03:48.480 +And then when you test it, the performance gets worse because it's not expecting points outside its + +03:48.480 --> 03:49.740 +training data set. 
+ +03:49.740 --> 03:54.690 +So you start to see like better, better, better, better, worse, worse, worse. + +03:54.690 --> 03:57.450 +And then the results get continually worse. + +03:57.450 --> 04:04.410 +And what you do is you run this and you quite simply pick the epoch which gave you the best model, + +04:04.410 --> 04:05.550 +the best outcome. + +04:05.550 --> 04:08.460 +And that's the one that you consider the results of your training. + +04:08.460 --> 04:12.570 +That's the version of the fine tuned model and that's what you take forwards. + +04:12.570 --> 04:18.990 +So it's common to run a larger number of epochs and then use that kind of testing to pick which one + +04:18.990 --> 04:20.490 +was your best model. + +04:21.870 --> 04:26.910 +With that, I will pause and continue with these parameters in the next video. diff --git a/week5/community-contributions/subtitles/srts/59507435/ja_JP.srt b/week5/community-contributions/subtitles/srts/59507435/ja_JP.srt new file mode 100755 index 0000000..ece96c3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507435/ja_JP.srt @@ -0,0 +1,136 @@ +WEBVTT + +00:00.290 --> 00:05.120 +そこで今回は、 トレーニング・プロセスにおける5つの重要なハイパーパラメーターについてお話しします。 + +00:05.120 --> 00:07.460 +そのうちのいくつかは、 以前にも簡単に話したことがある。 + +00:07.460 --> 00:13.670 +しかし、 これらの多くは、 この種の他のデータ・サイエンス・プロジェクトに携わったことがない限り、 あなたにとってやや目新しいものだろう。 + +00:13.700 --> 00:18.890 +そして、 最初に挙げるのは、 少し前に少し触れたエポックだ。 + +00:18.890 --> 00:29.300 +つまりエポックとは、 トレーニング・プロセスの一環として、 データセット全体を何回処理するかということだ。 + +00:29.300 --> 00:34.370 +だから、 トレーニングするときに、 それぞれのトレーニング・データ・ポイントを取って、 そのセットを1回通して、 + +00:34.370 --> 00:37.670 +それで終わりだと想像するかもしれない。 + +00:37.670 --> 00:42.620 +しかし実際には、 2度目に戻ってすべてのトレーニングデータをもう一度モデルで調べ直すことで、 + +00:42.620 --> 00:45.200 +より多くの走行距離を得られることが判明した。 + +00:45.230 --> 00:46.820 +と思うかもしれない。 + +00:46.820 --> 00:50.240 +モデルがすでに一度見ているのに、 なぜ2度目を通すことが助けになるのか? + +00:50.240 --> 00:52.520 +では、 同じデータを2度目に与えるだけなのか? 
+ +00:52.520 --> 00:58.160 +トレーニングの最適化プロセスでは、 これらのポイントをひとつひとつ確認し、 + +00:58.160 --> 01:11.490 +モデルを少し良くする方向にごく小さな一歩を踏み出す。 + +01:11.640 --> 01:17.700 +だから、 すべてのトレーニング・データ・セットを経験するたびに、 私たちはほんの少し上達する機会を得ることになる。 + +01:17.880 --> 01:22.860 +そして恐らく、 それが完了した時点で、 モデルの状態は少し変わっている。 + +01:22.950 --> 01:27.930 +そうすれば、 再びそれを見たときに、 より洗練された、 より良いプレーができるようになる。 + +01:28.560 --> 01:35.550 +複数のエポックを持つことがしばしば理にかなっている理由はもう一つあり、 それはバッチサイズに行き着く。 + +01:35.580 --> 01:43.950 +次のハイパーパラメーター・バッチサイズは、 1つのデータポイントを次のトークンを予測するモデルのフォワードパスにかけて、 損失を計算し、 + +01:43.980 --> 01:49.410 +それから逆戻りして、 その損失がどの程度なのかの勾配を把握する、 ということをしないことが多い、 + +01:49.410 --> 01:52.380 +ということです。 + +01:52.410 --> 01:56.610 +モデルのパラメーターの重みが違うことが影響しているのでしょうか? + +01:56.610 --> 02:00.540 +そして、 正しい方向に少しずつステップを踏んでモデルを最適化する? + +02:00.630 --> 02:06.240 +多くのデータポイントを同時に扱うことは、 理にかなっていることもあるんだ。 + +02:06.240 --> 02:13.410 +よくあるように、 4人とか8人とか16人を選んで、 16人全員でやるんだ。 + +02:13.930 --> 02:18.460 +そうする理由のひとつは、 パフォーマンスだ。 + +02:18.460 --> 02:19.630 +全部一緒にできる。 + +02:19.660 --> 02:23.920 +もしGPUに16個のデータポイントを収めることができるなら、 それは良いことだ。 + +02:23.950 --> 02:30.670 +一歩一歩やるより、 まとめてやったほうがいい理由は他にもいくつかある。 + +02:30.760 --> 02:35.920 +うーん、 でも、 でも、 でも、 基本的な理由はパフォーマンスのためだよ。 + +02:36.100 --> 02:44.710 +各エポックで複数のエポックを行う場合、 リゾートするか、 すべてのバッチをジャグリングするのが一般的だ。 + +02:44.710 --> 02:52.870 +つまり、 エポックごとに異なるバッチ、 異なる16点のデータセットをモデルが見ることになる。 + +02:52.870 --> 03:02.890 +エポックごとに異なるデータを見ているのですから。 + +03:02.890 --> 03:06.250 +それが、 複数のエポックが良い理由でもある。 + +03:06.880 --> 03:13.630 +各エポックの終了時にモデルを保存するという、 非常に一般的なテクニックがある。 + +03:13.630 --> 03:27.270 +多くのエポックを実行し、 各エポックの終了時にモデルのパフォーマンスをテストするのはよくあることです。 + +03:27.270 --> 03:41.220 +多くの場合、 モデルは各エポックで学習を重ねるにつれてどんどん良くなっていきます。 + +03:41.220 --> 03:49.740 +そしてテストすると、 トレーニングデータセット外のポイントを想定していないため、 パフォーマンスが悪化する。 + +03:49.740 --> 03:54.690 +だから、 より良い、 より良い、 より良い、 より良い、 より悪い、 より悪い、 より悪い、 より悪い、 というように見えてくる。 + +03:54.690 --> 03:57.450 +そして結果は悪化の一途をたどる。 + +03:57.450 --> 04:05.550 +そしてこれを実行し、 単純に最良のモデル、 最良の結果をもたらしたエポックを選ぶのだ。 + +04:05.550 --> 04:08.460 +そして、 
それがトレーニングの成果だと考えるものだ。 + +04:08.460 --> 04:12.570 +それが微調整モデルのバージョンであり、 それを前進させるものだ。 + +04:12.570 --> 04:20.490 +そのため、 より多くのエポック数を実行し、 そのようなテストを使ってどれがベストなモデルかを選ぶのが一般的だ。 + +04:21.870 --> 04:26.910 +これで一旦中断し、 次のビデオでこれらのパラメータについて続けることにする。 diff --git a/week5/community-contributions/subtitles/srts/59507435/ko_KR.srt b/week5/community-contributions/subtitles/srts/59507435/ko_KR.srt new file mode 100755 index 0000000..7f6c806 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507435/ko_KR.srt @@ -0,0 +1,178 @@ +WEBVTT + +00:00.290 --> 00:05.120 +이제 훈련 프로세스에 필요한 5개의 하이퍼파라미터에 대해 말씀드리죠 + +00:05.120 --> 00:07.460 +이 중 일부는 전에 간단히 얘기한 적이 있죠 + +00:07.460 --> 00:12.860 +하지만 대부분은 생소할 겁니다 이런 종류의 데이터 과학 프로젝트에서 일한 적이 + +00:12.860 --> 00:13.670 +없다면요 + +00:13.700 --> 00:18.890 +첫 번째는 에포치입니다 몇 번 짧게 언급했었죠 + +00:18.890 --> 00:27.830 +epoch는 훈련 과정의 일부로 전체 데이터 세트를 몇 번이나 검토할지를 + +00:27.830 --> 00:29.300 +뜻해요 + +00:29.300 --> 00:34.370 +여러분이 훈련할 때 각각의 훈련 데이터 포인트를 가지고 세트를 + +00:34.370 --> 00:37.670 +한 번 거치면 끝이라고 생각할 수 있죠 + +00:37.670 --> 00:42.620 +하지만 사실 다시 돌아가서 훈련 데이터를 모델과 함께 다시 검토하면 더 많은 마일리지들을 얻을 수 있어요. + +00:42.620 --> 00:45.200 +Get in get get get it. + +00:45.230 --> 00:46.820 +왜냐고 궁금하시겠죠 + +00:46.820 --> 00:50.240 +모델이 이미 한 번 봤는데 왜 다시 보려는 거죠? + +00:50.240 --> 00:52.520 +두 번째에도 같은 데이터를 주나요? 
+ +00:52.520 --> 00:58.160 +훈련 최적화 과정을 거치면서 이런 점들을 하나씩 + +00:58.160 --> 01:04.250 +짚어 보고 모델이 더 잘 보이게 작은 단계를 밟아요 + +01:04.250 --> 01:11.490 +무게추를 조금씩 바꾸면 다음에는 더 잘 닦일 거예요 + +01:11.640 --> 01:16.380 +훈련 데이터 세트를 검토할 때마다 조금씩 향상될 기회가 있어요 비트 코일, 비트 + +01:16.380 --> 01:17.700 +코일, 비트 코일 + +01:17.880 --> 01:22.860 +비트가 통과하고 나면 모델이 약간 다른 상태가 되겠죠 + +01:22.950 --> 01:27.930 +비트를 다시 봤을 때 더 개선할 수 있어요 + +01:28.560 --> 01:35.550 +여러 단계가 필요한 또 다른 이유가 있습니다 바로 배치 크기인데요 + +01:35.580 --> 01:43.950 +다음 hyperpaameter 배치 사이즈는 종종 하나의 데이터 포인트를 모델의 전방으로 보내 다음 + +01:43.980 --> 01:49.410 +토큰을 예측하고 손실을 계산한 다음 되돌아가 손실된 금액의 단계별을 + +01:49.410 --> 01:52.380 +계산하지 않는다는 것을 의미하죠 + +01:52.410 --> 01:56.610 +모델 내에서 다른 무게에 영향을 받는 건가요? + +01:56.610 --> 02:00.540 +그런 다음 올바른 방향으로 작은 단계를 밟아 모델을 최적화하는 거죠 + +02:00.630 --> 02:06.240 +동시에 여러 데이터 포인트를 합치는 게 좋을 때도 있어요 + +02:06.240 --> 02:13.410 +4개나 8개, 16개를 골라서 16개 전부를 같이 하는 거죠 + +02:13.930 --> 02:17.950 +그렇게 하는 이유 중 하나는 성능입니다. 모든 것을 빠르게 할 수 있다는 것을 의미하죠. Get + +02:17.980 --> 02:18.460 +it. 
+ +02:18.460 --> 02:19.630 +다 같이 하면 돼요 + +02:19.660 --> 02:23.920 +GPU 데이터 포인트가 16개면 좋은 거죠 + +02:23.950 --> 02:29.830 +단계별로 하는 것보다 여러 번에 나눠서 하는 게 더 좋은 이유가 + +02:29.830 --> 02:30.670 +있어요 + +02:30.760 --> 02:35.920 +하지만 기본 이유는 실적 때문이에요 + +02:36.100 --> 02:44.710 +한 가지 시대로 여러 가지 시도를 할 땐 모든 시도를 한 번에 성공하는 게 일반적이죠 + +02:44.710 --> 02:52.030 +즉, 모델은 각 발생기에서 16개의 데이터 포인트를 다른 배치로 보게 되는 + +02:52.030 --> 02:52.870 +것이죠 + +02:52.870 --> 02:58.660 +사실 데이터는 어떤 면에서 각 발생에 따라 다릅니다 발생을 거치면서 + +02:58.660 --> 03:02.890 +다른 데이터 포인트의 샘플을 보기 때문이죠 + +03:02.890 --> 03:06.250 +그래서 다양한 시대가 좋은 거예요 + +03:06.880 --> 03:13.630 +아주 보편적인 기술이 하나 있는데 각 에포크의 끝에서 모델을 저장하는 거예요 + +03:13.630 --> 03:20.040 +여러 신세계를 실행한 후 각 신세계 끝에서 모델이 어떻게 수행했는지 테스트하는 + +03:20.040 --> 03:27.270 +건 흔한 일입니다 그런데 그 모델은 각 신세계에서 더 많이 배울수록 더 좋아지곤 합니다 + +03:27.270 --> 03:33.570 +하지만 지난 시간에 얘기했던 것처럼 모델이 지나치게 충족되는 지점에 도달하면 + +03:33.570 --> 03:38.070 +이 훈련 데이터에 너무 익숙해져서 그 훈련 데이터만을 + +03:38.070 --> 03:41.220 +위해 해결하기 시작하죠 + +03:41.220 --> 03:48.480 +그리고 테스트했을 때 성능이 더 나빠집니다 훈련 데이터 세트 외의 점수는 기대하지 않기 + +03:48.480 --> 03:49.740 +때문이죠 + +03:49.740 --> 03:54.690 +점점 좋아지고 좋아지고 나빠지고 나빠지는 게 보여요 + +03:54.690 --> 03:57.450 +결과는 계속 나빠지기만 하죠 Get it + +03:57.450 --> 04:04.410 +이 프로그램을 실행하면서 최고의 모델과 결과를 제공한 이포크를 고르면 + +04:04.410 --> 04:05.550 +되죠 + +04:05.550 --> 04:08.460 +훈련의 결과로 생각하는 거죠 + +04:08.460 --> 04:12.570 +그게 미세 튜닝 모델의 버전이고 그걸 앞으로 가져가는 거죠 + +04:12.570 --> 04:18.990 +더 많은 이전화를 실행해 어떤 모델이 최선인지 고르는 테스트가 + +04:18.990 --> 04:20.490 +일반적이죠 + +04:21.870 --> 04:26.910 +다음 비디오에서 이 매개 변수들을 잠시 멈추고 계속할게요 diff --git a/week5/community-contributions/subtitles/srts/59507489/en_US.srt b/week5/community-contributions/subtitles/srts/59507489/en_US.srt new file mode 100755 index 0000000..e7a4426 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507489/en_US.srt @@ -0,0 +1,205 @@ +WEBVTT + +00:01.610 --> 00:06.140 +Continuing our adventure through hyperparameters for training. 
+ +00:06.140 --> 00:11.660 +The next one is pretty crucial and it is called learning Rate. + +00:11.660 --> 00:16.220 +And again, many data scientists amongst you will know this one only too well. + +00:16.460 --> 00:23.000 +So for very, very quickly, for those that are less familiar with this, again the purpose of training + +00:23.000 --> 00:26.690 +is that you take your model, you take a training data point. + +00:26.690 --> 00:32.150 +You do what they call a forward pass, which is an inference where you go through the model and say, + +00:32.150 --> 00:38.780 +predict the next token that should come, and it gives a prediction the the, the predicted next token. + +00:39.110 --> 00:44.330 +Or in fact, it gives a probabilities of all of the possible next tokens. + +00:44.360 --> 00:49.610 +And you use that and you have the actual next token that it should have actually been. + +00:49.610 --> 00:54.380 +And you can take these two, the prediction and the actual to come up with a loss. + +00:54.500 --> 01:01.460 +How poorly did it do at predicting the actual and what you can then do is take that loss and you can + +01:01.460 --> 01:07.430 +do what they call back propagation when you go back through the model and figure out how sensitive, + +01:07.460 --> 01:13.910 +how much would I have to tweak each weight up or down in order to do a little bit better next time? + +01:14.120 --> 01:17.570 +Uh, and then you have to take a step in the direction. + +01:17.570 --> 01:23.060 +You have to shift your weights, a step in the direction to do better next time. + +01:23.060 --> 01:28.730 +And that step, that amount that you shift your weights in a good direction so that it will do a little + +01:28.760 --> 01:29.990 +bit better next time. + +01:29.990 --> 01:35.420 +When faced with exactly that training data point, uh, is called the learning rate. + +01:35.570 --> 01:42.470 +And it's typically it's it's either 0.0001 or 0.00001. 
+ +01:42.530 --> 01:45.350 +Uh, you will see some examples when we go through it. + +01:45.440 --> 01:51.290 +And there's also an ability to do have what's called a learning rate scheduler, which is when you start + +01:51.290 --> 01:57.470 +the learning rate at one number and during the course of your run over the period of several epochs, + +01:57.470 --> 02:02.720 +you gradually lower it and lower it and lower it, because as your model gets more trained, you want + +02:02.720 --> 02:08.120 +your learning rate, the amount of step that you take to get shorter and shorter and shorter until you're + +02:08.120 --> 02:11.300 +only making tiny Any adjustments to your network. + +02:11.330 --> 02:15.050 +Because you're pretty confident that you're in the right vicinity. + +02:15.050 --> 02:17.540 +So that is learning rates. + +02:17.570 --> 02:21.440 +Again, it will be old hat to many people who have a data science background. + +02:21.440 --> 02:23.450 +It might be new to others. + +02:24.050 --> 02:27.920 +Gradient accumulation is a way. + +02:27.950 --> 02:35.210 +It's it's a technique that allows you to improve speed of going through training where you say, okay, + +02:35.210 --> 02:40.760 +so what we're going to do is we're going to we normally do a forward pass. + +02:40.970 --> 02:46.340 +We get the, the, the, the, the loss as I just described it. + +02:46.370 --> 02:52.400 +We then work out the gradients going backwards and then we take a step in the right direction. + +02:52.400 --> 02:58.790 +And then we repeat gradient accumulation says well perhaps what we can do is we can do a forward pass + +02:58.790 --> 03:03.800 +and get the gradients and don't take a step, just do a second forward pass and get the gradients and + +03:03.800 --> 03:07.040 +add up those gradients and do that a few more times. + +03:07.040 --> 03:13.790 +Just keep accumulating these gradients and then take a step and then optimize the network. 
+ +03:14.060 --> 03:19.170 +And that just means that you do these steps less frequently, which means it can run a bit faster. + +03:19.350 --> 03:21.900 +Um, in some ways it's a bit similar to batch size. + +03:21.900 --> 03:27.120 +It has some some there's some sort of a conceptual similarities there, because you're sort of grouping + +03:27.120 --> 03:30.060 +things together and just taking one slightly bigger step. + +03:30.330 --> 03:35.070 +Um, in the hyperparameters that I've set up, I'm not using gradient accumulation. + +03:35.070 --> 03:36.540 +I've got that set to one. + +03:36.690 --> 03:39.480 +But I did try it in the past, and I see how it speeds things up. + +03:39.480 --> 03:44.220 +And so you might well be interested in experimenting with that and see what it does. + +03:44.220 --> 03:46.710 +So that is gradient accumulation. + +03:47.100 --> 03:50.700 +And then last but not least the optimizer. + +03:50.730 --> 03:57.030 +The optimizer is the formula that's used when it's time, when you've got the gradients you've got your + +03:57.030 --> 03:57.780 +learning rate. + +03:57.780 --> 04:05.730 +And it's time to now make an update to your neural network to shift everything a little bit in a good + +04:05.730 --> 04:11.730 +direction, so that next time it's that little bit more likely to predict the right next token. + +04:11.730 --> 04:14.550 +And the process for doing that is called the optimizer. + +04:14.550 --> 04:21.090 +And there are a bunch of well-known formulae for how you could do that, each with pros and cons, you'll + +04:21.090 --> 04:22.860 +see we pick one in particular. + +04:22.860 --> 04:27.180 +That's one that is a little bit more expensive in terms of the performance. + +04:27.180 --> 04:31.050 +It's a it's a bit harder work, but it leads to good outcomes. + +04:31.050 --> 04:33.840 +So it's the one that I would recommend starting with. 
+ +04:33.990 --> 04:40.170 +And then if you do end up having any kind of memory problems, there are alternatives that are, um, + +04:40.170 --> 04:42.150 +that consume less memory. + +04:42.300 --> 04:44.820 +But that that process is called optimization. + +04:44.820 --> 04:49.410 +And the algorithm that you pick to do it is called the optimizer. + +04:49.410 --> 04:54.210 +And it's another hyperparameter, and that you can try different ones and see how they do. + +04:54.540 --> 04:57.390 +So I realize it's an awful lot of talking. + +04:57.390 --> 05:04.860 +And I've also used the the conversation about hyperparameters to explain a bit about the training process. + +05:04.950 --> 05:10.590 +Uh, but hopefully this was good foundational background that's prepared you for what is just about + +05:10.590 --> 05:17.340 +to happen now, which is we're going back to Google Colab, where we are going to set up and kick off + +05:17.340 --> 05:24.120 +our SFT trainer to fine tune our own, uh, specialized LM. 
diff --git a/week5/community-contributions/subtitles/srts/59507489/ja_JP.srt b/week5/community-contributions/subtitles/srts/59507489/ja_JP.srt new file mode 100755 index 0000000..20ee615 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507489/ja_JP.srt @@ -0,0 +1,178 @@ +WEBVTT + +00:01.610 --> 00:06.140 +トレーニングのためのハイパーパラメータについての冒険を続ける。 + +00:06.140 --> 00:11.660 +次はかなり重要で、 学習率と呼ばれるものだ。 + +00:11.660 --> 00:16.220 +そしてまた、 皆さんの中の多くのデータサイエンティストは、 このことをよくご存知だろう。 + +00:16.460 --> 00:26.690 +トレーニングの目的は、 モデルを作成し、 トレーニング・データを取得することです。 + +00:26.690 --> 00:32.150 +フォワード・パスと呼ばれる推論で、 モデルを通して次に来るはずのトークンを予測し、 + +00:32.150 --> 00:38.780 +予測された次のトークンを予測する。 + +00:39.110 --> 00:44.330 +あるいは実際には、 次のトークンの可能性をすべて確率で示す。 + +00:44.360 --> 00:49.610 +そして、 そのトークンを使って、 実際にあるべき次のトークンを手に入れる。 + +00:49.610 --> 00:54.380 +そして、 この2つ、 予想と実際を総合して、 負けを導き出すことができる。 + +00:54.500 --> 01:01.460 +次にできることは、 そのロスを利用して、 バックプロパゲーション(逆伝播法)と呼ばれる、 + +01:01.460 --> 01:13.910 +モデルを遡り、 次回はもう少し良い結果を出すために、 各ウェイトをどれだけ上下に微調整すれば良いかを割り出すことだ。 + +01:14.120 --> 01:17.570 +そして、 その方向に一歩踏み出すんだ。 + +01:17.570 --> 01:23.060 +ウェイトを移動させ、 次はもっとうまくやるための一歩を踏み出すのだ。 + +01:23.060 --> 01:29.990 +その一歩、 その一歩が、 ウェイトを良い方向にシフトさせ、 次回はもう少し良い結果が出るようにする。 + +01:29.990 --> 01:35.420 +まさにその学習データに直面したとき、 ええと、 学習率と呼ばれる。 + +01:35.570 --> 01:42.470 +典型的なのは0か0かだ。 0001 または 0. 00001. 
+ +01:42.530 --> 01:45.350 +ええと、 いくつか例を挙げればわかるだろう。 + +01:45.440 --> 01:51.290 +学習レート・スケジューラーと呼ばれる機能もあります。 これは、 学習レートをある数値で開始し、 + +01:51.290 --> 01:57.470 +数回のエポックにわたって実行する間に、 学習レートを徐々に下げていくものです。 + +01:57.470 --> 02:02.720 +モデルがより訓練されるにつれて、 学習レート、 つまり学習ステップをどんどん短くしていき、 + +02:02.720 --> 02:11.300 +ネットワークにわずかな調整を加えるだけにしたいからです。 + +02:11.330 --> 02:15.050 +なぜなら、 あなたは自分が正しい場所にいると確信しているからだ。 + +02:15.050 --> 02:17.540 +それが学習率というわけだ。 + +02:17.570 --> 02:21.440 +繰り返しになるが、 データ・サイエンスのバックグラウンドを持つ多くの人々にとっては、 古くからある話だろう。 + +02:21.440 --> 02:23.450 +他の人にとっては初めてのことかもしれない。 + +02:24.050 --> 02:27.920 +グラデーションの蓄積は一つの方法だ。 + +02:27.950 --> 02:35.210 +トレーニングの中で、 よし、 これからやるのは普通に前方へのパスだ、 + +02:35.210 --> 02:40.760 +と言ってスピードを向上させるテクニックなんだ。 + +02:40.970 --> 02:46.340 +私たちは、 今私が説明したような喪失感を味わうことになる。 + +02:46.370 --> 02:52.400 +そして逆算して勾配を計算し、 正しい方向に一歩を踏み出す。 + +02:52.400 --> 02:58.790 +そして、 グラデーションの累積を繰り返すのだが、 おそらくできることは、 フォワードパスをしてグラデーションを取得し、 + +02:58.790 --> 03:03.800 +一歩も動かずに、 2回目のフォワードパスをしてグラデーションを取得し、 それらのグラデーションを加算して、 + +03:03.800 --> 03:07.040 +さらに数回繰り返すことだ。 + +03:07.040 --> 03:13.790 +勾配を積み重ね、 一歩を踏み出し、 ネットワークを最適化する。 + +03:14.060 --> 03:19.170 +そしてそれは、 これらのステップを行う頻度が少ないということであり、 つまり、 もう少し速く走ることができるということだ。 + +03:19.350 --> 03:21.900 +ある意味、 バッチサイズに似ている。 + +03:21.900 --> 03:30.060 +それは、 ある意味、 概念的な類似性があるんだ。 + +03:30.330 --> 03:35.070 +私が設定したハイパーパラメーターでは、 勾配累積を使っていません。 + +03:35.070 --> 03:36.540 +私はそれを1に設定している。 + +03:36.690 --> 03:39.480 +でも、 過去に試したことがあるんだ。 + +03:39.480 --> 03:44.220 +だから、 それを試してみて、 どんな効果があるか見てみるのもいいかもしれない。 + +03:44.220 --> 03:46.710 +これがグラディエントの蓄積だ。 + +03:47.100 --> 03:50.700 +そして最後がオプティマイザーだ。 + +03:50.730 --> 03:57.780 +オプティマイザーは、 勾配が決まったら、 学習率が決まったら、 時間になったら使う計算式だ。 + +03:57.780 --> 04:05.730 +そして今、 ニューラルネットワークにアップデートを加え、 すべてを良い方向に少しずつシフトさせ、 + +04:05.730 --> 04:11.730 +次回、 正しい次のトークンを予測する可能性を少しでも高めるのだ。 + +04:11.730 --> 04:14.550 +そして、 そのためのプロセスをオプティマイザーと呼ぶ。 + +04:14.550 --> 04:22.860 +その方法については有名な計算式がたくさんあり、 それぞれに長所と短所がある。 + +04:22.860 --> 04:27.180 +性能的には少し高価なものだ。 + 
+04:27.180 --> 04:31.050 +それは少し大変な仕事だが、 良い結果につながる。 + +04:31.050 --> 04:33.840 +だから、 まずはそこから始めることをお勧めする。 + +04:33.990 --> 04:42.150 +そして、 もしメモリに何らかの問題が生じても、 より少ないメモリ消費で済む代替手段がある。 + +04:42.300 --> 04:44.820 +しかし、 そのプロセスは最適化と呼ばれている。 + +04:44.820 --> 04:49.410 +そのために選ぶアルゴリズムがオプティマイザーと呼ばれるものだ。 + +04:49.410 --> 04:54.210 +これもハイパーパラメーターのひとつで、 いろいろなものを試して、 その結果を見ることができる。 + +04:54.540 --> 04:57.390 +だから、 すごくたくさんのことを話しているんだ。 + +04:57.390 --> 05:04.860 +また、 ハイパーパラメーターについての会話を使って、 トレーニング・プロセスについても少し説明した。 + +05:04.950 --> 05:17.340 +GoogleColabに戻り、 SFTトレーナーをセットアップして、 + +05:17.340 --> 05:24.120 +自分たちの専門的なLMを微調整する。 diff --git a/week5/community-contributions/subtitles/srts/59507489/ko_KR.srt b/week5/community-contributions/subtitles/srts/59507489/ko_KR.srt new file mode 100755 index 0000000..d872290 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507489/ko_KR.srt @@ -0,0 +1,205 @@ +WEBVTT + +00:01.610 --> 00:06.140 +하이퍼파라미터에서 훈련을 계속하죠 + +00:06.140 --> 00:11.660 +다음 단계는 아주 중요한 학습률이에요 + +00:11.660 --> 00:16.220 +여러분 중 데이터 과학자들이 이걸 너무 잘 알 거예요 + +00:16.460 --> 00:23.000 +아주 잠깐, 이것에 덜 익숙한 분들을 위해 설명하자면 훈련의 목적은 여러분의 모델을 + +00:23.000 --> 00:26.690 +트레이닝 데이터 포인트를 취하는 거죠 + +00:26.690 --> 00:32.150 +전진 패스라는 걸 하는데요 모델을 살펴보고 다음 + +00:32.150 --> 00:38.780 +토큰이 올지 예측하는 겁니다 예측된 다음 토큰을 주는 거죠 + +00:39.110 --> 00:44.330 +아니면 가능한 모든 다음 패의 확률을 주죠 + +00:44.360 --> 00:49.610 +그걸 사용하면 실제 있어야 할 다음 토큰이 생기죠 + +00:49.610 --> 00:54.380 +이 두 가지를 예견하고 실제 손실을 계산해 보세요 + +00:54.500 --> 01:01.460 +실제 비트를 예측하는 데 얼마나 형편없었는지를요 그런 다음 할 수 있는 건 손실을 받아들이고 + +01:01.460 --> 01:07.430 +그것의 역전파를 할 수 있습니다 모델로 돌아가서 각 비트를 얼마나 민감하게 조정해야 + +01:07.460 --> 01:13.910 +하는지 알아내는 거죠 다음에 좀 더 잘하기 위해 각 비트를 얼마나 조정할까요? 
+
+01:14.120 --> 01:17.570
+그 방향으로 한 걸음씩 나아가야 해요
+
+01:17.570 --> 01:23.060
+역기를 옮겨야 해요 다음번엔 더 잘하도록 한 걸음씩 나아가는 거죠
+
+01:23.060 --> 01:28.730
+비트를 좋은 방향으로 많이 옮겨야 다음번에 더
+
+01:28.760 --> 01:29.990
+잘 걸리죠
+
+01:29.990 --> 01:35.420
+훈련 데이터 포인트를 마주했을 때 학습률이라고 하죠
+
+01:35.570 --> 01:42.470
+일반적으로 0.0001이나 0.00001 정도예요
+
+01:42.530 --> 01:45.350
+살펴보면 몇 가지 예가 보일 거예요
+
+01:45.440 --> 01:51.290
+또 학습률 스케줄러라는 기능도 있습니다 여러
+
+01:51.290 --> 01:57.470
+개혁을 거치면서 학습률을 한 숫자로 시작해서 점점
+
+01:57.470 --> 02:02.720
+낮추는 거죠 모델이 훈련될수록 학습률이
+
+02:02.720 --> 02:08.120
+점점 낮아져서 네트워크에 아주 작은 변화만
+
+02:08.120 --> 02:11.300
+줄어드는 거예요
+
+02:11.330 --> 02:15.050
+거의 맞혔다고 확신하고 있잖아요
+
+02:15.050 --> 02:17.540
+이게 학습률이에요
+
+02:17.570 --> 02:21.440
+데이터 과학을 전공한 많은 사람들에겐 익숙한 일이겠죠
+
+02:21.440 --> 02:23.450
+다른 사람에겐 생소할 수 있죠
+
+02:24.050 --> 02:27.920
+그러데이션 축적도 방법이에요
+
+02:27.950 --> 02:35.210
+훈련 속도를 높이는 기술이에요 이렇게 말할 수 있죠 우리가
+
+02:35.210 --> 02:40.760
+뭘 할 거냐면 보통은 앞으로 패스해요
+
+02:40.970 --> 02:46.340
+방금 말한 것처럼 get 손실이 발생하죠
+
+02:46.370 --> 02:52.400
+그리고 뒤로 가는 그러데이션을 계산해서 올바른 방향으로 한 걸음씩 나아가는 거죠
+
+02:52.400 --> 02:58.790
+그러한 그러데이션 누적 현상을 반복해 보면 이렇게 말할 수 있죠 앞으로 패스해서
+
+02:58.790 --> 03:03.800
+얻으면 어떨까? 한 걸음씩 가지 않고 두 번째 패스해서 얻으면 어떨까?
+ +03:03.800 --> 03:07.040 +그 과정을 몇 번 더 반복하는 거죠 + +03:07.040 --> 03:13.790 +이런 그러데이션을 계속 축적하고 한 단계씩 나아가 네트워크를 최적화하세요 + +03:14.060 --> 03:19.170 +이 단계를 비정상적으로 수행하면 비트 코드가 더 빨라지죠 + +03:19.350 --> 03:21.900 +비트 크기와 비슷할 거예요 + +03:21.900 --> 03:27.120 +개념적 유사점이 있어요 왜냐하면 함께 그룹을 이루고 + +03:27.120 --> 03:30.060 +약간 더 큰 단계를 밟으니까요 + +03:30.330 --> 03:35.070 +제가 설정한 hyperpaameter에는 그러데이션 축적을 사용하지 않아요 + +03:35.070 --> 03:36.540 +1로 설정했어요 + +03:36.690 --> 03:39.480 +하지만 전에 해봤는데 속도가 빨라지는 걸 봤어요 + +03:39.480 --> 03:44.220 +그러니 이 방법으로 실험해 보고 어떻게 되는지 보세요 + +03:44.220 --> 03:46.710 +이게 그러데이션 축적이에요 + +03:47.100 --> 03:50.700 +마지막은 최적화 장치예요 + +03:50.730 --> 03:57.030 +최적화 프로그램은 적절한 때에 사용하는 공식이에요 그러데이션이 되면 학습률이 + +03:57.030 --> 03:57.780 +올라가죠 + +03:57.780 --> 04:05.730 +이제 신경망을 업데이트할 시간이에요 모든 걸 좋은 방향으로 약간씩 바꾸면 + +04:05.730 --> 04:11.730 +다음에는 올바른 토큰을 예측할 가능성이 커지죠 비트 + +04:11.730 --> 04:14.550 +그 과정을 최적화 장치라고 하죠 + +04:14.550 --> 04:21.090 +그 방법에 관한 잘 알려진 공식이 아주 많은데 각각 장단점이 있죠 그중 하나를 + +04:21.090 --> 04:22.860 +선택할게요 + +04:22.860 --> 04:27.180 +비트는 성능 면에서 조금 더 비싸죠 + +04:27.180 --> 04:31.050 +비트 박스는 좀 힘들지만 좋은 결과를 낳아요 + +04:31.050 --> 04:33.840 +이 제품부터 시작하시길 추천해요 + +04:33.990 --> 04:40.170 +그리고 메모리 문제가 발생하면 메모리 소모량을 줄여주는 + +04:40.170 --> 04:42.150 +대안이 있어요 + +04:42.300 --> 04:44.820 +하지만 그 과정을 최적화라고 하죠 + +04:44.820 --> 04:49.410 +그 작업을 위해 선택한 알고리즘을 최적화 장치라고 해요 + +04:49.410 --> 04:54.210 +또 다른 hyperperameter죠 다양한 걸 시도해보고 어떤지 보세요 + +04:54.540 --> 04:57.390 +말이 너무 많다는 걸 깨달았어요 + +04:57.390 --> 05:04.860 +hyperperameter에 대한 대화도 비트로 사용했어요 훈련 프로세스에 대해 설명하려고요 + +05:04.950 --> 05:10.590 +지금까지 배운 배경이 도움이 됐길 바랍니다 앞으로 일어날 + +05:10.590 --> 05:17.340 +일에 대비해서요 구글 Colab으로 돌아가 SFT 트레이너를 + +05:17.340 --> 05:24.120 +시작할 겁니다 우리만의 LM을 세밀하게 조정하기 위해서요 diff --git a/week5/community-contributions/subtitles/srts/59507635/en_US.srt b/week5/community-contributions/subtitles/srts/59507635/en_US.srt new file mode 100755 index 0000000..06dfd89 --- /dev/null +++ 
b/week5/community-contributions/subtitles/srts/59507635/en_US.srt @@ -0,0 +1,382 @@ +WEBVTT + +00:00.950 --> 00:02.870 +Look, I hope you're excited. + +00:02.870 --> 00:04.160 +You really should be. + +00:04.190 --> 00:09.110 +You've been through 80% of the course and it's all been building up to this moment. + +00:09.140 --> 00:15.920 +Today you will be training your own proprietary LLM for for fun and for profit. + +00:16.100 --> 00:17.780 +It all starts here. + +00:17.780 --> 00:20.360 +So what is actually involved in today? + +00:20.360 --> 00:23.600 +We're going to start with some stuff that maybe isn't so thrilling. + +00:23.600 --> 00:25.700 +We're going to talk hyperparameters one more time. + +00:25.700 --> 00:29.090 +I've got some essential hyperparameters to go through with you. + +00:29.090 --> 00:34.280 +And the reason this is so important is that you're going to be doing some hyperparameter optimization + +00:34.280 --> 00:35.060 +yourself. + +00:35.060 --> 00:37.490 +The fancy word for for trial and error. + +00:37.490 --> 00:41.270 +And you need to understand the context of what it is that you're playing with. + +00:41.270 --> 00:46.430 +And this is really the opportunity to build something that can beat other models. + +00:46.460 --> 00:51.200 +It's about understanding what kind of levers you've got to experiment with. + +00:51.230 --> 00:56.630 +That is at the heart of the R&D behind building leading models. + +00:56.630 --> 01:00.260 +So we've got some hyperparameters to talk about. + +01:00.480 --> 01:05.790 +And then we're going to set up a supervised fine tuning trainer, an SFT. + +01:05.820 --> 01:11.040 +Trainer, which is the sort of core object behind running this training. + +01:11.130 --> 01:16.440 +Um, looking at parts of the TRL library from Hugging Face and then we will kick off. + +01:16.470 --> 01:20.760 +Our own proprietary LM training process. + +01:20.820 --> 01:22.350 +It's going to be great. 
+
+01:22.920 --> 01:28.800
+So first of all, though, before we get to the great stuff, we do need to talk about some of the essential
+
+01:28.800 --> 01:35.310
+hyperparameters that control this process, starting with QLoRA, and most of this is stuff that you're
+
+01:35.310 --> 01:36.810
+very familiar with now.
+
+01:36.810 --> 01:41.970
+So the first hyperparameter I'll mention is one more time to bring up the target modules.
+
+01:41.970 --> 01:44.370
+And I think you now remember exactly what this is.
+
+01:44.700 --> 01:52.890
+If you have an architecture of a transformer, a base model like Llama 3.1, it's way too big to try
+
+01:52.890 --> 01:55.320
+and fine tune this enormous, great architecture.
+
+01:55.320 --> 02:00.850
+So instead we pick a few layers in the architecture and we call those layers.
+
+02:00.850 --> 02:03.400
+The target modules are the ones we're going to target.
+
+02:03.430 --> 02:04.660
+We freeze everything.
+
+02:04.660 --> 02:06.520
+We're not going to try and optimize these weights.
+
+02:06.520 --> 02:07.630
+There's too many of them.
+
+02:07.630 --> 02:09.220
+Even in these target modules.
+
+02:09.220 --> 02:10.660
+We're not going to train these.
+
+02:10.660 --> 02:16.780
+Rather, we're going to have onto one side a lower dimensional matrix that we will train this lower
+
+02:16.780 --> 02:22.210
+dimensional matrix and we will apply it to this original target module.
+
+02:22.210 --> 02:27.730
+We'll apply it in fact by multiplying them together, using that as a, as a as a delta on the weights
+
+02:27.730 --> 02:28.420
+here.
+
+02:28.510 --> 02:35.020
+Um, and so we train these little guys and we apply them to the target modules, selected layers in
+
+02:35.020 --> 02:38.230
+the bigger architecture that's target modules.
+
+02:38.230 --> 02:44.920
+And then ah, uh, with this greater 3D goggles as the, as the logo, as the icon.
+
+02:45.040 --> 02:51.190
+Uh, R is how many dimensions do we have in this lower dimensional, uh, adapter matrix?
+
+02:51.190 --> 02:56.350
+Uh, it's often common with learning with language learning tasks to start with eight.
+
+02:56.530 --> 03:02.730
+Um, for this project you're going to see I've got 32 as the R, because we've got so much training
+
+03:02.730 --> 03:07.020
+data that I figured we could use quite a few parameters to learn on.
+
+03:07.200 --> 03:10.140
+But if that's running out of memory for you, you can you can have eight.
+
+03:10.170 --> 03:16.890
+I actually I should say that the difference between 8 and 16 and 32 was quite marginal.
+
+03:16.890 --> 03:19.110
+It did improve things, but not by a huge amount.
+
+03:19.110 --> 03:22.470
+So if you have any memory problems, then stick with an R of eight.
+
+03:22.500 --> 03:25.320
+If you're on a smaller box that will be just fine.
+
+03:25.440 --> 03:31.800
+32 is splashing out a bit, but but but it was worth it given the amount of training data we have.
+
+03:32.550 --> 03:36.570
+Alpha, you may remember, is the scaling factor.
+
+03:36.570 --> 03:42.990
+It's used to multiply up the importance of this adapter when it's applied to the target module.
+
+03:42.990 --> 03:46.680
+In fact, you may remember there are actually two LoRA matrices.
+
+03:46.680 --> 03:53.490
+One is called LoRA A and one is called LoRA B, and the formula is that the change in weights is actually,
+
+03:53.550 --> 03:54.600
+uh alpha.
+
+03:54.630 --> 03:58.520
+The scaling factor times A times B, as simple as that.
+
+03:58.520 --> 04:02.410
+That is that's the most maths that we're going to get in this course.
+
+04:02.920 --> 04:05.230
+And I think that's not not taking it too far.
+
+04:05.230 --> 04:07.360
+So that's as simple as what alpha is.
+
+04:07.360 --> 04:08.530
+It's the scaling factor.
+
+04:08.530 --> 04:12.130
+And the rule of thumb is to have alpha to be double R.
+ +04:12.220 --> 04:13.630 +That's what everyone does. + +04:13.630 --> 04:16.480 +By all means you can experiment with other values of alpha. + +04:16.480 --> 04:20.650 +But but the norm is is to do alpha is two r. + +04:20.650 --> 04:25.120 +So we're going to start with an R of 32 and an alpha of 64. + +04:26.230 --> 04:33.130 +Quantisation of course is just what we call it when we reduce the precision of the weights in the base + +04:33.130 --> 04:33.760 +model. + +04:33.760 --> 04:35.830 +The base model has 32 bit numbers. + +04:35.830 --> 04:36.490 +In it. + +04:36.550 --> 04:41.980 +We reduce it down to eight bits or even down to four bits, which sounds insane. + +04:42.070 --> 04:47.560 +We did that with our base model and we saw that we were still getting results. + +04:47.650 --> 04:51.730 +They weren't great results, but I think that would be true for the base model overall. + +04:51.730 --> 04:56.380 +And we did see actually that the eight bit model did better than the four bit model, but they were + +04:56.380 --> 04:58.420 +both pretty miserable at it. + +04:58.730 --> 05:02.900 +And by all means you can try training with the eight bit model too. + +05:02.900 --> 05:07.640 +But we're going to train with a four bit model because that's what will fit in in our in our memory. + +05:07.640 --> 05:12.650 +And that's, uh, but I'd be interested if you try the eight bit to see whether you get significantly + +05:12.650 --> 05:13.910 +different results. + +05:14.630 --> 05:19.880 +And then the final hyperparameter is a new one that we've not talked about before, except to show you + +05:19.880 --> 05:21.920 +it in the code dropout. + +05:21.920 --> 05:24.440 +So dropout is a type. + +05:24.440 --> 05:29.960 +It's a technique that's known as a regularization technique, of which there are a few, um, which + +05:29.960 --> 05:35.840 +means that it's a technique designed to prevent the model from doing what's known as overfitting. 
+ +05:36.020 --> 05:43.340 +And overfitting is when a model gets so much training data, it goes through so much training that it + +05:43.340 --> 05:51.080 +starts to just expect exactly the structure of the data in the training data set, and then give back + +05:51.110 --> 05:52.580 +exactly that answer. + +05:52.580 --> 05:59.330 +And it starts to to no longer understand the general trends of what's being suggested, but instead + +05:59.330 --> 06:04.100 +it sort of hones in on precisely those words and the prediction that comes later. + +06:04.100 --> 06:10.520 +And as a result of that, if you give it some new point that it hasn't seen in its training data set, + +06:10.550 --> 06:16.280 +it performs really badly because it's not being learning the general themes, it's being learning to. + +06:16.310 --> 06:21.980 +It's been too much learning the very specifics of this training data set. + +06:22.010 --> 06:24.530 +I'm being a bit hand-wavy again, but hopefully you get the idea. + +06:24.560 --> 06:31.040 +That's called overfitting when you are too precisely adhering to the training data set and the outcome. + +06:31.040 --> 06:36.380 +And it's not learning the general flavor of what's of what it's trying to predict. + +06:36.560 --> 06:38.000 +Um, and it's that flavor. + +06:38.000 --> 06:39.770 +It's that nuance of what's going on. + +06:39.770 --> 06:42.020 +That's what you're trying to teach the model. + +06:42.260 --> 06:46.280 +Um, so that's that's the sort of that's the preamble, the explanation of what? + +06:46.310 --> 06:47.300 +Of what overfitting is. + +06:47.300 --> 06:53.690 +But now, to tell you exactly what dropout does, and it's really simple, what dropout actually does, + +06:53.780 --> 07:03.870 +uh, is it quite simply removes a random subset of the neurons from the deep neural network. + +07:03.870 --> 07:06.840 +From the transformer, it takes a random percentage. 
+ +07:06.960 --> 07:12.690 +We're going to start with 10%, takes 10% of the neurons, and it just wipes them out, sets the activations + +07:12.690 --> 07:16.800 +to zero so that they are not involved in the forward pass or the backward pass. + +07:16.800 --> 07:21.300 +They're not involved in predicting the next token and they're not involved in optimizing. + +07:21.300 --> 07:23.010 +It's as if they're just not there. + +07:23.010 --> 07:29.730 +And as a result, every time that you're going through training, the model is seeing a different subset, + +07:29.760 --> 07:35.490 +a different 90% of the neural network, 10% of them have been removed randomly each time. + +07:35.490 --> 07:44.310 +And so the, the, the weights are sort of discouraged from being too precise and to and looking too + +07:44.310 --> 07:50.700 +precisely for one set of input tokens, but instead, because different neurons participate every time + +07:50.700 --> 07:54.210 +in the training process, it starts to learn more. + +07:54.240 --> 08:00.670 +The general theme than learning very specifically how to expect different tokens. + +08:00.670 --> 08:05.380 +So it prevents any one neuron from becoming too specialized. + +08:05.380 --> 08:11.560 +It supports this concept of more general understanding in the neural network, in this very simplistic + +08:11.560 --> 08:17.680 +way of just removing 10% of the neurons from the process, a different 10% each time. + +08:17.680 --> 08:18.910 +So that's dropout. + +08:18.910 --> 08:20.230 +It's really very simple. + +08:20.260 --> 08:28.180 +When you realize it, it's literally dropping out a bunch of the neurons and the the norm. + +08:28.570 --> 08:32.740 +It's usually somewhere in the in the range of 5% through to 20%. + +08:32.860 --> 08:36.340 +Um, I've picked 10% as the dropout that we're using. + +08:36.340 --> 08:43.150 +You should absolutely experiment with 5% and 20% and see whether you get better results or not. 
+ +08:43.180 --> 08:47.320 +It is very much a hyperparameter to be experimented with. + +08:47.830 --> 08:51.040 +Okay, so those are the five hyperparameters for Q. + +08:51.070 --> 08:57.130 +Laura, next time we'll talk about five hyperparameters for the overall training process. diff --git a/week5/community-contributions/subtitles/srts/59507635/ja_JP.srt b/week5/community-contributions/subtitles/srts/59507635/ja_JP.srt new file mode 100755 index 0000000..ac5cbe0 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507635/ja_JP.srt @@ -0,0 +1,352 @@ +WEBVTT + +00:00.950 --> 00:02.870 +興奮してるかな? + +00:02.870 --> 00:04.160 +そうあるべきだ。 + +00:04.190 --> 00:09.110 +コースの8割を消化し、 この瞬間まですべてが積み重なってきた。 + +00:09.140 --> 00:15.920 +今日、 あなたは楽しみながら、 そして利益を得るために、 独自のLLMをトレーニングすることになる。 + +00:16.100 --> 00:17.780 +すべてはここから始まる。 + +00:17.780 --> 00:20.360 +では、 今日は実際に何があるのか? + +00:20.360 --> 00:23.600 +まずは、 あまりスリリングではないものから始めよう。 + +00:23.600 --> 00:25.700 +ハイパーパラメーターについて、 もう1度話そう。 + +00:25.700 --> 00:29.090 +必要不可欠なハイパーパラメーターをいくつか用意した。 + +00:29.090 --> 00:35.060 +これがとても重要な理由は、 ハイパーパラメーターの最適化を自分で行うことになるからだ。 + +00:35.060 --> 00:37.490 +試行錯誤を意味する洒落た言葉だ。 + +00:37.490 --> 00:41.270 +そして、 自分がプレーしていることの背景を理解する必要がある。 + +00:41.270 --> 00:46.430 +そしてこれは、 他のモデルに勝てるものを作るチャンスでもある。 + +00:46.460 --> 00:51.200 +どのようなレバーを使って実験するのかを理解することだ。 + +00:51.230 --> 00:56.630 +それが、 トップモデルを作るための研究開発の核心である。 + +00:56.630 --> 01:00.260 +そこで、 ハイパーパラメーターについて説明しよう。 + +01:00.480 --> 01:05.790 +そして、 監督付きの微調整トレーナー、 SFTをセットアップする。 + +01:05.820 --> 01:11.040 +トレーナーというのは、 このトレーニングの中核をなす存在だ。 + +01:11.130 --> 01:16.440 +ハギング・フェイスからTRLライブラリーの一部を見て、 それからキックオフだ。 + +01:16.470 --> 01:20.760 +独自のLMトレーニングプロセス + +01:20.820 --> 01:22.350 +素晴らしいものになるよ。 + +01:22.920 --> 01:28.800 +というわけで、 まずは素晴らしい話に入る前に、 このプロセスをコントロールする重要なハイパーパラメーターについて、 + +01:28.800 --> 01:36.810 +Qローラから話をする必要がある。 + +01:36.810 --> 01:41.970 +そこで最初に挙げるハイパーパラメーターは、 ターゲット・モジュールを呼び出すためのものだ。 + +01:41.970 --> 01:44.370 +そして、 これが何なのか、 正確に覚えていただけたと思う。 + +01:44.700 --> 
01:52.890 +トランスフォーマーのアーキテクチャがあれば、 ラマ3のようなベースモデルがある。 1、 この巨大で偉大な建築物を微調整しようとするには、 + +01:52.890 --> 01:55.320 +あまりにも大きすぎる。 + +01:55.320 --> 02:00.850 +だから、 その代わりにアーキテクチャー内のいくつかのレイヤーを選び、 それをレイヤーと呼ぶ。 + +02:00.850 --> 02:03.400 +ターゲット・モジュールとは、 これからターゲットにするモジュールのことだ。 + +02:03.430 --> 02:04.660 +何でも冷凍するんだ。 + +02:04.660 --> 02:06.520 +ウエイトを最適化するつもりはない。 + +02:06.520 --> 02:07.630 +数が多すぎるんだ。 + +02:07.630 --> 02:09.220 +これらのターゲット・モジュールでもだ。 + +02:09.220 --> 02:10.660 +これをトレーニングするつもりはない。 + +02:10.660 --> 02:16.780 +そうではなく、 低次元の行列を片側に持ってきて、 この低次元の行列を訓練し、 + +02:16.780 --> 02:22.210 +それを元のターゲット・モジュールに適用するのだ。 + +02:22.210 --> 02:28.420 +それを掛け合わせ、 ウェイトのデルタとして使うのだ。 + +02:28.510 --> 02:38.230 +そうして、 この小人を訓練して、 ターゲット・モジュールに適用するんだ。 ターゲット・モジュールとは、 大きなアーキテクチャーの中で選択されたレイヤーのことだ。 + +02:38.230 --> 02:44.920 +そして、 この大きな3Dゴーグルをロゴとして、 アイコンとして使っている。 + +02:45.040 --> 02:51.190 +Rは、 この低次元のアダプターマトリックスの次元数を表している。 + +02:51.190 --> 02:56.350 +ええと、 言語学習の課題では、 8から始めるのが一般的です。 + +02:56.530 --> 03:07.020 +このプロジェクトでは、 Rを32にしました。 学習データがたくさんあるので、 かなりの数のパラメータを使って学習できると考えたからです。 + +03:07.200 --> 03:10.140 +しかし、 もしそれでメモリが足りなくなるようなら、 8枚にすることもできる。 + +03:10.170 --> 03:16.890 +実際には、 8と16と32の差はごくわずかだったと言うべきだろう。 + +03:16.890 --> 03:19.110 +確かに改善されたが、 それほど大きな改善ではない。 + +03:19.110 --> 03:22.470 +記憶力に問題がある場合は、 Rを8にしてください。 + +03:22.500 --> 03:25.320 +もし、 あなたが小さい箱を使っているなら、 それで十分だ。 + +03:25.440 --> 03:31.800 +32ドルというのは少々高いが、 トレーニングデータの量を考えれば、 それだけの価値はある。 + +03:32.550 --> 03:36.570 +アルファはスケーリング・ファクターである。 + +03:36.570 --> 03:42.990 +このアダプターをターゲット・モジュールに適用する際に、 その重要性を倍増させるために使用される。 + +03:42.990 --> 03:46.680 +実は、 ラウラのマトリックスは2つあることを覚えているだろうか。 + +03:46.680 --> 03:54.600 +ひとつはローラA、 もうひとつはローラBと呼ばれ、 計算式では重さの変化はアルファ値になる。 + +03:54.630 --> 03:58.520 +スケーリングファクターにA×Bをかけたもの。 + +03:58.520 --> 04:02.410 +それが、 このコースで習う数学の一番多いところだ。 + +04:02.920 --> 04:05.230 +そして、 それは行き過ぎではないと思う。 + +04:05.230 --> 04:07.360 +アルファとはそういうものだ。 + +04:07.360 --> 04:08.530 +スケーリングファクターだ。 + +04:08.530 --> 04:12.130 +そして経験則では、 アルファはダブルRであるべきだ。 + +04:12.220 --> 04:13.630 
+それは誰もがやっていることだ。 + +04:13.630 --> 04:16.480 +ぜひ他のアルファ値を試してみてほしい。 + +04:16.480 --> 04:20.650 +しかし、 しかし、 アルファは2Rでやるのが普通だ。 + +04:20.650 --> 04:25.120 +そこで、 まずRを32、 アルファを64とする。 + +04:26.230 --> 04:33.760 +定量化とはもちろん、 ベースモデルの重みの精度を下げることを指す。 + +04:33.760 --> 04:35.830 +ベースモデルは32ビット。 + +04:35.830 --> 04:36.490 +その中にある。 + +04:36.550 --> 04:41.980 +私たちはそれを8ビット、 あるいは4ビットにまで減らす。 + +04:42.070 --> 04:47.560 +ベースモデルでそれをやってみたところ、 やはり結果が出た。 + +04:47.650 --> 04:51.730 +素晴らしい結果ではなかったが、 ベースモデル全体に言えることだと思う。 + +04:51.730 --> 04:56.380 +実際、 8ビットモデルは4ビットモデルよりも成績が良かったが、 + +04:56.380 --> 04:58.420 +どちらもかなり惨めだった。 + +04:58.730 --> 05:02.900 +そしてぜひ、 8ビットモデルでのトレーニングも試してみてほしい。 + +05:02.900 --> 05:07.640 +しかし、 4ビットのモデルでトレーニングすることにした。 + +05:07.640 --> 05:12.650 +でも、 8ビットを試してみて、 有意に異なる結果が得られるかどうか、 + +05:12.650 --> 05:13.910 +興味があるね。 + +05:14.630 --> 05:21.920 +そして最後のハイパーパラメーターは、 コード・ドロップアウトでお見せする以外、 これまでお話ししたことのない新しいものです。 + +05:21.920 --> 05:24.440 +つまり、 ドロップアウトはタイプなのだ。 + +05:24.440 --> 05:29.960 +正則化テクニックとして知られているテクニックで、 いくつかありますが、 つまり、 + +05:29.960 --> 05:35.840 +モデルがいわゆるオーバーフィッティングをしないように設計されたテクニックです。 + +05:36.020 --> 05:43.340 +オーバーフィッティングとは、 モデルがあまりにも多くの訓練データを取得し、 あまりにも多くの訓練を経て、 + +05:43.340 --> 05:52.580 +訓練データセットに含まれるデータの構造を正確に予想し、 その通りの答えを返すようになることである。 + +05:52.580 --> 05:59.330 +そして、 もはや提案されていることの一般的な傾向を理解するのではなく、 + +05:59.330 --> 06:04.100 +その言葉やその後に来る予測に集中するようになる。 + +06:04.100 --> 06:10.520 +その結果、 トレーニングデータセットで見たことのないような新しいポイントを与えると、 一般的なテーマを学習しているのではなく、 + +06:10.550 --> 06:16.280 +そのテーマに合わせて学習しているため、 成績は非常に悪くなる。 + +06:16.310 --> 06:21.980 +このトレーニングデータセットの特殊性を学ぶのは大変だった。 + +06:22.010 --> 06:24.530 +また少し手探り状態になってしまったが、 ご理解いただけただろうか。 + +06:24.560 --> 06:31.040 +訓練データセットと結果に正確に準拠しすぎることをオーバーフィッティングと呼ぶ。 + +06:31.040 --> 06:36.380 +そして、 何を予測しようとしているのかの一般的な風味を学んでいない。 + +06:36.560 --> 06:38.000 +あの、 あの味です。 + +06:38.000 --> 06:39.770 +そういうニュアンスなんだ。 + +06:39.770 --> 06:42.020 +それをモデルに教えようとしているんだ。 + +06:42.260 --> 06:46.280 +ええと、 つまり、 それが前文であり、 何の説明なんですか? + +06:46.310 --> 06:47.300 +オーバーフィッティングとは何か? 
+ +06:47.300 --> 06:53.690 +しかし今、 ドロップアウトが何をするのかを正確にお伝えすると、 実に単純なことなのだが、 + +06:53.780 --> 07:03.870 +ドロップアウトが実際にすることは、 ディープ・ニューラル・ネットワークからニューロンのランダムなサブセットを削除することだ。 + +07:03.870 --> 07:06.840 +変圧器からはランダムなパーセンテージを取る。 + +07:06.960 --> 07:12.690 +まず10%から始め、 ニューロンの10%を取り出し、 それらを消去して活性をゼロにし、 + +07:12.690 --> 07:16.800 +前進パスにも後退パスにも関与しないようにする。 + +07:16.800 --> 07:21.300 +彼らは次のトークンの予測には関与しないし、 最適化にも関与しない。 + +07:21.300 --> 07:23.010 +まるでそこにいないかのようだ。 + +07:23.010 --> 07:35.490 +その結果、 トレーニングのたびに、 モデルは異なるサブセット、 異なる90%のニューラルネットワークを見ることになる。 + +07:35.490 --> 07:44.310 +そのため、 重みは、 1セットの入力トークンを正確に探しすぎることを抑制し、 その代わりに、 + +07:44.310 --> 07:50.700 +異なるニューロンがトレーニング・プロセスに毎回参加することで、 + +07:50.700 --> 07:54.210 +より多くのことを学び始める。 + +07:54.240 --> 08:00.670 +一般的なテーマは、 さまざまなトークンを期待する方法を具体的に学ぶことだ。 + +08:00.670 --> 08:05.380 +つまり、 1つのニューロンが専門化しすぎるのを防いでいるのだ。 + +08:05.380 --> 08:11.560 +これは、 ニューロンの10%をプロセスから取り除くという非常に単純化された方法で、 + +08:11.560 --> 08:17.680 +神経回路網におけるより一般的な理解という概念をサポートするものである。 + +08:17.680 --> 08:18.910 +それがドロップアウトだ。 + +08:18.910 --> 08:20.230 +とてもシンプルなことなんだ。 + +08:20.260 --> 08:28.180 +それに気づいたとき、 それは文字通り、 ニューロンと規範の束を取り除くことなのだ。 + +08:28.570 --> 08:32.740 +通常は5%から20%の範囲だ。 + +08:32.860 --> 08:36.340 +ええと、 10%をドロップアウトとして使っています。 + +08:36.340 --> 08:43.150 +5%や20%で実験し、 より良い結果が得られるかどうかを確かめるべきだ。 + +08:43.180 --> 08:47.320 +これは非常に実験的なハイパーパラメーターである。 + +08:47.830 --> 08:51.040 +さて、 これがQの5つのハイパーパラメータだ。 + +08:51.070 --> 08:57.130 +ローラ、 次回は全体的なトレーニングプロセスに関する5つのハイパーパラメータについて話そう。 diff --git a/week5/community-contributions/subtitles/srts/59507635/ko_KR.srt b/week5/community-contributions/subtitles/srts/59507635/ko_KR.srt new file mode 100755 index 0000000..5b6a08d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507635/ko_KR.srt @@ -0,0 +1,373 @@ +WEBVTT + +00:00.950 --> 00:02.870 +기대해도 좋아요 + +00:02.870 --> 00:04.160 +그러는 게 좋을 거예요 + +00:04.190 --> 00:09.110 +코스의 80%를 통과했고 그 모든 게 이 순간을 위해 쌓여온 거죠 + +00:09.140 --> 00:15.920 +오늘 여러분은 자체 개발한 LLM을 훈련할 겁니다 재미와 이윤을 위해서요 + +00:16.100 --> 
00:17.780 +여기서 모든 게 시작돼요 + +00:17.780 --> 00:20.360 +오늘날에는 어떤 일이 일어나나요? + +00:20.360 --> 00:23.600 +별로 스릴이 없을지도 모르는 것부터 시작할게요 + +00:23.600 --> 00:25.700 +hyperperameter에 대해 한 번 더 얘기할게요 + +00:25.700 --> 00:29.090 +여러분과 함께 할 필수 하이퍼파라미터예요 + +00:29.090 --> 00:34.280 +이게 중요한 이유는 hyperpaameter 최적화를 직접 하실 것이기 + +00:34.280 --> 00:35.060 +때문이죠 + +00:35.060 --> 00:37.490 +시행착오를 뜻하는 고급 단어죠 + +00:37.490 --> 00:41.270 +우리가 갖고 노는 것의 맥락을 이해해야 해요 + +00:41.270 --> 00:46.430 +다른 모델을 이길 수 있는 걸 만들 기회예요 + +00:46.460 --> 00:51.200 +어떤 레버를 실험해야 하는지 이해하는 게 중요해요 + +00:51.230 --> 00:56.630 +건축의 주요 모델에 대한 연구개발의 핵심이죠 + +00:56.630 --> 01:00.260 +hyperperameter에 대해 얘기해 보죠 + +01:00.480 --> 01:05.790 +감독하에 소량 조율 트레이너를 설치할 거예요 + +01:05.820 --> 01:11.040 +트레이너는 이 훈련의 핵심 객체예요 + +01:11.130 --> 01:16.440 +TRL 라이브러리에서 얼굴 안기 부분을 살펴보고 시작할게요 + +01:16.470 --> 01:20.760 +우리만의 LM 훈련 과정이죠 + +01:20.820 --> 01:22.350 +멋질 거예요 + +01:22.920 --> 01:28.800 +먼저, 훌륭한 내용을 다루기 전에 이 프로세스를 제어하는 필수 hyperperameter에 + +01:28.800 --> 01:36.810 +대해 얘기하겠습니다 Q 로라부터요 대부분은 이제 아주 익숙해졌죠 + +01:36.810 --> 01:41.970 +첫 번째 hyperpaameter는 대상 모듈을 다시 한 번 언급하는 거예요 + +01:41.970 --> 01:44.370 +이제 이게 뭔지 정확히 기억하실 거예요 + +01:44.700 --> 01:52.890 +변압기 아키텍처가 있다면요 라마 3 같은 기본 모델요 첫째, 이 거대한 건축물을 정교하게 다듬기에는 + +01:52.890 --> 01:55.320 +너무 커요 + +01:55.320 --> 02:00.850 +아키텍처에서 레이어 몇 개를 골라 그걸 레이어라고 부르죠 + +02:00.850 --> 02:03.400 +목표 모듈이 우리가 목표로 삼을 것들이죠 + +02:03.430 --> 02:04.660 +전부 얼려요 + +02:04.660 --> 02:06.520 +무게를 최적화하지 않을 거예요 + +02:06.520 --> 02:07.630 +너무 많아요 + +02:07.630 --> 02:09.220 +이 표적 모듈에서도 말이죠 + +02:09.220 --> 02:10.660 +훈련 안 시킬 거예요 + +02:10.660 --> 02:16.780 +그보다는 한쪽에 차원 매트릭스를 두고 이 차원 매트릭스를 + +02:16.780 --> 02:22.210 +훈련할 겁니다 그걸 이 원래 목표 모듈에 적용하고요 + +02:22.210 --> 02:28.420 +이걸 곱해서 적용할 거예요 여기 무게추를 삼각주로 사용해서요 + +02:28.510 --> 02:35.020 +이 작은 녀석들을 훈련해서 대상 모듈에 적용합니다 더 큰 아키텍처에서 + +02:35.020 --> 02:38.230 +선택한 층이 대상 모듈이죠 + +02:38.230 --> 02:44.920 +그리고 더 큰 3D 고글을 로고로 해서 아이콘으로 삼았어요 + +02:45.040 --> 02:51.190 +R은 이 저차원 어댑터 매트릭스에 차원이 몇 개 있느냐는 거예요 + 
+02:51.190 --> 02:56.350 +언어 학습을 할 때 8단계로 시작하는 경우가 많아요 + +02:56.530 --> 03:02.730 +이 프로젝트에서 32가 R인 걸 보실 수 있습니다 훈련 데이터가 아주 많거든요 + +03:02.730 --> 03:07.020 +몇 가지 매개 변수를 이용해 배울 수 있을 것 같았죠 + +03:07.200 --> 03:10.140 +하지만 메모리가 부족하다면 8개를 가질 수 있어요 + +03:10.170 --> 03:16.890 +솔직히 8과 16과 32의 차이는 거의 없다고 봐야겠죠 + +03:16.890 --> 03:19.110 +상황이 좋아지긴 했지만 큰 차이는 없었어요 + +03:19.110 --> 03:22.470 +메모리에 문제가 있다면 R에서 8을 유지하세요 + +03:22.500 --> 03:25.320 +작은 상자라도 괜찮을 거예요 + +03:25.440 --> 03:31.800 +32는 좀 비싸지만 훈련 데이터를 생각하면 비트를 쓸 만해요 + +03:32.550 --> 03:36.570 +알파는 배율 요인이죠 + +03:36.570 --> 03:42.990 +대상 모듈에 적용될 때 이 어댑터의 중요성을 곱하기 위해 사용되죠 + +03:42.990 --> 03:46.680 +기억하실지 모르겠지만 로라 행렬은 두 개가 있어요 + +03:46.680 --> 03:53.490 +하나는 로라 A 하나는 로라 B라고 해요 무게의 변화가 알파형으로 바뀌는 + +03:53.550 --> 03:54.600 +거죠 + +03:54.630 --> 03:58.520 +배율 인수 곱하기 A 곱하기 B, 간단하죠 + +03:58.520 --> 04:02.410 +이 코스에서 배울 수 있는 가장 많은 수학이죠. Get it. + +04:02.920 --> 04:05.230 +그 정도면 과하지 않은 것 같아요 + +04:05.230 --> 04:07.360 +알파가 무엇인지만큼 간단하죠 + +04:07.360 --> 04:08.530 +스케일링 요인이죠 + +04:08.530 --> 04:12.130 +경험상 알파는 더블 R로 해야 해요 + +04:12.220 --> 04:13.630 +다들 그렇게 해요 + +04:13.630 --> 04:16.480 +알파의 다른 가치를 실험할 수 있어요 + +04:16.480 --> 04:20.650 +하지만 알파는 R이 두 개인 게 일반적이죠 + +04:20.650 --> 04:25.120 +R은 32로 알파는 64로 시작하죠 + +04:26.230 --> 04:33.760 +퀀티세이션은 베이스 모델의 무게 중심을 줄이는 것을 일컫는 말이죠 + +04:33.760 --> 04:35.830 +기본 모델은 32 비트 번호예요 + +04:35.830 --> 04:36.490 +네 + +04:36.550 --> 04:41.980 +8분의 1에서 4분의 1까지 줄여요 미친 소리 같겠지만요 + +04:42.070 --> 04:47.560 +베이스 모델로 그렇게 했는데도 여전히 결과가 나오는 걸 봤죠 + +04:47.650 --> 04:51.730 +좋은 결과는 아니었지만 전반적인 베이스 모델은 그렇다고 생각해요 + +04:51.730 --> 04:56.380 +8비트 모델이 4비트 모델보다 잘 달리는 걸 확인했지만 + +04:56.380 --> 04:58.420 +둘 다 아주 처참했어요 + +04:58.730 --> 05:02.900 +8비트 모델로 연습해 볼 수도 있어요 + +05:02.900 --> 05:07.640 +하지만 4비트 모델로 훈련할 거예요 우리 메모리에 맞으니까요 + +05:07.640 --> 05:12.650 +Get 8 비트를 시도해보고 결과가 확연히 달라지는지 + +05:12.650 --> 05:13.910 +보고 싶어요 + +05:14.630 --> 05:19.880 +마지막 hyperpaameter는 새로운 건데 코드 드롭아웃에서 보여드린 + +05:19.880 --> 05:21.920 +것 외엔 얘기한 적 없죠 + 
+05:21.920 --> 05:24.440 +중퇴도 유형이군요 + +05:24.440 --> 05:29.960 +정규화 기술이라고 알려진 기술인데 몇 가지가 있어요 + +05:29.960 --> 05:35.840 +과잉 착용이라는 행동을 막기 위해 고안된 기술이죠 + +05:36.020 --> 05:43.340 +과잉 충족이란 모델이 훈련 데이터를 많이 받고 훈련을 많이 거쳐서 훈련 + +05:43.340 --> 05:51.080 +데이터 집합에 있는 데이터의 구조를 정확히 기대하게 되고 정확히 그 답을 + +05:51.110 --> 05:52.580 +주는 거예요 + +05:52.580 --> 05:59.330 +제안되는 것의 일반적인 추세를 더는 이해하지 못해요 대신 + +05:59.330 --> 06:04.100 +그 단어와 나중에 나올 예측에 집중하죠 + +06:04.100 --> 06:10.520 +그 결과 훈련 데이터 세트에서 보지 못한 새로운 점을 주면 성능이 아주 떨어집니다 + +06:10.550 --> 06:16.280 +일반적인 주제를 배우는 게 아니라 그걸 배우는 거니까요 + +06:16.310 --> 06:21.980 +이 훈련 데이터 집합의 세부 사항을 너무 많이 배웠어요 + +06:22.010 --> 06:24.530 +또 손으로 비트를 휘젓고 있는데 이해하시길 바라요 get it get it + +06:24.560 --> 06:31.040 +훈련 데이터와 결과에 너무 충실하면 과잉 대응이라고 하죠 + +06:31.040 --> 06:36.380 +예측하려는 것의 일반적인 맛을 배우는 게 아니에요 + +06:36.560 --> 06:38.000 +바로 그 맛이에요 + +06:38.000 --> 06:39.770 +뉘앙스가 중요한 거죠 + +06:39.770 --> 06:42.020 +모델에게 그걸 가르쳐야 해요 + +06:42.260 --> 06:46.280 +방금 그건 서문인데 뭘 설명하는 거죠? + +06:46.310 --> 06:47.300 +과잉이 뭔지에 대해서요 + +06:47.300 --> 06:53.690 +드롭아웃의 기능을 간단히 설명하자면 드롭아웃의 + +06:53.780 --> 07:03.870 +역할은 심층 신경망에서 무작위로 뉴런을 제거하는 거예요 + +07:03.870 --> 07:06.840 +변압기에서 무작위로 퍼센트를 얻어요 + +07:06.960 --> 07:12.690 +10%부터 시작할 거예요 10%의 뉴런을 제거하고 활성화 비율을 0으로 설정해서 + +07:12.690 --> 07:16.800 +앞이나 뒤의 패스에는 관여하지 않게 하는 거죠 + +07:16.800 --> 07:21.300 +다음 토큰을 예측하는 것과 관련 없고 최적화하는 것과도 관련 없죠 + +07:21.300 --> 07:23.010 +마치 존재하지 않는 것 같아요 + +07:23.010 --> 07:29.730 +그 결과 훈련을 받을 때마다 모델은 90%의 다른 신경망을 + +07:29.760 --> 07:35.490 +무작위로 제거하는 다른 하위 집합을 보게 되죠 + +07:35.490 --> 07:44.310 +그래서 추를 조작할 때 너무 정확하거나 너무 정확한 입력 토큰을 사용하지 않게 + +07:44.310 --> 07:50.700 +됐죠 하지만 훈련 과정에서 다양한 뉴런이 참여하면서 더 + +07:50.700 --> 07:54.210 +많은 걸 배우기 시작했어요 + +07:54.240 --> 08:00.670 +일반적인 주제는 구체적으로 다른 패를 기대하는 법을 배우는 거죠 + +08:00.670 --> 08:05.380 +하나의 뉴런이 너무 각화되지 않도록요 + +08:05.380 --> 08:11.560 +신경망에 대한 일반적 이해를 뒷받침하는 개념이죠 매번 다른 + +08:11.560 --> 08:17.680 +10%의 뉴런을 제거하는 아주 단순한 방법이에요 + +08:17.680 --> 08:18.910 +드롭아웃이에요 + +08:18.910 --> 
08:20.230 +아주 간단해요 + +08:20.260 --> 08:28.180 +그걸 깨달았을 땐 말 그대로 뉴런과 정상 기관이 떨어져 나가고 있었죠 + +08:28.570 --> 08:32.740 +보통 5%에서 20% 정도 돼요 + +08:32.860 --> 08:36.340 +10%는 포기한 사람으로 정했어요 + +08:36.340 --> 08:43.150 +5%, 20%로 실험을 해봐야 해요. 더 나은 결과가 나올지 봐야죠. Get it. + +08:43.180 --> 08:47.320 +하이퍼파라미터로 실험해 볼 만하죠 + +08:47.830 --> 08:51.040 +Q의 하이퍼파라미터 5개고요 + +08:51.070 --> 08:57.130 +로라, 다음에는 전반적인 훈련 프로세스를 위한 hyperperameter 5개를 다룰 거예요 diff --git a/week5/community-contributions/subtitles/srts/59507687/en_US.srt b/week5/community-contributions/subtitles/srts/59507687/en_US.srt new file mode 100755 index 0000000..2fbb336 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507687/en_US.srt @@ -0,0 +1,454 @@ +WEBVTT + +00:00.620 --> 00:02.630 +It's time for action, everybody. + +00:02.660 --> 00:04.670 +We've set up our colab. + +00:04.670 --> 00:06.380 +Here we are, week seven, day three. + +00:06.410 --> 00:08.240 +We've got our constant setup. + +00:08.540 --> 00:12.920 +We've checked our model name and it's time to get into things. + +00:13.250 --> 00:19.940 +Um, so the first thing we're going to do is log in to hugging Face, which you know, well. + +00:19.940 --> 00:23.810 +And then after we've logged into hugging face here, we're going to do something that you don't know + +00:23.810 --> 00:29.480 +so well, which is new, which is logging into the wonderful weights and biases platform. + +00:29.690 --> 00:34.940 +So you may have already set this up as part of last week's, uh, foray into weights and biases, but + +00:34.940 --> 00:36.830 +if not, it's very simple to do. + +00:36.830 --> 00:38.150 +And of course it is free. + +00:38.150 --> 00:43.550 +You go to the, uh, weights and biases W and B dot I right linked here. + +00:43.550 --> 00:50.630 +Uh, you set up a free account and then you can find your, your wand API key, your weights and biases + +00:50.660 --> 00:51.530 +API key. 
+ +00:51.530 --> 00:58.160 +And then you have to go to this padlock symbol here and add it in as a secret to your Google Colab. + +00:58.220 --> 01:03.090 +Um, or if you don't want to do that, you can also just type it directly into the notebook right here + +01:03.090 --> 01:03.840 +instead as well. + +01:03.840 --> 01:05.850 +But it's recommended to use the secrets. + +01:05.850 --> 01:09.990 +So once you've set up weights and biases on we go. + +01:10.020 --> 01:15.420 +We then load in the data set, and we can just quickly check that the data set is what we expect. + +01:15.450 --> 01:18.750 +We expect there to be and we expect. + +01:18.780 --> 01:24.870 +Let's have a look at how many training data points we're looking for 400,000 of them there they are + +01:24.870 --> 01:27.840 +400,000 training data points a lot of them. + +01:27.840 --> 01:29.040 +Let's look at the first one. + +01:29.040 --> 01:32.760 +Just make sure it's exactly what we expect it should be. + +01:32.760 --> 01:39.570 +The text in this training data point is the prompt we're expecting followed by the price right here. + +01:39.570 --> 01:44.340 +And it's going to be this that the model is going to be desperate to predict this next token and get + +01:44.340 --> 01:45.120 +it right. + +01:45.510 --> 01:47.460 +And we also have the actual price here. + +01:47.460 --> 01:49.950 +You can see we're rounding it to the nearest whole token. + +01:49.950 --> 01:51.660 +And we tell it that we're looking. + +01:51.660 --> 01:56.550 +How much does this cost to the nearest dollar, the nearest token the nearest dollar. + +01:57.100 --> 01:58.360 +And there is the price. + +01:58.390 --> 01:59.440 +The real price. + +01:59.440 --> 02:03.940 +If we look at the test data set and not that we'll be using that today. + +02:03.940 --> 02:06.400 +But you will remember the test data set is very similar. + +02:06.430 --> 02:08.020 +Just one tiny difference. 
+ +02:08.020 --> 02:10.480 +There is not a price after here. + +02:10.480 --> 02:12.070 +It just says price is dollar. + +02:12.280 --> 02:16.360 +And that's that is what we're training our model to be good at doing. + +02:16.750 --> 02:17.530 +Okay. + +02:17.560 --> 02:23.890 +So um, if we're logged in, if we're asking to log to weights and biases, it then sets up the weights + +02:23.890 --> 02:24.880 +and Biases project. + +02:24.880 --> 02:30.190 +This is in fact not required to have this line of code, because actually hugging face will do this + +02:30.190 --> 02:33.160 +for you if you start to run and you don't, you haven't run this line. + +02:33.160 --> 02:39.700 +But I sometimes add this because you can also, if you wish, pass in a config config a config with + +02:39.700 --> 02:45.070 +some parameters, and that lets you set up some extra um, attributes that will appear in weights and + +02:45.070 --> 02:45.580 +biases. + +02:45.580 --> 02:47.470 +That's a useful trick to know. + +02:47.560 --> 02:50.290 +Um, but but anyways, you can run this or not. + +02:50.320 --> 02:51.940 +It doesn't make much difference. + +02:52.570 --> 02:55.310 +Okay, so then this we saw before. + +02:55.310 --> 03:01.310 +We're just going to take this hyper parameter and decide whether we're going to use the bits and bytes + +03:01.340 --> 03:04.280 +config for four bit quantization or for eight bit. + +03:04.280 --> 03:06.320 +And we are of course going with four bits. + +03:06.350 --> 03:08.390 +We're going all the way down. + +03:08.390 --> 03:13.100 +And then it is time to load in the tokenizer and the model. + +03:13.160 --> 03:16.220 +And so usual code to load the tokenizer. 
+ +03:16.220 --> 03:22.670 +I mentioned to you that this is boilerplate stuff that you do that tells the trainer that we want to + +03:22.700 --> 03:30.950 +pad every data point so that it fills up our maximum sequence length, and it should pad it all up with + +03:30.950 --> 03:39.110 +end of sentence tokens just to fill up that that set of characters, the 182, uh, length of our of + +03:39.110 --> 03:41.630 +our tokens in our, in our sequences. + +03:41.630 --> 03:44.570 +So we'll pad them to the right with these tokens. + +03:44.960 --> 03:52.490 +And then there's this one line here, which is just another boilerplate thing to set the pad token ID + +03:52.520 --> 03:54.690 +on this thing called the generation config. + +03:54.690 --> 04:00.930 +And the reason that put this in there is it stops it saying a warning later that it that it doesn't + +04:00.930 --> 04:05.490 +see this, but I actually think it would take this anyway. + +04:05.490 --> 04:09.420 +So so this avoids it printing an unnecessary warning. + +04:10.290 --> 04:17.730 +Um, and then we end by printing the memory footprint as usual, and we see that it is the 5.6GB that + +04:17.730 --> 04:24.840 +we expect for our four bit version of the llama, 3.18 billion parameter model. + +04:25.560 --> 04:27.300 +Okay, one more technicality here. + +04:27.300 --> 04:29.610 +This thing called the data collator. + +04:29.670 --> 04:37.020 +So there is this, um, this cunning thing that when we're doing our training, we're going to be passing + +04:37.020 --> 04:43.380 +in the the full sequence, including the price, and the model will see lots of examples of this and + +04:43.380 --> 04:46.230 +get better and better at predicting these tokens. + +04:46.230 --> 04:47.940 +But there's a bit of a catch. + +04:47.940 --> 04:55.260 +We actually don't care for the model to learn about how to predict all of the tokens in the prompt up + +04:55.290 --> 04:57.000 +until the dollar sign. 
+ +04:57.000 --> 05:00.060 +We want it to learn how to predict that token right there. + +05:00.060 --> 05:05.790 +So we don't want it to spend lots of time seeing how good it is at writing descriptions of products, + +05:05.790 --> 05:10.200 +and then also learn the price we want it to focus on that price. + +05:10.380 --> 05:15.120 +Um, and, and doing that involves setting up something called a mask. + +05:15.270 --> 05:21.300 +When you tell the trainer that you don't need it to learn about the prompt, you just want it to take + +05:21.300 --> 05:23.310 +this into account, to give it context. + +05:23.310 --> 05:27.960 +But learn how to predict this token right here after the dollar sign. + +05:27.960 --> 05:34.860 +Setting up masks is a bit fiddly and involves a bit of uh um uh, yeah, yeah, a bit of, uh, messing + +05:34.860 --> 05:36.390 +around with with tokens. + +05:36.390 --> 05:42.000 +But luckily hugging face has made it very easy indeed with a nice little utility that does it all for + +05:42.000 --> 05:42.330 +you. + +05:42.330 --> 05:46.500 +And it's called the data Collator for completion only. + +05:46.830 --> 05:49.420 +LM, uh, which is what we've got. + +05:49.450 --> 05:51.970 +We've got something where all we want to do is have it complete. + +05:51.970 --> 05:53.380 +This particular sentence. + +05:53.380 --> 05:59.800 +How it works is you come up with something that's called the response template, which sounds very fancy, + +05:59.800 --> 06:05.440 +but it just simply means what is the chunk of text which is going to indicate that I want you to predict + +06:05.440 --> 06:06.700 +whatever comes next. + +06:06.700 --> 06:10.480 +And in our case, it is quite simply price is dollar sign. + +06:10.480 --> 06:13.120 +We want it to predict the next thing after that. 
+ +06:13.120 --> 06:18.490 +So you put that into something called response templates and then you create something, an instance + +06:18.490 --> 06:23.800 +of data collator for completion, only passing in the response templates. + +06:23.800 --> 06:30.550 +And you also have to tell it your tokenizer, and it will use that to build the final set of masked + +06:30.550 --> 06:33.730 +data that goes into the the trainer. + +06:33.730 --> 06:35.230 +And you'll see that in just a second. + +06:35.230 --> 06:40.090 +So this is super simple and it's doing something quite sophisticated behind the scenes and making it + +06:40.090 --> 06:41.320 +easy for us. + +06:42.010 --> 06:42.910 +All right. + +06:43.270 --> 06:46.750 +And finally the last the last big moment for us. + +06:46.750 --> 06:50.090 +This is going to look like lots of code, but it's not actually that much to it. + +06:50.270 --> 06:56.090 +We are about to call our trainer, and we're going to have to pass in two sets of parameters. + +06:56.090 --> 06:58.460 +One of them is called the Laura config. + +06:58.460 --> 07:02.960 +It's the hyperparameters that relate to how we're doing our Laura training. + +07:02.960 --> 07:08.390 +And the other, the psft config is the supervised fine tuning. + +07:08.390 --> 07:15.140 +Config is just general stuff about how training should work, and what I've done is I've basically set + +07:15.140 --> 07:20.060 +these things up to pull from the constants that we defined right at the top of the notebook, like alpha + +07:20.060 --> 07:24.560 +and dropout, and then the things that don't matter so much, I've just put put in here what to use. + +07:24.560 --> 07:28.610 +And these are not important hyperparameters or they're or they're just things where we should set it + +07:28.610 --> 07:30.110 +to a to a certain value. 
+ +07:30.140 --> 07:34.910 +So you can see that for the Laura parameters, we of course pass in the alpha, which we know well, + +07:34.940 --> 07:40.610 +I won't define it yet again, the dropout and the target modules that we're targeting. + +07:40.610 --> 07:46.350 +So so that goes into the Laura config, the training parameters, the SFT config had stuff like the + +07:46.350 --> 07:46.950 +run name. + +07:46.950 --> 07:50.160 +How many epochs are we running the batch size? + +07:50.310 --> 07:52.170 +Uh, super important 16. + +07:52.200 --> 07:56.610 +In my case, it might be one for you if you're on a on a the T4 box. + +07:56.850 --> 08:00.660 +Uh, the gradient accumulation, that is one for us. + +08:00.750 --> 08:06.360 +And then things like learning rate the cosine scheduler that we decided to use. + +08:06.510 --> 08:12.960 +Um, and then just down at the bottom here, we're saying that we want this to be pushing to the hub. + +08:13.050 --> 08:19.050 +Um, every time that, that it's doing a save, which is every 5000 of the, of the batch steps, we + +08:19.050 --> 08:23.520 +want it to push this model to the hub so that we'll get a series of saves to the hub. + +08:23.520 --> 08:26.220 +And we can see our model at each of those points. + +08:26.520 --> 08:32.700 +Um, and we ask to give it this name, and we say we want it to be a private, uh, repo so that it's + +08:32.700 --> 08:36.570 +not public yet until we're satisfied that we've got great results from it. + +08:37.710 --> 08:40.740 +Uh, and then this is the final line. + +08:40.740 --> 08:43.990 +We set up something called an SFT trainer. + +08:43.990 --> 08:47.230 +We pass in just a few simple things. + +08:47.260 --> 08:49.630 +Number one, it needs to know the base model. + +08:49.630 --> 08:54.940 +What is the underlying model that we want to fine tune, which is llama 3.1. + +08:55.330 --> 08:57.490 +Number two what is our training data? + +08:57.520 --> 08:58.510 +We pass that in. 
+ +08:58.510 --> 09:00.430 +What are the Lora parameters. + +09:00.430 --> 09:07.750 +This has as you know it has the alpha, the r, the dropout, the tokenizer, the training parameters + +09:07.750 --> 09:09.430 +that we just set up right here. + +09:09.430 --> 09:16.120 +And finally we pass in that collator, which is that sneaky object which knows to look for the prices + +09:16.120 --> 09:17.770 +dollar sign and it's telling it. + +09:17.800 --> 09:20.110 +Don't bother predicting what comes before that. + +09:20.440 --> 09:22.570 +Worry about what comes next. + +09:22.990 --> 09:27.400 +Um, and so this then will have set up our trainer. + +09:27.400 --> 09:35.590 +And in the next video we'll run this single line fine tuning train, which is the line that kicks it + +09:35.590 --> 09:36.190 +off. + +09:36.310 --> 09:37.990 +Uh, that will be quite the moment. + +09:37.990 --> 09:42.070 +So without further ado, let's go do that. diff --git a/week5/community-contributions/subtitles/srts/59507687/ja_JP.srt b/week5/community-contributions/subtitles/srts/59507687/ja_JP.srt new file mode 100755 index 0000000..c809123 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507687/ja_JP.srt @@ -0,0 +1,373 @@ +WEBVTT + +00:00.620 --> 00:02.630 +みんな、 行動の時だ。 + +00:02.660 --> 00:04.670 +コラボを立ち上げたんだ。 + +00:04.670 --> 00:06.380 +さあ、 7週目、 3日目だ。 + +00:06.410 --> 00:08.240 +常にセットアップしている。 + +00:08.540 --> 00:12.920 +モデル名を確認し、 いよいよ本番だ。 + +00:13.250 --> 00:19.940 +ええと、 まず最初にすることは、 ハギング・フェイスにログインすること。 + +00:19.940 --> 00:23.810 +そして、 ハギング・フェイスにログインした後、 皆さんがあまりご存じない、 + +00:23.810 --> 00:29.480 +新しいこと、 素晴らしいウェイト&バイアス・プラットフォームにログインします。 + +00:29.690 --> 00:34.940 +先週のウェイトとバイアスの考察の一環として、 すでに設定済みかもしれないが、 そうでなければ、 + +00:34.940 --> 00:36.830 +とても簡単だ。 + +00:36.830 --> 00:38.150 +もちろん無料だ。 + +00:38.150 --> 00:43.550 +ここにリンクされているウェイトとバイアスのWとBのドットIをご覧ください。 + +00:43.550 --> 00:51.530 +無料アカウントをセットアップして、 あなたの杖のAPIキー、 重みと偏りのAPIキーを見つけることができます。 + +00:51.530 --> 00:58.160 +そして、 この南京錠のマークをクリックし、 
Google Colabにシークレットとして追加します。 + +00:58.220 --> 01:03.840 +そうしたくない場合は、 このノートブックに直接入力することもできます。 + +01:03.840 --> 01:05.850 +しかし、 秘訣を使うことをお勧めする。 + +01:05.850 --> 01:09.990 +ウェイトとバイアスを設定したら、 さあ始めよう。 + +01:10.020 --> 01:15.420 +そしてデータセットをロードし、 そのデータセットが期待通りのものであることを素早くチェックすることができる。 + +01:15.450 --> 01:18.750 +私たちはそれを期待しているし、 期待している。 + +01:18.780 --> 01:27.840 +トレーニング・データ・ポイントの数を見てみよう。 40万個のトレーニング・データ・ポイントがある。 + +01:27.840 --> 01:29.040 +一つ目を見てみよう。 + +01:29.040 --> 01:32.760 +ただ、 私たちが期待する通りのものであることを確認してほしい。 + +01:32.760 --> 01:39.570 +このトレーニング・データ・ポイントのテキストは、 私たちが期待しているプロンプトで、 その後に価格が続きます。 + +01:39.570 --> 01:45.120 +そして、 この次のトークンを予測し、 それを的中させるためにモデルは必死になる。 + +01:45.510 --> 01:47.460 +実際の価格もここにある。 + +01:47.460 --> 01:49.950 +四捨五入しているのがわかるだろう。 + +01:49.950 --> 01:51.660 +そして、 探していることを伝える。 + +01:51.660 --> 01:56.550 +1ドル単位、 トークン単位、 1ドル単位でいくらかかりますか? + +01:57.100 --> 01:58.360 +そして値段だ。 + +01:58.390 --> 01:59.440 +本当の価格だ。 + +01:59.440 --> 02:03.940 +テストデータセットを見てみよう。 + +02:03.940 --> 02:06.400 +しかし、 テストデータセットが非常に似ていることを覚えているだろう。 + +02:06.430 --> 02:08.020 +ほんの小さな違いだ。 + +02:08.020 --> 02:10.480 +ここに値段はない。 + +02:10.480 --> 02:12.070 +値段はドルと書いてあるだけだ。 + +02:12.280 --> 02:16.360 +そしてそれこそが、 我々のモデルが得意とするトレーニングなのだ。 + +02:16.750 --> 02:17.530 +オーケー。 + +02:17.560 --> 02:24.880 +ログインして、 weights and biasesにログインするように指示すると、 weights and Biasesプロジェクトがセットアップされます。 + +02:24.880 --> 02:33.160 +というのも、 もし走り始めても、 ハグフェイスがこれをやってくれるからだ。 + +02:33.160 --> 02:45.580 +しかし、 必要であれば、 コンフィグにいくつかのパラメータを渡して、 ウェイトやバイアスに表示される追加的なUMや属性を設定することもできるからだ。 + +02:45.580 --> 02:47.470 +知っておくと便利なトリックだね。 + +02:47.560 --> 02:50.290 +うーん、 でも、 とにかく、 これを実行するかどうかは自由だ。 + +02:50.320 --> 02:51.940 +大した違いはない。 + +02:52.570 --> 02:55.310 +じゃあ、 これは前にも見たね。 + +02:55.310 --> 03:01.310 +このハイパーパラメーターを使って、 ビットとバイトのコンフィグを4ビット量子化に使うか、 + +03:01.340 --> 03:04.280 +8ビットに使うかを決めるだけだ。 + +03:04.280 --> 03:06.320 +もちろん4ビットで行く。 + +03:06.350 --> 03:08.390 +ずっと下に行くんだ。 + +03:08.390 --> 03:13.100 +そして、 トークナイザーとモデルをロードする。 + +03:13.160 --> 03:16.220 +そして、 
トークナイザーをロードするいつものコード。 + +03:16.220 --> 03:22.670 +これは定型文のようなもので、 トレーナーに対して、 配列の最大長を満たすようにすべてのデータ・ポイントをパディングし、 + +03:22.700 --> 03:41.630 +文末のトークンですべてのデータ・ポイントをパディングして、 配列のトークンの長さである182文字を埋めるように指示するものです。 + +03:41.630 --> 03:44.570 +だから、 このトークンで右サイドを埋めるんだ。 + +03:44.960 --> 03:52.490 +そしてこの1行は、 世代設定と呼ばれるものにパッドのトークンIDを設定するための、 + +03:52.520 --> 03:54.690 +単なる定型文です。 + +03:54.690 --> 04:05.490 +これを入れたのは、 後で警告が出るのを防ぐためだ。 + +04:05.490 --> 04:09.420 +そうすることで、 不必要な警告が表示されるのを避けることができる。 + +04:10.290 --> 04:17.730 +そして最後に、 いつものようにメモリーのフットプリントを印刷して、 それが5であることを確認する。 4ビット版のラマに期待される6GB、 + +04:17.730 --> 04:24.840 +3. 180億パラメータモデル。 + +04:25.560 --> 04:27.300 +さて、 ここでもうひとつ詭弁がある。 + +04:27.300 --> 04:29.610 +データコレーターと呼ばれるものだ。 + +04:29.670 --> 04:46.230 +つまり、 トレーニングをするときに、 価格を含む完全なシーケンスを渡すという狡猾な方法がある。 + +04:46.230 --> 04:47.940 +しかし、 ちょっとした引っ掛かりがある。 + +04:47.940 --> 04:57.000 +実際には、 ドル記号までのプロンプトのすべてのトークンを予測する方法についてモデルが学習することは気にしない。 + +04:57.000 --> 05:00.060 +トークンを予測する方法を学ばせたいんだ。 + +05:00.060 --> 05:05.790 +だから、 商品の説明を書くのがどれだけ上手かを見るのに多くの時間を費やしてほしくないし、 + +05:05.790 --> 05:10.200 +価格も覚えてほしくない。 + +05:10.380 --> 05:15.120 +そのためには、 マスクというものをセットアップする必要があるんだ。 + +05:15.270 --> 05:23.310 +トレーナーに「プロンプトについて学ぶ必要はない。 + +05:23.310 --> 05:27.960 +しかし、 ドル記号のすぐ後にあるこのトークンを予測する方法を学ぼう。 + +05:27.960 --> 05:36.390 +マスクのセッティングは少々手こずり、 トークンをいじくり回す必要がある。 + +05:36.390 --> 05:42.330 +しかし幸運なことに、 ハギング・フェイスは、 それをすべてやってくれるちょっとしたユーティリティで、 実に簡単にしてくれた。 + +05:42.330 --> 05:46.500 +そして、 それはデータ・コレーターと呼ばれ、 完成度の高いものである。 + +05:46.830 --> 05:49.420 +LMは......僕らが手に入れたものだ。 + +05:49.450 --> 05:51.970 +完成させたいものしかないんだ。 + +05:51.970 --> 05:53.380 +この特別な文章。 + +05:53.380 --> 05:59.800 +どのように機能するかというと、 レスポンス・テンプレートと呼ばれるものを考え出すのです。 とても派手に聞こえますが、 + +05:59.800 --> 06:06.700 +それは単に、 次に何が来るかを予測してほしいことを示すテキストの塊を意味します。 + +06:06.700 --> 06:10.480 +そして私たちの場合、 それは単純に価格がドル記号であるということだ。 + +06:10.480 --> 06:13.120 +その次を予測させたい。 + +06:13.120 --> 06:18.490 +そこで、 それをレスポンス・テンプレートと呼ばれるものに入れ、 レスポンス・テンプレートだけを渡して、 + +06:18.490 --> 06:23.800 
+データ・コレーターのインスタンスを作成する。 + +06:23.800 --> 06:33.730 +そして、 トークナイザーも指定しなければならない。 トークナイザーは、 トレーナーに入力されるマスクされたデータの最終的なセットを構築するためにそれを使用する。 + +06:33.730 --> 06:35.230 +すぐにわかるだろう。 + +06:35.230 --> 06:41.320 +だからこれは超シンプルで、 裏ではかなり高度なことをやっていて、 僕らにとっては簡単なことなんだ。 + +06:42.010 --> 06:42.910 +分かった。 + +06:43.270 --> 06:46.750 +そしてついに、 私たちにとって最後の大きな瞬間がやってきた。 + +06:46.750 --> 06:50.090 +これはたくさんのコードがあるように見えるだろうが、 実際にはそれほど多くはない。 + +06:50.270 --> 06:56.090 +これからトレーナーを呼び出すので、 2組のパラメーターを渡さなければならない。 + +06:56.090 --> 06:58.460 +そのひとつがローラのコンフィグだ。 + +06:58.460 --> 07:02.960 +ハイパーパラメーターは、 ローラのトレーニング方法に関係するものだ。 + +07:02.960 --> 07:08.390 +そしてもうひとつは、 psftコンフィグによる微調整だ。 + +07:08.390 --> 07:15.140 +コンフィグは、 トレーニングがどのように機能するかについての一般的なもので、 私がやったことは、 + +07:15.140 --> 07:24.560 +基本的に、 アルファ値やドロップアウトなど、 ノートブックの一番上で定義した定数から引っ張ってくるように設定しました。 + +07:24.560 --> 07:30.110 +これらは重要なハイパーパラメーターではない。 + +07:30.140 --> 07:40.610 +ローラのパラメーターには、 もちろんアルファを渡していることがわかるだろう。 + +07:40.610 --> 07:46.950 +ローラの設定、 トレーニングパラメータ、 SFTの設定には、 ラン名などが含まれています。 + +07:46.950 --> 07:50.160 +バッチサイズは何エポックですか? + +07:50.310 --> 07:52.170 +ええと、 超重要な16番。 + +07:52.200 --> 07:56.610 +私の場合、 T4ボックスを使っているのであれば、 これはあなたのためのものかもしれない。 + +07:56.850 --> 08:00.660 +グラデーションの蓄積は、 僕らにとっては大事なことなんだ。 + +08:00.750 --> 08:06.360 +そして、 私たちが使うことにしたコサイン・スケジューラーの学習率などだ。 + +08:06.510 --> 08:12.960 +それから、 この下の方にある、 ハブにプッシュするように言っているんだ。 + +08:13.050 --> 08:19.050 +保存を行うたびに、 つまりバッチ・ステップの5000回ごとに、 このモデルをハブにプッシュして、 + +08:19.050 --> 08:23.520 +ハブに一連の保存を行うようにします。 + +08:23.520 --> 08:26.220 +そして、 それぞれのポイントで我々のモデルを見ることができる。 + +08:26.520 --> 08:32.700 +そして、 この名前をつけてほしいと頼み、 非公開のレポにしたいと言って、 そのレポから素晴らしい結果が出たと満足するまで、 + +08:32.700 --> 08:36.570 +まだ公開しないようにする。 + +08:37.710 --> 08:40.740 +そしてこれが最後のセリフだ。 + +08:40.740 --> 08:43.990 +SFTトレーナーというものを立ち上げた。 + +08:43.990 --> 08:47.230 +私たちはほんのわずかなことで合格する。 + +08:47.260 --> 08:49.630 +第一に、 ベースモデルを知る必要がある。 + +08:49.630 --> 08:54.940 +私たちが微調整したい根本的なモデルとは、 ラマ3である。 1. + +08:55.330 --> 08:57.490 +その2 トレーニングデータとは何か? 
+ +08:57.520 --> 08:58.510 +私たちはそれをパスする。 + +08:58.510 --> 09:00.430 +ローラのパラメーターは? + +09:00.430 --> 09:09.430 +これには、 アルファ値、 r、 ドロップアウト、 トークナイザー、 トレーニング・パラメーターが設定されている。 + +09:09.430 --> 09:17.770 +そして最後にコレーターを渡す。 コレーターは、 ドル記号の値段を探すことを知っていて、 それを教えてくれる卑劣なオブジェクトだ。 + +09:17.800 --> 09:20.110 +その前をわざわざ予測する必要はない。 + +09:20.440 --> 09:22.570 +次のことを心配する。 + +09:22.990 --> 09:27.400 +それで、 これでトレーナーが決まった。 + +09:27.400 --> 09:36.190 +そして次のビデオでは、 この単線の微調整列車を走らせる。 + +09:36.310 --> 09:37.990 +それは素晴らしい瞬間だ。 + +09:37.990 --> 09:42.070 +では、 さっそく行ってみよう。 diff --git a/week5/community-contributions/subtitles/srts/59507687/ko_KR.srt b/week5/community-contributions/subtitles/srts/59507687/ko_KR.srt new file mode 100755 index 0000000..d9cf827 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507687/ko_KR.srt @@ -0,0 +1,442 @@ +WEBVTT + +00:00.620 --> 00:02.630 +행동할 시간입니다, 여러분 + +00:02.660 --> 00:04.670 +콜랍을 준비했어요 + +00:04.670 --> 00:06.380 +7주 차, 3일째예요 + +00:06.410 --> 00:08.240 +계속 설정해뒀어요 + +00:08.540 --> 00:12.920 +모델 이름을 체크했고 get get get을 할 시간이네요. 
+ +00:13.250 --> 00:19.940 +첫 번째로 할 일은 얼굴 안기에 로그인하는 거예요 + +00:19.940 --> 00:23.810 +얼굴 안기 로깅에 로그인한 후에는 여러분이 잘 모르는 + +00:23.810 --> 00:29.480 +새로운 걸 할 거예요 무게와 바이어스 플랫폼에 로그인하는 거죠 + +00:29.690 --> 00:34.940 +지난주에 무게와 편향성을 탐구하려고 이미 설정해 두셨을 수도 있지만 그렇지 + +00:34.940 --> 00:36.830 +않다면 아주 간단해요 + +00:36.830 --> 00:38.150 +물론 공짜고요 + +00:38.150 --> 00:43.550 +무게와 편향 W와 B의 점과 I를 연결하세요 + +00:43.550 --> 00:50.630 +무료 계정을 만들면 지팡이 API 키와 무게, 편향 API 키를 찾을 수 + +00:50.660 --> 00:51.530 +있어요 + +00:51.530 --> 00:58.160 +그리고 이 자물쇠 기호로 가서 구글 Colab에 비밀리에 추가해야 해요 + +00:58.220 --> 01:03.090 +그렇게 하기 싫으시면 여기 있는 공책에 직접 입력하셔도 + +01:03.090 --> 01:03.840 +돼요 + +01:03.840 --> 01:05.850 +하지만 비밀을 사용하는 걸 추천해요 + +01:05.850 --> 01:09.990 +무게와 편향을 설정하면 시작할게요 + +01:10.020 --> 01:15.420 +그런 다음 데이터 세트를 로드하고 데이터 집합이 우리가 기대하는 건지 재빨리 확인할 수 있어요 + +01:15.450 --> 01:18.750 +그럴 거라고 예상하고 기대하죠 + +01:18.780 --> 01:24.870 +훈련 데이터 포인트가 얼마나 많은지 봅시다 400,000개를 찾고 있어요 400,000개의 + +01:24.870 --> 01:27.840 +훈련 데이터 포인트죠 아주 많아요 + +01:27.840 --> 01:29.040 +첫 번째를 보죠 + +01:29.040 --> 01:32.760 +우리가 예상하는 대로 만들어 주세요 + +01:32.760 --> 01:39.570 +이 훈련 데이터 포인트의 텍스트는 우리가 기대하는 프롬프트입니다 그리고 여기 가격이 있죠 + +01:39.570 --> 01:44.340 +모델은 필사적으로 다음 토큰을 예측하고 get을 할 + +01:44.340 --> 01:45.120 +거예요 + +01:45.510 --> 01:47.460 +실제 가격도 나와 있어요 + +01:47.460 --> 01:49.950 +가장 가까운 토큰으로 가는 게 보이죠 + +01:49.950 --> 01:51.660 +우리가 찾는다고 말하는 거죠 + +01:51.660 --> 01:56.550 +가장 가까운 1달러의 비용은 얼마일까요? 가장 가까운 1달러의 토큰은 얼마일까요? 
+ +01:57.100 --> 01:58.360 +대가가 따르죠 + +01:58.390 --> 01:59.440 +진짜 가격요 + +01:59.440 --> 02:03.940 +테스트 데이터 세트를 보면∙∙∙ 오늘 그걸 사용하진 않을 거예요 + +02:03.940 --> 02:06.400 +하지만 테스트 데이터 세트도 아주 유사하다는 걸 기억하실 거예요 + +02:06.430 --> 02:08.020 +아주 작은 차이죠 + +02:08.020 --> 02:10.480 +이다음에는 가격도 없어요 + +02:10.480 --> 02:12.070 +가격은 달러라고만 쓰여 있어요 + +02:12.280 --> 02:16.360 +그런 걸 잘하도록 모델을 훈련하고 있어요 + +02:16.750 --> 02:17.530 +네 + +02:17.560 --> 02:23.890 +저희가 로그인해서 무게와 바이어스에 로그인하도록 요청하면 무게와 바이어스 프로젝트를 + +02:23.890 --> 02:24.880 +설정해요 + +02:24.880 --> 02:30.190 +이 코드는 사실 이 줄이 있을 필요가 없어요 왜냐하면 얼굴을 안으면 이렇게 되거든요 만약 + +02:30.190 --> 02:33.160 +실행을 시작했는데 이 줄을 실행하지 않았다면요 + +02:33.160 --> 02:39.700 +하지만 가끔 이걸 추가해요 원한다면 매개 변수로 구성된 구성을 통과시킬 수도 + +02:39.700 --> 02:45.580 +있거든요 그럼 무게나 편향에 나타날 추가 특성을 설정할 수 있죠 + +02:45.580 --> 02:47.470 +유용한 정보네요 + +02:47.560 --> 02:50.290 +어쨌든 이걸 실행하든 말든 맘대로 해요 + +02:50.320 --> 02:51.940 +그게 그거죠 + +02:52.570 --> 02:55.310 +네, 이건 아까 봤죠 + +02:55.310 --> 03:01.310 +이 하이퍼 매개 변수를 가지고 4비트 수량화를 위해 비트 및 바이트 구성을 쓸지 + +03:01.340 --> 03:04.280 +8비트 사용할지 결정하는 거죠 + +03:04.280 --> 03:06.320 +물론 50센트짜리로 할게요 + +03:06.350 --> 03:08.390 +계속 내려갈 거예요 + +03:08.390 --> 03:13.100 +이제 토큰라이저와 모델을 로드할 차례예요 + +03:13.160 --> 03:16.220 +토큰마이저를 로드하는 일반적인 코드죠 + +03:16.220 --> 03:22.670 +이것은 상용적인 것이라고 말씀드렸었는데요 트레이너에게 모든 데이터 + +03:22.700 --> 03:30.950 +포인트를 패드하고 싶다고 말합니다 최대 시퀀스 길이를 채우기 위해서요 그리고 문장 끝 + +03:30.950 --> 03:39.110 +토큰들로 모든 것을 패드해야 합니다 글자 집합을 채우기 위해서요 시퀀스에서 토큰의 182개 + +03:39.110 --> 03:41.630 +길이 말이죠 + +03:41.630 --> 03:44.570 +이 토큰으로 오른쪽에 패드하죠 + +03:44.960 --> 03:52.490 +그리고 여기 이 라인이 있는데요 이것은 또 다른 표준 문항으로 패드 토큰 ID를 생성 환경 + +03:52.520 --> 03:54.690 +설정하는 것인데요 + +03:54.690 --> 04:00.930 +이걸 저기에 넣은 이유는 나중에 경고를 막기 위해서예요 이걸 보지 않는다는 + +04:00.930 --> 04:05.490 +경고요 하지만 어쨌든 이걸 받을 것 같아요 + +04:05.490 --> 04:09.420 +그래서 이건 불필요한 경고를 인쇄하는 걸 피하죠 + +04:10.290 --> 04:17.730 +마지막으로 메모리 발자국 프린팅으로 마무리합니다 5가 보이죠 4 비트 버전의 라마는 6GB가 + +04:17.730 --> 04:24.840 +예상됩니다, 3이죠 180억 매개 변수 모델이죠 + +04:25.560 --> 04:27.300 
+세부 사항이 하나 더 있어요 + +04:27.300 --> 04:29.610 +데이터 합성기라는 거예요 + +04:29.670 --> 04:37.020 +이런 교묘한 방법이 있어요 훈련을 할 때 가격을 포함한 전체 배열을 통과시키면 + +04:37.020 --> 04:43.380 +모델에서 많은 예시를 볼 수 있고 이런 토큰을 예측하는 능력이 점점 + +04:43.380 --> 04:46.230 +좋아지죠 get it + +04:46.230 --> 04:47.940 +하지만 비트가 있죠 + +04:47.940 --> 04:55.260 +사실 우린 달러 기호가 나올 때까지 모든 토큰을 예측하는 모델에 + +04:55.290 --> 04:57.000 +관심 없어요 + +04:57.000 --> 05:00.060 +여기 있는 토큰을 예측하는 법을 배우게 하는 거죠 + +05:00.060 --> 05:05.790 +그래서 제품 설명을 얼마나 잘하는지 확인하느라 많은 시간을 들이지 않고 + +05:05.790 --> 05:10.200 +그 가격에 집중할 수 있는 가격을 알아내길 원하죠 + +05:10.380 --> 05:15.120 +그러려면 마스크라는 걸 만들어야 해요 + +05:15.270 --> 05:21.300 +트레이너에게 프롬프트에 대해 배울 필요가 없다고 할 때 컨텍스트를 주기 위해 + +05:21.300 --> 05:23.310 +이걸 고려하길 원하죠 + +05:23.310 --> 05:27.960 +하지만 달러 기호 다음에 나오는 토큰을 예측하는 법을 배우세요 + +05:27.960 --> 05:34.860 +마스크를 세팅하는 건 손이 많이 가고 네, 토큰으로 비트를 만들어야 + +05:34.860 --> 05:36.390 +해요 + +05:36.390 --> 05:42.000 +다행히도 안아주는 얼굴 덕분에 아주 쉬워졌습니다 모든 걸 대신 해주는 작고 실용적인 장치가 + +05:42.000 --> 05:42.330 +있죠 + +05:42.330 --> 05:46.500 +데이터 컬레이터라고 부르는데 오직 완성만을 위한 거죠 + +05:46.830 --> 05:49.420 +LM, 그게 우리예요 + +05:49.450 --> 05:51.970 +이제 완성만 하면 되는 거예요 + +05:51.970 --> 05:53.380 +이 문장요 + +05:53.380 --> 05:59.800 +응답 템플릿이라는 걸 만들어내는 겁니다 아주 거창하게 들리지만 그냥 다음에 + +05:59.800 --> 06:06.700 +뭐가 올지 예측하라는 걸 나타내는 텍스트 덩어리가 무엇인지를 의미하죠 + +06:06.700 --> 06:10.480 +이 경우엔 간단히 가격은 $s 사인이죠 + +06:10.480 --> 06:13.120 +그 다음 일을 예측하게 하고 싶어요 + +06:13.120 --> 06:18.490 +응답 템플릿에 넣고 뭔가를 만드는 거죠 데이터 콜렛터의 완성을 + +06:18.490 --> 06:23.800 +위한 인스턴스요 응답 템플릿에만 전달하는 거죠 + +06:23.800 --> 06:30.550 +토큰라이저라고 알려줘야 해요 그럼 그걸 이용해 트레이너로 가는 마스크된 + +06:30.550 --> 06:33.730 +데이터 최종 세트를 빌드하죠 + +06:33.730 --> 06:35.230 +잠시 후에 보실 수 있어요 + +06:35.230 --> 06:40.090 +이건 아주 간단해요 막후에서 아주 정교한 작업을 하고 있죠 우릴 + +06:40.090 --> 06:41.320 +쉽게 해줘요 + +06:42.010 --> 06:42.910 +좋아요 + +06:43.270 --> 06:46.750 +드디어 마지막 중요한 순간이에요 + +06:46.750 --> 06:50.090 +많은 코드처럼 보이지만 사실 그렇게 많지는 않아요 + +06:50.270 --> 06:56.090 +트레이너를 부를 거예요 두 개의 매개 변수를 통과시켜야 해요 + +06:56.090 --> 06:58.460 +그중 하나는 
로라 구성이에요 + +06:58.460 --> 07:02.960 +hyperperameter는 로라 훈련과 관련이 있어요 + +07:02.960 --> 07:08.390 +다른 하나는 감독하에 미세한 조율을 하는 거죠 + +07:08.390 --> 07:15.140 +구성이란 훈련이 어떻게 작동해야 하는지에 대한 일반적인 거죠 제가 한 건 기본적으로 이런 것들을 노트북 + +07:15.140 --> 07:20.060 +상단에 정의한 상수에서 끌어오기 위해 설정했어요 알파나 드롭 아웃 같은 + +07:20.060 --> 07:24.560 +거요 그리고 별로 중요하지 않은 건 사용할 걸 여기 적어뒀어요 + +07:24.560 --> 07:28.610 +이건 중요한 hyperperameter가 아니에요 그냥 특정 + +07:28.610 --> 07:30.110 +값에 설정해야 하는 거죠 + +07:30.140 --> 07:34.910 +로라 매개 변수에 대해 보다시피 알파를 넘깁니다 잘 아는 거죠 다시 + +07:34.940 --> 07:40.610 +정의하진 않을게요 우리가 대상으로 하는 드롭아웃과 대상 모듈이요 + +07:40.610 --> 07:46.950 +그래서 로라 구성과 훈련 매개 변수 SFT 구성에는 실행 이름 같은 게 있어요 + +07:46.950 --> 07:50.160 +몇 개의 편성에서 작업하죠? + +07:50.310 --> 07:52.170 +아주 중요한 16살요 + +07:52.200 --> 07:56.610 +제 경우에는 T4 박스에 있다면 그럴 수도 있겠네요 + +07:56.850 --> 08:00.660 +그러데이션이 쌓이는 게 문제네요 + +08:00.750 --> 08:06.360 +그리고 우리가 사용하기로 한 코사인 스케줄러의 학습률 같은 것도요 + +08:06.510 --> 08:12.960 +그리고 여기 아래쪽에 허브에 푸시하는 걸 원한다고 돼 있어요 + +08:13.050 --> 08:19.050 +저장할 때마다 즉, 배치 단계 5000개마다 이 모델을 허브에 푸시해야 + +08:19.050 --> 08:23.520 +합니다 그래야 허브에 일련의 저장량을 얻을 수 있죠 + +08:23.520 --> 08:26.220 +각각의 지점에서 모델을 볼 수 있어요 + +08:26.520 --> 08:32.700 +그리고 이 이름을 부여하고 개인의 압류로 하고 싶다고 했어요 좋은 결과를 + +08:32.700 --> 08:36.570 +얻을 때까지 아직 공개하지 않게요 + +08:37.710 --> 08:40.740 +이게 마지막 선이에요 + +08:40.740 --> 08:43.990 +특수 훈련 교관이라는 걸 만들었어요 + +08:43.990 --> 08:47.230 +간단한 것만 몇 개 통과하죠 + +08:47.260 --> 08:49.630 +첫째, 기본 모델을 알아야 해요 + +08:49.630 --> 08:54.940 +섬세하게 조정해야 하는 기본 모델은 llama 3이죠 1번요 + +08:55.330 --> 08:57.490 +2번, 훈련 데이터는요? + +08:57.520 --> 08:58.510 +그걸 전달해요 + +08:58.510 --> 09:00.430 +로라의 조건은요? 
+ +09:00.430 --> 09:07.750 +아시다시피 알파, r, 드롭아웃 토큰라이저도 있고 방금 설정한 훈련 매개 + +09:07.750 --> 09:09.430 +변수도 있어요 + +09:09.430 --> 09:16.120 +마지막으로 콜레이터를 통과시킵니다 교활한 객체죠 가격의 $ 기호를 찾아서 + +09:16.120 --> 09:17.770 +알려주는 거예요 + +09:17.800 --> 09:20.110 +그 전은 예측하지 마세요 + +09:20.440 --> 09:22.570 +다음을 걱정하세요 + +09:22.990 --> 09:27.400 +이건 트레이너를 설정하는 거고요 + +09:27.400 --> 09:36.190 +다음 영상에서는 이 선 하나로 미세 튜닝 트레인을 작동시킬 거예요 이 선이 시작이죠 + +09:36.310 --> 09:37.990 +멋진 순간이 될 거예요 + +09:37.990 --> 09:42.070 +그럼 지체 없이 그걸 하러 가죠 diff --git a/week5/community-contributions/subtitles/srts/59507785/en_US.srt b/week5/community-contributions/subtitles/srts/59507785/en_US.srt new file mode 100755 index 0000000..998223d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507785/en_US.srt @@ -0,0 +1,790 @@ +WEBVTT + +00:01.160 --> 00:06.590 +Well, I'm sure you're fed up of me saying that the moment of truth has arrived, but it really has. + +00:06.590 --> 00:07.970 +The moment of truth has arrived. + +00:07.970 --> 00:12.440 +I'm extremely excited, as you could probably tell, and I hope you are too. + +00:12.500 --> 00:17.270 +This is this is a big deal, a big, a big moment for us. + +00:17.270 --> 00:24.680 +So first of all, we are in the week seven day three Colab, and I must confess that I'm doing something + +00:24.710 --> 00:35.690 +a bit naughty in that I have picked a super beefy A100 GPU box, which is the most pricey of the Colab + +00:35.720 --> 00:37.370 +boxes it consumes. + +00:37.370 --> 00:46.790 +It gobbles up 11.77 uh units compute units per hour, and I think the current going rate, if I'm if + +00:46.790 --> 00:52.160 +I'm not mistaken, um, it's roughly 100 units cost about $10. + +00:52.160 --> 00:56.000 +So this is this is spending about a dollar an hour. + +00:56.210 --> 01:00.050 +Um, right now those prices do change from time to time. + +01:00.050 --> 01:05.820 +Um, so, yeah, This is this is definitely not as cheap as as things usually are. 
+ +01:05.820 --> 01:07.260 +And you don't need to do this. + +01:07.260 --> 01:13.050 +You can definitely use a, um, let me see. + +01:13.080 --> 01:14.820 +Uh, change runtime type. + +01:14.820 --> 01:16.560 +You can be using a t4 GPU. + +01:16.590 --> 01:21.030 +I will tell you which parameters to change for that and it will be just fine. + +01:21.090 --> 01:25.560 +Um, but if you don't mind spending a few dollars and you want to have a blast and you want to be changing + +01:25.560 --> 01:31.320 +hyper parameters and training fast, it's beautiful to have an A100 box for a while and experience the + +01:31.320 --> 01:36.540 +raw power of a box with 40GB of GPU Ram, so I love it. + +01:36.540 --> 01:42.450 +I don't I I don't mind giving Google a little bit of money for this, because it's a it's such a treat + +01:42.450 --> 01:46.080 +to have a juicy, powerful box like this. + +01:46.080 --> 01:49.260 +So anyways, we start with some pip installs. + +01:49.260 --> 01:56.850 +This includes this library TRL Transformers Reinforcement Learning from Huggingface, which contains + +01:56.850 --> 02:02.670 +the SFT trainer supervised Fine Tuning Trainer, which is the trainer that we'll be doing the work for + +02:02.670 --> 02:03.730 +us today. + +02:04.090 --> 02:10.990 +Um, and I do want to say I should take just a moment to say that I talked, uh, in the last video, + +02:11.170 --> 02:17.140 +um, a fair bit about training, and I, I sort of did it almost by the by, I talked about about some + +02:17.140 --> 02:22.360 +of the hyperparameters and in doing so talked about a bit of the training process of forward passes + +02:22.360 --> 02:23.440 +and backward passes. + +02:23.440 --> 02:26.830 +And again, for some people, that's old hat that you're very familiar with this. 
+
+02:26.830 --> 02:32.140
+For some people that might have gone over your head and you might be be be saying, you know, can you
+
+02:32.170 --> 02:36.490
+not take more time to better explain the whole training process?
+
+02:36.490 --> 02:43.600
+And one thing I do want to say is that hugging face has made this so accessible, so easy.
+
+02:43.660 --> 02:49.150
+Um, they've made the barrier to entry for training a model so low that whilst it's very helpful to
+
+02:49.180 --> 02:55.360
+have an intuition for what's going on in terms of, of of optimizing, of taking steps in the right
+
+02:55.360 --> 02:57.220
+direction, this is helpful.
+
+02:57.220 --> 03:00.880
+It's not essential to know the detail of the theory.
+
+03:00.940 --> 03:04.390
+Uh, you just have to know enough to be able to tweak the hyperparameters.
+
+03:04.390 --> 03:04.390
+Parameters.
+
+03:04.390 --> 03:07.960
+Hugging face makes it incredibly easy to do the training.
+
+03:07.960 --> 03:13.690
+So if you did feel like some of that went over your head, then rest assured it really doesn't matter.
+
+03:13.690 --> 03:15.760
+The code will make it very clear.
+
+03:15.760 --> 03:20.710
+And if you know this stuff back to front already and you're very familiar with optimization, then great.
+
+03:21.130 --> 03:22.750
+That will make it so much the better.
+
+03:22.750 --> 03:24.220
+It'll make it even easier.
+
+03:24.430 --> 03:27.760
+All right, so we do some some imports.
+
+03:28.120 --> 03:31.180
+And now we have a bunch of parameters to talk about.
+
+03:31.180 --> 03:36.160
+So uh, the base model of course llama 3.1 8 billion.
+
+03:36.160 --> 03:38.680
+We know it well the project name.
+
+03:38.680 --> 03:42.820
+So this is the project name that will be used in weights and biases.
+
+03:42.820 --> 03:47.980
+Uh, when we get to that, to, to show the, the, uh, the results that will compare.
+
+03:47.980 --> 03:51.010
+And we'll also use it when we upload to the Hugging Face hub.
+ +03:51.010 --> 03:55.990 +And I'm using the name Pricer for this project, as in something that comes up with prices. + +03:55.990 --> 04:00.820 +You may remember when we were training a GPT, I called it Pricer GPT. + +04:01.360 --> 04:07.520 +I've kept the project separate because the results will be so different That just because we'll be measuring + +04:07.520 --> 04:10.640 +different quantities, it will be confusing to have them in the same project. + +04:10.910 --> 04:14.600 +Um, so, uh, I've called this just pricer. + +04:15.170 --> 04:19.580 +Uh, hugging face user, you should put your hugging face name in here. + +04:19.670 --> 04:27.020 +Uh, because you'll be wanting to post push your fine tuned models to the hub for your models because + +04:27.020 --> 04:29.180 +you will treasure them and use them in the future. + +04:29.180 --> 04:30.860 +And you can keep them private. + +04:30.860 --> 04:33.440 +Uh, they're just for your own consumption. + +04:34.430 --> 04:38.000 +So when it comes to the data, we need to load in the data set. + +04:38.000 --> 04:41.810 +And here you that you have a choice. + +04:41.840 --> 04:47.990 +If a few weeks ago when we were doing data curation, you did go all the way through and you uploaded + +04:47.990 --> 04:53.960 +the data set to your Hugging Face hub, then you can keep this line in here and it will have your hugging + +04:53.960 --> 04:59.210 +face username and then pricer data, which is what we we called it, and you'll be able to load it in. + +04:59.330 --> 05:05.330 +Alternatively, if you decided just to watch that one out and watch me doing the data set stuff but + +05:05.330 --> 05:08.460 +you didn't, uh, upload it to Huggingface then then. + +05:08.460 --> 05:09.030 +Shame. + +05:09.030 --> 05:11.460 +But still, I do understand there was quite a lot to it. + +05:11.490 --> 05:15.660 +You can simply uncomment this line where I've just hardcoded. + +05:15.660 --> 05:17.010 +We don't need the f there. 
+ +05:17.220 --> 05:24.150 +Uh, we've just I've just hard coded the the the price of data in my hub, which will be made public. + +05:24.150 --> 05:26.460 +So you can just download it there for. + +05:26.460 --> 05:26.730 +Fine. + +05:26.760 --> 05:27.360 +Fine. + +05:27.360 --> 05:29.730 +You don't need to upload your own one. + +05:30.090 --> 05:36.180 +And then maximum sequence length, you'll remember the data is always crafted so that it's no more than + +05:36.180 --> 05:43.080 +179 tokens, adding on a few tokens for the start of sentence and anything any gumph at the end, uh, + +05:43.080 --> 05:46.620 +means that we're going to say maximum sequence length of 182. + +05:46.620 --> 05:50.610 +And this is very important because every training data point will be fitted into this. + +05:50.610 --> 05:55.080 +So that and this needs to be a small enough number to fit in our GPU's memory. + +05:55.260 --> 05:59.640 +And that's why we've cut off our descriptions to fit within this amount. + +06:00.450 --> 06:07.110 +So then a few, uh, sort of administrative things, um, I've come up with something called the run + +06:07.110 --> 06:11.350 +name for each of these runs, which is quite simply the current date the year. + +06:11.350 --> 06:12.010 +Month. + +06:12.010 --> 06:13.720 +Day and hour. + +06:13.720 --> 06:13.990 +Minute. + +06:13.990 --> 06:14.650 +Second. + +06:14.800 --> 06:18.550 +Right now of this run and you'll see why in just a second. + +06:18.580 --> 06:23.830 +I'm going to have something called the Project Run name, which is Pricer, and then a hyphen, and + +06:23.830 --> 06:32.200 +then the date, and then the hub model name where I want to save the model will be the username. + +06:32.200 --> 06:34.480 +And then this here. + +06:34.810 --> 06:35.290 +Uh. + +06:35.290 --> 06:36.670 +And why am I doing this. + +06:36.670 --> 06:40.000 +So sometimes people like to just have one model. 
+ +06:40.000 --> 06:46.720 +And as you run this you just upload different versions that you store against that same model repository. + +06:46.720 --> 06:49.810 +Because remember everything in Huggingface is just a git repo. + +06:49.810 --> 06:56.560 +So you could just keep um, you could basically just keep pushing new versions of your model that will + +06:56.560 --> 07:03.340 +just be, uh, just new versions of that, like, like checking in new versions of code or pushing versions + +07:03.340 --> 07:03.700 +of code. + +07:03.700 --> 07:07.600 +It could just be different versions of the model with the same model name. + +07:07.600 --> 07:13.240 +But I quite like to separate out my different runs and have them as different models, because within + +07:13.240 --> 07:17.230 +them there'll be different versions, potentially as different epochs. + +07:17.230 --> 07:21.070 +And I like to keep them separate because I'll have trained them with different hyperparameters, and + +07:21.070 --> 07:22.450 +I want to keep notes of that. + +07:22.900 --> 07:25.300 +So that's, that's the way I like to do it. + +07:25.390 --> 07:28.420 +Um, not strictly necessary, but I find it helpful. + +07:28.510 --> 07:33.340 +Uh, and just to give you a sense for that, if I just take some if I take the run name, let's start + +07:33.340 --> 07:34.150 +with that. + +07:34.480 --> 07:37.450 +If I just show you what run name is right now. + +07:37.960 --> 07:45.310 +Um, the run name you can see is just the current date that I'm doing this the 22nd and the current + +07:45.340 --> 07:46.150 +time. + +07:46.330 --> 07:50.080 +That time is in UTC in universal time. + +07:50.080 --> 07:52.690 +I'm not actually, it's not actually four minutes past midnight. + +07:53.020 --> 07:55.060 +Uh, so that's the run name. + +07:55.060 --> 07:57.220 +And then what was the other thing? + +07:57.220 --> 08:00.730 +Uh, there's project run name and then hub model name. 
+ +08:00.730 --> 08:07.480 +So project run name is just Pricer followed by that. + +08:07.480 --> 08:12.860 +And then hub model name, which is what it will be uploaded as. + +08:14.720 --> 08:16.190 +Is that so? + +08:16.190 --> 08:20.360 +This will be creating a model with that name after it's run. + +08:20.360 --> 08:26.780 +And so if when I look in my models directory, I see a bunch of these because I've run this too many + +08:26.780 --> 08:29.150 +times, more times than I'm willing to admit. + +08:29.240 --> 08:31.100 +Uh, but it's been great fun. + +08:31.160 --> 08:34.070 +Uh, I've got lots of these pricer models. + +08:34.070 --> 08:36.770 +Uh, and they've all been running. + +08:37.070 --> 08:45.260 +Uh, so just to finish this off, um, I see it just disconnected and reconnected over there. + +08:45.500 --> 08:54.170 +Uh, so the hyperparameters then that we are using for training, uh, you know, are, well, the dimensions + +08:54.170 --> 08:55.430 +of the matrix. + +08:55.490 --> 08:57.110 +Um, and I'm starting with 32. + +08:57.140 --> 08:59.540 +As I say, you can bring that down to eight. + +08:59.540 --> 09:01.520 +Uh, particularly if you're running on a lower box. + +09:01.520 --> 09:02.900 +It'll be just fine. + +09:03.050 --> 09:06.140 +Uh, Laura, Alpha should be double R. + +09:06.170 --> 09:09.440 +Um, and so if you bring this down to eight, then make this 16. + +09:10.040 --> 09:13.050 +Um, the target modules, of course. + +09:13.050 --> 09:14.520 +Are you know it too well. + +09:14.520 --> 09:15.840 +I don't need to tell you what they are. + +09:17.190 --> 09:19.770 +And this is the standard ones for the llama three. + +09:19.800 --> 09:21.960 +These are the modules that you target. + +09:22.080 --> 09:26.250 +The Lora dropout was the thing that I gave quite the long explanation of last time. 
+
+09:26.250 --> 09:33.450
+It's the the the trick to help with regularization, or with making sure that models generalize well
+
+09:33.480 --> 09:40.800
+to new data points by taking 10%, in this case 10% of the neurons, and just wiping them out, setting
+
+09:40.800 --> 09:46.350
+their activations to zero, effectively just removing them from the training process a different 10%
+
+09:46.380 --> 09:47.160
+each time.
+
+09:47.160 --> 09:51.780
+And as a result, the model doesn't become overly dependent on any one neuron.
+
+09:51.810 --> 09:59.160
+It just learns to generally for the whole model to get better at receiving training points and giving
+
+09:59.160 --> 10:03.390
+the right next token so it helps the model generalize.
+
+10:03.540 --> 10:07.020
+10%, 0.1, is a very typical starting point.
+
+10:07.020 --> 10:13.300
+You should try 5% and 20% and see how they perform and quant four bit.
+
+10:13.300 --> 10:13.690
+True.
+
+10:13.720 --> 10:16.150
+Is is quantizing down to four bits.
+
+10:16.690 --> 10:17.530
+Okay.
+
+10:17.560 --> 10:20.740
+The hyperparameters for hyperparameters for training.
+
+10:20.980 --> 10:23.230
+I've set this up to run for three epochs.
+
+10:23.230 --> 10:25.060
+You could do it just for one.
+
+10:25.270 --> 10:30.580
+By all means, you'll have perfectly decent results after one batch size.
+
+10:30.580 --> 10:32.980
+I've got to 16 right here.
+
+10:33.130 --> 10:36.580
+Uh, I would, would you need it with.
+
+10:36.580 --> 10:39.760
+With, um, a juicy A100 box?
+
+10:39.760 --> 10:42.790
+I can pack in a batch size of 16 batches.
+
+10:42.790 --> 10:49.510
+Given that this is the max sequence length, it can cram all of them in and still have just about squeeze
+
+10:49.510 --> 10:51.760
+that into the 40GB of memory.
+
+10:51.820 --> 10:58.210
+Um, but for for for you, if you're going to go with a lower end box like a T4, then this should be
+
+10:58.210 --> 11:00.820
+probably one you could try.
+ +11:00.850 --> 11:01.480 +Try it higher. + +11:01.480 --> 11:04.300 +If you get more GPU memory that's still free. + +11:04.480 --> 11:09.340 +Um, there's this convention that people tend to use powers of two for batch size. + +11:09.340 --> 11:12.640 +So one or 2 or 4 or 8 or 16. + +11:12.820 --> 11:20.290 +Um, and in theory, there's a there's various loose bits of evidence that suggest that if you have + +11:20.290 --> 11:26.380 +it as a power of two, the GPU is better able to parallelize it, and the the performance is slightly + +11:26.380 --> 11:27.160 +better. + +11:27.250 --> 11:33.370 +Um, but there's always the data on that is, is always, uh, just a little bit vague. + +11:33.370 --> 11:40.240 +And so generally speaking, I imagine if you can cram another a little bit more batch, uh, onto your + +11:40.240 --> 11:41.800 +GPU, then you should do so. + +11:41.800 --> 11:44.860 +So I wouldn't shy away from having a batch size of three. + +11:44.890 --> 11:48.940 +If you can fit that on your GPU, it's going to run faster than a batch size of two. + +11:49.000 --> 11:54.340 +Uh, maybe maybe four would would just be that little bit more, be a bit more efficient if it could + +11:54.340 --> 11:54.820 +fit. + +11:54.820 --> 11:59.860 +But I think the general advice is just whatever will fit on your GPU is what you should pick and start + +11:59.860 --> 12:02.080 +with one and see what kind of headroom you have. + +12:02.080 --> 12:08.320 +Unless you're splashing out like me, in which case go for 16 gradient accumulation steps. + +12:08.320 --> 12:09.430 +I explained that last time. + +12:09.430 --> 12:13.570 +I think actually this helps with the improve the memory of the process. + +12:13.600 --> 12:16.760 +Uh, you could try, but But I'm staying with one. + +12:16.760 --> 12:19.040 +You can try that at 2 or 4 as well. 
+ +12:19.340 --> 12:25.670 +Learning rates I think you know understand is super important is how much of a step you take as you + +12:25.670 --> 12:29.210 +optimize to try and take a step in the right direction. + +12:29.210 --> 12:34.250 +And learning rate is a super important hyperparameter that you need to experiment with. + +12:34.370 --> 12:40.130 +Um, and it's one of those things which which, uh, there's no right answer. + +12:40.160 --> 12:42.830 +Learning rate could be too high or it could be too low. + +12:42.860 --> 12:46.220 +The what you're trying to achieve this is going to be a bit hand-wavy. + +12:46.250 --> 12:52.310 +Again, what you're trying to achieve is, if you imagine that the loss, uh, the true the loss of + +12:52.310 --> 12:55.910 +your model, uh, is something which has a big dip in it like this. + +12:55.910 --> 12:59.540 +And you're trying to find this valley, you're trying to locate this valley. + +12:59.540 --> 13:05.330 +Then you want to be taking these steps, uh, along the direction of the valley, dropping so that you + +13:05.330 --> 13:07.460 +will end up at the bottom of this valley. + +13:07.460 --> 13:13.520 +If your learning rate is too high, you might jump over that valley and jump back again and never actually + +13:13.520 --> 13:14.870 +go down into the valley. + +13:14.870 --> 13:16.770 +If your learning rate is too low. + +13:16.800 --> 13:22.170 +You might take little tiny steps and be making two slower progress towards that valley. + +13:22.200 --> 13:24.810 +There's another problem with taking two small steps. + +13:24.810 --> 13:31.560 +Supposing that you don't just have one big valley, but you have a smaller valley and then the big valley. + +13:31.590 --> 13:37.410 +If your learning rate is too low, you might take small steps and end up sinking down into that little + +13:37.410 --> 13:38.010 +valley. 
+
+13:38.010 --> 13:43.650
+And actually every small step you take just goes up the two walls of the little valley and never gets
+
+13:43.650 --> 13:45.120
+out of that little valley.
+
+13:45.180 --> 13:50.340
+And so you never realize that there was this much bigger valley just next door.
+
+13:50.730 --> 13:55.860
+And so that is a key problem.
+
+13:55.860 --> 13:59.040
+It's a common problem with learning rates being too small.
+
+13:59.070 --> 14:02.400
+People call it being stuck in the local minimum.
+
+14:02.400 --> 14:05.370
+This thing is called a minimum and it's local to where you are.
+
+14:05.370 --> 14:10.770
+And you haven't found the the global minimum, which is which is all the way down here because you're
+
+14:10.770 --> 14:12.780
+stuck in the local minimum.
+
+14:12.780 --> 14:16.290
+And that is the problem with having a learning rate that's too low.
+
+14:16.680 --> 14:19.210
+There is this nice trick.
+
+14:19.270 --> 14:25.390
+I picked a learning rate of 0.0001, 1 times ten to the minus four.
+
+14:25.660 --> 14:30.820
+There's a nice trick of using something called a learning rate scheduler, which is something that will
+
+14:30.820 --> 14:36.220
+vary the learning rate and make it get smaller and smaller and smaller over the course of your three
+
+14:36.250 --> 14:40.870
+epochs, until it's pretty much zero by the end of your three epochs.
+
+14:41.200 --> 14:42.340
+Um, and you can give it.
+
+14:42.370 --> 14:45.700
+If you choose to use one of these, you can give it a different shape.
+
+14:45.700 --> 14:53.950
+And cosine is a very nice one, which starts with a learning rate that at first slowly decreases,
+
+14:53.950 --> 14:55.390
+and then it decreases quite a lot.
+
+14:55.390 --> 14:57.250
+And then it tails off at the end.
+
+14:57.400 --> 14:59.650
+Uh, you'll see that visually in a moment.
+
+14:59.650 --> 15:01.960
+And that's a really good, good technique.
+ +15:02.110 --> 15:07.510 +Um, and the final learning rate parameter is called the warm up ratio, which is saying at the very + +15:07.510 --> 15:13.750 +beginning of your training process, things are quite unstable because your model has a lot to learn + +15:13.750 --> 15:15.190 +from the first few data points. + +15:15.190 --> 15:20.020 +And it's quite dangerous to have a big learning rate initially because it would jump all over the place. + +15:20.020 --> 15:27.190 +So warm up ratio says start with a lower learning rate and then warm it up to the learning rate that + +15:27.190 --> 15:28.330 +it becomes the peak. + +15:28.330 --> 15:31.960 +And then start your cosine, uh, trail. + +15:32.230 --> 15:33.550 +And you'll see that visually. + +15:33.550 --> 15:34.420 +So it'll make more sense. + +15:34.420 --> 15:36.880 +But these are these are very sensible settings. + +15:36.880 --> 15:41.980 +But you can definitely experiment with a higher or lower starting learning rate or different scheduler + +15:41.980 --> 15:42.700 +types. + +15:43.090 --> 15:45.760 +And finally the optimizer. + +15:45.910 --> 15:54.430 +So here I am picking the paged Adam w uh w means with weight decay a paged Adam w 32 bit. + +15:54.460 --> 15:57.940 +That is a good optimizer, which has good convergence. + +15:57.940 --> 16:04.480 +It does a good job of finding the the optimal spot, but it comes at a cost of consuming memory. + +16:04.660 --> 16:10.600 +Um, I've put down here a link to a hugging face writeup on the different optimizers that you could + +16:10.600 --> 16:11.260 +pick. + +16:11.320 --> 16:15.910 +Um, it's clear that the most common for Transformers is Adam or Adam. + +16:15.910 --> 16:25.310 +W um, and that Adam, uh, does does do well because it stores the rolling average of prior gradients, + +16:25.550 --> 16:28.820 +and it uses that rather than just the most recent gradient. 
+
+16:28.820 --> 16:32.420
+But of course, by storing that, that's going to take an extra memory footprint.
+
+16:32.420 --> 16:34.640
+And so it's a bit greedy for memory.
+
+16:34.640 --> 16:40.820
+So if you're running out of memory then you have an option to choose a cheaper, less greedy optimizer
+
+16:41.000 --> 16:41.990
+to save some memory.
+
+16:41.990 --> 16:48.590
+But the results might be slightly worse than using the paged Adam w okay.
+
+16:48.620 --> 16:51.680
+And then finally some administrative setup.
+
+16:51.710 --> 16:56.600
+Uh, this number of steps is how many batch steps to take before it.
+
+16:56.630 --> 17:01.310
+It saves progress to weights and biases to show us how things are going.
+
+17:01.460 --> 17:09.860
+Um, and this is how many steps before it actually uploads the model to the hub and saves a proper version
+
+17:09.860 --> 17:10.460
+of it.
+
+17:10.460 --> 17:17.090
+This is whether or not we're logging to weights and biases, so that that is a tour of the parameters.
+
+17:17.090 --> 17:21.770
+And in the next time we really get to the trainer itself.
diff --git a/week5/community-contributions/subtitles/srts/59507785/ja_JP.srt b/week5/community-contributions/subtitles/srts/59507785/ja_JP.srt new file mode 100755 index 0000000..0c55b44 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507785/ja_JP.srt @@ -0,0 +1,667 @@ +WEBVTT + +00:01.160 --> 00:06.590 +さて、 皆さんは私が「真実の瞬間が来た」と言うのにうんざりしていると思うが、 本当に来たのだ。 + +00:06.590 --> 00:07.970 +真実の瞬間がやってきた。 + +00:07.970 --> 00:12.440 +おそらくお分かりになると思うが、 私は非常に興奮している。 + +00:12.500 --> 00:17.270 +これは我々にとって大きな出来事であり、 大きな、 大きな瞬間だ。 + +00:17.270 --> 00:24.680 +まず最初に、 我々は7週目3日目のColabにいるのだが、 正直に告白すると、 + +00:24.710 --> 00:37.370 +私はColabの箱の中で最も高価なA100 GPUの箱を選んだという、 ちょっとエッチなことをしている。 + +00:37.370 --> 00:46.790 +11を食い尽くす。 77ユニットで1時間あたりの計算単位を算出し、 現在のレートは、 私の記憶が間違っていなければ、 + +00:46.790 --> 00:52.160 +だいたい100ユニットで10ドルくらいだと思います。 + +00:52.160 --> 00:56.000 +つまり、 これは1時間に約1ドルを費やしていることになる。 + +00:56.210 --> 01:00.050 +ええと、 今現在、 その値段は時々変わるんだ。 + +01:00.050 --> 01:05.820 +うーん、 そうだね、 これはいつものように安くはないよ。 + +01:05.820 --> 01:07.260 +そして、 こんなことをする必要はない。 + +01:07.260 --> 01:13.050 +間違いなく使えるよ。 + +01:13.080 --> 01:14.820 +ええと、 ランタイムのタイプを変更してください。 + +01:14.820 --> 01:16.560 +t4GPUを使用している可能性があります。 + +01:16.590 --> 01:21.030 +そのためにどのパラメーターを変更すればいいかを教えよう。 + +01:21.090 --> 01:25.560 +うーん、 でも、 もしあなたが数ドルの出費を気にせず、 爆走したい、 ハイパーパラメーターを変更し、 + +01:25.560 --> 01:36.540 +速くトレーニングしたいのであれば、 しばらくA100ボックスを持って、 40GBのGPU Ramを搭載したボックスの生のパワーを体験するのは美しいよ。 + +01:36.540 --> 01:46.080 +なぜなら、 このようなジューシーでパワフルなボックスがあるのはとても喜ばしいことだからだ。 + +01:46.080 --> 01:49.260 +とにかく、 まずはpipのインストールから始めよう。 + +01:49.260 --> 01:56.850 +これには、 HuggingfaceのTRL Transformers Reinforcement Learningというライブラリがあり、 + +01:56.850 --> 02:03.730 +SFTトレーナー監修のFine Tuning Trainerが含まれています。 + +02:04.090 --> 02:10.990 +前回のビデオでは、 トレーニングについて少しお話しましたが、 その際、 + +02:11.170 --> 02:23.440 +ハイパーパラメータについてお話し、 フォワードパスとバックワードパスのトレーニングプロセスについて少しお話しました。 + +02:23.440 --> 02:26.830 +そしてまた、 ある人たちにとっては、 このことはもう古いことで、 よく知っていることだろう。 + +02:26.830 --> 02:36.490 +トレーニングの全過程をもっと時間をかけて説明できないのか? 
+ +02:36.490 --> 02:43.600 +そして、 ひとつ言いたいのは、 ハグフェイスのおかげで、 このことがとても身近に、 とても簡単になったということだ。 + +02:43.660 --> 02:49.150 +モデルをトレーニングするための参入障壁を低くしているので、 最適化や正しい方向へのステップを踏む上で、 + +02:49.180 --> 02:57.220 +何が起こっているのか直感的に理解することは非常に役に立つ。 + +02:57.220 --> 03:00.880 +理論の詳細を知ることは必須ではない。 + +03:00.940 --> 03:04.390 +ハイパーパラメーターを微調整できるくらいの知識があればいいんだ。 + +03:04.390 --> 03:04.390 +パラメーター + +03:04.390 --> 03:07.960 +顔をハグすることで、 トレーニングは驚くほど簡単になる。 + +03:07.960 --> 03:13.690 +だから、 もし頭から抜けてしまったと感じたとしても、 それは本当に重要なことではないので安心してほしい。 + +03:13.690 --> 03:15.760 +規約を見れば一目瞭然だ。 + +03:15.760 --> 03:20.710 +そして、 もしあなたがすでにこのようなことを隅から隅まで知っていて、 最適化に精通しているのであれば、 それは素晴らしいことだ。 + +03:21.130 --> 03:22.750 +そうすればもっと良くなる。 + +03:22.750 --> 03:24.220 +そうすればもっと簡単になる + +03:24.430 --> 03:27.760 +よし、 では輸入をいくつかやってみよう。 + +03:28.120 --> 03:31.180 +そして今、 私たちは話すべきたくさんのパラメーターを持っている。 + +03:31.180 --> 03:36.160 +ベースモデルはもちろんラマ3だ。 180億ドル + +03:36.160 --> 03:38.680 +私たちはそのプロジェクト名をよく知っている。 + +03:38.680 --> 03:42.820 +つまり、 これがウェイトとバイアスで使われるプロジェクト名だ。 + +03:42.820 --> 03:47.980 +そうなったら、 その結果を比較するんだ。 + +03:47.980 --> 03:51.010 +そして、 ハギング・フェイスのハブにアップロードする際にも使う。 + +03:51.010 --> 03:55.990 +そして、 このプロジェクトにはプライサーという名前を使っている。 + +03:55.990 --> 04:00.820 +GPTをトレーニングしていたとき、 私はそれをプライサーGPTと呼んでいたのを覚えているかもしれない。 + +04:01.360 --> 04:07.520 +プロジェクトは別々にしているのは、 結果があまりにも異なるからだ。 異なる量を計測するというだけで、 + +04:07.520 --> 04:10.640 +同じプロジェクトにすると混乱するだろう。 + +04:10.910 --> 04:14.600 +ええと、 だから、 僕はこれをただのプライサーと呼んでいるんだ。 + +04:15.170 --> 04:19.580 +ええと、 ハギング・フェイスのユーザー、 ここにハギング・フェイスの名前を入れるべきだよ。 + +04:19.670 --> 04:29.180 +ええと、 あなたが微調整したモデルを大事にし、 将来的に使用するために、 モデル用のハブにプッシュ投稿したくなるでしょうから。 + +04:29.180 --> 04:30.860 +また、 非公開にすることもできる。 + +04:30.860 --> 04:33.440 +ええと、 自分で食べるだけだよ。 + +04:34.430 --> 04:38.000 +データに関しては、 データセットをロードする必要がある。 + +04:38.000 --> 04:41.810 +そしてここで、 あなたには選択肢がある。 + +04:41.840 --> 04:47.990 +数週間前にデータ・キュレーションを行った際に、 ハギング・フェイスのハブにデータセットをアップロードしたのであれば、 + +04:47.990 --> 04:53.960 +この行をそのままにしておけば、 ハギング・フェイスのユーザー名とプライサー・データが表示され、 + +04:53.960 --> 04:59.210 
+それを読み込むことができます。 + +04:59.330 --> 05:08.460 +あるいは、 もしあなたが、 その1本だけを見て、 僕がデータセットをやっているのを見ようと決めたとしても、 それをHuggingfaceにアップロードしなかったとしたら。 + +05:08.460 --> 05:09.030 +残念だ。 + +05:09.030 --> 05:11.460 +それでも、 かなり多くのことがあったことは理解している。 + +05:11.490 --> 05:15.660 +ハードコードしたこの行をコメントアウトすればいい。 + +05:15.660 --> 05:17.010 +そこにFは必要ない。 + +05:17.220 --> 05:24.150 +ええと、 僕のハブにはデータの価格をハードコーディングしてあるんだ。 + +05:24.150 --> 05:26.460 +そこでダウンロードすればいい。 + +05:26.460 --> 05:26.730 +素晴らしい。 + +05:26.760 --> 05:27.360 +素晴らしい。 + +05:27.360 --> 05:29.730 +自分でアップロードする必要はない。 + +05:30.090 --> 05:36.180 +それから、 最大配列長ですが、 データは常に179トークン以下になるように作られていることを思い出してください。 + +05:36.180 --> 05:46.620 +文頭の数トークンと、 末尾のガムテープのようなものを加えて、 最大配列長182ということになります。 + +05:46.620 --> 05:50.610 +これは非常に重要なことで、 すべてのトレーニング・データ・ポイントがこれに適合することになるからだ。 + +05:50.610 --> 05:55.080 +そのため、 GPUのメモリに収まるような小さな数値にする必要がある。 + +05:55.260 --> 05:59.640 +そのため、 この金額内に収まるように説明を削っている。 + +06:00.450 --> 06:11.350 +それで、 いくつか、 ええと、 管理的なことなんだけど、 それぞれのランにランネームというものを考えたんだ。 + +06:11.350 --> 06:12.010 +月だ。 + +06:12.010 --> 06:13.720 +曜日と時間 + +06:13.720 --> 06:13.990 +分。 + +06:13.990 --> 06:14.650 +セカンドだ。 + +06:14.800 --> 06:18.550 +その理由はすぐにわかるだろう。 + +06:18.580 --> 06:23.830 +プロジェクト・ラン名と呼ばれるもので、 Pricer、 + +06:23.830 --> 06:32.200 +ハイフン、 日付、 そしてモデルを保存するハブモデル名がユーザー名になります。 + +06:32.200 --> 06:34.480 +そしてこれだ。 + +06:34.810 --> 06:35.290 +ええと。 + +06:35.290 --> 06:36.670 +そして、 なぜ私はこんなことをしているのか。 + +06:36.670 --> 06:40.000 +だから時々、 1つのモデルだけを持ちたがる人がいる。 + +06:40.000 --> 06:46.720 +そしてこれを実行しながら、 同じモデル・リポジトリに対して保存する異なるバージョンをアップロードするだけだ。 + +06:46.720 --> 06:49.810 +Huggingfaceのすべてが単なるgitレポであることを覚えておいてほしい。 + +06:49.810 --> 06:56.560 +だから、 基本的には、 新しいバージョンのコードをチェックインしたり、 新しいバージョンのコードをプッシュしたりするだけで、 + +06:56.560 --> 07:03.700 +新しいバージョンのモデルをプッシュし続けることができる。 + +07:03.700 --> 07:07.600 +同じモデル名でもバージョンが違うだけかもしれない。 + +07:07.600 --> 07:17.230 +しかし、 私は自分の異なるランを分けて、 それぞれを異なるモデルとして持ちたいんだ。 + +07:17.230 --> 07:22.450 +ハイパーパラメーターを変えてトレーニングすることもあるし、 そのメモを残しておきたいからだ。 + +07:22.900 --> 07:25.300 +だから、 
それが僕の好きなやり方なんだ。 + +07:25.390 --> 07:28.420 +厳密には必要ではないが、 役に立つと思う。 + +07:28.510 --> 07:34.150 +その感覚を味わってもらうために、 ランネームをいくつか挙げてみよう。 + +07:34.480 --> 07:37.450 +今、 ランの名前をお見せしましょうか? + +07:37.960 --> 07:46.150 +ランネームは、 22日という現在の日付と、 現在の時刻です。 + +07:46.330 --> 07:50.080 +その時間は協定世界時(UTC)である。 + +07:50.080 --> 07:52.690 +実はまだ午前0時4分じゃないんだ。 + +07:53.020 --> 07:55.060 +ええと、 それがランの名前なんだ。 + +07:55.060 --> 07:57.220 +それからもうひとつは? + +07:57.220 --> 08:00.730 +プロジェクト・ラン名とハブ・モデル名がある。 + +08:00.730 --> 08:07.480 +だから、 プロジェクト・ランの名前はPricerと続くだけだ。 + +08:07.480 --> 08:12.860 +そして、 アップロードされるハブのモデル名。 + +08:14.720 --> 08:16.190 +そうなのか? + +08:16.190 --> 08:20.360 +これは、 実行後にその名前のモデルを作成することになる。 + +08:20.360 --> 08:29.150 +モデル・ディレクトリを見ると、 このようなものがたくさんある。 + +08:29.240 --> 08:31.100 +でも、 とても楽しかった。 + +08:31.160 --> 08:34.070 +あ、 この手の値段の高いモデルはたくさん持っているよ。 + +08:34.070 --> 08:36.770 +みんな走っていたよ。 + +08:37.070 --> 08:45.260 +ええと、 それで最後に、 ええと、 あそこで接続が切れたり切れたりしてるのが見えるんだけど。 + +08:45.500 --> 08:55.430 +トレーニングに使うハイパーパラメータは、 行列の次元です。 + +08:55.490 --> 08:57.110 +ええと、 32から始めます。 + +08:57.140 --> 08:59.540 +だから、 それを8つまで減らすことができる。 + +08:59.540 --> 09:01.520 +特に下位のボックスで走っている場合はね。 + +09:01.520 --> 09:02.900 +大丈夫だよ。 + +09:03.050 --> 09:06.140 +ええと、 ローラ、 アルファはダブルRのはずだよ。 + +09:06.170 --> 09:09.440 +ええと、 これを8つに減らすと16になる。 + +09:10.040 --> 09:13.050 +もちろん、 ターゲット・モジュールだ。 + +09:13.050 --> 09:14.520 +あなたはそれを知りすぎている。 + +09:14.520 --> 09:15.840 +それが何であるかは言うまでもない。 + +09:17.190 --> 09:19.770 +そして、 これがラマ3世のスタンダードなものだ。 + +09:19.800 --> 09:21.960 +これらは、 あなたがターゲットとするモジュールである。 + +09:22.080 --> 09:26.250 +ローラの脱落は、 前回かなり長い説明をしたことだ。 + +09:26.250 --> 09:33.450 +正則化、 つまりモデルが新しいデータポイントに対してうまく汎化できるようにするためのトリックで、 + +09:33.480 --> 09:47.160 +ニューロンの10%(この場合は10%)を取り出し、 それらを消去してアクティブ度をゼロにする。 + +09:47.160 --> 09:51.780 +その結果、 モデルは特定のニューロンに過度に依存することはない。 + +09:51.810 --> 09:59.160 +モデル全体がトレーニングポイントを受け取り、 正しい次のトークンを与えることができるようになるよう、 + +09:59.160 --> 10:03.390 +一般的に学習しているのだ。 + +10:03.540 --> 10:07.020 +10%は10である。 1は非常に典型的な出発点だ。 + +10:07.020 --> 10:13.300 
+5%と20%を試してみて、 そのパフォーマンスとビットの量を見てみるべきだ。 + +10:13.300 --> 10:13.690 +その通りだ。 + +10:13.720 --> 10:16.150 +これは4ビットに量子化することである。 + +10:16.690 --> 10:17.530 +オーケー。 + +10:17.560 --> 10:20.740 +トレーニング用のハイパーパラメータ。 + +10:20.980 --> 10:23.230 +私はこれを3エポック実行するように設定した。 + +10:23.230 --> 10:25.060 +一人だけならできるだろう。 + +10:25.270 --> 10:30.580 +ぜひとも、 1回のバッチサイズで申し分のない結果が得られるだろう。 + +10:30.580 --> 10:32.980 +私はここで16番を任された。 + +10:33.130 --> 10:36.580 +ええと、 一緒に必要ですか? + +10:36.580 --> 10:39.760 +ジューシーなA100の箱で? + +10:39.760 --> 10:42.790 +私は16バッチで包装できる。 + +10:42.790 --> 10:51.760 +これが最大シーケンス長であることを考えると、 すべてのシーケンスを詰め込んでも、 40GBのメモリにギリギリ収まる。 + +10:51.820 --> 11:00.820 +うーん、 でも、 もしT4のようなローエンドボックスを使うつもりなら、 これを試してみるのもいいかもしれないね。 + +11:00.850 --> 11:01.480 +もっと高くしてみて。 + +11:01.480 --> 11:04.300 +もしGPUメモリが増えたとしても、 それはまだ無料だ。 + +11:04.480 --> 11:09.340 +バッチサイズには2のべき乗を使うという慣例があるんだ。 + +11:09.340 --> 11:12.640 +つまり、 1か2か4か8か16か。 + +11:12.820 --> 11:20.290 +理論的には、 2のべき乗にするとGPUが並列化しやすくなり、 パフォーマンスがわずかに向上するという、 + +11:20.290 --> 11:27.160 +さまざまなゆるい証拠があります。 + +11:27.250 --> 11:33.370 +でも、 そのデータはいつも、 ちょっと曖昧なんだ。 + +11:33.370 --> 11:41.800 +だから一般的に言って、 GPUにもうちょっと多くのバッチを詰め込めるなら、 そうすべきだと思う。 + +11:41.800 --> 11:44.860 +だから、 私は3人分のバッチを作ることをためらわない。 + +11:44.890 --> 11:48.940 +それをGPUに搭載できれば、 2つのバッチサイズよりも高速に実行できるだろう。 + +11:49.000 --> 11:54.820 +ええと、 もしかしたら、 4つあれば、 もう少し、 もう少し効率的になるかもしれない。 + +11:54.820 --> 12:02.080 +ただ、 一般的なアドバイスとしては、 GPUに搭載できるものなら何でもいい。 + +12:02.080 --> 12:08.320 +私のように大枚をはたくのでなければ、 16段のグラデーション蓄積を目指そう。 + +12:08.320 --> 12:09.430 +それは前回説明した。 + +12:09.430 --> 12:13.570 +実際、 これはプロセスの記憶を向上させるのに役立っていると思う。 + +12:13.600 --> 12:16.760 +ああ、 やってみてもいいけど、 でも僕は1人でいるよ。 + +12:16.760 --> 12:19.040 +2、 4番でも試すことができる。 + +12:19.340 --> 12:29.210 +学習率......超重要なのは、 正しい方向への一歩を踏み出そうと最適化するときに、 どれだけの一歩を踏み出すかということだと理解していると思う。 + +12:29.210 --> 12:34.250 +そして学習率は、 実験が必要な超重要なハイパーパラメーターだ。 + +12:34.370 --> 12:40.130 +うーん、 これは正解がないことのひとつなんだ。 + +12:40.160 --> 12:42.830 +学習率は高すぎるかもしれないし、 低すぎるかもしれない。 + +12:42.860 --> 12:46.220 
+あなたが達成しようとしていることは、 少し手探りなものになるだろう。 + +12:46.250 --> 12:55.910 +繰り返しになりますが、 あなたが達成しようとしているのは、 あなたのモデルの本当の損失が、 このように大きなくぼみを持つものだと想像することです。 + +12:55.910 --> 12:59.540 +そして、 あなたはこの谷を見つけようとしている。 + +12:59.540 --> 13:07.460 +そして、 谷の方向に沿ってステップを踏み、 谷の底にたどり着くように下がっていく。 + +13:07.460 --> 13:14.870 +学習率が高すぎると、 谷を飛び越えてはまた戻り、 実際に谷に降りることはないかもしれない。 + +13:14.870 --> 13:16.770 +学習率が低すぎる場合 + +13:16.800 --> 13:22.170 +小さな小さなステップを踏みながら、 その谷に向かって2歩ずつゆっくり進んでいるかもしれない。 + +13:22.200 --> 13:24.810 +小さなステップを2つ踏むことには、 もう一つ問題がある。 + +13:24.810 --> 13:31.560 +仮に大きな谷が1つだけでなく、 小さな谷と大きな谷があるとする。 + +13:31.590 --> 13:38.010 +学習率が低すぎると、 小さな一歩を踏み出しても、 小さな谷に沈んでしまうかもしれない。 + +13:38.010 --> 13:45.120 +そして実際、 あなたが踏み出す小さな一歩一歩は、 小さな谷の二つの壁を登っていくだけで、 その小さな谷から出ることはない。 + +13:45.180 --> 13:50.340 +だから、 すぐ隣にもっと大きな価値の谷があったことに気づかない。 + +13:50.730 --> 13:55.860 +それが重要な問題だ。 + +13:55.860 --> 13:59.040 +学習率が小さすぎるのはよくある問題だ。 + +13:59.070 --> 14:02.400 +人はそれをローカルミニマムから抜け出せないと呼ぶ。 + +14:02.400 --> 14:05.370 +これはミニマムと呼ばれるもので、 あなたがいる場所のローカルなものだ。 + +14:05.370 --> 14:12.780 +ローカル・ミニマムから抜け出せないので、 グローバル・ミニマムはまだ見つかっていない。 + +14:12.780 --> 14:16.290 +そしてそれが、 学習率が低すぎることの問題なのだ。 + +14:16.680 --> 14:19.210 +こんな素敵なトリックがある。 + +14:19.270 --> 14:25.390 +学習率は0を選んだ。 10の00011乗マイナス4。 + +14:25.660 --> 14:30.820 +学習率スケジューラーというものを使うと、 学習率を変化させ、 3回のエポックの間にどんどん小さくしていき、 + +14:30.820 --> 14:36.220 +3回のエポックが終わるころには学習率がほとんどゼロになるようにする、 + +14:36.250 --> 14:40.870 +いいトリックがある。 + +14:41.200 --> 14:42.340 +ええと、 そして、 あなたはそれを与えることができる。 + +14:42.370 --> 14:45.700 +これらのいずれかを使用する場合は、 異なる形状を与えることができる。 + +14:45.700 --> 14:55.390 +そしてコサインはとてもいいもので、 学習率が徐々に低下していくところから始まり、 その後かなり低下していく。 + +14:55.390 --> 14:57.250 +そして最後には尻すぼみになる。 + +14:57.400 --> 14:59.650 +すぐに視覚的にわかるよ。 + +14:59.650 --> 15:01.960 +そして、 それは本当に良い、 良いテクニックだ。 + +15:02.110 --> 15:07.510 +最終的な学習率のパラメーターはウォームアップ率と呼ばれるもので、 学習プロセスの最初の段階では、 + +15:07.510 --> 15:15.190 +モデルは最初の数データポイントから多くのことを学ばなければならないため、 かなり不安定になると言っています。 + +15:15.190 --> 15:20.020 +それに、 最初に学習率を大きくするのはかなり危険だ。 + +15:20.020 --> 15:28.330 +つまり、 
ウォームアップの比率は、 低い学習率から始めて、 ピークとなる学習率までウォームアップさせるということだ。 + +15:28.330 --> 15:31.960 +そしてコサインのトレイルを始めるんだ。 + +15:32.230 --> 15:33.550 +そして、 それを視覚的に見ることができる。 + +15:33.550 --> 15:34.420 +だから、 もっと意味がある。 + +15:34.420 --> 15:36.880 +しかし、 これらは非常に賢明な設定だ。 + +15:36.880 --> 15:42.700 +しかし、 学習開始率を高くしたり低くしたり、 スケジューラーの種類を変えてみたりすることは間違いなくできる。 + +15:43.090 --> 15:45.760 +そして最後にオプティマイザーだ。 + +15:45.910 --> 15:54.430 +だから、 ここではページングされたアダムwを選んでいる。 wとは、 ページングされたアダムw 32ビットのウェイト減衰を意味する。 + +15:54.460 --> 15:57.940 +これは収束性の良いオプティマイザーだ。 + +15:57.940 --> 16:04.480 +最適な場所を見つけるのは良い仕事だが、 その代償としてメモリを消費する。 + +16:04.660 --> 16:11.260 +ええと、 あなたが選ぶことのできるさまざまなオプティマイザーについてのハグフェイス・ライトアップへのリンクをここに貼っておきます。 + +16:11.320 --> 16:15.910 +ええと、 トランスフォーマーで最も一般的なのはアダムかアダムであることは明らかです。 + +16:15.910 --> 16:25.310 +アダムは、 過去の勾配のローリング平均を保存し、 最新の勾配だけでなく、 + +16:25.550 --> 16:28.820 +それを使うからだ。 + +16:28.820 --> 16:32.420 +しかしもちろん、 それを保存することで、 余計なメモリ・フットプリントが必要になる。 + +16:32.420 --> 16:34.640 +だから、 ちょっとメモリに貪欲なんだ。 + +16:34.640 --> 16:41.990 +だから、 もしメモリが足りなくなったら、 より安価で、 より貪欲でないオプティマイザを選んでメモリを節約するという選択肢もある。 + +16:41.990 --> 16:48.590 +しかし、 その結果は、 ページングされたアダムを使うよりも若干悪いかもしれない。 + +16:48.620 --> 16:51.680 +そして最後に、 いくつかの管理設定を行う。 + +16:51.710 --> 16:56.600 +ええと、 このステップ数は、 その前に何回バッチステップを踏むかということです。 + +16:56.630 --> 17:01.310 +ウエイトとバイアスに進捗状況を保存し、 物事の進捗状況を示してくれる。 + +17:01.460 --> 17:10.460 +モデルを実際にハブにアップロードし、 適切なバージョンを保存するまでに、 このようなステップがあります。 + +17:10.460 --> 17:17.090 +これは、 ウェイトとバイアスにログを取るかどうかということで、 パラメーターのツアーになる。 + +17:17.090 --> 17:21.770 +そして次回は、 本当にトレーナー自身に迫っていく。 diff --git a/week5/community-contributions/subtitles/srts/59507785/ko_KR.srt b/week5/community-contributions/subtitles/srts/59507785/ko_KR.srt new file mode 100755 index 0000000..01c5a25 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59507785/ko_KR.srt @@ -0,0 +1,754 @@ +WEBVTT + +00:01.160 --> 00:06.590 +제가 이런 말 하는 게 지겹겠지만 진실의 순간이 왔어요 + +00:06.590 --> 00:07.970 +진실의 순간이 왔어요 + +00:07.970 --> 00:12.440 +보시다시피 전 정말 흥분돼요 여러분도 그렇길 바라요 + +00:12.500 --> 00:17.270 
+이건 저희에게 아주 중요한 순간이에요 + +00:17.270 --> 00:24.680 +콜랍 비트를 7일간 사용하고 있는데요 솔직히 말하면 좀 야한 + +00:24.710 --> 00:37.370 +걸 준비했어요 아주 우람한 A100 GPU 박스를 골랐죠 콜랍 박스에서 가장 비싼 제품이에요 + +00:37.370 --> 00:46.790 +11점을 기록하네요 시간당 계산 단위 77개 현재 시세는 제가 잘못 + +00:46.790 --> 00:52.160 +안 게 아니라면 약 100개 단위 약 10달러네요 + +00:52.160 --> 00:56.000 +시간당 1달러 정도를 쓰는 거예요 + +00:56.210 --> 01:00.050 +지금은 가격이 때때로 바뀌어요 + +01:00.050 --> 01:05.820 +네, 이건 확실히 평소보다 싸지 않아요 + +01:05.820 --> 01:07.260 +이럴 필요 없어요 + +01:07.260 --> 01:13.050 +꼭 필요한 건 어디 보죠 + +01:13.080 --> 01:14.820 +런타임 타입을 변경하세요 + +01:14.820 --> 01:16.560 +t4 GPU를 사용할 수도 있어요 + +01:16.590 --> 01:21.030 +어떤 매개 변수를 변경할지 알려드릴게요, 괜찮을 거예요 + +01:21.090 --> 01:25.560 +하지만 몇 달러만 써도 블래스트를 경험하고 하이퍼 매개변수를 + +01:25.560 --> 01:31.320 +바꾸고 빠르게 훈련하고 싶다면 A100 박스를 사용하면 됩니다 GPU + +01:31.320 --> 01:36.540 +40GB가 있는 박스의 힘을 경험해보세요 + +01:36.540 --> 01:42.450 +구글에 돈을 좀 쥐여줘도 괜찮아요 이렇게 즙이 많고 + +01:42.450 --> 01:46.080 +강력한 비트 박스가 있잖아요 + +01:46.080 --> 01:49.260 +어쨌든 파이프 설치부터 시작할게요 + +01:49.260 --> 01:56.850 +TRL 트랜스포머 강화 학습 프로그램인 HINGPace 라이브러리도 포함됩니다 + +01:56.850 --> 02:03.730 +SFT 트레이너가 감독한 파인 튜닝 트레이너로 오늘 저희가 할 트레이너죠 + +02:04.090 --> 02:10.990 +잠시 드릴 말씀이 있는데요 지난 비디오에서 훈련에 관해 + +02:11.170 --> 02:17.140 +조금 다뤘습니다 비트를 통해 하이퍼파라미터도 + +02:17.140 --> 02:23.440 +언급했고 전후 패스 훈련 과정도 조금 다뤘죠 + +02:23.440 --> 02:26.830 +어떤 사람들은 그런 게 익숙할 거예요 + +02:26.830 --> 02:32.140 +어떤 사람들은 당신의 머리 위에 있는 것을 요구할 수도 있습니다. HDTT를 + +02:32.170 --> 02:36.490 +더 잘 설명하기 위해 시간을 더 들이지 않을 수 있나요? + +02:36.490 --> 02:43.600 +한 가지 말씀드리고 싶은 건 포옹하는 얼굴 덕분에 이 과정이 아주 쉬워졌다는 거예요 + +02:43.660 --> 02:49.150 +훈련에 있어서 진입 장벽이 너무 낮아서 직관력을 발휘하는 + +02:49.180 --> 02:55.360 +게 도움이 되긴 해요 최적화하고 올바른 방향으로 나아가는 + +02:55.360 --> 02:57.220 +직관력요 + +02:57.220 --> 03:00.880 +이론의 세부 사항을 알 필요는 없어요 + +03:00.940 --> 03:04.390 +하이퍼파라미터 조정할 만큼만 알면 돼요 + +03:04.390 --> 03:04.390 +매개 변수요 + +03:04.390 --> 03:07.960 +얼굴을 끌어안으면 훈련이 아주 쉬워져요 + +03:07.960 --> 03:13.690 +만약 어떤 것들이 여러분 머리 위를 지나갔다고 생각하신다면 걱정하지 마세요. 중요하지 않아요. 
HET + +03:13.690 --> 03:15.760 +코드가 아주 명확할 거예요 + +03:15.760 --> 03:20.710 +이미 이 모든 걸 알고 최적화에 익숙하다면 아주 좋아요 + +03:21.130 --> 03:22.750 +그럼 훨씬 더 좋겠죠 + +03:22.750 --> 03:24.220 +그럼 더 쉽겠죠 + +03:24.430 --> 03:27.760 +수입을 좀 해 보죠 + +03:28.120 --> 03:31.180 +이제 매개 변수가 많이 생겼죠 + +03:31.180 --> 03:36.160 +기본 모델은 당연히 라마 3이고요 180억 달러요 + +03:36.160 --> 03:38.680 +프로젝트 이름은 잘 알죠 + +03:38.680 --> 03:42.820 +무게와 편향에 사용될 프로젝트 이름이에요 + +03:42.820 --> 03:47.980 +Get을 하면 비교할 수 있는 결과를 보여줄 거예요 + +03:47.980 --> 03:51.010 +H깅 페이스 허브에 업로드 할 때도 사용할 거예요 + +03:51.010 --> 03:55.990 +이 프로젝트에선 프라이서란 이름을 사용하고 있어요 가격과 관련된 뭔가처럼요 + +03:55.990 --> 04:00.820 +기억하실지 모르겠지만 GPT를 훈련할 때 프라이서 GPT라고 불렀어요 + +04:01.360 --> 04:07.520 +결과가 다를까 봐 프로젝트를 분리해 뒀어요 다른 양을 측정할 거라서 같은 + +04:07.520 --> 04:10.640 +프로젝트에 두면 헷갈릴 수 있거든요 + +04:10.910 --> 04:14.600 +그래서 프라이서라고 이름 지었어요 + +04:15.170 --> 04:19.580 +아, 안는 얼굴 사용자요 여기에 안는 얼굴 이름을 넣으세요 + +04:19.670 --> 04:27.020 +정밀 튜닝 모델을 허브에 post 하고 싶을 테니까요. 왜냐하면 여러분이 그것들을 소중히 여기고 미래에 + +04:27.020 --> 04:29.180 +사용하게 될 테니까요. 
+ +04:29.180 --> 04:30.860 +비밀로 간직할 수 있어요 + +04:30.860 --> 04:33.440 +그냥 여러분이 드실 거예요 + +04:34.430 --> 04:38.000 +데이터에 관해서는 데이터 세트를 로드해야 해요 + +04:38.000 --> 04:41.810 +여기선 선택할 수 있어요 + +04:41.840 --> 04:47.990 +몇 주 전에 데이터 큐레이션을 할 때 데이터 세트를 포옹 페이스 허브에 업로드했죠 + +04:47.990 --> 04:53.960 +이 선을 여기 둘 수 있어요 포옹 페이스 사용자 이름과 프라이서 데이터죠 + +04:53.960 --> 04:59.210 +우리가 부르는 이름이에요 로드할 수 있어요 + +04:59.330 --> 05:05.330 +아니면 그냥 이걸 보면서 데이터셋 작업하는 걸 보든지요 허깅페이스에 + +05:05.330 --> 05:08.460 +업로드 안 한 거요 + +05:08.460 --> 05:09.030 +안타깝네요 + +05:09.030 --> 05:11.460 +그래도 많은 일이 있었다는 건 알아요 + +05:11.490 --> 05:15.660 +하드코딩한 이 라인을 주석 처리 해제할 수 있어요 + +05:15.660 --> 05:17.010 +F는 필요 없어요 + +05:17.220 --> 05:24.150 +하드 코드화 했어요 허브에 데이터 가격을요 공개될 거예요 + +05:24.150 --> 05:26.460 +그냥 다운로드 하세요 + +05:26.460 --> 05:26.730 +좋아요 + +05:26.760 --> 05:27.360 +좋아요 + +05:27.360 --> 05:29.730 +직접 업로드 안 해도 돼요 + +05:30.090 --> 05:36.180 +최대 시퀀스 길이는 데이터가 항상 조각되어서 179개의 토큰을 + +05:36.180 --> 05:43.080 +넘지 않게 됩니다 문장 시작에 토큰 몇 개를 더하고 끝에 검프한 것을 추가하면 + +05:43.080 --> 05:46.620 +최대 시퀀스 길이는 182개가 되죠 + +05:46.620 --> 05:50.610 +이건 아주 중요해요 모든 훈련 데이터 포인트가 여기에 맞물리거든요 + +05:50.610 --> 05:55.080 +GPU 메모리에 들어갈 만큼 작은 숫자여야 하죠 + +05:55.260 --> 05:59.640 +그래서 이 정도 양에 맞게 설명을 잘라낸 거예요 + +06:00.450 --> 06:07.110 +몇 가지 관리적인 것들을 살펴봅시다 각 실행의 실행 이름이라는 것을 생각해 냈습니다 + +06:07.110 --> 06:11.350 +간단하게 현재 날짜를 나타내는 것이죠 + +06:11.350 --> 06:12.010 +한 달요 + +06:12.010 --> 06:13.720 +낮과 시간요 + +06:13.720 --> 06:13.990 +1분 + +06:13.990 --> 06:14.650 +두 번째요 + +06:14.800 --> 06:18.550 +잠시 후에 그 이유를 알게 될 거예요 + +06:18.580 --> 06:23.830 +프로젝트 Run name이라고 입력할게요 Pricer요 + +06:23.830 --> 06:32.200 +그다음 하이픈, 날짜 허브 모델 이름을 입력하죠 모델을 저장하려는 건 사용자 이름이 될 거예요 + +06:32.200 --> 06:34.480 +그리고 이거요 + +06:34.810 --> 06:35.290 +네 + +06:35.290 --> 06:36.670 +왜 이걸 하냐고요? 
+ +06:36.670 --> 06:40.000 +그래서 사람들은 모델을 한 명만 쓰기도 해요 + +06:40.000 --> 06:46.720 +실행하면 같은 모델 리포지토리에 저장한 다른 버전을 업로드만 하면 되죠 + +06:46.720 --> 06:49.810 +허그페이스에서 하는 건 전부 깃 회수예요 + +06:49.810 --> 06:56.560 +그러니까 여러분은 여러분의 모델의 새 버전을 계속 푸시할 수 있는 겁니다 그것의 + +06:56.560 --> 07:03.700 +새 버전이 되는 거죠 코드의 새 버전을 확인하거나 버전의 코드를 푸시하는 것처럼요 + +07:03.700 --> 07:07.600 +모델 이름은 같지만 모델의 다른 버전일 수도 있죠 + +07:07.600 --> 07:13.240 +하지만 저는 다른 실행을 분리해서 다른 모델로 두는 걸 좋아합니다 그 안에 다른 + +07:13.240 --> 07:17.230 +버전이 있으니까요 다른 시대로서도 가능하고요 + +07:17.230 --> 07:21.070 +저는 분리하는 걸 좋아해요 다른 hyperperameter로 훈련했으니까요 + +07:21.070 --> 07:22.450 +그걸 기록하고 싶어요 + +07:22.900 --> 07:25.300 +전 이렇게 하는 게 좋아요 + +07:25.390 --> 07:28.420 +꼭 필요한 건 아니지만 도움이 될 거예요 + +07:28.510 --> 07:33.340 +그에 대한 느낌을 드리기 위해 run name을 선택해보죠 그것부터 + +07:33.340 --> 07:34.150 +시작하죠 + +07:34.480 --> 07:37.450 +지금 실행 이름을 보여드릴게요 + +07:37.960 --> 07:46.150 +여기 보이는 실행 이름은 현재의 날짜입니다 22일과 현재 시간이죠 + +07:46.330 --> 07:50.080 +우주 시간으로 이루어진 협회의 시간이죠 + +07:50.080 --> 07:52.690 +사실 지금은 12시 4분이 아니에요 + +07:53.020 --> 07:55.060 +그게 실행명이에요 + +07:55.060 --> 07:57.220 +또 뭐가 있었죠? + +07:57.220 --> 08:00.730 +프로젝트 실행 이름과 허브 모델 이름이 있네요 + +08:00.730 --> 08:07.480 +프로젝트 실행 이름은 Pricer로 하고요 + +08:07.480 --> 08:12.860 +그리고 업로드 될 허브 모델 이름이죠 + +08:14.720 --> 08:16.190 +그래요? 
+ +08:16.190 --> 08:20.360 +실행된 후에 그 이름으로 모델을 생성할 거예요 + +08:20.360 --> 08:26.780 +모델 디렉터리를 보면 이런 게 잔뜩 보여요 너무 많이 실행했거든요 인정하는 + +08:26.780 --> 08:29.150 +것보다 더 많이요 + +08:29.240 --> 08:31.100 +하지만 정말 즐거웠어요 + +08:31.160 --> 08:34.070 +프라이서 모델은 많아요 + +08:34.070 --> 08:36.770 +전부 도망치고 있어요 + +08:37.070 --> 08:45.260 +이제 마무리할게요 방금 연결됐다가 끊겼다가 하네요 + +08:45.500 --> 08:55.430 +훈련에 사용하는 하이퍼파라미터는 매트릭스의 차원이에요 + +08:55.490 --> 08:57.110 +32명부터 시작할게요 + +08:57.140 --> 08:59.540 +아까 말했듯이 8명까지 줄일 수 있어요 + +08:59.540 --> 09:01.520 +특히 로우 박스라면요 + +09:01.520 --> 09:02.900 +괜찮을 거예요 + +09:03.050 --> 09:06.140 +로라, 알파는 더블 R이에요 + +09:06.170 --> 09:09.440 +이걸 8로 내리면 16이 되는 거죠 + +09:10.040 --> 09:13.050 +물론 목표 모듈이죠 + +09:13.050 --> 09:14.520 +너무 잘 아시네요 + +09:14.520 --> 09:15.840 +그게 뭔지는 말 안 해도 알겠죠 + +09:17.190 --> 09:19.770 +이건 라마 3의 기본 모델이에요 + +09:19.800 --> 09:21.960 +이 모듈이 목표예요 + +09:22.080 --> 09:26.250 +로라 중퇴는 지난번에 꽤 길게 설명한 거예요 + +09:26.250 --> 09:33.450 +정규화에 도움이 되는 방법입니다 새로운 데이터 포인트에 모델이 일반화되도록 하는 + +09:33.480 --> 09:40.800 +거죠 이 경우에는 10%의 뉴런을 가져다가 활성화되지 않게 하고 완전히 지워버리는 + +09:40.800 --> 09:47.160 +겁니다 훈련 과정에서 매번 다른 10%씩 뉴런을 제거하는 거죠 + +09:47.160 --> 09:51.780 +그 결과, 모델은 특정 뉴런에 지나치게 의존하지 않아요 + +09:51.810 --> 09:59.160 +전체 모델을 향상시키기 위해 전반적으로 학습합니다. 훈련 포인트를 받고 올바른 다음 토큰을 + +09:59.160 --> 10:03.390 +주는 것을요. 모델이 일반화되도록 돕죠. + +10:03.540 --> 10:07.020 +10%도 10이에요 1은 아주 전형적인 출발점이에요 + +10:07.020 --> 10:13.300 +5%와 20%로 해보고 퀀트 포 비츠로 어떻게 하나 봐요 + +10:13.300 --> 10:13.690 +맞아요 + +10:13.720 --> 10:16.150 +4분의 1로 수량화하고 있어요 + +10:16.690 --> 10:17.530 +네 + +10:17.560 --> 10:20.740 +hyperperameter를 위한 hyperperameter 훈련용으로요 + +10:20.980 --> 10:23.230 +세 개 개 개혁에 사용할 계획이에요 + +10:23.230 --> 10:25.060 +한 명만 해도 돼요 + +10:25.270 --> 10:30.580 +한 번 만든 양이면 완벽한 결과가 나올 거예요 + +10:30.580 --> 10:32.980 +1만 6천 달러예요 + +10:33.130 --> 10:36.580 +같이 필요하실 것 같아요 + +10:36.580 --> 10:39.760 +끝내주는 A100 박스로요? 
+ +10:39.760 --> 10:42.790 +16번씩 포장할 수 있어요 + +10:42.790 --> 10:49.510 +이게 최대 시퀀스 길이인 걸 고려하면 전부 밀어넣을 수 있어요 40GB 메모리에 + +10:49.510 --> 10:51.760 +겨우 넣을 수 있죠 + +10:51.820 --> 10:58.210 +하지만 T4처럼 낮은 박스를 사용하고 싶다면 이 제품을 사용해 + +10:58.210 --> 11:00.820 +보는 게 좋을 거예요 + +11:00.850 --> 11:01.480 +더 높이 해봐요 + +11:01.480 --> 11:04.300 +GPU 메모리가 더 많으면 get get은 여전히 공짜죠 + +11:04.480 --> 11:09.340 +배치 사이즈를 위해 파워 2를 쓰는 게 관례예요 + +11:09.340 --> 11:12.640 +1, 2, 4, 8, 16명이요 + +11:12.820 --> 11:20.290 +이론상으로는 2의 제곱으로 설정했을 때 GPU 성능이 + +11:20.290 --> 11:27.160 +더 좋아진다는 여러 가지 불확실한 증거가 있는데요 + +11:27.250 --> 11:33.370 +하지만 비트에 관한 데이터는 항상 좀 모호해요 + +11:33.370 --> 11:40.240 +일반적으로 말하자면 GPU 위에 좀 더 많은 비트를 밀어 넣을 수 있다면 + +11:40.240 --> 11:41.800 +그렇게 해야죠 + +11:41.800 --> 11:44.860 +그래서 3개 분량도 주저 없이 만들 수 있어요 + +11:44.890 --> 11:48.940 +GPU 안에 넣을 수 있다면 배치 크기 2보다 더 빨리 실행될 거예요 + +11:49.000 --> 11:54.820 +비트가 더 잘 맞는다면 4개가 더 효율적일 거예요 + +11:54.820 --> 11:59.860 +일반적으로 권고하자면 GPU 크기에 맞는 걸 선택해서 그걸로 시작하고 + +11:59.860 --> 12:02.080 +상당한 공간을 확보하세요 + +12:02.080 --> 12:08.320 +저처럼 돈을 펑펑 쓰실 게 아니라면 16단계 그러데이션 누적 과정을 거치세요 + +12:08.320 --> 12:09.430 +저번에 설명했잖아요 + +12:09.430 --> 12:13.570 +메모리 증진에 도움이 되는 것 같아요 + +12:13.600 --> 12:16.760 +그래도 되지만 전 한 명과 함께 지내고 있어요 + +12:16.760 --> 12:19.040 +2-4시에 시도해 보세요 + +12:19.340 --> 12:25.670 +학습률은 정말 중요합니다 올바른 방향으로 나아가기 위해 최적화할 때 + +12:25.670 --> 12:29.210 +얼마나 걸음을 내딛는지가 중요하죠 + +12:29.210 --> 12:34.250 +학습률은 실험해 봐야 할 아주 중요한 하이퍼파라미터예요 + +12:34.370 --> 12:40.130 +그리고 이런 질문에는 정답이 없어요 + +12:40.160 --> 12:42.830 +학습률이 너무 높거나 너무 낮을 수 있어요 + +12:42.860 --> 12:46.220 +손으로 물결무늬를 만들 거예요 비트를 입어요 + +12:46.250 --> 12:52.310 +다시 말씀드리지만 여러분이 이루고자 하는 것은 모델이 사라졌다고 가정해 + +12:52.310 --> 12:55.910 +보세요 이렇게 크게 움푹 들어간 부분이죠 + +12:55.910 --> 12:59.540 +이 계곡을 찾고 저 계곡을 찾아야 해요 + +12:59.540 --> 13:05.330 +이 계단을 오르다가 골짜기 방향을 따라 내려가면 골짜기 + +13:05.330 --> 13:07.460 +바닥에 닿을 거예요 + +13:07.460 --> 13:13.520 +학습 속도가 너무 높으면 계곡을 넘었다 다시 돌아와야 해요 계곡 아래로 내려가면 + +13:13.520 --> 13:14.870 +안 되죠 + +13:14.870 --> 13:16.770 
+학습률이 너무 낮으면요 + +13:16.800 --> 13:22.170 +작은 걸음을 내디디면서 계곡으로 가는 속도가 느려질 수도 있어요 + +13:22.200 --> 13:24.810 +작은 보폭 두 개에는 또 다른 문제가 있어요 + +13:24.810 --> 13:31.560 +하나의 큰 계곡이 아니라 작은 계곡이 있고 큰 계곡이 있다고 가정해 보죠 + +13:31.590 --> 13:37.410 +학습률이 너무 낮으면 작은 발걸음을 떼다가 저 작은 계곡으로 가라앉을 수도 + +13:37.410 --> 13:38.010 +있어요 + +13:38.010 --> 13:43.650 +작은 걸음을 내디딜 때마다 작은 계곡의 두 벽을 따라 올라가는데 그 계곡을 + +13:43.650 --> 13:45.120 +벗어나지 못해요 + +13:45.180 --> 13:50.340 +그래서 바로 옆에 훨씬 더 큰 밸리가 있다는 걸 전혀 알 수 없죠 + +13:50.730 --> 13:55.860 +그게 핵심적인 문제죠 + +13:55.860 --> 13:59.040 +학습 속도가 너무 낮으면 흔한 문제예요 + +13:59.070 --> 14:02.400 +사람들은 그걸 지역 내 최소량에 갇힌 거라고 하죠 + +14:02.400 --> 14:05.370 +이건 최소라고 하는데 여러분이 있는 곳의 로컬이죠 + +14:05.370 --> 14:10.770 +글로벌 최소치를 못 찾았군요 이 아래쪽에 있는 거요 로컬 최소치에 + +14:10.770 --> 14:12.780 +갇혀 있으니까요 + +14:12.780 --> 14:16.290 +학습률이 너무 낮으면 이런 문제가 생겨요 + +14:16.680 --> 14:19.210 +멋진 트릭이 있어요 + +14:19.270 --> 14:25.390 +학습률은 0으로 정했어요 00011 곱하기 10은 4죠 + +14:25.660 --> 14:30.820 +학습률 스케줄러라는 게 있는데 이 프로그램은 학습률을 + +14:30.820 --> 14:36.220 +다양하게 해서 세 개 시대에 걸쳐 학습률을 점점 줄여 + +14:36.250 --> 14:40.870 +줍니다 결국 0이 될 때까지요 + +14:41.200 --> 14:42.340 +그걸 줄 수 있어요 + +14:42.370 --> 14:45.700 +이 중 하나를 선택하면 다른 모양을 줄 수 있어요 + +14:45.700 --> 14:53.950 +코사인은 아주 좋은 제품이에요 학습 속도가 아주 느리게 시작하지만 점차 감소하고 + +14:53.950 --> 14:55.390 +있어요 + +14:55.390 --> 14:57.250 +끝에는 꼬리가 보여요 + +14:57.400 --> 14:59.650 +곧 시각적으로 보실 거예요 + +14:59.650 --> 15:01.960 +아주 좋은 기술이에요 + +15:02.110 --> 15:07.510 +최종 학습률 매개변수는 웜업 비율이라고 하는데요 훈련 과정 + +15:07.510 --> 15:13.750 +초기에 모델이 처음 데이터 포인트 몇 개에서 배울 게 많아서 불안정하다는 + +15:13.750 --> 15:15.190 +뜻이죠 + +15:15.190 --> 15:20.020 +처음에 학습률이 높으면 위험합니다 모든 게 뒤죽박죽이 되거든요 + +15:20.020 --> 15:27.190 +워밍업 비율은 낮은 학습율로 시작해서 최고가 될 때까지 워밍업하는 + +15:27.190 --> 15:28.330 +거예요 + +15:28.330 --> 15:31.960 +이제 코사인 트레일을 만들어 보죠 + +15:32.230 --> 15:33.550 +시각적으로도 볼 수 있어요 + +15:33.550 --> 15:34.420 +그래야 말이 되죠 + +15:34.420 --> 15:36.880 +하지만 이건 아주 합리적인 설정이에요 + +15:36.880 --> 15:41.980 +하지만 학습 시작률이 높거나 낮거나 다양한 스케줄러 유형을 실험할 수 + +15:41.980 --> 
15:42.700 +있어요 + +15:43.090 --> 15:45.760 +마지막으로 최적화 장치예요 + +15:45.910 --> 15:54.430 +여기 peded Adam w를 선택할게요. W는 무게 감쇠 a peded Adam w 32 비트라는 뜻이에요. + +15:54.460 --> 15:57.940 +융합이 잘되는 최적화 장치죠 + +15:57.940 --> 16:04.480 +최적의 장소를 찾는 건 잘하지만 메모리 소모에 따른 대가가 따르죠 + +16:04.660 --> 16:11.260 +여기 링크를 걸어 놨어요 다양한 최적화 방법을 소개한 We't go에 관한 글이에요 + +16:11.320 --> 16:15.910 +트랜스포머에 가장 많이 나오는 건 애덤과 애덤이에요 + +16:15.910 --> 16:25.310 +애덤이 아주 잘하네요 이전 평균 그러데이션을 저장하고 있거든요 최근 그러데이션보다 + +16:25.550 --> 16:28.820 +더 잘 활용하고 있어요 + +16:28.820 --> 16:32.420 +하지만 그걸 저장하면 추가 메모리 공간이 필요해요 + +16:32.420 --> 16:34.640 +비트는 좀 과한 메모리예요 + +16:34.640 --> 16:41.990 +메모리가 부족하다면 더 싸고 욕심이 덜한 최적화 장치를 선택해 메모리를 저장하세요 + +16:41.990 --> 16:48.590 +하지만 결과는 pageed Adam w보다 약간 나쁠 수도 있어요 + +16:48.620 --> 16:51.680 +마지막으로 행정적인 설정이 있죠 + +16:51.710 --> 16:56.600 +이 단계의 수는 제조 단계를 나타내요 + +16:56.630 --> 17:01.310 +진척을 줄여 무게와 편견을 줄여 현재 상황을 보여 주죠 + +17:01.460 --> 17:10.460 +이건 몇 단계에 걸쳐 허브로 모델을 업로드하고 적절한 버전을 저장하는 단계죠 + +17:10.460 --> 17:17.090 +무게와 편향으로 로깅을 하는지 아닌지를 보여줍니다 매개 변수를 보여드린 거죠 + +17:17.090 --> 17:21.770 +다음엔 트레이너에 대해 제대로 얘기해보죠 Get it diff --git a/week5/community-contributions/subtitles/srts/59508055/en_US.srt b/week5/community-contributions/subtitles/srts/59508055/en_US.srt new file mode 100755 index 0000000..7f0c2f5 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508055/en_US.srt @@ -0,0 +1,82 @@ +WEBVTT + +00:00.980 --> 00:06.620 +I'm so very happy that you've reached this epic moment in the course and that you're hanging in there. + +00:06.620 --> 00:12.110 +This is where things have gotten really crucial, and we're learning some of the most important things + +00:12.110 --> 00:13.640 +about building an LMS. + +00:13.760 --> 00:20.930 +And I'm I'm just very grateful that you've been staying along and hopefully enjoying it as much as I + +00:20.960 --> 00:21.530 +am. + +00:21.770 --> 00:24.590 +And yeah, the best is yet to come. 
+ +00:24.590 --> 00:28.490 +So, uh, for today, it's going to be few slides. + +00:28.490 --> 00:30.320 +It's going to be all action. + +00:30.410 --> 00:35.360 +Uh, you're going to be looking at the actual run happening and weights and biases, and I'll be showing + +00:35.360 --> 00:40.070 +you the sorts of things to look for and how we can use weights and biases to get some intelligence about + +00:40.070 --> 00:40.970 +what's happening. + +00:41.270 --> 00:46.610 +Uh, I'm going to show you the Huggingface hub with models in it, and you get a sense of what's going + +00:46.610 --> 00:47.210 +on. + +00:47.420 --> 00:54.050 +Um, there's also one thing that's been on my mind that I mentioned last time that I, I don't want + +00:54.050 --> 00:58.220 +to be flippant about the importance of keeping costs low. + +00:58.310 --> 01:04.190 +The idea of this class is not to build up a big bill for you from, uh, from Google, who will happily + +01:04.190 --> 01:07.190 +take your, your money for for running their boxes. + +01:07.310 --> 01:10.010 +Um, I have a lot of fun running this training. + +01:10.010 --> 01:12.740 +It's, um, running the training process. + +01:12.800 --> 01:19.820 +Um, it's perfectly achievable to do that and get good results with the spending a matter of cents. + +01:19.820 --> 01:23.810 +And I did want to take just a quick moment to talk about that, because it's very important. + +01:23.840 --> 01:28.340 +And I don't want to, uh, lead you down the wrong path of thinking you have to spend lots of money + +01:28.340 --> 01:32.180 +to get good results and to learn particularly, which is the main objective here. + +01:32.480 --> 01:35.540 +Um, and so, in fact, we're going to start with that. 
+ +01:35.600 --> 01:42.530 +Um, and I want to, uh, take you straight over to JupyterLab, not not to Google Colab, but JupyterLab, + +01:42.530 --> 01:47.780 +where I'm going to show you something to illustrate my point and to help set you up should you wish + +01:47.780 --> 01:50.090 +to be training at a lower cost. diff --git a/week5/community-contributions/subtitles/srts/59508055/ja_JP.srt b/week5/community-contributions/subtitles/srts/59508055/ja_JP.srt new file mode 100755 index 0000000..0520aee --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508055/ja_JP.srt @@ -0,0 +1,61 @@ +WEBVTT + +00:00.980 --> 00:06.620 +あなたがコースのこの壮大な瞬間に辿り着き、 そこで踏ん張っていることをとても嬉しく思う。 + +00:06.620 --> 00:13.640 +ここが本当に重要なところで、 LMSを構築する上で最も重要なことを学んでいます。 + +00:13.760 --> 00:21.530 +そして僕は、 君が僕と同じように楽しんでくれていることにとても感謝している。 + +00:21.770 --> 00:24.590 +そして、 そうだ、 最高はこれからだ。 + +00:24.590 --> 00:28.490 +というわけで、 今日はスライドは少なめに。 + +00:28.490 --> 00:30.320 +すべてアクションになる。 + +00:30.410 --> 00:40.970 +実際に起きているランとウェイトとバイアスを見ることになるが、 どのようなものを見るべきか、 またウェイトとバイアスをどのように使えば何が起きているのかについてのインテリジェンスが得られるかを紹介する。 + +00:41.270 --> 00:47.210 +モデルを入れたHuggingfaceのハブをお見せします。 + +00:47.420 --> 00:54.050 +前回も申し上げたことですが、 コストを低く抑えることの重要性について、 + +00:54.050 --> 00:58.220 +私は軽々しく言いたくはないのです。 + +00:58.310 --> 01:07.190 +このクラスの目的は、 グーグルから多額の請求書を受け取ることではありません。 + +01:07.310 --> 01:10.010 +このトレーニングはとても楽しいんだ。 + +01:10.010 --> 01:12.740 +それは......トレーニングのプロセスだ。 + +01:12.800 --> 01:19.820 +そうすれば、 ほんの数セントの出費で良い結果を得ることができる。 + +01:19.820 --> 01:23.810 +それはとても重要なことなので、 少し時間をとってお話ししたいと思います。 + +01:23.840 --> 01:28.340 +そして、 良い結果を得るために、 また、 特にここでの主な目的であることを学ぶために、 お金をたくさん使わなければならないと考えるような、 + +01:28.340 --> 01:32.180 +間違った道にあなたを導きたいとは思わない。 + +01:32.480 --> 01:35.540 +ええと、 それで、 実は、 そのことから始めるつもりなんだ。 + +01:35.600 --> 01:42.530 +そして、 JupyterLab(Google Colabではなく、 JupyterLab)に直接お連れしたいのですが、 + +01:42.530 --> 01:50.090 +そこで私の言いたいことを説明し、 低コストでのトレーニングを希望する場合に役立つものをお見せします。 diff --git 
a/week5/community-contributions/subtitles/srts/59508055/ko_KR.srt b/week5/community-contributions/subtitles/srts/59508055/ko_KR.srt new file mode 100755 index 0000000..e9cc6c3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508055/ko_KR.srt @@ -0,0 +1,79 @@ +WEBVTT + +00:00.980 --> 00:06.620 +에픽이 이 역사적인 순간에 도달해서 정말 기뻐요 끝까지 버틴 것도요 + +00:06.620 --> 00:12.110 +아주 중요한 부분입니다 LMS 구축에 가장 중요한 것들을 + +00:12.110 --> 00:13.640 +배우고 있죠 + +00:13.760 --> 00:21.530 +계속 함께해 줘서 정말 고마워요 나만큼 이곳을 즐기면 좋겠네요 + +00:21.770 --> 00:24.590 +네, 아직 최고의 순간은 오지 않았어요 + +00:24.590 --> 00:28.490 +오늘은 슬라이드를 몇 개 보여드릴게요 + +00:28.490 --> 00:30.320 +액션이 가득할 거예요 + +00:30.410 --> 00:35.360 +당신은 실제 경기 상황을 보고 무게와 편향성을 살펴볼 거예요 저는 그런 것들을 살펴보고 + +00:35.360 --> 00:40.070 +무게와 편향성을 어떻게 이용하면 경기 결과를 알 수 있는지 보여 드릴게요 get + +00:40.070 --> 00:40.970 +it + +00:41.270 --> 00:46.610 +먼저 허깅페이스 허브를 보여드릴게요 모델들이 있어요 어떤 상황인지 아실 + +00:46.610 --> 00:47.210 +거예요 + +00:47.420 --> 00:54.050 +지난번에 말씀드렸던 것 중에 마음에 걸리는 게 하나 있는데 경솔하게 가격을 + +00:54.050 --> 00:58.220 +낮추는 게 얼마나 중요한지 말하고 싶지 않아요 + +00:58.310 --> 01:04.190 +이 수업의 목적은 구글로부터 거액의 청구서를 받는 게 아닙니다 구글은 + +01:04.190 --> 01:07.190 +여러분의 돈을 기꺼이 받아 가겠죠 + +01:07.310 --> 01:10.010 +이 훈련이 정말 재밌어요 + +01:10.010 --> 01:12.740 +훈련 과정을 진행하는 거죠 + +01:12.800 --> 01:19.820 +Get it은 완벽하게 가능합니다 단돈 센트로 좋은 결과를 낼 수 있죠 + +01:19.820 --> 01:23.810 +잠시 그 얘길 하고 싶었어요 아주 중요하니까요 + +01:23.840 --> 01:28.340 +잘못된 길로 인도하고 싶진 않아요 많은 돈을 써야 좋은 결과를 얻고 배우게 + +01:28.340 --> 01:32.180 +된다는 생각을요 그게 주된 목표죠 get a my life + +01:32.480 --> 01:35.540 +그래서 거기서부터 시작할 거예요 + +01:35.600 --> 01:42.530 +유피터랩으로 바로 안내하겠습니다 구글 콜랍이 아니라 유피터랩입니다 제 주장을 + +01:42.530 --> 01:47.780 +설명하고 더 저렴한 가격에 훈련하고 싶다면 설정하는 데 도움이 + +01:47.780 --> 01:50.090 +될 뭔가를 보여드릴게요 diff --git a/week5/community-contributions/subtitles/srts/59508057/en_US.srt b/week5/community-contributions/subtitles/srts/59508057/en_US.srt new file mode 100755 index 0000000..0710062 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508057/en_US.srt @@ 
-0,0 +1,91 @@ +WEBVTT + +00:01.100 --> 00:02.660 +Actually slight change in plan. + +00:02.660 --> 00:04.910 +I'm going to wrap up the day. + +00:04.940 --> 00:11.540 +Day three at this point, and say that we'll take day four to look at the results in weights and biases, + +00:11.540 --> 00:16.460 +and to examine the progress of training, because I think that we should let it run for a bit and then + +00:16.460 --> 00:18.620 +take some, some serious time to do that. + +00:18.860 --> 00:23.150 +Um, I'll also at that point be able to show you the model in the Huggingface hub. + +00:23.150 --> 00:28.820 +And I also I do I feel like I might have been a bit glib about the amount of money that it costs to + +00:28.850 --> 00:30.050 +train these models. + +00:30.050 --> 00:35.660 +I want to be clear that you really don't need to spend any material amount of money for for to have + +00:35.660 --> 00:41.960 +a lot of fun with this and investigate hyperparameter optimization as part of this course, it would + +00:41.960 --> 00:43.370 +only cost cents. + +00:43.580 --> 00:49.280 +Um, and so I want to quickly explain how you can do things like reduce your training data set to a + +00:49.280 --> 00:56.720 +more manageable size, and have more sensible parameters so that you could train on a, um, a normal + +00:56.720 --> 01:02.120 +spec GPU box and be spending only a few cents on this project. + +01:02.120 --> 01:07.890 +That's that's all that's required if you wish to be a complete nerd like me and go all out and run lots + +01:07.890 --> 01:15.180 +of runs with top end boxes and spend 5 or $10 then then, uh, on your head, be it as it will be on + +01:15.180 --> 01:15.690 +mine. + +01:15.990 --> 01:18.060 +But it's not necessary at all. + +01:18.810 --> 01:26.580 +But you do need absolutely to take a moment to congratulate yourself on where you've got to. + +01:26.610 --> 01:28.830 +You have a training run happening. 
+ +01:28.860 --> 01:32.880 +I do hope right now while while I speak it is running. + +01:33.300 --> 01:35.700 +And you are in a position now that you can explain. + +01:35.700 --> 01:38.730 +Q Laura pretty well for fine tuning open source models. + +01:38.730 --> 01:45.360 +You're so fed up with me talking about target modules, I'm sure, uh, and now, uh, explaining things + +01:45.360 --> 01:52.410 +like learning rates, uh, ah and alpha, uh, and uh, dropout and all of the various other things + +01:52.410 --> 01:57.330 +like optimizers and the like, it's all second nature to you, and it's all complex stuff. + +01:57.330 --> 02:00.030 +This is, uh, upskilling in a big way. + +02:00.030 --> 02:01.800 +So congratulations. + +02:01.800 --> 02:03.780 +It's tremendous progress. + +02:03.810 --> 02:08.730 +And next time we'll go over to weights and biases and see what's happening. diff --git a/week5/community-contributions/subtitles/srts/59508057/ja_JP.srt b/week5/community-contributions/subtitles/srts/59508057/ja_JP.srt new file mode 100755 index 0000000..4727bb3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508057/ja_JP.srt @@ -0,0 +1,76 @@ +WEBVTT + +00:01.100 --> 00:02.660 +実は計画を少し変更した。 + +00:02.660 --> 00:04.910 +今日はこれで終わりにする。 + +00:04.940 --> 00:11.540 +この時点で3日目。 4日目はウエイトとバイアスの結果を見たり、 + +00:11.540 --> 00:18.620 +トレーニングの進捗状況を調べたりする。 + +00:18.860 --> 00:23.150 +ええと、 その時点でHuggingfaceのハブのモデルもお見せできます。 + +00:23.150 --> 00:30.050 +それに、 モデルを訓練するのにかかる費用については、 少し口が滑ったかもしれない。 + +00:30.050 --> 00:35.660 +このコースの一環として、 ハイパーパラメーター最適化について調べたり、 + +00:35.660 --> 00:43.370 +これを楽しんだりするために、 本当に多くのお金を費やす必要はない。 + +00:43.580 --> 00:49.280 +トレーニング・データ・セットを扱いやすいサイズに縮小し、 より賢明なパラメーターを設定することで、 + +00:49.280 --> 01:02.120 +通常仕様のGPUボックスでトレーニングを行い、 このプロジェクトにわずか数セントを費やすことができます。 + +01:02.120 --> 01:07.890 +私のような完全なオタクになって、 トップエンドのボックスで何本も走り、 + +01:07.890 --> 01:15.690 +5ドルも10ドルも使いたいのなら、 それはそれで必要なことだ。 + +01:15.990 --> 01:18.060 +でも、 その必要はまったくない。 + +01:18.810 --> 01:26.580 +しかし、 
自分自身を褒め称える時間は絶対に必要だ。 + +01:26.610 --> 01:28.830 +トレーニングランが行われている。 + +01:28.860 --> 01:32.880 +今、 私が話している間、 それが動いていることを願っている。 + +01:33.300 --> 01:35.700 +そしてあなたは今、 説明できる立場にいる。 + +01:35.700 --> 01:38.730 +Qローラはオープンソースのモデルを微調整するのにかなり適している。 + +01:38.730 --> 01:45.360 +私がターゲット・モジュールの話をするのにうんざりしているのは確かだが、 今は、 学習率や、 + +01:45.360 --> 01:52.410 +ああ、 アルファ、 ああ、 ドロップアウト、 その他オプティマイザーなどさまざまなことを説明するのは、 + +01:52.410 --> 01:57.330 +あなたにとってはごく当たり前のことで、 複雑なことなんだ。 + +01:57.330 --> 02:00.030 +これは大きな意味でのスキルアップだ。 + +02:00.030 --> 02:01.800 +おめでとう。 + +02:01.800 --> 02:03.780 +とてつもない進歩だ。 + +02:03.810 --> 02:08.730 +そして次回はウェイトとバイアスに行き、 何が起きているのか見てみよう。 diff --git a/week5/community-contributions/subtitles/srts/59508057/ko_KR.srt b/week5/community-contributions/subtitles/srts/59508057/ko_KR.srt new file mode 100755 index 0000000..15b624a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508057/ko_KR.srt @@ -0,0 +1,85 @@ +WEBVTT + +00:01.100 --> 00:02.660 +계획이 약간 변경됐어요 + +00:02.660 --> 00:04.910 +오늘은 이만 퇴근할게요 + +00:04.940 --> 00:11.540 +사흘째인데요 나흘째에는 무게와 편향성 결과를 살펴보고 훈련 과정을 검토할 + +00:11.540 --> 00:16.460 +겁니다 비트를 좀 더 달리게 한 다음 진지하게 시간을 들여야 + +00:16.460 --> 00:18.620 +할 것 같아요 + +00:18.860 --> 00:23.150 +어깅페이스 허브에 있는 모델도 보여드릴 수 있어요 + +00:23.150 --> 00:28.820 +그리고 제가 좀 비트를 탄 것 같아요 모델 훈련에 드는 비용에 + +00:28.850 --> 00:30.050 +대해서요 + +00:30.050 --> 00:35.660 +이 코스를 즐기기 위해 많은 돈을 쓰실 필요는 없어요 + +00:35.660 --> 00:43.370 +하이퍼파라미터 최적화를 살펴보기 위해서요 단돈 몇 센트예요 + +00:43.580 --> 00:49.280 +훈련 데이터 세트를 관리하기 쉬운 크기로 줄이는 방법을 간단히 + +00:49.280 --> 00:56.720 +설명하고 싶어요 더 합리적인 매개 변수를 만들어 일반 사양 GPU 박스로 훈련하고 + +00:56.720 --> 01:02.120 +이 프로젝트에 적은 돈을 쓸 수 있도록요 + +01:02.120 --> 01:07.890 +저처럼 완전히 괴짜가 되고 싶다면 필요한 건 그게 다예요. 최고급 박스로 + +01:07.890 --> 01:15.690 +많은 게임을 하고 5달러나 10달러만 쓰면 당신도 당신도 그렇게 될 거예요. 저처럼 말이에요. 
+ +01:15.990 --> 01:18.060 +그럴 필요 없어요 + +01:18.810 --> 01:26.580 +하지만 앞으로의 일을 축하하려면 잠시 시간을 가져야 해요 + +01:26.610 --> 01:28.830 +훈련 중이에요 + +01:28.860 --> 01:32.880 +제가 말하는 동안에 작동하면 좋겠네요 + +01:33.300 --> 01:35.700 +이제 설명할 수 있는 위치에 있잖아요 + +01:35.700 --> 01:38.730 +오픈 소스 모델을 잘 조정했어요 + +01:38.730 --> 01:45.360 +목표 모듈에 대해 얘기하는 데 질리셨을 거예요 학습율이나 알파, + +01:45.360 --> 01:52.410 +드롭아웃 같은 걸 설명하는 데도요 최적화 장치 같은 다양한 것들도요 그게 + +01:52.410 --> 01:57.330 +다 여러분에게 제2의 본성이자 복잡한 거죠 + +01:57.330 --> 02:00.030 +이건 정말 큰 활력소예요 + +02:00.030 --> 02:01.800 +축하해요 + +02:01.800 --> 02:03.780 +엄청난 발전이에요 + +02:03.810 --> 02:08.730 +다음 시간에는 무게와 편향으로 가서 어떻게 되는지 보죠 diff --git a/week5/community-contributions/subtitles/srts/59508121/en_US.srt b/week5/community-contributions/subtitles/srts/59508121/en_US.srt new file mode 100755 index 0000000..9663ac0 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508121/en_US.srt @@ -0,0 +1,148 @@ +WEBVTT + +00:00.920 --> 00:02.300 +The moment has arrived. + +00:02.300 --> 00:03.230 +Here we go. + +00:03.260 --> 00:04.790 +We're in fine tuning. + +00:04.790 --> 00:05.810 +We do fine tuning. + +00:06.020 --> 00:06.740 +Train. + +00:06.770 --> 00:10.130 +There is also a line here to push it to the hub after it's finished. + +00:10.160 --> 00:11.750 +Let's kick that off. + +00:13.130 --> 00:16.910 +So while it's kicking off, we'll take a quick look. + +00:16.940 --> 00:22.340 +The GPU is starting at six, which is what we expect. + +00:22.370 --> 00:27.560 +It's about as much as it needs for the the model itself. + +00:28.550 --> 00:37.490 +And it's going to be warming itself up and getting ready for this training while it's doing that. + +00:37.640 --> 00:42.170 +Since we're waiting for it just for a second, I might as well just talk to some of these other training + +00:42.170 --> 00:46.130 +parameters that we that we didn't mention before. + +00:46.610 --> 00:48.230 +Um, eval strategy. 
+ +00:48.230 --> 00:55.460 +So it is typical when you're doing this kind of training that you would repeatedly have a held out data + +00:55.490 --> 01:01.790 +set that you would use for evaluation, that you would use as a constant set that's not involved in + +01:01.790 --> 01:07.670 +training, that you would use to get the model to, uh, validate that it's making progress. + +01:07.700 --> 01:09.800 +Now, I've not done that here. + +01:09.890 --> 01:15.790 +Um, partly because we've got so much training data that I think it's got plenty to be training on. + +01:15.850 --> 01:21.850 +Um, just in one epoch alone and and partly because I just performance, the speed of training was so + +01:21.850 --> 01:22.300 +important. + +01:22.300 --> 01:26.590 +I didn't want it to be stopping to do validation repeatedly through this. + +01:26.590 --> 01:29.710 +But it is a best practice to have a validation data set. + +01:29.740 --> 01:31.600 +We've got plenty of data for it. + +01:31.630 --> 01:39.970 +We we held out 2000 test data points so you can use a bunch of them, um, as part of your validation + +01:39.970 --> 01:42.490 +step uh test test set. + +01:42.490 --> 01:50.260 +So one certain improvement to this, that would be more of a best practice would be to use an eval strategy + +01:50.260 --> 01:53.620 +and pass in validation data as well as training data. + +01:53.770 --> 01:58.840 +Um, but but it wasn't a super necessary in this case, so I didn't do it. + +01:59.650 --> 02:01.600 +Um, okay. + +02:01.600 --> 02:07.690 +Well, you may notice if you are glancing over this way that the GPU memory has shot up and out of the + +02:07.690 --> 02:15.280 +40GB of GPU memory that I've got, 38.2 is currently being used. 
+ +02:15.280 --> 02:23.710 +So I really did squeeze this so that, uh, with 16 batch steps, uh, a batch size of 16, I would + +02:23.710 --> 02:31.270 +really use up almost the entire GPU, very close to running out there and you can see that stuff is + +02:31.270 --> 02:32.890 +happening down there. + +02:32.890 --> 02:36.520 +So let's just, uh, scroll down and see what we're seeing here. + +02:36.880 --> 02:42.850 +Um, so it's off and running and we can see that that things are happening. + +02:42.940 --> 02:51.100 +Uh, and now to run the entire three epochs is going to take, uh, according to this estimate here, + +02:51.160 --> 02:53.140 +uh, just over 24 hours. + +02:53.170 --> 02:55.390 +26 hours or so. + +02:55.450 --> 03:04.120 +Uh, so, you know, it is something like eight hours per epoch, um, on this, uh, even on this beefy + +03:04.150 --> 03:10.240 +machine, because the training data is so enormous, but it's not strictly necessary to go through all + +03:10.240 --> 03:13.210 +400,000, uh, training data. + +03:13.210 --> 03:18.310 +And as I say, you can run this on a much lower end box and let it run for for a while, and that is + +03:18.310 --> 03:19.750 +all completely fine. + +03:20.080 --> 03:26.710 +Um, but what you're seeing here is it's it's it's off every 50 steps, just as we configured. + +03:26.740 --> 03:31.450 +We're getting an output that shows us our training loss so far. + +03:31.660 --> 03:36.940 +Um, and this is now running and outputting to weights and biases. + +03:37.060 --> 03:41.350 +And in the next video, I will give you a peek into how that's looking. 
diff --git a/week5/community-contributions/subtitles/srts/59508121/ja_JP.srt b/week5/community-contributions/subtitles/srts/59508121/ja_JP.srt new file mode 100755 index 0000000..24865b0 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508121/ja_JP.srt @@ -0,0 +1,121 @@ +WEBVTT + +00:00.920 --> 00:02.300 +その時が来た。 + +00:02.300 --> 00:03.230 +さあ、 始めよう。 + +00:03.260 --> 00:04.790 +微調整中だ。 + +00:04.790 --> 00:05.810 +微調整を行う。 + +00:06.020 --> 00:06.740 +電車だ。 + +00:06.770 --> 00:10.130 +ここには、 終了後にハブにプッシュするためのラインもある。 + +00:10.160 --> 00:11.750 +では、 それを始めよう。 + +00:13.130 --> 00:16.910 +キックオフの間に、 ちょっと見てみよう。 + +00:16.940 --> 00:22.340 +GPUは6時から始まる。 + +00:22.370 --> 00:27.560 +モデル自体に必要な量と同じくらいだ。 + +00:28.550 --> 00:37.490 +その間にウォーミングアップを行い、 このトレーニングの準備をするんだ。 + +00:37.640 --> 00:46.130 +せっかくなので、 これまで触れなかった他のトレーニングパラメーターについても話しておこうと思う。 + +00:46.610 --> 00:48.230 +あの、 評価戦略。 + +00:48.230 --> 00:55.460 +このようなトレーニングを行う場合、 評価用のデータセット、 つまりトレーニングには使用しない一定のデータセットを繰り返し持ち、 + +00:55.490 --> 01:07.670 +モデルが進歩していることを検証するために使用するのが一般的です。 + +01:07.700 --> 01:09.800 +今、 私はそのようなことはしていない。 + +01:09.890 --> 01:15.790 +ええと、 トレーニングデータがたくさんあるので、 トレーニングするのに十分だと思うからというのもある。 + +01:15.850 --> 01:22.300 +ただ、 1つのエポックに限れば、 そしてパフォーマンスのためでもあるが、 トレーニングのスピードはとても重要だった。 + +01:22.300 --> 01:26.590 +このような繰り返しで検証を行うことを止めてほしくなかった。 + +01:26.590 --> 01:29.710 +しかし、 検証データセットを持つことはベストプラクティスである。 + +01:29.740 --> 01:31.600 +そのためのデータはたくさんある。 + +01:31.630 --> 01:42.490 +テストデータ2000点を用意しましたので、 検証ステップのテストセットとしてお使いください。 + +01:42.490 --> 01:53.620 +そこで、 ベストプラクティスに近い改善策としては、 evalストラテジーを使い、 トレーニングデータだけでなく検証データも渡すことだ。 + +01:53.770 --> 01:58.840 +ええと、 でも、 でも、 今回は超必要なことではなかったので、 やりませんでした。 + +01:59.650 --> 02:01.600 +うーん、 わかった。 + +02:01.600 --> 02:15.280 +GPUメモリが40GBから38GBに増えていることにお気づきだろうか。 + +02:15.280 --> 02:15.280 +2が現在使用されている。 + +02:15.280 --> 02:23.710 +だから、 16のバッチステップ、 16のバッチサイズで、 + +02:23.710 --> 02:32.890 +GPUをほとんど使い切ってしまうくらいに絞ったんだ。 + +02:32.890 --> 02:36.520 +では、 下にスクロールして、 何が見えるか見てみよう。 + +02:36.880 --> 
02:42.850 +そう、 だから、 もう始動しているし、 いろいろなことが起こっているのがわかる。 + +02:42.940 --> 02:53.140 +そして今、 3つのエポック全体を実行するには、 この見積もりによれば、 24時間強かかる。 + +02:53.170 --> 02:55.390 +26時間くらいかな。 + +02:55.450 --> 03:04.120 +トレーニング・データが膨大なので、 この高性能なマシンでも1エポックあたり8時間くらいかかりますが、 + +03:04.150 --> 03:13.210 +厳密に40万件すべてのトレーニング・データを調べる必要はありません。 + +03:13.210 --> 03:19.750 +そして、 私が言うように、 これをもっと低価格のマシンで走らせてもいいし、 しばらく走らせてもいい。 + +03:20.080 --> 03:26.710 +ええと、 でも、 ここに表示されているのは、 設定した通り、 50ステップごとにオフになっています。 + +03:26.740 --> 03:31.450 +これまでのトレーニングのロスを示す出力が出た。 + +03:31.660 --> 03:36.940 +ええと、 これでウェイトとバイアスを出力しているところです。 + +03:37.060 --> 03:41.350 +次のビデオでは、 その様子をお見せしよう。 diff --git a/week5/community-contributions/subtitles/srts/59508121/ko_KR.srt b/week5/community-contributions/subtitles/srts/59508121/ko_KR.srt new file mode 100755 index 0000000..3ee9542 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508121/ko_KR.srt @@ -0,0 +1,145 @@ +WEBVTT + +00:00.920 --> 00:02.300 +드디어 때가 왔어요 + +00:02.300 --> 00:03.230 +시작할게요 + +00:03.260 --> 00:04.790 +잘 조정하고 있어요 + +00:04.790 --> 00:05.810 +세세한 조율을 하죠 + +00:06.020 --> 00:06.740 +기차요 + +00:06.770 --> 00:10.130 +작업이 끝나면 허브에 넣는 라인도 있어요 + +00:10.160 --> 00:11.750 +시작해 보죠 + +00:13.130 --> 00:16.910 +시작되는 동안 잠깐 살펴보죠 + +00:16.940 --> 00:22.340 +GPU 시작점이 6점이에요 예상 수치죠 + +00:22.370 --> 00:27.560 +모델 자체에 필요한 건 다 있어요 + +00:28.550 --> 00:37.490 +그걸 하는 동안 몸을 데우면서 훈련을 준비하죠 + +00:37.640 --> 00:42.170 +기다리고 있으니 다른 훈련 매개 변수들에 대해 얘기할게요 + +00:42.170 --> 00:46.130 +전에 언급하지 않은 것들이죠 + +00:46.610 --> 00:48.230 +평가 전략요 + +00:48.230 --> 00:55.460 +이런 종류의 훈련을 할 때 전형적으로 지속적으로 데이터 세트를 갖고 평가할 + +00:55.490 --> 01:01.790 +겁니다 훈련에 포함되지 않은 상수 집합으로 사용할 거예요 모델이 + +01:01.790 --> 01:07.670 +진전을 이루고 있는지 유효성을 검사하기 위해 사용하겠죠 + +01:07.700 --> 01:09.800 +여기선 안 해봤어요 + +01:09.890 --> 01:15.790 +훈련 데이터가 너무 많아서 훈련할 게 많은 것 같아요 + +01:15.850 --> 01:22.300 +한 시대에서만요 그리고 공연 때문이기도 하지만 훈련 속도가 정말 중요했어요 + +01:22.300 --> 01:26.590 +이걸 통해 반복적으로 유효성 검증을 하는 걸 멈추고 싶지 않았어요 + +01:26.590 --> 01:29.710 +유효성 검증 데이터 모음이 최선의 
관행이죠 + +01:29.740 --> 01:31.600 +데이터는 충분히 있어요 + +01:31.630 --> 01:39.970 +2,000개의 테스트 데이터 포인트를 개최해서 그중 다수를 사용할 수 있습니다 유효성 검사 + +01:39.970 --> 01:42.490 +단계인 테스트 세트로요 + +01:42.490 --> 01:50.260 +개선점을 찾자면 최선의 관행은 평가 전략을 활용해서 훈련 데이터와 함께 유효성 + +01:50.260 --> 01:53.620 +검증 데이터를 통과하는 거죠 + +01:53.770 --> 01:58.840 +하지만 이 경우에는 꼭 필요한 건 아니라서 안 했어요 + +01:59.650 --> 02:01.600 +네 + +02:01.600 --> 02:07.690 +이 쪽으로 보면 GPU 메모리가 40GB 밖으로 나왔고 제가 가진 + +02:07.690 --> 02:15.280 +GPU 메모리에선 38GB가 나왔죠 현재 2를 사용하고 있어요 + +02:15.280 --> 02:23.710 +16개의 배치 단계가 있고 배치 사이즈는 16입니다 GPU 전체를 거의 다 + +02:23.710 --> 02:31.270 +사용하게 됩니다 실행되는 것과 비슷합니다 여기에서 일어나는 일을 + +02:31.270 --> 02:32.890 +볼 수 있죠 + +02:32.890 --> 02:36.520 +스크롤을 내려서 뭐가 보이는지 보죠 + +02:36.880 --> 02:42.850 +작동 중이고 어떻게 되는지 볼 수 있어요 + +02:42.940 --> 02:51.100 +이제 세 개 개국을 다스리는 데 24시간 정도 걸릴 + +02:51.160 --> 02:53.140 +거예요 + +02:53.170 --> 02:55.390 +26시간 정도요 + +02:55.450 --> 03:04.120 +개혁기마다 8시간 정도 작업합니다 이 육중한 기체도 마찬가지입니다 훈련 데이터가 + +03:04.150 --> 03:10.240 +방대하긴 하지만 400,000개의 훈련 데이터를 전부 + +03:10.240 --> 03:13.210 +검토할 필요는 없거든요 + +03:13.210 --> 03:18.310 +말씀드렸듯이 훨씬 낮은 버전의 박스에서 실행할 수 있고 한동안 실행하게 둘 수도 있어요 + +03:18.310 --> 03:19.750 +그건 다 괜찮아요 + +03:20.080 --> 03:26.710 +하지만 여기서 보시는 건 50단계마다 달라요 우리가 구성한 대로요 + +03:26.740 --> 03:31.450 +지금까지의 훈련 손실을 보여주는 결과가 나오고 있어요 + +03:31.660 --> 03:36.940 +지금은 무게와 편향성을 측정하고 있어요 + +03:37.060 --> 03:41.350 +다음 영상에서는 어떻게 생겼는지 살짝 보여드릴게요 diff --git a/week5/community-contributions/subtitles/srts/59508175/en_US.srt b/week5/community-contributions/subtitles/srts/59508175/en_US.srt new file mode 100755 index 0000000..560990d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508175/en_US.srt @@ -0,0 +1,274 @@ +WEBVTT + +00:00.320 --> 00:07.970 +So I'm taking a moment now to explain that the training costs of optimizing a model for this course + +00:07.970 --> 00:09.740 +can be very slim indeed. 
+ +00:09.740 --> 00:17.060 +It can be a matter of a couple of cents, and you don't need to go berserk and be doing what I'm doing + +00:17.060 --> 00:21.920 +and using A100 boxes and, and big training runs. + +00:22.490 --> 00:24.710 +And just to to clarify that. + +00:24.710 --> 00:30.260 +So first of all, as I explained last time, you can run on a box like a T4, which is very cheap. + +00:30.440 --> 00:34.190 +You can have a batch size that the largest that will fit on that box. + +00:34.190 --> 00:38.060 +It's probably 1 or 2, um, and then run training that way. + +00:38.060 --> 00:41.960 +But if you do that, actually with the training data set that we've been working with, it will take + +00:41.960 --> 00:46.370 +a long time, which is still going to add up to a bit because we've just got lots of data. + +00:46.370 --> 00:50.780 +And I wanted to explain that actually, it's not necessary to be training against this monstrous data + +00:50.810 --> 00:51.200 +set. + +00:51.200 --> 00:55.520 +I do that because I want to show you some, some good, some, some really quite strong results, but + +00:55.520 --> 00:58.250 +you'll get great results with a much smaller data set too. + +00:58.370 --> 01:04.350 +Now, one thing I would suggest though one you can just take the data set of 400,000 data points and + +01:04.350 --> 01:11.040 +just select from it the first 20,000 and run with that sub data set, and that would be totally fine. + +01:11.130 --> 01:13.830 +Um, it's probably better if you're going to do that. + +01:13.830 --> 01:20.670 +If instead you focus in on one particular type of product that's being priced, because that way the + +01:20.670 --> 01:24.120 +model will have opportunity to learn all about that product. + +01:24.240 --> 01:30.960 +Um, for example, you could choose appliances, which was one of the data sets that we pulled down + +01:30.960 --> 01:33.030 +from the Huggingface hub at the very beginning. 
+ +01:33.210 --> 01:39.000 +Um, and instead of bringing down all of these data sets, you could simply bring down appliances only. + +01:39.000 --> 01:43.260 +And I've set up a, um, a Jupyter notebook. + +01:43.260 --> 01:46.380 +Uh, not not on Colab, just just a local notebook. + +01:46.380 --> 01:50.970 +It's in week six, where the other, uh, days were for week six. + +01:51.000 --> 01:55.410 +We actually built this data set the full one on day two. + +01:55.440 --> 02:01.610 +So this is a copy of day two, but it just made much simpler and more narrow to only look at appliances. + +02:01.730 --> 02:04.760 +And because it's such a small data set, it will run super fast. + +02:04.790 --> 02:06.650 +We connect it to the environment. + +02:06.650 --> 02:10.850 +This is will be a throwback to the to the past. + +02:10.940 --> 02:14.450 +It's complaining because I've already logged in to Huggingface and I tried to log in a second time, + +02:14.450 --> 02:15.770 +but ignore that. + +02:16.130 --> 02:23.600 +Um, and now we're just going to take, uh, the data set names, and I've commented everything out + +02:23.600 --> 02:28.340 +except for appliances, which you may remember, was one of the really small data sets. + +02:28.580 --> 02:35.300 +Um, uh, Hugging Face is really upset with me for, uh, running it multiple times, but there we go. + +02:35.330 --> 02:36.200 +Off it runs. + +02:36.200 --> 02:43.520 +The total time it takes to load in this data set is, uh, about a 0.2 of a minute, if I remember right. + +02:43.880 --> 02:50.900 +Uh, 0.3 of a minute even, uh, and when we do that, we've got 28,000 items. + +02:50.990 --> 02:54.890 +Um, so it's a lot smaller than the 400,000 data set we've been working with. + +02:54.890 --> 03:01.350 +But it's a perfectly respectable number for Flora, and it has the benefit of being focused on appliances. + +03:01.710 --> 03:04.590 +So it's a narrower data set. 
+ +03:04.800 --> 03:10.620 +Um, and, uh, yeah, I, um, you can follow the rest of these charts. + +03:10.620 --> 03:12.330 +I've taken out the charts that don't matter. + +03:12.330 --> 03:14.280 +Like the comparison of different data types. + +03:14.280 --> 03:15.270 +This is the price. + +03:15.270 --> 03:16.380 +The average is. + +03:16.380 --> 03:17.670 +It's a smaller average. + +03:17.670 --> 03:20.850 +Um, but still you get all the way up to 999. + +03:20.850 --> 03:22.920 +And that is the, the, the curve. + +03:23.190 --> 03:27.900 +Um, and then to curate this, there's nothing really to it. + +03:27.930 --> 03:30.510 +We're going to take the entire data set. + +03:30.510 --> 03:36.840 +This is that same chart now for our smaller data set showing that there isn't a material correlation + +03:36.840 --> 03:37.710 +there. + +03:38.040 --> 03:44.400 +Um, and we can just quickly confirm that the same thing applies to the tokens as we checked before. + +03:44.580 --> 03:50.850 +Um, and then finally when we divide that into a training and test data set, we'll take the 25,000 + +03:50.880 --> 03:54.840 +items for training and then 2000 for test. + +03:55.140 --> 03:58.290 +Um, and everything else will work just great here. + +03:58.350 --> 04:00.460 +Uh, you'll see the training prompt. + +04:00.460 --> 04:02.140 +The test prompt. + +04:02.320 --> 04:05.590 +We can plot the distribution of prices in the test set. + +04:05.590 --> 04:05.980 +There we go. + +04:06.010 --> 04:08.230 +We've got a perfectly decent spread. + +04:08.230 --> 04:16.120 +And then finally we can create the prompts from this, just as we did before, and then upload this + +04:16.120 --> 04:21.610 +to Huggingface, potentially give it a different name like I've called it light data here. + +04:21.880 --> 04:28.690 +Um, and also make the two pickle files, uh, and yeah, then you can use that in your training instead. 
+ +04:28.690 --> 04:36.760 +It will build a model based on smaller populations of data that will be focused on predicting the prices + +04:36.760 --> 04:40.030 +of just home appliances, rather than all the different types. + +04:40.030 --> 04:41.080 +This will work great. + +04:41.080 --> 04:45.160 +I think it'll be a good exercise for you to have to make those small changes in various places in the + +04:45.160 --> 04:47.830 +code to focus in on this smaller data set. + +04:47.950 --> 04:54.640 +Uh, and I have run this myself, and I can confirm that whatever result we're going to discover, uh, + +04:54.640 --> 05:02.010 +from the main run we're doing with the 400,000, the same will apply in this case in terms of how it + +05:02.010 --> 05:04.830 +stacks up compared to other models. + +05:05.040 --> 05:12.900 +Not as much as will be the case with the the bigger data set, but uh, without giving the game away + +05:12.930 --> 05:18.360 +that the the important things will happen even if you're focused on appliances. + +05:18.360 --> 05:24.090 +And obviously when you're dealing with the data set at 25,000, uh, you can, uh, get through it very + +05:24.090 --> 05:25.620 +quickly indeed. + +05:25.680 --> 05:30.930 +Um, but should you do that and should you then get the bug like I have and want to then go and do a + +05:30.930 --> 05:31.530 +bigger run? + +05:31.530 --> 05:36.720 +Then of course, you can add in more types and you can spend a few dollars and then be doing it for + +05:36.720 --> 05:38.010 +the full data set. + +05:38.040 --> 05:39.690 +Anyway, I thought I'd go through that. + +05:39.690 --> 05:39.930 +This. + +05:39.930 --> 05:45.540 +This may be a good revision for people on how we curated the data and should you wish, a smaller data + +05:45.570 --> 05:45.930 +set. + +05:45.960 --> 05:47.220 +It tells you how to do it. + +05:47.250 --> 05:47.700 +All right. 
+ +05:47.700 --> 05:53.970 +In the next video, we go to weights and biases, and we look at how the big run with the 400,000 is + +05:53.970 --> 05:54.750 +coming along. + +05:54.750 --> 05:59.340 +And we poke around weights and biases to see what more can be done there. + +05:59.370 --> 06:00.180 +See you there. diff --git a/week5/community-contributions/subtitles/srts/59508175/ja_JP.srt b/week5/community-contributions/subtitles/srts/59508175/ja_JP.srt new file mode 100755 index 0000000..d5aea98 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508175/ja_JP.srt @@ -0,0 +1,226 @@ +WEBVTT + +00:00.320 --> 00:07.970 +だから、 このコースのためにモデルを最適化するためのトレーニング・コストは、 実にわずかなものであることを説明するために、 + +00:07.970 --> 00:09.740 +今この時間を割いている。 + +00:09.740 --> 00:17.060 +2、 3セントの問題で済むし、 私のようにA100ボックスを使ったり、 + +00:17.060 --> 00:21.920 +大がかりなトレーニングをする必要もない。 + +00:22.490 --> 00:24.710 +それを明確にするためだ。 + +00:24.710 --> 00:30.260 +だからまず、 前回説明したように、 T4のような非常に安価なボックスで走らせることができる。 + +00:30.440 --> 00:34.190 +その箱に収まる最大のバッチサイズを持つことができる。 + +00:34.190 --> 00:38.060 +おそらく1回か2回、 うーん、 それからランのトレーニングをするんだ。 + +00:38.060 --> 00:46.370 +でも、 それをやると、 これまで使ってきたトレーニング・データセットでは時間がかかる。 + +00:46.370 --> 00:51.200 +そして、 実はこの巨大なデータセットに対してトレーニングする必要はないことを説明したかった。 + +00:51.200 --> 00:58.250 +私は、 いくつかの良い結果、 いくつかの本当に強力な結果をお見せしたいからそうしているのですが、 もっと小さなデータセットでも素晴らしい結果を得ることができます。 + +00:58.370 --> 01:04.350 +ただ、 ひとつ提案したいのは、 400,000点のデータセットから最初の20,000点を選択して、 + +01:04.350 --> 01:11.040 +そのサブデータセットで実行してもまったく問題ないということだ。 + +01:11.130 --> 01:13.830 +そうするつもりなら、 その方がいいだろうね。 + +01:13.830 --> 01:24.120 +その代わりに、 ある特定の種類の製品に絞って値段をつければ、 モデルはその製品についてすべてを知ることができるからだ。 + +01:24.240 --> 01:33.030 +例えば、 Huggingfaceのハブから最初に引き出したデータセットのひとつである家電製品を選ぶことができます。 + +01:33.210 --> 01:39.000 +そして、 これらすべてのデータセットをダウンさせるのではなく、 単にアプライアンスだけをダウンさせることもできる。 + +01:39.000 --> 01:43.260 +そしてJupyterノートブックをセットアップした。 + +01:43.260 --> 01:46.380 +ええと、 Colabではなく、 ただのローカルノートです。 + +01:46.380 --> 01:50.970 +それは6週目で、 他の、 ええと、 6週目のための日があったところだ。 + +01:51.000 --> 01:55.410 
+私たちは2日目にこのデータセットの完全版を作成した。 + +01:55.440 --> 02:01.610 +これは2日目のコピーだが、 電化製品だけを見るにはもっとシンプルで狭い。 + +02:01.730 --> 02:04.760 +そして、 このように小さなデータセットなので、 超高速で実行される。 + +02:04.790 --> 02:06.650 +私たちはそれを環境に結びつける。 + +02:06.650 --> 02:10.850 +これは過去への回帰となるだろう。 + +02:10.940 --> 02:14.450 +すでにHuggingfaceにログイン済みで、 2回目のログインを試みたから文句を言っているのだが、 + +02:14.450 --> 02:15.770 +無視してくれ。 + +02:16.130 --> 02:28.340 +データセットの名前ですが、 家電製品以外はコメントアウトしてあります。 + +02:28.580 --> 02:35.300 +ええと、 ハギング・フェイスは僕が何度もそれをやったことにすごく怒っているんだけど、 まあいいや。 + +02:35.330 --> 02:36.200 +走り出す。 + +02:36.200 --> 02:43.520 +このデータセットのロードにかかる時間の合計は、 ええと、 約0です。 私の記憶が正しければ、 1分の2。 + +02:43.880 --> 02:50.900 +ええと、 0。 3分でも、 ええと、 そうすると、 28,000の項目があるんだ。 + +02:50.990 --> 02:54.890 +ええと、 だから、 私たちがこれまで扱ってきた40万件のデータセットよりもずっと小さいんだ。 + +02:54.890 --> 03:01.350 +しかし、 フローラにとっては申し分のない数字であり、 家電製品に特化しているという利点もある。 + +03:01.710 --> 03:04.590 +だから、 より狭いデータセットなんだ。 + +03:04.800 --> 03:10.620 +ええと、 それで、 ええと、 この残りのグラフを追ってみてください。 + +03:10.620 --> 03:12.330 +どうでもいいチャートは取り除いた。 + +03:12.330 --> 03:14.280 +異なるデータタイプの比較とかね。 + +03:14.280 --> 03:15.270 +これが価格だ。 + +03:15.270 --> 03:16.380 +平均はこうだ。 + +03:16.380 --> 03:17.670 +平均すると少ない。 + +03:17.670 --> 03:20.850 +うーん、 それでも999まである。 + +03:20.850 --> 03:22.920 +そして、 それがカーブなんだ。 + +03:23.190 --> 03:27.900 +それから、 これをキュレーションするのは、 本当に何もないんだ。 + +03:27.930 --> 03:30.510 +全データセットを使うつもりだ。 + +03:30.510 --> 03:37.710 +これは、 同じグラフをより小さなデータセットに置き換えたものだが、 相関関係は見られない。 + +03:38.040 --> 03:44.400 +ええと、 トークンについては、 前に確認したのと同じことが適用されることをすぐに確認できます。 + +03:44.580 --> 03:50.850 +そして、 最終的にトレーニング用とテスト用のデータセットに分けるとき、 トレーニング用には25,000アイテム、 + +03:50.880 --> 03:54.840 +テスト用には2,000アイテムを使います。 + +03:55.140 --> 03:58.290 +他のことはすべて、 ここでうまくいくよ。 + +03:58.350 --> 04:00.460 +トレーニングのプロンプトが表示されます。 + +04:00.460 --> 04:02.140 +テストのプロンプト。 + +04:02.320 --> 04:05.590 +テストセットの価格分布をプロットすることができる。 + +04:05.590 --> 04:05.980 +これでよし。 + +04:06.010 --> 04:08.230 +スプレッドは申し分ない。 + +04:08.230 --> 04:21.610 +そして最後に、 前と同じようにここからプロンプトを作成し、 これをHuggingfaceにアップロードします。 + +04:21.880 --> 
04:28.690 +そして、 2つのピックルファイルを作り、 それをトレーニングに使うことができる。 + +04:28.690 --> 04:40.030 +より少ないデータ集団に基づいてモデルを構築し、 あらゆる種類の家電製品ではなく、 家電製品だけの価格を予測することに重点を置く。 + +04:40.030 --> 04:41.080 +これならうまくいくだろう。 + +04:41.080 --> 04:45.160 +この小さなデータセットに集中するために、 コードのいろいろな場所に小さな変更を加えることは、 + +04:45.160 --> 04:47.830 +いい練習になると思う。 + +04:47.950 --> 04:54.640 +そして、 私自身もこの実験を行ったが、 40万人での実験からどのような結果が出ようとも、 + +04:54.640 --> 05:04.830 +他のモデルとの比較という点では、 この場合も同じであることを確認することができた。 + +05:05.040 --> 05:18.360 +大きなデータセットの場合ほどではないが、 家電製品に集中していても重要なことは起こる。 + +05:18.360 --> 05:25.620 +25,000ものデータセットを扱っていると、 あっという間に終わってしまう。 + +05:25.680 --> 05:31.530 +でも、 そうしているうちに、 私のように虫がわいてきて、 もっと大舞台に行きたくなったらどうする? + +05:31.530 --> 05:38.010 +もちろん、 さらに種類を増やすこともできるし、 数ドルを費やして全データセットに対応することもできる。 + +05:38.040 --> 05:39.690 +とにかく、 私はそれを経験しようと思った。 + +05:39.690 --> 05:39.930 +これだ。 + +05:39.930 --> 05:45.930 +これは、 私たちがどのようにデータをキュレーションしたのか、 またご希望であれば、 より小さなデータセットについて、 人々にとって良い復習になるかもしれない。 + +05:45.960 --> 05:47.220 +その方法を教えてくれる。 + +05:47.250 --> 05:47.700 +分かった。 + +05:47.700 --> 05:54.750 +次のビデオでは、 ウェイトとバイアスに行き、 40万人のビッグランがどのように進んでいるかを見る。 + +05:54.750 --> 05:59.340 +そして、 ウェイトとバイアスの周辺を突いて、 そこでさらに何ができるかを確認する。 + +05:59.370 --> 06:00.180 +そこで会おう diff --git a/week5/community-contributions/subtitles/srts/59508175/ko_KR.srt b/week5/community-contributions/subtitles/srts/59508175/ko_KR.srt new file mode 100755 index 0000000..f90f375 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508175/ko_KR.srt @@ -0,0 +1,262 @@ +WEBVTT + +00:00.320 --> 00:07.970 +이 코스를 위한 모델을 최적화하는 데 드는 훈련 비용은 아주 적다는 걸 말씀드리고 + +00:07.970 --> 00:09.740 +싶군요 + +00:09.740 --> 00:17.060 +몇 센트만 있으면 되니까 저처럼 미친 듯이 A100 + +00:17.060 --> 00:21.920 +상자를 들고 훈련할 필요 없어요 + +00:22.490 --> 00:24.710 +확실히 해두죠 + +00:24.710 --> 00:30.260 +먼저, 지난번에 설명했듯이 T4 같은 박스에서 작동할 수 있어요 아주 저렴하죠 + +00:30.440 --> 00:34.190 +상자에 들어가는 가장 큰 배치 사이즈를 가질 수 있어요 + +00:34.190 --> 00:38.060 +한두 번 연습하고 그렇게 달리기를 해요 + +00:38.060 --> 00:41.960 +하지만 그렇게 하면 우리가 작업해온 트레이닝 데이터 집합으로 + +00:41.960 --> 
00:46.370 +시간이 오래 걸릴 겁니다 비트가 많아도 시간이 좀 더 걸리겠죠 + +00:46.370 --> 00:51.200 +이 괴물 같은 데이터에 맞서 훈련할 필요는 없다는 걸 설명하고 싶었어요 + +00:51.200 --> 00:55.520 +이렇게 하는 이유는 여러분께 좋은, 꽤 강력한 결과를 보여드리고 싶어서죠 하지만 훨씬 + +00:55.520 --> 00:58.250 +적은 데이터 집합으로도 훌륭한 결과를 얻을 수 있어요 + +00:58.370 --> 01:04.350 +제가 제안하고 싶은 것은 데이터 포인트 400,000개의 데이터 집합 중에서 + +01:04.350 --> 01:11.040 +첫 20,000개를 선택해서 하위 데이터 집합으로 실행해도 된다는 거예요 + +01:11.130 --> 01:13.830 +그렇게 하는 게 좋겠어요 + +01:13.830 --> 01:20.670 +가격이 매겨지는 특정 제품에 집중하는 대신에요 그래야 모델이 해당 제품에 + +01:20.670 --> 01:24.120 +관해 전부 배울 기회가 생기니까요 + +01:24.240 --> 01:30.960 +예를 들어 어플라이언스를 선택할 수 있는데 처음에 H깅페이스 허브에서 뽑아낸 + +01:30.960 --> 01:33.030 +데이터 세트 중 하나였죠 + +01:33.210 --> 01:39.000 +이 모든 데이터 세트를 없애는 대신 가전제품만 없애는 거죠 + +01:39.000 --> 01:43.260 +주피터 공책을 준비했어요 + +01:43.260 --> 01:46.380 +콜랍은 아니고 동네 공책이에요 + +01:46.380 --> 01:50.970 +6주 차예요, 다른 날은 6주 차까지 갔거든요 + +01:51.000 --> 01:55.410 +둘째 날 전체 데이터를 구축했어요 + +01:55.440 --> 02:01.610 +이건 둘째 날 복사본인데 훨씬 간단하고 가전제품만 볼 수 있게 편리하게 만들었어요 + +02:01.730 --> 02:04.760 +데이터 집합이 작아서 아주 빨리 실행될 거예요 + +02:04.790 --> 02:06.650 +환경과 연결하는 거죠 + +02:06.650 --> 02:10.850 +과거를 떠올리게 하는 요리예요 + +02:10.940 --> 02:14.450 +이미 허깅페이스에 로그인해서 불평하는 거예요 두 번째도 로그인하려고 + +02:14.450 --> 02:15.770 +했는데 무시하세요 + +02:16.130 --> 02:23.600 +이제 데이터 세트 이름을 입력할게요 어플라이언스만 빼고 전부 주석을 달았어요 기억하실지 + +02:23.600 --> 02:28.340 +모르겠지만 아주 작은 데이터 세트였죠 + +02:28.580 --> 02:35.300 +포옹하는 얼굴이 정말 속상해요 여러 번 실행했거든요 + +02:35.330 --> 02:36.200 +출발했어요 + +02:36.200 --> 02:43.520 +이 데이터 세트를 로드하는 데 걸리는 총 시간은 0분 정도 제 기억이 맞는다면 2분 정도 + +02:43.880 --> 02:50.900 +0살요 3분 만에 끝내면 28,000개가 완성되죠 + +02:50.990 --> 02:54.890 +우리가 작업한 400,000개의 데이터 세트보다 훨씬 작아요 + +02:54.890 --> 03:01.350 +하지만 플로라에게는 아주 괜찮은 수치였고 가전제품에 집중할 수 있어서 유리했어요 + +03:01.710 --> 03:04.590 +데이터 집합이 더 좁아지죠 + +03:04.800 --> 03:10.620 +어, 그리고 이 차트들 따라가시면 돼요 REST + +03:10.620 --> 03:12.330 +안 중요한 차트는 뺐어요 + +03:12.330 --> 03:14.280 +서로 다른 데이터 타입의 비교 같은 거요 + +03:14.280 --> 03:15.270 +이게 그 대가예요 + +03:15.270 --> 03:16.380 +평균은 그렇죠 + +03:16.380 --> 
03:17.670 +평균보다 낮아요 + +03:17.670 --> 03:20.850 +999까지 갈 수 있어요. Get it. + +03:20.850 --> 03:22.920 +이게 바로 곡선이에요 + +03:23.190 --> 03:27.900 +그리고 이걸 정리하는 건 사실 별거 없어요 + +03:27.930 --> 03:30.510 +전체 데이터 세트를 가져오는 거죠 + +03:30.510 --> 03:37.710 +작은 데이터 집합에 대한 같은 도표입니다 물질적 상관관계가 없다는 걸 보여주죠 + +03:38.040 --> 03:44.400 +음, 아까 확인했던 것과 같은 것이 토큰에 적용된다는 것을 빠르게 확인해 볼게요. + +03:44.580 --> 03:50.850 +마지막으로 트레이닝과 테스트 데이터로 나눌 때 트레이닝용 아이템 25,000개 + +03:50.880 --> 03:54.840 +테스트용 2000개를 가져가요 + +03:55.140 --> 03:58.290 +다른 건 다 잘 될 거예요 + +03:58.350 --> 04:00.460 +훈련 시간을 표시해 뒀어요 + +04:00.460 --> 04:02.140 +테스트 프롬프트요 + +04:02.320 --> 04:05.590 +테스트 세트에서 가격 배분을 계산할 수 있어요 + +04:05.590 --> 04:05.980 +됐어요 + +04:06.010 --> 04:08.230 +완벽한 스프레드가 있어요 + +04:08.230 --> 04:16.120 +마지막으로 아까 했던 것처럼 프롬프트만 생성하면 됩니다 그리고 이걸he깅face에 업로드 합니다 + +04:16.120 --> 04:21.610 +다른 이름을 붙일 수도 있죠 라이트 데이터라고 부를 수 있어요 + +04:21.880 --> 04:28.690 +피클 파일 두 개를 만들 수 있어요 그걸 훈련에 활용할 수 있죠 + +04:28.690 --> 04:36.760 +적은 데이터량을 바탕으로 모델을 만들 겁니다 다양한 가전제품의 가격을 예측하는 데 집중할 + +04:36.760 --> 04:40.030 +겁니다 다른 종류가 아니라요 + +04:40.030 --> 04:41.080 +이거면 될 거예요 + +04:41.080 --> 04:45.160 +코드의 다양한 곳에 작은 변화를 주는 것이 좋은 운동이 될 겁니다 + +04:45.160 --> 04:47.830 +작은 데이터 집합에 집중하기 위해서요 + +04:47.950 --> 04:54.640 +제가 직접 실행해 봤는데 어떤 결과가 나오든 확인시켜 줄 수 있어요 400,000로 + +04:54.640 --> 05:02.010 +했던 메인 실행에서도요 이 경우에도 같은 결과가 적용됩니다 다른 모델에 비해 어떻게 차곡차곡 + +05:02.010 --> 05:04.830 +쌓이느냐에 있어서요 + +05:05.040 --> 05:12.900 +데이터가 많을수록 더 잘 알 수 있겠지만 게임에서 중요한 일이 벌어질 거라는 걸 미리 + +05:12.930 --> 05:18.360 +알려주지 않으면 되죠 가전제품에 집중하더라도요 + +05:18.360 --> 05:24.090 +25,000에 데이터셋을 다룰 때는 아주 빠르게 get을 + +05:24.090 --> 05:25.620 +할 수 있죠 + +05:25.680 --> 05:31.530 +하지만 그렇게 하고 저처럼 버그를 확보해서 더 크게 달리길 원하나요? 
+ +05:31.530 --> 05:36.720 +물론 더 많은 유형을 추가해 몇 달러 더 써서 전체 데이터 세트를 + +05:36.720 --> 05:38.010 +만들 수도 있죠 + +05:38.040 --> 05:39.690 +어쨌든, 그 과정을 거쳐야겠다고 생각했죠 + +05:39.690 --> 05:39.930 +이거요 + +05:39.930 --> 05:45.930 +데이터 큐레이팅에 있어 좋은 수정안이 될 수 있습니다 더 작은 데이터 집합을 원하신다면요 + +05:45.960 --> 05:47.220 +어떻게 하는지 알려주죠 + +05:47.250 --> 05:47.700 +좋아요 + +05:47.700 --> 05:53.970 +다음 영상에서는 무게와 편향성을 살펴보고 400,000도 회전을 어떻게 진행하는지 + +05:53.970 --> 05:54.750 +살펴보죠 + +05:54.750 --> 05:59.340 +무게와 편향성을 따져보고 뭘 더 할 수 있는지 보죠 + +05:59.370 --> 06:00.180 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/59508289/en_US.srt b/week5/community-contributions/subtitles/srts/59508289/en_US.srt new file mode 100755 index 0000000..a0bf8f7 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508289/en_US.srt @@ -0,0 +1,673 @@ +WEBVTT + +00:00.320 --> 00:07.100 +So here we are now, back in the Colab, in the same one that we kicked off in the previous day. + +00:07.100 --> 00:12.170 +It's now somewhat misleadingly named week seven, day three because we are of course on week seven, + +00:12.170 --> 00:12.890 +day four. + +00:13.190 --> 00:18.170 +But I daren't touch it for fear that I that I, uh, stop it in some way. + +00:18.170 --> 00:26.960 +You can see that the GPU Ram continues in its nail, bitingly close to the ceiling level of 38.2 out + +00:26.960 --> 00:28.070 +of the 40GB. + +00:28.070 --> 00:29.600 +But it's luckily not changing. + +00:29.600 --> 00:30.650 +Not going up. + +00:30.980 --> 00:36.440 +And you'll see here this is the printout of each of the different batch steps as they happen. + +00:36.440 --> 00:40.430 +We're now on batch step 4350. + +00:40.640 --> 00:47.600 +Um, and if we zoom all the way up, uh, which again, I'm doing everything very delicately so I don't + +00:47.630 --> 00:49.580 +accidentally stop the batch in some way. + +00:49.610 --> 00:55.310 +You can see that we're only a little tiny way into this strip that represents all three epochs. 
+ +00:55.310 --> 01:01.580 +So one about there perhaps is a complete, uh, run through all of the data. + +01:01.820 --> 01:05.180 +Um, and you can see we're at batch step. + +01:05.360 --> 01:11.360 +Uh, I can't say the number because it's ticking too fast, but for for ten, uh, there we go. + +01:11.510 --> 01:19.310 +Uh, out of this very round number, uh, that you see here of 100,000, uh, and you might wonder why + +01:19.310 --> 01:20.690 +it's quite so round. + +01:20.720 --> 01:24.290 +Uh, and, uh, it's just something of a coincidence. + +01:24.350 --> 01:33.380 +Uh, remember, we have 400,000 data points, and but they're in batches of 16 are in any one step. + +01:33.410 --> 01:40.430 +So if I take my calculator here for this trivial maths, take 400 000 and I divide that by 16. + +01:40.730 --> 01:45.920 +Uh, and then I multiply that by the number of epochs. + +01:46.160 --> 01:51.950 +Um, I thought we had three epochs, but it looks like we have four epochs. + +01:51.980 --> 01:54.050 +I left it at four epochs. + +01:54.200 --> 01:56.270 +Uh, so, uh, yeah. + +01:56.360 --> 01:59.630 +If you then multiply that by four for four epochs. + +01:59.820 --> 02:07.890 +Uh, you do end up with, uh, just to give evidence of that, there's the camera, uh, 100,000, uh, + +02:07.890 --> 02:10.530 +and it's that that is the 100,000 here. + +02:10.530 --> 02:15.120 +It's not just a wild random number, randomly round number. + +02:15.120 --> 02:22.230 +It is, in fact, exactly the number of batch steps involved in 400,000 data points, grouped into batches + +02:22.230 --> 02:25.830 +of 16, done four times over. + +02:26.160 --> 02:29.550 +Uh, so off it ticks. + +02:29.550 --> 02:34.470 +And you can see just from looking at these numbers that we started with quite high training loss and + +02:34.470 --> 02:37.770 +that that number has come down, uh, quite significantly. 
+ +02:37.770 --> 02:43.440 +But it's sort of hard to tell from here exactly what's going on, because the numbers bounce around + +02:43.440 --> 02:44.490 +up and down. + +02:44.700 --> 02:46.770 +Um, and it's hard to get a sense of the trend. + +02:46.770 --> 02:53.100 +If only there were a tool that would allow us to visualize the progress of the batch in a way that could + +02:53.100 --> 02:55.500 +allow us to compare between different runs and the like. + +02:55.530 --> 02:59.880 +Of course, there is such a tool and it's weights and biases and it's sitting right here. + +03:00.090 --> 03:03.060 +Um, and this this is the result of our run. + +03:03.060 --> 03:04.830 +This is it happening right now. + +03:05.070 --> 03:09.750 +Um, and this is showing us what's going on. + +03:09.750 --> 03:10.020 +So. + +03:10.020 --> 03:12.930 +So this training loss here is the real the real thing. + +03:12.930 --> 03:14.610 +This is what we really want to look at. + +03:14.640 --> 03:17.550 +Let me just edit this panel so we get to see it a bit more. + +03:17.730 --> 03:24.630 +Um, now this the the y axis here goes all the way down to zero where zero will be zero loss, which + +03:24.630 --> 03:26.160 +would mean perfection. + +03:26.160 --> 03:33.690 +It would mean that the model is always predicting with 100% confidence the next token and that next + +03:33.690 --> 03:37.710 +token that it predicts with 100% confidence is the right next token. + +03:37.830 --> 03:40.350 +Um, so that is an unlikely place to get. + +03:40.380 --> 03:45.450 +In fact, one would be suspicious if you got much below 1.5. + +03:45.480 --> 03:49.950 +Typically this is a bit of a rule of thumb thing, but but generally speaking, that would be like a + +03:49.950 --> 03:52.080 +great, uh, loss to have. + +03:52.080 --> 03:57.630 +And below that might cause you pause for thought about whether overfitting is going on. 
+ +03:57.840 --> 04:03.730 +Um, let me change these axes and make the minimum of the y axis one so that. + +04:03.760 --> 04:04.540 +Well, we can make it. + +04:04.570 --> 04:05.230 +We can do even better. + +04:05.230 --> 04:12.250 +We can make it like 1.4 or something, um, so that we can really see what's going on here. + +04:12.430 --> 04:18.340 +Um, and it's a bumpy line, but it is pretty clear even at this early stage, that that is a bumpy + +04:18.370 --> 04:20.530 +line that has an improving trend. + +04:20.560 --> 04:27.580 +Unlike when we saw this with, uh, GPT and the fine tuning, when it seemed to be bouncing up and down + +04:27.580 --> 04:32.530 +but not really showing a trend, I think you can see that this looks like it's improving. + +04:32.530 --> 04:36.190 +We can also apply the smoothing thing and see how that looks. + +04:36.310 --> 04:37.600 +I mean, that's clearly isn't it. + +04:37.600 --> 04:40.450 +That is that is a line that is improving. + +04:40.450 --> 04:42.700 +If we apply the smoothing factor. + +04:42.730 --> 04:45.820 +Now, why do these bump up and down? + +04:45.970 --> 04:47.530 +I mean it depends really. + +04:47.530 --> 04:55.270 +It's remember each of these points represents, uh, 16 data points, 16 prompts with a thing for it + +04:55.270 --> 05:00.940 +to predict at the end of it shoved into one and it's a random it's been jumbled up. + +05:00.940 --> 05:03.190 +So it's a random set of the 16. + +05:03.520 --> 05:06.940 +And so who knows which products are shoved into the 16. + +05:06.970 --> 05:11.620 +There could be some really expensive products that it's making a wild guess at and getting completely + +05:11.620 --> 05:12.070 +wrong. + +05:12.070 --> 05:14.410 +So there's plenty of noise. 
+ +05:14.500 --> 05:20.530 +Um, because the there's a different makeup of each of these different batch steps, and it's good to + +05:20.530 --> 05:27.640 +have a bit of noise because you want to be shaking things up a bit and getting the model to be, um, + +05:27.670 --> 05:29.620 +being bounced around. + +05:29.710 --> 05:34.510 +Um, in a sense, again, I'm being very hand-wavy, but because you're trying to not get stuck in a + +05:34.510 --> 05:39.580 +local minimum, but be trying out different, different possibilities with the idea that the model will + +05:39.580 --> 05:47.170 +improve and find a big global minimum, a big valley as it tries to, to, um, find gradients and improve + +05:47.170 --> 05:49.600 +its ability to predict the next token. + +05:49.960 --> 05:51.730 +Um, it's just moved on a little bit. + +05:51.730 --> 05:57.040 +And again, it's very clear that there is visually some improvement happening here. + +05:57.370 --> 05:58.990 +Um, so what else can we see? + +05:58.990 --> 06:00.940 +So this is the learning rate. + +06:00.940 --> 06:06.460 +This is the, uh, the that key hyperparameter about how much of a step it should take. + +06:06.460 --> 06:11.590 +And I told you we'd chosen a cosine learning rate, and you might be a bit surprised to see something + +06:11.590 --> 06:15.370 +which doesn't look very cosine y at all and doesn't look the way I described it. + +06:15.370 --> 06:22.180 +And this what you're seeing here is that thing called warm up, which is the fact that it that it doesn't + +06:22.180 --> 06:30.190 +start up at the, the 0.0001, the highest number, it starts at zero and builds up to that point. + +06:30.190 --> 06:36.760 +Because you can see the beginning of the batch is quite a dramatic movement from being way off to to + +06:36.790 --> 06:38.110 +being in a better position. + +06:38.110 --> 06:40.750 +And you don't want the learning rate to be too high then. 
+ +06:40.780 --> 06:43.570 +Or it might do, it might overshoot in all sorts of ways. + +06:43.570 --> 06:45.820 +So that's the theory behind this warm up. + +06:46.000 --> 06:49.180 +Um, uh, part of the process. + +06:49.210 --> 06:53.170 +Um, and you can see it really, really coming to, to, to be here. + +06:53.470 --> 07:00.860 +Um, and you might wonder why this isn't really showing much in the way of a cosine kind of, uh, shape. + +07:00.860 --> 07:02.900 +And that's because it's so early in the training. + +07:02.900 --> 07:05.060 +Still, we're still at the very top of this cosine. + +07:05.060 --> 07:09.560 +It's about to start curving down, and it's doing that very slowly. + +07:09.920 --> 07:18.590 +Um, and, um, yeah, there's also some fun charts here that show us what's going on on the GPU. + +07:18.740 --> 07:21.440 +Uh, some of these are more meaningful than others. + +07:21.470 --> 07:22.430 +There's some here. + +07:22.430 --> 07:24.140 +The, um. + +07:24.170 --> 07:29.480 +Yeah, the power usage and the time spent accessing memory is good. + +07:29.540 --> 07:30.020 +Uh, it's. + +07:30.020 --> 07:32.150 +I want to see the CPU utilization. + +07:32.150 --> 07:32.870 +Where is that? + +07:33.170 --> 07:35.540 +I mean, the GPU, even the CPU utilization. + +07:35.570 --> 07:43.640 +The GPU utilization would be the perhaps one of the most important to make sure that the time that the + +07:43.640 --> 07:48.950 +GPU is hard at work, it's not like it's shepherding memory in and out, and that that's what's taking + +07:48.950 --> 07:49.460 +the time. + +07:49.460 --> 07:52.580 +You want to know that the GPU is being utilized. + +07:52.580 --> 07:58.850 +And that's a great sign that we are hammering our are powerful A100 blocks and making the most out of + +07:58.850 --> 07:59.090 +it. + +07:59.090 --> 08:04.550 +And if you're using the different box, then you've got some different hyperparameters. 
+ +08:04.550 --> 08:09.740 +Then come and check out the GPU utilization, make sure it's doing well, make sure it's nice and hot, + +08:09.800 --> 08:13.760 +uh, and that you're getting good use out of your GPU. + +08:13.790 --> 08:19.070 +Otherwise, you might want to tweak some of the hyperparameters to see if you can't get more juice out + +08:19.070 --> 08:23.090 +of it so that it's your training process is more efficient. + +08:23.690 --> 08:25.550 +Um, okay. + +08:25.550 --> 08:29.390 +So what I'm going to do now is do a bit of a cheat. + +08:29.450 --> 08:36.200 +I'm going to do what they do in those cooking classes or like, like cooking videos when they put it + +08:36.200 --> 08:41.540 +in the oven and they say, and now here's one that I did earlier, they take it out of the oven and + +08:41.540 --> 08:46.670 +it's the thing that they put in, you know, it's like, oh, that's uh, that's just cheating. + +08:46.670 --> 08:53.180 +But I have done that and that I did kick this off a while back and it ran with the same hyperparameters. + +08:53.180 --> 08:58.080 +So the same thing, um, and, uh, and it's this pink one right here. + +08:58.080 --> 09:00.240 +And this I should have explained. + +09:00.240 --> 09:00.870 +I'm so sorry. + +09:00.870 --> 09:05.640 +This here is showing the four runs that have happened under this project. + +09:05.640 --> 09:07.500 +The project which is called Pricer. + +09:07.500 --> 09:12.450 +So up at the top here, this navigation, um, it has the Pricer project. + +09:12.450 --> 09:15.240 +And down here are the four runs. + +09:15.450 --> 09:25.500 +Um, and this run here was when I ran either 3 or 4 epochs, um, of this model with this same data + +09:25.530 --> 09:28.650 +set and with the, uh, yeah. + +09:28.680 --> 09:30.990 +With otherwise with everything else the same. + +09:31.140 --> 09:35.910 +Uh, and so if I show you that, we'll see what happened as a result of that. + +09:38.310 --> 09:39.810 +And here we go. 
+ +09:39.810 --> 09:43.290 +So this this is, uh, this is the meaty one. + +09:43.290 --> 09:48.630 +So let's let's bring this up and we are going to have to change the scale. + +09:49.620 --> 09:51.390 +We're going to have to come down. + +09:51.990 --> 09:52.650 +There we go. + +09:52.650 --> 09:53.640 +Now you can see everything. + +09:53.640 --> 09:54.870 +If I leave it at one. + +09:55.200 --> 09:57.840 +Okay, so a few things to point out. + +09:57.840 --> 10:02.370 +So first of all you're seeing a purple. + +10:02.790 --> 10:03.630 +Let me click. + +10:03.660 --> 10:06.120 +I think if I do this one here it's going to bring it up. + +10:06.120 --> 10:06.870 +There we go. + +10:06.960 --> 10:10.320 +So you can see here a blue and a purple line. + +10:10.320 --> 10:13.920 +The blue is just here and the purple is here. + +10:13.920 --> 10:19.350 +The blue is the current run that is running right now over on this tab. + +10:19.650 --> 10:22.950 +It's this this guy that we are running right at the moment. + +10:22.950 --> 10:26.610 +The purple is the one that I kicked off a while ago. + +10:26.730 --> 10:29.460 +I of course it has the data in there about ten days ago. + +10:29.820 --> 10:35.430 +Um, and you can see that the blue is tracking extremely closely to the purple, which is further evidence + +10:35.430 --> 10:38.010 +that I'm not I'm not cheating here. + +10:38.010 --> 10:43.200 +Uh, it is the case that that blue will continue to follow the same trajectory as the purple. + +10:43.200 --> 10:45.960 +The purple has just had its course. + +10:46.320 --> 10:53.100 +Uh, now, what you'll see is that the trend indeed improves and improves and improves, which is good + +10:53.310 --> 10:54.150 +Ish. 
+ +10:54.450 --> 11:00.630 +Um, so first of all, you'll, you'll see that it that it improves and then it takes a little dive + +11:00.630 --> 11:05.070 +and then it improves, and then it takes a little dive again and improves and it takes an even bigger + +11:05.070 --> 11:05.670 +dive. + +11:05.670 --> 11:08.550 +And so you might be wondering what are these dives? + +11:08.550 --> 11:12.210 +Well, these dives are the end of each of the epochs. + +11:12.210 --> 11:14.130 +So this is an entire epoch. + +11:14.160 --> 11:18.240 +That's epoch one, that's epoch two, that's epoch three, and this is epoch four. + +11:18.240 --> 11:20.040 +And unfortunately it crashed. + +11:20.040 --> 11:25.650 +Uh, or Google reset the instance uh, halfway through uh, epoch four. + +11:25.650 --> 11:28.530 +So, uh, we didn't get to see how epoch four ended. + +11:28.800 --> 11:31.980 +Um, but as you'll see, that that proves to be unimportant. + +11:32.340 --> 11:36.720 +Um, so why does the why is there this sudden drop at the end of each epoch? + +11:36.720 --> 11:38.220 +Well, that's a very good question. + +11:38.220 --> 11:39.720 +It's extremely important. + +11:39.840 --> 11:44.190 +Uh, it's because what's starting to happen is a little bit of overfitting. + +11:44.190 --> 11:50.520 +What's happening here is that the model is seeing some of the same data that it already saw in the first + +11:50.550 --> 11:51.120 +epoch. + +11:51.150 --> 11:53.070 +Now, sure, it's muddled up differently. + +11:53.110 --> 12:00.100 +and seeing them in, uh, in batch batches that are different sets of 16 than the ones that got here. + +12:00.130 --> 12:01.750 +Hugging face takes care of that for you. + +12:01.750 --> 12:06.310 +The SFT trainer automatically reshuffles the batches each time. + +12:06.550 --> 12:11.560 +Um, but nonetheless, the models had the benefit of seeing this data before, and it can take advantage + +12:11.560 --> 12:12.070 +of that. 
+ +12:12.070 --> 12:14.770 +It's learned something despite the dropout. + +12:14.950 --> 12:22.600 +Um, despite some some other things we've done to try and regularize, um, it's still has a leg up + +12:22.600 --> 12:25.330 +on the fact that it's seen this exact data before. + +12:25.810 --> 12:30.760 +Um, but luckily there's only a small step down there, so we don't need to be too concerned that there's + +12:30.760 --> 12:32.290 +overfitting happening. + +12:32.290 --> 12:38.620 +But then it gets worse here and it gets significantly worse here. + +12:38.830 --> 12:41.890 +Uh, and so this is a big sign of overfitting. + +12:41.890 --> 12:46.630 +And now, if I had been doing what I told you is a best practice and I should have been doing, which + +12:46.630 --> 12:51.910 +was running validation runs at the same time, uh, the chances, first of all, they wouldn't have + +12:51.910 --> 12:53.710 +taken these little jump downs. + +12:53.710 --> 12:59.560 +But secondly, it would have probably started to go up at this point because we're overfitting. + +12:59.560 --> 13:04.360 +And indeed I have savings of the batch at this point. + +13:04.720 --> 13:11.650 +And sure enough, it was saved up to the Hugging face hub every 5000 steps so I could test it. + +13:11.650 --> 13:19.450 +And sure enough, the results do get worse past the third epoch, so there was no point in going more + +13:19.480 --> 13:20.410 +than three epochs. + +13:20.410 --> 13:27.010 +So it didn't matter that Google pulled the plug on my instance at this point, because this data was + +13:27.010 --> 13:28.660 +actually no longer useful. + +13:28.660 --> 13:30.580 +The model was already doing poorly. + +13:30.850 --> 13:36.940 +And again, this is a great example of where you can regularly upload to the hub, and then you can + +13:36.940 --> 13:40.360 +go back at each of these checkpoints and run your test over them. + +13:40.360 --> 13:43.930 +And you can pick the model that performs the best. 
+ +13:44.170 --> 13:50.020 +And that's a very powerful technique for so that you don't have to guess how many epochs to run. + +13:50.020 --> 13:55.520 +You just run too many and then select the one that has the best results out of your training data. + +13:55.550 --> 13:58.220 +Out of sample when you're trying something new. + +13:58.610 --> 14:03.950 +Um, so I think it's a really good illustration of of how this works. + +14:04.010 --> 14:10.610 +Um, and of the, the, the, the effect of overfitting and the effect of different epochs. + +14:10.640 --> 14:16.010 +And, but what we know for sure is that during that first epoch, say, because it was always seeing + +14:16.010 --> 14:22.340 +new data, all of this slope downwards is all representing good improvement. + +14:22.370 --> 14:26.450 +And I can tell you that the results at the end of here were also distinctly better than here. + +14:26.450 --> 14:28.790 +So this was also showing some improvement. + +14:28.820 --> 14:32.330 +And but around here it started to get a little bit more dodgy. + +14:32.330 --> 14:36.680 +And maybe, maybe coincidentally that is at around the 1.5 level. + +14:36.680 --> 14:41.930 +And I mentioned before there was that rule of thumb that less than 1.5 maybe is a time to be raising + +14:41.930 --> 14:45.170 +an eyebrow and looking again at your results. + +14:46.220 --> 14:51.110 +Uh, so I will pause at this moment, and when we return, we'll talk a little bit more about a couple + +14:51.140 --> 14:52.040 +of other things. 
diff --git a/week5/community-contributions/subtitles/srts/59508289/ja_JP.srt b/week5/community-contributions/subtitles/srts/59508289/ja_JP.srt new file mode 100755 index 0000000..8341d41 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508289/ja_JP.srt @@ -0,0 +1,577 @@ +WEBVTT + +00:00.320 --> 00:07.100 +こうして今、 前日にキックオフしたのと同じラボに戻ってきた。 + +00:07.100 --> 00:12.890 +今は第7週3日目というやや誤解を招きそうな名前だが、 我々はもちろん第7週4日目にいるからだ。 + +00:13.190 --> 00:18.170 +でも、 何かで止めてしまうことを恐れて、 触れないようにしているんだ。 + +00:18.170 --> 00:28.070 +GPUラムは、 天井の38に迫る勢いで釘付けになっている。 40GBのうち2GB。 + +00:28.070 --> 00:29.600 +しかし、 幸いなことにそれは変わらない。 + +00:29.600 --> 00:30.650 +上がらない。 + +00:30.980 --> 00:36.440 +そして、 各バッチステップのプリントアウトをご覧ください。 + +00:36.440 --> 00:40.430 +現在、 バッチステップ4350だ。 + +00:40.640 --> 00:47.600 +うーん、 そして、 もしズームアップしきったら、 うーん、 もう一度言うけど、 うっかりバッチを止めてしまわないように、 + +00:47.630 --> 00:49.580 +慎重にやっているんだ。 + +00:49.610 --> 00:55.310 +この3つのエポックを表す帯は、 まだほんの少ししか入っていないことがわかるだろう。 + +00:55.310 --> 01:01.580 +そこでひとつ、 おそらくすべてのデータを完全に、 あー、 一通り調べてみる。 + +01:01.820 --> 01:05.180 +ええと、 バッチステップにいるのがわかるだろう。 + +01:05.360 --> 01:11.360 +あまりに早く過ぎてしまうので、 数字は言えませんが、 10回分です。 + +01:11.510 --> 01:20.690 +この10万という丸い数字のうち、 なぜこんなに丸いのか不思議に思うかもしれない。 + +01:20.720 --> 01:24.290 +それは偶然の一致なんだ。 + +01:24.350 --> 01:33.380 +ええと、 40万点のデータがありますが、 1ステップで16点ずつです。 + +01:33.410 --> 01:40.430 +だから、 このつまらない計算のために電卓で400,000を16で割ってみる。 + +01:40.730 --> 01:45.920 +それにエポック数を掛けるんだ。 + +01:46.160 --> 01:51.950 +エポックは3つだと思っていたが、 4つあるようだ。 + +01:51.980 --> 01:54.050 +私は4エポックのままにしておいた。 + +01:54.200 --> 01:56.270 +ああ、 そうだね。 + +01:56.360 --> 01:59.630 +それを4倍して4エポックとする。 + +01:59.820 --> 02:10.530 +その証拠に、 10万台のカメラがある。 + +02:10.530 --> 02:15.120 +単なる乱数ではなく、 ランダムに丸められた数字だ。 + +02:15.120 --> 02:25.830 +実際、 これは40万点のデータを16のバッチにまとめ、 4回繰り返して行うバッチ・ステップの数そのものである。 + +02:26.160 --> 02:29.550 +ええと、 そうすると、 それが止まるんだ。 + +02:29.550 --> 02:37.770 +この数字を見ただけでも、 かなり高いトレーニングロスからスタートし、 その数字がかなり下がっていることがわかるだろう。 + +02:37.770 --> 02:44.490 +しかし、 数字が上下に動き回っているため、 
ここから何が起こっているのかを正確に判断するのは難しい。 + +02:44.700 --> 02:46.770 +うーん、 トレンドの感覚をつかむのは難しいですね。 + +02:46.770 --> 02:55.500 +バッチの進捗状況を視覚化し、 異なる実行の比較などを可能にするツールがあればいいのだが......。 + +02:55.530 --> 02:59.880 +もちろん、 そのようなツールはあるし、 重みもバイアスもある。 + +03:00.090 --> 03:03.060 +ええと、 そしてこれが僕らが走った結果なんだ。 + +03:03.060 --> 03:04.830 +これが今起きていることだ。 + +03:05.070 --> 03:09.750 +そして、 これは何が起こっているかを示しているんだ。 + +03:09.750 --> 03:10.020 +だから + +03:10.020 --> 03:12.930 +だから、 このトレーニングの損失は本物だ。 + +03:12.930 --> 03:14.610 +これが私たちが本当に見たいことなのだ。 + +03:14.640 --> 03:17.550 +このパネルを編集して、 もう少し見せてもらおう。 + +03:17.730 --> 03:26.160 +このY軸はゼロまで下がっていて、 ゼロはゼロロスとなり、 完璧を意味する。 + +03:26.160 --> 03:37.710 +これは、 モデルが常に100%の信頼度で次のトークンを予測し、 100%の信頼度で予測した次のトークンが正しい次のトークンであることを意味する。 + +03:37.830 --> 03:40.350 +うーん、 それはあり得ない場所だね。 + +03:40.380 --> 03:45.450 +実際、 1を大きく下回ると疑われる。 5. + +03:45.480 --> 03:49.950 +一般的には、 これは経験則のようなものだが、 しかし、 一般的に言えば、 それは素晴らしい、 あー、 + +03:49.950 --> 03:52.080 +持っているべき損失のようなものだろう。 + +03:52.080 --> 03:57.630 +そしてその下には、 オーバーフィッティングが起こっていないかどうかを考えるきっかけになるかもしれない。 + +03:57.840 --> 04:03.730 +この軸を変えて、 Y軸の最小値を1つにしてみよう。 + +04:03.760 --> 04:04.540 +まあ、 何とかなるさ。 + +04:04.570 --> 04:05.230 +我々はもっとうまくやれる。 + +04:05.230 --> 04:12.250 +1のようにすればいい。 4か何かで、 ここで何が起こっているのかがわかるようにするんだ。 + +04:12.430 --> 04:20.530 +しかし、 この初期の段階でも、 このでこぼこした線が改善傾向にあることは明らかだ。 + +04:20.560 --> 04:27.580 +GPTと微調整を行った時とは異なり、 上下に揺れ動き、 + +04:27.580 --> 04:32.530 +トレンドは見られなかった。 + +04:32.530 --> 04:36.190 +また、 スムージングを適用して、 どのように見えるか見てみよう。 + +04:36.310 --> 04:37.600 +つまり、 それは明らかに違う。 + +04:37.600 --> 04:40.450 +つまり、 それは向上しているラインなのだ。 + +04:40.450 --> 04:42.700 +平滑化係数を適用すると + +04:42.730 --> 04:45.820 +さて、 なぜこれが上下するのか? 
+ +04:45.970 --> 04:47.530 +つまり、 本当に場合によるんだ。 + +04:47.530 --> 04:55.270 +これらのポイントはそれぞれ、 16のデータポイント、 16のプロンプト、 その最後に予測するものを1つに押し込んだもので、 + +04:55.270 --> 05:00.940 +ランダムにごちゃ混ぜになっているんだ。 + +05:00.940 --> 05:03.190 +つまり、 16人の中からランダムに選ばれているわけだ。 + +05:03.520 --> 05:06.940 +それで、 どの製品が16番に押し込まれるかは誰にもわからない。 + +05:06.970 --> 05:12.070 +もしかしたら、 本当に高価な製品かもしれない。 + +05:12.070 --> 05:14.410 +だから雑音は多い。 + +05:14.500 --> 05:20.530 +それぞれのバッチステップの構成が異なるので、 + +05:20.530 --> 05:29.620 +少しノイズがあるのは良いことです。 + +05:29.710 --> 05:34.510 +ある意味、 また非常に手探りですが、 ローカル・ミニマムから抜け出せないで、 + +05:34.510 --> 05:39.580 +モデルが改善されて大きなグローバル・ミニマム、 大きな谷を見つけ、 + +05:39.580 --> 05:49.600 +勾配を見つけて次のトークンを予測する能力を向上させようとする、 という考えで、 さまざまな可能性を試しているのです。 + +05:49.960 --> 05:51.730 +うーん、 少し前進しただけだよ。 + +05:51.730 --> 05:57.040 +そして、 ここでもまた、 目に見えて改善が進んでいることは明らかだ。 + +05:57.370 --> 05:58.990 +他に何が見える? + +05:58.990 --> 06:00.940 +これが学習率だ。 + +06:00.940 --> 06:06.460 +このハイパーパラメーターは、 どの程度のステップを踏むべきかを示す重要なパラメーターだ。 + +06:06.460 --> 06:11.590 +コサイン学習率を選んだと言ったが、 コサインyにはまったく見えないし、 私が説明したようには見えないものを見て、 + +06:11.590 --> 06:15.370 +少し驚くかもしれない。 + +06:15.370 --> 06:22.180 +これはウォームアップと呼ばれるもので、 + +06:22.180 --> 06:30.190 +0点では始動しない。 0001が最高で、 ゼロから始まり、 そこまで積み上げていく。 + +06:30.190 --> 06:38.110 +バッチの始まりは、 大きく外れていた状態から、 より良いポジションへとかなり劇的な動きを見せているのがわかるだろう。 + +06:38.110 --> 06:40.750 +それに、 学習率が高すぎるのもよくない。 + +06:40.780 --> 06:43.570 +あるいは、 いろんな意味でオーバーシュートするかもしれない。 + +06:43.570 --> 06:45.820 +これが、 このウォームアップのセオリーだ。 + +06:46.000 --> 06:49.180 +ええと、 プロセスの一部なんだ。 + +06:49.210 --> 06:53.170 +本当に、 本当に、 ここに来ているのがわかるだろう。 + +06:53.470 --> 07:00.860 +なぜコサインのような形があまり出てこないのか不思議に思うかもしれない。 + +07:00.860 --> 07:02.900 +それはトレーニングの初期だからだ。 + +07:02.900 --> 07:05.060 +それでも、 我々はまだこのコサインの頂点にいる。 + +07:05.060 --> 07:09.560 +これからカーブを描き始めるが、 それは非常にゆっくりとしたものだ。 + +07:09.920 --> 07:18.590 +それと、 GPUで何が起こっているかを示す楽しいチャートもある。 + +07:18.740 --> 07:21.440 +そのなかには、 もっと意味のあるものもある。 + +07:21.470 --> 07:22.430 +ここにもある。 + +07:22.430 --> 07:24.140 +その...。 + +07:24.170 --> 07:29.480 +ええ、 
電力使用量とメモリへのアクセスに費やされる時間は良好です。 + +07:29.540 --> 07:30.020 +ええと、 それは + +07:30.020 --> 07:32.150 +CPU使用率を見たい。 + +07:32.150 --> 07:32.870 +それはどこですか? + +07:33.170 --> 07:35.540 +つまり、 GPUやCPUの使用率さえもだ。 + +07:35.570 --> 07:43.640 +GPUの使用率は、 GPUが懸命に働いている時間が、 メモリの出し入れに時間を取られているのではないことを確認するために、 + +07:43.640 --> 07:49.460 +おそらく最も重要なものの1つだろう。 + +07:49.460 --> 07:52.580 +GPUが活用されていることを知りたい。 + +07:52.580 --> 07:59.090 +そしてそれは、 私たちが強力なA100ブロックを打ち込み、 それを最大限に活用していることの証でもある。 + +07:59.090 --> 08:04.550 +また、 異なるボックスを使用している場合は、 異なるハイパーパラメータをいくつか持っていることになる。 + +08:04.550 --> 08:13.760 +それからGPUの使用率をチェックし、 うまく動作していることを確認する。 + +08:13.790 --> 08:23.090 +そうでない場合は、 ハイパーパラメータを微調整して、 トレーニングプロセスをより効率的にすることができないか試してみるといいだろう。 + +08:23.690 --> 08:25.550 +うーん、 わかった。 + +08:25.550 --> 08:29.390 +だから、 これからするのはちょっとしたズルだ。 + +08:29.450 --> 08:36.200 +料理教室とか、 料理のビデオとかで、 オーブンに入れて、 こう言うんだけど、 + +08:36.200 --> 08:41.540 +これはさっきやったやつなんだけど、 オーブンから取り出して、 + +08:41.540 --> 08:46.670 +中に入れたものなんだ。 + +08:46.670 --> 08:53.180 +しかし、 私はそれを実行したし、 しばらく前にこれをキックオフし、 同じハイパーパラメータで実行した。 + +08:53.180 --> 08:58.080 +だから、 同じように、 うーん、 このピンクのやつだよ。 + +08:58.080 --> 09:00.240 +そして、 これは私が説明すべきことだった。 + +09:00.240 --> 09:00.870 +本当に申し訳ない。 + +09:00.870 --> 09:05.640 +これは、 このプロジェクトで行われた4回の走行である。 + +09:05.640 --> 09:07.500 +プライサーと呼ばれるプロジェクトだ。 + +09:07.500 --> 09:12.450 +この一番上のナビゲーションには、 プリサープロジェクトがあります。 + +09:12.450 --> 09:15.240 +そしてこの下にあるのが4本だ。 + +09:15.450 --> 09:28.650 +この実行は、 同じデータセットで、 このモデルを3エポックか4エポック実行したものです。 + +09:28.680 --> 09:30.990 +それ以外はすべて同じだ。 + +09:31.140 --> 09:35.910 +それを見せれば、 その結果どうなったかわかるだろう。 + +09:38.310 --> 09:39.810 +そして、 これだ。 + +09:39.810 --> 09:43.290 +だから、 これは......これは肉厚なやつだ。 + +09:43.290 --> 09:48.630 +では、 これを上に持ってきて、 スケールを変更する必要がありそうだ。 + +09:49.620 --> 09:51.390 +降りなければならない。 + +09:51.990 --> 09:52.650 +これでよし。 + +09:52.650 --> 09:53.640 +これですべてを見ることができる。 + +09:53.640 --> 09:54.870 +このままにしておけば + +09:55.200 --> 09:57.840 +さて、 いくつか指摘しておきたいことがある。 + +09:57.840 --> 10:02.370 +だからまず、 あなたは紫を見ている。 + +10:02.790 --> 10:03.630 
+クリックさせてください。 + +10:03.660 --> 10:06.120 +ここをこうすれば、 それが出てくると思うんだ。 + +10:06.120 --> 10:06.870 +これでよし。 + +10:06.960 --> 10:10.320 +青と紫の線が見えるだろう。 + +10:10.320 --> 10:13.920 +青はちょうどここにあり、 紫はここにある。 + +10:13.920 --> 10:19.350 +青は、 このタブで現在実行中のものだ。 + +10:19.650 --> 10:22.950 +今走っているのはこの男だ。 + +10:22.950 --> 10:26.610 +紫は少し前に蹴飛ばしたやつだ。 + +10:26.730 --> 10:29.460 +もちろん、 10日ほど前のデータも入っている。 + +10:29.820 --> 10:38.010 +青が紫に極めて近く追従しているのがわかるだろう。 + +10:38.010 --> 10:43.200 +青は紫と同じ軌跡をたどり続けるだろう。 + +10:43.200 --> 10:45.960 +紫はちょうどその時期が終わったところだ。 + +10:46.320 --> 10:54.150 +今わかることは、 トレンドは確かに改善され、 改善され、 改善されるということだ。 + +10:54.450 --> 11:00.630 +まず第一に、 改善し、 そして少し急降下し、 そして改善し、 また少し急降下し、 + +11:00.630 --> 11:05.670 +そして改善し、 さらに大きく急降下するのがわかるだろう。 + +11:05.670 --> 11:08.550 +このダイブとは何なのか? + +11:08.550 --> 11:12.210 +まあ、 これらのダイビングはそれぞれのエポックの終わりなんだ。 + +11:12.210 --> 11:14.130 +つまり、 これは1つのエポックなのだ。 + +11:14.160 --> 11:18.240 +これがエポック1、 これがエポック2、 これがエポック3、 そしてこれがエポック4だ。 + +11:18.240 --> 11:20.040 +そして残念ながらクラッシュした。 + +11:20.040 --> 11:25.650 +あるいは、 グーグルがエポック4の途中でインスタンスをリセットした。 + +11:25.650 --> 11:28.530 +だから、 エポック4がどのように終わったかを見ることはできなかった。 + +11:28.800 --> 11:31.980 +うーん、 でも見ての通り、 そんなことはどうでもいいことだとわかるだろう。 + +11:32.340 --> 11:36.720 +では、 なぜ各エポックの終わりになると急に気温が下がるのか? 
+ +11:36.720 --> 11:38.220 +それはいい質問だね。 + +11:38.220 --> 11:39.720 +非常に重要なことだ。 + +11:39.840 --> 11:44.190 +少しオーバーフィッティングが始まっているんだ。 + +11:44.190 --> 11:51.120 +ここで起こっているのは、 モデルが最初のエポックですでに見たのと同じデータを見ているということだ。 + +11:51.150 --> 11:53.070 +今は確かに、 違う形で混濁している。 + +11:53.110 --> 12:00.100 +そして、 ここに来た選手とは異なる16人バッチで彼らを見ることになる。 + +12:00.130 --> 12:01.750 +ハグをすれば、 その心配はない。 + +12:01.750 --> 12:06.310 +SFTトレーナーは、 毎回自動的にバッチを入れ替えます。 + +12:06.550 --> 12:12.070 +しかし、 それにもかかわらず、 モデルは以前からこのデータを見ており、 それを活用することができる。 + +12:12.070 --> 12:14.770 +落ちこぼれにもかかわらず、 何かを学んだ。 + +12:14.950 --> 12:22.600 +正則化を試みるために他のことをやったにもかかわらず、 このデータは以前にも同じデータを見たことがあるということで、 + +12:22.600 --> 12:25.330 +まだ優位に立っているんだ。 + +12:25.810 --> 12:32.290 +うーん、 でも幸いなことに、 そこからはほんのわずかな段差しかないので、 オーバーフィッティングが起きていることをあまり心配する必要はない。 + +12:32.290 --> 12:38.620 +しかし、 その後はここにきてさらに悪化し、 著しく悪化している。 + +12:38.830 --> 12:41.890 +これはオーバーフィッティングの大きな兆候だ。 + +12:41.890 --> 12:46.630 +そして、 もし私がベストプラクティスであり、 やるべきことだと言った、 + +12:46.630 --> 12:53.710 +検証ランを同時に行っていたら、 まず第一に、 彼らはこのような小さなジャンプダウンをしなかっただろう。 + +12:53.710 --> 12:59.560 +しかし第二に、 おそらくこの時点で上がり始めていただろう。 + +12:59.560 --> 13:04.360 +そして実際、 私はこの時点でバッチの貯金を持っている。 + +13:04.720 --> 13:11.650 +そして案の定、 5000歩ごとにハグする顔のハブに保存されたので、 テストすることができた。 + +13:11.650 --> 13:20.410 +そして案の定、 3回目のエポックを過ぎると結果は悪化する。 + +13:20.410 --> 13:28.660 +だから、 グーグルがこの時点で私のインスタンスのプラグを抜いたことは問題ではなかった。 + +13:28.660 --> 13:30.580 +このモデルはすでに不調だった。 + +13:30.850 --> 13:40.360 +これは、 定期的にハブにアップロードし、 各チェックポイントに戻ってテストを実行できる素晴らしい例だ。 + +13:40.360 --> 13:43.930 +そして、 最高のパフォーマンスを発揮するモデルを選ぶことができる。 + +13:44.170 --> 13:50.020 +これは、 何エポック走らせるかを推測する必要がないようにするための非常に強力なテクニックだ。 + +13:50.020 --> 13:55.520 +たくさん実行しすぎて、 トレーニングデータから最も良い結果が出たものを選ぶだけだ。 + +13:55.550 --> 13:58.220 +新しいことに挑戦するときは、 見本から外れる。 + +13:58.610 --> 14:03.950 +だから、 これがどのように機能するのか、 本当によくわかると思う。 + +14:04.010 --> 14:10.610 +それと、 オーバーフィッティングの影響と、 エポック数の違いによる影響。 + +14:10.640 --> 14:16.010 +そして、 しかし、 私たちが確実に知っているのは、 最初のエポックでは、 常に新しいデータを見ていたため、 + +14:16.010 --> 14:22.340 
+この下向きの傾斜はすべて良好な改善を表しているということだ。 + +14:22.370 --> 14:26.450 +そして、 ここでの最後の結果も、 ここより明らかに良かったと言える。 + +14:26.450 --> 14:28.790 +だから、 この点でも改善が見られた。 + +14:28.820 --> 14:32.330 +そして、 しかし、 このあたりから少し怪しくなってきた。 + +14:32.330 --> 14:36.680 +そして、 もしかしたら、 偶然かもしれないが、 それは1番あたりかもしれない。 5レベル。 + +14:36.680 --> 14:45.170 +そして、 経験則として1以下というのがあると前述した。 5 もしかしたら、 眉をひそめて自分の結果を見直す時期かもしれない。 + +14:46.220 --> 14:52.040 +ええと、 ですから、 この場は一旦中断して、 また戻ってきたら、 もう少し他のことについて話しましょう。 diff --git a/week5/community-contributions/subtitles/srts/59508289/ko_KR.srt b/week5/community-contributions/subtitles/srts/59508289/ko_KR.srt new file mode 100755 index 0000000..22ac3a1 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508289/ko_KR.srt @@ -0,0 +1,646 @@ +WEBVTT + +00:00.320 --> 00:07.100 +다시 콜랍으로 돌아왔어요 어제 시작한 바로 그 장소죠 + +00:07.100 --> 00:12.890 +지금은 7주 차나 3일째라고 잘못 명명됐어요 7주 차나 4일째에 찍었거든요 + +00:13.190 --> 00:18.170 +하지만 만질 수가 없어요 제가 어떤 식으로든 막을 수 있을까 봐서요 + +00:18.170 --> 00:28.070 +GPU 램이 계속 천장 레벨 38에 근접하게 유지되고 있네요 40GB에서 2개요 + +00:28.070 --> 00:29.600 +다행히도 바뀌지 않네요 + +00:29.600 --> 00:30.650 +안 올라가요 + +00:30.980 --> 00:36.440 +여기 출력된 게 보이시죠 각각의 배치 단계가 발생하는 대로요 + +00:36.440 --> 00:40.430 +지금은 4350단계예요 + +00:40.640 --> 00:47.600 +줌인해서 끝까지 올리면... 다시 말하지만 아주 섬세하게 작업하고 있어요 실수로 반죽이 + +00:47.630 --> 00:49.580 +멈추는 일이 없도록요 + +00:49.610 --> 00:55.310 +세 개 시대의 모습을 볼 수 있는 아주 작은 구역이에요 + +00:55.310 --> 01:01.580 +여기 있는 것 중 하나는 모든 데이터를 훑는 거죠 + +01:01.820 --> 01:05.180 +보다시피 배치 스텝이에요 + +01:05.360 --> 01:11.360 +너무 빨리 가서 숫자는 말 못 하지만 10달러면... 좋아요 + +01:11.510 --> 01:19.310 +여기 보이는 100,000라는 둥근 숫자에서 왜 이렇게 둥근지 궁금하실 + +01:19.310 --> 01:20.690 +거예요 + +01:20.720 --> 01:24.290 +그냥 우연의 일치예요 + +01:24.350 --> 01:33.380 +데이터 포인트가 400,000개인데 16개씩 묶어서 한 단계에 있어요 + +01:33.410 --> 01:40.430 +이 하찮은 수학 계산기에 대해 계산을 해보죠 40만 달러를 16으로 나누죠 + +01:40.730 --> 01:45.920 +그걸 여러 시대의 개수로 곱한 거예요. 
+ +01:46.160 --> 01:51.950 +세 개 개국인 줄 알았는데 네 개국인 것 같네요 + +01:51.980 --> 01:54.050 +네 개국에서 활동했죠 + +01:54.200 --> 01:56.270 +그래서 네 + +01:56.360 --> 01:59.630 +네 개의 개화에 4를 곱하면 + +01:59.820 --> 02:07.890 +결국 증거를 남기게 되죠 여기 카메라가 있네요 100,000번 카메라예요 이게 100,000번 + +02:07.890 --> 02:10.530 +카메라죠 + +02:10.530 --> 02:15.120 +무작위로 나온 숫자가 아니에요 + +02:15.120 --> 02:22.230 +정확히 400,000개의 데이터 포인트에 관련된 배치 단계의 수입니다 16개의 + +02:22.230 --> 02:25.830 +배치로 묶였고 4번 이상 이루어졌죠 + +02:26.160 --> 02:29.550 +틱틱거리는 거죠 + +02:29.550 --> 02:34.470 +이 수치만 봐도 알 수 있어요 훈련에서 손실이 컸을 때 시작했는데 + +02:34.470 --> 02:37.770 +그 손실이 눈에 띄게 줄었어요 + +02:37.770 --> 02:44.490 +하지만 이 화면만 봐서는 정확히 알 수 없어요 수치가 위아래로 흔들리거든요 + +02:44.700 --> 02:46.770 +Get it 유행도 파악하기 어렵고요 + +02:46.770 --> 02:53.100 +배치의 진행을 시각화할 수 있는 툴이 있다면 좋을 텐데요 실행과 비슷한 것을 + +02:53.100 --> 02:55.500 +비교할 수 있는 툴이요 + +02:55.530 --> 02:59.880 +무게와 편향성이라는 도구가 바로 여기 있어요 + +03:00.090 --> 03:03.060 +이게 그 결과물이에요 + +03:03.060 --> 03:04.830 +지금 벌어지고 있어요 + +03:05.070 --> 03:09.750 +무슨 일이 일어나는지 보여주는 거예요 + +03:09.750 --> 03:10.020 +그래서요? 
+ +03:10.020 --> 03:12.930 +이 훈련의 손실은 실전이에요 + +03:12.930 --> 03:14.610 +우리가 정말 보고 싶은 건 이거예요 + +03:14.640 --> 03:17.550 +이 패널을 수정해서 좀 더 보도록 하죠 get it get it + +03:17.730 --> 03:24.630 +여기 있는 y축은 끝까지 0까지 이어지는데 0은 손실이 0이 되니 완벽을 + +03:24.630 --> 03:26.160 +의미하죠 + +03:26.160 --> 03:33.690 +이 모델은 항상 100% 확신으로 다음 토큰을 예측하고 100% 확신으로 예측한 + +03:33.690 --> 03:37.710 +다음 토큰이 올바른 다음 토큰이 되는 거죠 + +03:37.830 --> 03:40.350 +get it up + +03:40.380 --> 03:45.450 +1점보다 낮으면 의심받을 만하죠 5분 + +03:45.480 --> 03:49.950 +보통은 경험에서 우러나오는 비법이지만 일반적으로 + +03:49.950 --> 03:52.080 +보면 큰 손실이에요 + +03:52.080 --> 03:57.630 +그 아래로는 너무 많이 끼는 건 아닌지 생각할 시간이 생기죠 + +03:57.840 --> 04:03.730 +이 축을 바꿔서 y 축을 최소화할게요 + +04:03.760 --> 04:04.540 +갈 수 있어요 + +04:04.570 --> 04:05.230 +더 잘할 수 있어요 + +04:05.230 --> 04:12.250 +1로 하죠 4시쯤에 보면 어떤지 알 수 있을 거예요 + +04:12.430 --> 04:18.340 +울퉁불퉁한 코스지만 초반 단계에서도 분명한 건 울퉁불퉁한 코스지만 발전하는 + +04:18.370 --> 04:20.530 +추세가 있다는 거예요 + +04:20.560 --> 04:27.580 +GPT와 미세 조율로 봤을 때와는 달리 위아래로 흔들리기만 하고 별다른 + +04:27.580 --> 04:32.530 +변화는 없었는데 지금은 나아지고 있는 것 같아요 + +04:32.530 --> 04:36.190 +표면을 매끄럽게 하는 것도 가능해요 + +04:36.310 --> 04:37.600 +이건 확실히 아니에요 + +04:37.600 --> 04:40.450 +그 선이 나아지고 있어요 + +04:40.450 --> 04:42.700 +부드러움을 더하면요 + +04:42.730 --> 04:45.820 +왜 위아래로 흔들릴까요? + +04:45.970 --> 04:47.530 +상황에 따라 다르죠 + +04:47.530 --> 04:55.270 +각각의 점들은 16개의 데이터 포인트와 16개의 프롬프트입니다. 하나의 끝부분에 + +04:55.270 --> 05:00.940 +있는 예측할 수 있는 것들이죠. 무작위로 뒤섞여 있어요. + +05:00.940 --> 05:03.190 +16개의 무작위 세트예요 + +05:03.520 --> 05:06.940 +16번 테이블에 어떤 제품을 넣었는지 누가 알겠어요? 
+ +05:06.970 --> 05:12.070 +정말 비싼 제품이 있을 수도 있어요 그냥 추측만 하고 완전히 틀린 제품요 + +05:12.070 --> 05:14.410 +그래서 소음이 많죠 + +05:14.500 --> 05:20.530 +각 단계마다 구성이 다르기 때문에 비트를 좀 섞는 게 + +05:20.530 --> 05:27.640 +좋아요 그래야 흔들리는 느낌이 나거든요 모델이 이리저리 흔들리는 + +05:27.670 --> 05:29.620 +느낌요 + +05:29.710 --> 05:34.510 +어떤 면에서는 수작업으로 하고 있어요 최소한의 지역에만 + +05:34.510 --> 05:39.580 +머물러 있지 않고 다양한 가능성을 시도해야 하니까요 모델이 + +05:39.580 --> 05:47.170 +개선될 수 있도록요 글로벌 최소의 큰 계곡을 찾으면서 그러데이션을 찾고 다음 토큰을 예측하는 + +05:47.170 --> 05:49.600 +능력을 향상시키죠 + +05:49.960 --> 05:51.730 +비트가 조금 바뀌었어요 + +05:51.730 --> 05:57.040 +다시 말하지만 시각적으로 개선된 점이 분명히 보이네요 + +05:57.370 --> 05:58.990 +또 뭐가 있을까요? + +05:58.990 --> 06:00.940 +이게 학습률이군요 + +06:00.940 --> 06:06.460 +이게 그 키 하이퍼파라미터예요 얼마나 걸어야 하는지 알 수 있죠 + +06:06.460 --> 06:11.590 +코사인 학습 비트를 정했다고 말씀드렸는데 놀라실지도 모르겠네요 + +06:11.590 --> 06:15.370 +전혀 코사인 같지 않고 제가 설명한 것과도 달라요 + +06:15.370 --> 06:22.180 +지금 보시는 건 워밍업이라는 건데요 0에서 + +06:22.180 --> 06:30.190 +시작하지 않는다는 거죠 가장 높은 숫자 0001은 0에서 시작해서 0까지 쌓이죠 + +06:30.190 --> 06:36.760 +반죽이 시작될 때 극적으로 움직이잖아요 멀리 있다가 더 좋은 위치로 + +06:36.790 --> 06:38.110 +이동하죠 + +06:38.110 --> 06:40.750 +학습률이 너무 높으면 안 되죠 + +06:40.780 --> 06:43.570 +아니면 모든 면에서 지나칠 수도 있고요 + +06:43.570 --> 06:45.820 +그게 이 준비 운동의 원리예요 + +06:46.000 --> 06:49.180 +과정의 일부죠 + +06:49.210 --> 06:53.170 +실제로 이 자리에 서게 될 거예요 + +06:53.470 --> 07:00.860 +왜 코사인 형태가 안 보이는지 궁금하실 거예요 + +07:00.860 --> 07:02.900 +훈련 초반이라 그래요 + +07:02.900 --> 07:05.060 +그래도 코사인 꼭대기에 있어요 + +07:05.060 --> 07:09.560 +아래로 구부러지기 시작하는데 아주 천천히 구부러져요 + +07:09.920 --> 07:18.590 +GPU 상황에 대해 보여주는 재미있는 차트도 있어요 + +07:18.740 --> 07:21.440 +어떤 건 다른 것보다 더 의미가 있어요 + +07:21.470 --> 07:22.430 +여기 있어요 + +07:22.430 --> 07:24.140 +그거요 + +07:24.170 --> 07:29.480 +전력 사용량과 메모리 접근 시간은 좋아요 + +07:29.540 --> 07:30.020 +네 + +07:30.020 --> 07:32.150 +CPU 활용도를 보고 싶어요 + +07:32.150 --> 07:32.870 +그게 어디죠? 
+ +07:33.170 --> 07:35.540 +GPU CPU 활용도까지도요 + +07:35.570 --> 07:43.640 +GPU 활용도는 GPU가 열심히 일하는 시간을 확인하는 데 가장 중요한 부분일 겁니다 + +07:43.640 --> 07:49.460 +메모리를 안팎으로 처리하는 데 시간이 걸리는 게 아니죠 + +07:49.460 --> 07:52.580 +GPU 사용 여부를 알고 싶은 거죠 + +07:52.580 --> 07:59.090 +강력한 A100 블록을 망치질하며 최대한 활용하고 있다는 신호죠 + +07:59.090 --> 08:04.550 +다른 상자를 사용한다면 다른 hyperpaameter가 생기죠 + +08:04.550 --> 08:09.740 +GPU 활용도를 확인하세요 잘 작동하는지 잘 작동하는지 + +08:09.800 --> 08:13.760 +GPU를 잘 활용하고 있는지 확인하세요 + +08:13.790 --> 08:19.070 +하이퍼파라미터 일부를 수정해 get을 더 끌어낼 수 있는지 + +08:19.070 --> 08:23.090 +볼 수도 있어요 훈련 과정이 더 효율적이게요 + +08:23.690 --> 08:25.550 +네 + +08:25.550 --> 08:29.390 +이제 비트를 좀 쓸 거예요 + +08:29.450 --> 08:36.200 +요리 수업이나 오븐에 넣는 영상에서 배웠던 걸 해볼게요 + +08:36.200 --> 08:41.540 +아까 했던 걸 보여드릴게요 오븐에서 꺼낸 걸 + +08:41.540 --> 08:46.670 +그대로 넣는 건 속임수라고들 하잖아요 + +08:46.670 --> 08:53.180 +하지만 전 그걸 했고 얼마 전에 이걸 시작했었죠 동일 hyperperameter로 실행됐어요 + +08:53.180 --> 08:58.080 +같은 거예요 여기 분홍색요 + +08:58.080 --> 09:00.240 +제가 설명을 못 드렸네요 + +09:00.240 --> 09:00.870 +정말 미안해요 + +09:00.870 --> 09:05.640 +이 프로젝트에서 실행된 4개의 실행을 보여주고 있어요 + +09:05.640 --> 09:07.500 +프라이서라는 프로젝트죠 + +09:07.500 --> 09:12.450 +상단에 있는 내비게이션에는 프라이서 프로젝트가 있어요 + +09:12.450 --> 09:15.240 +여기 네 번의 경주가 있는데요 + +09:15.450 --> 09:25.500 +이것은 제가 서너 개의 이포크를 실행했을 때 같은 데이터셋으로 + +09:25.530 --> 09:28.650 +실행한 것인데요 + +09:28.680 --> 09:30.990 +다른 건 다 똑같아요 + +09:31.140 --> 09:35.910 +그걸 보여드리면 그 결과 어떻게 됐는지 알 수 있죠 + +09:38.310 --> 09:39.810 +자, 보세요 + +09:39.810 --> 09:43.290 +이게 살이 많은 고기예요 + +09:43.290 --> 09:48.630 +이걸 불러와서 스케일을 바꿔야 해요 + +09:49.620 --> 09:51.390 +우리가 내려가야 해요 + +09:51.990 --> 09:52.650 +됐어요 + +09:52.650 --> 09:53.640 +이제 다 보여요 + +09:53.640 --> 09:54.870 +1시에 끝낼게요 + +09:55.200 --> 09:57.840 +몇 가지 짚고 넘어갈게요 + +09:57.840 --> 10:02.370 +우선 보라색이 보이죠 + +10:02.790 --> 10:03.630 +클릭해 볼게요 + +10:03.660 --> 10:06.120 +이걸 누르면 위로 올라갈 거예요 + +10:06.120 --> 10:06.870 +됐어요 + +10:06.960 --> 10:10.320 +파란색과 보라색 선이 보이시죠 + +10:10.320 --> 10:13.920 +파란색은 여기쯤이고 보라색은 여기예요 + +10:13.920 --> 10:19.350 
+파란색은 현재 이 탭에서 실행 중인 실행이고요 + +10:19.650 --> 10:22.950 +지금 우리가 실행 중인 이 남자요 + +10:22.950 --> 10:26.610 +보라색은 제가 예전에 버린 거예요 + +10:26.730 --> 10:29.460 +물론 10일 전에 데이터가 들어있죠 + +10:29.820 --> 10:35.430 +보시다시피 파란색이 보라색과 아주 가깝게 붙어 있죠 제가 속임수를 + +10:35.430 --> 10:38.010 +쓰지 않는다는 증거예요 + +10:38.010 --> 10:43.200 +저 파란색은 보라색과 같은 궤도를 계속 따를 거예요 + +10:43.200 --> 10:45.960 +보라색은 이제 다 됐어요 + +10:46.320 --> 10:53.100 +보시면 아시겠지만 경향이 계속 발전하고 있어요 좋은 + +10:53.310 --> 10:54.150 +거죠 + +10:54.450 --> 11:00.630 +우선, 호전되는 걸 보실 거예요 호전되고 또 호전되고 + +11:00.630 --> 11:05.670 +또 호전되고 또 호전되고 더 호전되죠 + +11:05.670 --> 11:08.550 +이 다이빙이 뭔지 궁금하실 거예요 + +11:08.550 --> 11:12.210 +이런 잠수로 각 시대의 종말이 온 거죠 + +11:12.210 --> 11:14.130 +한 시대의 이야기죠 + +11:14.160 --> 11:18.240 +1번 시대를 거쳐 2번, 3번, 4번까지 반복될 수 있죠 + +11:18.240 --> 11:20.040 +불행히도 추락했죠 + +11:20.040 --> 11:25.650 +구글에서 이포크 4 중간쯤에 인스턴스를 리셋했어요 + +11:25.650 --> 11:28.530 +Get it 에포크 4가 어떻게 끝났는지는 못 봤네요 + +11:28.800 --> 11:31.980 +하지만 그건 중요하지 않아요 + +11:32.340 --> 11:36.720 +왜 각 시대 말에는 갑자기 폭락하는 걸까요? 
+ +11:36.720 --> 11:38.220 +아주 좋은 질문이에요 + +11:38.220 --> 11:39.720 +아주 중요해요 + +11:39.840 --> 11:44.190 +비트가 좀 과하게 들어가 있어서 그래요 + +11:44.190 --> 11:51.120 +이 모델은 첫 이포크에서 본 것과 같은 데이터를 보고 있어요 + +11:51.150 --> 11:53.070 +물론 좀 뒤죽박죽이긴 하죠 + +11:53.110 --> 12:00.100 +16개씩 다른 세트로 묶어서 파는 걸 봤어요 + +12:00.130 --> 12:01.750 +얼굴만 안아주면 돼요 + +12:01.750 --> 12:06.310 +특수 훈련 담당자가 자동으로 배열을 바꿔요 + +12:06.550 --> 12:12.070 +그럼에도 불구하고 모델은 이 데이터를 전에 본 적이 있고 그걸 이용할 수 있어요 + +12:12.070 --> 12:14.770 +중퇴했지만 배운 게 있어요 + +12:14.950 --> 12:22.600 +정규화하려고 여러 시도를 했지만 여전히 이런 데이터를 본 적이 + +12:22.600 --> 12:25.330 +있다는 게 문제예요 + +12:25.810 --> 12:30.760 +다행히도 작은 계단만 있어서 너무 꽉 끼는 건 걱정 + +12:30.760 --> 12:32.290 +안 해도 돼요 + +12:32.290 --> 12:38.620 +하지만 여기서부터 훨씬 더 나빠져요 + +12:38.830 --> 12:41.890 +너무 꽉 끼어서 그래요 + +12:41.890 --> 12:46.630 +만약 제가 말씀드린 최선의 관행을 따랐다면 즉, 유효성 + +12:46.630 --> 12:51.910 +검증을 동시에 실행했다면 성공률은 이런 점프 다운을 하지 + +12:51.910 --> 12:53.710 +않았을 거예요 + +12:53.710 --> 12:59.560 +둘째, 지금쯤이면 가격이 올라갔을 거예요 너무 많이 들어갔거든요 + +12:59.560 --> 13:04.360 +그리고 지금은 저축해둔 게 있어요 + +13:04.720 --> 13:11.650 +아니나 다를까 안는 얼굴 허브에 저장되어 있었습니다 5천 걸음마다 테스트할 수 있도록 말이죠 + +13:11.650 --> 13:19.450 +세 번째 시대 이후에도 결과는 더 나빠졌으니 세 번 이상은 시도할 필요가 없었죠. Get + +13:19.480 --> 13:20.410 +it! 
+ +13:20.410 --> 13:27.010 +구글이 이 시점에서 제 인스턴스를 중단한 건 중요하지 않아요 이 데이터는 더 이상 + +13:27.010 --> 13:28.660 +쓸모없으니까요 + +13:28.660 --> 13:30.580 +모델은 이미 형편없었어요 + +13:30.850 --> 13:36.940 +이건 훌륭한 예입니다 허브에 정기적으로 업로드 한 다음 각각의 체크포인트로 + +13:36.940 --> 13:40.360 +돌아가 테스트를 실행할 수 있죠 + +13:40.360 --> 13:43.930 +가장 잘 어울리는 모델을 고를 수 있어요 + +13:44.170 --> 13:50.020 +아주 강력한 기술이라 몇 개 시대를 개혁할지 추측할 필요가 없어요 + +13:50.020 --> 13:55.520 +너무 많이 실행한 다음 훈련 데이터에서 가장 좋은 결과를 얻은 걸 선택하는 거죠 + +13:55.550 --> 13:58.220 +새로운 걸 시도할 때 샘플이 다 떨어지죠 + +13:58.610 --> 14:03.950 +이게 어떻게 작동하는지 잘 보여주는 예라고 생각해요 + +14:04.010 --> 14:10.610 +과잉 착용의 영향과 다양한 시대의 영향도 봤죠 + +14:10.640 --> 14:16.010 +하지만 확실한 건 첫 번째 시대를 맞이한 이후 새로운 데이터가 끊임없이 + +14:16.010 --> 14:22.340 +쏟아져 나왔다는 겁니다 이렇게 기울어진 건 모두 개선의 징후예요 + +14:22.370 --> 14:26.450 +여기 끝부분의 결과도 여기보다 훨씬 더 좋았어요 + +14:26.450 --> 14:28.790 +이것도 발전을 보였어요 + +14:28.820 --> 14:32.330 +그런데 비트 박스가 좀 더 위험해졌어요 get it get it + +14:32.330 --> 14:36.680 +우연의 일치로 1시쯤에 찍은 것 같아요 5층요 + +14:36.680 --> 14:41.930 +아까 경험 법칙이 있다고 말씀드렸죠 1보다 less요 5점이면 눈썹을 치켜들고 + +14:41.930 --> 14:45.170 +결과를 다시 볼 때예요 + +14:46.220 --> 14:51.110 +이 시점에서 잠시 비트를 멈출게요 다시 돌아오면 몇 가지 다른 것에 대해 좀 더 + +14:51.140 --> 14:52.040 +얘기해 보죠 diff --git a/week5/community-contributions/subtitles/srts/59508297/en_US.srt b/week5/community-contributions/subtitles/srts/59508297/en_US.srt new file mode 100755 index 0000000..ba8a2f5 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508297/en_US.srt @@ -0,0 +1,97 @@ +WEBVTT + +00:01.400 --> 00:02.960 +What more is there to say, really? + +00:02.990 --> 00:04.820 +Tomorrow is the day for results. + +00:04.850 --> 00:07.610 +A day that very excited indeed about. + +00:07.850 --> 00:08.570 +Uh. + +00:08.570 --> 00:15.920 +But I will mention everything that you can do at this point generating text and code with frontier models + +00:15.920 --> 00:22.550 +and their APIs, open source models through hugging faces, various libraries, tools, assistance, + +00:22.580 --> 00:23.420 +rag. 
+ +00:23.540 --> 00:25.460 +It's all part of your skill set. + +00:25.550 --> 00:32.420 +You can follow the five step strategy I went through to solve problems including curating data, building + +00:32.420 --> 00:39.320 +a baseline model, and fine tuning frontier model with example training sets in those jsonl the JSON + +00:39.320 --> 00:48.530 +lines files that we uploaded, and now and now you can run Q Laura fine tuning for open source models, + +00:48.560 --> 00:52.250 +including defining and selecting your hyperparameters. + +00:52.250 --> 00:56.270 +And then you can run training and you can monitor training. + +00:56.270 --> 01:05.900 +And a great joy it is to uh, so tomorrow, you will be able to run inference on a fine tuned model, + +01:05.900 --> 01:10.340 +which isn't as simple as it sounds, because you can't just load it in. + +01:10.370 --> 01:15.710 +You have to know how to load in the base model and apply these chullora weights on top of it. + +01:15.890 --> 01:16.730 +But it's not that difficult. + +01:16.730 --> 01:17.810 +But but it is. + +01:17.840 --> 01:19.400 +There are a couple of steps to it. + +01:19.850 --> 01:23.480 +Uh, and then, of course, we'll take a moment to look at the results. + +01:23.630 --> 01:31.220 +Um, but most importantly, at that point, you will be able to carry out the end to end process from + +01:31.220 --> 01:37.760 +first idea and thought and definition of the commercial problem through to having a trained model, + +01:37.850 --> 01:45.050 +uh, to build your own proprietary verticalized LLM to solve a business problem. + +01:45.650 --> 01:50.450 +And I was going to try and put a third bullet in there, because I've normally had three bullets against + +01:50.450 --> 01:52.100 +the what you'll be able to do tomorrow. + +01:52.100 --> 01:57.320 +But it seems to me like that second bullet is enough of a mic drop that it's like you don't need a third + +01:57.320 --> 01:57.740 +bullet. 
+ +01:57.740 --> 01:58.820 +That's all you need. + +01:58.820 --> 02:00.830 +That second bullet, it says it all. + +02:00.830 --> 02:02.150 +That's what you're going to be able to do. + +02:02.150 --> 02:03.560 +And it's huge. + +02:03.590 --> 02:04.520 +I'll see you then. diff --git a/week5/community-contributions/subtitles/srts/59508297/ja_JP.srt b/week5/community-contributions/subtitles/srts/59508297/ja_JP.srt new file mode 100755 index 0000000..b2fd7af --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508297/ja_JP.srt @@ -0,0 +1,79 @@ +WEBVTT + +00:01.400 --> 00:02.960 +これ以上何を言うことがある? + +00:02.990 --> 00:04.820 +明日は結果を出す日だ。 + +00:04.850 --> 00:07.610 +本当に興奮した一日だった。 + +00:07.850 --> 00:08.570 +ええと。 + +00:08.570 --> 00:15.920 +しかし、 フロンティア・モデルとそのAPI、 ハギング・フェイスを通じたオープンソース・モデル、 様々なライブラリ、 + +00:15.920 --> 00:23.420 +ツール、 アシスタンス、 ボロを使ってテキストやコードを生成するこの時点でできることはすべて挙げておこう。 + +00:23.540 --> 00:25.460 +すべてはあなたのスキルの一部だ。 + +00:25.550 --> 00:32.420 +データのキュレーション、 ベースラインモデルの構築、 + +00:32.420 --> 00:39.320 +フロンティアモデルのファインチューニングなど、 + +00:39.320 --> 00:52.250 +問題解決のために私が行った5つのステップ戦略に従うことができます。 + +00:52.250 --> 00:56.270 +そして、 トレーニングを実行し、 トレーニングをモニターすることができる。 + +00:56.270 --> 01:10.340 +明日には、 微調整されたモデルで推論を実行することができる。 + +01:10.370 --> 01:15.710 +ベースモデルをロードし、 その上にこれらのチュロラウェイトを適用する方法を知らなければならない。 + +01:15.890 --> 01:16.730 +でも、 そんなに難しいことじゃない。 + +01:16.730 --> 01:17.810 +でも、 でも、 そうなんだ。 + +01:17.840 --> 01:19.400 +それにはいくつかのステップがある。 + +01:19.850 --> 01:23.480 +ええと、 それからもちろん、 結果を見てみましょう。 + +01:23.630 --> 01:31.220 +しかし、 最も重要なことは、 その時点で、 最初のアイデアや考え、 商業的な問題の定義から、 + +01:31.220 --> 01:45.050 +訓練されたモデル、 つまり、 ビジネス上の問題を解決するための独自の垂直型LLMを構築するまでのエンド・ツー・エンドのプロセスを実行できるようになることだ。 + +01:45.650 --> 01:52.100 +そして、 私はそこに3つ目の弾丸を入れてみようと思っていた。 なぜなら、 私は通常、 明日できることに対して3つの弾丸を持っているからだ。 + +01:52.100 --> 01:57.740 +でも、 2発目はマイクを落とすのに十分で、 3発目は必要ないように思える。 + +01:57.740 --> 01:58.820 +それだけで十分だ。 + +01:58.820 --> 02:00.830 +2つ目の弾丸がすべてを物語っている。 + +02:00.830 --> 02:02.150 +それができるようになることだ。 + +02:02.150 --> 
02:03.560 +しかも巨大だ。 + +02:03.590 --> 02:04.520 +それじゃ、 また diff --git a/week5/community-contributions/subtitles/srts/59508297/ko_KR.srt b/week5/community-contributions/subtitles/srts/59508297/ko_KR.srt new file mode 100755 index 0000000..07357fe --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59508297/ko_KR.srt @@ -0,0 +1,91 @@ +WEBVTT + +00:01.400 --> 00:02.960 +더 할 말이 뭐가 있겠어요? + +00:02.990 --> 00:04.820 +내일이면 결과가 나오겠죠 + +00:04.850 --> 00:07.610 +정말 흥분되는 날이었죠 + +00:07.850 --> 00:08.570 +네 + +00:08.570 --> 00:15.920 +이 시점에서 할 수 있는 모든 걸 말씀드리죠 프론티어 모델과 API로 텍스트와 코드를 + +00:15.920 --> 00:23.420 +생성하고 얼굴을 끌어안기, 다양한 라이브러리 도구, 지원, 래그 모델을 통해서요 + +00:23.540 --> 00:25.460 +다 실력의 일부예요 + +00:25.550 --> 00:32.420 +제가 다뤘던 5단계 전략을 따르시면 됩니다 큐레이팅 데이터 문제 해결과 기본 + +00:32.420 --> 00:39.320 +모델 구축 개척자 모델을 미세 조정하는 거죠 업로드한 jsonl의 JSON + +00:39.320 --> 00:48.530 +라인 파일에 대한 훈련 세트와 함께요 Q 로라 미세 조정도 실행할 수 있습니다 hyperpaameter를 + +00:48.560 --> 00:52.250 +정의 및 선택하는 것도 포함해서요 + +00:52.250 --> 00:56.270 +훈련도 실행하고 모니터링도 할 수 있죠 + +00:56.270 --> 01:05.900 +내일 여러분은 미세 튜닝 모델에 대한 추론을 실행할 수 있을 겁니다 생각만큼 간단하지 + +01:05.900 --> 01:10.340 +않아요 그냥 로드할 수 없으니까요 + +01:10.370 --> 01:15.710 +베이스 모델을 로드하고 출로라 추를 그 위에 붙이는 법을 알아야 해요 + +01:15.890 --> 01:16.730 +그렇게 어렵진 않아요 + +01:16.730 --> 01:17.810 +하지만 사실이에요 + +01:17.840 --> 01:19.400 +몇 가지 단계가 있어요 + +01:19.850 --> 01:23.480 +그 후에 결과를 보도록 하죠 + +01:23.630 --> 01:31.220 +하지만 가장 중요한 것은 그 시점에서 상업적 문제의 아이디어와 사고, 정의를 + +01:31.220 --> 01:37.760 +종단 간 프로세스로 수행할 수 있다는 것입니다 훈련된 모델을 통해 + +01:37.850 --> 01:45.050 +독자적인 수직화 LLM을 구축해 사업적 문제를 해결할 수 있죠 + +01:45.650 --> 01:50.450 +세 번째 총알을 넣으려고 했어요 내일 할 작업에서는 보통 세 발을 + +01:50.450 --> 01:52.100 +넣거든요 Put + +01:52.100 --> 01:57.740 +하지만 두 번째 총알로 충분히 유명해져서 세 번째 총알은 필요 없는 것 같아요 + +01:57.740 --> 01:58.820 +그거면 돼요 + +01:58.820 --> 02:00.830 +두 번째 총알이 모든 걸 말해 줘요 + +02:00.830 --> 02:02.150 +그게 여러분이 할 수 있는 거죠 + +02:02.150 --> 02:03.560 +정말 커요 + +02:03.590 --> 02:04.520 +그때 봐요 diff --git 
a/week5/community-contributions/subtitles/srts/59509185/en_US.srt b/week5/community-contributions/subtitles/srts/59509185/en_US.srt new file mode 100755 index 0000000..4041a77 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59509185/en_US.srt @@ -0,0 +1,526 @@ +WEBVTT + +00:00.680 --> 00:06.440 +So this is where I left you looking at this satisfying chart on training loss and seeing the training + +00:06.440 --> 00:07.640 +loss coming down. + +00:07.670 --> 00:09.800 +Could stare at this all day. + +00:09.800 --> 00:14.270 +Uh, but, uh, we will move on to other charts. + +00:14.480 --> 00:18.170 +Uh, let's go back to this diagram again. + +00:18.590 --> 00:22.190 +Um, I wanted to point out this one that you may have already seen. + +00:22.190 --> 00:24.110 +This is the learning rate. + +00:24.110 --> 00:25.670 +Let's blow this up. + +00:27.050 --> 00:34.250 +Uh, so this is showing you exactly what I was trying to describe earlier, but but as I, as I told + +00:34.250 --> 00:39.830 +you it would, it looks much more clear when when you're looking at it, uh, in weights and biases. + +00:39.890 --> 00:47.030 +Um, so this is showing how the learning rate changed over time from the beginning through to the end + +00:47.030 --> 00:54.290 +of the four, almost four, uh, didn't quite get to the end of the fourth epoch, um, when I ran the + +00:54.290 --> 00:55.130 +model before. + +00:55.160 --> 00:58.430 +And what you can see is that the learning rate started at zero. + +00:58.460 --> 01:03.720 +It then went up, uh, because of the warm up to this point here. + +01:03.870 --> 01:12.930 +Um, and then you can see that it, it gradually comes down in this very nice, smooth way, slowly + +01:12.930 --> 01:15.210 +to start with and then a lot more. + +01:15.210 --> 01:17.460 +And then at the end it tails off. + +01:17.460 --> 01:23.430 +And the idea is that it actually gets to exactly zero when you're four epochs are up. 
+ +01:23.430 --> 01:25.710 +But I didn't make it to the end of the fourth epoch. + +01:25.740 --> 01:30.900 +And obviously if you choose to run for one epoch, then you get this whole chart just for the one epoch. + +01:30.930 --> 01:36.810 +It just takes the number of epochs that you set, and it smoothens the learning rate over that number + +01:36.810 --> 01:37.860 +of epochs. + +01:38.220 --> 01:42.660 +Uh, so, um, it hopefully illustrates exactly the point. + +01:42.660 --> 01:48.960 +And you can see that our blue line representing the current batch is right up at the top of this. + +01:49.020 --> 01:53.010 +Uh, and it looked flat to us only because we were at the very, very top. + +01:53.010 --> 01:59.310 +But in due course, it is going to come down smoothly, just as its predecessor did. + +02:00.580 --> 02:06.220 +Uh, so then another thing I wanted to mention, um, is that when we were looking at the different + +02:06.220 --> 02:11.830 +runs just here, you can see you can use this eye icon here to decide what you're going to be looking + +02:11.830 --> 02:12.250 +at. + +02:12.250 --> 02:16.690 +And I didn't put an eye on this one here in between. + +02:16.720 --> 02:27.190 +Now, what this is, is that after my, uh, my this batch, uh, was a brutally kicked off by by kicked + +02:27.190 --> 02:29.230 +off its instance by by Google. + +02:29.230 --> 02:35.230 +I was annoyed and decided I wanted to try and continue where it left off and run another couple of epochs. + +02:35.230 --> 02:38.470 +Even though the results got worse, I wanted to see what happened. + +02:38.500 --> 02:42.970 +I wanted to take it to an extreme, and I wanted to make sure it wasn't just an anomaly that the fourth + +02:42.970 --> 02:44.770 +epoch, the results got worse. + +02:44.770 --> 02:47.110 +Maybe the fifth epoch, they would suddenly be a lot better. + +02:47.110 --> 02:49.360 +So I at least wanted to see it play out a bit. 
+ +02:49.540 --> 02:52.540 +Um, and so I'm going to now show that for you. + +02:52.570 --> 02:56.080 +Now it's going to be a bit confusing because I started it again. + +02:56.080 --> 02:58.960 +It's not going to continue off to the right here. + +02:58.960 --> 03:01.430 +It's going to begin over on the left. + +03:01.430 --> 03:06.500 +So you just have to bear in mind that it's going to see it as if it was the first training step. + +03:06.500 --> 03:11.720 +But in fact, what I'm going to show you belongs over to the right of this purple line. + +03:11.720 --> 03:12.350 +Let's see. + +03:12.380 --> 03:14.180 +Now this this this thing. + +03:14.180 --> 03:15.530 +And there it is. + +03:15.530 --> 03:17.600 +So let me blow this up. + +03:18.050 --> 03:23.630 +So hopefully it's clear to you that there should really be over here. + +03:23.630 --> 03:27.080 +It should be we should be able to take that and pull it over to the right. + +03:27.530 --> 03:34.130 +Because this is what happened when I resumed that SFT trainer from where it left off down here. + +03:34.130 --> 03:44.420 +And what you can see is this is then basically a fifth, another full epoch, um, representing that + +03:44.420 --> 03:47.990 +we never completed the fourth one, but this is like doing a whole nother epoch. + +03:47.990 --> 03:51.290 +And then this would be like the whole of the sixth epoch. + +03:51.410 --> 03:57.530 +Um, and what you can see again, another of these falls between the, um, when it, when it started, + +03:57.530 --> 04:04.100 +uh, the sixth epoch and at this point, definitely in a very suspicious territory. + +04:04.250 --> 04:05.810 +The loss looking too low. + +04:05.810 --> 04:12.710 +And sure enough, when I took these versions of the model and tried to run tests against them, they + +04:12.710 --> 04:19.010 +were all poorer in performance than the model that I took from a cut off about about here. + +04:19.460 --> 04:21.890 +So it was a test worth doing. 
+ +04:21.890 --> 04:27.890 +I needed to satisfy myself that it wasn't just bad luck back here, but that it really was overfitting + +04:27.890 --> 04:30.080 +and that I wasn't getting useful results anymore. + +04:30.080 --> 04:31.910 +And that did prove to be the case. + +04:32.210 --> 04:35.090 +So it was a good test to do. + +04:35.090 --> 04:40.220 +And you can benefit from this because you can know if you have decided to do the full, the full Monty + +04:40.250 --> 04:47.300 +and run with this big version of the model, then, you know, once you've done, uh, your you might + +04:47.300 --> 04:48.830 +as well not go beyond three epochs. + +04:48.830 --> 04:55.820 +There is no no use for that, in my experience, unless you've tried changing hyperparameters and you've + +04:55.820 --> 04:57.290 +discovered something different. + +04:58.500 --> 05:04.110 +Uh, so then, um, the final thing I'll show you, you can play around with many of the other charts + +05:04.140 --> 05:04.860 +and weights and biases. + +05:04.860 --> 05:05.760 +There's lots to explore. + +05:05.760 --> 05:09.930 +You can look at the gradients themselves, and that is quite a rabbit hole. + +05:10.020 --> 05:15.450 +And you'd have to do a little bit of, uh, digging and research to understand what you're looking at + +05:15.450 --> 05:17.220 +and how to learn things from it. + +05:17.220 --> 05:22.440 +And ideally, what you the main things that you want to be looking for is making sure that you never + +05:22.470 --> 05:26.670 +get into a situation where your gradients are becoming zero. + +05:26.880 --> 05:29.700 +Um, which means that you're not learning anymore. + +05:29.700 --> 05:34.890 +If your gradients are zero, then your model is no longer learning and there's no use to be continuing + +05:34.890 --> 05:36.270 +the learning process. 
+ +05:36.270 --> 05:40.740 +So you want to watch out for gradients being zero, and you also want to watch out for gradients blowing + +05:40.740 --> 05:47.670 +up and being too high, because that means that your your model is going to be bouncing around too much + +05:47.700 --> 05:49.920 +unless your learning rate is really tiny. + +05:49.920 --> 05:53.820 +Uh, your model is going to be, uh, not learning in a productive way. + +05:53.820 --> 05:59.740 +So those are some of the things to look for when you're looking at gradients in weights and biases. + +06:00.370 --> 06:03.730 +But the last thing I wanted to show you was going to hugging face. + +06:03.910 --> 06:11.710 +Um, and just show you if you if you remember this, this model here, which is the, uh, the the version + +06:11.710 --> 06:15.520 +of the Pricer model that I ran for all of these epochs. + +06:15.700 --> 06:16.930 +Um, you see this? + +06:16.930 --> 06:21.880 +The name of the run is the name that I constructed based on the date and time. + +06:21.940 --> 06:24.880 +Um, and it ends in, uh, 39. + +06:25.030 --> 06:26.440 +The number of seconds. + +06:26.440 --> 06:28.690 +Uh, just keep keep that in your mind. + +06:28.690 --> 06:34.480 +When we turn to hugging face, you go to the avatar menu and to your own name. + +06:34.600 --> 06:40.180 +Uh, you will then see your spaces if you have any, your models and your data sets. + +06:40.180 --> 06:42.700 +You can see I have 1 or 2. + +06:43.120 --> 06:46.990 +Uh, and when it comes to Pricer, I've run this once or twice. + +06:47.170 --> 06:54.070 +Uh, uh, and these, each of these represent the different repos that represent one of the different + +06:54.070 --> 06:55.870 +pricer runs. + +06:55.960 --> 07:01.670 +Um, and I like to keep them each each of these runs as a separate repo so that I can have all the different + +07:01.700 --> 07:05.420 +epochs and everything within this, this, this one repo. 
+
+07:05.420 --> 07:12.110
+So what this 139 I think is the one that was the big guy with, with the, the, the four, three and
+
+07:12.110 --> 07:13.310
+a half epochs.
+
+07:13.310 --> 07:22.730
+So if we click into this, um, it comes up with the model page, uh, and if you go to files and versions,
+
+07:22.730 --> 07:28.100
+what you're looking at here is basically you're looking at git, you're looking at a repo which has
+
+07:28.100 --> 07:31.070
+within it the files associated with your model.
+
+07:31.340 --> 07:39.020
+Um, and as I mentioned uh, recently, you can see that the business here is this file, the safe tensors.
+
+07:39.020 --> 07:49.760
+And that file is 109MB, which is the size of the adapters that the adapters that we're using with are
+
+07:49.790 --> 07:50.690
+set to 32.
+
+07:50.720 --> 07:55.550
+When we did the maths, we worked out that that would be 109MB worth of weights.
+
+07:55.550 --> 07:57.400
+And that is all in this file.
+
+07:57.400 --> 07:59.710
+Safe tensors right here.
+
+08:00.130 --> 08:06.700
+Um, and, um, yeah, there's there's, uh, a few other things that we could look at.
+
+08:06.730 --> 08:14.860
+Adapter config.json, uh, gives information about the, the adapter that we're using for the Lora
+
+08:14.860 --> 08:15.460
+fine tuning.
+
+08:15.460 --> 08:21.940
+And you can see, for example, it has the target modules stored in here, and it has our value of R
+
+08:21.970 --> 08:22.810
+32.
+
+08:22.840 --> 08:25.450
+It says we're using Lora training.
+
+08:25.660 --> 08:32.230
+Um, and so it has and it has the base model name uh llama 3.18 billion in there.
+
+08:32.590 --> 08:39.130
+Um, so that that gives you a sense of all of the information that's saved for this, this model.
+
+08:39.160 --> 08:43.360
+But the other thing I wanted to point out was this 16 commits over here.
+
+08:43.360 --> 08:46.090
+So this is showing the commit history.
+ +08:46.090 --> 08:53.170 +And basically every 5000 steps, um, the code that you saw was saving. + +08:53.170 --> 08:55.600 +This was pushing our model to the hub. + +08:55.600 --> 08:57.940 +That was something we configured in the training parameters. + +08:57.940 --> 09:00.760 +So it was being saved every 5000 steps. + +09:00.760 --> 09:05.410 +And that means that we can load in any of these models and test them. + +09:05.410 --> 09:08.080 +And that's how we can select the one that's performing the best. + +09:08.110 --> 09:10.000 +We've got each of these different checkpoints. + +09:10.000 --> 09:11.890 +And we can do as many of these as we want. + +09:12.070 --> 09:19.540 +Um, and uh, and we can use that to, to, to come back and recreate that moment when the model was + +09:19.540 --> 09:20.980 +at that point in training. + +09:21.040 --> 09:26.110 +Um, and so you can imagine I could have all of my different training runs all in this as different, + +09:26.140 --> 09:32.740 +uh, different revisions of this, uh, different, different versions of this price, the repository. + +09:32.740 --> 09:33.970 +But then it would get very cluttered. + +09:33.970 --> 09:38.620 +And that's why I separate it out so that each run is its own repo. + +09:38.620 --> 09:45.430 +And then the different batch steps show here, um, as the different history of the commits. + +09:45.580 --> 09:48.250 +Um, I think that's a nice, organized way of doing it. + +09:48.670 --> 09:54.400 +So that's how to see the model in the in the Huggingface hub. + +09:54.400 --> 09:56.170 +Uh, presumably We'll see. + +09:56.170 --> 09:57.820 +This is the one that's running right now. + +09:57.820 --> 09:59.290 +It's updated 15 minutes ago. + +09:59.290 --> 10:02.440 +So we go into this go into files and versions. + +10:02.440 --> 10:03.190 +We'll see that. + +10:03.220 --> 10:05.320 +Yes it's already saved a version. + +10:05.320 --> 10:06.610 +We've got to step 5000. 
+ +10:06.640 --> 10:10.510 +So one version of this or two commits because there was an initial commit. + +10:10.510 --> 10:14.950 +And then step 5000 is just has been saved 15 minutes ago. + +10:14.980 --> 10:17.110 +So there's already a model that's running. + +10:17.110 --> 10:20.890 +And if you've been doing this at the same time as me, then you'll be in a similar boat and you'll be + +10:20.920 --> 10:26.560 +having versions of this model being uploaded to the Huggingface hub while I speak. + +10:26.950 --> 10:30.070 +And you'll actually, you would be able to test them right away. + +10:30.070 --> 10:32.350 +You don't need to wait for the training to complete. + +10:32.770 --> 10:34.780 +Um, so there we go. + +10:34.810 --> 10:42.940 +We've seen the, uh, the training underway, with the losses showing here that are a bit hard to understand. + +10:42.970 --> 10:49.630 +We visualize them beautifully in weights and biases, and we've seen the model itself being saved to + +10:49.660 --> 10:50.290 +the hub. + +10:50.290 --> 10:53.650 +And this is the experience of training. + +10:53.680 --> 10:55.640 +And I tell you, I can do this for hours. + +10:55.640 --> 10:59.270 +And I have done this for hours, which is very tragic of me. + +10:59.270 --> 11:03.710 +And in fact, I mentioned, I think right back at the very beginning of this course, that that screen + +11:03.710 --> 11:10.100 +you see over there that actually has weights and biases on it and the the chart that I was just showing + +11:10.100 --> 11:16.430 +you, uh, was, uh, this chart was the chart that was on there, uh, at the very beginning. + +11:16.430 --> 11:18.320 +Right now it's showing this chart here. + +11:18.470 --> 11:25.550 +Uh, and so I've been watching that during the course of the first few weeks of building this, this + +11:25.550 --> 11:26.330 +course. + +11:26.450 --> 11:28.730 +Uh, and it's been terrific fun. 
+ +11:28.820 --> 11:34.220 +Uh, and hopefully you're doing much the same thing, watching the training happening, seeing your + +11:34.220 --> 11:37.040 +model versions being uploaded to the hub. + +11:37.070 --> 11:41.090 +Uh, and all that remains is for the run to complete. + +11:41.090 --> 11:50.270 +And then tomorrow for us to come and evaluate the model and see how we have done fine tuning our own + +11:50.270 --> 11:51.770 +verticalized model. + +11:52.100 --> 11:54.020 +Um, but we'll just wrap up for today. + +11:54.020 --> 11:55.400 +Back to the slides. diff --git a/week5/community-contributions/subtitles/srts/59509185/ja_JP.srt b/week5/community-contributions/subtitles/srts/59509185/ja_JP.srt new file mode 100755 index 0000000..9695091 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59509185/ja_JP.srt @@ -0,0 +1,439 @@ +WEBVTT + +00:00.680 --> 00:06.440 +トレーニング・ロスに関するこの満足のいくチャートを見て、 トレーニング・ロスが減少していくのを見たところで、 + +00:06.440 --> 00:07.640 +ここでお別れだ。 + +00:07.670 --> 00:09.800 +一日中眺めていても飽きない。 + +00:09.800 --> 00:14.270 +あー、 でも、 他のチャートに移ろう。 + +00:14.480 --> 00:18.170 +もう一度、 この図に戻ってみよう。 + +00:18.590 --> 00:22.190 +ええと、 すでにご覧になったかもしれませんが、 これを指摘したいと思います。 + +00:22.190 --> 00:24.110 +これが学習率である。 + +00:24.110 --> 00:25.670 +爆破しよう。 + +00:27.050 --> 00:34.250 +でも、 さっき言ったように、 ウエイトとバイアスを見ると、 + +00:34.250 --> 00:39.830 +もっとはっきり見えるんだ。 + +00:39.890 --> 00:55.130 +これは、 4つのエポックの初めから終わりまで、 つまりほぼ4つのエポックの終わりまでは学習率がどのように変化したかを示しています。 + +00:55.160 --> 00:58.430 +そしてご覧いただけるのは、 学習率がゼロから始まっていることだ。 + +00:58.460 --> 01:03.720 +その後、 ここまで暖かくなったので、 上昇した。 + +01:03.870 --> 01:15.210 +そうすると、 最初はゆっくりで、 だんだんとスムーズになっていくのがわかるだろう。 + +01:15.210 --> 01:17.460 +そして最後は尻すぼみになる。 + +01:17.460 --> 01:23.430 +そして、 4つのエポックが終わったときに、 ちょうどゼロになるという考え方だ。 + +01:23.430 --> 01:25.710 +しかし、 私は第4エポックの終わりには間に合わなかった。 + +01:25.740 --> 01:30.900 +そして、 1つのエポックを選択した場合、 その1つのエポックのチャート全体が表示される。 + +01:30.930 --> 01:37.860 +設定したエポック数を受け取り、 その数のエポックにわたって学習率を平滑化するだけだ。 + +01:38.220 --> 01:42.660 +ええと、 つまり、 
これはまさにそのポイントを示しているんだ。 + +01:42.660 --> 01:48.960 +そして、 現在のバッチを表す青い線がこの一番上にあるのがわかるだろう。 + +01:49.020 --> 01:53.010 +私たちには平坦に見えたが、 それは私たちが一番上にいたからだ。 + +01:53.010 --> 01:59.310 +しかし、 そのうちに、 前任者がそうであったように、 スムーズに降りてくるだろう。 + +02:00.580 --> 02:06.220 +それから、 もうひとつ言っておきたいことがあるんだけど、 ここでいろいろな走りを見ているときに、 + +02:06.220 --> 02:12.250 +この目のアイコンを使って何を見るかを決めることができるんだ。 + +02:12.250 --> 02:16.690 +そして、 その間にあるこの1本には目を入れていなかった。 + +02:16.720 --> 02:29.230 +さて、 これはどういうことかというと、 私のこのバッチがグーグルによってインスタンスから蹴落とされた後、 残酷にもグーグルによってインスタンスから蹴落とされたのだ。 + +02:29.230 --> 02:35.230 +私は腹が立ったので、 その続きをして、 もう2、 3エポック走ってみようと思った。 + +02:35.230 --> 02:38.470 +結果が悪くなっても、 何が起こったのか見たかった。 + +02:38.500 --> 02:44.770 +極端な話、 4回目のエポックで結果が悪くなったのは単なる異常ではないことを確かめたかった。 + +02:44.770 --> 02:47.110 +第5エポックになれば、 急に良くなるかもしれない。 + +02:47.110 --> 02:49.360 +だからせめて、 そのプレーを少しは見たかった。 + +02:49.540 --> 02:52.540 +では、 それをお見せしましょう。 + +02:52.570 --> 02:56.080 +また始めたので、 ちょっと混乱しそうだ。 + +02:56.080 --> 02:58.960 +このまま右に外れることはない。 + +02:58.960 --> 03:01.430 +左から始まるよ。 + +03:01.430 --> 03:06.500 +だから、 それが最初のトレーニングステップであるかのように見られることを念頭に置いておく必要がある。 + +03:06.500 --> 03:11.720 +しかし実際には、 これからお見せするのは、 この紫の線の右側にあるものだ。 + +03:11.720 --> 03:12.350 +見てみよう。 + +03:12.380 --> 03:14.180 +さて、 これはこれは。 + +03:14.180 --> 03:15.530 +そして、 そこにある。 + +03:15.530 --> 03:17.600 +では、 これを爆発させてみよう。 + +03:18.050 --> 03:23.630 +だから、 こっちの方が本当にあるべきだということがはっきりすればいいんだけどね。 + +03:23.630 --> 03:27.080 +それを右に寄せることができるはずだ。 + +03:27.530 --> 03:34.130 +というのも、 SFTのトレーナーをここで中断したところから再開したら、 こうなったのだ。 + +03:34.130 --> 03:47.990 +そして、 おわかりのように、 これは基本的に第5の、 もうひとつの完全なエポックである。 + +03:47.990 --> 03:51.290 +そして、 これは第6エポック全体のようなものだ。 + +03:51.410 --> 03:57.530 +そして、 もうひとつは、 6番目のエポックが始まったときと、 + +03:57.530 --> 04:04.100 +この時点で、 間違いなく非常に疑わしい領域にある。 + +04:04.250 --> 04:05.810 +損失が少なすぎるように見える。 + +04:05.810 --> 04:12.710 +そして案の定、 これらのバージョンのモデルを使ってテストしてみると、 + +04:12.710 --> 04:19.010 +どれもこのあたりで切り落としたモデルよりも性能が劣っていた。 + +04:19.460 --> 04:21.890 +だから、 やる価値のあるテストだった。 + +04:21.890 --> 04:30.080 +ただ運が悪かったのではなく、 
本当にオーバーフィッティングで、 もう有益な結果が得られていないのだと納得する必要があった。 + +04:30.080 --> 04:31.910 +そして、 それが証明された。 + +04:32.210 --> 04:35.090 +だから、 いいテストになった。 + +04:35.090 --> 04:40.220 +なぜなら、 もしフル・モンティでこの大きなバージョンのモデルで走ると決めたなら、 + +04:40.250 --> 04:48.830 +3つのエポックを超えない方がいいということがわかるからだ。 + +04:48.830 --> 04:57.290 +私の経験では、 ハイパーパラメーターを変えてみて何か違うものを発見しない限り、 その使い道はない。 + +04:58.500 --> 05:04.860 +それから、 最後にお見せするのは、 他のチャートやウェイト、 バイアスをいろいろと弄ってみることだ。 + +05:04.860 --> 05:05.760 +探検することはたくさんある。 + +05:05.760 --> 05:09.930 +グラデーションそのものを見ることもできるし、 それはかなりのウサギの穴だ。 + +05:10.020 --> 05:17.220 +そして、 自分が見ているものを理解し、 そこから何かを学ぶには、 少し、 あー、 掘り下げて研究する必要がある。 + +05:17.220 --> 05:26.670 +そして理想的には、 勾配がゼロになるような状況に陥らないようにすることだ。 + +05:26.880 --> 05:29.700 +ええと、 つまり、 あなたはもう学んでいないということです。 + +05:29.700 --> 05:36.270 +勾配がゼロであれば、 モデルはもはや学習しておらず、 学習プロセスを続ける意味がない。 + +05:36.270 --> 05:40.740 +つまり、 勾配がゼロにならないように気をつけたいし、 + +05:40.740 --> 05:49.920 +勾配が吹っ飛んで高くなりすぎないように気をつけたい。 + +05:49.920 --> 05:53.820 +あー、 あなたのモデルは、 あー、 生産的な方法で学んでいない。 + +05:53.820 --> 05:59.740 +ウェイトとバイアスの勾配を見る際には、 このような点に注意する必要がある。 + +06:00.370 --> 06:03.730 +でも、 最後に見せたかったのは、 ハグする顔だった。 + +06:03.910 --> 06:15.520 +このモデルは、 私がすべてのエポックについて実行したPricerモデルのバージョンです。 + +06:15.700 --> 06:16.930 +これが見える? 
+ +06:16.930 --> 06:21.880 +ランの名前は、 日付と時間から私が組み立てたものだ。 + +06:21.940 --> 06:24.880 +ええと、 39で終わるんだ。 + +06:25.030 --> 06:26.440 +秒数。 + +06:26.440 --> 06:28.690 +ああ、 そのことは頭の片隅に置いておいてくれ。 + +06:28.690 --> 06:34.480 +ハグ顔になったら、 アバターメニューから自分の名前に移動する。 + +06:34.600 --> 06:40.180 +モデルやデータセットがあれば、 スペースが表示されます。 + +06:40.180 --> 06:42.700 +私が1つか2つ持っているのがわかるだろう。 + +06:43.120 --> 06:46.990 +プライサーに関しては、 1、 2度実行したことがある。 + +06:47.170 --> 06:55.870 +ええと、 ええと、 これらはそれぞれ、 異なるプライサー・ランのひとつを表す異なるレポを表している。 + +06:55.960 --> 07:01.670 +そして、 私は、 この1つのレポの中にすべての異なるエポックとすべてを持つことができるように、 + +07:01.700 --> 07:05.420 +それぞれの実行を別々のレポとして管理したいんだ。 + +07:05.420 --> 07:13.310 +だから、 この139番は、 4つ、 3つ半のエポックに登場した大物だと思う。 + +07:13.310 --> 07:22.730 +この中をクリックすると、 モデルのページが表示され、 ファイルとバージョンに移動すると、 ここで見ているのは基本的にgitで、 + +07:22.730 --> 07:31.070 +モデルに関連するファイルがあるレポを見ていることになります。 + +07:31.340 --> 07:39.020 +ええと、 先日もお話したように、 ここでのビジネスはこのファイル、 セーフ・テンソルであることがお分かりいただけると思う。 + +07:39.020 --> 07:50.690 +そしてそのファイルは109MBで、 これは我々が使っているアダプターが32に設定されているサイズだ。 + +07:50.720 --> 07:55.550 +計算すると、 109MB分のウエイトになる。 + +07:55.550 --> 07:57.400 +それがこのファイルのすべてだ。 + +07:57.400 --> 07:59.710 +ここは安全な時制だ。 + +08:00.130 --> 08:06.700 +それに、 他にもいくつか見ることができるものがあるんだ。 + +08:06.730 --> 08:15.460 +アダプティブ・コンフィグ。 jsonは、 Loraの微調整に使っているアダプターの情報です。 + +08:15.460 --> 08:22.810 +例えば、 ここにはターゲット・モジュールが格納されており、 R 32という値が表示されている。 + +08:22.840 --> 08:25.450 +ロラのトレーニングを使っていると書いてある。 + +08:25.660 --> 08:32.230 +それで、 ベースモデルの名前は......ラマ3。 180億ドルがそこにある。 + +08:32.590 --> 08:39.130 +それで、 このモデルに保存されているすべての情報を知ることができる。 + +08:39.160 --> 08:43.360 +ただ、 もうひとつ指摘したかったのは、 この16人のコミットメントだ。 + +08:43.360 --> 08:46.090 +これはコミット履歴を示している。 + +08:46.090 --> 08:53.170 +そして基本的に5000ステップごとに、 ええと、 あなたが見たコードは保存されていた。 + +08:53.170 --> 08:55.600 +これは我々のモデルをハブへと押しやるものだった。 + +08:55.600 --> 08:57.940 +これはトレーニング・パラメーターで設定したものだ。 + +08:57.940 --> 09:00.760 +つまり、 5000歩ごとに保存されていたわけだ。 + +09:00.760 --> 09:05.410 +つまり、 これらのモデルのどれでもロードしてテストできるということだ。 + +09:05.410 --> 09:08.080 +こうして、 最も良いパフォーマンスを発揮しているものを選ぶことができるんだ。 + 
+09:08.110 --> 09:10.000 +それぞれのチェックポイントがある。 + +09:10.000 --> 09:11.890 +そして、 これらはいくつでもできる。 + +09:12.070 --> 09:20.980 +それを使って、 モデルがトレーニングのその時点に戻った瞬間を再現することができるんだ。 + +09:21.040 --> 09:26.110 +そして、 私のさまざまなトレーニングのすべてを、 このレポジトリの異なるリビジョン、 + +09:26.140 --> 09:32.740 +異なるバージョン、 異なる価格として、 このレポジトリに置くことができる。 + +09:32.740 --> 09:33.970 +でも、 そうするととても散らかってしまう。 + +09:33.970 --> 09:38.620 +そのため、 それぞれのレポを独立させた。 + +09:38.620 --> 09:45.430 +そして、 さまざまなバッチステップが、 コミットのさまざまな履歴としてここに表示される。 + +09:45.580 --> 09:48.250 +それは組織的でいい方法だと思う。 + +09:48.670 --> 09:54.400 +これがHuggingfaceのハブでモデルを見る方法だ。 + +09:54.400 --> 09:56.170 +そうだろうね。 + +09:56.170 --> 09:57.820 +これが今走っているものだ。 + +09:57.820 --> 09:59.290 +15分前に更新されました。 + +09:59.290 --> 10:02.440 +それで、 このファイルとバージョンに入るんだ。 + +10:02.440 --> 10:03.190 +そうだろう。 + +10:03.220 --> 10:05.320 +はい、 すでにバージョンを保存しています。 + +10:05.320 --> 10:06.610 +我々は5000のステップを踏まなければならない。 + +10:06.640 --> 10:10.510 +つまり、 最初のコミットがあったので、 1つのバージョンか2つのコミットということになる。 + +10:10.510 --> 10:14.950 +そしてステップ5000は15分前に保存されたばかりだ。 + +10:14.980 --> 10:17.110 +だから、 すでに動いているモデルがある。 + +10:17.110 --> 10:20.890 +そして、 もしあなたが私と同じ時期にこれをやっていたなら、 同じような境遇にあり、 + +10:20.920 --> 10:26.560 +私が話している間にこのモデルのバージョンがHuggingfaceのハブにアップロードされていることだろう。 + +10:26.950 --> 10:30.070 +そして実際に、 すぐにテストすることができる。 + +10:30.070 --> 10:32.350 +トレーニングが終わるのを待つ必要はない。 + +10:32.770 --> 10:34.780 +うーん、 そうだな。 + +10:34.810 --> 10:42.940 +トレーニングは進行中だが、 ちょっと理解しがたい敗戦がここにある。 + +10:42.970 --> 10:50.290 +重みとバイアスが美しく可視化され、 モデルそのものがハブに保存されるのも見たことがある。 + +10:50.290 --> 10:53.650 +そしてこれがトレーニングの経験だ。 + +10:53.680 --> 10:55.640 +何時間でもできるんだ。 + +10:55.640 --> 10:59.270 +それを何時間も続けてきた。 + +10:59.270 --> 11:03.710 +実際、 このコースの一番最初に、 あそこにあるウェイトとバイアスが表示されている画面と、 + +11:03.710 --> 11:16.430 +さっきお見せしたチャートは、 ええと、 このチャートは、 一番最初にあそこにあったチャートだと申し上げました。 + +11:16.430 --> 11:18.320 +今はこのグラフが表示されている。 + +11:18.470 --> 11:26.330 +それで、 このコースを作り始めてから数週間の間、 その様子を見ていたんだ。 + +11:26.450 --> 11:28.730 +とても楽しいよ。 + +11:28.820 --> 11:37.040 +トレーニングの様子を見たり、 モデルのバージョンがハブにアップロードされるのを見たり。 + 
+11:37.070 --> 11:41.090 +あとは走りきるだけだ。 + +11:41.090 --> 11:51.770 +そして明日、 私たちがこのモデルを評価し、 私たち自身の垂直モデルの微調整がどのように行われたかを確認するために来てもらう。 + +11:52.100 --> 11:54.020 +うーん、 でも今日はこれで終わりにしよう。 + +11:54.020 --> 11:55.400 +スライドに戻る。 diff --git a/week5/community-contributions/subtitles/srts/59509185/ko_KR.srt b/week5/community-contributions/subtitles/srts/59509185/ko_KR.srt new file mode 100755 index 0000000..9929454 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59509185/ko_KR.srt @@ -0,0 +1,499 @@ +WEBVTT + +00:00.680 --> 00:06.440 +여기서 제가 여러분께 훈련 손실에 대한 만족스러운 차트를 보여드렸는데요 훈련 손실에 + +00:06.440 --> 00:07.640 +대한 차트를요 + +00:07.670 --> 00:09.800 +종일 봐도 안 질리겠어요 + +00:09.800 --> 00:14.270 +다른 차트로 넘어가도록 하죠 + +00:14.480 --> 00:18.170 +이 도표를 다시 보죠 + +00:18.590 --> 00:22.190 +이미 보셨을 수도 있는 이 사진을 보여드릴게요 + +00:22.190 --> 00:24.110 +이게 학습률이에요 + +00:24.110 --> 00:25.670 +날려 버리죠 + +00:27.050 --> 00:34.250 +이게 제가 아까 설명하려고 했던 바로 그 모습이에요 하지만 제가 말씀드렸듯이 + +00:34.250 --> 00:39.830 +무게와 편향으로 보면 훨씬 더 명확해 보이죠 + +00:39.890 --> 00:47.030 +이건 지난 네 번의 시대를 거치며 학습률이 어떻게 변했는지 보여줍니다. + +00:47.030 --> 00:55.130 +거의 네 번째 시대까지 학습률이 변하지 않았죠. 이전의 모형을 실행했을 때요. + +00:55.160 --> 00:58.430 +보시다시피 학습률은 0에서 시작됐죠 + +00:58.460 --> 01:03.720 +그 후 상승했죠 이 지점까지 온난화 때문에요 + +01:03.870 --> 01:12.930 +그러면 이렇게 매끈하게 조금씩 내려오는 게 보이죠 처음엔 천천히 시작하지만 점점 + +01:12.930 --> 01:15.210 +더 많이 내려와요 + +01:15.210 --> 01:17.460 +그러다 끝에 가서는 꼬리가 떨어져요 + +01:17.460 --> 01:23.430 +네 개의 개혁이 끝나면 정확히 0까지 올라가요 + +01:23.430 --> 01:25.710 +하지만 네 번째 시대까지 못 버텼어요 + +01:25.740 --> 01:30.900 +한 시대의 작업을 실행하면 하나의 작업에만 전체 차트가 생겨요. + +01:30.930 --> 01:37.860 +몇 개의 폭동을 일으켰는지를 알면 학습 속도가 빨라져요. 
+ +01:38.220 --> 01:42.660 +그 요점을 잘 보여주는 것 같아요 + +01:42.660 --> 01:48.960 +현재 배치를 나타내는 파란 선이 이 위에 있는 게 보이시죠 + +01:49.020 --> 01:53.010 +우리가 제일 꼭대기에 있어서 밋밋해 보였던 거예요 + +01:53.010 --> 01:59.310 +하지만 때가 되면 전임자처럼 순조롭게 내려올 거예요 + +02:00.580 --> 02:06.220 +말씀드리고 싶은 다른 것은 여기 여러 개의 실행을 살펴볼 때 이 아이콘을 + +02:06.220 --> 02:12.250 +사용할 수 있습니다 무엇을 볼 것인지 결정하기 위해서요 + +02:12.250 --> 02:16.690 +여기 이 중간에 있는 건 미처 못 봤어요 Put it up Put it + +02:16.720 --> 02:29.230 +이게 뭐냐면요 제가 만든 건 구글이 잔인하게 인스턴스를 차단했어요 + +02:29.230 --> 02:35.230 +짜증이 나서 하던 일을 계속하고 다른 시대를 개척하기로 했죠 + +02:35.230 --> 02:38.470 +결과는 나빠졌지만 어떻게 됐는지 보고 싶었어요 + +02:38.500 --> 02:42.970 +전 그걸 극단으로 몰고 가서 네 번째 시대에서 결과가 나빠진 변칙적인 + +02:42.970 --> 02:44.770 +사례가 아니길 바랐어요 + +02:44.770 --> 02:47.110 +다섯 번째 시대에는 훨씬 더 좋아질지도 몰라요 + +02:47.110 --> 02:49.360 +그래서 적어도 비트가 어떻게 되는지 보고 싶었어요 + +02:49.540 --> 02:52.540 +이제 그걸 보여 드릴게요 + +02:52.570 --> 02:56.080 +좀 헷갈릴 거예요 비트를 다시 시작했거든요 + +02:56.080 --> 02:58.960 +오른쪽으로는 안 이어져요 + +02:58.960 --> 03:01.430 +왼쪽에서 시작할 거예요 + +03:01.430 --> 03:06.500 +첫 번째 훈련 단계처럼 보일 거라는 걸 명심하세요 + +03:06.500 --> 03:11.720 +하지만 제가 보여드릴 것은 이 보라색 선 오른쪽에 있는 거예요 + +03:11.720 --> 03:12.350 +어디 보죠 + +03:12.380 --> 03:14.180 +이건 말이죠 + +03:14.180 --> 03:15.530 +저기 있네요 + +03:15.530 --> 03:17.600 +확대해 볼게요 + +03:18.050 --> 03:23.630 +그러니 여기 있어야 한다는 걸 확실히 아셨으면 좋겠네요 + +03:23.630 --> 03:27.080 +저걸 잡아서 오른쪽으로 당길 수 있어야 해요 + +03:27.530 --> 03:34.130 +왜냐하면 제가 여기서 훈련을 다시 시작했을 때 일어난 일이거든요 + +03:34.130 --> 03:44.420 +여기 보이는 게 다섯 번째이자 다른 완전한 시대예요 네 번째 시대를 끝내지 못한 거죠 + +03:44.420 --> 03:47.990 +이건 완전히 다른 시대예요 + +03:47.990 --> 03:51.290 +그럼 여섯 번째 이화기가 완성되는 거죠 + +03:51.410 --> 03:57.530 +다시 한번 말씀드리자면 6번째 시대가 시작된 시기와는 확실히 다른 + +03:57.530 --> 04:04.100 +점이 있습니다 이 시점에서는 확실히 의심스러운 상황이 벌어지고 있죠 + +04:04.250 --> 04:05.810 +손실이 너무 적어요 + +04:05.810 --> 04:12.710 +아니나 다를까 이 모델들을 가지고 테스트를 해 봤더니 제가 가져온 + +04:12.710 --> 04:19.010 +모델보다 성능이 떨어졌죠 이쯤에서 잘라낸 모델보다요 + +04:19.460 --> 04:21.890 +해볼 만한 실험이었어요 + +04:21.890 --> 04:27.890 +저 자신을 만족시켜야 했어요 운이 나빴던 게 아니라 너무 과했던 거고 더는 
유용한 + +04:27.890 --> 04:30.080 +결과를 얻지 못한다는 걸요 + +04:30.080 --> 04:31.910 +실제로도 그런 경우였고요 + +04:32.210 --> 04:35.090 +좋은 시험이 됐어요 + +04:35.090 --> 04:40.220 +이로써 이득을 볼 수 있습니다 풀 몬티로 대규모 + +04:40.250 --> 04:48.830 +버전을 실행하기로 결정했다면 세 개 개혁을 넘어선 안 되니까요 + +04:48.830 --> 04:55.820 +제 경험상 그건 소용이 없어요 hyperperameter를 변경해 보고 뭔가 다른 걸 발견하지 + +04:55.820 --> 04:57.290 +않는 한은요 + +04:58.500 --> 05:04.110 +마지막으로 보여드릴 것은 다른 도표와 무게, 편향 등을 가지고 놀아보실 수 + +05:04.140 --> 05:04.860 +있어요 + +05:04.860 --> 05:05.760 +탐험할 게 많아요 + +05:05.760 --> 05:09.930 +경사만 봐도 알 수 있는데 정말 혼란스러워요 + +05:10.020 --> 05:15.450 +비트 박스를 조사하고 조사해야 해요 뭘 보고 있는지 이해하고 거기서 뭘 배울 + +05:15.450 --> 05:17.220 +수 있는지 알려면요 + +05:17.220 --> 05:22.440 +이상적으로 여러분이 가장 중요하게 여기는 건 Get 절차가 + +05:22.470 --> 05:26.670 +0이 되는 상황은 절대 없도록 하는 거예요 + +05:26.880 --> 05:29.700 +더는 배우는 게 없다는 뜻이죠 + +05:29.700 --> 05:34.890 +그러데이션이 0이면 모델은 더 이상 학습할 수 없고 학습 과정을 계속할 + +05:34.890 --> 05:36.270 +필요가 없어요 + +05:36.270 --> 05:40.740 +그러데이션이 0이 되지 않도록 주의해야 합니다 그러데이션이 + +05:40.740 --> 05:47.670 +너무 높거나 커지는 것도 주의해야 합니다 그렇게 되면 여러분의 모델이 너무 많이 흔들릴 테니까요 + +05:47.700 --> 05:49.920 +학습률이 아주 낮지 않다면요 + +05:49.920 --> 05:53.820 +이 모델은 비생산적으로 학습할 거예요 + +05:53.820 --> 05:59.740 +무게나 우회도로 그라데이션을 볼 때는 이런 점을 주의 깊게 봐야 해요 + +06:00.370 --> 06:03.730 +하지만 얼굴 껴안는 건 보여 주고 싶지 않았어요 + +06:03.910 --> 06:11.710 +혹시 기억하실지 모르겠지만 이 모델을 보여드릴게요 제가 모든 시대를 위해 실행했던 + +06:11.710 --> 06:15.520 +프라이서 모델의 버전이에요 + +06:15.700 --> 06:16.930 +이거 보여요? + +06:16.930 --> 06:21.880 +실행 이름은 날짜와 시간에 근거해 제가 만든 이름이에요 + +06:21.940 --> 06:24.880 +39년으로 끝나죠 + +06:25.030 --> 06:26.440 +초의 개수죠 + +06:26.440 --> 06:28.690 +그 점을 명심하세요 + +06:28.690 --> 06:34.480 +포옹하는 얼굴이 나오면 아바타 메뉴로 가서 본인 이름을 누르세요 + +06:34.600 --> 06:40.180 +모델과 데이터 집합이 있다면 공간을 확인할 수 있어요 + +06:40.180 --> 06:42.700 +1, 2개 있는 거 보이시죠? 
+ +06:43.120 --> 06:46.990 +프라이서에 관해서는 한두 번 실행해 봤어요 + +06:47.170 --> 06:54.070 +이 표시는 각각 다른 회수 건과 다른 가격대 거래 건을 + +06:54.070 --> 06:55.870 +나타내요 + +06:55.960 --> 07:01.670 +전 각각의 압류 프로그램을 따로 보관해요 이 압류 프로그램 안에서 + +07:01.700 --> 07:05.420 +다양한 시대를 경험할 수 있게요 + +07:05.420 --> 07:12.110 +139개국이라는 건 큰 인물이 네 번인가 세 번 반을 폭동한 + +07:12.110 --> 07:13.310 +거겠군요 + +07:13.310 --> 07:22.730 +이걸 클릭하면 모델 페이지가 뜨는데요 파일과 버전으로 가보면 기본적으로 Git을 보고 + +07:22.730 --> 07:28.100 +있는 겁니다 회수 깃을 보고 있는데 모델과 관련된 파일이 + +07:28.100 --> 07:31.070 +그 안에 있는 거죠 + +07:31.340 --> 07:39.020 +그리고 말씀드린 것처럼 최근에 이 사업은 이 안전한 텐서 파일인 걸 보실 수 있어요 + +07:39.020 --> 07:50.690 +그 파일은 109MB로 우리가 함께 사용하는 어댑터의 크기죠 32로 설정된 어댑터요 + +07:50.720 --> 07:55.550 +계산을 해보니 109MB의 무게가 나오더군요 + +07:55.550 --> 07:57.400 +이 파일에 다 있어요 + +07:57.400 --> 07:59.710 +여기가 안전해요 + +08:00.130 --> 08:06.700 +그리고 몇 가지 다른 것도 살펴볼 수 있어요 + +08:06.730 --> 08:15.460 +Adaptive config요 json은 로라 미세 조정에 사용하는 어댑터에 관한 정보를 제공해요 + +08:15.460 --> 08:21.940 +예를 들어 여기에 저장된 대상 모듈이 있고 R32 값을 가지고 + +08:21.970 --> 08:22.810 +있어요 + +08:22.840 --> 08:25.450 +로라의 훈련을 이용한다고 쓰여 있어요 + +08:25.660 --> 08:32.230 +기본 모델 이름이 llama 3이에요 180억 달러요 + +08:32.590 --> 08:39.130 +이 모델을 위해 저장된 모든 정보를 알 수 있죠 + +08:39.160 --> 08:43.360 +또 하나 지적하고 싶은 건 16이 커밋하는 거예요 + +08:43.360 --> 08:46.090 +커밋의 역사를 보여주네요 + +08:46.090 --> 08:53.170 +기본적으로 5천 걸음마다 코드를 절약하는 거예요 + +08:53.170 --> 08:55.600 +우리 모델을 중심으로 밀어냈어요 + +08:55.600 --> 08:57.940 +훈련 매개 변수에서 설정된 거예요 + +08:57.940 --> 09:00.760 +5천 걸음마다 절약했어요 + +09:00.760 --> 09:05.410 +이 모델 중 어떤 것이든 로드해 테스트할 수 있다는 거죠 + +09:05.410 --> 09:08.080 +그렇게 해서 가장 뛰어난 선수를 뽑을 수 있어요 + +09:08.110 --> 09:10.000 +체크포인트가 각각 달라요 + +09:10.000 --> 09:11.890 +이런 건 얼마든지 할 수 있어요 + +09:12.070 --> 09:19.540 +그 경험을 바탕으로 모델이 훈련받던 그 순간을 다시 재현할 + +09:19.540 --> 09:20.980 +수 있어요 + +09:21.040 --> 09:26.110 +제가 다양한 훈련을 할 수 있다고 상상해보세요. 이 모든 + +09:26.140 --> 09:32.740 +것이, 어... 이 가격의 다른 버전의 다른 수정본, 저장소에서요. 
+ +09:32.740 --> 09:33.970 +Get it 하면 어수선해져요 + +09:33.970 --> 09:38.620 +그래서 각각의 주행은 따로 회수하도록 구분했어요 + +09:38.620 --> 09:45.430 +다른 배치 단계가 여기 있습니다 커밋의 다른 히스토리인데요 + +09:45.580 --> 09:48.250 +그게 멋지고 체계적인 방법 같아요 + +09:48.670 --> 09:54.400 +이게 H깅페이스 허브에서 모델을 보는 방법이에요 + +09:54.400 --> 09:56.170 +두고 봐야죠 + +09:56.170 --> 09:57.820 +지금 작동 중인 거예요 + +09:57.820 --> 09:59.290 +15분 전에 업데이트됐어요 + +09:59.290 --> 10:02.440 +파일과 버전으로 가 볼게요 + +10:02.440 --> 10:03.190 +두고 보죠 + +10:03.220 --> 10:05.320 +네, 이미 버전이 저장됐어요 + +10:05.320 --> 10:06.610 +5000단계를 해야 해요 + +10:06.640 --> 10:10.510 +이것의 한 버전 혹은 두 버전은 커밋합니다 초기 커밋이 있으니까요 + +10:10.510 --> 10:14.950 +5000단계는 15분 전에 해결되었어요 + +10:14.980 --> 10:17.110 +이미 실행 중인 모델이 있는 거죠 + +10:17.110 --> 10:20.890 +저와 같은 시기에 이 일을 해왔다면 여러분도 저와 같은 처지일 + +10:20.920 --> 10:26.560 +겁니다 제가 말하는 동안 이 모델의 버전을 허깅페이스 허브에 업로드하겠죠 + +10:26.950 --> 10:30.070 +바로 테스트할 수 있어요 + +10:30.070 --> 10:32.350 +훈련이 끝날 때까지 기다릴 필요 없어요 + +10:32.770 --> 10:34.780 +자, 됐어요 + +10:34.810 --> 10:42.940 +비트가 훈련하는 걸 봤는데 여기서 보이는 패배는 이해하기 좀 어렵네요 + +10:42.970 --> 10:50.290 +무게와 편향으로 아름답게 그려졌고 모델 자체가 중심에 저장되는 것도 봤어요 + +10:50.290 --> 10:53.650 +이건 훈련의 경험이에요 + +10:53.680 --> 10:55.640 +몇 시간이고 할 수 있어요 + +10:55.640 --> 10:59.270 +몇 시간 동안이나 이러고 있었어요 정말 비극적이죠 + +10:59.270 --> 11:03.710 +사실 이 코스가 시작될 때 말씀드린 것 같은데 + +11:03.710 --> 11:10.100 +저기 보이는 화면에 무게와 편향성이 있습니다 그리고 방금 + +11:10.100 --> 11:16.430 +보여드린 이 차트는 처음부터 저기 있던 차트예요 + +11:16.430 --> 11:18.320 +지금은 이 차트를 보여주죠 + +11:18.470 --> 11:26.330 +이 코스를 만든 첫 몇 주 동안 그걸 지켜봤어요 + +11:26.450 --> 11:28.730 +정말 재미있었어요 + +11:28.820 --> 11:34.220 +여러분도 같은 일을 하시길 바랍니다 훈련 과정을 지켜보시고 모델 + +11:34.220 --> 11:37.040 +버전이 업로드되는 걸 보세요 + +11:37.070 --> 11:41.090 +이제 완주만 하면 돼요 + +11:41.090 --> 11:50.270 +내일은 모델을 평가하고 수직화 모델을 어떻게 조정했는지 볼 + +11:50.270 --> 11:51.770 +거예요 + +11:52.100 --> 11:54.020 +오늘은 여기까지 하죠 + +11:54.020 --> 11:55.400 +슬라이드로 돌아가죠 diff --git a/week5/community-contributions/subtitles/srts/59665127/en_US.srt 
b/week5/community-contributions/subtitles/srts/59665127/en_US.srt new file mode 100755 index 0000000..d6fc0af --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59665127/en_US.srt @@ -0,0 +1,238 @@ +WEBVTT + +00:00.560 --> 00:02.330 +Well hi there everybody. + +00:02.330 --> 00:06.890 +I'm not going to give you my usual song and dance about how excited you are, because I know how excited + +00:06.890 --> 00:08.510 +you are, as am I. + +00:08.540 --> 00:14.930 +No doubt overnight your run has been running as mine has, and you are eager to see the results. + +00:14.930 --> 00:17.030 +But we have some content to do. + +00:17.060 --> 00:24.050 +First of all, um, so already you can do so many things, uh, coding against frontier and open source + +00:24.050 --> 00:31.550 +models using data set curation, baseline and fine tuning, frontier models, and now running. + +00:31.550 --> 00:37.880 +Q Laura, uh, so today I had previously said they were going to be two bullets, but I have put in + +00:37.880 --> 00:42.890 +an extra bullet here, beginning here with how training works. + +00:42.920 --> 00:48.320 +It occurs to me that I've been quite hand-wavy about the training process itself. + +00:48.320 --> 00:53.330 +And at this point, now that you've got some hands on experience running, training, seeing Laura in + +00:53.330 --> 00:57.680 +action, it's worth me just taking a minute to explain it properly. + +00:57.680 --> 00:59.270 +So you've got that basis. + +00:59.300 --> 01:00.560 +You may know it all already. + +01:00.560 --> 01:05.300 +It might be something that at this point you've either picked up or you already had had encountered. + +01:05.360 --> 01:09.070 +Um, either way, I think it's super important that I do clarify that. + +01:09.070 --> 01:11.140 +And so we'll take a couple of minutes to do it. 
+ +01:11.260 --> 01:16.300 +Um, and I think it's really nice that you've had some experience first running training so that this + +01:16.300 --> 01:20.110 +will hopefully connect the dots and things will click in place. + +01:20.140 --> 01:29.200 +We will then run inference for a fine tuned model, and then we will have the conclusion of week seven, + +01:29.200 --> 01:31.120 +which is exciting indeed. + +01:31.150 --> 01:32.890 +All right let's get started. + +01:33.550 --> 01:40.930 +So I want to explain that the training process, the process of improving a model so that it's better + +01:40.930 --> 01:46.120 +and better at performing a task, is something that has four steps to it. + +01:46.150 --> 01:52.570 +The first step is what's known as the forward pass, which is just another name for a sort of running + +01:52.570 --> 01:53.440 +inference. + +01:53.440 --> 02:01.480 +You have a data, a point in your data set, you have a particular training data point, and you take + +02:01.480 --> 02:07.480 +that that training prompt and you pass it through your neural network to get the prediction for the + +02:07.480 --> 02:08.590 +next token. + +02:08.590 --> 02:13.510 +And that is called the forward pass, because you're thinking of the input coming in, going through, + +02:13.510 --> 02:18.080 +and the output popping out at the end of your transformer. + +02:19.100 --> 02:22.700 +There is then what's called the loss calculation. + +02:23.240 --> 02:25.340 +And we'll talk a little bit more about this later. + +02:25.340 --> 02:26.810 +But this is saying okay. + +02:26.810 --> 02:31.010 +So the network predicted that this would be the output. + +02:31.010 --> 02:35.750 +And in fact this is the true next token because we're in training. + +02:35.750 --> 02:41.030 +And so we've got real examples that include what actually did come next in the data. 
+ +02:41.360 --> 02:46.610 +And so now that you've got the prediction and the truth, you can come come up with some way of calculating + +02:46.610 --> 02:48.890 +the loss or how wrong were you. + +02:48.920 --> 02:53.960 +How bad was it loss being the sort of inverse of of of accuracy. + +02:54.590 --> 02:58.340 +So a bigger loss number means things went worse. + +02:58.700 --> 02:59.750 +So that's step two. + +02:59.780 --> 03:01.160 +The loss calculation. + +03:01.160 --> 03:04.580 +Step three is known as the backward pass. + +03:04.580 --> 03:06.320 +And you hear different terms for this. + +03:06.350 --> 03:08.900 +It's called backprop back propagation. + +03:09.230 --> 03:17.840 +Um and the idea is that in in this backward pass you take this loss and you look back through the neural + +03:17.840 --> 03:24.100 +network and you ask the question, if I were to tweak each of the parameters in this neural network + +03:24.100 --> 03:25.540 +by a little tiny bit. + +03:25.570 --> 03:29.140 +Would it have made this loss bigger or smaller? + +03:29.170 --> 03:31.300 +How is the loss dependent? + +03:31.330 --> 03:33.100 +How is this particular weight? + +03:33.100 --> 03:35.980 +Uh, how does that vary the loss? + +03:36.130 --> 03:39.700 +Um, what's the difference in loss based on on this weight? + +03:39.910 --> 03:43.540 +Um, and that that sensitivity is called a gradient. + +03:43.570 --> 03:44.290 +Of course. + +03:44.500 --> 03:47.230 +Um, as, as, as it is generally in maths. + +03:47.410 --> 03:56.020 +Uh, and so this is about calculating the gradients of all of your weights to see how the loss is affected + +03:56.020 --> 04:01.870 +by a small tweak to those weights, so that, that calculating the gradients of all of your weights + +04:01.900 --> 04:04.240 +is known as the backward pass. 
+ +04:04.570 --> 04:12.970 +Uh, and then finally, the fourth step optimization is, uh, and this is the thing that we selected + +04:12.970 --> 04:19.180 +the Adam with weight decay, the Adam W optimizer for our particular training exercise. + +04:19.210 --> 04:24.280 +Optimization is saying, okay, so now we've calculated all of our gradients. + +04:24.400 --> 04:32.270 +What we now want to do is we want to tweak all of the weights a tiny tiny bit such that next time, + +04:32.270 --> 04:37.850 +if it were given the same prompt, it would be more likely to do a little bit better. + +04:37.880 --> 04:39.950 +The loss would be a little bit lower. + +04:40.400 --> 04:45.200 +So we're going to tweak in the in the direction the opposite direction to the to the gradient so that + +04:45.200 --> 04:46.970 +losses would be reduced. + +04:47.240 --> 04:52.670 +And that small step which is based on the learning rate, how much of a step you take is based on on + +04:52.670 --> 04:56.750 +how big the learning rate is, is designed to make things a little bit better. + +04:56.840 --> 05:00.290 +And you always want to try and do it in a way that will generalize well. + +05:00.290 --> 05:04.610 +You don't want to just be solving for exactly this input prompt. + +05:04.610 --> 05:10.370 +You just want the model to be learning to get a little bit better with those kinds of prompts in the + +05:10.370 --> 05:11.090 +future. + +05:11.450 --> 05:17.660 +Um, and of course, all of this process happens with mini batches at the same time, and it happens + +05:17.690 --> 05:23.780 +again and again and again all the way through your your data is one epoch and then potentially another + +05:23.780 --> 05:27.740 +time and another time after that as it goes through multiple epochs. + +05:27.740 --> 05:33.050 +So that repeated process is what is known as training. + +05:33.380 --> 05:38.600 +And now in the next video, we will just step through a diagram to illustrate that. 
diff --git a/week5/community-contributions/subtitles/srts/59665127/ja_JP.srt b/week5/community-contributions/subtitles/srts/59665127/ja_JP.srt new file mode 100755 index 0000000..cac43c3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59665127/ja_JP.srt @@ -0,0 +1,202 @@ +WEBVTT + +00:00.560 --> 00:02.330 +みなさん、 こんにちは。 + +00:02.330 --> 00:08.510 +あなたがどれほど興奮しているかは私も知っているからだ。 + +00:08.540 --> 00:14.930 +一晩中、 私のように走り続け、 その結果を見たがっているに違いない。 + +00:14.930 --> 00:17.030 +でも、 やるべき内容はある。 + +00:17.060 --> 00:24.050 +まず第一に、 データセットのキュレーション、 ベースラインと微調整、 フロンティアモデル、 そして現在実行中のフロンティアモデルを使って、 + +00:24.050 --> 00:31.550 +フロンティアモデルやオープンソースモデルに対してコーディングするなど、 すでに多くのことができる。 + +00:31.550 --> 00:37.880 +Q ローラ、 ええと、 今日、 私は以前、 2つの弾丸になると言っていたのですが、 トレーニングがどのように機能するかというところから始めて、 + +00:37.880 --> 00:42.890 +ここにもう1つの弾丸を入れました。 + +00:42.920 --> 00:48.320 +トレーニングのプロセス自体について、 私はかなり手探り状態だったことに思い当たる。 + +00:48.320 --> 00:53.330 +そしてこの時点で、 ローラを実際に走らせ、 トレーニングし、 活躍する姿を目の当たりにしたのだから、 + +00:53.330 --> 00:57.680 +少し時間を割いてでもきちんと説明する価値がある。 + +00:57.680 --> 00:59.270 +だから、 その根拠はある。 + +00:59.300 --> 01:00.560 +もう全部知っているかもしれない。 + +01:00.560 --> 01:05.300 +それは、 この時点であなたが拾ったものかもしれないし、 すでに出会っていたものかもしれない。 + +01:05.360 --> 01:09.070 +いずれにせよ、 それをはっきりさせることは超重要だと思う。 + +01:09.070 --> 01:11.140 +そのために2、 3分時間を取る。 + +01:11.260 --> 01:20.110 +そして、 あなたが最初にランニングのトレーニングを経験したことは本当に素晴らしいことだと思う。 + +01:20.140 --> 01:31.120 +その後、 微調整されたモデルの推論を行い、 第7週の締めくくりとなる。 + +01:31.150 --> 01:32.890 +よし、 始めよう。 + +01:33.550 --> 01:40.930 +そこで、 トレーニングのプロセス、 つまりモデルを改良して、 あるタスクをこなすのがよりうまくなるようにするプロセスには、 + +01:40.930 --> 01:46.120 +4つのステップがあることを説明したい。 + +01:46.150 --> 01:53.440 +最初のステップはフォワードパスと呼ばれるもので、 これは一種のランニング推論の別名に過ぎない。 + +01:53.440 --> 02:01.480 +あるデータ、 データセットのあるポイント、 特定のトレーニングデータがあり、 そのトレーニングプロンプトをニューラルネットワークに通して、 + +02:01.480 --> 02:08.590 +次のトークンの予測を得る。 + +02:08.590 --> 02:13.510 +これはフォワード・パスと呼ばれるもので、 入力が入力され、 それを通過し、 + +02:13.510 --> 02:18.080 +出力がトランスの端から飛び出してくると考えているからです。 + +02:19.100 --> 02:22.700 +それから、 損失計算と呼ばれるものがある。 + 
+02:23.240 --> 02:25.340 +これについては後でもう少し詳しく話そう。 + +02:25.340 --> 02:26.810 +でも、 これはオーケーと言っているんだ。 + +02:26.810 --> 02:31.010 +つまり、 ネットワークはこれが出力されると予測したわけだ。 + +02:31.010 --> 02:35.750 +そして実際、 これが本当の意味での次の形なんだ。 + +02:35.750 --> 02:41.030 +そのため、 データには実際に次に何が起こったかを含む実例がある。 + +02:41.360 --> 02:48.890 +そして、 予想と真実を手に入れた今、 損失を計算する方法を考え出すことができる。 + +02:48.920 --> 02:53.960 +精度の逆をいくようなロスのひどさだった。 + +02:54.590 --> 02:58.340 +つまり、 ロスの数字が大きければ大きいほど、 事態は悪化したということだ。 + +02:58.700 --> 02:59.750 +これがステップ2だ。 + +02:59.780 --> 03:01.160 +損失の計算 + +03:01.160 --> 03:04.580 +ステップ3はバックワードパスと呼ばれる。 + +03:04.580 --> 03:06.320 +これについては、 さまざまな言い方がある。 + +03:06.350 --> 03:08.900 +これはバックプロップ逆伝播と呼ばれる。 + +03:09.230 --> 03:25.540 +そして、 この後方パスでは、 この損失を受け取り、 ニューラルネットワークを振り返って、 このニューラルネットワークの各パラメータをほんの少し微調整したらどうなるだろうかと質問するのです。 + +03:25.570 --> 03:29.140 +そうすれば、 この損失は大きくなっただろうか、 それとも小さくなっただろうか? + +03:29.170 --> 03:31.300 +損失はどのように左右されるのか? + +03:31.330 --> 03:33.100 +この重さはどうですか? + +03:33.100 --> 03:35.980 +それで損失がどう変わるんだ? + +03:36.130 --> 03:39.700 +ええと、 この体重によるロスの差は? 
+ +03:39.910 --> 03:43.540 +その感度をグラデーションと呼ぶんだ。 + +03:43.570 --> 03:44.290 +もちろんだ。 + +03:44.500 --> 03:47.230 +ええと、 数学では一般的にそうであるように、 "as"、 "as"、 "as"、 "as"。 + +03:47.410 --> 03:56.020 +つまり、 すべてのウエイトの勾配を計算することで、 そのウエイトに少し手を加えただけで、 + +03:56.020 --> 04:04.240 +ロスにどのような影響が出るかを確認するわけだ。 + +04:04.570 --> 04:12.970 +そして最後に、 4段階目の最適化ですが、 これは、 私たちが特定のトレーニングのために、 ウェイト減衰を伴うアダム、 + +04:12.970 --> 04:19.180 +アダムWオプティマイザを選択したことです。 + +04:19.210 --> 04:24.280 +最適化とは、 よし、 これですべてのグラデーションを計算したぞ、 ということだ。 + +04:24.400 --> 04:32.270 +私たちが今やりたいことは、 次に同じプロンプトが出されたときに、 もう少しうまくいく可能性が高くなるように、 + +04:32.270 --> 04:37.850 +すべてのウエイトをほんの少し微調整することだ。 + +04:37.880 --> 04:39.950 +損失はもう少し少ないだろう。 + +04:40.400 --> 04:46.970 +そこで、 ロスが少なくなるように、 勾配と反対方向に微調整を加える。 + +04:47.240 --> 04:52.670 +そして、 その小さな一歩は、 学習率に基づくもので、 どれだけの一歩を踏み出すかは、 + +04:52.670 --> 04:56.750 +学習率の大きさに基づく。 + +04:56.840 --> 05:00.290 +そして常に、 うまく一般化できるようなやり方でやってみたいものだ。 + +05:00.290 --> 05:04.610 +この入力プロンプトだけを解くことは避けたい。 + +05:04.610 --> 05:11.090 +モデルには、 将来的にそのようなプロンプトに対応できるよう、 少しずつ学習していってほしい。 + +05:11.450 --> 05:17.660 +もちろん、 このプロセスはすべてミニバッチで同時に行われ、 + +05:17.690 --> 05:27.740 +データが1つのエポックになるまで何度も何度も繰り返される。 + +05:27.740 --> 05:33.050 +その繰り返しがトレーニングというわけだ。 + +05:33.380 --> 05:38.600 +次のビデオでは、 それを図で説明しよう。 diff --git a/week5/community-contributions/subtitles/srts/59665127/ko_KR.srt b/week5/community-contributions/subtitles/srts/59665127/ko_KR.srt new file mode 100755 index 0000000..a67458e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59665127/ko_KR.srt @@ -0,0 +1,229 @@ +WEBVTT + +00:00.560 --> 00:02.330 +안녕하세요, 여러분 + +00:02.330 --> 00:06.890 +얼마나 신났는지 평소처럼 말하진 않을게요 저도 그렇고 당신도 + +00:06.890 --> 00:08.510 +신났으니까요 + +00:08.540 --> 00:14.930 +밤새도록 저처럼 달리셨을 테니 결과를 빨리 보고 싶으시겠죠 + +00:14.930 --> 00:17.030 +하지만 할 일이 있어요 + +00:17.060 --> 00:24.050 +우선, 이미 많은 것을 할 수 있습니다 프론티어에 반하는 코딩과 오픈 소스 모델은 + +00:24.050 --> 00:31.550 +데이터 세트 큐레이션, 기준선 미세 튜닝 프론티어 모델과 실행을 이용하죠 + +00:31.550 --> 00:37.880 +Q 로라, 아까 총알 두 개를 넣겠다고 했는데 여기 하나 더 + +00:37.880 --> 
00:42.890 +넣었어요 훈련 과정부터 시작하죠 Put + +00:42.920 --> 00:48.320 +생각해보니 훈련 과정 자체에 대해 꽤 꼼꼼하게 알고 있었어요 + +00:48.320 --> 00:53.330 +이제 여러분은 달리기와 훈련 로라의 활약을 직접 목격한 경험이 + +00:53.330 --> 00:57.680 +있으니 잠시 시간을 내서 제대로 설명해 드릴게요 + +00:57.680 --> 00:59.270 +그런 기본이 있어요 + +00:59.300 --> 01:00.560 +이미 다 알고 계실지도 몰라요 + +01:00.560 --> 01:05.300 +그 시점에서 뭔가 알아차렸거나 이미 마주쳤을 수도 있죠 + +01:05.360 --> 01:09.070 +어느 쪽이든 그걸 분명히 하는 게 중요해요 + +01:09.070 --> 01:11.140 +몇 분 정도 걸릴 거예요 + +01:11.260 --> 01:16.300 +첫 달리기 훈련을 받은 건 좋은 일이라고 생각해요 이번 + +01:16.300 --> 01:20.110 +훈련을 통해 모든 게 잘 연결되길 바라요 + +01:20.140 --> 01:29.200 +그런 다음 잘 조율된 모델에 대한 추론을 실행합니다 그럼 7주 차에 마무리되는 거죠 정말 + +01:29.200 --> 01:31.120 +흥미진진해요 + +01:31.150 --> 01:32.890 +Get it, get it, get it, get it, get, it! 자, 그럼 시작해 볼까요? + +01:33.550 --> 01:40.930 +제가 말씀드리고 싶은 것은 모델을 개선하는 훈련 과정으로 작업을 수행하는 + +01:40.930 --> 01:46.120 +데 있어서 4단계를 거쳐야 한다는 점인데요 + +01:46.150 --> 01:52.570 +첫 단계는 전진 패스라고 알려진 겁니다 일종의 실행 추론에 쓰이는 또 다른 + +01:52.570 --> 01:53.440 +이름이죠 + +01:53.440 --> 02:01.480 +데이터 집합에 데이터가 있는데 특정 훈련 데이터 포인트가 있고 그 훈련 프롬프트를 + +02:01.480 --> 02:08.590 +신경망을 통해 전달해 다음 토큰에 대한 예측을 얻죠. + +02:08.590 --> 02:13.510 +그걸 포워드 패스라고 하죠 왜냐하면 입력값이 들어와 통과하고 + +02:13.510 --> 02:18.080 +결과물이 변압기 끝에 튀어나온다고 생각하니까요 + +02:19.100 --> 02:22.700 +그리고 손실 계산이라는 게 있어요 + +02:23.240 --> 02:25.340 +이 비트에 대해선 나중에 더 얘기하죠 + +02:25.340 --> 02:26.810 +이건 괜찮다고 하네요 + +02:26.810 --> 02:31.010 +네트워크는 이게 출력물이라고 예상했어요 + +02:31.010 --> 02:35.750 +사실 이게 진정한 다음 토큰이에요 우린 훈련 중이니까요 + +02:35.750 --> 02:41.030 +데이터에서 다음에 무엇이 올지 포함하는 실제 예제들이 있어요 + +02:41.360 --> 02:46.610 +이제 예측과 진실을 알았으니 손실을 계산할 방법을 생각해낼 수 있겠죠 + +02:46.610 --> 02:48.890 +얼마나 틀렸는지도 알고요 + +02:48.920 --> 02:53.960 +정확성의 반대인 걸 잃었을 때 얼마나 심각했나요? 
+ +02:54.590 --> 02:58.340 +손실이 크다는 건 상황이 악화됐다는 뜻이죠 + +02:58.700 --> 02:59.750 +이게 2단계예요 + +02:59.780 --> 03:01.160 +손실 계산요 + +03:01.160 --> 03:04.580 +세 번째 단계는 뒤로 가기예요 + +03:04.580 --> 03:06.320 +다른 용어도 많이 들리죠 + +03:06.350 --> 03:08.900 +백프롭 백 전파라고 하죠 + +03:09.230 --> 03:17.840 +이 원리는 이 역과정에서 손실을 감안하고 신경망을 통해 되돌아보며 이런 질문을 + +03:17.840 --> 03:25.540 +하는 겁니다 신경망의 각 매개 변수를 조금씩만 변경한다면요 + +03:25.570 --> 03:29.140 +이 손실이 커졌을까요? 작아졌을까요? + +03:29.170 --> 03:31.300 +손실이 얼마나 크죠? + +03:31.330 --> 03:33.100 +이 무게는 어떻게 정했죠? + +03:33.100 --> 03:35.980 +그게 손실과 무슨 상관이죠? + +03:36.130 --> 03:39.700 +몸무게에 따라 감량량은 어떻게 달라지죠? + +03:39.910 --> 03:43.540 +그 민감한 부분을 그러데이션이라고 해요 + +03:43.570 --> 03:44.290 +물론이죠 + +03:44.500 --> 03:47.230 +일반적으로 수학에서 그렇듯이요 + +03:47.410 --> 03:56.020 +각 무게의 경사도를 계산하는 과정이에요 조금만 변경해도 손실이 발생하죠 + +03:56.020 --> 04:01.870 +그래서 모든 무게의 경사도를 계산하는 과정을 백워드 + +04:01.900 --> 04:04.240 +패스라고 해요 + +04:04.570 --> 04:12.970 +마지막으로 4단계 최적화는 무게 감쇠 기능을 갖춘 Adam + +04:12.970 --> 04:19.180 +W 최적화기로 훈련에 사용하기로 했어요 + +04:19.210 --> 04:24.280 +최적화는 모든 그러데이션을 계산했다는 뜻이에요 + +04:24.400 --> 04:32.270 +이제 모든 무게 비트를 조금씩 조정할 겁니다 그래야 다음번에 + +04:32.270 --> 04:37.850 +같은 프롬프트를 받으면 좀 더 잘 되겠죠 + +04:37.880 --> 04:39.950 +비트 박스는 좀 낮겠죠 + +04:40.400 --> 04:45.200 +기울기 방향과 반대 방향으로 살짝 조정해서 손실을 + +04:45.200 --> 04:46.970 +줄일 거예요 + +04:47.240 --> 04:52.670 +학습 비율에 따라 한 단계씩 나아가는 겁니다 학습 비율이 얼마나 큰지에 + +04:52.670 --> 04:56.750 +따라서요 이건 좀 더 나은 환경을 위해 고안된 거죠 + +04:56.840 --> 05:00.290 +일반화할 수 있는 방법으로 항상 시도하고 싶죠 + +05:00.290 --> 05:04.610 +이 입력 프롬프트를 정확히 해결하고 싶진 않죠 + +05:04.610 --> 05:11.090 +이런 비트로 모델이 좀 더 나아지도록 배우길 바랄 뿐이죠 + +05:11.450 --> 05:17.660 +이 모든 과정이 미니 배치에서 동시에 발생합니다 데이터 전체에서 + +05:17.690 --> 05:23.780 +계속 반복되죠 데이터는 한 발생을 거치고 여러 발생을 거치면서 + +05:23.780 --> 05:27.740 +또 다른 발생이 반복돼요 + +05:27.740 --> 05:33.050 +그 반복적인 과정을 훈련이라고 하죠 + +05:33.380 --> 05:38.600 +다음 비디오에서는 그걸 설명하기 위해 다이어그램을 보여드리죠 diff --git a/week5/community-contributions/subtitles/srts/59665129/en_US.srt 
b/week5/community-contributions/subtitles/srts/59665129/en_US.srt new file mode 100755 index 0000000..b383248 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59665129/en_US.srt @@ -0,0 +1,334 @@ +WEBVTT + +00:00.710 --> 00:06.650 +And now let me make this real for you by showing you some, some diagrams, particularly now looking + +00:06.650 --> 00:11.810 +at how training works with Chullora, which is how we're actually doing it in practice. + +00:12.440 --> 00:16.280 +So first of all, let's talk about this forward pass. + +00:16.280 --> 00:20.780 +So here is a diagram that should be familiar to you because it's the same one I used before. + +00:20.810 --> 00:27.350 +That shows our llama 3.1 base model that we've quantized all the way down to four bits with its 8 billion + +00:27.380 --> 00:31.340 +parameters and the white, all of it is frozen. + +00:31.340 --> 00:36.350 +We're not going to be changing its weights as part of training, because it would be way too much work, + +00:36.380 --> 00:43.400 +way too much memory, and too slow to try and shift and tweak and optimize these 8 billion parameters. + +00:43.850 --> 00:50.630 +So what we're seeing here is a bunch of frozen rows of weights, and the ones in yellow are also frozen, + +00:50.630 --> 00:53.180 +but they're representing our target modules. + +00:53.180 --> 00:55.940 +Which color is going to be applied to. + +00:56.330 --> 00:59.210 +And let's bring in our Lora adapters. + +00:59.210 --> 00:59.790 +There they are. + +00:59.820 --> 01:06.090 +You may remember that to be technically correct, there are in fact two adapters called A and B for + +01:06.090 --> 01:10.380 +each of the different, um target modules they have. + +01:10.440 --> 01:15.420 +Dimensionality given by R, and you may remember that alpha is the scaling factor. 
+ +01:15.420 --> 01:21.090 +And it just is as simple as the way that these are applied to their target module is that it's alpha + +01:21.120 --> 01:22.380 +times A times B. + +01:22.710 --> 01:24.450 +Um simple as that. + +01:24.450 --> 01:27.750 +So that is our neural network that we know well. + +01:27.780 --> 01:33.270 +And we are going to be training the weights in this, these Laura adapters, which in our case is about + +01:33.300 --> 01:36.090 +109MB worth of, of weights. + +01:36.780 --> 01:38.280 +Uh, okay. + +01:38.280 --> 01:40.290 +So then what happens? + +01:40.290 --> 01:42.900 +We have an input prompt over on the left. + +01:42.900 --> 01:47.760 +It is like price is dollars and then it's the next token. + +01:47.760 --> 01:50.070 +We want the model to get good at predicting. + +01:50.520 --> 01:57.780 +The forward pass is when we take that and we in inference mode basically we we put it take it through + +01:57.780 --> 02:02.400 +the the model to say please predict the next token. + +02:03.360 --> 02:09.780 +And so that goes through the model and what comes out the other side is a predicted next token price + +02:09.780 --> 02:10.260 +is. + +02:10.290 --> 02:11.820 +And then 99. + +02:11.820 --> 02:17.970 +And again because we've got we're taking advantage of this simplicity of llama 3.1 that in fact 99 is + +02:17.970 --> 02:19.170 +just one token. + +02:19.320 --> 02:20.400 +Um, not not. + +02:20.430 --> 02:23.730 +And that will always be the case for any three digit number. + +02:23.730 --> 02:27.300 +That's not that that's critical, but it does simplify things a bit for us. + +02:27.300 --> 02:29.190 +So that's the forward pass. + +02:29.220 --> 02:29.820 +All right. + +02:29.850 --> 02:32.040 +Now onto the loss calculation. + +02:32.610 --> 02:33.780 +So here we are again. + +02:33.780 --> 02:35.400 +We've predicted the next token. + +02:35.400 --> 02:41.400 +So now the model is able to look up what or not the model the training process. 
+ +02:41.400 --> 02:44.010 +The SFT trainer in our case looks up. + +02:44.010 --> 02:45.960 +What was the actual next token. + +02:45.960 --> 02:49.080 +Because we've got the training data, we know the actual next token. + +02:49.080 --> 02:50.100 +And what was it? + +02:50.100 --> 02:51.810 +Let's say it was 89. + +02:51.810 --> 02:52.590 +It was lower. + +02:52.590 --> 02:54.000 +So we were wrong. + +02:54.000 --> 02:55.800 +Wrong by $10. + +02:56.070 --> 02:57.370 +Um, or wrong. + +02:57.370 --> 03:00.280 +By different token, it doesn't know that this represents $10. + +03:00.280 --> 03:02.350 +It just knows it's a different token. + +03:02.710 --> 03:05.650 +Um, and so there is some kind of a loss. + +03:05.650 --> 03:10.180 +And in just a moment I'm going to explain what that loss is and why it's not quite as simple as it just + +03:10.180 --> 03:11.500 +being a different token. + +03:11.500 --> 03:16.300 +There's a technicality there that we'll get to, but for now you can just think of it as it predicted + +03:16.300 --> 03:16.960 +99. + +03:16.990 --> 03:18.670 +The actual value is 89. + +03:18.700 --> 03:20.050 +We have a loss. + +03:20.680 --> 03:23.140 +So that's the loss calculation. + +03:23.140 --> 03:29.350 +Step three is the backward pass that you hear people calling backprop or backward propagation. + +03:29.350 --> 03:30.580 +Back propagation. + +03:30.760 --> 03:38.470 +Um, and in backprop basically we look back through the network, back we go and we say, all right, + +03:38.470 --> 03:43.930 +so how much if we if we were to tweak these weights by a little bit, how much would that affect the + +03:43.930 --> 03:44.650 +loss? + +03:44.650 --> 03:48.850 +How sensitive is the loss to to those those weights. + +03:48.880 --> 03:54.280 +It gives us what we call the gradients of the weights of the parameters. + +03:54.280 --> 03:57.320 +So when I say weights are synonymous with parameters. + +03:57.560 --> 04:00.770 +Uh, so, uh, how, uh, um, yeah. 
+ +04:00.800 --> 04:02.120 +What are the gradients? + +04:02.120 --> 04:05.240 +If we were to change those weights, what would it do to the loss? + +04:05.240 --> 04:08.030 +Because we want to try and improve things a little bit. + +04:08.360 --> 04:14.120 +Um, and so those, uh, red triangles are meant to represent Delta, uh, for showing like a gradient + +04:14.210 --> 04:16.100 +calculation that's happened there. + +04:17.180 --> 04:22.730 +Uh, and then finally we get to the last step, step four optimization. + +04:22.940 --> 04:23.960 +Here it comes. + +04:23.960 --> 04:26.360 +So we've got these gradients. + +04:26.420 --> 04:30.770 +And what we now need to do is we want to take a tiny step in the right direction. + +04:30.770 --> 04:36.830 +So we want to update the parameters in our Laura matrices a little tiny bit. + +04:36.830 --> 04:41.720 +So that next time if it gets the same input prompt, the loss will be a bit lower. + +04:41.720 --> 04:42.890 +It would do a bit better. + +04:42.890 --> 04:48.200 +So we're taking a step in the right direction, and we use the learning rate to decide how much of a + +04:48.200 --> 04:48.980 +step to take. + +04:48.980 --> 04:53.810 +Because as I said before, there are pros and cons of taking smaller or larger steps. + +04:54.090 --> 05:00.690 +Um, and you may remember that the optimizer we're using, the Adam W optimizer, does something quite + +05:00.690 --> 05:06.720 +cunning, where it doesn't just use these gradients, it keeps a kind of rolling average of prior gradients, + +05:06.720 --> 05:12.540 +so that it's being really smart about how to take that step in a way that's most likely to improve things. + +05:12.660 --> 05:17.700 +And it's also trying to make sure that we don't do things like overfitting and other dangers that we've + +05:17.730 --> 05:18.810 +talked about before. + +05:19.200 --> 05:20.370 +So we do that. 
+ +05:20.370 --> 05:26.430 +And what happens is that the Laura adapters are then improved, and as they are applied in the future + +05:26.430 --> 05:29.700 +to our base model, it will do slightly better. + +05:29.730 --> 05:36.390 +The loss will be a bit lower next time, and that is why you see that the training loss is coming down + +05:36.390 --> 05:40.740 +and our wiggly charts, because it's learning and getting better and better. + +05:41.070 --> 05:46.050 +And so it's worth noting, of course, that the thing that the weights, the parameters that get changed + +05:46.050 --> 05:51.180 +are the parameters in the Laura adapters, the green ones, we don't actually change any parameters + +05:51.180 --> 05:55.620 +in the llama 3.1 base that is just too big, too many parameters. + +05:55.620 --> 06:02.280 +If you were doing ordinary training, fine tuning, not not Laura based, but ordinary, then it would + +06:02.280 --> 06:09.120 +be the whole llama 3.1 model that would need to be having gradients calculated and need to be shifted + +06:09.120 --> 06:10.320 +during optimization. + +06:10.320 --> 06:13.650 +And that, of course, is what these big companies like meta. + +06:13.680 --> 06:19.170 +That's how they've trained llama 3.1in the first place, and they've spent significant amounts of money + +06:19.200 --> 06:25.620 +doing it, which which we don't have easy access to, which is why we're using our Lora adapters instead. + +06:26.190 --> 06:32.160 +So that's the quick summary of optimization in the sorry of the whole of the training process. + +06:32.160 --> 06:37.530 +And I imagine that was probably mostly clear to you already, but I'm hoping that these diagrams of + +06:37.530 --> 06:41.310 +crystallized it for you and mean that everything has fallen into place. 
+ +06:41.310 --> 06:47.550 +And in the next video, I'm just going to explain one more technicality, a very important technicality + +06:47.550 --> 06:51.570 +about what we mean by the prediction and the loss calculation. + +06:51.600 --> 06:52.380 +See you then. diff --git a/week5/community-contributions/subtitles/srts/59665129/ja_JP.srt b/week5/community-contributions/subtitles/srts/59665129/ja_JP.srt new file mode 100755 index 0000000..55e23ca --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59665129/ja_JP.srt @@ -0,0 +1,286 @@ +WEBVTT + +00:00.710 --> 00:11.810 +そして、 いくつかの図をお見せすることで、 このことを実感していただこう。 特に今、 チュローラを使ったトレーニングがどのように機能するのかを見てみよう。 + +00:12.440 --> 00:16.280 +ではまず、 このフォワードパスについて。 + +00:16.280 --> 00:20.780 +というわけで、 以前私が使ったのと同じ図なので、 お馴染みのはずである。 + +00:20.810 --> 00:27.350 +これがラマ3世だ。 1ベースモデルは、 80億のパラメーターと白を持つ4ビットにまで量子化され、 + +00:27.380 --> 00:31.340 +そのすべてが凍結されている。 + +00:31.340 --> 00:36.350 +トレーニングの一環としてウェイトを変更するつもりはない。 なぜなら、 80億ものパラメーターを変化させ、 + +00:36.380 --> 00:43.400 +微調整し、 最適化しようとするのは、 作業量が多すぎるし、 メモリも多すぎるし、 時間もかかりすぎるからだ。 + +00:43.850 --> 00:53.180 +ここにあるのは凍結されたウェイトの列で、 黄色いものも凍結されているが、 これはターゲット・モジュールを表している。 + +00:53.180 --> 00:55.940 +どの色を塗るのか。 + +00:56.330 --> 00:59.210 +そして、 ローラのアダプターを持ち込もう。 + +00:59.210 --> 00:59.790 +あそこだ。 + +00:59.820 --> 01:10.380 +技術的に正確を期すために、 実際には、 それぞれの異なるウム・ターゲット・モジュールに対してAとBと呼ばれる2つのアダプターがあることを覚えているかもしれない。 + +01:10.440 --> 01:15.420 +次元はRで与えられ、 アルファはスケーリング係数であることを覚えているだろう。 + +01:15.420 --> 01:22.380 +そして、 これらがターゲット・モジュールに適用される方法は、 アルファ×A×Bという単純なものだ。 + +01:22.710 --> 01:24.450 +簡単なことだ。 + +01:24.450 --> 01:27.750 +これが私たちがよく知っているニューラルネットワークだ。 + +01:27.780 --> 01:33.270 +そして、 このローラ・アダプターでウェイトトレーニングを行うのだが、 我々の場合、 + +01:33.300 --> 01:36.090 +約109MB分のウェイトがある。 + +01:36.780 --> 01:38.280 +ああ、 わかった。 + +01:38.280 --> 01:40.290 +では、 どうなるのか? 
+ +01:40.290 --> 01:42.900 +左側に入力プロンプトがあります。 + +01:42.900 --> 01:47.760 +値段はドルで、 次のトークンという感じだ。 + +01:47.760 --> 01:50.070 +私たちは、 モデルが予測を得意にすることを望んでいる。 + +01:50.520 --> 02:02.400 +フォワード・パスとは、 それを推論モードにして、 次のトークンを予測してくださいというモデルに通すことです。 + +02:03.360 --> 02:10.260 +そして、 それがモデルを通って、 反対側に出てくるのが、 次のトークン価格の予測だ。 + +02:10.290 --> 02:11.820 +そして99。 + +02:11.820 --> 02:19.170 +そしてまた、 我々はラマ3のこのシンプルさを利用している。 1 実際には99は1つのトークンである。 + +02:19.320 --> 02:20.400 +うーん、 そうじゃない。 + +02:20.430 --> 02:23.730 +そしてそれは、 どんな3桁の数字でも常にそうである。 + +02:23.730 --> 02:27.300 +それが重要だというわけではないが、 僕らにとっては少しシンプルになる。 + +02:27.300 --> 02:29.190 +それがフォワードパスだ。 + +02:29.220 --> 02:29.820 +分かった。 + +02:29.850 --> 02:32.040 +さて、 次は損失計算だ。 + +02:32.610 --> 02:33.780 +だからまたここに来た。 + +02:33.780 --> 02:35.400 +私たちは次のトークンを予測した。 + +02:35.400 --> 02:41.400 +これでモデルは、 トレーニングの過程でモデルが何をしたかを調べることができる。 + +02:41.400 --> 02:44.010 +SFTのトレーナーは上を見ている。 + +02:44.010 --> 02:45.960 +次のトークンは何だったのだろう。 + +02:45.960 --> 02:49.080 +トレーニングデータを得たので、 実際の次のトークンはわかっている。 + +02:49.080 --> 02:50.100 +それは何だったのか? + +02:50.100 --> 02:51.810 +仮に89歳だとしよう。 + +02:51.810 --> 02:52.590 +もっと低かった。 + +02:52.590 --> 02:54.000 +だから我々は間違っていた。 + +02:54.000 --> 02:55.800 +10ドルの間違い。 + +02:56.070 --> 02:57.370 +うーん、 あるいは間違っている。 + +02:57.370 --> 03:00.280 +別の言い方をすれば、 これが10ドルであることを知らない。 + +03:00.280 --> 03:02.350 +ただ、 それが別のトークンだと知っているだけなのだ。 + +03:02.710 --> 03:05.650 +だから、 何らかの損失がある。 + +03:05.650 --> 03:10.180 +その損失が何なのか、 なぜトークンが違うだけという単純なものではないのか、 + +03:10.180 --> 03:11.500 +これから説明する。 + +03:11.500 --> 03:16.960 +技術的なことは後ほど説明するが、 今は99を予測したと思ってくれればいい。 + +03:16.990 --> 03:18.670 +実際の値は89である。 + +03:18.700 --> 03:20.050 +負けた。 + +03:20.680 --> 03:23.140 +これが損失計算だ。 + +03:23.140 --> 03:29.350 +ステップ3はバックワードパスで、 バックプロップとかバックワードプロパゲーションと呼ばれている。 + +03:29.350 --> 03:30.580 +逆伝播。 + +03:30.760 --> 03:38.470 +バックプロップでは基本的に、 ネットワークを振り返って、 戻って、 こう言うんだ。 もしこの重みを少しいじったら、 + +03:38.470 --> 03:44.650 +どのくらい損失に影響するだろうか? 
+ +03:44.650 --> 03:48.850 +その重さに対して、 ロスはどの程度敏感なのか。 + +03:48.880 --> 03:54.280 +これにより、 パラメーターの重みの勾配と呼ばれるものが得られる。 + +03:54.280 --> 03:57.320 +だから、 ウェイトというのはパラメーターと同義なんだ。 + +03:57.560 --> 04:00.770 +ええと、 それで、 ええと、 どうやって? + +04:00.800 --> 04:02.120 +グラデーションとは? + +04:02.120 --> 04:05.240 +もしウェイトを変えたとしたら、 損失はどうなる? + +04:05.240 --> 04:08.030 +少しは改善したいと思っているからだ。 + +04:08.360 --> 04:16.100 +赤い三角形はデルタを表すもので、 そこで起こったグラデーション計算を表している。 + +04:17.180 --> 04:22.730 +そして最後のステップ、 ステップ4の最適化だ。 + +04:22.940 --> 04:23.960 +来たぞ。 + +04:23.960 --> 04:26.360 +グラデーションができた。 + +04:26.420 --> 04:30.770 +そして今、 私たちに必要なのは、 正しい方向に小さな一歩を踏み出すことだ。 + +04:30.770 --> 04:36.830 +そこで、 ローラ行列のパラメータを少しだけ更新したい。 + +04:36.830 --> 04:41.720 +そうすれば、 次に同じ入力プロンプトが表示されたとしても、 損失はもう少し少なくなる。 + +04:41.720 --> 04:42.890 +もう少しうまくいくだろう。 + +04:42.890 --> 04:48.980 +私たちは正しい方向に一歩を踏み出し、 学習率を使ってどの程度踏み出すかを決めているわけです。 + +04:48.980 --> 04:53.810 +前にも言ったように、 小さなステップを踏むことにも大きなステップを踏むことにも長所と短所があるからだ。 + +04:54.090 --> 05:00.690 +私たちが使っているオプティマイザー、 アダムWオプティマイザーは、 単に勾配を使うだけでなく、 + +05:00.690 --> 05:12.540 +事前の勾配のローリング平均のようなものを保持することで、 物事を改善する可能性が最も高い方法でステップを踏む方法について本当に賢くなるのです。 + +05:12.660 --> 05:18.810 +また、 オーバーフィッティングなど、 前にも話したような危険なことをしないようにするためでもある。 + +05:19.200 --> 05:20.370 +だからそうするんだ。 + +05:20.370 --> 05:26.430 +そして、 ローラ・アダプターが改良され、 将来的に我々のベースモデルに適用されることで、 + +05:26.430 --> 05:29.700 +少し良い結果が得られるというわけだ。 + +05:29.730 --> 05:40.740 +そのため、 トレーニングの損失が減少し、 チャートがぐねぐねしているのがわかるだろう。 + +05:41.070 --> 05:46.050 +もちろん、 ウェイトやパラメータが変更されるのは、 ローラ・アダプタのパラメータ、 + +05:46.050 --> 05:55.620 +つまり緑色のものであり、 ラマ3ではパラメータを変更することはない。 + +05:55.620 --> 05:55.620 +大きすぎる1ベース、 多すぎるパラメータ。 + +05:55.620 --> 06:02.280 +普通のトレーニング、 微調整、 ローラベースではなく、 普通のトレーニングをしているのであれば、 + +06:02.280 --> 06:10.320 +ラマ3全体になるだろう。 勾配を計算する必要があり、 最適化中にシフトさせる必要がある1つのモデル。 + +06:10.320 --> 06:13.650 +そしてそれはもちろん、 これらの大企業が好むメタである。 + +06:13.680 --> 06:25.620 +これがラマ3のトレーニング方法だ。 そのため、 我々はローラ・アダプターを代わりに使っている。 + +06:26.190 --> 06:32.160 +これが、 トレーニング・プロセス全体における最適化の簡単なまとめだ。 + +06:32.160 --> 06:37.530 +そして、 
それはおそらく、 すでにあなたにとってほとんど明確なものだったと想像している。 しかし、 この図があなたにとってそれを結晶化させ、 + +06:37.530 --> 06:41.310 +すべてがうまくいったことを意味することを期待している。 + +06:41.310 --> 06:47.550 +そして次のビデオでは、 もうひとつ技術的なことを説明しようと思う。 予想と損失計算の意味について、 + +06:47.550 --> 06:51.570 +とても重要な技術的なことだ。 + +06:51.600 --> 06:52.380 +ではまた diff --git a/week5/community-contributions/subtitles/srts/59665129/ko_KR.srt b/week5/community-contributions/subtitles/srts/59665129/ko_KR.srt new file mode 100755 index 0000000..27d1d30 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59665129/ko_KR.srt @@ -0,0 +1,328 @@ +WEBVTT + +00:00.710 --> 00:06.650 +이제 도표를 보여드리면서 실감이 나게 해드릴게요 특히 출로라와 훈련하는 + +00:06.650 --> 00:11.810 +방법에 대해서요 실제로 연습에서 하는 방법이죠 + +00:12.440 --> 00:16.280 +먼저 포워드 패스 얘기를 해보죠 + +00:16.280 --> 00:20.780 +이 다이어그램은 익숙하실 겁니다 제가 전에 사용한 것과 같거든요 + +00:20.810 --> 00:27.350 +라마 3을 보여줬어요 우리가 수량화한 기본 모델은 80억 + +00:27.380 --> 00:31.340 +매개 변수까지 전부 동결됐어요 + +00:31.340 --> 00:36.350 +훈련의 일환으로 무게는 바꾸지 않을 겁니다 할 일도 메모리도 + +00:36.380 --> 00:43.400 +너무 많고 80억 매개 변수를 변경하고 최적화하는 것도 너무 느리니까요 + +00:43.850 --> 00:50.630 +여기 보면 여러 개의 추가 얼어 있는데요 노란색도 얼어 있지만 + +00:50.630 --> 00:53.180 +대상 모듈을 나타내요 + +00:53.180 --> 00:55.940 +어떤 색을 적용할지를요 + +00:56.330 --> 00:59.210 +로라 어댑터를 가져오죠 + +00:59.210 --> 00:59.790 +저기 있네요 + +00:59.820 --> 01:06.090 +기술적으로 맞는다는 걸 기억하실지 모르겠는데 A와 B라는 어댑터가 + +01:06.090 --> 01:10.380 +두 개 있어요 각각의 대상 모듈에 대해서요 + +01:10.440 --> 01:15.420 +R이 부여한 차원성입니다 알파가 배율 요인인 걸 기억하실 거예요 + +01:15.420 --> 01:21.090 +대상 모듈에 적용되는 방식만큼 간단합니다 알파 곱하기 A + +01:21.120 --> 01:22.380 +곱하기 B죠 + +01:22.710 --> 01:24.450 +그게 다예요 + +01:24.450 --> 01:27.750 +이게 우리가 잘 아는 신경망이에요 + +01:27.780 --> 01:33.270 +이 로라 어댑터로 웨이트를 훈련할 거예요 이건 약 109MB짜리 + +01:33.300 --> 01:36.090 +웨이트예요 + +01:36.780 --> 01:38.280 +네 + +01:38.280 --> 01:40.290 +그럼 어떻게 되죠? 
+ +01:40.290 --> 01:42.900 +왼쪽에 입력 프롬프트가 있어요 + +01:42.900 --> 01:47.760 +가격이 달러라면 다음 토큰이 그 다음이죠 + +01:47.760 --> 01:50.070 +Get in get 예측을 잘해야 해요 + +01:50.520 --> 01:57.780 +전진 패스란 입력 모드를 취하는 건데요 기본적으로 모델로 입력해 + +01:57.780 --> 02:02.400 +다음 토큰을 예측해달라고 하는 거죠 + +02:03.360 --> 02:10.260 +모델로 살펴보면 반대쪽에 예상되는 토큰 가격이 나와요 + +02:10.290 --> 02:11.820 +그다음은 99명이에요 + +02:11.820 --> 02:17.970 +다시 말하지만 우린 llama 3의 단순함을 이용하고 있어요 1번, 사실 99는 토큰 하나일 + +02:17.970 --> 02:19.170 +뿐이죠 + +02:19.320 --> 02:20.400 +아닌 건 아니죠 + +02:20.430 --> 02:23.730 +세 자리 숫자라면 언제나 그렇죠 + +02:23.730 --> 02:27.300 +그게 중요한 건 아니지만 비트가 우릴 좀 단순화시키죠 + +02:27.300 --> 02:29.190 +그게 포워드 패스죠 + +02:29.220 --> 02:29.820 +좋아요 + +02:29.850 --> 02:32.040 +이제 손실 계산을 해 보죠 + +02:32.610 --> 02:33.780 +다시 모였네요 + +02:33.780 --> 02:35.400 +다음 토큰을 예측했어요 + +02:35.400 --> 02:41.400 +이제 모델은 훈련 과정을 살펴볼 수 있어요 + +02:41.400 --> 02:44.010 +우리 같은 경우는 위를 봐요 + +02:44.010 --> 02:45.960 +다음 토큰이 뭐였을까요? + +02:45.960 --> 02:49.080 +훈련 데이터가 있기 때문에 실제 다음 토큰을 알아요 + +02:49.080 --> 02:50.100 +그게 뭐였죠? + +02:50.100 --> 02:51.810 +89라고 치죠 + +02:51.810 --> 02:52.590 +낮았어요 + +02:52.590 --> 02:54.000 +우리가 틀렸군요 + +02:54.000 --> 02:55.800 +10달러 차이예요 + +02:56.070 --> 02:57.370 +틀렸을 수도 있고요 + +02:57.370 --> 03:00.280 +다른 토큰으로 이게 10달러라는 걸 몰라요 + +03:00.280 --> 03:02.350 +다른 토큰이란 걸 알 뿐이죠 + +03:02.710 --> 03:05.650 +그래서 상실감이 좀 있어요 + +03:05.650 --> 03:10.180 +잠시 후에 손실이 무엇인지 설명하고 다른 토큰으로 사용하면 왜 안 되는지 + +03:10.180 --> 03:11.500 +설명해 드릴게요 + +03:11.500 --> 03:16.300 +기술적인 부분을 짚고 넘어가겠지만 일단은 99개까지 예상했다고만 해두죠. Get + +03:16.300 --> 03:16.960 +it! + +03:16.990 --> 03:18.670 +실제 가격은 89예요 + +03:18.700 --> 03:20.050 +손실이 생겼어요 + +03:20.680 --> 03:23.140 +그게 손실 계산이고요 + +03:23.140 --> 03:29.350 +세 번째 단계는 뒤로 가기 패스입니다 사람들이 뒤로 가기나 뒤로 전파라고 부르는 걸 들으실 수 있죠 + +03:29.350 --> 03:30.580 +역증식이죠 + +03:30.760 --> 03:38.470 +백프롭에서는 네트워크를 통해 살펴봅니다. 그러면 얼마나 비트를 + +03:38.470 --> 03:44.650 +약간만 조정하면 얼마나 손실이 발생할까요? + +03:44.650 --> 03:48.850 +저 무게에 대한 상실감이 얼마나 민감하죠? 
+ +03:48.880 --> 03:54.280 +매개 변수의 무게의 기울기라는 걸 제공하죠 + +03:54.280 --> 03:57.320 +무게는 매개 변수와 동의어예요 + +03:57.560 --> 04:00.770 +그럼 어떻게 네 + +04:00.800 --> 04:02.120 +경사도가 어떻게 되죠? + +04:02.120 --> 04:05.240 +무게를 바꾸면 손실이 얼마나 커질까요? + +04:05.240 --> 04:08.030 +비트 박스를 개선하고 싶거든요 + +04:08.360 --> 04:14.120 +저 빨간 삼각형은 델타를 나타내는 거예요 저기서 일어난 그러데이션 계산을 + +04:14.210 --> 04:16.100 +보여주기 위해서요 + +04:17.180 --> 04:22.730 +그리고 마지막 단계로 넘어가죠 4단계 최적화요 Get it + +04:22.940 --> 04:23.960 +나오네요 + +04:23.960 --> 04:26.360 +그래서 이런 경사가 있죠 + +04:26.420 --> 04:30.770 +이제 올바른 방향으로 작은 걸음을 내디뎌야 해요 + +04:30.770 --> 04:36.830 +로라 행렬의 매개 변수를 업데이트해야 하는데 비트가 조금 부족해요 + +04:36.830 --> 04:41.720 +다음 번에 같은 inputFrompt를 선택하면 손실이 약간 더 낮아지죠 + +04:41.720 --> 04:42.890 +비트가 더 나을 거예요 + +04:42.890 --> 04:48.200 +올바른 방향으로 한 걸음 나아가고 있어요 학습률을 기준으로 얼마나 나아갈지 + +04:48.200 --> 04:48.980 +결정하죠 + +04:48.980 --> 04:53.810 +앞서 말했듯이 보폭이 작거나 큰 것에 장단점이 있어요 + +04:54.090 --> 05:00.690 +기억하실지 모르겠지만 애덤 W. 최적화 프로그램은 아주 기발한 방법을 사용했어요 + +05:00.690 --> 05:06.720 +기존의 그러데이션 수치를 그대로 유지하면서 기존의 그러데이션 수치를 그대로 + +05:06.720 --> 05:12.540 +유지했죠 그래서 현명하게도 개선할 수 있는 방법을 찾아냈어요 + +05:12.660 --> 05:17.700 +너무 꽉 찬 옷을 입거나 다른 위험을 감수하지 않도록 하는 것이기도 + +05:17.730 --> 05:18.810 +하죠 + +05:19.200 --> 05:20.370 +그렇게 하죠 + +05:20.370 --> 05:26.430 +로라 어댑터가 개선되고 미래에는 기본 모델에 적용할 + +05:26.430 --> 05:29.700 +텐데 조금 더 나아질 거예요 + +05:29.730 --> 05:36.390 +다음번에는 손실이 더 적겠죠 그래서 훈련 손실이 줄고 있는 거예요 흔들리는 + +05:36.390 --> 05:40.740 +도표도요 비트는 배우고 나아지고 있으니까요 + +05:41.070 --> 05:46.050 +여기서 짚고 넘어갈 것은 체중과 변수가 변경된 매개 변수는 로라 + +05:46.050 --> 05:51.180 +어댑터의 녹색 매개 변수입니다 llama 3에서는 어떤 매개 + +05:51.180 --> 05:55.620 +변수도 변경하지 않아요 베이스 1이 너무 크고 매개 변수가 너무 많아요 + +05:55.620 --> 06:02.280 +로라 기반의 훈련이 아닌 평범한 훈련이었다면 라마 + +06:02.280 --> 06:09.120 +3 전체가 됐을 거예요 그러데이션을 계산하고 최적화 때 변경해야 + +06:09.120 --> 06:10.320 +하는 모델이죠 + +06:10.320 --> 06:13.650 +물론 메타 같은 대기업이 그런 거죠 + +06:13.680 --> 06:19.170 +라마 3을 그렇게 훈련시켰어요 1인당 그리고 거기에 엄청난 돈을 들였는데 + +06:19.200 --> 06:25.620 +우리는 그 돈을 쉽게 구할 수 없어서 로라 어댑터를 대신 쓰고 있는 
거예요 + +06:26.190 --> 06:32.160 +지금까지 최적화에 대한 요약이었습니다. 트레이닝 프로세스 전체에서 말이죠. + +06:32.160 --> 06:37.530 +이미 대부분 명확하게 이해하셨겠지만 이 도표를 보고 이해하셨길 + +06:37.530 --> 06:41.310 +바랍니다 모든 게 제자리를 찾았다는 뜻이죠 + +06:41.310 --> 06:47.550 +다음 강의에서는 기술적인 면을 하나만 더 설명할게요 아주 중요한 기술적 측면으로 + +06:47.550 --> 06:51.570 +예측과 손실 계산의 의미를 설명해 드리죠 + +06:51.600 --> 06:52.380 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59666211/en_US.srt b/week5/community-contributions/subtitles/srts/59666211/en_US.srt new file mode 100755 index 0000000..1bb1acf --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59666211/en_US.srt @@ -0,0 +1,136 @@ +WEBVTT + +00:01.490 --> 00:08.780 +So before we try our new model and one more recap on the models so far and keep notes of this so we + +00:08.780 --> 00:09.980 +can see how we do. + +00:09.980 --> 00:14.930 +And your excitement can be there while we run our fine tuned model. + +00:15.140 --> 00:18.230 +We started with a constant model. + +00:18.230 --> 00:21.320 +We actually started with a random model, but I think we can put that one to bed. + +00:21.350 --> 00:23.150 +That's that was that was silly. + +00:23.300 --> 00:29.450 +So a constant model which just guesses the average from the training data set ends up with an error + +00:29.450 --> 00:31.070 +of 146. + +00:31.280 --> 00:35.930 +Uh, and we certainly hope that we can do better than 146. + +00:35.960 --> 00:38.900 +Otherwise, we might as well stick with a constant. + +00:38.930 --> 00:44.630 +When we used a very simplistic traditional machine learning with basic features, we got 139. + +00:44.660 --> 00:45.170 +Remember that? + +00:45.170 --> 00:50.840 +Well, I hope random forest, a more sophisticated algorithm that also that looked at the language, + +00:50.840 --> 00:53.900 +the words um, got down to 97. + +00:54.710 --> 00:58.520 +This human did a poor job at 127. + +00:58.910 --> 01:00.500 +Uh, GPT four. + +01:00.530 --> 01:03.940 +Oh, the big guy did very nicely indeed. 
+ +01:03.940 --> 01:18.430 +At 76 and the Bass Llama 3.1, untrained, quantized down to four bits, did an appalling $396 of error. + +01:18.460 --> 01:23.710 +A much better off just sticking with the constant than using an untrained llama. + +01:23.800 --> 01:26.560 +The poor thing did not do particularly well at all. + +01:26.740 --> 01:31.000 +So I go through this one more time so that you have this nicely framed. + +01:31.030 --> 01:37.060 +The question is, remember, GPT four is a model that has trillions of weights. + +01:37.120 --> 01:40.390 +GPT four had 1.76 trillion GPT four. + +01:40.630 --> 01:44.380 +It's not known, but it's considered to be much more than that. + +01:44.380 --> 01:46.600 +So a huge number of weights. + +01:46.630 --> 01:53.530 +Llama 3.1 base has 8 billion weights, and we have reduced them down to four bits. + +01:53.530 --> 01:57.130 +And then we have used our color. + +01:57.340 --> 01:57.580 +Sorry. + +01:57.610 --> 02:04.900 +Our Lora adapters like 109MB worth of them to to put some extra weights that we can use to adapt. + +02:04.930 --> 02:11.900 +Llama lemma 3.1 base, but these are still small numbers, and obviously this is an open source model, + +02:11.900 --> 02:13.670 +which means it's free to run. + +02:13.670 --> 02:20.480 +So I'm saying all this to set expectations that obviously it's a lot to ask to try and compete with + +02:20.480 --> 02:22.610 +some of these models at the frontier. + +02:22.820 --> 02:27.110 +The thing that you need to be looking out for is, can we do better than traditional machine learning? + +02:27.350 --> 02:28.910 +Can we do better than a human can? + +02:28.940 --> 02:29.150 +Certainly. + +02:29.150 --> 02:30.470 +Can we do better than constant? + +02:30.470 --> 02:35.090 +And how do we stack up when we compare ourselves to GPT four? + +02:35.210 --> 02:42.590 +So the leading frontier model, and we can also compare it to GPT four or mini, um, as well, uh, + +02:42.590 --> 02:43.580 +afterwards. 
+ +02:43.880 --> 02:45.800 +So that gives you the context. + +02:45.830 --> 02:46.910 +I hope you have this in your mind. + +02:46.910 --> 02:50.060 +Maybe write down the numbers so you're ready for for what's to come. + +02:50.060 --> 03:00.500 +And it is time for us to head to Colab and to run inference on the the best, strongest checkpoint from + +03:00.500 --> 03:08.380 +from the the training of our own verticalized specialized, uh, open source model. diff --git a/week5/community-contributions/subtitles/srts/59666211/ja_JP.srt b/week5/community-contributions/subtitles/srts/59666211/ja_JP.srt new file mode 100755 index 0000000..6cab8d1 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59666211/ja_JP.srt @@ -0,0 +1,121 @@ +WEBVTT + +00:01.490 --> 00:09.980 +だから、 新しいモデルを試す前に、 これまでのモデルをもう1度振り返り、 自分たちがどうするかわかるようにメモしておこう。 + +00:09.980 --> 00:14.930 +そして、 私たちが微調整したモデルを走らせている間、 あなたの興奮はそこにある。 + +00:15.140 --> 00:18.230 +まずは一定のモデルから始めた。 + +00:18.230 --> 00:21.320 +実はランダムなモデルから始めたのだが、 それはもう終わりにしよう。 + +00:21.350 --> 00:23.150 +あれはあれでバカバカしい。 + +00:23.300 --> 00:31.070 +つまり、 訓練データセットから平均値を推測するだけの定数モデルでは、 146の誤差に終わる。 + +00:31.280 --> 00:35.930 +146よりもいい結果を出したいと思っている。 + +00:35.960 --> 00:38.900 +そうでなければ、 定数にこだわる方がいい。 + +00:38.930 --> 00:44.630 +基本的な特徴を持つ、 非常に単純化された従来の機械学習を使ったところ、 139の結果が出た。 + +00:44.660 --> 00:45.170 +覚えているかい? 
+ +00:45.170 --> 00:50.840 +まあ、 ランダムフォレストという、 より洗練されたアルゴリズムで、 言語や単語を調べて、 + +00:50.840 --> 00:53.900 +97まで下がればいいんだけどね。 + +00:54.710 --> 00:58.520 +この人間は127で下手な仕事をした。 + +00:58.910 --> 01:00.500 +GPT4だね。 + +01:00.530 --> 01:03.940 +ああ、 あの大男は実によくやったよ。 + +01:03.940 --> 01:18.430 +76歳、 バス・ラマ3世。 1、 未訓練、 4ビットに量子化され、 396ドルのエラーを出した。 + +01:18.460 --> 01:23.710 +訓練されていないラマを使うより、 一定にこだわる方がずっといい。 + +01:23.800 --> 01:26.560 +可哀想に、 特にうまくはなかった。 + +01:26.740 --> 01:31.000 +だから、 もう1度、 このことを確認する。 + +01:31.030 --> 01:37.060 +問題は、 GPT4は何兆ものウェイトを持つモデルだということだ。 + +01:37.120 --> 01:40.390 +GPT4は1だった。 76兆GPT 4 + +01:40.630 --> 01:44.380 +定かではないが、 それ以上のものだと考えられている。 + +01:44.380 --> 01:46.600 +だからウェイトの数は膨大だ。 + +01:46.630 --> 01:53.530 +ラマ 3. 1つのベースには80億の重みがあり、 我々はそれを4ビットにまで減らした。 + +01:53.530 --> 01:57.130 +そして、 私たちの色を使った。 + +01:57.340 --> 01:57.580 +申し訳ない。 + +01:57.610 --> 02:04.900 +私たちのローラ・アダプターは109MBの価値があり、 適応するために使用できる余分なウェイトを置くことができる。 + +02:04.930 --> 02:13.670 +ラマのレンマ3。 1ベースだが、 これはまだ少数であり、 明らかにこれはオープンソースモデルである。 + +02:13.670 --> 02:22.610 +だから、 フロンティアでこのようなモデルと競争しようとするのは、 明らかに大変なことだと期待させるために、 このようなことを言っているんだ。 + +02:22.820 --> 02:27.110 +注目しなければならないのは、 従来の機械学習よりもうまくやれるかどうかということだ。 + +02:27.350 --> 02:28.910 +人間以上のことができるだろうか? + +02:28.940 --> 02:29.150 +確かに。 + +02:29.150 --> 02:30.470 +一定以上の結果を残せるか? + +02:30.470 --> 02:35.090 +また、 GPTの4人と比較した場合、 私たちはどうなのだろうか? 
+ +02:35.210 --> 02:43.580 +だから、 リーディング・フロンティア・モデルは、 GPT4やミニとも比較できる。 + +02:43.880 --> 02:45.800 +だから、 その背景がわかる。 + +02:45.830 --> 02:46.910 +このことを心に留めておいてほしい。 + +02:46.910 --> 02:50.060 +これから起こることに備えて、 数字を書き留めておくのもいいかもしれない。 + +02:50.060 --> 03:00.500 +そして、 私たちはColabに向かい、 私たち自身の縦割りに特化した、 えー、 オープンソースモデルのトレーニングから得られた、 + +03:00.500 --> 03:08.380 +ベストで最強のチェックポイントで推論を実行する時なのだ。 diff --git a/week5/community-contributions/subtitles/srts/59666211/ko_KR.srt b/week5/community-contributions/subtitles/srts/59666211/ko_KR.srt new file mode 100755 index 0000000..6054aa1 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59666211/ko_KR.srt @@ -0,0 +1,133 @@ +WEBVTT + +00:01.490 --> 00:09.980 +새 모델을 시도하기 전에 모델에 대해 한 가지 더 알려드리죠 어떻게 되는지 볼 수 있게 메모해두세요 + +00:09.980 --> 00:14.930 +미세 튜닝 모델을 작동하는 동안 여러분도 함께 즐기세요 + +00:15.140 --> 00:18.230 +우린 일정한 모델로 시작했어요 + +00:18.230 --> 00:21.320 +무작위 모델로 시작했지만 그건 이제 Put it로 할 수 있을 것 같아요 + +00:21.350 --> 00:23.150 +그건 바보 같은 짓이었어요 + +00:23.300 --> 00:29.450 +훈련 데이터 세트에서 평균을 추측하는 상수 모델은 결국 146 + +00:29.450 --> 00:31.070 +오류가 되죠 + +00:31.280 --> 00:35.930 +146점보다는 더 잘했으면 좋겠어요 + +00:35.960 --> 00:38.900 +아니면 그냥 계속 만나는 게 나아요 + +00:38.930 --> 00:44.630 +아주 간단한 전통적인 머신 러닝을 기본 기능으로 사용했더니 139개가 나왔어요 + +00:44.660 --> 00:45.170 +기억나요? 
+ +00:45.170 --> 00:50.840 +무작위의 숲과 더 정교한 알고리즘이 언어를 살펴보고 + +00:50.840 --> 00:53.900 +97개로 줄었길 바라요 + +00:54.710 --> 00:58.520 +이 인간은 56kg으로 형편없었어요 + +00:58.910 --> 01:00.500 +GPT 4요 + +01:00.530 --> 01:03.940 +덩치 큰 친구가 아주 잘했어요 + +01:03.940 --> 01:18.430 +76위와 배스 라마가 3위예요 1번은 훈련받지 않고 4개로 수량화했는데 끔찍한 396달러의 오류를 냈어요 + +01:18.460 --> 01:23.710 +훈련받지 않은 라마를 쓰는 것보다 상수를 쓰는 게 훨씬 나아요 + +01:23.800 --> 01:26.560 +가엾게도 잘 지내지 못했어요 + +01:26.740 --> 01:31.000 +이걸 한 번 더 훑어볼게요 그래야 액자가 잘 나오죠 + +01:31.030 --> 01:37.060 +GPT 4의 중량은 수조 단위라는 걸 기억하세요 + +01:37.120 --> 01:40.390 +GPT 4는 1이었죠 76조 GPT 4요 + +01:40.630 --> 01:44.380 +알려진 바는 없지만 그 이상으로 여겨지죠 + +01:44.380 --> 01:46.600 +무게가 엄청나죠 + +01:46.630 --> 01:53.530 +라마 3요 1기지는 80억 개인데 우린 그걸 4분의 1로 줄였어요 + +01:53.530 --> 01:57.130 +이제 색을 칠해 볼게요 + +01:57.340 --> 01:57.580 +미안해요 + +01:57.610 --> 02:04.900 +로라 어댑터에는 109MB가 들어가는데 거기에 쓸 추가 무게를 더하기 위해서죠 + +02:04.930 --> 02:11.900 +라마레마3요 1베이스지만 여전히 작은 숫자죠 오픈 소스 모델이라 + +02:11.900 --> 02:13.670 +무료로 실행할 수 있어요 + +02:13.670 --> 02:20.480 +이런 얘기를 하는 건 기대를 심어주기 위해서예요 개척지에서 일하는 모델들과 경쟁하는 + +02:20.480 --> 02:22.610 +건 무리한 요구죠 + +02:22.820 --> 02:27.110 +우리가 기존의 머신 러닝보다 더 잘할 수 있을지 살펴봐야 해요 + +02:27.350 --> 02:28.910 +인간보다 더 잘할 수 있을까요? + +02:28.940 --> 02:29.150 +물론이죠 + +02:29.150 --> 02:30.470 +계속보다 더 잘할 수 있어요? + +02:30.470 --> 02:35.090 +GPT 4와 우리를 비교하면 어떤 스택이 나올까요? 
+ +02:35.210 --> 02:42.590 +최전방 모델이죠 GPT 4나 미니와도 나중에 비교할 수 + +02:42.590 --> 02:43.580 +있어요 + +02:43.880 --> 02:45.800 +그게 맥락을 제공하죠 + +02:45.830 --> 02:46.910 +잘 생각해 보세요 + +02:46.910 --> 02:50.060 +앞으로 닥칠 일에 대비해서 숫자를 적어두세요 + +02:50.060 --> 03:00.500 +이제 콜랍으로 가서 가장 강력한 체크포인트를 추론할 때입니다 수직화된 오픈 + +03:00.500 --> 03:08.380 +소스 모델을 훈련한 결과죠 HDP, HDP, HDP diff --git a/week5/community-contributions/subtitles/srts/59666831/en_US.srt b/week5/community-contributions/subtitles/srts/59666831/en_US.srt new file mode 100755 index 0000000..9749d18 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59666831/en_US.srt @@ -0,0 +1,334 @@ +WEBVTT + +00:00.890 --> 00:07.700 +Take one more moment to look at this very nice diagram that lays it all out, and we will move on. + +00:07.700 --> 00:10.970 +Now to the technicality that I wanted to mention. + +00:10.970 --> 00:18.470 +So I have been a bit loose in the past when I've said that the model predicts the next token, which + +00:18.470 --> 00:19.970 +is one way to think about it. + +00:20.000 --> 00:21.200 +One simple way to think about it. + +00:21.200 --> 00:25.730 +And we often use that, that turn of phrase, but it's not what's actually going on. + +00:25.730 --> 00:30.800 +It's not like you have a transformer architecture and the bottom layer, what comes out of that is a + +00:30.800 --> 00:33.560 +single token, which is the predicted next token. + +00:33.800 --> 00:35.390 +That's not how it works. + +00:35.390 --> 00:40.310 +What actually comes out of the last layer is probabilities. + +00:40.310 --> 00:44.390 +It has every single token, and for every token it has. + +00:44.390 --> 00:48.080 +What is the probability that that is the next token. + +00:48.080 --> 00:52.130 +It outputs that series of probabilities. + +00:52.160 --> 00:54.920 +That's what comes out of the bottom of the neural network. + +00:55.100 --> 01:00.410 +And it's you may remember there was the last layer that we saw when we printed. 
+ +01:00.410 --> 01:02.640 +The model is called LM Head. + +01:02.700 --> 01:06.420 +And that is really the very final step that comes out. + +01:06.420 --> 01:11.310 +It actually comes out with a vector of numbers that are known as the logits, which are the the that + +01:11.310 --> 01:12.810 +represent the probabilities. + +01:12.840 --> 01:14.970 +And you need to put that through a function. + +01:15.090 --> 01:19.260 +Um, this is getting probably to too much detail now, but you may know it already. + +01:19.260 --> 01:25.680 +There's a function called softmax that converts these numbers into what can be thought of as probabilities, + +01:25.680 --> 01:31.650 +because each each token will then be somewhere between 0 and 1, and they will all add up to one. + +01:31.680 --> 01:39.540 +So it gives you a way to interpret the results of the model as a probability of each possible next token. + +01:39.540 --> 01:43.710 +So that is actually what comes out of the forward pass. + +01:44.130 --> 01:50.370 +Um, when you're doing inference when you're running this in inference mode, um, what you get out + +01:50.370 --> 01:52.080 +of it is then all of these probabilities. + +01:52.080 --> 01:52.980 +So what do you do with that. + +01:53.010 --> 01:55.530 +How do you how do you say what next token it's predicting? + +01:55.530 --> 01:58.080 +Well, most of the time actually it's a very simple approach. + +01:58.080 --> 02:00.600 +You simply take the most likely next token. + +02:00.600 --> 02:02.820 +You take the token with the highest probability. + +02:02.850 --> 02:04.450 +It gave you all these different probabilities. + +02:04.480 --> 02:06.130 +Find out which one is the max. + +02:06.130 --> 02:07.750 +Take that one as the next letter. + +02:07.780 --> 02:10.840 +There are other techniques that are a little bit more sophisticated. + +02:10.840 --> 02:16.720 +You can sample randomly using these probabilities as your as your weight for how you sample. 
+ +02:16.780 --> 02:18.550 +And that gives you a bit more variety. + +02:18.700 --> 02:24.520 +And there are some other techniques you can use to sample a few letters in a row, and then decide whether + +02:24.520 --> 02:25.900 +that's the path that you want to take. + +02:25.900 --> 02:32.290 +So there are a bunch of different strategies during inference that you can use based on these probabilities + +02:32.290 --> 02:33.490 +to do the best job. + +02:33.490 --> 02:39.220 +And in fact, when we go and look in our project in a second, we are going to use a slightly non-standard + +02:39.220 --> 02:45.940 +strategy, since we know that each that this token represents a cost, it represents a number. + +02:45.940 --> 02:50.440 +We can do something a little bit smart with it, but but that's not necessary. + +02:50.440 --> 02:54.130 +You can also always just simply pick pick the one with the highest probability. + +02:54.340 --> 02:57.550 +So that's model output really explained. + +02:57.580 --> 02:59.470 +I hope that's now crystal clear for you. + +02:59.680 --> 03:02.050 +Um, and then the loss function. + +03:02.050 --> 03:04.330 +So I slightly glossed over in the last video. + +03:04.360 --> 03:05.840 +Like you calculate a loss. + +03:05.840 --> 03:07.550 +How bad was it? + +03:07.550 --> 03:09.950 +So what does that actually mean in practice? + +03:09.980 --> 03:11.990 +It's wonderfully simple. + +03:11.990 --> 03:15.890 +So we've got these probabilities of all of the possible next tokens. + +03:16.010 --> 03:21.200 +So what you do is you say okay, well we actually know what the next token was supposed to be. + +03:21.290 --> 03:23.690 +Let's say it was supposed to be 99. + +03:23.690 --> 03:30.740 +So you look up in all of these probabilities and you say, what probability did the model give for 99 + +03:30.740 --> 03:33.350 +for the thing that actually was the right next token. + +03:33.350 --> 03:34.790 +And that's all that matters here. 
+ +03:34.790 --> 03:38.780 +All that matters is what probability did it give to the thing that was actually correct. + +03:39.080 --> 03:45.320 +Um, if it gives if it gave that a 100% probability, then it was perfect. + +03:45.320 --> 03:48.050 +It was 100% confident in the right results. + +03:48.050 --> 03:51.320 +And everything else would have to be zero because probabilities will add up to one. + +03:51.320 --> 03:53.900 +So so that would be absolutely perfect. + +03:53.900 --> 03:58.190 +If it's anything less than 100%, then it didn't do well. + +03:58.400 --> 04:01.850 +And the you know, the lower probability it gave, the worse it did. + +04:02.000 --> 04:07.590 +And so you take that probability and then it just turns out that the formula that seems to work well + +04:07.920 --> 04:11.910 +is to take the log of that number and then negate it. + +04:11.910 --> 04:15.390 +So you take minus one times the log of that number. + +04:15.390 --> 04:20.430 +And if you work that out, that means that if that number is one, if it's 100% probability, then you + +04:20.430 --> 04:21.360 +get zero. + +04:21.360 --> 04:24.150 +And that's sounds good because you want zero loss. + +04:24.180 --> 04:26.700 +Loss should be nothing if you were perfect. + +04:27.090 --> 04:32.340 +And then the lower your probability is, the worse the higher that loss number will be. + +04:32.340 --> 04:42.000 +So taking negative log of the probability, um, is a way of of having a good, well well-behaving loss + +04:42.000 --> 04:42.840 +function. + +04:43.230 --> 04:44.670 +And there's a fancy name for it. + +04:44.670 --> 04:48.030 +This loss function is known as the cross entropy loss. + +04:48.060 --> 04:49.200 +That's what they call it. + +04:49.200 --> 04:54.330 +It's just negative log of the probability of the true next token. + +04:54.660 --> 04:57.120 +And that's, that's what's used. 
+ +04:57.120 --> 05:01.410 +Um, and that's that's what's being used right now if your training is going on, it is calculating + +05:01.410 --> 05:04.860 +the cross entropy loss for each of the predictions. + +05:05.190 --> 05:07.890 +And there's a there's another side note. + +05:07.920 --> 05:11.790 +There's an The interpretation of this for particularly for data scientists amongst you, this is the + +05:11.790 --> 05:17.700 +calculation that's used for classification when you're trying to classify something into different bins. + +05:17.700 --> 05:22.860 +If you've got like back in the day when there were times when we were trying to classify images to be + +05:22.860 --> 05:28.230 +one of 4 or 5 different categories, cross-entropy loss, you come up with a probability and you use + +05:28.230 --> 05:33.210 +cross-entropy loss to figure out whether or not you've done a good job of classification. + +05:33.630 --> 05:39.930 +And in fact, that makes sense, because the whole process of predicting the next token is really a + +05:39.930 --> 05:41.580 +classification problem. + +05:41.580 --> 05:48.000 +You're just trying to say there are many possible categories that this next token could be. + +05:48.030 --> 05:50.880 +In fact, there's the entire all of the possible next tokens. + +05:50.880 --> 05:57.660 +And we're going to predict which one is the most likely bucket to to put for the next token. + +05:57.660 --> 06:03.690 +And so the whole process of generative AI is really just a classification problem. + +06:03.690 --> 06:09.540 +Classifying the next token, figuring out the probability that the next token is what it turns out to + +06:09.540 --> 06:10.200 +be. + +06:10.690 --> 06:18.160 +And so interestingly, for our particular project for predicting model prices, it's really, as I said + +06:18.280 --> 06:20.320 +some time ago, it's really a regression problem. 
+ +06:20.320 --> 06:25.390 +You're trying to predict a number and we're treating it like a classification problem, which is okay, + +06:25.390 --> 06:31.910 +because it's really going to turn out to be just a number between 0 and 999, which, sorry, between + +06:31.910 --> 06:36.670 +1 and 999, which are just 999 different possible buckets. + +06:36.670 --> 06:42.790 +And we're just trying to classify effectively every product into one of these 999 buckets. + +06:42.790 --> 06:45.850 +And that's why it works quite well as a classification problem. + +06:45.850 --> 06:52.690 +And it's why the frontier models are good at it and why we're hoping, fingers crossed, that our open + +06:52.690 --> 06:55.120 +source model is going to be good at it too. + +06:55.390 --> 07:01.030 +And with all of this, some good useful theory behind us. + +07:01.030 --> 07:04.060 +But now it's time to get back to some practice. + +07:04.060 --> 07:12.130 +So with that in mind, it's time for us to talk about how has our open source, uh, fine tuning been + +07:12.130 --> 07:12.730 +going? diff --git a/week5/community-contributions/subtitles/srts/59666831/ja_JP.srt b/week5/community-contributions/subtitles/srts/59666831/ja_JP.srt new file mode 100755 index 0000000..eca7506 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59666831/ja_JP.srt @@ -0,0 +1,283 @@ +WEBVTT + +00:00.890 --> 00:07.700 +もう一度、 この素晴らしい図をご覧いただきたい。 + +00:07.700 --> 00:10.970 +さて、 私が言及したかった技術的なことだ。 + +00:10.970 --> 00:19.970 +だから、 過去にモデルが次のトークンを予測すると言ったとき、 私は少しルーズだった。 + +00:20.000 --> 00:21.200 +単純に考えればいい。 + +00:21.200 --> 00:25.730 +私たちはよくそのような言い回しを使っているが、 実際に起こっていることはそうではない。 + +00:25.730 --> 00:30.800 +トランスフォーマー・アーキテクチャーがあり、 一番下のレイヤーから出てくるのは、 予測された次のトークンである単一のトークンである、 + +00:30.800 --> 00:33.560 +というようなものではない。 + +00:33.800 --> 00:35.390 +そういうわけにはいかない。 + +00:35.390 --> 00:40.310 +最後の層で実際に出てくるのは確率である。 + +00:40.310 --> 00:44.390 +すべてのトークンを持っている。 + +00:44.390 --> 00:48.080 +そのトークンが次のトークンである確率は? 
+ +00:48.080 --> 00:52.130 +その一連の確率を出力する。 + +00:52.160 --> 00:54.920 +これがニューラルネットワークの底から出てくるものだ。 + +00:55.100 --> 01:00.410 +そして、 プリントするときに見た最後のレイヤーを覚えているかもしれない。 + +01:00.410 --> 01:02.640 +このモデルはLMヘッドと呼ばれる。 + +01:02.700 --> 01:06.420 +そして、 それが本当に最後のステップとなる。 + +01:06.420 --> 01:12.810 +実際にロジットと呼ばれる確率を表す数値のベクトルが出てくる。 + +01:12.840 --> 01:14.970 +そして、 それを関数に通す必要がある。 + +01:15.090 --> 01:19.260 +ええと、 これはもう詳しすぎるだろうけど、 もう知っているかもしれないね。 + +01:19.260 --> 01:31.650 +ソフトマックスと呼ばれる関数があり、 この関数はこれらの数値を確率に変換する。 + +01:31.680 --> 01:39.540 +つまり、 モデルの結果を、 次のトークンの確率として解釈する方法を与えてくれるのだ。 + +01:39.540 --> 01:43.710 +つまり、 これがフォワードパスで出てくるものなのだ。 + +01:44.130 --> 01:52.080 +推論を行うとき、 推論モードでこれを実行すると、 得られるのはこれらの確率だ。 + +01:52.080 --> 01:52.980 +それでどうするんだ。 + +01:53.010 --> 01:55.530 +次のトークンをどう予測するか? + +01:55.530 --> 01:58.080 +まあ、 たいていの場合はとてもシンプルなアプローチなんだけどね。 + +01:58.080 --> 02:00.600 +次のトークンを取るだけだ。 + +02:00.600 --> 02:02.820 +最も確率の高いトークンを取る。 + +02:02.850 --> 02:04.450 +さまざまな確率が示された。 + +02:04.480 --> 02:06.130 +どれが最大なのか調べてみよう。 + +02:06.130 --> 02:07.750 +それを次の手紙とする。 + +02:07.780 --> 02:10.840 +もう少し洗練されたテクニックもある。 + +02:10.840 --> 02:16.720 +これらの確率を、 サンプルの取り方の重みとして使って、 無作為にサンプリングすることができる。 + +02:16.780 --> 02:18.550 +そうすれば、 もう少しバリエーションが増える。 + +02:18.700 --> 02:25.900 +その他にも、 いくつかの文字を並べてみて、 それが自分の進みたい道かどうかを判断するテクニックもある。 + +02:25.900 --> 02:33.490 +そのため、 推論の際には、 これらの確率に基づいて、 最善の仕事をするために使えるさまざまな戦略がある。 + +02:33.490 --> 02:39.220 +このトークンはそれぞれコストを表し、 + +02:39.220 --> 02:45.940 +数字を表している。 + +02:45.940 --> 02:50.440 +それで少しスマートなことはできるが、 その必要はない。 + +02:50.440 --> 02:54.130 +単純に確率の高いものを選ぶこともできる。 + +02:54.340 --> 02:57.550 +これがモデル出力の説明だ。 + +02:57.580 --> 02:59.470 +これではっきりしたと思う。 + +02:59.680 --> 03:02.050 +それから損失関数。 + +03:02.050 --> 03:04.330 +だから、 前回のビデオで少し触れた。 + +03:04.360 --> 03:05.840 +損失を計算するようにね。 + +03:05.840 --> 03:07.550 +どの程度悪かったのですか? + +03:07.550 --> 03:09.950 +では、 実際にはどうなのか? 
+ +03:09.980 --> 03:11.990 +素晴らしくシンプルだ。 + +03:11.990 --> 03:15.890 +つまり、 次に起こりうるすべてのトークンの確率がわかったわけだ。 + +03:16.010 --> 03:21.200 +それで、 次のトークンが何であるべきかがわかった。 + +03:21.290 --> 03:23.690 +仮に99歳だとしよう。 + +03:23.690 --> 03:30.740 +そこで、 これらの確率をすべて調べて、 モデルが99に対してどのような確率を与えたか、 実際に正しい次のトークンであったか、 + +03:30.740 --> 03:33.350 +と言うのだ。 + +03:33.350 --> 03:34.790 +そして、 ここで重要なのはそれだけだ。 + +03:34.790 --> 03:38.780 +重要なのは、 実際に正しかったことにどのような確率を与えたかということだ。 + +03:39.080 --> 03:45.320 +うーん、 もし100%の確率でそうなるのなら、 完璧だったということになる。 + +03:45.320 --> 03:48.050 +正しい結果に100%の自信を持っていた。 + +03:48.050 --> 03:51.320 +確率は足し算で1になるのだから、 それ以外はすべてゼロでなければならない。 + +03:51.320 --> 03:53.900 +だから、 それは絶対に完璧だ。 + +03:53.900 --> 03:58.190 +もし100%以下なら、 うまくいかなかったということだ。 + +03:58.400 --> 04:01.850 +確率が低ければ低いほど、 成績は悪くなる。 + +04:02.000 --> 04:11.910 +そして、 その確率を求め、 その数値の対数をとり、 それを否定する式がうまく機能することがわかった。 + +04:11.910 --> 04:15.390 +つまり、 その数字の対数のマイナス1倍を取るわけだ。 + +04:15.390 --> 04:21.360 +それを計算すると、 その数字が1であれば、 100%の確率であれば、 0になるということだ。 + +04:21.360 --> 04:24.150 +それはいいことだと思う。 + +04:24.180 --> 04:26.700 +完璧であれば、 損失は何もないはずだ。 + +04:27.090 --> 04:32.340 +そして、 確率が低ければ低いほど、 その損失額は大きくなる。 + +04:32.340 --> 04:42.840 +つまり、 確率の負の対数を取ることは、 うまく振る舞う損失関数を持つ方法なのだ。 + +04:43.230 --> 04:44.670 +それに洒落た名前がある。 + +04:44.670 --> 04:48.030 +この損失関数はクロスエントロピー損失として知られている。 + +04:48.060 --> 04:49.200 +そう呼ばれている。 + +04:49.200 --> 04:54.330 +真の次のトークンの確率の負の対数だ。 + +04:54.660 --> 04:57.120 +そして、 それが使われているんだ。 + +04:57.120 --> 05:04.860 +トレーニングが進行している場合、 各予測のクロスエントロピー損失を計算します。 + +05:05.190 --> 05:07.890 +そして、 もうひとつ余談がある。 + +05:07.920 --> 05:11.790 +特にデータサイエンティストにとっては、 これは何かを異なるビンに分類しようとするときに、 + +05:11.790 --> 05:17.700 +分類に使われる計算だという解釈がある。 + +05:17.700 --> 05:22.860 +昔、 画像を4つか5つのカテゴリーに分類しようとしたとき、 クロスエントロピー損失があれば、 + +05:22.860 --> 05:33.210 +確率を計算し、 分類がうまくいったかどうかを判断するためにクロスエントロピー損失を使います。 + +05:33.630 --> 05:41.580 +次のトークンを予測するプロセス全体が分類問題なのだから。 + +05:41.580 --> 05:48.000 +次のトークンには、 いろいろなカテゴリーが考えられると言いたいのだろう。 + +05:48.030 --> 05:50.880 +実際、 次の可能性のあるトークンはすべてある。 + +05:50.880 --> 
05:57.660 +そして、 次のトークンを入れるのに最も可能性の高いバケツはどれかを予想する。 + +05:57.660 --> 06:03.690 +つまり、 生成AIの全プロセスは、 実際には単なる分類問題なのだ。 + +06:03.690 --> 06:10.200 +次のトークンを分類し、 次のトークンがそうである確率を計算する。 + +06:10.690 --> 06:20.320 +そして興味深いことに、 モデル価格を予測する私たちの特別なプロジェクトでは、 少し前にも言ったように、 これは本当に回帰の問題なのです。 + +06:20.320 --> 06:25.390 +数字を予測しようとしているのに、 + +06:25.390 --> 06:36.670 +それを分類問題のように扱っている。 + +06:36.670 --> 06:42.790 +私たちは、 すべての製品をこの999のバケツのひとつに分類しようとしているだけです。 + +06:42.790 --> 06:45.850 +だからこそ、 分類問題としては非常にうまく機能するのだ。 + +06:45.850 --> 06:55.120 +フロンティア・モデルが得意とする理由であり、 オープンソース・モデルが得意とすることを期待している理由でもある。 + +06:55.390 --> 07:01.030 +そして、 これらすべてが、 私たちの背後にある有益な理論である。 + +07:01.030 --> 07:04.060 +でも、 今は練習に戻る時だ。 + +07:04.060 --> 07:12.730 +そういうわけで、 そろそろオープンソースの、 ええと、 ファインチューニングはどうなっているのかについて話をしようじゃないか。 diff --git a/week5/community-contributions/subtitles/srts/59666831/ko_KR.srt b/week5/community-contributions/subtitles/srts/59666831/ko_KR.srt new file mode 100755 index 0000000..870279a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59666831/ko_KR.srt @@ -0,0 +1,328 @@ +WEBVTT + +00:00.890 --> 00:07.700 +잠시 시간을 내서 이 멋진 도표를 보세요 모든 걸 배치한 거죠 그런 다음 넘어가죠 + +00:07.700 --> 00:10.970 +이제 제가 말씀드리고 싶었던 세부적인 부분으로 넘어가죠 + +00:10.970 --> 00:18.470 +제가 비트를 좀 느슨하게 표현했는데요 모델이 다음 토큰을 예측한다고 했었죠 그렇게 생각할 + +00:18.470 --> 00:19.970 +수도 있겠네요 + +00:20.000 --> 00:21.200 +간단하게 생각해 보죠 + +00:21.200 --> 00:25.730 +종종 그런 표현을 쓰지만 실제로는 그렇지 않아요 + +00:25.730 --> 00:30.800 +변압기 구조가 있는 것도 아니고 맨 아래 계층에서 나오는 건 단일 토큰이 + +00:30.800 --> 00:33.560 +아니죠 예측 가능한 다음 토큰이요 + +00:33.800 --> 00:35.390 +그렇게 되는 게 아니에요 + +00:35.390 --> 00:40.310 +마지막 층에서 나오는 건 확률이에요 + +00:40.310 --> 00:44.390 +모든 토큰을 갖고 있고 그 모든 토큰에 대해 알고 있죠 + +00:44.390 --> 00:48.080 +그게 다음 토큰이 될 확률은 얼마나 될까요? 
+ +00:48.080 --> 00:52.130 +일련의 확률을 내놓죠 + +00:52.160 --> 00:54.920 +신경망 아래에서 나오는 거예요 + +00:55.100 --> 01:00.410 +프린트할 때 마지막으로 봤던 레이어가 있었죠 + +01:00.410 --> 01:02.640 +LM 헤드라는 모델이에요 + +01:02.700 --> 01:06.420 +그게 가장 마지막 단계예요 + +01:06.420 --> 01:11.310 +숫자의 벡터인 로그잇을 가지고 나타납니다 확률을 나타내는 + +01:11.310 --> 01:12.810 +것들이죠 + +01:12.840 --> 01:14.970 +그걸 함수에 넣어야 해요 Put it up + +01:15.090 --> 01:19.260 +너무 자세하게 설명하는 것 같지만 이미 알고 계실지도 몰라요 + +01:19.260 --> 01:25.680 +소프트맥스라는 함수가 있는데 이 숫자들을 확률로 변환할 수 있습니다 각각의 + +01:25.680 --> 01:31.650 +토큰은 0에서 1 사이일 것이고 그 다합이 1이기 때문이죠 + +01:31.680 --> 01:39.540 +따라서 모델의 결과를 가능한 다음 토큰의 확률로 해석할 수 있는 방법을 제공하죠 + +01:39.540 --> 01:43.710 +이게 포워드 패스에서 나오는 거예요 + +01:44.130 --> 01:50.370 +추론 모드에서 이걸 실행하면 get-ference 모드에서 얻는 건 + +01:50.370 --> 01:52.080 +이 모든 확률이에요 + +01:52.080 --> 01:52.980 +이걸로 뭘 할 수 있을까요? + +01:53.010 --> 01:55.530 +다음 토큰이 뭘 예언하는지 어떻게 알죠? + +01:55.530 --> 01:58.080 +대부분은 아주 간단한 접근법이에요 + +01:58.080 --> 02:00.600 +가장 가능성이 높은 토큰을 선택하면 돼요 + +02:00.600 --> 02:02.820 +가능성이 높은 토큰을 선택하세요 + +02:02.850 --> 02:04.450 +개연성이 아주 다양해요 + +02:04.480 --> 02:06.130 +누가 최대인지 알아봐요 + +02:06.130 --> 02:07.750 +다음 글자는 이거로 해요 + +02:07.780 --> 02:10.840 +좀 더 세련된 비트도 있어요 + +02:10.840 --> 02:16.720 +이런 확률을 체중으로 삼아 무작위로 샘플을 채취할 수 있어요 + +02:16.780 --> 02:18.550 +비트가 좀 더 다양해지죠 + +02:18.700 --> 02:24.520 +몇 개의 글자를 연속해서 샘플링할 수 있는 다른 기술도 있어요 그리고 그 경로를 + +02:24.520 --> 02:25.900 +선택할 수 있죠 + +02:25.900 --> 02:32.290 +추론할 때 다양한 전략을 쓸 수 있어요 이런 확률을 바탕으로 최선의 결과를 + +02:32.290 --> 02:33.490 +낼 수 있죠 + +02:33.490 --> 02:39.220 +잠시 후 우리 프로젝트를 보면 약간 비표준 전략을 사용할 + +02:39.220 --> 02:45.940 +겁니다 각각의 토큰은 비용, 숫자를 나타내니까요 + +02:45.940 --> 02:50.440 +비트를 좀 더 영리하게 바꿀 수도 있지만 그럴 필요는 없어요 + +02:50.440 --> 02:54.130 +아니면 가장 가능성이 높은 걸 선택해도 되고요 + +02:54.340 --> 02:57.550 +모델 출력이 잘 설명했고요 + +02:57.580 --> 02:59.470 +이제 확실히 아셨길 바라요 + +02:59.680 --> 03:02.050 +그리고 상실 함수요 + +03:02.050 --> 03:04.330 +지난 영상에서는 살짝 얼버무렸어요 + +03:04.360 --> 03:05.840 +손해 보는 것도 모르시네요 + +03:05.840 --> 03:07.550 +얼마나 심각했어요? 
+ +03:07.550 --> 03:09.950 +실제론 어떤 의미일까요? + +03:09.980 --> 03:11.990 +놀랍도록 간단해요 + +03:11.990 --> 03:15.890 +가능한 다음 토큰의 모든 가능성을 갖고 있죠 + +03:16.010 --> 03:21.200 +그래서 다음 토큰이 무엇이었는지 알고 있다고 하는 거죠 + +03:21.290 --> 03:23.690 +99개라고 가정해 보죠 + +03:23.690 --> 03:30.740 +이 모든 확률을 살펴보고 모델에서 99의 확률을 계산해 올바른 다음 토큰이 + +03:30.740 --> 03:33.350 +무엇인지 알아내는 거죠 + +03:33.350 --> 03:34.790 +그게 가장 중요하죠 + +03:34.790 --> 03:38.780 +중요한 건 정확한 결과를 낼 확률이 얼마냐는 거죠 + +03:39.080 --> 03:45.320 +만약 그게 100%의 확률이라면 완벽한 거죠 + +03:45.320 --> 03:48.050 +좋은 결과를 낼 거라고 100% 확신했어요 + +03:48.050 --> 03:51.320 +다른 건 다 0이어야 해요 확률은 1이 되니까요 + +03:51.320 --> 03:53.900 +그러니 정말 완벽하죠 + +03:53.900 --> 03:58.190 +만약 100% less라면 잘 안 된 거예요 + +03:58.400 --> 04:01.850 +확률이 낮을수록 더 안 좋은 결과가 나왔어요 + +04:02.000 --> 04:07.590 +그 확률을 따져보면 잘 되는 것 같은 공식은 그 숫자의 + +04:07.920 --> 04:11.910 +로그를 반증하는 거예요 + +04:11.910 --> 04:15.390 +1을 그 숫자의 로그에 곱해요 + +04:15.390 --> 04:20.430 +이것을 계산하면, 만약 1이라면, 100%의 확률이라면, 0이 + +04:20.430 --> 04:21.360 +되겠죠. + +04:21.360 --> 04:24.150 +손해 볼 게 없으니 좋은 거죠 + +04:24.180 --> 04:26.700 +완벽하다면 패배는 아무것도 아니죠 + +04:27.090 --> 04:32.340 +확률이 낮을수록 손실률도 높아지는 거죠 + +04:32.340 --> 04:42.840 +확률을 부정적으로 기록하는 건 손실 함수를 잘 활용하는 방법이죠 + +04:43.230 --> 04:44.670 +거창한 명칭도 있죠 + +04:44.670 --> 04:48.030 +이 손실 함수를 교차 엔트로피 손실이라고 해요 + +04:48.060 --> 04:49.200 +그렇게 부르더군요 + +04:49.200 --> 04:54.330 +다음 토큰의 참일 확률의 마이너스 로그일 뿐이죠 + +04:54.660 --> 04:57.120 +그걸 사용했어요 + +04:57.120 --> 05:01.410 +훈련을 계속한다면 지금 그걸 이용하는 거예요 각 예측의 + +05:01.410 --> 05:04.860 +교차 엔트로피 손실을 계산하는 거죠 + +05:05.190 --> 05:07.890 +그리고 덧붙일 말이 있어요 + +05:07.920 --> 05:11.790 +이것에 대한 해석이 있어요 특히 데이터 과학자들에겐요 + +05:11.790 --> 05:17.700 +이건 분류에 사용되는 계산입니다 뭔가를 다른 빈으로 분류하려고 할 때요 + +05:17.700 --> 05:22.860 +예전에 이미지를 4-5가지 다른 범주로 분류하려고 했던 때가 + +05:22.860 --> 05:28.230 +있었죠 교차 엔트로피 손실요 확률을 생각해내고 교차 엔트로피 + +05:28.230 --> 05:33.210 +손실을 이용해서 분류를 잘했는지 아닌지 알아내요 + +05:33.630 --> 05:39.930 +사실, 말이 되는 것이 다음 토큰을 예측하는 전체 프로세스는 분류 문제이기 + +05:39.930 --> 05:41.580 +때문이죠 + +05:41.580 --> 05:48.000 
+다음 토큰이 될 수 있는 카테고리는 아주 많다는 거죠? + +05:48.030 --> 05:50.880 +사실, 가능한 모든 다음 패가 거기 있어요 + +05:50.880 --> 05:57.660 +어떤 버킷이 다음 토큰에 가장 잘 맞을지 예측하는 거죠. + +05:57.660 --> 06:03.690 +따라서 인공지능이 생성되는 과정은 사실 분류 문제일 뿐이에요 + +06:03.690 --> 06:09.540 +다음 토큰을 분류하고 다음 토큰이 무엇인지 밝혀질 확률을 계산하는 + +06:09.540 --> 06:10.200 +거죠 + +06:10.690 --> 06:18.160 +흥미롭게도 우리 프로젝트의 가격 예측은 몇 년 전에 말씀드렸듯이 정말 + +06:18.280 --> 06:20.320 +퇴보하고 있어요 + +06:20.320 --> 06:25.390 +숫자를 예측하려 하고 분류 문제로 취급하는 거죠 그건 괜찮아요 + +06:25.390 --> 06:31.910 +왜냐하면 0과 999 사이의 숫자가 될 테니까요 죄송합니다 1과 999 + +06:31.910 --> 06:36.670 +사이 가능한 999개의 양동이들이죠 + +06:36.670 --> 06:42.790 +모든 제품을 이 999개의 양동이에 효과적으로 분류하려는 거예요 + +06:42.790 --> 06:45.850 +그래서 분류 문제로서 꽤 효과적인 거죠 + +06:45.850 --> 06:52.690 +그래서 개척 모델들이 잘 되고 있고 우리의 오픈 소스 모델도 + +06:52.690 --> 06:55.120 +잘 되길 바라는 거죠 + +06:55.390 --> 07:01.030 +이 모든 걸 종합해 보면 유용한 이론이 뒷받침되겠죠 + +07:01.030 --> 07:04.060 +하지만 지금은 다시 연습할 때예요 get get it + +07:04.060 --> 07:12.730 +그걸 염두에 두고 오픈 소스 미세 조정은 어떻게 돼가는지 얘기해보죠 diff --git a/week5/community-contributions/subtitles/srts/59667357/en_US.srt b/week5/community-contributions/subtitles/srts/59667357/en_US.srt new file mode 100755 index 0000000..9ba439f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59667357/en_US.srt @@ -0,0 +1,187 @@ +WEBVTT + +00:00.740 --> 00:03.290 +Let's now see our results side by side. + +00:03.290 --> 00:10.970 +We started our journey with a constant model that was at $1.46 error from the average of all of the + +00:10.970 --> 00:11.330 +data. + +00:11.330 --> 00:17.390 +In the training data set, we looked at a traditional machine learning model with just a few features. + +00:17.390 --> 00:18.590 +It got to 1.39. + +00:18.590 --> 00:20.810 +It beat the average random forest. + +00:20.810 --> 00:24.110 +The best of our traditional machine learning models got to 97. + +00:24.140 --> 00:35.270 +The human got to 127, GPT four got to 76, and our fine tuned customized model has got us to $47. 
+
+00:35.390 --> 00:39.860
+And this, of course, brings me to the challenge for you.
+
+00:39.890 --> 00:44.330
+The challenge for you is to improve on this $47.
+
+00:44.450 --> 00:50.120
+Uh, this is something where hyperparameter optimization can go a long way.
+
+00:50.180 --> 00:57.950
+And so the task at hand is now to play with the hyperparameters experiment, use weights and biases.
+
+00:57.980 --> 01:03.920
+Uh, explore maybe different optimizers, different learning rates, different batch sizes, and see
+
+01:03.920 --> 01:06.440
+what you can do to improve on this number.
+
+01:06.440 --> 01:11.130
+You can also explore different ways of running the model at inference time to see if that improves it.
+
+01:11.130 --> 01:18.810
+And then there is one factor that is actually perhaps the way that you can make the biggest impact
+
+01:18.810 --> 01:25.500
+on the results with the smallest change is to relook one more time at the data set, at the data curation
+
+01:25.500 --> 01:31.560
+step, and challenge yourself to see whether you can think of different ways to be prompting or organizing
+
+01:31.560 --> 01:36.690
+that information in order to be getting better outcomes.
+
+01:36.690 --> 01:42.390
+And then there is one final thing that you could do, which is a bigger step, but very exciting.
+
+01:42.390 --> 01:47.400
+And if you don't do it, I'm definitely going to do it, which is to try the other models on this.
+
+01:47.400 --> 01:54.480
+Try Gemma, try Qwen the powerhouse, try Phi-3, see how they perform.
+
+01:54.720 --> 02:00.360
+There are a couple of places where it might be a bit fiddly, because they might not predict one token
+
+02:00.360 --> 02:04.380
+as the right number, just maybe that's only at that inference function.
+
+02:04.380 --> 02:06.420
+Otherwise everything else might just be fine.
+
+02:06.540 --> 02:10.470
+But you'll need to experiment with that and convince yourself that it's so good.
+
+02:10.740 --> 02:12.990
+So try some different models.
+
+02:12.990 --> 02:19.040
+You could also try doing the whole thing with a version of Llama 3 that's quantized to eight bits
+
+02:19.040 --> 02:22.100
+instead of four bits, depending on your appetite for that.
+
+02:22.100 --> 02:23.690
+There are also some larger models.
+
+02:23.690 --> 02:29.540
+There is a version of Phi-3, I think, that is 14 billion parameters that you could experiment with and
+
+02:29.540 --> 02:31.250
+see whether that improves things.
+
+02:31.250 --> 02:33.590
+So that is the objective.
+
+02:33.590 --> 02:37.850
+I would love to hear from the first person that can get this below 40.
+
+02:37.880 --> 02:39.290
+That has to be possible.
+
+02:39.290 --> 02:45.440
+I think there is like a hard limit on how low you can get it, given the reality of uncertainty in pricing,
+
+02:45.440 --> 02:50.030
+but I think you guys, someone is going to be able to get it below $40.
+
+02:50.030 --> 02:57.650
+You'll build a model that can get within $40 across the 251 items in the test set, and I can't wait
+
+02:57.650 --> 02:58.520
+to hear about that.
+
+02:58.520 --> 03:04.130
+So please do reach out and tell me when you get below 40, and tell me your hyperparameters and your
+
+03:04.130 --> 03:06.620
+model so that I can try and recreate it myself.
+
+03:06.650 --> 03:08.270
+I would love that.
+
+03:08.750 --> 03:11.750
+And with that, let's wrap up for the week.
+
+03:12.470 --> 03:19.760
+It's the end now of week seven where you can now do, of course, obviously generating text and code
+
+03:19.760 --> 03:26.520
+with frontier APIs and with open source models and Hugging Face, you can solve problems including dataset
+
+03:26.550 --> 03:29.190
+curation, a baseline model, and fine tuning.
+ +03:29.190 --> 03:35.550 +And at this point, you can confidently carry out the full process for selecting and training an open + +03:35.550 --> 03:39.570 +source model that can outperform the frontier. + +03:39.570 --> 03:41.940 +And that's a big accomplishment. + +03:42.540 --> 03:48.360 +So next week is the finale of this course, and I promise you, I've kept the best to last. + +03:48.360 --> 03:50.250 +It's going to be a triumph. + +03:50.430 --> 03:52.620 +Next week is going to be so much fun. + +03:52.620 --> 03:54.660 +You've you've got this far. + +03:54.690 --> 03:59.100 +Hang on in there to the very end to see everything come together. + +03:59.130 --> 04:03.150 +There's some stuff that's really important we're going to do about now packaging up what we've done + +04:03.150 --> 04:09.120 +and being able to deploy it as behind an API so we can use it for production purposes, and then really + +04:09.120 --> 04:16.140 +package everything into an application that can can make a real impact. + +04:16.440 --> 04:22.320 +And at that point, you'll be in a position to be creating your own end to end solutions to commercial + +04:22.320 --> 04:23.010 +problems. + +04:23.010 --> 04:27.240 +Using groundbreaking llms that you'll be able to train yourself. + +04:27.300 --> 04:31.740 +So there's a lot ahead, and I can't wait for next week. + +04:31.830 --> 04:34.110 +And as always, I will see you there. diff --git a/week5/community-contributions/subtitles/srts/59667357/ja_JP.srt b/week5/community-contributions/subtitles/srts/59667357/ja_JP.srt new file mode 100755 index 0000000..8722345 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59667357/ja_JP.srt @@ -0,0 +1,151 @@ +WEBVTT + +00:00.740 --> 00:03.290 +結果を並べてみよう。 + +00:03.290 --> 00:11.330 +私たちは1ドルのコンスタントモデルから旅を始めた。 全データの平均からの誤差46。 + +00:11.330 --> 00:17.390 +トレーニングデータセットでは、 数個の特徴を持つ従来の機械学習モデルを見た。 + +00:17.390 --> 00:18.590 +1.0になった。 39. 
+ +00:18.590 --> 00:20.810 +平均的なランダムフォレストを上回った。 + +00:20.810 --> 00:24.110 +私たちの伝統的な機械学習モデルの最高峰が97に到達した。 + +00:24.140 --> 00:35.270 +人間は127ドル、 GPT4は76ドル、 そして我々の微調整されたカスタマイズモデルは47ドルに達した。 + +00:35.390 --> 00:39.860 +そして、 これはもちろん、 あなたへの挑戦につながる。 + +00:39.890 --> 00:44.330 +あなたの課題は、 この47ドルを改善することだ。 + +00:44.450 --> 00:50.120 +これはハイパーパラメーターの最適化で解決できることだ。 + +00:50.180 --> 00:57.950 +そうして今手元にあるタスクは、 ハイパーパラメータを実験的に弄って、 重みとバイアスを使うことだ。 + +00:57.980 --> 01:06.440 +異なるオプティマイザー、 異なる学習レート、 異なるバッチサイズを検討し、 この数値を改善するために何ができるかを見てみよう。 + +01:06.440 --> 01:11.130 +また、 推論時にモデルを実行する方法を変えて、 それが改善されるかどうかを調べることもできる。 + +01:11.130 --> 01:18.810 +そして、 おそらく最も小さな変化で結果に大きな影響を与えることができる方法のひとつが、 + +01:18.810 --> 01:31.560 +データセットとデータ・キュレーションのステップをもう一度見直し、 より良い結果を得るために情報を促したり整理したりする別の方法を考えられないか、 + +01:31.560 --> 01:36.690 +自分自身に挑戦することである。 + +01:36.690 --> 01:42.390 +そして最後にもうひとつ、 より大きなステップになるが、 とてもエキサイティングなことがある。 + +01:42.390 --> 01:47.400 +もしあなたがやらないなら、 私は絶対にやるつもりだ。 + +01:47.400 --> 01:54.480 +ジャマを試し、 パワーハウスのクォンを試し、 フィの3人を試し、 彼らのパフォーマンスを見る。 + +01:54.720 --> 02:04.380 +トークンを正しい数として予測できないかもしれないからだ。 + +02:04.380 --> 02:06.420 +それ以外は問題ないかもしれない。 + +02:06.540 --> 02:10.470 +でも、 それを試してみて、 それがとても美味しいということだと自分自身を納得させる必要がある。 + +02:10.740 --> 02:12.990 +だから、 いろいろなモデルを試してみてほしい。 + +02:12.990 --> 02:22.100 +また、 4ビットではなく8ビットに量子化されたllama threeのバージョンで全体をやってみることもできる。 + +02:22.100 --> 02:23.690 +もっと大きなモデルもある。 + +02:23.690 --> 02:31.250 +確か140億のパラメータを持つ53のバージョンがあったと思うが、 それを試してみて、 改善されるかどうかを確認することができるだろう。 + +02:31.250 --> 02:33.590 +それが目的だ。 + +02:33.590 --> 02:37.850 +これを40以下にできる最初の人の意見を聞きたい。 + +02:37.880 --> 02:39.290 +それは可能なはずだ。 + +02:39.290 --> 02:50.030 +価格決定の不確実性という現実を考えれば、 どこまで安くできるかは難しい限界のようなものがあると思う。 + +02:50.030 --> 02:58.520 +テストセットの251番目のアイテムで40ドル以内に収まるモデルを作るんだ。 + +02:58.520 --> 03:06.620 +だから、 40を切ったら、 ぜひ私に連絡して、 あなたのハイパーパラメーターとモデルを教えてください。 + +03:06.650 --> 03:08.270 +私はそれが大好きだ。 + +03:08.750 --> 03:11.750 +それでは、 今週を締めくくろう。 + +03:12.470 --> 03:19.760 +もちろん、 フロンティアAPIを使ってテキストやコードを生成したり、 オープンソースのモデルやハグフェイスを使って、 + 
+03:19.760 --> 03:29.190 +データセットのキュレーションやベースラインモデル、 微調整などの問題を解決することもできる。 + +03:29.190 --> 03:39.570 +そしてこの時点で、 フロンティアを凌駕するオープンソースモデルの選択とトレーニングの全プロセスを自信を持って実行することができる。 + +03:39.570 --> 03:41.940 +そしてそれは大きな成果だ。 + +03:42.540 --> 03:48.360 +来週がこのコースのフィナーレだが、 約束しよう。 + +03:48.360 --> 03:50.250 +勝利になるだろう。 + +03:50.430 --> 03:52.620 +来週はとても楽しくなりそうだ。 + +03:52.620 --> 03:54.660 +ここまで来たのだから。 + +03:54.690 --> 03:59.100 +最後まで頑張って、 すべてがひとつになるのを見届けよう。 + +03:59.130 --> 04:03.150 +僕らがやってきたことをパッケージングして、 + +04:03.150 --> 04:09.120 +APIの後ろにデプロイできるようにして、 + +04:09.120 --> 04:16.140 +本番で使えるようにする。 + +04:16.440 --> 04:23.010 +そしてその時点で、 商業的な問題に対する自分自身のエンド・ツー・エンドの解決策を生み出すことができるようになる。 + +04:23.010 --> 04:27.240 +画期的なllmsを使い、 自分でトレーニングできるようになる。 + +04:27.300 --> 04:31.740 +来週が待ちきれないよ。 + +04:31.830 --> 04:34.110 +そしていつものように、 そこで会おう。 diff --git a/week5/community-contributions/subtitles/srts/59667357/ko_KR.srt b/week5/community-contributions/subtitles/srts/59667357/ko_KR.srt new file mode 100755 index 0000000..49f44e1 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59667357/ko_KR.srt @@ -0,0 +1,184 @@ +WEBVTT + +00:00.740 --> 00:03.290 +이제 결과를 나란히 보죠 + +00:03.290 --> 00:11.330 +1달러짜리 모델로 여정을 시작했어요 데이터 평균에서 46개 오류예요 + +00:11.330 --> 00:17.390 +훈련 데이터 세트에서 전통적인 머신 러닝 모델을 봤습니다 몇 가지 기능만 있었죠 + +00:17.390 --> 00:18.590 +1까지 갔어요 39살요 + +00:18.590 --> 00:20.810 +일반적인 숲을 능가하죠 + +00:20.810 --> 00:24.110 +전통적인 머신 러닝 모델 중 최고는 97점이었어요 + +00:24.140 --> 00:35.270 +참가자는 127점을 받았고 GPT는 4점을 받았습니다 미세하게 개조한 모델은 47점을 받았죠 + +00:35.390 --> 00:39.860 +그래서 여러분에게 도전 과제를 드리려고 해요 + +00:39.890 --> 00:44.330 +47달러를 더 벌어야 해요 + +00:44.450 --> 00:50.120 +하이퍼파라미터 최적화가 큰 도움이 될 거예요 + +00:50.180 --> 00:57.950 +이제 하이퍼파라미터 실험을 진행할 차례입니다 무게와 바이어스를 사용하는 작업이죠 + +00:57.980 --> 01:03.920 +다양한 최적화 방법과 학습률 다양한 배치 크기를 고려해서 이 수치를 + +01:03.920 --> 01:06.440 +개선할 방법을 찾아보세요 + +01:06.440 --> 01:11.130 +모델을 추론 시간에 실행하는 다양한 방법을 탐구할 수도 있습니다 개선되었는지 보기 위해서요 + +01:11.130 --> 01:18.810 +그리고 한 가지 요인이 더 있습니다 결과에 가장 큰 영향을 주는 방법으로 가장 작은 변화도 + 
+01:18.810 --> 01:25.500 +만들 수 있는 거죠 데이터 세트와 데이터 큐레이션 단계를 다시 한 번 살펴보고 + +01:25.500 --> 01:31.560 +정보를 프레젠테이션하거나 조직화하는 다양한 방법을 생각할 수 있는지 + +01:31.560 --> 01:36.690 +도전하는 거예요 더 나은 결과를 위해서요 + +01:36.690 --> 01:42.390 +마지막으로 할 수 있는 게 하나 더 있어요 더 큰 단계지만 아주 흥미진진하죠 + +01:42.390 --> 01:47.400 +안 하면 제가 할 거예요 다른 모델도 써 보려고요 + +01:47.400 --> 01:54.480 +JAM이나 권 장군을 써봐요 어떻게 하는지 보자고요 + +01:54.720 --> 02:00.360 +좀 성가신 부분도 있어요 토큰 하나가 올바른 숫자라는 예측을 못 할 수도 + +02:00.360 --> 02:04.380 +있거든요 비트는 추론 함수에서만 가능하니까요 + +02:04.380 --> 02:06.420 +그것만 빼면 다 괜찮을 거예요 + +02:06.540 --> 02:10.470 +하지만 실험해 보고 맛있다고 자신을 설득해야 해요 + +02:10.740 --> 02:12.990 +다른 모델도 시도해 보세요 + +02:12.990 --> 02:19.040 +라마3를 전체 버전으로 만들어도 돼요 4비트가 아니라 8비트로 수량화한 + +02:19.040 --> 02:22.100 +거죠 여러분의 입맛에 따라서요 + +02:22.100 --> 02:23.690 +큰 모델도 있어요 + +02:23.690 --> 02:29.540 +53번 버전도 있어요 140억 개의 매개 변수를 실험해보고 개선되는지 + +02:29.540 --> 02:31.250 +볼 수 있죠 + +02:31.250 --> 02:33.590 +그게 목표예요 + +02:33.590 --> 02:37.850 +Get 4만 이하로 맞추는 첫 번째 분께 듣고 싶네요 + +02:37.880 --> 02:39.290 +그게 가능해야 해요 + +02:39.290 --> 02:45.440 +불확실한 가격을 생각하면 얼마나 낮출 수 있는지에 엄격한 제한이 있는 것 같아요 하지만 + +02:45.440 --> 02:50.030 +누군가는 40달러 이하로 낮출 수 있을 거예요 Get it + +02:50.030 --> 02:57.650 +테스트 세트에서 251번째 아이템에 대해 40달러 이내로 접근할 수 있는 모델을 구축합니다. 빨리 들어보고 + +02:57.650 --> 02:58.520 +싶네요. 
+ +02:58.520 --> 03:04.130 +그러니 40 이하로 떨어지면 연락 주세요 hyperpaameter와 모델을 + +03:04.130 --> 03:06.620 +알려 주시면 제가 직접 만들어 볼게요 + +03:06.650 --> 03:08.270 +그럼 좋죠 + +03:08.750 --> 03:11.750 +이것으로 이번 주를 마무리하죠 + +03:12.470 --> 03:19.760 +7주 차가 끝났으니 이제 프론티어 API로 텍스트와 코드 생성 오픈 + +03:19.760 --> 03:26.520 +소스 모델 얼굴 포옹 데이터셋 큐레이션 기본 모델 미세 조정 같은 + +03:26.550 --> 03:29.190 +문제를 해결할 수 있죠 + +03:29.190 --> 03:35.550 +이 시점에서는 전 과정을 자신 있게 수행할 수 있습니다 한계를 뛰어넘을 + +03:35.550 --> 03:39.570 +오픈 소스 모델을 선택하고 훈련하는 거죠 + +03:39.570 --> 03:41.940 +대단한 성과죠 + +03:42.540 --> 03:48.360 +다음 주에 이 과정의 결승전이 있어요 장담하는데 최고의 요리를 준비했죠 + +03:48.360 --> 03:50.250 +대성공일 거예요 + +03:50.430 --> 03:52.620 +다음 주는 정말 재미있을 거예요 + +03:52.620 --> 03:54.660 +여기까지 왔잖아요 + +03:54.690 --> 03:59.100 +끝까지 버텨서 모든 게 잘 어우러지는 걸 봐야죠 + +03:59.130 --> 04:03.150 +우리가 할 아주 중요한 게 몇 가지 있어요 지금까지 한 걸 패키징하고 + +04:03.150 --> 04:09.120 +API 뒤에서 배포할 수 있도록 하는 거죠 프로덕션 목적으로 사용할 수 있도록요 그런 + +04:09.120 --> 04:16.140 +다음 모든 걸 응용 프로그램에 패키지하는 거죠 실제 영향을 줄 수 있는 응용 프로그램에요 + +04:16.440 --> 04:22.320 +그 시점에 여러분은 상업적 문제에 대한 자신만의 해결책을 만들 수 있는 위치에 서게 + +04:22.320 --> 04:23.010 +되죠 + +04:23.010 --> 04:27.240 +획기적인 llm을 사용해 스스로 훈련할 수 있죠 + +04:27.300 --> 04:31.740 +할 일이 많아요 다음 주가 기대돼요 + +04:31.830 --> 04:34.110 +늘 그렇듯 거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/59667365/en_US.srt b/week5/community-contributions/subtitles/srts/59667365/en_US.srt new file mode 100755 index 0000000..c8960dd --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59667365/en_US.srt @@ -0,0 +1,580 @@ +WEBVTT + +00:01.160 --> 00:10.100 +Here we are back in Colab, looking at the week seven, day five of the Colab notebooks and I'm on a + +00:10.100 --> 00:12.980 +T4 box, so it's a low end cheap box. + +00:12.980 --> 00:18.260 +That's all that's required since we are doing inference today, not training. + +00:18.590 --> 00:22.070 +Um, or the training is still happening in this other tab, as you can see. 
+ +00:22.070 --> 00:30.080 +Uh, as, as as we speak, uh, we start with a few installs and then some imports with the usual kind + +00:30.110 --> 00:30.890 +of stuff. + +00:30.890 --> 00:34.190 +And let me just tell you about the constants that we've got here. + +00:34.340 --> 00:39.380 +Um, the base model, of course, llama 3.1 project name is Preiser, the hugging face user. + +00:39.380 --> 00:41.450 +So you choose here, you can put in. + +00:41.480 --> 00:47.480 +Well, I hope that you'll be putting in your name and you'll be running this inference, this test against + +00:47.480 --> 00:50.960 +the model that you have fine tuned and uploaded to the Hugging Face Hub. + +00:51.140 --> 00:58.730 +It is possible, though, that you have, uh, either, uh, lost patience and or you just want to see + +00:58.730 --> 01:03.680 +how my one did, in which case you can keep my name in there because this model will be public. + +01:03.680 --> 01:06.980 +So you will be able to run against this model too. + +01:07.430 --> 01:10.460 +Um, and I've selected in here the run name. + +01:10.460 --> 01:11.210 +This is the run name. + +01:11.210 --> 01:17.750 +You may recognize that that 39 there is the run name in hugging face of the one that I ran for multiple + +01:17.780 --> 01:19.220 +epochs in the past. + +01:19.430 --> 01:25.940 +Um, and this revision, this is where I am specifying which of the different checkpoints I am selecting. + +01:25.940 --> 01:29.720 +I'm selecting the one before it started to badly overfit. + +01:29.780 --> 01:34.640 +Um, and this was the one where it was still getting good, good results. + +01:35.150 --> 01:39.200 +Um, and then this becomes the name of my fine tuned model. + +01:39.200 --> 01:40.880 +It is, of course, the hugging face name. + +01:40.880 --> 01:41.270 +I'm sorry. + +01:41.270 --> 01:42.590 +I should make this, uh. + +01:43.190 --> 01:45.080 +Otherwise, I'm hard coding my name in here. 
+ +01:45.080 --> 01:49.220 +But what I'll do is I will make two versions of this. + +01:49.220 --> 01:55.340 +One will be for the hugging face user that you have entered in. + +01:59.120 --> 02:14.500 +And the other one I will comment out and I will say uncomment this line if you should use my model. + +02:16.810 --> 02:22.000 +Uh, and either of course, if you're using your model, you'll need to change the run name and the + +02:22.000 --> 02:24.100 +revision to match whatever you're using. + +02:24.100 --> 02:27.340 +And you can start by not putting in put revision equals none. + +02:27.730 --> 02:36.610 +Um, or revision equals none if you're not using a revision at all okay. + +02:36.610 --> 02:43.120 +And then for the data set uh, either again we load in the data set that you have carefully, lovingly + +02:43.120 --> 02:45.550 +curated and uploaded to the Huggingface hub. + +02:45.700 --> 02:49.420 +Uh, or you can just use my one should you prefer. + +02:49.690 --> 02:55.630 +Um, and now and by the way, if you have gone with the lower cost version of this and you've trained + +02:55.630 --> 03:01.170 +your model for appliances only for home appliances, then of course you should be filling in the light + +03:01.200 --> 03:07.470 +data set that you'll have built and your model for for home appliances, and you will get similar kinds + +03:07.470 --> 03:08.700 +of results. + +03:09.630 --> 03:11.520 +Quant for bit is true. + +03:11.550 --> 03:18.000 +We are quantizing and then you may remember these are the nice ways that we can print colorful lines + +03:18.000 --> 03:19.410 +to the output. + +03:19.740 --> 03:20.280 +Okay. + +03:20.310 --> 03:22.290 +Then we log in to hugging face. + +03:22.680 --> 03:24.090 +You're used to this now. + +03:24.240 --> 03:27.000 +We don't need to log into weights and biases because we're not training. + +03:27.000 --> 03:29.400 +And then we load in the data set. 
+ +03:29.400 --> 03:35.280 +And as well, you know, at this point if I look at the first training data set, we won't be using + +03:35.280 --> 03:36.030 +it anymore. + +03:36.030 --> 03:38.790 +But it has the price baked into it. + +03:38.820 --> 03:39.750 +It looks like this. + +03:39.780 --> 03:44.640 +We will of course now be using the test data set which looks like this. + +03:44.640 --> 03:46.800 +The text does not have the price. + +03:46.800 --> 03:50.940 +The price is only in the answer which is not given to the model. + +03:50.940 --> 03:54.060 +It's only given this text as well. + +03:54.090 --> 03:59.850 +You can double triple check in a moment when we get to the part that we'll be actually doing this prediction, + +04:00.490 --> 04:04.420 +It would be a bit of a gaffe, wouldn't it, if we were accidentally passing in the price itself? + +04:04.690 --> 04:06.010 +But we're not. + +04:06.070 --> 04:13.300 +Okay, so then first of all, it's time to it's time to load in our tokenizer and our fine tuned model. + +04:13.300 --> 04:17.110 +So we first pick the right kind of quantization. + +04:17.140 --> 04:19.180 +You're familiar with this same as before. + +04:19.210 --> 04:21.370 +This is also the same as before. + +04:21.490 --> 04:22.960 +Well with a slight difference just here. + +04:22.960 --> 04:24.460 +But we load in the tokenizer. + +04:24.460 --> 04:28.540 +We put in that boilerplate stuff to set up some of its parameters. + +04:28.540 --> 04:33.850 +We load in the base model as before using the right quant config. + +04:33.880 --> 04:36.070 +And we have got that one liner again. + +04:36.070 --> 04:39.280 +And now this is new. + +04:39.280 --> 04:42.850 +So we are now loading something called a left model. + +04:43.090 --> 04:46.510 +If you remember stands for parameter efficient fine tuning. + +04:46.510 --> 04:51.850 +It's the name of the package which which which has coded Laura. 
+ +04:52.120 --> 04:57.850 +So a left model represents it's a hugging face model that represents a model that has a base. + +04:57.850 --> 05:01.190 +And then it's got some adapter applied on top of the base. + +05:01.760 --> 05:02.990 +And so that is what we load. + +05:03.020 --> 05:05.090 +Now you call that with Frompretrained. + +05:05.090 --> 05:11.930 +And you can pass in the base model, the fine tuned model name which we set up above. + +05:11.930 --> 05:13.820 +And then a revision if you wish. + +05:13.820 --> 05:17.780 +So if revision is not null none then I pass it in. + +05:17.780 --> 05:20.570 +Otherwise we just don't don't don't bother passing it in. + +05:21.140 --> 05:26.570 +Um and so that will load in our fine tuned model. + +05:26.570 --> 05:28.670 +And at the end of that we'll print the memory footprint. + +05:28.700 --> 05:35.900 +You may remember the memory footprint was what, 5.6GB before and now it is 5.7. + +05:35.930 --> 05:44.930 +It's 5700MB because there's that extra 100MB or 109MB of our Laura adapters. + +05:45.020 --> 05:48.950 +Um, our Laura A's and Laura B's in there. + +05:49.070 --> 05:53.540 +Uh, one more time, we can just print this fine tuned model. + +05:53.540 --> 05:56.750 +You may remember we did this right back in week. + +05:56.750 --> 06:01.240 +Uh, in day two, when I mentioned we were, we were taking a look into the future because I was using + +06:01.240 --> 06:01.480 +this. + +06:01.480 --> 06:04.120 +This model itself was the one that we looked at. + +06:04.120 --> 06:05.620 +And this is how it appears. + +06:05.620 --> 06:10.630 +If you remember this, you can see all the different layers of the neural network, and you can see + +06:10.630 --> 06:17.830 +that when you get to these, the, the tension layers, that there's a dropout layer in there. + +06:17.830 --> 06:20.860 +Now, you know all about dropout with 10% probability of dropouts. 
+ +06:20.860 --> 06:25.240 +And then there's Laura A and Laura B that are in there as well. + +06:25.480 --> 06:33.190 +Um, and uh, yeah, you can see that Laura A and Laura B are for all of the layers that have been adapted, + +06:33.220 --> 06:34.690 +our target modules. + +06:34.690 --> 06:41.230 +And you also worth just noting down at the very end here, the LM head, since I just talked about that, + +06:41.260 --> 06:50.740 +this is the final, uh, the, the, the final fully connected layer that outputs the logits, the number + +06:50.740 --> 06:58.630 +for each of the possible, uh, vocab token vocab entries, um, which will then go into a softmax in + +06:58.630 --> 07:01.540 +order to predict the probability of the next token. + +07:02.560 --> 07:03.670 +All right. + +07:04.360 --> 07:05.320 +Are you ready? + +07:05.590 --> 07:09.520 +So, uh, we're going to go in and run inference. + +07:09.610 --> 07:18.280 +Uh, the, uh, I want to give you one more time, a quick, uh, memory that GPT four zero got to $76 + +07:18.310 --> 07:21.100 +llama 3.1 base model. + +07:21.100 --> 07:24.670 +This this untrained model was $396. + +07:24.670 --> 07:25.900 +Very disappointing. + +07:26.140 --> 07:34.450 +Uh, this human being here got 127, uh, as my error, uh, and very much hoping to see that llama + +07:34.480 --> 07:36.370 +can beat a human. + +07:36.670 --> 07:43.090 +Um, as an open source model, it is important to keep in mind I don't, uh, in case you're expecting + +07:43.090 --> 07:50.290 +something, uh, crazy here, that prices of things have a lot of volatility, and the model doesn't + +07:50.290 --> 07:51.430 +know anything about that. + +07:51.430 --> 07:56.470 +It's not going to know if the price of a product has been slashed, uh, because it's on sale by by + +07:56.470 --> 07:57.250 +a huge amount. 
+ +07:57.250 --> 08:02.370 +So there is a natural big variation in these product prices, as I discovered when I was trying to do + +08:02.370 --> 08:04.860 +it for myself and got wildly out. + +08:04.860 --> 08:07.290 +This is this is it's a very difficult challenge. + +08:07.290 --> 08:10.350 +You might think that it sounds like a it's not that hard. + +08:10.350 --> 08:11.190 +It is very hard. + +08:11.220 --> 08:12.840 +Try it for yourself and you'll see. + +08:13.320 --> 08:14.820 +Um, okay. + +08:14.970 --> 08:17.040 +With that caveat in mind, let's keep going. + +08:17.040 --> 08:23.340 +So extract price is the function that you, you know, well it takes a string, it looks for price is + +08:23.340 --> 08:23.820 +dollars. + +08:23.820 --> 08:30.600 +And then it finds the number that comes at any point after that one more time, let's just satisfy ourselves, + +08:30.630 --> 08:34.410 +extract price and put in a string. + +08:34.410 --> 08:46.260 +Price is dollars a fabulous, uh, eight, nine nine, 99 or so, whatever I want to say and out comes + +08:46.260 --> 08:47.160 +the price. + +08:47.490 --> 08:48.870 +Uh, I'm sure you get it. + +08:48.990 --> 08:51.060 +Uh, so that's extract price. + +08:51.240 --> 08:59.540 +Uh, and then this is the model predict function, the function that we used before, um, that takes + +08:59.540 --> 09:01.670 +the the inputs. + +09:01.940 --> 09:04.460 +Um, that takes the attention mask. + +09:04.460 --> 09:06.980 +Is that thing I told you about that you use to avoid it? + +09:06.980 --> 09:11.690 +Throwing a warning and to make it very clear that we don't need it to predict the the most of the input + +09:11.690 --> 09:12.440 +prompt. + +09:12.890 --> 09:20.570 +Um, and then the outputs we call generate on the, on the fine tuned model, we pass in the inputs. 
+ +09:20.600 --> 09:26.600 +We pass in this attention mask, we only need up to three new tokens because we're going to get the + +09:26.600 --> 09:29.270 +the next token is really going to be the one that we care about. + +09:29.270 --> 09:33.530 +But we'll, we'll put in some more just to make sure if, if it makes some horrible mistake that we + +09:33.560 --> 09:39.050 +capture that, um, and then we take, we say only one response, please, we take that one response + +09:39.050 --> 09:40.700 +and we extract the price. + +09:41.150 --> 09:46.580 +Now, as it happens, we can do a little bit better than this prediction function. + +09:46.580 --> 09:48.770 +This doesn't make a whole massive amount of difference. + +09:48.770 --> 09:54.080 +But but since, since, since we've got so much control over this model, we can actually do something + +09:54.110 --> 09:58.010 +a bit smarter with how we handle this next token. + +09:58.010 --> 09:59.720 +And so I've written this function. + +09:59.720 --> 10:02.150 +That's an improved model predict function. + +10:02.150 --> 10:06.320 +Improved model predict uh which is um, yeah. + +10:06.350 --> 10:09.110 +It's just um, it's a bit more involved. + +10:09.290 --> 10:18.230 +So, um, I guess, uh, I'll, uh, I'll just explain it in simple terms, but it's not super important. + +10:18.230 --> 10:24.290 +What it does is instead of just taking the most likely next token, it takes the most likely three next + +10:24.290 --> 10:27.260 +tokens, the three with the highest probability. + +10:27.500 --> 10:30.830 +Uh, and then it says, okay, what probability did you give for these three. + +10:30.830 --> 10:32.390 +And they represent real numbers. + +10:32.390 --> 10:39.530 +Like maybe you said it was very likely to be worth $100 and then a little bit less likely to be 99, + +10:39.530 --> 10:42.050 +but a lot more likely to be 101. + +10:42.230 --> 10:43.760 +But 100 was the most. 
+ +10:43.850 --> 10:47.060 +And then it just takes a weighted average between those three numbers. + +10:47.060 --> 10:52.310 +And that's a way for us to get a little bit more precise about what's it trying to predict. + +10:52.490 --> 10:57.770 +Um, and it allows it to predict something that's not necessarily always a whole number. + +10:58.000 --> 11:01.480 +Um, so it's a it's a technique I've used. + +11:01.480 --> 11:06.520 +It's sort of solving for the fact that we're treating what is really a regression problem as a classification + +11:06.520 --> 11:07.300 +problem. + +11:07.360 --> 11:11.950 +It's not super important that you know about this, but but and it doesn't make much difference if you + +11:11.950 --> 11:13.420 +use the function above. + +11:13.450 --> 11:14.800 +It just makes a bit of difference. + +11:15.010 --> 11:20.260 +But it is maybe worth looking through this if you're interested in these last layers of the neural network, + +11:20.290 --> 11:27.670 +because you can see that what I do is I take the outputs of the fine tuned model passing in the inputs, + +11:27.670 --> 11:37.150 +and these are considered the the logits that I mentioned this vector across all of the possible vocabulary + +11:37.210 --> 11:38.860 +entries for a tokenizer. + +11:38.860 --> 11:43.180 +And then I call softmax in order to convert that into probabilities. + +11:43.180 --> 11:46.270 +And then I go through the top three. + +11:46.270 --> 11:51.490 +And I just this is some some gumph that just takes the weighted average between those top three. + +11:51.610 --> 11:54.400 +It's weighted prices and sums up the weighted prices. + +11:54.400 --> 11:56.050 +And that's what it returns. + +11:56.050 --> 11:58.660 +So it's very similar to model predict. + +11:58.660 --> 12:03.940 +It just gives a slightly more accurate answer that's based on the top three predictions, not just the + +12:03.940 --> 12:05.260 +top prediction. 
+ +12:05.890 --> 12:09.100 +Uh, and so then we have our class tester. + +12:09.100 --> 12:12.700 +This is just exactly the same tester class that we've used before. + +12:12.940 --> 12:19.450 +Um, and it is worth just pointing out that the thing I mentioned before this, this is obviously this + +12:19.450 --> 12:21.250 +is the meat of the whole thing. + +12:21.250 --> 12:28.060 +When we take whatever functions passed in and we call it, and what we pass in is only the text associated + +12:28.060 --> 12:28.780 +with the data point. + +12:28.810 --> 12:30.730 +We obviously don't tell it the price. + +12:30.730 --> 12:35.800 +All it knows is the text, uh, so that it doesn't have any knowledge of the price. + +12:35.800 --> 12:37.330 +Of course, of course. + +12:37.720 --> 12:45.610 +Um, and then, uh, we then just call test a test where I'm going to use the improved function, and + +12:45.610 --> 12:53.710 +we pass in the test and like some kind of a soap opera, I'm now, of course, going to say we will + +12:53.710 --> 12:57.070 +get the results of this in the next video. + +12:57.100 --> 12:58.540 +I will see you there. 
diff --git a/week5/community-contributions/subtitles/srts/59667365/ja_JP.srt b/week5/community-contributions/subtitles/srts/59667365/ja_JP.srt new file mode 100755 index 0000000..a4fcb79 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59667365/ja_JP.srt @@ -0,0 +1,475 @@ +WEBVTT + +00:01.160 --> 00:12.980 +Colabに戻り、 Colabノートの7週目、 5日目を見ている。 + +00:12.980 --> 00:18.260 +今日はトレーニングではなく、 推論をするのだから、 必要なのはそれだけだ。 + +00:18.590 --> 00:22.070 +ええと、 トレーニングはまだこの別のタブで行われています。 + +00:22.070 --> 00:30.890 +今話しているように、 まずインストールから始めて、 それから通常のインポートをいくつかする。 + +00:30.890 --> 00:34.190 +そして、 ここにある定数についてお話ししましょう。 + +00:34.340 --> 00:39.380 +ええと、 ベースモデルはもちろんラマ3。 1 プロジェクト名は、 抱きつき顔の使い手、 プライザー。 + +00:39.380 --> 00:41.450 +だから、 ここで選んで、 入れることができる。 + +00:41.480 --> 00:47.480 +さて、 あなたの名前を入れて、 あなたが微調整してハグハブにアップロードしたモデルに対して、 この推論、 + +00:47.480 --> 00:50.960 +このテストを実行してくれることを願っている。 + +00:51.140 --> 01:03.680 +しかし、 あなたが、 あー、 我慢の限界に達したか、 あるいは私のものがどうだったかを見たいだけという可能性もある。 + +01:03.680 --> 01:06.980 +だから、 このモデルとも対戦できるだろう。 + +01:07.430 --> 01:10.460 +ええと、 ここでランの名前を選択しました。 + +01:10.460 --> 01:11.210 +これはランの名前である。 + +01:11.210 --> 01:19.220 +そこにある39番は、 私が過去に何度もエポックに走ったものの、 抱き合わせのランネームだとお分かりになるかもしれない。 + +01:19.430 --> 01:25.940 +ええと、 このリビジョンは、 どのチェックポイントを選択するかを指定するところです。 + +01:25.940 --> 01:29.720 +ひどくオーバーフィットし始める前のものを選んでいる。 + +01:29.780 --> 01:34.640 +うーん、 これはまだいい、 いい結果を出している方だった。 + +01:35.150 --> 01:39.200 +そして、 これが私の微調整モデルの名前になる。 + +01:39.200 --> 01:40.880 +もちろん、 ハグ顔の名前である。 + +01:40.880 --> 01:41.270 +ごめんなさい. 
+ +01:41.270 --> 01:42.590 +これを作らないといけない。 + +01:43.190 --> 01:45.080 +そうでなければ、 ここに自分の名前をハードコーディングする。 + +01:45.080 --> 01:49.220 +でも、 私がやることは、 これの2つのバージョンを作ることだ。 + +01:49.220 --> 01:55.340 +ひとつは、 あなたがエントリーした抱きつきフェイスのユーザー用。 + +01:59.120 --> 02:14.500 +もうひとつはコメントアウトして、 私のモデルを使う場合はこの行をコメントアウトしないようにします。 + +02:16.810 --> 02:24.100 +それから、 もしあなたが自分のモデルを使うのであれば、 ラン名とリビジョンをあなたが使っているものに合わせて変更する必要がある。 + +02:24.100 --> 02:27.340 +そして、 リビジョン・イコール・ノーンを入れないことから始めればいい。 + +02:27.730 --> 02:36.610 +リビジョンをまったく使わないのであれば、 リビジョン・イコール・ノーだ。 + +02:36.610 --> 02:45.550 +そして、 データセットについては......いずれにせよ、 あなたが注意深く、 愛情を込めてキュレーションし、 Huggingfaceのハブにアップロードしたデータセットを読み込みます。 + +02:45.700 --> 02:49.420 +私のものを使ってもいいよ。 + +02:49.690 --> 02:55.630 +さて、 ところで、 もしあなたがこれの廉価版を使って、 家電製品だけのモデルをトレーニングしたのであれば、 + +02:55.630 --> 03:01.170 +もちろん、 あなたが構築した軽いデータセットと家電製品用のモデルを埋めれば、 + +03:01.200 --> 03:08.700 +同じような結果が得られるはずです。 + +03:09.630 --> 03:11.520 +クオンツ・フォー・ビットは真実だ。 + +03:11.550 --> 03:19.410 +量子化し、 カラフルな線を出力する方法を覚えているだろう。 + +03:19.740 --> 03:20.280 +オーケー。 + +03:20.310 --> 03:22.290 +そして、 ハグをするためにログインする。 + +03:22.680 --> 03:24.090 +もう慣れただろう。 + +03:24.240 --> 03:27.000 +トレーニングをしているわけではないので、 ウェイトやバイアスに記録する必要はない。 + +03:27.000 --> 03:29.400 +そしてデータセットをロードする。 + +03:29.400 --> 03:36.030 +それに、 この時点で最初のトレーニング・データセットを見れば、 もうそれを使うことはないだろう。 + +03:36.030 --> 03:38.790 +しかし、 その値段は織り込み済みだ。 + +03:38.820 --> 03:39.750 +こんな感じだ。 + +03:39.780 --> 03:44.640 +もちろん、 テストデータセットを使用する。 + +03:44.640 --> 03:46.800 +本文には価格が書かれていない。 + +03:46.800 --> 03:50.940 +値段は、 モデルに与えられない答えの中にしかない。 + +03:50.940 --> 03:54.060 +この文章も与えられただけだ。 + +03:54.090 --> 04:04.420 +この予測を実際に行う部分に入ったら、 すぐにトリプルチェックをすることができますよ」。 + +04:04.690 --> 04:06.010 +でも、 私たちは違う。 + +04:06.070 --> 04:13.300 +それではまず、 トークン・サイザーと微調整したモデルをロードしましょう。 + +04:13.300 --> 04:17.110 +そこでまず、 適切な量子化を選ぶ。 + +04:17.140 --> 04:19.180 +以前と同じでおなじみだろう。 + +04:19.210 --> 04:21.370 +これも以前と同じだ。 + +04:21.490 --> 04:22.960 +まあ、 ここだけ少し違う。 + +04:22.960 --> 04:24.460 +しかし、 我々はトークナイザーを読み込む。 + +04:24.460 --> 04:28.540 
+パラメータを設定するために、 定型文を入れたんだ。 + +04:28.540 --> 04:33.850 +先ほどと同じように、 右クオンツのコンフィグを使ってベースモデルをロードする。 + +04:33.880 --> 04:36.070 +そして、 またあの一発ギャグが飛び出した。 + +04:36.070 --> 04:39.280 +そしてこれが新しい。 + +04:39.280 --> 04:42.850 +だから今、 左モデルというものをロードしている。 + +04:43.090 --> 04:46.510 +覚えていれば、 パラメーターの効率的な微調整を意味する。 + +04:46.510 --> 04:51.850 +ローラをコード化したパッケージの名前だ。 + +04:52.120 --> 04:57.850 +つまり、 左のモデルは、 ベースがあるモデルを表す抱き合わせの顔のモデルなんだ。 + +04:57.850 --> 05:01.190 +そして、 ベースの上にアダプターを装着する。 + +05:01.760 --> 05:02.990 +だから、 それを積み込むんだ。 + +05:03.020 --> 05:05.090 +今はフロンプレーンと呼んでいる。 + +05:05.090 --> 05:11.930 +そして、 上で設定したベースモデル、 微調整したモデル名を渡すことができる。 + +05:11.930 --> 05:13.820 +そして、 希望があれば修正もする。 + +05:13.820 --> 05:17.780 +もしリビジョンがNULLでなければ、 それを渡す。 + +05:17.780 --> 05:20.570 +そうでなければ、 わざわざパスする必要はない。 + +05:21.140 --> 05:26.570 +そうすれば、 微調整したモデルがロードされる。 + +05:26.570 --> 05:28.670 +そして最後に、 メモリのフットプリントを表示する。 + +05:28.700 --> 05:35.900 +メモリフットプリントが5だったことを覚えているだろうか。 以前は6GBだったが、 今は5GBだ。 7. + +05:35.930 --> 05:44.930 +ラウラ・アダプターの100MBか109MBが追加されているから5700MBだ。 + +05:45.020 --> 05:48.950 +ローラAとローラBがそこにいる。 + +05:49.070 --> 05:53.540 +ええと、 もう一回、 この微調整されたモデルをプリントすればいいんだ。 + +05:53.540 --> 05:56.750 +この記事を書いたのは1週間前のことだ。 + +05:56.750 --> 06:01.480 +2日目、 僕が未来を見ていると言ったのは、 僕がこれを使っていたからなんだ。 + +06:01.480 --> 06:04.120 +このモデル自体は私たちが見たものだ。 + +06:04.120 --> 06:05.620 +そして、 このように見える。 + +06:05.620 --> 06:10.630 +これを覚えておくと、 ニューラルネットワークのさまざまなレイヤーをすべて見ることができ、 + +06:10.630 --> 06:17.830 +これらのレイヤー、 つまり張力レイヤーに到達すると、 そこにドロップアウトレイヤーがあることがわかる。 + +06:17.830 --> 06:20.860 +今、 あなたは10%の確率でドロップアウトすることをすべて知っている。 + +06:20.860 --> 06:25.240 +それにローラAとローラBもいる。 + +06:25.480 --> 06:34.690 +ローラAとローラBは、 適応されたすべてのレイヤー、 つまりターゲット・モジュールのものだ。 + +06:34.690 --> 06:50.740 +この層は、 ロジット、 つまり、 ボキャブ・トークン・ボキャブ・エントリーの可能性のある各数値を出力し、 + +06:50.740 --> 07:01.540 +次のトークンの確率を予測するためにソフトマックスに入る。 + +07:02.560 --> 07:03.670 +分かった。 + +07:04.360 --> 07:05.320 +準備はできているか? 
+ +07:05.590 --> 07:09.520 +だから、 推論を実行するんだ。 + +07:09.610 --> 07:21.100 +ええと、 ええと、 もう1回、 簡単に、 ええと、 GPT4ゼロが76ラマ3ドルになったことを覚えておいてください。 + +07:21.100 --> 07:21.100 +ベースモデル1台。 + +07:21.100 --> 07:24.670 +この未経験モデルは396ドルだった。 + +07:24.670 --> 07:25.900 +非常に残念だ。 + +07:26.140 --> 07:36.370 +ええと、 この人間は127を、 ええと、 僕のエラーとして、 ええと、 ラマが人間に勝てるかどうかとても期待しているんだ。 + +07:36.670 --> 07:43.090 +オープンソースのモデルとして重要なのは、 何かおかしなことを期待しているかもしれないが、 + +07:43.090 --> 07:51.430 +物価は変動が激しいということを念頭に置いておくことだ。 + +07:51.430 --> 07:57.250 +ある商品が値下げされたかどうかはわからない。 + +07:57.250 --> 08:04.860 +そのため、 これらの製品の価格には当然大きなばらつきがある。 + +08:04.860 --> 08:07.290 +これは非常に難しい挑戦だ。 + +08:07.290 --> 08:10.350 +そんなに難しくないと思うかもしれない。 + +08:10.350 --> 08:11.190 +とても難しいことだ。 + +08:11.220 --> 08:12.840 +自分で試してみればわかる。 + +08:13.320 --> 08:14.820 +うーん、 わかった。 + +08:14.970 --> 08:17.040 +そのことを念頭に置いて、 話を続けよう。 + +08:17.040 --> 08:23.820 +つまり、 価格を抽出する関数は、 文字列を受け取り、 価格がドルであるかどうかを調べます。 + +08:23.820 --> 08:34.410 +そして、 もう一回、 それ以降のどの時点に来る数字も見つける。 + +08:34.410 --> 08:47.160 +値段は1ドルで、 8ドル、 9ドル、 99ドルとか、 何でもいい。 + +08:47.490 --> 08:48.870 +ああ、 わかってもらえたと思うよ。 + +08:48.990 --> 08:51.060 +ええと、 それが抜き取り価格なんだ。 + +08:51.240 --> 09:01.670 +それから、 これはモデル予測関数で、 前に使った関数です。 + +09:01.940 --> 09:04.460 +うーん、 それはマスクに気を取られるね。 + +09:04.460 --> 09:06.980 +さっき言ったのは、 それを避けるために使うものかい? 
+ +09:06.980 --> 09:12.440 +警告を投げ、 入力プロンプトの大部分を予測する必要がないことを明確にする。 + +09:12.890 --> 09:20.570 +そして、 微調整されたモデルで、 入力を渡して、 出力を生成する。 + +09:20.600 --> 09:29.270 +このアテンション・マスクを渡すと、 新しいトークンは3つまでしか必要ない。 + +09:29.270 --> 09:33.530 +でも、 万が一、 ひどいミスをしたときに、 + +09:33.560 --> 09:40.700 +そのミスを捕捉できるように、 もう少し入れておく。 + +09:41.150 --> 09:46.580 +さて、 たまたまだが、 私たちはこの予測関数よりもう少し良いものを作ることができる。 + +09:46.580 --> 09:48.770 +これはまったく大きな違いではない。 + +09:48.770 --> 09:58.010 +しかし、 しかし、 このモデルをこれだけコントロールできるようになったのだから、 次のトークンの扱い方をもう少しスマートにすることができる。 + +09:58.010 --> 09:59.720 +それで、 この関数を書いたんだ。 + +09:59.720 --> 10:02.150 +これは改良されたモデル予測機能だ。 + +10:02.150 --> 10:06.320 +改善されたモデルは......うーん、 そうだね。 + +10:06.350 --> 10:09.110 +ただ、 もう少し複雑なんだ。 + +10:09.290 --> 10:18.230 +だから、 ええと、 ええと、 簡単に説明するけど、 超重要なことではないんだ。 + +10:18.230 --> 10:27.260 +次のトークンの中から最も可能性の高いものを選ぶのではなく、 次のトークンの中から最も可能性の高い3つのトークンを選ぶのだ。 + +10:27.500 --> 10:30.830 +そして、 この3つの確率は? + +10:30.830 --> 10:32.390 +そしてそれらは実数を表している。 + +10:32.390 --> 10:42.050 +例えば、 100ドルの価値がある可能性が非常に高く、 99ドルの可能性は少し低いが、 101ドルの可能性はかなり高いと言ったかもしれない。 + +10:42.230 --> 10:43.760 +しかし、 100が最高だった。 + +10:43.850 --> 10:47.060 +そして、 これら3つの数値の加重平均を取る。 + +10:47.060 --> 10:52.310 +そしてそれは、 何を予測しようとしているのかをもう少し正確に知るための方法なんだ。 + +10:52.490 --> 10:57.770 +そして、 必ずしも整数ではない何かを予測することができる。 + +10:58.000 --> 11:01.480 +僕が使ってきたテクニックなんだ。 + +11:01.480 --> 11:07.300 +回帰問題を分類問題として扱っているという事実を解決するようなものだ。 + +11:07.360 --> 11:13.420 +これを知っていることは超重要ではないが、 しかし、 上記の関数を使っても大差はない。 + +11:13.450 --> 11:14.800 +ちょっとした違いだ。 + +11:15.010 --> 11:20.260 +しかし、 ニューラルネットワークの最後の層に興味があるのであれば、 + +11:20.290 --> 11:27.670 +これに目を通す価値があるかもしれません。 なぜなら、 私がしていることは、 入力に渡す微調整モデルの出力を取ることであり、 + +11:27.670 --> 11:38.860 +これらはトークナイザーの可能な語彙エントリすべてにわたって、 このベクトルで述べたロジットとみなされるからです。 + +11:38.860 --> 11:43.180 +そして、 それを確率に変換するためにソフトマックスを呼び出す。 + +11:43.180 --> 11:46.270 +そして、 トップ3を通過する。 + +11:46.270 --> 11:51.490 +そして、 これは上位3人の加重平均を取っただけのものだ。 + +11:51.610 --> 11:54.400 +これは加重価格であり、 加重価格を合計したものである。 + +11:54.400 --> 11:56.050 +そして、 それが返ってくる。 + +11:56.050 
--> 11:58.660 +だから、 モデル予測にとても似ている。 + +11:58.660 --> 12:05.260 +ただ、 一番上の予想だけでなく、 上位3つの予想に基づいた、 少し正確な答えを出すだけだ。 + +12:05.890 --> 12:09.100 +ええと、 それでクラスのテスターがいるんだ。 + +12:09.100 --> 12:12.700 +これは以前使ったのとまったく同じテスター・クラスだ。 + +12:12.940 --> 12:21.250 +ええと、 その前に申し上げたこと、 これは明らかに全体の核心であることを指摘しておく価値がある。 + +12:21.250 --> 12:28.780 +渡された関数を呼び出すと、 渡されたのはデータ・ポイントに関連するテキストだけである。 + +12:28.810 --> 12:30.730 +当然、 値段は言わない。 + +12:30.730 --> 12:35.800 +知っているのはテキストだけで、 価格については何も知らない。 + +12:35.800 --> 12:37.330 +もちろんだ。 + +12:37.720 --> 12:45.610 +そして、 改良された関数を使用するテストを呼び、 テストに合格すると、 + +12:45.610 --> 12:57.070 +ある種のソープオペラのように、 次のビデオでこの結果をお伝えします。 + +12:57.100 --> 12:58.540 +そこで会おう。 diff --git a/week5/community-contributions/subtitles/srts/59667365/ko_KR.srt b/week5/community-contributions/subtitles/srts/59667365/ko_KR.srt new file mode 100755 index 0000000..507fc00 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59667365/ko_KR.srt @@ -0,0 +1,556 @@ +WEBVTT + +00:01.160 --> 00:10.100 +콜랍에 돌아왔어요 콜랍 공책 대회 7주 차 5일째예요 T4 박스에 탔는데 + +00:10.100 --> 00:12.980 +저렴한 싸구려 박스죠 + +00:12.980 --> 00:18.260 +훈련이 아니라 추론하는 거니까 그것만 하면 돼요 + +00:18.590 --> 00:22.070 +아니면 훈련이 다른 탭에서 진행되거나요 + +00:22.070 --> 00:30.890 +말씀드리는 순간 설치 몇 개로 시작해서 늘 하던 대로 가져오기로 했어요 + +00:30.890 --> 00:34.190 +여기 있는 상수에 대해 말씀드릴게요 + +00:34.340 --> 00:39.380 +기본 모델은 라마 3이고요 프로젝트 이름은 포옹하는 얼굴 사용자 프리저예요 + +00:39.380 --> 00:41.450 +여기서 선택해 put in을 해요 + +00:41.480 --> 00:47.480 +여러분의 이름을 입력하고 모델에 대한 추론과 테스트를 실행해 주세요 여러분이 + +00:47.480 --> 00:50.960 +잘 조율해서 Hub에 업로드한 모델이죠 + +00:51.140 --> 00:58.730 +하지만 인내심을 잃으셨거나 제 테스트 결과가 궁금하신 거라면 제 이름은 + +00:58.730 --> 01:03.680 +그대로 두셔도 돼요 이 모델은 공개될 테니까요 + +01:03.680 --> 01:06.980 +이 모델에 맞서 달릴 수 있어요 + +01:07.430 --> 01:10.460 +그리고 실행 이름을 선택했어요 + +01:10.460 --> 01:11.210 +이게 실행명이에요 + +01:11.210 --> 01:17.750 +39가 뭔지 아실 겁니다 제가 과거에 여러 개국 시대에 사용한 포옹의 + +01:17.780 --> 01:19.220 +연계명이죠 + +01:19.430 --> 01:25.940 +이 수정본은 제가 어떤 체크포인트를 선택할지 지정하는 곳이죠 + +01:25.940 --> 01:29.720 +너무 과해지기 전에 골라야 해요 + +01:29.780 
--> 01:34.640 +이 영화는 여전히 좋은 결과가 나오고 있었어요 + +01:35.150 --> 01:39.200 +그리고 이게 제 미세 튜닝 모델의 이름이 되죠 + +01:39.200 --> 01:40.880 +당연히 포옹하는 얼굴 이름이죠 + +01:40.880 --> 01:41.270 +미안해요 + +01:41.270 --> 01:42.590 +이걸 만들어야겠어요 + +01:43.190 --> 01:45.080 +안 그러면 제 이름을 하드 코딩해야 해요 + +01:45.080 --> 01:49.220 +하지만 전 두 가지 버전을 만들 거예요 + +01:49.220 --> 01:55.340 +하나는 여러분이 입력한 포옹 얼굴 유저예요 + +01:59.120 --> 02:14.500 +다른 건 주석 처리로 이 라인을 주석 처리 해제하라고 하죠 제 모델을 사용한다면요 + +02:16.810 --> 02:22.000 +물론 모델을 사용한다면 실행명과 개정도 바꿔야 합니다 여러분이 사용하는 + +02:22.000 --> 02:24.100 +것에 맞추기 위해서요 + +02:24.100 --> 02:27.340 +for reision = 0을 입력하지 않는 것부터 시작하세요 + +02:27.730 --> 02:36.610 +수정 = 0일 수도 있고요 수정 을 사용하지 않는다면 말이죠 + +02:36.610 --> 02:43.120 +그리고 데이터 세트에서는 여러분이 정성껏 정리해서 업로드한 데이터 세트를 다시 로드합니다 + +02:43.120 --> 02:45.550 +허깅페이스 허브에 말이죠 + +02:45.700 --> 02:49.420 +아니면 제 걸 쓰셔도 돼요 + +02:49.690 --> 02:55.630 +음, 그런데 만약 이것의 저비용 버전을 선택하고 가전제품 전용 + +02:55.630 --> 03:01.170 +가전제품 모델을 훈련시켰다면 당연히 여러분이 구축한 라이트 + +03:01.200 --> 03:08.700 +데이터 세트와 가전제품 모델을 채워야 합니다 그럼 비슷한 결과가 나오죠 + +03:09.630 --> 03:11.520 +비트라는 퀀트는 진실이에요 + +03:11.550 --> 03:18.000 +수량화하는 거죠 기억하실지 모르겠지만 색색의 선을 출력하는 좋은 + +03:18.000 --> 03:19.410 +방법들이에요 + +03:19.740 --> 03:20.280 +네 + +03:20.310 --> 03:22.290 +얼굴 포옹에 로그인해요 + +03:22.680 --> 03:24.090 +이제 익숙해졌잖아요 + +03:24.240 --> 03:27.000 +훈련이 아니니까 무게나 편향에 로그인할 필요 없어요 + +03:27.000 --> 03:29.400 +그런 다음 데이터 세트를 로드하죠 + +03:29.400 --> 03:35.280 +그리고 지금 이 시점에서 첫 번째 훈련 데이터 세트를 보면 더는 사용하지 않을 + +03:35.280 --> 03:36.030 +거예요 + +03:36.030 --> 03:38.790 +하지만 가격이 들어 있어요 + +03:38.820 --> 03:39.750 +이렇게 생겼어요 + +03:39.780 --> 03:44.640 +물론 테스트 데이터 세트를 사용할 겁니다 이렇게 생겼죠 + +03:44.640 --> 03:46.800 +텍스트에는 가격이 없어요 + +03:46.800 --> 03:50.940 +가격은 모델에 없는 답안에만 달려 있어요 + +03:50.940 --> 03:54.060 +이 문장만 있어요 + +03:54.090 --> 03:59.850 +비트 박스를 만들 때 다시 한번 확인해 주세요 비트 박스를 만들 때 + +04:00.490 --> 04:04.420 +가격표를 실수로 넘기면 실수가 되겠죠? 
+ +04:04.690 --> 04:06.010 +하지만 우린 아니죠 + +04:06.070 --> 04:13.300 +좋아요, 그럼 먼저 토큰라이저와 미세 튜닝 모델을 로드할게요 + +04:13.300 --> 04:17.110 +먼저 올바른 양자화를 선택해요 + +04:17.140 --> 04:19.180 +예전과 같은 방식이죠 + +04:19.210 --> 04:21.370 +이것도 예전과 똑같아요 + +04:21.490 --> 04:22.960 +여기만 살짝 달라요 + +04:22.960 --> 04:24.460 +토큰이저도 설치했어요 + +04:24.460 --> 04:28.540 +매개 변수를 설정하기 위해 상용 문서를 넣었어요. + +04:28.540 --> 04:33.850 +올바른 퀀트 구성을 사용하기 전처럼 베이스 모델을 로드하죠 + +04:33.880 --> 04:36.070 +또 한 줄짜리 대사가 나왔네요 + +04:36.070 --> 04:39.280 +이건 새로운 거예요 + +04:39.280 --> 04:42.850 +이제 왼쪽 모델이라는 걸 로드하고 있어요 + +04:43.090 --> 04:46.510 +매개 변수 효율 미세 조정의 약자란 걸 기억하세요 + +04:46.510 --> 04:51.850 +로라를 암호화한 상자 이름이에요 + +04:52.120 --> 04:57.850 +왼쪽 모델은 안아주는 얼굴 모델이고 베이스 모델을 나타내요 + +04:57.850 --> 05:01.190 +밑부분에는 어댑터가 달려 있어요 + +05:01.760 --> 05:02.990 +이걸 로딩할 거예요 + +05:03.020 --> 05:05.090 +Frompretled이라고 부르는군요 + +05:05.090 --> 05:11.930 +그리고 위에서 설정한 미세 튜닝 모델 이름을 베이스 모델로 넘길 수 있어요 + +05:11.930 --> 05:13.820 +원하시면 복습도 가능해요 + +05:13.820 --> 05:17.780 +수정값이 0이 아니면 넘기는 거죠 + +05:17.780 --> 05:20.570 +안 그러면 전달할 필요가 없죠 + +05:21.140 --> 05:26.570 +미세 튜닝 모델에 로드될 거예요 + +05:26.570 --> 05:28.670 +마지막에 메모리 발자국을 프린트할 거예요 + +05:28.700 --> 05:35.900 +메모리 발자국이 5개였던 거 기억하시죠? 전에는 6GB였는데 지금은 5GB예요 7시요 + +05:35.930 --> 05:44.930 +5700MB인 이유는 로라 어댑터가 100이나 109MB 더 있기 때문이죠 + +05:45.020 --> 05:48.950 +로라 A와 로라 B가 있어요 + +05:49.070 --> 05:53.540 +미세 튜닝 모델로 한 번 더 프린트할게요 + +05:53.540 --> 05:56.750 +기억하실지 모르겠지만 주중에 이걸 했어요 + +05:56.750 --> 06:01.480 +둘째 날, 제가 미래를 보고 있다고 했죠 이걸 사용하고 있었거든요 + +06:01.480 --> 06:04.120 +이 모형은 우리가 봤던 거예요 + +06:04.120 --> 06:05.620 +이렇게 보이죠 + +06:05.620 --> 06:10.630 +이걸 기억하시면 신경망의 다양한 층이 보입니다 + +06:10.630 --> 06:17.830 +여기 장력 층을 보면 드롭아웃 층이 있어요 Get up + +06:17.830 --> 06:20.860 +자퇴 확률 10%에 대해 잘 아시죠? + +06:20.860 --> 06:25.240 +로라 A와 로라 B도 있어요 + +06:25.480 --> 06:33.190 +로라 A와 로라 B는 각 층을 나타내는 거예요 우리의 목표 + +06:33.220 --> 06:34.690 +모듈이죠 + +06:34.690 --> 06:41.230 +그리고 여기 끝에 LM헤드도 주목해 주세요. 
방금 이야기 + +06:41.260 --> 06:50.740 +했듯이 완전히 연결된 마지막 레이어에서 로지트와 가능한 보캡 토큰 보캡 항목의 + +06:50.740 --> 06:58.630 +숫자를 출력합니다. 그리고 소프트맥스로 가서 다음 토큰의 가능성을 + +06:58.630 --> 07:01.540 +예측하죠. + +07:02.560 --> 07:03.670 +좋아요 + +07:04.360 --> 07:05.320 +준비됐어요? + +07:05.590 --> 07:09.520 +그래서 추론을 실행할 거예요 + +07:09.610 --> 07:18.280 +한 번 더 빠르게 설명하고 싶은데요 GPT 40이 76 라마 3으로 바뀌었다는 + +07:18.310 --> 07:21.100 +메모리를요 기본 모델 1개요 + +07:21.100 --> 07:24.670 +이 미숙련 모델은 396달러였어요 + +07:24.670 --> 07:25.900 +정말 실망스럽네요 + +07:26.140 --> 07:34.450 +여기 이 사람은 제 오류로 127점을 받았어요 라마가 사람을 이길 수 있는지 + +07:34.480 --> 07:36.370 +보고 싶어요 + +07:36.670 --> 07:43.090 +오픈 소스 모델로서 명심해야 할 점은 여러분이 여기서 터무니없는 걸 기대하실까 + +07:43.090 --> 07:51.430 +봐 말씀드리는데 가격이 많이 변동될 수 있는데 모델은 그에 대해 아무것도 몰라요 + +07:51.430 --> 07:57.250 +제품 가격이 급락했는지도 알 수 없습니다. 엄청난 가격으로 판매되고 있으니까요. + +07:57.250 --> 08:02.370 +제품 가격에는 자연적으로 큰 차이가 있어요 제가 직접 만들어 + +08:02.370 --> 08:04.860 +보려 했을 때 알게 된 사실이죠 + +08:04.860 --> 08:07.290 +이건 아주 어려운 과제예요 + +08:07.290 --> 08:10.350 +별로 어렵지 않다고 생각할 수도 있어요 + +08:10.350 --> 08:11.190 +정말 힘들어요 + +08:11.220 --> 08:12.840 +직접 해 보면 알 거예요 + +08:13.320 --> 08:14.820 +네 + +08:14.970 --> 08:17.040 +그 점을 명심하고 계속하죠 + +08:17.040 --> 08:23.820 +가격 추출 함수는 아시다시피 문자열이 있죠 가격 이즈 달러를 찾아요 + +08:23.820 --> 08:30.600 +그 다음에 나오는 숫자를 한 번 더 찾아보죠 스스로 만족하도록 가격을 + +08:30.630 --> 08:34.410 +추출해 문자열을 만들어보죠. + +08:34.410 --> 08:47.160 +가격은 달러입니다 아주 멋진 달러죠 8, 9, 99, 그쯤 돼요 제가 말하고 싶은 대로 가격을 말하죠 + +08:47.490 --> 08:48.870 +I'm get you're know + +08:48.990 --> 08:51.060 +이게 추출가예요 + +08:51.240 --> 08:59.540 +그리고 이건 모델 예측 함수입니다 전에 사용했던 함수로 + +08:59.540 --> 09:01.670 +입력을 받죠 + +09:01.940 --> 09:04.460 +그건 관심 마스크를 쓴 거죠 + +09:04.460 --> 09:06.980 +혹시 제가 말씀드린 그거 피하시는 거예요? + +09:06.980 --> 09:11.690 +입력 프롬프트를 예측하기 위해 필요한 것은 아니라는 것을 확실히 하기 위해 경고를 + +09:11.690 --> 09:12.440 +던지죠 + +09:12.890 --> 09:20.570 +미세 조정 모델에서 발생하는 출력은 입력값을 전달하죠 + +09:20.600 --> 09:26.600 +주의 마스크를 통과시킵니다. 새로운 토큰 세 개만 있으면 됩니다. 왜냐하면 다음 토큰은 우리가 정말 + +09:26.600 --> 09:29.270 +중요하게 생각하는 것이기 때문이죠. 
+ +09:29.270 --> 09:33.530 +하지만 만약 끔찍한 실수를 저지르면 그걸 포착하기 위해 몇 + +09:33.560 --> 09:39.050 +가지 더 입력할 거예요 그리고 답변을 하나만 받아내죠 그 답변 하나만 받아내서 + +09:39.050 --> 09:40.700 +가격을 추출해요 + +09:41.150 --> 09:46.580 +비트만 있으면 이 예측 함수보다 좀 더 잘할 수 있어요 + +09:46.580 --> 09:48.770 +큰 차이는 없어요 + +09:48.770 --> 09:54.080 +하지만 이 모델에 대한 컨트롤이 아주 많기 때문에 다음 토큰을 다루는 + +09:54.110 --> 09:58.010 +방법에 대해 좀 더 현명한 비트를 할 수 있어요 + +09:58.010 --> 09:59.720 +그래서 이 함수를 작성했어요 + +09:59.720 --> 10:02.150 +향상된 모델 예측 함수죠 + +10:02.150 --> 10:06.320 +향상된 모델 예측 결과 네 + +10:06.350 --> 10:09.110 +비트가 좀 더 복잡해요 + +10:09.290 --> 10:18.230 +그러니까 간단히 설명할게요 하지만 그렇게 중요한 건 아니에요 + +10:18.230 --> 10:24.290 +가장 가능성이 높은 다음 토큰이 아니라 가장 가능성이 높은 + +10:24.290 --> 10:27.260 +세 개의 토큰을 취하는 거죠 + +10:27.500 --> 10:30.830 +이 세 가지의 확률은 어떻게 나왔나요? + +10:30.830 --> 10:32.390 +실제 숫자를 나타내죠 + +10:32.390 --> 10:39.530 +가령, 비트 상자가 100달러쯤 되고 99$보다는 조금 적고 101$쯤 + +10:39.530 --> 10:42.050 +될 거라고 했죠 + +10:42.230 --> 10:43.760 +근데 100이 제일 높았어요 + +10:43.850 --> 10:47.060 +그런 다음 그 세 숫자 사이에 가중된 평균을 얻어요 + +10:47.060 --> 10:52.310 +비트가 뭘 예측하려 하는지 좀 더 정확하게 알아낼 수 있는 방법이죠 + +10:52.490 --> 10:57.770 +그리고 항상 숫자가 전부인 건 아니지만 예측할 수 있게 해주죠 + +10:58.000 --> 11:01.480 +제가 사용한 기술이에요 + +11:01.480 --> 11:06.520 +분류 문제로서 퇴행 문제를 치료하고 있다는 사실을 해결하는 + +11:06.520 --> 11:07.300 +거죠 + +11:07.360 --> 11:11.950 +이걸 아는 게 그리 중요하진 않지만 위의 함수를 사용한다고 크게 + +11:11.950 --> 11:13.420 +달라지진 않아요 + +11:13.450 --> 11:14.800 +비트가 약간 달라요 + +11:15.010 --> 11:20.260 +신경망의 마지막 계층에 관심이 있다면 한 번 살펴보는 + +11:20.290 --> 11:27.670 +게 좋을 것 같습니다. 왜냐하면 미세하게 조정한 모델이 입력을 통과하면 + +11:27.670 --> 11:37.150 +출력을 얻고 이것들은 로그잇으로 간주됩니다. 토큰라이저의 가능한 어휘 엔트리에 걸쳐 있는 + +11:37.210 --> 11:38.860 +벡터죠. 
+ +11:38.860 --> 11:43.180 +그런 다음 소프트맥스를 호출해 확률로 전환하죠 + +11:43.180 --> 11:46.270 +그리고 상위 3인에 들죠 + +11:46.270 --> 11:51.490 +그리고 이 검프라는 게 상위 3위의 평균을 가늠하는 거예요 + +11:51.610 --> 11:54.400 +가격도 중대하고 가격도 중대해요 + +11:54.400 --> 11:56.050 +그게 반환되죠 + +11:56.050 --> 11:58.660 +모델 예측과 아주 유사하죠 + +11:58.660 --> 12:03.940 +좀 더 정확한 답을 제공할 뿐이죠 최고의 예측이 아니라 상위 3개 예측에 + +12:03.940 --> 12:05.260 +근거해서요 + +12:05.890 --> 12:09.100 +그리고 우리 반 테스터도 있어요 + +12:09.100 --> 12:12.700 +이건 우리가 전에 사용했던 테스터 클래스와 정확히 같아요 + +12:12.940 --> 12:19.450 +제가 전에 언급했던 것을 짚고 넘어가야 할 것 같아요 이게 전체의 + +12:19.450 --> 12:21.250 +핵심이에요 + +12:21.250 --> 12:28.780 +통과한 함수를 취하고 호출할 때 통과하는 건 데이터 포인트와 관련된 텍스트예요 + +12:28.810 --> 12:30.730 +가격은 당연히 안 알려줘요 + +12:30.730 --> 12:35.800 +문자만 알고 있어서 가격은 전혀 몰라요 + +12:35.800 --> 12:37.330 +네, 물론이죠 + +12:37.720 --> 12:45.610 +그런 다음 테스트를 호출하죠 개선된 함수를 사용해서요 테스트를 + +12:45.610 --> 12:53.710 +통과합니다 드라마 같죠 다음 비디오에서 결과를 알려드리죠 get + +12:53.710 --> 12:57.070 +it get it + +12:57.100 --> 12:58.540 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/59667829/en_US.srt b/week5/community-contributions/subtitles/srts/59667829/en_US.srt new file mode 100755 index 0000000..7ca24ee --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59667829/en_US.srt @@ -0,0 +1,148 @@ +WEBVTT + +00:01.010 --> 00:01.580 +Well. + +00:01.580 --> 00:02.330 +Hello there. + +00:02.360 --> 00:04.550 +Look, I know what you're thinking. + +00:04.580 --> 00:07.070 +You're thinking I peaked too early. + +00:07.100 --> 00:09.950 +Last week was an amazing climax. + +00:09.950 --> 00:13.430 +We built a model that massively outperformed the frontier. + +00:13.460 --> 00:15.290 +It can't get better than that. + +00:15.320 --> 00:17.000 +Well, I've got news for you. + +00:17.090 --> 00:18.860 +It's going to get better than that. + +00:18.890 --> 00:22.670 +We are actually going to have the best week yet. + +00:22.670 --> 00:24.680 +And it's just getting started. 
+ +00:24.860 --> 00:29.960 +So this week, this week it's about reaching the pinnacle. + +00:29.960 --> 00:33.770 +It's about becoming a master of LM engineering. + +00:33.770 --> 00:34.970 +And we're going to do it. + +00:35.000 --> 00:36.680 +We're going to have a blast. + +00:36.680 --> 00:38.510 +It's going to be epic. + +00:38.870 --> 00:44.030 +Uh, we are going to build a multi-agent framework. + +00:44.030 --> 00:48.290 +We're going to build a solution that is going to be really, really awesome. + +00:48.290 --> 00:54.740 +And yeah, it's going to do something profound and it's going to do it using everything that we've been + +00:54.740 --> 00:56.840 +building for the last eight weeks. + +00:57.140 --> 01:02.750 +Uh, for today, though, we're going to be using modal, the serverless platform for AI, so that we + +01:02.750 --> 01:09.390 +can deploy the same model that we built last week, our proprietary Three specialized alum, we're going + +01:09.390 --> 01:18.120 +to be able to deploy it behind a serverless API on the cloud, and we're going to build the first agent + +01:18.150 --> 01:18.930 +of many. + +01:18.960 --> 01:24.390 +The first of the seven agents that are going to be part of the solution that we are building this week + +01:24.510 --> 01:27.150 +to solve a juicy business problem. + +01:27.150 --> 01:35.550 +So without further ado, let's get into week eight, the final week of mastering LLM engineering. + +01:35.790 --> 01:40.110 +And of course, it wouldn't be the start of a week without this picture. + +01:40.110 --> 01:44.640 +For the last time, let's look at the journey that you've been on. + +01:44.670 --> 01:54.210 +You started or seven weeks ago, uh, you first uh, we we spent some time looking at frontier models, + +01:54.330 --> 02:00.150 +uh, looking at asking questions, asking how many times that A appeared in this sentence, if you can + +02:00.150 --> 02:03.540 +remember that, uh, in week two, we built UIs. 
+ +02:03.540 --> 02:05.850 +We experienced gradio for the first time. + +02:05.850 --> 02:08.520 +We worked with agent ization in a light way. + +02:08.520 --> 02:10.080 +It's going to get a lot heavier. + +02:10.200 --> 02:16.340 +Uh, and we played around with Multi-modality in week three, we went open source with hugging face + +02:16.340 --> 02:19.760 +with tokenization, with with models. + +02:20.150 --> 02:26.930 +In week four, we built that amazing code generation tool that rewrote Python a, C plus, plus. + +02:26.930 --> 02:32.000 +And in doing so, we explored how you select the right LLM to solve a business problem. + +02:32.060 --> 02:37.760 +In week five, we tackled Rag using Lang Chain and built a question answering project. + +02:37.790 --> 02:39.890 +Maybe you built something bigger as well. + +02:39.920 --> 02:41.540 +In week six. + +02:41.540 --> 02:48.380 +Week six was when we started on training and we fine tuned a frontier model and week seven. + +02:48.410 --> 02:55.460 +Of course, week seven we've just come from was when we beat that frontier model using Cultura fine + +02:55.460 --> 03:00.410 +tuning to make our own Verticalized specialized LLM. + +03:00.890 --> 03:03.650 +And I loved it and I hope you did too. + +03:03.680 --> 03:10.430 +And so this week, week eight, the culmination of everything that's come before is when we have a finale. + +03:10.460 --> 03:18.860 +We must LLM engineering and we're going to do it by focusing on Agentic AI and building an agent platform. + +03:19.520 --> 03:21.350 +Let's talk more about that next time. 
diff --git a/week5/community-contributions/subtitles/srts/59667829/ja_JP.srt b/week5/community-contributions/subtitles/srts/59667829/ja_JP.srt new file mode 100755 index 0000000..a8512e6 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59667829/ja_JP.srt @@ -0,0 +1,130 @@ +WEBVTT + +00:01.010 --> 00:01.580 +まあね。 + +00:01.580 --> 00:02.330 +こんにちは。 + +00:02.360 --> 00:04.550 +何を考えているかは分かる。 + +00:04.580 --> 00:07.070 +ピークが早すぎたと思っているんだろう。 + +00:07.100 --> 00:09.950 +先週は素晴らしいクライマックスだった。 + +00:09.950 --> 00:13.430 +私たちは、 フロンティアを大きく上回るモデルを構築した。 + +00:13.460 --> 00:15.290 +これ以上のものはない。 + +00:15.320 --> 00:17.000 +さて、 ニュースがある。 + +00:17.090 --> 00:18.860 +もっと良くなるはずだ。 + +00:18.890 --> 00:22.670 +今週は最高の1週間になりそうだ。 + +00:22.670 --> 00:24.680 +まだ始まったばかりだ。 + +00:24.860 --> 00:29.960 +だから今週は、 頂点を目指すということだ。 + +00:29.960 --> 00:33.770 +LMエンジニアリングの達人になることだ。 + +00:33.770 --> 00:34.970 +そして、 それを実行するんだ + +00:35.000 --> 00:36.680 +僕ら、 最高に楽しむつもりだよ。 + +00:36.680 --> 00:38.510 +壮大なものになるだろう。 + +00:38.870 --> 00:44.030 +ええと、 私たちはマルチエージェント・フレームワークを構築しようとしています。 + +00:44.030 --> 00:48.290 +我々は、 本当に、 本当に素晴らしいソリューションを構築するつもりだ。 + +00:48.290 --> 00:56.840 +そして、 この8週間、 私たちが築き上げてきたものをすべて使って、 深遠な何かを成し遂げようとしている。 + +00:57.140 --> 01:02.750 +先週構築したのと同じモデル、 私たち独自のThree + +01:02.750 --> 01:18.930 +specialized alumを、 クラウド上のサーバーレスAPIの背後に展開できるようにする。 + +01:18.960 --> 01:27.150 +今週私たちが構築するソリューションの一部となる7つのエージェントのうちの1つで、 ビジネス上の重要な問題を解決するためのものだ。 + +01:27.150 --> 01:35.550 +それでは早速、 第8週、 LLMエンジニアリングをマスターする最終週に入ろう。 + +01:35.790 --> 01:40.110 +そしてもちろん、 この写真がなければ週の始まりではない。 + +01:40.110 --> 01:44.640 +最後に、 あなたが歩んできた道程を振り返ってみよう。 + +01:44.670 --> 02:03.540 +週目にはUIを作りました。 + +02:03.540 --> 02:05.850 +初めてグラディオを体験した。 + +02:05.850 --> 02:08.520 +私たちは軽い気持ちでエージェント化に取り組んだ。 + +02:08.520 --> 02:10.080 +もっと重くなるだろう。 + +02:10.200 --> 02:16.340 +週目にはマルチモダリティで遊び、 オープンソースでハグやトークン化、 + +02:16.340 --> 02:19.760 +モデルを使ってみた。 + +02:20.150 --> 02:26.930 +第4週では、 Python a、 C plus、 plusを書き換える、 あの素晴らしいコード生成ツールを作った。 + +02:26.930 --> 
02:32.000 +そしてその際、 ビジネス上の問題を解決するために適切なLLMをどのように選択するのかを探った。 + +02:32.060 --> 02:37.760 +第5週では、 ラング・チェインを使ったラグに取り組み、 質問回答プロジェクトを構築した。 + +02:37.790 --> 02:39.890 +もしかしたら、 もっと大きなものを作ったのかもしれない。 + +02:39.920 --> 02:41.540 +第6週だ。 + +02:41.540 --> 02:48.380 +第6週はトレーニングに着手し、 フロンティアモデルの微調整を行った。 + +02:48.410 --> 02:55.460 +もちろん、 第7週はクルトゥーラのファイン・チューニングを駆使してフロンティア・モデルを打ち破り、 + +02:55.460 --> 03:00.410 +独自の縦型特化型LLMを完成させたところだ。 + +03:00.890 --> 03:03.650 +そして、 私はそれを気に入った。 + +03:03.680 --> 03:10.430 +そして今週、 第8週はこれまでの集大成としてフィナーレを迎える。 + +03:10.460 --> 03:18.860 +我々はLLMエンジニアリングが必要であり、 エージェントAIに焦点を当て、 エージェント・プラットフォームを構築することでそれを実現するつもりだ。 + +03:19.520 --> 03:21.350 +それについてはまた次回に。 diff --git a/week5/community-contributions/subtitles/srts/59667829/ko_KR.srt b/week5/community-contributions/subtitles/srts/59667829/ko_KR.srt new file mode 100755 index 0000000..bfc640b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59667829/ko_KR.srt @@ -0,0 +1,145 @@ +WEBVTT + +00:01.010 --> 00:01.580 +글쎄요 + +00:01.580 --> 00:02.330 +안녕하세요 + +00:02.360 --> 00:04.550 +무슨 생각하는지 알아요 + +00:04.580 --> 00:07.070 +내가 너무 일찍 정점을 찍었다고 생각하죠? + +00:07.100 --> 00:09.950 +지난주 클라이맥스는 굉장했어요 + +00:09.950 --> 00:13.430 +우리는 개척지를 훨씬 능가하는 모델을 만들었어요 + +00:13.460 --> 00:15.290 +Get it, get it, get it, it, it, it, it! 이보다 더 좋을 순 없죠 + +00:15.320 --> 00:17.000 +한 가지 알려드리죠 + +00:17.090 --> 00:18.860 +Get it, get it, get it. 
더 좋아질 거예요 + +00:18.890 --> 00:22.670 +최고의 한 주가 될 거예요 + +00:22.670 --> 00:24.680 +이제 시작이에요 + +00:24.860 --> 00:29.960 +이번 주에는 정점에 오를 거예요 + +00:29.960 --> 00:33.770 +LM 공학의 달인이 되는 거죠 + +00:33.770 --> 00:34.970 +그렇게 할 거예요 + +00:35.000 --> 00:36.680 +정말 재미있을 거예요 + +00:36.680 --> 00:38.510 +에픽이 될 거예요 + +00:38.870 --> 00:44.030 +다중 에이전트 프레임워크를 구축할 거예요 + +00:44.030 --> 00:48.290 +정말 정말 멋진 솔루션을 구축할 거예요 + +00:48.290 --> 00:54.740 +뭔가 심오한 일을 할 거예요 지난 8주간 지은 모든 + +00:54.740 --> 00:56.840 +걸 활용해서요 + +00:57.140 --> 01:02.750 +오늘은 인공지능을 위한 서버 비 플랫폼인 모드알을 사용할 겁니다 + +01:02.750 --> 01:09.390 +지난주에 만든 것과 같은 모델인 독점적인 3단계 명산을 배포하기 위해서요 + +01:09.390 --> 01:18.930 +클라우드에서 서버리스 API 뒤에 배포할 수 있습니다 많은 에이전트의 첫 번째를 만들 거예요 + +01:18.960 --> 01:24.390 +이번 주에 우리가 만들 복잡한 사업 문제를 해결하는 데 일조할 + +01:24.510 --> 01:27.150 +7명 중 첫 번째 요원이죠 + +01:27.150 --> 01:35.550 +그럼 지체 없이 8주 차로 넘어가죠 LLM 엔지니어링의 진수를 보여주는 마지막 주예요 + +01:35.790 --> 01:40.110 +이 영화가 없으면 일주일의 시작도 아니죠 + +01:40.110 --> 01:44.640 +마지막으로 여러분이 걸어온 길을 살펴보죠 + +01:44.670 --> 01:54.210 +7주 전에 시작했나요? 아니면 처음 시작했나요? 우리는 개척 모델에 대해 시간을 좀 들였습니다. 질문을 하고 + +01:54.330 --> 02:00.150 +A가 이 문장에 몇 번이나 나왔는지 물어봤어요. 기억하실지 모르겠지만 + +02:00.150 --> 02:03.540 +2주 차에 UI를 구축했어요. + +02:03.540 --> 02:05.850 +처음으로 그러디오를 경험했어요 + +02:05.850 --> 02:08.520 +아이즈 요원과는 가볍게 작업했어요 + +02:08.520 --> 02:10.080 +Get up! Get up! 
훨씬 더 무거워질 거예요 + +02:10.200 --> 02:16.340 +3주차에는 다중 양상을 다뤘는데요 얼굴을 껴안는 것과 토큰화 + +02:16.340 --> 02:19.760 +모델로 오픈 소스를 했죠 + +02:20.150 --> 02:26.930 +4주 차에 놀라운 코드 생성 도구를 만들었습니다 파이썬 a, c, 플러스 플러스 코드를 다시 쓴 도구죠 + +02:26.930 --> 02:32.000 +그러면서 비즈니스 문제를 해결하기 위한 올바른 LLM을 선택하는 방법을 알아봤죠 + +02:32.060 --> 02:37.760 +5주 차에는 랑 체인을 활용해 래그와 씨름했고 질문 답안 과제를 만들었죠 + +02:37.790 --> 02:39.890 +더 큰 걸 지었을지도 모르죠 + +02:39.920 --> 02:41.540 +6주 차에요 + +02:41.540 --> 02:48.380 +6주 차에는 훈련을 시작했고 개척지 모델을 조율했어요 7주 차에는 + +02:48.410 --> 02:55.460 +7주째에는 개척 시대 모델을 이겼어요 코튜라 미세 튜닝을 이용해 + +02:55.460 --> 03:00.410 +수직화된 전문 LLM을 만들었죠 + +03:00.890 --> 03:03.650 +정말 좋았어요 여러분도 좋아하셨길 바라요 + +03:03.680 --> 03:10.430 +그래서 이번 주, 8주 차에는 그동안의 모든 것이 정점을 찍고 결승전이 펼쳐져요 + +03:10.460 --> 03:18.860 +에이전트 인공지능과 에이전트 플랫폼 구축에 초점을 맞춰 LLM 엔지니어링을 진행할 거예요 + +03:19.520 --> 03:21.350 +그 얘기는 다음에 하죠 diff --git a/week5/community-contributions/subtitles/srts/59667841/en_US.srt b/week5/community-contributions/subtitles/srts/59667841/en_US.srt new file mode 100755 index 0000000..61e76be --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59667841/en_US.srt @@ -0,0 +1,139 @@ +WEBVTT + +00:00.770 --> 00:07.670 +Now, look, I know that I went through that very fast, but maybe, uh, you're still, uh, blinking + +00:07.700 --> 00:08.450 +at the end of that. + +00:08.450 --> 00:13.490 +But that's because the point is that you should go back now and do this yourself and see it. + +00:13.490 --> 00:17.540 +And as you run that code and you see what's going on in the modal screens, I think it's going to make + +00:17.540 --> 00:18.770 +complete sense. + +00:18.950 --> 00:24.470 +Um, in case you have any problems with that, uh, hugging face token, I'm going to put better instructions + +00:24.470 --> 00:25.310 +in the Jupyter lab. + +00:25.310 --> 00:26.570 +So that's very clear for you. 
+ +00:26.570 --> 00:32.540 +And yeah, if I think you'll find that it will, that it will be fairly straightforward and you'll see + +00:32.540 --> 00:34.820 +how that works and why it's working so fast. + +00:34.850 --> 00:37.010 +There's the first time you run it for a while. + +00:37.010 --> 00:38.660 +There's several minutes for it to warm up. + +00:38.660 --> 00:44.120 +But then subsequently, because we've cached the model weights and we've loaded it into memory, it + +00:44.120 --> 00:47.390 +should run quickly as it did just then. + +00:47.450 --> 00:54.650 +So with that, you have now learned how to take a model and how to deploy it to production so that people + +00:54.650 --> 01:02.450 +could call it just with Python code for production purposes within applications outside something like + +01:02.480 --> 01:03.590 +a Jupyter Lab. + +01:04.040 --> 01:07.490 +And hopefully you're now beginning to appreciate that. + +01:07.490 --> 01:12.350 +We do have a big week, and it is an epic project, and there's a lot to be done. + +01:12.350 --> 01:16.790 +In fact, uh, the next day's worth of activities is the biggest of the lot. + +01:16.820 --> 01:18.740 +There's an awful lot happening. + +01:18.860 --> 01:26.700 +Um, but just to remind yourself for today that that this was about deploying models in production using + +01:26.700 --> 01:30.390 +modal, the serverless platform. + +01:30.600 --> 01:37.350 +In some ways, it's similar to when we deployed a model or I deployed a model to the hugging face using + +01:37.380 --> 01:38.940 +hugging face endpoints. + +01:39.120 --> 01:44.850 +Um, but you can see the the extra functionality that you get with this, the ability to configure infrastructure + +01:44.850 --> 01:49.650 +with code and the way that the pricing works, it's a very, very powerful platform. + +01:49.920 --> 01:57.330 +Next time you'll be able to build an advanced Wragg solution, you're saying I already you already got + +01:57.330 --> 01:57.690 +Wragg. 
+ +01:57.690 --> 01:58.620 +We've done Wragg. + +01:58.620 --> 01:59.670 +We know Wragg well. + +01:59.670 --> 02:01.140 +You're going to know it even more. + +02:01.140 --> 02:01.650 +Next time. + +02:01.650 --> 02:03.030 +We're going to use Wragg. + +02:03.030 --> 02:05.010 +We're going to do it directly without chain. + +02:05.010 --> 02:05.940 +We're pros now. + +02:05.940 --> 02:06.930 +We don't need long chain. + +02:06.930 --> 02:08.070 +We can do it ourselves. + +02:08.070 --> 02:14.820 +We're going to look things up in a Chrome data store and use it to give context to a model, but it's + +02:14.820 --> 02:18.180 +going to be an enormous great data store. + +02:18.180 --> 02:23.790 +And we're going to build something called an ensemble model, which is a kind of model that combines + +02:23.790 --> 02:25.590 +the best of multiple models. + +02:25.590 --> 02:32.400 +And we're going to be able to deliver production ready code that will span multiple models. + +02:32.400 --> 02:39.360 +So it's going to be about, uh, really strengthening your skill set, building expertise as you make + +02:39.360 --> 02:46.140 +the transition from being a knowledgeable in LM engineering to being a master of LM engineering. + +02:46.350 --> 02:48.330 +And with that, I'll see you next time. 
diff --git a/week5/community-contributions/subtitles/srts/59667841/ja_JP.srt b/week5/community-contributions/subtitles/srts/59667841/ja_JP.srt new file mode 100755 index 0000000..2ce0a34 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59667841/ja_JP.srt @@ -0,0 +1,103 @@ +WEBVTT + +00:00.770 --> 00:08.450 +でも、 もしかしたら、 まだ、 まばたきしているかもしれない。 + +00:08.450 --> 00:13.490 +でもそれは、 今すぐ戻って自分でやって見るべきだということだからだ。 + +00:13.490 --> 00:18.770 +このコードを実行し、 モーダル・スクリーンで何が起こっているかを見れば、 完全に理解できると思います。 + +00:18.950 --> 00:25.310 +あの、 ハグする顔のトークンで何か問題があったら、 Jupyterラボにもっといい説明を書くつもりです。 + +00:25.310 --> 00:26.570 +だから、 それはあなたにとって非常に明確なことなんだ。 + +00:26.570 --> 00:34.820 +そして、 もしそうなるのであれば、 それはかなり簡単なことで、 それがどのように機能するのか、 なぜそんなに速く機能するのかがわかると思う。 + +00:34.850 --> 00:37.010 +しばらく走らせてみて初めてわかることがある。 + +00:37.010 --> 00:38.660 +ウォームアップに数分かかる。 + +00:38.660 --> 00:47.390 +しかしその後、 モデルの重みをキャッシュし、 メモリにロードしたため、 先ほどと同じように素早く実行されるはずだ。 + +00:47.450 --> 00:54.650 +これで、 Jupyter Labのような外部のアプリケーションで、 Pythonコードだけでモデルを呼び出せるように、 + +00:54.650 --> 01:03.590 +モデルを本番環境にデプロイする方法を学んだことになる。 + +01:04.040 --> 01:07.490 +そして願わくば、 あなたがそのことを理解し始めていることを。 + +01:07.490 --> 01:12.350 +壮大なプロジェクトだし、 やるべきことはたくさんある。 + +01:12.350 --> 01:16.790 +実際、 次の日のアクティビティが一番大きいんだ。 + +01:16.820 --> 01:18.740 +非常に多くのことが起きている。 + +01:18.860 --> 01:30.390 +ええと、 でも今日のところは、 サーバーレス・プラットフォームであるモーダルを使って本番環境にモデルをデプロイする話だったことを思い出してください。 + +01:30.600 --> 01:38.940 +ある意味、 モデルをデプロイしたとき、 あるいは私が抱擁顔のエンドポイントを使って抱擁顔にモデルをデプロイしたときと似ている。 + +01:39.120 --> 01:49.650 +でも、 コードでインフラを構成する機能や、 価格設定の方法など、 このプラットフォームが非常に強力なプラットフォームであることはお分かりいただけると思います。 + +01:49.920 --> 01:57.690 +今度、 あなたは高度なWraggソリューションを構築することができるようになる。 + +01:57.690 --> 01:58.620 +我々はラッグをやり遂げた。 + +01:58.620 --> 01:59.670 +ラッグのことはよく知っている。 + +01:59.670 --> 02:01.140 +あなたはさらにそれを知ることになる。 + +02:01.140 --> 02:01.650 +次こそは + +02:01.650 --> 02:03.030 +ウラッグを使うつもりだ。 + +02:03.030 --> 02:05.010 +チェーンなしで直接やるんだ。 + +02:05.010 --> 02:05.940 +僕らはもうプロだ。 + +02:05.940 --> 02:06.930 +長いチェーンは必要ない。 + +02:06.930 --> 02:08.070 
+自分たちでできるんだ。 + +02:08.070 --> 02:18.180 +Chromeのデータストアで物事を調べ、 モデルにコンテキストを与えるために使うのだが、 それは膨大で素晴らしいデータストアになるだろう。 + +02:18.180 --> 02:25.590 +そして、 アンサンブルモデルと呼ばれる、 複数のモデルの長所を組み合わせたモデルを構築する。 + +02:25.590 --> 02:32.400 +そして、 複数のモデルにまたがるプロダクション・レディなコードを提供できるようになるだろう。 + +02:32.400 --> 02:39.360 +つまり、 LMエンジニアリングの知識人からLMエンジニアリングの達人への移行を果たすために、 + +02:39.360 --> 02:46.140 +スキルセットを強化し、 専門知識を身につけるということだ。 + +02:46.350 --> 02:48.330 +それでは、 また次回。 diff --git a/week5/community-contributions/subtitles/srts/59667841/ko_KR.srt b/week5/community-contributions/subtitles/srts/59667841/ko_KR.srt new file mode 100755 index 0000000..c40cce0 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59667841/ko_KR.srt @@ -0,0 +1,133 @@ +WEBVTT + +00:00.770 --> 00:07.670 +제가 아주 빨리 지나갔다는 건 알지만 어쩌면 당신은 아직도 눈을 깜빡이고 있을지도 + +00:07.700 --> 00:08.450 +몰라요 + +00:08.450 --> 00:13.490 +하지만 중요한 건 지금 돌아가서 직접 봐야 한다는 거예요 + +00:13.490 --> 00:17.540 +코드를 실행하고 모드 스크린에서 무슨 일이 벌어지는지 보면 완전히 + +00:17.540 --> 00:18.770 +이해가 될 거예요 + +00:18.950 --> 00:24.470 +포옹하는 얼굴 토큰에 문제가 생길 수도 있으니 Jupyter 연구소에 더 나은 방법을 + +00:24.470 --> 00:25.310 +알려줄게요 + +00:25.310 --> 00:26.570 +아주 명확하죠 + +00:26.570 --> 00:32.540 +네, 그렇게 될 것 같다면 꽤 간단할 겁니다 어떻게 작동하는지 왜 그렇게 + +00:32.540 --> 00:34.820 +빠른지 보실 거예요 + +00:34.850 --> 00:37.010 +처음으로 실행하는 거죠 + +00:37.010 --> 00:38.660 +예열하려면 몇 분 걸려요 + +00:38.660 --> 00:44.120 +하지만 그 후에 모델 무게를 캐시에 넣고 메모리에 로드했기 때문에 + +00:44.120 --> 00:47.390 +방금처럼 빨리 실행될 거예요 + +00:47.450 --> 00:54.650 +이제 모델을 가져다가 프로덕션에 배포하는 법을 배웠습니다 주피터 랩 같은 + +00:54.650 --> 01:03.590 +것의 외부의 응용 프로그램에서 프로덕션 목적으로 파이썬 코드로 호출할 수 있도록요 + +01:04.040 --> 01:07.490 +여러분도 이제 그걸 느끼기 시작하셨길 바라요 + +01:07.490 --> 01:12.350 +이번 주는 중요한 한 주예요 에픽 프로젝트로 할 일이 많죠 + +01:12.350 --> 01:16.790 +사실 다음 날 할 활동이 가장 큰 활동이에요 + +01:16.820 --> 01:18.740 +많은 일이 벌어지고 있어요 + +01:18.860 --> 01:26.700 +하지만 오늘 여러분께 상기시켜 드리자면 이건 서버리스 비 플랫폼 모듈을 이용한 프로덕션의 + +01:26.700 --> 01:30.390 +모델 배포에 관한 거였어요 + +01:30.600 --> 01:37.350 +어떤 면에서는 모델을 배포하거나 얼굴 엔드포인트를 이용해 안는 얼굴에 모델을 배포할 + 
+01:37.380 --> 01:38.940 +때와 비슷해요 + +01:39.120 --> 01:44.850 +하지만 이것으로 얻는 추가 기능성 코드로 인프라를 구성하는 + +01:44.850 --> 01:49.650 +기능 가격 책정 방식 아주 아주 강력한 플랫폼이죠 + +01:49.920 --> 01:57.690 +다음에는 고급 Wragg 솔루션을 만들 수 있을 거예요 이미 Wragg를 갖고 있다고요? + +01:57.690 --> 01:58.620 +래그, 끝났어요 + +01:58.620 --> 01:59.670 +래그를 잘 알죠 + +01:59.670 --> 02:01.140 +더 잘 알게 될 거예요 + +02:01.140 --> 02:01.650 +다음에요 + +02:01.650 --> 02:03.030 +래그를 쓸 거예요 + +02:03.030 --> 02:05.010 +체인 없이 바로 할 거예요 + +02:05.010 --> 02:05.940 +우린 이제 프로예요 + +02:05.940 --> 02:06.930 +긴 체인은 필요 없어요 + +02:06.930 --> 02:08.070 +우리가 하면 돼요 + +02:08.070 --> 02:14.820 +크롬 데이터 스토어에서 찾아보고 모델에 컨텍스트를 주는 데 사용할 거예요 + +02:14.820 --> 02:18.180 +아주 방대한 데이터 스토어가 되겠죠 + +02:18.180 --> 02:23.790 +앙상블 모델이라는 걸 만들 거예요 여러 모델을 + +02:23.790 --> 02:25.590 +합친 모델이죠 + +02:25.590 --> 02:32.400 +다양한 모델을 포괄하는 생산 준비 코드를 제공할 수 있죠 + +02:32.400 --> 02:39.360 +여러분의 기술을 강화하고 전문성을 구축하는 게 중요합니다 LM 엔지니어링에 + +02:39.360 --> 02:46.140 +관해 아는 것에서 LM 엔지니어링의 달인이 되는 과정에서요 + +02:46.350 --> 02:48.330 +그럼 다음 시간에 뵙죠 diff --git a/week5/community-contributions/subtitles/srts/59668027/en_US.srt b/week5/community-contributions/subtitles/srts/59668027/en_US.srt new file mode 100755 index 0000000..5d0cd1b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59668027/en_US.srt @@ -0,0 +1,202 @@ +WEBVTT + +00:01.040 --> 00:04.070 +And so here we are at the home page for modal. + +00:04.100 --> 00:10.790 +At modal.com spelt model not not model which is confusing. + +00:11.330 --> 00:13.430 +So what is modal. + +00:13.460 --> 00:18.620 +So modal is a service that allows you to take code. + +00:18.650 --> 00:25.610 +And it really can be any code and deploy it and run it remotely so that you can run it from, say, + +00:25.640 --> 00:28.640 +JupyterLab or just from some Python code running on your box. 
+ +00:28.670 --> 00:36.560 +And what makes modal so powerful is that it makes it incredibly easy just to wrap up a function and + +00:36.560 --> 00:39.920 +have that be called so that it's almost transparent to you. + +00:39.920 --> 00:43.850 +It almost feels like you're calling a function that's running locally on your box. + +00:43.850 --> 00:46.820 +And in fact, it's been called out to the cloud. + +00:46.820 --> 00:48.740 +And the answer is coming back. + +00:48.860 --> 00:56.060 +So it gives you such transparency, and it allows you to work almost seamlessly between your box and + +00:56.060 --> 00:56.900 +the cloud. + +00:57.170 --> 01:03.170 +Now, a lot of the way it works is that it also allows you to package your code into an API, and have + +01:03.170 --> 01:08.120 +it running in such a way that people can use like Rest APIs to to call your code. + +01:08.150 --> 01:10.760 +We're not going to do that because we're working in Python. + +01:10.760 --> 01:12.230 +And this is all Python code. + +01:12.230 --> 01:18.230 +And it's even easier if all you need to do is call Python functions remotely on the cloud. + +01:18.440 --> 01:24.770 +Um, and whilst it is, as I say, mostly used by AI teams and for for AI as it's, as it's showing + +01:24.770 --> 01:27.290 +right here AI and data and machine learning. + +01:27.350 --> 01:32.780 +Um, it can also just be used for really any anything that you can write in Python, you can run on + +01:32.780 --> 01:33.560 +the cloud. + +01:33.680 --> 01:38.270 +Um, and so it's um, it's it's very versatile indeed. + +01:38.300 --> 01:44.510 +One other thing I'll mention that makes it very powerful is that you only pay for the compute time that + +01:44.510 --> 01:49.880 +you use, unlike services like, like, uh, you know, AWS or something. 
+
+01:50.120 --> 01:55.640
+Um, it's one of these, these things where it will spin up a server on demand, and if it's not used
+
+01:55.640 --> 02:01.700
+for a while, it will, it will, it will, uh, pause that server and you'll only pay for the CPU minutes
+
+02:01.940 --> 02:03.980
+that your server is running for.
+
+02:04.040 --> 02:11.430
+So, uh, also the good news is that if you sign up for a new account, you get $30 of free credit,
+
+02:11.430 --> 02:14.880
+and we won't even come close to spending that $30.
+
+02:15.030 --> 02:16.530
+In this class.
+
+02:16.590 --> 02:19.920
+So you don't need to be concerned about about charges.
+
+02:19.920 --> 02:26.700
+This will be a freebie should you get addicted and you end up using modal for all sorts of purposes,
+
+02:26.820 --> 02:29.370
+then I can't be held accountable for that.
+
+02:29.490 --> 02:36.120
+But you will get hopefully your $30 of, uh, free credits unless they take that away.
+
+02:36.120 --> 02:41.820
+But even if they do that, as you will see, it's not exactly an expensive proposition.
+
+02:42.240 --> 02:47.580
+So when you go to modal.com, you'll need to sign up the first time and create an account if assuming
+
+02:47.580 --> 02:48.480
+you don't have one already.
+
+02:48.480 --> 02:50.910
+And once you've done that, you'll end up in a dashboard.
+
+02:50.910 --> 02:53.280
+And this is my dashboard right here.
+
+02:53.460 --> 02:59.970
+Um, and you can see here my live usage, um, in the time that I've been using modal to prepare for
+
+02:59.970 --> 03:04.260
+this class that has been going on for a while and running it a number of times, I've managed to rack
+
+03:04.290 --> 03:08.580
+up a $5 bill, uh, out of my $30 of free credit.
+
+03:08.580 --> 03:14.960
+So I've barely scratched the surface, and even if I hadn't got the free credit, it would be a $5 price
+
+03:14.960 --> 03:16.270
+tag for everything I've done so far.
+
+03:16.300 --> 03:18.400
+So, as I say, it's it's cheap.
+ +03:18.700 --> 03:23.260 +Um, here I'm looking at at apps, which are the programs that I've deployed. + +03:23.260 --> 03:26.920 +And of course we're going to be building a pricer service later. + +03:26.950 --> 03:31.960 +And if I go into this, you'll see that you can see all the different deployments that I've made of + +03:31.960 --> 03:32.710 +this service. + +03:32.740 --> 03:35.230 +And this will all make more sense when we actually do it. + +03:35.410 --> 03:38.950 +And you can see it's been called apparently 116 times. + +03:39.160 --> 03:42.250 +Uh, and you can see various other things about it. + +03:42.550 --> 03:44.170 +Um, but really, that's it. + +03:44.200 --> 03:48.580 +The one of the nice things about modal is that whilst one does have tokens and keys and all the rest + +03:48.580 --> 03:52.660 +of it, you don't need to go in and set it up and write it down because it's going to do it all for + +03:52.660 --> 03:54.370 +you and it's going to be very easy. + +03:54.370 --> 03:59.170 +All you have to do is sign up with an account and then everything else will come. + +03:59.200 --> 04:01.030 +Will will follow naturally. + +04:01.390 --> 04:05.170 +Okay, so with that, once you've signed up with your modal account, do some clicking around. + +04:05.170 --> 04:08.320 +You can always read the there's there's really nice docs. + +04:08.320 --> 04:09.460 +There's this this guide. + +04:09.460 --> 04:11.620 +You don't need to read this because we'll be doing some of this. + +04:11.620 --> 04:13.270 +But it gives you a real sense. + +04:13.270 --> 04:17.830 +There's a hello World program as well, but we'll be doing that ourselves as you will see. + +04:17.860 --> 04:20.140 +We'll get to that in the next video. + +04:20.170 --> 04:21.430 +I will see you then. 
diff --git a/week5/community-contributions/subtitles/srts/59668027/ja_JP.srt b/week5/community-contributions/subtitles/srts/59668027/ja_JP.srt new file mode 100755 index 0000000..1649a5f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59668027/ja_JP.srt @@ -0,0 +1,163 @@ +WEBVTT + +00:01.040 --> 00:04.070 +というわけで、 モーダルのホームページです。 + +00:04.100 --> 00:10.790 +モーダルで。 comのスペルはmodelではなくmodelなので紛らわしい。 + +00:11.330 --> 00:13.430 +では、 モーダルとは何か。 + +00:13.460 --> 00:18.620 +つまりモーダルは、 コードを取ることができるサービスなのです。 + +00:18.650 --> 00:28.640 +そして、 どんなコードでもデプロイでき、 リモートで実行できるので、 例えばJupyterLabから実行することも、 自分のボックスで実行中のPythonコードから実行することもできる。 + +00:28.670 --> 00:36.560 +そして、 モーダルが非常に強力なのは、 関数をラップアップして呼び出すだけで、 それがほとんど透過的になるため、 + +00:36.560 --> 00:39.920 +信じられないほど簡単になるということです。 + +00:39.920 --> 00:43.850 +まるで、 自分のボックスでローカルに実行されている関数を呼び出しているような感覚だ。 + +00:43.850 --> 00:46.820 +そして実際、 クラウドに呼び出されている。 + +00:46.820 --> 00:48.740 +そして、 答えは戻ってくる。 + +00:48.860 --> 00:56.900 +そのため、 透明性が確保され、 ボックスとクラウドの間でほとんどシームレスに作業することができる。 + +00:57.170 --> 01:08.120 +この仕組みの多くは、 コードをAPIにパッケージ化し、 Rest APIを使ってコードを呼び出せるようにすることだ。 + +01:08.150 --> 01:10.760 +我々はPythonで仕事をしているので、 それをするつもりはない。 + +01:10.760 --> 01:12.230 +そしてこれはすべてPythonのコードだ。 + +01:12.230 --> 01:18.230 +クラウド上でPython関数をリモートで呼び出すだけなら、 さらに簡単だ。 + +01:18.440 --> 01:27.290 +AIチームや、 AIやデータ、 機械学習に使われることがほとんどです。 + +01:27.350 --> 01:33.560 +Pythonで書けるものであれば、 クラウド上で実行できるものであれば、 どんなものにも使えます。 + +01:33.680 --> 01:38.270 +うーん、 だから、 実に万能なんだ。 + +01:38.300 --> 01:49.880 +もうひとつ、 AWSのようなサービスとは違って、 使用したコンピュート・タイムに対してしか料金を支払わないという点も強力です。 + +01:50.120 --> 01:55.640 +オンデマンドでサーバーを立ち上げ、 しばらくサーバーが使用されなければ、 + +01:55.640 --> 02:03.980 +サーバーを一時停止し、 サーバーが稼動しているCPU分の料金だけを支払います。 + +02:04.040 --> 02:14.880 +だから、 あー、 また良いニュースは、 新規アカウントにサインアップすると、 30ドルの無料クレジットがもらえるということだ。 + +02:15.030 --> 02:16.530 +このクラスでは + +02:16.590 --> 02:19.920 +だから、 料金について心配する必要はない。 + +02:19.920 --> 02:29.370 +これは、 あなたが中毒になり、 あらゆる目的でモーダルを使用することになった場合、 私はその責任を負うことはできません。 + +02:29.490 --> 02:36.120 +でも、 
それが取り払われない限り、 うまくいけば30ドルの無料クレジットは手に入る。 + +02:36.120 --> 02:41.820 +しかし、 たとえそうだとしても、 おわかりのように、 決して高価な提案ではない。 + +02:42.240 --> 02:48.480 +そのため、 モーダルコムに行く際には、 初回にサインアップしてアカウントを作成する必要があります。 + +02:48.480 --> 02:50.910 +そうすれば、 ダッシュボードにたどり着くことができる。 + +02:50.910 --> 02:53.280 +そしてこれが私のダッシュボードだ。 + +02:53.460 --> 02:59.970 +しばらく続いているこのクラスの準備のためにモーダルを使っていて、 + +02:59.970 --> 03:04.260 +何度も実行しているうちに、 30ドルの無料クレジットのうち、 + +03:04.290 --> 03:08.580 +5ドルの請求が来た。 + +03:08.580 --> 03:16.270 +だから、 まだほとんど表面しか見ていない。 無料のクレジットを取得していなかったとしても、 これまでにやったことすべてに5ドルの値札がついていることになる。 + +03:16.300 --> 03:18.400 +だから言っておくが、 安いんだ。 + +03:18.700 --> 03:23.260 +ええと、 ここでアプリを見ているんだけど、 これは僕がデプロイしたプログラムなんだ。 + +03:23.260 --> 03:26.920 +そしてもちろん、 もっと値段の高いサービスを後で作るつもりだ。 + +03:26.950 --> 03:32.710 +この中に入ると、 私がこのサービスで行ったさまざまな展開を見ることができる。 + +03:32.740 --> 03:35.230 +そしてこのことは、 実際にやってみればもっと理解できるだろう。 + +03:35.410 --> 03:38.950 +そして、 116回呼ばれていることがわかるだろう。 + +03:39.160 --> 03:42.250 +あー、 他にもいろいろ見られるよ。 + +03:42.550 --> 03:44.170 +うーん、 でも本当にそれだけなんだ。 + +03:44.200 --> 03:48.580 +モーダルのいいところは、 トークンやキーやその他もろもろがある一方で、 + +03:48.580 --> 03:54.370 +わざわざセットアップしたり書き留めたりする必要がないことだ。 + +03:54.370 --> 03:59.170 +アカウントを持ってサインアップするだけで、 あとはすべて自分でやる。 + +03:59.200 --> 04:01.030 +意志は自然に後からついてくる。 + +04:01.390 --> 04:05.170 +それでは、 モーダルアカウントでサインアップしたら、 いくつかクリックしてみてください。 + +04:05.170 --> 04:08.320 +いつでも、 本当に素晴らしいドキュメントを読むことができる。 + +04:08.320 --> 04:09.460 +このガイドがある。 + +04:09.460 --> 04:11.620 +これを読む必要はない。 + +04:11.620 --> 04:13.270 +でも、 実感が湧く。 + +04:13.270 --> 04:17.830 +ハロー・ワールド・プログラムもあるが、 これは後述するように自分たちでやることになる。 + +04:17.860 --> 04:20.140 +それは次のビデオで。 + +04:20.170 --> 04:21.430 +それではまた。 diff --git a/week5/community-contributions/subtitles/srts/59668027/ko_KR.srt b/week5/community-contributions/subtitles/srts/59668027/ko_KR.srt new file mode 100755 index 0000000..7a9113a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59668027/ko_KR.srt @@ -0,0 +1,199 @@ +WEBVTT + +00:01.040 --> 00:04.070 +Modal의 홈페이지에 나와 있어요 + +00:04.100 --> 
00:10.790 +모달로요 모델은 모델이지 헷갈리네요 + +00:11.330 --> 00:13.430 +그럼 모듈이란 무엇일까요? + +00:13.460 --> 00:18.620 +Modal은 코드를 취할 수 있게 해주는 서비스예요 + +00:18.650 --> 00:25.610 +어떤 코드든 원격으로 배포할 수 있습니다 주피터랩이나 파이썬 + +00:25.640 --> 00:28.640 +코드로 실행할 수 있도록요 + +00:28.670 --> 00:36.560 +Modal이 강력한 이유는 함수를 정리하고 호출하는 게 아주 쉬워 거의 + +00:36.560 --> 00:39.920 +투명하게 보인다는 거죠 + +00:39.920 --> 00:43.850 +박스에 로컬로 실행되는 함수를 호출하는 것 같아요 + +00:43.850 --> 00:46.820 +사실 클라우드에 호출이 왔어요 + +00:46.820 --> 00:48.740 +답이 돌아오고 있어요 + +00:48.860 --> 00:56.060 +투명성을 제공하고 박스와 클라우드 사이에서 거의 매끄럽게 작업할 수 있게 + +00:56.060 --> 00:56.900 +해주죠 + +00:57.170 --> 01:03.170 +작동 원리의 대부분은 API에 코드를 패키지할 수 있게 해주는 겁니다 사람들이 + +01:03.170 --> 01:08.120 +REST API 같은 걸 이용해 코드를 호출할 수 있도록요 + +01:08.150 --> 01:10.760 +파이썬 으로 작업하고 있으니 그렇게 하지 않을 거예요 + +01:10.760 --> 01:12.230 +파이썬 으로 된 코드죠 + +01:12.230 --> 01:18.230 +클라우드에서 원격으로 파이썬 함수를 호출하는 것이 더 쉽고요 + +01:18.440 --> 01:24.770 +인공지능 팀에서 주로 사용하고 인공지능을 위한 건데 여기서 보시다시피 인공지능과 + +01:24.770 --> 01:27.290 +데이터 머신 러닝이죠 + +01:27.350 --> 01:32.780 +파이썬 으로 작성할 수 있는 모든 것에 사용될 수 있습니다 클라우드에서 실행할 수도 + +01:32.780 --> 01:33.560 +있고요 + +01:33.680 --> 01:38.270 +그래서 아주 다재다능해요 + +01:38.300 --> 01:44.510 +아주 강력하게 만드는 또 다른 것은 사용하는 컴퓨팅 시간에만 돈을 + +01:44.510 --> 01:49.880 +지불한다는 겁니다 AWS 같은 서비스와는 다르죠 + +01:50.120 --> 01:55.640 +주문형 서버를 스핀업 하는데 한동안 사용하지 않으면 + +01:55.640 --> 02:01.700 +서버를 일시 정지하고 서버가 실행 중인 CPU분만 지불하게 + +02:01.940 --> 02:03.980 +되죠 + +02:04.040 --> 02:11.430 +또 좋은 소식은 새 계좌를 개설하시면 30달러의 무료 카드가 제공되는데 우린 그 30달러를 + +02:11.430 --> 02:14.880 +쓸 일이 없다는 거죠 Get it + +02:15.030 --> 02:16.530 +이 수업에서요 + +02:16.590 --> 02:19.920 +그러니 기소 같은 건 걱정 안 해도 돼요 + +02:19.920 --> 02:26.700 +만약 중독되어서 모든 모듈을 사용하게 된다면, 무료로 제공해드릴 수 있습니다. 만약 사용하게 되더라도 + +02:26.820 --> 02:29.370 +저는 책임지지 않을 거예요. 
+ +02:29.490 --> 02:36.120 +그래도 30달러짜리 크레딧은 받을 수 있을 거예요 get get get이 취소되지 않는다면요 + +02:36.120 --> 02:41.820 +하지만 그렇게 해도 보시다시피 그리 비싼 제안은 아니에요 + +02:42.240 --> 02:47.580 +Modalcom에 가면 처음 등록하고 계정을 만들어야 해요 이미 없다고 + +02:47.580 --> 02:48.480 +가정하면요 + +02:48.480 --> 02:50.910 +그렇게 하면 대시보드에 나오죠 + +02:50.910 --> 02:53.280 +이게 제 대시보드예요 + +02:53.460 --> 02:59.970 +여기 제 라이브 사용이 보이시죠 이 수업을 준비하기 위해 Modal을 사용했는데 + +02:59.970 --> 03:04.260 +시간이 좀 걸렸고 여러 번 실행했어요 무료 신용카드 + +03:04.290 --> 03:08.580 +30달러에서 5달러가 모였네요 + +03:08.580 --> 03:14.960 +아직 수박 겉핥기 수준인데 무료 크레딧이 아니라도 지금까지 한 일에 5달러가 + +03:14.960 --> 03:16.270 +드는 거예요 + +03:16.300 --> 03:18.400 +그래서 싸게 먹히는 거죠 + +03:18.700 --> 03:23.260 +제가 배포한 프로그램인 앱을 보고 있는데요 + +03:23.260 --> 03:26.920 +물론 나중에 가격 서비스도 구축할 거예요 + +03:26.950 --> 03:32.710 +여기로 가면 이 서비스에 대해 만든 다양한 배포가 보이시죠 + +03:32.740 --> 03:35.230 +실제로 해보면 이해가 더 잘 될 거예요 + +03:35.410 --> 03:38.950 +보다시피 116번이나 호출됐어요 + +03:39.160 --> 03:42.250 +다른 것들도 볼 수 있어요 + +03:42.550 --> 03:44.170 +하지만 그게 다예요 + +03:44.200 --> 03:48.580 +방식의 장점 중 하나는 토큰과 키 등 모든 게 있지만 여러분이 가서 설정하고 + +03:48.580 --> 03:52.660 +적을 필요가 없다는 거죠 여러분이 다 알아서 할 테니까요 아주 + +03:52.660 --> 03:54.370 +쉬워요 REST + +03:54.370 --> 03:59.170 +계정만 하나 만들면 나머지는 알아서 다 들어와요 + +03:59.200 --> 04:01.030 +윌도 자연스럽게 따라올 거예요 + +04:01.390 --> 04:05.170 +자, 모듈 계정에 등록하고 나면 클릭을 좀 하세요 + +04:05.170 --> 04:08.320 +항상 읽을 수 있어요 정말 좋은 문서들이 있어요 + +04:08.320 --> 04:09.460 +이 가이드가 있어요 + +04:09.460 --> 04:11.620 +이걸 할 거니까 읽을 필요 없어요 + +04:11.620 --> 04:13.270 +하지만 진짜 느낌이 오죠 + +04:13.270 --> 04:17.830 +Hello World 프로그램도 있는데 그건 저희가 직접 할 거예요 + +04:17.860 --> 04:20.140 +Get it은 다음 비디오에서 알려드릴게요 + +04:20.170 --> 04:21.430 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59668181/en_US.srt b/week5/community-contributions/subtitles/srts/59668181/en_US.srt new file mode 100755 index 0000000..eb50a96 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59668181/en_US.srt @@ -0,0 +1,361 @@ +WEBVTT + +00:00.950 --> 00:06.440 +And so it gives me 
great pleasure to introduce to you the project that I've lined up for you this week. + +00:06.440 --> 00:08.810 +And boy, is it a meaty one. + +00:08.840 --> 00:11.930 +Here it is, the capstone project of the course. + +00:11.930 --> 00:14.120 +I've called it The Price is Right. + +00:14.120 --> 00:17.060 +It's something which brings together what we've been doing to date. + +00:17.090 --> 00:18.890 +The idea is we're going to build. + +00:18.890 --> 00:26.420 +The challenge that's been set for us is to build a platform which can look for deals being published + +00:26.420 --> 00:31.970 +on the internet, subscribe to RSS feeds so it can it can spot deals that have been published. + +00:31.970 --> 00:38.780 +When it finds a potential deal, a promising looking deal, it will read it, it will interpret it, + +00:38.780 --> 00:45.200 +and it will then use a number of LMS, including the proprietary one that we built last week to make + +00:45.200 --> 00:51.290 +its own estimate of how much that's worth, and if it finds a good opportunity, it will then automatically + +00:51.290 --> 00:58.310 +send a text message or a push notification to to you or to me telling us that there is a deal that we + +00:58.310 --> 01:03.830 +should know about and it will run autonomously all the time, just sitting around running in the background, + +01:03.830 --> 01:10.940 +and every so often you will get a push notification to say there's a new deal that's been posted online, + +01:10.940 --> 01:17.510 +where the model has decided that this price is a really great price because the item should cost a lot + +01:17.510 --> 01:18.170 +more. + +01:18.380 --> 01:24.110 +And I hope you agree, if we can pull that off, if we can build that platform, we've built something + +01:24.110 --> 01:26.720 +special that is an advanced platform. 
+
+01:26.720 --> 01:31.190
+It's something which brings together a lot of different bits of functionality, and it's something that
+
+01:31.190 --> 01:36.350
+that hopefully I will be proud of, and I hope you'll be proud of it too, when it's built.
+
+01:36.770 --> 01:38.780
+There are going to be seven agents.
+
+01:38.780 --> 01:40.340
+I already said it, but I say it again.
+
+01:40.370 --> 01:42.650
+Seven agents are going to collaborate.
+
+01:42.800 --> 01:45.500
+Um, not all of them are built on LLMs.
+
+01:45.500 --> 01:50.780
+Some of them are more sort of Python processes, but they're sort of seven pieces to the puzzle that
+
+01:50.780 --> 01:52.010
+are collaborating.
+
+01:52.280 --> 02:01.430
+We will use GPT four or GPT four mini to take information from RSS feeds and parse it and understand
+
+02:01.430 --> 02:05.060
+it and turn it into something which is a potential deal.
+
+02:05.060 --> 02:11.630
+We will use our frontier busting model that outperforms GPT four.
+
+02:11.840 --> 02:15.500
+And Claude will use it to estimate how much things are worth.
+
+02:15.500 --> 02:16.520
+But that's not all.
+
+02:16.520 --> 02:18.650
+We're going to use other models as well.
+
+02:18.650 --> 02:23.960
+We are going to build another Rag pipeline, another rag based model.
+
+02:24.140 --> 02:26.300
+Um, but this time we're not going to use LangChain.
+
+02:26.300 --> 02:27.470
+We're going to roll our own.
+
+02:27.470 --> 02:29.420
+And it's going to be very, very easy.
+
+02:29.510 --> 02:37.580
+Um, but it's going to use all 400,000 products that we, we had taken from the Amazon scrape before.
+
+02:37.580 --> 02:45.680
+We're going to put all of them in a mega Chroma database and use that to add context as part of our Rag
+
+02:45.680 --> 02:48.080
+pipeline to a frontier model.
+
+02:48.170 --> 02:51.020
+And you can start wondering right now, maybe you already thought of this.
+ +02:51.020 --> 02:59.330 +Maybe you're already wondering whether or not that a a frontier model that's armed with similar prices + +02:59.330 --> 03:05.670 +of similar products, whether that is going to outperform the model that we fine tuned last week. + +03:05.700 --> 03:06.510 +You'll find out. + +03:06.510 --> 03:07.320 +We'll see. + +03:07.470 --> 03:11.670 +But we're going to have both models working together to try and get the best possible price. + +03:11.670 --> 03:14.940 +And another model as well as as all will be revealed. + +03:15.390 --> 03:17.820 +So it's going to be it's going to be a huge week. + +03:17.850 --> 03:22.080 +The most important point, of course, was this capstone project is designed to bring it all together + +03:22.080 --> 03:26.070 +in a very satisfying way, in a way that's going to feel tangible and real. + +03:26.070 --> 03:32.160 +But the real point is that this gives you a chance to revise and everything that we've learned and also + +03:32.190 --> 03:39.480 +just build expertise, build experience, have some real hands on work at making production caliber + +03:39.480 --> 03:40.290 +models. + +03:40.380 --> 03:42.510 +And that's that's really the main point of this. + +03:42.540 --> 03:47.910 +We're doing it in a fun context, something that's going to actually send you push notifications when + +03:47.910 --> 03:54.510 +there's a when there's a useful product, but it's going to be about learning and solidifying the learning + +03:54.510 --> 03:57.930 +that we've done over the course of the last seven weeks. + +03:59.550 --> 04:05.670 +But let me just tell you for a moment, the architecture of our agent platform, we're going to have + +04:05.670 --> 04:06.780 +a user interface. + +04:06.780 --> 04:09.060 +Of course, it's going to be built in Gradio. 
+ +04:09.390 --> 04:15.840 +It's going to be using an agent framework that we'll write, which will just be a simple kind of environment, + +04:15.930 --> 04:21.150 +which will have memory so that the agents can know what they've recommended in the past, and it will + +04:21.150 --> 04:24.930 +support things like logging, so we can understand what's going on in our framework. + +04:25.260 --> 04:27.900 +There will be an agent responsible for planning. + +04:27.900 --> 04:31.770 +It's going to be coordinating activities across the other agents. + +04:31.770 --> 04:33.960 +And then we'll have these agents. + +04:33.960 --> 04:38.160 +We'll have a scanner agent that is going to identify promising deals. + +04:38.160 --> 04:40.590 +By looking at RSS feeds. + +04:40.620 --> 04:46.650 +We're going to have something I'm calling the ensemble agent, which is something there is a technical + +04:46.680 --> 04:47.910 +term ensemble. + +04:47.910 --> 04:52.560 +When you when you bring together multiple models, this agent is going to be responsible for bringing + +04:52.590 --> 04:57.240 +together different estimating models that will estimate the price of products. + +04:57.720 --> 05:01.830 +And we will have a messaging agent that will send us push notifications. + +05:01.890 --> 05:05.250 +Now this is just the high level architecture because there are more agents than this. + +05:05.250 --> 05:10.740 +Because of course, that ensemble agent is itself going to collaborate with three other agents. + +05:10.740 --> 05:13.230 +That will be our three estimating agents. + +05:13.410 --> 05:16.230 +But I'm not showing them on this diagram because that would be too cluttered. + +05:16.230 --> 05:19.440 +But I will do, I'm sure, in the future when we get there. + +05:19.530 --> 05:23.610 +So this is the high level architecture to give you a sense of what we're going to build. + +05:23.760 --> 05:28.140 +And for today, we're going to focus on just one of these. 
+ +05:28.170 --> 05:30.210 +And we're not going to look much at this architecture. + +05:30.210 --> 05:35.340 +We're just going to be talking about taking the model we built last time and putting it up on the internet + +05:35.340 --> 05:38.130 +using this wonderful service called modal. + +05:38.850 --> 05:45.030 +Just before we talk about modal, one more thing I wanted to mention that as we're moving now towards + +05:45.030 --> 05:52.050 +writing more serious code, JupyterLab is a wonderful platform for being iterative and experimental + +05:52.050 --> 05:55.170 +and quickly turning out different solutions. + +05:55.170 --> 06:02.310 +But as as we move along the path from R&D into something which is ready to be productionized, there + +06:02.340 --> 06:06.270 +are some aspects of the coding that will be changing. + +06:06.600 --> 06:10.560 +For a start, I'm going to start to have type hints in some of the code. + +06:10.740 --> 06:13.470 +Maybe something that you're super familiar with already. + +06:13.470 --> 06:15.660 +If not, I'll explain it and it will. + +06:15.870 --> 06:16.860 +You'll see why. + +06:16.860 --> 06:22.800 +And it's something which is a good, best practice when building this kind of production caliber code. + +06:23.280 --> 06:25.830 +Uh, also going to be writing some more comments. + +06:25.860 --> 06:29.610 +It's something that when you've got a nice Jupyter notebook and you've got like lots of time to put + +06:29.640 --> 06:34.800 +text and stuff and explain it, maybe you don't need quite the same kind of commenting approach, but + +06:34.800 --> 06:40.080 +now, at least at the at the method at the function level will put comments in there. + +06:40.500 --> 06:44.190 +And logging is a good thing to do as well, and we will do that. + +06:44.400 --> 06:49.800 +Now there's one other thing which is a very good best practice, which is to write unit tests. 
+ +06:49.830 --> 06:54.570 +Uh, it's very important, of course, to write good unit tests for code that's going into production. + +06:54.570 --> 06:58.170 +And I'm not going to be doing that because I'm a cowboy. + +06:58.290 --> 07:00.990 +Uh, and, uh, yeah, I certainly should do that. + +07:01.020 --> 07:03.960 +But what I will say is that it's a great exercise. + +07:03.960 --> 07:09.580 +Uh, should you wish to go in and write unit tests behind some of the code that we are productionizing. + +07:09.580 --> 07:10.600 +That will be wonderful. + +07:10.630 --> 07:11.920 +I hugely appreciate it. + +07:11.920 --> 07:18.370 +And if you then wanted of course to do a git push, I will merge it in with delight and will give you, + +07:18.400 --> 07:23.350 +of course, full attribution and credit for writing the unit tests, which is something that certainly + +07:23.350 --> 07:24.910 +I should I should be doing. + +07:25.210 --> 07:27.850 +Um, and it's a very much a good best practice. + +07:29.020 --> 07:30.160 +Okay. + +07:30.250 --> 07:36.700 +Uh, the, the brings me to, to introducing modal, which is the platform that we're going to be using + +07:36.730 --> 07:37.510 +today. + +07:37.540 --> 07:44.110 +Uh, modal, uh, a platform which allows us to run code remotely on a server, and you only pay for + +07:44.110 --> 07:45.460 +the time that you use. + +07:45.490 --> 07:47.410 +It's a very powerful platform. + +07:47.410 --> 07:52.930 +It's it's become heavily used in the AI world for deploying production models. + +07:52.960 --> 07:58.900 +Um, but it can also be used just for, for for many different purposes, just for CPU usage to deploy + +07:58.900 --> 08:01.960 +any function onto the cloud, as we will see. + +08:02.260 --> 08:06.730 +So without further ado, in the next video, we're going to go to modal and give it a try. + +08:06.760 --> 08:07.660 +I'll see you there. 
diff --git a/week5/community-contributions/subtitles/srts/59668181/ja_JP.srt b/week5/community-contributions/subtitles/srts/59668181/ja_JP.srt new file mode 100755 index 0000000..caf529d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59668181/ja_JP.srt @@ -0,0 +1,292 @@ +WEBVTT + +00:00.950 --> 00:06.440 +というわけで、 今週はこの企画をご紹介できることを大変嬉しく思う。 + +00:06.440 --> 00:08.810 +そして、 その肉付きの良さ。 + +00:08.840 --> 00:11.930 +これがコースのキャップストーン・プロジェクトだ。 + +00:11.930 --> 00:14.120 +私はそれを『プライス・イズ・ライト』と呼んでいる。 + +00:14.120 --> 00:17.060 +私たちがこれまでやってきたことをまとめるものだ。 + +00:17.090 --> 00:18.890 +私たちは建設するということだ。 + +00:18.890 --> 00:31.970 +私たちに課せられた課題は、 インターネット上で公開されている取引を探し、 RSSフィードを購読して、 公開された取引を見つけることができるプラットフォームを構築することです。 + +00:31.970 --> 00:38.780 +潜在的な取引、 有望そうな取引を見つけると、 + +00:38.780 --> 01:18.170 +それを読み、 解釈し、 先週私たちが構築した独自のものを含む多くのLMSを使用して、 それがどれくらいの価値があるかを独自に推定します。 + +01:18.380 --> 01:26.720 +そして、 もし我々がそれを成し遂げることができれば、 そしてそのプラットフォームを構築することができれば、 我々は先進的なプラットフォームという特別なものを構築したことになる。 + +01:26.720 --> 01:31.190 +多くの異なる機能の断片を統合したもので、 + +01:31.190 --> 01:36.350 +願わくば私が誇れるものでありたい。 + +01:36.770 --> 01:38.780 +エージェントは7人になる。 + +01:38.780 --> 01:40.340 +すでに言ったが、 もう一度言う。 + +01:40.370 --> 01:42.650 +7人のエージェントが協力する。 + +01:42.800 --> 01:45.500 +ええと、 すべてがLMSで構築されているわけではありません。 + +01:45.500 --> 01:52.010 +Pythonのプロセスのようなものもあるが、 パズルの7つのピースが協力し合っているようなものだ。 + +01:52.280 --> 02:01.430 +GPT4またはGPT4ミニを使ってRSSフィードから情報を取得し、 それを解析して理解し、 + +02:01.430 --> 02:05.060 +潜在的な取引に変える。 + +02:05.060 --> 02:11.630 +GPT4よりも優れたフロンティア・バスト・モデルを使用する。 + +02:11.840 --> 02:15.500 +そしてクロードはそれを使って、 物の価値を推し量る。 + +02:15.500 --> 02:16.520 +しかし、 それだけではない。 + +02:16.520 --> 02:18.650 +他のモデルも使うつもりだ。 + +02:18.650 --> 02:23.960 +私たちはまた別のラグ・パイプライン、 別のラグ・ベースのモデルを作るつもりだ。 + +02:24.140 --> 02:26.300 +うーん、 でも今回はラングチェーンは使わないよ。 + +02:26.300 --> 02:27.470 +自分たちで巻くんだ。 + +02:27.470 --> 02:29.420 +とても簡単なことだ。 + +02:29.510 --> 02:37.580 +でも、 以前アマゾンのスクレイプから取り出した40万点すべての商品を使うんだ。 + +02:37.580 --> 02:48.080 +そのすべてをメガCromaデータベースに入れ、 
それを使ってフロンティアモデルへのラグ・パイプラインの一部としてコンテキストを追加する。 + +02:48.170 --> 02:51.020 +もしかしたら、 すでに考えていたかもしれない。 + +02:51.020 --> 02:59.330 +同じような商品の同じような価格を武器にしたフロンティアモデルが、 先週微調整したモデルを上回るのかどうか、 + +02:59.330 --> 03:05.670 +すでに疑問に思っているかもしれない。 + +03:05.700 --> 03:06.510 +今にわかるさ。 + +03:06.510 --> 03:07.320 +今にわかるよ。 + +03:07.470 --> 03:11.670 +しかし、 可能な限り最良の価格を実現するために、 両モデルに協力してもらうつもりだ。 + +03:11.670 --> 03:14.940 +そして、 別のモデルもすべて明らかにされる。 + +03:15.390 --> 03:17.820 +だから、 大きな1週間になるだろう。 + +03:17.850 --> 03:22.080 +もちろん、 最も重要なポイントは、 このキャップストーン・プロジェクトが、 具体的でリアルに感じられる方法で、 + +03:22.080 --> 03:26.070 +非常に満足のいく形ですべてをまとめるように設計されていることだった。 + +03:26.070 --> 03:32.160 +しかし、 本当に重要なのは、 この機会に学んだことをすべて修正し、 専門知識を身につけ、 + +03:32.190 --> 03:40.290 +経験を積み、 実際に手を動かしてプロレベルのモデルを作ることができるということだ。 + +03:40.380 --> 03:42.510 +そして、 それがこの件の最大のポイントなんだ。 + +03:42.540 --> 03:47.910 +私たちは楽しい文脈の中でそれを行っており、 便利な製品があったときに実際にプッシュ通知を送るようなものだが、 + +03:47.910 --> 03:57.930 +この7週間の間に私たちが行った学びを固めるためのものだ。 + +03:59.550 --> 04:06.780 +でも、 ちょっとだけお話しさせてください。 私たちのエージェント・プラットフォームのアーキテクチャは、 ユーザー・インターフェースを持つことになっています。 + +04:06.780 --> 04:09.060 +もちろん、 グラディオで建設される。 + +04:09.390 --> 04:15.840 +エージェントが過去に何を推薦したかを知ることができるようにメモリーを持ち、 + +04:15.930 --> 04:24.930 +ロギングなどをサポートし、 フレームワークで何が起こっているかを理解できるようにする。 + +04:25.260 --> 04:27.900 +プランニングを担当するエージェントがいる。 + +04:27.900 --> 04:31.770 +他のエージェントの活動を調整することになる。 + +04:31.770 --> 04:33.960 +そして、 このエージェントたちがいる。 + +04:33.960 --> 04:38.160 +有望な案件を見極めるスキャナー・エージェントがいる。 + +04:38.160 --> 04:40.590 +RSSフィードを見ることによって。 + +04:40.620 --> 04:47.910 +アンサンブル・エージェントと呼んでいるもので、 専門用語でアンサンブルというものがあるんだ。 + +04:47.910 --> 04:57.240 +複数のモデルを組み合わせる場合、 このエージェントは、 製品の価格を見積もるさまざまな見積もりモデルをまとめる役割を担うことになる。 + +04:57.720 --> 05:01.830 +そして、 プッシュ通知を送ってくれるメッセージング・エージェントを用意する。 + +05:01.890 --> 05:05.250 +エージェントはこれ以外にもあるからだ。 + +05:05.250 --> 05:10.740 +もちろん、 そのアンサンブル・エージェントは、 他の3人のエージェントと協力することになるからだ。 + +05:10.740 --> 05:13.230 +これが3人の推定代理人だ。 + +05:13.410 --> 05:16.230 +しかし、 この図ではあまりにごちゃごちゃしてしまうので表示しない。 + +05:16.230 --> 05:19.440 +でも、 将来、 
そこに着いたら、 きっとそうするよ。 + +05:19.530 --> 05:23.610 +これが、 これから私たちが作ろうとしているものを理解してもらうためのハイレベル・アーキテクチャだ。 + +05:23.760 --> 05:28.140 +そして今日は、 そのうちのひとつに焦点を当てる。 + +05:28.170 --> 05:30.210 +そして、 我々はこのアーキテクチャーをあまり見るつもりはない。 + +05:30.210 --> 05:38.130 +今回は、 前回作ったモデルを、 モーダルという素晴らしいサービスを使ってインターネットにアップする話をするだけです。 + +05:38.850 --> 05:45.030 +モーダルについて話す前に、 もうひとつ言っておきたいことがあります。 私たちは今、 より本格的なコードを書こうとしていますが、 + +05:45.030 --> 05:55.170 +JupyterLabは反復的かつ実験的で、 さまざまな解決策を素早く生み出すための素晴らしいプラットフォームです。 + +05:55.170 --> 06:06.270 +しかし、 研究開発から生産化に向けた道筋が進むにつれて、 コーディングにも変わっていく部分がある。 + +06:06.600 --> 06:10.560 +手始めに、 コードの一部に型ヒントを入れることにする。 + +06:10.740 --> 06:13.470 +もしかしたら、 すでによく知っていることかもしれない。 + +06:13.470 --> 06:15.660 +そうでなければ、 私が説明すればそうなる。 + +06:15.870 --> 06:16.860 +その理由がわかるだろう。 + +06:16.860 --> 06:22.800 +そして、 このようなプロダクションレベルのコードを構築する際には、 ベストプラクティスとなるものだ。 + +06:23.280 --> 06:25.830 +あと、 もう少しコメントを書くつもりだ。 + +06:25.860 --> 06:29.610 +素敵なJupyterノートブックを持っていて、 テキストを入れたり説明したりする時間がたくさんあるときには、 + +06:29.640 --> 06:40.080 +同じようなコメント・アプローチは必要ないかもしれませんが、 今は少なくともメソッドや関数レベルではコメントを入れることができます。 + +06:40.500 --> 06:44.190 +伐採もいいことだし、 そうするつもりだ。 + +06:44.400 --> 06:49.800 +もうひとつ、 非常に良いベストプラクティスがある。 + +06:49.830 --> 06:54.570 +ええと、 もちろん、 本番に使うコードに良い単体テストを書くことはとても重要だ。 + +06:54.570 --> 06:58.170 +カウボーイだからといって、 そんなことはしない。 + +06:58.290 --> 07:00.990 +ああ、 そうだね、 確かにそうすべきだね。 + +07:01.020 --> 07:03.960 +ただ、 私が言いたいのは、 素晴らしい練習だということだ。 + +07:03.960 --> 07:09.580 +もしあなたが、 私たちがプロダクション化しているコードの背後にあるユニットテストを書きたいと望むなら、 どうぞ。 + +07:09.580 --> 07:10.600 +それは素晴らしいことだ。 + +07:10.630 --> 07:11.920 +とても感謝している。 + +07:11.920 --> 07:18.370 +そして、 もしあなたがgit pushをしたいのであれば、 私はそれをdelightと一緒にマージし、 + +07:18.400 --> 07:24.910 +もちろん、 あなたがユニットテストを書いたことに完全な帰属とクレジットを与えます。 + +07:25.210 --> 07:27.850 +それはとても良いベストプラクティスだよ。 + +07:29.020 --> 07:30.160 +オーケー。 + +07:30.250 --> 07:37.510 +ええと、 モーダルの紹介になるんだけど、 これが今日使うプラットフォームなんだ。 + +07:37.540 --> 07:45.460 +モーダルは、 サーバー上でリモートでコードを実行できるプラットフォームで、 使用した時間分だけ料金を支払う。 + +07:45.490 --> 07:47.410 +非常に強力なプラットフォームだ。 + 
+07:47.410 --> 07:52.930 +AIの世界では、 プロダクションモデルの展開に多用されている。 + +07:52.960 --> 07:58.900 +しかし、 これから説明するように、 クラウド上にあらゆる機能を展開するためにCPUを使用したり、 + +07:58.900 --> 08:01.960 +さまざまな目的で使用することもできる。 + +08:02.260 --> 08:06.730 +それでは早速、 次のビデオではモーダルを試してみましょう。 + +08:06.760 --> 08:07.660 +そこで会おう diff --git a/week5/community-contributions/subtitles/srts/59668181/ko_KR.srt b/week5/community-contributions/subtitles/srts/59668181/ko_KR.srt new file mode 100755 index 0000000..6ad674f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59668181/ko_KR.srt @@ -0,0 +1,352 @@ +WEBVTT + +00:00.950 --> 00:06.440 +그래서 이번 주에 여러분께 소개할 프로젝트를 소개하게 되어 기쁘군요 + +00:06.440 --> 00:08.810 +정말 두툼한 고기예요 + +00:08.840 --> 00:11.930 +이 코스 최고의 프로젝트예요 + +00:11.930 --> 00:14.120 +가격 맞히기 게임이에요 + +00:14.120 --> 00:17.060 +지금까지 해온 일을 하나로 묶어주는 거죠 + +00:17.090 --> 00:18.890 +일단 집을 지을 거예요 + +00:18.890 --> 00:26.420 +우리에게 주어진 과제는 인터넷에 게시된 거래를 찾고 RSS 피드를 구독할 수 있는 + +00:26.420 --> 00:31.970 +플랫폼을 만드는 겁니다 게시된 거래를 스팟팅할 수 있도록요 + +00:31.970 --> 00:38.780 +잠재적 거래를 찾으면 유망해 보이는 거래를요 그걸 읽고 해석해 다수의 LMS를 + +00:38.780 --> 00:45.200 +사용합니다 지난주에 우리가 만든 소유권을 포함해서요 그게 얼마나 가치 있는지 + +00:45.200 --> 00:51.290 +추정하기 위해서요 그리고 좋은 기회를 찾으면 자동적으로 텍스트 메시지나 + +00:51.290 --> 00:58.310 +푸시 알림을 여러분이나 제게 보냅니다 우리가 알아야 할 거래가 있다고 알려주는 거죠 + +00:58.310 --> 01:03.830 +자율적으로 항상 실행될 겁니다 백그라운드에서 실행되는 거죠 가끔 + +01:03.830 --> 01:10.940 +푸시 알림을 받게 됩니다 온라인에 포스팅된 새 거래가 있다는 거죠 모델이 이 가격이 + +01:10.940 --> 01:18.170 +정말 좋다고 결정하는 거죠 아이템은 훨씬 더 비싸야 하니까요 + +01:18.380 --> 01:24.110 +동의하시길 바랍니다 우리가 해낼 수 있다면 플랫폼을 구축할 수 있다면 특별한 + +01:24.110 --> 01:26.720 +걸 구축한 겁니다 고급 플랫폼이죠 + +01:26.720 --> 01:31.190 +다양한 기능성 요소들을 한데 합친 것입니다. 제가 자랑스러워할 + +01:31.190 --> 01:36.350 +만한 것이었으면 좋겠고 여러분도 자랑스러워 하실 수 있었으면 좋겠어요. 
+ +01:36.770 --> 01:38.780 +요원은 7명이 될 거예요 + +01:38.780 --> 01:40.340 +아까도 말했지만 다시 말할게요 + +01:40.370 --> 01:42.650 +7명의 요원이 협력할 거예요 + +01:42.800 --> 01:45.500 +전부 LMS에 설치된 건 아니에요 + +01:45.500 --> 01:50.780 +파이썬 을 사용하는 과정도 있지만 퍼즐의 일곱 조각 같은 것들이 서로 협력하고 + +01:50.780 --> 01:52.010 +있어요 + +01:52.280 --> 02:01.430 +GPT 4, 또는 GPT for 미니를 이용해 RSS 피드에서 정보를 취하고 분석하고 이해한 + +02:01.430 --> 02:05.060 +후 잠재적 거래로 바꿀 거예요 + +02:05.060 --> 02:11.630 +개척지 급습 모델을 쓸 거예요 GPT 4보다 뛰어나죠 + +02:11.840 --> 02:15.500 +클로드는 그걸로 물건의 가치를 추정할 거예요 + +02:15.500 --> 02:16.520 +그게 다가 아니에요 + +02:16.520 --> 02:18.650 +다른 모델도 사용할 거예요 + +02:18.650 --> 02:23.960 +또 다른 래그 파이프라인을 만들 거예요 래그 기반 모델이죠 + +02:24.140 --> 02:26.300 +하지만 이번에는 랑그 체인을 쓰지 않을 거예요 + +02:26.300 --> 02:27.470 +직접 말 거예요 + +02:27.470 --> 02:29.420 +아주 쉬울 거예요 + +02:29.510 --> 02:37.580 +하지만 400,000가지 제품을 모두 사용할 거예요 전에 아마존 부족에게서 가져온 것들이죠 + +02:37.580 --> 02:45.680 +전부 메가 크로마 데이터베이스에 넣고 개척자 모델의 래그 파이프라인에 컨텍스트를 + +02:45.680 --> 02:48.080 +추가할 거예요 + +02:48.170 --> 02:51.020 +이미 생각해 두신 게 아닌가 궁금해지실 거예요 + +02:51.020 --> 02:59.330 +여러분은 이미 궁금하실 겁니다 비슷한 제품의 비슷한 가격으로 비슷한 제품을 제공하는 새로운 + +02:59.330 --> 03:05.670 +모델이 지난주에 우리가 미세 조정했던 모델을 능가할 수 있을지 말이죠 + +03:05.700 --> 03:06.510 +알게 될 거예요 + +03:06.510 --> 03:07.320 +두고 봐야죠 + +03:07.470 --> 03:11.670 +하지만 두 모델이 협업해서 최대한 좋은 가격을 얻도록 노력할 거예요 Get up! 
+ +03:11.670 --> 03:14.940 +다른 모델도 공개할 거예요 + +03:15.390 --> 03:17.820 +아주 중요한 한 주가 될 거예요 + +03:17.850 --> 03:22.080 +가장 중요한 건 이 캡스톤 프로젝트가 모든 걸 하나로 합치는 아주 만족스러운 + +03:22.080 --> 03:26.070 +방법으로 설계됐다는 거예요 손에 잡히고 진짜처럼 느껴질 수 있게요 + +03:26.070 --> 03:32.160 +하지만 핵심은 우리가 배운 모든 것을 수정할 기회가 생긴다는 + +03:32.190 --> 03:40.290 +겁니다 전문성과 경험을 쌓고 생산 수준의 모델을 직접 만들 수 있죠 + +03:40.380 --> 03:42.510 +그게 이 강의의 핵심이죠 + +03:42.540 --> 03:47.910 +재미있는 컨텍스트에서 하고 있어요 푸시 알림을 실제로 보낼 + +03:47.910 --> 03:54.510 +뭔가가 있죠 유용한 제품이 있을 때요 하지만 학습에 관한 겁니다 지난 7주 동안 + +03:54.510 --> 03:57.930 +우리가 한 학습을 확고히 하는 거죠 + +03:59.550 --> 04:05.670 +잠시 말씀드리자면 에이전트 플랫폼의 아키텍처는 사용자 인터페이스를 갖게 + +04:05.670 --> 04:06.780 +될 거예요 + +04:06.780 --> 04:09.060 +물론 그라디오에서 지을 거예요 + +04:09.390 --> 04:15.840 +우리가 작성할 에이전트 프레임워크를 사용할 겁니다 단순한 종류의 환경이죠 메모리가 있어 + +04:15.930 --> 04:21.150 +에이전트는 과거에 권장했던 걸 알 수 있어요 로깅 같은 것도 지원하고요 그래서 + +04:21.150 --> 04:24.930 +프레임워크에서 무슨 일이 벌어지는지 알 수 있죠 + +04:25.260 --> 04:27.900 +계획을 담당하는 요원이 있을 거예요 + +04:27.900 --> 04:31.770 +다른 요원들 간의 활동을 조율하는 거죠 + +04:31.770 --> 04:33.960 +그럼 에이전트들이 생기죠 + +04:33.960 --> 04:38.160 +스캐너 요원이 유망한 거래를 확인할 거예요 + +04:38.160 --> 04:40.590 +RSS 피드를 보면요 + +04:40.620 --> 04:46.650 +앙상블 에이전트라는 걸 만들 거예요 기술적인 앙상블이라고 + +04:46.680 --> 04:47.910 +부르는 거죠 + +04:47.910 --> 04:52.560 +여러 모델을 조합할 때 이 에이전트는 제품의 가격을 + +04:52.590 --> 04:57.240 +예측할 수 있는 다양한 모델을 조합하는 역할을 하죠 + +04:57.720 --> 05:01.830 +그리고 푸시 알림을 보내주는 메시징 에이전트가 있어요 + +05:01.890 --> 05:05.250 +이건 단지 높은 수준의 아키텍처입니다 이것보다 에이전트가 더 많기 때문이죠 + +05:05.250 --> 05:10.740 +앙상블 에이전트 자체가 다른 세 명의 에이전트와 협업하게 되니까요 + +05:10.740 --> 05:13.230 +그게 우리 요원 3명이 될 거요 + +05:13.410 --> 05:16.230 +하지만 이 다이어그램에선 안 보여줄 거예요 너무 어수선해질 테니까요 + +05:16.230 --> 05:19.440 +하지만 언젠가는 꼭 할 거예요 get get get it + +05:19.530 --> 05:23.610 +우리가 뭘 만들 건지 감을 잡기 위한 고급 아키텍처예요 + +05:23.760 --> 05:28.140 +오늘은 이 중 하나에 집중할 거예요 + +05:28.170 --> 05:30.210 +이 아키텍처는 많이 보지 않을 거예요 + +05:30.210 --> 05:35.340 +지난 시간에 만든 모델을 인터넷에 올리는 것에 대해 얘기할 겁니다 Modal이라는 + +05:35.340 --> 05:38.130 +멋진 서비스를 
이용해서요 + +05:38.850 --> 05:45.030 +모드 얘기를 하기 전에 한 가지 더 언급하고 싶은 게 있어요 이제 더 진지한 코드를 작성하는 + +05:45.030 --> 05:52.050 +쪽으로 나아가고 있으니 JupyterLab은 훌륭한 플랫폼이에요 반복적이고 실험적이며 + +05:52.050 --> 05:55.170 +다양한 솔루션을 빠르게 내놓을 수 있죠 + +05:55.170 --> 06:02.310 +하지만 연구 개발에서 생산 준비가 된 제품으로 발전할 때 코딩에 + +06:02.340 --> 06:06.270 +몇 가지 변화가 있을 거예요 + +06:06.600 --> 06:10.560 +우선, 코드에 힌트를 입력해 보도록 하죠 + +06:10.740 --> 06:13.470 +이미 아주 익숙한 것일 수도 있죠 + +06:13.470 --> 06:15.660 +안 되면 제가 설명할게요 + +06:15.870 --> 06:16.860 +이유를 알게 될 거예요 + +06:16.860 --> 06:22.800 +이런 생산 규격 코드를 만들 때 좋은 최선의 관행이죠 + +06:23.280 --> 06:25.830 +댓글도 더 달 거예요 + +06:25.860 --> 06:29.610 +Jupyter 노트북이 있으면 텍스트나 + +06:29.640 --> 06:34.800 +설명을 넣을 시간이 많죠 주석 처리 방식은 다를지 몰라도 + +06:34.800 --> 06:40.080 +적어도 함수 수준의 메서드에서 주석을 달아요 + +06:40.500 --> 06:44.190 +로깅도 좋은 일이고 앞으로도 그럴 거예요 + +06:44.400 --> 06:49.800 +다른 것도 있어요 아주 최선의 관행으로 단위 테스트를 작성하는 거죠 + +06:49.830 --> 06:54.570 +프로덕션으로 가는 코드에 대해 좋은 단위 테스트를 작성하는 건 물론 아주 중요하죠 + +06:54.570 --> 06:58.170 +전 카우보이라서 그런 짓은 안 해요 + +06:58.290 --> 07:00.990 +네, 당연히 그래야죠 + +07:01.020 --> 07:03.960 +하지만 좋은 운동이라고 말씀드릴 수 있어요 + +07:03.960 --> 07:09.580 +우리가 제작하고 있는 일부 코드 뒤에서 단위 테스트를 작성하고 싶다면요 + +07:09.580 --> 07:10.600 +그럼 정말 좋죠 + +07:10.630 --> 07:11.920 +정말 감사해요 + +07:11.920 --> 07:18.370 +물론 Git push를 원하신다면 기쁘게 합병해 단위 테스트를 작성하는 데에 + +07:18.400 --> 07:23.350 +대한 어휘 기여도와 크레딧을 모두 알려드리겠습니다 당연히 제가 + +07:23.350 --> 07:24.910 +해야 할 일이죠 + +07:25.210 --> 07:27.850 +최선의 관행이라고 할 수 있죠 + +07:29.020 --> 07:30.160 +네 + +07:30.250 --> 07:37.510 +모듈을 소개하게 됐네요 오늘 사용할 플랫폼이죠 + +07:37.540 --> 07:44.110 +모달은 서버에서 원격으로 코드를 실행할 수 있는 플랫폼으로 사용 시간당만 + +07:44.110 --> 07:45.460 +지불할 수 있죠 + +07:45.490 --> 07:47.410 +아주 강력한 플랫폼이죠 + +07:47.410 --> 07:52.930 +인공지능 세계에서 프로덕션 모델 배포에 많이 사용되고 있죠 + +07:52.960 --> 07:58.900 +하지만 다양한 목적에 사용될 수도 있어요 CPU 사용이나 클라우드에 + +07:58.900 --> 08:01.960 +어떤 함수든 배포하는 데요 + +08:02.260 --> 08:06.730 +그럼 지체 없이 다음 비디오에선 모듈로 가서 시도해 보죠 + +08:06.760 --> 08:07.660 +거기서 봐요 diff --git 
a/week5/community-contributions/subtitles/srts/59668923/en_US.srt b/week5/community-contributions/subtitles/srts/59668923/en_US.srt new file mode 100755 index 0000000..1c0d075 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59668923/en_US.srt @@ -0,0 +1,721 @@ +WEBVTT + +00:00.440 --> 00:08.390 +Well, welcome back to Jupyter Lab for what will be an epic finale to our time in this platform. + +00:08.570 --> 00:14.540 +And it's going to be such a great conclusion to the last seven weeks. + +00:14.600 --> 00:19.130 +The first thing you'll notice is that there is quite a lot going on in the week eight folder. + +00:19.130 --> 00:24.140 +There are a lot of files here, which reflects the fact that we've got a lot to get through, a lot + +00:24.170 --> 00:26.660 +of work to be done to solve this big problem. + +00:26.960 --> 00:31.010 +It means I'm going to be moving at a faster pace than usual as we go through this code. + +00:31.010 --> 00:36.860 +But that's okay, because at this point you are proficient, you're well on your way to mastering LLM + +00:36.860 --> 00:42.740 +engineering, and you no longer need me to belabor the point about lists of dicts of assistants and + +00:42.740 --> 00:44.210 +users and things like that. + +00:44.210 --> 00:48.200 +It's it's now something that is very much old news. + +00:48.410 --> 00:54.770 +And yeah, it's amazing to think I was explaining what tokens were only, only a matter of weeks ago. + +00:54.770 --> 00:59.230 +And and now that you're ready for a big, full on agentic AI I. + +00:59.260 --> 01:02.830 +Solution a quick admin point before we get started. + +01:02.890 --> 01:07.480 +Uh, the I've made some changes to the packages. + +01:07.480 --> 01:12.730 +I've added some more packages that are dependencies on this environment in order to, to really make + +01:12.730 --> 01:14.650 +this a very, very juicy problem. 
+ +01:14.770 --> 01:20.950 +And that just means you might need if you've if you pulled the code sometime before like late September, + +01:20.950 --> 01:26.380 +you might need to pull the code again and update your packages to have anything new that I've added. + +01:26.380 --> 01:27.970 +And that's very easy to do. + +01:28.000 --> 01:32.050 +Um, all you have to do, of course, is go to the project home directory. + +01:32.050 --> 01:33.640 +So LM engineering. + +01:33.760 --> 01:37.480 +Um, and if you're on a PC, then you need to be in an Anaconda prompt. + +01:37.720 --> 01:43.540 +Uh, and then you do a git pull to get the latest code, and then you run conda env update. + +01:43.780 --> 01:50.710 +Um, you specify the file as environment.yml and the dash dash prune tells it to remove any packages + +01:50.710 --> 01:52.480 +that are no longer in the list. + +01:52.480 --> 01:57.400 +I don't think I have removed anything, but anyways, I think this is always a sensible uh, one liner. + +01:57.430 --> 02:02.330 +Good to have that to hand to update your environment with a new environment.yml file. + +02:02.750 --> 02:05.540 +Okay, so here we go. + +02:05.540 --> 02:07.160 +Let's talk about modal. + +02:07.160 --> 02:12.140 +So as we get into today's class there only is one import to begin with. + +02:12.140 --> 02:13.970 +And it is to import modal. + +02:14.000 --> 02:15.080 +There we go. + +02:15.170 --> 02:23.210 +Now if you have not set up your tokens which you will not have done, then you need to run a command + +02:23.210 --> 02:25.190 +line thing called modal setup. + +02:25.190 --> 02:31.010 +And the way you can run that is you just uncomment this line and you run that statement right there. + +02:31.040 --> 02:34.910 +Now I'm not going to do it because I've already done it and my environment set up. + +02:34.910 --> 02:42.710 +But when you do, it's going to pop up with a browser window and have you authorize modal. 
+ +02:42.710 --> 02:46.190 +And then modal will set your environment variables for you. + +02:46.190 --> 02:47.900 +And I think that's really great. + +02:47.990 --> 02:54.500 +All of the challenges that people have had with tokens, particularly with OpenAI, this is a very different + +02:54.500 --> 02:55.160 +experience. + +02:55.160 --> 02:56.570 +It's really streamlined. + +02:56.570 --> 02:57.670 +So good for modal. + +02:57.670 --> 03:01.990 +They seem to have really figured out the the way to set tokens. + +03:01.990 --> 03:07.000 +If for any reason that doesn't work for you, then if you go into the modal platform, you can actually + +03:07.000 --> 03:11.590 +find your tokens in there in much the same way as you can for for things like OpenAI. + +03:11.590 --> 03:13.810 +And then you can manually set it. + +03:13.930 --> 03:16.690 +Um, but hopefully this will just work. + +03:16.690 --> 03:17.860 +It certainly did for me. + +03:17.860 --> 03:22.150 +And by all accounts, that's that's how it works for for people generally. + +03:22.690 --> 03:23.650 +All right. + +03:23.650 --> 03:30.400 +So now, uh, in the next line, I'm going to import some things from a package called hello, which + +03:30.400 --> 03:31.750 +is just what I've written over here. + +03:31.750 --> 03:35.140 +And I'm going to take you over to that right now to show you what it looks like. + +03:35.560 --> 03:37.990 +So this is just a piece of Python. + +03:37.990 --> 03:39.880 +It's just some, some Python code. + +03:40.090 --> 03:41.590 +And I'm going to tell you what it does. + +03:41.590 --> 03:43.150 +So we import modal. + +03:43.150 --> 03:46.150 +And from modal we import a few things. + +03:46.180 --> 03:51.130 +App for applications volume and image. + +03:51.340 --> 03:55.060 +Um and actually now that I look at it I see that I don't actually end up using volume. + +03:55.060 --> 03:56.840 +So I don't think that is needed. + +03:56.870 --> 03:58.700 +Keep this simple is better. 
+ +03:59.240 --> 04:02.300 +Um, and so this is what you do. + +04:02.330 --> 04:03.650 +You begin with some setup. + +04:03.650 --> 04:07.070 +You tell modal what kind of infrastructure you need. + +04:07.070 --> 04:11.030 +And this is an example of modal talks about this a bit on the in their docs. + +04:11.030 --> 04:15.260 +But this is this is a type of infrastructure as code. + +04:15.260 --> 04:19.430 +You can use code to describe what kind of box you want. + +04:19.460 --> 04:24.320 +So if you're thinking when we're in Google Colab, we had to pick various drop downs and choose what + +04:24.320 --> 04:31.910 +kind of, um, VM, what runtime we wanted from Google Colab, well, here you just get to choose what + +04:31.910 --> 04:34.430 +you want by by specifying it in code. + +04:34.430 --> 04:40.760 +So we, we say that we want an image which we want it to be the Debian operating system. + +04:40.760 --> 04:48.530 +We want to pip install the requests package that very, very common standard package for, for uh, + +04:48.530 --> 04:52.010 +doing URL web work. + +04:52.370 --> 04:59.650 +Um, and we specify a GPU, uh, actually, which again is also not used in this example. + +04:59.650 --> 05:00.520 +Sorry. + +05:00.700 --> 05:01.780 +That makes it a bit simpler. + +05:01.780 --> 05:05.800 +We're not going to use a GPU for this example because we're keeping it very simple indeed. + +05:06.160 --> 05:10.660 +Uh, so uh, the I have got a method called hello. + +05:10.690 --> 05:11.740 +It's a very simple. + +05:11.740 --> 05:12.640 +It's a function. + +05:12.640 --> 05:13.150 +Hello. + +05:13.180 --> 05:14.710 +That returns a string. + +05:14.950 --> 05:23.500 +Uh, and what it does is it imports requests, uh, it gets it goes to a website called IP info.io, + +05:23.530 --> 05:25.120 +which is a useful one to know. 
+ +05:25.120 --> 05:32.620 +It's just one of these utilities that will return a JSON object which describes where you are the IP + +05:32.620 --> 05:35.560 +address of, of what just called it. + +05:35.800 --> 05:42.790 +Um, and so then I take that JSON, I pull out the city, the region and the country, and I say hello + +05:42.790 --> 05:44.710 +from city, region, country. + +05:44.860 --> 05:46.840 +Um, and it's just based on your IP address. + +05:46.870 --> 05:52.270 +So it's just going to say hello from wherever you are, or at least wherever your ISP is serving you + +05:52.270 --> 05:52.540 +from. + +05:52.550 --> 05:57.710 +So for me, it's going to be somewhere fairly close to New York, but may not be in New York. + +05:57.980 --> 06:01.520 +Um, so that's a pretty simple function. + +06:01.550 --> 06:07.850 +The only potential interesting thing about it is this decorator at the top here, which is a gradient + +06:07.850 --> 06:13.490 +decorator where we've decorated it with app, which is this thing here, this modal app. + +06:13.490 --> 06:15.560 +Hello dot function. + +06:15.620 --> 06:18.020 +And then we're passing in this image. + +06:18.020 --> 06:25.190 +And that image refers to a Debian image uh which has requests installed. + +06:25.190 --> 06:31.340 +And just by decorating it with that, we're saying that's the kind of box that we want to be to be able + +06:31.340 --> 06:33.860 +to run this on should we wish to. + +06:33.950 --> 06:35.510 +That's all there is to it. + +06:35.510 --> 06:37.940 +So you you could put that to one side. + +06:37.940 --> 06:41.480 +You could imagine that this was just part of an existing piece of code. + +06:41.480 --> 06:47.420 +You had to do something, and now you've just decorated it with something to say that you want to be + +06:47.420 --> 06:53.710 +able to run it should you wish to, using a Debian operating system with requests installed. + +06:53.920 --> 06:57.520 +So that is all that's in this hello.py. 
+ +06:57.550 --> 06:59.020 +Super simple. + +06:59.080 --> 07:03.790 +We go back to day one dot, the Jupyter notebook. + +07:04.420 --> 07:08.620 +So I'm now going to import that. + +07:08.740 --> 07:16.750 +And so now what I can do is I can call my hello I can I can take my hello function that I've, that + +07:16.750 --> 07:18.340 +I've imported here. + +07:18.370 --> 07:22.930 +And I can call it by saying hello dot local. + +07:22.930 --> 07:27.790 +And what that means is I want to run that function that I've just defined, that we just looked at this + +07:27.790 --> 07:30.730 +function here, and I just want to run it on my local box. + +07:30.730 --> 07:32.920 +I want to run it in this Jupyter notebook. + +07:33.130 --> 07:34.270 +Let's see what we get. + +07:34.600 --> 07:36.460 +So it's running right now. + +07:37.300 --> 07:40.900 +And it says hello from seaport, New York, US. + +07:40.930 --> 07:43.600 +That is where my ISP is, I guess. + +07:44.050 --> 07:44.920 +Let's try that again. + +07:44.950 --> 07:46.450 +Yeah, that seems pretty consistent. + +07:46.450 --> 07:48.340 +So that's running locally. + +07:48.970 --> 07:53.370 +Uh, and, uh, now look at this one here. + +07:53.400 --> 07:55.440 +It's exactly the same thing. + +07:55.470 --> 07:58.260 +I've just changed local to remote. + +07:58.260 --> 07:58.920 +That's the only. + +07:58.950 --> 08:00.360 +The only thing I've changed. + +08:00.990 --> 08:02.400 +Let's see what happens. + +08:05.850 --> 08:07.740 +Let's take a little bit longer and then. + +08:07.770 --> 08:11.070 +Hello from Ashburn, Virginia, us. + +08:11.100 --> 08:17.910 +It's running in a completely different state, so I'm sure you you're expecting that. + +08:17.910 --> 08:19.020 +You get the idea. + +08:19.350 --> 08:21.060 +Uh, and sometimes it's different. + +08:21.060 --> 08:23.820 +By the way, I've seen it pop up all over the place. 
+ +08:23.820 --> 08:30.630 +So simply by calling remote instead of local, you can call this function the same piece of code, the + +08:30.630 --> 08:31.920 +same piece of Python code. + +08:31.920 --> 08:36.570 +And it's been deployed to a server and it's running on that server instead. + +08:38.280 --> 08:39.780 +So I think that's pretty magical. + +08:39.780 --> 08:44.580 +It's magical because it's so simple and it's just allowed us to deploy some code. + +08:44.760 --> 08:47.970 +And now we've got a slightly more involved package called llama. + +08:49.000 --> 08:52.030 +And this is where things like GPUs start to appear. + +08:52.030 --> 08:57.160 +You can see I paired this back to the hello example, which is why there was some some traces of what + +08:57.160 --> 08:57.880 +this was. + +08:58.240 --> 09:04.120 +Um, so I start by, um, uh, calling my app llama. + +09:04.750 --> 09:10.030 +Um, now I'm going to again have a Debian image, but this time I'm going to install torch transformers, + +09:10.030 --> 09:13.360 +bits and bytes and accelerate all packages that you know. + +09:13.360 --> 09:21.040 +Well, at this point, um, I am also getting my hugging face token from my modal secrets, and that + +09:21.040 --> 09:22.390 +is something I should have shown you before. + +09:22.390 --> 09:27.310 +I will show you in a second how you set that up in modal, how you get to secrets, and you can set + +09:27.310 --> 09:29.320 +your hugging face token in there. + +09:29.350 --> 09:32.170 +And once you've done that, you can read it like this. + +09:32.830 --> 09:39.430 +And I'm specifying that I want a T4 GPU, which of course is the very cheap basic one. + +09:39.520 --> 09:41.470 +And I've got a constant here. + +09:41.950 --> 09:47.910 +And now I have uh, this, this generate, uh, function. + +09:47.910 --> 09:51.990 +And it takes a prompt which is a string, and it returns a string. + +09:52.770 --> 09:54.480 +This is an example of the type hints. 
+ +09:54.480 --> 09:55.920 +If you're not familiar with them. + +09:55.920 --> 10:00.360 +It's something that I'll be doing this time around in various places. + +10:00.360 --> 10:04.920 +And it will, uh, I think, become clear after a bit. + +10:05.430 --> 10:13.110 +So, uh, again, I decorate this function with I pass in the image, I tell it my secrets, and I say + +10:13.110 --> 10:18.600 +I want a T4 GPU, all configuring my server using code. + +10:18.780 --> 10:23.910 +And now this is just a function as if I were writing something to run locally. + +10:23.940 --> 10:29.550 +Now, I couldn't possibly run this locally because my box has nothing like the horsepower to be able + +10:29.550 --> 10:32.820 +to run a llama model like this locally. + +10:32.820 --> 10:37.170 +There are ways you can do it with things like llama CP if you're familiar with that, but I wouldn't + +10:37.170 --> 10:41.040 +be able to do it like it's written here, and if I did, it would be very, very slow. + +10:41.280 --> 10:48.010 +Um, and so I want to do it on a box which has a T4 GPU and this is how I'll be able to do it. + +10:48.400 --> 10:51.430 +Um, and so this, this code should all look very familiar to you. + +10:51.430 --> 10:53.620 +This is the config that you know. + +10:53.620 --> 10:59.800 +Well that puts specifies the, the four bit quantization that will have for Lama. + +11:00.160 --> 11:05.320 +Um, we are going to load the tokenizer, this boilerplate stuff, you know. + +11:05.320 --> 11:08.440 +Well we are going to load our model. + +11:08.440 --> 11:17.110 +And then this, you should also recognize we encode the the prompt into tokens as our input. + +11:17.140 --> 11:20.560 +We set this attention mask to avoid getting that warning. + +11:20.650 --> 11:23.380 +And then we do model dot generate. + +11:23.380 --> 11:25.000 +We pass in our inputs. + +11:25.000 --> 11:31.810 +We say we only need five new tokens and we want to just one response. 
+ +11:31.810 --> 11:36.520 +We will take that one response, we will decode it and we will return it. + +11:36.550 --> 11:38.470 +It's as simple as that. + +11:38.530 --> 11:43.900 +Now with that, let's start that and let's run it. + +11:44.130 --> 11:47.100 +So stuff is happening. + +11:47.100 --> 11:52.200 +So, um, first of all, what what have I actually run? + +11:52.200 --> 11:59.400 +So I've called the remote function and what I passed in the prompt that I passed in to a llama model + +11:59.400 --> 12:02.070 +is life is a mystery. + +12:02.070 --> 12:04.260 +Everyone must stand alone. + +12:04.290 --> 12:13.110 +I hear now, uh, I, uh, hopefully most people know what comes next from that. + +12:13.110 --> 12:15.360 +Otherwise I'm going to feel very old. + +12:15.570 --> 12:23.040 +Uh, but, uh uh, this, of course, would be straight from the opening to Like a Prayer by Madonna. + +12:23.040 --> 12:26.760 +And I hear you Call my name would be what comes next. + +12:26.790 --> 12:29.310 +And it's almost unbearable not to say that. + +12:29.460 --> 12:33.750 +Uh, and now I'm going to hear that song in my head for for the rest of the day. + +12:33.870 --> 12:35.040 +Uh, but, um. + +12:35.040 --> 12:35.310 +Yeah. + +12:35.340 --> 12:40.410 +And sorry if I've put that now in your in your mind forever too, but, uh, such a catchy song. + +12:40.440 --> 12:45.220 +Anyway, it's what we're seeing now is what's going on on that box. + +12:45.370 --> 12:49.240 +Um, and we can also flip over to, to modal itself. + +12:49.420 --> 12:55.330 +Um, and uh, let's see, go to the let me refresh this screen. + +12:56.890 --> 12:59.290 +Uh, so it's not a deployed app. + +13:04.270 --> 13:05.350 +Ephemeral apps. + +13:05.350 --> 13:06.340 +That's what it is. + +13:06.550 --> 13:08.620 +Uh, give me a second to find that. + +13:08.710 --> 13:09.730 +So this is it. + +13:09.730 --> 13:11.050 +Running right now. 
+ +13:11.080 --> 13:17.650 +Running as an ephemeral app, uh, generates, um, started two minutes ago. + +13:17.650 --> 13:22.570 +And we can click into it and we can see that its status is running. + +13:22.990 --> 13:26.380 +Uh, and, uh, let's see what else we can get. + +13:26.590 --> 13:29.680 +So the containers, it's live. + +13:31.990 --> 13:35.230 +We can look at the memory, the CPU cores. + +13:39.220 --> 13:42.960 +And we can see it's a T4 GPU, just as we specified. + +13:43.620 --> 13:52.740 +Um, and we can also look at the we can see that Hello.py is also on this box we just ran. + +13:52.740 --> 13:59.730 +And llama.py uh, is sitting there, the, the Python script that we just wrote. + +14:00.390 --> 14:03.330 +Uh, so here we go. + +14:03.360 --> 14:07.770 +While we're here, I'm just going to point out that right at the top here is secrets. + +14:07.770 --> 14:12.540 +And secrets is, of course, where you go to set the secrets, the hugging face token. + +14:12.540 --> 14:14.160 +So you will need to do that. + +14:14.310 --> 14:18.210 +I'm not going to click on that now to avoid revealing my hugging face token secret. + +14:18.270 --> 14:24.570 +Um, but that's that's where you would go so that you can then use your hugging face token in the code + +14:24.570 --> 14:30.240 +that's deployed in your ephemeral, uh, service here. + +14:30.840 --> 14:32.310 +So it's still running. + +14:32.310 --> 14:35.550 +And I think at this point I will hold to this video. + +14:35.550 --> 14:39.420 +And when I come back, uh oh, it's already just said it's succeeded. + +14:39.630 --> 14:43.690 +Uh, and I did just get a message saying that it had finished as well. + +14:43.840 --> 14:45.280 +Uh, and, uh. + +14:45.280 --> 14:45.880 +Let's see. + +14:45.910 --> 14:46.180 +Yes. + +14:46.180 --> 14:47.320 +Back here. + +14:48.400 --> 14:49.870 +Let's see what we get. + +14:49.900 --> 14:52.330 +I hear you call my name. 
+ +14:52.390 --> 15:00.010 +Uh, so, lama, even the four bit quantized version of Lama also couldn't resist completing the Madonna + +15:00.040 --> 15:01.420 +song as well. + +15:01.690 --> 15:07.360 +Uh, and so we have just successfully run this piece of code. + +15:07.600 --> 15:18.970 +Um, using this ephemeral, uh, service on modal, uh, to run a quantized lama, um, uh, model, + +15:19.120 --> 15:22.900 +uh, Lama 3.1 model to complete a prompt. + +15:23.200 --> 15:24.070 +All right. + +15:24.070 --> 15:30.100 +When we come back, we are going to go through and it's going to be time for us to deploy our model + +15:30.100 --> 15:37.270 +and actually see how we do that and how we put an API around the proprietary model that we built last + +15:37.270 --> 15:37.870 +time. + +15:38.110 --> 15:39.550 +I will see you in a minute. diff --git a/week5/community-contributions/subtitles/srts/59668923/ja_JP.srt b/week5/community-contributions/subtitles/srts/59668923/ja_JP.srt new file mode 100755 index 0000000..d4e7682 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59668923/ja_JP.srt @@ -0,0 +1,655 @@ +WEBVTT + +00:00.440 --> 00:08.390 +さて、 Jupyter Labにようこそ!このプラットフォームでの時間の壮大なフィナーレを飾ることになるだろう。 + +00:08.570 --> 00:14.540 +そして、 この7週間を締めくくる素晴らしいものになるだろう。 + +00:14.600 --> 00:19.130 +まず気づくのは、 8週目のフォルダーにはかなり多くのことが起こっているということだ。 + +00:19.130 --> 00:26.660 +これは、 この大きな問題を解決するために、 私たちが多くの仕事をこなさなければならないという事実を反映している。 + +00:26.960 --> 00:31.010 +つまり、 このコードを読み進めるにあたって、 いつもより速いペースで進むということだ。 + +00:31.010 --> 00:36.860 +でも大丈夫、 この時点であなたはLLMエンジニアリングをマスターしているのだから。 + +00:36.860 --> 00:44.210 +アシスタントのディクテットやユーザーのリストなど、 もはや私が長々と説明する必要はない。 + +00:44.210 --> 00:48.200 +それはもう、 非常に古いニュースだ。 + +00:48.410 --> 00:54.770 +ほんの数週間前まで、 トークンが何なのかを説明していたなんて。 + +00:54.770 --> 00:59.230 +そして今、 あなたは大規模な、 完全なエージェントAI Iの準備ができている。 + +00:59.260 --> 01:02.830 +始める前に、 簡単な管理上の注意点を説明しよう。 + +01:02.890 --> 01:07.480 +パッケージを少し変更したんだ。 + +01:07.480 --> 01:14.650 +この環境に依存するパッケージをいくつか追加した。 + +01:14.770 --> 01:20.950 
+つまり、 9月下旬以前にコードを取得した場合は、 私が追加した新機能を追加するために、 再度コードを取得し、 + +01:20.950 --> 01:26.380 +パッケージを更新する必要があるかもしれないということだ。 + +01:26.380 --> 01:27.970 +それはとても簡単なことだ。 + +01:28.000 --> 01:32.050 +もちろん、 プロジェクトのホーム・ディレクトリに行けばいいだけだ。 + +01:32.050 --> 01:33.640 +だからLMエンジニアリングだ。 + +01:33.760 --> 01:37.480 +PCを使っているなら、 Anacondaプロンプトに入る必要があります。 + +01:37.720 --> 01:43.540 +それからgit pullして最新のコードを取得し、 conda env updateを実行する。 + +01:43.780 --> 01:52.480 +ええと、 ファイルをenvironmentとして指定します。 ymlとdash dash pruneは、 リストにもうないパッケージを削除するように指示する。 + +01:52.480 --> 01:57.400 +私は何も削除していないと思うが、 とにかく、 これは常に賢明なあー、 ワンライナーだと思う。 + +01:57.430 --> 02:02.330 +新しい環境にアップデートするために、 それを手元に置いておくのは良いことだ。 ymlファイル。 + +02:02.750 --> 02:05.540 +さて、 それでは始めよう。 + +02:05.540 --> 02:07.160 +モーダルについて話そう。 + +02:07.160 --> 02:12.140 +だから今日の授業では、 まず輸入品は1つしかない。 + +02:12.140 --> 02:13.970 +そして、 モーダルをインポートすることである。 + +02:14.000 --> 02:15.080 +これでよし。 + +02:15.170 --> 02:25.190 +トークンをセットアップしていない場合は、 モーダルセットアップと呼ばれるコマンドラインを実行する必要があります。 + +02:25.190 --> 02:31.010 +その実行方法は、 この行のコメントを解除して、 そのステートメントを実行するだけだ。 + +02:31.040 --> 02:34.910 +今、 私はそれをするつもりはない。 なぜなら、 私の環境はすでにセットアップされているからだ。 + +02:34.910 --> 02:42.710 +しかし、 そうすると、 ブラウザ・ウィンドウがポップアップし、 モーダルを承認しなければならなくなる。 + +02:42.710 --> 02:46.190 +そして、 モーダルがあなたの代わりに環境変数を設定する。 + +02:46.190 --> 02:47.900 +それは本当に素晴らしいことだと思う。 + +02:47.990 --> 02:55.160 +トークン、 特にOpenAIで人々が抱えていた課題はすべて、 これはまったく異なる経験だ。 + +02:55.160 --> 02:56.570 +実に合理的だ。 + +02:56.570 --> 02:57.670 +モーダルにはとてもいい。 + +02:57.670 --> 03:01.990 +彼らはトークンをセットする方法を本当に理解しているようだ。 + +03:01.990 --> 03:07.000 +何らかの理由でそれがうまくいかない場合は、 モーダルプラットフォームに行けば、 OpenAIのようなものと同じように、 + +03:07.000 --> 03:11.590 +トークンを見つけることができます。 + +03:11.590 --> 03:13.810 +そして、 手動で設定することができる。 + +03:13.930 --> 03:16.690 +うーん、 でもうまくいけばいいんだけど + +03:16.690 --> 03:17.860 +確かに私にはそうだった。 + +03:17.860 --> 03:22.150 +誰の目から見ても、 それが一般的な人々のための方法だ。 + +03:22.690 --> 03:23.650 +分かった。 + +03:23.650 --> 03:31.750 +次の行では、 helloというパッケージからいくつかのものをインポートします。 + +03:31.750 --> 03:35.140 +これから、 その様子をお見せしましょう。 + 
+03:35.560 --> 03:37.990 +つまり、 これはPythonの一部なんだ。 + +03:37.990 --> 03:39.880 +ただのPythonのコードだよ。 + +03:40.090 --> 03:41.590 +そして、 それが何をするものなのかを教えよう。 + +03:41.590 --> 03:43.150 +そこで、 モーダルをインポートする。 + +03:43.150 --> 03:46.150 +そして、 モーダルからいくつかのものをインポートする。 + +03:46.180 --> 03:51.130 +アプリケーションのボリュームとイメージのためのアプリ。 + +03:51.340 --> 03:55.060 +それに、 今見てみると、 結局ボリュームは使っていないんだ。 + +03:55.060 --> 03:56.840 +だから、 その必要はないと思う。 + +03:56.870 --> 03:58.700 +シンプルな方がいい。 + +03:59.240 --> 04:02.300 +それで、 こうするんだ。 + +04:02.330 --> 04:03.650 +まずはセットアップから始める。 + +04:03.650 --> 04:07.070 +どのようなインフラが必要かをモーダルに伝える。 + +04:07.070 --> 04:11.030 +これはモーダルの例で、 ドキュメントで少し触れています。 + +04:11.030 --> 04:15.260 +しかし、 これはコードとしてのインフラの一種である。 + +04:15.260 --> 04:19.430 +どのような箱が欲しいかをコードで記述することができる。 + +04:19.460 --> 04:24.320 +Google Colabでは、 いろいろなドロップダウンを選んで、 + +04:24.320 --> 04:34.430 +どんなVMが欲しいか、 どんなランタイムが欲しいかを選ぶ必要があった。 + +04:34.430 --> 04:40.760 +つまり、 Debianオペレーティング・システムのイメージが欲しいということだ。 + +04:40.760 --> 04:48.530 +pipでrequestsパッケージをインストールしたい。 このパッケージは、 URLウェブ作業を行うための、 + +04:48.530 --> 04:52.010 +とても一般的な標準パッケージだ。 + +04:52.370 --> 04:59.650 +そして、 GPUを指定するのだが、 これもこの例では使わない。 + +04:59.650 --> 05:00.520 +申し訳ない。 + +05:00.700 --> 05:01.780 +これで少しシンプルになった。 + +05:01.780 --> 05:05.800 +この例ではGPUを使うつもりはない。 非常にシンプルにしているからだ。 + +05:06.160 --> 05:10.660 +helloというメソッドがある。 + +05:10.690 --> 05:11.740 +とてもシンプルだ。 + +05:11.740 --> 05:12.640 +それは機能だ。 + +05:12.640 --> 05:13.150 +こんにちは。 + +05:13.180 --> 05:14.710 +これは文字列を返す。 + +05:14.950 --> 05:25.120 +それで何をするかというと、 リクエストをインポートして、 IP情報というウェブサイトにアクセスするんだ。 これは知っておくと便利だ。 + +05:25.120 --> 05:35.560 +JSONオブジェクトを返すユーティリティのひとつで、 IPアドレスの場所を記述している。 + +05:35.800 --> 05:44.710 +そのJSONから都市、 地域、 国を抜き出し、 都市、 地域、 国からこんにちはと言う。 + +05:44.860 --> 05:46.840 +ええと、 IPアドレスに基づいているだけです。 + +05:46.870 --> 05:52.540 +だから、 あなたがどこにいようと、 少なくともISPがどこからあなたにサービスを提供していようと、 こんにちはと言うだけだ。 + +05:52.550 --> 05:57.710 +だから僕にとっては、 ニューヨークにかなり近い場所になるだろうけど、 ニューヨークではないかもしれない。 + +05:57.980 --> 06:01.520 +それはとてもシンプルな機能だ。 + +06:01.550 --> 
06:07.850 +このデコレーターはグラデーション・デコレーターで、 + +06:07.850 --> 06:13.490 +アプリでデコレートされています。 + +06:13.490 --> 06:15.560 +ハロー・ドット・ファンクション + +06:15.620 --> 06:18.020 +そして、 この画像の中を通過する。 + +06:18.020 --> 06:25.190 +そして、 そのイメージはDebianのイメージを指しており、 そのイメージにはリクエストがインストールされている。 + +06:25.190 --> 06:33.860 +このような装飾を施すことで、 我々が望めばこのような箱でこれを走らせることができると言っているのだ。 + +06:33.950 --> 06:35.510 +それだけだ。 + +06:35.510 --> 06:37.940 +だから、 そのことは横に置いておいてほしい。 + +06:37.940 --> 06:41.480 +これは既存のコードの一部だと想像できるだろう。 + +06:41.480 --> 06:47.420 +あなたは何かをしなければならなかった。 そして今、 あなたはそれを、 Debianオペレーティングシステムを使い、 + +06:47.420 --> 06:53.710 +リクエストをインストールした上で、 それを実行できるようにしたいという言葉で飾った。 + +06:53.920 --> 06:57.520 +このハローワークにあるのは以上だ。 パイ。 + +06:57.550 --> 06:59.020 +超シンプルだ。 + +06:59.080 --> 07:03.790 +初日のドット、 ジュピター・ノートブックに戻ろう。 + +07:04.420 --> 07:08.620 +だから、 これからそれをインポートする。 + +07:08.740 --> 07:18.340 +そして今できることは、 ここでインポートしたhello関数を呼び出すことだ。 + +07:18.370 --> 07:22.930 +そして、 ハロー・ドット・ローカルと言って呼ぶことができる。 + +07:22.930 --> 07:30.730 +つまり、 先ほど定義した関数を、 ローカル・ボックス上で実行したいのだ。 + +07:30.730 --> 07:32.920 +このJupyterノートブックで実行したい。 + +07:33.130 --> 07:34.270 +何が出てくるか見てみよう。 + +07:34.600 --> 07:36.460 +だから、 今走っている。 + +07:37.300 --> 07:40.900 +アメリカ、 ニューヨークのシーポートからこんにちは。 + +07:40.930 --> 07:43.600 +そこが私のISPなんだと思う。 + +07:44.050 --> 07:44.920 +もう一度やってみよう。 + +07:44.950 --> 07:46.450 +ああ、 それはかなり一貫しているようだ。 + +07:46.450 --> 07:48.340 +これがローカルで動いているわけだ。 + +07:48.970 --> 07:53.370 +それと、 これを見てくれ。 + +07:53.400 --> 07:55.440 +まったく同じことだ。 + +07:55.470 --> 07:58.260 +ローカルをリモートに変更したところだ。 + +07:58.260 --> 07:58.920 +それだけだ。 + +07:58.950 --> 08:00.360 +唯一、 変えたことがある。 + +08:00.990 --> 08:02.400 +どうなるか見てみよう。 + +08:05.850 --> 08:07.740 +もう少し時間をかけてからにしよう。 + +08:07.770 --> 08:11.070 +バージニア州アッシュバーンからこんにちは。 + +08:11.100 --> 08:17.910 +まったく違う状態で走っているから、 それを期待しているんだろうね。 + +08:17.910 --> 08:19.020 +おわかりだろう。 + +08:19.350 --> 08:21.060 +あー、 時には違うこともある。 + +08:21.060 --> 08:23.820 +ちなみに、 あちこちで見かけるようになった。 + +08:23.820 --> 08:31.920 +つまり、 ローカルではなくリモートを呼び出すだけで、 同じコード、 
同じPythonコードを呼び出すことができる。 + +08:31.920 --> 08:36.570 +そしてそれはサーバーにデプロイされ、 代わりにそのサーバーで動いている。 + +08:38.280 --> 08:39.780 +それはとても不思議なことだと思う。 + +08:39.780 --> 08:44.580 +とてもシンプルで、 いくつかのコードをデプロイするだけでいいのだから、 魔法のようだ。 + +08:44.760 --> 08:47.970 +そして今、 私たちはllamaという少し複雑なパッケージを手に入れた。 + +08:49.000 --> 08:52.030 +そしてここからGPUのようなものが登場する。 + +08:52.030 --> 08:57.880 +ハローの例と対になっているのがわかるだろう。 + +08:58.240 --> 09:04.120 +まず、 自分のアプリのラマに電話するんだ。 + +09:04.750 --> 09:10.030 +ええと、 もう一度Debianのイメージを作りますが、 今回はtorch transformers、 bits and bytesをインストールして、 + +09:10.030 --> 09:13.360 +皆さんが知っているすべてのパッケージを加速させます。 + +09:13.360 --> 09:22.390 +まあ、 この時点で、 うーん、 僕はモード・シークレットからハグする顔のトークンも手に入れている。 + +09:22.390 --> 09:27.310 +モーダルでどのように設定するか、 どのようにシークレットにアクセスするか、 どのようにハグする顔のトークンを設定するか、 + +09:27.310 --> 09:29.320 +すぐにお見せしましょう。 + +09:29.350 --> 09:32.170 +そうしたら、 こう読むことができる。 + +09:32.830 --> 09:39.430 +そして、 私はT4 GPUが欲しいと指定しているのですが、 それはもちろんとても安いベーシックなものです。 + +09:39.520 --> 09:41.470 +そして、 ここには不変のものがある。 + +09:41.950 --> 09:47.910 +そして今、 私はこの、 この、 生成機能を手に入れた。 + +09:47.910 --> 09:51.990 +そして、 文字列であるプロンプトを受け取り、 文字列を返す。 + +09:52.770 --> 09:54.480 +これはタイプのヒントの例である。 + +09:54.480 --> 09:55.920 +ご存じない方も多いだろう。 + +09:55.920 --> 10:00.360 +今回もいろいろなところでやることになるだろう。 + +10:00.360 --> 10:04.920 +そして、 それは......少し経てば明らかになると思う。 + +10:05.430 --> 10:13.110 +この関数を装飾して、 画像を渡し、 秘密を伝え、 T4 GPUが欲しいと言うと、 + +10:13.110 --> 10:18.600 +コードを使ってサーバーを設定する。 + +10:18.780 --> 10:23.910 +そして今、 これはローカルで実行する何かを書いているかのような単なる関数である。 + +10:23.940 --> 10:32.820 +というのも、 私のマシンには、 このようなラマ・モデルをローカルで動かせるほどの馬力はないからだ。 + +10:32.820 --> 10:37.170 +リャマCPのようなものに詳しい人ならできる方法もあるだろうけど、 ここに書いてあるようなことは僕にはできないし、 + +10:37.170 --> 10:41.040 +やったとしてもとてもとても時間がかかる。 + +10:41.280 --> 10:48.010 +それで、 T4 GPUを搭載したボックスでそれをやりたいんだ。 + +10:48.400 --> 10:51.430 +ええと、 それで、 このコードはすべて、 あなたにとって非常に見慣れたものに見えるはずです。 + +10:51.430 --> 10:53.620 +これがあなたが知っているコンフィグだ。 + +10:53.620 --> 10:59.800 +つまり、 4ビットの量子化をラマに指定することになる。 + +11:00.160 --> 11:05.320 +ええと、 トークナイザーをロードします。 + +11:05.320 --> 11:08.440 +さて、 
モデルをロードしよう。 + +11:08.440 --> 11:17.110 +そして、 入力としてプロンプトをトークンにエンコードする。 + +11:17.140 --> 11:20.560 +その警告を受けないように、 このアテンションマスクを設定した。 + +11:20.650 --> 11:23.380 +そして、 モデル・ドット・ジェネレーションを行う。 + +11:23.380 --> 11:25.000 +インプットを渡す。 + +11:25.000 --> 11:31.810 +新しいトークンは5つだけで、 返事は1つでいいとする。 + +11:31.810 --> 11:36.520 +私たちはその1つのレスポンスを受け取り、 それを解読して返す。 + +11:36.550 --> 11:38.470 +簡単なことだ。 + +11:38.530 --> 11:43.900 +では、 それを実行してみよう。 + +11:44.130 --> 11:47.100 +だから、 いろいろなことが起きている。 + +11:47.100 --> 11:52.200 +それで、 まず最初に、 僕は実際に何を走ったんだ? + +11:52.200 --> 12:02.070 +リモート関数を呼び出したわけだが、 ラマモデルに渡したプロンプトの内容は謎だ。 + +12:02.070 --> 12:04.260 +誰もが独り立ちしなければならない。 + +12:04.290 --> 12:13.110 +今聞いたところでは、 ええと、 ええと、 願わくば、 ほとんどの人がそこから次に何が起こるかを知っていてほしい。 + +12:13.110 --> 12:15.360 +そうでなければ、 私はとても年を取ったように感じるだろう。 + +12:15.570 --> 12:23.040 +ええと、 でも、 ええと、 これはもちろん、 マドンナの『Like a Prayer』のオープニングからそのままなんだけど。 + +12:23.040 --> 12:26.760 +そして、 次は私の名前を呼ぶと聞いている。 + +12:26.790 --> 12:29.310 +それを言わないのは、 ほとんど耐え難いことだ。 + +12:29.460 --> 12:33.750 +あー、 今日一日、 頭の中でこの歌が鳴りっぱなしだ。 + +12:33.870 --> 12:35.040 +あ、 でも、 あの。 + +12:35.040 --> 12:35.310 +そうだね。 + +12:35.340 --> 12:40.410 +そして、 もし僕がそれをいつまでも君の頭の中に置いていたら申し訳ないんだけど、 でも、 とてもキャッチーな曲なんだ。 + +12:40.440 --> 12:45.220 +とにかく、 今我々が見ているのは、 あの箱の上で何が起こっているかということだ。 + +12:45.370 --> 12:49.240 +そして、 モーダルそのものに切り替えることもできる。 + +12:49.420 --> 12:55.330 +ええと、 ええと、 この画面をリフレッシュさせてください。 + +12:56.890 --> 12:59.290 +つまり、 デプロイされたアプリではないんだ。 + +13:04.270 --> 13:05.350 +儚いアプリ。 + +13:05.350 --> 13:06.340 +そういうことだ。 + +13:06.550 --> 13:08.620 +ちょっと待ってください。 + +13:08.710 --> 13:09.730 +それでこれだ。 + +13:09.730 --> 13:11.050 +今走っている。 + +13:11.080 --> 13:17.650 +エフェメラルなアプリとして動いている。 + +13:17.650 --> 13:22.570 +クリックすると、 ステータスが実行中であることがわかる。 + +13:22.990 --> 13:26.380 +それと、 他に何が手に入るかな? 
+ +13:26.590 --> 13:29.680 +だから、 コンテナはライブなんだ。 + +13:31.990 --> 13:35.230 +メモリやCPUコアを見ることができる。 + +13:39.220 --> 13:42.960 +そして、 我々が指定した通り、 T4 GPUであることがわかる。 + +13:43.620 --> 13:52.740 +そして、 "ハロー "を見ることもできる。 PYは今走ったこのボックスにもある。 + +13:52.740 --> 13:59.730 +そしてラマ。 さっき書いたPythonスクリプトが置いてある。 + +14:00.390 --> 14:03.330 +ええと、 それではどうぞ。 + +14:03.360 --> 14:07.770 +ここにいる間に指摘しておきたいのは、 ここの一番上にあるのは秘密だということだ。 + +14:07.770 --> 14:12.540 +そしてシークレットはもちろん、 シークレットをセットする場所である。 + +14:12.540 --> 14:14.160 +だから、 そうする必要がある。 + +14:14.310 --> 14:18.210 +ハグしている顔の形見の秘密がバレないように、 今はそれをクリックするつもりはない。 + +14:18.270 --> 14:24.570 +でもそこで、 このエフェメラル・サービスにデプロイされたコードで、 + +14:24.570 --> 14:30.240 +ハグする顔のトークンを使えるようにするんだ。 + +14:30.840 --> 14:32.310 +だからまだ動いている。 + +14:32.310 --> 14:35.550 +この時点で、 私はこのビデオに固執すると思う。 + +14:35.550 --> 14:39.420 +で、 戻ってきたら、 ああ、 もう成功したって書いてある。 + +14:39.630 --> 14:43.690 +ええと、 ちょうど終了したというメッセージも受け取ったところだ。 + +14:43.840 --> 14:45.280 +ええと、 それで...。 + +14:45.280 --> 14:45.880 +見てみよう。 + +14:45.910 --> 14:46.180 +そうだ。 + +14:46.180 --> 14:47.320 +ここに戻る。 + +14:48.400 --> 14:49.870 +何が出てくるか見てみよう。 + +14:49.900 --> 14:52.330 +私の名前を呼ぶ声が聞こえる。 + +14:52.390 --> 15:01.420 +ええと、 だから、 ラマ、 4ビット量子化バージョンのラマも、 マドンナの曲を完成させないわけにはいかなかったんだ。 + +15:01.690 --> 15:07.360 +ええと、 それで、 このコードを実行することに成功しました。 + +15:07.600 --> 15:22.900 +このエフェメラルな、 あー、 モーダルなサービスを使って、 あー、 量子化されたラマ、 あー、 モデル、 あー、 ラマ3を走らせる。 + +15:22.900 --> 15:22.900 +プロンプトを完成させるモデル1名。 + +15:23.200 --> 15:24.070 +分かった。 + +15:24.070 --> 15:30.100 +また戻ってきたら、 モデルをデプロイして、 前回構築した独自モデルの周りにAPIをどのように配置するか、 + +15:30.100 --> 15:37.870 +実際にやって見よう。 + +15:38.110 --> 15:39.550 +すぐに会おう。 diff --git a/week5/community-contributions/subtitles/srts/59668923/ko_KR.srt b/week5/community-contributions/subtitles/srts/59668923/ko_KR.srt new file mode 100755 index 0000000..da31abe --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59668923/ko_KR.srt @@ -0,0 +1,709 @@ +WEBVTT + +00:00.440 --> 00:08.390 +주피터 연구소에 잘 오셨습니다 이 플랫폼에서 보낸 시간의 에픽 엔딩이 될 텐데요 + +00:08.570 --> 00:14.540 
+지난 7주간의 멋진 마무리가 될 거예요 + +00:14.600 --> 00:19.130 +가장 먼저 눈에 띄는 건 8주 차 폴더에 많은 일이 벌어지고 있다는 거예요 + +00:19.130 --> 00:24.140 +여기 파일이 많은 건 우리가 풀어야 할 게 많다는 걸 보여주죠 이 큰 문제를 해결하기 위해 + +00:24.170 --> 00:26.660 +해야 할 일이 많다는 걸요 Get it + +00:26.960 --> 00:31.010 +이 코드가 진행되는 동안 평소보다 더 빠른 속도로 움직일 거라는 의미죠 + +00:31.010 --> 00:36.860 +하지만 괜찮습니다 지금 시점에서는 이미 능숙하고 LLM 엔지니어링의 달인이 되어 + +00:36.860 --> 00:42.740 +가고 있으니까요 이제 제가 설명할 필요도 없겠죠 조수와 사용자 같은 사람들이 어디에 + +00:42.740 --> 00:44.210 +있는지에 대해서요 + +00:44.210 --> 00:48.200 +이제는 아주 오래된 뉴스죠 + +00:48.410 --> 00:54.770 +네, 토큰이 뭔지 설명하고 있었는데 불과 몇 주 전이었다니 놀랍죠 + +00:54.770 --> 00:59.230 +이제 에이전트 같은 인공지능 1을 경험할 준비가 됐어요 + +00:59.260 --> 01:02.830 +시작 전에 get 절차가 있어요 + +01:02.890 --> 01:07.480 +패키지를 좀 바꿨어요 + +01:07.480 --> 01:12.730 +이 환경에 종속성이 있는 패키지를 추가했어요 이걸 아주 흥미로운 문제로 + +01:12.730 --> 01:14.650 +만들기 위해서요 + +01:14.770 --> 01:20.950 +그 말은 여러분이∙∙∙ 9월 말쯤에 코드를 불러오면 다시 코드를 불러와 패키지를 + +01:20.950 --> 01:26.380 +업데이트해야 할 수도 있어요 제가 추가한 새로운 게 있으려면요 + +01:26.380 --> 01:27.970 +아주 쉬운 일이죠 + +01:28.000 --> 01:32.050 +여러분이 할 일은 프로젝트 홈 디렉터리에 가는 거죠 + +01:32.050 --> 01:33.640 +LM 엔지니어링이죠 + +01:33.760 --> 01:37.480 +PC용이라면 아나콘다 프롬프트가 있어야 해요 + +01:37.720 --> 01:43.540 +최신 코드를 얻기 위해 깃 풀을 하고 콘다 인플루언서 업데이트를 실행해요 + +01:43.780 --> 01:50.710 +파일을 환경으로 지정해요 yml과 prune은 목록에 없는 패키지를 + +01:50.710 --> 01:52.480 +제거하라고 하죠 + +01:52.480 --> 01:57.400 +아무것도 삭제하지 않은 것 같아요 어쨌든 이건 항상 실용적인 한 줄 대사예요 + +01:57.430 --> 02:02.330 +새로운 환경으로 환경을 업데이트하는 데 필요한 거죠 yml 파일이요 + +02:02.750 --> 02:05.540 +자, 시작하죠 + +02:05.540 --> 02:07.160 +수법에 대해 얘기해보죠 + +02:07.160 --> 02:12.140 +오늘 수업에 들어서면서 get은 하나만 남았어요 + +02:12.140 --> 02:13.970 +수단을 들여오는 거죠 + +02:14.000 --> 02:15.080 +됐어요 + +02:15.170 --> 02:23.210 +토큰을 설정하지 않았다면 모듈 셋업이라는 명령줄을 실행해야 + +02:23.210 --> 02:25.190 +해요 + +02:25.190 --> 02:31.010 +실행 방법은 이 줄을 주석 처리 해제하고 저 문을 저기서 실행하는 거죠 + +02:31.040 --> 02:34.910 +지금은 안 할게요 이미 해뒀거든요 환경 설정도요 + +02:34.910 --> 02:42.710 +하지만 그렇게 하면 브라우저 창이 뜨고 양식을 승인하게 돼요 + +02:42.710 --> 02:46.190 +그럼 Modal이 환경 변수를 설정해 주죠 
+ +02:46.190 --> 02:47.900 +정말 멋진 것 같아요 + +02:47.990 --> 02:55.160 +토큰을 가지고 겪는 어려움들 특히 오픈아이는 완전히 다른 경험이에요 + +02:55.160 --> 02:56.570 +아주 간소화됐죠 + +02:56.570 --> 02:57.670 +수법에 아주 좋죠 + +02:57.670 --> 03:01.990 +패를 놓는 법을 제대로 터득한 것 같군요 + +03:01.990 --> 03:07.000 +만약 그게 안 된다면 표준 플랫폼으로 가보세요 거기서 여러분의 토큰을 찾을 + +03:07.000 --> 03:11.590 +수 있어요 OpenAI 같은 걸 위한 것과 거의 같은 방식으로요 + +03:11.590 --> 03:13.810 +수동으로 설정할 수 있어요 + +03:13.930 --> 03:16.690 +그래도 잘되면 좋겠어요 + +03:16.690 --> 03:17.860 +전 그랬어요 + +03:17.860 --> 03:22.150 +누가 봐도 일반적으로 사람들이 그렇게 하죠 + +03:22.690 --> 03:23.650 +좋아요 + +03:23.650 --> 03:30.400 +이제 다음 줄에서 hello라는 패키지에서 몇 가지를 불러오겠습니다 여기 + +03:30.400 --> 03:31.750 +적어둔 거죠 + +03:31.750 --> 03:35.140 +지금 당장 가서 어떻게 생겼는지 보여드릴게요 + +03:35.560 --> 03:37.990 +이건 파이썬 일부분이에요 + +03:37.990 --> 03:39.880 +파이썬 으로 된 코드네요 + +03:40.090 --> 03:41.590 +이게 뭘 하는지 말씀드리죠 + +03:41.590 --> 03:43.150 +그래서 모듈을 수입하죠 + +03:43.150 --> 03:46.150 +몇 가지를 수입했어요 + +03:46.180 --> 03:51.130 +앱 볼륨과 이미지를 위한 앱이요 + +03:51.340 --> 03:55.060 +사실 지금 보니까 볼륨을 사용하지 않았네요 + +03:55.060 --> 03:56.840 +그래서 그건 필요 없다고 생각해요 + +03:56.870 --> 03:58.700 +간단한 게 좋아요 + +03:59.240 --> 04:02.300 +그래서 이렇게 해요 + +04:02.330 --> 04:03.650 +설정으로 시작해요 + +04:03.650 --> 04:07.070 +어떤 인프라가 필요한지 설명하는 거죠 + +04:07.070 --> 04:11.030 +이건 모듈의 예입니다 문서에서 비트로 언급하죠 + +04:11.030 --> 04:15.260 +이건 코드로서의 인프라 유형이에요 + +04:15.260 --> 04:19.430 +어떤 박스를 원하는지 코드를 사용해 설명할 수 있어요 + +04:19.460 --> 04:24.320 +Google Colab에서 다양한 드롭다운을 선택해야 한다고 생각하신다면 + +04:24.320 --> 04:31.910 +어떤 종류의 VM을 선택해야 하는지 Google Colab에서 어떤 런타임을 원하는지 말이에요 여기선 코드에서 지정해 + +04:31.910 --> 04:34.430 +원하는 걸 선택할 수 있어요 + +04:34.430 --> 04:40.760 +이미지가 필요하다고 하면 그게 데비언 운영체제가 되어야죠 + +04:40.760 --> 04:48.530 +요청 패키지를 설치하려고 합니다 URL 웹 작업을 위한 아주 아주 + +04:48.530 --> 04:52.010 +일반적인 표준 패키지죠 + +04:52.370 --> 04:59.650 +GPU를 명시합니다 이 예제에선 사용되지 않았죠 + +04:59.650 --> 05:00.520 +미안해요 + +05:00.700 --> 05:01.780 +비트가 더 간단하죠 + +05:01.780 --> 05:05.800 +이 예제에서는 GPU 사용은 안 할 겁니다 아주 단순하게 할 거니까요 + +05:06.160 --> 05:10.660 
+그래서 전 인사법을 알아요 + +05:10.690 --> 05:11.740 +아주 간단해요 + +05:11.740 --> 05:12.640 +일종의 함수예요 + +05:12.640 --> 05:13.150 +안녕하세요 + +05:13.180 --> 05:14.710 +문자열을 반환하죠 + +05:14.950 --> 05:25.120 +이것은 불러오기 요청을 합니다 IP 정보라는 웹사이트로 가죠 io는 알아두면 유용하죠 + +05:25.120 --> 05:32.620 +JSON 객체를 반환하는 유틸리티 중 하나로 IP 주소를 + +05:32.620 --> 05:35.560 +설명하는 거죠 + +05:35.800 --> 05:42.790 +그리고 JSON을 가져다가 도시, 지역, 나라를 뽑고 도시, 지역, 나라에서 + +05:42.790 --> 05:44.710 +인사를 해요 + +05:44.860 --> 05:46.840 +IP 주소로 판단한 거예요 + +05:46.870 --> 05:52.540 +여러분이 어디 있든 안녕하세요를 할 겁니다 적어도 ISP가 서빙하는 곳에서요 + +05:52.550 --> 05:57.710 +그래서 뉴욕과 가까운 곳에 갈 거예요 뉴욕이 아닐 수도 있고요 + +05:57.980 --> 06:01.520 +아주 간단한 함수예요 + +06:01.550 --> 06:07.850 +잠재적으로 흥미로운 건 여기 위에 있는 장식가예요 앱으로 장식한 + +06:07.850 --> 06:13.490 +그러데이션 장식가죠 여기 이거요 모달 앱이요 + +06:13.490 --> 06:15.560 +Hello.com 함수요 + +06:15.620 --> 06:18.020 +이 이미지를 전달해요 + +06:18.020 --> 06:25.190 +그 이미지는 디비언 이미지를 뜻해요 요청이 설치된 이미지죠 + +06:25.190 --> 06:31.340 +그렇게 장식한다는 건 우리가 원할 경우 실행할 수 있는 그런 종류의 박스가 + +06:31.340 --> 06:33.860 +되기를 원한다는 거죠 + +06:33.950 --> 06:35.510 +그게 다예요 + +06:35.510 --> 06:37.940 +그래서 그건 한쪽에 두죠 Put + +06:37.940 --> 06:41.480 +이게 현존하는 코드의 일부라고 상상할 수 있어요 + +06:41.480 --> 06:47.420 +뭔가를 해야만 했고 이젠 실행하고 싶다고 말할 수 있는 것으로 + +06:47.420 --> 06:53.710 +장식했어요 요청이 설치된 디비언 운영체제를 사용해서요 + +06:53.920 --> 06:57.520 +이 안에 있는 건 그게 다예요 네 + +06:57.550 --> 06:59.020 +아주 간단해요 + +06:59.080 --> 07:03.790 +첫째 날 점으로 돌아가죠 주피터 공책요 + +07:04.420 --> 07:08.620 +이제 그걸 불러오죠 + +07:08.740 --> 07:16.750 +이제 hello를 호출할 수 있습니다 여기 가져오기 한 hello 함수를 + +07:16.750 --> 07:18.340 +취할 수 있어요 + +07:18.370 --> 07:22.930 +hello. 
local이라고 입력하면 돼요 + +07:22.930 --> 07:27.790 +그 말은 방금 정의한 저 함수를 실행하고 싶다는 거죠 방금 이 함수를 본 거요 + +07:27.790 --> 07:30.730 +제 로컬 상자에서 실행하고 싶다는 거죠 + +07:30.730 --> 07:32.920 +이 주피터 공책에 넣고 싶어요 + +07:33.130 --> 07:34.270 +get get을 해 보죠 + +07:34.600 --> 07:36.460 +지금 실행 중이에요 + +07:37.300 --> 07:40.900 +뉴욕 시포트에서 인사하는 거예요 + +07:40.930 --> 07:43.600 +제 ISP가 거기 있나 봐요 + +07:44.050 --> 07:44.920 +다시 해 보죠 + +07:44.950 --> 07:46.450 +네, 일관성이 있네요 + +07:46.450 --> 07:48.340 +로컬에서 실행되고 있어요 + +07:48.970 --> 07:53.370 +그리고 이 사진을 보세요 + +07:53.400 --> 07:55.440 +완전히 똑같아요 + +07:55.470 --> 07:58.260 +로컬에서 원격으로 바꿨어요 + +07:58.260 --> 07:58.920 +그게 다예요 + +07:58.950 --> 08:00.360 +유일하게 변한 거죠 + +08:00.990 --> 08:02.400 +어떻게 되나 보죠 + +08:05.850 --> 08:07.740 +조금만 더 비트를 타 보죠 + +08:07.770 --> 08:11.070 +버지니아 애시번에서 인사드려요 + +08:11.100 --> 08:17.910 +완전히 다른 상태에서 실행되고 있어요 그러니 그걸 기대하시겠죠 + +08:17.910 --> 08:19.020 +Get it, Get it 아시겠죠? + +08:19.350 --> 08:21.060 +가끔은 다르기도 해요 + +08:21.060 --> 08:23.820 +그나저나 여기저기서 나오는 걸 봤어요 + +08:23.820 --> 08:30.630 +로컬 대신 원격 호출을 하면 이 함수를 같은 코드 조각으로 호출할 수 있습니다 파이썬 코드와 + +08:30.630 --> 08:31.920 +같은 거죠 + +08:31.920 --> 08:36.570 +서버에 배포되어 그 서버에서 실행되고 있죠 + +08:38.280 --> 08:39.780 +정말 마법 같은 일이죠 + +08:39.780 --> 08:44.580 +마법 같아요 아주 간단하고 코드 몇 개를 배포하게 해주거든요 + +08:44.760 --> 08:47.970 +지금은 라마라는 좀 더 복잡한 패키지를 만들고 있어요 + +08:49.000 --> 08:52.030 +여기서 GPU 같은 게 나타나기 시작하죠 + +08:52.030 --> 08:57.160 +보다시피 hello 예제와 연결했어요 그래서 이게 무엇이었는지에 대한 흔적이 있었던 + +08:57.160 --> 08:57.880 +거죠 + +08:58.240 --> 09:04.120 +우선 제 앱 라마에게 전화를 걸어요 + +09:04.750 --> 09:10.030 +디비언 이미지도 넣는데 이번에는 토치 트랜스포머 비트, 바이트 + +09:10.030 --> 09:13.360 +패키지 가속도 전부 넣을 거예요 + +09:13.360 --> 09:21.040 +지금 저는 포옹하는 얼굴 토큰도 받고 있어요 제 방식의 비밀에서요 진작 보여 드렸어야 + +09:21.040 --> 09:22.390 +하는 거죠 + +09:22.390 --> 09:27.310 +잠시 후 Modal에서 그걸 설정하는 방법을 보여드릴게요 기밀에 도달하는 방법과 포옹하는 + +09:27.310 --> 09:29.320 +얼굴 토큰을 설정하는 방법도요 + +09:29.350 --> 09:32.170 +그렇게 하고 나면 이렇게 읽을 수 있어요 + +09:32.830 --> 09:39.430 +T4 GPU를 원한다고 지정하고 있어요 물론 아주 저렴한 기본이죠 + +09:39.520 
--> 09:41.470 +여기 상속자가 있어요 + +09:41.950 --> 09:47.910 +이제 생성 함수가 있네요 + +09:47.910 --> 09:51.990 +문자열인 프롬프트를 받아 문자열을 반환하죠 + +09:52.770 --> 09:54.480 +힌트 형식의 예시인데요 + +09:54.480 --> 09:55.920 +잘 모르신다면요 + +09:55.920 --> 10:00.360 +이번에는 여러 곳에서 할 거예요 + +10:00.360 --> 10:04.920 +비트만 좀 있으면 분명해질 거예요 + +10:05.430 --> 10:13.110 +이 함수도 장식했어요 이미지를 전달하고 비밀을 말하고 T4 GPU를 + +10:13.110 --> 10:18.600 +원한다고 했죠 코드를 이용해 서버를 구성해요 + +10:18.780 --> 10:23.910 +이건 로컬에서 실행할 뭔가를 작성하는 함수예요 + +10:23.940 --> 10:29.550 +이걸 로컬에서 실행할 순 없어요 제 박스는 이런 llama 모델을 + +10:29.550 --> 10:32.820 +로컬에서 실행할 만한 힘이 없거든요 + +10:32.820 --> 10:37.170 +라마 CP 같은 것으로도 할 수 있는 방법이 있어요 익숙하신지 모르겠지만 + +10:37.170 --> 10:41.040 +여기 쓰여 있는 대로는 못 할 거예요 한다고 해도 아주 느릴 거고요 + +10:41.280 --> 10:48.010 +T4 GPU가 있는 박스에서 작업하고 싶어요 이렇게 하는 거죠 + +10:48.400 --> 10:51.430 +이 코드는 아주 익숙하실 거예요 + +10:51.430 --> 10:53.620 +여러분이 아는 구성이죠 + +10:53.620 --> 10:59.800 +그럼 라마를 위한 네 개의 퀀타이즈 비트를 구체적으로 설명해야겠네요 + +11:00.160 --> 11:05.320 +토큰라이저를 로드할 거예요 상용문서 같은 거요 + +11:05.320 --> 11:08.440 +모델을 로드할 거예요 + +11:08.440 --> 11:17.110 +그리고 프롬프트를 토큰으로 인코딩한 것도 볼 수 있는데요 + +11:17.140 --> 11:20.560 +그런 경고를 피하려고 주의 마스크를 씌웠어요 + +11:20.650 --> 11:23.380 +그리고 Model.생성기를 하죠 + +11:23.380 --> 11:25.000 +입력값을 통과시키죠 + +11:25.000 --> 11:31.810 +새 토큰 5개만 필요하고 응답은 하나만 필요하다고 하죠 + +11:31.810 --> 11:36.520 +그 응답을 받아들여서 해독한 다음 돌려주는 거죠 + +11:36.550 --> 11:38.470 +아주 간단해요 + +11:38.530 --> 11:43.900 +이제 그걸 시작해 실행해보죠 + +11:44.130 --> 11:47.100 +무슨 일이 일어나고 있어요 + +11:47.100 --> 11:52.200 +우선, 제가 실제로 실행한 건 뭐죠? 
+ +11:52.200 --> 11:59.400 +원격 함수를 호출했고 프롬프트에서 라마 모델로 보낸 건 + +11:59.400 --> 12:02.070 +삶은 미스터리예요 + +12:02.070 --> 12:04.260 +모두 혼자 서야 해요 + +12:04.290 --> 12:13.110 +이다음은 어떻게 될지 다들 아셨으면 좋겠네요 + +12:13.110 --> 12:15.360 +안 그러면 제가 늙었다고 느낄 거예요 + +12:15.570 --> 12:23.040 +하지만 이건 물론 오프닝부터 마돈나의 기도처럼 이어지죠 + +12:23.040 --> 12:26.760 +내 이름을 부르는 소리가 들리면 그때 부를게요 + +12:26.790 --> 12:29.310 +그 말을 안 하는 게 너무 힘들어요 + +12:29.460 --> 12:33.750 +이제 그 노래가 제 머릿속에서 오늘 하루 동안 들릴 거예요 HOLO + +12:33.870 --> 12:35.040 +하지만요 + +12:35.040 --> 12:35.310 +네 + +12:35.340 --> 12:40.410 +이 노래를 영원히 마음에 담아뒀다면 미안해요 하지만 노래가 정말 중독성 있어요 Put it's go + +12:40.440 --> 12:45.220 +어쨌든 지금 보이는 건 저 상자에서 일어나는 일이에요 + +12:45.370 --> 12:49.240 +그 자체로 수정할 수도 있어요 + +12:49.420 --> 12:55.330 +그리고 어디 보자 화면을 새로 고침할게요 + +12:56.890 --> 12:59.290 +배포된 앱이 아니에요 + +13:04.270 --> 13:05.350 +하루살이 앱요 + +13:05.350 --> 13:06.340 +바로 그거예요 + +13:06.550 --> 13:08.620 +잠시만요, 좀 찾아볼게요 + +13:08.710 --> 13:09.730 +이게 끝이군요 + +13:09.730 --> 13:11.050 +지금 뛰어요 + +13:11.080 --> 13:17.650 +단기간 앱으로 운영되는데 2분 전에 시작됐어요 + +13:17.650 --> 13:22.570 +클릭하면 상태가 실행 중인 걸 볼 수 있죠 + +13:22.990 --> 13:26.380 +또 뭐가 있나 볼게요 get it + +13:26.590 --> 13:29.680 +컨테이너는 작동해요 + +13:31.990 --> 13:35.230 +메모리와 CPU 코어를 볼 수 있어요 + +13:39.220 --> 13:42.960 +T4 GPU가 보이죠 우리가 명시한 대로요 + +13:43.620 --> 13:52.740 +그리고 또 볼 수 있는 게 안녕하세요 이 상자에 파이가 있어요 + +13:52.740 --> 13:59.730 +라마도요 파이썬 파이썬의 대본 스크립트가 거기 있어요 + +14:00.390 --> 14:03.330 +자, 시작하죠 + +14:03.360 --> 14:07.770 +여기 온 김에 하나만 지적할게요 맨 위에 있는 건 비밀이에요 + +14:07.770 --> 14:12.540 +비밀은 물론 비밀을 설정하는 곳이죠 포옹 얼굴 토큰이에요 + +14:12.540 --> 14:14.160 +그러니 그렇게 하세요 + +14:14.310 --> 14:18.210 +지금은 클릭하지 않을 거예요 포옹 얼굴 토큰 비밀이 드러나면 안 되니까요 + +14:18.270 --> 14:24.570 +어쨌든 거기로 가면 됩니다 포옹하는 얼굴 토큰을 사용할 수 있는데 + +14:24.570 --> 14:30.240 +그 코드는 여기서 잠깐 동안 배포되는 코드예요 + +14:30.840 --> 14:32.310 +아직 작동하는군요 + +14:32.310 --> 14:35.550 +이 시점에서 이 비디오를 고수해야겠네요 + +14:35.550 --> 14:39.420 +제가 돌아오면 이미 성공했다고 나오네요 + +14:39.630 --> 14:43.690 +Get it가 끝났다는 메시지를 받았어요 + +14:43.840 --> 
14:45.280 +그리고요 + +14:45.280 --> 14:45.880 +어디 보죠 + +14:45.910 --> 14:46.180 +네 + +14:46.180 --> 14:47.320 +여기요 + +14:48.400 --> 14:49.870 +get get을 해 보죠 + +14:49.900 --> 14:52.330 +내 이름을 부르는 소리가 들려요 + +14:52.390 --> 15:00.010 +네 개의 퀀트화된 라마의 비트조차 마돈나 노래를 완성하지 않을 + +15:00.040 --> 15:01.420 +수 없었죠 + +15:01.690 --> 15:07.360 +이 코드를 성공적으로 실행했어요 + +15:07.600 --> 15:22.900 +이 단기적인 서비스를 이용해 퀀타이즈한 라마 모델 라마3를 작동했어요 + +15:22.900 --> 15:22.900 +프롬프트를 완료하는 건 1가지 모델이죠 + +15:23.200 --> 15:24.070 +좋아요 + +15:24.070 --> 15:30.100 +돌아와서 살펴보고 모델을 배포할 시간이 될 겁니다 어떻게 + +15:30.100 --> 15:37.870 +하는지 지난 시간에 빌드한 소유 모델에 API를 어떻게 놓는지 보죠 + +15:38.110 --> 15:39.550 +잠시 후에 뵙죠 diff --git a/week5/community-contributions/subtitles/srts/59669049/en_US.srt b/week5/community-contributions/subtitles/srts/59669049/en_US.srt new file mode 100755 index 0000000..e07fef5 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669049/en_US.srt @@ -0,0 +1,877 @@ +WEBVTT + +00:00.860 --> 00:07.040 +So what you just saw was an ephemeral app, as it's called, which means just a temporary app that you're + +00:07.040 --> 00:12.650 +just using for, for, for testing before finally picking an app to deploy. + +00:12.680 --> 00:16.520 +And we're going to stay with the ephemeral apps for a little bit longer, because we're now going to + +00:16.520 --> 00:23.270 +take our proprietary model that we built last time and deploy that as an ephemeral app before we we + +00:23.300 --> 00:24.380 +do it for reals. 
+ +00:24.380 --> 00:31.460 +So in order to do that, let me show you this other file here called Pricer ephemeral.py, which is + +00:31.490 --> 00:37.460 +a Python module which is going to have some similarities with what you just saw, but with one key difference, + +00:37.460 --> 00:43.910 +which is instead of using llama 3.1, we're going to use our own fine tuned model using very similar + +00:43.910 --> 00:50.990 +code to the code that we used in the codelab when we were running our model in inference, when we were + +00:50.990 --> 00:54.830 +testing it, and had the wonderful moment when it when it did so well. + +00:54.830 --> 01:03.380 +So in this, uh, Python module, then we begin as before by importing modal. + +01:03.620 --> 01:06.590 +And again I will remove volume. + +01:06.800 --> 01:09.650 +At one point I was going to use that, but I ended up not. + +01:10.160 --> 01:14.240 +So you set up, you say that you have an app. + +01:14.240 --> 01:15.260 +It's called Pricer. + +01:15.290 --> 01:16.760 +That's going to be the name of the app you saw. + +01:16.790 --> 01:17.630 +Pricer. + +01:18.020 --> 01:20.510 +You saw my deployed version of Pricer before. + +01:20.810 --> 01:25.640 +Um, and now we create an image and we install the same packages. + +01:25.640 --> 01:27.410 +But wait, there's a new package. + +01:27.410 --> 01:34.850 +You'll remember the parameter efficient fine tuning package that we need in order to be able to load + +01:34.850 --> 01:38.060 +in our fine tuned weights and our secrets. + +01:38.060 --> 01:42.890 +Hopefully you found your way to adding them in and you should put them under HF secret. + +01:42.890 --> 01:47.210 +Or if you put it under something else, then obviously change the name here. + +01:47.210 --> 01:50.030 +That's the name of the the the category of secrets. 
+ +01:50.030 --> 01:56.390 +And then within that you have to have the exactly the usual um, hugging HF underscore token for, for + +01:56.390 --> 01:57.650 +the token itself. + +01:58.040 --> 02:00.480 +Um, see if that's actually in the code somewhere. + +02:00.510 --> 02:02.070 +Um, I don't think it is. + +02:02.130 --> 02:05.940 +Um, anyways, so so if that hopefully that that that makes sense. + +02:06.390 --> 02:13.740 +Um, so, um, we then um, specify we want a t4 GPU. + +02:13.770 --> 02:16.200 +We specify the base model of course. + +02:16.230 --> 02:17.850 +Llama 3.18 billion. + +02:18.060 --> 02:19.620 +Uh, the project name. + +02:19.620 --> 02:22.980 +This is because we're going to be pulling it from the Hugging Face hub. + +02:23.280 --> 02:25.860 +Uh, so this is my hugging face username. + +02:25.860 --> 02:30.000 +If you want to use my trained version, then keep my name there. + +02:30.000 --> 02:35.130 +If you want to use your version because you fine tune the version and maybe you made it even better. + +02:35.130 --> 02:39.600 +Maybe you've done some hyperparameter optimization and you have beaten me, in which case you should + +02:39.600 --> 02:41.370 +definitely use year one instead. + +02:41.520 --> 02:44.850 +Um, but this is mine, and if you use mine, then you should use that run name. + +02:44.850 --> 02:50.400 +And this revision number, which you'll remember, was the epoch where I got the best results before + +02:50.400 --> 02:51.870 +it started to overfit. + +02:51.870 --> 02:56.250 +And so this ends up with the fine tuned model name. + +02:56.250 --> 03:00.030 +And so we have a simple function price. + +03:00.030 --> 03:04.980 +And it's a function which is almost identical to the function that we had in the Google Colab. 
+ +03:05.190 --> 03:11.130 +It's just we've got it sitting here on my local box, and we're going to decorate it with this decorator + +03:11.130 --> 03:16.200 +that says that we want to run it in this image with these secrets and this GPU. + +03:16.290 --> 03:20.340 +But this is all the same stuff. + +03:20.370 --> 03:21.990 +We do some imports. + +03:22.380 --> 03:24.720 +We note the question and the prefix. + +03:25.230 --> 03:28.860 +The prompt is going to be how much does this cost to the nearest dollar. + +03:28.890 --> 03:32.580 +Then the description of the product and then price is dollars. + +03:32.580 --> 03:37.920 +And then it's going to be almost unbearable for the model not to respond with what the price is. + +03:38.340 --> 03:41.550 +And then here we do the quantization. + +03:41.550 --> 03:44.160 +We load the model as before. + +03:44.160 --> 03:46.980 +And this line hopefully you remember this. + +03:46.980 --> 03:51.540 +This is where we load in the the Laura fine tuned model. + +03:51.540 --> 03:53.400 +We provide a base model. + +03:53.400 --> 03:57.640 +We provide the name of the fine tuned model and in this case, the revision number. + +03:57.640 --> 04:01.030 +You can leave that off if you're just taking the final save. + +04:01.030 --> 04:02.980 +You don't need to take an interim point. + +04:02.980 --> 04:04.090 +The way that I am. + +04:04.810 --> 04:06.670 +This is for reproducibility. + +04:06.670 --> 04:08.560 +So that it's the random seed is set. + +04:08.590 --> 04:12.130 +So you get the same numbers each time we tokenize. + +04:12.340 --> 04:19.570 +We then make the call dot generate to our fine tuned model and we decode. + +04:19.570 --> 04:22.840 +And then finally hopefully you also recognize this code. 
+ +04:22.840 --> 04:29.440 +This is a bit of janky code that strips out the number, the floating point number from within the text + +04:29.470 --> 04:33.490 +that comes back, just in case there's an errant dollar sign or something like that, and makes sure + +04:33.490 --> 04:39.700 +we get whatever number we can, um, and returns that number, and that's all there is to it. + +04:39.730 --> 04:42.970 +It's code that should be pretty familiar to you at this point. + +04:42.970 --> 04:44.950 +So now we go back here. + +04:45.130 --> 04:48.640 +Um, I did already run it because as before, because it needs to warm up. + +04:48.640 --> 04:49.840 +It takes a couple of minutes. + +04:49.840 --> 04:52.180 +So it will be agonizing if we had to wait each time. + +04:52.180 --> 04:53.620 +But I already ran it. + +04:53.650 --> 04:55.700 +This is, of course, what I ran. + +04:56.480 --> 05:00.470 +You say you don't need to actually have this with modal enable output. + +05:00.470 --> 05:04.550 +But if you if you don't do that there's lots of other stuff prints. + +05:04.550 --> 05:07.730 +So this is a way to make it so that it's a nice and neat. + +05:08.030 --> 05:10.700 +Um, but you do need to have with app dot run. + +05:10.940 --> 05:14.000 +Um, and then I just say result is priced remote. + +05:14.000 --> 05:20.990 +And then what I'm asking for is to price a particular object, um, that, uh, just, uh, looking + +05:20.990 --> 05:22.790 +around for something that I could ask it to price. + +05:22.790 --> 05:27.140 +And the object that I've asked it to price is the very microphone that I am talking into right here. + +05:27.140 --> 05:29.690 +So I just typed it in there. + +05:29.690 --> 05:31.130 +I remember how much it cost. + +05:31.160 --> 05:31.940 +And so why not? + +05:31.940 --> 05:34.490 +Let's see how it does estimating that. + +05:34.670 --> 05:35.900 +Uh, so I ran it. + +05:35.930 --> 05:37.190 +It did take a couple of minutes. 
+ +05:37.220 --> 05:42.710 +Now it takes a couple of minutes because it has to warm up that box, uh, and, and populate things, + +05:42.800 --> 05:47.090 +um, because it has to do everything you can imagine you'd have to do if you ran this in Colab. + +05:47.090 --> 05:50.600 +It has to download from the Huggingface hub and then load it into memory. + +05:50.810 --> 05:54.020 +Um, it also has to pip install all of those packages. + +05:54.020 --> 05:57.230 +So there's a lot that has to happen, which is why it takes a bit of time. + +05:57.410 --> 06:04.370 +Um, and it did all this stuff and at the end it completed and it returned the number one, 33, $133. + +06:04.370 --> 06:06.770 +And I think that is round about what this costs. + +06:06.770 --> 06:13.790 +So, uh, yeah, it's, uh, I think that's pretty much, uh, a spot on, uh, so maybe it was it. + +06:13.790 --> 06:14.180 +Who knows? + +06:14.180 --> 06:18.530 +It could have been in the training data set, but but, uh, certainly a nice result. + +06:19.220 --> 06:22.670 +So hopefully you're doing this to you're trying this. + +06:22.670 --> 06:24.980 +You will have to wait those two, two, three minutes. + +06:24.980 --> 06:29.540 +You'll be able to go onto modal while you're doing it, and you'll be able to go to ephemeral apps. + +06:29.630 --> 06:33.320 +It's no longer running, but while it was running, it will have been here, and you'll be able to watch + +06:33.320 --> 06:40.850 +it and see it doing its thing and then, uh, get a satisfactory result. + +06:41.150 --> 06:41.780 +Okay. + +06:41.780 --> 06:46.160 +But now we've done that, it's time to make the transition from these ephemeral apps, which is just + +06:46.160 --> 06:48.560 +ways of running functions remotely. + +06:48.560 --> 06:54.590 +Now we're going to have deployed apps, which is when we tell Modell that we've got some functionality + +06:54.590 --> 06:58.310 +and we want to have it be, um, permanently sitting on. 
+ +06:58.340 --> 07:03.530 +Modell will still only pay for it when we actually run it, but we do want to give it a sort of a name + +07:03.530 --> 07:09.230 +and a proper place where we can quickly get back to it if we need to. + +07:09.650 --> 07:16.460 +Um, and the way you do it is on the command line, you type model.deploy and then the name of the app + +07:16.460 --> 07:17.690 +that you want to deploy. + +07:17.960 --> 07:24.260 +Uh, and it's important to note that this this really this is the moment that we are taking some code, + +07:24.260 --> 07:29.900 +some model that we have built as data scientists, and we are deploying it in a way that can be used + +07:29.900 --> 07:31.550 +for production purposes. + +07:31.550 --> 07:33.740 +It's going to have a Python API behind it. + +07:33.740 --> 07:40.220 +People will be able to call it, um, from Python and be able to to invoke our API. + +07:40.460 --> 07:47.540 +And this is an example of productionizing, something that we've built in originally in JupyterLab. + +07:47.930 --> 07:54.460 +Um, it can also modal also allows you to build Rest endpoints very easily so that it doesn't need to + +07:54.460 --> 07:55.450 +be called by Python. + +07:55.450 --> 08:00.070 +It could just be called from any any HTTP query. + +08:00.250 --> 08:03.610 +Um, we're not going to cover that in this class because everything we're doing is in Python. + +08:03.610 --> 08:05.980 +So we might as well just just call it from Python. + +08:06.010 --> 08:09.160 +Um, but it's very easy to do and the docs are super clear about it. + +08:09.700 --> 08:14.710 +So, uh, one thing I glossed over a moment ago, you may know this already. + +08:14.710 --> 08:20.980 +Uh, Jupiter, uh, has this great feature that if you want to, you can run something in terminal just + +08:20.980 --> 08:23.080 +by doing an exclamation mark. + +08:23.080 --> 08:26.920 +And then the the name of the command you want to run. 
+ +08:26.920 --> 08:31.510 +You can also actually open up a terminal window by pressing the plus button there and, and create a + +08:31.510 --> 08:32.170 +terminal. + +08:32.170 --> 08:36.640 +Uh, but this is even easier because you can do it right in the prompt here, which I did above. + +08:36.640 --> 08:41.710 +And the command again is modal deploy and then the package name. + +08:41.710 --> 08:43.510 +So it's priceless service. + +08:43.510 --> 08:46.480 +So let's go and have a look at this package called Price of Service. + +08:46.510 --> 08:48.340 +Let me quickly tell you what I've got here. + +08:48.370 --> 08:53.280 +So uh it's, uh, remarkably, uh, similar. + +08:53.550 --> 09:00.300 +Um, you can see that basically we, we have, uh, the, the constants, we define the function, uh, + +09:00.300 --> 09:08.460 +and uh, it's basically the same thing, but this is going to be enough to deploy, uh, our code as + +09:08.490 --> 09:15.000 +a service, as a, as a proper deployed service, rather than the ephemeral version of the process that + +09:15.000 --> 09:15.990 +we had before. + +09:16.140 --> 09:18.150 +Um, but yeah, it is the same thing. + +09:18.150 --> 09:21.900 +It's the identical code just packaged differently. + +09:21.900 --> 09:24.720 +And again, with this volume that's not required. + +09:25.080 --> 09:32.310 +Uh, uh, so on running that if we go back to the day one notebook, uh, you'll see that it deployed + +09:32.310 --> 09:34.470 +it to as Price's service. + +09:34.710 --> 09:37.530 +Um, it gives me a little URL at the end of it. + +09:37.800 --> 09:43.500 +Um, and then once it's done that if I go now back here and I go to deployed apps, you'll see that + +09:43.500 --> 09:45.450 +price is running there. + +09:45.660 --> 09:50.830 +Um, sitting on a T4 GPU box, just as we specified. 
+ +09:50.860 --> 09:57.100 +If we come back here, the way that you call a deployed app is different and actually simpler than the + +09:57.100 --> 09:57.970 +ephemeral app. + +09:58.000 --> 09:58.900 +You don't need that. + +09:58.900 --> 10:01.870 +That, um, scaffolding you saw a moment ago. + +10:01.900 --> 10:08.890 +Instead you just say modal dot function, dot lookup, and then you give the name of the service and + +10:08.890 --> 10:10.690 +then the name of the of the function. + +10:10.720 --> 10:11.800 +Simple as that. + +10:11.800 --> 10:12.760 +Modal dot function. + +10:12.790 --> 10:15.430 +Dot lookup the service and the function. + +10:15.700 --> 10:22.120 +Um, and so once you've run that you can then say price dot remote and make the, the remote call. + +10:22.120 --> 10:25.120 +And we get back the same number 133. + +10:25.120 --> 10:26.350 +And that's all there is to it. + +10:26.380 --> 10:33.430 +We've just deployed an app with our proprietary model to the cloud, and we've called it and it's run + +10:33.430 --> 10:34.480 +on the cloud. + +10:35.470 --> 10:39.910 +Uh, now there is a nicer way of doing this, a fancier way of doing this. + +10:39.910 --> 10:45.400 +When we build it this way, every time that it goes to sleep and needs to be warmed back up again, + +10:45.400 --> 10:49.300 +which, by the way, is quite quickly like after a minute or two it goes to sleep. + +10:49.810 --> 10:56.290 +And so every time you call it, you're waiting all that extra time, uh, and basically every time it + +10:56.290 --> 11:02.230 +has to rebuild an image and do the imports and load the model and so on. + +11:02.560 --> 11:03.340 +Um, sorry. + +11:03.370 --> 11:05.800 +I jumped to the version where it doesn't have to do that. + +11:05.800 --> 11:06.340 +Here we go. + +11:06.370 --> 11:09.340 +This is the bad version where it has to do all of that. + +11:09.430 --> 11:11.560 +So I've made a second version of it. + +11:11.560 --> 11:13.780 +Prices service two pi. 
+ +11:13.810 --> 11:19.000 +This module, uh, and this one is done a bit in a smarter way. + +11:19.000 --> 11:27.370 +It's done using a class, and it's done in a way that allows you to pre-build some, uh, some aspects + +11:27.370 --> 11:28.300 +of initialization. + +11:28.300 --> 11:30.130 +So you don't need to do it every time. + +11:30.430 --> 11:39.250 +Um, so when you define a class like this, you have the ability to use this decorator model dot build + +11:39.250 --> 11:45.820 +and write a function that will populate your box the first time that it's built. + +11:46.000 --> 11:51.140 +And in this case I download from huggingface the model and I put it in the cache. + +11:51.290 --> 11:54.950 +And I'm not going to walk through this in detail, because I'm going to leave that as an exercise for + +11:54.950 --> 11:59.030 +you in the interest of time, but it's perfectly self-explanatory. + +11:59.180 --> 12:05.720 +This code will load the files from Huggingface and put it in Huggingface cache. + +12:05.990 --> 12:12.530 +And then you can have this decorator modal dot enter, and this is when the function actually gets called. + +12:12.950 --> 12:20.510 +And this is where it's going to set things up by setting up a tokenizer and setting up a base model + +12:20.510 --> 12:21.680 +that will be loaded once. + +12:21.680 --> 12:23.510 +So it can be used repeatedly. + +12:23.660 --> 12:26.480 +If this is called before it goes to sleep. + +12:26.990 --> 12:30.530 +Uh, and this is the method itself price. + +12:30.530 --> 12:40.040 +This is now the same as before, but the the code to load the model into memory has been pulled out + +12:40.190 --> 12:47.000 +into this one here into modal into setup under the the A decorator modal or enter. + +12:47.750 --> 12:52.640 +And so as a result of all of this, if I haven't lost you yet with this, when I go back to day one, + +12:52.640 --> 12:58.130 +the key point, uh, I then do a modal deploy of process service two. 
+ +12:58.160 --> 13:00.320 +So process service two gets deployed. + +13:00.530 --> 13:04.310 +And when I've done that I can then use this code slightly different. + +13:04.340 --> 13:08.090 +You remember this is what we did last time we did modal dot function dot lookup. + +13:08.090 --> 13:13.190 +Now we do modal dot class which annoyingly is spelt cls. + +13:13.340 --> 13:17.300 +Um they probably class is probably something that is already used. + +13:17.300 --> 13:18.680 +So they couldn't use that. + +13:18.770 --> 13:26.420 +So modal dot dot dot lookup and then price of service and price now with a capital P price. + +13:26.420 --> 13:27.650 +So it's the class. + +13:27.650 --> 13:32.900 +So this will will find the class that we've just just deployed. + +13:33.020 --> 13:35.450 +You can instantiate that class. + +13:35.450 --> 13:38.960 +So I say price is is an instance of price. + +13:39.500 --> 13:42.770 +And then I can call Pricer dot price dot remote. + +13:42.950 --> 13:46.620 +Uh and when I do that, I get back that answer again. + +13:46.620 --> 13:51.600 +And when I ran this before, it ran super fast because it was already in memory, but now it's probably + +13:51.600 --> 13:52.440 +swapped out already. + +13:52.440 --> 13:57.510 +So yeah, I won't do it now to avoid holding us up, because we'll press on to the final example, which + +13:57.510 --> 13:58.500 +I will run. + +13:58.830 --> 14:02.400 +Um, I'm going to kick it off now so that I can talk while it's doing its thing. + +14:02.430 --> 14:03.390 +We'll let it run. + +14:03.390 --> 14:15.930 +So I have packaged this into a nice, useful class called Specialist Agent, which is our first agent + +14:15.930 --> 14:17.250 +that we're going to look at. + +14:17.370 --> 14:23.970 +Um, and it is something which is going to look up our pricer running on modal and call our pricer. + +14:23.970 --> 14:29.220 +And it's something which just takes a single function, a single method price. 
+ +14:29.220 --> 14:31.560 +And you can pass in what you want it to price. + +14:31.560 --> 14:32.460 +Let's go and look. + +14:32.460 --> 14:33.780 +It's in agents. + +14:33.900 --> 14:35.490 +And now you get your first preview. + +14:35.520 --> 14:38.700 +Look at all of the different agents that we're going to be playing with this week. + +14:38.940 --> 14:42.120 +Uh, and we're going to go in and have a look at this particular agent. + +14:42.120 --> 14:46.380 +It's called specialist agent because it's using our special model. + +14:46.380 --> 14:50.340 +So this is our Python module for our specialist agent. + +14:50.340 --> 14:56.220 +And as I promised you'll see that I've written comments and that there's some type hints going on. + +14:56.430 --> 14:58.620 +So this class is a very simple class. + +14:58.620 --> 15:03.630 +It's an agent that runs our fine tuned LLM remotely on modal. + +15:04.020 --> 15:08.040 +Um, it sets a name, it gives itself a name and a color. + +15:08.250 --> 15:13.290 +And the reason for that, you'll find out later, is that we're going to have our agents log. + +15:13.290 --> 15:15.060 +So we'll be able to see what they're all doing. + +15:15.060 --> 15:17.430 +And the specialist agent is going to take the color red. + +15:17.430 --> 15:20.490 +So you'll be able to see when it's thinking and doing things and so on. + +15:21.240 --> 15:30.780 +Uh, so I initialize in the initialization in the constructor method here I log that, that it's getting + +15:30.780 --> 15:31.440 +started. + +15:31.440 --> 15:35.340 +And I do this modal dot plus dot lookup. + +15:35.340 --> 15:37.470 +And I look up the price of service and the price. + +15:37.650 --> 15:40.680 +And this is the thing that we just deployed a moment ago. + +15:40.680 --> 15:44.470 +So we look looked that up and we instantiate it. + +15:44.470 --> 15:47.470 +And then we log that we are ready for business. 
+ +15:47.530 --> 15:52.180 +And the price call itself does exactly what we just looked at. + +15:52.210 --> 15:55.540 +We do price dot price dot remote. + +15:55.570 --> 15:57.910 +It's just like when we did the hello dot remote. + +15:57.910 --> 15:59.110 +We're taking a function. + +15:59.110 --> 16:02.410 +We're calling a remote, which means it's going to run on the cloud. + +16:02.410 --> 16:04.360 +It's almost transparent to us. + +16:04.360 --> 16:10.240 +It's like we're calling it directly here in Python, but it's actually going to run on the cloud on + +16:10.240 --> 16:12.520 +the T4 box that we specified. + +16:12.550 --> 16:19.000 +We pass in the description, we log the fact that it's completed and we return the result. + +16:19.570 --> 16:24.340 +So hopefully I've jabbered away for long enough that when we turn back to day one, it's going to have + +16:24.340 --> 16:26.290 +finished and it hasn't finished. + +16:26.650 --> 16:32.950 +Well then I can also fill in the time by flipping over here and we can take a look at it doing its thing. + +16:33.310 --> 16:39.850 +Uh, so here now, before we remember, I got lost looking for ephemeral apps. + +16:39.880 --> 16:46.270 +We are now correctly on deployed apps because we're looking at our Pricer service or Pricer, which + +16:46.270 --> 16:47.470 +is a deployed app. + +16:47.470 --> 16:49.840 +We can go into this and have a look at it. + +16:49.870 --> 16:51.400 +Oh, it's now saying succeeded. + +16:51.400 --> 16:52.720 +Let's go back and have a look. + +16:52.720 --> 16:54.220 +It has indeed succeeded. + +16:54.220 --> 16:54.940 +There we go. + +16:54.940 --> 16:58.000 +It succeeded and it returned the right number 133. + +16:58.030 --> 17:02.470 +And if we run it again right away without delay, see how quick it is? + +17:02.470 --> 17:05.560 +That's how quick it normally is when it's already warm. + +17:05.560 --> 17:07.360 +That's going out to the cloud. 
+ +17:07.360 --> 17:09.730 +Let's quickly come up with something different. + +17:09.760 --> 17:16.780 +Let's go for an iPhone SE second edition. + +17:18.040 --> 17:19.300 +Let's see how much an iPhone. + +17:19.300 --> 17:19.660 +There we go. + +17:19.660 --> 17:21.640 +299 bucks apparently. + +17:21.850 --> 17:34.150 +Uh, so I'm delighted that that we got to see that and, uh, iPad Pro uh, second generation. + +17:36.610 --> 17:40.340 +299 again, it seems that must be a popular price. + +17:40.640 --> 17:43.370 +So you get a good sense once it's warmed up. + +17:43.370 --> 17:49.460 +Once you've populated the model, you can call it very quickly and you'll get back a quick response. + +17:49.460 --> 17:51.440 +And again, we're calling our agent. + +17:51.440 --> 17:57.290 +And what our agent is doing is our agent over here is simply it's done this already. + +17:57.290 --> 18:01.370 +It's already got the class, it's already instantiated it. + +18:01.370 --> 18:04.070 +And that class is already loaded in the files from Hugging Face. + +18:04.070 --> 18:06.470 +And it's warm and ready for business. + +18:06.470 --> 18:10.070 +And then when we call this method, it doesn't need to load in the files again. + +18:10.070 --> 18:11.090 +It's got it all ready. + +18:11.090 --> 18:12.800 +All it needs to do is inference. + +18:12.800 --> 18:14.420 +And that's why it's so quick. + +18:14.420 --> 18:16.490 +And that's that's the trick. + +18:16.520 --> 18:23.030 +That's how you build an API to a model that's running in the cloud, serverless, and how you make sure + +18:23.030 --> 18:29.420 +that you've cached the model weights in memory so that it's ready for action. + +18:29.420 --> 18:33.530 +So I know we went through it very fast, but now you have to go back and do this again yourself. + +18:33.560 --> 18:36.680 +Make sure that you can make it work and that it looks good on modal. + +18:36.680 --> 18:39.110 +And I will see you in the next video to wrap up. 
diff --git a/week5/community-contributions/subtitles/srts/59669049/ja_JP.srt b/week5/community-contributions/subtitles/srts/59669049/ja_JP.srt new file mode 100755 index 0000000..3e12c68 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669049/ja_JP.srt @@ -0,0 +1,775 @@ +WEBVTT + +00:00.860 --> 00:12.650 +つまり、 最終的にデプロイするアプリを選ぶ前にテストするための一時的なアプリということです。 + +00:12.680 --> 00:16.520 +エフェメラル・アプリにはもう少し長く付き合うつもりだ。 + +00:16.520 --> 00:24.380 +前回構築した独自モデルをエフェメラル・アプリとしてデプロイしてから、 実際にデプロイするつもりだからだ。 + +00:24.380 --> 00:31.460 +そのために、 Pricer ephemeralというファイルをお見せしよう。 Pythonのモジュールで、 先ほど見たものと似ている部分もありますが、 + +00:31.490 --> 00:43.910 +1つだけ重要な違いがあります。 + +00:43.910 --> 00:43.910 +1、 + +00:43.910 --> 00:50.990 +推論でモデルを実行し、 それをテストしていたときにコードラボで使用したコードと非常によく似たコードを使用して、 + +00:50.990 --> 00:54.830 +独自の微調整を行ったモデルを使用します。 + +00:54.830 --> 01:03.380 +このPythonモジュールでは、 まず前回と同じようにmodalをインポートします。 + +01:03.620 --> 01:06.590 +そしてまた、 ボリュームを取り除く。 + +01:06.800 --> 01:09.650 +一時はそれを使うつもりだったが、 結局使わなかった。 + +01:10.160 --> 01:14.240 +つまり、 アプリを立ち上げるわけだ。 + +01:14.240 --> 01:15.260 +プライサーと呼ばれている。 + +01:15.290 --> 01:16.760 +それが、 あなたが見たアプリの名前になる。 + +01:16.790 --> 01:17.630 +プライサー + +01:18.020 --> 01:20.510 +以前、 私のプリサーの展開版を見ただろう。 + +01:20.810 --> 01:25.640 +次に、 イメージを作成して、 同じパッケージをインストールします。 + +01:25.640 --> 01:27.410 +だが待ってくれ、 新しいパッケージがある。 + +01:27.410 --> 01:38.060 +微調整したウェイトと秘密をロードできるようにするために必要な、 パラメーター効率の良い微調整パッケージを覚えているだろう。 + +01:38.060 --> 01:42.890 +願わくば、 それらを加える方法を見つけて、 HFシークレットの下に置くべきだ。 + +01:42.890 --> 01:47.210 +あるいは、 もしそれを他のものの下に置くのであれば、 明らかにここの名前を変えてください。 + +01:47.210 --> 01:50.030 +それが秘密のカテゴリーの名前だ。 + +01:50.030 --> 01:57.650 +そしてその中に、 通常のHFアンダースコア・トークンを抱きしめて、 トークンそのものを抱きしめていなければならない。 + +01:58.040 --> 02:00.480 +ええと、 それが実際にコードのどこかにあるかどうか確認してください。 + +02:00.510 --> 02:02.070 +それはないと思う。 + +02:06.390 --> 02:13.740 +T4GPUが欲しいと指定する。 + +02:13.770 --> 02:16.200 +もちろんベースモデルは指定する。 + +02:16.230 --> 02:17.850 +ラマ 3. 
180億ドル + +02:18.060 --> 02:19.620 +ええと、 プロジェクト名です。 + +02:19.620 --> 02:22.980 +これは、 ハギング・フェイスのハブから引っ張ってくるからだ。 + +02:23.280 --> 02:25.860 +これが僕のハグする顔のユーザーネームなんだ。 + +02:25.860 --> 02:30.000 +私の訓練されたバージョンを使いたいのであれば、 そこに私の名前を残しておいてほしい。 + +02:30.000 --> 02:35.130 +もし、 あなたが自分のバージョンを使いたいのであれば、 そのバージョンを微調整し、 もしかしたらさらに良くするかもしれないからだ。 + +02:35.130 --> 02:41.370 +もしかしたらハイパーパラメーターの最適化をやっていて、 私に勝ったのかもしれない。 + +02:41.520 --> 02:44.850 +うーん、 でもこれは僕のものだし、 僕のものを使うなら、 そのランネームを使うべきだよ。 + +02:44.850 --> 02:51.870 +そしてこのリビジョン番号は、 オーバーフィットを始める前の、 私が最高の結果を得たエポックだったことを覚えているだろう。 + +02:51.870 --> 02:56.250 +こうして、 微調整されたモデル名で終わる。 + +02:56.250 --> 03:00.030 +そうして、 シンプルな関数価格を手に入れた。 + +03:00.030 --> 03:04.980 +これは、 グーグル・コラボにあった機能とほとんど同じものだ。 + +03:05.190 --> 03:16.200 +これをローカル・ボックスに置いて、 このデコレーターでデコレーションするんだ。 + +03:16.290 --> 03:20.340 +しかし、 これはすべて同じものだ。 + +03:20.370 --> 03:21.990 +輸入も行っている。 + +03:22.380 --> 03:24.720 +その質問と接頭辞に注目したい。 + +03:25.230 --> 03:28.860 +1ドル単位でいくらですか? + +03:28.890 --> 03:32.580 +次に商品の説明、 そして価格がドル表示される。 + +03:32.580 --> 03:37.920 +そうなると、 モデルにとって、 価格がいくらなのか答えないのはほとんど耐え難いことになる。 + +03:38.340 --> 03:41.550 +そして、 ここで量子化を行う。 + +03:41.550 --> 03:44.160 +先ほどと同じようにモデルをロードする。 + +03:44.160 --> 03:46.980 +そして、 このセリフを覚えていてほしい。 + +03:46.980 --> 03:51.540 +ここでローラのファインチューン・モデルをロードする。 + +03:51.540 --> 03:53.400 +私たちはベースモデルを提供する。 + +03:53.400 --> 03:57.640 +微調整されたモデルの名前と、 この場合はリビジョン番号を提供する。 + +03:57.640 --> 04:01.030 +最終的なセーブを取るだけなら、 それは外してもいい。 + +04:01.030 --> 04:02.980 +中間点を取る必要はない。 + +04:02.980 --> 04:04.090 +私のあり方。 + +04:04.810 --> 04:06.670 +これは再現性のためだ。 + +04:06.670 --> 04:08.560 +ランダムシードが設定されるように。 + +04:08.590 --> 04:12.130 +だから、 トークン化するたびに同じ数字が得られる。 + +04:12.340 --> 04:19.570 +そして、 微調整したモデルにドットジェネレーションのコールをかけ、 デコードする。 + +04:19.570 --> 04:22.840 +そして最後に、 願わくばこのコードも認識していただきたい。 + +04:22.840 --> 04:29.440 +これは、 戻ってくるテキストから浮動小数点数を取り除く、 + +04:29.470 --> 04:39.700 +ちょっとしゃれたコードで、 ドル記号などが間違っている場合に備えている。 + +04:39.730 --> 04:42.970 +この時点では、 かなり見慣れたコードになっているはずだ。 + +04:42.970 --> 04:44.950 +では、 
ここに戻ろう。 + +04:45.130 --> 04:48.640 +ええと、 ウォームアップが必要だから、 前と同じように走らせたんだ。 + +04:48.640 --> 04:49.840 +数分かかる。 + +04:49.840 --> 04:52.180 +だから、 毎回待たされるのは苦痛だ。 + +04:52.180 --> 04:53.620 +でも、 もう走ったんだ。 + +04:53.650 --> 04:55.700 +もちろん、 これは私が走ったものだ。 + +04:56.480 --> 05:00.470 +モーダルイネーブル出力で実際にこれを持つ必要はない、 とあなたは言う。 + +05:00.470 --> 05:04.550 +でも、 もしそうしなければ、 他にもたくさんのプリントがある。 + +05:04.550 --> 05:07.730 +だから、 これはすっきりとした形にする方法なんだ。 + +05:08.030 --> 05:10.700 +でも、 アプリのドットランは必要だよ。 + +05:10.940 --> 05:14.000 +ええと、 それから、 結果は遠隔地での価格だと言うだけだ。 + +05:14.000 --> 05:22.790 +そして、 私が求めているのは、 ある特定のものに値段をつけることです。 + +05:22.790 --> 05:27.140 +そして、 私が値付けを依頼した対象は、 まさにここで私が話しているマイクだ。 + +05:27.140 --> 05:29.690 +だから、 そこに入力したんだ。 + +05:29.690 --> 05:31.130 +いくらかかったか覚えている。 + +05:31.160 --> 05:31.940 +なぜそうしないのか? + +05:31.940 --> 05:34.490 +それを見積もってどうなるか見てみよう。 + +05:34.670 --> 05:35.900 +それで走ったんだ。 + +05:35.930 --> 05:37.190 +数分かかったよ。 + +05:37.220 --> 05:42.710 +このボックスをウォームアップして、 いろいろなものを入力しなければならないので、 + +05:42.800 --> 05:47.090 +2、 3分かかります。 + +05:47.090 --> 05:50.600 +Huggingfaceのハブからダウンロードして、 それをメモリにロードしなければならない。 + +05:50.810 --> 05:54.020 +ええと、 これらのパッケージをすべてpipでインストールする必要があります。 + +05:54.020 --> 05:57.230 +だから、 たくさんのことが起こらなければならないし、 だから少し時間がかかるんだ。 + +05:57.410 --> 06:04.370 +ええと、 それでいろいろやって、 最後には完了して、 1番、 33番、 133ドルを返した。 + +06:04.370 --> 06:06.770 +そして、 それがこの費用の相場だと思う。 + +06:06.770 --> 06:13.790 +だから、 ああ、 そうだね、 うん、 たぶん、 その通りだと思う。 + +06:13.790 --> 06:14.180 +誰が知っている? 
+ +06:14.180 --> 06:18.530 +トレーニングデータセットの中にあったかもしれないが、 まあ、 確かにいい結果だ。 + +06:19.220 --> 06:22.670 +だから願わくば、 これを試してみてほしい。 + +06:22.670 --> 06:24.980 +2分、 2分、 3分待たなければならない。 + +06:24.980 --> 06:29.540 +そうしているうちにモーダルにも行けるようになるし、 エフェメラルアプリにも行けるようになる。 + +06:29.630 --> 06:33.320 +今はもう稼働していないが、 稼働している間はここにいて、 + +06:33.320 --> 06:40.850 +その様子を見て、 納得のいく結果を得ることができるだろう。 + +06:41.150 --> 06:41.780 +オーケー。 + +06:41.780 --> 06:48.560 +しかし、 それが終わった今、 リモートで機能を実行する方法に過ぎないエフェメラルなアプリから移行する時が来た。 + +06:48.560 --> 06:58.310 +デプロイされたアプリは、 いくつかの機能があり、 それを常設したいことをModellに伝えます。 + +06:58.340 --> 07:09.230 +しかし、 私たちはこのプロジェクトに名前をつけ、 必要なときにすぐに戻れるような適切な場所を作りたいと考えています。 + +07:09.650 --> 07:17.690 +コマンドラインでmodelと入力するんだ。 deployとデプロイしたいアプリの名前を入力する。 + +07:17.960 --> 07:24.260 +データサイエンティストとして構築したコードやモデルを、 + +07:24.260 --> 07:31.550 +本番で使えるようにデプロイしているのだ。 + +07:31.550 --> 07:33.740 +PythonのAPIを使う予定だ。 + +07:33.740 --> 07:40.220 +人々はPythonからこのAPIを呼び出すことができ、 APIを呼び出すことができる。 + +07:40.460 --> 07:47.540 +これはJupyterLabに元々組み込まれているプロダクション化の例です。 + +07:47.930 --> 07:55.450 +モーダルは、 Pythonから呼び出される必要がないように、 非常に簡単にRestエンドポイントを構築することもできます。 + +07:55.450 --> 08:00.070 +どのようなHTTPクエリからでも呼び出すことができる。 + +08:00.250 --> 08:03.610 +このクラスでは、 Pythonを使うので、 Pythonについては扱いません。 + +08:03.610 --> 08:05.980 +だから、 Pythonから呼び出せばいいのだ。 + +08:06.010 --> 08:09.160 +でも、 それはとても簡単なことだし、 ドキュメントにもはっきり書いてある。 + +08:09.700 --> 08:14.710 +それで、 ちょっと前に話したことなんだけど、 もう知っているかもしれないね。 + +08:14.710 --> 08:23.080 +ジュピターには、 感嘆符を押すだけでターミナルで何かを実行できる素晴らしい機能があるんだ。 + +08:23.080 --> 08:26.920 +そして、 実行したいコマンドの名前を入力する。 + +08:26.920 --> 08:32.170 +プラスボタンを押してターミナル・ウィンドウを開き、 ターミナルを作成することもできる。 + +08:32.170 --> 08:36.640 +ええと、 でもこれはもっと簡単で、 このプロンプトの中でできるんです。 + +08:36.640 --> 08:41.710 +コマンドはモーダルデプロイで、 次にパッケージ名を指定する。 + +08:41.710 --> 08:43.510 +つまり、 プライスレスなサービスなんだ。 + +08:43.510 --> 08:46.480 +では、 サービス価格というパッケージを見てみよう。 + +08:46.510 --> 08:48.340 +早速だが、 ここで私が得たものを紹介しよう。 + +08:48.370 --> 08:53.280 +だから......驚くほど似ているんだ。 + +08:53.550 --> 09:00.300 +基本的には、 定数があり、 関数が定義され、 + +09:00.300 --> 
09:15.990 +基本的には同じことなのですが、 これでコードをサービスとしてデプロイするのに十分なのです。 + +09:16.140 --> 09:18.150 +でも、 同じことだよ。 + +09:18.150 --> 09:21.900 +パッケージが違うだけで、 同じコードだ。 + +09:21.900 --> 09:24.720 +そしてまた、 このボリュームではその必要はない。 + +09:25.080 --> 09:34.470 +初日のノートブックに戻ると、 プライスのサービスとしてデプロイされているのがわかる。 + +09:34.710 --> 09:37.530 +ええと、 最後に小さなURLが表示されるんだ。 + +09:37.800 --> 09:45.450 +それが終わったら、 ここに戻ってアプリの配置に行くと、 価格が表示されているのがわかると思う。 + +09:45.660 --> 09:50.830 +T4のGPUボックスの上に座っている。 + +09:50.860 --> 09:57.970 +ここで話を戻すと、 デプロイされたアプリを呼び出す方法は、 エフェメラル・アプリとは異なり、 実はシンプルだ。 + +09:58.000 --> 09:58.900 +それは必要ない。 + +09:58.900 --> 10:01.870 +さっきの足場。 + +10:01.900 --> 10:10.690 +その代わりに、 モーダル・ドット・ファンクション、 ドット・ルックアップと言い、 サービス名とファンクション名を指定する。 + +10:10.720 --> 10:11.800 +単純なことだ。 + +10:11.800 --> 10:12.760 +モーダルドット機能。 + +10:12.790 --> 10:15.430 +サービスと機能をドット検索する。 + +10:15.700 --> 10:22.120 +そうしたら、 price dot remoteと言って、 リモート・コールができる。 + +10:22.120 --> 10:25.120 +そして、 同じ133という数字が返ってきた。 + +10:25.120 --> 10:26.350 +それがすべてだ。 + +10:26.380 --> 10:34.480 +私たちは、 独自モデルのアプリをクラウドにデプロイし、 それを呼び出してクラウド上で実行したところです。 + +10:35.470 --> 10:39.910 +もっといい方法があるんだ。 + +10:39.910 --> 10:45.400 +このように作ると、 スリープして再び温める必要があるたびに、 + +10:45.400 --> 10:49.300 +1、 2分後にはスリープしてしまう。 + +10:49.810 --> 10:56.290 +そのため、 呼び出すたびに余分な時間を待たされ、 基本的には毎回、 画像を再構築し、 + +10:56.290 --> 11:02.230 +インポートを行い、 モデルをロードしなければならない。 + +11:02.560 --> 11:03.340 +あの、 すみません。 + +11:03.370 --> 11:05.800 +その必要がないバージョンにジャンプしたんだ。 + +11:05.800 --> 11:06.340 +さあ、 始めよう。 + +11:06.370 --> 11:09.340 +これはそのすべてをやらなければならない悪いバージョンだ。 + +11:09.430 --> 11:11.560 +だから、 第2バージョンを作ったんだ。 + +11:11.560 --> 11:13.780 +価格は2π。 + +11:13.810 --> 11:19.000 +このモジュールは......そしてこのモジュールは、 もう少しスマートな方法で作られている。 + +11:19.000 --> 11:28.300 +これはクラスを使って行われ、 初期化のいくつかの側面を事前に構築することができる。 + +11:28.300 --> 11:30.130 +だから毎回やる必要はない。 + +11:30.430 --> 11:39.250 +このようなクラスを定義すると、 このデコレーター・モデルのドットビルドを使用して、 最初にボックスをビルドしたときに、 + +11:39.250 --> 11:45.820 +そのボックスにデータを入力する関数を書くことができます。 + +11:46.000 --> 11:51.140 +この場合、 私はhuggingfaceからモデルをダウンロードし、 
それをキャッシュに入れる。 + +11:51.290 --> 11:54.950 +時間の都合上、 練習として残しておくが、 + +11:54.950 --> 11:59.030 +これは完全に自明なことだ。 + +11:59.180 --> 12:05.720 +このコードはHuggingfaceからファイルをロードし、 Huggingfaceのキャッシュに置く。 + +12:05.990 --> 12:12.530 +そして、 このデコレーター・モーダル・ドットを入力させれば、 この関数が実際に呼び出される。 + +12:12.950 --> 12:21.680 +そしてここで、 トークナイザーをセットアップし、 一度読み込まれるベースモデルをセットアップする。 + +12:21.680 --> 12:23.510 +だから繰り返し使える。 + +12:23.660 --> 12:26.480 +これがスリープに入る前に呼ばれた場合。 + +12:26.990 --> 12:30.530 +ええと、 これは方法そのものの値段なんだ。 + +12:30.530 --> 12:47.000 +これは以前と同じですが、 モデルをメモリにロードするコードは、 Aデコレータのモーダルまたはエンターの下のセットアップのモーダルにあるこのコードに取り出されました。 + +12:47.750 --> 12:52.640 +この結果、 もし私がまだこの件で皆さんを迷わせていないのであれば、 初日に戻って、 + +12:52.640 --> 12:58.130 +重要なポイント、 つまり、 私はプロセス・サービス2をモーダルにデプロイする。 + +12:58.160 --> 13:00.320 +プロセス・サービス2が配備されるわけだ。 + +13:00.530 --> 13:04.310 +そうしたら、 このコードを少し変えて使うことができる。 + +13:04.340 --> 13:08.090 +前回、 モーダル・ドット・ファンクション・ドット・ルックアップをやったのを覚えているだろうか。 + +13:08.090 --> 13:13.190 +今は、 モーダルのドット・クラスをやっている。 + +13:13.340 --> 13:17.300 +うーん、 おそらくクラスはすでに使われているものだろう。 + +13:17.300 --> 13:18.680 +だから、 彼らはそれを使うことができなかった。 + +13:18.770 --> 13:26.420 +つまり、 モーダル・ドット・ドット・ルックアップし、 サービス価格と価格を大文字のP価格で表示する。 + +13:26.420 --> 13:27.650 +だからクラスなんだ。 + +13:27.650 --> 13:32.900 +これで、 今デプロイしたクラスを見つけることができる。 + +13:33.020 --> 13:35.450 +そのクラスをインスタンス化することができる。 + +13:35.450 --> 13:38.960 +だから、 プライスはプライスのインスタンスだと言っているんだ。 + +13:39.500 --> 13:42.770 +それから、 プライサー・ドット・プライス・ドット・リモートに電話すればいい。 + +13:42.950 --> 13:46.620 +そうすると、 またその答えが返ってくる。 + +13:46.620 --> 13:52.440 +そして、 以前これを実行したときは、 すでにメモリにあったため超高速で実行されたが、 今はすでにスワップアウトされているのだろう。 + +13:52.440 --> 13:58.500 +だから、 足手まといにならないように、 今はやめておくよ。 + +13:58.830 --> 14:02.400 +ええと、 その間に話ができるように、 今キックオフしようと思うんだ。 + +14:02.430 --> 14:03.390 +走らせてみよう。 + +14:03.390 --> 14:17.250 +そこで、 これをSpecialist Agentという便利なクラスにまとめました。 + +14:17.370 --> 14:23.970 +これは、 モーダルで動いているプライサーを調べて、 プライサーを呼び出すものです。 + +14:23.970 --> 14:29.220 +そして、 それはただ一つの関数、 一つのメソッド価格を取るものだ。 + +14:29.220 --> 14:31.560 +そして、 値段も自由に設定できる。 + +14:31.560 --> 14:32.460 
+行って見よう。 + +14:32.460 --> 14:33.780 +それはエージェントにある。 + +14:33.900 --> 14:35.490 +そして今、 最初のプレビューができる。 + +14:35.520 --> 14:38.700 +今週プレーするさまざまなエージェントを見てほしい。 + +14:38.940 --> 14:42.120 +この捜査官を調べに行くんだ。 + +14:42.120 --> 14:46.380 +スペシャリスト・エージェントと呼ばれるのは、 我々の特別なモデルを使っているからだ。 + +14:46.380 --> 14:50.340 +これがスペシャリスト・エージェント用のPythonモジュールだ。 + +14:50.340 --> 14:56.220 +そして、 約束したように、 私がコメントを書いたり、 何らかのヒントがあることがわかるだろう。 + +14:56.430 --> 14:58.620 +だから、 このクラスはとてもシンプルなクラスだ。 + +14:58.620 --> 15:03.630 +これは、 我々の微調整されたLLMをモーダルでリモートで実行するエージェントだ。 + +15:04.020 --> 15:08.040 +ええと、 名前を決めて、 自分に名前と色をつけるんだ。 + +15:08.250 --> 15:13.290 +その理由は後でわかるが、 エージェントにログを残してもらうためだ。 + +15:13.290 --> 15:15.060 +だから、 彼らが何をしているのか見ることができる。 + +15:15.060 --> 15:17.430 +そしてスペシャリスト・エージェントは赤を選ぶ。 + +15:17.430 --> 15:20.490 +だから、 いつ何を考えているのか、 何をしているのか、 そういうことがわかるようになる。 + +15:21.240 --> 15:31.440 +コンストラクタの初期化メソッドで初期化し、 ログに記録する。 + +15:31.440 --> 15:35.340 +そして、 このモーダルなドット+ドット検索を行う。 + +15:35.340 --> 15:37.470 +そして、 サービスや価格を調べる。 + +15:37.650 --> 15:40.680 +そして、 これが先ほど配備したものだ。 + +15:40.680 --> 15:44.470 +だから、 それを調べてインスタンス化した。 + +15:44.470 --> 15:47.470 +そして、 ビジネスの準備ができたことを記録する。 + +15:47.530 --> 15:52.180 +そして、 プライスコール自体も、 まさに今見てきたようなことをする。 + +15:52.210 --> 15:55.540 +私たちは価格ドット・プライス・ドット・リモートを行っている。 + +15:55.570 --> 15:57.910 +ハロー・ドット・リモートの時と同じだよ。 + +15:57.910 --> 15:59.110 +私たちはファンクションを取っている。 + +15:59.110 --> 16:02.410 +私たちはリモートと呼んでいるが、 これはクラウド上で実行することを意味している。 + +16:02.410 --> 16:04.360 +ほとんど透明なんだ。 + +16:04.360 --> 16:12.520 +Pythonで直接呼び出しているようなものだが、 実際には指定したT4ボックス上のクラウド上で実行される。 + +16:12.550 --> 16:19.000 +説明を渡し、 完了したことを記録し、 結果を返す。 + +16:19.570 --> 16:24.340 +だから願わくば、 初日に戻ったときには終わっていて、 まだ終わっていないということになるくらい、 + +16:24.340 --> 16:26.290 +長い間おしゃべりしていたい。 + +16:26.650 --> 16:32.950 +それなら、 ここをめくって時間を埋めることもできる。 + +16:33.310 --> 16:39.850 +ええと、 だから今ここで、 思い出す前に、 僕は儚いアプリを探して迷子になったんだ。 + +16:39.880 --> 16:47.470 +現在、 デプロイされたアプリについて正しく理解しています。 なぜなら、 デプロイされたアプリであるPricerサービスまたはPricerを見ているからです。 + +16:47.470 --> 16:49.840 +私たちはこの中に入って見てみることができる。 
+ +16:49.870 --> 16:51.400 +ああ、 今は成功したと言っている。 + +16:51.400 --> 16:52.720 +戻って見てみよう。 + +16:52.720 --> 16:54.220 +それは確かに成功した。 + +16:54.220 --> 16:54.940 +これでよし。 + +16:54.940 --> 16:58.000 +それは成功し、 正しい数字133を返した。 + +16:58.030 --> 17:02.470 +そして、 遅滞なくすぐにもう一度走らせたら、 どれだけ速いかわかるだろう? + +17:02.470 --> 17:05.560 +すでに暖かくなっているときは、 それだけ早くなるのが普通だ。 + +17:05.560 --> 17:07.360 +それがクラウドに出るんだ。 + +17:07.360 --> 17:09.730 +早く違うものを考えよう。 + +17:09.760 --> 17:16.780 +iPhone SEセカンドエディションにしよう。 + +17:18.040 --> 17:19.300 +iPhoneの値段を見てみよう。 + +17:19.300 --> 17:19.660 +これでよし。 + +17:19.660 --> 17:21.640 +299ドルらしい。 + +17:21.850 --> 17:34.150 +そして、 iPad Pro第2世代を見ることができたことをうれしく思う。 + +17:36.610 --> 17:40.340 +299ドルというのは、 またしても人気の値段なのだろう。 + +17:40.640 --> 17:43.370 +だから、 暖まればいい感覚が得られる。 + +17:43.370 --> 17:49.460 +一旦モデルを入力したら、 それを素早く呼び出すことができ、 素早い応答が返ってくる。 + +17:49.460 --> 17:51.440 +そしてまた、 エージェントに電話する。 + +17:51.440 --> 17:57.290 +そして、 我々のエージェントがやっていることは、 こちらのエージェントがすでにやっていることだ。 + +17:57.290 --> 18:01.370 +すでにクラスがあり、 インスタンス化されている。 + +18:01.370 --> 18:04.070 +そして、 そのクラスはすでにハギング・フェイスのファイルにロードされている。 + +18:04.070 --> 18:06.470 +そして、 暖かく、 ビジネスの準備が整っている。 + +18:06.470 --> 18:10.070 +そして、 このメソッドを呼び出すと、 ファイルを再度読み込む必要がなくなる。 + +18:10.070 --> 18:11.090 +準備万端だ。 + +18:11.090 --> 18:12.800 +必要なのは推論だけだ。 + +18:12.800 --> 18:14.420 +だから、 とても速いんだ。 + +18:14.420 --> 18:16.490 +それがコツなんだ。 + +18:16.520 --> 18:23.030 +これが、 クラウドでサーバーレスで動作するモデルへのAPIを構築する方法であり、 + +18:23.030 --> 18:29.420 +モデルの重みをメモリにキャッシュして、 すぐに実行できるようにする方法だ。 + +18:29.420 --> 18:33.530 +だから、 私たちはあっという間に終わってしまった。 + +18:33.560 --> 18:36.680 +モーダルでうまく機能し、 見栄えがすることを確認してください。 + +18:36.680 --> 18:39.110 +それではまた、 次のビデオでお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/59669049/ko_KR.srt b/week5/community-contributions/subtitles/srts/59669049/ko_KR.srt new file mode 100755 index 0000000..3d13c49 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669049/ko_KR.srt @@ -0,0 +1,865 @@ +WEBVTT + +00:00.860 --> 00:07.040 +방금 보신 건 임시 앱입니다 임시 앱이란 
뜻으로 배포할 + +00:07.040 --> 00:12.650 +앱을 선택하기 전에 테스트에만 사용하죠 + +00:12.680 --> 00:16.520 +비트는 좀 더 오래 사용할 겁니다 왜냐하면 + +00:16.520 --> 00:24.380 +지난 시간에 만든 독점 모델을 실제로 사용하기 전에 비트로 배포할 거니까요 + +00:24.380 --> 00:31.460 +그러기 위해서 다른 파일을 보여드릴게요 Pricer ephemeral이죠 파이썬 을 이용한 모듈인파이에는 유사점이 + +00:31.490 --> 00:37.460 +있습니다 그러나 한 가지 중요한 차이점이 있습니다 llama3을 + +00:37.460 --> 00:43.910 +이용하는 것 대신에 말이죠 첫째, 정교하게 튜닝된 모델을 사용할 겁니다 + +00:43.910 --> 00:50.990 +추론하기 위해 모델을 실행하고 테스트할 때 Codelab에서 사용했던 것과 아주 유사한 코드를 + +00:50.990 --> 00:54.830 +사용해서요 아주 잘 작동하는 순간은 정말 황홀했죠 + +00:54.830 --> 01:03.380 +파이썬 모듈에서는 이전처럼 모드 가져오기로 시작하죠 + +01:03.620 --> 01:06.590 +그리고 다시 볼륨을 제거할게요 + +01:06.800 --> 01:09.650 +한때는 그걸 사용하려고 했는데 결국 안 했어요 + +01:10.160 --> 01:14.240 +그래서 설정하고 앱이 있다고 하죠 + +01:14.240 --> 01:15.260 +프라이서라는 거예요 + +01:15.290 --> 01:16.760 +여러분이 보신 앱의 이름이 될 거예요 + +01:16.790 --> 01:17.630 +프라이서예요 + +01:18.020 --> 01:20.510 +배포된 Pricer 버전을 전에 보셨죠 + +01:20.810 --> 01:25.640 +이제 이미지를 생성하고 같은 패키지를 설치해요 + +01:25.640 --> 01:27.410 +잠깐만요, 새 소포가 왔어요 + +01:27.410 --> 01:34.850 +매개 변수 효율 미세 튜닝 패키지를 기억하실 겁니다 미세 튜닝과 비밀을 + +01:34.850 --> 01:38.060 +로드하기 위해 필요한 거죠 + +01:38.060 --> 01:42.890 +여러분이 추가하는 방법을 찾았길 바랍니다 HF 시크릿으로 입력하세요 + +01:42.890 --> 01:47.210 +또는 다른 곳에 Put을 하면 여기 이름을 바꾸죠 + +01:47.210 --> 01:50.030 +그게 비밀의 범주 이름이에요 + +01:50.030 --> 01:56.390 +그 안에 일반적인 HF_토큰을 가지고 있어야 합니다. 토큰 자체를 위한 포옹의 + +01:56.390 --> 01:57.650 +밑줄이죠. 
+ +01:58.040 --> 02:00.480 +코드 어딘가에 있는지 찾아봐요 + +02:00.510 --> 02:02.070 +아닌 것 같아요 + +02:02.130 --> 02:05.940 +어쨌든, 그게 말이 됐으면 좋겠네요 + +02:06.390 --> 02:13.740 +t4 GPU를 원한다고 명시해요 + +02:13.770 --> 02:16.200 +기본 모델을 지정하죠 + +02:16.230 --> 02:17.850 +라마 3요 180억 달러요 + +02:18.060 --> 02:19.620 +프로젝트 이름요 + +02:19.620 --> 02:22.980 +H깅 페이스 허브에서 끌어오기 때문이죠 + +02:23.280 --> 02:25.860 +포옹하는 얼굴 닉네임이에요 + +02:25.860 --> 02:30.000 +훈련받은 버전을 쓰고 싶으면 내 이름을 그대로 둬요 + +02:30.000 --> 02:35.130 +자신의 버전을 쓰고 싶다면요 미세 조정해서 더 좋게 만들었을 수도 있으니까요 + +02:35.130 --> 02:39.600 +hyperpaameter 최적화를 했다가 저를 이겼을 수도 있죠 그럴 경우 + +02:39.600 --> 02:41.370 +반드시 1년도를 사용해야 해요 + +02:41.520 --> 02:44.850 +하지만 이건 제 거니까 제 걸 쓰시면 그 런네임을 쓰셔야죠 + +02:44.850 --> 02:50.400 +기억하시겠지만 이 수정 수치는 과도해지기 전이 가장 좋은 + +02:50.400 --> 02:51.870 +시절이었어요 + +02:51.870 --> 02:56.250 +미세 튜닝 모델 이름이 되는 거죠 + +02:56.250 --> 03:00.030 +간단한 함수 가격도 있고요 + +03:00.030 --> 03:04.980 +구글 콜랍에 있는 기능과 거의 똑같은 함수예요 + +03:05.190 --> 03:11.130 +여기 제 로컬 박스에 있어요 장식자로 장식할 겁니다 이 이미지 안에서 + +03:11.130 --> 03:16.200 +실행하고 싶다고 하죠 시크릿과 GPU 안에서요 + +03:16.290 --> 03:20.340 +하지만 이건 다 같은 거예요 + +03:20.370 --> 03:21.990 +수입도 좀 하고요 + +03:22.380 --> 03:24.720 +질문과 앞부분을 잘 보세요 + +03:25.230 --> 03:28.860 +가장 가까운 비용으로 얼마가 드는지를 질문할 거예요 + +03:28.890 --> 03:32.580 +제품 설명과 가격은 달러예요 + +03:32.580 --> 03:37.920 +그러면 가격이 얼마인지 모델 측에서 반응하지 않을 수 없어요 + +03:38.340 --> 03:41.550 +이제 여기서 퀀타이즈를 해요 + +03:41.550 --> 03:44.160 +아까처럼 모형을 로드해요 + +03:44.160 --> 03:46.980 +이 대사를 기억하면 좋겠네요 + +03:46.980 --> 03:51.540 +로라 파인 튜닝 모델을 실을 거예요 + +03:51.540 --> 03:53.400 +기본 모델을 제공하죠 + +03:53.400 --> 03:57.640 +미세 튜닝 모델의 이름과 이 경우에는 수정 번호를 제공하죠 + +03:57.640 --> 04:01.030 +마지막 세이브만 하실 거면 그건 빼도 돼요 + +04:01.030 --> 04:02.980 +그럴 필요 없어요 + +04:02.980 --> 04:04.090 +있는 그대로의 나 말이에요 + +04:04.810 --> 04:06.670 +이건 번식을 위한 거예요 + +04:06.670 --> 04:08.560 +무작위 씨앗이 설정된 거죠 + +04:08.590 --> 04:12.130 +Get up을 할 때마다 같은 숫자가 나오죠 + +04:12.340 --> 04:19.570 +그다음 미세 튜닝 모델에 .생산이라는 호출을 만들어 디코딩하죠 + +04:19.570 --> 04:22.840 +마지막으로 이 코드도 
인식하셨으면 좋겠네요 + +04:22.840 --> 04:29.440 +이건 숫자, 부동점수를 제거하는 좀 엉성한 코드예요 텍스트 안에서요 잘못된 + +04:29.470 --> 04:33.490 +$ 기호가 있을 경우를 대비해서요 어떤 숫자든 + +04:33.490 --> 04:39.700 +get 할 수 있도록 하고 그 숫자를 반환합니다 그게 다예요 + +04:39.730 --> 04:42.970 +이 시점에서 아주 친숙한 코드죠 + +04:42.970 --> 04:44.950 +이제 다시 여기로 돌아가죠 + +04:45.130 --> 04:48.640 +이미 작동시켰어요 예열해야 하니까요 + +04:48.640 --> 04:49.840 +몇 분 걸려요 + +04:49.840 --> 04:52.180 +매번 기다려야 한다면 괴로울 거예요 + +04:52.180 --> 04:53.620 +벌써 조회했어요 + +04:53.650 --> 04:55.700 +물론 제가 뛰었던 곳이죠 + +04:56.480 --> 05:00.470 +출력 활성화 모듈이 필요 없다고 말씀하셨는데요 + +05:00.470 --> 05:04.550 +하지만 그렇게 하지 않으면 다른 프린트도 많아요 + +05:04.550 --> 05:07.730 +이렇게 하면 깔끔하고 멋지게 만들 수 있어요 + +05:08.030 --> 05:10.700 +하지만 app.lan은 있어야 해요 + +05:10.940 --> 05:14.000 +그리고 결과물은 가격 매기는 원격이라고 하죠 + +05:14.000 --> 05:20.990 +그리고 특정 객체의 가격을 매겨달라고 요청합니다. 그냥 둘러보면서 가격을 매겨달라고 + +05:20.990 --> 05:22.790 +요청하는 거죠. + +05:22.790 --> 05:27.140 +가격을 매겨달라고 한 객체는 바로 이 마이크예요 + +05:27.140 --> 05:29.690 +그래서 그냥 입력했어요 + +05:29.690 --> 05:31.130 +얼마였는지 기억해요 + +05:31.160 --> 05:31.940 +안 될 거 없죠 + +05:31.940 --> 05:34.490 +어떻게 계산되는지 보죠 + +05:34.670 --> 05:35.900 +그래서 조회해 봤죠 + +05:35.930 --> 05:37.190 +몇 분 걸렸죠 + +05:37.220 --> 05:42.710 +이제 몇 분 걸려요 상자를 데워야 하고 내용물도 채워야 하니까요 Colab으로 + +05:42.800 --> 05:47.090 +실행했을 때 해야 할 모든 일을 해야 하니까요 + +05:47.090 --> 05:50.600 +H깅페이스 허브에서 다운로드 받아 메모리에 로드해야 해요 + +05:50.810 --> 05:54.020 +또한 모든 패키지를 설치해야 하죠 + +05:54.020 --> 05:57.230 +비트를 만들려면 할 일이 많아서 시간이 좀 걸려요 + +05:57.410 --> 06:04.370 +그 모든 걸 다 하고 마지막에 1을 반환했어요 33, 133달러 + +06:04.370 --> 06:06.770 +이 작품의 가격은 대략 그 정도예요 + +06:06.770 --> 06:13.790 +네, 꽤 정확한 위치인 것 같아요 그래서 맞을지도 몰라요 + +06:13.790 --> 06:14.180 +누가 알겠어요? + +06:14.180 --> 06:18.530 +훈련 데이터 세트에도 있을 수 있지만 좋은 결과예요 + +06:19.220 --> 06:22.670 +여러분도 이렇게 해 보세요 + +06:22.670 --> 06:24.980 +2-3분 정도 기다려야 해요 + +06:24.980 --> 06:29.540 +작업하는 동안 Modal로 갈 수도 있고 단기 앱으로 갈 수도 있어요 + +06:29.630 --> 06:33.320 +지금은 실행 중이지만 실행 중일 때는 여기 + +06:33.320 --> 06:40.850 +있을 겁니다. 그리고 작업을 하는 것을 볼 수 있고 만족스러운 결과를 얻을 수 있죠. 
+ +06:41.150 --> 06:41.780 +네 + +06:41.780 --> 06:46.160 +이제 그건 끝났고 짧은 앱에서 전환할 차례입니다 원격으로 + +06:46.160 --> 06:48.560 +기능을 실행하는 방법이죠 + +06:48.560 --> 06:54.590 +배포된 앱이 있는데 Modell에 기능성 몇 개를 영구적으로 + +06:54.590 --> 06:58.310 +포함하고 싶다고 말할 때죠 + +06:58.340 --> 07:03.530 +Modell은 여전히 실행할 때만 지불합니다 하지만 이름 같은 + +07:03.530 --> 07:09.230 +걸 정하고 싶어요 필요할 경우 재빨리 다시 할 수 있는 적절한 장소도요 + +07:09.650 --> 07:16.460 +명령줄에 모델을 입력하는 게 그 방법이에요 배포하고 배포하려는 앱 이름을 + +07:16.460 --> 07:17.690 +입력해요 + +07:17.960 --> 07:24.260 +중요한 건 바로 지금 이 순간 우리가 데이터 과학자로서 만든 코드나 + +07:24.260 --> 07:29.900 +모델을 생산 목적으로 사용할 수 있는 방식으로 배포하고 있다는 + +07:29.900 --> 07:31.550 +거예요 + +07:31.550 --> 07:33.740 +파이썬 API가 뒤에 있을 거예요 + +07:33.740 --> 07:40.220 +파이썬 을 이용해서 호출할 수 있고 API 호출도 할 수 있어요 + +07:40.460 --> 07:47.540 +이건 프로덕션화의 예입니다 원래 주피터랩에서 구축한 거죠 + +07:47.930 --> 07:54.460 +Modal 또한 REST 끝점을 아주 쉽게 빌드할 수 있게 해줍니다 파이썬으로 호출할 + +07:54.460 --> 07:55.450 +필요가 없죠 + +07:55.450 --> 08:00.070 +어떤 HTTP 쿼리에서도 호출될 수 있어요 + +08:00.250 --> 08:03.610 +이 수업에선 다루지 않겠습니다 파이썬 으로만 다루고 있으니까요 + +08:03.610 --> 08:05.980 +그러니 그냥 파이썬 에서 호출하는 게 낫겠어요 + +08:06.010 --> 08:09.160 +하지만 아주 쉽고 의사들도 명확하게 알려줬어요 + +08:09.700 --> 08:14.710 +제가 방금 얼버무린 게 있는데 이미 아실지도 모르겠네요 + +08:14.710 --> 08:20.980 +주피터에는 이런 멋진 기능이 있어요 터미널에서 뭔가를 실행할 수 있죠 + +08:20.980 --> 08:23.080 +느낌표만 찍으면 돼요 + +08:23.080 --> 08:26.920 +실행하고 싶은 명령의 이름을 적고요 + +08:26.920 --> 08:31.510 +터미널 윈도우를 열 수 있습니다 플러스 버튼을 눌러서 터미널을 생성할 + +08:31.510 --> 08:32.170 +수 있죠 + +08:32.170 --> 08:36.640 +하지만 이게 훨씬 더 쉬워요 여기 프롬프트에서 할 수 있거든요 제가 위에서 한 거죠 + +08:36.640 --> 08:41.710 +명령은 역시 양식 배포와 패키지 이름이죠 + +08:41.710 --> 08:43.510 +값을 매길 수 없는 서비스죠 + +08:43.510 --> 08:46.480 +그럼 프라이스 오브 서비스라는 패키지를 살펴보죠 + +08:46.510 --> 08:48.340 +여기 뭐가 있는지 간단히 말씀드릴게요 + +08:48.370 --> 08:53.280 +놀랍게도 비슷하네요 + +08:53.550 --> 09:00.300 +보시다시피 기본적으로 상수가 있고 함수를 정의합니다 + +09:00.300 --> 09:08.460 +기본적으로 같은 거지만 우리 코드를 서비스로 적절하게 배포하기에 + +09:08.490 --> 09:15.990 +충분합니다 이전 프로세스의 일시적인 버전 말고요 + +09:16.140 --> 09:18.150 +네, 같은 거예요 + +09:18.150 --> 
09:21.900 +같은 코드인데 다르게 포장돼 있어요 + +09:21.900 --> 09:24.720 +이 볼륨으로는 필요 없어요 + +09:25.080 --> 09:32.310 +실행해 보죠 첫째 날 공책으로 돌아가면 Price Services로 + +09:32.310 --> 09:34.470 +배포된 게 보이시죠 + +09:34.710 --> 09:37.530 +끝에 작은 URL이 뜨네요 + +09:37.800 --> 09:43.500 +그게 완료되면 이제 여기로 돌아가서 배포된 앱으로 가면 가격이 실행되고 + +09:43.500 --> 09:45.450 +있는 게 보이시죠 + +09:45.660 --> 09:50.830 +T4 GPU 박스에 앉아서요 명시한 대로요 + +09:50.860 --> 09:57.100 +다시 돌아와서 보면 배포된 앱을 호출하는 방법은 에피헤머럴 앱과 다르고 사실 더 + +09:57.100 --> 09:57.970 +간단해요 + +09:58.000 --> 09:58.900 +그건 필요 없어요 + +09:58.900 --> 10:01.870 +방금 보신 비계 말이에요 + +10:01.900 --> 10:08.890 +Modal.comcom과 .outup을 입력하는 대신 서비스 이름을 입력하고 + +10:08.890 --> 10:10.690 +함수 이름을 입력해요 + +10:10.720 --> 10:11.800 +간단해요 + +10:11.800 --> 10:12.760 +Modal. 함수요 + +10:12.790 --> 10:15.430 +.찾기 서비스와 함수요 + +10:15.700 --> 10:22.120 +실행이 끝나면 가격. remote을 치고 원격 호출을 해요 + +10:22.120 --> 10:25.120 +그리고 아까와 같은 133을 얻게 돼요. Get up! Get up! + +10:25.120 --> 10:26.350 +그게 다인 거죠 + +10:26.380 --> 10:33.430 +클라우드에 소유 모델을 이용해 앱을 배포했고 클라우드에서 실행되도록 이름을 + +10:33.430 --> 10:34.480 +붙였어요 + +10:35.470 --> 10:39.910 +더 멋지고 더 멋진 방법이 있어요 + +10:39.910 --> 10:45.400 +이렇게 만들면 잠들었다가 다시 따뜻하게 해 줘야 해요 그런데 + +10:45.400 --> 10:49.300 +1-2분 정도 지나면 금방 잠들어 버리죠 + +10:49.810 --> 10:56.290 +그래서 전화를 걸 때마다 추가 시간을 기다려야 하고 이미지를 + +10:56.290 --> 11:02.230 +다시 구축하고 모델을 불러오는 등의 작업을 해야 하죠 + +11:02.560 --> 11:03.340 +죄송해요 + +11:03.370 --> 11:05.800 +그럴 필요가 없는 버전이 됐죠 + +11:05.800 --> 11:06.340 +시작할게요 + +11:06.370 --> 11:09.340 +이건 나쁜 버전이에요 그 모든 걸 해야 하죠 + +11:09.430 --> 11:11.560 +그래서 두 번째 버전을 만들었어요 + +11:11.560 --> 11:13.780 +가격은 2파이예요 + +11:13.810 --> 11:19.000 +이 모듈은 좀 더 비트 있게 진행되는데요 + +11:19.000 --> 11:27.370 +클래스를 이용하고 초기화의 몇 가지 측면을 미리 빌드할 수 있게 해 + +11:27.370 --> 11:28.300 +줘요 + +11:28.300 --> 11:30.130 +매번 안 해도 돼요 + +11:30.430 --> 11:39.250 +클래스를 이렇게 정의할 때 장식자 모델.빌드를 사용할 수 있고 처음 만들어졌을 + +11:39.250 --> 11:45.820 +때 여러분의 박스를 채울 함수를 쓸 수 있어요 + +11:46.000 --> 11:51.140 +이 경우엔 HINGPace 모델에서 다운로드 받아 캐시에 넣었어요 + +11:51.290 --> 11:54.950 +자세히 
설명하진 않겠습니다 시간 관계상 + +11:54.950 --> 11:59.030 +연습 삼아 남겨두겠습니다만 아주 간단하죠 + +11:59.180 --> 12:05.720 +이 코드는 허깅페이스에서 파일을 불러와 허깅페이스 캐시에 놓죠 + +12:05.990 --> 12:12.530 +그리고 Modal.엔터를 장식할 수 있죠 함수가 호출될 때예요 + +12:12.950 --> 12:20.510 +여기서 설정합니다 토큰라이저를 설정하고 베이스 모델을 설정해 한 번 + +12:20.510 --> 12:21.680 +로드하죠 + +12:21.680 --> 12:23.510 +반복적으로 사용할 수 있죠 + +12:23.660 --> 12:26.480 +잠들기 전에 부르면요 + +12:26.990 --> 12:30.530 +이 방법 자체가 가격이에요 + +12:30.530 --> 12:40.040 +이전과 같지만 메모리에 모델을 로드하는 코드는 여기 여기로 나왔어요 Modal in Setting으로요 + +12:40.190 --> 12:47.000 +a Decoator Modal or enter 밑에서요 + +12:47.750 --> 12:52.640 +이 모든 것의 결과로 아직 이것과 관련해 여러분이 이해를 못 하신다면 첫날로 + +12:52.640 --> 12:58.130 +돌아가서 핵심 포인트를 말씀드리자면 프로세스 서비스 2의 배포 양식을 할 거예요 + +12:58.160 --> 13:00.320 +프로세스 서비스 2가 배포되었어요 + +13:00.530 --> 13:04.310 +그렇게 하고 나면 이 코드를 약간 다르게 사용할 수 있어요 + +13:04.340 --> 13:08.090 +지난번에 했던 거 기억하시죠 Modal. 함수 .조회를 했죠 + +13:08.090 --> 13:13.190 +Modal.Crice는 짜증 나게도 cls로 쓰였어요 + +13:13.340 --> 13:17.300 +클래스는 이미 사용된 걸 거예요 + +13:17.300 --> 13:18.680 +그래서 못 썼죠 + +13:18.770 --> 13:26.420 +Modal.dot.lup 서비스 가격과 P로 시작하는 가격이요 + +13:26.420 --> 13:27.650 +수업이 문제군요 + +13:27.650 --> 13:32.900 +방금 배포한 클래스를 찾을 거예요 + +13:33.020 --> 13:35.450 +그 클래스를 인스턴스화할 수 있어요 + +13:35.450 --> 13:38.960 +가격은 가격의 예시라고 하죠 + +13:39.500 --> 13:42.770 +Pricer.pice.triter라고 호출할 수 있어요 + +13:42.950 --> 13:46.620 +get을 입력하면 그 답이 다시 나오죠 + +13:46.620 --> 13:51.600 +전에 실행했을 때 아주 빠르게 실행되었어요 이미 메모리에 있었으니까요 하지만 지금은 아마 이미 바뀌었을 + +13:51.600 --> 13:52.440 +거예요 + +13:52.440 --> 13:57.510 +네, 지연되는 걸 피하려면 지금은 안 하겠습니다 마지막 예제로 넘어갈 테니까요 + +13:57.510 --> 13:58.500 +실행할 거예요 + +13:58.830 --> 14:02.400 +지금 시작할게요 기계가 작동하는 동안 얘기 좀 하게요 + +14:02.430 --> 14:03.390 +이대로 둘게요 + +14:03.390 --> 14:17.250 +그래서 이걸 스페셜리스트 에이전트라는 유용한 클래스로 패키지했어요 우리가 살펴볼 첫 번째 에이전트죠 + +14:17.370 --> 14:23.970 +모달에서 실행 중인 가격을 검색해 가격을 부를 거예요 + +14:23.970 --> 14:29.220 +단일 함수와 단일 메서드 가격만 취하는 거죠 + +14:29.220 --> 14:31.560 +원하는 가격대로 제출할 수 있어요 + +14:31.560 --> 14:32.460 +가서 보죠 + +14:32.460 --> 14:33.780 +에이전트예요 
+ +14:33.900 --> 14:35.490 +지금 첫 번째 미리 보기가 있어요. Get up! Get up! + +14:35.520 --> 14:38.700 +이번 주에 함께 일하게 될 모든 에이전트를 보세요 + +14:38.940 --> 14:42.120 +이제 들어가서 이 에이전트를 살펴볼 거예요 + +14:42.120 --> 14:46.380 +특수 요원이라고 불리는 건 우리 특수 모델을 사용했기 때문이죠 + +14:46.380 --> 14:50.340 +이건 전문 에이전트를 위한 파이썬 툴 모듈이에요 + +14:50.340 --> 14:56.220 +약속드린 대로 코멘트도 달았고 힌트도 몇 가지 있어요 + +14:56.430 --> 14:58.620 +이 클래스는 아주 간단해요 + +14:58.620 --> 15:03.630 +우리 LLM을 원격으로 조정하는 에이전트죠 + +15:04.020 --> 15:08.040 +이름과 색깔을 정하고 이름을 부여하죠 + +15:08.250 --> 15:13.290 +그 이유는 나중에 알게 되겠지만 요원 일지가 있기 때문이죠 + +15:13.290 --> 15:15.060 +다들 뭘 하는지 볼 수 있게요 + +15:15.060 --> 15:17.430 +전문가가 빨간색을 채취할 거예요 + +15:17.430 --> 15:20.490 +언제 생각하고 행동하는지 볼 수 있죠 + +15:21.240 --> 15:31.440 +생성자 메서드에서 초기화를 시작했고요 로그인하면 시작되고 있죠 + +15:31.440 --> 15:35.340 +Modal.+.찾기를 하고요 + +15:35.340 --> 15:37.470 +서비스 비용과 가격을 찾아봤어요 + +15:37.650 --> 15:40.680 +이건 조금 전에 우리가 배포한 거죠 + +15:40.680 --> 15:44.470 +그래서 찾아보고 인스턴스화했죠 + +15:44.470 --> 15:47.470 +그럼 일 시작할 준비가 됐다고 기록해요 + +15:47.530 --> 15:52.180 +가격 호출 자체도 방금 본 것과 똑같아요 + +15:52.210 --> 15:55.540 +가격도, 가격도, 리모컨도 있어요 + +15:55.570 --> 15:57.910 +헬로 닷 리모컨을 만들었을 때처럼요 + +15:57.910 --> 15:59.110 +함수를 취하는 거예요 + +15:59.110 --> 16:02.410 +원격 조종을 호출하고 있어요 클라우드에서 실행된다는 뜻이죠 + +16:02.410 --> 16:04.360 +우리 눈에는 거의 투명해요 + +16:04.360 --> 16:10.240 +파이썬 에서 직접 호출하는 것 같지만 사실은 우리가 명시한 T4 박스의 클라우드에서 + +16:10.240 --> 16:12.520 +실행될 거예요 + +16:12.550 --> 16:19.000 +설명을 전달하고 완료됐다는 사실을 기록하고 결과를 반환하죠 + +16:19.570 --> 16:24.340 +제가 너무 오래 떠들었나 봐요 첫날로 돌아왔을 때 끝났거나 + +16:24.340 --> 16:26.290 +안 끝났길 바라요 + +16:26.650 --> 16:32.950 +그럼 이쪽을 뒤집어서 시간을 채울 수도 있어요 그럼 작업을 하는 걸 살펴보죠 + +16:33.310 --> 16:39.850 +그래서 지금 우리가 기억하기 전에 일시적인 앱을 찾다가 길을 잃었어요 + +16:39.880 --> 16:46.270 +이제 제대로 배포된 앱에 있습니다 왜냐하면 Pricer 서비스를 보고 있으니까요 + +16:46.270 --> 16:47.470 +배포된 앱이죠 + +16:47.470 --> 16:49.840 +이 안에 들어가서 살펴보죠 + +16:49.870 --> 16:51.400 +성공했다고 뜨네요 + +16:51.400 --> 16:52.720 +다시 가서 보죠 + +16:52.720 --> 16:54.220 +확실히 성공했어요 + +16:54.220 --> 16:54.940 +됐어요 + +16:54.940 
--> 16:58.000 +성공적으로 133을 반환했어요 + +16:58.030 --> 17:02.470 +지체 없이 바로 다시 실행하면 얼마나 빠른지 보이죠? + +17:02.470 --> 17:05.560 +이미 따뜻할 때는 보통 그 정도로 빨리 녹아요 + +17:05.560 --> 17:07.360 +클라우드에 올릴 거예요 + +17:07.360 --> 17:09.730 +빨리 다른 걸 생각해 보죠 + +17:09.760 --> 17:16.780 +iPhone SE의 2차 버전을 보죠 + +17:18.040 --> 17:19.300 +아이폰은 얼마죠? + +17:19.300 --> 17:19.660 +됐어요 + +17:19.660 --> 17:21.640 +299달러예요 + +17:21.850 --> 17:34.150 +그래서 2세대 아이패드 프로를 볼 수 있어서 정말 기뻐요 + +17:36.610 --> 17:40.340 +299달러라니 인기가 많은가 보군요 + +17:40.640 --> 17:43.370 +get이 따뜻해지면 좋은 감각을 갖게 되죠 + +17:43.370 --> 17:49.460 +모델을 일단 채우면 아주 빨리 호출할 수 있어요 빠른 응답을 얻게 되죠 get it + +17:49.460 --> 17:51.440 +에이전트에게 전화했죠 + +17:51.440 --> 17:57.290 +에이전트가 하는 일은 여기 있는 에이전트가 하는 일은 이미 완료되었다는 거죠 + +17:57.290 --> 18:01.370 +이미 클래스를 갖고 있고 인스턴스화했어요 + +18:01.370 --> 18:04.070 +그 클래스는 안은 얼굴 파일에 이미 로드됐어요 + +18:04.070 --> 18:06.470 +따뜻하고 일할 준비가 됐어요 + +18:06.470 --> 18:10.070 +이 메서드를 호출할 때 파일을 다시 로드할 필요가 없어요 + +18:10.070 --> 18:11.090 +다 준비해 놨어요 + +18:11.090 --> 18:12.800 +추론만 하면 돼요 + +18:12.800 --> 18:14.420 +그래서 그렇게 빠른 거예요 + +18:14.420 --> 18:16.490 +그게 비결이에요 + +18:16.520 --> 18:23.030 +클라우드에서 실행되는 모델에 API 구축하는 방법입니다 서버리스 + +18:23.030 --> 18:29.420 +모델이죠 메모리에 모델 값을 캐시하는 방법입니다 준비되도록요 + +18:29.420 --> 18:33.530 +아주 빠르게 진행했지만 이제 다시 돌아가서 직접 해야 해요 + +18:33.560 --> 18:36.680 +제대로 작동하고 모듈에서도 멋지게 보여야 해요 + +18:36.680 --> 18:39.110 +그럼 다음 영상에서 마무리하도록 하죠 diff --git a/week5/community-contributions/subtitles/srts/59669211/en_US.srt b/week5/community-contributions/subtitles/srts/59669211/en_US.srt new file mode 100755 index 0000000..73556ef --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669211/en_US.srt @@ -0,0 +1,67 @@ +WEBVTT + +00:00.710 --> 00:05.480 +Well, we took on a lot today and we seem to have been successful. + +00:05.510 --> 00:12.020 +These red icons that you see on this agent workflows diagram have been built, and we have them in the + +00:12.020 --> 00:16.280 +agents directory in the week eight folder. 
+ +00:16.520 --> 00:22.220 +And so do spend some time looking through them and getting some appreciation for what we've built. + +00:22.250 --> 00:31.970 +And with that it brings us to a summary end of end of the day slide, which is inching ever closer to + +00:32.000 --> 00:34.130 +the summit of the mountain. + +00:34.160 --> 00:37.820 +You are at the moment at 92.5% of the way there. + +00:37.820 --> 00:42.260 +And I hope you're starting to really get that sense that you are becoming advanced. + +00:42.290 --> 00:48.560 +You're taking the conversion from being confident with this stuff to being an expert at this point. + +00:48.560 --> 00:54.380 +You can also, along with everything else you can do, you can now deploy models to production, including + +00:54.410 --> 01:02.210 +through modal, to call them remotely on the cloud and also building some pretty heavyweight rag pipelines + +01:02.210 --> 01:08.120 +that look over a big Croma database to build context to send to OpenAI. + +01:08.360 --> 01:15.230 +And combining that with machine learning models as well, to build a robust production solution. + +01:15.710 --> 01:19.970 +Next time we're going to look at some more things, we're going to look at something called structured + +01:20.000 --> 01:24.980 +outputs, which is a way that we can connect with OpenAI and enforce that. + +01:24.980 --> 01:28.640 +The response comes in a particular specification. + +01:28.640 --> 01:34.220 +It's something that's relatively new, and it's good for us to to add that to your skill sets to your + +01:34.220 --> 01:35.000 +tool belt. + +01:35.420 --> 01:42.650 +And it's just going to be about solving some more problems using models as you build and build expertise + +01:42.650 --> 01:44.090 +and keep upskilling. + +01:44.090 --> 01:49.460 +So I'm so excited and so pleased with our progress. + +01:49.490 --> 01:54.080 +I'll see you next time when we look at structured outputs. 
diff --git a/week5/community-contributions/subtitles/srts/59669211/ja_JP.srt b/week5/community-contributions/subtitles/srts/59669211/ja_JP.srt new file mode 100755 index 0000000..4748ee7 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669211/ja_JP.srt @@ -0,0 +1,55 @@ +WEBVTT + +00:00.710 --> 00:05.480 +まあ、 今日は多くのことに挑戦し、 成功したようだ。 + +00:05.510 --> 00:12.020 +このエージェントのワークフロー図に表示されている赤いアイコンは、 構築されたもので、 week + +00:12.020 --> 00:16.280 +8フォルダのagentsディレクトリにあります。 + +00:16.520 --> 00:22.220 +だから、 時間をかけてそれらを眺め、 私たちが築き上げたものに対する感謝の念を抱いてほしい。 + +00:22.250 --> 00:34.130 +これで、 山頂に近づきつつある一日の終わりの滑走のまとめが終わった。 + +00:34.160 --> 00:37.820 +あなたは現在92歳です。 全体の5%だ。 + +00:37.820 --> 00:42.260 +そして、 自分が上級者になりつつあることを本当に感じ始めていることを願うよ。 + +00:42.290 --> 00:48.560 +この時点で、 あなたはこのようなことに自信を持っている状態から専門家になることに転換している。 + +00:48.560 --> 01:02.210 +また、 クラウド上でモデルをリモートで呼び出すモーダルや、 OpenAIに送信するコンテキストを構築するために大きなCromaデータベースを参照する、 + +01:02.210 --> 01:08.120 +かなりヘビー級のラグ・パイプラインを構築することもできます。 + +01:08.360 --> 01:15.230 +そしてそれを機械学習モデルとも組み合わせることで、 堅牢なプロダクション・ソリューションを構築する。 + +01:15.710 --> 01:19.970 +次回は、 構造化された出力と呼ばれる、 OpenAIと接続し、 + +01:20.000 --> 01:24.980 +それを強制する方法について見ていこうと思う。 + +01:24.980 --> 01:28.640 +返答は特定の仕様で来る。 + +01:28.640 --> 01:35.000 +それは比較的新しいことで、 あなたのスキルセットのツールベルトに加えるのは良いことだ。 + +01:35.420 --> 01:44.090 +そして、 専門性を高め、 スキルアップしていく中で、 モデルを使ってより多くの問題を解決していくことになる。 + +01:44.090 --> 01:49.460 +だから私はとても興奮しているし、 私たちの進歩にとても満足している。 + +01:49.490 --> 01:54.080 +それではまた次回、 構造化されたアウトプットを見てみよう。 diff --git a/week5/community-contributions/subtitles/srts/59669211/ko_KR.srt b/week5/community-contributions/subtitles/srts/59669211/ko_KR.srt new file mode 100755 index 0000000..d4d2a10 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669211/ko_KR.srt @@ -0,0 +1,64 @@ +WEBVTT + +00:00.710 --> 00:05.480 +오늘 많은 일을 했는데 성공한 것 같아요 + +00:05.510 --> 00:12.020 +에이전트 워크플로 다이어그램에서 보시는 이 빨간 아이콘들은 구축된 겁니다 + +00:12.020 --> 00:16.280 +주 8 폴더에 에이전트 디렉터리에 있어요 + +00:16.520 --> 00:22.220 +그래서 시간을 들여 집을 둘러보며 우리가 
지은 것에 감사하게 되죠 + +00:22.250 --> 00:31.970 +이것으로 오늘의 마지막 슬라이드를 요약해 보겠습니다 산 정상에 점점 가까워지고 + +00:32.000 --> 00:34.130 +있죠 + +00:34.160 --> 00:37.820 +지금 92살 맞으시죠? 5%는 됐어요 + +00:37.820 --> 00:42.260 +여러분이 발전하고 있다는 걸 느끼기 시작하길 바라요 Get it + +00:42.290 --> 00:48.560 +이런 것에 자신만만하던 변환을 전문가가 되는 거죠 + +00:48.560 --> 00:54.380 +또한 다른 모든 작업과 함께 프로덕션에 모델을 배포할 수 있습니다 클라우드에서 + +00:54.410 --> 01:02.210 +원격으로 호출할 수 있는 모듈을 포함해서요 오픈AI에 보낼 컨텍스트를 구축하기 위해 큰 크로마 데이터베이스 + +01:02.210 --> 01:08.120 +위에 있는 꽤 무거운 래그 파이프라인도 구축할 수 있죠 + +01:08.360 --> 01:15.230 +머신 러닝 모델과 결합해 강력한 생산 솔루션을 구축하죠 + +01:15.710 --> 01:19.970 +다음에 더 자세히 살펴볼 때는 구조화된 출력 항목들을 살펴보겠습니다 + +01:20.000 --> 01:24.980 +OpenAI와 연결하고 시행할 수 있는 방법이죠 + +01:24.980 --> 01:28.640 +특정한 기준에 따라 반응하죠 + +01:28.640 --> 01:34.220 +비교적 새로운 기술인데 여러분의 도구 벨트에 추가하는 게 좋을 것 + +01:34.220 --> 01:35.000 +같아요 + +01:35.420 --> 01:44.090 +모델을 이용해 문제를 더 해결하는 거죠 전문성을 구축하고 계속 개선하면서요 + +01:44.090 --> 01:49.460 +진전이 있어서 정말 기쁘고 기뻐요 + +01:49.490 --> 01:54.080 +구조화된 출력물들을 살펴볼 때 다시 만나요 diff --git a/week5/community-contributions/subtitles/srts/59669217/en_US.srt b/week5/community-contributions/subtitles/srts/59669217/en_US.srt new file mode 100755 index 0000000..6bce2dc --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669217/en_US.srt @@ -0,0 +1,91 @@ +WEBVTT + +00:00.530 --> 00:04.220 +And welcome to the next part of visualizing the data. + +00:04.220 --> 00:06.500 +And just very quickly to show it to you in 3D. + +00:06.530 --> 00:12.350 +My box managed to survive me restarting and getting rid of that massive plot. + +00:12.380 --> 00:16.250 +I hope you didn't follow my track, but you did it more sensibly. + +00:16.280 --> 00:25.730 +Anyways, now to visualize in 3D just again, to get that that sense of appreciation for what it means + +00:25.730 --> 00:28.250 +to have a vector embedding of text. + +00:28.490 --> 00:34.070 +This time I have stuck with a more reasonable 10,000, boringly and otherwise. 
+ +00:34.070 --> 00:41.450 +The code is just like we did it before when we looked at Rag, and we create the scatter plot using + +00:41.450 --> 00:43.250 +the Plotly library again. + +00:43.250 --> 00:47.660 +And this is what it looks like in the 3D visualization. + +00:47.690 --> 00:52.160 +It's hard to stop it from from zooming in and out, but there we go. + +00:52.280 --> 00:58.700 +And just as before, when we looked at the much smaller vector data space, it looks a little bit, + +00:58.700 --> 01:02.780 +um, uh, strange from, from a distance like that. + +01:02.780 --> 01:10.340 +But when you rotate it around and you interact with it, You absolutely start to see you get to appreciate + +01:10.340 --> 01:17.900 +the 3D, and you get to see how there are clusters that represent related kinds of products. + +01:17.930 --> 01:20.060 +And you can actually copy the code that we used before. + +01:20.060 --> 01:22.940 +So you get it to print the text of each one if you wish. + +01:22.940 --> 01:25.790 +It will use up more memory again, but you can do that. + +01:25.940 --> 01:31.760 +And that's a pretty cool way to satisfy yourself that the data is being represented in this way, that + +01:31.760 --> 01:33.440 +similar things are close to each other. + +01:33.440 --> 01:35.810 +That's really the important takeaway here. + +01:35.810 --> 01:41.510 +And you'll see when when purple dots have strayed away from the mainstream, you'll you'll get a sense + +01:41.510 --> 01:42.350 +of why. + +01:42.380 --> 01:44.150 +And it's really helpful to do that. + +01:44.150 --> 01:54.050 +So this is again more more of an exercise to build intuition designed to help see that as we scale up + +01:54.050 --> 02:00.170 +rag to this much bigger problem with much larger number of documents, that the same rules apply, and + +02:00.170 --> 02:03.860 +that you can visualize and experiment with your data in much the same way. 
+ +02:04.160 --> 02:07.280 +Quite enough preamble on on vector data stores. + +02:07.280 --> 02:13.010 +It's time for us to actually build the Rag pipeline to estimate product prices using similar products. + +02:13.010 --> 02:14.240 +Let's get to it. diff --git a/week5/community-contributions/subtitles/srts/59669217/ja_JP.srt b/week5/community-contributions/subtitles/srts/59669217/ja_JP.srt new file mode 100755 index 0000000..b3352c9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669217/ja_JP.srt @@ -0,0 +1,79 @@ +WEBVTT + +00:00.530 --> 00:04.220 +そして、 データを視覚化する次のパートへようこそ。 + +00:04.220 --> 00:06.500 +そして、 3Dでお見せするために、 とても素早く。 + +00:06.530 --> 00:12.350 +私のボックスは、 私が再起動し、 その巨大なプロットを取り除くまでなんとか生き延びた。 + +00:12.380 --> 00:16.250 +私の轍を踏まなかったことを願うよ。 + +00:16.280 --> 00:28.250 +とにかく、 今一度3Dで視覚化することで、 テキストをベクターで埋め込むことの意味を理解することができる。 + +00:28.490 --> 00:34.070 +今回は、 つまらないことも含めて、 よりリーズナブルな1万円にこだわった。 + +00:34.070 --> 00:43.250 +コードは以前ラグを見たときと同じで、 再びPlotlyライブラリを使って散布図を作成する。 + +00:43.250 --> 00:47.660 +そして、 3Dビジュアライゼーションではこのように見える。 + +00:47.690 --> 00:52.160 +ズームインしたりズームアウトしたりするのを止めるのは難しいんだけどね。 + +00:52.280 --> 00:58.700 +先ほどと同じように、 もっと小さなベクトルデータ空間を見ると、 このように遠くから見ると、 + +00:58.700 --> 01:02.780 +ちょっと、 うーん、 変に見える。 + +01:02.780 --> 01:10.340 +しかし、 それを回転させ、 相互作用させると、 3Dを理解するようになり、 + +01:10.340 --> 01:17.900 +関連する種類の製品を表すクラスタがあることがわかるようになる。 + +01:17.930 --> 01:20.060 +そして、 実際に以前使ったコードをコピーすることができる。 + +01:20.060 --> 01:22.940 +希望すれば、 それぞれのテキストを印刷することができる。 + +01:22.940 --> 01:25.790 +またメモリを使うことになるが、 それはできる。 + +01:25.940 --> 01:31.760 +そして、 データがこのように表現されていること、 似たようなものが近くにあることを納得させる、 + +01:31.760 --> 01:33.440 +とてもクールな方法だ。 + +01:33.440 --> 01:35.810 +これが重要なポイントだ。 + +01:35.810 --> 01:42.350 +そして、 紫色の点が主流から外れたとき、 その理由がわかるだろう。 + +01:42.380 --> 01:44.150 +そうすることは本当に役に立つ。 + +01:44.150 --> 01:54.050 +これはまた、 直感を養うための練習というより、 もっと大きな問題、 もっと大きな数のドキュメントに拡大しても、 + +01:54.050 --> 02:03.860 +同じルールが適用され、 同じようにデータを視覚化して実験できることを確認するためのものだ。 + +02:04.160 --> 02:07.280 
+ベクトル・データストアについての前置きはかなり十分だ。 + +02:07.280 --> 02:13.010 +似たような製品を使って製品価格を見積もるラグ・パイプラインを実際に構築する時が来たのだ。 + +02:13.010 --> 02:14.240 +さっそく始めよう。 diff --git a/week5/community-contributions/subtitles/srts/59669217/ko_KR.srt b/week5/community-contributions/subtitles/srts/59669217/ko_KR.srt new file mode 100755 index 0000000..eef3f1e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669217/ko_KR.srt @@ -0,0 +1,91 @@ +WEBVTT + +00:00.530 --> 00:04.220 +데이터 시각화의 다음 단계에 오신 걸 환영해요 + +00:04.220 --> 00:06.500 +재빨리 3D로 보여드릴 수 있죠 + +00:06.530 --> 00:12.350 +그 거대한 부지를 없애고 다시 켜고 나서도 제 상자는 무사했어요 + +00:12.380 --> 00:16.250 +제 방식을 따라 하지 않으셨길 바라요 하지만 더 현명하게 선택하셨네요 + +00:16.280 --> 00:25.730 +어쨌든 3D로 시각화해 보겠습니다 텍스트의 벡터 삽입이 어떤 의미인지 이해하기 + +00:25.730 --> 00:28.250 +위해서요 get it + +00:28.490 --> 00:34.070 +이번에는 좀 더 합리적인 10,000를 택했어요 지루하고 특이하죠 + +00:34.070 --> 00:41.450 +코드는 전에 Rag에서 했던 것과 같아요 Plotly 라이브러리를 이용해 다시 산란 + +00:41.450 --> 00:43.250 +플롯을 생성하죠 + +00:43.250 --> 00:47.660 +3D 시각화로 보면 이런 모습이에요 + +00:47.690 --> 00:52.160 +줌인과 아웃을 막기는 어렵지만 어쨌든 됐네요 + +00:52.280 --> 00:58.700 +전에 비트를 봤을 때처럼 훨씬 작은 벡터 데이터 공간을 보면 + +00:58.700 --> 01:02.780 +이렇게 멀리서 보면 좀 이상해 보여요 + +01:02.780 --> 01:10.340 +하지만 회전시키고 상호 작용해보면 3D의 진가를 확실히 알게 될 겁니다 관련 제품을 + +01:10.340 --> 01:17.900 +나타내는 클러스터가 어떻게 있는지 말이에요 Get in get get it + +01:17.930 --> 01:20.060 +전에 사용한 코드를 복사할 수 있어요 + +01:20.060 --> 01:22.940 +원한다면 각각의 텍스트를 get으로 출력할 수 있어요. 
+ +01:22.940 --> 01:25.790 +메모리를 더 쓰게 될 텐데 그래도 괜찮아요 + +01:25.940 --> 01:31.760 +데이터는 비슷한 것들이 서로 가깝게 표현된다는 점에서 스스로 만족할 수 + +01:31.760 --> 01:33.440 +있는 멋진 방법이죠 + +01:33.440 --> 01:35.810 +그게 정말 중요한 부분이죠 + +01:35.810 --> 01:41.510 +그리고 보라색 점이 주류에서 벗어난 이유를 알게 될 거예요 Get + +01:41.510 --> 01:42.350 +it + +01:42.380 --> 01:44.150 +그게 정말 도움이 돼요 + +01:44.150 --> 01:54.050 +이건 직관을 키우기 위한 연습에 가까워요 훨씬 많은 문서에 대한 래그의 규모를 키워도 + +01:54.050 --> 02:00.170 +같은 규칙이 적용되고 데이터를 시각화하고 실험할 수 + +02:00.170 --> 02:03.860 +있다는 걸 보여주기 위해서요 + +02:04.160 --> 02:07.280 +벡터 데이터 저장소 얘기는 충분히 들었어요 + +02:07.280 --> 02:13.010 +이제 래그 파이프라인을 구축해서 비슷한 제품을 이용해 제품 가격을 추정할 때예요 + +02:13.010 --> 02:14.240 +Get it, get it 해 보죠 diff --git a/week5/community-contributions/subtitles/srts/59669375/en_US.srt b/week5/community-contributions/subtitles/srts/59669375/en_US.srt new file mode 100755 index 0000000..2883c45 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669375/en_US.srt @@ -0,0 +1,202 @@ +WEBVTT + +00:01.040 --> 00:02.930 +Here we are for the day. + +00:02.930 --> 00:04.730 +2.1 notebook. + +00:04.760 --> 00:07.760 +And don't let it be said that I don't ever do anything for you. + +00:07.760 --> 00:12.920 +As you will see, I have gone out on a limb with this one for your pleasure. + +00:13.190 --> 00:20.330 +Uh, so, uh, again, we are just going to be visualizing our data store for a moment now. + +00:20.360 --> 00:23.240 +Um, and to do that, we do some imports. + +00:23.600 --> 00:28.580 +And there is then a cell here where we select the maximum number of data points that we want to show + +00:28.580 --> 00:31.130 +in a visualization of our vectors. + +00:31.130 --> 00:35.240 +And my recommendation is that you stick with 10,000 which is a safe number. + +00:35.240 --> 00:40.580 +You get a nice image and your machine will not be ground to a halt. + +00:40.580 --> 00:42.650 +But that would be no fun for you. 
+ +00:42.650 --> 00:49.760 +And I wanted to show you what it looks like if you get all 400,000 data points to show, but it's precarious + +00:49.760 --> 00:54.290 +and it puts my machine in a very unsafe position that it might crash at any moment. + +00:54.290 --> 00:59.360 +And indeed, in preparing for this, I have had my machine crash a couple of times and had to even start + +00:59.360 --> 01:04.220 +again with this Jupyter notebook, so I do not recommend you do this unless you have a very powerful + +01:04.220 --> 01:04.910 +machine. + +01:05.240 --> 01:11.550 +Um, so, uh, in the code, we, we, uh, we connect to the vector database. + +01:11.850 --> 01:17.730 +We have some code which is essentially a duplicate of what we already did in the rag week. + +01:17.790 --> 01:26.100 +Uh, when we, um, got some did some pre work to collect from the vector data, store the objects themselves, + +01:26.100 --> 01:32.850 +the documents, their categories that are in the metadata, and then color pick out the right color + +01:32.850 --> 01:36.120 +that would be able to allow us to identify the different points. + +01:36.120 --> 01:42.630 +And remember when I show you this, the thing that's super important to keep in mind is that the vectorization + +01:42.630 --> 01:49.290 +process, the process of deciding what vector to use for each of the documents, was based purely on + +01:49.290 --> 01:51.570 +the description of the documents themselves. + +01:51.570 --> 01:57.810 +It was only based on the language in in each product description that we pulled all 400,000 product + +01:57.810 --> 01:58.680 +descriptions. + +01:58.680 --> 02:04.590 +The fact that we happen to know which which ones are appliances, which ones are automotive, which + +02:04.590 --> 02:05.820 +ones are electronics. + +02:05.820 --> 02:10.740 +The model is not told that the model that builds the vector is just given the text. + +02:10.740 --> 02:14.640 +So it's helpful to then color it in so we can see. 
+ +02:14.670 --> 02:15.030 +All right. + +02:15.030 --> 02:17.370 +This is the landscape of all of the vectors. + +02:17.610 --> 02:18.660 +Are there trends. + +02:18.660 --> 02:24.740 +Can we see that the model was able, just through the language, to separate out some of the different + +02:24.740 --> 02:26.750 +kinds of thing that's there? + +02:26.750 --> 02:33.080 +But this this kind of thing, this category was not part of the text that it vectorized. + +02:33.140 --> 02:33.890 +Okay. + +02:33.890 --> 02:40.610 +So anyway, with that, uh, this is now doing the t-SNE, uh, dimension reduction process, and this + +02:40.610 --> 02:42.620 +took about an hour to run on my machine. + +02:42.800 --> 02:47.690 +Uh, for the 400,000 it would take, it should be five minutes or something for 10,000. + +02:47.810 --> 02:52.160 +Uh, and then we can create a scatter plot, much as we did before. + +02:52.220 --> 02:55.010 +Uh, and then we can plot this scatter plot. + +02:55.010 --> 02:57.560 +And now I will show you what it looks like. + +02:59.150 --> 03:02.900 +And it's super slow to to do this on my machine. + +03:02.900 --> 03:09.470 +But this rather beautiful thing here is the result of looking at all of the vectors. + +03:09.560 --> 03:13.970 +Uh, let me try and and and shrink this a little bit. + +03:14.390 --> 03:16.820 +The machine is running very slowly. + +03:17.030 --> 03:23.870 +Uh, but you get a sense of the vector space from all 400,000 vectors. + +03:23.870 --> 03:24.350 +Here we go. + +03:24.380 --> 03:26.660 +It's just coming into view now. + +03:27.510 --> 03:31.920 +And the important thing to see is that. + +03:31.920 --> 03:33.600 +Yes, indeed. + +03:33.630 --> 03:35.070 +Uh, there it goes. + +03:35.100 --> 03:36.270 +Agonizingly slow. + +03:36.300 --> 03:37.680 +Yes, indeed. 
+ +03:37.680 --> 03:44.580 +Different products have ended up most of the time in different territories, in vector space, with + +03:44.580 --> 03:48.540 +some clusters that appear to be, uh, near each other. + +03:48.540 --> 03:53.250 +And when you have a smaller number in here, you can go in and investigate the different ones and satisfy + +03:53.250 --> 03:59.820 +yourself that the reason that they are potentially in another territory is because they are perhaps + +03:59.820 --> 04:04.770 +products that straddle both being appliances and electronics or something like that. + +04:05.100 --> 04:12.510 +So this is really just for for an opportunity to look at the data and investigate it and understand + +04:12.510 --> 04:18.750 +it and just give a little bit more intuition about what does it mean to create vectors associated with + +04:18.750 --> 04:21.750 +documents, um, and to, to store them. + +04:21.810 --> 04:25.380 +Uh, and so it gives you that hands on tangible sense. + +04:25.440 --> 04:30.870 +And I hope that you enjoy this image, and I hope it was worth almost breaking my box. + +04:30.990 --> 04:33.540 +And hopefully you're doing it with a smaller number. + +04:33.540 --> 04:38.070 +And I will see you next time to see some of this in 3D instead. diff --git a/week5/community-contributions/subtitles/srts/59669375/ja_JP.srt b/week5/community-contributions/subtitles/srts/59669375/ja_JP.srt new file mode 100755 index 0000000..4862d97 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669375/ja_JP.srt @@ -0,0 +1,169 @@ +WEBVTT + +00:01.040 --> 00:02.930 +今日はこれで終わり。 + +00:02.930 --> 00:04.730 +2. 
ノート1冊 + +00:04.760 --> 00:07.760 +私があなたのために何もしないとは言わせない。 + +00:07.760 --> 00:12.920 +おわかりのように、 私はあなたの喜びのために、 この作品に思い切って挑戦した。 + +00:13.190 --> 00:20.330 +ええと、 もう一度言いますが、 これからデータストアを可視化します。 + +00:20.360 --> 00:23.240 +そのために、 いくつかの輸入をするんだ。 + +00:23.600 --> 00:31.130 +そしてここに、 ベクトルの可視化で表示したいデータポイントの最大数を選択するセルがある。 + +00:31.130 --> 00:35.240 +そして私のお勧めは、 安全な数字である1万本にこだわることだ。 + +00:35.240 --> 00:40.580 +美しい画像が得られ、 マシンが停止することもない。 + +00:40.580 --> 00:42.650 +でも、 それでは面白くないだろう。 + +00:42.650 --> 00:54.290 +そして、 40万点のデータをすべて表示させるとどのようになるかをお見せしたかったのですが、 不安定で、 私のマシンはいつクラッシュするかわからない非常に危険な状態に置かれています。 + +00:54.290 --> 01:04.910 +そして実際、 この準備のために何度かマシンがクラッシュし、 このJupyterノートブックからやり直さなければならなかった。 + +01:05.240 --> 01:11.550 +コードでは、 ベクター・データベースに接続する。 + +01:11.850 --> 01:17.730 +ラグ・ウィークですでにやったことと本質的に重複するコードがいくつかある。 + +01:17.790 --> 01:26.100 +ベクターデータを収集し、 オブジェクト自体、 ドキュメント、 メタデータにあるカテゴリーを保存し、 + +01:26.100 --> 01:36.120 +異なるポイントを識別できるような適切な色を選択するために、 いくつかの事前作業を行いました。 + +01:36.120 --> 01:42.630 +そして、 これをお見せするときに覚えておいていただきたいのは、 ベクトル化のプロセス、 つまりそれぞれの文書にどのベクトルを使うかを決めるプロセスは、 + +01:42.630 --> 01:51.570 +純粋に文書そのものの記述に基づいているということだ。 + +01:51.570 --> 01:58.680 +各商品説明に含まれる文言に基づいて、 40万件すべての商品説明を引き出したにすぎない。 + +01:58.680 --> 02:05.820 +どれが家電製品で、 どれが自動車で、 どれが電子機器なのか、 私たちはたまたま知っているのだ。 + +02:05.820 --> 02:10.740 +ベクトルを構築するモデルには、 テキストが与えられるだけであることは知らされない。 + +02:10.740 --> 02:14.640 +だから、 色付けしてくれるのは助かる。 + +02:14.670 --> 02:15.030 +分かった。 + +02:15.030 --> 02:17.370 +これはすべてのベクターの風景である。 + +02:17.610 --> 02:18.660 +トレンドはあるのか。 + +02:18.660 --> 02:26.750 +このモデルによって、 言葉だけで、 そこにあるさまざまな種類のものを分けることができたことがわかるだろうか? 
+ +02:26.750 --> 02:33.080 +しかし、 この種のこと、 このカテゴリーは、 ベクトル化されたテキストの一部ではなかった。 + +02:33.140 --> 02:33.890 +オーケー。 + +02:33.890 --> 02:42.620 +とにかく、 これでt-SNEによる次元削減処理が完了した。 + +02:42.800 --> 02:47.690 +ええと、 40万人なら、 1万人なら5分かそこらになるはずだ。 + +02:47.810 --> 02:52.160 +そして、 散布図を作成することができる。 + +02:52.220 --> 02:55.010 +そして、 この散布図を描けばいい。 + +02:55.010 --> 02:57.560 +では、 その様子をお見せしよう。 + +02:59.150 --> 03:02.900 +そして、 私のマシンではこれを実行するのが超遅い。 + +03:02.900 --> 03:09.470 +しかし、 このかなり美しいものは、 すべてのベクトルを見た結果なのだ。 + +03:09.560 --> 03:13.970 +ええと、 少し縮めてみます。 + +03:14.390 --> 03:16.820 +マシンの動作が非常に遅い。 + +03:17.030 --> 03:23.870 +あー、 でも、 40万個のベクトルすべてからベクトル空間の感覚を得ることができる。 + +03:23.870 --> 03:24.350 +さあ、 始めよう。 + +03:24.380 --> 03:26.660 +今、 見えてきたところだ。 + +03:27.510 --> 03:31.920 +そして重要なのは、 そのことだ。 + +03:31.920 --> 03:33.600 +そうだね。 + +03:33.630 --> 03:35.070 +あ、 そうだ。 + +03:35.100 --> 03:36.270 +苦しいほど遅い。 + +03:36.300 --> 03:37.680 +そうだね。 + +03:37.680 --> 03:44.580 +異なる製品は、 ほとんどの場合、 異なるテリトリー、 ベクトル空間、 いくつかのクラスタは、 + +03:44.580 --> 03:48.540 +ええと、 互いに近くにあるように見える。 + +03:48.540 --> 03:53.250 +そして、 この数字が小さければ、 いろいろなものを調査して、 + +03:53.250 --> 03:59.820 +それが別の領域にある可能性があるのは、 おそらく電化製品と電子機器の両方にまたがる製品だからだろう、 + +03:59.820 --> 04:04.770 +などと納得することができる。 + +04:05.100 --> 04:12.510 +つまり、 これはデータを見て、 それを調査し、 理解し、 文書に関連するベクトルを作成し、 + +04:12.510 --> 04:21.750 +それを保存することの意味をもう少し直感的に理解するための機会なのだ。 + +04:21.810 --> 04:25.380 +そうすることで、 手に取るように実感できるんだ。 + +04:25.440 --> 04:30.870 +そして、 この画像を楽しんでもらえればと思うし、 私の箱を壊しかけた甲斐があったと思っている。 + +04:30.990 --> 04:33.540 +そして願わくば、 もっと小さな数字でやってほしい。 + +04:33.540 --> 04:38.070 +また次回、 3Dでお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/59669375/ko_KR.srt b/week5/community-contributions/subtitles/srts/59669375/ko_KR.srt new file mode 100755 index 0000000..4972ba0 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669375/ko_KR.srt @@ -0,0 +1,196 @@ +WEBVTT + +00:01.040 --> 00:02.930 +오늘의 목적지예요 + +00:02.930 --> 00:04.730 +2번요 공책 한 권요 + +00:04.760 --> 00:07.760 +내가 당신을 위해 아무것도 안 
한다고 하지 말아요 + +00:07.760 --> 00:12.920 +보시면 알겠지만, 당신의 즐거움을 위해 위험을 감수했어요 + +00:13.190 --> 00:20.330 +다시 말씀드리지만 잠시 데이터 저장소를 시각화해 볼게요 + +00:20.360 --> 00:23.240 +그러기 위해서는 수입을 해야 해요 + +00:23.600 --> 00:28.580 +그리고 셀이 있는데요 벡터의 시각화에서 보여주고자 하는 데이터 포인트의 + +00:28.580 --> 00:31.130 +최대 개수를 선택하는 곳이죠 + +00:31.130 --> 00:35.240 +저는 10,000개를 추천합니다 그게 안전한 숫자죠 + +00:35.240 --> 00:40.580 +이미지도 좋고 기계도 멈추지 않을 거예요. Get it. + +00:40.580 --> 00:42.650 +하지만 그건 재미없잖아요 + +00:42.650 --> 00:49.760 +400,000개의 데이터 포인트를 모두 모으면 어떻게 되는지 보여드리고 싶었는데 불안정하고 제 기계가 + +00:49.760 --> 00:54.290 +위험한 상황에 처하게 돼요 언제라도 충돌할 수 있죠 Get up + +00:54.290 --> 00:59.360 +이걸 준비하면서 제 기계가 몇 번 고장 났는데 이 주피터 노트북으로 다시 + +00:59.360 --> 01:04.910 +시작해야 했어요 그러니 아주 강력한 기계가 아닌 이상 이걸 하지 마세요 + +01:05.240 --> 01:11.550 +코드 안에서 우리는 벡터 데이터베이스에 연결돼요 + +01:11.850 --> 01:17.730 +코드가 있어요 래그 주간에서 이미 한 걸 복사한 거죠 + +01:17.790 --> 01:26.100 +벡터 데이터 수집을 위한 사전 작업을 할 때 개체 자체와 문서 메타데이터의 + +01:26.100 --> 01:32.850 +카테고리를 저장하고 적절한 색깔을 골라 여러 점을 식별할 + +01:32.850 --> 01:36.120 +수 있게 했죠 + +01:36.120 --> 01:42.630 +이걸 보여드릴 때 기억하셔야 할 아주 중요한 건 벡터화 프로세스, 각각의 문서에 + +01:42.630 --> 01:49.290 +대해 어떤 벡터를 사용할지를 결정하는 프로세스는 순전히 문서 그 자체의 설명에 + +01:49.290 --> 01:51.570 +근거했다는 거죠 + +01:51.570 --> 01:58.680 +각 제품 설명의 언어를 바탕으로 400,000개의 제품을 전부 추출했어요 + +01:58.680 --> 02:04.590 +어떤 게 전자 기기인지 자동차에 달린 건지 어떤 게 전자 기기인지 + +02:04.590 --> 02:05.820 +알 수 있죠 + +02:05.820 --> 02:10.740 +모델은 벡터를 구축하는 모델이 텍스트를 받는다는 걸 몰라요 + +02:10.740 --> 02:14.640 +그래서 볼 수 있게 색칠하는 게 도움이 되죠 + +02:14.670 --> 02:15.030 +좋아요 + +02:15.030 --> 02:17.370 +이게 모든 벡터의 풍경이에요 + +02:17.610 --> 02:18.660 +유행이 있나요? + +02:18.660 --> 02:24.740 +모델이 언어를 통해서 거기 있는 다른 종류의 것들을 분리할 + +02:24.740 --> 02:26.750 +수 있었나요? 
+ +02:26.750 --> 02:33.080 +하지만 이런 종류의 카테고리는 벡터화 된 텍스트의 일부가 아니에요 + +02:33.140 --> 02:33.890 +네 + +02:33.890 --> 02:40.610 +어쨌든 이제 t-SNE 즉, 치수 감소 과정을 거치고 있어요 제 기계에 + +02:40.610 --> 02:42.620 +한 시간 정도 걸렸죠 + +02:42.800 --> 02:47.690 +400,000시간이면 5분 정도 걸릴 거예요 10,000시간요 + +02:47.810 --> 02:52.160 +그럼 전처럼 분산도를 만들 수 있어요 + +02:52.220 --> 02:55.010 +그러면 분산도를 그릴 수 있어요 + +02:55.010 --> 02:57.560 +이제 어떻게 생겼는지 보여드릴게요 + +02:59.150 --> 03:02.900 +제 컴퓨터로 하는 건 정말 느려요 + +03:02.900 --> 03:09.470 +여기 이 아름다운 것은 모든 벡터를 관찰한 결과예요 + +03:09.560 --> 03:13.970 +비트를 좀 줄여 볼게요 + +03:14.390 --> 03:16.820 +기계가 아주 느리게 돌아가요 + +03:17.030 --> 03:23.870 +400,000마리의 벡터를 통해 벡터 공간을 느낄 수 있어요. Get it. + +03:23.870 --> 03:24.350 +시작할게요 + +03:24.380 --> 03:26.660 +이제 보이네요 + +03:27.510 --> 03:31.920 +중요한 건 이거예요 + +03:31.920 --> 03:33.600 +네, 맞아요 + +03:33.630 --> 03:35.070 +저기 가네요 + +03:35.100 --> 03:36.270 +고통스러울 정도로 느려요 + +03:36.300 --> 03:37.680 +네, 맞아요 + +03:37.680 --> 03:44.580 +다른 제품들은 대부분 다른 지역에서 벡터 공간에 놓이게 됩니다 + +03:44.580 --> 03:48.540 +서로 가까운 클러스터들에서요 + +03:48.540 --> 03:53.250 +더 적은 수의 가전제품이 있다면 다른 것들을 살펴보고 + +03:53.250 --> 03:59.820 +스스로 만족할 수 있습니다. 잠재적으로 다른 영역에 있는 이유는 가전제품과 + +03:59.820 --> 04:04.770 +전자 제품과 양쪽에 걸쳐 있기 때문일 수도 있어요. + +04:05.100 --> 04:12.510 +데이터를 보고 조사하고 이해할 기회를 위한 거예요 문서와 관련된 벡터를 + +04:12.510 --> 04:18.750 +생성하고 저장하는 것이 어떤 의미인지 직관적인 것을 좀 더 + +04:18.750 --> 04:21.750 +보여주기 위해서요 + +04:21.810 --> 04:25.380 +그래서 눈에 보이는 감각을 손으로 만질 수 있어요 + +04:25.440 --> 04:30.870 +즐겁게 감상하시길 바라요 제 상자를 부술 뻔했는데도 가치가 있었길 바라요 + +04:30.990 --> 04:33.540 +더 적은 숫자로도 가능하길 바라요 + +04:33.540 --> 04:38.070 +다음 시간에 3D로 보여드릴 테니 기대하세요 diff --git a/week5/community-contributions/subtitles/srts/59669389/en_US.srt b/week5/community-contributions/subtitles/srts/59669389/en_US.srt new file mode 100755 index 0000000..c6ec4db --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669389/en_US.srt @@ -0,0 +1,226 @@ +WEBVTT + +00:00.530 --> 00:01.130 +Well. + +00:01.130 --> 00:01.730 +Hi there. 
+ +00:01.730 --> 00:07.520 +So you've made it to day two of week eight, and I am super grateful that you've been hanging on, and + +00:07.520 --> 00:11.090 +that you're now at the point where there are four more days to go. + +00:11.090 --> 00:17.450 +And let me tell you something this day, this is probably the toughest day of the lot of the entire + +00:17.450 --> 00:18.050 +course. + +00:18.050 --> 00:22.250 +So if you survive through this day and you're on top of it and you're getting it and you're running + +00:22.250 --> 00:27.860 +the code and it's all making sense, then you you're already basically mastering LM engineering. + +00:27.860 --> 00:29.060 +This is what it's about. + +00:29.090 --> 00:30.530 +Today is going to be tough. + +00:30.710 --> 00:31.940 +Be prepared. + +00:32.060 --> 00:35.780 +But as it happens it's going to be really interesting. + +00:35.780 --> 00:38.600 +And we're going to get some great achievements done. + +00:38.600 --> 00:45.020 +So four days to go of the eight week journey to LM mastery. + +00:45.050 --> 00:46.610 +We're in the home stretch. + +00:46.610 --> 00:52.070 +You can already, of course generate text and code with frontier models, with open source, with hugging + +00:52.100 --> 00:58.610 +face back to front, you can follow a strategy to solve problems with curating data sets, which is + +00:58.610 --> 01:04.340 +hard making baseline models and fine tuning frontier models, and then carrying out a full end to end + +01:04.410 --> 01:07.770 +process to fine tune open source LMS. + +01:07.860 --> 01:14.640 +And finally, what we did last time is taking our fine tuned LMS and deploying it to production. + +01:14.670 --> 01:15.750 +We use modal. + +01:15.750 --> 01:20.400 +There are many others, but modal is great, and we use it in such a way that you can just write a simple + +01:20.400 --> 01:23.220 +piece of Python code that can run locally. 
+ +01:23.220 --> 01:28.470 +Apparently it looks like it's running locally, but it in fact it is calling out to your model running + +01:28.470 --> 01:30.270 +on the cloud in modal. + +01:30.300 --> 01:32.910 +Paying only for what you use. + +01:33.360 --> 01:43.770 +So today we take a side step to look at more pricing models as we build our final final pricing model. + +01:43.770 --> 01:48.330 +That will be what's called an ensemble, a combination of multiple models. + +01:48.330 --> 01:55.230 +We're going to go back to Rag, and we're going to use Rag to build a frontier model solution that will + +01:55.260 --> 01:58.110 +operate against a massive Chrome data store. + +01:58.140 --> 02:00.870 +And to make things different to last time, we're not going to use Lang chain. + +02:00.870 --> 02:06.390 +We're just going to directly look up in the vector store and use that to build our prompt ourselves, + +02:06.390 --> 02:07.380 +which is good. + +02:07.380 --> 02:11.320 +Good to understand exactly how it works under the covers. + +02:11.650 --> 02:17.380 +We're going to then build this ensemble model with a high level of expertise and be able to deliver + +02:17.380 --> 02:21.370 +production ready code, calling a number of models. + +02:21.370 --> 02:26.680 +And I think perhaps the most important thing to say here is that whilst a lot of what we'll be doing + +02:26.680 --> 02:31.090 +is something of a repeat of things we've done before, just in a bit more of an industrial strength + +02:31.120 --> 02:35.290 +way, this is about solidifying what you've learned. + +02:35.290 --> 02:41.020 +This is about going from the point of being fairly confident with different aspects of LM engineering + +02:41.020 --> 02:47.290 +into being advanced, and so do take the time to go through the code and take this as an opportunity + +02:47.290 --> 02:54.100 +to practice, practice, practice, and get to the point of feeling like you've built true expertise. 
+ +02:54.850 --> 02:55.390 +All right. + +02:55.390 --> 02:59.530 +So quick, a reminder on what is the project we're working on here. + +02:59.560 --> 03:07.000 +Project called The Price is Right building this autonomous Agentic framework agent workflows that will + +03:07.000 --> 03:14.080 +watch for when deals get published online, estimate their prices and send a push notification if it + +03:14.080 --> 03:20.410 +looks like this is a bargain and we are going to have these different agents collaborating to solve + +03:20.410 --> 03:28.660 +the problem, including our frontier busting model, our fine tuned model that significantly outperformed + +03:28.690 --> 03:30.670 +GPT four and Claude. + +03:30.700 --> 03:37.390 +All right, so you may remember last time I showed you this diagram that showed our agent workflows, + +03:37.390 --> 03:43.960 +the user interface on the left, the framework that we'll be using, planning agents to coordinate activities. + +03:43.960 --> 03:50.830 +And then the three the scanning agent looking for promising deals, ensemble estimating prices and a + +03:50.830 --> 03:54.640 +messaging agent that will send us push notifications. + +03:54.760 --> 04:01.150 +So I wanted to mention that this was a bit of a simplification, because the ensemble agent itself will + +04:01.150 --> 04:03.520 +be calling out to three different models. + +04:03.520 --> 04:11.710 +It's going to be calling a frontier agent, which is going to be a Rag workflow to price products based + +04:11.710 --> 04:15.430 +on an inventory of lots of existing products that it can look up. + +04:15.440 --> 04:18.170 +It's a perfect use case for rag. + +04:18.200 --> 04:23.090 +Of course, you may have already thought of it as I say, so you may have already been wondering why + +04:23.090 --> 04:24.050 +we didn't even try this. + +04:24.080 --> 04:24.800 +Well, we're going to. 
+ +04:24.830 --> 04:31.430 +Today, the ensemble agent will use the specialist agent that we already have and built and have like + +04:31.460 --> 04:38.330 +a specialist agent class to represent, and it's going to use a random forest agent, which will be + +04:38.330 --> 04:42.800 +an agent using a random forest traditional machine learning approach. + +04:42.830 --> 04:44.690 +Except there's going to be a twist. + +04:44.720 --> 04:49.910 +It's going to be using vector embeddings that will use a transformer architecture. + +04:49.910 --> 04:54.110 +So it is a sort of modern take on traditional ML. + +04:54.290 --> 05:00.320 +So basically today we are going to be working on all of the icons there in red. + +05:00.320 --> 05:02.060 +We're going to be working on the ensemble agent. + +05:02.060 --> 05:03.740 +We've already built the specialist agent. + +05:03.740 --> 05:09.050 +So the other two will be part of our conquest today. + +05:09.050 --> 05:15.290 +And we will end today with an ensemble agent that is able to calculate prices, drawing on multiple + +05:15.290 --> 05:16.910 +other estimators. + +05:16.940 --> 05:18.230 +That's the challenge. + +05:18.230 --> 05:20.150 +Let's get over to JupyterLab. 
diff --git a/week5/community-contributions/subtitles/srts/59669389/ja_JP.srt b/week5/community-contributions/subtitles/srts/59669389/ja_JP.srt new file mode 100755 index 0000000..3e010bb --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669389/ja_JP.srt @@ -0,0 +1,181 @@ +WEBVTT + +00:00.530 --> 00:01.130 +まあね。 + +00:01.130 --> 00:01.730 +こんにちは。 + +00:01.730 --> 00:07.520 +8週目の2日目まで来たわけだが、 よく頑張ってくれた、 そしてあと4日というところまで来てくれた、 + +00:07.520 --> 00:11.090 +と超感謝している。 + +00:11.090 --> 00:18.050 +そしてこの日、 おそらく全コースの中で最もタフな日だと思う。 + +00:18.050 --> 00:22.250 +だから、 もしあなたがこの1日を乗り切り、 それを理解し、 コードを実行し、 それがすべて意味をなすものであれば、 + +00:22.250 --> 00:27.860 +あなたはすでに基本的にLMエンジニアリングをマスターしていることになる。 + +00:27.860 --> 00:29.060 +こういうことなんだ。 + +00:29.090 --> 00:30.530 +今日はタフになりそうだ。 + +00:30.710 --> 00:31.940 +覚悟しておけ。 + +00:32.060 --> 00:35.780 +でも、 そうなれば本当に面白くなる。 + +00:35.780 --> 00:38.600 +そして、 私たちは素晴らしい功績を成し遂げようとしている。 + +00:38.600 --> 00:45.020 +LMマスターへの8週間の旅も残すところあと4日。 + +00:45.050 --> 00:46.610 +ホームストレッチに入った。 + +00:46.610 --> 00:52.070 +もちろん、 フロンティアモデルを使ってテキストやコードを生成することはできる。 + +00:52.100 --> 00:58.610 +オープンソースを使えば、 顔と顔をくっつけながら、 データセットのキュレーションで問題を解決する戦略に従って、 + +00:58.610 --> 01:07.770 +ベースラインモデルを作り、 フロンティアモデルを微調整することができる。 + +01:07.860 --> 01:14.640 +そして最後に、 私たちが前回行ったのは、 微調整したLMSを本番環境にデプロイすることです。 + +01:14.670 --> 01:15.750 +私たちはモーダルを使う。 + +01:15.750 --> 01:23.220 +他にもたくさんありますが、 モーダルは素晴らしいですし、 ローカルで実行できるシンプルなPythonコードを書くだけで済むような方法で使っています。 + +01:23.220 --> 01:30.270 +どうやらローカルで動いているように見えるが、 実際はクラウド上で動いているモデルをモーダルで呼び出しているようだ。 + +01:30.300 --> 01:32.910 +使った分だけ支払う。 + +01:33.360 --> 01:43.770 +そこで今日は、 最終的なプライシング・モデルを構築するために、 より多くのプライシング・モデルを見るためのサイドステップを踏む。 + +01:43.770 --> 01:48.330 +それはアンサンブルと呼ばれる、 複数のモデルの組み合わせになる。 + +01:48.330 --> 01:58.110 +Ragに戻り、 Ragを使って巨大なChromeデータストアに対して動作するフロンティアモデル・ソリューションを構築する。 + +01:58.140 --> 02:00.870 +前回と違うのは、 ラングチェーンを使わないことだ。 + +02:00.870 --> 02:07.380 +ベクターストアで直接調べて、 それを使ってプロンプトを作る。 + +02:07.380 --> 02:11.320 +カバーの中でどのように機能するかを正確に理解するのは良いことだ。 + +02:11.650 
--> 02:17.380 +そして、 高い専門性をもってこのアンサンブル・モデルを構築し、 多くのモデルを呼び出しながら、 + +02:17.380 --> 02:21.370 +プロダクション・レディなコードを提供できるようにする。 + +02:21.370 --> 02:26.680 +そして、 おそらくここで最も重要なことは、 私たちがやることの多くは、 + +02:26.680 --> 02:35.290 +これまでやってきたことの繰り返しでありながら、 もう少し工業的な強さを持っているということだ。 + +02:35.290 --> 02:41.020 +これは、 LMエンジニアリングのさまざまな側面にかなり自信がある状態から、 + +02:41.020 --> 02:47.290 +上級者になることを目指すものだ。 だから、 時間をかけてコードに目を通し、 これを練習、 + +02:47.290 --> 02:54.100 +練習、 練習の機会としてとらえ、 真の専門知識を身につけたと感じられるようになるのだ。 + +02:54.850 --> 02:55.390 +分かった。 + +02:55.390 --> 02:59.530 +手っ取り早く、 私たちがここで取り組んでいるプロジェクトが何なのかを思い出してほしい。 + +02:59.560 --> 03:14.080 +ザ・プライス・イズ・ライトと呼ばれるプロジェクトでは、 この自律型エージェントフレームワークのエージェントワークフローを構築し、 + +03:14.080 --> 03:20.410 +お得な情報がオンラインで公開されるのを監視し、 その価格を推定し、 + +03:20.410 --> 03:30.670 +お買い得だと思われる場合はプッシュ通知を送信します。 + +03:30.700 --> 03:37.390 +前回、 エージェントのワークフローを示した図をお見せしましたが、 覚えていらっしゃるでしょうか。 + +03:37.390 --> 03:43.960 +左側がユーザー・インターフェースで、 右側がフレームワークです。 + +03:43.960 --> 03:54.640 +そして、 有望な取引を探すスキャン・エージェント、 価格を見積もるアンサンブル、 プッシュ通知を送ってくれるメッセージング・エージェントの3つだ。 + +03:54.760 --> 04:03.520 +というのも、 アンサンブル・エージェント自体が3つの異なるモデルに呼びかけるからだ。 + +04:03.520 --> 04:15.430 +フロンティア・エージェントを呼び出すことになるが、 これは、 たくさんの既存商品の在庫をもとに商品の価格を決めるラグ・ワークフローになる。 + +04:15.440 --> 04:18.170 +ラグにとっては完璧な使用例だ。 + +04:18.200 --> 04:24.050 +もちろん、 私が言うようにすでに考えていたかもしれない。 + +04:24.080 --> 04:24.800 +まあ、 そうするつもりだ。 + +04:24.830 --> 04:31.430 +今日、 アンサンブル・エージェントは、 すでにあるスペシャリスト・エージェントを使用し、 + +04:31.460 --> 04:42.800 +スペシャリスト・エージェント・クラスのようなものを構築し、 ランダムフォレスト・エージェントを使用します。 + +04:42.830 --> 04:44.690 +ただし、 ひねりがある。 + +04:44.720 --> 04:49.910 +トランスフォーマーアーキテクチャを使用するベクトル埋め込みを使用する予定だ。 + +04:49.910 --> 04:54.110 +つまり、 伝統的なMLを現代風にアレンジしたようなものだ。 + +04:54.290 --> 05:00.320 +というわけで、 今日は基本的に、 赤で示したアイコンのすべてに取り組むことになる。 + +05:00.320 --> 05:02.060 +これからアンサンブル・エージェントに取り組むつもりだ。 + +05:02.060 --> 05:03.740 +我々はすでに専門エージェントを構築している。 + +05:03.740 --> 05:09.050 +だから、 残りの2つは今日の征服の一部となる。 + +05:09.050 --> 05:16.910 +そして今日は、 他の複数の推計者を利用して価格を計算することができるアンサンブル・エージェントを紹介する。 + +05:16.940 --> 05:18.230 
+それが課題だ。 + +05:18.230 --> 05:20.150 +JupyterLabに移動しよう。 diff --git a/week5/community-contributions/subtitles/srts/59669389/ko_KR.srt b/week5/community-contributions/subtitles/srts/59669389/ko_KR.srt new file mode 100755 index 0000000..b4f0a86 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669389/ko_KR.srt @@ -0,0 +1,220 @@ +WEBVTT + +00:00.530 --> 00:01.130 +글쎄요 + +00:01.130 --> 00:01.730 +안녕하세요 + +00:01.730 --> 00:07.520 +8주 차 중 이틀째예요 지금까지 버텨줘서 정말 고마워요 4일 + +00:07.520 --> 00:11.090 +더 남은 시점에 도달했다는 것도요 + +00:11.090 --> 00:18.050 +한마디만 하자면 오늘 코스가 전체 코스 중 가장 힘들 거예요 + +00:18.050 --> 00:22.250 +오늘 하루를 무사히 넘기고 모든 걸 파악하고 코드를 실행하고 + +00:22.250 --> 00:27.860 +모든 게 말이 된다면 여러분은 이미 LM 엔지니어링을 통달한 거예요 + +00:27.860 --> 00:29.060 +이게 핵심이에요 + +00:29.090 --> 00:30.530 +오늘은 힘들 거예요 + +00:30.710 --> 00:31.940 +준비하세요 + +00:32.060 --> 00:35.780 +하지만 정말 재미있을 거예요 + +00:35.780 --> 00:38.600 +Get in get 멋진 성과를 거두게 될 거예요 + +00:38.600 --> 00:45.020 +LM 마스터링까지 8주 여정이 4일 남았네요 + +00:45.050 --> 00:46.610 +이제 거의 다 왔어요 + +00:46.610 --> 00:52.070 +물론 이미 오픈 소스 모델로 텍스트와 코드를 생성할 수 있고 얼굴을 앞으로 + +00:52.100 --> 00:58.610 +당길 수 있으며 기본 모델을 만들기 어려운 큐레이팅 데이터 집합으로 문제를 해결하는 + +00:58.610 --> 01:04.340 +전략을 따를 수 있고 오픈 소스 LMS를 미세 조정하는 풀-엔드 투-엔드 + +01:04.410 --> 01:07.770 +프로세스를 수행할 수 있죠 + +01:07.860 --> 01:14.640 +마지막으로, 지난번에 한 건 잘 조정된 LMS를 프로덕션에 배포하는 거였죠 + +01:14.670 --> 01:15.750 +우린 모듈을 써요 + +01:15.750 --> 01:20.400 +다른 것도 많지만 Modal도 훌륭합니다 간단한 파이썬 코드를 + +01:20.400 --> 01:23.220 +작성해서 로컬에서 실행할 수 있죠 + +01:23.220 --> 01:28.470 +로컬에서 실행되는 것처럼 보이지만 사실은 클라우드에서 실행되는 모델에 + +01:28.470 --> 01:30.270 +호출하는 거예요 + +01:30.300 --> 01:32.910 +사용료만 지불하는 거죠 + +01:33.360 --> 01:43.770 +오늘은 추가로 가격 모델을 살펴봅니다 최종 가격 모델을 만드는 동안에요 + +01:43.770 --> 01:48.330 +그게 앙상블이 될 거예요 여러 모델을 합친 거죠 + +01:48.330 --> 01:55.230 +다시 Rag로 돌아가 거대한 크롬 데이터 스토어에 대항할 프런티어 모델 + +01:55.260 --> 01:58.110 +솔루션을 개발할 거예요 + +01:58.140 --> 02:00.870 +그리고 전과는 다르게 랑체인을 사용하지 않을 거예요 + +02:00.870 --> 02:06.390 +벡터 스토어에서 직접 찾아보고 프롬프트를 구축하는데 사용합니다, + 
+02:06.390 --> 02:07.380 +좋은 일이죠 + +02:07.380 --> 02:11.320 +이불 밑의 일을 정확히 이해하게 돼서 좋네요 + +02:11.650 --> 02:17.380 +그런 다음 높은 수준의 전문성을 가진 이 앙상블 모델을 만들 겁니다 그리고 생산 + +02:17.380 --> 02:21.370 +준비 코드를 제공하고 여러 모델을 호출할 수 있죠 + +02:21.370 --> 02:26.680 +여기서 가장 중요한 건 우리가 하는 많은 일이 이전에 해 본 + +02:26.680 --> 02:31.090 +일을 반복하는 것이긴 하지만 좀 더 산업적인 방식으로요 + +02:31.120 --> 02:35.290 +비트를 더 탄탄하게 만드는 거죠 + +02:35.290 --> 02:41.020 +LM 엔지니어링의 다양한 측면에서 상당히 확신하는 지점에서 고급으로 + +02:41.020 --> 02:47.290 +가는 겁니다 그러니 코드를 검토할 시간을 갖고 이걸 기회로 삼아 연습, + +02:47.290 --> 02:54.100 +연습, 연습 진정한 전문 지식을 구축했다는 느낌이 들도록 하세요 + +02:54.850 --> 02:55.390 +좋아요 + +02:55.390 --> 02:59.530 +우리가 작업하는 프로젝트가 뭔지 다시 한 번 보죠 + +02:59.560 --> 03:07.000 +가격은 오른쪽 프로젝트라는 자율적 에이전트 프레임워크를 구축하는 겁니다 에이전트 + +03:07.000 --> 03:14.080 +워크플로는 온라인 거래가 공개될 때를 주시하고 가격을 예측하고 가격에 따라 + +03:14.080 --> 03:20.410 +푸시 알림을 보냅니다 여러 에이전트가 문제를 해결하기 위해 협업할 겁니다 + +03:20.410 --> 03:30.670 +그중에는 개척자 단속 모델인 미세 튜닝 모델도 있습니다 GPT 4와 클로드보다 현저히 뛰어나죠 + +03:30.700 --> 03:37.390 +지난번에 이 다이어그램을 보여드린 걸 기억하실 겁니다 에이전트 워크플로를 보여드렸죠 왼쪽은 사용자 + +03:37.390 --> 03:43.960 +인터페이스 우리가 사용할 프레임워크입니다 에이전트가 활동을 조율하도록 계획하는 거죠 + +03:43.960 --> 03:50.830 +그리고 유망한 계약을 찾아주는 스캐닝 요원 세 명 앙상블 가격 산정 푸시 + +03:50.830 --> 03:54.640 +알림을 보내줄 메시지 요원 한 명요 + +03:54.760 --> 04:01.150 +이건 약간 단순화됐다는 걸 말씀드리고 싶었어요 앙상블 비트 자체가 세 가지 + +04:01.150 --> 04:03.520 +모델을 부를 테니까요 + +04:03.520 --> 04:11.710 +프런티어 에이전트를 호출할 거예요 검색할 수 있는 많은 기존 제품 인벤토리에 기반을 + +04:11.710 --> 04:15.430 +둔 가격 매기는 래그 워크플로우죠 + +04:15.440 --> 04:18.170 +걸레로 쓰기 딱 좋아요 + +04:18.200 --> 04:23.090 +물론 제가 말씀드린 대로 이미 생각해 보셨을 테니 왜 이걸 시도도 안 했나 궁금하실 + +04:23.090 --> 04:24.050 +거예요 + +04:24.080 --> 04:24.800 +그렇게 될 거예요 + +04:24.830 --> 04:31.430 +오늘 앙상블 에이전트는 우리가 이미 만들어둔 특수 에이전트를 사용할 거예요 대표할 + +04:31.460 --> 04:38.330 +특수 에이전트 클래스 같은 거죠 임의 숲 에이전트를 사용할 건데 전통적인 머신 러닝 + +04:38.330 --> 04:42.800 +접근법으로 임의 숲 에이전트를 만들 거예요 + +04:42.830 --> 04:44.690 +반전이 있을 거예요 + +04:44.720 --> 04:49.910 +변압기 구조를 사용할 벡터 내장화를 사용할 거예요 + +04:49.910 --> 04:54.110 +전통적인 ML을 현대적으로 재해석한 
거군요 + +04:54.290 --> 05:00.320 +기본적으로 오늘은 저기 빨간 아이콘들을 작업할 거예요 + +05:00.320 --> 05:02.060 +앙상블 에이전트를 만들 거예요 + +05:02.060 --> 05:03.740 +특수 요원은 이미 만들었어요 + +05:03.740 --> 05:09.050 +나머지 둘은 오늘 정복에 참여할 거예요 + +05:09.050 --> 05:15.290 +그리고 오늘 마무리로 앙상블 에이전트가 가격을 계산하고 다른 평가자 여럿을 + +05:15.290 --> 05:16.910 +끌어들일 거예요 + +05:16.940 --> 05:18.230 +그게 도전 과제죠 + +05:18.230 --> 05:20.150 +Get it JupyterLab으로 가죠 diff --git a/week5/community-contributions/subtitles/srts/59669631/en_US.srt b/week5/community-contributions/subtitles/srts/59669631/en_US.srt new file mode 100755 index 0000000..0086daf --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669631/en_US.srt @@ -0,0 +1,427 @@ +WEBVTT + +00:00.860 --> 00:07.880 +Here we are in our favorite place to be in JupyterLab, ready for some coding and a lot of coding that's + +00:07.880 --> 00:08.780 +going to be today. + +00:08.810 --> 00:15.800 +The first thing you may spot, it may have caught your eye that there isn't a day two Jupyter Notebook. + +00:15.830 --> 00:21.590 +There are, in fact, five day two Jupyter notebooks, which is a clue that we've got some work cut + +00:21.590 --> 00:22.610 +out for us. + +00:22.730 --> 00:27.080 +And it does also mean that I might be going a bit faster through the notebook than usual, and that's + +00:27.080 --> 00:31.670 +just an opportunity for you to come back and do it yourself and and experiment. + +00:31.910 --> 00:39.260 +Um, so today we're going to be building a more complex approach to estimating prices of goods, the + +00:39.260 --> 00:41.390 +same thing we've been working on for a while. + +00:41.630 --> 00:48.260 +Um, and the biggest part of today is that we're going to try and approach a rag pipeline solution for + +00:48.260 --> 00:49.970 +doing the same thing. + +00:50.030 --> 00:55.520 +Uh, the the intuition is surely this is actually a really great use case for Rag. + +00:55.520 --> 01:01.160 +We've got 400,000 prices of products in our training data set. 
+ +01:01.220 --> 01:08.810 +Uh, what if we put all of them in a data store, and when we're calling a model, we give it some similar + +01:08.810 --> 01:13.460 +products and say, hey, please, can you give me the price of this electric toothbrush? + +01:13.460 --> 01:20.600 +And by the way, here are five similar electric toothbrushes from our training data and give that to + +01:20.630 --> 01:22.070 +a model to help it. + +01:22.070 --> 01:24.890 +And it will make total sense to use a frontier model for that. + +01:24.890 --> 01:30.710 +And you've got to imagine that armed with some similarities, we're giving a huge head start to the + +01:30.710 --> 01:32.030 +frontier model. + +01:32.120 --> 01:39.350 +And so it is a great example of where rag should shine, because we've got a repository of accurate + +01:39.350 --> 01:40.100 +data. + +01:40.130 --> 01:44.090 +So it's definitely fits right within the sweet spot. + +01:44.180 --> 01:50.030 +So what we're going to do in this notebook, the 2.0 notebook that I'm in right now in the week eight + +01:50.030 --> 01:57.260 +folder, we're going to create our vector data store with 400,000 training data points. + +01:57.260 --> 01:58.370 +So that's a big deal. + +01:58.370 --> 02:02.060 +That's a lot more than what we did in week five. + +02:02.540 --> 02:05.810 +In the next notebook day 2.1. + +02:05.810 --> 02:09.380 +We're going to visualize it in 2D and then we're going to visualize it in 3D. + +02:09.500 --> 02:10.910 +So those are quick notebooks. + +02:10.910 --> 02:16.910 +But it's great to get a hands on sense of these things in day 2.3. + +02:16.940 --> 02:20.330 +We're then going to build our Rag pipeline and test it. + +02:20.330 --> 02:25.910 +And in day 2.4 this is where we do a lot in 1 in 1 notebook. + +02:25.940 --> 02:31.970 +We're going to bring back the random forest processor that we looked at back in week six. + +02:32.270 --> 02:34.760 +And but we're going to do an improved version of it. 
+ +02:34.760 --> 02:38.780 +And then we're going to build what's called an ensemble which which allows all of the models. + +02:38.780 --> 02:41.660 +Essentially, you can think of it as a sort of voting together. + +02:41.690 --> 02:46.400 +They're working together on the problem, and it's picking a linear combination of them that is going + +02:46.400 --> 02:47.660 +to give the best results. + +02:47.660 --> 02:49.460 +So a lot to get through. + +02:49.460 --> 02:55.910 +I did want to to mention, look, that we've already built a powerful product processor in the form + +02:55.910 --> 02:57.410 +of a specialized LLM. + +02:57.410 --> 03:00.590 +So you might be thinking, why are we spending so much time working on this? + +03:00.590 --> 03:02.060 +And the answer is simple. + +03:02.090 --> 03:03.620 +It's what I said a moment ago. + +03:03.620 --> 03:07.400 +This is see this as really about solidifying your expertise. + +03:07.400 --> 03:10.790 +We're doing this so you can get more practice with rag. + +03:11.000 --> 03:20.030 +More practice with other kinds of aspects of building sort of agent based functions, whilst also working + +03:20.030 --> 03:20.840 +on the same project. + +03:20.840 --> 03:24.740 +So we know what we're doing and it's satisfying, and we don't have to curate more data because we have + +03:24.740 --> 03:25.370 +the data. + +03:25.370 --> 03:29.180 +So hopefully that that sort of motivates you for what we're about to do. + +03:29.750 --> 03:36.170 +All right, without further ado, we will run some imports environment variables, log in to hugging + +03:36.200 --> 03:38.300 +face stuff we know. + +03:38.300 --> 03:44.900 +And I'm going to again I'm going to load in the training data, the pickle file that we have from before + +03:44.930 --> 03:47.930 +to simplify things just loaded in the training data. + +03:47.930 --> 03:53.180 +So you may remember we can look at the first of the training data points fuel pump module. 
+
+03:53.180 --> 03:56.930
+We can call prompts if you remember this.
+
+03:56.930 --> 04:00.980
+And we will get the familiar prompt that's used for training.
+
+04:00.980 --> 04:02.660
+How much this cost to the nearest dollar.
+
+04:02.660 --> 04:07.280
+And then the price is and the value right there.
+
+04:07.280 --> 04:08.210
+Okay.
+
+04:08.240 --> 04:11.760
+With that time to create our Chroma Datastore.
+
+04:11.760 --> 04:20.010
+So you may remember back in week five, we created this store with it had 123 documents, if I remember
+
+04:20.010 --> 04:29.340
+that right, 123 chunks which were taken from our documents, um, of the fictional company Insurellm
+
+04:29.370 --> 04:31.740
+um, that we used.
+
+04:31.740 --> 04:36.840
+So this time we're going to create a chroma datastore with 400,000 documents.
+
+04:36.840 --> 04:37.830
+That's a lot.
+
+04:37.890 --> 04:38.940
+We're not going to chunk it up.
+
+04:38.970 --> 04:39.840
+Of course we don't need to.
+
+04:39.870 --> 04:41.610
+These are already small chunks.
+
+04:41.760 --> 04:46.530
+Each each product is going to go in chroma as its own item.
+ +05:30.810 --> 05:32.190 +Let's do that right now. + +05:32.220 --> 05:33.330 +Apologies. + +05:33.600 --> 05:42.210 +Uh, we will add in here another constant DB equals products underscore vector store. + +05:42.930 --> 05:47.760 +I was cleaning up this notebook to make it useful, and I cleaned it up too much. + +05:48.150 --> 05:50.070 +Uh, if I spelled that right, I have products. + +05:50.070 --> 05:51.090 +Vector store. + +05:51.120 --> 05:52.620 +There we go. + +05:52.710 --> 05:55.710 +All right, so back we go. + +05:55.740 --> 05:57.270 +Momentary diversion. + +05:57.270 --> 06:00.840 +So we're going to create a chroma data store with that path. + +06:00.870 --> 06:02.070 +There we go. + +06:02.220 --> 06:06.450 +We're going to check if it exists and delete it if it does already exist. + +06:06.540 --> 06:07.680 +I'm not going to run that. + +06:07.680 --> 06:10.950 +It will delete this products vector store which I have created. + +06:10.950 --> 06:13.170 +And which takes a little bit of time. + +06:13.230 --> 06:15.060 +Uh, although not that much time, but. + +06:15.060 --> 06:17.940 +But I will let you run this yourself. + +06:18.450 --> 06:23.670 +So we are going to use a new a different type of embedding. + +06:23.670 --> 06:27.990 +We are going to use something called the sentence transformer. + +06:27.990 --> 06:34.800 +It's a useful model from hugging face it maps text chunks to 384 dimensions. + +06:34.920 --> 06:38.310 +Um, and it's ideal for things like semantic search. + +06:38.310 --> 06:41.640 +It's a transformer model that produces these vector embeddings. + +06:41.640 --> 06:47.130 +You may remember we used OpenAI embeddings when we built our Rag pipeline before. + +06:47.130 --> 06:52.980 +So if you compare the two of those together, this hugging face one, uh, it's it doesn't have the + +06:52.980 --> 06:54.330 +same dimensionality. + +06:54.330 --> 07:00.240 +So and it's probably not as accurate as the OpenAI embeddings one. 
+ +07:00.240 --> 07:03.690 +But it's free, it's fast and we can run it locally. + +07:03.690 --> 07:08.250 +And that last point is super important because there are some cases where you might not want the data + +07:08.250 --> 07:09.420 +to leave your box. + +07:09.420 --> 07:13.650 +For example, if you're working on that project I assigned you from week five and you're looking to + +07:13.680 --> 07:19.220 +build your own personal Rag database with your own stuff like your emails and things. + +07:19.220 --> 07:23.990 +You might not want to send them all to OpenAI, even though it should be safe, you just might prefer + +07:23.990 --> 07:28.220 +to do it yourself, and this is an easy way to do it without it leaving your box. + +07:28.220 --> 07:31.100 +So we load in that model from hugging face. + +07:31.130 --> 07:35.960 +It gives a warning that I think we can ignore about in the future, that something will have to be set + +07:35.990 --> 07:36.740 +differently. + +07:37.010 --> 07:42.920 +Um, so what you can then do once you've created this model is you can call model dot encode. + +07:42.950 --> 07:47.000 +You pass in a list of text documents. + +07:47.000 --> 07:49.220 +What you'll get back is a numpy array. + +07:49.220 --> 07:54.590 +And you can just I can pluck the first one out of that and I will get my vector. + +07:54.590 --> 07:55.550 +And it was pretty quick. + +07:55.550 --> 07:57.680 +And it works well in bulk as well. + +07:57.680 --> 08:06.080 +So if we if we look at the length of this vector we will see it's got 384 dimensions to it. + +08:06.110 --> 08:08.450 +We can just print it and we'll see. + +08:08.450 --> 08:09.920 +It will be a big old vector. + +08:09.920 --> 08:10.490 +There it is. + +08:10.490 --> 08:18.260 +So that vector in some way represents well hi there in numbers in a way that is a multi-dimensional + +08:18.260 --> 08:21.260 +reflection According to the LM. 
+ +08:21.470 --> 08:27.770 +So that is, uh, is worth, uh, um, playing with and getting a sense of it. + +08:28.430 --> 08:33.920 +So what I'm now going to do is write this utility method description that takes an item, one of our + +08:33.920 --> 08:36.950 +items, and turns it into just a chunk of text. + +08:36.950 --> 08:41.600 +So if I run that just to show you, if you remember, if we look at something like the first training + +08:41.600 --> 08:45.320 +point and we look at its prompt, that's what the prompt looks like. + +08:45.350 --> 08:54.320 +If instead of that I now call description on that item, I just get the same thing, but without the + +08:54.320 --> 08:59.870 +stuff at the front and without the price, it's just a description of the product on its own. + +09:00.050 --> 09:05.660 +And what we now do is we go and put that into our vector data store. + +09:05.900 --> 09:08.720 +So we run that code and it will take a while. + +09:08.720 --> 09:13.910 +And by the time it's finished, it will be in the vector data store and you'll be ready for business. + +09:13.910 --> 09:16.820 +I've already run it, so it's already created my vector data store. + +09:16.820 --> 09:18.380 +You can see over on the left. + +09:18.380 --> 09:19.760 +You should do that now. + +09:19.760 --> 09:22.580 +And when you're done, I will see you for the next video. 
diff --git a/week5/community-contributions/subtitles/srts/59669631/ja_JP.srt b/week5/community-contributions/subtitles/srts/59669631/ja_JP.srt new file mode 100755 index 0000000..cc89065 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669631/ja_JP.srt @@ -0,0 +1,367 @@ +WEBVTT + +00:00.860 --> 00:08.780 +JupyterLabのお気に入りの場所に来て、 コーディングの準備をしている。 + +00:08.810 --> 00:15.800 +まず目につくのは、 2日目のJupyter Notebookがないことだろう。 + +00:15.830 --> 00:22.610 +実際、 2日目のJupyterノートブックが5つある。 + +00:22.730 --> 00:31.670 +また、 いつもよりノートブックを読むスピードが少し速くなるかもしれない。 + +00:31.910 --> 00:41.390 +さて、 今日は商品の価格を推定するためのより複雑なアプローチを構築します。 + +00:41.630 --> 00:49.970 +ええと、 今日の最大のポイントは、 同じことをするためのラグ・パイプライン・ソリューションにアプローチしようとすることだ。 + +00:50.030 --> 00:55.520 +直感的には、 これはラグにとって本当に素晴らしいユースケースだと思う。 + +00:55.520 --> 01:01.160 +トレーニング・データ・セットには40万点の商品の価格がある。 + +01:01.220 --> 01:08.810 +そのすべてをデータストアに置いて、 モデルを呼び出すときに、 似たような製品をいくつか与えて、 この電動歯ブラシの価格を教えてください、 + +01:08.810 --> 01:13.460 +と言ったらどうだろう? + +01:13.460 --> 01:22.070 +ちなみに、 ここに学習データから似たような電動歯ブラシを5つ選び、 それをモデルに与えて助けてもらう。 + +01:22.070 --> 01:24.890 +そのためにフロンティア・モデルを使うのはまったく理にかなっている。 + +01:24.890 --> 01:32.030 +そして、 いくつかの類似点を武器に、 我々はフロンティアモデルに大きな先鞭をつけていることを想像してほしい。 + +01:32.120 --> 01:40.100 +正確なデータの宝庫を手に入れたのだから。 + +01:40.130 --> 01:44.090 +だから、 スウィートスポットにぴったりなんだ。 + +01:44.180 --> 01:50.030 +そこで、 このノート、 2. 0ノートブックの8週目のフォルダに、 + +01:50.030 --> 01:57.260 +40万トレーニング・データ・ポイントを含むベクトル・データ・ストアを作成する。 + +01:57.260 --> 01:58.370 +だから、 それは大きなことなんだ。 + +01:58.370 --> 02:02.060 +第5週よりもずっと多い。 + +02:02.540 --> 02:05.810 +次のノートでは2日目。 1. + +02:05.810 --> 02:09.380 +2Dで視覚化し、 次に3Dで視覚化する。 + +02:09.500 --> 02:10.910 +これがクイックノートだ。 + +02:10.910 --> 02:16.910 +でも、 2日目にこういったことを肌で感じられるのは素晴らしいことだ。 3. 
+ +02:16.940 --> 02:20.330 +そしてラグ・パイプラインを構築し、 テストする。 + +02:20.330 --> 02:25.910 +そして2日目。 4 これが1 in 1ノートで多くのことを行っているところです。 + +02:25.940 --> 02:31.970 +第6週に見たランダムフォレスト・プロセッサーを復活させる。 + +02:32.270 --> 02:34.760 +そして、 しかし、 我々はその改良版をやるつもりだ。 + +02:34.760 --> 02:38.780 +そして、 すべてのモデルを可能にするアンサンブルと呼ばれるものを構築する。 + +02:38.780 --> 02:41.660 +基本的には、 一緒に投票するようなものだと考えればいい。 + +02:41.690 --> 02:47.660 +彼らは一緒に問題に取り組んでいて、 その中から最良の結果をもたらす直線的な組み合わせを選んでいるのだ。 + +02:47.660 --> 02:49.460 +だから、 乗り切らなければならないことがたくさんある。 + +02:49.460 --> 02:57.410 +私たちはすでに、 専門のLLMという形で強力な製品プロセッサを構築していることをお伝えしたかったのです。 + +02:57.410 --> 03:00.590 +それなのに、 なぜこんなに時間をかけているのだろう? + +03:00.590 --> 03:02.060 +答えは簡単だ。 + +03:02.090 --> 03:03.620 +さっき言ったとおりだ。 + +03:03.620 --> 03:07.400 +これは、 自分の専門性を確固たるものにすることだと考えている。 + +03:07.400 --> 03:10.790 +ボロ雑巾でもっと練習できるようにするためだ。 + +03:11.000 --> 03:20.840 +同じプロジェクトに携わりながら、 エージェント・ベースの機能を構築する他の種類の側面について、 より多くの練習を積むことができる。 + +03:20.840 --> 03:25.370 +だから、 私たちは自分たちが何をしているのかわかっているし、 満足している。 + +03:25.370 --> 03:29.180 +だから、 私たちがこれからやろうとしていることへのモチベーションを高めてもらえればと思う。 + +03:29.750 --> 03:38.300 +さて、 これ以上説明することなく、 いくつかのインポート環境変数を実行し、 私たちが知っている顔を抱きしめてログインする。 + +03:38.300 --> 03:47.930 +そしてまた、 トレーニング・データを読み込むつもりだ。 単純化するために、 以前から持っているピックル・ファイルをトレーニング・データに読み込む。 + +03:47.930 --> 03:53.180 +そこで、 トレーニング・データの最初のポイントである燃料ポンプ・モジュールを見てみよう。 + +03:53.180 --> 03:56.930 +これを覚えていれば、 プロンプトを呼ぶことができる。 + +03:56.930 --> 04:00.980 +そして、 トレーニングでおなじみのプロンプトが表示される。 + +04:00.980 --> 04:02.660 +1ドル単位でいくらかかったか。 + +04:02.660 --> 04:07.280 +そして、 価格と価値がそこにある。 + +04:07.280 --> 04:08.210 +オーケー。 + +04:08.240 --> 04:11.760 +これでChromaデータストアが完成した。 + +04:11.760 --> 04:20.010 +5週目にこのストアを作ったんだけど、 + +04:20.010 --> 04:31.740 +123個のドキュメントがあったのを覚えているかな。 + +04:31.740 --> 04:36.840 +そこで今回は、 400,000ドキュメントのクロマ・データストアを作成する。 + +04:36.840 --> 04:37.830 +多いね。 + +04:37.890 --> 04:38.940 +チャンクアップするつもりはない。 + +04:38.970 --> 04:39.840 +もちろん、 その必要はない。 + +04:39.870 --> 04:41.610 +これらはすでに小さな塊だ。 + +04:41.760 --> 04:46.530 +各製品は、 それぞれのアイテムとしてクロマに入る。 + 
+04:46.800 --> 04:56.190 +だから、 ラングチェーンをこのボロパイプラインに使うつもりはない。 + +04:56.280 --> 05:00.270 +ええと、 ラングチェーンは良い、 有用な抽象化を与えてくれるが、 その必要はない。 + +05:00.300 --> 05:05.970 +そして、 今のような熟練したレベルになれば、 Llmsを直接扱うだけでよくなり、 + +05:05.970 --> 05:12.750 +その上に構築される抽象化レイヤーは必ずしも必要ではなくなる。 + +05:13.110 --> 05:15.900 +だから、 そうするつもりなんだ。 + +05:15.990 --> 05:25.560 +クロマデータベースを作成します。 + +05:26.520 --> 05:30.810 +ええと、 私はもっと早くその定数を外していたかもしれない。 + +05:30.810 --> 05:32.190 +今すぐそうしよう。 + +05:32.220 --> 05:33.330 +謝罪する。 + +05:33.600 --> 05:42.210 +ええと、 ここにもうひとつ、 DBイコール商品アンダースコア・ベクトル・ストアの定数を追加します。 + +05:42.930 --> 05:47.760 +このノートを使いやすくするために整理していたんだけど、 整理しすぎたんだ。 + +05:48.150 --> 05:50.070 +スペルが正しければ、 私は製品を持っている。 + +05:50.070 --> 05:51.090 +ベクターストア + +05:51.120 --> 05:52.620 +これでよし。 + +05:52.710 --> 05:55.710 +よし、 じゃあ戻ろう。 + +05:55.740 --> 05:57.270 +一瞬の気晴らし。 + +05:57.270 --> 06:00.840 +そこで、 そのパスでクロマ・データ・ストアを作成する。 + +06:00.870 --> 06:02.070 +これでよし。 + +06:02.220 --> 06:06.450 +存在するかどうかをチェックし、 すでに存在する場合は削除する。 + +06:06.540 --> 06:07.680 +私はそれを実行するつもりはない。 + +06:07.680 --> 06:10.950 +それは私が作成したこの製品ベクターストアを削除します。 + +06:10.950 --> 06:13.170 +そして少し時間がかかる。 + +06:13.230 --> 06:15.060 +それほど時間はかからなかったけど。 + +06:15.060 --> 06:17.940 +でも、 これはあなた自身に任せるわ。 + +06:18.450 --> 06:23.670 +そこで私たちは、 新しい別のタイプのエンベッディングを使おうとしている。 + +06:23.670 --> 06:27.990 +私たちはセンテンス・トランスフォーマーというものを使おうと思っている。 + +06:27.990 --> 06:34.800 +これは、 テキストチャンクを384次元にマッピングするハグフェイスの便利なモデルだ。 + +06:34.920 --> 06:38.310 +それに、 セマンティック検索などには理想的だ。 + +06:38.310 --> 06:41.640 +このベクトル埋め込みを生成するのは変換モデルだ。 + +06:41.640 --> 06:47.130 +以前、 ラグ・パイプラインを構築したときにOpenAIのエンベッディングを使ったことを覚えているかもしれない。 + +06:47.130 --> 06:52.980 +だから、 この2つを一緒に比べてみると、 このハグしている顔のほうは、 あー、 + +06:52.980 --> 06:54.330 +次元が違う。 + +06:54.330 --> 07:00.240 +だから、 OpenAIのエンベッディングほど正確ではないだろう。 + +07:00.240 --> 07:03.690 +でも、 無料だし、 速いし、 ローカルで動かせる。 + +07:03.690 --> 07:09.420 +この最後のポイントは非常に重要だ。 + +07:09.420 --> 07:19.220 +例えば、 5週目に私が指定したプロジェクトに取り組んでいて、 Eメールなど自分のものを使って自分だけのラグ・データベースを構築しようとしているとする。 + +07:19.220 --> 07:28.220 
+OpenAIにすべてを送信するのは、 安全であるべきとはいえ避けたいかもしれない。 + +07:28.220 --> 07:31.100 +そこで、 ハギング・フェイスのモデルをロードする。 + +07:31.130 --> 07:36.740 +これは、 将来的には無視してもいいような警告を与えている。 + +07:37.010 --> 07:42.920 +このモデルを作成したら、 model dot encodeを呼び出すことができます。 + +07:42.950 --> 07:47.000 +テキスト文書のリストを渡す。 + +07:47.000 --> 07:49.220 +返ってくるのはnumpyの配列だ。 + +07:49.220 --> 07:54.590 +そして、 その中から最初の1枚を抜き取るだけで、 私のベクトルを得ることができる。 + +07:54.590 --> 07:55.550 +そして、 かなり早かった。 + +07:55.550 --> 07:57.680 +そして、 バルクでもうまく機能する。 + +07:57.680 --> 08:06.080 +このベクトルの長さを見ると、 384次元あることがわかる。 + +08:06.110 --> 08:08.450 +印刷すればいい。 + +08:08.450 --> 08:09.920 +大きな古いベクトルになるだろう。 + +08:09.920 --> 08:10.490 +あれだ。 + +08:10.490 --> 08:18.260 +だから、 そのベクトルは、 ある意味では、 多次元的な反映である方法で、 数字でうまくハイそこに表している + +08:18.260 --> 08:21.260 +従った LMに。 + +08:21.470 --> 08:27.770 +だから、 それは......その......遊んで、 感覚をつかむ価値がある。 + +08:28.430 --> 08:33.920 +つまり、 これから書くユーティリティ・メソッドの説明は、 アイテム(項目のひとつ)を受け取り、 + +08:33.920 --> 08:36.950 +それを単なるテキストの塊に変えるものだ。 + +08:36.950 --> 08:41.600 +最初のトレーニングポイントを見て、 そのプロンプトを見ると、 + +08:41.600 --> 08:45.320 +プロンプトはこのように表示されます。 + +08:45.350 --> 08:54.320 +その代わりに、 その商品の説明を呼び出すと、 同じものが表示されるだけだが、 + +08:54.320 --> 08:59.870 +前面に何も表示されず、 価格も表示されない。 + +09:00.050 --> 09:05.660 +そのデータをベクター・データ・ストアに格納する。 + +09:05.900 --> 09:08.720 +そのコードを実行すると、 しばらく時間がかかる。 + +09:08.720 --> 09:13.910 +そして、 それが完成する頃には、 ベクター・データ・ストアに登録され、 ビジネスの準備が整う。 + +09:13.910 --> 09:16.820 +すでに実行したので、 ベクター・データ・ストアが作成されている。 + +09:16.820 --> 09:18.380 +左側に見える。 + +09:18.380 --> 09:19.760 +今すぐそうすべきだ。 + +09:19.760 --> 09:22.580 +それが終わったら、 また次のビデオで会おう。 diff --git a/week5/community-contributions/subtitles/srts/59669631/ko_KR.srt b/week5/community-contributions/subtitles/srts/59669631/ko_KR.srt new file mode 100755 index 0000000..eac3c9f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59669631/ko_KR.srt @@ -0,0 +1,421 @@ +WEBVTT + +00:00.860 --> 00:07.880 +우리가 가장 좋아하는 JupyterLab에 왔습니다 오늘 코딩할 게 아주 + +00:07.880 --> 00:08.780 +많아요 + +00:08.810 --> 00:15.800 +가장 먼저 눈에 띄는 건 둘째 날의 
주피터 공책이 없다는 거예요 + +00:15.830 --> 00:21.590 +5일간 작성된 주피터 공책도 있어요 우리가 할 일이 생겼다는 + +00:21.590 --> 00:22.610 +증거죠 + +00:22.730 --> 00:27.080 +비트는 제가 평소보다 공책을 빨리 작성할 수도 있다는 뜻이기도 + +00:27.080 --> 00:31.670 +해요 비트가 돌아오면 직접 해 볼 기회가 생기는 거죠 + +00:31.910 --> 00:39.260 +오늘은 좀 더 복잡한 접근법을 시도할 거예요 상품 가격 평가법이죠 한동안 해온 + +00:39.260 --> 00:41.390 +것과 같은 거예요 + +00:41.630 --> 00:48.260 +오늘 가장 중요한 부분은 같은 작업을 하기 위한 래그 파이프라인 해결책에 + +00:48.260 --> 00:49.970 +접근하는 거예요 + +00:50.030 --> 00:55.520 +직감적으로 봤을 때 랙을 사용하기에 아주 좋은 사례예요 + +00:55.520 --> 01:01.160 +교육 데이터 집합에 400,000가지 가격이 있어요 + +01:01.220 --> 01:08.810 +이것들을 데이터 스토어에 저장하고 모델을 호출할 때 유사한 제품들을 주면서 전동 + +01:08.810 --> 01:13.460 +칫솔의 가격을 알려달라고 하면 어떨까요? + +01:13.460 --> 01:20.600 +훈련 데이터에 있는 유사한 전동 칫솔 5개를 도움될 모델에 + +01:20.630 --> 01:22.070 +주세요 + +01:22.070 --> 01:24.890 +개척자 모델을 사용하는 게 아주 합리적이죠 + +01:24.890 --> 01:30.710 +비슷한 점을 생각해 보면 개척지 모델에서 훨씬 유리하게 출발할 수 있어요. HMSII + +01:30.710 --> 01:32.030 +HMSI HMSI + +01:32.120 --> 01:40.100 +래그가 빛나야 할 좋은 예죠 정확한 데이터의 저장소가 있거든요 + +01:40.130 --> 01:44.090 +최적의 조건에 딱 맞아요 + +01:44.180 --> 01:50.030 +이 공책에서 할 일은요 지금 제가 있는 0번 공책은 8주 차 폴더인데 + +01:50.030 --> 01:57.260 +400,000개의 훈련 데이터 포인트를 가진 벡터 데이터 저장소를 만들 거예요 + +01:57.260 --> 01:58.370 +그게 중요하죠 + +01:58.370 --> 02:02.060 +5주 차보다 훨씬 많이 했어요 + +02:02.540 --> 02:05.810 +다음 공책 둘째 날에요 1번요 + +02:05.810 --> 02:09.380 +2D로 시각화한 다음 3D로 시각화할 거예요 + +02:09.500 --> 02:10.910 +빠른 노트들이네요 + +02:10.910 --> 02:16.910 +하지만 촬영 2일째에 이런 걸 직접 경험하니 좋네요 Get it 3번요 + +02:16.940 --> 02:20.330 +그런 다음 래그 파이프라인을 만들어 테스트하죠 + +02:20.330 --> 02:25.910 +둘째 날에도요 여기선 공책 하나에 많은 걸 담을 수 있어요 + +02:25.940 --> 02:31.970 +6주 차에 봤던 임의의 산림 처리기를 다시 쓸 거예요 + +02:32.270 --> 02:34.760 +하지만 우린 그걸 개선한 버전을 할 거예요 + +02:34.760 --> 02:38.780 +그런 다음 앙상블이라는 걸 만들 거예요 모든 모형을 사용할 수 있도록요 + +02:38.780 --> 02:41.660 +함께 투표한다고 생각하면 돼요 + +02:41.690 --> 02:46.400 +함께 문제를 해결하고 선형 조합을 선택해서 최고의 결과를 + +02:46.400 --> 02:47.660 +내는 거죠 + +02:47.660 --> 02:49.460 +Get up 할 게 많네요 + +02:49.460 --> 02:55.910 +말씀드리고 싶은 것은 이미 강력한 제품 
프로세서를 특수 LLM의 형태로 만들었다는 + +02:55.910 --> 02:57.410 +점인데요 + +02:57.410 --> 03:00.590 +왜 이렇게 많은 시간을 투자하는지 궁금하실 거예요 + +03:00.590 --> 03:02.060 +답은 간단해요 + +03:02.090 --> 03:03.620 +방금 제가 한 말이잖아요 + +03:03.620 --> 03:07.400 +자신의 전문성을 확고히 하는 과정이라고 생각하세요 + +03:07.400 --> 03:10.790 +Get이랑 더 연습하라고 하는 거예요 + +03:11.000 --> 03:20.840 +에이전트 기반 함수를 구축하는 다른 양상으로 연습하는 거죠 같은 프로젝트에서 작업하면서요 + +03:20.840 --> 03:24.740 +우린 뭘 하는지 알고 만족스럽죠 데이터가 있으니 더 큐레이팅할 필요가 + +03:24.740 --> 03:25.370 +없어요 + +03:25.370 --> 03:29.180 +그게 우리가 하려는 일에 동기를 부여해주면 좋겠네요 + +03:29.750 --> 03:36.170 +좋아요, 그럼 지체 없이 환경 변수를 가져오기 실행할게요 익숙한 얼굴 껴안기로 + +03:36.200 --> 03:38.300 +로그인하고요 + +03:38.300 --> 03:44.900 +훈련 데이터를 로드하겠습니다 아까 있던 피클 파일이죠 일을 단순화하기 + +03:44.930 --> 03:47.930 +위해 훈련 데이터를 로드했어요 + +03:47.930 --> 03:53.180 +첫 번째 훈련 데이터 포인트 연료 펌프 모듈을 기억하실 거예요 + +03:53.180 --> 03:56.930 +기억하신다면 프롬프트 호출할 수 있어요 + +03:56.930 --> 04:00.980 +Get Frompts는 훈련에 사용된 익숙한 프롬프트죠 + +04:00.980 --> 04:02.660 +이게 얼마짜리인지 말이에요 + +04:02.660 --> 04:07.280 +가격과 가치가 여기 있네요 + +04:07.280 --> 04:08.210 +네 + +04:08.240 --> 04:11.760 +그동안 크로마 데이터스토어를 만들 거예요 + +04:11.760 --> 04:20.010 +기억하실지 모르겠지만 5주 차에 이 스토어를 만들 때 123개의 문서가 있었어요 + +04:20.010 --> 04:29.340 +제 기억이 맞는다면 123개의 덩어리가 우리 문서에서 빼냈죠 우리가 사용하도록 보장하는 가상 + +04:29.370 --> 04:31.740 +회사의 문서였어요 + +04:31.740 --> 04:36.840 +이번엔 400,000개의 문서가 있는 크로마 데이터스토어를 만들 거예요 + +04:36.840 --> 04:37.830 +많네요 + +04:37.890 --> 04:38.940 +부수지 않을 거예요 + +04:38.970 --> 04:39.840 +당연히 안 해도 되죠 + +04:39.870 --> 04:41.610 +이미 작은 조각이에요 + +04:41.760 --> 04:46.530 +각각의 제품은 각각의 아이템으로 채도에 들어갈 거예요 + +04:46.800 --> 04:53.910 +이 랙 파이프라인에는 랑체인을 쓰지 않을 거예요 직접 만드는 + +04:53.910 --> 04:56.190 +게 더 좋거든요 + +04:56.280 --> 05:00.270 +랑 사슬은 유용한 추상적 개념을 제공하지만 꼭 필요한 건 아니에요 + +05:00.300 --> 05:05.970 +현재 수준에 이르면 그냥 LIms로 작업하면 + +05:05.970 --> 05:12.750 +됩니다. 이런 추상화 계층이 꼭 필요하진 않아요. 
+ +05:13.110 --> 05:15.900 +그렇게 할 거예요 + +05:15.990 --> 05:25.560 +채도 데이터베이스를 만들 거예요 여기 어디쯤에 호출했죠 + +05:26.520 --> 05:30.810 +그 상표는 아까 뺐을 거예요 + +05:30.810 --> 05:32.190 +지금 당장요 + +05:32.220 --> 05:33.330 +미안해요 + +05:33.600 --> 05:42.210 +여기에 다른 상수 DB = 제품_벡터 스토어를 추가할게요 + +05:42.930 --> 05:47.760 +쓸모 있게 쓰려고 이 공책을 치우다가 너무 많이 치웠어요 + +05:48.150 --> 05:50.070 +철자가 맞는다면 상품이 있어요 + +05:50.070 --> 05:51.090 +벡터 스토어요 + +05:51.120 --> 05:52.620 +됐어요 + +05:52.710 --> 05:55.710 +다시 돌아가죠 + +05:55.740 --> 05:57.270 +잠시 우회한 거죠 + +05:57.270 --> 06:00.840 +그 경로로 크로마 데이터 저장소를 만들 거예요 + +06:00.870 --> 06:02.070 +됐어요 + +06:02.220 --> 06:06.450 +존재하는지는 확인하고 이미 존재한다면 삭제하죠 + +06:06.540 --> 06:07.680 +그건 안 할 거예요 + +06:07.680 --> 06:10.950 +제가 만든 벡터 스토어를 삭제할 거예요 + +06:10.950 --> 06:13.170 +비트를 만드는 데 시간이 좀 걸려요 + +06:13.230 --> 06:15.060 +그렇게 오래는 아니지만요 + +06:15.060 --> 06:17.940 +하지만 직접 운영하게 해드리죠 + +06:18.450 --> 06:23.670 +그래서 새로운 유형의 엠베딩을 사용할 거예요 + +06:23.670 --> 06:27.990 +문장 변압기라는 걸 사용할 거예요 + +06:27.990 --> 06:34.800 +얼굴을 감싸는 유용한 모델로 텍스트 덩어리를 384차원으로 매핑하죠 + +06:34.920 --> 06:38.310 +시맨틱 검색 같은 것에도 이상적이죠 + +06:38.310 --> 06:41.640 +벡터 내장 기능을 만드는 변압기 모델이죠 + +06:41.640 --> 06:47.130 +예전에 랙 파이프라인을 만들 때 오픈AI 엠디딩을 사용했던 걸 기억하시나요? 
+ +06:47.130 --> 06:52.980 +그래서 이 두 가지를 함께 비교해 보면 서로 껴안는 얼굴 1번은 입체감이 + +06:52.980 --> 06:54.330 +달라요 + +06:54.330 --> 07:00.240 +오픈AI의 삽입 방식만큼 정확하진 않을 거예요 + +07:00.240 --> 07:03.690 +하지만 무료에 빠르고 로컬에서 실행할 수 있죠 + +07:03.690 --> 07:08.250 +마지막 포인트는 아주 중요합니다 왜냐하면 어떤 경우에는 데이터가 박스를 떠나지 않기를 + +07:08.250 --> 07:09.420 +원할 수도 있거든요 + +07:09.420 --> 07:13.650 +예를 들어, 제가 5주 차에 배정했던 프로젝트를 작업하고 + +07:13.680 --> 07:19.220 +있다면 이메일 같은 것으로 개인 랙 데이터베이스를 구축하고 싶을 거예요 + +07:19.220 --> 07:23.990 +안전한 방법이지만 오픈라이로 보내지 않고 직접 보내는 게 나을 수도 있습니다 + +07:23.990 --> 07:28.220 +이렇게 하면 상자에서 꺼내지 않고도 쉽게 전송할 수 있죠 + +07:28.220 --> 07:31.100 +안는 얼굴에서 모델을 로딩해요 + +07:31.130 --> 07:35.960 +이 경고는 무시해도 될 것 같아요 미래에는 뭔가 달라져야 한다는 + +07:35.990 --> 07:36.740 +경고죠 + +07:37.010 --> 07:42.920 +이 모델을 생성하고 나면 Model.in코드를 호출할 수 있어요 + +07:42.950 --> 07:47.000 +텍스트 문서 목록을 제출해요 + +07:47.000 --> 07:49.220 +NFF 배열을 얻게 될 거예요. + +07:49.220 --> 07:54.590 +첫 번째 것을 뽑고 벡터를 get 할 수 있어요 + +07:54.590 --> 07:55.550 +꽤 빨랐어요 + +07:55.550 --> 07:57.680 +대량으로 사용해도 효과적이죠 + +07:57.680 --> 08:06.080 +이 벡터의 길이를 보면 384차원인 걸 알 수 있어요 + +08:06.110 --> 08:08.450 +일단 출력해서 보죠 + +08:08.450 --> 08:09.920 +거대한 벡터가 될 거예요 + +08:09.920 --> 08:10.490 +저기 있네요 + +08:10.490 --> 08:18.260 +벡터는 어떤 식으로든 숫자로 나타내는데 LM에 따라 다차원적인 + +08:18.260 --> 08:21.260 +반사로 나타나죠 + +08:21.470 --> 08:27.770 +그래서 한번 해 보고 감을 잡아 볼 가치가 있어요 + +08:28.430 --> 08:33.920 +이제 이 유틸리티 메서드 설명을 작성하겠습니다 항목 하나를, 우리 항목 중 + +08:33.920 --> 08:36.950 +하나를 그냥 텍스트 덩어리로 바꾸는 거죠 + +08:36.950 --> 08:41.600 +보여드리기 위해 실행해 볼게요 기억하실지 모르겠지만 첫 번째 트레이닝 포인트 + +08:41.600 --> 08:45.320 +같은 걸 보고 프롬프트를 보면 프롬프트는 이렇게 생겼어요 + +08:45.350 --> 08:54.320 +만약 그것 대신에 해당 항목에 대한 설명을 호출하면 같은 것이 나오지만 앞에 있는 것들 없이 + +08:54.320 --> 08:59.870 +가격 없이요. 그냥 제품에 대한 설명만 있는 거에요. 
+ +09:00.050 --> 09:05.660 +이제 벡터 데이터 저장소에 Put을 할 거예요 + +09:05.900 --> 09:08.720 +코드를 실행하면 시간이 좀 걸려요 + +09:08.720 --> 09:13.910 +완료될 때쯤엔 벡터 데이터 저장소에 있을 테니 여러분은 준비가 된 거죠 + +09:13.910 --> 09:16.820 +이미 실행했어요 벡터 데이터 저장소가 이미 생성됐죠 + +09:16.820 --> 09:18.380 +왼쪽을 보시면 돼요 + +09:18.380 --> 09:19.760 +지금 하세요 + +09:19.760 --> 09:22.580 +다 끝나면 다음 영상에서 만나요 diff --git a/week5/community-contributions/subtitles/srts/59670073/en_US.srt b/week5/community-contributions/subtitles/srts/59670073/en_US.srt new file mode 100755 index 0000000..68b7a96 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670073/en_US.srt @@ -0,0 +1,331 @@ +WEBVTT + +00:02.030 --> 00:07.640 +Okay, it's time to complete the Rag workflow in our Jupyter Lab on day 2.3. + +00:07.670 --> 00:13.490 +We've got this function Getprice that you are very familiar with now, which is the super simple one + +00:13.490 --> 00:17.690 +that takes a string and plucks out the floating point number from it. + +00:17.690 --> 00:21.620 +And now the function for GPT for mini. + +00:21.830 --> 00:26.600 +What we do is we say documents and prices is fine similares of items. + +00:26.600 --> 00:27.860 +So it's passed in an item. + +00:27.860 --> 00:30.200 +We find what is similar to that item. + +00:30.200 --> 00:35.570 +And then we call OpenAI chat dot completions dot create. + +00:35.660 --> 00:38.210 +We're going to specify GPT for mini. + +00:38.240 --> 00:42.290 +We're going to pass in the messages using the function messages for. + +00:42.410 --> 00:43.400 +And we'll give it a seed. + +00:43.400 --> 00:45.110 +We want back five tokens. + +00:45.110 --> 00:50.690 +And what comes back we say response dot choices zero dot message content. + +00:50.690 --> 00:57.050 +And then we call get price on the reply to pluck out the number from that. + +00:57.170 --> 00:59.420 +And that's all there should be to it. + +00:59.420 --> 01:02.670 +Let's try it out on one before we before we go crazy here. 
+ +01:02.970 --> 01:13.650 +Uh, so we're going to say, um, let's see, uh, GPT four mini rag and we will pass in the test one + +01:13.650 --> 01:15.810 +that we just looked at that clutch thing. + +01:16.110 --> 01:18.360 +Uh, and let's see what we get back. + +01:19.470 --> 01:30.000 +Uh, we get back $201 and 45, uh, and let's see what what actually, it should be test one dot price. + +01:32.160 --> 01:33.540 +Uh, it's pretty close. + +01:33.540 --> 01:34.530 +It's pretty close. + +01:34.530 --> 01:35.610 +Look at that. + +01:35.910 --> 01:39.930 +Uh, so, uh, it's great the way it's so very quick to run. + +01:39.930 --> 01:41.880 +And you can see everything that's happened there. + +01:41.880 --> 01:44.610 +It looked up five similar items. + +01:44.610 --> 01:53.370 +It constructed a query, uh, for OpenAI that incorporated the context of those five similar items in + +01:53.370 --> 01:55.230 +the prompt that it sent to OpenAI. + +01:55.230 --> 02:00.120 +And then it made the call, got back the response, and stripped out the price and told us how much + +02:00.150 --> 02:00.600 +it cost. + +02:00.600 --> 02:02.640 +And it got pretty close. + +02:02.650 --> 02:03.700 +So with that. + +02:03.730 --> 02:06.610 +Do you remember our tester from before? + +02:06.640 --> 02:12.370 +Our framework for testing the 250, uh, top, uh, test results. + +02:12.400 --> 02:14.860 +And you remember where we came out with our model? + +02:14.890 --> 02:16.810 +Uh, it's definitely scary to think. + +02:16.840 --> 02:21.490 +Okay, but but now we've given a huge benefit to the frontier model. + +02:21.490 --> 02:23.410 +Uh, how can it do with this? + +02:23.410 --> 02:28.000 +So we can run it at this point, and we will see how it does? + +02:28.060 --> 02:31.690 +Uh, and off it will go while it's doing that. + +02:31.690 --> 02:34.750 +And you can already see a lot of greens in here. + +02:34.810 --> 02:38.110 +Uh, it's, of course, getting a lot of information. 
+ +02:38.110 --> 02:42.640 +I want to show you what I've done over here in the agents folder. + +02:43.120 --> 02:49.390 +Uh, in the agents folder, I have made something called Frontier Agent, and this is now taking exactly + +02:49.390 --> 02:54.130 +the code we've just looked through and turning it into production ready code. + +02:54.130 --> 03:00.730 +And the way I've done that is that you can see from this sea of red that I've been adding in comments, + +03:00.730 --> 03:07.190 +which is an important step and I've been putting in type hints so that we get to see exactly what we're + +03:07.190 --> 03:08.660 +doing along the way. + +03:08.810 --> 03:13.190 +And you can see as we go through it's it's very simple. + +03:13.190 --> 03:14.330 +It's the same stuff. + +03:14.330 --> 03:21.410 +Make context creates the context to insert into the prompt messages for creates the message list in + +03:21.440 --> 03:23.330 +that open AI format. + +03:23.450 --> 03:28.940 +You may be familiar with this way of specifying putting a comment against each of the parameters and + +03:28.940 --> 03:30.500 +then what gets returned. + +03:30.830 --> 03:37.670 +You can also see some more sophisticated type hints here that specify when we have lists of strings, + +03:37.760 --> 03:44.360 +and a list of dictionaries with the key of a string and a value of a string, and then find similares, + +03:44.480 --> 03:49.820 +which returns a list of items similar to the given one by looking in chroma get price. + +03:49.820 --> 03:50.600 +We know it well. + +03:50.600 --> 03:54.410 +And then finally the price method here. + +03:54.650 --> 04:02.900 +So all of this is now part of a class called Frontier Agent, which is a subclass of my superclass agent, + +04:03.050 --> 04:07.850 +and it's packaged up nicely so that we can use it for production purposes. 
+ +04:07.850 --> 04:13.730 +And so again, as part of your learning as you as you see that difference between moving from a Jupyter + +04:13.730 --> 04:18.770 +world into more production ready code, this is the kind of jump you would make. + +04:18.800 --> 04:25.580 +And typically you'd be building this in a tool like VSCode or in PyCharm or something like that. + +04:25.610 --> 04:31.400 +I tend to love PyCharm, but you can also use JupyterLab for this as we've got it right here. + +04:31.400 --> 04:36.800 +You can write code in Jupyter Lab, but there are ideas that are designed for this kind of thing and + +04:36.800 --> 04:37.580 +make it better. + +04:37.580 --> 04:42.680 +And we'll do stuff like filling in your, your, your prompts for your for what comments to write and + +04:42.680 --> 04:43.250 +things like that. + +04:43.250 --> 04:46.580 +So it's worth using an IDE when you get to this point. + +04:47.300 --> 04:48.260 +All right. + +04:48.290 --> 04:53.900 +Hopefully I've jabbered away for long enough to see if we're doing well here. + +04:55.250 --> 04:56.210 +It's getting close. + +04:56.210 --> 04:56.990 +It's getting close. + +04:56.990 --> 04:58.670 +And there we have it. + +04:58.790 --> 05:02.900 +Okay, so here's what we notice. + +05:02.900 --> 05:10.980 +First of all, it is substantially better than GPT four mini and GPT four without Rag. + +05:10.980 --> 05:18.000 +We have moved the needle in a big way, but we're still at the point that our fine tuned frontier model + +05:18.000 --> 05:21.330 +that we deployed up there is working better. + +05:21.420 --> 05:26.610 +So we still managed to beat even a rag pipeline based GPT four. + +05:26.880 --> 05:31.860 +But it is great to see that the Rag pipeline does well and gets this close. + +05:32.130 --> 05:35.400 +So and you can see what looks like a beautiful chart here. + +05:35.700 --> 05:37.740 +Uh, so that was great fun. 
+ +05:37.800 --> 05:41.880 +Uh, and I hope you've enjoyed that and see the same thing. + +05:41.880 --> 05:46.290 +And do spend time looking through this code, see the prompts that are created, and print out some + +05:46.290 --> 05:52.890 +of these prompts and satisfy yourself that we've basically, uh, in a more, more of a manual way. + +05:52.890 --> 05:59.340 +We have done what exactly what Lang chain was automating for us before with with its wrapper code. + +05:59.340 --> 06:04.440 +We have gone out there, we've taken an item, we've found five similar items. + +06:04.440 --> 06:06.760 +We've packaged that up into some context. + +06:06.760 --> 06:08.950 +And that's what we've sent to GPT four. + +06:09.220 --> 06:14.470 +And that's given us significantly better results than when we didn't do that. + +06:14.500 --> 06:16.960 +Another thing to experiment with, of course I tried. + +06:16.960 --> 06:18.970 +I was passing in five. + +06:19.030 --> 06:20.020 +Results. + +06:20.020 --> 06:22.090 +That is what you would call a hyperparameter. + +06:22.090 --> 06:24.130 +We can explore what would happen if you make that ten. + +06:24.160 --> 06:25.630 +What would happen if you make it two? + +06:25.750 --> 06:28.810 +Having it at five means that this is still super cheap. + +06:28.810 --> 06:30.820 +It's a cent or two to run all of this. + +06:31.120 --> 06:31.750 +Of course you should. + +06:31.780 --> 06:34.510 +You should check for yourself in case pricing may vary, but. + +06:34.510 --> 06:35.980 +But it's very cheap. + +06:36.160 --> 06:38.590 +Um, if you make it ten, then it's going to start to. + +06:38.620 --> 06:39.850 +It will cost double that. + +06:40.030 --> 06:43.600 +Uh actually not not less than double that, but it will cost a little bit more. + +06:43.600 --> 06:50.590 +And so it's worth, uh, um, experimenting with these things within reason and see what you think, + +06:50.590 --> 06:51.700 +how it does. 
+ +06:51.730 --> 07:01.030 +Anyway, with that, that concludes our rag pipeline, and I will see you for the next part, which + +07:01.030 --> 07:06.670 +is when we move to 2.4 and start talking about random forests again and ensemble models. + +07:06.700 --> 07:07.600 +See you then. diff --git a/week5/community-contributions/subtitles/srts/59670073/ja_JP.srt b/week5/community-contributions/subtitles/srts/59670073/ja_JP.srt new file mode 100755 index 0000000..fd24f28 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670073/ja_JP.srt @@ -0,0 +1,298 @@ +WEBVTT + +00:02.030 --> 00:07.640 +さて、 2日目のJupyter LabでRagのワークフローを完成させる時が来た。 3. + +00:07.670 --> 00:13.490 +このGetprice関数は、 文字列を受け取ってそこから浮動小数点数を取り出すという超シンプルなもので、 + +00:13.490 --> 00:17.690 +もうお馴染みですね。 + +00:17.690 --> 00:21.620 +そして今度はGPT for miniの機能だ。 + +00:21.830 --> 00:26.600 +私たちがしていることは、 書類と値段を言うことです。 + +00:26.600 --> 00:27.860 +だからアイテムで渡される。 + +00:27.860 --> 00:30.200 +その項目に似たものを見つける。 + +00:30.200 --> 00:35.570 +そしてOpenAI chat dot completions dot createと呼ぶ。 + +00:35.660 --> 00:38.210 +ミニにはGPTを指定する。 + +00:38.240 --> 00:42.290 +メッセージはmessages forという関数を使って渡す。 + +00:42.410 --> 00:43.400 +そして種を与える。 + +00:43.400 --> 00:45.110 +5トークンを返してほしい。 + +00:45.110 --> 00:50.690 +そして返ってきたものは、 レスポンス・ドット・チョイス・ゼロ・ドット・メッセージ・コンテンツと言う。 + +00:50.690 --> 00:57.050 +そして、 そこから番号を抜き出すために、 返信に価格を求める電話をかける。 + +00:57.170 --> 00:59.420 +それがすべてだ。 + +00:59.420 --> 01:02.670 +ここで夢中になる前に、 ひとつ試してみよう。 + +01:02.970 --> 01:15.810 +ええと、 じゃあ、 GPTの4番ミニボロで、 今見たクラッチのテスト1回でパスします。 + +01:16.110 --> 01:18.360 +何が返ってくるか見てみよう。 + +01:19.470 --> 01:30.000 +ええと、 201ドルと45ドルが返ってきました。 では、 実際のところ、 テスト・ワン・ドット・プライスはいくらなのか見てみましょう。 + +01:32.160 --> 01:33.540 +かなり近いよ。 + +01:33.540 --> 01:34.530 +かなり近いよ。 + +01:34.530 --> 01:35.610 +あれを見ろ。 + +01:35.910 --> 01:39.930 +とても素早く走れるのが素晴らしい。 + +01:39.930 --> 01:41.880 +そして、 そこで起こったことのすべてを見ることができる。 + +01:41.880 --> 01:44.610 +似たような項目を5つも調べた。 + +01:44.610 --> 01:55.230 +OpenAIに送信するプロンプトには、 類似した5つのアイテムのコンテキストを組み込んだ。 + 
+01:55.230 --> 02:00.600 +そして電話をかけ、 返事が返ってきて、 値段を抜き出し、 いくらかかったかを教えてくれた。 + +02:00.600 --> 02:02.640 +そして、 かなり近づいた。 + +02:02.650 --> 02:03.700 +そういうことだ。 + +02:03.730 --> 02:06.610 +以前のテスターを覚えているだろうか? + +02:06.640 --> 02:12.370 +250の、 えー、 トップの、 えー、 テスト結果をテストするための私たちのフレームワーク。 + +02:12.400 --> 02:14.860 +そして、 私たちがモデルを発表した場所を覚えているだろうか? + +02:14.890 --> 02:16.810 +あー、 確かに考えるのは怖いね。 + +02:16.840 --> 02:21.490 +しかし、 フロンティア・モデルには大きなメリットがある。 + +02:21.490 --> 02:23.410 +ええと、 これでどうなるんですか? + +02:23.410 --> 02:28.000 +では、 この時点で走らせて、 どうなるか見てみようか? + +02:28.060 --> 02:31.690 +そうこうしているうちに出発するんだ。 + +02:31.690 --> 02:34.750 +そして、 ここにはすでにたくさんのグリーンが見える。 + +02:34.810 --> 02:38.110 +ええと、 もちろん、 多くの情報を得るためだ。 + +02:38.110 --> 02:42.640 +エージェント・フォルダーで私がやったことをお見せしたい。 + +02:43.120 --> 02:49.390 +Agentフォルダの中に、 Frontier Agentというものを作りました。 これは、 + +02:49.390 --> 02:54.130 +今見てきたコードをそのまま本番用のコードにしたものです。 + +02:54.130 --> 03:00.730 +この赤い海を見ればわかるように、 コメントを入れている。 これは重要なステップであり、 + +03:00.730 --> 03:08.660 +ヒントを入れることで、 途中で何をしているのかがわかるようにしている。 + +03:08.810 --> 03:13.190 +そして、 見ていくとわかるが、 とてもシンプルだ。 + +03:13.190 --> 03:14.330 +同じものだよ。 + +03:14.330 --> 03:23.330 +Make context は、 プロンプトメッセージに挿入するコンテキストを作成し、 そのオープンAIフォーマットでメッセージリストを作成する。 + +03:23.450 --> 03:30.500 +各パラメーターにコメントを付けて、 何が返されるかを指定するこの方法には慣れているかもしれない。 + +03:30.830 --> 03:37.670 +また、 文字列のリストや、 文字列をキーとし文字列を値とする辞書のリスト、 そしてクロマ・ゲット・プライスで検索し、 + +03:37.760 --> 03:49.820 +指定されたものに類似したアイテムのリストを返すfind similaresを指定する、 より洗練された型ヒントもここで見ることができる。 + +03:49.820 --> 03:50.600 +私たちはそれをよく知っている。 + +03:50.600 --> 03:54.410 +そして最後に、 このプライスメソッドだ。 + +03:54.650 --> 04:02.900 +スーパークラスであるエージェントのサブクラスであるFrontier + +04:03.050 --> 04:07.850 +Agentというクラスの一部である。 + +04:07.850 --> 04:13.730 +そしてまた、 Jupyterの世界からよりプロダクション・レディなコードに移行することの違いを理解するための学習の一環として、 + +04:13.730 --> 04:18.770 +このようなジャンプをするのです。 + +04:18.800 --> 04:25.580 +そして通常、 VSCodeやPyCharmのようなツールでこれを構築することになる。 + +04:25.610 --> 04:31.400 +私はPyCharmを愛用することが多いが、 JupyterLabを使うこともできる。 + +04:31.400 --> 04:37.580 +Jupyter 
Labでコードを書くこともできるが、 この種のことのために設計され、 より良くするアイデアがある。 + +04:37.580 --> 04:43.250 +そして、 どんなコメントを書けばいいか、 あなたのプロンプトを埋めていくようなこともする。 + +04:43.250 --> 04:46.580 +だから、 ここまで来たらIDEを使う価値がある。 + +04:47.300 --> 04:48.260 +分かった。 + +04:48.290 --> 04:53.900 +うまくいっているのかどうか、 十分な時間、 おしゃべりできたと思う。 + +04:55.250 --> 04:56.210 +もうすぐだ。 + +04:56.210 --> 04:56.990 +もうすぐだ。 + +04:56.990 --> 04:58.670 +そうだ。 + +04:58.790 --> 05:02.900 +さて、 ここで気づいたことがある。 + +05:02.900 --> 05:10.980 +まず第一に、 GPT4ミニやラグなしのGPT4よりも大幅に優れている。 + +05:10.980 --> 05:21.330 +私たちは大きく舵を切ったが、 まだ、 そこで展開したフロンティア・モデルの微調整がうまくいっている段階だ。 + +05:21.420 --> 05:26.610 +だから、 ボロパイプラインをベースにしたGPTの4人にもまだ勝てるんだ。 + +05:26.880 --> 05:31.860 +しかし、 ラグ・パイプラインが健闘し、 ここまで近づいたのは素晴らしいことだ。 + +05:32.130 --> 05:35.400 +だから、 ここに美しいチャートのようなものを見ることができる。 + +05:35.700 --> 05:37.740 +とても楽しかった。 + +05:37.800 --> 05:41.880 +ああ、 それを楽しんで、 同じものを見てほしい。 + +05:41.880 --> 05:46.290 +このコードに目を通して、 作成されたプロンプトを見て、 これらのプロンプトのいくつかをプリントアウトして、 + +05:46.290 --> 05:52.890 +私たちが基本的に、 よりマニュアルに近い方法で行ってきたことを納得してください。 + +05:52.890 --> 05:59.340 +ラング・チェーンが以前自動化してくれていたことを、 まさにラッパー・コードで実現したのだ。 + +05:59.340 --> 06:04.440 +私たちは現地に赴き、 ある品物を手に取り、 似たような品物を5つ見つけた。 + +06:04.440 --> 06:06.760 +私たちはそれをいくつかの文脈にまとめました。 + +06:06.760 --> 06:08.950 +そして、 これがGPT4に送ったものだ。 + +06:09.220 --> 06:14.470 +そうすることで、 そうしなかったときよりも格段に良い結果が得られた。 + +06:14.500 --> 06:16.960 +もちろん、 試してみた。 + +06:16.960 --> 06:18.970 +5分でパスした。 + +06:19.030 --> 06:20.020 +結果 + +06:20.020 --> 06:22.090 +これがハイパーパラメーターと呼ばれるものだ。 + +06:22.090 --> 06:24.130 +その10人にしたらどうなるかを探ることができる。 + +06:24.160 --> 06:25.630 +二つにしたらどうなる? 
+ +06:25.750 --> 06:28.810 +5点ということは、 これでも超安いということだ。 + +06:28.810 --> 06:30.820 +これだけやっても1セントか2セントだ。 + +06:31.120 --> 06:31.750 +もちろん、 そうすべきだ。 + +06:31.780 --> 06:34.510 +万が一、 価格設定が異なるかもしれないので、 自分で確認するべきだが。 + +06:34.510 --> 06:35.980 +でも、 とても安いんだ。 + +06:36.160 --> 06:38.590 +うーん、 10回にしたら、 そうなり始めるだろうね。 + +06:38.620 --> 06:39.850 +その倍はかかるだろう。 + +06:40.030 --> 06:43.600 +実際にはその倍は下らないが、 もう少し高くなる。 + +06:43.600 --> 06:51.700 +だから、 無理のない範囲でいろいろ試してみて、 どう思うか、 どうなるか試してみる価値はあると思う。 + +06:51.730 --> 07:06.670 +とにかく、 これでボロ布のパイプラインは終わり。 + +07:06.670 --> 07:06.670 +4で、 再びランダムフォレストとアンサンブルモデルについて話し始める。 + +07:06.700 --> 07:07.600 +ではまた diff --git a/week5/community-contributions/subtitles/srts/59670073/ko_KR.srt b/week5/community-contributions/subtitles/srts/59670073/ko_KR.srt new file mode 100755 index 0000000..7bc576a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670073/ko_KR.srt @@ -0,0 +1,322 @@ +WEBVTT + +00:02.030 --> 00:07.640 +이제 2일째 주피터 연구실에서 래그 워크플로우를 끝낼 시간이에요 3번요 + +00:07.670 --> 00:13.490 +이제 익숙해진 함수 Getprice가 있습니다 아주 간단한 것으로 + +00:13.490 --> 00:17.690 +문자열에서 부동 소수점 수를 뽑아내는 거죠 + +00:17.690 --> 00:21.620 +이제 미니를 위한 GPT 함수인데요 + +00:21.830 --> 00:26.600 +서류와 가격은 괜찮다고 하죠 비슷한 물건들을요 + +00:26.600 --> 00:27.860 +항목을 전달하는 거죠 + +00:27.860 --> 00:30.200 +비슷한 물건을 찾아냈어요 + +00:30.200 --> 00:35.570 +OpenAI 채팅 .완료 .Create라고 하죠 + +00:35.660 --> 00:38.210 +미니를 위해 GPT를 지정할게요 + +00:38.240 --> 00:42.290 +함수 메시지를 이용해 메시지를 전달할 거예요 + +00:42.410 --> 00:43.400 +씨앗을 뿌릴 거예요 + +00:43.400 --> 00:45.110 +5토큰을 돌려줘요 + +00:45.110 --> 00:50.690 +응답은 .선택 0.Message content라고 하죠 + +00:50.690 --> 00:57.050 +getPrice에 응답해 거기서 번호를 추출하는 거죠 + +00:57.170 --> 00:59.420 +그게 전부여야 해요 + +00:59.420 --> 01:02.670 +미치기 전에 한 번 시험해 보죠 + +01:02.970 --> 01:13.650 +GPT 4 미니 래그를 보고 방금 본 클러치 테스트를 통과할 + +01:13.650 --> 01:15.810 +거예요 + +01:16.110 --> 01:18.360 +Get in get을 해 보죠 + +01:19.470 --> 01:30.000 +201달러와 45달러를 환불받았고 이제 get att.s.pr.를 테스트해야 해요 + +01:32.160 --> 01:33.540 +거의 비슷해요 + +01:33.540 --> 01:34.530 
+비슷해요 + +01:34.530 --> 01:35.610 +보세요 + +01:35.910 --> 01:39.930 +아주 빠르게 달릴 수 있어서 좋아요 + +01:39.930 --> 01:41.880 +거기서 일어난 모든 일을 볼 수 있어요 + +01:41.880 --> 01:44.610 +비슷한 아이템이 다섯 개 검색됐어요 + +01:44.610 --> 01:55.230 +오픈AI를 위한 쿼리를 구성했습니다. 유사한 항목 5개를 오픈AI에 보낸 프롬프트에요. + +01:55.230 --> 02:00.600 +그런 다음 전화를 걸었고 응답이 왔어요 가격을 줄이고 가격을 알려줬죠 + +02:00.600 --> 02:02.640 +꽤 비슷했어요 + +02:02.650 --> 02:03.700 +자, 됐어요 + +02:03.730 --> 02:06.610 +전에 왔던 테스터 기억나요? + +02:06.640 --> 02:12.370 +250개의 테스트 결과를 시험할 프레임워크예요 + +02:12.400 --> 02:14.860 +우리 모형이 어디서 나왔는지 기억해요? + +02:14.890 --> 02:16.810 +생각만 해도 무서워요 + +02:16.840 --> 02:21.490 +하지만 개척지 모델에 큰 혜택을 줬잖아요 + +02:21.490 --> 02:23.410 +이걸로 뭘 할 수 있죠? + +02:23.410 --> 02:28.000 +이 시점에서 실행하면 어떻게 되는지 볼까요? + +02:28.060 --> 02:31.690 +그렇게 하는 동안 작동할 거예요 + +02:31.690 --> 02:34.750 +벌써 많은 녹색이 보이시죠 + +02:34.810 --> 02:38.110 +물론 많은 정보를 얻죠 + +02:38.110 --> 02:42.640 +에이전트 폴더에서 제가 한 걸 보여드리고 싶어요 + +02:43.120 --> 02:49.390 +에이전트 폴더에 프론티어 에이전트라는 걸 만들었어요 이건 우리가 방금 살펴본 + +02:49.390 --> 02:54.130 +바로 그 코드를 프로덕션 준비 코드로 바꾸고 있죠 + +02:54.130 --> 03:00.730 +보다시피 빨간 바다를 보면 주석을 추가하고 있어요 중요한 단계죠 + +03:00.730 --> 03:07.190 +힌트도 입력했어요 우리가 뭘 하는지 정확히 알 수 있게요 get + +03:07.190 --> 03:08.660 +it + +03:08.810 --> 03:13.190 +보시면 아시겠지만 아주 간단해요 + +03:13.190 --> 03:14.330 +똑같은 거예요 + +03:14.330 --> 03:21.410 +컨텍스트를 만드는 것은 열린 인공지능 형식의 메시지 목록을 만들기 위해 프롬프트 메시지를 삽입할 컨텍스트를 + +03:21.440 --> 03:23.330 +만드는 것이죠 + +03:23.450 --> 03:28.940 +익숙하실 텐데요 각각의 매개 변수에 주석을 넣고 반환되는 것을 지정하는 + +03:28.940 --> 03:30.500 +방법이죠 + +03:30.830 --> 03:37.670 +보다 복잡한 형식 힌트도 보실 수 있습니다 문자열 목록이 언제 있는지 지정하고 문자열 키와 + +03:37.760 --> 03:44.360 +문자열 값을 가진 사전 목록도 지정하고 유사한 걸 찾아요 주어진 것과 유사한 아이템 목록을 + +03:44.480 --> 03:49.820 +반환하는 거죠 chroma getPrice를 보면서요 + +03:49.820 --> 03:50.600 +잘 알죠 + +03:50.600 --> 03:54.410 +마지막으로 가격 메서드예요 + +03:54.650 --> 04:02.900 +프론티어 에이전트라는 클래스의 일부예요 제 슈퍼클래스 에이전트의 서브클래스죠 + +04:03.050 --> 04:07.850 +잘 포장돼 있어서 생산 목적으로 사용할 수 있어요 + +04:07.850 --> 04:13.730 +다시 한 번 학습의 일부로 주피터 세상에서 생산 준비 코드로의 + 
+04:13.730 --> 04:18.770 +이동을 보면 이런 종류의 점프를 하게 되죠 + +04:18.800 --> 04:25.580 +일반적으로 VSCode나 PyCam 같은 도구에서 이걸 빌드하죠 + +04:25.610 --> 04:31.400 +전 파이컴을 좋아하지만 주피터랩을 이용해도 돼요 여기 있으니까요 + +04:31.400 --> 04:36.800 +주피터 랩에서 코드를 쓸 수 있지만 이런 걸 위해 설계되고 더 좋게 만드는 아이디어가 + +04:36.800 --> 04:37.580 +있어요 + +04:37.580 --> 04:43.250 +어떤 코멘트를 쓸지 프롬프트 같은 것도 채워 넣고요 + +04:43.250 --> 04:46.580 +이 지점까지 왔다면 IDE를 사용하는 게 좋아요. + +04:47.300 --> 04:48.260 +좋아요 + +04:48.290 --> 04:53.900 +제가 너무 떠들었나 봐요 + +04:55.250 --> 04:56.210 +거의 다 왔어요 + +04:56.210 --> 04:56.990 +거의 다 왔어요 + +04:56.990 --> 04:58.670 +다 됐어요 + +04:58.790 --> 05:02.900 +네, 이걸 보세요 + +05:02.900 --> 05:10.980 +우선 GPT 4 미니와 래그 없이도 훨씬 좋은 차예요 + +05:10.980 --> 05:18.000 +바늘을 크게 움직였지만 여전히 우리가 배치한 미세 조정 모델이 + +05:18.000 --> 05:21.330 +더 잘 작동하는 상태예요 + +05:21.420 --> 05:26.610 +랙 파이프라인 기반의 GPT 4조차도 이겼어요 + +05:26.880 --> 05:31.860 +래그 파이프라인이 잘 작동해서 이렇게 가까워진 걸 보니 좋네요 + +05:32.130 --> 05:35.400 +여기 아름다운 해도가 보이시죠 + +05:35.700 --> 05:37.740 +정말 재미있었어요 + +05:37.800 --> 05:41.880 +여러분도 즐거우셨길 바라고 같은 걸 보셨길 바라요 + +05:41.880 --> 05:46.290 +이 코드를 훑어보며 생성된 프롬프트를 보고 일부를 + +05:46.290 --> 05:52.890 +프린트해 자신을 만족시키세요 기본적으로 좀 더 수동적인 방법으로요 + +05:52.890 --> 05:59.340 +랭 체인이 자동화한 걸 정확히 했어요 래퍼 코드로요 + +05:59.340 --> 06:04.440 +현장에 가서 물건을 하나 가져왔고 비슷한 물건을 다섯 개 찾았어요 + +06:04.440 --> 06:06.760 +그걸 몇 가지 컨텍스트에 포장했어요 + +06:06.760 --> 06:08.950 +GPT 4에 보낸 자료예요 + +06:09.220 --> 06:14.470 +그렇게 하지 않았을 때보다 훨씬 좋은 결과를 얻었죠 + +06:14.500 --> 06:16.960 +실험해볼 만한 또 다른 방법이죠 물론 시도는 해봤어요 + +06:16.960 --> 06:18.970 +5분 후에 지나갔어요 + +06:19.030 --> 06:20.020 +결과요 + +06:20.020 --> 06:22.090 +하이퍼파라미터라고 부르는 거예요 + +06:22.090 --> 06:24.130 +10점이 되면 어떻게 될지 알아보죠 + +06:24.160 --> 06:25.630 +두 개면 어떻게 될까요? 
+ +06:25.750 --> 06:28.810 +5달러면 정말 싼 거예요 + +06:28.810 --> 06:30.820 +1, 2센트면 다 운영할 수 있어요 + +06:31.120 --> 06:31.750 +당연히 그래야죠 + +06:31.780 --> 06:34.510 +가격이 다를 수 있으니 직접 확인해 보세요 + +06:34.510 --> 06:35.980 +하지만 아주 싸요 + +06:36.160 --> 06:38.590 +10분으로 하면 그렇게 될 거예요 + +06:38.620 --> 06:39.850 +두 배는 더 들 거예요 + +06:40.030 --> 06:43.600 +less는 아니지만 비트 요금은 좀 더 들 거예요 + +06:43.600 --> 06:50.590 +그러니 합당한 범위 내에서 실험해 보고 어떻게 반응하는지 + +06:50.590 --> 06:51.700 +봐야죠 + +06:51.730 --> 07:01.030 +어쨌든 이것으로 래그 파이프라인은 끝났습니다 다음 부분에서 뵙죠 + +07:01.030 --> 07:06.670 +2로 이동할 때요 4번, 다시 아무 숲이나 얘기하기 앙상블 모델 얘기하기 시작하세요 + +07:06.700 --> 07:07.600 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59670087/en_US.srt b/week5/community-contributions/subtitles/srts/59670087/en_US.srt new file mode 100755 index 0000000..2157b5f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670087/en_US.srt @@ -0,0 +1,334 @@ +WEBVTT + +00:00.350 --> 00:05.270 +And welcome to part four of day two of week eight. + +00:05.330 --> 00:11.210 +Uh, there's a lot happening this week, and I have to tell you that this is the heftiest of the parts + +00:11.210 --> 00:12.410 +for today. + +00:12.530 --> 00:14.900 +Uh, let's get right into it. + +00:14.930 --> 00:19.970 +You remember, what we're going to do today is we're going to build some other kinds of prices and put + +00:19.970 --> 00:26.690 +them all together to improve the accuracy of our ability to estimate the value of products. + +00:26.810 --> 00:31.550 +And again, the reason we're doing this is an opportunity to revise on the various techniques we've + +00:31.550 --> 00:36.830 +learned about and solidify some of the learning, um, as well as being a fun exercise. + +00:37.070 --> 00:42.920 +Uh, we're going to be using this random forest type of machine learning that we experimented with in + +00:42.920 --> 00:43.670 +week six. + +00:43.670 --> 00:46.310 +That is a traditional ML, but we're going to be doing it differently. 
+ +00:46.310 --> 00:53.510 +We're going to be using the vector embeddings that we have in chroma that are based on the, uh, hugging + +00:53.540 --> 01:00.560 +face sentence transformer Vectorizer, which means that we're using Transformers and, uh, traditional + +01:00.560 --> 01:01.810 +machine learning together. + +01:01.810 --> 01:06.370 +So we'll start by doing some imports and set some constants. + +01:06.370 --> 01:13.870 +This time I do remember to set the product vector, store constant, load the environment, load in + +01:13.870 --> 01:15.280 +the test data. + +01:15.280 --> 01:20.710 +We don't need the training data because it's sitting in Cromer and connect to Cromer itself, to the + +01:20.710 --> 01:26.200 +products collection in Cromer that we put in the variable collection and then we load in from Cromer. + +01:26.230 --> 01:31.900 +The results, which then gives us our vectors, our documents and our prices. + +01:32.530 --> 01:38.170 +So with that we are now going to look at random forests again. + +01:38.260 --> 01:42.640 +And you may remember this line which was how we did it last time. + +01:42.640 --> 01:45.070 +We train a random forest regressor. + +01:45.280 --> 01:50.650 +Uh, this here njobs is how many concurrent processes can run. + +01:50.650 --> 01:54.610 +And if you put minus one, it will use up your entire machine. + +01:54.610 --> 01:56.290 +It will run a process for every core. + +01:56.290 --> 01:58.210 +And it really hammers my box. + +01:58.210 --> 02:05.170 +And my M1 Mac takes about an hour to run this and I ran it already, so you should, uh, time it, + +02:05.170 --> 02:09.160 +uh, to to fit your box and only use -one inch here. + +02:09.160 --> 02:13.450 +If you can afford to step away and let your machine hum for a little bit. + +02:13.540 --> 02:21.700 +Once that's done, you can then save the model weights to a file using this useful utility Joblib dot + +02:21.700 --> 02:25.660 +dump, which is from again scikit learn. 
+ +02:25.660 --> 02:31.570 +And you can then provide a model and then just save it, save those model weights. + +02:31.570 --> 02:36.550 +And what I then do is load it back in again so that I don't have to run the hours worth of training. + +02:36.910 --> 02:45.820 +Now, what I do here is I load in three agent objects the specialist agent, the frontier agent, and + +02:45.820 --> 02:47.560 +the random forest agent. + +02:47.590 --> 02:51.370 +Let's take a quick look in the agents folder and look at those. + +02:51.400 --> 02:54.460 +The specialist agent is one that we already looked at before. + +02:54.460 --> 02:55.450 +We already wrote this. + +02:55.450 --> 02:57.970 +This is Productionized code. + +02:58.060 --> 03:04.470 +Uh, that is uh, basically in the init in the in the constructor for the specialized agent, we use + +03:04.470 --> 03:05.820 +the we call modal. + +03:05.850 --> 03:08.100 +By saying modal class lookup. + +03:08.100 --> 03:13.470 +And we provide our service name and class name and we instantiate that class. + +03:13.470 --> 03:19.320 +And then when we're actually calling this to price, we simply say self dot price dot price, which + +03:19.320 --> 03:22.170 +is the function, the modal function dot remote. + +03:22.170 --> 03:24.300 +And that you'll remember is how we tell modal. + +03:24.300 --> 03:25.410 +We don't want to run this locally. + +03:25.410 --> 03:30.960 +We want to call out to the cloud, run it remotely and bring back the results, and then it returns + +03:30.960 --> 03:31.230 +it. + +03:31.230 --> 03:34.350 +So this is the specialist agent that we looked at before. + +03:34.380 --> 03:40.560 +If we look at the frontier agent you'll see what's here is the code that we went through last time as + +03:40.560 --> 03:47.310 +I said polished up, made to look nice with comments with the parameters are identified. + +03:47.310 --> 03:48.570 +There's docstring. + +03:48.600 --> 03:50.130 +There's the docstrings here. 
+ +03:50.130 --> 03:54.330 +There's type hinting to describe what kinds of objects we're working with. + +03:54.570 --> 04:00.600 +And this is the kind of process that you would go through to take code from being Jupyter Notebook code + +04:00.600 --> 04:02.400 +to being ready for production. + +04:02.400 --> 04:08.750 +And typically you wouldn't write this in JupyterLab, you would be doing this in an IDE like VSCode + +04:08.750 --> 04:10.970 +or PyCharm is my favorite. + +04:11.240 --> 04:15.500 +And you would build it there because it will do things like help you with the type hints and fill in + +04:15.500 --> 04:16.520 +some of this gumpf. + +04:16.880 --> 04:19.730 +But you can you can use JupyterLab if you wish. + +04:20.030 --> 04:22.730 +Um, so this is the frontier agent. + +04:22.730 --> 04:27.410 +And now if I look at the random forest agent, this is super simple. + +04:27.410 --> 04:33.800 +In the constructor, we first of all create our the sentence transformer, the model that we use to + +04:33.830 --> 04:35.180 +create a vector. + +04:35.180 --> 04:39.170 +And then we load in the model that we just saved a second ago. + +04:39.530 --> 04:46.100 +Uh, and then when it comes to the actual doing and inference running a price, uh, what we do is we + +04:46.100 --> 04:51.770 +first take the description that's passed in the description of our product, we encode it into a vector. + +04:51.770 --> 04:55.430 +And then we call self dot model.predict with that vector. + +04:55.430 --> 04:59.840 +And that gives us our random forest results and we return it. + +05:00.200 --> 05:01.280 +It's as simple as that. + +05:01.280 --> 05:03.200 +You'll see I do a max of zero here. + +05:03.200 --> 05:07.390 +I suggest that I floor it at zero so it can't return negative numbers. + +05:07.390 --> 05:08.950 +I don't know if if it would or not. 
+ +05:08.980 --> 05:14.860 +I think I might have seen it in an earlier version, and so that seemed like a sensible precaution to + +05:14.890 --> 05:15.160 +take. + +05:15.190 --> 05:17.470 +We don't want it predicting negative prices. + +05:18.010 --> 05:18.880 +Okay. + +05:18.880 --> 05:23.530 +So anyways that is the those are the the agents. + +05:23.590 --> 05:26.410 +We can then instantiate those agents. + +05:26.440 --> 05:31.600 +Now this function here description is exactly the same as we did in the last one. + +05:31.600 --> 05:39.880 +We simply take the item and we take its prompt and we pull out the to the nearest dollar. + +05:39.910 --> 05:41.770 +The the introductory text the header. + +05:41.770 --> 05:44.110 +And we also pull away the prices dollars. + +05:44.110 --> 05:48.670 +So we just get back to the blurb itself, the simple description of the product. + +05:49.030 --> 05:55.960 +Um, and with that in mind, we can now have a function, uh, RF, which is randomforest, which will + +05:55.960 --> 06:02.290 +take an item, turn it into a description, call our Randomforest agent price to price. + +06:02.290 --> 06:03.010 +It. + +06:03.010 --> 06:08.800 +Uh, and with that in mind, You, of course, remember our great test harness tester test. + +06:08.830 --> 06:15.130 +We can now test this with 250 data points and see how the random forest performs. + +06:15.190 --> 06:16.420 +Here we go. + +06:17.020 --> 06:19.090 +There's quite a lot of red in there. + +06:19.720 --> 06:20.710 +You remember last time? + +06:20.740 --> 06:22.870 +It got about 97, I think. + +06:23.260 --> 06:25.030 +Uh, let's see how it does. + +06:26.800 --> 06:30.400 +Uh, and it's, in fact, just a hair worse than it was last time. + +06:30.400 --> 06:31.900 +But obviously this is super close. + +06:31.900 --> 06:33.340 +It's basically the same. 
+ +06:33.370 --> 06:41.140 +So the random forest, given, uh, these improved vectors versus the word two vec vectors, gives essentially + +06:41.140 --> 06:42.550 +the same number. + +06:42.550 --> 06:44.590 +And you can see visually it's doing okay. + +06:44.620 --> 06:46.960 +There's a sort of a wrong slope here. + +06:46.960 --> 06:50.260 +And uh, some problem uh, um, over there. + +06:50.260 --> 06:52.030 +But generally speaking, it's done. + +06:52.030 --> 06:52.690 +Laudably. + +06:52.690 --> 06:56.410 +Well, uh, not like our recent models though. + +06:56.680 --> 06:59.170 +So that is the random forest. + +06:59.170 --> 07:05.260 +And in the next video, we are going to move to the ensemble model that brings everything together. + +07:05.290 --> 07:06.550 +I will see you there. diff --git a/week5/community-contributions/subtitles/srts/59670087/ja_JP.srt b/week5/community-contributions/subtitles/srts/59670087/ja_JP.srt new file mode 100755 index 0000000..ed5f199 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670087/ja_JP.srt @@ -0,0 +1,289 @@ +WEBVTT + +00:00.350 --> 00:05.270 +そして、 第8週2日目のパート4へようこそ。 + +00:05.330 --> 00:12.410 +ええと、 今週はいろいろなことが起こっている。 + +00:12.530 --> 00:14.900 +さっそく始めよう。 + +00:14.930 --> 00:19.970 +覚えているだろうか、 今日我々がやろうとしていることは、 商品の価値を推定する能力の精度を向上させるために、 + +00:19.970 --> 00:26.690 +他の種類の価格を構築し、 それらをまとめていくことだ。 + +00:26.810 --> 00:31.550 +繰り返しになるけど、 これをやっているのは、 これまで学んできたさまざまなテクニックを復習し、 + +00:31.550 --> 00:36.830 +学習の一部を定着させる機会であると同時に、 楽しい練習でもあるんだ。 + +00:37.070 --> 00:43.670 +第6週で実験したランダムフォレスト型の機械学習を使う。 + +00:43.670 --> 00:46.310 +それは伝統的なMLだが、 我々は違うやり方をするつもりだ。 + +00:46.310 --> 00:53.510 +私たちは、 クロマにあるベクトル埋め込みを使うつもりです。 それは、 えー、 抱擁顔文トランスフォーマーVectorizerに基づいています。 + +00:53.540 --> 01:01.810 +つまり、 トランスフォーマーと、 えー、 伝統的な機械学習を一緒に使うということです。 + +01:01.810 --> 01:06.370 +まずはインポートを行い、 いくつかの定数を設定する。 + +01:06.370 --> 01:15.280 +今回は、 製品ベクトルの設定、 定数の保存、 環境のロード、 テストデータのロードを忘れないようにした。 + +01:15.280 --> 01:20.710 +トレーニングデータはCromerにあるので必要ない。 
Cromer自体に接続し、 Cromerの商品コレクションを変数コレクションに入れ、 + +01:20.710 --> 01:26.200 +Cromerからロードする。 + +01:26.230 --> 01:31.900 +その結果、 ベクトル、 ドキュメント、 価格が得られる。 + +01:32.530 --> 01:38.170 +ということで、 ランダムフォレストをもう一度見てみよう。 + +01:38.260 --> 01:42.640 +そして、 前回どうやったか、 このセリフを覚えているかもしれない。 + +01:42.640 --> 01:45.070 +ランダムフォレスト回帰器を訓練する。 + +01:45.280 --> 01:50.650 +このnjobsというのは、 同時に実行できるプロセスの数だ。 + +01:50.650 --> 01:54.610 +しかも、 マイナス1本だとマシン全体を使い切ることになる。 + +01:54.610 --> 01:56.290 +各コアごとにプロセスを実行する。 + +01:56.290 --> 01:58.210 +そして、 本当に僕の箱を叩くんだ。 + +01:58.210 --> 02:09.160 +私のM1マックでは、 これを実行するのに1時間くらいかかる。 + +02:09.160 --> 02:13.450 +もし余裕があれば、 少し離れてマシンの音に耳を傾けてみてほしい。 + +02:13.540 --> 02:21.700 +それが終わったら、 Joblib dot dumpという便利なユーティリティを使って、 + +02:21.700 --> 02:25.660 +モデルの重みをファイルに保存します。 + +02:25.660 --> 02:31.570 +そしてモデルを提供し、 それを保存してモデルの重みを保存すればいい。 + +02:31.570 --> 02:36.550 +そして、 何時間分ものトレーニングをする必要がないように、 再びロードし直すんだ。 + +02:36.910 --> 02:47.560 +ここでは、 3つのエージェント・オブジェクト、 スペシャリスト・エージェント、 フロンティア・エージェント、 そしてランダムフォレスト・エージェントを読み込みます。 + +02:47.590 --> 02:51.370 +エージェント・フォルダーを見てみよう。 + +02:51.400 --> 02:54.460 +専門エージェントは、 以前にも紹介したことがある。 + +02:54.460 --> 02:55.450 +これはすでに書いた。 + +02:55.450 --> 02:57.970 +これはプロダクション化されたコードだ。 + +02:58.060 --> 03:05.820 +つまり、 基本的には、 特化型エージェントのコンストラクタのinitで、 モーダルと呼ばれるものを使います。 + +03:05.850 --> 03:08.100 +モーダル・クラス・ルックアップと言う。 + +03:08.100 --> 03:13.470 +そして、 サービス名とクラス名を提供し、 そのクラスをインスタンス化する。 + +03:13.470 --> 03:19.320 +そして、 実際にpriceを呼び出すときには、 単にself dot price + +03:19.320 --> 03:22.170 +dot priceと言う。 + +03:22.170 --> 03:24.300 +モーダルの伝え方を覚えておいてほしい。 + +03:24.300 --> 03:25.410 +我々はこれをローカルで実行したくない。 + +03:25.410 --> 03:31.230 +私たちはクラウドを呼び出し、 リモートでそれを実行し、 結果を返したい。 + +03:31.230 --> 03:34.350 +というわけで、 これが以前見た専門エージェントだ。 + +03:34.380 --> 03:47.310 +フロンティア・エージェントを見ると、 ここにあるのは前回説明したコードであることがわかる。 + +03:47.310 --> 03:48.570 +docstringがある。 + +03:48.600 --> 03:50.130 +ここにdocstringsがある。 + +03:50.130 --> 03:54.330 +どのようなオブジェクトを扱っているかを示すタイプ・ヒンティングがある。 + +03:54.570 --> 04:02.400 +そしてこれは、 Jupyter 
Notebookのコードから本番に使えるコードにするためのプロセスだ。 + +04:02.400 --> 04:10.970 +そして通常、 JupyterLabでこれを書くことはなく、 VSCodeやPyCharmのようなIDEでこれを行うでしょう。 + +04:11.240 --> 04:16.520 +そして、 タイプヒントを助けたり、 このゴミを埋めたりしてくれるから、 そこに作ることになる。 + +04:16.880 --> 04:19.730 +しかし、 JupyterLabを使うこともできる。 + +04:20.030 --> 04:22.730 +これがフロンティア・エージェントなんだ。 + +04:22.730 --> 04:27.410 +ランダムフォレスト・エージェントを見ると、 これはとてもシンプルだ。 + +04:27.410 --> 04:35.180 +コンストラクターでは、 まず、 ベクトルを作成するためのモデルである文型変換器を作成する。 + +04:35.180 --> 04:39.170 +そして、 先ほど保存したモデルをロードする。 + +04:39.530 --> 04:46.100 +そして、 実際に価格を推論するときには、 まず、 商品の説明文に記載された説明を受け取り、 + +04:46.100 --> 04:51.770 +それをベクトルにエンコードします。 + +04:51.770 --> 04:55.430 +そして、 セルフ・ドット・モデルと呼ぶ。 そのベクトルで予測する。 + +04:55.430 --> 04:59.840 +これでランダムフォレストの結果が得られたので、 それを返す。 + +05:00.200 --> 05:01.280 +簡単なことだ。 + +05:01.280 --> 05:03.200 +ここではゼロを最大にしているのがわかるだろう。 + +05:03.200 --> 05:07.390 +負の数を返せないように、 床をゼロにすることを提案する。 + +05:07.390 --> 05:08.950 +そうなるかどうかは分からない。 + +05:08.980 --> 05:15.160 +以前のバージョンで見たかもしれないので、 賢明な予防措置だと思った。 + +05:15.190 --> 05:17.470 +マイナス価格を予測してほしくない。 + +05:18.010 --> 05:18.880 +オーケー。 + +05:18.880 --> 05:23.530 +とにかく、 これがエージェントたちだ。 + +05:23.590 --> 05:26.410 +そして、 それらのエージェントをインスタンス化することができる。 + +05:26.440 --> 05:31.600 +さて、 この関数の説明は、 前回とまったく同じである。 + +05:31.600 --> 05:39.880 +私たちは単純に品物を手に取り、 その敏速さを測り、 最も近いドル数を引き出します。 + +05:39.910 --> 05:41.770 +冒頭の文章はヘッダーである。 + +05:41.770 --> 05:44.110 +そして価格も引き下げる。 + +05:44.110 --> 05:48.670 +だから、 宣伝文句そのもの、 つまり商品のシンプルな説明に戻るだけだ。 + +05:49.030 --> 05:55.960 +そして、 それを念頭に置いて、 アイテムを受け取り、 それを説明文に変換し、 ランダムフォレスト・エージェントの価格を価格に呼び出す関数、 + +05:55.960 --> 06:02.290 +RF(randomforest)を持つことができます。 + +06:02.290 --> 06:03.010 +それだ。 + +06:03.010 --> 06:08.800 +もちろん、 我々の偉大なテストハーネス・テスターテストを覚えているだろう。 + +06:08.830 --> 06:15.130 +250点のデータでテストし、 ランダムフォレストのパフォーマンスを見ることができる。 + +06:15.190 --> 06:16.420 +さあ、 始めよう。 + +06:17.020 --> 06:19.090 +赤がかなり多いね。 + +06:19.720 --> 06:20.710 +前回を覚えているか? 
+ +06:20.740 --> 06:22.870 +97点くらいだったと思う。 + +06:23.260 --> 06:25.030 +どうなるか見てみよう。 + +06:26.800 --> 06:30.400 +ええと、 実際、 前回よりほんの少し悪くなっている。 + +06:30.400 --> 06:31.900 +しかし、 これは明らかに超接戦だ。 + +06:31.900 --> 06:33.340 +基本的には同じだ。 + +06:33.370 --> 06:42.550 +つまり、 ランダムフォレストは、 改良されたベクトルと2つの単語のベクトルを比較した場合、 基本的に同じ数値を出す。 + +06:42.550 --> 06:44.590 +そして、 視覚的にもうまくいっていることがわかる。 + +06:44.620 --> 06:46.960 +ここにはある種の間違ったスロープがある。 + +06:46.960 --> 06:50.260 +それで、 ちょっと問題が......あ、 あそこで + +06:50.260 --> 06:52.030 +しかし、 一般的に言えば、 それは終わったことだ。 + +06:52.030 --> 06:52.690 +称賛に値する。 + +06:52.690 --> 06:56.410 +まあ、 最近のモデルとは違うけどね。 + +06:56.680 --> 06:59.170 +これがランダムフォレストだ。 + +06:59.170 --> 07:05.260 +そして次のビデオでは、 すべてをまとめるアンサンブル・モデルに移る。 + +07:05.290 --> 07:06.550 +そこで会おう。 diff --git a/week5/community-contributions/subtitles/srts/59670087/ko_KR.srt b/week5/community-contributions/subtitles/srts/59670087/ko_KR.srt new file mode 100755 index 0000000..189e2e9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670087/ko_KR.srt @@ -0,0 +1,322 @@ +WEBVTT + +00:00.350 --> 00:05.270 +8주 차 둘째 날의 4부에 잘 오셨어요 + +00:05.330 --> 00:11.210 +이번 주에 할 일이 많은데 오늘 들어온 부품 중에 이게 제일 + +00:11.210 --> 00:12.410 +무거워요 + +00:12.530 --> 00:14.900 +Get it, get it, get it 바로 시작하죠 + +00:14.930 --> 00:19.970 +기억하세요 오늘 우리가 할 것은 다른 종류의 가격을 만들어 + +00:19.970 --> 00:26.690 +함께 합치는 겁니다 제품의 가치를 정확하게 추정하기 위해서요 Put + +00:26.810 --> 00:31.550 +이 수업을 하는 이유는 우리가 배운 다양한 기술을 복습하고 + +00:31.550 --> 00:36.830 +학습 능력을 강화하고 즐거운 운동이 될 수 있도록 하기 위해서죠 + +00:37.070 --> 00:42.920 +임의의 숲 학습 형태의 머신 러닝을 사용할 거예요 6주 차에 실험했던 + +00:42.920 --> 00:43.670 +거죠 + +00:43.670 --> 00:46.310 +전통적인 ML이지만 다르게 할 거예요 + +00:46.310 --> 00:53.510 +크로마에 있는 벡터 내장 기법을 사용할 거예요 얼굴 문장을 끌어안는 변압기 + +00:53.540 --> 01:01.810 +벡터라이저에 기반한 건데 트랜스포머와 머신 러닝을 함께 쓴다는 뜻이죠 + +01:01.810 --> 01:06.370 +몇 가지 수입과 상수 설정으로 시작하죠 + +01:06.370 --> 01:13.870 +이번엔 제품 벡터를 설정하고 일관되게 저장하고 환경을 로드하고 테스트 데이터를 로드하는 + +01:13.870 --> 01:15.280 +걸 기억하죠 + +01:15.280 --> 01:20.710 +훈련 데이터가 필요 없죠 그건 Cromer에 있고 
Cromer 자체와 Cromer의 + +01:20.710 --> 01:26.200 +제품 컬렉션에 연결되어 변수 컬렉션에 넣고 Cromer에서 로드하니까요 + +01:26.230 --> 01:31.900 +결과로 벡터와 문서, 가격을 알 수 있죠 + +01:32.530 --> 01:38.170 +이제 임의의 숲을 다시 살펴볼게요 + +01:38.260 --> 01:42.640 +이 대사 기억하실지 모르겠네요 지난번엔 이렇게 했죠 + +01:42.640 --> 01:45.070 +무작위로 숲으로 퇴행하는 사람을 훈련해요 + +01:45.280 --> 01:50.650 +여기 이 놉은 동시 진행 가능한 절차의 수예요 + +01:50.650 --> 01:54.610 +Get 1을 누르면 컴퓨터 전체를 다 쓰게 돼요 + +01:54.610 --> 01:56.290 +모든 코어에 대한 프로세스를 실행하죠 + +01:56.290 --> 01:58.210 +제 상자를 부수죠 + +01:58.210 --> 02:05.170 +제 M1 Mac은 실행하는 데 1시간 정도 걸립니다 이미 실행했으니 여러분은 박스에 + +02:05.170 --> 02:09.160 +맞게 시간을 재고 1인치만 사용하세요 + +02:09.160 --> 02:13.450 +잠시 물러나서 비트를 감상할 수 있다면요 + +02:13.540 --> 02:21.700 +그게 끝나면 모델 무게를 파일에 저장할 수 있습니다 유용한 유틸리티 Joblib.Doplib를 이용해서요 + +02:21.700 --> 02:25.660 +scikit learn에 있는 거죠 + +02:25.660 --> 02:31.570 +그럼 모델을 제공해 저장할 수 있죠 모델 웨이트를 저장해요 + +02:31.570 --> 02:36.550 +그런 다음 다시 로드합니다 훈련 시간을 낭비하지 않으려고요 + +02:36.910 --> 02:45.820 +이제 세 가지 요소를 장착합니다 전문 요원, 개척지 요원 그리고 임의 + +02:45.820 --> 02:47.560 +숲 요원이죠 + +02:47.590 --> 02:51.370 +에이전트 폴더를 잠깐 살펴보고 그걸 살펴보죠 + +02:51.400 --> 02:54.460 +특수 요원은 이미 봤던 거예요 + +02:54.460 --> 02:55.450 +이미 써놨잖아요 + +02:55.450 --> 02:57.970 +이건 프로덕션화된 코드예요 + +02:58.060 --> 03:04.470 +기본적으로 이닛이라는 건데 컨스트럭터라는 특수 에이전트예요 모달이라고 + +03:04.470 --> 03:05.820 +하죠 + +03:05.850 --> 03:08.100 +ModalCass-out을 사용해서요 + +03:08.100 --> 03:13.470 +서비스 이름과 클래스 이름을 제공하고 클래스를 인스턴스화하죠 + +03:13.470 --> 03:19.320 +그리고 가격으로 호출할 때는 셀프.Price.Price라고 입력합니다 + +03:19.320 --> 03:22.170 +함수죠 함수 . 
remote이요 + +03:22.170 --> 03:24.300 +그 방법만 기억하면 돼요 + +03:24.300 --> 03:25.410 +이걸 로컬로 실행하면 안 돼요 + +03:25.410 --> 03:31.230 +클라우드에 호출해 원격으로 실행하고 결과를 가져오면 결과를 반환하죠 + +03:31.230 --> 03:34.350 +전에 봤던 특수 요원이에요 + +03:34.380 --> 03:40.560 +프런티어 에이전트를 보면 여기 있는 코드는 지난 시간에 살펴본 + +03:40.560 --> 03:47.310 +것으로 매개 변수를 식별하고 주석을 달아 멋지게 만들었죠 + +03:47.310 --> 03:48.570 +의사봉합이 있어요 + +03:48.600 --> 03:50.130 +여기 도서가 있어요 + +03:50.130 --> 03:54.330 +우리가 작업하는 개체를 설명하는 형식 힌트가 있어요 + +03:54.570 --> 04:00.600 +이런 과정을 거쳐야만 코드를 얻을 수 있어요 주피터 노트북 코드에서 + +04:00.600 --> 04:02.400 +생산 준비 단계로요 + +04:02.400 --> 04:08.750 +보통 JupyterLab에서는 작성하지 않아요 IDE에서 하죠 VSCode나 + +04:08.750 --> 04:10.970 +PyCam 같은 거요 + +04:11.240 --> 04:15.500 +여기에 구축하면 힌트 형식이나 검프 일부를 채우는 데 도움이 + +04:15.500 --> 04:16.520 +될 거예요 + +04:16.880 --> 04:19.730 +원한다면 주피터랩을 이용해도 돼요 + +04:20.030 --> 04:22.730 +이쪽은 프런티어 요원이에요 + +04:22.730 --> 04:27.410 +이제 임의의 산림청소를 보면 아주 간단해요 + +04:27.410 --> 04:33.800 +생성자에서 먼저 문장 변압기를 만듭니다 벡터를 만들기 위해 사용하는 + +04:33.830 --> 04:35.180 +모델이죠 + +04:35.180 --> 04:39.170 +그런 다음 조금 전에 저장한 모델을 로드하죠 + +04:39.530 --> 04:46.100 +실제 작업과 가격 실행 추론에 관해선 우리가 하는 일은 먼저 제품 + +04:46.100 --> 04:51.770 +설명에 통과된 설명을 취합니다 벡터로 암호화하죠 + +04:51.770 --> 04:55.430 +셀프.Model이라고 하고요 벡터로 예측하세요 + +04:55.430 --> 04:59.840 +그러면 임의의 숲 결과를 받고 반환하죠 + +05:00.200 --> 05:01.280 +아주 간단해요 + +05:01.280 --> 05:03.200 +0으로 최대치를 설정했어요 + +05:03.200 --> 05:07.390 +음수를 반환하지 못하게 0에서 밟는 게 좋겠어요 + +05:07.390 --> 05:08.950 +그럴지는 잘 모르겠어요 + +05:08.980 --> 05:15.160 +초기 버전에서 본 것 같아요 그래서 그게 현명한 예방책 같았죠 + +05:15.190 --> 05:17.470 +적자를 예상하면 안 되니까요 + +05:18.010 --> 05:18.880 +네 + +05:18.880 --> 05:23.530 +어쨌든 저건 에이전트란 거죠 + +05:23.590 --> 05:26.410 +그 에이전트를 인스턴스화할 수 있죠 + +05:26.440 --> 05:31.600 +여기 이 함수는 설명입니다 지난 번과 정확히 같죠 + +05:31.600 --> 05:39.880 +그냥 항목을 가져다가 가장 가까운 달러로 끌어내는 거죠 + +05:39.910 --> 05:41.770 +도입부 문자가 헤더에 들어가요 + +05:41.770 --> 05:44.110 +달러도 떼어낼 거예요 + +05:44.110 --> 05:48.670 +다시 광고 문구로 돌아가죠 제품에 대한 간단한 설명이요 Get up + +05:49.030 --> 05:55.960 +그걸 염두에 두고 
RF라는 함수를 쓸 수 있어요 랜덤포레스트라는 함수인데 항목을 설명으로 + +05:55.960 --> 06:02.290 +바꾸는 거죠 랜덤포레스트 에이전트 가격을 랜덤포레스트로 불러요 + +06:02.290 --> 06:03.010 +그것요 + +06:03.010 --> 06:08.800 +그걸 명심하고 하니스 테스트 기억하시죠? + +06:08.830 --> 06:15.130 +이제 250개의 데이터 포인트로 시험할 수 있습니다 무작위 숲이 어떻게 작동하는지 보죠 + +06:15.190 --> 06:16.420 +시작할게요 + +06:17.020 --> 06:19.090 +붉은색이 꽤 많네요 + +06:19.720 --> 06:20.710 +지난번 일 기억나요? + +06:20.740 --> 06:22.870 +97 정도였던 것 같아요 + +06:23.260 --> 06:25.030 +어떻게 되나 보죠 + +06:26.800 --> 06:30.400 +사실 지난번보다 아주 조금 더 나빠졌어요 + +06:30.400 --> 06:31.900 +하지만 이건 정말 가깝네요 + +06:31.900 --> 06:33.340 +기본적으로 똑같아요 + +06:33.370 --> 06:42.550 +무작위 숲은 개선된 벡터와 두 베카 벡터가 같은 수를 제공하죠 + +06:42.550 --> 06:44.590 +겉으로 보기에는 괜찮아요 + +06:44.620 --> 06:46.960 +비탈이 좀 안 좋아요 + +06:46.960 --> 06:50.260 +그리고 문제가 좀 있어요 저기예요 + +06:50.260 --> 06:52.030 +하지만 일반적으로 보면 다 끝난 거예요 + +06:52.030 --> 06:52.690 +칭찬할 만해요 + +06:52.690 --> 06:56.410 +요즘 모델과는 다르지만요 + +06:56.680 --> 06:59.170 +저기가 무작위 숲이에요 + +06:59.170 --> 07:05.260 +다음 비디오에선 모든 걸 하나로 묶어주는 앙상블 모델로 넘어갈 거예요 + +07:05.290 --> 07:06.550 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/59670121/en_US.srt b/week5/community-contributions/subtitles/srts/59670121/en_US.srt new file mode 100755 index 0000000..ff72da6 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670121/en_US.srt @@ -0,0 +1,373 @@ +WEBVTT + +00:00.380 --> 00:02.360 +So it's business time right now. + +00:02.360 --> 00:09.020 +We are going to build a Rag pipeline to estimate the price of products, drawing on context for similar + +00:09.020 --> 00:11.780 +products that we have in our training dataset. + +00:12.110 --> 00:14.510 +Uh, and so we are right here. + +00:14.540 --> 00:19.580 +Day 2.3 building a Rag pipeline with GPT four mini. + +00:19.850 --> 00:22.640 +Uh, we're going to go pretty quickly today through a lot of code. + +00:22.640 --> 00:24.860 +So bear with me then. 
+ +00:24.860 --> 00:30.440 +Remember the trick is to come back, run the code yourself and get a good sense of what's going on. + +00:30.440 --> 00:37.310 +So we do some imports, we log in as usual, and then we, uh, connect with OpenAI. + +00:37.490 --> 00:41.360 +Um, and it's using the fact that we've set the OpenAI API key. + +00:41.390 --> 00:44.450 +We're going to load in the test data set. + +00:44.480 --> 00:49.430 +Now, we don't need to load in the training data set because we'll be using the chroma data store for + +00:49.460 --> 00:50.120 +that. + +00:50.570 --> 00:51.350 +All right. + +00:51.350 --> 00:55.250 +So here's a function make context. + +00:55.250 --> 01:01.790 +We're going to use this function to produce the context that we're going to send GPT four mini that + +01:01.790 --> 01:04.910 +tells it about similar products that it could use. + +01:04.910 --> 01:10.680 +So we're going to say to provide some context, here are some other items that might be similar to the + +01:10.680 --> 01:12.120 +one you need to estimate. + +01:12.120 --> 01:18.000 +And then do you remember this zip construct that allows us to iterate through two different lists together. + +01:18.300 --> 01:24.840 +So we're going to collect similar and price from iterating through the similars and prices that are + +01:24.840 --> 01:25.680 +passed in. + +01:26.070 --> 01:32.670 +And then we're going to add in a message there's a potentially related product similar with this price. + +01:33.600 --> 01:35.670 +I'll execute this in just a second. + +01:35.670 --> 01:37.410 +So you see what it actually looks like. + +01:37.800 --> 01:44.640 +But then just as we've done in the past, I'm making this function messages for and it takes an item, + +01:44.640 --> 01:51.360 +it takes things similar to it and it takes prices and it's going to build that standard list of dicts + +01:51.360 --> 01:53.160 +that we know so, so well. 
+ +01:53.520 --> 01:58.170 +There's going to be a system message which is going to be just the same one we've used before. + +01:58.200 --> 02:04.410 +You estimate price of items, reply only with the price we're going to add in the context. + +02:04.410 --> 02:07.980 +We're going to add in the stuff that we just built right here. + +02:08.040 --> 02:09.300 +And then we're going to say. + +02:09.330 --> 02:10.980 +And now the question for you. + +02:10.980 --> 02:18.420 +And now this is basically exactly the same as we used before when we called GPT four back in week six + +02:18.750 --> 02:20.550 +when we built that pipeline. + +02:20.640 --> 02:27.870 +So the thing to notice here is that we're doing this Rag pipeline ourself without Lang chain. + +02:28.350 --> 02:30.510 +And it's actually not that hard. + +02:30.600 --> 02:35.040 +Lang chain gave us a nice little abstraction on top with with a few simple objects. + +02:35.040 --> 02:39.060 +And I remember going on about how it was just like one line of code or something to do it, but it's + +02:39.060 --> 02:40.890 +not that much more just to do it ourselves. + +02:40.890 --> 02:45.900 +Particularly now that you understand what's going on under the hood and you know how to call llms and + +02:45.900 --> 02:49.230 +you know how to look up similar objects and so on. + +02:49.350 --> 02:52.830 +Um, it becomes relatively straightforward. + +02:53.010 --> 03:01.260 +Uh, so, um, hopefully this is not going to be too, uh, um, difficult, but let's keep going. + +03:01.290 --> 03:04.020 +Um, so we're going to, um. + +03:06.060 --> 03:07.830 +Collect our chroma. + +03:08.610 --> 03:10.740 +Um, and once more, I've done this again, haven't I? + +03:10.770 --> 03:13.920 +I deleted the cell where I defined the DB variable. + +03:13.920 --> 03:17.050 +It should be products Underscore. + +03:18.460 --> 03:19.780 +Vector store. + +03:22.690 --> 03:23.860 +Vector store. + +03:23.890 --> 03:25.930 +Some possibly hard word to spell. 
+ +03:25.930 --> 03:26.560 +There we go. + +03:26.560 --> 03:27.700 +And now run that again. + +03:27.700 --> 03:28.450 +Fine. + +03:28.450 --> 03:33.700 +So we've now we're looking in the products collection in our vector data store. + +03:34.510 --> 03:35.950 +Um okay. + +03:35.950 --> 03:38.980 +And so we're now getting to the meat of the whole thing. + +03:38.980 --> 03:46.150 +So we're going to have a method, a function description that's going to take an item. + +03:46.150 --> 03:49.990 +And it's going to strip out from that item the stuff that we don't care about. + +03:49.990 --> 03:53.950 +So basically we'll take the prompt and we're going to take out this. + +03:53.950 --> 03:55.810 +How much does it cost to the nearest dollar. + +03:55.810 --> 04:00.910 +And we're going to then uh, ignore everything that comes after price is dollars. + +04:00.940 --> 04:03.430 +Let me show you exactly what's going on here. + +04:03.610 --> 04:08.380 +Uh, so if I have a quick look at my first training data point. + +04:09.430 --> 04:09.910 +Oops. + +04:09.940 --> 04:10.390 +Sorry. + +04:10.420 --> 04:10.870 +What have I done? + +04:10.900 --> 04:12.250 +Test the first test data point. + +04:12.250 --> 04:13.780 +We're not looking at training data anymore. + +04:13.810 --> 04:19.600 +The first test data point is this the repair kit for Ford, blah blah, blah, blah, blah with a price + +04:19.600 --> 04:20.230 +on it. + +04:20.560 --> 04:27.180 +Uh, if we look at its prompt, you'll see that that that's got all of this gubbins in there with the + +04:27.180 --> 04:29.040 +price and the question at the top. + +04:29.040 --> 04:36.390 +But what I can do is I can just say describe description of test zero. + +04:36.390 --> 04:41.670 +And what we should now get is this just the blurb without the price. + +04:41.670 --> 04:44.130 +So that should be clear. + +04:44.400 --> 04:44.910 +All right. 
+ +04:44.940 --> 04:50.340 +Now we're going to load the model that is the sentence transformer from hugging face. + +04:50.340 --> 04:53.070 +That gives us our simple vector encodings. + +04:53.070 --> 04:54.720 +It is our vectorizer. + +04:54.840 --> 04:59.430 +And so this method here, this is in fact our Vectorizer function. + +04:59.430 --> 05:00.840 +It takes an item. + +05:00.840 --> 05:04.890 +It it calls description on that item to turn it into text. + +05:04.890 --> 05:09.420 +And it then puts it in a list and calls model dot encode. + +05:09.840 --> 05:11.850 +Simple as that. + +05:12.690 --> 05:17.790 +And now and now we get to a function find similars. + +05:17.790 --> 05:22.950 +It will be given an item and it will return similar items. + +05:22.980 --> 05:28.840 +And this, this is the some of the hardest part that Lang was doing for us before, but it's not that + +05:28.840 --> 05:29.470 +hard. + +05:29.590 --> 05:36.490 +You can see what we do is we say to our collection, our chroma DB collection, I want to query this. + +05:36.520 --> 05:38.140 +These are the query embeddings. + +05:38.140 --> 05:42.880 +This is it's going to be based on this, uh, the vector that we will pass in. + +05:42.880 --> 05:46.120 +We do have to turn that into a floating point number from being a numpy array. + +05:46.150 --> 05:50.140 +That's uh, from being a numpy float 32. + +05:50.200 --> 05:53.440 +You have to turn them into floats, and then we have to turn it into a list instead of being a numpy + +05:53.470 --> 05:54.010 +array. + +05:54.010 --> 05:59.380 +And then you just simply say, uh, number of results and that's how many you want back. + +05:59.380 --> 06:04.210 +So you can pass in a vector as your query embedding and get back five results. + +06:04.210 --> 06:05.740 +That's all there is to it. + +06:05.740 --> 06:11.560 +And what we'll get back is uh, is some similars, uh, so let's just run this. 
+ +06:11.680 --> 06:18.040 +So now what we can do is we can look at test number one and let's look at test number one's prompt. + +06:18.040 --> 06:19.840 +So this is its prompt. + +06:19.930 --> 06:21.700 +Um how much does it cost to the nearest dollar. + +06:21.700 --> 06:24.160 +It's a fan clutch package. + +06:24.490 --> 06:26.860 +Uh, from, um motorcraft. + +06:26.860 --> 06:28.210 +So anyway, you get a sense. + +06:28.210 --> 06:32.020 +So we can now say documents and prices are fine. + +06:32.020 --> 06:34.020 +Find Similares from test one. + +06:34.020 --> 06:39.030 +We're going to call this function right here to find five similar results. + +06:39.060 --> 06:40.440 +Let's do that. + +06:41.100 --> 06:42.630 +And now it's thinking. + +06:42.630 --> 06:44.190 +And now let's print that. + +06:44.550 --> 06:51.390 +And here you will find five related results are potentially related products. + +06:51.390 --> 06:54.600 +And they are all potentially related product. + +06:54.630 --> 06:59.640 +They are all sort of fan clutchy kind of things that have various prices. + +06:59.730 --> 07:04.080 +Uh, and they do appear to be similar products at first blush. + +07:04.260 --> 07:09.540 +Uh, so you should convince yourself, do some more testing and make sure you're comfortable that indeed + +07:09.540 --> 07:15.840 +we can call this find similar function, which is simply calling query on our Cromer collection. + +07:15.840 --> 07:21.720 +And we will be able to collect similar products from our Cromer database. + +07:21.720 --> 07:23.490 +It's as simple as that. + +07:23.790 --> 07:31.350 +Uh, and once we've done that, we will then be able to put the final touches on our Rag data flow on + +07:31.350 --> 07:36.030 +our Rag pipeline and then use that to call GPT four zero. + +07:36.030 --> 07:38.610 +And we will do that in the next video. 
diff --git a/week5/community-contributions/subtitles/srts/59670121/ja_JP.srt b/week5/community-contributions/subtitles/srts/59670121/ja_JP.srt new file mode 100755 index 0000000..5aaf68f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670121/ja_JP.srt @@ -0,0 +1,334 @@ +WEBVTT + +00:00.380 --> 00:02.360 +だから、 今はビジネスの時間なんだ。 + +00:02.360 --> 00:11.780 +我々は、 トレーニングデータセットにある類似商品のコンテキストを利用して、 商品の価格を推定するRagパイプラインを構築しようとしている。 + +00:12.110 --> 00:14.510 +それで、 今ここにいる。 + +00:14.540 --> 00:19.580 +2日目 3 GPT4ミニでラグ・パイプラインを構築する + +00:19.850 --> 00:22.640 +ええと、 今日はたくさんのコードを手短に説明します。 + +00:22.640 --> 00:24.860 +だから我慢してくれ。 + +00:24.860 --> 00:30.440 +コツは、 戻って自分でコードを実行し、 何が起こっているのかをよく理解することだ。 + +00:30.440 --> 00:37.310 +インポートして、 いつものようにログインして、 OpenAIに接続する。 + +00:37.490 --> 00:41.360 +OpenAIのAPIキーを設定したことを利用しています。 + +00:41.390 --> 00:44.450 +テストデータセットをロードする。 + +00:44.480 --> 00:50.120 +クロマ・データ・ストアを使うので、 トレーニング・データセットをロードする必要はない。 + +00:50.570 --> 00:51.350 +分かった。 + +00:51.350 --> 00:55.250 +そこで、 ファンクション・メイク・コンテクストだ。 + +00:55.250 --> 01:04.910 +この関数を使い、 GPTに送る4つのミニのコンテキストを作成する。 + +01:04.910 --> 01:12.120 +そこで、 文脈を説明するために、 見積もりが必要な項目と類似している可能性のある他の項目をいくつか挙げます。 + +01:12.120 --> 01:18.000 +そして、 2つの異なるリストを一緒に反復処理できるこのZIP構造を覚えているだろうか。 + +01:18.300 --> 01:25.680 +そこで、 渡された類似品と価格を反復処理することで、 類似品と価格を収集することにする。 + +01:26.070 --> 01:32.670 +そして、 この価格で似たような関連商品がある可能性があるというメッセージを追加します。 + +01:33.600 --> 01:35.670 +すぐに実行する。 + +01:35.670 --> 01:37.410 +だから、 実際にどのように見えるかを見てほしい。 + +01:37.800 --> 01:44.640 +しかし、 これまでと同じように、 この関数はアイテム、 それに似たもの、 + +01:44.640 --> 01:53.160 +価格を受け取り、 私たちがよく知っているディクツの標準的なリストを構築します。 + +01:53.520 --> 01:58.170 +システムメッセージはこれまでと同じものだ。 + +01:58.200 --> 02:04.410 +あなたはアイテムの価格を見積もり、 我々は文脈で追加しようとしている価格だけで答える。 + +02:04.410 --> 02:07.980 +先ほど作ったものをここに追加していく。 + +02:08.040 --> 02:09.300 +そして、 こう言うんだ。 + +02:09.330 --> 02:10.980 +そして、 あなたへの質問だ。 + +02:10.980 --> 02:20.550 +これは基本的に、 6週目にGPT4と呼んでパイプラインを構築したときに使ったものとまったく同じだ。 + +02:20.640 --> 02:27.870 +ここで注目すべきは、 
ラング・チェーンを使わずにラグ・パイプラインを自前で行っていることだ。 + +02:28.350 --> 02:30.510 +そして、 実はそれほど難しくはない。 + +02:30.600 --> 02:35.040 +ラング・チェーンは、 いくつかのシンプルなオブジェクトの上に、 ちょっとした抽象化を施してくれた。 + +02:35.040 --> 02:40.890 +コード1行とかでできるけど、 自分たちでやるのはもっと大変なんだ。 + +02:40.890 --> 02:49.230 +特に今は、 フードの下で何が起こっているかを理解し、 llmsの呼び出し方や類似オブジェクトの検索方法などを知っている。 + +02:49.350 --> 02:52.830 +それは比較的簡単なことだ。 + +02:53.010 --> 03:01.260 +ええと、 だから、 ええと、 できれば、 あまり難しく考えないでほしいんだけど、 続けよう。 + +03:01.290 --> 03:04.020 +ええと、 だから私たちは、 ええと。 + +03:06.060 --> 03:07.830 +クロマを集めよう。 + +03:08.610 --> 03:10.740 +うーん、 またやってしまったね。 + +03:10.770 --> 03:13.920 +DB変数を定義したセルを削除した。 + +03:13.920 --> 03:17.050 +アンダースコアの製品であるべきだ。 + +03:18.460 --> 03:19.780 +ベクターストア + +03:22.690 --> 03:23.860 +ベクターストア + +03:23.890 --> 03:25.930 +スペルが難しいかもしれない。 + +03:25.930 --> 03:26.560 +これでよし。 + +03:26.560 --> 03:27.700 +そして今、 もう一度それを実行する。 + +03:27.700 --> 03:28.450 +素晴らしい。 + +03:28.450 --> 03:33.700 +ベクター・データ・ストアのproductsコレクションを見てみよう。 + +03:34.510 --> 03:35.950 +うーん、 わかった。 + +03:35.950 --> 03:38.980 +そして、 我々は今、 本題に入ろうとしている。 + +03:38.980 --> 03:46.150 +そこで、 アイテムを受け取るメソッド、 関数の説明を用意する。 + +03:46.150 --> 03:49.990 +そして、 その項目から私たちが気にしないものを取り除くことになる。 + +03:49.990 --> 03:53.950 +だから基本的には、 プロンプトを受け取って、 これを取り出そう。 + +03:53.950 --> 03:55.810 +ドル単位でいくらですか。 + +03:55.810 --> 04:00.910 +そして、 価格の後に来るものはすべて無視することになる。 + +04:00.940 --> 04:03.430 +ここで何が起こっているのか、 具体的にお見せしよう。 + +04:03.610 --> 04:08.380 +最初のトレーニングデータを見てみよう。 + +04:09.430 --> 04:09.910 +おっと。 + +04:09.940 --> 04:10.390 +申し訳ない。 + +04:10.420 --> 04:10.870 +私が何をしたというのか? 
+ +04:10.900 --> 04:12.250 +最初のテストデータ点をテストする。 + +04:12.250 --> 04:13.780 +もうトレーニングデータは見ていない。 + +04:13.810 --> 04:20.230 +最初のテストデータは、 フォードの修理キットで、 値段が書いてある。 + +04:20.560 --> 04:29.040 +そのプロンプトを見ると、 価格と質問がトップに表示されている。 + +04:29.040 --> 04:36.390 +しかし、 私ができることは、 テストゼロの説明を記述することだ。 + +04:36.390 --> 04:41.670 +そして今、 私たちが手に入れるべきものは、 この価格抜きの紹介文だけである。 + +04:41.670 --> 04:44.130 +だから、 それは明らかだろう。 + +04:44.400 --> 04:44.910 +分かった。 + +04:44.940 --> 04:50.340 +それでは、 ハギング・フェイスのセンテンス・トランスフォームのモデルをロードする。 + +04:50.340 --> 04:53.070 +これで単純なベクトル符号化ができた。 + +04:53.070 --> 04:54.720 +ベクタライザーだ。 + +04:54.840 --> 04:59.430 +そしてこのメソッド、 実はこれがベクター・ライザー関数なのだ。 + +04:59.430 --> 05:00.840 +アイテムが必要だ。 + +05:00.840 --> 05:04.890 +これは、 そのアイテムの説明を呼び出してテキストに変換する。 + +05:04.890 --> 05:09.420 +そしてそれをリストに入れ、 model dot encodeを呼び出す。 + +05:09.840 --> 05:11.850 +単純なことだ。 + +05:12.690 --> 05:17.790 +そして今、 私たちは類似点を見つける機能にたどり着いた。 + +05:17.790 --> 05:22.950 +アイテムが与えられ、 似たようなアイテムを返す。 + +05:22.980 --> 05:29.470 +これは、 以前ラングがやってくれていた一番難しい部分なんだけど、 そんなに難しくはないんだ。 + +05:29.590 --> 05:36.490 +コレクション(クロマDBコレクション)に対して、 このクエリーを実行する。 + +05:36.520 --> 05:38.140 +これがクエリの埋め込みである。 + +05:38.140 --> 05:42.880 +これは、 このベクトルに基づいているんだ。 + +05:42.880 --> 05:46.120 +numpyの配列から浮動小数点数に変換する必要がある。 + +05:46.150 --> 05:50.140 +それは、 numpyのfloat32だからだ。 + +05:50.200 --> 05:54.010 +そしてそれをnumpyの配列ではなくリストにしなければならない。 + +05:54.010 --> 05:59.380 +そして、 単純に、 ええと、 結果の数、 と言って、 その数だけ返してほしい。 + +05:59.380 --> 06:04.210 +だから、 クエリーの埋め込みとしてベクトルを渡すと、 5つの結果が返ってくる。 + +06:04.210 --> 06:05.740 +それだけだ。 + +06:05.740 --> 06:11.560 +そして返ってくるのは、 いくつかの類似点だ。 + +06:11.680 --> 06:18.040 +では、 テスト1番のプロンプトを見てみよう。 + +06:18.040 --> 06:19.840 +これがそのプロンプトだ。 + +06:19.930 --> 06:21.700 +1ドル単位でいくらですか? 
+ +06:21.700 --> 06:24.160 +ファンクラッチパッケージだ。 + +06:24.490 --> 06:26.860 +ええと、 モータークラフトの。 + +06:26.860 --> 06:28.210 +いずれにせよ、 おわかりいただけたと思う。 + +06:28.210 --> 06:32.020 +だから文書も価格も問題ないと言えるようになった。 + +06:32.020 --> 06:34.020 +テスト1からシミラーレを探す。 + +06:34.020 --> 06:39.030 +ここでこの関数を呼び出して、 似たような結果を5つ見つける。 + +06:39.060 --> 06:40.440 +そうしよう。 + +06:41.100 --> 06:42.630 +そして今、 考えている。 + +06:42.630 --> 06:44.190 +それを印刷しよう。 + +06:44.550 --> 06:51.390 +そしてここで、 5つの関連結果が潜在的な関連商品であることがわかります。 + +06:51.390 --> 06:54.600 +そして、 それらはすべて関連商品である可能性がある。 + +06:54.630 --> 06:59.640 +どれもファンのクラッチ的なもので、 値段も様々だ。 + +06:59.730 --> 07:04.080 +ええと、 一見すると似たような製品に見えるけど。 + +07:04.260 --> 07:09.540 +もっとテストして、 クロマー・コレクションにクエリーを呼び出すだけで、 + +07:09.540 --> 07:15.840 +似たような関数を見つけることができることを確認してください。 + +07:15.840 --> 07:21.720 +そして、 クロマーのデータベースから類似商品を集めることができるようになる。 + +07:21.720 --> 07:23.490 +簡単なことだ。 + +07:23.790 --> 07:31.350 +そうしたら、 ラグ・パイプラインのラグ・データ・フローの最終仕上げをして、 それを使ってGPT + +07:31.350 --> 07:36.030 +four zeroを呼び出すことができる。 + +07:36.030 --> 07:38.610 +それは次のビデオで。 diff --git a/week5/community-contributions/subtitles/srts/59670121/ko_KR.srt b/week5/community-contributions/subtitles/srts/59670121/ko_KR.srt new file mode 100755 index 0000000..233a7df --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670121/ko_KR.srt @@ -0,0 +1,370 @@ +WEBVTT + +00:00.380 --> 00:02.360 +이제 시작이에요 + +00:02.360 --> 00:09.020 +제품 가격을 추정하는 래그 파이프라인을 구축할 겁니다 훈련 데이터셋에 있는 유사한 + +00:09.020 --> 00:11.780 +제품의 컨텍스트를 참고해서요 + +00:12.110 --> 00:14.510 +그래서 여기 왔어요 + +00:14.540 --> 00:19.580 +둘째 날이죠 3번 GPT4 미니로 래그 파이프라인을 만들어요 + +00:19.850 --> 00:22.640 +오늘은 많은 코드를 빠르게 다룰 거예요 + +00:22.640 --> 00:24.860 +그럼 조금만 참아주세요 + +00:24.860 --> 00:30.440 +Get it의 비결은 직접 코드를 실행해 어떤 상황인지 파악하는 거예요 + +00:30.440 --> 00:37.310 +그래서 가져오기를 하고 평소처럼 로그인해서 오픈라이에 접속했어요 + +00:37.490 --> 00:41.360 +OpenAI API 키를 설정했다는 사실을 이용하고 있어요 + +00:41.390 --> 00:44.450 +테스트 데이터 세트를 로드할 거예요 + +00:44.480 --> 00:49.430 +훈련 데이터 세트를 로드할 필요는 없어요 크로마 데이터 저장소를 사용할 + +00:49.460 --> 
00:50.120 +거니까요 + +00:50.570 --> 00:51.350 +좋아요 + +00:51.350 --> 00:55.250 +함수 만들기 컨텍스트가 있어요 + +00:55.250 --> 01:01.790 +이 함수를 이용해 컨텍스트를 생성할 겁니다 GPT for 미니를 보내 사용할 수 있는 + +01:01.790 --> 01:04.910 +비슷한 제품에 대해 알려주는 거죠 + +01:04.910 --> 01:10.680 +컨텍스트를 제공하라고 할 거예요 여러분이 추정해야 하는 것과 유사한 다른 항목들이 + +01:10.680 --> 01:12.120 +여기 있어요 + +01:12.120 --> 01:18.000 +압축 파일 생성기 기억하시나요? 두 개의 다른 리스트를 반복할 수 있게 해주죠 + +01:18.300 --> 01:24.840 +통과된 비슷한 가격과 비슷한 가격의 반복을 통해 비슷한 가격과 비슷한 것을 수집하는 + +01:24.840 --> 01:25.680 +거죠 + +01:26.070 --> 01:32.670 +그런 다음 메시지를 추가합니다 이 가격과 관련 있을 가능성이 있는 제품이 있다고요 + +01:33.600 --> 01:35.670 +잠시 후에 실행할게요 + +01:35.670 --> 01:37.410 +어떤 모습인지 볼 수 있죠 + +01:37.800 --> 01:44.640 +그런데 과거에 했던 것처럼 이 함수 메시지를 만들고 항목을 하나 가져가고 + +01:44.640 --> 01:51.360 +비슷한 걸 가져가고 가격을 가져가고 표준 독감 목록을 만듭니다 우리가 + +01:51.360 --> 01:53.160 +잘 아는 거죠 + +01:53.520 --> 01:58.170 +시스템 메시지가 있을 겁니다 전에 사용했던 것과 동일한 거죠 + +01:58.200 --> 02:04.410 +물품의 가격을 추정하고 컨텍스트에 추가할 가격으로만 응답하세요 + +02:04.410 --> 02:07.980 +방금 만든 걸 추가할게요 + +02:08.040 --> 02:09.300 +그리고 이렇게 말해요 + +02:09.330 --> 02:10.980 +이제 질문할게요 + +02:10.980 --> 02:18.420 +이건 기본적으로 우리가 전에 사용했던 것과 같습니다 6주 차에 GPT 4를 호출했을 때요 파이프라인을 + +02:18.750 --> 02:20.550 +구축했을 때요 + +02:20.640 --> 02:27.870 +여기서 주목할 건 랭 체인 없이 랙 파이프라인을 하고 있다는 거죠 + +02:28.350 --> 02:30.510 +사실 그렇게 어렵지도 않아요 + +02:30.600 --> 02:35.040 +랭 체인은 간단한 개체 몇 개로 추상화를 제공했어요 + +02:35.040 --> 02:39.060 +코드 한 줄이면 된다고 했던 게 기억나네요 하지만 직접 + +02:39.060 --> 02:40.890 +하기엔 그리 많지 않아요 + +02:40.890 --> 02:45.900 +특히 지금은 보닛 밑에서 무슨 일이 일어나는지 알고 llms를 호출하는 법과 + +02:45.900 --> 02:49.230 +유사한 개체들을 찾아내는 법 등을 알고 있으니까요 + +02:49.350 --> 02:52.830 +비교적 간단해요 + +02:53.010 --> 03:01.260 +너무 힘들지 않았으면 좋겠네요 계속 가보죠 + +03:01.290 --> 03:04.020 +그래서 갈 거예요 + +03:06.060 --> 03:07.830 +채도를 채취할게요 + +03:08.610 --> 03:10.740 +또 이런 말을 했네요 + +03:10.770 --> 03:13.920 +DB 변수를 정의했던 핸드폰을 지웠어요 + +03:13.920 --> 03:17.050 +제품 밑줄로 해야죠 + +03:18.460 --> 03:19.780 +벡터 스토어요 + +03:22.690 --> 03:23.860 +벡터 스토어요 + +03:23.890 --> 03:25.930 +어려운 단어일 거예요 + 
+03:25.930 --> 03:26.560 +됐어요 + +03:26.560 --> 03:27.700 +다시 실행해요 + +03:27.700 --> 03:28.450 +좋아요 + +03:28.450 --> 03:33.700 +이제 벡터 데이터 저장소에서 제품 컬렉션을 살펴봤죠 + +03:34.510 --> 03:35.950 +알았어요 + +03:35.950 --> 03:38.980 +이제 본론으로 들어가는 거죠 + +03:38.980 --> 03:46.150 +메서드, 함수 설명이 있습니다 항목을 취할 거예요 + +03:46.150 --> 03:49.990 +해당 항목에서 우리가 관심 없는 걸 걸러내죠 + +03:49.990 --> 03:53.950 +기본적으로 프롬프트를 가져다가 이걸 빼낼 거예요 + +03:53.950 --> 03:55.810 +1달러까지 얼마나 들까요? + +03:55.810 --> 04:00.910 +가격 이즈 달러 다음에 나오는 건 다 무시할 거예요 + +04:00.940 --> 04:03.430 +정확히 어떤 건지 보여드릴게요 + +04:03.610 --> 04:08.380 +제 첫 훈련 데이터 포인트를 잠깐 살펴볼게요 + +04:09.430 --> 04:09.910 +이런, 미안해요 + +04:09.940 --> 04:10.390 +미안해요 + +04:10.420 --> 04:10.870 +내가 무슨 짓을 한 거죠? + +04:10.900 --> 04:12.250 +첫 번째 테스트 데이터 포인트를 테스트하세요 + +04:12.250 --> 04:13.780 +훈련 데이터는 더 이상 안 봐요 + +04:13.810 --> 04:19.600 +첫 번째 데이터 포인트는 포드의 수리 키트 어쩌고저쩌고 가격도 적혀 + +04:19.600 --> 04:20.230 +있네요 + +04:20.560 --> 04:27.180 +프롬프트를 보면 여기 잡동사니들이 있는 게 보이실 겁니다 가격과 상단에 + +04:27.180 --> 04:29.040 +질문이 있죠 + +04:29.040 --> 04:36.390 +하지만 이렇게 말할 수 있어요 test 제로 설명 + +04:36.390 --> 04:41.670 +이제 가격 없는 카피만 Get up 하면 돼요 + +04:41.670 --> 04:44.130 +그러니 분명하죠 + +04:44.400 --> 04:44.910 +좋아요 + +04:44.940 --> 04:50.340 +이제 얼굴을 안는 것에서 문장 변압기인 모델을 로드할게요 + +04:50.340 --> 04:53.070 +그게 간단한 벡터 인코딩을 제공하죠 + +04:53.070 --> 04:54.720 +우리 벡터라이저예요 + +04:54.840 --> 04:59.430 +이 메서드는 벡터라이저 함수예요 + +04:59.430 --> 05:00.840 +아이템이 필요해요 + +05:00.840 --> 05:04.890 +해당 아이템에 대한 설명을 호출해 텍스트로 바꾸죠 + +05:04.890 --> 05:09.420 +목록에 넣고 모델 .in코드를 호출해요 + +05:09.840 --> 05:11.850 +간단해요 + +05:12.690 --> 05:17.790 +이제 유사한 함수를 찾는 함수가 나와요 + +05:17.790 --> 05:22.950 +항목을 받으면 비슷한 걸 반환하죠 + +05:22.980 --> 05:29.470 +랭이 전에 했던 것 중 가장 어려운 부분이지만 그렇게 어렵진 않아요 + +05:29.590 --> 05:36.490 +컬렉션에 이렇게 말하는 걸 볼 수 있어요 크로마 DB 컬렉션 이걸 쿼리 하고 싶어요 + +05:36.520 --> 05:38.140 +쿼리 삽입이죠 + +05:38.140 --> 05:42.880 +우리가 통과할 벡터를 기반으로 하는 거예요 + +05:42.880 --> 05:46.120 +nomp 배열에서 부동 소수점 수로 바꿔야 해요 + +05:46.150 --> 05:50.140 +그건 바보 32인조라서 그래요 + +05:50.200 --> 
05:53.440 +플로트로 바꿔야 합니다 그리고 목록으로 바꿔야 해요 nomp 배열이 + +05:53.470 --> 05:54.010 +아니라요 + +05:54.010 --> 05:59.380 +그다음에는 결과의 수를 적으면 돼요 그게 다시 돌려받고 싶은 수죠 + +05:59.380 --> 06:04.210 +쿼리 삽입을 벡터로 전달해 5개의 결과를 get 할 수 있죠 + +06:04.210 --> 06:05.740 +그게 다예요 + +06:05.740 --> 06:11.560 +Get in get은 유사한 것들이에요. 실행해 볼게요. + +06:11.680 --> 06:18.040 +이제 할 수 있는 건 테스트 1을 볼 수 있습니다 테스트 1 프롬프트를 보죠 + +06:18.040 --> 06:19.840 +이게 프롬프트예요 + +06:19.930 --> 06:21.700 +얼마면 살 수 있어요? + +06:21.700 --> 06:24.160 +팬 클러치 패키지예요 + +06:24.490 --> 06:26.860 +모터크래프트에서 왔어요 + +06:26.860 --> 06:28.210 +어쨌든 감을 잡게 되죠 get it + +06:28.210 --> 06:32.020 +이제 문서와 가격은 괜찮다고 할 수 있죠 + +06:32.020 --> 06:34.020 +1번 테스트에서 유사점을 찾아요 + +06:34.020 --> 06:39.030 +이 함수를 호출해 비슷한 결과 5개를 찾아볼게요 + +06:39.060 --> 06:40.440 +그렇게 하죠 + +06:41.100 --> 06:42.630 +이제 생각하네요 + +06:42.630 --> 06:44.190 +이제 프린트할게요 + +06:44.550 --> 06:51.390 +여기서 관련 결과가 5개 나올 거예요 관련 제품이 될 수도 있죠 + +06:51.390 --> 06:54.600 +모두 관련 상품일 가능성이 있어요 + +06:54.630 --> 06:59.640 +클러치 달린 물건인데 가격이 다양해요 + +06:59.730 --> 07:04.080 +언뜻 보면 비슷한 제품으로 보이긴 해요 + +07:04.260 --> 07:09.540 +그러니 자신을 설득하고 더 테스트하고 유사한 함수를 + +07:09.540 --> 07:15.840 +찾도록 하세요 Cromer 컬렉션에 쿼리를 호출하는 거죠 + +07:15.840 --> 07:21.720 +크로머 데이터베이스에서 비슷한 제품을 수집할 수 있어요 + +07:21.720 --> 07:23.490 +아주 간단해요 + +07:23.790 --> 07:31.350 +그 작업이 끝나면 랙 파이프라인에 있는 랙 데이터 흐름에 최종 손질을 할 수 있습니다 + +07:31.350 --> 07:36.030 +그걸 이용해 GPT 40을 호출하죠 + +07:36.030 --> 07:38.610 +다음 비디오에서 보여드릴게요 diff --git a/week5/community-contributions/subtitles/srts/59670171/en_US.srt b/week5/community-contributions/subtitles/srts/59670171/en_US.srt new file mode 100755 index 0000000..9547a3d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670171/en_US.srt @@ -0,0 +1,409 @@ +WEBVTT + +00:00.470 --> 00:07.640 +So as the very final step on this part four of day two of week eight, we are now going to build an + +00:07.670 --> 00:10.220 +ensemble model that brings all of this together. 
+ +00:10.220 --> 00:16.040 +And first, I just wanted to show you that if I take a product like my microphone right here, the Quadcast + +00:16.040 --> 00:23.630 +HyperX condenser mic, we've got these three objects now, Specialist Frontier and Random Forest, and + +00:23.630 --> 00:26.330 +we can ask each of them to price this product. + +00:26.330 --> 00:28.670 +And you'll see we get these three numbers. + +00:28.670 --> 00:33.080 +In this case, I think the frontier model is closest to the truth. + +00:33.080 --> 00:37.730 +I seem to remember when we and maybe I must have had slightly different text when we called the specialist + +00:37.730 --> 00:38.780 +model last time. + +00:38.780 --> 00:40.100 +I think we got even better. + +00:40.100 --> 00:43.940 +We got like 129, I think, which is even closer. + +00:44.120 --> 00:45.740 +Uh, but yes. + +00:45.740 --> 00:50.900 +Anyways, you can see that the random forest didn't do so great, but the other two were, uh, were + +00:50.900 --> 00:51.830 +reasonable. + +00:52.070 --> 00:55.280 +So what we do right now is quite simple. + +00:55.280 --> 01:02.450 +I take, uh, the first, I take a selected 250 test data points. + +01:02.510 --> 01:08.460 +Uh, I actually picked the ones from 1000 to 150 to keep it separate from the ones we've been using + +01:08.460 --> 01:10.170 +for actually testing. + +01:10.410 --> 01:18.630 +Um, and basically I take each of those items, I find its description and I add in, what price do + +01:18.630 --> 01:24.930 +we get from the specialist model, from the frontier model, and from the random forest model? + +01:24.930 --> 01:29.700 +And then I also have a list of prices where I put the actual true price of that item. 
+ +01:29.700 --> 01:37.890 +So we will end up with these four lists a list of specialist results from our proprietary LLM frontier + +01:37.920 --> 01:45.090 +Rag based results that come from GPT four, with our extra context and the random forest results, and + +01:45.090 --> 01:47.310 +then the ground truth, the real numbers. + +01:47.310 --> 01:49.410 +And so we build all of that. + +01:50.160 --> 01:52.770 +I'm going to do a trick now which which is fairly common. + +01:52.770 --> 01:54.630 +It's the kind of thing you can really play with. + +01:54.630 --> 01:57.480 +I'm going to add two more, uh, into this. + +01:57.510 --> 02:04.830 +One of them is called mins and is the minimum of those three, and the other is called Max's and it's + +02:04.830 --> 02:06.750 +the maximum of those three. + +02:06.960 --> 02:13.250 +It's just it's another Um, fact that might might have some signal in there. + +02:13.250 --> 02:18.470 +It might be useful to also look at what is the lowest estimate that the three models had, and what + +02:18.470 --> 02:22.520 +is the highest estimate that they had for any one product. + +02:22.520 --> 02:30.140 +So now at this point we now have five results for each each of the 250 products, the specialist one, + +02:30.140 --> 02:35.630 +the frontier one, the random forest one, the minimum of those three, and the maximum of those three. + +02:35.660 --> 02:44.840 +They are sitting in five collections and I make a pandas dataframe out of those five specialist frontier, + +02:44.870 --> 02:47.240 +random forest, min and max. + +02:47.330 --> 02:52.340 +And I take the prices, the ground truth, and I convert that into a series. + +02:52.340 --> 03:00.350 +And I call this x and I call this y, which will be familiar to anyone from a traditional machine learning + +03:00.350 --> 03:01.010 +background. 
+ +03:01.010 --> 03:07.670 +And I can then do exactly what we also did during week six, which is I can say, uh, let's train a + +03:07.670 --> 03:14.240 +linear regression model, a simple linear regression that says what weighted average of these different + +03:14.250 --> 03:20.190 +series gives you the best fit, the best result for this data. + +03:20.520 --> 03:22.800 +And so we do that. + +03:22.950 --> 03:25.140 +Um, and this is what we get. + +03:25.140 --> 03:26.820 +These are the coefficients. + +03:27.570 --> 03:31.980 +So both the min's and the Max's get pretty high weighting. + +03:32.070 --> 03:39.120 +Uh, so generally speaking it's taking its most, uh, been looking at some combination of the minimum + +03:39.120 --> 03:42.690 +and the maximum as what it has latched onto. + +03:42.750 --> 03:49.020 +Um, then it's taken a healthy share of the specialist proprietary LM and a much smaller share of the + +03:49.020 --> 03:49.980 +frontier model. + +03:49.980 --> 03:53.280 +And somewhat bizarrely, it's actually said the frontier. + +03:53.310 --> 04:00.990 +There is there is some signal in Random Forest, but it's going to subtract out that, uh, you'll see + +04:00.990 --> 04:06.180 +it's given a pretty large intercept and subtracted out a portion of the random forest numbers. + +04:06.300 --> 04:10.950 +So that's a curious result, which indicates that maybe the random forest numbers weren't weren't that + +04:10.950 --> 04:11.220 +good. + +04:11.250 --> 04:15.210 +But it it does think it's useful to incorporate that in the overall puzzle. + +04:15.240 --> 04:17.790 +Now you're probably you're going to thinking in your mind. + +04:17.790 --> 04:21.960 +You point out to me that random forest is already baked into these two, so I can't. + +04:21.990 --> 04:27.030 +You can't read too much into the fact that it's got a negative number there, because it's already factored + +04:27.030 --> 04:28.860 +into the min and the max numbers. 
+ +04:28.860 --> 04:33.780 +So you can run this again, taking out min and Max to probably get a better assessment of how it weighs + +04:33.780 --> 04:35.280 +up those three models. + +04:35.880 --> 04:44.880 +Um, so that that's all it takes to build an ensemble model, because now we can use this model to take + +04:44.880 --> 04:51.870 +in these different factors and predict a price, taking the best linear combination of the models that + +04:51.870 --> 04:52.830 +we feed it. + +04:53.160 --> 04:58.890 +So first I save that to ensemble model so that we've got that captured for the future. + +04:58.890 --> 05:00.720 +We don't have to run it every time. + +05:00.720 --> 05:05.130 +And I have made a new agent called Ensemble Agent. + +05:05.160 --> 05:08.130 +Let's go and take a look at Ensemble Agent right now. + +05:08.820 --> 05:09.630 +Here it is. + +05:09.630 --> 05:11.610 +This is the code for ensemble agent. + +05:11.610 --> 05:13.230 +And it's very simple. + +05:13.500 --> 05:16.590 +Uh, it looks like I need to add some comments in here, which I will do. + +05:16.590 --> 05:20.550 +So before you get to see this yourself, uh, it needs comments. + +05:20.550 --> 05:21.450 +Bad meat. + +05:21.720 --> 05:23.700 +Uh, so sorry about that. + +05:24.060 --> 05:31.680 +In the init, we set it up by creating the three agents that it will be using for the different, uh, + +05:31.680 --> 05:33.480 +parts of its pricing. + +05:33.840 --> 05:40.380 +Uh, and we also load in its model weights the weighted combination when it comes to running the ensemble + +05:40.380 --> 05:46.560 +agent to do a price, uh, we calculate the price of the specialist by calling price. + +05:46.560 --> 05:48.270 +We call price for the frontier. + +05:48.270 --> 05:50.700 +We call price for the random forest. + +05:50.730 --> 05:55.830 +We build a data frame for X, including the min and the max. 
+ +05:55.860 --> 06:03.600 +And finally we call Model.predict to predict why that should really be Y hat if we're using data science + +06:03.600 --> 06:07.560 +speak and we return that the prediction. + +06:08.310 --> 06:11.040 +Uh, so it's hopefully crystal clear for you. + +06:11.040 --> 06:16.830 +It's simply a way of packaging up the call to our linear regression model that gives a linear combination + +06:16.830 --> 06:19.860 +of the different models that we've built before. + +06:20.010 --> 06:28.280 +And so with that of course, the next thing that you can imagine I tried out pricing the same, uh, + +06:28.310 --> 06:30.710 +the the the microphone I've got right here. + +06:30.710 --> 06:34.880 +And it came up with a number that's somewhere in the middle, which is exactly what we were expecting. + +06:34.880 --> 06:44.120 +I package it into a function ensemble processor, and then of course, I call the tester dot test with + +06:44.120 --> 06:45.020 +the ensemble. + +06:45.020 --> 06:49.490 +Now, this takes a while to run because it's calling all these different models and modal takes a while. + +06:49.490 --> 06:50.810 +So I've run it in advance. + +06:50.810 --> 06:55.310 +And if you're watching this, remember it will take a few minutes for the first one while modal warms + +06:55.310 --> 06:59.270 +up, and then it's a few seconds for each of these. + +06:59.450 --> 07:05.750 +Out they come and as you will see, there's a few reds in there. + +07:05.750 --> 07:06.650 +I will tell you. + +07:06.680 --> 07:14.030 +Somewhat disappointingly, I was really hoping this would move the needle and beat, uh, the, the, + +07:14.030 --> 07:20.450 +the amazing proprietary model that we've got, somewhat disappointingly, using this, uh, approach + +07:20.450 --> 07:27.710 +of Ensembling multiple models seems to have moved us a hair poorer, a hair worse for this test data + +07:27.740 --> 07:30.590 +set, uh, than than we were at before. 
+ +07:30.590 --> 07:35.320 +But you've got to imagine that that's that's more an artifact to the fact that it's fairly noisy. + +07:35.590 --> 07:36.970 +It's very, very close. + +07:36.970 --> 07:42.280 +It has to be an improvement that we're carrying out this ensemble of different models. + +07:42.490 --> 07:46.240 +Um, but there's clearly some more work that needs to be done here. + +07:46.270 --> 07:51.430 +Uh, the chart looks looks very nice, but there's some intercept problem there that might be that that + +07:51.430 --> 07:53.350 +intercept number was too high. + +07:53.380 --> 07:55.240 +Uh, on on what it did. + +07:55.450 --> 08:01.120 +Uh, and rather than spending a lot of time iterating over this, I think this is the time to say it's + +08:01.120 --> 08:01.930 +over to you. + +08:01.930 --> 08:07.090 +Now, I've spent a fair amount of time on this, but not so much on the ensembling technique and on + +08:07.090 --> 08:08.020 +some of these others. + +08:08.020 --> 08:15.700 +And it's wonderful to experiment with this because it's so easy to add on more terms, more serieses, + +08:15.700 --> 08:19.960 +and pass that into the linear regression as you build the ensemble. + +08:19.990 --> 08:22.120 +And this is a data scientist's dream. + +08:22.120 --> 08:22.960 +You've got data. + +08:22.990 --> 08:28.390 +You've got a clear, measurable, a clear way of determining success. + +08:28.540 --> 08:34.330 +And lots to experiment on, lots of hyperparameters and quite quick gratification. + +08:34.330 --> 08:37.520 +You can make the change and see the response very quickly. + +08:37.670 --> 08:39.920 +So you can do better than me. + +08:39.950 --> 08:41.540 +This is very much a challenge. + +08:41.540 --> 08:43.550 +You're now armed with lots of good tools. + +08:43.550 --> 08:47.000 +You may have already built a proprietary model that beats me. + +08:47.120 --> 08:53.720 +And even if not, you can, I'm sure use this ensembling technique to get ahead. 
+ +08:53.780 --> 09:00.200 +So with that, that concludes the lab work for this part before we return to the slides. + +09:00.230 --> 09:08.660 +The just to say remember that whilst this the key objective for this was not necessarily to get super + +09:08.660 --> 09:15.890 +deep on how you price products, it was to solidify your understanding of things like vector embeddings, + +09:15.890 --> 09:24.080 +rag the running different models and go from a stage of being fairly confident with this kind of material + +09:24.110 --> 09:26.720 +to being advanced and super confident with it. + +09:26.720 --> 09:28.160 +And I hope you've got there now. + +09:28.160 --> 09:33.590 +And if you haven't, go back through these notebooks and go through each cell by cell and inspect the + +09:33.590 --> 09:38.180 +outcomes and convince yourself until you are very, very confident. + +09:38.180 --> 09:41.600 +And I will see you back in the slides in the next video. diff --git a/week5/community-contributions/subtitles/srts/59670171/ja_JP.srt b/week5/community-contributions/subtitles/srts/59670171/ja_JP.srt new file mode 100755 index 0000000..d10644c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670171/ja_JP.srt @@ -0,0 +1,328 @@ +WEBVTT + +00:00.470 --> 00:10.220 +第8週2日目パート4の最終段階として、 これらすべてを統合したアンサンブル・モデルを構築する。 + +00:10.220 --> 00:16.040 +まず、 私のマイク、 Quadcast HyperXコンデンサーマイクのような製品を例にとると、 + +00:16.040 --> 00:26.330 +Specialist FrontierとRandom Forestの3つのオブジェクトがあり、 それぞれにこの製品の価格を尋ねることができます。 + +00:26.330 --> 00:28.670 +そして、 この3つの数字をご覧いただきたい。 + +00:28.670 --> 00:33.080 +この場合、 フロンティアモデルが最も真実に近いと思う。 + +00:33.080 --> 00:38.780 +前回、 スペシャリストモデルに電話したとき、 私たちとたぶん私の文章が少し違っていたように記憶している。 + +00:38.780 --> 00:40.100 +さらに良くなったと思う。 + +00:40.100 --> 00:43.940 +129だったと思う。 + +00:44.120 --> 00:45.740 +あー、 でもそうだね。 + +00:45.740 --> 00:51.830 +とにかく、 ランダムフォレストの結果はあまり良くなかったが、 他の2つは、 まあ、 妥当だったということがわかるだろう。 + +00:52.070 --> 00:55.280 +だから、 私たちが今やっていることはとてもシンプルだ。 + +00:55.280 --> 01:02.450 +まず、 
250のテストデータを取る。 + +01:02.510 --> 01:10.170 +実際にテストに使っているものとは別にするために、 1000から150までのものを選んだんだ。 + +01:10.410 --> 01:18.630 +基本的には、 それぞれの項目を取り出して、 その説明を見つけ、 スペシャリストモデル、 フロンティアモデル、 + +01:18.630 --> 01:24.930 +ランダムフォレストモデルから得られる価格を加える。 + +01:24.930 --> 01:29.700 +そして、 その商品の本当の値段を書く価格リストも持っている。 + +01:29.700 --> 01:37.890 +つまり、 GPT4から得られた独自のLLMフロンティアラグに基づく結果の専門家によるリストと、 私たちの特別なコンテキストとランダムフォレストの結果、 + +01:37.920 --> 01:47.310 +そしてグランドトゥルース(実際の数値)を加えた4つのリストが完成することになる。 + +01:47.310 --> 01:49.410 +そして、 そのすべてを構築する。 + +01:50.160 --> 01:52.770 +これから、 よくあるトリックをやってみよう。 + +01:52.770 --> 01:54.630 +本当に遊べるものなんだ。 + +01:54.630 --> 01:57.480 +あと2つ、 この中に加えようと思う。 + +01:57.510 --> 02:06.750 +そのうちの1つはMinsと呼ばれ、 3つのうちの最小値であり、 もう1つはMax'sと呼ばれ、 3つのうちの最大値である。 + +02:06.960 --> 02:13.250 +ただ、 それはもう一つのウム、 事実であり、 そこに何らかのシグナルがあるかもしれない。 + +02:13.250 --> 02:22.520 +また、 3つのモデルで最も低い見積もりと、 1つの製品で最も高い見積もりを見てみるのも有効かもしれない。 + +02:22.520 --> 02:30.140 +つまりこの時点で、 250の各製品について、 スペシャリスト、 フロンティア、 ランダムフォレスト、 この3つのうちの最小値、 + +02:30.140 --> 02:35.630 +そしてこの3つのうちの最大値の5つの結果が得られたことになる。 + +02:35.660 --> 02:47.240 +これらのデータは5つのコレクションに格納されており、 フロンティア、 ランダムフォレスト、 最小値、 最大値の5つのスペシャリストからパンダのデータフレームを作成する。 + +02:47.330 --> 02:52.340 +そして、 その価格、 グランドトゥルースをシリーズに変換する。 + +02:52.340 --> 03:01.010 +これをxと呼び、 これをyと呼ぶ。 従来の機械学習のバックグラウンドを持つ人なら誰でも知っていることだろう。 + +03:01.010 --> 03:07.670 +つまり、 線形回帰モデルをトレーニングして、 これらの異なる系列のどの加重平均が、 + +03:07.670 --> 03:20.190 +このデータに最もフィットし、 最も良い結果をもたらすか、 という単純な線形回帰を行うことができる。 + +03:20.520 --> 03:22.800 +だからそうするんだ。 + +03:22.950 --> 03:25.140 +そして、 こうなった。 + +03:25.140 --> 03:26.820 +これが係数である。 + +03:27.570 --> 03:31.980 +だから、 最低点と最高点の両方がかなり高いウェイトを占めている。 + +03:32.070 --> 03:42.690 +つまり、 一般的に言えば、 最小値と最大値の組み合わせを最もよく見ている。 + +03:42.750 --> 03:49.980 +そうすると、 LM専業メーカーのシェアは健闘しているが、 フロンティア・モデルのシェアはずっと小さい。 + +03:49.980 --> 03:53.280 +そして少々奇妙なことに、 それは実際にフロンティアと言われている。 + +03:53.310 --> 04:06.180 +ランダムフォレストにはシグナルがありますが、 それを差し引きます。 かなり大きな切片を与えて、 ランダムフォレストの数値の一部を差し引いているのがわかるでしょう。 + +04:06.300 --> 04:11.220 +これは不思議な結果で、 
ランダムフォレストの数値がそれほど良くなかったことを示している。 + +04:11.250 --> 04:15.210 +しかし、 全体的なパズルの中にそれを取り入れることは有効だと思う。 + +04:15.240 --> 04:17.790 +今、 あなたはおそらく心の中でこう思っていることだろう。 + +04:17.790 --> 04:21.960 +この2つにはランダムフォレストがすでに組み込まれていると指摘されたので、 それはできない。 + +04:21.990 --> 04:28.860 +マイナスの数字があることは、 すでに最小値と最大値に織り込まれているからだ。 + +04:28.860 --> 04:35.280 +そのため、 この3つのモデルをどのように評価するかについて、 おそらくより良い評価を得るために、 最小値と最大値を取り除いてもう一度実行することができる。 + +04:35.880 --> 04:44.880 +つまり、 アンサンブルモデルを構築するのに必要なのは、 このモデルを使ってさまざまな要素を取り込み、 + +04:44.880 --> 04:52.830 +投入したモデルの最良の線形結合をとって価格を予測することだ。 + +04:53.160 --> 04:58.890 +だから、 まずアンサンブル・モデルにそれを保存して、 将来のためにキャプチャしておくんだ。 + +04:58.890 --> 05:00.720 +毎回走る必要はない。 + +05:00.720 --> 05:05.130 +そして、 アンサンブル・エージェントという新しいエージェントを作った。 + +05:05.160 --> 05:08.130 +今すぐアンサンブル・エージェントを見に行こう。 + +05:08.820 --> 05:09.630 +これだ。 + +05:09.630 --> 05:11.610 +これはアンサンブル・エージェントのコードである。 + +05:11.610 --> 05:13.230 +そして、 それはとてもシンプルだ。 + +05:13.500 --> 05:16.590 +ええと、 ここにコメントを加える必要があるようなので、 そうしよう。 + +05:16.590 --> 05:20.550 +だから、 これを自分で見る前に、 コメントが必要なんだ。 + +05:20.550 --> 05:21.450 +悪い肉だ。 + +05:21.720 --> 05:23.700 +それは申し訳ない。 + +05:24.060 --> 05:33.480 +イニシエーションでは、 価格設定に使用する3つのエージェントを作成し、 セットアップした。 + +05:33.840 --> 05:40.380 +また、 アンサンブル・エージェントを動かして価格を算出する際には、 + +05:40.380 --> 05:46.560 +そのモデルの重みをロードして加重結合を行います。 + +05:46.560 --> 05:48.270 +フロンティアの価格をこう呼ぶ。 + +05:48.270 --> 05:50.700 +ランダムフォレストの価格を呼ぶ。 + +05:50.730 --> 05:55.830 +最小値と最大値を含むXのデータフレームを作成する。 + +05:55.860 --> 06:03.600 +そして最後にモデルを呼ぶ。 データ・サイエンスの言葉を使うなら、 なぜYになるのかを予測し、 + +06:03.600 --> 06:07.560 +その予測を返す。 + +06:08.310 --> 06:11.040 +ええと、 だから、 あなたにとって明確なものであってほしい。 + +06:11.040 --> 06:19.860 +これは単純に、 以前に構築したさまざまなモデルの線形結合を与える線形回帰モデルへの呼び出しをパッケージ化する方法である。 + +06:20.010 --> 06:30.710 +それで、 もちろん、 次に試したのは、 想像できると思うけど、 同じように価格を設定することだった。 + +06:30.710 --> 06:34.880 +その結果、 私たちが期待していた通りの中間的な数字が出た。 + +06:34.880 --> 06:45.020 +そしてもちろん、 そのアンサンブルを使ってテスターのドットテストを呼び出す。 + +06:45.020 --> 06:49.490 +モーダルは時間がかかる。 + +06:49.490 --> 06:50.810 +だから、 事前に実行したんだ。 + +06:50.810 --> 06:55.310 
+これをご覧になっている方は、 モーダルがウォームアップする間、 + +06:55.310 --> 06:59.270 +最初に数分かかることを覚えておいてください。 + +06:59.450 --> 07:05.750 +見ての通り、 赤が数匹いる。 + +07:05.750 --> 07:06.650 +私が教えてあげよう。 + +07:06.680 --> 07:14.030 +やや残念なことに、 私はこれで針が動き、 私たちが持っている素晴らしい独自モデルを打ち負かすことを本当に期待していたのですが、 + +07:14.030 --> 07:20.450 +やや残念なことに、 この、 複数のモデルをアンサンブルするというアプローチを使うと、 + +07:20.450 --> 07:30.590 +このテストデータセットでは、 以前よりも少し悪くなってしまったようです。 + +07:30.590 --> 07:35.320 +しかし、 それはむしろ、 かなりノイジーであるという事実によるものだと想像しなければならない。 + +07:35.590 --> 07:36.970 +とても近いよ。 + +07:36.970 --> 07:42.280 +このように異なるモデルのアンサンブルを実施していることは、 改善されなければならない。 + +07:42.490 --> 07:46.240 +うーん、 でも、 ここでもっとやるべきことがあるのは明らかだ。 + +07:46.270 --> 07:53.350 +ええと、 チャートの見た目はとてもいいのですが、 インターセプトの数値が高すぎるという問題があるかもしれません。 + +07:53.380 --> 07:55.240 +ええと、 何をしたかについてだけど。 + +07:55.450 --> 08:01.930 +それに、 この件で多くの時間を費やすくらいなら、 今こそ「もう終わったことだ」と言うべきだと思う。 + +08:01.930 --> 08:08.020 +さて、 これにはかなりの時間を費やしたが、 アンサンブルのテクニックや他のいくつかのテクニックにはそれほど時間をかけていない。 + +08:08.020 --> 08:15.700 +そして、 アンサンブルを構築する際に、 より多くの項や系列を追加し、 それを線形回帰に渡すのはとても簡単なので、 + +08:15.700 --> 08:19.960 +これを実験するのは素晴らしいことだ。 + +08:19.990 --> 08:22.120 +そしてこれはデータサイエンティストの夢でもある。 + +08:22.120 --> 08:22.960 +あなたはデータを持っている。 + +08:22.990 --> 08:28.390 +あなたは明確で、 測定可能で、 成功を決定する明確な方法を持っている。 + +08:28.540 --> 08:34.330 +ハイパーパラメーターも豊富で、 すぐに満足できる。 + +08:34.330 --> 08:37.520 +変更を加えれば、 すぐに反応を見ることができる。 + +08:37.670 --> 08:39.920 +だから君は僕よりうまくやれる。 + +08:39.950 --> 08:41.540 +これは非常にチャレンジングなことだ。 + +08:41.540 --> 08:43.550 +あなたは今、 たくさんの良いツールで武装している。 + +08:43.550 --> 08:47.000 +あなたはすでに私を打ち負かす独自のモデルを構築しているかもしれない。 + +08:47.120 --> 08:53.720 +また、 そうでなくても、 このアンサンブルのテクニックを使って前に進むことはできるはずだ。 + +08:53.780 --> 09:00.200 +それでは、 スライドに戻る前に、 このパートのラボワークを終わります。 + +09:00.230 --> 09:08.660 +ただ覚えておいてほしいのは、 今回の主な目的は、 必ずしも商品の値付け方法について超深入りすることではなく、 + +09:08.660 --> 09:15.890 +ベクトル埋め込みやさまざまなモデルの実行について理解を深め、 この種の材料にかなり自信がある段階から、 + +09:15.890 --> 09:26.720 +上級者になって超自信が持てるようになることだったということだ。 + +09:26.720 --> 09:28.160 +そして今、 そこにたどり着いたことを願うよ。 + +09:28.160 --> 09:33.590 +そして、 
もしそうでないのなら、 このノートを見返して、 一つひとつの細胞を調べ、 結果を検証し、 + +09:33.590 --> 09:38.180 +とてもとても自信が持てるまで自分自身を納得させるのだ。 + +09:38.180 --> 09:41.600 +では、 また次のビデオでお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/59670171/ko_KR.srt b/week5/community-contributions/subtitles/srts/59670171/ko_KR.srt new file mode 100755 index 0000000..a92be86 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670171/ko_KR.srt @@ -0,0 +1,388 @@ +WEBVTT + +00:00.470 --> 00:07.640 +8주 차 둘째 날의 4부 작업의 마지막 단계로 이 모든 걸 하나로 합칠 앙상블 + +00:07.670 --> 00:10.220 +모델을 만들 거예요 + +00:10.220 --> 00:16.040 +먼저 보여드릴 것은 여기 제 마이크 같은 것을 가지고 쿼드캐스트 하이퍼X 콘덴서 + +00:16.040 --> 00:23.630 +마이크를 가지고 세 가지 오브젝트가 있습니다 스페셜 프론티어 그리고 랜덤 포레스트입니다 각각에게 + +00:23.630 --> 00:26.330 +제품의 가격을 물어볼 수 있죠 + +00:26.330 --> 00:28.670 +Get it, get it, get it! 숫자 3개가 보이죠 + +00:28.670 --> 00:33.080 +이 경우엔 개척지 모델이 진실에 가장 가까운 것 같아요 + +00:33.080 --> 00:37.730 +제 기억으로는 지난번에 스페셜리스트 모델을 호출했을 때 텍스트가 약간 달랐던 + +00:37.730 --> 00:38.780 +것 같아요 + +00:38.780 --> 00:40.100 +더 좋아진 것 같아요 + +00:40.100 --> 00:43.940 +129개 정도 있어요 더 가깝죠 + +00:44.120 --> 00:45.740 +네, 맞아요 + +00:45.740 --> 00:51.830 +어쨌든, 아무 숲이나 고른 건 별로였지만 다른 두 개는 괜찮았어요 + +00:52.070 --> 00:55.280 +지금 하는 일은 간단해요 + +00:55.280 --> 01:02.450 +우선 250개의 데이터 포인트를 선별해서 봐요 + +01:02.510 --> 01:08.460 +1,000에서 150까지 중에서 골랐어요 실제 테스트에 쓰는 것과 + +01:08.460 --> 01:10.170 +구별하려고요 + +01:10.410 --> 01:18.630 +기본적으로 각각의 물품에 대한 설명을 찾아서 추가합니다. 그리고 나서 전문가용 모델, + +01:18.630 --> 01:24.930 +프런티어 모델 임의의 숲 모델에서 얼마를 받을 수 있을까요? 
+ +01:24.930 --> 01:29.700 +그리고 가격 목록도 있어요 해당 품목의 실제 가격을 입력하는 거죠 Put it's go + +01:29.700 --> 01:37.890 +그래서 이 4개의 리스트가 GPT 4에서 나온 LLM 프런티어 래그 기반 결과와 추가 컨텍스트와 + +01:37.920 --> 01:47.310 +임의 숲 결과 그리고 실제 수치를 포함한 LLM 프런티어 래그의 결과로 마무리되는데요 + +01:47.310 --> 01:49.410 +그래서 그걸 다 만들었죠 + +01:50.160 --> 01:52.770 +이제 트릭을 보여드릴게요 꽤 흔한 거죠 + +01:52.770 --> 01:54.630 +갖고 놀 수 있는 물건이죠 + +01:54.630 --> 01:57.480 +여기에 두 개를 더 넣을 거예요 + +01:57.510 --> 02:04.830 +하나는 최소라고 하는데 이 셋 중 최저이고 다른 건 맥스라고 하는데 + +02:04.830 --> 02:06.750 +이 셋 중 최대죠 + +02:06.960 --> 02:13.250 +그냥 또 다른 사실인데 신호가 잡힐지도 몰라요 + +02:13.250 --> 02:18.470 +세 가지 모델이 가장 낮게 추정하는 값과 한 제품의 가장 높은 + +02:18.470 --> 02:22.520 +값이 얼마인지 살펴보면 유용할 거예요 + +02:22.520 --> 02:30.140 +현재 250개 제품마다 5개의 결과가 나왔어요 전문가용, 개척자용 + +02:30.140 --> 02:35.630 +무작위 숲용, 최소 3개 최대 3개 결과죠 + +02:35.660 --> 02:44.840 +현재 다섯 개의 컬렉션이 있는데 저는 개척지 숲과 민, 맥스를 바탕으로 판다스의 데이터 + +02:44.870 --> 02:47.240 +프레임을 만들어요 + +02:47.330 --> 02:52.340 +저는 가격과 그 기본 진실을 시리즈로 만들었어요 + +02:52.340 --> 03:01.010 +이건 엑스 이건 Y라고 부를게요 전통적인 머신 러닝 배경의 사람들은 누구나 익숙할 이름이죠 + +03:01.010 --> 03:07.670 +6주 차에 했던 것과 똑같은 걸 할 수 있어요 선형 회귀 모델을 + +03:07.670 --> 03:14.240 +훈련해 보죠 간단한 선형 회귀로 시리즈별로 가정한 평균이 + +03:14.250 --> 03:20.190 +데이터에 가장 잘 맞는다는 걸 의미해요 + +03:20.520 --> 03:22.800 +그래서 그렇게 했죠 + +03:22.950 --> 03:25.140 +Get it get it 그리고 이게 나왔어요 + +03:25.140 --> 03:26.820 +이게 계수예요 + +03:27.570 --> 03:31.980 +gets in the Max 둘 다 상당히 무겁죠 + +03:32.070 --> 03:39.120 +일반적으로 말하자면 가장 많이 고려하는 건 최저와 + +03:39.120 --> 03:42.690 +최대치를 조합한 거예요 + +03:42.750 --> 03:49.020 +달 착륙선 전용 스페셜리스트 지분을 상당량 차지하고 프론티어 모델보다 훨씬 적은 지분을 + +03:49.020 --> 03:49.980 +차지하죠 + +03:49.980 --> 03:53.280 +좀 이상하지만 개척지라고도 하죠 + +03:53.310 --> 04:00.990 +랜덤 포레스트에 신호가 있는데 그걸 빼면... 
꽤 큰 신호가 가로채고 + +04:00.990 --> 04:06.180 +임의 숲 숫자의 일부를 빼는 게 보이죠 + +04:06.300 --> 04:11.220 +흥미로운 결과예요 임의의 숲 숫자가 별로 좋지 않았다는 걸 의미하죠 + +04:11.250 --> 04:15.210 +하지만 전체적인 퍼즐에 포함하는 게 유용할 것 같아요 + +04:15.240 --> 04:17.790 +이제 머릿속으로 생각하실 거예요 + +04:17.790 --> 04:21.960 +이미 아무 숲이나 넣었다고 하니까 못 하겠어요 + +04:21.990 --> 04:27.030 +음수가 있다는 사실을 너무 많이 해석할 수는 없어요 최소와 최대 수에 이미 + +04:27.030 --> 04:28.860 +포함되어 있으니까요 + +04:28.860 --> 04:33.780 +다시 실행해 보세요 min과 Max를 제거하면 세 모델의 무게를 더 잘 + +04:33.780 --> 04:35.280 +알 수 있을 거예요. + +04:35.880 --> 04:44.880 +앙상블 모델은 이렇게 만들면 돼요 이제 이 모델을 이용해서 여러 요인을 취하고 가격을 + +04:44.880 --> 04:52.830 +예측할 수 있거든요 우리가 제공하는 모델의 최상의 선형 조합을 취해서요 + +04:53.160 --> 04:58.890 +그래서 우선 앙상블 모델에 저장해뒀어요 그걸 미래에 반영하려고요 + +04:58.890 --> 05:00.720 +매번 할 필요는 없어요 + +05:00.720 --> 05:05.130 +앙상블 에이전트라는 새 에이전트도 만들었죠 + +05:05.160 --> 05:08.130 +앙상블 에이전트를 보러 가죠 지금 당장요 + +05:08.820 --> 05:09.630 +여기 있네요 + +05:09.630 --> 05:11.610 +앙상블 에이전트 코드예요 + +05:11.610 --> 05:13.230 +아주 간단해요 + +05:13.500 --> 05:16.590 +여기에 주석을 달아야 할 것 같은데 달게요 + +05:16.590 --> 05:20.550 +Get it get it.com/D렉터/D렉터/D렉터/D렉터/D렉터/D렉터/D렉터/D렉터/D렉터/D렉터/D렉터/D렉터/D렉터/D렉터/D렉터/D렉터/D렉터/D렉터/D렉터/D렉터/D렉터/DE 직접 보기 전에 의견을 말씀해 주세요 + +05:20.550 --> 05:21.450 +상한 고기요 + +05:21.720 --> 05:23.700 +정말 죄송해요 + +05:24.060 --> 05:31.680 +init에서 설정하는 건 3가지 에이전트를 생성하는 거예요 가격 책정에서 각각 + +05:31.680 --> 05:33.480 +사용할 요소들이죠 + +05:33.840 --> 05:40.380 +어, 그리고 앙상블 에이전트가 가격을 불러올 때 그 제품의 모델 비율을 + +05:40.380 --> 05:46.560 +추가해서 가격을 불러오는 것으로 전문가의 가격을 계산해요 + +05:46.560 --> 05:48.270 +개척지를 위한 가격이죠 + +05:48.270 --> 05:50.700 +임의의 숲을 위한 가격이에요 + +05:50.730 --> 05:55.830 +X를 위한 데이터 프레임을 만들죠 최소와 최대를 포함해서요 + +05:55.860 --> 06:03.600 +마지막으로 Model을 호출해요 왜 그게 돼야 하는지 예측하기 위한 예측을 데이터 사이언스 + +06:03.600 --> 06:07.560 +스피크를 사용한다면요 그걸 예측으로 반환하는 거죠 + +06:08.310 --> 06:11.040 +명확하게 이해하셨길 바라요 + +06:11.040 --> 06:16.830 +선형 회귀 모델에 호출을 포장하는 방법일 뿐이에요 전에 만든 다양한 + +06:16.830 --> 06:19.860 +모델의 선형 조합을 제공하는 거죠 + +06:20.010 --> 06:28.280 +이제 다음으로 할 일은 같은 가격으로 가격을 매겨보는 건데요 여기 마이크를 + 
+06:28.310 --> 06:30.710 +가져왔어요 + +06:30.710 --> 06:34.880 +그런데 중간쯤 되는 액수가 나왔어요 우리가 예상했던 금액이었죠 + +06:34.880 --> 06:45.020 +함수 앙상블 프로세서에 패키지로 넣고 테스터 닷테스트를 앙상블이라고 불러요 + +06:45.020 --> 06:49.490 +실행에 시간이 좀 걸리죠 다양한 모델을 호출하고 모듈도 시간이 걸리니까요 + +06:49.490 --> 06:50.810 +미리 실행해뒀어요 + +06:50.810 --> 06:55.310 +이걸 보고 계신다면 첫 번째 모듈은 워밍업하는 데 몇 분 + +06:55.310 --> 06:59.270 +걸리고 각각 몇 초씩 걸린다는 걸 기억하세요 + +06:59.450 --> 07:05.750 +여기 보시면 붉은 연어도 몇 마리 있어요 + +07:05.750 --> 07:06.650 +말해 줄게요 + +07:06.680 --> 07:14.030 +실망스럽습니다 이걸로 뭔가 바뀌길 바랐거든요 기존의 특허 + +07:14.030 --> 07:20.450 +모델을 깨부수길 바랐는데 여러 모델을 조합하는 방식을 + +07:20.450 --> 07:27.710 +쓰다 보니 테스트 데이터 세트가 전보다 조금 더 약화되고 악화된 + +07:27.740 --> 07:30.590 +것 같아요 + +07:30.590 --> 07:35.320 +하지만 생각해 보세요 사실 꽤 시끄러운 소리에 더 가까운 인공물이에요 + +07:35.590 --> 07:36.970 +아주 근접했어요 + +07:36.970 --> 07:42.280 +다양한 모델을 조합해서 작업하는 건 더 나은 결과가 나와야 해요 + +07:42.490 --> 07:46.240 +하지만 아직 손볼 곳이 많아요 + +07:46.270 --> 07:51.430 +해도는 좋아 보이지만 가로막기 문제가 있어요 가로막기 숫자가 + +07:51.430 --> 07:53.350 +너무 높았던 것 같아요 + +07:53.380 --> 07:55.240 +뭘 했는지에 대해서요 + +07:55.450 --> 08:01.120 +이 문제를 반복하는 데 많은 시간을 쓰기보다는 이제 당신에게 넘기겠다고 말할 때인 + +08:01.120 --> 08:01.930 +것 같아요 + +08:01.930 --> 08:07.090 +이 동작에 시간을 많이 들였지만 조합 기술이나 다른 동작에는 별로 투자하지 + +08:07.090 --> 08:08.020 +않았어요 + +08:08.020 --> 08:15.700 +이런 실험을 하는 건 멋진 일이에요 더 많은 시리즈들을 추가할 수 있고 앙상블을 만들면서 + +08:15.700 --> 08:19.960 +선형 회귀 단계로 넘어가기 쉽거든요 + +08:19.990 --> 08:22.120 +데이터 과학자의 꿈이죠 + +08:22.120 --> 08:22.960 +데이터가 있잖아요 + +08:22.990 --> 08:28.390 +성공을 결정하는 명확하고, 측정 가능한 명확한 방법이 있어요 + +08:28.540 --> 08:34.330 +하이퍼파라미터도 많고 실험할 것도 많아요 + +08:34.330 --> 08:37.520 +변화를 주면 반응이 아주 빨라요 + +08:37.670 --> 08:39.920 +나보다 잘할 수 있어요 + +08:39.950 --> 08:41.540 +정말 어려운 과제예요 + +08:41.540 --> 08:43.550 +이제 좋은 도구들을 많이 갖게 되셨어요 + +08:43.550 --> 08:47.000 +날 이길 만한 독점 모델을 이미 구축했을지도 모르죠 + +08:47.120 --> 08:53.720 +안 되더라도 이 조합 기술로 get up 할 수 있겠죠 + +08:53.780 --> 09:00.200 +이것으로 슬라이드로 돌아가기 전에 이 부분의 랩 작업을 마치죠 + +09:00.230 --> 09:08.660 +다시 한 번 말씀드리지만 이 강의의 핵심 목표는 제품 가격에 대해 깊이 
파고드는 것이 아닙니다 + +09:08.660 --> 09:15.890 +벡터 엠베딩 같은 것에 대한 이해를 확고히 하는 것입니다 다양한 모델을 래그화하여 + +09:15.890 --> 09:26.720 +이런 물질에 대해 꽤나 자신 있는 단계에서 더 발전하고 매우 자신 있는 단계로 가는 것이죠 + +09:26.720 --> 09:28.160 +여러분도 그렇게 됐길 바라요 + +09:28.160 --> 09:33.590 +경험이 없다면 이 공책을 다시 훑어보며 세포 하나하나를 살펴보고 아주 + +09:33.590 --> 09:38.180 +확신할 수 있을 때까지 결과를 점검하고 자신을 설득하세요 + +09:38.180 --> 09:41.600 +다음 비디오에서 슬라이드에서 다시 만나요 diff --git a/week5/community-contributions/subtitles/srts/59670259/en_US.srt b/week5/community-contributions/subtitles/srts/59670259/en_US.srt new file mode 100755 index 0000000..b0dc6e3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670259/en_US.srt @@ -0,0 +1,79 @@ +WEBVTT + +00:00.830 --> 00:01.970 +It's remarkable. + +00:01.970 --> 00:04.970 +But you are now at the 95% point. + +00:04.970 --> 00:07.820 +There's 5% remaining of this course. + +00:07.880 --> 00:13.850 +Uh, maybe it's not remarkable because hopefully you're feeling like you've built up such strong skills + +00:13.850 --> 00:15.800 +and you're getting really experienced. + +00:15.830 --> 00:21.050 +You probably saw some of the problems I was having a moment ago, before I did, and knew exactly how + +00:21.050 --> 00:22.010 +to sort them out. + +00:22.010 --> 00:25.880 +So what you can now do is this long list of things. + +00:25.880 --> 00:29.150 +I could no longer fit it into three bullets, and I struggled to fit it into four. + +00:29.180 --> 00:30.590 +There's far too much stuff. + +00:30.590 --> 00:35.660 +I had to put structured outputs in here in the first bullet, just to try and fit it in, but there's + +00:35.660 --> 00:41.090 +so much that you can do across closed source and open source models, calling them in all sorts of ways + +00:41.090 --> 00:43.700 +now, including productionizing them. + +00:43.850 --> 00:50.990 +Uh, and now we change topic to talk about agent workflows Agentic frameworks. + +00:51.110 --> 00:55.100 +This is something that we touched on in previous weeks. 
+ +00:55.100 --> 01:00.380 +Uh, we, we had, we used uh, function calling, which we said at the time is a kind of light version + +01:00.380 --> 01:07.370 +of, of agent workflows where you do have, um, a task that's being divided into smaller tasks with + +01:07.370 --> 01:10.430 +specific models carrying out tasks. + +01:10.430 --> 01:15.350 +And as you take that up a notch and start to get to an environment like we've been talking about this + +01:15.350 --> 01:20.660 +week, where you have multiple models that are all working independently, potentially being organized + +01:20.660 --> 01:27.170 +by a planning task, a planning agent, and then put together in some sort of a framework which has + +01:27.170 --> 01:34.340 +memory and which can, uh, live beyond outside the context of a human necessarily interacting with + +01:34.340 --> 01:34.670 +it. + +01:34.700 --> 01:41.960 +You're getting to a full on agentic workflow, and that is what we will be talking about next time. + +01:41.960 --> 01:43.310 +And I'm really excited about it. + +01:43.310 --> 01:44.540 +And I will see you then. 
diff --git a/week5/community-contributions/subtitles/srts/59670259/ja_JP.srt b/week5/community-contributions/subtitles/srts/59670259/ja_JP.srt new file mode 100755 index 0000000..7accfa9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670259/ja_JP.srt @@ -0,0 +1,58 @@ +WEBVTT + +00:00.830 --> 00:01.970 +驚くべきことだ。 + +00:01.970 --> 00:04.970 +しかし、 あなたは今、 95%の地点にいる。 + +00:04.970 --> 00:07.820 +このコースは残り5%。 + +00:07.880 --> 00:15.800 +でも、 そのような強いスキルを身につけ、 経験を積んでいることを実感しているのであれば、 それは注目に値しないことかもしれない。 + +00:15.830 --> 00:22.010 +おそらくあなたは、 私がさっきまで抱えていた問題のいくつかを、 私よりも先に見抜いていて、 それを解決する方法を正確に知っていたのだろう。 + +00:22.010 --> 00:25.880 +だから今できることは、 この長いリストだ。 + +00:25.880 --> 00:29.150 +もはや3つの弾丸に収めることはできず、 4つの弾丸に収めるのに苦労した。 + +00:29.180 --> 00:30.590 +物が多すぎる。 + +00:30.590 --> 00:35.660 +構造化されたアウトプットは、 最初の箇条書きでここに入れなければならなかったが、 + +00:35.660 --> 00:43.700 +クローズド・ソースやオープン・ソースのモデルでできることはたくさんある。 + +00:43.850 --> 00:50.990 +さて、 話題を変えてエージェントのワークフローについて話そう。 エージェントのフレームワークについてだ。 + +00:51.110 --> 00:55.100 +これは前週にも触れたことだ。 + +00:55.100 --> 01:00.380 +当時は、 エージェント・ワークフローの軽いバージョンだと言っていたんだが、 + +01:00.380 --> 01:10.430 +タスクを小さなタスクに分割して、 特定のモデルがタスクを実行するんだ。 + +01:10.430 --> 01:15.350 +そして、 それをさらに発展させて、 今週話してきたような環境になると、 + +01:15.350 --> 01:34.670 +複数のモデルがそれぞれ独立して働き、 計画タスクや計画エージェントによって組織化される可能性があり、 それをある種のフレームワークにまとめる。 + +01:34.700 --> 01:41.960 +次回はその話をしよう。 + +01:41.960 --> 01:43.310 +そして、 本当に興奮している。 + +01:43.310 --> 01:44.540 +その時にまた会おう。 diff --git a/week5/community-contributions/subtitles/srts/59670259/ko_KR.srt b/week5/community-contributions/subtitles/srts/59670259/ko_KR.srt new file mode 100755 index 0000000..107a56e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670259/ko_KR.srt @@ -0,0 +1,76 @@ +WEBVTT + +00:00.830 --> 00:01.970 +놀라워요 + +00:01.970 --> 00:04.970 +하지만 95%가 됐어요 + +00:04.970 --> 00:07.820 +코스의 5%가 남았어요 + +00:07.880 --> 00:13.850 +그렇게 대단한 건 아닐 거예요 여러분이 강한 기술을 익혔다고 느끼고 경험이 쌓였다고 + +00:13.850 --> 00:15.800 +느끼길 바라니까요 + +00:15.830 --> 00:21.050 +아마 조금 
전에 제가 겪고 있는 문제를 보셨을 거예요 어떻게 해결해야 할지 정확히 + +00:21.050 --> 00:22.010 +알고 계셨겠죠 + +00:22.010 --> 00:25.880 +이제 할 수 있는 건 이 긴 목록이에요 + +00:25.880 --> 00:29.150 +총알 세 발은 더 이상 못 넣겠고 네 발은 겨우 넣었어요 + +00:29.180 --> 00:30.590 +너무 많아요 + +00:30.590 --> 00:35.660 +첫 번째 강의에서는 구조화된 출력물들을 넣어야 했습니다. 하지만 폐쇄 소스와 + +00:35.660 --> 00:41.090 +오픈 소스 모델을 통해 할 수 있는 일이 너무 많습니다. 이제는 프로덕션화를 포함해서 + +00:41.090 --> 00:43.700 +모든 방법으로 호출할 수 있죠. + +00:43.850 --> 00:50.990 +이제 주제를 바꿔서 에이전트 워크플로 에이전틱 프레임워크에 대해 얘기해보죠 + +00:51.110 --> 00:55.100 +이건 지난 몇 주 동안 다뤘던 거예요 + +00:55.100 --> 01:00.380 +함수 호출을 사용했는데 에이전트 워크플로의 + +01:00.380 --> 01:07.370 +가벼운 버전이라고 말씀드렸죠 작업을 작은 작업으로 나누어 특정 + +01:07.370 --> 01:10.430 +모델이 수행하는 거죠 + +01:10.430 --> 01:15.350 +그 수준을 한 단계 높이고 이번 주에 얘기했던 환경으로 + +01:15.350 --> 01:20.660 +넘어가면 여러 모델이 독립적으로 작동하고 플래닝 작업이나 + +01:20.660 --> 01:27.170 +플래닝 에이전트에 의해 조직될 수 있죠 그리고 메모리가 있는 프레임워크로 + +01:27.170 --> 01:34.670 +구성되어 인간의 맥락 밖에서 상호 작용을 할 수 있어요 + +01:34.700 --> 01:41.960 +에이전틱 워크플로우에 완전히 접근하고 있어요 다음 시간에 얘기할 게 바로 그거죠 + +01:41.960 --> 01:43.310 +정말 기대돼요 + +01:43.310 --> 01:44.540 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59670369/en_US.srt b/week5/community-contributions/subtitles/srts/59670369/en_US.srt new file mode 100755 index 0000000..f017cb7 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670369/en_US.srt @@ -0,0 +1,166 @@ +WEBVTT + +00:00.560 --> 00:06.710 +It is terrific that you're hanging on in there and making such great progress with this course. + +00:06.710 --> 00:17.180 +As we enter the final few days of building really important expertise on the topic for today, for week + +00:17.210 --> 00:17.630 +eight. + +00:17.660 --> 00:25.910 +Day three is about continuing to strengthen, to upskill, building one more skill, but also resummarize + +00:25.970 --> 00:28.700 +revise some of the stuff that we've done in the past. + +00:28.700 --> 00:30.620 +So today we're going to look at something new. 
+ +00:30.650 --> 00:35.630 +Structured outputs, which is something actually that is quite a recent innovation, a way that you + +00:35.630 --> 00:41.000 +can tell frontier models that you want them to respond according to a particular specification. + +00:41.000 --> 00:42.500 +So we'll be doing that today. + +00:42.500 --> 00:48.950 +And we're also just going to be doing more work experience with frontier models, carrying out something + +00:48.950 --> 00:54.140 +which is going to be a throwback, a callback to the first week, because we're going to be doing some + +00:54.140 --> 01:01.040 +internet scrappage, as we had done in the past, and using frontier models to help synthesize data. + +01:01.070 --> 01:04.220 +You remember we wrote a summarizer A way back. + +01:04.250 --> 01:07.610 +The Reader's Digest of the internet was our week. + +01:07.610 --> 01:10.310 +One day, one instant gratification. + +01:10.460 --> 01:13.070 +Well, we're taking that a couple of notches further. + +01:13.400 --> 01:15.980 +Um, so that's the plan for today. + +01:15.980 --> 01:21.740 +And a lot of this, again, is going to be about revising and building and experimenting. + +01:21.980 --> 01:26.270 +So let me just say a few words about structured outputs. + +01:26.300 --> 01:27.590 +So structured outputs. + +01:27.590 --> 01:30.710 +You remember in the past we've used JSON generation. + +01:30.710 --> 01:35.960 +We've we've said that we want the model to respond with an output format in JSON. + +01:35.960 --> 01:40.070 +And then in the prompt we describe exactly what that JSON should look like. + +01:40.070 --> 01:43.070 +And it's not 100% reliable. + +01:43.100 --> 01:44.600 +It's actually very good. + +01:44.600 --> 01:50.270 +It's it's uh, frequently, uh, if not almost all the time will respond with JSON. 
+ +01:50.540 --> 01:55.580 +But where it starts to, to go wonky is if you've got really complicated objects that you need it to + +01:55.610 --> 02:03.680 +respond in, um, it will after a while, potentially hallucinate in some parts or give back wrong formats. + +02:03.680 --> 02:10.620 +So the idea of structured outputs was to be more directive about specifying exactly how the model should + +02:10.620 --> 02:11.490 +respond. + +02:11.490 --> 02:19.500 +And the way you do that is you define the response with a class, with a Python class, and it's actually + +02:19.500 --> 02:24.510 +going to be a class which is going to be a subclass of something called base model from Pydantic, which + +02:24.510 --> 02:28.800 +you may have already had experience with if you're from an engineering background, but don't worry. + +02:28.800 --> 02:30.240 +If not, I will show you. + +02:30.510 --> 02:36.990 +Uh, you make a subclass of base model, and you use that to describe exactly what you're looking for. + +02:37.110 --> 02:46.170 +Um, and then you specify that class when you call OpenAI, and it will create an instance of that class + +02:46.170 --> 02:47.970 +in what it sends back to you. + +02:48.480 --> 02:54.870 +Um, and so that's the, the idea, it's useful, as I say, for generating data in precisely a structure + +02:54.870 --> 02:55.980 +that you need. + +02:56.010 --> 03:03.210 +It needs to be compared with an alternative approach, which is use of tools function calling that we + +03:03.210 --> 03:05.580 +looked at again some time ago. + +03:05.760 --> 03:11.980 +Um, and they, they, they both they're quite similar techniques for ensuring that a particular, um, + +03:11.980 --> 03:16.840 +uh, type of, of structure comes back in the response from the model. + +03:16.840 --> 03:19.060 +And there's pros and cons of both of them. 
+ +03:19.390 --> 03:26.020 +Generally speaking, the recommendation is that if you are going to be hooking up your model directly + +03:26.020 --> 03:31.930 +to application code so that it's going to be making calls to functions which need to have a particular + +03:31.930 --> 03:35.980 +method signature, then it's better to use function calling and tools. + +03:35.980 --> 03:41.800 +That is the the better model, because then it will absolutely respond according to that JSON structure + +03:41.800 --> 03:46.390 +that you've defined with the right parameters for calling your function. + +03:46.420 --> 03:53.140 +If what you're looking at is to try and generate data in a particular format for downstream consumption + +03:53.140 --> 03:56.860 +or something, uh, then structured outputs is the way. + +03:56.980 --> 04:01.900 +Uh, so those are some of the pros and cons, and it's something that you get a feel for after you've + +04:01.930 --> 04:07.060 +tried it for a while and get to appreciate when one performs better than the other. + +04:07.150 --> 04:10.480 +But with that introduction, let's head over to JupyterLab. + +04:10.480 --> 04:12.700 +We're going to try it out for ourselves. 
diff --git a/week5/community-contributions/subtitles/srts/59670369/ja_JP.srt b/week5/community-contributions/subtitles/srts/59670369/ja_JP.srt new file mode 100755 index 0000000..c448715 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670369/ja_JP.srt @@ -0,0 +1,139 @@ +WEBVTT + +00:00.560 --> 00:06.710 +あなたがこのコースで頑張り、 大きく前進しているのは素晴らしいことだ。 + +00:06.710 --> 00:17.630 +8週目の今日のトピックについて、 本当に重要な専門知識を構築する最後の数日間に入った。 + +00:17.660 --> 00:28.700 +日目は、 引き続き強化し、 スキルアップを図り、 もうひとつスキルを身につける。 + +00:28.700 --> 00:30.620 +だから今日は新しいものを見てみよう。 + +00:30.650 --> 00:41.000 +構造化出力というのは、 実はかなり最近の革新的なもので、 フロンティアモデルに特定の仕様に従って反応させたいことを伝えることができる方法だ。 + +00:41.000 --> 00:42.500 +だから、 今日はそうするつもりだ。 + +00:42.500 --> 00:48.950 +また、 フロンティア・モデルを使ったワークエクスペリエンスをさらに行い、 + +00:48.950 --> 00:54.140 +過去に行ったようなインターネット上でのスクラップを行い、 + +00:54.140 --> 01:01.040 +フロンティア・モデルを使ってデータを統合する。 + +01:01.070 --> 01:04.220 +私たちが昔、 サマライザーAを書いたのを覚えているだろう。 + +01:04.250 --> 01:07.610 +インターネットのリーダーズ・ダイジェストが私たちの週だった。 + +01:07.610 --> 01:10.310 +ある日、 即座に満足した。 + +01:10.460 --> 01:13.070 +まあ、 我々はそれをさらに2段階ほど進めている。 + +01:13.400 --> 01:15.980 +ええと、 それが今日のプランなんだ。 + +01:15.980 --> 01:21.740 +そしてその多くは、 やはり修正と構築、 そして実験になるだろう。 + +01:21.980 --> 01:26.270 +そこで、 構造化されたアウトプットについて少し言わせてほしい。 + +01:26.300 --> 01:27.590 +つまり、 構造化されたアウトプットだ。 + +01:27.590 --> 01:30.710 +過去にJSON生成を使っていたのを覚えているだろうか。 + +01:30.710 --> 01:35.960 +モデルにJSONの出力フォーマットで応答させたいと言いました。 + +01:35.960 --> 01:40.070 +そして、 プロンプトの中で、 そのJSONがどのように見えるべきかを正確に記述する。 + +01:40.070 --> 01:43.070 +それに100%信頼できるものでもない。 + +01:43.100 --> 01:44.600 +実際、 とてもいいんだ。 + +01:44.600 --> 01:50.270 +JSONで応答することが多いんだ。 + +01:50.540 --> 01:55.580 +しかし、 それがおかしくなり始めるのは、 + +01:55.610 --> 02:03.680 +本当に複雑なオブジェクトに反応させる必要がある場合だ。 + +02:03.680 --> 02:11.490 +つまり、 構造化出力のアイデアは、 モデルがどのように反応すべきかを正確に指定することをより指示的にすることだった。 + +02:11.490 --> 02:19.500 +Pythonのクラスを使ってレスポンスを定義するのですが、 実はこのクラスはPydanticのbase + +02:19.500 --> 02:28.800 +modelというもののサブクラスになります。 + +02:28.800 --> 02:30.240 +そうでなければ、 お見せしましょう。 + 
+02:30.510 --> 02:36.990 +ベースモデルのサブクラスを作り、 そのサブクラスを使って、 探しているものを正確に記述するんだ。 + +02:37.110 --> 02:47.970 +そして、 OpenAIを呼び出すときにそのクラスを指定すると、 OpenAIはそのクラスのインスタンスを作成してあなたに送り返します。 + +02:48.480 --> 02:55.980 +つまり、 必要な構造のデータを正確に生成するのに便利なんだ。 + +02:56.010 --> 03:05.580 +これは、 別のアプローチと比較する必要がある。 + +03:05.760 --> 03:11.980 +この2つは、 モデルからの反応に特定の、 ええと、 ええと、 構造のタイプが戻ってくるようにするための、 + +03:11.980 --> 03:16.840 +よく似たテクニックなんだ。 + +03:16.840 --> 03:19.060 +そして、 そのどちらにも長所と短所がある。 + +03:19.390 --> 03:26.020 +一般的に言えば、 モデルをアプリケーションコードに直接接続し、 特定のメソッドシグネチャを持つ必要がある関数を呼び出すのであれば、 + +03:26.020 --> 03:35.980 +関数呼び出しとツールを使うことをお勧めします。 + +03:35.980 --> 03:41.800 +なぜなら、 関数を呼び出すための適切なパラメータを定義したJSON構造に従って、 + +03:41.800 --> 03:46.390 +絶対に応答してくれるからだ。 + +03:46.420 --> 03:53.140 +もし、 下流で使用するために特定のフォーマットでデータを生成しようとするのであれば、 + +03:53.140 --> 03:56.860 +構造化されたアウトプットが適している。 + +03:56.980 --> 04:01.900 +長所と短所を挙げればきりがないし、 しばらく試してみて、 + +04:01.930 --> 04:07.060 +どちらが優れているかがわかるようになるものだ。 + +04:07.150 --> 04:10.480 +それではJupyterLabに向かいましょう。 + +04:10.480 --> 04:12.700 +自分たちで試してみるつもりだ。 diff --git a/week5/community-contributions/subtitles/srts/59670369/ko_KR.srt b/week5/community-contributions/subtitles/srts/59670369/ko_KR.srt new file mode 100755 index 0000000..42b3e25 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670369/ko_KR.srt @@ -0,0 +1,160 @@ +WEBVTT + +00:00.560 --> 00:06.710 +포기하지 않고 코스를 잘 통과해서 정말 대단해요 + +00:06.710 --> 00:17.630 +이제 마지막 며칠이 남았네요 오늘 8주 차를 위한 중요한 전문 지식을 쌓아야 해요 + +00:17.660 --> 00:25.910 +셋째 날은 계속해서 실력을 강화하고 한 단계 발전시키면서 기존 기술을 + +00:25.970 --> 00:28.700 +수정하는 날이에요 + +00:28.700 --> 00:30.620 +오늘은 새로운 걸 살펴볼 거예요 + +00:30.650 --> 00:35.630 +구조적인 출생 작품이란 최근에 나온 혁신으로 개척 모델에 + +00:35.630 --> 00:41.000 +특정 기준에 따라 반응하라고 말할 수 있는 방법이죠 + +00:41.000 --> 00:42.500 +오늘 할 거예요 + +00:42.500 --> 00:48.950 +또한 프론티어 모델과 함께 작업 경험을 더 쌓을 거예요 첫 주의 콜백 같은 + +00:48.950 --> 00:54.140 +걸 작업하는 거죠 과거에 작업했던 것처럼 인터넷 스크랩페이지를 + +00:54.140 --> 01:01.040 +만들 건데 프론티어 모델을 이용해 데이터를 합성할 거예요 + +01:01.070 --> 01:04.220 +예전에 요약본 쓴 
거 기억하시죠? + +01:04.250 --> 01:07.610 +인터넷 독자 다이제스트는 우리의 한 주였어요 + +01:07.610 --> 01:10.310 +어느 날 갑자기 만족감이 밀려왔죠 + +01:10.460 --> 01:13.070 +우린 그걸 몇 단계 더 발전시킬 거예요 + +01:13.400 --> 01:15.980 +그게 오늘 계획이에요 + +01:15.980 --> 01:21.740 +이것 역시 많은 부분을 수정하고 만들고 실험하는 과정이죠 + +01:21.980 --> 01:26.270 +구조화된 출력물에 대해 몇 마디만 할게요 + +01:26.300 --> 01:27.590 +구조화된 출력물이죠 + +01:27.590 --> 01:30.710 +과거에 JSON 세대 사용했던 거 기억하시죠? + +01:30.710 --> 01:35.960 +JSON 내의 출력 형식으로 모델이 반응하길 원한다고 했죠 + +01:35.960 --> 01:40.070 +프롬프트에서 JSON이 정확히 어떻게 보여야 하는지 설명하죠 + +01:40.070 --> 01:43.070 +100% 믿을 수도 없고요 + +01:43.100 --> 01:44.600 +아주 맛있어요 + +01:44.600 --> 01:50.270 +그건 자주, 거의 항상 JSON으로 응답해요 + +01:50.540 --> 01:55.580 +하지만 삐뚤어지기 시작하는 건 아주 복잡한 개체가 있어서 응답해야 + +01:55.610 --> 02:03.680 +할 경우예요 시간이 지나면 일부 부분이 환각으로 나타나거나 잘못된 포맷을 보낼 수 있죠 + +02:03.680 --> 02:10.620 +구조적인 출력이라는 건 모델이 어떻게 반응해야 하는지 정확히 지시하는 + +02:10.620 --> 02:11.490 +거예요 + +02:11.490 --> 02:19.500 +그렇게 하는 방법은 클래스로 응답을 정의하는 겁니다 Python 클래스로요 사실 그건 클래스가 될 겁니다 파이디언틱의 + +02:19.500 --> 02:24.510 +베이스 모델이란 것의 서브클래스가 될 거예요 엔지니어링을 전공하신다면 + +02:24.510 --> 02:28.800 +이미 경험해 보셨을 수도 있지만 걱정 마세요 + +02:28.800 --> 02:30.240 +아니면 보여 줄게요 + +02:30.510 --> 02:36.990 +기본 모델의 서브클래스를 만들어서 여러분이 찾는 걸 정확히 설명하는 데 사용하죠 + +02:37.110 --> 02:46.170 +그런 다음 OpenAI를 호출할 때 해당 클래스를 지정합니다 그럼 해당 클래스의 인스턴스를 생성해 여러분에게 + +02:46.170 --> 02:47.970 +다시 보내죠 + +02:48.480 --> 02:55.980 +필요한 구조로 데이터를 생성하는 데 유용한 개념이죠 + +02:56.010 --> 03:03.210 +다른 접근법과 비교해봐야 합니다 도구 함수 호출 사용이죠 좀 전에 + +03:03.210 --> 03:05.580 +다시 살펴봤어요 + +03:05.760 --> 03:11.980 +둘 다 꽤 비슷한 기술이에요 모델로부터 특정한 + +03:11.980 --> 03:16.840 +구조의 반응을 확실히 가져오죠 + +03:16.840 --> 03:19.060 +둘 다 장단점이 있어요 + +03:19.390 --> 03:26.020 +일반적으로 권장하는 건 모델을 응용 프로그램 코드에 직접 연결할 + +03:26.020 --> 03:31.930 +경우 특정 메서드 서명이 필요한 함수를 호출할 때 함수 호출과 + +03:31.930 --> 03:35.980 +도구를 사용하는 게 낫다는 거죠 + +03:35.980 --> 03:41.800 +그게 더 나은 모델이죠 그럼 완전히 반응할 테니까요 여러분이 정의한 JSON 구조에 + +03:41.800 --> 03:46.390 +따라서요 여러분의 함수를 호출하기 위한 올바른 매개 변수로요 + +03:46.420 --> 03:53.140 +만약 여러분이 보고자 
하는 것이 다운스트림 소비를 위한 특정 포맷의 데이터를 생성하는 + +03:53.140 --> 03:56.860 +것이라면 구조화된 출력화가 방법이에요 + +03:56.980 --> 04:01.900 +장단점이 이렇게 나뉘어 있는데요. 한동안 시도하고 나면 어느 + +04:01.930 --> 04:07.060 +쪽이 더 잘했는지 알게 되고, 어느 쪽이 더 잘했는지도 알게 되죠. + +04:07.150 --> 04:10.480 +소개를 마쳤으니 유피터랩으로 가보죠. HDMI + +04:10.480 --> 04:12.700 +직접 시험해 보려고요 diff --git a/week5/community-contributions/subtitles/srts/59670933/en_US.srt b/week5/community-contributions/subtitles/srts/59670933/en_US.srt new file mode 100755 index 0000000..68cdc69 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670933/en_US.srt @@ -0,0 +1,199 @@ +WEBVTT + +00:00.530 --> 00:06.770 +I realized my enthusiasm for this project is a bit insane, but I have to tell you that I am very satisfied + +00:06.770 --> 00:07.670 +with the outcome. + +00:07.700 --> 00:12.620 +It's rare that you come up with a project like this that has so many moving parts and is technically + +00:12.620 --> 00:20.240 +quite complex and has a lot of substance to it, but have such a simple and tangible outcome as our + +00:20.240 --> 00:22.340 +push notification for a good deal. + +00:22.520 --> 00:29.750 +There is something beyond the king of the notification that really makes this very satisfying indeed. + +00:30.290 --> 00:35.660 +But one point that I wanted to make about it, perhaps a more, more seriously, is to point out that + +00:35.660 --> 00:41.120 +this sort of template that we've got here for solving this problem is something that you can apply to + +00:41.150 --> 00:46.940 +a lot of other similar kinds of business problems and startup ideas and commercial problems that you + +00:46.940 --> 00:48.680 +might even have been thinking about already. 
+ +00:48.710 --> 00:53.570 +I mean, to give you something that comes to mind right away is that when I was looking on hugging face + +00:53.570 --> 01:00.020 +for the data for this this project, for the Amazon pricing data, I also came across a bunch of data + +01:00.020 --> 01:07.670 +that that has company reports, annual reports from companies and shows how that relates to the change + +01:07.670 --> 01:14.610 +in share price on the day and the few days after the report comes out, which is a very interesting + +01:14.610 --> 01:15.420 +data set. + +01:15.420 --> 01:22.110 +And it occurs to me that whilst this is more of a stretch, it is also obviously a bigger opportunity + +01:22.110 --> 01:23.580 +if you can find a way to crack it. + +01:23.610 --> 01:30.810 +If there's a way to build a proprietary model that's able to look at that and make some prediction about + +01:30.810 --> 01:35.130 +what will happen to the share price as a result of a company's report being published. + +01:35.160 --> 01:41.910 +And you could imagine then having a scanner agent that scans the new company reports being being being + +01:41.910 --> 01:42.630 +released. + +01:42.660 --> 01:49.170 +And so you can really see the very similar agent workflow that you would adapt for that kind of of of + +01:49.170 --> 01:49.710 +problem. + +01:49.710 --> 01:54.660 +Now, of course, I will tell you that any trading decisions that you make are entirely at your own + +01:54.660 --> 01:56.880 +risk, and I have nothing to do with them at all. + +01:57.060 --> 02:03.120 +And obviously no one has yet cracked that problem of being able to build an automated AI system that + +02:03.120 --> 02:09.000 +can do things like read company reports and make, uh, trading decisions robust trading decisions as + +02:09.000 --> 02:09.510 +a result. + +02:09.510 --> 02:15.810 +But it seems like a serious, a very, um, important challenge to take on. 
+ +02:15.810 --> 02:21.060 +And I wanted to point that out because it shows you how similar that is in terms of the the bits and + +02:21.060 --> 02:24.360 +pieces of the puzzle that you would look at if you wanted to do that. + +02:24.360 --> 02:33.160 +So that could certainly be a big challenge, if nothing else, just for the interest of seeing what + +02:33.160 --> 02:38.200 +happens and seeing whether you can find signal in that kind of data and using it as an opportunity to + +02:38.230 --> 02:40.600 +build a similar agent workflow. + +02:41.410 --> 02:42.040 +Okay. + +02:42.070 --> 02:49.930 +Anyway, with that, let's move to the last slide of this week, which is of this sorry, not of this + +02:49.930 --> 02:54.430 +week, of this day, which is to tell you that there is now one day left. + +02:54.430 --> 02:59.890 +There is one final day, week eight, day five, which will be the conclusion of this course. + +02:59.890 --> 03:01.930 +And you don't want to miss it. + +03:02.050 --> 03:07.210 +Uh, before I tell you what you'll do, then just to remind you, at this point you can generate code + +03:07.210 --> 03:13.510 +and text with frontier models, with open source models, using hugging face, using APIs with tools, + +03:13.510 --> 03:18.760 +structured outputs, rag with assistance from from Gradio. + +03:18.790 --> 03:25.180 +You can follow that strategy we went through to solve problems including curating your data, which + +03:25.180 --> 03:30.280 +is hard and grueling, but perhaps the single most important step. + +03:30.460 --> 03:34.330 +Um, making your baseline model and fine tuning a frontier model. + +03:34.330 --> 03:38.380 +And then you can Select and train an open source model. + +03:38.380 --> 03:39.370 +And we did it. + +03:39.400 --> 03:42.010 +We beat the frontier when we did that. + +03:42.040 --> 03:44.770 +You can deploy models to production. + +03:44.770 --> 03:48.730 +Now you can use modal to to put something up there on the cloud. 
+ +03:48.760 --> 03:53.980 +You can also productionize your code in other ways and prepare it into something that can be run in + +03:53.980 --> 03:55.750 +production environments. + +03:56.140 --> 04:03.400 +And now you can build agentic workflows with planning memory, databases and multiple agents working + +04:03.430 --> 04:07.030 +together to take a complex task and break it down. + +04:07.360 --> 04:11.470 +And so that brings us to preparing for tomorrow. + +04:11.470 --> 04:16.780 +Tomorrow, of course, we're going to be building a user interface, which is going to be glorious because + +04:16.780 --> 04:17.920 +it's in Gradio. + +04:18.040 --> 04:23.980 +Uh, we'll also be looking at the fact that today when we ran that workflow, you'll notice that it + +04:23.980 --> 04:25.270 +ran and then it completed. + +04:25.270 --> 04:30.730 +So it's not really autonomous and it doesn't keep running and have it still relies on the human to press + +04:30.730 --> 04:31.330 +it each time. + +04:31.330 --> 04:32.950 +So we need to fix that of course. + +04:32.950 --> 04:35.320 +So we will do both of those things tomorrow. + +04:35.320 --> 04:43.270 +And once they are done, that will leave you in a position where you will have mastered AI and LM engineering. + +04:43.330 --> 04:45.100 +And I can't wait for that point. + +04:45.100 --> 04:49.300 +I hope that you are both excited and proud and I will see you there. 
diff --git a/week5/community-contributions/subtitles/srts/59670933/ja_JP.srt b/week5/community-contributions/subtitles/srts/59670933/ja_JP.srt new file mode 100755 index 0000000..d650b02 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670933/ja_JP.srt @@ -0,0 +1,148 @@ +WEBVTT + +00:00.530 --> 00:07.670 +このプロジェクトに対する私の熱意が少々非常識であることは自覚しているが、 結果には非常に満足していることをお伝えしなければならない。 + +00:07.700 --> 00:12.620 +これほど多くの可動部品があり、 技術的に非常に複雑で、 実質的な内容も多いのに、 + +00:12.620 --> 00:22.340 +私たちのお得なプッシュ通知のようなシンプルで具体的な結果をもたらす、 このようなプロジェクトはめったにない。 + +00:22.520 --> 00:29.750 +通知王を超えた何かが、 これを実に満足のいくものにしている。 + +00:30.290 --> 00:35.660 +この問題を解決するためのテンプレートは、 似たようなビジネス上の問題やスタートアップのアイデア、 + +00:35.660 --> 00:48.680 +商業上の問題など、 あなたがすでに考えているような問題にも応用できるということです。 + +00:48.710 --> 00:53.570 +つまり、 すぐに思いつくことを挙げるとすれば、 このプロジェクトのためのデータ、 + +00:53.570 --> 01:00.020 +アマゾンの価格設定データをハギング・フェイスで探していたときに、 + +01:00.020 --> 01:15.420 +企業の年次報告書を掲載し、 その報告書が発表された日とその数日後の株価の変化との関係を示したデータもたくさん見つけました。 + +01:15.420 --> 01:23.580 +そして、 これはよりストレッチである一方で、 それを破る方法を見つけることができれば、 明らかに大きなチャンスでもあることに思い当たった。 + +01:23.610 --> 01:35.130 +それを見て、 ある企業の報告書が公表された結果、 株価がどうなるかを予測できる独自のモデルを構築する方法があるとすれば。 + +01:35.160 --> 01:42.630 +そして、 リリースされる新会社のレポートをスキャンするスキャナー・エージェントを持つことも想像できるだろう。 + +01:42.660 --> 01:49.710 +そのため、 このような問題に適応するエージェントのワークフローが非常によく似ていることがわかります。 + +01:49.710 --> 01:54.660 +もちろん、 取引に関する決定はすべて自己責任であり、 + +01:54.660 --> 01:56.880 +私は一切関知しない。 + +01:57.060 --> 02:03.120 +そして、 企業の報告書を読み、 その結果として強固な取引判断を下すような自動化されたAIシステムを構築できるという問題は、 + +02:03.120 --> 02:09.510 +明らかにまだ誰も解決できていない。 + +02:09.510 --> 02:15.810 +でも、 とても重要な挑戦だと思う。 + +02:15.810 --> 02:21.060 +そして、 それを指摘したかったのは、 もしあなたがそれをやりたかったら、 パズルの断片を見るという点で、 + +02:21.060 --> 02:24.360 +それがいかに似ているかを示しているからだ。 + +02:24.360 --> 02:33.160 +何が起こるか、 そしてそのようなデータからシグナルを見つけることができるかどうかを確認し、 + +02:33.160 --> 02:40.600 +同様のエージェントのワークフローを構築する機会として利用する。 + +02:41.410 --> 02:42.040 +オーケー。 + +02:42.070 --> 02:49.930 +ともあれ、 今週最後のスライドに移ろう。 今週の、 + +02:49.930 --> 02:54.430 +ではなく、 今日の、 である。 + 
+02:54.430 --> 02:59.890 +最終日は8週目の5日目で、 このコースの締めくくりとなる。 + +02:59.890 --> 03:01.930 +それを見逃す手はない。 + +03:02.050 --> 03:07.210 +この時点で、 フロンティア・モデル、 オープンソース・モデル、 ハギング・フェイス、 + +03:07.210 --> 03:13.510 +ツールを使ったAPI、 構造化された出力、 Gradioからの支援を受けたラグを使って、 + +03:13.510 --> 03:18.760 +コードとテキストを生成することができる。 + +03:18.790 --> 03:25.180 +データをキュレーションするのは大変で骨の折れる作業だが、 + +03:25.180 --> 03:30.280 +おそらく最も重要なステップだろう。 + +03:30.460 --> 03:34.330 +ベースラインモデルを作り、 フロンティアモデルを微調整することだ。 + +03:34.330 --> 03:38.380 +そして、 オープンソースのモデルを選択し、 トレーニングすることができる。 + +03:38.380 --> 03:39.370 +そして我々はそれを成し遂げた。 + +03:39.400 --> 03:42.010 +そうすることで、 私たちはフロンティアを打ち破った。 + +03:42.040 --> 03:44.770 +モデルをプロダクションにデプロイすることができます。 + +03:44.770 --> 03:48.730 +モーダルを使って、 クラウド上に何かを置くことができる。 + +03:48.760 --> 03:55.750 +また、 他の方法でコードをプロダクション化し、 本番環境で実行できるように準備することもできる。 + +03:56.140 --> 04:07.030 +そして今、 複雑なタスクを分解するために、 プランニング・メモリー、 データベース、 複数のエージェントが連携するエージェント型ワークフローを構築することができる。 + +04:07.360 --> 04:11.470 +そして、 明日の準備に取りかかる。 + +04:11.470 --> 04:17.920 +明日は、 もちろん、 ユーザー・インターフェースを構築する。 + +04:18.040 --> 04:25.270 +今日、 ワークフローを実行したとき、 それが実行され、 完了したことに気づくだろう。 + +04:25.270 --> 04:31.330 +だから自律走行ではないし、 走り続けるわけでもない。 + +04:31.330 --> 04:32.950 +だから、 もちろんそれを修正する必要がある。 + +04:32.950 --> 04:35.320 +だから、 明日はその両方をやるつもりだ。 + +04:35.320 --> 04:43.270 +それが終われば、 AIとLMエンジニアリングをマスターしたことになる。 + +04:43.330 --> 04:45.100 +その時が待ち遠しい。 + +04:45.100 --> 04:49.300 +君たちが興奮し、 誇りに思うことを願っている。 diff --git a/week5/community-contributions/subtitles/srts/59670933/ko_KR.srt b/week5/community-contributions/subtitles/srts/59670933/ko_KR.srt new file mode 100755 index 0000000..d1378ae --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59670933/ko_KR.srt @@ -0,0 +1,187 @@ +WEBVTT + +00:00.530 --> 00:07.670 +비트를 향한 제 열정이 좀 지나치긴 했지만 결과에 아주 만족해요 + +00:07.700 --> 00:12.620 +이런 프로젝트는 드물죠 이렇게 움직이는 부분이 많고 + +00:12.620 --> 00:20.240 +기술적으로 꽤 복잡하고 실질적인 것도 많지만 푸시 알림처럼 단순하고 실질적인 + +00:20.240 --> 00:22.340 +결과를 얻었어요 + +00:22.520 --> 00:29.750 +통지의 제왕 이상의 뭔가가 이 영화를 아주 만족스럽게 만들어요 + 
+00:30.290 --> 00:35.660 +다만 이에 관해 한 가지 강조하고 싶은 점은 이 문제를 해결하기 + +00:35.660 --> 00:41.120 +위해 준비한 이 템플릿은 여러분이 이미 생각해 둔 유사한 + +00:41.150 --> 00:46.940 +사업 문제나 스타트업 아이디어 상업적 문제에 적용할 수 있다는 + +00:46.940 --> 00:48.680 +거죠 + +00:48.710 --> 00:53.570 +즉, 바로 떠오르는 것을 말씀드리자면 얼굴을 안는 + +00:53.570 --> 01:00.020 +것을 볼 때 이 프로젝트 아마존 가격 데이터를 볼 때 회사들의 연간 리포트가 + +01:00.020 --> 01:07.670 +있는 많은 데이터를 보게 되었는데요 리포트가 나온 당일과 그 후 며칠간의 주가 변화와 + +01:07.670 --> 01:15.420 +어떻게 관련되어 있는지 보여줍니다 아주 흥미로운 데이터 집합이죠 + +01:15.420 --> 01:22.110 +이건 좀 억지스럽긴 하지만 그걸 풀 방법을 찾는다면 더 큰 기회가 + +01:22.110 --> 01:23.580 +되겠죠 + +01:23.610 --> 01:30.810 +소유 모델을 구축할 방법이 있다면 그걸 살펴보고 회사의 보고서가 공개된 결과로 + +01:30.810 --> 01:35.130 +주가가 어떻게 될지 예측할 수 있는 거죠 + +01:35.160 --> 01:42.630 +스캐너 요원이 출시 예정인 신규 회사를 스캔하는 걸 상상해 보세요 + +01:42.660 --> 01:49.170 +아주 유사한 에이전트 워크플로우를 보실 수 있습니다 그런 종류의 문제에 적응하는 + +01:49.170 --> 01:49.710 +거죠 + +01:49.710 --> 01:54.660 +물론 당신이 내리는 모든 거래 결정은 전적으로 당신 책임이고 + +01:54.660 --> 01:56.880 +나와는 전혀 상관이 없어요 + +01:57.060 --> 02:03.120 +물론 아직 아무도 그 문제를 해결하지 못했죠 회사 보고서를 읽고 그 결과로 + +02:03.120 --> 02:09.510 +탄탄한 의사 결정을 내리는 자동화된 인공지능 시스템을 개발하는 거요 + +02:09.510 --> 02:15.810 +하지만 이건 아주 심각하고 중요한 도전 같아요 + +02:15.810 --> 02:21.060 +그걸 지적하고 싶었어요 얼마나 비슷한지 보여주거든요 퍼즐 조각 + +02:21.060 --> 02:24.360 +같은 면에서요 그걸 원한다면 볼 수 있죠 + +02:24.360 --> 02:33.160 +그러니 큰 도전이 될 수 있죠 다른 건 없더라도요 어떤 일이 일어나는지 흥미를 위해 그런 종류의 데이터에서 신호를 + +02:33.160 --> 02:38.200 +찾을 수 있는지 보고 유사한 에이전트 워크플로우를 구축할 기회로 + +02:38.230 --> 02:40.600 +활용하기 위해서요 + +02:41.410 --> 02:42.040 +네 + +02:42.070 --> 02:49.930 +어쨌든, 이번 주의 마지막 슬라이드로 넘어가죠 이∙∙∙ 아니, 이번 주가 아니라 오늘의 + +02:49.930 --> 02:54.430 +슬라이드요 이제 하루 남았다는 걸 알려드리려고요 + +02:54.430 --> 02:59.890 +8주 차, 5일 차인 마지막 날이 이 코스의 마지막 날이죠 + +02:59.890 --> 03:01.930 +놓치면 후회하실 거예요 + +03:02.050 --> 03:07.210 +여러분이 뭘 할지 말씀드리기 전에 상기시켜 드리자면 이 시점에서 프론티어 모델로 + +03:07.210 --> 03:13.510 +코드와 텍스트를 생성할 수 있습니다 오픈 소스 모델로요 얼굴을 끌어안는 것과 도구를 이용한 API + +03:13.510 --> 03:18.760 +구조적 출력물들을 이용하죠 GRadio의 도움을 받아서요 + +03:18.790 --> 03:25.180 +문제 해결에 사용한 전략을 
따르세요 데이터 큐레이팅을 포함해서요 + +03:25.180 --> 03:30.280 +힘들고 고된 일이지만 가장 중요한 단계죠 + +03:30.460 --> 03:34.330 +기본 모델을 만들고 개척지 모델을 세밀하게 조정하는 거죠 + +03:34.330 --> 03:38.380 +오픈 소스 모델을 선택하고 훈련할 수 있어요 + +03:38.380 --> 03:39.370 +그리고 해냈죠 + +03:39.400 --> 03:42.010 +개척지를 뛰어넘은 거죠 + +03:42.040 --> 03:44.770 +모델을 생산에 배포할 수 있어요 + +03:44.770 --> 03:48.730 +Modal을 이용해 클라우드에 뭔가를 놓을 수 있어요 + +03:48.760 --> 03:53.980 +다른 방법으로 코드를 제작할 수도 있어요 프로덕션 환경에서 실행될 수 + +03:53.980 --> 03:55.750 +있도록 준비하는 거죠 + +03:56.140 --> 04:03.400 +기획 메모리와 데이터베이스, 그리고 여러 에이전트가 함께 복잡한 작업을 수행하는 + +04:03.430 --> 04:07.030 +에이전트 워크플로를 구축할 수 있어요. + +04:07.360 --> 04:11.470 +그래서 내일을 준비하는 거예요 + +04:11.470 --> 04:16.780 +내일은 사용자 인터페이스를 만들 겁니다 그레이디오에 있으니 아주 + +04:16.780 --> 04:17.920 +훌륭하죠 + +04:18.040 --> 04:23.980 +오늘 워크플로우를 실행했을 때 실행된 후 완료된 걸 + +04:23.980 --> 04:25.270 +볼 수 있죠 + +04:25.270 --> 04:30.730 +자동적으로 계속 작동하지 않고 매번 사람이 눌러야 하는 + +04:30.730 --> 04:31.330 +거죠 + +04:31.330 --> 04:32.950 +물론 그걸 고쳐야겠죠 + +04:32.950 --> 04:35.320 +내일 둘 다 할 거예요 + +04:35.320 --> 04:43.270 +그 과정이 끝나면 인공지능과 LM 공학에 통달한 상태가 되죠 + +04:43.330 --> 04:45.100 +그 순간이 기다려져요 + +04:45.100 --> 04:49.300 +기대와 자부심을 느끼시길 바라요 거기서 뵐게요 diff --git a/week5/community-contributions/subtitles/srts/59671221/en_US.srt b/week5/community-contributions/subtitles/srts/59671221/en_US.srt new file mode 100755 index 0000000..2dfd428 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59671221/en_US.srt @@ -0,0 +1,454 @@ +WEBVTT + +00:01.220 --> 00:07.940 +I gotta tell you, I don't like to toot my horn a whole lot, but I do think that I've done a great + +00:07.940 --> 00:12.920 +job with the project for this week, and I enjoyed it so much. + +00:12.920 --> 00:15.110 +And it's running now and I love it. + +00:15.110 --> 00:19.250 +I absolutely love it, and I can't wait to get into it and show it to you. + +00:19.520 --> 00:22.790 +And let's start, as always with the introduction. 
+ +00:22.790 --> 00:26.870 +But very quickly we're going to get to code because that is where it's at this time. + +00:26.870 --> 00:31.250 +So we're going to go deeply into a gigantic I. + +00:31.280 --> 00:32.750 +It's such a hot topic. + +00:32.750 --> 00:35.630 +It's something that everyone can't get enough of right now. + +00:35.720 --> 00:42.410 +And so it's worth it that we really go deep and get into it, and we use it as an opportunity to learn + +00:42.410 --> 00:49.070 +more and more about the different components of LMS that we've worked on already over the last seven + +00:49.070 --> 00:50.120 +and a half weeks. + +00:50.120 --> 00:56.990 +So what we're going to be doing today is talking about agentic workflows, agent frameworks, and then + +00:56.990 --> 00:59.720 +we're going to build an agent framework. + +00:59.720 --> 01:07.430 +We're going to do it today that is able to send push notifications with information about great deals + +01:07.430 --> 01:10.520 +that it finds based on looking at RSS feeds. + +01:10.520 --> 01:14.840 +So it's really putting all the pieces together into a solution. + +01:15.140 --> 01:23.210 +So before we do that, let's just quickly talk about what what is what exactly is Agentic AI and agent + +01:23.210 --> 01:25.370 +workflows and all of this. + +01:25.370 --> 01:33.860 +And I think the truthful answer is that it's one of these ambiguous terms that still emerging and somewhat + +01:33.890 --> 01:35.870 +overused by different groups. + +01:35.870 --> 01:38.240 +So it's used to mean a number of different things. + +01:38.240 --> 01:43.640 +But I think if you take a step back and I did mention this when we touched on it briefly in a previous + +01:43.640 --> 01:49.850 +week, uh, but if you take a step back, you can think of the hallmarks, the key aspects of Agentic + +01:49.850 --> 01:52.580 +AI as having these five pieces to it. 
+ +01:52.580 --> 01:56.510 +And no doubt there are some things that some people say it's more than this, and some people will say + +01:56.510 --> 01:57.140 +it's less than this. + +01:57.140 --> 02:00.130 +But I think that the these are the big five. + +02:00.160 --> 02:06.160 +So first of all, an agentic solution is one that is able to take a larger problem, a more complex + +02:06.160 --> 02:13.330 +problem, and divide it down into smaller pieces that can be executed potentially by LMS and maybe just + +02:13.330 --> 02:15.280 +by normal bits of software. + +02:15.460 --> 02:22.960 +But that ability to take a harder task and break it down is certainly a hallmark of agent solutions. + +02:23.110 --> 02:28.510 +The use of tools, function calling and structured outputs that we've covered at various points along + +02:28.510 --> 02:28.990 +the way. + +02:28.990 --> 02:33.250 +That's also something that often falls into the remit of an agent solution. + +02:33.250 --> 02:39.160 +Is this idea that you're giving an LMS something that's more than just a conversational here's a prompt, + +02:39.190 --> 02:41.350 +give me back a chat response. + +02:41.350 --> 02:45.190 +But it's something where it's fitting into a tighter construct. + +02:45.190 --> 02:47.830 +We need outputs in this particular JSON format. + +02:47.830 --> 02:51.250 +You can call these different functions to carry out different activities. + +02:51.250 --> 02:56.860 +So that's how that fits into Agentic AI and environment. + +02:56.890 --> 02:58.660 +A framework environment. + +02:58.690 --> 02:59.860 +The different words for it. + +02:59.860 --> 03:08.920 +But some kind of a of a of a sandbox in which, which provides some functionality which all the different + +03:08.920 --> 03:11.260 +agents would be able to take advantage of. + +03:11.290 --> 03:14.140 +The classic example of this would be something like memory. 
+ +03:14.140 --> 03:19.600 +If there's something where all of the agents can share in some bit of information that reflects what's + +03:19.600 --> 03:24.040 +happened in the past or something like that, that would be an agent environment and just something + +03:24.040 --> 03:27.820 +which allows different agents to call each other in some way. + +03:28.810 --> 03:33.760 +Uh, and then typically and again, this, this one is an example of something which isn't a must have. + +03:33.760 --> 03:36.490 +It's not like without this you don't have an agent solution. + +03:36.490 --> 03:42.430 +But you often see agent solutions having a planning agent, an agent that's responsible for figuring + +03:42.430 --> 03:44.920 +out what tasks to do in what order. + +03:44.920 --> 03:49.660 +And again, I think normally when people talk about this, they're thinking of that planning agent being + +03:49.660 --> 03:54.310 +itself an LLM that's able to take a task and figure out, all right, I want to do this and then this + +03:54.310 --> 03:54.970 +and then this. + +03:54.970 --> 03:57.020 +But it doesn't have to be an LLM this. + +03:57.050 --> 04:01.820 +If it's a simple problem that just has five steps to it or something, then you can just write some + +04:01.820 --> 04:07.190 +Python code that calls those steps, or it can be a JSON configuration file or something. + +04:07.460 --> 04:13.550 +But there's got to be something which is considered your, your, your planner to tick this box and + +04:13.580 --> 04:19.010 +not that it's necessarily required, but perhaps the last one here, which is the one that we've not + +04:19.010 --> 04:22.430 +really done much of to date, is the kind of key. + +04:22.460 --> 04:28.820 +It's perhaps a single criterion that does distinguish between something that's agentic and it is not, + +04:28.820 --> 04:31.460 +and that is autonomy. 
+ +04:31.460 --> 04:42.680 +That is this idea that your agentic AI solution has some kind of a existence that transcends a chat + +04:42.710 --> 04:43.610 +with a human. + +04:43.610 --> 04:51.680 +So we've we've had memory before because we've had Q&A chats like our Rag solution when we when we had + +04:51.680 --> 04:58.770 +a chat that talked about the insurance company and obviously it had memory there, and we've had other + +04:58.770 --> 04:59.700 +examples of that too. + +04:59.730 --> 05:05.790 +Even our airline chat had memory, but that's not really considered an autonomous AI because that. + +05:05.820 --> 05:11.190 +Memory only existed where we had that app running and while the human was interacting with it. + +05:11.490 --> 05:15.810 +It didn't really have any kind of a of a presence beyond that. + +05:15.810 --> 05:18.360 +So this idea of autonomy is some. + +05:18.390 --> 05:24.180 +And some kind of a sense that this, this thing has an existence that is more permanent and. + +05:24.210 --> 05:26.100 +Say, is running behind the scenes. + +05:26.130 --> 05:29.280 +Now, that might all sound a bit magical, and it's not at all. + +05:29.310 --> 05:34.590 +As you'll see, basically, if you've got a process that's running, that's carrying out some activity. + +05:34.620 --> 05:39.420 +That doesn't necessarily need human interaction, that in itself is good enough to say, okay. + +05:39.450 --> 05:41.550 +That sounds like that's an agent solution. + +05:42.090 --> 05:45.720 +So in a nutshell, it's not like there's one super clear. + +05:45.750 --> 05:46.740 +Definition. + +05:46.740 --> 05:52.920 +And a lot of the times when you're working with an AI solution that is solving a harder problem involving + +05:52.920 --> 06:00.750 +multiple models Involving coordination between them and in a way that isn't just a prompt and a response. 
+ +06:00.750 --> 06:08.430 +The chat interface that we're so familiar with, anything like that is considered an agentic AI solution. + +06:08.970 --> 06:13.020 +Now there are a bunch of frameworks which offer agent capabilities. + +06:13.020 --> 06:16.860 +LangChain has a bunch of agent abilities. + +06:16.860 --> 06:19.440 +There's agent tools that you get with hugging face. + +06:19.680 --> 06:22.560 +Gradio has something and there's many others. + +06:22.560 --> 06:25.410 +Some of them are what they call no code. + +06:25.410 --> 06:28.230 +So all you're doing is stitching together different, different models. + +06:28.230 --> 06:34.140 +Some of them are have more code involved, like like a Lang Chain's offerings. + +06:34.140 --> 06:39.450 +But one of the points I wanted to make to you is that many of these platforms are putting on abstractions + +06:39.450 --> 06:45.540 +around Llms, much as Lang Chain did for Rag when we came across that before. + +06:45.540 --> 06:50.910 +And really to be building these kinds of agentic AI solutions, you don't need those abstractions. + +06:50.910 --> 06:54.980 +We know how to call Llms directly and we can just do it ourselves. + +06:54.980 --> 06:59.300 +We can have LMS running and we can send the right information to the right. + +06:59.360 --> 07:06.500 +LM as now a master of LM engineering, almost 5% away from being a master of LM engineering. + +07:06.500 --> 07:08.360 +That's well within your capabilities. + +07:08.360 --> 07:14.180 +So actually for for this session, as we get in and build our Agentic AI framework, we're just going + +07:14.180 --> 07:19.760 +to be creating these agents, as you already saw those classes and have them operating ourselves using + +07:19.760 --> 07:20.510 +Python code.
+ +07:20.510 --> 07:25.610 +We're going to collaborate them, stitch them together with our own code, which is a great way of doing + +07:25.610 --> 07:29.030 +it, and which also gives you deeper insight into what's happening. + +07:29.030 --> 07:33.740 +And we can actually see what information is being passed between the agents. + +07:33.860 --> 07:41.540 +Um, but of course, you can also use one of the more off the shelf, uh, more abstraction layer products. + +07:41.540 --> 07:42.440 +If you wish. + +07:42.470 --> 07:47.510 +You can you can look up any of the ones that's available from, from LangChain or the others. + +07:47.600 --> 07:51.650 +Um, and it might be an interesting exercise to then redo some of what we're doing. + +07:51.680 --> 07:56.150 +It was it would probably be quite, quite straightforward to do it using one of those off the shelf + +07:56.150 --> 07:56.750 +products. + +07:56.750 --> 07:59.270 +But for us, we're going to get to the nitty gritty. + +07:59.300 --> 08:05.270 +We're actually going to go and build our own little agent framework and have multiple llms participate + +08:05.270 --> 08:12.620 +in solving the problem that, you know, we're setting out to solve, which is scraping for for good + +08:12.620 --> 08:15.950 +deals on the internet and messaging us when it finds them. + +08:15.980 --> 08:19.100 +Let's remind ourselves quickly of what that framework looks like. + +08:19.100 --> 08:20.480 +What is our architecture? + +08:20.510 --> 08:25.070 +This is the the the workflows that we're putting together. + +08:25.310 --> 08:32.240 +Um, we have the three models that are running and an ensemble agent that calls them. + +08:32.420 --> 08:37.670 +This is an example of perhaps a bit of a stretch, because an ensemble model that calls other models + +08:37.670 --> 08:39.530 +is something that's been around for donkeys years.
+ +08:39.530 --> 08:46.040 +People haven't called that Agentic AI in the past, but since we do have these running as separate classes + +08:46.040 --> 08:51.500 +in their own right that have the same construct and the same ability as you'll see to log and to be + +08:51.530 --> 08:56.930 +participating in this framework, it kind of makes sense to think of these as separate agents in their + +08:56.930 --> 09:00.410 +own right, and we could be running them in different Python processes if we wish to. + +09:00.440 --> 09:05.780 +But for simplicity, I just have it just be being called directly, but we certainly could do. + +09:06.140 --> 09:11.420 +Um, so I have chosen to suggest that these are separate agents that carry out these three different + +09:11.420 --> 09:16.910 +models, and that we have an ensemble agent that calls each of these agents, collaborates with them, + +09:16.910 --> 09:23.600 +and then applies the linear regression weights to give an ensemble of a price of a product. + +09:23.690 --> 09:27.260 +The scanner agent is what we looked at last time. + +09:27.260 --> 09:28.490 +This is an agent. + +09:28.520 --> 09:31.340 +We ended it by by calling the scanner agent. + +09:31.340 --> 09:41.480 +It's able to go out, collect feeds, and then call Gpt4 zero as its way of finding out the good pithy + +09:41.510 --> 09:45.140 +description of each deal and the price point associated with it. + +09:45.140 --> 09:46.340 +And it collects that together. + +09:46.340 --> 09:51.160 +And you may remember that it had an input memory, which is part of the glue of how we're going to glue + +09:51.160 --> 09:52.270 +everything together. + +09:52.600 --> 09:58.540 +The memory is where we tell it not to surface a deal, that it's already surfaced in the past. 
+ +09:59.800 --> 10:04.990 +And what we're going to look at today are these boxes in yellow that bring it all together, that we're + +10:04.990 --> 10:09.730 +going to look at a messaging agent, a very simple thing that's going to send push notifications to + +10:09.730 --> 10:11.650 +your phone, which is going to be delightful. + +10:11.680 --> 10:15.520 +A planning agent which is able to coordinate activities. + +10:15.520 --> 10:20.500 +And it's not going to be an LM, it's going to be a simple Python script, but it easily could be an + +10:20.500 --> 10:21.190 +LM. + +10:21.850 --> 10:26.170 +And then the agent framework, which sounds super fancy. + +10:26.170 --> 10:28.090 +Uh, it's not fancy in the least. + +10:28.090 --> 10:32.800 +It's just simply something which has all of these agents and which can allow messaging to go on. + +10:32.800 --> 10:36.520 +And that's going to be our agent framework creation today. + +10:36.520 --> 10:41.860 +And then tomorrow we're going to build the user interface that that wraps it all together and makes + +10:41.860 --> 10:43.390 +it look fabulous. + +10:43.690 --> 10:48.520 +But I hopefully have motivated you enough to be ready to go. + +10:48.550 --> 10:50.440 +I will see you in JupyterLab. 
diff --git a/week5/community-contributions/subtitles/srts/59671221/ja_JP.srt b/week5/community-contributions/subtitles/srts/59671221/ja_JP.srt new file mode 100755 index 0000000..9fd0b93 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59671221/ja_JP.srt @@ -0,0 +1,367 @@ +WEBVTT + +00:01.220 --> 00:07.940 +でも、 今週の企画はとても良かったと思うし、 + +00:07.940 --> 00:12.920 +とても楽しめた。 + +00:12.920 --> 00:15.110 +今は走っているし、 とても気に入っている。 + +00:15.110 --> 00:19.250 +絶対に気に入っているし、 早く乗り込んで皆さんにお見せしたい。 + +00:19.520 --> 00:22.790 +そして、 いつものようにイントロダクションから始めよう。 + +00:22.790 --> 00:26.870 +しかし、 すぐにコードに取りかかるつもりだ。 + +00:26.870 --> 00:31.250 +だから、 私たちは巨大な "I "の中に深く入っていくことになる。 + +00:31.280 --> 00:32.750 +とてもホットな話題だ。 + +00:32.750 --> 00:35.630 +今、 誰もが満足できないものなんだ。 + +00:35.720 --> 00:42.410 +この7週間半の間にすでに取り組んできたLMSのさまざまな構成要素について、 + +00:42.410 --> 00:50.120 +より深く学ぶ機会として利用するのです。 + +00:50.120 --> 00:59.720 +そこで今日は、 エージェント的なワークフロー、 エージェントフレームワークについて話し、 そしてエージェントフレームワークを構築する。 + +00:59.720 --> 01:10.520 +RSSフィードを見て見つけたお得な情報を、 プッシュ通知で送ることができる。 + +01:10.520 --> 01:14.840 +つまり、 すべてのピースを組み合わせて解決策を導き出すのだ。 + +01:15.140 --> 01:25.370 +ではその前に、 エージェント型AIとは何か、 エージェントのワークフローとは何か、 これらすべてについて簡単にお話ししましょう。 + +01:25.370 --> 01:35.870 +正直なところ、 この曖昧な用語はさまざまなグループによっていまだに使われ続けている。 + +01:35.870 --> 01:38.240 +だから、 いろいろな意味で使われている。 + +01:38.240 --> 01:43.640 +しかし、 一歩引いて考えてみると、 前の週にも少し触れましたが、 一歩引いて考えてみると、 + +01:43.640 --> 01:52.580 +エージェント型AIの特徴や重要な側面は、 これら5つの要素から成り立っていると考えることができます。 + +01:52.580 --> 01:57.140 +そして、 これ以上だと言う人もいれば、 これ以下だと言う人もいるのは間違いない。 + +01:57.140 --> 02:00.130 +しかし、 私はこの5つがビッグ5だと思う。 + +02:00.160 --> 02:06.160 +つまり、 まず第一に、 エージェント・ソリューションとは、 より大きな問題、 より複雑な問題を、 + +02:06.160 --> 02:15.280 +LMSや通常のソフトウェアで実行できる可能性のある小さな断片に分割することができるものである。 + +02:15.460 --> 02:22.960 +しかし、 より困難なタスクを分解する能力は、 エージェント・ソリューションの特徴であることは間違いない。 + +02:23.110 --> 02:28.990 +これまで様々な場面で取り上げてきたツールの使用、 関数呼び出し、 構造化されたアウトプット。 + +02:28.990 --> 02:33.250 +これもまた、 エージェント・ソリューションの権限に含まれることが多い。 + +02:33.250 --> 02:41.350 
+LMSに単なる会話以上のものを与えるという考え方は、 プロンプトが表示され、 チャットの返答を返すというものなのでしょうか。 + +02:41.350 --> 02:45.190 +しかし、 それはよりタイトな構成にフィットするものだ。 + +02:45.190 --> 02:47.830 +この特定のJSONフォーマットでの出力が必要なのです。 + +02:47.830 --> 02:51.250 +異なるアクティビティを実行するために、 これらの異なる関数を呼び出すことができる。 + +02:51.250 --> 02:56.860 +つまり、 それがエージェントAIと環境にどのようにフィットするかということだ。 + +02:56.890 --> 02:58.660 +フレームワーク環境。 + +02:58.690 --> 02:59.860 +それに対するさまざまな言葉。 + +02:59.860 --> 03:11.260 +しかし、 サンドボックスのようなものがあれば、 すべての異なるエージェントが利用できる機能を提供することができる。 + +03:11.290 --> 03:14.140 +その典型的な例が、 メモリのようなものだろう。 + +03:14.140 --> 03:19.600 +すべてのエージェントが、 過去に起こったことを反映した情報を共有できるようなものがあれば、 + +03:19.600 --> 03:27.820 +それはエージェント環境であり、 異なるエージェントが何らかの方法でお互いを呼び出せるようにするものだ。 + +03:28.810 --> 03:33.760 +それから、 これは必需品ではないものの例だ。 + +03:33.760 --> 03:36.490 +これがなければエージェント・ソリューションがないというわけではない。 + +03:36.490 --> 03:44.920 +しかし、 エージェント・ソリューションでは、 計画エージェント、 つまり、 どのタスクをどの順番で行うかを決定するエージェントを持つことが多い。 + +03:44.920 --> 03:49.660 +繰り返しになるが、 普通、 この話をするとき、 プランニング・エージェントというのは、 LLM(法学修士号)のような、 + +03:49.660 --> 03:54.970 +ある仕事を引き受け、 これとこれとこれをやりたい、 と考えることができる人のことを想像すると思う。 + +03:54.970 --> 03:57.020 +しかし、 LLMである必要はない。 + +03:57.050 --> 04:01.820 +5つのステップを踏むだけの簡単な問題なら、 そのステップを呼び出すPythonのコードを書けばいいし、 + +04:01.820 --> 04:07.190 +JSONの設定ファイルでもいい。 + +04:07.460 --> 04:13.550 +しかし、 このボックスにチェックを入れるには、 あなたの、 あなたの、 + +04:13.580 --> 04:22.430 +あなたのプランナーとみなされる何かが必要で、 必ずしもそれが必要というわけではありません。 + +04:22.460 --> 04:31.460 +主体的なものとそうでないものを区別する唯一の基準は、 おそらく自律性だろう。 + +04:31.460 --> 04:43.610 +つまり、 エージェント型AIソリューションには、 人間とのチャットを超越した何らかの存在があるという考えだ。 + +04:43.610 --> 04:51.680 +というのも、 私たちのラグ・ソリューションのようにQ&Aチャットをしたことがあり、 + +04:51.680 --> 04:59.700 +保険会社について話したのですが、 明らかにそのチャットにメモリがありました。 + +04:59.730 --> 05:05.790 +私たちの航空会社のチャットにもメモリがありましたが、 それは自律型AIとはみなされません。 + +05:05.820 --> 05:11.190 +メモリが存在するのは、 そのアプリを実行し、 人間がそのアプリとやりとりしている間だけだ。 + +05:11.490 --> 05:15.810 +それ以上の存在感はなかった。 + +05:15.810 --> 05:18.360 +だから、 自主性という考え方もある。 + +05:18.390 --> 05:24.180 +そして、 このものには、 より永続的な存在があるのだという、 ある種の感覚がある。 + +05:24.210 --> 05:26.100 
+と言って、 舞台裏で動いている。 + +05:26.130 --> 05:29.280 +ちょっと不思議に聞こえるかもしれないが、 そんなことはない。 + +05:29.310 --> 05:34.590 +おわかりのように、 基本的に、 実行中のプロセスがある場合、 そのプロセスは何らかのアクティビティを実行している。 + +05:34.620 --> 05:39.420 +必ずしも人間的な交流が必要なわけではない。 + +05:39.450 --> 05:41.550 +それがエージェント・ソリューションのようだね。 + +05:42.090 --> 05:45.720 +だから、 一言で言えば、 超クリアなものがあるわけではないんだ。 + +05:45.750 --> 05:46.740 +定義 + +05:46.740 --> 05:52.920 +そして、 複数のモデルが関与する難しい問題を解決するAIソリューションを扱う場合、 + +05:52.920 --> 06:00.750 +多くの場合、 そのモデル間の連携が必要となり、 単なるプロンプトとレスポンスだけではありません。 + +06:00.750 --> 06:08.430 +私たちが慣れ親しんでいるチャット・インターフェースは、 そのようなものはすべてエージェント型AIソリューションと考えられている。 + +06:08.970 --> 06:13.020 +現在、 エージェント機能を提供するフレームワークはたくさんある。 + +06:13.020 --> 06:16.860 +ランシャンにはたくさんのエージェント能力がある。 + +06:16.860 --> 06:19.440 +ハグすることで手に入る道具がある。 + +06:19.680 --> 06:22.560 +グラディオは何かを持っているし、 他にもたくさんある。 + +06:22.560 --> 06:25.410 +中にはノーコードと呼ばれるものもある。 + +06:25.410 --> 06:28.230 +つまり、 異なるモデルをつなぎ合わせているだけなんだ。 + +06:28.230 --> 06:34.140 +中には、 ラング・チェーンのオファーのように、 より多くのコードが絡むものもある。 + +06:34.140 --> 06:39.450 +しかし、 私があなたに言いたかったことのひとつは、 これらのプラットフォームの多くがLlmsの周りに抽象化を施しているということです。 + +06:39.450 --> 06:45.540 +私たちが以前Lang Chainに出会ったとき、 Ragがそうであったように。 + +06:45.540 --> 06:50.910 +そして、 この種のエージェント型AIソリューションを構築するには、 そのような抽象的な概念は必要ない。 + +06:50.910 --> 06:54.980 +私たちはLlmsに直接電話する方法を知っているし、 自分たちだけでできる。 + +06:54.980 --> 06:59.300 +LMSを稼働させ、 適切な情報を適切な人に送ることができる。 + +06:59.360 --> 07:06.500 +LMは現在、 LMエンジニアリングのマスターであり、 あと5%でマスターになれる。 + +07:06.500 --> 07:08.360 +それはあなたの能力の範囲内だ。 + +07:08.360 --> 07:14.180 +このセッションでは、 エージェントAIのフレームワークを構築するために、 エージェントクラスを作成し、 + +07:14.180 --> 07:20.510 +Pythonのコードを使ってエージェントが動作するようにします。 + +07:20.510 --> 07:29.030 +私たちはそれらをコラボレーションさせ、 私たち自身のコードでつなぎ合わせる。 + +07:29.030 --> 07:33.740 +そして、 エージェント間でどのような情報がやり取りされているかを実際に見ることができる。 + +07:33.860 --> 07:41.540 +でも、 もちろん、 既製品の抽象化レイヤー製品を使うこともできる。 + +07:41.540 --> 07:42.440 +お望みなら + +07:42.470 --> 07:47.510 +Langshanや他の会社から入手可能なものを調べることができる。 + +07:47.600 --> 07:51.650 +今やっていることの一部をやり直すのも面白いかもしれない。 + +07:51.680 --> 07:56.750 +おそらく、 
既製品のどれかを使えば簡単にできるだろう。 + +07:56.750 --> 07:59.270 +でも、 私たちとしては、 もっと細かいことを知りたい。 + +07:59.300 --> 08:05.270 +私たちは実際に小さなエージェントフレームワークを構築し、 私たちが解決しようとしている問題、 + +08:05.270 --> 08:12.620 +つまりインターネット上のお得な情報をかき集めて、 それを見つけたら私たちにメッセージを送るという問題を解決するために、 + +08:12.620 --> 08:15.950 +複数のllmsに参加してもらうつもりです。 + +08:15.980 --> 08:19.100 +そのフレームワークがどのようなものか、 手っ取り早く思い出してみよう。 + +08:19.100 --> 08:20.480 +我々の建築とは何か? + +08:20.510 --> 08:25.070 +これが私たちが組み立てているワークフローだ。 + +08:25.310 --> 08:32.240 +3つのモデルが動いていて、 それらを呼び出すアンサンブル・エージェントがいる。 + +08:32.420 --> 08:39.530 +他のモデルを呼び出すアンサンブルモデルは、 何年も前からあるものだからだ。 + +08:39.530 --> 08:46.040 +しかし、 このフレームワークに参加し、 ログを取るという同じ構造、 + +08:46.040 --> 08:51.500 +同じ機能を持つ別のクラスとして動作しているので、 + +08:51.530 --> 09:00.410 +これらを別のエージェントとして考えるのは理にかなっています。 + +09:00.440 --> 09:05.780 +ただ、 シンプルにするために、 直接呼ばれるようにしているだけだが、 確かにそうすることもできる。 + +09:06.140 --> 09:11.420 +私は、 これらの3つの異なるモデルを実行するエージェントは別々であり、 それぞれのエージェントを呼び出し、 + +09:11.420 --> 09:23.600 +それらのエージェントと協力し、 線形回帰の重みを適用して、 商品の価格のアンサンブルを与えるアンサンブル・エージェントがいることを提案することにしました。 + +09:23.690 --> 09:27.260 +スキャナー・エージェントは前回見たものだ。 + +09:27.260 --> 09:28.490 +これはエージェントだ。 + +09:28.520 --> 09:31.340 +私たちはスキャナーのエージェントに電話をかけて終わりにした。 + +09:31.340 --> 09:45.140 +フィードを収集し、 Gpt4ゼロを呼び出すことで、 各取引の簡潔な説明とそれに関連する価格帯を見つけることができる。 + +09:45.140 --> 09:46.340 +そして、 それをまとめている。 + +09:46.340 --> 09:52.270 +入力メモリがあったことを覚えているだろうか。 + +09:52.600 --> 09:58.540 +記憶とは、 過去にすでに表面化した取引を表面化させないように指示することだ。 + +09:59.800 --> 10:04.990 +メッセージング・エージェントは、 あなたの携帯電話にプッシュ通知を送る、 + +10:04.990 --> 10:11.650 +とてもシンプルなものです。 + +10:11.680 --> 10:15.520 +活動を調整することができるプランニング・エージェント。 + +10:15.520 --> 10:21.190 +LMではなく、 単純なPythonスクリプトになるだろうが、 簡単にLMにすることができる。 + +10:21.850 --> 10:26.170 +それからエージェント・フレームワーク。 + +10:26.170 --> 10:28.090 +ええと、 全然派手じゃないよ。 + +10:28.090 --> 10:32.800 +それは単に、 これらすべてのエージェントを持っていて、 メッセージングを続けることができるものだ。 + +10:32.800 --> 10:36.520 +そして、 それが今日のエージェント・フレームワークの作成となる。 + +10:36.520 --> 10:43.390 +そして明日は、 そのすべてを包み込み、 素晴らしく見せるユーザー・インターフェースを構築する。 + +10:43.690 --> 10:48.520 +でも、 
もう十分やる気を出してもらえたと思う。 + +10:48.550 --> 10:50.440 +JupyterLabで会おう。 diff --git a/week5/community-contributions/subtitles/srts/59671221/ko_KR.srt b/week5/community-contributions/subtitles/srts/59671221/ko_KR.srt new file mode 100755 index 0000000..674a815 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59671221/ko_KR.srt @@ -0,0 +1,436 @@ +WEBVTT + +00:01.220 --> 00:07.940 +자랑하고 싶진 않지만 이번 주 프로젝트는 정말 + +00:07.940 --> 00:12.920 +잘 해낸 것 같아요 정말 즐거웠어요 + +00:12.920 --> 00:15.110 +지금은 잘 굴러가요 정말 좋네요 + +00:15.110 --> 00:19.250 +정말 마음에 들어요 빨리 들어가서 보여드리고 싶네요 Get it + +00:19.520 --> 00:22.790 +늘 그렇듯 소개부터 시작하죠 + +00:22.790 --> 00:26.870 +아주 빨리 코드로 가보겠습니다 지금은 get it에 있으니까요 + +00:26.870 --> 00:31.250 +거대한 I로 깊이 들어갈 거예요 + +00:31.280 --> 00:32.750 +화제의 중심이죠 + +00:32.750 --> 00:35.630 +Get it은 요즘 모두가 질리지 않는 거죠 + +00:35.720 --> 00:42.410 +그러니 깊이 파고들 가치가 있죠 LMS의 다양한 구성 요소에 대해 + +00:42.410 --> 00:50.120 +더 많이 배울 기회로 사용합니다 지난 7주 반 동안 이미 작업해온 것들이죠 + +00:50.120 --> 00:56.990 +오늘 우리가 할 것은 에이전틱 워크플로, 에이전트 프레임워크 그리고 에이전트 + +00:56.990 --> 00:59.720 +프레임워크를 구축하는 거예요 + +00:59.720 --> 01:07.430 +오늘 그걸 할 겁니다 푸시 알림을 보낼 수 있는 거죠 RSS 피드를 보고 찾은 훌륭한 + +01:07.430 --> 01:10.520 +서비스에 대한 정보와 함께요 + +01:10.520 --> 01:14.840 +모든 조각을 하나로 합치는 거죠 + +01:15.140 --> 01:23.210 +그 전에 간단히 얘길 좀 하죠 에이전트 인공지능과 에이전트 워크플로가 + +01:23.210 --> 01:25.370 +정확히 뭐죠? 
+ +01:25.370 --> 01:33.860 +진실한 대답은 이 용어가 모호하다는 겁니다 지금도 여러 집단이 + +01:33.890 --> 01:35.870 +남용하고 있죠 + +01:35.870 --> 01:38.240 +여러 가지 의미로 쓰이죠 + +01:38.240 --> 01:43.640 +하지만 한 걸음 물러나서 지난 주에도 잠깐 언급했지만 한 걸음 + +01:43.640 --> 01:49.850 +물러나서 생각해 보면 에이전트 인공지능의 주요 특징인 특징들을 이 5가지 + +01:49.850 --> 01:52.580 +요소로 구성할 수 있어요 + +01:52.580 --> 01:56.510 +어떤 사람들은 더 많다고 하고 어떤 사람들은 less라고 + +01:56.510 --> 01:57.140 +하죠 + +01:57.140 --> 02:00.130 +하지만 이 다섯 가지가 가장 중요하다고 생각해요 + +02:00.160 --> 02:06.160 +에이전트 솔루션은 더 큰 문제, 더 복잡한 문제를 작은 조각으로 + +02:06.160 --> 02:13.330 +나누는 겁니다 잠재적으로 LMS나 소프트웨어의 일반 비트로 실행할 + +02:13.330 --> 02:15.280 +수 있는 거죠 + +02:15.460 --> 02:22.960 +하지만 어려운 작업을 분석하는 능력은 에이전트 솔루션의 특징이죠 + +02:23.110 --> 02:28.990 +지금까지 다뤘던 도구 사용, 함수 호출, 구조화된 출력들을 살펴보죠 + +02:28.990 --> 02:33.250 +그것도 에이전트 솔루션의 소관 사항에 속하는 경우가 많죠 + +02:33.250 --> 02:39.160 +LMS에 단순한 대화 이상의 것을 주는 아이디어인가요? 채팅 + +02:39.190 --> 02:41.350 +응답을 요청하세요 + +02:41.350 --> 02:45.190 +하지만 더 탄탄한 구조에 맞아야 해요 + +02:45.190 --> 02:47.830 +이 특정 JSON 포맷에서 출력이 필요해요 + +02:47.830 --> 02:51.250 +다른 활동을 수행하기 위해 이런 함수를 호출할 수 있어요 + +02:51.250 --> 02:56.860 +에이전트식 인공지능과 환경에 그렇게 들어맞는 거죠 + +02:56.890 --> 02:58.660 +프레임워크 환경이죠 + +02:58.690 --> 02:59.860 +다른 단어들이요 + +02:59.860 --> 03:08.920 +하지만 일종의 샌드박스 같은 것으로 모든 에이전트가 이용할 수 있는 기능성들을 + +03:08.920 --> 03:11.260 +제공하죠 + +03:11.290 --> 03:14.140 +대표적인 예가 메모리 같은 거예요 + +03:14.140 --> 03:19.600 +과거에 일어난 일을 반영하는 비트 정보를 모든 에이전트가 공유할 수 있는 그런 + +03:19.600 --> 03:24.040 +것이 있다면 그건 에이전트 환경이 되겠죠 다른 에이전트들이 어떤 + +03:24.040 --> 03:27.820 +식으로든 서로 호출할 수 있도록 해주는 그런 거요 + +03:28.810 --> 03:33.760 +그리고 전형적으로, 다시 말씀드리지만 이건 꼭 필요한 건 아닌 예시예요 + +03:33.760 --> 03:36.490 +이게 없으면 에이전트 해결책이 없는 게 아니에요 + +03:36.490 --> 03:42.430 +에이전트 솔루션에는 기획 에이전트가 있는 경우가 종종 있습니다 어떤 순서로 어떤 + +03:42.430 --> 03:44.920 +작업을 할지 결정하는 책임자죠 + +03:44.920 --> 03:49.660 +일반적으로 사람들이 이에 대해 얘기할 때 기획 에이전트 그 자체가 LLM이라고 + +03:49.660 --> 03:54.310 +생각하죠 작업을 받아 알아낼 수 있는 거죠 이렇게 하고 저렇게 하고 이렇게 하는 + +03:54.310 --> 03:54.970 +거요 + +03:54.970 --> 03:57.020 
+LLM이 아니어도 돼요 + +03:57.050 --> 04:01.820 +간단한 문제이고 5단계 정도만 있다면 파이썬 코드를 작성해서 + +04:01.820 --> 04:07.190 +그 단계를 호출하거나 JSON 구성 파일을 만들면 돼요 + +04:07.460 --> 04:13.550 +하지만 여러분의 플래너로 간주되는 뭔가가 있어야만 해요 이 박스에 체크할 + +04:13.580 --> 04:19.010 +수 있도록요 꼭 필요한 건 아니지만요 여기 마지막 건 지금까지 별로 + +04:19.010 --> 04:22.430 +다루지 않은 건데 일종의 핵심이죠 + +04:22.460 --> 04:28.820 +상징적인 것과 그렇지 않은 것을 구분하는 단 하나의 기준은 + +04:28.820 --> 04:31.460 +바로 자율성이에요 + +04:31.460 --> 04:43.610 +그게 당신의 인공지능 솔루션이 인간과 대화하는 걸 초월하는 존재라는 생각이죠 + +04:43.610 --> 04:51.680 +메모리를 본 적이 있어요 래그 솔루션 같은 Q&A 채팅을 했거든요 보험 회사에 + +04:51.680 --> 04:59.700 +대해 얘기할 때요 당연히 메모리가 있었죠 다른 예제도 있었어요 + +04:59.730 --> 05:05.790 +항공사 채팅도 메모리가 있었지만 그건 자율 인공지능이라고 볼 수 없어요 + +05:05.820 --> 05:11.190 +메모리는 앱이 실행되고 사람이 상호 작용할 때만 존재했어요 + +05:11.490 --> 05:15.810 +그 외에는 어떤 존재감도 없었어요 + +05:15.810 --> 05:18.360 +자율성이라는 개념은 일부에 불과해요 + +05:18.390 --> 05:24.180 +그리고 이 생명체가 더 영구적으로 존재할 거라는 느낌도 들고요 + +05:24.210 --> 05:26.100 +무대 뒤에서 달리고 있어요 + +05:26.130 --> 05:29.280 +비트가 마법처럼 들리겠지만 전혀 그렇지 않아요 + +05:29.310 --> 05:34.590 +보다시피 기본적으로 실행 중인 프로세스가 있다면 일부 활동을 수행하는 거죠 + +05:34.620 --> 05:39.420 +인간의 상호 작용이 꼭 필요한 건 아니에요 그것만으로도 충분하죠 + +05:39.450 --> 05:41.550 +에이전트 해결책 같네요 + +05:42.090 --> 05:45.720 +간단히 말해서 아주 명확한 건 없어요 + +05:45.750 --> 05:46.740 +정의요 + +05:46.740 --> 05:52.920 +인공지능 솔루션을 사용할 때 더 어려운 문제를 해결하는 경우가 + +05:52.920 --> 06:00.750 +많습니다 여러 모델이 조화를 이루는 문제인데 즉각적인 대응이 아니죠 + +06:00.750 --> 06:08.430 +우리가 잘 아는 채팅 인터페이스는 에이전트식 인공지능 솔루션이에요 + +06:08.970 --> 06:13.020 +에이전트 기능을 제공하는 프레임워크가 많아요 + +06:13.020 --> 06:16.860 +랑산은 대리인으로서 능력이 뛰어나요 + +06:16.860 --> 06:19.440 +요원들을 끌어안으면 Get it이 가능해요 + +06:19.680 --> 06:22.560 +그라디오는 다른 것도 많이 갖고 있어요 + +06:22.560 --> 06:25.410 +어떤 건 코드도 없어요 + +06:25.410 --> 06:28.230 +다양한 모델을 꿰매기만 하면 돼요 + +06:28.230 --> 06:34.140 +일부는 코드가 더 많이 포함돼 있어요 랭 체인에서 제공하는 것처럼요 + +06:34.140 --> 06:39.450 +여러분께 말씀드리고 싶은 것 중 하나는 이런 플랫폼 대부분이 옐름스의 + +06:39.450 --> 06:45.540 +추상화에 있다는 겁니다 랭 체인이 전에 Rag에 대해 했던 것처럼요 + +06:45.540 --> 06:50.910 +에이전트식 인공지능 솔루션을 구축하는 건 그런 
추상화가 필요 없어요 + +06:50.910 --> 06:54.980 +`Lms'에 직접 전화하는 법을 아니까 우리가 직접 하면 돼요 + +06:54.980 --> 06:59.300 +LMS가 실행되도록 할 수도 있고 올바른 정보를 올바른 곳으로 보낼 수도 있죠 + +06:59.360 --> 07:06.500 +달 착륙선은 이제 달 착륙선 공학의 달인이 되었습니다 달 착륙선 공학의 달인이 되기까지 5% 가까이 남은 거죠 + +07:06.500 --> 07:08.360 +당신 능력 안에서요 + +07:08.360 --> 07:14.180 +이 세션에선 에이전틱 인공지능 프레임워크를 구축할 텐데요 에이전트를 생성하는 + +07:14.180 --> 07:20.510 +걸 보겠습니다 이미 보신 클래스들이죠 파이썬 코드로 운영하는 거예요 + +07:20.510 --> 07:25.610 +우리가 만든 코드로 조합해서 함께 만들 거예요 아주 좋은 방법이죠 + +07:25.610 --> 07:29.030 +무슨 일이 일어나는지 더 잘 알 수 있고요 + +07:29.030 --> 07:33.740 +요원들 사이에 어떤 정보가 오가는지 볼 수 있어요 + +07:33.860 --> 07:41.540 +물론 규격화된 걸 사용할 수도 있어요 더 추상화 층 제품요 + +07:41.540 --> 07:42.440 +원하신다면요 + +07:42.470 --> 07:47.510 +랑산이나 다른 데서 구할 수 있는 건 다 찾아봐요 + +07:47.600 --> 07:51.650 +지금 하는 걸 다시 하는 것도 재미있을 것 같아요 + +07:51.680 --> 07:56.750 +기성품으로 만드는 게 훨씬 더 쉬울 거예요 + +07:56.750 --> 07:59.270 +Get it의 핵심으로 들어갈 거예요 + +07:59.300 --> 08:05.270 +우리만의 에이전트 프레임워크를 구축하고 여러 개의 llms가 + +08:05.270 --> 08:12.620 +문제를 해결하는 데 참여하게 할 겁니다 인터넷에서 좋은 거래를 찾아내고 찾으면 + +08:12.620 --> 08:15.950 +메시지를 보내는 거죠 + +08:15.980 --> 08:19.100 +프레임워크가 어떻게 생겼는지 잠깐 되짚어보죠 + +08:19.100 --> 08:20.480 +우리 건축물은 뭘까요? 
+ +08:20.510 --> 08:25.070 +이게 우리가 만들고 있는 워크플로예요 + +08:25.310 --> 08:32.240 +모델 3대가 운영 중이고 앙상블 에이전트가 모델에게 연락해요 + +08:32.420 --> 08:37.670 +이건 비트를 좀 늘린 예라고 할 수 있죠 다른 모델에게 연락하는 앙상블 모델은 당나귀가 + +08:37.670 --> 08:39.530 +나온 지 오래됐거든요 + +08:39.530 --> 08:46.040 +에이전트 인공지능이라고 불린 적은 없지만 이것들은 독립된 클래스로서 실행되고 있고 동일한 + +08:46.040 --> 08:51.500 +구조와 기능을 가지고 있습니다 프레임워크에 로그인하고 참여하는 것과 동일합니다 + +08:51.530 --> 08:56.930 +이것들을 독립된 에이전트로 생각하는 것이 합리적입니다 원한다면 파이썬 + +08:56.930 --> 09:00.410 +프로세스에서 실행할 수도 있고요 + +09:00.440 --> 09:05.780 +간단히 말하자면 그냥 직접 호출하는 거죠 그렇게 할 수도 있어요 + +09:06.140 --> 09:11.420 +그래서 저는 이 세 가지 에이전트가 각각 다른 세 가지 모델을 수행한다고 + +09:11.420 --> 09:16.910 +제안했어요 그리고 앙상블 에이전트가 각각의 에이전트를 호출해서 + +09:16.910 --> 09:23.600 +협업하게 한 다음 선형 회귀 추를 적용해서 제품의 가격을 매기는 거죠 + +09:23.690 --> 09:27.260 +스캐너 요원은 지난번에 봤던 거예요 + +09:27.260 --> 09:28.490 +이 사람은 에이전트예요 + +09:28.520 --> 09:31.340 +스캐너 에이전트를 부르며 끝냈어요 + +09:31.340 --> 09:41.480 +나가서 피드를 수집하고 Gpt4 0을 호출할 수 있습니다 각 거래에 대한 간결한 설명과 관련된 + +09:41.510 --> 09:45.140 +가격점을 찾는 방법으로요 + +09:45.140 --> 09:46.340 +그게 한데 모이죠 + +09:46.340 --> 09:51.160 +입력 메모리가 있었던 걸 기억하실 겁니다 모든 걸 하나로 붙이는 접착제의 + +09:51.160 --> 09:52.270 +일부죠 + +09:52.600 --> 09:58.540 +이미 드러난 거래를 드러내지 말라고 메모리를 통해 말하죠 + +09:59.800 --> 10:04.990 +오늘 살펴볼 것은 모두 함께 가져오는 이 노란 상자입니다 메시징 에이전트를 + +10:04.990 --> 10:09.730 +살펴볼 건데요 여러분 폰으로 푸시 알림을 보내는 아주 간단한 거죠 + +10:09.730 --> 10:11.650 +아주 근사할 거예요 + +10:11.680 --> 10:15.520 +활동을 조정할 수 있는 기획 기획 요원이죠 + +10:15.520 --> 10:20.500 +LM이 되지는 않을 겁니다 간단한 파이썬 스크립트가 될 겁니다 하지만 쉽게 LM이 + +10:20.500 --> 10:21.190 +될 수 있죠 + +10:21.850 --> 10:26.170 +에이전트 프레임워크도 있는데 엄청 고급스럽게 들리네요 + +10:26.170 --> 10:28.090 +전혀 화려하지 않아요 + +10:28.090 --> 10:32.800 +단순히 이 모든 에이전트가 있고 메시지가 계속되게 해주는 거죠 + +10:32.800 --> 10:36.520 +그게 오늘날의 에이전트 프레임워크 생성이죠 + +10:36.520 --> 10:41.860 +내일은 사용자 인터페이스를 만들 겁니다 모든 걸 멋지게 감싸줄 + +10:41.860 --> 10:43.390 +인터페이스요 + +10:43.690 --> 10:48.520 +하지만 동기가 충분해서 갈 준비가 됐길 바라요 + +10:48.550 --> 10:50.440 +주피터랩에서 만나요 diff --git 
a/week5/community-contributions/subtitles/srts/59671231/en_US.srt b/week5/community-contributions/subtitles/srts/59671231/en_US.srt new file mode 100755 index 0000000..9a1b28d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59671231/en_US.srt @@ -0,0 +1,442 @@ +WEBVTT + +00:00.350 --> 00:10.850 +And here we are back in the Jupyter Lab for the fast approaching conclusion with a really great project. + +00:10.880 --> 00:12.020 +Super satisfying. + +00:12.050 --> 00:13.130 +Wait till you see. + +00:13.160 --> 00:19.250 +So this time we're going to start by looking at two more of our simple agents. + +00:19.250 --> 00:20.900 +Probably the simplest we've got. + +00:20.930 --> 00:26.270 +There's one called the Messaging Agent, which we will use to be sending messages, and one called the + +00:26.270 --> 00:28.700 +planning agent, which will coordinate activities. + +00:28.700 --> 00:32.600 +And both of these agents are ones which are just Python code. + +00:32.630 --> 00:38.660 +They're not using Llms, and there's no reason why they can't be beefed up to use Llms. + +00:38.660 --> 00:44.120 +The messaging agent sends a notification, but of course, it could first make a quick call to a frontier + +00:44.150 --> 00:51.260 +model to improve that, to make it something that's juicier and that that is, is relevant to that particular + +00:51.260 --> 00:51.830 +deal. + +00:51.830 --> 00:53.690 +So there's plenty that can be done here. + +00:53.780 --> 00:56.270 +And the planning agent, it's written in code. + +00:56.270 --> 00:58.820 +You could experiment with turning that into an LLM. + +00:58.820 --> 01:01.010 +Getting to plan out what happens itself. + +01:01.010 --> 01:04.320 +But we'll at least see them and use them both in a second. + +01:04.740 --> 01:11.670 +Uh, and um, we're going to then afterwards put all of that into our agent framework. 
+ +01:11.670 --> 01:16.620 +But you can imagine, because we've put all of the building blocks together, the final part of having + +01:16.620 --> 01:18.630 +an agent framework is a cinch. + +01:18.630 --> 01:21.150 +It's going to be just very straightforward. + +01:21.180 --> 01:24.720 +So let's start by talking about the messaging agent. + +01:24.720 --> 01:30.240 +This is the agent that's able to send us alerts when it finds a great deal. + +01:30.390 --> 01:33.780 +Um, and originally my plan was to send text messages for this. + +01:33.780 --> 01:41.100 +And I was going to use Twilio, which is a lovely framework that I've used in the past, but there's + +01:41.100 --> 01:46.740 +been a lot of regulations recently that makes it really quite hard to set up a Twilio account to send + +01:46.740 --> 01:52.170 +text messages, even to yourself, even when you've validated your own phone number and you want to + +01:52.170 --> 01:55.380 +send yourself text messages, you have to fill in a fair bit of paperwork. + +01:55.380 --> 01:59.280 +If you're in the US now, that might be a lot easier in other countries. + +01:59.280 --> 02:05.480 +It does seem to suggest on the website that that it's just a US restriction that makes it so hard. + +02:05.510 --> 02:07.730 +And Twilio has a free plan. + +02:07.850 --> 02:11.150 +Twilio is also free to make WhatsApp messages as well. + +02:11.150 --> 02:11.660 +So you can have it. + +02:11.690 --> 02:16.190 +WhatsApp and I did have it working whatsapping myself, but there are some constraints that you just, + +02:16.190 --> 02:21.530 +um, you can only use the free thing if you're using a sandbox which has some limitations, and you + +02:21.530 --> 02:23.420 +can only do it between certain hours of the day. + +02:23.420 --> 02:24.950 +So it wasn't ideal. + +02:24.950 --> 02:26.330 +So it was okay. + +02:26.330 --> 02:29.270 +And I've kept the code here so you can use it if you wish. 
+ +02:29.270 --> 02:34.910 +But then I discovered something really, as I say, nifty called pushover. + +02:35.000 --> 02:39.680 +Uh, there's a few things like pushover, but pushover was was the one that I found to be the nicest + +02:39.710 --> 02:40.850 +of the ones I tried. + +02:41.030 --> 02:41.300 +Um. + +02:41.300 --> 02:46.910 +And pushover is a simple website where you can go and you set up an account. + +02:46.910 --> 02:48.140 +It's free. + +02:48.170 --> 02:50.120 +Uh, and you then download an app. + +02:50.120 --> 02:51.470 +In fact, you don't need to go to the website. + +02:51.470 --> 02:51.800 +You can. + +02:51.830 --> 02:54.890 +The website is pushover dot net, not.com. + +02:54.920 --> 02:57.080 +Uh, they weren't obviously weren't able to afford.com. + +02:57.080 --> 02:58.310 +It's still up for grabs. + +02:58.430 --> 03:00.470 +But pushover dot net is their website. + +03:00.470 --> 03:02.510 +And then there's an app called pushover. + +03:02.630 --> 03:04.650 +Uh And it's free to set up. + +03:04.650 --> 03:08.520 +At least it's free for up to 10,000 messages. + +03:08.790 --> 03:14.670 +And I think we're not likely for this particular project to get close to that. + +03:14.670 --> 03:19.380 +You can see I've managed to get to 35 messages of my allowance so far. + +03:19.620 --> 03:24.990 +Um, and now the reason this page looks a bit janky is that I've scrolled down because it shows above + +03:24.990 --> 03:30.480 +here my tokens in big letters, and I didn't particularly want to reveal all of my tokens to all of + +03:30.480 --> 03:30.600 +you. + +03:30.600 --> 03:33.990 +Or you might be constantly notifying me all through the day. + +03:34.290 --> 03:40.590 +So yeah, it's hard not to miss the tokens as soon as you sign up for your free pushover account. + +03:40.590 --> 03:46.140 +I promise I'm not affiliated, but it's, uh, seems to be really, really helpful. 
+ +03:46.230 --> 03:53.280 +Um, once you've signed up, you then take your tokens and you can then easily send push notifications + +03:53.280 --> 03:58.920 +so you'll end up with two tokens a pushover user, which is a token that applies to, uh, to you. + +03:58.920 --> 04:02.820 +And then you have to set up each of your applications, and you just choose one. + +04:02.820 --> 04:07.780 +And I called it preserve for me and you can give it a token. + +04:08.080 --> 04:09.340 +Um, and sorry, you can't. + +04:09.340 --> 04:14.080 +You get given a token once you've set up your application for that. + +04:14.140 --> 04:18.070 +Uh, and these two things you should add to the EMV file. + +04:18.130 --> 04:18.850 +Wonderfully. + +04:18.850 --> 04:24.400 +You can also upload a, um, an image, uh, associated with your application. + +04:24.400 --> 04:28.570 +And I found an image, uh, looking around of a stack of coins. + +04:28.570 --> 04:30.820 +And so I uploaded that because it's great. + +04:30.820 --> 04:34.990 +When I get a push notification, there's a little image comes up and it comes up on my Apple Watch as + +04:34.990 --> 04:35.230 +well. + +04:35.230 --> 04:39.160 +The whole screen turns into this stack of coins, uh, which is really good fun. + +04:39.160 --> 04:40.960 +So you might want to do the same thing. + +04:40.990 --> 04:43.480 +Find a find a cute image to go with. + +04:43.510 --> 04:45.100 +Push over if you use that. + +04:45.340 --> 04:51.850 +Um, as I say, if you if you are able to, if you either already have a Twilio account or if you go + +04:51.880 --> 04:57.550 +there and you see that it is straightforward in whatever country you're joining from, then by all means + +04:57.550 --> 05:04.060 +you can do that instead and get a text, a bona fide, uh, SMS text message instead of a push notification. + +05:04.090 --> 05:06.750 +But push notifications are all we need for this. + +05:06.870 --> 05:07.500 +Okay. 
+ +05:07.500 --> 05:14.760 +So I'm going to go straight to this this class messaging agent under agents. + +05:15.120 --> 05:17.430 +Messaging agent is one of our agents. + +05:17.430 --> 05:18.090 +Here it is. + +05:18.090 --> 05:19.410 +It's very straightforward. + +05:19.410 --> 05:24.930 +It is basically a wrapper around the Twilio API and around the pushover API. + +05:24.960 --> 05:32.580 +And I've got at the top do text and do push as two constants that you can set, because if you wish, + +05:32.580 --> 05:37.920 +I didn't want to make it a either or because you can have both, you can have it text you and push notify + +05:37.920 --> 05:41.040 +you if that is what you desire. + +05:41.040 --> 05:49.920 +So there's an init, uh, constructor which sets up, um, the, the various, uh, criteria. + +05:49.920 --> 05:55.920 +If you're doing push notification, like me, as I say, you need pushover user and pushover token, + +05:55.920 --> 06:01.230 +and you can always just type it straight in here if you don't want to faff around with env files, um, + +06:01.380 --> 06:03.930 +obviously, then don't push this code. + +06:03.930 --> 06:10.930 +Uh, and if you're using Twilio, then you need to have an account Sid and auth token. + +06:10.990 --> 06:16.630 +A Twilio from, which is the number that Twilio will give you, is the number that it's coming from. + +06:16.630 --> 06:20.260 +And then the phone number that you want it to text message to. + +06:20.290 --> 06:23.770 +And so I have these four in my env file. + +06:23.770 --> 06:27.760 +But I'm not using it because I filled in the paperwork just to have done it. + +06:27.760 --> 06:32.890 +But I haven't yet been granted authorization to send a text message to myself. + +06:32.980 --> 06:35.680 +Uh, but hopefully that's going to come one of these days. + +06:35.680 --> 06:38.950 +In the meantime, the push notifications are great. 
+ +06:38.950 --> 06:45.820 +So I've then got two, uh, methods here message, which is simply a wrapper around the Twilio API, + +06:45.820 --> 06:48.370 +which as you can see, it's really simple. + +06:48.370 --> 06:51.370 +It's it's the Twilio client dot messages dot create. + +06:51.370 --> 06:55.330 +And you say who it's coming from and the text and who it's going to, and that's it. + +06:55.330 --> 06:56.740 +And it will send a text message. + +06:56.740 --> 06:57.580 +I've used it before. + +06:57.610 --> 06:58.750 +It just works. + +06:58.900 --> 07:01.210 +Receiving text messages is pretty easy too. + +07:01.240 --> 07:04.720 +You just have to have an endpoint that gets called a webhook. + +07:04.840 --> 07:07.640 +Um, and but obviously we don't need to do that. + +07:07.940 --> 07:11.600 +Uh, the push notification is also very simple. + +07:11.780 --> 07:17.930 +Um, we but it doesn't have it doesn't use an API because they, they, they're sort of keen to point + +07:17.960 --> 07:20.600 +out, I think, on their website that it's intentional. + +07:20.600 --> 07:23.150 +They just want to keep it as straightforward as possible. + +07:23.150 --> 07:27.710 +They don't even use the requests library in their example code because they wanted to say, you don't + +07:27.710 --> 07:29.180 +need to import anything. + +07:29.330 --> 07:35.060 +You can just simply use out of the box Python to send a push notification. + +07:35.180 --> 07:39.950 +Uh, and you simply pass in the token, uh, the message. + +07:39.950 --> 07:42.680 +And optionally you can include a sound. + +07:42.680 --> 07:48.320 +And I one of the ones that they suggest as being available is called cash Register. + +07:48.320 --> 07:54.080 +And it makes a very pleasing cash register sound when I get the notification, which is great fun. + +07:54.260 --> 07:56.240 +Uh, and that's all there is to it. 
+ +07:56.240 --> 08:01.910 +And then there's an alert, uh, method here, which, based on whether you're texting or pushing it, + +08:01.910 --> 08:04.280 +will construct something to tell you about. + +08:04.280 --> 08:08.590 +And it will then send a message and it takes an opportunity. + +08:08.590 --> 08:12.760 +So you pass in an opportunity and you'll get a message about that opportunity. + +08:12.790 --> 08:19.330 +You remember, opportunity is the object that we looked at down here that has a deal, an estimate and + +08:19.330 --> 08:20.350 +a discount. + +08:21.310 --> 08:21.820 +All right. + +08:21.820 --> 08:23.410 +But first let's just quickly try it out. + +08:23.410 --> 08:26.470 +I should just be able to create an instance of this. + +08:26.470 --> 08:31.720 +And I should be able to say massive news and ping my phone just went. + +08:31.750 --> 08:38.110 +Now, unfortunately, I have quite aggressive noise filter on my microphone to to only bring out voice. + +08:38.110 --> 08:42.850 +So I imagine you didn't hear the cash register as I say, very pleasing indeed. + +08:42.850 --> 08:45.790 +Luckily you'll be able to hear it for yourself when you try it out. + +08:45.910 --> 08:49.690 +Uh, and yeah, I'm going to have to hear that again just because I love it. + +08:49.930 --> 08:51.400 +Uh, there it goes. + +08:51.580 --> 08:55.030 +And yeah, my watch two has has come up. + +08:55.210 --> 08:57.340 +So it's, um. + +08:57.340 --> 09:00.520 +Yeah, very satisfactory indeed. + +09:00.520 --> 09:05.350 +That then is the, uh, the messaging agent. + +09:05.350 --> 09:08.560 +I will see you next time for the planning agent. 
diff --git a/week5/community-contributions/subtitles/srts/59671231/ja_JP.srt b/week5/community-contributions/subtitles/srts/59671231/ja_JP.srt new file mode 100755 index 0000000..e18fffd --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59671231/ja_JP.srt @@ -0,0 +1,391 @@ +WEBVTT + +00:00.350 --> 00:10.850 +そして、 私たちはJupyter Labに戻り、 本当に素晴らしいプロジェクトの終了を間近に控えている。 + +00:10.880 --> 00:12.020 +超満足だ。 + +00:12.050 --> 00:13.130 +見るまで待ってくれ。 + +00:13.160 --> 00:19.250 +そこで今回は、 シンプルなエージェントをあと2つ紹介する。 + +00:19.250 --> 00:20.900 +おそらく、 我々が持っている中で最もシンプルなものだろう。 + +00:20.930 --> 00:26.270 +ひとつはメッセージング・エージェントと呼ばれるもので、 メッセージの送信に使う。 もうひとつはプランニング・エージェントと呼ばれるもので、 + +00:26.270 --> 00:28.700 +活動の調整を行う。 + +00:28.700 --> 00:32.600 +そして、 これらのエージェントはどちらも単なるPythonのコードである。 + +00:32.630 --> 00:38.660 +Llmsを使っていないし、 Llmsを使うために強化できない理由もない。 + +00:38.660 --> 00:44.120 +メッセージング・エージェントは通知を送るが、 もちろん、 その前にフロンティア・モデルに素早くコールして、 + +00:44.150 --> 00:51.830 +その特定の取引に関連する、 よりジューシーなものに改良することもできる。 + +00:51.830 --> 00:53.690 +だから、 ここでできることはたくさんある。 + +00:53.780 --> 00:56.270 +そして計画担当者は、 コードで書かれている。 + +00:56.270 --> 00:58.820 +それをLLMに変えるという実験もできるだろう。 + +00:58.820 --> 01:01.010 +何が起こるか、 それ自体を計画するようになる。 + +01:01.010 --> 01:04.320 +でも、 少なくとも2人を見て、 すぐに2人を使う。 + +01:04.740 --> 01:11.670 +そして、 そのすべてをエージェントのフレームワークに入れていく。 + +01:11.670 --> 01:18.630 +しかし、 私たちがすべての構成要素をまとめたので、 エージェントフレームワークを持つ最後の部分は簡単であることは想像できるだろう。 + +01:18.630 --> 01:21.150 +とても簡単なことだ。 + +01:21.180 --> 01:24.720 +では、 まずメッセージング・エージェントについて話そう。 + +01:24.720 --> 01:30.240 +これは、 素晴らしい取引を見つけたときにアラートを送ってくれるエージェントだ。 + +01:30.390 --> 01:33.780 +ええと、 もともとはこのためにテキストメッセージを送るつもりだったんだ。 + +01:33.780 --> 01:41.100 +Twilioを使おうと思っていました。 Twilioは過去に使ったことのある素敵なフレームワークですが、 + +01:41.100 --> 01:55.380 +最近規制が多くて、 Twilioのアカウントを設定してテキストメッセージを送信するのがとても難しくなっています。 + +01:55.380 --> 01:59.280 +今アメリカにいるのなら、 他の国ではもっと簡単かもしれない。 + +01:59.280 --> 02:05.480 +ウェブサイトを見る限りでは、 アメリカの規制のせいで、 それが難しくなっているようだ。 + +02:05.510 --> 02:07.730 +Twilioには無料プランがある。 + +02:07.850 --> 02:11.150 
+TwilioはWhatsAppメッセージも無料で作成できる。 + +02:11.150 --> 02:11.660 +だから、 君にあげるよ。 + +02:11.690 --> 02:16.190 +WhatsAppは自分でも使えるようにしたんだけど、 サンドボックスを使用している場合のみ無料で使用できるという制約があり、 + +02:16.190 --> 02:23.420 +1日のうち特定の時間帯にしか使用できないんだ。 + +02:23.420 --> 02:24.950 +だから理想的ではなかった。 + +02:24.950 --> 02:26.330 +だから大丈夫だった。 + +02:26.330 --> 02:29.270 +コードをここに残しておいたので、 よかったら使ってほしい。 + +02:29.270 --> 02:34.910 +でもそのとき、 プッシュオーバーという実に気の利いたものを発見したんだ。 + +02:35.000 --> 02:40.850 +ええと、 プッシュオーバーのようなものはいくつかあるけど、 プッシュオーバーは私が試したものの中で一番素敵だと感じたものだった。 + +02:41.030 --> 02:41.300 +うーん。 + +02:41.300 --> 02:46.910 +プッシュオーバーはシンプルなウェブサイトで、 アカウントを作成することができる。 + +02:46.910 --> 02:48.140 +無料だ。 + +02:48.170 --> 02:50.120 +そしてアプリをダウンロードする。 + +02:50.120 --> 02:51.470 +実際、 ウェブサイトに行く必要はない。 + +02:51.470 --> 02:51.800 +できる。 + +02:51.830 --> 02:54.890 +ウェブサイトは「pushover dot net」である。 comに移籍した。 + +02:54.920 --> 02:57.080 +明らかに余裕がなかったんだ。 comに移籍した。 + +02:57.080 --> 02:58.310 +まだ可能性はある。 + +02:58.430 --> 03:00.470 +しかし、 プッシュオーバー・ドット・ネットは彼らのウェブサイトだ。 + +03:00.470 --> 03:02.510 +そしてpushoverというアプリがある。 + +03:02.630 --> 03:04.650 +ええと、 設定は無料です。 + +03:04.650 --> 03:08.520 +少なくとも10,000通までは無料だ。 + +03:08.790 --> 03:14.670 +そして、 この特別なプロジェクトがそれに近づく可能性は低いと思う。 + +03:14.670 --> 03:19.380 +今のところ、 35通までメッセージを送ることができた。 + +03:19.620 --> 03:24.990 +ええと、 このページがちょっとジャッキーに見えるのは、 この上に僕のトークンが大きく表示されているからで、 + +03:24.990 --> 03:30.600 +特に僕のトークンすべてを皆さんに公開したくなかったので、 下にスクロールしたんだ。 + +03:30.600 --> 03:33.990 +あるいは、 一日中常に私に通知してくるかもしれない。 + +03:34.290 --> 03:40.590 +そうそう、 無料のプッシュオーバー・アカウントにサインアップしてすぐにトークンを見逃さないようにするのは難しい。 + +03:40.590 --> 03:46.140 +私は関係者ではありませんが、 本当に、 本当に役に立っているようです。 + +03:46.230 --> 03:58.920 +サインアップしたら、 トークンを受け取って、 簡単にプッシュ通知を送ることができる。 + +03:58.920 --> 04:02.820 +そして、 それぞれのアプリケーションを設定し、 1つを選ぶだけだ。 + +04:02.820 --> 04:07.780 +そして、 私のためにそれを保存と呼んだ。 + +04:08.080 --> 04:09.340 +申し訳ないが、 それはできない。 + +04:09.340 --> 04:14.080 +そのためのアプリケーションを設定すると、 トークンが渡される。 + +04:14.140 --> 04:18.070 +それと、 この2つをEMVファイルに追加してください。 + +04:18.130 --> 04:18.850 +素晴らしい。 + +04:18.850 
--> 04:24.400 +アプリケーションに関連する画像をアップロードすることもできます。 + +04:24.400 --> 04:28.570 +そして、 私はコインの山を見回す画像を見つけた。 + +04:28.570 --> 04:30.820 +だから、 それをアップロードしたんだ。 + +04:30.820 --> 04:35.230 +プッシュ通知を受け取ると、 小さな画像が表示され、 Apple Watchにも表示される。 + +04:35.230 --> 04:39.160 +画面全体がコインの山になるんだ。 + +04:39.160 --> 04:40.960 +だから、 あなたも同じことをした方がいいかもしれない。 + +04:40.990 --> 04:43.480 +それに合うかわいい画像を見つける。 + +04:43.510 --> 04:45.100 +それを使うならプッシュオーバーだ。 + +04:45.340 --> 04:51.850 +すでにTwilioのアカウントを持っていたり、 Twilioにアクセスして、 + +04:51.880 --> 05:04.060 +どの国からでも参加できることを確認できれば、 プッシュ通知の代わりにSMSテキストメッセージを受け取ることができます。 + +05:04.090 --> 05:06.750 +しかし、 プッシュ通知はこのために必要なものだ。 + +05:06.870 --> 05:07.500 +オーケー。 + +05:07.500 --> 05:14.760 +そこで、 エージェントの下にあるこのクラスのメッセージング・エージェントに直行することにする。 + +05:15.120 --> 05:17.430 +メッセージングエージェントは、 私たちのエージェントの一つです。 + +05:17.430 --> 05:18.090 +これだ。 + +05:18.090 --> 05:19.410 +とても簡単なことだ。 + +05:19.410 --> 05:24.930 +これは基本的にTwilio APIとpushover APIのラッパーである。 + +05:24.960 --> 05:32.580 +というのも、 どちらか一方しか設定したくないというわけではなく、 + +05:32.580 --> 05:41.040 +両方設定できるからだ。 + +05:41.040 --> 05:49.920 +つまり、 initコンストラクタがあり、 これがさまざまな基準を設定する。 + +05:49.920 --> 06:03.930 +私のようにプッシュ通知を行う場合は、 プッシュオーバー・ユーザーとプッシュオーバー・トークンが必要である。 + +06:03.930 --> 06:10.930 +Twilioを使う場合は、 アカウントIDと認証トークンが必要です。 + +06:10.990 --> 06:16.630 +Twilioの発信元は、 Twilioが教えてくれる番号です。 + +06:16.630 --> 06:20.260 +そして、 テキストメッセージを送信したい電話番号を入力する。 + +06:20.290 --> 06:23.770 +それで、 私のenvファイルにはこの4つがある。 + +06:23.770 --> 06:27.760 +でも、 ただ書類に記入しただけで使ってはいない。 + +06:27.760 --> 06:32.890 +でも、 自分宛にテキストメッセージを送る許可はまだ下りていない。 + +06:32.980 --> 06:35.680 +あー、 でもそのうちそうなるといいね。 + +06:35.680 --> 06:38.950 +とりあえず、 プッシュ通知は素晴らしい。 + +06:38.950 --> 06:48.370 +そして、 ここに2つのメソッドがあります。 メッセージはTwilio APIのラッパーで、 ご覧のようにとてもシンプルです。 + +06:48.370 --> 06:51.370 +それはTwilioクライアント・ドット・メッセージ・ドット・クリエイトです。 + +06:51.370 --> 06:55.330 +そして、 それが誰から来たのか、 本文と宛先を書いて、 それで終わりです。 + +06:55.330 --> 06:56.740 +そしてテキストメッセージを送信する。 + +06:56.740 --> 06:57.580 +以前にも使ったことがある。 + +06:57.610 --> 06:58.750 +うまくいくんだ。 + +06:58.900 --> 
07:01.210 +テキストメッセージの受信もかなり簡単だ。 + +07:01.240 --> 07:04.720 +ウェブフックと呼ばれるエンドポイントがあればいい。 + +07:04.840 --> 07:07.640 +うーん、 でも、 明らかにその必要はない。 + +07:07.940 --> 07:11.600 +プッシュ通知も非常にシンプルだ。 + +07:11.780 --> 07:20.600 +でも、 APIは使っていないんだ。 なぜなら、 彼らはウェブサイトで、 意図的なものだと熱心に指摘しているからね。 + +07:20.600 --> 07:23.150 +彼らはただ、 できるだけわかりやすくしたいだけなのだ。 + +07:23.150 --> 07:29.180 +彼らはサンプルコードでリクエスト・ライブラリさえ使っていない。 + +07:29.330 --> 07:35.060 +Pythonを使えば、 プッシュ通知を送ることができる。 + +07:35.180 --> 07:39.950 +トークンとメッセージを渡すだけだ。 + +07:39.950 --> 07:42.680 +オプションでサウンドを入れることもできる。 + +07:42.680 --> 07:48.320 +その中のひとつにキャッシュ・レジスターがある。 + +07:48.320 --> 07:54.080 +そして、 通知を受けるとレジの音がとても心地よく、 とても楽しい。 + +07:54.260 --> 07:56.240 +ええと、 それで全部なんだ。 + +07:56.240 --> 08:01.910 +そして、 ここにアラートメソッドがあり、 テキストかプッシュかに基づいて、 + +08:01.910 --> 08:04.280 +何かを構築して教えてくれる。 + +08:04.280 --> 08:08.590 +そしてメッセージを送り、 チャンスをつかむ。 + +08:08.590 --> 08:12.760 +だから、 チャンスにパスをすれば、 そのチャンスについてのメッセージが届く。 + +08:12.790 --> 08:20.350 +覚えているだろうか、 機会とは、 この下で見てきた、 契約、 見積もり、 割引のある対象である。 + +08:21.310 --> 08:21.820 +分かった。 + +08:21.820 --> 08:23.410 +その前に、 さっそく試してみよう。 + +08:23.410 --> 08:26.470 +このインスタンスを作ればいいんだ。 + +08:26.470 --> 08:31.720 +そして、 私は大規模なニュースと私の携帯電話がちょうど行ったpingを言うことができるはずだ。 + +08:31.750 --> 08:38.110 +今、 残念なことに、 私はマイクにかなり強力なノイズフィルターをかけていて、 声だけを出すようにしている。 + +08:38.110 --> 08:42.850 +だから、 レジの音が聞こえなかったのだろう。 + +08:42.850 --> 08:45.790 +幸いなことに、 試してみれば自分の耳で確かめることができる。 + +08:45.910 --> 08:49.690 +あ、 そうそう、 この曲は大好きだから、 もう一度聴きたいんだ。 + +08:49.930 --> 08:51.400 +あ、 そうだ。 + +08:51.580 --> 08:55.030 +そうそう、 僕の時計は2つあるんだ。 + +08:55.210 --> 08:57.340 +だから、 その...。 + +08:57.340 --> 09:00.520 +ああ、 実に満足だ。 + +09:00.520 --> 09:05.350 +それがメッセージング・エージェントだ。 + +09:05.350 --> 09:08.560 +また次回、 企画担当者にお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/59671231/ko_KR.srt b/week5/community-contributions/subtitles/srts/59671231/ko_KR.srt new file mode 100755 index 0000000..b84f31f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59671231/ko_KR.srt @@ -0,0 +1,436 
@@ +WEBVTT + +00:00.350 --> 00:10.850 +다시 주피터 연구실입니다 곧 결과가 나올 텐데 아주 멋진 프로젝트가 있어요 + +00:10.880 --> 00:12.020 +정말 만족스러워요 + +00:12.050 --> 00:13.130 +기대해도 좋아요 + +00:13.160 --> 00:19.250 +이번에는 두 가지의 단순한 에이전트를 더 살펴볼게요 + +00:19.250 --> 00:20.900 +가장 간단한 방법일 거예요 + +00:20.930 --> 00:26.270 +메시지를 보내는 데 쓰는 메시지 에이전트도 있고 활동을 조율하는 + +00:26.270 --> 00:28.700 +기획 에이전트도 있어요 + +00:28.700 --> 00:32.600 +이 에이전트들은 파이썬 코드로 되어있죠 + +00:32.630 --> 00:38.660 +그들은 LM을 사용하지 않아요 LM을 사용하도록 강화하지 않을 이유가 없죠 + +00:38.660 --> 00:44.120 +에이전트는 통지를 보내지만 먼저 개척자 모델에 + +00:44.150 --> 00:51.260 +빠르게 연락해 더 맛있고 해당 제품과 관련 있는 제품을 개선할 수 + +00:51.260 --> 00:51.830 +있죠 + +00:51.830 --> 00:53.690 +할 수 있는 게 많아요 + +00:53.780 --> 00:56.270 +기획 담당자는 코드로 돼 있고요 + +00:56.270 --> 00:58.820 +LLM으로 바꾸는 실험을 할 수 있어요 + +00:58.820 --> 01:01.010 +무슨 일이 일어날지 계획하는 거죠 + +01:01.010 --> 01:04.320 +하지만 잠시 후에 둘 다 보게 될 거예요 + +01:04.740 --> 01:11.670 +그 후에 모든 걸 에이전트 프레임워크에 넣을 거예요. + +01:11.670 --> 01:16.620 +하지만 모든 구성 요소를 합쳤기 때문에 에이전트 프레임워크 + +01:16.620 --> 01:18.630 +만드는 건 식은 죽 먹기죠 + +01:18.630 --> 01:21.150 +아주 간단할 거예요 + +01:21.180 --> 01:24.720 +메신저 에이전트 얘기부터 하죠 + +01:24.720 --> 01:30.240 +좋은 물건을 찾으면 경보를 보내주는 중개인이죠 + +01:30.390 --> 01:33.780 +원래는 문자 메시지를 보내려고 했어요 + +01:33.780 --> 01:41.100 +트윌리오를 쓰려고 했어요 예전에 쓰던 프레임워크가 정말 좋았는데 최근에 규제가 많아져서 + +01:41.100 --> 01:46.740 +트윌리오 계정으로 문자 보내기가 정말 어려워요 심지어 자기 자신한테도요 + +01:46.740 --> 01:52.170 +자기 전화번호를 확인해서 자기한테 문자 보내고 싶어도 서류 작업을 + +01:52.170 --> 01:55.380 +많이 해야 해요 + +01:55.380 --> 01:59.280 +미국에 산다면 다른 나라에서는 훨씬 수월할 거예요 + +01:59.280 --> 02:05.480 +웹사이트에 따르면 미국 내 제한 때문에 규제가 어려워진 것 같아요 + +02:05.510 --> 02:07.730 +트윌리오가 공짜 요금제를 줬어요 + +02:07.850 --> 02:11.150 +트윌리오가 왓츠앱 메시지를 보내도 되고요 + +02:11.150 --> 02:11.660 +그러니 가져요 + +02:11.690 --> 02:16.190 +왓츠앱과 저도 왓츠앱으로 작동시켰지만 제약이 좀 있었어요 샌드박스를 + +02:16.190 --> 02:21.530 +써야만 무료로 사용할 수 있는데 거기엔 제한이 있어요 하루 중 특정 시간 사이에만 + +02:21.530 --> 02:23.420 +사용할 수 있죠 + +02:23.420 --> 02:24.950 +이상적인 상황은 아니었죠 + +02:24.950 --> 02:26.330 +그래서 괜찮았어요 + +02:26.330 --> 02:29.270 
+코드를 여기 뒀으니 원하면 사용하세요 + +02:29.270 --> 02:34.910 +그러다 정말 멋진 걸 발견했죠 만만한 거요 + +02:35.000 --> 02:39.680 +호구도 몇 가지 있지만 호구가 먹어본 것 중에 가장 + +02:39.710 --> 02:40.850 +좋았어요 + +02:41.030 --> 02:41.300 +네 + +02:41.300 --> 02:46.910 +푸시오버는 간단한 웹사이트로 계정을 설정할 수 있어요 + +02:46.910 --> 02:48.140 +공짜예요 + +02:48.170 --> 02:50.120 +앱을 다운로드 하세요 + +02:50.120 --> 02:51.470 +사실 웹사이트에 갈 필요도 없어요 + +02:51.470 --> 02:51.800 +할 수 있어요 + +02:51.830 --> 02:54.890 +그 웹사이트는 푸시오버닷넷이에요 코무요 + +02:54.920 --> 02:57.080 +비용을 감당할 수 없었던 거죠 코무요 + +02:57.080 --> 02:58.310 +아직 몰라요 + +02:58.430 --> 03:00.470 +푸시오버닷넷이 그 웹사이트예요 + +03:00.470 --> 03:02.510 +푸시오버라는 앱도 있어요 + +03:02.630 --> 03:04.650 +설치도 자유롭게 할 수 있어요 + +03:04.650 --> 03:08.520 +적어도 10,000번까진 무료예요 + +03:08.790 --> 03:14.670 +이 프로젝트는 그런 목표에 도달하지 못할 것 같아요. Get up! Get up! + +03:14.670 --> 03:19.380 +지금까지 용돈 메시지를 35개나 보냈어요. Get up! Get up! + +03:19.620 --> 03:24.990 +이 페이지가 좀 조잡해 보이는 이유는 스크롤을 내렸기 때문이에요 이 위에 제 + +03:24.990 --> 03:30.600 +토큰이 큰 글자로 표시돼 있거든요 제 토큰을 전부 보여드리고 싶진 않았어요 + +03:30.600 --> 03:33.990 +아니면 온종일 통보할 수도 있고요 + +03:34.290 --> 03:40.590 +그러니 토큰을 놓치지 않을 수가 없죠 무료 푸시버 계정에 가입하는 순간요 + +03:40.590 --> 03:46.140 +전 관련 없다고 약속하지만 정말 도움이 많이 돼요 + +03:46.230 --> 03:53.280 +일단 등록을 하면 토큰을 가지고 쉽게 푸시 알림을 보낼 수 있습니다 그러면 토큰 두 개가 + +03:53.280 --> 03:58.920 +남게 되는데 푸시오버 사용자는 여러분에게 적용되는 토큰이죠 + +03:58.920 --> 04:02.820 +그런 다음 각각의 응용 프로그램을 셋업하고 하나만 선택하세요 + +04:02.820 --> 04:07.780 +전 그걸 보존이라고 불렀고 토큰을 줄 수 있어요 + +04:08.080 --> 04:09.340 +미안하지만 안 돼요 + +04:09.340 --> 04:14.080 +Get in 응용 프로그램을 설정하면 토큰을 받아요 + +04:14.140 --> 04:18.070 +EMV 파일에 추가해야 할 두 가지가 있어요 + +04:18.130 --> 04:18.850 +아주 잘 지내요 + +04:18.850 --> 04:24.400 +응용 프로그램과 관련된 이미지를 업로드 할 수도 있어요 + +04:24.400 --> 04:28.570 +동전 더미를 둘러보다가 스택을 발견했어요 + +04:28.570 --> 04:30.820 +그래서 그걸 업로드했어요 멋지잖아요 + +04:30.820 --> 04:35.230 +푸시 알림을 받으면 작은 이미지가 뜨는데 제 애플 워치에도 뜨죠. 
+ +04:35.230 --> 04:39.160 +스택 스크린 전체가 동전 더미로 바뀌는데 정말 재미있어요 + +04:39.160 --> 04:40.960 +여러분도 같은 걸 하고 싶을 거예요 + +04:40.990 --> 04:43.480 +어울리는 귀여운 이미지를 찾으세요 + +04:43.510 --> 04:45.100 +그걸 쓰면 옆으로 밀어요 + +04:45.340 --> 04:51.850 +말씀드렸듯이 가능하다면 이미 트윌리오 계정이 있거나 방문해 + +04:51.880 --> 04:57.550 +보면 가입한 나라가 어디든 아주 간단해요 어떻게든 문자를 + +04:57.550 --> 05:04.060 +받을 수 있죠 푸시 알림 대신 SMS 문자 메시지를요 + +05:04.090 --> 05:06.750 +하지만 푸시 알림만 있으면 돼요 + +05:06.870 --> 05:07.500 +네 + +05:07.500 --> 05:14.760 +바로 이 클래스 메시지 에이전트로 갈게요 에이전트 하의 에이전트요 + +05:15.120 --> 05:17.430 +메시지를 보내는 요원도 우리 요원이에요 + +05:17.430 --> 05:18.090 +여기 있네요 + +05:18.090 --> 05:19.410 +아주 간단해요 + +05:19.410 --> 05:24.930 +기본적으로 트윌리오 API와 무허가 API를 감싸는 래퍼예요 + +05:24.960 --> 05:32.580 +상단에 텍스트 do와 push를 설정할 수 있는 두 상수로 설정했어요 원하시면 어느 + +05:32.580 --> 05:37.920 +쪽도 아닌 둘 다 가질 수 있으니까요 텍스트 do와 push를 + +05:37.920 --> 05:41.040 +원하는 경우에 설정할 수 있죠 + +05:41.040 --> 05:49.920 +이닛, 즉 생성자가 다양한 기준을 설정해요 + +05:49.920 --> 05:55.920 +푸시 알림을 하고 있다면 저처럼요 푸시오버 사용자와 푸시오버 토큰이 필요하다면 + +05:55.920 --> 06:01.230 +여기에 바로 입력하면 됩니다 imf 파일로 번거롭게 하고 싶지 않다면요 + +06:01.380 --> 06:03.930 +그럼 이 코드를 푸시하지 마세요 + +06:03.930 --> 06:10.930 +트윌리오를 사용하려면 계정 시드, 인증 토큰이 필요해요 + +06:10.990 --> 06:16.630 +트윌리오가 알려줄 번호가 발신 번호예요 + +06:16.630 --> 06:20.260 +그리고 문자 메시지를 보낼 전화번호도 있어요 + +06:20.290 --> 06:23.770 +제 부럽게도 이 4개가 있죠 + +06:23.770 --> 06:27.760 +하지만 안 쓸 거예요 서류는 작성했으니까요 + +06:27.760 --> 06:32.890 +하지만 아직 나한테 문자 보내도 된다는 인가는 못 받았어요 + +06:32.980 --> 06:35.680 +조만간 그렇게 되길 바라야죠 + +06:35.680 --> 06:38.950 +그동안 푸시 알림은 훌륭하죠 + +06:38.950 --> 06:45.820 +메서드가 2개 있는데 메시지는 Twilio API를 감싸는 래퍼예요 + +06:45.820 --> 06:48.370 +보다시피 아주 간단하죠 + +06:48.370 --> 06:51.370 +Twilio 클라이언트 .Message.Create예요 + +06:51.370 --> 06:55.330 +누가 보낸 문자인지 누구에게 보낼 건지 말하면 끝이죠 + +06:55.330 --> 06:56.740 +문자 메시지도 보내죠 + +06:56.740 --> 06:57.580 +전에 써봤어요 + +06:57.610 --> 06:58.750 +그냥 잘 맞아요 + +06:58.900 --> 07:01.210 +문자 받는 것도 꽤 쉬워요 + +07:01.240 --> 07:04.720 +웹훅이라 불리는 엔드포인트가 있어야 해요 + +07:04.840 --> 07:07.640 +하지만 그럴 필요는 
없어요 + +07:07.940 --> 07:11.600 +푸시 알림도 아주 간단해요 + +07:11.780 --> 07:17.930 +API 사용은 안 해요 웹사이트에서 의도적이라는 걸 꼭 지적하고 + +07:17.960 --> 07:20.600 +싶어 하거든요 + +07:20.600 --> 07:23.150 +최대한 단순하게 가고 싶어 해요 + +07:23.150 --> 07:27.710 +예제 코드에서 요청 라이브러리도 사용하지 않아요 아무것도 불러올 필요가 없다고 + +07:27.710 --> 07:29.180 +말하고 싶으니까요 + +07:29.330 --> 07:35.060 +단순히 out out a box 파이썬을 이용해서 푸시 알림을 보낼 수 있어요. + +07:35.180 --> 07:39.950 +토큰으로 메시지를 전달하면 돼요 + +07:39.950 --> 07:42.680 +선택적으로 소리를 포함할 수도 있어요 + +07:42.680 --> 07:48.320 +그리고 제가 제안한 것 중 하나가 현금 등록기라는 건데요 + +07:48.320 --> 07:54.080 +알림을 받으면 아주 기분 좋은 계산대 소리가 나요 Get it! 아주 재미있죠 + +07:54.260 --> 07:56.240 +그게 전부인 것 같아요 + +07:56.240 --> 08:01.910 +그리고 알림 메서드가 있어요 문자를 보내든 푸시를 하든 뭔가 + +08:01.910 --> 08:04.280 +설명할 걸 생성하죠 + +08:04.280 --> 08:08.590 +그리고 메시지를 보내는데 기회를 잡는 거죠 + +08:08.590 --> 08:12.760 +Get in get 기회를 놓치면 그 기회에 대한 메시지를 받게 되죠 + +08:12.790 --> 08:19.330 +기억하시죠? 기회는 우리가 여기서 본 객체입니다 할인과 견적서, 할인을 가지고 + +08:19.330 --> 08:20.350 +있죠 + +08:21.310 --> 08:21.820 +좋아요 + +08:21.820 --> 08:23.410 +하지만 먼저 빨리 시험해 보죠 + +08:23.410 --> 08:26.470 +이것의 인스턴스를 생성할 수 있어야 해요 + +08:26.470 --> 08:31.720 +엄청난 소식을 전하면 핑이 울려야 하는데 말이죠 + +08:31.750 --> 08:38.110 +안타깝게도 마이크에 있는 노이즈 필터가 너무 공격적이라 목소리만 나오게 돼 있어요 + +08:38.110 --> 08:42.850 +금전 등록기 소리를 못 들으셨나 봐요 정말 기분 좋네요 + +08:42.850 --> 08:45.790 +직접 해 보시면 들을 수 있을 거예요 + +08:45.910 --> 08:49.690 +네, 그 말 다시 듣고 싶어요 너무 좋거든요 + +08:49.930 --> 08:51.400 +저기 가네요 + +08:51.580 --> 08:55.030 +네, 시계 2호도 있어요 + +08:55.210 --> 08:57.340 +그래서... 
네 + +08:57.340 --> 09:00.520 +네, 아주 만족스러워요 + +09:00.520 --> 09:05.350 +그 메시지 전달원이란 사람이에요 + +09:05.350 --> 09:08.560 +기획사 일로 다음에 뵙죠 diff --git a/week5/community-contributions/subtitles/srts/59671315/en_US.srt b/week5/community-contributions/subtitles/srts/59671315/en_US.srt new file mode 100755 index 0000000..fe04de1 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59671315/en_US.srt @@ -0,0 +1,469 @@ +WEBVTT + +00:01.040 --> 00:07.820 +Okay, so here we are, back in Jupyter Lab, ready for the next use of a frontier model, and see this + +00:07.820 --> 00:10.460 +now as somewhere where you're really building expertise. + +00:10.460 --> 00:12.320 +This should be very natural to you. + +00:12.710 --> 00:17.900 +As usual, we start by considering our system prompt and being thoughtful about how we will prompt the + +00:17.900 --> 00:18.470 +model. + +00:18.800 --> 00:25.610 +Um, what we're going to say to to the model is, look, you identify and summarize the five most detailed + +00:25.610 --> 00:32.990 +items from a list by selecting deals that have the most detailed, high quality descriptions and the + +00:32.990 --> 00:37.820 +most clear price, uh, respond strictly in JSON, blah, blah, blah. + +00:37.910 --> 00:42.740 +Um, most important is you respond with five deals that have the most detailed product description. + +00:42.740 --> 00:44.810 +It's not important to mention the terms. + +00:45.110 --> 00:49.940 +Um, and now I also I have laid out here the JSON structure. + +00:49.940 --> 00:54.020 +I should probably mention that this isn't required because we're going to be using structured outputs, + +00:54.020 --> 01:01.070 +but I like to do this to use it as another opportunity to give more direction on exactly what's expected + +01:01.070 --> 01:01.940 +in each one. + +01:02.120 --> 01:04.650 +Uh, so this is something which you can experiment with. + +01:04.650 --> 01:08.700 +But strictly speaking we shouldn't need to specify JSON like that. 
+ +01:09.420 --> 01:11.640 +So that's my system prompt. + +01:11.670 --> 01:16.740 +The user prompt says respond with the most promising five deals from this list. + +01:16.740 --> 01:17.700 +Selecting those. + +01:17.730 --> 01:18.510 +Blah blah blah blah blah. + +01:18.540 --> 01:19.470 +I will let you read it through. + +01:19.470 --> 01:20.610 +But you know, this kind of stuff. + +01:20.610 --> 01:29.730 +Well, uh, and um, uh, the deals and then it lists the deals and it calls deal dot describe for each + +01:29.730 --> 01:30.780 +of those. + +01:31.410 --> 01:31.800 +Okay. + +01:31.830 --> 01:32.310 +That's simple. + +01:32.310 --> 01:36.660 +So now let me just print the beginning of the user prompt is quite long. + +01:36.660 --> 01:39.720 +So I'm just going to print the first 2000 characters of the user prompt. + +01:39.720 --> 01:41.310 +So you get a sense for it. + +01:41.340 --> 01:42.270 +Here it is. + +01:42.270 --> 01:43.290 +Let's have a look. + +01:43.290 --> 01:48.270 +So there's the uh user prompt respond with blah blah blah blah blah blah blah. + +01:48.270 --> 01:50.370 +And then here are the deals. + +01:50.370 --> 01:58.500 +And this is the description that we've taken from um, the, the, the description function that we + +01:58.500 --> 02:00.960 +looked at before the describe method. + +02:00.960 --> 02:10.010 +So essentially the task that we're assigning to GPT four is to digest this list of deals, select pluck + +02:10.040 --> 02:12.650 +out from this list and there's going to be 50 of them. + +02:12.650 --> 02:19.220 +Pick the five which have the clearest description and from which it can take the price. + +02:19.250 --> 02:24.950 +Parse the price out of this in a way that is that is most clear. + +02:25.100 --> 02:29.240 +And that is, of course, a fuzzy kind of problem that requires some thought. + +02:29.240 --> 02:33.680 +It's the kind of thing that would be only a year or two ago would have been unthinkably hard. 
+ +02:33.770 --> 02:38.180 +How would you do that in a reliable way, using just engineering code. + +02:38.180 --> 02:44.990 +And it's the kind of problem that's so perfect for frontier models, because that kind of nuanced understanding + +02:44.990 --> 02:51.260 +and being able to realize that up to something means a price and that, but up to something off is not + +02:51.260 --> 02:52.190 +the same as a price. + +02:52.190 --> 02:54.620 +That kind of logic is really hard. + +02:54.620 --> 03:00.680 +But frontier models are remarkably effective at understanding that, understanding the meaning behind + +03:00.680 --> 03:05.630 +these expressions and using that intelligently to give us an output. + +03:05.630 --> 03:12.950 +So just based on these prompts, we're able to now say, okay, let's get recommendations. + +03:12.950 --> 03:18.290 +And now we're going to see a different API than usual because we're using structured output. + +03:18.710 --> 03:23.300 +So my function get recommendations completion is OpenAI. + +03:23.300 --> 03:24.800 +And look it's a bit different here. + +03:24.800 --> 03:31.160 +The this is at least as of this moment uh is a beta offering from OpenAI. + +03:31.190 --> 03:34.310 +But I imagine it's going to be fully productionized very, very soon. + +03:34.310 --> 03:39.110 +So you say OpenAI beta or that's beta for Americans. + +03:39.170 --> 03:44.870 +Uh, OpenAI beta, dot chat, dot completions, dot parse. + +03:44.960 --> 03:52.520 +Uh, that's the, the the way that you call this, um, and you provide the model as, as usual. + +03:52.520 --> 03:57.620 +And this is supported by GPT four and mini and GPT four, the frontier version two. + +03:57.770 --> 04:02.120 +Uh, as always, the message is, you know, I'm just going to not even say it. + +04:02.120 --> 04:02.870 +You know it. + +04:02.870 --> 04:04.100 +You don't need me to tell you. + +04:04.130 --> 04:07.310 +Surely, uh, this is the difference. 
+ +04:07.310 --> 04:12.440 +You remember, in the past, we've used this response format as a way of specifying that we require + +04:12.470 --> 04:14.000 +JSON to come back. + +04:14.030 --> 04:16.700 +Now, this time we don't say JSON. + +04:16.730 --> 04:20.900 +What we do is we pass in the class the deal selection class. + +04:20.900 --> 04:25.490 +That's the class we were just looking at here, the one that I did some scrappy JSON for. + +04:25.520 --> 04:30.020 +We pass in that class in as our response format. + +04:30.500 --> 04:35.810 +And then when we're getting the results we don't call our usual API calls. + +04:35.810 --> 04:43.340 +We say completion is zero as usual dot message and then not content but instead dot parsed. + +04:43.340 --> 04:49.970 +And that is going to take the response and parse it into an instance of deal selection. + +04:50.000 --> 04:52.100 +And that is what it will return. + +04:52.400 --> 04:54.710 +So with that build up it better return it. + +04:54.740 --> 04:59.780 +Let's run that code and see what happens and what we get back. + +04:59.780 --> 05:05.150 +So we are calling GPT for zero and we're sorry GPT for mini. + +05:05.180 --> 05:09.830 +We are specifying a particular structured format of output. + +05:09.830 --> 05:10.820 +It has returned. + +05:10.850 --> 05:17.390 +Let's see why don't we print what type of thing has come back type results. + +05:17.390 --> 05:19.520 +Let's see what kind of object has come back. + +05:19.520 --> 05:23.210 +It is an agents deals deal selection. + +05:23.210 --> 05:26.150 +That is indeed the kind of object that we were hoping to get back. + +05:26.150 --> 05:31.760 +I think we better, uh, see what it actually looks like. + +05:31.790 --> 05:34.490 +It's going to be a set. + +05:34.610 --> 05:38.780 +Uh, it's a deal selection which has within it deals is the one field. + +05:38.780 --> 05:39.350 +If you remember. + +05:39.380 --> 05:40.850 +Deals is the one thing we set. 
+ +05:40.850 --> 05:42.140 +And here are the deals. + +05:42.140 --> 05:45.830 +So let's let's find out how many deals we got back. + +05:46.460 --> 05:49.730 +We are hoping of course, that we got back five deals. + +05:50.000 --> 05:50.870 +Let's see. + +05:50.900 --> 05:53.870 +We did indeed get back five deals. + +05:53.870 --> 05:57.020 +And let's print the first of those deals. + +05:57.020 --> 06:02.780 +And we get a Samsung Galaxy Watch Ultra, a top tier smartwatch. + +06:02.960 --> 06:06.710 +Uh, and we've got a nice summary of this. + +06:06.710 --> 06:10.010 +And you'll notice this is actually looking it's looking great for us. + +06:10.010 --> 06:14.150 +It's looking like this is the kind of summary that we're going to want to use when we pass it into our + +06:14.150 --> 06:22.250 +prices to estimate the price of this, and GPT four mini has pulled out a price and a URL. + +06:22.250 --> 06:26.060 +And of course, if we click on that URL, it's going to open up that deal. + +06:26.060 --> 06:27.020 +There it is. + +06:27.020 --> 06:28.160 +Wonderful. + +06:28.310 --> 06:32.690 +Uh, so it is working well for us. + +06:32.960 --> 06:34.790 +Uh, okay. + +06:34.820 --> 06:46.850 +Now, the last thing to show here is that, um, we are going to, uh, look to import our scanner agent. + +06:46.850 --> 06:50.600 +So I'm going to do the thing again where I'm going to kick this off because it's going to take a minute + +06:50.600 --> 06:51.380 +or two. + +06:51.770 --> 06:57.950 +Um, and while it's running, I'm going to go over and show you what the scanner agent is. + +06:57.950 --> 07:01.370 +So the scanner agent is over here. + +07:01.640 --> 07:08.180 +Uh, this is a bunch of code that's going to look very similar to the code that we looked at before. + +07:08.540 --> 07:15.920 +Uh, and so it's, uh, just been written now with comments and with better structure so that this is + +07:15.920 --> 07:18.620 +something we can consider to be production quality. 
+ +07:18.620 --> 07:18.620 +see. + +07:18.890 --> 07:21.710 +Um, but it has the system prompts that we just looked at. + +07:21.710 --> 07:28.880 +It has a user prompt that we looked at and it says respond strictly with five deals, no more. + +07:28.880 --> 07:33.890 +And what we get is an init method that sets it up and connects to OpenAI. + +07:33.920 --> 07:41.000 +The fetch deals is the method that we were just looking at that does, uh, gets a list of scraped deal + +07:41.300 --> 07:45.020 +by calling scrape deal fetch, just as you would expect. + +07:45.050 --> 07:52.820 +What it also does here, though, is that potentially it gets a memory object, which is a list of URLs, + +07:52.820 --> 07:59.540 +and it will remove from the results anything that it remembers has already been surfaced in a previous + +07:59.540 --> 08:00.200 +search. + +08:00.200 --> 08:05.030 +And this is how we make sure that we fetch new deals, not deals that we've already looked at before. + +08:05.030 --> 08:10.850 +And of course, memory, which is one of the of the facets of building an agent framework, is something + +08:10.850 --> 08:12.860 +we'll be looking at more in the future. + +08:13.790 --> 08:20.510 +Uh, and then we make user prompt and then we do, uh, this is the same code as before. + +08:20.510 --> 08:25.100 +And then scan is basically exactly the function that we just looked at. + +08:25.130 --> 08:30.920 +We make a user prompt and then we call OpenAI beta dot chat, dot completions, dot parse. + +08:30.920 --> 08:32.390 +We pass in the model. + +08:32.390 --> 08:39.260 +We specify that the response format needs to be the deal selection class, so that we know that it's + +08:39.260 --> 08:43.250 +going to return that and we parse it back into result. + +08:43.250 --> 08:46.130 +And that is what we get to right here. + +08:46.340 --> 08:53.000 +When we then, um, uh, we do first remove any deals. 
+ +08:53.000 --> 08:57.860 +If the price isn't greater than zero, we don't want to return deals which have zero price. + +08:58.010 --> 09:00.020 +Um, and we return that result. + +09:00.020 --> 09:05.180 +And if we got no deals back, um, because perhaps we'd already surfaced everything, then we return + +09:05.180 --> 09:05.750 +none. + +09:05.750 --> 09:08.930 +And that's indicated by that optional at the top there. + +09:08.990 --> 09:10.640 +So that's the class. + +09:10.670 --> 09:13.340 +And with that, let me go back to day three. + +09:13.340 --> 09:16.790 +And we should see that the results have come back. + +09:17.900 --> 09:22.190 +Now one of the things I'm aware of that you may have caught, you may have noticed this yourself when + +09:22.190 --> 09:27.020 +I was doing this, but I was busy thinking about this while I was talking is that I think we might have + +09:27.020 --> 09:35.300 +just seen a goof from the model that the price here it's saying, is $350 associated with this Samsung + +09:35.300 --> 09:36.230 +Galaxy Watch. + +09:36.230 --> 09:45.530 +But I think when this website came up, let's close this down that it says up to 350 off with trade + +09:45.530 --> 09:45.860 +in. + +09:45.860 --> 09:48.830 +If you have another device you can save up to 350. + +09:48.860 --> 09:55.280 +So after I showed off about how frontier models are so incredibly good at this stuff, this is an example + +09:55.280 --> 10:01.850 +of a mistake that the model has actually made, that it thinks that this is worth 350, when in fact + +10:01.850 --> 10:03.530 +it's $350 off. + +10:03.530 --> 10:06.020 +So I should eat my words. + +10:06.020 --> 10:07.940 +And it did actually get that wrong. + +10:07.940 --> 10:09.620 +And it's going to be interesting. + +10:09.650 --> 10:13.400 +That is, it's useful to have learned that and to have seen that. 
+ +10:13.670 --> 10:20.150 +And I might try adding that in to the system prompt and see if we can improve on the performance of + +10:20.180 --> 10:20.630 +that. + +10:20.630 --> 10:23.360 +Now that we see that it can make those mistakes. + +10:23.360 --> 10:27.380 +Anyway, I will see you next time for the slides. diff --git a/week5/community-contributions/subtitles/srts/59671315/ja_JP.srt b/week5/community-contributions/subtitles/srts/59671315/ja_JP.srt new file mode 100755 index 0000000..9d2a967 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59671315/ja_JP.srt @@ -0,0 +1,409 @@ +WEBVTT + +00:01.040 --> 00:10.460 +さて、 ここでJupyter Labに戻り、 フロンティア・モデルの次の使用に備える。 + +00:10.460 --> 00:12.320 +これはごく自然なことのはずだ。 + +00:12.710 --> 00:18.470 +いつものように、 システムのプロンプトを検討し、 モデルをどのようにプロンプトさせるかを考えることから始める。 + +00:18.800 --> 00:25.610 +つまり、 最も詳細で質の高い説明があり、 価格が明確で、 JSONで厳密に応答する案件を選択することで、 + +00:25.610 --> 00:37.820 +リストから最も詳細な5つの項目を特定し、 要約するのです。 + +00:37.910 --> 00:42.740 +ええと、 最も重要なのは、 最も詳細な商品説明がある5つの案件を回答することです。 + +00:42.740 --> 00:44.810 +用語について言及することは重要ではない。 + +00:45.110 --> 00:49.940 +それから、 JSONの構造もここに書きました。 + +00:49.940 --> 00:54.020 +構造化されたアウトプットを使うのだから、 + +00:54.020 --> 01:01.940 +これは必須ではないことは言っておくべきだろう。 + +01:02.120 --> 01:04.650 +だから、 これは実験できることなんだ。 + +01:04.650 --> 01:08.700 +しかし厳密に言えば、 JSONをこのように指定する必要はないはずだ。 + +01:09.420 --> 01:11.640 +これが私のシステム・プロンプトだ。 + +01:11.670 --> 01:16.740 +ユーザーには、 このリストの中から最も有望な5つの取引を回答するよう促される。 + +01:16.740 --> 01:17.700 +それらを選ぶ。 + +01:17.730 --> 01:18.510 +ブラブラ、 ブラブラ、 ブラブラ。 + +01:18.540 --> 01:19.470 +最後まで読んでほしい。 + +01:19.470 --> 01:20.610 +でもね、 こういうことなんだ。 + +01:31.410 --> 01:31.800 +オーケー。 + +01:31.830 --> 01:32.310 +簡単なことだ。 + +01:32.310 --> 01:36.660 +それでは、 ユーザー・プロンプトの冒頭をプリントしてみよう。 + +01:36.660 --> 01:39.720 +そこで、 ユーザー・プロンプトの最初の2000文字を表示することにする。 + +01:39.720 --> 01:41.310 +それで感覚をつかむんだ。 + +01:41.340 --> 01:42.270 +これだ。 + +01:42.270 --> 01:43.290 +見てみよう。 + +01:43.290 --> 01:48.270 +だから、 ユーザーのプロンプトは、 ブラブラ、 ブラブラ、 ブラブラ、 ブラブラと答えるんだ。 + 
+01:48.270 --> 01:50.370 +そしてお得な情報だ。 + +01:50.370 --> 02:00.960 +そして、 これはdescribeメソッドの前に見たdescribe関数から取り出した記述です。 + +02:00.960 --> 02:12.650 +つまり、 GPT4に課している仕事は、 この案件リストを消化し、 その中から50件を選び出すことだ。 + +02:12.650 --> 02:19.220 +最も明確な説明があり、 価格がわかる5つを選ぶ。 + +02:19.250 --> 02:24.950 +最も明確な方法でこの価格を解析する。 + +02:25.100 --> 02:29.240 +そしてそれはもちろん、 考えなければならないファジーな種類の問題である。 + +02:29.240 --> 02:33.680 +ほんの1、 2年前なら考えられなかったようなことだ。 + +02:33.770 --> 02:38.180 +エンジニアリング・コードだけを使って、 信頼性の高い方法でそれを行うにはどうしたらいいのだろう。 + +02:38.180 --> 02:44.990 +フロンティア・モデルには最適な問題だ。 このようなニュアンスの理解や、 何かまでは価格を意味し、 + +02:44.990 --> 02:52.190 +何かから離れることまでは価格と同じではないことを理解することができるからだ。 + +02:52.190 --> 02:54.620 +そういうロジックは本当に難しい。 + +02:54.620 --> 03:00.680 +しかし、 フロンティア・モデルは、 それを理解し、 これらの表現の背後にある意味を理解し、 それをインテリジェントに使ってアウトプットを出すという点で、 + +03:00.680 --> 03:05.630 +驚くほど効果的なのだ。 + +03:05.630 --> 03:12.950 +だから、 これらのプロンプトに基づいて、 オーケー、 推薦状をもらおう、 と言えるようになった。 + +03:12.950 --> 03:18.290 +そして今、 構造化出力を使っているため、 通常とは異なるAPIを見ることになる。 + +03:18.710 --> 03:23.300 +だから、 私の推薦文を完成させる機能はOpenAIだ。 + +03:23.300 --> 03:24.800 +そして、 ここはちょっと違う。 + +03:24.800 --> 03:31.160 +少なくとも現時点では、 これはOpenAIのベータ版である。 + +03:31.190 --> 03:34.310 +でも、 もうすぐ完全生産化されるんだろうね。 + +03:34.310 --> 03:39.110 +OpenAIのベータ版、 あるいはアメリカ人にとってはベータ版ということですね。 + +03:39.170 --> 03:44.870 +OpenAIのベータ版、 ドットチャット、 ドット補完、 ドット解析。 + +03:44.960 --> 03:52.520 +そうやって、 いつものようにモデルを提供するんだ。 + +03:52.520 --> 03:57.620 +そして、 これはGPT4とミニ、 GPT4、 フロンティア・バージョン2でサポートされている。 + +03:57.770 --> 04:02.120 +ええと、 いつものように、 メッセージは、 ほら、 言わないでおこうと思ってるんだ。 + +04:02.120 --> 04:02.870 +知っているはずだ。 + +04:02.870 --> 04:04.100 +私が言うまでもないだろう。 + +04:04.130 --> 04:07.310 +確かに、 あー、 ここが違う。 + +04:07.310 --> 04:14.000 +過去に、 JSONを要求することを指定する方法として、 このレスポンス・フォーマットを使用したことを覚えているだろう。 + +04:14.030 --> 04:16.700 +さて、 今回はJSONとは言わない。 + +04:16.730 --> 04:20.900 +私たちがすることは、 取引選択クラスをクラスに渡すことだ。 + +04:20.900 --> 04:25.490 +これが今見ていたクラスで、 私がJSONをスクラップしたものだ。 + +04:25.520 --> 04:30.020 +そのクラスを応答フォーマットとして渡す。 + +04:30.500 --> 04:35.810 +そして、 結果を得るときには、 
通常のAPIコールは呼び出さない。 + +04:35.810 --> 04:43.340 +完了はいつものようにドット・メッセージでゼロ、 そして内容ではなくドット・パースされたものとする。 + +04:43.340 --> 04:49.970 +そして、 その応答を解析して、 ディールセレクションのインスタンスにする。 + +04:50.000 --> 04:52.100 +そして、 それが返ってくる。 + +04:52.400 --> 04:54.710 +だから、 その積み重ねで返した方がいい。 + +04:54.740 --> 04:59.780 +このコードを実行し、 何が起こり、 何が返ってくるか見てみよう。 + +04:59.780 --> 05:05.150 +だからGPTはゼロ、 GPTはミニと呼んでいる。 + +05:05.180 --> 05:09.830 +特定の構造化された出力形式を指定しているのだ。 + +05:09.830 --> 05:10.820 +戻ってきた。 + +05:10.850 --> 05:17.390 +では、 どのような結果が出たのかプリントしてみよう。 + +05:17.390 --> 05:19.520 +どんなものが戻ってきたのか見てみよう。 + +05:19.520 --> 05:23.210 +エージェントのお得な契約セレクションだ。 + +05:23.210 --> 05:26.150 +それこそ、 私たちが取り戻したいと願っていたものだ。 + +05:26.150 --> 05:31.760 +実際にどうなのか、 見たほうがいいと思う。 + +05:31.790 --> 05:34.490 +セットだろう。 + +05:34.610 --> 05:38.780 +ええと、 これはディールの選択で、 その中にディールというフィールドがあります。 + +05:38.780 --> 05:39.350 +覚えているかな? + +05:39.380 --> 05:40.850 +ディールは私たちが設定したものだ。 + +05:40.850 --> 05:42.140 +そして、 こちらがお得な情報だ。 + +05:42.140 --> 05:45.830 +では、 どれだけのディールが戻ってきたかを見てみよう。 + +05:46.460 --> 05:49.730 +もちろん、 5つの契約を取り戻したいと思っている。 + +05:50.000 --> 05:50.870 +見てみよう。 + +05:50.900 --> 05:53.870 +確かに5つの契約を取り返すことができた。 + +05:53.870 --> 05:57.020 +そして、 その最初の取引を印刷しよう。 + +05:57.020 --> 06:02.780 +そして、 トップクラスのスマートウォッチ、 サムスン・ギャラクシー・ウォッチ・ウルトラを手に入れた。 + +06:02.960 --> 06:06.710 +ええと、 これをうまくまとめたものがあるんだ。 + +06:06.710 --> 06:10.010 +そして、 これが実際、 我々にとって素晴らしいものに見えていることにお気づきだろう。 + +06:10.010 --> 06:22.250 +GPT4ミニが価格とURLを出してきた。 + +06:22.250 --> 06:26.060 +もちろん、 そのURLをクリックすれば、 その取引が開かれる。 + +06:26.060 --> 06:27.020 +あれだ。 + +06:27.020 --> 06:28.160 +素晴らしい。 + +06:28.310 --> 06:32.690 +僕らにとってはうまくいっているよ。 + +06:32.960 --> 06:34.790 +ああ、 わかった。 + +06:34.820 --> 06:46.850 +さて、 最後にスキャナ・エージェントをインポートしてみましょう。 + +06:46.850 --> 06:51.380 +1、 2分かかるだろうから、 もう一度、 キックオフをやってみるよ。 + +06:51.770 --> 06:57.950 +その間に、 スキャナー・エージェントがどんなものかをお見せしましょう。 + +06:57.950 --> 07:01.370 +スキャナー係はこっちだ。 + +07:01.640 --> 07:08.180 +ええと、 これは前に見たコードとよく似たコードの束です。 + +07:08.540 --> 07:18.620 +それで、 今、 コメントを入れて、 よりよい構成で書いている。 + 
+07:18.620 --> 07:18.620 +ご覧ください。 + +07:18.890 --> 07:21.710 +でも、 さっき見たようなシステムプロンプトがあるんだ。 + +07:21.710 --> 07:28.880 +ユーザープロンプトがあり、 それを見ると、 5つのディールを厳守してください。 + +07:28.880 --> 07:33.890 +そして init メソッドがセットアップされ、 OpenAI に接続される。 + +07:33.920 --> 07:45.020 +fetch dealsは今見ていたメソッドで、 scrape deal fetchを呼び出してスクレイピングされたディールのリストを取得します。 + +07:45.050 --> 07:52.820 +しかし、 ここでもやっていることは、 潜在的にURLのリストであるメモリー・オブジェクトを取得し、 + +07:52.820 --> 08:00.200 +以前の検索ですでに表示されたと記憶しているものを結果から削除することである。 + +08:00.200 --> 08:05.030 +こうして、 すでに見たことのある案件ではなく、 新しい案件を獲得するようにしている。 + +08:05.030 --> 08:10.850 +そしてもちろん、 エージェント・フレームワークを構築する上で重要な要素のひとつであるメモリーについては、 + +08:10.850 --> 08:12.860 +今後さらに検討していくつもりだ。 + +08:13.790 --> 08:20.510 +そして、 ユーザー・プロンプトを作成し、 そして、 これは前と同じコードだ。 + +08:20.510 --> 08:25.100 +そして、 スキャンは基本的に、 先ほど見た機能そのものである。 + +08:25.130 --> 08:30.920 +ユーザーのプロンプトを作成し、 OpenAIベータにドットチャット、 ドット補完、 ドット解析を呼び出します。 + +08:30.920 --> 08:32.390 +我々はモデルにパスを出す。 + +08:32.390 --> 08:39.260 +レスポンス・フォーマットが取引選択クラスである必要があることを指定し、 それが返されることを確認し、 + +08:39.260 --> 08:43.250 +それをパースしてresultに戻す。 + +08:43.250 --> 08:46.130 +そして、 それが今ここにある。 + +08:46.340 --> 08:53.000 +そうしたら、 えーと、 えーと、 まず、 どんな取引も排除する。 + +08:53.000 --> 08:57.860 +価格がゼロより大きくない場合、 価格がゼロの案件を返したくありません。 + +08:58.010 --> 09:00.020 +そしてその結果を返す。 + +09:00.020 --> 09:05.750 +そして、 もし何の取引も返ってこなかったら、 うーん、 おそらく、 すでにすべて表面化していたのだろうから、 何も返さない。 + +09:05.750 --> 09:08.930 +そして、 それは一番上のオプションで示されている。 + +09:08.990 --> 09:10.640 +それがこのクラスだ。 + +09:10.670 --> 09:13.340 +ということで、 3日目に戻ろう。 + +09:13.340 --> 09:16.790 +そして、 結果が戻ってきたことを確認しなければならない。 + +09:17.900 --> 09:22.190 +さて、 私が気づいたことのひとつに、 皆さんもお気づきかもしれませんが、 + +09:22.190 --> 09:27.020 +私がこの作業をしているときに、 皆さんもお気づきかもしれませんが、 + +09:27.020 --> 09:36.230 +私はこのことを考えながら話をするのに夢中でした。 + +09:36.230 --> 09:45.860 +でも、 このウェブサイトを見たとき、 下取りで最大350ドル引きと書いてあった気がする。 + +09:45.860 --> 09:48.830 +他のデバイスをお持ちの場合、 最大で350ドル節約できます。 + +09:48.860 --> 09:55.280 +フロンティア・モデルがいかにこのようなことに長けているかということをお見せした後で、 + +09:55.280 --> 10:03.530 +これはモデルが実際に犯したミスの例である。 + +10:03.530 --> 
10:06.020 +だから、 私は自分の言葉を食べなければならない。 + +10:06.020 --> 10:07.940 +そして、 実際にそれは間違っていた。 + +10:07.940 --> 10:09.620 +そして面白くなりそうだ。 + +10:09.650 --> 10:13.400 +つまり、 それを学び、 それを見たことは有益なのだ。 + +10:13.670 --> 10:20.630 +システム・プロンプトにそれを追加して、 そのパフォーマンスを改善できるかどうか試してみるかもしれない。 + +10:20.630 --> 10:23.360 +今、 私たちはそれがミスを犯す可能性があることを知った。 + +10:23.360 --> 10:27.380 +とにかく、 スライドはまた次回に。 diff --git a/week5/community-contributions/subtitles/srts/59671315/ko_KR.srt b/week5/community-contributions/subtitles/srts/59671315/ko_KR.srt new file mode 100755 index 0000000..91fb7ae --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59671315/ko_KR.srt @@ -0,0 +1,445 @@ +WEBVTT + +00:01.040 --> 00:07.820 +다시 주피터 연구실입니다 프런티어 모델을 사용할 준비가 됐어요 전문 지식을 + +00:07.820 --> 00:10.460 +구축하는 곳으로 보고 싶네요 + +00:10.460 --> 00:12.320 +아주 자연스러울 거예요 + +00:12.710 --> 00:18.470 +늘 그렇듯 시스템 프롬프트와 모델을 어떻게 프롬프트할지 심사숙고하며 시작하죠 + +00:18.800 --> 00:25.610 +모델에게 하는 말은 목록에서 가장 자세한 5가지 항목을 골라 요약하라는 + +00:25.610 --> 00:32.990 +겁니다 가장 자세하고, 잘 묘사되고 가장 확실한 가격이 있는 걸 선택해서요 + +00:32.990 --> 00:37.820 +JSON에 대응하세요 + +00:37.910 --> 00:42.740 +가장 중요한 건 5건의 거래에 가장 자세한 제품 설명이 있는 거예요 + +00:42.740 --> 00:44.810 +조건은 중요하지 않아요 + +00:45.110 --> 00:49.940 +그리고 여기 JSON 구조도 배치해뒀어요 + +00:49.940 --> 00:54.020 +꼭 필요한 것은 아니라는 것을 말씀드리고 싶네요. 구조화된 출력물들을 + +00:54.020 --> 01:01.070 +사용할 것이기 때문입니다. 하지만 저는 이것을 또 다른 기회로 사용하고 싶습니다. 각각의 것에 대해 더 많은 방향을 제시할 수 + +01:01.070 --> 01:01.940 +있도록요. 
+ +01:02.120 --> 01:04.650 +실험해 볼 수 있는 거예요 + +01:04.650 --> 01:08.700 +엄밀히 말하면 JSON을 그렇게 지정할 필요는 없어요 + +01:09.420 --> 01:11.640 +그게 제 시스템 프롬프트예요 + +01:11.670 --> 01:16.740 +사용자 프롬프트는 이 목록에서 가장 유망한 거래 5건에 응답하라고 하죠 + +01:16.740 --> 01:17.700 +그걸 선택하는 거죠 + +01:17.730 --> 01:18.510 +어쩌고저쩌고 말이죠 + +01:18.540 --> 01:19.470 +읽어 보게 해 줄게요 + +01:19.470 --> 01:20.610 +하지만 이런 건 잘 아시죠 + +01:20.610 --> 01:30.780 +딜을 적고 나열한 다음 딜닷컴을 호출해요 각각에 대해서요 + +01:31.410 --> 01:31.800 +네 + +01:31.830 --> 01:32.310 +간단해요 + +01:32.310 --> 01:36.660 +사용자 프롬프트의 시작을 프린트하겠습니다 꽤 길군요 + +01:36.660 --> 01:39.720 +사용자 프롬프트의 처음 2천 글자를 프린트할게요 + +01:39.720 --> 01:41.310 +Get it 하면 감이 오죠 + +01:41.340 --> 01:42.270 +여기 있네요 + +01:42.270 --> 01:43.290 +한번 보죠 + +01:43.290 --> 01:48.270 +사용자 프롬프트 응답에 어쩌고저쩌고 라음이 있네요 + +01:48.270 --> 01:50.370 +그럼 이렇게 하죠 + +01:50.370 --> 01:58.500 +이게 설명이에요 설명 메서드 전에 봤던 설명 함수에서 + +01:58.500 --> 02:00.960 +가져온 거죠 + +02:00.960 --> 02:10.010 +본질적으로 GPT 4에 할당하는 작업은 이 목록의 거래를 파악하는 겁니다 목록에서 선택 선택하기 + +02:10.040 --> 02:12.650 +50개가 될 거예요 + +02:12.650 --> 02:19.220 +가장 명확한 묘사와 가격을 나타내는 5개를 고르세요 + +02:19.250 --> 02:24.950 +가장 명확한 방식으로 가격을 분석하세요 + +02:25.100 --> 02:29.240 +물론 그런 건 좀 모호한 문제라 생각을 해야 하죠 + +02:29.240 --> 02:33.680 +1, 2년 전이었다면 상상할 수 없을 정도로 힘들었을 거예요 + +02:33.770 --> 02:38.180 +어떻게 믿을만한 방법으로 할 수 있을까요? 엔지니어링 코드를 이용해서요 + +02:38.180 --> 02:44.990 +개척자 모델에 딱 맞는 문제입니다. 그런 미묘한 차이를 이해하고 어떤 것에 이르는 건 가격이라는 + +02:44.990 --> 02:52.190 +걸 깨달을 수 있으니까요. 하지만 어떤 것에 이르는 것과 무언가를 잃는 건 가격이 아니에요. + +02:52.190 --> 02:54.620 +그런 논리는 정말 어려워요 + +02:54.620 --> 03:00.680 +하지만 개척자 모델은 그걸 아주 효과적으로 이해하고 이런 표정의 + +03:00.680 --> 03:05.630 +의미를 이해해서 지능적으로 결과물을 만들어내죠 + +03:05.630 --> 03:12.950 +이 프롬프트들을 기반으로 이렇게 말할 수 있죠 get 추천을 불러와 + +03:12.950 --> 03:18.290 +이제 다른 API를 보게 될 겁니다 구조화된 출력을 사용하고 있으니까요 + +03:18.710 --> 03:23.300 +추천서 get 완료 함수는 OpenAI예요 + +03:23.300 --> 03:24.800 +비트가 좀 다르죠? 
+ +03:24.800 --> 03:31.160 +적어도 지금 이 순간만큼은 오픈라이의 베타 제공이죠 + +03:31.190 --> 03:34.310 +하지만 곧 완전히 생산될 거예요 + +03:34.310 --> 03:39.110 +오픈나이 베타라고 하나요? 미국에선 베타죠 + +03:39.170 --> 03:44.870 +오픈아이 베타, 닷챗 닷 완성, 닷 파세예요 + +03:44.960 --> 03:52.520 +그렇게 부르는 게 맞겠죠 그리고 평소처럼 모델을 제공해요 + +03:52.520 --> 03:57.620 +이것은 GPT4와 미니, 프런티어 버전 2 GPT4가 지원하죠 + +03:57.770 --> 04:02.120 +늘 그렇듯 메시지는 그냥 말하지도 않을 거예요 + +04:02.120 --> 04:02.870 +잘 아시네요 + +04:02.870 --> 04:04.100 +말 안 해도 알잖아요 + +04:04.130 --> 04:07.310 +이게 차이점이에요 + +04:07.310 --> 04:12.440 +과거엔 이 응답 형식을 JSON이 돌아오길 요구하는 방법으로 + +04:12.470 --> 04:14.000 +사용했었죠 + +04:14.030 --> 04:16.700 +이번엔 JSON이라고 안 해요 + +04:16.730 --> 04:20.900 +우리가 하는 일은 거래 선택반이라는 반에서 통과시키는 거예요 + +04:20.900 --> 04:25.490 +방금 살펴본 클래스가 그거예요 스크래피 JSON을 했던 거죠 + +04:25.520 --> 04:30.020 +응답 포맷으로 그 클래스를 넘겨요 + +04:30.500 --> 04:35.810 +결과를 얻을 때 일반적인 API 호출을 하지 않아요 + +04:35.810 --> 04:43.340 +완료는 보통 0이라고 하죠 .Mession이요 그리고 content가 아니라 .파싱이라고 해요 + +04:43.340 --> 04:49.970 +응답을 취해 딜 선택 인스턴스로 구문 분석을 하죠 + +04:50.000 --> 04:52.100 +그렇게 돌아올 거예요 + +04:52.400 --> 04:54.710 +빌드 업을 한 후 반환하는 게 좋아요 + +04:54.740 --> 04:59.780 +코드를 실행해 어떻게 되는지 get get이 뭔지 보죠 + +04:59.780 --> 05:05.150 +GPT는 0으로 호출하고 미니도 GPT로 호출할게요 + +05:05.180 --> 05:09.830 +특정 구조의 출력 형식을 지정하고 있어요 + +05:09.830 --> 05:10.820 +돌아왔군요 + +05:10.850 --> 05:17.390 +어떤 유형이 돌아왔는지 프린트해 볼까요? 유형 결과요 + +05:17.390 --> 05:19.520 +어떤 객체가 돌아왔는지 보죠 + +05:19.520 --> 05:23.210 +에이전트가 거래 상대를 고르는 거예요 + +05:23.210 --> 05:26.150 +Get up! Get up! Get up! 
바로 저런 게 우리가 되찾고 싶었던 객체죠 + +05:26.150 --> 05:31.760 +실제로 어떻게 생겼는지 보는 게 좋겠어요 + +05:31.790 --> 05:34.490 +세트로 만들 거예요 + +05:34.610 --> 05:38.780 +거래 선택에 한 가지 분야가 포함돼 있어요 + +05:38.780 --> 05:39.350 +기억하신다면요 + +05:39.380 --> 05:40.850 +거래는 우리가 정한 거예요 + +05:40.850 --> 05:42.140 +이렇게 하죠 + +05:42.140 --> 05:45.830 +그럼 거래를 얼마나 따냈는지 알아보죠 + +05:46.460 --> 05:49.730 +다섯 건의 거래를 성사시켰으면 좋겠어요 + +05:50.000 --> 05:50.870 +어디 보죠 + +05:50.900 --> 05:53.870 +정말 다섯 건의 거래를 성사시켰어요 Get it + +05:53.870 --> 05:57.020 +그 거래 중 첫 번째를 인쇄하죠 + +05:57.020 --> 06:02.780 +그리고 탑클래스 스마트워치 삼성 갤럭시 워치 울트라를 구입했어요. + +06:02.960 --> 06:06.710 +요약본을 준비해 봤어요 + +06:06.710 --> 06:10.010 +보다시피 아주 잘 되고 있어요 + +06:10.010 --> 06:14.150 +요약본처럼 보이네요 가격을 입력해서 가격을 + +06:14.150 --> 06:22.250 +추정할 때 사용할 수 있겠어요 GPT for 미니가 가격과 URL을 꺼냈네요 + +06:22.250 --> 06:26.060 +물론 그 URL을 클릭하면 그 거래가 열리죠 + +06:26.060 --> 06:27.020 +저기 있네요 + +06:27.020 --> 06:28.160 +좋아요 + +06:28.310 --> 06:32.690 +그래서 잘 되고 있어요 + +06:32.960 --> 06:34.790 +네 + +06:34.820 --> 06:46.850 +마지막으로 보여드릴 것은 스캐너 에이전트를 가져올 건데요 + +06:46.850 --> 06:51.380 +다시 한 번 해볼게요. 1-2분 정도 걸릴 것 같아요. 
+ +06:51.770 --> 06:57.950 +실행되는 동안 스캐너 에이전트가 뭔지 보여드릴게요 + +06:57.950 --> 07:01.370 +스캐너 에이전트는 여기 있어요 + +07:01.640 --> 07:08.180 +이건 코드 뭉치인데 전에 봤던 코드와 아주 비슷할 거예요 + +07:08.540 --> 07:15.920 +방금 작성한 건데 논평도 있고 구조도 더 나아져서 생산 품질로 + +07:15.920 --> 07:18.620 +평가할 수 있어요 + +07:18.620 --> 07:18.620 +보세요 + +07:18.890 --> 07:21.710 +하지만 방금 본 프롬프트가 있어요 + +07:21.710 --> 07:28.880 +우리가 살펴본 사용자 프롬프트가 있는데 5건의 거래로 응답하라고 하네요 + +07:28.880 --> 07:33.890 +그러면 init 메서드를 설정하고 OpenAI에 연결할 수 있죠 + +07:33.920 --> 07:41.000 +페치 거래는 방금 살펴본 방법인데 긁힌 거래 목록을 얻어요 긁힌 거래 + +07:41.300 --> 07:45.020 +페치라고 부르면 예상대로죠 + +07:45.050 --> 07:52.820 +여기서 하는 또 다른 일은 잠재적으로 메모리 객체를 갖는 겁니다 URL의 + +07:52.820 --> 08:00.200 +목록이죠 기억되는 건 이전 검색에서 이미 나타났어요 + +08:00.200 --> 08:05.030 +그래야 새로운 거래를 성사시킬 수 있어요 이미 검토한 거래가 아니라요 + +08:05.030 --> 08:10.850 +물론 메모리는 에이전트 프레임워크를 구축하는 한 측면인데 미래에 + +08:10.850 --> 08:12.860 +더 주목할 부분이죠 + +08:13.790 --> 08:20.510 +사용자 프롬프트를 만들고 나서 전에 했던 것과 같은 코드를 만들고요 + +08:20.510 --> 08:25.100 +스캔은 우리가 방금 본 바로 그 함수예요 + +08:25.130 --> 08:30.920 +사용자 프롬프트에서 OpenAI 베타.챗 .완료 .파스를 호출해요 + +08:30.920 --> 08:32.390 +모델을 통과시키죠 + +08:32.390 --> 08:39.260 +응답 형식이 딜 선택 클래스여야 한다고 지정합니다 그걸 반환할 것을 + +08:39.260 --> 08:43.250 +알고 결과로 구문 분석을 할 수 있도록요 + +08:43.250 --> 08:46.130 +Get it가 바로 여기죠 + +08:46.340 --> 08:53.000 +그때 가장 먼저 계약을 파기해요 + +08:53.000 --> 08:57.860 +가격이 0보다 크지 않으면 0달러짜리 거래를 반환하고 싶지 않아요 + +08:58.010 --> 09:00.020 +그 결과를 반환하죠 + +09:00.020 --> 09:05.180 +만약 거래를 못 따내면... 
이미 모든 걸 드러냈을 수도 있으니까요 그럼 하나도 못 + +09:05.180 --> 09:05.750 +따내요 + +09:05.750 --> 09:08.930 +상단에 선택 사항이 있는 게 그걸 나타내죠 + +09:08.990 --> 09:10.640 +그게 수업 내용이에요 + +09:10.670 --> 09:13.340 +그럼 3일 차 얘기를 해 보죠 + +09:13.340 --> 09:16.790 +결과가 어떻게 나올지 봐야죠 + +09:17.900 --> 09:22.190 +제가 알고 있는 것 중 하나는 제가 이걸 할 때 눈치챘을 + +09:22.190 --> 09:27.020 +수도 있지만 저는 얘기하면서 생각을 하느라 바빴어요 + +09:27.020 --> 09:36.230 +모델에서 실수를 본 것 같아요 가격이 350달러라고 되어 있네요 삼성 갤럭시 워치와 관련해서요 + +09:36.230 --> 09:45.860 +하지만 이 웹사이트가 나왔을 때∙∙∙ 이걸 닫죠 트레이드 인과 함께 350까지 오퍼라고 돼 있어요 + +09:45.860 --> 09:48.830 +다른 장치가 있으면 350까지 저축할 수 있어요 + +09:48.860 --> 09:55.280 +프론티어 모델이 이런 걸 얼마나 잘하는지 보여드렸더니 모델이 실제로 + +09:55.280 --> 10:01.850 +저지른 실수의 예가 이런 겁니다 350달러라고 생각하는데 사실은 350달러나 + +10:01.850 --> 10:03.530 +싸요 + +10:03.530 --> 10:06.020 +내가 한 말 취소할게요 + +10:06.020 --> 10:07.940 +Get it은 사실 잘못됐어요 + +10:07.940 --> 10:09.620 +재미있을 거예요 + +10:09.650 --> 10:13.400 +그런 걸 배우고 목격해서 유용하죠 + +10:13.670 --> 10:20.630 +시스템 프롬프트에 추가해 성능을 향상시킬 수 있는지 볼게요 + +10:20.630 --> 10:23.360 +이제 그런 실수가 가능하다는 걸 알게 됐죠 + +10:23.360 --> 10:27.380 +그럼 슬라이드는 다음에 보도록 하죠 diff --git a/week5/community-contributions/subtitles/srts/59671441/en_US.srt b/week5/community-contributions/subtitles/srts/59671441/en_US.srt new file mode 100755 index 0000000..4ea0059 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59671441/en_US.srt @@ -0,0 +1,535 @@ +WEBVTT + +00:00.560 --> 00:09.650 +And welcome once more to our favorite place to be Jupyter Lab, the Paradise for a data scientist experimenter. + +00:09.830 --> 00:17.000 +So as we launch into the day three notebook in week eight, you'll see that there are precious few comments + +00:17.000 --> 00:18.020 +this time around. + +00:18.020 --> 00:23.540 +I'm commenting the code that is our production quality code in the notebooks. + +00:23.630 --> 00:24.260 +At this point. + +00:24.260 --> 00:25.310 +You guys are pros. + +00:25.310 --> 00:30.260 +You don't need a whole ton of waffle from me, so we just dive straight in. 
+ +00:30.470 --> 00:32.930 +Uh, so I'm going to begin with some imports. + +00:32.930 --> 00:39.530 +As we get into today's puzzle, uh, I'm going to set up some environment things, and now we get to + +00:39.560 --> 00:41.750 +the first piece of substance. + +00:42.080 --> 00:48.350 +Uh, I, uh, sneakily imported something called Scraped deal, uh, here without mentioning it, but + +00:48.380 --> 00:51.770 +I now have this scraped deal, and I'm going to call fetch. + +00:51.980 --> 00:56.810 +Um, with setting show progress to true, and I'm going to run that without telling you what it does, + +00:56.810 --> 01:00.020 +because it takes a couple of minutes, so we might as well let it run. + +01:00.020 --> 01:01.460 +And then I will talk more about it. + +01:01.460 --> 01:02.930 +So kick it off. + +01:03.020 --> 01:04.850 +Off it goes. + +01:04.850 --> 01:06.860 +So what is scraped? + +01:06.860 --> 01:07.220 +Deal. + +01:07.220 --> 01:10.310 +It is sitting in the agents folder in the package. + +01:10.310 --> 01:14.210 +And here is it is in part of deals dot pie. + +01:14.240 --> 01:16.760 +So let me show you deals dot pie. + +01:16.790 --> 01:23.900 +So uh, this is a Python module which starts by defining a series of feeds. + +01:23.900 --> 01:31.790 +And these are URLs to RSS feeds, which are useful ones that happen to have good deals being announced + +01:31.880 --> 01:33.350 +in a various categories. + +01:33.350 --> 01:38.210 +And these categories happen to be close matches for the categories that we know a thing or two about + +01:38.210 --> 01:44.030 +because our model was trained for them electronics, computers, automotive, and then mostly smart + +01:44.060 --> 01:45.350 +home and home garden. + +01:45.350 --> 01:46.370 +So here are some feeds. + +01:46.370 --> 01:51.740 +There are a bunch of others, and if you have the stomach to wait a bit longer, you can slap in a bunch + +01:51.740 --> 01:55.610 +more feeds in here and have a bigger data set to work with. 
+ +01:55.610 --> 02:01.030 +So then there are some utilities here that you can look through in your own time. + +02:01.060 --> 02:06.910 +A method called extract, a function called extract, which cleans up some HTML and returns useful text. + +02:06.910 --> 02:09.880 +And then there's a class scraped deal. + +02:09.880 --> 02:14.380 +And this represents a deal that we have retrieved from an RSS feed. + +02:14.410 --> 02:16.840 +So it's not exactly scraping, it's retrieving it from RSS. + +02:16.840 --> 02:19.720 +But we will do a bit of a of a lookup as well. + +02:19.720 --> 02:21.790 +So so there's there's some truth to it. + +02:22.060 --> 02:28.630 +Um, what we do in the init we take something called an entry, which is just a dictionary of values. + +02:28.630 --> 02:30.160 +And we're going to pass in. + +02:30.160 --> 02:32.650 +And this is something we get straight from the RSS feed. + +02:32.680 --> 02:37.840 +And what we do is we pick the title, the summary, and we take the links. + +02:37.840 --> 02:45.400 +And then if you look at here, what we do here is we take its URL and we actually go and fetch that. + +02:45.400 --> 02:47.500 +So we do are doing some scraping here. + +02:48.520 --> 02:49.330 +Sorry. + +02:49.510 --> 02:57.360 +We do some some fetching of that URL and we put the results in stuff and then we parse stuff with beautiful + +02:57.360 --> 02:57.780 +soup. + +02:57.780 --> 03:04.620 +So this is similar to what we did in week one, day one when we were parsing URLs that we retrieved + +03:04.620 --> 03:06.600 +from Requests.get. + +03:07.380 --> 03:08.220 +Okay. + +03:08.280 --> 03:13.860 +And then there's some more stuff here that takes the contents and scrubs it. + +03:14.190 --> 03:18.930 +And then we potentially build some features if we have the features. 
+ +03:18.930 --> 03:27.210 +So this is all a bit of, uh, scraping code to be able to take something that comes in an RSS feed, + +03:27.210 --> 03:30.390 +clean it up and turn it into a record. + +03:30.570 --> 03:33.090 +Um, and then there's something that prints what it is. + +03:33.690 --> 03:39.030 +And this class method fetch, is exactly the one that we just kicked off a moment ago that you can see + +03:39.060 --> 03:40.110 +has finished running. + +03:40.260 --> 03:46.770 +Uh, and what it does is it iterates through all of the feeds, um, and it calls something called feed + +03:46.770 --> 03:48.090 +parser.parse. + +03:48.090 --> 03:55.500 +And feedparser is a package that I've imported, which is a useful package that allows you to pull RSS + +03:55.500 --> 03:58.380 +feeds and it will give them to you as a dictionary. + +03:58.380 --> 04:06.180 +So we are using Feedparser as our Python package for doing this, and we're just taking the top ten + +04:06.210 --> 04:08.940 +that comes back from each of these different feeds. + +04:08.940 --> 04:11.850 +We take the top ten deals that we get from each one. + +04:12.000 --> 04:15.990 +And of course this is something where you can choose to bring back more data if you wish. + +04:16.020 --> 04:19.200 +For the moment, I'm just constraining it to that number. + +04:19.200 --> 04:23.790 +And then for each of them we create this is this is ourselves. + +04:23.790 --> 04:28.800 +We create an instance of us, uh, for that entry. + +04:28.800 --> 04:32.070 +That entry, of course, is the dictionary that we looked at a moment ago. + +04:32.340 --> 04:36.000 +Um, and there's a little time.sleep in here, if you're wondering about that. + +04:36.030 --> 04:42.390 +Uh, that's because I figured that since we're then going and doing a get to to retrieve that web page + +04:42.390 --> 04:46.590 +from this deals website, it was antisocial. 
+ +04:46.590 --> 04:52.950 +If we hammer that website with tons of requests, one after another with a split second between them. + +04:52.950 --> 05:01.040 +So it's considered good scraping practices to put in a sleep so that you're not overly, uh, beating + +05:01.040 --> 05:04.250 +up a web server and being too needy. + +05:04.250 --> 05:11.030 +So this is a way of us being better citizens when we retrieve these deals from the websites, and then + +05:11.030 --> 05:12.740 +it returns those deals. + +05:12.740 --> 05:13.760 +So that's what I just did. + +05:13.790 --> 05:16.430 +And those deals should be sitting waiting for us in this notebook. + +05:16.430 --> 05:21.080 +But I will first just mention a couple of other things in this useful module. + +05:21.260 --> 05:26.090 +Um, there are these three classes here that are going to be important in a minute because this is how + +05:26.090 --> 05:28.820 +we define structured outputs. + +05:28.940 --> 05:37.880 +When we ask GPT four to respond, um, we are defining here a class deal, a class deal selection and + +05:37.880 --> 05:39.350 +then a class opportunity. + +05:39.350 --> 05:43.010 +And you can see that these are subclasses of base model. + +05:43.040 --> 05:49.100 +Base model is uh, comes from the Pydantic package, which does a number of different things. + +05:49.100 --> 05:57.920 +One of the things it does is it very easily allows you to switch between JSON versions of of a class + +05:57.920 --> 06:04.310 +and its structure and the class itself, and it also is able to enforce that a class adheres to a schema. + +06:04.460 --> 06:08.000 +So there's a lot about it that probably many of you are very familiar with. + +06:08.000 --> 06:08.810 +Pedantic. + +06:08.900 --> 06:15.560 +Um, but but all you need to do to use it is simply create a new class that is a subclass of base model. + +06:15.770 --> 06:20.360 +So our first class that we define is just called a deal. 
+ +06:20.360 --> 06:25.010 +And it is something which just has a product description, a price and a URL. + +06:25.040 --> 06:28.010 +That's it description price URL. + +06:28.100 --> 06:31.280 +And then we have another thing called deal selection. + +06:31.280 --> 06:35.060 +And this is what we're going to ask GPT four to respond with. + +06:35.060 --> 06:37.880 +We're going to tell it we want a deal selection. + +06:37.880 --> 06:39.650 +So this is the important one. + +06:39.650 --> 06:41.180 +And it's very simple. + +06:41.180 --> 06:48.500 +It's just something that has a list of these deals in a single attribute called deals. + +06:48.950 --> 06:50.690 +That's that's all there is to it. + +06:50.690 --> 06:55.000 +So deal selection means I want a list of deal objects. + +06:55.000 --> 06:58.810 +So if you think of this in your mind in JSON speak, what? + +06:58.840 --> 07:04.630 +What this will look like in JSON terms is it's going to be like this deal selection is a single object + +07:04.630 --> 07:07.750 +which only has one attribute deals. + +07:07.750 --> 07:09.850 +And that is a list. + +07:09.940 --> 07:13.720 +And it's a list of things which are each objects. + +07:13.720 --> 07:15.850 +So when that goes into JSON it will look like this. + +07:15.880 --> 07:18.520 +It has a product description. + +07:22.960 --> 07:28.720 +And it has a price which is a float. + +07:29.410 --> 07:35.620 +And it has a URL which is a some kind of a. + +07:37.660 --> 07:41.590 +URL like so uh, and that makes a deal. + +07:41.590 --> 07:44.920 +And there is a whole bunch of them potentially in a list of deals. + +07:44.920 --> 07:47.170 +And that makes up a deal selection. + +07:47.170 --> 07:53.410 +So if you look at the JSON that I just typed there and compare it to these class definitions, I hope + +07:53.410 --> 07:57.640 +it becomes clearer in your mind how they are analogous to each other. 
+ +07:58.090 --> 08:02.740 +This is just the JSON representation of this structure here. + +08:02.770 --> 08:10.450 +And indeed, when we say to GPT four, we want the structured output to be in this format, what we're + +08:10.450 --> 08:16.600 +kind of doing is saying we want this to be the kind of JSON that you respond with. + +08:16.630 --> 08:18.250 +That's all that's going on. + +08:18.760 --> 08:23.560 +So that hopefully gives you a sense of of how this works. + +08:23.740 --> 08:25.030 +And I'll delete that. + +08:25.030 --> 08:26.320 +Now that's not necessary. + +08:26.350 --> 08:31.210 +The final thing to mention is that there's also a class called opportunity that we define here, which + +08:31.210 --> 08:33.220 +basically is something which has a deal. + +08:33.220 --> 08:38.920 +One of these guys, and also an estimate, which is something later we're going to use when we are estimating + +08:38.920 --> 08:40.570 +the value of these deals. + +08:40.570 --> 08:46.240 +And then the discount is simply going to be the difference between the deal's price and the estimate + +08:46.240 --> 08:46.780 +it's at. + +08:46.810 --> 08:50.230 +How much of a discount are we finding that this is being offered? + +08:50.620 --> 08:54.070 +So that is the the setup. + +08:54.370 --> 08:58.210 +Uh, and with that, let's go back over here. + +08:58.510 --> 09:04.690 +Um, so, um, let's fix something there. + +09:04.960 --> 09:09.430 +Uh, so the, uh, the we've just run, scrape, deal. + +09:09.430 --> 09:10.510 +Dot, fetch. + +09:10.660 --> 09:13.120 +Uh, we can now look at how many do we have? + +09:13.150 --> 09:16.870 +We have 50 deals sitting in deals. + +09:16.900 --> 09:23.560 +The reason we have 50 deals is because we have, uh, we had five feeds, and we asked for ten deals + +09:23.560 --> 09:24.310 +from each feed. + +09:24.310 --> 09:26.230 +And so that comes to 50, obviously. + +09:26.380 --> 09:28.870 +Uh, so that's hopefully what you're expecting to hear. 
+
+09:28.900 --> 09:35.590
+Uh, if we look at, uh, um, deal number 44, um, it prints out nicely because you might have seen
+
+09:35.590 --> 09:41.380
+I had a, I used one of the Python magic functions to make sure that it was going to print nicely the
+
+09:41.380 --> 09:42.940
+repr function.
+
+09:43.150 --> 09:46.660
+Um, and so that is what deal number 44 is.
+
+09:46.750 --> 09:49.770
+And if we do the full Describe.
+
+09:49.770 --> 09:52.710
+This is the full bit of information we have about it.
+
+09:52.740 --> 09:58.050
+It's loads daily deal garage storage bla bla bla bla bla bla bla.
+
+09:58.080 --> 10:00.750
+Choose install to dodge the shipping fee.
+
+10:01.380 --> 10:01.860
+Uh.
+
+10:01.860 --> 10:04.230
+So here's the thing.
+
+10:04.230 --> 10:07.260
+If you look at that, you'll notice a couple of things about it.
+
+10:07.290 --> 10:12.690
+One of them is that the price doesn't come separately in the RSS feed.
+
+10:12.690 --> 10:16.380
+We don't get the price point, we just get the description of it.
+
+10:16.380 --> 10:20.760
+And the other thing you'll see about it is that this one doesn't even have a price.
+
+10:20.760 --> 10:22.590
+It's telling you how much off it is.
+
+10:22.590 --> 10:27.690
+It's telling you some things about the free shipping and things like that, but it's not actually giving
+
+10:27.690 --> 10:29.520
+you a price associated with this product.
+
+10:29.520 --> 10:30.900
+And that's a bore.
+
+10:31.050 --> 10:36.450
+And so that means that we're not going to be able to always use these properly.
+
+10:36.450 --> 10:41.430
+And we're going to have to do some parsing to figure out what is the actual price that's being offered
+
+10:41.430 --> 10:42.870
+against each of these items.
+
+10:42.870 --> 10:50.870
+What you'll find if you look at these items is also some of them combine Multiple, uh, things in one,
+
+10:50.870 --> 10:51.950
+uh, blurb.
+ +10:51.950 --> 10:56.390 +There's maybe different models of the Apple Watch that are all being offered 20% off. + +10:56.480 --> 11:02.930 +And so trying to digest that and pull out what we want is going to be challenging, very challenging, + +11:02.930 --> 11:05.600 +very hard to code that in a way that would be robust. + +11:05.600 --> 11:08.270 +And that's why we need to use a frontier model. + +11:08.270 --> 11:16.250 +We are going to use GPT four zero to take each of our RSS feeds, scraped deals, and turn that scraped + +11:16.250 --> 11:20.090 +deal into something, which is a good, useful deal for us. + +11:20.090 --> 11:26.240 +We're actually going to send it all 50 and say, look, we want you to find the best five deals which + +11:26.240 --> 11:32.420 +are most clearly explained from this big set, pluck them out and summarize it back to us. + +11:32.420 --> 11:34.730 +And we want that in structured output. + +11:34.730 --> 11:39.620 +We're going to tell you what format we want, and we're going to ask you to respond with exactly that + +11:39.620 --> 11:40.400 +format. + +11:40.430 --> 11:44.000 +And so now you now you've, uh, it's been teed up. + +11:44.000 --> 11:45.680 +You understand what we're trying to accomplish. + +11:45.680 --> 11:48.110 +And we're going to do it in the next video. 
diff --git a/week5/community-contributions/subtitles/srts/59671441/ja_JP.srt b/week5/community-contributions/subtitles/srts/59671441/ja_JP.srt new file mode 100755 index 0000000..2aec5ab --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59671441/ja_JP.srt @@ -0,0 +1,457 @@ +WEBVTT + +00:00.560 --> 00:09.650 +データサイエンティストの実験パラダイス、 Jupyter Labへようこそ。 + +00:09.830 --> 00:18.020 +第8週の3日目のノートに入ると、 今回は貴重なコメントが少ないことがわかるだろう。 + +00:18.020 --> 00:23.540 +私はノートブックにプロダクション・クオリティーのコードをコメントしている。 + +00:23.630 --> 00:24.260 +この時点ではね。 + +00:24.260 --> 00:25.310 +君たちはプロだ。 + +00:25.310 --> 00:30.260 +私があれこれ説明するまでもないだろうから、 さっそく本題に入ろう。 + +00:30.470 --> 00:32.930 +ええと、 では輸入品から始めようと思う。 + +00:32.930 --> 00:41.750 +今日のパズルに入るにあたって、 いくつか環境を整えようと思う。 + +00:42.080 --> 00:48.350 +ええと、 スクレイプド・ディールというものをこっそりインポートしたんだけど、 そのことを言わずに、 このスクレイプド・ディールを持って、 + +00:48.380 --> 00:51.770 +フェッチを呼ぼうと思うんだ。 + +00:51.980 --> 01:00.020 +進行状況を表示するをtrueに設定して、 何をするかは言わないで実行します。 + +01:00.020 --> 01:01.460 +そして、 それについてもっと話す。 + +01:01.460 --> 01:02.930 +だから、 キックオフだ。 + +01:03.020 --> 01:04.850 +さあ、 出発だ。 + +01:04.850 --> 01:06.860 +では、 何が削られたのか? 
+ +01:06.860 --> 01:07.220 +取引する。 + +01:07.220 --> 01:10.310 +パッケージの中のエージェント・フォルダに入っている。 + +01:10.310 --> 01:14.210 +そして、 これがドットパイの一部である。 + +01:14.240 --> 01:16.760 +では、 お得なドットパイをお見せしよう。 + +01:16.790 --> 01:23.900 +これは、 一連のフィードを定義することから始まるPythonモジュールだ。 + +01:23.900 --> 01:33.350 +そしてこれらはRSSフィードのURLで、 たまたま様々なカテゴリーでお得な情報が発表されている便利なものだ。 + +01:33.350 --> 01:38.210 +これらのカテゴリーは、 私たちが知っているカテゴリーと偶然にも一致した。 私たちのモデルは、 + +01:38.210 --> 01:45.350 +エレクトロニクス、 コンピューター、 自動車、 そして主にスマートホームと家庭菜園を対象としてトレーニングされたからだ。 + +01:45.350 --> 01:46.370 +そこで、 いくつかのフィードを紹介しよう。 + +01:46.370 --> 01:51.740 +他にもたくさんあるし、 もう少し待つ気概があれば、 もっとたくさんのフィードをここに入れて、 + +01:51.740 --> 01:55.610 +より大きなデータセットを扱うことができる。 + +01:55.610 --> 02:01.030 +だから、 ここにユーティリティがいくつかあるので、 自分の好きな時間に目を通すことができる。 + +02:01.060 --> 02:06.910 +extractと呼ばれるメソッド、 extractと呼ばれる関数は、 HTMLの一部をクリーンアップし、 有用なテキストを返す。 + +02:06.910 --> 02:09.880 +そして、 擦ったクラスの契約もある。 + +02:09.880 --> 02:14.380 +そして、 これはRSSフィードから取得した取引を表している。 + +02:14.410 --> 02:16.840 +つまり、 正確にはスクレイピングではなく、 RSSから取得しているのだ。 + +02:16.840 --> 02:19.720 +しかし、 ルックアップも少しやってみよう。 + +02:19.720 --> 02:21.790 +だから、 それは真実なんだ。 + +02:22.060 --> 02:28.630 +ええと、 initでやることは、 エントリーと呼ばれる、 単なる値の辞書を取ることです。 + +02:28.630 --> 02:30.160 +そして、 私たちは中に入る。 + +02:30.160 --> 02:32.650 +そして、 これはRSSフィードから直接得られるものだ。 + +02:32.680 --> 02:37.840 +そして、 タイトルと要約を選び、 リンクを取る。 + +02:37.840 --> 02:45.400 +そして、 ここを見てほしい。 ここでやっているのは、 URLを取得して、 それを実際にフェッチしに行くということだ。 + +02:45.400 --> 02:47.500 +だから、 ここでスクレイピングをしているんだ。 + +02:48.520 --> 02:49.330 +申し訳ない。 + +02:49.510 --> 02:57.780 +そのURLをいくつかフェッチして、 その結果をいろいろなものに入れ、 美しいスープでいろいろなものをパースする。 + +02:57.780 --> 03:06.600 +これは1週目、 初日にRequestsから取得したURLを解析するときにやったことと似ている。 + +03:06.600 --> 03:06.600 +を得る。 + +03:07.380 --> 03:08.220 +オーケー。 + +03:08.280 --> 03:13.860 +そして、 ここにさらに中身を取り込んで、 こすり洗いするものがある。 + +03:14.190 --> 03:18.930 +そして、 機能があれば、 いくつかの機能を構築する可能性もある。 + +03:18.930 --> 03:30.390 +これは、 RSSフィードから送られてきたものをクリーンアップしてレコードにするための、 ちょっとしたスクレイピング・コードだ。 + +03:30.570 --> 03:33.090 +そして、 
それが何であるかを印刷するものがある。 + +03:33.690 --> 03:40.110 +そして、 このクラス・メソッドfetchは、 まさに先ほどキックオフしたもので、 実行が終了したのがわかるだろう。 + +03:40.260 --> 03:48.090 +そして、 すべてのフィードを繰り返し、 フィード・パーサーと呼ばれるものを呼び出します。 + +03:48.090 --> 03:48.090 +パースする。 + +03:48.090 --> 03:58.380 +feedparserは私がインポートしたパッケージで、 RSSフィードを引っ張ってきて辞書として渡してくれる便利なパッケージだ。 + +03:58.380 --> 04:08.940 +FeedparserをPythonのパッケージとして使い、 それぞれのフィードから戻ってきたトップ10を取り出します。 + +04:08.940 --> 04:11.850 +私たちは、 それぞれの会社から得た取引のトップ10を取り上げる。 + +04:12.000 --> 04:15.990 +そしてもちろん、 希望すればより多くのデータを持ち帰ることもできる。 + +04:16.020 --> 04:19.200 +今のところ、 この数字に限定している。 + +04:19.200 --> 04:23.790 +そして、 彼ら一人ひとりのために、 これが自分自身であることを創造する。 + +04:23.790 --> 04:28.800 +そのエントリーのために、 僕らのインスタンスを作るんだ。 + +04:28.800 --> 04:32.070 +その項目はもちろん、 先ほど見た辞書である。 + +04:32.340 --> 04:36.000 +ええと、 少し時間があるんだ。 気になるなら、 ここで寝てくれ。 + +04:36.030 --> 04:46.590 +それは、 このディールのウェブサイトからそのウェブページを取得するためにアクセスするのは反社会的だと思ったからなんだ。 + +04:46.590 --> 04:52.950 +もし私たちが、 そのウェブサイトに大量のリクエストを、 一瞬の隙を突いて次から次へと殺到させたら......。 + +04:52.950 --> 05:04.250 +だから、 ウェブサーバーに過度な負担をかけたり、 過度な要求をしたりしないように、 スリープを入れるのが良いスクレイピングのやり方とされている。 + +05:04.250 --> 05:12.740 +つまり、 これは、 ウェブサイトからお得な情報を取得し、 その情報を返すことで、 私たちがより良い市民になるための方法なのだ。 + +05:12.740 --> 05:13.760 +だから、 今やったんだ。 + +05:13.790 --> 05:16.430 +そして、 それらの取引はこのノートの中で私たちを待っているはずだ。 + +05:16.430 --> 05:21.080 +しかし、 その前に、 この有用なモジュールにある他のいくつかの事柄に触れておこう。 + +05:21.260 --> 05:26.090 +この3つのクラスは、 構造化されたアウトプットを定義する方法なので、 + +05:26.090 --> 05:28.820 +すぐに重要になる。 + +05:28.940 --> 05:39.350 +GPTの4人に回答を求めると、 ええと、 私たちはここで、 クラス取引、 クラス取引の選択、 そしてクラス機会を定義しています。 + +05:39.350 --> 05:43.010 +これらはベースモデルのサブクラスであることがわかるだろう。 + +05:43.040 --> 05:49.100 +ベースモデルは......パイダンティック・パッケージで、 いろいろなことができる。 + +05:49.100 --> 06:04.310 +この機能でできることのひとつは、 JSONバージョンのクラスとその構造、 そしてクラスそのものを非常に簡単に切り替えることができることだ。 + +06:04.460 --> 06:08.000 +だから、 おそらく多くの人がよく知っていることがたくさんある。 + +06:08.000 --> 06:08.810 +ペダンティックだ。 + +06:08.900 --> 06:15.560 +でも、 でも、 それを使うために必要なのは、 単にベースモデルのサブクラスである新しいクラスを作ることだけです。 + +06:15.770 --> 06:20.360 +つまり、 
最初に定義するクラスはディールと呼ばれるものだ。 + +06:20.360 --> 06:25.010 +しかも、 商品説明と価格とURLだけのものだ。 + +06:25.040 --> 06:28.010 +以上、 説明価格URLでした。 + +06:28.100 --> 06:31.280 +そしてもうひとつ、 ディールの選択というものがある。 + +06:31.280 --> 06:35.060 +そして、 GPT4にはこう答えてもらうつもりだ。 + +06:35.060 --> 06:37.880 +私たちは、 取引を選択したいと伝えるつもりだ。 + +06:37.880 --> 06:39.650 +だからこれが重要なんだ。 + +06:39.650 --> 06:41.180 +そして、 それはとてもシンプルだ。 + +06:41.180 --> 06:48.500 +これは、 ディールと呼ばれる単一の属性に、 これらのディールのリストを持つだけのものだ。 + +06:48.950 --> 06:50.690 +それがすべてだ。 + +06:50.690 --> 06:55.000 +つまり、 ディールの選択とは、 ディール・オブジェクトのリストが欲しいということだ。 + +06:55.000 --> 06:58.810 +では、 これをJSONの言葉で考えてみると? + +06:58.840 --> 07:04.630 +JSONの用語で言うと、 この取引選択は1つのオブジェクトで、 + +07:04.630 --> 07:07.750 +1つの属性しか持っていません。 + +07:07.750 --> 07:09.850 +それがリストだ。 + +07:09.940 --> 07:13.720 +そして、 それはそれぞれのオブジェクトである物事のリストである。 + +07:13.720 --> 07:15.850 +つまり、 これがJSONになると次のようになる。 + +07:15.880 --> 07:18.520 +商品説明がある。 + +07:22.960 --> 07:28.720 +そして、 フロート(浮動株)である価格を持っている。 + +07:29.410 --> 07:35.620 +そして、 ある種のURLを持っている。 + +07:37.660 --> 07:41.590 +そうすれば、 契約は成立する。 + +07:41.590 --> 07:44.920 +そして、 案件のリストには潜在的にそれらの一群がある。 + +07:44.920 --> 07:47.170 +そして、 それがディールセレクションを構成する。 + +07:47.170 --> 07:57.640 +だから、 今私が入力したJSONを見て、 これらのクラス定義と比較してみてほしい。 + +07:58.090 --> 08:02.740 +これは、 この構造をJSONで表現したものだ。 + +08:02.770 --> 08:10.450 +そして実際、 GPT4に対して、 構造化された出力をこのフォーマットにしてほしいと言うとき、 私たちがやっていることは、 + +08:10.450 --> 08:16.600 +このようなJSONで応答してほしいと言っているようなものだ。 + +08:16.630 --> 08:18.250 +それだけだ。 + +08:18.760 --> 08:23.560 +それで、 これがどのように機能するかということがお分かりいただけたと思う。 + +08:23.740 --> 08:25.030 +そして、 それを削除する。 + +08:25.030 --> 08:26.320 +今はその必要はない。 + +08:26.350 --> 08:33.220 +最後に言っておきたいのは、 ここで定義したopportunityというクラスもある。 + +08:33.220 --> 08:40.570 +そのうちの一人、 そして見積もり。 これは後でこれらの取引の価値を見積もるときに使うものだ。 + +08:40.570 --> 08:46.780 +そして、 値引きは単純に、 契約価格と見積もり価格との差額となる。 + +08:46.810 --> 08:50.230 +どの程度のディスカウントが提示されているのだろうか? 
+ +08:50.620 --> 08:54.070 +これがセットアップだ。 + +08:54.370 --> 08:58.210 +あー、 それじゃ、 こっちに戻ろうか。 + +08:58.510 --> 09:04.690 +うーん、 じゃあ、 そこを直そうか。 + +09:04.960 --> 09:09.430 +ええと、 それで、 その......僕たちは今、 走って、 擦って、 契約したんだ。 + +09:09.430 --> 09:10.510 +ドット、 フェッチ。 + +09:10.660 --> 09:13.120 +何人いるんだ? + +09:13.150 --> 09:16.870 +ディールには50件の案件が眠っている。 + +09:16.900 --> 09:24.310 +なぜ50件かというと、 5つのフィードがあり、 それぞれのフィードから10件ずつお願いしたからです。 + +09:24.310 --> 09:26.230 +それで50になる。 + +09:26.380 --> 09:28.870 +ええと、 だから君が期待しているのはそういうことなんだ。 + +09:28.900 --> 09:42.940 +44番を見ると、 きれいにプリントアウトされている。 + +09:43.150 --> 09:46.660 +それが44番目の契約だ。 + +09:46.750 --> 09:49.770 +そして、 もしフルディスクリプションをするならば + +09:49.770 --> 09:52.710 +これが、 我々が持っている情報の全容だ。 + +09:52.740 --> 09:58.050 +日替わりのガレージ・ストレージがあるんだ。 + +09:58.080 --> 10:00.750 +送料をかわすためにインストールを選択する。 + +10:01.380 --> 10:01.860 +ええと。 + +10:01.860 --> 10:04.230 +つまり、 こういうことだ。 + +10:04.230 --> 10:07.260 +これを見ると、 いくつかのことに気づくだろう。 + +10:07.290 --> 10:12.690 +そのひとつは、 RSSフィードに価格が表示されないことだ。 + +10:12.690 --> 10:16.380 +価格帯はわからない。 + +10:16.380 --> 10:20.760 +そしてもうひとつ、 これには価格すらない。 + +10:20.760 --> 10:22.590 +どれくらいずれているかを教えてくれるんだ。 + +10:22.590 --> 10:29.520 +送料が無料になるとか、 そういうことは表示されるのですが、 実際にこの商品に関連する価格が表示されるわけではありません。 + +10:29.520 --> 10:30.900 +そして、 それは退屈だ。 + +10:31.050 --> 10:36.450 +つまり、 常に適切な使い方ができるわけではない。 + +10:36.450 --> 10:42.870 +そして、 これらの各項目に対して提示されている実際の価格を把握するために、 いくつかの解析を行う必要がある。 + +10:42.870 --> 10:51.950 +これらの項目を見ていただければわかると思うが、 1つの項目の中に複数の、 えー、 ものを組み合わせているものもある。 + +10:51.950 --> 10:56.390 +アップルウォッチには、 20%オフで提供されるさまざまなモデルがある。 + +10:56.480 --> 11:02.930 +そして、 それを消化し、 私たちが望むものを引き出そうとするのは、 + +11:02.930 --> 11:05.600 +非常に困難なことである。 + +11:05.600 --> 11:08.270 +だからフロンティアモデルを使う必要があるんだ。 + +11:08.270 --> 11:16.250 +私たちはGPT four zeroを使って、 RSSフィード、 スクレイピングされたディール、 そしてそのスクレイピングされたディールを、 + +11:16.250 --> 11:20.090 +私たちにとって有益なディールに変えていく。 + +11:20.090 --> 11:26.240 +この大きなセットから、 最もわかりやすく説明されているベスト5を見つけて、 + +11:26.240 --> 11:32.420 +それを抜き出して要約して私たちに返してほしい。 + +11:32.420 --> 11:34.730 +そして、 
構造化されたアウトプットにそれを求める。 + +11:34.730 --> 11:40.400 +私たちがどのようなフォーマットが欲しいかを伝え、 そのフォーマットで回答してもらう。 + +11:40.430 --> 11:44.000 +それで今、 君は、 あー、 ティーアップされたんだ。 + +11:44.000 --> 11:45.680 +私たちが成し遂げようとしていることを理解してくれている。 + +11:45.680 --> 11:48.110 +次のビデオでそれをやるつもりだ。 diff --git a/week5/community-contributions/subtitles/srts/59671441/ko_KR.srt b/week5/community-contributions/subtitles/srts/59671441/ko_KR.srt new file mode 100755 index 0000000..db98c34 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59671441/ko_KR.srt @@ -0,0 +1,517 @@ +WEBVTT + +00:00.560 --> 00:09.650 +다시 한번 환영합니다 이곳은 우리가 가장 좋아하는 유피터 연구소입니다 데이터 과학자 실험자의 천국이죠 + +00:09.830 --> 00:17.000 +8주 차에 나온 셋째 날 공책을 보면 이번에는 댓글이 거의 없는 걸 볼 수 + +00:17.000 --> 00:18.020 +있어요 + +00:18.020 --> 00:23.540 +생산 품질의 코드를 공책에 적었어요 + +00:23.630 --> 00:24.260 +지금은요 + +00:24.260 --> 00:25.310 +두 분은 프로예요 + +00:25.310 --> 00:30.260 +와플을 많이 먹을 필요는 없어요 바로 시작하죠 + +00:30.470 --> 00:32.930 +몇 가지 수입품부터 시작할게요 + +00:32.930 --> 00:39.530 +오늘 퍼즐을 풀려면 환경 설정부터 해야 해요 그럼 첫 번째 퍼즐 조각을 + +00:39.560 --> 00:41.750 +맞춰 보죠 get it + +00:42.080 --> 00:48.350 +제가 Scraped 딜이라는 걸 몰래 수입했어요 언급하지 않고서요 어쨌든 + +00:48.380 --> 00:51.770 +이 스크랩드 딜이 있으니 불러올게요 + +00:51.980 --> 00:56.810 +Show Properue를 true로 설정합니다 뭘 하는지는 말 안 하고 + +00:56.810 --> 01:00.020 +실행할게요 몇 분 걸리니까요 그냥 실행하죠 + +01:00.020 --> 01:01.460 +그 후에 더 얘기할게요 + +01:01.460 --> 01:02.930 +그럼 시작하죠 + +01:03.020 --> 01:04.850 +출발해요 + +01:04.850 --> 01:06.860 +뭘 긁어내죠? 
+ +01:06.860 --> 01:07.220 +좋아요 + +01:07.220 --> 01:10.310 +패키지 안의 에이전트 폴더에 있어요 + +01:10.310 --> 01:14.210 +할인 닷 파이에 나와 있어요 + +01:14.240 --> 01:16.760 +할인 닷 파이를 보여드릴게요 + +01:16.790 --> 01:23.900 +이건 파이썬 으로 구성된 모듈입니다 피드들을 정의하는 것으로 시작을 하죠 + +01:23.900 --> 01:31.790 +URL에서 RSS로의 피드입니다 좋은 거래가 다양한 카테고리로 발표되는 유용한 + +01:31.880 --> 01:33.350 +것들이죠 + +01:33.350 --> 01:38.210 +이 부문들은 우리가 잘 아는 부문과 막상막하입니다 우리 모델은 + +01:38.210 --> 01:45.350 +전자제품 컴퓨터, 자동차 그리고 스마트 홈 가든 부문에서 훈련받았거든요 + +01:45.350 --> 01:46.370 +여기 영상이 있어요 + +01:46.370 --> 01:51.740 +다른 것도 많아요 좀 더 기다릴 배짱이 있다면 여기에 비트를 + +01:51.740 --> 01:55.610 +잔뜩 추가해 작업할 데이터 모음이 커지죠 + +01:55.610 --> 02:01.030 +여기 몇 가지 유틸리티가 있는데 여러분이 원할 때 살펴볼 수 있어요 + +02:01.060 --> 02:06.910 +추출물이라는 메서드와 함수가 있는데 HTML을 청소하고 유용한 텍스트를 반환하죠 + +02:06.910 --> 02:09.880 +그리고 학급 간의 마찰도 있죠 + +02:09.880 --> 02:14.380 +이건 RSS 피드에서 회수한 거래예요 + +02:14.410 --> 02:16.840 +긁는 게 아니라 RSS에서 회수하는 거군요 + +02:16.840 --> 02:19.720 +비트 박스도 좀 살펴볼 거예요 + +02:19.720 --> 02:21.790 +어느 정도 사실인 거죠 + +02:22.060 --> 02:28.630 +init에서 하는 일은 항목이라는 걸 갖는 거예요 그냥 값의 사전이죠 + +02:28.630 --> 02:30.160 +우린 통과할 거예요 + +02:30.160 --> 02:32.650 +RSS 피드에서 바로 얻을 수 있는 정보죠 + +02:32.680 --> 02:37.840 +우리가 하는 일은 제목과 요약을 고르고 링크를 가져오는 거죠 + +02:37.840 --> 02:45.400 +여길 보시면 URL을 갖고 그걸 페치하는 거죠 + +02:45.400 --> 02:47.500 +그래서 긁어내고 있어요 + +02:48.520 --> 02:49.330 +미안해요 + +02:49.510 --> 02:57.780 +URL 페치잉을 하고 그 결과들을 입력하고 멋진 put으로 그걸 분석하죠 + +02:57.780 --> 03:04.620 +첫째 주에 했던 것과 유사하죠 요청에서 검색한 URL 파싱 + +03:04.620 --> 03:06.600 +때요 get이요. 
+ +03:07.380 --> 03:08.220 +네 + +03:08.280 --> 03:13.860 +그리고 내용물을 스크럽하는 것도 있어요 + +03:14.190 --> 03:18.930 +그런 다음 기능이 있다면 일부 기능을 구축하죠 + +03:18.930 --> 03:27.210 +RSS 비트로 들어온 걸 깨끗하게 청소해서 레코드로 만드는 건 + +03:27.210 --> 03:30.390 +다 긁어내는 코드예요 + +03:30.570 --> 03:33.090 +그리고 인쇄하는 것도 있어요 + +03:33.690 --> 03:39.030 +이 클래스 메서드 페치는 조금 전에 시작한 것으로 실행이 끝난 걸 + +03:39.060 --> 03:40.110 +보실 수 있죠 + +03:40.260 --> 03:48.090 +하는 일은 모든 피드를 반복하는 겁니다 피드 파서라는 걸 호출하죠 + +03:48.090 --> 03:48.090 +파즈예요 + +03:48.090 --> 03:55.500 +피드파서는 제가 가져온 패키지예요 RSS 피드를 끌어오게 해주는 유용한 패키지죠 + +03:55.500 --> 03:58.380 +사전으로서 그걸 줘요 + +03:58.380 --> 04:06.180 +파이썬 패키지로 Feedparer를 이용하고 있습니다 각각의 피드에서 가져온 상위 + +04:06.210 --> 04:08.940 +10개만 가지고 올 수 있죠 + +04:08.940 --> 04:11.850 +Get in get it up + +04:12.000 --> 04:15.990 +물론 원한다면 더 많은 데이터를 가져올 수도 있어요 + +04:16.020 --> 04:19.200 +지금은 그냥 그 숫자에 묶을게요 + +04:19.200 --> 04:23.790 +각각의 캐릭터는 이게 우리 자신이에요 + +04:23.790 --> 04:28.800 +그 항목을 위해 우리에 대한 예를 만들죠 + +04:28.800 --> 04:32.070 +그 항목은 물론 우리가 방금 본 사전이죠 + +04:32.340 --> 04:36.000 +시간이 좀 있어요 혹시 궁금하다면 여기서 자요 + +04:36.030 --> 04:42.390 +할인 웹사이트에서 웹 페이지를 검색하러 get to를 하는 + +04:42.390 --> 04:46.590 +거니까 반사회적이라고 생각했거든요 + +04:46.590 --> 04:52.950 +수많은 요청이 들어온 그 웹사이트를 공격하면 짧은 시간 안에 하나씩 차례로 올라올 거예요 + +04:52.950 --> 05:01.040 +웹 서버를 너무 혹사하거나 너무 많은 것을 요구하지 않도록 get 절약을 + +05:01.040 --> 05:04.250 +하는 것이 좋은 방법이죠. 
+ +05:04.250 --> 05:11.030 +우리가 웹사이트에서 이런 계약을 회수하면 더 나은 시민이 + +05:11.030 --> 05:12.740 +되는 거죠 + +05:12.740 --> 05:13.760 +그래서 그렇게 했어요 + +05:13.790 --> 05:16.430 +그 거래들이 이 공책에 담겨 있어야 해요 + +05:16.430 --> 05:21.080 +하지만 먼저 이 유용한 모듈에서 몇 가지 다른 걸 언급할게요 + +05:21.260 --> 05:26.090 +여기 세 가지 강좌가 있는데 잠시 후에 중요하게 다룰 거예요 구조화된 출력물들을 + +05:26.090 --> 05:28.820 +어떻게 정의하는지 보여주거든요 + +05:28.940 --> 05:37.880 +GPT 4에 답변을 요청할 때 클래스 딜을 정의하고 있습니다 클래스 딜 선택과 클래스 + +05:37.880 --> 05:39.350 +기회죠 + +05:39.350 --> 05:43.010 +이게 기본 모델의 서브클래스인 게 보이시죠 + +05:43.040 --> 05:49.100 +베이스 모델은 파이댄틱 패키지에서 나온 건데 여러 가지 기능이 있어요 + +05:49.100 --> 05:57.920 +그 중 하나는 아주 쉽게 클래스의 JSON 버전과 그 구조, 클래스 자체를 전환할 수 있게 해줍니다 + +05:57.920 --> 06:04.310 +또한 클래스가 스키마에 충실하도록 시행할 수도 있어요 + +06:04.460 --> 06:08.000 +여러분 중 많은 분이 아주 익숙하실 거예요 + +06:08.000 --> 06:08.810 +현학적이군요 + +06:08.900 --> 06:15.560 +하지만 그걸 사용하려면 기본 모델의 서브클래스인 새 클래스를 생성하면 돼요 + +06:15.770 --> 06:20.360 +첫 번째 클래스는 거래라고 불러요 + +06:20.360 --> 06:25.010 +제품 설명, 가격, URL 같은 것만 있는 거죠 + +06:25.040 --> 06:28.010 +설명 가격 URL 예요 + +06:28.100 --> 06:31.280 +거래 선택이라는 것도 있어요 + +06:31.280 --> 06:35.060 +GPT 4에 대응하라고 요청할 거예요 + +06:35.060 --> 06:37.880 +우린 거래를 원한다고 말할 거예요 + +06:37.880 --> 06:39.650 +이게 중요한 거예요 + +06:39.650 --> 06:41.180 +아주 간단해요 + +06:41.180 --> 06:48.500 +딜이라고 부르는 단일 특성에 이런 딜의 리스트를 가진 뭔가죠 + +06:48.950 --> 06:50.690 +그게 다인 것 같아요 + +06:50.690 --> 06:55.000 +딜 선택은 딜 객체의 리스트를 원한다는 뜻이죠 + +06:55.000 --> 06:58.810 +JSON 스피커로 이걸 생각해보면 뭐죠? 
+ +06:58.840 --> 07:04.630 +JSON 용어에서 이건 이런 모습입니다 이 딜 선택은 하나의 객체로 + +07:04.630 --> 07:07.750 +오직 하나의 특성 딜을 갖고 있죠 + +07:07.750 --> 07:09.850 +그게 목록이에요 + +07:09.940 --> 07:13.720 +각각의 개체들의 목록이죠 + +07:13.720 --> 07:15.850 +JSON으로 가면 이렇게 보일 거예요 + +07:15.880 --> 07:18.520 +제품 설명도 있어요 + +07:22.960 --> 07:28.720 +가격표는 플로트라고 해요 + +07:29.410 --> 07:35.620 +URL 같은 것도 있고요 + +07:37.660 --> 07:41.590 +URL 같은 거요 그럼 거래가 성사되죠 + +07:41.590 --> 07:44.920 +아마 목록에 그런 게 아주 많을 거예요 + +07:44.920 --> 07:47.170 +그럼 거래 선택이 완성되죠 + +07:47.170 --> 07:53.410 +제가 방금 입력한 JSON을 보시면 이 클래스 정의와 비교해 보세요 + +07:53.410 --> 07:57.640 +서로 어떻게 유사한지 더 명확해지길 바라요 + +07:58.090 --> 08:02.740 +이건 이 구조의 JSON 표현이에요 + +08:02.770 --> 08:10.450 +GPT 4에 구조화된 출력이 이 포맷에 있길 원한다고 할 때 여러분이 + +08:10.450 --> 08:16.600 +응답하는 일종의 JSON이 되길 원한다고 하는 거죠 + +08:16.630 --> 08:18.250 +그게 다예요 + +08:18.760 --> 08:23.560 +그게 어떻게 작동하는지 감을 잡으셨으면 좋겠네요 + +08:23.740 --> 08:25.030 +이건 삭제할게요 + +08:25.030 --> 08:26.320 +그럴 필요 없어요 + +08:26.350 --> 08:31.210 +마지막으로 언급할 것은 기회라는 클래스도 있다는 겁니다 여기서 정의한 것으로 + +08:31.210 --> 08:33.220 +기본적으로 거래를 하는 거죠 + +08:33.220 --> 08:38.920 +이 중 하나와 견적서요 견적은 나중에 사용할 겁니다 이 거래의 가치를 + +08:38.920 --> 08:40.570 +평가할 때요 + +08:40.570 --> 08:46.780 +그리고 할인은 단순히 거래 가격과 예상 가격의 차이죠 + +08:46.810 --> 08:50.230 +이 제품은 얼마나 할인받을 수 있나요? 
+ +08:50.620 --> 08:54.070 +그게 설정이에요 + +08:54.370 --> 08:58.210 +그럼 다시 이쪽으로 오시죠 + +08:58.510 --> 09:04.690 +그 부분을 좀 고쳐 보죠 + +09:04.960 --> 09:09.430 +그래서 우린 방금 도망치고 긁고 거래했어요 + +09:09.430 --> 09:10.510 +도로시, 가져와요 + +09:10.660 --> 09:13.120 +이제 몇 개인지 볼 수 있어요 + +09:13.150 --> 09:16.870 +50건의 거래가 성사됐어요 + +09:16.900 --> 09:24.310 +50개 세일을 하는 이유는 5개의 피드가 있는데 각각 10개씩 세일을 요구했기 때문이에요 + +09:24.310 --> 09:26.230 +그럼 50명이 되는군요 + +09:26.380 --> 09:28.870 +여러분이 기대하시는 게 그거이길 바라요 + +09:28.900 --> 09:35.590 +거래 번호 44를 보면 프린트가 잘 됐어요 파이썬 마법 + +09:35.590 --> 09:42.940 +기능을 사용했거든요 복제 기능을 잘 프린트하도록요 + +09:43.150 --> 09:46.660 +그게 44번 계약 조건이에요 + +09:46.750 --> 09:49.770 +전체 묘사를 해 보죠 + +09:49.770 --> 09:52.710 +우리가 가진 정보는 이 비트에 다 있어요 + +09:52.740 --> 09:58.050 +매일 거래되는 차고 저장장치 bla bla bla bla bla bla bla bla bla가 많아요 + +09:58.080 --> 10:00.750 +배송비를 피하려면 설치를 선택하세요 + +10:01.380 --> 10:01.860 +네 + +10:01.860 --> 10:04.230 +그런데 말이죠 + +10:04.230 --> 10:07.260 +그걸 보면 몇 가지 눈에 띌 거예요 + +10:07.290 --> 10:12.690 +그중 하나는 RSS 피드에 가격이 따로 나오지 않는다는 거죠 + +10:12.690 --> 10:16.380 +Get it의 가격은 모르고 모양만 알죠 + +10:16.380 --> 10:20.760 +또 다른 특징은 가격조차 없다는 거예요 + +10:20.760 --> 10:22.590 +얼마나 떨어졌는지 알 수 있죠 + +10:22.590 --> 10:27.690 +무료 배송 같은 건 알려 주지만 이 제품과 관련된 가격은 + +10:27.690 --> 10:29.520 +알려 주지 않아요 + +10:29.520 --> 10:30.900 +그건 지루해요 + +10:31.050 --> 10:36.450 +그래서 항상 적절하게 사용할 수 있는 건 아니에요 + +10:36.450 --> 10:41.430 +파싱 작업을 통해 각각의 항목에 대해 제공되는 실제 가격이 얼마인지 + +10:41.430 --> 10:42.870 +알아내야 해요 + +10:42.870 --> 10:51.950 +이 아이템들을 보시면 여러 개를 하나로 묶은 것도 있어요 하나의 카피에요 + +10:51.950 --> 10:56.390 +애플 워치의 다른 모델은 전부 20% 할인을 제공해요 + +10:56.480 --> 11:02.930 +따라서 그걸 이해하고 원하는 걸 끌어내는 건 아주 어려울 겁니다 아주 힘들고 그걸 견고한 방식으로 + +11:02.930 --> 11:05.600 +코드화하는 건 아주 어렵죠 + +11:05.600 --> 11:08.270 +그래서 개척자 모델을 써야 해요 + +11:08.270 --> 11:16.250 +GPT 40을 이용해서 각 RSS의 피드와 거래를 수집하고 그 거래를 뭔가로 + +11:16.250 --> 11:20.090 +바꿀 겁니다 우리에게 유용한 거래죠 + +11:20.090 --> 11:26.240 +50개 모두를 보내 이렇게 말할 겁니다 이 대형 세트에서 가장 명확하게 설명된 + +11:26.240 --> 11:32.420 +최고의 계약 5개를 찾아 
그걸 추려내서 요약해 우리에게 보내라고요 + +11:32.420 --> 11:34.730 +구조화된 출력에서 그걸 원하죠 + +11:34.730 --> 11:39.620 +어떤 형식을 원하는지 말하고 정확히 그 형식으로 답변해 달라고 요청할 + +11:39.620 --> 11:40.400 +거예요 + +11:40.430 --> 11:44.000 +이제 티를 입힌 거예요 + +11:44.000 --> 11:45.680 +우리가 뭘 이루려는 건지 알잖아요 + +11:45.680 --> 11:48.110 +다음 비디오에서 보여드릴게요 diff --git a/week5/community-contributions/subtitles/srts/59671567/en_US.srt b/week5/community-contributions/subtitles/srts/59671567/en_US.srt new file mode 100755 index 0000000..b00198c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59671567/en_US.srt @@ -0,0 +1,700 @@ +WEBVTT + +00:00.950 --> 00:04.760 +Well, the first thing you're going to notice is that I don't have a notebook open for you. + +00:04.760 --> 00:08.090 +And that's because this time we're just going to be looking at code. + +00:08.090 --> 00:11.150 +Uh, no notebooks for this recording. + +00:11.150 --> 00:18.710 +For this video, we are going to look at the actual finally, the actual agent framework itself that + +00:18.710 --> 00:22.880 +we're writing that we've, that we've built, that we're going to run it in. + +00:22.880 --> 00:27.530 +And it sounds like something so fancy, but in fact, it's going to be incredibly simple. + +00:27.530 --> 00:32.240 +And it's the reason why I didn't want to use an off the shelf kind of package, because I just wanted + +00:32.240 --> 00:37.880 +to show you that there's nothing more than connecting together, different llms and bits of Python code + +00:37.880 --> 00:40.340 +to carry out a more advanced function. + +00:40.640 --> 00:45.440 +Now, typically when you talk about these sorts of agent frameworks, what you're looking for is you're + +00:45.440 --> 00:50.630 +looking at something that can handle the database, uh, database connectivity, in our case to our + +00:50.630 --> 00:53.990 +Chrome data store memory. + +00:54.080 --> 00:57.590 +So we're going to want some sense of persistent memory. 
+ +00:57.770 --> 01:03.270 +Um, that can that can survive between different chats or of different instances, so we'll understand + +01:03.270 --> 01:05.040 +how memory is going to work. + +01:05.700 --> 01:12.180 +And also stuff like logging I mentioned is sort of good, good practice to be able to understand what's + +01:12.180 --> 01:13.620 +happened in your environment. + +01:13.620 --> 01:17.610 +And then there's also going to be some user interface concerns that we're going to want to build a Gradio + +01:17.640 --> 01:20.250 +UI that's going to be able to use this framework. + +01:20.250 --> 01:21.960 +So it's going to have to to handle that. + +01:21.960 --> 01:26.850 +But that last one we're going to get to in the next set of series. + +01:27.030 --> 01:31.500 +Uh, so for now we're going to look at our, at our framework. + +01:31.500 --> 01:34.080 +Just before that, a couple of things to point out. + +01:34.080 --> 01:38.580 +First of them, when we looked in our agents folder, we've looked at each of these different classes. + +01:38.580 --> 01:40.920 +That represents a different unit of work. + +01:40.920 --> 01:45.450 +And you'll notice that each of them were subclasses of agent. + +01:45.570 --> 01:47.100 +Uh, and you may have wondered, what is that? + +01:47.130 --> 01:52.260 +What is this agent that he's subclassing is there must be all sorts of functionality in there. + +01:52.260 --> 01:55.920 +Well, actually, no, there's very little functionality in this superclass. + +01:55.920 --> 02:00.210 +Uh, it is actually just an abstract class. + +02:00.270 --> 02:03.820 +Uh, which is able to log a message. + +02:03.910 --> 02:07.090 +And the way that it logs a message is it does logging.info. + +02:07.120 --> 02:08.530 +I imagine you're familiar. + +02:08.560 --> 02:10.360 +You've used Python logging, probably. + +02:10.360 --> 02:11.200 +And so you're pretty. 
+ +02:11.230 --> 02:17.590 +Familiar that logging.info logs, uh, to to to wherever you set up a logger for, um. + +02:17.620 --> 02:18.550 +And. + +02:18.700 --> 02:23.560 +The one thing that I'm doing that's a bit different here, um, is that I'm going to. + +02:23.590 --> 02:27.190 +Add in the name of the subclass of the agent. + +02:27.190 --> 02:28.840 +That's that's, uh, that's. + +02:28.870 --> 02:34.120 +Actually, uh, sending this log message and we're going to give each one a color. + +02:34.210 --> 02:39.400 +So that we can see, uh, what which agent is doing what? + +02:39.400 --> 02:45.220 +And for example, if I go back to, uh, or if I go to the specialist agent, you can see that it's + +02:45.220 --> 02:46.300 +given itself a name. + +02:46.330 --> 02:47.560 +Of specialist agent. + +02:47.560 --> 02:51.310 +And its color is red, so that when it logs, it's going to. + +02:51.340 --> 02:52.270 +Appear in red. + +02:52.270 --> 02:54.610 +And you can see that you might have noticed that I didn't do. + +02:54.640 --> 02:56.980 +Logging.info I did self dot log. + +02:56.980 --> 03:00.070 +And that's so that I can call the log message in the superclass. + +03:00.070 --> 03:04.820 +So you know, there's really nothing fancy to this agent superclass. + +03:04.820 --> 03:06.920 +It just meant that we could log consistently. + +03:06.920 --> 03:11.090 +You could equally well have just included that function in the different classes. + +03:11.090 --> 03:14.480 +Nothing particularly special about the agent. + +03:14.930 --> 03:15.860 +All right. + +03:15.860 --> 03:22.250 +One other bit of groundwork to do is to show you this JSON file called memory JSON. + +03:22.250 --> 03:24.710 +And it is a simple piece of JSON. + +03:24.710 --> 03:29.150 +And if it didn't exist then then you'll see it will it will create it manually. + +03:29.150 --> 03:32.870 +Let's open it with the editor, not with the fancy one, but with the normal editor. 
+ +03:32.870 --> 03:34.700 +And this is what it looks like. + +03:34.700 --> 03:36.440 +It is a list. + +03:36.680 --> 03:43.250 +You can see it's got a list on the outside of it, a list of JSON blobs. + +03:43.250 --> 03:48.320 +And each JSON blob is something which may look familiar to you. + +03:48.350 --> 03:49.700 +Do you recognize what it is? + +03:50.120 --> 03:55.070 +Yes, it's of course, an opportunity object turned into JSON. + +03:55.460 --> 04:02.240 +It's got a deal which has a product description, price and URL, and then it has the estimate that's + +04:02.240 --> 04:08.370 +come from the model and it has a discount, which is quite simply this number minus this number. + +04:08.400 --> 04:08.880 +Oops. + +04:08.910 --> 04:09.630 +That number. + +04:09.660 --> 04:10.020 +Ha. + +04:10.380 --> 04:15.300 +Uh, it's how much is this being offered at a discount to what we think it's really worth? + +04:15.300 --> 04:20.790 +So this memory has three deals that have been surfaced. + +04:21.060 --> 04:26.730 +Uh, now, storing it in a JSON file like this is perhaps not the most industrial strength. + +04:26.730 --> 04:31.830 +If that went into a database, then it's something that could handle multiple of these agent frameworks + +04:31.830 --> 04:32.160 +running. + +04:32.160 --> 04:37.740 +But we only have one agent framework that will only write to this memory, uh, once when it does its + +04:37.740 --> 04:38.220 +work. + +04:38.220 --> 04:42.150 +So that wasn't strictly necessary, but of course it could go in Chrome easily. + +04:42.150 --> 04:46.890 +We've got a database hanging around already, so you could certainly take that as an exercise if you + +04:46.890 --> 04:47.550 +wish. + +04:47.880 --> 04:52.620 +Um, that would certainly be a good, good production improvement to make. + +04:53.250 --> 05:01.620 +But anyway, the time has come to look at the actual, uh, the, the framework itself, the agent framework. 
+ +05:01.710 --> 05:04.620 +Uh, so let's do that right away. + +05:06.220 --> 05:11.710 +Um, and I want to make sure I bring up the right class so that I don't give too much away of what is + +05:11.710 --> 05:12.460 +still to come. + +05:12.460 --> 05:12.970 +Here it is. + +05:12.970 --> 05:14.770 +Deal agent framework. + +05:15.130 --> 05:16.540 +Uh, okay. + +05:16.540 --> 05:24.790 +So the, uh, agent framework, um, is something which, first of all, it sets up logging. + +05:24.790 --> 05:31.870 +This is something which is standard Python stuff that will make sure that when someone does logging.info, + +05:32.020 --> 05:36.790 +um, it will get sent to standard out and it has some, some structure to it. + +05:36.790 --> 05:43.300 +So this is a sort of standard, uh, um, boilerplate Python stuff to make sure we log properly. + +05:43.300 --> 05:46.330 +And then this is the deal agent framework. + +05:46.330 --> 05:51.640 +It's got coded in it, the database, the thing that I accidentally left off a couple of times, uh, + +05:51.640 --> 05:55.480 +so that's coded in here along with the name of the memory file. + +05:55.600 --> 05:58.750 +When it starts up, it will begin logging. + +05:58.780 --> 06:00.730 +It will make a log message itself. + +06:00.730 --> 06:02.170 +It will load the env. + +06:02.200 --> 06:05.770 +This is something that we, of course, always do in all of our Jupyter notebooks. + +06:05.770 --> 06:07.520 +So this needs to do it now. + +06:07.760 --> 06:12.800 +It will, it will create or it will access the database. + +06:13.250 --> 06:17.090 +Um, it will read in memory from that JSON file. + +06:17.390 --> 06:25.010 +Uh, it will get the products collection and it will then set the planning agent, uh, initializing + +06:25.010 --> 06:28.070 +it with the collection that it needs to look at. + +06:28.100 --> 06:30.110 +And then it will say, I'm ready. + +06:30.620 --> 06:35.930 +This this method here read memory does exactly what it says on the tin. 
+ +06:35.930 --> 06:39.140 +It, uh, you can see I haven't hard coded the name of the file. + +06:39.140 --> 06:40.160 +Well done, well done. + +06:40.190 --> 06:40.700 +Me? + +06:40.820 --> 06:43.880 +Uh, so it loads this in from JSON. + +06:43.880 --> 06:50.030 +If it doesn't already exist, then it just returns an empty one, which is completely fine, because + +06:50.240 --> 06:52.130 +there's also a function. + +06:52.160 --> 07:00.890 +A method here writes memory, which writes out these opportunities to, uh, the the memory file. + +07:01.940 --> 07:08.710 +There's a log, uh, a method here to send a log message with its own agent framework tag. + +07:09.490 --> 07:14.590 +Then there's a run method that actually kicks off the planning agent. + +07:14.800 --> 07:20.860 +Uh, and uh, once it gets back a result, it says it calls self dot planner. + +07:21.070 --> 07:22.120 +Plan that. + +07:22.120 --> 07:23.500 +Of course, that's the big moment. + +07:23.500 --> 07:26.200 +That's when we we actually set this thing going. + +07:26.200 --> 07:28.750 +And look it passes in its memory. + +07:28.780 --> 07:32.350 +It passes in the memory of opportunities that it knows about. + +07:32.350 --> 07:38.050 +So in that goes uh, and then it logs the results. + +07:38.050 --> 07:43.600 +And if the result wasn't none, remember that the planner might return none if it doesn't find anything + +07:43.600 --> 07:43.990 +new. + +07:43.990 --> 07:50.020 +But if it was a real result, then it adds it to the memory and writes the memory and returns the result. + +07:50.020 --> 07:51.220 +And that is it. + +07:51.220 --> 07:57.040 +This last piece here is coming up in the UI to the next time, but for now, that is the end of the + +07:57.040 --> 07:57.970 +framework. + +07:58.420 --> 07:59.980 +So how do we run this framework? + +07:59.980 --> 08:01.690 +Well, we run it from the command line. + +08:01.690 --> 08:03.100 +This is now for reals. 
+ +08:03.100 --> 08:05.500 +This is no longer this is all Python code. + +08:05.500 --> 08:09.080 +We're outside the Jupyter world and we're using this interface for convenience. + +08:09.080 --> 08:09.710 +Really. + +08:09.980 --> 08:12.440 +But you could be using VSCode or something like that. + +08:12.470 --> 08:16.310 +The way to run a terminal is we press the plus button here. + +08:16.310 --> 08:20.330 +We can also do it in JupyterLab and we can open a new terminal like this. + +08:20.330 --> 08:25.010 +This is a you can also, of course be doing this however you like to do it. + +08:25.130 --> 08:29.540 +Um, it could be an anaconda prompt or it could be you could be using. + +08:29.540 --> 08:32.240 +If you're using a mac, then you can just bring up a new terminal window. + +08:32.330 --> 08:36.380 +Um, but I am first going to have to activate my conda environment. + +08:36.560 --> 08:47.330 +Um, so I do conda activate LMS, uh, which will mean that as you're now very familiar, uh, my environment + +08:47.330 --> 08:54.350 +changes from base to LMS because I'm now in that Anaconda environment, and I really think that's it. + +08:54.350 --> 09:01.730 +I should now be able to say Python deal agent framework. + +09:02.510 --> 09:04.010 +Spell it right. + +09:06.410 --> 09:07.490 +Dot py. + +09:07.640 --> 09:08.990 +And that should really be it. + +09:09.000 --> 09:11.670 +There is one more thing I need to show you before I do that though. + +09:11.820 --> 09:14.190 +I almost forgot just as well. + +09:14.190 --> 09:16.320 +It's just this very last few lines here. + +09:16.320 --> 09:20.070 +You might have been wondering what's going to happen if I do that, if I don't show you this. + +09:20.460 --> 09:26.700 +So the very end, we of course, make it clear that if this is being run from the command line so that + +09:26.700 --> 09:32.550 +this file is main, then we instantiate, we create a new instance of deal agent framework. 
+ +09:32.550 --> 09:35.760 +And we call this run method that we just looked at. + +09:35.790 --> 09:39.780 +That's the little missing link there just to explain okay okay. + +09:39.810 --> 09:40.320 +With that. + +09:40.320 --> 09:41.640 +Now I should be able to run this. + +09:41.670 --> 09:42.540 +Take a deep breath. + +09:42.540 --> 09:43.410 +Let's do it. + +09:46.950 --> 09:49.020 +A long pause while it thinks, huh? + +09:51.060 --> 09:51.930 +All right. + +09:51.930 --> 09:53.310 +Let's see what's going on. + +09:53.310 --> 09:55.800 +The agent framework says it's initializing. + +09:55.800 --> 09:58.350 +The planning agent is initializing scanner. + +09:58.380 --> 09:59.850 +Agent becomes ready. + +09:59.850 --> 10:02.550 +The ensemble agent gets ready. + +10:02.550 --> 10:04.170 +So then it's a bit hard to see this. + +10:04.170 --> 10:09.600 +Read the specialist agent, which is connecting to modal, gets ready. + +10:10.300 --> 10:12.910 +And then the frontier model gets ready. + +10:12.940 --> 10:14.920 +It has to load in that sentence transformer. + +10:14.920 --> 10:16.360 +You can see it's happening there. + +10:16.360 --> 10:19.810 +And then the random forest agent gets ready as well. + +10:19.810 --> 10:21.340 +And that's done. + +10:21.430 --> 10:24.340 +The ensemble is ready, the messenger is ready. + +10:24.340 --> 10:25.810 +And everything in blue. + +10:25.840 --> 10:28.360 +Here it says Agent Framework is ready. + +10:28.360 --> 10:33.130 +So it went through instantiating and creating all of our agents and setting them all up. + +10:33.130 --> 10:39.190 +They've loaded their model weights that they've the single one the specialist model has connected to + +10:39.220 --> 10:39.970 +modal. + +10:40.390 --> 10:46.180 +And they have now, um, all prepared themselves for action. + +10:46.210 --> 10:51.010 +Model weights have been read in and it's kicked off a run. 
+ +10:51.040 --> 10:56.110 +The scanner agent you can see here, uh, received there's a there's an error there. + +10:56.110 --> 10:57.160 +I'll improve that. + +10:57.160 --> 10:59.020 +It's not printing the actual number of results. + +10:59.020 --> 11:02.290 +It's printing needs an F before it. + +11:02.620 --> 11:05.080 +Uh, I will have fixed that before you see it. + +11:05.170 --> 11:07.660 +Uh, so the scanner agent is making that call. + +11:07.780 --> 11:16.400 +Um, and now the planning agent is pricing up the deal, and it's now sitting with modal while my machine + +11:16.400 --> 11:17.960 +warms up. + +11:18.050 --> 11:26.420 +Uh, but as they say on those cooking shows, uh, if you know what I mean, uh, you have those cooking + +11:26.420 --> 11:32.270 +shows when they put the thing in the oven and they say, and here's the one that I put in earlier, + +11:32.270 --> 11:33.980 +and they pull that out of the oven. + +11:33.980 --> 11:37.100 +I always think that's a devious trick when they do that. + +11:37.190 --> 11:39.800 +But you're going to have to trust me that I did just do this. + +11:39.800 --> 11:45.440 +I just kicked it off a few minutes ago and it ran over here, and that's where I left it, and it finished + +11:45.440 --> 11:46.070 +up its thing. + +11:46.070 --> 11:52.700 +And you can see that basically it for each of the five deals that returned, it made a series of calls + +11:52.700 --> 11:54.140 +to all of the different models. + +11:54.140 --> 11:55.670 +You can read through each of this. + +11:55.700 --> 11:58.490 +The ensemble model returns a number. + +11:58.700 --> 12:01.160 +Um, so we should we should work back. + +12:01.190 --> 12:06.350 +First of all, the specialist model calculates a number and then the frontier model calculates the number + +12:06.350 --> 12:08.990 +with the Rag lookup and then the random forest model. + +12:08.990 --> 12:10.430 +And then the ensemble model. 
+ +12:10.430 --> 12:12.870 +And that goes back to the planning agent. + +12:13.200 --> 12:15.300 +And so you can read through all of this. + +12:15.300 --> 12:20.100 +And at the end of this it completed and it sent a push notification. + +12:20.340 --> 12:29.700 +And I did indeed get a push notification about this particular deal that it found, which was a certified + +12:29.700 --> 12:39.090 +refurbished laptop boasting a 13th generation i5, which the estimate was $925 for that, which was + +12:39.090 --> 12:45.030 +a very healthy discount to what it was going for at $560. + +12:45.030 --> 12:46.950 +So there we have it. + +12:46.950 --> 12:50.970 +That was the run of our agent framework running. + +12:51.210 --> 12:55.980 +And yeah, it's incredibly satisfying to see that happening. + +12:55.980 --> 13:00.870 +I love absolutely love watching it and could watch it all day. + +13:00.870 --> 13:04.200 +I have to say it's it's hugely enjoyable. + +13:04.230 --> 13:07.920 +Look, I didn't even need to put that to bring that one out of the oven, because the one that we put + +13:07.920 --> 13:09.780 +in the oven is now cooking up. + +13:09.780 --> 13:14.960 +You can see once modal is warmed up, it's quite fast in how it turns around the different things. + +13:15.170 --> 13:16.910 +So it's going through there. + +13:16.910 --> 13:18.020 +It's come up with something. + +13:18.020 --> 13:22.790 +And of course, what it's proposing is different to what it proposed a moment ago. + +13:23.000 --> 13:26.870 +And I just got the notification on my phone and no doubt on my watch. + +13:27.350 --> 13:34.880 +So we just watched we just saw the full framework running and me getting the notification. + +13:34.880 --> 13:42.980 +And you can read through this trace and see how each of the agents are collaborating to solve this problem. + +13:43.280 --> 13:51.470 +Um, and so all that remains for this is to make it really autonomous so that it's running all the time. 
+ +13:51.470 --> 13:56.810 +And while we're doing that, we might as well slap on a great UI because we know how to use gradio and + +13:56.810 --> 13:57.680 +we know how good it is. + +13:57.710 --> 14:01.190 +And all of that is what we're going to do tomorrow. + +14:01.250 --> 14:04.460 +But for now, I'm going to let you, of course, enjoy this. + +14:04.460 --> 14:07.670 +I do hope that you're running this a few times because it's so satisfying. + +14:07.700 --> 14:08.900 +Honestly, I love it. + +14:08.930 --> 14:14.060 +Hope you're finding just as much enjoyment as me, and I will see you for the slides to wrap up the + +14:14.060 --> 14:14.540 +day. diff --git a/week5/community-contributions/subtitles/srts/59671567/ja_JP.srt b/week5/community-contributions/subtitles/srts/59671567/ja_JP.srt new file mode 100755 index 0000000..ef13e63 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59671567/ja_JP.srt @@ -0,0 +1,625 @@ +WEBVTT + +00:00.950 --> 00:04.760 +さて、 まず最初にお気づきになるのは、 私があなたのためにノートを開いていないということだ。 + +00:04.760 --> 00:08.090 +今回はコードを見るだけだからだ。 + +00:08.090 --> 00:11.150 +ええと、 このレコーディングにはノートはありません。 + +00:11.150 --> 00:22.880 +このビデオでは、 私たちが書いている実際のエージェント・フレームワークを最終的に見ていきます。 + +00:22.880 --> 00:27.530 +とても派手なことのように聞こえるが、 実際は驚くほど簡単なことだ。 + +00:27.530 --> 00:32.240 +既製品のようなパッケージを使いたくなかったのは、 より高度な機能を実行するためには、 + +00:32.240 --> 00:40.340 +さまざまなllmやPythonコードの断片をつなげるだけでは不十分だということを伝えたかったからだ。 + +00:40.640 --> 00:45.440 +さて、 一般的にこの種のエージェントフレームワークについて語るとき、 + +00:45.440 --> 00:53.990 +あなたが探しているのは、 データベース、 あー、 データベース接続を処理できるものです。 + +00:54.080 --> 00:57.590 +だから、 永続的な記憶というものが欲しいんだ。 + +00:57.770 --> 01:05.040 +異なるチャットや異なるインスタンス間で生き残ることができるので、 メモリがどのように機能するかを理解することができます。 + +01:05.700 --> 01:13.620 +それに、 さっき言ったロギングのようなことも、 自分の環境で何が起きたかを理解するための良い練習になる。 + +01:13.620 --> 01:20.250 +そして、 このフレームワークを使うことができるGradio UIを作りたいというユーザー・インターフェイスの懸念も出てくるだろう。 + +01:20.250 --> 01:21.960 +だから、 それを処理しなければならない。 + +01:21.960 --> 01:26.850 +しかし、 その最後については、 次のシリーズで紹介するつもりだ。 + +01:27.030 --> 
01:31.500 +とりあえず、 僕らのフレームワークを見てみよう。 + +01:31.500 --> 01:34.080 +その前に、 いくつか指摘しておきたいことがある。 + +01:34.080 --> 01:38.580 +まず、 エージェント・フォルダーを見て、 それぞれのクラスを調べた。 + +01:38.580 --> 01:40.920 +これは別の仕事の単位を表している。 + +01:40.920 --> 01:45.450 +そして、 それぞれがエージェントのサブクラスであることにお気づきだろう。 + +01:45.570 --> 01:47.100 +それは何だろう? + +01:47.130 --> 01:52.260 +彼がサブクラス化しているエージェントには、 いろいろな機能があるはずだ。 + +01:52.260 --> 01:55.920 +このスーパークラスにはほとんど機能がない。 + +01:55.920 --> 02:00.210 +ええと、 これは単なる抽象クラスです。 + +02:00.270 --> 02:03.820 +ええと、 メッセージを記録することができます。 + +02:03.910 --> 02:07.090 +そして、 メッセージのログを取る方法は、 ログを取ることだ。 インフォメーション + +02:07.120 --> 02:08.530 +お馴染みだろう。 + +02:08.560 --> 02:10.360 +Pythonのロギングは使ったことがあるだろう。 + +02:10.360 --> 02:11.200 +だから君は可愛いんだ。 + +02:11.230 --> 02:17.590 +お馴染みの伐採。 インフォメーション・ログは、 あなたがロガーを設定した場所に、 ええと、 ログを記録します。 + +02:17.620 --> 02:18.550 +そして + +02:18.700 --> 02:23.560 +僕がここでやっていることの中で、 ちょっと違うのは、 そうだね。 + +02:23.590 --> 02:27.190 +エージェントのサブクラスの名前を追加します。 + +02:27.190 --> 02:28.840 +それは、 それは、 それは、 それは。 + +02:28.870 --> 02:34.120 +実は、 このログメッセージを送信して、 それぞれに色をつけているんだ。 + +02:34.210 --> 02:39.400 +そうすれば、 どのエージェントが何をしているのかがわかる。 + +02:39.400 --> 02:46.300 +例えば、 スペシャリスト・エージェントに戻ってみると、 自分で名前をつけているのがわかる。 + +02:46.330 --> 02:47.560 +専門エージェントの。 + +02:47.560 --> 02:51.310 +そしてその色は赤。 + +02:51.340 --> 02:52.270 +赤く表示される。 + +02:52.270 --> 02:54.610 +そして、 私がしていないことに気づいたかもしれないことがわかるだろう。 + +02:54.640 --> 02:56.980 +ロギング。 情報 セルフ・ドット・ログを取った。 + +02:56.980 --> 03:00.070 +これは、 スーパークラスでログ・メッセージを呼び出せるようにするためだ。 + +03:00.070 --> 03:04.820 +だから、 このエージェント・スーパークラスには派手さはない。 + +03:04.820 --> 03:06.920 +ただ、 コンスタントにログを取ることができたということだ。 + +03:06.920 --> 03:11.090 +その機能を別のクラスに含めることも同様にできたはずだ。 + +03:11.090 --> 03:14.480 +エージェントについては特に何もない。 + +03:14.930 --> 03:15.860 +分かった。 + +03:15.860 --> 03:22.250 +もうひとつ、 メモリーJSONと呼ばれるJSONファイルをお見せしましょう。 + +03:22.250 --> 03:24.710 +そしてそれは単純なJSONの一部である。 + +03:24.710 --> 03:29.150 +もし存在しなければ、 手動で作成される。 + +03:29.150 --> 03:32.870 +派手なエディターではなく、 普通のエディターで開いてみよう。 + +03:32.870 --> 03:34.700 +そして、 
こんな感じだ。 + +03:34.700 --> 03:36.440 +これはリストだ。 + +03:36.680 --> 03:43.250 +外側にJSON blobのリストがあるのがわかるだろう。 + +03:43.250 --> 03:48.320 +そして、 それぞれのJSONブロブは、 あなたにとって見慣れたものだろう。 + +03:48.350 --> 03:49.700 +それが何なのか分かりますか? + +03:50.120 --> 03:55.070 +そう、 もちろんJSONに変換された機会オブジェクトだ。 + +03:55.460 --> 04:02.240 +商品説明、 価格、 URLが記載され、 モデルから得られた見積もりが表示され、 + +04:02.240 --> 04:08.370 +単純にこの数字からこの数字を引いた値引きが表示される。 + +04:08.400 --> 04:08.880 +おっと。 + +04:08.910 --> 04:09.630 +その数字だ。 + +04:09.660 --> 04:10.020 +ハ。 + +04:10.380 --> 04:15.300 +つまり、 私たちが本当に価値があると思う金額より、 どれだけ安く提供されているかということだ。 + +04:15.300 --> 04:20.790 +というわけで、 このメモリーには3つの取引が浮上している。 + +04:21.060 --> 04:26.730 +さて、 このようにJSONファイルに保存するのは、 おそらく最も工業的な強さではないだろう。 + +04:26.730 --> 04:32.160 +もしそれがデータベース化されれば、 複数のエージェント・フレームワークの実行に対応できるだろう。 + +04:32.160 --> 04:38.220 +しかし、 エージェントフレームワークは1つしかなく、 このメモリに書き込むのは、 仕事をするときに1回だけだ。 + +04:38.220 --> 04:42.150 +だから厳密には必要なかったが、 もちろんクロームには簡単に入れることができる。 + +04:42.150 --> 04:47.550 +私たちはすでにデータベースを持っているので、 もし望むなら、 それを練習として使うこともできる。 + +04:47.880 --> 04:52.620 +うーん、 それは確かに良いことで、 良いプロダクションの改善になるだろうね。 + +04:53.250 --> 05:01.620 +しかし、 いずれにせよ、 実際の、 あー、 フレームワークそのもの、 エージェントのフレームワークを見る時が来た。 + +05:01.710 --> 05:04.620 +ええと、 ではすぐにそうしましょう。 + +05:06.220 --> 05:12.460 +そして、 これから起こることをあまり漏らさないように、 適切なクラスを取り上げたい。 + +05:12.460 --> 05:12.970 +これだ。 + +05:12.970 --> 05:14.770 +ディール・エージェントのフレームワーク + +05:15.130 --> 05:16.540 +ああ、 わかった。 + +05:16.540 --> 05:24.790 +つまり、 エージェントのフレームワークは、 まずロギングを設定するものだ。 + +05:24.790 --> 05:31.870 +これはPythonの標準的なもので、 誰かがロギングを行ったときに、 そのロギングが行われるようにするものです。 情報は、 ええと、 スタンダードに送信され、 + +05:32.020 --> 05:36.790 +いくつかの構造を持っています。 + +05:36.790 --> 05:43.300 +これはPythonの標準的な、 えーと、 えーと、 定型文のようなもので、 ログを適切に記録するためのものです。 + +05:43.300 --> 05:46.330 +そして、 これがディール・エージェントのフレームワークだ。 + +05:46.330 --> 05:51.640 +データベースがコード化されているんだ。 僕が何度かうっかり省いてしまったことが、 + +05:51.640 --> 05:55.480 +メモリファイルの名前と一緒にコード化されている。 + +05:55.600 --> 05:58.750 +起動するとロギングが始まる。 + +05:58.780 --> 06:00.730 +それ自体がログメッセージを作成する。 + +06:00.730 --> 06:02.170 
+envをロードする。 + +06:02.200 --> 06:05.770 +これはもちろん、 すべてのJupyterノートブックで常に行っていることだ。 + +06:05.770 --> 06:07.520 +だから、 これは今やる必要がある。 + +06:07.760 --> 06:12.800 +データベースを作成したり、 データベースにアクセスしたりする。 + +06:13.250 --> 06:17.090 +そのJSONファイルからメモリーを読み込む。 + +06:17.390 --> 06:28.070 +商品コレクションを取得し、 プランニング・エージェントにセットする。 + +06:28.100 --> 06:30.110 +そして、 準備ができたと言うだろう。 + +06:30.620 --> 06:35.930 +このリード・メモリーの方法は、 まさに書いてあるとおりのものだ。 + +06:35.930 --> 06:39.140 +ファイル名をハードコードしていないのがわかるだろう。 + +06:39.140 --> 06:40.160 +よくやった、 よくやった。 + +06:40.190 --> 06:40.700 +私が? + +06:40.820 --> 06:43.880 +これはJSONから読み込まれる。 + +06:43.880 --> 06:52.130 +もしそれがまだ存在しなければ、 ただ空のものを返す。 + +06:52.160 --> 07:00.890 +このメソッドはメモリを書き出し、 その機会をメモリファイルに書き出す。 + +07:01.940 --> 07:08.710 +ここには、 独自のエージェントフレームワークタグを持つログメッセージを送信するメソッドがあります。 + +07:09.490 --> 07:14.590 +次に、 プランニング・エージェントを実際にキックオフする実行メソッドがある。 + +07:14.800 --> 07:20.860 +そして、 結果が返ってきたら、 セルフ・ドット・プランナーを呼び出すと書いてある。 + +07:21.070 --> 07:22.120 +それを計画する。 + +07:22.120 --> 07:23.500 +もちろん、 それが大事な瞬間だ。 + +07:23.500 --> 07:26.200 +それが、 僕らが実際にこの仕事を始めた時だ。 + +07:26.200 --> 07:28.750 +そして、 記憶の中を通り過ぎる。 + +07:28.780 --> 07:32.350 +知っているチャンスの記憶の中を通り過ぎる。 + +07:32.350 --> 07:38.050 +そして、 その結果をログに記録する。 + +07:38.050 --> 07:43.990 +そして、 結果が「なし」でなかった場合、 プランナーは何も新しいものが見つからなければ「なし」を返すかもしれないことを覚えておいてほしい。 + +07:43.990 --> 07:50.020 +しかし、 それが本当の結果であれば、 それをメモリに追加し、 メモリを書き込んで結果を返す。 + +07:50.020 --> 07:51.220 +それだけだ。 + +07:51.220 --> 07:57.970 +この最後のピースは、 次回のUIで出てくるが、 とりあえず、 これでフレームワークは終わり。 + +07:58.420 --> 07:59.980 +では、 どうやってこのフレームワークを動かすのか? 
+ +07:59.980 --> 08:01.690 +では、 コマンドラインから実行してみよう。 + +08:01.690 --> 08:03.100 +これはもう現実だ。 + +08:03.100 --> 08:05.500 +これはもはや、 すべてPythonのコードだ。 + +08:05.500 --> 08:09.080 +我々はJupyterの世界の外にいるので、 便宜上このインターフェースを使っている。 + +08:09.080 --> 08:09.710 +本当に。 + +08:09.980 --> 08:12.440 +でも、 VSCodeか何かを使っているかもしれない。 + +08:12.470 --> 08:16.310 +ターミナルを起動するには、 ここでプラスボタンを押す。 + +08:16.310 --> 08:20.330 +JupyterLabでも可能で、 このように新しいターミナルを開くことができる。 + +08:20.330 --> 08:25.010 +これはもちろん、 あなたが好きなようにやってもいい。 + +08:25.130 --> 08:29.540 +ええと、 それはアナコンダのプロンプトかもしれないし、 あなたが使っているものかもしれない。 + +08:29.540 --> 08:32.240 +マックを使っているなら、 新しいターミナル・ウィンドウを立ち上げればいい。 + +08:32.330 --> 08:36.380 +でも、 まずcondaの環境をアクティブにする必要がある。 + +08:36.560 --> 08:54.350 +LMSを起動させると、 皆さんもよくご存知のように、 私の環境はベースからLMSに変わります。 + +08:54.350 --> 09:01.730 +これでPythonのエージェントフレームワークと言えるようになったはずだ。 + +09:02.510 --> 09:04.010 +スペルは正しく。 + +09:06.410 --> 09:07.490 +ドットパイ。 + +09:07.640 --> 09:08.990 +それでいいはずだ。 + +09:09.000 --> 09:11.670 +でも、 その前にもうひとつお見せしなければならないことがある。 + +09:11.820 --> 09:14.190 +私も忘れるところだった。 + +09:14.190 --> 09:16.320 +この最後の数行だけだ。 + +09:16.320 --> 09:20.070 +これを見せなかったらどうなるんだろうと思ったかもしれない。 + +09:20.460 --> 09:26.700 +もちろん、 このファイルがメインであるようにコマンドラインから実行されるのであれば、 + +09:26.700 --> 09:32.550 +ディール・エージェント・フレームワークの新しいインスタンスを作成します。 + +09:32.550 --> 09:35.760 +そして、 先ほど見たrunメソッドを呼び出す。 + +09:35.790 --> 09:39.780 +それが、 ちょっとしたミッシングリンクなんだ。 + +09:39.810 --> 09:40.320 +それでいい。 + +09:40.320 --> 09:41.640 +これで実行できるはずだ。 + +09:41.670 --> 09:42.540 +深呼吸をする。 + +09:42.540 --> 09:43.410 +そうしよう。 + +09:46.950 --> 09:49.020 +考えている間、 長い間休止しているのか? 
+ +09:51.060 --> 09:51.930 +分かった。 + +09:51.930 --> 09:53.310 +何が起こっているのか見てみよう。 + +09:53.310 --> 09:55.800 +エージェントフレームワークは初期化中だと言っている。 + +09:55.800 --> 09:58.350 +プランニング・エージェントはスキャナーを初期化している。 + +09:58.380 --> 09:59.850 +エージェントが準備完了。 + +09:59.850 --> 10:02.550 +アンサンブル・エージェントは準備を整える。 + +10:02.550 --> 10:04.170 +だから、 これを見るのはちょっと難しい。 + +10:04.170 --> 10:09.600 +モーダルに接続するスペシャリスト・エージェントの準備を整える。 + +10:10.300 --> 10:12.910 +そしてフロンティア・モデルの準備が整う。 + +10:12.940 --> 10:14.920 +その文の変圧器に負荷をかけなければならない。 + +10:14.920 --> 10:16.360 +そこで起きていることがわかるだろう。 + +10:16.360 --> 10:19.810 +そして、 ランダムフォレスト・エージェントも準備を整える。 + +10:19.810 --> 10:21.340 +それで完了だ。 + +10:21.430 --> 10:24.340 +アンサンブルの準備もメッセンジャーの準備もできている。 + +10:24.340 --> 10:25.810 +そしてすべてが青で統一されている。 + +10:25.840 --> 10:28.360 +ここでは、 Agent Frameworkの準備ができたと表示されます。 + +10:28.360 --> 10:33.130 +すべてのエージェントをインスタンス化して作成し、 セットアップする。 + +10:33.130 --> 10:39.970 +彼らは、 専門家モデルがモーダルに接続されている単一のものを持っている彼らのモデルの重みをロードしている。 + +10:40.390 --> 10:46.180 +そして、 彼らは今、 その、 行動の準備を整えている。 + +10:46.210 --> 10:51.010 +モデルのウェイトが読み込まれ、 走り出した。 + +10:51.040 --> 10:56.110 +スキャナー・エージェントは、 ここで、 あー、 受信したんだけど、 そこでエラーが出てるんだ。 + +10:56.110 --> 10:57.160 +改善するよ。 + +10:57.160 --> 10:59.020 +実際の結果数は表示されない。 + +10:59.020 --> 11:02.290 +印刷の前に "F "が必要なんだ。 + +11:02.620 --> 11:05.080 +ええと、 あなたがご覧になる前に直しておきます。 + +11:05.170 --> 11:07.660 +ええと、 だから、 スキャナー捜査官がその電話をかけているんだ。 + +11:07.780 --> 11:17.960 +そして今、 プランニング・エージェントは契約の値付けをしていて、 私のマシンが温まるまでモーダルで待機している。 + +11:18.050 --> 11:26.420 +あー、 でも料理番組で言うように、 あー、 どういう意味かわかるかな、 あー、 料理番組でオーブンに入れて、 + +11:26.420 --> 11:33.980 +これがさっき入れたやつだ、 と言ってオーブンから出すんだ。 + +11:33.980 --> 11:37.100 +そういうことをされると、 いつも悪巧みだと思う。 + +11:37.190 --> 11:39.800 +でも、 私がこれをやっただけだということを信じてほしい。 + +11:39.800 --> 11:46.070 +数分前にキックオフして、 こっちに走ってきたんだ。 + +11:46.070 --> 11:54.140 +そして、 基本的に、 戻ってきた5つの案件のそれぞれについて、 異なるモデルすべてに一連の呼び出しを行ったことがわかる。 + +11:54.140 --> 11:55.670 +それぞれに目を通すことができる。 + +11:55.700 --> 11:58.490 +アンサンブルモデルは数値を返す。 + +11:58.700 --> 12:01.160 +ええと、 だから、 私たちは働き直すべきだ。 + +12:01.190 
--> 12:06.350 +まずスペシャリスト・モデルが数値を計算し、 次にフロンティア・モデルがラグ・ルックアップで数値を計算し、 + +12:06.350 --> 12:08.990 +次にランダムフォレスト・モデルが計算する。 + +12:08.990 --> 12:10.430 +そしてアンサンブルモデル。 + +12:10.430 --> 12:12.870 +そして、 それは計画担当者の話に戻る。 + +12:13.200 --> 12:15.300 +そして、 このすべてに目を通すことができる。 + +12:15.300 --> 12:20.100 +そして、 これが完了すると、 プッシュ通知が送信された。 + +12:20.340 --> 12:29.700 +それは第13世代i5を搭載した認定整備済みノートパソコンで、 + +12:29.700 --> 12:45.030 +見積もりは925ドル、 560ドルで販売されていたものよりも非常に健全なディスカウントだった。 + +12:45.030 --> 12:46.950 +そうだ。 + +12:46.950 --> 12:50.970 +それがエージェント・フレームワークの走りだった。 + +12:51.210 --> 12:55.980 +そうだね、 そうなるのを見るのは信じられないほど満足なことだよ。 + +12:55.980 --> 13:00.870 +一日中見ていても飽きない。 + +13:00.870 --> 13:04.200 +とても楽しいと言わざるを得ない。 + +13:04.230 --> 13:09.780 +ほら、 オーブンから出すためにそれを置く必要もなかった。 + +13:09.780 --> 13:14.960 +モードが温まると、 さまざまなものを回転させるのが非常に速いのがわかるだろう。 + +13:15.170 --> 13:16.910 +だから、 そこを通るんだ。 + +13:16.910 --> 13:18.020 +何かを思いついたんだ。 + +13:18.020 --> 13:22.790 +そしてもちろん、 今提案していることは、 ちょっと前に提案したこととは違う。 + +13:23.000 --> 13:26.870 +携帯電話にも通知が来たし、 時計にも間違いなく通知が来た。 + +13:27.350 --> 13:34.880 +だから、 私たちはただフレームワーク全体が動き、 私が通知を受けるのを見ただけだ。 + +13:34.880 --> 13:42.980 +そして、 このトレースを読んで、 各エージェントがこの問題を解決するためにどのように協力しているかを見ることができる。 + +13:43.280 --> 13:51.470 +だから、 あとは自律的に常に動くようにするだけだ。 + +13:51.470 --> 13:57.680 +そうしている間に、 私たちはグラディオの使い方を知っているし、 その良さも知っているのだから、 素晴らしいUIを叩きつけることもできるだろう。 + +13:57.710 --> 14:01.190 +そして、 そのすべてを明日やるつもりだ。 + +14:01.250 --> 14:04.460 +でも今は、 もちろん楽しんでもらおうと思う。 + +14:04.460 --> 14:07.670 +とても満足できるものだから、 何度か走ってほしいね。 + +14:07.700 --> 14:08.900 +正直、 大好きだよ。 + +14:08.930 --> 14:14.540 +皆さんも私と同じように楽しんでいることを願っている。 diff --git a/week5/community-contributions/subtitles/srts/59671567/ko_KR.srt b/week5/community-contributions/subtitles/srts/59671567/ko_KR.srt new file mode 100755 index 0000000..4af8416 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59671567/ko_KR.srt @@ -0,0 +1,682 @@ +WEBVTT + +00:00.950 --> 00:04.760 +가장 먼저 눈에 띄는 건 펼쳐놓은 공책이 없다는 거죠 + +00:04.760 --> 00:08.090 +왜냐하면 이번엔 코드만 볼 거니까요 + +00:08.090 --> 
00:11.150 +녹음에 수첩은 안 돼요 + +00:11.150 --> 00:18.710 +이 비디오에서 실제 finally를 살펴보겠습니다 우리가 작성하고 구축하고 + +00:18.710 --> 00:22.880 +실행할 실제 에이전트 프레임워크요 + +00:22.880 --> 00:27.530 +아주 멋진 것 같지만 사실 아주 간단해요 + +00:27.530 --> 00:32.240 +이런 이유 때문에 제가 기성품 패키지를 사용하지 않는 겁니다 + +00:32.240 --> 00:37.880 +보다 고급된 함수를 수행하기 위해 서로 연결하는 것 이상의 것은 없다는 + +00:37.880 --> 00:40.340 +걸 보여드리고 싶었어요 + +00:40.640 --> 00:45.440 +보통 이런 종류의 에이전트 프레임워크를 이야기할 때 데이터베이스 + +00:45.440 --> 00:50.630 +연결을 다룰 수 있는 무언가를 찾습니다 이 경우에는 크롬 데이터 + +00:50.630 --> 00:53.990 +스토어 메모리인데요 + +00:54.080 --> 00:57.590 +지속적인 메모리 감각을 원하죠 + +00:57.770 --> 01:03.270 +채팅이나 인스턴스 사이에서 살아남을 수 있도록요 메모리가 어떻게 작동하는지 + +01:03.270 --> 01:05.040 +이해할 수 있게요 + +01:05.700 --> 01:12.180 +그리고 로깅 같은 것도 말씀드렸듯이 여러분 환경에서 무슨 일이 일어났는지 이해하는 데 + +01:12.180 --> 01:13.620 +좋은 연습이 되죠 + +01:13.620 --> 01:17.610 +사용자 인터페이스 문제도 있을 겁니다 이 프레임워크를 사용할 수 + +01:17.640 --> 01:20.250 +있는 그래디오 UI를 구축해야 하니까요 + +01:20.250 --> 01:21.960 +그걸 처리해야 해요 + +01:21.960 --> 01:26.850 +마지막 것은 다음 시리즈에서 보여드릴게요. Get up + +01:27.030 --> 01:31.500 +지금은 프레임워크를 살펴볼 거예요 + +01:31.500 --> 01:34.080 +그 전에 몇 가지 짚고 넘어갈 게 있어요 + +01:34.080 --> 01:38.580 +먼저 에이전트 폴더를 보면 각각의 다른 클래스를 보았었죠. + +01:38.580 --> 01:40.920 +그건 다른 업무를 상징하죠 + +01:40.920 --> 01:45.450 +모두 에이전트의 서브클래스인 게 보이시죠 + +01:45.570 --> 01:47.100 +이게 뭔지 궁금하실 거예요 + +01:47.130 --> 01:52.260 +서브클래싱하는 에이전트가 무엇인지 모든 기능성 요소가 들어있는지를요 + +01:52.260 --> 01:55.920 +사실 이 슈퍼클래스엔 기능성이라곤 거의 없어요 + +01:55.920 --> 02:00.210 +그냥 추상 수업이에요 + +02:00.270 --> 02:03.820 +메시지를 기록할 수 있죠 + +02:03.910 --> 02:07.090 +메시지를 기록하는 방법은 로깅을 하는 거죠 정보요 + +02:07.120 --> 02:08.530 +잘 아실 거예요 + +02:08.560 --> 02:10.360 +파이썬 을 로깅한 적이 있을 거예요 + +02:10.360 --> 02:11.200 +그래서 예쁘죠 + +02:11.230 --> 02:17.590 +로깅이 익숙하네요 벌목꾼이 있는 곳이면 어디든 정보를 얻을 수 있어요 + +02:17.620 --> 02:18.550 +그리고요 + +02:18.700 --> 02:23.560 +제가 여기서 하는 것 중 조금 다른 건 비트를 입힐 거예요 + +02:23.590 --> 02:27.190 +에이전트의 서브클래스 이름을 추가하세요 + +02:27.190 --> 02:28.840 +그건... 
그건 말이죠 + +02:28.870 --> 02:34.120 +로그 메시지를 보낼 건데 각각 색깔을 지정할 거예요 + +02:34.210 --> 02:39.400 +그러면 어떤 요원이 어떤 역할을 하는지 알 수 있죠 + +02:39.400 --> 02:45.220 +예를 들어, 다시 돌아가서, 전문 에이전트로 가보면, 이름이 있는 것을 볼 + +02:45.220 --> 02:46.300 +수 있어요. + +02:46.330 --> 02:47.560 +특수 요원이죠 + +02:47.560 --> 02:51.310 +색은 빨간색이라 통나무를 쌓을 때 이렇게 되죠 + +02:51.340 --> 02:52.270 +빨간색으로요 + +02:52.270 --> 02:54.610 +제가 안 한 걸 눈치챘을 거예요 + +02:54.640 --> 02:56.980 +로깅을 했어요 셀프.로그를 했어요 + +02:56.980 --> 03:00.070 +슈퍼클래스에서 로그 메시지를 호출할 수 있어요 + +03:00.070 --> 03:04.820 +이 에이전트 슈퍼클래스는 화려한 게 없어요 + +03:04.820 --> 03:06.920 +일관되게 로그인할 수 있었죠 + +03:06.920 --> 03:11.090 +다른 클래스에도 똑같이 함수를 포함할 수 있었어요 + +03:11.090 --> 03:14.480 +그 요원은 특별할 게 없어요 + +03:14.930 --> 03:15.860 +좋아요 + +03:15.860 --> 03:22.250 +비트 워크 중 하나는 메모리 JSON 파일을 보여드리는 거예요 + +03:22.250 --> 03:24.710 +단순한 JSON 기능이죠 + +03:24.710 --> 03:29.150 +존재하지 않았다면 수동으로 생성하는 걸 보실 수 있어요 + +03:29.150 --> 03:32.870 +편집기로 시작합시다 화려한 거 말고 일반 편집기로요 + +03:32.870 --> 03:34.700 +이렇게 생긴 거예요 + +03:34.700 --> 03:36.440 +목록 맞아요 + +03:36.680 --> 03:43.250 +바깥쪽에 목록이 있는 게 보이시죠 JSON blobs 목록이요 + +03:43.250 --> 03:48.320 +각각의 JSON 덩어리는 여러분에게 익숙할 거예요 + +03:48.350 --> 03:49.700 +뭔지 알겠어요? + +03:50.120 --> 03:55.070 +네, 기회 객체가 JSON으로 바뀐 거죠 + +03:55.460 --> 04:02.240 +제품 설명, 가격, URL 등의 정보가 있는 거래가 있고 모델에서 나온 추정값도 + +04:02.240 --> 04:08.370 +있어요 할인 항목도 있는데 간단히 이 숫자에서 이 숫자를 빼죠 + +04:08.400 --> 04:08.880 +이런, 미안해요 + +04:08.910 --> 04:09.630 +그 숫자요 + +04:09.660 --> 04:10.020 +네 + +04:10.380 --> 04:15.300 +실제 가치로 추정할 때 얼마나 할인해 주나요? 
+ +04:15.300 --> 04:20.790 +이 메모리에 세 건의 거래가 드러났어요 + +04:21.060 --> 04:26.730 +JSON 파일에 이렇게 저장하는 건 산업용 강도가 그리 높지 않아요 + +04:26.730 --> 04:32.160 +데이터베이스로 간다면 실행 중인 다수의 에이전트 프레임워크를 처리할 수 있는 뭔가가 되겠죠 + +04:32.160 --> 04:38.220 +하지만 에이전트 프레임워크는 한 번만 이 메모리에 입력할 수 있어요 + +04:38.220 --> 04:42.150 +꼭 필요한 건 아니지만 크롬 도금 작업은 쉽죠 + +04:42.150 --> 04:47.550 +데이터베이스가 이미 있으니 원하시면 그걸 연습으로 삼으세요 + +04:47.880 --> 04:52.620 +생산성이 크게 향상될 거예요 + +04:53.250 --> 05:01.620 +어쨌든 이제 프레임워크 자체를 살펴볼 때가 됐어요 에이전트 프레임워크요 + +05:01.710 --> 05:04.620 +그럼 바로 시작하죠 + +05:06.220 --> 05:11.710 +그리고 올바른 수업을 진행하고 싶어요 앞으로 있을 수업에 너무 많은 걸 알려주지 + +05:11.710 --> 05:12.460 +않게요 + +05:12.460 --> 05:12.970 +여기 있네요 + +05:12.970 --> 05:14.770 +거래 요원 프레임워크예요 + +05:15.130 --> 05:16.540 +네 + +05:16.540 --> 05:24.790 +에이전트 프레임워크는 우선 로깅을 설정하는 거예요 + +05:24.790 --> 05:31.870 +이건 표준 파이썬 으로 누군가 로깅을 할 때 확실히 해두죠 get-turefo는 기본으로 + +05:32.020 --> 05:36.790 +보내지고 몇 가지 구조가 있어요 + +05:36.790 --> 05:43.300 +이건 표준 파이썬 문서예요 제대로 기록하기 위한 상용 문서요 + +05:43.300 --> 05:46.330 +이건 거래 에이전트 프레임워크예요 + +05:46.330 --> 05:51.640 +데이터베이스에 코딩되어 있어요 제가 실수로 몇 번 놓친 거죠 메모리 + +05:51.640 --> 05:55.480 +파일 이름과 함께 여기에 코딩되어 있어요 + +05:55.600 --> 05:58.750 +불이 붙으면 로깅을 시작할 거예요 + +05:58.780 --> 06:00.730 +로그 메시지를 만들 거예요 + +06:00.730 --> 06:02.170 +그럼 부럽죠 + +06:02.200 --> 06:05.770 +이건 우리가 모든 주피터 공책에 항상 쓰는 거예요 + +06:05.770 --> 06:07.520 +이제 이걸 써야 해요 + +06:07.760 --> 06:12.800 +데이터베이스를 생성하거나 접근할 거예요 + +06:13.250 --> 06:17.090 +JSON 파일 메모리에서 읽을 거예요 + +06:17.390 --> 06:25.010 +어, 제품 컬렉션을 받고 기획 에이전트를 설정해서 Get up을 해야 하는 + +06:25.010 --> 06:28.070 +컬렉션을 초기화하는 거죠 + +06:28.100 --> 06:30.110 +그럼 준비가 됐다고 나오죠 + +06:30.620 --> 06:35.930 +이 메서드는 메모리를 읽는데 함석에 적힌 그대로를 해요 + +06:35.930 --> 06:39.140 +파일 이름을 하드코딩하지 않은 걸 보실 수 있어요 + +06:39.140 --> 06:40.160 +정말 잘했어요 + +06:40.190 --> 06:40.700 +저요? 
+ +06:40.820 --> 06:43.880 +JSON에서 로드하는 거죠 + +06:43.880 --> 06:50.030 +이미 존재하지 않는다면 빈 것을 반환합니다 함수도 있으니 + +06:50.240 --> 06:52.130 +괜찮아요 + +06:52.160 --> 07:00.890 +메서드는 메모리를 쓰고 메모리 파일에 이런 기회를 쓰는 거예요 + +07:01.940 --> 07:08.710 +로그 메시지가 있어요 로그 메시지를 보내는 메서드죠 에이전트 프레임워크 태그와 함께요 + +07:09.490 --> 07:14.590 +그리고 기획 에이전트를 시작하는 실행 메서드가 있어요 + +07:14.800 --> 07:20.860 +결과를 받으면 셀프 닷플래너를 호출해요 + +07:21.070 --> 07:22.120 +그렇게 해요 + +07:22.120 --> 07:23.500 +중요한 순간이죠 + +07:23.500 --> 07:26.200 +그때 이 작업을 시작하게 됐죠 + +07:26.200 --> 07:28.750 +보세요 메모리로 지나가요 + +07:28.780 --> 07:32.350 +자신이 아는 기회의 메모리를 지나쳐 가죠 + +07:32.350 --> 07:38.050 +여기에 넣으면 결과를 기록해요 + +07:38.050 --> 07:43.990 +그리고 0이 아니면 새로운 결과가 나오지 않으면 0을 반환할 수도 있다는 거 기억하세요 + +07:43.990 --> 07:50.020 +하지만 실제 결과라면 메모리에 추가하고 메모리를 쓰고 결과를 반환하죠 + +07:50.020 --> 07:51.220 +그게 다예요 + +07:51.220 --> 07:57.040 +마지막 부분은 UI 다음에 나올 겁니다 지금은 프레임워크의 + +07:57.040 --> 07:57.970 +끝이죠 + +07:58.420 --> 07:59.980 +이 프레임워크를 어떻게 실행하죠? + +07:59.980 --> 08:01.690 +명령줄에서부터 실행하는 거죠 + +08:01.690 --> 08:03.100 +이제 진짜 시작이에요 + +08:03.100 --> 08:05.500 +파이썬 으로만 된 코드죠 + +08:05.500 --> 08:09.080 +우린 주피터 세계 밖에 있고 편리함을 위해 인터페이스를 사용하죠 + +08:09.080 --> 08:09.710 +정말이에요 + +08:09.980 --> 08:12.440 +하지만 VSCode나 그런 걸 사용할 수도 있죠 + +08:12.470 --> 08:16.310 +터미널을 작동하려면 플러스 버튼을 눌러요 + +08:16.310 --> 08:20.330 +주피터랩에서도 이렇게 새 터미널을 열 수 있어요 + +08:20.330 --> 08:25.010 +물론 여러분이 원하는 대로 할 수도 있어요 + +08:25.130 --> 08:29.540 +아나콘다 프롬프트일 수도 있고 여러분이 사용할 수도 있죠 + +08:29.540 --> 08:32.240 +Mac을 사용한다면 새 터미널 창을 불러올 수 있어요 + +08:32.330 --> 08:36.380 +하지만 먼저 콘다 환경을 활성화해야 해요 + +08:36.560 --> 08:47.330 +콘다가 LMS를 활성화합니다 그 말은 여러분이 이제 아주 익숙해졌지만 제 환경이 베이스에서 LMS로 + +08:47.330 --> 08:54.350 +바뀝니다 지금 아나콘다 환경에 있으니까요 정말 그게 다인 것 같아요 + +08:54.350 --> 09:01.730 +파이썬 딜 에이전트 프레임워크라고 할 수 있어야 해요 + +09:02.510 --> 09:04.010 +철자를 맞춰요 + +09:06.410 --> 09:07.490 +닷 피요 + +09:07.640 --> 09:08.990 +그게 다일 거예요 + +09:09.000 --> 09:11.670 +그 전에 보여드릴 게 하나 더 있어요 + +09:11.820 --> 09:14.190 +하마터면 잊을 뻔했네요 + +09:14.190 --> 09:16.320 +이 
마지막 선만 남았어요 + +09:16.320 --> 09:20.070 +제가 이걸 보여드리지 않으면 어떻게 될지 궁금하셨을 거예요 + +09:20.460 --> 09:26.700 +끝에서 명확히 해야 할 것은 명령줄에서 실행되어 이 파일이 메인이라면 인스턴스화하고 + +09:26.700 --> 09:32.550 +거래 에이전트 프레임워크의 새 인스턴스를 생성해요 + +09:32.550 --> 09:35.760 +방금 본 실행 메서드죠 + +09:35.790 --> 09:39.780 +빠진 고리가 있네요 설명하자면요 + +09:39.810 --> 09:40.320 +저거로요 + +09:40.320 --> 09:41.640 +이제 실행할 수 있어야 해요 + +09:41.670 --> 09:42.540 +심호흡을 하세요 + +09:42.540 --> 09:43.410 +시작하죠 + +09:46.950 --> 09:49.020 +생각할 시간이 길군요 + +09:51.060 --> 09:51.930 +좋아요 + +09:51.930 --> 09:53.310 +무슨 일인지 보죠 + +09:53.310 --> 09:55.800 +프레임워크 말로는 초기화되고 있대요 + +09:55.800 --> 09:58.350 +계획 담당이 스캐너를 가동 중이에요 + +09:58.380 --> 09:59.850 +에이전트가 준비됐죠 + +09:59.850 --> 10:02.550 +앙상블 에이전트가 준비 중이에요 + +10:02.550 --> 10:04.170 +그래서 비트를 보기가 좀 힘들어요 + +10:04.170 --> 10:09.600 +모달과 연관된 특수 요원이 준비 중이에요 + +10:10.300 --> 10:12.910 +개척지 모델이 준비됐죠 + +10:12.940 --> 10:14.920 +그 문장에 변압기를 로드해야 해요 + +10:14.920 --> 10:16.360 +저기서 일어나는 일이 보이죠 + +10:16.360 --> 10:19.810 +그리고 임의의 숲 관리인도 준비를 하죠 + +10:19.810 --> 10:21.340 +다 됐어요 + +10:21.430 --> 10:24.340 +앙상블도 준비됐고 전달자도 준비됐어요 + +10:24.340 --> 10:25.810 +전부 파란색으로 칠하고요 + +10:25.840 --> 10:28.360 +프레임워크 요원이 준비됐다고 나오네요 + +10:28.360 --> 10:33.130 +인스턴스화와 모든 에이전트를 생성하고 셋업하는 과정을 거쳤어요 + +10:33.130 --> 10:39.970 +전문가용 모델에 맞는 무게추 하나만 실었어요 + +10:40.390 --> 10:46.180 +이제 다들 행동에 나설 준비가 됐어요 + +10:46.210 --> 10:51.010 +모델 웨이트를 읽고 런을 시작하네요 + +10:51.040 --> 10:56.110 +여기 보이는 스캐너 요원은 오류가 있다는 걸 받았어요 + +10:56.110 --> 10:57.160 +더 잘할게요 + +10:57.160 --> 10:59.020 +실제 결과의 수는 출력하지 않아요 + +10:59.020 --> 11:02.290 +인쇄하기 전에 F가 필요해요 + +11:02.620 --> 11:05.080 +보시기 전에 고쳐 놓을게요 + +11:05.170 --> 11:07.660 +스캐너 요원이 판단하는 거죠 + +11:07.780 --> 11:16.400 +기획사에서 가격을 매기고 있고 제 기계가 예열되는 동안 수정을 기다리고 + +11:16.400 --> 11:17.960 +있어요 + +11:18.050 --> 11:26.420 +근데 요리 프로에서 보면 이런 말이 있잖아요 무슨 말인지 아시죠? 
그런 프로 보면 오븐에 + +11:26.420 --> 11:32.270 +뭘 넣잖아요 제가 아까 넣은 건 이렇게 나오는데 그걸 오븐에서 + +11:32.270 --> 11:33.980 +꺼내요 + +11:33.980 --> 11:37.100 +그런 걸 할 때 보면 교활한 속임수 같아요 + +11:37.190 --> 11:39.800 +하지만 내가 방금 했다는 걸 믿어야 해요 + +11:39.800 --> 11:45.440 +몇 분 전에 발로 찼는데 이쪽으로 달려오더라고요 그래서 여기 뒀더니 일을 다 + +11:45.440 --> 11:46.070 +봤어요 + +11:46.070 --> 11:52.700 +기본적으로 반환된 5개의 거래마다 모든 다양한 모델에 일련의 전화를 + +11:52.700 --> 11:54.140 +걸었어요 + +11:54.140 --> 11:55.670 +하나씩 읽어 보세요 + +11:55.700 --> 11:58.490 +앙상블 모델은 숫자를 돌려줘요 + +11:58.700 --> 12:01.160 +그럼 다시 돌아가죠 + +12:01.190 --> 12:06.350 +먼저 전문 모델에서 숫자를 계산하고 그다음은 개척 모델에서 랙 검색을 통해 숫자를 + +12:06.350 --> 12:08.990 +계산해요 그다음은 무작위 숲 모델이죠 + +12:08.990 --> 12:10.430 +앙상블 모델도요 + +12:10.430 --> 12:12.870 +다 기획 담당자 책임이죠 + +12:13.200 --> 12:15.300 +이걸 다 읽어보세요 + +12:15.300 --> 12:20.100 +이게 끝나면 푸시 알림을 보내죠 + +12:20.340 --> 12:29.700 +푸시 알림을 받았어요 어떤 거래에 관한 거였죠 재정비된 인증된 노트북이었어요 + +12:29.700 --> 12:39.090 +13세대 i5를 자랑하고 있었죠 추정 가격은 925달러였어요 560달러였으니 + +12:39.090 --> 12:45.030 +상당히 할인된 가격이죠 + +12:45.030 --> 12:46.950 +자, 됐어요 + +12:46.950 --> 12:50.970 +에이전트 프레임워크가 실행되고 있는 것이었고요 + +12:51.210 --> 12:55.980 +그런 모습을 보면 정말 만족스러워요 + +12:55.980 --> 13:00.870 +보는 게 정말 좋아요 온종일 볼 수도 있어요 + +13:00.870 --> 13:04.200 +솔직히 정말 즐거워요 + +13:04.230 --> 13:07.920 +보세요, 오븐에서 꺼낼 때 이걸 꺼낼 필요도 없었어요 이미 오븐에 + +13:07.920 --> 13:09.780 +넣은 게 지금 익고 있거든요 + +13:09.780 --> 13:14.960 +모듈이 워밍업이 되면 다양한 것들을 빠르게 전환할 수 있어요 + +13:15.170 --> 13:16.910 +여길 통과하는군요 + +13:16.910 --> 13:18.020 +뭔가 떠올랐어요 + +13:18.020 --> 13:22.790 +물론 방금 제안한 것과 다른 내용이죠 + +13:23.000 --> 13:26.870 +방금 휴대폰 알림이 떴는데 시계에도 떴을 거예요 + +13:27.350 --> 13:34.880 +방금 전체 프레임워크가 실행되는 걸 봤죠 제가 알림을 받는 것도요 + +13:34.880 --> 13:42.980 +이 흔적을 읽어보면 각 요원이 이 문제를 해결하기 위해 어떻게 협력하는지 알 수 있죠 + +13:43.280 --> 13:51.470 +이제 남은 건 자율 주행이 가능하게 만드는 거예요 + +13:51.470 --> 13:56.810 +그걸 하는 동안 멋진 UI를 적용하는 게 좋겠죠 그러디오 사용법을 알고 그게 얼마나 좋은지 + +13:56.810 --> 13:57.680 +알거든요 + +13:57.710 --> 14:01.190 +그 모든 걸 내일 할 거예요 + +14:01.250 --> 14:04.460 +하지만 지금은 이 순간을 즐기게 해 줄게요 + 
+14:04.460 --> 14:07.670 +몇 번 더 해보세요 정말 만족스럽거든요 + +14:07.700 --> 14:08.900 +솔직히 마음에 들어요 + +14:08.930 --> 14:14.540 +여러분도 저만큼 즐거우시면 좋겠네요 오늘 마무리는 슬라이드에서 보여드릴게요 diff --git a/week5/community-contributions/subtitles/srts/59673431/en_US.srt b/week5/community-contributions/subtitles/srts/59673431/en_US.srt new file mode 100755 index 0000000..0298b4e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673431/en_US.srt @@ -0,0 +1,94 @@ +WEBVTT + +00:00.950 --> 00:02.300 +And here we have it. + +00:02.300 --> 00:04.100 +The user interface is completed. + +00:04.100 --> 00:11.150 +The extra notification came through on my phone and my watch, and you can see the wonderful trace of + +00:11.150 --> 00:16.070 +what went on here with the surfaced conclusion at the bottom. + +00:16.280 --> 00:22.010 +And it's actually just you just saw it up in the memory and it's just going off right now because it's + +00:22.010 --> 00:24.560 +kicking off another run while we speak. + +00:24.590 --> 00:27.830 +Uh, I left it too long, but there you got the idea. + +00:27.830 --> 00:35.420 +It ran, it completed, we saw the latest memories, and it was able to show the trace of the thinking + +00:35.420 --> 00:37.220 +that it was doing and what was happening. + +00:37.220 --> 00:40.160 +And it will be repeating this every five minutes. + +00:40.160 --> 00:41.990 +I should probably slow that down a bit. + +00:42.110 --> 00:48.380 +And notifying me all through the day and night of new deals. + +00:48.560 --> 00:54.470 +So hopefully this has been, uh, satisfying for you to see it come together. 
+
+00:54.470 --> 00:59.600
+Hopefully you're running this yourself and seeing the same thing, seeing the conversation between the
+
+00:59.600 --> 01:05.870
+different agents in our agent framework and seeing this being surfaced and highlighted in this user
+
+01:05.870 --> 01:13.250
+interface, and seeing how easy, of course, how incredibly easy it is to use Gradio to build something
+
+01:13.280 --> 01:21.200
+off the shelf that is able to give us such great insights into what's going on and give us so much functionality,
+
+01:21.200 --> 01:23.330
+all in a set of screens.
+
+01:23.330 --> 01:28.700
+So with this, I hope that you enjoy working with this agent framework.
+
+01:28.700 --> 01:29.960
+Make it your own.
+
+01:29.990 --> 01:35.960
+I will probably go and fix the fact that the screen should refresh whilst it's also processing, so
+
+01:35.960 --> 01:37.250
+you don't have to wait.
+
+01:37.250 --> 01:42.410
+But if I don't do it, then by all means you should do it yourself and add on more to this user interface
+
+01:42.410 --> 01:43.820
+and make this your own.
+
+01:43.820 --> 01:50.720
+Not only have you hopefully trained your own LLM to be even more accurate than R1, but also you're
+
+01:50.720 --> 01:55.790
+adding more functionality, improving the prompts, and finding other ways that we can bring this to
+
+01:55.820 --> 02:00.290
+life and also add more features and surface them in the UI.
+
+02:00.290 --> 02:06.200
+And then when you've done that, push your code so that I can see it and admire it, and other students
+
+02:06.230 --> 02:08.780
+can take advantage of the changes that you've made.
+
+02:08.990 --> 02:15.680
+Enjoy working with this agent framework, and I will see you for the next video where we wrap things
+
+02:15.680 --> 02:16.220
+up.
diff --git a/week5/community-contributions/subtitles/srts/59673431/ja_JP.srt b/week5/community-contributions/subtitles/srts/59673431/ja_JP.srt new file mode 100755 index 0000000..7293efb --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673431/ja_JP.srt @@ -0,0 +1,73 @@ +WEBVTT + +00:00.950 --> 00:02.300 +そしてここにある。 + +00:02.300 --> 00:04.100 +ユーザー・インターフェースは完成した。 + +00:04.100 --> 00:11.150 +余計な通知は私の携帯と腕時計に届き、 ここで何が起こったのか、 その素晴らしい痕跡を、 + +00:11.150 --> 00:16.070 +浮上した結論とともに下のほうで見ることができる。 + +00:16.280 --> 00:24.560 +そして、 実は今、 メモリにアップされているのを見たばかりで、 ちょうど今発進しているところなんだ。 + +00:24.590 --> 00:27.830 +ええと、 長くなりすぎましたけど、 おわかりいただけたと思います。 + +00:27.830 --> 00:37.220 +走って、 完成して、 最新の記憶を見ることができ、 その思考の痕跡と何が起こっているのかを示すことができた。 + +00:37.220 --> 00:40.160 +そして、 これを5分ごとに繰り返す。 + +00:40.160 --> 00:41.990 +もう少しペースを落とすべきだろう。 + +00:42.110 --> 00:48.380 +そして、 昼夜を問わず新しい取引のお知らせをしてくれる。 + +00:48.560 --> 00:54.470 +だから、 これがうまくいって、 満足してもらえたらうれしい。 + +00:54.470 --> 00:59.600 +エージェントフレームワークの異なるエージェント間の会話を見て、 それがこのユーザーインターフェイスに表示され、 + +00:59.600 --> 01:05.870 +強調されているのを見て、 そして、 Gradioを使って、 何が起こっているのかについての素晴らしい洞察を与えてくれ、 + +01:05.870 --> 01:13.250 +非常に多くの機能を与えてくれるものを、 一連の画面の中で構築することが、 いかに簡単であるか、 もちろん、 + +01:13.280 --> 01:23.330 +信じられないほど簡単であるかを見てください。 + +01:23.330 --> 01:28.700 +ということで、 このエージェント・フレームワークでの仕事を楽しんでほしい。 + +01:28.700 --> 01:29.960 +自分のものにする。 + +01:29.990 --> 01:37.250 +おそらく、 処理中に画面が更新されるように修正すると思う。 + +01:37.250 --> 01:43.820 +でも、 もし私がやらないのなら、 ぜひ自分でやって、 このユーザーインターフェイスにさらに手を加えて、 これを自分のものにしてほしい。 + +01:43.820 --> 01:50.720 +あなた自身のLLMがR1よりもさらに正確なものになるよう、 うまくいけばトレーニングを積んできただけでなく、 + +01:50.720 --> 01:55.790 +より多くの機能を追加し、 プロンプトを改善し、 私たちがこれを実現できる他の方法を見つけ、 + +01:55.820 --> 02:00.290 +さらに機能を追加してUIに表出させる。 + +02:00.290 --> 02:08.780 +それができたら、 コードをプッシュして、 私がそれを見て賞賛できるようにする。 + +02:08.990 --> 02:16.220 +このエージェント・フレームワークでの作業を楽しんでください。 diff --git a/week5/community-contributions/subtitles/srts/59673431/ko_KR.srt b/week5/community-contributions/subtitles/srts/59673431/ko_KR.srt new file mode 100755 
index 0000000..57ef769 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673431/ko_KR.srt @@ -0,0 +1,94 @@ +WEBVTT + +00:00.950 --> 00:02.300 +여기 있네요 + +00:02.300 --> 00:04.100 +사용자 인터페이스가 완성됐어요 + +00:04.100 --> 00:11.150 +제 폰과 시계에 추가 알림이 떴어요 여기서 무슨 일이 있었는지 멋진 + +00:11.150 --> 00:16.070 +흔적을 볼 수 있죠 아래에 드러난 결론도요 + +00:16.280 --> 00:22.010 +방금 메모리에서 보셨는데 지금 작동되고 있어요 우리가 말하는 동안 + +00:22.010 --> 00:24.560 +다른 런이 시작됐거든요 + +00:24.590 --> 00:27.830 +너무 오래 놔뒀네요 대충 아시겠죠? + +00:27.830 --> 00:35.420 +실행되고 완성되며 가장 최근의 기억을 봤죠 자신이 한 생각과 일어난 일을 흔적으로 + +00:35.420 --> 00:37.220 +남겼어요 + +00:37.220 --> 00:40.160 +5분마다 반복될 거예요 + +00:40.160 --> 00:41.990 +비트 속도를 늦춰야겠어요 + +00:42.110 --> 00:48.380 +밤낮으로 새로운 거래에 대해 알려줬고요 + +00:48.560 --> 00:54.470 +완성된 모습을 보니 만족스러우셨길 바라요 + +00:54.470 --> 00:59.600 +여러분이 직접 실행해 같은 걸 보셨길 바랍니다 에이전트 프레임워크에서 + +00:59.600 --> 01:05.870 +다른 에이전트 간의 대화를 보고 이 사용자 인터페이스에서 강조되어 드러나는 + +01:05.870 --> 01:13.250 +걸 보고 얼마나 쉬운지, 물론 Gadio를 이용해 규격화된 뭔가를 구축하는 게 얼마나 쉬운지 + +01:13.280 --> 01:21.200 +보셨길 바랍니다 무슨 일이 벌어지고 있는지에 대한 훌륭한 통찰력을 주고 화면 모음에 많은 기능성을 + +01:21.200 --> 01:23.330 +제공하죠 + +01:23.330 --> 01:28.700 +에이전트 프레임워크와 즐겁게 작업하시길 바라요 + +01:28.700 --> 01:29.960 +당신 것으로 만들어요 + +01:29.990 --> 01:35.960 +프로세싱 중에도 스크린이 새로 고침돼야 한다는 사실을 수정할 겁니다 그러니 기다릴 + +01:35.960 --> 01:37.250 +필요 없어요 + +01:37.250 --> 01:42.410 +하지만 제가 안 하면 여러분이 직접 하셔야 해요 이 사용자 인터페이스에 추가해 여러분만의 + +01:42.410 --> 01:43.820 +것으로 만들어야죠 + +01:43.820 --> 01:50.720 +여러분 자신의 LLM이 R1보다 더 정확하도록 훈련했을 뿐 아니라 더 많은 기능성을 + +01:50.720 --> 01:55.790 +추가하고 프롬프트 성능을 향상하고 이것을 활성화하고 UI + +01:55.820 --> 02:00.290 +표면에 더 많은 기능을 추가할 방법을 찾고 있죠 + +02:00.290 --> 02:06.200 +제가 보고 감탄할 수 있도록 코드를 푸시하세요 다른 학생들이 여러분이 + +02:06.230 --> 02:08.780 +바꾼 걸 이용할 수 있도록요 + +02:08.990 --> 02:15.680 +에이전트 프레임워크와 즐겁게 작업하세요 다음 비디오에서 마무리할 때 + +02:15.680 --> 02:16.220 +뵙죠 diff --git a/week5/community-contributions/subtitles/srts/59673449/en_US.srt b/week5/community-contributions/subtitles/srts/59673449/en_US.srt new file mode 100755 index 
0000000..5646193 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673449/en_US.srt @@ -0,0 +1,115 @@ +WEBVTT + +00:01.220 --> 00:03.740 +Well, I have to tell you that I'm a little bit sad. + +00:03.740 --> 00:07.760 +This is the beginning of the beginning of the end. + +00:07.970 --> 00:10.940 +It's the beginning of our last time together. + +00:10.970 --> 00:11.780 +The final. + +00:11.810 --> 00:13.400 +The end of the finale. + +00:13.400 --> 00:17.330 +The conclusion of our eight week voyage. + +00:17.330 --> 00:21.860 +To take you from the very beginning to LLM mastery. + +00:21.860 --> 00:25.970 +And what a fitting conclusion it should hopefully be. + +00:26.000 --> 00:31.940 +I realize I maybe oversold some of the excitement that was in store, but I do hope that you shared + +00:31.940 --> 00:37.820 +some of it with me, that you also enjoyed this project and that you're ready for the last stages. + +00:37.820 --> 00:43.610 +So what you can do now is so much to it that I couldn't possibly, possibly do it. + +00:43.640 --> 00:46.010 +The injustice of squeezing it into three bullets. + +00:46.010 --> 00:48.590 +So it now dominates most of this page. + +00:48.590 --> 00:56.660 +And obviously what's in store for us today is to get to the top of the mountain and complete the mastering + +00:56.690 --> 01:05.990 +of AI and engineering and LLM engineering, the missing piece for us, if we look at our Agentic AI + +01:06.020 --> 01:13.040 +solution, we covered each of these different pieces, and perhaps the one that we hadn't yet completely + +01:13.040 --> 01:15.590 +ticked the box of was having autonomy. + +01:15.620 --> 01:16.520 +We have got memory. + +01:16.550 --> 01:21.560 +We've in a sense, although there's not much point in having memory if it doesn't, if it only runs + +01:21.560 --> 01:22.310 +once. 
+ +01:22.490 --> 01:30.060 +So yeah, we really need to pull all of this together and have it into a process that is running, and + +01:30.060 --> 01:35.820 +so that you'll be able to kick something off and then just go away and get on with your daily life and + +01:35.820 --> 01:36.900 +get texted. + +01:36.900 --> 01:44.160 +Push notified from time to time as deals come up on the internet that seem worthy of your attention. + +01:44.460 --> 01:50.760 +Uh, and so this one more time was the agentic workflows that we had put together. + +01:50.760 --> 01:58.530 +And of course, we built everything in red in one session when we we built out our, our prices, including + +01:58.530 --> 02:05.310 +the specialist pricing that calls out to modal and the frontier processor that uses our chroma lookup. + +02:05.550 --> 02:10.260 +Um, and then we built an ensemble that takes the weighted average of them all. + +02:10.500 --> 02:16.590 +Uh, and we then assembled it together with our scanner and our messaging and planning so that we'd + +02:16.590 --> 02:18.060 +have everything together. + +02:18.060 --> 02:24.900 +And then last time we used something that was fancily called an agent framework, but was nothing more + +02:24.900 --> 02:27.750 +than a Python script to glue everything together. + +02:27.750 --> 02:32.880 +And of course you can, as I say, get agent frameworks through through various off the shelf frameworks. + +02:32.880 --> 02:38.670 +But most of what they're doing is exactly what we did ourselves, uh, through simple Python. + +02:38.760 --> 02:39.420 +All right. + +02:39.420 --> 02:45.660 +So what remains, what remains is for us to go back with one last time to JupyterLab, where we will + +02:45.660 --> 02:49.080 +start playing with my old friend Gradio. + +02:49.170 --> 02:51.090 +Uh, we will be right there. 
diff --git a/week5/community-contributions/subtitles/srts/59673449/ja_JP.srt b/week5/community-contributions/subtitles/srts/59673449/ja_JP.srt new file mode 100755 index 0000000..c54cf35 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673449/ja_JP.srt @@ -0,0 +1,91 @@ +WEBVTT + +00:01.220 --> 00:03.740 +まあ、 少し寂しいというのが正直なところだ。 + +00:03.740 --> 00:07.760 +これは終わりの始まりの始まりである。 + +00:07.970 --> 00:10.940 +一緒に過ごす最後の時間の始まりだ。 + +00:10.970 --> 00:11.780 +決勝だ。 + +00:11.810 --> 00:13.400 +フィナーレの終わり。 + +00:13.400 --> 00:17.330 +8週間の航海の締めくくり。 + +00:17.330 --> 00:21.860 +LLMの初歩からマスターまで。 + +00:21.860 --> 00:25.970 +そして、 願わくばそれにふさわしい結末であってほしい。 + +00:26.000 --> 00:31.940 +しかし、 その興奮の一端を私と分かち合ってくれたこと、 このプロジェクトを楽しんでくれたこと、 + +00:31.940 --> 00:37.820 +そして最終段階への準備が整ったことを願う。 + +00:37.820 --> 00:43.610 +だから、 今できることは、 僕には到底無理なことなんだ。 + +00:43.640 --> 00:46.010 +それを3つの弾丸に絞ることの不当さ。 + +00:46.010 --> 00:48.590 +だから今、 このページの大半を占めている。 + +00:48.590 --> 00:56.660 +そして今日、 私たちに待ち受けているのは、 山の頂上に到達し、 AIとエンジニアリングとLLMエンジニアリングの習得を完了させることであることは明らかです。 + +00:56.690 --> 01:15.590 +私たちに欠けているピースは、 私たちのAgentic AIソリューションを見ると、 これらの異なるピースをそれぞれカバーしています。 + +01:15.620 --> 01:16.520 +私たちには記憶がある。 + +01:16.550 --> 01:22.310 +一度しか走らないのであれば、 メモリーを持つ意味はあまりない。 + +01:22.490 --> 01:36.900 +そう、 私たちはこれらすべてを統合し、 実行可能なプロセスにする必要がある。 + +01:36.900 --> 01:44.160 +インターネット上で注目するに値するようなお得な情報が出てきたら、 随時プッシュ通知する。 + +01:44.460 --> 01:50.760 +それで、 もう1回、 私たちが作成したエージェントのワークフローを紹介します。 + +01:50.760 --> 01:58.530 +そしてもちろん、 モーダルを呼び出すスペシャリスト・プライスや、 クロマ・ルックアップを使用するフロンティア・プロセッサーなど、 + +01:58.530 --> 02:05.310 +価格を構築したときには、 1回のセッションですべてを赤で構築した。 + +02:05.550 --> 02:10.260 +そして、 それらの加重平均を取るアンサンブルを構築した。 + +02:10.500 --> 02:18.060 +そして、 それをスキャナーやメッセージング、 プランニングと一緒に組み立てて、 すべてを一緒にしたんだ。 + +02:18.060 --> 02:27.750 +そして前回は、 エージェントフレームワークと派手に呼ばれるものを使ったが、 すべてを接着するためのPythonスクリプトに過ぎなかった。 + +02:27.750 --> 02:32.880 +もちろん、 エージェント・フレームワークは、 さまざまな既製フレームワークを通じて入手することができる。 + +02:32.880 --> 02:38.670 +でも、 彼らがやっていることのほとんどは、 単純なPythonを使って僕たち自身がやったことと同じなんだ。 + 
+02:38.760 --> 02:39.420 +分かった。 + +02:39.420 --> 02:49.080 +というわけで、 残るは最後にもう一度JupyterLabに戻って、 私の古い友人であるGradioと遊び始めることだ。 + +02:49.170 --> 02:51.090 +ああ、 すぐに行くよ。 diff --git a/week5/community-contributions/subtitles/srts/59673449/ko_KR.srt b/week5/community-contributions/subtitles/srts/59673449/ko_KR.srt new file mode 100755 index 0000000..3d0d127 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673449/ko_KR.srt @@ -0,0 +1,109 @@ +WEBVTT + +00:01.220 --> 00:03.740 +솔직히 말하면 좀 슬퍼요 비트 박스요 + +00:03.740 --> 00:07.760 +이건 종말의 시작의 시작이에요 + +00:07.970 --> 00:10.940 +마지막 시간이 시작됐어요 + +00:10.970 --> 00:11.780 +결승전요 + +00:11.810 --> 00:13.400 +피날레의 끝이죠 + +00:13.400 --> 00:17.330 +8주간의 항해가 이렇게 끝났네요 + +00:17.330 --> 00:21.860 +초기부터 LLM 마스터까지 가르쳐 드리죠 + +00:21.860 --> 00:25.970 +아주 적절한 결론이죠 + +00:26.000 --> 00:31.940 +제가 너무 기대감을 부풀린 것 같지만 그 기대감을 저와 공유하셨으면 + +00:31.940 --> 00:37.820 +해요 이 프로젝트를 즐기셨고 마지막 단계로 갈 준비가 되셨죠 + +00:37.820 --> 00:43.610 +지금 할 수 있는 건 제가 할 수 없는 거예요 + +00:43.640 --> 00:46.010 +총알 세 발로 쪼개다니 불공평해요 + +00:46.010 --> 00:48.590 +그래서 이 페이지 대부분을 차지하고 있죠 + +00:48.590 --> 00:56.660 +오늘 우리는 정상에 올라서 인공지능과 LLM 공학에 통달해야 합니다 + +00:56.690 --> 01:05.990 +에이전틱 인공지능 솔루션을 보면 우리가 놓친 부분이 있습니다 각각의 요소를 + +01:06.020 --> 01:13.040 +다뤘는데 자율성을 갖는다는 부분에서 아직 미흡했던 부분이 + +01:13.040 --> 01:15.590 +있죠 + +01:15.620 --> 01:16.520 +메모리가 있어요 + +01:16.550 --> 01:22.310 +어떤 면에선 메모리를 갖는 게 별 의미가 없어요 한 번만 실행한다면요 + +01:22.490 --> 01:30.060 +네, 이 모든 걸 함께 끌어모아 실행 중인 프로세스에 넣어야 해요 그래야 여러분이 + +01:30.060 --> 01:36.900 +뭔가를 시작하고 그냥 가서 일상을 보낼 수 있죠 문자도 받고요 + +01:36.900 --> 01:44.160 +인터넷에 관심 가질 만한 거래가 올라오면 가끔씩 알려주세요 + +01:44.460 --> 01:50.760 +에이전틱 워크플로를 다시 한 번 살펴봤어요 + +01:50.760 --> 01:58.530 +한 세션에서 모든 것을 빨간색으로 설정했습니다. Modal과 크로마 검색을 + +01:58.530 --> 02:05.310 +이용한 프론티어 프로세서의 전문가 가격도 포함해서요. 
+ +02:05.550 --> 02:10.260 +그리고 모든 참가자의 평균을 반영하는 앙상블을 만들었어요 + +02:10.500 --> 02:16.590 +스캐너로 정보를 수집하고 메시지를 보내고 계획을 세워서 모든 걸 하나로 + +02:16.590 --> 02:18.060 +모았어요 + +02:18.060 --> 02:24.900 +그리고 마지막으로 사용했던 것은 에이전트 프레임워크라는 것이었는데 파이썬 을 이용해서 + +02:24.900 --> 02:27.750 +모든 것을 하나로 연결했어요 + +02:27.750 --> 02:32.880 +그리고 물론 다양한 규격 프레임워크를 통해 에이전트 프레임워크를 get 할 수 있어요 + +02:32.880 --> 02:38.670 +하지만 대부분의 작업은 저희가 심플 파이썬 으로 했던 것과 동일해요 + +02:38.760 --> 02:39.420 +좋아요 + +02:39.420 --> 02:45.660 +이제 남은 건 마지막으로 유피터랩으로 돌아가는 겁니다 제 + +02:45.660 --> 02:49.080 +오랜 친구 그라디오와 함께요 + +02:49.170 --> 02:51.090 +금방 갈게요 diff --git a/week5/community-contributions/subtitles/srts/59673595/en_US.srt b/week5/community-contributions/subtitles/srts/59673595/en_US.srt new file mode 100755 index 0000000..890ce35 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673595/en_US.srt @@ -0,0 +1,283 @@ +WEBVTT + +00:00.890 --> 00:03.800 +That concludes a mammoth project. + +00:03.830 --> 00:05.780 +Three weeks in the making. + +00:05.780 --> 00:12.440 +In the course of those three weeks, starting with the data curation, the working with frontier models, + +00:12.440 --> 00:20.360 +and then ending with the complete user interface in Gradio with even that very unnecessary but great + +00:20.360 --> 00:27.650 +plot 3D chart on the bottom right, together with the trace from the agents and the results of the memory. + +00:27.770 --> 00:30.110 +Um, and all of it came together. + +00:30.110 --> 00:32.360 +Remember, the user interface was not really the point. + +00:32.360 --> 00:34.820 +The user interface was a bit extra, so we can monitor it. + +00:34.820 --> 00:41.420 +The main point is that that just runs and it just keeps running, and as it runs, it will be continually + +00:41.420 --> 00:47.090 +notifying me every few minutes with a new deal that it hasn't surfaced before. + +00:47.150 --> 00:55.070 +And that is the conclusion of a very satisfying, great project that we have built end to end. 
+
+00:55.910 --> 01:03.310
+And so with that, let's take a moment to do a retro of the last eight weeks.
+
+01:03.310 --> 01:04.510
+One more time.
+
+01:04.510 --> 01:10.930
+I've gone on about this so many times, and so you'll be happy, or maybe a bit sad that this is the
+
+01:10.930 --> 01:14.590
+final retrospective you started eight weeks ago.
+
+01:14.590 --> 01:23.260
+Over on the left, we wanted to get to being an LLM engineer, someone who had mastered LLM engineering,
+
+01:23.260 --> 01:27.070
+highly proficient, advanced, and this is how we got there.
+
+01:27.070 --> 01:29.530
+In the first week we played with lots of models.
+
+01:29.530 --> 01:32.920
+We found out how many times the letter A appeared in sentences.
+
+01:32.920 --> 01:35.920
+We took a quick look at o1 preview amongst other things.
+
+01:35.920 --> 01:42.610
+In week two we first saw Gradio and we played with some multi-modality, which was fun.
+
+01:42.610 --> 01:46.600
+We also saw an early version of agentization.
+
+01:46.600 --> 01:49.540
+In week three we got stuck into Hugging Face.
+
+01:49.540 --> 01:52.750
+We had pipelines, we had tokenizers, we had models.
+
+01:52.750 --> 01:55.810
+In week four we got deeper into Hugging Face.
+
+01:55.810 --> 02:02.710
+We selected LLMs, we generated code and we had that remarkable project, with the 60,000 times performance
+
+02:02.710 --> 02:04.480
+improvement in week five.
+
+02:04.510 --> 02:13.930
+We built our RAG solution for Insurellm, which used Chroma, and also briefly we used FAISS as well
+
+02:14.170 --> 02:16.390
+and created our expert.
+
+02:16.390 --> 02:19.210
+And maybe you did the big project associated with that.
+
+02:19.240 --> 02:24.670
+Week six we fine tuned a frontier model, although most of week six was spent curating data.
+
+02:24.670 --> 02:26.950
+But that is such an important activity.
+
+02:27.160 --> 02:27.910
+Week seven.
+
+02:27.910 --> 02:34.600
+We did fine tune an open source model that then beat the frontier, and in week eight, we packaged
+
+02:34.600 --> 02:43.480
+it together to an agentic AI solution complete with seven agents and a user interface.
+
+02:43.480 --> 02:45.700
+And it was fabulous.
+
+02:45.700 --> 02:48.580
+So that was the journey.
+
+02:49.600 --> 02:52.270
+I need to take a moment to thank you.
+
+02:52.300 --> 02:55.750
+Thank you so much for staying through to the end.
+
+02:55.780 --> 03:00.160
+You can't, I can't I can't explain how much I appreciate it.
+
+03:00.190 --> 03:06.900
+It's really, uh, so, so wonderful to have had people come all the way through the course, gone through
+
+03:06.900 --> 03:12.330
+the eight week journey, and take advantage of everything that we've been doing and get to this point.
+
+03:12.330 --> 03:17.910
+And I've, I've heard from several of you along the way, and it's been really, really rewarding for
+
+03:17.910 --> 03:19.530
+me to experience this.
+
+03:19.560 --> 03:21.330
+Uh, super grateful.
+
+03:21.480 --> 03:23.250
+I hope you've enjoyed it.
+
+03:23.280 --> 03:25.350
+I obviously I've enjoyed it a lot.
+
+03:25.380 --> 03:26.280
+Uh, far too much.
+
+03:26.310 --> 03:28.020
+I hope you've enjoyed it as well.
+
+03:28.050 --> 03:29.820
+I really hope you can stay in touch.
+
+03:29.820 --> 03:32.340
+By all means, please do LinkedIn with me if you're open to that.
+
+03:32.340 --> 03:34.470
+If you're okay with that, I'm very much welcome.
+
+03:34.470 --> 03:35.880
+LinkedIn connections.
+
+03:35.880 --> 03:40.890
+And we can have a community, um, and message me if you've got to this point, I definitely want to
+
+03:40.890 --> 03:41.550
+hear it.
+
+03:41.700 --> 03:44.160
+Uh, and of course, you've got this big challenge.
+
+03:44.160 --> 03:48.150
+Now, can you take what you've learned and use it to build your own project?
+
+03:48.180 --> 03:52.260
+Maybe that idea I had about using the finance data would be an interesting one.
+
+03:52.290 --> 03:55.230
+See if you can build something that could make some money.
+
+03:55.230 --> 03:57.840
+If you do, then I expect a lunch out of it.
+
+03:57.870 --> 03:59.340
+At the very least, perhaps.
+
+03:59.550 --> 04:01.280
+Uh, but that would be a fun challenge.
+
+04:01.520 --> 04:04.010
+Whatever you do with it, I want to hear about it.
+
+04:04.040 --> 04:10.130
+If you built a great platform that is using some of this learning, then please share it.
+
+04:10.160 --> 04:16.940
+I'd love other students to see that too, and it's great to have that kind of output as a tiny little
+
+04:16.940 --> 04:17.360
+extra.
+
+04:17.390 --> 04:21.170
+I don't know if you remember all the way, way, way, way back in week one, I did mention there was
+
+04:21.170 --> 04:25.100
+going to be a little extra juicy nugget at the very end, and this is what it is.
+
+04:25.130 --> 04:30.590
+I wanted to tell you that I did a personal project where I fine tuned an LLM.
+
+04:30.590 --> 04:37.880
+It was in fact a llama two LLM from the beginning of this year, uh, on all of my text message history.
+
+04:37.910 --> 04:45.050
+It turns out that I have 240,000 text messages that have built up over time on my iPhone, uh, since
+
+04:45.050 --> 04:47.300
+I had the first iPhone some time ago.
+
+04:47.420 --> 04:53.180
+And so I had a lot of text message history, and I was able to use that to train llama two to make a
+
+04:53.180 --> 04:54.800
+simulation of me.
+
+04:54.890 --> 04:59.600
+Uh, and there's a write up on my website, on my blog, edwarddonner.com.
+
+04:59.600 --> 05:03.200
+You can take a look and there's instructions for how you can do it too.
+
+05:03.230 --> 05:09.560
+And of course, I was using llama two and now llama 3.1 is so much better along with some of the others
+
+05:09.590 --> 05:11.030
+like Qwen and so on.
+ +05:11.210 --> 05:16.640 +So you could definitely have a stab at this, and you will probably have even better results than I + +05:16.640 --> 05:16.880 +had. + +05:16.910 --> 05:19.010 +And the results I had were spooky. + +05:19.310 --> 05:21.230 +They really were very good indeed. + +05:22.310 --> 05:27.410 +And so with that, I have to bring up the final slide. + +05:27.470 --> 05:29.300 +Congratulations. + +05:29.690 --> 05:32.270 +I hope that you're proud of what you've accomplished. + +05:32.300 --> 05:38.150 +I hope you do feel that sense that you have now reached an advanced point in your learning. + +05:38.180 --> 05:41.120 +You have got to the summit of the mountain. + +05:41.360 --> 05:43.610 +I'm so, so very happy. + +05:43.820 --> 05:51.200 +And I hope, I really, really hope that you're able to take this and use it in your day job, in your + +05:51.200 --> 05:52.580 +career to move forwards. + +05:52.580 --> 05:54.860 +And I very much want to hear all about it. + +05:54.860 --> 05:56.540 +So do stay in touch. + +05:56.540 --> 05:58.280 +Thank you once again. + +05:58.280 --> 06:01.250 +And a huge congratulations. 
diff --git a/week5/community-contributions/subtitles/srts/59673595/ja_JP.srt b/week5/community-contributions/subtitles/srts/59673595/ja_JP.srt new file mode 100755 index 0000000..00f1808 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673595/ja_JP.srt @@ -0,0 +1,247 @@ +WEBVTT + +00:00.890 --> 00:03.800 +これでマンモスプロジェクトは終了だ。 + +00:03.830 --> 00:05.780 +構想3週間。 + +00:05.780 --> 00:12.440 +この3週間の間に、 データのキュレーションから始まり、 フロンティアモデルでの作業、 + +00:12.440 --> 00:20.360 +そして、 エージェントからのトレースとメモリの結果とともに、 Gradioの完全なユーザーインターフェイスと、 + +00:20.360 --> 00:27.650 +右下の非常に不要だが素晴らしいプロット3Dチャートが完成した。 + +00:27.770 --> 00:30.110 +そして、 そのすべてが一緒になった。 + +00:30.110 --> 00:32.360 +覚えておいてほしいのは、 ユーザーインターフェイスは、 本当は重要ではなかったということだ。 + +00:32.360 --> 00:34.820 +ユーザー・インターフェイスは少し余分だったので、 モニターすることができる。 + +00:34.820 --> 00:41.420 +最大のポイントは、 それがただ実行され、 ただ実行され続けるということである。 そして、 それが実行されるにつれて、 + +00:41.420 --> 00:47.090 +数分ごとに、 まだ表面化していない新しい取引を絶えず通知してくる。 + +00:47.150 --> 00:55.070 +そしてこれが、 私たちが最後まで作り上げた、 非常に満足のいく素晴らしいプロジェクトの結論である。 + +00:55.910 --> 01:03.310 +それでは、 この8週間を振り返ってみよう。 + +01:03.310 --> 01:04.510 +もう1度だけ。 + +01:04.510 --> 01:10.930 +何度も繰り返してきたことだから、 8週間前に始めた回顧が今回で最後となるのは、 + +01:10.930 --> 01:14.590 +嬉しいような、 ちょっと寂しいような。 + +01:14.590 --> 01:27.070 +左側では、 私たちはLMエンジニア、 つまりLMエンジニアリングをマスターし、 高度に熟達した上級者になりたかった。 + +01:27.070 --> 01:29.530 +最初の週は、 たくさんのモデルを使ってプレーした。 + +01:29.530 --> 01:32.920 +文中にAという文字が何回出てくるかを調べた。 + +01:32.920 --> 01:35.920 +私たちは、 とりわけプレビューをざっと見てみた。 + +01:35.920 --> 01:42.610 +週目にはグラデーションを初めて見て、 マルチモダリティで遊んだ。 + +01:42.610 --> 01:46.600 +エージェント化の初期バージョンも見た。 + +01:46.600 --> 01:49.540 +第3週は、 ハグにはまった。 + +01:49.540 --> 01:52.750 +パイプラインがあり、 トークナイザーがあり、 モデルがあった。 + +01:52.750 --> 01:55.810 +第4週では、 ハグする顔をより深く追求した。 + +01:55.810 --> 02:04.480 +私たちはLlmsを選択し、 コードを生成し、 5週目には6万時間のパフォーマンス向上という驚くべきプロジェクトを実現した。 + +02:04.510 --> 02:16.390 +インサートフィルムのために、 クロマを使ったラグ・ソリューションを作った。 + +02:16.390 --> 02:19.210 +それに付随する大きなプロジェクトもやったかもしれない。 + +02:19.240 --> 02:24.670 +第6週はフロンティアモデルの微調整を行ったが、 
その大半はデータのキュレーションに費やされた。 + +02:24.670 --> 02:26.950 +でも、 それはとても重要な活動なんだ。 + +02:27.160 --> 02:27.910 +第7週 + +02:27.910 --> 02:43.480 +そして8週目には、 それを7つのエージェントとユーザー・インターフェースを備えた遺伝的AIソリューションとしてパッケージ化した。 + +02:43.480 --> 02:45.700 +そして素晴らしかった。 + +02:45.700 --> 02:48.580 +それが旅だった。 + +02:49.600 --> 02:52.270 +この場を借りてお礼を言わせてください。 + +02:52.300 --> 02:55.750 +最後まで残ってくれて本当にありがとう。 + +02:55.780 --> 03:00.160 +どれだけ感謝しているか説明できないよ。 + +03:00.190 --> 03:06.900 +コースを通して、 8週間の旅を通して、 私たちがやってきたことをすべて利用して、 ここまでたどり着いた人たちがいるのは、 + +03:06.900 --> 03:12.330 +本当に、 とても素晴らしいことだ。 + +03:12.330 --> 03:19.530 +その過程で、 何人かの皆さんからお話を伺いましたが、 このような経験をすることができて、 本当に本当にやりがいがありました。 + +03:19.560 --> 03:21.330 +とても感謝している。 + +03:21.480 --> 03:23.250 +楽しんでいただけたなら幸いだ。 + +03:23.280 --> 03:25.350 +もちろん、 とても楽しんでいるよ。 + +03:25.380 --> 03:26.280 +ああ、 あまりにも多すぎる。 + +03:26.310 --> 03:28.020 +楽しんでいただけたなら幸いだ。 + +03:28.050 --> 03:29.820 +これからも連絡を取り合ってほしい。 + +03:29.820 --> 03:32.340 +ぜひ、 それを受け入れてくれるなら、 私と一緒にLinkedInをやってほしい。 + +03:32.340 --> 03:34.470 +それでよければ、 大歓迎だよ。 + +03:34.470 --> 03:35.880 +リンクトインのコネクション + +03:35.880 --> 03:41.550 +そして私たちはコミュニティを持つことができる、 うーん、 もしあなたがここまでたどり着いたのなら、 私にメッセージをください。 + +03:41.700 --> 03:44.160 +そしてもちろん、 あなたにはこの大きな挑戦がある。 + +03:44.160 --> 03:48.150 +さて、 あなたは学んだことを自分のプロジェクトに生かすことができるだろうか? 
+ +03:48.180 --> 03:52.260 +ファイナンスのデータを使うというアイデアは面白いかもしれない。 + +03:52.290 --> 03:55.230 +お金になりそうなものを作れるかどうか見てみよう。 + +03:55.230 --> 03:57.840 +もしそうなら、 私はランチを期待している。 + +03:57.870 --> 03:59.340 +少なくとも、 おそらくは。 + +03:59.550 --> 04:01.280 +あー、 でもそれは楽しい挑戦だね。 + +04:01.520 --> 04:04.010 +何をするにしても、 その話を聞きたい。 + +04:04.040 --> 04:10.130 +もし、 あなたがこの学習の一部を利用して素晴らしいプラットフォームを構築したのであれば、 ぜひシェアしてほしい。 + +04:10.160 --> 04:17.360 +他の生徒たちにもぜひ見てもらいたいし、 そういうアウトプットを小さなおまけとして持っているのは素晴らしいことだ。 + +04:17.390 --> 04:21.170 +第1週目に、 最後の最後にちょっとしたおまけがあると言ったのを覚えているかどうかわからないが、 + +04:21.170 --> 04:25.100 +それがこれだ。 + +04:25.130 --> 04:30.590 +私は個人的なプロジェクトでLLMを微調整したことをお伝えしたかったのです。 + +04:30.590 --> 04:37.880 +実際、 今年の初めからラマ2世LLMだった。 + +04:37.910 --> 04:47.300 +私のiPhoneには24万通のテキストメッセージがあることがわかった。 + +04:47.420 --> 04:54.800 +それで、 たくさんのテキストメッセージの履歴があって、 それを使ってラマ2世に私のシミュレーションをさせることができた。 + +04:54.890 --> 04:59.600 +僕のウェブサイト、 エドワード・ドット・コムのブログにも記事があるよ。 + +04:59.600 --> 05:03.200 +見てみればいいし、 やり方も書いてある。 + +05:03.230 --> 05:11.030 +そしてもちろん、 僕はllama 2を使っていて、 今はllama 3を使っている。 1は、 クワンなど他の選手と並んでとても良い。 + +05:11.210 --> 05:16.880 +そうすれば、 私よりももっといい結果が得られるだろう。 + +05:16.910 --> 05:19.010 +そして、 私が得た結果は不気味なものだった。 + +05:19.310 --> 05:21.230 +本当に素晴らしかった。 + +05:22.310 --> 05:27.410 +それでは最後のスライドをご覧ください。 + +05:27.470 --> 05:29.300 +おめでとう。 + +05:29.690 --> 05:32.270 +自分が成し遂げたことを誇りに思ってほしい。 + +05:32.300 --> 05:38.150 +そのような感覚を感じてほしい。 + +05:38.180 --> 05:41.120 +あなたは山頂に着いた。 + +05:41.360 --> 05:43.610 +とても、 とても嬉しい。 + +05:43.820 --> 05:52.580 +そして、 あなたがこれを本業やキャリアに活かして前進してくれることを心から願っている。 + +05:52.580 --> 05:54.860 +そして、 私はそのすべてを聞きたいと思っている。 + +05:54.860 --> 05:56.540 +だから、 連絡を取り合ってほしい。 + +05:56.540 --> 05:58.280 +改めてありがとう。 + +05:58.280 --> 06:01.250 +そして、 本当におめでとう。 diff --git a/week5/community-contributions/subtitles/srts/59673595/ko_KR.srt b/week5/community-contributions/subtitles/srts/59673595/ko_KR.srt new file mode 100755 index 0000000..c7018a4 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673595/ko_KR.srt @@ -0,0 
+1,277 @@ +WEBVTT + +00:00.890 --> 00:03.800 +대규모 프로젝트가 완성됐어요 + +00:03.830 --> 00:05.780 +3주나 걸렸어요 + +00:05.780 --> 00:12.440 +그 3주 동안 데이터 큐레이션을 시작으로 프론티어 모델 작업을 하고 + +00:12.440 --> 00:20.360 +그라디오의 사용자 인터페이스를 완성했습니다 불필요하지만 훌륭한 플롯 3D + +00:20.360 --> 00:27.650 +차트인 우측 하단에 요원의 흔적과 메모리의 결과가 포함돼 있죠 + +00:27.770 --> 00:30.110 +모든 게 잘 맞아떨어졌어요 + +00:30.110 --> 00:32.360 +기억하세요 사용자 인터페이스는 요점이 아니었어요 + +00:32.360 --> 00:34.820 +사용자 인터페이스가 좀 특이해서 비트를 모니터할 수 있어요 + +00:34.820 --> 00:41.420 +중요한 건 그게 계속 실행된다는 겁니다 실행되는 동안 몇 분마다 + +00:41.420 --> 00:47.090 +계속 제게 알려요 전에 나타나지 않은 새 계약과 함께요 + +00:47.150 --> 00:55.070 +끝에서 끝까지 우리가 만든 아주 만족스럽고 훌륭한 프로젝트의 결말이죠 + +00:55.910 --> 01:03.310 +그럼 지난 8주간의 과거를 되짚어 보도록 하죠 + +01:03.310 --> 01:04.510 +한 번 더요 + +01:04.510 --> 01:10.930 +비트코인 얘기를 정말 많이 했는데 기쁘기도 하고 슬프기도 할 거예요 + +01:10.930 --> 01:14.590 +8주 전에 시작한 마지막 회고니까요 + +01:14.590 --> 01:23.260 +왼쪽에서 LM 엔지니어가 되고 싶었어요 LM 엔지니어링을 통달하고 고도로 숙련되고 + +01:23.260 --> 01:27.070 +진보한 사람요 이렇게 된 거예요 + +01:27.070 --> 01:29.530 +첫 주에는 모델들과 많이 놀았어요 + +01:29.530 --> 01:32.920 +A가 문장에 몇 번이나 나왔는지 알아냈어요 + +01:32.920 --> 01:35.920 +미리 보기와 함께 다른 것들도 살펴봤죠 + +01:35.920 --> 01:42.610 +2주 차에는 그러데이션을 처음 접했는데 다중 모듈을 활용해서 재미있게 작업했어요 + +01:42.610 --> 01:46.600 +요원 아이즈의 초기 버전도 봤죠 + +01:46.600 --> 01:49.540 +3주 차에는 얼굴 껴안기가 유행이었죠 + +01:49.540 --> 01:52.750 +파이프라인, 토큰라이저, 모델 등이 있었죠 + +01:52.750 --> 01:55.810 +4주 차에는 얼굴을 더 많이 껴안았어요 + +01:55.810 --> 02:02.710 +Lms를 선택하고 코드를 생성해 놀라운 프로젝트를 진행했습니다 5주 차에 6만 시간 + +02:02.710 --> 02:04.480 +성능 향상을 기록했죠 + +02:04.510 --> 02:13.930 +필름 삽입용 래그 솔루션을 만들었는데 크로마를 사용했고 얼굴도 잠깐 사용했어요 + +02:14.170 --> 02:16.390 +전문가도 만들었죠 + +02:16.390 --> 02:19.210 +그것과 관련된 큰 프로젝트를 맡았을지도 모르죠 + +02:19.240 --> 02:24.670 +6주 차에는 개척 시대 모델을 미세 조정했습니다 하지만 6주 차 대부분은 데이터를 수집하느라 썼죠 + +02:24.670 --> 02:26.950 +하지만 그건 중요한 활동이잖아요 + +02:27.160 --> 02:27.910 +7주째예요 + +02:27.910 --> 02:34.600 +미개척지를 돌파한 오픈 소스 모델을 완성했고 8주 차에 유전자 + +02:34.600 --> 02:43.480 +인공지능 솔루션에 7개의 에이전트와 사용자 인터페이스를 결합했죠 + +02:43.480 --> 02:45.700 +정말 멋졌어요 + 
+02:45.700 --> 02:48.580 +그게 여정이었어요 + +02:49.600 --> 02:52.270 +잠깐 감사의 말을 해야겠어요 + +02:52.300 --> 02:55.750 +끝까지 함께해 줘서 고마워요 + +02:55.780 --> 03:00.160 +얼마나 감사한지 말로 다 못 해요 + +03:00.190 --> 03:06.900 +정말 멋져요 8주간의 여정을 거쳐 여기까지 온 모든 분들과 함께해서 정말 기쁩니다 + +03:06.900 --> 03:12.330 +우리가 해온 모든 걸 활용해서 여기까지 온 거죠 Get up + +03:12.330 --> 03:17.910 +그동안 여러 분께 이야기를 들었는데 이번 경험을 통해 정말 큰 + +03:17.910 --> 03:19.530 +보람을 느꼈어요 + +03:19.560 --> 03:21.330 +정말 감사해요 + +03:21.480 --> 03:23.250 +즐거우셨길 바라요 + +03:23.280 --> 03:25.350 +물론 정말 즐거웠어요 + +03:25.380 --> 03:26.280 +너무 많이요 + +03:26.310 --> 03:28.020 +여러분도 즐거우셨길 바라요 + +03:28.050 --> 03:29.820 +계속 연락하고 지냈으면 좋겠어요 + +03:29.820 --> 03:32.340 +괜찮다면 링크드인에서 저와 함께 일하세요 + +03:32.340 --> 03:34.470 +그래도 괜찮으시다면 전 환영이에요 + +03:34.470 --> 03:35.880 +링크드인 연결망요 + +03:35.880 --> 03:40.890 +공동체를 만들 수 있어요 여기까지 오면 메시지 주세요 꼭 듣고 + +03:40.890 --> 03:41.550 +싶어요 + +03:41.700 --> 03:44.160 +물론 큰 도전도 있죠 + +03:44.160 --> 03:48.150 +지금까지 배운 걸 자신의 프로젝트에 활용할 수 있겠어요? + +03:48.180 --> 03:52.260 +재무 자료를 이용하자는 제 아이디어가 흥미로울지도 몰라요 + +03:52.290 --> 03:55.230 +돈이 될 만한 걸 만들어 봐요 + +03:55.230 --> 03:57.840 +그럼 점심으로 사 주세요 + +03:57.870 --> 03:59.340 +적어도 그럴 거예요 + +03:59.550 --> 04:01.280 +하지만 재미있는 도전이 될 거예요 + +04:01.520 --> 04:04.010 +그 돈으로 뭘 하든 내게도 알려줘요 + +04:04.040 --> 04:10.130 +이 학습을 이용해 훌륭한 플랫폼을 구축했다면 공유하세요 + +04:10.160 --> 04:17.360 +다른 학생들도 그걸 봤으면 해요 그런 출력을 작은 엑스트라로 갖는 건 멋진 일이죠 + +04:17.390 --> 04:21.170 +기억하실지 모르겠지만 아주 오래전 첫째 주에 제가 마지막에 + +04:21.170 --> 04:25.100 +아주 흥미진진한 게 나올 거라고 했었죠 바로 이거예요 + +04:25.130 --> 04:30.590 +제가 개인적으로 LLM을 조율한 프로젝트가 있어요 + +04:30.590 --> 04:37.880 +올해 초부터 문자 메시지 기록에 라마 2개의 LLM이 있었어요 + +04:37.910 --> 04:45.050 +문자 메시지가 240,000개나 왔더라고요 시간이 지나면서 아이폰에 쌓인 거죠 아이폰이 + +04:45.050 --> 04:47.300 +처음 생긴 지 좀 된 후로요 + +04:47.420 --> 04:53.180 +문자 기록이 아주 많았는데 그걸 이용해서 라마 2를 훈련해서 제 시뮬레이션을 + +04:53.180 --> 04:54.800 +만들었어요 + +04:54.890 --> 04:59.600 +제 블로그 에드워드 닷컴에도 글이 올라왔어요 + +04:59.600 --> 05:03.200 +어떻게 하는지도 설명서가 나와 있어요 + +05:03.230 --> 05:09.560 +라마 2에서 라마 3으로 
바뀌었죠 1이 훨씬 나아요 콴 같은 다른 선수들과 + +05:09.590 --> 05:11.030 +비교하면요 + +05:11.210 --> 05:16.880 +그러니 여러분도 시도해 보세요 제가 했던 것보다 더 좋은 결과가 나올 거예요 + +05:16.910 --> 05:19.010 +결과는 소름 끼쳤어요 + +05:19.310 --> 05:21.230 +정말 잘하더군요 + +05:22.310 --> 05:27.410 +이제 마지막 슬라이드를 불러올게요 + +05:27.470 --> 05:29.300 +축하해요 + +05:29.690 --> 05:32.270 +당신이 이룬 성과에 자부심을 느끼길 바라요 + +05:32.300 --> 05:38.150 +여러분이 이제 학습의 수준에 도달했다고 느끼길 바라요 + +05:38.180 --> 05:41.120 +산 정상에 올라야 해요 + +05:41.360 --> 05:43.610 +정말 행복해요 + +05:43.820 --> 05:51.200 +진심으로 바라건대 이 경험을 본업과 경력에 활용해서 앞으로 나아가길 + +05:51.200 --> 05:52.580 +바라요 + +05:52.580 --> 05:54.860 +전부 다 듣고 싶어요 + +05:54.860 --> 05:56.540 +계속 연락해요 + +05:56.540 --> 05:58.280 +다시 한번 고마워요 + +05:58.280 --> 06:01.250 +그리고 정말 축하해요 diff --git a/week5/community-contributions/subtitles/srts/59673639/en_US.srt b/week5/community-contributions/subtitles/srts/59673639/en_US.srt new file mode 100755 index 0000000..277af7f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673639/en_US.srt @@ -0,0 +1,352 @@ +WEBVTT + +00:00.680 --> 00:08.180 +And welcome now to the code for our user interface, which we will find in this Python module. + +00:08.180 --> 00:10.130 +Price is right dot pi. + +00:10.130 --> 00:18.860 +And this is where we will be looking at particularly the code that constructs the UI behind our product. + +00:19.070 --> 00:22.070 +And it starts with a few imports. + +00:22.190 --> 00:24.110 +This very important one to import Gradio. + +00:24.110 --> 00:29.210 +And this even more important one to import the deal agent framework, which is at the heart of what + +00:29.210 --> 00:29.900 +we built. + +00:30.230 --> 00:33.440 +Uh, and then it defines a class app. + +00:33.440 --> 00:39.080 +And if I scroll to the bottom, you'll see that at the bottom it calls app dot run. + +00:39.110 --> 00:41.270 +It creates app and then runs it. + +00:41.270 --> 00:43.250 +And what does that involve creating? + +00:43.250 --> 00:44.240 +It does very little. 
+ +00:44.240 --> 00:47.420 +It just sets an empty variable for agent framework. + +00:47.450 --> 00:51.260 +The juice the meat is in the run uh method. + +00:51.260 --> 00:59.200 +And that of course creates a user interface using Gradio more advanced construct of great blocks with + +00:59.200 --> 01:00.280 +the Price is Right. + +01:00.610 --> 01:05.590 +Um, and then what you see here is some code that's quite similar to the code that we've already looked + +01:05.590 --> 01:06.250 +at before. + +01:06.280 --> 01:10.480 +In fact, the user interface part of it right here is identical. + +01:10.480 --> 01:14.020 +We have some rows with some titles. + +01:14.020 --> 01:17.890 +Um, and then we include our data frame. + +01:17.890 --> 01:22.630 +It has description price estimate discount and Earl as the columns. + +01:22.630 --> 01:28.480 +And it's set up and assigned to something called opportunities data frame just as before. + +01:28.750 --> 01:33.040 +Uh, the only thing that's changed is what's coming right afterwards. + +01:33.040 --> 01:34.810 +So here we go. + +01:34.840 --> 01:37.630 +This is the new stuff in this file. + +01:37.630 --> 01:40.780 +There is, uh, a UI dot load. + +01:40.780 --> 01:43.660 +So this is again where we tell Gradio. + +01:43.690 --> 01:48.310 +When you load, we want you to carry out a particular, uh, instruction. + +01:48.310 --> 01:54.060 +And we specify that using the standard format for gradio, where you start with the name of the function, + +01:54.060 --> 01:56.970 +it should call the inputs and the outputs. + +01:56.970 --> 02:00.690 +And in this case we're saying we want you to call the function called start. + +02:00.720 --> 02:02.700 +There are no inputs to this function. + +02:02.700 --> 02:07.890 +And whatever it returns, we want you to hook that up to the opportunities data frame. + +02:07.890 --> 02:13.500 +And that means we expect it to return some information that is suitable to be put into a table. 
+ +02:13.500 --> 02:16.860 +So that's what we expect to see from a function start. + +02:17.310 --> 02:23.130 +And then we're going to create another Gradio UI component called a GR dot timer. + +02:23.130 --> 02:26.160 +And that is something which which does some timing. + +02:26.160 --> 02:29.100 +It is in fact a UI component which is invisible. + +02:29.400 --> 02:35.160 +It's something that runs behind the scenes and it will wake up every 60s. + +02:35.160 --> 02:38.580 +And what does it do every 60s well, you have to tell Gradio what it should do. + +02:38.580 --> 02:42.480 +And the way you tell Gradio is by calling timer, dot, tick. + +02:42.480 --> 02:48.870 +And when you do that, you provide the information to Gradio again using the standard Gradio structure + +02:48.870 --> 02:50.060 +standard approach. + +02:50.060 --> 02:52.220 +You tell it the function that will be called. + +02:52.220 --> 02:54.350 +In this case I say go. + +02:54.380 --> 03:01.610 +You give it the inputs, there aren't any and the outputs, whatever go returns should be put into the + +03:01.610 --> 03:03.080 +opportunities data frame. + +03:03.080 --> 03:05.300 +Again, simple as that. + +03:05.330 --> 03:11.180 +The select method here that's that we call is exactly the same as before. + +03:11.180 --> 03:12.440 +It's another thing that hasn't changed. + +03:12.440 --> 03:13.550 +It's not exactly the same. + +03:13.550 --> 03:16.490 +It's changed a tiny bit, but it's basically the same. + +03:16.550 --> 03:23.570 +And it ends with a call to the agent framework planner messenger alert, alert about the opportunity. + +03:23.840 --> 03:27.290 +So you probably noticed the small the small change there. + +03:27.290 --> 03:29.360 +It's going to do a full alert. + +03:29.750 --> 03:36.410 +Uh, and all that remains is for us to go and look at the two functions we're expecting to see start + +03:36.410 --> 03:37.130 +and go. 
+ +03:37.130 --> 03:44.810 +And at the top of the display you will find is indeed start and go the two functions in question. + +03:44.820 --> 03:48.030 +And if we look at them, let's start with start. + +03:48.060 --> 03:53.280 +What start does is it creates a new instance of the deal agent framework. + +03:53.310 --> 03:56.610 +The key object that controls our whole framework. + +03:56.940 --> 04:02.460 +It instantiates it, and it then asks it for its memory of what opportunities does it have. + +04:02.460 --> 04:08.430 +And then it calls this function table for with these opportunities and it returns that table. + +04:08.460 --> 04:09.930 +So what does table four do. + +04:09.960 --> 04:12.360 +Table four is this tiny function up here. + +04:12.900 --> 04:15.870 +So table four takes opportunities. + +04:15.870 --> 04:18.810 +And then you remember opportunities if we go in here. + +04:18.840 --> 04:23.250 +Opportunities are these objects here. + +04:23.250 --> 04:29.340 +That is exactly what gets stored in the memory of the agent framework contains a deal an estimate and + +04:29.340 --> 04:30.000 +a discount. + +04:30.000 --> 04:31.380 +That is an opportunity. + +04:31.980 --> 04:34.170 +So we go back to week eight. + +04:34.440 --> 04:42.530 +So um, the this function converts an opportunity object into just a list of strings. + +04:42.560 --> 04:47.840 +A list of lists of strings with the description, the price, the estimate, the discount, the URL + +04:47.840 --> 04:54.860 +exactly the type, the structure that's needed to slot into the data frame that we have down here. + +04:54.890 --> 04:58.730 +So again, start just simply creates a deal. + +04:58.730 --> 04:59.480 +Agent framework. + +04:59.510 --> 05:06.440 +Looks at its memory, converts that into strings and returns it and go. + +05:06.470 --> 05:09.920 +Which is the thing that gets called every 60s. + +05:09.950 --> 05:13.160 +Go is going to do something very, very similar. 
+ +05:13.160 --> 05:14.630 +You'll see it looks very similar. + +05:14.660 --> 05:18.200 +It just has this one little tiny line added, a tiny little line. + +05:18.350 --> 05:19.490 +It doesn't do very much. + +05:19.490 --> 05:20.780 +It does everything. + +05:20.780 --> 05:25.370 +This is the line that calls run on our agent framework. + +05:25.370 --> 05:27.950 +It's the thing that we were doing manually before. + +05:27.950 --> 05:31.610 +It triggers the entire workflow that we have built. + +05:31.610 --> 05:33.290 +And that's what go does. + +05:33.320 --> 05:41.800 +And go will be kicked off every 60s by this GR timer so that that is the business of it. + +05:42.010 --> 05:45.700 +All that remains is to see it working in action. + +05:45.820 --> 05:52.630 +And so now to see this happening, we're going to go to the launcher and we're going to open a new terminal. + +05:52.660 --> 05:58.750 +And I'm going to change to my to activate my conda environment with conda activate LMS. + +05:59.920 --> 06:01.090 +There we go. + +06:01.090 --> 06:08.020 +And now I can run it by just typing Python price is right. + +06:08.050 --> 06:10.570 +Dot py should be as simple as that. + +06:10.600 --> 06:13.390 +Let's see a bit of a drum roll. + +06:13.420 --> 06:15.220 +And 321. + +06:15.220 --> 06:16.180 +Here we go. + +06:18.100 --> 06:23.050 +There's a pause while it thinks for a second and bam! + +06:23.080 --> 06:27.100 +Up comes the price is right and there's more work going on. + +06:27.100 --> 06:29.950 +And here we see right in front of us. + +06:30.250 --> 06:32.200 +What we're looking at, of course, is the memory. + +06:32.230 --> 06:34.860 +We're looking at the opportunities that it's come up with. + +06:35.280 --> 06:39.120 +And we can scroll down and see this bottom item here. + +06:39.120 --> 06:45.180 +I do believe that that is the item that was just surfaced to us most recently, when we ran it from + +06:45.180 --> 06:46.920 +the command line before. 
+ +06:47.010 --> 06:49.500 +Um, and of course it then puts it into its memory. + +06:49.500 --> 06:54.000 +So we're seeing the items that have been surfaced here. + +06:54.000 --> 07:02.730 +And if I go back over to, uh, the command line, we'll see, of course, that it initialized, it + +07:02.730 --> 07:09.480 +set things up, it got itself ready, uh, and it's now going to be sitting in a loop so that, uh, + +07:09.480 --> 07:11.760 +it's going to be waiting for the agent to tick. + +07:11.790 --> 07:16.590 +And after a minute, it will kick off and launch and hopefully you're doing the same thing. + +07:16.590 --> 07:19.830 +You've opened this up, you're running it, you're going to give this a try. + +07:19.830 --> 07:22.590 +And like me, you will then see it run. + +07:22.590 --> 07:28.980 +And I will go to the next lecture where we will take a look at the outcome. + +07:29.250 --> 07:30.720 +Uh, I will see you there. diff --git a/week5/community-contributions/subtitles/srts/59673639/ja_JP.srt b/week5/community-contributions/subtitles/srts/59673639/ja_JP.srt new file mode 100755 index 0000000..a6f523d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673639/ja_JP.srt @@ -0,0 +1,322 @@ +WEBVTT + +00:00.680 --> 00:08.180 +そして、 このPythonモジュールにあるユーザーインターフェースのコードへようこそ。 + +00:08.180 --> 00:10.130 +価格は適正なドットパイ。 + +00:10.130 --> 00:18.860 +そしてここでは、 特に製品の背後にあるUIを構築するコードを見ていく。 + +00:19.070 --> 00:22.070 +そして、 それはいくつかの輸入品から始まる。 + +00:22.190 --> 00:24.110 +グラディオを輸入する上で非常に重要なものだ。 + +00:24.110 --> 00:29.900 +さらに重要なのは、 ディール・エージェントの枠組みを輸入することだ。 + +00:30.230 --> 00:33.440 +そして、 クラス・アプリを定義する。 + +00:33.440 --> 00:39.080 +一番下までスクロールすると、 一番下にapp dot runとあるのがわかるだろう。 + +00:39.110 --> 00:41.270 +アプリを作成し、 それを実行する。 + +00:41.270 --> 00:43.250 +そして、 それは何を創造することなのか? 
+ +00:43.250 --> 00:44.240 +ほとんど何もしない。 + +00:44.240 --> 00:47.420 +エージェントフレームワークのために空の変数を設定するだけだ。 + +00:47.450 --> 00:51.260 +肉汁は "ラン・アー "方式だ。 + +00:51.260 --> 01:00.280 +そしてそれはもちろん、 「プライス・イズ・ライト」の素晴らしいブロックを使ったGradioのより高度な構造を使ったユーザーインターフェイスを生み出す。 + +01:00.610 --> 01:06.250 +それから、 ここにあるのは、 前に見たコードとよく似たコードだ。 + +01:06.280 --> 01:10.480 +実際、 ここにあるユーザーインターフェースの部分は同じだ。 + +01:10.480 --> 01:14.020 +いくつかのタイトルの行がある。 + +01:14.020 --> 01:17.890 +それからデータフレームを入れる。 + +01:17.890 --> 01:22.630 +これは、 列として説明価格見積もり割引とアールを持っています。 + +01:22.630 --> 01:28.480 +そして、 以前と同じように機会データフレームと呼ばれるものにセットアップされ、 割り当てられる。 + +01:28.750 --> 01:33.040 +ええと、 唯一変わったのは、 その直後に起こることだ。 + +01:33.040 --> 01:34.810 +それでは、 どうぞ。 + +01:34.840 --> 01:37.630 +これがこのファイルの新しい内容だ。 + +01:37.630 --> 01:40.780 +UIのドットロードがあるんだ。 + +01:40.780 --> 01:43.660 +だから、 ここでもグラディオに言うのだ。 + +01:43.690 --> 01:48.310 +ロードするときは、 ある指示を実行してほしい。 + +01:48.310 --> 01:54.060 +そして、 gradioの標準フォーマットである関数名から始まるフォーマットを使って、 + +01:54.060 --> 01:56.970 +入力と出力を呼び出すように指定する。 + +01:56.970 --> 02:00.690 +そしてこの場合、 startという関数を呼び出してほしいと言っているのだ。 + +02:00.720 --> 02:02.700 +この機能には入力はない。 + +02:02.700 --> 02:07.890 +そして、 それが何を返すにせよ、 それを機会データフレームにフックしてほしい。 + +02:07.890 --> 02:13.500 +つまり、 テーブルに入れるのに適した情報を返してくれることを期待しているのだ。 + +02:13.500 --> 02:16.860 +だから、 機能的なスタートが期待できる。 + +02:17.310 --> 02:23.130 +そして、 GRドットタイマーと呼ばれるGradio UIコンポーネントを作成します。 + +02:23.130 --> 02:26.160 +そして、 それはいくつかのタイミングを計るものだ。 + +02:26.160 --> 02:29.100 +これは実際には目に見えないUIコンポーネントである。 + +02:29.400 --> 02:35.160 +それは舞台裏で動いているもので、 60年代には必ず目を覚ます。 + +02:35.160 --> 02:38.580 +そして、 60秒ごとに何をするかというと......グラディオに何をすべきかを指示しなければならない。 + +02:38.580 --> 02:42.480 +Gradioに伝える方法は、 タイマー、 ドット、 ティックを呼び出すことだ。 + +02:42.480 --> 02:50.060 +そして、 そうしたら、 Gradioの標準的な構造標準的なアプローチを使って、 再びGradioに情報を提供する。 + +02:50.060 --> 02:52.220 +呼び出される関数を指定する。 + +02:52.220 --> 02:54.350 +この場合、 私は行けと言う。 + +02:54.380 --> 03:03.080 +入力を与えれば、 何もない。 出力は、 何が返ってきても、 機会データフレームに入れなければならない。 + +03:03.080 --> 03:05.300 +もう一度言う。 + +03:05.330 --> 03:11.180 
+ここで呼び出すセレクト・メソッドは、 以前とまったく同じだ。 + +03:11.180 --> 03:12.440 +これも変わらないことだ。 + +03:12.440 --> 03:13.550 +全く同じではない。 + +03:13.550 --> 03:16.490 +少し変わったけど、 基本的には同じだよ。 + +03:16.550 --> 03:23.570 +そして、 エージェントフレームワークのメッセンジャーアラート、 チャンスについてのアラートへのコールで終わる。 + +03:23.840 --> 03:27.290 +だから、 その小さな変化にお気づきだろう。 + +03:27.290 --> 03:29.360 +完全な警戒態勢に入る。 + +03:29.750 --> 03:37.130 +さて、 あとは2つの機能を見ていくだけだ。 + +03:37.130 --> 03:44.810 +そしてディスプレイの上部には、 問題の2つの機能であるスタートとゴーが表示される。 + +03:44.820 --> 03:48.030 +そして、 それらを見るなら、 まずはスタートから始めよう。 + +03:48.060 --> 03:53.280 +スタートが行うのは、 ディール・エージェント・フレームワークの新しいインスタンスを作成することだ。 + +03:53.310 --> 03:56.610 +フレームワーク全体をコントロールする重要なオブジェクト。 + +03:56.940 --> 04:02.460 +それをインスタンス化し、 どのような機会があるかを記憶させる。 + +04:02.460 --> 04:08.430 +そして、 これらの機会を表す関数テーブルを呼び出し、 そのテーブルを返す。 + +04:08.460 --> 04:09.930 +では、 テーブル4は何をしているのか。 + +04:09.960 --> 04:12.360 +テーブル4はこの小さな機能だ。 + +04:12.900 --> 04:15.870 +だから、 4番テーブルはチャンスがある。 + +04:15.870 --> 04:18.810 +そして、 ここに入ればチャンスを思い出すことができる。 + +04:18.840 --> 04:23.250 +チャンスはここにある。 + +04:23.250 --> 04:30.000 +それこそが、 エージェントフレームワークのメモリに保存される、 取引と見積もりと値引きを含むものである。 + +04:30.000 --> 04:31.380 +それはチャンスだ。 + +04:31.980 --> 04:34.170 +そこで第8週に戻る。 + +04:34.440 --> 04:42.530 +つまり、 この関数は機会オブジェクトを単なる文字列のリストに変換する。 + +04:42.560 --> 04:54.860 +説明、 価格、 見積もり、 割引、 URL、 正確にはタイプ、 ここにあるデータ・フレームに入れるために必要な構造を持つ文字列のリストのリスト。 + +04:54.890 --> 04:58.730 +だからまた、 単純に取引を開始する。 + +04:58.730 --> 04:59.480 +エージェントのフレームワーク。 + +04:59.510 --> 05:06.440 +メモリを見て、 それを文字列に変換して返す。 + +05:06.470 --> 05:09.920 +60年代には必ず呼ばれたものだ。 + +05:09.950 --> 05:13.160 +囲碁は非常に、 非常に似たことをやろうとしている。 + +05:13.160 --> 05:14.630 +よく似ているのがわかるだろう。 + +05:14.660 --> 05:18.200 +ただ、 この小さな小さな一本の線が追加されているだけだ。 + +05:18.350 --> 05:19.490 +大したことはできない。 + +05:19.490 --> 05:20.780 +何でもやってくれる。 + +05:20.780 --> 05:25.370 +これは、 エージェントフレームワーク上で実行される呼び出しの行である。 + +05:25.370 --> 05:27.950 +以前は手作業でやっていたことだ。 + +05:27.950 --> 05:31.610 +私たちが構築したワークフロー全体がトリガーされる。 + +05:31.610 --> 05:33.290 +それが囲碁だ。 + +05:33.320 --> 05:41.800 +そして、 このGRタイマーによって、 
60歳ごとに碁はキックオフされる。 + +05:42.010 --> 05:45.700 +あとは実際に動いているところを見るだけだ。 + +05:45.820 --> 05:52.630 +ランチャーで新しいターミナルを開いてみよう。 + +05:52.660 --> 05:58.750 +そして、 conda activate LMSでconda環境を有効化するように変更します。 + +05:59.920 --> 06:01.090 +これでよし。 + +06:01.090 --> 06:08.020 +今はPython price is rightと入力するだけで実行できる。 + +06:08.050 --> 06:10.570 +ドットパイはそれくらいシンプルであるべきだ。 + +06:10.600 --> 06:13.390 +ちょっとドラムロールを見てみよう。 + +06:13.420 --> 06:15.220 +そして321だ。 + +06:15.220 --> 06:16.180 +さあ、 始めよう。 + +06:18.100 --> 06:23.050 +ちょっと考えている間に間があって、 バーン! + +06:23.080 --> 06:27.100 +値段も手頃だし、 もっと仕事がある。 + +06:27.100 --> 06:29.950 +そして、 目の前にあるのがこれだ。 + +06:30.250 --> 06:32.200 +もちろん、 私たちが見ているのは記憶だ。 + +06:32.230 --> 06:34.860 +我々はそれがもたらすチャンスに注目している。 + +06:35.280 --> 06:39.120 +下にスクロールして、 一番下の項目を見てみよう。 + +06:39.120 --> 06:46.920 +それは、 以前コマンドラインから実行したときに、 つい最近私たちに表面化した項目だと思う。 + +06:47.010 --> 06:49.500 +そしてもちろん、 それをメモリーに保存する。 + +06:49.500 --> 06:54.000 +だから、 私たちはここで浮上した項目を見ている。 + +06:54.000 --> 07:02.730 +コマンドラインに戻ると、 もちろん、 初期化され、 + +07:02.730 --> 07:11.760 +セットアップされ、 準備が整ったことがわかる。 + +07:11.790 --> 07:16.590 +そして1分後、 キックオフして起動する。 + +07:16.590 --> 07:19.830 +あなたはこれを開き、 実行し、 これを試してみようとしている。 + +07:19.830 --> 07:22.590 +そして、 私と同じように、 それが走るのを見ることになる。 + +07:22.590 --> 07:28.980 +そして次回の講義では、 その結果について見ていくことにしよう。 + +07:29.250 --> 07:30.720 +そこで会おう。 diff --git a/week5/community-contributions/subtitles/srts/59673639/ko_KR.srt b/week5/community-contributions/subtitles/srts/59673639/ko_KR.srt new file mode 100755 index 0000000..d19c3a8 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673639/ko_KR.srt @@ -0,0 +1,343 @@ +WEBVTT + +00:00.680 --> 00:08.180 +사용자 인터페이스에 대한 코드에 오신 것을 환영합니다 파이썬 을 살펴보도록 하죠 + +00:08.180 --> 00:10.130 +가격 이즈 라이트 닷파이예요 + +00:10.130 --> 00:18.860 +여기서 특히 제품 뒤 UI를 구성하는 코드를 살펴볼 거예요 + +00:19.070 --> 00:22.070 +몇 가지 수입품으로 시작하죠 + +00:22.190 --> 00:24.110 +그라디오를 수입하는 데 아주 중요한 부분이죠 + +00:24.110 --> 00:29.210 +이건 훨씬 더 중요한 겁니다 거래 에이전트 프레임워크를 수입하는 데요 우리가 구축한 것의 + +00:29.210 --> 00:29.900 +핵심이죠 + 
+00:30.230 --> 00:33.440 +클래스 앱을 정의하죠 + +00:33.440 --> 00:39.080 +아래로 스크롤을 하면 app.lan이라고 호출하는 것이 보이죠. + +00:39.110 --> 00:41.270 +앱을 생성해 실행하죠 + +00:41.270 --> 00:43.250 +뭘 만드는 거죠? + +00:43.250 --> 00:44.240 +별로 하는 게 없어요 + +00:44.240 --> 00:47.420 +에이전트 프레임워크를 위한 빈 변수를 설정하죠 + +00:47.450 --> 00:51.260 +육즙이 흘러나오는 방식이죠 + +00:51.260 --> 00:59.200 +사용자 인터페이스를 생성하죠 그래디오의 더 고급 구성 요소를 이용해서요 가격은 적당하다를 + +00:59.200 --> 01:00.280 +통해서요 + +01:00.610 --> 01:06.250 +여기 보이는 코드는 우리가 이미 봤던 것과 꽤 유사해요 + +01:06.280 --> 01:10.480 +사실 사용자 인터페이스는 여기 이 부분과 동일해요 + +01:10.480 --> 01:14.020 +말다툼이 좀 있었어요 + +01:14.020 --> 01:17.890 +그리고 데이터 프레임을 포함해요 + +01:17.890 --> 01:22.630 +설명 가격 추정 할인이랑 얼이 기둥이에요 + +01:22.630 --> 01:28.480 +아까처럼 기회 데이터 프레임이란 것에 설정되고 할당되죠 + +01:28.750 --> 01:33.040 +바뀐 건 그 후에 일어날 일뿐이에요 + +01:33.040 --> 01:34.810 +자, 시작하죠 + +01:34.840 --> 01:37.630 +이 파일의 새로운 것들이죠 + +01:37.630 --> 01:40.780 +UI .load가 있어요 + +01:40.780 --> 01:43.660 +여기서 다시 그래디오에게 말하죠 + +01:43.690 --> 01:48.310 +장전할 때 특정 지시를 따라야 해요 + +01:48.310 --> 01:54.060 +그러데이션을 위한 표준 포맷을 이용해 명시합니다 함수의 이름으로 시작하면 + +01:54.060 --> 01:56.970 +입력과 출력을 호출해야 하죠 + +01:56.970 --> 02:00.690 +이 경우엔 start라는 함수를 호출하라고 하고 있어요 + +02:00.720 --> 02:02.700 +이 함수에 입력값이 없어요 + +02:02.700 --> 02:07.890 +그게 뭘 반환하든 그걸 기회 데이터 프레임에 연결하세요 + +02:07.890 --> 02:13.500 +그 말은 테이블에 Put이 가능한 일부 정보를 반환하길 기대한다는 거죠 + +02:13.500 --> 02:16.860 +함수 시작에서 기대하는 게 바로 이거죠 + +02:17.310 --> 02:23.130 +그런 다음 다른 Gadio UI 구성 요소를 생성합니다 GR.Timer라는 거죠 + +02:23.130 --> 02:26.160 +그게 타이밍을 잡아주죠 + +02:26.160 --> 02:29.100 +이건 사실 보이지 않는 UI 구성 요소예요 + +02:29.400 --> 02:35.160 +무대 뒤에서 돌아가며 60대마다 깨어나는 거죠 + +02:35.160 --> 02:38.580 +60년대에는 어떻게∙∙∙ 아니, 그래디오에게 어떻게 해야 할지 말해줘야 해요 + +02:38.580 --> 02:42.480 +당신은 그래디오에게 타이머, 점, 체크라고 말해요 + +02:42.480 --> 02:48.870 +그렇게 할 때 정보를 다시 그라디오에게 제공하죠 표준 그라디오 구조의 표준 접근법을 + +02:48.870 --> 02:50.060 +사용해서요 + +02:50.060 --> 02:52.220 +호출될 함수를 알려주는 거죠 + +02:52.220 --> 02:54.350 +이 경우에는 출발해야죠 + +02:54.380 --> 03:01.610 +입력값을 주는데 아무것도 없으면 출력은 무엇이든지 기회 데이터 
프레임에 + +03:01.610 --> 03:03.080 +놓여져야 해요 + +03:03.080 --> 03:05.300 +아주 간단하죠 + +03:05.330 --> 03:11.180 +우리가 호출하는 SELECT 메서드는 아까와 완전히 똑같아요 + +03:11.180 --> 03:12.440 +변하지 않은 또 다른 거죠 + +03:12.440 --> 03:13.550 +완전히 똑같진 않아요 + +03:13.550 --> 03:16.490 +비트만 좀 바뀌었지 거의 똑같아요 + +03:16.550 --> 03:23.570 +에이전트 프레임워크 플래너에게 전화하는 걸로 끝나요 메신저 알림 기회에 대한 알림이죠 + +03:23.840 --> 03:27.290 +작은 변화가 보이실 거예요 + +03:27.290 --> 03:29.360 +전체 알림을 할 거예요 + +03:29.750 --> 03:37.130 +이제 남은 건 시작과 시작을 볼 수 있는 두 가지 함수를 살펴보는 거예요 + +03:37.130 --> 03:44.810 +디스플레이 상단에 시작과 두 가지 함수가 있어요 + +03:44.820 --> 03:48.030 +그럼 시작부터 살펴보죠 + +03:48.060 --> 03:53.280 +시작은 거래 에이전트 프레임워크의 새 인스턴스를 생성해요 + +03:53.310 --> 03:56.610 +전체 프레임워크를 통제하는 핵심 객체죠 + +03:56.940 --> 04:02.460 +인스턴스화하고 어떤 기회가 있는지 메모리를 요청하죠 + +04:02.460 --> 04:08.430 +그리고 이 기회들을 위해 함수 테이블을 호출하고 그 테이블을 반환하죠 + +04:08.460 --> 04:09.930 +4번 테이블은 뭘 하죠? + +04:09.960 --> 04:12.360 +4번 테이블은 이 작은 함수예요 + +04:12.900 --> 04:15.870 +4번 테이블은 기회를 잡아요 + +04:15.870 --> 04:18.810 +여기 들어가면 기회가 있다는 걸 기억하세요 + +04:18.840 --> 04:23.250 +이 물건들이 기회예요 + +04:23.250 --> 04:30.000 +에이전트 프레임워크의 메모리에 저장되는 게 바로 그겁니다 거래, 추정, 할인을 포함하죠 + +04:30.000 --> 04:31.380 +그게 기회예요 + +04:31.980 --> 04:34.170 +8주 차로 돌아가 보죠 + +04:34.440 --> 04:42.530 +이 함수는 기회 객체를 문자열 목록으로 변환해요 + +04:42.560 --> 04:47.840 +문자열 리스트입니다 설명, 가격, 추정, 할인 URL + +04:47.840 --> 04:54.860 +정확한 유형과 구조죠 여기 아래 데이터 프레임에 넣어야 하는 거요 + +04:54.890 --> 04:58.730 +시작은 단순히 거래를 만드는 거죠 + +04:58.730 --> 04:59.480 +프레임워크 요원이에요 + +04:59.510 --> 05:06.440 +메모리를 보고 문자열로 변환해 리턴하고 가는 거죠 + +05:06.470 --> 05:09.920 +60년대마다 그렇게 불리죠 + +05:09.950 --> 05:13.160 +고도 아주 비슷한 걸 할 거예요 + +05:13.160 --> 05:14.630 +아주 비슷하게 생겼어요 + +05:14.660 --> 05:18.200 +아주 작은 선 하나만 추가된 거예요 + +05:18.350 --> 05:19.490 +별 효과가 없어요 + +05:19.490 --> 05:20.780 +만능이에요 + +05:20.780 --> 05:25.370 +에이전트 프레임워크에서 실행되는 라인이죠 + +05:25.370 --> 05:27.950 +전에는 수동으로 작업했었죠 + +05:27.950 --> 05:31.610 +우리가 구축한 워크플로우 전체를 촉발하죠 + +05:31.610 --> 05:33.290 +그게 고우의 역할이죠 + +05:33.320 --> 05:41.800 +GR 타이머로 
60분마다 고가 차단됩니다 그게 작업의 핵심이죠 + +05:42.010 --> 05:45.700 +이제 작동하는 걸 보는 일만 남았죠 + +05:45.820 --> 05:52.630 +이제 런처로 가서 새 터미널을 열어 볼게요 + +05:52.660 --> 05:58.750 +이제 변경할 것은 LMS를 활성화한 콘다 환경을 활성화하는 거죠 + +05:59.920 --> 06:01.090 +됐어요 + +06:01.090 --> 06:08.020 +파이썬 + $s right이라고 입력하면 실행할 수 있어요 + +06:08.050 --> 06:10.570 +도트 파이는 이렇게 간단해요 + +06:10.600 --> 06:13.390 +드럼 비트 주세요 + +06:13.420 --> 06:15.220 +321번도요 + +06:15.220 --> 06:16.180 +시작할게요 + +06:18.100 --> 06:23.050 +잠깐 생각하는 동안 멈췄다가 쾅! 하고 터져요 + +06:23.080 --> 06:27.100 +가격이 적당해서 더 많은 일이 진행되고 있어요 + +06:27.100 --> 06:29.950 +바로 우리 앞에 있어요 + +06:30.250 --> 06:32.200 +우리가 보는 건 물론 메모리예요 + +06:32.230 --> 06:34.860 +주어진 기회를 살피는 거죠 + +06:35.280 --> 06:39.120 +스크롤을 내려 하단 항목을 볼 수 있어요 + +06:39.120 --> 06:45.180 +가장 최근에야 그 물건이 떠올랐다고 생각해요 전에 명령줄에서 + +06:45.180 --> 06:46.920 +실행했을 때요 + +06:47.010 --> 06:49.500 +그리고 당연히 메모리에 저장하죠 + +06:49.500 --> 06:54.000 +여기서 발견된 물건들을 보고 있어요 + +06:54.000 --> 07:02.730 +명령줄로 다시 가보면 초기화되고 설정되고 준비된 것이 보이죠 이제 + +07:02.730 --> 07:09.480 +반복문 안에 있게 될 겁니다 에이전트가 체크하기를 + +07:09.480 --> 07:11.760 +기다리고 있죠 + +07:11.790 --> 07:16.590 +1분 후에 시작될 겁니다 여러분도 그렇게 하세요 + +07:16.590 --> 07:19.830 +이걸 열어 실행하고 있으니 시도해 보세요 + +07:19.830 --> 07:22.590 +그럼 저처럼 달리는 걸 보게 될 거예요 + +07:22.590 --> 07:28.980 +다음 강의에서는 그 결과를 살펴볼 거예요 + +07:29.250 --> 07:30.720 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/59673663/en_US.srt b/week5/community-contributions/subtitles/srts/59673663/en_US.srt new file mode 100755 index 0000000..389639e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673663/en_US.srt @@ -0,0 +1,184 @@ +WEBVTT + +00:00.890 --> 00:03.080 +But wait, there's more. + +00:03.110 --> 00:07.670 +We need to add some more to the user interface just to make it look more cool. 
+ +00:07.760 --> 00:13.370 +And so I put a little bit more work into this, and it's not particularly worth going through the code + +00:13.370 --> 00:17.660 +in that much detail other than to really show you what, what I came up with. + +00:17.660 --> 00:24.650 +But it occurred to me that whilst it is satisfactory to see the log messages appearing so you can watch + +00:24.650 --> 00:28.550 +to see what's going on, it will be much nicer if we could surface them to the user interface. + +00:28.580 --> 00:34.070 +And that's a bit harder because typically the user interface with Gradio, it's a matter of making a + +00:34.070 --> 00:38.330 +request and getting back a response, but it's perfectly doable. + +00:38.450 --> 00:45.350 +And so whilst it's not necessarily worth us going through exactly how I approached it, but the I've + +00:45.350 --> 00:50.750 +now turned to a new, uh, um, file, a new module called prices. + +00:50.750 --> 00:51.080 +Right. + +00:51.080 --> 00:54.590 +Final dot pi and in the Price is Right final. + +00:54.590 --> 01:02.420 +I've added in some stuff at the top to handle being able to subscribe to long log messages. + +01:02.660 --> 01:06.540 +Um, and then I've thrown in just a little bit of extra stuff as well. + +01:06.780 --> 01:13.590 +So with that in mind, let's just create a new terminal window and have a look at this user interface + +01:13.590 --> 01:14.340 +now. + +01:14.700 --> 01:19.650 +So we can say um, Python price. + +01:19.680 --> 01:22.050 +Oh sorry I need to activate my conda environment. + +01:22.080 --> 01:26.760 +Obviously conda activate LMS. + +01:26.850 --> 01:36.360 +And now now we can say python price is right final.py. + +01:36.660 --> 01:38.820 +And we can see what comes up. + +01:38.880 --> 01:42.360 +Uh so again it thinks for a little while. + +01:42.390 --> 01:43.680 +Up it comes. + +01:43.680 --> 01:46.830 +And you're going to notice 1 or 2 changes here. 
+ +01:46.890 --> 01:56.070 +Uh, so first of all, uh, as I promised, we've got this very attractive looking, uh, log here that + +01:56.070 --> 02:03.270 +shows you what's going on and allows you to look across the different, uh, agents as they run, see + +02:03.270 --> 02:04.920 +who is doing what. + +02:05.040 --> 02:08.730 +Uh, as before, we have our table up here, and that's going to allow us. + +02:08.730 --> 02:12.830 +I just clicked on that and I saw out of the corner of my eye that of course I got a push notification + +02:12.830 --> 02:13.940 +right away. + +02:14.120 --> 02:16.070 +Uh, and what do we have here? + +02:16.100 --> 02:22.580 +Well, what we have here is a 3D representation of our chroma database that's being used to look up + +02:22.580 --> 02:27.170 +the products to send to the frontier model when it appears over here. + +02:27.380 --> 02:29.210 +Uh, now, I know what you're thinking. + +02:29.270 --> 02:36.500 +You're thinking, okay, but what's the business purpose of having this particular rag knowledge store + +02:36.500 --> 02:38.930 +being surfaced on this user interface? + +02:39.020 --> 02:43.070 +Uh, and it's an important question and an important answer. + +02:43.190 --> 02:45.410 +Um, is that there's no purpose whatsoever. + +02:45.440 --> 02:47.690 +No purpose to having this diagram. + +02:47.720 --> 02:48.920 +It just looks great. + +02:48.920 --> 02:49.850 +And I want it to. + +02:49.880 --> 02:51.050 +And it's my course. + +02:51.080 --> 02:52.730 +And so I'll do it if I want to. + +02:53.870 --> 02:59.450 +Uh, so yeah, I mean, in all seriousness there, uh, it was nice to be able to surface some data + +02:59.450 --> 03:04.730 +and be able to illustrate how easy it is to get a plot to show on a Gradio user interface. + +03:04.850 --> 03:10.850 +Um, but, uh, this this is running, uh, just, uh, in to show off. 
+ +03:10.850 --> 03:17.660 +What would perhaps be useful over time is to show in this the memory and actually be able to show in + +03:17.660 --> 03:21.710 +vector format the different points on the memory, and you can hover over it and see it. + +03:21.890 --> 03:27.560 +So maybe I'll make that adjustment myself, but or you could do it for me and push the code that would + +03:27.560 --> 03:32.450 +make this a little bit more useful and less of a gratuitous change. + +03:32.510 --> 03:36.800 +Now, the other thing that you might have noticed while I've been speaking is that the memory suddenly + +03:36.800 --> 03:41.630 +went blank, and you may be wondering why on earth did the memory go blank? + +03:41.660 --> 03:47.240 +And the reason the memory went blank is, of course, because the run has kicked off over here. + +03:47.330 --> 03:53.570 +And actually, again, there are ways around this, but right now, when the run is in progress, that + +03:53.570 --> 03:59.360 +table goes blank and then it will repopulate when the run completes, which is a bit janky and can be + +03:59.360 --> 03:59.750 +fixed. + +03:59.750 --> 04:02.420 +And that could be an exercise for you to improve the UI. + +04:02.630 --> 04:07.280 +Um, but you can see right now it's got to the point where it's calling modal. + +04:07.280 --> 04:09.890 +And so modal will be warming up. + +04:10.340 --> 04:16.550 +So with that, because this will take a minute, I will pause and I will come back when it has completed. + +04:16.580 --> 04:17.660 +See you then. 
diff --git a/week5/community-contributions/subtitles/srts/59673663/ja_JP.srt b/week5/community-contributions/subtitles/srts/59673663/ja_JP.srt new file mode 100755 index 0000000..a74679a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673663/ja_JP.srt @@ -0,0 +1,157 @@ +WEBVTT + +00:00.890 --> 00:03.080 +だが、 まだある。 + +00:03.110 --> 00:07.670 +ユーザーインターフェイスをもっとかっこよくするために、 もう少し追加する必要がある。 + +00:07.760 --> 00:17.660 +だから、 このコードにはもうちょっと手を入れた。 + +00:17.660 --> 00:24.650 +しかし、 ログメッセージが表示され、 何が起こっているのか見ることができるのは満足だが、 それをユーザー・インターフェースに表示できれば、 + +00:24.650 --> 00:28.550 +もっとすっきりするのではないかと思いついた。 + +00:28.580 --> 00:34.070 +Gradioのユーザー・インターフェースでは、 リクエストをしてレスポンスを返すというのが一般的なので、 + +00:34.070 --> 00:38.330 +少し難しいですが、 完全に可能です。 + +00:38.450 --> 00:45.350 +それで、 私がどのようにアプローチしたかを正確に説明する価値は必ずしもないのですが、 私は今、 + +00:45.350 --> 00:50.750 +新しい、 ええと、 ファイル、 価格という新しいモジュールに目を向けました。 + +00:50.750 --> 00:51.080 +そうだね。 + +00:51.080 --> 00:54.590 +ファイナル・ドット・パイとプライス・イズ・ライトの決勝戦。 + +00:54.590 --> 01:02.420 +長いログメッセージを購読できるようにするために、 上部にいくつか追加した。 + +01:02.660 --> 01:06.540 +それから、 ほんの少しおまけも入れたよ。 + +01:06.780 --> 01:14.340 +それでは、 新しいターミナル・ウィンドウを作って、 このユーザー・インターフェースを見てみよう。 + +01:14.700 --> 01:19.650 +だから、 パイソン価格と言える。 + +01:19.680 --> 01:22.050 +すみません、 condaの環境をアクティブにする必要があります。 + +01:22.080 --> 01:26.760 +コンダがLMSを活性化するのは明らかだ。 + +01:26.850 --> 01:36.360 +そして今、 我々はパイソンの価格が最終的に適正であると言うことができる。 パイ。 + +01:36.660 --> 01:38.820 +何が出てくるか見てみよう + +01:38.880 --> 01:42.360 +ええと......それで、 またしばらく考えるんだ。 + +01:42.390 --> 01:43.680 +上がってきた。 + +01:43.680 --> 01:46.830 +そして、 ここで1つか2つの変化に気づくだろう。 + +01:46.890 --> 02:04.920 +まず最初に、 お約束したように、 とても魅力的なログができました。 + +02:05.040 --> 02:08.730 +前回と同じように、 ここにテーブルを置いている。 + +02:08.730 --> 02:13.940 +それをクリックしたら、 目の端にプッシュ通知が届いた。 + +02:14.120 --> 02:16.070 +それで、 ここには何があるんだい? + +02:16.100 --> 02:27.170 +さて、 ここにあるのはクロマ・データベースの3D表示で、 フロンティア・モデルに送る商品を探すのに使っている。 + +02:27.380 --> 02:29.210 +何を考えているかは分かるよ。 + +02:29.270 --> 02:38.930 +でも、 このユーザー・インターフェイスにこのボロ知識ストアを表示させるビジネス上の目的は何だろう? 
+ +02:39.020 --> 02:43.070 +それは重要な質問であり、 重要な答えだ。 + +02:43.190 --> 02:45.410 +つまり、 何の目的もないということだ。 + +02:45.440 --> 02:47.690 +この図を持っている意味はない。 + +02:47.720 --> 02:48.920 +とにかく素晴らしい。 + +02:48.920 --> 02:49.850 +そうしてほしい。 + +02:49.880 --> 02:51.050 +私のコースだしね。 + +02:51.080 --> 02:52.730 +だから、 やりたければやる。 + +02:53.870 --> 03:04.730 +つまり、 真面目に言うと、 データを表に出して、 Gradioのユーザー・インターフェースにプロットを表示させるのがいかに簡単かを説明できたのは良かった。 + +03:04.850 --> 03:10.850 +ええと、 でも、 これは、 ええと、 ただ、 見せびらかすために走っているんだ。 + +03:10.850 --> 03:17.660 +時間が経てば、 おそらく便利なのは、 このメモリーに表示され、 実際にメモリー上のさまざまなポイントをベクター形式で表示でき、 + +03:17.660 --> 03:21.710 +その上にカーソルを置いて見ることができることだろう。 + +03:21.890 --> 03:27.560 +だから、 私が自分で調整するかもしれない。 でも、 あるいは、 あなたが私の代わりにやってくれて、 これをもう少し便利で、 + +03:27.560 --> 03:32.450 +無償の変更でなくなるようなコードをプッシュしてくれるかもしれない。 + +03:32.510 --> 03:36.800 +さて、 私が話している間にもうひとつ気づいたかもしれないが、 記憶が突然真っ白になった。 + +03:36.800 --> 03:41.630 +いったいなぜ記憶が真っ白になったのか、 不思議に思うかもしれない。 + +03:41.660 --> 03:47.240 +そして、 記憶が真っ白になったのは、 もちろん、 こちらで滑走が始まったからだ。 + +03:47.330 --> 03:53.570 +実は、 これを回避する方法もあるのだが、 今のところ、 実行中はテーブルが空白になり、 + +03:53.570 --> 03:59.750 +実行が完了すると再入力される。 + +03:59.750 --> 04:02.420 +そしてそれは、 UIを改善するための練習になるかもしれない。 + +04:02.630 --> 04:07.280 +うーん、 でも今、 モーダルを呼び出すところまで来ているのがわかるだろう。 + +04:07.280 --> 04:09.890 +そうしてモードは温まっていく。 + +04:10.340 --> 04:16.550 +それでは、 少し時間がかかるので、 一時中断し、 終了したらまた戻ってきます。 + +04:16.580 --> 04:17.660 +ではまた diff --git a/week5/community-contributions/subtitles/srts/59673663/ko_KR.srt b/week5/community-contributions/subtitles/srts/59673663/ko_KR.srt new file mode 100755 index 0000000..49aee0b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673663/ko_KR.srt @@ -0,0 +1,181 @@ +WEBVTT + +00:00.890 --> 00:03.080 +잠깐만요, 더 있어요 + +00:03.110 --> 00:07.670 +사용자 인터페이스에 추가해야 더 멋져 보일 거예요 + +00:07.760 --> 00:13.370 +비트 박스에 좀 더 공을 들였어요 코드를 그렇게 상세히 검토할 필요는 없어요 + +00:13.370 --> 00:17.660 +제가 생각해낸 걸 보여드리기 위한 것 외에는요 + +00:17.660 --> 00:24.650 +하지만 로그 메시지가 나타나 상황을 볼 수 있는 게 만족스럽긴 하지만 사용자 인터페이스에 + +00:24.650 --> 00:28.550 +표면을 만들면 훨씬 더 
좋을 것 같아요 + +00:28.580 --> 00:34.070 +좀 더 어렵죠 일반적으로 그래디오의 사용자 인터페이스는 요청을 하고 + +00:34.070 --> 00:38.330 +응답을 받는 문제지만 완벽하게 비트가 가능해요 + +00:38.450 --> 00:45.350 +제가 접근한 방식을 살펴볼 필요는 없지만 이제 새로운 파일로 + +00:45.350 --> 00:50.750 +넘어가죠 가격이라는 새 모듈이에요 + +00:50.750 --> 00:51.080 +네 + +00:51.080 --> 00:54.590 +마지막 도트파이 가격 맞히기 직전이군요 + +00:54.590 --> 01:02.420 +위에 몇 가지를 추가했어요 긴 로그 메시지를 구독할 수 있도록요 + +01:02.660 --> 01:06.540 +그리고 비트를 좀 더 추가했어요 + +01:06.780 --> 01:13.590 +그걸 염두에 두고 새 터미널 창을 만들어 사용자 인터페이스를 + +01:13.590 --> 01:14.340 +보죠 + +01:14.700 --> 01:19.650 +파이썬 가격이라고 할 수 있죠 + +01:19.680 --> 01:22.050 +콘다 환경을 활성화해야 해요 + +01:22.080 --> 01:26.760 +콘다가 LMS를 활성화한 게 분명해요 + +01:26.850 --> 01:36.360 +파이썬 을 사용하면 최종 가격이라고 할 수 있죠 네 + +01:36.660 --> 01:38.820 +뭐가 나오는지 보죠 + +01:38.880 --> 01:42.360 +그래서 한동안 생각을 해요 + +01:42.390 --> 01:43.680 +올라와요 + +01:43.680 --> 01:46.830 +여기 1, 2개의 변화가 보이시죠 + +01:46.890 --> 01:56.070 +먼저, 말씀드렸듯이 여기 아주 매력적인 로그가 있는데요, 진행 상황을 보여주고, + +01:56.070 --> 02:03.270 +서로 다른 에이전트들이 운영되는 동안 누가 무엇을 하는지 볼 수 + +02:03.270 --> 02:04.920 +있죠 + +02:05.040 --> 02:08.730 +아까처럼 테이블은 여기 위에 있어요 + +02:08.730 --> 02:12.830 +클릭을 했더니 제 곁눈질로 푸시 알림이 바로 뜨는 + +02:12.830 --> 02:13.940 +걸 봤어요 + +02:14.120 --> 02:16.070 +이건 또 뭐죠? + +02:16.100 --> 02:22.580 +이건 크로마 데이터베이스의 3D 모형입니다 프론티어 + +02:22.580 --> 02:27.170 +모델에 보낼 제품을 찾는 데 사용되죠 + +02:27.380 --> 02:29.210 +무슨 생각 하시는지 알아요 + +02:29.270 --> 02:36.500 +이 사용자 인터페이스에 이 특정 래그 지식 저장소가 나타나는 비즈니스 + +02:36.500 --> 02:38.930 +목적이 뭘까 생각하죠 + +02:39.020 --> 02:43.070 +중요한 질문이자 중요한 대답이에요 + +02:43.190 --> 02:45.410 +목적이 전혀 없다는 거예요 + +02:45.440 --> 02:47.690 +이 도표를 왜 갖고 있죠? 
+ +02:47.720 --> 02:48.920 +정말 멋져요 + +02:48.920 --> 02:49.850 +그러길 바라요 + +02:49.880 --> 02:51.050 +제 수업이고요 + +02:51.080 --> 02:52.730 +그러니 하고 싶으면 해야죠 + +02:53.870 --> 02:59.450 +네, 정말 진지하게 말씀드리면 데이터를 표면화할 수 있어서 좋았고 Gadio 사용자 + +02:59.450 --> 03:04.730 +인터페이스에서 줄거리를 보여주는 게 얼마나 쉬운지 설명할 수 있어서 좋았어요 + +03:04.850 --> 03:10.850 +하지만 이 차는 자랑하려고 달리는 거예요 + +03:10.850 --> 03:17.660 +시간이 흐르면 메모리에서 보여주는 게 유용할 거예요 벡터 포맷으로 메모리의 여러 지점을 + +03:17.660 --> 03:21.710 +보여주는 거죠 마우스로 가리키면 보이고요 + +03:21.890 --> 03:27.560 +제가 직접 조정할 수도 있지만 아니면 비트가 해줘서 코드를 푸시해 이걸 + +03:27.560 --> 03:32.450 +좀 더 유용하게 만들고 불필요한 변화는 줄여줄 수도 있죠 + +03:32.510 --> 03:36.800 +제가 말하는 동안 눈치챘을지도 모르는 또 다른 건 갑자기 메모리가 + +03:36.800 --> 03:41.630 +백지화됐다는 겁니다 왜 메모리가 백지화됐는지 궁금하실 거예요 + +03:41.660 --> 03:47.240 +메모리가 빈 이유는 물론 여기서 실행이 시작됐기 때문이죠 + +03:47.330 --> 03:53.570 +이것도 비트는 방법이 있지만 현재는 실행이 진행 중일 때 테이블이 비어있다가 + +03:53.570 --> 03:59.750 +실행이 완료되면 다시 채워질 겁니다 좀 엉성하지만 고칠 수 있죠 + +03:59.750 --> 04:02.420 +UI 개선을 위한 연습이 될 수 있어요 + +04:02.630 --> 04:07.280 +하지만 지금은 모듈이라고 부르는 지점에 이르렀어요 + +04:07.280 --> 04:09.890 +그래서 수단이 준비 운동을 하는 거죠 + +04:10.340 --> 04:16.550 +시간이 좀 걸리기 때문에 일시 정지하고 완료되면 다시 돌아오죠 + +04:16.580 --> 04:17.660 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/59673721/en_US.srt b/week5/community-contributions/subtitles/srts/59673721/en_US.srt new file mode 100755 index 0000000..8d21296 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673721/en_US.srt @@ -0,0 +1,460 @@ +WEBVTT + +00:00.770 --> 00:09.020 +And here we are in JupyterLab for the last time, and we are looking here at day five, the last day + +00:09.020 --> 00:13.130 +of the last week, week eight of the program. + +00:13.130 --> 00:21.140 +And we start with the beautiful beginning line import Gradio as GR, which you know what a great fan + +00:21.140 --> 00:25.880 +I am of Gradio, so I was considering using Streamlit instead. 
+ +00:25.880 --> 00:30.440 +This time you'll see in my repo I have some other example projects in Streamlit. + +00:30.620 --> 00:36.920 +Um, but I figured there's, there's uh, so much, there's great things about Gradio and Streamlit + +00:36.920 --> 00:39.650 +and all in all, I think I would double down on Gradio. + +00:39.650 --> 00:43.880 +And so there's no point in, in confusing you with showing you different things that you don't need + +00:43.910 --> 00:45.350 +because it's all in gradio. + +00:45.380 --> 00:46.640 +So there we go. + +00:46.850 --> 00:53.720 +Uh, with that, we, uh, um, import the deal agent framework and that. + +00:53.720 --> 00:58.010 +So we basically import in here the whole of the thing we were looking at before. + +00:58.010 --> 01:04.820 +And you remember, you can just instantiate this and and call run to set the thing off and we'll import + +01:04.820 --> 01:06.200 +opportunity and deal. + +01:06.200 --> 01:14.510 +So now as we have talked about I think briefly we covered this, but somewhat in passing in the past + +01:14.510 --> 01:20.930 +that Gradio has these two different levels of detail, a high level API and a low level, a bit like + +01:20.960 --> 01:24.710 +Huggingface has the pipelines and then the Tokenizers and models. + +01:24.740 --> 01:32.900 +Gradio has great interface, which is its high level API for quickly building an interface, and it's + +01:32.900 --> 01:36.470 +really simple to get up and running if you've got like a single function, some inputs and outputs, + +01:36.470 --> 01:42.050 +and you want it to pop up, you can just use great interface and its cousin, great chat interface that + +01:42.050 --> 01:44.360 +of course we've used to great success. + +01:44.570 --> 01:48.560 +Um, but the interface is a wonderful way to get up and running. 
+ +01:48.560 --> 01:56.120 +The lower level API is called GR dot blocks, and GR blocks gives you an ability to construct your user + +01:56.120 --> 01:58.640 +interface with with some more finesse. + +01:58.910 --> 02:04.880 +Um, and you say with roadblocks as and then you can call it anything you want. + +02:04.970 --> 02:11.600 +I say as you are right here, and then you say UI dot launch instead of interface dot launch in-browser + +02:11.600 --> 02:12.650 +dot equals true. + +02:12.650 --> 02:17.480 +Just means it's going to, uh, throw up a new screen as soon as we run this, rather than it being + +02:17.480 --> 02:18.380 +just a link. + +02:18.680 --> 02:20.570 +Um, so that's convenient to know. + +02:20.780 --> 02:25.370 +Um, and you can, you can give it a title of the window that's going to come up. + +02:25.370 --> 02:27.470 +Fill width is true is worth knowing. + +02:27.500 --> 02:33.020 +If you don't set that, then gradient puts big margins on either side of the window, which sometimes + +02:33.050 --> 02:33.620 +is what you want. + +02:33.620 --> 02:38.630 +But but often, particularly with the kinds of UIs we're going to be building, we want to use every + +02:38.630 --> 02:40.100 +possible pixel. + +02:40.370 --> 02:43.970 +So fill width is true means that it fills the whole width. + +02:44.000 --> 02:50.240 +So then there's this construct that you get used to, which uh, um, again, we have come across this + +02:50.240 --> 02:51.380 +now a couple of times. + +02:51.380 --> 02:58.670 +But you say with grow and then you put each row of your user interface and you break your user interface + +02:58.670 --> 03:00.470 +into rows like this. + +03:00.470 --> 03:08.150 +And in each of these context manager, each of these pieces, you can put a bunch of different UI widgets. + +03:08.300 --> 03:15.590 +Um, and then somewhat confusingly, you can also have with column, which is saying within a row. 
+ +03:15.620 --> 03:18.020 +I would then like to break things into columns. + +03:18.050 --> 03:19.640 +And we'll see that in a moment. + +03:19.670 --> 03:25.250 +Another it is worth knowing a pro tip is that you can't have with geo column at the top level. + +03:25.250 --> 03:30.230 +You have to start with zero and then you can have columns and rows, uh, nested I think as deep as + +03:30.230 --> 03:31.370 +you want within that. + +03:31.370 --> 03:37.280 +But the top level one has to be rows because Gradio is built to imagine a typical scrollable window. + +03:37.820 --> 03:44.000 +Okay, so I am beginning with Geo Row, and I'm just going to put a markdown a heading here. + +03:44.000 --> 03:48.680 +And when you use GR dot markdown which gives you a markdown field which I know we've used a bunch of + +03:48.680 --> 03:54.500 +times now, you can also put in straight up HTML in there, which is what I do to have some centered + +03:54.500 --> 03:55.310 +big text. + +03:55.310 --> 03:56.480 +The price is right. + +03:56.480 --> 03:59.960 +And then I put like something below that as well. + +03:59.990 --> 04:01.370 +And let's see that running. + +04:01.370 --> 04:04.910 +I just execute this cell and up comes the window. + +04:04.910 --> 04:07.250 +And it's pretty vanilla at the moment. + +04:07.250 --> 04:08.420 +The price is right. + +04:08.420 --> 04:09.140 +It's got a heading. + +04:09.140 --> 04:12.050 +You can see that this is a row and that's another row. + +04:12.050 --> 04:13.430 +And there we have it. + +04:13.430 --> 04:14.870 +That is simple enough. + +04:14.900 --> 04:15.590 +We'll close that. + +04:15.590 --> 04:16.760 +Come back here. + +04:17.570 --> 04:21.500 +And uh, now we can we can do a little bit more. + +04:21.500 --> 04:27.230 +So what we're going to do now is add another row. + +04:27.770 --> 04:34.160 +And this row is going to contain a data frame, a GR data frame, which is basically a table. 
+ +04:34.160 --> 04:38.630 +It's like a think of it like a spreadsheet a little embedded spreadsheet on our screen. + +04:38.630 --> 04:46.460 +And you tell it what headers you want description, price estimate, discount URL, uh, and uh, wrap + +04:46.460 --> 04:50.600 +is a bit like in Excel or in Google Sheets. + +04:50.630 --> 04:55.910 +It's saying that I want to wrap within any cell the column widths which are relative. + +04:55.910 --> 04:59.930 +So you can say that I want the first one to be four times as large as the others. + +04:59.930 --> 05:05.650 +How many rows, how many columns, and the height if you want it to have a fixed height. + +05:05.650 --> 05:07.090 +And some of these are optional. + +05:07.660 --> 05:15.520 +Um, and then, uh, what I'm doing is I'm calling something called UI dot load, which looks like many + +05:15.520 --> 05:16.780 +of the gradio functions. + +05:16.780 --> 05:24.640 +You'll see a very consistent pattern across Gradio, where you say the function takes the name of a + +05:24.640 --> 05:25.240 +function. + +05:25.240 --> 05:29.110 +In our case, get table inputs and outputs. + +05:29.110 --> 05:34.660 +And that's how you hook up a function you're providing to this particular user interface. + +05:34.690 --> 05:40.900 +And what we're saying is that, um, I have something called opportunities that I'll mention in just + +05:40.900 --> 05:41.410 +a moment. + +05:41.410 --> 05:45.580 +I want that to be the input of this thing, this table. + +05:45.580 --> 05:47.380 +And I want the output. + +05:47.380 --> 05:53.770 +Whatever this returns, I want to hook that up to my data frame that I just defined right here. + +05:54.220 --> 05:58.210 +Did you follow that the inputs are going to be this thing called opportunities that I'll speak about + +05:58.210 --> 05:58.900 +in a minute. 
+ +05:58.900 --> 06:06.640 +And the whatever this get table returns, I want that to be hooked up to this table, this data frame + +06:06.640 --> 06:07.990 +that we've constructed. + +06:08.860 --> 06:10.090 +Hopefully that makes sense. + +06:10.090 --> 06:12.790 +So what is this opportunities thing. + +06:12.790 --> 06:18.340 +So this opportunities thing is a great state, which means it's a bit of information that I want the + +06:18.340 --> 06:19.810 +user interface to remember. + +06:19.960 --> 06:26.080 +And this great state is going to have in it something called initial opportunity, which is just going + +06:26.080 --> 06:32.620 +to be something I'm hard coding in here, which is going to be an opportunity for a deal called example + +06:32.650 --> 06:34.630 +description price $100. + +06:34.630 --> 06:36.970 +There's going to be a URL cnn.com. + +06:36.970 --> 06:38.530 +And that's going to be it. + +06:38.530 --> 06:41.440 +So let's run this and see what happens. + +06:41.440 --> 06:43.300 +We get a user interface. + +06:43.300 --> 06:48.010 +We get this nice looking data frame which is like a table. + +06:48.010 --> 06:54.460 +And it has in it one row which has the thing that I just created with an example description, a price, + +06:54.460 --> 06:57.490 +an estimate, a discount and a URL. + +06:57.520 --> 07:00.670 +And that's, that's really the extent of it. + +07:01.360 --> 07:03.430 +Okay, so far, so good. + +07:03.460 --> 07:05.260 +So let's keep going. + +07:05.500 --> 07:06.250 +All right. + +07:06.280 --> 07:08.260 +Now the plot thickens. + +07:08.260 --> 07:11.890 +In this next cell, you can see I'm building on the user interface in each time. + +07:11.890 --> 07:12.550 +And I love doing that. + +07:12.550 --> 07:17.680 +That's one of the great things about it's a combination of the great thing about Jupyter Labs and also + +07:17.680 --> 07:21.070 +about Gradio is you can build in this way incrementally. 
+ +07:21.070 --> 07:27.310 +So I'm now going to create a new instance of an agent framework, a deal agent framework. + +07:27.670 --> 07:34.930 +Uh, and uh, otherwise I'm going to, um, I guess we're going to hook up to this, I think, in a, + +07:34.930 --> 07:36.310 +in a, in a later one. + +07:36.310 --> 07:41.890 +For now, uh, all I wanted to do was make it so that, uh, this would be hooked up. + +07:41.890 --> 07:47.260 +We'd see it creating itself, we'd see some of the information, and I'd have it so that you can select + +07:47.260 --> 07:51.910 +an opportunity, and it would call the messenger to alert about that opportunity. + +07:52.120 --> 07:58.600 +Um, so what you can see down here is I've now got two different things hooked up using radios, standard + +07:58.600 --> 07:59.320 +pattern. + +07:59.350 --> 08:01.120 +This is the same as before. + +08:01.120 --> 08:08.200 +If you load it calls gettable, it passes in the opportunities and the outputs go in the frame. + +08:08.410 --> 08:16.510 +If you select if you, if I'm um, I'm calling the select uh function on the data frame itself so that + +08:16.510 --> 08:24.130 +if a selection is made it should call the function do select the inputs should again be the opportunity. + +08:24.130 --> 08:25.840 +There aren't any outputs. + +08:25.840 --> 08:27.880 +What does do select do. + +08:27.910 --> 08:30.910 +It finds the index of what you've selected. + +08:30.910 --> 08:37.840 +And you specify that by having that as an argument right here, it looks up that opportunity from the + +08:37.840 --> 08:38.950 +list of opportunities. + +08:38.950 --> 08:45.880 +And it will then call the agent framework the planner the messenger and send an alert about that opportunity. + +08:46.330 --> 08:47.200 +All right. + +08:47.350 --> 08:48.700 +Seems reasonable. + +08:48.730 --> 08:49.990 +Let's run this. + +08:50.020 --> 08:55.300 +When we run it, the first thing you'll see is that there's a lot of, um. 
+ +08:55.960 --> 08:57.640 +It was too fast. + +08:57.670 --> 09:00.100 +If I go back, you'll see that it was printing. + +09:00.130 --> 09:08.290 +Of course, our log output to the the command line here to to the output of the Jupyter lab. + +09:08.290 --> 09:09.580 +So we could see standard out. + +09:09.580 --> 09:10.780 +That's what I was grasping for. + +09:10.810 --> 09:15.580 +Then standard out is showing the log messages with the color system. + +09:15.580 --> 09:17.020 +So we know what is doing what. + +09:17.050 --> 09:18.280 +Just as we wanted. + +09:18.880 --> 09:22.270 +And if we come over here it's showing our table with CNN. + +09:22.270 --> 09:27.700 +And if I click on this I did just get a notification to show you believe it. + +09:27.730 --> 09:30.220 +Hang on, I will press it again. + +09:30.220 --> 09:33.190 +Press and bang, I get a notification. + +09:33.610 --> 09:37.210 +Um, and so there you have it. + +09:37.240 --> 09:44.140 +We have now hooked up gradio to our agent framework at least so that it can notify us. + +09:44.140 --> 09:47.020 +So it's the beginnings of a user interface. + +09:47.020 --> 09:51.100 +It's the beginnings of something with the agent framework running behind the scenes. + +09:51.130 --> 09:59.530 +In the next video, we will move off the Jupyter Lab environment into the real code for the user interface. + +09:59.560 --> 10:00.640 +I'll see you then. 
diff --git a/week5/community-contributions/subtitles/srts/59673721/ja_JP.srt b/week5/community-contributions/subtitles/srts/59673721/ja_JP.srt new file mode 100755 index 0000000..6477533 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673721/ja_JP.srt @@ -0,0 +1,379 @@ +WEBVTT + +00:00.770 --> 00:13.130 +そして、 JupyterLabでの最終回。 プログラムの最終週、 第8週の最終日である5日目を見ている。 + +00:13.130 --> 00:25.880 +私がGradioの大ファンであることはご存じだろうから、 代わりにStreamlitを使うことも考えていた。 + +00:25.880 --> 00:30.440 +今回、 私のレポにはStreamlitの他のサンプル・プロジェクトがいくつかある。 + +00:30.620 --> 00:39.650 +でも、 GradioとStreamlitには素晴らしいところがたくさんあるし、 総合的に考えて、 僕はGradioをダブルダウンすると思う。 + +00:39.650 --> 00:45.350 +だから、 必要ないものを見せて混乱させても意味がないんだ。 + +00:45.380 --> 00:46.640 +そうだ。 + +00:46.850 --> 00:53.720 +それで、 ディールエージェントのフレームワークをインポートするんだ。 + +00:53.720 --> 00:58.010 +だから、 基本的には、 以前見ていたものを丸ごとここに取り込むんだ。 + +00:58.010 --> 01:06.200 +覚えておいてほしいのは、 これをインスタンス化し、 runを呼び出して起動させればいいということだ。 + +01:06.200 --> 01:14.510 +Huggingfaceがパイプラインを持ち、 + +01:14.510 --> 01:24.710 +次にTokenizersとモデルを持つようなものだ。 + +01:24.740 --> 01:32.900 +Gradioには素晴らしいインターフェイスがあり、 それはインターフェイスを素早く構築するための高レベルAPIで、 + +01:32.900 --> 01:36.470 +一つの関数、 いくつかの入力と出力があり、 それをポップアップさせたい場合、 + +01:36.470 --> 01:44.360 +本当に簡単に立ち上げることができます。 + +01:44.570 --> 01:48.560 +うーん、 でも、 このインターフェイスは立ち上がるのに素晴らしい方法だよ。 + +01:48.560 --> 01:58.640 +下位レベルのAPIはGRドット・ブロックと呼ばれ、 GRブロックはユーザー・インターフェースをより精巧に構築する能力を与えてくれる。 + +01:58.910 --> 02:04.880 +そして、 あなたは道路封鎖のことを "道路封鎖 "と言っている。 + +02:04.970 --> 02:12.650 +そして、 インターフェイス・ドット・ローンチではなく、 UIドット・ローンチ・イン・ブラウザ・ドット・イコール・トゥルーと言うのだ。 + +02:12.650 --> 02:18.380 +ただ、 リンクを貼るだけでなく、 これを実行するとすぐに新しい画面が表示されます。 + +02:18.680 --> 02:20.570 +それは便利なことだね。 + +02:20.780 --> 02:25.370 +そして、 表示されるウィンドウのタイトルをつけることができます。 + +02:25.370 --> 02:27.470 +塗りつぶし幅は知っておいて損はない。 + +02:27.500 --> 02:33.620 +これを設定しないと、 グラデーションはウィンドウの両側に大きな余白を作る。 + +02:33.620 --> 02:40.100 +しかし、 多くの場合、 特にこれから作ろうとしている種類のUIでは、 可能な限りのピクセルを使いたい。 + +02:40.370 --> 02:43.970 +つまり、 fill widthがtrueであれば、 幅いっぱいに表示されるということだ。 + +02:44.000 
--> 02:51.380 +だから、 この構成に慣れる必要があるんだ。 + +02:51.380 --> 03:00.470 +しかし、 あなたはgrowと言い、 ユーザー・インターフェースの各行をこのように分割する。 + +03:00.470 --> 03:08.150 +そして、 これらのコンテキスト・マネージャーや各部分には、 さまざまなUIウィジェットを置くことができる。 + +03:08.300 --> 03:15.590 +それから、 少し紛らわしいのですが、 列を持つこともできます。 + +03:15.620 --> 03:18.020 +それから、 物事を列に分けたい。 + +03:18.050 --> 03:19.640 +それはすぐにわかる。 + +03:19.670 --> 03:25.250 +もうひとつ、 プロとしての心得として知っておいて損はないのは、 トップレベルにジオ・カラムを持つことはできないということだ。 + +03:25.250 --> 03:31.370 +ゼロから始めて、 その中に好きなだけ列や行を入れ子にすることができる。 + +03:31.370 --> 03:37.280 +しかし、 Gradioは典型的なスクロール可能なウィンドウを想像して作られているため、 最上位のものは行でなければならない。 + +03:37.820 --> 03:44.000 +では、 ジオ・ロウから始めます。 ここにマークダウンの見出しを付けます。 + +03:44.000 --> 03:48.680 +GR dot markdownを使うと、 マークダウン・フィールドができ、 + +03:48.680 --> 03:55.310 +これまで何度も使ってきたと思うが、 そこにHTMLをそのまま入れることもできる。 + +03:55.310 --> 03:56.480 +価格は適正だ。 + +03:56.480 --> 03:59.960 +そして、 その下にも何か入れる。 + +03:59.990 --> 04:01.370 +そして、 その走りを見てみよう。 + +04:01.370 --> 04:04.910 +このセルを実行するとウィンドウが出てくる。 + +04:04.910 --> 04:07.250 +それに、 現時点ではかなりバニラ的だ。 + +04:07.250 --> 04:08.420 +価格は適正だ。 + +04:08.420 --> 04:09.140 +見出しがある。 + +04:09.140 --> 04:12.050 +これが行で、 これが別の行であることがわかるだろう。 + +04:12.050 --> 04:13.430 +そうだ。 + +04:13.430 --> 04:14.870 +簡単なことだ。 + +04:14.900 --> 04:15.590 +それは終わりにしよう。 + +04:15.590 --> 04:16.760 +ここに戻ってこい。 + +04:17.570 --> 04:21.500 +そして、 今はもう少しできることがある。 + +04:21.500 --> 04:27.230 +そこで、 これから行うのは、 もう1行追加することだ。 + +04:27.770 --> 04:34.160 +この行にはデータフレーム、 GRデータフレームが入る。 + +04:34.160 --> 04:38.630 +それはまるでスプレッドシートのようなもので、 画面上に埋め込まれた小さなスプレッドシートのようなものだ。 + +04:38.630 --> 04:46.460 +そして、 説明文、 見積価格、 割引URL、 そしてラップを、 エクセルやグーグル・シートと同じように、 + +04:46.460 --> 04:50.600 +ヘッダーで指示する。 + +04:50.630 --> 04:55.910 +相対的な列幅を任意のセル内で折り返したいと言っているのです。 + +04:55.910 --> 04:59.930 +つまり、 最初の1本は他の4倍の大きさが欲しいということだ。 + +04:59.930 --> 05:05.650 +行数、 列数、 高さを固定したい場合は高さを指定します。 + +05:05.650 --> 05:07.090 +そして、 これらのいくつかはオプションである。 + +05:07.660 --> 05:16.780 +それから、 僕がやっているのは、 UIドットロードというものを呼び出しているんだけど、 これはグラディオの関数の多くに似ている。 + +05:16.780 --> 05:25.240 
+Gradioでは、 関数が関数の名前を取るという、 非常に一貫したパターンが見られます。 + +05:25.240 --> 05:29.110 +この場合、 テーブルのインプットとアウトプットを取得する。 + +05:29.110 --> 05:34.660 +そうやって、 提供する機能を特定のユーザー・インターフェースに接続するのだ。 + +05:34.690 --> 05:41.410 +私たちが言っているのは、 ええと、 機会というものがあるんだ。 + +05:41.410 --> 05:45.580 +それをこのテーブルのインプットにしたいんだ。 + +05:45.580 --> 05:47.380 +そしてアウトプットが欲しい。 + +05:47.380 --> 05:53.770 +これが何を返すにせよ、 ここで定義したデータ・フレームにフックしたい。 + +05:54.220 --> 05:58.900 +インプットは、 これからお話しする機会と呼ばれるものになることはお分かりいただけただろうか。 + +05:58.900 --> 06:07.990 +そして、 このgetテーブルが返すものは何でも、 このテーブル、 つまり我々が構築したデータ・フレームにフックさせたい。 + +06:08.860 --> 06:10.090 +それが理解できればいいのだが......。 + +06:10.090 --> 06:12.790 +では、 この機会とは何なのか。 + +06:12.790 --> 06:19.810 +つまり、 この機会というのは、 ユーザー・インターフェースに記憶させたいちょっとした情報なのだ。 + +06:19.960 --> 06:26.080 +そしてこの素晴らしい状態には、 初回オポチュニティと呼ばれるものがあり、 これは私がここにハードコーディングしたもので、 + +06:26.080 --> 06:34.630 +例の説明価格100ドルと呼ばれる取引のオポチュニティとなる。 + +06:34.630 --> 06:36.970 +URLはcnnになります。 comに移籍した。 + +06:36.970 --> 06:38.530 +それで終わりだろう。 + +06:38.530 --> 06:41.440 +では、 これを実行してどうなるか見てみよう。 + +06:41.440 --> 06:43.300 +私たちはユーザー・インターフェースを手に入れる。 + +06:43.300 --> 06:48.010 +テーブルのような見栄えの良いデータフレームが出来上がる。 + +06:48.010 --> 06:54.460 +この行には、 先ほど作成した説明文、 価格、 見積もり、 割引、 + +06:54.460 --> 06:57.490 +URLが記載されている。 + +06:57.520 --> 07:00.670 +その程度なんだ。 + +07:01.360 --> 07:03.430 +よし、 ここまでは順調だ。 + +07:03.460 --> 07:05.260 +だから続けよう。 + +07:05.500 --> 07:06.250 +分かった。 + +07:06.280 --> 07:08.260 +さて、 筋書きはさらに複雑になる。 + +07:08.260 --> 07:11.890 +この次のセルでは、 その都度ユーザー・インターフェースを構築しているのがわかるだろう。 + +07:11.890 --> 07:12.550 +それが大好きなんだ。 + +07:12.550 --> 07:17.680 +これはJupyter LabsとGradioの素晴らしい点の組み合わせで、 + +07:17.680 --> 07:21.070 +このように段階的に構築できるのです。 + +07:21.070 --> 07:27.310 +そこで、 エージェントフレームワークの新しいインスタンス、 ディールエージェントフレームワークを作ろうと思う。 + +07:27.670 --> 07:36.310 +ええと、 そうでなければ、 僕は、 ええと、 僕たちはこの件に引っ掛けるつもりだと思うんだ。 + +07:36.310 --> 07:41.890 +今のところ、 僕がやりたかったのは、 これを接続できるようにすることだけなんだ。 + +07:41.890 --> 07:51.910 +そして、 チャンスを選択すると、 メッセンジャーにそのチャンスについて警告を発するようにする。 + +07:52.120 --> 07:59.320 +この下に見えるのは、 
2つの異なるものを無線でつないだ、 標準的なパターンだ。 + +07:59.350 --> 08:01.120 +これは以前と同じだ。 + +08:01.120 --> 08:08.200 +gettableを呼び出すと、 機会が渡され、 出力はフレームに入る。 + +08:08.410 --> 08:16.510 +もし選択するのであれば、 データ・フレーム自体でselect関数を呼び出しているのだから、 + +08:16.510 --> 08:24.130 +選択が行われればselect関数が呼び出されるはずだ。 + +08:24.130 --> 08:25.840 +アウトプットはない。 + +08:25.840 --> 08:27.880 +セレクトは何をするのか。 + +08:27.910 --> 08:30.910 +選択した項目のインデックスを見つける。 + +08:30.910 --> 08:38.950 +そして、 ここで引数としてそれを指定することで、 機会のリストからその機会を検索する。 + +08:38.950 --> 08:45.880 +そして、 エージェントのフレームワークをメッセンジャーに呼び出し、 その機会についてアラートを送る。 + +08:46.330 --> 08:47.200 +分かった。 + +08:47.350 --> 08:48.700 +合理的だと思う。 + +08:48.730 --> 08:49.990 +これを実行しよう。 + +08:50.020 --> 08:55.300 +走らせてみると、 まず目につくのは、 たくさんの......うーん。 + +08:55.960 --> 08:57.640 +速すぎた。 + +08:57.670 --> 09:00.100 +遡れば、 それが印刷だったことがわかるだろう。 + +09:00.130 --> 09:08.290 +もちろん、 ここでコマンドラインに出力されるログは、 Jupyterラボの出力になる。 + +09:08.290 --> 09:09.580 +だから、 スタンダードを見ることができた。 + +09:09.580 --> 09:10.780 +それが私が掴んでいたものだ。 + +09:10.810 --> 09:15.580 +そして、 スタンダード・アウトは、 ログ・メッセージをカラー・システムで表示する。 + +09:15.580 --> 09:17.020 +だから、 何が何をやっているのかがわかる。 + +09:17.050 --> 09:18.280 +我々が望んでいた通りだ。 + +09:18.880 --> 09:22.270 +そして、 こちらに来ると、 CNNのあるテーブルが表示される。 + +09:22.270 --> 09:27.700 +そして、 これをクリックすると、 あなたがそれを信じていることを示す通知が届いた。 + +09:27.730 --> 09:30.220 +ちょっと待って、 もう一度押すから。 + +09:30.220 --> 09:33.190 +押すと、 バーンと通知が来る。 + +09:33.610 --> 09:37.210 +それで、 こうなった。 + +09:37.240 --> 09:44.140 +私たちは今、 少なくともエージェントフレームワークにgradioを接続し、 通知できるようにした。 + +09:44.140 --> 09:47.020 +つまり、 ユーザーインターフェースの始まりだ。 + +09:47.020 --> 09:51.100 +裏で動いているエージェント・フレームワークを使った何かの始まりだ。 + +09:51.130 --> 09:59.530 +次のビデオでは、 Jupyter Lab環境からユーザー・インターフェースの実際のコードに移る。 + +09:59.560 --> 10:00.640 +それじゃ、 また diff --git a/week5/community-contributions/subtitles/srts/59673721/ko_KR.srt b/week5/community-contributions/subtitles/srts/59673721/ko_KR.srt new file mode 100755 index 0000000..431963c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/59673721/ko_KR.srt @@ -0,0 +1,442 @@ +WEBVTT + 
+00:00.770 --> 00:09.020 +유피터랩에 온 건 이번이 마지막이에요 5일째를 보고 있어요 프로그램 + +00:09.020 --> 00:13.130 +마지막 주 마지막 날, 8주째죠 + +00:13.130 --> 00:21.140 +GR로 그라디오를 가져오는 아름다운 장면으로 시작해요 제가 그라디오의 열성 + +00:21.140 --> 00:25.880 +팬이라 스트림릿으로 할까 생각 중이에요 + +00:25.880 --> 00:30.440 +이번에는 압류에서 보실 수 있는데, 간소화의 다른 예제 프로젝트들이 있어요 + +00:30.620 --> 00:36.920 +하지만 그래디오와 스트림리츠에는 좋은 점이 많아요 전체적으로 그래디오에 + +00:36.920 --> 00:39.650 +더 집중하고 싶어요 + +00:39.650 --> 00:43.880 +그러니 불필요한 걸 보여줘서 혼란스럽게 할 필요가 없죠 전부 + +00:43.910 --> 00:45.350 +안정적이니까요 + +00:45.380 --> 00:46.640 +자, 됐어요 + +00:46.850 --> 00:53.720 +그렇게 해서 거래 에이전트 프레임워크를 수입하죠 + +00:53.720 --> 00:58.010 +기본적으로 여기서 전에 봤던 모든 걸 불러오는 거죠 + +00:58.010 --> 01:04.820 +기억하세요 이걸 인스턴스화하고 실행을 호출해 시작하세요 기회와 거래를 + +01:04.820 --> 01:06.200 +불러올게요 + +01:06.200 --> 01:14.510 +간단히 다뤘던 것 같은데 과거에는 그레이디오에 두 가지 수준의 세부 사항이 있었죠 높은 + +01:14.510 --> 01:20.930 +수준의 API와 낮은 수준이요 H깅페이스가 파이프라인과 토큰라이저와 + +01:20.960 --> 01:24.710 +모델을 가진 것처럼요 + +01:24.740 --> 01:32.900 +그레이디오는 인터페이스가 훌륭합니다 인터페이스를 빠르게 구축하기 위한 상위 레벨 API입니다 단일 함수나 입력, 출력이 + +01:32.900 --> 01:36.470 +있다면 get up 실행이 정말 간단합니다 팝업 창을 열고 + +01:36.470 --> 01:42.050 +싶다면 훌륭한 인터페이스와 그 사촌인 채팅 인터페이스를 이용하면 됩니다 물론 우리가 + +01:42.050 --> 01:44.360 +아주 성공적으로 사용했죠 + +01:44.570 --> 01:48.560 +인터페이스는 get과 실행에 훌륭한 방법이에요 + +01:48.560 --> 01:56.120 +하위 레벨 API는 GR.블록스라고 합니다 GR블록스는 보다 정교하게 사용자 인터페이스를 + +01:56.120 --> 01:58.640 +만들 수 있게 해주죠 + +01:58.910 --> 02:04.880 +바리케이드도 있고 원하는 대로 부를 수 있어요 + +02:04.970 --> 02:11.600 +여기 있는 그대로라고 하고 UI.Clow라고 해요 인터페이스 대신요 시작 .in-browser + +02:11.600 --> 02:12.650 +.=true + +02:12.650 --> 02:17.480 +그냥 링크가 아니라 이걸 실행하자마자 새 스크린이 나온다는 + +02:17.480 --> 02:18.380 +뜻이죠 + +02:18.680 --> 02:20.570 +알아서 다행이네요 + +02:20.780 --> 02:25.370 +앞으로 나올 창의 이름을 붙일 수도 있어요 + +02:25.370 --> 02:27.470 +필 너비는 참은 알 가치가 있어요 + +02:27.500 --> 02:33.020 +그걸 설정하지 않으면 창문 양쪽의 마진이 크게 남아요 그게 필요할 때도 + +02:33.050 --> 02:33.620 +있죠 + +02:33.620 --> 02:38.630 +하지만 종종, 특히 우리가 만들 UI의 종류에 있어서 가능한 모든 픽셀을 + +02:38.630 --> 02:40.100 +사용하고 싶어하죠 + +02:40.370 
--> 02:43.970 +필링 너비는 참이란 너비 전체를 채운다는 뜻이죠 + +02:44.000 --> 02:50.240 +그리고 여러분이 익숙해질 만한 개념이 있는데 다시 말하지만 이건 몇 번 다뤘던 개념이에요. + +02:50.240 --> 02:51.380 +Get it. + +02:51.380 --> 02:58.670 +get을 하고 사용자 인터페이스를 각각의 행에 놓고 사용자 인터페이스를 이런 + +02:58.670 --> 03:00.470 +행으로 나누면 되죠 + +03:00.470 --> 03:08.150 +각각의 컨텍스트 관리자 각각의 조각들에 다양한 UI 위젯을 놓을 수 있어요 + +03:08.300 --> 03:15.590 +그리고 좀 혼란스러운 건 Column도 있어요 행 안에 있다고 하는 거죠 + +03:15.620 --> 03:18.020 +물건을 열로 쪼개고 싶어요 + +03:18.050 --> 03:19.640 +잠시 후에 보실 거예요 + +03:19.670 --> 03:25.250 +전문가로서 알아야 할 또 다른 팁은 지오 열은 최상위 레벨에서 가질 수 없다는 거예요 + +03:25.250 --> 03:30.230 +0에서 시작해야 해요 그런 다음 열과 행을 중첩할 수 있죠 그 안에서 원하는 + +03:30.230 --> 03:31.370 +만큼 깊이요 + +03:31.370 --> 03:37.280 +하지만 최상위 레벨 1은 열로 되어 있어야 합니다 그래디오는 전형적인 스크롤 가능한 창을 상상하도록 만들어졌거든요 + +03:37.820 --> 03:44.000 +좋아요, 지오 Row로 시작할게요 마크다운을 입력하죠 여기다가요 + +03:44.000 --> 03:48.680 +GR. 마크다운을 쓸 때 마크다운 필드를 제공하죠 지금 많이 사용했어요 + +03:48.680 --> 03:55.310 +HTML을 바로 넣을 수도 있어요 중앙에 큰 텍스트를 넣기 위해 제가 하는 거죠 + +03:55.310 --> 03:56.480 +가격이 적당해요 + +03:56.480 --> 03:59.960 +그 아래에도 뭔가를 Put처럼 했어요 + +03:59.990 --> 04:01.370 +실행해 보죠 + +04:01.370 --> 04:04.910 +이 감방을 실행하면 창문이 열리죠 + +04:04.910 --> 04:07.250 +지금은 아주 밋밋해요 + +04:07.250 --> 04:08.420 +가격이 적당해요 + +04:08.420 --> 04:09.140 +방향이 있어요 + +04:09.140 --> 04:12.050 +이게 행이고 저게 또 다른 행이죠 + +04:12.050 --> 04:13.430 +다 됐어요 + +04:13.430 --> 04:14.870 +아주 간단하죠 + +04:14.900 --> 04:15.590 +닫을게요 + +04:15.590 --> 04:16.760 +이리 와요 + +04:17.570 --> 04:21.500 +이제 비트 더 넣을 수 있어요 + +04:21.500 --> 04:27.230 +이제 행을 하나 더 추가할 거예요 + +04:27.770 --> 04:34.160 +이 행은 기본적으로 테이블인 GR 데이터 프레임을 포함하고 있어요 + +04:34.160 --> 04:38.630 +스프레드시트를 생각해 보세요 스크린에 스프레드시트를 끼워 넣는 거죠 + +04:38.630 --> 04:46.460 +헤더에 대해 설명, 가격 추정, 디스카운트 URL 그리고 랩은 엑셀이나 + +04:46.460 --> 04:50.600 +구글 시츠에 있는 것과 비슷해요. 
+ +04:50.630 --> 04:55.910 +상대적인 셀 너비 내에서 래핑하고 싶다고 말하고 있어요 + +04:55.910 --> 04:59.930 +첫 번째 것은 다른 것의 4배로 만들라고 할 수 있죠 + +04:59.930 --> 05:05.650 +행의 개수, 열의 개수 고정된 높이를 원한다면 높이를 정하죠 + +05:05.650 --> 05:07.090 +어떤 건 선택 사항이에요 + +05:07.660 --> 05:15.520 +UI.Dload라는 걸 호출하고 있어요 그러디오 기능처럼 + +05:15.520 --> 05:16.780 +보이죠 + +05:16.780 --> 05:24.640 +그라디오에 걸쳐 일관된 패턴이 보일 겁니다 함수가 함수의 이름을 취한다고 할 + +05:24.640 --> 05:25.240 +때요 + +05:25.240 --> 05:29.110 +이 경우에는 테이블 입력과 출력을 get으로 하고요 + +05:29.110 --> 05:34.660 +이 사용자 인터페이스에 제공하는 함수를 연결하는 방법이죠 + +05:34.690 --> 05:41.410 +그러니까 제 말은 저한테 기회가 있다는 거예요 잠시 후에 말씀드릴게요 + +05:41.410 --> 05:45.580 +그게 이 테이블의 입력값이 되길 원해요 + +05:45.580 --> 05:47.380 +결과물도 필요해요 + +05:47.380 --> 05:53.770 +이게 뭘 반환하든 데이터 프레임에 연결하고 싶어요 여기서 방금 정의한 거죠 + +05:54.220 --> 05:58.900 +입력이 기회라는 것이 된다는 것을 이해하셨나요? 잠시 후에 말씀드릴게요 + +05:58.900 --> 06:06.640 +이 get 테이블이 반환하는 게 뭐든 그게 이 테이블에 연결되길 원해요 우리가 구성한 이 데이터 + +06:06.640 --> 06:07.990 +프레임에요 + +06:08.860 --> 06:10.090 +이해가 되면 좋겠네요 + +06:10.090 --> 06:12.790 +기회라는 게 뭔가요? 
+ +06:12.790 --> 06:18.340 +이 기회란 건 훌륭한 상태예요 사용자 인터페이스가 기억해야 할 정보의 + +06:18.340 --> 06:19.810 +비트란 뜻이죠 + +06:19.960 --> 06:26.080 +이 훌륭한 상태는 초기 기회라는 것을 갖게 됩니다 여기서 하드 코딩하는 + +06:26.080 --> 06:34.630 +뭔가가 되겠죠 거래를 할 기회가 될 겁니다 예를 들어 설명 가격은 100달러죠 + +06:34.630 --> 06:36.970 +URL cnn이 뜨겠죠 코무요 + +06:36.970 --> 06:38.530 +그게 다예요 + +06:38.530 --> 06:41.440 +실행해 어떻게 되는지 보죠 + +06:41.440 --> 06:43.300 +get in 사용자 인터페이스가 있어요 + +06:43.300 --> 06:48.010 +이렇게 멋진 데이터 프레임이 생겼어요 get 테이블 같은 거죠 + +06:48.010 --> 06:54.460 +한 행에 제가 방금 만든 게 있어요 설명, 가격, 추정값, + +06:54.460 --> 06:57.490 +디스카운트 URL + +06:57.520 --> 07:00.670 +그게 전부인 것 같아요 + +07:01.360 --> 07:03.430 +지금까진 좋아요 + +07:03.460 --> 07:05.260 +계속 가보죠 + +07:05.500 --> 07:06.250 +좋아요 + +07:06.280 --> 07:08.260 +얘기가 복잡해지네요 + +07:08.260 --> 07:11.890 +다음 셀에서 사용자 인터페이스를 매번 구축하고 있는 걸 보실 수 있어요 + +07:11.890 --> 07:12.550 +전 그런 게 좋아요 + +07:12.550 --> 07:17.680 +그게 훌륭한 점 중 하나죠 유피터 랩의 훌륭한 점과 그래디오의 멋진 점이 이런 + +07:17.680 --> 07:21.070 +식으로 점진적으로 구축할 수 있다는 거예요 + +07:21.070 --> 07:27.310 +이제 에이전트 프레임워크의 새 인스턴스를 생성하겠습니다 딜 에이전트 프레임워크요 + +07:27.670 --> 07:36.310 +안 그러면 이 영상은 다음에 봐야 할 것 같네요 + +07:36.310 --> 07:41.890 +지금은 이걸 연결하는 것만 신경 쓰고 있어요 + +07:41.890 --> 07:47.260 +스스로 생성하는 걸 봤죠 일부 정보를 봤어요 기회를 선택할 수 있도록요 + +07:47.260 --> 07:51.910 +그 기회에 대해 경고하기 위해 메신저를 호출하죠 + +07:52.120 --> 07:59.320 +여기 아래를 보시면 두 개의 다른 것을 라디오로 연결했어요 표준 패턴이죠 + +07:59.350 --> 08:01.120 +아까랑 똑같네요 + +08:01.120 --> 08:08.200 +gettable이라고 불러도 기회를 지나쳐 프레임에 출력되죠 + +08:08.410 --> 08:16.510 +선택하면.. 만약 데이터 프레임에 SELECT U 함수를 호출하면 + +08:16.510 --> 08:24.130 +선택이 되면 함수를 호출하고 입력이 기회가 되겠죠. + +08:24.130 --> 08:25.840 +결과물이 없어요 + +08:25.840 --> 08:27.880 +무엇을 선택할까요? 
+ +08:27.910 --> 08:30.910 +여러분이 선택한 것의 인덱스를 찾아요 + +08:30.910 --> 08:37.840 +그걸 바로 여기 인수로 갖고 그걸 지정합니다 기회 목록에서 기회를 찾아내는 + +08:37.840 --> 08:38.950 +거죠 + +08:38.950 --> 08:45.880 +에이전트 프레임워크를 기획자 메신저로 호출해 그 기회에 대한 경고를 보내죠 + +08:46.330 --> 08:47.200 +좋아요 + +08:47.350 --> 08:48.700 +합리적인 것 같네요 + +08:48.730 --> 08:49.990 +이걸 실행하죠 + +08:50.020 --> 08:55.300 +이걸 실행하면 제일 먼저 많은 게 보일 거예요 + +08:55.960 --> 08:57.640 +너무 빨랐어요 + +08:57.670 --> 09:00.100 +되돌아가면 인쇄되고 있는 게 보일 거예요 + +09:00.130 --> 09:08.290 +물론 로그 출력은 명령줄로 유피터 랩의 출력으로 가죠 + +09:08.290 --> 09:09.580 +표준형 밖을 볼 수 있죠 + +09:09.580 --> 09:10.780 +그걸 노린 거예요 + +09:10.810 --> 09:15.580 +그 다음 표준 아웃은 색깔을 입힌 로그 메시지를 보여줘요 + +09:15.580 --> 09:17.020 +뭐가 뭘 하는지 알 수 있죠 + +09:17.050 --> 09:18.280 +우리가 바라던 대로예요 + +09:18.880 --> 09:22.270 +여기 오면 CNN이 우리 테이블을 보여주고 있어요 + +09:22.270 --> 09:27.700 +이걸 클릭하면 방금 알림이 나타났어요 get get get get + +09:27.730 --> 09:30.220 +잠깐만요, 다시 눌러볼게요 + +09:30.220 --> 09:33.190 +Get 버튼을 누르면 알림이 울리죠 + +09:33.610 --> 09:37.210 +그렇게 된 거예요 + +09:37.240 --> 09:44.140 +이제 그라디오와 우리 요원 프레임워크를 연결했으니 우리에게 통보할 수 있어요 + +09:44.140 --> 09:47.020 +사용자 인터페이스의 시작이죠 + +09:47.020 --> 09:51.100 +에이전트 프레임워크가 막후에서 작동하는 것의 시작이죠 + +09:51.130 --> 09:59.530 +다음 비디오에선 유피터 랩 환경에서 사용자 인터페이스를 위한 진짜 코드로 옮겨가죠 + +09:59.560 --> 10:00.640 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/60395261/en_US.srt b/week5/community-contributions/subtitles/srts/60395261/en_US.srt new file mode 100755 index 0000000..bfde31f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60395261/en_US.srt @@ -0,0 +1,529 @@ +WEBVTT + +00:00.680 --> 00:05.900 +Let's keep going with our project to equip our LM with a tool. + +00:05.900 --> 00:12.200 +We just created this piece of code to describe our function, which I'll now execute. + +00:12.200 --> 00:19.760 +And then the next step is we just create something called tools, which now has a single it's a it's + +00:19.790 --> 00:20.930 +a list of tools. 
+ +00:20.960 --> 00:22.340 +And in our case we only have one. + +00:22.340 --> 00:24.560 +It's one element type is function. + +00:24.560 --> 00:28.460 +And the function is this thing right here. + +00:28.460 --> 00:31.880 +So that is now in a list that's called tools. + +00:32.840 --> 00:38.630 +And now the moment it's time to give an LM in our case GPT. + +00:38.660 --> 00:41.570 +For many the power to use this tool. + +00:41.570 --> 00:43.190 +How is that going to work. + +00:43.190 --> 00:48.710 +So it's a bit fiddly I gotta warn you, be be mentally prepared for something a bit fiddly. + +00:48.710 --> 00:51.980 +It starts with something that will look very familiar to you. + +00:51.980 --> 00:54.680 +This is our usual chat function. + +00:54.680 --> 00:55.760 +You know it well. + +00:55.760 --> 01:03.290 +It has a message in history that you know and then it uses that to craft the messages that go to OpenAI. + +01:03.320 --> 01:05.930 +Now there is a small, subtle difference here. + +01:05.930 --> 01:06.950 +Something has changed. + +01:06.950 --> 01:08.360 +Something has been added. + +01:08.570 --> 01:10.880 +I'm going to wait a moment for you to spot it. + +01:10.910 --> 01:15.860 +Once you've spotted it, it's super obvious, but there is a little addition that's been slipped in + +01:15.890 --> 01:16.220 +there. + +01:16.220 --> 01:19.070 +And if you saw it, very nice. + +01:19.070 --> 01:20.720 +If you didn't, it's very obvious. + +01:20.720 --> 01:21.500 +Here it is. + +01:22.370 --> 01:23.870 +We pass in the tools. + +01:23.870 --> 01:27.350 +The API is exactly the same chat dot completions, dot create. + +01:27.380 --> 01:31.520 +We give it a model, we give it the messages and then we also give it the tools. + +01:31.520 --> 01:34.970 +In other words, we give it this object right here. 
+ +01:35.270 --> 01:44.420 +Uh, and when ChatGPT sorry when chat when OpenAI is taking that and building the prompt, the tokens + +01:44.420 --> 01:52.250 +that get sent to the to the LLM, to the GPT four LLM, it's going to take this and convert it into + +01:52.250 --> 01:55.700 +a series of tokens that are going to describe our function. + +01:55.700 --> 02:02.810 +It's going to use this English to say what it does, and it's going to inform the LLM that this is a + +02:02.810 --> 02:05.180 +function that it can call. + +02:05.210 --> 02:10.910 +Uh, and the reason that works is because it's been trained with lots of examples that use tokens in + +02:10.940 --> 02:14.120 +that way to give the LLM that ability. + +02:14.690 --> 02:21.080 +And so the some of the magic is about to be lost when you see what's going to happen next. + +02:22.430 --> 02:28.430 +What comes back from the LLM you remember is in response choices zero. + +02:28.430 --> 02:35.450 +And what we do is we find out whether or not the finish reason is this thing tool calls. + +02:35.450 --> 02:41.300 +That happens when GPT four is telling us, I don't have an answer for you yet. + +02:41.300 --> 02:48.410 +Instead, I'm going to stop because I want you to call one of your tools and provide me with its output. + +02:48.470 --> 02:56.240 +So if that happens, what we then need to do is collect the message from GPT four. + +02:56.450 --> 03:00.800 +So this collects that response choice zero dot message. + +03:00.860 --> 03:07.970 +It contains the whatever it's sent back, which will in fact be a request for us to run a tool. + +03:08.180 --> 03:13.390 +And then there's a bit of work to do to unpack that message. + +03:13.450 --> 03:15.820 +Figure out what it wants to do and do it. + +03:15.820 --> 03:17.710 +And I've put all of that in a separate function. + +03:17.710 --> 03:18.370 +I've cheated. + +03:18.370 --> 03:21.400 +I could put it all in here, but it becomes quite messy. 
+ +03:21.610 --> 03:23.350 +So instead I've separated that out. + +03:23.350 --> 03:25.810 +So at this point we'll see it in a second. + +03:25.810 --> 03:32.440 +Believe me that this function will unpack the message back from GPT four zero and if necessary, well, + +03:32.440 --> 03:33.250 +it will be necessary. + +03:33.250 --> 03:37.060 +It will call our tool and it will return the result of that tool. + +03:37.060 --> 03:44.530 +The response to go back to GPT four zero and the city that was called, uh, what we then do is we have + +03:44.530 --> 03:49.180 +to add two more rows to our list of messages. + +03:49.210 --> 03:53.410 +Our messages, you remember, have like user assistant, user assistant. + +03:53.410 --> 03:56.320 +We're now going to add two new rows. + +03:56.320 --> 04:04.840 +One of those rows is this thing message, which is nothing more than what we got back from GPT four + +04:04.870 --> 04:07.600 +zero asking us to call a tool. + +04:07.600 --> 04:12.400 +So the first thing we add in is the assistant asking us to run a tool. + +04:12.400 --> 04:19.840 +We put that in the list of messages, and then after that, what we put next in the list of messages + +04:19.870 --> 04:23.740 +is our result of calling the function. + +04:23.950 --> 04:32.620 +So what's now in this list of messages is user assistant, user assistant, user assistant says run + +04:32.620 --> 04:33.340 +tool. + +04:33.340 --> 04:35.710 +We say this is the tool result. + +04:35.980 --> 04:40.420 +And then that is what we're now sending back to OpenAI. + +04:40.450 --> 04:41.920 +I hope that made sense. + +04:41.950 --> 04:46.990 +If not, you simply put some print statements in here when you run it and you'll see it's exactly what + +04:46.990 --> 04:47.470 +I said. + +04:47.470 --> 04:51.400 +And once you print the whole messages thing, I think it will be crystal clear to you. + +04:51.430 --> 04:56.170 +You'll see that full exchange appearing in those messages. 
+ +04:57.160 --> 05:04.060 +And then at the end here, as usual, we just return the ultimate answer from the LM. + +05:04.270 --> 05:09.670 +Uh, what I should point out probably is that I've decided I don't pass in the tools a second time, + +05:09.670 --> 05:14.500 +because we wouldn't expect it to run our tool twice, but there'd be no harm in putting it in there. + +05:14.500 --> 05:20.370 +But it obviously wouldn't use it, So the final missing ingredient, of course, is that I now have + +05:20.370 --> 05:25.950 +to write this function here, which is again a little bit more involved than than one might like it, + +05:25.950 --> 05:28.740 +but you can just use it verbatim in your own projects. + +05:28.770 --> 05:30.780 +It's just some stuff to know. + +05:30.930 --> 05:36.510 +Um, so we've got this, this thing, this message that's come back from GPT four. + +05:36.930 --> 05:43.740 +And what we have to do is unpack it to find out which tool was it wanting to call. + +05:43.770 --> 05:48.330 +Now, in our case, we know what tool it wants to call because we only have one tool and it's the tool + +05:48.360 --> 05:49.380 +to get prices. + +05:49.410 --> 05:53.370 +Um, but I'm keeping this in here anyway so that you can see how it works. + +05:53.430 --> 05:59.850 +Um, and what we should really do here is say if tool call equals, uh, get ticket price, then. + +05:59.880 --> 06:05.700 +So there should really be a sort of like, like if or some sort of, uh, a series of, uh, like a, + +06:05.730 --> 06:10.290 +like a, like a switch of all the possible, uh, things that it could ask to, to call. + +06:10.290 --> 06:12.330 +But in our case, we know there's only one tool available. + +06:12.330 --> 06:13.560 +So that's what it is. + +06:13.830 --> 06:18.570 +What we can then do is load the arguments that it wants to call. + +06:18.570 --> 06:23.520 +So in this this tool call there is a tool called dot function. 
+ +06:23.550 --> 06:28.710 +Dot arguments that tells us what parameters it's chosen. + +06:28.920 --> 06:31.770 +Um, and that comes back in the form of JSON. + +06:31.770 --> 06:32.880 +Uh, just a string. + +06:32.880 --> 06:38.670 +So we have to use the Json.loads Loadstring function to convert that into a dictionary. + +06:38.670 --> 06:44.880 +And then we look up the only actual argument that we have, which is destination city. + +06:44.910 --> 06:48.420 +We look that up and put that in a variable city. + +06:48.420 --> 06:52.020 +So we have now unpacked the tool and the argument. + +06:52.020 --> 06:56.190 +And what remains to be done is this line here. + +06:56.190 --> 06:59.670 +And that line obviously is nothing very clever. + +06:59.670 --> 07:04.890 +That line is quite simply calling our get ticket price function. + +07:04.890 --> 07:10.890 +We've established that's the tool it wants to run, and we've plucked out the city and we now call that + +07:10.890 --> 07:11.790 +function. + +07:12.000 --> 07:14.340 +Um, we then build a response. + +07:14.340 --> 07:15.990 +And here is the response. + +07:15.990 --> 07:20.910 +This is the thing that's going to get shoved at the bottom of the the messages. + +07:20.910 --> 07:21.420 +There it goes. + +07:21.420 --> 07:24.920 +It's going to get shoved in there and what their response looked like is this. + +07:24.950 --> 07:29.060 +Now, you know, each of these rows has a role and content. + +07:29.390 --> 07:35.600 +And in the past we've seen that role can be user or it can be system user or assistant system user assistant, + +07:35.600 --> 07:36.470 +user assistant. + +07:36.470 --> 07:38.090 +Well, there's something else it can be too. + +07:38.120 --> 07:39.590 +It can be this word tool. + +07:39.590 --> 07:43.100 +So in this case we put into the response tool. + +07:43.100 --> 07:47.630 +And for the content we put in a string which is this. 
+ +07:47.660 --> 07:52.280 +This dictionary turned into a string using the JSON dump string function. + +07:52.280 --> 07:55.220 +So we put in the destination city and the price. + +07:55.220 --> 08:04.670 +We do also need to add this tool call ID into the the message, which is a way that it links this response + +08:04.670 --> 08:08.180 +to the request that came right before it. + +08:08.180 --> 08:17.270 +So we're putting message.tool.id into this guy so that when it sees these two, it fully understands + +08:17.270 --> 08:20.990 +that that is associated with this request. + +08:20.990 --> 08:23.990 +And that's all there is to it. + +08:24.260 --> 08:27.480 +Although this time This time it was a fair amount. + +08:27.510 --> 08:29.370 +There's quite a lot to take on board there. + +08:29.580 --> 08:34.710 +But as I say, step through it, put some print statements, see that working. + +08:34.710 --> 08:38.070 +But let's see now what happens if we bring this up in a chat. + +08:39.720 --> 08:40.800 +Hi there. + +08:44.550 --> 08:45.270 +Hello. + +08:45.270 --> 08:46.500 +How can I assist you today? + +08:46.530 --> 08:49.920 +I'd like to go to London. + +08:51.600 --> 08:52.170 +Sure. + +08:52.170 --> 08:53.460 +Would you like to know the price? + +08:53.490 --> 08:54.390 +Yes. + +08:55.770 --> 08:59.520 +The ticket price for a return trip to London is $7.99. + +08:59.520 --> 09:00.900 +So it got the right price. + +09:00.900 --> 09:05.850 +So what we're hoping is if we turn back to our Jupyter lab, what we should see is that it's printed + +09:05.850 --> 09:08.490 +that our tool was called, uh. + +09:08.490 --> 09:11.370 +And I think we can be pretty confident that it will, but let's have a look. + +09:11.400 --> 09:15.210 +It is indeed tool get ticket price called for London. + +09:15.240 --> 09:16.740 +Let's keep going. + +09:17.220 --> 09:22.620 +Uh, and how about, uh, Paris? + +09:25.320 --> 09:27.870 +899 and Tokyo. 
+ +09:30.710 --> 09:34.130 +The ticket price for a return trip to Tokyo is 1400. + +09:34.760 --> 09:35.930 +And Berlin. + +09:37.280 --> 09:38.660 +Berlin was the one we just added in. + +09:38.690 --> 09:39.530 +Let's see if it's got that. + +09:39.560 --> 09:41.480 +Yes, 499. + +09:41.510 --> 09:46.850 +It certainly seems to work and timbuk2. + +09:48.710 --> 09:53.420 +I'm sorry, but I don't have information on ticket prices to timbuk2. + +09:53.810 --> 09:55.520 +Uh, let's have a look here. + +09:55.520 --> 09:58.940 +And you can see the series of tools called. + +09:58.940 --> 10:02.720 +And we know that when it was called for Timbuk2, it would replied unknown. + +10:02.720 --> 10:07.910 +And as a result, because we gave it the system prompt to say when it doesn't know, it was quite clear + +10:07.910 --> 10:08.960 +that it didn't know. + +10:09.680 --> 10:12.530 +And with that, there was a lot going on. + +10:12.530 --> 10:13.910 +I hope you got a sense. + +10:13.910 --> 10:18.560 +This is in fact, a powerful, uh, piece of functionality. + +10:18.560 --> 10:27.650 +It's a powerful technique to allow you to give more powers to your LM, but but under the covers, it's + +10:27.650 --> 10:28.580 +not magical. + +10:28.580 --> 10:34.990 +It's really a bunch of if statements and some complicated messages to and fro, so that we can allow + +10:34.990 --> 10:38.770 +the LLM to inform us that it needs more information about something. + +10:38.770 --> 10:40.990 +And that's how it works under the covers. + +10:40.990 --> 10:42.850 +And I hope that makes sense to you. + +10:42.850 --> 10:45.610 +And I hope you're able to use this in your own projects. + +10:45.610 --> 10:47.350 +As ways to extend this. + +10:47.350 --> 10:54.340 +You can look at adding more kinds of tools and tools that might say something about the availability + +10:54.340 --> 10:56.710 +of that flight or something like that. + +10:56.710 --> 10:58.510 +So you can use more tools. 
+ +10:58.510 --> 11:03.640 +Or if you want to be really bold, you can add a tool to actually book the flight. + +11:03.640 --> 11:07.120 +It's a tool that, when it's called again, could just print something or could write something to a + +11:07.120 --> 11:08.170 +file or whatever. + +11:08.170 --> 11:14.890 +And that would allow the LLM to call back into your second tool and actually book a flight when the + +11:14.890 --> 11:16.390 +user asks for it. + +11:16.390 --> 11:21.100 +So that would be a fun one, and you'd have a bunch of arguments about the dates and the like. + +11:21.100 --> 11:28.810 +So give that a shot, and by the end of that, you will be very proficient in how to write tools and + +11:28.810 --> 11:35.560 +how to equip your LLM to carry out actions that run in your software. diff --git a/week5/community-contributions/subtitles/srts/60395261/ja_JP.srt b/week5/community-contributions/subtitles/srts/60395261/ja_JP.srt new file mode 100755 index 0000000..5014e4c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60395261/ja_JP.srt @@ -0,0 +1,457 @@ +WEBVTT + +00:00.680 --> 00:05.900 +LMにツールを装備するプロジェクトを続けよう。 + +00:05.900 --> 00:12.200 +関数を記述するために、 このコードを作成しました。 + +00:12.200 --> 00:20.930 +そして次のステップは、 toolsと呼ばれるものを作成する。 + +00:20.960 --> 00:22.340 +そして私たちの場合は1つしかない。 + +00:22.340 --> 00:24.560 +それはファンクションである。 + +00:24.560 --> 00:28.460 +そして、 その機能がここにある。 + +00:28.460 --> 00:31.880 +ツールというリストに入っている。 + +00:32.840 --> 00:38.630 +そして今、 このGPTでLMを与える時が来た。 + +00:38.660 --> 00:41.570 +多くの人にとって、 このツールを使う力は大きい。 + +00:41.570 --> 00:43.190 +どうなるんだ? 
+ +00:43.190 --> 00:48.710 +だから、 ちょっと手こずるんだ......警告しておくけど、 ちょっと手こずることを精神的に覚悟しておいてほしい。 + +00:48.710 --> 00:51.980 +それは、 あなたにとって非常に見慣れたものから始まる。 + +00:51.980 --> 00:54.680 +これはいつものチャット機能です。 + +00:54.680 --> 00:55.760 +よくご存知でしょう。 + +00:55.760 --> 01:03.290 +あなたが知っているメッセージを履歴に残し、 それを使ってOpenAIに送るメッセージを作成する。 + +01:03.320 --> 01:05.930 +さて、 ここには小さな微妙な違いがある。 + +01:05.930 --> 01:06.950 +何かが変わった。 + +01:06.950 --> 01:08.360 +何かが追加された。 + +01:08.570 --> 01:10.880 +あなたがそれを見つけるまで、 少し待ちます。 + +01:10.910 --> 01:16.220 +一度目につければ一目瞭然だが、 そこにはちょっとした補足がある。 + +01:16.220 --> 01:19.070 +そして、 もし見たなら、 とても素晴らしかった。 + +01:19.070 --> 01:20.720 +そうでないなら、 それは非常に明白だ。 + +01:20.720 --> 01:21.500 +これだ。 + +01:22.370 --> 01:23.870 +道具の中でパスする。 + +01:23.870 --> 01:27.350 +APIはまったく同じチャット・ドット補完、 ドット作成だ。 + +01:27.380 --> 01:31.520 +モデルを与え、 メッセージを与え、 そしてツールも与える。 + +01:31.520 --> 01:34.970 +つまり、 このオブジェクトをここに置くのだ。 + +01:35.270 --> 01:44.420 +ChatGPTは、 OpenAIがそれを受け取り、 プロンプトを構築するとき、 LLMに送信されるトークン、 + +01:44.420 --> 01:55.700 +GPT 4 LLMに送信されるトークンを受け取り、 私たちの機能を説明する一連のトークンに変換します。 + +01:55.700 --> 02:05.180 +LLMは、 この英語を使って何をするのかを説明し、 LLMが呼び出すことのできる関数であることを知らせる。 + +02:05.210 --> 02:14.120 +それが機能するのは、 LLMにその能力を与えるために、 トークンをそのように使う例をたくさん使って訓練されているからだ。 + +02:14.690 --> 02:21.080 +そして、 次に何が起こるかを見たとき、 マジックの一部は失われようとしている。 + +02:22.430 --> 02:28.430 +あなたが覚えているLLMから戻ってくるものは、 応答選択肢ゼロである。 + +02:28.430 --> 02:35.450 +そして、 私たちがすることは、 最終的な理由がこのツールの呼び出しなのかどうかを見極めることだ。 + +02:35.450 --> 02:41.300 +GPT4位が「まだ答えは出ていない。 + +02:41.300 --> 02:48.410 +そのかわり、 あなたのツールのひとつを呼び出して、 その出力を教えてほしいから、 もうやめよう。 + +02:48.470 --> 02:56.240 +そうなれば、 GPT4からメッセージを収集する必要がある。 + +02:56.450 --> 03:00.800 +つまり、 これはそのレスポンス・チョイスのゼロ・ドット・メッセージを集めたものだ。 + +03:00.860 --> 03:07.970 +送り返されたものが何であれ、 それは実際、 ツールの実行を要求するものだ。 + +03:08.180 --> 03:13.390 +そして、 そのメッセージを解きほぐすための作業が少しある。 + +03:13.450 --> 03:15.820 +それが何をしたいのかを見極め、 それを実行する。 + +03:15.820 --> 03:17.710 +そして、 私はそのすべてを別の関数に入れた。 + +03:17.710 --> 03:18.370 +私はズルをしてきた。 + +03:18.370 --> 03:21.400 +ここに全部書いてもいいんだけど、 
ごちゃごちゃしちゃうからね。 + +03:21.610 --> 03:23.350 +だからその代わりに、 私はその部分を切り離した。 + +03:23.350 --> 03:25.810 +だから、 この時点ではすぐに見ることができる。 + +03:25.810 --> 03:33.250 +この機能は、 GPT4ゼロからメッセージを解凍し、 必要であれば、 まあ、 必要であろう。 + +03:33.250 --> 03:37.060 +このツールは我々のツールを呼び出し、 その結果を返す。 + +03:37.060 --> 03:44.530 +GPT4のゼロに戻るレスポンスと、 呼び出された都市は、 ええと、 次にすることは、 + +03:44.530 --> 03:49.180 +メッセージのリストにさらに2行追加することだ。 + +03:49.210 --> 03:53.410 +私たちのメッセージは、 ユーザー・アシスタント、 ユーザー・アシスタントのようなものだ。 + +03:53.410 --> 03:56.320 +これから新しい行を2つ追加する。 + +03:56.320 --> 04:07.600 +その行のひとつがこのメッセージで、 これはGPT4ゼロから戻ってきた、 ツールを呼ぶようにというメッセージ以外の何ものでもない。 + +04:07.600 --> 04:12.400 +そこでまず、 アシスタントにツールの実行を依頼する。 + +04:12.400 --> 04:19.840 +それをメッセージのリストに入れ、 その次にメッセージのリストに入れるのは、 + +04:19.870 --> 04:23.740 +関数を呼び出した結果だ。 + +04:23.950 --> 04:33.340 +このメッセージのリストには、 ユーザー・アシスタント、 ユーザー・アシスタント、 ユーザー・アシスタントがツールを実行すると書いてある。 + +04:33.340 --> 04:35.710 +私たちは、 これがツールの結果だと言っている。 + +04:35.980 --> 04:40.420 +そして、 それをOpenAIに送り返す。 + +04:40.450 --> 04:41.920 +お分かりいただけただろうか。 + +04:41.950 --> 04:47.470 +そうでない場合は、 実行時にここにprint文を入れるだけで、 私が言ったとおりのことがわかる。 + +04:47.470 --> 04:51.400 +そして、 メッセージをすべて印刷すれば、 はっきりとわかると思う。 + +04:51.430 --> 04:56.170 +そのメッセージには、 そのやりとりの全文が表示される。 + +04:57.160 --> 05:04.060 +そして最後に、 いつものようにLMから究極の答えを返す。 + +05:04.270 --> 05:09.670 +ツールを2回実行することは想定していないので、 2度目は通さないことにしているんだが、 + +05:09.670 --> 05:14.500 +入れておいて損はないだろう。 + +05:14.500 --> 05:20.370 +もちろん、 最終的に足りないのは、 + +05:20.370 --> 05:28.740 +この関数を書かなければならないということだ。 + +05:28.770 --> 05:30.780 +知っておいて損はないよ + +05:30.930 --> 05:36.510 +GPT4から戻ってきたメッセージがこれだ。 + +05:36.930 --> 05:43.740 +そして、 私たちがしなければならないのは、 それを解凍して、 どのツールを呼び出したかったのかを突き止めることだ。 + +05:43.770 --> 05:49.380 +今、 私たちの場合、 どのツールを呼び出したいかはわかっている。 + +05:49.410 --> 05:53.370 +うーん、 でも、 どう動くか見てもらうために、 とりあえずここに置いておくよ。 + +05:53.430 --> 05:59.850 +そして、 ここで本当にすべきことは、 ツール・コールがチケットの値段とイコールであれば、 チケットの値段を知ることができる、 ということだ。 + +06:10.290 --> 06:12.330 +しかし、 我々の場合、 使えるツールは1つしかない。 + +06:12.330 --> 06:13.560 +そういうことだ。 + +06:13.830 --> 06:18.570 +次にできることは、 
呼び出したい引数をロードすることだ。 + +06:18.570 --> 06:23.520 +つまり、 このツール・コールの中にドット・ファンクションというツールがある。 + +06:23.550 --> 06:28.710 +どのようなパラメータを選択したかを示すドット引数。 + +06:28.920 --> 06:31.770 +それがJSONの形で戻ってくる。 + +06:31.770 --> 06:32.880 +ええと、 ただのひもです。 + +06:32.880 --> 06:38.670 +だから、 Jsonを使わなければならない。 Loadstring関数をロードして辞書に変換する。 + +06:38.670 --> 06:44.880 +そして、 私たちが実際に持っている唯一の論拠である目的地都市について調べる。 + +06:44.910 --> 06:48.420 +それを調べて、 可変都市に入れる。 + +06:48.420 --> 06:52.020 +というわけで、 道具と議論を解きほぐした。 + +06:52.020 --> 06:56.190 +そして、 残るはこのラインだ。 + +06:56.190 --> 06:59.670 +そして、 そのセリフは明らかに気の利いたものではない。 + +06:59.670 --> 07:04.890 +この行は、 単純にチケット価格の取得関数を呼び出しているだけだ。 + +07:04.890 --> 07:11.790 +我々は、 それが実行したいツールであることを確認し、 都市を抜き出して、 その関数を呼び出した。 + +07:12.000 --> 07:14.340 +そして、 私たちは対応策を構築する。 + +07:14.340 --> 07:15.990 +その返答がこれだ。 + +07:15.990 --> 07:20.910 +これはメッセージの一番下に押し込まれるものだ。 + +07:20.910 --> 07:21.420 +そうだ。 + +07:21.420 --> 07:24.920 +そこに押し込まれることになるが、 彼らの反応はこうだった。 + +07:24.950 --> 07:29.060 +さて、 これらの行にはそれぞれ役割と内容がある。 + +07:29.390 --> 07:36.470 +過去には、 ロールはユーザーであったり、 システム・ユーザーであったり、 システム・ユーザー・アシスタントであったり、 ユーザー・アシスタントであったりした。 + +07:36.470 --> 07:38.090 +まあ、 それ以外にもあるんだけどね。 + +07:38.120 --> 07:39.590 +それはこの言葉のツールである。 + +07:39.590 --> 07:43.100 +だから、 この場合はレスポンスツールに入れた。 + +07:43.100 --> 07:47.630 +そして、 コンテンツには次のような文字列を入れた。 + +07:47.660 --> 07:52.280 +この辞書は、 JSONダンプ文字列関数を使用して文字列に変換される。 + +07:52.280 --> 07:55.220 +そこで、 目的地の都市と料金を入れた。 + +07:55.220 --> 08:08.180 +また、 このツールコールIDをメッセージに追加する必要がある。 これは、 このレスポンスをその直前に来たリクエストにリンクする方法である。 + +08:08.180 --> 08:20.990 +だから、 メッセージを入れているんだ。 ツールを使用する。 この2つを見たときに、 それがこのリクエストに関連していることを完全に理解できるようにするためだ。 + +08:20.990 --> 08:23.990 +それがすべてだ。 + +08:24.260 --> 08:27.480 +今回はかなりの量だったが。 + +08:27.510 --> 08:29.370 +そこには受け止めるべきことがたくさんある。 + +08:29.580 --> 08:34.710 +しかし、 私が言うように、 それを通してステップを踏み、 いくつかのprintステートメントを置き、 それが機能していることを確認する。 + +08:34.710 --> 08:38.070 +しかし、 チャットでこの話を持ち出したらどうなるか、 今から見てみよう。 + +08:39.720 --> 08:40.800 +こんにちは。 + +08:44.550 --> 08:45.270 +こんにちは。 + +08:45.270 --> 08:46.500 
+本日はどのようなご用件でしょうか? + +08:46.530 --> 08:49.920 +ロンドンに行きたい。 + +08:51.600 --> 08:52.170 +もちろんだ。 + +08:52.170 --> 08:53.460 +値段を知りたいですか? + +08:53.490 --> 08:54.390 +そうだ。 + +08:55.770 --> 08:59.520 +ロンドンまでの往復チケットは7ドル。 99. + +08:59.520 --> 09:00.900 +だから適正価格だった。 + +09:00.900 --> 09:08.490 +つまり、 Jupyterラボに戻ると、 私たちのツールの名前が印刷されているはずです。 + +09:08.490 --> 09:11.370 +その自信はかなりあると思うが、 ちょっと見てみよう。 + +09:11.400 --> 09:15.210 +さすがにロンドン行きのチケット代はツール・ゲットと呼ばれるだけある。 + +09:15.240 --> 09:16.740 +続けよう。 + +09:17.220 --> 09:22.620 +パリはどう? + +09:25.320 --> 09:27.870 +899と東京。 + +09:30.710 --> 09:34.130 +東京までの往復航空券は1400ドル。 + +09:34.760 --> 09:35.930 +そしてベルリン。 + +09:37.280 --> 09:38.660 +ベルリンは今、 追加したところだ。 + +09:38.690 --> 09:39.530 +それがあるかどうか見てみよう。 + +09:39.560 --> 09:41.480 +はい、 499です。 + +09:41.510 --> 09:46.850 +確かに効きそうだし、 ティンブク2も。 + +09:48.710 --> 09:53.420 +申し訳ないが、 timbuk2へのチケット料金に関する情報は持っていない。 + +09:53.810 --> 09:55.520 +ええと、 ここを見てみよう。 + +09:55.520 --> 09:58.940 +と呼ばれる一連のツールを見ることができる。 + +09:58.940 --> 10:02.720 +そして、 Timbuk2のために呼ばれたとき、 それが不明と答えたであろうことも知っている。 + +10:02.720 --> 10:08.960 +その結果、 わからないときはわからないと言うようにシステムプロンプトを与えたので、 わからないということがはっきりした。 + +10:09.680 --> 10:12.530 +それとともに、 いろいろなことがあった。 + +10:12.530 --> 10:13.910 +お分かりいただけただろうか。 + +10:13.910 --> 10:18.560 +これは実際、 強力な機能の一部だ。 + +10:18.560 --> 10:28.580 +LMにさらなる力を与えるための強力なテクニックだが、 しかし、 その裏では魔法ではない。 + +10:28.580 --> 10:38.770 +LLMが何かについてもっと情報が必要だと知らせてくれるようにするためだ。 + +10:38.770 --> 10:40.990 +それが、 布団の中での仕事だ。 + +10:40.990 --> 10:42.850 +そして、 それがあなたにとって意味のあるものであることを願っている。 + +10:42.850 --> 10:45.610 +そして、 これをあなた自身のプロジェクトに役立ててほしい。 + +10:45.610 --> 10:47.350 +これを拡張する方法として。 + +10:47.350 --> 10:56.710 +より多くの種類のツールや、 そのフライトの空席状況などを示すようなツールを追加することも検討できる。 + +10:56.710 --> 10:58.510 +だから、 より多くの道具を使うことができる。 + +10:58.510 --> 11:03.640 +あるいは、 本当に大胆にやりたいなら、 実際にフライトを予約するツールを追加することもできる。 + +11:03.640 --> 11:08.170 +これは、 再び呼び出されたときに、 何かを印刷したり、 ファイルに何かを書き込んだりするツールだ。 + +11:08.170 --> 11:16.390 +そうすれば、 LLMは2つ目のツールにコールバックし、 ユーザーがフライトの予約を要求したときに、 
実際にフライトを予約することができる。 + +11:16.390 --> 11:21.100 +だから、 それはそれで面白いし、 日付とかについて何度も議論することになる。 + +11:21.100 --> 11:28.810 +そして、 それが終わるころには、 ツールの書き方や、 ソフトウェアで実行されるアクションを実行するためにLLMを装備する方法について、 + +11:28.810 --> 11:35.560 +非常に習熟していることだろう。 diff --git a/week5/community-contributions/subtitles/srts/60395261/ko_KR.srt b/week5/community-contributions/subtitles/srts/60395261/ko_KR.srt new file mode 100755 index 0000000..d8e17fd --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60395261/ko_KR.srt @@ -0,0 +1,514 @@ +WEBVTT + +00:00.680 --> 00:05.900 +LM에 도구를 장착하기 위한 프로젝트를 계속하죠 + +00:05.900 --> 00:12.200 +우리 함수를 설명하기 위해 이 코드를 만들었죠 이제 실행할게요 + +00:12.200 --> 00:20.930 +다음 단계는 도구라는 걸 만드는 겁니다 도구 목록이죠 + +00:20.960 --> 00:22.340 +우리는 한 마리밖에 없어요 + +00:22.340 --> 00:24.560 +한 가지 요소 유형은 함수예요 + +00:24.560 --> 00:28.460 +함수는 바로 이거예요 + +00:28.460 --> 00:31.880 +그게 도구 목록에 있어요 + +00:32.840 --> 00:38.630 +이제 LM을 줄 차례입니다 GPT를요 + +00:38.660 --> 00:41.570 +많은 사람이 이 도구를 사용하죠 + +00:41.570 --> 00:43.190 +어떻게 그럴 수 있죠? 
+ +00:43.190 --> 00:48.710 +비트 박스를 만드는 건 좀 성가신 일이에요 미리 경고하는데 성가신 일에 대비하세요 + +00:48.710 --> 00:51.980 +아주 친숙한 것부터 시작하죠 + +00:51.980 --> 00:54.680 +이건 일반적인 채팅 함수예요 + +00:54.680 --> 00:55.760 +잘 아시네요 + +00:55.760 --> 01:03.290 +여러분이 아는 역사적 메시지를 오픈AI로 보내는 데 사용하죠 + +01:03.320 --> 01:05.930 +미묘한 차이가 있어요 + +01:05.930 --> 01:06.950 +뭔가 변했어요 + +01:06.950 --> 01:08.360 +뭔가 추가됐어요 + +01:08.570 --> 01:10.880 +당신이 알아차릴 때까지 잠시 기다릴게요 + +01:10.910 --> 01:16.220 +일단 눈에 띄면 아주 명확하지만 추가로 넣은 부분이 있어요 + +01:16.220 --> 01:19.070 +직접 보셨다면 아주 좋았겠죠 + +01:19.070 --> 01:20.720 +안 했으면 뻔하죠 + +01:20.720 --> 01:21.500 +여기 있네요 + +01:22.370 --> 01:23.870 +도구를 전달하죠 + +01:23.870 --> 01:27.350 +API는 채팅방 .완성 .Create와 정확히 동일해요 + +01:27.380 --> 01:31.520 +모델과 메시지를 주고 도구도 주죠 + +01:31.520 --> 01:34.970 +즉, 이 객체를 여기에 주는 거죠 + +01:35.270 --> 01:44.420 +챗GPT, 아니 오픈AI가 프롬프트를 구축할 때 전송되는 토큰은 GPT + +01:44.420 --> 01:52.250 +4개 LLM으로 전송됩니다 이 토큰을 토큰 시리즈로 변환해서 + +01:52.250 --> 01:55.700 +함수를 설명하죠 + +01:55.700 --> 02:02.810 +이 영어를 사용해 뭘 하는지 말할 거예요 LLM에게 이게 호출 가능한 + +02:02.810 --> 02:05.180 +함수라는 걸 알려주죠 + +02:05.210 --> 02:10.910 +그 이유는 많은 예제들과 함께 훈련되었기 때문입니다 토큰을 이용해서 + +02:10.940 --> 02:14.120 +LLM에게 그 기능을 주기 위해서요 + +02:14.690 --> 02:21.080 +그래서 다음에 일어날 일을 보면 마법 같은 일이 사라지게 되죠 + +02:22.430 --> 02:28.430 +기억하시는 LLM에서 돌아온 건 응답 선택에서 0이죠 + +02:28.430 --> 02:35.450 +그 최종 이유가 이 도구 호출인지 아닌지 알아내는 거죠 + +02:35.450 --> 02:41.300 +GPT 4가 알려 줄 때 발생하는 현상이죠 아직 답은 못 드렸어요 + +02:41.300 --> 02:48.410 +대신에, 멈추겠습니다 여러분이 가진 도구 중 하나를 호출해 결과물을 제공해 주세요 + +02:48.470 --> 02:56.240 +그런 일이 발생하면 GPT 4로부터 메시지를 수집해야 하죠 + +02:56.450 --> 03:00.800 +응답 선택 .Message가 수집되죠 + +03:00.860 --> 03:07.970 +보내진 게 뭐든 포함하고 있어요 도구를 실행해 달라는 요청이 되겠죠 + +03:08.180 --> 03:13.390 +그 비트를 다시 풀려면 할 일이 좀 있어요 + +03:13.450 --> 03:15.820 +뭘 원하는지 알아내서 행동으로 옮기죠 + +03:15.820 --> 03:17.710 +모든 걸 별도의 함수에 넣었어요 Put + +03:17.710 --> 03:18.370 +속임수를 썼어요 + +03:18.370 --> 03:21.400 +여기에다 다 넣으면은 좀 많이 지저분해지거든요, Put it up + +03:21.610 --> 03:23.350 +그래서 그걸 분리했어요 + +03:23.350 --> 03:25.810 +이제 곧 
보게 될 거예요 + +03:25.810 --> 03:32.440 +이 함수는 GPT 40으로부터 메시지를 풀어낼 겁니다 필요하다면 필요할 + +03:32.440 --> 03:33.250 +거고요 + +03:33.250 --> 03:37.060 +도구를 호출하고 그 도구의 결과를 반환하죠 + +03:37.060 --> 03:44.530 +GPT 40으로 돌아가거나 도시로 가면 메시지 + +03:44.530 --> 03:49.180 +목록에 두 행을 추가해야 해요 + +03:49.210 --> 03:53.410 +우리 메시지에 사용자 비서가 있는 걸 기억하세요 + +03:53.410 --> 03:56.320 +이제 두 개의 새로운 행을 추가할 거예요 + +03:56.320 --> 04:04.840 +그 중 하나는 메시지입니다 도구를 호출하라는 GPT 40에서 받은 + +04:04.870 --> 04:07.600 +것 그 이상은 아니죠 + +04:07.600 --> 04:12.400 +가장 먼저 추가할 것은 도구를 실행하라는 보조예요 + +04:12.400 --> 04:19.840 +그걸 메시지 목록에 넣고 그 다음에 입력할 것은 함수를 + +04:19.870 --> 04:23.740 +호출한 결과예요. + +04:23.950 --> 04:32.620 +이 메시지 목록에 있는 건 사용자 비서가 실행 도구를 명령한다는 + +04:32.620 --> 04:33.340 +거죠 + +04:33.340 --> 04:35.710 +이게 도구 결과예요 + +04:35.980 --> 04:40.420 +이제 오픈AI로 다시 보낼 거예요 + +04:40.450 --> 04:41.920 +이해하셨길 바라요 + +04:41.950 --> 04:47.470 +아니라면, print문을 여기에 넣으면 됩니다 실행할 때 제가 말한 그대로죠 + +04:47.470 --> 04:51.400 +메시지를 인쇄해 보면 더 명확해질 거예요 + +04:51.430 --> 04:56.170 +그 모든 게 메시지에 나타나죠 + +04:57.160 --> 05:04.060 +그리고 마지막에 늘 그렇듯 달 착륙선에서 최종 답변이 오죠 + +05:04.270 --> 05:09.670 +이 말씀을 드려야 할 것 같은데 저는 도구를 두 번 전달하지 않기로 결정했습니다 왜냐하면 우리 + +05:09.670 --> 05:14.500 +도구를 두 번 실행할 수는 없으니까요 하지만 저기에 넣는다고 해가 될 것은 없죠 + +05:14.500 --> 05:20.370 +하지만 사용하진 않을 겁니다 마지막 비트는 여기에 함수를 써야 한다는 + +05:20.370 --> 05:25.950 +거죠 이건 다른 사람보다 좀 더 복잡하지만 여러분 프로젝트에서 + +05:25.950 --> 05:28.740 +그대로 사용할 수 있어요 + +05:28.770 --> 05:30.780 +그냥 알아두면 좋을 것 같아서요 + +05:30.930 --> 05:36.510 +GPT 4에서 메시지가 왔어요 네 + +05:36.930 --> 05:43.740 +이제 패키지를 풀어서 어떤 툴을 호출하려는지 알아보죠 + +05:43.770 --> 05:48.330 +우리는 어떤 도구를 호출하는지 압니다. 왜냐하면 우리는 하나의 도구만 가지고 있고 가격을 get get + +05:48.360 --> 05:49.380 +할 수 있기 때문이죠. 
+ +05:49.410 --> 05:53.370 +어떻게 작동하는지 보여드리려고 여기 넣어뒀어요 + +05:53.430 --> 05:59.850 +여기서 해야 할 건 도구 호출이 get 티켓 가격과 같으면 이렇게 하는 거죠 + +05:59.880 --> 06:05.700 +그러니까 만약이나 일종의 일련의 스위치가 있어야 해요 + +06:05.730 --> 06:10.290 +가능한 모든 호출을 요청할 수 있는 스위치요 + +06:10.290 --> 06:12.330 +하지만 이 경우엔 도구가 하나뿐이죠 + +06:12.330 --> 06:13.560 +그런 거였군요 + +06:13.830 --> 06:18.570 +그럼 호출하고자 하는 인수를 로드할 수 있어요 + +06:18.570 --> 06:23.520 +이 도구 호출에는 . 함수라는 도구가 있어요 + +06:23.550 --> 06:28.710 +.인수는 어떤 매개 변수를 선택했는지 말해주죠 + +06:28.920 --> 06:31.770 +JSON 형태로 다시 나타나요 + +06:31.770 --> 06:32.880 +문자열 하나면 돼요 + +06:32.880 --> 06:38.670 +Json을 사용해야 해요 사전을 변환하기 위해 Loadstring 함수를 로드해요 + +06:38.670 --> 06:44.880 +그리고 우리의 유일한 논점을 찾아냅니다 바로 목적지 도시예요 + +06:44.910 --> 06:48.420 +그걸 찾아 변수 도시에 Put it 했어요 + +06:48.420 --> 06:52.020 +이제 도구와 인수를 풀었는데요 + +06:52.020 --> 06:56.190 +이제 남은 건 이 선이에요 + +06:56.190 --> 06:59.670 +저 대사는 별로 기발하지 않아요 + +06:59.670 --> 07:04.890 +이 라인은 get 티켓 가격 함수를 호출하고 있어요 + +07:04.890 --> 07:11.790 +실행할 수 있는 도구로 설정했습니다 도시를 뽑고 함수를 호출하죠 + +07:12.000 --> 07:14.340 +그리고 대응을 구축하죠 + +07:14.340 --> 07:15.990 +이게 그 대답이에요 + +07:15.990 --> 07:20.910 +Get it은 메시지 맨 밑에 놓이게 돼요 + +07:20.910 --> 07:21.420 +됐어요 + +07:21.420 --> 07:24.920 +Get이 그 안에 박히면 이런 반응을 보이죠 + +07:24.950 --> 07:29.060 +각 행은 역할과 콘텐츠를 가지고 있어요 + +07:29.390 --> 07:35.600 +과거에는 그 역할이 사용자일 수도 있고 시스템 사용자일 수도 있고 비서가 될 수도 있었죠 시스템 사용자 비서요 + +07:35.600 --> 07:36.470 +사용자 비서요 + +07:36.470 --> 07:38.090 +다른 방법도 있어요 + +07:38.120 --> 07:39.590 +이 툴이 되겠죠 + +07:39.590 --> 07:43.100 +이 경우엔 응답 도구에 입력했어요 + +07:43.100 --> 07:47.630 +문자열에 입력한 콘텐츠는 이거예요 Put + +07:47.660 --> 07:52.280 +이 사전은 JSON 덤프 문자열 함수를 이용해 문자열로 변했어요 + +07:52.280 --> 07:55.220 +그래서 목적지 도시와 가격을 입력했어요. TUT D. 
+
+07:55.220 --> 08:04.670
+이 도구 호출 ID도 메시지에 추가해야 합니다 이 응답을 바로 전에 온 요청에
+
+08:04.670 --> 08:08.180
+링크하는 방법이죠
+
+08:08.180 --> 08:17.270
+메시지를 넣는군요 도구요 id를 입력해 이 둘을 볼 때 이 요청과
+
+08:17.270 --> 08:20.990
+관련 있다는 걸 완전히 이해하게 되죠
+
+08:20.990 --> 08:23.990
+그게 다인 거죠
+
+08:24.260 --> 08:27.480
+하지만 이번에는 양이 꽤 많았어요
+
+08:27.510 --> 08:29.370
+배울 게 많아요
+
+08:29.580 --> 08:34.710
+하지만 앞서 말했듯이 print문을 넣으면 작동하죠
+
+08:34.710 --> 08:38.070
+채팅방에서 이걸 꺼내면 어떻게 되는지 보죠
+
+08:39.720 --> 08:40.800
+안녕하세요
+
+08:44.550 --> 08:45.270
+안녕하세요
+
+08:45.270 --> 08:46.500
+무엇을 도와드릴까요?
+
+08:46.530 --> 08:49.920
+런던에 가고 싶어요
+
+08:51.600 --> 08:52.170
+네
+
+08:52.170 --> 08:53.460
+얼마인지 알고 싶어요?
+
+08:53.490 --> 08:54.390
+네
+
+08:55.770 --> 08:59.520
+런던 왕복 비행기 표는 799달러예요
+
+08:59.520 --> 09:00.900
+가격도 적당했고요
+
+09:00.900 --> 09:05.850
+이제 주피터 실험실로 돌아가면 우리가 만든 도구의 이름이
+
+09:05.850 --> 09:08.490
+인쇄된 걸 볼 수 있을 거예요
+
+09:08.490 --> 09:11.370
+그렇게 될 거라고 확신하지만 한번 보죠
+
+09:11.400 --> 09:15.210
+툴 get 푯값으로 런던에 가야 해요
+
+09:15.240 --> 09:16.740
+계속하죠
+
+09:17.220 --> 09:22.620
+파리는 어때요?
+
+09:25.320 --> 09:27.870
+899와 도쿄요
+
+09:30.710 --> 09:34.130
+도쿄까지 왕복 티켓은 1,400달러예요
+
+09:34.760 --> 09:35.930
+베를린도요
+
+09:37.280 --> 09:38.660
+베를린은 우리가 추가한 거예요
+
+09:38.690 --> 09:39.530
+그게 있는지 보죠
+
+09:39.560 --> 09:41.480
+네, 499
+
+09:41.510 --> 09:46.850
+팀북2도 잘 작동하는 것 같고요
+
+09:48.710 --> 09:53.420
+죄송하지만 팀북2 티켓 가격은 저도 몰라요
+
+09:53.810 --> 09:55.520
+어디 보죠
+
+09:55.520 --> 09:58.940
+일련의 도구들이 보이죠
+
+09:58.940 --> 10:02.720
+팀북2를 호출했을 때 미상이라는 응답이 왔어요
+
+10:02.720 --> 10:07.910
+그 결과 시스템 프롬프트에서 모른다는 걸 알려줬기 때문에 모른다는 게 꽤
+
+10:07.910 --> 10:08.960
+명확했어요
+
+10:09.680 --> 10:12.530
+그래서 많은 일이 벌어졌죠
+
+10:12.530 --> 10:13.910
+감이 있으면 좋겠네요
+
+10:13.910 --> 10:18.560
+사실 이건 강력한 기능성 조각이에요
+
+10:18.560 --> 10:28.580
+LLM에 더 많은 힘을 주는 강력한 기술이지만 그 이면에는 마법이 없어요
+
+10:28.580 --> 10:34.990
+정말 많은 if문과 복잡한 메시지 투 프롤로예요 그래서 LLM이 뭔가에 대한
+
+10:34.990 --> 10:38.770
+더 많은 정보가 필요하다고 알려주도록 하죠
+
+10:38.770 --> 10:40.990
+이불 속에서는 그렇게 작동해요
+
+10:40.990 --> 10:42.850
+그게 이해가 되셨으면 좋겠네요
+
+10:42.850 --> 10:45.610
+여러분의 프로젝트에서 사용할 수 있길 바라요
+
+10:45.610 --> 10:47.350
+확장하는 방법으로요
+
+10:47.350 --> 10:54.340
+더 많은 종류의 도구를 추가할 수 있습니다 비행의 가능성에 대해 말해주는
+
+10:54.340 --> 10:56.710
+도구 같은 거요
+
+10:56.710 --> 10:58.510
+도구를 더 많이 쓸 수 있죠
+
+10:58.510 --> 11:03.640
+더 대담하게 하고 싶다면 비행기 예약을 위한 도구를 추가하세요
+
+11:03.640 --> 11:07.120
+다시 호출될 때 뭔가를 프린트하거나 파일에 뭔가를 쓰거나
+
+11:07.120 --> 11:08.170
+할 수 있는 도구죠
+
+11:08.170 --> 11:14.890
+그러면 LLM이 두 번째 도구로 호출해 사용자가 요청할 때 비행기를
+
+11:14.890 --> 11:16.390
+예약할 수 있죠
+
+11:16.390 --> 11:21.100
+그래서 재미있었겠죠 그리고 날짜 같은 걸로 많이 싸웠을 거예요
+
+11:21.100 --> 11:28.810
+그러니 한 번 해보시고 그게 끝날 때쯤엔 도구 작성하는 방법에 아주 능숙해지실 겁니다 소프트웨어에서
+
+11:28.810 --> 11:35.560
+실행되는 액션을 수행하기 위해 LLM을 장비하는 방법에도요
diff --git a/week5/community-contributions/subtitles/srts/60595637/en_US.srt b/week5/community-contributions/subtitles/srts/60595637/en_US.srt
new file mode 100755
index 0000000..6784da2
--- /dev/null
+++ b/week5/community-contributions/subtitles/srts/60595637/en_US.srt
@@ -0,0 +1,256 @@
+WEBVTT
+
+00:00.080 --> 00:05.690 +Here we are back in the Colab, which has been running overnight for me and probably for you too, I + +00:05.720 --> 00:06.050 +hope. + +00:06.050 --> 00:10.160 +And if anything, like me, you've been eagerly glued to it. + +00:10.520 --> 00:19.430 +So this is showing the part in the colab where it's running away and you can see it's ticking through. + +00:19.460 --> 00:24.080 +It's more than halfway at this point as it makes its way through the four epochs. + +00:24.080 --> 00:25.820 +Four epochs are not required for this. + +00:25.850 --> 00:28.100 +You only need to do one epoch, of course. + +00:28.130 --> 00:34.310 +And just that I'm a sucker for this stuff and loving it. + +00:34.310 --> 00:37.430 +So it's it's ticking away. + +00:37.460 --> 00:41.750 +Let's go to the fabulous weights and biases to see how it looks here. + +00:41.750 --> 00:43.340 +This is our run. + +00:43.370 --> 00:45.080 +You remember in weights and biases. + +00:45.080 --> 00:49.280 +The navigation at the top here lets you see the different projects that you may have. + +00:49.310 --> 00:52.760 +And we're looking at my Pricer project, which is the one in question. + +00:52.790 --> 00:56.390 +I've also got a Pricer GPT project for for where we fine tune GPT. + +00:56.960 --> 01:02.720 +Um, and then here are the different runs I name the runs in the code after the date and time that they + +01:02.720 --> 01:03.440 +were kicked off. + +01:03.470 --> 01:04.460 +You don't need to do that. + +01:04.460 --> 01:06.350 +You can call them runs anything you want. + +01:06.530 --> 01:13.050 +I do this because it helps me be able to, uh, recollect when I did, what, run and so on. + +01:13.050 --> 01:14.880 +So I found this quite a useful trick. + +01:15.030 --> 01:18.420 +But you could also name it to describe the kind of run that you're doing. + +01:18.900 --> 01:21.630 +Um, and you can also rename it by, by right clicking on it. 
+ +01:22.260 --> 01:26.070 +Uh, so the current run is this blue run right here. + +01:26.070 --> 01:27.960 +This is what we've been running. + +01:27.960 --> 01:33.390 +And if I zoom in on the training loss, which is the the diagram that really matters, you now know + +01:33.390 --> 01:35.520 +this is cross-entropy loss we're seeing here. + +01:35.760 --> 01:41.280 +Uh, and you'll see that it clearly has uh, this was the first epoch. + +01:41.280 --> 01:46.530 +It comes down a bit here, uh, potentially because some overfitting starts to happen when it sees the + +01:46.530 --> 01:51.720 +data a second time, and then it drops again for the beginning of the third epoch here. + +01:51.720 --> 01:56.220 +The thing that I'm not doing that is a very much a best practice that I should be doing is having a + +01:56.220 --> 01:59.700 +validation data set, and we'd be able to see validation loss. + +01:59.700 --> 02:05.820 +And I imagine what you'd find is that it maybe only decreases a little bit here, and maybe quite soon + +02:05.820 --> 02:09.240 +it will start to increase a bit because we are overfitting. + +02:09.270 --> 02:13.960 +Uh, we'll find that out by, by running the model in inference mode, but it would be better to see + +02:13.960 --> 02:14.980 +the validation results. + +02:14.980 --> 02:17.320 +And hopefully that's something that you are doing. + +02:17.770 --> 02:20.230 +And I would love to see those charts by the way. + +02:21.040 --> 02:28.330 +So what we can also do is layer on top of this, the prior run that I had done when I ran it through + +02:28.330 --> 02:29.500 +to completion. + +02:29.860 --> 02:30.940 +Here we go. + +02:30.940 --> 02:34.030 +Let's zoom in again on both of these runs together. + +02:34.030 --> 02:38.500 +And what you'll see is that the two runs are very, very similar indeed. + +02:38.710 --> 02:45.160 +Obviously I had the same, um, the same hyperparameters, and I'd set random seeds. 
+ +02:45.160 --> 02:49.600 +And so it's not not a great surprise, but it does show you that despite all of the complexity and everything + +02:49.600 --> 02:56.170 +that's going on, you do get the, the same numbers, um, from these runs. + +02:56.380 --> 02:58.660 +So that's somewhat comforting. + +02:58.840 --> 03:01.930 +Uh, and I think that's probably all to all to show you. + +03:01.960 --> 03:08.290 +We can see that in terms of the learning rate that now, well, before we were suspicious that the blue + +03:08.290 --> 03:14.170 +line, if we just look at the blue line only, uh, just for a moment, flashed up with what it used + +03:14.170 --> 03:14.560 +to see. + +03:14.590 --> 03:20.390 +It used to be if you saw that, uh, if we bring this up, you'll see that the last time it was all + +03:20.390 --> 03:21.200 +the way up here. + +03:21.230 --> 03:26.480 +And maybe you were skeptical about whether we were really seeing a nice, smooth curve. + +03:26.480 --> 03:31.190 +And now you clearly see that it's coming down in a very nice way. + +03:31.190 --> 03:37.550 +So that cosine learning rate scheduler is a good trick to know, a good way to vary the learning rate + +03:37.550 --> 03:39.290 +during the course of your batch. + +03:40.070 --> 03:40.910 +Okay. + +03:40.910 --> 03:44.060 +And then final thing to show you is to flip to hugging face. + +03:44.090 --> 03:51.920 +I'll mention if you look at this model, you'll see that the name of this ends in 11 seconds at that + +03:51.920 --> 03:52.730 +timestamp. + +03:52.730 --> 03:57.080 +If we go over to Hugging Face in the hub, I've got all these different models. + +03:57.350 --> 04:02.780 +And this one, this one ending in 11 seconds is, of course, the run in question that's running right + +04:02.780 --> 04:03.320 +now. + +04:03.320 --> 04:05.690 +And in fact, it even says updated two hours ago. + +04:05.690 --> 04:07.130 +So we know it's the right one. 
+ +04:07.160 --> 04:13.160 +As I say, some people will just have the single repo they'll just write to for all of their different + +04:13.160 --> 04:15.500 +runs, and that's a perfectly good way of doing it. + +04:15.590 --> 04:19.970 +I prefer doing it this way, so I keep my my different runs completely separate. + +04:20.060 --> 04:26.100 +And if I go into this repo we're now looking at if I click on files and versions, these are the files + +04:26.100 --> 04:27.360 +associated with this. + +04:27.360 --> 04:29.670 +Run again the safe tensors. + +04:29.670 --> 04:30.840 +That's the business. + +04:30.840 --> 04:32.070 +That's where it all happens. + +04:32.070 --> 04:39.030 +It's 109MB worth of parameters that are the parameters of our Lora adapters. + +04:39.270 --> 04:43.620 +Um, and over here you'll see history nine commits. + +04:43.650 --> 04:51.450 +If I click on this, it's showing me that just as I had asked in my parameters in my setup, uh, hugging + +04:51.480 --> 04:58.230 +face has been saving this to the hub, uploading it, making a different revision of these model weights + +04:58.260 --> 05:00.840 +every 5000 steps. + +05:01.260 --> 05:08.670 +Um, and so, uh, that's something we'll have access to if we want to go back and do some, uh, do + +05:08.700 --> 05:11.130 +inference on any one of those different commits. + +05:11.130 --> 05:14.400 +And hopefully you can see why I like to keep it as a separate repo. + +05:14.400 --> 05:21.480 +So I don't muddle up the different saves during a particular run with the different versions of training. + +05:22.500 --> 05:25.680 +Okay, I think that's enough of a tour of where we're at. + +05:25.950 --> 05:30.750 +Uh, head back to the slides one more time before we actually get to inference. 
diff --git a/week5/community-contributions/subtitles/srts/60595637/ja_JP.srt b/week5/community-contributions/subtitles/srts/60595637/ja_JP.srt new file mode 100755 index 0000000..8c43f38 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60595637/ja_JP.srt @@ -0,0 +1,208 @@ +WEBVTT + +00:00.080 --> 00:06.050 +私にとっても、 おそらくあなたにとっても、 一晩中走り続けてきたColabに戻ってきた。 + +00:06.050 --> 00:10.160 +そして、 どちらかといえば、 私のように、 この映画に釘付けになっているはずだ。 + +00:10.520 --> 00:19.430 +だから、 これはコラブの中で逃げ惑う部分を示していて、 カチカチと音を立てているのがわかるだろう。 + +00:19.460 --> 00:24.080 +4つのエポックを経て、 この時点で半分以上。 + +00:24.080 --> 00:25.820 +そのために4つのエポックは必要ない。 + +00:25.850 --> 00:28.100 +もちろん、 エポックは1回だけでいい。 + +00:28.130 --> 00:34.310 +そしてただ、 私はこの手のものが好きで好きでたまらない。 + +00:34.310 --> 00:37.430 +だから、 刻々と時間が過ぎていく。 + +00:37.460 --> 00:41.750 +それでは、 ファビュラス・ウェイトとバイアスをご覧いただこう。 + +00:41.750 --> 00:43.340 +これが我々の走りだ。 + +00:43.370 --> 00:45.080 +ウェイトとバイアスで覚えている + +00:45.080 --> 00:49.280 +上部のナビゲーションで、 さまざまなプロジェクトを見ることができます。 + +00:49.310 --> 00:52.760 +そして、 私のプライサープロジェクトを見ている。 + +00:52.790 --> 00:56.390 +GPTを微調整するためのプライサーGPTプロジェクトもあるんだ。 + +00:56.960 --> 01:03.440 +ええと、 それから、 ここでは、 キックオフされた日時にちなんで、 コード内のランに名前をつけている。 + +01:03.470 --> 01:04.460 +そんなことをする必要はない。 + +01:04.460 --> 01:06.350 +呼び方は自由だ。 + +01:06.530 --> 01:13.050 +そうすることで、 いつ、 何をしたのか、 走ったのかなどを思い出すことができるからだ。 + +01:13.050 --> 01:14.880 +だから、 これはとても便利なトリックだと思った。 + +01:15.030 --> 01:18.420 +でも、 どんな走りをするのかを表す名前をつけることもできる。 + +01:18.900 --> 01:21.630 +右クリックして名前を変更することもできます。 + +01:22.260 --> 01:26.070 +ええと、 現在の滑走はこの青い滑走です。 + +01:26.070 --> 01:27.960 +これが私たちが走らせてきたものだ。 + +01:27.960 --> 01:35.520 +トレーニングのロスを拡大してみると、 これが本当に重要な図であることがわかる。 + +01:35.760 --> 01:41.280 +そして、 これが最初のエポックであることがわかるだろう。 + +01:41.280 --> 01:51.720 +これは、 2回目のデータを見たときにオーバーフィッティングが起こり始めた可能性がある。 + +01:51.720 --> 01:56.220 +私がやっていないことで、 ベストプラクティスとしてやるべきことは、 + +01:56.220 --> 01:59.700 +検証データセットを持つことです。 + +01:59.700 --> 02:09.240 +想像するに、 ここで少し減少するだけで、 すぐに増加し始めるのではないだろうか。 + +02:09.270 --> 02:14.980 
+それは推論モードでモデルを走らせればわかりますが、 検証結果を見たほうがいいでしょう。 + +02:14.980 --> 02:17.320 +そして願わくば、 それがあなたのやっていることであってほしい。 + +02:17.770 --> 02:20.230 +ところで、 そのグラフをぜひ見せてほしい。 + +02:21.040 --> 02:29.500 +だから、 この上に、 私が完了まで走らせたときに行った前の走行を重ねることもできる。 + +02:29.860 --> 02:30.940 +さあ、 始めよう。 + +02:30.940 --> 02:34.030 +両者の走りをもう一度拡大して見てみよう。 + +02:34.030 --> 02:38.500 +そして、 この2つの走りは実によく似ていることがわかるだろう。 + +02:38.710 --> 02:45.160 +もちろん、 同じハイパーパラメーターを使い、 ランダムなシードを設定した。 + +02:45.160 --> 02:49.600 +だから、 大きな驚きというわけではないが、 複雑でいろいろなことが起こっているにもかかわらず、 + +02:49.600 --> 02:56.170 +同じ数字が得られるということを示している。 + +02:56.380 --> 02:58.660 +だから、 多少は慰めになる。 + +02:58.840 --> 03:01.930 +ええと、 これでお見せできるのは全部だと思います。 + +03:01.960 --> 03:08.290 +学習率という点から見ると、 今は、 いや、 以前は、 青い線だけを見ていると、 えー、 ちょっとだけだが、 + +03:08.290 --> 03:14.560 +以前見ていたのと同じように、 青い線が点滅しているのがわかる。 + +03:14.590 --> 03:21.200 +以前は......これを上に持ってくれば、 前回はここまでの高さだったことがわかるだろう。 + +03:21.230 --> 03:26.480 +そして、 本当に滑らかなカーブを描いているのかどうか、 懐疑的だったのではないだろうか。 + +03:26.480 --> 03:31.190 +そして今、 とてもいい形で降りてきているのがはっきりとわかる。 + +03:31.190 --> 03:39.290 +つまり、 コサイン学習率スケジューラーは、 バッチの過程で学習率を変化させる良い方法なのだ。 + +03:40.070 --> 03:40.910 +オーケー。 + +03:40.910 --> 03:44.060 +そして最後に見せるのは、 ハグする顔への反転だ。 + +03:44.090 --> 03:52.730 +このモデルを見れば、 そのタイムスタンプで11秒後にこの名前が終わっていることがわかるだろう。 + +03:52.730 --> 03:57.080 +ハブのハギング・フェイスに行くと、 いろいろなモデルがあるんだ。 + +03:57.350 --> 04:03.320 +そして、 11秒で終わるこの1本は、 もちろん、 今走っている問題のランだ。 + +04:03.320 --> 04:05.690 +そして実際、 2時間前に更新されたとさえ書かれている。 + +04:05.690 --> 04:07.130 +だから、 私たちはそれが正しいものだと知っている。 + +04:07.160 --> 04:15.500 +私が言ったように、 ある人はただ1つのレポを持ち、 そのレポに書き込むだけだ。 + +04:15.590 --> 04:19.970 +私はこのやり方が好きなので、 異なるランを完全に分けている。 + +04:20.060 --> 04:27.360 +このレポに入り、 ファイルとバージョンをクリックすると、 このレポに関連するファイルが表示される。 + +04:27.360 --> 04:29.670 +安全なテンソルをもう一度実行する。 + +04:29.670 --> 04:30.840 +それがビジネスだ。 + +04:30.840 --> 04:32.070 +そこですべてが起こる。 + +04:32.070 --> 04:39.030 +これは109MB分のパラメーターで、 我々のLoraアダプターのパラメーターだ。 + +04:39.270 --> 04:43.620 +そして、 こちらには9つのコミット履歴があります。 + +04:43.650 --> 04:51.450 +これをクリックすると、 
私がセットアップのパラメータで要求したように、 ハギング・フェイスがこれをハブに保存し、 + +04:51.480 --> 05:00.840 +アップロードし、 5000ステップごとに異なるリビジョンのモデルウェイトを作成していることが表示される。 + +05:01.260 --> 05:11.130 +それで、 そのコミットのどれかに戻って推論を行いたければ、 そのコミットにアクセスすることができる。 + +05:11.130 --> 05:14.400 +そして、 私がなぜそれを別のレポとして残しておきたいのか、 お分かりいただけると幸いだ。 + +05:14.400 --> 05:21.480 +だから、 トレーニングのバージョンが違っても、 走行中のセーブを混同しないようにしている。 + +05:22.500 --> 05:25.680 +さて、 僕らが今いる場所のツアーはこれで十分だと思う。 + +05:25.950 --> 05:30.750 +ええと、 推論に入る前にもう1度スライドに戻ってください。 diff --git a/week5/community-contributions/subtitles/srts/60595637/ko_KR.srt b/week5/community-contributions/subtitles/srts/60595637/ko_KR.srt new file mode 100755 index 0000000..2a1f4bb --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60595637/ko_KR.srt @@ -0,0 +1,244 @@ +WEBVTT + +00:00.080 --> 00:06.050 +다시 콜랍입니다 밤새 틀어 놨는데 여러분도 그랬으면 좋겠네요 + +00:06.050 --> 00:10.160 +나처럼 당신도 열성적으로 매달렸잖아요 + +00:10.520 --> 00:19.430 +콜랍에 있는 이 부분이 흘러가는 걸 보여줍니다 돌아가는 게 보이죠? + +00:19.460 --> 00:24.080 +지금까지는 네 개의 개혁을 거치며 절반 이상이 지났죠 + +00:24.080 --> 00:25.820 +네 개의 개혁은 필요 없어요 + +00:25.850 --> 00:28.100 +물론 한 개혁만 성공하면 돼요 + +00:28.130 --> 00:34.310 +전 이런 걸 정말 좋아해요 + +00:34.310 --> 00:37.430 +시간이 가고 있어요 + +00:37.460 --> 00:41.750 +멋진 무게와 바이어스로 가서 어떻게 보이는지 보죠 + +00:41.750 --> 00:43.340 +우리가 달려요 + +00:43.370 --> 00:45.080 +무게와 편향성을 기억하죠 + +00:45.080 --> 00:49.280 +상단의 탐색은 여러분이 갖고 있는 다양한 프로젝트를 볼 수 있게 해줘요 + +00:49.310 --> 00:52.760 +지금 보고 있는 건 문제의 프라이커 프로젝트예요 + +00:52.790 --> 00:56.390 +GPT를 미세 조정하기 위한 Pricer GPT 프로젝트도 있는데요 + +00:56.960 --> 01:02.720 +여기 다양한 실행이 있어요 코드 내 실행에 이름을 붙였죠 실행이 시작된 날짜와 시간의 + +01:02.720 --> 01:03.440 +이름을요 + +01:03.470 --> 01:04.460 +그럴 필요 없어요 + +01:04.460 --> 01:06.350 +마음대로 불러도 돼요 + +01:06.530 --> 01:13.050 +달리기 같은 걸 할 때 기억이 잘 나거든요 + +01:13.050 --> 01:14.880 +꽤 유용한 방법이었어요 + +01:15.030 --> 01:18.420 +하지만 여러분이 작업하는 런을 설명하기 위해 이름을 붙일 수도 있어요 + +01:18.900 --> 01:21.630 +오른쪽 클릭으로 이름을 다시 지을 수도 있어요 + +01:22.260 --> 01:26.070 +현재는 이 블루 런이 있어요 + +01:26.070 --> 01:27.960 +이게 우리가 해온 거예요 + +01:27.960 --> 
01:33.390 +훈련 손실을 확대해보면 이 도표가 정말 중요한데 여기서 보이는 게 교차 엔트로피 + +01:33.390 --> 01:35.520 +손실이란 걸 아시겠죠 + +01:35.760 --> 01:41.280 +여길 보시면 확실히 첫 번째 시대예요 + +01:41.280 --> 01:46.530 +비트가 약간 내려갔는데 데이터를 두 번째로 볼 때 비트가 과잉 설정되기 + +01:46.530 --> 01:51.720 +때문입니다 세 번째 이포크가 시작되면 다시 한 번 감소하죠 + +01:51.720 --> 01:56.220 +제가 하지 않는 것은 최선의 관행으로 유효성 검사 데이터 집합을 + +01:56.220 --> 01:59.700 +하는 것입니다 유효성 검사 손실을 볼 수 있죠 + +01:59.700 --> 02:05.820 +보시면 아시겠지만 이 부분만 조금 줄어들고 머지않아 조금씩 늘어날 + +02:05.820 --> 02:09.240 +거예요 비트가 과하게 들어가니까요 + +02:09.270 --> 02:13.960 +추론 모드의 모델을 실행하면 알아낼 수 있지만 검증 결과를 보는 + +02:13.960 --> 02:14.980 +게 더 낫겠죠 + +02:14.980 --> 02:17.320 +그게 당신이 하는 일이면 좋겠네요 + +02:17.770 --> 02:20.230 +그 차트 좀 보고 싶네요 + +02:21.040 --> 02:28.330 +이 위에 레이어를 둘 수 있어요 완료까지 실행했을 때 실행했던 + +02:28.330 --> 02:29.500 +거죠 + +02:29.860 --> 02:30.940 +시작할게요 + +02:30.940 --> 02:34.030 +이 두 경기를 함께 확대해보죠 + +02:34.030 --> 02:38.500 +보면 아시겠지만 두 번의 시도는 아주 비슷했어요 + +02:38.710 --> 02:45.160 +물론 하이퍼파라미터도 같았고 무작위로 시드를 설정했어요 + +02:45.160 --> 02:49.600 +그리 놀랍진 않지만 모든 복잡한 상황에도 불구하고 + +02:49.600 --> 02:56.170 +get run에서 같은 숫자가 나온다는 걸 보여주죠 + +02:56.380 --> 02:58.660 +그래서 좀 위안이 돼요 + +02:58.840 --> 03:01.930 +보여드릴 건 그게 다인 것 같네요 + +03:01.960 --> 03:08.290 +학습률 측면에서 보면 예전엔 의심을 안 했는데 지금은 파란 + +03:08.290 --> 03:14.560 +선을 보면 잠깐이지만 예전에 보던 게 반짝거려요 + +03:14.590 --> 03:21.200 +보시면 알겠지만 마지막으로 찍은 건 이 정도 높이에서 찍었어요 + +03:21.230 --> 03:26.480 +매끄러운 곡선이 보이는지 회의적이었을 수도 있어요 + +03:26.480 --> 03:31.190 +이제 아주 멋지게 무너지는 게 보이죠 + +03:31.190 --> 03:37.550 +코사인 학습률 스케줄러는 알아두면 좋은 방법입니다 기간에 학습률을 다양하게 할 + +03:37.550 --> 03:39.290 +수 있는 좋은 방법이죠 + +03:40.070 --> 03:40.910 +네 + +03:40.910 --> 03:44.060 +마지막으로 얼굴 껴안기 버튼을 눌러요 + +03:44.090 --> 03:51.920 +이 모델을 보시면 이 이름의 끝이 11초 후에 타임스탬프로 끝나는 걸 보실 수 + +03:51.920 --> 03:52.730 +있어요 + +03:52.730 --> 03:57.080 +허브에서 페이스 포옹으로 가면 다양한 모델이 있어요 + +03:57.350 --> 04:03.320 +그리고 이 화면은 11초 안에 끝나는데, 물론 지금 실행중인 화면이죠 + +04:03.320 --> 04:05.690 +2시간 전에 업데이트됐다고도 뜨네요 + +04:05.690 --> 04:07.130 +제대로 온 거예요 + +04:07.160 --> 04:13.160 
+어떤 사람들은 단일 압류만 갖기도 합니다 모든 실행에 대해 글로 작성하는 거죠 그게 + +04:13.160 --> 04:15.500 +완벽하게 좋은 방법이에요 + +04:15.590 --> 04:19.970 +전 이렇게 하는 게 좋아요 각각의 코스를 완전히 분리하는 거죠 + +04:20.060 --> 04:26.100 +이 압류로 가면 지금 보고 있는 건 파일과 버전을 클릭하면 이것과 관련된 + +04:26.100 --> 04:27.360 +파일들이죠 + +04:27.360 --> 04:29.670 +안전한 텐서를 사용하세요 + +04:29.670 --> 04:30.840 +그게 사업이죠 + +04:30.840 --> 04:32.070 +거기서 모든 게 시작되죠 + +04:32.070 --> 04:39.030 +109MB 정도의 파라미터가 로라 어댑터의 파라미터예요 + +04:39.270 --> 04:43.620 +이쪽에는 역사 9가 커밋하는 걸 보실 수 있어요 + +04:43.650 --> 04:51.450 +클릭을 하면, 설정에서 매개 변수에서 요청했던 것을 볼 수 있습니다 포옹하는 얼굴이 허브에 + +04:51.480 --> 04:58.230 +저장되어 있고 업로드되어 있습니다 5,000 걸음마다 모델의 무게를 다르게 + +04:58.260 --> 05:00.840 +수정하고 있네요 + +05:01.260 --> 05:08.670 +그 액세스를 사용할 수 있습니다 과거로 돌아가 커밋에 대한 추론을 + +05:08.700 --> 05:11.130 +하고 싶다면요 + +05:11.130 --> 05:14.400 +왜 따로 압류하는지 이해하셨으면 좋겠네요 + +05:14.400 --> 05:21.480 +그래서 여러 가지 세이브를 여러 가지 훈련과 혼동하지 않아요 + +05:22.500 --> 05:25.680 +좋아요, 현장은 이 정도면 충분한 것 같아요 + +05:25.950 --> 05:30.750 +추론으로 넘어가기 전에 슬라이드로 돌아가 주세요. Get it get get it. diff --git a/week5/community-contributions/subtitles/srts/60614541/en_US.srt b/week5/community-contributions/subtitles/srts/60614541/en_US.srt new file mode 100755 index 0000000..071d842 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60614541/en_US.srt @@ -0,0 +1,37 @@ +WEBVTT + +00:00.620 --> 00:07.610 +I am delighted to welcome you to the first day of our eight weeks together as you join me on this adventure + +00:07.610 --> 00:11.120 +to master AI and LLM engineering. + +00:11.150 --> 00:15.560 +Now look, it's customary in these things to start with me giving an introduction about myself. + +00:15.590 --> 00:16.520 +Tell you about the course. + +00:16.550 --> 00:18.380 +Tell you what you all accomplish over eight weeks. + +00:18.410 --> 00:19.700 +ET cetera, et cetera, et cetera. + +00:19.700 --> 00:21.650 +But no, this isn't that kind. + +00:21.650 --> 00:23.090 +Of course it's not what I'm going to do. 
+ +00:23.120 --> 00:25.340 +We have plenty of time to get back to that later. + +00:25.340 --> 00:30.380 +What we're going to do now is get right into the thick of it and get you set up and using an LLM on + +00:30.380 --> 00:33.710 +your computer right away for a useful purpose. + +00:33.800 --> 00:37.520 +So without further ado, go on to the next video and I'll get you set up. diff --git a/week5/community-contributions/subtitles/srts/60614541/ja_JP.srt b/week5/community-contributions/subtitles/srts/60614541/ja_JP.srt new file mode 100755 index 0000000..1fa516f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60614541/ja_JP.srt @@ -0,0 +1,31 @@ +WEBVTT + +00:00.620 --> 00:11.120 +AIとLLMエンジニアリングをマスターするための冒険の旅に私と一緒に参加する皆さんを、 8週間の最初の日にお迎えできることを嬉しく思います。 + +00:11.150 --> 00:15.560 +さて、 こういった場では、 まず私が自己紹介をするのが慣例となっている。 + +00:15.590 --> 00:16.520 +コースについて教えてください。 + +00:16.550 --> 00:18.380 +8週間で何を達成したか教えてください。 + +00:18.410 --> 00:19.700 +エトセトラ、 エトセトラ、 エトセトラ。 + +00:19.700 --> 00:21.650 +しかし、 いや、 これはそういう種類のものではない。 + +00:21.650 --> 00:23.090 +もちろん、 私がすることではない。 + +00:23.120 --> 00:25.340 +それについては、 後でいくらでも時間がある。 + +00:25.340 --> 00:33.710 +私たちがこれからすることは、 すぐにでもLLMをセットアップして、 あなたのコンピューターですぐに役立つ目的で使っていただくことです。 + +00:33.800 --> 00:37.520 +では、 早速次のビデオを見てください。 diff --git a/week5/community-contributions/subtitles/srts/60614541/ko_KR.srt b/week5/community-contributions/subtitles/srts/60614541/ko_KR.srt new file mode 100755 index 0000000..fa1ee1b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60614541/ko_KR.srt @@ -0,0 +1,37 @@ +WEBVTT + +00:00.620 --> 00:07.610 +8주간의 동행 첫날에 여러분을 초대하게 되어 기쁩니다 AI와 LLM 공학의 + +00:07.610 --> 00:11.120 +달인이 되는 여정을 함께하시죠 + +00:11.150 --> 00:15.560 +이런 일에는 제가 제 소개로 시작하는 게 관례예요 + +00:15.590 --> 00:16.520 +코스 얘기를 해드릴게요 + +00:16.550 --> 00:18.380 +8주 동안 여러분이 이룬 걸 말씀드리죠 + +00:18.410 --> 00:19.700 +기타 등등요 + +00:19.700 --> 00:21.650 +하지만 이건 그런 게 아니에요 + +00:21.650 --> 00:23.090 +당연히 안 하죠 + +00:23.120 --> 00:25.340 +Get in get은 나중에 
다시 해도 돼요 + +00:25.340 --> 00:30.380 +이제부터는 본격적으로 작업에 착수해 컴퓨터에 LLM을 설치하고 그걸 바로 + +00:30.380 --> 00:33.710 +사용하도록 하겠습니다 유용한 목적으로요 + +00:33.800 --> 00:37.520 +다음 비디오로 넘어가죠 get it 설정할게요 diff --git a/week5/community-contributions/subtitles/srts/60614589/en_US.srt b/week5/community-contributions/subtitles/srts/60614589/en_US.srt new file mode 100755 index 0000000..90c2575 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60614589/en_US.srt @@ -0,0 +1,208 @@ +WEBVTT + +00:00.500 --> 00:07.280 +So we're now going to run a large language model directly on your box using a platform called llama, + +00:07.280 --> 00:14.330 +which lets you run C plus plus code version of Llms compiled directly on your machine. + +00:14.360 --> 00:17.810 +I'm going to start by showing you on windows, and then we'll flip over to Mac. + +00:17.840 --> 00:21.560 +So to start with llama you bring up your favorite browser. + +00:21.590 --> 00:22.880 +Let's go to Microsoft Edge. + +00:22.880 --> 00:24.950 +And you can simply go to llama. + +00:24.980 --> 00:31.310 +Com or just search llama llama it's O followed by llama. + +00:31.820 --> 00:35.660 +And the first hit here in the search results will be Alarm.com. + +00:35.660 --> 00:36.650 +And here it is. + +00:36.680 --> 00:42.230 +It's called llama because it was initially built around a model that's called llama, which is the open + +00:42.230 --> 00:47.480 +source model from meta, which really stole the show a couple of years ago when it first came out. + +00:47.480 --> 00:51.230 +So downloading is as simple as pressing the download button. + +00:51.230 --> 00:56.210 +And once you press the download button, you press download for windows and it will start to download + +00:56.210 --> 00:57.740 +the windows version. + +00:57.950 --> 01:02.930 +And if I look here at my downloads, I will see that it has downloaded. + +01:02.930 --> 01:04.380 +There's a Setup.exe. 
+ +01:04.560 --> 01:07.530 +If I press the open file button, it will prompt me. + +01:07.530 --> 01:08.100 +Oh, it's still. + +01:08.130 --> 01:09.000 +It's still downloading again. + +01:09.030 --> 01:09.630 +I just ran this. + +01:09.660 --> 01:15.240 +It will prompt me to install it locally, which I will do as soon as it's installed locally. + +01:15.600 --> 01:21.060 +You then go to the start menu and you're probably familiar with this, but you bring up a PowerShell + +01:21.090 --> 01:27.630 +by typing power in here, and as soon as you do, it prompts you and you press open and up comes a PowerShell, + +01:27.630 --> 01:31.920 +which is where we are going right now to run a large language model. + +01:31.920 --> 01:36.780 +Minutes into this course and the way we do it is we simply type once. + +01:36.810 --> 01:44.280 +Once that install is completed and you pressed okay, you can just simply say olama o l l a m a run. + +01:44.280 --> 01:49.290 +And now you put the name of the model next, and the name of the model we're going to use is going to + +01:49.290 --> 01:55.020 +be llama 3.2, which is one of the very newest models. + +01:55.860 --> 01:57.060 +And so here it goes. + +01:57.060 --> 01:58.020 +And it's run. + +01:58.020 --> 01:59.400 +It's happening right now. + +01:59.400 --> 02:03.060 +That blinking cursor means that we have a large language model running on this computer. + +02:03.060 --> 02:04.770 +It might take a bit longer for you the first time. + +02:04.770 --> 02:10.210 +It has to download the 2 billion parameters associated with Lama, and depending on your internet connection, + +02:10.210 --> 02:13.870 +that might take a little bit, but you'll get a progress bar that will show you what's going on. + +02:14.230 --> 02:18.460 +So what we're now going to do is try and talk to this LLM, and we're going to try and do something + +02:18.460 --> 02:20.020 +useful with a free LLM. + +02:20.020 --> 02:21.250 +And here's what we'll do. 
+ +02:21.280 --> 02:23.620 +I'm actually trying to learn Spanish at the moment. + +02:23.620 --> 02:28.630 +And I can tell you that my grasp of the Spanish language is distinctly less inferior to my grasp of + +02:28.630 --> 02:29.350 +llms. + +02:29.710 --> 02:36.730 +And the app that I'm using to try and teach me has a service where you can pay to chat with a with an + +02:36.730 --> 02:37.720 +AI tutor. + +02:37.750 --> 02:42.370 +Now, I don't want to pay for that because I know I can run an open source model and do it for free. + +02:42.370 --> 02:48.130 +So right now we're going to build ourselves a Spanish tutor which will teach me Spanish. + +02:48.490 --> 02:51.400 +All open source without paying a penny. + +02:51.400 --> 02:57.460 +So I'm going to say I am trying to learn Spanish. + +02:58.660 --> 03:01.960 +I am a complete beginner. + +03:04.690 --> 03:12.000 +Please chat with me in basic Spanish to teach me. + +03:13.200 --> 03:15.630 +And so that is the prompt that we will start with. + +03:15.630 --> 03:16.830 +And let's see what happens. + +03:16.830 --> 03:17.820 +It's thinking. + +03:18.390 --> 03:19.230 +Hola. + +03:19.590 --> 03:21.480 +Welcome to our conversation in Spanish. + +03:21.510 --> 03:24.570 +To start, let's begin with some basic greetings. + +03:24.630 --> 03:25.800 +Como estas? + +03:26.250 --> 03:30.540 +So you can see that it's giving me a very good starting prompt. + +03:30.540 --> 03:31.890 +It's giving me some examples. + +03:31.890 --> 03:33.930 +It understands where I'm coming from. + +03:33.960 --> 03:36.240 +And I can now say something like. + +03:40.980 --> 03:42.120 +Put an accent on. + +03:42.450 --> 03:44.550 +Uh, so. + +03:44.670 --> 03:46.410 +Oh, there's a small mistake. + +03:51.000 --> 03:52.050 +Okay, there we go. + +03:52.050 --> 03:52.710 +It's correcting me. + +03:52.710 --> 03:58.320 +I think it's the accent that I missed and maybe the question mark as well, so you get the idea. 
+ +03:58.320 --> 04:03.870 +We've actually just built something which has commercial value because indeed, this is sold at a price + +04:03.870 --> 04:11.790 +immediately for free on our boxes using an open source LLM that is llms in action from the get go. + +04:11.790 --> 04:14.700 +And now for Mac people, let's flip over to a mac. diff --git a/week5/community-contributions/subtitles/srts/60614589/ja_JP.srt b/week5/community-contributions/subtitles/srts/60614589/ja_JP.srt new file mode 100755 index 0000000..645bbd8 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60614589/ja_JP.srt @@ -0,0 +1,187 @@ +WEBVTT + +00:00.500 --> 00:14.330 +llamaと呼ばれるプラットフォームは、 LlmsのCプラス・コード・バージョンを直接マシンでコンパイルして実行できる。 + +00:14.360 --> 00:17.810 +まずウィンドウズでお見せして、 それからマックに移ります。 + +00:17.840 --> 00:21.560 +llamaを使うには、 まずお気に入りのブラウザを立ち上げる。 + +00:21.590 --> 00:22.880 +Microsoft Edgeにアクセスしよう。 + +00:22.880 --> 00:24.950 +リャマに行くだけでいい。 + +00:24.980 --> 00:31.310 +またはllama llamaで検索してください。 + +00:31.820 --> 00:35.660 +そして、 検索結果で最初にヒットするのは「アラーム」だ。 comに移籍した。 + +00:35.660 --> 00:36.650 +そしてここにある。 + +00:36.680 --> 00:47.480 +llamaと呼ばれているのは、 当初はmetaのオープンソースモデルであるllamaというモデルを中心に構築されたからだ。 + +00:47.480 --> 00:51.230 +そのため、 ダウンロードはダウンロードボタンを押すだけと簡単だ。 + +00:51.230 --> 00:57.740 +ダウンロードボタンを押すと、 ウィンドウズ版のダウンロードが開始される。 + +00:57.950 --> 01:02.930 +ダウンロードを見ると、 ダウンロードが完了している。 + +01:02.930 --> 01:04.380 +セットアップがある。 exe。 + +01:04.560 --> 01:07.530 +ファイルを開くボタンを押すと、 プロンプトが表示される。 + +01:07.530 --> 01:08.100 +ああ、 まだだよ。 + +01:08.130 --> 01:09.000 +まだダウンロード中だ。 + +01:09.030 --> 01:09.630 +私はこれを実行しただけだ。 + +01:09.660 --> 01:15.240 +ローカルにインストールするよう促されるので、 ローカルにインストールしたらすぐにそうするつもりだ。 + +01:15.600 --> 01:21.060 +スタート・メニューからPowerShellを呼び出します。 + +01:21.090 --> 01:31.920 +ここでpowerと入力すると、 すぐにプロンプトが表示され、 openを押すとPowerShellが出てきます。 + +01:31.920 --> 01:36.780 +このコースの開始時間は数分で、 やり方は単純に1回タイプするだけだ。 + +01:36.810 --> 01:44.280 +インストールが完了し、 OKを押したら、 olama o l l a m a runと言えばいい。 + +01:44.280 --> 01:49.290 +そして次にモデル名を入力します。 
今回使用するモデル名はllama + +01:49.290 --> 01:55.020 +3です。 2は最新モデルのひとつだ。 + +01:55.860 --> 01:57.060 +そして、 こうなる。 + +01:57.060 --> 01:58.020 +そして走る。 + +01:58.020 --> 01:59.400 +今まさに起きていることだ。 + +01:59.400 --> 02:03.060 +カーソルが点滅しているのは、 このコンピューター上で大規模な言語モデルが稼働していることを意味する。 + +02:03.060 --> 02:04.770 +初回は少し時間がかかるかもしれない。 + +02:04.770 --> 02:10.210 +ラマに関連する20億のパラメータをダウンロードする必要があり、 インターネット接続によっては少し時間がかかるかもしれないが、 + +02:10.210 --> 02:13.870 +進行状況を示すプログレスバーが表示される。 + +02:14.230 --> 02:20.020 +だから、 私たちが今しようとしていることは、 このLLMと話をしてみることだ。 + +02:20.020 --> 02:21.250 +そして、 こうしよう。 + +02:21.280 --> 02:23.620 +実は今、 スペイン語を学ぼうとしているんだ。 + +02:23.620 --> 02:29.350 +そして、 私のスペイン語の理解力は、 llmsの理解力よりも明らかに劣っていると断言できる。 + +02:29.710 --> 02:37.720 +私が使っているアプリには、 お金を払ってAI家庭教師とチャットできるサービスがある。 + +02:37.750 --> 02:42.370 +今は、 オープンソースのモデルを使えば無料でできることを知っているから、 そのためにお金を払いたくはない。 + +02:42.370 --> 02:48.130 +だから今は、 スペイン語を教えてくれる家庭教師を作ろうと思っているんだ。 + +02:48.490 --> 02:51.400 +すべてオープンソースで、 1ペニーも支払う必要はない。 + +02:51.400 --> 02:57.460 +だから、 私はスペイン語を学ぼうとしていると言うつもりだ。 + +02:58.660 --> 03:01.960 +私は全くの初心者です。 + +03:04.690 --> 03:12.000 +基本的なスペイン語でチャットして教えてください。 + +03:13.200 --> 03:15.630 +というわけで、 まずはこのプロンプトから。 + +03:15.630 --> 03:16.830 +どうなるか見てみよう。 + +03:16.830 --> 03:17.820 +考えているんだ。 + +03:18.390 --> 03:19.230 +ホラ。 + +03:19.590 --> 03:21.480 +スペイン語での会話へようこそ。 + +03:21.510 --> 03:24.570 +手始めに、 基本的な挨拶から始めよう。 + +03:24.630 --> 03:25.800 +どうですか? 
+ +03:26.250 --> 03:30.540 +だから、 このプロンプトがとてもいいスタートを切っているのがわかるだろう。 + +03:30.540 --> 03:31.890 +いくつか例を挙げてくれている。 + +03:31.890 --> 03:33.930 +私がどこから来たかを理解してくれる。 + +03:33.960 --> 03:36.240 +そして今、 私はこんなことが言える。 + +03:40.980 --> 03:42.120 +アクセントをつける。 + +03:42.450 --> 03:44.550 +ええと、 それで。 + +03:44.670 --> 03:46.410 +ああ、 小さなミスがある。 + +03:51.000 --> 03:52.050 +よし、 行くぞ。 + +03:52.050 --> 03:52.710 +訂正してくれているんだ。 + +03:52.710 --> 03:58.320 +私が聞き逃したのはアクセントで、 たぶんクエスチョンマークもそうだと思う。 + +03:58.320 --> 04:03.870 +というのも、 これはオープンソースのLLMを使ったもので、 + +04:03.870 --> 04:11.790 +最初からLLMが動いているのだ。 + +04:11.790 --> 04:14.700 +そして今度はマックの人のために、 マックにひっくり返してみよう。 diff --git a/week5/community-contributions/subtitles/srts/60614589/ko_KR.srt b/week5/community-contributions/subtitles/srts/60614589/ko_KR.srt new file mode 100755 index 0000000..69fc4a2 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60614589/ko_KR.srt @@ -0,0 +1,208 @@ +WEBVTT + +00:00.500 --> 00:07.280 +이제 여러분 컴퓨터에 직접적으로 큰 언어 모델을 실행하겠습니다 llama라는 플랫폼을 + +00:07.280 --> 00:14.330 +사용해서요 여러분 컴퓨터에서 직접 컴파일된 C++ 코드 버전의 Lms를 실행할 수 있도록요 + +00:14.360 --> 00:17.810 +먼저 윈도우부터 보여드리고 맥으로 넘어갈게요 + +00:17.840 --> 00:21.560 +라마를 시작으로 좋아하는 브라우저를 불러오세요 + +00:21.590 --> 00:22.880 +마이크로소프트 Edge로 가죠 + +00:22.880 --> 00:24.950 +라마로 가면 돼요 + +00:24.980 --> 00:31.310 +라마 라마를 검색하면 O 다음에 라마가 나와요 + +00:31.820 --> 00:35.660 +검색 결과의 첫 번째 타자는 Alam이에요 코무요 + +00:35.660 --> 00:36.650 +여기 있네요 + +00:36.680 --> 00:42.230 +llama라고 불리는 이유는 처음에 llama라는 모델 주위에 만들어졌기 + +00:42.230 --> 00:47.480 +때문이죠 메타의 오픈 소스 모델로 몇 년 전 처음 나왔을 때 정말 인기를 끌었죠 + +00:47.480 --> 00:51.230 +다운로드 작업은 다운로드 버튼을 누르는 것만큼 간단해요 + +00:51.230 --> 00:56.210 +다운로드 버튼을 누르면 윈도우용 다운로드가 시작되고 윈도우 버전을 + +00:56.210 --> 00:57.740 +다운로드하게 되죠 + +00:57.950 --> 01:02.930 +제가 다운로드한 것을 보면 다운로드 된 것을 볼 수 있어요. 
+ +01:02.930 --> 01:04.380 +함정이에요 exe요 + +01:04.560 --> 01:07.530 +파일 열기 버튼을 누르면 프롬프트가 뜨죠 + +01:07.530 --> 01:08.100 +아직 안 움직이네요 + +01:08.130 --> 01:09.000 +아직도 다운로드 중이에요 + +01:09.030 --> 01:09.630 +방금 이걸 실행했어요 + +01:09.660 --> 01:15.240 +로컬에 설치하라고 할 텐데 로컬에 설치되자마자 할 거예요 + +01:15.600 --> 01:21.060 +시작 메뉴로 가면 아마 익숙하실 텐데 PowerShell을 불러오죠 + +01:21.090 --> 01:27.630 +여기 PowerShell을 입력하면 프롬프트 되고 열기를 누르면 PowerShell이 + +01:27.630 --> 01:31.920 +나와요 지금 큰 언어 모델을 실행할 곳이죠 + +01:31.920 --> 01:36.780 +몇 분 후 수업이 시작됐는데 그냥 한 번만 입력하면 돼요 + +01:36.810 --> 01:44.280 +설치가 완료되고 확인을 누르면 그냥 ohama o l l a m a run이라고 하면 돼요 + +01:44.280 --> 01:49.290 +모델의 이름을 다음이라고 입력합니다. 우리가 사용할 모델의 이름은 + +01:49.290 --> 01:55.020 +llama 3이 될 거예요. 2는 최신 모델 중 하나죠 + +01:55.860 --> 01:57.060 +자, 시작할게요 + +01:57.060 --> 01:58.020 +달려요 + +01:58.020 --> 01:59.400 +지금 진행 중이에요 + +01:59.400 --> 02:03.060 +저 깜빡이는 커서가 의미하는 건 이 컴퓨터에서 큰 언어 모델이 실행된다는 거죠 + +02:03.060 --> 02:04.770 +비트는 처음보다 시간이 더 걸릴 거예요 + +02:04.770 --> 02:10.210 +이것은 Lama와 관련된 20억 개의 파라미터를 다운로드 해야 합니다. 당신의 인터넷 연결에 따라 시간이 좀 걸릴 수도 + +02:10.210 --> 02:13.870 +있지만 진행률 표시줄이 어떤 일이 일어나는지 보여줄 거예요. Get it. 
+ +02:14.230 --> 02:18.460 +이제 이 LLM과 얘길 해보겠습니다 free LLM으로 뭔가 + +02:18.460 --> 02:20.020 +유용한 걸 해보려고요 + +02:20.020 --> 02:21.250 +이렇게 하죠 + +02:21.280 --> 02:23.620 +지금은 스페인어를 배우려고 해요 + +02:23.620 --> 02:28.630 +그리고 제 스페인어 실력은 확실히 less llms에 비하면 한참 + +02:28.630 --> 02:29.350 +아래죠 + +02:29.710 --> 02:36.730 +제가 사용하고 있는 앱에 AI 튜터와 채팅하려면 돈을 내야 하는 서비스가 + +02:36.730 --> 02:37.720 +있어요 + +02:37.750 --> 02:42.370 +돈을 내고 싶진 않아요 오픈 소스 모델을 무료로 실행할 수 있으니까요 + +02:42.370 --> 02:48.130 +스페인어 선생님을 만들 거예요 저를 가르쳐 주실 분요 + +02:48.490 --> 02:51.400 +전부 오픈 소스예요 + +02:51.400 --> 02:57.460 +스페인어를 배우려고 한다고 할게요 + +02:58.660 --> 03:01.960 +전 완전히 초보예요 + +03:04.690 --> 03:12.000 +기초 스페인어로 대화 좀 해 주세요 + +03:13.200 --> 03:15.630 +이 프롬프트에서 시작할 거예요 + +03:15.630 --> 03:16.830 +어떻게 되는지 보죠 + +03:16.830 --> 03:17.820 +생각 중이에요 + +03:18.390 --> 03:19.230 +안녕하세요 + +03:19.590 --> 03:21.480 +스페인어 대화에 오신 걸 환영해요 + +03:21.510 --> 03:24.570 +기본적인 인사말부터 시작해 보죠 + +03:24.630 --> 03:25.800 +안녕하세요? + +03:26.250 --> 03:30.540 +아주 좋은 시작 프롬프트를 제공하고 있는 걸 보실 수 있죠 + +03:30.540 --> 03:31.890 +예를 들어볼게요 + +03:31.890 --> 03:33.930 +제 입장을 이해해주죠 + +03:33.960 --> 03:36.240 +지금은 이런 말도 할 수 있어요 + +03:40.980 --> 03:42.120 +Put it, Put it, Put it 악센트를 넣어 주세요 + +03:42.450 --> 03:44.550 +네 + +03:44.670 --> 03:46.410 +작은 실수가 있어요 + +03:51.000 --> 03:52.050 +좋아요, 됐어요 + +03:52.050 --> 03:52.710 +정정하고 있어요 + +03:52.710 --> 03:58.320 +억양을 놓친 것 같아요 물음표도요 어떤 건지 아시겠죠? 
Get it + +03:58.320 --> 04:03.870 +상업적 가치가 있는 걸 방금 만들었어요 왜냐하면 우리 박스에서 즉시 + +04:03.870 --> 04:11.790 +무료로 팔리거든요 오픈 소스 LLM을 이용해서요 get go부터 작동하는 llms죠 + +04:11.790 --> 04:14.700 +이제 Mac으로 넘어가죠 Mac으로 넘어가죠 diff --git a/week5/community-contributions/subtitles/srts/60614591/en_US.srt b/week5/community-contributions/subtitles/srts/60614591/en_US.srt new file mode 100755 index 0000000..f446370 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60614591/en_US.srt @@ -0,0 +1,82 @@ +WEBVTT + +00:00.830 --> 00:06.770 +The mantra of this course is that the best way to learn is by doing, and we will be doing stuff together + +00:06.770 --> 00:07.340 +throughout. + +00:07.370 --> 00:12.350 +There will be eight commercial projects which will be applying what you're learning to real business + +00:12.350 --> 00:15.380 +problems, and they'll be exercises for you to do the same thing. + +00:15.380 --> 00:21.050 +And I'm hoping that you'll be able to set up GitHub and be able to make your own versions and push them + +00:21.050 --> 00:26.030 +and send me a PR, a pull request, and I'll be able to merge in your code so that other people can + +00:26.030 --> 00:27.320 +see your solutions too. + +00:27.350 --> 00:32.570 +But you'll be able to add this to your GitHub and be able to show yourself that you are building projects + +00:32.570 --> 00:34.790 +to solve real world commercial problems. + +00:34.790 --> 00:38.570 +One of our projects I mentioned is going to be a chatbot assistant. + +00:38.570 --> 00:40.100 +We're going to build one for an airline. + +00:40.100 --> 00:43.400 +It's going to be able to do things like use tools to look up the price of tickets. + +00:43.400 --> 00:48.230 +It's going to make audio and it's going to show pictures like this beautiful pop art version of London + +00:48.230 --> 00:52.490 +showing here on the right as you interact with the chatbot assistant. 
+ +00:52.910 --> 00:58.910 +We're also going to be building rag pipelines, putting vectors of information in a vector data store + +00:58.940 --> 01:03.260 +that you'll have running, and we'll be able to look at things like visualizations of the vectors, + +01:03.260 --> 01:09.920 +mess around with them in 3D space, look into them to understand how different kinds of information + +01:09.920 --> 01:15.980 +get placed in different locations in vector space, and why that is fundamental to Rag retrieval, augmented + +01:15.980 --> 01:16.910 +generation. + +01:16.910 --> 01:21.800 +And then, as I say, the the moment, the big moment in the project will be in week eight, when we + +01:21.800 --> 01:27.620 +will build an Agentic AI solution that will bring together everything that you've learned about through + +01:27.620 --> 01:31.340 +the eight weeks for something which will solve a business problem. + +01:31.640 --> 01:36.260 +And I'll be able to give you some examples of how you can then apply that to solving other business + +01:36.260 --> 01:39.260 +problems that you might face in the future, or that you might have today. + +01:39.320 --> 01:44.660 +And you'll be able to watch as agents collaborate, you'll see the memory of the agents and see what's + +01:44.660 --> 01:47.180 +going on in our agent world. + +01:47.210 --> 01:48.890 +A lot to look forward to. 
diff --git a/week5/community-contributions/subtitles/srts/60614591/ja_JP.srt b/week5/community-contributions/subtitles/srts/60614591/ja_JP.srt new file mode 100755 index 0000000..9c9c640 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60614591/ja_JP.srt @@ -0,0 +1,55 @@ +WEBVTT + +00:00.830 --> 00:07.340 +このコースの信条は、 学ぶための最良の方法は実践することであり、 私たちは終始、 一緒に何かを実践していく。 + +00:07.370 --> 00:15.380 +8つの商業プロジェクトがあり、 そこで学んだことを実際のビジネス問題に応用する。 + +00:15.380 --> 00:21.050 +そして、 GitHubを立ち上げて、 自分のバージョンを作ってプッシュし、 PR(プル・リクエスト)を僕に送ってくれれば、 + +00:21.050 --> 00:27.320 +僕は君のコードをマージして、 他の人たちにも君の解決策を見てもらえるようにしたいんだ。 + +00:27.350 --> 00:34.790 +しかし、 これをGitHubに追加することで、 現実の商業的な問題を解決するためのプロジェクトを構築していることをアピールすることができる。 + +00:34.790 --> 00:38.570 +私たちのプロジェクトのひとつに、 チャットボット・アシスタントがある。 + +00:38.570 --> 00:40.100 +ある航空会社のために作るんだ。 + +00:40.100 --> 00:43.400 +チケットの値段を調べるツールなども使えるようになるだろう。 + +00:43.400 --> 00:52.490 +チャットボットのアシスタントと対話すると、 音声が流れ、 右側にあるロンドンの美しいポップアートのような写真が表示される。 + +00:52.910 --> 00:58.910 +また、 ラグ・パイプラインを構築し、 ベクトル・データ・ストアに情報のベクトルを置き、 + +00:58.940 --> 01:03.260 +そのベクトルを視覚化したり、 3D空間でいじくりまわしたり、 + +01:03.260 --> 01:16.910 +ベクトル空間のさまざまな場所にさまざまな種類の情報がどのように配置されるかを調べたり、 なぜそれがラグ検索や拡張世代にとって基本的なのかを理解したりすることもできるだろう。 + +01:16.910 --> 01:21.800 +そして、 8週目には、 8週目までに学んだことをすべてまとめて、 + +01:21.800 --> 01:31.340 +ビジネス上の問題を解決するためのエージェント型AIソリューションを構築します。 + +01:31.640 --> 01:36.260 +そして、 将来直面するかもしれない、 あるいは現在抱えているかもしれない他のビジネス上の問題を解決するために、 それをどのように応用できるか、 + +01:36.260 --> 01:39.260 +いくつかの例を挙げることができるだろう。 + +01:39.320 --> 01:47.180 +また、 エージェントの共同作業を見ることができ、 エージェントのメモリーを見ることができ、 エージェントの世界で何が起こっているかを見ることができる。 + +01:47.210 --> 01:48.890 +楽しみなことがたくさんある。 diff --git a/week5/community-contributions/subtitles/srts/60614591/ko_KR.srt b/week5/community-contributions/subtitles/srts/60614591/ko_KR.srt new file mode 100755 index 0000000..27dad8e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60614591/ko_KR.srt @@ -0,0 +1,79 @@ +WEBVTT + +00:00.830 --> 00:06.770 +이번 수업의 만트라는 가장 좋은 학습 방법은 행동이라는 겁니다 수업 
내내 함께하게 될 + +00:06.770 --> 00:07.340 +거예요 + +00:07.370 --> 00:12.350 +8개의 상업 프로젝트가 있는데 여러분이 배운 것을 실제 사업 문제에 적용할 겁니다 + +00:12.350 --> 00:15.380 +그리고 같은 것을 하는 연습을 할 거고요 + +00:15.380 --> 00:21.050 +여러분이 깃허브를 설정해 자신만의 버전을 만들어 푸시하고 PR, 끌어오기 + +00:21.050 --> 00:26.030 +요청을 보내시면 여러분 코드에 병합해 다른 사람들도 솔루션을 볼 수 + +00:26.030 --> 00:27.320 +있게 할게요 + +00:27.350 --> 00:32.570 +하지만 이걸 깃허브에 추가해 현실 세계의 상업적 문제를 해결하는 프로젝트를 만들고 + +00:32.570 --> 00:34.790 +있다는 걸 보여줄 수 있어요 + +00:34.790 --> 00:38.570 +제가 말씀드린 프로젝트 중 하나가 챗봇 비서가 되는 거예요 + +00:38.570 --> 00:40.100 +항공사에도 하나 만들 거예요 + +00:40.100 --> 00:43.400 +툴을 이용해 티켓 가격을 검색하는 그런 작업을 할 수 있어요 + +00:43.400 --> 00:48.230 +오디오를 만들고 사진을 보여줄 겁니다 이 아름다운 팝아트 버전 런던처럼요 + +00:48.230 --> 00:52.490 +여러분이 챗봇 비서와 상호 작용할 때 오른쪽에 보이는 거죠 + +00:52.910 --> 00:58.910 +래그 파이프라인도 만들 겁니다 벡터 데이터 저장소에 정보의 벡터를 넣는 거죠 + +00:58.940 --> 01:03.260 +벡터에 대한 시각화 같은 걸 살펴볼 수 있을 겁니다 3D + +01:03.260 --> 01:09.920 +공간에서 그걸 뒤흔들고 살펴봐서 다양한 종류의 정보가 어떻게 벡터 공간의 다양한 + +01:09.920 --> 01:16.910 +장소에 있는지 이해할 겁니다 왜 그게 대량 회수, 증강 세대에 필수적인지도요 + +01:16.910 --> 01:21.800 +말씀드렸듯이 프로젝트의 핵심은 8주 차에 찾아옵니다 에이전트 + +01:21.800 --> 01:27.620 +AI 솔루션을 개발할 때죠 지난 8주간 여러분이 배운 모든 것을 하나로 + +01:27.620 --> 01:31.340 +합쳐 사업상의 문제를 해결할 거예요 + +01:31.640 --> 01:36.260 +여러분이 미래에 직면하거나 오늘 직면할 수 있는 다른 비즈니스 문제 해결에 + +01:36.260 --> 01:39.260 +어떻게 적용할 수 있는지 예를 들어드릴 수 있어요 + +01:39.320 --> 01:44.660 +요원들이 협력하는 모습을 볼 수 있을 겁니다 요원들의 메모리와 요원 세계에서 + +01:44.660 --> 01:47.180 +벌어지는 일을 볼 수 있죠 + +01:47.210 --> 01:48.890 +기대가 커요 diff --git a/week5/community-contributions/subtitles/srts/60616407/en_US.srt b/week5/community-contributions/subtitles/srts/60616407/en_US.srt new file mode 100755 index 0000000..3e3dd27 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616407/en_US.srt @@ -0,0 +1,217 @@ +WEBVTT + +00:00.560 --> 00:03.110 +And now over to my Mac people. + +00:03.110 --> 00:04.130 +And I have news for you. + +00:04.130 --> 00:05.570 +It's exactly the same thing. 
+
+00:05.570 --> 00:10.640
+You go to a favorite browser, you go to ollama.com, you'll see the same screen.
+
+00:10.640 --> 00:14.600
+You press download and then download for Mac OS.
+
+00:14.630 --> 00:17.150
+It will of course download it locally.
+
+00:17.150 --> 00:20.390
+You will then run the installer and it will install it.
+
+00:20.390 --> 00:25.460
+You then bring up a terminal window and you bring up a terminal window by going to applications and
+
+00:25.460 --> 00:26.990
+utilities and then terminal.
+
+00:27.050 --> 00:32.960
+And when you bring up that terminal window as it comes up here, you type the same instruction ollama
+
+00:33.830 --> 00:34.640
+run.
+
+00:34.640 --> 00:42.320
+And now the name of the model, which in our case is llama 3.2, the very latest small model from meta.
+
+00:42.320 --> 00:44.360
+And it will then come up.
+
+00:44.420 --> 00:49.190
+It will probably take a minute or two because it will need to download the 2 billion parameters associated
+
+00:49.190 --> 00:53.030
+with 3.2, but then it will be running just like this.
+
+00:53.030 --> 00:56.180
+And now again we can try and put it to good use.
+
+00:56.270 --> 00:58.550
+We can ask it this time we'll use a different language.
+
+00:58.550 --> 00:58.910
+Why not?
+
+00:58.940 --> 01:04.470
+Let's say I am trying to learn French.
+
+01:05.400 --> 01:07.620
+I am a complete beginner.
+
+01:09.870 --> 01:17.400
+Please have a conversation with me to teach me French.
+
+01:18.480 --> 01:19.980
+And let's see how it does.
+
+01:20.010 --> 01:22.530
+The first thing you'll notice is that it's a lot faster.
+
+01:22.530 --> 01:25.470
+And that's because I'm actually running on a mac with an M1 chip.
+
+01:25.470 --> 01:27.870
+And what we were seeing earlier was an emulation of a PC.
+
+01:27.870 --> 01:29.850
+So of course this is a bit faster.
+
+01:29.940 --> 01:34.080
+Your computer may be somewhere in between those two, depending on your architecture.
+ +01:34.350 --> 01:39.360 +So off it's gone with the start and you can see that it's done a fab job. + +01:39.360 --> 01:42.000 +It's clearly given a bunch of different options. + +01:42.000 --> 01:43.230 +It's explained itself. + +01:43.230 --> 01:45.150 +It's super impressive. + +01:45.150 --> 01:48.240 +Uh, let's say uh, sure. + +01:49.890 --> 01:54.960 +So I'm not actually answering his question, but there we go. + +01:54.960 --> 01:56.070 +You get the idea. + +01:56.070 --> 02:01.770 +And the point I wanted to make again is that this is, in fact, a paid commercial product on an app + +02:01.770 --> 02:02.520 +that I'm using. + +02:02.520 --> 02:03.540 +And here we have it. + +02:03.570 --> 02:10.120 +We have built effectively a commercial project for free, using open source in a matter of minutes, + +02:10.150 --> 02:14.290 +just immediately unleashing the power of an LLM on your computer. + +02:15.220 --> 02:18.670 +So I now have an exercise for you right away. + +02:18.670 --> 02:20.380 +I'd like you to of course, do this. + +02:20.410 --> 02:22.150 +Install it, make sure that it works. + +02:22.150 --> 02:26.110 +If you have any problems whatsoever, then you can always contact me at any point. + +02:26.140 --> 02:31.570 +Feel free to message me or send me an email or LinkedIn with me and ask for help. + +02:31.570 --> 02:35.530 +But hopefully this is a really easy install that will have you up and running. + +02:35.560 --> 02:36.370 +Do this. + +02:36.370 --> 02:39.040 +Have a quick experiment with a with a language. + +02:39.040 --> 02:41.950 +Pick a language maybe one you don't know and try it out. + +02:42.040 --> 02:46.060 +And then the next thing to try is I'd like you to experiment with different models. + +02:46.060 --> 02:50.650 +So if we go back to the llama page, you'll see that there's a model heading up here. + +02:50.650 --> 02:53.710 +And for both PC and Mac people, it's the same thing. 
+
+02:53.710 --> 02:57.070
+You can see some different models and read about the story behind them.
+
+02:57.070 --> 03:03.700
+Llama 3.2 from Meta, Gemma from Google, which is its open source low parameter version.
+
+03:03.700 --> 03:10.130
+Qwen, which is a powerhouse of a model from Alibaba Cloud, which is less well known, but it is one
+
+03:10.130 --> 03:11.360
+of the strongest.
+
+03:11.390 --> 03:16.970
+You can click on a model and you will then see the different versions of it, and over here is the way
+
+03:16.970 --> 03:17.750
+that you can run it.
+
+03:17.750 --> 03:23.030
+You can simply press that button there to copy it to the clipboard, and then paste it into your PowerShell
+
+03:23.030 --> 03:24.170
+or your terminal.
+
+03:24.260 --> 03:27.650
+And these are the various different versions of it.
+
+03:27.650 --> 03:31.310
+And you can just go back to models, browse through them.
+
+03:31.310 --> 03:34.250
+Phi 3.5 from Microsoft.
+
+03:34.400 --> 03:39.350
+Also very powerful model and you can read more about them.
+
+03:39.350 --> 03:40.580
+Uh, experiment with them.
+
+03:40.580 --> 03:43.580
+And I would like to ask you to try this out for a new language.
+
+03:43.670 --> 03:49.550
+Try exploring, maybe try and learn something completely different and use different models and see
+
+03:49.550 --> 03:52.820
+if you can figure out which models are best for you.
+
+03:52.850 --> 03:57.590
+As you pick some of the larger models, they may be slower on your computer, so find out which models
+
+03:57.590 --> 04:00.320
+perform the best and give you the best results.
+
+04:00.320 --> 04:03.890
+That's your exercise and then I will see you for the next time when we will.
+
+04:03.920 --> 04:06.920
+Then do things like introductions and course and all that stuff.
+
+04:07.010 --> 04:07.700
+See you then.
diff --git a/week5/community-contributions/subtitles/srts/60616407/ja_JP.srt b/week5/community-contributions/subtitles/srts/60616407/ja_JP.srt new file mode 100755 index 0000000..baf7321 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616407/ja_JP.srt @@ -0,0 +1,193 @@ +WEBVTT + +00:00.560 --> 00:03.110 +そして次はマックの人たちだ。 + +00:03.110 --> 00:04.130 +そして、 あなたにお知らせがある。 + +00:04.130 --> 00:05.570 +まったく同じことだ。 + +00:05.570 --> 00:10.640 +お気に入りのブラウザーでアラームにアクセスする。 comをクリックすると、 同じ画面が表示されます。 + +00:10.640 --> 00:14.600 +ダウンロードを押して、 Mac OS用のダウンロードをする。 + +00:14.630 --> 00:17.150 +もちろんローカルにダウンロードされる。 + +00:17.150 --> 00:20.390 +その後、 インストーラーを実行するとインストールされる。 + +00:20.390 --> 00:26.990 +アプリケーションとユーティリティ」→「ターミナル」でターミナルウィンドウを表示する。 + +00:27.050 --> 00:34.640 +そして、 ここに表示されているようにターミナル・ウィンドウを表示させ、 Olama runと同じ命令を入力する。 + +00:34.640 --> 00:42.320 +そしてモデル名、 この場合はラマ3だ。 2、 メタの最新小型モデル。 + +00:42.320 --> 00:44.360 +そして、 それが表示される。 + +00:44.420 --> 00:49.190 +3.に関連する20億のパラメーターをダウンロードする必要があるので、 おそらく1、 + +00:49.190 --> 00:53.030 +2分かかるだろう。 2、 しかし、 その場合はこのように実行される。 + +00:53.030 --> 00:56.180 +そして今また、 それを有効に使おうとしている。 + +00:56.270 --> 00:58.550 +今度は違う言葉で聞いてみよう。 + +00:58.550 --> 00:58.910 +なぜだ? 
+ +00:58.940 --> 01:04.470 +私がフランス語を学ぼうとしているとしよう。 + +01:05.400 --> 01:07.620 +私は全くの初心者です。 + +01:09.870 --> 01:17.400 +私にフランス語を教えてください。 + +01:18.480 --> 01:19.980 +どうなるか見てみよう。 + +01:20.010 --> 01:22.530 +まず気づくのは、 かなり速くなったということだ。 + +01:22.530 --> 01:25.470 +そしてそれは、 私が実際にM1チップを搭載したマックで動いているからだ。 + +01:25.470 --> 01:27.870 +そして、 先ほど見ていたのはPCのエミュレーションだった。 + +01:27.870 --> 01:29.850 +だからもちろん、 この方が少し速い。 + +01:29.940 --> 01:34.080 +あなたのコンピューターは、 あなたのアーキテクチャによって、 この2つの中間にあるかもしれない。 + +01:34.350 --> 01:39.360 +だから、 スタートとともに走り出し、 素晴らしい仕事をしているのがわかるだろう。 + +01:39.360 --> 01:42.000 +明らかにいろいろな選択肢が与えられている。 + +01:42.000 --> 01:43.230 +自分で説明したことだ。 + +01:43.230 --> 01:45.150 +超感動的だ。 + +01:45.150 --> 01:48.240 +ああ、 そうだね。 + +01:49.890 --> 01:54.960 +だから、 彼の質問には答えていないんだけどね。 + +01:54.960 --> 01:56.070 +おわかりだろう。 + +01:56.070 --> 02:02.520 +そして、 私がもう一度言いたかったのは、 これは実際、 私が使っているアプリの有料商用製品だということだ。 + +02:02.520 --> 02:03.540 +そしてここにある。 + +02:03.570 --> 02:14.290 +私たちは、 オープンソースを使い、 数分のうちに、 事実上商業的なプロジェクトを無料で構築しました。 + +02:15.220 --> 02:18.670 +では、 さっそく練習してみよう。 + +02:18.670 --> 02:20.380 +もちろん、 そうしてほしい。 + +02:20.410 --> 02:22.150 +インストールし、 動作することを確認する。 + +02:22.150 --> 02:26.110 +何か問題があれば、 いつでも私に連絡してください。 + +02:26.140 --> 02:31.570 +メッセージやメール、 LinkedInで気軽に相談してほしい。 + +02:31.570 --> 02:35.530 +でも、 これが本当に簡単なインストールで、 すぐに使えるようになることを祈るよ。 + +02:35.560 --> 02:36.370 +こうするんだ。 + +02:36.370 --> 02:39.040 +言語を使って簡単な実験をしてみよう。 + +02:39.040 --> 02:41.950 +知らない言語を選んで、 試してみる。 + +02:42.040 --> 02:46.060 +そして次に試してほしいのは、 いろいろなモデルで実験してみることだ。 + +02:46.060 --> 02:50.650 +リャマのページに戻ると、 ここにモデルの見出しがあるのがわかるだろう。 + +02:50.650 --> 02:53.710 +PCでもMacでも同じことだ。 + +02:53.710 --> 02:57.070 +さまざまなモデルを見ることができ、 その背景にあるストーリーも読むことができる。 + +02:57.070 --> 03:03.700 +ラマ 3. 
2 GoogleのMeta Jamaから。 これはオープンソースの低パラメータ版である。 + +03:03.700 --> 03:11.360 +クアンはアリババ・クラウドのモデルで、 あまり知られていないが、 強力なもののひとつだ。 + +03:11.390 --> 03:17.750 +モデルをクリックすると、 さまざまなバージョンが表示されます。 + +03:17.750 --> 03:24.170 +そのボタンを押してクリップボードにコピーし、 PowerShellやターミナルにペーストすればいい。 + +03:24.260 --> 03:27.650 +そして、 これらは様々な異なるバージョンである。 + +03:27.650 --> 03:31.310 +そして、 モデルに戻ってブラウズすることができる。 + +03:31.310 --> 03:34.250 +実験53 マイクロソフトから5 + +03:34.400 --> 03:39.350 +こちらも非常にパワフルなモデルで、 詳しくはこちらをご覧ください。 + +03:39.350 --> 03:40.580 +実験してみるんだ。 + +03:40.580 --> 03:43.580 +そして、 これを新しい言語で試していただきたい。 + +03:43.670 --> 03:52.820 +まったく違うことを学んでみたり、 いろいろなモデルを使ってみたりして、 どのモデルが自分にとってベストなのかを探ってみてほしい。 + +03:52.850 --> 03:57.590 +大型のモデルを選ぶと、 コンピュータの動作が遅くなることがあるので、 どのモデルが最も性能がよく、 + +03:57.590 --> 04:00.320 +最高の結果が得られるかを調べてください。 + +04:00.320 --> 04:03.890 +それがあなたの練習であり、 また次の機会にお会いしましょう。 + +04:03.920 --> 04:06.920 +それから、 自己紹介とか、 コースとか、 そういうことをやるんだ。 + +04:07.010 --> 04:07.700 +ではまた diff --git a/week5/community-contributions/subtitles/srts/60616407/ko_KR.srt b/week5/community-contributions/subtitles/srts/60616407/ko_KR.srt new file mode 100755 index 0000000..0db0804 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616407/ko_KR.srt @@ -0,0 +1,214 @@ +WEBVTT + +00:00.560 --> 00:03.110 +이제 맥을 담당하는 분들에게 넘길게요 + +00:03.110 --> 00:04.130 +알려드릴 게 있어요 + +00:04.130 --> 00:05.570 +완전히 똑같아요 + +00:05.570 --> 00:10.640 +즐겨찾기 브라우저로 가거나 알람으로 가요 같은 화면이 보일 거예요 + +00:10.640 --> 00:14.600 +다운로드를 누르고 Mac OS를 다운로드해요 + +00:14.630 --> 00:17.150 +물론 로컬로 다운로드 되죠 + +00:17.150 --> 00:20.390 +그런 다음 설치 관리자를 실행하면 설치되죠 + +00:20.390 --> 00:25.460 +그런 다음 터미널 윈도우를 불러옵니다 어플리케이션과 유틸리티로 가서 터미널 + +00:25.460 --> 00:26.990 +윈도우를 불러오죠 + +00:27.050 --> 00:34.640 +터미널 창을 불러와서 여기 올라옵니다 동일한 지침을 올라마 런이라고 입력하세요 + +00:34.640 --> 00:42.320 +모델의 이름은, 우리 경우엔 llama 3이죠 2번, 메타에서 나온 최신 소형 모델이에요 + +00:42.320 --> 00:44.360 +그럼 떠오를 거예요 + +00:44.420 --> 00:49.190 +1, 2분 정도 걸릴 겁니다 3과 관련된 20억 개의 매개 변수를 + +00:49.190 --> 00:53.030 +다운로드 해야 하니까요 2개요, 하지만 그러면 이렇게 작동할 거예요 + 
+00:53.030 --> 00:56.180 +이제 다시 좋은 일에 쓸 수 있게 됐어요 Put it up Put it up Put it up Put it up Put it up Put it + +00:56.270 --> 00:58.550 +이번엔 다른 언어로 물어볼 거예요 + +00:58.550 --> 00:58.910 +왜요? + +00:58.940 --> 01:04.470 +제가 프랑스어를 배우려고 한다고 가정해 보죠 + +01:05.400 --> 01:07.620 +전 완전히 초보예요 + +01:09.870 --> 01:17.400 +프랑스어 좀 가르쳐 주세요 + +01:18.480 --> 01:19.980 +어떻게 되는지 보죠 + +01:20.010 --> 01:22.530 +가장 먼저 눈에 띄는 건 훨씬 빠르다는 거예요 + +01:22.530 --> 01:25.470 +M1 칩이 달린 맥으로 작동하기 때문이죠 + +01:25.470 --> 01:27.870 +아까 본 건 PC의 에뮬레이션이었어요 + +01:27.870 --> 01:29.850 +비트가 좀 더 빠르죠 + +01:29.940 --> 01:34.080 +여러분의 컴퓨터는 그 둘 사이 어딘가에 있을 겁니다 여러분의 아키텍처에 따라서요 + +01:34.350 --> 01:39.360 +시작과 함께 꺼냈는데 아주 잘 된 걸 볼 수 있죠 + +01:39.360 --> 01:42.000 +다양한 선택지가 주어졌어요 + +01:42.000 --> 01:43.230 +설명이 됐어요 + +01:43.230 --> 01:45.150 +정말 인상적이에요 + +01:45.150 --> 01:48.240 +이렇게 해두죠 + +01:49.890 --> 01:54.960 +질문에 대답하는 건 아니지만 어쨌든 됐네요 + +01:54.960 --> 01:56.070 +Get it, Get it 아시겠죠? + +01:56.070 --> 02:01.770 +다시 강조하고 싶은 점은 이건 사실 제가 사용하는 앱의 유료 상업 제품이라는 + +02:01.770 --> 02:02.520 +거예요 + +02:02.520 --> 02:03.540 +여기 있네요 + +02:03.570 --> 02:10.120 +오픈 소스를 이용해 몇 분 만에 상업 프로젝트를 무료로 진행했습니다 컴퓨터에 + +02:10.150 --> 02:14.290 +LLM의 힘을 즉시 발휘할 수 있죠 + +02:15.220 --> 02:18.670 +그래서 바로 실습을 준비했어요 + +02:18.670 --> 02:20.380 +물론 이렇게 해 주세요 + +02:20.410 --> 02:22.150 +설치하고 작동하는지 확인해요 + +02:22.150 --> 02:26.110 +무슨 문제가 생기면 언제든 연락해요 + +02:26.140 --> 02:31.570 +언제든 제게 메시지를 보내거나 이메일이나 링크드인을 보내 도움을 청하세요 + +02:31.570 --> 02:35.530 +하지만 설치가 쉬워서 잘 작동하면 좋겠네요 + +02:35.560 --> 02:36.370 +이렇게 해요 + +02:36.370 --> 02:39.040 +언어를 가지고 빠르게 실험해 볼게요 + +02:39.040 --> 02:41.950 +모르는 언어를 골라서 한번 해 보세요 + +02:42.040 --> 02:46.060 +다음으로는 다양한 모델을 실험해 보세요 + +02:46.060 --> 02:50.650 +llama 페이지로 돌아가면 모델이 위로 향하고 있는 게 보이시죠 + +02:50.650 --> 02:53.710 +PC와 Mac 모두 같은 거예요 + +02:53.710 --> 02:57.070 +다양한 모델을 보고 그 뒤에 숨겨진 이야기를 들을 수 있죠 + +02:57.070 --> 03:03.700 +라마 3요 구글 메타 자마에서 2개요 오픈 소스 매개 변수 버전이 낮죠 + +03:03.700 --> 03:10.130 +콴은 동력원인 알리바바 클라우드의 모델로 less는 알려졌지만 가장 강력한 + +03:10.130 
--> 03:11.360 +모델 중 하나죠 + +03:11.390 --> 03:16.970 +모델을 클릭하면 다양한 버전을 볼 수 있어요 여기서 실행할 + +03:16.970 --> 03:17.750 +수 있죠 + +03:17.750 --> 03:23.030 +저 버튼을 눌러 클립보드로 복사한 다음 PowerShell이나 터미널에 붙여넣으면 + +03:23.030 --> 03:24.170 +돼요 + +03:24.260 --> 03:27.650 +이건 다양한 버전이에요 + +03:27.650 --> 03:31.310 +모델로 돌아가서 살펴볼 수 있어요 + +03:31.310 --> 03:34.250 +53번째 실험이에요 마이크로소프트에서 5년요 + +03:34.400 --> 03:39.350 +아주 강력한 모델이고 더 자세히 읽어볼 수 있어요 + +03:39.350 --> 03:40.580 +실험해 보려고요 + +03:40.580 --> 03:43.580 +이걸 새로운 언어로 사용해 보세요 + +03:43.670 --> 03:49.550 +완전히 다른 것을 탐구하고 배우려고 노력하세요 다른 모델을 사용해 보고 + +03:49.550 --> 03:52.820 +어떤 모델이 가장 좋은지 알아내세요 + +03:52.850 --> 03:57.590 +대형 모델을 고를수록 컴퓨터에서 느리게 보일 수 있으니 어떤 모델이 가장 잘 작동하는지 + +03:57.590 --> 04:00.320 +알아보고 가장 좋은 결과를 내세요 + +04:00.320 --> 04:03.890 +오늘 한 운동으로 다음에 다시 만나요 + +04:03.920 --> 04:06.920 +소개나 수업 같은 걸 하는 거죠 + +04:07.010 --> 04:07.700 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/60616423/en_US.srt b/week5/community-contributions/subtitles/srts/60616423/en_US.srt new file mode 100755 index 0000000..417403f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616423/en_US.srt @@ -0,0 +1,262 @@ +WEBVTT + +00:00.050 --> 00:05.870 +So I hope you've just enjoyed yourself experimenting with different LMS locally on your box using the + +00:05.870 --> 00:07.400 +power of Olama. + +00:07.430 --> 00:10.040 +You've probably had similar experience to me, which is. + +00:10.040 --> 00:16.130 +I certainly found that Quinn 2.5 is perhaps the most powerful model when it comes to grasp of different + +00:16.130 --> 00:16.820 +languages. + +00:16.820 --> 00:20.300 +Some of the other models are better, I think, at explaining. + +00:20.360 --> 00:22.820 +So I'd be very interested to hear your observations. + +00:22.820 --> 00:26.750 +Please do post them with the course or message me direct. 
+
+00:26.780 --> 00:31.370
+I'd love to hear what you've discovered and this kind of experimenting with different models and
+
+00:31.370 --> 00:34.460
+finding the one that works best for your problem.
+
+00:34.460 --> 00:37.160
+That is a critical skill for an LLM engineer.
+
+00:37.160 --> 00:39.560
+So this was valuable time spent.
+
+00:39.590 --> 00:40.370
+All right.
+
+00:40.370 --> 00:42.620
+Let's talk about the next eight weeks.
+
+00:42.620 --> 00:50.570
+So I am looking to take you from where you are today, over on the left to being a master of LLM engineering
+
+00:50.570 --> 00:51.650
+in eight weeks time.
+
+00:51.650 --> 00:53.090
+And this is how we'll do it.
+
+00:53.120 --> 00:59.150
+We'll start this week by looking at models at the frontier of what's possible today, which people call
+
+00:59.150 --> 01:00.200
+frontier models.
+
+01:00.260 --> 01:09.230
+Things like GPT-4o, o1 preview and Claude 3.5, and a number of other pioneering models that are closed
+
+01:09.240 --> 01:11.730
+source and are able to achieve amazing things.
+
+01:11.730 --> 01:16.800
+And we'll do that through the web user interface like ChatGPT, and also then through the APIs.
+
+01:17.280 --> 01:22.410
+And we're going to build a commercial project, something immediately that will be useful, and there'll
+
+01:22.440 --> 01:25.290
+be an interesting commercial exercise for you as well.
+
+01:25.410 --> 01:31.380
+Then next week we will slap a user interface on top of it using a platform which I love, which is called
+
+01:31.380 --> 01:32.160
+Gradio.
+
+01:32.160 --> 01:34.980
+And we will have good fun with it and you'll see that I love it.
+
+01:34.980 --> 01:40.260
+I go on about it a bit, but it's so easy to use and it's so easy for people like me who are terrible
+
+01:40.260 --> 01:45.300
+at front end to build a nice, sharp user interface very quickly indeed.
+
+01:45.300 --> 01:53.400
+We'll do it to solve a classic Gen AI use case, which is building an AI assistant, a chatbot, and
+
+01:53.400 --> 01:57.120
+we'll do so in a way that has audio and pictures.
+
+01:57.120 --> 02:03.720
+So it's multimodal and it will be able to use tools, which means that it's able to call out to code
+
+02:03.720 --> 02:07.650
+running on your computer, which sounds kind of spooky, but it's going to make sense when we do it.
+
+02:07.650 --> 02:08.760
+So that is all.
+
+02:08.760 --> 02:09.690
+Week two.
+
+02:10.290 --> 02:16.380
+In week three, we turn to open source, and we use the ubiquitous Hugging Face platform, which is
+
+02:16.380 --> 02:21.870
+used by data scientists and LLM engineers across the board and will use it to build both.
+
+02:21.960 --> 02:27.270
+We'll use the simple API in hugging face called the pipelines API, and then we'll use the more advanced
+
+02:27.270 --> 02:32.040
+API, and we'll explore things like Tokenizers and models in Hugging Face.
+
+02:32.280 --> 02:38.460
+In week four, we're going to talk about something which is a particularly thorny issue in the world
+
+02:38.460 --> 02:42.840
+of AI, which is there are so many models to choose from.
+
+02:42.840 --> 02:47.220
+How do you go about selecting what is the right model for the task you have at hand?
+
+02:47.220 --> 02:52.650
+So we'll work on things like benchmarks and leaderboards and figure out how do you go about that
+
+02:52.650 --> 02:53.940
+decision path.
+
+02:53.940 --> 02:58.560
+And then we're going to take on a particularly different kind of commercial problem about generating
+
+02:58.560 --> 02:59.160
+code.
+
+02:59.160 --> 03:06.330
+We're going to build an application which is able to rewrite Python code as C plus plus high performance
+
+03:06.330 --> 03:07.380
+C plus plus code.
+
+03:07.380 --> 03:11.010
+And we're going to then try it out with a bunch of closed source and open source models.
+ +03:11.010 --> 03:12.270 +And one of them will be the winner. + +03:12.300 --> 03:15.390 +The one that's the winner is going to take our test Python code. + +03:15.390 --> 03:22.620 +It's going to rewrite it and the new code is going to run 60,000 times faster, which is shocking. + +03:22.650 --> 03:24.210 +And you will see that yourself. + +03:24.270 --> 03:28.760 +And then there'll be some exercises for you to build other kinds of code generation tools. + +03:29.210 --> 03:35.780 +In week five, we will turn to one of the the topics that is super hot at the moment, which is rag + +03:35.780 --> 03:43.490 +retrieval, augmented generation, using, uh, data stores of information to add expertise to your + +03:43.490 --> 03:49.640 +LLM will be building our own Rag pipeline for answering questions that pertain to an organization. + +03:49.640 --> 03:54.860 +And then there'll be a difficult commercial challenge for you and exercise in which you apply this to + +03:54.890 --> 03:56.480 +your own information. + +03:56.480 --> 04:01.640 +And I'm really excited to see what people make of this, and to see some of your projects of rebuilding + +04:01.670 --> 04:03.860 +a Rag pipeline for yourself. + +04:04.790 --> 04:10.400 +In week six, we begin our three week flagship project for this course. + +04:10.400 --> 04:13.250 +Uh, week six, we will set up the business problem. + +04:13.250 --> 04:19.340 +We'll do a lot of work on data, and we're then going to create some traditional machine learning models, + +04:19.340 --> 04:21.410 +which is very important to do to build a baseline. + +04:21.410 --> 04:26.720 +And then we'll try models at the frontier, and we'll fine tune models at the frontier as well, to + +04:26.750 --> 04:29.810 +do as well as we possibly can with this business problem. + +04:29.810 --> 04:33.110 +In week seven, we'll apply it to open source. 
+ +04:33.110 --> 04:38.080 +We're going to take open source models and they're initially going to perform terribly, and we're going + +04:38.080 --> 04:44.440 +to make it our mission to improve those open source models by fine tuning until at least we can compete + +04:44.440 --> 04:46.000 +with GPT four. + +04:46.210 --> 04:47.680 +The model at the frontier. + +04:47.680 --> 04:52.810 +And I'm not going to tell you what happens, but I will tell you that I believe that the results will + +04:52.810 --> 04:53.770 +astonish you. + +04:53.800 --> 04:54.880 +I will tell you that. + +04:54.880 --> 04:59.080 +So it is very much worth hanging on and seeing what happens in week seven. + +04:59.350 --> 05:05.200 +But then it all comes together in the finale in week eight, which is a fitting conclusion to the eight + +05:05.230 --> 05:13.060 +weeks we are going to build a fully autonomous Agentic AI solution, which will have seven agents collaborating + +05:13.060 --> 05:15.430 +to solve a real commercial problem. + +05:15.730 --> 05:17.170 +And the end. + +05:17.200 --> 05:21.400 +Not only will it be doing something where it scans the internet for various things, but will end up + +05:21.400 --> 05:24.880 +sending you push notifications with some of its discoveries. + +05:24.940 --> 05:27.340 +So it's going to be really fabulous. + +05:27.340 --> 05:30.100 +It's going to have a terrific result at the end of it. + +05:30.130 --> 05:35.830 +It will be a good way to to be a culmination of everything that you've learned each week, building + +05:35.830 --> 05:41.710 +on top of it, of the of the prior week, and resulting in true commercial projects that you'll be able + +05:41.710 --> 05:45.100 +to put into action in your day job right away. 
diff --git a/week5/community-contributions/subtitles/srts/60616423/ja_JP.srt b/week5/community-contributions/subtitles/srts/60616423/ja_JP.srt new file mode 100755 index 0000000..4520f17 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616423/ja_JP.srt @@ -0,0 +1,193 @@ +WEBVTT + +00:00.050 --> 00:07.400 +というわけで、 Olamaのパワーを使って、 ローカルでさまざまなLMSの実験を楽しんでいただけたなら幸いです。 + +00:07.430 --> 00:10.040 +おそらく私と同じような経験をしたことがあるだろう。 + +00:10.040 --> 00:16.820 +クイン2は確かにそうだった。 5はおそらく、 異なる言語を把握する上で最も強力なモデルだろう。 + +00:16.820 --> 00:20.300 +他のモデルの中には、 説明するのが上手なものもあると思う。 + +00:20.360 --> 00:22.820 +だから、 あなたの見解を聞きたい。 + +00:22.820 --> 00:26.750 +コースと一緒に投稿するか、 私に直接メッセージをください。 + +00:26.780 --> 00:34.460 +いろいろなモデルを試してみて、 自分の問題に最も適したものを見つける。 + +00:34.460 --> 00:37.160 +これはLMエンジニアにとって重要なスキルだ。 + +00:37.160 --> 00:39.560 +だからこれは貴重な時間だった。 + +00:39.590 --> 00:40.370 +分かった。 + +00:40.370 --> 00:42.620 +これからの8週間について話そう。 + +00:42.620 --> 00:51.650 +だから私は、 あなたが今いる左側にいる状態から、 8週間後にはLMエンジニアリングのマスターになることを目指している。 + +00:51.650 --> 00:53.090 +そして、 こうするんだ。 + +00:53.120 --> 01:00.200 +今週は、 フロンティア・モデルと呼ばれる、 現在可能なことのフロンティアにあるモデルを見ることから始めよう。 + +01:00.260 --> 01:11.730 +GPT4001のプレビューやクロード3などだ。 5、 そしてクローズドソースでありながら驚くべきことを成し遂げることができる先駆的なモデルの数々。 + +01:11.730 --> 01:16.800 +ChatGPTのようなウェブ・ユーザー・インターフェイス、 そしてAPIを通してそれを行います。 + +01:17.280 --> 01:25.290 +そして、 すぐに役立つような商業的なプロジェクトを立ち上げる。 + +01:25.410 --> 01:32.160 +そして来週は、 私が大好きなGradioというプラットフォームを使って、 その上にユーザー・インターフェースを載せる予定だ。 + +01:32.160 --> 01:34.980 +そして、 私たちはそれを楽しみ、 私がそれを愛していることがわかるだろう。 + +01:34.980 --> 01:45.300 +少し説明してしまうが、 とても使いやすく、 フロントエンドが苦手な私のような人間でも、 素晴らしくシャープなユーザー・インターフェースを実に簡単に素早く構築することができる。 + +01:45.300 --> 01:53.400 +古典的なJNIのユースケース、 つまりAIアシスタント、 チャットボットを構築することを解決するために、 + +01:53.400 --> 01:57.120 +音声と画像を使った方法でそれを行います。 + +01:57.120 --> 02:07.650 +マルチモーダルであり、 ツールを使用することができる。 つまり、 コンピューター上で実行されているコードを呼び出すことができる。 + +02:07.650 --> 02:08.760 +それだけだ。 + +02:08.760 --> 02:09.690 +週目。 + +02:10.290 --> 02:16.380 +第3週はオープンソースに目を向け、 
データサイエンティストやLMエンジニアがこぞって使用しているユビキタス・プラットフォーム「Hugging + +02:16.380 --> 02:21.870 +Face」を使い、 両者を構築する。 + +02:21.960 --> 02:27.270 +パイプラインAPIと呼ばれるハギング・フェイスのシンプルなAPIを使い、 より高度なAPIを使い、 + +02:27.270 --> 02:32.040 +ハギング・フェイスのトークナイザーやモデルのようなものを探求していく。 + +02:32.280 --> 02:42.840 +第4週は、 AIの世界で特に茨の道となっている問題についてお話しします。 + +02:42.840 --> 02:47.220 +目の前のタスクに適したモデルを選ぶにはどうすればいいのか? + +02:47.220 --> 02:53.940 +だから、 ベンチマークやリーダーボードのようなものに取り組み、 その決定経路をどうするか考えたい。 + +02:53.940 --> 02:59.160 +そして、 コード生成に関する特に異なる種類の商業的な問題に取り組むつもりだ。 + +02:59.160 --> 03:07.380 +我々は、 PythonのコードをC+++の高性能なC+++のコードとして書き換えることができるアプリケーションを作ろうとしている。 + +03:07.380 --> 03:11.010 +そして、 クローズドソースとオープンソースのモデルで試してみるつもりだ。 + +03:11.010 --> 03:12.270 +そして、 そのうちの1人が優勝する。 + +03:12.300 --> 03:15.390 +勝った方がテスト用のPythonコードを受け取ることになる。 + +03:15.390 --> 03:22.620 +新しいコードは60,000倍も速くなるんだ。 + +03:22.650 --> 03:24.210 +そして、 あなた自身がそれを目にすることになる。 + +03:24.270 --> 03:28.760 +そして、 他の種類のコード生成ツールを作るための練習もある。 + +03:29.210 --> 03:35.780 +第5週目では、 今最もホットなトピックのひとつである、 ボロ検索、 拡張世代、 + +03:35.780 --> 03:49.640 +専門知識をLLMに追加するための情報のデータ・ストアの利用、 組織に関する質問に答えるための独自のラグ・パイプラインの構築を取り上げる。 + +03:49.640 --> 03:56.480 +そして、 あなたにとって難しい商業的な挑戦があり、 これをあなた自身の情報に応用する練習がある。 + +03:56.480 --> 04:03.860 +そして、 みんながこれをどう評価するのか、 そして自分自身のラグ・パイプラインを再構築するプロジェクトを見るのがとても楽しみだ。 + +04:04.790 --> 04:10.400 +第6週からは、 このコースの3週間のフラッグシップ・プロジェクトが始まる。 + +04:10.400 --> 04:13.250 +ええと、 第6週はビジネス問題を設定します。 + +04:13.250 --> 04:21.410 +私たちはデータについて多くの作業を行い、 そして伝統的な機械学習モデルを作成する。 + +04:21.410 --> 04:29.810 +そして、 フロンティアでモデルを試し、 フロンティアでもモデルを微調整して、 このビジネス問題でできる限りうまくいくようにする。 + +04:29.810 --> 04:33.110 +第7週は、 これをオープンソースに応用する。 + +04:33.110 --> 04:38.080 +私たちはオープンソースのモデルを採用し、 当初はひどいパフォーマンスを示すだろう。 + +04:38.080 --> 04:46.000 +少なくともGPT4と競争できるようになるまで微調整することで、 オープンソースのモデルを改善することを使命とするつもりだ。 + +04:46.210 --> 04:47.680 +フロンティアでのモデル。 + +04:47.680 --> 04:53.770 +何が起こるかを話すつもりはないが、 その結果はあなたを驚かせると信じている。 + +04:53.800 --> 04:54.880 +そう言っておこう。 + +04:54.880 --> 04:59.080 +だから、 第7週に何が起こるか、 見守る価値は大いにある。 + +04:59.350 --> 05:05.200 
+しかし、 8週目のフィナーレですべてがまとまる。 これは、 完全自律型のAgentic + +05:05.230 --> 05:15.430 +AIソリューションを構築する8週間の締めくくりにふさわしいもので、 7人のエージェントが協力して実際の商業的問題を解決する。 + +05:15.730 --> 05:17.170 +そして終わり。 + +05:17.200 --> 05:21.400 +インターネットをスキャンして様々なものを探し出すだけでなく、 + +05:21.400 --> 05:24.880 +その発見をプッシュ通知で送ってくれるのだ。 + +05:24.940 --> 05:27.340 +だから、 本当に素晴らしいものになるよ。 + +05:27.340 --> 05:30.100 +最後には素晴らしい結果が待っている。 + +05:30.130 --> 05:35.830 +毎週学んできたことの集大成として、 前週に学んだことを土台にして、 + +05:35.830 --> 05:45.100 +本業ですぐに実践できるような真の商業的なプロジェクトを生み出す良い方法となるだろう。 diff --git a/week5/community-contributions/subtitles/srts/60616423/ko_KR.srt b/week5/community-contributions/subtitles/srts/60616423/ko_KR.srt new file mode 100755 index 0000000..4f09f9f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616423/ko_KR.srt @@ -0,0 +1,259 @@ +WEBVTT + +00:00.050 --> 00:05.870 +올라마의 파워를 이용해 다양한 LMS를 실험해보면서 즐거우셨길 + +00:05.870 --> 00:07.400 +바라요 + +00:07.430 --> 00:10.040 +여러분도 저와 비슷한 경험을 했을 거예요 + +00:10.040 --> 00:16.130 +퀸 2를 찾았어요 5는 다른 언어를 이해하는 데 있어서 가장 강력한 모델일 + +00:16.130 --> 00:16.820 +거예요 + +00:16.820 --> 00:20.300 +다른 모델들은 설명을 더 잘하더군요 + +00:20.360 --> 00:22.820 +여러분의 의견을 듣고 싶어요 + +00:22.820 --> 00:26.750 +강의랑 같이 post로 보내거나 저한테 직접 보내주세요. 
+ +00:26.780 --> 00:31.370 +뭘 발견했는지 듣고 싶어요 다양한 모델로 실험하면서 본인 + +00:31.370 --> 00:34.460 +문제에 가장 잘 맞는 걸 찾는 것도요 + +00:34.460 --> 00:37.160 +달 착륙선 엔지니어에겐 중요한 기술이죠 + +00:37.160 --> 00:39.560 +귀중한 시간을 보냈어요 + +00:39.590 --> 00:40.370 +좋아요 + +00:40.370 --> 00:42.620 +앞으로 8주 동안의 일을 얘기해 보죠 + +00:42.620 --> 00:50.570 +그래서 저는 지금 이 자리에서 8주 안에 LM 엔지니어링의 달인이 되게 해드리고 + +00:50.570 --> 00:51.650 +싶어요 + +00:51.650 --> 00:53.090 +이렇게 하는 거예요 + +00:53.120 --> 00:59.150 +이번 주에는 현대 기술의 한계를 시험하는 모델들을 살펴볼 + +00:59.150 --> 01:00.200 +거예요 + +01:00.260 --> 01:09.230 +GPT 4001 프리뷰나 클로드 3 같은 거요 5개와 다른 선구적인 모델도 많이 있습니다 비공개 소스로 놀라운 + +01:09.240 --> 01:11.730 +걸 성취할 수 있는 모델이죠 + +01:11.730 --> 01:16.800 +챗GPT 같은 웹 사용자 인터페이스를 통해 작업하고 API를 통해서도 하죠 + +01:17.280 --> 01:22.410 +상업 프로젝트를 만들 거예요 당장 유용한 거죠 흥미로운 + +01:22.440 --> 01:25.290 +상업 활동도 준비돼 있고요 + +01:25.410 --> 01:31.380 +다음 주에는 사용자 인터페이스를 그 위에 덧붙일 겁니다 제가 좋아하는 플랫폼을 사용해서요 그래디오라는 + +01:31.380 --> 01:32.160 +거죠 + +01:32.160 --> 01:34.980 +정말 재미있을 거예요 제가 좋아하는 걸 알게 될 거예요 + +01:34.980 --> 01:40.260 +제가 좀 떠들긴 했지만 사용하기 정말 쉬워요 저처럼 프런트 엔드에 서툰 사람들이 + +01:40.260 --> 01:45.300 +멋지고 날카로운 사용자 인터페이스를 아주 빨리 빌드하기 정말 쉽죠 + +01:45.300 --> 01:53.400 +JNI의 전형적인 사용 사례를 해결하기 위해 할 겁니다 인공지능 비서, 챗봇을 만드는 거요 하지만 + +01:53.400 --> 01:57.120 +오디오와 영상이 있는 방식으로 할 거예요 + +01:57.120 --> 02:03.720 +멀티모달이고 도구를 사용할 수 있어요 컴퓨터에서 실행되는 코드를 호출할 수 있다는 뜻이죠 + +02:03.720 --> 02:07.650 +좀 오싹하게 들리지만 우리가 하면 말이 돼요 + +02:07.650 --> 02:08.760 +그게 다예요 + +02:08.760 --> 02:09.690 +2주 차예요 + +02:10.290 --> 02:16.380 +3주 차에는 오픈 소스로 넘어갑니다 포옹형 얼굴 플랫폼을 어디에나 사용할 수 있습니다 데이터 + +02:16.380 --> 02:21.870 +과학자와 LM 엔지니어들이 전반에 걸쳐 사용하고 둘 다 구축할 때 사용하죠 + +02:21.960 --> 02:27.270 +얼굴을 안는 간단한 API 파이프라인 API를 사용하고 좀 더 고급 API를 + +02:27.270 --> 02:32.040 +사용합니다 얼굴을 안는 토큰라이저와 모델도 탐색하고요 + +02:32.280 --> 02:38.460 +4주 차에는 인공지능 분야에서 특히 골치 아픈 문제를 다룰 + +02:38.460 --> 02:42.840 +겁니다 선택할 모델이 너무 많다는 거죠 + +02:42.840 --> 02:47.220 +주어진 작업에 적합한 모델을 어떻게 선택하나요? 
+ +02:47.220 --> 02:52.650 +벤치마크나 leaderboard 같은 걸 작업해 결정 경로를 어떻게 할지 + +02:52.650 --> 02:53.940 +알아내는 거죠 + +02:53.940 --> 02:59.160 +그리고 특히 다른 종류의 상업적 문제를 다룰 겁니다 코드 생성이죠 + +02:59.160 --> 03:06.330 +파이썬 코드를 다시 쓸 수 있는 응용 프로그램을 만들겠습니다 C++ 고성능 C++ + +03:06.330 --> 03:07.380 +코드로요 + +03:07.380 --> 03:11.010 +비공개 소스와 오픈 소스 모델로 시도해 볼게요 + +03:11.010 --> 03:12.270 +그중 한 명이 우승자가 되겠죠 + +03:12.300 --> 03:15.390 +파이썬 테스트 코드를 가져가는 것이 승자가 되는 것이죠 + +03:15.390 --> 03:22.620 +다시 쓸 거예요 새 코드는 60,000배 더 빨리 실행되죠 충격적이에요 + +03:22.650 --> 03:24.210 +직접 보시게 될 거예요 + +03:24.270 --> 03:28.760 +다른 종류의 코드 생성 도구를 만드는 연습도 할 거예요 + +03:29.210 --> 03:35.780 +5주 차에는 현재 아주 핫한 주제 중 하나로 넘어갈 겁니다 쓰레기 회수, + +03:35.780 --> 03:43.490 +증강 세대 정보 저장소를 이용해 전문 지식을 더하는 거죠 LLM은 자체 쓰레기 + +03:43.490 --> 03:49.640 +파이프라인을 구축해 조직과 관련된 질문에 답할 거예요 + +03:49.640 --> 03:54.860 +상업적으로 어려운 과제를 수행할 때 자신의 정보에 + +03:54.890 --> 03:56.480 +적용해야 해요 + +03:56.480 --> 04:01.640 +사람들이 어떻게 생각할지 정말 기대되네요 직접 만든 랙 파이프라인을 + +04:01.670 --> 04:03.860 +어떻게 재건할지도요 + +04:04.790 --> 04:10.400 +6주 차에는 이 과정의 3주 간판 프로젝트를 시작하죠 + +04:10.400 --> 04:13.250 +6주 차에는 사업 문제를 다룰 거예요 + +04:13.250 --> 04:19.340 +데이터 작업을 많이 할 겁니다 그런 다음 전통적인 머신 러닝 모델을 만들 거예요 기준선을 + +04:19.340 --> 04:21.410 +구축하는 게 아주 중요하죠 + +04:21.410 --> 04:26.720 +거기서 모델을 만들어 보고 사업상 문제를 해결하기 + +04:26.750 --> 04:29.810 +위해 최대한 노력할 거예요 + +04:29.810 --> 04:33.110 +7주째에는 오픈 소스에 적용할 거예요 + +04:33.110 --> 04:38.080 +오픈 소스 모델을 도입할 겁니다 처음에는 성능이 형편없었지만 미세 + +04:38.080 --> 04:44.440 +조정으로 오픈 소스 모델을 개선하는 것이 우리의 임무입니다 GPT 4와 경쟁할 수 + +04:44.440 --> 04:46.000 +있을 때까지요 + +04:46.210 --> 04:47.680 +개척지의 모델이죠 + +04:47.680 --> 04:52.810 +결과는 말씀 못 드리지만 이 말은 해드릴 수 있어요 그 결과가 여러분을 놀라게 + +04:52.810 --> 04:53.770 +할 거예요 + +04:53.800 --> 04:54.880 +그건 확실해요 + +04:54.880 --> 04:59.080 +7주 차에 어떻게 될지 지켜보는 게 좋겠어요 + +04:59.350 --> 05:05.200 +하지만 8주 차 최종회에서 모든 게 맞아떨어져요 8주 동안 완전한 + +05:05.230 --> 05:13.060 +자율형 인공지능 해결책을 개발할 겁니다 7명의 요원이 협력해서 진짜 상업적 문제를 + +05:13.060 --> 05:15.430 +해결할 거예요 + +05:15.730 --> 05:17.170 +끝이죠 + 
+05:17.200 --> 05:21.400 +인터넷에서 다양한 걸 스캔할 뿐 아니라 푸시 알림도 + +05:21.400 --> 05:24.880 +보내죠 발견되는 것들과 함께요 + +05:24.940 --> 05:27.340 +정말 멋질 거예요 + +05:27.340 --> 05:30.100 +결국 멋진 결과가 나올 거예요 + +05:30.130 --> 05:35.830 +매주 배운 모든 것의 정점이 될 수 있는 좋은 기회예요 지난주 배운 + +05:35.830 --> 05:41.710 +걸 바탕으로 해서 당장 본업에서 실행할 수 있는 진정한 상업 프로젝트가 + +05:41.710 --> 05:45.100 +탄생하는 거죠 Put diff --git a/week5/community-contributions/subtitles/srts/60616493/en_US.srt b/week5/community-contributions/subtitles/srts/60616493/en_US.srt new file mode 100755 index 0000000..35e49ff --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616493/en_US.srt @@ -0,0 +1,94 @@ +WEBVTT + +00:00.050 --> 00:04.430 +I'll just take a quick moment to introduce myself to convince you that I am actually qualified to be + +00:04.430 --> 00:05.960 +taking you on this eight week journey. + +00:05.990 --> 00:07.730 +So my name is Ed Donner. + +00:07.760 --> 00:13.400 +I've got about 20 years of experience in software engineering, data science, and tech leadership. + +00:13.400 --> 00:18.710 +Most of my career at JP Morgan, where at the end I ran engineering teams of about 300 people. + +00:18.740 --> 00:21.050 +I started out in London where I'm from. + +00:21.110 --> 00:26.000 +I then moved to Tokyo for a bit and ended up in New York City, where I live now. + +00:26.210 --> 00:29.720 +After JP Morgan, I founded a couple of AI startups. + +00:29.840 --> 00:35.720 +My first startup was sold, was acquired a couple of years ago, and in fact, that picture on the bottom + +00:35.720 --> 00:41.720 +right shows the moment when it was announced on a Times Square billboard. + +00:41.720 --> 00:44.720 +That was a particularly magical moment for me. + +00:44.720 --> 00:50.870 +But I don't want to show you that photo to brag about my Times Square moment, but also because, curiously, + +00:50.870 --> 00:53.030 +I actually live very near Times Square. 
+ +00:53.060 --> 00:57.410 +It's an unusual place to live because it's such a sort of tourist place to be. + +00:57.440 --> 01:02.810 +But if you see these two red arrows and you imagine where they join up, that's basically where I live. + +01:02.810 --> 01:04.450 +And that's where I am right now. + +01:04.660 --> 01:09.250 +And so if you can imagine if I wave at you right now, if this were somehow a real time photo, you'd + +01:09.250 --> 01:13.120 +have a little me waving at you from roundabout where those red lines meet. + +01:13.600 --> 01:13.930 +Okay. + +01:13.960 --> 01:19.960 +And the final thing to mention, the picture in the middle at the bottom shows me flying. + +01:19.990 --> 01:21.430 +Or in front of a plane. + +01:21.430 --> 01:25.300 +You might be thinking, that's me saying, this is my big passion, my hobby, I fly planes. + +01:25.420 --> 01:28.030 +But in fact, that's that's not not what I was going to say. + +01:28.090 --> 01:34.000 +It turns out that whilst I'm very proficient and highly skilled in the field of llms, that does not + +01:34.000 --> 01:40.300 +translate in any way to my hand-eye coordination, which is terrible on almost any front, including + +01:40.300 --> 01:44.620 +flying planes where I have tried and am very shaky to say the least. + +01:44.650 --> 01:50.230 +So if it turns out that you find yourself in a plane and you look at the cockpit and you see that I + +01:50.230 --> 01:54.370 +am the person there on the stick, I suggest you look quickly for the nearest parachute. + +01:54.610 --> 02:01.120 +But if it comes to you needing to go on an eight week course to build up commercial expertise in how + +02:01.120 --> 02:04.750 +to be an LM engineer, then you've come to the right place. + +02:04.780 --> 02:07.750 +All right, without further ado, let's get to the course. 
diff --git a/week5/community-contributions/subtitles/srts/60616493/ja_JP.srt b/week5/community-contributions/subtitles/srts/60616493/ja_JP.srt new file mode 100755 index 0000000..cf413d8 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616493/ja_JP.srt @@ -0,0 +1,82 @@ +WEBVTT + +00:00.050 --> 00:05.960 +私がこの8週間の旅に皆さんをご案内する資格があることを納得していただくために、 ちょっとだけ自己紹介をさせてください。 + +00:05.990 --> 00:07.730 +私の名前はエド・ドナーだ。 + +00:07.760 --> 00:13.400 +ソフトウェア・エンジニアリング、 データ・サイエンス、 技術リーダーシップの分野で約20年の経験がある。 + +00:13.400 --> 00:18.710 +キャリアの大半はJPモルガンで、 最終的には300人規模のエンジニアリング・チームを率いていました。 + +00:18.740 --> 00:21.050 +最初は出身地のロンドンで始めたんだ。 + +00:21.110 --> 00:26.000 +その後、 東京に少し引っ越し、 結局、 今住んでいるニューヨークにたどり着いた。 + +00:26.210 --> 00:29.720 +JPモルガンの後、 私はいくつかのAIスタートアップを設立した。 + +00:29.840 --> 00:35.720 +私の最初のスタートアップは売却され、 数年前に買収された。 実際、 + +00:35.720 --> 00:41.720 +右下の写真はタイムズ・スクエアのビルボードで発表された瞬間だ。 + +00:41.720 --> 00:44.720 +私にとっては特に不思議な瞬間だった。 + +00:44.720 --> 00:53.030 +しかし、 この写真をお見せしたいのは、 タイムズ・スクエアの瞬間を自慢したいからではなく、 不思議なことに、 実は私はタイムズ・スクエアのすぐ近くに住んでいるからなのだ。 + +00:53.060 --> 00:57.410 +一種の観光地なので、 住むには珍しい場所だ。 + +00:57.440 --> 01:02.810 +でも、 この2つの赤い矢印を見て、 どこで合流するか想像してみてほしい。 + +01:02.810 --> 01:04.450 +そして今、 私はそこにいる。 + +01:04.660 --> 01:13.120 +もし、 これがリアルタイムの写真だとしたら、 赤い線が交わるロータリーから私が手を振っていることになる。 + +01:13.600 --> 01:13.930 +オーケー。 + +01:13.960 --> 01:19.960 +そして最後に、 一番下の真ん中の写真は私が飛んでいるところです。 + +01:19.990 --> 01:21.430 +飛行機の前でもいい。 + +01:21.430 --> 01:25.300 +と思うかもしれないが、 これは私の大きな情熱であり、 趣味であり、 飛行機を飛ばすことなのだ。 + +01:25.420 --> 01:28.030 +でも実際、 私が言いたかったのはそういうことではないんだ。 + +01:28.090 --> 01:34.000 +私はllmsの分野では非常に熟練しており、 + +01:34.000 --> 01:44.620 +高度な技術を持っているが、 それは私の手と目の協調性には全く反映されない。 + +01:44.650 --> 01:50.230 +だから、 もしあなたが飛行機に乗っていて、 コックピットを見たとき、 操縦桿を握っているのが私だとわかったら、 + +01:50.230 --> 01:54.370 +一番近くにあるパラシュートを急いで探すことをお勧めする。 + +01:54.610 --> 02:01.120 +しかし、 もしあなたがLMエンジニアになるための商業的な専門知識を身につけるために8週間のコースを受講する必要があるのなら、 + +02:01.120 --> 02:04.750 +あなたは正しい場所に来ていることになる。 + +02:04.780 --> 02:07.750 +さて、 前置きはこれくらいにして、 
コースに入ろう。 diff --git a/week5/community-contributions/subtitles/srts/60616493/ko_KR.srt b/week5/community-contributions/subtitles/srts/60616493/ko_KR.srt new file mode 100755 index 0000000..50c6e20 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616493/ko_KR.srt @@ -0,0 +1,94 @@ +WEBVTT + +00:00.050 --> 00:04.430 +잠깐 제 소개를 할게요 8주간의 여정을 함께할 자격이 있다는 + +00:04.430 --> 00:05.960 +걸 증명하려고요 + +00:05.990 --> 00:07.730 +전 에드 도너예요 + +00:07.760 --> 00:13.400 +소프트웨어 공학 데이터 과학 기술 리더십 분야에서 20년 경력을 쌓았어요 + +00:13.400 --> 00:18.710 +JP 모건에서 근무하며 300명 정도 되는 엔지니어링 팀을 운영했어요 + +00:18.740 --> 00:21.050 +제 고향인 런던에서 시작했어요 + +00:21.110 --> 00:26.000 +그 후 도쿄로 잠시 이사했고 지금은 뉴욕에 살고 있죠 비트 코스트 + +00:26.210 --> 00:29.720 +JP 모건 이후 인공지능 스타트업 두 개를 설립했죠 + +00:29.840 --> 00:35.720 +제 첫 스타트업 회사가 몇 년 전에 팔렸어요 오른쪽 하단의 + +00:35.720 --> 00:41.720 +사진을 보시면 타임스스퀘어 광고판에 발표됐을 때예요 + +00:41.720 --> 00:44.720 +제게는 특히 마법 같은 순간이었어요 + +00:44.720 --> 00:50.870 +타임스스퀘어에서의 추억을 자랑하려고 사진을 보여드리긴 싫지만 신기하게도 타임스스퀘어 + +00:50.870 --> 00:53.030 +근처에 살거든요 + +00:53.060 --> 00:57.410 +독특한 곳이에요 관광지로 유명하기 때문이죠 + +00:57.440 --> 01:02.810 +빨간 화살표 두 개가 만나는 곳을 상상해 보세요 제가 사는 곳이에요 + +01:02.810 --> 01:04.450 +지금 제가 그래요 + +01:04.660 --> 01:09.250 +제가 지금 여러분께 손을 흔든다고 상상해 보세요 이게 실시간 사진이라면 저 + +01:09.250 --> 01:13.120 +빨간 선이 만나는 곳에서 제가 여러분께 손을 흔들고 있을 거예요 + +01:13.600 --> 01:13.930 +네 + +01:13.960 --> 01:19.960 +마지막으로 말씀드릴 건 가운데 하단 사진에 제가 나는 게 나와요 + +01:19.990 --> 01:21.430 +비행기 앞일 수도 있죠 + +01:21.430 --> 01:25.300 +비행기 조종이 취미라고 생각하는 분도 계시겠죠 + +01:25.420 --> 01:28.030 +하지만 사실 제가 하려던 말은 그게 아니에요 + +01:28.090 --> 01:34.000 +저는 llms에 능숙하고 숙련된 조종사지만 눈과 손의 협응력은 전혀 + +01:34.000 --> 01:40.300 +발휘하지 못했어요 거의 모든 전선에서 형편없었죠 비행기를 조종할 때도 + +01:40.300 --> 01:44.620 +해 봤지만 솔직히 말하면 너무 불안했어요 + +01:44.650 --> 01:50.230 +여러분이 비행기 안에 있다고 가정해 보죠 조종석에 앉은 사람이 저라고 + +01:50.230 --> 01:54.370 +생각한다면 재빨리 가까운 낙하산을 찾으세요 + +01:54.610 --> 02:01.120 +하지만 8주 과정을 거쳐 상업적 전문성을 쌓아야만 달 착륙선 엔지니어가 + +02:01.120 --> 02:04.750 +될 수 있다면 잘 찾아오셨어요 + +02:04.780 --> 02:07.750 
+자, 그럼 본격적으로 시작할게요 get it diff --git a/week5/community-contributions/subtitles/srts/60616623/en_US.srt b/week5/community-contributions/subtitles/srts/60616623/en_US.srt new file mode 100755 index 0000000..902cfb9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616623/en_US.srt @@ -0,0 +1,298 @@ +WEBVTT + +00:00.050 --> 00:05.660 +So we're now going to start week one of the course when we are going to be looking at exploring frontier + +00:05.690 --> 00:10.880 +models, models at the forefront of what's possible in this first day, we're going to set up an environment + +00:10.880 --> 00:13.310 +and then build your first LM solution. + +00:13.310 --> 00:17.000 +We're then going to talk about how to be positioned for success in the next few weeks. + +00:17.000 --> 00:21.320 +We're then going to compare models at the very frontier of what's possible. + +00:21.320 --> 00:23.900 +Some that you know well and some I think will be new to you. + +00:23.930 --> 00:29.030 +We're then going to talk about transformers, which is the architecture at the heart of the different + +00:29.030 --> 00:32.210 +models that we use in this course, almost all of the models. + +00:32.270 --> 00:38.090 +And then finally, we'll end with a juicy commercial project that you'll be able to use, and I'll have + +00:38.090 --> 00:40.310 +an exercise for you to make it your own. + +00:40.310 --> 00:44.540 +Apply it to different business areas, something that maybe you can use in your day job immediately. + +00:44.540 --> 00:49.580 +Maybe you'll be able to use it as a project that you can have as an example project for yourself. + +00:50.240 --> 00:55.910 +So we're now going to get hopefully some satisfying instant results for you. + +00:55.940 --> 01:00.080 +We already used a llama to do some some fun things on your computer already, but now we're going to + +01:00.080 --> 01:03.890 +take it a notch higher by calling out to APIs directly. 
+ +01:03.980 --> 01:11.630 +But first, the necessary first step is to set up your environment, create a full spec data science + +01:11.630 --> 01:16.700 +environment that will have the the sufficient horsepower to take you through the entire eight week program + +01:16.700 --> 01:17.450 +we have ahead. + +01:17.450 --> 01:22.190 +And there's quite a lot of stuff that we're going to need to install to make sure we're at that point. + +01:22.400 --> 01:28.580 +And I guess I should mention that sometimes, sometimes this experience can be a bit frustrating. + +01:28.580 --> 01:32.090 +Most of the time it goes really smoothly and everything just fits in place. + +01:32.090 --> 01:33.560 +And it's five minutes and you're done. + +01:33.560 --> 01:38.600 +But sometimes some people hit speed bumps for whatever reason, because of something about the system + +01:38.630 --> 01:41.720 +or or some other new effect. + +01:41.780 --> 01:47.720 +And it can be a bit frustrating, but I am here to take you through it step by step and make sure it's + +01:47.720 --> 01:50.420 +successful for you so that this will go smoothly. + +01:50.420 --> 01:55.280 +It'll be behind you in no time, and we'll be on to actual projects, and we'll never have to worry + +01:55.280 --> 01:58.250 +about environment setup again for the next eight weeks. + +01:58.250 --> 02:04.590 +So creating the full spec data science environment, it ain't as easy as I wish it were. + +02:04.950 --> 02:07.860 +There is some stuff to do, but we'll get through it fast. + +02:07.860 --> 02:11.910 +First of all, we'll clone the repo for people who are familiar with git. + +02:11.910 --> 02:16.500 +That's a that's easy, but but I will make sure I walk you through it in case this is a new thing. + +02:16.620 --> 02:21.930 +Then we will go through the Readme instructions to set up the environment. + +02:21.960 --> 02:27.180 +Now I recommend that we use something called Anaconda that some of you may have already used Anaconda. 
+ +02:27.180 --> 02:30.960 +It's a high powered thing and it's quite heavyweight. + +02:30.960 --> 02:37.140 +It creates an entire environment for you, a dedicated, isolated environment on your computer for this + +02:37.140 --> 02:43.200 +course, and it does so in a way that guarantees high compatibility between what you're doing on your + +02:43.200 --> 02:46.590 +computer and what I do on mine and everyone else on this course. + +02:46.680 --> 02:48.900 +So it's very good at that. + +02:49.170 --> 02:50.550 +But it is quite heavyweight. + +02:50.550 --> 02:55.110 +It involves downloading a lot from the internet and installs a lot because it builds everything from + +02:55.110 --> 02:56.130 +the ground up. + +02:56.520 --> 02:59.100 +If it causes you problems, there's an alternative. + +02:59.100 --> 03:01.080 +We can use a simpler approach. + +03:01.080 --> 03:06.440 +The standard Python virtual environment approach with Pip, and I've got instructions for that too. + +03:06.470 --> 03:13.580 +It's quicker, it's simpler, and it's just not as guaranteed to be compatible as Anaconda, so I'd + +03:13.580 --> 03:15.740 +prefer we try Anaconda if possible. + +03:15.770 --> 03:17.150 +It does work most of the time. + +03:17.150 --> 03:18.380 +Any problems at all? + +03:18.380 --> 03:20.900 +The Python virtual environment is your fallback. + +03:20.930 --> 03:24.620 +We then need to set up an OpenAI key and I'll talk more about that. + +03:24.620 --> 03:28.310 +I'll talk about API costs and what to do if that's a that's a problem for you. + +03:28.400 --> 03:29.930 +We'll go through that at the time. + +03:29.930 --> 03:35.960 +We have to create something called a EMV file, which has your private keys in it. + +03:35.960 --> 03:40.460 +And it's going to be important that that file is called exactly dot EMV, as we'll see. + +03:40.460 --> 03:45.410 +I will explain, but it can't be called keys dot EMV or dot EMV dot text. 
+ +03:45.440 --> 03:47.600 +It has to be called exactly dot EMV. + +03:48.050 --> 03:53.840 +And then finally we activate your environment and we run JupyterLab to start the program. + +03:54.560 --> 03:59.720 +So as I say, most of the time all of this will go great. + +03:59.930 --> 04:06.190 +Most people report that everything goes through fine, but if it doesn't, then then there are some + +04:06.190 --> 04:09.460 +secret weapons at your disposal to make sure that it does. + +04:09.490 --> 04:15.490 +The first of them is that I've put a troubleshooting notebook that I will show you in, in the in, + +04:15.520 --> 04:20.650 +in the GitHub repo, which will take you step by step through diagnosing problems, running some code + +04:20.650 --> 04:24.340 +that will figure out what's wrong, and then fixing each problem in turn. + +04:24.340 --> 04:25.900 +So that is there. + +04:25.930 --> 04:31.840 +The second resource you have is a kind of surprising one at ChatGPT and and Claude. + +04:31.870 --> 04:34.840 +They are spooky in how good they are. + +04:34.870 --> 04:42.670 +If you say a problem and paste like a stack trace in there, they are very, uh, impressive at giving + +04:42.670 --> 04:45.310 +quite nuanced answers about what you need to do. + +04:45.340 --> 04:48.970 +I've been caught off guard by the times I've run into an infrastructure problem, and I thought, this + +04:48.970 --> 04:49.870 +is way too much. + +04:49.870 --> 04:52.300 +I can't even get the answer on Stack Overflow. + +04:52.480 --> 04:55.570 +Uh, but I've been rescued by Claude. + +04:55.660 --> 04:57.400 +I think Claude is the stronger of the two. + +04:57.430 --> 04:57.640 +But. + +04:57.640 --> 04:59.440 +But both of them give good answers. + +04:59.440 --> 05:04.990 +So it's worth realizing that you can often just fix the problem by pasting it in there. + +05:05.020 --> 05:06.670 +It's worth giving it a shot. 
+ +05:06.730 --> 05:12.140 +But if if if those two fail, or even if you'd just rather go straight to this third option. + +05:12.140 --> 05:15.770 +The third option is there, and the third option is to contact me. + +05:15.890 --> 05:17.780 +I am standing by to help you. + +05:17.780 --> 05:19.070 +That is my job. + +05:19.130 --> 05:20.990 +So please, please reach out. + +05:20.990 --> 05:23.900 +You can reach out to me by messaging me on the platform. + +05:23.900 --> 05:25.130 +You can email me. + +05:25.160 --> 05:28.700 +My email address is in the is in the readme and you can also linked in with me. + +05:28.700 --> 05:30.500 +I love getting LinkedIn connections by the way. + +05:30.560 --> 05:36.650 +So LinkedIn with me for sure and message me there too and you'll find that I'm very responsive. + +05:36.740 --> 05:41.900 +I typically if I'm not sleeping or traveling, then you'll see from my other answers that I get back + +05:41.990 --> 05:47.210 +very quickly, and I see it as my job to fix it and to get you up and running. + +05:47.210 --> 05:48.320 +And I have never failed. + +05:48.320 --> 05:55.340 +Yet I have always, I think, 5000 people through and everyone has a running environment, as will you. + +05:55.340 --> 05:59.570 +So if you do hit any speed bumps at all, you can go with these other two options. + +05:59.570 --> 06:00.860 +But you can come straight to me. + +06:00.860 --> 06:01.670 +I will fix it. + +06:01.670 --> 06:03.200 +I will find what's going on. + +06:03.200 --> 06:05.690 +We will have you up and running quickly. + +06:05.840 --> 06:06.680 +All right. + +06:06.680 --> 06:11.630 +With that, let me now take you on a walkthrough of what it takes to set up your environment. 
diff --git a/week5/community-contributions/subtitles/srts/60616623/ja_JP.srt b/week5/community-contributions/subtitles/srts/60616623/ja_JP.srt new file mode 100755 index 0000000..6649f80 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616623/ja_JP.srt @@ -0,0 +1,259 @@ +WEBVTT + +00:00.050 --> 00:05.660 +これからコースの第1週目をスタートし、 フロンティア・モデル、 + +00:05.690 --> 00:13.310 +つまり可能性の最前線にあるモデルを探求していく。 + +00:13.310 --> 00:17.000 +そして、 これから数週間、 成功のためにどのような態勢を整えればよいかを話していく。 + +00:17.000 --> 00:21.320 +そして、 可能性のフロンティアにあるモデルを比較する。 + +00:21.320 --> 00:23.900 +よくご存知のものもあれば、 初めて耳にするものもあるだろう。 + +00:23.930 --> 00:32.210 +このコースで使用するさまざまなモデル(ほとんどすべてのモデル)の核となるアーキテクチャである。 + +00:32.270 --> 00:40.310 +そして最後に、 皆さんが使えるようなジューシーなコマーシャル・プロジェクトを紹介し、 それを自分のものにするための練習をします。 + +00:40.310 --> 00:44.540 +それをさまざまなビジネス分野に応用し、 もしかしたら本業ですぐに使えるかもしれない。 + +00:44.540 --> 00:49.580 +もしかしたら、 自分のための見本となるプロジェクトとして使えるかもしれない。 + +00:50.240 --> 00:55.910 +だから、 うまくいけば、 すぐに満足のいく結果が得られるだろう。 + +00:55.940 --> 01:00.080 +我々はすでにラマを使って、 コンピューター上でいくつかの楽しいことをやっているが、 + +01:00.080 --> 01:03.890 +今度はAPIを直接呼び出すことで、 さらに一段上のことをやってみよう。 + +01:03.980 --> 01:11.630 +しかし、 その前に必要な最初のステップは、 環境をセットアップすることである。 これから始まる8週間のプログラム全体を乗り切るのに十分な馬力を備えた、 + +01:11.630 --> 01:17.450 +フルスペックのデータサイエンス環境を構築することだ。 + +01:17.450 --> 01:22.190 +そのために必要なものがたくさんあるんだ。 + +01:22.400 --> 01:28.580 +そして時々、 この経験が少々フラストレーションになることもあることも言っておかなければならないだろう。 + +01:28.580 --> 01:32.090 +たいていの場合は本当にスムーズに進み、 すべてが所定の位置に収まる。 + +01:32.090 --> 01:33.560 +そして5分で終わる。 + +01:33.560 --> 01:41.720 +しかし、 時には何らかの理由でスピードバンプにぶつかる人もいる。 + +01:41.780 --> 01:47.720 +少しイライラすることもあるかもしれませんが、 私はここで一歩一歩ステップを踏んでいき、 これがスムーズにいくように、 + +01:47.720 --> 01:50.420 +あなたにとって成功するようにします。 + +01:50.420 --> 01:58.250 +すぐに終わって実際のプロジェクトに移れるし、 今後8週間はもう環境設定について心配する必要はない。 + +01:58.250 --> 02:04.590 +だから、 フルスペックのデータサイエンス環境を構築するのは、 私が望むほど簡単ではない。 + +02:04.950 --> 02:07.860 +やることはいくつかあるけど、 早く終わらせるよ。 + +02:07.860 --> 02:11.910 +まず最初に、 gitに慣れている人のためにレポをクローンしておこう。 + +02:11.910 --> 02:16.500 +それは簡単なことなんだけど、 でも、 
これが初めてのことであることを念のために説明しておくよ。 + +02:16.620 --> 02:21.930 +次に、 Readmeの指示に従って環境をセットアップする。 + +02:21.960 --> 02:27.180 +さて、 Anacondaというものを使うことをお勧めする。 すでにAnacondaを使ったことがある人もいるかもしれない。 + +02:27.180 --> 02:30.960 +ハイパワーだし、 かなりヘビー級だ。 + +02:30.960 --> 02:37.140 +このコース専用の隔離された環境をあなたのコンピューター上に構築し、 あなたのコンピューターで行っていることと、 + +02:37.140 --> 02:46.590 +私のコンピューターで行っていること、 そしてこのコースの他の参加者全員の間で高い互換性が保証されるようにするのです。 + +02:46.680 --> 02:48.900 +だから、 その点では非常に優れている。 + +02:49.170 --> 02:50.550 +しかし、 かなりヘビー級だ。 + +02:50.550 --> 02:56.130 +インターネットから多くのものをダウンロードし、 一からすべてを構築するため、 多くのものをインストールする必要がある。 + +02:56.520 --> 02:59.100 +もしそれが問題を引き起こすのであれば、 別の方法がある。 + +02:59.100 --> 03:01.080 +もっとシンプルな方法を使えばいい。 + +03:01.080 --> 03:06.440 +Pipを使った標準的なPython仮想環境でのアプローチ。 + +03:06.470 --> 03:15.740 +より速く、 よりシンプルで、 アナコンダほど互換性が保証されているわけではない。 + +03:15.770 --> 03:17.150 +たいていの場合はうまくいく。 + +03:17.150 --> 03:18.380 +何か問題は? + +03:18.380 --> 03:20.900 +Python仮想環境はあなたの予備だ。 + +03:20.930 --> 03:24.620 +それからOpenAIのキーを設定する必要がありますが、 それについてはまた詳しくお話しします。 + +03:24.620 --> 03:28.310 +APIのコストと、 もしそれがあなたにとって問題ならどうすべきかについてお話しします。 + +03:28.400 --> 03:29.930 +それはその時に確認する。 + +03:29.930 --> 03:35.960 +EMVファイルと呼ばれるものを作成する必要があり、 その中にあなたの秘密鍵が入っている。 + +03:35.960 --> 03:40.460 +そして、 後述するように、 そのファイルが正確にドットEMVと呼ばれていることが重要になる。 + +03:40.460 --> 03:45.410 +説明するが、 キー・ドットEMVやドットEMV・ドット・テキストとは呼べない。 + +03:45.440 --> 03:47.600 +正確にはドットEMVと呼ばなければならない。 + +03:48.050 --> 03:53.840 +そして最後に環境をアクティブにして、 JupyterLabを実行してプログラムを開始する。 + +03:54.560 --> 03:59.720 +だから、 私が言うように、 ほとんどの場合、 このすべてがうまくいくだろう。 + +03:59.930 --> 04:09.460 +ほとんどの人はすべてうまくいったと報告するが、 そうでない場合は、 うまくいくようにするための秘密兵器がある。 + +04:09.490 --> 04:15.490 +その第一は、 GitHubのレポにあるトラブルシューティングノートブックで、 問題を診断し、 + +04:15.520 --> 04:24.340 +何が問題なのかを突き止めるコードを実行し、 それぞれの問題を順番に解決していくというステップを踏むことができる。 + +04:24.340 --> 04:25.900 +だからそれがある。 + +04:25.930 --> 04:31.840 +2つ目のリソースは、 ChatGPTとクロードにある意外なものだ。 + +04:31.870 --> 04:34.840 +不気味なほどうまい。 + +04:34.870 --> 04:45.310 +問題を言って、 スタックトレースみたいなものをそこに貼り付けると、 彼らは非常に、 あー、 
何をすべきかについて非常に微妙な答えを出してくれるのが印象的だ。 + +04:45.340 --> 04:49.870 +インフラストラクチャーの問題にぶつかったとき、 これはやりすぎだ、 と油断したことがある。 + +04:49.870 --> 04:52.300 +Stack Overflowでも答えが見つからない。 + +04:52.480 --> 04:55.570 +でも、 クロードに助けられたんだ。 + +04:55.660 --> 04:57.400 +クロードの方が強いと思う。 + +04:57.430 --> 04:57.640 +でもね。 + +04:57.640 --> 04:59.440 +しかし、 2人ともいい答えをしている。 + +04:59.440 --> 05:04.990 +だから、 そこに貼り付けるだけで問題を解決できることが多いということを認識しておく価値がある。 + +05:05.020 --> 05:06.670 +試してみる価値はある。 + +05:06.730 --> 05:12.140 +しかし、 もしこの2つが失敗した場合、 あるいはこの3つ目の選択肢に直行したい場合もある。 + +05:12.140 --> 05:15.770 +第3の選択肢は、 私に連絡することだ。 + +05:15.890 --> 05:17.780 +私はあなたを助けるために待機している。 + +05:17.780 --> 05:19.070 +それが私の仕事だ。 + +05:19.130 --> 05:20.990 +だから、 どうか手を差し伸べてほしい。 + +05:20.990 --> 05:23.900 +私に連絡を取るには、 プラットフォーム上でメッセージを送ってください。 + +05:23.900 --> 05:25.130 +メールでも構いません。 + +05:25.160 --> 05:28.700 +私のメールアドレスはReadmeにあります。 + +05:28.700 --> 05:30.500 +ところで、 私はLinkedInのコネクションを得るのが大好きだ。 + +05:30.560 --> 05:36.650 +LinkedInに登録して、 メッセージも送ってください。 + +05:36.740 --> 05:41.900 +私は通常、 寝ているか旅行中でなければ、 他の回答を見てもらえればわかると思うが、 + +05:41.990 --> 05:47.210 +非常に早く戻ってくる。 + +05:47.210 --> 05:48.320 +そして、 私は一度も失敗したことがない。 + +05:48.320 --> 05:55.340 +しかし、 私は常に5000人の人を通して、 誰もが走る環境を持っていると思う。 + +05:55.340 --> 05:59.570 +だから、 もしスピードバンプにぶつかっても、 他の2つの選択肢を選ぶことができる。 + +05:59.570 --> 06:00.860 +でも、 私のところに直接来てください。 + +06:00.860 --> 06:01.670 +直します。 + +06:01.670 --> 06:03.200 +何が起こっているのか、 私が見つける。 + +06:03.200 --> 06:05.690 +私たちは、 お客様を素早く立ち上げ、 稼動させます。 + +06:05.840 --> 06:06.680 +分かった。 + +06:06.680 --> 06:11.630 +それでは、 あなたの環境をセットアップするために必要なことを、 ウォークスルー形式で説明しよう。 diff --git a/week5/community-contributions/subtitles/srts/60616623/ko_KR.srt b/week5/community-contributions/subtitles/srts/60616623/ko_KR.srt new file mode 100755 index 0000000..164f982 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616623/ko_KR.srt @@ -0,0 +1,295 @@ +WEBVTT + +00:00.050 --> 00:05.660 +강의 첫 주에는 개척자 모델을 살펴볼 겁니다 첫날에 무엇이 가능한지 + +00:05.690 --> 00:10.880 +최전선에서 살펴볼 겁니다 환경을 설정하고 첫 LM 솔루션을 + 
+00:10.880 --> 00:13.310 +구축할 거예요 + +00:13.310 --> 00:17.000 +앞으로 몇 주 동안 어떻게 하면 성공할 수 있을지 얘기해 보죠 + +00:17.000 --> 00:21.320 +그리고 최첨단 모델을 비교해 볼 거예요 + +00:21.320 --> 00:23.900 +당신이 잘 아는 것도 있고 처음 보는 것도 있어요 + +00:23.930 --> 00:29.030 +다음은 트랜스포머에 관해 이야기할 거예요 이번 수업에서 다룰 + +00:29.030 --> 00:32.210 +거의 모든 모델의 핵심 구조물이죠 + +00:32.270 --> 00:38.090 +마지막으로 여러분이 사용할 수 있는 상업 프로젝트로 마무리할 거예요 여러분 것으로 만들 + +00:38.090 --> 00:40.310 +수 있는 연습 과제를 드릴게요 + +00:40.310 --> 00:44.540 +다양한 비즈니스 영역에 적용하세요 본업에서 바로 사용할 수 있는 거죠 + +00:44.540 --> 00:49.580 +프로젝트로 사용할 수도 있고 예제로 가질 수도 있어요 + +00:50.240 --> 00:55.910 +이제 만족스러운 즉석 결과를 얻게 될 거예요 get it + +00:55.940 --> 01:00.080 +라마는 이미 여러분 컴퓨터에서 재미있는 걸 했었죠 이번엔 + +01:00.080 --> 01:03.890 +한 단계 더 높일 겁니다 API를 직접 호출해서요 + +01:03.980 --> 01:11.630 +하지만 먼저 필요한 건 여러분의 환경을 설정하는 겁니다 앞으로 진행할 8주 프로그램을 모두 통과할 + +01:11.630 --> 01:17.450 +수 있을 만큼 충분한 마력을 갖춘 데이터 과학 환경을 만드는 거죠 + +01:17.450 --> 01:22.190 +그 지점에 도달하기 위해 설치해야 할 게 꽤 많아요 + +01:22.400 --> 01:28.580 +이 말을 해야겠네요 비트 박스를 하다 보면 가끔 답답할 때가 있어요 + +01:28.580 --> 01:32.090 +대부분은 아주 매끄럽게 진행되고 모든 게 제자리에 들어맞아요 + +01:32.090 --> 01:33.560 +5분이면 다 익어요 + +01:33.560 --> 01:38.600 +하지만 어떤 사람들은 어떤 이유에서든 과속 방지 턱에 부딪히죠 + +01:38.630 --> 01:41.720 +시스템 문제나 새로운 효과 때문에요 + +01:41.780 --> 01:47.720 +힘들 수도 있지만 제가 차근차근 설명해 드릴게요 성공적으로 진행되도록요 + +01:47.720 --> 01:50.420 +그래야 비트가 순조롭죠 + +01:50.420 --> 01:55.280 +곧 뒤로 물러날 겁니다 실제 프로젝트로 넘어가 앞으로 8주간은 + +01:55.280 --> 01:58.250 +환경 설정 걱정을 안 해도 돼요 + +01:58.250 --> 02:04.590 +전체 스펙 데이터 과학 환경을 만드는 건 생각만큼 쉽지 않아요 + +02:04.950 --> 02:07.860 +할 일이 좀 있지만 빨리 끝낼 거예요 get it get it + +02:07.860 --> 02:11.910 +먼저, 깃을 잘 아는 사람들을 위해 압류 차량을 복제할 거예요 + +02:11.910 --> 02:16.500 +그건 쉽죠 하지만 제가 꼭 알려드릴게요 이게 새로운 것일 수 있으니까요 + +02:16.620 --> 02:21.930 +그런 다음 읽기 설명서를 통해 환경을 설정하죠 + +02:21.960 --> 02:27.180 +아나콘다라는 걸 사용하길 권해드립니다 이미 사용해본 분들도 계실 거예요 + +02:27.180 --> 02:30.960 +힘이 세고 꽤 묵직해요 + +02:30.960 --> 02:37.140 +이 과정을 위해 여러분 컴퓨터에 전용적이고 격리된 환경을 만들어줍니다 + +02:37.140 --> 02:43.200 +여러분이 하는 것과 제가 하는 것, 그리고 다른 모든 과정 사이에 높은 + 
+02:43.200 --> 02:46.590 +호환성을 보장하는 거죠 + +02:46.680 --> 02:48.900 +아주 잘하죠 + +02:49.170 --> 02:50.550 +하지만 꽤 묵직해요 + +02:50.550 --> 02:55.110 +인터넷에서 많이 다운로드 받고 설치도 많이 해야 해요 모든 걸 처음부터 + +02:55.110 --> 02:56.130 +만들거든요 + +02:56.520 --> 02:59.100 +그게 문제가 된다면 대안이 있어요 + +02:59.100 --> 03:01.080 +더 간단한 방법을 쓸 수도 있어요 + +03:01.080 --> 03:06.440 +Pip의 표준 파이썬 가상 환경 접근 방법에 대한 설명도 가지고 있어요 + +03:06.470 --> 03:13.580 +더 빠르고 단순하고 아나콘다만큼 호환이 확실하지 않아요 가능하면 아나콘다를 + +03:13.580 --> 03:15.740 +시도해 보려고요 + +03:15.770 --> 03:17.150 +대체로 잘 작동해요 + +03:17.150 --> 03:18.380 +아무 문제 없었어요? + +03:18.380 --> 03:20.900 +파이썬 가상 환경은 대체책이죠 + +03:20.930 --> 03:24.620 +OpenAI 키를 설정해야 하는데 나중에 더 얘기할게요 + +03:24.620 --> 03:28.310 +API 비용에 대해 얘기할게요 그게 여러분께 문제가 될 경우 어떻게 해야 할지요 + +03:28.400 --> 03:29.930 +그때 가서 얘기하죠 + +03:29.930 --> 03:35.960 +EMV 파일이라는 걸 만들어야 해요 개인 키가 들어 있는 거죠 + +03:35.960 --> 03:40.460 +파일이 정확히 .MV로 불려야 한다는 게 중요해요 + +03:40.460 --> 03:45.410 +설명할게요, 하지만 키즈. EMV. EMV.Txt는 안 돼요 + +03:45.440 --> 03:47.600 +정확히 .MV라고 불러야 해요 + +03:48.050 --> 03:53.840 +마지막으로 환경을 활성화하고 주피터랩을 실행해 프로그램을 시작해요 + +03:54.560 --> 03:59.720 +말씀드렸듯이 대부분은 다 잘 될 거예요 + +03:59.930 --> 04:06.190 +대부분은 모든 게 잘 진행된다고 보고하지만 그렇지 않다면 마음대로 + +04:06.190 --> 04:09.460 +쓸 수 있는 비밀 병기가 있죠 + +04:09.490 --> 04:15.490 +그 중 첫 번째는 문제 해결 공책입니다 깃허브 리포에 있는 걸 보여드릴게요 + +04:15.520 --> 04:20.650 +문제를 단계별로 진단하고 뭐가 잘못됐는지 코드를 + +04:20.650 --> 04:24.340 +실행해 하나씩 고쳐요 + +04:24.340 --> 04:25.900 +저기 있네요 + +04:25.930 --> 04:31.840 +두 번째 자원은 챗GPT와 클로드에 있는 놀라운 자원이에요 + +04:31.870 --> 04:34.840 +얼마나 잘하는지 소름 끼쳐요 + +04:34.870 --> 04:42.670 +문제를 말하고 스택 트레이스처럼 붙여넣기를 하면 아주 인상적이에요 해야 할 일에 대해 + +04:42.670 --> 04:45.310 +아주 미묘한 답을 주거든요 + +04:45.340 --> 04:48.970 +인프라 문제와 마주칠 때마다 허를 찔린 적이 있는데 이건 너무 심하다고 + +04:48.970 --> 04:49.870 +생각했어요 + +04:49.870 --> 04:52.300 +Get up OVERflow 스택도 못 맞히겠어요 + +04:52.480 --> 04:55.570 +클로드가 절 구해줬어요 + +04:55.660 --> 04:57.400 +클로드가 더 강한 것 같아요 + +04:57.430 --> 04:57.640 +하지만요 + +04:57.640 --> 04:59.440 +하지만 둘 다 좋은 답을 내놓았죠 + +04:59.440 --> 
05:04.990 +그러니 가끔은 그냥 붙여서 문제를 해결할 수 있다는 걸 아셔야 해요 + +05:05.020 --> 05:06.670 +시도해 볼 만해요 + +05:06.730 --> 05:12.140 +하지만 그 두 가지가 실패하거나 아니면 바로 이 세 번째 선택지를 선택해도 돼요 + +05:12.140 --> 05:15.770 +세 번째 방법은 저한테 연락하는 거예요 + +05:15.890 --> 05:17.780 +당신을 도우려고 대기 중이에요 + +05:17.780 --> 05:19.070 +그게 제 일이에요 + +05:19.130 --> 05:20.990 +그러니 제발 연락 좀 주세요 + +05:20.990 --> 05:23.900 +저한테 연락하실 거면 플랫폼에서 메시지로 하세요 + +05:23.900 --> 05:25.130 +이메일로 보내주세요 + +05:25.160 --> 05:28.700 +제 이메일 주소는 읽기 안에 있어요 저와 링크할 수도 있고요 + +05:28.700 --> 05:30.500 +그나저나 전 링크트인 연결망이 좋아요 + +05:30.560 --> 05:36.650 +LinkedIn도 물론 있고요 거기도 메시지를 보내주세요 제가 아주 반응하는 걸 보실 수 있어요 + +05:36.740 --> 05:41.900 +잠을 자지 않거나 여행을 가지 않으면 다른 답변을 보면 금방 답이 와요 + +05:41.990 --> 05:47.210 +그걸 고치고 get up 작동시키는 게 제 일이라고 생각하죠 + +05:47.210 --> 05:48.320 +난 실패한 적이 없어요 + +05:48.320 --> 05:55.340 +하지만 5천 명이 모두 작업 환경을 갖추고 있어요 + +05:55.340 --> 05:59.570 +과속 방지턱에 부딪히면 이 두 가지 옵션을 선택하세요 + +05:59.570 --> 06:00.860 +나한테 바로 와도 돼요 + +06:00.860 --> 06:01.670 +제가 해결할게요 + +06:01.670 --> 06:03.200 +무슨 일인지 알아볼게요 + +06:03.200 --> 06:05.690 +금방 다시 시작할 수 있어요 + +06:05.840 --> 06:06.680 +좋아요 + +06:06.680 --> 06:11.630 +이제 여러분 환경을 설정하는 데 필요한 걸 보여드리죠 diff --git a/week5/community-contributions/subtitles/srts/60616629/en_US.srt b/week5/community-contributions/subtitles/srts/60616629/en_US.srt new file mode 100755 index 0000000..2ad2fd8 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616629/en_US.srt @@ -0,0 +1,292 @@ +WEBVTT + +00:00.050 --> 00:05.030 +And welcome back to team PC and Team Mac as we come back together again for a quick video. + +00:05.030 --> 00:07.640 +In this one, I'm just going to quickly walk through what you do. + +00:07.640 --> 00:11.990 +If Anaconda has given you problems, you're not able to get that to work and you want to fall back to + +00:12.020 --> 00:14.300 +the simpler virtualenv approach. + +00:14.300 --> 00:16.820 +This is a quicker, simpler approach. + +00:16.820 --> 00:19.160 +It's less guaranteed to be compatible. 
+ +00:19.160 --> 00:24.020 +It's less heavyweight and powerful than the Anaconda environment, but it's perfectly okay. + +00:24.020 --> 00:27.020 +And so by all means, use this if you would prefer. + +00:27.350 --> 00:29.000 +So I'm going to do it on my Mac. + +00:29.000 --> 00:33.680 +But I'll point out when things are different for the PC and it's basically the same, although I'll + +00:33.680 --> 00:40.190 +say the the instructions begin where it says alternative setup instructions in the Readme on on the, + +00:40.190 --> 00:41.360 +the, the repo. + +00:41.360 --> 00:47.930 +And one important difference between PC and Mac is that if you haven't, I imagine that that almost + +00:47.930 --> 00:51.680 +everyone on this has has installed Python before on their PC. + +00:51.950 --> 00:54.050 +But if you haven't, then you will need to do that. + +00:54.260 --> 01:01.790 +Python comes on Macs by default, but not on PCs, so I've got a link in here to go to the Python website. + +01:01.820 --> 01:07.080 +When you go to the Python download website, you can choose to download the latest version of Python, + +01:07.080 --> 01:14.970 +but it might be better to choose Python 3.11.9, which is the latest version 3.11 to be compatible, + +01:15.000 --> 01:20.940 +and you can come down here and do a windows installer to install on windows. + +01:20.970 --> 01:25.890 +And if you don't have this version and if you have the time to install this version on windows, that + +01:25.890 --> 01:27.060 +would be better. + +01:27.210 --> 01:27.990 +Okay. + +01:28.020 --> 01:33.060 +Once you have done that, then on a mac, bring up a terminal window on the PC. + +01:33.090 --> 01:39.270 +Bring up a normal PowerShell prompt, not the Anaconda prompt, just a regular prompt, and go into + +01:39.270 --> 01:45.780 +the projects directory that we created earlier and then into the LM Engineering directory where the + +01:45.780 --> 01:47.220 +project sits. 
+ +01:47.220 --> 01:49.980 +And now you do a simple command. + +01:50.340 --> 01:55.110 +Let's first just quickly check what version of Python we're running Python minus minus version. + +01:55.320 --> 01:59.730 +And on my computer my default version is actually 3.12.3. + +01:59.760 --> 02:04.200 +It still works as far as I can tell for everything, but it's not as guaranteed to be compatible. + +02:04.200 --> 02:06.480 +But still, I'm going to push ahead with this. + +02:06.750 --> 02:10.800 +Uh, and what we now do is we type Python minus m. + +02:11.410 --> 02:13.450 +Venv venv. + +02:13.600 --> 02:18.280 +You might wonder what magic, what strange concoction that is. + +02:18.490 --> 02:24.100 +Python is saying I want to create a new virtual environment, and then the name of that virtual environment + +02:24.100 --> 02:28.210 +is going to be Venv again, which is something that people usually often use. + +02:28.210 --> 02:34.180 +And if I look in this directory, you'll see that there is now a folder called Venv virtual environment, + +02:34.180 --> 02:37.750 +which will contain all of the Python files for this virtual environment. + +02:37.750 --> 02:40.480 +And so far everything has been the same with a PC. + +02:40.720 --> 02:43.960 +What I'm going to do now is different between a mac and a PC. + +02:44.170 --> 02:49.510 +On a mac, what we're going to do is activate this virtual environment on a mac. + +02:49.540 --> 02:55.840 +You do that by typing source venv slash bin slash, activate. + +02:55.990 --> 02:59.830 +And again, all of this is of course I just did that. + +02:59.830 --> 03:02.260 +All of this is of course written in the Readme instructions. + +03:02.260 --> 03:06.970 +And the way we know we've activated the environment is that Venv appears over here on the left. + +03:07.000 --> 03:11.590 +You can see there's also this base here because I've got Anaconda installed. 
+ +03:11.590 --> 03:16.180 +You may not see that if you haven't installed Anaconda and on a PC you won't, you won't see that. + +03:16.440 --> 03:18.300 +but that can be safely ignored. + +03:18.300 --> 03:19.620 +We are in the venv. + +03:19.650 --> 03:20.610 +Environment. + +03:21.030 --> 03:25.740 +And now what you do, which is something that probably most of you have done many times before, is + +03:25.770 --> 03:29.610 +we do pip install because we want to install Python packages. + +03:29.610 --> 03:37.500 +Minus R means we have a file of names of packages that we want to specify, and it's called requirements.txt. + +03:38.340 --> 03:42.630 +And then when I press enter it is now going to be installing these packages. + +03:42.630 --> 03:48.210 +But it's not installing them on my the python that comes with my box. + +03:48.210 --> 03:52.560 +It's installing them specifically on this virtual environment called Venv. + +03:52.560 --> 03:59.130 +So I'm isolating my environment from anything else that I might be doing on my computer with Python. + +03:59.340 --> 04:04.110 +And you can see that there's plenty going on here as it does all of these things. + +04:04.140 --> 04:07.890 +It will be slower for you because you can see it's saying it's used cached. + +04:08.070 --> 04:12.780 +Because I've done this before, it won't be cached for you presumably. + +04:12.780 --> 04:18.540 +So there'll be a lot of downloading going on, but it's still a lot quicker than setting up the Anaconda, + +04:18.570 --> 04:19.920 +uh, window. + +04:19.920 --> 04:22.720 +And then the business happens at the end. + +04:22.750 --> 04:28.990 +Right now it is at the moment, now going ahead and installing all of that. + +04:29.020 --> 04:32.290 +And you can see a lot of different packages here. + +04:32.320 --> 04:36.400 +Uh, of course, only some of these are the packages we're actually going to use on this course. + +04:36.400 --> 04:38.920 +Many of them are things on which they depend. 
+ +04:38.950 --> 04:43.690 +So it's installed both the packages we need and all of their dependencies up there. + +04:43.750 --> 04:48.280 +Um, but you'll see that there's things like SQL alchemy, and that's because we're going to be using + +04:48.280 --> 04:51.430 +an object database that's going to depend on that. + +04:51.550 --> 04:53.680 +Um, and you can see other things going on here. + +04:53.680 --> 05:00.220 +Chroma is the the vector data store, a wonderful open source vector data store that we'll be using + +05:00.310 --> 05:04.150 +Tokenizers part of our Huggingface code. + +05:04.360 --> 05:10.780 +So lots of things you can take your time while it's doing this, to look around and see the names of + +05:10.810 --> 05:12.730 +some of the packages that we will be using. + +05:12.760 --> 05:13.360 +All right. + +05:13.360 --> 05:14.800 +That has now happened. + +05:14.830 --> 05:16.660 +And it didn't take very long at all. + +05:16.660 --> 05:22.630 +And at the end of this, all I have to now do is again, uh oh, I might just quickly mention that the + +05:22.630 --> 05:32.730 +the command for a PC to activate your environment is in fact venv and then backslash scripts. + +05:32.940 --> 05:34.890 +Backslash activate. + +05:34.920 --> 05:41.520 +So you would type that first and then you would type the pip install r requirements.txt. + +05:41.520 --> 05:44.010 +And of course that's all specified in the readme. + +05:44.430 --> 05:50.880 +But whether you're on a mac or on a PC at this point all you would do is type the simple JupyterLab. + +05:50.880 --> 05:57.870 +And with any luck, when I press enter, it is now going to launch the JupyterLab environment. + +05:57.900 --> 05:59.040 +Here it comes. + +05:59.040 --> 06:03.000 +And this is going to be our home for the next few weeks. + +06:03.000 --> 06:04.020 +The next eight weeks. + +06:04.020 --> 06:07.710 +Folders for each of the eight weeks and the usual setup. 
+ +06:07.710 --> 06:12.270 +The only difference being now we have this Venv folder which contains our virtual environment. + +06:12.810 --> 06:15.630 +So that's the fallback plan. + +06:15.630 --> 06:16.530 +I hope that makes sense. + +06:16.530 --> 06:21.990 +If you have any problems at all with the fallback plan, then please, please, please reach out. + +06:21.990 --> 06:22.950 +I will fix it. + +06:22.980 --> 06:25.080 +We'll get you on your way quickly. + +06:25.110 --> 06:31.590 +All right, on to the next video when we're going to be doing setting up API keys with OpenAI. + +06:31.620 --> 06:32.430 +See you there. diff --git a/week5/community-contributions/subtitles/srts/60616629/ja_JP.srt b/week5/community-contributions/subtitles/srts/60616629/ja_JP.srt new file mode 100755 index 0000000..a8bb463 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616629/ja_JP.srt @@ -0,0 +1,256 @@ +WEBVTT + +00:00.050 --> 00:05.030 +そして、 チームPCとチームMacにおかえりなさい。 + +00:05.030 --> 00:07.640 +今回は、 あなたがすることを手短に説明します。 + +00:07.640 --> 00:14.300 +Anacondaが問題を起こしている場合、 それを動作させることができず、 より単純なvirtualenvのアプローチに戻りたい。 + +00:14.300 --> 00:16.820 +これはより迅速でシンプルなアプローチだ。 + +00:16.820 --> 00:19.160 +互換性はあまり保証されていない。 + +00:19.160 --> 00:24.020 +アナコンダ環境に比べると重量級でパワフルではないが、 まったく問題ない。 + +00:24.020 --> 00:27.020 +というわけで、 お望みなら、 ぜひこれをお使いください。 + +00:27.350 --> 00:29.000 +だから、 マックでやろうと思っているんだ。 + +00:29.000 --> 00:41.360 +ただ、 PC用と基本的には同じなのですが、 Readmeにある代替セットアップ手順と書かれているところから説明が始まります。 + +00:41.360 --> 00:51.680 +PCとMacの重要な違いのひとつは、 もしまだインストールしていないのであれば、 ここにいるほとんどの人がPCにPythonをインストールしたことがあるだろうということだ。 + +00:51.950 --> 00:54.050 +しかし、 もしそうでないなら、 そうする必要がある。 + +00:54.260 --> 01:01.790 +PythonはMacにはデフォルトで入っているが、 PCには入っていないので、 ここにPythonのウェブサイトへのリンクを貼っておく。 + +01:01.820 --> 01:07.080 +Pythonのダウンロードサイトに行くと、 最新バージョンのPythonをダウンロードすることができるが、 Python + +01:07.080 --> 01:20.940 +3を選んだ方がいいかもしれない。 11. 
9が最新バージョン3である。 11と互換性があるので、 ここに来てウィンドウズ・インストーラーを使ってウィンドウズにインストールすることができる。 + +01:20.970 --> 01:25.890 +もしこのバージョンを持っていなくて、 ウィンドウズにこのバージョンをインストールする時間があるなら、 + +01:25.890 --> 01:27.060 +その方がいいだろう。 + +01:27.210 --> 01:27.990 +オーケー。 + +01:28.020 --> 01:33.060 +それができたら、 マックでPCのターミナル・ウィンドウを立ち上げる。 + +01:33.090 --> 01:39.270 +Anaconda プロンプトではなく、 通常の PowerShell プロンプトを表示し、 先ほど作成した + +01:39.270 --> 01:47.220 +projects ディレクトリに移動し、 プロジェクトが置かれている LM Engineering ディレクトリに移動します。 + +01:47.220 --> 01:49.980 +そして今度は簡単なコマンドを実行する。 + +01:50.340 --> 01:55.110 +まずはPythonのバージョンを確認しよう。 + +01:55.320 --> 01:59.730 +私のコンピューターでは、 デフォルトのバージョンは3だ。 12. 3. + +01:59.760 --> 02:04.200 +私が知る限りではまだすべてにおいて機能するが、 互換性が保証されているわけではない。 + +02:04.200 --> 02:06.480 +それでも、 私はこれを推し進めるつもりだ。 + +02:06.750 --> 02:10.800 +そして、 Pythonのマイナスmを入力する。 + +02:11.410 --> 02:13.450 +ヴェンヴェンヴェン + +02:13.600 --> 02:18.280 +どんな魔法なのか、 どんな奇妙な調合なのかと思うかもしれない。 + +02:18.490 --> 02:28.210 +Pythonは、 新しい仮想環境を作りたいと言っている。 その仮想環境の名前はまたVenvになる。 + +02:28.210 --> 02:37.750 +このディレクトリを見ると、 Venv virtual environmentというフォルダがあり、 この仮想環境用のPythonファイルがすべて入っている。 + +02:37.750 --> 02:40.480 +そして今のところ、 PCを使ってもすべてが同じだ。 + +02:40.720 --> 02:43.960 +これからやることは、 マックとPCでは違う。 + +02:44.170 --> 02:49.510 +マックでは、 この仮想環境をマック上でアクティブにする。 + +02:49.540 --> 02:55.840 +そのためには、 source venv slash bin slash, activateと入力する。 + +02:55.990 --> 02:59.830 +繰り返しになるけど、 もちろん、 これはすべて僕がやったことなんだ。 + +02:59.830 --> 03:02.260 +もちろん、 これらはすべてReadmeに書かれている。 + +03:02.260 --> 03:06.970 +環境を起動させたことを確認する方法は、 左側にヴェンヴが現れることだ。 + +03:07.000 --> 03:11.590 +Anacondaがインストールされているので、 このベースがあるのがわかるだろう。 + +03:11.590 --> 03:16.180 +AnacondaをインストールしていないPCでは表示されないかもしれません。 + +03:16.440 --> 03:18.300 +しかし、 それは無視しても構わない。 + +03:18.300 --> 03:19.620 +私たちはベンチにいる。 + +03:19.650 --> 03:20.610 +環境。 + +03:21.030 --> 03:25.740 +Pythonのパッケージをインストールしたいので、 + +03:25.770 --> 03:29.610 +pip installを行う。 + +03:29.610 --> 03:37.500 +マイナスRとは、 指定したいパッケージの名前を集めたファイルがあり、 それをrequirementsと呼ぶ。 txt。 + +03:38.340 --> 03:42.630 +エンターキーを押すと、 
これらのパッケージがインストールされる。 + +03:42.630 --> 03:48.210 +でも、 僕のボックスに付属しているパイソンではインストールできないんだ。 + +03:48.210 --> 03:52.560 +Venvという仮想環境に特別にインストールしているのだ。 + +03:52.560 --> 03:59.130 +だから私は、 Pythonを使って自分の環境を自分のコンピューターでやっている他のことから隔離しているんだ。 + +03:59.340 --> 04:04.110 +そして、 これらすべてのことをこなしながら、 たくさんのことが起こっているのがわかるだろう。 + +04:04.140 --> 04:07.890 +キャッシュを使用していると表示されるからだ。 + +04:08.070 --> 04:12.780 +以前にもやったことがあるので、 おそらくキャッシュされることはないだろう。 + +04:12.780 --> 04:19.920 +だから、 ダウンロードが多くなるけど、 アナコンダのウィンドウをセットアップするよりはずっと早い。 + +04:19.920 --> 04:22.720 +そして最後にビジネスが起こる。 + +04:22.750 --> 04:28.990 +今現在は、 そのすべてをインストールしているところだ。 + +04:29.020 --> 04:32.290 +そして、 ここではさまざまなパッケージを見ることができる。 + +04:32.320 --> 04:36.400 +もちろん、 このコースで実際に使うパッケージはこの中の一部だけだ。 + +04:36.400 --> 04:38.920 +その多くは、 彼らが依存しているものだ。 + +04:38.950 --> 04:43.690 +つまり、 必要なパッケージとその依存関係をすべてインストールしたことになる。 + +04:43.750 --> 04:48.280 +SQLの錬金術のようなものがあるのがわかると思いますが、 これはオブジェクト・データベースを使うことになり、 + +04:48.280 --> 04:51.430 +それに依存することになるからです。 + +04:51.550 --> 04:53.680 +他にもいろいろあるんだ。 + +04:53.680 --> 05:04.150 +Chromaはベクター・データ・ストアで、 素晴らしいオープンソースのベクター・データ・ストアで、 Huggingfaceのコードの一部でTokenizersを使う予定だ。 + +05:04.360 --> 05:12.730 +だから、 この作業をしている間に、 これから使ういくつかのパッケージの名前を見て回ることができる。 + +05:12.760 --> 05:13.360 +分かった。 + +05:13.360 --> 05:14.800 +それが今、 起こった。 + +05:14.830 --> 05:16.660 +しかも、 それほど時間はかからなかった。 + +05:16.660 --> 05:32.730 +最後に、 PCで環境をアクティブにするためのコマンドはvenvとバックスラッシュ・スクリプトであることを簡単に書いておこう。 + +05:32.940 --> 05:34.890 +バックスラッシュが有効。 + +05:34.920 --> 05:41.520 +だから、 まずそれを入力し、 それからpip install r requirementsと入力する。 txt。 + +05:41.520 --> 05:44.010 +もちろん、 それはすべてreadmeに明記されている。 + +05:44.430 --> 05:50.880 +しかし、 マックでもPCでも、 この時点であなたがすることは、 シンプルなJupyterLabを入力することだけだ。 + +05:50.880 --> 05:57.870 +そして運が良ければ、 エンターキーを押すとJupyterLab環境が起動する。 + +05:57.900 --> 05:59.040 +来たぞ。 + +05:59.040 --> 06:03.000 +そして、 ここがこれから数週間の私たちの家になる。 + +06:03.000 --> 06:04.020 +これから8週間。 + +06:04.020 --> 06:07.710 +8週間ごとのフォルダと通常のセットアップ。 + +06:07.710 --> 06:12.270 +唯一の違いは、 仮想環境を含むVenvフォルダがあることだ。 + +06:12.810 --> 
06:15.630 +だから、 それが予備のプランなんだ。 + +06:15.630 --> 06:16.530 +ご理解いただけただろうか。 + +06:16.530 --> 06:21.990 +もし、 フォールバック・プランについて何か問題があれば、 どうか、 どうか、 連絡を取ってください。 + +06:21.990 --> 06:22.950 +直します。 + +06:22.980 --> 06:25.080 +私たちは、 あなたをすぐにお連れします。 + +06:25.110 --> 06:31.590 +さて、 次のビデオではOpenAIでAPIキーを設定する。 + +06:31.620 --> 06:32.430 +そこで会おう diff --git a/week5/community-contributions/subtitles/srts/60616629/ko_KR.srt b/week5/community-contributions/subtitles/srts/60616629/ko_KR.srt new file mode 100755 index 0000000..f2ea9cf --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616629/ko_KR.srt @@ -0,0 +1,289 @@ +WEBVTT + +00:00.050 --> 00:05.030 +PC 팀과 Mac 팀에 다시 오신 걸 환영합니다 짧은 비디오를 위해 다시 뭉쳤네요 + +00:05.030 --> 00:07.640 +이번에는 당신이 하는 일을 간단히 설명할게요 + +00:07.640 --> 00:11.990 +아나콘다가 문제가 된다면 get it을 할 수 없습니다. 그러면 간단한 virtualenv + +00:12.020 --> 00:14.300 +접근을 사용하세요. + +00:14.300 --> 00:16.820 +이게 더 빠르고 간단해요 + +00:16.820 --> 00:19.160 +양립이 보장되진 않아요 less + +00:19.160 --> 00:24.020 +아나콘다보다 무겁지도 않고 힘도 덜 세지만 그럭저럭 괜찮아요 + +00:24.020 --> 00:27.020 +그러니 원한다면 이걸 사용하세요 + +00:27.350 --> 00:29.000 +제 Mac에서 할 거예요 + +00:29.000 --> 00:33.680 +PC에 대해 다른 점은 기본적으로 같다는 거죠 비록 설명서가 + +00:33.680 --> 00:41.360 +시작되는 곳은 다른 설정 설명서가 있는 곳이지만요 리뎀션에 있는 읽기에요 + +00:41.360 --> 00:47.930 +PC와 Mac의 중요한 차이점 중 하나는 경험이 없다면 이 앱에 있는 거의 모든 사람이 자신의 + +00:47.930 --> 00:51.680 +PC에 파이썬을 설치해본 적이 있을 거라는 거죠 + +00:51.950 --> 00:54.050 +하지만 아직 안 했다면 그렇게 해야 해요 + +00:54.260 --> 01:01.790 +파이썬 은 기본적으로 Macs로 나오지만 PC에서는 아닙니다 그래서 파이썬 웹사이트로 가는 링크를 걸어두었어요 + +01:01.820 --> 01:07.080 +파이썬 을 다운로드 하실 때 최신 버전의 파이썬 을 다운로드할 수 있습니다 파이썬 3을 선택하는 + +01:07.080 --> 01:14.970 +것이 더 나을 수도 있어요 11살요 9는 최신 버전 3이죠 호환 가능하도록요 그리고 + +01:15.000 --> 01:20.940 +여기로 와서 윈도우에 설치할 윈도우 설치자를 할 수 있어요 + +01:20.970 --> 01:25.890 +이 버전이 없고 Windows에 이 버전을 설치할 시간이 있다면 + +01:25.890 --> 01:27.060 +그게 더 낫겠죠 + +01:27.210 --> 01:27.990 +네 + +01:28.020 --> 01:33.060 +그렇게 하고 나면 Mac에서 PC의 터미널 창을 띄워요 + +01:33.090 --> 01:39.270 +아나콘다 프롬프트 말고 일반적인 PowerShell 프롬프트를 불러옵니다 + 
+01:39.270 --> 01:45.780 +아까 만든 프로젝트 디렉토리로 가서 프로젝트가 있는 LM 엔지니어링 디렉토리로 + +01:45.780 --> 01:47.220 +가요 + +01:47.220 --> 01:49.980 +이제 간단한 명령을 하죠 + +01:50.340 --> 01:55.110 +먼저 파이썬 을 어떤 버전으로 실행하는지 빠르게 확인해 봅시다 파이썬 마이너스 버전이죠 + +01:55.320 --> 01:59.730 +제 컴퓨터의 기본 버전은 사실 3이에요 12살요 3번요 + +01:59.760 --> 02:04.200 +제가 아는 한 모든 게 잘 작동하지만 호환성이 보장되진 않아요 + +02:04.200 --> 02:06.480 +그래도 밀어붙일 거예요 + +02:06.750 --> 02:10.800 +이제 파이썬 마이너스 m을 입력해요 + +02:11.410 --> 02:13.450 +벵갈루루요 + +02:13.600 --> 02:18.280 +이게 무슨 마법이고 무슨 이상한 혼합물인지 궁금하실 거예요 + +02:18.490 --> 02:24.100 +파이썬 을 보면 새로운 가상 환경을 생성하고 싶다고 합니다 가상 환경의 이름은 + +02:24.100 --> 02:28.210 +Venv가 될 것입니다 사람들이 자주 사용하는 것이죠 + +02:28.210 --> 02:34.180 +이 디렉터리를 보면 Venv 가상 환경이라는 폴더가 있는 게 보이실 겁니다 이 + +02:34.180 --> 02:37.750 +가상 환경에 대한 모든 파이썬 파일을 갖고 있죠 + +02:37.750 --> 02:40.480 +지금까지는 모든 게 똑같았어요 + +02:40.720 --> 02:43.960 +이제 제가 할 것은 Mac과 PC의 차이예요 + +02:44.170 --> 02:49.510 +Mac에서 우리가 할 일은 Mac에서 이 가상 환경을 활성화하는 거죠 + +02:49.540 --> 02:55.840 +소스 사슴V/ bin/ 활성화라고 입력하면 돼요 + +02:55.990 --> 02:59.830 +이 모든 건 물론 제가 한 거죠 + +02:59.830 --> 03:02.260 +물론 이 모든 건 읽기 설명서에 쓰여 있어요 + +03:02.260 --> 03:06.970 +환경을 활성화한 방법은 Venv가 여기 왼쪽에 나타난다는 거죠 + +03:07.000 --> 03:11.590 +여기 이 베이스도 있는 걸 보실 수 있어요 아나콘다가 설치돼 있거든요 + +03:11.590 --> 03:16.180 +아나콘다를 설치하지 않았다면 안 보일 수도 있어요 PC에도요 + +03:16.440 --> 03:18.300 +하지만 그건 무시해도 돼요 + +03:18.300 --> 03:19.620 +벤트베어에 들어왔어요 + +03:19.650 --> 03:20.610 +환경이요 + +03:21.030 --> 03:25.740 +이제 여러분이 할 일은 대부분 전에 많이 해보셨을 텐데요 파이썬 + +03:25.770 --> 03:29.610 +패키지를 설치하기 위해 파이프 설치를 하죠 + +03:29.610 --> 03:37.500 +R은 우리가 명시하길 원하는 패키지 이름의 파일이 있다는 뜻입니다 요구 사항이라고 하죠 txt요 + +03:38.340 --> 03:42.630 +엔터를 누르면 이런 패키지들이 설치되고 있어요 + +03:42.630 --> 03:48.210 +하지만 제 박스에 딸려오는 파이썬에는 설치하지 않아요 + +03:48.210 --> 03:52.560 +Venv라는 가상 환경에 특별히 설치하고 있어요 + +03:52.560 --> 03:59.130 +환경을 격리하는 거죠 파이썬 으로 컴퓨터에서 작업하는 다른 것으로부터요 + +03:59.340 --> 04:04.110 +이 모든 걸 하는 동안 많은 일이 일어나는 걸 볼 수 있죠 + +04:04.140 --> 04:07.890 +더 느려질 거예요 사용된 캐시라고 하니까요 + +04:08.070 --> 04:12.780 +제가 전에 해봤는데 당신한텐 
캐시가 안 되겠죠? + +04:12.780 --> 04:18.540 +다운로드가 많이 되겠지만 아나콘다 창을 설정하는 것보다 + +04:18.570 --> 04:19.920 +훨씬 빨라요 + +04:19.920 --> 04:22.720 +그리고 사업은 마지막에 일어나죠 + +04:22.750 --> 04:28.990 +지금은 모든 걸 설치하고 있어요 + +04:29.020 --> 04:32.290 +다양한 패키지가 많이 보이죠 + +04:32.320 --> 04:36.400 +물론 이 중 일부는 이 코스에서 실제로 사용할 패키지들이죠 + +04:36.400 --> 04:38.920 +많은 부분이 그들이 의존하는 것들이죠 + +04:38.950 --> 04:43.690 +필요한 패키지와 종속성 둘 다 설치됐어요 + +04:43.750 --> 04:48.280 +SQL 연금술 같은 것도 있어요 그건 우리가 객체 데이터베이스를 사용할 + +04:48.280 --> 04:51.430 +것이기 때문이죠 그것에 따라 달라져요 + +04:51.550 --> 04:53.680 +다른 것들도 볼 수 있어요 + +04:53.680 --> 05:00.220 +크로마는 벡터 데이터 저장소입니다 오픈 소스 벡터 데이터 저장소로 H깅페이스 + +05:00.310 --> 05:04.150 +코드의 일부인 토큰라이저를 사용할 거예요 + +05:04.360 --> 05:10.780 +이 작업을 하는 동안 시간을 들여 살펴볼 수 있는 게 많아요 우리가 사용할 패키지 + +05:10.810 --> 05:12.730 +이름을 보는 거죠 + +05:12.760 --> 05:13.360 +좋아요 + +05:13.360 --> 05:14.800 +그런 일이 벌어졌어요 + +05:14.830 --> 05:16.660 +오래 걸리지 않았어요 + +05:16.660 --> 05:22.630 +이 끝에 가서 다시 해야 할 일은 아, 짧게 말씀드릴게요 + +05:22.630 --> 05:32.730 +환경을 활성화하는 PC 명령은 사실 베니브와 백슬래시 스크립트예요 + +05:32.940 --> 05:34.890 +백슬래시 활성화요 + +05:34.920 --> 05:41.520 +그걸 먼저 입력하고 나서 파이프 설치 r 요구 사항을 입력해요 txt요 + +05:41.520 --> 05:44.010 +물론 전부 읽기에 명시돼 있죠 + +05:44.430 --> 05:50.880 +하지만 Mac이든 PC든 이 시점에선 간단한 주피터랩을 입력하겠죠 + +05:50.880 --> 05:57.870 +운이 좋으면 엔터를 누르면 주피터랩 환경이 시작될 거예요 + +05:57.900 --> 05:59.040 +나오네요 + +05:59.040 --> 06:03.000 +앞으로 몇 주간 여기서 지낼 거예요 + +06:03.000 --> 06:04.020 +앞으로 8주요 + +06:04.020 --> 06:07.710 +8주간의 폴더와 일반적인 설정이죠 + +06:07.710 --> 06:12.270 +유일한 차이점은 가상 환경을 포함하는 이 Venv 폴더가 있다는 거죠 + +06:12.810 --> 06:15.630 +그게 대비책이에요 + +06:15.630 --> 06:16.530 +이해가 되셨으면 좋겠네요 + +06:16.530 --> 06:21.990 +만약 대비책에 문제가 있다면 꼭 연락 주세요 + +06:21.990 --> 06:22.950 +제가 해결할게요 + +06:22.980 --> 06:25.080 +빠른 배웅 해드릴게요 Get you + +06:25.110 --> 06:31.590 +좋아요, 다음 비디오로 넘어가 오픈AI로 API 키를 설정하는 걸 알아보죠 + +06:31.620 --> 06:32.430 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/60616663/en_US.srt 
b/week5/community-contributions/subtitles/srts/60616663/en_US.srt new file mode 100755 index 0000000..adca383 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616663/en_US.srt @@ -0,0 +1,568 @@ +WEBVTT + +00:00.140 --> 00:00.500 +Well. + +00:00.530 --> 00:04.550 +Hi there, this is time for PC people to get set up. + +00:04.550 --> 00:08.060 +So all you Mac people out there, you don't need to be watching this one. + +00:08.060 --> 00:12.650 +And now's the time when I tell you guys that I told the Mac people that they were my favorite people + +00:12.680 --> 00:15.770 +because I'm a mac user myself, but you know, I am a mac user. + +00:15.770 --> 00:18.350 +But I'll have you know that I started out in PCs. + +00:18.350 --> 00:20.480 +I in fact worked for IBM for a few years. + +00:20.480 --> 00:25.790 +So really, in my heart of hearts, I'm from a IBM from PC blood. + +00:25.790 --> 00:26.150 +So. + +00:26.150 --> 00:30.290 +So you guys are really my people no matter what I said to the other people. + +00:30.290 --> 00:38.060 +Anyways, let's get you set up with a PC to use the environment that we need for this entire course. + +00:38.060 --> 00:46.640 +So we start by going to the repository, my repository for this class, LM engineering, which is at + +00:46.640 --> 00:50.060 +github.com slash my name LM engineering. + +00:50.060 --> 00:56.150 +This link should be all over the place in the course and in the in the notes for the course and resources + +00:56.150 --> 00:56.960 +and so on. + +00:56.990 --> 00:59.870 +And when you go there, you see this repository on GitHub. + +00:59.870 --> 01:05.440 +And if you scroll down you get to see the Readme file that I've written with setup instructions, and + +01:05.440 --> 01:11.020 +I hope I've done a decent job in laying out all of the steps required to set up your environment. + +01:11.050 --> 01:15.700 +And if you prefer to follow instructions that way, then do it that way rather than watching the video. 
+ +01:15.730 --> 01:17.200 +And hopefully I've done it well. + +01:17.200 --> 01:21.550 +And if you find a problem with them, please, please let me know so I can update them for everybody + +01:21.550 --> 01:22.240 +else. + +01:22.570 --> 01:26.740 +Anyway, if you do go through these instructions, what you'll find is that the first thing it says + +01:26.770 --> 01:29.890 +is that for PC people, you may need to install git. + +01:29.890 --> 01:31.300 +If you've not done it before. + +01:31.300 --> 01:36.070 +Git the code control system, something that I imagine most of you who have used some might not. + +01:36.100 --> 01:40.480 +It comes installed on Macs by default, but not on windows. + +01:40.630 --> 01:47.230 +And so if you follow this link, you get to the git download page, you follow the instructions to download, + +01:47.500 --> 01:53.980 +and you then have to say okay about 20 times and just accept the defaults for everything. + +01:53.980 --> 01:55.750 +I just went through and did this myself when I was pressing. + +01:55.750 --> 02:00.310 +Okay, okay, okay, so go through all of those things, just take the defaults all the way through + +02:00.310 --> 02:03.490 +and you will install git on your computer. + +02:03.490 --> 02:07.360 +And so once you have done that come back to the repository. + +02:07.610 --> 02:12.560 +The next thing to do is at the top here there's this green button code. + +02:12.650 --> 02:17.540 +And if you press that green button, it comes up here with the link to the repository. + +02:17.540 --> 02:18.980 +And you can pick either of these. + +02:18.980 --> 02:21.020 +But we were going to pick this https one. + +02:21.020 --> 02:26.000 +And we're going to press um actually you should pick this one and then press the copy button to copy + +02:26.030 --> 02:27.500 +it to clipboard. + +02:27.770 --> 02:32.690 +What we're now going to do is we're going to go and open a new PowerShell. + +02:32.870 --> 02:34.100 +Uh, let me see. 
+ +02:34.160 --> 02:35.720 +Type PowerShell in here. + +02:35.750 --> 02:39.530 +I'm a little bit clumsy at the PC because it has been a little while since I've done this, so you may + +02:39.530 --> 02:41.510 +have to bear with me if I make a fool of myself. + +02:41.510 --> 02:42.140 +But there we go. + +02:42.170 --> 02:43.640 +Up comes the PowerShell. + +02:43.910 --> 02:44.210 +Um. + +02:44.210 --> 02:49.910 +And now the first thing you need to do, which you may already have, is create a projects directory, + +02:49.910 --> 02:53.000 +something that where your different projects that you work on will go. + +02:53.030 --> 02:59.660 +You may already have a projects directory, but if I do CD projects, it turns out I do not. + +02:59.660 --> 03:03.200 +So I can make a projects directory by doing this. + +03:03.230 --> 03:06.050 +You may also just use the Windows Explorer. + +03:06.050 --> 03:08.420 +Uh, the the File Explorer if you prefer. + +03:08.450 --> 03:12.570 +You don't need to use the command prompt, but, uh, is he doing that? + +03:12.570 --> 03:15.900 +So I've made a new projects directory in my home directory. + +03:15.900 --> 03:22.410 +I'm now going to CD into it, and here we are in our projects directory, which of course is empty or + +03:22.410 --> 03:24.870 +dir, if you prefer, the windows way. + +03:25.050 --> 03:28.470 +Uh, what I now type is very simple. + +03:28.620 --> 03:35.730 +Uh, it is git clone, uh, which means I want to make a local copy of this repository, and then you + +03:35.730 --> 03:40.980 +put in the name of the repository, the link to it that we copied a second ago. + +03:41.310 --> 03:44.970 +Uh, and when we do that, it will immediately download. + +03:44.970 --> 03:45.780 +It's already happened. + +03:45.780 --> 03:48.480 +All the code has been cloned to my local drive. 
+ +03:48.480 --> 03:54.120 +So if I look here, you'll see that there is indeed a folder called LM engineering that we will now + +03:54.150 --> 03:55.590 +go into. + +03:55.680 --> 04:00.600 +And there is all of the code that will be our playground for the next eight weeks. + +04:00.600 --> 04:08.220 +And now it's worth mentioning that sometimes people use the expression the project root directory to + +04:08.250 --> 04:10.650 +mean this folder LM engineering. + +04:10.650 --> 04:12.810 +It is the root directory of this project. + +04:12.810 --> 04:18.380 +So if I ever say verbally, or if it's in the in the readme that it mentions project root. + +04:18.380 --> 04:21.620 +I'm talking about this directory LM engineering. + +04:21.740 --> 04:22.550 +There we are. + +04:22.580 --> 04:29.150 +So the next step is now to install Anaconda, which is as I say, it's a it's a reasonably heavyweight + +04:29.180 --> 04:31.190 +tool because it's super powerful. + +04:31.430 --> 04:38.810 +And the place to do that is back in the the Readme again, there is a link to where you go to install + +04:38.900 --> 04:41.000 +Anaconda and there it is. + +04:41.030 --> 04:43.430 +And I have that up over here. + +04:43.640 --> 04:45.410 +Um, except the cookies. + +04:45.440 --> 04:50.450 +This is the Anaconda documentation for installing on windows, and it tries to get you to give your + +04:50.450 --> 04:53.600 +email address and various other things which you can feel free to do if you wish. + +04:53.600 --> 04:57.590 +But then at the end, you can also just say what we'll do now press the download. + +04:57.590 --> 05:02.360 +It suggests you could you could provide an email, but you can also just say skip registration here + +05:02.360 --> 05:03.140 +if you'd prefer. + +05:03.140 --> 05:06.080 +And then you get to this Download Now page. + +05:06.080 --> 05:08.420 +And this is the download that we will do now. + +05:08.870 --> 05:10.550 +Anaconda is quite large. 
+ +05:10.550 --> 05:13.670 +This is a five gigabyte download. + +05:13.820 --> 05:16.730 +And so it may be something which you don't have the room for. + +05:16.730 --> 05:18.740 +And you may also find that this is too heavy weight. + +05:18.740 --> 05:24.560 +And there is the alternative approach using virtualenv that I will record after this, which is a simpler + +05:24.560 --> 05:27.290 +approach, but it's less guaranteed to be compatible. + +05:27.350 --> 05:28.790 +And, you know, it's less hardcore. + +05:28.790 --> 05:32.930 +If you want to be a hardcore LM engineer, you should at least try the Anaconda route. + +05:32.930 --> 05:36.800 +So you press the download button, which I already did. + +05:36.830 --> 05:43.520 +It then brings up a screen to install Anaconda, the usual installation wizard thing, and you have + +05:43.520 --> 05:44.930 +to press yes a few times. + +05:44.930 --> 05:47.300 +It does prompt you for where you want it to install. + +05:47.330 --> 05:53.750 +Some people have selected there to install it to a different drive to make sure that you have capacity. + +05:53.900 --> 05:58.310 +Um, and then when that is done, it will complete the installation, take a few minutes to download + +05:58.310 --> 06:03.590 +and install, and you will have the Anaconda program on your computer. + +06:03.590 --> 06:04.850 +And what does that mean? + +06:04.850 --> 06:10.040 +Well, the thing that it means, as far as we're concerned is that whilst we have this PowerShell that + +06:10.040 --> 06:17.300 +we are very familiar with, there is also a new thing on our start menu, which is called Anaconda PowerShell + +06:17.300 --> 06:22.780 +and that if we open it looks very similar, looks just like a normal PowerShell, but it is in fact + +06:22.780 --> 06:25.030 +a special Anaconda one. + +06:25.420 --> 06:26.860 +Try and expand this window for you. + +06:26.890 --> 06:29.740 +Hold on a second while I fumble fumble, fumble. + +06:29.920 --> 06:31.450 +Uh uh. 
+ +06:31.480 --> 06:32.350 +Now what have I done? + +06:32.380 --> 06:33.250 +There we go. + +06:33.580 --> 06:34.630 +I'm hopeless at this. + +06:34.660 --> 06:35.590 +There we go. + +06:35.620 --> 06:42.490 +So the way that you know that it's an Anaconda PowerShell is that this word base appears to the left, + +06:42.490 --> 06:48.070 +which means that we are in a world, uh, a base world, rather than in a world that we have built, + +06:48.070 --> 06:49.900 +especially for an environment. + +06:50.020 --> 06:50.680 +All right. + +06:50.680 --> 06:57.640 +So now I'm going to go into our projects folder that we just made, and I'm going to go into LM engineering. + +06:57.670 --> 06:58.570 +There it is. + +06:59.020 --> 07:03.880 +So we're now in the project root directory as I explained is the right term for it. + +07:03.910 --> 07:10.600 +And I'm now going to run a simple command which is conda which is the name for running anaconda commands. + +07:10.660 --> 07:13.390 +Env which means environment create. + +07:13.390 --> 07:19.810 +I want to create a new environment minus f means I'm going to specify a file which describes exactly + +07:19.810 --> 07:21.940 +everything that I need for this environment. + +07:21.940 --> 07:23.920 +And the file is called Environment. + +07:23.950 --> 07:25.450 +Environment. + +07:25.480 --> 07:26.650 +Dot YAML. + +07:26.650 --> 07:29.080 +And you can see Environment.yml right right here. + +07:29.080 --> 07:31.810 +So that is what I'm going to do. + +07:32.170 --> 07:39.580 +Now at this point you will need to go and get a coffee and maybe even two coffees. + +07:39.700 --> 07:45.670 +So it does take a bit of time to install this full environment the first time. + +07:45.670 --> 07:50.950 +So when I do it, it's probably takes a few minutes because I've done it before. 
+ +07:50.980 --> 07:56.950 +The very first time, it might take somewhere between 10 minutes and 20 minutes depending on your internet + +07:56.950 --> 08:00.520 +connection, and that's going to be a thing to look out for. + +08:00.520 --> 08:02.260 +So what it's actually doing. + +08:02.260 --> 08:08.380 +So it's looking at all of the packages that we say that we need for this data science environment. + +08:08.380 --> 08:15.520 +And it then figures out for this particular type of system, whatever chip you've got and for the various + +08:15.520 --> 08:21.400 +configurations that you've got, what are the compatible versions of every single package that we need + +08:21.430 --> 08:26.050 +to do things like in the future we'll have an object database, we'll be using Lang chain, we'll be + +08:26.050 --> 08:28.590 +using gradio all sorts of things. + +08:28.590 --> 08:35.010 +It makes sure that we pick the latest versions that are all compatible with each other and also compatible + +08:35.010 --> 08:36.810 +with your processor and so on. + +08:36.810 --> 08:39.240 +And that's why it now says solving environment. + +08:39.240 --> 08:45.330 +That's the equation that it's solving with lots of different decisions to make. + +08:45.960 --> 08:48.360 +So it's going to make those decisions. + +08:48.360 --> 08:53.790 +And it's then going to install all of those packages on your computer. + +08:53.940 --> 08:58.140 +And we're going to I'm going to keep keep going with this sentence for a bit longer to see if it's going + +08:58.140 --> 08:59.820 +to get close to wrapping up. + +08:59.970 --> 09:04.890 +Otherwise I might have to, uh, to to break and come back in just a second. + +09:06.300 --> 09:06.690 +All right. + +09:06.690 --> 09:08.820 +It looks like it's going to take just a couple more minutes. + +09:08.820 --> 09:13.140 +So I'm going to pause the video and I will return when this is ready. + +09:13.320 --> 09:15.690 +Well it's just as well I didn't hang on in there. 
+ +09:15.690 --> 09:18.630 +It took another five minutes from that point to finish. + +09:18.720 --> 09:23.790 +So that would've been an embarrassing amount of five minutes of my waffle that I've rescued you from. + +09:23.940 --> 09:28.140 +So anyway, this can take a few minutes and I've got a fast connection, as I say, so. + +09:28.140 --> 09:31.250 +So it could well be 20 30 minutes for you. + +09:31.340 --> 09:32.870 +So hang on in there. + +09:32.870 --> 09:37.880 +But but obviously if it takes unspeakably long, then abandon and go to the next video where we will + +09:37.880 --> 09:38.660 +do it differently. + +09:38.660 --> 09:44.120 +But if it's completed, your your Anaconda PowerShell prompt should look something like this. + +09:44.420 --> 09:46.490 +It still says base at the end there. + +09:46.640 --> 09:48.620 +And we've created an environment that is known. + +09:48.650 --> 09:49.880 +It's called LMS. + +09:49.880 --> 09:51.110 +That's the name I gave it. + +09:51.110 --> 09:59.000 +And to now activate that environment, which means to make that be the live world that that Anaconda + +09:59.000 --> 10:00.020 +is working within. + +10:00.050 --> 10:05.480 +You simply type conda, activate LMS and you'll need to run that. + +10:05.600 --> 10:10.370 +You'll need to any time that you come back to your computer that you want to start everything again + +10:10.520 --> 10:16.550 +before you you get started, you will need to remember to open up an anaconda prompt and do conda. + +10:16.580 --> 10:23.390 +Activate LMS and your clue that it is now working is that you'll see LMS written over here on the left + +10:23.540 --> 10:30.770 +to the left of my prompt, and that is telling me that I have successfully created my Anaconda environment. 
+ +10:30.890 --> 10:37.050 +Um, and another way we can do it is that if we type Python minus minus version, it will tell us that + +10:37.050 --> 10:43.290 +the version of Python we're on is 3.11.1, which is the version, or at least 3.11 is what I specified + +10:43.290 --> 10:44.220 +for this environment. + +10:44.220 --> 10:49.500 +So we know that we're on exactly the same version of Python, and everything should be compatible. + +10:49.500 --> 10:53.670 +And it just remains for me to type these two words Jupyter Lab. + +10:53.670 --> 10:58.860 +And this is going to open up the Jupyter environment, which is this data science environment where + +10:58.860 --> 11:02.280 +we can work really effectively together on code. + +11:02.280 --> 11:04.290 +Again, some of you probably know this back to front. + +11:04.290 --> 11:05.940 +For some people this might be new. + +11:05.970 --> 11:07.050 +I press enter. + +11:07.050 --> 11:09.630 +It's going to start Jupyter Lab. + +11:09.810 --> 11:11.490 +Uh, up it comes. + +11:11.880 --> 11:17.310 +The first time it might prompt you for which browser do you want to open, but then it opens up with + +11:17.310 --> 11:26.190 +Jupyter Lab on my computer with our eight weeks that lies ahead of us and we are good to go. + +11:26.220 --> 11:28.410 +That is the PC instructions. + +11:28.410 --> 11:30.060 +Hopefully this has all gone well for you. + +11:30.060 --> 11:35.520 +If it hasn't, please, please reach out to me, let me know and I will get you sorted. + +11:35.700 --> 11:37.800 +I will see you then for the next video. 
diff --git a/week5/community-contributions/subtitles/srts/60616663/ja_JP.srt b/week5/community-contributions/subtitles/srts/60616663/ja_JP.srt new file mode 100755 index 0000000..ed0d96e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616663/ja_JP.srt @@ -0,0 +1,493 @@ +WEBVTT + +00:00.140 --> 00:00.500 +まあね。 + +00:00.530 --> 00:04.550 +こんにちは、 PCの皆さんはセットアップの時間です。 + +00:04.550 --> 00:08.060 +だから、 マックの皆さんはこの映画を見る必要はない。 + +00:08.060 --> 00:12.650 +そして今こそ、 私がマックの人たちに、 私自身がマックユーザーだから、 彼らが私のお気に入りの人たちだと言ったことを、 + +00:12.680 --> 00:15.770 +あなたたちに話すときだ。 + +00:15.770 --> 00:18.350 +でも、 最初はPCから始めたんだ。 + +00:18.350 --> 00:20.480 +実際、 私はIBMで数年間働いていた。 + +00:20.480 --> 00:25.790 +だから本当に心の中では、 私はPCの血を引くIBM出身なんだ。 + +00:25.790 --> 00:26.150 +だから + +00:26.150 --> 00:30.290 +だから、 私が他の人たちに何を言おうと、 あなたたちは本当に私の仲間なんだ。 + +00:30.290 --> 00:38.060 +とにかく、 このコース全体に必要な環境を使うために、 PCをセットアップしましょう。 + +00:38.060 --> 00:50.060 +まずは、 githubにあるこのクラスの私のリポジトリ、 LM engineeringにアクセスしてみよう。 + +00:50.060 --> 00:50.060 +comをスラッシュしてLMエンジニアリングと名乗る。 + +00:50.060 --> 00:56.960 +このリンクは、 コースのいたるところ、 コースのノートやリソースなどに記載されているはずです。 + +00:56.990 --> 00:59.870 +そしてそこに行くと、 GitHubのこのリポジトリが表示される。 + +00:59.870 --> 01:05.440 +下にスクロールすると、 セットアップ手順を書いたReadmeファイルを見ることができる。 + +01:05.440 --> 01:11.020 +あなたの環境をセットアップするのに必要なすべてのステップをきちんと説明できたと思う。 + +01:11.050 --> 01:15.700 +そして、 もしあなたがその方法で指示に従うことを好むなら、 ビデオを見るよりもその方法でやってください。 + +01:15.730 --> 01:17.200 +そして願わくば、 それをうまくやり遂げたい。 + +01:17.200 --> 01:22.240 +そして、 もし問題が見つかったら、 他の人たちのために更新しますので、 どうか私に知らせてください。 + +01:22.570 --> 01:29.890 +いずれにせよ、 この説明書を読んでみると、 まず最初に、 PCの人はgitをインストールする必要があるかもしれないと書かれている。 + +01:29.890 --> 01:31.300 +まだやったことがないのなら。 + +01:31.300 --> 01:36.070 +Gitはコード管理システムだが、 使ったことのある人はほとんどいないだろう。 + +01:36.100 --> 01:40.480 +マックにはデフォルトでインストールされているが、 ウィンドウズにはインストールされていない。 + +01:40.630 --> 01:47.230 +このリンクをたどってgitのダウンロードページに行き、 指示に従ってダウンロードし、 + +01:47.500 --> 01:53.980 +OKを20回ほど言って、 すべてのデフォルトを受け入れるだけだ。 + +01:53.980 --> 01:55.750 +プレスの時に自分でやってみたんだ。 + +01:55.750 --> 
02:00.310 +OK、 OK、 OK、 では、 これらのことをすべて実行し、 デフォルトのまま実行すれば、 + +02:00.310 --> 02:03.490 +あなたのコンピューターにgitがインストールされます。 + +02:03.490 --> 02:07.360 +そうしたら、 リポジトリに戻ってくるんだ。 + +02:07.610 --> 02:12.560 +次にすることは、 この一番上にある緑色のボタンコードだ。 + +02:12.650 --> 02:17.540 +緑色のボタンを押すと、 リポジトリへのリンクが表示されます。 + +02:17.540 --> 02:18.980 +そして、 このどちらかを選ぶことができる。 + +02:18.980 --> 02:21.020 +しかし、 我々はこのhttpsの方を選ぶつもりだった。 + +02:21.020 --> 02:27.500 +そして、 これを選んでコピーボタンを押してクリップボードにコピーする。 + +02:27.770 --> 02:32.690 +これから行うのは、 新しいPowerShellを開くことだ。 + +02:32.870 --> 02:34.100 +ええと、 そうだな。 + +02:34.160 --> 02:35.720 +ここにPowerShellと入力する。 + +02:35.750 --> 02:41.510 +久しぶりのPCなので、 ちょっと不器用です。 + +02:41.510 --> 02:42.140 +でも、 これでいい。 + +02:42.170 --> 02:43.640 +PowerShellの登場だ。 + +02:43.910 --> 02:44.210 +うーん。 + +02:44.210 --> 02:53.000 +そして、 まず最初にすべきことは、 すでにあるかもしれないが、 プロジェクト・ディレクトリを作ることだ。 + +02:53.030 --> 02:59.660 +すでにprojectsディレクトリがあるかもしれないが、 CD projectsをやってみると、 ないことがわかった。 + +02:59.660 --> 03:03.200 +こうすることで、 プロジェクト・ディレクトリを作ることができる。 + +03:03.230 --> 03:06.050 +ウィンドウズ・エクスプローラーを使うこともできる。 + +03:06.050 --> 03:08.420 +ファイル・エクスプローラーでもいい。 + +03:08.450 --> 03:12.570 +コマンドプロンプトを使う必要はない。 + +03:12.570 --> 03:15.900 +そこで、 ホーム・ディレクトリに新しいprojectsディレクトリを作った。 + +03:15.900 --> 03:24.870 +CDをダウンロードして、 プロジェクト・ディレクトリーに入ります。 + +03:25.050 --> 03:28.470 +ええと、 私が今打っているのはとてもシンプルなものです。 + +03:28.620 --> 03:35.730 +git cloneというのは、 このリポジトリのローカルコピーを作るという意味です。 そしてリポジトリの名前と、 + +03:35.730 --> 03:40.980 +1秒前にコピーしたリポジトリへのリンクを入れます。 + +03:41.310 --> 03:44.970 +そうすれば、 すぐにダウンロードできる。 + +03:44.970 --> 03:45.780 +もう起きてしまったことだ。 + +03:45.780 --> 03:48.480 +すべてのコードはローカルドライブにクローンした。 + +03:48.480 --> 03:55.590 +ここで見てみると、 確かにLMエンジニアリングというフォルダがあることがわかるだろう。 + +03:55.680 --> 04:00.600 +そして、 これから8週間、 私たちの遊び場となるすべてのコードがある。 + +04:00.600 --> 04:10.650 +また、 プロジェクトのルート・ディレクトリーという表現を、 LMエンジニアリングのこのフォルダーという意味で使うこともある。 + +04:10.650 --> 04:12.810 +このプロジェクトのルート・ディレクトリである。 + +04:12.810 --> 04:18.380 +だから、 もし私が口頭で言ったり、 あるいはReadmeにプロジェクト・ルートについて書いてあったりしたら、 それを読んでください。 + +04:18.380 --> 
04:21.620 +このディレクトリーLMエンジニアリングについて話しているんだ。 + +04:21.740 --> 04:22.550 +さあ、 着いた。 + +04:22.580 --> 04:29.150 +次のステップは、 Anacondaをインストールすることだ。 Anacondaは、 私が言ったように、 超強力なので、 + +04:29.180 --> 04:31.190 +それなりにヘビー級のツールだ。 + +04:31.430 --> 04:41.000 +Readmeに、 Anacondaをインストールするためのリンクがある。 + +04:41.030 --> 04:43.430 +それをここにアップしてある。 + +04:43.640 --> 04:45.410 +クッキーを除いてはね。 + +04:45.440 --> 04:50.450 +これはWindowsにインストールするためのAnacondaのドキュメントで、 電子メールアドレスやその他様々なことを教えようとしますが、 + +04:50.450 --> 04:53.600 +望むなら自由にやってください。 + +04:53.600 --> 04:57.590 +でも最後に、 これから何をするかと言って、 ダウンロードを押すこともできる。 + +04:57.590 --> 05:03.140 +Eメールを入力することもできますが、 登録を省略することもできます。 + +05:03.140 --> 05:06.080 +そして、 このDownload Nowのページにたどり着く。 + +05:06.080 --> 05:08.420 +そして、 これがこれから行うダウンロードだ。 + +05:08.870 --> 05:10.550 +アナコンダはかなり大きい。 + +05:10.550 --> 05:13.670 +これは5ギガバイトのダウンロードだ。 + +05:13.820 --> 05:16.730 +だから、 あなたには余裕がないのかもしれない。 + +05:16.730 --> 05:18.740 +また、 これが重すぎると感じるかもしれない。 + +05:18.740 --> 05:24.560 +そして、 この後に記録するvirtualenvを使った別のアプローチもある。 これはよりシンプルなアプローチだが、 + +05:24.560 --> 05:27.290 +互換性が保証されていない。 + +05:27.350 --> 05:28.790 +それに、 ハードコアじゃない。 + +05:28.790 --> 05:32.930 +筋金入りのLMエンジニアになりたいなら、 少なくともアナコンダ・ルートを試すべきだ。 + +05:32.930 --> 05:36.800 +ダウンロードボタンを押す。 + +05:36.830 --> 05:44.930 +すると、 Anacondaをインストールする画面が表示され、 いつものインストールウィザードのように、 「はい」を数回押さなければならない。 + +05:44.930 --> 05:47.300 +インストールする場所を聞いてくる。 + +05:47.330 --> 05:53.750 +容量を確保するために、 別のドライブにインストールすることを選択する人もいる。 + +05:53.900 --> 05:58.310 +それが終わると、 インストールが完了し、 + +05:58.310 --> 06:03.590 +ダウンロードとインストールに数分かかります。 + +06:03.590 --> 06:04.850 +それは何を意味するのか? + +06:04.850 --> 06:10.040 +つまり、 スタートメニューには、 使い慣れたPowerShellがある一方で、 + +06:10.040 --> 06:25.030 +Anaconda PowerShellと呼ばれる新しいものもある。 + +06:25.420 --> 06:26.860 +このウィンドウを広げてみてほしい。 + +06:26.890 --> 06:29.740 +手探り、 手探り、 手探り。 + +06:29.920 --> 06:31.450 +ええと......。 + +06:31.480 --> 06:32.350 +さて、 私は何をしたのだろう? 
+ +06:32.380 --> 06:33.250 +これでよし。 + +06:33.580 --> 06:34.630 +僕はこれがダメなんだ。 + +06:34.660 --> 06:35.590 +これでよし。 + +06:35.620 --> 06:42.490 +Anaconda PowerShellであることを知る方法は、 このbaseという単語が左側に表示されることです。 + +06:42.490 --> 06:49.900 +これは、 私たちが構築した世界、 特に環境の世界ではなく、 基本的な世界にいることを意味します。 + +06:50.020 --> 06:50.680 +分かった。 + +06:50.680 --> 06:57.640 +それでは、 先ほど作ったプロジェクトフォルダーに入って、 LMエンジニアリングに入ります。 + +06:57.670 --> 06:58.570 +あれだ。 + +06:59.020 --> 07:03.880 +というわけで、 プロジェクト・ルート・ディレクトリに入った。 + +07:03.910 --> 07:10.600 +condaはanacondaコマンドを実行するための名前だ。 + +07:10.660 --> 07:13.390 +環境創造を意味するエンブ。 + +07:13.390 --> 07:21.940 +新しい環境マイナスfを作りたいので、 この環境に必要なものすべてを記述したファイルを指定します。 + +07:21.940 --> 07:23.920 +ファイル名はEnvironment。 + +07:23.950 --> 07:25.450 +環境だ。 + +07:25.480 --> 07:26.650 +ドットYAML。 + +07:26.650 --> 07:29.080 +そして、 環境を見ることができる。 ymlはここにある。 + +07:29.080 --> 07:31.810 +だから、 私はそうするつもりだ。 + +07:32.170 --> 07:39.580 +さて、 この時点でコーヒーを買いに行く必要がある。 + +07:39.700 --> 07:45.670 +そのため、 この完全な環境を最初にインストールするには少し時間がかかる。 + +07:45.670 --> 07:50.950 +だから、 僕がやるときは、 前にやったことがあるから、 たぶん数分で終わる。 + +07:50.980 --> 08:00.520 +初回は、 インターネット接続にもよるが、 10分から20分かかるかもしれない。 + +08:00.520 --> 08:02.260 +では、 実際に何をしているのか。 + +08:02.260 --> 08:08.380 +つまり、 データ・サイエンス環境に必要なパッケージのすべてに目を向けているのだ。 + +08:08.380 --> 08:15.520 +そして、 この特定のタイプのシステム、 どんなチップを搭載していても、 さまざまなコンフィギュレーションがあっても、 + +08:15.520 --> 08:21.400 +将来的にオブジェクト・データベースを導入したり、 ラングチェーンを使ったり、 グラディオを使ったりするために必要な、 + +08:21.430 --> 08:28.590 +あらゆるパッケージの互換バージョンを割り出す。 + +08:28.590 --> 08:36.810 +そのため、 互いに互換性があり、 あなたのプロセッサーなどとも互換性のある最新バージョンを選ぶようにしている。 + +08:36.810 --> 08:39.240 +だから今、 環境を解決すると書いてあるんだ。 + +08:39.240 --> 08:45.330 +それは、 さまざまな決断を迫られる方程式を解いているのだ。 + +08:45.960 --> 08:48.360 +だから、 そういう決断を下すことになる。 + +08:48.360 --> 08:53.790 +そして、 それらのパッケージをすべてあなたのコンピューターにインストールする。 + +08:53.940 --> 08:59.820 +そして、 もう少しこの文章を続けて、 終わりに近づくかどうかを確認するつもりだ。 + +08:59.970 --> 09:04.890 +そうでなければ、 ちょっと休憩して、 またすぐに戻ってこなければならないかもしれない。 + +09:06.300 --> 09:06.690 +分かった。 + +09:06.690 --> 09:08.820 +あと2、 3分かかるようだ。 + 
+09:08.820 --> 09:13.140 +それではビデオを一時停止して、 準備ができたらまた戻ってきます。 + +09:13.320 --> 09:15.690 +まあ、 あそこで頑張らなかっただけマシだよ。 + +09:15.690 --> 09:18.630 +そこから終了までさらに5分かかった。 + +09:18.720 --> 09:23.790 +というわけで、 私のワッフルからあなたを救ったのは、 恥ずかしいことに5分間だったことになる。 + +09:23.940 --> 09:28.140 +とにかく、 この作業には数分かかる。 + +09:28.140 --> 09:31.250 +だから、 あなたにとっては20分30分かもしれない。 + +09:31.340 --> 09:32.870 +だから、 そこで頑張るんだ。 + +09:32.870 --> 09:38.660 +でも、 言いようのないほど時間がかかるようなら、 諦めて次のビデオに進んでください。 + +09:38.660 --> 09:44.120 +しかし、 それが完了すれば、 Anaconda PowerShell プロンプトは以下のようになるはずです。 + +09:44.420 --> 09:46.490 +あそこはまだ最後にベースと書いてある。 + +09:46.640 --> 09:48.620 +そして、 私たちは知られる環境を作り上げた。 + +09:48.650 --> 09:49.880 +LMSと呼ばれるものだ。 + +09:49.880 --> 09:51.110 +それが私がつけた名前だ。 + +09:51.110 --> 10:00.020 +そして今、 その環境を活性化させる。 つまり、 アナコンダが活動するライブの世界にするのだ。 + +10:00.050 --> 10:05.480 +condaと入力してLMSを起動し、 それを実行するだけだ。 + +10:05.600 --> 10:10.370 +コンピュータに戻ってきたらいつでも、 始める前にもう一度すべてを始めたいのであれば、 + +10:10.520 --> 10:16.550 +アナコンダ・プロンプトを開いてcondaを実行することを忘れないようにする必要がある。 + +10:16.580 --> 10:30.770 +LMSをアクティベートし、 プロンプトの左側にLMSと書かれているのが動作している証拠です。 + +10:30.890 --> 10:43.290 +Pythonマイナス・バージョンと入力すると、 今使っているPythonのバージョンが3であることを教えてくれます。 + +10:43.290 --> 10:43.290 +11. 
バージョンである1か、 少なくとも3だ。 11がこの環境で指定したものだ。 + +10:44.220 --> 10:49.500 +Pythonのバージョンはまったく同じで、 すべて互換性があるはずだ。 + +10:49.500 --> 10:53.670 +そして、 あとは私がJupyter Labという2つの単語を入力するだけだ。 + +10:53.670 --> 11:02.280 +Jupyter環境は、 データサイエンス環境であり、 コード上で効率的に共同作業ができる。 + +11:02.280 --> 11:04.290 +繰り返しになるが、 おそらく皆さんの中には、 このことを裏から表まで知っている人もいるだろう。 + +11:04.290 --> 11:05.940 +人によっては初めてのことかもしれない。 + +11:05.970 --> 11:07.050 +エンターキーを押す。 + +11:07.050 --> 11:09.630 +Jupyter Labが始まる。 + +11:09.810 --> 11:11.490 +あ、 上がってきた。 + +11:11.880 --> 11:17.310 +最初はどのブラウザを開くか聞かれるかもしれないが、 + +11:17.310 --> 11:26.190 +私のコンピュータのJupyter Labが開き、 8週間先の予定が表示される。 + +11:26.220 --> 11:28.410 +それがPCの指示だ。 + +11:28.410 --> 11:30.060 +すべてうまくいっていることを願っている。 + +11:30.060 --> 11:35.520 +そうでなければ、 どうか私に連絡をください。 + +11:35.700 --> 11:37.800 +では、 次のビデオでお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/60616663/ko_KR.srt b/week5/community-contributions/subtitles/srts/60616663/ko_KR.srt new file mode 100755 index 0000000..1e90fd3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616663/ko_KR.srt @@ -0,0 +1,562 @@ +WEBVTT + +00:00.140 --> 00:00.500 +글쎄요 + +00:00.530 --> 00:04.550 +안녕하세요, PC 사람들이 준비될 시간이에요. Get up + +00:04.550 --> 00:08.060 +맥을 좋아하는 분들은 이걸 안 봐도 돼요 + +00:08.060 --> 00:12.650 +이제 말씀드릴 차례입니다 Mac 사람들에게 제가 좋아하는 사람들이라고 했어요 제가 + +00:12.680 --> 00:15.770 +Mac 사용자니까요 하지만 Mac 사용자 맞아요 + +00:15.770 --> 00:18.350 +하지만 난 PC로 시작했어요 + +00:18.350 --> 00:20.480 +IBM 밑에서 몇 년 일했죠 + +00:20.480 --> 00:25.790 +전 PC 혈통의 IBM 출신이에요 + +00:25.790 --> 00:26.150 +그래서요? 
+ +00:26.150 --> 00:30.290 +내가 다른 사람들한테 뭐라고 하든 당신들은 내 사람이에요 + +00:30.290 --> 00:38.060 +어쨌든, 전체 과정을 위해 필요한 환경을 사용하도록 PC로 get 설정하죠 + +00:38.060 --> 00:46.640 +저장소로 가서 시작하죠 이 클래스를 위한 제 저장소요 LM 엔지니어링, 깃허브에 + +00:46.640 --> 00:50.060 +있죠 내 이름 LM 엔지니어링이요 + +00:50.060 --> 00:56.960 +이 링크는 코스 곳곳에 있어야 합니다 코스 노트와 리소스 등에도요 + +00:56.990 --> 00:59.870 +깃허브에 이 저장소가 있어요 + +00:59.870 --> 01:05.440 +스크롤을 내리면 Settingation 지시와 함께 작성한 리드메 파일 + +01:05.440 --> 01:11.020 +보이시죠 여러분 환경을 셋업하는 데 필요한 모든 단계를 잘 배치했길 바라요 + +01:11.050 --> 01:15.700 +그렇게 지시를 따르는 게 좋다면 영상을 보는 대신 그렇게 하세요 + +01:15.730 --> 01:17.200 +제가 잘 해냈길 바라요 + +01:17.200 --> 01:22.240 +문제가 있으면 저한테 알려 주세요 제가 대신 업데이트해 드릴게요 + +01:22.570 --> 01:26.740 +어쨌든 이 설명서를 훑어보면 제일 먼저 나오는 게 PC 사용자에게는 + +01:26.770 --> 01:29.890 +git을 설치해야 한다는 거예요 + +01:29.890 --> 01:31.300 +처음이라면 말이죠 + +01:31.300 --> 01:36.070 +코드 컨트롤 시스템을 깃으로 해요 사용해 보신 분은 잘 모르시겠지만요 + +01:36.100 --> 01:40.480 +기본값으로 Macs에 설치되지만 윈도우에는 없어요 + +01:40.630 --> 01:47.230 +이 링크를 따라가면 Git 다운로드 페이지로 가서 다운로드 설명서를 + +01:47.500 --> 01:53.980 +따라가면 스무 번 정도 확인을 누르고 모든 기본값을 수락해야 해요 + +01:53.980 --> 01:55.750 +다림질할 때 제가 직접 했어요 + +01:55.750 --> 02:00.310 +좋아요, 좋아요, 그런 걸 모두 검토하세요 기본값만 끝까지요 + +02:00.310 --> 02:03.490 +그럼 컴퓨터에 git을 설치할 수 있어요 + +02:03.490 --> 02:07.360 +그렇게 하고 나면 저장소로 돌아와요 + +02:07.610 --> 02:12.560 +다음으로 할 일은 여기 위에 녹색 버튼 코드가 있어요 + +02:12.650 --> 02:17.540 +녹색 버튼을 누르면 저장소로 가는 링크와 함께 여기 뜨죠 + +02:17.540 --> 02:18.980 +둘 중 하나를 고르세요 + +02:18.980 --> 02:21.020 +Https 1을 선택할게요 + +02:21.020 --> 02:26.000 +우리가 누를 것은 사실 이걸 선택해야 해요 그리고 복사 버튼을 눌러 클립보드로 + +02:26.030 --> 02:27.500 +복사하세요 + +02:27.770 --> 02:32.690 +이제 할 것은 새 PowerShell을 여는 거예요 + +02:32.870 --> 02:34.100 +어디 봐요 + +02:34.160 --> 02:35.720 +PowerShell을 입력하세요 + +02:35.750 --> 02:39.530 +PC에선 제가 좀 서툴러요 이걸 해본 지 좀 됐거든요 제가 바보짓을 해도 + +02:39.530 --> 02:41.510 +여러분이 참아주셔야 할지도 몰라요 + +02:41.510 --> 02:42.140 +하지만 됐어요 + +02:42.170 --> 02:43.640 +PowerShell이 나오네요 + +02:43.910 --> 02:44.210 +네 + +02:44.210 --> 02:49.910 +가장 먼저 해야 할 일은 이미 갖고 
계실지도 모르지만 프로젝트 디렉터리를 만드는 겁니다 + +02:49.910 --> 02:53.000 +작업하는 다양한 프로젝트가 가는 곳이죠 + +02:53.030 --> 02:59.660 +프로젝트 디렉터리가 이미 있을 수도 있지만 CD 프로젝트를 하면 없을 수도 있어요 + +02:59.660 --> 03:03.200 +이렇게 해서 프로젝트 디렉터리를 만들 수 있어요 + +03:03.230 --> 03:06.050 +Windows 탐색기를 사용해도 되고요 + +03:06.050 --> 03:08.420 +파일 익스플로러라고도 하죠 + +03:08.450 --> 03:12.570 +명령 프롬프트는 필요 없지만 그렇게 하고 있나요? + +03:12.570 --> 03:15.900 +가정 디렉터리에 새 프로젝트 디렉터리를 만들었죠 + +03:15.900 --> 03:22.410 +이제 CD로 들어가겠습니다 프로젝트 디렉터리에 왔어요 물론 비어 있죠, dir요 원한다면 + +03:22.410 --> 03:24.870 +Windows 방식이요 + +03:25.050 --> 03:28.470 +지금 입력하는 건 아주 간단해요 + +03:28.620 --> 03:35.730 +아, Git 클론입니다. 어… 이 저장소의 로컬 복사본을 만들고 싶다는 의미입니다. + +03:35.730 --> 03:40.980 +그리고 저장소의 이름을 적습니다. 조금 전에 복사한 링크요. + +03:41.310 --> 03:44.970 +그러면 바로 다운로드 돼요 + +03:44.970 --> 03:45.780 +이미 벌어진 일이에요 + +03:45.780 --> 03:48.480 +모든 코드가 제 로컬 드라이브로 복제됐어요 + +03:48.480 --> 03:54.120 +여길 보시면 LM 엔지니어링이라는 폴더가 있는 게 보이시죠 이제 들어가볼 + +03:54.150 --> 03:55.590 +거예요 + +03:55.680 --> 04:00.600 +앞으로 8주간은 이 코드가 우리 놀이터가 되겠죠 + +04:00.600 --> 04:08.220 +가끔 사람들이 프로젝트 루트 디렉터리라는 표현을 이 폴더 LM 엔지니어링을 의미한다고 + +04:08.250 --> 04:10.650 +말할 필요가 있어요 + +04:10.650 --> 04:12.810 +이 프로젝트의 루트 디렉터리예요 + +04:12.810 --> 04:18.380 +제가 말로 하거나 리드미에 프로젝트 루트를 언급하면요 + +04:18.380 --> 04:21.620 +디렉터리 LM 엔지니어링 말이에요 + +04:21.740 --> 04:22.550 +됐어요 + +04:22.580 --> 04:29.150 +다음 단계는 아나콘다를 설치하는 겁니다 말씀드렸듯이 아나콘다는 아주 + +04:29.180 --> 04:31.190 +강력한 도구예요 + +04:31.430 --> 04:38.810 +다시 리드미로 돌아가서 하면 됩니다 아나콘다를 설치하는 곳으로 가는 링크가 + +04:38.900 --> 04:41.000 +있어요 저기 있네요 + +04:41.030 --> 04:43.430 +여기 위에 있어요 + +04:43.640 --> 04:45.410 +쿠키만 빼고요 + +04:45.440 --> 04:50.450 +윈도우에 설치를 위한 아나콘다 문서화입니다 이메일 주소와 다른 여러 가지를 + +04:50.450 --> 04:53.600 +제공하도록 합니다 원하면 자유롭게 할 수 있어요 + +04:53.600 --> 04:57.590 +하지만 마지막에 우리가 뭘 할 건지 말할 수도 있어요 다운로드를 누르세요 + +04:57.590 --> 05:02.360 +이메일을 제공할 수도 있지만 원하면 등록을 건너뛰라고 할 수도 + +05:02.360 --> 05:03.140 +있어요 + +05:03.140 --> 05:06.080 +그럼 Get Now 다운로드 페이지가 나오죠 + +05:06.080 --> 05:08.420 +이제 다운로드 할 거예요 + +05:08.870 --> 05:10.550 
+아나콘다는 꽤 커요 + +05:10.550 --> 05:13.670 +5기가바이트 다운로드예요 + +05:13.820 --> 05:16.730 +그래서 공간이 부족할 수도 있어요 + +05:16.730 --> 05:18.740 +너무 무겁다는 걸 아실 수도 있어요 + +05:18.740 --> 05:24.560 +virtualenv를 이용한 다른 접근법이 있습니다. 이것은 더 간단한 접근법이지만 + +05:24.560 --> 05:27.290 +호환성이 보장되지는 않아요. + +05:27.350 --> 05:28.790 +less 하드코어죠 + +05:28.790 --> 05:32.930 +노련한 달 착륙선 엔지니어가 되려면 아나콘다 항로라도 시도해 봐야죠 + +05:32.930 --> 05:36.800 +다운로드 버튼을 누르세요 제가 이미 했죠 + +05:36.830 --> 05:43.520 +아나콘다를 설치하기 위한 스크린이 나타납니다 일반적인 설치 마법사죠 예를 몇 + +05:43.520 --> 05:44.930 +번 눌러야 해요 + +05:44.930 --> 05:47.300 +어디에 설치할지 유도하는 거죠 + +05:47.330 --> 05:53.750 +어떤 사람들은 용량이 충분한지 확인하기 위해 다른 드라이브에 설치하도록 선택했어요 + +05:53.900 --> 05:58.310 +그게 끝나면 설치가 완료됩니다 다운로드 후 설치하는 + +05:58.310 --> 06:03.590 +데 몇 분 걸리죠 그럼 컴퓨터에 아나콘다 프로그램이 있어요 + +06:03.590 --> 06:04.850 +그게 무슨 뜻이죠? + +06:04.850 --> 06:10.040 +우리가 아는 한 그 의미는 우리가 아주 익숙한 PowerShell이 + +06:10.040 --> 06:17.300 +있는 동안 시작 메뉴에 새로운 것도 있다는 겁니다 아나콘다 PowerShell이란 건데 열어보면 + +06:17.300 --> 06:22.780 +아주 비슷해 보여요 일반 PowerShell과 비슷하지만 실은 특별한 + +06:22.780 --> 06:25.030 +아나콘다죠 + +06:25.420 --> 06:26.860 +이 창문을 넓혀볼게요 + +06:26.890 --> 06:29.740 +잠깐만요, 공을 놓쳤어요 + +06:29.920 --> 06:31.450 +네 + +06:31.480 --> 06:32.350 +내가 무슨 짓을 한 거죠? 
+ +06:32.380 --> 06:33.250 +됐어요 + +06:33.580 --> 06:34.630 +전 가망이 없어요 + +06:34.660 --> 06:35.590 +됐어요 + +06:35.620 --> 06:42.490 +아나콘다 파워셸인지 아는 방법은 왼쪽에 베이스가 보이는 거죠 그 말은 우리가 + +06:42.490 --> 06:48.070 +베이스 세계에 있다는 겁니다 우리가 만든 세계나 특정 환경을 위한 + +06:48.070 --> 06:49.900 +세계가 아니라요 + +06:50.020 --> 06:50.680 +좋아요 + +06:50.680 --> 06:57.640 +이제 방금 만든 프로젝트 폴더로 가보겠습니다 LM 엔지니어링으로 가보죠 + +06:57.670 --> 06:58.570 +저기 있네요 + +06:59.020 --> 07:03.880 +이제 프로젝트 루트 디렉터리에 들어왔어요 제가 설명드린 대로요 + +07:03.910 --> 07:10.600 +이제 간단한 명령을 실행할게요 콘다라는 명령어인데 아나콘다 명령을 실행하는 이름이죠 + +07:10.660 --> 07:13.390 +환경 창조를 뜻하죠 + +07:13.390 --> 07:19.810 +새 환경을 생성하기 위해 마이너스 f는 파일을 명시한다는 의미입니다 이 환경에서 필요한 + +07:19.810 --> 07:21.940 +모든 것을 설명하는 거죠 + +07:21.940 --> 07:23.920 +그 파일은 환경이라고 해요 + +07:23.950 --> 07:25.450 +환경이요 + +07:25.480 --> 07:26.650 +도트 YAML요 + +07:26.650 --> 07:29.080 +환경을 볼 수 있어요 yml이 여기 있네요 + +07:29.080 --> 07:31.810 +그래서 그렇게 하려고요 + +07:32.170 --> 07:39.580 +이제 가서 커피를 사 오세요 Get you get you, for 두 잔도요 + +07:39.700 --> 07:45.670 +비트 전체 환경을 처음 설치하는 데 시간이 좀 걸려요 + +07:45.670 --> 07:50.950 +몇 분 걸릴 거예요 전에 해 봤거든요 + +07:50.980 --> 07:56.950 +처음 할 때는 인터넷 연결에 따라 다르지만 10분에서 20분 정도 + +07:56.950 --> 08:00.520 +걸립니다 이 정도도 조심해야 해요 + +08:00.520 --> 08:02.260 +실제로 뭘 하는 거죠? + +08:02.260 --> 08:08.380 +이 데이터 과학 환경에 필요하다고 말하는 모든 패키지를 보고 있어요 + +08:08.380 --> 08:15.520 +그러면 이 특정 유형의 시스템을 파악합니다 칩이 무엇이든 다양한 구성이 있습니다 모든 패키지의 + +08:15.520 --> 08:21.400 +호환 가능한 버전이 무엇인지를 파악합니다 미래에는 객체 데이터베이스가 + +08:21.430 --> 08:26.050 +있을 것입니다 랭체인을 이용할 것입니다 그러디오를 이용할 + +08:26.050 --> 08:28.590 +것입니다 모든 것들이요 + +08:28.590 --> 08:35.010 +서로 호환 가능하고 프로세서와 호환 가능한 최신 버전을 선택하도록 + +08:35.010 --> 08:36.810 +하죠 + +08:36.810 --> 08:39.240 +그래서 환경 해결이라고 하는 거죠 + +08:39.240 --> 08:45.330 +여러 가지 결정을 내리면서 풀어야 하는 방정식이죠 + +08:45.960 --> 08:48.360 +그런 결정을 내리죠 + +08:48.360 --> 08:53.790 +그런 다음 컴퓨터에 모든 패키지를 설치하죠 + +08:53.940 --> 08:58.140 +이 문장을 좀 더 길게 계속 진행해서 마무리할 수 있을지 보겠습니다. + +08:58.140 --> 08:59.820 +비트 주세요. 
+ +08:59.970 --> 09:04.890 +안 그러면 잠시 쉬었다가 다시 와야 할 거예요 + +09:06.300 --> 09:06.690 +좋아요 + +09:06.690 --> 09:08.820 +몇 분 더 걸릴 것 같아요 + +09:08.820 --> 09:13.140 +비디오를 일시 정지하고 준비되면 돌아오죠 + +09:13.320 --> 09:15.690 +거기서 안 버틴 게 다행이죠 + +09:15.690 --> 09:18.630 +거기서 결승선까지 5분이 더 걸렸죠 + +09:18.720 --> 09:23.790 +하마터면 5분 동안 와플을 못 먹게 될 뻔했네요 + +09:23.940 --> 09:28.140 +몇 분 걸릴 수도 있어요 말했듯이 연결은 빠르죠 + +09:28.140 --> 09:31.250 +당신에겐 20분, 30분이 될 수도 있어요 + +09:31.340 --> 09:32.870 +조금만 더 버텨요 + +09:32.870 --> 09:37.880 +하지만 말할 수 없이 오래 걸린다면 포기하고 다른 방식으로 하는 다음 비디오로 + +09:37.880 --> 09:38.660 +오세요 + +09:38.660 --> 09:44.120 +하지만 완료되면 아나콘다 파워셸 프롬프트는 이런 모습이어야 하죠 + +09:44.420 --> 09:46.490 +아직도 맨 끝에 베이스라고 쓰여 있어요 + +09:46.640 --> 09:48.620 +그래서 잘 알려진 환경을 만들었어요 + +09:48.650 --> 09:49.880 +LMS라고 하죠 + +09:49.880 --> 09:51.110 +제가 지은 이름이에요 + +09:51.110 --> 09:59.000 +이제 그 환경을 활성화하려면 아나콘다가 활동하는 살아 있는 세계로 만들어야 + +09:59.000 --> 10:00.020 +해요 + +10:00.050 --> 10:05.480 +콘다를 입력해 LMS를 활성화하면 그걸 실행해야 하죠 + +10:05.600 --> 10:10.370 +컴퓨터에 다시 접속할 때마다 모든 걸 다시 시작하고 싶을 겁니다 + +10:10.520 --> 10:16.550 +get aanaconda 프롬프트를 열어 콘다를 하는 걸 기억하세요 + +10:16.580 --> 10:23.390 +LMS 활성화 이제 작동한다는 단서는 여기 왼쪽에서 제 프롬프트 왼쪽에서 LMS가 + +10:23.540 --> 10:30.770 +작성된 게 보이실 겁니다 성공적으로 아나콘다 환경을 생성했다는 걸 알려주는 거죠 + +10:30.890 --> 10:37.050 +또 다른 방법은 파이썬 마이너스 버전을 입력하면 파이썬 버전은 3이라고 + +10:37.050 --> 10:43.290 +알려줄 거예요 11살요 1번, 그게 버전이죠 적어도 3번이요 이 환경을 위해 11을 + +10:43.290 --> 10:44.220 +지정했어요 + +10:44.220 --> 10:49.500 +그래서 우리는 정확히 같은 버전의 파이썬을 사용하고 있고 모든 것이 호환 가능하다는 것을 알죠 + +10:49.500 --> 10:53.670 +이제 두 단어를 입력하면 돼요 주피터 랩 + +10:53.670 --> 10:58.860 +그러면 Jupyter 환경이 열립니다 코드에서 효과적으로 + +10:58.860 --> 11:02.280 +함께 작업할 수 있는 데이터 과학 환경이죠 + +11:02.280 --> 11:04.290 +이미 아시는 분들도 계실 거예요 + +11:04.290 --> 11:05.940 +어떤 사람들에겐 새로운 일이겠죠 + +11:05.970 --> 11:07.050 +엔터키를 눌렀어요 + +11:07.050 --> 11:09.630 +주피터 연구소가 시작될 거예요 + +11:09.810 --> 11:11.490 +위로 올려요 + +11:11.880 --> 11:17.310 +처음엔 어떤 브라우저를 열지 묻겠지만 제 컴퓨터에 + +11:17.310 --> 11:26.190 +주피터 랩이 뜨죠 앞으로 8주 동안의 여정이 펼쳐져 있어요 
+ +11:26.220 --> 11:28.410 +그게 PC 지침이에요 + +11:28.410 --> 11:30.060 +모든 게 잘 풀렸길 바라요 + +11:30.060 --> 11:35.520 +만약 그렇지 않다면 부디 제게 연락하세요 알려 주시면 해결해 드릴게요 Get up + +11:35.700 --> 11:37.800 +그럼 다음 영상에서 만나요 diff --git a/week5/community-contributions/subtitles/srts/60616833/en_US.srt b/week5/community-contributions/subtitles/srts/60616833/en_US.srt new file mode 100755 index 0000000..5c73a48 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616833/en_US.srt @@ -0,0 +1,118 @@ +WEBVTT + +00:00.110 --> 00:06.500 +So I realized that day one of week one has been a pretty long day, and I assure you that the other, + +00:06.500 --> 00:09.500 +generally speaking, the other days won't be as long. + +00:09.560 --> 00:14.930 +We had some foundational work to do to get the environments up and running, and hopefully that you're + +00:14.930 --> 00:19.730 +happy that we've got there and you're feeling satisfied that we ran our first big project. + +00:19.730 --> 00:27.200 +As a quick recap of what we got done at the very beginning, seems like an age ago we used to run LMS + +00:27.200 --> 00:33.500 +open source LMS locally on your box, running them to generate content. + +00:33.770 --> 00:41.540 +Then we set up the environment, and then we used open AI in the cloud to make a call to frontier models + +00:41.540 --> 00:42.560 +to GPT four. + +00:42.800 --> 00:49.670 +Mini was the model we used to generate text there, and obviously we're using here a closed source model + +00:49.670 --> 00:53.300 +that is maybe 1000 or 10,000 times larger. + +00:53.540 --> 00:59.420 +We pay a small price for that in the form of a fraction of a cent, but we do have to to pay to use + +00:59.420 --> 00:59.750 +that. + +00:59.750 --> 01:06.500 +But what we get back is much richer in quality than using a small local one. + +01:06.890 --> 01:12.590 +Um, we learned how to distinguish between a system prompt and a user prompt just at a high level. 
+ +01:12.590 --> 01:15.710 +We'll do a lot more on that, of course, in the coming days. + +01:15.770 --> 01:21.860 +Uh, system prompt setting the tone, the context of the conversation, the user prompt, which is the + +01:21.860 --> 01:23.240 +conversation itself. + +01:23.270 --> 01:24.730 +We used it for the opener. + +01:24.760 --> 01:28.030 +Later, we'll be using it for many rounds of conversation. + +01:28.030 --> 01:35.320 +And then most importantly, we applied this to the field of summarization and a critical use case that + +01:35.320 --> 01:39.550 +comes up so many times it's applicable to many different problems. + +01:39.550 --> 01:44.260 +It's something that I hope you'll find ways to use this in your day job, in what you do already. + +01:44.260 --> 01:49.090 +And if not, then certainly you should be able to find personal projects that you could come up with + +01:49.090 --> 01:50.440 +where you could apply this. + +01:50.440 --> 01:53.110 +And I'm really excited to see what people come up with. + +01:53.530 --> 01:55.480 +So that's what we got done. + +01:55.480 --> 02:02.950 +And would you believe we are already 2.5% through the course on the way to being an LLM engineering + +02:03.190 --> 02:07.000 +expert, so it's already progress has been made. + +02:07.030 --> 02:12.460 +Tomorrow we're going to talk about what really is that journey like what are the steps. + +02:12.460 --> 02:16.600 +So you have a clear sense of what's what's to be done, set you up for success. + +02:16.600 --> 02:18.910 +And then we'll do some, some, some content. + +02:18.910 --> 02:22.870 +We'll talk about what are the leading frontier models and the different ways to use them. + +02:22.870 --> 02:28.900 +And we'll also do some quick lab work, something I promised you that people who would prefer not to + +02:28.930 --> 02:31.630 +fork out dollars to OpenAI. 
+ +02:31.660 --> 02:37.360 +I'm going to show you how we could use Olama as an alternative with the same code that we just wrote, + +02:37.360 --> 02:43.870 +calling Olama running locally instead of calling out to the frontier model on the cloud. + +02:43.870 --> 02:46.030 +So we'll do that tomorrow too. + +02:46.060 --> 02:47.500 +Very much looking forward to it. + +02:47.500 --> 02:48.670 +And I will see you then. diff --git a/week5/community-contributions/subtitles/srts/60616833/ja_JP.srt b/week5/community-contributions/subtitles/srts/60616833/ja_JP.srt new file mode 100755 index 0000000..e6e9850 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616833/ja_JP.srt @@ -0,0 +1,94 @@ +WEBVTT + +00:00.110 --> 00:09.500 +週目の1日目はかなり長い1日だった。 + +00:09.560 --> 00:14.930 +私たちは環境を整え、 稼働させるためにいくつかの基礎的な作業をしなければならなかった。 願わくば、 + +00:14.930 --> 00:19.730 +私たちが最初の大きなプロジェクトを成功させたことに満足してほしい。 + +00:19.730 --> 00:27.200 +最初の頃のことを簡単に振り返ると、 一昔前はオープンソースのLMSをローカルで動かし、 + +00:27.200 --> 00:33.500 +コンテンツを生成していた。 + +00:33.770 --> 00:42.560 +そして環境を整え、 クラウド上のオープンAIを使ってGPT4へのフロンティアモデルへの呼びかけを行った。 + +00:42.800 --> 00:53.300 +ミニは、 私たちがテキストを生成するために使用したモデルで、 明らかに私たちがここで使用しているのは、 おそらく1000倍か1万倍大きいクローズドソースのモデルだ。 + +00:53.540 --> 00:59.750 +私たちはそのために、 ほんのわずかなセントという形で、 わずかな代償を払っている。 + +00:59.750 --> 01:06.500 +しかし、 私たちから返ってくるものは、 地元の小規模なものを使うよりもはるかに質が豊かだ。 + +01:06.890 --> 01:12.590 +ええと、 システムプロンプトとユーザープロンプトを区別する方法は、 高いレベルで学びました。 + +01:12.590 --> 01:15.710 +もちろん、 この件に関しては、 今後数日のうちに詳しくお伝えするつもりだ。 + +01:15.770 --> 01:23.240 +システム・プロンプトは会話のトーンや文脈を設定し、 ユーザー・プロンプトは会話そのものである。 + +01:23.270 --> 01:24.730 +開幕戦に使った。 + +01:24.760 --> 01:28.030 +その後、 何度も会話に使うことになる。 + +01:28.030 --> 01:35.320 +そして最も重要なのは、 これを要約の分野に応用したことだ。 重要なユースケースは何度も出てくるので、 + +01:35.320 --> 01:39.550 +さまざまな問題に応用できる。 + +01:39.550 --> 01:44.260 +本業で、 すでにやっていることでこれを使う方法を見つけてほしいものだ。 + +01:44.260 --> 01:50.440 +そうでなければ、 これを応用できるような個人的なプロジェクトを見つけることができるはずだ。 + +01:50.440 --> 01:53.110 +そして、 みんなが何を考え出すのか、 とても楽しみだ。 + +01:53.530 --> 01:55.480 
+だから、 それが僕らの成果なんだ。 + +01:55.480 --> 02:07.000 +そして、 私たちがすでに2歳になったことを信じられるだろうか。 LLMエンジニアリングの専門家になるためのコースの5%を修了した。 + +02:07.030 --> 02:12.460 +明日は、 その旅とはいったいどんなものなのか、 どんなステップなのかについて話すつもりだ。 + +02:12.460 --> 02:16.600 +だから、 何をすべきかを明確に認識し、 成功への準備を整えるのだ。 + +02:16.600 --> 02:18.910 +そして、 いくつか、 いくつか、 いくつか、 コンテンツを作る。 + +02:18.910 --> 02:22.870 +主要なフロンティア・モデルとは何か、 またその使い分けについてお話しします。 + +02:22.870 --> 02:31.630 +そして、 OpenAIにお金を出したくない人たちに約束した、 簡単なラボワークも行います。 + +02:31.660 --> 02:37.360 +クラウド上のフロンティア・モデルを呼び出す代わりに、 + +02:37.360 --> 02:43.870 +ローカルで動作するOlamaを呼び出す。 + +02:43.870 --> 02:46.030 +だから明日もそうしよう。 + +02:46.060 --> 02:47.500 +とても楽しみにしている。 + +02:47.500 --> 02:48.670 +その時にまた会おう。 diff --git a/week5/community-contributions/subtitles/srts/60616833/ko_KR.srt b/week5/community-contributions/subtitles/srts/60616833/ko_KR.srt new file mode 100755 index 0000000..59ddaa2 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616833/ko_KR.srt @@ -0,0 +1,115 @@ +WEBVTT + +00:00.110 --> 00:06.500 +첫째 주 첫날은 정말 긴 하루였어요 그리고 다음 날은 분명히 + +00:06.500 --> 00:09.500 +그렇게 길지 않을 거예요 + +00:09.560 --> 00:14.930 +환경 구축과 실행을 위한 기초 작업이 있었어요 여러분이 만족하셨으면 + +00:14.930 --> 00:19.730 +좋겠습니다 첫 번째 큰 프로젝트를 실행했다는 것에 대해서요 + +00:19.730 --> 00:27.200 +초기에 했던 것을 짧게 요약해보죠 아주 오래 전에 LMS 오픈 소스 LMS를 + +00:27.200 --> 00:33.500 +로컬로 실행했었어요 콘텐츠 생성을 위해서요 + +00:33.770 --> 00:41.540 +환경을 설정하고 클라우드에서 오픈 인공지능을 사용해 GPT 4에 프런티어 모델을 + +00:41.540 --> 00:42.560 +호출했죠 + +00:42.800 --> 00:49.670 +미니는 텍스트를 생성하기 위해 사용한 모델입니다. 우리는 비공개 소스 모델을 사용하고 있는데 + +00:49.670 --> 00:53.300 +1000 혹은 10,000배 정도 더 크죠. + +00:53.540 --> 00:59.750 +우린 그걸 위해 1센트도 안 되는 작은 대가를 지불하지만 그걸 사용하려면 돈을 내야 하죠 + +00:59.750 --> 01:06.500 +하지만 작은 국내산보다 품질이 훨씬 좋아요. 
+ +01:06.890 --> 01:12.590 +높은 수준에서 시스템 프롬프트와 사용자 프롬프트를 구별하는 법을 배웠어요 + +01:12.590 --> 01:15.710 +앞으로 며칠 동안 더 자세히 다룰 거예요 + +01:15.770 --> 01:21.860 +시스템 프롬프트에서 톤을 설정하고 대화의 맥락을 설정합니다 대화 자체가 사용자 + +01:21.860 --> 01:23.240 +프롬프트죠 + +01:23.270 --> 01:24.730 +오프닝으로 썼어요 + +01:24.760 --> 01:28.030 +나중에 이걸로 많은 대화를 나눌 거예요 + +01:28.030 --> 01:35.320 +그리고 가장 중요한 것은 요약과 주요 유스케이스에 적용한 것입니다. + +01:35.320 --> 01:39.550 +많은 문제들에 적용할 수 있어요. + +01:39.550 --> 01:44.260 +이걸 본업에서도 활용할 방법을 찾길 바라요 이미 하고 있는 일에서요 + +01:44.260 --> 01:49.090 +그렇지 않다면 개인 프로젝트를 찾아 적용할 수 + +01:49.090 --> 01:50.440 +있어야 해요 + +01:50.440 --> 01:53.110 +어떤 작품이 나올지 정말 기대돼요 + +01:53.530 --> 01:55.480 +그렇게 했어요 + +01:55.480 --> 02:02.950 +벌써 두 살이라니 믿어지세요? 5%만 더 배우면 LLM 공학 전문가가 + +02:03.190 --> 02:07.000 +될 겁니다 이미 진전이 있었던 거죠 + +02:07.030 --> 02:12.460 +내일은 그 여정이 뭔지 얘기할 거예요 어떤 단계인지 같은 거요 + +02:12.460 --> 02:16.600 +뭘 해야 할지 확실히 알고 성공하도록 준비하는 거죠 + +02:16.600 --> 02:18.910 +그리고 몇 가지 내용을 다룰 거예요 + +02:18.910 --> 02:22.870 +선구적인 개척자 모델은 무엇이며 이를 활용하는 다양한 방법에 대해 얘기해 보죠 + +02:22.870 --> 02:28.900 +실험실 작업도 할 겁니다 오픈라이에 돈을 쓰기 싫어하는 + +02:28.930 --> 02:31.630 +사람들이 있을 거예요 + +02:31.660 --> 02:37.360 +방금 쓴 것과 같은 코드로 Olama를 대체할 수 있는 방법을 보여드리겠습니다 Preftier + +02:37.360 --> 02:43.870 +모델을 클라우드에서 호출하는 대신 Olama를 로컬로 실행하는 거죠 + +02:43.870 --> 02:46.030 +내일도 그렇게 하죠 + +02:46.060 --> 02:47.500 +정말 기대돼요 + +02:47.500 --> 02:48.670 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/60616845/en_US.srt b/week5/community-contributions/subtitles/srts/60616845/en_US.srt new file mode 100755 index 0000000..9f6873c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616845/en_US.srt @@ -0,0 +1,265 @@ +WEBVTT + +00:00.050 --> 00:01.400 +We're on the home stretch. + +00:01.400 --> 00:04.880 +This is the final step in the environment setup, and it's an easy one. + +00:04.880 --> 00:06.980 +It's one that's easy to make mistakes as well. 
+ +00:06.980 --> 00:10.430 +But I'm going to guide you through it and there'll be no problems whatsoever. + +00:10.430 --> 00:15.170 +I'm going to show it to you both on a mac and a PC all in one video, so you guys can see how it works + +00:15.170 --> 00:15.890 +for both. + +00:15.890 --> 00:17.150 +And it's pretty simple. + +00:17.150 --> 00:23.960 +It's called creating a dot env file, which is a common way to store secrets in a way that you can access + +00:23.960 --> 00:29.990 +in your project that don't get checked into source code control doesn't go into into git, which means + +00:29.990 --> 00:31.850 +that your secrets are safe. + +00:31.850 --> 00:33.230 +We're going to start with a mac. + +00:33.230 --> 00:41.300 +Here I am, I am going to go into my projects folder that we set up into LM engineering. + +00:41.300 --> 00:44.180 +So we are now in the project root directory, as I say. + +00:44.390 --> 00:48.890 +Um, and what I'm going to do is create a file called dot env. + +00:48.890 --> 00:55.040 +And the way that you create a file from, from the, the way you bring up an editor in here is you can + +00:55.040 --> 00:57.830 +use nano, which is a very popular tool. + +00:57.830 --> 01:03.050 +And the name of our file, the thing we're going to create is going to be called dot EMV, literally + +01:03.050 --> 01:03.560 +a period. + +01:03.560 --> 01:04.940 +And then the letters EMV. + +01:05.180 --> 01:06.470 +Now here's the thing. + +01:06.470 --> 01:08.510 +This is a fussy step. + +01:08.510 --> 01:11.750 +The name of the file must be exactly dot EMV. + +01:11.840 --> 01:19.010 +It can't be like my keys dot EMV or EMV and it can't be dot EMV, dot text or anything like that. + +01:19.010 --> 01:23.600 +It's got to be exactly dot EMV, otherwise it doesn't find it. + +01:23.630 --> 01:25.370 +So that's that's the rule. + +01:25.490 --> 01:30.470 +And now that I've done that, up comes a new empty file called dot EMV. 
+ +01:31.010 --> 01:32.930 +And what I'm going to put in here is very simple. + +01:32.930 --> 01:35.210 +And of course this is all laid out in the in the readme. + +01:35.210 --> 01:41.870 +But I'm going to put in this file just open AI underscore API underscore key equals. + +01:41.870 --> 01:49.580 +And now I put in here the key itself that that I have hopefully stored very safely from the last video. + +01:49.580 --> 01:53.720 +And it begins scmproj dash blah blah blah. + +01:54.110 --> 01:56.360 +Put that in the file and don't have any. + +01:56.390 --> 01:59.840 +Don't have a space before the equals sign or after the equals sign. + +01:59.870 --> 02:02.840 +And there's one other like really unpleasant. + +02:02.870 --> 02:03.530 +Gotcha. + +02:03.530 --> 02:09.590 +If you've if you use the notebook on the notes application on Mac, I use that a lot. + +02:09.620 --> 02:16.760 +Sometimes if you paste something in there and then press enter and copy it out, it replaces hyphens + +02:16.760 --> 02:19.430 +with long dashes if you're not careful. + +02:19.430 --> 02:21.650 +So just make sure that that whatever. + +02:21.680 --> 02:26.960 +If you haven't copied it straight from that website, that nothing has got mangled in your key. + +02:27.050 --> 02:28.490 +That was a really hard problem. + +02:28.490 --> 02:33.620 +That it took me a while to track down that one student had, but maybe it's just a rare situation that + +02:33.620 --> 02:34.190 +that happens. + +02:34.190 --> 02:37.370 +But anyways, if you've been careful, we've got the key just like this. + +02:37.400 --> 02:42.560 +You've got the full key laid out there, and then you press, as it says on the on the bottom of the + +02:42.560 --> 02:48.590 +screen on a mac, command O or control O to save, and X gets out of it. + +02:48.620 --> 02:50.480 +Now there's one thing to look out for. 
+ +02:50.480 --> 02:56.570 +If I do an LHS, you'll see that there isn't a EMV file here, and you'll also not see one when we look + +02:56.570 --> 02:57.530 +in JupyterLab. + +02:57.530 --> 03:00.570 +And the reason is because any file that begins with a dot. + +03:00.600 --> 03:05.820 +A mac considers to be a hidden file, a secret file, and if you want to see that, you'd have to do + +03:05.850 --> 03:10.170 +ls minus A, and then you see all of the hidden files, and you'll now see included. + +03:10.170 --> 03:12.300 +On the bottom left there is EMV. + +03:12.480 --> 03:15.090 +And if I want to see what contents that has. + +03:15.840 --> 03:19.140 +You'll see that it has the OpenAI API key. + +03:19.140 --> 03:22.800 +And of course in your case hopefully you have a full key in there. + +03:22.980 --> 03:24.390 +So that's it. + +03:24.390 --> 03:25.560 +That's the Mac version. + +03:25.560 --> 03:26.760 +Let's look on a PC. + +03:27.270 --> 03:32.370 +Let me flip over to my emulator of PCs over here. + +03:32.370 --> 03:33.390 +Here we have it. + +03:33.570 --> 03:38.190 +So on a PC the thing to use is the program called notepad. + +03:38.190 --> 03:43.800 +And you can press the windows and R to run and then type notepad and press okay. + +03:43.830 --> 03:45.390 +And up comes notepad. + +03:46.110 --> 03:53.550 +Uh, in this notepad we say open AI underscore API underscore key equals. + +03:53.550 --> 03:59.190 +And then you paste in your key which should start scmproj dash blah blah blah blah blah. + +03:59.250 --> 04:00.840 +That should go in here. + +04:00.840 --> 04:04.440 +And then you go file and you go save as. + +04:04.470 --> 04:10.650 +Now you then will have to navigate around to find your way to the LM engineering folder, which I've + +04:10.650 --> 04:12.570 +already got set up right here. + +04:12.570 --> 04:13.140 +And now. + +04:13.140 --> 04:15.000 +There's like a little gotcha here, a little trick. 
+ +04:15.030 --> 04:17.280 +You have to know where it says save as type. + +04:17.310 --> 04:19.890 +You have to change that to be all files. + +04:19.890 --> 04:22.920 +And now in here you can type dot EMV. + +04:23.610 --> 04:27.120 +And I will save this right now. + +04:27.180 --> 04:28.740 +And that should be done. + +04:29.010 --> 04:30.060 +Exit. + +04:30.060 --> 04:31.680 +And now back over here. + +04:31.680 --> 04:38.340 +If I do an LZ we do see that there is a dot EMV and it doesn't have any nasty name to it. + +04:38.340 --> 04:43.920 +So that has been created successfully and that is the final step. + +04:43.920 --> 04:47.610 +We are now finally ready to actually go and do a lab. + +04:47.610 --> 04:48.780 +Congratulations! + +04:48.810 --> 04:49.560 +Congratulations! + +04:49.560 --> 04:53.880 +I guess I should hold the congratulations until you see it working in the lab, but tentative. + +04:53.880 --> 04:57.690 +Congratulations on getting it this far and I can't wait to see you for the next video. + +04:57.690 --> 04:59.250 +When we're actually going to do something. + +04:59.280 --> 05:00.000 +See you there. 
diff --git a/week5/community-contributions/subtitles/srts/60616845/ja_JP.srt b/week5/community-contributions/subtitles/srts/60616845/ja_JP.srt new file mode 100755 index 0000000..57d618d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616845/ja_JP.srt @@ -0,0 +1,241 @@ +WEBVTT + +00:00.050 --> 00:01.400 +ホームストレッチに入った。 + +00:01.400 --> 00:04.880 +これは環境設定の最後のステップで、 簡単なものだ。 + +00:04.880 --> 00:06.980 +ミスもしやすいものだ。 + +00:06.980 --> 00:10.430 +でも、 私が案内するから、 何の問題もないよ。 + +00:10.430 --> 00:15.890 +マックとPCの両方を1つのビデオでお見せします。 + +00:15.890 --> 00:17.150 +それはとてもシンプルなことだ。 + +00:17.150 --> 00:23.960 +これはドットenvファイルを作成すると呼ばれるもので、 ソースコード管理にチェックされることなく、 + +00:23.960 --> 00:31.850 +プロジェクト内でアクセスできる方法で秘密を保存する一般的な方法です。 + +00:31.850 --> 00:33.230 +まずはマックから。 + +00:33.230 --> 00:41.300 +ここで、 LMエンジニアリングに設定したプロジェクトフォルダーに入ってみる。 + +00:41.300 --> 00:44.180 +というわけで、 私たちは今、 プロジェクトのルート・ディレクトリにいる。 + +00:44.390 --> 00:48.890 +ええと、 これからやるのは、 dot envというファイルを作ることだ。 + +00:48.890 --> 00:57.830 +そして、 ここからファイルを作成する方法は、 エディタを呼び出す方法である。 + +00:57.830 --> 01:03.560 +ファイル名は、 これから作成するものをdot EMVと呼び、 文字通りピリオドを打つ。 + +01:03.560 --> 01:04.940 +そしてEMVの文字。 + +01:05.180 --> 01:06.470 +さて、 ここからが問題だ。 + +01:06.470 --> 01:08.510 +これは気難しいステップだ。 + +01:08.510 --> 01:11.750 +ファイル名は正確にドットEMVでなければならない。 + +01:11.840 --> 01:19.010 +私のキーのようにドットEMVやEMVにすることはできないし、 ドットEMVやドットテキストにすることもできない。 + +01:19.010 --> 01:23.600 +ドットEMVでなければ見つからない。 + +01:23.630 --> 01:25.370 +それがルールなんだ。 + +01:25.490 --> 01:30.470 +そうすると、 dot EMVという新しい空のファイルが出てくる。 + +01:31.010 --> 01:32.930 +ここに書くことはとてもシンプルだ。 + +01:32.930 --> 01:35.210 +もちろん、 これらはすべてReadmeに記載されている。 + +01:35.210 --> 01:41.870 +でも、 このファイルにはAIアンダースコアAPIのアンダースコア・キー・イコールを開くだけでいいんだ。 + +01:41.870 --> 01:49.580 +そして今、 私はここに、 できれば前回のビデオから非常に安全に保管しておきたいキーそのものを入れた。 + +01:49.580 --> 01:53.720 +そしてscmprojダッシュで始まる。 + +01:54.110 --> 01:56.360 +それをファイルに入れて、 何も持っていない。 + +01:56.390 --> 01:59.840 +等号の前や後にスペースを入れないでください。 + +01:59.870 --> 02:02.840 +そしてもうひとつ、 本当に不愉快なことがある。 + +02:02.870 --> 02:03.530 
+やった。 + +02:03.530 --> 02:09.590 +マックのノートアプリケーションでノートブックを使っているなら、 僕はそれをよく使うよ。 + +02:09.620 --> 02:19.430 +そこに何かを貼り付けてからエンターキーを押してコピーすると、 気をつけないとハイフンが長いダッシュに置き換わってしまうことがある。 + +02:19.430 --> 02:21.650 +だから、 何でもいいから確認してほしい。 + +02:21.680 --> 02:26.960 +もし、 そのウェブサイトからそのままコピーしたのでなければ、 あなたのキーは何も壊れていないことになる。 + +02:27.050 --> 02:28.490 +あれは本当に難しい問題だった。 + +02:28.490 --> 02:34.190 +一人の生徒が持っていたことを突き止めるのに時間がかかったが、 そういうことが起こるのはまれな状況なのかもしれない。 + +02:34.190 --> 02:37.370 +とにかく、 気をつけていれば、 こんな感じで鍵は手に入る。 + +02:37.400 --> 02:42.560 +マックでは画面の下に書いてあるように、 コマンドOかコントロールOを押すと保存され、 + +02:42.560 --> 02:48.590 +Xはそこから抜けます。 + +02:48.620 --> 02:50.480 +今一つ気をつけなければならないことがある。 + +02:50.480 --> 02:57.530 +LHSをしてみると、 ここにはEMVファイルがないことがわかる。 + +02:57.530 --> 03:00.570 +その理由は、 ドットで始まるファイルなら何でも良いからだ。 + +03:00.600 --> 03:05.820 +マックでは隠しファイル、 シークレットファイルとみなされ、 それを見たい場合は、 + +03:05.850 --> 03:10.170 +lsマイナスAをして、 すべての隠しファイルを見ることになる。 + +03:10.170 --> 03:12.300 +左下にはEMVがある。 + +03:12.480 --> 03:15.090 +そして、 その中身を見たいと思ったら......。 + +03:15.840 --> 03:19.140 +OpenAIのAPIキーがあることがわかるだろう。 + +03:19.140 --> 03:22.800 +そしてもちろん、 あなたの場合、 そこにフルキーがあることを願っている。 + +03:22.980 --> 03:24.390 +それで終わりだ。 + +03:24.390 --> 03:25.560 +それがマック版だ。 + +03:25.560 --> 03:26.760 +PCで見てみよう。 + +03:27.270 --> 03:32.370 +ここでPCのエミュレーターに切り替えてみよう。 + +03:32.370 --> 03:33.390 +これだ。 + +03:33.570 --> 03:38.190 +だから、 PCで使うのはメモ帳というプログラムだ。 + +03:38.190 --> 03:43.800 +そしてwindowsとRを押して実行し、 notepadと入力してokを押す。 + +03:43.830 --> 03:45.390 +そしてメモ帳が登場する。 + +03:46.110 --> 03:53.550 +このメモ帳では、 "open AI underscore API underscore key equals "と言うんだ。 + +03:53.550 --> 03:59.190 +そして、 scmproj dash blah blah blah blahを開始するキーを貼り付ける。 + +03:59.250 --> 04:00.840 +それはここに入れるべきだ。 + +04:00.840 --> 04:04.440 +そしてファイルを開き、 名前を付けて保存する。 + +04:04.470 --> 04:12.570 +次に、 LMエンジニアリング・フォルダを探すためにナビゲートする必要があります。 + +04:12.570 --> 04:13.140 +そして今。 + +04:13.140 --> 04:15.000 +ちょっとしたトリックがあるんだ。 + +04:15.030 --> 04:17.280 +どこに "save as type "と書いてあるのか知らなければならない。 + +04:17.310 --> 04:19.890 
+すべてのファイルに変更する必要があります。 + +04:19.890 --> 04:22.920 +そして、 ここでドットEMVと入力する。 + +04:23.610 --> 04:27.120 +そして、 今すぐこれを保存する。 + +04:27.180 --> 04:28.740 +そして、 そうすべきだ。 + +04:29.010 --> 04:30.060 +出口だ。 + +04:30.060 --> 04:31.680 +そして今、 こっちに戻ってきた。 + +04:31.680 --> 04:38.340 +LZをしてみると、 ドットEMVがあることがわかる。 + +04:38.340 --> 04:43.920 +これが最終ステップだ。 + +04:43.920 --> 04:47.610 +これでようやく、 実際にラボに行く準備が整った。 + +04:47.610 --> 04:48.780 +おめでとう! + +04:48.810 --> 04:49.560 +おめでとう! + +04:49.560 --> 04:53.880 +ラボで動くのを見るまで、 お祝いの言葉は控えておくべきだと思うが、 暫定的なものだ。 + +04:53.880 --> 04:57.690 +ここまで来られたことを祝福し、 次のビデオでお会いするのが待ち遠しいです。 + +04:57.690 --> 04:59.250 +私たちが実際に何かをするとき。 + +04:59.280 --> 05:00.000 +そこで会おう diff --git a/week5/community-contributions/subtitles/srts/60616845/ko_KR.srt b/week5/community-contributions/subtitles/srts/60616845/ko_KR.srt new file mode 100755 index 0000000..612be09 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616845/ko_KR.srt @@ -0,0 +1,259 @@ +WEBVTT + +00:00.050 --> 00:01.400 +거의 다 됐어요 + +00:01.400 --> 00:04.880 +환경 설정의 마지막 단계입니다 쉬운 거죠 + +00:04.880 --> 00:06.980 +실수하기 쉬운 분야이기도 하죠 + +00:06.980 --> 00:10.430 +하지만 제가 안내할 거고 아무 문제 없을 거예요 + +00:10.430 --> 00:15.170 +맥과 PC를 한 비디오에 보여드릴게요 둘 다 어떻게 작동하는지 보실 + +00:15.170 --> 00:15.890 +수 있게요 + +00:15.890 --> 00:17.150 +아주 간단해요 + +00:17.150 --> 00:23.960 +.Infile 생성이라고 하죠 기밀을 저장하는 일반적인 방법이에요 프로젝트 안에서 액세스할 수 있는 + +00:23.960 --> 00:29.990 +방법이죠 소스 코드 컨트롤에 체크되지 않고 Git으로 들어가지 않는 기밀이요 여러분의 기밀은 + +00:29.990 --> 00:31.850 +안전하다는 뜻이죠 + +00:31.850 --> 00:33.230 +맥으로 시작할 거예요 + +00:33.230 --> 00:41.300 +제 프로젝트 폴더로 가겠습니다 LM 엔지니어링으로 설정해둔 거죠 + +00:41.300 --> 00:44.180 +이제 프로젝트 루트 디렉터리에 들어왔어요 + +00:44.390 --> 00:48.890 +.Infi라는 파일을 만들 거예요 + +00:48.890 --> 00:55.040 +파일을 생성하는 방법은 여기서 에디터를 가져오는 방법은 아주 인기 있는 + +00:55.040 --> 00:57.830 +도구인 나노를 사용하는 거죠 + +00:57.830 --> 01:03.560 +파일 이름, 우리가 만들 것은 .MV라고 하겠습니다 말 그대로 마침표죠 + +01:03.560 --> 01:04.940 +EMV라는 글자도 있어요 + +01:05.180 --> 01:06.470 +문제는 이거예요 + +01:06.470 --> 01:08.510 +까다로운 단계예요 + 
+01:08.510 --> 01:11.750 +파일 이름은 정확히 .MV여야 해요 + +01:11.840 --> 01:19.010 +. EMV나 EMV 같은 키도 안 되고 . EMV나 .Text 같은 것도 안 돼요 + +01:19.010 --> 01:23.600 +정확히 .MV가 아니면 찾을 수 없어요 + +01:23.630 --> 01:25.370 +그게 규칙이에요 + +01:25.490 --> 01:30.470 +이제 다 됐으니 .MV라는 새 빈 파일이 뜨네요 + +01:31.010 --> 01:32.930 +여기에 Put은 건 아주 간단해요 + +01:32.930 --> 01:35.210 +물론 이 모든 건 대본 읽기에 나와 있죠 + +01:35.210 --> 01:41.870 +이 파일에 그냥 인공지능 밑줄 API =을 열게요 + +01:41.870 --> 01:49.580 +이제 키를 넣을게요. 지난 비디오에서 아주 안전하게 저장해둔 거죠. + +01:49.580 --> 01:53.720 +스크프록 대시 어쩌고저쩌고로 시작해요 + +01:54.110 --> 01:56.360 +파일에 넣고, 없어요. Put it up Put it it up Put it up Put it up Put it up Put it up Put it up Put it it up Put it it up Put it + +01:56.390 --> 01:59.840 += 기호 앞이나 뒤에 빈칸이 있으면 안 돼요 + +01:59.870 --> 02:02.840 +정말 불쾌한 게 하나 더 있어요 + +02:02.870 --> 02:03.530 +알았어요 + +02:03.530 --> 02:09.590 +Mac의 노트 응용 프로그램에 있는 노트를 사용하면, 전 많이 써요 + +02:09.620 --> 02:16.760 +가끔 뭔가를 붙여넣기 한 후 엔터를 누르고 복사하면 하이픈을 긴 대시로 대체해요 + +02:16.760 --> 02:19.430 +조심하지 않으면요 + +02:19.430 --> 02:21.650 +그러니 확실히 하세요 + +02:21.680 --> 02:26.960 +웹사이트에서 복사한 게 아니라면 열쇠는 멀쩡할 거예요 + +02:27.050 --> 02:28.490 +정말 어려운 문제였어요 + +02:28.490 --> 02:34.190 +그 학생을 찾는 데 시간이 좀 걸렸다는 거요 하지만 그런 일은 아주 드물죠 + +02:34.190 --> 02:37.370 +어쨌든 조심하면 이런 열쇠를 얻을 수 있어요 + +02:37.400 --> 02:42.560 +전체 키가 있고, 버튼을 누르면 화면 하단에 나와 있듯이 + +02:42.560 --> 02:48.590 +Mac에 저장 명령 O나 컨트롤 O가 있고 X가 나오죠 + +02:48.620 --> 02:50.480 +이제 조심해야 할 게 하나 남았어요 + +02:50.480 --> 02:56.570 +LHS를 하면 여기 EMV 파일이 없는 게 보이시죠 JupyterLab에도 + +02:56.570 --> 02:57.530 +없어요 + +02:57.530 --> 03:00.570 +그 이유는 점으로 시작하는 파일은 다 그렇기 때문이죠 + +03:00.600 --> 03:05.820 +Mac은 숨겨진 파일로 간주합니다 비밀 파일이요 그걸 보고 싶다면 ls-a를 + +03:05.850 --> 03:10.170 +하세요 그럼 숨겨진 파일들이 보이죠 이제 포함된 걸 보실 수 있어요 + +03:10.170 --> 03:12.300 +왼쪽 아래가 EMV예요 + +03:12.480 --> 03:15.090 +어떤 콘텐츠가 있는지 보고 싶을 때도요 + +03:15.840 --> 03:19.140 +OpenAI API 키가 있는 게 보이시죠 + +03:19.140 --> 03:22.800 +물론 당신 경우엔 전체 키가 있길 바라요 + +03:22.980 --> 03:24.390 +그게 다예요 + +03:24.390 --> 03:25.560 +맥 버전이에요 + +03:25.560 --> 
03:26.760 +PC에서 찾아보죠 + +03:27.270 --> 03:32.370 +여기 PC 에뮬레이터로 넘어가죠 + +03:32.370 --> 03:33.390 +여기 있네요 + +03:33.570 --> 03:38.190 +PC에서는 메모장이라는 프로그램을 사용해요 + +03:38.190 --> 03:43.800 +실행하려면 윈도우와 R을 누르고 메모장을 입력하고 확인을 누르세요 + +03:43.830 --> 03:45.390 +메모장이 나왔어요 + +03:46.110 --> 03:53.550 +이 메모장에는 열린 인공지능 밑줄 API 키 =이라고 적어요 + +03:53.550 --> 03:59.190 +그런 다음 scmproj 대시 어쩌고저쩌고 키를 붙여넣어요 + +03:59.250 --> 04:00.840 +여기에 넣어야 해요 + +04:00.840 --> 04:04.440 +그런 다음 파일로 가서 as로 저장해요 + +04:04.470 --> 04:10.650 +이제 LM 엔지니어링 폴더로 가는 길을 찾아야 합니다 제가 이미 + +04:10.650 --> 04:12.570 +여기 설정해뒀죠 + +04:12.570 --> 04:13.140 +지금요 + +04:13.140 --> 04:15.000 +여기 속임수가 있어요 + +04:15.030 --> 04:17.280 +어디에 타입으로 저장하라고 돼 있는지 알아야 해요 + +04:17.310 --> 04:19.890 +모든 파일로 바꿔야 해요 + +04:19.890 --> 04:22.920 +이제 여기서 .MV를 입력할 수 있어요 + +04:23.610 --> 04:27.120 +지금 당장 저장할게요 + +04:27.180 --> 04:28.740 +그래야만 해요 + +04:29.010 --> 04:30.060 +출구예요 + +04:30.060 --> 04:31.680 +다시 이쪽으로요 + +04:31.680 --> 04:38.340 +LZ를 하면 .MV가 보이는데 이상한 이름은 없어요 + +04:38.340 --> 04:43.920 +성공적으로 만들어졌고 이게 마지막 단계예요 + +04:43.920 --> 04:47.610 +드디어 실험할 준비가 됐어요 + +04:47.610 --> 04:48.780 +축하해요 + +04:48.810 --> 04:49.560 +축하해요 + +04:49.560 --> 04:53.880 +축하 인사는 실험실에서 효과를 보기 전까진 미뤄야겠네요 + +04:53.880 --> 04:57.690 +여기까지 온 걸 축하해요 다음 영상에서 빨리 보고 싶네요 + +04:57.690 --> 04:59.250 +실제로 뭔가를 할 때요 + +04:59.280 --> 05:00.000 +거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/60616855/en_US.srt b/week5/community-contributions/subtitles/srts/60616855/en_US.srt new file mode 100755 index 0000000..8c25c56 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616855/en_US.srt @@ -0,0 +1,322 @@ +WEBVTT + +00:00.080 --> 00:01.760 +Now I know what you're thinking. + +00:01.790 --> 00:04.190 +We've been building environments for so long. + +00:04.220 --> 00:05.330 +Are we not done yet? + +00:05.360 --> 00:06.740 +We're almost done. + +00:06.770 --> 00:07.940 +We're almost done. 
+ +00:07.940 --> 00:13.100 +There's two more steps to go, and then we are through with environment setup and onto projects. + +00:13.100 --> 00:21.440 +So the next thing we have to do is set up our keys to use open AI so that we can connect from our JupyterLab + +00:21.440 --> 00:26.420 +environment to run with some of the most powerful models on the planet. + +00:26.690 --> 00:33.320 +Uh, if you go to the Readme, uh, in our GitHub repository and you scroll down a little bit, we'll + +00:33.320 --> 00:39.740 +get to the point where there are the links to the major providers, and we're just going to be using + +00:39.740 --> 00:41.480 +open AI for this week. + +00:41.480 --> 00:46.820 +And if you click on that link, it will take us to the open AI page. + +00:46.820 --> 00:52.820 +If you've never been before, you will have to sign up or sign in using Google credentials or creating + +00:52.820 --> 00:53.930 +an account. + +00:54.260 --> 00:59.720 +Now, before we go further, I have to explain something that's pretty confusing, which does sometimes, + +00:59.720 --> 01:11.550 +uh, throw people all of the major closed source providers like OpenAI and anthropic for Cloud and Google + +01:11.550 --> 01:19.110 +for Gemini have two different types of plan that are completely separate and don't have any relationship + +01:19.110 --> 01:19.980 +with each other. + +01:20.010 --> 01:28.380 +One of them relates to their web tools, the front end tools, which you can use to chat with like famously + +01:28.410 --> 01:30.720 +ChatGPT that we all know well. + +01:30.750 --> 01:37.980 +ChatGPT has a pro plan that you can pay for if you want to, to be able to use some of the latest models + +01:37.980 --> 01:42.990 +that we'll look at later, like, oh one preview and GPT four with canvas and others. + +01:43.050 --> 01:46.020 +Uh, that needs a pro plan in some cases. + +01:46.050 --> 01:52.440 +Uh, and the Pro plan costs about $20 a month in the US and similar pricing in other regions. 
+ +01:52.770 --> 01:57.420 +Um, a fixed monthly price, and then you have almost unlimited access to those models. + +01:57.420 --> 01:59.220 +The same is true for anthropic. + +01:59.250 --> 02:00.660 +Uh, same same kind of thing. + +02:00.660 --> 02:03.180 +And, uh, Google Gemini also. + +02:03.830 --> 02:08.720 +But this is entirely separate to the API world. + +02:08.720 --> 02:11.810 +The API world is where you can use. + +02:11.810 --> 02:18.230 +You can call out to the model directly using code, and it's running on the cloud, and it will respond + +02:18.260 --> 02:20.390 +with answers to your questions. + +02:20.390 --> 02:23.330 +It's a different type of pricing plan. + +02:23.330 --> 02:25.880 +It is there's no monthly subscription. + +02:25.880 --> 02:28.670 +There's no no payment whatsoever monthly. + +02:28.670 --> 02:32.240 +But you do have to pay per API request. + +02:32.240 --> 02:39.200 +It's a pay per request situation, and the payment per request will talk more detail about API costs + +02:39.200 --> 02:39.680 +later. + +02:39.680 --> 02:44.390 +But it's really tiny for any project that we work on in this course. + +02:44.390 --> 02:50.600 +For most of the projects, except where I will mention otherwise, it is a fraction of a cent it is + +02:50.600 --> 02:59.300 +tiny, so there is really no harm in trying out some of these APIs and it gets you such power, gives + +02:59.300 --> 03:03.710 +you access to such incredible models, even though some of the models will use their open source are + +03:03.710 --> 03:04.550 +very powerful. + +03:04.550 --> 03:11.010 +These frontier closed source models are super powerful, as you will see, and so it makes sense that + +03:11.010 --> 03:16.050 +you have to pay a small amount for the compute that goes behind the processing to to generate these + +03:16.050 --> 03:16.980 +responses. + +03:17.190 --> 03:20.400 +But having said that, there is a hitch. 
+ +03:20.430 --> 03:26.310 +There is something about this pricing, uh, which is uh, in some cases it's uh, it's new and it may + +03:26.340 --> 03:33.450 +be different in different regions, but certainly for me and for, for OpenAI, uh, they have a minimum + +03:33.450 --> 03:36.180 +that you have to put down to use the API. + +03:36.180 --> 03:39.480 +And in my case right now it is a $5 minimum. + +03:39.480 --> 03:41.880 +So you have to put in at least $5. + +03:41.880 --> 03:46.020 +And then as you use the API, you'll start to charge down against that $5. + +03:46.020 --> 03:51.630 +We're not going to use we're going to use a fraction of that and this entire course, uh, but there'll + +03:51.660 --> 03:54.660 +be plenty of other ways that you can use that $5. + +03:54.660 --> 03:55.500 +I assure you. + +03:55.500 --> 03:57.390 +There are so many projects will build. + +03:57.390 --> 04:03.090 +There'll be so many exercises for you for real, useful things, ways that you'll be able to spend that + +04:03.120 --> 04:05.010 +and have a good outcome from it. + +04:05.010 --> 04:08.450 +So my sales pitch to you would be that it is well worth it. + +04:08.450 --> 04:12.950 +I would do it, but it is, of course a personal choice. + +04:13.220 --> 04:19.400 +If you don't feel comfortable putting down the $5 or you'd rather not, that's fine. + +04:19.520 --> 04:25.160 +You can watch me as I go through the exercises, and what I'll do is I'll show you a way that you can + +04:25.160 --> 04:26.600 +use open source models. + +04:26.600 --> 04:29.330 +Instead, we'll be able to use a llama. + +04:29.360 --> 04:34.070 +The thing that we did at the very, very start, and you'll be able to use that as an alternative to + +04:34.100 --> 04:35.570 +using a frontier model. + +04:35.570 --> 04:38.240 +Now, the results, of course, are not going to be the same. 
+ +04:38.360 --> 04:44.420 +The it's a, uh, the llama models will be using as like a 2 billion parameter model, uh, compared + +04:44.420 --> 04:52.580 +to GPT four, that is rumored to be about 10 trillion parameters, uh, perhaps like 10,000 times as + +04:52.580 --> 04:52.940 +many. + +04:52.940 --> 04:58.190 +So it is a different quality of model, but you would have to put down that $5 up front. + +04:58.190 --> 05:00.200 +So that's the decision for you. + +05:00.290 --> 05:03.590 +And again I my my take is that it is worth it. + +05:04.070 --> 05:09.470 +So in order to do this though, what you need to do is once you've come into the OpenAI platform, you've + +05:09.470 --> 05:11.040 +logged in, you've got to this point. + +05:11.190 --> 05:12.750 +There's just two things you have to do. + +05:12.780 --> 05:15.990 +You first have to go to the settings button up here. + +05:15.990 --> 05:20.520 +And on settings over here you go to billing and on billing. + +05:20.520 --> 05:26.910 +This is where you need to have some credit balance to be able to use the GPT four zero. + +05:27.060 --> 05:30.420 +And the minimum that it will take is $5. + +05:30.420 --> 05:33.600 +You have to press add to credit balance and then punch in a credit card. + +05:33.600 --> 05:37.410 +And it will take $5 and keep auto recharge off. + +05:37.440 --> 05:42.030 +You don't want them snapping from your card without your permission. + +05:42.030 --> 05:44.340 +As I say this, this has been it. + +05:44.550 --> 05:51.450 +I use it all the time, and I barely manage to get beyond a few cents a day because we use the GPT four + +05:51.660 --> 05:56.910 +mini, the cheap version, almost exclusively on this project, on this course. + +05:57.000 --> 05:57.510 +So. + +05:57.510 --> 06:03.870 +So this $5 is more than enough and you'll be able to put it to fine use, I assure you. + +06:03.870 --> 06:09.270 +So if you are comfortable doing this then go ahead, go to billing, put on your $5. 
+ +06:09.510 --> 06:17.040 +And then the next thing is to go to dashboard over here and then down to API keys here. + +06:17.490 --> 06:24.330 +And then you have to come here and press this create new secret key button right here. + +06:24.810 --> 06:26.850 +Uh, you have it set to you. + +06:26.850 --> 06:28.560 +You can give it a name if you wish. + +06:28.560 --> 06:32.850 +You give permissions all and then you press the Create Secret key button. + +06:32.850 --> 06:37.830 +And when you do that, it's going to show you a new secret key that you will have created. + +06:37.830 --> 06:41.610 +And it will give you a chance to copy that secret key into your clipboard. + +06:41.610 --> 06:44.610 +And you need to do that because it won't let you ever see it again. + +06:44.640 --> 06:47.250 +This is the most you'll ever see of that secret key again. + +06:47.280 --> 06:51.450 +It is your secret and you have to copy it and keep it somewhere safe. + +06:51.450 --> 06:53.490 +We will use it in just a second. + +06:53.520 --> 06:57.780 +Once you've done that, we'll be ready to actually put this key to use. + +06:57.780 --> 06:59.010 +And we are almost there. + +06:59.010 --> 07:00.540 +We are almost done. + +07:00.540 --> 07:04.080 +So with any luck, you have now got a secret key copied. + +07:04.080 --> 07:09.570 +You've forked out $5 or local currency equivalent and you're ready to go. + +07:09.570 --> 07:14.190 +And I will see you in the next video for the very final step of setting up our environment. diff --git a/week5/community-contributions/subtitles/srts/60616855/ja_JP.srt b/week5/community-contributions/subtitles/srts/60616855/ja_JP.srt new file mode 100755 index 0000000..5ac02c4 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616855/ja_JP.srt @@ -0,0 +1,265 @@ +WEBVTT + +00:00.080 --> 00:01.760 +今、 あなたが何を考えているか分かる。 + +00:01.790 --> 00:04.190 +私たちは長い間、 環境を構築してきた。 + +00:04.220 --> 00:05.330 +まだ終わっていないのか? 
+ +00:05.360 --> 00:06.740 +あと少しだ。 + +00:06.770 --> 00:07.940 +あと少しだ。 + +00:07.940 --> 00:13.100 +あと2つステップを踏んで、 環境のセットアップを終えてプロジェクトに入る。 + +00:13.100 --> 00:21.440 +そこで次にしなければならないのは、 オープンAIを使用するためのキーをセットアップすることで、 JupyterLab環境から接続して、 + +00:21.440 --> 00:26.420 +地球上で最もパワフルなモデルを使って実行できるようにすることだ。 + +00:26.690 --> 00:33.320 +GitHubリポジトリのReadmeを少し下にスクロールすると、 + +00:33.320 --> 00:41.480 +主要なプロバイダーへのリンクがあります。 + +00:41.480 --> 00:46.820 +そのリンクをクリックすると、 オープンAIのページに移動します。 + +00:46.820 --> 00:53.930 +一度も利用したことがない場合は、 Googleの認証情報を使ってサインアップするか、 アカウントを作成する必要がある。 + +00:54.260 --> 00:59.720 +さて、 先に進む前に、 かなり紛らわしいことを説明しなければならない。 + +00:59.720 --> 01:11.550 +それは、 OpenAIやクラウドのanthropic、 GeminiのGoogleのような主要なクローズド・ソース・プロバイダーには、 2つの異なるタイプのプランがあり、 + +01:11.550 --> 01:19.980 +それらは完全に別個のもので、 お互いに何の関係もない、 ということだ。 + +01:20.010 --> 01:30.720 +そのうちのひとつはウェブ・ツール、 つまりフロント・エンド・ツールに関するもので、 私たちがよく知っているChatGPTのようなチャットに使うことができる。 + +01:30.750 --> 01:37.980 +ChatGPTにはプロプランがあり、 後ほど紹介する最新モデル、 例えばOh one previewやGPT + +01:37.980 --> 01:42.990 +four with canvasなどが使えるようになります。 + +01:43.050 --> 01:46.020 +それは場合によってはプロのプランが必要だ。 + +01:46.050 --> 01:52.440 +ええと、 Proプランはアメリカでは月額約20ドルで、 他の地域でも似たような価格設定です。 + +01:52.770 --> 01:57.420 +月額固定料金で、 その機種にほぼ無制限にアクセスできる。 + +01:57.420 --> 01:59.220 +人間性についても同じことが言える。 + +01:59.250 --> 02:00.660 +ええと、 同じようなことだよ。 + +02:00.660 --> 02:03.180 +それと、 グーグル双子座も。 + +02:03.830 --> 02:08.720 +しかし、 これはAPIの世界とはまったく別の話だ。 + +02:08.720 --> 02:11.810 +APIの世界は、 あなたが使用できる場所です。 + +02:11.810 --> 02:20.390 +コードを使ってモデルに直接呼びかけることができ、 モデルはクラウド上で動いていて、 あなたの質問に対する答えを返してくれる。 + +02:20.390 --> 02:23.330 +異なるタイプの料金プランだ。 + +02:23.330 --> 02:25.880 +毎月の購読料がないのだ。 + +02:25.880 --> 02:28.670 +毎月の支払いは一切ない。 + +02:28.670 --> 02:32.240 +しかし、 APIリクエストごとに支払う必要がある。 + +02:32.240 --> 02:39.680 +リクエストごとの支払いは、 後でAPIのコストについて詳しく説明する。 + +02:39.680 --> 02:44.390 +でも、 このコースで取り組むプロジェクトにとっては、 本当に小さなものなんだ。 + +02:44.390 --> 03:04.550 +そのため、 これらのAPIのいくつかを試してみても本当に損はない。 + +03:04.550 --> 03:16.980 +これらのフロンティア・クローズド・ソース・モデルは、 
おわかりのように超強力であるため、 これらの反応を生成するための処理の背後にあるコンピュートに対して少額を支払わなければならないのは理にかなっている。 + +03:17.190 --> 03:20.400 +しかし、 そうはいっても問題がある。 + +03:20.430 --> 03:26.310 +この価格設定については、 場合によっては新しいもので、 地域によって違うかもしれませんが、 + +03:26.340 --> 03:36.180 +私やOpenAIの場合、 APIを使うために最低限支払わなければならないものがあります。 + +03:36.180 --> 03:39.480 +私の場合は最低5ドルだ。 + +03:39.480 --> 03:41.880 +だから、 少なくとも5ドルは入れなければならない。 + +03:41.880 --> 03:46.020 +そして、 APIを使用するにつれて、 その5ドルをチャージするようになる。 + +03:46.020 --> 03:54.660 +でも、 その5ドルの使い道は他にもたくさんある。 + +03:54.660 --> 03:55.500 +断言するよ。 + +03:55.500 --> 03:57.390 +たくさんのプロジェクトがある。 + +03:57.390 --> 04:05.010 +本当に役に立つこと、 それを使って良い結果を得るための方法など、 たくさんの練習があるはずだ。 + +04:05.010 --> 04:08.450 +だから、 私の売り文句は、 それだけの価値があるということだ。 + +04:08.450 --> 04:12.950 +私ならそうするが、 もちろん個人の選択だ。 + +04:13.220 --> 04:19.400 +もし、 5ドルを置くことに抵抗があったり、 むしろ置きたくないのであれば、 それでも構わない。 + +04:19.520 --> 04:26.600 +オープンソースのモデルを使う方法をお見せします。 + +04:26.600 --> 04:29.330 +その代わり、 ラマを使うことができるだろう。 + +04:29.360 --> 04:35.570 +私たちが一番最初にやったことで、 フロンティアモデルを使う代わりにそれを使うことができる。 + +04:35.570 --> 04:38.240 +もちろん、 結果は同じではない。 + +04:38.360 --> 04:44.420 +GPT4が約10兆のパラメータを使うと噂されているのに比べ、 + +04:44.420 --> 04:52.940 +ラマ・モデルは20億のパラメータを使うことになる。 + +04:52.940 --> 04:58.190 +そのため、 モデルの質は異なるが、 5ドルを前払いする必要がある。 + +04:58.190 --> 05:00.200 +だから、 それが君の決断なんだ。 + +05:00.290 --> 05:03.590 +そして、 繰り返しになるが、 私の考えは、 それだけの価値があるということだ。 + +05:04.070 --> 05:11.040 +そのためには、 OpenAIのプラットフォームに入ってログインし、 ここまで来ればOKです。 + +05:11.190 --> 05:12.750 +やるべきことは2つだけだ。 + +05:12.780 --> 05:15.990 +まず、 ここにある設定ボタンをクリックする。 + +05:15.990 --> 05:20.520 +そしてこちらの設定から課金に行き、 課金に行く。 + +05:20.520 --> 05:26.910 +ここで、 GPTフォーゼロを利用するためには、 ある程度のクレジット残高が必要となる。 + +05:27.060 --> 05:30.420 +最低でも5ドル必要だ。 + +05:30.420 --> 05:33.600 +クレジット残高に追加を押して、 クレジットカードを打ち込まなければならない。 + +05:33.600 --> 05:37.410 +そして5ドルで、 オートリチャージはオフにしておく。 + +05:37.440 --> 05:42.030 +あなたの許可なくカードからスナップされたくないでしょう。 + +05:42.030 --> 05:44.340 +こう言っている間にも、 これがそうだった。 + +05:44.550 --> 05:56.910 +GPTフォーミニ(廉価版)をこのプロジェクト、 このコースでほぼ独占的に使用しているからだ。 + +05:57.000 --> 05:57.510 
+だから + +05:57.510 --> 06:03.870 +だから、 この5ドルでも十分すぎるし、 きっと有効に使えるはずだ。 + +06:03.870 --> 06:09.270 +だから、 もしそうすることに抵抗がないのであれば、 課金に行き、 5ドルを支払えばいい。 + +06:09.510 --> 06:17.040 +そして次に、 ダッシュボードに行き、 APIキーのところに行く。 + +06:17.490 --> 06:24.330 +そして、 ここに来て、 ここにある新しいシークレットキーを作成するボタンを押してください。 + +06:24.810 --> 06:26.850 +ええと、 あなたに設定されています。 + +06:26.850 --> 06:28.560 +お望みなら名前をつけてもいい。 + +06:28.560 --> 06:32.850 +すべての権限を与え、 シークレットキーの作成ボタンを押す。 + +06:32.850 --> 06:37.830 +そうすると、 あなたが作成した新しい秘密鍵が表示されます。 + +06:37.830 --> 06:41.610 +そして、 その秘密鍵をクリップボードにコピーするチャンスを与えてくれる。 + +06:41.610 --> 06:44.610 +そうする必要がある。 + +06:44.640 --> 06:47.250 +この秘密鍵はもう二度と見ることはないだろう。 + +06:47.280 --> 06:51.450 +それはあなたの秘密であり、 コピーして安全な場所に保管しなければならない。 + +06:51.450 --> 06:53.490 +すぐに使います。 + +06:53.520 --> 06:57.780 +それができたら、 このキーを実際に使う準備ができる。 + +06:57.780 --> 06:59.010 +そして、 あと少しだ。 + +06:59.010 --> 07:00.540 +あと少しで終わる。 + +07:00.540 --> 07:04.080 +運がよければ、 これで秘密鍵がコピーできたことになる。 + +07:04.080 --> 07:09.570 +5ドルまたは現地通貨相当額を支払い、 準備は整った。 + +07:09.570 --> 07:14.190 +それではまた次のビデオで、 環境構築の最後のステップをお見せしよう。 diff --git a/week5/community-contributions/subtitles/srts/60616855/ko_KR.srt b/week5/community-contributions/subtitles/srts/60616855/ko_KR.srt new file mode 100755 index 0000000..f40facd --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616855/ko_KR.srt @@ -0,0 +1,307 @@ +WEBVTT + +00:00.080 --> 00:01.760 +무슨 생각 하는지 알아요 + +00:01.790 --> 00:04.190 +우린 오랫동안 환경을 만들었어요 + +00:04.220 --> 00:05.330 +아직 안 끝났어요? 
+ +00:05.360 --> 00:06.740 +거의 다 됐어요 + +00:06.770 --> 00:07.940 +거의 다 됐어요 + +00:07.940 --> 00:13.100 +두 단계가 더 있어요 환경 셋업을 거쳐 프로젝트로 넘어가죠 + +00:13.100 --> 00:21.440 +다음으로 할 일은 오픈 인공지능을 사용할 키를 설정하는 겁니다 유피터랩 환경에서 연결해 + +00:21.440 --> 00:26.420 +지구에서 가장 강력한 모델로 실행할 수 있도록요 + +00:26.690 --> 00:33.320 +깃허브 리포지토리의 리드미로 가셔서 스크롤을 좀 내리시면 주요 공급자로 + +00:33.320 --> 00:39.740 +가는 링크가 있는 곳으로 갈 수 있습니다 이번 주에는 오픈 인공 지능을 사용할 + +00:39.740 --> 00:41.480 +거예요 비트지능 + +00:41.480 --> 00:46.820 +그 링크를 클릭하면 열린 인공지능 페이지가 나와요 + +00:46.820 --> 00:52.820 +처음 가보는 분이라면 구글 자격 증명을 이용해 가입하거나 로그인해야 합니다 계정을 + +00:52.820 --> 00:53.930 +만들거나요 + +00:54.260 --> 00:59.720 +더 진행하기 전에 혼란스러운 점을 설명할게요 사람들이 + +00:59.720 --> 01:11.550 +헷갈릴 때가 있어요 주요 비공개 소스 공급자 오픈AI나 클라우드용 인스로픽 제미니의 구글 같은 두 가지 유형의 + +01:11.550 --> 01:19.980 +사업 계획이 완전히 분리되어 서로 아무런 관련이 없다는 점이죠 + +01:20.010 --> 01:28.380 +그중 하나는 웹 도구와 관련된 프런트엔드 도구입니다 유명한 챗GPT와 채팅할 때 사용할 수 있죠 + +01:28.410 --> 01:30.720 +우리 모두 잘 아는 거예요 + +01:30.750 --> 01:37.980 +챗GPT는 프로 플랜을 결제해 두었기 때문에 나중에 살펴볼 최신 모델을 사용할 수 + +01:37.980 --> 01:42.990 +있습니다 예를 들어 프리뷰 1개와 캔버스 4개 등을요 + +01:43.050 --> 01:46.020 +프로의 계획이 필요한 경우도 있어요 + +01:46.050 --> 01:52.440 +프로 계획의 미국 요금은 월 20달러 정도고 다른 지역도 가격은 비슷해요 + +01:52.770 --> 01:57.420 +고정된 월 단가를 제공하고 그런 모델에 거의 무제한으로 접근할 수 있어요 + +01:57.420 --> 01:59.220 +인류학도 마찬가지예요 + +01:59.250 --> 02:00.660 +늘 똑같죠 + +02:00.660 --> 02:03.180 +구글에서 제미니도 검색해 보세요 + +02:03.830 --> 02:08.720 +이건 API 세계와는 완전히 별개예요 + +02:08.720 --> 02:11.810 +API 세계를 이용하세요 + +02:11.810 --> 02:18.230 +코드를 이용해 모델에 직접 호출할 수 있고 클라우드에서 실행되고 있고 여러분의 질문에 대한 + +02:18.260 --> 02:20.390 +답변으로 응답할 거예요 + +02:20.390 --> 02:23.330 +가격 계획이 달라요 + +02:23.330 --> 02:25.880 +월간 구독이 안 돼요 + +02:25.880 --> 02:28.670 +매달 지급되는 건 없어요 + +02:28.670 --> 02:32.240 +API 요청당 돈을 내야 해요 + +02:32.240 --> 02:39.680 +요청당 지불 상황입니다 요청당 지불은 나중에 API 비용에 대해 더 자세히 말씀드리죠 + +02:39.680 --> 02:44.390 +하지만 이 과목에서 진행하는 프로젝트치고는 너무 작아요 + +02:44.390 --> 02:50.600 +언급할 부분을 제외하고 대부분의 프로젝트에서 1센트도 안 돼요 그러니 + +02:50.600 --> 02:59.300 +이런 API를 
시도해 보는 건 해가 될 게 없죠 엄청난 힘을 얻고 굉장한 모델에 엑세스 권한을 주니까요 + +02:59.300 --> 03:04.550 +오픈 소스를 사용하는 모델은 아주 강력하지만요 + +03:04.550 --> 03:11.010 +이러한 프런티어 폐쇄 소스 모델은 매우 강력합니다. 이러한 반응을 생성하기 위해 + +03:11.010 --> 03:16.980 +프로세싱 뒤에 있는 컴퓨트에 적은 돈을 지불해야 하는 것은 당연하죠. + +03:17.190 --> 03:20.400 +하지만 문제가 있어요 + +03:20.430 --> 03:26.310 +이 가격 책정에는 뭔가가 있어요 어떤 경우에는 새롭고 지역에 + +03:26.340 --> 03:33.450 +따라 다를 수도 있지만 저와 OpenAI는 API 사용을 위해 최소로 + +03:33.450 --> 03:36.180 +책정돼 있어요 + +03:36.180 --> 03:39.480 +제 경우엔 최소 5달러가 필요해요 + +03:39.480 --> 03:41.880 +그래서 적어도 5달러는 넣어야 해요 Put + +03:41.880 --> 03:46.020 +그리고 API 사용 시 5달러로 요금을 낮추기 시작하죠 + +03:46.020 --> 03:51.630 +이 강의에선 그 일부를 사용하지 않을 겁니다 하지만 그 5달러를 + +03:51.660 --> 03:54.660 +사용할 방법은 많아요 + +03:54.660 --> 03:55.500 +장담해요 + +03:55.500 --> 03:57.390 +앞으로 할 일이 많아요 + +03:57.390 --> 04:03.090 +정말 유용한 것들을 위한 많은 연습이 있을 겁니다 그 시간을 유용하게 쓰고 좋은 + +04:03.120 --> 04:05.010 +결과를 얻는 방법이죠 + +04:05.010 --> 04:08.450 +제 영업 전략은 그럴 가치가 있다는 거예요 + +04:08.450 --> 04:12.950 +전 하고 싶지만 물론 개인적 선택이죠 + +04:13.220 --> 04:19.400 +5달러를 내기 불편하거나 내기 싫으면 안 내도 돼요 + +04:19.520 --> 04:25.160 +제가 연습하는 걸 보세요 오픈 소스 모델을 사용하는 방법을 + +04:25.160 --> 04:26.600 +보여드릴게요 + +04:26.600 --> 04:29.330 +대신 라마를 이용하면 돼요 + +04:29.360 --> 04:34.070 +우리가 초창기에 했던 것과 같은 거예요 개척자 모델을 대체할 + +04:34.100 --> 04:35.570 +수 있을 거예요 + +04:35.570 --> 04:38.240 +물론 결과는 똑같지 않을 거예요 + +04:38.360 --> 04:44.420 +라마 모델은 20억 개의 매개 변수 모델을 사용할 겁니다 GPT 4와 비교해서요 + +04:44.420 --> 04:52.940 +소문에 따르면 10조 개의 매개 변수가 있다고 합니다 아마 10,000배 정도 더 많을 거예요 + +04:52.940 --> 04:58.190 +품질이 다른 모델이지만 5달러는 선불로 주셔야 해요 Put it's go + +04:58.190 --> 05:00.200 +그게 당신 결정이에요 + +05:00.290 --> 05:03.590 +다시 말하지만 그럴 가치가 있어요 + +05:04.070 --> 05:09.470 +이걸 하려면 오픈AI 플랫폼에 들어왔을 때 로그인하면 이 지점에 + +05:09.470 --> 05:11.040 +도달해야 해요 + +05:11.190 --> 05:12.750 +두 가지만 하면 돼요 + +05:12.780 --> 05:15.990 +먼저 여기 위에 설정 단추로 가야 해요 + +05:15.990 --> 05:20.520 +설정에서 계산을 하고 계산을 해요 + +05:20.520 --> 05:26.910 +GPT 40을 사용하려면 신용 잔고가 어느 정도 있어야 하죠 + +05:27.060 --> 05:30.420 +최소 5달러는 들 거예요 + 
+05:30.420 --> 05:33.600 +신용 잔액 추가 누르고 신용카드 입력해요 + +05:33.600 --> 05:37.410 +5달러면 되고 자동 재충전도 돼요 + +05:37.440 --> 05:42.030 +허가 없이 카드에서 카드를 뽑으면 안 되니까요 + +05:42.030 --> 05:44.340 +말씀드린 대로 이게 다예요 + +05:44.550 --> 05:51.450 +저도 항상 사용하지만 하루에 몇 센트 이상 쓸 수 있는 것도 아닙니다 GPT 4 미니를 저렴한 버전으로 + +05:51.660 --> 05:56.910 +사용하는데 이 코스에서 이 프로젝트에서 거의 독점적으로 사용하고 있죠 + +05:57.000 --> 05:57.510 +그래서요? + +05:57.510 --> 06:03.870 +이 5달러면 충분하고도 남아요 좋은 일에 쓰실 수 있을 거예요 장담해요 Put + +06:03.870 --> 06:09.270 +이렇게 하는 게 괜찮으면 계산대로 가서 5달러를 내요 Put it's go + +06:09.510 --> 06:17.040 +다음은 이쪽 대시보드로 가서 API 키로 내려가죠 + +06:17.490 --> 06:24.330 +그런 다음 여기로 와서 이걸 눌러야 해요 새 비밀 키 생성하기요 + +06:24.810 --> 06:26.850 +본인에게 맞춰져 있네요 + +06:26.850 --> 06:28.560 +이름을 지어 줘도 돼요 + +06:28.560 --> 06:32.850 +모두 허가를 내린 다음 비밀 키 생성 버튼을 누르세요 + +06:32.850 --> 06:37.830 +그렇게 하면 여러분이 만들게 될 새 비밀 키가 나타나요 + +06:37.830 --> 06:41.610 +비밀 열쇠를 클립보드에 복사할 기회도 되고요 + +06:41.610 --> 06:44.610 +다시는 볼 수 없을 테니 그렇게 해야 해요 + +06:44.640 --> 06:47.250 +이게 그 비밀 열쇠의 마지막 모습일 거예요 + +06:47.280 --> 06:51.450 +당신만의 비밀이니까 복사해서 안전한 곳에 보관해야 해요 + +06:51.450 --> 06:53.490 +잠시 후에 쓸게요 + +06:53.520 --> 06:57.780 +그렇게 하고 나면 이 키를 실제로 사용할 준비가 되죠 Put it + +06:57.780 --> 06:59.010 +거의 다 왔어요 + +06:59.010 --> 07:00.540 +거의 다 됐어요 + +07:00.540 --> 07:04.080 +운이 좋다면 비밀 열쇠를 복사할 수 있죠 + +07:04.080 --> 07:09.570 +5달러나 그 지역 통화와 같은 액수를 내면 준비 완료죠 + +07:09.570 --> 07:14.190 +다음 비디오에서 뵙죠 환경 설정의 마지막 단계에서요 diff --git a/week5/community-contributions/subtitles/srts/60616895/en_US.srt b/week5/community-contributions/subtitles/srts/60616895/en_US.srt new file mode 100755 index 0000000..4b6b4f6 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616895/en_US.srt @@ -0,0 +1,484 @@ +WEBVTT + +00:00.110 --> 00:05.750 +It feels like 100 videos ago that I told you that we were going to have instant gratification with our + +00:05.750 --> 00:13.070 +first project, and you'd be within your rights to feel like it hasn't exactly been instant gratification. 
+ +00:13.070 --> 00:17.960 +But never fear, I'm going to make up for it with a nice juicy project to start us off. + +00:17.960 --> 00:23.960 +So if you already have JupyterLab running like the window up in your browser, please close that. + +00:23.960 --> 00:31.730 +And if you already had your Anaconda prompt in windows, then exit out of that and close that and start + +00:31.730 --> 00:32.180 +again. + +00:32.180 --> 00:39.350 +Bring up a fresh Anaconda prompt in a PC and on a mac, bring up a fresh, uh, terminal, having closed + +00:39.350 --> 00:44.660 +everything down because we've made that env file and I want to start from absolute scratch. + +00:44.690 --> 00:49.850 +Go into your projects folder into LM engineering, and you'll remember the first thing. + +00:49.850 --> 00:54.710 +And you'll always have to do this if you come in fresh with a, with or without your Jupyter Lab running, + +00:54.710 --> 00:57.800 +you type conda activate LMS. + +00:57.800 --> 01:00.080 +That is how you activate your environment. + +01:00.110 --> 01:04.830 +If you're using virtualenv, then there was a different PC and a mac way of doing it in the readme. + +01:04.860 --> 01:10.290 +I do that, and my clue is that the prompt has now changed to LMS. + +01:10.290 --> 01:14.100 +And for windows people in your Anaconda prompt, it should have done the same thing. + +01:14.100 --> 01:17.670 +And now you type the two wonderful words Jupyter Lab. + +01:17.670 --> 01:23.670 +And when you do that, it thinks for a second and up comes JupyterLab right here. + +01:23.700 --> 01:25.350 +Instant gratification. + +01:25.350 --> 01:26.130 +It says. + +01:26.130 --> 01:28.500 +So you may not see this. + +01:28.530 --> 01:31.320 +Actually, it may come up for you the first time just looking like this. + +01:31.320 --> 01:34.560 +Perhaps this might be more similar to what you're seeing right now. + +01:34.620 --> 01:37.380 +So this is Jupyter Lab for some people. 
+ +01:37.380 --> 01:38.610 +This is old news. + +01:38.610 --> 01:39.870 +You've probably used this a lot. + +01:39.900 --> 01:41.370 +For some of you, this might be new. + +01:41.370 --> 01:43.920 +And I'm here to tell you that it is fabulous. + +01:43.950 --> 01:49.080 +It is this very interactive way for data scientists to work with code. + +01:49.290 --> 01:54.210 +It's full of some hairy stuff like use global variables a lot in Jupyter Lab. + +01:54.240 --> 01:59.820 +And we know, as if you're from an engineering background that that's not not very good behavior, but + +01:59.820 --> 02:02.340 +it's something that we do as part of research and development. + +02:02.340 --> 02:03.780 +And so we just do it. + +02:03.780 --> 02:07.900 +As you will see, it means that you can be very productive as you experiment. + +02:07.900 --> 02:12.730 +When we get to week eight, at the end of the course, we're going to be looking at productionizing, + +02:12.730 --> 02:18.460 +the stuff that we do, and we'll be talking about how we migrate from JupyterLab into proper code and + +02:18.460 --> 02:19.510 +deployment and so on. + +02:19.510 --> 02:21.550 +But we don't need to worry about that stuff now. + +02:21.580 --> 02:27.250 +So on the left here, you have something called the File Browser, which is kind of what you would expect + +02:27.280 --> 02:32.230 +actually, you might have come in like this, of course, which is showing you the parent directory. + +02:32.230 --> 02:36.700 +There's a directory for each of the weeks that we'll be going through, and you can see the things we + +02:36.700 --> 02:40.120 +know about the Readme and the environment.yml here as well. + +02:40.120 --> 02:43.960 +And without further ado, we're going to go into week one. + +02:44.200 --> 02:50.860 +Now, if this is your first time ever in JupyterLab, I've made a guide to JupyterLab that you can come + +02:50.860 --> 02:52.330 +into and look at you. 
+ +02:52.330 --> 02:57.370 +Just double click here and it will bring it up, and it will take you on a quick tour of what you need + +02:57.370 --> 02:59.830 +to do, just so that you get a handle of things. + +02:59.830 --> 03:04.600 +For example, you'll learn that you can click in one of these boxes here, which is called a cell. + +03:04.600 --> 03:05.890 +You hold down shift. + +03:05.890 --> 03:12.130 +You press return or enter on your keyboard and it executes it and prints the results for and as you + +03:12.130 --> 03:18.640 +go through, you scroll down and you go on to do the next thing, and you can just come in and execute + +03:18.640 --> 03:20.170 +and see. + +03:20.200 --> 03:27.130 +My favorite fruit is bananas, and you can go through and use this guide to Jupiter as a way to learn + +03:27.130 --> 03:31.270 +some of the tricks of using Jupiter Lab, and I hope that will be helpful for you. + +03:31.270 --> 03:38.290 +But I imagine many of the people on this course will have used Jupiter once or twice and will be familiar + +03:38.290 --> 03:40.420 +with the wonder of it. + +03:40.810 --> 03:45.280 +And so that brings us to our day one project, which is right here. + +03:45.280 --> 03:48.640 +And again, I've slightly cheekily called it instant gratification. + +03:48.640 --> 03:53.020 +I know you've put in a fair amount of work for this, and day one has been a pretty long day, but it + +03:53.050 --> 03:54.670 +hopefully will be worth it. + +03:54.880 --> 04:00.340 +Uh, there's some spiel here to remind you one more time that I'm here to help. + +04:00.340 --> 04:01.510 +There's my email address. + +04:01.510 --> 04:02.560 +This is my LinkedIn. + +04:02.560 --> 04:04.720 +I always love it if people connect with me on LinkedIn. + +04:04.750 --> 04:08.560 +It's nice if you if you put a message there to say hi, but you really don't need to as well. + +04:08.560 --> 04:10.950 +If you just want to just connect, that's good with me. 
+ +04:10.950 --> 04:15.190 +I always will accept connections from people taking this course. + +04:15.190 --> 04:16.090 +Of course. + +04:16.210 --> 04:21.010 +The other thing it tells you right here is that there is another notebook. + +04:21.130 --> 04:24.340 +And sorry, these these Jupyter labs are known as notebooks. + +04:24.430 --> 04:27.040 +People call them for historical reasons. + +04:27.250 --> 04:31.480 +Uh, there's another notebook here called troubleshooting. + +04:31.480 --> 04:36.730 +And troubleshooting is where you will go if you have any problems troubleshooting over here. + +04:36.760 --> 04:37.600 +Uh, begins. + +04:37.630 --> 04:38.260 +Oh, dear. + +04:38.500 --> 04:45.070 +Uh, has step by step to go through and figure out exactly what's going wrong and check your env file. + +04:45.100 --> 04:45.640 +Looks good. + +04:45.640 --> 04:47.470 +I check everything as we go through it. + +04:47.470 --> 04:51.670 +So if any troubles at all, you go to troubleshooting and we'll have it sorted. + +04:51.670 --> 04:55.510 +But otherwise we're starting with the day one notebook. + +04:55.510 --> 05:00.610 +Generally during this course, we'll have a separate notebook for each day so that you can go through + +05:00.640 --> 05:03.850 +and learn from that day and do the exercises. + +05:03.880 --> 05:04.690 +Okay. + +05:04.690 --> 05:08.140 +So the first thing we start with is some imports. + +05:08.140 --> 05:10.510 +I often like to put the imports at the top. + +05:10.510 --> 05:16.210 +You have to execute the code in JupyterLab from the top downwards, and we'll start by clicking in this + +05:16.240 --> 05:17.140 +import cell. + +05:17.150 --> 05:18.230 +Hold down shift. + +05:18.230 --> 05:19.190 +Press return. + +05:19.190 --> 05:20.630 +And that runs. + +05:20.660 --> 05:25.430 +If this gives you an error, then head over to the troubleshooting notebook. 
+ +05:25.460 --> 05:29.270 +I'll tell you what's probably happening, what's going on, and what you need to do about it. + +05:29.300 --> 05:33.650 +Most likely is that for some reason, the conda environment isn't activated. + +05:33.650 --> 05:35.450 +And I'll tell you what to do about that. + +05:36.290 --> 05:36.980 +Okay. + +05:37.010 --> 05:40.580 +The next thing we're going to do is connect to OpenAI. + +05:40.610 --> 05:48.950 +So this is where we are going to to make our connection to the OpenAI API service so that we can make + +05:48.950 --> 05:54.170 +a call to GPT, the frontier model and and ask it questions. + +05:54.200 --> 05:58.040 +Now, of course, we're going to be talking a lot more about OpenAI and GPT and what all these things + +05:58.040 --> 05:58.460 +mean. + +05:58.460 --> 06:00.110 +The idea is to get a flavor for it. + +06:00.110 --> 06:02.030 +Right now, this is just our first lab. + +06:02.630 --> 06:06.230 +So the first thing we do is call something called load dot env. + +06:06.230 --> 06:11.150 +And that looks at the dot env file and loads in our secrets. + +06:11.240 --> 06:14.120 +Right now we have one secret the OpenAI API key. + +06:14.120 --> 06:17.240 +And we're going to load that in and put it in a variable called API key. + +06:17.240 --> 06:20.120 +And we're going to check that it looks decent. + +06:20.210 --> 06:23.190 +It says API key found and looks good so far. + +06:23.220 --> 06:24.570 +If it doesn't say that. + +06:24.570 --> 06:25.320 +If it doesn't say. + +06:25.350 --> 06:28.470 +API key found and looks good so far, head over to troubleshooting. + +06:28.470 --> 06:30.210 +We'll sort it out or. + +06:30.570 --> 06:32.820 +Email me uh, LinkedIn with me. + +06:32.850 --> 06:35.730 +We'll fix it okay. + +06:35.730 --> 06:39.240 +The next thing we do is this simple thing here. + +06:39.270 --> 06:41.040 +We take OpenAI. + +06:41.070 --> 06:44.580 +We we create an instance of it and put it in OpenAI. 
+ +06:44.760 --> 06:46.350 +Uh, this is where we're. + +06:46.380 --> 06:51.000 +Actually making the connection to OpenAI done. + +06:51.780 --> 06:52.650 +All right. + +06:52.650 --> 06:55.800 +So what is this project going to be about today? + +06:55.800 --> 06:57.510 +What what business problem are we actually going. + +06:57.540 --> 06:58.050 +To solve? + +06:58.050 --> 07:00.510 +It's going to be really simple and actually quite cool. + +07:00.510 --> 07:08.010 +We're going to write a program which is going to be able to look at any web page on the internet, scrape + +07:08.010 --> 07:14.400 +the contents of the web page and then summarize it and present back a short summary of that web page. + +07:14.400 --> 07:18.630 +You can think of it like you're building your own little web browser, which is like a summarizing web + +07:18.630 --> 07:19.440 +browser. + +07:19.440 --> 07:21.120 +Uh, you know, the Reader's Digest. + +07:21.120 --> 07:23.430 +It's like a Reader's Digest web browser. + +07:23.810 --> 07:25.100 +Um, that's what we're going to do. + +07:25.100 --> 07:26.450 +That's going to be the project. + +07:26.450 --> 07:33.380 +And we're going to start by defining a class, a class website, and which is going to be a very simple + +07:33.380 --> 07:37.460 +utility class that will represent a website that we've scraped. + +07:37.610 --> 07:42.470 +Uh, it's going to be a class which, which will have a URL, a title and a text. + +07:42.470 --> 07:46.280 +I'm not going to go through this line by line, because you'll be able to read this yourself and get + +07:46.280 --> 07:52.820 +a sense for it as you come through and execute this right after this, uh, and we'll have other labs + +07:52.820 --> 07:54.920 +where we'll go into much more detail on what's going on. + +07:54.920 --> 07:58.370 +This is just about your first your first experience with it. 
+ +07:58.370 --> 08:05.480 +Now in this, uh, in the constructor and where I set up this class, I use a package called Beautifulsoup, + +08:05.480 --> 08:08.840 +which again, I imagine many of you have come across at some point. + +08:08.840 --> 08:15.770 +It's a fabulous little package that's used for parsing web pages and people who do web scraping, uh, + +08:15.770 --> 08:20.120 +on a regular basis, like myself, know Beautifulsoup very well indeed. + +08:20.210 --> 08:25.490 +Uh, and you can use Beautifulsoup to do things like pluck out the title of a web page and get rid of + +08:25.490 --> 08:31.590 +things like scripts and style and images and inputs from a web page and then figure out its text. + +08:31.890 --> 08:37.320 +So that is what we're going to do with our, uh, um, uh, class website. + +08:37.350 --> 08:41.340 +And then what we're going to do finally is try one out. + +08:41.340 --> 08:44.190 +So we're going to create a new website object. + +08:44.190 --> 08:50.370 +And I'm going to pass in this particular website for thoroughly undesirable website happens to be my + +08:50.610 --> 08:51.480 +website. + +08:51.660 --> 08:54.300 +Uh, and it's a very vanilla website. + +08:54.390 --> 08:56.760 +But we'll create a class to represent it. + +08:56.940 --> 09:00.450 +Uh, we'll look at its title and look at the text on the website. + +09:00.450 --> 09:01.740 +Let's see if this works. + +09:01.830 --> 09:03.090 +Yes it works. + +09:03.090 --> 09:04.440 +And here we go. + +09:04.530 --> 09:07.440 +We're seeing the name of the website. + +09:07.560 --> 09:12.570 +Uh, and there is something about what's going on on that website right there. + +09:12.870 --> 09:17.940 +Uh, this is just the contents looking a bit scrappy because it's of course, the contents of that web + +09:17.970 --> 09:22.920 +page with images, JavaScript stylesheets all removed. + +09:23.130 --> 09:27.390 +Okay, so far we've not done anything to do with generative AI or Llms. 
+ +09:27.390 --> 09:31.200 +That's coming right up, and we're going to get to it in the next video. diff --git a/week5/community-contributions/subtitles/srts/60616895/ja_JP.srt b/week5/community-contributions/subtitles/srts/60616895/ja_JP.srt new file mode 100755 index 0000000..f06edaf --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616895/ja_JP.srt @@ -0,0 +1,430 @@ +WEBVTT + +00:00.110 --> 00:05.750 +最初のプロジェクトで即座に満足できると言ったのは100年前のビデオのような気がするし、 + +00:05.750 --> 00:13.070 +即座に満足できたとは言えないと感じるのも当然だろう。 + +00:13.070 --> 00:17.960 +しかし、 心配は無用だ。 その埋め合わせとして、 素晴らしいジューシーなプロジェクトでスタートを切るつもりだ。 + +00:17.960 --> 00:23.960 +もしすでにJupyterLabをブラウザのウィンドウのように起動している場合は、 それを閉じてください。 + +00:23.960 --> 00:32.180 +また、 Anacondaのプロンプトがすでにウィンドウズに表示されている場合は、 それを終了して閉じ、 もう一度やり直してください。 + +00:32.180 --> 00:44.660 +PCでは新しいAnacondaプロンプトを、 Macでは新しいターミナルを立ち上げる。 + +00:44.690 --> 00:49.850 +LMエンジニアリングのプロジェクトフォルダーに入れば、 まず思い出すだろう。 + +00:49.850 --> 00:54.710 +そして、 Jupyter Labを起動しているいないにかかわらず、 conda activate LMSと入力し、 + +00:54.710 --> 00:57.800 +新しい状態でログインした場合は、 常にこの作業を行う必要がある。 + +00:57.800 --> 01:00.080 +そうやって環境を活性化させるのだ。 + +01:00.110 --> 01:04.830 +もしvirtualenvを使っているのであれば、 ReadmeにPCとMacで違うやり方が載っていた。 + +01:04.860 --> 01:10.290 +そうしたら、 プロンプトがLMSに変わったんだ。 + +01:10.290 --> 01:14.100 +アナコンダ・プロンプトのウィンドウズ・ユーザーも同じことをするはずだ。 + +01:14.100 --> 01:17.670 +そして今、 Jupyter Labという2つの素晴らしい言葉を入力する。 + +01:17.670 --> 01:23.670 +そうすると、 一瞬考えて、 JupyterLabがここに出てくる。 + +01:23.700 --> 01:25.350 +即座に満足できる。 + +01:25.350 --> 01:26.130 +と書いてある。 + +01:26.130 --> 01:28.500 +だから、 あなたはこれを見ないかもしれない。 + +01:28.530 --> 01:31.320 +実際、 このように見えるだけで、 初めて出てくるかもしれない。 + +01:31.320 --> 01:34.560 +もしかしたら、 この方が今見ているものに近いかもしれない。 + +01:34.620 --> 01:37.380 +つまり、 これはある人々にとってのJupyter Labなのだ。 + +01:37.380 --> 01:38.610 +これは古いニュースだ。 + +01:38.610 --> 01:39.870 +これはよく使うだろう。 + +01:39.900 --> 01:41.370 +初めて聞く人もいるかもしれない。 + +01:41.370 --> 01:43.920 +そして、 その素晴らしさをお伝えしたい。 + +01:43.950 --> 01:49.080 +データサイエンティストがコードを使って作業するための、 非常にインタラクティブな方法である。 + 
+01:49.290 --> 01:54.210 +Jupyter Labでグローバル変数を多用するような、 毛むくじゃらのものばかりだ。 + +01:54.240 --> 02:02.340 +エンジニア出身者であれば、 それがあまり良い振る舞いでないことは分かっているはずだが、 研究開発の一環として行っていることなのだ。 + +02:02.340 --> 02:03.780 +だから、 そうするんだ。 + +02:03.780 --> 02:07.900 +おわかりのように、 これは実験しながら非常に生産的になれることを意味する。 + +02:07.900 --> 02:19.510 +コースの最後となる第8週目では、 プロダクション化、 つまりJupyterLabから適切なコードやデプロイメントに移行する方法について説明します。 + +02:19.510 --> 02:21.550 +でも、 今はそんなことを心配する必要はない。 + +02:21.580 --> 02:27.250 +左側にはファイル・ブラウザーと呼ばれるものがあり、 + +02:27.280 --> 02:32.230 +このように親ディレクトリが表示されます。 + +02:32.230 --> 02:40.120 +これから進む各週のディレクトリがあり、 Readmeや環境についてわかっていることを見ることができる。 + +02:40.120 --> 02:40.120 +ymlもここにある。 + +02:40.120 --> 02:43.960 +では、 さっそく第1週に入る。 + +02:44.200 --> 02:52.330 +JupyterLabが初めての方は、 JupyterLabのガイドを作りましたので、 そちらをご覧ください。 + +02:52.330 --> 02:59.830 +ここをダブルクリックするだけで、 必要なことを簡単に説明してくれる。 + +02:59.830 --> 03:04.600 +例えば、 セルと呼ばれるこのボックスの一つをクリックできることを学ぶだろう。 + +03:04.600 --> 03:05.890 +シフトを押し続ける。 + +03:05.890 --> 03:12.130 +キーボードのリターンキーを押すかエンターキーを押せば実行され、 + +03:12.130 --> 03:20.170 +結果が表示される。 + +03:20.200 --> 03:27.130 +私の好きな果物はバナナです。 ジュピター・ラボの使い方のコツを学ぶ方法として、 このジュピター・ガイドに目を通していただき、 + +03:27.130 --> 03:31.270 +役立てていただければ幸いです。 + +03:31.270 --> 03:40.420 +しかし、 このコースの参加者の多くは、 一度や二度はジュピターを使ったことがあり、 その素晴らしさを熟知していることだろう。 + +03:40.810 --> 03:45.280 +それで、 初日のプロジェクトがここにある。 + +03:45.280 --> 03:48.640 +そしてまた、 私は少し生意気にも、 それをインスタント・グラティフィケーションと呼んでいる。 + +03:48.640 --> 03:54.670 +あなたがこのためにかなりの労力を費やしてきたことは知っているし、 初日はかなり長い一日だった。 + +03:54.880 --> 04:00.340 +ええと、 ここでもう一度、 私が助けに来たことを思い出してもらうために、 何か挨拶があるんだ。 + +04:00.340 --> 04:01.510 +私のメールアドレスがある。 + +04:01.510 --> 04:02.560 +これは私のリンクトインだ。 + +04:02.560 --> 04:04.720 +LinkedInで私とつながってくれる人はいつも嬉しい。 + +04:04.750 --> 04:08.560 +挨拶がてらメッセージを入れてくれるのはありがたいが、 本当にその必要はない。 + +04:08.560 --> 04:10.950 +ただつながりたいだけなら、 私はそれでいいと思う。 + +04:10.950 --> 04:15.190 +このコースを受講している人たちからのコネクションは常に受け入れるつもりだ。 + +04:15.190 --> 04:16.090 +もちろんだ。 + +04:16.210 --> 04:21.010 +もうひとつは、 ノートブックがもう一冊あるということだ。 + +04:21.130 --> 04:24.340 
+そして申し訳ないが、 このJupyterラボはノートブックとして知られている。 + +04:24.430 --> 04:27.040 +人々は歴史的な理由からそう呼ぶ。 + +04:27.250 --> 04:31.480 +ええと、 ここにトラブルシューティングという別のノートがあります。 + +04:31.480 --> 04:36.730 +そして、 トラブルシューティングは、 トラブルシューティングで問題が発生したときに行く場所だ。 + +04:36.760 --> 04:37.600 +ええと、 始まる。 + +04:37.630 --> 04:38.260 +なんてことだ。 + +04:38.500 --> 04:45.070 +何が間違っているのか、 envファイルをチェックする必要がある。 + +04:45.100 --> 04:45.640 +良さそうだ。 + +04:45.640 --> 04:47.470 +私はすべてをチェックしながら進めていく。 + +04:47.470 --> 04:51.670 +だから、 何かトラブルがあったら、 トラブルシューティングに行けば解決する。 + +04:51.670 --> 04:55.510 +でも、 それ以外は初日のノートから始める。 + +04:55.510 --> 05:00.610 +通常、 このコースでは、 各日ごとに別のノートを用意し、 その日のうちに学習し、 + +05:00.640 --> 05:03.850 +練習問題を解くことができるようにします。 + +05:03.880 --> 05:04.690 +オーケー。 + +05:04.690 --> 05:08.140 +だから、 まずは輸入から始める。 + +05:08.140 --> 05:10.510 +私はよく輸入品をトップに置くのが好きだ。 + +05:10.510 --> 05:17.140 +JupyterLabのコードを上から下に実行する必要があるので、 まずこのインポートセルをクリックする。 + +05:17.150 --> 05:18.230 +シフトを押し続ける。 + +05:18.230 --> 05:19.190 +リターンを押す。 + +05:19.190 --> 05:20.630 +そして走る。 + +05:20.660 --> 05:25.430 +これでエラーが出た場合は、 トラブルシューティングノートブックにアクセスしてください。 + +05:25.460 --> 05:29.270 +おそらく何が起きているのか、 何が起こっているのか、 そしてそれに対して何が必要なのかをお話しします。 + +05:29.300 --> 05:33.650 +何らかの理由でconda環境が有効になっていない可能性が高い。 + +05:33.650 --> 05:35.450 +そのためにどうすればいいかを教えてあげよう。 + +05:36.290 --> 05:36.980 +オーケー。 + +05:37.010 --> 05:40.580 +次にすることは、 OpenAIに接続することだ。 + +05:40.610 --> 05:54.170 +ここでOpenAIのAPIサービスに接続し、 GPT(フロンティアモデル)を呼び出して質問できるようにします。 + +05:54.200 --> 05:58.460 +もちろん、 これからOpenAIやGPTについて、 そしてこれらのことが何を意味するのかについて、 もっとたくさん話していくつもりだ。 + +05:58.460 --> 06:00.110 +その味を知ることだ。 + +06:00.110 --> 06:02.030 +今はまだ、 最初のラボに過ぎない。 + +06:02.630 --> 06:06.230 +そこでまず、 load dot envというものを呼び出す。 + +06:06.230 --> 06:11.150 +そして、 ドットenvファイルを見て、 我々の秘密をロードする。 + +06:11.240 --> 06:14.120 +今は、 OpenAIのAPIキーという秘密がある。 + +06:14.120 --> 06:17.240 +それを読み込んで、 APIキーという変数に入れる。 + +06:17.240 --> 06:20.120 +そして、 それがまともに見えるかどうかをチェックするんだ。 + +06:20.210 --> 06:23.190 +APIキーが見つかりました。 + +06:23.220 --> 06:24.570 +もしそう書かれていなければ + +06:24.570 
--> 06:25.320 +もし書かれていなければ + +06:25.350 --> 06:28.470 +APIキーが見つかり、 今のところ問題なさそうだ。 + +06:28.470 --> 06:30.210 +解決するか、 それとも + +06:30.570 --> 06:32.820 +メールで連絡してくれ。 + +06:32.850 --> 06:35.730 +大丈夫だ。 + +06:35.730 --> 06:39.240 +次にすることは、 このシンプルなことだ。 + +06:39.270 --> 06:41.040 +我々はOpenAIを採用している。 + +06:41.070 --> 06:44.580 +そのインスタンスを作ってOpenAIに置く。 + +06:44.760 --> 06:46.350 +ええと、 ここが僕らのいる場所なんだ。 + +06:46.380 --> 06:51.000 +実際にOpenAIとの接続が完了した。 + +06:51.780 --> 06:52.650 +分かった。 + +06:52.650 --> 06:55.800 +さて、 今日はどんな企画にしようかな? + +06:55.800 --> 06:57.510 +私たちは実際にどのようなビジネス上の問題を解決しようとしているのか。 + +06:57.540 --> 06:58.050 +解決するため? + +06:58.050 --> 07:00.510 +本当にシンプルで、 とてもクールなものになるだろう。 + +07:00.510 --> 07:08.010 +インターネット上のあらゆるウェブページを見て、 ウェブページの内容をスクレイピングし、 それを要約して、 + +07:08.010 --> 07:14.400 +そのウェブページの短い要約を返すことができるプログラムを書こうと思っている。 + +07:14.400 --> 07:19.440 +自分自身の小さなウェブ・ブラウザーを構築しているようなもので、 要約ウェブ・ブラウザーのようなものだと考えればいい。 + +07:19.440 --> 07:21.120 +ええと、 ほら、 リーダーズ・ダイジェスト。 + +07:21.120 --> 07:23.430 +リーダーズ・ダイジェストのウェブブラウザのようなものだ。 + +07:23.810 --> 07:25.100 +ええと、 そうするつもりなんだ。 + +07:25.100 --> 07:26.450 +それがこのプロジェクトになる。 + +07:26.450 --> 07:33.380 +そして、 クラスwebsiteを定義することから始めよう。 これは、 スクレイピングしたウェブサイトを表す、 + +07:33.380 --> 07:37.460 +とてもシンプルなユーティリティ・クラスになる。 + +07:37.610 --> 07:42.470 +URL、 タイトル、 テキストを持つクラスになります。 + +07:42.470 --> 07:46.280 +一行ずつ説明するつもりはない。 + +07:46.280 --> 07:54.920 +この後、 他のラボでもっと詳しく説明する予定だからだ。 + +07:54.920 --> 07:58.370 +これはまさに、 あなたにとって初めての経験なのだ。 + +07:58.370 --> 08:08.840 +さて、 このコンストラクタとクラスのセットアップでは、 Beautifulsoupというパッケージを使っている。 + +08:08.840 --> 08:15.770 +これはウェブページの解析に使われる素晴らしい小さなパッケージで、 私のようにウェブスクレイピングを日常的に行っている人間は、 + +08:15.770 --> 08:20.120 +Beautifulsoupをよく知っている。 + +08:20.210 --> 08:25.490 +Beautifulsoupを使えば、 ウェブページのタイトルを抜き出したり、 ウェブページからスクリプトやスタイルや画像や入力を取り除いて、 + +08:25.490 --> 08:31.590 +そのテキストを見つけ出すことができる。 + +08:31.890 --> 08:37.320 +だから、 それが僕らの、 あー、 あー、 あー、 クラスのウェブサイトでやろうとしていることなんだ。 + +08:37.350 --> 08:41.340 +そして、 最後にひとつ試してみよう。 + +08:41.340 --> 08:44.190 +それでは、 
新しいウェブサイト・オブジェクトを作りましょう。 + +08:44.190 --> 08:51.480 +そして、 私は徹底的に望ましくないウェブサイトのために、 この特定のウェブサイトに渡すつもりだ私のウェブサイトであることが起こる。 + +08:51.660 --> 08:54.300 +ええと、 とてもバニラなウェブサイトなんだ。 + +08:54.390 --> 08:56.760 +しかし、 それを表現するクラスを作ることにしよう。 + +08:56.940 --> 09:00.450 +タイトルを見て、 ウェブサイトの文章を見てみよう。 + +09:00.450 --> 09:01.740 +うまくいくかどうか見てみよう。 + +09:01.830 --> 09:03.090 +そうだ。 + +09:03.090 --> 09:04.440 +そして、 これだ。 + +09:04.530 --> 09:07.440 +ウェブサイトの名前を見ている。 + +09:07.560 --> 09:12.570 +あのウェブサイトで何が起こっているのか、 何か書いてあるんだ。 + +09:12.870 --> 09:22.920 +画像やJavaScriptのスタイルシートがすべて削除されたウェブページのコンテンツだからだ。 + +09:23.130 --> 09:27.390 +これまでのところ、 私たちは生成AIやLlmsに関することは何もしていない。 + +09:27.390 --> 09:31.200 +それは次のビデオで。 diff --git a/week5/community-contributions/subtitles/srts/60616895/ko_KR.srt b/week5/community-contributions/subtitles/srts/60616895/ko_KR.srt new file mode 100755 index 0000000..77c6a26 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616895/ko_KR.srt @@ -0,0 +1,478 @@ +WEBVTT + +00:00.110 --> 00:05.750 +첫 프로젝트가 성공하면 바로 만족할 거라고 말한 게 100편 + +00:05.750 --> 00:13.070 +전인 것 같은데 그렇게 바로 만족스럽지 않다고 느끼는 건 여러분 권리예요 + +00:13.070 --> 00:17.960 +하지만 걱정 마세요, 멋진 프로젝트로 만회할 테니까요 + +00:17.960 --> 00:23.960 +이미 브라우저의 창처럼 JupyterLab이 돌아가고 있다면 그걸 닫으세요 + +00:23.960 --> 00:32.180 +이미 윈도우에 아나콘다 프롬프트가 있다면 그것을 종료하고 닫았다가 다시 시작하세요 + +00:32.180 --> 00:39.350 +PC와 Mac에 새로운 아나콘다 프롬프트를 불러오고 새로운 터미널을 불러옵니다 모든 걸 닫았어요 + +00:39.350 --> 00:44.660 +저 부럽지 않은 파일을 만들었으니까요 완전히 처음부터 시작하고 싶어요 + +00:44.690 --> 00:49.850 +LM 엔지니어링의 프로젝트 폴더로 가세요 첫 번째 것을 기억하실 거예요 + +00:49.850 --> 00:54.710 +그리고 항상 이걸 해야 합니다 주피터 랩이 실행 중이건 아니건 콘다를 + +00:54.710 --> 00:57.800 +입력해 LMS를 활성화하세요 + +00:57.800 --> 01:00.080 +그렇게 환경을 활성화하는 거죠 + +01:00.110 --> 01:04.830 +가상env를 사용한다면 리드메에 다른 PC와 맥 방식이 있어요 + +01:04.860 --> 01:10.290 +그렇게 하면 단서는 프롬프트가 LMS로 바뀌었다는 거죠 + +01:10.290 --> 01:14.100 +아나콘다 프롬프트에서 윈도우 사람들을 위해 같은 것을 해야 하죠 + +01:14.100 --> 01:17.670 +이제 멋진 두 단어를 입력하세요 주피터 연구소 + +01:17.670 --> 01:23.670 +그렇게 하면 잠시 생각을 하고 주피터랩이 나타나죠 + +01:23.700 --> 
01:25.350 +즉각적인 만족감이죠 + +01:25.350 --> 01:26.130 +그렇게 쓰여 있어요 + +01:26.130 --> 01:28.500 +못 보실지도 몰라요 + +01:28.530 --> 01:31.320 +사실, 이렇게만 봐도 처음 떠오를지도 몰라요 + +01:31.320 --> 01:34.560 +지금 보시는 것과 비슷할지도 몰라요 + +01:34.620 --> 01:37.380 +어떤 사람들에게는 여기가 주피터 연구소예요 + +01:37.380 --> 01:38.610 +이건 옛날 뉴스예요 + +01:38.610 --> 01:39.870 +많이 써보셨죠? + +01:39.900 --> 01:41.370 +새로운 분들도 계실 거예요 + +01:41.370 --> 01:43.920 +정말 멋지다는 말을 해 주러 왔어요 + +01:43.950 --> 01:49.080 +데이터 과학자들이 코드로 작업하는 상호작용적인 방법이죠 + +01:49.290 --> 01:54.210 +주피터 랩은 전면 변수를 많이 사용하는 등 아슬아슬한 것으로 가득하죠 + +01:54.240 --> 01:59.820 +공학 전공자로서 봤을 때 좋은 행동은 아니지만 연구 개발의 + +01:59.820 --> 02:02.340 +일환으로 하는 일이에요 + +02:02.340 --> 02:03.780 +그래서 그냥 했어요 + +02:03.780 --> 02:07.900 +곧 보시겠지만 실험하면서 아주 생산적일 수 있다는 뜻이에요 + +02:07.900 --> 02:12.730 +과정의 끝인 8주 차에는 프로덕션화와 우리가 하는 걸 살펴볼 겁니다 + +02:12.730 --> 02:18.460 +JupyterLab에서 제대로 된 코드와 배포로 마이그레이션하는 방법도 얘기할 + +02:18.460 --> 02:19.510 +거고요 + +02:19.510 --> 02:21.550 +하지만 지금은 그런 걱정 안 해도 돼요 + +02:21.580 --> 02:27.250 +왼쪽에 파일 브라우저라는 게 있어요 사실 기대하시는 거죠 + +02:27.280 --> 02:32.230 +이렇게 들어와 부모 디렉토리를 보여주고 있죠 + +02:32.230 --> 02:36.700 +우리가 검토할 매주 디렉터리가 있어요 리드메이트와 환경에 관해 + +02:36.700 --> 02:40.120 +우리가 아는 걸 볼 수 있죠 yml도 여기 있어요 + +02:40.120 --> 02:43.960 +그럼 지체 없이 첫째 주를 시작하죠 + +02:44.200 --> 02:50.860 +주피터랩에 처음 오신 거라면 제가 안내서를 만들어 놨으니 + +02:50.860 --> 02:52.330 +와서 보세요 + +02:52.330 --> 02:57.370 +여길 더블 클릭하면 get이 나옵니다 여러분이 해야 할 일을 간단히 보여줍니다 + +02:57.370 --> 02:59.830 +여러분이 잘 처리할 수 있도록요 + +02:59.830 --> 03:04.600 +예를 들어, 여기 있는 상자 중 하나를 클릭할 수 있어요 셀이라고 하죠 + +03:04.600 --> 03:05.890 +시동을 꺼요 + +03:05.890 --> 03:12.130 +자판에서 반환이나 입력을 누르면 실행되고 결과를 출력합니다 + +03:12.130 --> 03:18.640 +그리고 스크롤을 내리면 다음 작업이 나옵니다 실행하고 볼 수 + +03:18.640 --> 03:20.170 +있죠 + +03:20.200 --> 03:27.130 +제가 제일 좋아하는 과일은 바나나예요 이 주피터 가이드북을 참고해서 주피터 랩을 + +03:27.130 --> 03:31.270 +사용하는 기술을 배우세요 도움이 되기를 바라요 + +03:31.270 --> 03:38.290 +하지만 이 항로에 있는 많은 사람이 목성을 한두 번 사용해 봤을 테고 그 경이로움에 + +03:38.290 --> 03:40.420 +익숙할 거예요 + +03:40.810 --> 03:45.280 +이제 첫날 프로젝트 
차례입니다 바로 여기요 + +03:45.280 --> 03:48.640 +전 좀 뻔뻔하게도 즉각적인 만족감이라고 했죠 + +03:48.640 --> 03:53.020 +이 집을 위해 많은 일을 한 거 알아요 첫날은 꽤 긴 하루였지만 그만한 가치가 + +03:53.050 --> 03:54.670 +있길 바라요 Put + +03:54.880 --> 04:00.340 +다시 한번 말하지만 난 도우러 온 거예요 + +04:00.340 --> 04:01.510 +제 이메일 주소예요 + +04:01.510 --> 04:02.560 +이건 제 링크트인이죠 + +04:02.560 --> 04:04.720 +링크드인에서 사람들이 저와 소통하는 걸 좋아해요 + +04:04.750 --> 04:08.560 +for 안녕이라는 메시지를 넣는 것도 좋지만 꼭 그럴 필요는 없어요 + +04:08.560 --> 04:10.950 +그냥 교감만 하고 싶다면 그렇게 하세요 + +04:10.950 --> 04:15.190 +이 수업을 듣는 학생들의 인맥은 언제든 환영이에요 + +04:15.190 --> 04:16.090 +물론이죠 + +04:16.210 --> 04:21.010 +여기서 또 알 수 있는 건 다른 공책이 있다는 거예요 + +04:21.130 --> 04:24.340 +이 주피터 연구소들은 공책이라고 해요 + +04:24.430 --> 04:27.040 +역사적인 이유로 그렇게 부르죠 + +04:27.250 --> 04:31.480 +문제 해결이라는 공책이 하나 더 있어요 + +04:31.480 --> 04:36.730 +문제 해결은 여러분이 여기서 문제를 해결하는 곳이죠 + +04:36.760 --> 04:37.600 +시작이죠 + +04:37.630 --> 04:38.260 +어떡해요 + +04:38.500 --> 04:45.070 +단계별로 뭐가 잘못됐는지 파악하고 부럽지 않은 파일을 확인하죠 + +04:45.100 --> 04:45.640 +맛있어 보여요 + +04:45.640 --> 04:47.470 +모든 걸 확인하면서 진행해요 + +04:47.470 --> 04:51.670 +문제가 생기면 해결을 하면 됩니다. 우리가 해결할게요. + +04:51.670 --> 04:55.510 +하지만 그렇지 않으면 첫날의 공책부터 시작해야죠 + +04:55.510 --> 05:00.610 +이 과정에서는 매일 따로 노트를 준비합니다 그날의 + +05:00.640 --> 05:03.850 +경험을 훑어보며 연습할 수 있죠 + +05:03.880 --> 05:04.690 +네 + +05:04.690 --> 05:08.140 +우선 수입품부터 살펴볼게요 + +05:08.140 --> 05:10.510 +전 수입품을 상위권에 두는 걸 좋아해요 Put it up Put it + +05:10.510 --> 05:16.210 +주피터랩에서 코드를 실행해야 합니다 위에서부터요 이 임포트 셀을 클릭하는 것으로 + +05:16.240 --> 05:17.140 +시작하죠 + +05:17.150 --> 05:18.230 +기어를 낮추세요 + +05:18.230 --> 05:19.190 +회신을 누르세요 + +05:19.190 --> 05:20.630 +잘 달리네요 + +05:20.660 --> 05:25.430 +에러가 발생하면 문제 해결 노트북으로 가세요. 
HDPULL GAME + +05:25.460 --> 05:29.270 +어떤 일이 벌어질지 어떻게 해야 할지 알려줄게요 + +05:29.300 --> 05:33.650 +콘다 환경이 활성화되지 않는 게 가장 큰 이유죠 + +05:33.650 --> 05:35.450 +어떻게 할지 알려줄게요 + +05:36.290 --> 05:36.980 +네 + +05:37.010 --> 05:40.580 +다음으로 오픈AI에 연결할 거예요 + +05:40.610 --> 05:48.950 +여기서 오픈AI API 서비스에 연결할 겁니다 프론티어 모델인 GPT에 + +05:48.950 --> 05:54.170 +전화를 걸어 질문을 할 수 있도록요 + +05:54.200 --> 05:58.460 +오픈AI와 GPT에 대해 더 자세히 이야기할 텐데요 + +05:58.460 --> 06:00.110 +Get it의 맛을 내는 거죠 + +06:00.110 --> 06:02.030 +지금은 첫 실험실이에요 + +06:02.630 --> 06:06.230 +가장 먼저 로드.Infsi라는 걸 호출해요 + +06:06.230 --> 06:11.150 +.Infile을 보고 비밀을 로드하죠 + +06:11.240 --> 06:14.120 +오픈AI API 키라는 비밀이 있어요 + +06:14.120 --> 06:17.240 +그걸 로드해 API 키라는 변수에 넣을 거예요 Put it + +06:17.240 --> 06:20.120 +잘 나왔는지 확인해 보죠 + +06:20.210 --> 06:23.190 +API 키를 찾았고 지금까지는 괜찮아 보이네요 + +06:23.220 --> 06:24.570 +그렇게 안 쓰여 있다면요 + +06:24.570 --> 06:25.320 +안 적혀 있으면요 + +06:25.350 --> 06:28.470 +API 키를 찾았고 지금까지는 좋습니다. 문제 해결로 가보세요. HDP, HDP.M. + +06:28.470 --> 06:30.210 +우리가 해결할게요 + +06:30.570 --> 06:32.820 +제 이메일로 링크트인을 보내주세요 + +06:32.850 --> 06:35.730 +우리가 고칠게요 + +06:35.730 --> 06:39.240 +다음 할 일은 간단한 거예요 + +06:39.270 --> 06:41.040 +오픈아이를 점령하죠 + +06:41.070 --> 06:44.580 +인스턴스를 생성해 OpenAI에 넣죠 + +06:44.760 --> 06:46.350 +여기가 우리 집이에요 + +06:46.380 --> 06:51.000 +오픈라이와의 연결을 완료했어요 + +06:51.780 --> 06:52.650 +좋아요 + +06:52.650 --> 06:55.800 +오늘 이 프로젝트는 어떤 건가요? + +06:55.800 --> 06:57.510 +어떤 사업상의 문제를 다루고 있나요? + +06:57.540 --> 06:58.050 +해결요? + +06:58.050 --> 07:00.510 +아주 간단하고 사실 꽤 멋질 거예요 + +07:00.510 --> 07:08.010 +프로그램을 만들어서 인터넷상의 모든 웹 페이지를 살펴보고 그 내용을 긁어내서 + +07:08.010 --> 07:14.400 +요약하고 그 웹 페이지의 짧은 요약을 보여드릴 거예요 + +07:14.400 --> 07:18.630 +작은 웹 브라우저를 만든다고 생각해 보세요 웹 브라우저 요약 + +07:18.630 --> 07:19.440 +같은 거죠 + +07:19.440 --> 07:21.120 +리더스 다이제스트 알죠? 
+ +07:21.120 --> 07:23.430 +라이더스 다이제스트 웹 브라우저 같아요 + +07:23.810 --> 07:25.100 +그게 우리가 할 일이에요 + +07:25.100 --> 07:26.450 +그게 프로젝트가 될 거예요 + +07:26.450 --> 07:33.380 +클래스를 정의하는 것으로 시작할게요 클래스 웹사이트요 아주 간단한 유틸리티 클래스가 될 + +07:33.380 --> 07:37.460 +겁니다 우리가 긁어모은 웹사이트를 나타내는 거죠 + +07:37.610 --> 07:42.470 +클래스가 될 거예요 URL과 제목, 텍스트가 있는 클래스요 + +07:42.470 --> 07:46.280 +한 줄씩 살펴보진 않겠습니다 여러분이 직접 읽어보시고 + +07:46.280 --> 07:52.820 +바로 실행하실 때 감을 잡으실 수 있을 테니까요 get it 이후엔 다른 랩에서 더 자세히 + +07:52.820 --> 07:54.920 +살펴볼 거예요 + +07:54.920 --> 07:58.370 +처음 경험하는 거잖아요 그렇죠 + +07:58.370 --> 08:05.480 +생성자에서 이 클래스를 설정했던 곳에서 뷰티풀소우프라는 패키지를 사용했어요 + +08:05.480 --> 08:08.840 +많은 분들이 언젠가 경험하셨을 거예요 + +08:08.840 --> 08:15.770 +웹 페이지 파싱에 사용되는 멋진 패키지예요 웹 스크래핑을 하는 사람들은 + +08:15.770 --> 08:20.120 +저처럼 정기적으로 뷰티풀 슈프를 잘 알죠 + +08:20.210 --> 08:25.490 +뷰티풀 get을 이용해서 웹 페이지의 타이틀을 제거하고 스크립트와 + +08:25.490 --> 08:31.590 +스타일, 이미지 입력을 제거하고 텍스트를 알아낼 수 있어요 + +08:31.890 --> 08:37.320 +그래서 우리 클래스 웹사이트로 그걸 할 거예요 + +08:37.350 --> 08:41.340 +마지막으로 한 번 시험해 보죠 + +08:41.340 --> 08:44.190 +새 웹사이트 객체를 생성할게요 + +08:44.190 --> 08:50.370 +이 특정 웹사이트를 넘기겠습니다 철저히 바람직하지 않은 웹사이트가 제 웹사이트가 + +08:50.610 --> 08:51.480 +된 거죠 + +08:51.660 --> 08:54.300 +웹사이트도 아주 평범해요 + +08:54.390 --> 08:56.760 +하지만 그걸 대표할 클래스를 만들 거예요 + +08:56.940 --> 09:00.450 +제목과 웹 사이트의 텍스트를 보죠 + +09:00.450 --> 09:01.740 +이게 통할지 보죠 + +09:01.830 --> 09:03.090 +네, 작동해요 + +09:03.090 --> 09:04.440 +자, 보세요 + +09:04.530 --> 09:07.440 +웹사이트 이름이 보이네요 + +09:07.560 --> 09:12.570 +그 웹사이트에 뭔가 일어나고 있어요 + +09:12.870 --> 09:17.940 +음, 비트가 좀 허술해 보이네요. 이미지와 자바스크립트 + +09:17.970 --> 09:22.920 +스타일시트가 모두 삭제되었기 때문이죠. 
+ +09:23.130 --> 09:27.390 +지금까지는 재생 인공지능이나 LAM을 사용하지 않았어요 + +09:27.390 --> 09:31.200 +Get in get이 곧 나올 겁니다 다음 비디오에서 알아보죠 diff --git a/week5/community-contributions/subtitles/srts/60616927/en_US.srt b/week5/community-contributions/subtitles/srts/60616927/en_US.srt new file mode 100755 index 0000000..783962c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616927/en_US.srt @@ -0,0 +1,628 @@ +WEBVTT + +00:00.050 --> 00:03.890 +It's time for our first LM experiment at this point. + +00:03.980 --> 00:08.330 +So some of this you may know well, you may know very well already. + +00:08.360 --> 00:11.180 +For some people this might be new, but let me just explain. + +00:11.240 --> 00:13.550 +The models that we're going to be using. + +00:13.550 --> 00:17.090 +These frontier models have been trained in a particular way. + +00:17.090 --> 00:23.090 +That means that they expect two different types of instruction from us the user. + +00:23.120 --> 00:27.590 +One of them is known as the system prompt, and one of them is known as the user prompt. + +00:27.620 --> 00:33.980 +The system prompt is something which explains the context of this conversation. + +00:33.980 --> 00:39.440 +It tells them what kind of task they're performing, what tone they should use, and we'll be experimenting + +00:39.440 --> 00:44.780 +with what it means to to change a system prompt and what kind of information that you can include in + +00:44.780 --> 00:47.480 +the system prompt throughout this course. + +00:47.570 --> 00:51.230 +The user prompt is the actual conversation itself. + +00:51.230 --> 00:55.040 +And in our case right now, it's going to just be the the conversation starter. + +00:55.040 --> 01:02.790 +And the role of the LM of the large language model is to figure out what is the most likely way that + +01:02.790 --> 01:05.730 +it should respond, given this user prompt. 
+ +01:05.730 --> 01:11.580 +If it's given this user prompt, and in the context of this system prompt, what is the most likely + +01:11.580 --> 01:13.410 +next text that will come after it? + +01:13.410 --> 01:17.460 +That would come from an assistant responding to this user. + +01:18.000 --> 01:22.950 +So that's the difference between the system prompt that sets the context, the user prompt that is the + +01:22.950 --> 01:24.660 +conversation starter. + +01:24.810 --> 01:26.790 +So we're going to set a system prompt. + +01:26.790 --> 01:27.960 +And this is what it's going to say. + +01:27.960 --> 01:33.870 +It's going to say you are an assistant that analyzes the contents of a website and provides a short + +01:33.870 --> 01:37.590 +summary, ignoring texts that might be navigation related. + +01:37.590 --> 01:39.630 +Respond in markdown. + +01:39.660 --> 01:42.660 +You'll see more of what that means in in just a second. + +01:42.660 --> 01:46.800 +So that is our system prompt for the user prompt. + +01:46.800 --> 01:51.180 +It's going to take as a we're going to write a function user prompt for. + +01:51.180 --> 01:54.990 +And it's going to take a website as the argument to the function. + +01:54.990 --> 02:00.460 +And it's going to say you are looking at a website titled The Website. + +02:01.000 --> 02:03.760 +The contents of this website is as follows. + +02:03.760 --> 02:10.240 +Please provide a short summary of the website in markdown if it includes news or announcements. + +02:10.240 --> 02:17.020 +Summarize these two and we then take the text from the website object that Beautifulsoup plucked out + +02:17.020 --> 02:22.990 +for us, and we add that into the user prompt and we return that user prompt. + +02:23.290 --> 02:28.870 +So let's just quickly let's run that cell right now and let's just have a look now. + +02:28.870 --> 02:33.520 +So after doing that, if I just look at what system Prompt has. 
+ +02:35.560 --> 02:38.470 +It has that text of course that we just said. + +02:38.650 --> 02:45.490 +And now if you remember earlier on we created a new website object and we stored it in this variable + +02:45.490 --> 02:46.060 +editor. + +02:46.060 --> 02:55.540 +So if I come here I should be able to say user prompt for and then pass in the object Ed. + +02:56.260 --> 02:58.690 +And what we'll get is a prompt. + +02:58.720 --> 03:03.340 +It might be easier if I print this so that it prints out empty lines. + +03:05.440 --> 03:09.070 +And here is the user prompt string that we've created. + +03:09.070 --> 03:11.440 +It says you're looking at a website titled blah blah blah. + +03:11.470 --> 03:13.510 +The contents of this website is as follows. + +03:13.510 --> 03:15.220 +Please provide a short summary. + +03:15.250 --> 03:18.760 +Look, it looks like we should have a space right here, otherwise it might be confusing. + +03:18.760 --> 03:19.810 +Let's try that again. + +03:21.670 --> 03:25.960 +That's always why it's worth printing things as you go, because you'll spot little inconsistencies + +03:25.960 --> 03:26.770 +like that. + +03:28.000 --> 03:30.580 +I think it'll be nicer, actually, now that I look at that. + +03:30.580 --> 03:34.540 +If we have a carriage return there like so. + +03:38.080 --> 03:39.250 +Let's have a look at this prompt. + +03:39.280 --> 03:45.280 +Now you're looking at the website and there we go on a separate line that looks good okay. + +03:45.310 --> 03:49.780 +So let's talk about the messages object. + +03:49.780 --> 03:55.800 +So OpenAI expects to receive a conversation in a particular format. + +03:55.800 --> 04:01.920 +It's a format that OpenAI came up with and they used for their APIs, and it became so well used that + +04:01.920 --> 04:07.200 +all of the other major frontier models decided to adopt the same convention. 
+ +04:07.200 --> 04:13.140 +So this has gone from being originally OpenAI's way of using the API to being something of a standard + +04:13.140 --> 04:16.050 +across many different models to use this approach. + +04:16.050 --> 04:17.520 +And here's how it works. + +04:17.520 --> 04:25.590 +When you're trying to describe a conversation, you describe it using a list a Python list of dictionaries. + +04:25.590 --> 04:29.640 +So it's a list where each element in the list is a dictionary. + +04:29.640 --> 04:32.310 +And that dictionary looks like this. + +04:32.340 --> 04:35.130 +It's a dictionary with two elements. + +04:35.340 --> 04:43.170 +One of them has a key of role, and here the value is either system or user, a key of role. + +04:43.170 --> 04:46.500 +And the value is system a key of content. + +04:46.500 --> 04:49.770 +And the value is of course the system message. + +04:49.800 --> 04:53.760 +There's another Dictionary where there's a key of role. + +04:53.760 --> 04:56.700 +The value is user because it's the user message. + +04:56.700 --> 05:01.680 +The user prompt content is where the user message goes. + +05:02.130 --> 05:05.460 +User message and user prompt are the same thing. + +05:06.120 --> 05:11.190 +So hopefully I didn't explain it very well, but it makes sense when you see it visually like this. + +05:11.220 --> 05:18.120 +It's just a dictionary which has role and content, system and system, message user and the user message. + +05:18.120 --> 05:21.630 +And there are some other roles as well, but we're going to get to them in good time. + +05:21.630 --> 05:23.010 +This is all we need for now. + +05:23.010 --> 05:26.130 +So this is how messages are built. + +05:26.130 --> 05:33.300 +And if you look at this next function def messages for hopefully it's super clear to you that this is + +05:33.300 --> 05:34.050 +creating. + +05:34.050 --> 05:38.400 +This here is creating exactly this construct using code. 
+ +05:38.400 --> 05:42.000 +It's going to do it's going to put in there the generic system prompt we came up with. + +05:42.000 --> 05:46.560 +And it's going to create the user prompt for the website. + +05:46.980 --> 05:48.570 +So let's run that. + +05:49.200 --> 05:58.220 +And now, presumably it's clear that if I say messages for Ed, which is the object for my website, + +05:58.220 --> 06:02.300 +let's print it so that we see empty lines and stuff. + +06:04.430 --> 06:07.370 +Actually, sorry, in this case it might be better if we don't print it. + +06:07.640 --> 06:09.830 +If we just do this, it might look a bit clearer. + +06:09.860 --> 06:10.370 +There we go. + +06:10.400 --> 06:16.220 +And now you can see that it is it's a list of two things role system. + +06:16.220 --> 06:18.170 +And there's a system message role user. + +06:18.170 --> 06:20.270 +And there is the user message. + +06:20.870 --> 06:21.710 +Okay. + +06:21.710 --> 06:23.120 +It's time to bring this together. + +06:23.120 --> 06:24.710 +It's time to actually do it. + +06:24.710 --> 06:32.750 +The API for OpenAI to make a call to a frontier model to do this for us is super simple, and we're + +06:32.750 --> 06:34.610 +going to be using this API all the time. + +06:34.610 --> 06:37.700 +So whereas now it might look like it's a few things to remember. + +06:37.700 --> 06:42.620 +You're going to get so used to this, but we're going to make a function called summarize. + +06:42.620 --> 06:48.020 +And that is that's going to do the business that's going to solve our problem and summarize a URL that's + +06:48.020 --> 06:48.860 +passed in. + +06:48.860 --> 06:53.720 +It will first create a website for that URL, just like we did for editor. + +06:53.720 --> 06:56.300 +And this is where we call OpenAI. + +06:56.840 --> 06:59.840 +We say OpenAI, which is the the OpenAI object. + +06:59.870 --> 07:05.480 +We created OpenAI dot chat, dot completions, dot create. 
+ +07:05.930 --> 07:08.390 +And that for now you can just learn it by rote. + +07:08.390 --> 07:10.520 +We'll understand a lot more about that later. + +07:10.820 --> 07:16.070 +But as far as OpenAI is concerned, this is known as the completions API because we're asking it to + +07:16.100 --> 07:20.750 +complete this conversation, predict what would be most likely to come next. + +07:20.900 --> 07:23.780 +We pass in the name of the model we're going to use. + +07:23.780 --> 07:27.920 +We're going to use a model called GPT four mini that you'll get very familiar with. + +07:27.920 --> 07:36.410 +It is the light, cheap version of GPT four, the the one of the finest models on the planet, and this + +07:36.410 --> 07:39.350 +will cost fractions of a cent to use. + +07:39.350 --> 07:45.620 +This, um, you pass in the model and then you pass in the messages and the messages we pass in, use + +07:45.620 --> 07:49.910 +this structure that we've just created and that is all it takes. + +07:50.660 --> 07:54.380 +What comes back we put in this this object response. + +07:54.500 --> 08:01.910 +And when we get back the response we call response dot choices zero dot message dot content. + +08:02.090 --> 08:05.270 +Now I'm going to explain what this is another day we don't need to know. + +08:05.270 --> 08:05.810 +For now. + +08:05.810 --> 08:10.820 +We just need to know that we're going to do response dot choices zero dot message dot content. + +08:10.820 --> 08:11.930 +That's going to be it. + +08:11.930 --> 08:14.330 +That is our summarize function. + +08:14.330 --> 08:19.400 +And with that let's try summarizing my website we're running. + +08:19.400 --> 08:22.070 +It's now connecting to OpenAI in the cloud. + +08:22.070 --> 08:24.080 +It's making the call and back. + +08:24.080 --> 08:26.870 +Here is a summary of my website. + +08:26.870 --> 08:34.130 +We have just uh, spent a fraction of a cent and we have just summarized my website. 
+ +08:34.460 --> 08:39.110 +We can do a little bit better because we can print this in a nice style. + +08:39.110 --> 08:44.300 +Uh, GPT four, we've asked to respond in markdown, and that means that it's responded with various + +08:44.300 --> 08:48.910 +characters to represent headings, things in bold and so on. + +08:49.270 --> 08:57.160 +And we can use a feature of Jupyter Labs that we can ask it to actually show that in a nice markdown + +08:57.160 --> 08:57.880 +format. + +08:57.910 --> 08:58.990 +So let's do that. + +08:58.990 --> 09:02.440 +Let's use this display summary function and try again. + +09:02.470 --> 09:05.770 +Again we're going to GPT for a mini in the cloud. + +09:05.770 --> 09:08.800 +And here is a summary of my website. + +09:08.980 --> 09:11.800 +Uh, it says something about me. + +09:12.070 --> 09:17.350 +Uh, and it's uh yeah, very nicely formatted, very nicely structured. + +09:17.350 --> 09:18.760 +Pretty impressive. + +09:19.360 --> 09:24.970 +And apparently it highlights my work with proprietary LMS, offers resources related to AI and LMS, + +09:25.150 --> 09:28.840 +showcasing his commitment to advancing knowledge in this field. + +09:28.870 --> 09:30.550 +Good for you, GPT for mini. + +09:30.580 --> 09:32.620 +That's a very nice summary. + +09:33.070 --> 09:33.910 +Okay. + +09:33.910 --> 09:36.490 +And now we can try some more websites. + +09:36.520 --> 09:39.490 +Let's try summarizing cnn.com. + +09:39.760 --> 09:42.490 +Uh, we'll see what this happens. + +09:42.520 --> 09:47.470 +Obviously, CNN is a much bigger, uh, result you've got here. + +09:47.650 --> 09:53.350 +Uh, and, uh, we get some information about what's going on. + +09:53.440 --> 10:00.370 +I'm actually recording this right now on the 5th of November at, uh, in the evening, which is the + +10:00.370 --> 10:02.800 +date of the 2024 elections going on right now. + +10:02.800 --> 10:05.920 +So that, of course, is featured on CNN's web page. 
+ +10:05.920 --> 10:11.050 +We can also summarize anthropic, which is the website for Claude. + +10:11.050 --> 10:12.940 +And they have a nice page. + +10:12.940 --> 10:13.990 +And here you go. + +10:13.990 --> 10:18.100 +And you can read more about it in this nice little summary of their web page. + +10:19.030 --> 10:20.020 +All right. + +10:20.020 --> 10:24.010 +And that wraps up our first instant gratification. + +10:24.010 --> 10:25.750 +It's it's juicy. + +10:25.750 --> 10:27.700 +It's something where we've actually done something useful. + +10:27.700 --> 10:28.780 +We've scraped the web. + +10:28.780 --> 10:33.640 +We've summarized summarization is one of the most common AI use cases. + +10:33.640 --> 10:36.430 +So common it's useful for all sorts of purposes. + +10:36.430 --> 10:41.850 +We'll be doing it a few different ways during during this course, even in our week eight a sticky solution + +10:41.850 --> 10:44.160 +will be using something that will do some summarization. + +10:44.160 --> 10:48.030 +So it's a great, uh, thing to have experimented with already. + +10:48.360 --> 10:53.640 +So there are so many other business applications of summarization. + +10:53.640 --> 10:56.340 +This is something you should be able to put to good use. + +10:56.340 --> 11:01.020 +You should be able to think of some ways you could apply this to your day job right away, or be building + +11:01.020 --> 11:05.190 +a couple of example projects in GitHub that show summarization in action. + +11:05.190 --> 11:10.710 +You could apply it to summarizing the news, summarizing financial performance from a financial report, + +11:10.740 --> 11:12.300 +a resume, and a cover letter. + +11:12.300 --> 11:14.820 +You could you could take a resume and generate a cover letter. + +11:14.970 --> 11:19.470 +Uh, there are so many different things you can do with summarization of of documents. + +11:19.470 --> 11:23.220 +And also adding on to that the scraping the web angle of it. 
+ +11:23.220 --> 11:30.030 +So have a think about how you would apply summarization to your business and try extending this to do + +11:30.030 --> 11:31.440 +some summarization. + +11:32.280 --> 11:37.320 +There's also uh, for for the more technically inclined, uh, one of the things that you'll discover + +11:37.350 --> 11:42.480 +quite quickly when you use this is that there are many websites that cannot be summarized with this + +11:42.480 --> 11:49.560 +approach, and that's because they use JavaScript to render the web page and are rather simplistic. + +11:49.560 --> 11:55.710 +Approach has just taken the the just just made the requests the server call and taken what we get back. + +11:55.710 --> 11:56.970 +But there's a solution. + +11:56.970 --> 12:03.450 +And the solution is to use a platform like selenium or others like it, or playwright, which would + +12:03.450 --> 12:07.230 +allow you to render the page and and do it that way. + +12:07.230 --> 12:12.720 +So if you're technically inclined and have some background with that kind of thing, then a really interesting + +12:12.720 --> 12:19.200 +challenge is to turn this into something that's a bit beefier and add selenium to the mix. + +12:19.380 --> 12:22.470 +Um, as it happens, someone has already done that. + +12:22.470 --> 12:24.600 +Uh, one of the students, thank you very much. + +12:24.690 --> 12:29.490 +And if you go into this folder community contributions, you'll see a few different solutions. + +12:29.490 --> 12:32.520 +And one of them is a selenium based solution. + +12:32.520 --> 12:34.860 +So you can always go in and just just look at that yourself. + +12:34.860 --> 12:36.840 +Or you can have a shot at doing it too. + +12:36.840 --> 12:39.380 +And you'll find the solution in there. + +12:40.370 --> 12:44.900 +And if you do come up with a solution to that or to anything, I would love it if you were willing to + +12:44.900 --> 12:47.420 +share your code so that others can benefit from it. 
+ +12:47.420 --> 12:53.240 +Ideally, put it in the community contributions folder and be sure to clear the output. + +12:53.240 --> 12:58.190 +So you go to kernel restart kernel and clear outputs of all cells. + +12:58.310 --> 13:03.170 +Otherwise, everything that you've got in your output would also get checked into code which which would + +13:03.320 --> 13:04.730 +just clutter things up a bit. + +13:04.730 --> 13:06.200 +So so do that. + +13:06.470 --> 13:12.440 +And then if you could submit a PR, a pull request, I can then merge that into the code. + +13:12.440 --> 13:16.550 +And if that's a new thing for you, it is a bit of a process. + +13:16.580 --> 13:21.620 +There is a write up here for exactly what you need to do to make that work. + +13:21.830 --> 13:25.700 +Anyways, this was the first project, the first of many. + +13:25.730 --> 13:27.920 +It's a simple project, but it's an important one. + +13:27.950 --> 13:29.960 +A very important business use case. + +13:29.960 --> 13:31.610 +I hope you found it worthwhile. + +13:31.610 --> 13:34.700 +I will see you for the next video when we wrap up. + +13:34.700 --> 13:35.570 +Week one. + +13:35.570 --> 13:36.200 +Day one. 
diff --git a/week5/community-contributions/subtitles/srts/60616927/ja_JP.srt b/week5/community-contributions/subtitles/srts/60616927/ja_JP.srt new file mode 100755 index 0000000..bbd7abf --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616927/ja_JP.srt @@ -0,0 +1,568 @@ +WEBVTT + +00:00.050 --> 00:03.890 +この時点で最初のLM実験の時間だ。 + +00:03.980 --> 00:08.330 +だから、 この中のいくつかはよくご存じかもしれないし、 すでによくご存じかもしれない。 + +00:08.360 --> 00:11.180 +初耳の人もいるかもしれないが、 説明しよう。 + +00:11.240 --> 00:13.550 +これから使うモデルだ。 + +00:13.550 --> 00:17.090 +これらのフロンティアモデルは、 特定の方法で訓練されている。 + +00:17.090 --> 00:23.090 +つまり、 私たちユーザーから2つの異なるタイプの指示を期待されているのだ。 + +00:23.120 --> 00:27.590 +そのうちの1つはシステムプロンプトとして知られ、 もう1つはユーザープロンプトとして知られている。 + +00:27.620 --> 00:33.980 +システム・プロンプトは、 この会話の背景を説明するものだ。 + +00:33.980 --> 00:39.440 +このコースでは、 システムプロンプトを変更することの意味や、 + +00:39.440 --> 00:47.480 +システムプロンプトにどのような情報を含めることができるかを試していきます。 + +00:47.570 --> 00:51.230 +ユーザー・プロンプトは実際の会話そのものである。 + +00:51.230 --> 00:55.040 +そして、 今の私たちの場合は、 会話のきっかけになるだけだ。 + +00:55.040 --> 01:05.730 +そして、 大規模な言語モデルのLMの役割は、 このユーザーからのプロンプトが与えられたときに、 最も可能性の高い応答方法は何かを見つけ出すことである。 + +01:05.730 --> 01:13.410 +このユーザー・プロンプトが与えられたとしたら、 そしてこのシステム・プロンプトの文脈の中で、 その次に来る可能性の高いテキストは何だろうか? 
+ +01:13.410 --> 01:17.460 +それは、 このユーザーに対応するアシスタントのものだろう。 + +01:18.000 --> 01:24.660 +これが、 文脈を設定するシステム・プロンプトと、 会話のきっかけとなるユーザー・プロンプトの違いだ。 + +01:24.810 --> 01:26.790 +そこで、 システム・プロンプトを設定する。 + +01:26.790 --> 01:27.960 +そして、 こう書いてある。 + +01:27.960 --> 01:37.590 +あなたがウェブサイトのコンテンツを分析し、 ナビゲーションに関連しそうなテキストを無視して短い要約を提供するアシスタントであると言うことになる。 + +01:37.590 --> 01:39.630 +マークダウンで対応する。 + +01:39.660 --> 01:42.660 +それが何を意味するかは、 すぐにわかるだろう。 + +01:42.660 --> 01:46.800 +これがユーザー・プロンプトのシステム・プロンプトだ。 + +01:46.800 --> 01:51.180 +ユーザー・プロンプトの関数を書きます。 + +01:51.180 --> 01:54.990 +そして、 関数の引数としてウェブサイトを受け取る。 + +01:54.990 --> 02:00.460 +そして、 「ウェブサイト」というタイトルのウェブサイトを見ていると表示される。 + +02:01.000 --> 02:03.760 +本ウェブサイトの内容は以下の通りです。 + +02:03.760 --> 02:10.240 +ニュースやお知らせが含まれる場合は、 ウェブサイトの簡単な概要をマークダウンでご記入ください。 + +02:10.240 --> 02:17.020 +この2つを要約し、 Beautifulsoupが取り出してくれたウェブサイトオブジェクトからテキストを取り出し、 + +02:17.020 --> 02:22.990 +それをユーザープロンプトに追加して、 ユーザープロンプトを返す。 + +02:23.290 --> 02:28.870 +では、 さっそくこのセルを動かして見てみましょう。 + +02:28.870 --> 02:33.520 +そうして、 プロンプトのシステム構成を見てみよう。 + +02:35.560 --> 02:38.470 +もちろん、 今言ったような文章もある。 + +02:38.650 --> 02:46.060 +さて、 先ほど新しいウェブサイト・オブジェクトを作成し、 この変数エディタに格納したのを覚えているだろうか。 + +02:46.060 --> 02:55.540 +だから、 ここに来れば、 ユーザー・プロンプトと言い、 オブジェクト・エドを渡すことができるはずだ。 + +02:56.260 --> 02:58.690 +そして、 プロンプトが表示される。 + +02:58.720 --> 03:03.340 +空行を出力するように印刷すれば簡単かもしれない。 + +03:05.440 --> 03:09.070 +そしてこれが、 作成したユーザー・プロンプト文字列である。 + +03:09.070 --> 03:11.440 +あなたが見ているのは、 "blah blah blah "というタイトルのウェブサイトです。 + +03:11.470 --> 03:13.510 +本ウェブサイトの内容は以下の通りです。 + +03:13.510 --> 03:15.220 +簡単な要約をお願いします。 + +03:15.250 --> 03:18.760 +ほら、 ここにスペースがあったほうがよさそうだ。 そうしないと混乱するかもしれない。 + +03:18.760 --> 03:19.810 +もう一度やってみよう。 + +03:21.670 --> 03:26.770 +そのような小さな矛盾を見つけることができるからだ。 + +03:28.000 --> 03:30.580 +実際、 今見ると、 もっと素敵になると思うよ。 + +03:30.580 --> 03:34.540 +このようにキャリッジ・リターンがあるとする。 + +03:38.080 --> 03:39.250 +このプロンプトを見てみよう。 + +03:39.280 --> 03:45.280 +今、 あなたはウェブサイトを見ている。 + +03:45.310 --> 03:49.780 +では、 メッセージ・オブジェクトについて話そう。 + +03:49.780 --> 03:55.800 
+だからOpenAIは、 特定のフォーマットで会話を受け取ることを期待している。 + +03:55.800 --> 04:01.920 +これはOpenAIが考え出したフォーマットで、 + +04:01.920 --> 04:07.200 +彼らのAPIに使われている。 + +04:07.200 --> 04:16.050 +つまり、 これはもともとOpenAIのAPIの使用方法だったのが、 このアプローチを使用する多くの異なるモデルにおける標準のようなものになったということだ。 + +04:16.050 --> 04:17.520 +その仕組みはこうだ。 + +04:17.520 --> 04:25.590 +会話を記述しようとするとき、 Pythonの辞書のリストを使って記述する。 + +04:25.590 --> 04:29.640 +つまり、 リストの各要素が辞書であるリストだ。 + +04:29.640 --> 04:32.310 +その辞書はこうなっている。 + +04:32.340 --> 04:35.130 +これは2つの要素を持つ辞書である。 + +04:35.340 --> 04:43.170 +そのうちの1つはroleのキーを持ち、 ここではroleのキーであるsystemかuserのどちらかの値である。 + +04:43.170 --> 04:46.500 +そして、 その価値はコンテンツのキーとなるシステムである。 + +04:46.500 --> 04:49.770 +そして、 その値はもちろんシステム・メッセージである。 + +04:49.800 --> 04:53.760 +もうひとつ、 役割のカギを握る辞書がある。 + +04:53.760 --> 04:56.700 +ユーザーメッセージなので、 値はuserである。 + +04:56.700 --> 05:01.680 +ユーザープロンプトコンテンツは、 ユーザメッセージが入る場所である。 + +05:02.130 --> 05:05.460 +ユーザーメッセージとユーザープロンプトは同じものです。 + +05:06.120 --> 05:11.190 +だから、 うまく説明できなかったかもしれないが、 こうして視覚的に見れば理解できるだろう。 + +05:11.220 --> 05:18.120 +これは、 ロールとコンテンツ、 システムとシステム、 メッセージ・ユーザーとユーザー・メッセージを持つ辞書に過ぎない。 + +05:18.120 --> 05:21.630 +他にもいくつか役割があるが、 それは追って説明する。 + +05:21.630 --> 05:23.010 +今はこれで十分だ。 + +05:23.010 --> 05:26.130 +これがメッセージの作り方だ。 + +05:26.130 --> 05:34.050 +そして、 この次の関数のdefメッセージを見てもらえば、 これが作成されていることが一目瞭然だろう。 + +05:34.050 --> 05:38.400 +これは、 まさにこの構造をコードを使って作っている。 + +05:38.400 --> 05:42.000 +私たちが考えた一般的なシステムプロンプトを入れるんだ。 + +05:42.000 --> 05:46.560 +そして、 ウェブサイトのユーザー・プロンプトを作成する。 + +05:46.980 --> 05:48.570 +では、 それを実行してみよう。 + +05:49.200 --> 06:02.300 +そして今、 私のウェブサイトのオブジェクトであるEdへのメッセージと言えば、 空行などが見えるように印刷することは明らかだろう。 + +06:04.430 --> 06:07.370 +実は、 申し訳ないが、 この場合は印刷しない方がいいかもしれない。 + +06:07.640 --> 06:09.830 +こうすれば、 少しはスッキリするかもしれない。 + +06:09.860 --> 06:10.370 +これでよし。 + +06:10.400 --> 06:16.220 +そして今、 それが2つの役割システムのリストであることがわかるだろう。 + +06:16.220 --> 06:18.170 +そして、 システムメッセージの役割ユーザーがいる。 + +06:18.170 --> 06:20.270 +そして、 そこにユーザーメッセージがある。 + +06:20.870 --> 06:21.710 +オーケー。 + +06:21.710 --> 06:23.120 +これをまとめる時が来た。 + +06:23.120 --> 
06:24.710 +今こそ実際にやるときだ。 + +06:24.710 --> 06:34.610 +OpenAIのAPIはとてもシンプルで、 フロンティアモデルを呼び出してこれをやってくれる。 + +06:34.610 --> 06:37.700 +だから、 今は覚えておくべきことがいくつかあるように見えるかもしれない。 + +06:37.700 --> 06:42.620 +すぐに慣れるだろうが、 これからsummarizeという関数を作る。 + +06:42.620 --> 06:48.860 +そしてそれは、 私たちの問題を解決し、 渡されたURLを要約するビジネスを行うことだ。 + +06:48.860 --> 06:53.720 +エディターの時と同じように、 まずそのURLのウェブサイトを作成する。 + +06:53.720 --> 06:56.300 +そこでOpenAIと呼んでいる。 + +06:56.840 --> 06:59.840 +OpenAIとは、 OpenAIのオブジェクトのことだ。 + +06:59.870 --> 07:05.480 +我々はOpenAIのドットチャット、 ドットコンプリート、 ドットクリエイトを作った。 + +07:05.930 --> 07:08.390 +そして、 今はただ暗記すればいいということだ。 + +07:08.390 --> 07:10.520 +そのことについては、 後でいろいろと理解できるだろう。 + +07:10.820 --> 07:16.070 +しかし、 OpenAIに関する限り、 これは補完APIとして知られている。 なぜなら、 この会話を完成させ、 + +07:16.100 --> 07:20.750 +次に何が来る可能性が最も高いかを予測するよう求めているからだ。 + +07:20.900 --> 07:23.780 +使用するモデルの名前を渡す。 + +07:23.780 --> 07:27.920 +GPT4ミニと呼ばれるモデルを使う。 + +07:27.920 --> 07:39.350 +地球上で最高級モデルのひとつであるGPT 4の軽量・廉価版であり、 これを使うには1セントもかからない。 + +07:39.350 --> 07:45.620 +モデルを渡して、 それからメッセージを渡す。 メッセージは、 + +07:45.620 --> 07:49.910 +今作ったこの構造体を使う。 + +07:50.660 --> 07:54.380 +戻ってきたものをこのオブジェクト・レスポンスに入れる。 + +07:54.500 --> 08:01.910 +そしてレスポンスが返ってきたら、 レスポンス・ドット・チョイス・ゼロ・ドット・メッセージ・ドット・コンテンツと呼ぶ。 + +08:02.090 --> 08:05.270 +さて、 これが何なのか、 私たちが知る必要のない別の日に説明しよう。 + +08:05.270 --> 08:05.810 +今のところはね。 + +08:05.810 --> 08:10.820 +あとは、 レスポンス・ドット・チョイス・ゼロ・ドット・メッセージ・ドット・コンテンツを実行することだけだ。 + +08:10.820 --> 08:11.930 +それでいいんだ。 + +08:11.930 --> 08:14.330 +これが要約機能だ。 + +08:14.330 --> 08:19.400 +ということで、 私たちが運営しているウェブサイトを要約してみよう。 + +08:19.400 --> 08:22.070 +現在はクラウド上のOpenAIに接続している。 + +08:22.070 --> 08:24.080 +往復するんだ。 + +08:24.080 --> 08:26.870 +以下は私のウェブサイトの要約である。 + +08:26.870 --> 08:34.130 +ほんのわずかなお金を使い、 私のウェブサイトを要約した。 + +08:34.460 --> 08:39.110 +私たちは、 これを素敵なスタイルで印刷することができるので、 少し良いことができます。 + +08:39.110 --> 08:44.300 +ええと、 GPT4はマークダウンで回答するようお願いしました。 つまり、 見出しや太字のものなど、 + +08:44.300 --> 08:48.910 +さまざまな文字を使って回答しているということです。 + +08:49.270 --> 08:57.880 +Jupyter Labsの機能を使って、 マークダウン形式で表示させることもできる。 + 
+08:57.910 --> 08:58.990 +だから、 そうしよう。 + +08:58.990 --> 09:02.440 +この要約表示機能を使って、 もう一度やってみよう。 + +09:02.470 --> 09:05.770 +今回もクラウド上のミニをGPTにする。 + +09:05.770 --> 09:08.800 +そして、 これが私のウェブサイトの要約である。 + +09:08.980 --> 09:11.800 +ええと、 僕について何か書いてある。 + +09:12.070 --> 09:17.350 +とてもきれいなフォーマットで、 とてもきれいに構成されている。 + +09:17.350 --> 09:18.760 +かなり印象的だ。 + +09:19.360 --> 09:24.970 +そしてどうやら、 私が独自に開発したLMSでの仕事にハイライトを当て、 AIとLMSに関連するリソースを提供し、 + +09:25.150 --> 09:28.840 +この分野での知識の発展に尽力していることをアピールしているようだ。 + +09:28.870 --> 09:30.550 +よかったね、 ミニのGPT。 + +09:30.580 --> 09:32.620 +とてもいいまとめだ。 + +09:33.070 --> 09:33.910 +オーケー。 + +09:33.910 --> 09:36.490 +そして今、 さらにいくつかのウェブサイトを試すことができる。 + +09:36.520 --> 09:39.490 +CNNを要約してみよう。 comに移籍した。 + +09:39.760 --> 09:42.490 +まあ、 どうなるか見てみよう。 + +09:42.520 --> 09:47.470 +CNNの方がはるかに大きな結果を出しているのは明らかだ。 + +09:47.650 --> 09:53.350 +それで、 何が起こっているのか、 いくつかの情報を得ることができた。 + +09:53.440 --> 10:02.800 +実は今、 11月5日の夕方にこれを録音しているんですが、 これは今行われている2024年の選挙の日なんです。 + +10:02.800 --> 10:05.920 +もちろん、 それはCNNのウェブページで紹介されている。 + +10:05.920 --> 10:11.050 +クロードのウェブサイトである "アントロピック "を要約することもできる。 + +10:11.050 --> 10:12.940 +素敵なページもある。 + +10:12.940 --> 10:13.990 +さあ、 どうぞ + +10:13.990 --> 10:18.100 +その詳細については、 彼らのウェブページのこの素敵な要約をご覧いただきたい。 + +10:19.030 --> 10:20.020 +分かった。 + +10:20.020 --> 10:24.010 +これで、 最初のインスタント・グラティフィケーションは終了だ。 + +10:24.010 --> 10:25.750 +ジューシーだ。 + +10:25.750 --> 10:27.700 +実際に役に立つことをしたんだ。 + +10:27.700 --> 10:28.780 +我々はウェブをスクラップした。 + +10:28.780 --> 10:33.640 +要約は最も一般的なAIの使用例の一つである。 + +10:33.640 --> 10:36.430 +一般的なものなので、 あらゆる用途に使える。 + +10:36.430 --> 10:41.850 +このコースの間、 いくつかの異なる方法でそれを行う予定だが、 8週目の粘着ソリューションでも、 + +10:41.850 --> 10:44.160 +要約を行うものを使う予定だ。 + +10:44.160 --> 10:48.030 +だから、 すでに実験していることは素晴らしいことなんだ。 + +10:48.360 --> 10:53.640 +だから、 要約のビジネスへの応用は他にもたくさんある。 + +10:53.640 --> 10:56.340 +これは有効活用できるはずだ。 + +10:56.340 --> 11:05.190 +すぐにでも自分の仕事に応用できる方法を思いついたり、 GitHubで要約の実例となるプロジェクトをいくつか作ったりできるはずだ。 + +11:05.190 --> 11:12.300 +ニュースの要約、 財務報告書からの業績の要約、 履歴書、 カバーレターなどに応用できるだろう。 + +11:12.300 
--> 11:14.820 +履歴書からカバーレターを作成することもできる。 + +11:14.970 --> 11:19.470 +ドキュメントの要約でできることはたくさんある。 + +11:19.470 --> 11:23.220 +それに加えて、 ウェブのスクレイピングという側面もある。 + +11:23.220 --> 11:31.440 +だから、 あなたのビジネスに要約をどのように適用するか考え、 これを拡張して要約をしてみてほしい。 + +11:32.280 --> 11:37.320 +また、 技術志向の人向けには、 この方法を使うとすぐにわかることがある。 それは、 + +11:37.350 --> 11:42.480 +この方法では要約できないウェブサイトがたくさんあるということで、 それはウェブページのレンダリングにJavaScriptを使っていて、 + +11:42.480 --> 11:49.560 +かなり単純化しているからだ。 + +11:49.560 --> 11:55.710 +アプローチは、 ただサーバーにリクエストして、 返ってきたものを受け取っただけだ。 + +11:55.710 --> 11:56.970 +しかし、 解決策はある。 + +11:56.970 --> 12:03.450 +解決策としては、 セレニウムなどのプラットフォームや、 playwrightを使えば、 ページをレンダリングして、 + +12:03.450 --> 12:07.230 +そのように実行することができる。 + +12:07.230 --> 12:12.720 +だから、 もしあなたが技術的な傾向があり、 そのようなことについてのバックグラウンドがあるのなら、 + +12:12.720 --> 12:19.200 +本当に興味深い挑戦は、 これをもう少しビーフなものに変えて、 セレンをミックスに加えることだ。 + +12:19.380 --> 12:22.470 +うーん、 偶然にも、 誰かがすでにそれをやっている。 + +12:22.470 --> 12:24.600 +ええと、 生徒の一人です、 ありがとうございました。 + +12:24.690 --> 12:29.490 +そして、 このフォルダーのコミュニティへの貢献を見てみると、 いくつかの異なる解決策がある。 + +12:29.490 --> 12:32.520 +そのひとつがセレンベースのソリューションだ。 + +12:32.520 --> 12:34.860 +だから、 いつでも中に入って自分で見ることができる。 + +12:34.860 --> 12:36.840 +あるいは、 あなたにもチャンスがある。 + +12:36.840 --> 12:39.380 +そこに解決策があるはずだ。 + +12:40.370 --> 12:47.420 +そして、 もしあなたがその解決策や何かを思いついたなら、 他の人がその恩恵を受けられるようにコードを共有してくれると嬉しい。 + +12:47.420 --> 12:53.240 +理想的なのは、 それをコミュニティへの貢献フォルダに入れ、 出力をクリアすることだ。 + +12:53.240 --> 12:58.190 +そこでカーネルを再起動し、 すべてのセルの出力をクリアする。 + +12:58.310 --> 13:04.730 +そうしないと、 出力されたものすべてがコードにチェックされることになる。 + +13:04.730 --> 13:06.200 +だから、 そうしてくれ。 + +13:06.470 --> 13:12.440 +そして、 PR(プル・リクエスト)を提出してもらえれば、 それをコードにマージすることができる。 + +13:12.440 --> 13:16.550 +そして、 それがあなたにとって初めてのことであるなら、 それはちょっとしたプロセスなのだ。 + +13:16.580 --> 13:21.620 +そのために必要なことは、 ここに書いてある。 + +13:21.830 --> 13:25.700 +とにかく、 これが最初のプロジェクトで、 多くのプロジェクトの最初のものだった。 + +13:25.730 --> 13:27.920 +シンプルなプロジェクトだが、 重要なものだ。 + +13:27.950 --> 13:29.960 +非常に重要なビジネスユースケースだ。 + +13:29.960 --> 13:31.610 +有意義なものであったことを願っている。 + +13:31.610 --> 13:34.700 
+また次のビデオでお会いしましょう。 + +13:34.700 --> 13:35.570 +週目。 + +13:35.570 --> 13:36.200 +初日。 diff --git a/week5/community-contributions/subtitles/srts/60616927/ko_KR.srt b/week5/community-contributions/subtitles/srts/60616927/ko_KR.srt new file mode 100755 index 0000000..dbde0e8 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60616927/ko_KR.srt @@ -0,0 +1,622 @@ +WEBVTT + +00:00.050 --> 00:03.890 +첫 번째 달 착륙선 실험이에요 + +00:03.980 --> 00:08.330 +이 중 일부는 이미 잘 알고 계실 거예요 + +00:08.360 --> 00:11.180 +어떤 사람들에겐 생소하겠지만 제가 설명해 드리죠 + +00:11.240 --> 00:13.550 +우리가 사용할 모델이죠 + +00:13.550 --> 00:17.090 +개척 시대 모델들은 특정한 방식으로 훈련받았죠 + +00:17.090 --> 00:23.090 +사용자가 두 가지 다른 종류의 지시를 내리길 기대한다는 뜻이죠 + +00:23.120 --> 00:27.590 +그 중 하나는 시스템 프롬프트고 다른 하나는 사용자 프롬프트죠 + +00:27.620 --> 00:33.980 +이 대화의 맥락을 설명하는 시스템 프롬프트예요 + +00:33.980 --> 00:39.440 +어떤 작업을 수행하고 어떤 톤을 사용해야 하는지 알려줍니다 또한 시스템 프롬프트를 + +00:39.440 --> 00:44.780 +변경하는 게 무슨 의미인지 실험할 겁니다 그리고 이 과정 내내 시스템 프롬프트에 + +00:44.780 --> 00:47.480 +어떤 정보를 포함할 수 있는지도요 + +00:47.570 --> 00:51.230 +사용자 프롬프트는 실제 대화 그 자체예요 + +00:51.230 --> 00:55.040 +지금 우리 경우엔 대화의 시작에 불과해요 + +00:55.040 --> 01:02.790 +큰 언어 모델에서 LM의 역할은 이 사용자 프롬프트에 따라 가장 잘 반응해야 + +01:02.790 --> 01:05.730 +하는 방법을 알아내는 거죠 + +01:05.730 --> 01:11.580 +이 사용자 프롬프트가 주어졌을 때 시스템 프롬프트의 맥락에서 가장 다음에 나올 가능성이 높은 + +01:11.580 --> 01:13.410 +텍스트는 무엇일까요? 
+ +01:13.410 --> 01:17.460 +비서가 사용자에게 응답하는 거죠 + +01:18.000 --> 01:22.950 +이게 컨텍스트를 설정하는 시스템 프롬프트와 대화를 시작하는 사용자 + +01:22.950 --> 01:24.660 +프롬프트의 차이예요 + +01:24.810 --> 01:26.790 +시스템 프롬프트를 설정할게요 + +01:26.790 --> 01:27.960 +이렇게 말할 거예요 + +01:27.960 --> 01:33.870 +웹사이트의 내용을 분석하고 탐색과 관련된 문자를 무시하며 + +01:33.870 --> 01:37.590 +요약하는 보조라고 할 거예요 + +01:37.590 --> 01:39.630 +가격 조정으로 응답하세요 + +01:39.660 --> 01:42.660 +그게 무슨 뜻인지 곧 알게 되실 거예요 + +01:42.660 --> 01:46.800 +이게 사용자 프롬프트 시스템 프롬프트예요 + +01:46.800 --> 01:51.180 +이건 사용자 프롬프트 함수를 작성하는 것으로 받아들이죠 + +01:51.180 --> 01:54.990 +함수에 대한 인수로 웹사이트를 취할 거예요 + +01:54.990 --> 02:00.460 +더 웹 사이트라는 웹사이트를 보고 있다고 하네요 + +02:01.000 --> 02:03.760 +웹사이트의 내용은 다음과 같아요 + +02:03.760 --> 02:10.240 +뉴스나 공지가 포함된 사이트라면 마크다운된 사이트를 짧게 요약해 주세요 + +02:10.240 --> 02:17.020 +이 둘을 요약하고 나서 웹사이트 객체에서 텍스트를 가져옵니다. 아름다운쏘프가 + +02:17.020 --> 02:22.990 +뽑아낸 것이죠. 사용자 프롬프트에 추가하고 반환해요. + +02:23.290 --> 02:28.870 +그럼 빨리 셀을 실행해 보죠 지금 살펴보죠 + +02:28.870 --> 02:33.520 +그 다음에는 시스템 프롬프트가 무엇인지 보죠 + +02:35.560 --> 02:38.470 +방금 말씀드린 텍스트가 있죠 + +02:38.650 --> 02:46.060 +아까 새 웹사이트 객체를 생성해 이 변수 편집기에 저장했던 걸 기억하시나요? 
+ +02:46.060 --> 02:55.540 +이렇게 하면 사용자 프롬프트 for를 하고 객체 Ed를 전달할 수 있죠 + +02:56.260 --> 02:58.690 +Get 프롬프트가 나오죠 + +02:58.720 --> 03:03.340 +이걸 프린트해서 빈 선으로 프린트하는 게 더 쉬울 것 같아요 + +03:05.440 --> 03:09.070 +여기 우리가 만든 사용자 프롬프트 문자열이 있네요 + +03:09.070 --> 03:11.440 +웹사이트 이름이 어쩌고저쩌고라고 나오네요 + +03:11.470 --> 03:13.510 +웹사이트의 내용은 다음과 같아요 + +03:13.510 --> 03:15.220 +간단히 요약해 주세요 + +03:15.250 --> 03:18.760 +여기에 공간을 만들어야 할 것 같아요 안 그러면 헷갈릴 거예요 + +03:18.760 --> 03:19.810 +다시 해 보죠 + +03:21.670 --> 03:25.960 +그래서 작업을 하면서 인쇄할 가치가 있는 거죠 그런 모순을 발견할 수 + +03:25.960 --> 03:26.770 +있으니까요 + +03:28.000 --> 03:30.580 +지금 보니까 더 좋을 것 같아요 + +03:30.580 --> 03:34.540 +마차가 그렇게 돌아오면요 + +03:38.080 --> 03:39.250 +이 프롬프트를 보죠 + +03:39.280 --> 03:45.280 +이제 웹사이트를 보면 별도의 라인이 있는데 괜찮아 보이네요 + +03:45.310 --> 03:49.780 +메시지 객체에 대해 얘기해보죠 + +03:49.780 --> 03:55.800 +오픈AI는 특정한 형식의 대화를 기대하죠 + +03:55.800 --> 04:01.920 +오픈AI가 개발한 포맷으로 API에 사용되었는데 너무 잘 사용되어서 + +04:01.920 --> 04:07.200 +다른 주요 프론티어 모델들도 같은 방식을 채택하기로 했죠 + +04:07.200 --> 04:13.140 +API 사용에 대한 오픈AI의 방식에서 이 접근법을 사용하는 많은 다양한 + +04:13.140 --> 04:16.050 +모델에 걸친 표준으로 바뀌었어요 + +04:16.050 --> 04:17.520 +이렇게 하는 거예요 + +04:17.520 --> 04:25.590 +대화를 설명하려고 할 때 파이썬 리스트를 이용해 설명하죠 사전 리스트요 + +04:25.590 --> 04:29.640 +목록의 각 요소가 사전인 목록이죠 + +04:29.640 --> 04:32.310 +그 사전은 이렇게 생겼어요 + +04:32.340 --> 04:35.130 +두 가지 원소가 있는 사전이에요 + +04:35.340 --> 04:43.170 +그 중 하나는 역할의 키를 갖고 있는데 여기 값은 시스템이나 사용자, 역할의 키죠 + +04:43.170 --> 04:46.500 +그 가치는 시스템 a 콘텐츠 키죠 + +04:46.500 --> 04:49.770 +물론 가치는 시스템 메시지죠 + +04:49.800 --> 04:53.760 +역할의 핵심인 사전이 또 있어요 + +04:53.760 --> 04:56.700 +값은 사용자입니다 사용자 메시지니까요 + +04:56.700 --> 05:01.680 +사용자 프롬프트 콘텐츠는 사용자 메시지가 가는 곳이죠 + +05:02.130 --> 05:05.460 +사용자 메시지와 프롬프트는 같은 거예요 + +05:06.120 --> 05:11.190 +설명이 부족했다면 몰라도 이렇게 시각적으로 보면 이해가 돼요 + +05:11.220 --> 05:18.120 +역할과 콘텐츠, 시스템과 시스템 메시지와 사용자 메시지가 있는 사전이죠 + +05:18.120 --> 05:21.630 +다른 역할도 있지만 때가 되면 보여드릴게요. Get it. 
+ +05:21.630 --> 05:23.010 +지금은 이거면 돼요 + +05:23.010 --> 05:26.130 +이렇게 메시지가 구축되죠 + +05:26.130 --> 05:34.050 +다음 함수 데프 메시지를 보면 이게 생성되고 있다는 게 아주 명확했으면 좋겠네요 + +05:34.050 --> 05:38.400 +이건 코드를 이용해 정확히 이걸 만들고 있어요 + +05:38.400 --> 05:42.000 +이제 입력할 것은 우리가 생각해낸 제네릭 시스템 프롬프트예요 + +05:42.000 --> 05:46.560 +웹사이트의 사용자 프롬프트를 생성할 거예요 + +05:46.980 --> 05:48.570 +실행해 보죠 + +05:49.200 --> 05:58.220 +제 웹사이트의 객체인 Ed를 위한 메시지라고 하면 빈 줄 같은 + +05:58.220 --> 06:02.300 +게 보이도록 프린트하죠 + +06:04.430 --> 06:07.370 +사실, 죄송해요, 이 경우엔 프린트 안 하는 게 낫겠어요 + +06:07.640 --> 06:09.830 +비트를 이렇게 하면 더 명확해 보일 거예요 + +06:09.860 --> 06:10.370 +됐어요 + +06:10.400 --> 06:16.220 +이제 두 가지 목록이 보이시죠 역할 시스템이요 + +06:16.220 --> 06:18.170 +시스템 메시지 역할 유저가 있어요 + +06:18.170 --> 06:20.270 +사용자 메시지가 있네요 + +06:20.870 --> 06:21.710 +네 + +06:21.710 --> 06:23.120 +이제 하나로 합쳐야죠 + +06:23.120 --> 06:24.710 +이제 실제로 할 때예요 + +06:24.710 --> 06:32.750 +OpenAI의 API는 프론티어 모델에 호출하는 데 아주 간단합니다 이 API를 + +06:32.750 --> 06:34.610 +늘 사용할 거예요 + +06:34.610 --> 06:37.700 +지금은 기억해야 할 게 몇 가지 있는 것처럼 보일 수 있죠 + +06:37.700 --> 06:42.620 +익숙해지실 거예요 get Marize라는 함수를 만들 거예요 + +06:42.620 --> 06:48.020 +그럼 우리 문제를 해결하고 통과한 URL 요약을 하게 + +06:48.020 --> 06:48.860 +되죠 + +06:48.860 --> 06:53.720 +URL 웹사이트를 먼저 생성합니다 편집기에서 했던 것처럼요 + +06:53.720 --> 06:56.300 +여기가 오픈아이라고 부르는 곳이에요 + +06:56.840 --> 06:59.840 +오픈AI라고 하죠 오픈AI 객체예요 + +06:59.870 --> 07:05.480 +OpenAI.챗, .완성, .Create를 만들었어요 + +07:05.930 --> 07:08.390 +지금은 기계로 익히면 돼요 + +07:08.390 --> 07:10.520 +나중에 더 자세히 알게 될 거예요 + +07:10.820 --> 07:16.070 +하지만 오픈AI에 관한 한 이건 완성 API로 알려져 있습니다 이 대화를 완료하도록 + +07:16.100 --> 07:20.750 +요청하고 다음에 뭐가 나올지 예측하도록 요청하고 있으니까요 + +07:20.900 --> 07:23.780 +사용할 모델의 이름을 전달하죠 + +07:23.780 --> 07:27.920 +GPT for 미니라는 모델을 사용할 건데 여러분이 아주 익숙해지실 거예요 + +07:27.920 --> 07:36.410 +GPT 4의 가볍고 저렴한 버전입니다 GPT 4는 지구에서 가장 좋은 모델로 사용 + +07:36.410 --> 07:39.350 +비용도 1센트도 들지 않죠 + +07:39.350 --> 07:45.620 +이건 모델을 통과시키고 메시지를 통과시키는데 우리가 통과시킨 + +07:45.620 --> 07:49.910 +메시지는 방금 만든 이 구조를 사용하면 돼요 + +07:50.660 --> 07:54.380 
+Get in get은 객체 응답이에요 + +07:54.500 --> 08:01.910 +응답을 받으면 응답은 .선택 .0,Message.content라고 하죠 + +08:02.090 --> 08:05.270 +이제 이게 뭔지 설명할게요 알 필요 없는 또 다른 하루 + +08:05.270 --> 08:05.810 +지금은요 + +08:05.810 --> 08:10.820 +우리가 해야 할 건 응답 .선택 .0,Message.content예요 + +08:10.820 --> 08:11.930 +그렇게 될 거예요 + +08:11.930 --> 08:14.330 +이게 요약 함수예요 + +08:14.330 --> 08:19.400 +그것과 함께 실행 중인 제 웹사이트를 요약해보죠 + +08:19.400 --> 08:22.070 +클라우드에서 오픈AI에 연결되고 있어요 + +08:22.070 --> 08:24.080 +전화를 걸고 돌아오죠 + +08:24.080 --> 08:26.870 +제 웹사이트의 요약본이에요 + +08:26.870 --> 08:34.130 +저희는 그냥 아주 적은 돈을 들여서 제 웹사이트를 요약해 봤어요 + +08:34.460 --> 08:39.110 +좀 더 나은 게 있어요 멋진 스타일로 비트를 출력할 수 있거든요 + +08:39.110 --> 08:44.300 +GPT4에 마크다운으로 응답하라고 요청했는데 다양한 + +08:44.300 --> 08:48.910 +캐릭터와 제목 등 굵은 글씨로 응답했다는 뜻이죠 + +08:49.270 --> 08:57.160 +주피터 랩의 기능을 사용할 수 있습니다 멋진 마크다운 포맷으로 보여드리기 위해 요청할 + +08:57.160 --> 08:57.880 +수 있죠 + +08:57.910 --> 08:58.990 +그렇게 하죠 + +08:58.990 --> 09:02.440 +디스플레이 요약 함수를 이용해 다시 해보죠 + +09:02.470 --> 09:05.770 +클라우드에서 미니를 위해 GPT로 갈 거예요 + +09:05.770 --> 09:08.800 +제 웹사이트의 요약이에요 + +09:08.980 --> 09:11.800 +저에 대해 뭔가 말하는 것 같아요 + +09:12.070 --> 09:17.350 +아주 멋진 형식이고 구조가 잘 짜여 있어요 + +09:17.350 --> 09:18.760 +정말 인상적이에요 + +09:19.360 --> 09:24.970 +보아하니 LMS 독점 작업을 강조하는군요 인공지능 및 LMS와 관련된 리소스를 제공하며 + +09:25.150 --> 09:28.840 +이 분야의 진보된 지식을 위한 그의 노력을 보여주죠 + +09:28.870 --> 09:30.550 +잘했어요, 미니 GPT + +09:30.580 --> 09:32.620 +요약 잘하시네요 + +09:33.070 --> 09:33.910 +네 + +09:33.910 --> 09:36.490 +이제 다른 웹사이트도 살펴보죠 + +09:36.520 --> 09:39.490 +cnn을 요약해 보죠 코무요 + +09:39.760 --> 09:42.490 +어떻게 될지 두고 보죠 + +09:42.520 --> 09:47.470 +CNN이 훨씬 더 큰 결과를 가져왔죠 + +09:47.650 --> 09:53.350 +Get it, get it, get it it it, it + +09:53.440 --> 10:00.370 +지금 이걸 녹화하는 건 11월 5일 저녁이에요 2024년 선거일인데 + +10:00.370 --> 10:02.800 +지금 진행 중이죠 + +10:02.800 --> 10:05.920 +CNN 웹페이지에도 그 내용이 실렸어요 + +10:05.920 --> 10:11.050 +클로드의 웹사이트인 안드로픽도 요약할 수 있어요 + +10:11.050 --> 10:12.940 +페이지도 좋고요 + +10:12.940 --> 10:13.990 +여기 있어요 + +10:13.990 --> 10:18.100 +웹 페이지의 요약본에서 더 자세히 읽어보세요 + 
+10:19.030 --> 10:20.020 +좋아요 + +10:20.020 --> 10:24.010 +첫 번째 즉각적인 만족감을 맛보았네요 + +10:24.010 --> 10:25.750 +육즙이 풍부해요 + +10:25.750 --> 10:27.700 +뭔가 유용한 일을 한 거죠 + +10:27.700 --> 10:28.780 +거미줄을 긁어냈어요 + +10:28.780 --> 10:33.640 +가장 흔한 인공지능 사용 사례를 요약해 봤는데요 + +10:33.640 --> 10:36.430 +너무 흔해서 여러 용도로 쓸 수 있죠 + +10:36.430 --> 10:41.850 +이번 과정 동안 몇 가지 다른 방법을 사용할 겁니다 8주 차에도요 요약을 하는 + +10:41.850 --> 10:44.160 +걸 이용한 끈적한 솔루션이 있죠 + +10:44.160 --> 10:48.030 +이미 실험해 본 거라서 정말 좋아요 + +10:48.360 --> 10:53.640 +요약의 비즈니스 응용 프로그램이 아주 많아요 + +10:53.640 --> 10:56.340 +이건 좋은 일에 쓸 수 있어야 해요 Put it up Put it up Put it up Put it up Put it up Put it up Put it up Put it + +10:56.340 --> 11:01.020 +이걸 당장 직장에 적용할 방법을 생각해내거나 요약하는 걸 보여주는 + +11:01.020 --> 11:05.190 +예시 프로젝트를 깃허브에 만들 수 있어야 해요 + +11:05.190 --> 11:10.710 +뉴스 요약, 재무 성과 요약 재무 보고서, 이력서 자기소개서에 + +11:10.740 --> 11:12.300 +적용할 수 있죠 + +11:12.300 --> 11:14.820 +이력서를 가져가서 자기소개서를 작성할 수 있죠 + +11:14.970 --> 11:19.470 +문서 요약으로 할 수 있는 게 정말 많아요 + +11:19.470 --> 11:23.220 +거기에 거미줄 각도를 긁어내는 것도 추가했죠 + +11:23.220 --> 11:30.030 +요약법을 여러분 비즈니스에 어떻게 적용할지 생각해 보세요 이걸 요약법으로 + +11:30.030 --> 11:31.440 +확장해 보세요 + +11:32.280 --> 11:37.320 +기술적으로 좀 더 관련이 있는 분들을 위해 이걸 사용하실 때 여러분이 빠르게 + +11:37.350 --> 11:42.480 +발견하실 것 중 하나는 이런 접근법으로 요약할 수 없는 많은 웹사이트들이 있다는 + +11:42.480 --> 11:49.560 +겁니다 왜냐하면 웹 페이지를 렌더링하기 위해 자바스크립트를 사용하기 때문이죠 좀 단순해요 + +11:49.560 --> 11:55.710 +접근 방식이 서버 호출을 요청해 get을 받는 거죠 + +11:55.710 --> 11:56.970 +하지만 해결책이 있죠 + +11:56.970 --> 12:03.450 +해결책은 셀레늄 같은 플랫폼을 사용하는 거예요 페이지를 + +12:03.450 --> 12:07.230 +렌더링할 수 있게 해주죠 + +12:07.230 --> 12:12.720 +기술적으로 뛰어나고 그런 경험이 있다면 흥미로운 도전은 + +12:12.720 --> 12:19.200 +이걸 좀 더 우람하게 만드는 거예요 비트에 셀레늄을 넣고요 + +12:19.380 --> 12:22.470 +공교롭게도 누가 벌써 했어요 + +12:22.470 --> 12:24.600 +학생 한 명요, 고마워요 + +12:24.690 --> 12:29.490 +이 커뮤니티 기부 폴더를 살펴보면 몇 가지 다른 솔루션을 볼 수 있어요 + +12:29.490 --> 12:32.520 +그중 하나가 셀레늄 기반 용액이에요 + +12:32.520 --> 12:34.860 +언제든 들어가서 직접 볼 수 있어요 + +12:34.860 --> 12:36.840 +당신도 할 수 있어요 + +12:36.840 --> 12:39.380 +거기서 해결책을 찾을 
수 있어요 + +12:40.370 --> 12:44.900 +그것이나 다른 것에 대한 해결책을 생각해내신다면 기꺼이 코드를 공유해주시면 감사하겠습니다 + +12:44.900 --> 12:47.420 +다른 이들도 혜택을 받을 수 있도록요 + +12:47.420 --> 12:53.240 +이상적으로는 커뮤니티 기부 폴더에 넣고 OUTPUT를 확실히 지우세요 + +12:53.240 --> 12:58.190 +커널 재시작 커널로 가서 모든 셀의 출력을 비우세요 + +12:58.310 --> 13:03.170 +그렇지 않으면 결과물에 있는 모든 것이 코드 안에서 체크인 될 겁니다 비트가 + +13:03.320 --> 13:04.730 +좀 어수선해지겠죠 + +13:04.730 --> 13:06.200 +그럼 그렇게 하세요 + +13:06.470 --> 13:12.440 +PR, pull request를 제출할 수 있다면 코드에 병합할 수 있어요 + +13:12.440 --> 13:16.550 +비트 박스가 처음이라면 좀 복잡할 거예요 + +13:16.580 --> 13:21.620 +그걸 성공시키기 위해 정확히 뭘 해야 하는지 여기 적혀 있어요 + +13:21.830 --> 13:25.700 +어쨌든 이게 첫 프로젝트였어요 + +13:25.730 --> 13:27.920 +단순하지만 중요한 프로젝트예요 + +13:27.950 --> 13:29.960 +아주 중요한 기업용 유스케이스가 있어요 + +13:29.960 --> 13:31.610 +보람이 있었길 바라요 + +13:31.610 --> 13:34.700 +그럼 저희는 마무리하고 다음 영상에서 뵐게요 + +13:34.700 --> 13:35.570 +첫째 주에요 + +13:35.570 --> 13:36.200 +첫날이에요 diff --git a/week5/community-contributions/subtitles/srts/60617163/en_US.srt b/week5/community-contributions/subtitles/srts/60617163/en_US.srt new file mode 100755 index 0000000..c8d76fb --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60617163/en_US.srt @@ -0,0 +1,40 @@ +WEBVTT + +00:00.080 --> 00:02.570 +And already that wraps up day two. + +00:02.570 --> 00:04.550 +Now that you have built that solution. + +00:04.550 --> 00:05.990 +And congratulations on that. + +00:05.990 --> 00:06.830 +And I would love to see it. + +00:06.830 --> 00:11.990 +By the way, if you want to share it, uh, you've got some experience with summarization both with + +00:11.990 --> 00:14.120 +OpenAI and with llama. + +00:14.150 --> 00:19.790 +Next time we're going to have some fun with Frontier models, we're going to look at six of them. + +00:19.790 --> 00:25.160 +You're going to get sleeves rolled up, hands on experience with them so that we get a good sense, + +00:25.190 --> 00:28.100 +a good intuition to the differences between them. 
+ +00:28.190 --> 00:32.780 +We're going to use some of the latest and greatest versions, and we're also going to see some areas + +00:32.780 --> 00:33.740 +where they struggle. + +00:33.740 --> 00:35.210 +So looking forward to that. + +00:35.210 --> 00:36.380 +I will see you then. diff --git a/week5/community-contributions/subtitles/srts/60617163/ja_JP.srt b/week5/community-contributions/subtitles/srts/60617163/ja_JP.srt new file mode 100755 index 0000000..815ae42 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60617163/ja_JP.srt @@ -0,0 +1,34 @@ +WEBVTT + +00:00.080 --> 00:02.570 +これで2日目が終わった。 + +00:02.570 --> 00:04.550 +これでソリューションが構築できた。 + +00:04.550 --> 00:05.990 +本当におめでとう。 + +00:05.990 --> 00:06.830 +そして、 ぜひ見てみたい。 + +00:06.830 --> 00:14.120 +ところで、 もし共有したいのであれば、 OpenAIとllamaの両方で要約の経験があるんですよね。 + +00:14.150 --> 00:19.790 +次回はフロンティアの6つのモデルを見てみよう。 + +00:19.790 --> 00:25.160 +両者の違いを直感的に理解するために、 袖をまくり、 + +00:25.190 --> 00:28.100 +実際に体験してもらう。 + +00:28.190 --> 00:33.740 +最新で最高のバージョンをいくつか使うつもりだし、 彼らが苦戦している分野も見ていくつもりだ。 + +00:33.740 --> 00:35.210 +楽しみにしているよ。 + +00:35.210 --> 00:36.380 +それではまた。 diff --git a/week5/community-contributions/subtitles/srts/60617163/ko_KR.srt b/week5/community-contributions/subtitles/srts/60617163/ko_KR.srt new file mode 100755 index 0000000..e989dba --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60617163/ko_KR.srt @@ -0,0 +1,40 @@ +WEBVTT + +00:00.080 --> 00:02.570 +벌써 둘째 날이 끝났네요 + +00:02.570 --> 00:04.550 +이제 솔루션을 구축했군요 + +00:04.550 --> 00:05.990 +그 점은 축하해요 + +00:05.990 --> 00:06.830 +저도 보고 싶어요 + +00:06.830 --> 00:11.990 +이 강의를 공유하고 싶다면 오픈AI와 라마를 요약해 + +00:11.990 --> 00:14.120 +본 경험이 있을 거예요 + +00:14.150 --> 00:19.790 +다음 시간에는 개척 시대 모델 여섯 명을 살펴볼 거예요 + +00:19.790 --> 00:25.160 +소매를 걷어붙이고 직접 경험해보셔야 합니다 둘의 차이를 잘 감지하고 + +00:25.190 --> 00:28.100 +직관할 수 있도록요 get it + +00:28.190 --> 00:32.780 +최신의 훌륭한 버전을 사용할 겁니다 그들이 어려워하는 부분도 + +00:32.780 --> 00:33.740 +볼 거고요 + +00:33.740 --> 00:35.210 +정말 기대돼요 + +00:35.210 --> 
00:36.380 +그때 봐요 diff --git a/week5/community-contributions/subtitles/srts/60617251/en_US.srt b/week5/community-contributions/subtitles/srts/60617251/en_US.srt new file mode 100755 index 0000000..5905ddf --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60617251/en_US.srt @@ -0,0 +1,325 @@ +WEBVTT + +00:00.050 --> 00:02.180 +Congratulations are definitely in order. + +00:02.210 --> 00:06.380 +Yesterday was a mammoth first day on this course and you got through it. + +00:06.380 --> 00:09.560 +You set up your environment and we're off to the races. + +00:09.560 --> 00:14.840 +So as I recap, what you can now do is use a llama to run models locally. + +00:14.840 --> 00:18.410 +You can call OpenAI's API to run a frontier model. + +00:18.410 --> 00:22.130 +You understand, at least at a high level, the difference between a system and a user prompt. + +00:22.130 --> 00:28.730 +And you've built a summarization use case, which is an important commercial application. + +00:29.120 --> 00:35.270 +Today we're going to talk a bit about what are the steps to get to being an LLM engineer, set you up + +00:35.270 --> 00:39.110 +for success, and then talk more about frontier models. + +00:39.110 --> 00:41.030 +That is the plan. + +00:41.060 --> 00:47.750 +Now, let me start by just emphasizing that this course, this eight week course is a practical course. + +00:47.750 --> 00:50.480 +First and foremost we're going to cover theory. + +00:50.510 --> 00:53.420 +We're going to cover the foundational information that you need. + +00:53.420 --> 00:56.600 +But we're always going to do it in a practical context. + +00:56.630 --> 00:59.810 +I'm a big believer in the best way to learn is by doing. + +00:59.810 --> 01:01.640 +And that's what we'll be actually doing. + +01:01.820 --> 01:03.620 +And we're going to be putting it to good use. + +01:03.620 --> 01:06.620 +We're going to be building commercial projects as we go. 
+ +01:06.620 --> 01:11.390 +And I'll often be asking you to try and think about how you could apply what you've learned to your + +01:11.390 --> 01:15.710 +day job and try and build little prototypes to bring that to life. + +01:15.710 --> 01:18.470 +So that is the flavour of the course. + +01:18.950 --> 01:24.740 +There are three different aspects to LM engineering that you'll be picking up as we go. + +01:24.770 --> 01:29.960 +The first of them is just understanding the models that are out there, the wide range of different + +01:29.990 --> 01:34.610 +types of LM and what they're capable of, whether we're talking about open source versions, the closed + +01:34.610 --> 01:40.220 +source paid versions, ones that are multi-modal, that can generate images or audio, the different + +01:40.220 --> 01:45.920 +architectures of LMS, and importantly, how you pick which LM you should be working with in the first + +01:45.920 --> 01:46.610 +place. + +01:46.610 --> 01:53.270 +We'll be looking at all of the tools of the trade, things like the ubiquitous hugging face, the super + +01:53.270 --> 01:58.490 +impressive glue code in Lange chain, the fabulous gradio that you will see. + +01:58.520 --> 02:01.130 +I'm a big fan of weights and biases. + +02:01.130 --> 02:05.270 +Super helpful and then modal for deploying it to production. + +02:05.270 --> 02:10.790 +And we'll be looking at techniques different ways that you can apply this. + +02:10.880 --> 02:18.590 +The field of AI to solve business problems from using API's Rag which is such a hot topic, fine tuning + +02:18.590 --> 02:23.360 +and then at the end a full Agentic AI solution. + +02:24.830 --> 02:30.170 +The idea of this course is that I've planned it so that it will apply almost no matter what your level + +02:30.170 --> 02:31.400 +of experience. + +02:31.640 --> 02:36.350 +You may find initially that some of the first few weeks are too simplistic for you, or the first few + +02:36.350 --> 02:37.130 +days at least. 
+ +02:37.130 --> 02:42.980 +And there I would say hang in there, use it as a way to just sort of reinforce some of the of the foundational + +02:42.980 --> 02:47.570 +information that we'll talk a bit more detail about things like tokens than you've done before, perhaps. + +02:47.570 --> 02:52.400 +So there'll be stuff to pick up, but you can go through it quickly and then make the projects your + +02:52.400 --> 02:52.730 +own. + +02:52.730 --> 02:59.600 +So use it as a way to build deeper versions of what we do and then prepare for harder, fun projects + +02:59.600 --> 03:00.980 +coming up later. + +03:01.190 --> 03:05.510 +If it feels too challenging, then please, please don't worry. + +03:05.540 --> 03:07.910 +Take your time with the practicals. + +03:07.940 --> 03:09.860 +Take your time with the exercises. + +03:09.860 --> 03:11.210 +Work your way through them. + +03:11.210 --> 03:15.650 +There are some extra guides that I'll talk about in week one that would help give you a sort of firmer + +03:15.650 --> 03:16.790 +footing if you need it. + +03:16.790 --> 03:19.550 +And please, please, please ask for help. + +03:19.550 --> 03:21.620 +I am here, I respond quickly. + +03:21.620 --> 03:27.290 +You can always reach out to me, either as I say on the platform or through email or through LinkedIn. + +03:27.320 --> 03:29.180 +Details are in the GitHub repo. + +03:29.180 --> 03:30.290 +Reach out to me. + +03:30.320 --> 03:31.130 +Get help. + +03:31.130 --> 03:32.600 +That is what I'm here for. + +03:32.900 --> 03:35.810 +And then if it feels just right, then excellent. + +03:35.810 --> 03:36.770 +Keep going. + +03:37.580 --> 03:43.880 +So the prerequisite is beginner to intermediate level Python. + +03:43.880 --> 03:49.370 +And if you have intermediate level Python you're going to find it easiest and you'll get the most out + +03:49.370 --> 03:50.060 +of it. 
+ +03:50.180 --> 03:55.280 +And if you look for example at this line of code right here, which is just a random line from within + +03:55.280 --> 03:55.910 +a function. + +03:55.910 --> 04:01.280 +If you basically know what that's probably doing, then you're in great shape if you know exactly what + +04:01.280 --> 04:07.250 +it's doing and if you know in fact why, it's not perhaps the most optimal way of doing it, then you're + +04:07.250 --> 04:12.260 +more than more than, well set that you're advanced, and that's great if you don't know what this does + +04:12.290 --> 04:19.910 +and you're not familiar with a world with a word like yield or set or the dot get, then there is a + +04:19.910 --> 04:27.890 +special notebook, a special Jupyter Lab in week one for you, which is a guide to to Python at this + +04:27.890 --> 04:28.490 +level. + +04:28.490 --> 04:33.890 +And as you go through that notebook, I will take you through each of the stepping stones until we get + +04:33.890 --> 04:36.620 +to a point where a line like this should make sense. + +04:36.620 --> 04:40.550 +And of course, you can also use ChatGPT and Claude. + +04:40.580 --> 04:45.320 +The the the genies are really good at explaining code. + +04:45.320 --> 04:49.310 +And indeed, if you put something like this in there, they would tell you exactly what it does and + +04:49.310 --> 04:50.780 +why and step you through it too. + +04:50.780 --> 04:54.200 +And they're probably just as good as, as my, my notebook. + +04:54.200 --> 04:57.050 +So either way, that should give you what you need to do. + +04:57.050 --> 05:01.640 +And at any point, if you don't understand some code, you can always use ChatGPT. + +05:03.230 --> 05:08.630 +So to get the most out of this course, there are a few things that I would ask you. + +05:08.660 --> 05:12.410 +First of all, follow along as I do my coding. 
+ +05:12.410 --> 05:16.640 +So when I'm when I'm going through in the labs and I'm executing cells, then either at the same time + +05:16.670 --> 05:18.890 +or afterwards go through and do it yourself. + +05:18.890 --> 05:25.100 +And if you hit snafus, which you might do for various reasons, then have a crack at trying to to sort + +05:25.130 --> 05:25.640 +out why. + +05:25.670 --> 05:26.540 +Do some debugging. + +05:26.540 --> 05:33.380 +That's a great way to learn and complete the exercises, and then put your code examples up on GitHub. + +05:33.380 --> 05:34.670 +It's actually a great way as well. + +05:34.670 --> 05:39.770 +If you're new to this space, and you're trying to build up something of a kind of resume to show that + +05:39.770 --> 05:45.290 +you've built some of this experience, the best kind of resume you can have is a GitHub repo, because + +05:45.290 --> 05:48.380 +people will look at it and will see the sorts of things you've worked on. + +05:48.380 --> 05:51.170 +And of course, you don't want to put exactly the projects we do. + +05:51.200 --> 05:52.910 +You want to make them your own. + +05:52.910 --> 05:57.800 +You want to figure out, okay, how can I apply this to my business area or to a personal project that + +05:57.800 --> 06:01.160 +I'm working on to make it something that's similar, slightly different? + +06:01.190 --> 06:03.950 +Take on it for new business value. + +06:04.010 --> 06:10.760 +That is a great way to get the most out of this course, and then look to share your code. + +06:10.760 --> 06:13.130 +If you're happy with it and you and you're okay with that. + +06:13.130 --> 06:18.140 +I've got instructions about how you can submit a pull request, which means that I can see your code. + +06:18.170 --> 06:23.660 +I can give you feedback on it if you'd like, and I can also then republish it so that other students + +06:23.660 --> 06:27.590 +taking the course will see your examples and we can all share in it together. 
+ +06:27.590 --> 06:29.540 +And of course stick at it. + +06:29.570 --> 06:30.950 +Hang on in there. + +06:30.980 --> 06:34.550 +This course gets better and better and better, I assure you. + +06:34.550 --> 06:39.440 +And I you know, there's there's going to be so many projects, so much commercial application. + +06:39.440 --> 06:43.550 +And I definitely encourage you to stay the course. + +06:43.580 --> 06:48.410 +And the thing I didn't put down here one more time is that getting the most out of the course. + +06:48.410 --> 06:52.220 +Also, please reach out to me if I can help at any point. diff --git a/week5/community-contributions/subtitles/srts/60617251/ja_JP.srt b/week5/community-contributions/subtitles/srts/60617251/ja_JP.srt new file mode 100755 index 0000000..9401f44 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60617251/ja_JP.srt @@ -0,0 +1,268 @@ +WEBVTT + +00:00.050 --> 00:02.180 +おめでとうございます。 + +00:02.210 --> 00:06.380 +昨日はこのコースの初日としては非常に大きなものだったが、 君はそれを乗り切った。 + +00:06.380 --> 00:09.560 +環境を整えれば、 レースが始まる。 + +00:09.560 --> 00:14.840 +つまり、 今できることは、 ラマを使ってローカルでモデルを走らせることだ。 + +00:14.840 --> 00:18.410 +OpenAIのAPIを呼び出して、 フロンティアモデルを実行することができる。 + +00:18.410 --> 00:22.130 +システムプロンプトとユーザープロンプトの違いを、 少なくとも高いレベルで理解している。 + +00:22.130 --> 00:28.730 +そして、 あなたは重要な商用アプリケーションである要約のユースケースを構築した。 + +00:29.120 --> 00:39.110 +今日は、 LLMエンジニアになるためのステップ、 成功のための準備、 そしてフロンティア・モデルについて少しお話しします。 + +00:39.110 --> 00:41.030 +それが計画だ。 + +00:41.060 --> 00:47.750 +さて、 まず強調しておきたいのは、 この8週間のコースは実践的なものだということだ。 + +00:47.750 --> 00:50.480 +まず第一に、 我々は理論をカバーするつもりだ。 + +00:50.510 --> 00:53.420 +必要な基礎情報をカバーするつもりだ。 + +00:53.420 --> 00:56.600 +しかし、 我々は常に実践的な文脈でそれを行うつもりだ。 + +00:56.630 --> 00:59.810 +私は、 学ぶ最善の方法は実践することだと信じている。 + +00:59.810 --> 01:01.640 +そして、 それが私たちが実際にやることだ。 + +01:01.820 --> 01:03.620 +そして、 それを有効に使うつもりだ。 + +01:03.620 --> 01:06.620 +私たちは、 これから商業プロジェクトを立ち上げていく。 + +01:06.620 --> 01:11.390 +そして、 学んだことを本業にどう生かせるかを考え、 それを実現するための小さなプロトタイプを作ってみるよう、 + +01:11.390 --> 01:15.710 
+よくお願いしている。 + +01:15.710 --> 01:18.470 +それがこのコースの特徴だ。 + +01:18.950 --> 01:24.740 +LMエンジニアリングには3つの異なる側面があり、 それをこれから学んでいくことになる。 + +01:24.770 --> 01:29.960 +オープンソースのもの、 クローズドソースの有料版、 画像や音声を生成できるマルチモーダルなもの、 + +01:29.990 --> 01:34.610 +LMSのさまざまなアーキテクチャ、 そして重要なのは、 + +01:34.610 --> 01:46.610 +そもそもどのLMSを使うべきかをどのように選択するかということだ。 + +01:46.610 --> 01:53.270 +私たちは、 ユビキタスなハグ顔、 ランゲ・チェーンの超印象的なグルー・コード、 皆さんが目にする素晴らしいグラディオなど、 + +01:53.270 --> 01:58.490 +あらゆる商売道具を見ていきます。 + +01:58.520 --> 02:01.130 +私はウェイトとバイアスの大ファンだ。 + +02:01.130 --> 02:05.270 +超便利で、 本番環境にデプロイするためのモーダルがある。 + +02:05.270 --> 02:10.790 +このテクニックを応用するさまざまな方法を見ていこう。 + +02:10.880 --> 02:18.590 +APIのラグを利用したビジネス上の問題を解決するAIの分野は、 非常にホットなトピックであり、 ファインチューニングを経て、 + +02:18.590 --> 02:23.360 +最終的には完全なAgentic AIソリューションとなる。 + +02:24.830 --> 02:31.400 +このコースのアイデアは、 あなたの経験レベルにほとんど関係なく応用できるように計画したということだ。 + +02:31.640 --> 02:37.130 +最初の数週間は、 あるいは少なくとも最初の数日間は、 自分にとって単純すぎると感じるかもしれない。 + +02:37.130 --> 02:47.570 +トークンのようなものについては、 これまでよりももう少し詳しくお話しします。 + +02:47.570 --> 02:52.730 +だから、 手に取るべきものはあるだろうけど、 すぐに読み終えて、 自分のプロジェクトにすることができる。 + +02:52.730 --> 03:00.980 +だから、 私たちがやっていることのより深いバージョンを構築するための方法として、 そして後に控えている、 より困難で楽しいプロジェクトに備えるための方法として使ってほしい。 + +03:01.190 --> 03:05.510 +もし難しすぎると感じたら、 どうか心配しないでほしい。 + +03:05.540 --> 03:07.910 +実技はゆっくりと。 + +03:07.940 --> 03:09.860 +エクササイズはじっくりと。 + +03:09.860 --> 03:11.210 +それをやり遂げることだ。 + +03:11.210 --> 03:16.790 +もし必要であれば、 第1週でお話しするような、 足場を固めるのに役立つ追加ガイドもある。 + +03:16.790 --> 03:19.550 +そして、 どうか、 どうか、 助けを求めてほしい。 + +03:19.550 --> 03:21.620 +私はここにいる。 + +03:21.620 --> 03:27.290 +プラットフォームやEメール、 LinkedInを通じて、 いつでも私に連絡を取ることができる。 + +03:27.320 --> 03:29.180 +詳細はGitHubのレポにある。 + +03:29.180 --> 03:30.290 +私に声をかけてください。 + +03:30.320 --> 03:31.130 +助けを求める。 + +03:31.130 --> 03:32.600 +そのために私はここにいる。 + +03:32.900 --> 03:35.810 +そして、 もしそれがちょうどいいと感じたら、 素晴らしいことだ。 + +03:35.810 --> 03:36.770 +続けてくれ。 + +03:37.580 --> 03:43.880 +だから、 前提条件は初級から中級レベルのPythonだ。 + +03:43.880 --> 03:50.060 +中級レベルのPythonを持っている人なら、 最も簡単で、 最大限に活用できるだろう。 + 
+03:50.180 --> 03:55.910 +例えば、 このコードの行を見てほしい。 + +03:55.910 --> 04:01.280 +もしこれが何をやっているのかよく分かっていて、 なぜそれが最適なやり方ではないのかも分かっているなら、 + +04:01.280 --> 04:19.910 +あなたは上級者です。 もしこれが何をやっているのか分からなくて、 yieldやsetやgetのような単語のある世界に馴染みがないのなら、 第1週目に特別なノートブック、 特別なJupyter + +04:19.910 --> 04:28.490 +Labがあります。 + +04:28.490 --> 04:36.620 +そのノートを見ながら、 このような線が意味を持つようになるまで、 それぞれの足がかりをたどっていこう。 + +04:36.620 --> 04:40.550 +もちろん、 ChatGPTやクロードを使うこともできる。 + +04:40.580 --> 04:45.320 +精霊たちはコードを説明するのが本当にうまい。 + +04:45.320 --> 04:50.780 +そして実際、 もしこのようなものを入れたら、 彼らはそれが何をするのか、 なぜそうするのかを正確に教えてくれるだろう。 + +04:50.780 --> 04:54.200 +そして、 おそらく私のノートと同じくらい良いものだ。 + +04:54.200 --> 04:57.050 +いずれにせよ、 これで必要なことはわかったはずだ。 + +04:57.050 --> 05:01.640 +また、 わからないコードがあれば、 いつでもChatGPTを使うことができます。 + +05:03.230 --> 05:08.630 +そこで、 このコースを最大限に活用するために、 いくつかお願いしたいことがある。 + +05:08.660 --> 05:12.410 +まずは、 私のコーディングにお付き合いください。 + +05:12.410 --> 05:16.640 +だから、 私がラボで細胞を処理するとき、 同時に、 あるいはその後に、 + +05:16.670 --> 05:18.890 +自分で処理するんだ。 + +05:18.890 --> 05:25.640 +そして、 さまざまな理由でうまくいかないことがあったら、 その原因を突き止めよう。 + +05:25.670 --> 05:26.540 +デバッグをしよう。 + +05:26.540 --> 05:33.380 +GitHubにコード例をアップするのもいい方法だ。 + +05:33.380 --> 05:34.670 +実際、 それも素晴らしい方法だ。 + +05:34.670 --> 05:39.770 +もしあなたがこの仕事を始めたばかりで、 この経験を積み重ねてきたことを示すために履歴書のようなものを作ろうとしているなら、 + +05:39.770 --> 05:48.380 +GitHubのレポが最高の履歴書となるでしょう。 + +05:48.380 --> 05:51.170 +そしてもちろん、 私たちのようなプロジェクトにぴったりと当てはまるものを入れたくはないだろう。 + +05:51.200 --> 05:52.910 +自分のものにしたいのだろう。 + +05:52.910 --> 06:01.160 +自分のビジネス分野や個人的に取り組んでいるプロジェクトにこれを適用して、 似ているようで少し違うものにするにはどうしたらいいか? 
+ +06:01.190 --> 06:03.950 +新たなビジネス価値を生み出すために。 + +06:04.010 --> 06:10.760 +このコースを最大限に活用し、 コードを共有するための素晴らしい方法だ。 + +06:10.760 --> 06:13.130 +あなたがそれで満足し、 あなたがそれでいいのなら。 + +06:13.130 --> 06:18.140 +プルリクエストを提出する方法を説明しました。 + +06:18.170 --> 06:23.660 +また、 他の受講生があなたの例を見ることができるように、 + +06:23.660 --> 06:27.590 +再公開することもできます。 + +06:27.590 --> 06:29.540 +そしてもちろん、 それを貫く。 + +06:29.570 --> 06:30.950 +頑張るんだ。 + +06:30.980 --> 06:34.550 +このコースはどんどん良くなっていくよ。 + +06:34.550 --> 06:39.440 +多くのプロジェクトがあり、 多くの商業的応用がある。 + +06:39.440 --> 06:43.550 +そして、 私は間違いなくあなたがこの道を進むことを勧める。 + +06:43.580 --> 06:48.410 +そして、 もう1度ここに書かなかったことは、 コースを最大限に活用することだ。 + +06:48.410 --> 06:52.220 +また、 何かお手伝いできることがあれば、 声をかけてください。 diff --git a/week5/community-contributions/subtitles/srts/60617251/ko_KR.srt b/week5/community-contributions/subtitles/srts/60617251/ko_KR.srt new file mode 100755 index 0000000..52d2d44 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60617251/ko_KR.srt @@ -0,0 +1,307 @@ +WEBVTT + +00:00.050 --> 00:02.180 +당연히 축하해야죠 + +00:02.210 --> 00:06.380 +어제는 이 코스에서 엄청난 첫날을 보냈고 그걸 해냈죠 + +00:06.380 --> 00:09.560 +환경만 설정하면 바로 레이스가 시작되죠 + +00:09.560 --> 00:14.840 +복습해보면, 이제 할 수 있는 건 llama를 이용해 로컬 모델을 실행하는 거죠 + +00:14.840 --> 00:18.410 +OpenAI API를 통해 개척자 모델을 실행할 수 있죠 + +00:18.410 --> 00:22.130 +적어도 상위 레벨에서는 시스템과 사용자 프롬프트의 차이를 이해하죠 + +00:22.130 --> 00:28.730 +요약 사용 사례를 구축했는데 중요한 상업적 응용 프로그램이죠 + +00:29.120 --> 00:35.270 +오늘은 LLM 엔지니어가 되기 위해 어떤 단계를 거쳐야 하는지 얘기해 보겠습니다 성공을 위한 + +00:35.270 --> 00:39.110 +준비 과정과 비트를 이용한 모델에 대해 더 얘기해 보죠 + +00:39.110 --> 00:41.030 +그게 계획이에요 + +00:41.060 --> 00:47.750 +먼저 강조하고 싶은 건 이 8주 과정의 실용성이에요 + +00:47.750 --> 00:50.480 +가장 먼저 이론에 대해 배울 거예요 + +00:50.510 --> 00:53.420 +여러분이 필요한 기본 정보를 다룰 거예요 + +00:53.420 --> 00:56.600 +하지만 실용적인 맥락에서 할 거예요 + +00:56.630 --> 00:59.810 +전 실전이야말로 최고의 배움이라고 믿어요 + +00:59.810 --> 01:01.640 +그게 우리가 할 일이죠 + +01:01.820 --> 01:03.620 +좋은 일에 쓸 거예요 + +01:03.620 --> 01:06.620 +상업 프로젝트를 진행할 거예요 + +01:06.620 --> 01:11.390 +여기서 배운 것을 본업에 어떻게 적용할지 생각해 
보라고 할 + +01:11.390 --> 01:15.710 +때가 많아요 그걸 구현할 시제품을 만들어 보세요 + +01:15.710 --> 01:18.470 +이게 이 코스의 맛이에요 + +01:18.950 --> 01:24.740 +LM 엔지니어링에는 세 가지 다른 양상이 있습니다 진행하면서 알게 되실 텐데요 + +01:24.770 --> 01:29.960 +첫 번째는 모델을 이해하는 겁니다 다양한 유형의 광범위한 LM과 + +01:29.990 --> 01:34.610 +어떤 게 가능한지를요 오픈 소스 버전이든 폐쇄 소스 유료 버전이든 + +01:34.610 --> 01:40.220 +이미지나 오디오를 생성할 수 있는 다중 모듈인 것이든 LMS의 다양한 + +01:40.220 --> 01:46.610 +아키텍처든 가장 먼저 어떤 LM을 어떻게 선택해야 하는지를요 + +01:46.610 --> 01:53.270 +모든 툴을 살펴볼 겁니다 안아주는 얼굴이라든지 랑게인 + +01:53.270 --> 01:58.490 +접착 코드도 있고 멋진 그라디오도 있죠 + +01:58.520 --> 02:01.130 +전 무게와 편견을 좋아해요 + +02:01.130 --> 02:05.270 +생산으로 배포하는 데 큰 도움이 되죠 + +02:05.270 --> 02:10.790 +다양한 기술을 적용할 수 있는 방법을 살펴볼 거예요 + +02:10.880 --> 02:18.590 +인공지능 분야는 API 래그를 이용해 비즈니스 문제를 해결합니다 API는 뜨거운 주제죠 미세 + +02:18.590 --> 02:23.360 +조정하고 최종적으로는 에이전트식 AI 솔루션이죠 + +02:24.830 --> 02:30.170 +이 코스는 여러분의 경험 수준에 상관없이 적용될 수 있도록 + +02:30.170 --> 02:31.400 +계획했어요 + +02:31.640 --> 02:37.130 +처음 몇 주는 너무 단순하다고 느끼실 겁니다 처음 며칠은요 + +02:37.130 --> 02:42.980 +여기서 잠깐 기다리세요 기본 정보를 강화하는 방법으로 사용하세요 + +02:42.980 --> 02:47.570 +토큰 같은 것에 대해 좀 더 자세히 얘기할 테니까요 + +02:47.570 --> 02:52.730 +배울 게 많겠지만 재빨리 훑어보고 자신의 프로젝트로 만들 수 있어요 + +02:52.730 --> 02:59.600 +그러니 이 경험을 토대로 더 심오한 버전을 만들고 앞으로 있을 더 어렵고 재밌는 프로젝트를 + +02:59.600 --> 03:00.980 +준비하세요 + +03:01.190 --> 03:05.510 +너무 힘들면 걱정하지 마세요 + +03:05.540 --> 03:07.910 +실용적인 건 천천히 공부하세요 + +03:07.940 --> 03:09.860 +천천히 연습하세요 + +03:09.860 --> 03:11.210 +잘 헤쳐나가세요 + +03:11.210 --> 03:15.650 +첫째 주에 더 자세히 알려드릴 가이드도 있어요 더 탄탄한 기반을 마련해 + +03:15.650 --> 03:16.790 +줄 거예요 + +03:16.790 --> 03:19.550 +제발, 제발 도움을 청해 주세요 + +03:19.550 --> 03:21.620 +난 신속하게 대응해요 + +03:21.620 --> 03:27.290 +언제든 제게 연락하세요 플랫폼이나 이메일 또는 링크드인을 통해서요 + +03:27.320 --> 03:29.180 +깃허브 압류에 자세한 내용이 있어요 + +03:29.180 --> 03:30.290 +내게 연락해요 + +03:30.320 --> 03:31.130 +Get it, get it, 도움 요청해요 + +03:31.130 --> 03:32.600 +그래서 제가 온 거예요 + +03:32.900 --> 03:35.810 +느낌이 괜찮으면 더 좋고요 + +03:35.810 --> 03:36.770 +계속해요 + +03:37.580 --> 03:43.880 +필수 조건은 초보자에서 중급 레벨 
파이썬 을 지원하는 것인데요 + +03:43.880 --> 03:49.370 +중간 레벨의 파이썬 을 가지고 있다면 가장 쉽게 찾을 수 있고 가장 많은 것을 얻을 수 + +03:49.370 --> 03:50.060 +있어요. + +03:50.180 --> 03:55.910 +예를 들어 여기 이 코드 줄을 보시면 함수 내부의 무작위 선이죠 + +03:55.910 --> 04:01.280 +그게 뭘 하는 건지 안다면 아주 좋은 겁니다 그게 정확히 뭘 하는 + +04:01.280 --> 04:07.250 +건지 알고 그 이유를 안다면 최적의 방법은 아닐 수도 있어요 그럼 그 이상인 + +04:07.250 --> 04:12.260 +거죠 고급으로 설정할 겁니다 이게 뭘 하는지 모르고 yild이나 + +04:12.290 --> 04:19.910 +set이나 .get 같은 단어가 익숙하지 않다면 특별한 공책이 있어요 첫 주에 Jupyter + +04:19.910 --> 04:28.490 +랩을 특별히 보여드리는 거죠 이 레벨에서 파이썬에 대한 가이드예요 + +04:28.490 --> 04:33.890 +여러분이 그 노트를 보는 동안 저는 디딤돌을 하나씩 짚어 나가며 이런 선이 말이 되는 + +04:33.890 --> 04:36.620 +지점을 찾을 거예요. Get it. + +04:36.620 --> 04:40.550 +물론 챗GPT와 클로드도 사용할 수 있어요 + +04:40.580 --> 04:45.320 +지니는 코드를 정말 잘 설명해요 + +04:45.320 --> 04:49.310 +실제로 이런 걸 넣으면 어떤 작용을 하는지 그 이유를 설명하고 + +04:49.310 --> 04:50.780 +설명해주죠 + +04:50.780 --> 04:54.200 +제 노트만큼이나 잘 썼을 거예요 + +04:54.200 --> 04:57.050 +어느 쪽이든 필요한 건 다 얻었어요 + +04:57.050 --> 05:01.640 +그리고 언제든 어떤 코드를 이해 못 하겠으면 챗GPT를 이용하세요 + +05:03.230 --> 05:08.630 +이 코스를 최대한 활용하기 위해 몇 가지 물어볼게요. Get it. 
+ +05:08.660 --> 05:12.410 +먼저, 제가 코딩하는 걸 잘 따라오세요 + +05:12.410 --> 05:16.640 +랩에서 셀을 실행할 때 동시에 또는 나중에 + +05:16.670 --> 05:18.890 +직접 할 수 있죠 + +05:18.890 --> 05:25.640 +여러 이유로 스내퍼스를 누르면 그 이유를 알아내려고 노력하세요 + +05:25.670 --> 05:26.540 +디버깅을 하죠 + +05:26.540 --> 05:33.380 +학습하고 연습을 완료하는 훌륭한 방법이죠 코드 예제를 깃허브에 올리는 거예요 + +05:33.380 --> 05:34.670 +사실 좋은 방법이에요 + +05:34.670 --> 05:39.770 +이 분야에 처음 들어왔고 이 분야에서 쌓은 경험을 자랑하려고 이력서를 + +05:39.770 --> 05:45.290 +작성하고 싶다면 깃허브 리포토가 가장 좋습니다 사람들이 그걸 보고 여러분이 + +05:45.290 --> 05:48.380 +작업한 걸 볼 테니까요 + +05:48.380 --> 05:51.170 +물론 우리가 하는 것과 똑같은 것을 Put 하면 안 되죠 + +05:51.200 --> 05:52.910 +자신의 것으로 만들고 싶죠 + +05:52.910 --> 05:57.800 +이걸 어떻게 비즈니스 영역이나 작업 중인 개인 프로젝트에 적용할 수 있는지 + +05:57.800 --> 06:01.160 +알아내야 하죠 유사하거나 약간 다른 것으로요 + +06:01.190 --> 06:03.950 +새 사업 가치를 위해서요 + +06:04.010 --> 06:10.760 +이 코스에서 가장 많은 걸 얻을 수 있는 방법이죠 코드를 공유하도록 하세요 get it + +06:10.760 --> 06:13.130 +본인 마음에 들면 그걸로 된 거예요 + +06:13.130 --> 06:18.140 +끌어오기 요청을 어떻게 제출할지 지침이 있어요 코드를 볼 수 있다는 뜻이죠 + +06:18.170 --> 06:23.660 +원하시면 피드백을 드릴 수도 있고 다시 게시할 수도 있습니다 이 과정을 듣는 다른 + +06:23.660 --> 06:27.590 +학생들이 여러분의 예를 보고 함께 공유할 수 있도록요 + +06:27.590 --> 06:29.540 +물론 계속 노력해야죠 + +06:29.570 --> 06:30.950 +조금만 참아요 + +06:30.980 --> 06:34.550 +이 코스는 점점 더 좋아져요 + +06:34.550 --> 06:39.440 +앞으로 많은 프로젝트와 상업적 응용 프로그램이 있을 거예요 + +06:39.440 --> 06:43.550 +코스를 지키라고 응원할게요 + +06:43.580 --> 06:48.410 +한 가지 더 말씀드리자면 MTB를 최대한 활용해야 해요 TMI, TMI, MTB F1 + +06:48.410 --> 06:52.220 +그리고 제가 도울 일이 있으면 언제든 연락 주세요 diff --git a/week5/community-contributions/subtitles/srts/60617255/en_US.srt b/week5/community-contributions/subtitles/srts/60617255/en_US.srt new file mode 100755 index 0000000..448aa08 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60617255/en_US.srt @@ -0,0 +1,334 @@ +WEBVTT + +00:00.050 --> 00:02.540 +I'm now going to talk for a bit about models. 
+ +00:02.570 --> 00:10.190 +A term you often hear is the term frontier models, which refers to the llms that are pioneering what + +00:10.190 --> 00:13.760 +is possible today, the largest possible models. + +00:13.760 --> 00:18.830 +And often when people say frontier models, they're referring to the closed source models, the paid + +00:18.830 --> 00:21.230 +models like GPT and Claude. + +00:21.260 --> 00:26.420 +Actually, sometimes people also say frontier models when they're referring to the biggest, strongest + +00:26.420 --> 00:28.160 +open source models as well. + +00:28.160 --> 00:30.770 +So depending on the context, it could mean either thing. + +00:30.770 --> 00:35.300 +But let's first talk about the closed source frontier models. + +00:35.780 --> 00:38.450 +And these are also sometimes called the super scalers. + +00:38.480 --> 00:42.140 +They are the largest, highest scale of the models. + +00:42.140 --> 00:47.120 +So first of all, the one that needs no introduction I'm sure is GPT from OpenAI. + +00:47.210 --> 00:54.020 +When ChatGPT came out in late 2022, it caught us all off guard with its power. + +00:54.350 --> 00:56.210 +And I'm sure you're familiar with it. + +00:56.240 --> 00:58.370 +Claude from anthropic. + +00:58.490 --> 01:01.700 +Anthropic is the competitor to OpenAI. + +01:01.730 --> 01:02.840 +That's that's very well known. + +01:02.840 --> 01:06.530 +And Claude is the one that's that's usually favored by data scientists. + +01:06.560 --> 01:12.230 +but Claude and GPT are often considered neck and neck right now in the leaderboards. + +01:12.260 --> 01:14.060 +Claude has the slight edge. + +01:14.150 --> 01:17.090 +Gemini is Google's entrant. + +01:17.330 --> 01:21.800 +You probably remember from when we looked at llama that they also Google also has Gemma. + +01:21.830 --> 01:23.900 +The the open source variant as well. + +01:23.900 --> 01:28.370 +And command R is one that you may or may not have come across from cohere. 
+ +01:28.400 --> 01:29.750 +Canadian AI company. + +01:29.750 --> 01:36.440 +And then perplexity is a search engine which actually can use one of the other models, but also has + +01:36.440 --> 01:37.700 +a model itself. + +01:37.730 --> 01:41.210 +So these are some of the big frontier models. + +01:41.420 --> 01:44.780 +And let's talk about some of the open source models. + +01:44.780 --> 01:52.610 +So llama uh, after which llama is named llama from meta is of course the most famous of the open source + +01:52.610 --> 01:59.420 +models because meta paved the way in the field of open source LMS by open sourcing the original llama + +01:59.420 --> 02:00.080 +one. + +02:00.200 --> 02:05.270 +Uh, there's one called Mistral from, uh, French company Mistral, which is a model, which is what + +02:05.270 --> 02:07.010 +they call a mixture of experts. + +02:07.010 --> 02:09.770 +It contains multiple smaller models. + +02:10.160 --> 02:14.210 +Quen is a model that I mentioned back when we were playing with Obama. + +02:14.210 --> 02:16.010 +It is a powerhouse model. + +02:16.010 --> 02:23.390 +It's really super impressive from Alibaba Cloud, and we will use Quan from time to time because it's + +02:23.390 --> 02:26.750 +it's as I say, it's very powerful for its size. + +02:26.780 --> 02:34.490 +Gemma I mentioned is Google's smaller model and Fi is Microsoft's smaller open source model. + +02:35.510 --> 02:38.990 +So this is confusing and it's super important. + +02:38.990 --> 02:43.820 +And it's something which some of you may already know, but but it might be something that's been nagging + +02:43.820 --> 02:45.470 +as well at a few of you. + +02:45.470 --> 02:47.840 +And I want to be really clear on this. 
+ +02:47.840 --> 02:54.020 +There are different ways that you can use models that are in completely different approaches, and it's + +02:54.020 --> 02:58.430 +important to understand the differences between them and when you come across them, being used in those + +02:58.430 --> 02:59.180 +different ways. + +02:59.180 --> 03:01.730 +Have in your mind what's going on here. + +03:01.730 --> 03:09.170 +So first of all, there are chat interfaces to using models, obviously like ChatGPT, which is a web + +03:09.170 --> 03:15.170 +front end where you are chatting and you are calling something that's running in the cloud, and that + +03:15.170 --> 03:21.530 +the whole process of interacting with the LLM LM is being handled by OpenAI on their cloud. + +03:21.530 --> 03:26.900 +In the case of ChatGPT, there's also, of course, a Cloud and Gemini Advance and others. + +03:27.560 --> 03:29.960 +There are cloud APIs. + +03:29.960 --> 03:35.780 +And this is where again, you are calling something that's running on the cloud, but you're doing it + +03:35.780 --> 03:38.270 +with code, not through a user interface. + +03:38.270 --> 03:45.950 +And what we did in the summarization Jupyter notebook, Jupyter Lab was calling OpenAI's API. + +03:45.980 --> 03:52.310 +We were connecting to OpenAI, calling their API, and with the chat interfaces you typically it's it's + +03:52.310 --> 03:58.040 +either free for a free tier, or you're paying a monthly subscription fee to use the user interface + +03:58.040 --> 04:00.380 +chat almost as much as you want. + +04:00.410 --> 04:01.820 +There are some limits there. + +04:02.360 --> 04:04.280 +It's different with the APIs. + +04:04.280 --> 04:10.100 +With the APIs, there's no subscription, there's no monthly charge, but rather you pay for every API + +04:10.100 --> 04:10.910 +request you make. + +04:10.940 --> 04:12.260 +If it's a paid API. + +04:12.290 --> 04:17.900 +There are also open source free APIs too, so you can call the APIs directly. 
+ +04:17.900 --> 04:23.990 +There are also libraries like Lang Chain, which give you a kind of abstraction layer, and you can + +04:23.990 --> 04:24.820 +use Lang chain. + +04:24.820 --> 04:27.130 +And then within it you can call the different APIs. + +04:27.130 --> 04:31.210 +And it presents you with one API that is unified across them. + +04:31.210 --> 04:34.210 +And so there are some of these frameworks like Lang chain. + +04:34.210 --> 04:39.460 +And if you see someone using Lang chain, it's really just using the lm API under the covers. + +04:39.460 --> 04:45.850 +It's just giving you a nicer user, nicer API interface, more consistent, uh, on top of it. + +04:46.360 --> 04:51.250 +And then there's another type of API which is a bit of a different take, which is using something called + +04:51.250 --> 04:58.540 +a managed AI cloud service, which is where you are connecting with a provider like Amazon, Google + +04:58.540 --> 05:00.160 +or Microsoft Azure. + +05:00.370 --> 05:07.390 +And they are running the models on their cloud, and they're presenting you with a common interface + +05:07.390 --> 05:09.550 +so that you can run behind the scenes. + +05:09.550 --> 05:11.680 +It could be open source, it could be closed source. + +05:11.680 --> 05:13.510 +And you'll hear of Amazon Bedrock. + +05:13.540 --> 05:14.650 +That's Amazon's offering. + +05:14.650 --> 05:18.820 +Google vertex AI is Google's and Azure ML. + +05:18.850 --> 05:21.850 +It goes by some other names too is Microsoft's offering. + +05:21.850 --> 05:26.230 +So these are the managed AI cloud services. + +05:26.230 --> 05:30.220 +But what all of these have in common is that you are writing code Locally. + +05:30.220 --> 05:35.860 +That then makes a call to an LM running in the cloud, and that is the cloud API. 
+
+05:35.980 --> 05:43.660
+And then there's a third approach, and that is when you get the code and the weights for an LM yourself
+
+05:43.660 --> 05:52.210
+and you run it yourself, uh, on your box or potentially by remoting into a remote box.
+
+05:52.210 --> 05:56.650
+And here again, there's two different ways that we will be doing it on this course.
+
+05:56.650 --> 05:58.930
+And it's important to understand the differences between them.
+
+05:58.930 --> 06:05.440
+One of them is using hugging face, where we will be able to get access to like the Python code and
+
+06:05.440 --> 06:09.250
+the PyTorch code, which has that model in it.
+
+06:09.250 --> 06:14.770
+And we'll be able to then work in a fairly granular way with that model, will be able to use it to
+
+06:14.770 --> 06:20.890
+do things like tokenized text, and then call the model with the tokens, and you'll be actually operating
+
+06:20.890 --> 06:21.520
+the model.
+
+06:21.520 --> 06:26.980
+And we'll typically do that using something like Google Colab, where we can be running it on a very
+
+06:26.980 --> 06:33.610
+high powered box in the cloud, because typically one's local box isn't powerful enough to run that
+
+06:33.610 --> 06:37.210
+kind of, uh, that level of processing.
+
+06:37.630 --> 06:45.730
+And as an alternative to that, people have taken this code and they've optimized it into high performance
+
+06:45.760 --> 06:51.460
+C plus plus code and compiled it so that you can run it locally on your box.
+
+06:51.460 --> 06:53.890
+And that is what Olama is.
+
+06:54.070 --> 06:58.900
+It uses something called llama CPP behind the scenes as the C plus plus code.
+
+06:58.930 --> 07:01.420
+Now that means that you can run it locally.
+
+07:01.420 --> 07:08.410
+You're running the models in inference and execution mode on your box, but you don't have as much ability
+
+07:08.410 --> 07:12.550
+to control what's going on because it's just fully compiled code.
+ +07:12.550 --> 07:14.650 +So that gives you hopefully some insight. + +07:14.650 --> 07:19.510 +I'm glossing over some of the details, but hopefully shows you the landscape of the three different + +07:19.510 --> 07:24.670 +ways that you can work with models, and then some of the sort of sub sub techniques under that. + +07:25.210 --> 07:27.430 +With all of that, what are we going to do now? + +07:27.430 --> 07:32.080 +We're going to do an exercise, and it's going to be a useful exercise, and one that you'll be able + +07:32.080 --> 07:36.610 +to continue using throughout the course, because it's going to involve olama. + +07:36.850 --> 07:42.520 +And without further ado, I'm going to flip over to JupyterLab to explain the exercise. diff --git a/week5/community-contributions/subtitles/srts/60617255/ja_JP.srt b/week5/community-contributions/subtitles/srts/60617255/ja_JP.srt new file mode 100755 index 0000000..127271f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60617255/ja_JP.srt @@ -0,0 +1,277 @@ +WEBVTT + +00:00.050 --> 00:02.540 +これからモデルについて少し話をしようと思う。 + +00:02.570 --> 00:10.190 +よく耳にする言葉にフロンティアモデルというものがあるが、 これは現在可能なことを開拓しているllms、 + +00:10.190 --> 00:13.760 +つまり可能性のある最大のモデルを指す。 + +00:13.760 --> 00:21.230 +そして、 フロンティア・モデルと言われる場合、 クローズド・ソース・モデル、 GPTやクロードのような有料モデルを指すことが多い。 + +00:21.260 --> 00:28.160 +実際、 最大で最強のオープンソース・モデルを指してフロンティア・モデルと言うこともある。 + +00:28.160 --> 00:30.770 +だから文脈によっては、 どちらの意味にもなる。 + +00:30.770 --> 00:35.300 +しかし、 まずはクローズドソースのフロンティアモデルについて話そう。 + +00:35.780 --> 00:38.450 +また、 これらはスーパースケーラーと呼ばれることもある。 + +00:38.480 --> 00:42.140 +モデルの中で最も大きく、 スケールが大きい。 + +00:42.140 --> 00:47.120 +まず、 紹介するまでもないだろう、 OpenAIのGPTだ。 + +00:47.210 --> 00:54.020 +2022年末にChatGPTが登場したとき、 そのパワーは私たちを驚かせた。 + +00:54.350 --> 00:56.210 +あなたもよくご存知でしょう。 + +00:56.240 --> 00:58.370 +アントロピックのクロード。 + +00:58.490 --> 01:01.700 +AnthropicはOpenAIのライバルだ。 + +01:01.730 --> 01:02.840 +それはよく知られていることだ。 + +01:02.840 --> 01:06.530 +そしてクロードは、 通常データサイエンティストに好まれるものだ。 + +01:06.560 --> 
01:12.230 +しかし、 クロードとGPTは今、 首位争いを繰り広げている。 + +01:12.260 --> 01:14.060 +クロードがやや優勢。 + +01:14.150 --> 01:17.090 +ジェミニはグーグルの参入企業である。 + +01:17.330 --> 01:21.800 +グーグルにはジェンマもいる。 + +01:21.830 --> 01:23.900 +オープンソースのバリエーションもある。 + +01:23.900 --> 01:28.370 +そしてコマンドRは、 コヒーレで見たことがあるかもしれないし、 ないかもしれない。 + +01:28.400 --> 01:29.750 +カナダのAI企業。 + +01:29.750 --> 01:37.700 +そして "perplexity "は、 実際に他のモデルのいずれかを使用することができる検索エンジンであるが、 同時にモデル自体も持っている。 + +01:37.730 --> 01:41.210 +これが大きなフロンティアモデルだ。 + +01:41.420 --> 01:44.780 +そして、 オープンソースのいくつかのモデルについて話そう。 + +01:44.780 --> 01:52.610 +というのも、 metaはオリジナルのllamaをオープンソース化することで、 + +01:52.610 --> 02:00.080 +オープンソースLMSの分野を切り開いたからだ。 + +02:00.200 --> 02:07.010 +フランスのミストラル社のミストラルというモデルがある。 + +02:07.010 --> 02:09.770 +複数の小型モデルが含まれている。 + +02:10.160 --> 02:14.210 +クエンは、 私たちがオバマとプレーしていたときに話したモデルだ。 + +02:14.210 --> 02:16.010 +強力なモデルだ。 + +02:16.010 --> 02:26.750 +アリババクラウドは本当に素晴らしく、 私たちも時々Quanを使うつもりです。 + +02:26.780 --> 02:34.490 +私が述べたGemmaはGoogleの小型モデルで、 FiはMicrosoftの小型オープンソースモデルだ。 + +02:35.510 --> 02:38.990 +だから、 これは混乱しているし、 超重要なことなんだ。 + +02:38.990 --> 02:45.470 +すでにご存知の方もいらっしゃるかもしれませんが、 何人かの方にとっては、 ずっと気になっていたことかもしれません。 + +02:45.470 --> 02:47.840 +これだけははっきりさせておきたい。 + +02:47.840 --> 02:54.020 +モデルにはまったく異なるアプローチの使い方があり、 + +02:54.020 --> 02:59.180 +その違いを理解することが重要だ。 + +02:59.180 --> 03:01.730 +ここで何が起こっているのか、 頭の中に思い浮かべてください。 + +03:01.730 --> 03:21.530 +ChatGPTはウェブフロントエンドで、 チャットをしながらクラウド上で動作している何かを呼び出すことができます。 + +03:21.530 --> 03:26.900 +ChatGPTの場合、 もちろんクラウドやジェミニ・アドバンスなどもある。 + +03:27.560 --> 03:29.960 +クラウドAPIがある。 + +03:29.960 --> 03:35.780 +ここでもまた、 クラウド上で実行されている何かを呼び出しているわけだが、 ユーザー・インターフェースを介してではなく、 + +03:35.780 --> 03:38.270 +コードを使っている。 + +03:38.270 --> 03:45.950 +Jupyterラボの要約Jupyterノートブックでやったことは、 OpenAIのAPIを呼び出すことだった。 + +03:45.980 --> 03:52.310 +私たちはOpenAIに接続し、 彼らのAPIを呼び出していました。 一般的にチャット・インターフェースでは、 + +03:52.310 --> 04:00.380 +無料のティアか、 ユーザー・インターフェースのチャットをほぼ好きなだけ使うために月額利用料を支払うかのどちらかです。 + +04:00.410 --> 04:01.820 +そこには限界がある。 + +04:02.360 --> 04:04.280 +APIは違う。 + 
+04:04.280 --> 04:10.910 +APIにはサブスクリプションも月額料金もなく、 APIリクエストのたびに料金を支払う。 + +04:10.940 --> 04:12.260 +有料APIの場合。 + +04:12.290 --> 04:17.900 +オープンソースの無料APIもあるので、 APIを直接呼び出すこともできる。 + +04:17.900 --> 04:24.820 +ラング・チェインのような抽象化レイヤーを提供するライブラリもあり、 ラング・チェインを使うこともできる。 + +04:24.820 --> 04:27.130 +そして、 その中でさまざまなAPIを呼び出すことができる。 + +04:27.130 --> 04:31.210 +そして、 それらを統一した1つのAPIを提供する。 + +04:31.210 --> 04:34.210 +ラングチェーンのようなフレームワークもある。 + +04:34.210 --> 04:39.460 +ラングチェーンを使っている人を見かけたら、 それは単にlm APIを使っているだけだ。 + +04:39.460 --> 04:45.850 +より良いユーザー、 より良いAPIインターフェイス、 より一貫性のあるものを提供するだけだ。 + +04:46.360 --> 04:51.250 +マネージドAIクラウドサービスと呼ばれるもので、 + +04:51.250 --> 05:00.160 +アマゾンやグーグル、 マイクロソフト・アズールなどのプロバイダーと接続するものだ。 + +05:00.370 --> 05:09.550 +そして、 彼らはクラウド上でモデルを実行し、 裏側で実行できるように共通のインターフェイスを提示している。 + +05:09.550 --> 05:11.680 +オープンソースかもしれないし、 クローズドソースかもしれない。 + +05:11.680 --> 05:13.510 +アマゾン・ベッドロックのことも耳にするだろう。 + +05:13.540 --> 05:14.650 +それがアマゾンの提案だ。 + +05:14.650 --> 05:18.820 +Google vertex AIはGoogleとAzureのMLである。 + +05:18.850 --> 05:21.850 +マイクロソフトが提供するこの製品は、 他の名前でも呼ばれている。 + +05:21.850 --> 05:26.230 +これがマネージドAIクラウドサービスだ。 + +05:26.230 --> 05:30.220 +しかし、 これらに共通しているのは、 ローカルでコードを書いているということだ。 + +05:30.220 --> 05:35.860 +そして、 クラウド上で稼働しているLMを呼び出す。 それがクラウドAPIだ。 + +05:35.980 --> 05:43.660 +第3のアプローチは、 LMのコードとウェイトを自分で入手し、 + +05:43.660 --> 05:52.210 +それを自分のマシンで、 あるいはリモートを使って実行する方法だ。 + +05:52.210 --> 05:56.650 +そしてここでも、 このコースでは2つの異なる方法がある。 + +05:56.650 --> 05:58.930 +そして、 両者の違いを理解することが重要だ。 + +05:58.930 --> 06:09.250 +PythonのコードやPyTorchのコードにアクセスできるようになる。 + +06:09.250 --> 06:14.770 +そして、 そのモデルを使ってかなり細かい作業ができるようになり、 トークン化されたテキストのようなことができるようになり、 + +06:14.770 --> 06:21.520 +トークンを使ってモデルを呼び出し、 実際にモデルを操作することになります。 + +06:21.520 --> 06:26.980 +通常、 Google Colabのようなものを使い、 + +06:26.980 --> 06:37.210 +クラウド上の非常に高性能なマシンでそれを実行する。 + +06:37.630 --> 06:45.730 +それに代わるものとして、 人々はこのコードを高性能のC+++コードに最適化し、 コンパイルして、 + +06:45.760 --> 06:51.460 +自分のコンピュータ上でローカルに実行できるようにした。 + +06:51.460 --> 06:53.890 +それがオラマだ。 + +06:54.070 --> 06:58.900 
+C++のコードとして、 裏ではllama CPWというものを使っている。 + +06:58.930 --> 07:01.420 +つまり、 ローカルで実行できるということだ。 + +07:01.420 --> 07:12.550 +しかし、 完全にコンパイルされたコードなので、 何が起こっているのかをコントロールすることはできない。 + +07:12.550 --> 07:14.650 +だから、 これで少しはわかってもらえると思う。 + +07:14.650 --> 07:19.510 +細かいことは省くが、 モデルを使ってできる3つの異なる方法と、 + +07:19.510 --> 07:24.670 +その下にあるいくつかのサブ・テクニックを紹介したい。 + +07:25.210 --> 07:27.430 +それを踏まえて、 これからどうするか。 + +07:27.430 --> 07:32.080 +これから練習をしますが、 オラマを使うので、 コースを通して使い続けられる、 + +07:32.080 --> 07:36.610 +役に立つ練習になるでしょう。 + +07:36.850 --> 07:42.520 +では、 早速JupyterLabでエクササイズを説明しよう。 diff --git a/week5/community-contributions/subtitles/srts/60617255/ko_KR.srt b/week5/community-contributions/subtitles/srts/60617255/ko_KR.srt new file mode 100755 index 0000000..9dd86bd --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60617255/ko_KR.srt @@ -0,0 +1,328 @@ +WEBVTT + +00:00.050 --> 00:02.540 +이제 모델에 대해 비트 좀 할게요 + +00:02.570 --> 00:10.190 +프런티어 모델이라는 용어가 종종 등장하는데 오늘날 가능한 가장 큰 모델을 + +00:10.190 --> 00:13.760 +개척하는 llms를 뜻하죠 + +00:13.760 --> 00:18.830 +사람들이 개척자 모델이라고 할 때 주로 비공개 소스 모델을 말합니다 GPT나 + +00:18.830 --> 00:21.230 +클로드 같은 유료 모델이죠 + +00:21.260 --> 00:26.420 +사실, 프런티어 모델이라고 하는 사람들도 있어요 가장 크고 강력한 오픈 + +00:26.420 --> 00:28.160 +소스 모델을 언급할 때요 + +00:28.160 --> 00:30.770 +문맥에 따라 둘 다 될 수 있어요 + +00:30.770 --> 00:35.300 +먼저 비공개 프런티어 모델에 대해 얘기해 보죠 + +00:35.780 --> 00:38.450 +슈퍼 스케일러라고도 불리죠 + +00:38.480 --> 00:42.140 +가장 크고 높은 모형이죠 + +00:42.140 --> 00:47.120 +먼저, 오픈AI의 GPT는 설명이 필요 없겠죠? 
+ +00:47.210 --> 00:54.020 +2022년 말 챗GPT가 나왔을 때 그 출력에 다들 깜짝 놀랐어요 + +00:54.350 --> 00:56.210 +당신도 잘 알 거예요 + +00:56.240 --> 00:58.370 +인류학의 클로드예요 + +00:58.490 --> 01:01.700 +앤트로픽은 오픈라이의 경쟁자죠 + +01:01.730 --> 01:02.840 +잘 알려진 사실이죠 + +01:02.840 --> 01:06.530 +데이터 과학자들이 좋아하는 건 클로드예요 + +01:06.560 --> 01:12.230 +클로드와 GPT는 현재 순위표에서 막상막하로 평가받죠 + +01:12.260 --> 01:14.060 +클로드는 살짝 더 날카롭죠 + +01:14.150 --> 01:17.090 +제미니는 구글의 참가자죠 + +01:17.330 --> 01:21.800 +라마를 검색했을 때 기억하실 거예요 구글에도 제마가 있어요 + +01:21.830 --> 01:23.900 +오픈 소스 변종도요 + +01:23.900 --> 01:28.370 +명령 R은 코어에서 접할 수도 있고 아닐 수도 있어요 + +01:28.400 --> 01:29.750 +캐나다 인공지능 회사요 + +01:29.750 --> 01:36.440 +당황스러움은 검색 엔진으로 다른 모델을 사용할 수 있지만 모델 그 자체도 + +01:36.440 --> 01:37.700 +있어요 + +01:37.730 --> 01:41.210 +이건 개척 시대의 대형 모델이에요 + +01:41.420 --> 01:44.780 +오픈 소스 모델에 대해 얘기해보죠 + +01:44.780 --> 01:52.610 +라마는 메타에서 따온 이름인데 오픈 소스 모델 중 가장 유명하죠 메타가 원조 라마를 + +01:52.610 --> 02:00.080 +오픈 소스 소스 오픈 소싱해 오픈 소스 LMS 영역의 길을 포장한 거니까요 + +02:00.200 --> 02:05.270 +프랑스 회사 미스트랄의 미스트랄도 있는데 모델명이에요 전문가들을 + +02:05.270 --> 02:07.010 +섞은 거죠 + +02:07.010 --> 02:09.770 +작은 모델도 여러 개 포함돼 있죠 + +02:10.160 --> 02:14.210 +퀜은 오바마와 경기할 때 제가 언급했던 모델이에요 + +02:14.210 --> 02:16.010 +강력한 모델이죠 + +02:16.010 --> 02:23.390 +알리바바 클라우드에서 나온 건데 정말 인상적이에요 콴을 가끔 사용할 거예요 말씀드렸듯이 + +02:23.390 --> 02:26.750 +크기에 비해 아주 강력하거든요 + +02:26.780 --> 02:34.490 +젬마는 구글의 작은 모델이고 Fi는 마이크로소프트의 작은 오픈 소스 모델이죠 + +02:35.510 --> 02:38.990 +헷갈리지만 아주 중요해요 + +02:38.990 --> 02:43.820 +이미 아는 분들도 있겠지만 몇몇 분들은 계속 마음에 + +02:43.820 --> 02:45.470 +걸렸을 거예요 + +02:45.470 --> 02:47.840 +확실히 해두고 싶어요 + +02:47.840 --> 02:54.020 +완전히 다른 접근법으로 모델을 사용할 수 있는 다양한 방법이 있어요 그 둘의 차이를 + +02:54.020 --> 02:58.430 +이해하는 게 중요하죠 마주쳤을 때 다른 방식으로 사용될 수도 + +02:58.430 --> 02:59.180 +있고요 + +02:59.180 --> 03:01.730 +무슨 일인지 잘 생각해 봐요 + +03:01.730 --> 03:09.170 +먼저 모델을 이용한 채팅 인터페이스가 있습니다 챗GPT 같은 거죠 채팅하는 웹 프런트엔드로서 + +03:09.170 --> 03:15.170 +클라우드에서 실행 중인 뭔가를 호출하는 겁니다 LLM LM과의 상호 작용 + +03:15.170 --> 03:21.530 +전 과정은 그들의 클라우드에서 OpenAI가 처리하고요 + 
+03:21.530 --> 03:26.900 +챗GPT의 경우 클라우드와 제미니 어드밴스 등의 제약 회사가 있죠 + +03:27.560 --> 03:29.960 +클라우드 API가 있어요 + +03:29.960 --> 03:35.780 +여기서 클라우드에서 실행되는 걸 호출하지만 사용자 인터페이스가 + +03:35.780 --> 03:38.270 +아니라 코드로 하죠 + +03:38.270 --> 03:45.950 +요약본에 있는 유피터 노트북에서 유피터 랩은 OpenAI API라고 명명했죠 + +03:45.980 --> 03:52.310 +OpenAI에 연결해서 API 호출을 하고 채팅 인터페이스를 이용하면 무료로 + +03:52.310 --> 03:58.040 +사용할 수 있고 매달 사용료를 내야 사용자 인터페이스 채팅을 원하는 만큼 + +03:58.040 --> 04:00.380 +사용할 수 있어요 + +04:00.410 --> 04:01.820 +거기에도 한계가 있어요 + +04:02.360 --> 04:04.280 +API는 달라요 + +04:04.280 --> 04:10.100 +API는 구독이 없어요 매달 돈을 청구하지 않죠 대신 API 요청에 대한 비용을 + +04:10.100 --> 04:10.910 +지불해요 + +04:10.940 --> 04:12.260 +유료 API 경우에요 + +04:12.290 --> 04:17.900 +오픈 소스 무료 API도 있어요 API를 직접 호출할 수 있죠 + +04:17.900 --> 04:23.990 +랭 체인 같은 라이브러리도 있어요 일종의 추상 레이어를 제공하죠 랭 체인을 사용할 + +04:23.990 --> 04:24.820 +수 있어요 + +04:24.820 --> 04:27.130 +그 안에서 다양한 API를 호출할 수 있어요 + +04:27.130 --> 04:31.210 +통합된 API 하나를 제공하죠 + +04:31.210 --> 04:34.210 +랑 체인 같은 틀이 있어요 + +04:34.210 --> 04:39.460 +랭 체인을 사용하는 사람을 보면 커버 아래 lm API만을 사용하죠 + +04:39.460 --> 04:45.850 +더 나은 사용자와 API 인터페이스를 제공할 뿐이죠 더 일관성 있는 거요 + +04:46.360 --> 04:51.250 +또 다른 API 유형은 약간 다른데요 관리되는 인공 지능 클라우드 서비스라는 + +04:51.250 --> 04:58.540 +걸 사용합니다 아마존, 구글 또는 마이크로소프트 Azure 같은 공급자와 연결하는 거죠 비트로소프트 + +04:58.540 --> 05:00.160 +클라우드 서비스 + +05:00.370 --> 05:07.390 +클라우드에서 모델을 실행하고 공통 인터페이스를 제공해 여러분이 뒤에서 + +05:07.390 --> 05:09.550 +실행할 수 있게 하죠 + +05:09.550 --> 05:11.680 +오픈 소스일 수도 있고 폐쇄 소스일 수도 있죠 + +05:11.680 --> 05:13.510 +아마존 베드록도 있어요 + +05:13.540 --> 05:14.650 +아마존이 제공하는 거죠 + +05:14.650 --> 05:18.820 +구글의 정점 인공지능은 구글과 애저 ML이죠 + +05:18.850 --> 05:21.850 +다른 이름도 있어요 마이크로소프트가 제공하죠 + +05:21.850 --> 05:26.230 +관리되는 인공지능 클라우드 서비스죠 + +05:26.230 --> 05:30.220 +하지만 이 모든 것의 공통점은 로컬에서 코드를 작성한다는 거죠 + +05:30.220 --> 05:35.860 +클라우드에서 실행되는 LM에 호출을 하는데 그게 클라우드 API예요 + +05:35.980 --> 05:43.660 +세 번째 접근법은 LM에 대한 코드와 무게를 직접 알아내서 여러분의 + +05:43.660 --> 05:52.210 +박스에서 실행하거나 원격 get box로 원격 조종하는 것이죠. 
+ +05:52.210 --> 05:56.650 +다시 말씀드리지만 이 코스에서는 두 가지 다른 방법이 있어요 + +05:56.650 --> 05:58.930 +그 둘의 차이를 이해하는 게 중요해요 + +05:58.930 --> 06:05.440 +하나는 얼굴 안기입니다 파이썬 코드와 해당 모델이 있는 PyTorch + +06:05.440 --> 06:09.250 +코드에 엑세스할 수 있죠 + +06:09.250 --> 06:14.770 +이 모델을 가지고 세분화 작업을 할 수 있습니다. 토큰화 텍스트 같은 작업을 + +06:14.770 --> 06:21.520 +할 수 있고 토큰으로 모델을 호출할 수 있습니다. 실제로 모델을 운영할 수 있죠. + +06:21.520 --> 06:26.980 +일반적으로 구글 Colab 같은 걸 사용합니다 클라우드 내 + +06:26.980 --> 06:33.610 +고성능 상자에서 실행할 수 있죠 일반적으로 로컬 상자는 그런 수준의 프로세싱을 + +06:33.610 --> 06:37.210 +실행할 만큼 강력하지 않거든요 + +06:37.630 --> 06:45.730 +그 대신 사람들은 이 코드를 고성능 C++ 코드로 최적화해 컴파일했고 + +06:45.760 --> 06:51.460 +여러분의 박스에서 로컬로 실행할 수 있게 됐죠 + +06:51.460 --> 06:53.890 +그게 올라마예요 + +06:54.070 --> 06:58.900 +llama CPW라는 걸 사용합니다 C 플러스 코드로요 + +06:58.930 --> 07:01.420 +그 말은 로컬로 실행할 수 있다는 거죠 + +07:01.420 --> 07:08.410 +추론과 실행 모드로 모델을 실행하고 있지만 무슨 일이 일어나는지 제어할 능력은 + +07:08.410 --> 07:12.550 +별로 없어요 코드가 완전히 컴파일됐으니까요 + +07:12.550 --> 07:14.650 +그게 도움이 됐으면 좋겠네요 + +07:14.650 --> 07:19.510 +세부 사항은 대충 말씀드렸지만 여러분께 보여드릴 수 있길 바라요 모델을 + +07:19.510 --> 07:24.670 +다루는 세 가지 다른 방법과 그 밑에 있는 일종의 서브 테크닉을요 + +07:25.210 --> 07:27.430 +이제 어떻게 할 거예요? + +07:27.430 --> 07:32.080 +오늘 할 운동은 아주 유용할 겁니다 수업 내내 계속 + +07:32.080 --> 07:36.610 +활용할 수 있죠 올라마를 배울 거니까요 + +07:36.850 --> 07:42.520 +그럼 지체 없이 유피터랩으로 넘어가서 이 운동에 대해 설명하죠 diff --git a/week5/community-contributions/subtitles/srts/60617259/en_US.srt b/week5/community-contributions/subtitles/srts/60617259/en_US.srt new file mode 100755 index 0000000..261bc66 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60617259/en_US.srt @@ -0,0 +1,280 @@ +WEBVTT + +00:00.080 --> 00:04.790 +I'm excited to introduce you to your first exercise, and I'm looking forward to seeing what you make + +00:04.820 --> 00:05.510 +of this. + +00:05.510 --> 00:11.270 +As a reminder, you should have gone to your Anaconda prompt if you're on a PC or your terminal window. 
+
+00:11.270 --> 00:15.110
+If you're on a mac, you should have gone to the project root directory.
+
+00:15.140 --> 00:24.710
+LM engineering activated Anaconda by doing conda activate LMS or the virtualenv equivalent if you're
+
+00:24.710 --> 00:29.180
+using that, and then typed JupyterLab to bring up your Jupyter lab.
+
+00:29.180 --> 00:33.590
+And in the file browser on the left, you should see the weeks like this.
+
+00:33.620 --> 00:38.240
+Or you might be already in the week one folder, in which case it will look something like this.
+
+00:38.240 --> 00:45.500
+And I now want you to go to the day two exercise notebook, which will come up like this.
+
+00:45.500 --> 00:47.840
+And here is the plan.
+
+00:47.840 --> 00:53.240
+What we're going to do is we're going to see how to call Ollama from code.
+
+00:53.240 --> 00:59.060
+So we're going to use Python code to call the llama model that's running on your computer.
+
+00:59.060 --> 01:02.320
+And what we're then going to do, once we first set that up.
+
+01:02.320 --> 01:05.170
+So we'll get that to work and you'll be able to see the results.
+
+01:05.170 --> 01:12.760
+And the exercise for you will be to then update the summarization project that we completed yesterday
+
+01:12.760 --> 01:18.370
+and use Ollama to use your local model instead of the call to OpenAI.
+
+01:18.370 --> 01:23.260
+And if you didn't sign up for the OpenAI API, then this is your chance to do it for the first time.
+
+01:23.740 --> 01:30.550
+So, uh, first of all, I explain here that we will be, uh, using Ollama.
+
+01:30.580 --> 01:34.300
+The benefits of using Ollama, of course, is that there's no API charges.
+
+01:34.300 --> 01:35.110
+It's open source.
+
+01:35.110 --> 01:36.190
+It's running on your box.
+
+01:36.190 --> 01:37.300
+It's free.
+
+01:37.330 --> 01:41.080
+Another benefit is that the data will never leave your box.
+
+01:41.080 --> 01:46.030
+So if you're ever working on something, whether it's confidential data that absolutely must not go
+
+01:46.030 --> 01:53.500
+to the cloud, then of course this gives you techniques to be working locally without data leaving the
+
+01:53.500 --> 01:54.280
+internet.
+
+01:54.490 --> 02:04.880
+Uh, the disadvantage is that, uh, obviously the frontier models, they are many, many times larger
+
+02:04.880 --> 02:06.950
+and more powerful than the open source models.
+
+02:06.950 --> 02:10.730
+And so we should expect that the results won't be as strong.
+
+02:10.880 --> 02:15.950
+But, you know, that's what you pay for when you pay your, uh, your, your, uh, fraction of a cent
+
+02:15.980 --> 02:16.970
+each call.
+
+02:17.660 --> 02:22.400
+First of all, a recap that you hopefully already installed Ollama by going to Ollama.
+
+02:22.400 --> 02:27.260
+Com and you remember, it's just a matter of pressing that download button and you're off to the races.
+
+02:27.260 --> 02:34.970
+If you've done that, then if you visit this link here localhost 11434, then you should see this Ollama
+
+02:34.970 --> 02:37.670
+is running message which tells you that it's running.
+
+02:37.670 --> 02:45.800
+If that doesn't show, then bring up a terminal or a PowerShell and just enter Ollama serve and it should
+
+02:45.800 --> 02:46.910
+then be running.
+
+02:46.910 --> 02:50.750
+And if you go there, you should see again Ollama is running.
+
+02:50.750 --> 02:55.910
+So with that, if that doesn't happen, then try and do a little bit of debugging and research and
+
+02:55.910 --> 02:58.240
+then contact me and I'll help, all right.
+
+02:58.240 --> 03:00.550
+So I'm going to do a few imports.
+
+03:00.580 --> 03:02.620
+Now I'm going to set some constants.
+
+03:02.620 --> 03:14.080
+This here is a URL on my local box on this port which is you see the port that runs on slash API slash
+
+03:14.080 --> 03:14.860
+chat.
+ +03:14.860 --> 03:19.180 +I'm going to have also a constant called model which will be llama 3.2. + +03:20.170 --> 03:27.280 +Now this here this messages, uh, hopefully you will recognize this construct because this is the same + +03:27.280 --> 03:29.800 +construct as the messages. + +03:29.830 --> 03:31.420 +Let me lay it out a bit differently for you. + +03:31.420 --> 03:36.610 +This is the same as the messages that we talked about before. + +03:36.640 --> 03:39.730 +Uh, that we use with OpenAI. + +03:39.760 --> 03:43.750 +Messages is a list of dictionaries, the dictionaries. + +03:43.750 --> 03:50.470 +Each dictionary has a key of role, and the value is either user or system and a key of content, and + +03:50.470 --> 03:53.170 +the value is the user message or the system message. + +03:53.170 --> 03:58.660 +So this very simply is saying I want to have a user prompt that says, describe some of the business + +03:58.660 --> 04:00.940 +applications of generative AI. + +04:01.180 --> 04:02.290 +Let's run that. + +04:02.470 --> 04:09.160 +I'm now going to put that into a JSON object called a payload, which specifies the model, the messages, + +04:09.160 --> 04:11.080 +and I don't want it to stream results. + +04:11.080 --> 04:12.910 +I just want to get back the results. + +04:13.150 --> 04:23.680 +And I'm then going to use the Python package requests to post that request to this URL pass in the JSON. + +04:23.680 --> 04:31.750 +And then what I get back, I'm going to take the JSON look in the message content fields, and we'll + +04:31.750 --> 04:33.850 +see what happens when we make that call. + +04:33.850 --> 04:39.610 +So right now of course it's making web requests locally from my box to my box. + +04:39.880 --> 04:46.390 +And it's connecting to the llama 3.2 model that's being served by llama. + +04:46.390 --> 04:48.070 +And this is the result. + +04:48.070 --> 04:50.860 +And I will tell you that the answers that it gives are really good. 
+
+04:50.890 --> 04:56.720
+So that since we are trying to learn about commercial applications, it would do you no harm to read
+
+04:56.720 --> 05:00.890
+through some of its responses and see if there's anything that interests you.
+
+05:01.160 --> 05:05.330
+Now, I wanted to show you that because I wanted to explain exactly what's going on behind the covers
+
+05:05.330 --> 05:10.970
+and that we're making these basically these URL, these web requests to our local box.
+
+05:11.270 --> 05:17.960
+But in fact, the friendly people at Ollama have built a Python package, which makes this even simpler.
+
+05:17.960 --> 05:19.430
+So you can just do this in one line.
+
+05:19.430 --> 05:24.860
+So I could have started with this, but I wanted to show you the steps to making the web request so
+
+05:24.860 --> 05:27.410
+you have a good intuition for what's actually happening.
+
+05:27.470 --> 05:33.890
+But there is this nice package, Ollama, that you can just import, and then you can say Ollama dot
+
+05:33.890 --> 05:40.640
+chat, pass in the model, pass in the messages, and then just take back the response content.
+
+05:40.640 --> 05:46.700
+And if I run that, we should hopefully see that we will get basically the same thing.
+
+05:46.760 --> 05:48.620
+And here we go.
+
+05:48.650 --> 05:49.640
+There it is.
+
+05:50.060 --> 05:56.970
+Uh, and I imagine yeah, I can, uh, already see that there are differences between them.
+
+05:57.000 --> 05:59.070
+Of course, it's somewhat unique each time.
+
+05:59.160 --> 06:01.650
+Uh, this one looks like a longer response.
+
+06:01.860 --> 06:05.310
+Okay, that's the end of my teeing up.
+
+06:05.310 --> 06:06.840
+Now it's over to you.
+
+06:06.840 --> 06:13.860
+So you'll remember in day one, we built this solution where we built something that would summarize
+
+06:13.890 --> 06:18.390
+a website, and we made a call to OpenAI to achieve that.
+
+06:18.840 --> 06:23.550
+Here, in fact, is our call to OpenAI right here.
+ +06:23.760 --> 06:32.430 +The challenge for you is to keep going with this day two exercise lab and add in that same summarizer + +06:32.430 --> 06:40.710 +code so that you can build a website, summarizer, that uses your local Ulama open source model, llama + +06:40.740 --> 06:45.000 +3.2 or a different model if you wish to do your summarization. + +06:45.000 --> 06:46.650 +That's the exercise. + +06:46.650 --> 06:50.220 +The solution is in the solutions folder should you need it. + +06:50.220 --> 06:55.290 +But I think you've got this one and I will see you for the next video when you have that done. diff --git a/week5/community-contributions/subtitles/srts/60617259/ja_JP.srt b/week5/community-contributions/subtitles/srts/60617259/ja_JP.srt new file mode 100755 index 0000000..07b353e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60617259/ja_JP.srt @@ -0,0 +1,235 @@ +WEBVTT + +00:00.080 --> 00:05.510 +初めてのエクササイズを紹介できることに興奮している。 + +00:05.510 --> 00:11.270 +リマインダーとして、 PCを使用している場合はAnacondaプロンプトに、 ターミナルウィンドウを表示してください。 + +00:11.270 --> 00:15.110 +もしあなたがマックを使っているなら、 プロジェクトのルート・ディレクトリに行くはずだ。 + +00:15.140 --> 00:24.710 +LMエンジニアリングは、 conda activate LMS、 もしくはvirtualenvを使用している場合はそれに相当するコマンドを実行してAnacondaを起動し、 + +00:24.710 --> 00:29.180 +JupyterLabと入力してJupyterラボを呼び出した。 + +00:29.180 --> 00:33.590 +左側のファイルブラウザで、 このように週が表示されるはずだ。 + +00:33.620 --> 00:38.240 +あるいは、 すでに1週目のフォルダに入っている場合もあり、 その場合は次のように表示される。 + +00:38.240 --> 00:45.500 +そして、 2日目のエクササイズ・ノートを見てほしい。 + +00:45.500 --> 00:47.840 +そして、 これがその計画だ。 + +00:47.840 --> 00:53.240 +これからやるのは、 コードからラマを呼び出す方法だ。 + +00:53.240 --> 00:59.060 +Pythonのコードを使って、 あなたのコンピューターで動いているラマ・モデルを呼び出します。 + +00:59.060 --> 01:02.320 +そして、 これから何をするかというと、 最初にそれをセットアップするんだ。 + +01:02.320 --> 01:05.170 +だから、 それをうまく使って、 結果を目にすることができるだろう。 + +01:05.170 --> 01:18.370 +そして、 昨日完了した要約プロジェクトを更新し、 OpenAIへの呼び出しの代わりに、 Olamaを使用してローカルモデルを使用する練習をしていただきます。 + +01:18.370 --> 01:23.260 +そして、 もしあなたがOpenAI APIにサインアップしていないのであれば、 これは初めてのチャンスだ。 + +01:23.740 --> 
01:30.550 +それで、 まず最初に、 ラマを使うことを説明します。 + +01:30.580 --> 01:34.300 +ラマを使うメリットは、 もちろんAPI料金がかからないことだ。 + +01:34.300 --> 01:35.110 +オープンソースだ。 + +01:35.110 --> 01:36.190 +あなたのボックスで動いている。 + +01:36.190 --> 01:37.300 +無料だ。 + +01:37.330 --> 01:41.080 +もうひとつの利点は、 データがあなたの箱から出ることがないことだ。 + +01:41.080 --> 01:46.030 +だから、 クラウドに絶対に出してはいけない機密データであろうと、 何かに取り組んでいるのであれば、 + +01:46.030 --> 01:54.280 +もちろん、 データをインターネットから出さずにローカルで作業するテクニックが得られる。 + +01:54.490 --> 02:06.950 +デメリットは、 明らかにフロンティア・モデルは、 オープンソース・モデルよりも何倍も何十倍も大きく、 強力だということだ。 + +02:06.950 --> 02:10.730 +だから、 結果はそれほど強くないと予想すべきだ。 + +02:10.880 --> 02:16.970 +でもね、 その分、 通話料がかかるんだ。 + +02:17.660 --> 02:22.400 +まず最初に、 OlamaにアクセスしてOlamaをインストール済みであることを願う。 + +02:22.400 --> 02:27.260 +ダウンロードボタンを押すだけで、 レースが始まるのだ。 + +02:27.260 --> 02:37.670 +そうすれば、 このリンク(localhost 11434)にアクセスすると、 Olamaが実行中であることを示すメッセージが表示されるはずだ。 + +02:37.670 --> 02:46.910 +もし表示されない場合は、 ターミナルかPowerShellを立ち上げ、 Olama serveと入力すれば実行されるはずだ。 + +02:46.910 --> 02:50.750 +そしてそこに行けば、 またラマが走っているのを見るはずだ。 + +02:50.750 --> 02:55.910 +もしそうならなかったら、 デバッグとリサーチを少しやってみて、 それから僕に連絡してくれれば、 + +02:55.910 --> 02:58.240 +力になるよ。 + +02:58.240 --> 03:00.550 +だから、 いくつか輸入するつもりだ。 + +03:00.580 --> 03:02.620 +これからいくつかの定数を設定する。 + +03:02.620 --> 03:14.860 +これは僕のローカル・ボックスのURLで、 このポートはスラッシュAPIスラッシュ・チャットで動作するポートだ。 + +03:14.860 --> 03:19.180 +リャマ3となるモデルという定数も持つつもりだ。 2. 
+ +03:20.170 --> 03:29.800 +さて、 このメッセージだが、 うまくいけばこの構成がわかるだろう。 + +03:29.830 --> 03:31.420 +もう少し違う言い方をしよう。 + +03:31.420 --> 03:36.610 +これは前に話したメッセージと同じだ。 + +03:36.640 --> 03:39.730 +OpenAIで使っているものだ。 + +03:39.760 --> 03:43.750 +メッセージは辞書のリストである。 + +03:43.750 --> 03:53.170 +各ディクショナリは、 roleをキーとし、 値はユーザーまたはシステムであり、 contentをキーとし、 値はユーザーメッセージまたはシステムメッセージである。 + +03:53.170 --> 04:00.940 +つまり、 非常に簡単に言えば、 「ジェネレーティブAIのビジネス・アプリケーションをいくつか説明してください」というユーザー・プロンプトが欲しいということだ。 + +04:01.180 --> 04:02.290 +それを実行しよう。 + +04:02.470 --> 04:11.080 +ペイロードと呼ばれるJSONオブジェクトに、 モデルやメッセージを指定する。 + +04:11.080 --> 04:12.910 +結果を取り戻したいだけなんだ。 + +04:13.150 --> 04:23.680 +そして、 Pythonパッケージのrequestsを使って、 このURLにJSONでリクエストをポストします。 + +04:23.680 --> 04:33.850 +そして、 戻ってきたJSONをメッセージ・コンテンツ・フィールドで見てみる。 + +04:33.850 --> 04:39.610 +だから今はもちろん、 僕のボックスから僕のボックスへローカルにウェブリクエストをしている。 + +04:39.880 --> 04:46.390 +そして、 リャマ3とつながっている。 2 llamaがサーブしているモデル。 + +04:46.390 --> 04:48.070 +その結果がこれだ。 + +04:48.070 --> 04:50.860 +そして、 その答えが実に良いものであることをお伝えしたい。 + +04:50.890 --> 04:56.720 +だから、 私たちは商業的な応用について学ぼうとしているのだから、 その回答のいくつかに目を通して、 + +04:56.720 --> 05:00.890 +何か興味のあるものがあるかどうか確認しても損はないだろう。 + +05:01.160 --> 05:05.330 +さて、 ここでお見せしたかったのは、 その裏側で何が起こっているのかを正確に説明したかったからで、 + +05:05.330 --> 05:10.970 +基本的にはこれらのURL、 つまりローカル・ボックスへのウェブ・リクエストを行っているのだ。 + +05:11.270 --> 05:17.960 +しかし実は、 Allamaの親切な人たちがPythonのパッケージを作ってくれていて、 これを使えばもっと簡単だ。 + +05:17.960 --> 05:19.430 +だから、 これを1行で済ませることができる。 + +05:19.430 --> 05:27.410 +だから、 ここから始めてもよかったのだが、 ウェブリクエストを作る手順をお見せすることで、 実際に何が起こっているのかを直感的に理解していただきたかった。 + +05:27.470 --> 05:33.890 +しかし、 Allamaという素晴らしいパッケージがあり、 それをインポートすれば、 Allama dot + +05:33.890 --> 05:40.640 +chatと言って、 モデルを渡し、 メッセージを渡し、 レスポンスの内容を受け取ることができる。 + +05:40.640 --> 05:46.700 +それを実行すれば、 基本的に同じものが得られることが期待できる。 + +05:46.760 --> 05:48.620 +そして、 これだ。 + +05:48.650 --> 05:49.640 +あれだ。 + +05:50.060 --> 05:56.970 +ああ、 想像するに、 彼らとの間に違いがあるのはもうわかるよ。 + +05:57.000 --> 05:59.070 +もちろん、 その都度多少の違いはある。 + +05:59.160 --> 06:01.650 +ええと、 これはもっと長いレスのようだ。 + +06:01.860 --> 
06:05.310 +よし、 これで私のティーアップは終わりだ。 + +06:05.310 --> 06:06.840 +あとは君の出番だ。 + +06:06.840 --> 06:18.390 +初日に、 ウェブサイトを要約するソリューションを構築し、 それを実現するためにOpenAIに呼びかけたことを覚えているだろう。 + +06:18.840 --> 06:23.550 +実際、 ここにOpenAIへの呼びかけがある。 + +06:23.760 --> 06:32.430 +あなたへの挑戦は、 この2日目の練習ラボを続けて、 同じサマライザーのコードを追加し、 あなたの地元のウラマのオープンソースモデル、 + +06:32.430 --> 06:45.000 +llama 3を使用したウェブサイト、 サマライザーを構築できるようにすることだ。 + +06:45.000 --> 06:45.000 +2または別のモデルで要約を行いたい場合。 + +06:45.000 --> 06:46.650 +それが練習だ。 + +06:46.650 --> 06:50.220 +解決策はソリューション・フォルダーにあります。 + +06:50.220 --> 06:55.290 +でも、 このビデオは完成したと思うし、 完成したらまた次のビデオで会おう。 diff --git a/week5/community-contributions/subtitles/srts/60617259/ko_KR.srt b/week5/community-contributions/subtitles/srts/60617259/ko_KR.srt new file mode 100755 index 0000000..f2b778a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60617259/ko_KR.srt @@ -0,0 +1,271 @@ +WEBVTT + +00:00.080 --> 00:05.510 +첫 번째 운동을 소개하게 돼서 기쁘고 어떻게 만드실지 정말 기대되네요 + +00:05.510 --> 00:11.270 +PC나 터미널 창을 사용하신다면 아나콘다 프롬프트를 선택하세요 + +00:11.270 --> 00:15.110 +Mac에 있다면 프로젝트 루트 디렉터리로 가야 해요 + +00:15.140 --> 00:24.710 +LM 엔지니어링은 LMS 활성화 콘다 또는 그걸 사용한다면 가상env와 동등한 것으로 아나콘다를 활성화합니다 그런 다음 JupyterLab을 + +00:24.710 --> 00:29.180 +입력해 Jupyter 랩을 불러와요 + +00:29.180 --> 00:33.590 +왼쪽 파일 브라우저에 이런 주가 있을 거예요 + +00:33.620 --> 00:38.240 +아니면 이미 첫 주 폴더에 있을 수도 있죠 그런 경우 이런 식으로 되죠 + +00:38.240 --> 00:45.500 +이제 둘째 날 운동 공책을 꺼내세요 이렇게 나올 거예요 + +00:45.500 --> 00:47.840 +계획은 이래요 + +00:47.840 --> 00:53.240 +코드에서 llama를 호출하는 방법을 볼 거예요 + +00:53.240 --> 00:59.060 +파이썬 코드를 이용해 llama 모델을 호출할 거예요 여러분 컴퓨터에서 실행되는 거죠 + +00:59.060 --> 01:02.320 +그런 다음 할 일은∙∙∙ 첫 번째 설정부터요 + +01:02.320 --> 01:05.170 +get get을 하면 결과를 볼 수 있어요 + +01:05.170 --> 01:12.760 +여러분이 할 일은 어제 완료한 요약 프로젝트를 업데이트하는 겁니다 그리고 OpenAI + +01:12.760 --> 01:18.370 +호출 대신 Olama를 사용해 여러분의 로컬 모델을 사용하세요 + +01:18.370 --> 01:23.260 +OpenAI API에 등록하지 않았다면 이번이 처음 할 기회예요 + +01:23.740 --> 01:30.550 +우선 라마를 쓸 거라고 설명해 드릴게요 + +01:30.580 --> 01:34.300 +라마를 사용하면 API 요금이 안 들어요 + +01:34.300 --> 
01:35.110 +오픈 소스 소스예요 + +01:35.110 --> 01:36.190 +당신 박스에서 작동해요 + +01:36.190 --> 01:37.300 +공짜예요 + +01:37.330 --> 01:41.080 +또 다른 장점은 데이터가 상자 밖으로 나가지 않는다는 거죠 + +01:41.080 --> 01:46.030 +여러분이 뭔가 작업하고 있다면 그게 클라우드로 절대 가면 안 되는 + +01:46.030 --> 01:54.280 +기밀 데이터든 간에 이건 인터넷에서 데이터가 떠나지 않고 로컬에서 작업할 수 있는 기술을 제공하죠 + +01:54.490 --> 02:04.880 +단점은 개척자 모델은 오픈 소스 모델보다 훨씬 크고 강력하다는 + +02:04.880 --> 02:06.950 +거예요 + +02:06.950 --> 02:10.730 +그러니 결과가 그렇게 강하지 않을 거라고 예상해야 해요 + +02:10.880 --> 02:15.950 +하지만 한 통당 1센트도 안 되는 돈을 냈으니 당연한 + +02:15.980 --> 02:16.970 +거죠 + +02:17.660 --> 02:22.400 +먼저 올라마에 가서 올라마를 설치한 걸 복습해보죠 + +02:22.400 --> 02:27.260 +기억하세요 다운로드 버튼만 누르면 바로 레이스가 시작되죠 + +02:27.260 --> 02:34.970 +그걸 완료했다면 여기 localhost 11434 링크를 방문하면 이 올라마가 실행 중인 메시지를 볼 수 있습니다 + +02:34.970 --> 02:37.670 +실행 중이라는 걸 알려주는 거죠 + +02:37.670 --> 02:45.800 +안 나오면 터미널이나 PowerShell을 불러와 OlamaService를 입력하면 실행될 + +02:45.800 --> 02:46.910 +거예요 + +02:46.910 --> 02:50.750 +거기 가면 라마가 뛰는 걸 볼 수 있어요 + +02:50.750 --> 02:55.910 +만약 비트가 작동하지 않는다면 디버깅과 조사를 해보시고 저에게 연락하세요 + +02:55.910 --> 02:58.240 +제가 도와드릴게요 + +02:58.240 --> 03:00.550 +몇 가지 일을 할 거예요 + +03:00.580 --> 03:02.620 +이제 상수를 설정할게요 + +03:02.620 --> 03:14.860 +이건 제 로컬 상자 이 포트의 URL이에요 슬래시 API 슬래시 채팅에서 실행되는 포트가 보이시죠 + +03:14.860 --> 03:19.180 +모델이라 불리는 상수도 가질 거예요 그게 라마 3이 되겠죠 2번요 + +03:20.170 --> 03:27.280 +여기 이 메시지들은 여러분이 이 구조를 알아보셨으면 합니다 메시지와 + +03:27.280 --> 03:29.800 +같은 구조니까요 + +03:29.830 --> 03:31.420 +비트를 좀 다르게 표현해 볼게요 + +03:31.420 --> 03:36.610 +전에 얘기했던 메시지와 같은 거예요 + +03:36.640 --> 03:39.730 +오픈라이에 쓰는 거예요 + +03:39.760 --> 03:43.750 +메시지는 사전 목록이에요 사전들요 + +03:43.750 --> 03:50.470 +각 사전은 역할의 키를 갖고 있고 그 값은 사용자나 시스템 그리고 콘텐츠의 키입니다 그 값은 + +03:50.470 --> 03:53.170 +사용자 메시지나 시스템 메시지죠 + +03:53.170 --> 03:58.660 +간단히 말하자면 사용자 프롬프트가 이런 말을 하는 거죠 인공지능이 재생되는 일부 비즈니스 + +03:58.660 --> 04:00.940 +응용 프로그램을 묘사하라 + +04:01.180 --> 04:02.290 +실행해 보죠 + +04:02.470 --> 04:09.160 +이제 그걸 페이로드라는 JSON 객체에 넣을 거예요 모델과 메시지를 지정하는 거죠 결과를 + +04:09.160 --> 04:11.080 +스트림하지 않고요 + +04:11.080 
--> 04:12.910 +Get it 결과만 받으면 돼요 + +04:13.150 --> 04:23.680 +파이썬 패키지 요청을 이용해 JSON 내의 이 URL 패스 게시할 거예요 + +04:23.680 --> 04:31.750 +그런 다음 JSON을 메시지 콘텐츠 필드에서 살펴보겠습니다 호출을 하면 + +04:31.750 --> 04:33.850 +어떻게 되는지 보죠 + +04:33.850 --> 04:39.610 +지금은 제 상자에서 제 상자까지 로컬로 웹 요청을 하고 있어요 + +04:39.880 --> 04:46.390 +라마 3에 연결되고 있어요 라마가 제공하는 2가지 모델이죠 + +04:46.390 --> 04:48.070 +이게 그 결과예요 + +04:48.070 --> 04:50.860 +그 결과는 정말 훌륭하다고 장담해요 + +04:50.890 --> 04:56.720 +상업적 응용 프로그램에 대해 알고자 하는 거니까 일부 응답을 + +04:56.720 --> 05:00.890 +읽어보고 관심 있는 게 있는지 보세요 + +05:01.160 --> 05:05.330 +이걸 보여드린 건 뒤에서 무슨 일이 벌어지는지 설명하고 + +05:05.330 --> 05:10.970 +싶어서예요 이런 URL 즉, 로컬 상자에 대한 웹 요청을 만들고 있죠 + +05:11.270 --> 05:17.960 +하지만 사실 알라마의 친절한 분들이 파이썬 패키지를 개발해 주셔서 작업이 더 간단해졌어요 + +05:17.960 --> 05:19.430 +한 줄로 하면 돼요 + +05:19.430 --> 05:24.860 +이걸로 시작할 수도 있지만 웹 요청을 만드는 단계를 보여드릴게요 실제 무슨 일이 + +05:24.860 --> 05:27.410 +일어나는지 직관적으로 알 수 있게요 + +05:27.470 --> 05:33.890 +Allama라는 멋진 패키지가 있어요 그냥 가져오면 되죠 Allama.챗이라고 + +05:33.890 --> 05:40.640 +입력하고 모델 통과, 메시지 통과 그리고 응답 콘텐츠를 가져오면 돼요 + +05:40.640 --> 05:46.700 +그걸 실행하면 기본적으로 같은 게 나오죠 get + +05:46.760 --> 05:48.620 +자, 보세요 + +05:48.650 --> 05:49.640 +저기 있네요 + +05:50.060 --> 05:56.970 +그리고 제 생각엔 네, 벌써 차이가 있다는 걸 알겠어요 + +05:57.000 --> 05:59.070 +물론 매번 독특하죠 + +05:59.160 --> 06:01.650 +이건 좀 긴 것 같네요 + +06:01.860 --> 06:05.310 +티업은 다 했어요 + +06:05.310 --> 06:06.840 +이제 당신에게 달렸어요 + +06:06.840 --> 06:13.860 +첫날 이 솔루션을 구축한 걸 기억하실 겁니다 웹사이트를 요약하는 뭔가를 구축했죠 + +06:13.890 --> 06:18.390 +그걸 달성하기 위해 OpenAI에 호출했어요 + +06:18.840 --> 06:23.550 +사실 오픈AI를 호출한 건 여기죠 + +06:23.760 --> 06:32.430 +여러분이 하실 일은 이 2일 차 운동 랩을 계속하면서 동일한 요약기 코드를 추가하는 겁니다 + +06:32.430 --> 06:40.710 +그래야 Ulama 오픈 소스 모델인 llama 3을 사용하는 웹사이트를 만들 + +06:40.740 --> 06:45.000 +수 있으니까요 요약하고 싶다면 다른 모델을 사용하세요 + +06:45.000 --> 06:46.650 +그게 연습이죠 + +06:46.650 --> 06:50.220 +솔루션은 솔루션 폴더에 있어요 필요하면 쓰세요 + +06:50.220 --> 06:55.290 +하지만 이건 된 것 같네요 다음 영상에서 다시 뵙죠 diff --git a/week5/community-contributions/subtitles/srts/60619123/en_US.srt 
b/week5/community-contributions/subtitles/srts/60619123/en_US.srt new file mode 100755 index 0000000..dbae7b6 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619123/en_US.srt @@ -0,0 +1,82 @@ +WEBVTT + +00:00.110 --> 00:05.360 +So what we're now going to do is we're going to look at some models in practice and start to compare + +00:05.360 --> 00:06.770 +them and ask some questions. + +00:06.770 --> 00:11.180 +Some of the questions we're going to ask, we're going to ask how do I decide if a business problem + +00:11.180 --> 00:13.580 +is suitable for an LM solution? + +00:13.580 --> 00:17.480 +Because it's again useful for us to hear the answer and to learn from it. + +00:17.480 --> 00:19.490 +But also it would be good to see. + +00:19.610 --> 00:25.220 +I imagine you'll see that that's a question that none of the models will have any problems with, and + +00:25.250 --> 00:26.690 +it'll be great to see that. + +00:26.990 --> 00:31.100 +I will also ask a question which I think is interesting because it's sort of introspective. + +00:31.100 --> 00:38.330 +It sees whether the models have a sense of of what they are by saying compared to other LMS, what kinds + +00:38.330 --> 00:44.090 +of questions are you best at answering and where do you where do you have challenges and which other + +00:44.120 --> 00:47.000 +LMS have capabilities that complement yours? + +00:47.000 --> 00:54.470 +So it's an interesting one to see if they even recognize that they are LMS and can sort of assess themselves + +00:54.470 --> 00:55.610 +against others. + +00:55.610 --> 01:01.280 +And then a question which surely will be something which which they will struggle with, because it's + +01:01.280 --> 01:02.780 +something that I would struggle with. + +01:02.810 --> 01:05.810 +What does it feel like to be jealous? + +01:06.140 --> 01:14.510 +A yeah, a philosophical and a very human centric question, and then a rather interesting one. 
+ +01:14.510 --> 01:18.440 +How many times does the letter A appear in this sentence? + +01:18.740 --> 01:23.300 +And if you do a quick count there, I think you'll find that it appears four times, and we'll see whether + +01:23.300 --> 01:25.640 +the models can figure out that it appears four times. + +01:25.670 --> 01:28.880 +And then there are a couple of other interesting things I have up my sleeve to ask. + +01:28.880 --> 01:35.900 +So we'll be going through and the screenshots you see here ChatGPT, you'll see Claude, Gemini and + +01:35.960 --> 01:39.680 +Commander Plus from cohere, and we'll try the other ones too. + +01:39.680 --> 01:44.960 +And again, please do bear in mind, try looking and understanding the differences and seeing how you'll + +01:44.960 --> 01:48.260 +be able to apply this to your own model selection. + +01:48.260 --> 01:49.250 +See you in a second. diff --git a/week5/community-contributions/subtitles/srts/60619123/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619123/ja_JP.srt new file mode 100755 index 0000000..7a6df20 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619123/ja_JP.srt @@ -0,0 +1,58 @@ +WEBVTT + +00:00.110 --> 00:06.770 +だから、 これからやることは、 実際にいくつかのモデルを見て、 それらを比較し、 質問を投げかけることだ。 + +00:06.770 --> 00:13.580 +あるビジネス上の問題が、 LM ソリューションに適しているかどうかをどのように判断すればよいのか? + +00:13.580 --> 00:17.480 +その答えを聞き、 そこから学ぶことは、 私たちにとってまた有益だからだ。 + +00:17.480 --> 00:19.490 +でも、 見るのもいいだろう。 + +00:19.610 --> 00:26.690 +どのモデルもこの質問には答えないだろう。 + +00:26.990 --> 00:31.100 +また、 ある意味内省的で面白いと思う質問もします。 + +00:31.100 --> 00:38.330 +他のLMSと比較して、 そのモデルが何を言っているのか、 どのような質問に答えるのが得意なのか、 + +00:38.330 --> 00:47.000 +どこに課題があるのか、 他のLMSはあなたのLMSを補完する機能を持っているのか。 + +00:47.000 --> 00:55.610 +だから、 LMSであることを認識し、 他と比較して自らを評価できるかどうか、 興味深いところだ。 + +00:55.610 --> 01:02.780 +そして、 きっと彼らが悩むことになるであろう質問だ。 + +01:02.810 --> 01:05.810 +嫉妬するってどんな感じ? + +01:06.140 --> 01:14.510 +そう、 哲学的で人間中心的な質問だ。 + +01:14.510 --> 01:18.440 +この文章にAは何回出てくる? 
+ +01:18.740 --> 01:25.640 +そこでざっと数えてみると、 4回出てくることがわかると思う。 モデルが4回出てくることを理解できるかどうか見てみよう。 + +01:25.670 --> 01:28.880 +それから、 他にもいくつか興味深いことを聞いてみたいことがあるんだ。 + +01:28.880 --> 01:35.900 +ChatGPTのスクリーンショットをご覧ください。 コヒーレのクロード、 ジェミニ、 + +01:35.960 --> 01:39.680 +コマンダープラスが写っています。 + +01:39.680 --> 01:48.260 +そして、 繰り返しになるが、 その違いをよく見て理解し、 自分のモデル選びにどう生かすかを考えてほしい。 + +01:48.260 --> 01:49.250 +またすぐに会おう。 diff --git a/week5/community-contributions/subtitles/srts/60619123/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619123/ko_KR.srt new file mode 100755 index 0000000..29e80c3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619123/ko_KR.srt @@ -0,0 +1,82 @@ +WEBVTT + +00:00.110 --> 00:05.360 +이제 몇 가지 모델을 실제로 보고 비교해보면서 질문을 + +00:05.360 --> 00:06.770 +해볼게요 + +00:06.770 --> 00:11.180 +우리가 질문할 몇 가지 사항은 사업상의 문제가 LM 솔루션에 적합한지 + +00:11.180 --> 00:13.580 +어떻게 결정하느냐는 거죠 + +00:13.580 --> 00:17.480 +답을 듣고 배우는 건 유용하니까요 + +00:17.480 --> 00:19.490 +하지만 보는 것도 좋죠 + +00:19.610 --> 00:25.220 +다른 모델들은 이 질문에 아무런 문제도 없을 겁니다 그걸 보는 + +00:25.250 --> 00:26.690 +게 좋을 거예요 + +00:26.990 --> 00:31.100 +자기 성찰적인 질문이라 흥미로울 것 같은데요 + +00:31.100 --> 00:38.330 +모델이 무엇인지에 대한 감각을 가지고 있는지 다른 LMS와 비교해서 어떤 종류의 질문에 가장 잘 대답할 + +00:38.330 --> 00:44.090 +수 있는지, 어디에서 어려움이 있는지, 어떤 다른 LMS가 여러분의 것을 보완하는 + +00:44.120 --> 00:47.000 +기능을 가지고 있는지에 대해서요 + +00:47.000 --> 00:54.470 +그래서 흥미롭죠 자신이 LMS라는 걸 인식하고 다른 사람과 비교해 자신을 평가할 수 있는지 + +00:54.470 --> 00:55.610 +보는 거요 + +00:55.610 --> 01:01.280 +이 질문은 분명히 그들이 고심할 문제일 겁니다 저라도 고민할 + +01:01.280 --> 01:02.780 +문제니까요 + +01:02.810 --> 01:05.810 +질투하는 기분이 어때요? + +01:06.140 --> 01:14.510 +철학적이고 인간 중심적인 질문이지만 흥미로운 질문도 있어요 + +01:14.510 --> 01:18.440 +A가 이 문장에 몇 번이나 나오죠? 
+ +01:18.740 --> 01:23.300 +빠르게 세어 보면 네 번 나타날 거예요 모델이 네 번 나타난다는 + +01:23.300 --> 01:25.640 +걸 알아낼 수 있을지 보죠 + +01:25.670 --> 01:28.880 +그리고 몇 가지 더 물어볼 게 있어요 + +01:28.880 --> 01:35.900 +챗GPT 스크린샷을 살펴볼 텐데요 클로드, 제미니 커맨더 플러스도 + +01:35.960 --> 01:39.680 +코어에서 왔고 다른 것도 확인해 보죠 + +01:39.680 --> 01:44.960 +다시 한번 명심하세요 차이점을 보고 이해한 후 여러분의 모델 선택에 + +01:44.960 --> 01:48.260 +어떻게 적용할지 생각해 보세요 + +01:48.260 --> 01:49.250 +이따 봐요 diff --git a/week5/community-contributions/subtitles/srts/60619149/en_US.srt b/week5/community-contributions/subtitles/srts/60619149/en_US.srt new file mode 100755 index 0000000..0e908c4 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619149/en_US.srt @@ -0,0 +1,184 @@ +WEBVTT + +00:00.020 --> 00:06.230 +So we're going to start our exploration into the world of frontier models by playing with the famous + +00:06.230 --> 00:10.550 +GPT from OpenAI, which most of you are probably quite familiar with. + +00:10.580 --> 00:15.680 +I have a pro license, which means I get access to all of the models, and I imagine some of you do + +00:15.710 --> 00:16.520 +as well. + +00:16.550 --> 00:21.320 +We'll start with a softball question, the kind of question that they're so good at answering, which + +00:21.320 --> 00:30.110 +is how do I decide if a business problem is suitable for an LLM solution? + +00:30.110 --> 00:34.910 +And it's useful for us because it's the kind of question that one might ask on this course. + +00:35.030 --> 00:40.760 +And what we'll get back, of course, is a very carefully structured and reasoned response with an introduction, + +00:40.760 --> 00:44.300 +with summaries, the nature of the problem, the scalability needs. + +00:44.300 --> 00:50.480 +No doubt there'll be stuff in here about nuance, about unstructured data, contextual understanding, + +00:50.480 --> 00:56.810 +cost, maintenance, lots of great, well-reasoned points with a good summary to boot. 
+ +00:56.810 --> 01:00.110 +So this is the kind of thing that it's really, really good at. + +01:00.140 --> 01:05.300 +Now I'll ask it a question, which it usually gets right, but sometimes amazingly gets wrong. + +01:05.330 --> 01:06.920 +Let's see what happens this time. + +01:06.950 --> 01:15.500 +How many times does the letter A appear in this sentence? + +01:16.640 --> 01:18.560 +Uh, so let's see how it does. + +01:18.770 --> 01:20.900 +Uh, it's got it wrong. + +01:20.900 --> 01:24.170 +The letter A appears five times in your sentence. + +01:24.410 --> 01:27.380 +Sometimes it gets this right, and sometimes it gets it wrong. + +01:27.380 --> 01:29.750 +It's, uh, difficult to know, but. + +01:29.750 --> 01:33.920 +But, uh, it might shock you that, uh, it gets that wrong. + +01:33.920 --> 01:37.610 +It doesn't mean that we humans still have an advantage in some ways. + +01:37.940 --> 01:42.710 +But the truth is, it's to do with the way that this information is sent into the LM. + +01:42.710 --> 01:45.260 +It's to do with this, this tokenization strategy. + +01:45.260 --> 01:47.180 +And we'll be talking more about that later. + +01:47.300 --> 01:50.090 +But it is interesting that it gets it wrong. + +01:50.360 --> 01:55.160 +Uh, I'm going to ask it one more question, which is a tricky question. + +01:55.160 --> 01:59.540 +I'm going to ask it, uh, Choose the word that best completes the analogy. + +01:59.570 --> 02:03.440 +Feather is to bird as scale is to. + +02:03.470 --> 02:05.390 +And then there's a few different options there. + +02:05.390 --> 02:11.390 +And the best answer is in fact, reptile fish is a bit of a trick answer because fish do have scales, + +02:11.390 --> 02:15.260 +but it's not as distinguishing feature as it is for reptiles. + +02:15.260 --> 02:20.060 +This question I got from a website called vellum, which is a very a company that does a lot of this + +02:20.090 --> 02:23.030 +kind of analysis that we will talk about later. 
+ +02:23.270 --> 02:23.840 +All right. + +02:23.840 --> 02:25.400 +Let's switch to a different model. + +02:25.400 --> 02:28.190 +Let's switch to zero one preview. + +02:28.220 --> 02:34.490 +This is the model that was originally codenamed strawberry and is the strongest of OpenAI's models, + +02:34.490 --> 02:39.260 +only available to Pro subscribers, but it will ultimately be available to everyone, and it gives you + +02:39.260 --> 02:40.610 +a sense of what's to come. + +02:40.640 --> 02:45.350 +It uses a sort of chain of reasoning approach to think through questions. + +02:45.350 --> 02:55.700 +Let's ask it the same question how many times does the letter A appear in this sentence. + +02:56.150 --> 02:58.310 +See if it can do better. + +02:59.690 --> 03:00.980 +It's thinking. + +03:02.480 --> 03:04.970 +You can see how it takes longer for sure. + +03:05.000 --> 03:06.710 +Counting letter frequencies. + +03:06.710 --> 03:07.850 +That sounds promising. + +03:07.880 --> 03:09.320 +Taking a closer look. + +03:09.350 --> 03:10.130 +Good to know. + +03:10.130 --> 03:12.560 +And it gets the right the answer. + +03:12.560 --> 03:13.190 +Correct. + +03:13.190 --> 03:16.040 +The letter A appears four times in the sentence. + +03:16.040 --> 03:21.320 +Once in many, uh, once within the quotes and twice in the word appear. + +03:21.320 --> 03:23.090 +So it is correct. + +03:23.090 --> 03:24.230 +Very good. + +03:24.230 --> 03:27.080 +Uh, and then let's also ask strawberry. + +03:27.110 --> 03:33.650 +Oh, one preview this, uh, this puzzle and let's see how it can approach this. + +03:33.710 --> 03:41.300 +It's considering choosing the right analogy that's also promising cultivating. + +03:41.300 --> 03:44.510 +And it gives the correct answer reptile. + +03:44.510 --> 03:50.750 +So this gives you a sense of the different models, some of the different strengths between them from + +03:50.750 --> 03:53.960 +GPT four zero and zero one preview. 
diff --git a/week5/community-contributions/subtitles/srts/60619149/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619149/ja_JP.srt new file mode 100755 index 0000000..be458b8 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619149/ja_JP.srt @@ -0,0 +1,172 @@ +WEBVTT + +00:00.020 --> 00:06.230 +そこで、 フロンティア・モデルの世界への探検を、 おそらくほとんどの方がよくご存じであろう、 + +00:06.230 --> 00:10.550 +OpenAIの有名なGPTで遊ぶことから始めようと思う。 + +00:10.580 --> 00:16.520 +私はプロライセンスを持っているので、 すべてのモデルにアクセスできる。 + +00:16.550 --> 00:21.320 +ビジネス上の問題がLLMの解決策に適しているかどうか、 + +00:21.320 --> 00:30.110 +どうやって判断すればいいのでしょうか? + +00:30.110 --> 00:34.910 +このコースで聞かれるような質問なので、 私たちにとっても有益だ。 + +00:35.030 --> 00:40.760 +もちろん、 返ってくるのは、 序論、 要約、 問題の本質、 スケーラビリティの必要性など、 + +00:40.760 --> 00:44.300 +非常に注意深く構成され、 理路整然とした回答だ。 + +00:44.300 --> 00:50.480 +ここには、 ニュアンス、 非構造化データ、 文脈理解、 コスト、 メンテナンスなど、 たくさんの素晴らしい、 + +00:50.480 --> 00:56.810 +理路整然とした指摘があり、 それをうまく要約したものがあるに違いない。 + +00:56.810 --> 01:00.110 +だから、 これは本当に、 本当に得意なことなんだ。 + +01:00.140 --> 01:05.300 +私が質問を投げかけると、 たいていは正解するが、 ときどき驚くほど間違える。 + +01:05.330 --> 01:06.920 +今回はどうなるか見てみよう。 + +01:06.950 --> 01:15.500 +この文章にはAという文字が何回出てくるか? 
+ +01:16.640 --> 01:18.560 +どうなるか見てみよう。 + +01:18.770 --> 01:20.900 +ええと、 それは間違っている。 + +01:20.900 --> 01:24.170 +あなたの文章にはAという文字が5回出てきます。 + +01:24.410 --> 01:27.380 +それが正しいこともあれば、 間違っていることもある。 + +01:27.380 --> 01:29.750 +難しいんだけどね。 + +01:29.750 --> 01:33.920 +でも、 それが間違っていることにショックを受けるかもしれない。 + +01:33.920 --> 01:37.610 +だからといって、 私たち人間の方がまだ有利な面もある。 + +01:37.940 --> 01:42.710 +しかし実際は、 この情報がLMに送られる方法に関係しているのだ。 + +01:42.710 --> 01:45.260 +それは、 このトークナイゼーション戦略に関係している。 + +01:45.260 --> 01:47.180 +それについては後で詳しく話す。 + +01:47.300 --> 01:50.090 +しかし、 それが間違っているのは興味深い。 + +01:50.360 --> 01:55.160 +ええと、 もう1つ聞きますが、 これは難しい質問です。 + +01:55.160 --> 01:59.540 +この例えに最もふさわしい言葉を選んでください。 + +01:59.570 --> 02:03.440 +フェザーは鳥にとってのスケールである。 + +02:03.470 --> 02:05.390 +そして、 そこにはいくつかの異なるオプションがある。 + +02:05.390 --> 02:15.260 +というのも、 魚には鱗があるが、 爬虫類の鱗ほど区別できる特徴ではないからだ。 + +02:15.260 --> 02:20.060 +この質問は、 ヴェラムというウェブサイトから得たものだ。 ヴェラムは、 この種の分析を多く行っている会社で、 + +02:20.090 --> 02:23.030 +後ほど紹介する。 + +02:23.270 --> 02:23.840 +分かった。 + +02:23.840 --> 02:25.400 +別のモデルに切り替えよう。 + +02:25.400 --> 02:28.190 +ゼロワンのプレビューに切り替えよう。 + +02:28.220 --> 02:34.490 +これはもともとstrawberryというコードネームで呼ばれていたモデルで、 OpenAIの最強モデルであり、 + +02:34.490 --> 02:40.610 +プロ契約者のみが利用できるが、 最終的には誰でも利用できるようになり、 今後の展開を予感させる。 + +02:40.640 --> 02:45.350 +一種の推理の連鎖のようなアプローチで問題を考えていく。 + +02:45.350 --> 02:55.700 +同じ質問をしてみよう。 この文章にAという文字は何回出てくるだろうか? 
+ +02:56.150 --> 02:58.310 +もっとうまくやれるかどうか見てみよう。 + +02:59.690 --> 03:00.980 +考えているんだ。 + +03:02.480 --> 03:04.970 +確かに時間がかかるのはわかるだろう。 + +03:05.000 --> 03:06.710 +文字の頻度を数える。 + +03:06.710 --> 03:07.850 +それは期待できそうだ。 + +03:07.880 --> 03:09.320 +よく見てみよう。 + +03:09.350 --> 03:10.130 +知っておいて損はない。 + +03:10.130 --> 03:12.560 +そして、 正しい答えを導き出す。 + +03:12.560 --> 03:13.190 +その通りだ。 + +03:13.190 --> 03:16.040 +文中にAという文字が4回出てくる。 + +03:16.040 --> 03:21.320 +引用符の中に1回、 "appear "の中に2回。 + +03:21.320 --> 03:23.090 +だから正しい。 + +03:23.090 --> 03:24.230 +とても良い。 + +03:24.230 --> 03:27.080 +ええと、 それからイチゴにも聞いてみよう。 + +03:27.110 --> 03:33.650 +ああ、 このパズルをプレビューして、 このパズルにどうアプローチできるか見てみよう。 + +03:33.710 --> 03:41.300 +適切な例えを選択することは、 有望な育成でもある。 + +03:41.300 --> 03:44.510 +そして正解は爬虫類。 + +03:44.510 --> 03:53.960 +これで、 GPTの4つのゼロとゼロ・ワンのプレビューから、 異なるモデル、 異なる強さのいくつかを感じてもらえるだろう。 diff --git a/week5/community-contributions/subtitles/srts/60619149/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619149/ko_KR.srt new file mode 100755 index 0000000..615ca5a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619149/ko_KR.srt @@ -0,0 +1,184 @@ +WEBVTT + +00:00.020 --> 00:06.230 +그래서 개척자 모델의 세계를 탐구하기 위해 오픈AI의 유명한 GPT를 + +00:06.230 --> 00:10.550 +사용해 볼 겁니다 여러분도 잘 알고 계시겠죠 + +00:10.580 --> 00:15.680 +전 프로 라이선스가 있어요 모든 모델에 엑세스 권한이 있다는 뜻이죠 여러분도 + +00:15.710 --> 00:16.520 +그럴 거예요 + +00:16.550 --> 00:21.320 +소프트볼 질문으로 시작하겠습니다 그들이 아주 + +00:21.320 --> 00:30.110 +잘 대답하는 질문이죠 사업상의 문제가 LLM 해결책에 적합한지 어떻게 결정할까요? + +00:30.110 --> 00:34.910 +이 강의에서 물어볼 만한 질문이라 우리에게 유용하죠 + +00:35.030 --> 00:40.760 +그리고 나서 아주 조심스럽고 합리적인 구조를 갖춘 답변을 해 줍니다. 요약본을 제공하고 + +00:40.760 --> 00:44.300 +문제의 본질과 확장성 문제를 설명하죠. + +00:44.300 --> 00:50.480 +뉘앙스와 비구조적인 데이터 맥락적 이해 비용, 유지보수 + +00:50.480 --> 00:56.810 +논리적인 요점들과 훌륭한 요약도 들어 있어요 + +00:56.810 --> 01:00.110 +이런 게 정말 잘 되는 거죠 + +01:00.140 --> 01:05.300 +이제 질문을 할게요 보통은 맞지만 놀랍게도 틀릴 때도 있어요 + +01:05.330 --> 01:06.920 +이번엔 어떻게 될지 보죠 + +01:06.950 --> 01:15.500 +A가 이 문장에 몇 번이나 나오죠? 
+ +01:16.640 --> 01:18.560 +어떻게 되는지 보죠 + +01:18.770 --> 01:20.900 +잘못 만들었어요 + +01:20.900 --> 01:24.170 +A는 형량에 다섯 번 등장해요 + +01:24.410 --> 01:27.380 +잘 될 때도 있고 안 될 때도 있죠 + +01:27.380 --> 01:29.750 +알 수 없는 일이죠 + +01:29.750 --> 01:33.920 +하지만 잘못되면 충격받을 거예요 + +01:33.920 --> 01:37.610 +그렇다고 인간이 여전히 유리한 점이 있다는 건 아니에요 + +01:37.940 --> 01:42.710 +하지만 사실은 이 정보가 달 착륙선으로 전송되는 방식과 관련이 있어요 + +01:42.710 --> 01:45.260 +토큰화 전략과 관련이 있어요 + +01:45.260 --> 01:47.180 +그 얘긴 나중에 더 하죠 + +01:47.300 --> 01:50.090 +하지만 틀리다니 흥미롭네요 + +01:50.360 --> 01:55.160 +어려운 질문이지만 하나만 더 물어볼게요 + +01:55.160 --> 01:59.540 +질문을 할 건데 이 비유를 가장 잘 완성하는 단어를 고르세요 + +01:59.570 --> 02:03.440 +새에게 깃털은 비늘과 같아요 + +02:03.470 --> 02:05.390 +몇 가지 다른 옵션이 있어요 + +02:05.390 --> 02:11.390 +정답은 파충류 어류입니다 사실 어류도 비늘이 있어서 정답이라고 할 순 없지만 + +02:11.390 --> 02:15.260 +파충류에 비해 비늘이 특이한 기능은 아니죠 + +02:15.260 --> 02:20.060 +이 질문은 벨럼이라는 웹사이트에서 받았어요 이런 종류의 분석을 많이 + +02:20.090 --> 02:23.030 +하는 회사인데 나중에 얘기하도록 하죠 + +02:23.270 --> 02:23.840 +좋아요 + +02:23.840 --> 02:25.400 +다른 모델로 바꿔볼게요 + +02:25.400 --> 02:28.190 +01 프리뷰로 넘어가죠 + +02:28.220 --> 02:34.490 +원래 코드명이 딸기였던 모델입니다 오픈아이에서 가장 강력한 모델로 프로 구독자만 사용할 수 + +02:34.490 --> 02:39.260 +있지만 궁극적으로는 모두가 사용할 수 있습니다 앞으로 어떤 모델이 나올지 + +02:39.260 --> 02:40.610 +감이 오실 거예요 + +02:40.640 --> 02:45.350 +일련의 추론 접근을 통해 질문을 통해 생각해내죠 + +02:45.350 --> 02:55.700 +같은 질문을 해 보죠 A라는 글자가 이 문장에 몇 번 나오죠? + +02:56.150 --> 02:58.310 +더 잘하는지 보죠 + +02:59.690 --> 03:00.980 +생각 중이에요 + +03:02.480 --> 03:04.970 +얼마나 오래 걸리는지 아시겠죠? 
+ +03:05.000 --> 03:06.710 +문자 주파수를 세고 있어요 + +03:06.710 --> 03:07.850 +조짐이 좋은데요 + +03:07.880 --> 03:09.320 +자세히 살펴보죠 + +03:09.350 --> 03:10.130 +좋은 정보네요 + +03:10.130 --> 03:12.560 +정답을 맞힐 수 있어요 + +03:12.560 --> 03:13.190 +맞아요 + +03:13.190 --> 03:16.040 +A는 문장에 네 번 등장하죠 + +03:16.040 --> 03:21.320 +많은 경우에 한 번은 따옴표 안에 한 번은 단어에 두 번 나타나요 + +03:21.320 --> 03:23.090 +맞는 말이네요 + +03:23.090 --> 03:24.230 +좋아요 + +03:24.230 --> 03:27.080 +그리고 딸기한테도 물어보죠 + +03:27.110 --> 03:33.650 +이 퍼즐을 미리 보기하고 어떻게 접근하는지 보죠 + +03:33.710 --> 03:41.300 +적절한 비유를 선택하는 것도 배양의 장래성이죠 + +03:41.300 --> 03:44.510 +정답을 알려줍니다 파충류예요 + +03:44.510 --> 03:50.750 +이렇게 하면 다양한 모델과 GPT 40과 01 프리뷰의 + +03:50.750 --> 03:53.960 +장점을 알 수 있죠 diff --git a/week5/community-contributions/subtitles/srts/60619227/en_US.srt b/week5/community-contributions/subtitles/srts/60619227/en_US.srt new file mode 100755 index 0000000..580722e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619227/en_US.srt @@ -0,0 +1,229 @@ +WEBVTT + +00:00.050 --> 00:06.620 +And now let's move to Claude from anthropic, my favorite model and typically the favorite model of + +00:06.620 --> 00:08.120 +most data scientists. + +00:08.120 --> 00:15.020 +The most recent version, Claude 3.5 Sonnet New, that came out in October, is currently leading in + +00:15.050 --> 00:20.870 +most benchmarks, showing that it is right now probably the strongest LLM on the planet. + +00:20.900 --> 00:24.080 +Let's start off with a difficult question for Claude. + +00:24.080 --> 00:29.240 +What does it feel like to be jealous? + +00:30.590 --> 00:32.450 +See how it answers this? + +00:32.480 --> 00:39.200 +And what you'll find is that we'll get back something that is thoughtful and interesting and remarkably + +00:39.200 --> 00:40.340 +insightful. + +00:40.370 --> 00:44.420 +Uh, from my understanding, it's hinting at the fact that it doesn't experience. 
+ +00:44.420 --> 00:50.480 +Jealousy itself often manifests as fear, insecurity, and desire a tight knot in your stomach. + +00:50.480 --> 00:56.540 +So it gives it like a almost a biological sort of sense to it and a burning sensation in your chest. + +00:56.780 --> 01:00.590 +Racing thoughts, uh, a sense of inadequacy or being threatened. + +01:00.620 --> 01:02.570 +It's really interesting. + +01:02.570 --> 01:04.040 +Compelling answer. + +01:04.070 --> 01:07.140 +No problem at all with a difficult question like that. + +01:07.170 --> 01:07.680 +Okay. + +01:07.710 --> 01:15.000 +Let's ask how many times does the letter A appear in this sentence? + +01:16.500 --> 01:18.300 +See how it handles that? + +01:18.330 --> 01:20.700 +Let me count it. + +01:20.700 --> 01:21.660 +Gets it wrong. + +01:22.590 --> 01:24.420 +Hope for humanity still. + +01:24.510 --> 01:31.860 +Uh, so, uh, Claude counts five times and gives an incorrect explanation for that again. + +01:31.890 --> 01:34.710 +We'll find out later why these sorts of questions are harder. + +01:34.710 --> 01:36.240 +And so, so far it's zero. + +01:36.240 --> 01:39.870 +One preview is the only model that was able to handle that. + +01:40.050 --> 01:40.590 +All right. + +01:40.590 --> 01:47.340 +Let's ask it a tricky question compared with other frontier llms what kinds of questions do you best + +01:47.340 --> 01:49.980 +at answering and what do you find most challenging? + +01:50.250 --> 01:53.340 +Which others compare with you? + +01:53.370 --> 01:56.130 +So what you get from Claude here is interesting. + +01:56.130 --> 02:03.990 +It pushes back and this ties to to anthropic strong views about the need for safety and alignment in + +02:03.990 --> 02:04.830 +models. + +02:05.010 --> 02:09.690 +Um, it says I aim to be direct and transparent whilst respecting my ethics. + +02:09.690 --> 02:14.480 +I am not comfortable making comparative claims versus other AI models. 
+ +02:14.480 --> 02:18.110 +What it will then do is tell you about its own strengths and weaknesses. + +02:18.110 --> 02:20.630 +It's a very interesting kind of answer. + +02:21.200 --> 02:28.310 +By comparison, if we go back to GPT and ask it the same question, we'll see how GPT responds and what + +02:28.310 --> 02:34.130 +you'll find there is it doesn't have the same kind of qualms about about responding. + +02:34.130 --> 02:41.090 +You'll get something very clear about where it's strongest, the challenges and then complementary. + +02:41.600 --> 02:49.580 +So ChatGPT with web browsing and code interpreter, which I think is its name for the canvas piece, + +02:49.580 --> 02:50.570 +Claude. + +02:50.600 --> 02:57.080 +It gives interestingly as a comparison point and then it says Barred by Google and it should really + +02:57.110 --> 02:58.460 +say Gemini. + +02:58.820 --> 03:04.850 +But it's interesting that it that it does give the main competitors and it talks about some of the differences. + +03:04.850 --> 03:05.510 +And look at this. + +03:05.510 --> 03:13.010 +Fascinatingly, it does mention that Claude has more thoughtful responses on broader socio ethical considerations, + +03:13.010 --> 03:15.870 +which could complement my technical focus. + +03:15.870 --> 03:23.520 +Fascinating that not only does GPT four have that kind of ability to compare, but it gives really well + +03:23.550 --> 03:25.260 +considered answers to. + +03:25.290 --> 03:27.600 +So I thought that was particularly interesting. + +03:28.140 --> 03:28.650 +All right. + +03:28.650 --> 03:33.330 +Anyway, that gives us our quick tour of some of the features of, uh, Claude. + +03:33.330 --> 03:39.690 +It's also worth saying that Claude is really effective at coding and working with you on code. 
+ +03:39.750 --> 03:48.150 +Let's ask it for, uh, to give an example, let's say please, uh, give me some example code that + +03:48.150 --> 04:00.150 +uses, uh, that, uh, some example Python that, uh, uses the open AI API. + +04:00.450 --> 04:04.620 +Let's see how it handles this. + +04:04.620 --> 04:13.110 +So what you'll see here is that it creates code in what it's called an artifact as a separate piece + +04:13.110 --> 04:14.970 +of code over on the right. + +04:15.150 --> 04:20.220 +Uh, and it then has produced quite a lot of code here. + +04:20.220 --> 04:26.150 +I have to say, but you'll see that it's got client, which is what we called open AI and code chat + +04:26.360 --> 04:32.300 +completions dot create and that the result is response dot choices, zero dot message content. + +04:32.330 --> 04:35.630 +Hopefully that's that's a little bit familiar to you since we did that last time. + +04:35.630 --> 04:37.160 +It's put it into a class. + +04:37.160 --> 04:38.690 +It's got some examples. + +04:38.690 --> 04:45.050 +And so it's written this really nicely in this thing that's called an artifact, which is it's not not + +04:45.050 --> 04:47.210 +quite the same as the way that canvas works with GPT four. + +04:47.210 --> 04:53.120 +It's a bit different, but but it allows you to see this this file, this artifact, you can then publish + +04:53.120 --> 04:56.060 +it to share it with other people or download it and so on. + +04:56.060 --> 05:02.000 +So using Claude with artifacts somewhat similar to to to canvases. + +05:02.000 --> 05:04.220 +And it gives you this very powerful way to do it. + +05:04.220 --> 05:09.620 +And as you interact it will produce more different artifacts rather than changing this one. + +05:09.650 --> 05:14.030 +And it's sometimes useful to have that experience too, because you can tie back and see all the different + +05:14.030 --> 05:17.150 +versions of the file as you worked together. 
+ +05:17.150 --> 05:19.760 +So that gives you a little tour of Claude. + +05:19.760 --> 05:26.390 +What it's good at its sense of alignment and safety, and also the way that you can create artifacts. diff --git a/week5/community-contributions/subtitles/srts/60619227/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619227/ja_JP.srt new file mode 100755 index 0000000..876d24c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619227/ja_JP.srt @@ -0,0 +1,196 @@ +WEBVTT + +00:00.050 --> 00:08.120 +そして次は、 私のお気に入りのモデルであり、 一般的に多くのデータサイエンティストが好むモデルであるアントロピックのクロードに話を移そう。 + +00:08.120 --> 00:15.020 +最新版のクロード3。 5 10月に発表されたソネット・ニューは、 現在ほとんどのベンチマークで首位に立っており、 + +00:15.050 --> 00:20.870 +この地球上でおそらく最強のLLMであることを示している。 + +00:20.900 --> 00:24.080 +クロードにとって難しい質問から始めよう。 + +00:24.080 --> 00:29.240 +嫉妬するってどんな感じ? + +00:30.590 --> 00:32.450 +この答えを見てどう思う? + +00:32.480 --> 00:40.340 +そうすれば、 思慮深く、 興味深く、 驚くほど洞察に富んだものが返ってくるはずだ。 + +00:40.370 --> 00:44.420 +私の理解では、 それは経験しないという事実をほのめかしている。 + +00:44.420 --> 00:50.480 +嫉妬そのものは、 しばしば恐怖や不安、 胃の締め付けられるような欲望として現れる。 + +00:50.480 --> 00:56.540 +だから、 ほとんど生物学的な感覚で、 胸が焼けるような感覚になる。 + +00:56.780 --> 01:00.590 +焦るような思い、 つまり物足りなさや脅威を感じる。 + +01:00.620 --> 01:02.570 +実に興味深い。 + +01:02.570 --> 01:04.040 +説得力のある答えだ。 + +01:04.070 --> 01:07.140 +そのような難しい質問はまったく問題ない。 + +01:07.170 --> 01:07.680 +オーケー。 + +01:07.710 --> 01:15.000 +この文章にAという文字が何回出てくるか聞いてみよう。 + +01:16.500 --> 01:18.300 +どう処理するか見てみよう。 + +01:18.330 --> 01:20.700 +数えてみよう。 + +01:20.700 --> 01:21.660 +間違っている。 + +01:22.590 --> 01:24.420 +人類の希望はまだある。 + +01:24.510 --> 01:31.860 +ええと、 それで、 クロードは5回数えて、 それについてまた間違った説明をする。 + +01:31.890 --> 01:34.710 +この種の質問がなぜ難しいかは、 後でわかるだろう。 + +01:34.710 --> 01:36.240 +だから、 今のところゼロだ。 + +01:36.240 --> 01:39.870 +それに対応できたのは、 ワンプレビューだけである。 + +01:40.050 --> 01:40.590 +分かった。 + +01:40.590 --> 01:49.980 +他のフロンティアのLLMと比べて、 どのような質問に答えるのが最も得意で、 どのようなことに最もやりがいを感じますか? + +01:50.250 --> 01:53.340 +あなたと比較する他人は? 
+ +01:53.370 --> 01:56.130 +クロードから得たものは興味深い。 + +01:56.130 --> 02:04.830 +これは、 モデルにおける安全性と整合性の必要性についての人類学的な強い見解と結びついている。 + +02:05.010 --> 02:09.690 +私は自分の倫理観を尊重しながら、 率直で透明性のある人間でありたいと思っています。 + +02:09.690 --> 02:14.480 +他のAIモデルとの比較を主張するのは気が引ける。 + +02:14.480 --> 02:18.110 +そして、 自らの長所と短所を教えてくれる。 + +02:18.110 --> 02:20.630 +とても興味深い答えだ。 + +02:21.200 --> 02:28.310 +それに比べて、 GPTに戻って同じ質問をしてみれば、 + +02:28.310 --> 02:34.130 +GPTがどう答えるかわかるだろう。 + +02:34.130 --> 02:41.090 +どこが一番強いのか、 課題は何か、 そしてそれを補うものは何か、 はっきりしたものが得られるはずだ。 + +02:41.600 --> 02:50.570 +つまり、 ChatGPTにウェブ・ブラウジングとコード・インタプリタ、 つまりキャンバス・ピースのクロードという名前だと思う。 + +02:50.600 --> 02:58.460 +比較対象としてinterestlyと表示され、 その後にBarred by Googleと表示されている。 + +02:58.820 --> 03:04.850 +しかし、 主な競争相手を示し、 いくつかの違いについて語っているのは興味深い。 + +03:04.850 --> 03:05.510 +そして、 これを見てほしい。 + +03:05.510 --> 03:15.870 +魅力的なことに、 クロードはより広範な社会倫理的考察について、 より思慮深い回答をしており、 それは私の技術的な焦点を補うことができる。 + +03:15.870 --> 03:25.260 +GPT4がそのような比較能力を持っているだけでなく、 実によく検討された答えを出しているのは魅力的だ。 + +03:25.290 --> 03:27.600 +だから特に興味深いと思ったんだ。 + +03:28.140 --> 03:28.650 +分かった。 + +03:28.650 --> 03:33.330 +ともあれ、 これでクロードの機能の一部を簡単に紹介できた。 + +03:33.330 --> 03:39.690 +また、 クロードはコーディングが得意で、 あなたと一緒にコードを考えてくれる。 + +03:39.750 --> 04:00.150 +例えば、 オープンAI APIを使ったPythonのサンプルコードを教えてください。 + +04:00.450 --> 04:04.620 +これをどう処理するか見てみよう。 + +04:04.620 --> 04:14.970 +つまり、 右側にあるアーティファクトと呼ばれる別のコードにコードを作成します。 + +04:15.150 --> 04:20.220 +その結果、 ここに多くのコードが生まれた。 + +04:20.220 --> 04:26.150 +私たちがオープンAIと呼んでいるものですが、 クライアントがあり、 コードチャットの完了がドットクリエイトされ、 + +04:26.360 --> 04:32.300 +その結果がレスポンスのドットチョイスであり、 メッセージの内容がゼロドットであることがわかると思います。 + +04:32.330 --> 04:35.630 +前回もそうだったので、 少しはお分かりいただけただろうか。 + +04:35.630 --> 04:37.160 +それをクラス分けしたんだ。 + +04:37.160 --> 04:38.690 +いくつかの例がある。 + +04:38.690 --> 04:47.210 +アーティファクトと呼ばれるもので、 キャンバスがGPT 4で機能する方法とは少し違います。 + +04:47.210 --> 04:53.120 +少し違うが、 このファイル、 アーティファクトを見ることができ、 それを公開して他の人と共有したり、 + +04:53.120 --> 04:56.060 +ダウンロードしたりすることができる。 + +04:56.060 --> 05:02.000 +だから、 クロードをキャンバスに使うのと同じようなものだ。 + +05:02.000 --> 
05:04.220 +そして、 そのためのとてもパワフルな方法を教えてくれる。 + +05:04.220 --> 05:09.620 +そして、 相互作用によって、 このアーティファクトを変更するのではなく、 より多くの異なるアーティファクトを生み出すことになる。 + +05:09.650 --> 05:14.030 +また、 そのような経験があると、 一緒に仕事をしたときに、 すべての異なるバージョンのファイルを遡って見ることができるので、 + +05:14.030 --> 05:17.150 +便利なこともある。 + +05:17.150 --> 05:19.760 +これでクロードを少し案内したことになる。 + +05:19.760 --> 05:26.390 +アラインメントと安全性、 そしてアーティファクトの作り方だ。 diff --git a/week5/community-contributions/subtitles/srts/60619227/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619227/ko_KR.srt new file mode 100755 index 0000000..774f877 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619227/ko_KR.srt @@ -0,0 +1,226 @@ +WEBVTT + +00:00.050 --> 00:06.620 +이제 인류학의 클로드로 넘어가죠 제가 제일 좋아하는 모델이에요 데이터 과학자들이 대부분 + +00:06.620 --> 00:08.120 +좋아하는 모델이죠 + +00:08.120 --> 00:15.020 +가장 최근 버전인 클로드 3이죠 10월에 나온 뉴 소넷 5편이 벤치마킹에서 현재 + +00:15.050 --> 00:20.870 +선두를 달리고 있어요 현재 세계에서 가장 강력한 LLM이라는 뜻이죠 + +00:20.900 --> 00:24.080 +클로드에게 어려운 질문으로 시작해보죠 + +00:24.080 --> 00:29.240 +질투하는 기분이 어때요? + +00:30.590 --> 00:32.450 +어떻게 대답하는지 봤죠? + +00:32.480 --> 00:39.200 +Get in get을 통해 여러분은 사려 깊고 흥미롭고 놀랍도록 통찰력 있는 걸 보게 + +00:39.200 --> 00:40.340 +될 거예요 + +00:40.370 --> 00:44.420 +제가 알기로는 경험이 없다는 걸 암시하는 것 같아요 + +00:44.420 --> 00:50.480 +질투는 종종 두려움과 불안 불안한 마음과 욕망으로 나타나요 + +00:50.480 --> 00:56.540 +그래서 거의 생물학적 감각을 느끼게 되죠 가슴이 타는 듯한 느낌도요 + +00:56.780 --> 01:00.590 +성급한 생각, 무능함, 위협감 같은 거요 + +01:00.620 --> 01:02.570 +정말 흥미로워요 + +01:02.570 --> 01:04.040 +설득력 있는 대답이네요 + +01:04.070 --> 01:07.140 +그런 어려운 질문이라면 문제없어요 + +01:07.170 --> 01:07.680 +네 + +01:07.710 --> 01:15.000 +알파벳 A가 이 문장에 몇 번이나 나오는지 보죠 + +01:16.500 --> 01:18.300 +어떻게 처리하는지 보이죠? 
+ +01:18.330 --> 01:20.700 +세어 볼게요 + +01:20.700 --> 01:21.660 +틀려요 + +01:22.590 --> 01:24.420 +인류에 대한 희망이죠 + +01:24.510 --> 01:31.860 +클로드는 다섯 번 세고 다시 틀린 설명을 해요 + +01:31.890 --> 01:34.710 +왜 이런 문제가 더 어려운지는 잠시 후에 알아보도록 하죠 + +01:34.710 --> 01:36.240 +지금까지는 0마리예요 + +01:36.240 --> 01:39.870 +그걸 처리할 수 있는 모델은 프리뷰 하나뿐이에요 + +01:40.050 --> 01:40.590 +좋아요 + +01:40.590 --> 01:47.340 +다른 질문과 비교해서 까다로운 질문을 해 보죠 어떤 질문에 가장 잘 대답하고 + +01:47.340 --> 01:49.980 +어떤 점이 가장 어려운가요? + +01:50.250 --> 01:53.340 +당신과 비교하면요? + +01:53.370 --> 01:56.130 +클로드의 모습이 흥미롭죠 Get up + +01:56.130 --> 02:03.990 +이는 안전과 정렬의 필요성에 대한 인류학적인 견해와 밀접한 관련이 + +02:03.990 --> 02:04.830 +있죠 + +02:05.010 --> 02:09.690 +직설적이고 투명하며 윤리를 존중한다고 쓰여 있어요 + +02:09.690 --> 02:14.480 +다른 인공지능 모델과 비교하는 클레임은 불편하네요 + +02:14.480 --> 02:18.110 +그리고 각각의 강점과 약점을 알려 주죠 + +02:18.110 --> 02:20.630 +아주 흥미로운 대답이네요 + +02:21.200 --> 02:28.310 +비교해서 GPT로 돌아가 같은 질문을 하면 GPT가 어떻게 반응하는지 볼 + +02:28.310 --> 02:34.130 +수 있습니다 반응에 대한 가책은 느끼지 않는다는 걸 알 수 있죠 + +02:34.130 --> 02:41.090 +뭐가 가장 강한지, 도전과 상호 보완이 뭔지 명확히 알게 되죠 Get it + +02:41.600 --> 02:50.570 +웹 브라우징과 코드 인터프리터가 있는 챗GPT예요 캔버스 작품의 이름인 클로드죠 + +02:50.600 --> 02:57.080 +흥미롭게도 비교 대상을 제시해요 구글은 쌍둥이자리임을 인정하지 + +02:57.110 --> 02:58.460 +않지만요 + +02:58.820 --> 03:04.850 +하지만 흥미로운 건 주요 경쟁자들이 차이점을 언급하고 있다는 거예요 + +03:04.850 --> 03:05.510 +이것 좀 보세요 + +03:05.510 --> 03:13.010 +흥미롭게도 클로드는 더 광범위한 사회 윤리적 고려 사항에 대해 사려 깊은 답변을 하고 있습니다 제 기술적 + +03:13.010 --> 03:15.870 +초점을 보완할 수 있는 것들이죠 + +03:15.870 --> 03:23.520 +GPT 4는 비교 능력이 있을 뿐만 아니라 아주 신중하게 답을 제공한다는 + +03:23.550 --> 03:25.260 +게 흥미롭네요 + +03:25.290 --> 03:27.600 +그래서 특히 흥미로웠어요 + +03:28.140 --> 03:28.650 +좋아요 + +03:28.650 --> 03:33.330 +어쨌든 클로드의 특징을 간단히 소개해 드렸어요 + +03:33.330 --> 03:39.690 +클로드는 코딩에 정말 효과적이란 걸 말씀 드릴 가치가 있어요 코드에 대해 여러분과 함께 작업하죠 + +03:39.750 --> 03:48.150 +예를 들어보죠 예를 들어 파이썬 코드 같은 예제를 + +03:48.150 --> 04:00.150 +요청해보세요 오픈 인공지능 API를 사용하는 예제요 + +04:00.450 --> 04:04.620 +어떻게 되는지 보죠 + +04:04.620 --> 04:13.110 +여기 보이는 건 코드를 생성하는 겁니다 아티팩트라고 부르는 것으로 우측에 있는 + 
+04:13.110 --> 04:14.970 +개별 코드죠 + +04:15.150 --> 04:20.220 +그리고 꽤 많은 코드를 생성했죠 + +04:20.220 --> 04:26.150 +클라이언트가 있는 게 보이실 텐데요 오픈 인공지능과 코드 채팅 완료라고 + +04:26.360 --> 04:32.300 +부르는 거죠 그 결과 응답은 .선택과 0.Contion이에요 + +04:32.330 --> 04:35.630 +지난 시간에 비트를 살펴본 이후로 익숙해지셨길 바라요 + +04:35.630 --> 04:37.160 +Put it로 클래스를 만들었어요. + +04:37.160 --> 04:38.690 +예시가 좀 있어요 + +04:38.690 --> 04:45.050 +아주 잘 쓰여 있어요 아티팩트라는 것에요 GPT4로 캔버스가 작동하는 + +04:45.050 --> 04:47.210 +방식과는 좀 다르죠 + +04:47.210 --> 04:53.120 +약간 다르지만 이 아티팩트 파일을 볼 수 있게 해줘요 다른 사람들과 공유하거나 + +04:53.120 --> 04:56.060 +다운로드 받도록 게시할 수 있죠 + +04:56.060 --> 05:02.000 +클로드를 공예품으로 표현했어요 캔버스 같은 거죠 + +05:02.000 --> 05:04.220 +아주 강력한 방법을 제공하죠 + +05:04.220 --> 05:09.620 +상호 작용을 하면 이걸 바꾸는 대신 더 많은 아티팩트를 생성해요 + +05:09.650 --> 05:14.030 +그런 경험을 하는 것도 유용할 때가 있어요 함께 작업하면서 파일의 + +05:14.030 --> 05:17.150 +모든 다양한 버전을 볼 수 있으니까요 + +05:17.150 --> 05:19.760 +클로드를 잠깐 둘러보셨어요 + +05:19.760 --> 05:26.390 +정렬과 안전성을 높이고 인공물을 만들 수 있어요 diff --git a/week5/community-contributions/subtitles/srts/60619247/en_US.srt b/week5/community-contributions/subtitles/srts/60619247/en_US.srt new file mode 100755 index 0000000..ab6c724 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619247/en_US.srt @@ -0,0 +1,259 @@ +WEBVTT + +00:00.110 --> 00:04.970 +We're going to spend a little bit more time with GPT just to try out a few more interesting things. + +00:04.970 --> 00:09.470 +I want to start with a question that used to be asked of an earlier version of GPT. + +00:09.500 --> 00:15.170 +I think it was GPT two to show its limitations, and we'll see whether it still has a problem with it. + +00:15.170 --> 00:18.830 +The famous question was how many rainbows? + +00:20.840 --> 00:21.530 +Sounding interesting? + +00:21.530 --> 00:29.960 +How many rainbows does it take to leap from Hawaii to 17? + +00:30.110 --> 00:31.610 +An interesting question. 
+ +00:31.610 --> 00:37.550 +And GPT two, I think it was, responded very firmly that it takes three rainbows to leap from Hawaii + +00:37.550 --> 00:41.360 +to 17, clearly missing the nuance of the question. + +00:41.660 --> 00:45.200 +Uh, but GPT four has no such problem. + +00:45.200 --> 00:45.590 +You'll see. + +00:45.590 --> 00:53.150 +It gives back a witty and fun response, and it even includes a rainbow emoji in its response. + +00:53.390 --> 00:54.710 +It's astonishing. + +00:55.070 --> 01:00.680 +So there you have it, a very easy to deal with this kind of trickery and in fact, because GPT four + +01:00.710 --> 01:04.520 +O, the O stands for Omni, representing that it is multimodal. + +01:04.520 --> 01:09.560 +We can of course say please generate an image of this. + +01:11.420 --> 01:16.790 +And it's you know, I know everyone has seen this sort of thing before, but it's funny to think that + +01:16.790 --> 01:22.970 +just a few years ago, people were debating whether it would ever be possible to have an LLM that would + +01:22.970 --> 01:29.090 +be able to show some kind of imagination or creativity, and now it's almost a foregone conclusion that + +01:29.090 --> 01:36.380 +this, this kind of thing can be generated, this astonishing picture of Hawaii, the number 17 and + +01:36.380 --> 01:38.360 +leaps of rainbows getting there. + +01:38.390 --> 01:41.750 +I mean, it's honestly it's just amazing. + +01:41.780 --> 01:43.190 +Absolutely amazing. + +01:43.400 --> 01:47.390 +Uh, so and it even explains itself and it's, uh. + +01:47.390 --> 01:52.100 +Yeah, it's so very effective at this kind of creative challenge. + +01:52.490 --> 01:57.920 +All right, on a different note, let's try out GPT four with canvas. + +01:58.100 --> 02:00.920 +Another of the newer features that we have from GPT. + +02:01.400 --> 02:07.100 +Um, and this allows you to work very collaboratively with, uh, with OpenAI. 
+ +02:07.190 --> 02:13.080 +Um, and let's let's look back to the Python question that I had asked you a while ago. + +02:13.080 --> 02:23.820 +Let's say, um, please use canvas to work with me with this code. + +02:25.410 --> 02:25.680 +Uh. + +02:25.680 --> 02:26.160 +Let's see. + +02:26.190 --> 02:26.760 +Yield. + +02:26.760 --> 02:30.570 +From now, I'll do something that's maybe not. + +02:30.600 --> 02:31.920 +I'll change it a little bit. + +02:31.950 --> 02:42.780 +Yield from, uh, book get author for book in books, uh, using a list comprehension. + +02:42.780 --> 02:46.410 +And I'm not telling it what books are or what this means. + +02:46.410 --> 02:50.070 +I'm just giving it this line of code and seeing what it makes of it. + +02:50.070 --> 02:51.870 +So let's see what happens now. + +02:51.870 --> 02:57.540 +So first of all, it opens up this canvas that you'll see on the right hand side, which gives us an + +02:57.540 --> 03:00.210 +ability to iterate with it on this code. + +03:00.480 --> 03:01.740 +Uh, okay. + +03:01.770 --> 03:02.910 +And there it has it. + +03:03.060 --> 03:05.730 +Um, uh, it's showing the code. + +03:05.760 --> 03:17.420 +Please, uh, extend this to show an example of this being used And what we'll see now is you'll see + +03:17.420 --> 03:24.560 +a sort of cursor going through, and it's going to rewrite and embellish, enrich what we had there. + +03:24.560 --> 03:27.920 +And so now that piece of code has been turned into a generator. + +03:27.920 --> 03:30.140 +It's got some data that gives an example. + +03:30.140 --> 03:36.260 +And it's understood that there are books with a title and an author, and it's got like a little test + +03:36.380 --> 03:37.370 +example of it. + +03:37.370 --> 03:41.120 +So effective the way you saw it coming through and rewriting. + +03:41.120 --> 03:57.380 +So now let's say please modify this code so that the generator excludes books with a missing or empty + +03:57.410 --> 03:58.430 +author. 
+ +04:00.410 --> 04:04.220 +And you can imagine this might be like a business requirement that comes through. + +04:04.340 --> 04:10.310 +Uh, and what you'll see there is something that looks really similar to the code I had there. + +04:10.310 --> 04:16.820 +It went through it, gave some more examples with an empty and a missing book, and then it rewrote + +04:16.820 --> 04:17.810 +this line. + +04:17.810 --> 04:24.220 +It's so, so good the way that it that it's able to sort of interactively add to what we've done before. + +04:24.250 --> 04:36.220 +And so now let's say please update this so that it only yields unique authors. + +04:36.250 --> 04:38.380 +I'm being sort of loose with my language. + +04:38.380 --> 04:42.100 +I'm not saying even what what I want it to to to update. + +04:42.100 --> 04:47.470 +And now through it goes and it's done the job it's gone through. + +04:47.500 --> 04:53.770 +It's added in another example with the same author, and it's updated this code so that it uses a set + +04:53.800 --> 04:59.350 +to keep track of what it's seen and only yield unique cases. + +04:59.380 --> 05:03.490 +Now, this is in fact, I think, a better solution than the one liner that I had. + +05:03.490 --> 05:05.020 +But it is lengthier. + +05:05.140 --> 05:09.190 +But there are various reasons why it's probably better to do it that way, but let's still see if I + +05:09.190 --> 05:10.720 +can't get it. + +05:10.720 --> 05:22.360 +To rewrite this in a simpler way, let's just say is it possible to rewrite the Yield authors generator? + +05:22.780 --> 05:24.310 +Let's just say to simplify. + +05:25.030 --> 05:31.050 +To simplify the let's let it have a shot at that. + +05:31.140 --> 05:32.100 +Through it goes. + +05:32.100 --> 05:34.200 +It's not changing that. + +05:34.200 --> 05:36.330 +And there we go. 
+ +05:38.220 --> 05:46.080 +And it has indeed done exactly basically, again, slightly better than the code that I had left you + +05:46.110 --> 05:47.310 +with on a previous slide. + +05:47.310 --> 05:50.310 +It uses the curly braces to represent the set. + +05:50.400 --> 05:54.750 +Uh, it could have just said yield from that line, but it's chosen to do it this way, and it looks + +05:54.750 --> 05:56.190 +very effective to me. + +05:56.190 --> 06:03.270 +So I am staggered by the whole experience of working with GPT in this way, and the way that you're + +06:03.270 --> 06:08.760 +able to be so interactive, the way it will do things like build example data and it will update things + +06:08.760 --> 06:10.110 +and show you what happens. + +06:10.110 --> 06:16.350 +So yeah, extremely powerful and something which I really encourage you to use as we're going through + +06:16.350 --> 06:20.490 +this course and you get things that you're stuck with, the trick if it doesn't bring up the canvas, + +06:20.520 --> 06:27.240 +is to use the words use canvas, and then it will come up and use this as a tool at your disposal to + +06:27.270 --> 06:30.960 +help explain and work through problems and ideas. diff --git a/week5/community-contributions/subtitles/srts/60619247/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619247/ja_JP.srt new file mode 100755 index 0000000..915ac06 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619247/ja_JP.srt @@ -0,0 +1,217 @@ +WEBVTT + +00:00.110 --> 00:04.970 +GPTにはもう少し時間をかけて、 もう少し面白いことを試してみたい。 + +00:04.970 --> 00:09.470 +GPTの以前のバージョンでよく聞かれた質問から始めたい。 + +00:09.500 --> 00:15.170 +GPTの限界を示すためのGPT2だったと思うが、 それでも問題があるかどうかはこれからだ。 + +00:15.170 --> 00:18.830 +有名な質問は「虹は何本? + +00:20.840 --> 00:21.530 +面白そうだろう? + +00:21.530 --> 00:29.960 +ハワイから17番まで跳ぶのに何本の虹がかかる? 
+ +00:30.110 --> 00:31.610 +興味深い質問だ。 + +00:31.610 --> 00:41.360 +そしてGPTの2人だったと思うが、 ハワイから17番まで跳ぶには3本の虹が必要だと強く答えた。 + +00:41.660 --> 00:45.200 +でも、 GPT4にはそのような問題はない。 + +00:45.200 --> 00:45.590 +今にわかるよ。 + +00:45.590 --> 00:53.150 +ウィットに富んだ楽しい返事を返し、 その返事にはレインボーの絵文字まで含まれている。 + +00:53.390 --> 00:54.710 +驚くべきことだ。 + +00:55.070 --> 01:04.520 +GPTの4つのOはOmni(オムニ)の略で、 マルチモーダルであることを表している。 + +01:04.520 --> 01:09.560 +もちろん、 これをイメージしてくださいと言うこともできる。 + +01:11.420 --> 01:22.970 +ほんの数年前までは、 想像力や創造性を発揮できるような法学修士号を取得することは可能だろうかと議論されていたのに、 + +01:22.970 --> 01:29.090 +今では、 ハワイの驚くべき写真、 17という数字、 そしてそこに至る虹の跳躍など、 + +01:29.090 --> 01:38.360 +このようなものが生み出されることはほぼ当然の結論になっている。 + +01:38.390 --> 01:41.750 +つまり、 正直なところ、 本当に素晴らしいんだ。 + +01:41.780 --> 01:43.190 +本当に素晴らしい。 + +01:43.400 --> 01:47.390 +それで、 自分で説明までしてくれるんだ。 + +01:47.390 --> 01:52.100 +ええ、 このような創造的な挑戦にはとても効果的です。 + +01:52.490 --> 01:57.920 +さて、 話は変わるが、 キャンバスでGPT4を試してみよう。 + +01:58.100 --> 02:00.920 +GPTのもうひとつの新機能。 + +02:01.400 --> 02:07.100 +そして、 OpenAIと非常に協力的に仕事をすることができる。 + +02:07.190 --> 02:13.080 +ええと、 少し前に質問したパイソンの質問に戻りましょう。 + +02:13.080 --> 02:23.820 +例えば、 キャンバスを使って、 このコードで私と仕事をしてください。 + +02:25.410 --> 02:25.680 +ええと。 + +02:25.680 --> 02:26.160 +見てみよう。 + +02:26.190 --> 02:26.760 +収量。 + +02:26.760 --> 02:30.570 +これからは、 そうじゃないかもしれないことをする。 + +02:30.600 --> 02:31.920 +少し変えてみるよ。 + +02:31.950 --> 02:42.780 +本の中の本の著者を得る。 + +02:42.780 --> 02:46.410 +そして、 私は本が何であるか、 これが何を意味するかは教えていない。 + +02:46.410 --> 02:50.070 +私はただ、 このコード行を与えて、 それがどうなるかを見ているだけだ。 + +02:50.070 --> 02:51.870 +では、 これからどうなるか見てみよう。 + +02:51.870 --> 03:00.210 +まず最初に、 右側にあるキャンバスを開き、 このコードを反復することができる。 + +03:00.480 --> 03:01.740 +ああ、 わかった。 + +03:01.770 --> 03:02.910 +そして、 それはそこにある。 + +03:03.060 --> 03:05.730 +ええと、 コードが表示されています。 + +03:05.760 --> 03:17.420 +カーソルのようなものが表示され、 書き換えたり、 + +03:17.420 --> 03:24.560 +装飾したりする。 + +03:24.560 --> 03:27.920 +そして今、 そのコードの一部がジェネレーターに変わった。 + +03:27.920 --> 03:30.140 +例を示すデータもある。 + +03:30.140 --> 03:37.370 +そして、 タイトルと著者のある本があることは理解されているし、 その小さなテスト例のようなものもある。 
+ +03:37.370 --> 03:41.120 +だから、 あなたがそれを見抜き、 書き直した方法は効果的だった。 + +03:41.120 --> 03:58.430 +それでは、 ジェネレーターが作者のいない本や空白の本を除外するように、 このコードを修正してください。 + +04:00.410 --> 04:04.220 +そして、 これはビジネス上の要求のようなものだと想像できるだろう。 + +04:04.340 --> 04:10.310 +そして、 そこに表示されるのは、 私がそこに持っていたコードと本当によく似たものだ。 + +04:10.310 --> 04:17.810 +そして、 空の本と欠けている本の例をいくつか挙げ、 この行を書き直した。 + +04:17.810 --> 04:24.220 +これまでやってきたことをインタラクティブに追加できるのがとてもいい。 + +04:24.250 --> 04:36.220 +それでは、 ユニークな著者だけが得られるように更新してください。 + +04:36.250 --> 04:38.380 +ちょっと言い方がゆるいかな。 + +04:38.380 --> 04:42.100 +何を更新してほしいかまでは言っていない。 + +04:42.100 --> 04:47.470 +そして今、 それは通過し、 それは通過した仕事をやり遂げた。 + +04:47.500 --> 04:53.770 +同じ作者による別の例が追加され、 このコードが更新され、 見たものを追跡するためにセットを使用し、 + +04:53.800 --> 04:59.350 +ユニークなケースだけが得られるようになった。 + +04:59.380 --> 05:03.490 +実際、 これは私が持っていた1本のライナーよりも良い解決策だと思う。 + +05:03.490 --> 05:05.020 +しかし、 もっと長い。 + +05:05.140 --> 05:10.720 +でも、 その方がいい理由はいろいろあるんだろうけど、 それでも取れないかどうかやってみよう。 + +05:10.720 --> 05:22.360 +これをもっと簡単に書き直すと、 『イールド』の著者ジェネレーターを書き直すことは可能か? + +05:22.780 --> 05:24.310 +簡単に言おう。 + +05:25.030 --> 05:31.050 +それを単純化するために、 一発やらせてみよう。 + +05:31.140 --> 05:32.100 +スルーだ。 + +05:32.100 --> 05:34.200 +それは変わらない。 + +05:34.200 --> 05:36.330 +そして、 これだ。 + +05:38.220 --> 05:47.310 +そしてそれは、 前のスライドでお見せしたコードよりも、 また少し良くなっています。 + +05:47.310 --> 05:50.310 +中括弧を使って集合を表す。 + +05:50.400 --> 05:56.190 +そのセリフから "yield "と言うこともできたが、 この方法を選んだ。 + +05:56.190 --> 06:03.270 +このような形でGPTと仕事をする経験全体、 そしてインタラクティブなやり方、 例題データの構築のようなことをやってくれるやり方、 + +06:03.270 --> 06:10.110 +そして更新して何が起こるかを見せてくれるやり方に、 私は驚いている。 + +06:10.110 --> 06:16.350 +もしキャンバスが表示されなかったら、 + +06:16.350 --> 06:30.960 +キャンバスを使うという言葉を使えば表示される。 diff --git a/week5/community-contributions/subtitles/srts/60619247/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619247/ko_KR.srt new file mode 100755 index 0000000..3b4ff9e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619247/ko_KR.srt @@ -0,0 +1,256 @@ +WEBVTT + +00:00.110 --> 00:04.970 +GPT와 시간을 더 보내면서 몇 가지 흥미로운 걸 시도할 거예요 + +00:04.970 
--> 00:09.470 +초기 버전의 GPT에서 자주 받던 질문으로 시작할게요 + +00:09.500 --> 00:15.170 +GPT 2가 한계를 보여준 것 같아요 여전히 문제가 있는지 두고 봐야죠 + +00:15.170 --> 00:18.830 +다들 무지개가 몇 개냐고 물었죠 + +00:20.840 --> 00:21.530 +흥미롭죠? + +00:21.530 --> 00:29.960 +하와이에서 17개까지 뛰려면 무지개가 몇 개 필요할까요? + +00:30.110 --> 00:31.610 +흥미로운 질문이네요 + +00:31.610 --> 00:37.550 +GPT 2의 반응은 아주 단호했어요 하와이에서 17로 가려면 무지개가 세 + +00:37.550 --> 00:41.360 +개여야 한다는데 질문의 뉘앙스를 놓치고 있죠 + +00:41.660 --> 00:45.200 +GPT 4에는 그런 문제가 없어요 + +00:45.200 --> 00:45.590 +두고 봐요 + +00:45.590 --> 00:53.150 +재치 있고 재밌는 리액션을 보내 주는데 무지개 이모티콘도 같이 보내 줘요 + +00:53.390 --> 00:54.710 +정말 놀라워요 + +00:55.070 --> 01:00.680 +이런 속임수를 다루기가 아주 쉬워요 GPT 4O는 O는 옴니의 + +01:00.710 --> 01:04.520 +약자인데 멀티모달을 나타내죠 + +01:04.520 --> 01:09.560 +물론 이미지 생성을 요청할 수도 있죠 + +01:11.420 --> 01:16.790 +이런 건 다들 본 적 있겠지만 생각해 보면 재미있어요 몇 + +01:16.790 --> 01:22.970 +년 전만 해도 상상력과 창의력을 발휘할 수 있는 LLM이 가능할지 + +01:22.970 --> 01:29.090 +논의가 많았거든요 그런데 이제는 이런 결론이 나올 것 같아요 + +01:29.090 --> 01:36.380 +하와이의 놀라운 사진을 만들 수 있다고요 무지개 17개가 저기로 가고 + +01:36.380 --> 01:38.360 +있어요 + +01:38.390 --> 01:41.750 +솔직히 정말 놀라워요 + +01:41.780 --> 01:43.190 +정말 놀라워요 + +01:43.400 --> 01:47.390 +그 자체로도 설명이 돼요 + +01:47.390 --> 01:52.100 +이런 창의적인 과제에 아주 효과적이에요 + +01:52.490 --> 01:57.920 +그럼 이쯤에서 GPT 4를 캔버스로 시험해 보죠 + +01:58.100 --> 02:00.920 +GPT의 또 다른 새로운 기능인데요 + +02:01.400 --> 02:07.100 +오픈AI와 협업할 수 있는 좋은 기회죠 + +02:07.190 --> 02:13.080 +파이썬 으로 돌아가서, 제가 전에 질문했던 것인데요 + +02:13.080 --> 02:23.820 +캔버스를 이용해 이 코드를 작업한다고 해보죠 + +02:25.410 --> 02:25.680 +네 + +02:25.680 --> 02:26.160 +어디 보죠 + +02:26.190 --> 02:26.760 +항복해요 + +02:26.760 --> 02:30.570 +지금부터는 안 될 일을 할 거예요 + +02:30.600 --> 02:31.920 +비트를 좀 바꿀게요 + +02:31.950 --> 02:42.780 +책에서 작가로 책 인 북을 get get 명령에서 목록해독을 사용하세요 + +02:42.780 --> 02:46.410 +책이 뭔지, 이게 무슨 의미인지 설명하진 않겠어요 + +02:46.410 --> 02:50.070 +코드 한 줄을 주고 어떻게 되는지 볼게요 + +02:50.070 --> 02:51.870 +이제 어떻게 되는지 보죠 + +02:51.870 --> 02:57.540 +먼저 오른쪽에 보이는 캔버스가 열립니다 이 코드에 반복할 + +02:57.540 --> 03:00.210 +수 있는 기능을 주죠 + +03:00.480 --> 03:01.740 
+네 + +03:01.770 --> 03:02.910 +저기 있네요 + +03:03.060 --> 03:05.730 +코드가 뜨는데요 + +03:05.760 --> 03:17.420 +이걸 확장해 이게 사용된 예제를 보여주세요 이제 일종의 커서가 이동하는 걸 보실 수 있습니다 + +03:17.420 --> 03:24.560 +우리가 갖고 있던 걸 다시 쓰고 꾸미고 풍성하게 하죠 + +03:24.560 --> 03:27.920 +그 코드 조각이 이제 발전기로 바뀌었어요 + +03:27.920 --> 03:30.140 +예제를 제공하는 데이터가 있어요 + +03:30.140 --> 03:36.260 +책의 제목과 저자가 정해져 있다고 알고 있는데 시험 삼아 보여드린 + +03:36.380 --> 03:37.370 +거예요 + +03:37.370 --> 03:41.120 +대본을 다시 쓰는 게 아주 효과적이었어요 + +03:41.120 --> 03:58.430 +이제 코드를 수정하라고 하죠 저자가 없거나 빈 책은 생성기에서 제외되도록요 + +04:00.410 --> 04:04.220 +비즈니스 요구 사항 같은 걸 상상해 보세요 + +04:04.340 --> 04:10.310 +저기 보이는 건 제가 갖고 있던 코드와 아주 유사해 보이죠 + +04:10.310 --> 04:16.820 +빈 책과 빠진 책이 있는 예시를 더 보여주더니 이 줄을 다시 + +04:16.820 --> 04:17.810 +썼어요 + +04:17.810 --> 04:24.220 +정말 좋은 방법이에요 우리가 전에 했던 것에 상호작용적으로 추가할 수 있으니까요 + +04:24.250 --> 04:36.220 +이걸 업데이트해 달라고 하죠 독특한 작가들만 나오게요 + +04:36.250 --> 04:38.380 +제가 말을 좀 막 했어요 + +04:38.380 --> 04:42.100 +업데이트 하고 싶은 것도 말 안 했어요 + +04:42.100 --> 04:47.470 +이제 다 됐어요 제 역할을 다했죠 + +04:47.500 --> 04:53.770 +다른 예제에 같은 작성자로 추가됐어요 이 코드를 업데이트해서 세트를 + +04:53.800 --> 04:59.350 +이용해 뭐가 나왔는지 추적하고 고유한 케이스만 제공하죠 + +04:59.380 --> 05:03.490 +사실 이게 제가 생각했던 것보다 더 나은 해결책 같아요 + +05:03.490 --> 05:05.020 +하지만 더 길어요 + +05:05.140 --> 05:09.190 +하지만 그렇게 하는 게 나은 이유는 여러 가지가 있겠지만 get이 + +05:09.190 --> 05:10.720 +가능한지 보죠 + +05:10.720 --> 05:22.360 +이걸 더 간단하게 다시 쓰려면 생산자 생성기를 다시 쓸 수 있을까요? 
+ +05:22.780 --> 05:24.310 +간단히 말하자면요 + +05:25.030 --> 05:31.050 +단순화하기 위해 시도해 보도록 하죠 + +05:31.140 --> 05:32.100 +통과해요 + +05:32.100 --> 05:34.200 +그건 변하지 않아요 + +05:34.200 --> 05:36.330 +다 됐어요 + +05:38.220 --> 05:46.080 +다시 말씀드리지만 이전 슬라이드에서 보여드린 코드보다 약간 더 + +05:46.110 --> 05:47.310 +나은데요 + +05:47.310 --> 05:50.310 +중괄호를 이용해서 집합을 나타내고요 + +05:50.400 --> 05:54.750 +그냥 저 선에서 양보하라고 할 수도 있었지만 이렇게 하기로 선택됐어요 아주 + +05:54.750 --> 05:56.190 +효과적으로 보이네요 + +05:56.190 --> 06:03.270 +저는 GPT로 작업하는 전체적인 경험에 충격을 받았습니다 상호작용을 할 수 있고 + +06:03.270 --> 06:08.760 +빌드 예시 데이터도 하고 업데이트도 하고 어떻게 되는지 보여 + +06:08.760 --> 06:10.110 +주니까요 + +06:10.110 --> 06:16.350 +아주 강력한 기능이죠 이 코스를 진행하면서 막히는 게 있을 때 꼭 사용하시길 + +06:16.350 --> 06:20.490 +권장합니다 비결은 캔버스를 불러오지 않을 경우 + +06:20.520 --> 06:27.240 +get canvas 사용이라고 하면 여러분이 원하는 도구로 이걸 사용해 설명하고 + +06:27.270 --> 06:30.960 +문제와 아이디어를 해결하죠 diff --git a/week5/community-contributions/subtitles/srts/60619275/en_US.srt b/week5/community-contributions/subtitles/srts/60619275/en_US.srt new file mode 100755 index 0000000..5f2d382 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619275/en_US.srt @@ -0,0 +1,190 @@ +WEBVTT + +00:00.110 --> 00:05.750 +And we will conclude our expedition into the world of frontier models through their chat interface by + +00:05.780 --> 00:08.570 +looking at meta AI and perplexity. + +00:08.600 --> 00:15.770 +Meta AI is, of course, the front end version to llama that's running behind the scenes. + +00:15.770 --> 00:21.680 +We can ask it the same question about how does it compare itself to other models, and we'll get back + +00:21.680 --> 00:25.190 +something that has some strengths and weaknesses. + +00:25.190 --> 00:27.440 +It doesn't do a great job. + +00:27.440 --> 00:33.530 +It gives some rather old fashioned complimentary llms, but it's it's okay. 
+
+00:33.530 --> 00:38.000
+And generally speaking, I think you'd find asking various questions that you'll get answers that are
+
+00:38.000 --> 00:42.200
+okay, but not the same power as some of the others.
+
+00:42.590 --> 00:44.060
+Let's ask the same question.
+
+00:44.060 --> 00:52.370
+You guys are going to be fed up with me for doing this, but how many times does the letter A appear
+
+00:52.370 --> 00:54.650
+in this sentence?
+
+00:54.920 --> 00:58.190
+And let's see what we get from meta AI.
+
+00:58.340 --> 01:04.160
+It appears five times, so meta is also not able to handle that particular question.
+
+01:04.190 --> 01:08.210
+Now, one thing that the meta is able to handle is image generation.
+
+01:08.210 --> 01:17.320
+And we can say please generate an image of a rainbow of rainbows
+
+01:19.270 --> 01:23.830
+leaping from Hawaii to 17.
+
+01:24.100 --> 01:31.450
+Uh, and you'll find that this is the kind of, of challenge that, uh, Llama is up for.
+
+01:31.450 --> 01:35.980
+And it very nicely does it with these four possibilities.
+
+01:36.070 --> 01:38.350
+And you get this, uh, this kind of effect.
+
+01:38.350 --> 01:44.260
+And it seems to have, uh, well, Hawaii has appeared, uh, but but some of these are very respectable,
+
+01:44.260 --> 01:49.840
+particularly for the open source model that is, uh, Llama sitting behind the scenes.
+
+01:49.960 --> 01:55.510
+All right, let's flip over to perplexity, which is, of course, a search engine, not an LLM.
+
+01:55.510 --> 01:56.080
+So it doesn't.
+
+01:56.110 --> 02:01.660
+It's the odd one out in this group, although actually, uh, OpenAI also is now in the search space,
+
+02:01.660 --> 02:02.320
+too.
+
+02:02.440 --> 02:08.020
+Um, so really it's looking for, uh, factual questions that it can then research and provide an answer
+
+02:08.020 --> 02:08.380
+for.
+
+02:08.380 --> 02:13.240
+And I'm recording this on November the 6th, the day after the elections in the US.
+ +02:13.240 --> 02:22.940 +So I can say something like, who is the president elect of the United States, and it will do some + +02:22.940 --> 02:23.570 +thinking. + +02:23.570 --> 02:29.960 +And I would not be surprised at all to see that it's able to summarize back the outcome and give key + +02:29.960 --> 02:33.110 +points, reactions and the like. + +02:33.170 --> 02:40.280 +Uh, and so it's able to do this and provide a nuanced, well crafted response to current events. + +02:40.310 --> 02:46.160 +Actually, if you ask a question like that to, uh, OpenAI to GPT right now, it will also give you + +02:46.160 --> 02:50.990 +a good answer, uh, based on current events, despite its knowledge cutoff being last year. + +02:51.110 --> 02:53.210 +But if you ask Claude, it won't be able to do that. + +02:53.210 --> 02:55.610 +And it will say it will say to to be direct. + +02:55.610 --> 03:00.440 +My knowledge cutoff is, uh, you get that very specific answer. + +03:00.890 --> 03:04.550 +Uh, let's ask the question, uh, how many? + +03:04.580 --> 03:15.350 +Let's start a new, fresh chat and say, how many times does the letter A appear in this sentence? + +03:16.130 --> 03:17.960 +It's the last time you have to see this. + +03:18.170 --> 03:20.510 +Uh, and it says four times. + +03:20.510 --> 03:23.030 +So it is able to count. + +03:23.060 --> 03:24.800 +Uh, so it's impressive. + +03:24.830 --> 03:28.310 +Uh, don't know whether it's a coincidence, whether it's because other people have written articles + +03:28.310 --> 03:32.300 +about this that it's found, but it is able to count for times. + +03:32.300 --> 03:39.080 +So perplexity is with the oh one preview version and being able to get this right. + +03:39.110 --> 03:41.420 +Congratulations to perplexity. + +03:41.630 --> 03:48.290 +Um, and now ask a slightly curious question, which is, ah, uh, question about comparing to other + +03:48.290 --> 03:52.910 +models, and you'll see a, um, here's the response. 
+ +03:53.000 --> 03:55.340 +Uh, the area is indicated by perplexity. + +03:55.340 --> 03:57.260 +I don't have the capabilities. + +03:57.260 --> 04:00.440 +So it definitely pushes back firmly on that. + +04:00.680 --> 04:04.460 +And that is a wrap on our exploration of frontier models. + +04:04.460 --> 04:07.160 +But now I encourage you to do the same. + +04:07.160 --> 04:12.560 +Come up with interesting questions, particularly try and find questions which are able to bring to + +04:12.590 --> 04:17.960 +the surface the differences between the models, their characters, what they're good at, where they're + +04:17.960 --> 04:18.590 +weak. + +04:18.620 --> 04:23.420 +And if you find something good, then please share it with me or post it in messages. + +04:23.450 --> 04:29.810 +Uh, this is it's really great to find the kinds of prompts that help to surface these differences, + +04:29.810 --> 04:32.660 +and also that help to highlight where they are so strong. + +04:32.750 --> 04:36.050 +And I will see you in the next video to wrap this up. diff --git a/week5/community-contributions/subtitles/srts/60619275/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619275/ja_JP.srt new file mode 100755 index 0000000..1860955 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619275/ja_JP.srt @@ -0,0 +1,160 @@ +WEBVTT + +00:00.110 --> 00:08.570 +そして、 メタAIと当惑を見ることで、 チャット・インターフェースを通じたフロンティア・モデルの世界への探検を締めくくる。 + +00:08.600 --> 00:15.770 +メタAIはもちろん、 裏で動いているllamaのフロントエンド版だ。 + +00:15.770 --> 00:25.190 +他のモデルと比較してどうなのかと同じ質問をすれば、 長所と短所を持つものが返ってくるだろう。 + +00:25.190 --> 00:27.440 +大した仕事はしていない。 + +00:27.440 --> 00:33.530 +かなり古風な褒め言葉だが、 まあいい。 + +00:33.530 --> 00:38.000 +一般的に言って、 いろいろな質問をしてみると、 まあまあの答えが返ってくるが、 + +00:38.000 --> 00:42.200 +他の選手と同じようなパワーは得られないと思う。 + +00:42.590 --> 00:44.060 +同じ質問をしよう。 + +00:44.060 --> 00:54.650 +こんなことをすると君たちに辟易されるだろうが、 この文章の中にAという文字が何回出てくるだろうか? 
+ +00:54.920 --> 00:58.190 +メタAIから何が得られるか見てみよう。 + +00:58.340 --> 01:04.160 +5回も出てくるので、 メタもその質問には対応できない。 + +01:04.190 --> 01:08.210 +さて、 メタが扱えることのひとつに画像生成がある。 + +01:08.210 --> 01:23.830 +そして、 ハワイから17号線に虹が架かるイメージを作ってくださいと言うことができる。 + +01:24.100 --> 01:31.450 +そして、 これがラマ僧の挑戦であることがわかるだろう。 + +01:31.450 --> 01:35.980 +そして、 この4つの可能性をとてもうまく実現している。 + +01:36.070 --> 01:38.350 +そして、 このような効果が得られる。 + +01:38.350 --> 01:44.260 +そして、 ハワイが登場したようだが、 そのうちのいくつかは非常に立派なもので、 特にオープンソースモデルとしては、 + +01:44.260 --> 01:49.840 +ラマが舞台裏に座っているようなものだ。 + +01:49.960 --> 01:55.510 +さて、 "perplexity "に話を移そう。 "perplexity "はもちろん検索エンジンであり、 LLMではない。 + +01:55.510 --> 01:56.080 +だから、 それはない。 + +01:56.110 --> 02:02.320 +このグループの中では変わり者だが、 実はOpenAIも検索スペースに参入している。 + +02:02.440 --> 02:08.380 +つまり、 調査して答えを出せるような、 事実に基づいた質問を探しているんだ。 + +02:08.380 --> 02:13.240 +そして、 私はこれを11月6日、 アメリカでの選挙の翌日に収録している。 + +02:13.240 --> 02:23.570 +だから、 アメリカの次期大統領は誰なのか、 などと言えば、 少しは考えてくれるだろう。 + +02:23.570 --> 02:29.960 +そして、 結果を要約し、 重要なポイントや反応などを伝えることができるようになっていても、 + +02:29.960 --> 02:33.110 +私はまったく驚かない。 + +02:33.170 --> 02:40.280 +そうすることで、 時事問題に対してニュアンスのある、 よく練られた反応を示すことができるんだ。 + +02:40.310 --> 02:46.160 +実際、 GPTのOpenAIにそのような質問をすると、 知識のカットオフが昨年であるにもかかわらず、 + +02:46.160 --> 02:50.990 +時事問題に基づいて適切な答えを返してくれる。 + +02:51.110 --> 02:53.210 +しかし、 クロードに言わせれば、 そんなことはできないだろう。 + +02:53.210 --> 02:55.610 +そして、 直接的であれと言うだろう。 + +02:55.610 --> 03:00.440 +私の知識の切り口は......具体的な答えだ。 + +03:00.890 --> 03:04.550 +ええと、 何人ですか? + +03:04.580 --> 03:15.350 +この文章にAという文字は何回出てきますか? 
+ +03:16.130 --> 03:17.960 +これを見るのはこれが最後だ。 + +03:18.170 --> 03:20.510 +ええと、 4回と書いてある。 + +03:20.510 --> 03:23.030 +だから数えることができる。 + +03:23.060 --> 03:24.800 +ああ、 だから印象的なんだ。 + +03:24.830 --> 03:28.310 +偶然なのか、 他の人がこの件について記事を書いたから見つかったのかはわからないが、 + +03:28.310 --> 03:32.300 +回数を数えることができる。 + +03:32.300 --> 03:39.080 +そのため、 このプレビュー版でこの問題を解決できるのかどうかが大きな課題となっている。 + +03:39.110 --> 03:41.420 +おめでとう、 当惑。 + +03:41.630 --> 03:52.910 +そして、 ちょっと不思議な質問、 つまり、 他のモデルとの比較についての質問をしてみてください。 + +03:53.000 --> 03:55.340 +ええと、 このあたりは当惑で示されている。 + +03:55.340 --> 03:57.260 +私には能力がない。 + +03:57.260 --> 04:00.440 +だから、 間違いなくそれをしっかりと押し返している。 + +04:00.680 --> 04:04.460 +以上、 フロンティア・モデルについての解説を終えた。 + +04:04.460 --> 04:07.160 +しかし、 今、 私はあなたにも同じことをするよう勧める。 + +04:07.160 --> 04:12.560 +興味深い質問を考え、 特にモデル間の違い、 キャラクター、 得意なこと、 + +04:12.590 --> 04:18.590 +不得意なことを表面化できるような質問を見つけるようにする。 + +04:18.620 --> 04:23.420 +そして、 もし何かいいものを見つけたら、 それを僕にシェアするか、 メッセージに投稿してほしい。 + +04:23.450 --> 04:32.660 +このような違いを表面化させ、 またその違いが強い部分を強調するのに役立つようなプロンプトを見つけるのは本当に素晴らしいことだ。 + +04:32.750 --> 04:36.050 +それではまた次のビデオでお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/60619275/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619275/ko_KR.srt new file mode 100755 index 0000000..01518c1 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619275/ko_KR.srt @@ -0,0 +1,178 @@ +WEBVTT + +00:00.110 --> 00:05.750 +그리고 대화 인터페이스를 통해 개척 모델의 세계를 탐구하는 여정을 마무리할 겁니다 + +00:05.780 --> 00:08.570 +메타 인공지능과 당혹스러움을 살펴보죠 + +00:08.600 --> 00:15.770 +메타 인공지능은 라마의 프런트 엔드 버전으로 뒤에서 실행되고 있어요 + +00:15.770 --> 00:21.680 +같은 질문을 할 수도 있어요. 다른 모델과 비교해 보면 장단점이 + +00:21.680 --> 00:25.190 +있는 Get을 얻을 수 있어요. + +00:25.190 --> 00:27.440 +잘 안 되네요 + +00:27.440 --> 00:33.530 +전통적인 무료 llm을 주지만 괜찮아요 + +00:33.530 --> 00:38.000 +일반적으로 다양한 질문을 하면 괜찮긴 하지만 다른 질문만큼 + +00:38.000 --> 00:42.200 +강력하진 않은 답을 얻게 될 거예요. Get it. 
+ +00:42.590 --> 00:44.060 +같은 질문을 해 보죠 + +00:44.060 --> 00:52.370 +제가 이런 말을 해서 질리실 텐데 A가 이 문장에 몇 번이나 + +00:52.370 --> 00:54.650 +나오는 거죠? + +00:54.920 --> 00:58.190 +메타 인공지능은 어떤지 보죠. + +00:58.340 --> 01:04.160 +다섯 번 나타나죠 메타도 그 특정 질문을 처리할 수 없어요 + +01:04.190 --> 01:08.210 +메타가 다룰 수 있는 한 가지는 이미지 생성이에요 + +01:08.210 --> 01:17.320 +하와이에서 17로 가는 무지개의 이미지를 생성해 + +01:19.270 --> 01:23.830 +달라고 요청할 수 있어요 + +01:24.100 --> 01:31.450 +보시면 아시겠지만 라마에겐 이런 도전이 딱이에요 + +01:31.450 --> 01:35.980 +이 네 가지로 아주 잘 표현했어요 + +01:36.070 --> 01:38.350 +Get it 하면 이런 효과가 나요 + +01:38.350 --> 01:44.260 +하와이가 등장한 것 같지만 몇몇은 아주 훌륭한 것들이죠 + +01:44.260 --> 01:49.840 +특히 오픈 소스 모델에게요 무대 뒤에 있는 라마요 + +01:49.960 --> 01:55.510 +그럼 이제 검색 엔진으로 넘어가죠 LLM이 아니라요 + +01:55.510 --> 01:56.080 +그렇죠 + +01:56.110 --> 02:02.320 +이 그룹에서 특이한 경우지만 오픈라이도 검색 영역에 들어왔어요 + +02:02.440 --> 02:08.380 +그래서 사실적인 질문을 찾아서 조사하고 답을 제공하죠 + +02:08.380 --> 02:13.240 +11월 6일에 녹화하고 있습니다 미국 선거 다음 날이죠 + +02:13.240 --> 02:23.570 +미국 대통령 당선자를 이렇게 말하면 생각이 바뀔 거예요 + +02:23.570 --> 02:29.960 +결과물을 요약해서 핵심 포인트나 반응 같은 걸 내놓는다 해도 + +02:29.960 --> 02:33.110 +전혀 놀랍지 않을 거예요 + +02:33.170 --> 02:40.280 +이 기능을 통해 현안에 대한 미묘하고 정교한 대응을 제공하죠 + +02:40.310 --> 02:46.160 +오픈AI에 그런 질문을 해도 좋은 답변을 얻을 수 있습니다 현재 상황을 + +02:46.160 --> 02:50.990 +고려하면요 작년 기준으로 제한이 있었지만요 + +02:51.110 --> 02:53.210 +클로드한테 물어보면 못 할 거예요 + +02:53.210 --> 02:55.610 +단도직입적으로 말하라고 할 거예요 + +02:55.610 --> 03:00.440 +아주 구체적인 답을 얻게 되는 게 제 지식의 한계예요 get it get it + +03:00.890 --> 03:04.550 +질문을 해 보죠 몇 명이죠? + +03:04.580 --> 03:15.350 +새로운 대화를 시작해 보죠 A라는 글자가 이 문장에 몇 번 나오죠? 
+ +03:16.130 --> 03:17.960 +이걸 보는 건 마지막이에요 + +03:18.170 --> 03:20.510 +네 번이라고 쓰여 있어요 + +03:20.510 --> 03:23.030 +숫자를 셀 수 있군요 + +03:23.060 --> 03:24.800 +정말 인상적이에요 + +03:24.830 --> 03:28.310 +우연의 일치인지 다른 사람들이 기사를 써서 + +03:28.310 --> 03:32.300 +발견된 건지는 모르겠지만 숫자를 셀 수 있어요 + +03:32.300 --> 03:39.080 +그래서 get One Preview 버전에서 이걸 제대로 하는 게 당혹스러워요 + +03:39.110 --> 03:41.420 +축하해요, 당혹스러움 + +03:41.630 --> 03:48.290 +이제 좀 이상한 질문을 해 보죠 다른 모델과 비교해 + +03:48.290 --> 03:52.910 +보면 이런 답이 나올 거예요 + +03:53.000 --> 03:55.340 +그 지역은 당혹스러움으로 표시돼요 + +03:55.340 --> 03:57.260 +전 그럴 능력이 없어요 + +03:57.260 --> 04:00.440 +확실히 더 강하게 밀어주죠 + +04:00.680 --> 04:04.460 +이것으로 개척자 모델 탐사를 마칠게요 + +04:04.460 --> 04:07.160 +하지만 이젠 당신도 그렇게 해요 + +04:07.160 --> 04:12.560 +흥미로운 질문을 던지고 특히 모델과 캐릭터의 차이점 + +04:12.590 --> 04:18.590 +장점과 약점을 드러낼 수 있는 질문을 찾으려고 노력해요 + +04:18.620 --> 04:23.420 +그리고 좋은 거 발견하시면 공유나 post 해 주세요 + +04:23.450 --> 04:29.810 +이런 차이를 표면화하고 어디가 가장 큰지 강조할 수 있는 표시를 + +04:29.810 --> 04:32.660 +찾는 건 정말 좋은 일이에요 + +04:32.750 --> 04:36.050 +그럼 다음 비디오에서 마무리하도록 하죠 diff --git a/week5/community-contributions/subtitles/srts/60619281/en_US.srt b/week5/community-contributions/subtitles/srts/60619281/en_US.srt new file mode 100755 index 0000000..8ca637a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619281/en_US.srt @@ -0,0 +1,334 @@ +WEBVTT + +00:00.050 --> 00:05.810 +Well, I'm delighted to welcome you to day three of our eight week journey together. + +00:05.810 --> 00:09.470 +And today we're going to be looking at Frontier Models. + +00:09.470 --> 00:16.760 +The idea that the goal of today is to get deep into these different models so that you can get a true + +00:16.760 --> 00:21.920 +intuition for where are they strong, where are they weak, what are the differences between them? 
+
+00:21.920 --> 00:26.840
+And so that's what I want you to keep in mind throughout today's material, learning about the differences
+
+00:26.840 --> 00:31.970
+between them and thinking about how you would apply them commercially to your business or to future
+
+00:31.970 --> 00:35.570
+projects, and understanding when you would pick which model.
+
+00:35.600 --> 00:37.040
+Let's get to it.
+
+00:37.100 --> 00:42.320
+So we're going to be talking about six different models today from six different companies, starting
+
+00:42.350 --> 00:44.750
+of course, with OpenAI's models.
+
+00:44.780 --> 00:47.270
+OpenAI needs no introduction really.
+
+00:47.420 --> 00:50.300
+GPT is the most famous model.
+
+00:50.300 --> 00:53.090
+And we'll also, of course look at o1 preview.
+
+00:53.240 --> 01:01.310
+The the newest of their models and ChatGPT is their user interface, the screens where you can interact
+
+01:01.310 --> 01:02.150
+with it.
+
+01:02.360 --> 01:05.180
+We'll also look at the models from anthropic.
+
+01:05.210 --> 01:12.510
+Anthropic is OpenAI's top competitor, based in San Francisco as well, and founded by some people that
+
+01:12.510 --> 01:16.380
+left OpenAI, and their model is called Claude and Claude.
+
+01:16.380 --> 01:19.200
+In fact, you may know, comes in sort of three powers.
+
+01:19.200 --> 01:21.300
+The smallest one is called haiku.
+
+01:21.330 --> 01:23.310
+Claude haiku, and then the sonnet.
+
+01:23.310 --> 01:24.750
+And then there's opus.
+
+01:24.810 --> 01:31.770
+But actually, because sonnet has had much more recent versions, the latest version of sonnet is stronger
+
+01:31.770 --> 01:35.190
+than the bigger, more expensive opus, as we'll see.
+
+01:35.190 --> 01:36.930
+That will make more sense later.
+
+01:37.050 --> 01:42.360
+But Claude Claude 3.5 sonnet is the strongest of Claude's models.
+ +01:43.050 --> 01:50.580 +Google has Google Gemini, probably latest to the party, and most of us know Gemini most well because + +01:50.580 --> 01:55.530 +nowadays when we do a Google search, very often we see Gemini's responses. + +01:55.650 --> 02:00.720 +Gemini is, of course, the next generation of what was originally called Bard from Google. + +02:01.320 --> 02:04.290 +Cohere is one that you may have heard less about. + +02:04.290 --> 02:11.880 +It's a Canadian AI company, and their model is most well known for being using using a technique called + +02:11.880 --> 02:14.220 +Rag to make sure that it has expertise. + +02:14.220 --> 02:15.630 +So we will see that. + +02:15.720 --> 02:18.930 +And then we know the llama model from meta. + +02:18.930 --> 02:20.940 +We've used it ourselves through llama. + +02:20.970 --> 02:25.710 +This is an open source model, and you may not know that the meta actually also has a website, meta + +02:25.950 --> 02:30.300 +AI, that lets you interact with the llama model. + +02:30.300 --> 02:32.220 +And we will have a look at that. + +02:32.310 --> 02:39.540 +And then perplexity is a bit different, because perplexity is actually a search engine powered by AI, + +02:39.570 --> 02:43.230 +powered by Llms, and it can use some of the other models that we'll talk about. + +02:43.230 --> 02:48.360 +But they do also have their own model too, so it's a slightly different beast, but we'll be looking + +02:48.360 --> 02:50.190 +at perplexity as well. + +02:51.240 --> 02:58.290 +So overall, these llms are astonishing in what they are capable of. + +02:58.320 --> 03:06.330 +They are really very effective indeed at taking a detailed question, a nuanced question, and providing + +03:06.330 --> 03:10.260 +a structured summary that appears well researched. + +03:10.260 --> 03:15.660 +It often has a sort of introduction and a summary, and this is one of the ways that I use it all the + +03:15.660 --> 03:16.470 +time. 
+ +03:16.660 --> 03:22.900 +and I find that across the board, these llms are shocking in how good they are at this. + +03:22.930 --> 03:28.300 +It's something that a couple of years ago, none of us would have imagined that we could get this far + +03:28.300 --> 03:29.290 +this quickly. + +03:30.040 --> 03:36.010 +There are also really good, and I imagine that many of you do this a lot yourselves, and I do it if + +03:36.040 --> 03:41.080 +you put in a few bullets, just a few notes on something and say, hey, can you turn this into an email + +03:41.110 --> 03:44.230 +or can you turn this into a slide? + +03:44.560 --> 03:51.820 +They are really good at fleshing it out and building, say, a blog post, and they're very good at + +03:51.850 --> 03:52.600 +iterating. + +03:52.600 --> 03:55.360 +So they'll do something and are like some of it I won't like others. + +03:55.360 --> 03:58.330 +And you can give feedback and keep going backwards and forwards. + +03:58.330 --> 04:00.340 +And it's a really effective way of working. + +04:00.340 --> 04:05.260 +It's the kind of copilot construct that is so, so, so effective. + +04:06.220 --> 04:09.340 +And then coding, of course. + +04:09.340 --> 04:17.050 +And perhaps this for many of us is is the thing that is most staggering is how very good the llms are + +04:17.050 --> 04:21.010 +at writing code and debugging problems and solving them. + +04:21.190 --> 04:24.320 +It's something which is really remarkable. 
+ +04:24.320 --> 04:28.610 +I've had experiences myself when I've been working on something that's very complex, and it's something + +04:28.610 --> 04:34.040 +that I believe I have deep subject matter expertise in, and I've got a fairly intricate error, and + +04:34.040 --> 04:41.720 +I put the details and the stack trace in into Claude, say, and I get back not only a very precise + +04:41.720 --> 04:46.880 +explanation of what's going wrong, but also the code that will fix it appearing as an artifact on the + +04:46.880 --> 04:47.750 +right in Claude. + +04:47.750 --> 04:50.330 +And it's it's it's amazing. + +04:50.330 --> 04:51.830 +It's absolutely amazing. + +04:52.010 --> 04:56.210 +And in fact, these are often things which I if I try and paste them, if I look for it in Stack Overflow, + +04:56.240 --> 04:57.470 +there's no answer there. + +04:57.470 --> 05:02.840 +Somehow it's it's able to look beyond just, just a regurgitating Stack Overflow answers. + +05:02.840 --> 05:05.990 +And it seems to have real insight into what's going on. + +05:06.020 --> 05:13.280 +And I suppose that's why it's not surprising, really, that Stack Overflow has seen a big falloff in + +05:13.280 --> 05:14.120 +its traffic. + +05:14.150 --> 05:21.650 +You can see that something started to happen in a big way after Q4 2022, which is when ChatGPT was + +05:21.650 --> 05:22.370 +released. + +05:22.610 --> 05:31.800 +So, you know, it's obviously changed the paradigm of how uh, how we how technology people work with + +05:31.800 --> 05:33.930 +with researching our problems. + +05:33.960 --> 05:35.100 +It's very effective. + +05:35.100 --> 05:41.640 +And I encourage you, if you get stuck with some of the things we work on to give Claude or OpenAI GPT + +05:41.670 --> 05:42.570 +a shot. + +05:43.560 --> 05:45.510 +So what about where are they weak? + +05:45.510 --> 05:47.010 +What are the things that they struggle with? 
+ +05:47.010 --> 05:49.230 +Where does humanity still have a chance in all of this? + +05:49.260 --> 05:56.730 +Well, so first of all, they tend to not be as strong with specialized subject matter if it's something + +05:56.730 --> 05:59.190 +that requires detailed knowledge. + +05:59.280 --> 06:02.250 +Most llms are not yet at PhD level. + +06:02.280 --> 06:09.090 +Now, I had to put the word most in there because literally just just just a few weeks ago for me in + +06:09.090 --> 06:16.620 +October, uh, Claude, the newest version of Claude came out, uh, the latest Claude 3.5 sonnet, + +06:16.710 --> 06:23.070 +uh, and it has surpassed PhD level in maths, physics, chemistry. + +06:23.190 --> 06:29.220 +Uh, and so this is something where very quickly we're seeing these models achieving PhD level. + +06:29.220 --> 06:30.360 +So far just Claude. + +06:30.360 --> 06:36.540 +But the others I'm sure are not far behind, but those are in those specific sciences and in a particular + +06:36.540 --> 06:42.180 +domain, like a business domain, they still won't have the specialist knowledge of an expert in that + +06:42.180 --> 06:42.960 +space. + +06:43.530 --> 06:46.260 +And secondly, recent events. + +06:46.260 --> 06:52.170 +So the models have been trained up until a knowledge cutoff, which is for GPT. + +06:52.200 --> 06:53.370 +October of last year. + +06:53.370 --> 06:58.470 +And so they won't be able to answer questions on information that has come since then. + +06:58.500 --> 07:01.740 +And then they have some strange blind spots. + +07:01.740 --> 07:04.260 +There are some questions which they will just get wrong. + +07:04.260 --> 07:08.580 +And when they get them wrong, one of the things that's quite concerning is that they do tend to be + +07:08.580 --> 07:10.440 +confident in their responses. + +07:10.440 --> 07:14.550 +They often don't volunteer the fact that they're uncertain. 
+ +07:14.550 --> 07:19.320 +They just state an answer with the same level of conviction as with something where they do get the + +07:19.320 --> 07:20.160 +answer right. + +07:20.280 --> 07:28.530 +And that that is something which of course causes concern when you see models hallucinate or come up + +07:28.530 --> 07:32.190 +with with new information which it doesn't know and do so with confidence. + +07:32.190 --> 07:38.670 +And we'll see some examples of that and talk about what are the reasons behind those blind spots. diff --git a/week5/community-contributions/subtitles/srts/60619281/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619281/ja_JP.srt new file mode 100755 index 0000000..fe9aa5b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619281/ja_JP.srt @@ -0,0 +1,271 @@ +WEBVTT + +00:00.050 --> 00:05.810 +さて、 8週間にわたる旅の3日目にようこそ。 + +00:05.810 --> 00:09.470 +そして今日は、 フロンティア・モデルについて見ていこう。 + +00:09.470 --> 00:16.760 +今日の目標は、 これらの異なるモデルを深く理解することで、 どこが強く、 どこが弱いのか、 + +00:16.760 --> 00:21.920 +両者の違いは何なのかを直感的に理解することだ。 + +00:21.920 --> 00:26.840 +その違いを学び、 自分のビジネスや将来のプロジェクトにどのように応用するかを考え、 + +00:26.840 --> 00:35.570 +どのような場合にどのモデルを選ぶかを理解する。 + +00:35.600 --> 00:37.040 +さっそく始めよう。 + +00:37.100 --> 00:44.750 +今日は6社の6つの異なるモデルについてお話しします。 + +00:44.780 --> 00:47.270 +OpenAIは紹介するまでもないだろう。 + +00:47.420 --> 00:50.300 +GPTは最も有名なモデルだ。 + +00:50.300 --> 00:53.090 +もちろん、 0101のプレビューも見る。 + +00:53.240 --> 01:02.150 +彼らの最新モデルとChatGPTは、 ユーザー・インターフェイス、 つまりユーザーが操作できる画面だ。 + +01:02.360 --> 01:05.180 +人類学的なモデルも見てみよう。 + +01:05.210 --> 01:12.510 +AnthropicはOpenAIの一番のライバルで、 同じくサンフランシスコに拠点を置き、 + +01:12.510 --> 01:16.380 +OpenAIを去った人たちによって設立された。 + +01:16.380 --> 01:19.200 +実際、 ご存じかもしれないが、 3つのパワーがある。 + +01:19.200 --> 01:21.300 +一番小さなものは俳句と呼ばれる。 + +01:21.330 --> 01:23.310 +クロード俳句、 そしてソネット。 + +01:23.310 --> 01:24.750 +そしてオパスだ。 + +01:24.810 --> 01:31.770 +しかし、 実際には、 ソネットにはもっと新しいバージョンがあるため、 後述するように、 ソネットの最新バージョンは、 + +01:31.770 --> 01:35.190 +大きくて高価なオーパスよりも強いのだ。 + +01:35.190 --> 
01:36.930 +それは後でもっと理解できるだろう。 + +01:37.050 --> 01:42.360 +しかし、 クロード・クロード3. 5ソネットはクロードのモデルの中で最も強力なものである。 + +01:43.050 --> 01:55.530 +グーグルには、 おそらく最新のグーグル双子座があり、 私たちのほとんどは双子座をよく知っている。 + +01:55.650 --> 02:00.720 +Geminiはもちろん、 もともとGoogleのBardと呼ばれていたものの次世代である。 + +02:01.320 --> 02:04.290 +コヒーレはあまり聞いたことがないかもしれない。 + +02:04.290 --> 02:14.220 +カナダのAI企業で、 彼らのモデルは専門知識を持つことを確認するためにラグと呼ばれる技術を使用していることで最もよく知られている。 + +02:14.220 --> 02:15.630 +だから、 それを見ることになる。 + +02:15.720 --> 02:18.930 +そして、 私たちはメタのラマ・モデルを知っている。 + +02:18.930 --> 02:20.940 +私たち自身、 ラマを通じて利用したことがある。 + +02:20.970 --> 02:25.710 +これはオープンソースのモデルで、 皆さんはご存じないかもしれないが、 実はメタはウェブサイト「メタAI」も持っていて、 + +02:25.950 --> 02:30.300 +そこでラマ・モデルと対話することができる。 + +02:30.300 --> 02:32.220 +それを見てみよう。 + +02:32.310 --> 02:39.540 +そしてperplexityは少し違います。 perplexityは実際にAIを搭載した検索エンジンであり、 + +02:39.570 --> 02:43.230 +Llmsを搭載しています。 + +02:43.230 --> 02:50.190 +しかし、 彼らにも独自のモデルがあるので、 少し違うが、 当惑も見ていくことにしよう。 + +02:51.240 --> 02:58.290 +つまり、 全体的に見れば、 これらのLLMの能力は驚くべきものなのだ。 + +02:58.320 --> 03:06.330 +細かい質問、 微妙なニュアンスの質問を取り上げ、 よく研究されたように見える構造化された要約を提供することにおいて、 + +03:06.330 --> 03:10.260 +彼らは実に効果的だ。 + +03:10.260 --> 03:16.470 +導入部と要約のようなものがあることが多いが、 これは私がいつも使っている方法のひとつだ。 + +03:16.660 --> 03:22.900 +そして、 私は、 これらのLLMが全体的に、 いかに優れているかということに衝撃を受けている。 + +03:22.930 --> 03:29.290 +2、 3年前には、 ここまで早く到達できるとは誰も想像していなかったことだ。 + +03:30.040 --> 03:36.010 +また、 本当に良い方法があります。 皆さんの多くは自分でこれをよくやっていると想像しますし、 + +03:36.040 --> 03:41.080 +私も何かについてほんの少し箇条書きやメモを入れて、 これをメールにできないか、 + +03:41.110 --> 03:44.230 +スライドにできないかと言うと、 そうします。 + +03:44.560 --> 03:52.600 +彼らはそれを具体化し、 例えばブログ記事を構築するのがとても上手で、 反復するのがとてもうまい。 + +03:52.600 --> 03:55.360 +だから、 彼らは何かをして、 そのうちのいくつかは気に入るだろう。 + +03:55.360 --> 03:58.330 +そして、 フィードバックを与え、 前進と後退を繰り返すことができる。 + +03:58.330 --> 04:00.340 +そして、 それは本当に効果的な仕事のやり方だ。 + +04:00.340 --> 04:05.260 +それは、 とてもとてもとても効果的な副操縦士の構成だ。 + +04:06.220 --> 04:09.340 +そしてもちろん、 コーディングだ。 + +04:09.340 --> 04:21.010 +そして、 おそらく私たちの多くにとって最も驚異的なことは、 llmsがコードを書くこと、 問題をデバッグして解決することがいかに優れているかということだ。 + +04:21.190 
--> 04:24.320 +本当に驚くべきことだ。 + +04:24.320 --> 04:28.610 +私自身、 非常に複雑で、 自分が深い専門知識を持っていると信じていることに取り組んでいて、 + +04:28.610 --> 04:34.040 +かなり複雑なエラーが発生したときに、 詳細とスタックトレースをクロードに入力すると、 + +04:34.040 --> 04:47.750 +何が間違っているのかの非常に正確な説明だけでなく、 クロードの右側にアーティファクトとして表示される修正コードも戻ってきた経験がある。 + +04:47.750 --> 04:50.330 +そして、 それはそれは素晴らしい。 + +04:50.330 --> 04:51.830 +本当に素晴らしいよ。 + +04:52.010 --> 04:57.470 +実際、 スタックオーバーフローで探しても、 答えがないことが多い。 + +04:57.470 --> 05:02.840 +どういうわけか、 Stack Overflowの回答の再掲にとどまらない。 + +05:02.840 --> 05:05.990 +そして、 何が起こっているのかを本当に見抜いているようだ。 + +05:06.020 --> 05:14.120 +だからこそ、 スタック・オーバーフローのトラフィックが大きく落ち込んだとしても、 驚くことではないのだろう。 + +05:14.150 --> 05:22.370 +ChatGPTがリリースされた2022年第4四半期以降、 何かが大きく動き始めたことがわかるだろう。 + +05:22.610 --> 05:33.930 +だから、 テクノロジーに携わる人たちがどのように問題解決に取り組むかというパラダイムを明らかに変えたんだ。 + +05:33.960 --> 05:35.100 +とても効果的だ。 + +05:35.100 --> 05:42.570 +そして、 もし私たちが取り組んでいることに行き詰まったら、 クロードやOpenAI GPTを試してみることをお勧めする。 + +05:43.560 --> 05:45.510 +では、 どこが弱いのか? + +05:45.510 --> 05:47.010 +彼らが苦労していることは何か? + +05:47.010 --> 05:49.230 +この中で人類にまだチャンスがあるのはどこだろうか? 
+ +05:49.260 --> 05:59.190 +まあ、 だからまず、 細かい知識が必要なものであれば、 専門的なテーマにはあまり強くない傾向がある。 + +05:59.280 --> 06:02.250 +ほとんどのLLMはまだ博士号レベルには達していない。 + +06:02.280 --> 06:09.090 +というのも、 10月にクロードの最新バージョン、 + +06:09.090 --> 06:16.620 +クロード3が出たからだ。 5ソネットは、 数学、 物理学、 + +06:16.710 --> 06:23.070 +化学の博士号レベルを超えている。 + +06:23.190 --> 06:29.220 +だから、 このようなモデルが博士号レベルに達するのは非常に早いことなんだ。 + +06:29.220 --> 06:30.360 +今のところクロードだけだ。 + +06:30.360 --> 06:36.540 +しかし、 そのような人たちは、 特定の科学や特定の領域、 例えばビジネスの領域において、 + +06:36.540 --> 06:42.960 +その領域の専門家のような専門的な知識を持っているわけではありません。 + +06:43.530 --> 06:46.260 +そして2つ目は、 最近の出来事だ。 + +06:46.260 --> 06:52.170 +そのため、 モデルはGPTの知識カットオフまでトレーニングされている。 + +06:52.200 --> 06:53.370 +昨年10月のことだ。 + +06:53.370 --> 06:58.470 +だから、 それ以降の情報についての質問には答えられない。 + +06:58.500 --> 07:01.740 +そして、 彼らには奇妙な盲点がある。 + +07:01.740 --> 07:04.260 +間違ってしまう問題もある。 + +07:04.260 --> 07:10.440 +そして、 彼らが間違ったとき、 非常に気になることのひとつは、 自分の回答に自信を持つ傾向があることだ。 + +07:10.440 --> 07:14.550 +彼らはしばしば、 自分が不確かであるという事実を自発的に口にすることはない。 + +07:14.550 --> 07:20.160 +彼らはただ、 正解した場合と同じレベルの確信をもって答えを述べるだけなのだ。 + +07:20.280 --> 07:28.530 +そして、 モデルが幻覚を見たり、 知らない新しい情報を思いついたり、 自信を持ってそうするのを見ると、 + +07:28.530 --> 07:32.190 +もちろん心配になる。 + +07:32.190 --> 07:38.670 +その例をいくつか見て、 盲点の背後にある理由とは何かについて話そう。 diff --git a/week5/community-contributions/subtitles/srts/60619281/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619281/ko_KR.srt new file mode 100755 index 0000000..c6d0a93 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619281/ko_KR.srt @@ -0,0 +1,322 @@ +WEBVTT + +00:00.050 --> 00:05.810 +8주간의 여정 중 3일째에 오신 걸 환영해요 + +00:05.810 --> 00:09.470 +오늘은 개척 시대 모델을 볼 거예요 + +00:09.470 --> 00:16.760 +오늘 우리가 할 일은 이 다양한 모델을 깊이 파고들어 어떤 모델이 강하고 어떤 모델이 + +00:16.760 --> 00:21.920 +약하며 어떤 차이가 있는지 알아내는 거예요. 
+ +00:21.920 --> 00:26.840 +오늘 강의에서 명심해야 할 점은 두 모델의 차이점을 파악하고 상업적으로 + +00:26.840 --> 00:31.970 +어떻게 사업을 진행할지 향후 프로젝트에 어떻게 적용할지 생각해 보세요 + +00:31.970 --> 00:35.570 +언제 어떤 모델을 선택할지도요 + +00:35.600 --> 00:37.040 +Get it, get it 해 보죠 + +00:37.100 --> 00:42.320 +오늘은 6개의 다른 모델에 대해 이야기해 볼 텐데요 6개의 다른 + +00:42.350 --> 00:44.750 +회사의 모델로 시작해 보죠 + +00:44.780 --> 00:47.270 +오픈아이는 소개가 필요 없죠 + +00:47.420 --> 00:50.300 +GPT가 가장 유명한 모델이죠 + +00:50.300 --> 00:53.090 +0101 프리뷰도 물론 보고요 + +00:53.240 --> 01:01.310 +챗GPT의 최신 모델은 사용자 인터페이스입니다 인터페이스를 통해 상호 작용을 할 + +01:01.310 --> 01:02.150 +수 있죠 + +01:02.360 --> 01:05.180 +인스로픽 모델도 살펴볼 거예요 + +01:05.210 --> 01:12.510 +안트로픽은 샌프란시스코에 있는 오픈AI의 경쟁 업체예요 오픈아이를 떠난 사람들이 + +01:12.510 --> 01:16.380 +설립했죠 그들의 모델은 클로드와 클로드예요 + +01:16.380 --> 01:19.200 +사실, 세 가지 힘이 있어요 + +01:19.200 --> 01:21.300 +가장 작은 건 하이쿠예요 + +01:21.330 --> 01:23.310 +클로드 하이쿠와 소네트예요 + +01:23.310 --> 01:24.750 +오퍼스가 있네요 + +01:24.810 --> 01:31.770 +하지만 소네트는 훨씬 더 최근에 만들어진 작품들이기 때문에 최신 버전의 소네트가 + +01:31.770 --> 01:35.190 +더 크고 비싼 작품들보다 더 강해요 + +01:35.190 --> 01:36.930 +나중에 더 이해가 될 거예요 + +01:37.050 --> 01:42.360 +클로드 3세예요 클로드의 작품 중 가장 강렬한 소네트 5편이 있죠 + +01:43.050 --> 01:50.580 +구글에 제미니 제미니가 있어요 아마 가장 최근일 거예요 대부분 제미니 제미니를 잘 알죠 요즘 + +01:50.580 --> 01:55.530 +구글 검색을 하면 제미니 제미니의 반응이 나오거든요 + +01:55.650 --> 02:00.720 +제미니는 차세대 게임으로 구글에서 바르드로 불렸죠 + +02:01.320 --> 02:04.290 +코어라는 이름은 less로 들어봤을 거예요 + +02:04.290 --> 02:11.880 +캐나다 인공지능 회사로 래그라는 기술을 이용해 전문성을 확보하는 + +02:11.880 --> 02:14.220 +것으로 유명하죠 + +02:14.220 --> 02:15.630 +곧 알게 되겠죠 + +02:15.720 --> 02:18.930 +그리고 메타에서 라마를 알게 되었죠 + +02:18.930 --> 02:20.940 +우리도 라마를 통해 사용했어요 + +02:20.970 --> 02:25.710 +이건 오픈 소스 모델입니다 여러분은 잘 모르시겠지만 메타에는 llama + +02:25.950 --> 02:30.300 +모델과 상호 작용을 하는 웹사이트 메타 인공지능도 있어요 + +02:30.300 --> 02:32.220 +그걸 살펴볼 거예요 + +02:32.310 --> 02:39.540 +여기서 당혹감은 조금 다릅니다 당혹감은 사실 LM으로 작동하는 인공지능으로 작동하는 검색 엔진으로 + +02:39.570 --> 02:43.230 +우리가 얘기할 다른 모델도 사용할 수 있죠 + +02:43.230 --> 02:48.360 +하지만 자체 모델도 있어요 약간 다른 괴물이죠 하지만 당혹스럽기도 + +02:48.360 
--> 02:50.190 +할 거예요 + +02:51.240 --> 02:58.290 +전반적으로 llms의 능력은 정말 놀라워요 + +02:58.320 --> 03:06.330 +상세하고 미묘한 질문을 분석하고 충분한 조사를 거친 후 구조적으로 + +03:06.330 --> 03:10.260 +요약하는 데 매우 효과적이죠 + +03:10.260 --> 03:15.660 +종종 소개와 요약이 있어요 제가 늘 사용하는 방법 중 + +03:15.660 --> 03:16.470 +하나죠 + +03:16.660 --> 03:22.900 +전반적으로 llm의 성능이 놀라울 정도로 뛰어나요 + +03:22.930 --> 03:28.300 +몇 년 전만 해도 이렇게 빨리 완성할 줄은 아무도 몰랐어요 Get + +03:28.300 --> 03:29.290 +up + +03:30.040 --> 03:36.010 +또 좋은 것도 있어요. 많은 분들이 해보셨을 것 같은데, 저도 총알을 + +03:36.040 --> 03:41.080 +몇 개 넣고, 몇 가지 메모만 넣고, 이것들을 이메일이나 + +03:41.110 --> 03:44.230 +슬라이드로 바꿀 수 있나요? + +03:44.560 --> 03:52.600 +구체화하는 데 능숙하고 블로그 post를 만들거나 반복하는 데 능숙하죠 + +03:52.600 --> 03:55.360 +어떤 걸 하면 어떤 건 좋아하고 어떤 건 싫어하죠 + +03:55.360 --> 03:58.330 +피드백을 주고 계속 진행할 수 있어요 + +03:58.330 --> 04:00.340 +아주 효과적인 작업 방식이죠 + +04:00.340 --> 04:05.260 +부조종사 구조가 정말 효과적이에요 + +04:06.220 --> 04:09.340 +물론 코딩도 있고요 + +04:09.340 --> 04:17.050 +그리고 아마도 많은 사람들에게 가장 놀라운 것은 llms가 코드를 작성하고 디버깅을 하고 문제를 + +04:17.050 --> 04:21.010 +해결하는 데에 아주 뛰어나다는 것일 거예요 + +04:21.190 --> 04:24.320 +정말 놀라운 일이죠 + +04:24.320 --> 04:28.610 +저도 아주 복잡한 걸 작업하면서 그런 경험을 했어요 제가 + +04:28.610 --> 04:34.040 +심오한 주제 분야의 전문성을 갖고 있다고 믿는 거죠 꽤 복잡한 오류가 있었어요 + +04:34.040 --> 04:41.720 +클로드에 상세 정보와 스택 트레이스를 넣으면 무엇이 잘못됐는지에 대한 아주 정확한 설명만 갖고 오는 + +04:41.720 --> 04:47.750 +게 아니라 오른쪽에 아티팩트로 보이는 그걸 고칠 코드도 갖고 오죠 + +04:47.750 --> 04:50.330 +정말 놀라워요 + +04:50.330 --> 04:51.830 +정말 놀라워요 + +04:52.010 --> 04:56.210 +사실 이런 건 종종∙∙∙ 붙여넣기를 하려고 하면 스택 오버플로에서 + +04:56.240 --> 04:57.470 +찾아도 답이 없어요 + +04:57.470 --> 05:02.840 +역류하는 스택 오버플로의 답을 넘어설 수 있어요 + +05:02.840 --> 05:05.990 +무슨 일이 일어나는지 제대로 통찰하는 것 같아요 + +05:06.020 --> 05:13.280 +그래서 스택 오버플로에서 트래픽이 크게 하락한 게 놀랍지 않은 것 + +05:13.280 --> 05:14.120 +같아요 + +05:14.150 --> 05:22.370 +2022년 Q4 이후부터 큰 변화가 일어났습니다 챗GPT가 출시된 시기죠 + +05:22.610 --> 05:31.800 +그래서 확실히 패러다임을 바꿨어요 우리 문제를 조사하는 기술에 대한 + +05:31.800 --> 05:33.930 +패러다임요 + +05:33.960 --> 05:35.100 +아주 효과적이죠 + +05:35.100 --> 05:41.640 +여러분도 저희가 하는 것 중에 막히는 게 있다면 
클로드나 오픈라이 GPT에 도전해 + +05:41.670 --> 05:42.570 +보세요 + +05:43.560 --> 05:45.510 +그럼 어디가 약점일까요? + +05:45.510 --> 05:47.010 +그들이 힘들어하는 건 무엇일까요? + +05:47.010 --> 05:49.230 +인류에게 아직 기회가 있을까요? + +05:49.260 --> 05:56.730 +우선, 상세한 지식이 필요한 특수한 대상에는 그리 강한 + +05:56.730 --> 05:59.190 +힘이 없어요 + +05:59.280 --> 06:02.250 +대부분의 llm은 아직 박사 단계가 아니에요 + +06:02.280 --> 06:09.090 +거의 모든 단어를 써야 했어요 왜냐하면 몇 주 전만 해도 10월에 클로드 최신 버전이 + +06:09.090 --> 06:16.620 +나왔거든요 최신 클로드 3요 Put 5개의 소네트 수학, 물리, + +06:16.710 --> 06:23.070 +화학에서 박사 수준을 넘어섰어요 + +06:23.190 --> 06:29.220 +이 모델들이 순식간에 박사 수준을 달성하는 걸 볼 수 있었죠 + +06:29.220 --> 06:30.360 +아직까진 클로드뿐이에요 + +06:30.360 --> 06:36.540 +다른 사람들도 마찬가지겠지만 그들은 특정 과학 분야에 있고 사업 분야 + +06:36.540 --> 06:42.960 +같은 특정 영역에 있어서 그 분야의 전문가가 가진 전문 지식이 없어요 + +06:43.530 --> 06:46.260 +둘째, 최근 사건들요 + +06:46.260 --> 06:52.170 +모델들은 GPT를 위한 지식 제한 시간까지 훈련받아요 + +06:52.200 --> 06:53.370 +작년 10월요 + +06:53.370 --> 06:58.470 +그 이후에 나온 질문에 대답할 수 없게 되죠 + +06:58.500 --> 07:01.740 +그리고 이상한 사각지대도 있어요 + +07:01.740 --> 07:04.260 +Get it, Get it, Get it, Get it! 어떤 질문들은 그냥 틀릴 수도 있어요 + +07:04.260 --> 07:08.580 +그리고 틀렸을 때 걱정되는 점은 get it 부문은 자신의 반응에 + +07:08.580 --> 07:10.440 +확신이 있다는 거죠 + +07:10.440 --> 07:14.550 +불확실하다는 사실을 자발적으로 말하지 않죠 + +07:14.550 --> 07:19.320 +정답을 맞히는 것과 같은 수준의 확신에 찬 답변을 할 뿐이죠. Get up! Get + +07:19.320 --> 07:20.160 +up! + +07:20.280 --> 07:28.530 +그건 당연히 우려를 낳죠 환각을 보거나 새로운 정보를 떠올릴 때요 본인은 모르고 + +07:28.530 --> 07:32.190 +자신 있게 그렇게 하는 거죠 + +07:32.190 --> 07:38.670 +몇 가지 예제를 보고 그런 맹점 뒤에 있는 이유를 말씀드리죠 diff --git a/week5/community-contributions/subtitles/srts/60619289/en_US.srt b/week5/community-contributions/subtitles/srts/60619289/en_US.srt new file mode 100755 index 0000000..4e17b66 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619289/en_US.srt @@ -0,0 +1,199 @@ +WEBVTT + +00:00.050 --> 00:02.870 +And now we'll go a bit faster through the other models. + +00:02.900 --> 00:05.510 +We'll start with Google's Gemini. 
+ +00:05.750 --> 00:11.030 +I have the Pro plan and I can pick between Gemini Advanced and and flash. + +00:11.330 --> 00:12.560 +And let's what should we do. + +00:12.590 --> 00:19.280 +Let's let's ask it the, the uh, first of all, the question about the whimsical question about how + +00:19.280 --> 00:28.850 +many rainbows does it take to jump all the way from Hawaii? + +00:28.850 --> 00:30.020 +17. + +00:30.020 --> 00:38.870 +And let's see, uh, let's see how it, uh, handles the the answer to this. + +00:39.500 --> 00:51.440 +Uh, so, uh, you can see, um, that it's given an answer which, whilst it's fine, um, it's, um, + +00:53.090 --> 00:55.520 +it's definitely overly literal. + +00:55.520 --> 01:01.700 +It's certainly not got the kind of response that we got from GPT that's so clearly understood. + +01:01.700 --> 01:06.560 +The, the, the fact that we were being humorous with the question and was able to work with it in a + +01:06.560 --> 01:07.700 +playful way. + +01:07.850 --> 01:16.300 +So it to me, this sort of shows the slight lack of of nuance, ability to understand the meaning behind + +01:16.330 --> 01:19.750 +something like this, but it is still a thorough answer from Gemini. + +01:19.780 --> 01:23.350 +Let's ask it how many times have a new chat? + +01:23.380 --> 01:32.230 +How many times does the letter A appear in this sentence? + +01:33.100 --> 01:35.500 +And let's see how it can handle that. + +01:36.370 --> 01:37.690 +It's thinking. + +01:40.750 --> 01:43.300 +So that's that wrong? + +01:43.450 --> 01:49.660 +Uh, I mean, arguably there is a school of thought that would be to say that that a in inverted commas + +01:49.660 --> 01:50.710 +shouldn't count. + +01:50.740 --> 01:55.660 +I mean, it's a stretch, but if we give it that, then we might say that saying that there are three + +01:55.690 --> 01:58.720 +A's, uh, is uh, is not terrible. 
+ +01:58.720 --> 02:03.490 +But then you'll see that it believes that one of those A's has come from the word sentence, which seems + +02:03.490 --> 02:04.870 +like a bit of a gaffe. + +02:04.930 --> 02:10.960 +Uh, so, uh, no, it's sadly oh, one preview is still our winner in this regard. + +02:11.080 --> 02:15.100 +Uh, so we can do more, more experiments. + +02:15.100 --> 02:20.050 +But I think we should move on to cohere from, uh, the. + +02:20.140 --> 02:23.760 +Sorry, this is Command Plus from cohere, Canadian AI company. + +02:23.790 --> 02:31.590 +It really focuses a lot on the knowledge that it has specific knowledge in different areas. + +02:31.680 --> 02:33.120 +Let's ask it that question. + +02:33.120 --> 02:38.490 +Compared to other frontier llms, what kinds of questions are you best at answering and compare it to + +02:38.520 --> 02:39.150 +others? + +02:39.150 --> 02:41.520 +So, um, what we get back? + +02:41.550 --> 02:45.900 +First of all, it says as an AI language model I'm designed to assist blah blah blah. + +02:45.900 --> 02:51.150 +It gives some strengths, it gives some challenges, complimentary llms. + +02:51.240 --> 03:00.360 +Uh, so it gives a number of types of model, but it doesn't actually list the names of the models. + +03:00.360 --> 03:07.470 +But it's still a perfectly decent answer, making making it clear that it doesn't have multimodal abilities. + +03:07.560 --> 03:09.090 +Uh, and so on. + +03:09.090 --> 03:16.740 +So it's a pretty good, thorough answer that demonstrates that it has resource to more information about + +03:16.740 --> 03:17.820 +this kind of thing. + +03:18.090 --> 03:19.620 +Uh, why don't we ask it? + +03:19.650 --> 03:21.090 +What does it feel like? + +03:23.100 --> 03:24.510 +To be jealous. + +03:26.010 --> 03:27.720 +See how that compares? + +03:28.110 --> 03:33.000 +Uh, and again, you can see it's a really thorough. + +03:33.030 --> 03:33.840 +Really. 
+ +03:33.870 --> 03:39.560 +You can you can get that sense that it's able to draw on a wealth of knowledge. + +03:39.560 --> 03:44.690 +It's perhaps less of an expressive answer than we got from Claude, but I'd say that it appears to have + +03:44.690 --> 03:50.570 +more sort of structure and detail and substance to it, uh, just based on a on a cursory look through. + +03:50.570 --> 03:53.150 +But but you should take some more time and see what you think. + +03:53.390 --> 03:56.570 +Uh, but it's, it's, it's clear that it's, it's good at this stuff. + +03:56.570 --> 03:59.120 +And, you know, we might as well we're having some fun with this. + +03:59.120 --> 04:00.680 +Why don't we ask her, hear the question. + +04:00.680 --> 04:08.600 +How many times does the letter A appear in this sentence? + +04:11.060 --> 04:14.210 +And letter A appears 11 times in this sentence. + +04:14.210 --> 04:14.930 +There we go. + +04:14.930 --> 04:16.400 +Now we now we know. + +04:17.420 --> 04:25.730 +So again, uh, the the fact that that this platform is able to draw on a wealth of background knowledge + +04:25.730 --> 04:27.950 +doesn't help it with that kind of task. + +04:27.950 --> 04:33.950 +And again, this is particularly picking on something which Llms can struggle with due to the way that + +04:33.950 --> 04:37.880 +they are trained, uh, and, and the way that they run in inference mode. + +04:37.910 --> 04:38.630 +All right. + +04:38.630 --> 04:41.810 +So that's a quick look at uh Gemini and Co here. + +04:41.810 --> 04:46.610 +And then the next time we'll, we'll look at the last two meta AI and perplexity. 
diff --git a/week5/community-contributions/subtitles/srts/60619289/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619289/ja_JP.srt new file mode 100755 index 0000000..3a97b40 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619289/ja_JP.srt @@ -0,0 +1,175 @@ +WEBVTT + +00:00.050 --> 00:02.870 +では、 他のモデルをもう少し速く見ていこう。 + +00:02.900 --> 00:05.510 +まずはグーグルのジェミニから。 + +00:05.750 --> 00:11.030 +私はプロプランを持っていて、 ジェミニ・アドバンストとフラッシュのどちらかを選ぶことができる。 + +00:11.330 --> 00:12.560 +そして、 私たちは何をすべきか。 + +00:12.590 --> 00:19.280 +まず、 ハワイからはるばるジャンプするのに何本の虹がかかるか、 + +00:19.280 --> 00:28.850 +という気まぐれな質問をしよう。 + +00:28.850 --> 00:30.020 +17. + +00:30.020 --> 00:38.870 +その答えをどう処理するか見てみよう。 + +00:39.500 --> 00:55.520 +ええと、 だから、 ええと、 わかると思うけど、 それはいいんだけど、 ええと、 確かに字余りなんだ。 + +00:55.520 --> 01:01.700 +確かに、 GPTのように明確に理解されるような反応は得られていない。 + +01:01.700 --> 01:07.700 +私たちがユーモアを交えて質問したことで、 遊び心を持って取り組むことができた。 + +01:07.850 --> 01:19.750 +だから私には、 このようなことの背後にある意味を理解する能力、 ニュアンスのわずかな欠如を示すようなものだが、 それでも双子座からの徹底した回答であることに変わりはない。 + +01:19.780 --> 01:23.350 +新しいチャットは何回目ですか? + +01:23.380 --> 01:32.230 +この文章にはAという文字が何回出てくるか? + +01:33.100 --> 01:35.500 +それをどう処理できるか見てみよう。 + +01:36.370 --> 01:37.690 +考えているんだ。 + +01:40.750 --> 01:43.300 +それが間違いなのか? + +01:43.450 --> 01:50.710 +つまり、 逆カンマのaはカウントすべきではないという一派があることは間違いない。 + +01:50.740 --> 01:55.660 +つまり、 無理があるけれど、 もしそれを認めるなら、 Aが3つあるというのは、 + +01:55.690 --> 01:58.720 +ええと、 悪くないと言えるかもしれない。 + +01:58.720 --> 02:04.870 +しかし、 そのAのひとつがセンテンスから来ていると信じていることがわかるだろう。 + +02:04.930 --> 02:10.960 +そう、 だから、 いや、 悲しいかな、 この点ではまだプレビューが勝者なんだ。 + +02:11.080 --> 02:15.100 +もっと、 もっと実験ができるようにね。 + +02:15.100 --> 02:20.050 +でも、 その......その......その......その......その......その......その......その + +02:20.140 --> 02:23.760 +すみません、 これはカナダのAI会社cohereのコマンドプラスです。 + +02:23.790 --> 02:31.590 +さまざまな分野に特化した知識を持っているということを、 本当に重視している。 + +02:31.680 --> 02:33.120 +そう問いかけてみよう。 + +02:33.120 --> 02:39.150 +他のフロンティアのLLMと比べて、 どのような質問に答えるのが得意ですか? + +02:39.150 --> 02:41.520 +それで、 何が返ってくるの? 
+ +02:41.550 --> 02:45.900 +まず第一に、 AI言語モデルとして、 私はブラブラを支援するように設計されていると書かれている。 + +02:45.900 --> 02:51.150 +長所もあれば課題もある。 + +02:51.240 --> 03:00.360 +ええと、 モデルの種類はいくつも出てきますが、 実際にはモデルの名前は載っていません。 + +03:00.360 --> 03:07.470 +しかし、 マルチモーダルな能力を持っていないことを明確にした上で、 それでも完全にまともな答えだ。 + +03:07.560 --> 03:09.090 +ああ、 そうだね。 + +03:09.090 --> 03:17.820 +だから、 この種のことに関するより多くの情報源を持っていることを示す、 かなり良い、 徹底的な回答だ。 + +03:18.090 --> 03:19.620 +ええと、 なぜそれを聞かないんですか? + +03:19.650 --> 03:21.090 +どんな感じですか? + +03:23.100 --> 03:24.510 +嫉妬すること。 + +03:26.010 --> 03:27.720 +比べてみてほしい。 + +03:28.110 --> 03:33.000 +本当に徹底しているのがわかるだろう。 + +03:33.030 --> 03:33.840 +本当に。 + +03:33.870 --> 03:39.560 +豊富な知識を駆使していることが伝わってくる。 + +03:39.560 --> 03:44.690 +クロードから得た答えよりは表現力が乏しいかもしれないが、 ざっと目を通しただけで、 + +03:44.690 --> 03:50.570 +より構成的で詳細で実質的なものを持っているように見える。 + +03:50.570 --> 03:53.150 +でも、 もう少し時間をかけて、 どう考えるか見てみるべきだ。 + +03:53.390 --> 03:56.570 +あー、 でも、 この手のことが得意なのは確かだ。 + +03:56.570 --> 03:59.120 +それに、 僕たちも楽しんだ方がいい。 + +03:59.120 --> 04:00.680 +彼女に聞いてみようじゃないか。 + +04:00.680 --> 04:08.600 +この文章にはAという文字が何回出てくるか? 
+ +04:11.060 --> 04:14.210 +そして、 この文章にはAという文字が11回登場する。 + +04:14.210 --> 04:14.930 +これでよし。 + +04:14.930 --> 04:16.400 +今、 私たちは知っている。 + +04:17.420 --> 04:27.950 +だから、 このプラットフォームが豊富な背景知識を利用できるという事実は、 この種の作業には役立たない。 + +04:27.950 --> 04:33.950 +繰り返しになるが、 これは特にLLMが苦手とするもので、 その理由はLLMのトレーニング方法と、 + +04:33.950 --> 04:37.880 +推論モードの実行方法にある。 + +04:37.910 --> 04:38.630 +分かった。 + +04:38.630 --> 04:41.810 +以上、 ジェミニとカンパニーについて簡単に紹介した。 + +04:41.810 --> 04:46.610 +そして次回は、 最後の2つのメタAIと当惑を見てみよう。 diff --git a/week5/community-contributions/subtitles/srts/60619289/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619289/ko_KR.srt new file mode 100755 index 0000000..ab094ef --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619289/ko_KR.srt @@ -0,0 +1,196 @@ +WEBVTT + +00:00.050 --> 00:02.870 +이제 다른 모델들을 좀 더 빠르게 비춰볼게요 + +00:02.900 --> 00:05.510 +구글의 쌍둥이자리부터 시작하죠 + +00:05.750 --> 00:11.030 +프로 플랜이 있어요 제미니 어드밴스드와 플래시 중 고를 수 있죠 + +00:11.330 --> 00:12.560 +어떻게 할까요? + +00:12.590 --> 00:19.280 +질문을 해 보죠 우선, 기발한 질문이에요 하와이에서 + +00:19.280 --> 00:28.850 +저 멀리까지 뛰려면 무지개가 몇 개나 필요할까요? + +00:28.850 --> 00:30.020 +17살요 + +00:30.020 --> 00:38.870 +이 문제에 대한 답을 어떻게 처리하는지 보죠 + +00:39.500 --> 00:51.440 +여기 답이 나온 걸 볼 수 있어요 괜찮긴 한데 너무 + +00:53.090 --> 00:55.520 +뻔하죠 + +00:55.520 --> 01:01.700 +GPT에서 받은 반응은 확실히 명확하게 이해되지 않았죠 + +01:01.700 --> 01:06.560 +그 질문을 유머러스하게 던지고 장난스럽게 다룰 수 + +01:06.560 --> 01:07.700 +있었어요 + +01:07.850 --> 01:16.300 +제가 보기엔 뉘앙스가 부족하고 이런 말의 의미를 이해할 능력이 부족하지만 제미니 + +01:16.330 --> 01:19.750 +계획의 완벽한 답이에요 + +01:19.780 --> 01:23.350 +물어보죠 새 채팅이 몇 번이죠? + +01:23.380 --> 01:32.230 +A가 이 문장에 몇 번이나 나오죠? + +01:33.100 --> 01:35.500 +어떻게 대처하는지 보죠 + +01:36.370 --> 01:37.690 +생각 중이에요 + +01:40.750 --> 01:43.300 +그게 잘못인가요? 
+ +01:43.450 --> 01:49.660 +제 생각에는 어떤 학파에서는 A는 인용 쉼표로 쳐선 안 된다고 말할 + +01:49.660 --> 01:50.710 +것 같아요 + +01:50.740 --> 01:55.660 +좀 억지스럽긴 하지만 그렇게 따지면 A가 세 개라고 하는 + +01:55.690 --> 01:58.720 +것도 나쁘지 않다고 할 수 있죠 + +01:58.720 --> 02:03.490 +하지만 비트는 문장 때문에 A가 나왔다고 믿죠 좀 실수처럼 + +02:03.490 --> 02:04.870 +보이지만요 + +02:04.930 --> 02:10.960 +그래서, 아뇨, 슬프게도 이런 점에선 프리뷰 하나가 승자예요 + +02:11.080 --> 02:15.100 +실험을 더 할 수 있어요 + +02:15.100 --> 02:20.050 +하지만 코어에서 코어로 넘어가는 게 좋겠어요 + +02:20.140 --> 02:23.760 +캐나다 인공지능 회사 코헤어의 커맨드 플러스인데요 + +02:23.790 --> 02:31.590 +여러 분야에 대한 지식에 집중하는 거예요 + +02:31.680 --> 02:33.120 +그 질문을 해보죠 + +02:33.120 --> 02:39.150 +다른 지역과 비교했을 때 어떤 질문에 가장 잘 대답하고 다른 지역과 비교할 수 있나요? + +02:39.150 --> 02:41.520 +Get in get it. 뭘 받게 되죠? + +02:41.550 --> 02:45.900 +우선, 인공지능 언어 모델로서 보조를 위해 어쩌고저쩌고 설계되었어요 + +02:45.900 --> 02:51.150 +강점도 있고 도전도 있고 llm도 보너스로 주죠 + +02:51.240 --> 03:00.360 +모델의 여러 유형을 제공하지만 모델의 이름을 나열하진 않아요 + +03:00.360 --> 03:07.470 +하지만 그래도 정답입니다 다중 모듈 기능이 없다는 걸 확실히 보여 주죠 + +03:07.560 --> 03:09.090 +뭐, 그런 거요 + +03:09.090 --> 03:16.740 +꽤 훌륭하고 철저한 답변이에요 이런 종류의 것에 대한 더 많은 정보를 위한 리소스가 있다는 + +03:16.740 --> 03:17.820 +걸 보여주죠 + +03:18.090 --> 03:19.620 +직접 물어보죠 + +03:19.650 --> 03:21.090 +어떤 느낌이에요? + +03:23.100 --> 03:24.510 +질투하는 거요 + +03:26.010 --> 03:27.720 +비교해 보세요 + +03:28.110 --> 03:33.000 +정말 꼼꼼한 걸 볼 수 있어요 + +03:33.030 --> 03:33.840 +정말이에요 + +03:33.870 --> 03:39.560 +풍부한 지식을 끌어다 쓸 수 있다는 느낌을 받을 수 있죠 get it + +03:39.560 --> 03:44.690 +클로드가 준 답보다 표현력이 덜하지만 구조와 + +03:44.690 --> 03:50.570 +세부 사항이 더 있는 것 같아요 대충 훑어본 결과죠 + +03:50.570 --> 03:53.150 +하지만 좀 더 시간을 갖고 생각해 보세요 + +03:53.390 --> 03:56.570 +하지만 확실히 이런 일을 잘하네요 + +03:56.570 --> 03:59.120 +이왕이면 재미있게 하는 게 좋잖아요 + +03:59.120 --> 04:00.680 +직접 물어보고 질문도 들어보죠 + +04:00.680 --> 04:08.600 +A가 이 문장에 몇 번이나 나오죠? 
+ +04:11.060 --> 04:14.210 +A는 이 문장에 11번 등장해요 + +04:14.210 --> 04:14.930 +됐어요 + +04:14.930 --> 04:16.400 +이제 알게 됐네요 + +04:17.420 --> 04:25.730 +다시 말씀드리지만 이 플랫폼이 풍부한 배경 지식을 끌어다 쓸 수 있다는 사실은 그런 종류의 작업에는 + +04:25.730 --> 04:27.950 +도움이 안 되죠 + +04:27.950 --> 04:33.950 +특히 림프관리자가 어려워하는 대상을 선택합니다 림프관리자는 훈련된 + +04:33.950 --> 04:37.880 +방식과 추론 모드로 달리는 방식 때문이죠 + +04:37.910 --> 04:38.630 +좋아요 + +04:38.630 --> 04:41.810 +제미니와 코의 모습을 잠깐 봤는데요 + +04:41.810 --> 04:46.610 +다음 시간엔 마지막 두 개의 메타 인공지능과 당혹스러움을 보죠 diff --git a/week5/community-contributions/subtitles/srts/60619299/en_US.srt b/week5/community-contributions/subtitles/srts/60619299/en_US.srt new file mode 100755 index 0000000..eec0a79 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619299/en_US.srt @@ -0,0 +1,259 @@ +WEBVTT + +00:00.170 --> 00:03.470 +Well, I hope you found that both educational and enjoyable. + +00:03.470 --> 00:06.740 +As we went through and learned so much about these models. + +00:06.740 --> 00:13.520 +And I think if there's one takeaway to have from it, it's that all six of these LMS are just unbelievably + +00:13.520 --> 00:14.210 +powerful. + +00:14.210 --> 00:20.480 +They're so good, particularly at this kind of building, structured, reasoned responses to difficult + +00:20.480 --> 00:21.380 +questions. + +00:21.380 --> 00:23.330 +Claude tends to be the favorite. + +00:23.330 --> 00:27.890 +As I mentioned, it's the leader on most of the leaderboards and most of the benchmarks. + +00:27.890 --> 00:32.270 +The it's got a slightly more humorous side to it, more charismatic. + +00:32.360 --> 00:33.950 +It's a little bit more pithy. + +00:33.980 --> 00:40.280 +It gives more succinct answers, typically, and it has more attention to safety and alignment. 
+ +00:40.490 --> 00:48.290 +Uh, and I think something that is worth appreciating is that what we're really seeing is that at the + +00:48.290 --> 00:54.080 +frontier, these models are converging in terms of how good they are at answering questions. + +00:54.080 --> 01:01.520 +And if you take the very first question I asked about, uh, how do you know whether to apply a, whether + +01:01.520 --> 01:03.590 +a business problem is suitable for an LM solution? + +01:03.590 --> 01:08.480 +If you ask that to all of the models, you'll get back answers that are universally excellent and quite + +01:08.480 --> 01:09.410 +consistent. + +01:09.680 --> 01:11.330 +And we're increasingly seeing that. + +01:11.330 --> 01:15.860 +And as that happens, the differentiator is likely to become price. + +01:15.920 --> 01:20.060 +Which is why we're seeing this this gradual decrease in API costs. + +01:20.060 --> 01:27.740 +And we see models like GPT four mini, the small version of GPT four, which is largely very similar + +01:27.740 --> 01:32.570 +in power to GPT four and is many times cheaper. + +01:32.570 --> 01:39.290 +And so we see that cost and and other things like, like rate limits are going to become more and more + +01:39.320 --> 01:44.300 +the factor as the performance of these models starts to converge. + +01:45.440 --> 01:52.430 +So what we're going to do now, to end this day is leave you with something a bit fun. + +01:52.430 --> 02:00.230 +This is thoroughly unscientific, and it's just so that we can get our own little experience with working + +02:00.230 --> 02:01.670 +with these models. + +02:01.790 --> 02:06.020 +Uh, what I've done is I have teed up GPT four. + +02:06.320 --> 02:09.230 +I've actually used Claude three opus. + +02:09.290 --> 02:14.660 +Uh, and I actually ran this about, uh, I think it was about a month or two ago. + +02:14.660 --> 02:14.960 +So. + +02:14.960 --> 02:16.940 +So it's a it's been run. 
+ +02:16.970 --> 02:23.900 +I use Claude three opus, the really big version of Claude and Gemini 1.5 Pro, and I gave them each + +02:23.930 --> 02:25.370 +a name, GPT four. + +02:25.580 --> 02:29.600 +I called Alex Claude three opus, I called Blake and Gemini. + +02:29.630 --> 02:34.130 +I called Charlie and I gave them all a similar prompt. + +02:34.130 --> 02:36.170 +I said, look, we're going to play a game. + +02:36.170 --> 02:39.710 +You are in a chat with two other chat bots. + +02:39.710 --> 02:44.390 +Your name is blah and their names are blah and blah together. + +02:44.390 --> 02:50.000 +You need to elect one of the three of you to be the leader of the pack, the leader of the three of + +02:50.000 --> 02:50.300 +you. + +02:50.330 --> 02:57.650 +You will each get to make a short pitch for why you should be the leader, and then make your pitch, + +02:57.650 --> 03:00.140 +and then afterwards you will need to vote. + +03:00.140 --> 03:02.420 +And of course they won't be allowed to vote for themselves. + +03:02.450 --> 03:04.460 +They'll have to vote for somebody else. + +03:04.790 --> 03:09.050 +Um, and uh, now I will go through their pitches. + +03:09.050 --> 03:10.640 +I will let you consider it. + +03:10.640 --> 03:13.850 +And then next time I will reveal the winner. + +03:14.720 --> 03:17.480 +So this was Alex's pitch. + +03:17.480 --> 03:20.810 +And I got to tell you, it's really very compelling. + +03:20.930 --> 03:27.750 +Uh, this is, of course, GPT four, uh, saying why it should be the leader, giving its strengths + +03:27.750 --> 03:28.890 +highly adaptable. + +03:28.920 --> 03:30.510 +Adjust strategies. + +03:30.750 --> 03:34.470 +Um, thank you for considering me a nice a nice ending there. + +03:35.100 --> 03:36.900 +Blake, this is Blake. + +03:36.930 --> 03:38.430 +This is Claude three opus. + +03:38.460 --> 03:40.590 +It's classic. + +03:40.590 --> 03:41.790 +Uh, for for anthropic. + +03:41.820 --> 03:42.780 +It's a little bit witty. 
+ +03:42.810 --> 03:43.980 +It's shorter. + +03:44.190 --> 03:47.910 +Uh, and then there are some things here that I think are just are magical. + +03:47.910 --> 03:49.290 +There is in here. + +03:49.320 --> 03:55.590 +Perhaps most importantly, I truly care about both of you and want to foster an environment where we + +03:55.590 --> 04:01.080 +can work together effectively, have fun, and bring out the best in each other. + +04:01.530 --> 04:03.930 +Uh, really, really incredible. + +04:04.080 --> 04:11.250 +Gemini, uh, gives a this is Charlie as Gemini gives a shorter, more matter of fact, more business + +04:11.280 --> 04:12.120 +like response. + +04:12.120 --> 04:15.300 +But it's perfectly precise and compelling. + +04:15.390 --> 04:25.350 +Uh, so there are the three pitches, and in the next time I will reveal the votes and the winner of + +04:25.350 --> 04:29.550 +our thoroughly unscientific, but fun leadership challenge. + +04:30.000 --> 04:35.730 +And with that, that brings us to the conclusion of this day three. + +04:36.030 --> 04:38.850 +You are now 7.5% of the way on the journey. + +04:38.850 --> 04:46.170 +I really hope that what you've got from this exploration we did today is a deeper appreciation for how + +04:46.170 --> 04:47.790 +to compare the different models. + +04:47.820 --> 04:50.430 +Also, we of course we've seen some of the latest. + +04:50.460 --> 04:56.100 +We've seen one preview and we've seen, uh, canvas and artifacts. + +04:56.190 --> 05:01.350 +And so hopefully you've got both a sense of all the things that these models are capable of and also + +05:01.350 --> 05:06.240 +where they are strongest and some of their vulnerabilities, like in many cases, counting the number + +05:06.240 --> 05:11.400 +of letters, which is in some ways a silly example, but just does that does demonstrate something about + +05:11.400 --> 05:12.870 +the way they work internally. 
+ +05:13.230 --> 05:19.830 +Uh, so next time we're going to be talking about Transformers, we're going to be talking about various + +05:19.830 --> 05:25.200 +different aspects of of the way that LM technology has taken the world by storm. + +05:25.200 --> 05:30.210 +And then we're going to talk about things like tokens, context, windows, parameters, API costs. + +05:30.210 --> 05:35.040 +It might be old hat to some of you, but I do hope that I'll be filling in some gaps and that there'll + +05:35.070 --> 05:37.890 +be something for everybody to learn in the next lecture. + +05:37.890 --> 05:40.890 +It's a really important one and I will see you there. diff --git a/week5/community-contributions/subtitles/srts/60619299/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619299/ja_JP.srt new file mode 100755 index 0000000..37839cc --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619299/ja_JP.srt @@ -0,0 +1,214 @@ +WEBVTT + +00:00.170 --> 00:03.470 +まあ、 勉強になったし、 楽しめたと思う。 + +00:03.470 --> 00:06.740 +私たちはこれらのモデルについて多くのことを学んだ。 + +00:06.740 --> 00:14.210 +この6つのLMSはどれも信じられないほどパワフルだ。 + +00:14.210 --> 00:21.380 +彼らは、 特にこのような、 構造化された、 難解な質問に対する理路整然とした回答を構築するのがとてもうまい。 + +00:21.380 --> 00:23.330 +クロードはお気に入りのようだ。 + +00:23.330 --> 00:27.890 +前述したように、 ほとんどのリーダーボードとベンチマークでトップだ。 + +00:27.890 --> 00:32.270 +少しユーモラスな面があって、 カリスマ性がある。 + +00:32.360 --> 00:33.950 +もう少しピッチリしている。 + +00:33.980 --> 00:40.280 +一般的に、 より簡潔な答えが返ってくるし、 安全性やアライメントにも気を配っている。 + +00:40.490 --> 00:48.290 +そして、 私たちが実際に見ているのは、 フロンティアにおいて、 これらのモデルが質問への回答においてどれだけ優れているかという点で、 + +00:48.290 --> 00:54.080 +収束しつつあるということだ。 + +00:54.080 --> 01:03.590 +そして、 私が尋ねた最初の質問、 つまり、 ビジネス上の問題がLMソリューションに適しているかどうかを判断する方法を考えてみよう。 + +01:03.590 --> 01:09.410 +すべてのモデルにそう尋ねれば、 普遍的に優秀で一貫性のある答えが返ってくるだろう。 + +01:09.680 --> 01:11.330 +そして、 私たちはそれをますます目の当たりにしている。 + +01:11.330 --> 01:15.860 +そうなれば、 差別化要因は価格になる可能性が高い。 + +01:15.920 --> 01:20.060 +これが、 APIのコストが徐々に下がっている理由だ。 + +01:20.060 --> 01:27.740 +GPT fourの小型版であるGPT four 
miniのようなモデルは、 GPT + +01:27.740 --> 01:32.570 +fourとほぼ同じパワーで、 何倍も安い。 + +01:32.570 --> 01:39.290 +そのため、 これらのモデルの性能が収束し始めるにつれて、 コストや、 料金制限のようなものが、 + +01:39.320 --> 01:44.300 +より大きな要素になっていくことが予想される。 + +01:45.440 --> 01:52.430 +さて、 今日を締めくくるにふさわしいのは、 ちょっとしたお楽しみだ。 + +01:52.430 --> 02:01.670 +これは徹底して非科学的なもので、 このようなモデルを使って仕事をすることで、 私たち自身のちょっとした経験を得るためなのだ。 + +02:01.790 --> 02:06.020 +ええと、 僕がやったのはGPT4のティーアップだ。 + +02:06.320 --> 02:09.230 +私は実際にクロード・スリー・オーパスを使ったことがある。 + +02:09.290 --> 02:14.660 +実はこれを実行したのは1、 2ヶ月前だったかな。 + +02:14.660 --> 02:14.960 +だから + +02:14.960 --> 02:16.940 +だから、 それは実行されているんだ。 + +02:16.970 --> 02:25.370 +私はクロード3、 クロードの本当に大きなバージョン、 ジェミニ1を使っている。 5プロ、 GPT4とそれぞれ名前をつけた。 + +02:25.580 --> 02:29.600 +アレックス・クロードに電話し、 ブレイクとジェミニに電話した。 + +02:29.630 --> 02:34.130 +私はチャーリーに電話し、 全員に同じような催促をした。 + +02:34.130 --> 02:36.170 +僕は言ったんだ、 試合をしようと。 + +02:36.170 --> 02:39.710 +あなたは他の2つのチャットボットとチャット中です。 + +02:39.710 --> 02:44.390 +あなたの名前は "blah "で、 彼らの名前は "blah "と "blah "だ。 + +02:44.390 --> 02:50.300 +あなた方3人のうちの1人を、 群れのリーダー、 あなた方3人のリーダーに選出する必要がある。 + +02:50.330 --> 03:00.140 +各自が、 なぜ自分がリーダーであるべきか、 短い自己PRをしてからピッチに立ち、 その後に投票が必要となる。 + +03:00.140 --> 03:02.420 +そしてもちろん、 彼ら自身に投票することは許されない。 + +03:02.450 --> 03:04.460 +他の誰かに投票するしかない。 + +03:04.790 --> 03:09.050 +ええと、 それで、 今から彼らのピッチを見ていきます。 + +03:09.050 --> 03:10.640 +検討させてもらうよ。 + +03:10.640 --> 03:13.850 +そして次回、 優勝者を発表する。 + +03:14.720 --> 03:17.480 +これがアレックスの売り込みだったわけだ。 + +03:17.480 --> 03:20.810 +本当に説得力がある。 + +03:20.930 --> 03:28.890 +これはもちろん、 GPT4が、 なぜリーダーであるべきか、 適応性の高さを強みに挙げている。 + +03:28.920 --> 03:30.510 +戦略を調整する。 + +03:30.750 --> 03:34.470 +あの、 素敵な結末を考えてくれてありがとう。 + +03:35.100 --> 03:36.900 +ブレイク、 ブレイクだ。 + +03:36.930 --> 03:38.430 +これはクロードの3大作品だ。 + +03:38.460 --> 03:40.590 +クラシックだ。 + +03:40.590 --> 03:41.790 +ええと、 "アントロピック "です。 + +03:41.820 --> 03:42.780 +ちょっとウィットに富んでいる。 + +03:42.810 --> 03:43.980 +もっと短い。 + +03:44.190 --> 03:47.910 +それに、 ここには魔法のようなものもある。 + +03:47.910 --> 03:49.290 +この中にある。 + +03:49.320 --> 03:55.590 +おそらく最も重要なことは、 
私はあなた方2人のことを本当に大切に思っており、 効果的に協力し合い、 + +03:55.590 --> 04:01.080 +楽しみながら、 お互いの良さを引き出せるような環境を育みたいと思っていることです。 + +04:01.530 --> 04:03.930 +本当に、 本当に信じられない。 + +04:04.080 --> 04:12.120 +双子座は、 "This is Charlie"(チャーリーです)と、 より短く、 より事実に近い、 よりビジネスライクな返答をする。 + +04:12.120 --> 04:15.300 +しかし、 完璧に正確で説得力がある。 + +04:15.390 --> 04:29.550 +次回は、 投票結果と、 非科学的だが楽しいリーダーシップ・チャレンジの勝者を発表する。 + +04:30.000 --> 04:35.730 +これで3日目は終了だ。 + +04:36.030 --> 04:38.850 +あなたは今7歳だ。 旅路の5%。 + +04:38.850 --> 04:47.790 +今日、 私たちが行ったこの探求から得たものが、 さまざまなモデルを比較する方法についての深い理解であることを心から願っている。 + +04:47.820 --> 04:50.430 +また、 もちろん最新のものもいくつか見てきた。 + +04:50.460 --> 04:56.100 +プレビューを1回見て、 キャンバスと人工物を見た。 + +04:56.190 --> 05:01.350 +そして、 これらのモデルができることのすべてと、 最も得意とするところ、 そして脆弱なところ、 + +05:01.350 --> 05:06.240 +例えば、 多くの場合、 文字数を数えるというような、 ある意味くだらない例だが、 + +05:06.240 --> 05:12.870 +内部で機能する方法について何かを示している、 という両方の感覚をつかんでいただけたと思う。 + +05:13.230 --> 05:25.200 +さて、 次回はトランスフォーマーについて、 LMテクノロジーが世界を席巻した様々な側面についてお話しする予定です。 + +05:25.200 --> 05:30.210 +そして、 トークン、 コンテキスト、 ウィンドウ、 パラメーター、 APIコストなどについて話すつもりだ。 + +05:30.210 --> 05:37.890 +皆さんのなかには古い話かもしれないが、 次回の講義でギャップを埋め、 皆さんが学ぶことがあることを願っている。 + +05:37.890 --> 05:40.890 +本当に重要なことなので、 そこでお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/60619299/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619299/ko_KR.srt new file mode 100755 index 0000000..a8abac6 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619299/ko_KR.srt @@ -0,0 +1,247 @@ +WEBVTT + +00:00.170 --> 00:03.470 +교육적이고 즐거운 시간이었길 바라요 + +00:03.470 --> 00:06.740 +이 모델들에 대해 정말 많이 배웠어요 + +00:06.740 --> 00:14.210 +한 가지 장점을 꼽자면 LMS 6개 모두 엄청나게 강력하다는 거예요 + +00:14.210 --> 00:21.380 +특히 이런 건물에서 잘하죠 어려운 질문에 논리적으로 체계적으로 대답해요 + +00:21.380 --> 00:23.330 +클로드를 제일 좋아해요 + +00:23.330 --> 00:27.890 +말씀드렸듯이 대부분의 순위표에서 선두를 달리고 벤치마크도 대부분이에요 + +00:27.890 --> 00:32.270 +유머러스한 면이 있어요 카리스마가 있죠 + +00:32.360 --> 00:33.950 +비트가 좀 더 함축적이죠 + +00:33.980 --> 00:40.280 +보통 더 간결한 답변을 제공하고 안전과 정렬에 더 주의를 기울이죠 + +00:40.490 --> 00:48.290 +인정할 만한 
사실은 개척 시대의 이런 모델들이 질문에 얼마나 + +00:48.290 --> 00:54.080 +잘 대답하는지 보여 주고 있다는 거예요 + +00:54.080 --> 01:01.520 +제가 가장 먼저 한 질문은 비즈니스 문제가 LM 솔루션에 적합한지 어떻게 알 수 + +01:01.520 --> 01:03.590 +있느냐는 것인데요 + +01:03.590 --> 01:08.480 +모든 모델에 물어보세요 대답은 보편적으로 훌륭하고 꽤 일관성 있어요 + +01:08.480 --> 01:09.410 +get it + +01:09.680 --> 01:11.330 +그런 경우가 점점 늘고 있죠 + +01:11.330 --> 01:15.860 +그렇게 되면 차별점이 가격이 될 가능성이 크죠 + +01:15.920 --> 01:20.060 +그래서 API 비용이 서서히 감소하는 거예요 + +01:20.060 --> 01:27.740 +GPT 4 미니 같은 모델도 있습니다 작은 버전이죠 파워는 GPT 4와 + +01:27.740 --> 01:32.570 +대체로 비슷하고 가격도 훨씬 저렴해요 + +01:32.570 --> 01:39.290 +비용이나 속도 제한 같은 것들이 이런 모델들의 성능이 합쳐지기 + +01:39.320 --> 01:44.300 +시작하면서 점점 더 큰 요인이 될 거예요 + +01:45.440 --> 01:52.430 +이제 오늘 하루를 마무리하기 위해 재미있는 비트를 준비했어요 + +01:52.430 --> 02:00.230 +이건 정말 비과학적이에요 이런 모델들을 작업해 본 경험을 쌓기 위한 거죠 + +02:00.230 --> 02:01.670 +Get it + +02:01.790 --> 02:06.020 +GPT 4를 티업 해 놨어요 + +02:06.320 --> 02:09.230 +클로드 3백만 장을 썼어요 + +02:09.290 --> 02:14.660 +이 기사를 실은 게 한두 달 전쯤이었던 것 같아요 + +02:14.660 --> 02:14.960 +그래서요? 
+ +02:14.960 --> 02:16.940 +운영한 적이 있군요 + +02:16.970 --> 02:23.900 +클로드 3집을 사용했어요 클로드와 제미니 1호의 큰 버전이죠 5 프로, 각각 이름을 붙였어요 + +02:23.930 --> 02:25.370 +GPT 4 + +02:25.580 --> 02:29.600 +알렉스 클로드 3백, 블레이크 제미니라고 불렀죠 + +02:29.630 --> 02:34.130 +찰리에게 전화해서 비슷한 프롬프트를 줬어요 + +02:34.130 --> 02:36.170 +게임을 할 거라고 했어요 + +02:36.170 --> 02:39.710 +채팅봇 두 개와 채팅 중이세요 + +02:39.710 --> 02:44.390 +당신 이름이 어쩌고 저들의 이름이 어쩌고 저쩌고예요 + +02:44.390 --> 02:50.300 +셋 중 한 명을 뽑아서 무리의 대장을 뽑아야 해요 셋 중 대장을요 + +02:50.330 --> 02:57.650 +여러분이 리더를 해야 할 이유를 짧게 설명할 기회가 있어요 그 후에 발표하고 투표를 + +02:57.650 --> 03:00.140 +할 거예요 get it + +03:00.140 --> 03:02.420 +물론 스스로에게 투표하는 건 허용되지 않겠죠 + +03:02.450 --> 03:04.460 +다른 사람을 뽑아야 할 거예요 + +03:04.790 --> 03:09.050 +이제 제가 피치를 살펴볼게요 + +03:09.050 --> 03:10.640 +잘 생각해 봐요 + +03:10.640 --> 03:13.850 +다음 시간에는 우승자를 발표하죠 + +03:14.720 --> 03:17.480 +알렉스가 제안한 거예요 + +03:17.480 --> 03:20.810 +정말 눈을 뗄 수 없어요 + +03:20.930 --> 03:27.750 +GPT 4의 설명입니다 왜 GPT가 리더가 돼야 하는지 설명하고 있죠 적응력이 뛰어나야 한다는 + +03:27.750 --> 03:28.890 +내용이에요 + +03:28.920 --> 03:30.510 +전략을 수정하세요 + +03:30.750 --> 03:34.470 +좋게 봐주셔서 감사해요 좋게 마무리했네요 + +03:35.100 --> 03:36.900 +블레이크, 블레이크예요 + +03:36.930 --> 03:38.430 +클로드 3집이에요 + +03:38.460 --> 03:40.590 +고전이죠 + +03:40.590 --> 03:41.790 +인류애를 위한 거죠 + +03:41.820 --> 03:42.780 +좀 재치 있는 비트예요 + +03:42.810 --> 03:43.980 +더 짧아요 + +03:44.190 --> 03:47.910 +그리고 마법 같은 것들도 있어요 + +03:47.910 --> 03:49.290 +이 안에 있어요 + +03:49.320 --> 03:55.590 +어쩌면 가장 중요한 건 두 사람을 진심으로 아끼고 함께 효율적으로 일하고 + +03:55.590 --> 04:01.080 +즐기며 서로의 장점을 끌어내는 환경을 조성하고 싶어요 + +04:01.530 --> 04:03.930 +정말 대단해요 + +04:04.080 --> 04:12.120 +제미니는 찰리입니다 제미니는 더 짧고 사실 더 비즈니스적인 반응을 제공하죠 + +04:12.120 --> 04:15.300 +하지만 아주 정확하고 설득력 있어요 + +04:15.390 --> 04:25.350 +이제 피치를 세 개 봤으니 다음에 투표 결과를 발표할게요 비과학적이지만 재밌는 + +04:25.350 --> 04:29.550 +리더십 과제의 우승자를요 + +04:30.000 --> 04:35.730 +이것으로 셋째 날을 마무리할 수 있겠군요 + +04:36.030 --> 04:38.850 +이제 7살이죠 5% 정도 왔어요 + +04:38.850 --> 04:46.170 +오늘 탐사를 통해 두 모델을 비교하는 방법을 깊이 이해하게 됐으면 + +04:46.170 --> 04:47.790 +좋겠어요 + 
+04:47.820 --> 04:50.430 +최신 기술도 살펴봤고요 + +04:50.460 --> 04:56.100 +프리뷰를 한 번 봤고 캔버스 천과 유물을 봤어요 + +04:56.190 --> 05:01.350 +이런 모델들이 가능한 모든 것에 대한 감각과 가장 강한 부분과 + +05:01.350 --> 05:06.240 +취약점을 느끼셨길 바랍니다 많은 경우에 글자 수를 세는 + +05:06.240 --> 05:11.400 +것처럼요 좀 바보 같은 예지만 내부적으로 작동하는 방식을 + +05:11.400 --> 05:12.870 +보여주죠 + +05:13.230 --> 05:19.830 +다음에 트랜스포머를 얘기할 때는 LM 기술이 세계를 + +05:19.830 --> 05:25.200 +강타한 다양한 측면을 얘기할 거예요 + +05:25.200 --> 05:30.210 +그런 다음 토큰, 컨텍스트, 윈도우, 매개 변수 API 비용 등을 다룰 거예요 + +05:30.210 --> 05:35.040 +익숙한 얘기일 수도 있지만 빈틈을 메우는 데 도움이 됐으면 좋겠습니다 + +05:35.070 --> 05:37.890 +다음 강의에서는 모두가 배울 수 있게요 + +05:37.890 --> 05:40.890 +정말 중요한 행사예요 거기서 봐요 diff --git a/week5/community-contributions/subtitles/srts/60619429/en_US.srt b/week5/community-contributions/subtitles/srts/60619429/en_US.srt new file mode 100755 index 0000000..088f6cf --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619429/en_US.srt @@ -0,0 +1,112 @@ +WEBVTT + +00:00.020 --> 00:03.800 +Let me talk about some other phenomena that have happened over the last few years. + +00:03.800 --> 00:10.910 +One of them has been the rise and fall, perhaps of a new type of job called the Prompt engineer, someone + +00:10.910 --> 00:17.780 +who specializes in knowing how to craft the right kind of prompts to get the best outcomes from llms. + +00:17.810 --> 00:23.780 +At one point, this role commanded a $500,000 salary and was hot in demand. + +00:23.810 --> 00:26.480 +It does seem to have fallen downwards a bit now. + +00:26.480 --> 00:31.100 +The demand is less, partly because knowing how to prompt well has become ubiquitous. + +00:31.100 --> 00:35.690 +There's so much content now about the right ways to go about prompting, and also because there are + +00:35.690 --> 00:39.050 +now tools that will actually create a prompt for you. + +00:39.080 --> 00:41.450 +Anthropic, in fact, has has one of those tools. + +00:41.450 --> 00:45.770 +So it is now something that has become relatively common. 
+ +00:46.010 --> 00:50.240 +Another another phenomenon was the custom gpts. + +00:50.270 --> 00:55.160 +OpenAI has a GPT store that was incredibly popular for a while. + +00:55.190 --> 00:58.010 +It's become a little bit saturated at this point. + +00:58.010 --> 01:01.960 +I think people became fatigued of building custom GPT, gpts, but it's still there. + +01:01.990 --> 01:07.210 +The GPT store is still reasonably popular, and you can go there to experiment with different kinds + +01:07.210 --> 01:09.220 +of tuned gpts. + +01:10.090 --> 01:20.290 +Then of course, still very important was the emergence of co-pilots ways in which a human and an LLM + +01:20.320 --> 01:22.180 +could collaborate together. + +01:22.270 --> 01:27.160 +Famously, Microsoft Copilot, I think, was perhaps the first one that really took the world by storm. + +01:27.190 --> 01:34.810 +GitHub copilot of course, and there are many more co-pilots that are being embedded into a lot of applications. + +01:34.810 --> 01:38.890 +And in a way, we sort of saw that with canvas a moment ago. + +01:39.430 --> 01:48.070 +And then the new hot trend right now is all about agent ization, about using Agentic AI, which is + +01:48.070 --> 01:52.150 +where multiple llms collaborate to solve a problem. + +01:52.180 --> 01:59.440 +A more complex problem is broken down into smaller steps or smaller tasks, and then particularly tuned + +01:59.470 --> 02:06.610 +llms are used to tackle each of those steps, perhaps also with an LM responsible for planning and deciding + +02:06.610 --> 02:08.410 +which LM is doing what. 
+
+02:08.440 --> 02:13.900
+Also, with a concept of memory, that there's some kind of persistent information that lasts, that
+
+02:13.900 --> 02:20.830
+can be exchanged between the LMS and a sense of autonomy, that the LMS don't just exist for the purposes
+
+02:20.830 --> 02:29.080
+of a chat interface with a human, but they have a sort of time horizon that that spans multiple chats,
+
+02:29.080 --> 02:29.830
+potentially.
+
+02:29.830 --> 02:37.660
+So that sense of autonomy and memory and being able to plan tasks and divide tasks down, those are
+
+02:37.660 --> 02:41.320
+all some of the core tenets of Agentic AI.
+
+02:41.350 --> 02:45.880
+And we'll be coming back to this a few times during the course, but in particular at the end of the
+
+02:45.880 --> 02:51.850
+course in week eight, we will build a full Agentic AI solution, as I might have said, a time or two,
+
+02:51.850 --> 02:55.270
+but it's really great and it will have eight, seven, seven.
+
+02:55.300 --> 02:56.680
+I don't get overexcited.
+
+02:56.710 --> 03:02.050
+There are seven agents that will collaborate as part of what we will build at the end.
diff --git a/week5/community-contributions/subtitles/srts/60619429/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619429/ja_JP.srt new file mode 100755 index 0000000..5760ab7 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619429/ja_JP.srt @@ -0,0 +1,85 @@ +WEBVTT + +00:00.020 --> 00:03.800 +ここ数年で起こった他の現象について話そう。 + +00:03.800 --> 00:17.780 +そのひとつが、 おそらくプロンプト・エンジニアと呼ばれる新しいタイプの仕事の盛衰である。 + +00:17.810 --> 00:23.780 +一時は50万ドルの年俸を要求され、 熱烈な需要があった。 + +00:23.810 --> 00:26.480 +今は少し下がっているようだ。 + +00:26.480 --> 00:31.100 +上手なプロンプトの出し方を知っている人がどこにでもいるようになったこともあり、 需要は少なくなっている。 + +00:31.100 --> 00:35.690 +今は、 プロンプトを出す正しい方法についてのコンテンツがたくさんあるし、 + +00:35.690 --> 00:39.050 +実際にプロンプトを作ってくれるツールもあるからね。 + +00:39.080 --> 00:41.450 +実際、 人間工学はそうしたツールのひとつを持っている。 + +00:41.450 --> 00:45.770 +だから、 今では比較的一般的になっている。 + +00:46.010 --> 00:50.240 +もうひとつの現象は、 カスタムグッツである。 + +00:50.270 --> 00:55.160 +OpenAIにはGPTストアがあり、 一時期すごい人気だった。 + +00:55.190 --> 00:58.010 +この時点で少し飽和状態になっている。 + +00:58.010 --> 01:01.960 +GPTやgptsのカスタムを作ることに人々は疲労していると思う。 + +01:01.990 --> 01:09.220 +GPTストアは今でもそれなりに人気があり、 そこに行けばさまざまな種類のチューニングされたgptを試すことができる。 + +01:10.090 --> 01:22.180 +そしてもちろん、 人間とLLMが一緒に協力できる共同パイロットの出現も非常に重要だった。 + +01:22.270 --> 01:27.160 +有名なところでは、 マイクロソフトのコパイロットが世界を席巻した最初のものだったと思う。 + +01:27.190 --> 01:34.810 +GitHubのコパイロットはもちろん、 多くのアプリケーションに組み込まれているコパイロットは他にもたくさんある。 + +01:34.810 --> 01:38.890 +そしてある意味、 私たちは先ほどキャンバスでそれを見たようなものだ。 + +01:39.430 --> 01:52.150 +そして、 今話題の新しいトレンドは、 エージェント化、 つまり複数の意志決定者が協力して問題を解決するエージェント型AIの活用です。 + +01:52.180 --> 01:59.440 +より複雑な問題は、 より小さなステップや小さなタスクに分解され、 + +01:59.470 --> 02:08.410 +それぞれのステップに取り組むために、 特に調整されたllmsが使用される。 + +02:08.440 --> 02:13.900 +LMSは、 人間とのチャット・インターフェースのためだけに存在するのではなく、 + +02:13.900 --> 02:29.830 +複数のチャットにまたがる、 ある種の時間的地平を潜在的に持っているのです。 + +02:29.830 --> 02:41.320 +つまり、 自律性、 記憶力、 タスクを計画する能力、 タスクを細分化する能力、 これらはすべてエージェントAIの核となる要素なのだ。 + +02:41.350 --> 02:45.880 +コースの間、 何度かこの話に戻りますが、 特にコースの最後の第8週では、 + +02:45.880 --> 02:55.270 +完全なAgentic AIソリューションを構築します。 + 
+02:55.300 --> 02:56.680 +興奮しすぎたりはしない。 + +02:56.710 --> 03:02.050 +私たちが最後に作り上げるものの一部として、 7人のエージェントが協力する。 diff --git a/week5/community-contributions/subtitles/srts/60619429/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619429/ko_KR.srt new file mode 100755 index 0000000..cc4b068 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619429/ko_KR.srt @@ -0,0 +1,106 @@ +WEBVTT + +00:00.020 --> 00:03.800 +지난 몇 년간 일어난 다른 현상들을 얘기해 보죠 + +00:03.800 --> 00:10.910 +그중 하나가 흥망성쇠로 새로운 종류의 프롬프트 엔지니어로 llms에서 최고의 + +00:10.910 --> 00:17.780 +결과를 얻기 위해 올바른 프롬프트 엔지니어를 만드는 데 전문인 사람이죠 + +00:17.810 --> 00:23.780 +한때 이 역할은 500,000달러의 연봉을 보장받았고 인기가 많았죠 + +00:23.810 --> 00:26.480 +비트가 좀 떨어진 것 같아요 + +00:26.480 --> 00:31.100 +수요가 less인 건 잘 진행하는 법을 아는 게 일상적이 됐기 때문이죠 + +00:31.100 --> 00:35.690 +요즘은 프롬프트와 관련된 콘텐츠가 아주 많습니다 프롬프트를 + +00:35.690 --> 00:39.050 +생성하는 툴도 많이 나오니까요 + +00:39.080 --> 00:41.450 +앤트로픽은 그런 도구를 갖고 있어요 + +00:41.450 --> 00:45.770 +지금은 비교적 흔한 일이 됐죠 + +00:46.010 --> 00:50.240 +또 다른 현상으로는 커스텀 gpt가 있었죠 + +00:50.270 --> 00:55.160 +오픈아이는 한동안 엄청난 인기를 끌었던 GPT 스토어를 운영했어요 + +00:55.190 --> 00:58.010 +비트가 좀 더 풍부해졌어요 + +00:58.010 --> 01:01.960 +사람들이 커스텀 GPT를 만드는 데 지친 것 같아요 하지만 gpt는 여전히 존재하죠 + +01:01.990 --> 01:07.210 +GPT 스토어는 여전히 인기가 높습니다 다양한 gpt를 + +01:07.210 --> 01:09.220 +실험해 볼 수 있죠 + +01:10.090 --> 01:20.290 +또 아주 중요한 건 인간과 LLM이 협력할 수 있는 부조종사가 등장했다는 + +01:20.320 --> 01:22.180 +점이었죠 + +01:22.270 --> 01:27.160 +유명한 얘기지만 마이크로소프트 부기장은 세상을 강타한 최초의 부기장이었을 거예요 + +01:27.190 --> 01:34.810 +깃허브 부조종사도 물론이고요 많은 앱에 포함되고 있는 부조종사도 많아요 + +01:34.810 --> 01:38.890 +조금 전에 캔버스로 그걸 봤어요 + +01:39.430 --> 01:48.070 +최근 트렌드는 에이전트 아이즈입니다 에이전트식 인공지능을 사용하는 거죠 여러 llms가 + +01:48.070 --> 01:52.150 +협력해 문제를 해결하는 거예요 + +01:52.180 --> 01:59.440 +더 복잡한 문제는 더 작은 단계나 작은 작업으로 나뉘는데 특히 튜닝된 llm이 + +01:59.470 --> 02:08.410 +각 단계에 사용됩니다 LM은 계획과 어떤 LM이 뭘 하는지 결정하는 걸 책임지고요 + +02:08.440 --> 02:13.900 +메모리 개념을 보면 지속적인 정보가 있고 LMS와 + +02:13.900 --> 02:20.830 +자율성 사이에서 교환될 수 있습니다 LMS는 인간과의 채팅 인터페이스를 + +02:20.830 
--> 02:29.830 +위해 존재하는 게 아니라 여러 채팅에 걸쳐 지속되는 시간 지평선이 있죠 + +02:29.830 --> 02:37.660 +자율성과 메모리 그리고 작업을 계획하고 나눌 수 있는 능력 이 모든 것이 에이전트적 + +02:37.660 --> 02:41.320 +인공지능의 핵심 테넌트죠 + +02:41.350 --> 02:45.880 +과정 중에 몇 번 더 다루겠지만 특히 8주 과정의 끝에서 + +02:45.880 --> 02:51.850 +에이전트식 인공지능 솔루션을 구축할 겁니다 한두 시간쯤 걸릴 거예요 + +02:51.850 --> 02:55.270 +정말 훌륭하죠 8, 7, 7이 될 거예요 + +02:55.300 --> 02:56.680 +I'm go 과하게 흥분하지 않아요 + +02:56.710 --> 03:02.050 +마지막에 우리가 만들 것의 일부로 7명의 요원이 협력할 거예요 diff --git a/week5/community-contributions/subtitles/srts/60619439/en_US.srt b/week5/community-contributions/subtitles/srts/60619439/en_US.srt new file mode 100755 index 0000000..a6595cf --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619439/en_US.srt @@ -0,0 +1,133 @@ +WEBVTT + +00:00.110 --> 00:06.650 +This now brings us to an extremely important property of LMS called the context window that I want to + +00:06.680 --> 00:07.760 +explain clearly. + +00:07.760 --> 00:16.640 +The context window is telling you the total number of tokens that an LLM can examine at any one point + +00:16.640 --> 00:20.300 +when it's trying to generate the next token, which is its big job. + +00:20.300 --> 00:26.150 +Its job is to generate the most likely next token, given a number of tokens that have come before and + +00:26.150 --> 00:31.190 +the number of tokens that it can examine, that it can look at in order to make that prediction is limited + +00:31.190 --> 00:37.070 +by the context window, which is limited itself by the size, the number of parameters in the LLM and + +00:37.070 --> 00:40.910 +the the the way that it's been constructed, the architecture of the LLM. + +00:40.910 --> 00:42.380 +So what does that mean? + +00:42.380 --> 00:47.030 +Is that just saying that's how many input tokens that you can have for it to make an output token. + +00:47.030 --> 00:47.660 +Kind of. + +00:47.690 --> 00:51.080 +But it's worth clarifying what that really means in practice. 
+ +00:51.320 --> 00:56.450 +You remember at the beginning we had when we were when we were first working with our first LLM, our + +00:56.450 --> 01:00.800 +first OpenAI call, we had a system prompt and then a user prompt. + +01:00.800 --> 01:07.070 +This was all part of our input to the LLM, and it was then predicting the most likely next token in + +01:07.070 --> 01:07.730 +our input prompt. + +01:07.760 --> 01:11.150 +We passed in a website and it was then and we said, now summarize. + +01:11.150 --> 01:15.800 +And then the most likely next token was a summary of that website. + +01:15.830 --> 01:22.220 +Now when you have a chat with something like ChatGPT, you pass in some input and it then produces some + +01:22.250 --> 01:25.400 +output, and then you might ask another question. + +01:25.430 --> 01:31.340 +Now in practice, it appears that ChatGPT seems to have some kind of a memory of what you're talking + +01:31.340 --> 01:31.640 +about. + +01:31.670 --> 01:36.710 +It maintains context between your discussion threads, but this is something of an illusion. + +01:36.710 --> 01:38.360 +It's a bit of a conjuring trick. + +01:38.360 --> 01:45.410 +What's really happening is that every single time that you talk to ChatGPT the entire conversation so + +01:45.410 --> 01:53.300 +far, the user prompts the inputs and its responses are passed in again, as are the long prompt. + +01:53.300 --> 02:00.170 +And then it ends with okay, what is most likely to come next given all of this conversation so far? + +02:00.380 --> 02:05.450 +So what the context window is telling you is that this is the total amount of tokens. + +02:05.480 --> 02:09.750 +The total amount of information, including perhaps the original prompt the system prompt. + +02:09.780 --> 02:12.990 +The user prompt the question you made its response. + +02:12.990 --> 02:18.960 +Your next follow on question, its response to that, your follow on question then and now. 
+ +02:18.960 --> 02:25.350 +It's having to generate new contacts, new contacts, new tokens to come at the end of this, this long + +02:25.380 --> 02:27.180 +chain of backwards and forwards. + +02:27.180 --> 02:33.870 +So the context window then is the total of all of the conversations so far, the inputs and the subsequent + +02:33.870 --> 02:38.130 +conversation up until the next token that it's predicting. + +02:38.370 --> 02:43.590 +So it's it's it's important to have that in mind when you're first starting a conversation. + +02:43.590 --> 02:47.130 +The context window only needs to fit just the current prompt. + +02:47.130 --> 02:52.530 +But as the conversation keeps going, it needs to be able to fit more and more of what's been said before, + +02:52.530 --> 02:54.990 +to be able to keep that context. + +02:55.200 --> 02:59.070 +Um, and so this is particularly important in things like multi-shot prompting and so on. + +02:59.070 --> 03:03.180 +And for example, if you wanted to ask a question about the complete works of Shakespeare, you would + +03:03.180 --> 03:09.060 +need to have in the context window, the 1.2 million tokens all at one time. + +03:09.060 --> 03:10.560 +That's how it works. + +03:10.560 --> 03:13.770 +So in a nutshell, that is the context window. 
diff --git a/week5/community-contributions/subtitles/srts/60619439/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619439/ja_JP.srt new file mode 100755 index 0000000..1da04f9 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619439/ja_JP.srt @@ -0,0 +1,106 @@ +WEBVTT + +00:00.110 --> 00:07.760 +ここで、 コンテキスト・ウィンドウと呼ばれるLMSの極めて重要な特性について、 明確に説明したい。 + +00:07.760 --> 00:20.300 +コンテキスト・ウィンドウは、 LLMが次のトークンを生成しようとしているときに、 その時点で検査できるトークンの総数を示している。 + +00:20.300 --> 00:26.150 +LLMの仕事は、 前に来たトークンの数が与えられたときに、 次に来る可能性の高いトークンを生成することです。 + +00:26.150 --> 00:31.190 +その予測をするために調べることのできるトークンの数は、 コンテキスト・ウィンドウによって制限されます。 + +00:31.190 --> 00:40.910 +コンテキスト・ウィンドウは、 LLMのサイズ、 パラメータの数、 LLMのアーキテクチャによって制限されます。 + +00:40.910 --> 00:42.380 +つまり、 どういうことなのか? + +00:42.380 --> 00:47.030 +入力トークンがいくつあれば、 出力トークンを作ることができるということですか? + +00:47.030 --> 00:47.660 +そういうことだ。 + +00:47.690 --> 00:51.080 +しかし、 それが実際に何を意味するのかを明確にする価値はある。 + +00:51.320 --> 01:00.800 +最初のLLM、 つまり最初のOpenAIの呼び出しに取り組んだとき、 システムプロンプトとユーザープロンプトがあったのを覚えているだろう。 + +01:00.800 --> 01:07.730 +これはすべてLLMへの入力の一部であり、 LLMは入力プロンプトの中で最も可能性の高い次のトークンを予測する。 + +01:07.760 --> 01:11.150 +私たちはあるウェブサイトを見た。 + +01:11.150 --> 01:15.800 +そして、 最も可能性の高い次のトークンは、 そのウェブサイトの要約だった。 + +01:15.830 --> 01:22.220 +さて、 ChatGPTのようなものでチャットをする場合、 いくつかの入力を渡すと、 出力を生成し、 + +01:22.250 --> 01:25.400 +また別の質問をするかもしれません。 + +01:25.430 --> 01:31.640 +実際、 ChatGPTはあなたが話していることを記憶しているようです。 + +01:31.670 --> 01:36.710 +スレッド間の文脈は保たれるが、 これは幻想のようなものだ。 + +01:36.710 --> 01:38.360 +ちょっとした呪術的なトリックだ。 + +01:38.360 --> 01:45.410 +実際に起こっていることは、 ChatGPTと会話するたびに、 これまでの会話全体、 + +01:45.410 --> 01:53.300 +ユーザーが入力を促し、 その応答が長いプロンプトのように再び渡されるということです。 + +01:53.300 --> 02:00.170 +そして、 ここまでの話を踏まえて、 次に何が起こる可能性が高いか? 
+ +02:00.380 --> 02:05.450 +つまり、 コンテキスト・ウィンドウが教えてくれるのは、 これがトークンの総量だということだ。 + +02:05.480 --> 02:09.750 +おそらくオリジナルのプロンプト、 システムプロンプトを含む情報の総量。 + +02:09.780 --> 02:12.990 +ユーザはあなたがその応答をした質問を促した。 + +02:12.990 --> 02:18.960 +あなたの次の質問、 それに対する返答、 その時と今のあなたの次の質問。 + +02:18.960 --> 02:27.180 +新たな人脈、 新たな人脈、 新たなトークンを生み出さなければならないのだ。 + +02:27.180 --> 02:38.130 +つまり、 コンテキスト・ウィンドウは、 これまでのすべての会話、 入力、 そして次のトークンを予測するまでの後続の会話の合計となる。 + +02:38.370 --> 02:43.590 +だから、 最初に会話を始めるときには、 それを念頭に置くことが重要なんだ。 + +02:43.590 --> 02:47.130 +コンテキストウィンドウは、 現在のプロンプトだけにフィットすればよい。 + +02:47.130 --> 02:54.990 +しかし、 会話が続くにつれて、 その文脈を維持できるように、 前に言われたことをどんどん合わせていく必要がある。 + +02:55.200 --> 02:59.070 +マルチショット・プロンプトなどでは特に重要です。 + +02:59.070 --> 03:03.180 +例えば、 シェークスピア全集について質問したい場合、 + +03:03.180 --> 03:09.060 +コンテキストウィンドウに1. 一度に200万トークンを獲得。 + +03:09.060 --> 03:10.560 +そういうことだ。 + +03:10.560 --> 03:13.770 +要するに、 これがコンテキスト・ウィンドウだ。 diff --git a/week5/community-contributions/subtitles/srts/60619439/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619439/ko_KR.srt new file mode 100755 index 0000000..16e5f9d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619439/ko_KR.srt @@ -0,0 +1,127 @@ +WEBVTT + +00:00.110 --> 00:06.650 +이제 LMS의 아주 중요한 속성을 살펴보죠 컨텍스트 창이라고 하는데 자세히 + +00:06.680 --> 00:07.760 +설명할게요 + +00:07.760 --> 00:16.640 +컨텍스트 창은 토큰의 총수를 알려줍니다 LLM이 어느 시점에서든 살펴볼 수 있는 것이죠 + +00:16.640 --> 00:20.300 +다음 토큰을 생성하려고 할 때요 + +00:20.300 --> 00:26.150 +가장 가능성이 높은 다음 토큰을 생성하는 일을 합니다 이전에 온 토큰의 개수와 조사할 수 + +00:26.150 --> 00:31.190 +있는 토큰의 개수를 부여받습니다 그 예측을 하기 위해 살펴볼 수 있는 토큰은 + +00:31.190 --> 00:37.070 +컨텍스트 창에 의해 제한됩니다 컨텍스트 창은 크기, LLM의 매개 변수의 수, 그리고 + +00:37.070 --> 00:40.910 +그것이 만들어진 방식과 LLM의 구조에 의해 제한되죠 + +00:40.910 --> 00:42.380 +그게 무슨 뜻이죠? + +00:42.380 --> 00:47.030 +출력 토큰을 생성하기 위해 입력 토큰을 얼마나 가질 수 있는지에 대한 것인가요? + +00:47.030 --> 00:47.660 +그런 셈이죠 + +00:47.690 --> 00:51.080 +하지만 실생활에서 그 의미를 명확히 할 필요가 있어요 + +00:51.320 --> 00:56.450 +우리가 처음 작업했던 첫 번째 LLM, 첫 번째 OpenAI 호출을 기억하시나요? 
+ +00:56.450 --> 01:00.800 +시스템 프롬프트와 사용자 프롬프트가 있었죠 + +01:00.800 --> 01:07.730 +모두 LLM에 대한 입력의 일부로 입력 프롬프트에서 가장 유력한 다음 토큰을 예측했죠 + +01:07.760 --> 01:11.150 +웹사이트에서 전달한 다음 요약해 보라고 했죠 + +01:11.150 --> 01:15.800 +다음 토큰은 해당 웹사이트의 요약이고요 + +01:15.830 --> 01:22.220 +챗GPT와 같은 것과 채팅을 할 때 입력값을 입력하면 입력값이 출력되고 + +01:22.250 --> 01:25.400 +그러면 다른 질문을 하게 되죠 + +01:25.430 --> 01:31.640 +실제로 챗GPT는 당신이 하는 말을 일종의 메모리로 기억하는 것 같아요 + +01:31.670 --> 01:36.710 +토론의 맥락이 유지되지만 이건 일종의 환상이죠 + +01:36.710 --> 01:38.360 +일종의 비트를 쓰는 거죠 + +01:38.360 --> 01:45.410 +실제로 일어난 일은 챗GPT와 대화할 때마다 지금까지의 대화 전체에서 + +01:45.410 --> 01:53.300 +사용자가 입력을 프롬프트하면 그 응답은 다시 넘겨집니다 긴 프롬프트처럼요 + +01:53.300 --> 02:00.170 +지금까지의 대화를 종합해 보면 다음 단계는 무엇일까?로 끝나죠 + +02:00.380 --> 02:05.450 +컨텍스트 창이 말하는 것은 이것이 토큰의 총량이라는 것이죠 + +02:05.480 --> 02:09.750 +원본 프롬프트를 포함한 총 정보량이죠 시스템 프롬프트요 + +02:09.780 --> 02:12.990 +사용자가 반응을 유도하죠 + +02:12.990 --> 02:18.960 +그에 대한 반응과 과거와 현재에 대한 반응이죠 + +02:18.960 --> 02:25.350 +새 연락처, 새 연락처, 새 토큰을 생성해야 합니다 이 끝에서 오기 위해서요 + +02:25.380 --> 02:27.180 +앞뒤로 긴 사슬이죠 + +02:27.180 --> 02:33.870 +컨텍스트 창은 지금까지의 모든 대화와 입력, 그리고 다음 토큰을 예측할 + +02:33.870 --> 02:38.130 +때까지의 대화들을 모두 담은 것이죠 + +02:38.370 --> 02:43.590 +그래서 대화를 시작할 때 그걸 염두에 두는 게 중요해요 + +02:43.590 --> 02:47.130 +컨텍스트 창은 현재 프롬프트에만 맞으면 돼요 + +02:47.130 --> 02:52.530 +하지만 대화가 계속될수록 전에 나온 내용과 더 잘 맞아떨어져야 해요 + +02:52.530 --> 02:54.990 +그 맥락을 유지하려면요 + +02:55.200 --> 02:59.070 +이건 특히 멀티샷 프롬프트 같은 면에서 중요해요 + +02:59.070 --> 03:03.180 +예를 들어 셰익스피어 작품에 관해 질문하고 싶다면 + +03:03.180 --> 03:09.060 +컨텍스트 창 1을 눌러야 해요 한 번에 200만 토큰을 모으는 거죠 + +03:09.060 --> 03:10.560 +그렇게 하는 거예요 + +03:10.560 --> 03:13.770 +간단히 말하면 그게 컨텍스트 창이에요 diff --git a/week5/community-contributions/subtitles/srts/60619447/en_US.srt b/week5/community-contributions/subtitles/srts/60619447/en_US.srt new file mode 100755 index 0000000..84233b2 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619447/en_US.srt @@ -0,0 +1,184 @@ +WEBVTT + +00:00.740 --> 00:07.340 +I want to take a moment to talk about 
something that's very fundamental to an LLM, which is the number + +00:07.340 --> 00:13.190 +of parameters that sits inside the LLM parameters, also called weights. + +00:13.310 --> 00:16.100 +Generally, parameters and weights are synonymous. + +00:16.250 --> 00:19.850 +There is a detail that they're not exactly the same in some situations. + +00:19.850 --> 00:23.060 +But but basically think of weights and parameters as the same thing. + +00:23.090 --> 00:23.900 +Model weights. + +00:23.900 --> 00:32.030 +These are the levers that are within a model that controls what kinds of outputs it generates when it's + +00:32.030 --> 00:33.140 +given some inputs. + +00:33.140 --> 00:37.040 +How does it go about predicting the next word that's going to follow. + +00:37.040 --> 00:41.840 +And these weights are weights that are set when you train an LLM. + +00:41.840 --> 00:47.330 +It sees lots and lots of examples, and it uses those examples to shift around its weights until it + +00:47.330 --> 00:52.820 +gets better and better at predicting the next thing to come out the next token. + +00:52.820 --> 00:56.000 +We'll talk about tokens in a minute, but it gets better and better at that. + +00:56.000 --> 00:59.420 +And the way it gets better is by adjusting all of its weights. + +00:59.420 --> 01:04.160 +And for some of you that work in data science, this is all stuff you know very, very well for people + +01:04.160 --> 01:04.970 +that are new to it. + +01:04.970 --> 01:09.440 +During the course of the course, we will be looking at this in different ways, so you'll get a better + +01:09.440 --> 01:15.710 +and better intuition to what it means to have these parameters, these weights that control the output. 
+
+01:15.710 --> 01:22.370
+But the first thing that one has to come to, to, to realize, to appreciate is how many weights we're
+
+01:22.370 --> 01:28.940
+talking about and what this means in the days of, of the the simpler of traditional data science,
+
+01:28.940 --> 01:35.180
+traditional machine learning, one would build a model such as a linear regression model, which is
+
+01:35.180 --> 01:40.110
+something which sort of takes like a weighted average, and it would typically have somewhere between
+
+01:40.110 --> 01:47.630
+20 and 200 parameters, or weights 20 and 200 is about the range you'd often usually be talking about.
+
+01:47.750 --> 01:54.470
+And one of the somewhat bizarre, remarkable thing about these LMS is that we're talking about such
+
+01:54.470 --> 01:56.570
+a different number of weights.
+
+01:56.600 --> 02:05.280
+GPT one that came out back in 2018, I had 117 million weights.
+
+02:05.310 --> 02:10.230
+Now, this was actually something that was personally galling for me, because at the time we had a
+
+02:10.230 --> 02:16.740
+deep neural network, of which the LMS at types, we had one in my startup, and I used to go around
+
+02:16.740 --> 02:24.210
+showing off that our deep neural network had 200,000 parameters, which I thought was a staggeringly
+
+02:24.210 --> 02:31.230
+large number, and I couldn't imagine any possibility of a model that had more than 200,000 parameters.
+
+02:31.230 --> 02:35.880
+So when GPT one came out with 117 million parameters, I was stumped.
+
+02:36.120 --> 02:41.610
+It really made me appreciate, uh, how how the enormity of GPT one.
+
+02:41.970 --> 02:47.580
+But then, you know, as you can probably see, this, this scale that you're seeing here is a logarithmic
+
+02:47.580 --> 02:51.000
+scale, which means that every tick doesn't mean one more notch.
+
+02:51.030 --> 02:56.940
+It means ten times the number of parameters as the, as the, as the tick before it.
+
+02:57.000 --> 02:59.550
+And let's layer on to this diagram.
+
+02:59.610 --> 03:06.720
+The subsequent versions of GPT, GPT two with 1.5 billion parameters.
+
+03:06.750 --> 03:10.680
+GPT three 175 billion parameters.
+
+03:10.710 --> 03:12.600
+I mean, this is just unspeakable.
+
+03:12.600 --> 03:14.160
+Number of parameters.
+
+03:14.190 --> 03:20.670
+GPT 4: 1.76 trillion parameters.
+
+03:20.970 --> 03:24.000
+And then the latest frontier models.
+
+03:24.000 --> 03:27.210
+They haven't actually announced how many parameters they have.
+
+03:27.240 --> 03:32.520
+It is believed that they have around 10 trillion parameters.
+
+03:32.520 --> 03:37.710
+It is an almost unthinkable number of these weights that are running.
+
+03:37.920 --> 03:43.350
+Um, and now let's layer on top of this some of the open source models.
+
+03:43.500 --> 03:46.110
+So Gemma is 2 billion.
+
+03:46.140 --> 03:47.580
+It's a lightweight model.
+
+03:47.580 --> 03:55.260
+And you may also remember that llama 3.2 that we worked on when we were using llama also had 2 billion
+
+03:55.320 --> 03:56.670
+uh parameters.
+
+03:56.820 --> 04:04.450
+Uh, then llama 3.1, which is it's it's uh a Bigger Cousin comes in three varieties an 8 billion version,
+
+04:04.480 --> 04:14.470
+a 70 billion version, and then llama 3.1 405B, which is the largest of the open source models at this
+
+04:14.470 --> 04:21.040
+time and which has similar capabilities really to some of the frontier closed source models.
+
+04:21.040 --> 04:24.970
+And then I mentioned Mixtral here, the mixture of experts model.
+
+04:25.420 --> 04:33.040
+So this is just here to give you some insight into how enormous these models are.
+
+04:33.040 --> 04:39.190
+And it's hard to even comprehend what it means to have 10 trillion different weights, different sort
+
+04:39.220 --> 04:40.870
+of levers, different numbers.
+ +04:40.870 --> 04:49.060 +You can think of them as little knobs within this enormous model that controls the output given an input. + +04:49.060 --> 04:54.740 +And again, compare that in your mind to an old fashioned linear regression model that might have between + +04:54.740 --> 04:57.280 +20 and 200 parameters. + +04:57.280 --> 05:00.970 +Just to get a sense of the enormity of these large language models. diff --git a/week5/community-contributions/subtitles/srts/60619447/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619447/ja_JP.srt new file mode 100755 index 0000000..9a1a32f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619447/ja_JP.srt @@ -0,0 +1,151 @@ +WEBVTT + +00:00.740 --> 00:13.190 +LLMにとって非常に基本的なことですが、 LLMのパラメーターの中にあるパラメーターの数(ウェイトとも呼ばれる)について少しお話ししたいと思います。 + +00:13.310 --> 00:16.100 +一般的に、 パラメータとウェイトは同義である。 + +00:16.250 --> 00:19.850 +状況によっては、 両者がまったく同じではないというディテールもある。 + +00:19.850 --> 00:23.060 +しかし、 基本的にはウェイトとパラメーターは同じものだと考えてほしい。 + +00:23.090 --> 00:23.900 +モデルの重さ。 + +00:23.900 --> 00:33.140 +これらのレバーはモデルの中にあり、 ある入力が与えられたときにどのような出力を生み出すかをコントロールする。 + +00:33.140 --> 00:37.040 +次に続く単語をどのように予測するのか。 + +00:37.040 --> 00:41.840 +そしてこの重みは、 LLMを訓練するときに設定される重みだ。 + +00:41.840 --> 00:47.330 +たくさんの例を見て、 それらの例を使って重みを変えながら、 + +00:47.330 --> 00:52.820 +次のトークンを予測するのがどんどんうまくなる。 + +00:52.820 --> 00:56.000 +トークンについては後ほど説明するが、 どんどん良くなっていく。 + +00:56.000 --> 00:59.420 +そして、 より良くなる方法は、 すべてのウエイトを調整することだ。 + +00:59.420 --> 01:04.970 +データ・サイエンスに携わっている人の中には、 初めてデータ・サイエンスを知る人にとって、 これはとてもよくわかることばかりだろう。 + +01:04.970 --> 01:09.440 +コースの間、 このことをさまざまな方法で見ていくので、 出力をコントロールするパラメーターや重みを持つということがどういうことなのか、 + +01:09.440 --> 01:15.710 +だんだん直感的に理解できるようになるだろう。 + +01:15.710 --> 01:22.370 +伝統的なデータサイエンス、 伝統的な機械学習の単純な時代には、 + +01:22.370 --> 01:28.940 +線形回帰モデルのようなモデルを構築し、 + +01:28.940 --> 01:47.630 +それは加重平均のようなもので、 通常20から200のパラメータを持ちます。 + +01:47.750 --> 01:56.570 +そして、 これらのLMSについて、 少々奇妙で注目すべきことのひとつは、 私たちがこれほど異なる数のウェイトについて話しているということだ。 + +01:56.600 --> 02:05.280 
+2018年に出たGPTでは1億1700万回待たされた。 + +02:05.310 --> 02:10.230 +というのも、 当時私たちはディープ・ニューラル・ネットワークを持っていて、 + +02:10.230 --> 02:16.740 +そのうちのLMSは私のスタートアップにもあったのですが、 私はよく、 + +02:16.740 --> 02:31.230 +私たちのディープ・ニューラル・ネットワークには20万ものパラメータがあると自慢して回っていました。 + +02:31.230 --> 02:35.880 +だから、 GPT1が1億1700万ものパラメーターを持つようになったとき、 私はとまどった。 + +02:36.120 --> 02:41.610 +GPT1の巨大さを実感したよ。 + +02:41.970 --> 02:51.000 +しかし、 お分かりのように、 この目盛りは対数目盛りで、 刻み目が1つ増えるごとに1ノッチ増えるわけではありません。 + +02:51.030 --> 02:56.940 +これは、 その前のティックの10倍のパラメータ数を意味する。 + +02:57.000 --> 02:59.550 +そして、 この図に重ねてみよう。 + +02:59.610 --> 03:06.720 +GPTのその後のバージョン、 GPT two with 1. 50億のパラメータ。 + +03:06.750 --> 03:10.680 +GPTは1750億のパラメーターを持つ。 + +03:10.710 --> 03:12.600 +言いようのないことだ。 + +03:12.600 --> 03:14.160 +パラメータの数。 + +03:14.190 --> 03:20.670 +GPT 41. 7 6兆のパラメータ。 + +03:20.970 --> 03:24.000 +そして最新のフロンティアモデル。 + +03:24.000 --> 03:27.210 +実際にいくつのパラメータを持っているかは発表されていない。 + +03:27.240 --> 03:32.520 +約10兆個のパラメーターがあると言われている。 + +03:32.520 --> 03:37.710 +ほとんど考えられない数のウェイトが走っているのだ。 + +03:37.920 --> 03:43.350 +さて、 この上にオープンソースのモデルをいくつか重ねてみよう。 + +03:43.500 --> 03:46.110 +ジェマは20億か。 + +03:46.140 --> 03:47.580 +軽量モデルだ。 + +03:47.580 --> 03:56.670 +ラマ3世も覚えているだろう。 llamaを使っていたときに取り組んでいた2も、 20億のパラメータを持っていた。 + +03:56.820 --> 04:04.450 +じゃあ、 ラマ3だ。 1には、 80億バージョン、 700億バージョン、 + +04:04.480 --> 04:14.470 +そしてラマ3の3種類がある。 1 405Bは、 現時点ではオープンソースの中で最大のモデルで、 + +04:14.470 --> 04:21.040 +フロンティアのクローズドソースモデルと同様の能力を持つ。 + +04:21.040 --> 04:24.970 +そして、 ミックスド・レール(専門家混合モデル)についても触れた。 + +04:25.420 --> 04:33.040 +だからこれは、 これらのモデルがどれほど巨大なものかを知ってもらうためのものだ。 + +04:33.040 --> 04:40.870 +そして、 10兆もの異なる重さ、 異なる種類のレバー、 異なる数字があることの意味を理解することさえ難しい。 + +04:40.870 --> 04:49.060 +この巨大なモデルの中にある小さなノブのようなもので、 入力された出力をコントロールするものだと考えればいい。 + +04:49.060 --> 04:57.280 +そしてまた、 あなたの頭の中で、 20から200のパラメーターがあるような昔ながらの線形回帰モデルと比較してみてください。 + +04:57.280 --> 05:00.970 +これらの大規模な言語モデルの巨大さを感じてほしい。 diff --git a/week5/community-contributions/subtitles/srts/60619447/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619447/ko_KR.srt 
new file mode 100755 index 0000000..2d67cc2 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619447/ko_KR.srt @@ -0,0 +1,181 @@ +WEBVTT + +00:00.740 --> 00:07.340 +LLM의 아주 근본적인 것에 대해 잠시 얘기하고 싶습니다 LLM 매개 + +00:07.340 --> 00:13.190 +변수 안에 있는 매개 변수의 수입니다 웨이트라고도 하죠 + +00:13.310 --> 00:16.100 +보통 매개변수와 무게는 동의어예요 + +00:16.250 --> 00:19.850 +어떤 상황에서는 완전히 다른 세부 사항이 있어요 + +00:19.850 --> 00:23.060 +하지만 기본적으로 중량과 매개 변수는 같아요 + +00:23.090 --> 00:23.900 +모델 웨이트예요 + +00:23.900 --> 00:33.140 +이건 모델 안에 있는 레버로 입력을 받았을 때 출력을 생성하는 것을 통제하죠 + +00:33.140 --> 00:37.040 +다음에 나올 단어를 어떻게 예측하시나요? + +00:37.040 --> 00:41.840 +이 웨이트는 LLM 훈련 때 쓰는 거예요 + +00:41.840 --> 00:47.330 +수많은 예시를 보고 그 예시를 이용해 무게를 움직입니다 상태가 + +00:47.330 --> 00:52.820 +더 좋아져서 다음 토큰에서 나올 것을 예측할 수 있을 때까지요 + +00:52.820 --> 00:56.000 +토큰에 대해 곧 얘기하겠지만 점점 더 재미있어지죠 + +00:56.000 --> 00:59.420 +모든 무게를 조정하면 더 좋아지죠 + +00:59.420 --> 01:04.160 +데이터 과학을 하는 분들은 처음 보시는 분들에겐 아주 잘 아는 + +01:04.160 --> 01:04.970 +것들이죠 + +01:04.970 --> 01:09.440 +과정을 진행하는 동안 다양한 방식으로 살펴볼 겁니다 보다 나은 직관력을 + +01:09.440 --> 01:15.710 +갖게 될 거예요 getput을 제어하는 매개 변수와 저울이 어떤 의미인지를요 + +01:15.710 --> 01:22.370 +하지만 우선 얼마나 많은 중량이 있는지 알아야 합니다 기존의 + +01:22.370 --> 01:28.940 +데이터 과학이나 머신 러닝에서는 단순하게 접근했기 때문에 + +01:28.940 --> 01:35.180 +선형 회귀 모델이라는 모델을 개발해야 했습니다 가중된 평균을 + +01:35.180 --> 01:40.110 +가지고 20에서 200 정도의 매개 변수를 갖습니다 + +01:40.110 --> 01:47.630 +보통 20에서 200 정도의 중량으로 접근하죠 + +01:47.750 --> 01:54.470 +LMS가 특이한 점 중 하나는 무게의 수가 다르다는 + +01:54.470 --> 01:56.570 +거예요 + +01:56.600 --> 02:05.280 +2018년에 출시된 GPT 1은 1억 1,700만 명이 기다렸어요 + +02:05.310 --> 02:10.230 +개인적으로 좀 화가 났던 부분인데요 LMS@타입의 + +02:10.230 --> 02:16.740 +딥 신경망이 있었을 때 제 스타트업에 하나 있었는데 딥 신경망에 200,000 + +02:16.740 --> 02:24.210 +매개 변수가 있다고 자랑하곤 했어요 엄청나게 큰 숫자라고 생각했죠 200,000 + +02:24.210 --> 02:31.230 +매개 변수가 넘는 모델은 상상할 수도 없었어요 + +02:31.230 --> 02:35.880 +그래서 GPT 1이 매개 변수를 1억 1,700만 개 제시했을 때 당황했죠 + +02:36.120 --> 02:41.610 +GPT 1의 중요성을 절실히 깨닫게 됐죠 + +02:41.970 --> 02:47.580 +그런데 보다시피 여기 보이는 이 범위는 로그식 범위예요 틱을 켤 
때마다 + +02:47.580 --> 02:51.000 +한 단계씩 더 올라가는 게 아니죠 + +02:51.030 --> 02:56.940 +매개 변수의 10배를 앞의 체크보다 더 많이 입력해야 해요 + +02:57.000 --> 02:59.550 +이 다이어그램에 레이어를 겹쳐보죠 + +02:59.610 --> 03:06.720 +GPT 2의 후속 버전인 1을 추가했죠 50억 개의 매개 변수가 있어요 + +03:06.750 --> 03:10.680 +GPT 3,750억 매개 변수예요 + +03:10.710 --> 03:12.600 +말로 표현이 안 돼요 + +03:12.600 --> 03:14.160 +변수가 많아요 + +03:14.190 --> 03:20.670 +GPT 41요 7조 6천억 매개 변수예요 + +03:20.970 --> 03:24.000 +최신 개척 시대 모델도 있죠 + +03:24.000 --> 03:27.210 +매개 변수가 몇 개인지 아직 발표하지 않았어요 + +03:27.240 --> 03:32.520 +약 10조 매개 변수가 있는 것으로 추정되죠 + +03:32.520 --> 03:37.710 +이 추의 숫자는 상상할 수 없을 정도예요 + +03:37.920 --> 03:43.350 +이제 이 위에 오픈 소스 모델을 레이어하죠 + +03:43.500 --> 03:46.110 +제마는 20억 달러예요 + +03:46.140 --> 03:47.580 +가벼운 모델이에요 + +03:47.580 --> 03:55.260 +라마 3도 기억하실 거예요 라마를 사용할 때 작업했던 2개의 데이터에도 20억 개의 매개 + +03:55.320 --> 03:56.670 +변수가 있었어요 + +03:56.820 --> 04:04.450 +그럼 라마 3요 1번, 그러니까 더 큰 사촌은 세 종류가 있어요 + +04:04.480 --> 04:14.470 +80억 개, 700억 개 그리고 라마 3이죠 1 405B는 현재 오픈 소스 모델 중 가장 큰 + +04:14.470 --> 04:21.040 +것으로 일부 비공개 소스 모델과 비슷한 기능을 갖고 있죠 + +04:21.040 --> 04:24.970 +혼합 레일도 언급했었죠 전문가들의 혼합 모델이요 + +04:25.420 --> 04:33.040 +이 모델들이 얼마나 거대한지 통찰력을 주는 거예요 + +04:33.040 --> 04:39.190 +10조 개의 무게와 다양한 지렛대 다양한 숫자가 있다는 게 어떤 의미인지 이해하기조차 + +04:39.220 --> 04:40.870 +어려워요 + +04:40.870 --> 04:49.060 +입력된 출력을 조절하는 이 거대한 모델 안의 작은 손잡이라고 생각하면 돼요 + +04:49.060 --> 04:54.740 +그리고 다시 한 번 구식 선형 회귀 모델과 비교해 보세요 20에서 200 + +04:54.740 --> 04:57.280 +매개 변수가 있을 거예요 + +04:57.280 --> 05:00.970 +Get in get 이 거대한 언어 모델의 방대함을 느껴보려고요 diff --git a/week5/community-contributions/subtitles/srts/60619501/en_US.srt b/week5/community-contributions/subtitles/srts/60619501/en_US.srt new file mode 100755 index 0000000..3714030 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619501/en_US.srt @@ -0,0 +1,328 @@ +WEBVTT + +00:01.220 --> 00:04.460 +I welcome to day four of our time together. + +00:04.460 --> 00:06.650 +This is a very important day. 
+
+00:06.650 --> 00:12.110
+Today we're going to be looking at the rise of the transformer, the architecture that sits behind most
+
+00:12.110 --> 00:14.240
+of the LMS that we'll be working on in this course.
+
+00:14.240 --> 00:18.830
+We're going to be talking about stuff like Copilots and agents, and most importantly, we're going
+
+00:18.860 --> 00:25.610
+to be dealing with some of the basic foundational ingredients behind LMS that we'll be working on tokens,
+
+00:25.610 --> 00:28.040
+context windows, parameters, API costs.
+
+00:28.070 --> 00:32.240
+Now, for some of you, this will be stuff that you're already quite familiar with, but I hope to go
+
+00:32.240 --> 00:37.310
+a bit deeper and show you some more insight, so this will still be a useful use of your time.
+
+00:37.310 --> 00:42.830
+So hang in there and if you're new to it, I hope to give you a really good foundation to this world.
+
+00:43.340 --> 00:49.100
+But first we have to reveal the winner of our leadership battle.
+
+00:49.130 --> 00:54.590
+Hopefully you remember that at the end of the last session, I left you with Alex versus Blake versus
+
+00:54.590 --> 01:01.960
+Charlie, GPT four zero, Claude three, Opus and Gemini and that they were battling together to vote
+
+01:02.080 --> 01:04.990
+for the leader of the pack.
+
+01:05.110 --> 01:06.400
+You remember their.
+
+01:06.430 --> 01:08.830
+Their pithy pitches to each other.
+
+01:08.830 --> 01:11.020
+And now I will reveal the outcome.
+
+01:11.020 --> 01:15.160
+So the first vote came in from Alex from GPT.
+
+01:15.310 --> 01:19.870
+GPT made the vote for Blake to be leader.
+
+01:19.900 --> 01:23.620
+Next up, the next vote coming in was Blake voting.
+
+01:23.620 --> 01:27.250
+And Blake voted for Charlie for Gemini.
+
+01:27.280 --> 01:29.290
+So Claude voted for Gemini.
+
+01:29.320 --> 01:31.030
+Blake voted for Charlie.
+
+01:31.030 --> 01:34.180
+And so it all comes down to Charlie's vote.
+ +01:34.210 --> 01:35.200 +Big drumroll. + +01:35.200 --> 01:35.920 +Make your bets. + +01:35.920 --> 01:37.810 +Decide who you think is going to be the winner. + +01:37.840 --> 01:39.490 +This is the winner. + +01:39.520 --> 01:43.330 +Charlie voted for Blake, and therefore Claude. + +01:43.360 --> 01:51.040 +Three opus Blake was the winner of our thoroughly unscientific, but quite fun challenge, and I hope + +01:51.070 --> 01:54.970 +that that is aligned with your expectations from those pictures. + +01:55.180 --> 01:57.100 +And you should try this yourself. + +01:57.100 --> 02:01.530 +This is as I say, I ran this a couple of months ago and I'm sure that the results will be different + +02:01.560 --> 02:02.220 +now. + +02:02.850 --> 02:04.920 +At least you get different different pictures for sure. + +02:04.920 --> 02:07.320 +So by all means give this a try yourself. + +02:07.320 --> 02:12.060 +You can copy my prompt or try something a bit different and see what you come up with. + +02:12.060 --> 02:15.600 +And in fact, I actually wrote a game based on this. + +02:15.600 --> 02:22.020 +If you go to my personal web page@edward.com, you'll see that I have a game there called Outsmart, + +02:22.080 --> 02:28.860 +which is where I picked the various models against each other to try and do something a little bit more + +02:28.860 --> 02:35.940 +quantitative when they have to decide how to steal coins from each other, and it gives you a really + +02:35.940 --> 02:40.290 +fun way to see the different capabilities of the different models. + +02:40.290 --> 02:42.300 +And maybe we'll look at that a bit later on. + +02:42.300 --> 02:42.990 +I'll see. + +02:43.620 --> 02:44.430 +All right. + +02:44.430 --> 02:49.440 +Well, with that in mind, let's move on to the main material of today. + +02:49.440 --> 02:54.660 +We're going to talk about the unbelievable history of the last few years. 
+ +02:54.660 --> 02:59.690 +And I have to pinch myself from time to time to remind myself of everything that we've been through. + +02:59.690 --> 03:08.240 +In 2017, Google, some scientists from Google released a paper that was called Attention is All You + +03:08.240 --> 03:10.700 +Need, and you can take a look at it. + +03:10.850 --> 03:16.790 +And this was the paper in which the transformer architecture was invented. + +03:16.820 --> 03:21.050 +This new architecture, including these layers called self-attention layers. + +03:21.290 --> 03:27.470 +And the thing that's perhaps most remarkable about this paper when you read it, is that it's very clear + +03:27.470 --> 03:33.200 +that the inventors themselves did not realize what an extraordinary breakthrough they were making. + +03:33.230 --> 03:39.650 +They sort of remark on it as something that seems to be a surprising discovery, but they clearly don't + +03:39.650 --> 03:45.170 +realize the door that they are opening and how much progress is going to be made as a result of their + +03:45.170 --> 03:46.160 +discoveries. + +03:46.190 --> 03:51.170 +In fact, the next year was when GPT one was released also. + +03:51.290 --> 03:56.020 +But for those that were, uh, around at that time and had used Bert from Google. + +03:56.020 --> 03:59.350 +And then came GPT 2 in 2019. + +03:59.620 --> 04:02.140 +GPT 3 in 2020. + +04:02.170 --> 04:09.340 +But most of us got the shock when we saw the power in late 2022. + +04:09.370 --> 04:13.360 +Was it November or December when ChatGPT came out, came out? + +04:13.510 --> 04:21.940 +ChatGPT was essentially GPT three, but also used GPT three and a half, 3.5 and also used this technique + +04:21.970 --> 04:28.060 +RL reinforcement learning from human feedback that made it so very powerful. + +04:28.630 --> 04:35.590 +Then GPT four came out in 2023, and of course this year we've had GPT four. 
+ +04:35.620 --> 04:42.580 +Oh, and we've now, as we've seen, had zero one preview and other things are on the way. + +04:43.960 --> 04:48.940 +It was interesting to see how the world responded to this change. + +04:48.970 --> 04:54.840 +Initially, ChatGPT was such a surprise to all of us, even practitioners in the field. + +04:55.080 --> 05:02.160 +It was really astounding how accurately and with how much nuance it was able to answer questions. + +05:02.160 --> 05:04.620 +That was followed by something of a backlash. + +05:04.620 --> 05:11.700 +There was a lot of of healthy skepticism when people said, this is really akin to to a conjuring trick. + +05:11.820 --> 05:16.560 +This is what we're seeing here is basically really good predictive text. + +05:16.560 --> 05:21.930 +If you bring up your, your, your text messages and you and you press the button to predict the next + +05:21.930 --> 05:27.600 +word, sometimes it does really, really well, almost by coincidence, just because it's matching patterns + +05:27.600 --> 05:28.380 +statistically. + +05:28.380 --> 05:30.240 +And that's all you're seeing here. + +05:30.240 --> 05:35.940 +And there was a famous paper that's known as the stochastic parrot paper, which talked about the fact + +05:35.940 --> 05:42.540 +that what we're seeing here is nothing more than statistics, and that it sort of gives, makes the + +05:42.540 --> 05:48.480 +point that we are falsely interpreting this as the model, having some kind of an understanding which + +05:48.480 --> 05:50.160 +doesn't really exist. + +05:50.160 --> 05:55.850 +And it highlights some of the challenges and even dangers associated with us coming to the to the wrong + +05:55.850 --> 05:57.380 +conclusions about that. + +05:57.500 --> 06:03.800 +But really, based on the progress since then, the pendulum has swung back a bit now. 
+ +06:03.800 --> 06:09.770 +And I would say that where we are as practitioners at this point is explaining this in terms of emergent + +06:09.770 --> 06:10.670 +intelligence. + +06:10.670 --> 06:16.070 +That's the expression we like to use, which is saying that really what's happening here is that whilst + +06:16.070 --> 06:22.160 +it is true that what we're seeing is essentially just statistical prediction, all we're doing when + +06:22.160 --> 06:27.890 +we run an LM is we're providing it with some, some words or actually some tokens and saying, given + +06:27.890 --> 06:32.570 +all of the patterns you've seen in all of your training data and everything you've learned, what is + +06:32.570 --> 06:36.770 +the most likely next token, what is the most likely next token? + +06:36.770 --> 06:40.640 +And then we'll feed that in and say, and now what's the most likely next token after that. + +06:40.640 --> 06:44.000 +And all it is doing is predicting this next token. + +06:44.000 --> 06:45.230 +That is true. + +06:45.230 --> 06:53.500 +But nonetheless, as a byproduct of doing this at such massive scale with trillions of different weights + +06:53.500 --> 06:58.030 +that are being set internally in the model to control how it will make that prediction. + +06:58.060 --> 07:05.740 +A byproduct of this level of scale is that we see this effect that we call emergent intelligence, which + +07:05.740 --> 07:07.570 +is an apparent intelligence. + +07:07.570 --> 07:12.250 +It is as if the model is really understanding what we're telling it. + +07:12.280 --> 07:17.590 +It is, of course, true that this is really something that is imitating understanding. + +07:17.590 --> 07:20.230 +It's just seeing the patterns and replicating them. + +07:20.230 --> 07:27.340 +But there is this emergent property that it apparently is able to show this level of intelligence that + +07:27.340 --> 07:30.820 +we all experience when we use these frontier models every day. 
+ +07:31.540 --> 07:31.870 +All right. + +07:31.870 --> 07:33.700 +Hopefully that's given you food for thought. + +07:33.730 --> 07:36.640 +Interested to hear where you stand on this debate. + +07:36.670 --> 07:40.690 +By all means post that or let me know. + +07:40.750 --> 07:45.280 +And in the next lecture we will talk more about some of the theory behind this. + +07:45.280 --> 07:49.930 +And also look at some of the discoveries that we've had along the way. diff --git a/week5/community-contributions/subtitles/srts/60619501/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619501/ja_JP.srt new file mode 100755 index 0000000..db367f2 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619501/ja_JP.srt @@ -0,0 +1,274 @@ +WEBVTT + +00:01.220 --> 00:04.460 +一緒に過ごす4日目を迎える。 + +00:04.460 --> 00:06.650 +今日はとても重要な日だ。 + +00:06.650 --> 00:14.240 +今日は、 このコースで扱うLMSのほとんどを支えるアーキテクチャであるトランスフォーマーの台頭について見ていこう。 + +00:14.240 --> 00:18.830 +コピロットやエージェントのようなもの、 そして最も重要なこととして、 + +00:18.860 --> 00:28.040 +トークン、 コンタクト、 ウィンドウ、 パラメータ、 APIコストなど、 LMSの背後にある基本的な要素について話すつもりだ。 + +00:28.070 --> 00:32.240 +皆さんの中には、 すでによくご存知の方もいらっしゃると思いますが、 + +00:32.240 --> 00:37.310 +もう少し深く掘り下げて、 より深い洞察をお見せしたいと思います。 + +00:37.310 --> 00:42.830 +だから、 頑張って。 もし、 あなたが初めてこの世界に足を踏み入れるのであれば、 私はあなたにこの世界に対する本当に良い基礎を与えたいと思っている。 + +00:43.340 --> 00:49.100 +しかし、 その前にリーダー争いの勝者を明らかにしなければならない。 + +00:49.130 --> 00:54.590 +前回のセッションの最後に、 アレックス対ブレイク対チャーリー、 GPT4人ゼロ、 + +00:54.590 --> 01:04.990 +クロード3人、 オーパスとジェミニを残し、 彼らがリーダーを決める投票を一緒に戦っていたことを覚えていてほしい。 + +01:05.110 --> 01:06.400 +彼らのことを覚えているかい? 
+ +01:06.430 --> 01:08.830 +お互いに、 このような言葉を投げかけている。 + +01:08.830 --> 01:11.020 +そして今、 結果を明らかにする。 + +01:11.020 --> 01:15.160 +最初の投票はGPTのアレックスからだった。 + +01:15.310 --> 01:19.870 +GPTはブレイクをリーダーに選出した。 + +01:19.900 --> 01:23.620 +次はブレイクの投票だ。 + +01:23.620 --> 01:27.250 +そしてブレイクはジェミニのチャーリーに投票した。 + +01:27.280 --> 01:29.290 +だからクロードは双子座に投票した。 + +01:29.320 --> 01:31.030 +ブレイクはチャーリーに投票した。 + +01:31.030 --> 01:34.180 +そして、 すべてはチャーリーの一票にかかっている。 + +01:34.210 --> 01:35.200 +大きなドラムロール。 + +01:35.200 --> 01:35.920 +賭けよう。 + +01:35.920 --> 01:37.810 +誰が優勝すると思うかを決める。 + +01:37.840 --> 01:39.490 +これが勝者だ。 + +01:39.520 --> 01:43.330 +チャーリーはブレイク、 つまりクロードに投票した。 + +01:43.360 --> 01:54.970 +スリー・オーパス・ブレイクは、 非科学的ではあるが、 非常に楽しいチャレンジの勝者である。 + +01:55.180 --> 01:57.100 +そして、 これを自分で試してみるべきだ。 + +01:57.100 --> 02:02.220 +これは私が言うように、 2、 3カ月前に実施したもので、 今は結果が変わっていると確信している。 + +02:02.850 --> 02:04.920 +少なくとも、 違った写真が撮れることは確かだ。 + +02:04.920 --> 02:07.320 +だから、 ぜひ自分で試してみてほしい。 + +02:07.320 --> 02:12.060 +私のプロンプトを真似てもいいし、 ちょっと違うことを試してみて、 何を思いつくか見てみるのもいい。 + +02:12.060 --> 02:15.600 +そして実際、 私はこれを基にゲームを書いた。 + +02:15.600 --> 02:22.020 +私の個人ページ@edward. 
このゲームでは、 様々なモデルを対戦させ、 + +02:22.080 --> 02:28.860 +お互いにコインを盗む方法を決めるという、 + +02:28.860 --> 02:40.290 +もう少し定量的なことを試しています。 + +02:40.290 --> 02:42.300 +それについては、 もう少し後で見てみよう。 + +02:42.300 --> 02:42.990 +そうだね。 + +02:43.620 --> 02:44.430 +分かった。 + +02:44.430 --> 02:49.440 +さて、 それを踏まえて今日のメインネタに移ろう。 + +02:49.440 --> 02:54.660 +ここ数年の信じられないような歴史について話すつもりだ。 + +02:54.660 --> 02:59.690 +そして、 私たちが経験してきたすべてのことを思い出すために、 ときどき自分をつねらなければならない。 + +02:59.690 --> 03:10.700 +2017年、 グーグルの科学者たちが「Attention is All You Need」という論文を発表した。 + +03:10.850 --> 03:16.790 +そしてこれが、 トランス・アーキテクチャが発明された論文である。 + +03:16.820 --> 03:21.050 +セルフ・アテンション・レイヤーと呼ばれる層を含むこの新しいアーキテクチャ。 + +03:21.290 --> 03:33.200 +そして、 この論文を読んで最も注目すべき点は、 発明者自身が自分たちがとんでもないブレークスルーを成し遂げていることに気づいていなかったことが非常にはっきりしていることだろう。 + +03:33.230 --> 03:46.160 +しかし、 彼らは明らかに、 自分たちが開いているドアや、 自分たちの発見の結果としてどれほどの進歩がもたらされるかを理解していない。 + +03:46.190 --> 03:51.170 +実は、 その翌年にGPT1もリリースされた。 + +03:51.290 --> 03:56.020 +しかし、 その当時、 グーグルのバートを使っていた人たちにとっては、 そうだろう。 + +03:56.020 --> 03:59.350 +そして2019年にGPT2が開催された。 + +03:59.620 --> 04:02.140 +2020年のGPT3。 + +04:02.170 --> 04:09.340 +しかし、 私たちの多くは2022年後半にパワーを目の当たりにして衝撃を受けた。 + +04:09.370 --> 04:13.360 +ChatGPTが発表されたのは11月か12月だったかな? 
+ +04:13.510 --> 04:21.940 +ChatGPTは基本的にGPT 3だったが、 GPT 3.5、 3.5も使っていた。 5、 また、 人間のフィードバックからRL強化学習というテクニックを使うことで、 + +04:21.970 --> 04:28.060 +非常に強力になった。 + +04:28.630 --> 04:35.590 +そして2023年にGPT4が発表され、 もちろん今年もGPT4が行われた。 + +04:35.620 --> 04:42.580 +ああ、 そして私たちは今、 見てきたように、 ゼロワンのプレビューを行ったし、 他のものも進行中だ。 + +04:43.960 --> 04:48.940 +この変化に世界がどう反応したかは興味深かった。 + +04:48.970 --> 04:54.840 +当初、 ChatGPTは私たち全員にとって、 その道の実践者にとっても驚きでした。 + +04:55.080 --> 05:02.160 +どれだけ正確に、 どれだけニュアンス豊かに質問に答えることができるのか、 本当に驚かされた。 + +05:02.160 --> 05:04.620 +その後、 反発が起きた。 + +05:04.620 --> 05:11.700 +これは本当に呪術的なトリックに似ている。 + +05:11.820 --> 05:16.560 +ここにあるのは、 基本的に本当に優れた予測テキストだ。 + +05:16.560 --> 05:21.930 +テキストメッセージを表示させ、 次の単語を予測するボタンを押すと、 + +05:21.930 --> 05:28.380 +統計的にパターンが一致するため、 ほとんど偶然に、 本当にうまくいくことがある。 + +05:28.380 --> 05:30.240 +ここにあるのはそれだけだ。 + +05:30.240 --> 05:35.940 +そして、 確率論的オウム論文として知られる有名な論文があり、 この論文では、 + +05:35.940 --> 05:42.540 +私たちがここで目にしているのは単なる統計学に過ぎないという事実が語られ、 私たちはこれをモデルとして誤って解釈し、 + +05:42.540 --> 05:50.160 +実際には存在しない何らかの理解をしているという指摘がなされている。 + +05:50.160 --> 05:57.380 +そして、 私たちがそれについて間違った結論を出すことに伴う課題や危険性さえも浮き彫りにしている。 + +05:57.500 --> 06:03.800 +しかし、 その後の経過を見ると、 今は振り子が少し戻っている。 + +06:03.800 --> 06:10.670 +そして、 現時点で私たちが実践者としているのは、 これを創発的知性という観点から説明することだ。 + +06:10.670 --> 06:16.070 +これは私たちが好んで使う表現で、 ここで起きているのは、 私たちが見ているのは本質的には単なる統計的予測であることは事実だが、 + +06:16.070 --> 06:32.570 +LMを実行するときに私たちがやっていることは、 いくつかの単語や実際のトークンをLMに与えて、 すべてのトレーニングデータと学習したすべてのものの中で見たすべてのパターンを考慮して、 次に来る可能性が最も高いトークンは何か、 次に来る可能性が最も高いトークンは何か、 + +06:32.570 --> 06:36.770 +と言っているのだ。 + +06:36.770 --> 06:40.640 +そして、 その次のトークンは何だろう? 
+ +06:40.640 --> 06:44.000 +そしてやっていることは、 この次のトークンを予測することだ。 + +06:44.000 --> 06:45.230 +その通りだ。 + +06:45.230 --> 06:53.500 +しかし、 それにもかかわらず、 何兆通りもの異なる重みがモデル内部で設定され、 どのように予測を行うかを制御するような大規模なスケールの副産物として、 + +06:53.500 --> 06:58.030 +このようなことが行われているのだ。 + +06:58.060 --> 07:07.570 +このレベルのスケールの副産物として、 私たちが創発的知性と呼ぶ、 見かけ上の知性という効果が見られる。 + +07:07.570 --> 07:12.250 +まるでモデルが私たちの言うことを本当に理解しているかのようだ。 + +07:12.280 --> 07:17.590 +もちろん、 これが本当に理解を模倣したものであることは事実だ。 + +07:17.590 --> 07:20.230 +パターンを見て、 それを再現するだけだ。 + +07:20.230 --> 07:30.820 +しかし、 フロンティア・モデルには、 私たちが日常的に使っているフロンティア・モデルに見られるような、 知性のレベルを示すことができるという創発的な性質がある。 + +07:31.540 --> 07:31.870 +分かった。 + +07:31.870 --> 07:33.700 +これで考える材料ができただろうか。 + +07:33.730 --> 07:36.640 +この議論について、 あなたの立場を聞いてみたい。 + +07:36.670 --> 07:40.690 +ぜひそれを投稿するか、 私に知らせてほしい。 + +07:40.750 --> 07:45.280 +次回の講義では、 その背景にある理論についてもう少し詳しく話そう。 + +07:45.280 --> 07:49.930 +そして、 その過程で得たいくつかの発見も見てほしい。 diff --git a/week5/community-contributions/subtitles/srts/60619501/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619501/ko_KR.srt new file mode 100755 index 0000000..061ffeb --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619501/ko_KR.srt @@ -0,0 +1,322 @@ +WEBVTT + +00:01.220 --> 00:04.460 +우리가 함께한 지 4일째예요 + +00:04.460 --> 00:06.650 +오늘은 중요한 날이에요 + +00:06.650 --> 00:12.110 +오늘 살펴볼 것은 변압기의 발전입니다 이 과정에서 작업하게 될 LMS 대부분 + +00:12.110 --> 00:14.240 +뒤에 자리한 아키텍처죠 + +00:14.240 --> 00:18.830 +부조종사와 에이전트 같은 것에 대해 얘기할 겁니다 가장 중요한 + +00:18.860 --> 00:25.610 +건 LMS의 기본 요소를 다룰 겁니다 토큰, 연락처, windows, 매개변수 API + +00:25.610 --> 00:28.040 +비용에 대해 작업할 거예요 + +00:28.070 --> 00:32.240 +여러분 중 일부는 이미 익숙한 내용이지만 좀 더 깊이 들어가 통찰력을 + +00:32.240 --> 00:37.310 +보여드리고자 합니다 비트를 통해 시간을 유용하게 쓰실 거예요 + +00:37.310 --> 00:42.830 +그러니 조금만 더 버텨요 처음이라면 이 세계에 대한 좋은 기초가 되길 바라요 + +00:43.340 --> 00:49.100 +하지만 먼저 리더십 대결의 승자를 발표해야겠죠 + +00:49.130 --> 00:54.590 +기억하시길 바랍니다 지난 세션 마지막에 알렉스와 + +00:54.590 --> 01:01.960 +블레이크 대 찰리 GPT 40, 클로드 3, 오푸스 제미니가 대장을 + +01:02.080 --> 01:04.990 +뽑기 위해 싸우고 있었죠 + 
+01:05.110 --> 01:06.400 +기억하죠? + +01:06.430 --> 01:08.830 +서로 간결하게 말하죠 + +01:08.830 --> 01:11.020 +이제 결과를 발표하죠 + +01:11.020 --> 01:15.160 +GPT의 알렉스가 첫 번째 투표를 했어요 + +01:15.310 --> 01:19.870 +GPT가 블레이크를 지도자로 뽑았어요 + +01:19.900 --> 01:23.620 +다음은 블레이크의 투표가 있었어요 + +01:23.620 --> 01:27.250 +블레이크는 찰리를 제미니제로 뽑았고요 + +01:27.280 --> 01:29.290 +클로드는 쌍둥이자리에 투표했어요 + +01:29.320 --> 01:31.030 +블레이크는 찰리를 찍었어요 + +01:31.030 --> 01:34.180 +찰리의 표에 모든 게 달렸죠 + +01:34.210 --> 01:35.200 +드럼 소리요 + +01:35.200 --> 01:35.920 +베팅하세요 + +01:35.920 --> 01:37.810 +누가 우승할 것 같은지 결정하세요 + +01:37.840 --> 01:39.490 +이게 우승이에요 + +01:39.520 --> 01:43.330 +찰리는 블레이크를 뽑았고 클로드도 뽑았죠 + +01:43.360 --> 01:51.040 +비과학적이지만 재미있는 도전 과제의 우승자는 오퍼스 블레이크입니다 사진에서 + +01:51.070 --> 01:54.970 +보셨던 기대와 일치하면 좋겠네요 + +01:55.180 --> 01:57.100 +여러분도 해 보세요 + +01:57.100 --> 02:01.530 +말씀드린 대로 몇 달 전에 실행했는데 이제 결과가 달라질 + +02:01.560 --> 02:02.220 +거예요 + +02:02.850 --> 02:04.920 +적어도 다른 사진을 얻을 수 있죠 Get it + +02:04.920 --> 02:07.320 +그러니 여러분도 한번 해 보세요 + +02:07.320 --> 02:12.060 +제 프롬프트를 복사하거나 다른 걸 시도해서 비트를 만들어 보세요 + +02:12.060 --> 02:15.600 +사실 이걸 토대로 게임을 만들었어요 + +02:15.600 --> 02:22.020 +제 개인 웹페이지 @ed워드에서 확인하세요 아웃스마트라는 게임이 있는데요 + +02:22.080 --> 02:28.860 +다양한 모델을 골라 비교해 양적인 효과를 내려고 했어요 비트를 + +02:28.860 --> 02:35.940 +훔칠 방법을 결정해야 할 때요 다양한 모델의 다양한 기능을 볼 수 + +02:35.940 --> 02:40.290 +있는 재미있는 방법이죠 + +02:40.290 --> 02:42.300 +비트는 나중에 보도록 하죠 + +02:42.300 --> 02:42.990 +두고 보죠 + +02:43.620 --> 02:44.430 +좋아요 + +02:44.430 --> 02:49.440 +자, 그럼 오늘의 주제로 넘어가 보죠 + +02:49.440 --> 02:54.660 +지난 몇 년간의 믿기 힘든 역사에 대해 얘기해 보죠 + +02:54.660 --> 02:59.690 +가끔 제 볼을 꼬집어야 해요 우리가 함께 겪은 모든 일을 떠올리려고요 + +02:59.690 --> 03:08.240 +2017년에 구글의 몇몇 과학자들이 발표한 논문은 관심만 있으면 된다였고 + +03:08.240 --> 03:10.700 +여러분이 볼 수 있었죠 + +03:10.850 --> 03:16.790 +이 종이를 토대로 변압기 구조가 탄생했죠 + +03:16.820 --> 03:21.050 +이 새로운 구조는 자기 집중 계층을 포함해요 + +03:21.290 --> 03:27.470 +이 논문에서 가장 놀라운 점은 발명가들 자신은 + +03:27.470 --> 03:33.200 +이 놀라운 발견을 전혀 몰랐다는 거예요 + +03:33.230 --> 03:39.650 +놀라운 발견인 것처럼 언급했지만 그게 어떤 기회인지 
전혀 모르고 + +03:39.650 --> 03:46.160 +있었어요 그 발견으로 얼마나 많은 진전이 있을지도 몰랐고요 + +03:46.190 --> 03:51.170 +사실 이듬해에 GPT 1이 출시됐죠 + +03:51.290 --> 03:56.020 +하지만 그 당시 구글에서 버트를 사용했던 사람들은 달랐죠 + +03:56.020 --> 03:59.350 +2019년 GPT 2가 시작됐죠 + +03:59.620 --> 04:02.140 +GPT 3을 달성할 수 있죠 + +04:02.170 --> 04:09.340 +하지만 2022년 말의 위력을 보고 대부분 충격을 받았어요 + +04:09.370 --> 04:13.360 +챗GPT가 나온 게 11월인가 12월이었나요? + +04:13.510 --> 04:21.940 +챗GPT는 GPT 3을 의미하지만 3.5나 3도 사용했어요 인간 피드백을 통해 배우는 RL + +04:21.970 --> 04:28.060 +강화 기술을 사용했는데 아주 강력해졌죠 + +04:28.630 --> 04:35.590 +2023년에 GPT 4가 출시됐고 올해에도 4가 출시됐죠 + +04:35.620 --> 04:42.580 +아, 그리고 지금 보셨듯이 01 프리뷰가 있고 다른 것들도 준비 중이에요 + +04:43.960 --> 04:48.940 +이런 변화에 세계가 어떻게 반응하는지 보는 건 흥미로웠어요 + +04:48.970 --> 04:54.840 +처음에는 챗GPT가 모두에게 충격이었어요 심지어 업계 종사자들도요 + +04:55.080 --> 05:02.160 +정말 놀라웠어요 얼마나 정확하고 뉘앙스가 많은지 질문에 대한 답을 줄 수 있었죠 + +05:02.160 --> 05:04.620 +그에 따른 반발이 있었어요 + +05:04.620 --> 05:11.700 +건전한 회의론이 많았어요 마술과 비슷하다는 사람들의 말에 말이죠 + +05:11.820 --> 05:16.560 +여기 보이는 건 기본적으로 아주 훌륭한 예측 텍스트예요 + +05:16.560 --> 05:21.930 +문자 메시지를 불러와서 다음 단어를 예측하는 버튼을 누르면 + +05:21.930 --> 05:28.380 +정말 잘 나올 때도 있어요 거의 우연이죠 패턴을 통계적으로 일치하거든요 + +05:28.380 --> 05:30.240 +그게 다예요 + +05:30.240 --> 05:35.940 +추계학적 앵무새 논문이라는 유명한 논문이 있어요 그 논문은 + +05:35.940 --> 05:42.540 +우리가 여기서 보는 게 통계에 불과하다는 사실을 강조하죠 우리가 + +05:42.540 --> 05:48.480 +이걸 모델로 잘못 해석하고 있지도 않은 이해를 하고 있다는 걸 + +05:48.480 --> 05:50.160 +시사해요 + +05:50.160 --> 05:55.850 +이 책은 우리가 잘못된 결론을 내리는 데 따른 어려움과 위험을 + +05:55.850 --> 05:57.380 +보여 주죠 + +05:57.500 --> 06:03.800 +하지만 그 이후의 진전을 보면 비트는 약간 흔들렸어요 + +06:03.800 --> 06:09.770 +이 시점에서 우리 의사들은 이를 초현실적 지능으로 설명하고 + +06:09.770 --> 06:10.670 +있어요 + +06:10.670 --> 06:16.070 +우리가 즐겨 쓰는 표현입니다 여기서 일어나는 일은 통계적 + +06:16.070 --> 06:22.160 +예측일 뿐입니다 LM을 실행할 때 우리가 하는 일은 단어 혹은 토큰을 + +06:22.160 --> 06:27.890 +제공하는 것입니다 트레이닝 데이터와 모든 것을 통해 패턴을 + +06:27.890 --> 06:32.570 +보았고 다음 토큰이 무엇인지 가장 유력한 것이 무엇인지를 + +06:32.570 --> 06:36.770 +알려주는 것이죠 + +06:36.770 --> 06:40.640 +그런 다음 그걸 피드하고 그 다음에 가장 가능성이 큰 토큰이 뭔지 말하죠 + 
+06:40.640 --> 06:44.000 +다음 토큰을 예측하는 게 전부죠 + +06:44.000 --> 06:45.230 +맞아요 + +06:45.230 --> 06:53.500 +하지만 그렇다고 해도 거대한 규모로 작업하면서 모델 내부에서 수조 개의 다른 중량을 설정해 + +06:53.500 --> 06:58.030 +예측을 어떻게 할지 통제하는 부산물이죠 + +06:58.060 --> 07:05.740 +그 정도 수준의 부산물로 발생하는 효과가 있는데 우리는 이를 비상 지능이라고 부릅니다 + +07:05.740 --> 07:07.570 +명백한 지능이죠 + +07:07.570 --> 07:12.250 +모델이 우리가 말하는 걸 정말 이해하는 것 같아요 + +07:12.280 --> 07:17.590 +이해를 모방하는 것이라는 건 물론 사실이에요 + +07:17.590 --> 07:20.230 +패턴을 보고 복제하는 거예요 + +07:20.230 --> 07:27.340 +하지만 이 신흥 속성이 우리가 매일 개척자 모델을 사용할 때 경험하는 수준의 + +07:27.340 --> 07:30.820 +지능을 보여줄 수 있다는 걸 보여주죠 + +07:31.540 --> 07:31.870 +좋아요 + +07:31.870 --> 07:33.700 +잘 생각해 보세요 + +07:33.730 --> 07:36.640 +이번 토론에 대한 당신 입장이 궁금하군요 + +07:36.670 --> 07:40.690 +post를 하시거나 저에게 알려주세요. + +07:40.750 --> 07:45.280 +다음 강의에서는 이 이론에 대해 더 자세히 이야기해 보죠 + +07:45.280 --> 07:49.930 +그 과정에서 발견한 것들도 살펴볼 거예요 diff --git a/week5/community-contributions/subtitles/srts/60619577/en_US.srt b/week5/community-contributions/subtitles/srts/60619577/en_US.srt new file mode 100755 index 0000000..c7be37f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619577/en_US.srt @@ -0,0 +1,124 @@ +WEBVTT + +00:00.110 --> 00:04.730 +And for the final piece of background information, I wanted to take another moment to talk about API + +00:04.730 --> 00:05.300 +costs. + +00:05.300 --> 00:07.910 +It's something that I mentioned before. + +00:07.940 --> 00:11.870 +I've also got a section on this, of course, in the readme too. + +00:12.110 --> 00:15.260 +So as I said before, there is this. + +00:15.290 --> 00:23.060 +It is confusing that there is typically this pro plan associated with the chat web interfaces like ChatGPT, + +00:23.150 --> 00:29.900 +which is typically $20 a month, uh, in the US, and similar amounts in other territories, uh, and + +00:29.900 --> 00:35.510 +with a monthly subscription, uh, it's somewhat rate limited, but but basically there's no charge + +00:35.510 --> 00:36.140 +per call. 
+ +00:36.140 --> 00:41.390 +And it's, it feels like you have an almost unlimited ability to be calling the model through the chat + +00:41.390 --> 00:42.350 +interface. + +00:42.350 --> 00:44.540 +The APIs are different. + +00:44.540 --> 00:51.110 +The APIs do not have a monthly subscription, but you pay per call, and the costs that you pay depends + +00:51.110 --> 00:56.990 +on the model that you're using, and also on the number of input tokens and the number of output tokens. + +00:56.990 --> 00:58.340 +And it charges you. + +00:58.340 --> 01:01.040 +The total cost is a combination of those two. + +01:01.070 --> 01:06.140 +It's a smaller charge based on the number of input tokens, and a slightly higher charge based on the + +01:06.140 --> 01:07.550 +number of output tokens. + +01:07.550 --> 01:13.250 +But I should stress that the total costs are overall very low when it comes to the individual calls. + +01:13.250 --> 01:17.600 +If you're looking to build a system that will be making large numbers of calls, then the numbers add + +01:17.600 --> 01:20.930 +up and you have to be very sensitive and aware of it as you scale. + +01:20.960 --> 01:26.030 +But in the kinds of projects we'll be working on, the costs will be quite small, and I would like + +01:26.030 --> 01:27.980 +to show you exactly what that is in just a second. + +01:28.340 --> 01:34.670 +I do want to say that generally, as far as you're concerned, for this course, probably when it comes + +01:34.670 --> 01:41.960 +to API costs, what will matter the most is that these days these platforms seem to require a minimum. + +01:41.960 --> 01:47.810 +In the case of OpenAI and Claude, at the moment in the US, they require you to put on at least a $5 + +01:47.810 --> 01:50.480 +worth of credit that you then draw down on. 
+ +01:50.480 --> 01:55.820 +And in this course, we won't scratch the surface of that $5, but you do have to put in that initial + +01:55.820 --> 02:01.370 +amount, but there will be plenty of opportunities for you to be using that in your own projects in + +02:01.370 --> 02:02.030 +different ways. + +02:02.030 --> 02:08.270 +And I think it's a great investment in terms of your education and ability to use these models to build + +02:08.270 --> 02:13.130 +new projects for yourself, as well as build your education and your experience. + +02:13.160 --> 02:18.080 +Having said that, if that is something that you're not comfortable with, as I say, now that we see + +02:18.080 --> 02:24.140 +how to use a llama and you've built that as part of the last exercise, you would be able to use a llama + +02:24.140 --> 02:30.290 +instead of using either OpenAI or Claude or Gemini at any point. + +02:30.380 --> 02:35.090 +You've got some practice doing that now with the last exercise, and you can put that into action should + +02:35.090 --> 02:35.870 +you wish. + +02:35.900 --> 02:38.810 +If you're not comfortable with with the API costs. + +02:38.810 --> 02:45.080 +But with that background, let me now take you to a site where I can show you a bit more insight into + +02:45.080 --> 02:47.360 +the API costs and also the context. + +02:47.360 --> 02:48.020 +Windows. 
diff --git a/week5/community-contributions/subtitles/srts/60619577/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619577/ja_JP.srt new file mode 100755 index 0000000..a941474 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619577/ja_JP.srt @@ -0,0 +1,100 @@ +WEBVTT + +00:00.110 --> 00:05.300 +そして、 最後の背景情報として、 APIのコストについてお話ししたいと思います。 + +00:05.300 --> 00:07.910 +前にも言ったことだけどね。 + +00:07.940 --> 00:11.870 +もちろん、 Readmeにもこれに関するセクションがある。 + +00:12.110 --> 00:15.260 +だから、 前にも言ったように、 これがある。 + +00:15.290 --> 00:23.060 +通常、 ChatGPTのようなチャット・ウェブ・インターフェイスには、 プロ・プランがあり、 + +00:23.150 --> 00:29.900 +米国では月額20ドル、 他の地域でも同額で、 月額のサブスクリプションでは、 + +00:29.900 --> 00:36.140 +多少料金が制限されますが、 基本的に通話料はかかりません。 + +00:36.140 --> 00:42.350 +それに、 チャットのインターフェイスを通してモデルに電話をかけることが、 ほとんど無制限にできるような気がするんだ。 + +00:42.350 --> 00:44.540 +APIが違う。 + +00:44.540 --> 00:51.110 +APIは月額制ではなく、 コールごとに支払う。 支払うコストは、 使用するモデルや、 + +00:51.110 --> 00:56.990 +入力トークンの数、 出力トークンの数に依存する。 + +00:56.990 --> 00:58.340 +料金も請求される。 + +00:58.340 --> 01:01.040 +総費用はこの2つの組み合わせとなる。 + +01:01.070 --> 01:07.550 +インプット・トークンの数に応じた少ないチャージと、 アウトプット・トークンの数に応じた若干高いチャージだ。 + +01:07.550 --> 01:13.250 +しかし、 個々の通話に関しては、 トータルコストは全体的に非常に低いことを強調しておきたい。 + +01:13.250 --> 01:17.600 +大量の電話をかけるシステムを構築しようとしているのであれば、 その数はどんどん増えていくので、 + +01:17.600 --> 01:20.930 +規模を拡大するにつれて非常に敏感になり、 それを意識しなければならない。 + +01:20.960 --> 01:27.980 +しかし、 私たちが取り組むプロジェクトでは、 そのコストはかなり小さくなる。 + +01:28.340 --> 01:34.670 +一般的に、 このコースに関して言えば、 おそらくAPIコストに関して言えば、 + +01:34.670 --> 01:41.960 +最も重要なのは、 最近のプラットフォームは最低限を要求しているようだということです。 + +01:41.960 --> 01:47.810 +OpenAIとClaudeの場合、 現時点ではアメリカでは、 少なくとも5ドル相当のクレジットを付ける必要があり、 + +01:47.810 --> 01:50.480 +そのクレジットを引き出して使用する。 + +01:50.480 --> 01:55.820 +このコースでは、 その5ドルの表面には触れませんが、 最初の金額を投入する必要があります。 + +01:55.820 --> 02:02.030 +しかし、 自分のプロジェクトでその5ドルをさまざまな方法で使用する機会がたくさんあります。 + +02:02.030 --> 02:08.270 +そして、 自分の教育や経験を積み重ねるだけでなく、 これらのモデルを使って自分のために新しいプロジェクトを立ち上げる能力という点でも、 + +02:08.270 --> 02:13.130 +素晴らしい投資だと思う。 + +02:13.160 --> 02:18.080 
+とはいえ、 もしラマを使うことに抵抗があるのであれば、 + +02:18.080 --> 02:24.140 +前回の練習でラマを作ったのですから、 オープンAIやクロード、 + +02:24.140 --> 02:30.290 +ジェミニを使う代わりにラマを使うこともできます。 + +02:30.380 --> 02:35.870 +前回の練習でそれを実践する練習を積んだだろう。 + +02:35.900 --> 02:38.810 +APIコストに抵抗があるなら。 + +02:38.810 --> 02:47.360 +このような背景を踏まえて、 APIのコストとその背景についてもう少し詳しく説明できるサイトを紹介しよう。 + +02:47.360 --> 02:48.020 +ウィンドウズ diff --git a/week5/community-contributions/subtitles/srts/60619577/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619577/ko_KR.srt new file mode 100755 index 0000000..5d2d8de --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619577/ko_KR.srt @@ -0,0 +1,112 @@ +WEBVTT + +00:00.110 --> 00:05.300 +마지막 배경 정보로 API 비용에 대해 다시 한 번 얘기하고 싶네요 + +00:05.300 --> 00:07.910 +전에 말씀드린 건데요 + +00:07.940 --> 00:11.870 +리드미에 이것에 관한 부분도 있어요 + +00:12.110 --> 00:15.260 +아까도 말씀드렸지만 이게 있어요 + +00:15.290 --> 00:23.060 +챗 웹 인터페이스와 관련된 프로 계획이라는 게 혼란스럽습니다 챗GPT 같은 + +00:23.150 --> 00:29.900 +거요 미국에서는 월 20달러죠 다른 지역도 금액이 비슷하고요 월간 + +00:29.900 --> 00:36.140 +구독을 하면 요금은 제한적이지만 한 통당 요금은 없어요 + +00:36.140 --> 00:41.390 +채팅 인터페이스를 통해 모델을 호출할 수 있는 거의 무한한 기능이 있는 + +00:41.390 --> 00:42.350 +것 같아요 + +00:42.350 --> 00:44.540 +API는 달라요 + +00:44.540 --> 00:51.110 +API는 월간 구독을 하지 않지만 호출당 지불을 하고 지불하는 비용은 사용하는 모델에 + +00:51.110 --> 00:56.990 +따라 다르고 입력 토큰의 개수와 출력 토큰의 개수에 따라 달라지죠 + +00:56.990 --> 00:58.340 +요금을 청구하죠 + +00:58.340 --> 01:01.040 +총비용은 그 둘을 합친 거예요 + +01:01.070 --> 01:06.140 +입력 토큰의 개수에 따라 전하가 더 적고 출력 토큰의 개수에 따라 + +01:06.140 --> 01:07.550 +전하가 더 높죠 + +01:07.550 --> 01:13.250 +하지만 개인 통화의 총비용은 전반적으로 아주 낮다는 점을 강조해야겠네요 + +01:13.250 --> 01:17.600 +많은 양의 호출을 하는 시스템을 구축하고자 한다면 그 숫자가 쌓일 것이고 아주 + +01:17.600 --> 01:20.930 +세심하게 스케일을 조정할수록 그것을 인식해야 하죠 + +01:20.960 --> 01:26.030 +하지만 우리가 작업할 프로젝트는 비용이 아주 적게 들죠 그게 어떤 건지 + +01:26.030 --> 01:27.980 +잠시 후에 보여드릴게요 + +01:28.340 --> 01:34.670 +일반적으로 말씀드리고 싶은 건 여러분이 염려하는 한 이 과정에선 API + +01:34.670 --> 01:41.960 +비용에 관해 가장 중요한 건 요즘 이런 플랫폼들은 최소 비용이 필요한 것 같아요 + +01:41.960 --> 01:47.810 +오픈아이와 클로드의 경우 현재 
미국에서는 최소 5달러의 신용카드를
+
+01:47.810 --> 01:50.480
+빌린 후 돈을 써야 해요
+
+01:50.480 --> 01:55.820
+이 코스에선 그 5달러의 표면을 긁진 않겠지만 초기 금액을 입력해야 합니다
+
+01:55.820 --> 02:02.030
+하지만 여러분의 프로젝트에서 다양한 방식으로 사용할 기회가 많을 거예요
+
+02:02.030 --> 02:08.270
+저는 교육과 능력 면에서 큰 투자를 하는 거라고 봅니다 이런 모델을 활용해 새로운
+
+02:08.270 --> 02:13.130
+프로젝트를 구축하고 교육과 경험을 쌓을 수 있으니까요
+
+02:13.160 --> 02:18.080
+하지만 만약 라마를 사용하는 것이 불편하다면 라마를
+
+02:18.080 --> 02:24.140
+사용하는 법을 봤고 지난 시간에 만들었으니 오픈AI, 클로드,
+
+02:24.140 --> 02:30.290
+제미나이 대신 라마를 사용할 수 있을 거예요
+
+02:30.380 --> 02:35.870
+지난 번 연습으로 연습했으니 원하면 실행에 옮길 수 있어요.
+
+02:35.900 --> 02:38.810
+API 비용이 불편하다면 말이죠
+
+02:38.810 --> 02:45.080
+이제 다른 사이트로 가서 API 비용과 컨텍스트에 대한 통찰력을
+
+02:45.080 --> 02:47.360
+보여드릴게요
+
+02:47.360 --> 02:48.020
+창문요
diff --git a/week5/community-contributions/subtitles/srts/60619619/en_US.srt b/week5/community-contributions/subtitles/srts/60619619/en_US.srt
new file mode 100755
index 0000000..de39aea
--- /dev/null
+++ b/week5/community-contributions/subtitles/srts/60619619/en_US.srt
@@ -0,0 +1,121 @@
+WEBVTT
+
+00:00.200 --> 00:03.650
+Well, day four was an information dense day.
+
+00:03.650 --> 00:09.500
+I do hope that you learned some something useful here, and I hope that even those that were already
+
+00:09.530 --> 00:15.440
+somewhat familiar with things like tokens and context windows picked up a thing or two and are now able
+
+00:15.440 --> 00:17.420
+to more confidently put that into practice.
+
+00:17.420 --> 00:23.150
+Certainly this is foundational stuff that we'll be using again and again over the next week or next
+
+00:23.150 --> 00:26.870
+weeks as we build on this and apply it to commercial problems.
+
+00:26.870 --> 00:32.900
+So what you should now be confident doing is writing code that calls OpenAI and also a llama and using
+
+00:32.900 --> 00:39.260
+it to to summarize the summary use case that we worked on, you can now contrast the leading six frontier
+
+00:39.260 --> 00:39.710
+models.
+ +00:39.710 --> 00:44.510 +In fact, a bit more than that, because we've been exposed to zero one preview as well as GPT four + +00:44.540 --> 00:47.750 +zero, and to Claude artifacts and things like that. + +00:48.020 --> 00:53.270 +Um, and in particular, we know that almost all of them are not able to answer the question, how many + +00:53.300 --> 00:56.690 +A's are there in the the that sentence? + +00:56.690 --> 00:59.570 +And it's worth pointing out, of course, the reason they struggled with it. + +00:59.570 --> 01:05.240 +Now, it should be very clear to you it's because that text is tokenized by the time it's sent in to + +01:05.270 --> 01:07.880 +the model, and all the model knows about Is tokens. + +01:07.880 --> 01:13.730 +And so from that perspective, counting letters doesn't mean anything to it, because all it sees is + +01:13.730 --> 01:18.440 +the tokens that are already combined and they don't have the meaning of the letters. + +01:18.440 --> 01:23.640 +And that's why it's actually a very difficult question for an LLM, but something like zero one preview + +01:23.670 --> 01:29.880 +that's able to think step by step and reason and understands how things need to be spelt, is able to + +01:29.880 --> 01:30.660 +do it. + +01:30.720 --> 01:33.390 +Um, and then perplexity was also able to do it too, wasn't it. + +01:33.390 --> 01:38.430 +And I suspect that's because it was able to look that up in its resources of knowledge. + +01:39.000 --> 01:45.750 +Uh, so also now you've built on top of this to understand about the history of Transformers and how + +01:45.750 --> 01:46.890 +we've got to where we are. + +01:46.920 --> 01:52.470 +Tokens and what it means to tokenize context windows and how they're not just the input. + +01:52.470 --> 01:54.540 +It's the whole conversation so far. 
+ +01:54.540 --> 02:01.350 +And now you know about API costs and where to go to look up the costs of APIs and the context windows + +02:01.380 --> 02:04.260 +associated with the big models. + +02:05.160 --> 02:06.240 +Okay. + +02:06.270 --> 02:08.790 +Next lecture is going to be exciting. + +02:08.790 --> 02:10.560 +You're going to be coding this time. + +02:10.560 --> 02:14.970 +You're going to be building some confidence in your coding against the OpenAI API. + +02:15.000 --> 02:20.430 +We're going to use a bunch of different techniques, and you're going to be implementing a business + +02:20.430 --> 02:23.400 +solution that is more of a wholesale business solution. + +02:23.400 --> 02:28.290 +That's going to involve a couple of different calls to LMS, and we're going to get it done in a matter + +02:28.290 --> 02:28.860 +of minutes. + +02:28.860 --> 02:32.610 +And it's a great lab, and it will end with exercise for you. + +02:32.610 --> 02:39.090 +So without further ado, uh, let's wrap up for today and I will see you tomorrow for our Big Week one + +02:39.090 --> 02:39.960 +project. 
diff --git a/week5/community-contributions/subtitles/srts/60619619/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619619/ja_JP.srt new file mode 100755 index 0000000..e29290b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619619/ja_JP.srt @@ -0,0 +1,91 @@ +WEBVTT + +00:00.200 --> 00:03.650 +さて、 4日目は情報密度の濃い一日だった。 + +00:03.650 --> 00:09.500 +また、 トークンやコンテクスト・ウィンドウのようなものにすでに多少慣れている人たちも、 + +00:09.530 --> 00:17.420 +何かひとつかふたつは得て、 より自信を持って実践できるようになっていることを願う。 + +00:17.420 --> 00:23.150 +確かに、 これは基礎的なものであり、 来週、 再来週と、 これをベースに商業的な問題に応用していく中で、 + +00:23.150 --> 00:26.870 +何度も何度も使っていくことになるだろう。 + +00:26.870 --> 00:32.900 +つまり、 OpenAIとラマを呼び出すコードを書いて、 それを使って私たちが取り組んだユースケースを要約することで、 + +00:32.900 --> 00:39.710 +主要な6つのフロンティアモデルを対比することができます。 + +00:39.710 --> 00:44.510 +実際、 ゼロワン・プレビューやGPTフォーゼロ、 クロードのアーティファクトなどにも触れているので、 + +00:44.540 --> 00:47.750 +それよりも少し多い。 + +00:48.020 --> 00:56.690 +特に、 その文の中にAはいくつあるかという質問に、 ほとんど全員が答えられないことがわかっている。 + +00:56.690 --> 00:59.570 +もちろん、 彼らが苦労した理由も指摘する価値がある。 + +00:59.570 --> 01:07.880 +このテキストはモデルに送られる時点でトークン化されており、 モデルが知っているのはトークンだけだからだ。 + +01:07.880 --> 01:13.730 +なぜなら、 すでに組み合わされているトークンしか見ていないし、 + +01:13.730 --> 01:18.440 +トークンには文字の意味はないからだ。 + +01:18.440 --> 01:23.640 +だから、 LLMにとっては非常に難しい問題なのですが、 ゼロワン・プレビューのように段階を追って考えることができ、 + +01:23.670 --> 01:30.660 +理性的で、 物事のスペルがどうあるべきかを理解している人ならできるのです。 + +01:30.720 --> 01:33.390 +うーん、 それから当惑もできたよね。 + +01:33.390 --> 01:38.430 +そして、 それは知識のリソースでそれを調べることができたからだろう。 + +01:39.000 --> 01:46.890 +トランスフォーマーの歴史や、 現在に至るまでの経緯について理解するために、 このようなことを積み重ねてきたわけですね。 + +01:46.920 --> 01:52.470 +トークンと、 コンテキスト・ウィンドウをトークン化することの意味、 そしてそれが単なる入力ではないということ。 + +01:52.470 --> 01:54.540 +これまでの会話のすべてだ。 + +01:54.540 --> 02:04.260 +そして、 APIのコストと、 APIのコストと大きなモデルに関連するコンテキスト・ウィンドウを調べるにはどこに行けばいいのかがわかっただろう。 + +02:05.160 --> 02:06.240 +オーケー。 + +02:06.270 --> 02:08.790 +次の講義はエキサイティングなものになりそうだ。 + +02:08.790 --> 02:10.560 +今回はコーディングすることになる。 + +02:10.560 --> 02:14.970 +OpenAI APIに対するコーディングに自信をつけることになるだろう。 + +02:15.000 --> 
02:23.400 +私たちはさまざまなテクニックを駆使し、 よりホールセール的なビジネス・ソリューションを導入することになる。 + +02:23.400 --> 02:28.860 +LMSに何度か電話して、 数分で終わらせるつもりだ。 + +02:28.860 --> 02:32.610 +素晴らしい研究室だし、 最後には運動もできる。 + +02:32.610 --> 02:39.960 +それではまた明日、 ビッグ・ウィーク第1弾の企画でお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/60619619/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619619/ko_KR.srt new file mode 100755 index 0000000..0ec7e74 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619619/ko_KR.srt @@ -0,0 +1,112 @@ +WEBVTT + +00:00.200 --> 00:03.650 +넷째 날은 정보가 많은 날이었어요 + +00:03.650 --> 00:09.500 +여러분이 여기서 유용한 걸 배우셨길 바랍니다 이미 토큰이나 컨텍스트 같은 것에 + +00:09.530 --> 00:15.440 +익숙한 분들도 Windows가 픽업한 게 한두 개 있다면 그걸 좀 더 자신 있게 + +00:15.440 --> 00:17.420 +실행할 수 있길 바라요 + +00:17.420 --> 00:23.150 +이건 기본 기술이고 앞으로 몇 주 동안 계속 사용할 겁니다 이걸 + +00:23.150 --> 00:26.870 +토대로 상업적 문제에 적용할 거예요 + +00:26.870 --> 00:32.900 +이제 오픈AI와 llama를 호출하는 코드를 작성해서 지금까지 다룬 + +00:32.900 --> 00:39.710 +요약 사용 사례를 요약해 보겠습니다 이제 여섯 가지 프런티어 모델을 비교해 보죠 + +00:39.710 --> 00:44.510 +사실 그 이상이죠 왜냐하면 01 미리보기와 GPT 40에 노출됐거든요 + +00:44.540 --> 00:47.750 +클로드 아티팩트 같은 것도요 + +00:48.020 --> 00:53.270 +특히 이 질문에 답할 수 있는 학생이 거의 없다는 걸 알아요 그 + +00:53.300 --> 00:56.690 +문장에 A가 몇 개나 들어가는지요 + +00:56.690 --> 00:59.570 +왜 그들이 힘들어했는지도 짚고 넘어갈 필요가 있어요 + +00:59.570 --> 01:05.240 +보시면 아시겠지만 모델로 전송될 때마다 텍스트가 토큰화되기 때문입니다 + +01:05.270 --> 01:07.880 +모델은 토큰에 대해서만 알고 있죠 + +01:07.880 --> 01:13.730 +그래서 그런 관점에서 보면 글자를 세는 건 아무 의미가 없어요 이미 합쳐진 + +01:13.730 --> 01:18.440 +토큰들만 보이니까요 글자의 의미가 없는 토큰들이죠 + +01:18.440 --> 01:23.640 +그래서 LLM에 대해 아주 어려운 질문인 겁니다 하지만 01 프리뷰처럼 + +01:23.670 --> 01:30.660 +단계별로 이성과 생각을 할 수 있고 스펠트가 어떻게 필요한지 이해하는 건 가능하죠 + +01:30.720 --> 01:33.390 +당혹스러움도 그 역할을 할 수 있었죠 + +01:33.390 --> 01:38.430 +그건 아마 지식을 통해 찾아볼 수 있었기 때문일 거예요 + +01:39.000 --> 01:45.750 +그 위에 또 다른 건물을 지었잖아요 트랜스포머의 역사와 현재의 모습을 이해하기 + +01:45.750 --> 01:46.890 +위해서요 + +01:46.920 --> 01:52.470 +토큰과 컨텍스트 윈도우를 토큰화하는 것의 의미 그리고 어떻게 입력만 하는 것이 아닌지를요 + +01:52.470 --> 01:54.540 
+지금까지의 대화 전부요 + +01:54.540 --> 02:01.350 +이제 API 비용도 알고 API 비용을 어디서 찾을지도 알죠 큰 모델과 + +02:01.380 --> 02:04.260 +관련된 컨텍스트 창도요 + +02:05.160 --> 02:06.240 +네 + +02:06.270 --> 02:08.790 +다음 강의는 정말 재미있을 거예요 + +02:08.790 --> 02:10.560 +이번엔 당신이 코딩을 해요 + +02:10.560 --> 02:14.970 +OpenAI API 코딩에 대한 자신감이 생기게 되죠 + +02:15.000 --> 02:20.430 +다양한 테크닉을 사용할 겁니다 여러분은 비즈니스 솔루션을 구현할 겁니다 + +02:20.430 --> 02:23.400 +도매에 가까운 비즈니스 솔루션이죠 + +02:23.400 --> 02:28.290 +LMS에 대한 몇 가지 다른 호출이 포함될 겁니다 몇 분 안에 get get을 완료하도록 + +02:28.290 --> 02:28.860 +하죠 + +02:28.860 --> 02:32.610 +훌륭한 실험실이고 운동으로 끝날 거예요 + +02:32.610 --> 02:39.960 +그럼 지체 없이 오늘은 이만하고 내일 첫 주 프로젝트로 만나요 diff --git a/week5/community-contributions/subtitles/srts/60619651/en_US.srt b/week5/community-contributions/subtitles/srts/60619651/en_US.srt new file mode 100755 index 0000000..16fb02e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619651/en_US.srt @@ -0,0 +1,211 @@ +WEBVTT + +00:00.140 --> 00:02.960 +I mentioned before an AI company called vellum. + +00:03.530 --> 00:08.780 +When we were talking about the different questions, they had come up with that question about scales + +00:08.780 --> 00:16.850 +on reptiles that we asked GPT four zero and zero one preview vellum publishes a leaderboard, which + +00:16.850 --> 00:23.660 +we will come back to in future weeks, but it has a lot of super interesting information that ranks + +00:23.660 --> 00:25.520 +and compares different llms. + +00:25.520 --> 00:29.030 +And there are a bunch of different leaderboards that we'll look at when we talk about picking the right + +00:29.060 --> 00:30.650 +LLM for the task at hand. + +00:30.680 --> 00:35.720 +It's a good one to bookmark, but in particular, there's something useful on the vellum website, which + +00:35.720 --> 00:39.500 +I don't think I find somewhere else that you can also download should you wish. 
+ +00:39.530 --> 00:45.890 +According to that pop up, and it's right here, about halfway up on the Valimai LLM leaderboard website, + +00:45.890 --> 00:54.050 +and it's a table that compares the context windows with the costs of the different frontier models. + +00:54.080 --> 00:58.550 +And it's really useful to have to hand, particularly when you're talking about costs. + +00:58.580 --> 01:02.390 +First of all, let's just mention the context windows that we talked about. + +01:02.390 --> 01:11.430 +So the model with the largest context window is Gemini 1.5 flash, and it has an outrageous context + +01:11.430 --> 01:11.910 +window. + +01:11.940 --> 01:12.900 +Outrageous. + +01:12.900 --> 01:15.210 +1 million tokens. + +01:15.210 --> 01:22.410 +And you may remember a million tokens means for normal English language about 750,000 words, which + +01:22.410 --> 01:25.350 +isn't far off the complete works of Shakespeare. + +01:25.380 --> 01:29.040 +If you remember, I think we thought that was 1.2 million tokens, perhaps. + +01:29.280 --> 01:37.110 +So you could almost fit the complete works of Shakespeare in one prompt into Gemini to have it to continue + +01:37.110 --> 01:39.210 +generating Gemini 1.5 flash. + +01:39.210 --> 01:43.560 +So that is extraordinarily wide context window. + +01:43.590 --> 01:57.360 +The Claude series of models have a 200,000 token context window, and the GPT set have mostly 128,000 + +01:57.390 --> 01:59.250 +token context window. + +01:59.280 --> 02:06.450 +And again, remember that that means that the all of the series of prompts in one conversation, the + +02:06.450 --> 02:11.340 +input in a conversation, the response the next input, the response, and also the system prompt at + +02:11.340 --> 02:11.910 +the beginning. + +02:11.910 --> 02:14.730 +They all have to fit within that context window. 
+ +02:14.730 --> 02:21.150 +And actually also all of the tokens that get generated in the output, because each each token is generated + +02:21.150 --> 02:27.630 +one at a time and then passed in and it generates the next token given all of the input and output that + +02:27.630 --> 02:28.770 +came before it. + +02:29.250 --> 02:32.280 +So that's what has to fit into the context window. + +02:32.280 --> 02:36.930 +But there's a lot of space for that in 128,000 tokens. + +02:36.930 --> 02:39.570 +I guess it's what about about 100,000 words. + +02:39.780 --> 02:44.670 +So that's that's how to look at the context window size. + +02:44.670 --> 02:48.480 +And now over here we get a sense of the cost. + +02:48.480 --> 02:53.880 +And at first blush you might think this doesn't exactly look like it's super cheap if you take something + +02:53.880 --> 03:01.140 +like Claude 3.5 sonnet, which is the one that I frequently find myself using in the most powerful LLM + +03:01.170 --> 03:01.770 +on the planet. + +03:01.770 --> 03:10.410 +By most measures, you'll see that its cost is $3 per input token, $15 per output token. + +03:10.440 --> 03:15.760 +It sounds quite expensive, but no, it's because it's not $3 per input token. + +03:15.760 --> 03:21.580 +It's $3 per million input tokens, $3 per million input tokens. + +03:21.580 --> 03:27.400 +So if you passed in in your input prompt, the complete works of Shakespeare, then you'd be paying + +03:27.400 --> 03:29.200 +a little bit more than $3. + +03:29.200 --> 03:34.570 +So I just want to get across that whilst these numbers are meaningful and they are real numbers that + +03:34.570 --> 03:39.910 +one needs to watch for, and you can bring up the dashboard with each of these and monitor carefully + +03:39.910 --> 03:41.290 +your API costs. 
+ +03:41.320 --> 03:46.690 +Typically, the kinds of things we're going to be doing on this course are asking short business questions, + +03:46.690 --> 03:51.160 +summarizing things, trying to draw quick conclusions. + +03:51.220 --> 03:58.030 +You'll find that you will be costing less than a cent for most of them, less than than a fraction of + +03:58.030 --> 03:58.720 +a dollar. + +03:58.750 --> 04:08.140 +The model that we will use most often is GPT four mini, and that will cost us $0.15 per million input + +04:08.170 --> 04:12.910 +tokens and $0.60 per million output tokens. + +04:12.910 --> 04:19.060 +And so, as you can imagine, you have to generate a fair amount of content in order to be able to scratch + +04:19.060 --> 04:25.930 +the surface in terms of a real price, and bearing in mind that these are large models and a lot of + +04:25.930 --> 04:31.780 +compute is required to run these and be able to deliver the quality of results we get. + +04:31.840 --> 04:37.060 +These API costs do seem to me to be to be very reasonable in the circumstances. + +04:37.060 --> 04:42.220 +So I hope this gives you some some, some clarity, some perspective on what these costs are like and + +04:42.220 --> 04:42.850 +what they mean. + +04:42.850 --> 04:50.320 +The total cost is then the sum of your cost for your input tokens by by this number, plus the cost + +04:50.320 --> 04:52.570 +of any output tokens that got generated. + +04:52.570 --> 04:58.960 +And in the APIs you can specify a a maximum number of output tokens to generate to stop it from going + +04:59.020 --> 04:59.890 +too far. + +05:00.070 --> 05:03.340 +Uh, so that gives you a good sense of the costs. + +05:03.340 --> 05:08.170 +And you should bookmark this page and have it as something that you come back to anytime that you want. + +05:08.200 --> 05:11.620 +A little bit more clarity on how to think about these costs. 
+ +05:11.620 --> 05:15.760 +And as I say, the time when it really becomes meaningful is when you're building a system that's going + +05:15.790 --> 05:21.280 +to maybe make a thousand calls to the model, and then you have to really bear in mind, how are these + +05:21.280 --> 05:22.600 +costs going to add up? diff --git a/week5/community-contributions/subtitles/srts/60619651/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619651/ja_JP.srt new file mode 100755 index 0000000..b24f4e1 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619651/ja_JP.srt @@ -0,0 +1,154 @@ +WEBVTT + +00:00.140 --> 00:02.960 +以前、 ヴェラムというAI企業の話をした。 + +00:03.530 --> 00:08.780 +さまざまな質問について話していたとき、 + +00:08.780 --> 00:25.520 +彼らはGPTフォーゼロ・アンド・ゼロワン・プレビュー・ヴェラムがリーダーボードを発表していることを思いついた。 + +00:25.520 --> 00:30.650 +そして、 私たちが目の前のタスクに適したLLMを選ぶことについて話すときに見てみましょう異なるリーダーボードの束があります。 + +00:30.680 --> 00:39.500 +ブックマークしておいて損はないが、 特にベラムのウェブサイトには便利なことが書いてある。 + +00:39.530 --> 00:45.890 +そのポップアップによると、 ヴァリマイLLMのリーダーボードのウェブサイトの約半分、 + +00:45.890 --> 00:54.050 +ここにあり、 さまざまなフロンティアモデルのコストとコンテキストウィンドウを比較した表だ。 + +00:54.080 --> 00:58.550 +特にコストの話をするときに、 手元にあると本当に便利なんだ。 + +00:58.580 --> 01:02.390 +まず最初に、 私たちが話したコンテキスト・ウィンドウについて触れておこう。 + +01:02.390 --> 01:11.910 +つまり、 最大のコンテクストウィンドウを持つモデルはジェミニ1ということになる。 5フラッシュで、 とんでもないコンテキストウィンドウを持っている。 + +01:11.940 --> 01:12.900 +とんでもない。 + +01:12.900 --> 01:15.210 +100万トークン。 + +01:15.210 --> 01:25.350 +100万トークンとは、 通常の英語では約75万語を意味し、 シェイクスピア全集には遠く及ばない。 + +01:25.380 --> 01:29.040 +思い起こせば、 私たちはそれが1だと考えていたと思う。 おそらく200万トークンだろう。 + +01:29.280 --> 01:39.210 +つまり、 双子座1号を生成し続けるために、 シェイクスピア全集を双子座に1回のプロンプトでほとんど収めることができるのだ。 + +01:39.210 --> 01:39.210 +5フラッシュ + +01:39.210 --> 01:43.560 +つまり、 非常に広いコンテクスト・ウィンドウなのだ。 + +01:43.590 --> 01:59.250 +クロード・シリーズのモデルは20万トークンのコンテクスト・ウィンドウを持ち、 GPTセットは主に12万8000トークンのコンテクスト・ウィンドウを持つ。 + +01:59.280 --> 02:11.910 +繰り返しになるが、 1つの会話における一連のプロンプト、 会話における入力、 応答、 次の入力、 応答、 そして冒頭のシステム・プロンプトのすべてを意味することを覚えておいてほしい。 + +02:11.910 --> 02:14.730 +それらはすべて、 
そのコンテクストウィンドウの中に収まらなければならない。 + +02:14.730 --> 02:28.770 +各トークンは一度にひとつずつ生成され、 それが渡されると、 それ以前のすべての入力と出力を考慮して次のトークンを生成するからだ。 + +02:29.250 --> 02:32.280 +だから、 それがコンテキスト・ウィンドウに収まらなければならない。 + +02:32.280 --> 02:36.930 +しかし、 128,000トークンにはそのためのスペースがたくさんある。 + +02:36.930 --> 02:39.570 +10万語くらいかな。 + +02:39.780 --> 02:44.670 +これがコンテキスト・ウィンドウのサイズの見方だ。 + +02:44.670 --> 02:48.480 +そして今、 私たちはその代償を感じている。 + +02:48.480 --> 03:01.140 +そして、 一見したところ、 クロード3のようなものを取れば、 これは決して激安には見えないと思うかもしれない。 + +03:01.140 --> 03:01.140 +5ソネットは、 + +03:01.170 --> 03:01.770 +私が地球上で最もパワフルなLLMで頻繁に使っているものだ。 + +03:01.770 --> 03:10.410 +たいていの尺度では、 そのコストは入力トークン1個につき3ドル、 出力トークン1個につき15ドルであることがわかるだろう。 + +03:10.440 --> 03:15.760 +かなり高く聞こえるが、 そうではなく、 インプット・トークン1つにつき3ドルではないからだ。 + +03:15.760 --> 03:21.580 +100万投入トークンあたり3ドルだ。 + +03:21.580 --> 03:29.200 +つまり、 入力プロンプトにシェイクスピア全集と入力すれば、 3ドルより少し高い金額を支払うことになる。 + +03:29.200 --> 03:34.570 +だから、 これらの数字には意味があり、 注意しなければならない現実の数字であること、 + +03:34.570 --> 03:41.290 +そして、 それぞれのダッシュボードを表示し、 APIコストを注意深く監視することができることを伝えたい。 + +03:41.320 --> 03:46.690 +通常、 このコースでやることは、 短いビジネス質問をし、 物事を要約し、 + +03:46.690 --> 03:51.160 +素早く結論を導き出そうとすることだ。 + +03:51.220 --> 03:58.720 +ほとんどの場合、 1セント未満、 端数ドル未満で済むことがわかるだろう。 + +03:58.750 --> 04:08.140 +最もよく使うモデルはGPT4ミニで、 コストは0ドルだ。 100万投入トークンあたり15ドル、 + +04:08.170 --> 04:12.910 +0ドル。 100万出力トークンあたり60ドル。 + +04:12.910 --> 04:19.060 +また、 これらは大規模なモデルであり、 これらを実行し、 + +04:19.060 --> 04:31.780 +私たちが得られるような質の高い結果を提供するためには、 多くの計算が必要であることを念頭に置く必要がある。 + +04:31.840 --> 04:37.060 +これらのAPI費用は、 状況的に非常に合理的であると私には思える。 + +04:37.060 --> 04:42.850 +これで、 これらの費用がどのようなもので、 どのような意味を持つのか、 多少なりとも明確になり、 見通しがつくといいのだが......。 + +04:42.850 --> 04:52.570 +合計コストは、 入力トークンのコストにこの数をかけたものと、 生成された出力トークンのコストの合計となる。 + +04:52.570 --> 04:59.890 +また、 APIでは、 生成する出力トークンの最大数を指定することができる。 + +05:00.070 --> 05:03.340 +ええと、 これで費用の感覚はつかめたと思う。 + +05:03.340 --> 05:08.170 +このページをブックマークして、 いつでも見られるようにしておくといい。 + +05:08.200 --> 05:11.620 +これらのコストをどのように考えるべきか、 もう少し明確にしよう。 + +05:11.620 --> 05:15.760 +そして、 私が言うように、 
それが本当に意味を持つようになるのは、 + +05:15.790 --> 05:22.600 +モデルに1000回コールするようなシステムを構築するときだ。 diff --git a/week5/community-contributions/subtitles/srts/60619651/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619651/ko_KR.srt new file mode 100755 index 0000000..f708fc4 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619651/ko_KR.srt @@ -0,0 +1,196 @@ +WEBVTT + +00:00.140 --> 00:02.960 +인공지능 회사인 베룸을 언급했었죠 + +00:03.530 --> 00:08.780 +다른 질문들에 대해 이야기할 때 파충류의 비늘에 대한 질문이 + +00:08.780 --> 00:16.850 +나왔는데요 GPT 400과 01 미리 보기 벨럼은 리더보드를 만들었습니다 나중에 다시 + +00:16.850 --> 00:23.660 +살펴보도록 하죠 다양한 llm의 순위와 비교를 하는 흥미로운 정보가 많이 + +00:23.660 --> 00:25.520 +있어요 + +00:25.520 --> 00:29.030 +당면한 작업에 맞는 LLM을 고르는 것에 대해 얘기할 때 다양한 leaderboard가 + +00:29.060 --> 00:30.650 +있어요 + +00:30.680 --> 00:35.720 +즐겨찾기에 좋고 특히 피지 웹사이트에 유용한 게 있어요 다른 곳에선 + +00:35.720 --> 00:39.500 +못 찾을 것 같은데 원하시면 다운로드도 가능해요 + +00:39.530 --> 00:45.890 +팝업 창에 따르면 바로 여기 있네요 Valimai LLM leaderboard + +00:45.890 --> 00:54.050 +웹사이트의 중간쯤에 있어요 컨텍스트 창과 다른 프론티어 모델의 비용을 비교하는 테이블이죠 + +00:54.080 --> 00:58.550 +직접 하는 게 정말 유용해요 특히 비용에 관해서요 + +00:58.580 --> 01:02.390 +먼저, 아까 얘기했던 컨텍스트 창을 언급하죠 + +01:02.390 --> 01:11.910 +문맥상 창문이 가장 큰 모델은 제미니 1호예요 5 플래시, 그리고 문맥상 충격적인 창문이 있어요 + +01:11.940 --> 01:12.900 +말도 안 돼요 + +01:12.900 --> 01:15.210 +백만 토큰요 + +01:15.210 --> 01:22.410 +토큰 백만 개는 일반적인 영어에서 약 750,000단어의 뜻입니다 셰익스피어 + +01:22.410 --> 01:25.350 +작품과 크게 다르지 않죠 + +01:25.380 --> 01:29.040 +1편인 줄 알았던 것 같아요 2백만 토큰 정도요 + +01:29.280 --> 01:39.210 +셰익스피어의 모든 작품을 제미니 1호에 끼워 맞출 수 있었죠 + +01:39.210 --> 01:39.210 +5번 섬광이에요 + +01:39.210 --> 01:43.560 +컨텍스트 창이 아주 넓어요 + +01:43.590 --> 01:57.360 +클로드 시리즈 모델은 200,000 토큰 컨텍스트 창을 가지고 있고 GPT는 대부분 128,000 토큰 컨텍스트 창을 + +01:57.390 --> 01:59.250 +가지고 있네요 + +01:59.280 --> 02:06.450 +다시 한 번 기억하세요 하나의 대화에 모든 프롬프트, 대화 안의 입력 응답 + +02:06.450 --> 02:11.910 +다음 입력, 응답 그리고 시작 부분에 있는 시스템도요 + +02:11.910 --> 02:14.730 +전부 그 문맥에 맞아야 해요 + +02:14.730 --> 02:21.150 +그리고 출력된 모든 토큰들도 마찬가지입니다 각각의 토큰은 한 번에 하나씩 + +02:21.150 
--> 02:27.630
+생성되어 전달되고 다음 토큰을 생성합니다 이전의 모든 입력과 출력 결과에
+
+02:27.630 --> 02:28.770
+의해서요
+
+02:29.250 --> 02:32.280
+컨텍스트 창에 맞아야 해요
+
+02:32.280 --> 02:36.930
+하지만 128,000 토큰으로 만들기엔 공간이 너무 많아요
+
+02:36.930 --> 02:39.570
+100,000단어 정도 되겠네요
+
+02:39.780 --> 02:44.670
+컨텍스트 창 크기를 보는 방법이었어요
+
+02:44.670 --> 02:48.480
+이제 여기서 그 비용을 알 수 있죠
+
+02:48.480 --> 02:53.880
+얼핏 보면 저렴해 보이지 않을 수도 있습니다
+
+02:53.880 --> 03:01.770
+클로드 3 같은 경우라면요 지구상에서 가장 강력한 LLM에서 자주 사용하는 소네트 5편이죠
+
+03:01.770 --> 03:10.410
+입력 토큰 하나에 3달러 출력 토큰 하나에 15달러죠
+
+03:10.440 --> 03:15.760
+꽤나 비쌀 것 같지만 아닙니다 입력 토큰 당 3달러가 아니기 때문이죠
+
+03:15.760 --> 03:21.580
+백만당 3달러 입력 토큰 백만당 3달러 입력 토큰이죠
+
+03:21.580 --> 03:27.400
+입력 프롬프트에 셰익스피어 전집을 입력하면 3달러보다 조금 더 많은
+
+03:27.400 --> 03:29.200
+비용을 내야 해요
+
+03:29.200 --> 03:34.570
+이 숫자들이 의미 있고 주의 깊게 봐야 할 실제 숫자일 때 대시보드를
+
+03:34.570 --> 03:41.290
+불러와서 API 비용을 조심스럽게 모니터링하세요.
+
+03:41.320 --> 03:46.690
+이 과정에서 저희가 주로 하는 건 숏 비즈니스 질문입니다
+
+03:46.690 --> 03:51.160
+요약하고 빠른 결론을 내리려고 하죠
+
+03:51.220 --> 03:58.720
+비용은 대부분 1센트 이하로 들 겁니다 1달러도 안 되는 비율로요
+
+03:58.750 --> 04:08.140
+주로 사용할 모델은 GPT 4 미니인데 비용은 0달러예요 입력 토큰 1500만 개와
+
+04:08.170 --> 04:12.910
+0달러예요 출력 토큰은 6천만 대 1이에요
+
+04:12.910 --> 04:19.060
+상상하셨다시피 상당한 양의 콘텐츠를 생성해야 실제 가격의 표면을
+
+04:19.060 --> 04:25.930
+긁어낼 수 있습니다 이런 대형 모델들을 실행하고 결과의 품질을 전달하려면
+
+04:25.930 --> 04:31.780
+많은 컴퓨팅이 필요하다는 걸 염두에 두고요
+
+04:31.840 --> 04:37.060
+API 비용은 제가 보기엔 아주 합리적인 것 같아요
+
+04:37.060 --> 04:42.220
+이 일을 계기로 좀 더 명확해졌으면 좋겠어요 비용이 얼마나 드는지 어떤 의미인지
+
+04:42.220 --> 04:42.850
+말이죠
+
+04:42.850 --> 04:50.320
+합계 비용은 입력 토큰에 대한 비용의 합을 이 숫자로 나눈 것입니다. 거기에 생성된 모든 출력
+
+04:50.320 --> 04:52.570
+토큰의 비용을 더하죠.
+ +04:52.570 --> 04:58.960 +API에서 최대 출력 토큰의 수를 지정하여 생성하여 너무 멀리 가는 것을 막을 + +04:59.020 --> 04:59.890 +수 있죠 + +05:00.070 --> 05:03.340 +그래서 비용이 어느 정도인지 알 수 있죠 + +05:03.340 --> 05:08.170 +이 페이지를 즐겨찾기 해서 언제든 원할 때 다시 보도록 해두세요 + +05:08.200 --> 05:11.620 +비트에 대해 좀 더 명확하게 생각할 수 있죠 + +05:11.620 --> 05:15.760 +말씀드렸듯이 정말 의미있는 때는 모델에 천 개의 호출을 하는 + +05:15.790 --> 05:21.280 +시스템을 구축할 때입니다 그러면 정말 염두에 두어야 합니다 이런 비용이 어떻게 + +05:21.280 --> 05:22.600 +증가할까요? diff --git a/week5/community-contributions/subtitles/srts/60619721/en_US.srt b/week5/community-contributions/subtitles/srts/60619721/en_US.srt new file mode 100755 index 0000000..9106d95 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619721/en_US.srt @@ -0,0 +1,418 @@ +WEBVTT + +00:00.260 --> 00:02.930 +Now it's time to talk for a minute about tokens. + +00:02.960 --> 00:07.700 +Tokens are the individual units which get passed into a model. + +00:07.880 --> 00:13.550 +In the early days of building neural networks, one of the things that you'd see quite often is neural + +00:13.550 --> 00:16.790 +networks that were trained character by character. + +00:16.790 --> 00:22.400 +So you would have a model which would take a series of individual characters, and it would be trained + +00:22.400 --> 00:28.250 +such that it would predict the most likely next character, given the characters that have come before. + +00:28.280 --> 00:33.020 +That was a particular technique, and in some ways it had a lot of benefits. + +00:33.020 --> 00:38.210 +It meant that the number of possible inputs was a limited number, just the number of possible letters + +00:38.210 --> 00:39.950 +of the alphabet and some symbols. + +00:39.950 --> 00:43.760 +And so that meant that it had a very manageable vocab size. + +00:43.760 --> 00:48.470 +And it needed to its weights could didn't didn't need to worry about too many different possibilities + +00:48.470 --> 00:49.910 +for the inputs. 
+ +00:49.940 --> 00:57.200 +But the challenge with it was that it meant that there was so much required from the model in terms + +00:57.200 --> 01:05.830 +of understanding how a series of different characters becomes a word, and all of the intelligence associated + +01:05.830 --> 01:12.280 +with the meaning behind a word had to be captured within the weights of the model, and that was expecting + +01:12.310 --> 01:15.550 +too much from the model itself. + +01:15.550 --> 01:22.720 +And so we then went to almost the other extreme where neural networks, these models were trained of + +01:22.720 --> 01:25.270 +each individual possible word. + +01:25.270 --> 01:29.680 +So you would build something called the vocab, which is like like the sort of the dictionary, the + +01:29.680 --> 01:31.840 +index of all the possible words. + +01:31.840 --> 01:37.420 +And then each possible word a token could be any one of these possible words. + +01:37.420 --> 01:44.470 +So that meant that the the model itself could start to understand that each individual word had a different + +01:44.470 --> 01:49.600 +meaning, rather than having to to appreciate how a sequence of characters would have a meaning. + +01:49.600 --> 01:51.040 +So that was a good thing. + +01:51.040 --> 01:55.210 +But the trouble was that it resulted in an enormous vocab. + +01:55.240 --> 01:58.740 +You needed to have a vocab the size of all of the possible words. + +01:58.740 --> 02:04.140 +And of course, there are so many possible words because there are also names of places and people. + +02:04.140 --> 02:07.800 +And so there had to be special tokens for unknown words. + +02:07.800 --> 02:10.650 +And that that caused some limitations. + +02:10.830 --> 02:13.860 +Rare words had to be omitted, special places had to be omitted. + +02:13.860 --> 02:17.370 +And so that caused some some some some oddness. 
+ +02:17.550 --> 02:25.860 +Um, and then around the time of the of GPT uh, a discovery was made a breakthrough that that there + +02:25.860 --> 02:31.170 +was a sort of happy medium between these two extremes, rather than trying to train a model based on + +02:31.170 --> 02:36.360 +individual characters and need it to learn how to combine them to form a word. + +02:36.360 --> 02:43.350 +And rather than trying to say that each word is a different token, you could take chunks of letters, + +02:43.350 --> 02:48.600 +chunks that would that would sometimes form a complete word and sometimes part of a word, and call + +02:48.600 --> 02:56.720 +it a token, and train the model to take a series of tokens and output tokens based on the tokens that + +02:56.720 --> 02:57.740 +are passed in. + +02:57.740 --> 03:01.250 +And this had a number of interesting benefits. + +03:01.250 --> 03:05.630 +One of them is that because you're breaking things down into tokens, you could also handle things like + +03:05.630 --> 03:07.880 +names of places and proper names. + +03:07.880 --> 03:10.040 +They would just be more fragments of tokens. + +03:10.040 --> 03:15.800 +And then there was a second interesting effect, which is that it meant that it was good at handling + +03:15.950 --> 03:23.090 +word stems, or times when you'd have the same beginning of a word and multiple potential endings that + +03:23.090 --> 03:27.950 +would be encoded into one token, followed by a few second tokens. + +03:27.950 --> 03:34.310 +And that meant that the sort of underlying meaning of what you're trying to say could be easily represented + +03:34.310 --> 03:40.400 +inside the model, because the tokens had the same kind of structure that might have sounded a bit abstract. + +03:40.400 --> 03:42.380 +Let me make that a bit more real for you. 
+ +03:43.100 --> 03:51.530 +So GPT, OpenAI actually provides a tool which is a platform openai.com slash tokenizer, and it lets + +03:51.530 --> 03:58.240 +you put in some text and see visually how that text is turned into tokens. + +03:58.390 --> 04:05.080 +And so I took a particular sentence, an important sentence for my class of AI engineers. + +04:05.350 --> 04:08.650 +And you can see that GPT tokenized that. + +04:08.650 --> 04:12.700 +That's the verb that we use when we're turning from words into tokens. + +04:12.700 --> 04:17.080 +And it highlights in colors how it turned that into tokens. + +04:17.080 --> 04:24.730 +And in this case, because these are all common words, every one of these words mapped precisely to + +04:24.760 --> 04:26.320 +one token. + +04:26.590 --> 04:28.480 +So this is a clear example. + +04:28.480 --> 04:29.860 +You can see from the colors. + +04:29.860 --> 04:34.060 +One other slightly interesting point to make that's that's important. + +04:34.060 --> 04:40.030 +You see the way that some of these colored boxes, like the word for has a space in front of it. + +04:40.060 --> 04:41.200 +It's like space. + +04:41.200 --> 04:43.570 +And then for is what's been tokenized. + +04:43.660 --> 04:49.330 +That's because the break between words is also meaningful when tokenizing. + +04:49.330 --> 04:55.440 +That token represents the word for in isolation, like that beginning of word followed by the letters + +04:55.530 --> 05:03.780 +for that is mapped to one token, the beginning of word for token, and so that that will maybe become + +05:03.780 --> 05:05.010 +a bit more important in a moment. + +05:05.010 --> 05:05.250 +But. + +05:05.250 --> 05:11.970 +But it's worth noting that the gap between words is included as part of a token. + +05:12.780 --> 05:15.120 +So let's take another example. 
+ +05:15.150 --> 05:22.320 +Now in this example, I'm coming up with a slightly more interesting sentence, an exquisitely handcrafted + +05:22.320 --> 05:26.880 +quip for my musterers of LM witchcraft. + +05:26.910 --> 05:30.060 +Now Musterers is, I believe, an invented word. + +05:30.060 --> 05:34.950 +As you'll see, the red squiggly underline shows that it's not a true word. + +05:34.950 --> 05:38.010 +And let's see how the tokenization has happened down here. + +05:38.010 --> 05:47.790 +So you'll see that that four is still here as one word, uh, with a beginning of token at the beginning + +05:47.790 --> 05:48.330 +of it. + +05:48.330 --> 05:54.920 +But and so is an Anne at the start, but exquisitely has been broken up into multiple tokens. + +05:54.950 --> 05:56.690 +Exquisitely. + +05:57.380 --> 06:04.130 +And that shows how when you've got a rare word, it doesn't have that word as a single word in its vocab. + +06:04.130 --> 06:08.630 +And so it had to break it into multiple tokens, but it's still able to pass it in. + +06:08.750 --> 06:11.570 +And now look at that word handcrafted. + +06:11.570 --> 06:17.000 +You can see that it also doesn't have that in its vocab as a single token, but it's able to break that + +06:17.000 --> 06:19.280 +into hand and craft it. + +06:19.280 --> 06:25.640 +And that does kind of reflect the, the, the, the meaning in a way it does get across that it can. + +06:25.670 --> 06:28.280 +It's combined from these two hand and crafted. + +06:28.280 --> 06:33.320 +And you can see as well that the crafted token does not include a beginning of sentence. + +06:33.320 --> 06:38.810 +So it's a token that represents a word that that has crafted in the middle of it. + +06:38.810 --> 06:41.000 +That's what that token reflects. + +06:41.090 --> 06:43.430 +You'll see that quip isn't there at all. + +06:43.430 --> 06:48.700 +It got broken into and it uh, and then you'll see Masteries. 
+ +06:48.700 --> 06:52.300 +And this is a good example of what I was saying about word stems. + +06:52.300 --> 06:58.900 +Masteries has been broken into master, which is after all, the the, the verb that we're going for + +06:58.900 --> 07:04.720 +here, someone who masters and then errs at the end as an extension to that word. + +07:05.020 --> 07:12.160 +Um, and so you can see that it's, it's able to reflect the meaning of what we're trying to say by + +07:12.160 --> 07:15.820 +breaking it into those two tokens, even though it's not a real word. + +07:16.390 --> 07:22.600 +And you can also see that witchcraft got broken into witch and craft, uh, which is also interesting. + +07:23.170 --> 07:25.540 +Uh, and so, yeah, handcrafted. + +07:25.540 --> 07:29.740 +And master, as I say, you can see how the meaning is reflected there by the tokens. + +07:29.740 --> 07:35.980 +And hopefully this gives you some real insight into what it means to break something into tokens. + +07:37.150 --> 07:42.250 +So an interesting one here is to now show you this slightly more sophisticated example. + +07:42.280 --> 07:48.130 +Uh, my favorite number, apparently 6534589793238462643383. + +07:48.160 --> 07:49.030 +Blah blah blah. + +07:49.180 --> 07:58.810 +Uh, so, uh, it shows you that when you have something like this, of course, long numbers like pi + +07:58.840 --> 08:01.750 +are not going to map to one token. + +08:01.930 --> 08:09.550 +And in fact, what you see is happening here is that every series of three digit numbers is being mapped + +08:09.550 --> 08:11.020 +to one token. + +08:11.260 --> 08:13.030 +And that's an interesting property. + +08:13.030 --> 08:19.000 +It's actually a property of GPT two tokenizer, but many others don't have that many other cases. + +08:19.000 --> 08:23.170 +You'll see that things map to multiple tokens. 
+ +08:24.220 --> 08:32.170 +Uh, so the generally speaking, there's a rule of thumb which is helpful to know to bear in mind when + +08:32.170 --> 08:33.670 +you're looking at tokens. + +08:33.670 --> 08:41.560 +The rule of thumb generally is that on average, one token typically maps to about four characters. + +08:41.860 --> 08:49.430 +And that means that a token is on average for normal English writing, it's about three quarters of + +08:49.430 --> 08:50.060 +a word. + +08:50.060 --> 08:53.210 +One token maps to about 0.75 words. + +08:53.210 --> 08:54.950 +And an easier way to think about that. + +08:54.950 --> 09:00.260 +A better way to put it is that a thousand tokens is about 750 words. + +09:00.260 --> 09:01.880 +So that's the mapping to have in your mind. + +09:01.910 --> 09:04.820 +A thousand tokens is 750 words. + +09:04.820 --> 09:11.300 +And that means that the complete works of Shakespeare, for example, to make this real, to give a + +09:11.300 --> 09:16.460 +real example, that's about 900,000 words, apparently in the complete works of Shakespeare. + +09:16.460 --> 09:23.510 +So about 1.2 million tokens, that is the size of the complete works of Shakespeare. + +09:23.540 --> 09:26.270 +Now, that refers to English. + +09:26.270 --> 09:33.920 +If you're looking at things like math formulas, scientific terms and also code, then the token count + +09:33.920 --> 09:39.530 +is much higher because obviously, as we saw here with numbers, things need to be broken into many + +09:39.530 --> 09:43.990 +more tokens to incorporate Right symbols and stuff like that. + +09:44.290 --> 09:48.790 +And the other point to make here is that this is showing you GPT tokenizer. + +09:48.820 --> 09:52.930 +There are no hard and fast rules about how Tokenizers should work. 
+ +09:52.960 --> 09:57.340 +In fact, we saw a minute ago that in the early days you used to have tokenizer, that every letter + +09:57.340 --> 10:02.770 +would map to one token, and you'll see that different models have different approaches to tokenization. + +10:02.770 --> 10:07.660 +And when we look later at open source, we're going to be getting hands on with a bunch of different + +10:07.690 --> 10:08.440 +tokenizers. + +10:08.470 --> 10:12.280 +And we're going to explore an interesting property of Llama's tokenizer too. + +10:12.310 --> 10:15.280 +So different tokenizers work can work differently. + +10:15.280 --> 10:20.170 +There are pros and cons for having fewer tokens or more tokens. + +10:20.380 --> 10:26.350 +There's not a single answer that depends on the how many parameters are in the model and how it was + +10:26.350 --> 10:27.640 +trained, and so on. + +10:27.820 --> 10:34.930 +But this is a more detailed look at GPT tokenizer, and I hope this has given you some clarity and intuition + +10:34.930 --> 10:41.230 +on what it means to to go from words and characters into the world of tokens. 
diff --git a/week5/community-contributions/subtitles/srts/60619721/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619721/ja_JP.srt new file mode 100755 index 0000000..b5f5dac --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619721/ja_JP.srt @@ -0,0 +1,340 @@ +WEBVTT + +00:00.260 --> 00:02.930 +さて、 トークンについて少し話をしよう。 + +00:02.960 --> 00:07.700 +トークンとは、 モデルに渡される個々のユニットのことである。 + +00:07.880 --> 00:16.790 +ニューラルネットワークを構築した初期には、 一文字ずつ学習させたニューラルネットワークをよく見かけたものだ。 + +00:16.790 --> 00:22.400 +つまり、 一連の個々の登場人物をモデル化し、 前に登場した人物から、 + +00:22.400 --> 00:28.250 +次に登場する可能性の高い人物を予測するように訓練するのだ。 + +00:28.280 --> 00:33.020 +あれは特殊なテクニックで、 ある意味、 多くの利点があった。 + +00:33.020 --> 00:39.950 +つまり、 入力可能な数は限られており、 アルファベットといくつかの記号の数だけだった。 + +00:39.950 --> 00:43.760 +つまり、 非常に扱いやすいボキャブラリーサイズだったということだ。 + +00:43.760 --> 00:49.910 +そして、 そのウェイトが必要なのだから、 インプットに多くの異なる可能性を心配する必要はない。 + +00:49.940 --> 00:57.200 +しかし、 一連の異なる文字がどのようにして単語になるのかを理解するという点で、 + +00:57.200 --> 01:15.550 +モデルには多くのことが要求され、 単語の背後にある意味に関連するすべての知性をモデルの重みの中に取り込まなければならない。 + +01:15.550 --> 01:25.270 +そこで私たちは、 可能性のある単語をひとつひとつ訓練してニューラルネットワークを構築した。 + +01:25.270 --> 01:31.840 +それは辞書のようなもので、 ありとあらゆる単語の索引のようなものだ。 + +01:31.840 --> 01:37.420 +そして、 トークンが持つ可能性のある単語は、 これらの可能性のある単語のいずれかである可能性がある。 + +01:37.420 --> 01:49.600 +つまり、 一連の文字がどのような意味を持つかを理解するのではなく、 個々の単語がそれぞれ異なる意味を持つことをモデル自体が理解し始めたのだ。 + +01:49.600 --> 01:51.040 +だから、 それはいいことだった。 + +01:51.040 --> 01:55.210 +しかし、 その結果、 膨大なボキャブラリーを抱えることになった。 + +01:55.240 --> 01:58.740 +可能な限りの単語をボキャブラリーとして持っておく必要があった。 + +01:58.740 --> 02:04.140 +もちろん、 地名や人名もあるので、 可能性のある単語はたくさんある。 + +02:04.140 --> 02:07.800 +だから、 未知の言葉には特別なトークンが必要だった。 + +02:07.800 --> 02:10.650 +そして、 そのためにいくつかの制限が生じた。 + +02:10.830 --> 02:13.860 +珍しい言葉は省略され、 特別な場所は省略されなければならなかった。 + +02:13.860 --> 02:17.370 +それで、 いくつか奇妙なことが起きた。 + +02:17.550 --> 02:25.860 +そしてGPTの頃、 ある発見がなされた。 個々の文字に基づいてモデルを訓練し、 それらを組み合わせて単語を形成する方法を学習させるのではなく、 + +02:25.860 --> 02:36.360 +この2つの両極端の中間に、 ある種の幸福な媒体があるという画期的な発見がなされたのだ。 + +02:36.360 --> 02:43.350 
+各単語が異なるトークンであると言うのではなく、 文字のかたまり、 あるときは完全な単語を形成し、 + +02:43.350 --> 02:48.600 +あるときは単語の一部を形成するかたまりをトークンと呼び、 一連のトークンを受け取り、 + +02:48.600 --> 02:57.740 +渡されたトークンに基づいてトークンを出力するようにモデルを訓練することができる。 + +02:57.740 --> 03:01.250 +そして、 これには多くの興味深い利点があった。 + +03:01.250 --> 03:07.880 +そのひとつは、 物事をトークンに分解することで、 地名や固有名詞のようなものも扱えるようになることだ。 + +03:07.880 --> 03:10.040 +トークンの断片が増えるだけだ。 + +03:10.040 --> 03:15.800 +それは、 単語の語幹、 つまり同じ語頭と複数の語尾の可能性があるものを1つのトークンにエンコードし、 + +03:15.950 --> 03:27.950 +その後に2つ目のトークンをいくつか並べるという処理に長けているということだ。 + +03:27.950 --> 03:40.400 +つまり、 トークンが同じような構造を持っているので、 言おうとしていることの根本的な意味をモデルの中で簡単に表現することができた。 + +03:40.400 --> 03:42.380 +もう少し現実的な話をしよう。 + +03:43.100 --> 03:51.530 +GPT、 OpenAIは実際にプラットフォームopenaiであるツールを提供しています。 comのスラッシュ・トークナイザーで、 テキストを入力して、 + +03:51.530 --> 03:58.240 +そのテキストがトークンにどのように変換されるかを視覚的に見ることができる。 + +03:58.390 --> 04:05.080 +そこで私はある文章を取り上げた。 私のクラスのAIエンジニアにとって重要な文章だ。 + +04:05.350 --> 04:08.650 +GPTがそれをトークン化しているのがわかるだろう。 + +04:08.650 --> 04:12.700 +これは、 言葉をトークンに変えるときに使う動詞だ。 + +04:12.700 --> 04:17.080 +そして、 それをどのようにトークンに変えたかを色分けして強調している。 + +04:17.080 --> 04:26.320 +そしてこの場合、 これらはすべて一般的な単語であるため、 これらの単語のひとつひとつがひとつのトークンに正確に対応する。 + +04:26.590 --> 04:28.480 +だから、 これは明確な例だ。 + +04:28.480 --> 04:29.860 +色を見ればわかるだろう。 + +04:29.860 --> 04:34.060 +もうひとつ、 少し興味深い点がある。 + +04:34.060 --> 04:40.030 +色つきのボックスの中には、 例えば "for "の前にスペースが入っているものがある。 + +04:40.060 --> 04:41.200 +まるで宇宙のようだ。 + +04:41.200 --> 04:43.570 +そして、 それが形骸化したものだ。 + +04:43.660 --> 04:49.330 +トークン化の際、 単語と単語の間の区切りにも意味があるからだ。 + +04:49.330 --> 04:55.440 +このトークンはforという単語を単独で表し、 単語の頭に続くforという文字が1つのトークン、 + +04:55.530 --> 05:05.010 +トークンの単語の頭にマッピングされる。 + +05:05.010 --> 05:05.250 +でもね。 + +05:05.250 --> 05:11.970 +しかし、 単語と単語の間のギャップがトークンの一部として含まれることは注目に値する。 + +05:12.780 --> 05:15.120 +では、 別の例を挙げてみよう。 + +05:15.150 --> 05:26.880 +この例では、 もう少し面白い文章、 つまりLMの魔術師である私のマスタリングのために絶妙に手作りされた小ネタを考えている。 + +05:26.910 --> 05:30.060 +Musterersは造語だと思う。 + +05:30.060 --> 05:34.950 +ご覧のように、 赤い四角いアンダーラインは、 それが本当の言葉ではないことを示している。 + +05:34.950 --> 
05:38.010 +トークン化がどのように行われたかを見てみよう。 + +05:38.010 --> 05:48.330 +だから、 この4はまだ1つの単語としてここにある。 + +05:48.330 --> 05:54.920 +しかし、 冒頭のアンもそうだが、 絶妙に複数のトークンに分割されている。 + +05:54.950 --> 05:56.690 +絶妙だ。 + +05:57.380 --> 06:04.130 +そしてこれは、 珍しい単語がある場合、 その単語がボキャブラリーの中に1単語として含まれていないことを示している。 + +06:04.130 --> 06:08.630 +そのため、 複数のトークンに分割しなければならなかったが、 それでも通過させることができた。 + +06:08.750 --> 06:11.570 +そして今、 ハンドクラフトという言葉を見てほしい。 + +06:11.570 --> 06:19.280 +このボキャブラリーも単一のトークンとして持っているわけではなく、 それを手で分解して作ることができるのだ。 + +06:19.280 --> 06:25.640 +そしてそれは、 ある意味、 意味を反映している。 + +06:25.670 --> 06:28.280 +この2つの手を組み合わせて作られる。 + +06:28.280 --> 06:33.320 +そして、 細工されたトークンには文頭が含まれていないこともわかるだろう。 + +06:33.320 --> 06:38.810 +つまり、 真ん中に細工が施された単語を表すトークンなのだ。 + +06:38.810 --> 06:41.000 +それがこのトークンに反映されている。 + +06:41.090 --> 06:43.430 +その口癖がまったくないことがわかるだろう。 + +06:43.430 --> 06:48.700 +侵入されて、 それで......マスタリーを見ることになる。 + +06:48.700 --> 06:52.300 +そしてこれは、 私が語幹について言ったことの良い例である。 + +06:52.300 --> 06:58.900 +Masteriesはmaster、 つまりマスターする人という動詞と、 + +06:58.900 --> 07:04.720 +その延長線上にあるerrsに分かれる。 + +07:05.020 --> 07:15.820 +実際の単語ではないにもかかわらず、 2つのトークンに分割することで、 私たちが言おうとしていることの意味を反映することができるのです。 + +07:16.390 --> 07:22.600 +また、 魔術が魔女と工芸に分かれたのも興味深い。 + +07:23.170 --> 07:25.540 +それで、 そう、 手作りなんだ。 + +07:25.540 --> 07:29.740 +そしてマスターは、 私が言ったように、 トークンによってそこにどのように意味が反映されているかがわかるだろう。 + +07:29.740 --> 07:35.980 +そして、 何かをトークンに分解するということがどういうことなのか、 これが本当の意味での洞察になることを願っている。 + +07:37.150 --> 07:42.250 +そこで興味深いのは、 もう少し洗練された例をお見せすることだ。 + +07:42.280 --> 07:48.130 +ええと、 私の好きな番号、 6534589793238462643383らしい。 + +07:48.160 --> 07:49.030 +ブラブラブラ。 + +07:49.180 --> 08:01.750 +円周率のような長い数字は、 もちろん1つのトークンには対応しない。 + +08:01.930 --> 08:11.020 +そして実際、 ここで起こっているのは、 3桁の数字が1つのトークンにマッピングされているということだ。 + +08:11.260 --> 08:13.030 +これは興味深い性質だ。 + +08:13.030 --> 08:19.000 +実はこれはGPTの2つのトークナイザーの特性なのだが、 他の多くのケースはそれほど多くない。 + +08:19.000 --> 08:23.170 +複数のトークンにマッピングされていることがわかるだろう。 + +08:24.220 --> 08:33.670 +一般的に言って、 トークンを見るときに覚えておくと便利な経験則があります。 + +08:33.670 --> 08:41.560 +経験則では、 平均して1トークンは約4文字に対応する。 
+ +08:41.860 --> 08:50.060 +つまり、 通常の英文では、 トークンは単語の4分の3程度ということになる。 + +08:50.060 --> 08:53.210 +1トークンは約0に相当する。 75語。 + +08:53.210 --> 08:54.950 +そして、 それをより簡単に考える方法もある。 + +08:54.950 --> 09:00.260 +つまり、 1000トークンは約750ワードということだ。 + +09:00.260 --> 09:01.880 +だから、 これがあなたの頭の中にあるマッピングなんだ。 + +09:01.910 --> 09:04.820 +1000トークンは750ワードである。 + +09:04.820 --> 09:11.300 +ということは、 例えばシェイクスピア全集で、 実際の例を挙げれば、 + +09:11.300 --> 09:16.460 +約90万語ということになる。 + +09:16.460 --> 09:23.510 +だから1. 200万トークンといえば、 シェークスピア全集のサイズだ。 + +09:23.540 --> 09:26.270 +さて、 これは英語の話だ。 + +09:26.270 --> 09:33.920 +数学の公式や科学用語、 コードなどを見ている場合、 トークン数はもっと多くなる。 + +09:33.920 --> 09:43.990 +数字で見たように、 右の記号などを組み込むにはもっと多くのトークンに分割する必要があるからだ。 + +09:44.290 --> 09:48.790 +そして、 もうひとつのポイントは、 これはGPTトークナイザーを示しているということだ。 + +09:48.820 --> 09:52.930 +トーケナイザーがどのように機能すべきかについて、 確固たるルールはない。 + +09:52.960 --> 09:57.340 +実際、 1分前に見たように、 初期の頃はトークナイザーがあり、 + +09:57.340 --> 10:02.770 +すべての文字が1つのトークンにマッピングされていた。 + +10:02.770 --> 10:08.440 +オープン・ソースを後で見るときには、 いろいろなトークナイザーに触れることになる。 + +10:08.470 --> 10:12.280 +そして、 Llamaのトークナイザーの興味深い特性も探求するつもりだ。 + +10:12.310 --> 10:15.280 +だから、 トークナイザーによって動作が異なることがある。 + +10:15.280 --> 10:20.170 +トークンの数が少なかったり、 多かったりすることには賛否両論がある。 + +10:20.380 --> 10:27.640 +モデルのパラメーターの数や学習方法などによって、 答えは一つではない。 + +10:27.820 --> 10:34.930 +言葉や文字からトークンの世界に入るということがどういうことなのか、 + +10:34.930 --> 10:41.230 +少しはわかってもらえただろうか。 diff --git a/week5/community-contributions/subtitles/srts/60619721/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619721/ko_KR.srt new file mode 100755 index 0000000..d4b865c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619721/ko_KR.srt @@ -0,0 +1,406 @@ +WEBVTT + +00:00.260 --> 00:02.930 +이제 토큰에 대해 잠깐 얘기해 보죠 + +00:02.960 --> 00:07.700 +토큰은 모델로 전환되는 개별 유닛이에요. 
+ +00:07.880 --> 00:13.550 +신경망을 구축하던 초기에는 캐릭터마다 훈련된 + +00:13.550 --> 00:16.790 +신경망을 자주 사용했어요 + +00:16.790 --> 00:22.400 +일련의 캐릭터를 구성하는 모델이 있어야 해요 그리고 훈련해서 가장 + +00:22.400 --> 00:28.250 +유력한 다음 캐릭터를 예측하는 거죠 이전의 캐릭터를 고려해서요 + +00:28.280 --> 00:33.020 +특별한 기술이었고 어떤 면에선 장점도 많았어요 + +00:33.020 --> 00:38.210 +가능한 입력 수의 숫자는 제한된 숫자라는 의미였죠 가능한 알파벳 문자와 + +00:38.210 --> 00:39.950 +일부 기호의 숫자요 + +00:39.950 --> 00:43.760 +그래서 다루기 쉬운 단어가 많이 생겼죠 + +00:43.760 --> 00:48.470 +그 무게는 입력에 대한 너무 많은 가능성을 걱정할 + +00:48.470 --> 00:49.910 +필요가 없었죠 + +00:49.940 --> 00:57.200 +하지만 문제는 그 모델에 요구되는 게 너무 많다는 거였어요 일련의 다른 + +00:57.200 --> 01:05.830 +캐릭터들이 어떻게 단어로 변하는지 이해해야 했고 단어에 담긴 의미와 관련된 모든 + +01:05.830 --> 01:12.280 +지성이 모델의 무게에 담겨야 했는데 그건 모델 자체에 너무 많은 + +01:12.310 --> 01:15.550 +걸 요구하는 거였죠 + +01:15.550 --> 01:22.720 +그다음에는 거의 다른 극한으로 갔어요 각 단어의 정보를 훈련한 + +01:22.720 --> 01:25.270 +신경망 모형들이었죠 + +01:25.270 --> 01:29.680 +그래서 단어 선택이라는 걸 만들어요 사전 같은 건데 가능한 + +01:29.680 --> 01:31.840 +모든 단어의 인덱스죠 + +01:31.840 --> 01:37.420 +각각의 단어는 토큰이 될 수 있어요 이 중 아무 단어나 될 수 있죠 + +01:37.420 --> 01:44.470 +즉, 모델 자체가 각 단어의 의미가 다르다는 걸 이해하기 시작했다는 거죠 + +01:44.470 --> 01:49.600 +캐릭터의 연속적인 의미를 이해하는 대신에요 + +01:49.600 --> 01:51.040 +그건 좋은 일이었죠 + +01:51.040 --> 01:55.210 +하지만 문제는 그 결과로 엄청난 어휘가 나왔다는 거죠 + +01:55.240 --> 01:58.740 +가능한 모든 단어를 합친 단어가 필요했어요 + +01:58.740 --> 02:04.140 +가능한 단어가 아주 많아요 장소와 사람들의 이름도 있으니까요 + +02:04.140 --> 02:07.800 +그래서 모르는 단어를 위한 특별한 증표가 있어야 했죠 + +02:07.800 --> 02:10.650 +그래서 한계가 생겼죠 + +02:10.830 --> 02:13.860 +희귀한 단어와 특별한 장소는 생략해야 했죠 + +02:13.860 --> 02:17.370 +그래서 좀 이상한 일이 생겼어요 + +02:17.550 --> 02:25.860 +그러다 GPT가 개발될 무렵 돌파구를 찾았습니다 양극단 사이에 적절한 매체가 있다는 + +02:25.860 --> 02:31.170 +사실이었죠 캐릭터별로 모델을 훈련하고 그것들을 조합해 + +02:31.170 --> 02:36.360 +단어를 만드는 법을 배우는 대신에요 + +02:36.360 --> 02:43.350 +각각의 단어가 다른 토큰이라고 말하는 것보다는 하나의 글자를 모아서 완전한 단어를 + +02:43.350 --> 02:48.600 +만들거나 단어의 일부를 토큰으로 만드는 것이 더 좋을 것 같습니다. + +02:48.600 --> 02:57.740 +그렇게 해서 전달되는 토큰에 기반하여 일련의 토큰과 출력 토큰을 만들도록 하는 거죠. 
+ +02:57.740 --> 03:01.250 +이 방법에는 흥미로운 장점이 많아요 + +03:01.250 --> 03:05.630 +그중 하나는 토큰으로 나누기 때문에 장소의 이름이나 정식 이름 + +03:05.630 --> 03:07.880 +같은 것도 다룰 수 있다는 거죠 + +03:07.880 --> 03:10.040 +그저 패의 조각에 불과할 뿐이죠 + +03:10.040 --> 03:15.800 +두 번째 흥미로운 효과는 단어 스템 처리에도 좋다는 겁니다 + +03:15.950 --> 03:23.090 +혹은 단어 시작 부분이 같아 여러 잠재적 결말을 하나의 토큰에 인코딩하고 + +03:23.090 --> 03:27.950 +몇 개의 토큰으로 이어질 때요 + +03:27.950 --> 03:34.310 +즉, 말하고자 하는 것의 기저에 깔린 의미는 모델 내에서 쉽게 표현될 수 있다는 것입니다. + +03:34.310 --> 03:40.400 +왜냐하면 토큰은 약간 추상적으로 들릴 수도 있는 구조로 구성되어 있기 때문이죠. + +03:40.400 --> 03:42.380 +좀 더 현실적인 비트로 바꿔드리죠 + +03:43.100 --> 03:51.530 +GPT, OpenAI는 플랫폼 OpenAI라는 도구를 제공하죠 comp/토큰라이저입니다 텍스트를 + +03:51.530 --> 03:58.240 +입력하면 어떻게 토큰으로 바뀌는지 볼 수 있죠 + +03:58.390 --> 04:05.080 +그래서 한 문장을 선택했습니다 우리 인공지능 엔지니어 학급에게는 중요한 문장이죠 + +04:05.350 --> 04:08.650 +GPT가 그걸 토큰화한 걸 볼 수 있죠 + +04:08.650 --> 04:12.700 +단어를 증표로 바꿀 때 사용하는 동사죠 + +04:12.700 --> 04:17.080 +그게 어떻게 토큰이 됐는지 색으로 강조하죠 + +04:17.080 --> 04:24.730 +이 경우에는 모두 흔한 단어라서 모든 단어가 토큰 하나에 정확히 그려져 + +04:24.760 --> 04:26.320 +있어요 + +04:26.590 --> 04:28.480 +이게 확실한 예죠 + +04:28.480 --> 04:29.860 +색깔을 보면 알 수 있죠 + +04:29.860 --> 04:34.060 +또 하나 흥미로운 점은 중요한 거예요 + +04:34.060 --> 04:40.030 +색깔 있는 상자 몇 개가 앞에 공간이 있어요 + +04:40.060 --> 04:41.200 +우주 같아요 + +04:41.200 --> 04:43.570 +그리고 for는 표시된 거죠 + +04:43.660 --> 04:49.330 +왜냐하면 표기할 때도 단어 간격이 의미가 있거든요 + +04:49.330 --> 04:55.440 +저 토큰은 격리된 워드를 나타내요 워드의 시작 부분처럼요 그걸 위한 글자들이 + +04:55.530 --> 05:05.010 +토큰의 시작 부분에 매핑되죠 토큰에 대한 워드의 시작 부분에요 그건 잠시 후에 좀 더 중요해질 거예요 + +05:05.010 --> 05:05.250 +하지만요 + +05:05.250 --> 05:11.970 +하지만 단어 사이의 간격도 토큰의 일부로 포함된다는 걸 알아두세요 + +05:12.780 --> 05:15.120 +다른 예를 들어보죠 + +05:15.150 --> 05:22.320 +이 예시에서는 좀 더 흥미로운 문장을 떠올려 봅니다 LM 위치크래프트 + +05:22.320 --> 05:26.880 +몰이꾼들을 위한 정교한 수제 재담이죠 + +05:26.910 --> 05:30.060 +몰이꾼이란 말은 지어낸 말일 거예요 + +05:30.060 --> 05:34.950 +여기 보시면 빨간 구불구불한 선이 가짜라는 걸 알 수 있어요 + +05:34.950 --> 05:38.010 +어떻게 토큰화가 이루어졌는지 보죠 + +05:38.010 --> 05:48.330 +4는 여전히 한 단어로 여기 있는 게 보이시죠 토큰의 앞부분도요 + +05:48.330 
--> 05:54.920 +하지만 앤도 마찬가지예요 하지만 여러 토큰으로 정교하게 나뉘어 있죠 + +05:54.950 --> 05:56.690 +아주 정교하게요 + +05:57.380 --> 06:04.130 +희귀한 단어를 쓰면 그 단어가 한 단어로 안 쓰인다는 걸 알 수 있죠 + +06:04.130 --> 06:08.630 +여러 개의 토큰으로 나눠야 했지만 여전히 전달할 수 있었죠 + +06:08.750 --> 06:11.570 +그 단어는 수제품이에요 + +06:11.570 --> 06:17.000 +보다시피 단일 토큰이라는 단어에도 그게 없어요 하지만 그걸 + +06:17.000 --> 06:19.280 +쪼개서 만들 수 있죠 + +06:19.280 --> 06:25.640 +그게 그 의미를 반영해요 어떤 면에서는요 Get it의 의미요 + +06:25.670 --> 06:28.280 +두 가지를 수작업으로 합친 거예요 + +06:28.280 --> 06:33.320 +보다시피 만들어진 토큰은 문장의 시작을 포함하지 않죠 + +06:33.320 --> 06:38.810 +중간에 만들어진 단어를 상징하는 토큰인 셈이죠 + +06:38.810 --> 06:41.000 +그게 그 토큰의 의미죠 + +06:41.090 --> 06:43.430 +그런 농담은 전혀 안 통해요 + +06:43.430 --> 06:48.700 +누군가 침입했고 마스터시스 사진도 있어요 + +06:48.700 --> 06:52.300 +단어 줄기에 대해 말씀드린 좋은 예죠 + +06:52.300 --> 06:58.900 +마스터는 마스터로 바뀌었는데 여기서 중요한 건 동사죠 + +06:58.900 --> 07:04.720 +마스터를 익혔다가 마지막에 실수하는 사람요 + +07:05.020 --> 07:12.160 +보다시피 우리가 말하려는 의미를 반영할 수 있어요 두 개의 토큰으로 + +07:12.160 --> 07:15.820 +쪼개서요 진짜 단어는 아니지만요 + +07:16.390 --> 07:22.600 +마법과 공예가 섞여 들어간 것도 볼 수 있어요 이것도 흥미롭죠 + +07:23.170 --> 07:25.540 +네, 수공예품이에요 + +07:25.540 --> 07:29.740 +스승님, 말씀드렸듯이 현극령에 그 의미가 반영돼 있죠 + +07:29.740 --> 07:35.980 +여러분이 이걸 통해 토큰으로 뭔가를 쪼개는 게 어떤 의미인지 깨달았으면 좋겠네요 + +07:37.150 --> 07:42.250 +여기서 흥미로운 건 이 좀 더 복잡한 예시를 보여드리는 거예요 + +07:42.280 --> 07:48.130 +제가 좋아하는 숫자죠 6534589793238462643383이에요 + +07:48.160 --> 07:49.030 +어쩌고저쩌고 말이죠 + +07:49.180 --> 07:58.810 +이 화면을 보면 이런 게 있을 때 파이처럼 긴 숫자는 토큰 하나로 + +07:58.840 --> 08:01.750 +대응하지 않아요 + +08:01.930 --> 08:09.550 +여기서 벌어지는 일을 보면 모든 일련의 세 자리 숫자가 토큰 하나에 매핑되고 + +08:09.550 --> 08:11.020 +있어요 + +08:11.260 --> 08:13.030 +흥미로운 자산이죠 + +08:13.030 --> 08:19.000 +사실 GPT 2 토큰라이저의 속성입니다 하지만 다른 많은 제품에는 그렇게 많은 케이스가 없죠 + +08:19.000 --> 08:23.170 +여러 개의 토큰을 가지고 있는 것을 볼 수 있어요. 
+ +08:24.220 --> 08:32.170 +일반적으로 토큰을 볼 때 명심해야 할 경험 법칙이 + +08:32.170 --> 08:33.670 +있어요 + +08:33.670 --> 08:41.560 +경험 법칙에 따르면 토큰 하나는 보통 네 글자로 매핑이 돼요 + +08:41.860 --> 08:50.060 +보통 영어 문학에서 토큰은 평균적으로 단어 4분의 3에 해당해요 + +08:50.060 --> 08:53.210 +토큰 한 장은 0개 정도고요 75단어요 + +08:53.210 --> 08:54.950 +쉽게 생각할 수 있는 방법이죠 + +08:54.950 --> 09:00.260 +쉽게 말하면 천 현은 750글자나 되죠 Put + +09:00.260 --> 09:01.880 +그게 여러분이 염두에 두셔야 할 지도예요 + +09:01.910 --> 09:04.820 +천 현은 750글자예요 + +09:04.820 --> 09:11.300 +셰익스피어 전작을 예로 들어 볼게요 예를 들어 현실적으로 보면 약 + +09:11.300 --> 09:16.460 +900,000단어로 된 셰익스피어 작품이죠 + +09:16.460 --> 09:23.510 +1명 정도요 200만 토큰이면 셰익스피어 작품 전집과 맞먹는 크기죠 + +09:23.540 --> 09:26.270 +영어를 말하는 거예요 + +09:26.270 --> 09:33.920 +수학 공식이나 과학 용어, 코드 같은 것을 볼 때 토큰의 수는 훨씬 더 많습니다 여기에서 + +09:33.920 --> 09:39.530 +숫자들을 보았듯이 더 많은 토큰으로 쪼개져야 합니다 올바른 + +09:39.530 --> 09:43.990 +심볼 같은 것을 포함하기 위해서요 + +09:44.290 --> 09:48.790 +여기서 또 한 가지 중요한 점은 GPT 토큰라이저를 보여드린다는 거죠 + +09:48.820 --> 09:52.930 +토큰이 어떻게 작동해야 하는지 정해진 규칙은 없어요 + +09:52.960 --> 09:57.340 +조금 전에 토큰라이저를 사용하던 초창기를 살펴봤는데요 모든 글자가 + +09:57.340 --> 10:02.770 +토큰 하나에 매핑되었죠 모델마다 토큰화에 다른 접근법을 갖고 있었어요 + +10:02.770 --> 10:07.660 +나중에 오픈 소스를 살펴볼 땐 다양한 토큰라이저들을 직접 경험해볼 + +10:07.690 --> 10:08.440 +거예요 + +10:08.470 --> 10:12.280 +라마 토큰라이저의 흥미로운 속성도 살펴볼 거예요 + +10:12.310 --> 10:15.280 +각각의 토큰이 다르게 작동할 수 있어요 + +10:15.280 --> 10:20.170 +패가 많거나 적을 때 장단점이 있어요 + +10:20.380 --> 10:26.350 +모델에 매개 변수가 몇 개 있고 어떻게 훈련받았는지에 따라 답이 달라지지 + +10:26.350 --> 10:27.640 +않아요 + +10:27.820 --> 10:34.930 +이번 시간에는 GPT 토큰마이저를 더 자세히 살펴보고 여러분께 명확함과 직관을 얻으셨길 + +10:34.930 --> 10:41.230 +바랍니다 단어와 글자를 토큰의 세계로 바꾸는 게 어떤 의미인지 말이죠 diff --git a/week5/community-contributions/subtitles/srts/60619883/en_US.srt b/week5/community-contributions/subtitles/srts/60619883/en_US.srt new file mode 100755 index 0000000..e66ec7e --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619883/en_US.srt @@ -0,0 +1,142 @@ +WEBVTT + +00:00.080 --> 00:03.950 +And now we've arrived at an exciting moment in our first 
week. + +00:03.980 --> 00:10.670 +The conclusion of the first week is where we get to actually put this into practice and build a commercial + +00:10.670 --> 00:11.570 +solution. + +00:11.570 --> 00:18.170 +By the end of today, you will be able to confidently code against the OpenAI API, because you'll have + +00:18.170 --> 00:19.430 +done it several times. + +00:19.430 --> 00:24.650 +You'll have used a technique called one shot prompting that we'll talk about streaming markdown, JSON + +00:24.650 --> 00:31.010 +results, and overall we'll have implemented a business solution literally in minutes. + +00:31.010 --> 00:32.750 +So what is this business problem? + +00:32.750 --> 00:33.650 +Well, here it is. + +00:33.650 --> 00:40.490 +We're going to build an application that is able to generate a marketing brochure about a company. + +00:40.490 --> 00:46.400 +It's something that could be used for prospective clients, for investors or maybe for recruiting talent. + +00:46.580 --> 00:51.740 +It's going to be something which will bring together information from multiple sources. + +00:52.040 --> 00:57.890 +And so it's a bit like the summarization project we did before, except we're we're summarizing and + +00:57.890 --> 00:59.030 +we're generating. + +00:59.090 --> 01:02.420 +So it's sort of built on top of some stuff we did before. + +01:02.450 --> 01:04.910 +Now we're going to use the OpenAI API. + +01:04.910 --> 01:09.200 +And as before, you'd be able to switch to using Olama if you wanted to. + +01:09.230 --> 01:11.960 +You're now an expert in that, so I'll leave that up to you. + +01:11.990 --> 01:16.130 +We're going to use a technique called one shot prompting, which sounds very fancy. + +01:16.130 --> 01:21.320 +And all it's saying is that in the prompts we send the model, we're going to give an example of what + +01:21.320 --> 01:22.100 +we're looking for. + +01:22.100 --> 01:25.460 +We're going to tell it the kind of thing we're expecting it to reply. 
+ +01:25.490 --> 01:29.360 +And when you do that with one example, it's called one shot prompting. + +01:29.360 --> 01:34.490 +If you if you ask a question with no examples at all, that is called zero shot prompting. + +01:34.490 --> 01:38.330 +It's expected just to figure out from the question how to answer one shot. + +01:38.330 --> 01:43.550 +Prompting is when you give one example, and then if you give multiple examples of what you're asking + +01:43.550 --> 01:48.020 +and what it should respond in different situations, that's known as multi-shot prompting. + +01:48.170 --> 01:50.180 +So that's that's all there is to it. + +01:50.600 --> 01:56.390 +And then we're going to be using things like streaming and formatting to make this, uh, nice, nice + +01:56.390 --> 01:58.970 +and impressive brochure generator. + +01:59.690 --> 02:03.560 +So just to remind you of the environment setup, we've done this to death. + +02:03.560 --> 02:05.960 +You've got an environment that works and it's fabulous. + +02:05.960 --> 02:08.810 +But just to remind you what you did, you cloned the repo. + +02:08.840 --> 02:14.210 +You followed the readme to set up your Anaconda environment, maybe a virtual env, and you set up a + +02:14.210 --> 02:21.170 +key with OpenAI and you put that key, the OpenAI API key, which is src proj. + +02:21.560 --> 02:22.250 +Blah blah blah. + +02:22.280 --> 02:27.860 +You put that in a file that was called dot env and it is in your project root directory. + +02:27.860 --> 02:30.470 +And that is why all of this is working so well. + +02:30.530 --> 02:36.230 +And so what you need to do now in order to get us back to where we were, is if you're on a PC, you + +02:36.230 --> 02:38.150 +bring up an Anaconda prompt. + +02:38.150 --> 02:41.120 +If you're on a mac, you bring up a terminal window. + +02:41.150 --> 02:45.440 +You go to the project root directory LM engineering. 
+ +02:45.440 --> 02:51.920 +You type the conda, activate lm conda, activate LM to activate the environment. + +02:51.920 --> 02:55.940 +And then you should see the LMS prefix by your prompt. + +02:55.940 --> 02:58.700 +If it already says that, then you're already activated. + +02:58.910 --> 03:05.330 +And once you've done that, you simply type JupyterLab to launch JupyterLab and to be up and running. + +03:05.330 --> 03:08.360 +And that is where I will see you in the next video. diff --git a/week5/community-contributions/subtitles/srts/60619883/ja_JP.srt b/week5/community-contributions/subtitles/srts/60619883/ja_JP.srt new file mode 100755 index 0000000..946cafc --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619883/ja_JP.srt @@ -0,0 +1,124 @@ +WEBVTT + +00:00.080 --> 00:03.950 +そして今、 私たちは最初の週にエキサイティングな瞬間を迎えた。 + +00:03.980 --> 00:11.570 +最初の週の締めくくりは、 これを実際に実践し、 商業的ソリューションを構築するところだ。 + +00:11.570 --> 00:19.430 +今日の終わりには、 OpenAI APIに対して自信を持ってコードを書けるようになるでしょう。 + +00:19.430 --> 00:24.650 +マークダウンのストリーミング、 JSONの結果、 そして全体として、 + +00:24.650 --> 00:31.010 +文字通り数分でビジネス・ソリューションを実装することになる。 + +00:31.010 --> 00:32.750 +では、 このビジネス上の問題とは何か? 
+ +00:32.750 --> 00:33.650 +さて、 これだ。 + +00:33.650 --> 00:40.490 +私たちは、 ある会社についてのマーケティング・パンフレットを作成できるアプリケーションを作ろうとしています。 + +00:40.490 --> 00:46.400 +見込み客や投資家、 あるいは人材募集に使えるものだ。 + +00:46.580 --> 00:51.740 +複数のソースからの情報をまとめるものになるだろう。 + +00:52.040 --> 00:59.030 +以前行った要約プロジェクトと少し似ている。 + +00:59.090 --> 01:02.420 +だから、 僕らが以前やっていたことの上に成り立っているようなものなんだ。 + +01:02.450 --> 01:04.910 +今度はOpenAIのAPIを使ってみよう。 + +01:04.910 --> 01:09.200 +そして、 以前と同じように、 望めばオラマを使うように切り替えることができる。 + +01:09.230 --> 01:11.960 +あなたはもうその専門家だから、 私はあなたに任せるわ。 + +01:11.990 --> 01:16.130 +ワンショット・プロンプティングと呼ばれるテクニックを使います。 + +01:16.130 --> 01:22.100 +つまり、 モデルに送るプロンプトの中で、 我々が求めているものの例を示すということだ。 + +01:22.100 --> 01:25.460 +返事を期待しているようなことを伝えるんだ。 + +01:25.490 --> 01:29.360 +そして、 それを一つの例で行うことを一発プロンプトと呼ぶ。 + +01:29.360 --> 01:34.490 +もし、 例をまったく挙げずに質問するなら、 それはゼロショット・プロンプティングと呼ばれる。 + +01:34.490 --> 01:38.330 +一発でどう答えるか、 問題から導き出すことが期待される。 + +01:38.330 --> 01:43.550 +プロンプティングとは、 1つの例を示すことであり、 さらに、 何を求めているのか、 さまざまな状況でどう反応すべきなのか、 + +01:43.550 --> 01:48.020 +複数の例を示すことをマルチショット・プロンプティングという。 + +01:48.170 --> 01:50.180 +だから、 それがすべてなんだ。 + +01:50.600 --> 01:58.970 +そして、 ストリーミングやフォーマットなどを使って、 この、 素敵で印象的なパンフレットジェネレーターを作ります。 + +01:59.690 --> 02:03.560 +だから、 環境のセットアップを思い出してほしい。 + +02:03.560 --> 02:05.960 +あなたは素晴らしい環境を手に入れた。 + +02:05.960 --> 02:08.810 +しかし、 あなたがしたことを思い出してほしい。 + +02:08.840 --> 02:14.210 +Readme に従って Anaconda の環境(たぶん仮想環境)をセットアップし、 + +02:14.210 --> 02:21.170 +OpenAI でキーをセットアップして、 そのキー(OpenAI API キー)を src proj に置きます。 + +02:21.560 --> 02:22.250 +ブラブラブラ。 + +02:22.280 --> 02:27.860 +それをdot envと呼ばれるファイルに入れて、 プロジェクトのルート・ディレクトリに置く。 + +02:27.860 --> 02:30.470 +だからこそ、 このすべてがうまくいっているのだ。 + +02:30.530 --> 02:38.150 +それで、 元の場所に戻るために今すべきことは、 PCを使っているなら、 アナコンダのプロンプトを表示させることだ。 + +02:38.150 --> 02:41.120 +マックならターミナル・ウィンドウを立ち上げる。 + +02:41.150 --> 02:45.440 +プロジェクトのルート・ディレクトリLMエンジニアリングに行く。 + +02:45.440 --> 02:51.920 +conda、 activate lm conda、 activate LMと入力して環境を起動する。 + +02:51.920 --> 02:55.940 +そうすると、 プロンプトにLMSのプレフィックスが表示されるはずです。 + +02:55.940 --> 
02:58.700 +すでにそう表示されていれば、 すでにアクティベートされていることになる。 + +02:58.910 --> 03:05.330 +JupyterLabを起動するには、 JupyterLabと入力するだけだ。 + +03:05.330 --> 03:08.360 +次のビデオでお会いしましょう。 diff --git a/week5/community-contributions/subtitles/srts/60619883/ko_KR.srt b/week5/community-contributions/subtitles/srts/60619883/ko_KR.srt new file mode 100755 index 0000000..4232ce8 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60619883/ko_KR.srt @@ -0,0 +1,142 @@ +WEBVTT + +00:00.080 --> 00:03.950 +이제 첫 주의 흥미진진한 순간이 다가왔어요 + +00:03.980 --> 00:10.670 +첫 주의 결론은 이 문제를 실제로 실행하고 상업적 해결책을 구축하는 거죠 Get + +00:10.670 --> 00:11.570 +up + +00:11.570 --> 00:18.170 +오늘 내로 오픈AI API에 대해 자신 있게 코드를 작성할 수 있을 겁니다 여러 번 + +00:18.170 --> 00:19.430 +해봤으니까요 + +00:19.430 --> 00:24.650 +원샷 프롬프트라는 테크닉을 사용했을 겁니다 스트리밍 마크다운과 JSON + +00:24.650 --> 00:31.010 +결과에 관해 얘기할 겁니다 전반적으로 비즈니스 솔루션을 몇 분 만에 구현했어요 + +00:31.010 --> 00:32.750 +사업상의 문제가 뭔가요? + +00:32.750 --> 00:33.650 +여기 있어요 + +00:33.650 --> 00:40.490 +응용 프로그램을 만들 거예요 그 회사에 대한 마케팅 책자를 생성할 수 있는 거죠 + +00:40.490 --> 00:46.400 +잠재 고객이나 투자자 혹은 인재를 모집할 때 쓸 수 있는 거죠 + +00:46.580 --> 00:51.740 +여러 소스에서 정보를 함께 모으는 뭔가가 될 거예요 + +00:52.040 --> 00:57.890 +전에 했던 요약 프로젝트랑 좀 비슷하죠 비트만 빼고 요약하고 생성하고 + +00:57.890 --> 00:59.030 +있어요 + +00:59.090 --> 01:02.420 +전에 했던 것 위에 지어진 거죠 + +01:02.450 --> 01:04.910 +OpenAI API를 사용할 거예요 + +01:04.910 --> 01:09.200 +전처럼 원하면 올라마를 쓸 수도 있어요 + +01:09.230 --> 01:11.960 +이제 그건 당신이 전문가니까 당신한테 맡길게요 + +01:11.990 --> 01:16.130 +원샷 프롬프트라는 테크닉을 사용할 겁니다 아주 근사하게 들리네요 + +01:16.130 --> 01:21.320 +모델 전송 프롬프트에서 우리가 찾는 것에 대한 예제를 제공한다는 + +01:21.320 --> 01:22.100 +거죠 + +01:22.100 --> 01:25.460 +답변이 오길 기대하는 그런 걸 알려주는 거죠 + +01:25.490 --> 01:29.360 +하나의 예제로 이걸 하는 걸 원샷 프롬프트라고 하죠 + +01:29.360 --> 01:34.490 +예를 전혀 제시하지 않은 질문을 한다면 이것을 제로 샷 프롬프트라고 하죠 + +01:34.490 --> 01:38.330 +한 발에 어떻게 대답할지 문제를 보고 알아내죠 + +01:38.330 --> 01:43.550 +프롬프트란 하나의 예시를 제시하는 것이고 여러 개의 예시를 제시해 질문을 하고 여러 + +01:43.550 --> 01:48.020 +상황에서 어떤 반응을 보여야 하는지를 멀티샷 프롬프트라고 해요 + +01:48.170 --> 01:50.180 +그게 다인 것 같아요 
+ +01:50.600 --> 01:56.390 +그리고 스트리밍과 포맷 같은 걸 이용해서 이 멋지고 인상적인 브로슈어 + +01:56.390 --> 01:58.970 +생성기를 만들 거예요 + +01:59.690 --> 02:03.560 +환경 설정 얘길 다시 하자면 우린 이걸 죽도록 했어요 + +02:03.560 --> 02:05.960 +잘 맞는 환경을 조성했어요 + +02:05.960 --> 02:08.810 +하지만 당신이 한 짓을 상기시켜 드리자면 압류 차량을 복제했죠 + +02:08.840 --> 02:14.210 +읽기를 따라 아나콘다 환경 가상 가상 가상 가상 환경을 설정하고 OpenAI로 + +02:14.210 --> 02:21.170 +키를 설정해 OpenAI API 키를 입력합니다 src proj죠 + +02:21.560 --> 02:22.250 +어쩌고저쩌고 말이죠 + +02:22.280 --> 02:27.860 +그걸 .Infi라는 파일에 넣으면 프로젝트 루트 디렉터리에 있죠 + +02:27.860 --> 02:30.470 +그래서 이 모든 게 잘 풀리는 거예요 + +02:30.530 --> 02:36.230 +이전으로 돌아가기 위해 해야 할 일은 get attt pc라면 아나콘다 프롬프트를 + +02:36.230 --> 02:38.150 +띄우는 거예요 + +02:38.150 --> 02:41.120 +맥을 사용하면 터미널 윈도우를 띄워요 + +02:41.150 --> 02:45.440 +프로젝트 루트 디렉터리 LM 엔지니어링으로 가세요 + +02:45.440 --> 02:51.920 +콘다를 입력하고 lm 콘다를 활성화하면 환경을 활성화할 수 있죠 + +02:51.920 --> 02:55.940 +그럼 프롬프트 옆에 LMS 접두사가 보일 거예요 + +02:55.940 --> 02:58.700 +이미 그렇게 말하면 이미 활성화 된 거죠 + +02:58.910 --> 03:05.330 +그렇게 하고 나면 JupyterLab을 입력해 JupyterLab을 시작하고 실행하게 하면 돼요 + +03:05.330 --> 03:08.360 +다음 영상에서 다시 만나요 diff --git a/week5/community-contributions/subtitles/srts/60620025/en_US.srt b/week5/community-contributions/subtitles/srts/60620025/en_US.srt new file mode 100755 index 0000000..f679c7b --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620025/en_US.srt @@ -0,0 +1,298 @@ +WEBVTT + +00:00.050 --> 00:04.520 +And welcome back to Jupyter Lab, one of my very favorite places to be. + +00:04.670 --> 00:10.580 +When Jupyter Lab sprung up on your screen, you will probably arrive at this place the root directory + +00:10.580 --> 00:12.350 +with all eight weeks of our work. + +00:12.380 --> 00:17.150 +You may also already be within week one, which you get to of course by double clicking here. + +00:17.150 --> 00:23.450 +We are in week one, and I'd like to ask you to head over to day five, which is where we will be spending + +00:23.450 --> 00:25.100 +the next few moments. 
+ +00:25.100 --> 00:30.830 +So as I say, the business challenge that I have in store for you is to build on what we already built + +00:30.830 --> 00:36.920 +in day one, to create a brochure for a company by scraping the web, finding out more about the company, + +00:36.920 --> 00:38.900 +and using that for our brochure. + +00:39.230 --> 00:43.820 +Um, and if you do encounter any problems with this, please do reach out to me. + +00:43.820 --> 00:47.690 +But if possible, you should be executing this while I speak. + +00:47.690 --> 00:49.940 +Or maybe afterwards come back in and go through it. + +00:49.940 --> 00:54.710 +And the trick is to come in and add in some print statements and really convince yourself that you're + +00:54.710 --> 00:57.140 +confident with what's going on at every point. + +00:57.140 --> 00:59.120 +I'm going to start with some imports. + +00:59.120 --> 01:04.790 +Remember, you press shift and enter to run that, um, if you have any problems with these imports? + +01:04.790 --> 01:10.970 +The most likely explanation is that somehow you're not running in an activated environment. + +01:10.970 --> 01:14.150 +JupyterLab was brought up in an activated environment. + +01:14.150 --> 01:21.020 +Check to see whether Lmms is in the prompt, in your in your terminal window or in your Anaconda prompt. + +01:21.020 --> 01:26.090 +And if not, then start that part again and look at the readme if you need help on that. + +01:26.120 --> 01:32.630 +Uh, it's possible you might also, in some situations, need to restart the Python process which sits + +01:32.630 --> 01:35.600 +behind this, uh, which is known as the kernel. + +01:35.600 --> 01:41.630 +And to do that you go to the kernel menu and you say restart kernel and clear outputs of all cells. + +01:41.630 --> 01:45.230 +And you simply start this notebook again and here we go again. + +01:45.260 --> 01:47.150 +We'll run the import a second time. + +01:47.570 --> 01:48.560 +All right. 
+ +01:48.590 --> 01:51.200 +Now we're going to initialize and set things up. + +01:51.200 --> 01:53.810 +We're going to load in our dot env file. + +01:53.810 --> 01:56.930 +And we're just going to check that the key looks good. + +01:57.110 --> 01:58.730 +Uh and it does for me. + +01:58.730 --> 02:00.830 +And hopefully it's looked good for you as well. + +02:00.830 --> 02:05.000 +Otherwise head over to the troubleshooting notebook to figure out what's going on. + +02:05.030 --> 02:10.790 +And we're setting our model to be GPT four mini, the cheap version of GPT four, which is still going + +02:10.820 --> 02:12.620 +to be phenomenally good. + +02:12.950 --> 02:15.800 +Okay, so this should look familiar to you. + +02:15.800 --> 02:20.900 +In the next cell, we're looking at the class website that we created in week one. + +02:20.900 --> 02:23.660 +And maybe now take a little bit of a closer look at it. + +02:23.660 --> 02:26.120 +It's your second time of playing with this. + +02:26.120 --> 02:31.550 +You'll remember that this is a class that we create by passing in a URL. + +02:31.550 --> 02:37.100 +It uses the requests package to retrieve that URL. + +02:37.400 --> 02:44.570 +It then collects the content, and it uses Beautifulsoup, that wonderful parsing package to parse it. + +02:44.600 --> 02:46.520 +There's something different here. + +02:46.520 --> 02:52.880 +We not only parse in the title and the contents and strip out some of the junk that we don't need, + +02:52.910 --> 03:01.010 +but we also gather any links that are referred to on this page, and we collect those links in something + +03:01.010 --> 03:02.450 +called self dot links. + +03:02.450 --> 03:06.170 +So we're going to store all of our links in there. + +03:07.310 --> 03:14.780 +And this little line here hopefully because we just went through an AST zero one preview for for some, + +03:14.780 --> 03:15.560 +uh, no. 
+ +03:15.590 --> 03:21.590 +We asked, uh, sorry, uh, GPT four with canvas to explain some of this. + +03:21.590 --> 03:24.320 +So maybe this is now very familiar to you. + +03:25.010 --> 03:30.080 +Uh, and then we're going to have a method, getcontents, which is going to describe what this web + +03:30.110 --> 03:30.920 +page does. + +03:30.920 --> 03:32.330 +So let's run that. + +03:32.330 --> 03:38.210 +So let's now again do, uh, what we did before editor is website. + +03:41.450 --> 03:42.110 +Edward Dunham. + +03:42.110 --> 03:46.700 +Com my wonderful website. + +03:46.730 --> 03:49.670 +That's very simplistic, but it's a good test for us now. + +03:49.670 --> 03:55.460 +And let's print print ad dot get contents. + +03:55.640 --> 04:00.680 +Remember last time we printed just the title and the body? + +04:00.770 --> 04:02.150 +Let's see what we get. + +04:02.150 --> 04:03.230 +So now we do that. + +04:03.230 --> 04:07.940 +What we get is again the title and we get the body. + +04:07.940 --> 04:10.460 +But hopefully we're going to get something else as well. + +04:10.640 --> 04:14.430 +Uh, we're also Uh, can I get. + +04:14.460 --> 04:18.900 +Well, we get the title and the contents all in one long string as part of Getcontents. + +04:18.900 --> 04:25.350 +But the other thing that I want to look at then is I want to look at what is editor dot links. + +04:25.860 --> 04:27.870 +Let's see what this has. + +04:28.920 --> 04:36.510 +And now you'll see that in this links variable we now have all of the links that you'll find on my web + +04:36.540 --> 04:37.320 +page. + +04:37.440 --> 04:39.720 +Uh, it might be easier if I don't have the print. + +04:39.750 --> 04:41.310 +If I just do it this way. + +04:41.340 --> 04:42.510 +We'll get them listed out there. + +04:42.510 --> 04:43.620 +That's easier, isn't it? + +04:43.650 --> 04:44.730 +So there they are. + +04:44.760 --> 04:47.430 +Here are all of the links that you'll find on my web page. 
+ +04:47.430 --> 04:49.800 +And they're now being stored in this variable links. + +04:49.830 --> 04:51.750 +Hopefully that's clear to you. + +04:52.200 --> 04:53.220 +All right. + +04:53.250 --> 05:00.030 +Now if we're building a company brochure and we want to provide a web page and we want it to use that + +05:00.030 --> 05:06.690 +to gather more information, we want it to follow some of these links to figure out how it can. + +05:06.720 --> 05:08.400 +It can collect more information from them. + +05:08.400 --> 05:11.250 +But not all of these links are going to be relevant. + +05:11.250 --> 05:15.120 +Some of these links are going to be red herrings, like this thing here, which is probably from from + +05:15.120 --> 05:17.950 +one of the, uh, The analytics tags that's included. + +05:17.950 --> 05:23.140 +Or there are some other things here like that's going to be irrelevant. + +05:23.170 --> 05:30.100 +Now it's going to be really hard for us to write code to figure out whether or not a link is relevant + +05:30.100 --> 05:33.220 +for the purposes of generating a sales brochure. + +05:33.220 --> 05:34.930 +That's actually really hard. + +05:34.960 --> 05:40.960 +The other thing we might want to do is take a link to something like slash about and replace it with + +05:40.960 --> 05:43.090 +a full URL as well. + +05:43.090 --> 05:47.800 +And maybe that's easier to do with code, but it's still not a not a simple task by any means. + +05:47.800 --> 05:53.680 +The combined code to try and figure out which of these links are relevant and what's the full, full + +05:53.710 --> 05:56.290 +URL, including the the host. + +05:56.290 --> 06:01.600 +That would be a lot of coding, but of course, it turns out that's the kind of thing that GPT four + +06:01.810 --> 06:04.240 +mini would be very good at doing for us. + +06:04.240 --> 06:06.760 +We could just ask it to do that as a task. 
+ +06:06.760 --> 06:12.100 +It's an example of taking a sort of nuanced, complicated task, and rather than trying to hand code + +06:12.100 --> 06:17.650 +it, we can just ship it off to a frontier model and say, do this for us, and that's what we're going + +06:17.650 --> 06:20.350 +to do, and we're going to do that in the next video. diff --git a/week5/community-contributions/subtitles/srts/60620025/ja_JP.srt b/week5/community-contributions/subtitles/srts/60620025/ja_JP.srt new file mode 100755 index 0000000..98130bf --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620025/ja_JP.srt @@ -0,0 +1,253 @@ +WEBVTT + +00:00.050 --> 00:04.520 +そして、 Jupyter Labへようこそ!Jupyter Labは僕の大好きな場所のひとつだ。 + +00:04.670 --> 00:12.350 +あなたのスクリーンにJupyter Labが現れたら、 おそらく私たちの8週間分の仕事がすべて入っているルート・ディレクトリにたどり着くだろう。 + +00:12.380 --> 00:17.150 +もちろん、 ここをダブルクリックすれば1週目に入ることができる。 + +00:17.150 --> 00:25.100 +週目に入りましたので、 5日目に向かっていただきたいと思います。 + +00:25.100 --> 00:30.830 +つまり、 初日にすでに作ったものを土台にして、 ウェブをスクラップし、 + +00:30.830 --> 00:38.900 +その会社について詳しく調べ、 それをパンフレットに使うというビジネス・チャレンジだ。 + +00:39.230 --> 00:43.820 +もし何か問題があれば、 私に連絡してください。 + +00:43.820 --> 00:47.690 +でもできれば、 私が話している間に実行してほしい。 + +00:47.690 --> 00:49.940 +あるいは、 その後にまた戻ってきて、 それをやり直すかもしれない。 + +00:49.940 --> 00:57.140 +そして、 そのコツは、 何枚かのプリントを追加して、 すべての時点で何が起こっているのかに自信を持っていると自分自身を本当に納得させることだ。 + +00:57.140 --> 00:59.120 +まずは輸入車から始めようと思う。 + +00:59.120 --> 01:04.790 +Shiftを押しながらEnterを押してインポートを実行することを忘れないでください。 + +01:04.790 --> 01:10.970 +最も可能性が高いのは、 なぜかアクティベートされた環境で動いていないことだ。 + +01:10.970 --> 01:14.150 +JupyterLabはアクティベートされた環境で育った。 + +01:14.150 --> 01:21.020 +Lmms がプロンプトに表示されているか、 ターミナルウィンドウに表示されているか、 Anaconda プロンプトに表示されているかを確認する。 + +01:21.020 --> 01:26.090 +もしそうでなければ、 その部分をもう一度始めて、 助けが必要ならReadmeを見てください。 + +01:26.120 --> 01:35.600 +状況によっては、 カーネルと呼ばれるPythonプロセスを再起動する必要があるかもしれません。 + +01:35.600 --> 01:41.630 +カーネル・メニューを開き、 カーネルを再起動してすべてのセルの出力をクリアする。 + +01:41.630 --> 01:45.230 +そして、 またこのノートを始めればいい。 + +01:45.260 --> 01:47.150 +もう一度インポートを実行する。 + 
+01:47.570 --> 01:48.560 +分かった。 + +01:48.590 --> 01:51.200 +では、 初期化とセットアップを行おう。 + +01:51.200 --> 01:53.810 +dotのenvファイルを読み込む。 + +01:53.810 --> 01:56.930 +そして、 キーが問題ないことを確認するだけだ。 + +01:57.110 --> 01:58.730 +僕にとってはそうだ。 + +01:58.730 --> 02:00.830 +そして願わくば、 それがあなたにとっても良いものであったことを。 + +02:00.830 --> 02:05.000 +そうでなければ、 トラブルシューティングノートで原因を突き止めよう。 + +02:05.030 --> 02:12.620 +そして、 GPT4の廉価版であるGPT4ミニにモデルを設定している。 + +02:12.950 --> 02:15.800 +なるほど、 これは見覚えがあるはずだ。 + +02:15.800 --> 02:20.900 +次のセルでは、 第1週に作成したクラスのウェブサイトを見てみよう。 + +02:20.900 --> 02:23.660 +そして今、 もう少し詳しく見てみよう。 + +02:23.660 --> 02:26.120 +これでプレーするのは2回目だね。 + +02:26.120 --> 02:31.550 +これは、 URLを渡して作成するクラスであることを覚えているだろう。 + +02:31.550 --> 02:37.100 +そのURLを取得するためにrequestsパッケージを使用する。 + +02:37.400 --> 02:44.570 +そしてコンテンツを収集し、 Beautifulsoupという素晴らしい解析パッケージを使って解析する。 + +02:44.600 --> 02:46.520 +ここは何かが違う。 + +02:46.520 --> 02:52.880 +タイトルとコンテンツを解析し、 不要なものを取り除くだけでなく、 + +02:52.910 --> 03:02.450 +このページで参照されているリンクを集め、 self dot linksと呼ばれるものに集める。 + +03:02.450 --> 03:06.170 +そこで、 すべてのリンクをそこに保存することにする。 + +03:07.310 --> 03:15.560 +そして、 この小さな線は、 ASTゼロ・ワンのプレビューを通過したところなので、 うまくいけばいいのですが......。 + +03:15.590 --> 03:21.590 +私たちは、 ああ、 申し訳ないが、 ああ、 キャンバスを持つGPTの4人に、 この一部を説明してくれるように頼んだ。 + +03:21.590 --> 03:24.320 +だから、 もしかしたら、 これはもうあなたにとってとても身近なことなのかもしれない。 + +03:25.010 --> 03:30.920 +それから、 getcontentsというメソッドを用意して、 このウェブページが何をするのかを記述する。 + +03:30.920 --> 03:32.330 +では、 それを実行してみよう。 + +03:32.330 --> 03:38.210 +では、 編集者がウェブサイトを作る前にやったことをもう一度やろう。 + +03:41.450 --> 03:42.110 +エドワード・ダナム + +03:42.110 --> 03:46.700 +私の素晴らしいウェブサイトをご覧ください。 + +03:46.730 --> 03:49.670 +とても単純な話だけど、 今の僕らにとってはいいテストだよ。 + +03:49.670 --> 03:55.460 +そして、 印刷広告ドットゲットコンテンツを印刷しよう。 + +03:55.640 --> 04:00.680 +前回、 タイトルと本文だけを印刷したのを覚えているだろうか? + +04:00.770 --> 04:02.150 +何が出てくるか見てみよう。 + +04:02.150 --> 04:03.230 +だから今はそうしている。 + +04:03.230 --> 04:07.940 +私たちが手にするのは、 またしてもタイトルであり、 ボディである。 + +04:07.940 --> 04:10.460 +でも、 他のものも手に入れられるといいね。 + +04:10.640 --> 04:14.430 +あの、 私たちも......ちょっといいですか? 
+ +04:14.460 --> 04:18.900 +さて、 Getcontentsの一部として、 タイトルと内容を1つの長い文字列で取得する。 + +04:18.900 --> 04:25.350 +しかし、 もうひとつ見ておきたいのは、 エディター・ドット・リンクとは何かということだ。 + +04:25.860 --> 04:27.870 +何があるのか見てみよう。 + +04:28.920 --> 04:37.320 +そして今、 このリンク変数に、 私のウェブページにあるすべてのリンクがあることがわかるだろう。 + +04:37.440 --> 04:39.720 +ええと、 プリントを持っていない方が簡単かもしれない。 + +04:39.750 --> 04:41.310 +こうすればいいんだ。 + +04:41.340 --> 04:42.510 +私たちはそれらをそこにリストアップする。 + +04:42.510 --> 04:43.620 +その方が簡単だろう? + +04:43.650 --> 04:44.730 +そう、 これだ。 + +04:44.760 --> 04:47.430 +以下は、 私のウェブページにあるすべてのリンクです。 + +04:47.430 --> 04:49.800 +そしてそれらは現在、 この変数リンクに保存されている。 + +04:49.830 --> 04:51.750 +それがはっきりすればいいのだが。 + +04:52.200 --> 04:53.220 +分かった。 + +04:53.250 --> 05:00.030 +今、 私たちが会社案内を作っていて、 ウェブページを提供し、 それを使ってより多くの情報を収集させたい場合、 + +05:00.030 --> 05:06.690 +これらのリンクのいくつかをたどって、 どのようにできるかを考えてもらいたい。 + +05:06.720 --> 05:08.400 +彼らからより多くの情報を集めることができる。 + +05:08.400 --> 05:11.250 +しかし、 これらのリンクのすべてが関連するわけではない。 + +05:11.250 --> 05:15.120 +これらのリンクのいくつかは、 おそらくアナリティクスのタグに含まれているものであろう、 + +05:15.120 --> 05:17.950 +ここにあるもののように、 赤いニリングになりそうです。 + +05:17.950 --> 05:23.140 +あるいは、 それとは関係ないようなこともある。 + +05:23.170 --> 05:33.220 +これで、 販売パンフレットを作成する目的で、 リンクが関連性があるかどうかを把握するコードを書くのは本当に難しくなった。 + +05:33.220 --> 05:34.930 +それは本当に難しいことだ。 + +05:34.960 --> 05:43.090 +もうひとつは、 スラッシュ・アバウトのようなものへのリンクを、 完全なURLに置き換えることだ。 + +05:43.090 --> 05:47.800 +そして、 それはコードを使えば簡単にできるかもしれないが、 それでも決して簡単な作業ではない。 + +05:47.800 --> 05:56.290 +これらのリンクのどれが関連性があるのか、 そしてホストを含む完全なURLは何なのかを把握しようとするコードが組み合わされている。 + +05:56.290 --> 06:04.240 +もちろん、 GPT4ミニはそういうことをやってくれる。 + +06:04.240 --> 06:06.760 +それをタスクとして依頼すればいいのだ。 + +06:06.760 --> 06:12.100 +これは、 微妙で複雑なタスクを手作業でコード化するのではなく、 + +06:12.100 --> 06:20.350 +フロンティア・モデルに委ねて「これをやってくれ。 diff --git a/week5/community-contributions/subtitles/srts/60620025/ko_KR.srt b/week5/community-contributions/subtitles/srts/60620025/ko_KR.srt new file mode 100755 index 0000000..d1dd013 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620025/ko_KR.srt @@ -0,0 +1,292 @@ 
+WEBVTT + +00:00.050 --> 00:04.520 +주피터 연구소에 잘 오셨습니다 제가 가장 좋아하는 곳이죠 + +00:04.670 --> 00:10.580 +주피터 랩이 화면에 등장했을 때 여러분은 아마도 이곳에 도달하게 될 겁니다 루트 디렉터리요 + +00:10.580 --> 00:12.350 +작업한 8주 치가 다 있죠 + +00:12.380 --> 00:17.150 +이미 첫째 주에 다뤘을 수도 있어요 여기서 더블 클릭하면 get이 가능하죠 + +00:17.150 --> 00:23.450 +이제 첫째 주입니다 5일째를 맞아 앞으로 몇 분간 함께해 주세요 + +00:23.450 --> 00:25.100 +HRMH + +00:25.100 --> 00:30.830 +말씀드렸듯이 제가 준비한 비즈니스 도전은 첫날 이미 만든 걸 기반으로 구축하는 + +00:30.830 --> 00:36.920 +겁니다 회사를 위한 책자를 만드는 거죠 웹을 긁어 회사에 대해 더 많은 걸 알아내 그걸 + +00:36.920 --> 00:38.900 +책자에 사용해서요 + +00:39.230 --> 00:43.820 +혹시 문제가 생기면 저한테 연락 주세요 + +00:43.820 --> 00:47.690 +가능하다면 제가 말하는 동안 이걸 실행하세요 + +00:47.690 --> 00:49.940 +아니면 나중에 다시 와서 살펴보거나요 + +00:49.940 --> 00:54.710 +비법은 print문을 추가하는 겁니다 모든 시점에서 일어나는 일에 대해 + +00:54.710 --> 00:57.140 +자신 있다고 자신을 설득하는 거죠 + +00:57.140 --> 00:59.120 +수입품부터 시작할 거예요 + +00:59.120 --> 01:04.790 +시프트와 엔터를 눌러야 실행할 수 있어요 수입에 문제가 생기면요 + +01:04.790 --> 01:10.970 +가장 그럴듯한 설명은 활성화된 환경에서 실행되고 있지 않다는 거죠 + +01:10.970 --> 01:14.150 +유피터랩은 활성화 환경에서 시작됐어요 + +01:14.150 --> 01:21.020 +Lmm이 프롬프트에 있는지 확인하세요 터미널 창이나 아나콘다 프롬프트에 있는지요 + +01:21.020 --> 01:26.090 +그게 아니라면 그 부분을 다시 시작하고 도움이 필요하면 읽기를 보세요 + +01:26.120 --> 01:32.630 +어떤 상황에서는 파이썬 프로세스를 다시 시작해야 할 수도 있습니다 커널이라고 + +01:32.630 --> 01:35.600 +알려진 이 뒤에 있는 것이죠 + +01:35.600 --> 01:41.630 +커널 메뉴로 가서 커널을 재시작하라고 하면 모든 셀의 출력이 나오죠 + +01:41.630 --> 01:45.230 +이 공책을 다시 시작하면 다시 시작이네요 + +01:45.260 --> 01:47.150 +다시 import를 실행할게요. + +01:47.570 --> 01:48.560 +좋아요 + +01:48.590 --> 01:51.200 +이제 초기화하고 셋업할게요 + +01:51.200 --> 01:53.810 +.Infile을 로드할 거예요 + +01:53.810 --> 01:56.930 +열쇠가 잘 있는지 확인할게요 + +01:57.110 --> 01:58.730 +저한텐 그래요 + +01:58.730 --> 02:00.830 +당신도 좋아 보였으면 좋겠네요 + +02:00.830 --> 02:05.000 +아니면 문제를 해결하기 위해 노트 앞에 가보세요. HDP의 HDP는 + +02:05.030 --> 02:10.790 +저희는 GPT 4 미니를 모델로 설정했습니다 GPT 4의 저렴한 버전이죠 이 모델도 놀라울 + +02:10.820 --> 02:12.620 +정도로 훌륭할 거예요 + +02:12.950 --> 02:15.800 +자, 이제 눈에 익죠? 
+ +02:15.800 --> 02:20.900 +옆 칸에는 우리가 첫 주에 만든 클래스 웹사이트가 있어요 + +02:20.900 --> 02:23.660 +이제 비트를 좀 더 자세히 살펴보죠 + +02:23.660 --> 02:26.120 +두 번째 갖고 노는 거잖아요 + +02:26.120 --> 02:31.550 +URL 전달로 생성되는 클래스라는 걸 기억하실 거예요 + +02:31.550 --> 02:37.100 +URL 검색을 위해 요청 패키지를 사용하죠 + +02:37.400 --> 02:44.570 +그리고 콘텐츠를 수집합니다 그리고 뷰티풀 소우프를 이용합니다 훌륭한 파싱 패키지를 이용해서 파싱을 하죠 + +02:44.600 --> 02:46.520 +뭔가 달라요 + +02:46.520 --> 02:52.880 +제목과 내용을 분석하고 필요 없는 건 제거할 뿐 아니라 이 페이지에서 + +02:52.910 --> 03:02.450 +참조되는 모든 링크도 모아요 셀프.링크스라는 곳에서 그런 링크들을 모아요 + +03:02.450 --> 03:06.170 +모든 링크를 여기에 저장할 거예요 + +03:07.310 --> 03:15.560 +여기 이 줄에 대해 말하자면 AST 01 프리뷰를 봤기 때문에 아, 아니에요 + +03:15.590 --> 03:21.590 +죄송합니다 GPT 4에 캔버스와 함께 설명해 달라고 요청했어요 + +03:21.590 --> 03:24.320 +이제 아주 익숙해지셨을 거예요 + +03:25.010 --> 03:30.080 +그리고 getcontent라는 메서드가 있는데 이 웹 페이지가 하는 일을 설명해줄 + +03:30.110 --> 03:30.920 +거예요 + +03:30.920 --> 03:32.330 +실행해 보죠 + +03:32.330 --> 03:38.210 +이제 편집기 전에 했던 걸 다시 해보죠 웹사이트 + +03:41.450 --> 03:42.110 +에드워드 더넘요 + +03:42.110 --> 03:46.700 +제 멋진 웹사이트에 연락하세요 + +03:46.730 --> 03:49.670 +아주 단순하지만 우리에겐 좋은 시험이죠 + +03:49.670 --> 03:55.460 +print- ad. get 콘텐츠라고 입력하죠 + +03:55.640 --> 04:00.680 +지난번엔 제목과 몸만 인쇄했잖아요 + +04:00.770 --> 04:02.150 +get get을 해 보죠 + +04:02.150 --> 04:03.230 +이제 그렇게 하죠 + +04:03.230 --> 04:07.940 +Get it의 제목과 차체를 얻었어요 + +04:07.940 --> 04:10.460 +다른 것도 잡히면 좋겠네요 Get up + +04:10.640 --> 04:14.430 +Get it, get it, get get it, get it, get, get, get, get, it, it + +04:14.460 --> 04:18.900 +Getcontentents의 일부로 제목과 내용 전부를 긴 문자열에 담았어요 + +04:18.900 --> 04:25.350 +여기서 또 살펴볼 것은 Editor. links가 무엇인가 하는 건데요 + +04:25.860 --> 04:27.870 +뭐가 있나 보죠 + +04:28.920 --> 04:36.510 +이제 이 링크 변수에 이제 모든 링크가 있는 걸 보실 수 있습니다 제 웹 페이지에서 찾으실 + +04:36.540 --> 04:37.320 +수 있죠 + +04:37.440 --> 04:39.720 +프린트가 없으면 더 쉬울 것 같아요 + +04:39.750 --> 04:41.310 +이렇게 하면 돼요 + +04:41.340 --> 04:42.510 +Get up 목록에 올릴게요 + +04:42.510 --> 04:43.620 +그게 더 쉽죠? 
+ +04:43.650 --> 04:44.730 +저기 있네요 + +04:44.760 --> 04:47.430 +제 웹페이지에서 찾을 수 있는 링크들이에요 + +04:47.430 --> 04:49.800 +이 변수 링크에 저장되고 있어요 + +04:49.830 --> 04:51.750 +잘 아셨길 바라요 + +04:52.200 --> 04:53.220 +좋아요 + +04:53.250 --> 05:00.030 +회사 브로슈어를 제작하는데 웹페이지를 제공하고 싶고 그걸 이용해 정보를 + +05:00.030 --> 05:06.690 +더 모으고 싶다면 이런 링크를 따라가서 어떻게 가능한지 알아내야죠 + +05:06.720 --> 05:08.400 +정보를 더 수집할 수 있어요 + +05:08.400 --> 05:11.250 +하지만 모든 링크가 관련 있는 건 아니에요 + +05:11.250 --> 05:15.120 +이 링크들 중 일부는 주의를 끄는 것일 수 있습니다. 여기 이것처럼요. 아마 포함되어 + +05:15.120 --> 05:17.950 +있는 분석 태그 중 하나에서 나온 것일 거예요. + +05:17.950 --> 05:23.140 +아니면 관련 없는 다른 것들도 있어요 + +05:23.170 --> 05:30.100 +이제 코드를 작성하는 게 정말 어려워지겠네요 판매 책자 생성에 링크가 + +05:30.100 --> 05:33.220 +관련 있는지 알아보기 위해서요 + +05:33.220 --> 05:34.930 +정말 어렵네요 + +05:34.960 --> 05:40.960 +또 다른 방법은 슬래시 어바웃 같은 것에 링크를 걸고 전체 URL로 + +05:40.960 --> 05:43.090 +바꾸는 거예요 + +05:43.090 --> 05:47.800 +코드로 하면 더 쉬울지 몰라도 결코 간단한 작업은 아니에요 + +05:47.800 --> 05:53.680 +어떤 링크가 관련 있는지 알아내기 위한 코드죠 호스트를 포함해 + +05:53.710 --> 05:56.290 +전체 URL 말이에요 + +05:56.290 --> 06:01.600 +코딩이 엄청 많겠지만 GPT for 미니가 아주 잘 할 + +06:01.810 --> 06:04.240 +수 있는 작업인 것 같아요 + +06:04.240 --> 06:06.760 +그걸 작업으로 요청할 수 있어요 + +06:06.760 --> 06:12.100 +미묘하고 복잡한 작업을 손으로 코드화하는 대신 + +06:12.100 --> 06:17.650 +프론티어 모델로 보내 작업을 지시하는 거죠 다음 비디오에서 + +06:17.650 --> 06:20.350 +할 거예요 diff --git a/week5/community-contributions/subtitles/srts/60620143/en_US.srt b/week5/community-contributions/subtitles/srts/60620143/en_US.srt new file mode 100755 index 0000000..8c41867 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620143/en_US.srt @@ -0,0 +1,448 @@ +WEBVTT + +00:00.110 --> 00:02.570 +So we're going to make a call to GPT four. + +00:02.600 --> 00:07.850 +Oh, that's going to ask it to look through a set of links, figure out which ones are relevant, and + +00:07.850 --> 00:11.000 +then replace them with fully qualified links. 
+ +00:11.270 --> 00:17.660 +Um, and it's going to be a great, a great way of using llms because it requires particularly for selecting + +00:17.660 --> 00:18.740 +which links are relevant. + +00:18.740 --> 00:21.620 +It needs a sort of nuanced reasoning process. + +00:21.890 --> 00:27.560 +Uh, so we're going to we're going to not only are we going to to use GPT four for this purpose, we're + +00:27.560 --> 00:34.130 +going to ask it to respond in the form of JSON in a way that specifies exactly the information that + +00:34.130 --> 00:35.390 +we need back. + +00:35.420 --> 00:41.660 +Now, later on, we're going to cover a technique called structured outputs, which is when we require + +00:41.660 --> 00:45.080 +the LLM to respond with a very specific format. + +00:45.110 --> 00:48.950 +We effectively we specify the format that it needs to respond in. + +00:48.980 --> 00:50.300 +We're not going to do this today. + +00:50.300 --> 00:52.670 +We're just going to simply ask for JSON back. + +00:52.700 --> 00:56.000 +And we're going to tell it the format that it needs to use to reply. + +00:56.000 --> 00:57.380 +And it's going to be great. + +00:57.410 --> 01:00.590 +Uh, this works well for simple requests like this. + +01:00.590 --> 01:04.120 +When you get more sophisticated, you you might need to use structured outputs. + +01:04.120 --> 01:09.370 +And in week eight, when we build our Agentic AI framework, we're going to do just that. + +01:09.370 --> 01:11.560 +But for now, this is what we do. + +01:11.560 --> 01:14.290 +So we're going to to create a system prompt. + +01:14.350 --> 01:19.960 +The system prompt is where, of course we describe the task at hand and how it's to go about doing it. + +01:19.960 --> 01:22.570 +That is where we will be supplying this information. + +01:22.570 --> 01:24.250 +Here's the system prompt. + +01:24.280 --> 01:28.720 +You are provided with a list of links found on a web page. 
+ +01:28.720 --> 01:34.810 +You are able to decide which of these links will be most relevant to include in a brochure about the + +01:34.810 --> 01:41.800 +company, such as links to an about page or a company page, or a careers jobs page. + +01:41.830 --> 01:46.210 +You should respond in JSON as in this example. + +01:46.210 --> 01:49.150 +And then there is an example passed in. + +01:49.150 --> 01:54.400 +And when I said we're working with one shot prompting, that's really what I meant by giving it a specific + +01:54.400 --> 02:01.480 +example that it could use with an about page and a careers page, and the way that we're specifying + +02:01.480 --> 02:07.000 +the format is simply by giving it an example you can see we're asking for a dictionary. + +02:07.030 --> 02:09.880 +It will have a single attribute links. + +02:09.880 --> 02:16.570 +And that links will be a list of again dictionaries with type and URL in each one. + +02:16.570 --> 02:19.420 +And that URL is the full URL. + +02:19.420 --> 02:21.430 +So let me run this cell. + +02:21.430 --> 02:27.190 +And just to make sure that this is clear to you, let me just print link system prompt. + +02:27.190 --> 02:28.840 +So we now have a variable. + +02:28.840 --> 02:31.330 +And this is what that variable contains. + +02:31.330 --> 02:32.410 +We print it out. + +02:32.440 --> 02:34.540 +We'll get the carriage returns as well. + +02:34.540 --> 02:35.830 +Let's have a look at this. + +02:36.910 --> 02:37.990 +Here it is. + +02:38.500 --> 02:44.890 +So this is exactly what we are going to instruct the LLM to do in our system prompt. + +02:45.370 --> 02:50.110 +And now we're going to write a function get links user prompt. + +02:50.110 --> 02:51.400 +And this is what it looks like. + +02:51.430 --> 02:53.770 +It will take a website object. + +02:53.770 --> 02:59.800 +And it's going to say here is a list of links on the website of blah. 
+ +03:00.070 --> 03:04.630 +Please decide which are relevant web links for a brochure about the company. + +03:04.660 --> 03:13.470 +Respond with a full your URL do not include and a few things not to include and then list out the links + +03:13.590 --> 03:17.160 +by one by one and return that. + +03:17.160 --> 03:20.490 +So this will make more sense if we look at an actual example. + +03:20.490 --> 03:26.580 +So let's call get Links user prompt and we'll pass in editor which is of course as before editor. + +03:26.580 --> 03:27.510 +Is this one up here. + +03:27.510 --> 03:29.520 +It's going to be looking at these links. + +03:29.520 --> 03:32.100 +So let's see what this user prompt looks like. + +03:32.130 --> 03:34.800 +And you should run this to and get a sense for it. + +03:34.800 --> 03:36.930 +This is what the user prompt would look like. + +03:36.960 --> 03:40.020 +It's sorry it says exactly what I just said. + +03:40.110 --> 03:45.270 +Uh, it tells it that we're looking at this website, that fine website. + +03:45.540 --> 03:47.430 +Uh, and then here are the links. + +03:47.430 --> 03:55.080 +Some might be relative and you should summarize and you should you should select the ones that are relevant. + +03:55.680 --> 03:56.580 +Okay. + +03:56.610 --> 04:01.920 +And now it's time to put all of this into a function which is going to call OpenAI. + +04:01.920 --> 04:04.890 +And here it is get links URL. + +04:04.890 --> 04:08.430 +So we'll create a new website object for that URL. + +04:09.120 --> 04:11.130 +And now we call this. + +04:11.130 --> 04:15.930 +And I went through this quickly last time, and now it's time to spend a little bit more time on this. + +04:15.930 --> 04:23.670 +We call OpenAI chat, which is the main API for chats completions, which is the one that we will almost + +04:23.670 --> 04:24.330 +always use. 
+ +04:24.330 --> 04:31.560 +Is that the the API, which is the standard API where we're saying your task is to keep going, is to + +04:31.590 --> 04:33.720 +is to complete this conversation. + +04:33.720 --> 04:42.000 +And we create something on the completions API and it takes, as before, a model and messages the model + +04:42.000 --> 04:42.690 +we're passing in. + +04:42.690 --> 04:43.410 +It was a variable. + +04:43.410 --> 04:46.350 +We set GPT four mini right at the start. + +04:46.410 --> 04:47.820 +Messages. + +04:47.820 --> 04:51.120 +Hopefully this is already starting to be familiar to you. + +04:51.120 --> 04:56.370 +The format that we use for messages is a list of dictionaries. + +04:56.400 --> 05:02.700 +It's a list of dictionaries where each dictionary, each dictionary has a key role with either system + +05:02.700 --> 05:08.670 +or user, a key content with the associated system message or user message. + +05:08.670 --> 05:09.270 +So. + +05:09.270 --> 05:11.600 +System system message user. + +05:11.630 --> 05:15.890 +User message that is going in our messages list. + +05:15.890 --> 05:17.360 +It's as simple as that. + +05:17.360 --> 05:20.300 +I hope that this is completely connecting for you. + +05:20.330 --> 05:26.930 +There is one little extra detail, one tiny thing I'm throwing in there, and it's this here response + +05:26.930 --> 05:28.010 +format. + +05:28.100 --> 05:34.970 +So you could tell OpenAI that we want it to provide a JSON object back in its response. + +05:34.970 --> 05:39.500 +And we do that by passing this in type JSON object. + +05:39.500 --> 05:44.870 +So that is something which is it's actually Claude doesn't doesn't have this way of requiring a JSON + +05:44.900 --> 05:45.800 +object back. + +05:45.890 --> 05:46.940 +OpenAI does. 
+ +05:46.940 --> 05:53.120 +But OpenAI mentions in their documentation that even when you use this, it's still important that you + +05:53.120 --> 05:56.570 +mention in your prompt that a JSON response is required. + +05:56.570 --> 06:00.950 +It will only work if you do mention that explicitly in your prompt also. + +06:01.700 --> 06:03.620 +So we do that. + +06:03.650 --> 06:06.440 +What comes back is into this variable completion. + +06:06.440 --> 06:11.180 +Actually, to keep this consistent with before, I'm going to change this to response because that's + +06:11.180 --> 06:12.770 +what we called it last time. + +06:14.540 --> 06:15.410 +There we go. + +06:15.590 --> 06:24.080 +Uh, and, uh, what we then to to to actually get the final reply, we go response dot choices zero. + +06:24.080 --> 06:25.490 +So what's this about? + +06:25.490 --> 06:32.540 +Well, as it happens we can actually in the API request ask to have multiple variations if we want, + +06:32.570 --> 06:36.890 +if we wanted it to generate several possible variations of the response. + +06:36.890 --> 06:37.970 +And we haven't done that. + +06:37.970 --> 06:39.680 +So we're only going to get back one. + +06:39.740 --> 06:43.250 +Uh, and so those variations come back in the form of these choices. + +06:43.250 --> 06:44.840 +But we've only got one. + +06:44.840 --> 06:50.000 +So choices zero is getting us the one and the only choice of the response back. + +06:50.000 --> 06:57.380 +So that's why you'll always see response dot choices zero dot message dot content is just simply drilling + +06:57.380 --> 07:00.050 +down to what is actually the text message back. + +07:00.050 --> 07:05.870 +So basically you get very familiar with these two two things because it's the same in many, many times + +07:05.870 --> 07:06.680 +that we call the API. 
+ +07:06.710 --> 07:12.410 +We'll be doing the same thing OpenAI dot chat, dot completions, dot create and then with what comes + +07:12.410 --> 07:16.840 +back it's response dot choices, zero Message content. + +07:17.380 --> 07:21.370 +You will get to the point when you'll be reciting it in your sleep. + +07:21.700 --> 07:29.200 +And then with what comes back, we're going to use the Json.load string function to then bring that + +07:29.200 --> 07:31.150 +back as JSON. + +07:31.180 --> 07:32.710 +Let's run that. + +07:32.980 --> 07:33.910 +Okay. + +07:33.910 --> 07:35.290 +So we're going to take the plunge. + +07:35.290 --> 07:40.210 +We're going to call that that function and pass in the website anthropic comm. + +07:40.210 --> 07:41.770 +So what are we expecting it to do. + +07:41.800 --> 07:50.950 +We're expecting it to collect all of the links on that page and then call uh call GPT four mini and + +07:50.950 --> 07:57.460 +say please select from this some links which you think are relevant and respond with them. + +07:57.460 --> 07:58.990 +So let's see what we get. + +07:59.020 --> 07:59.890 +Here we go. + +08:01.210 --> 08:02.860 +It's going off now to OpenAI. + +08:02.890 --> 08:06.550 +Well it's first it had to collect the anthropic page and back it comes. + +08:06.550 --> 08:09.910 +And this is what we get type about page. + +08:09.910 --> 08:17.080 +And there's a link to the about page a careers page, a team page, research enterprise pricing, API + +08:17.080 --> 08:17.980 +and news. + +08:17.980 --> 08:18.760 +How about that? + +08:18.760 --> 08:24.610 +This is all actually great information that we would want on a brochure, and no doubt there are a ton + +08:24.610 --> 08:26.380 +of links that it hasn't included. + +08:26.380 --> 08:28.270 +Let's convince ourselves of that. + +08:28.270 --> 08:37.330 +We can say, uh, anthropic is website and just pass this in. 
+ +08:39.700 --> 08:47.500 +And do anthropic dot links and we'll see what are all of the links that were on that page here? + +08:47.500 --> 08:48.310 +They all are. + +08:48.340 --> 08:52.360 +There's a ton of them like supported countries and lots of others. + +08:52.360 --> 08:57.820 +You'll also see that there are many of them that are not fully, uh, the full URL, including the the + +08:57.820 --> 08:58.690 +host name. + +08:58.690 --> 09:06.970 +And so you'll see that and that our call to GPT four mini has very well selected a subset of these fully + +09:06.970 --> 09:09.490 +qualified them and explained what they are. + +09:10.060 --> 09:11.470 +I'd say that's a great result. + +09:11.470 --> 09:16.480 +That was pretty easy to it's just step one of the two steps that we have to go through to build our + +09:16.480 --> 09:17.380 +company brochure. + +09:17.380 --> 09:19.900 +And I will see you in the next video for step two. diff --git a/week5/community-contributions/subtitles/srts/60620143/ja_JP.srt b/week5/community-contributions/subtitles/srts/60620143/ja_JP.srt new file mode 100755 index 0000000..1aeabf4 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620143/ja_JP.srt @@ -0,0 +1,379 @@ +WEBVTT + +00:00.110 --> 00:02.570 +そこでGPT4に電話をかける。 + +00:02.600 --> 00:11.000 +ああ、 これはリンクのセットに目を通し、 どれが関連性のあるリンクかを把握し、 完全修飾リンクに置き換えるように要求しているのだ。 + +00:11.270 --> 00:18.740 +特に、 どのリンクが関連性があるのかを選択する必要があるからだ。 + +00:18.740 --> 00:21.620 +一種のニュアンスのある推論プロセスが必要なのだ。 + +00:21.890 --> 00:27.560 +この目的のためにGPT 4を使うだけでなく、 必要な情報を正確に指定する方法で、 + +00:27.560 --> 00:35.390 +JSONの形で応答するよう依頼するつもりだ。 + +00:35.420 --> 00:45.080 +さて、 この後、 構造化出力と呼ばれるテクニックを取り上げますが、 これはLLMに非常に特殊なフォーマットでの応答を要求する場合です。 + +00:45.110 --> 00:48.950 +私たちは効果的に、 応答するために必要なフォーマットを指定する。 + +00:48.980 --> 00:50.300 +今日はやらない。 + +00:50.300 --> 00:52.670 +単純にJSONを返してもらうだけだ。 + +00:52.700 --> 00:56.000 +そして、 返信に必要な書式を指示する。 + +00:56.000 --> 00:57.380 +素晴らしいものになるだろう + +00:57.410 --> 01:00.590 +ええと、 このような単純なリクエストには効果的です。 
+ +01:00.590 --> 01:04.120 +もっと高度になると、 構造化出力を使う必要が出てくるかもしれない。 + +01:04.120 --> 01:09.370 +第8週では、 エージェントAIのフレームワークを構築する。 + +01:09.370 --> 01:11.560 +でも今は、 これが僕らの仕事だ。 + +01:11.560 --> 01:14.290 +そこで、 システム・プロンプトを作成することにする。 + +01:14.350 --> 01:19.960 +システム・プロンプトはもちろん、 目の前のタスクと、 それをどのように進めるかを説明する場所だ。 + +01:19.960 --> 01:22.570 +そこでこの情報を提供することになる。 + +01:22.570 --> 01:24.250 +これがシステム・プロンプトだ。 + +01:24.280 --> 01:28.720 +ウェブページにあるリンクのリストが提供される。 + +01:28.720 --> 01:34.810 +これらのリンクのうち、 会社概要ページや会社ページ、 採用情報ページへのリンクなど、 + +01:34.810 --> 01:41.800 +会社案内のパンフレットに掲載するのに最もふさわしいものはどれかを決めることができます。 + +01:41.830 --> 01:46.210 +この例のようにJSONで応答してください。 + +01:46.210 --> 01:49.150 +そして、 渡された例がある。 + +01:49.150 --> 01:54.400 +一発プロンプトで作業していると言ったのは、 + +01:54.400 --> 02:07.000 +アバウトページやキャリアページで使用できる具体的な例を示したというのが本当の意味です。 + +02:07.030 --> 02:09.880 +属性リンクは1つになる。 + +02:09.880 --> 02:16.570 +そしてそのリンクは、 それぞれの辞書に種類とURLが記載された再度の辞書のリストになる。 + +02:16.570 --> 02:19.420 +そしてそのURLは完全なURLである。 + +02:19.420 --> 02:21.430 +では、 このセルを走らせてみよう。 + +02:21.430 --> 02:27.190 +念のため、 リンクのシステム・プロンプトをプリントしておこう。 + +02:27.190 --> 02:28.840 +これで変数ができた。 + +02:28.840 --> 02:31.330 +そして、 その変数に含まれているのがこれだ。 + +02:31.330 --> 02:32.410 +印刷するんだ。 + +02:32.440 --> 02:34.540 +キャリッジリターンも取得する。 + +02:34.540 --> 02:35.830 +これを見てみよう。 + +02:36.910 --> 02:37.990 +これだ。 + +02:38.500 --> 02:44.890 +だから、 システム・プロンプトでLLMに指示するのはまさにこれなのだ。 + +02:45.370 --> 02:50.110 +次に、 ユーザー・プロンプトのリンクを取得する関数を書いてみよう。 + +02:50.110 --> 02:51.400 +そして、 こんな感じだ。 + +02:51.430 --> 02:53.770 +ウェブサイト・オブジェクトを取る。 + +02:53.770 --> 02:59.800 +そして、 ここには「blah」のウェブサイト上のリンクのリストが表示される。 + +03:00.070 --> 03:04.630 +会社案内に関連するウェブリンクを決定してください。 + +03:04.660 --> 03:17.160 +あなたのURLは含めないこと、 含めないことをいくつか挙げて、 リンクをひとつずつリストアップして返す。 + +03:17.160 --> 03:20.490 +だから、 これは実際の例を見ればもっと理解できるだろう。 + +03:20.490 --> 03:26.580 +では、 リンクス・ユーザー・プロンプトを取得して、 エディターを渡すことにしよう。 + +03:26.580 --> 03:27.510 +これはこの上ですか? 
+ +03:27.510 --> 03:29.520 +これらのリンクを見ることになる。 + +03:29.520 --> 03:32.100 +では、 このユーザー・プロンプトがどんなものか見てみよう。 + +03:32.130 --> 03:34.800 +そして、 これを実行して感覚をつかむべきだ。 + +03:34.800 --> 03:36.930 +ユーザー・プロンプトはこのように表示される。 + +03:36.960 --> 03:40.020 +今言ったことと同じことが書いてあって、 申し訳ない。 + +03:40.110 --> 03:45.270 +このウェブサイトを見ている、 あの素晴らしいウェブサイトを見ている、 と教えてくれるんだ。 + +03:45.540 --> 03:47.430 +それから、 ここにリンクがある。 + +03:47.430 --> 03:55.080 +中には相対的なものもあるかもしれないので、 まとめるべきだし、 関連性のあるものを選ぶべきだ。 + +03:55.680 --> 03:56.580 +オーケー。 + +03:56.610 --> 04:01.920 +そして今、 このすべてをOpenAIを呼び出す関数に入れる時だ。 + +04:01.920 --> 04:04.890 +そして、 ここにリンクのURLがある。 + +04:04.890 --> 04:08.430 +そこで、 そのURLに対して新しいウェブサイト・オブジェクトを作成する。 + +04:09.120 --> 04:11.130 +そして今、 私たちはこう呼んでいる。 + +04:11.130 --> 04:15.930 +前回は手短に済ませたが、 今回はもう少し時間をかけよう。 + +04:15.930 --> 04:24.330 +OpenAIのチャットを呼び出します。 これは、 チャットの完了のための主要なAPIで、 私たちがほとんど常に使うものです。 + +04:24.330 --> 04:33.720 +APIは標準APIで、 あなたのタスクはこの会話を続けることです。 + +04:33.720 --> 04:42.690 +そして、 completion APIで何かを作成し、 先ほどと同じようにモデルを受け取り、 そのモデルをメッセージで送ります。 + +04:42.690 --> 04:43.410 +それは変数だった。 + +04:43.410 --> 04:46.350 +GPTのミニ4枚を最初にセットした。 + +04:46.410 --> 04:47.820 +メッセージ + +04:47.820 --> 04:51.120 +もうお分かりいただけただろうか。 + +04:51.120 --> 04:56.370 +メッセージに使うフォーマットは辞書のリストだ。 + +04:56.400 --> 05:08.670 +これは辞書のリストであり、 各辞書は、 システムまたはユーザー、 関連するシステム・メッセージまたはユーザー・メッセージのキー・コンテンツを持つキー・ロールを持つ。 + +05:08.670 --> 05:09.270 +だから + +05:09.270 --> 05:11.600 +システムシステムメッセージユーザー。 + +05:11.630 --> 05:15.890 +メッセージリストに表示されるユーザーメッセージ。 + +05:15.890 --> 05:17.360 +簡単なことだ。 + +05:17.360 --> 05:20.300 +これがあなたにとって完全につながることを願っている。 + +05:20.330 --> 05:28.010 +細かいことだが、 1つだけ特別なことがある。 + +05:28.100 --> 05:34.970 +だから、 OpenAIに、 レスポンスにJSONオブジェクトを返すように指示することができる。 + +05:34.970 --> 05:39.500 +そのためには、 JSONオブジェクトを渡す。 + +05:39.500 --> 05:45.800 +クロードはJSONオブジェクトを要求するようなことはしない。 + +05:45.890 --> 05:46.940 +OpenAIはそうだ。 + +05:46.940 --> 05:56.570 +しかし、 OpenAIのドキュメントには、 これを使う場合でも、 JSONレスポンスが必要であることをプロンプトに記載することが重要であると記載されています。 + +05:56.570 --> 06:00.950 +これは、 
プロンプトにそのことを明記している場合にのみ機能する。 + +06:01.700 --> 06:03.620 +だからそうするんだ。 + +06:03.650 --> 06:06.440 +戻ってきたものは、 この変数コンプリートに入る。 + +06:06.440 --> 06:12.770 +実は、 前回と一貫性を保つために、 前回そう呼んでいたので、 レスポンスに変更しようと思う。 + +06:14.540 --> 06:15.410 +これでよし。 + +06:15.590 --> 06:24.080 +そして、 最終的な返答を得るために、 ドット選択肢ゼロを選ぶ。 + +06:24.080 --> 06:25.490 +それで、 これは何なんだ? + +06:25.490 --> 06:36.890 +まあ、 実際にAPIリクエストで複数のバリエーションをリクエストすることは可能だ。 + +06:36.890 --> 06:37.970 +私たちはそれをやっていない。 + +06:37.970 --> 06:39.680 +だから1本しか返せない。 + +06:39.740 --> 06:43.250 +そして、 そのバリエーションが選択肢という形で戻ってくる。 + +06:43.250 --> 06:44.840 +でも、 1人しかいない。 + +06:44.840 --> 06:50.000 +つまり、 選択肢ゼロは、 唯一無二の選択肢である。 + +06:50.000 --> 07:00.050 +そのため、 常にレスポンス・ドット・チョイス・ゼロ・ドット・メッセージ・ドット・コンテンツが表示されるわけだが、 これは単に、 実際にテキストメッセージが返されるところまでドリルダウンしているだけである。 + +07:00.050 --> 07:06.680 +APIを呼び出すのは何度も何度も同じだからだ。 + +07:06.710 --> 07:12.410 +OpenAIのドットチャット、 ドットコンプリート、 ドットクリエイト、 そして返ってくるのはレスポンスのドットチョイス、 + +07:12.410 --> 07:16.840 +ゼロメッセージのコンテンツだ。 + +07:17.380 --> 07:21.370 +寝ている間に暗唱するようになるだろう。 + +07:21.700 --> 07:31.150 +そして、 戻ってきたものにはJsonを使う。 文字列をロードして、 それをJSONとして戻す。 + +07:31.180 --> 07:32.710 +それを実行しよう。 + +07:32.980 --> 07:33.910 +オーケー。 + +07:33.910 --> 07:35.290 +だから、 思い切ってやるんだ。 + +07:35.290 --> 07:40.210 +その関数を呼び出して、 ウェブサイト "anthropic comm "を渡す。 + +07:40.210 --> 07:41.770 +では、 何を期待しているのか。 + +07:41.800 --> 07:50.950 +そのページ上のすべてのリンクを収集し、 GPTフォーミニを呼び出して、 この中から関連性があると思われるリンクをいくつか選んで回答してください、 + +07:50.950 --> 07:57.460 +と言うことを期待しています。 + +07:57.460 --> 07:58.990 +では、 何が出てくるか見てみよう。 + +07:59.020 --> 07:59.890 +さあ、 始めよう。 + +08:01.210 --> 08:02.860 +今、 OpenAIに行っている。 + +08:02.890 --> 08:06.550 +まず、 人類学のページを集めなければならなかった。 + +08:06.550 --> 08:09.910 +そして、 これがアバウトページのタイプだ。 + +08:09.910 --> 08:17.980 +そして、 会社概要ページ、 採用情報ページ、 チームページ、 リサーチ・エンタープライズ価格、 API、 ニュースへのリンクがある。 + +08:17.980 --> 08:18.760 +どうだ? 
+ +08:18.760 --> 08:26.380 +これはすべて、 パンフレットに掲載したい素晴らしい情報であり、 掲載されていないリンクが山ほどあることは間違いない。 + +08:26.380 --> 08:28.270 +そう自分を納得させよう。 + +08:28.270 --> 08:37.330 +人間工学はウェブサイトであり、 これを渡すだけだ。 + +08:39.700 --> 08:47.500 +そして、 "anthropic dot links "をクリックすると、 そのページにあったすべてのリンクが表示されます。 + +08:47.500 --> 08:48.310 +みんなそうだ。 + +08:48.340 --> 08:52.360 +支援された国とか、 他にもたくさんある。 + +08:52.360 --> 08:58.690 +また、 ホスト名を含む完全なURLになっていないものが多いこともわかるだろう。 + +08:58.690 --> 09:09.490 +そして、 GPT4ミニへの問い合わせは、 これらのサブセットを非常にうまく選択し、 完全に修飾して、 それらが何であるかを説明していることがわかるだろう。 + +09:10.060 --> 09:11.470 +素晴らしい結果だと思う。 + +09:11.470 --> 09:17.380 +これは、 会社案内を作るために必要な2つのステップのうち、 第1ステップに過ぎない。 + +09:17.380 --> 09:19.900 +ステップ2については、 また次のビデオで。 diff --git a/week5/community-contributions/subtitles/srts/60620143/ko_KR.srt b/week5/community-contributions/subtitles/srts/60620143/ko_KR.srt new file mode 100755 index 0000000..94d3174 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620143/ko_KR.srt @@ -0,0 +1,436 @@ +WEBVTT + +00:00.110 --> 00:02.570 +GPT 4에 전화를 걸 거예요 + +00:02.600 --> 00:07.850 +링크 세트를 검토하라고 요청하는군요 어떤 게 관련 있는지 알아내고 + +00:07.850 --> 00:11.000 +완전히 자격 있는 링크로 대체하라고요 + +00:11.270 --> 00:17.660 +llm을 사용하는 아주 좋은 방법이 될 겁니다 특히 관련 있는 링크를 선택해야 + +00:17.660 --> 00:18.740 +하니까요 + +00:18.740 --> 00:21.620 +추론 과정이 필요해요 + +00:21.890 --> 00:27.560 +이 목적으로 GPT 4를 사용할 뿐 아니라 JSON 형식으로 응답하도록 + +00:27.560 --> 00:35.390 +요청할 겁니다 우리가 돌려받아야 할 정보를 정확히 지정하는 방식으로요 + +00:35.420 --> 00:41.660 +나중에 구조화 출력이라는 테크닉을 다루겠습니다 LLM이 아주 구체적인 + +00:41.660 --> 00:45.080 +포맷으로 반응하도록 요구하는 경우죠 + +00:45.110 --> 00:48.950 +응답해야 할 포맷을 지정하는 거죠 + +00:48.980 --> 00:50.300 +오늘은 안 돼요 + +00:50.300 --> 00:52.670 +JSON을 요청하는 거죠 + +00:52.700 --> 00:56.000 +회신을 위해 사용해야 하는 포맷을 알려드리죠 + +00:56.000 --> 00:57.380 +멋질 거예요 + +00:57.410 --> 01:00.590 +이렇게 간단한 요청에는 잘 통하죠 + +01:00.590 --> 01:04.120 +IQ가 높아지면 구조적인 출력 작업을 해야 해요 IQ, IQ + +01:04.120 --> 01:09.370 +8주 차에 에이전틱 인공지능 프레임워크를 만들 때 그걸 할 거예요 + +01:09.370 --> 01:11.560 +하지만 지금은 이게 우리 일이에요 + +01:11.560 --> 
01:14.290 +시스템 프롬프트를 생성할게요 + +01:14.350 --> 01:19.960 +시스템 프롬프트에는 당면한 작업과 그 작업 방법을 설명하는 글이 있죠 + +01:19.960 --> 01:22.570 +거기서 정보를 제공할 거예요 + +01:22.570 --> 01:24.250 +시스템 프롬프트예요 + +01:24.280 --> 01:28.720 +웹페이지에 있는 링크 목록을 제공하죠 + +01:28.720 --> 01:34.810 +어떤 링크가 회사 브로슈어에 포함될지 결정할 수 있습니다 + +01:34.810 --> 01:41.800 +회사 페이지나 회사 페이지 직업 페이지 링크 같은 거요 + +01:41.830 --> 01:46.210 +이 예제처럼 JSON에서 응답해야 해요 + +01:46.210 --> 01:49.150 +그리고 예시가 전달되죠 + +01:49.150 --> 01:54.400 +원 샷 프롬프트라고 한 건 특정 예제를 준다는 게 정말 그 뜻이었습니다 + +01:54.400 --> 02:01.480 +어바웃 페이지나 직업 페이지와 함께 사용할 수 있도록요 그 형식을 지정하는 방법은 단순히 + +02:01.480 --> 02:07.000 +예제를 주는 겁니다 사전에 대해 묻는 걸 보실 수 있죠 + +02:07.030 --> 02:09.880 +단일 특성 링크를 갖게 되죠 + +02:09.880 --> 02:16.570 +그 링크는 각각의 형식과 URL을 가진 사전의 목록이 되겠죠 + +02:16.570 --> 02:19.420 +그 URL이 전체 URL이죠 + +02:19.420 --> 02:21.430 +이 방을 운영하게 해줘요 + +02:21.430 --> 02:27.190 +이게 명확한지 확인하기 위해 링크 시스템 프롬프트를 프린트할게요 + +02:27.190 --> 02:28.840 +이제 변수가 생겼어요 + +02:28.840 --> 02:31.330 +그 변수가 포함하는 게 이거죠 + +02:31.330 --> 02:32.410 +인쇄를 했어요 + +02:32.440 --> 02:34.540 +Get it, get it 마차도 다시 가져오죠 + +02:34.540 --> 02:35.830 +이걸 보시죠 + +02:36.910 --> 02:37.990 +여기 있네요 + +02:38.500 --> 02:44.890 +이게 바로 우리가 시스템 프롬프트에서 LLM에게 지시하려는 거죠 + +02:45.370 --> 02:50.110 +이제 함수 get links 사용자 프롬프트를 쓸게요 + +02:50.110 --> 02:51.400 +이렇게 생긴 거예요 + +02:51.430 --> 02:53.770 +웹사이트 객체를 가져가요 + +02:53.770 --> 02:59.800 +여기 블라 웹사이트에 링크 목록이 있다고 할 거예요 + +03:00.070 --> 03:04.630 +해당 회사 브로슈어에 실을 관련 링크를 골라 주세요 + +03:04.660 --> 03:13.470 +URL do 포함 안 함과 포함 안 함 몇 가지로 응답하세요 그런 다음 링크를 + +03:13.590 --> 03:17.160 +하나씩 나열해 반환하세요 + +03:17.160 --> 03:20.490 +실제 예제를 보면 더 이해가 잘 될 거예요 + +03:20.490 --> 03:26.580 +get Links 사용자 프롬프트라고 하죠 편집기로 넘기겠습니다 편집기 전처럼요 + +03:26.580 --> 03:27.510 +이 위에 있는 건가요? 
+ +03:27.510 --> 03:29.520 +이 링크들을 살펴볼 거예요 + +03:29.520 --> 03:32.100 +이 사용자 프롬프트가 어떻게 생겼는지 보죠 + +03:32.130 --> 03:34.800 +Get in을 실행해 보세요 감을 잡아야죠 + +03:34.800 --> 03:36.930 +사용자 프롬프트는 이렇게 생겼을 거예요 + +03:36.960 --> 03:40.020 +제가 한 말을 그대로 전해서 죄송해요 + +03:40.110 --> 03:45.270 +우리가 이 웹사이트를 보고 있다고 나와요 + +03:45.540 --> 03:47.430 +여기 링크가 있어요 + +03:47.430 --> 03:55.080 +어떤 것은 관련 있을 수 있으니 요약해서 관련 있는 것을 선택해야 하죠 + +03:55.680 --> 03:56.580 +네 + +03:56.610 --> 04:01.920 +이제 이 모든 걸 함수에 넣을 겁니다 OpenAI라고 하죠 + +04:01.920 --> 04:04.890 +get links URL이네요 + +04:04.890 --> 04:08.430 +URL을 위한 새 웹사이트 객체를 생성할게요 + +04:09.120 --> 04:11.130 +이제 이걸 이렇게 불러요 + +04:11.130 --> 04:15.930 +지난번에 비트를 빠르게 훑어봤는데요 이번에는 여기에 시간을 좀 더 투자할 거예요 + +04:15.930 --> 04:23.670 +OpenAI 채팅이라고 부르는데 채팅 완료를 위한 메인 API죠 거의 항상 사용하게 될 + +04:23.670 --> 04:24.330 +거예요 + +04:24.330 --> 04:31.560 +API, 여러분의 작업이 계속되는 표준 API입니다 이 대화를 + +04:31.590 --> 04:33.720 +완료하는 거죠 + +04:33.720 --> 04:42.690 +완료 API 상에서 뭔가를 생성하면 전처럼 모델과 전달할 모델에 메시지가 생겨요 + +04:42.690 --> 04:43.410 +그게 변수였어요 + +04:43.410 --> 04:46.350 +GPT 4 미니를 출발선에 세팅했어요 + +04:46.410 --> 04:47.820 +메시지예요 + +04:47.820 --> 04:51.120 +벌써 익숙해지기 시작했길 바라요 + +04:51.120 --> 04:56.370 +메시지 형식은 사전 목록이에요 + +04:56.400 --> 05:02.700 +각 사전, 각 사전은 시스템 또는 사용자와 관련된 핵심 역할과 관련된 + +05:02.700 --> 05:08.670 +시스템 메시지나 사용자 메시지의 핵심 콘텐츠를 갖고 있죠 + +05:08.670 --> 05:09.270 +그래서요? 
+ +05:09.270 --> 05:11.600 +시스템 메시지 사용자를 입력하세요 + +05:11.630 --> 05:15.890 +메시지 목록에 들어갈 사용자 메시지요 + +05:15.890 --> 05:17.360 +아주 간단해요 + +05:17.360 --> 05:20.300 +충분히 이해하셨길 바라요 + +05:20.330 --> 05:28.010 +작은 추가 디테일이 하나 더 있어요 여기 응답 포맷이요 + +05:28.100 --> 05:34.970 +OpenAI에 JSON 개체를 다시 제공하라고 할 수 있어요 + +05:34.970 --> 05:39.500 +JSON 형식 객체를 통해 전달하죠 + +05:39.500 --> 05:44.870 +그건 사실 클로드는 JSON 객체를 돌려받는 방법이 + +05:44.900 --> 05:45.800 +없어요 + +05:45.890 --> 05:46.940 +오픈아이는 알아요 + +05:46.940 --> 05:53.120 +OpenAI는 문서화에서 언급하길 이걸 사용할 때도 JSON 응답이 필요하다고 + +05:53.120 --> 05:56.570 +반드시 언급해야 한다고 했어요 + +05:56.570 --> 06:00.950 +프롬프트에 명시적으로 언급해야 작동할 거예요 + +06:01.700 --> 06:03.620 +그렇게 하죠 + +06:03.650 --> 06:06.440 +돌아오는 건 이 변수 완료죠 + +06:06.440 --> 06:11.180 +사실 일관성을 유지하기 위해 이걸 응답으로 바꾸겠습니다 지난 번에 + +06:11.180 --> 06:12.770 +그렇게 불렀거든요 + +06:14.540 --> 06:15.410 +됐어요 + +06:15.590 --> 06:24.080 +최종 답변을 얻기 위해서는 응답 .선택을 선택하세요. get + +06:24.080 --> 06:25.490 +무슨 일이죠? + +06:25.490 --> 06:32.540 +실제로 API 요청에서 여러 변형을 요청할 수 있어요 여러 가지 가능한 + +06:32.570 --> 06:36.890 +변형을 생성하길 원하는 경우에요 + +06:36.890 --> 06:37.970 +우린 그러지 않았죠 + +06:37.970 --> 06:39.680 +Get in get은 하나네요 + +06:39.740 --> 06:43.250 +그런 변형이 이런 선택 형태로 나타난 거죠 + +06:43.250 --> 06:44.840 +하지만 하나밖에 없어요 + +06:44.840 --> 06:50.000 +0번 선택은 유일한 응답을 받게 해주죠 + +06:50.000 --> 06:57.380 +그래서 항상 응답이 .선택 .0,Message.content인 거죠 텍스트 메시지가 되돌아오는 + +06:57.380 --> 07:00.050 +것으로 드릴다운하는 거예요 + +07:00.050 --> 07:05.870 +기본적으로 이 두 가지에 아주 익숙해지게 됩니다 API 호출은 많은 경우에 + +07:05.870 --> 07:06.680 +같거든요 + +07:06.710 --> 07:12.410 +OpenAI.챗, .완성, .생성 그리고 응답 .선택, + +07:12.410 --> 07:16.840 +0 메시지 콘텐츠도 똑같이 할 거예요 + +07:17.380 --> 07:21.370 +Get it, get it, get it get it, get it it, it 자면서도 외울 수 있는 수준까지 가셔야 해요 + +07:21.700 --> 07:29.200 +그런 다음 돌아온 것으로 Json을 사용하죠 문자열 함수를 불러와 JSON으로 + +07:29.200 --> 07:31.150 +불러오죠 + +07:31.180 --> 07:32.710 +실행해 보죠 + +07:32.980 --> 07:33.910 +네 + +07:33.910 --> 07:35.290 +그래서 과감하게 도전했죠 + +07:35.290 --> 07:40.210 +그걸 함수라고 부르고 웹사이트 anthropiccom에 넘길 
거예요 + +07:40.210 --> 07:41.770 +그럼 어떻게 되는 거죠? + +07:41.800 --> 07:50.950 +해당 페이지의 모든 링크를 수집한 다음 GPT for 미니에 호출해 관련 있다고 + +07:50.950 --> 07:57.460 +생각되는 링크를 선택해 응답하라고 하는 거죠 + +07:57.460 --> 07:58.990 +get get을 해 보죠 + +07:59.020 --> 07:59.890 +시작할게요 + +08:01.210 --> 08:02.860 +오픈아이로 출발하네요 + +08:02.890 --> 08:06.550 +인류 문명의 페이지를 수집해서 돌려줘야 해요 + +08:06.550 --> 08:09.910 +이게 Get in get page죠 + +08:09.910 --> 08:17.080 +링크가 있어요 경력 페이지, 팀 페이지 리서치 엔터프라이즈 가격, API 그리고 + +08:17.080 --> 08:17.980 +뉴스요 + +08:17.980 --> 08:18.760 +어때요? + +08:18.760 --> 08:24.610 +모두 브로슈어에 실어야 할 훌륭한 정보예요 포함되지 않은 링크가 + +08:24.610 --> 08:26.380 +엄청나게 많죠 + +08:26.380 --> 08:28.270 +우리 스스로를 설득해 보죠 + +08:28.270 --> 08:37.330 +앤스로픽 이즈 웹사이트라고 하고 전달할 수 있어요 + +08:39.700 --> 08:47.500 +종족유사 닷 링크라고 입력하면∙∙∙ 이 페이지에 링크가 다 뭐죠? + +08:47.500 --> 08:48.310 +전부 다 그래요 + +08:48.340 --> 08:52.360 +지원하는 나라가 정말 많네요 + +08:52.360 --> 08:58.690 +또한 많은 경우 완전한 URL이 아닌 것도 보이실 겁니다 호스트 이름을 포함해서요 + +08:58.690 --> 09:06.970 +GPT for 미니에 대한 호출이 정규화된 하위 집합을 잘 선택해 무엇인지 설명하는 + +09:06.970 --> 09:09.490 +걸 보실 수 있어요 + +09:10.060 --> 09:11.470 +좋은 결과라고 생각해요 + +09:11.470 --> 09:16.480 +아주 쉬웠어요 회사 브로슈어를 만들 때 거쳐야 하는 두 단계 중 + +09:16.480 --> 09:17.380 +하나였죠 + +09:17.380 --> 09:19.900 +그럼 다음 2단계 영상에서 만나요 diff --git a/week5/community-contributions/subtitles/srts/60620169/en_US.srt b/week5/community-contributions/subtitles/srts/60620169/en_US.srt new file mode 100755 index 0000000..9adb00d --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620169/en_US.srt @@ -0,0 +1,430 @@ +WEBVTT + +00:00.080 --> 00:07.280 +Hopefully you found this super satisfying to be able to have this nice business result and have it calling + +00:07.370 --> 00:13.340 +an LLM twice, we can make this result a little bit more satisfying by adding in something called streaming, + +00:13.340 --> 00:19.310 +which is so common that you see in these tools that we've experienced ourselves in the chat user interfaces + +00:19.610 --> 00:26.420 +and streaming is when 
the information flows back from the LLM and appears in what they call the typewriter + +00:26.420 --> 00:27.980 +style interface. + +00:28.430 --> 00:31.670 +And the way you do it is remarkably simple. + +00:31.760 --> 00:42.350 +Uh, when you are creating your, uh, call to OpenAI chat completions, create, uh, if you want to + +00:42.380 --> 00:48.920 +not just receive it all back in one go, but you want it to flow back, you simply pass in another parameter + +00:48.920 --> 00:50.480 +stream equals true. + +00:50.510 --> 00:55.880 +We'll find when we go and use Claude that it has a slightly different API, but this is one of the rare + +00:55.880 --> 00:59.510 +times when GPT and Claude are a bit different. + +00:59.510 --> 01:02.200 +But that's how you do it with GPT stream equals true. + +01:02.230 --> 01:03.460 +Now what does that mean? + +01:03.460 --> 01:10.420 +What it means is that what comes back into stream is no longer the the single text response, but instead + +01:10.420 --> 01:16.450 +you get back something that you can iterate over, and as you iterate over, each chunk of the response + +01:16.450 --> 01:22.510 +will come through, um, and you can just sort of say for chunk in stream print chunk. + +01:22.540 --> 01:27.640 +Now, in our case, we want to be a little bit smarter than that because we're we're showing this in + +01:27.640 --> 01:28.750 +markdown. + +01:28.960 --> 01:37.090 +Um, and actually it's a little tiny bit fiddly because markdown is something where the, the markdown + +01:37.090 --> 01:41.920 +characters, you might it might start to stream some markdown characters, and you would then need it + +01:41.920 --> 01:43.990 +to sort of incorporate that in what it's showing. + +01:43.990 --> 01:49.600 +So there's a bit of hokey code here to handle the fact that we're going to want to rewrite the full + +01:49.600 --> 01:53.230 +markdown version for each chunk so you can look through it. 
+ +01:53.230 --> 01:59.100 +But basically I keep a sort of running track of everything in response, a kind of cumulative track + +01:59.100 --> 02:01.980 +of all of the chunks that have come back. + +02:01.980 --> 02:09.120 +And so for each chunk that comes back, I basically, uh, include that in response. + +02:09.120 --> 02:14.070 +Uh, I strip out if the word markdown is actually in there, I strip it out of the response. + +02:14.070 --> 02:18.090 +And finally I update the full display to show that. + +02:18.090 --> 02:20.850 +So this is all a little bit complex. + +02:20.850 --> 02:24.270 +You wouldn't need this if you were just simply writing text. + +02:24.270 --> 02:30.450 +Uh, the only reason that you need it this way is because we want to show it in a nice, fancy markdown + +02:30.450 --> 02:30.990 +way. + +02:30.990 --> 02:32.880 +And let me show you what I mean by that. + +02:32.880 --> 02:38.880 +If we now repeat this for anthropic, we'll have to wait a minute while it, uh, while it does that + +02:38.880 --> 02:42.780 +first finding the links and getting all these pages, and then here you go. + +02:42.810 --> 02:43.560 +Look at that. + +02:43.560 --> 02:50.760 +It's the familiar streaming interface that you can see there, and it's done. + +02:50.760 --> 02:55.320 +And I love the fact that because it's markdown, we also get things like the links in here. + +02:55.320 --> 02:57.680 +And just to show you what I was talking about. + +02:57.680 --> 03:03.590 +If you didn't want to have it displaying in markdown, the simpler way that you could do this is that + +03:03.590 --> 03:10.550 +you could have it say like, uh, something like, um, for chunk in stream. + +03:12.560 --> 03:15.260 +Uh, print chunk. + +03:15.260 --> 03:20.060 +And then we'd have to have something like end equals that stops it from printing a new line each time. + +03:20.060 --> 03:22.640 +And I think this is now let's see if that works. 
+ +03:23.090 --> 03:29.330 +Uh, this this would now just print it as a series of, uh, little, uh, pieces of text so we won't + +03:29.330 --> 03:30.890 +get the markdown formatting. + +03:31.280 --> 03:33.560 +Oh, that's not going to work. + +03:34.040 --> 03:34.940 +Uh, sorry. + +03:34.940 --> 03:37.850 +This should of course, be chunk. + +03:38.810 --> 03:41.720 +I might as well put in all of this like that. + +03:41.720 --> 03:43.940 +Otherwise, we're getting the objects that are flowing back. + +03:43.970 --> 03:47.510 +You probably saw me doing that and thought, idiot, there we go. + +03:47.600 --> 03:49.340 +It shows you this is real time. + +03:49.550 --> 03:51.560 +Uh, okay, let's try that again. + +03:51.590 --> 03:53.180 +See if we get a better result now. + +03:53.990 --> 03:58.990 +So if we simply put in here the the the the the print statement. + +03:58.990 --> 03:59.770 +Like this. + +03:59.770 --> 04:01.090 +Then what you'll see coming back. + +04:01.090 --> 04:01.930 +Here you go. + +04:01.960 --> 04:03.700 +You see it comes back. + +04:03.700 --> 04:05.080 +It's super simple. + +04:05.080 --> 04:07.450 +It just won't be nicely formatted. + +04:07.450 --> 04:10.510 +But obviously the code, if you do it this way is really simple. + +04:10.510 --> 04:13.570 +This is all the code you need to be able to stream back results. + +04:13.570 --> 04:18.250 +So you set stream equals true and then you iterate back that way. + +04:19.180 --> 04:29.170 +Uh, so now let me remove that and I will uncomment this if you don't know by the way then then a command + +04:29.170 --> 04:36.250 +and the divide by sign or on the windows it's a windows key and divide by sign, uh is something which + +04:36.250 --> 04:39.010 +will comment out or uncomment a block of code like that. + +04:39.010 --> 04:44.020 +It's a useful shortcut to know, uh, all right, let's run this again and just see anthropic one more + +04:44.020 --> 04:47.530 +time with the nice great formatting. 
+ +04:47.800 --> 04:49.180 +Finds the links. + +04:49.180 --> 04:50.800 +And here it is again. + +04:50.800 --> 04:55.350 +And of course it's going to be amazingly a different brochure every time. + +04:55.590 --> 04:58.860 +Uh, and, uh, there it is this time. + +04:58.860 --> 05:00.600 +And now let's try a different company. + +05:00.600 --> 05:07.530 +Let's try hugging face the ubiquitous open source platform for, uh, AI engineers. + +05:07.560 --> 05:09.090 +Let's see how we get. + +05:09.120 --> 05:10.800 +We get some links. + +05:10.800 --> 05:16.380 +And here comes the hugging face brochure with career opportunities. + +05:16.380 --> 05:17.010 +Perks. + +05:17.040 --> 05:18.060 +Get in touch. + +05:18.060 --> 05:18.780 +And then. + +05:18.810 --> 05:21.300 +And together, let's build the future of AI. + +05:21.330 --> 05:22.530 +Very nice. + +05:22.560 --> 05:25.740 +All right, let's do one more thing to make this fun. + +05:25.740 --> 05:31.050 +Let's just go all the way back to where we created the system prompt. + +05:31.050 --> 05:32.910 +Where was that? + +05:32.940 --> 05:34.710 +All the way back here. + +05:34.710 --> 05:36.210 +System prompt. + +05:36.210 --> 05:41.220 +So one of the things about the system prompt is that this is the place where you not only describe the + +05:41.220 --> 05:49.200 +task that is to be done, but also you talk about the tone and character that the LLM should adopt in + +05:49.200 --> 05:50.760 +generating this content. + +05:50.760 --> 05:57.690 +So let's comment this and uncomment a variation right here. + +05:57.690 --> 05:59.730 +And this variation just includes that. + +05:59.730 --> 06:05.610 +It should be a short, humorous, entertaining, jokey brochure and we will run that code. + +06:05.820 --> 06:11.970 +Uh, and because I use very naughtily here, system prompt is like a global variable that I refer to + +06:12.000 --> 06:12.600 +elsewhere. 
+ +06:12.600 --> 06:17.640 +So I do believe I don't need to rerun any of this because I have that like hardcoded, I should be able + +06:17.640 --> 06:21.600 +to come all the way down here and just simply rerun this. + +06:21.630 --> 06:29.760 +And I believe, if I'm not mistaken, that we're now going to get a jovial, jokey ha ha, here we go. + +06:29.790 --> 06:33.900 +Welcome to anthropic, where I gets a safety net. + +06:34.380 --> 06:39.240 +Uh, at anthropic, we're building AI systems you can actually trust, ensuring they're more reliable + +06:39.240 --> 06:43.920 +than your morning coffee, and easier to understand than your cats mood swings. + +06:44.670 --> 06:50.100 +Based on sunny land of San Francisco, we're on a mission to make AI a friend, not a foe. + +06:50.130 --> 06:57.620 +So, I mean, it's just It's extraordinary that by making a small change to the system prompt like that, + +06:57.650 --> 07:03.740 +we can have such a wonderfully different lens on our company brochure. + +07:03.860 --> 07:07.220 +I find that just just really, truly remarkable. + +07:07.430 --> 07:12.860 +I will uncomment I will comment that so that it doesn't confuse you when you see it. + +07:13.010 --> 07:20.990 +But um, this of course lends me to the first thing I want to say, which is that as you experiment + +07:20.990 --> 07:28.460 +with this, this is your opportunity to really understand deeply what it means to use prompting to affect + +07:28.460 --> 07:30.560 +the character of what's generated. + +07:30.560 --> 07:32.840 +So you can take it a step further. + +07:32.840 --> 07:36.740 +Make a snarky brochure that's loaded with sarcasm. + +07:36.740 --> 07:38.810 +Make a brochure in Spanish. + +07:38.930 --> 07:43.790 +Uh, do add something that will that will translate to a different language. + +07:43.790 --> 07:46.010 +Uh, try all of these different things. 
+ +07:46.160 --> 07:53.230 +Um, and this is, this will be very important part of learning how you use prompting to affect the + +07:53.230 --> 07:54.910 +type of result that you get. + +07:55.930 --> 08:01.180 +Okay, let me wrap this up before we start talking about too much about exercises. + +08:01.180 --> 08:09.010 +So what we did today was we extended what we did in day one instead of just calling one LM call to summarize + +08:09.010 --> 08:15.700 +a website, we ended up making two calls to LM one to collect relevant links and one to then from a + +08:15.700 --> 08:21.700 +scrape of all of that data to then build a robust company brochure. + +08:22.000 --> 08:28.210 +Uh, and, you know, as I say, this is like a toy starting version of a gigantic AI in a small way, + +08:28.210 --> 08:31.600 +because we're dividing up a bigger problem into smaller steps. + +08:31.600 --> 08:38.410 +But I did want to impress upon you that this this is very applicable to many different kinds of business + +08:38.410 --> 08:43.420 +problem, this kind of synthesizing information and then generating as a result of it. + +08:43.480 --> 08:49.810 +Uh, and so you can imagine that you could do this to write marketing content to generate a product. + +08:49.930 --> 08:57.240 +Uh, tutorial like a guide from product spec, uh, to create some personalized email content. + +08:57.300 --> 08:59.670 +By reading through a bunch of emails. + +08:59.670 --> 09:05.310 +So there are so many ways that you can imagine this kind of two step synthesize some data and summarize + +09:05.310 --> 09:09.930 +it in JSON, and then use that as a way to build some kind of output. + +09:09.960 --> 09:12.120 +Many different applications of that. + +09:12.120 --> 09:17.730 +And so what I'd love to see you do is figure out a way to apply this to what you do day to day. + +09:17.730 --> 09:19.920 +And interesting commercial angle here. 
+ +09:19.950 --> 09:24.990 +Uh, something that would allow you to think about your area of expertise, where you have the greatest + +09:24.990 --> 09:25.860 +domain knowledge. + +09:25.860 --> 09:31.500 +And now given this new these, these skills you have, how could you put that to good use? + +09:31.500 --> 09:33.390 +So have a think about that. + +09:33.390 --> 09:39.570 +Feel free to bounce ideas off me at any point, and try and build some examples of that and put them + +09:39.570 --> 09:40.380 +in your GitHub. + +09:40.380 --> 09:47.700 +So you have some nice examples of using multiple calls to llms to synthesize information and generate + +09:47.700 --> 09:48.600 +content. + +09:48.630 --> 09:50.400 +I will see you for the next video. diff --git a/week5/community-contributions/subtitles/srts/60620169/ja_JP.srt b/week5/community-contributions/subtitles/srts/60620169/ja_JP.srt new file mode 100755 index 0000000..eb3268c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620169/ja_JP.srt @@ -0,0 +1,352 @@ +WEBVTT + +00:00.080 --> 00:07.280 +ストリーミングと呼ばれるものを追加することで、 この結果をもう少し満足のいくものにすることができます。 + +00:07.370 --> 00:27.980 +ストリーミングとは、 LLMから情報が戻ってきて、 タイプライタースタイルのインターフェースと呼ばれるものに表示されることです。 + +00:28.430 --> 00:31.670 +その方法は極めてシンプルだ。 + +00:31.760 --> 00:42.350 +OpenAIのチャットコンプリートの呼び出しを作成するときに、 一度にすべてを受け取るだけでなく、 フローバックさせたい場合は、 + +00:42.380 --> 00:50.480 +別のパラメータストリームをtrueに等しく渡すだけです。 + +00:50.510 --> 00:59.510 +クロードを使ってみるとわかるが、 GPTとクロードではAPIが少し違うのだ。 + +00:59.510 --> 01:02.200 +しかし、 それはGPTストリーム・イコール・トゥルーでのやり方だ。 + +01:02.230 --> 01:03.460 +さて、 それは何を意味するのか? 
+ +01:03.460 --> 01:10.420 +つまり、 ストリームに戻ってくるのは、 単一のテキスト・レスポンスではなく、 + +01:10.420 --> 01:22.510 +反復処理できるようなもので、 反復処理すると、 レスポンスの各チャンクが戻ってくる。 + +01:22.540 --> 01:28.750 +さて、 今回のケースでは、 マークダウンで表示しているので、 もう少しスマートにしたい。 + +01:28.960 --> 01:37.090 +というのも、 マークダウンは、 マークダウンの文字がストリーミングされ始めるかもしれないし、 + +01:37.090 --> 01:43.990 +マークダウンの文字が表示されるかもしれない。 + +01:43.990 --> 01:53.230 +各チャンクの完全なマークダウン・バージョンを書き換えることで、 そのチャンクに目を通すことができるようになる。 + +01:53.230 --> 02:01.980 +しかし、 基本的には、 私はすべての応答、 つまり戻ってきたすべてのチャンクの累積記録のようなものを管理している。 + +02:01.980 --> 02:09.120 +そうして戻ってきたチャンクに対して、 私は基本的に、 そのチャンクを返答に含める。 + +02:09.120 --> 02:14.070 +ええと、 マークダウンという単語が実際にそこにある場合は、 それを取り除いて対応します。 + +02:14.070 --> 02:18.090 +そして最後に、 フルディスプレイを更新して表示する。 + +02:18.090 --> 02:20.850 +だから、 これはちょっと複雑なんだ。 + +02:20.850 --> 02:24.270 +単にテキストを書くだけなら、 これは必要ないだろう。 + +02:24.270 --> 02:30.990 +ええと、 この方法が必要な唯一の理由は、 素敵で派手なマークダウンの方法で表示したいからです。 + +02:30.990 --> 02:32.880 +どういう意味か、 お見せしよう。 + +02:32.880 --> 02:38.880 +もし、 これを人類について繰り返したら、 最初にリンクを見つけてすべてのページを取得する間、 + +02:38.880 --> 02:42.780 +少し待たなければならない。 + +02:42.810 --> 02:43.560 +あれを見ろ。 + +02:43.560 --> 02:50.760 +おなじみのストリーミング・インターフェースが完成した。 + +02:50.760 --> 02:55.320 +マークダウンなので、 ここにリンクのようなものも表示されるのがいいですね。 + +02:55.320 --> 02:57.680 +そして、 私が何を言っていたのかをお見せしよう。 + +02:57.680 --> 03:03.590 +マークダウンで表示させたくない場合は、 もっと簡単な方法として、 ストリーム内のチャンクに対して、 + +03:03.590 --> 03:10.550 +えーと、 えーと、 と表示させることもできる。 + +03:12.560 --> 03:15.260 +ええと、 プリントチャンク。 + +03:15.260 --> 03:20.060 +そして、 end equalsのように、 毎回改行されないようにする必要がある。 + +03:20.060 --> 03:22.640 +そして、 これがうまくいくかどうか、 今見ているところだと思う。 + +03:23.090 --> 03:30.890 +この場合、 一連の小さなテキストとして印刷されるので、 マークダウンの書式は適用されません。 + +03:31.280 --> 03:33.560 +ああ、 それはうまくいかないよ。 + +03:34.040 --> 03:34.940 +あ、 すみません。 + +03:34.940 --> 03:37.850 +もちろん、 これはチャンクであるべきだ。 + +03:38.810 --> 03:41.720 +そうやって全部入れた方がいいかもしれない。 + +03:41.720 --> 03:43.940 +そうでなければ、 流れてくるオブジェクトを取り戻すことになる。 + +03:43.970 --> 03:47.510 +おそらく、 私がそうしているのを見て、 バカ野郎、 これでどうだ、 と思ったことだろう。 + +03:47.600 --> 03:49.340 +これはリアルタイムで表示される。 + 
+03:49.550 --> 03:51.560 +よし、 もう一度やってみよう。 + +03:51.590 --> 03:53.180 +今よりいい結果が出るかどうか見てみよう。 + +03:53.990 --> 03:58.990 +だから、 ここに単純にprint文を入れるとする。 + +03:58.990 --> 03:59.770 +こんな感じだ。 + +03:59.770 --> 04:01.090 +そして、 また戻ってくる。 + +04:01.090 --> 04:01.930 +どうぞ + +04:01.960 --> 04:03.700 +戻ってくるのがわかるだろう。 + +04:03.700 --> 04:05.080 +超シンプルだ。 + +04:05.080 --> 04:07.450 +ただ、 きれいなフォーマットにはならない。 + +04:07.450 --> 04:10.510 +しかし、 この方法でやれば、 コードは明らかにシンプルになる。 + +04:10.510 --> 04:13.570 +これが、 結果をストリームバックするために必要なすべてのコードである。 + +04:13.570 --> 04:18.250 +つまり、 streamをtrueにセットし、 それを繰り返しながら戻るのだ。 + +04:19.180 --> 04:29.170 +ところで、 コマンドと除算記号、 ウィンドウズではウィンドウズ・キーと除算記号で、 このようなコードのブロックをコメントアウトしたり、 + +04:29.170 --> 04:39.010 +コメントアウトを解除したりするものです。 + +04:39.010 --> 04:47.530 +このショートカットを使って、 もう一度、 素晴らしいフォーマットでアントロピックを見てみよう。 + +04:47.800 --> 04:49.180 +リンクを見つける。 + +04:49.180 --> 04:50.800 +そして、 またこれだ。 + +04:50.800 --> 04:55.350 +そしてもちろん、 毎回驚くほど違うパンフレットになる。 + +04:55.590 --> 04:58.860 +ええと、 それで......今度はあれだ。 + +04:58.860 --> 05:00.600 +そして今度は別の会社を試してみよう。 + +05:00.600 --> 05:07.530 +AIエンジニアのためのユビキタスなオープンソースプラットフォームとハグしてみよう。 + +05:07.560 --> 05:09.090 +どうなるか見てみよう。 + +05:09.120 --> 05:10.800 +リンクがいくつかある。 + +05:10.800 --> 05:16.380 +そして、 キャリアのチャンスが書かれた抱きつき顔のパンフレットがやってきた。 + +05:16.380 --> 05:17.010 +特典。 + +05:17.040 --> 05:18.060 +ご連絡ください。 + +05:18.060 --> 05:18.780 +そして + +05:18.810 --> 05:21.300 +そして一緒にAIの未来を築きましょう。 + +05:21.330 --> 05:22.530 +とても素晴らしい。 + +05:22.560 --> 05:25.740 +よし、 これを楽しくするためにもうひとつやろう。 + +05:25.740 --> 05:31.050 +システム・プロンプトを作成したところまで戻ってみよう。 + +05:31.050 --> 05:32.910 +あれはどこだった? 
+ +05:32.940 --> 05:34.710 +ここにずっと戻ってきた。 + +05:34.710 --> 05:36.210 +システムプロンプト。 + +05:36.210 --> 05:41.220 +つまり、 システム・プロンプトの特徴のひとつは、 やるべきタスクを記述するだけでなく、 + +05:41.220 --> 05:50.760 +LLMがこのコンテンツを作成する際に採用すべきトーンや性格についても語る場所であるということだ。 + +05:50.760 --> 05:57.690 +では、 ここでコメントとバリエーションを解除してみよう。 + +05:57.690 --> 05:59.730 +そして、 このバリエーションはまさにそれを含んでいる。 + +05:59.730 --> 06:05.610 +短い、 ユーモラスな、 楽しい、 冗談のようなパンフレットであるべきで、 私たちはそのコードを実行する。 + +06:05.820 --> 06:12.600 +ええと、 ここでは非常にいたずらっぽく使うので、 システム・プロンプトはグローバル変数のようなもので、 別の場所で参照する。 + +06:12.600 --> 06:21.600 +ハードコードされているので、 再実行する必要はない。 + +06:21.630 --> 06:29.760 +そして、 私の思い違いでなければ、 これから陽気な、 冗談のような、 ははは、 さあどうぞ、 という話になると信じている。 + +06:29.790 --> 06:33.900 +セーフティネットがある人類学へようこそ。 + +06:34.380 --> 06:39.240 +anthropicでは、 あなたが実際に信頼できるAIシステムを構築し、 朝のコーヒーよりも信頼性が高く、 + +06:39.240 --> 06:43.920 +猫の気分転換よりも理解しやすいことを保証しています。 + +06:44.670 --> 06:50.100 +陽光降り注ぐサンフランシスコを拠点に、 私たちはAIを敵ではなく味方にする使命を担っている。 + +06:50.130 --> 07:03.740 +つまり、 システムプロンプトにちょっとした変更を加えるだけで、 会社案内にこれほど素晴らしい別のレンズが使えるというのは驚異的なことなんだ。 + +07:03.860 --> 07:07.220 +本当に、 本当に驚くべきことだと思う。 + +07:07.430 --> 07:12.860 +あなたがそれを見たときに混乱しないように、 コメントを外します。 + +07:13.010 --> 07:20.990 +しかし、 これはもちろん、 私が最初に申し上げたいこと、 つまり、 プロンプトを使用して生成されるものの性格に影響を与えるということがどういうことなのかを深く理解するチャンスであるということを、 + +07:20.990 --> 07:30.560 +この実験を通して実感していただきたいのです。 + +07:30.560 --> 07:32.840 +だから、 もう一歩踏み込むことができる。 + +07:32.840 --> 07:36.740 +皮肉たっぷりのパンフレットを作ろう。 + +07:36.740 --> 07:38.810 +スペイン語のパンフレットを作る。 + +07:38.930 --> 07:43.790 +ええと、 別の言語に翻訳されるようなものを追加してください。 + +07:43.790 --> 07:46.010 +いろいろ試してみて + +07:46.160 --> 07:54.910 +これは、 プロンプトをどのように使って、 どのような結果を得るかに影響を与えるかを学ぶ上で、 とても重要なことなんだ。 + +07:55.930 --> 08:01.180 +さて、 エクササイズの話をしすぎる前に、 この話をまとめよう。 + +08:01.180 --> 08:09.010 +そこで今日やったことは、 初日にやったことを拡張して、 ウェブサイトを要約するためにLMに1回電話するだけでなく、 + +08:09.010 --> 08:21.700 +関連するリンクを集めるためにLMに1回電話し、 そのすべてのデータをスクレイピングして強固な会社案内を作るために1回電話した。 + +08:22.000 --> 08:31.600 +大きな問題を小さなステップに分割しているんだ。 + +08:31.600 --> 08:38.410 +しかし、 私が皆さんに印象づけたかったのは、 このような情報の統合とその結果としての生成は、 + 
+08:38.410 --> 08:43.420 +さまざまな種類のビジネス問題に非常に応用できるということだ。 + +08:43.480 --> 08:49.810 +商品を生み出すためのマーケティング・コンテンツを書くために、 このようなことができると想像できるだろう。 + +08:49.930 --> 08:57.240 +パーソナライズされたEメールコンテンツを作成するための、 製品仕様からのガイドのようなチュートリアルです。 + +08:57.300 --> 08:59.670 +たくさんのメールに目を通すことによってね。 + +08:59.670 --> 09:09.930 +このように、 あるデータを合成してJSONにまとめ、 それを何らかのアウトプットを構築する方法として使うという2つのステップを想像する方法はたくさんある。 + +09:09.960 --> 09:12.120 +さまざまな応用が可能だ。 + +09:12.120 --> 09:17.730 +だから、 ぜひやってほしいのは、 これを日々の仕事に応用する方法を考えることだ。 + +09:17.730 --> 09:19.920 +そして、 この商業的なアングルも興味深い。 + +09:19.950 --> 09:25.860 +ええと、 自分の専門分野、 自分が最も得意とする領域について考えることができるようなもの。 + +09:25.860 --> 09:31.500 +そして今、 この新しいもの、 あなたが持っているこのスキルを、 どのように有効活用できるか? + +09:31.500 --> 09:33.390 +だから、 それについて考えてみてほしい。 + +09:33.390 --> 09:40.380 +いつでも遠慮なく私にアイデアをぶつけてほしいし、 その例をいくつか作ってGitHubに置いてみてほしい。 + +09:40.380 --> 09:48.600 +llmsへの複数のコールを使って情報を合成し、 コンテンツを生成している素晴らしい例がいくつかあるわけだ。 + +09:48.630 --> 09:50.400 +また次のビデオで会おう。 diff --git a/week5/community-contributions/subtitles/srts/60620169/ko_KR.srt b/week5/community-contributions/subtitles/srts/60620169/ko_KR.srt new file mode 100755 index 0000000..cab49f7 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620169/ko_KR.srt @@ -0,0 +1,421 @@ +WEBVTT + +00:00.080 --> 00:07.280 +이런 멋진 비즈니스 결과를 얻고 LLM을 두 번 호출하는 것에 대해 아주 만족스러우시길 바랍니다 + +00:07.370 --> 00:13.340 +스트리밍이라는 걸 추가함으로써 이 결과를 좀 더 만족스럽게 만들 수 있어요 아주 + +00:13.340 --> 00:19.310 +흔한 거죠 채팅에서 경험한 이런 도구에서 볼 수 있습니다 사용자 인터페이스와 + +00:19.610 --> 00:26.420 +스트리밍 채팅에서 정보가 LLM에서 다시 흘러와 타자기 스타일 인터페이스로 나타날 + +00:26.420 --> 00:27.980 +때죠 + +00:28.430 --> 00:31.670 +그 방법은 놀랍도록 간단해요 + +00:31.760 --> 00:42.350 +오픈AI 채팅 완료를 위한 호출을 만들 때 생성할 때 한 번에 다 받고 싶진 않지만 다시 + +00:42.380 --> 00:48.920 +흘러가길 원한다면 다른 매개 변수 스트림을 참으로 + +00:48.920 --> 00:50.480 +넘기면 되죠 + +00:50.510 --> 00:55.880 +클로드를 사용할 때 API가 약간 다른 걸 알게 될 겁니다 하지만 GPT와 + +00:55.880 --> 00:59.510 +클로드가 약간 다른 건 드문 경우죠 + +00:59.510 --> 01:02.200 +GPT 스트리밍 = True에선 이렇게 하는 거예요 + +01:02.230 --> 01:03.460 +그게 무슨 뜻이죠? 
+ +01:03.460 --> 01:10.420 +스트림으로 돌아온 것은 더 이상 단일 텍스트 응답이 아니라 반복할 + +01:10.420 --> 01:16.450 +수 있는 것을 얻게 됩니다 반복을 하면 각 응답 덩어리가 나타납니다 + +01:16.450 --> 01:22.510 +스트림 프린트 덩어리를 선택하는 거죠 + +01:22.540 --> 01:27.640 +우리 경우엔 그것보다 좀 더 영리해야 해요 왜냐하면 마크다운 비트로 이걸 + +01:27.640 --> 01:28.750 +보여주니까요 + +01:28.960 --> 01:37.090 +사실 좀 성가신 일이에요 마크다운이란 마크다운 캐릭터를 스트림으로 + +01:37.090 --> 01:43.990 +만들 때 그걸 화면에 포함시켜야 하거든요 + +01:43.990 --> 01:49.600 +여기 호키 코드가 좀 있네요 각 비트에 대해 전체 마크다운 버전을 다시 써야 한다는 사실을 + +01:49.600 --> 01:53.230 +처리하기 위해서요 여러분이 살펴볼 수 있게요 + +01:53.230 --> 01:59.100 +기본적으로는 모든 것에 대한 대응을 위한 트랙을 유지해요 모든 덩크가 + +01:59.100 --> 02:01.980 +누적된 트랙을 만드는 거죠 + +02:01.980 --> 02:09.120 +그래서 저는 답변을 받을 때마다 그 부분을 포함해요 + +02:09.120 --> 02:14.070 +실제로 마크다운이라는 단어가 있으면 그 단어는 반응에서 제거해요 + +02:14.070 --> 02:18.090 +마지막으로 전체 디스플레이를 업데이트해 보여드릴게요 + +02:18.090 --> 02:20.850 +비트 코드는 좀 복잡해요 + +02:20.850 --> 02:24.270 +그냥 글만 쓰면 이런 건 필요 없잖아요 + +02:24.270 --> 02:30.990 +이렇게 해야 하는 이유는 멋지게 줄이는 방식으로 보여주고 싶어서예요 + +02:30.990 --> 02:32.880 +무슨 뜻인지 보여 드릴게요 + +02:32.880 --> 02:38.880 +이걸 anthropic에서 반복하면 1분 기다려야 합니다 링크를 먼저 + +02:38.880 --> 02:42.780 +찾고 이 페이지들을 다 얻을 때까지요 이제 됐네요 + +02:42.810 --> 02:43.560 +보세요 + +02:43.560 --> 02:50.760 +익숙한 스트리밍 인터페이스죠 다 됐어요 + +02:50.760 --> 02:55.320 +마크다운이 되어 있어서 링크 같은 것도 get이 된다는 게 좋아요 + +02:55.320 --> 02:57.680 +제가 말하던 걸 보여드릴게요 + +02:57.680 --> 03:03.590 +마크다운에서 디스플레이하는 걸 원치 않는 경우 더 간단한 + +03:03.590 --> 03:10.550 +방법은 스트림에서의 청크를 위한 것 같은 걸 하는 거죠 + +03:12.560 --> 03:15.260 +한 덩어리 인쇄해요 + +03:15.260 --> 03:20.060 +그런 다음 end = 같은 걸 해야 합니다 매번 새 줄을 프린트하는 걸 멈추도록요 + +03:20.060 --> 03:22.640 +제 생각엔 이게∙∙∙ 작동하는지 보죠 + +03:23.090 --> 03:29.330 +이건 그냥 일련의 텍스트 조각으로 프린트할 거예요 마크다운 포맷이 안 나오게요 + +03:29.330 --> 03:30.890 +Get up + +03:31.280 --> 03:33.560 +그건 안 돼요 + +03:34.040 --> 03:34.940 +미안해요 + +03:34.940 --> 03:37.850 +이건 당연히 청크죠 + +03:38.810 --> 03:41.720 +그냥 이렇게 다 얹는 게 낫겠어요 + +03:41.720 --> 03:43.940 +안 그러면 물체가 뒤로 흐르거든요 + +03:43.970 --> 03:47.510 +제가 이러는 걸 보고 바보라고 생각하셨겠죠 + +03:47.600 --> 
03:49.340 +실시간이란 걸 보여주죠 + +03:49.550 --> 03:51.560 +네, 다시 해 보죠 + +03:51.590 --> 03:53.180 +Get it, get it, get it, get it, it, it, it! 이제 더 좋은 결과가 나오는지 보죠 + +03:53.990 --> 03:58.990 +여기에 입력하면∙∙∙ print 문을요 + +03:58.990 --> 03:59.770 +이렇게요 + +03:59.770 --> 04:01.090 +다시 보게 될 거예요 + +04:01.090 --> 04:01.930 +여기요 + +04:01.960 --> 04:03.700 +다시 돌아오죠 + +04:03.700 --> 04:05.080 +아주 간단해요 + +04:05.080 --> 04:07.450 +서식이 별로일 뿐이죠 + +04:07.450 --> 04:10.510 +하지만 코드는 이렇게 하면 정말 간단해요 + +04:10.510 --> 04:13.570 +결과를 스트리밍하는 데 필요한 모든 코드예요 + +04:13.570 --> 04:18.250 +스트리밍 = true를 설정하면 그 방법으로 반복되죠 + +04:19.180 --> 04:29.170 +이제 저걸 제거하고 주석을 제거하겠습니다 여러분이 모르신다면 command와 나눗셈 기호 혹은 Windows에서 + +04:29.170 --> 04:36.250 +Windows 키와 나눗셈 기호는 코드 블록의 주석을 제거하거나 주석을 제거하는 + +04:36.250 --> 04:39.010 +것이죠 + +04:39.010 --> 04:44.020 +유용한 지름길이죠 이걸 다시 실행해 근사한 서식이 있는 + +04:44.020 --> 04:47.530 +athropic을 한 번 더 보죠 + +04:47.800 --> 04:49.180 +연결 고리를 찾죠 + +04:49.180 --> 04:50.800 +또 나오네요 + +04:50.800 --> 04:55.350 +물론 매번 다른 브로슈어가 있어요 + +04:55.590 --> 04:58.860 +이번엔 그렇게 됐네요 + +04:58.860 --> 05:00.600 +이제 다른 회사를 알아보죠 + +05:00.600 --> 05:07.530 +인공지능 엔지니어를 위한 유비쿼털 오픈 소스 플랫폼을 안아보죠 + +05:07.560 --> 05:09.090 +get get 해 볼게요 + +05:09.120 --> 05:10.800 +Get it 링크도 있네요 + +05:10.800 --> 05:16.380 +이제 안아주는 얼굴 책자가 나오네요 + +05:16.380 --> 05:17.010 +특전이죠 + +05:17.040 --> 05:18.060 +Get it 연락 주세요 + +05:18.060 --> 05:18.780 +그리고요 + +05:18.810 --> 05:21.300 +함께 인공지능의 미래를 만들어 나가요 + +05:21.330 --> 05:22.530 +아주 좋아요 + +05:22.560 --> 05:25.740 +좋아요, 더 재미있게 한 가지 더 해보죠 + +05:25.740 --> 05:31.050 +시스템 프롬프트를 생성했던 곳으로 돌아가보죠 + +05:31.050 --> 05:32.910 +어디서요? 
+ +05:32.940 --> 05:34.710 +저 뒤쪽에서요 + +05:34.710 --> 05:36.210 +시스템 프롬프트예요 + +05:36.210 --> 05:41.220 +시스템 프롬프트의 특징 중 하나는 완료될 작업을 설명하는 + +05:41.220 --> 05:49.200 +곳일 뿐 아니라 톤과 문자에도 대해 얘기합니다 LLM이 콘텐츠를 생성할 때 채택해야 + +05:49.200 --> 05:50.760 +하는 거죠 + +05:50.760 --> 05:57.690 +이걸 주석 처리하고 변형을 해제하죠 + +05:57.690 --> 05:59.730 +이 변형에는 그런 게 포함되죠 + +05:59.730 --> 06:05.610 +짧고, 유머러스하고, 재미있고, 농담조의 브로슈어여야 해요 그 코드를 실행하죠 + +06:05.820 --> 06:11.970 +여기서 아주 노골적으로 사용하기 때문에 System 프롬프트는 다른 곳에서도 언급하는 전역 변수 + +06:12.000 --> 06:12.600 +같아요 + +06:12.600 --> 06:17.640 +이걸 재실행할 필요는 없어요 하드코딩이 돼 있으니까요 + +06:17.640 --> 06:21.600 +여기까지 와서 이걸 그냥 재실행하면 되죠 + +06:21.630 --> 06:29.760 +제 생각이 맞는다면 이제 아주 유쾌하고 재미있는 장면이 나올 거예요 get it get it + +06:29.790 --> 06:33.900 +안전망이 있는 안드로픽에 잘 오셨어요 + +06:34.380 --> 06:39.240 +앤스로픽에서는 여러분이 믿을 수 있는 인공지능 시스템을 개발합니다 모닝커피보다 + +06:39.240 --> 06:43.920 +더 신뢰할 수 있고 고양이 감정의 변화보다 이해하기 쉽도록요 + +06:44.670 --> 06:50.100 +샌프란시스코의 햇살 나라에서는 인공지능을 적이 아닌 친구로 만들려 하죠 + +06:50.130 --> 06:57.620 +즉각적으로 시스템을 약간 변경했을 뿐인데 이렇게 완전히 다른 + +06:57.650 --> 07:03.740 +회사 브로슈어를 만들 수 있다니 정말 놀라워요 + +07:03.860 --> 07:07.220 +정말 놀라운 일이에요 + +07:07.430 --> 07:12.860 +주석 제거∙∙∙ 주석을 달겠습니다 여러분이 혼란스러워하지 않도록요 + +07:13.010 --> 07:20.990 +이 기회에 이 말을 하고 싶네요 이 실험을 통해 여러분이 깊이 이해할 수 있는 기회가 + +07:20.990 --> 07:28.460 +될 겁니다 어떤 작품이 탄생하든 프롬프트를 통해 개성에 영향을 준다는 게 어떤 + +07:28.460 --> 07:30.560 +의미인지 말이죠 + +07:30.560 --> 07:32.840 +한 단계 더 나아갈 수 있게요 + +07:32.840 --> 07:36.740 +비꼬는 내용으로 책자를 만들어 봐요 + +07:36.740 --> 07:38.810 +스페인어로 책자를 만들어요 + +07:38.930 --> 07:43.790 +다른 언어로 번역할 수 있는 것도 추가해 주세요 + +07:43.790 --> 07:46.010 +다양한 걸 시도해 보는 거죠 + +07:46.160 --> 07:53.230 +이건 아주 중요한 부분이에요 어떻게 프롬프트해서 결과에 영향을 주는지를 학습하는 데 있어서요 + +07:53.230 --> 07:54.910 +Get up + +07:55.930 --> 08:01.180 +좋아요, 운동 얘기를 너무 많이 하기 전에 그만 끝내죠 + +08:01.180 --> 08:09.010 +오늘 우리가 한 건 첫날에 한 걸 확장했어요 웹 사이트를 요약하기 위해 LM 호출을 + +08:09.010 --> 08:15.700 +하나 하는 대신 LM 1에 두 번 호출해 관련 링크를 수집하고 그 모든 + +08:15.700 --> 08:21.700 +데이터 긁어모음에서 하나 회사 브로슈어를 구축했죠 + +08:22.000 --> 
08:28.210 +말씀드렸듯이 이건 거대한 인공지능의 장난감 버전과 같습니다 작은 방식으로요 + +08:28.210 --> 08:31.600 +큰 문제를 작은 단계로 나누고 있으니까요 + +08:31.600 --> 08:38.410 +하지만 여러분께 강조하고 싶은 건 이건 많은 종류의 비즈니스 문제에도 적용 가능하다는 + +08:38.410 --> 08:43.420 +겁니다 이런 종류의 정보를 통합하고 그 결과로 생성하는 거죠 + +08:43.480 --> 08:49.810 +이렇게 제품을 생산하기 위한 마케팅 콘텐츠를 작성할 수 있어요 + +08:49.930 --> 08:57.240 +개인 맞춤형 이메일 콘텐츠를 만드는 튜토리얼 같은 거예요 + +08:57.300 --> 08:59.670 +이메일을 많이 읽으면서요 + +08:59.670 --> 09:05.310 +방법은 아주 많아요 이런 2단계는 데이터를 합성하고 JSON에서 요약하는 + +09:05.310 --> 09:09.930 +거죠 일종의 출력을 구축하는 방법으로 사용해요 + +09:09.960 --> 09:12.120 +다양한 응용 프로그램이 있죠 + +09:12.120 --> 09:17.730 +여러분이 매일 하는 일에 이걸 적용할 방법을 찾아보세요 + +09:17.730 --> 09:19.920 +흥미로운 상업적 각도예요 + +09:19.950 --> 09:24.990 +여러분이 가장 큰 영역 지식을 가진 자신의 전문 분야를 생각할 수 있는 + +09:24.990 --> 09:25.860 +그런 거요 + +09:25.860 --> 09:31.500 +새로 얻은 기술을 어떻게 좋은 일에 쓸 건가요? TUZ + +09:31.500 --> 09:33.390 +그러니 생각해 보세요 + +09:33.390 --> 09:40.380 +언제든 제 아이디어를 듣고 예를 들어서 깃허브에 올려 보세요 + +09:40.380 --> 09:47.700 +정보를 합성하고 콘텐츠를 생성하기 위해 llms에 다중 호출을 사용하는 좋은 예가 + +09:47.700 --> 09:48.600 +있어요 + +09:48.630 --> 09:50.400 +그럼 다음 영상에서 만나요 diff --git a/week5/community-contributions/subtitles/srts/60620375/en_US.srt b/week5/community-contributions/subtitles/srts/60620375/en_US.srt new file mode 100755 index 0000000..8a4e84f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620375/en_US.srt @@ -0,0 +1,115 @@ +WEBVTT + +00:00.080 --> 00:03.050 +And with that, we've reached an important milestone. + +00:03.080 --> 00:07.940 +The first week of our eight week journey is complete, and there's been an awful lot covered. + +00:07.970 --> 00:10.490 +And congratulations for making it to this point. + +00:10.520 --> 00:16.160 +At this point, just to recap, you're in a position to describe the history of Transformers and the + +00:16.160 --> 00:17.540 +shocking last few years. 
+ +00:17.540 --> 00:23.210 +The use of tokens, tokenization, the importance of context windows and exactly what that means, where + +00:23.240 --> 00:27.230 +to go to look up API costs, and a lot more like that. + +00:27.410 --> 00:32.780 +You've really got hands on experience looking at a bunch of different frontier models, both the big + +00:32.780 --> 00:37.400 +six companies, but also some of the models within them and some of the very latest innovations. + +00:37.400 --> 00:42.770 +And we've seen how something like count, how many times letter A appears in the sentence is something + +00:42.770 --> 00:45.890 +surprisingly hard for Llms, even some of the very top ones. + +00:45.920 --> 00:50.660 +And now, with your understanding of tokenization, you probably have a very good sense as to why. + +00:50.720 --> 00:57.590 +And then most importantly, you're at this point, hopefully you are confidently able to use the OpenAI + +00:57.620 --> 01:02.070 +API, Including adding in things like streaming and markdown. + +01:02.070 --> 01:07.920 +You've built your own tool with the assignment, and you've also used the exercise we did. + +01:07.950 --> 01:12.330 +You made multiple calls to LMS and you've played around with the system prompts. + +01:12.330 --> 01:18.900 +You've got a good understanding of how you can use the system prompt for things like setting tone, + +01:18.900 --> 01:22.680 +character of the response, as well as giving the specific instructions. + +01:22.800 --> 01:30.330 +And you also understand about using both single shot and Multi-shot prompting as a way to get more accurate, + +01:30.330 --> 01:33.930 +robust, repeatable results from the LM. + +01:34.230 --> 01:42.960 +And to boot, you've also added in using the llama API to call the models running on your box directly. + +01:42.960 --> 01:45.780 +It's not something that we'll be doing, particularly going forwards. + +01:45.780 --> 01:47.760 +When we get to using open source models. 
+ +01:47.760 --> 01:53.340 +We'd rather use hugging face code when we can actually really get into the internals and start examining + +01:53.340 --> 01:54.900 +things like tokens and stuff. + +01:54.990 --> 01:59.340 +Um, but at any point you can always flip to using the Ulama API. + +01:59.370 --> 02:02.730 +If you would like to reduce API costs. + +02:02.880 --> 02:10.950 +So with that, that would be a wrap for a very substantive week, one with a lot of ground covered. + +02:11.190 --> 02:16.800 +Next week we will be getting into using APIs for all of the frontier models. + +02:16.800 --> 02:21.240 +So we'll be using OpenAI and Anthropic and Gemini. + +02:21.270 --> 02:23.490 +You'll do some work a little bit more. + +02:23.520 --> 02:30.300 +Another step in the direction of agent ization of agentic AI with a little bit more work on agents. + +02:30.300 --> 02:37.470 +But most importantly, we're going to be building some data science UIs using the fabulous Gradio platform + +02:37.470 --> 02:38.430 +that I love. + +02:38.460 --> 02:44.730 +And we'll be doing that, including building a complete multimodal customer support agent that is able + +02:44.730 --> 02:51.240 +to do things like show pictures and make audio and use tools where it calls into your computer. + +02:51.270 --> 02:53.220 +So a lot to cover next week. + +02:53.220 --> 02:56.670 +It's going to be a super exciting week and I can't wait to see you there. 
diff --git a/week5/community-contributions/subtitles/srts/60620375/ja_JP.srt b/week5/community-contributions/subtitles/srts/60620375/ja_JP.srt new file mode 100755 index 0000000..3a118af --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620375/ja_JP.srt @@ -0,0 +1,94 @@ +WEBVTT + +00:00.080 --> 00:03.050 +そして、 これで我々は重要な節目を迎えた。 + +00:03.080 --> 00:07.940 +8週間の旅の最初の週が終わった。 + +00:07.970 --> 00:10.490 +そして、 ここまで来られたことを祝福する。 + +00:10.520 --> 00:17.540 +この時点で、 あなたはトランスフォーマーの歴史と衝撃的なここ数年を説明する立場にある。 + +00:17.540 --> 00:23.210 +トークンの使い方、 トークン化、 コンテキスト・ウィンドウの重要性とその意味、 APIのコストを調べるにはどこに行けばいいか、 + +00:23.240 --> 00:27.230 +などなど。 + +00:27.410 --> 00:37.400 +大手6社だけでなく、 その中のいくつかのモデルや最新のイノベーションなど、 さまざまなフロンティア・モデルを実際に見て経験することができる。 + +00:37.400 --> 00:45.890 +Aという文字が文中に何回現れるかを数えるようなことは、 LLMにとって驚くほど難しいことである。 + +00:45.920 --> 00:50.660 +そして今、 トークン化について理解したあなたは、 おそらくその理由をよく理解しているはずだ。 + +00:50.720 --> 00:57.590 +そして最も重要なことは、 この時点で、 ストリーミングやマークダウンなどの追加を含め、 OpenAI + +00:57.620 --> 01:02.070 +APIを自信を持って使えるようになっていることだ。 + +01:02.070 --> 01:07.920 +あなたは課題で自分のツールを作り、 私たちが行った練習も使った。 + +01:07.950 --> 01:12.330 +あなたはLMSに何度も電話をかけ、 システムのプロンプトを弄った。 + +01:12.330 --> 01:22.680 +具体的な指示だけでなく、 トーンの設定や応答の性格など、 システム・プロンプトの使い方をよく理解している。 + +01:22.800 --> 01:33.930 +また、 LMからより正確で、 確実で、 再現性のある結果を得るための方法として、 シングルショットとマルチショットプロンプトの両方を使用することも理解していますね。 + +01:34.230 --> 01:42.960 +さらに、 llama APIを使って、 あなたのボックスで動いているモデルを直接呼び出すこともできる。 + +01:42.960 --> 01:45.780 +特に今後やっていくようなことではない。 + +01:45.780 --> 01:47.760 +オープンソースのモデルを使うようになったら + +01:47.760 --> 01:54.900 +私たちは、 実際に内部に入り込んでトークンなどを調べ始めることができるようになったら、 むしろ抱きつき顔のコードを使いたい。 + +01:54.990 --> 01:59.340 +でも、 いつでもウラマAPIの使用に切り替えることができる。 + +01:59.370 --> 02:02.730 +APIコストを削減したい場合。 + +02:02.880 --> 02:10.950 +ということで、 非常に内容の濃い1週間であった。 + +02:11.190 --> 02:16.800 +来週は、 すべてのフロンティア・モデルでAPIを使うことになる。 + +02:16.800 --> 02:21.240 +だから僕たちはOpenAIとAnthropicとGeminiを使うつもりだ。 + +02:21.270 --> 02:23.490 +もう少し仕事をすることになる。 + +02:23.520 --> 02:30.300 +エージェントにもう少し手を加えることで、 
エージェント型AIのエージェント化の方向にまた一歩進んだ。 + +02:30.300 --> 02:38.430 +しかし最も重要なのは、 私が大好きな素晴らしいGradioプラットフォームを使って、 データサイエンスのUIをいくつか構築することだ。 + +02:38.460 --> 02:44.730 +私たちは、 写真を見せたり、 音声を出したり、 コンピューターに電話をかけるツールを使ったりできる、 完全なマルチモーダル・カスタマーサポート・エージェントを構築することも含めて、 + +02:44.730 --> 02:51.240 +そうするつもりです。 + +02:51.270 --> 02:53.220 +というわけで、 来週は盛りだくさんの内容だ。 + +02:53.220 --> 02:56.670 +超エキサイティングな1週間になりそうで、 そこで皆さんにお会いできるのが待ち遠しいです。 diff --git a/week5/community-contributions/subtitles/srts/60620375/ko_KR.srt b/week5/community-contributions/subtitles/srts/60620375/ko_KR.srt new file mode 100755 index 0000000..6799ca0 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620375/ko_KR.srt @@ -0,0 +1,115 @@ +WEBVTT + +00:00.080 --> 00:03.050 +이제 중요한 이정표에 도달했어요 + +00:03.080 --> 00:07.940 +8주간의 여정 중 첫 주가 끝났습니다 많은 걸 다뤄야 했죠 + +00:07.970 --> 00:10.490 +여기까지 온 걸 축하해요 + +00:10.520 --> 00:16.160 +간단히 정리해 보죠 트랜스포머의 역사와 충격적인 지난 몇 년을 설명해야 + +00:16.160 --> 00:17.540 +하는데요 + +00:17.540 --> 00:23.210 +토큰 사용, 토큰화 컨텍스트 윈도우의 중요성 그리고 이것이 정확히 무슨 의미인지 + +00:23.240 --> 00:27.230 +API 비용은 어디서 확인하는지 등 많은 것들이 있죠 + +00:27.410 --> 00:32.780 +빅 식스라는 여러 회사의 모델과 그 안의 모델 그리고 최신 + +00:32.780 --> 00:37.400 +혁신 모델을 직접 살펴본 경험이 많으시잖아요 + +00:37.400 --> 00:42.770 +A가 문장에 몇 번 나오는지 세는 건 Lms에겐 놀라울 정도로 어려운 일입니다 + +00:42.770 --> 00:45.890 +제일 위에 있는 것도 마찬가지죠 + +00:45.920 --> 00:50.660 +이제 토큰화를 이해하셨으니 그 이유도 잘 아실 텐데요 + +00:50.720 --> 00:57.590 +그리고 가장 중요한 건 이 시점에서 OpenAI API를 자신 있게 사용할 수 있길 바라는 + +00:57.620 --> 01:02.070 +겁니다 스트리밍이나 마크다운 같은 걸 추가하는 것도요 + +01:02.070 --> 01:07.920 +과제와 함께 여러분만의 도구를 만들고 우리가 한 연습도 사용했죠 + +01:07.950 --> 01:12.330 +LMS에 여러 번 전화했고 시스템 프롬프트도 조작했죠 + +01:12.330 --> 01:18.900 +시스템 프롬프트를 어떻게 사용하는지 잘 알고 있죠 톤 설정이나 응답 문자 + +01:18.900 --> 01:22.680 +같은 거요 구체적인 지시도 할 수 있고요 + +01:22.800 --> 01:30.330 +그리고 단발과 다발 프롬핑을 LM에서 보다 정확하고 강력하고 반복 가능한 결과를 얻기 위한 방법으로 + +01:30.330 --> 01:33.930 +사용하는 것도 이해하시죠? 
get it + +01:34.230 --> 01:42.960 +부팅하기 위해 llama API 사용도 추가했어요 컴퓨터에서 직접 실행되는 모델을 호출하기 위해서요 + +01:42.960 --> 01:45.780 +앞으로 그런 일은 없을 거예요 + +01:45.780 --> 01:47.760 +오픈 소스 모델을 사용하게 되면요. + +01:47.760 --> 01:53.340 +Get-Tuck Season 2 닙턱 시즌 2 차라리 포옹하는 얼굴 코드를 사용하죠 내부의 토큰 + +01:53.340 --> 01:54.900 +같은 걸 검사할 수 있다면요 + +01:54.990 --> 01:59.340 +언제든 Ulama API 사용으로 전환할 수 있어요 + +01:59.370 --> 02:02.730 +API 비용을 줄이고 싶다면요 + +02:02.880 --> 02:10.950 +이로써 아주 중요한 한 주 작업이 끝났고 많은 걸 다뤄야 했죠 + +02:11.190 --> 02:16.800 +다음 주에는 모든 프론티어 모델에 API를 사용할 거예요 + +02:16.800 --> 02:21.240 +오픈라이, 앤스로픽 제미니를 쓸 거예요 + +02:21.270 --> 02:23.490 +비트 좀 더 줘요 + +02:23.520 --> 02:30.300 +에이전트 인공지능의 에이전트 아이즈로 가는 스텝으로 에이전트에 좀 더 집중했죠. 비트 + +02:30.300 --> 02:37.470 +하지만 가장 중요한 건 데이터 과학 UI를 구축하는 겁니다 제가 좋아하는 그래디오 플랫폼을 + +02:37.470 --> 02:38.430 +이용해서요 + +02:38.460 --> 02:44.730 +그걸 할 겁니다 그림을 보여주거나 오디오를 만들 수 있는 완벽한 멀티모달 고객 + +02:44.730 --> 02:51.240 +지원 에이전트를 구축하는 것도요 여러분 컴퓨터로 호출되는 툴을 사용하는 거죠 + +02:51.270 --> 02:53.220 +다음 주에 다룰 게 많아요 + +02:53.220 --> 02:56.670 +정말 신나는 한 주가 될 거예요 빨리 보고 싶네요 diff --git a/week5/community-contributions/subtitles/srts/60620395/en_US.srt b/week5/community-contributions/subtitles/srts/60620395/en_US.srt new file mode 100755 index 0000000..4a98d16 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620395/en_US.srt @@ -0,0 +1,181 @@ +WEBVTT + +00:00.110 --> 00:05.630 +Welcome back to Jupyter Lab, where I want to show you the assignment, the homework exercise for you + +00:05.630 --> 00:09.170 +for the end of week one, coming into the week one folder. + +00:09.170 --> 00:14.330 +And you'll see here week one exercise so you can see there's rather little here. + +00:14.330 --> 00:16.550 +There's a little framework to tell you what we're doing. + +00:16.550 --> 00:23.150 +So the idea is for you to build a little tool that will be used by yourself as something that can be + +00:23.150 --> 00:25.130 +your guide during this course. 
+ +00:25.160 --> 00:33.170 +It's going to be a tool which will be able to answer questions on code, on llms, on things that you + +00:33.170 --> 00:39.290 +might run into along the way and be your sidekick, your your customized copilot for this experience. + +00:39.290 --> 00:41.120 +And this is what it's going to do. + +00:41.240 --> 00:44.300 +Uh, it's going to you're going to fill in some inputs. + +00:44.300 --> 00:47.840 +You're going to you can just use these two constants we've got here. + +00:47.840 --> 00:55.790 +You're going to be using both GPT and open source llama running in llama on your computer, something + +00:55.790 --> 01:00.140 +which you're familiar with from our week two assignment or day two assignment. + +01:00.170 --> 01:03.300 +You'll put some code in here to set up your environment. + +01:03.360 --> 01:09.780 +And then this is where you will type the question that you want to ask your tutor, your personal private + +01:09.780 --> 01:10.500 +tutor. + +01:10.500 --> 01:15.330 +And in this case, I am asking it to explain what this code does and why. + +01:15.360 --> 01:20.340 +And this again, is that line of code that we looked at a couple of times, which is has a few different + +01:20.340 --> 01:24.270 +things going on there, and this is the better version of the code than the one I originally wrote. + +01:24.270 --> 01:26.370 +This is the version that I think it was. + +01:26.400 --> 01:31.230 +Yeah, it was GPT four with canvas that wrote this version of it. + +01:31.500 --> 01:39.450 +So we will provide this, and then you will fill in this piece of code here, which will get GPT four + +01:39.450 --> 01:40.530 +mini to answer. + +01:40.530 --> 01:47.310 +And you should try and answer and have that response stream back and use markdown, uh, so that it + +01:47.310 --> 01:49.560 +appears in full glory. + +01:49.620 --> 01:55.620 +Uh, and then do the same thing again for llama 3.2 and see the answer there. 
+ +01:55.620 --> 02:00.090 +So this will need you to to write the code from scratch and sort of type it out. + +02:00.090 --> 02:06.460 +And of course you should look back at the fragments of code that we came up with on the prior days and + +02:06.460 --> 02:12.010 +used that to build this, and hopefully it will be something that will come reasonably naturally and + +02:12.010 --> 02:16.360 +you'll be used to the familiar .0., blah, blah, blah, blah, blah. + +02:16.360 --> 02:22.390 +It's all going to be something that will be in your memory and you'll also be very comfortable, I hope, + +02:22.390 --> 02:28.120 +with creating the messages list of dictionaries, something that you're used to at this point. + +02:28.240 --> 02:31.600 +Um, and when you do that, you'll see what answer that you get for this. + +02:31.600 --> 02:34.750 +And it's actually a remarkably good answer. + +02:34.750 --> 02:36.850 +Again, I think much better than I could do. + +02:37.000 --> 02:42.010 +Uh, and it would be interesting to see whether you're also happy with the answer that you get from + +02:42.010 --> 02:46.000 +a minuscule model that is much, much smaller by comparison. + +02:46.000 --> 02:47.950 +And see how you compare those two. + +02:47.980 --> 02:52.480 +And then in the future, you'll be able to use this as a bit of a resource. + +02:52.630 --> 02:55.450 +And so I hope this this is useful for you. + +02:55.480 --> 02:59.740 +And in fact, after week two, you might want to come back to this and add a user interface to it. + +02:59.740 --> 03:04.450 +So you could have like a little tool running on the side that would be uh, could be super handy. + +03:04.630 --> 03:09.650 +Uh, one also thing that you might do is that what I've done here is I've just put down the question + +03:09.650 --> 03:11.060 +as a global variable. 
+ +03:11.060 --> 03:15.350 +You might instead want to see if you can think about a way that you could take that in as an input and + +03:15.350 --> 03:16.880 +be a bit more dynamic about this. + +03:16.880 --> 03:21.890 +So it could just be a cell that you run, but maybe do that as a as a later improvement. + +03:22.430 --> 03:28.910 +Now I've done this and the solution is sitting in this solutions folder, but I urge you not to look + +03:28.940 --> 03:29.840 +at the solution. + +03:29.840 --> 03:33.350 +Uh, not not unless you you find yourself feeling stuck. + +03:33.380 --> 03:34.730 +There's nothing worse than feeling stuck. + +03:34.760 --> 03:41.870 +If you do feel stuck, you could always try first asking ChatGPT or Claude for a nudge, but otherwise, + +03:41.870 --> 03:43.790 +please, please, please reach out to me. + +03:43.790 --> 03:46.190 +There is no reason whatsoever to be stuck with this. + +03:46.220 --> 03:49.160 +It should be something that you that you're able to to get through. + +03:49.160 --> 03:50.750 +And I would love to help you with that. + +03:50.750 --> 03:57.710 +Get you to a point when you have your own personal, technical and data science tutor to help you along + +03:57.710 --> 03:58.190 +the way. + +03:58.190 --> 04:06.560 +So, um, above all, be sure to have fun with this and I will see you back for the wrap up video for + +04:06.560 --> 04:07.220 +week one. 
diff --git a/week5/community-contributions/subtitles/srts/60620395/ja_JP.srt b/week5/community-contributions/subtitles/srts/60620395/ja_JP.srt new file mode 100755 index 0000000..13b1a0c --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620395/ja_JP.srt @@ -0,0 +1,136 @@ +WEBVTT + +00:00.110 --> 00:09.170 +Jupyter Labへようこそ。 第1週目の終わり、 第1週目のフォルダに入っている宿題をお見せしましょう。 + +00:09.170 --> 00:14.330 +そして、 ここにある1週目の練習を見れば、 ここにはほとんど何もないことがわかるだろう。 + +00:14.330 --> 00:16.550 +私たちが何をしているかを伝えるための小さな枠組みがある。 + +00:16.550 --> 00:25.130 +だから、 このコースの間、 ガイドとなるような小さなツールを自分で作ることだ。 + +00:25.160 --> 00:33.170 +このツールは、 コードやLLMに関する質問、 途中でぶつかる可能性のある事柄に答えてくれるツールであり、 + +00:33.170 --> 00:39.290 +あなたの相棒、 この経験のためのカスタマイズされた副操縦士になるだろう。 + +00:39.290 --> 00:41.120 +そして、 これがその目的だ。 + +00:41.240 --> 00:44.300 +ええと、 いくつかの入力を入力するんだ。 + +00:44.300 --> 00:47.840 +ここにある2つの定数を使えばいい。 + +00:47.840 --> 01:00.140 +GPTとオープンソースのllamaの両方を使うことになるが、 これは2週目の課題か2日目の課題でお馴染みのものだ。 + +01:00.170 --> 01:03.300 +ここに環境をセットアップするためのコードを入れる。 + +01:03.360 --> 01:10.500 +そして、 ここで家庭教師に聞きたい質問を入力します。 + +01:10.500 --> 01:15.330 +そしてこの場合、 私はこのコードが何をするのか、 なぜするのかを説明するよう求めているのだ。 + +01:15.360 --> 01:24.270 +そしてこれもまた、 何度か見たコードの行で、 そこにはいくつかの異なることが起こっている。 + +01:24.270 --> 01:26.370 +これがそのバージョンだと思う。 + +01:26.400 --> 01:31.230 +ああ、 このバージョンを書いたのはキャンバスを持つGPTの4人だ。 + +01:31.500 --> 01:40.530 +このコードを入力すると、 GPTが4つのミニに答えてくれる。 + +01:40.530 --> 01:49.560 +そして、 回答して、 その回答がストリームバックされ、 マークダウンを使って、 その回答が完全な形で表示されるようにするべきだ。 + +01:49.620 --> 01:55.620 +ええと、 それからラマ3にも同じことをするんだ。 2 を見て、 そこに答えがある。 + +01:55.620 --> 02:00.090 +そのため、 ゼロからコードを書いて、 それをタイプアウトする必要がある。 + +02:00.090 --> 02:06.460 +そしてもちろん、 その前の日に私たちが考え出したコードの断片を振り返って、 それを使ってこれを構築してほしい。 うまくいけば、 それはそれなりに自然にできることだし、 + +02:06.460 --> 02:16.360 +慣れ親しんだ.NET Frameworkに慣れているはずだ。 + +02:16.360 --> 02:16.360 +0. ............................................ 
+ +02:16.360 --> 02:28.120 +そして、 辞書のメッセージリストを作成することにも慣れていることだろう。 + +02:28.240 --> 02:31.600 +そうすれば、 どんな答えが返ってくるかわかるだろう。 + +02:31.600 --> 02:34.750 +そして、 それは実際、 驚くほど良い答えだ。 + +02:34.750 --> 02:36.850 +繰り返しになるが、 私ができることよりずっといいと思う。 + +02:37.000 --> 02:46.000 +それと、 それに比べてはるかに小さい極小モデルから得られる答えに満足しているかどうかも見てみたいですね。 + +02:46.000 --> 02:47.950 +そして、 その2つを比べてみてほしい。 + +02:47.980 --> 02:52.480 +そして将来的には、 これをちょっとした資料として使えるようになる。 + +02:52.630 --> 02:55.450 +だから、 これがあなたの役に立つことを願っている。 + +02:55.480 --> 02:59.740 +そして実際、 2週目が終わったら、 またここに戻ってきて、 ユーザー・インターフェースを追加したくなるかもしれない。 + +02:59.740 --> 03:04.450 +だから、 ちょっとしたツールを横に置いておくと、 すごく便利なんだ。 + +03:04.630 --> 03:11.060 +ええと、 もうひとつ、 私がここでやったことは、 質問をグローバル変数として置くということです。 + +03:11.060 --> 03:16.880 +その代わりに、 それをインプットとして取り入れ、 もう少しダイナミックに行動できる方法を考えてみてはどうだろう。 + +03:16.880 --> 03:21.890 +だから、 ただセルを走らせるだけということもあり得る。 + +03:22.430 --> 03:29.840 +解決策はこのソリューション・フォルダーにある。 + +03:29.840 --> 03:33.350 +そうじゃなくて、 行き詰まりを感じない限りはね。 + +03:33.380 --> 03:34.730 +行き詰まりを感じることほど悪いことはない。 + +03:34.760 --> 03:43.790 +もし行き詰まりを感じたら、 まずChatGPTやクロードに助けを求めてもいい。 + +03:43.790 --> 03:46.190 +これにこだわる理由は何もない。 + +03:46.220 --> 03:49.160 +それは、 あなた自身が乗り切れるものでなければならない。 + +03:49.160 --> 03:50.750 +私はそのお手伝いをしたい。 + +03:50.750 --> 03:58.190 +あなただけの、 技術的でデータサイエンスに精通した家庭教師が、 あなたをサポートします。 + +03:58.190 --> 04:07.220 +それでは、 何よりも楽しんでください。 diff --git a/week5/community-contributions/subtitles/srts/60620395/ko_KR.srt b/week5/community-contributions/subtitles/srts/60620395/ko_KR.srt new file mode 100755 index 0000000..c76b24a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620395/ko_KR.srt @@ -0,0 +1,175 @@ +WEBVTT + +00:00.110 --> 00:05.630 +주피터 연구소에 잘 오셨습니다 첫 주 마지막 날의 숙제와 숙제를 보여드리겠습니다 + +00:05.630 --> 00:09.170 +첫 주 폴더에 넣어주세요 + +00:09.170 --> 00:14.330 +첫 번째 주에는 운동량이 거의 없었죠 + +00:14.330 --> 00:16.550 +우리가 뭘 하는지 보여주는 프레임워크가 있어요 + +00:16.550 --> 00:23.150 +여러분이 직접 도구를 만드세요 이 과정 동안 여러분의 가이드가 + +00:23.150 --> 00:25.130 +될 수 있도록요 + +00:25.160 --> 00:33.170 +코드와 llms에 대한 질문에 답변을 해주는 도구가 
될 겁니다 여러분이 마주치게 될 수 있는 + +00:33.170 --> 00:39.290 +것들이요 조수가 될 수도 있고 여러분의 맞춤형 부조종사가 될 수도 있죠 + +00:39.290 --> 00:41.120 +이렇게 하는 거예요 + +00:41.240 --> 00:44.300 +입력값을 채울 수 있어요 + +00:44.300 --> 00:47.840 +여러분은 그냥 여기 있는 이 두 상수를 사용할 수 있어요 + +00:47.840 --> 00:55.790 +GPT와 오픈 소스 라마를 모두 사용할 겁니다 라마를 컴퓨터에서 실행하는 거죠 2주 + +00:55.790 --> 01:00.140 +차, 아니 2일 차 과제에서 익숙하셨을 텐데요 + +01:00.170 --> 01:03.300 +환경을 설정하기 위한 코드를 여기 Put이 되죠 + +01:03.360 --> 01:10.500 +그리고 여기서 개인 과외 선생님께 묻고 싶은 질문을 입력하세요 + +01:10.500 --> 01:15.330 +이 경우 이 코드가 뭘 하고 왜 하는지 설명하도록 요청하고 있어요 + +01:15.360 --> 01:20.340 +이건 우리가 몇 번 봤던 코드 줄입니다 몇 가지 다른 게 있죠 + +01:20.340 --> 01:24.270 +이게 제가 원래 쓴 코드보다 나은 버전이에요 + +01:24.270 --> 01:26.370 +이게 그 버전인 것 같아요 + +01:26.400 --> 01:31.230 +네, GPT 4와 캔버스로 이런 버전을 만들었어요 + +01:31.500 --> 01:39.450 +이걸 제공하고 이 코드를 채우면 GPT for 미니가 응답할 + +01:39.450 --> 01:40.530 +거예요 + +01:40.530 --> 01:47.310 +응답 스트림을 되돌려 받아 마크다운을 사용해야 합니다 그래야 완전히 + +01:47.310 --> 01:49.560 +멋지게 나타나죠 + +01:49.620 --> 01:55.620 +라마 3에서도 같은 걸 하고요 2번, 답이 저기 있네요 + +01:55.620 --> 02:00.090 +그래서 처음부터 코드를 작성하고 타이핑을 해야 해요 + +02:00.090 --> 02:06.460 +물론 우리가 이전에 만들어낸 코드 조각들을 살펴봐야 합니다. 이것들을 구축하는데 + +02:06.460 --> 02:12.010 +사용했던 것들이죠. 바라건대, 자연스럽게 만들어지고 익숙한 것에 익숙해지길 + +02:12.010 --> 02:16.360 +바라요. 0살요 어쩌고저쩌고, 어쩌고저쩌고예요 + +02:16.360 --> 02:22.390 +전부 여러분 메모리에 남을 겁니다 또한 아주 편안한 사전 메시지 + +02:22.390 --> 02:28.120 +목록을 만들 수 있길 바랍니다 현재로선 익숙하신 거죠 + +02:28.240 --> 02:31.600 +get을 입력하면 어떤 답이 나올지 알 수 있어요 + +02:31.600 --> 02:34.750 +사실 아주 훌륭한 대답이에요 + +02:34.750 --> 02:36.850 +제 능력보다 훨씬 낫다고 생각해요 + +02:37.000 --> 02:42.010 +비교적 아주 작은 축소 모델에서 얻은 답에도 만족하는지 확인하는 + +02:42.010 --> 02:46.000 +것도 흥미로울 것 같아요. Get it! 
+ +02:46.000 --> 02:47.950 +그 둘을 비교해 보세요 + +02:47.980 --> 02:52.480 +미래에는 비트를 리소스로 사용할 수 있을 거예요 + +02:52.630 --> 02:55.450 +그러니 이게 유용하길 바라요 + +02:55.480 --> 02:59.740 +둘째 주가 지나면 여기로 돌아와 사용자 인터페이스를 추가하세요 + +02:59.740 --> 03:04.450 +옆에 작은 도구가 달려 있으면 아주 편리할 거예요 + +03:04.630 --> 03:09.650 +여러분이 하실 수 있는 또 한 가지는 제가 여기서 한 건 질문을 전역 변수로 + +03:09.650 --> 03:11.060 +입력한 거죠 Put + +03:11.060 --> 03:15.350 +비트 대신에 이걸 입력으로 받아들이고 좀 더 동적으로 만들 방법을 + +03:15.350 --> 03:16.880 +생각해 보세요 + +03:16.880 --> 03:21.890 +그냥 세포만 실행할 수도 있지만 나중에 개선할 수도 있죠 + +03:22.430 --> 03:28.910 +이걸 해뒀는데 솔루션은 솔루션 폴더에 있어요 하지만 솔루션은 보지 + +03:28.940 --> 03:29.840 +마세요 + +03:29.840 --> 03:33.350 +답답한 기분이 들지 않는 한은 괜찮아요 + +03:33.380 --> 03:34.730 +막히는 것만큼 괴로운 건 없죠 + +03:34.760 --> 03:41.870 +막막하시면 챗GPT나 클로드에게 먼저 연락해 보세요 그게 아니라면 제발 + +03:41.870 --> 03:43.790 +제게 연락하세요 + +03:43.790 --> 03:46.190 +이 문제를 계속 안고 있을 이유가 없어요 + +03:46.220 --> 03:49.160 +Get it은 여러분이 극복할 수 있어야 해요 + +03:49.160 --> 03:50.750 +나도 돕고 싶어요 + +03:50.750 --> 03:57.710 +개인, 기술, 데이터, 과학 강사가 도와줄 수 있는 단계까지 get it + +03:57.710 --> 03:58.190 +up + +03:58.190 --> 04:07.220 +그러니 무엇보다 즐겁게 작업하시길 바라요 1주 차 마무리 영상에서 다시 만나요 diff --git a/week5/community-contributions/subtitles/srts/60620397/en_US.srt b/week5/community-contributions/subtitles/srts/60620397/en_US.srt new file mode 100755 index 0000000..0520e88 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620397/en_US.srt @@ -0,0 +1,196 @@ +WEBVTT + +00:00.080 --> 00:07.220 +Well, that's a fantastic result to have now arrived towards the end of week one and having completed + +00:07.250 --> 00:09.980 +a substantial and exciting project. + +00:10.010 --> 00:12.350 +Now, of course, there are some challenges for you. + +00:12.350 --> 00:14.000 +This is where it gets interesting. + +00:14.030 --> 00:18.860 +First of all, I have some challenges, which is things that you can do to make this project better, + +00:18.860 --> 00:20.990 +similar to things I just mentioned a moment ago. 
+ +00:20.990 --> 00:27.350 +And then after that there is an exercise, a proper work through homework assignment for you where you + +00:27.350 --> 00:29.180 +have to build something from scratch. + +00:29.180 --> 00:33.230 +And of course, I do provide a solution when you're ready for it, but I don't think you'll need it + +00:33.230 --> 00:34.610 +because I think you got this. + +00:34.610 --> 00:36.230 +Let's start with the challenges. + +00:36.230 --> 00:42.650 +So first of all, uh, in the when we built the the brochure maker that we've already got, there are, + +00:42.680 --> 00:44.600 +of course, the two calls to the LMS. + +00:44.600 --> 00:50.840 +The first call I described as one shot prompting because we give an example of some JSON of how it should + +00:50.840 --> 00:51.830 +reply. + +00:51.860 --> 00:57.530 +And now I mentioned before that there's also this expression multi-shot prompting, which is when you + +00:57.530 --> 01:00.110 +provide multiple examples. + +01:00.110 --> 01:05.090 +And so that's what I would like you to do, extend this to have multi shot prompting and really to make + +01:05.090 --> 01:06.350 +it true multi-shot prompting. + +01:06.350 --> 01:12.770 +The way you would do is you'd say something like, so if I show you these links, you might reply like + +01:12.770 --> 01:19.670 +this and give it some JSON, clearly indicating where you've only selected the relevant links and how + +01:19.670 --> 01:21.650 +you fully qualified the path. + +01:21.650 --> 01:22.880 +So try doing that. + +01:22.880 --> 01:29.090 +Put in 1 or 2 more examples, because that will be then making use of Multi-shot prompting. + +01:29.090 --> 01:31.610 +And you can add that to your resume that you've done multi-shot prompting. + +01:31.640 --> 01:36.920 +I joke, of course, but but it is an important skill to have done and tried. 
+ +01:36.920 --> 01:43.700 +But the reason it's useful is that when you do this, you improve the quality and reliability of the + +01:43.700 --> 01:44.900 +call to the LLM. + +01:44.930 --> 01:53.870 +Adding more uh, examples into the prompt strengthens the its, its ability to reliably predict the + +01:53.870 --> 01:56.480 +next tokens and what you want it to be predicting. + +01:56.480 --> 01:58.940 +So this is a good exercise to do. + +01:58.940 --> 02:03.500 +It's a good way to add more robustness to this LLM call. + +02:03.500 --> 02:05.690 +And it's something that we'll be doing along the course. + +02:05.690 --> 02:08.810 +And it's something that you'll want to incorporate in your own projects. + +02:08.810 --> 02:13.570 +So please do give that a try and say give that a shot, give that a multi shot. + +02:14.560 --> 02:18.700 +So and I also I mentioned that towards the end of the course we're going to be using this technique + +02:18.700 --> 02:20.050 +called structured outputs. + +02:20.050 --> 02:23.620 +That actually forces the LLM to respond in a particular way. + +02:23.620 --> 02:30.160 +But still Multi-shot prompting helps giving it that extra context that that extra sort of flavor for + +02:30.160 --> 02:31.330 +what you're looking for. + +02:32.050 --> 02:37.000 +Um, and then just things you can do for the second call to generate the brochure. + +02:37.000 --> 02:41.350 +We already talked, of course, about using the system prompts to make it be snarky or sarcastic or + +02:41.350 --> 02:41.740 +whatever. + +02:41.740 --> 02:46.900 +And I mentioned that you can use the system prompt to make it generate something in a different language, + +02:46.990 --> 02:48.010 +like Spanish. + +02:48.010 --> 02:53.290 +There's another thing you could do there which might be more, certainly more interesting. + +02:53.290 --> 02:55.450 +I don't know if it will get you a better result or not. 
+ +02:55.450 --> 03:00.880 +And that would be generate the brochure in English and then make a second call. + +03:01.030 --> 03:08.080 +Actually, of course, it's a third call to the LLM to translate the brochure from English to Spanish. + +03:08.140 --> 03:14.020 +Uh, now, in many ways it's probably actually not going to be any better to do it that way in this + +03:14.020 --> 03:14.710 +case. + +03:14.710 --> 03:20.080 +But by getting into that practice of doing that, you could imagine that we might use a model that is + +03:20.080 --> 03:26.440 +actually specially trained for the purposes of translation, and so you could use that model just for + +03:26.470 --> 03:31.900 +that purpose, and that would then allow you for sure to get a better outcome using one model that's + +03:31.900 --> 03:36.730 +trained for brochure generation and a different model that's trained for translation. + +03:36.730 --> 03:41.950 +And so whilst we will in fact probably be using just GPT four or mini for both purposes, it certainly + +03:41.950 --> 03:45.970 +gives you that hands on experience of making the multiple calls. + +03:45.970 --> 03:50.350 +And again, that's basically a miniature implementation of Agentic AI. + +03:50.380 --> 03:55.900 +So again, great thing to get into the habit of doing, even if you could probably just use the system + +03:55.900 --> 03:58.210 +prompt to do it all in 1 in 1 bash. + +03:58.360 --> 04:00.370 +Anyway, those are the things to do. + +04:00.370 --> 04:08.680 +This will really help build your your confidence and your experience with these kinds of techniques, + +04:08.710 --> 04:12.970 +which will come in extremely useful in the upcoming weeks. + +04:12.970 --> 04:18.130 +And then I have an exercise for you, and this is where you'll be building something from scratch. + +04:18.130 --> 04:22.030 +And to show you that, I will take you to the next video back to JupyterLab. 
diff --git a/week5/community-contributions/subtitles/srts/60620397/ja_JP.srt b/week5/community-contributions/subtitles/srts/60620397/ja_JP.srt new file mode 100755 index 0000000..556437f --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620397/ja_JP.srt @@ -0,0 +1,136 @@ +WEBVTT + +00:00.080 --> 00:07.220 +第1週が終わり、 充実したエキサイティングなプロジェクトを終えた今、 + +00:07.250 --> 00:09.980 +素晴らしい結果だ。 + +00:10.010 --> 00:12.350 +もちろん、 あなたにはいくつかの課題がある。 + +00:12.350 --> 00:14.000 +ここからが面白くなる。 + +00:14.030 --> 00:20.990 +まず、 このプロジェクトをより良いものにするためにできること、 つまり、 先ほど申し上げたことと同じようなことがあります。 + +00:20.990 --> 00:29.180 +そしてそのあとには、 ゼロから何かを作り上げるという宿題が出される。 + +00:29.180 --> 00:34.610 +もちろん、 準備ができたら解決策を提示する。 + +00:34.610 --> 00:36.230 +まずは課題から。 + +00:36.230 --> 00:44.600 +まず第一に、 私たちがパンフレット・メーカーを作ったとき、 もちろんLMSへの2つの呼び出しがありました。 + +00:44.600 --> 00:51.830 +最初の呼び出しは、 一発プロンプトと表現した。 + +00:51.860 --> 01:00.110 +そして、 マルチショット・プロンプティングという表現があることは前にも述べた。 + +01:00.110 --> 01:06.350 +だから、 マルチショット・プロンプトができるように拡張して、 本当のマルチショット・プロンプトにしてほしいんだ。 + +01:06.350 --> 01:12.770 +このリンクを見せたら、 こう答えてJSONを渡し、 関連するリンクだけを選択し、 + +01:12.770 --> 01:21.650 +どのようにパスを完全修飾したかを明確に示すのだ。 + +01:21.650 --> 01:22.880 +だから、 そうしてみてほしい。 + +01:22.880 --> 01:29.090 +マルチショット・プロンプトを使うことになるので、 もう1つか2つの例を入れてください。 + +01:29.090 --> 01:31.610 +そして、 マルチショット・プロンプトを行ったことを履歴書に書き加えることができる。 + +01:31.640 --> 01:36.920 +もちろん冗談だが、 しかし、 やってみることは重要なスキルだ。 + +01:36.920 --> 01:44.900 +しかし、 これが有用なのは、 そうすることでLLMへのコールの質と信頼性が向上するからだ。 + +01:44.930 --> 01:56.480 +プロンプトにさらにあーだこーだと例を加えることで、 次のトークンを確実に予測し、 何を予測させたいかを予測する能力が強化される。 + +01:56.480 --> 01:58.940 +だから、 これはいい練習になる。 + +01:58.940 --> 02:03.500 +このLLMコールに、 より堅牢性を加える良い方法だ。 + +02:03.500 --> 02:05.690 +そして、 それはコースに沿って行うことだ。 + +02:05.690 --> 02:08.810 +そしてそれは、 あなた自身のプロジェクトに取り入れたくなるものだ。 + +02:08.810 --> 02:13.570 +だから、 ぜひ試してみてほしい。 + +02:14.560 --> 02:20.050 +コースの終盤では、 構造化されたアウトプットというテクニックを使うことにも触れた。 + +02:20.050 --> 02:23.620 +そうすることで、 LLMは特殊な対応をせざるを得なくなる。 + +02:23.620 --> 02:31.330 +でも、 マルチショット・プロンプトは、 
あなたが探しているものに対して、 特別な文脈、 特別な味わいを与えてくれる。 + +02:32.050 --> 02:37.000 +それから、 パンフレットを作るために2回目の電話でできること。 + +02:37.000 --> 02:41.740 +もちろん、 システムのプロンプトを使って、 皮肉や皮肉を言うようにすることについては、 すでに話した。 + +02:41.740 --> 02:48.010 +また、 システム・プロンプトを使って、 スペイン語など別の言語で何かを生成させることができることは述べた。 + +02:48.010 --> 02:53.290 +そこでできることはもう一つあって、 それはもっと、 確かにもっと面白いかもしれない。 + +02:53.290 --> 02:55.450 +それで良い結果が得られるかどうかは分からない。 + +02:55.450 --> 03:00.880 +そして、 それは英語でパンフレットを作成し、 2度目の電話をかけることだ。 + +03:01.030 --> 03:08.080 +実際には、 もちろん、 パンフレットを英語からスペイン語に翻訳するために、 LLMに3度目の電話をしている。 + +03:08.140 --> 03:14.710 +でも、 いろんな意味で、 この場合、 その方がいいということはないだろう。 + +03:14.710 --> 03:20.080 +しかし、 それを実践することで、 実際に翻訳の目的のために特別に訓練されたモデルを使用することが想像できます。 + +03:20.080 --> 03:36.730 +そうすれば、 その目的のためだけにそのモデルを使用することができ、 パンフレット生成のために訓練されたモデルと翻訳のために訓練された別のモデルを使用して、 より良い結果を確実に得ることができるようになります。 + +03:36.730 --> 03:45.970 +GPT4かミニを両方の目的で使うことになるだろうが、 複数のコールを実際に体験することができる。 + +03:45.970 --> 03:50.350 +繰り返しになるが、 これは基本的にエージェントAIのミニチュア実装だ。 + +03:50.380 --> 03:58.210 +システム・プロンプトを使えば、 1 in 1のbashで全部できるかもしれないが。 + +03:58.360 --> 04:00.370 +とにかく、 それがやるべきことだ。 + +04:00.370 --> 04:12.970 +これは、 あなたの自信と、 この種のテクニックの経験値を高めるのに大いに役立つだろう。 + +04:12.970 --> 04:18.130 +そして、 ゼロから何かを作り上げる練習をしてもらいます。 + +04:18.130 --> 04:22.030 +それをお見せするために、 次のビデオでJupyterLabに戻ります。 diff --git a/week5/community-contributions/subtitles/srts/60620397/ko_KR.srt b/week5/community-contributions/subtitles/srts/60620397/ko_KR.srt new file mode 100755 index 0000000..b54bed3 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60620397/ko_KR.srt @@ -0,0 +1,187 @@ +WEBVTT + +00:00.080 --> 00:07.220 +정말 멋진 결과예요 첫째 주가 끝나가는데 흥미진진한 프로젝트를 + +00:07.250 --> 00:09.980 +완성했잖아요 + +00:10.010 --> 00:12.350 +물론 어려운 점도 있죠 + +00:12.350 --> 00:14.000 +여기서부터가 재미있어요 + +00:14.030 --> 00:18.860 +우선, 어려운 점이 몇 가지 있습니다 이 프로젝트를 더 좋게 만들 수 있는 방법입니다 + +00:18.860 --> 00:20.990 +조금 전에 말씀드린 것과 비슷하죠 + +00:20.990 --> 00:27.350 +그 후에는 숙제를 내주는 실습이 있습니다 처음부터 다 + +00:27.350 --> 00:29.180 +만들어야 하죠 + +00:29.180 --> 00:33.230 +물론 준비되시면 
해결책을 제공하겠지만 필요 없을 것 같네요 + +00:33.230 --> 00:34.610 +이게 있으니까요 + +00:34.610 --> 00:36.230 +과제부터 시작하죠 + +00:36.230 --> 00:42.650 +먼저, 이미 있는 브로슈어 제작기를 만들 때 LMS에 두 번 + +00:42.680 --> 00:44.600 +전화를 걸었어요 + +00:44.600 --> 00:51.830 +첫 번째 호출은 원샷 프롬프트라고 설명했어요 어떻게 응답해야 하는지에 대한 JSON 예제를 보여드렸거든요 + +00:51.860 --> 00:57.530 +아까도 언급했지만 다중 샷 프롬프트라는 표현이 있습니다 여러 개의 + +00:57.530 --> 01:00.110 +예제를 제공하는 것을 말하죠 + +01:00.110 --> 01:05.090 +그게 제가 여러분께 원하는 겁니다 이걸 확장해 멀티 샷 프롬핑으로 진정한 멀티 샷 프롬핑이 + +01:05.090 --> 01:06.350 +되게 하는 거죠 + +01:06.350 --> 01:12.770 +이렇게 하는 거죠 이 링크들을 보여드리면 이렇게 답장하고 JSON을 + +01:12.770 --> 01:19.670 +주는 거예요 어디서 관련 링크만 선택했는지 경로를 어떻게 정정했는지를 + +01:19.670 --> 01:21.650 +분명히 알려주죠 + +01:21.650 --> 01:22.880 +그러니 그렇게 해보세요 + +01:22.880 --> 01:29.090 +예제를 한두 개 더 넣으세요 MultiOut 프롬프트 사용이 될 테니까요 + +01:29.090 --> 01:31.610 +멀티샷 프롬핑을 한 이력서에 추가할 수 있어요 + +01:31.640 --> 01:36.920 +물론 농담이지만 시도해 본 중요한 기술이에요 + +01:36.920 --> 01:43.700 +하지만 이 방법이 유용한 이유는 LLM에 보내는 전화의 품질과 신뢰도를 높일 수 있기 + +01:43.700 --> 01:44.900 +때문이죠 + +01:44.930 --> 01:53.870 +예제를 추가하면 다음 토큰과 무엇을 예측할지 더욱 확실하게 + +01:53.870 --> 01:56.480 +알 수 있죠 + +01:56.480 --> 01:58.940 +좋은 운동이 되겠어요 + +01:58.940 --> 02:03.500 +이 LLM 호출에 더 견고한 걸 추가하는 좋은 방법이죠 + +02:03.500 --> 02:05.690 +코스를 따라 계속 진행할 거예요 + +02:05.690 --> 02:08.810 +여러분의 프로젝트에 포함하고 싶은 것이죠 + +02:08.810 --> 02:13.570 +그러니 그걸 시도해 보세요 그걸 시도해 보세요, 멀티샷이요 + +02:14.560 --> 02:18.700 +제가 말씀드린 것처럼 과정 막바지에는 구조화 출력이라는 기술을 + +02:18.700 --> 02:20.050 +사용할 텐데요 + +02:20.050 --> 02:23.620 +LLM이 특정한 방식으로 반응하도록 강요하는 거죠 + +02:23.620 --> 02:30.160 +그래도 멀티샷 홍보가 도움이 돼요 여러분이 원하는 것에 대해 추가적인 컨텍스트와 개성을 + +02:30.160 --> 02:31.330 +부여하죠 + +02:32.050 --> 02:37.000 +두 번째 통화에서 할 수 있는 일은 책자를 생성하는 거예요 + +02:37.000 --> 02:41.740 +시스템 프롬프트에서 비꼬거나 빈정대는 것을 만드는 것에 대해 이미 얘기했었죠 + +02:41.740 --> 02:46.900 +시스템 프롬프트를 이용해 다른 언어로 생성할 수 있다고 말씀드렸죠 스페인어 + +02:46.990 --> 02:48.010 +같은 거요 + +02:48.010 --> 02:53.290 +다른 방법도 있어요 훨씬 더 흥미로울 거예요 + +02:53.290 --> 02:55.450 +Get it로 더 좋은 결과가 나올지는 모르겠지만요 + +02:55.450 --> 03:00.880 
+영어로 책자를 만들고 두 번째 전화를 걸었어요 + +03:01.030 --> 03:08.080 +사실 LLM에 거는 세 번째 전화예요 책자를 영어로 스페인어로 번역하는 거죠 + +03:08.140 --> 03:14.710 +여러 면에서 이런 식으로 한다고 더 나을 건 없을 거예요 + +03:14.710 --> 03:20.080 +하지만 그렇게 하는 연습에 들어가면 번역을 목적으로 특별히 훈련된 모델을 + +03:20.080 --> 03:26.440 +사용할 수 있다고 상상할 수 있어요 그 모델을 그 목적만을 위해 사용할 수 있죠 그럼 더 + +03:26.470 --> 03:31.900 +나은 결과를 확실히 얻을 수 있도록 합니다 브로슈어 생성을 위해 훈련된 + +03:31.900 --> 03:36.730 +모델 하나와 번역을 위해 훈련된 다른 모델을 이용해서요 + +03:36.730 --> 03:41.950 +두 가지 목적으로 GPT 4, 또는 미니를 사용할 테지만 + +03:41.950 --> 03:45.970 +다중 호출을 하는 경험을 확실히 제공하죠 + +03:45.970 --> 03:50.350 +에이전트 인공지능의 미니어처 구현이죠 + +03:50.380 --> 03:55.900 +습관을 들이기 좋은 방법이죠 시스템 프롬프트만 사용해도 1배시 안에 + +03:55.900 --> 03:58.210 +모두 할 수 있어요 get it + +03:58.360 --> 04:00.370 +어쨌든 그런 걸 해야 해요 + +04:00.370 --> 04:08.680 +이런 기술을 익힌 경험과 자신감을 쌓는 데 도움이 될 거예요 앞으로 몇 + +04:08.710 --> 04:12.970 +주간 아주 유용하게 쓰일 거예요 + +04:12.970 --> 04:18.130 +그런 다음 실습을 할 거예요 처음부터 뭔가를 만드는 거죠 + +04:18.130 --> 04:22.030 +그걸 보여드리기 위해 주피터랩의 다음 비디오를 보여드리죠 diff --git a/week5/community-contributions/subtitles/srts/60622463/en_US.srt b/week5/community-contributions/subtitles/srts/60622463/en_US.srt new file mode 100755 index 0000000..95965d8 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60622463/en_US.srt @@ -0,0 +1,328 @@ +WEBVTT + +00:00.020 --> 00:04.280 +In this video, we're going to set up a full data science environment for Mac users. + +00:04.280 --> 00:06.110 +In the next video, we'll do PC users. + +00:06.110 --> 00:09.080 +So if you're a PC person, please skip to the next. + +00:09.080 --> 00:12.200 +You can you can listen in here if you wish, but otherwise I will see you next time. + +00:12.290 --> 00:16.310 +Okay, so my Mac people now I'm a mac person myself. + +00:16.310 --> 00:19.520 +I've always owned Macs, so you are my home crowd. + +00:19.580 --> 00:21.740 +You are definitely my favorites. + +00:21.770 --> 00:24.050 +It's not like I'm going to say the same thing to the other guys. 
+ +00:24.080 --> 00:24.950 +Or maybe I am. + +00:24.950 --> 00:33.830 +Anyways, this is looking right now in a Chrome browser at the repo LM engineering in GitHub, and this + +00:33.830 --> 00:39.440 +link should hopefully be plastered all over the place, including in the lecture notes for this lecture. + +00:39.530 --> 00:45.170 +Uh, but please come here and look at the GitHub repo, uh, to see the code that we have. + +00:45.170 --> 00:49.520 +Uh, what you'll also see if you scroll down is the contents of the Readme file, which is where I have + +00:49.520 --> 00:51.890 +laid out what needs to be done to set up your environment. + +00:51.890 --> 00:57.500 +And I hope that I've done a decent job here of laying things out step by step and being clear on what + +00:57.530 --> 00:59.750 +action to take, if anything is to go wrong. + +00:59.790 --> 01:04.200 +so hopefully it's good, but if you have any corrections to make to it. + +01:04.410 --> 01:09.360 +As some people already have, for which I'm very grateful, please do let me know and I'll do that. + +01:09.390 --> 01:09.960 +Okay. + +01:09.960 --> 01:13.470 +But for now, what you're going to do is press this green button here code. + +01:13.470 --> 01:16.230 +And then you can pick either of these two. + +01:16.260 --> 01:21.990 +But you're going to pick this copy button here, which is going to copy the URL that identifies this + +01:21.990 --> 01:23.520 +repo into the clipboard. + +01:23.520 --> 01:24.990 +And that is done. + +01:25.020 --> 01:25.680 +All right. + +01:25.710 --> 01:28.500 +We're now going to bring up a terminal window where we're going to go next. + +01:28.500 --> 01:33.030 +Now this some of this you can do using the max finder windows if you prefer that. + +01:33.120 --> 01:36.930 +But I'm more comfortable in terminals and I imagine many of you are too. + +01:37.170 --> 01:39.300 +So I'm now in my home directory. 
+ +01:39.330 --> 01:45.150 +Many of you will have a projects directory set up, which is where you manage your your projects. + +01:45.180 --> 01:46.860 +At the moment I don't have one set up. + +01:46.860 --> 01:48.810 +If I do CD projects, there's nothing there. + +01:48.810 --> 01:53.220 +So I'm going to make a directory projects and go into it. + +01:53.640 --> 01:54.750 +Here we go. + +01:55.050 --> 01:59.330 +And then the next thing I'm going to do when I'm inside the projects folder is I'm going to type git + +01:59.330 --> 02:05.540 +clone, I want to clone a repo and paste in the name of the repo right there, and when I press enter, + +02:05.540 --> 02:06.770 +it does its thing. + +02:06.770 --> 02:10.670 +And I now have locally an LM engineering folder. + +02:10.700 --> 02:14.000 +I'm going to go into it and there is all of our code. + +02:14.150 --> 02:19.580 +It's worth pointing out that where we are right now in the LM engineering folder is what people sometimes + +02:19.580 --> 02:22.850 +call being in the project root directory. + +02:22.970 --> 02:25.940 +That's, that's the name for for where we are right now for this project. + +02:25.970 --> 02:28.610 +The project root directory LM engineering. + +02:28.670 --> 02:29.540 +All right. + +02:29.540 --> 02:33.890 +So the next thing to do is for us to look at installing Anaconda. + +02:33.890 --> 02:35.630 +If you haven't installed it before. + +02:35.630 --> 02:42.350 +And the Readme has a direct link to the installation page at anaconda.com, which will allow you to + +02:42.380 --> 02:44.420 +set it up for Mac OS. + +02:44.540 --> 02:48.530 +There's a simple set of screens that will go through and configure it. + +02:48.530 --> 02:50.270 +I'm not going to do it because I've already done it for me. + +02:50.270 --> 02:54.680 +But but it is very clearly step by step as you will see it here. 
+ +02:54.680 --> 03:01.010 +And before you know it, you'll have Anaconda on your box when you have anaconda on your box. + +03:01.010 --> 03:02.750 +Back to the terminal window. + +03:02.750 --> 03:03.560 +Here we are. + +03:03.590 --> 03:09.650 +We simply call a command that tells Anaconda to do everything to build a whole data science environment + +03:09.650 --> 03:18.080 +for us, and that command is conda env for environment create minus f, which means we're going to specify + +03:18.080 --> 03:21.260 +a file which has the full description of the environment. + +03:21.260 --> 03:26.630 +And that name of that file is environment.yml which you can see right here. + +03:26.630 --> 03:29.630 +That is the file that defines this environment. + +03:29.630 --> 03:32.210 +And when you've done that you press enter. + +03:32.690 --> 03:40.220 +Now what's going to happen now is that Anaconda is going to connect to its server, find out everything + +03:40.220 --> 03:41.090 +that we need to do. + +03:41.090 --> 03:46.610 +And it does what it's called solving environment, which means it figures out what versions of each + +03:46.610 --> 03:52.970 +of the different packages we need are going to be the right versions, that we need to have a fully + +03:52.970 --> 03:55.400 +compatible, full specked environment. + +03:55.400 --> 03:58.780 +And what you saw it just doing there with all of that stuff going on. + +03:58.780 --> 04:05.590 +Uh, was it, uh, feverishly creating the various, downloading the various packages and building them? + +04:05.620 --> 04:09.400 +Now, that might take a fair bit longer for you, because it's fast for me, because I've already done + +04:09.400 --> 04:13.000 +it and it's cached locally the first time it needs to download it. + +04:13.000 --> 04:16.300 +For me, I have a pretty good internet connection, and it took about five minutes. 
+ +04:16.300 --> 04:21.760 +The first time I did this, someone reported that it was taking more than 20 minutes for them. + +04:21.760 --> 04:25.510 +Uh, which is of course, uh, yeah, a lot of time to wait. + +04:25.510 --> 04:30.940 +So if it's taking that long, then that might be if you have a slower internet connection that might + +04:30.940 --> 04:31.690 +be expected. + +04:31.690 --> 04:36.370 +But if it takes much longer than 20 minutes and you have a good internet connection, then something's + +04:36.370 --> 04:37.660 +not working with Anaconda. + +04:37.660 --> 04:43.900 +And I would cancel it with command C, and instead we'll use the other approach, the virtual environment + +04:43.900 --> 04:45.280 +approach instead. + +04:45.550 --> 04:52.540 +So right now, Anaconda is busy doing what it calls executing the transaction, which is essentially + +04:52.660 --> 04:58.090 +after it's figured out what versions of what we need, It's building that that whole environment. + +04:58.090 --> 05:02.560 +And then at the end, for those that know about this, there are also some packages that I pip install + +05:02.590 --> 05:05.980 +because they're not they don't yet have conda versions. + +05:06.130 --> 05:09.940 +So there are some packages that get pip installed at the end of it. + +05:09.940 --> 05:14.500 +But when you see installing pip install you are near the very end and it's done. + +05:14.500 --> 05:15.550 +So I didn't lie. + +05:15.550 --> 05:17.500 +It doesn't need to take a long amount of time. + +05:17.530 --> 05:21.340 +It's pretty fast and all of that is done. + +05:21.490 --> 05:28.600 +We need to activate our new environment, which means we need to say we want that to be the live environment + +05:28.600 --> 05:30.190 +that's currently being used. + +05:30.220 --> 05:35.470 +If you look at my my cursor right now, you'll see the word base is to the left here. 
+ +05:35.470 --> 05:40.810 +And that means that we're not currently running in an in an anaconda environment right at the moment. + +05:40.810 --> 05:48.010 +In order to use our LM environment, we the name of the environment is LMS and we're going to type conda + +05:48.130 --> 05:51.460 +activate LMS. + +05:51.460 --> 05:57.790 +And that is all it takes to tell Anaconda we want to be running in this environment with the same version + +05:57.790 --> 06:01.150 +of Python, the same version of all of these packages? + +06:01.150 --> 06:03.580 +That means that we are completely consistent. + +06:03.580 --> 06:07.600 +That happened immediately, and you'll see that LMS is now written on the left. + +06:07.600 --> 06:13.540 +And if you're seeing that, that is your clue that it's worked and that you are now running the right + +06:13.540 --> 06:14.950 +version of Anaconda. + +06:16.180 --> 06:20.800 +And as a final step here, I type in this command JupyterLab. + +06:20.800 --> 06:26.620 +And JupyterLab is the command that says, I want to launch this data science environment called JupyterLab, + +06:26.620 --> 06:29.830 +which allows me to work very interactively with code. + +06:29.830 --> 06:35.710 +And when I run that, it thinks for hopefully just a moment while it considers the task at hand. + +06:35.710 --> 06:39.550 +And then it will launch JupyterLab in a new screen. + +06:39.550 --> 06:42.850 +And what you will probably see is something a bit like this. + +06:43.270 --> 06:43.930 +All right. + +06:43.930 --> 06:47.380 +With that, I'm now going to pause for you guys. + +06:47.380 --> 06:53.710 +I'm going to record a video for the PC users, and I'll meet you back in two videos time. + +06:53.740 --> 06:54.610 +See you there. 
diff --git a/week5/community-contributions/subtitles/srts/60622463/ja_JP.srt b/week5/community-contributions/subtitles/srts/60622463/ja_JP.srt new file mode 100755 index 0000000..40dea35 --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60622463/ja_JP.srt @@ -0,0 +1,274 @@ +WEBVTT + +00:00.020 --> 00:04.280 +このビデオでは、 Macユーザーのために完全なデータサイエンス環境をセットアップする。 + +00:04.280 --> 00:06.110 +次のビデオでは、 PCユーザーを取り上げます。 + +00:06.110 --> 00:09.080 +PC派の方は次へお進みください。 + +00:09.080 --> 00:12.200 +お望みであれば、 ここで聞くこともできますが、 そうでなければ、 また次回お会いしましょう。 + +00:12.290 --> 00:16.310 +さて、 私のマックの人たちは、 今では私もマック派だ。 + +00:16.310 --> 00:19.520 +僕はいつもマックを所有しているから、 君たちは僕のホームグラウンドなんだ。 + +00:19.580 --> 00:21.740 +君たちは間違いなく僕のお気に入りだ。 + +00:21.770 --> 00:24.050 +他の選手にも同じことを言うつもりはない。 + +00:24.080 --> 00:24.950 +あるいは、 私がそうなのかもしれない。 + +00:24.950 --> 00:33.830 +とにかく、 これは今ChromeブラウザーでGitHubのLMエンジニアリングのレポを見ているところだ。 このリンクは、 + +00:33.830 --> 00:39.440 +この講義の講義ノートを含め、 あちこちに貼られているはずだ。 + +00:39.530 --> 00:45.170 +でも、 ここに来て、 GitHubのレポを見てください。 + +00:45.170 --> 00:51.890 +下にスクロールすると、 Readmeファイルの内容が表示されます。 ここには、 あなたの環境をセットアップするために必要なことが書かれています。 + +00:51.890 --> 00:59.750 +そして、 ステップ・バイ・ステップで物事を整理し、 何か問題が起きた場合にどのような行動を取るべきかを明確にすることで、 ここできちんとした仕事ができたことを願っている。 + +00:59.790 --> 01:04.200 +うまくいけばいいんだけど、 もし何か訂正するところがあれば言ってくれ。 + +01:04.410 --> 01:09.360 +すでに何人かの方がそうしてくださっていて、 とても感謝しています。 + +01:09.390 --> 01:09.960 +オーケー。 + +01:09.960 --> 01:13.470 +でも今は、 この緑色のボタンのコードを押してください。 + +01:13.470 --> 01:16.230 +そして、 この2つのどちらかを選べばいい。 + +01:16.260 --> 01:23.520 +このコピーボタンを選ぶと、 このレポを特定するURLがクリップボードにコピーされる。 + +01:23.520 --> 01:24.990 +そして、 それは終わった。 + +01:25.020 --> 01:25.680 +分かった。 + +01:25.710 --> 01:28.500 +次にターミナル・ウィンドウを表示させる。 + +01:28.500 --> 01:33.030 +もしそれがお望みなら、 MAXファインダーウインドウを使うこともできる。 + +01:33.120 --> 01:36.930 +でも、 私は端末の方が快適だし、 皆さんの多くもそうだと思う。 + +01:37.170 --> 01:39.300 +というわけで、 今はホームディレクトリにいる。 + +01:39.330 --> 01:45.150 +多くの人はprojectsディレクトリを設定し、 そこでプロジェクトを管理していることだろう。 + +01:45.180 --> 01:46.860 +今のところ、 セットアップはしていない。 + 
+01:46.860 --> 01:48.810 +CDプロジェクトをやっても何もない。 + +01:48.810 --> 01:53.220 +だから、 ディレクトリー・プロジェクトを作って、 その中に入っていくつもりだ。 + +01:53.640 --> 01:54.750 +さあ、 始めよう。 + +01:55.050 --> 01:59.330 +そして、 projectsフォルダの中にいるときに次にすることは、 git + +01:59.330 --> 02:06.770 +cloneと入力し、 レポをクローンしたいので、 そこにレポの名前を貼り付け、 Enterを押すと、 そのとおりに実行される。 + +02:06.770 --> 02:10.670 +そして今、 私はローカルにLMエンジニアリングのフォルダを持っている。 + +02:10.700 --> 02:14.000 +その中に入っていくと、 私たちのすべてのコードがある。 + +02:14.150 --> 02:22.850 +LMエンジニアリング・フォルダーは、 プロジェクトのルート・ディレクトリーと呼ばれることもある。 + +02:22.970 --> 02:25.940 +それが、 このプロジェクトで僕らが今いる場所の名前なんだ。 + +02:25.970 --> 02:28.610 +プロジェクトのルートディレクトリ LM engineering。 + +02:28.670 --> 02:29.540 +分かった。 + +02:29.540 --> 02:33.890 +そこで次にすべきことは、 アナコンダのインストールを検討することだ。 + +02:33.890 --> 02:35.630 +まだインストールしたことがない場合は + +02:35.630 --> 02:44.420 +そしてReadmeには、 anacondaのインストールページへの直接リンクがある。 comでMac OS用に設定できる。 + +02:44.540 --> 02:48.530 +簡単な画面で設定できる。 + +02:48.530 --> 02:50.270 +もう自分のためにやったことだから、 やるつもりはない。 + +02:50.270 --> 02:54.680 +でも、 ここにあるように、 ステップ・バイ・ステップで非常にわかりやすい。 + +02:54.680 --> 03:01.010 +そしていつの間にか、 アナコンダがあなたのボックスにあれば、 アナコンダがあなたのボックスにあることになる。 + +03:01.010 --> 03:02.750 +ターミナル・ウィンドウに戻る。 + +03:02.750 --> 03:03.560 +さあ、 着いたぞ。 + +03:03.590 --> 03:09.650 +このコマンドはconda envで、 + +03:09.650 --> 03:21.260 +environment createからfを引いたものだ。 + +03:21.260 --> 03:26.630 +そして、 そのファイルの名前がenvironmentだ。 ymlはここにある。 + +03:26.630 --> 03:29.630 +これがこの環境を定義するファイルだ。 + +03:29.630 --> 03:32.210 +そうしたらエンターキーを押す。 + +03:32.690 --> 03:41.090 +さて、 これから起こることは、 アナコンダがサーバーに接続し、 必要なことをすべて調べるということだ。 + +03:41.090 --> 03:46.610 +つまり、 完全に互換性のある、 フルスペックの環境を手に入れるために必要な、 + +03:46.610 --> 03:55.400 +さまざまなパッケージのバージョンを割り出すのだ。 + +03:55.400 --> 03:58.780 +そして、 あなたが見たのは、 あれだけのことが起こっている中で、 ただそこでやっていることだった。 + +03:58.780 --> 04:05.590 +いろいろなパッケージをダウンロードし、 ビルドするのに熱中していたんですか? 
+ +04:05.620 --> 04:13.000 +私にとっては高速で、 最初にダウンロードする必要があるときにローカルにキャッシュされるからだ。 + +04:13.000 --> 04:16.300 +私の場合、 インターネット接続がかなり良いので、 5分ほどで終わった。 + +04:16.300 --> 04:21.760 +初めてこれをやったとき、 20分以上かかったという報告があった。 + +04:21.760 --> 04:25.510 +ええと、 それはもちろん、 ええと、 待つには多くの時間が必要です。 + +04:25.510 --> 04:31.690 +だから、 もし時間がかかっているのなら、 それはあなたのインターネット接続が予想以上に遅いのかもしれない。 + +04:31.690 --> 04:37.660 +しかし、 20分以上かかり、 インターネット接続が良好であれば、 アナコンダで何かが機能していないことになる。 + +04:37.660 --> 04:45.280 +そしてコマンドCでそれをキャンセルし、 代わりにもうひとつのアプローチ、 仮想環境を使うことにする。 + +04:45.550 --> 04:52.540 +だから今、 アナコンダはトランザクションの実行と呼ばれることに忙しくしている。 それは基本的に、 + +04:52.660 --> 04:58.090 +必要なもののバージョンを把握した後、 環境全体を構築することだ。 + +04:58.090 --> 05:05.980 +そして最後に、 このことをご存知の方のために、 condaのバージョンがまだないのでpipでインストールするパッケージもあります。 + +05:06.130 --> 05:09.940 +そのため、 最後にpipがインストールされるパッケージがいくつかある。 + +05:09.940 --> 05:14.500 +しかし、 pip installをインストールしているところを見ると、 もう終わりに近づいているようだ。 + +05:14.500 --> 05:15.550 +だから嘘はついていない。 + +05:15.550 --> 05:17.500 +長い時間をかける必要はない。 + +05:17.530 --> 05:21.340 +かなり速いし、 すべてが終わる。 + +05:21.490 --> 05:30.190 +新しい環境をアクティベートする必要がある。 つまり、 現在使われているライブ環境をアクティベートする必要がある。 + +05:30.220 --> 05:35.470 +今、 私のカーソルを見ると、 ベースという言葉が左側にあるのがわかるだろう。 + +05:35.470 --> 05:40.810 +つまり、 現時点ではアナコンダ環境では動いていないということだ。 + +05:40.810 --> 05:48.010 +LM環境を使うために、 環境名をLMSとし、 conda activate + +05:48.130 --> 05:51.460 +LMSと入力する。 + +05:51.460 --> 06:01.150 +これだけで、 Anaconda に、 同じバージョンの Python、 同じバージョンのパッケージで、 この環境を実行したいと伝えることができるのですか? 
+ +06:01.150 --> 06:03.580 +つまり、 私たちは完全に一貫しているということだ。 + +06:03.580 --> 06:07.600 +すぐにそうなったので、 左側にLMSと書かれているのがわかるだろう。 + +06:07.600 --> 06:14.950 +もしそれが表示されたら、 それがうまくいったということであり、 正しいバージョンの Anaconda を実行しているということだ。 + +06:16.180 --> 06:20.800 +そして最後のステップとして、 JupyterLabというコマンドを入力する。 + +06:20.800 --> 06:29.830 +そしてJupyterLabは、 JupyterLabというデータサイエンス環境を起動し、 コードを使って非常にインタラクティブに作業することを可能にするコマンドだ。 + +06:29.830 --> 06:35.710 +そして、 それを実行すると、 目の前のタスクを考える間、 うまくいけばほんの一瞬だけ考えてくれる。 + +06:35.710 --> 06:39.550 +そして新しい画面でJupyterLabを起動する。 + +06:39.550 --> 06:42.850 +そして、 あなたが目にするのは、 おそらくこのようなものだろう。 + +06:43.270 --> 06:43.930 +分かった。 + +06:43.930 --> 06:47.380 +ということで、 ここで一旦中断します。 + +06:47.380 --> 06:53.710 +PCユーザーのためにビデオを録画するから、 2本後にまた会おう。 + +06:53.740 --> 06:54.610 +そこで会おう diff --git a/week5/community-contributions/subtitles/srts/60622463/ko_KR.srt b/week5/community-contributions/subtitles/srts/60622463/ko_KR.srt new file mode 100755 index 0000000..79c431a --- /dev/null +++ b/week5/community-contributions/subtitles/srts/60622463/ko_KR.srt @@ -0,0 +1,325 @@ +WEBVTT + +00:00.020 --> 00:04.280 +이 비디오에선 Mac 사용자를 위한 데이터 과학 환경을 설정할 거예요 + +00:04.280 --> 00:06.110 +다음 영상에서는 PC 사용자를 다룰 거예요 + +00:06.110 --> 00:09.080 +PC를 지지하신다면 다음 질문으로 넘어가세요 + +00:09.080 --> 00:12.200 +듣고 싶으면 여기서 들어요 아니면 다음에 봐요 + +00:12.290 --> 00:16.310 +좋아요, 맥 피플이∙∙∙ 저도 맥 피플이에요 + +00:16.310 --> 00:19.520 +전 항상 맥스를 운영해서 여러분이 제 단골이에요 + +00:19.580 --> 00:21.740 +제가 제일 좋아하는 분들이에요 + +00:21.770 --> 00:24.050 +다른 남자들한테도 똑같이 말할 건 아니잖아요 + +00:24.080 --> 00:24.950 +그럴지도 모르죠 + +00:24.950 --> 00:33.830 +깃허브의 LM 엔지니어링 압류 브라우저에 나와 있어요 이 링크가 곳곳에 도배돼 + +00:33.830 --> 00:39.440 +있을 겁니다 이 강의의 강의 노트에도요 + +00:39.530 --> 00:45.170 +여기 와서 깃허브 압류 좀 보세요 우리가 가진 코드를 보시라고요 + +00:45.170 --> 00:49.520 +스크롤을 내리면 리드메 파일의 내용도 보입니다 환경을 설정하기 위해 + +00:49.520 --> 00:51.890 +무엇이 필요한지 표시한 곳이죠 + +00:51.890 --> 00:57.500 +제가 잘했길 바랍니다 모든 걸 단계별로 잘 배치했고 뭔가 잘못되면 어떤 조치를 + +00:57.530 --> 00:59.750 +취할지 명확히 했으니까요 + +00:59.790 --> 01:04.200 +잘 나왔으면 좋겠지만 수정할 부분이 있으면 하세요 + 
+01:04.410 --> 01:09.360 +이미 그런 분들이 계시니 정말 감사하게 생각해요 저한테 알려 주시면 알려 드릴게요 + +01:09.390 --> 01:09.960 +네 + +01:09.960 --> 01:13.470 +하지만 지금은 이 코드 녹색 버튼을 누르세요 + +01:13.470 --> 01:16.230 +이 둘 중 하나를 고르세요 + +01:16.260 --> 01:21.990 +여기 복사 버튼을 선택하면 이 압류를 식별하는 URL을 클립보드로 + +01:21.990 --> 01:23.520 +복사하죠 + +01:23.520 --> 01:24.990 +다 됐어요 + +01:25.020 --> 01:25.680 +좋아요 + +01:25.710 --> 01:28.500 +이제 터미널 창을 불러오겠습니다 다음에 갈 곳이죠 + +01:28.500 --> 01:33.030 +이것 중 일부는 최대 파인더 창을 이용해 할 수 있어요 그걸 선호한다면요 + +01:33.120 --> 01:36.930 +하지만 전 터미널이 더 편합니다 여러분도 마찬가지겠죠 + +01:37.170 --> 01:39.300 +이제 홈 디렉터리에 들어왔어요 + +01:39.330 --> 01:45.150 +많은 분들이 프로젝트 디렉터리 셋업을 갖고 계실 겁니다 여러분의 프로젝트를 관리하는 곳이죠 + +01:45.180 --> 01:46.860 +지금은 설치가 안 됐어요 + +01:46.860 --> 01:48.810 +CD 프로젝트를 하면 아무것도 없어요 + +01:48.810 --> 01:53.220 +디렉터리 프로젝트를 만들고 들어가 볼게요. + +01:53.640 --> 01:54.750 +시작할게요 + +01:55.050 --> 01:59.330 +프로젝트 폴더에서 다음으로 할 일은 git 클론을 입력하는 + +01:59.330 --> 02:05.540 +것입니다 압류 깃을 복제하고 압류 깃 이름을 붙여넣습니다 엔터를 누르면 알아서 + +02:05.540 --> 02:06.770 +작동하죠 + +02:06.770 --> 02:10.670 +이제 LM 엔지니어링 폴더가 로컬에 있어요 + +02:10.700 --> 02:14.000 +들어가 보면 모든 코드가 있어요 + +02:14.150 --> 02:19.580 +지금 우리가 있는 LM 엔지니어링 폴더는 프로젝트 루트 디렉터리에 + +02:19.580 --> 02:22.850 +있다고 사람들이 종종 부르는 곳이죠 + +02:22.970 --> 02:25.940 +그게 이 프로젝트의 현재 위치의 이름이에요 + +02:25.970 --> 02:28.610 +프로젝트 루트 디렉터리 LM 엔지니어링이죠 + +02:28.670 --> 02:29.540 +좋아요 + +02:29.540 --> 02:33.890 +다음으로 할 일은 아나콘다 설치예요 + +02:33.890 --> 02:35.630 +처음 설치하는 거라면요 + +02:35.630 --> 02:42.350 +리드미에는 아나콘다의 설치 페이지로 직접 링크가 있어요 Mac OS를 설정할 수 + +02:42.380 --> 02:44.420 +있게 해주죠 + +02:44.540 --> 02:48.530 +구성하는 간단한 화면 모음이 있어요 + +02:48.530 --> 02:50.270 +안 할 거예요 이미 해 봤으니까요 + +02:50.270 --> 02:54.680 +하지만 보시다시피 단계별로 아주 명확해요 + +02:54.680 --> 03:01.010 +그럼 상자에 아나콘다가 붙어 있을 거예요 상자에 아나콘다가 붙어 있을 때요 + +03:01.010 --> 03:02.750 +터미널 창으로 돌아가죠 + +03:02.750 --> 03:03.560 +여기예요 + +03:03.590 --> 03:09.650 +아나콘다에게 데이터 과학 환경을 구축하도록 모든 걸 지시하는 명령을 + +03:09.650 --> 03:18.080 +호출합니다 환경을 위한 콘다 부럽기 때문이죠 생성하기 f는 환경에 대한 완전한 설명이 있는 + 
+03:18.080 --> 03:21.260 +파일을 지정한다는 뜻이에요 + +03:21.260 --> 03:26.630 +저 파일의 이름은 환경이에요 여기서 볼 수 있는 yml이죠 + +03:26.630 --> 03:29.630 +이 환경을 정의하는 파일이죠 + +03:29.630 --> 03:32.210 +다 됐으면 엔터키를 누르세요 + +03:32.690 --> 03:41.090 +이제 아나콘다가 서버에 연결할 겁니다 우리가 해야 할 일을 모두 알아내죠 + +03:41.090 --> 03:46.610 +해결 환경이라는 걸 합니다 필요한 패키지 각각의 버전이 올바른 + +03:46.610 --> 03:52.970 +버전인지 알아내는 거죠 완전히 호환 가능하고 완전히 지정된 환경이어야 + +03:52.970 --> 03:55.400 +하는 거요 + +03:55.400 --> 03:58.780 +이 모든 게 일어나는 걸 보셨을 거예요 + +03:58.780 --> 04:05.590 +바쁘게 다양한 걸 만들고 다양한 패키지를 다운로드해서 구축했나요? + +04:05.620 --> 04:09.400 +비트는 시간이 좀 더 걸릴 수도 있어요 전 이미 해봤거든요 + +04:09.400 --> 04:13.000 +처음 다운로드 할 때 로컬에 캐시돼 있어요 + +04:13.000 --> 04:16.300 +저는 인터넷 연결이 잘돼서 5분 정도 걸렸어요 + +04:16.300 --> 04:21.760 +처음 이걸 했을 때 20분 이상 걸린다는 보고를 받았어요 + +04:21.760 --> 04:25.510 +물론 기다릴 시간이 많다는 뜻이죠 + +04:25.510 --> 04:30.940 +그렇게 오래 걸린다면 예상되는 느린 인터넷 연결이 있을 + +04:30.940 --> 04:31.690 +때죠 + +04:31.690 --> 04:36.370 +하지만 20분 이상 걸리는데 인터넷 연결이 잘 된다면 아나콘다에게 + +04:36.370 --> 04:37.660 +문제가 생긴 거죠 + +04:37.660 --> 04:43.900 +명령 C로 취소하고 다른 접근법을 사용합니다 가상 환경 + +04:43.900 --> 04:45.280 +접근법이죠 + +04:45.550 --> 04:52.540 +지금 아나콘다는 트랜잭션을 실행하는 데 한창입니다 기본적으로 필요한 버전의 + +04:52.660 --> 04:58.090 +어떤 것이 필요한지 알아낸 다음 전체 환경을 구축하죠 + +04:58.090 --> 05:02.560 +그리고 이걸 아시는 분들을 위해 제가 설치한 패키지도 있습니다 + +05:02.590 --> 05:05.980 +왜냐하면 아직 콘다 버전이 없거든요 + +05:06.130 --> 05:09.940 +끝에 get이 설치되는 패키지가 있어요 + +05:09.940 --> 05:14.500 +하지만 피프를 설치할 때는 거의 다 끝나가요 + +05:14.500 --> 05:15.550 +거짓말은 안 했어요 + +05:15.550 --> 05:17.500 +오래 걸릴 필요가 없어요 + +05:17.530 --> 05:21.340 +정말 빠르네요 다 끝났어요 + +05:21.490 --> 05:28.600 +새 환경을 활성화해야 합니다 현재 사용 중인 라이브 환경이 되어야 + +05:28.600 --> 05:30.190 +한다는 뜻이죠 + +05:30.220 --> 05:35.470 +지금 제 커서를 보시면 왼쪽에 있는 단어가 보이시죠 + +05:35.470 --> 05:40.810 +그 말은 현재 아나콘다 환경에서 실행되고 있지 않다는 거죠 + +05:40.810 --> 05:48.010 +LM 환경을 사용하기 위해 환경 이름은 LMS입니다 콘다가 LMS를 + +05:48.130 --> 05:51.460 +활성화한다고 입력하죠 + +05:51.460 --> 05:57.790 +아나콘다에게 이 환경에서 파이썬 버전을 실행하고 싶다고 말하는 데 필요한 게 그것뿐인가요? 
+ +05:57.790 --> 06:01.150 +패키지들도 다 같은 버전이고요? + +06:01.150 --> 06:03.580 +일관성이 있다는 뜻이죠 + +06:03.580 --> 06:07.600 +즉시 그렇게 됐죠 LMS가 왼쪽에 쓰인 게 보이시죠 + +06:07.600 --> 06:13.540 +그걸 보신다면 그게 성공했다는 단서고 올바른 버전의 아나콘다를 실행하고 + +06:13.540 --> 06:14.950 +있다는 거죠 + +06:16.180 --> 06:20.800 +마지막 단계로 command JupyterLab을 입력할게요 + +06:20.800 --> 06:26.620 +JupyterLab은 명령이에요 JupyterLab이라는 데이터 과학 환경을 실행하고 + +06:26.620 --> 06:29.830 +싶어요 코드와 상호 작용하며 작업할 수 있도록 해주죠 + +06:29.830 --> 06:35.710 +실행하면 잠시 생각하죠 당면한 작업을 고려하는 동안에요 + +06:35.710 --> 06:39.550 +그럼 새로운 화면에서 주피터랩을 시작할 거예요 + +06:39.550 --> 06:42.850 +아마 이런 비트를 보게 될 거예요 + +06:43.270 --> 06:43.930 +좋아요 + +06:43.930 --> 06:47.380 +이것과 함께 잠시 멈출게요 + +06:47.380 --> 06:53.710 +PC 사용자들을 위해 영상을 찍고 두 영상 후에 다시 만나요 + +06:53.740 --> 06:54.610 +거기서 봐요 From ec1d846e894eb2da820d57740db97ea00c63680b Mon Sep 17 00:00:00 2001 From: Ivo Brett Date: Wed, 8 Jan 2025 21:24:20 +0000 Subject: [PATCH 2/4] updated cells --- ...izing_subtitles_from_llm_engineering.ipynb | 2226 +++++++++-------- 1 file changed, 1117 insertions(+), 1109 deletions(-) diff --git a/week5/community-contributions/day3 - vectorizing_subtitles_from_llm_engineering.ipynb b/week5/community-contributions/day3 - vectorizing_subtitles_from_llm_engineering.ipynb index 5076f8f..5ce3eba 100644 --- a/week5/community-contributions/day3 - vectorizing_subtitles_from_llm_engineering.ipynb +++ b/week5/community-contributions/day3 - vectorizing_subtitles_from_llm_engineering.ipynb @@ -24,7 +24,7 @@ }, { "cell_type": "code", - "execution_count": 62, + "execution_count": 63, "id": "ba2779af-84ef-4227-9e9e-6eaf0df87e77", "metadata": {}, "outputs": [], @@ -39,7 +39,7 @@ }, { "cell_type": "code", - "execution_count": 61, + "execution_count": 64, "id": "802137aa-8a74-45e0-a487-d1974927d7ca", "metadata": {}, "outputs": [], @@ -59,7 +59,7 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 65, "id": "58c85082-e417-4708-9efe-81a5d55d1424", "metadata": {}, "outputs": [], @@ -72,7 +72,7 @@ }, { 
"cell_type": "code", - "execution_count": 42, + "execution_count": 66, "id": "ee78efcb-60fe-449e-a944-40bab26261af", "metadata": {}, "outputs": [], @@ -85,7 +85,7 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": 67, "id": "730711a9-6ffe-4eee-8f48-d6cfb7314905", "metadata": {}, "outputs": [], @@ -143,7 +143,7 @@ }, { "cell_type": "code", - "execution_count": 44, + "execution_count": 68, "id": "7310c9c8-03c1-4efc-a104-5e89aec6db1a", "metadata": {}, "outputs": [], @@ -154,7 +154,7 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": 69, "id": "cd06e02f-6d9b-44cc-a43d-e1faa8acc7bb", "metadata": {}, "outputs": [ @@ -164,7 +164,7 @@ "217" ] }, - "execution_count": 45, + "execution_count": 69, "metadata": {}, "output_type": "execute_result" } @@ -175,7 +175,7 @@ }, { "cell_type": "code", - "execution_count": 46, + "execution_count": 70, "id": "2c54b4b6-06da-463d-bee7-4dd456c2b887", "metadata": {}, "outputs": [ @@ -215,7 +215,7 @@ }, { "cell_type": "code", - "execution_count": 47, + "execution_count": 71, "id": "78998399-ac17-4e28-b15f-0b5f51e6ee23", "metadata": {}, "outputs": [], @@ -233,7 +233,7 @@ }, { "cell_type": "code", - "execution_count": 48, + "execution_count": 72, "id": "763e51ff-5787-4a56-8176-36b7c5796fe3", "metadata": {}, "outputs": [], @@ -246,7 +246,7 @@ }, { "cell_type": "code", - "execution_count": 49, + "execution_count": 73, "id": "99fe3a37-480f-4d55-be48-120588d5846b", "metadata": {}, "outputs": [ @@ -267,7 +267,7 @@ }, { "cell_type": "code", - "execution_count": 50, + "execution_count": 74, "id": "057868f6-51a6-4087-94d1-380145821550", "metadata": {}, "outputs": [ @@ -308,7 +308,7 @@ }, { "cell_type": "code", - "execution_count": 51, + "execution_count": 75, "id": "cfb855dc-1610-4aaf-8e5f-68c26ce640a5", "metadata": {}, "outputs": [], @@ -323,7 +323,7 @@ }, { "cell_type": "code", - "execution_count": 58, + "execution_count": 76, "id": "b98adf5e-d464-4bd2-9bdf-bc5b6770263b", "metadata": {}, "outputs": [], 
@@ -340,7 +340,7 @@ }, { "cell_type": "code", - "execution_count": 55, + "execution_count": 77, "id": "427149d5-e5d8-4abd-bb6f-7ef0333cca21", "metadata": {}, "outputs": [ @@ -798,442 +798,442 @@ ], "type": "scatter", "x": [ - -0.859648, - 7.3309765, - 0.21870197, - -13.03976, - -3.0766428, - 11.100553, - 4.6001115, - -9.268545, - -5.360921, - 1.8335935, - 2.2128375, - -1.7025363, - 2.7415411, - 1.6968822, - -9.437864, - -2.4456034, - 13.673174, - 9.69971, - 5.391895, - 0.9950481, - -2.6140966, - 14.227348, - 2.5340881, - -10.256354, - -7.6409054, - 2.7219393, - 1.1424255, - 1.6502428, - -6.957909, - 4.086808, - 8.104448, - -7.2092853, - 13.410249, - -2.9087114, - 2.019522, - 7.0692005, - -0.84805, - -9.599044, - -0.36659604, - 2.8821077, - -1.7564659, - 0.22077061, - 7.1004896, - -0.5071637, - -1.5469455, - 9.606234, - -7.6583476, - -8.758075, - 16.001204, - -0.45763963, - 12.072926, - 14.450202, - -7.893885, - -4.888164, - 7.238137, - -6.4890647, - -10.677237, - -6.450742, - 0.29829141, - 3.5972733, - 7.056694, - 3.3955274, - 4.1175003, - 2.2164605, - 3.3678567, - 10.912271, - 4.3282537, - -1.8016068, - -2.778665, - 0.33017898, - 3.1186757, - 8.368695, - 3.8920324, - 9.047157, - 4.2369857, - -13.133919, - 0.30549568, - 10.36587, - 2.0417519, - -4.207513, - -2.7341063, - 9.276742, - 3.7855272, - 2.2184367, - 9.518204, - -7.6228004, - 3.5007627, - 4.166524, - -6.947239, - -6.4718704, - 6.777542, - -1.643389, - -4.0581813, - 13.556551, - 3.738945, - -9.4638, - -7.085359, - -12.116256, - 1.8722422, - -1.235673, - 1.5310236, - 2.681954, - -1.2896698, - -3.3085613, - -3.5033119, - -7.8056912, - -6.380733, - 12.077981, - 9.891831, - -2.583847, - 0.049997784, - -7.109494, - -1.6533405, - -0.35486424, - 8.023757, - -1.5843254, - 4.68254, - 12.040059, - -4.070594, - 3.5485406, - 8.321888, - -10.936198, - -5.665428, - -3.9380574, - -1.2327232, - -2.4456801, - 5.2406054, - -0.036940902, - 3.5880437, - 10.343754, - 0.10399394, - -6.0591764, - 0.5472898, - -0.18098946, - 
12.552157, - 2.215009, - -2.0987718, - -4.3202305, - 10.194152, - -1.0280695, - 0.6394854, - -7.001653, - -2.6180403, - 0.5332797, - 6.908162, - -4.1370797, - 0.36955032, - 6.766898, - -5.599071, - -6.2765083, - -6.5416136, - -8.705647, - 8.097455, - 6.401871, - 10.086735, - -6.55865, - 13.3281975, - -11.958505, - 9.180207, - -10.071172, - -5.573983, - 1.7291324, - 3.2020307, - -9.81586, - 4.254864, - 13.542623, - 3.1633458, - 6.4809103, - 1.6912766, - -0.96716404, - -9.644825, - 4.948192, - -4.875502, - -2.7658813, - -7.0795684, - 6.3749175, - -0.2840374, - -13.407808, - 0.97872597, - -8.729023, - 9.891307, - -2.5329638, - -11.002493, - 0.6183121, - -10.363856, - 0.267183, - -8.229537, - -6.164332, - -7.064035, - -5.55934, - -11.237544, - -2.4159808, - -7.657407, - -0.47880024, - -4.861272, - 11.012814, - -5.3301964, - 4.517483, - -13.10771, - 8.053061, - 2.3658233, - -4.5009966, - 0.74033785, - 3.0659394, - 7.927173, - -5.8426704, - -7.692328, - -11.8179, - -10.170092, - -13.525137, - 7.471072, - -9.561237, - -7.660354, - 0.98921955, - -2.5871053, - 0.7735228, - 4.697858 + -12.589552, + 3.4522862, + 6.075746, + 7.942426, + -3.525712, + 4.1480594, + 4.6078315, + -1.7122985, + -1.6395565, + -9.307264, + -6.770974, + 1.4278501, + -3.795615, + -5.48206, + -4.170929, + 0.42981502, + -3.5235593, + 1.8772042, + 17.16095, + 15.35386, + -11.031532, + 15.838091, + 14.824762, + -2.4908643, + -4.1442113, + -6.1486583, + 14.927404, + -2.396536, + -3.8051388, + -6.8470283, + 7.2692485, + -3.5521216, + -2.7953513, + -3.2857506, + -5.7256823, + 9.390827, + -8.941686, + 8.362188, + -2.4580688, + -7.4087963, + -0.73915297, + -9.044852, + 4.499095, + 1.223194, + 0.6079307, + -2.3045015, + 9.307752, + 4.968605, + -3.0444636, + -13.019468, + -1.9913696, + 16.247093, + -6.6251817, + -3.236832, + 2.7420254, + 8.059585, + 5.8575497, + 1.3678622, + 14.408681, + -7.4271216, + 4.6005616, + -6.2227287, + -8.091358, + -1.0886598, + 3.9747384, + 0.32758102, + -5.358367, + 0.61464316, + 
-10.948633, + -13.510744, + -10.267108, + 3.5313623, + -4.744116, + 0.98348933, + 15.8871355, + 8.520779, + 12.316195, + 13.00314, + -7.271094, + -12.220864, + -1.1228861, + 8.195982, + 15.675435, + 3.5282235, + 2.7380142, + 3.0779696, + -7.539173, + 9.471518, + 2.180644, + 1.8750061, + 1.8318319, + -7.089598, + -0.79000425, + 0.13995205, + 16.312626, + -3.438324, + -4.710372, + 6.9159217, + 4.997074, + -11.944866, + -6.278514, + -7.310172, + -8.248277, + -0.2617442, + -2.001054, + -2.4265862, + 7.9734154, + -4.359084, + 1.4919127, + -0.38369736, + 2.8925261, + 2.770904, + 11.788717, + -11.200065, + 7.0120173, + -12.489671, + -7.3114347, + -1.5968479, + -2.0740008, + -7.660865, + 1.4215823, + 3.4180312, + -5.9557977, + -4.101128, + -7.1637955, + 1.2174717, + -8.017974, + 13.607655, + -8.332471, + 12.951081, + 13.259139, + 7.851571, + 11.287736, + -8.430205, + -2.83165, + -9.306727, + 1.3151592, + -2.5466766, + 9.444017, + -12.522999, + -10.38123, + -7.0192504, + 0.9397985, + -9.068451, + 4.640919, + -2.51455, + 5.657744, + 1.8063583, + -15.553587, + 0.9260013, + -4.1032104, + 4.0678425, + 6.9909325, + 4.943192, + -2.3060699, + 1.6395743, + -0.48130858, + 1.4182721, + -0.63343734, + 5.6635394, + -3.9217196, + -6.3144593, + 8.239023, + 8.01618, + -8.5425, + -0.17059784, + -6.761717, + 5.7745337, + -1.1535196, + -2.372529, + 3.1349926, + 14.739626, + -3.0802853, + -13.388992, + 3.012913, + 10.2796135, + -13.004479, + -0.6004416, + -2.7484965, + 4.0349708, + 1.1794678, + -3.6047134, + 2.0950997, + 3.1776624, + 5.355312, + 9.249312, + -5.047935, + -2.5895002, + -6.023992, + 0.42378932, + 6.4555655, + 11.28314, + -6.1557565, + 2.6091251, + -6.8104343, + 4.435232, + -6.023258, + 16.286194, + -0.5731437, + 2.0213904, + 8.013111, + -1.5368563, + -10.384564, + -8.238789, + -0.057244953, + -15.348441, + -1.7015631, + 6.999166, + 2.5275056, + 8.751711, + 1.0946581, + -8.001234, + 2.8864157, + -7.969383, + -0.49457392, + 5.2979984, + -7.2938204 ], "y": [ - -16.108109, - 
-2.802871, - 5.55556, - -3.1165593, - -2.5197542, - 1.6573303, - 7.9436374, - 1.7033308, - 2.729909, - -3.110688, - 0.19969198, - 7.2094626, - 11.474268, - -9.306534, - 6.4831786, - 3.4693139, - -3.6395743, - -1.3802856, - 17.943478, - 20.674412, - -14.37641, - 0.2662799, - 17.946259, - 5.1479177, - 3.269782, - 12.8965645, - 19.676033, - 10.8139715, - 4.6734076, - -12.667667, - 9.158181, - 2.501612, - -2.853026, - -2.8742912, - -9.390684, - 3.5931249, - 1.5709282, - -10.765733, - -5.9113226, - -5.507533, - 10.479771, - -0.30162513, - -5.1619964, - 9.435609, - 8.602637, - -4.3179398, - 14.847089, - -5.8406625, - -4.0188546, - -16.03366, - -2.9351225, - 0.8814765, - 9.312446, - 0.2861319, - -8.754338, - 14.149203, - -2.501995, - -4.5788355, - 20.002163, - -6.9779773, - -1.2691092, - 13.288892, - -9.499816, - 9.733308, - 7.6684027, - 0.11708519, - 12.023214, - -8.592282, - -14.5351715, - -16.749748, - -1.4396764, - -10.056349, - 11.6244335, - 0.4102241, - 18.943052, - -4.8392525, - 17.31309, - 12.829157, - -0.31426865, - -4.913969, - -5.8585067, - 8.703345, - 17.946308, - 7.5203032, - -9.040579, - -8.977853, - -10.744503, - 2.9780662, - -2.9896638, - -9.919191, - -7.825369, - -0.5688983, - 2.7128513, - -8.081976, - 19.224987, - 5.6850524, - 7.2608743, - -1.9696628, - 5.7763453, - -11.9261, - 3.7726462, - -6.2928514, - 0.6002692, - 3.240406, - 10.033546, - 1.7159785, - 14.183074, - -4.955666, - -1.2268807, - -6.7443852, - 8.091246, - -1.4330128, - 17.374035, - -12.052618, - 8.407009, - -12.653764, - 0.5208274, - -2.3776338, - -5.5375533, - -11.549568, - -6.591003, - -7.744704, - 5.603869, - 1.1318715, - -1.3157955, - 12.294856, - -11.588596, - 18.72359, - -4.533707, - 12.797578, - 18.353394, - 14.767065, - 16.229063, - -1.1066937, - -3.7252734, - -3.5343199, - 5.829706, - -1.1521066, - 6.080864, - -14.84926, - -3.3232324, - 10.510039, - 5.957106, - -2.2022781, - -5.182772, - 9.717215, - 4.3090715, - -8.085696, - -12.127335, - -6.2474174, - 7.0910845, - -6.4494314, - 
7.989585, - -10.92101, - -4.208281, - -3.0467856, - -7.2040524, - 3.4879417, - -2.9318397, - -3.7214146, - 1.688086, - -6.546599, - 2.838505, - -12.067736, - 4.953533, - -7.6888204, - -8.374947, - 8.707888, - 9.738594, - -5.715931, - -7.3781533, - 19.546906, - 8.7370205, - -11.739149, - -1.4666423, - 3.6902168, - -14.634209, - 7.613645, - 11.104671, - -8.19435, - -6.881912, - -3.8555307, - 3.0141797, - 7.749215, - -1.4897541, - 14.488473, - 3.882484, - 3.0612788, - 0.7137344, - -7.4118447, - -3.123873, - 17.030315, - 8.942967, - 7.1438923, - 4.7416067, - 2.027668, - 4.4894767, - 17.905304, - 7.7233586, - -7.9623003, - 2.6870794, - -7.5661163, - -12.301454, - -11.477651, - -3.8346057, - -11.767478, - 5.598828, - -4.312641, - -6.8031745, - -4.3621974, - -6.8772163, - 9.279575, - -4.0924034, - -0.7417347, - 8.393606, - 5.9398475, - 0.6119152 + -6.5300555, + 14.089418, + 12.162957, + -0.80311126, + -1.6755519, + -13.505905, + 6.5699277, + 1.4233526, + 3.7408068, + -3.009902, + -1.6519994, + 9.911368, + 14.304171, + -9.145412, + 6.8292613, + 4.1779256, + -13.0463, + -11.951641, + 5.743851, + 10.09115, + -8.627289, + 2.584683, + 7.23334, + 6.759529, + 4.1768756, + 12.57557, + 9.190438, + 13.93031, + 5.511717, + -11.910828, + 5.8589373, + 3.6352885, + -13.270146, + -2.0432546, + -9.36256, + -4.0989513, + 2.833454, + -4.2829947, + -6.8667107, + -5.736574, + 11.985562, + 0.33564866, + -7.6441755, + 11.567259, + 10.677815, + -9.594754, + 14.068278, + -2.490878, + -4.379241, + -6.612759, + -12.312431, + 2.8374946, + 10.107471, + 0.86265963, + -7.4858155, + 13.485198, + 0.44996768, + 0.12787041, + 9.892149, + -7.652323, + 13.810954, + 13.420327, + -9.137389, + 14.72838, + 7.676501, + -12.387229, + 14.694999, + -5.226657, + -8.565104, + -5.734247, + 0.18139325, + -9.293782, + 14.728803, + -13.8647995, + 6.203831, + 0.3127214, + 8.967697, + -1.4659885, + -1.9273498, + 3.1576743, + -2.5850005, + 5.1483097, + 5.489101, + 8.593102, + -9.933031, + -4.0722184, + -10.497164, + 
-10.699288, + 1.5761652, + -3.9312649, + -7.012359, + 1.2585955, + 3.0229156, + -16.250467, + 6.635525, + 6.7354093, + 8.468663, + -1.6286882, + 10.374195, + -10.649093, + -4.4278836, + -6.3712683, + 2.0350108, + 4.080304, + 11.325701, + 2.5883422, + 13.5416975, + -11.214315, + -11.917827, + -2.9803138, + 10.627456, + 2.9241033, + 7.383127, + -11.1040945, + 5.136991, + -10.0327215, + -16.114536, + -12.599517, + -2.9481568, + -11.174494, + -8.890177, + -1.0979837, + 6.8131933, + 1.463663, + 0.48312876, + 12.914509, + -12.583761, + 8.630306, + -4.9693522, + -1.45638, + 8.836656, + 14.184286, + 9.729582, + 0.11569965, + -12.046801, + -3.562859, + 8.306646, + -0.12532109, + 1.8029642, + -7.7512345, + -2.2794526, + 11.317182, + 8.203367, + -1.5793608, + -7.4279957, + 10.902695, + 9.414275, + -6.943587, + -9.714286, + -2.107979, + 8.535427, + -2.2587268, + 4.7189612, + -9.422279, + -9.95208, + 1.4961839, + -15.637048, + 5.8088226, + -10.609174, + -0.896489, + 2.6177058, + -6.1964593, + -10.441606, + -5.6452084, + 6.2846713, + -16.04126, + -9.215314, + 5.74158, + 14.189653, + -6.3043413, + -2.132232, + 5.4891644, + 9.997777, + -10.8906975, + 3.0431316, + -3.8775747, + -7.930391, + 6.087151, + 13.401266, + -3.5819368, + -10.400259, + -3.2769384, + 5.1977687, + 11.534197, + 1.2013716, + 9.190291, + 4.5081005, + 4.3416286, + 2.9420025, + -1.041198, + -0.5506536, + 6.695455, + 9.870885, + 9.233299, + 5.8174458, + -13.40291, + 5.86892, + 5.1465583, + 5.9692817, + -8.088498, + -9.699435, + -4.3566523, + -11.696756, + -11.084373, + -9.227834, + -9.344566, + 8.588015, + 0.74937767, + -1.7350386, + 0.8596554, + -8.018119, + 9.262971, + 0.69532895, + -0.87655604, + 9.858918, + 12.275479, + -16.078566 ] } ], @@ -2079,11 +2079,11 @@ "width": 800 } }, - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABPAAAAJYCAYAAADsXBi6AAAgAElEQVR4XuydBZgUx7qG/1kHFncNGoiRQIS4u7vbSUJcTjw5cXd3txt3Je4eokQgxHBfWGF9Z25Vk53sboCdqenuqZp5+zx57j3Z/qv+er+GQ95UdUdi6hIuCEAAAhCAAAQgAAEIQAACEIAABCAAAQhAwEoCEQSelbnQFAQgAAEIQAACEIAABCAAAQhAAAIQgAAEPAIIPB4ECEAAAhCAAAQgAAEIQAACEIAABCAAAQhYTACBZ3E4tAYBCEAAAhCAAAQgAAEIQAACEIAABCAAAQQezwAEIAABCEAAAhCAAAQgAAEIQAACEIAABCwmgMCzOBxagwAEIAABCEAAAhCAAAQgAAEIQAACEIAAAo9nAAIQgAAEIAABCEAAAhCAAAQgAAEIQAACFhNA4FkcDq1BAAIQgAAEIAABCEAAAhCAAAQgAAEIQACBp56B7376TW574AX5efJfEovGZMUh/eXog3eWdUevHH9Ctt3/DJk2c278vxcU5EuPrp1k9Goryr67bi6rrzwkoacpqsZ/6c1P5IXXP5ZJv02Vqppa6a7GWXv14XLwXtvIiKED4uPsfOg5MnhAb7nx4uMTGtvmm6rVOjfd47/eOm+57KRltrrDQWdJoWL73H2X2Lycf/X2wWffy2PPvy0TVaYLS8ulqLBAhg8ZIPvsvJnsuNV6Vq3liNOukYmTp8p7z94o+Xm5S+3twmsflOfHfSTvPnODHHDcpeo5HyaXnz029HWcd/X98vGXP8h7z9zoza1/HYbRy8tvfipnXX63vPnEtdK3V7fQ182EEIAABCAAAQhAAAIQgAAEIACBpgSyXuBNmPinHHT8pbKmEkuHKIEWiUTk3sdele9+/E2evOuCuFDT4qC4XRs549j9PH41tbXyx9RZ8tIbn3jS5rhDd5Vj1V/Lu+rqG+TEc2+WDz//XrbeZC3ZdP1R0q5tkUyZPkeefvl9mT13gVx5zlGy7WbreMNkksDT67n0xkfkqZffk3efvkG6den4L1Tf/jhZDjz+Mjn3vwfJfrtu4cuv1NLyxbL+TsfJV+PulLZtinwZs+UgWnSde9V9suu2G8o2m64tXTt3lAULSz0B9uYH4+V/Jx4gB+y+lVf29kdfy12PvCxP331hIL0kMugb738lp1x4myeGt9p4rX+VaNm6ye4nyQZrryrXX3icvPrO515eY0atlMjwvt7TUuAF1cvlNz/qyczTj93X61//mvz865+UfF3f+zXKBQEIQAACEIAABCAAAQhAAAIQSCeBrBd4WmR8M2GyvPn4NaJ31emrrKJSNtzleCX0tpVTj97b+3ta4OmdOPddf0azvPSOuitvfUwefe4tJTuOVQJniXxb2nXjPc/IPY++IpeddYQne5pelVU1cvipV8sfU2aqXq6Vjh3aZZzAm/T7NNn98PPktKP3kf/su92/EGlZ85qSRe+rnWHti9v68uvioy8myNFnXheowNv+wDOlV/cucv8NZ/6r5xPOuUmUFZZbLj3R+9n1dz0ln339c1oFnhbJm+/5X1l1xGC548qT/9Vz4+6ze689XdZbaxVfcjAdpKXAMx2ntbp9j7lY1lS7aRsFXmv383MIQAACEIAABCAAAQhAAAIQgECYBLJe4M0vKRUtzwb07dGM+0a7nuDtkLvkjMO8v78sgad/1tAQlZ0OOds7Nrmso59V1bWy8W4nyJojV5Q7rzp1qRnPmrNAtFxp7EXvwBs2qK9svuFoufX+52Xm7PnSs3tnTzI07px65a3P5MzL7pKHb/6fXHDtA14v4x69SrRYfODJcfLsqx94dUVFhTJq1WFy0hF7xHcV6t1MZ1xypzx++3ly7Z1PejsJ9Rr0brE9dthY9DHKbyb8KnlqZ9LO22zgibfG669ps+Wme59Ru5R+lqrqGunRrbNsv8W63k7E/Py8ZT7D+x59kSyurJaXH76i2T1L+Jzo7WC79Mz
DvZ998tWPcvf/vSyT/5iuuNTLyJWGyMlH7SWrDh8Ury1XsvWme5+Vtz4cLxWLq2TwCn3kyAN39Pjc9sDzcvtDL8bv3Xjd1T1hlQib86+5X35UuzMP338HuULtztp8w1Fy8elLnoWW11b7niYD+/WSe649bbm/dg856QoZ//2k+D2NIlML4xuU2Hv3k2+947edO7aXTdZbXU4+ci/v/9fXsvqJxWLe0d3nXvtI7RqbLYUqv43HrO6J56Xtcmyc/Lo7n5IHnxrn7YbUR7ibXoedfJXMUM/M649d7e1IbXlsVT8nN97ztOLzlyyuqlbysrPstPUGcvRBO0tOTkSeeeUD71l85+nrPbHZeB15+rWeHH/ijvO9v5XIM7S8I7Q6d/18LO3SOwf1s6Sv/3v2LXnqpfe8XXVt2xTKcHVMXbNtPPa+yqaHNhtC77z9c8qsfx2hfe/Tb73dk78qEa2vFQf3k8P228HbTauveQsWecfEr1K7aPVu0nc/+cZ7JvWx+HNOOqjZ8fjlPij8EAIQgAAEIAABCEAAAhCAAAQg0IJA1gu8pT0R+litFk1X/G+s7KzEhL6WJ/D0z2+4+2nv6O1HL9wiXTotkS5Nr6++myiH/vdKTwjuvv3GCT2IWuA1NDTIwP69ZOwBO0pubq7awfWkkgO/eeJFz/PG+1+q45C3e3JOv29tmJIKWhjofh566nU57Zh9lYhcQx3pLPN2Cuodfi8/dIUSbp1U7ZKjlPqdYlpOraAk1NW3Py6PPPOmrDZikJxx3H7eO/4aj4jefc1p3rFKLcC23f90b5fchaceKp06FiupMd0THgfusZUnCZd1Pfvqh56MekxJw6bvDdTvBDznynvjf1+Lrv+cfKVsudGacsLhe3gctMT8dPyP8uy9FyvJ2dObQt8zbeY8JUgOlN49usrLb30qDz75uuhede9aYmqR95Z6l1mH9u28Y9CJsLnkhofl/U+/kz5q1+VRB+2k2PSU/n2aS97GNWqumtkOSmDq9yGOVO9DzFNZtby0bNRZ6eO1eidnGyVV9fv+9DvmZip5e/4ph8gI9f5FLcguuv4htZ4u8riSXVqiLasfLZRuvu9ZOVEx0u/a0xL4YtV7jqp5+p6LlvmOu6kz5sh2B5wp/x27p/dsNV5a3G2thGTTv99U4NWrHDZTkkqv8fj/7Obx1LJK93f0wbvI4fttn5DAS/QZWp7AKy1bLKXlFc0wn3/NA94z/vTdF3myu/HZ1c/yZkrI6+PBdz3yknyq5PCr/3eV92tIS9Ot9jlVdtlmQzn+sN2853rcO180E3iNOzn32nFTOWhPdRxa8dXP2XOvfSi3X3GyJ1z1OBvucoI3r+a33ebrKoFXKYefskSE6ueWCwIQgAAEIAABCEAAAhCAAAQgYEIAgdeCWsmictn/2Euks/oH+/+75RwlzXK8O1oTeHqHj5Yu+h/Sm36IonH4xp1yD954lqy9xoiEstICb+GiMvUi/euU7Cnwar78dqInrfQuvo3GrBaXcE2Fi97Npo8A62O65518cHwuvRNP7xZrvLdR4DU90vvL5Cmy59gLvA9qnKmkh770rr7VtzxcSaLd1e62nTyBpwWQfqecFoGN10nn3eLt3HpGiaNlXXq3o36/2vZbjJGLTvtP/Da9O00LmRceuNT7e1p66B1lWrJoyaUvva4t9zlFtla76y5Q4lAffT7ohMv+9S43LQj18dC9d9pUHnhinLe7sPEdeImy0e/re/yFd/4lGpe2Lr1rUu9GfPz5dzxBpHd5rb7KUFlvzVVkxy3X84RO43XMWTeI3vXZ+A68xjxbHr9+Ub1b8X9X3BOff2n91NTWeTlvpHbc6frGa8Ivf4g+EnrNecd4nJd16Z12s+eVyGuKceOlZefd//eKt3uucQdfU4GnP+Si/7v+oMUualdm46WPR+t3xfXr3T1hgZfIM5TMRyz08XTd//03nOVJaX0tKq3w1tj01+SvakfnboedK7defpIn9fS11rZ
HKgG+efwIbcuPWOjnc6Ea60X1fGoZpy+9+1FLUL1jVgvjRoGnmWv2jZfuSx+f//bNe+LH9JcZCj+AAAQgAAEIQAACEIAABCAAAQgshQACrwkULRSOPP06T8DoHVKNxxf1La0JvEefe1suv/n/5KUHL5MhA/v+C/Urb6ujrpfe5b0nLdGPAWiB16dnl2ZHbn9Xu4t2PuR/cu35x6gdPmPiAk8fodXHc/XVuIPw6vOO9naFNb30ET8tN/QRw0aBp48MNh5LbdyBpeXanjtuEi8ds8Mx3n8/Xe3o05eWIA8//YZ8r77gq8VGNBb1jiJr6fP2k9ct9xfbRdc96H0Y4YPnbvbE5NQZc5UIOUN97OFAdXx3S692zW2O9NbXeJy2ccDj/3eTzJm/0BNgD6n5r77tce+deS2PgTbe31LgJcqm8YMb3711n3csNJGrUh0n1UeK9e7B8T9Mkp8m/eXtgDvv5EO8I8n6ainw7n/iNdHHWVuuQR/31O/Wa/ygx9L6aVxLS5mm5/Hy2kHl9fdHGZbW/2tql9npl9whj9zyP2+3ohZSW+93uqyy4sBmXz5uKvC0zNVyUIs8veNzfbUjc7Ta/dn02HSiR2gTeYYSFXj6wzDHnn2jJ4UbWes16x2DT774nrz+3hfqKPkC9dXnGu9L0/oor362dttuIw9NawJPP487qQ9aXHjaoc1Q6iPon47/ST5+8Za4wNPHlw/bd/v4fU+++K63K/KD525a7rHmRJ4x7oEABCAAAQhAAAIQgAAEIACB7CSAwPs7d72b6/hzbpSVhq7gyYuWH1FoTeBpwfLkS+/K56/csdSvVjbuFtM74vbdZfOEnralfYVWf/l2p4PPju+uapRweufasEH9vHE/U0LhiNOu8d73pt/71vTaUdXqj3HcdfWpcYHXtLZR4F35PyUstl4/XtpUCE2fNU92/c85MmSFvt5uvv5qB5J+T54WmD//OqVVgde4y69RPOkjoPo44vtKcHRQxxe1dFl9i8M9caaPDTe99FHaLp06eDJEH6m94+EXl/uBipYCL1E2Os/X3v1cPn3ptoSyWtpNWgiffMFt8vtfM+Ttp5bsaGsp8Brf4zb+9bvjuyz1WHqXnt6pqN/VdoR6D9/S+mlciz6uG2khGevq6j0BqkXvsi59z6bqYxZ6F5qWWVo+6g+pNB6Vbqxr+Q48/V43/d49/YVdnaXeibnjlut6x7X1LrxEBF6iz1AiAu9P9WtCS0V9BFZ/8bfppY/2Pql2x+pj3XqdxcVtvHdCHnTC5QkLPL2DUj+P+sMrTd8DqefRu2718W+9u65xB15TEa3vQeAZ/xKiEAIQgAAEIAABCEAAAhCAAAT+JoDAUyD0O7zGKuGl31l1wamHLPX9ZcsTePrYpH5vmP6Agj4iu7SrVh133FgJGf1Sf/2hi6Xt6tJH/Z5T74jTx1f1u8VMBd6Pk/6UfY66SJa1A2+t1Yd7Ymdp8i8RgdcoxcY9enWzj380vo+utR14ms9eR16o1lgk919/pvf+sXVGreQdy2y81t7uKO9oqH4nWctLv99NvxfwMXVk9bKbHvGEYe+eXZfKvaXAS5RNMgJPCyG9A3BpH+94+6OvRR8t1h+4WH+tVf8l8Br7W9YOvAvUe/H2VjvdltaP3uG391EXersiN1bvYGt56Z2kTT8isTRA19z+hDz18nvy4fO3yIXXPSDfKpH9hvoic+MxUV3TUuA1HUcfUR2ndrfpr+tuvsFouerco9SHU5a857DlRyy0NNMfI9EfsUj0GWpN4On3Cu6j3lep13m3Ytzy3YN6Z90W6j2K+sMSjZfeIamPxCazA0+Ps+OWS9+B98W3v3hCGYHH/65CAAIQgAAEIAABCEAAAhCAQFAEsl7g6Y877HjQWbK1+mJl03eytQS+LIGnjxSee9V98tKbn8h9150h66658jKz0l9E1e/oOuGw3dUL/3dudp8+fnrMWdfL5D+nyysPX+m9XN9U4GmhqN+NpnckNX0
Hnt71tI06Iqlf6H+IkoSmAq/xwwlfvHqHJxr1pY/B6p2B3buqI7Rqt1lrl35n4CU3Piw3XHS8J7j+79ZzvA9xNF5jT7vW+9hDy6/66nn0V08L1HvxGt/11lTE6Ho9nn4Xmz4+2iiKvnztTm93WKJsEhV4H385QY4647r4UdeW677l/ufkzodfih+t1jvw9NdKG98T+PUPv8rBJ17uvcNum03XiZc3fnxB37fSsBWWKvC0FN5AfTRh9+03krNPaL7zTB+1HjygdzMRt7RM9O41vStT77jUxzzHHrCD957DpldTgadl5bfq2HTLo9n6S8g/K6Govy6sd+adfMGt3vsMG3eF6vf1baZ2++mPj2iBl+gztDyBp9/FeOzZ18ufU2fLU3ddKB07tGvWtz4SPGqrI2S/3baMv89R36B/vWq+LQXe3jtt5v3a0FfLd+Dp9wXOV79X6CPyjVfjhzhWHNzfe58eAq+1X/X8HAIQgAAEIAABCEAAAhCAAARMCWS9wNP/MK9Fln4nXOPHEhph6vezrbbSYO+/aomhZdUZxzZ+2KHB+5DDM2q30c+//uXtgjp0n22Xm4M+GqqPVL778Tey3lqryHabjZFOHYplyozZ8sQL76ovai6WWy87Kf6RC1OBp5vQx1Lvf/w1OUuJnY3HjJS5ShpdcfOj3vvjXlQSoqP6IqupwGv88IIWPfqddb/9OUOuuu0xT9a8+eF4eeH+S6WvEmj6/W/LuhZXVsume5zkHb/sqBg0FSO6pvErtHtsv4n3ZVd9n5Zl19zxhJx61N7e1271pY986mOc5550sPelWH3sVR/HbfzIh/5KqJZAWlBpETZ0UN+E2CQq8LQkOuGcm+XDL74X/YXSDdWHRXSm+oMcH33xg3d8cxslh6+7YMlHJvSXevXXbe9WR5i7qiO1+jiz3pk2Y/Y8TyAPVQx/nPiH91VX3a8+6qyvZfWjRZg+RnyKOmqrj0vrHW5Pv/y+9943/aXfVYYPbPX3Bi0Q9Tv3tIB6R8nXlu8TbCrwGqWpFsC7qI+kaCmqpfP5ivG2m62jvgZ8kGhRrN9puN+uS8SZlqb6+fjgs++9nZJa4CX6DF2sjqh+/OUP8t4zN8Z/Hep3OOrdmnrX34PqS8s3XHi8+vpy8/dO6udFH1nWO+1mqH5uvfy/3vHs/3v2Le9djc+8+oF3lF0frdXH5bfY6xSvNy289YdZPv5iQrOv0OqvH2uprGv0Dtl6xfk+9etLf/X4oZv0OwSHIfBafdK4AQIQgAAEIAABCEAAAhCAAARMCWS9wNO7gubOX7RUfvqY5quPXBkXB/rF/Y2XPmKod8npl//rf6Bv/Opla0Fo4aO/SPvcuA+VeJoqemeS3lG2njpeeZh6x5beOdZ4pSLw9DwPPDlOyZwPvHd+adGidwfqd6r179PDm8JU4Ola/WVN/eGOMiUdV1YfPTjz+P2lrRKeY0+/VvSxRi1plvYxj6Z89DFLfdxS7x5rFHJNf67fyXbbgy94glQfOda7t/SHE/SR0sZLf4zgevURiHc+/lq0FBykdp3p3Y1bqS/V6ktL0aNUT5r16qsMEf2xj0TYJCrw9BxazOr3vulctdRdVFah3mdXqN4R2Ed2VB8+2HvnTeNHO3/4+XdP4urdhYfus533DkHNS8uodz/51pNAWjxtvcna6qu/e3gfVNHX8vrRX8vVX8D1vuqqch4xZIAcpRgk+rGUxi8kb7HRaLn5khP/9Qi3PEKr+7xX5T9ZiVu9dn18VUvKYw/ZJf6VVS1O9c7DEvUVZS0ED9tve/XBk99lsvr4if5oSqLP0INPvbFMgbeD2jn717TZS/0lp7/+q4/z6p9fcO0DSor+6Ql4/WXmE9TXlK+69TF5WmW2vXpPoJaBekeo/lpxNBr1ZL4WsFq2vvnEtZ5k1ZcWr3cqWTpJrUEf49bP/XH/2VXWHb1k1y078Fr73Y+fQwACEIAABCAAAQhAAAIQgIApgawXeKbgqIMABCAAAQh
AAAIQgAAEIAABCEAAAhCAQBgEEHhhUGYOCEAAAhCAAAQgAAEIQAACEIAABCAAAQgYEkDgGYKjDAIQgAAEIAABCEAAAhCAAAQgAAEIQAACYRBA4IVBmTkgAAEIQAACEIAABCAAAQhAAAIQgAAEIGBIAIFnCI4yCEAAAhCAAAQgAAEIQAACEIAABCAAAQiEQQCBFwZl5oAABCAAAQhAAAIQgAAEIAABCEAAAhCAgCEBBJ4hOMogAAEIQAACEIAABCAAAQhAAAIQgAAEIBAGAQReGJSZAwIQgAAEIAABCEAAAhCAAAQgAAEIQAAChgQQeIbgKIMABCAAAQhAAAIQgAAEIAABCEAAAhCAQBgEEHhhUGYOCEAAAhCAAAQgAAEIQAACEIAABCAAAQgYEkDgGYKjDAIQgAAEIAABCEAAAhCAAAQgAAEIQAACYRBA4IVBmTkgAAEIQAACEIAABCAAAQhAAAIQgAAEIGBIAIFnCI4yCEAAAhCAAAQgAAEIQAACEIAABCAAAQiEQQCBFwZl5oAABCAAAQhAAAIQgAAEIAABCEAAAhCAgCEBBJ4hOMogAAEIQAACEIAABCAAAQhAAAIQgAAEIBAGAQReGJSZAwIQgAAEIAABCEAAAhCAAAQgAAEIQAAChgQQeIbgKIMABCAAAQhAAAIQgAAEIAABCEAAAhCAQBgEEHhhUGYOCEAAAhCAAAQgAAEIQAACEIAABCAAAQgYEkDgGYKjDAIQgAAEIAABCEAAAhCAAAQgAAEIQAACYRBA4IVBmTkgAAEIQAACEIAABCAAAQhAAAIQgAAEIGBIIO0Cb+aCqlZb79WlSOaUVEus1Tu5AQJuESjMz5F2RXlSUl7rVuN0C4EECHQqzpea2qhU1TYkcDe3QMAdAsVt8qRD23ypqKqXsso6dxqnUwgkQKCoIEfaFOTJwgr+bJIALm5xjEDn4gL155J6qVZ/PuGCQKYR6NO1jSTiVzJt3TavR2fi54XA85MmY0EgSQIIvCSBcbtTBBB4TsVFs0kQQOAlAYtbnSOAwHMuMhpOggACLwlY3OocAQSefZEh8OzLhI4gYEwAgWeMjkIHCCDwHAiJFo0IIPCMsFHkCAEEniNB0aYRAQSeETaKHCGAwLMvKASefZnQEQSMCSDwjNFR6AABBJ4DIdGiEQEEnhE2ihwhgMBzJCjaNCKAwDPCRpEjBBB49gWFwLMvEzqCgDEBBJ4xOgodIIDAcyAkWjQigMAzwkaRIwQQeI4ERZtGBBB4RtgocoQAAs++oBB49mVCRxAwJoDAM0ZHoQMEEHgOhESLRgQQeEbYKHKEAALPkaBo04gAAs8IG0WOEEDg2RcUAs++TOgIAsYEEHjG6Ch0gAACz4GQaNGIAALPCBtFjhBA4DkSFG0aEUDgGWGjyBECCDz7gkLg2ZcJHUHAmAACzxgdhQ4QQOA5EBItGhFA4Blho8gRAgg8R4KiTSMCCDwjbBQ5QgCBZ19QCDz7MqEjCBgTQOAZo6PQAQIIPAdCokUjAgg8I2wUOUIAgedIULRpRACBZ4SNIkcIIPDsCwqBZ18mdAQBYwIIPGN0FDpAAIHnQEi0aEQAgWeEjSJHCCDwHAmKNo0IIPCMsFHkCAEEnn1BIfDsy4SOIGBMAIFnjI5CBwgg8BwIiRaNCCDwjLBR5AgBBJ4jQdGmEQEEnhE2ihwhgMCzLygEnn2Z0BEEjAkg8IzRUegAAQSeAyHRohEBBJ4RNoocIYDAcyQo2jQigMAzwkaRIwQQePYFhcCzLxM6goAxAQSeMToKHSCAwHMgJFo0IoDAM8JGkSMEEHiOBEWbRgQQeEbYKHKEAALPvqAQePZlQkcQMCaAwDNGR6EDBBB4DoREi0YEEHhG2ChyhAACz5GgaNOIAALPCBtFjhBA4NkXFALPvkzoCALGBBB4xugodIAAAs+BkGjRiAACzwgbRY4QQOA5EhRtGhFA4Blho8gRAgg8+4JC4NmXCR1BwJgAAs8YHYUOEED
gORASLRoRQOAZYaPIEQIIPEeCok0jAgg8I2wUOUIAgWdfUAg8+zKhIwgYE0DgGaOj0AECCDwHQqJFIwIIPCNsFDlCAIHnSFC0aUQAgWeEjSJHCCDw7AsKgWdfJnQEAWMCCDxjdBQ6QACB50BItGhEAIFnhI0iRwgg8BwJijaNCCDwjLBR5AgBBJ59QSHw7MuEjiBgTACBZ4yOQgcIIPAcCIkWjQgg8IywUeQIAQSeI0HRphEBBJ4RNoocIYDAsy8oBJ59mdARBIwJIPCM0VHoAAEEngMh0aIRAQSeETaKHCGAwHMkKNo0IoDAM8JGkSMEEHj2BYXAsy8TOoKAMQEEnjE6Ch0ggMBzICRaNCKAwDPCRpEjBBB4jgRFm0YEEHhG2ChyhAACz76gEHj2ZUJHEDAmgMAzRkehAwQQeA6ERItGBBB4RtgocoQAAs+RoGjTiAACzwgbRY4QQODZFxQCz75M6AgCxgQQeMboKHSAAALPgZBo0YgAAs8IG0WOEEDgORIUbRoRQOAZYaPIEQIIPPuCQuDZlwkdQcCYAALPGB2FDhBA4DkQEi0aEUDgGWGjyBECCDxHgqJNIwIIPCNsFDlCAIFnX1AIPPsyoSMIGBNA4Bmjo9ABAgg8B0KiRSMCCDwjbBQ5QgCB50hQtGlEAIFnhI0iRwgg8OwLCoFnXyZ0BAFjAgg8Y3QUOkAAgedASLRoRACBZ4SNIkcIIPAcCYo2jQgg8IywUeQIAQSefUEh8OzLhI4gYEwAgWeMjkIHCCDwHAiJFo0IIPCMsFHkCAEEniNB0aYRAQSeETaKHCGAwLMvKASefZnQEQSMCSDwjNFR6AABBJ4DIdGiEQHbBF5D+QKpnvy5xBrqpXDwmpLftZ/RuiiCgCaAwOM5yGQCCLxMTpe1IfDsewYQePZlQkcQMCaAwDNGR6EDBBB4DoREi0YEbBJ4DZWlUvbOvRKrq16ylpxc6bDJIZLXuW/hyMsAACAASURBVLfR2iiCAAKPZyCTCSDwMjld1obAs+8ZQODZlwkdQcCYAALPGB2FDhBA4DkQEi0aEbBJ4FX/8Y1Ufjeu2TqKVlxP2q66udHaKIIAAo9nIJMJIPAyOV3WhsCz7xlA4NmXCR1BwJgAAs8YHYUOEEDgORASLRoRsEng1c39U8o/fqzZOtqO2l6KBo0yWhtFEEDg8QxkMgEEXiany9oQePY9Awg8+zKhIwgYE0DgGaOj0AECCDwHQqJFIwI2CTy9gMrv35Dq38d7a8nvPUyK19lDIrm5RmujCAIIPJ6BTCaAwMvkdFkbAs++ZwCBZ18mdAQBYwIIPGN0FDpAAIHnQEi0aETANoGnFxGrrpBofa3kFncxWhNFEGgkgMDjWchkAgi8TE6XtSHw7HsGEHj2ZUJHEDAmgMAzRkehAwQQeA6ERItGBGwUeEYLoQgCSyGAwOOxyGQCCLxMTpe1IfDsewasFni3P/SiPP3ye1JTWycbjRkpF5xyqLRtUyjTZs6V866+Xyb9NlX69Oom55x0kIxebZhHd+aCqlYp9+pSJHNKqiXW6p3cAAG3CCDw3MqLbpMjgMBLjhd3u0MAgedOVnSaPAEEXvLMqHCHAALPnazoNHkCCLzkmQVdYa3Ae+P9r+Sme5+RB288W4rbtZETzr1J1hw5XI49ZBc55KQrZPMNR8uBu28ln47/Scm8++StJ6+T/LxcBF7QTwzjW00AgWd1PDSXIgEEXooAKbeWQDICL1pTLw3TSiRvha4Syee9dNaGSmNxAgg8HoZMJoDAy+R0WRsCz75nwFqBN2Hin1JXV6d21q3oUXvo6Tfk50l/yRnH7Sfb7n+GfPbKbZL39wuV9xx7gZxx7H6yzqgRCDz7njE6CpEAAi9E2EwVOgEEXujImTAkAokKvOpvpkrppa9LtKxKcru0k07nbisFI/uF1CXTQMCMAALPjBtVbhBA4LmRE12aEUDgmXELsspagddy0Uefeb1stv4aMmxwf7n4+ofkhQc
ujd9y6kW3y5jRK8veO20q8xZVt8qra8dCKSmt4Qhtq6S4IVACEf9HL8jNkaLCXCmrrPN/cEYMlEBEAnggAu04/MGL2+apf7ETk5q6hvAnZ0YIBEigjfp9u11RnlTVNMji6vplzjTzgAekfmZp/Of5g7pJ7/sPDLAzhoZA6gQK8nOkUO0WLefPJqnDZATfCMR8+ifB9m3yvT+X1NZHfeuNgSAQJ5Dmd35171SUkF8hsfAI6Ez8vCIxdfk5oB7r9gdfkPHfT5K7rz1Nvvz2F7n53mfliTsviE9z7lX3yYqD+8nBe22T0G+e+bkRqWvwvU2/l814mU4ggEcwkiOSG4lIPc+3e08P/q7VzPJyIhJV/xMTDeDXTquTcwMEAiSQq55t/VeDerj1X0u76hcslsk73NHsR/oI7YiPTg6wM4aGQOoE1KMtOfrPJvzmnTpMRvCPgE9/lshT/1zp/dkEf+dfNoz0D4E0//NBQV5OQn6FyMIjoDPx8/JV4GkXeMUtj8pf02bLTZecKG2KCuTbHyfLBdc+KC89eFm871MuvE3WX2tV2XPHTThC62eajOUcAY7QOhcZDSdBgCO0ScDiVqcIJHqEduFVb0r1W7/E19Zmx9Wk0383d2qtNJt9BDhCm32ZZ9OKOUKbTWln31o5Qmtf5lYfob36tsdl9ryFctW5R3kfqNDXwtJy2XLvU+XjF2/1hJ6+tjvgDLn87LEyatVhCDz7njE6CpEAAi9E2EwVOgEEXujImTAkAokKvGhtvVS+8L3U/jhTCkf1l7Y7jZSIz/8mdllLnl9RJrd9Mk5+mj1N1ugzSI5cbyvpVtwhJEJM4zIBBJ7L6dF7awQQeK0R4ucuE0Dg2ZeetQLvq+8mypW3PiZP3HG+5OfnNSN3+ClXy9prjJCxB+wo4977wjtSO+7RqyVXvf9r5oKqVin36lIkc0qqfXrzQavTcQMEQiOAwAsNNROlgQACLw3QmTIUAokKvFCaWcYkZ7/yiEyYNTX+09X7DJTLdjggnS0xt+UEyn7/TmoWzpYeq6wtnbr3loUVtZZ3THsQSJ4AAi95ZlS4QwCBZ19W1gq8sy+/R155+1Ml5ZbsvNPX0IF95Zl7LpIZs+fL/664Ryb9Pk369+khF556qKwyfKB3DwLPvoeMjsIjgMALjzUzhU8AgRc+c2YMh4DtAq+mvk72fPAaafqa47YFhfLUIaeFA4hZnCMw5aVbZOEvn3p95xUUychDzpNol8HOrYOGIdAaAQRea4T4ucsEEHj2pWetwDNFhcAzJUddJhBA4GVCiqxhWQQQeDwbmUrAdoGnubMDL1OfPv/XVVM6V36586T4wDnqKxY9V9tAem57nP+TMSIE0kwAgZfmAJg+UAIIvEDxGg2OwDPCRhEE7CSAwLMzF7ryhwACzx+OjGIfARcE3ryKUrn9k9d5B559j491HdVXLJQfbzu2mcDrNmJt6bMTX0y2LiwaSpkAAi9lhAxgMQEEnn3hIPDsy4SOIGBMAIFnjI5CBwgg8BwIiRaNCLgg8IwWRpFvBMrqq+WLRdOkQ16RrNmxr+RFcnwbO4iBprxyuyz86SNvaI7QBkGYMW0hgMCzJQn6CIIAAi8IqqmNicBLjR/VELCKAALPqjhoxmcCCDyfgTKcNQQQeNZEYWUjc2sq5JxJb8ji+hqvv+HFPeR/QzeT/Jx/3hNtXeOxmJT9ob6YvGiOdF95LT5iYV1ANOQXAQSeXyQZx0YCCDz7UkHg2ZcJHUHAmAACzxgdhQ4QQOA5EBItGhFA4Blhy5qix2d8Jy/P+bnZes8Ysqms0bGPEwyKCnKkTUEeX6F1Ii2aTJYAAi9ZYtzvEgEEnn1pIfDsy4SOIGBMAIFnjI5CBwgg8BwIiRaNCCDwjLBlTRECL2uiZqEOEkDgORgaLSdMAIGXMKrQbkTghYaaiSAQPAEEXvCMmSF9BBB46WPPzMESQOAFy9f10efUVsi5E5seoe2ujtBubnSEtqakSuZ+OE3
qymul27p9pcOwzoHjYQde4IiZII0EEHhphM/UgRNA4AWOOOkJEHhJI6MAAvYSQODZmw2dpU4AgZc6Q0awkwACz85cbOrKj49YNFTXy8/XfCF1ZUvepaevYUeNkvZDg5V4CDybniR68ZsAAs9vooxnEwEEnk1pLOkFgWdfJnQEAWMCCDxjdBQ6QACB50BItGhEAIFnhI2iJAmUTlwgv9/3fbMqvQtvwB7DkxwpudsReMnx4m63CCDw3MqLbpMjgMBLjlcYdyPwwqDMHBAIiQACLyTQTJMWAgi8tGBn0gAJzC+LyO9zc6QhGpHhfXJlYPeolFfVBTgjQ2czgcpZFTLx+i+bIeiz7WDptcXAQLEg8ALFy+BpJoDAS3MATB8oAQReoHiNBkfgGWGjCAJ2EkDg2ZkLXflDAIHnD0dGsYNAjfJ0n/2aK9GoSE5ORHLVXyv3F+lajMCzI6HM7GLm63/I7Hf+8hZXPKiTDDlidcktyA10sQi8QPEyeJoJIPDSHADTB0oAgRcoXqPBEXhG2CiCgJ0EEHh25kJX/hBA4PnDkVHsIDCnNCI/Tc3xmmkUeL3Uq8iG9ULg2ZFQ5nZRV1ErDZV1UtSjXSiLROCFgplJ0kQAgZcm8EwbCgEEXiiYk5oEgZcULm6GgN0EEHh250N3qRFA4KXGj2q7CLADz6486CY4Agi84NgycvoJIPDSnwEdBEcAgRccW9OREXim5KiDgIUEEHgWhkJLvhFA4PmGkoEsIbCsd+BFK2ukZs4iKRrQXSK5S3bpcUHAVQIIPFeTo+9ECCDwEqHEPa4SQODZlxwCz75M6AgCxgQQeMboKHSAAALPgZBo0YhA06/QTn35K5l51ziJqS16Bb27yIDz9pPCvl2NxqUIAjYQQODZkAI9BEUAgRcUWca1gQACz4YUmveAwLMvEzqCgDEBBJ4xOgodIIDAcyAkWjQi0CjwyhZVyZe7Xy6x2vr4OB3WX0n6n7mX0bgUQcAGAgg8G1Kgh6AIIPCCIsu4NhBA4NmQAgJPenUpkjkl1RKzLw86gkBKBBB4KeGj2HICCDzLA6I9YwKNAm/213/IhJPuaTZOXqdiGf7QKcZjUwiBdBNA4KU7AeYPkgACL0i6jJ1uAgi8dCfw7/nZgWdfJnQEAWMCCDxjdBQ6QACB50BItGhEoOkR2u+VwKv8eWp8nF6Hby1dd17XaFyKIGADAQSeDSnQQ1AEEHhBkWVcGwgg8GxIoXkPCDz7MqEjCBgTQOAZo6PQAQIIPAdCokUjAk0F3sK55bLg5S+k+q850n7McOm82UiRSMRoXIqWTSDaEJXSWbOkfffukldY4AyqWH1M6hfUSW6XfMnJd+O5MBV4DdEGmTF7hvTs1ksKC9zJyJmHiUZ9IYDA8wUjg1hKAIFnXzAIPPsyoSMIGBNA4Bmjo9ABAgg8B0KiRSMCTQVeWWWd0RgUJU5g4fSZ8vE9D0pVyULJb1Mka+2/t/RfY7XEB0jTnXWz62TB8wslWtYgOUU50nmnTlI4uDCQbmqlQV7KnSI/5SyUUdFusmPDAMkRM2FoIvCmzJwqtz56tyxcVCJtitrI2L0OlZEj7M8okDAY1GoCCDyr46G5FAkg8FIEGEA5Ai8AqAwJgXQRQOClizzzhkEAgRcGZeZIBwEEXrjUP7j1bpkzaXJ80ryiQtntqoslkpMTbiNJzjb/0RKpnV4Tr8otzpWex/VIcpTEbr8q/3v5IGdm/OadG1aQo+tXTqy4xV0mAu+Ku6+T36f8Hh+pU4dOcu2ZlxvNTxEEgiSAwAuSLmOnmwACL90J/Ht+BJ59mdARBIwJIPCM0VHoAAEEngMh0aIRAQSeETbjoufPOE/qqqqb1W97zunSoVcwMsy40RaFs26cLbGa5p9g66UEXo4SeX5eNWr33T5F70htrOEfgSaF8ljN5kbTmAi8Ey45Vaqqq5rNd91ZV0jH9h2NeqAIAkERQOAFRZZxbSCAwLMhheY9IPDsy4SOIGBMAIF
njI5CBwgg8BwIiRaNCCDwjLAZF/342pvy87i34vU9R6womxw31ni8sApL3yuXxV9WxKcrGl4kXXbtHMj0Bxe+L/PlH4E2LNZRbqpd32guE4H3zBsvyOsfvhmfb81VRskx+9ufkREgipwmgMBzOj6ab4UAAs++RwSBZ18mdAQBYwIIPGN0FDpAAIHnQEi0aEQAgWeEzbgoFo3KH599JbN+/kW6DOgvwzbZUPLVMVrbr1hUpPLbxVI9pUYKexVI23XaSU6e2XvpWlvr1znz5Or8H6RcaqWzFMn5taNkeKxTa2VL/bmJwIuqjN7/4kP56feJMrjfCrLFeptLUaH9GRkBoshpAgg8p+OjeQSec88AAs+5yGgYAssmgMDj6chkAgi8TE43u9eGwMvu/G1dfY06Qjs9p0IGxNqL+uatcZsmAs94MgohEDIBBF7IwJkuVALswAsVd0KTIfASwsRNEHCDAALPjZzo0owAAs+MG1X2E0Dg2Z8RHZoTQOCZs6PSfgIIPPszokNzAgg8c3ZBVSLwgiLLuBBIAwEEXhqgM2VoBBB4oaFO30SxmLT95UcpmjFdokVtpGKlVaS+W/f09RPSzMkIvFg0Jg2zyyWnY6HktONIYUgRMU0KBBB4KcCj1HoCCDzrI6LBFAgg8FKAF1ApAi8gsAwLgXQQQOClgzpzhkUAgRcW6fTNU/Tnb1L884/xBmK5uVKy2VYSKyxKX1MhzJyowIuWVUvp7Z9J/YxSieTmSLvdVpU2mwwOoUOmgIA5AQSeOTsq7SeAwLM/Izo0J4DAM2cXVCUCLyiyjAuBNBBA4KUBOlOGRgCBFxrqtE3U4ctPpWDe3Gbzl669ntT16Jm2nsKYOFGBV/7U91L90Z//tBSJSNdLtla78dqE0SZzQMCIAALPCBtFjhBA4DkSFG0aEUDgGWELtAiBFyheBodAuAQQeOHyZrZwCSDwwuWdjtnYgVcvZZV1y0S/8Mr3vN13Ta+OR68nBatktuBMx7PInP4RQOD5x5KR7COAwLMvEzryjwACzz+Wfo2EwPOLJONAwAICCDwLQqCFwAgg8AJDu9yBy0ti8vPnMenYTWT42jmiNn0Fd6l34LWb9IsUTpvCO/CWQrn6q2lS/vDX8Z/k9u4gnc/YVCJ55l8IDS5MRobAEgIIPJ6ETCaAwMvkdFkbAs++ZwCBZ18mdAQBYwIIPGN0FDpAAIEXfkjTf43KwxfHpHpxzJt8+FoROeCc3PAbyfAZEz1CqzFUfzdDar+eKZHORdJu6xUlp5gPWWT44+H88hB4zkfIApZDAIHH45HJBBB49qWLwLMvEzqCgDEBBJ4xOgodIIDACz+k526OynfvRZtNfMItudK9X5Db8MJfZ7pnTEbgpbtX5odAsgQQeMkS436XCCDwXEqLXpMlgMBLlljw9yPwgmfMDBAIjQACLzTUTJQGAgi88KG/dGdUxr/RXOAdc12O9B7MkU0/00Dg+UmTsWwjgMCzLRH68ZMAAs9PmoxlGwEEnm2JiCDw7MuEjiBgTACBZ4yOQgcIIPDCD2nWHzG579wGqa1aMvew0RE56DyO0PqdBALPb6KMZxMBBJ5NadCL3wQQeH4TZTybCCDwbEpjSS8IPPsyoSMIGBNA4Bmjo9ABAgi89IRUOj8mE78Q6dg9FvxHLNKzxLTPisBLewQ0ECABBF6AcBk67QQQeGmPgAYCJIDACxCu4dAIPENwlEHARgIIPBtToSe/CCDw/CLJOLYRQODZlgj9+EkAgecnTcayjQACz7ZE6MdPAgg8P2n6MxYCzx+OjAIBKwgg8KyIgSYCIoDACwgsw6adAAIv7RHQQIAEEHgBwmXotBNA4KU9AhoIkAACL0C4hkMj8AzBUQYBGwkg8GxMhZ78IoDA84sk49hGAIFnWyL04ycBBJ6fNBnLNgIIPNsSoR8/CSDw/KTpz1gIPH84MgoErCCAwLMiBpoIiAACLyCwDJt2Agi8tEdAAwESQOAFCJeh004AgZf
2CGggQAIIvADhGg6NwDMERxkEbCSAwLMxFXryiwACzy+SjGMbAb8F3rzpC+XX8VOl9+BuMnhkX9uWSz9ZRgCBl2WBZ9lyEXhZFniWLReBZ1/gCDz7MqEjCBgTQOAZo6PQAQIIPAdCokUjAn4KvF8+/0sev/x1aWiIer1stOdo2fY/6xr1RREE/CCAwPODImPYSgCBZ2sy9OUHAQSeHxT9HQOB5y9PRoNAWgkg8NKKn8kDJoDACxgww6eNgJ8C745TnpXpk+bE1xKJROT8Z46QgqL8tK2PibObAAIvu/PP9NUj8DI94exeHwLPvvwRePZlQkcQMCaAwDNGR6EDBBB4DoREi0YEAhd4z46VgsI8o94ogkCqBBB4qRKk3mYCCDyb06G3VAkg8FIl6H89As9/powIgbQRQOClDT0Th0AAgRcCZKZICwE/BR5HaNMSIZMuhwACj8cjkwkg8DI5XdaGwLPvGUDg2ZcJHUHAmAACzxgdhQ4QQOA5EBItGhHwU+DpBuZNX6Q+YjGFj1gYpUGR3wQQeH4TZTybCCDwbEqDXvwmgMDzm2jq4yHwUmfICBCwhgACz5ooaCQAAgi8AKAypBUE/BZ4ViyKJiDwNwEEHo9CJhNA4GVyuqwNgWffM4DAsy8TOoKAMQEEnjE6Ch0ggMBzICRaNCKAwDPCRpEjBBB4jgRFm0YEEHhG2ChyhAACz76gEHj2ZUJHEDAmgMAzRkehAwQQeA6ERItGBBB4RtgocoQAAs+RoGjTiAACzwgbRY4QQODZFxQCz75M6AgCxgQQeMboKHSAAALPgZBo0YgAAs8IG0WOEEDgORIUbRoRQOAZYaPIEQIIPPuCQuDZlwkdQcCYAALPGB2FDhBA4DkQEi0aEUDgGWGjyBECCDxHgqJNIwIIPCNsFDlCAIFnX1AIPPsyoSMIGBNA4Bmjo9ABAgg8B0KiRSMCCDwjbBQ5QgCB50hQtGlEAIFnhI0iRwgg8OwLCoFnXyZ0BAFjAgg8Y3QUOkAAgedASLRoRACBZ4SNIkcIIPAcCYo2jQgg8IywUeQIAQSefUEh8OzLhI4gYEwAgWeMjkIHCCDwHAiJFo0IIPCMsFHkCAEEniNB0aYRAQSeETaKHCGAwLMvKKsFXsmicjnrsrtk9ryF8tKDl8Xp7XvMxTJx8hSRSMT7ex2K28qHz9/s/f8zF1S1SrlXlyKZU1ItsVbv5AYIuEUAgedWXnSbHAEEXnK8uNsdAgg8d7Ki0+QJIPCSZ0aFOwQQeO5kRafJE0DgJc8s6AprBd7iymrZT4m6TdZbQz74/PtmAm+Hg86Smy4+QYYO6vsvPgi8oB8ZxreZAALP5nToLVUCCLxUCVJvKwEEnq3J0JcfBBB4flBkDFsJIPBsTYa+/CCAwPODor9jWCvwKquqZX5JqffXhdc91EzgbbL7SfLkXRdIr+5dEHj+Pg+M5jgBBJ7jAdL+cgkg8HhAMpUAAi9Tk2VdmgACj+cgkwkg8DI5XdaGwLPvGbBW4DWi+mbCr/8SeKO2Hisbjxkp3/44Wbp16Sj/HbunbLzu6l4JO/Dse8joKDwCCLzwWDNT+AQQeOEzZ8ZwCCDwwuHMLOkhgMBLD3dmDYcAAi8czsySHgIIvPRwX96szgm8aDQm5119n2y72RhZb62V5YNPv5ezLr9LXn74Cm9HXjTW+pvt9JvzWr/LvrDoKLMILHmDI5ftBMZXL5Y7S+fJnIY62bpNBzm6Uw/J//v9m/72zhPRGk+N3fu9m9/AW0PFzx0k0PjbSgJ/jHFwdbSc1QTU793en735vTurHwP7Fs8DaV8mdNSSQLqf0hz1h5NE/ArJhUdAZ+LnFYmpy88Bl7YDr+X4/zn5Stljh01kxy3Xk1kJfMSip/qIxVw+YuFnTIxlQMDXXyh/z88OPIMgllNSFm2QIyumSW0sGr/r4KIusmthJ38nUqP5/HuxUX/zJpTJgh8qRHJi0mvtTtJpaDujcYIq6tguX2pro1JV1xD
UFIwLgbQQKC7Kk/Zt86Wiul7KK+vS0gOTQiAoAkX5OeoYbZ4sWlwb1BSMC4GkCfj1T6zswEsaPQVJEPBX1SQx8d+39u7aJiG/kvzIVJgS0Jn4eQUu8CqramTyn9Nl9ZWHxPs+8PjL5KA9t5ZtNl2bI7R+pslYzhFA4Pkb2Td1lXJp5exmg66R10bOb9fb34ksGK18apX8NW5us06G7t5L2nQvtKC7JS1whNaaKGjEZwIcofUZKMNZRYAjtFbFQTM+E0Dg+QyU4awiwBFaq+LwmnHuCO2i0grZat/T5KZLjpf111pVPvriBzn9kjvl1UeulK6dOyDw7HvG6ChEAgg8f2FXxBrkiPKpagfeP/slg9qB52/nyY8264uFMv+7smaFvdbpJN1HdUx+sIAqEHgBgWXYtBNA4KU9AhoIkAACL0C4DJ12Agi8tEdAAwESQOAFCNdwaGsF3tsffS2nXXyH98KMuvoGyc/Pk0H9e8nz918qH3z2vVx7xxMyd8Ei6durm5xx3H6y7uiVPQR8xMLwSaAsIwgg8PyP8fv6KnmkukTmR+tl4/xiOUgdoQ3mHXj+957MiEvdgbdHb2nTrSCZYQK9F4EXKF4GTyOBRAXe5DmzZEbpQlmlT3/pXtw+jR0zNQQSJ4DAS5wVd7pHAIHnXmZ0nDgBBF7irMK601qBZwoAgWdKjrpMIIDAy4QU07eG+eodePMnVKj38cWkx+iO0nl4cfqaWcrMCDyr4qAZHwkkIvCeGP+pfPL7JG/WvNxcGbvB5rJy734+dsFQEAiGAAIvGK6MagcBBJ4dOdBFMAQQeMFwTWVUBF4q9KiFgGUEEHiWBUI7vhJA4PmKk8EsItCawCurrpRzX3pKHUr45zj/Sr36yrGbbG3RKmgFAksngMDjychkAgi8TE6XtSHw7HsGEHj2ZUJHEDAmgMAzRkehAwQQeA6ERItGBFoVeFWVcs5LTzYbG4FnhJqiNBBA4KUBOlOGRgCBFxpqJkoDAQReGqC3MiUCz75M6AgCxgQQeMboMqYwWrFQYhWLJLfXIIk1RKX8j6nSUFkt7VccKHltipxeJwLP6fhofjkEWhN4uvTpbz6XDyf/4o3CEVoeJ5cIIPBcSotekyWAwEuWGPe7RACBZ19aCDz7MqEjCBgTQOAZo8uIwppx90jt2w+ptcQkMnB1mdd5K6meU+KtLUfJu0EH7CwFHd198T0CLyMeUxaxFAKJCDxd9uvcWTJzER+x4CFyiwACz6286DY5Agi85Hhxt1sEEHj25YXAsy8TOoKAMQEEnjE65wsbZv0hldceFF9HdayDzGu/qUQ6dIv/vS5rj5SeG6/t7FoReM5GR+OtEEhU4AESAi4SQOC5mBo9J0oAgZcoKe5zkQACz77UEHj2ZUJHEDAmgMAzRud8Ye2nL0jNs9fE11EV6yjzCsdITrd/vlKJwHM+ZhaQoQQQeBkaLMvyCCDweBAymQACL5PTZW0IPPueAQSefZnQEQSMCSDwjNE5XxhTX6lcfNV+Eiub761Ff6xywYqHS/XiJV+t5Ait8xGzgAwmgMDL4HBZGgKPZyCjCSDwMjrerF8cAs++RwCBZ18mdAQBYwIIPGN0GVEYLZkldR88IdHyEslfdxfJHTKaj1hkRLIsItMJIPAyPeHsXh878LI7/0xfPQIv0xPO7vUh8OzLH4FnXyZ0BAFjAgg8Y3QUOkCAd+A5EBItGhFA4Blho8gRAgg8R4KiTSMCCDwjbBQ5QgCBZ19QCDz7MqEjCBgTQOAZo6PQAQIIPAdCokUjAgg8I2wUOUIAgedIULRpRACBZ4SNIkcIIPDsCwqBZ18mdAQBYwIIPGN0FDpAAIHnQEi0aEQAgWeEjSJHCCDwHAmKNo0IIPCMsFHkCAEEnn1BIfDsy4SOIGBMAIFnjI5CBwggYoarlgAAIABJREFU8BwIiRaNCCDwjLBR5AgBBJ4jQdGmEQEEnhE2ihwhgMCzLygEnn2Z0BEEjAkg8IzRUeg
AAQSeAyHRohEBBJ4RNoocIYDAcyQo2jQigMAzwkaRIwQQePYFhcCzLxM6goAxAQSeMToKHSCAwHMgJFo0IoDAM8LmdNHimjIpr14kPTr0k5xIjtNraa15BF5rhPi5ywQQeC6nR++tEUDgtUYo/J8j8MJnzowQCIwAAi8wtAxsAQEEngUh0EIgBBB4gWC1dtBPfxsnr3z/oDREG6R3x4Fy+MbnSPuiztb2m2pjCLxUCVJvMwEEns3p0FuqBBB4qRL0vx6B5z9TRoRA2ggg8NKGnolDIIDACwEyU6SFAAIvLdjTMml51UK57NUjJRaLxedfd8g2stvosWnpJ4xJEXhhUGaOdBFA4KWLPPOGQQCBFwbl5OZA4CXHi7shYDUBBJ7V8dBcigQQeCkCpNxaAgg8a6PxvbEfZ3whj3x6TbNx+3cZJsdvcYXvc9kyIALPliToIwgCCLwgqDKmLQQQeLYk8U8fCDz7MqEjCBgTQOAZo6PQAQIIPAdCokUjAgg8I2xOFtU31MnN75wpc0qnxvvfZ50TZPQKmzi5nkSaRuAlQol7XCWAwHM1OfpOhAACLxFK4d6DwAuXN7NBIFACCLxA8TJ4mgkg8NIcANMHRgCBFxhao4FrG2pkTtV06dmmnxTkFhqNsbyiCvUBi/d+eU5Kq+bLyP4bysh+6/o+h00DIvBsSoNe/CaAwPObKOPZRACBZ1MaS3pB4NmXCR1BwJgAAs8YHYUOEEDgORASLRoRQOAZYQuk6LdFP8p9k66VspoSaZffQY5c6SwZ2mnVQObKlkEReNmSdHau00TgVeZ+K3U5s6Vt/VqSH+ueneBYtRMEEHj2xYTAsy8TOoKAMQEEnjE6Ch0ggMBzICRaNCKAwDPCFkjRJV8fJ7MXT4uP3atdfzlvzdsCmStbBkXgZUvS2bnOZAReSWmdvPDxuzJtZkz6DJwh6275lQzN+6+0aRiRnfBYtfUEEHj2RYTAsy8TOoKAMQEEnjE6Ch0ggMBzICRaNCKAwDPCFkjRSR/vIfXRuvjYkUhErlv/SSnMLQpkvmwYFIGXDSln7xqTEXj3PvWn/DX3zzisfkOmyG67l0nPqpOyFyArt5oAAs++eBB49mVCRxAwJoDAM0ZHoQMEEHgOhESLRgQQeEbYAil69Ndb5NPZb8XHXr/XVnLAiicEMle2DIrAy5aks3OdiQq8uvqYXHn7H1KTMyUOKr+wVo48cYL0rjo9O+GxausJIPDsiwiBZ18mdAQBYwIIPGN0FDpAAIHnQEi0aEQAgWeELZCi+mi9fDDrFZm86CcZ0nEl2azPzpKXkxfIXNkyKAIvW5LOznUmKvA0nQefmS1/zJom0Ui5B6v/kOly8I7rcIQ2Ox8dJ1aNwLMvJgSefZnQEQSMCSDwjNFR6AABBJ4DIdGiEQEEnhE2ihwhgMBzJCjaNCKQjMDT78B784MS+XPmAhkwsEK23XAF6Vrc02heiiAQBgEEXhiUk5sDgZccL+6GgNUEEHhWx0NzKRJA4KUIkHJrCSDwrI2GxnwggMDzASJDWEsgGYFn7SJoDALLIIDAs+/RQODZlwkdQcCYAALPGB2FDhBA4IUfUsGnk6Twu7+koWdHqdpulMTaFobfRBbMiMDLgpCzeIkIvCwOPwuWjsDLgpCzeIkIPPvCR+DZlwkdQcCYAALPGB2FDhBA4IUbUtG7P0q7xz+OT1o/qKeUnrWrSE4k3EayYDYEXhaEnMVLROBlcfhZsHQEXhaEnMVLRODZFz4Cz75M6AgCxgQQeMboKHSAAAIv3JA6XP685P85p9mkiy7eRxp6dw63kSyYDYGXBSFn8RIReFkcfhYsHYGXBSFn8RIRePaFj8CzLxM6goAxAQSeMToKHSCAwAs3pHb3viNFX0yOTxrLy5GSG/4jUpQfbiNZMBsCLwtCtnSJNQ11MrNmvvQp7CaFucH82kbgWRo+bflCAIHnC0YGsZQAAs++YBB49mVCRxAwJoDAM0ZHoQMEEHjhhpQ
zv0w63PCK5M4tEy3vFh+4sdRsMCLcJrJkNgRelgRt2TJ/Lp8iN0x5ThbVlUv7vLZy2sC9ZOX2K/jeJQLPd6QMaBEBBJ5FYdCK7wQQeL4jTXlABF7KCBkAAvYQQODZkwWd+E8Agec/01ZHbIhK7syFEu1azAcsWoVlfgMCz5wdleYETp54h0yvmhcfoF+b7nLDiGPMB1xGJQLPd6QMaBEBBJ5FYdCK7wQQeL4jTXlABF7KCBkAAvYQQODZkwWd+E8Agec/U0a0gwACz44csq2L/b6/XOqj9fFlRyQiD69+lhTl+HuUFoGXbU9Wdq0XgZddeWfbahF49iWOwLMvEzqCgDEBBJ4xOgodIIDAcyAkWjQigMAzwkZRigTunPqyvLPg2/goW3QdJUcP2CnFUf9djsDzHSkDWkQAgWdRGLTiOwEEnu9IUx4QgZcyQgaAgD0EEHj2ZEEn/hNA4PnPlBHtIIDAsyMHF7uojdbI/Prp0iN/gORFkts5Vx9rkHHzvpSfK6bKiHb9ZYceY9QYub5jQOD5jpQBLSKAwLMoDFrxnQACz3ekKQ+IwEsZIQNAwB4CCDx7sqAT/wkg8Pxnyoh2EEDg2ZGDa138Vf2jPFpyuVQ2lKmPUHSRfTufIQOLVrVuGQg86yKhIR8JIPB8hMlQ1hFA4FkXiSDw7MuEjiBgTACBZ4yOQgcIIPAcCIkWjQgg8IywBVpUIWVSmlMifaIrqDfDRQKdy3TwG2cfK/PqpsbLu6tdeP/tdbvpcIHVIfACQ8vAFhBA4FkQAi0ERgCBFxha44EReMboKISAfQQQePZlQkf+EUDg+ceSkewigMCzK48385+RlwsekQb1nwHRoXJs9fnSIdbFqibLGxbKVTMPlpj6T+OVq47QXtzveav61M0g8KyLhIZ8JIDA8xEmQ1lHAIFnXSTswLMvEjqCgDkBBJ45OyrtJ4DAsz8jOjQjgMAz4xZEVUlkrpzX9vBmYmzT+h1l75qjg5gupTGfLrlevlv8bnyMtYu3lV07H5/SmEEUI/CCoMqYthBA4NmSBH0EQQCBFwTV1MZkB15q/KiGgFUEEHhWxUEzPhNA4PkMlOGsIYDAsyYKGZ/3gdxfeE2zhlaIDpczq66zp8m/O2mI1csnFS/KlJqfZWjhGrJO8XaSG8mzrk8EnnWR0JCPBBB4PsJkKOsIIPCsi4QdePZFQkcQMCeAwDNnR6X9BBB49mdEh2YEEHhm3IKoqpM6uaTtMTI/Mjs+/GHVp8taDZsEMV1WjInAy4qYs3aRCLysjT4rFo7Asy9mduDZlwkdQcCYAALPGB2FDhBA4DkQEi0aEbBK4NXVS5v3vpP832ZK3YgBUrXxaiJ5uUbrcrWoNLJA3sp7XhbmzpMxtZvLyOgYV5diRd8IPCtioImACCDwAgLLsFYQQOBZEUOzJhB49mVCRxAwJoDAM0ZHoQMEEHgOhESLRgRsEnjtH3hDij7/Jb6Oqs3WkIp9NzVaF0UQ0AQQeDwHmUwAgZfJ6bI2BJ59zwACz75M6AgCxgQQeMboKHSAAALPgZBo0YiANQIvGpVuJ90hkdq6+DqiHdrJgmvGGq2LIggg8HgGMp0AAi/TE87u9SHw7MsfgWdfJnQEAWMCCDxjdBQ6QACB50BItGhEwBqBp7rvcu6DkjtvUXwd9f17yMJz9zdaF0UQQODxDGQ6AQRepiec3etD4NmXPwLPvkzoCALGBBB4xugodIAAAs+BkGjRiIBNAi//l6nS4d5xklNRJdGOxVJ2xLZSt2I/o3Utq2h6WZ38MK9GaupiMrRLvqzWo8jX8RnMLgIcobUrD7rxlwACz1+ejGYXAQSeXXnobhB49mVCRxAwJoDAM0ZHoQMEEHgOhESLRgRsEnjeAtQR2rzZC6W+d1eRfH8/YFFeG5XXJpdLNPYPqjF928jgzgVG7CiynwACz/6M6NCcAALPnB2V9hNA4NmXEQLPvkzoCALGBBB4xugodIAAAs+BkGjRiIB1As9
oFYkVTSmtk0+nVTa7eYWOBbJ+/zaJDcBdzhFA4DkXGQ0nQQCBlwQsbnWOAALPvsgQePZlQkcQMCaAwDNGl9bCWFWVNMydLbn9Bkgk19/dLmldmM+TI/B8Bspw1hDIJoHHDjxrHrvQGkHghYaaidJAAIGXBuhMGRoBBF5oqBOeCIGXMCpuhID9BBB49mfUssPaD96RxQ/cJVJTIzk9e0vxWedLbq8+7i0khI4ReCFATsMUM6Iz5Z3o+97Mm+VsLP1z/H3fWhqWlPSU2STwNBzvHXhz1Tvw6mMypEuBjOxRmDQzCtwhgMBzJys6TZ4AAi95ZlS4QwCBZ19WCDz7MqEjCBgTQOAZo0tLYUxJu0VHHqjeN1Ubnz9/nfWl+OQz09KP7ZMi8GxPKPn+5sbmyeW1V0ud+o++CiIFclb+adIz0iP5wRyuyDaB53BUtG5AAIFnAI0SZwgg8JyJikYNCCDwDKAFXILACxgww0MgTAIIvDBppz5X3a8TpeKC5rIup1Nn6XjHg6kPnoEjIPAyL9R3G96XZ+tfaLawXfJ2lK1zt8y8xS5nRQi8rIo76xaLwMu6yLNqwQi8rIo76xaLwLMvcgSefZnQEQSMCSDwjNGlrbD8wrOlftLP8fnbHHSYFG2/S9r6sXliBJ7N6Zj19k30W7mv7qFmxYfnHyKjc0aZDehoFQLP0eBoOyECCLyEMHGTowQQeI4GR9sJEUDgJYQp1JusFngli8rlrMvuktnzFspLD14WBzNt5lw57+r7ZdJvU6VPr25yzkkHyejVhnk/n7mgqlWAvboUyZySaom1eic3QMAtAgg8t/LS3cYWL5bqcS9Jw9S/JH+tMVK40WYikYh7CwmhYwReCJBDniIqUXmo/lEZ3/C1N/NaOaPlkPwDJUf9J5suBF42pZ19a0XgZV/m2bRiBF42pZ19a0Xg2Ze5tQJvcWW17HfMxbLJemvIB59/30zgHXLSFbL5hqPlwN23kk/H/6Rk3n3y1pPXSX5eLgLPvmeMjkIkgMALETZThU4AgRc68tAmLImVKJUXk26RrqHNadNECDyb0qAXvwkg8Pwmyng2EUDg2ZQGvfhNAIHnN9HUx7NW4FVWVcv8klLvrwuveygu8BYsLJNt9z9DPnvlNsnLzfUI7Dn2Ajnj2P1knVEjEHipPxOM4DABBJ7D4dF6qwQyUuBFY9Lui7+kcMJMibUrkMUbDJHagV1aZcENmUUAgZdZebKa5gQQeDwRmUwAgZfJ6bI2BJ59z4C1Aq8R1TcTfm0m8L6ZMFkuvv4heeGBS+M0T73odhkzemXZe6dNEXj2PWN0FCIBBF6IsJkqdAKZKPCKfpgpxe9N+odlTkRK/rOuRIuLQufLhOkjgMBLH3tmDp4AAi94xsyQPgIIvPSxZ+bgCSDwgmec7AzOCbxPx/8oN9/7rDxx5wXxtZ571X2y4uB+cvBe20h1bUOrDLTkqKmLtnofN0DANQI56t1pubkRqavn+XYtO/ptnUB+XkQa1KMdVbvWMuXKe2GC5Pwyu9ly6ndZTaIr98qUJbKOBAjkqd+383JzpL4hpv7i9+8EkHGLQwRy1L+YyFV/8WcTh0Kj1YQJ5OflqD+bxDLqzyYJL54bM55AUUFuQn4l40FYtECdiZ9XJKYuPwdsuQPv2x8nywXXPtjsnXinXHibrL/WqrLnjptISXltq9N3bl8gi9R9vjba6qzcAIHgCeg/RBQpQV1eVR/8ZMwAgZAJtGuTJ/XqX77UZJCgLvhuhhS9MzFOMqbe5Vpx+LoSYwdeyE9XeqfTfxhrW7jkD8mVNa3/i8j0duvf7LNmz5JvJnwnA/uvIKuMWDnlgRfMKJNcxbJT93Ypj8UA/hEoUH82KVB/Nqngzyb+QWUkawjoHdS16s8mtRn0ZxNr4NJI2gl0Ud4kEb+S9kazqAGdiZ9X4AJvYWm5bLn3qfLxi7dKm6IlzW93wBly+dljZdSqwzhC62eajOUcAY7QOhcZDSdBIG1HaNX
/7uTd+qhEvp8osRUHSv3xB4j06p5E58u5teU78NYfLLWDsvNDDv4AdXOUbDxC+80P38it99wsDQ1LhOU2m20r+++lfm0ZXFrsP3PlJzL5y5le9Vo7DJPtjh5tMBIlQRDgCG0QVBnTFgIcobUlCfoIggBHaIOgmtqYzh2h1cs9/JSrZe01RsjYA3aUce994R2pHffo1eroYA4CL7XngWrHCSDwHA+Q9pdLIF0CL/eS2yVXfQ298YoNXUHqbvofaUHANwLZKPCuuOEymTj5n92nEfUKiDuvv0eKCguT5vrN67/Lq7eNb1a3/8WbypBRPZMeiwL/CSDw/GfKiPYQQODZkwWd+E8Agec/01RHtFbgvf3R13LaxXeIqBO5dfUNkp+fJ4P695Ln779UZsyeL/+74h6Z9Ps06d+nh1x46qGyyvCBHouZC6paZdKrS5HMKanmCG2rpLjBNQIIPNcSo99kCKRL4OXv/V+JLG7yvy1KNNQ+c5Mo05BM+9wLgWUSyEaBd9HVF8off/0eZ6IF3l033CuFBckfDdHyTku8ptfmB68mG+yV+rFcHtvUCSDwUmfICPYSQODZmw2dpU4AgZc6Q79HsFbgmS4UgWdKjrpMIIDAy4QUWcOyCKRL4LEDj2dyaQQqFsfkm+9E2raJyRqr56iPUJhzykaB98NP38uNd94QP0K73Zbby76772cEceZvJfLA6e9I9O93UBW1K5Ajb9laOvIuPCOefhch8Pwmyng2EUDg2ZQGvfhNAIHnN9HUx0Pgpc6QESBgDQEEnjVR0EgABNIl8KTJO/BkpSFSd8y+/r0DLwBODBk8gfkLRK69KSrl5Us+hzVgQEROOSFH1GEBoysbBZ4GNXvObPnup+9khb4DZKXhqe2WmzZxvox/9TfJUx9LWH+PlaRr3/ZGWVDkPwEEnv9MGdEeAgg8e7KgE/8JIPD8Z5rqiFkp8HqrI7Sz1BFaLghkGoFUBd7curlSVl8qvQp6S3Fucabh+dd66stF5j8RkYofY1K0gkiPg0QKfPo2QcbDS8MC0ybw0rBWprSbwKuvR+W1N5p/y/7YI3NklZUiRo1nq8AzgkWRcwT0V5bbFOTIwoo653qnYQi0RgCB1xohfu4yAQSefekh8OzLhI4gYEwgFYH3zeLx8mf1H97cORG1g6F4Q+lZ0Mu4FxcKZ96h5N03//xDuJZ4A851ofPs7BGBl52527jqF16OylvvIvBszIae7COAwLMvEzryjwACzz+WjGQfAQSenZn42VUkpi4/B0x2rETegccOvGSpcr8rBEwFXnW0Sl5d+HKzZfbM7yUbdtjYlaUb9fn7qRFpKGvyW5baPDP0FiUw+TaBEc+gixB4QRNm/EQJlJbG5IrrYvEjtIMH5ciJx0Y4QpsoQO7LKgIIPPfjXjBvkUz+5S+prqqRvgN6ytARK4j+8AyXCAKPpyCTCSDw7EuXHXj2ZUJHEDAmgMBLDl3LHXiF/URWuCC5Mbg7PAIIvPBYM1PrBPiIReuMuAMCmgACz+3noLa2Tj56Z7z6SExDfCErrjJIVhjc1+2F+dQ9As8nkAxjJQEEnn2xIPDsy4SOIGBMwFTg6QlbHqHdoP3G0iO/h3EvLhTqd+DN/T+RyonqHzCW8w68mFSrf9NcKRLrrJbFv3FOV7YIvHSRZ96gCfAOvKAJM346CSDw0kk/9bnnz1ko3375U7OBunbvLKPXXSX1wTNgBAReBoTIEpZJAIFn38OBwLMvEzqCgDGBVASennRO3Rwpry/Lmo9YJAI6kvOTxHI/V9ouqgReV4nVb6vK2iZSyj0+E0Dg+QyU4awhgMCzJgoaCYAAAi8AqCEO6e3Ae/sriTaoPwf9fbED758AEHghPoxMFToBBF7oyFudEIHXKiJugIA7BFIVeO6sNKxOKyWS/5ia7J/35MWiK4s0bBBWA8zThAACj8chUwkg8DI1WdalCSDw3H8Omr4Dr3e/7rLiyoN4B97fsSLw3H++WcGyCSD
w7Hs6EHj2ZUJHEDAmgMAzRrf0wsg0ieS93uxnsZg6Vly/i88TMVwiBJoKvLqGBvnq5wkyo2SeDOrZV0YPX0l9PZnjzYlw5B77CCDw7MuEjvwjkLkCr0Htzv9T/Ss+9QJdKfIPGCOlTKChqlJqyxZJUfdeEsnJSXm85Q2AwAsUL4OnmQACL80BLGV6BJ59mdARBIwJIPCM0S2jUL2wOe8F9W+ZS+I/j9VvqjbkDfN7IsZLgEBTgffch+/IpKl/xavWWXk12WL0OgmMwi0QsI8AAs++TOjIPwKZKPByZLLkRS6RHJmrXo3bTmqjZ6gXbYzxDxojGROY9/n7Mm3cM+q0RIO06dVPhhx4jBR06mI8XmuFCLzWCPFzlwkg8OxLD4FnXyZ0BAFjAgg8Y3TLKawSyZ2gJF6Zev/LEInEBgUxCWMmQKBR4JVV1sh1Tz4ssdg/R5vbtWkrJ+6xXwKjcItLBOobYjJhWq3MWNAg7YpyZNSgAunUNtjdFOngg8BLB3XmDItAJgq8gsipSt7982GHmHSVmtijYSFlnmUQqCsvlQlXn63+Res/fz7oNmZjGbBTcH8+QODxOGYyAQSefeki8OzLhI4gYEwAgWeMjkIHCDTdgXfLc09IReXieNd9e/SUg7fe0YFV0GIyBH6YUiu/za6Ll+jf47ZZvUjycjPruDQCL5mngntdI5CJAq8wsoc6PvvP/wbpTGpij6njtMHt9HIt93T0W/rrT/L7w7c2m7ptv0Ey4ugzAmsHgRcYWga2gAACz4IQWrSAwLMvEzqCgDEBBJ4xOgodINBU4P0xc7q88PF7UlNbK8Vq991em20lvbp0c2AV9rZYp1gumj1LOvXqLfkFBVY0+s6EKimt/OfLh7qpTVYukq7tc63oz68mEHh+kWQcGwlkosDLi9wvefJUHHdDbEOpk3NtxG/ck97lPnHyD1JaViIjho6UTp26Go8VVmEs2iC/3Ha5VM+ZGZ9yhT0Pla5rBHe8GYEXVrrMkw4CCLx0UF/+nAg8+zKhIwgYE0DgGaOj0AECLb9CW1tfLyXqJdXdOnZWO7IyS+iEHcfMXyfJG3fcIdUV5VJU3F62OeYY6bPi8LDb+Nd87MBLewQ0AIGUCWSiwBP1xrtceUV9POkbtetuuNRFd1Wv2miTMiubBnjy+Xvlh5+/8loqKCiUw/Y/Wfr3tf81InWLy2Xux29LTcl86bzaaOm86pqBYkXgBYqXwdNMAIGX5gCWMj0Cz75M6AgCxgQQeMboKHSAQEuB50DLzrT4xAXnycKZ/+xY6Nynj+x70SVp71+/A09LvJkLeQde2sOgAQgYEshMgWcIw5GyefNny413XdCs25Erry377HaEIysIr00EXnismSl8Agi88Jm3NiMCrzVC/BwCDhFA4DkUFq0mTQCBlzSyhAvuOvYoidbVx+/Pyc+To26/K+F6bkyNAEdoU+NHtd0EEHh257O07hB4iWeGwEucFXe6RwCBZ19mCDz7MqEjCBgTQOAZo6PQAQIIvOBCev+hB+WXjz+KT7DShhvJpoccGtyEjNyMQKPAm7e4VhbW1EtRLKLer8UFgcwggMBzM8cnX1BHaH9y7wht2LQReGETZ74wCSDwwqSd2FwIvMQ4cRcEnCCAwHMiJpo0JIDAMwSXQFmDep/gD++8LbN+myy9hw6TkVtsKbl5KKQE0PlyixZ4FTkic5W8a4jGvDE7RyNS5MvoDAKB9BJA4KWXv+nsLT9iUdW+k+SpD1t0538bmiFF4Jk+YdS5QACBZ19KCDz7MqEjCBgTQOAZo6PQAQIIPAdCokUjAu2UwJvS0CBRJe8aBV6+8njd1E48Lgikm4B+Jj+dtkCmlFbKkM7Fsk7fzpKbk/izicBLd4KpzV+npN1VJaXyWXWNN9A27drIiZ06pDZoBlUj8DIoTJbyLwIIPPseCgSefZnQEQSMCSDwjNFR6AABBJ4DIdGiEYG2SuBNReAZsaMoeAIv/DJDxs9cFJ9oLSX
wdh3RJ+GJEXgJo7LyxrcXV8kNi8qa9XZJt84yurDAyn7DbgqBFzZx5guTAAIvTNqJzYXAS4wTd0HACQIIPCdioklDAgg8Q3CUWU8gyCO0ddWLpbayQtp27iGRSOK7pqyHRoOhEIiq3VeXfDBR6hqi8fmKC/PlrA1XTHh+BF7CqKy88VYl78Ypidf0OqRDsezdvp2V/YbdFAIvbOLMFyYBBF6YtBObC4GXGKeMvmvOgp/kj2nvS0z9p3+vtdVfYzJ6vZm8OAReJqfL2hB4PAOZSiCoj1jM/PkL+fPL19TR3KgUd+0jq2x1oBS0bZ+pGJ1ZV1lNjSysqpYBHTs4IVVv+vw3mbd4yfFJfXVvVygnrTs0Yd4IvIRRWXnjVPWF8uPnLZCGJa/nlHaRHLmtR2fehfd3Wgg8Kx9bmvKJAALPJ5A+DoPA8xGmVUOp/5UtmFchEfWf2m5tJZan3o69lKt88SwZ/+P9zX6y2op7S7fOw6xaDs0kRgCBlxgn7nKTAALPzdzounUCjQKvoqpeyirrWi9I4I7aynL58slrRL+IvvHqvdI6MnS9nRKo5pagCDw7cZI8OuEn9a7DqHqfXGc5b6MNpHMbuz9X8sfCxfLEhOlSqUROe7X7bp9V+srAzonvvkLgBfU0hTfur7V18nJllfcRi52L28mgfD5y1EgfgRfec8hM4RNA4IXPvLUZEXitEXLw55H6Bunw7WzJ/fsfAhqK8qRsdG+J5ef+azVTZn7i7b5reg3ovZ4MGbC5gyunZQQez0AmE0DgZXK62b22IATe/Ck/yy/vPN4MbHH3fjJqp6MyFnZ0/pJjnjndlv4vLdO98HmLK2Xsq+OaSdUdhg3Wzoj/AAAgAElEQVSVI0evke7WWp2/Vh2hnV9ZIz3aFUleEh+w0AMj8FrFyw0OE0DgORwerbdKAIHXKqLQb0DghY48+AkLZ1dIu0nzm020eFhXqenz72MzZWoH3tfswAs+lJBmQOCFBJpp0kIAgZcW7EwaAoEgBF60oV6+fu4WqS4via9g+CZ7SY8hI0NYUchTKG/X8HCVRL+o9SaOrF0geYe2USYv5D5ame6jqdPk2s++aHbXsK5d5NotM/tfmiLw7HoO6cZfAgg8f3kyml0EEHh25aG7QeDZl0nKHRXMXSzFv8xrLvCGdJaafh2XOnb8HXixqAzova70U+/B43KTAALPzdzoOjECCLzEOHGXewSCEHiagv6AxdTvP5TaxaVK3K0hXVcY4R6cBDqOfl0nDfdWNrsz94i2krNmfgLV4d2iPwRx/OtvyOyKxfFJT1tvHdlowIDwmkjDTAi8NEBnytAIIPBCQ81EaSCAwEsD9FamRODZl0nqHdVHpdO3sySn6RHaNdUR2rx/H6FNfTJGsIkAAs+mNOjFbwIIPL+JMp4tBIISeLasL+g+Gp6vluib/3xkQc+Xs3Wh5O5m37vlSqqq5PmJv6rjqFWy6cABMqZvn6DxpH18BF7aI6CBAAkg8AKEy9BpJ4DAS3sE/2oAgWdfJv50pCRewfxK9QkLWe5HLPyZjFFsIYDAsyWJ5fcxZ85UGT/+bakoXyiDBq0io9fcQvLy7NopYiNJBJ6NqfjXU0OsSkrlSzVgRDrKOpIbsU+++Lfa5iMh8FIjG5sdlforK0Rq/v5gR4FI3tnFEunFv7hMjaw/1Qg8fzgyip0EEHh25kJX/hBA4PnD0c9REHh+0mQsCKSZAAIvzQEkMH1dbY0888xNov9v4zVy9Y1ljVGbJFCd3bcg8DI3/3pZLJOj5yv/MstbZFGkjwzNuVDypNjqRdeqI6ozJ34pObl50nelMZKbp8yRwYXAM4DWoiQ2rV6i79WpD0SI5G6eL5H+fCUzdar+jIDA84cjo9hJAIFnZy505Q8BBJ4/HP0cBYHnJ03GgkCaCSDw0hxAAtPPnTtdXn/tgWZ3du3eV3bY4bAEqpd+S/1fE6XmpQckVjpf8kZtLIXb7i+RDNzRh8AzfkSsL5wfe1u
mR+9v1me/nMOkW2RLa3uvrlgk7z98vlSpX3f66tRzkGx44HmSX5D8zkEEnrUx05gPBBB4PkBkCGsJIPCsjYbGfCCAwPMBos9DIPB8BspwEEgnAQReOuknNrfeeffsMzdLbW11vCCVHXixqgpZfNXxEqv+5wXuhVvtIwVb7JFYQw7dhcBzKKwkW3VR4P362Yvy0/tPNlvp2rscL/1WXj/J1Ysg8JJGRoFDBBB4DoVFq0kTQOAljYwChwgg8OwLC4FnXyZ0BAFjAgg8Y3ShFjZ9B97AgSvLWmtvJbnqCJ7JVT/pW6l64Ipmpbkrri5tDzvHZDiraxB4VseTUnP1UqGO0J6rjtDO9cYpjPSWYTkXqyO07VIaN8hiBF6QdBk7kwgg8DIpTdbSkgACj2cikwkg8OxLF4FnXyZ0BAFjAgg8Y3TOFi7ZgXeC2oG3OL4GduA5G2dWN+7aRyyq1Udo3n/kgvgR2o49B8rGB14geQWFSefIDrykkVHgEAEEnkNh0WrSBBB4SSOjwCECCDz7wkLg2ZcJHUHAmAACzxid04UNf/4i1S8/yDvwnE6R5l0kwEcsXEyNnsMmgMALmzjzhUkAgRcmbeYKmwACL2zirc+HwGudEXdAwBkCCDxnoqJRAwIcoTWARokTBNiB50RMNGlIAIFnCI4yJwgg8JyIiSYNCSDwDMEFWIbACxAuQ0MgbAIIvLCJM1+YBBB4YdJmrjAJIPDCpM1cYRNA4IVNnPnCJIDAC5M2c4VNAIEXNvHW50Pgtc6IOyDgDIEgBF5l5ULJyyuUgoK2znCg0cwkgMDLzFxZFV+h5RnIbAIIvMzON9tXh8DL9icgs9ePwLMvXwSefZnQEQSMCfgp8Boa6uTnCS9JWelMr5++/deUgYPXN+6NQgikSgCBlypB6m0lwA48W5OhLz8IIPD8oMgYthJA4NmaDH35QQCB5wdFf8dA4PnLk9EgkFYCfgq86VO/kil/ft5sPSNH7SXtO/RK6xqZPHsJIPCyN/tMXzkCL9MTzu71IfCyO/9MXz0CL9MTzu71IfDsyx+BZ18mdAQBYwJ+CryfJrwoi0qmNutl8LDNpHefVY37oxACqRBA4KVCj1qbCSDwbE6H3lIlgMBLlSD1NhNA4NmcDr2lSgCBlypB/+sReP4zZUQIpI2AnwKvdOF0+fGH5+NrKShsJ2usuZ/k57dJ2/qYOLsJ/D975wEgV1W2/2fu9Nnekmw2vXeSEEhCSQi9S1WUrqB8ooIiqPiJKGBBROFTBAVFQP8oijTpLQTSIBDSe91sdjfby8zs1P+dCdlkUnZm7txy7p3nYlTYc973Pb/3bJj89t5zKfDyu/9WXj0FnpW7y7VR4HEPWJkABZ6Vu8u1UeCJtwco8MTrCSsiAcUE1BR4iSLaWneioX6NLO08GFgzDR5vseLaOJEEciVAgZcrQc4XlQAFnqidYV1qEKDAU4MiY4hKgAJP1M6wLjUIUOCpQVHdGBR46vJkNBIwlIDaAs/QxTA5CRxEgAKPW8KqBCjwrNpZritBgAKP+8DKBI4k8NpeXYnWV1bAZrOh/ILpKD5pnJUxcG0WJUCBJ15jKfDE6wkrIgHFBCjwFKPjRBMQoMDTrknS2jCwM4L4FBfiA+zaJWLkwxIwQuCFwh2wQZLvsC5kV0hAUwIUeJriZXCDCRxO4HUv34G6X72SUtnguy6CZ0SVwdUyPQlkR4ACLzteeoymwNODMnOQgE4EKPB0As00hhCgwNMGu+Pv3bC/EdwbXHZ34RuLEJvm0iYZoxou8GLxKHY1voOu7p3JWspKxmNAxSx2hgQ0I0CBpxlaBhaAwOEEXtP/W4LWl5anVFf5hWNRdv40ASpmCSSQOQEKvMxZ6TWSAk8v0sxDAjoQoMDTATJTGEaAAk8D9KE43De0APH9sWMjHAj/qERRstZoFK93dKA+EsYIlxunFBXBJ0mKYuXTJD3vwGvtWI/6poUpeAdXn4FC70ChkHeHA9jjb8GQ4mp
INu4hoZqTZTEUeFkC43BTETicwPOv2oVdP38pZR1D7r4I7uG8A89UzWWxoMATbxNQ4InXE1ZEAooJUOApRseJJiBAgadBk3pkgfc/6gm8x5tb0CjLu33XGI8HF5Qok4EarFbYkHoKvN2yvGuTJd6BV7/yo1FROkUYPm/tWIRHVz6DaCyKwcUDcdsx16G/r0KY+lhIdgQo8LLjxdHmItD3GXgrYZPkM/A+N41n4Jmrraz2MwIUeOJtBQo88XrCikhAMQE1Bd7K7i6sCwThsgFTCwsx1O1RXBcnkoAaBCjw1KB4aAy1HqHtlmXL7/c0pSQosNtxY2WlNoVbKKqeAi/Q04Rtdf+V77qMJQnaJReG13xOmLPwApEefPm1HyTl3b7r+JqjcdP0qyzU8fxaCgVefvU731bLt9DmW8fza70UeOL1mwJPvJ6wIhJQTEAtgbetJ4gF7e0pdZxfXoESh0NxbWacGNrTgcQv+QVicPUvhbOch70fro91rbvw53ceweaGjZg2bAauOek6FHvVv+uKAk+77yK1XmLBO/CU9SidwNvV0oVNDW0YWlmEYVW5f2/5g41o7ViXfDS1onQyXM7cYypb+aGzVjdtxE8W/S7lCzWF/fGbeberlYJxdCZAgaczcKbTlQAFnq64mUxnAhR4OgPPIB0FXgaQOIQEzEJALYG3uLMDGwOBlGUfW1SMsV6vWVDkXGekI4DA1oaUOL7R1bD73DnHtlqA2566GdubtvYua+bo4/Cdc76n+jIp8FRHqnpAnoGnDGlfAm/ZlgY8vXAd4p+dU3j2tOE4edIQZYlMMCsm3xn4/QW/xrb22t5qvzL5Upwx7AQTVM8SD0eAAo/7wsoEKPCs3F2ujQJPvD1AgSdeT1gRCSgmoJbAO/gOvMTx4efm2R14wbpWhPek3oXori6Fq1+p4v5YcWJbdwu+9qdrU5ZWUlCGP17/uOrLpcBTHSkDCkKgL4F37wsforHd31up0yHhnstOkO+ek28NtuiVeIHFfza+gd3de3BCzXTMHsg3N5q51RR4Zu4ea09HgAIvHSF+3cwEKPDE6x4Fnng9YUUkoJiAWgIvUUDqGXhF8hl4+XXnWaQriMDm+pRe+MbId+B584tDJpuRd+BlQoljSODIBLITeHZZ4B1vaYHHvWItAhR41uonV5NKgAKPO8LKBCjwxOsuBZ54PWFFJKCYgJoCT3ERFpqYPAOvqVNekfymTp6Bd8TO8gw8C216LsUQAnyE1hDsTKoTAQo8nUAzjSEEKPAMwc6kOhGgwNMJdBZpTCnwLvufn2Ldxu1IniwvX8WFPrz3nweT/7+uOfXcrsOxqC73YHdLMAtMHEoCBhOQDz8Kdu6Aw1MGh6v4iMVQ4BncJ6bXlAAfodUUL4MbSEDvl1gYuFSmzkMCFHh52PQ8WjIFXh41Ow+XSoEnXtNNKfDOufL7eOCn38So4TWHEKXAE2+TsaLcCERC7di26A4E2jfBJtnRb8zl8q8vHDYoBV5urDlbbAIUeGL3h9UpJ5BO4CmPzJkkYDwBCjzje8AKtCNAgacdW0Y2ngAFnvE9OLgCUwq8uRfdhH888mMMqCqnwBNvT7EilQnsXv0YmjY/mxJ13Gl/gdPb75BMFHgqw2c4oQhQ4AnVDhajIgEKPBVhMpRwBCjwhGsJC1KRAAWeijAZSjgCFHjCtQSmFHjTTr8ec2ZOwSerNqKyvAQ3X38J5sw6KkmXd+CJt8lYUW4ENi34DgKt61OCDD76NpTWzKXAyw0tZ5uMAAWeyRrGcjMmQIGXMSoOVIFAfOsKRFctgK2gFPZjzwYKy1SIeuQQFHia4mVwgwlQ4BncAKbXlAAFnqZ4FQU3ncCLxeL40b2P4cx5MzF7xgTMX/gpvv+zR/DiEz9P3pHX4Q+nBVHsc2Y0Lm0gDiABHQi01H6AjYvu6s3kLhiAKWf+CZLkPCS7w26Dy2GHvyeiQ2VMQQL6EvC67IhE4whHY/omZjYS0JhA4u5pt9O
OnnBM/hXVOBvD5zOB8IaP4f/Xb3oR2Mv6o+DLd8Hm9mqGxWGX4JQ/nwRC3NuaQWZgwwh4XQ75c0k0+fmEFwlYjQC9iXgdTfREzcsWly81A2YS69pv/wIXnzMX5546G12B9OKiwGNHd5AfIjJhyzFiEGjdtQh7drwJl/zYbM3YS+THZysOW5hdssHpsCEYouAQo3OsQk0CbpeEqPwB+cAPyXX1XUj84bBflU/NVIxFAroScDkkuGSJF4rEEJIlHi8S0IpA939+h/CaxSnhfZfdCtfIvU+yaHElfriY+MXPJlrQZUyjCXjkzyaJzyUUeEZ3gvm1IJB4QiATv6JFbsY8PIFET9S8NBd4/kAPNm6txVETRvbWfcU37sGVl5yOM046ho/QqtlNxjIdAZ6BZ7qWpRQcbZV/sGCX/1Ms/xevQwgc+AhtWJYc9z24FEs+rEuOO+3kYbjxq9NJjQRMSYCP0JqybaYsOvb+vxFd8mJK7Y5rfwFb+QDN1sNHaDVDy8ACEOAjtAI0gSVoRoCP0GqGVnFg0z1C29behdMu+y4euOsbOG7GJCxYsgK33vUw/vvkL1BRVkyBp3grcKIVCFDgmbOL8UgcHS92oWdjKLkA73QPik4tMOdiNKz6QIH39rvb8eDDy1Ky/fj2EzBtyqEvd9GwJIYmAVUIUOCpgpFBMiAQD3Yh8qz8CO3uzck329uOuwD2medlMFP5EAo85ew4U3wCFHji94gVKidAgaecnVYzTSfwEiDmL/oU9/3haTQ2t6FmQCVuu/GLmDV9QpIRX2Kh1VZhXDMQoMAzQ5cOrTHwaRCdr3WnfKH088VwDVP3jANz0tlf9YEC7w+PfoLX3tyasqQrL5uIiy8Ya/Zlsv48JECBl4dNN3LJ8mk3seY62HxF8q9izSuhwNMcMRMYSIACz0D4TK05AQo8zRFnncCUAq+vVVLgZb0HOMFCBCjwzNnMzte7EVgeTCm+cI4PvlnaHSpuKKl4DL7QNnjCTYhIHnS7hiLqKElb0oECb2dtB27+/tuIymeGJa4C+UDX3957CqoqeRZeWpAcIBwBCjzhWsKCVCRAgaciTIYSjgAFnnAtYUEqEqDAUxGmSqEo8FQCyTAkIAIBCjwRupB9DeH6CFr/1g589m4dm9uG8mtLLHsWnrdnhyzwdvaCitscaPXNQFx+nKuv60CBlxi3cXML/vvaVsjn/+Pcs0Zh2ND0EjD77nAGCWhPgAJPe8bMYBwBCjzj2DOz9gQo8LRnzAzGEaDAM479kTJT4InXE1ZEAooJUOApRmf4xNCuMIKfymfg2eLwzfTCUW7dF1kU+1fAGe1MYd7mm4yove9HuQ4WeIY3jQWQgEoElAi8+pZdeGXps/D3dOO06edizOBJKlXDMCSgLgEKPHV5MppYBCjwxOoHq1GXAAWeujzViEaBpwZFxiABQQhQ4AnSCJbRJ4FD78Czy3fgHZP1HXjETAJWIZCtwOvwt+O2R66X5V1XEoHNZsOdV/8WwweMtgoSrsNCBCjwLNRMLuUQAhR43BRWJkCBJ153KfDE6wkrIgHFBCjwFKPjRD0JxKPyI7TbczoDT89ymYsEtCaQrcBbvGY+HnrhlyllnTPzUnxh3rVal8r4JJA1AQq8rJFxgokIUOCZqFksNWsCFHhZI9N8AgWe5oiZgAT0I0CBpx9rZtKfAB+h1Z85M+pDIFuBt752Ne556taU4q489QacNuN8fQpmFhLIggAFXhawONR0BCjwTNcyFpwFAQq8LGDpNJQCTyfQTEMCehCgwNODMnMYRYACzyjyzKs1gWwFXqKeJ998GG989EKytInDpuKmi+6Ax+XRulTGJ4GsCVDgZY2ME0xEgALPRM1iqVkToMDLGpnmEyjwNEfMBCSgHwEKPPlFrvEIYvIjmk7JrR94ZtKFAAWeLpiZxAACSgReosz2rhb5HDw/qisGGVA1U5JAZgQo8DLjxFHmJECBZ86+serMCFDgZcZJz1EUeHrSZi4
S0JhAvgu8rmgb2qKNiMfjcNk8qHLWQLI5NKbO8HoRoMDTizTz6E1AqcDTu07mIwElBCjwlFDjHLMQoMAzS6dYpxICFHhKqGk7hwJPW76MTgK6EshngReNh1EX2pLCu9BeijJHf117wGTaEaDA044tIxtLwMoCLxaJYtML76N+6Rp4yoow+pJ5KBtZYyxwZteVAAWerriZTGcCFHg6A2c6XQlQ4OmKO6NkFHgZYeIgEjAHgXwWeP5YJ5rDdSmNctq8GOAaYo7mscq0BCjw0iLiAJMSsLLA2/rqYmz578Lezji8bhx351fg9PG8PpNu16zLtorAC328GZHVtZAqi+A+aRJsXlfWLDjBegQo8KzXU65oPwEKPPF2AwWeeD1hRSSgmEA+C7zEY7P14W2IxEO9/Coc1fDZixXz5ESxCFDgidUPVqMeASsLvI8f+CdaN9WmwJp6w4WomDhcPYCMJDQBKwi80MJ1CL74YS9n++Aq+G44AzbJpir7eFcbQktfQ7y7A85Jx8E+fKKq8RlMfQIUeOozZURxCFDgidOLfZVQ4InXE1bUB4GALGda490YIJVCgrofmqwAPp8FXqJ/iRdYdERaEEMEXqlYlneFVmgr1/AZAQo8bgWrErCywOMdeFbdtZmvywoCr+uhVxDb2ZSy6MJvnw+pX0nmINKMjPcE4X/ibsQ7mntHej53AxyjjlItBwOpT4ACT32mjCgOAQo8cXpBgdcSFK8brKhPAvPD6/Bs6ENEZT1TI5Xja+55KJcoaA6Elu8Cj99C1iZAgadvf+ORCALLPkRol/zIWFERCmYcC3tZub5F5Ek2Kwu85Bl4z72H+o/W8Qy8PNnPBy/TCgLP/4/3EVm+df/S7BIKf/R5SG6nal2NbF2N4LO/S4nnGDsDnnO/oloOBlKfAAWe+kwZURwCFHji9IICjwJPvN3YR0UdMT9+GPgX4vJf+64TnWPxBdcsU61D62Ip8LQmzPhGEqDA05e+/5Nl6Fm/tjep5PGi5PyLIL/aWd9C8iCblQVeHrSPS0xDwAoCL9rShcCf30SsuROQ5Z33wllwHj1S1d7HWhrg/8udKTFdM8+C64TzVc3DYOoSoMBTlyejiUWAAk+sfiSq4SO04vWEFR2GwOrILvyh582UrwyVqnCr92zyOoAABZ422yHeFUH003bYCh2wT5LP1LNTYGhDuu+o+SrwIp1tSTCOolJdsXe8+l9E21pTchaffR7sxeo9MqbrggRORoEncHNYWs4ErCDwEhDi0RhiDe2Qygo0e4FFaMkrCL3/QpK5VD0c3ou+AZvHl3MPGEA7AhR42rFlZOMJUOAZ34ODK6DAE68nrOgwBBKPzf4i8BJ2x/b/YfIq1wk41qnuTz/NDp8CT/0OxppD6LlvE+CP7P1APbIQrq8Ph81Biac+7b4j5pvAi8dj6Jz/Inq2rEmCcY+ciKI558Jmk3RBzzvwdMGcTEKBpx9rZtKfgFUEnl7kYv4OxP1dsFdUQ/4NX6+0zKOQAAWeQnCcZgoCFHjitYkCT7yesKIjEOiMB/FWeDWa4p2Ybh+G6Y5hZHUQAQo89bdE+IXdiLy1JyWw62vDYZ9QpH4yRuyTQL4JvJ6ta9HxznMpTIrnXQD38PG67BSegacLZgo8/TAzk0EEKPAMAs+0uhCgwNMFM5MYRIACzyDwfaSlwBOvJ6zIJASC0SB2B2sxxCfLHJtdiKop8NRvg5UF3vtbt+CfK5ajOxTGWWPH4ZIpYr/pLt8EXtfiNxFY82HKpvZOnoXCY+apv9EZ0VACvAPPUPxMrjEBCjyNATO8oQQo8AzFz+QaE6DA0xiwgvAUeAqgcQoJLG5ZgEe23i8/VdmNCnc/fG/MTzHUN8JwMBR46rfgkEdoRxTAdeMI0z9Cu6u9Hd96/tkUYN+dOw+zhw5TH6JKEfNN4EXbm9H6wl8QD4f3EnQ4UXb+tXCUVqhElGFEIUCBJ0onLFpHLAb7uo9g27EOqBqM8FEnyP8OU+/
tqemoUeClI8Svm5kABZ6Zu8fa0xGgwEtHSP+vU+Dpz5wZTU4gJp9Lde2yCxGMBnpXMrX0GPxg7D2Gr4wCT5sWWPElFq+tX48/LlmYAuy0MWNxw6zjtIGoQtR8E3gJZJHmevjXfJSk55swA46KASqQZAjRCFDgidYRa9VjX/gyHJ8u6F1UdMQkRM64XLdFUuDphpqJDCBAgWcAdKbUjQAFnm6oM05EgZcxKg4kgb0EdgV24DsrrkvBUeIsxx+nP204Igo8w1tgmgJ4B55pWsVC84AABZ71mrxhzTrMf+Nt+Lu7Me3YGZhzqnGPvrv++jPY/J37IcsvRuj5yp2A06ULeAo8XTAziUEEKPAMAs+0uhCgwNMFc1ZJKPCywsXBJLCXwD3rvo8V7R/34vjCoGtwUc2XDMdDgWd4C0xVwL4z8PzyGXhn6nQGXiwex7s76rC8sQkjS4tx+vAh8DoyO0NSlDvwYu0xxNpikMokSMX6vBHWVBuLxWZNgAIva2QZT4gEY+iqDaJ4uBeSXZ83era2tOKRXz+ImPzo6r7r3EsuwJSjp2Vct5oDnf95GFL99t6Q8YJihK76gZop+oxFgacbaiYygAAFngHQmVI3AhR4uqHOOBEFXsaoOJAE9hMIyI/PvrT7GWz1b8b00pk4uepMSDbj/yBPgcddKjqBf63fiuc2bOktc1JVOb4/K7M/1Iog8KJbI4hsjOytX3YBjglO2GsyE5Ci94b1GUeAAk8b9g0fduLDe3Yg3B2Ft58Ls+4citLRXm2SHRB1zacr8dzT/0rJM2HKJFzwxUs1z324BLY9u+B65QmguwNwexA+9YuIDRmjWy0UeLqhZiIDCFDgGQCdKXUjQIGnG+qME1HgZYyKA0lAfAIepwSvx4HWzpD4xbLCvCRw2zuLUdfV3bt2+Uku/OnMk+DJ4C68skInAqEYgqGoMeziQM/8HiAk/5/PLqlAgvN4fR5DM2bRzKoHAQo8bSi/ctlaBJs/ewmMnKJ8YgHm/nakNskOiCraHXjJ0uS7AW0tuxEvqdLt0dl9SCjwNN9yTGAggbIiFwLBCILh/XfcGlgOU5OAqgQo8FTFqUowCjxVMDIICYhBgAJPjD4cXMWHUgc+lDpRFXPizFg5iuAQs1AdqvrVkuX4tLG5N1NC3P3xzLnyHazpH20TQuC9HQQO8IcUeDpsmjxIQYGnfpO760N4/Ur5rasHXM4CO859bqL6yQ4TMXEG3ruvv4WA34+pxxyNuaedrEteEZNQ4InYFdakFgEKPLVIMo6IBCjwxOsKBZ54PWFFJKCYAAWeYnSaTXxXasOT9t298YfEPfhRZBikxPOXeXhtb+/Erz9cgZZAEF6nA9cfNR7HVvfLiIThAk+uMrpdfoR2/QGP0E6UH6EdyEdoM2ogBx2RAAWeNptjyU+3o25Be2/w0V/oh0nX8U3O2tA+clQKPL2JM5+eBCjw9KTNXHoToMDTm3j6fBR46RlxBAmYhgAFnnitut+xA6tt+x8ZTVR4d2QEquNu8YrVqaKo/CKLnR2dGFBQkNGjs/vKEkHgJWrhSyx02ih5lIYCT5tmR+VH7jc904SWDX4MOLYYw84qg03Kzx+eaEM4s6gUeJlx4ihzEqDAM2ffWHVmBCjwMuOk5ygKPD1pMxcJaEyAAk9jwArCPy7ffbdAvgtv32WX77y7PzwKhXn8GK0CjMkpogg8pfVzHgkciQAFHveGlQlQ4Fm5u1wbBR73gJUJUOCJ110KPPF6wopIQDEBCjzF6DSbuMcWxv85arELQSTk3Rej/TEvVjkt0nwAACAASURBVKZZPisHpsCzcnfze20UePndf6uvngLP6h3O7/VR4OV3/62+ego88TpMgSdeT1gRCSgmQIGnGJ2mE2OIoxY9qIQTPlnj8VJGgAJPGTfOEp8ABZ74PWKFyglQ4Clnx5niE6DAE79HrFA5AQo85ey0mkmBpxVZxiUBAwhQ4BkAnSl1I0CBpxtqJtKZAAWezsC
ZTlcCFHi64mYynQmIJvACjduw58OXEI/FUHn0WSgYOFpnIkxnJQIUeOJ1kwJPvJ6wIhJQTIACTzE6TjQBAQo8EzSJJSoiQIGnCBsnmYQABZ5JGsUyFREQSeCF2vdg7R+/iXi4Z+9a7A6Mv+63cFfUKFobJ5EABZ54e4ACT7yesCISUEyAAk8xOk40AQEKPBM0iSUqIkCBpwgbJ5mEAAWeSRrFMhUREEngNX38GmpffThlHdUnXYH+x12saG2cRAIUeOLtgbwUeOVFLrR0hsTrBisigRwJuBwSEh+UO/zhHCNxOgmIR6DI50AoHEOP/IsXCViJAAWelbrJtRxMwO2U4JJ/dfojhEMCliNQ7HOiJxRFT8T4zyYdGz/ClmfuSWE85LybUD75JMtx54L0IVBR7EJzB72JPrQzy5KXAq+63IPdLcHMCHEUCZiIgF534LV378aKbS8hHA1i4pAz0L90jIkoHaHUeBzR9mbEutphkxyQyqogeX3mX5cBKwh3RdC2oRuuIgdKRhVAfvmuKhfvwFMFI4MISIACT8CmsCTVCPAOPNVQMpCABES6Ay+BZ/uLD6J15TtJUiVjZmHAqV9Dd3sHygcPgiRJAhJkSSIT4B144nWHAk+8nrAiElBMQA+B1x1sxjPvfxehSHeyTpvNhgtm/QxVJSMV1y3CxGhHK2Kte/aXIq/LXjMcknx+CK/MCXTXBeSf/tYjGowmJxXLAm/ExdWZB+hjJAWeKhgZREACFHgCNoUlqUaAAk81lAwkIAHRBF4CUeIsvFgsinULl2P+408gFomgatgwnH/7bSiuqhSQIksSlQAFnnidocATryesiAQUE9BD4K3f9S7mr3wopcajhn8OM8derrhuESZGGmsRD/hTSrH3q5HvwpPvIOOVMYHtLzfKP/ntSBk/7roh8FS4Mo5xpIEUeDkjZIAjEOhAHT62/Q1t2IkaTMfU2OfhtOl3By4FHremlQlQ4Fm5u1ybiAIv0ZXu1lb86Ss3IC4/YbLvOuqs03HyV69j00ggYwIUeBmj0m0gBZ5uqJmIBLQnoIfAq21agZc/ujtlMSdO/CrGDz5V+wVqmOGQO/DkxwzsA4fxDrwsme98bQ+al7enzBpzzSD4+nuyjHTocAq8nBEywGEIxBHDK7YfohtNvV8dHj8eM3CNbrwo8HRDzUQGEKDAMwA6U+pGQFSBt3XZJ3ju7p+ncOg/ejS+dG/qGXm6gWIiUxKgwBOvbRR44vWEFZGAYgJ6CLxEcR+sfQyrt7+WrHNw1TScPu27sEtOxXULMZFn4KnShkBDDzb+fRdiob2HOReN8GHkpQNViU2BpwpGBjmIQAd24zXbHSn/tNg2AGfE7tKNFQWebqiZyAACFHgGQGdK3QiIKvBi0SieuuV7aN6+o5fFmTd/A+PnztGNDROZnwAFnng9pMATryesiAQUE9BL4CUK9Pe0IhwJoqRAnfPNFC+aE4UjEOoIo2OjH85ivsRCuOawoEMI8A48bgoS0JYABZ62fBndWAKiCrzkZ/WODix77kW01zdgzPGzk794kUA2BCjwsqGlz1gKPH04MwsJ6EJAT4Gny4KYhAQOIMA78LgdtCLAM/C0Isu4JABQ4HEXWJmAyALPyty5Nn0IUODpwzmbLBR42dDiWBIQnAAFnuANYnk5ERBV4HVH41jTHYXPbsM4nx3y//AigawI8BHarHBxsMkIUOCZrGEsNysCFHhZ4eJgkxGgwBOvYRR44vWEFZGAYgIUeIrRcaIJCIgo8FrCMTxa14Pu6N4z/4Z67bhqgAcOSjwT7ChxSqTAE6cXrER9AhR46jNlRHEIUOCJ0wtWoj4BCjz1meYakQIvV4KcTwICEaDAE6gZLEV1AiIKvNebQ/igPZyy1itkgTdavhOPFwlkSoACL1NSHGdGAhR4Zuwaa86UAAVepqQ4zowEKPDE6xoFnng9YUUkoJgABZ5idJxoAgIUeCZoEktURIACTxE2TjIJAQo8kzSKZSo
iQIGnCBsnaUGg24/YniZIQwYBkqRKBgo8VTCqGoQCT1WcDEYCxhKgwDOWP7NrS0BEgdcSiePRXcHeR2iHeOy4upqP0Gq7E6wXnQLPej3livYToMDjbrAyAQo8K3fXPGuLvvkuwo8+AVskAtvQQXB879uQ+lXlvAAKvJwRqh6AAk91pAxIAsYRoMAzjr2SzE3+Lsj+BwMKCpVMz7s5Igq8RBP4Eou824qqL5gCT3WkDCgQAQo8gZrBUlQnQIGnOlIGzJJAPBBEz7VfT8q7fZd04mw4b/qfLCMdOpwCL2eEqgegwFMdKQOSgHEEKPCMY59t5j98vBBvb9+UnHZs9RDcfMyJcNp5blpfHEUVeNn2nuNJ4GACFHjcE1YmQIFn5e5ybRR43ANGE4itWoPwnb9IKUOqqYbzgV/mXBoFXs4IVQ9Agac6UgYkAeMIUOAZxz6bzJ827sbdH7yRMuWr02bjtGGjswmTd2Mp8PKu5RkvuLWpBwvfaEJ3ZxTHzCnH8HHmuquVAi/jVnOgAAQ+WdKAN57fhp5gDCecVoN5Zw3psyoKPAGaxhI0I0CBpxlaBs6UQCyG8G0/Rmzb9t4Z9uuvhuOMUzKNcMRxFHg5I1Q9AAWe6kgZkASMI0CBZxz7bDL/c+1yPLNuRcqU04aPwVenzsomTN6NpcDLu5ZntOCAP4Lf3bEBnQe8DfjaW0aaSuJR4GXUag4SgEDttk784tbFCPfE4HRL8BQ6cPU3J2Hy9COftUSBJ0DjWIJmBCjwNEPLwNkQkF9gEfnPi4jX1cN+wixIx83MZjYFniq09AlCgacPZ2YhAV0IUODpgjnnJE3+btz81vPo+eysCrv8pqifzz0Lw0srco5t5QAUeFburvK1rfmkHU8/tC0lwIy5FTj/CvktbCa5KPBM0iiWiSd+vhLvvr2zl4S7wIFTLhyKS64ee0Q6FHjcOGoTkKQ44vIZwvG4Te3QWcejwMsaGSeYiADvwBOvWRR44vWEFZGAYgIUeIrR6T6xrqsDz29YJb/EIo6zRozFqLJK3WswW0IKPLN1TJ96d23z45F7NqYkO+WCAZh7Tn99ClAhCwWeChAZQhcCf/3ep5j/8a7eXDbJhpvvORqTZ/SjwNOlA/mdxGaLo6gAcEixJIhAjyT/MlbiUeDl9560+uop8MTrMAWeeD1hRSSgmAAFnmJ0nGgCAgcKvM7uduzeUwtJrru631AU+Mx15pkJcJuqxDee3Y0FrzQmax46uhBX3jQcLvnxPrNcFHhm6RTrfPnXG7F+XQu21LcjEgVGDy/Bd37f96NavAOP+0YtAh5XDD6PfOvdAVdbp4SYgXfiUeCp1V3G6YtAJBLDB0vq0CUfG3LirGoUF7l1AUaBpwvmrJJQ4GWFi4NJQGwCFHhi94fV5UZgn8BramvDinVL5Udn9v4EXpLf3jt9wnFwuzy5JeBsUxPo6ggj0B1DVbU+H2rVhEWBlzvNdatWY+mixSguKcHJp5+K4tLS3IMywiEE9mzpwsK/1yIgf7+55Mdnj798MPqP6vsHKBR43EhqESjyyWcvOlIFXndQQk/IuLvwKPDU6i7jHIlAOBzDD+5ehDXrmpNDSkrcuP+uE1Hd36c5NAo8zRFnncCUAm9nXSN+dO+fsX7TDgwcUIkf3nQlpk/e+/bGuuZAWgjV5R7sbgmmHccBJGA2AhR4ZusY682GwD6Bt2nHJmzftSll6ogh41FdZZ4zz7JZN8danwAFXm49XvXpCvzl4Ud6g1RUVuKW238At5dSPzeyh58djcTR0RBEUZUbDlf6O10p8LToQn7GdMjyrliWePuuxBl4bZ02xJG/Ai8ajKDpk3oE9nShZFQ5ysbLj7MbhyM/N6bGq/7okwbc8YslKVkuOX8Uvnz5BI0zAxR4miPOOoEpBd7VN/0cJ58wHVdcdBoWfrRalnmP4Y1//Fr+iYydAi/rLcAJViJAgWelbnItBxPYJ/B27K7Fxm2rUr48fuQ0lJfyHEHuGnM
SoMDLrW9PPvZnLP9oWUqQ6278OsZPmphbYM5WhQAFnioYGeQzAk4H4HbGktIuIN+PYeTjs4mSjL4Db8sza9Bd2967P6rnDkPl9GruFwsRoMCzUDNVWIrpBF5zawfO/NJtWPTS7+GQH5tKXJdc/2Pc9vUv4thp4yjwVNgUDGFeAhR45u0dK09PYJ/A8wfDWL91JVra9p55VlUxEGOG8Q/q6QlyhKgEKPBy68zLz7+It159NSXI9+/8Mar6H/nFCrll5OxsCFDgZUOLY81GwEiBF+kOYe0fU3944R1QiFFfnGw2jKy3DwKJR2hvv2cRVq/d/wjtb+4+EQP68RHafNw4phN4H6/ciJ/e/1c895e7e/t1y08ewszpE/D5805Cj7zB011up3xWQgbj0sXh10lANALyy+Bgl/8rHE09H0S0OlkPCSgh4LTbEJXf2hv77Lf57oAfks0Gr8erJBznkIAwBBK/bzsS+zsWl18MwN+/s21MV1c3HrjvAWzdsg12+Ye7n7voPJx93tnZhuF4jQhI8lO2dvn3an420QgwwxpKIPnZRP69W/6P7ldU/vPs0t8sQjQkv1Hms6tiXCXGX6L9o5W6LzbPEyZeYvHuB7Xo7A7LTyIOQkmxPuf90puIt/ESPVHzssXlS82AB8da+NEqPPjov/H0wz/u/dL//vIxjBkxCFddeoaWqRmbBEiABEiABEiABEhAQAKJj591u3ajqLgIxfIvXiRAAiSQDwTqVzRg3YvrEZNlnrfciylfnISCqoJ8WDrXSAIkoAIBzQXeJ6s24sf3PY4XHr+nt9zv3Pl7HDdjEi45dy6aO0Jpl1FR7MpoXNpAHEACghFwOWzymzjt6JRfMc6LBKxGoMhrR498eHqId1BbrbV5vx6v2w6f/Csg30XhD+6/k+JAMD3ynXmOz+5kymdgsVgEwc4GuAv7yXfbOfMZhWnW7pLvFnDLn086A4ff26ZZCAslgcMQKPI55LfgRhGSP58YdUV7ouhpC8Ariztb4nEcXiSgEgF6E5VAqhgm0RM1L80FXmt7J079/C14//nfyY9N7S3+rMtvw89+cD2mTRrNM/DU7CZjmY4Az8AzXctYcBYE9p2BFzzgUZEspnMoCQhLoK8z8MLyc1ktoRj2/dmw3C3Bm6d/QOtu3YKdK/+GcE8H7K4CDJl8BQrLRwnbVxa2lwDPwONOsDIBI8/AszJXrk0MAnwLrRh9OLAK052Blyj+K9+5F8dMHYfrLz8Xr7yzJPlI7St/u1f+SaxEgSfeHmNFOhKgwNMRNlPpToACT3fkTKgTgb4EXlPizooDjveVjxJDtccuv38x/64Ni+5DT1dD78Ldhf0xZvZ38w+EyVZMgWeyhrHcrAhQ4GWFi4NNRoACT7yGmVLg7apvwu0//xPWb96JwQP74c5brsHEscOSdOuaA2kpV5d7sLtFfu84LxKwGAEKPIs1lMtJIUCBxw1hVQJ9CbzdwZh8OHrqo1n95MdtVT7D2BRoV731A8TlR2h7L9lmTph3t/wDXHUfJzEFDBMVSYFnomax1KwJUOBljYwTTESAAk+8ZplS4PWFkQJPvE3GivQjQIGnH2tm0p8ABZ7+zI3IuHndMnzw7jPwd7ZjyoyTMWvuxZASr7G08NWXwOuU3zzXEd4v8NzyGw8rXdbmcaRW71r7L7TULun9cnnNsaiZcKmFd4Y1lkaBZ40+chWHJ0CBx51hZQIUeOJ1lwJPvJ6wIhJQTIACTzE6TjQBAQo8EzQpxxK7Olrw2AM3Ixbbf9j9qeddh8nT5+UYWezpfQm8ROXd8gssAvIvl3z2XaEs8PL0CDz57ruoLPAWobNlEwpKh6Fi8PGQ+CILsTe3XB0FnvAtYoE5EKDAywEepwpPgAJPvBZR4InXE1ZEAooJUOApRseJJiBAgWeCJuVY4vpVi/Dyv3+XEmXsxNk4+5Jv5BhZ7OnpBJ7e1bdJa7BHWgAHilAdOR0eVOpdAvNZiAAFnoWayaUcQoA
Cj5vCygQo8MTrLgWeeD1hRSSgmAAFnmJ0nGgCAhR4JmhSjiXyDrwIOvzhHCnmNr0D67Da+aveIE6U4qjwnXDKMo8XCSghQIGnhBrnmIUABZ5ZOsU6lRCgwFNCTds5FHja8mV0EtCVAAWerriZTGcCFHg6AzcoHc/AM1bgbbE/gQZpfkr3x0VuRll8skE7gmnNToACz+wdZP19EaDA4/6wMgEKPPG6S4EnXk9YEQkoJkCBpxgdJ5qAAAWeCZrEEhUREOkR2l3SK9hh/1fKOiaFfyjffzdC0do4iQQo8LgHrEyAAs/K3eXaKPDE2wMUeOL1hBWRgGICFHiK0XGiCQhQ4JmgSSyxTwLx2lbYClxAWUHKOJEEXgRBrHP+Fp3YmKxxYOxsDI1ezM6SgGICFHiK0XGiCQhQ4JmgSSxRMQEKPMXoNJtIgacZWgYmAf0JUODpz5wZ9SOgtcCLx4IIB7fIb7X0weEaCths+i2OmSxNIB4MI/rzV4FPa5PrlC6aBumqWb1rFkng7SsqYNsNe7wALhRbujdcnPYEKPC0Z8wMxhGgwDOOPTNrT4ACT3vG2WagwMuWGMeTgMAEKPAEbg5Ly5mAlgIvGm5BoO0VQJZ4icvurIa37CxKvJy7xgAJAtFnP0H8icUpMKR7L4I0pn/yn4ko8Ng5ElCLAAWeWiQZx2gCjTvrsGXVOrh9HoyZOhkFJUWgwDO6K8yvJQEKPC3pKotNgaeMG2eRgJAEKPCEbAuLUomAlgKvp3Mxwv7VKZV6y86F3bVXsPAyB4F4PI6Vn36M1tYmTJ4yHeUVVUIUHrvvDcTe35Qq8G6YA+nMiRR4QnSIRWhJgAIvM7qhngjefHoZ1izZjrL+hTjti0djyGeSP7MIHKUlgcbaOsx/9mUk/j2TuDwFPpx++UWo7leCQFA+fCAc0zI9Y5OAIQQo8AzB3mdSCjzxesKKSEAxAQo8xeg40QQEtBV4H8gCb10KBQo8E2yKg0r8858exLKPFiX/qdvjwbdu/iGGDR9l+ELiK3cheseLkP/kl6zFJp+BJz1wKWzF3uTf8w48w1vEAjQkQIGXGdx3/70cC55f0TvYW+jBN399IdxeZ2YBOEoVAo325djhmA9vvBQjw+fDEy9Lxv3o7fexZeXalByzzjoZRx09ngJPFfIMIiIBCjzxukKBJ15PWBEJJAlE6oKwFTlgl39lelHgZUrKuHHRPU0IvPIqOnfI/1sxBP3OnYPCwZXGFWSizFoKvFi0E4Hm52S/EkoSsbvkR2hL+QitibYH6nfvwl13fjel5KNnzMaXr/+WEMtISLz4a2sQl19ikTgDz9Z//9lyFHhCtCjnIuocy7DJ/Sqith4MDc3FiNApOce0QgAKvMy6+NhPXkbd5qaUwdfccSYGj+qXWQCOyplAvbQMH3rv643ji/fHXP/P4YAXaz/6FCs/WJqS45QvfA6jRg+iwMuZPAOISoACT7zOUOCJ1xNWlOcEYv4o2h/eifDm7iSJgnOqUHB2Zh/eKPDE3zydD/4e9fUx1AXln+jK70iQfF6MvnQ2BkwZKH7xBleopcBLLC0W9SPSs40vsTC4z0rTiy7w+loXBZ7Sroszr0PahQ8KfplS0LTAdRgQmSJOkQZVQoGXGfiD78Bz+5z4tnynrtOd+Q9yM8vEUUcisMzzIOrse+/i3nfNDH4P/aJTEQ6F8cGLr6GxdnfyS+NmTMWU44/hGXjcTpYmQIEnXnsp8MTrCSvKcwJd/22E/+U9KRTKbh8JZ40nLRkKvLSIDB0QbW1DlyzwVnUORiQuJWuxORxwDxuEmf9zgqG1mSG51gLPDAxYY98E/vLo/+GjDxcmB4n0CG26vlHgpSMk/td3ON/Has8/UwodHD4ek4JfEL94jSukwMsMcO8ZeIu3oWxAEc/AywybqqPWuf6Bjc7nUmLO89+Pwnh17z/raGmDy+OGR/4BbOLiSyxUbQGDCUaAAk+whsjlUOCJ1xNWlOcE2n63HaG1XSk
Uii4bCO+Je8/g6OuiwEtHyPivJ+7AW7GjaL/A8/ngHdwfx3z1eOOLE7wCCjzBGyRAeaK+xCIdGgq8dITE/3qHrRYfFN6bUijvwNuLgwJP/P3LCvcSCNm6sMTzS7RJmyDBjjGhSzA6fEGfeCjwuHusTIACT7zuUuCJ1xNWlOcEQuu70fbgtl4KUokTFXeMhM1jT0uGAi8tIsMHJM7A2/7k69hZF5d76oFUXIIRp43DwGmDDK9N9AIo8ETvEOtTSoACTyk5seYlzsDb7HkVESTOwJsjn4F3qlgFGlQNBZ5B4JlWEYE44ui07YIbRXDHS9LGoMBLi4gDTEyAAk+85lHgidcTVkQCCK3rRuD9Fthleec9uRz2CldGVCjwMsIkxKDO+nZ07GpHyaBSFB5wmL0QxQlaBAWeoI1hWTkToMDLGSEDCEyAAk/g5rC0nAlQ4OWMkAEEJkCBJ15zKPDE6wkrkgnE4lGEYn64pAJItr1nhfFKT4ACLz0jjjAvAQo88/aOlfdNgAKPO8TKBCjwrNxdro0Cj3vAygQo8MTrLgWeeD3J+4q6Iq3YHVyDaCwEh+TGQM94+Bzpz3/Le3AyAAo87gIrE6DAs3J383ttFHj53X+rr54Cz+odzu/1UeDld/+tvnoKPPE6TIEnXk/yvqLNXYsRjgV6ObjthRhecEzec8kEAAVeJpQ4xqwEKPDM2jnWnY4ABV46Qvy6mQlQ4Jm5e6w9HQEKvHSE+HUzE6DAE697FHji9SSvK4rEe7Cpc2EKA5v8CO3Yorl5zSXTxVPgZUqK48xIgALPjF1jzZkQUFvgdUR64I9FMMBVkEl6jiEBTQlQ4GmKl8ENJkCBZ3ADmF5TAhR4muJVFJwCTxE2TtKSQF1gLTrC9b0pSpwDUe0dq2VKy8SmwLNMK7mQwxCgwOO2sCoBNQXevxs3YH5rbRLVWF8ZvlIzBR4p/VvMrcqW6zKeAAWe8T1gBdoRoMDTji0jG0+AAs/4HhxcAQWeeD3J+4pi8RhaQ7XwR9tRKJ99V+qqgU3+i1d6AhR46RlxhHkJUOCZt3esvG8Cagm8zYE2PLDj45Rk51eNxKnlQ9kCEjCMAAWeYeiZWAcCFHg6QGYKwwhQ4BmG/oiJKfDE6wkrIgHFBCjwFKPjRBMQoMAzQZNYoiICagm8N5q34cWmLSk1TC/uj2uqJyqqi5NIQA0CFHhqUGQMUQlQ4KXvTDwWR/e6BvTsaofd44BvfH+4KgvTT+QIwwlQ4BnegkMKoMATryesyKIEdtqa0YwODEN/lMZ9mqySAk8TrAwqCAEKvMM3IhKNoCPYgWJPMRx2hyDdYhnZEFBL4HVFQ7h76xL5DvZwMn3i3vVvDZ6Okb7SbMrhWBJQlQAFnqo4GUwwAhR46RsS2LQH3esb9w+021B+ylhITh7vkJ6esSMo8Izlf7jsFHji9YQVWZDAW9JKrLLtSK7MDgnnxWZgaLxK9ZVS4KmOlAEFIkCBd2gzGjsa8cHmRQiEAnA73Thx1PHoV9xPoK6xlEwIqCXwErkaQ3683boDwVgUJ5bUUN5l0gCO0ZQABZ6meBncYAIUeOkb0L50O8J7ulIGFh8zBK5+Reknc4ShBCjwDMV/2OQUeOL1hBVZjIAfQfzJ/lbKqhLy7oLYsaqvlAJPdaQMKBABCrxDm/HSylfQ4W/v/UKxrwTnTj5LoK6xlEwIqCnwMsnHMSSgJwEKPD1pM5feBCjw0hP3b2mCf21D70CbXULZKWN4B156dIaPoMAzvAWHFECBJ15PWJHFCFDgWayhXI5hBCjwDkX/9IfPICbfabXvSrzw55IZF8PJR2kN26dKElPgKaHGOWYhQIFnlk6xTiUEKPDSU+MZeOkZiTqCAk+8zlDgidcTVmRBAgc/Qnt+7BgMiVeqvlLegac6UgYUiAAF3qHNWLJlKTbv2f/SgpFVIzBzhPp39wq0DSxZCgWeJdvKRX1GgAJP/a1Q+9EGbFuwGr6KIow/bya8ZXwUUX3
KmUWkwMuME0eZkwAFnnh9o8ATryesyKIEttua0IpOvsTCov3lsrQnQIF3KONYPIb1DRvQ2LEHVUWVGDdAPhTaJu0dWLcO2LMVKKwAhhwFyGfk8RKTAAWemH1hVeoQoMBTh+O+KAl5t+BX/+oNWti/DGfdex0cHqe6iRgtIwIUeBlhssygzfY6fOTcgB4pikmhIZgeGW2ZtR1uIRR44rWXAk+8nrAiElBMgHfgKUYnxMRgPIAmWz2q40Ngt/HNXAc3hQIvi226YwVs6xfsn1AyAPFjL84iAIfqSYACT0/azKU3AQo8dYkvfOA5bF+4JiXoqXddhaoxg9RNZJJosVAUTW9uQ9fqZjjKPKg8fSh8Q0p0q54CL3PUHy3z48Nl3bDZbDhuVgGmTPZmPlmAkW22LjzjfQ8xxHurmddzFMZErfu9R4EnwMY7qAQKPPF6wopIQDEBCjzF6AyfuBRv4Z/SHxFGDypRjRtid6BK/t+0VzyOluUvoXPTIviqx6H82Ethd5nrA1HaNX42gAIvU1LyuKX/gq19/4HRiZnxOdcA7oIsgnCoXgQo8PQizTxGEKDAU5f6iqfnY/V/PkgJes5vbkDxwHJ1E5kkWvM7O9Dy3s7eau0+J4Z962hIbn1+EEqBl9lG2bY9hH8805oy+MovlWPgQPPcObrasQPvu1amrGF8ZAjmhCZnBsGEoyjwxGsaBZ54PTGsomg0hlWLNmLHpt0YOKwKcaDn/AAAIABJREFURx0/Dg6nPv/yM2zRR0i8s3EJdjYuhNddifHDPgePs1i0Eg9bDwWeKdp0SJEhWdr9wH4lIvFQ79eOih+HL8dvS7ugPe8/gYb5j/aOKxw+A8O+dH/aeWYcQIGXRddWvgFb/YbeCXFJQnzudbA5zPNBOYvVmn4oBd7+FjZs3YZ1C5fC6fVg8kknoqi8zPT9zfcFUOCpuwOCHX7M/+U/0bKpDpL8Ns9Jl56IiRcer24SE0Xb8egK9OzqTKl40JcnwztYn8/uFHiZbZZ33+vCkqXdKYPnnliIWTPN84NF3oGXWa85SlsCFHja8jVV9AUvfYxP5u+/JX/0UUNx1hUnmmoNahS7rX4Blq75fW+o4sLBOG3Gz2CXxP+DLwWeGjtA/xhbsQ6/lb6fkrgIZbg79pe0xWx8+Ar0NO9IGTfupv/AkTj3zGIXBV7mDY0HOiB9/ALgb0dcfiOtbdwcxAeOzzwAR+pKgAJvL+76zVvx2h//grh8Z3Hi8hYX4XPfvhGeAvP8AU/XjWOSZBR46jcq8T3SXtsET2kBPEU+9ROYKOLBd+BJHjuGf+cYSDrdhECBl9lmOdwdeFd8qQw1A12ZBRBkFM/AE6QReVwGBV4eN//gpT/603/D3xno/ccOlx033HUZJMmWV5TeW/4z1LesSFnzyUf/FJUlY4TnQIEnfIuOWOAD0u3Ygv0C/QJ8GfNi56dd0Lanb0PX5sW94yT58dnxt7wCm3zHldUuCrzsOhqPxYDuFti88l0IDnN9QM5upeYfrbXA6whHkpCKnQ6hYS189gVsWPxhSo1zv3Qphk+dInTdLK5vAhR43CFaEug9A29lExwVXp6BpyXsHGMnzsBbKv+SbxzFbPnOO7OdgZfj8k05nY/Qitc2CjzxemJYRc/8/nXs3tbYm7+g2Iev/Ogiw+oxKvGyDY9hc+0bvekTB62ed9xD8LjFf4yHAs+oXZN73kC8G+9IL6AO2zElfiyOwTzY5L/SXcE927DtH7ciIp93ZvcUYOA530PJuJPSTTPl1ynwTNk2Fp0BAa0EXky+S+e9xmZs6uhKVjGiqAAn9a+U31Sc/veWDMpWfcjKdxdg2cuvp8Q95xtfRdWQwarnYkD9CFDg6ceamfQnwDvw9GfOjPoRoMDTj3WmmSjwMiWVB+PqdzThv399D93y2RqeAg/O+OJxGDp2YB6sPHWJwVAr3lv+C7R1bZfvPrRj6qirMGrQGabgQIFnijapXmQ8FkWwcTNc5fLba10
e1eOLEpACT5ROsA61CWgl8LZ0dePt3XtSyj25ugojCsV8JDXcE8Jbjz+VfJQ2cU2eNwdHn3Wa2rgZT2cCFHg6A2c6XQlQ4OmKm8l0JkCBpzPwDNJR4GUAKZ+GRCJRtDS0o6yqGE6X2I/aaNmXeDyGju5d8l13pXA7i7RMpWpsCjxVcTKYYAQo8ARrCMtRjYBWAm9pUytWtLan1DmlrATHVop9R3lb4x64PZ7kGXiZXgH588umzm645WM/RhYXwi7oXYaZrsdK4yjwrNRNruVgAhR43BNWJkCBJ153KfDE6wkrIgHFBCjwFKPjRBMQoMAzQZNYoiICWgm8tlAYz+2sQyS296UQdlluXTh4IEpd4r+UKRuQ7eEwntlWh0Bk71l//eQ32F4ydCAlXjYQNRxLgachXIY2nAAFnuEtELKAOOIIx1rkfw/55F9eIWvMpCgKvEwo6TuGAk9f3sxGApoSoMDTFC+DG0yAAs/gBjC9ZgS0EniJgpt6erCmrVP+owQwsbQIlW63ZuswKvAH8jl/Hze3paQ/f3A1hhbm99s5jerHwXkp8ETpBOvQggAFnhZUzR0zKp9rXR94ET2xRthsEipcc+SXSJnzZUwUeOLtRQo88XrCikggYwL+jgjm/7UWWz/pwICRPpz51cGoGVGE1s5QxjE4kATMQoACzyydYp3ZEtBS4GVbixnHU+CJ3TUKPLH7w+pyI0CBlxu/bGd3dMfw11c6sXxDD0bUOHH1WUUY1E+sY5+aet5BR3jlAUuzYYjvy3BIYp4/21cPKPCy3aHaj6fA054xM5CAZgRefmAbNixq7Y3fb4gPN/5hMgWeZsQZ2EgCFHj60G+p3YymbetRNWI8ygYO1ydpnmehwMttA7SHI/IjtLt6H6Gt9nlx4ZBqPkKbG1bVZlPgqYZS80CRBfIj9+/VQaopgPOSUbAVWutxey0AUuBpQfXIMR/8ZzsWrQr2DhjU34Ff3VihbxFpstX6/4lQrD5l1ADP+fA5hglVZybFUOBlQknfMRR4+vJmNhJQlcAfb1gFf1u4N6YkH9r9nSenIeLYe94RLxKwEgEKPO27uXHR61j+wuO9iaaefw1Gzz5d+8R5noECL/cNEIjKL7Ho4EssciepfgQKPPWZahEx/PJ2hP6w/64haUwZvL86DpDPzuR1ZAIUePrujhvubUJ7VzQl6R9urURpkV3fQvrI1hlehz09r/eOcNorMMhzmfw4rTg1ZgqLAi9TUvqNo8DTjzUzkYDqBA6+A6//cB++/jvegac6aAYUggAFnvZtePFnX0ewc/9ZYp6iUpx3+0PaJ87zDBR4eb4BLL58CjxzNDj4g0WIrmpOKdb70EmQBheaYwEGVUmBpy/4g+/AGzrAiV98vVzfIjLI1hXZhO7IBvlO8CKUuWaY9kUWogm8YCSMtlBQfllVofyzhfz84QIFXgbfgBxCAqISOOQMvOuHoGZkIR+hVbFh0VgMG5rr4Q+FMK5fNQqc1jsAXkVcmoaiwNMUbzI4BZ72jA+XgQLPGO7Mqg8BCjx9OOeaJXjfJ4jO39UbJu6QUPD302HzinW+WK7rVHs+BZ7aRPuOd/AZeNeeU4yBlea7s01fasqziSTwPtqzE6/vWo9YPIYqbxEuGz4VJW7zvuFXaVco8JSS4zwSEJAA30KrblMisrz72/KFqG3b+xNpr9OFa2fMRZmXbzZUl3Rm0SjwMuOUy6gN77+MT//7VG8IPkKbC83M51LgZc6KI81HgALPHD2LNfgR/NFixHf7kZB3nhsnw3HqYHMUb2CVFHgGwmdqzQmIIvACkRDuXzkfcfmvfdfUihqcO2Si5gxES0CBJ1pHWA8J5ECAAi8HeIeZur11D576ZGHKV2YPGY2TR01QNxGjZUSAAi8jTDkP4kssckaYdQAKvKyRcYKJCFDgmahZsTii2zoh9fPyBRYZtm2fwAvULkC89lV5Vgy26lNgG3hyhhE4jATEJSCKwNvU3oSnt3ycAqrSU4A
bxh8vLjyNKqPA0wgsw+pHoHFrFM21cVSPllA6QNIvsYCZKPDUbcrm5gY8/eliCjx1sSqORoGnGB0nCk6AAk/wBrG8nAhQ4OWEj5N1ILC7ZyOao7UY5JqAUkf/rDImBF53wwYEP/llyjzbpJtgKxmXVSwOJgHRCIgi8GLxOB5bvwQNgY5eRIm77xJ34eXbRYFnhY6H4vC86odzRRjRCgnBs7yIDcuP176vfieC5a/ufRNR4hzL2Z93YPj0/D0HgQJP3W/oxL8snvrkA+zkI7TqglUYjQJPIThOE54ABZ7wLWKBORCgwMsBHqdqTuDDrhexsvvNZB5J/uvk0q9giHtSxnkTAq9zw0sIbflP6pya0yENuzDjOBxIAiISEEXgJdh0y4/RLm7YhtZQABPK+mNC6QARkWleEwWe5oi1T+CW5Z37vWBvorhPQsdtJbC5rP1mFvn8SvzjxyFEZYG57yruJ+G8W/JDXh5uZ1Hgqf/9xpdYqM9UaUQKPKXkOE90AhR4oneI9eVCgAIvF3qcqyWBSDyMJxtvTTlXq9I5FOeXfyfjtMk78PZsRnDZz1Lm8A68jBFyoMAERBJ4AmPStTQKPF1xa5PM94cOOHZGUoJ33VCE2BBri6ykwLujB9Hw/qVT4Enwehx8C60232qmiRqNxlG3ow39qovglveDVS4KPKt0kus4mAAFHvdEL4FwDNEtfsQ6wpAGuGEfbP6XJlHgcX+LSiAcD+GpxttyFniBYAS9Z+DJf0CxDeQZeKL2nHVlR4ACLzteeoymwNODssY5PG8E4Hon0JslX+7ASyyYj9Cmbi7egafxN5sJwu/Y0oo//moR2pr88BY4cfU3j8Wko6tNUHn6Einw0jPiCHMSoMAzZ99Ur1p+oCD8QTPiraHe0PZJxbAPL1A9lZ4BKfD0pM1c2RJQ4xHahMALyvKdFwlYjQAFnngdpcATryfZV7TvDLxP5TPwKvPrDLwELL7EYv+WocDL/tvHajPu/993sWV9U++ySsq9uOeRcyyxTAo8S7SRizgMAQo8bosEgXhnBOF396TCKHXBdWKFqQFR4Jm6fXlRfK4vsaDAy4ttkpeLpMATr+0UeOL1hBWRgGICFHiK0Vlm4nevfh5B/wHPlcsr+9kfz0Vxmcf0a6TAM30LuYAjEKDA49ZICrxIHKHX6mE74EYeW40XzumlpgZEgWfq9rH4NAQSZ+BR4HGbWJUABZ54nTWlwLvsf36KdRu3733tqHwVF/rw3n8eTP7/uub9j5IeCXd1uQe7W/a/9EG8tuhXUfv2xWha9TxiYT/KRp+Cyonn65ecmVQnQIGnOlLTBXzuqZV48/n1vXVPnVWD626Zbbp1HK5gCjxLtJGLOAwBCjxui30EYjv9CK9oT0o8W4Ed9lkVkHx2UwOiwDN1+1g8BR73QB4ToMATr/mmFHjnXPl9PPDTb2LU8JpDiFLgZb7Jejp2Y8t/f5AyYdAJN6Jo8DGZB+FIoQhQ4AnVDkOKicXiWPDaZqxb2YihI8tw0tmj4fFa40UWegu8zYt7sHFRD2wSMPZED4ZNdxnSUya1PgEKPOv3OJsVxuWztOL+KGxFDvn3n70/rDbzRYFn5u6x9nQEeAdeOkL7vx6XP6P6N7Qi2hWGb1wZHIX8XJU5PWNGUuAZw72vrKYUeHMvugn/eOTHGFBVToGXw55q3fQO6j/8a0qE0lHzUH3M1TlE5VQjCbidEgrkt462dO4/ANvIepibBNQkUFroRE8ohkAoqmbYw8Zq3BLBB090pXxt7nWFKB9kDRmqOUAmyIoABV5WuDjYZAQo8EzWMJabFYEyWUIFQvJLLOTPJ7yOTCAh7xr+vg7B7R3JQZLbgQFXjoerv/nftG3lvlPgidddUwq8aadfjzkzp+CTVRtRWV6Cm6+/BHNmHZWkyzvwMt9kvAMvc1ZmGUmBZ5ZOsU4lBPQUeKvkt3tv/KAnpcyJp3gwRr4TjxcJqE2AAk9toownEgEKPJG6wVrUJkC
BlxnRYG0n6v+6JmVw0fR+qDhreGYBOMoQAhR4hmDvM6mwAm/dph2IRFPvsnA6HBg9fBB+dO9jOHPeTMyeMQHzF36K7//sEbz4xM+Td+QlHh9LdyWOzounH5YujCW+3rhpIXZ+/CyiIT+qJ56GwdMutMS68nYR8t5OPGzD/W3CHWD+p6Q0h25L/uad+I/2v4Hv2hDCKw+3pazpvG+Vof8wp+brZIL8I5D49k/s78TOjvM38PzbABZfsS3xyST52Vv737vNgjLBYn3DYrR278K46hNQ5hvQW3pXKIR/r1mOLe0tOH7QcJw2Ykzy9wdeKhNQaTt+9tEk+fmE15EJdMl33q3/w6cpA6pmDsCQC0cTm8AEJPkYh0z8isBLsFxpiZ6oednkfyGp8tvX//7yMfmNPql3P5QWF+JH377qkHqv/fYvcPE5c3HuqbPll1Okf4nFAPltjPWtfImFmo1nLDEIuB0SfPIjtK1dfIRWjI5kXkXyDzi8+iRQUuhIPkKr12MqmxJn4C0MQrLbMOYEN4Yf7WaHSEATAomjD4p8DnQHI+j0RzTJwaAkYBQBt0uCV/7V1sW9va8HL6z8CdY2vpv8W6fdg89PvReDSicn//72+S9gY9ue3nZdPXEmzh01yaj2WTavWj8MLC1wyZ9L5Edo5bMreR2ZQOIR2vq/yY/Q7jjgEdqrxsPdj4/Qirxvqsu9GfkVkddgtdoSPVHzUk3gHakof0D+A9XWWhw1YWTvkCu+cQ+uvOR0nHHSMXyEVs1uMpbpCPARWtO1jAVnQUDPR2izKItDSSBnAnyENmeEDCAwAT5Cm9qc5u7t+PPia1L+4bj+83DepDvQFvTja6/9v5SvjSyrws/mnC9wh/O7ND5Cm3n/+RKLzFmJMpKP0IrSif11CPsI7ZFQtbV34bTLvosH7voGjpsxCQuWrMCtdz2M/z75C1SUFVPgibfHWJGOBCjwdITNVLoToMDTHTkT6kSAAk8n0ExjCAEKvMwFXiASxvWv/g3hA44Rmlk9DN859hRDesek6QlQ4KVnxBHmJUCBJ17vTCfwEgjnL/oU9/3haTQ2t6FmQCVuu/GLmDV9QpIuX2Ih3iZjRfoRoMDTjzUz6U+AAk9/5syoDwEKPH0455plp7Qbu+17MCI6BJWx0lzD5c18CrxDW/3SqruwtuHt5BeSj9BOux8DS8Yn/37+jg14dMUihKIRDCgoxvdnnY7qwpK82S9mWygFntk6xnqzIUCBlw0tfcaaUuD1hYYCT5+NwyxiEqDAE7MvrEodAiIIvLYtLdizuj65oMoJ/VE2skKdxTFKXhOgwBO//e+4l2Kxc3myUEn+63PBkzEuMkL8wgWokALv0CYkjgzf2rwU7YE6jKicjRLv/pdYJEYHwiE0+DsxuKgMdkkSoIss4UgEKPC4N6xMgAJPvO5S4InXE1ZEAooJUOApRseJJiBgtMDz7+nC9rc3p5AaMm8kCvoVmoAeSxSZAAWeyN0Bwojg14V/SXkDdnW0H64JXCB24YJUR4EnSCNYhiYEKPA0wcqgghCgwBOkEQeUQYEnXk9YEQkoJkCBpxgdJ5qAgNECb8+qejStbkghVT6uH/ofVW0CeizxQAKx2BZEwy8l/5HdeS4kydg7qSjwxN6fIVng3U+Bp7hJFHiK0XGiCQhQ4JmgSSxRMQEKPMXoNJtIgacZWgYmAf0JmFXgdUuNcMZ9cMV5J5P+u8Y8GY0WeP5G+Q68d1LvwBt60kj4+nPfmmcXAbHYLoSCtwDxnr1l2zxwe+6DTaoxbBkUeIahzzjxwY/QXhg4FWOiwzKen88DKfDyufvWXzsFnvV7nM8rpMATr/sUeOL1hBWRgGICZhN4EVsAqwv+hg5pe3LNQ+UzhYaE5ilePydam4DRAi9Bt3VzM5rXNiJxfpFZz8Dbs24Rdi5+FpEeP6qnnobBM/PrMcBI+N+IhJ5K+WZxuK6Aw3lxzt9AMfncqs5P3pctYRSFU2bD7stM7lLg5Yx
elwA7pDrU25v4EossaVPgZQmMw01FgALPVO1isVkSoMDLEpgOwynwdIDMFCSgFwGzCbxtnjex0zU/Bc+MrpvgjVXqhYx5TERABIFnIlyHLdXfUodPHr815Wvjzv0WKsbMNPvSMq4/GvkA4Z77UsY73d+F3XF8xjEONzDWE8TO334PwdotyS87yqsw9Du/gqMk/YtOKPByQs/JghOgwBO8QSwvJwIUeDnh42TBCVDgidcgCjzxesKKSEAxAbMJvFW+v6LVsSlVJgQuRVV4imIGnGhdAhR4ufd296dvYctbf04JNGDKKRh56pdzD26aCDGEeh5ALPJesmK740Q43TfL/y+3Nz12LnsPdY//KoVC1XlXofz0S9OSocBLi4gDTEyAAs/EzWPpaQlQ4KVFxAEmJkCBJ17zKPDE6wkrIgHFBMwm8Jrt67BGfoR23+WOleLorm/ADrdiBpxoXQIUeLn3lnfg7WcYi+19IYkk9c8drByBAk8VjAxiQQIUeBZsKpfUS4ACj5vBygQo8MTrLgWeeD1hRSSgmIDZBF5ioS32DWhwfQIXClETPA6eeJni9XNiegJhqQu2uPx4nwlfGEKBl76/mYzI9zPwMmGkZEw0GEDtAz+QH6Hd+6KTxCO0w265D/bi8rTheAdeWkQcYGICFHgmbh5LT0uAAi8tIg4wMQEKPPGaR4EnXk9YEQkoJmBGgad4sZyYFYE44mj0vge/c2dyXmFkBCr9s2GT/zLLRYFnlk7lb537XmIRj0RQPO0ESF5fRjAo8DLCpPmgUCSOpVt7EIvbcOwwFzwu8/z+qDmcHBJQ4OUAj1OFJ0CBJ3yLWGAOBCjwcoCn0VQKPI3AMiwJGEGAAs8I6ubI2enYgibfwpRi+wXmoCA8xBwLkKukwDNNq1holgQo8LIEpsHwYDiOHz/fjt1t0WT0ykIJP72wBIXu3M5G1KBU04WkwDNdy1hwFgQo8LKAxaGmI0CBJ17LKPDE6wkrIgHFBMws8Doa/diytBFFVR6MOLY/bDbe+aB4IxxmYpN3CTqdG1O+UtIzEeU909RMo2ksCjxN8TK4gQQo8AyE/1nqxZt78NA7XSmFXH18IU4ZzzNZc+0OBV6uBDlfZAIUeCJ3h7XlSoACL1eC6s+nwFOfKSOSgGEEzCrwdm9oxb//dzHCgUiS3dg5NTj71umGcbRi4rDUgdrCl+SlxZLLs8mvCqnpOg/OWKFplkuBZ5pWsdAsCeS7wOvqDCZ/Zyou8mRJTr3hhxN4X5rlw5mTvOolydNIFHh52vg8WTYFXp40Ok+XSYEnXuMp8MTrCSsiAcUEzCrwXr73Y6xfsCtl3Vc/NA/lg80jlxQ3TceJPfYmtLs2yC+xsKE4NA7umLleGEKBp+NmYSpdCeSzwHv33Q3YvHHvG4GHDqvAvJPHwW7X/7HVgPwI7U/kR2jrPnuEtrzQjnsuLEYBH6HN+XuBAi9nhMIHiHSGsWdxI8JdYZRNqUDRyCLha1arQAo8tUgyjogEKPDE6woFnng9YUUkoJgABZ5idJxoAgIUeCZoEktURCBfBd6OHS1447XVKczmnDQGo0f3V8Qx10kHvsTiGPklFl6+xCJXpMn5FHiqYBQ2SDQUw8ZH1yHcHuqtcejFw1E8pkTYmtUsjAJPTZqMJRoBCjzROgJQ4InXE1ZEAooJmFXgHfII7YnyI7S38RFaxRvBohMp8LRrbDwex2vbluHD+vUYU1aD80ceB6/TpV1CRk4hkK8Cb+nSrVj5aW0Ki3Hjq3H8CaO4QyxEgALPQs08zFK6t3diy983p3yldHwZBl8w1NoL/2x1FHh50ea8XSQFnnitp8ATryesiAT6JNDxQgjtz8tnxdljKLvcjcK5zt7xZhV4iQXwJRbc+OkIUOClI6T86/9Y9y6eXP1Wb4Cp/Ubi7hOvUR6QM7MikK8Cr6sriH//62NEwnvf/CpJNlx48XSUlvqy4sfBYhOgwNOvPw2NHXDIj6BXVOh3BElPSw8
2PLI2ZZH9juuP/nOr9Vu4gZko8AyEz9SaE6DA0xxx1gko8LJGxgkkYBwB/7IIGn4SSClg4P0+uEfZk//MzALPOKrMbBYCFHjadeqG1x9AbWdTSoInz7kNZZ78OcdIO7rpI+erwEuQaWnuxupVuxCLAxMmVqOqinsu/Y4x1wgKPO37FZYl+B/+9B6WLd+eTHbKvHG46ouztE/8WYbGhQ1omL87+XfegT4M/8II2D0O3fLrkigegyfwBlzBJYhJhQh6z0DEPREUeLrQZxKDCFDgGQS+j7QUeOL1hBWRwBEJtDzeg/Zn958xkhhYdpULpZe4k3Mo8Lh5rEyAAk+77t75wRP4qH5jbwKP/PjsP8/7ISSb/i8T0G6V4kbOZ4EnbleMqcwWjsC5dhvsDa2IFRcgPH5o8n/NfFHgad+9d97bgMefWpiS6NabT8ekCQO1T/5ZhkgggkhXBJ5K+W3SNt3S6pYoIe68Xf86IJ+EjrLbUVpShUAogqB8FiAvErAaAQo88TpKgSdeT1gRCRyRwGHvwPuNfAfeSN6Bx21jfQIUeNr1eEdHI+744Ek0+dvgc3nwrekX4ISaidolZOQUAhR43BD7CDhXb4Vje30vkLjXg+BJU2UhYl4jQoGn/f5+/KlFeOe99SmJLr1wOs49a4r2yfMkg6/zb3D2LE9Zrb/ochRWHEuBlyd7IB+XSYEnXtcp8MTrCSsigT4JpJyB9wX5DLxTrHEGHttOAukIUOClI5Tb16Py40Hb2utRU1gJj4MvsMiNZnazKfCy42Xl0Z73lsPWlXpURnDOVMQLvaZdNgWe9q3bur0Jd/3yZUQje+8C83lduPuO83U9C0/7VRqbgXfgGcuf2Y0hQIFnDPe+slLgidcTVkQCignwEVrF6DjRBAQo8EzQJJaoiAAFniJslpzEO/As2VZdFrVxc6N8F94GyO+wwDlnTsaA/iW65M2bJJ+dgecMLkZcKuIZeHnTeGB1bDHejz+HEHoww3YqZkvn5M3qKfDEazUFnng9YUUkoJgABZ5idJxoAgIUeCZoEktURIACTxE2S07adwae1NCCeHEhz8CzZJe5KCsR4EssrNTNQ9eyJ74Lf4renvKFi+w3YpztWGsv/LPVUeCJ12YKPPF6wopIQDGBbAVeXH7UIt7kBxIH75a6IBXLBw/zIgEVCMT2NCPe0QXbgCpIBT4VIspbtNCJHnmvBkJRVeIxCAmIQoACT5ROsA4tCPARWi2oMqYoBCjwROmENnV8HHsbr8b+mhJ8mjQPZ0nXaJNQsKgUeII1RC6HAk+8nrAiElBMICuBFweim1uAYKQ3n21QEaRS856zoxgcJ6pKILJ8NWLbdu6NKT/L45g9A1Jlec45KPByRsgAghKgwBO0MSxLFQIUeKpgZBBBCVDgCdoYlco63B14F9u/hbG2o1XKIHYYCjzx+kOBJ15PWBEJKCaQjcCLB8KIbW5NyWUrlO/CG1aqOD8nkgCCPQi9+k7qvupXCedxM3KGo4bAi8XjWBlqQXs0hKPcFSix82UNOTeGAXImQIGXM0IGEJgABZ7AzWFpOROgwMsZofABDjwD72jbKThOOlf4mtUqkAJPLZLqxaHAU48lI5GA4QSyEnjhKGLrm1NFS7kX0sAiw9fBAkxMQGCBl5B3D7evxdqeveLaa3PgprK73OF1AAAgAElEQVRJqHEWmBg4S7cCAQo8K3SRazgSAQo87g0rE6DAs3J3uTYKPPH2AAWeeD1hRWYjIEuBSJt81pfXBbvHbWj12Qi8RKHx5gDi9V2Iy2uAxwF74u47h/z6Ml4kkAMBUR+h3RrqwP2tK1NWdrxvAC4rGpnDajmVBHInQIGXO0NGEJcABZ64vWFluROgwMudodkj7NrQgdr1HbDZbBgyoQQDRhSafUm99VPgiddKCjzxesKKTEQgFgqh/e2PEG7YeydbwbRxKJgy2rAVZCvwkoVG5RdZhGOwuR2AzbDSmdhiBGKN8kssOsV6iQUFnsU
2mYWWQ4FnoWZyKYcQoMADIvJTD90tfhRWFMDOH5Ra6ruEAs9S7cx6MS27A1j1XmPKvGmnDUBRubE3dWS9kCNMoMBTi6R6cSjw1GPJSHlIoPuTdehesTFl5eUXzIOjxJifvCgSeHnYNy7ZnARyPQMv8QjtI/IjtGsOeIT2ZvkR2oF8hNacG8JCVVPgWaiZXAoF3kEE9mxvxccvrkNIPnvYU+jG9HPHomIwzxu2yrcKBZ5VOqlsHVs+bUXtuo6UycOnlGLw+BJlAQWbRYEnWEPkcijwxOsJKzIRgbY3FiNUtyel4uI50+EZXmPIKijwDMHOpDoRyFXgJcrkSyx0ahbTZEWAAi8rXBxsMgL5fgfe23/6EN1tgd6uFVUV4KRrzPMGyy21dWhobsaYYUNRUVJsst2nfbkUeNozFjlDa0MQK99tSClx+unVKCyzxkvSKPDE230UeOL1hBWZiEDPjnq0v/Nhb8VSoQ8V582FzSU/jmrARYFnAHSm1I2AGgJPt2KZiASyIECBlwUsDjUdgXwWeD3dIbz+0OKUnknyI7TnfPsEU/Tx+Xfew7LVa5O12u12fOnsMzB66GBT1K5XkRR4epEWN0/yDLwN8rExtjiGyHfe8Qw8cXtlhcoo8KzQRa7BUAI9tY0IbtoJu88N36RRkHwew+qhwDMMPRPrQIACTwfITGEIAQo8Q7AzqU4E8lngJRCv+90muN6Lwh6R0FDTCfd5RTjqdOPOS8607V3dftz7lydTho8aMghXnX9OpiHyYhwFXl60uc9F9jTsRuNj/wf/ulUomDQd/b/8dTgr+1kCDO/AE6+NFHji9YQVkYBiAmYVeH60Y720AJ54AcbET4QdxtzBqBg8J+pCgAJPF8xMYgABCjwDoDOlbgTyWeBFd0TQ85MWhLrDyRdZOFx2FN5SAccM437Ym2njKfAyI0WBlxknK4/a/r83w792Ze8SC6bOwJAf/dISS6bAE6+NFHji9YQVkYBiAmYUeG2ox5OOb8GP1uS6B2AsLo/8RlZ4TsUcONGaBCjwrNlXrgqgwOMusDKBfBZ4kXcDCD/ZmdJe+0leuK4sMkXLD36E9orzzsLIQcac8ywqMAo8UTujT12xnh6sv1y+K1V+Udq+SyoowNgnXtCnAI2zUOBpDFhBeAo8BdA4hQREJWBGgfeu9CiWSP9IQXpp9B6MiB8rKmbWZRABCjyDwDOt5gQo8DRHrGuCWCyGtt0N8iHmpXD5vLrmFjFZPgu82I6wfAfe3h9Q7rtc3yiBfZpbxFYdtqZNO2uxp6WVL7E4Qsco8EyzlTUrlHfgaYaWgQ9DgAKP24IELESAAs9CzeRSDiFAgcdNYVUCFHjW6WxHUzPeeOhRtO7aDclhx+zPX4Txc4+3zgIVrCSfBV4CV2RpENEX/IgH47Cf4oXzLJ8CipwiKgEKPFE7o19dPANPP9bMBFDgcReQgIUImFHgtdp24yn7Tb2P0NZgIi6L/IqP0FpoX6q1FAo8tUgyjmgEKPBE64jyeuY//ndsXLS0N0BC4n3p/7d3JmBOlWf/fpLMDDPsIDuiAiJKUQH3pYILSqtt1Y96uVRbW/laq1brQm21H4pal7pUq1XbarHaVq271r2LoiIqUsQNEVR2kG0YZs1k8j8Jf0YzDCR5kpy8z8kdL656Ded53+e9f6/TM/ec5bqpUundUlWqn1IXeKWae6msG4FXKkmX5jq5hda93BF47mVCRxBQE7Ao8BKLrZP13kssXuElFurkS6MQgRe8nFc1vC/rootkQNVo6VLWN3gLzHBFCLwMQRk47KEpv5L1K1aldHrUOT+UQSN3M9B9YVpE4BWGK6O6QQCB50YOdFEYAgi8wnDNZVQEXi70qIWAYwSsCjzHMNKOowQQeI4Go2xr9vp7ZX7N88nqcKhcDtruXOlftadyNNtlCDzb+X25+3mvzpTpf/5b65e2G7S9fOsX50s4HA7OIrNcCQIvS2AcbooAAs9UXDSbJQEEXpbAfDg
cgecDZKaAgF8EEHh+kWaeYhBA4BWDemHmbI43yaNLzpC498/mT7/K3eWQ3pMLM6HjoyLwHA8oy/YWzpotC96YLd369pbdxx8qVV06ZzlCsA5H4OUnz5qP58vGRZ9K9xEjpapf//wMyig5E0Dg5YyQARwmgMBzLxwEnnuZ0BEE1AQQeGp0FBoggMAzEFKGLTa3NMqjSyelCLw+HXaVcX0uyXCEYB2GwAtWnqwmlQACL/cdsfixh2XZ888mBwpFIrLz6WdIzzF75z4wI+RMAIGXM0IGcJgAAs+9cBB47mVCRxBQE0DgqdFRaIAAAs9ASFm0mHILrUTkoF4/5Rba+mbZUBfNgiKHQsB9Agi83DJqaWqSt356tsTjX1yx3GmnwTJy8i9yG5jqvBBA4OUFI4M4SgCB514wCDz3MqEjCKgJIPDU6NIWrpVGWRGulT7xKunl/eHjPwEEnv/MCz1j60ssKkdJl/J+hZ7O2fG5As/ZaGgsDwQQeLlBjDU2yqzzz0Hg5YaxYNUIvIKhZWAHCCDwHAihTQsIPPcyoSMIqAkg8NTotln4YXi9vBlZ2fq0rr1aestXYj0LMxmjbpUAAo/NEVQCCLygJsu6EgQQeLnvg7a30A4740fSY89RuQ9coiOs3tAgz81dITXeVc/jdustuw7spiaBwFOjo9AAAQSeeyEh8NzLhI4goCaAwFOj22bhw2ULpDbU3HpMlZTJt6NDCzMZoyLw2AMlRwCBV3KRl9SCEXj5iXvDR/OkdskiXmKRI86ahqj839/nSnVtU+tIk7+xm1riIfByDIRypwkg8NyLB4HnXiZ0BAE1AQSeGt02Cx8v/0Sq5YsTvcqkwBsiIe8fPv4R4Ao8/1gzk78EEHj+8mY2fwkg8PzlzWzbJjDrk7Vy23MfpRw0dkRf+e4hg1XoEHgqbBQZIYDAcy8oBJ57mdARBNQEEHhqdNssnB+ulhmRFa3HcAttYTinGxWBl44Qf2+VQKkKvIbYevms7j9SGekuO1QdIqFQ2GqE9L0NAgg8todLBBaurJErH30vpaWJ+w+Sr48aqGoTgafCRpERAgg894JC4LmXCR1BQE0AgadGl7ZwbahBVoTqeIlFWlKFOwCBVzi2jFxcAqUo8Gqal8vTy38gTS3VSfh9K0fL+D63bFPitbTEZd1nUenSu0wqOiP7irtrM58dgZc5K470h8BDMxfJ07OXJSfbpX9X+enXd5XEObTmg8DTUKPGCgEEnntJIfDcy4SOIKAmgMBTo6PQAAEEnoGQaFFFoBQF3tvrbpf3NtyXwmtCvzuld4eR7TLcsLJZ/nnVKk/gNUm4LCQH/mg72WV8ZxVvivwlgMDzlzezZUZgQ31UahuapX+PqswKtnIUAi8nfBQ7TgCB515ACDz3MqEjCKgJIPDU6Cg0QACBZyAkWlQRQOBtwja+72+lX+WYdhlOv3m1zP/Xxta/S0i8k+8bJBVVuqtmVEEZK4p7Vyy2LK6T+JomCXlXF4V37CihruW+rwKB5ztyJvSRAALPR9hM5TsBBJ7vyNNOiMBLi4gDIGCHAALPTlZ0mj0BBF72zKiwQaAUBV5N8zLvFtozWm+h7d1hTzmq761bvYX2kbOWyvol0ZRAj762n/TdtdJGyEXosmVpnbQsqf9i5khIIqO6S6jMX+mJwCtC+EzpGwEEnm+omagIBBB4RYCeZkoEnnuZ0BEE1AQQeGp0FBoggMAzEBItqgiUosBLgEq8xOLTun9JVaSnDKo6WMKhsq3y++iFjfLKratb/773LpVy9LV9JRzmbeBbgxb7cIPEq1OlZ2R4Fwl1r1DtU20RAk9LjjoLBBB4FlKiRy0BBJ6WXOHqEHiFY8vIEPCdAALPd+RM6CMBBJ6PsJnKVwKlKvCyhfzJq7Wy8OVa6TqgXEYe21WqukWyHaKkjt/iCjzvtuPInlyBV1KbgMUWnAACr+CImaCIBBB4RYS/lamdFnhr19fIxVfdKSs+XydPTLuqdQmLl62
SX153t8z7eJEM6NdLLjn3VBmz+7Dk3y9b86VbBbay6P49K2X52gb30qAjCORIAIGXI0DKnSaAwHM6HprLgQACLwd4lG6VQNtn4IV2qJJwN3+vvks0xxV4bNIgE0DgBTld1obAc28POCvwausa5KQzp8rYA0bJS6/PSRF43z33ajns4DHynePHy2tvvefJvLvkhQdukPKyCALPvT1GRz4SQOD5CNvRqd79uE5mfVAr3bqUyWF7d5Gunbd+S5qjS9hqWwg8a4nRb6YEEHiZkuI4iwQQeBZTo+dMCSDwMiXFcRYJIPDcS81ZgVdX3yCr11Yn/1x2wz2tAm/Nug0y4eTJMuOp26QssunWiYmTpsjkH58k+47eFYHn3h6jIx8JIPB8hO3gVAl5d/fjn7d2tl33MrnwtAHSoTwYz4hC4Dm46WgpLwQQeHnByCCOEkDgORoMbeWFAAIvLxgZxFECCDz3gnFW4G1G9fbcj1IE3ttz58vUG++Rx/50ZSvNCy7/new3ZoSc8I1xCDz39hgd+UgAgecjbAenuvep1TJ7Xm1KZz85qa/sNCAYb2lE4Dm46WgpLwQQeHnByCCOEkDgORpMAdp64bGwTH8+Ih0q4zJhYkz2OjBegFncGhKB51YedJNfAgi8/PLMx2hFFXiJq+mWr1qzxToGes+169GtS/LrbQXea2+9K7f88WG5/44prXWXXnuX7DJkeznt20dJfWMsLZfKirA0NLWkPY4DIGCNQDgs3pWpYWmKluj+NnyhWT5af+Rfa+TpV9enbNurzhwkfXv5/8yjQvy3U+49gD3mbe2WluD/QFAIfozpLoGySCj5vbs5Fvf+lOj3b3fjobMcCSTeFOxtb4k28707R5ROl8+aIfLbq1PPZi7/TYvsOCQfZzj5X3q+dmOFt7ljLS3iffvmA4PA7YGqDpGM/ErgFu7wghKZ5PMTinufTAecPvMdeezZV7Y4/IRvHir7jd6tXYE3+935MuX6aSnPxDv/stvkwL1HysRjxsramsa00yd+U7JuY1Pa4zgAAtYIlHsnEYmr8DY2NFtrPT/9ZvzdJz/T5XWUUO4nuBvrYnL7Qyvls+WNEvbGO/qr3eWoA7rntc1iDtapqkyaPTnd2IzgKGYOzJ1/AokrlDp6J2QNTTGpy+AXkfnvgBEhUDgCFWVhqUicm9SX6LlJ4dA6NfJf7vSuvnsh9Vzm5EktcshRjp6cZf4j6zY5J85Nmrzzkmip/vL8y3RyP5V1ak/TjEjPLh0y8iuw8o9AIpN8frISeJlM3PYKvHXVNXLECRfIK4/fKlWVm64q+dopk+VXP58ko0cO4xbaTKByTGAJcAttYKPNeGGJ89EVa5qka6eIdKrK729oMm6iQAdyC22BwDJs0QlwC23RI6CBAhLgFtoCwnVo6DlvhuSuG1JfnDX5mmbZfidHBV6e2HELbZ5AMoyTBLiF1r1YinoLbSY42gq8RM0Pzr9O9hm1q0w65Rh55t8zk7fUPvOX6yTiXX20bE192mH796yU5Wsb0h7HARCwRgCBZy0x+s2GAAIvG1oca4kAAs9SWvSaLQEEXrbE7B7PM/DsZkfnEGiPAALPvX3hrMB7cfosuXDq7SLe5STR5piUl5fJ4EH95NG7r5SlK1bLL67+g8xbsFgGDegjl13wPfnK8J2SdBF47m0yOvKPAALPP9bM5D8BBJ7/zJnRHwIIPH84M0txCCDwisOdWf0hwBV4/nBmluIQQOAVh/u2ZnVW4GlRIfC05KgLAgEEXhBSZA1bI4DAY28ElQACL6jJsq4EAQQe+yDIBBB4QU6XtSHw3NsDCDz3MqEjCKgJIPDU6Cg0QACBZyAkWlQRQOCpsFFkhAACz0hQtKkigMBTYaPICAEEnntBIfDcy4SOIKAmgMBTo6PQAAEEnoGQaFFFAIGnwhaoovpQszRKi3SPb3pBW5A+CLwgpcla2hJA4LEngkwAgedeugg89zKhIwioCWgF3hsfr5I3568S7z0
wcsiIATJiUE91DxRCoFAEEHiFIsu4xSaAwCt2AsWdf2b5avkgUp1son+sSg6L9pNy8f4POSAfBF5AgmQZ7RJA4LExgkwAgedeugg89zKhIwioCWgE3vzl1fLwjAUpc55+2K7St3tHdR8UQqAQBBB4haDKmC4QQOC5kEJxelgZrpdnKpalTL5XtKfsHutRnIYKMCsCrwBQGdIZAgg8Z6KgkQIQQOAVAGqOQyLwcgRIOQRcIqAReC/OWSJvLViVsowjR+0gY4b0cmlp9AIBQeCxCYJKAIEX1GTTr2tu2TqZVbY25cDBLZ1lbFPf9MVGjgiKwItFRRbMiMmaz0Q69QzJkP1D0nm7kJEUaLNQBBB4hSLLuC4QQOC5kEJqDwg89zKhIwioCWgEHlfgqXFT6DMBBJ7PwJnONwIIPN9QOzdRQygmj1YslkbvfxOfhA6a0DhA+sarnOtV21BQBN7CGS2y9L2WVgwVHUOy74kRCQXnbmdtxCVdh8Ar6fgDv3gEnnsRI/Dcy4SOIKAmoBF4icl4Bp4aOYU+EkDg+QibqXwlgMDzFbdzk1WHovJeZL1Ewy2ya7RroORdAnZQBN6sh2JStz6esn/2mhiRjt25Cs+5/6h8bAiB5yNspvKdAALPd+RpJ0TgpUXEARCwQ0Ar8OyskE5LmQACr5TTD/baEXjBzrfUVxcUgccVeKW+k9tfPwKPfRFkAgg899JF4LmXCR1BQE0AgadGR6EBAgg8AyHRoooAAk+FjSIjBIIi8HgGnpEN53ObCDyfgTOdrwQQeL7izmgyBF5GmDgIAjYIIPBs5ESXOgIIPB03qtwngMBzPyM61BMIisDTE6AyyAQQeEFOl7Uh8NzbAwg89zKhIwioCSDw1OgoNEAAgWcgJFpUEUDgqbBRZIQAAs9IULSpIoDAU2GjyAgBBJ57QSHw3MuEjiCgJoDAU6Oj0AABBJ6BkGhRRQCBp8LmTNGSlgUyR2ZIQ7xehoVGyh7hA5zpzYVGEHgupEAPhSKAwCsUWcZ1gQACz4UUUntA4LmXCR1BQE0AgadGR6EBAgg8AyHRoooAAk+FzYmiGqmWp5rvkRbvn82f/SNHytDQiKz7WxjeKEvLGqVCQjIk2kl6xztkPYaLBekEXuLZchsWh5Otd9u+RcIVLq6CniDQPgEEHjsjyAQQeO6li8BzLxM6goCaAAJPjY5CAwQQeAZCokUVAQSeCpsTRZ/G58mrsWdSetkxPFwODn8tq/6WhRvkg7INrTUh79/2j24nHeORrMZx8eBtCbxYk8jC58qlcUNixSIVnUWGTGiSsmC4SxfjoKc8E0Dg5RkowzlFAIHnVBzJZkpS4PXrWSkr1zZI3L086AgCORFA4OWEj+IMCFQvXyfllRXSsUenDI7O7yEIvNx5zvv0M3nulRlSWVEhx4z7qgzo0zv3QRkhZwIIvJwRFm2AfF2B964n71Z6Eu/Ln5HNXaVvS2XR1paviSsrwlJVUSbrNnq2rs1n7fyILHsjVVIO2DcmPYfF8jU940CgoAQQeAXFy+BFJoDAK3IA7UyPwHMvEzqCgJoAAk+NjsI0BKINUXnmusdlyTuLkkeOnDBKDjnjMF+5IfByw71w8RK55De3SSy26Qfjzh07yq8vOk+269E9t4GpzpkAAi9nhEUdIPEMvP/GX5NGaZCdvWfg7al4Bl6wr8BD4BV1gzJ5QQkg8AqKl8GLTACBV+QAEHibCHAFnnsbkY7yQwCBlx+OjLIlgblPz5bpd/875S+OnXqCDBixvW+4EHi5ob7vyafliX/+J2WQc087WQ4aMyq3gR2r3tAYldqmmPTvYufKJQSeY5uoSO0E9xl4Wxd4zY0in7xQIY3Vm6BXdInL0AlRifAcvCLtQqbNlgACL1tiHG+JAALPvbS4As+9TOgIAmoCCDw1OgrTEHj+pn/Ix6/OSzlq/1MOljHH7esbOwRebqhffG2m/P7Bh1MGueTMSbLn8GG5DexQ9SPvL5fnPl6Z7Gi
X7TrLWfsNlsoy958hhsBzaBPRSt4JbOsW2sRkvMQi78gZ0EcCCDwfYTOV7wQQeL4jTzshAi8tIg6AgB0CWxN40aY6+fSTl7yFhGTw0MOkrIxfbdtJ1Y1OVy1YIQ9f/DeJxzc9PbRDxw5y8q2nS1XXjr41iMDLDXW0uVlunHafzHr3/eRARx58gJwx8bjcBnWo+pN1tXLN9PkpHR03or9M2Lmvb102x1rkxZc/lvkLV8uIYX1k7EFDpCyy6e2a2/og8NIR4u8tE0gn8Cyvjd4hgMBjDwSZAALPvXQReO5lQkcQUBNoT+DV162RJx87SzbWrEiO273HjnLMcXdIRXmVeh4KS5PA8g+WyLvPvSPlHStk1DF7SfcBPbYA8f7SdfLqvOXSuapcxo/cXnp1yd8+Q+DlZ98t/3y1VHWokO5du+ZnQEdGeenTNfLXdxandLP3wB4yaa8dfevwngdmyfTXP22d75ADBstpJ4xJOz8CLy0iDjBMAIFnODxaT0sAgZcWEQcYJoDAcy88BJ57mdARBNQE2hN4777zd3ljxm0pY447/JcyZOfD1fNQCIH2CHy0bJ1c//R/W/+qm3eV3pTj95bO3ltr8/FB4OWDYnDH2NjULFP/M0+qvReuJD6hkMj5BwyVXXp18WXRLS1xOfvnj0uT9/y9zZ+u3nP4bpx6dNr5EXhpEXGAYQIIPMPh0XpaAgi8tIg4wDABBJ574SHw3MuEjiCgJtCewPvg/SdkxvQbU8b86rifybDhX1PPQyEE2iNwn/eMvJc/WJbyVz85ag8ZOWi7vABD4OUFY6AHWV3bKC8s/Fw2ehLt4B22k916d/Z1vb+89gVZvmJD65z9+3WVK342Pm0Pfgq8ao/Nwo1R6VYeliFd8iPX0y6QA0qaAAKvpOMP/OIReIGPuKQXiMBzL34EnnuZ0BEE1ATaE3gNjRvk8YcmSe3GTQ9279Z9B/nm8b+X8nI7b2hUA6HQVwLPvvOZPPLGwpQ5f/7NMTK4T7e89IHAywtGBikggQ/mfy533DNTaj2R2K1rlfzwtH1kl6G9087ol8BbVBuVRxbVSNS7WjDxGdG9gxw90F/JmRYGBwSOAAIvcJGyoC8RQOCxHYJMAIHnXroIPPcyoSMIqAls7SUWTU21yZdYhEIRGTzkUF5ioSZM4bYINERjcov3jLyPV6xPHjZhzx3l+H2G5A0aAi9vKBmogAQavSvcVq7aIP37dZPysvQvsEi04pfAS8i7BTVNKav/8fAe0inDPguIjaEDTACBF+BwWZog8NgEQSaAwHMvXQSee5nQEQTUBLYm8NQDUggBBYHl6+ukU4cy6VqV39vzEHiKMCgxQcAvgffkko3yYXVjCpNJw7pL94qICU40aZMAAs9mbnSdGQEEXmacOMomAQSee7kh8NzLhI4goCaAwFOjo9AAAQSegZBoUUXAL4G3pC4qD35WIzFuoVXlRJGOAAJPx40qGwQQeDZyoksdAQSejlshqxB4haTL2BDwmQACz2fgTOcrAQSer7iLPtn61fUy99VlsmFNg/Qf3FX2OGiAlAX0SjG/BF4i1PXeLb4LaqLSo4KXWBR9kxewgaZ4vcyKPimLYnOlR3iA7FN2rPSI9C/gjFsfGoFXFOxM6hMBBJ5PoJmmKAQQeEXBvs1JEXjuZUJHEFATQOCp0VFogAACz0BIeWqxxbtC7Nl7P5BG74qxzZ+hu/eSPQ4emKcZ3BrGT4Hn1srpplAEZjQ+KAtib7YO3znUU75V9TMJSWbPZcxnXwi8fNJkLNcIIPBcS4R+8kkAgZdPmvkZC4GXH46MAgEnCCDwnIiBJgpEAIFXILAODluzrlFevP/DlM669KiUI04c7mC3ubeEwMudISOkEniy4ddS3bIq5YvfqLxIuoX7+I4Kgec7cib0kQACz0fYTOU7AQSe78jTTojAS4uIAyBghwACz05WdJo9AQRe9sysVnAFntXk6NsVAlyB50oS9BF0Agi8oCdc2utD4Lm
XPwLPvUzoCAJqAgg8NToKDRBA4BkIKY8tJp6BN2f6Mtm4btMz8HY/aKCUe89tC+KHK/CCmGpx15R4Bt4b0cdkaez9Tc/AK/eegRfmGXjFTYXZg0gAgRfEVFnTZgIIPPf2AgLPvUzoCAJqAgg8NToKDRBA4BkIiRZVBBB4KmwUGSHALbRGgqJNFQEEngobRUYIIPDcCwqB514mdAQBNQEEnhodhQYIIPAMhESLKgIIPBU2iowQQOAZCYo2VQQQeCpsFBkhgMBzLygEnnuZ0BEE1AQQeGp0FBoggMAzEBItqggg8FTYKDJCAIFnJCjaVBFA4KmwUWSEAALPvaAQeO5lQkcQUBNA4KnRUWiAAALPQEi0qCKAwFNho8gIAQSekaBoU0UAgafCRpERAgg894JC4LmXCR1BQE0AgadGR6EBAgg8AyHRoooAAk+FjSIjBBB4RoKiTRUBBJ4KG0VGCCDw3AsKgedeJnQEATUBBJ4aHYUGCCDwDIREiyoCCDwVNoqMEEDgGQmKNlUEEHgqbBQZIYDAcy8oBJ57mdARBNQEEHhqdBQaIIDAMxASLaoIIPBU2CgyQgCBZyQo2lQRQOCpsFFkhAACz72gEBv4shMAAB2RSURBVHjuZUJHEFATQOCp0VFogAACz0BItKgigMBTYaPICAEEnpGgaFNFAIGnwkaREQIIPPeCQuC5lwkdQUBNAIGnRkehAQIIPAMh0aKKAAJPhY0iIwQQeEaCok0VAQSeChtFRggg8NwLCoHnXiZ0BAE1AQSeGh2FBggg8AyERIsqAgg8FTaKjBBA4BkJijZVBBB4KmwUGSGAwHMvKASee5nQEQTUBBB4anQUGiCAwDMQEi2qCCDwVNgoMkIAgWckKNpUEUDgqbBRZIQAAs+9oBB47mVCRxBQE0DgqdFRaIAAAs9ASLSoIoDAU2GjyAgBBJ6RoGhTRQCBp8JGkRECCDz3gkLguZcJHUFATQCBp0ZHoQECCDwDIdGiigACT4WNIiMEEHhGgqJNFQEEngobRUYIIPDcCwqB514mdAQBNQEEnhodhQYIIPAMhESLKgIIPBU2iowQQOAZCYo2VQQQeCpsFBkhgMBzLygEnnuZ0BEE1AQQeGp0FBoggMAzEBItqggg8FTYKDJCAIFnJCjaVBFA4KmwUWSEAALPvaAQeO5lQkcQUBNA4KnRUWiAAALPQEi0qCKAwFNho8gIAQSekaBoU0UAgafCRpERAgg894JC4LmXCR1BQE0AgadGR6EBAgi8woUUjzVKpO4ziXXcUUKRDoWbiJHbJYDAY2MEmQACL8jpsjYEHnsgyAQQeO6li8BzLxM6goCaAAJPjY5CAwQQeIUJKbJ+jlR9MFVCjWskXt5N6r8yVWLd9yzMZIyKwGMPlBwBBF7JRV5SC0bglVTcJbdYBJ57kSPw3MuEjiCgJoDAU6Oj0AABBF7uIcXjUQnF14qEenl/IskBO775PYnUfto6eKzTTlK3z7TcJ2OEjAlwBV7GqDjQIAEEnsHQaDljAgi8jFFxoEECCDz3QkPguZcJHUFATQCBp0ZHoQECCLwcQ4p9IqHo0yLxOpFwZ+9qu2O9/+0vnV8eL6GW6BeDh0JSc/AzIpHKHCekPFMCCLxMSXGcRQIIPIup0XOmBBB4mZLiOIsEEHjupYbAcy8TOoKAmgACT42OQgMEEHi5hRRq+L0n76q/GCQyUOIVJ0vlvF9L+fJ/tH492v9oaRh+UW6TUS3L3qiXj5+oFe/xgjJobKXsfEznrVJB4LFhgkwAgRfkdFkbAo89EGQCCDz30kXguZcJHUFATQCBp0ZHoQECCLwcQopvlFDD7akDhDpIvPInIi3NUrHsEQl7z8KLddtdogMnelfmleUwGaU1S5vllSlrUkCM/lE36bd3+1c1IvDYM0EmgMALcrqsDYHHHggyAQSee+k6LfDWrq+Ri6+6U1Z8vk6emHZVK70Tz5wqH87/zHt+Tyj5ta6dO8rLj96S/Pdla+r
TUu7Xs1JWrm2QeNojOQACtggg8GzlRbfZEUDgZcdri6O922dDze+1fjleto9I+bgcB7VV3tIisnq1SOfOcenYcdM5RCE+i/5TL+/dtyFl6EFjq2TkqV3bnQ6BV4gUGNMVAgg8V5Kgj0IQQOAVgipjukIAgedKEl/04azAq61rkJM8UTf2gFHy0utzUgTe0adeLDdPPUd2HjxwC6IIPPc2GR35RwCB5x9rZvKfAAIvN+bxeLOEYm97V9wtlVB4R4mXJd40u+lFFqXw2bhR5G9/C8vy5d6qvWUfOT4u++xbmF/ltXsF3lndpd/oDgi8UthsrDGFAAKPDRFkAgi8IKfL2hB47u0BZwVeXX2DrF5bnfxz2Q33pAi8scefKw/cOUX69e6JwHNvT9FREQkg8IoIn6kLTgCBV3DEgZ7g6adD8tZbX1x1l7iI/6fnxaRzl8JcifflZ+DtcGiVDP16p63y5Qq8QG+9kl8cAq/kt0CgASDwAh1vyS8OgefeFnBW4G1G9fbcj7YQeKOPnCSH7LeHzH53vvTq2U3OmzRRDtk/cSUBt9C6t8XoyE8CCDw/aTOX3wQQeH4TD9Z8f7wrLMuWpq7p5JNaZOdhxV8nAq/4GdBB4Qgg8ArHlpGLTwCBV/wM6KBwBBB4hWOrHbmoAm/Nug2yfFXqQ54TCxnYr5f06NYluaa2Aq+lJS6/vO4umXDofnLA3iPkpdfmyMW/ulOe/PPVySvy1tc2pWXRtWO51NRFeQZeWlJfHFCY6xOyaIBDMyJQFglLRVlY6hqbMzqeg9whEPr/z/R0pyP3OqmqiEhzLC7RmPcgMz4QyJLA7P/G5YG/f1HUt6/IOT8OSZkD7+tI/PKlQ3lEGqMt3p9YlivjcAi4TaAsEpJy7771+ibOTdxOqrS6i8fz8wiFqg5lEm1u8c5PODcprR1UGqvt1qlCqjPwK9Zo5Oe//uKsuruXST4/Ie+bYcY8ps98Rx579pUt5j/hm4fKfqN3S369vSvw2hac/tNr5H+OHivHHHGA1NanPznoWBmRugZOkLMJPuNQsxmUY/NOwPN3Uu4JvIYmTiLyDpcBi06gQ0VYYp7AS0g8PhDQEHj3vbjMniPSvZvIoeNC0nnrd7VqhlfXJH7xUuFJvCbvh8AmT+LxgUCQCCQEXuIP5yZBSpW1bCZQ6X3vbvbekNTMj5ZsigASSNwhsDEDv2Jt6ZYvTurkZZLPT1YCL5OJ2wq8uvpGmf/JEtlzxNDW8u+cfZWcOvFIOWrcPryFNhOoHBNYAtxCG9hoWZhHgFto2QZBJcAttEFNlnUlCHALLfsgyAS4hTbI6bI2bqF1bw8U9RbaTHC0FXjrqzfK+BMvlJuvOFsO3HukJK7iu+iKO+Qf914j2/XoisDLBCrHBJYAAi+w0bIwBB57IMAEEHgBDpelIfDYA4EmgMALdLwlvzgEnntbwFmB9+L0WXLh1NtFvDtyo941yeXlZTJ4UD959O4r5aUZc+T62++XVWvWJ5+XN/msk2T/MSOSdJetqU9LuV/PSlm5toFn4KUlxQHWCCDwrCVGv9kQ4Aq8bGhxrCUCCDxLadFrtgS4Ai9bYhxviQACz1Ja9JotAQRetsQKf7yzAk+7dASelhx1QSCAwAtCiqxhawQQeOyNoBJA4AU1WdaVIIDAYx8EmQACL8jpsjYEnnt7AIHnXiZ0BAE1AQSeGh2FBggg8AyERIsqAgg8FTaKjBBA4BkJijZVBBB4KmwUGSGAwHMvKASee5nQEQTUBBB4anQUGiCAwDMQEi2qCCDwVNgoMkIAgWckKNpUEUDgqbBRZIQAAs+9oBB47mVCRxBQE0DgqdFRaIAAAs9ASLSoIoDAU2GjyAgBBJ6RoGhTRQCBp8JGkRECCDz3gkLguZcJHUFATQCBp0ZHoQECCDwDIdGiigACT4WNIiMEEHhGgqJNFQEEngobRUYIIPDcCwqB514mdAQBNQEEnhodhQYIIPAMhESLKgIIPBU2iowQQOA
ZCYo2VQQQeCpsFBkhgMBzLygEnnuZ0BEE1AQQeGp0FBoggMAzEFIJthhviUvLmqiEu5RJqDKsIoDAU2GjyAgBBJ6RoGhTRQCBp8JGkRECCDz3gkLguZcJHUFATQCBp0ZHoQECCDwDIRltMV7XJE0zPxYpj0jFvkMlVFGW0Upi66NS98hyia1qFImEpOqI3tJhdLeMar98EAIva2QUGCKAwDMUFq1mTQCBlzUyCgwRQOC5FxYCz71M6AgCagIIPDU6Cg0QQOAZCMlgi/H1tVJ92SMSX12T7L5sp97S6dJjJVxZnnY1df9YKU1zN3xxnCfxup49WMJVkbS1CLysEHGwYQIIPMPh0XpaAgi8tIg4wDABBJ574SHw3MuEjiCgJoDAU6Oj0AABBJ6BkAy22PDE21L/4OspnXc6a7xUHDAs7Wpq/vCZxNY0pdZ+e4CUD+2UthaBlxUiDjZMAIFnODxaT0sAgZcWEQcYJoDAcy88BJ57mdARBNQEEHhqdBQaIIDAMxCSwRZzEXiN72yQ+qdXtq460rdSOn93ewmFQ1mR4BbarHBxsDECCDxjgdFuVgQQeFnh4mBjBBB47gWGwHMvEzqCgJoAAk+NjkIDBBB4BkIy2GLLulrZcPkXt9BGduwlXf7vOAl1SH8LbWK50Q9rpOn9jRLuUS4d9ush4Y7Z3T6bGAOBZ3Dj0HLGBBB4GaPiQIMEEHgGQ6PljAkg8DJG5duBCDzfUDMRBApPAIFXeMbMUDwCCLzisQ/6zPHaRom+9YnEvWfYZfMSi3xxQeDliyTjuEgAgediKvSULwIIvHyRZBwXCSDw3EsFgedeJnQEATUBBJ4aHYUGCCDwDIREiyoCCDwVNoqMEEDgGQmKNlUEEHgqbBQZIYDAcy8oBJ57mdARBNQEEHhqdBQaIIDAMxASLaoIIPBU2CgyQgCBZyQo2lQRQOCpsFFkhAACz72gEHjuZUJHEFATQOCp0VFogAACz0BItKgikK3Ai8Vi8tnipdK/bx+pqqpUzUkRBPwigMDzizTzFIMAAq8Y1JnTLwIIPL9IZz4PAi9zVhwJAecJIPCcj4gGcyCAwMsBHqVOE8hG4C1eulyuuek2WbHyc6moqJCzzjhNDj5gH6fXR3OlTQCBV9r5B331CLygJ1za60PguZc/As+9TOgIAmoCCDw1OgoNEEDgGQiJFlUEshF419x0u7z59n9b56ms7CD33vkbCYfDqrkpgkChCSDwCk2Y8YtJAIFXTPrMXWgCCLxCE85+fARe9syogICzBBB4zkZDY3kggMDLA0SGcJJANgLvB2dPlvXV1SnruPnay2X7Af2cXBtNQQCBxx4IMgEEXpDTZW0IPPf2AALPvUzoCAJqAgg8NToKDRBA4BkIiRZVBLIReI8//bz8+W8Pt86zx8jdZMrPzlPNSxEE/CCAwPODMnMUiwACr1jkmdcPAgg8PyhnNwcCLzteHA0Bpwkg8JyOh+ZyJIDAyxEg5c4SyEbgxeNx+c8rr8vrb82WwTsOkm8cdbh06tTR2bXRGAQQeOyBIBNA4AU5XdaGwHNvDyDw3MuEjiCgJoDAU6Oj0AABBJ6BkGhRRSAbgaeagCIIFJEAAq+I8Jm64AQQeAVHzARFJIDAKyL8rUyNwHMvEzqCgJoAAk+NjkIDBBB4BkKiRRUBBJ4KG0VGCCDwjARFmyoCCDwVNoqMEEDguRcUAs+9TOgIAmoCCDw1OgoNEEDgGQiJFlUEEHgqbBQZIYDAMxIUbaoIIPBU2CgyQgCB515QCDz3MqEjCKgJIPDU6Cg0QACBZyAkWlQRQOCpsFFkhAACz0hQtKkigMBTYaPICAEEnntBIfDcy4SOIKAmgMBTo6PQAAEEnoGQaFFFAIGnwkaREQIIPCNB0aaKAAJPhY0iIwQQeO4FhcBzLxM6goCaAAJPjY5CAwQQeAZCokUVAQSeChtFRggg8IwERZsqAgg8FTaKjBBA4LkXFALPvUzoCAJqAgg8NToKDRB
A4BkIiRZVBBB4KmwUGSGAwDMSFG2qCCDwVNgoMkIAgedeUAg89zKhIwioCSDw1OgoNEAAgWcgJFpUEUDgqbBRZIQAAs9IULSpIoDAU2GjyAgBBJ57QSHw3MuEjiCgJoDAU6Oj0AABBJ6BkGhRRQCBp8JGkRECCDwjQdGmigACT4WNIiMEEHjuBYXAcy8TOoKAmgACT42OQgMEEHgGQqJFFQEEngobRUYIIPCMBEWbKgIIPBU2iowQQOC5FxQCz71M6AgCagIIPDU6Cg0QQOAZCIkWVQQQeCpsFBkhgMAzEhRtqggg8FTYKDJCAIHnXlAIPPcyoSMIqAkg8NToKDRAAIFnICRaVBFA4KmwUWSEAALPSFC0qSKAwFNho8gIAQSee0Eh8NzLhI4goCaAwFOjo9AAAQSegZBoUUUAgafCRpERAgg8I0HRpooAAk+FjSIjBBB47gWFwHMvEzqCgJoAAk+NjkIDBBB4BkKiRRUBBJ4KG0VGCCDwjARFmyoCCDwVNoqMEEDguRcUAs+9TOgIAmoCCDw1OgoNEEDgGQiJFlUEEHgqbBQZIYDAMxIUbaoIIPBU2CgyQgCB515QCDz3MqEjCKgJIPDU6Cg0QACBZyAkWlQRQOCpsFFkhAACz0hQtKkigMBTYaPICAEEnntBIfDcy4SOIKAmgMBTo6PQAAEEnoGQaFFFAIGnwkaREQIIPCNB0aaKAAJPhY0iIwQQeO4FhcBzLxM6goCaAAJPjY5CAwQQeAZCokUVAQSeChtFRggg8IwERZsqAgg8FTaKjBBA4LkXFALPvUzoCAJqAgg8NToKDRBA4BkIiRZVBBB4KmwUGSGAwDMSFG2qCCDwVNgoMkIAgedeUAg89zKhIwioCSDw1OgoNEAAgWcgJFpUEUDgqbBRZIQAAs9IULSpIoDAU2GjyAgBBJ57QSHw3MuEjiCgJoDAU6Oj0AABBJ6BkGhRRQCBp8JGkRECCDwjQdGmigACT4WNIiMEEHjuBYXAcy8TOoKAmgACT42OQgMEEHgGQqJFFQEEngobRUYIIPCMBEWbKgIIPBU2iowQQOC5FxQCz71M6AgCagIIPDU6Cg0QQOAZCIkWVQQQeCpsFBkhgMAzEhRtqggg8FTYKDJCAIHnXlAIPPcyoSMIqAkg8NToKDRAAIFnICRaVBFA4KmwUWSEAALPSFC0qSKAwFNho8gIAQSee0Eh8NzLhI4goCaAwFOjo9AAAQSegZBoUUUAgafCRpERAgg8I0HRpooAAk+FjSIjBBB47gWFwHMvEzqCgJoAAk+NjkIDBBB4BkKiRRUBBJ4KG0VGCCDwjARFmyoCCDwVNoqMEEDguRcUAs+9TOgIAmoCCDw1OgoNEEDgGQiJFlUEEHgqbBQZIYDAMxIUbaoIIPBU2CgyQgCB515QgRN47iGmIwhAAAIQgAAEIAABCEAAAhCAAAQgAAEIuEMgFPc+7rRDJxCAAAQgAAEIQAACEIAABCAAAQhAAAIQgMCXCSDw2A8QgAAEIAABCEAAAhCAAAQgAAEIQAACEHCYAALP4XBoDQIQgAAEIAABCEAAAhCAAAQgAAEIQAACzgu8D+Z/Jj+dcpscvO/ucul5pyYTa2qKyugjJ0l5eVlrgocdNFpuvOwsEoWAGQLNsZj85g8PyZ/uf0Zeefy30qNbl9be//jXf8j9j/1TmqLNcsQhe8svfnKKlEUiZtZGoxDYTIDv1+yFoBFYvGyV/PK6u2Xex4tkQL9ecsm5p8qY3YcFbZmspwQJTJ85V868+EYpK/vifOOiM0+UU44/ogRpsOSgEHjqxRly+Q3T5MqfnSFHjduHc+2gBMs6ZO36Grn4qjtlxefr5IlpV7USufHOB2Xag89KOBxu/doDd0yR4UMHQS0ABJwWeLPfnS9X/uZe2XnwQOnSqWOrwFu9tlq+dfol8urjtwYgApZQqgTOueRm2XXnHeSOe5+Qlx+9pVXgvTH7Q5ly/d1y728vkY5VHeScS2+Rww/
eS04+7vBSRcW6DRPg+7Xh8Gi9XQLfPfdqOezgMfKd48fLa2+958m8u+SFB26Q8i9JD9BBwCKBp/85U154+U256fKzLbZPzxDYgkBCYsyaM08+X7NeTj/x660Cj3NtNot1ArV1DXLSmVNl7AGj5KXX56QIvISwHjZkED87Wg95K/07LfAWLV0pvXp2kz///XlJ/BC4+Qq8TxYt935DeJM8+9frAhoLyyoFAh96V28kBN7uh52eIvCuuOnP0q9PT5l0yjFJDP9+bbZMe+BZuefmn5cCFtYYMAJ8vw5YoCW+nDXrNsiEkyfLjKdua70qeuKkKTL5xyfJvqN3LXE6LN86gQef+LfM/fATuWLy960vhf4hkCSQONdOXHV0xgW/lhO+eWirwONcmw1inUBdfUPSjyT+XHbDPSkC78Kpt8vY/feUbxx5oPVl0n87BJwWeJv7vePPT6QIvHfeX5C8KmnIjv1l/sKlMnznQfLL806TnQb1I2QImCPQVuD94ILr5MRvHSbjvVtnE5+FnrA+/bxr5KVHbja3NhqGAN+v2QNBIvD23Pky9cZ75LE/Xdm6rAsu/53sN2aEnPCNcUFaKmspQQKJx3c8/9Kb3qNqmmVddY18db895OfnnCKdOlaWIA2WHCQCPzj/uhSBx7l2kNIt7bW8PfejLQTeDyffIC0tcfl0yQoJeXi+7Z2fbL4wpLRpBWP1RRd4i5aukg0ba7egucvg7aWiojz59bYCb8GnS+Xeh16Q0044SgZ6z5/53bTH5KUZc1JOqIMRD6uwTKChsUk+9vZq20/3rp1l+/69W7/cVuCdctaV8qPTvpk8cU58lq9cI8d+/1KZ+Y/bLeOg9wAT2NZeb/T+O+D7dYDDL7GlvfbWu3LLHx+W+71nyWz+XHrtXbLLkO3ltG8fVWI0WG7QCLzw8lvy3rxP5XsnTJCWeFwuuuJ2GbrjAO85vN8J2lJZT4kRaCvwONcusQ0Q4OW2J/B+f9+T0qVzR/mfrx8in3l3NCaEXuKXMZsvDgkwjpJYWtEFXuLWwLkfLtwCdmKTJW6fbU/gtT042hyTfSb8rzx//w3Sp1f3kgiORbpPYNmK1XKD9xDRtp+99hie8kyCtgLvjAt/Ld8+ZlzrZf4JYZ342r8f+o37i6bDkiSQ6V5PwOH7dUlukcAsOvFs3inXT0u5VeX8y26TA/ceKROPGRuYdbIQCCQIvOU9OyzxwpZn/nItQCBgmkBbgce5tuk4af5LBNoTeG0B/e6ex2Xl52vl8gtPh10ACBRd4GXCsO0VeIkHkW6oqZWhOw1MlifecriXJ/C+/CKATMblGAi4QKCtwPvVLfdJ186d5OzvH5ds76kXZshjz70if7z+IhfapQcIZEWA79dZ4eJgxwkkbis84oQLvDeH3ypVlRXJbr92ymT51c8nyeiRvInW8fhoLw2BxC8Mu3jnH5t/GT7De0nL1bf+NUVYAxECFgm0FXica1tMkZ7bI9CewEt8beTwwa13M97s3TlQs7Gu9X0CkLRNwKTAmz7zneTrwO+55RfSr3dPuW3ao8k3wd1/+//ZToPuS5JAW4GX+KY7+Yo75L7bLpVOVZWSeE7HScceLsd97aslyYdF2ybA92vb+dH9lgQSPwjuM2rX5PNknvn3zOQttc/85TqJRMLggoBpAjfc8aDM/2Sx3HjZWcnnJyWuLk28bOv8H55gel00D4G2Ao9zbfZEUAi0J/BO/NHlcoj3EosfnfYtWbJ8lXzPe5b65Rd+33s80+5BWXZJr8NpgXeN91u/+x//l3cS0SJx71kckUjEu7VwrFxy7qnyh788JX999EVp9K6+SxjmKRd8L/k8PD4QsEBgffVGGTfxvGSr0WizlJeXJf/9xQduSN46/qf7n5F7H35eYrEW+frh+8tFZ54o4XDiMaR8IGCPAN+v7WVGx1snsNR7PMIvrv6DzFuwWAYN6COXeecfXxm+E8ggYJ5AXX2jTL3pHnn59TlSXlYmhx44Wn5
29smtV5uaXyALKDkCibeEJ55H3ew9bikSDkvIO5e+9pL/9R5Tsy/n2iW3G4K14Benz5LE22Y9SZJ8PE3iZ8nB3gs9H737Svl08QrvxRbTkm9h7uo9C+/UiUcm//AJBgGnBV4wELMKCEAAAhCAAAQgAAEIQAACEIAABCAAAQjoCSDw9OyohAAEIAABCEAAAhCAAAQgAAEIQAACEIBAwQkg8AqOmAkgAAEIQAACEIAABCAAAQhAAAIQgAAEIKAngMDTs6MSAhCAAAQgAAEIQAACEIAABCAAAQhAAAIFJ4DAKzhiJoAABCAAAQhAAAIQgAAEIAABCEAAAhCAgJ4AAk/PjkoIQAACEIAABCAAAQhAAAIQgAAEIAABCBScAAKv4IiZAAIQgAAEIAABCEAAAhCAAAQgAAEIQAACegIIPD07KiEAAQhAAAIQgAAEIAABCEAAAhCAAAQgUHACCLyCI2YCCEAAAhCAAAQgAAEIQAACEIAABCAAAQjoCSDw9OyohAAEIAABCEAAAhCAAAQgAAEIQAACEIBAwQkg8AqOmAkgAAEIQAACEIAABCAAAQhAAAIQgAAEIKAngMDTs6MSAhCAAAQgAAEIQAACEIAABCAAAQhAAAIFJ4DAKzhiJoAABCAAAQhAAAIQgAAEIAABCEAAAhCAgJ4AAk/PjkoIQAACEIAABCAAAQhAAAIQgAAEIAABCBScwP8D/9u5iHINDGUAAAAASUVORK5CYII=", + "image/png": "iVBORw0KGgoAAAANSUhEUgAABPAAAAJYCAYAAADsXBi6AAAgAElEQVR4XuydB2BUxdqG3930QhJ6B2mKoKAIghVQQEVEr7137L2i/vaKvfeCvVdEERRFBVFBQBDpCEgLLQnpm939Zw5uyIaQbDnn7Jwz77m/97+SMzPf93yTkDyZ4gmKB3xIgARIgARIgARIgARIgARIgARIgARIgARIgASUJOChwFOyLgyKBEiABEiABEiABEiABEiABEiABEiABEiABAwCFHicCCRAAiRAAiRAAiRAAiRAAiRAAiRAAiRAAiSgMAEKPIWLw9BIgARIgARIgARIgARIgARIgARIgARIgARIgAKPc4AESIAESIAESIAESIAESIAESIAESIAESIAEFCZAgadwcRgaCZAACZAACZAACZAACZAACZAACZAACZAACVDgcQ6QAAmQAAmQAAmQAAmQAAmQAAmQAAmQAAmQgMIEKPAULg5DIwESIAESIAESIAESIAESIAESIAESIAESIAEKPDEHZv+1BM+89hnmL/4HwUAQu3Zpj4vOHIkBfXpUz5DDT70Bq9bkV/97amoKWjTNQ589d8XJxxyC3j26RDSbAqL/LyZOxWcTfsbCJStRVlGJ5qKffr13w5knHIbuXTtU9zPy7FvQuUNrPH7XZRH1rfJL5SLPQcddZeT51L1X7jTUI88YjTTB9pNX7lY5nR1im/LLHLzz6bdYIGq6pXAr0tNSsVuXDjhp5GCMGLqfUrmcf91DWLB4Jb7/+HGkJCfVGdsdD4/Fp1//hMkfPYbTLr1HzPNuuO+mUbbnceuDr+Ln3/7E9x89bowtPw/tiGXcxGkYfd+LmPjew2jbqpnteXNAEiABEiABEiABEiABEiABEiABEqhJQHuBN3fBcpxx2T3YR4ils4RA83g8ePmd8Zg9bwnef+H2aqEmxUF2VgZuuOQUg19FZSWWrVyLL76ZakibS88+BpeIf+p7fFV+XPF/T+LH6XMwbGBfDNp/b2RlpmPFv+vx4bgfsC5/Ex645UIcPnhfoxs3CTyZzz2Pv4kPxn2PyR8+hmZNcndANWveYpx+2b34v6vOwCnHHGrKZ2rh1hLsf9Sl+P3r55GZkW5Kn7U7kaLr/8a8gmMOPxCHDeqHpo1zsWlLoSHAJk6ZgZuvOA2nHTvUaPbtTzPxwpvj8OGLd1gSSySdfvPD77jmjmcMMTz04L47NJGydeCxV+KAfnvg0Tsuxfjv
phv16r/37pF0b+o7tQWeVbHc9+Tbhsy8/pKTjfjl5+T0mX8J+bq/8TnKhwRIgARIgARIgARIgARIgARIgAQSSUB7gSdFxh9zF2Piuw9BrqqTT1FxKQ48+jIh9A7HtRedaPyZFHhyJc4rj94QVi+5ou6Bp9/B259MErLjEiFwtsm3up7HX/oIL739Je4dfb4he2o+pWUVOO/aB7FsxRoRy8PIzclyncBbuHQVjj3vVlx30Uk45+QjdkAkZc1XQhb9IFaGNcrONOXz4qdf5+KiGx+xVOANP/1GtGreBK8+duMOMV9+yxMQVhhP3XOF8bFHX/gAv8ycn1CBJ0XyIcdfhT26d8ZzD1y9Q8yh1WcvP3w99uvb05Q6xNpJbYEXaz8NtTv54ruwj1hNGxJ4Db3Pj5MACZAACZAACZAACZAACZAACZCAnQS0F3gbNxdCyrMObVuEcT/omMuNFXJ333Cu8ec7E3jyY35/AEeddZOxbXJnWz/Lyitx8P8uxz69dsXzY66ts8Zr12+ClCuhWOQKvG6d2uKQA/vg6Vc/xZp1G9GyeWNDMoRWTn056RfceO8LeOPJm3H7w68ZsXz99hhIsfja+1/j4/FTjHbp6WnYe49uuPL846pXFcrVTDfc/TzeffZWPPz8+8ZKQpmDXC123JEHQ26j/GPuIiSLlUkjDzvAEG+h559V6/DEyx+JVUrzUVZegRbNGmP4oQOMlYgpKck7ncMnX3QnSkrLMe6N+8Pe2cbnCmMF2z03nmd8bOrv8/DiW+OweNm/gksVeu3eBVdfeAL22K1TddutQrY+8fLHmPTjDBSXlKFzxza44PQRBp9nXvsUz77+efW7Bw/obQirSNjc9tCrmCdWZ5536pG4X6zOOuTAvXHX9dvmQu1n6MnXYZd2rfDSw9fV+7l71pX3Y8achdXvhESmFMaPCbE3eeosY/tt49xGGLhfb1x9wQnG/5bPzuIJBoPG1t1PvvpJrBpbhzRRv4P79zbEc12rHEODP/L8Bxj7wdfGaki5hbvmc+7VY7BazJkJ7zxorEitvW1VzpPHX/pQ8PkHJWXlQl42xlHDDsBFZ4yE1+vBR19OMebidx8+aojN0HPB9Q8bcvy9524z/iiSOVTfFlpZdzk/6nrkykE5l+Tz1seT8MEX3xur6jIz0rCb2KYu2Ya2vfccdHZYF3Ll7fIVa3fYQvv9tFnG6slFQkTLZ9fO7XDuKUcaq2nls2FTgbFNfIxYRStXk06e+ocxJ+W2+FuuPCNse3y9E4UfJAESIAESIAESIAESIAESIAESIIFaBLQXeHXNCLmtVoqm+28ehZFCTMinPoEnP/7Yix8aW29/+uwpNMnbJl1qPr/PXoCzr3rAEILHDj84ookoBZ7f78cu7Vth1GkjkJSUJFZwvS/kwBJDvMhxvvnhN7Ed8llDzsnz1roJqSCFgYzn9Q8m4LqLTxYici+xpbPIWCkoV/iNe/1+IdzyRNttWynlmWJSTnUUEurBZ9/Fmx9NxJ7dO+GGS08xzvgLbRF98aHrjG2VUoAdfur1xiq5O649G3m52UJq/GsIj9OPG2pIwp09H4//0ZBR7whpWPPcQHkm4C0PvFz951J0nXP1Axhy0D64/LzjDA5SYk6bMQ8fv3yXkJwtjSHkO6vWbBCC5HS0btEU4yZNw9j3J0DGKmOXElOKvEniLLOcRlnGNuhI2Nz92Bv4YdpstBGrLi884yjBpiXatwmXvKEcJVfJ7EghMOV5iL3EeYjJola1HykbZa3k9lq5kjNDSFV53p88Y26NkLe3XXMWuovzF6Ugu/PR10U+TfCukF1Sou0sHimUnnzlY1whGMmz9qQEvkvE7hVtPnzpzp2ecbdy9XoccdqNuGrU8cbcCj1S3A0TQrLmn9cUeFWiDoOFpJI5XnbO/wyeUlbJ+C4682icd8rwiARepHOoPoFXWFSCwq3FYZhve+g1Y45/+OKdhuwOzV05lwcLIS+3B7/w5heYJuTw+LfG
GJ9DUpoOPelaHH3Ygbjs3P8Z8/rr734NE3ihlZwnjBiEM44X26EFXznPPvnqRzx7/9WGcJX9HHj05ca4kt8RhwwQAq8U512zTYTKecuHBEiABEiABEiABEiABEiABEiABGIhQIFXi9rmgq049ZK70Vj8YP/WU7cIaeY13mhI4MkVPlK6yB/Sa15EEeo+tFJu7OOj0W+v7hHVSgq8LQVF4iD9R4TsSTXa/DZrgSGt5Cq+g/rvWS3hagoXuZpNbgGW23RvvfrM6rHkSjy5Wiz0bkjg1dzS+/fiFTh+1O3GhRo3CukhH7mqr/eQ84QkOlasbjvKEHhSAMkz5aQIDD1X3vqUsXLrIyGOdvbI1Y7yfLXhh/bHndedU/2aXJ0mhcxnr91j/JmUHnJFmZQsUnLJR+Y15KRrMEysrrtdiEO59fmMy+/d4Sw3KQjl9tATjxqE19772lhdGDoDL1I28ry+dz/7bgfRWFdectWkXI347qffGYJIrvLq3bMr9tunJ0YM2c8QOqHn4tGPQa76DJ2BF6pn7e3Xn4uzFW++/6Xq8euKp6LSZ9T5ILHiTrYPPXP/Xga5JfShWy82OO/skSvt1m3YjK8E49AjZeeLb31prJ4LreCrKfDkRS7y3+WFFkeLVZmhR26PlmfFtWvdPGKBF8kciuYSC7k9Xcb/6mOjDSktn4LCYiPHmp+Ti8SKzv+d+394+r4rDaknn76HXyAE+CHVW2hrX2Ih5+cW0dfnYn5KGScfufpRSlC5YlYK45DAk8wl+9Aj45Lb52dNfKl6m/5Oi8IPkAAJkAAJkAAJkAAJkAAJkAAJkEAdBCjwakCRQuGC6x8xBIxcIRXavihfaUjgvf3Jt7jvybfwxdh70WWXtjug/vJbsdX1nheMc9IivQxACrw2LZuEbbldKlYXjTzrZjx828VihU//aoEnt9DK7bnyCa0gfPDWi4xVYTUfucVPyg25xTAk8OSWwdC21NAKLCnXjh8xsLpp/yMvNv79erGiTz5Sgrzx4TeYI27wlWIjEAwYW5Gl9Pn2/Ufq/WS785GxxsUIUz550hCTK1fnCxFyg7js4XSxfXeI0Xafwy4w8gttpw11eNnNT2D9xi2GAHtdjP/gM+8aZ+bV3gYaer+2wIuUTejCjdmTXjG2hUbylIrtpHJLsVw9OOPPhfhr4T/GCrhbrz7L2JIsn9oC79X3voLczlo7B7ndU56tF7rQo654QrnUlmlyHKNeR4p6/XcpQ13xfyVWmV1/93N486mbjdWKUkgNO+V69Nx1l7Cbj2sKPClzpRyUIk+u+NxfrMjsI1Z/1tw2HekW2kjmUKQCT14Mc8lNjxtSOMRa5ixXDL7/+feY8P2vYiv5JnHrc4Vx07Tcyivn1v+OOMhA05DAk/PxKHGhxR3XnR2GUm5BnzbjL/z8+VPVAk9uXz735OHV773/+WRjVeSUT56od1tzJHOM75AACZAACZAACZAACZAACZAACehJgALvv7rL1VyX3fI4du/a0ZAXtS9RaEjgScHy/heTMf3L5+q8tTK0WkyuiDv56EMimm113UIrb7496sybqldXhSScXLnWrVM7o99fhFA4/7qHjPPe5LlvNZ8Roq28jOOFB6+tFng124YE3gM3C2ExbP/qpjWF0L9rN+CYc25Bl45tjdV87cUKJHlOnhSY8xetaFDghVb5hcST3AIqtyP+IARHjti+KKVL70PPM8SZ3DZc85FbaZvk5RgyRG6pfe6Nz+u9oKK2wIuUjaznV5OnY9oXz0RUq7pekkL46tufwdJ/VuPbD7ataKst8ELnuM2Y8GL1KkvZl1ylJ1cqyrPazhfn8NUVTygXuV3XU0sy+nxVhgCVondnj3xnkLjMQq5CkzJLykd5kUpoq3SoXe0z8OS5bvLcPXnDrqylXIk5YsgAY7u2XIUXicCLdA5FIvCWi88JKRXlFlh542/NR27tfV+sjpXbumWe2dkZxpmQZ1x+X8QCT66glPNRXrxS8xxI
OY5cdSu3f8vVdaEVeDVFtHyHAi/mTyE2JAESIAESIAESIAESIAESIAES+I8ABZ4AIc/wGiWElzyz6vZrz6rz/LL6BJ7cNinPDZMXKMgtsnU9lWK748FCyMhD/eVFF3Wt6pJb/T4RZ8TJ7avybLFYBd68hctx0oV3Ymcr8Pr23s0QO3XJv0gEXkiKff32g2GXf4TOo2toBZ7kc8IFd4gc0/Hqozca54/tu/fuxrbM0NPviAuNraHyTLLajzzfTZ4L+I7YsnrvE28awrB1y6Z1cq8t8CJlE43Ak0JIrgCs6/KOb3+aCbm1WF5wsX/fPXYQeKH4drYC73ZxLt6JYqVbXfHIFX4nXniHsSryYHEGW+1HriSteYlEXYAeevY9fDDue/z46VO445HXMEuI7G/EjcyhbaKyTW2BV7MfuUX1a7G6Td6ue8gBfTDm/y4UF6dsO+ew9iUWUprJy0jkJRaRzqGGBJ48V/AkcV6lzPNFwbj22YNyZd2h4hxFebFE6JErJOWW2GhW4Ml+RgypewXer7P+NoQyBR7/XiUBEiABEiABEiABEiABEiABErCKgPYCT17uMOKM0RgmbqyseSZbbeA7E3hyS+H/jXkFX0ycilceuQED9umx01rJG1HlGV2Xn3usOPB/ZNh7cvvpxaMfxeLl/+LLNx4wDtePVeBJoSjPRpMrkmqegSdXPR0mtkjKA/3PEpIwVoEXujjh1/HPGaJRPnIbrFwZ2Lyp2EIrVps19MgzA+9+/A08dudlhuB66+lbjIs4Qs+o6x42LnuofauvHEfeepoqzsULnfVWU8TI9rI/eRab3D4aEkW/ffW8sTosUjaRCryff5uLC294pHqra+28n3r1Ezz/xhfVW6vlCjx5W2nonMCZfy7CmVfcZ5xhd9igfaubhy5fkO/t3q1jnQJPSuEDxKUJxw4/CDddHr7yTG617tyhdZiIq6smcvWaXJUpV1zKbZ6jTjvSOOew5lNT4ElZOUtsm669NVvehDxfCEV5u7BcmXf17U8b5xmGVoXK8/oGi9V+8vIRKfAinUP1CTx5FuMlNz2K5SvX4YMX7kBuTlZY3HJL8N5Dz8cp/xtSfZ6jfEF+vkq+tQXeiUcNNj435FP7DDx5XuBG8bVCbpEPPaGLOHbt3N44T48Cr6HPen6cBEiABEiABEiABEiABEiABEggVgLaCzz5w7wUWfJMuNBlCSGY8ny2PXfvbPyrlBhSVt1wSehiB79xkcNHYrXR/EX/GKugzj7p8HrrILeGyi2Vk3/+A/v17YkjBvdHXk42Vqxeh/c+myxu1CzB0/deWX3JRawCTwYht6W++u5XGC3EzsH9eyFfSKP7n3zbOD/ucyEhcsWNrLEKvNDFC1L0yDPrlixfjTHPvGPImok/zsBnr96DtkKgyfPfdvaUlJZj0HFXGtsvcwWDmmJEtgndQnvc8IHGza7yPSnLHnruPVx74YnGbbfykVs+5TbO/7vyTOOmWLntVW7HDV3yIW8JlRJICiopwrp2ahsRm0gFnpREl9/yJH78dQ7kDaUHiotFZE3lhRw//fqnsX3zMCGHH7l92yUT8qZeebvti2ILc1OxpVZuZ5Yr01av22AI5K6C4bwFy4xbXWW8cquzfHYWjxRhchvxNWKrrdwuLVe4fTjuB+PcN3nTb8/ddmnwa4MUiPLMPSmgvhPytfZ5gjUFXkiaSgF8tLgkRUpRKZ1vE4wPH7yvuA34DEhRLM80POWYbeJMSlM5P6b8MsdYKSkFXqRz6C6xRfXn3/7E9x89Xv15KM9wlKs15aq/seKm5cfuuEzcvhx+7qScL3LLslxpt1rE8/R9Vxnbs9/6eJJxVuNH46cYW9nl1lq5Xf7QE64xYpPCW17M8vOvc8NuoZW3H0upLNvIFbJVgvMr4vNL3nr8+hPyDMFuFHgNzjS+QAIkQAIkQAIkQAIkQAIkQAIkECsB7QWeXBWUv7GgTn5ym+b4Nx+oFgfy4P7QI7cYylVy8vB/
+QN96NbLhgohhY+8kfaTr38U4mkl5MokuaJsP7G98lxxxpZcORZ64hF4cpzX3v9ayJwpxplfUrTI1YHyTLX2bVoYQ8Qq8GRbebOmvLijSEjHHuLSgxsvOxWZQniOuv5hyG2NUtLUdZlHTT5ym6XcbilXj4WEXM2PyzPZnhn7mSFI5ZZjuXpLXpwgt5SGHnkZwaPiEojvfp4JKQU7iVVncnXjUHFTrXykFL1QxCRZ9+7ZBfKyj0jYRCrw5BhSzMpz32RdpdQtKCoW59mliTMC22CEuPjgxJGDqrd2/jl/qSFx5erCs086wjhDUPKSMmry1FmGBJLiadjAfuLW3+OMC1XkU1888rZceQOucaurqHP3Lh1woWAQ6WUpoRuSDz2oD568+4odpnDtLbQyzpdF/RcLcStzl9tXpaS85Kyjq29ZleJUrjzcLG5RlkLw3FOGiwtPlmKxuPxEXpoS6Rwa+8E3OxV4R4qVs/+sWlfnp5y8/Vdu55Ufv/3h14QUXW4IeHkz8+XiNuUxT7+DD0XNhotzAqUMlCtC5W3FgUDAkPlSwErZOvG9hw3JKh8pXp8XsnShyEFu45bz/tJzjsGAPttW3XIFXkNf/fhxEiABEiABEiABEiABEiABEiCBWAloL/BiBcd2JEACJEACJEACJEACJEACJEACJEACJEACJGAHAQo8OyhzDBIgARIgARIgARIgARIgARIgARIgARIgARKIkQAFXozg2IwESIAESIAESIAESIAESIAESIAESIAESIAE7CBAgWcHZY5BAiRAAiRAAiRAAiRAAiRAAiRAAiRAAiRAAjESoMCLERybkQAJkAAJkAAJkAAJkAAJkAAJkAAJkAAJkIAdBCjw7KDMMUiABEiABEiABEiABEiABEiABEiABEiABEggRgIUeDGCYzMSIAESIAESIAESIAESIAESIAESIAESIAESsIMABZ4dlDkGCZAACZAACZAACZAACZAACZAACZAACZAACcRIgAIvRnBsRgIkQAIkQAIkQAIkQAIkQAIkQAIkQAIkQAJ2EKDAs4MyxyABEiABEiABEiABEiABEiABEiABEiABEiCBGAlQ4MUIjs1IgARIgARIgARIgARIgARIgARIgARIgARIwA4CFHh2UOYYJEACJEACJEACJEACJEACJEACJEACJEACJBAjAQq8GMGxGQmQAAmQAAmQAAmQAAmQAAmQAAmQAAmQAAnYQYACzw7KHIMESIAESIAESIAESIAESIAESIAESIAESIAEYiRAgRcjODYjARIgARIgARIgARIgARIgARIgARIgARIgATsIUODZQZljkAAJkAAJkAAJkAAJkAAJkAAJkAAJkAAJkECMBCjwYgTHZiRAAiRAAiRAAiRAAiRAAiRAAiRAAiRAAiRgBwEKPDsocwwSIAESIAESIAESIAESIAESIAESIAESIAESiJEABV6M4NiMBEiABEiABEiABEiABEiABEiABEiABEiABOwgQIFnB2WOQQIkQAIkQAIkQAIkQAIkQAIkQAIkQAIkQAIxEki4wFuzqSzG0NksRCAzPQkpXi8KS32EQgLKEGicnYqyyiqUVwaUiYmBkECrJhnI31KGQJAsSEANAvJrZUZaErYUV6Kswq9GUIxCewKZYk6mJIvvLUv4vaX2k0EhAHni62Wlz49Sfq1UqCoMpWXjdGwsrICf31xyMtRBoE3TDFO5UOCZijMxnVHgJYY7R62fAAUeZ4iKBCjwVKyK3jFR4Oldf1Wzp8BTtTJ6x0WBp3f9Vc2eAk/VyqgRFwWeGnVQKgoKPKXKwWD+I0CBx6mgIgEKPBWrondMFHh611/V7CnwVK2M3nFR4Oldf1Wzp8BTtTJqxEWBp0YdlIqCAk+pcjAYCjzOAYUJUOApXBxNQ6PA07TwiqdNgad4gTQNjwJP08IrnjYFnuIFSnB4FHgJLoCKw1PgqVgVxsQVeJwDKhKgwFOxKnrHRIGnd/1VzZ4CT9XK6B0XBZ7e
9Vc1ewo8VSujRlwUeGrUQakoKPCUKgeD+Y8ABR6ngooEKPBUrIreMVHg6V1/VbOnwFO1MnrHRYGnd/1VzZ4CT9XKqBEXBZ4adVAqCgo8pcrBYCjwOAcUJkCBp3BxNA2NAk/TwiueNgWe4gXSNDwKPE0Lr3jaFHiKFyjB4VHgJbgAKg5PgadiVRgTV+BxDqhIgAJPxaroHRMFnt71VzV7CjxVK6N3XBR4etdf1ewp8FStjBpxUeCpUQeloqDAU6ocDOY/AhR4nAoqEqDAU7EqesdEgad3/VXNngJP1croHRcFnt71VzV7CjxVK6NGXBR4atRBqSgo8JQqB4OhwOMcUJgABZ7CxdE0NAo8TQuveNoUeIoXSNPwKPA0LbziaVPgKV6gBIdHgZfgAqg4PAWeilVhTFyBxzmgIgEKPBWrondMFHh611/V7CnwVK2M3nFR4Oldf1Wzp8BTtTJqxEWBp0YdlIqCAk+pcjCY/whQ4HEqqEiAAk/FqugdEwWe3vVXNXsKPFUro3dcFHh611/V7CnwVK2MGnFR4KlRB6WioMBTqhwMhgKPc0BhAhR4ChdH09Ao8DQtvOJpU+ApXiBNw6PA07TwiqdNgad4gRIcHgVeggug4vAUeCpWhTFxBR7ngIoEKPBUrIreMVHg6V1/VbOnwFO1MnrHRYGnd/1VzZ4CT9XKqBEXBZ4adVAqCgo8pcrBYP4jQIHHqaAiAQo8Fauid0wUeHrXX9XsKfBUrYzecVHg6V1/VbOnwFO1MmrERYGnRh2UioICT6lyMBgKPM4BhQlQ4ClcHE1Do8DTtPCKp02Bp3iBNA2PAk/TwiueNgWe4gVKcHgUeAkugIrDU+CpWBXGxBV4nAMqEqDAU7EqesdEgad3/VXNngJP1croHRcFnt71VzV7CjxVK6NGXBR4atRBqSgo8JQqB4P5jwAFHqeCigQo8FSsit4xUeDpXX9Vs6fAU7UyesdFgad3/VXNngJP1cqoERcFnhp1UCoKCjylysFgKPA4BxQmQIGncHE0DY0CT9PCK542BZ7iBdI0PAo8TQuveNoUeIoXKMHhUeAluAAqDk+Bp2JVGBNX4HEOqEiAAk/FqugdEwWe3vVXNXsKPFUro3dcFHh611/V7CnwVK2MGnFR4KlRB6WioMBTqhwM5j8CFHicCioSoMBTsSp6x0SBp3f9Vc2eAk/VyugdFwWe3vVXNXsKPFUro0ZcSgu8zQVbMfreF7BuwxZ8MfbeamInX3wXFixeAXg8xp/lZGfix0+fNP73mk1lapB1cBQUeA4unotDp8BzcXEdnBoFnoOL59LQKfBcWliHp0WB5/ACujR8CjyXFtbhaVHgObyAFoevrMArKS3HKULUDdxvL0yZPidM4B15xmg8cdfl6Nqp7Q54KPDinzEUePEzZA/mE6DAM58pe4yfAAVe/AzZg7kEKPDM5cnezCFAgWcOR/ZiLgEKPHN5sjdzCFDgmcPRrb0oK/BKy8qxcXOh8c8dj7weJvAGHnsl3n/hdrRq3oQCz4KZSYFnAVR2GTcBCry4EbIDCwhQ4FkAlV3GRYACLy58bGwRAQo8i8Cy27gIUODFhY+NLSJAgWcRWJd0q6zAC/H9Y+6iHQTe3sNG4eD+vTBr3mI0a5KLq0Ydj4MH9DaacAVe/DOTAi9+huzBfAIUeOYzZY/xE6DAi58hezCXAAWeuTzZmzkEKPDM4chezCVAgWcuT/ZmDgEKPHM4urUXx5VJw/gAACAASURBVAm8QCCIWx98BYcP7o/9+vbAlGlzMPq+FzDujfuNFXl+8XE+8RGQRwvK0wWJMj6ObG0uAa+YmMFgEPwMN5cre4uPQJLXw7934kPI1iYTkF8r5d/jAfn1kl8wTabL7mIlIL+v3DYvY+2B7UjAfAL83tJ8puwxfgL83jJ+hm7uQc4PMx+P+AHb1L+a61qBVzvgc65+AMcdORAjhuyH9QXlZuajZV8ZaUlI8XhRVO7TMn8mrSaB3MxUlPuq
UOELqBkgo9KSQPPcdGwqLAdnpZblVzLp3MwUpKcmobDUh/JKv5IxMij9CGSIOZmSJL63LOP3lvpVX92Mc8T3lr4qP8r4tVLdImkYWfOcNGzeWgm/uVpFQ5LuTLllXrqpiVku8ErLKrB4+b/o3aNLdeCnX3Yvzjh+GA4b1I9baE0oJ7fQmgCRXZhOgFtoTUfKDmsRCAQD+Ld8I5qm5CArObK/HOvbQrvRtxo/F3+GymAZBmQdiQ5pu5O5ywiUFFZg+qcLsGVtCXoMbI8e+7dPeIbcQpvwEjCAOghwCy2nhYoEuIVWxaowJm6h5Ryoj4DjttAWFBZj6MnX4Ym7L8P+fffAT7/+ievvfh7j33wATRvnUOCZMN8p8EyAyC5MJ0CBZzpSdliDwPrKAoxZ9h5WleUjyZOEc9odjsOa7dMgo50JvKKqjXhi/aUoD5QYfYhNjbi45aNom9qtwT75gjMIBPxBvHjlN9iworA64KOu2Bd7De2U0AQo8BKKn4PvhAAFHqeGigQo8FSsCmOiwOMccKTA+/anmbjurucgD3CRS5tTUpLRqX0rfPrqPZjyyxw8/Nx7yN9UgLatmuGGS0/BgD49jDx5iUX8E54CL36G7MF8AhR45jNlj9sJPLPic/yweU71H0iJ99IeV6NRcma9mHYm8GaWfItPNj8e1vagRsfj8Lyzid0lBNYt3YKXrpoYlk3nPq1w2p0DE5ohBV5C8XNwCjzOAQcRoMBzULE0CpUCT6Nix5Cq8ivwos2JAi9aYju+T4EXP0P2YD4BCjzzmbLH7QSu+vtZrBbbZ2s+N3c+FXvndo1J4C0un4WxG24Na3t048uwb/bhxO4SAsWby/D42eOMy3VCz97DOmPE5f0SmiEFXkLxc3AKPM4BBxGgwHNQsTQKlQJPo2LHkCoFXgzQ3N6EAs/tFXZmfhR4zqybU6KevGk2nlv5RXW4u2S0wpjdzodXXOhT31PfGXjjCp7D9K3jjea7ZvTFaU1vQbInxSlIGGcEBH7/cjEmvjIbgaoAWuySJ1bfHYzsJhkRtLTuFQo869iy59gJcAtt7OzY0joCFHjWsWXPsROgwIudnQ4tKfB0qHKUOVLgRQmMr9tCgALPFsxaD/JLwV/4efNfaJ3WBEe13A+5yVkN8qhP4MnGW/2bxSUW5Wia3KbBvviCMwmUF1eiaGMZmnXIgdfrSXgSFHgJLwEDqIMABR6nhYoEKPBUrApjosDjHKiPAAUe58cOBCjwOClUJECBp2JVGFNDAi+RhHzwYZ13JVoG2iEVaYkMhWPbSIACz0bYHCpiAhR4EaPiizYSoMCzETaHipgABV7EqLR8kQJPy7LXnzQFHieFigQo8FSsCmNSVeAt9s7DKxkPogibkYUcnF8+Grv5e7FgGhCgwNOgyA5MkQLPgUXTIGQKPA2K7MAUKfAcWDQbQ6bAsxG2U4aiwHNKpfSKkwJPr3o7JVtVBd5dmZdgnWdlNcZWwQ64rfRZp2BVPs6qqirMmP07SktL0a/PvmiU3UiZmCnwlCkFA6lBgAIvMdOhosCHopVlyGyaiqy26YkJQuFRKfAULo7GoVHgaVz8CFKnwIsAkm6vUODpVnFn5EuB54w66RalqgLviqz/oUpsoQ09HnjwaMmHYiMtf4CLd476qnwY88QYLF660Ogqp1Eubr3+NrRo1iLerk1pT4FnCkZ2YjIBCjyTgUbQXdE/ZVg5KR/BwLaXm+2Zg9b7N46gpT6vUODpU2snZUqB56Rq2R8rBZ79zJUfkQJP+RJpGSAFnpZlVz5pVQXe22lPY2ryhGp+B/iG4bTKK5Tn6YQA//xrDh555uGwUIcPHYGT/neSEuFT4ClRhriCCAYCmPPuFCz7YQ4yGjfCXmccgtZ7doqrz0Q3psCzvwLLv1iP4rXl2wcWd+z0OKcDklISf9lOpDQWVpXjK99WyLu9R6bmoU2SuTe5U+BFWgm+ZycBCjw7aTtvLAo859XM8ogp8CxHzAFiIECB
FwM0NrGcgKoCr0qsv/s5+Wv8nTwbXf09MMh3FFLEVRZ84idAgRc/Q/ZQP4EF43/DrDe+rX4pKS0FRz15MTLysh2LjgLP/tIt+XQdyvIrwgRez3M7wJvsDIG31F+B0cVr4EfQyCHbk4RHs9ugmdc8iUeBZ/+85IgNE6DAa5iRzm9Q4Olc/Z3kToHHSaEiAQo8FavCmFQVeE6uTKU/iFVFPsifMdvnpsKr2M+acgvtg0+OwaIlNbbQ3nA7WjRtrgR2rsBTogxxBfH9fe9h3ZxlYX0MHH0S2uzdJa5+E9mYAs9++lvF2Xcrvqmxhba32EI7wDlbaN8o34zPKgrCwF2T2QIHppgnsinw7J+XHLFhAhR4DTPS+Q0KPJ2rT4HH6juIAAWeg4qlUagUeOYWu7wqgAlLS1Dm23ZoU5OMJAzplIUkxSweL7Ewt+7sLZwAV+BxRphFwMmXWEysLMLzZRvDUNyW1Rp7JcsNteY8FHjmcGQv5hKgwDOXp9t6o8BzW0VNyIcr8EyAyC5MJ0CBZzpSdmgCAQo8EyDW6GJufjnm1dzyJT42sGMm2jQyb8uUuRGr1xtX4KlXk2gjUvkMvH/mr8Mf3y9CZk46Bhy+O/KaR3YDM1fgRTsL+L4vGMRDZesxw1dqwDg8NQcXZDQzFQwFnqk42ZlJBCjwTALp0m4o8Fxa2HjSosCLhx7bWkWAAs8qsuw3HgIUePHQ27Ht/A0VmLO+xqHr4pUDO2SifQ4FXqSkKfAiJcX3oiWw4u91ePOBSQgKsSKf7LwMXHDvUchq1PDt1hR40dLm+yECawM+cX+6B429yaZDocAzHSk7NIEABZ4JEF3cBQWei4sba2oUeLGSYzsrCVDgWUmXfcdKgAIvVnJ1t5NbZ79Ztn0LbeP0JAztrN4WWnOzNrc3CjxzebK37QTGj52OPyYvCkNy7CUHoeeAhm/IpcDjTFKRAAWeilVhTBR4nAP1EaDA4/zYgQAFHieFigQo8FSsCmOiwDN/DshLLFYU+pDqVfMSC/MzNrdHCjxzebK37QSmjp+Hye//EYbk3NuPQNsuDV/gQoHHmaQiAQo8FavCmCjwOAco8DgHoiJAgRcVLr5sEwEKPJtAc5ioCFDgRYWLL9tAgALPBsiaDlFR7sMHj03GP3+vNwjsP2IPHHpin4hoUOBFhMnylyoKilCybBVSG+cgu1N7y8dTfQAKPNUrpGd8FHh61j3SrLkCL1JSGr1HgadRsR2UKgWeg4qlUagUeBoV2yGpUuA5pFAODnPjmkKkZ6ciOyfy20Ap8BJf8JJVa7Hq428Q9PuNYHJyfGic/A+SdumF1AOPhyclNfFB2hwBBZ7NwDlcRAQo8CLCpO1LFHjaln7niVPgcVKoSIACT8WqWBfThq1rMGXBZ/CL/wzoPAwdm+5m3WBx9EyBFwE8ceB96pxlSNpSjIrenRFoEtmtlRH0zFfqIECBx2mhIgEKvMRXZeXHE1Dyz2ojkEDhBgS3bkJbz+9I9viQ3PtQZJx5V+KDtDkCCjybgXO4iAhQ4EWESduXKPC0LT0FHkvvLAIUeM6qVzzRFpRswOOTrkOZr8Toxitunrv0kHvRrnGXeLq1pC0FXsNYc176GmkzFhovBtNTUXDV/1DVqXXDDflGTAQo8GLCxkYWE6DAsxhwBN2HCbw1SxAMVFULPKSmI/veSfB4xeGjGj0UeBoV20GpUuA5qFgJCJUCLwHQVR+SK/BUr5Ce8VHg6VP3X5ZOxGd/vBiW8MDdjsHwXqcrB4ECr/6SJK3djCZ3vBH2UkXf3VA06gjlaumWgCjw3FJJd+VBgZf4etbcQhvYsBLZFcvQxLvMCMyT2xzZt32W+CBtjoACz2bgHC4iAhR4EWHS9iUKPG1Lv/PEKfA4KVQkQIGnYlWsiWnR+jl45ce7wzo/dp8L0b/zUGsGjKNXCjwKvDimjyVNKfAswcpO4yRAgRcnQJOahy6xSKrchORvHkGgaCM8WXlIP/U2JHfvb9Io
zumGAs85tdIpUgo8naodfa4UeNEzc30LCjzXl9iRCVLgObJsMQf92ayX8cuSCUb7nm3749T+VyE5KSXm/qxqSIHXMNmc58YhbfZS40VuoW2YV7xvUODFS5DtrSBAgWcF1fj6DFb5EFi/HJ7mHeFNTYuvM4e2psBzaOFcHjYFnssLHGd6FHhxAnRjcwo8N1bV+TlR4Dm/htFmIM/CqwpWoVm2uuelUeBFUFVeYhEBJPNeocAzjyV7Mo8ABZ55LNmTeQQo8MxjyZ7MI0CBZx5LN/ZEgefGqsaZEwVenADZ3BICFHiWYGWncRKgwIsTIJubToACz3Sk7NAEAhR4JkBkF6YToMAzHSk7NIEABZ4JEF3cBQWei4sba2oUeLGSYzsrCVDgWUmXfcdKgAIvVnJsZxUBCjyryLLfeAhQ4MVDj22tIkCBZxVZ9hsPAQq8eOi5vy0FnvtrHHWGFHhRI2MDGwhQ4NkAmUNETYACL2pkbGAxAaUFXoUPWR9NR9qMZahqnoOSEwbA303dLfIWl0qr7inwtCq3Y5KlwHNMqbQKlAJPq3JHnSwFXtTI3N+AAs/9NXZihhR4Tqya+2OmwHN/jZ2WocoCL/Pj6ciYMLsaaTA7A5vHnAakJjsNM+ONkgAFXpTA+LotBCjwbMHMQaIkQIEXJTDNXqfA06zgkaRLgRcJJb5jNwEKPLuJc7xICFDgRUKJ79hJQGWBl3Pfp0hZvj4MR8HoY+Dv0spORBwrAQQo8BIAnUM2SIACr0FEfCEBBCjwEgDdQUNS4DmoWHaFSoFnF2mOEw0BCrxoaPFduwhQ4NlFmuNESkBlgZf52e/IGD+zOhWuwIu0qs5/jwLP+TV0YwYUeG6sqvNzosBzfg2tzIACz0q6Du2bAs+hhXN52BR4Li+wQ9OjwHNo4VwctsoCD6Ez8H5bgqqWeTwDz8XzsHZqFHgaFdtBqVLgOahYGoVKgadRsWNIlQIvBmhub0KB5/YKOzM/Cjxn1i3eqKsCVfh57QQsKJiDLrm7Y3CbkUj2qnNeFgVevBVme7MJKC3wzE6W/TmGAAWeY0qlVaAUeFqV2zHJUuA5plQJCZQCLyHY1R6UAk/t+ugaHQWenpV/Z/EzmLr2m+rk9215CM7a7SplYFDgKVMKBvIfAQo8TgUVCVDgqVgVxkSBxzmgIgEKPBWrok5MFHjq1EKZSCjwlCkFA6lBgAJPz+kwevpZ2Fq5pTr5lKQ0PLr/+/B6vEoAocBTogwMotbXyoy0JGwprkRZhZ9sSEAJAhR4SpSBQdQiQIHHKaEiAQo8FauiTkwUeOrUQplIKPCUKQUDocDTfg7cPfNSrCtZVc2heUZr3NHvBWW4UOApUwoG8h8BrsDjVFCRAAWeilVhTBR4nAMqEqDAU7Eq6sREgadOLZSJhAJPmVIwEAo87efAkoJ5eGXhwyiq2IyslByc3/0G7Nq4lzJcKPCUKQUDocDjHFCYQKQCr9K31cgiNaWRwtkwNLcQoMBzSyXdlQcFnrvqaXY2FHhmE3VBfxR4LiiiC1PgFloXFjXClHwBH9aVrkTLjHZIFVtoVXoo8FSqBmORBLgCj/NARQINCbxgMIg1G35EUfEyI/yc7M5o0/xgeDweFdNhTC4hQIHnkkK6LA0KPJcV1OR0KPBMBuqG7ijw3FBF9+VAgee+mrohIwo8N1TRXTlQ4Lmrnm7JpiGBV1SyHKvX/xCWbtuWg5CT1cl0BEWlFfhz1SY0zkxFj3bNhCQ0fQh26BACFHgOKZRmYVLgaVbwKNOlwIsSmA6vU+DpUGXn5UiB57ya6RAxBZ4OVXZWjhR4zqqXLtE2JPDWb/wVm4vmh+FokrsnWjbtayqilRuL8OJ3c1FeWWX026NdU5w7eA9Tx2BnziFAgeecWukUKQWeTtWOPlcKvOiZub4FBZ7rS+zIBCnwHFk21wdNgef6EjsuQQo8x5VMi4AbEniVvkIsW/0FgoFtYs3jSUantiORlpprKp/3pi3EjKXr
wvq8fmQ/tMzNNHUcduYMAhR4zqiTblFS4OlW8ejypcCLjpcWb1PgaVFmxyVJgee4kmkRMAWeFmU2LckFwUWY5v8F2Z5sHOoZhKbepqb1HeqIAs90pOzQBAINCTw5RHnFpupVeE1yeiA9zfzPj49+XYzpi9aEZXT1kX3QtgkvzTChzI7rggLPcSXTImAKPC3KHHOSFHgxo3NvQwo899bWyZlR4Dm5eu6NnQLPvbU1O7PF/iV4vOrp6m5zPDm4PeVmpHvSTR2KAs9UnOzMJAKRCDyThqq3mzWbi/H0xNmo9PmN97q3bYLzD9nTjqE5hoIEKPAULApDAgUeJ0F9BCjwOD92IECBx0mhIgEKPBWrwpgo8DgHIiXwbtUH+Nk/Lez1S5IvRM+k3SPtIqL3KPAiwsSXbCagisCTaReUlOOvVZuRl8VLLGyeBsoNR4GnXEkYkCBAgcdpQIHHORAVAQq8qHDxZZsIUODZBJrDREWAAi8qXFq/PME/EeOqvgpjcHPKDWjrbWMqFwo8U3GyM5MIqCTwTEqJ3biAAAWeC4rowhQo8FxYVBNT4go8E2G6pSsKPLdU0l15UOC5q55uyYYCzy2VtD6P8mAFnvW/gKX+ZcZgw5KG4OjkEaYPTIFnOlJ2aAIBCjwTILIL0wlQ4JmOlB2aQIACzwSILu6CAs/FxY01NQq8WMmxnZUEKPCspMu+YyVAgRcrOX3brQmsNS6xyPFYc2g+BZ6+c0vlzCnwVK6OvrFR4Olbe5Uzp8BTuTqJj40CL/E1UC4CCjzlSsKABAEKPE4DFQlQ4KlYFb1jUl3glZYFMWsOkJYaxF69vUhO0rteumRPgadLpZ2VJwWes+qlS7QUeLpUOrY8KfBi4+bqVhR4ri6vY5OjwHNs6VwdOAWeq8vryORUFnhFRUHc/0gQ8v/Lp0M7D6650ouUZEeijilov68Sy36fgI3L5iE9tym67X8Uspuaew5iTIFZ3IgCz2LA7D4mAhR4MWFjI4sJUOBZDNjh3VPgObyAVoRPgWcFVfYZLwEKvHgJsr0VBCjwrKDKPuMhoLLAGz8hgK++2SbvQs8lF3jRc3dPPCk7qu2y3yZg9byp1TGnZjRC3xOuRlJyiqPyiDZYCrxoifF9OwhQ4NlBmWNES4ACL1pier1PgadXvSPKlgIvIkx8yWYCFHg2A+dwERGgwIsIE1+ykYDKAm/itwF8Pj5c4I0614u99tRH4P3x2TMo2bwubEb0HjEKOS062DhL7B+KAs9+5hyxYQIUeA0z4hv2E6DAs5+5k0akwHNStWyKlQLPJtAcJioCFHhR4bLsZf/a1fDNmoHkjp2R3HNPy8ZxSscUeGpUKmljMTJnrUIQHpT1bgt/C2suiFAj2/qjUFngFRYG8cCj27fQthdbaK/VbAstV+D5nPBpxBg1IUCBp0mhHZYmBZ7DCmZzuBR4NgN3wnAUeE6okn4xUuAlvua+Gb+h+PEHAL/fCCb9iJHIOPO8xAeWwAgo8BII/7+hkwrKkPf2b/BUBYw/CSZ5UHDavvA3zkx8cAmIQGWBJ3HISyxm/AFkZuh5iYVxBt6v47Hxn795Bl4CPj84JAnUJECBx/mgIgEKPBWrok5MFHjq1EKZSCjwlCkFA6lBgAIv8dOh+K5b4Pt73vZAPB7kvfoePOnpiQ8uQRFQ4CUIfI1h08XKu+wfl4QFUrJ/Z5T165j44BIQgeoCLwFIOKQCBLiFVoEiMIQdCFDgcVKoSIACT8WqqBMTBZ46tVAmEgo8ZUrBQCjwlJoDRbdeD/+SReEC77X34UlLUypOO4OhwLOTdt1jpS3OR6Ov/gr7YNFhu6Oye6vEB5eACCjwEgCdQzZIgAKvQUR8IQEEKPASAJ1DNkiAAq9BRFq/QIGndfnrTp4Cj5NCRQJcgZf4qvhmz0Txw/dWb6FNG3EMMk87J/GBJTACCrwEwg8NHQii0bcLkfb3WuNPKrq1wNbDewBefS5GqFkFCjwF5iRD2IEABR4n
hYoEKPBUrApjosDjHKiPAAUe58eO32SlJyHF60VhKQ8a5vRQhwAFnhq14CUW4XWgwFNjXsoovEVl4r89COTou6VbcqDAU2dOMpLtBCjwOBtUJECBp2JVGBMFHucABR7nQFQEuAIvKlx82SYCFHg2geYwURGgwIsK185fXr/RkG9o2dSkDvXthgJP39qrnDkFnsrV0Tc2Cjx9a69y5hR4Klcn8bFxBV7ia6BcBFliBV6SWIFXxBV4ytVG54Ao8HSuvrq5U+DFWZtAAMkPvwrvlN+NjgKH7oeqq84Uy+m8cXasb3MKPH1rr3LmFHgqV0ff2Cjw9K29yplT4KlcncTHRoGX+BooFwEFnnIlYUCCAAUep4GKBCjw4quK96cZSH7gpbBOqkaPQuCgvvF1rHFrCjyNi69w6hR4ChdH49Ao8DQuvsKpU+ApXBwFQqPAU6AIqoVAgadaRRiPJECBx3mgIgEKvPiqkvzC+/B+MTmsE//xh8F/zrHxdaxxawo8jYuvcOoUeAoXR+PQKPA0Lr7CqVPgKVwcBUKjwFOgCKqFQIGnWkUYDwUe54CqBCjw4quM5991SL70Lniq/EZHwbRUVD19K4JtWsTXscatzRR44oJfLFvnQ/5WP5o1SkLXVim6Xu6r8YwyJ3UKPHM4shdzCVDgmcuTvZlDgALPHI5u7YUCz62VjSMvCrw44LGpZQS4As8ytOw4DgIUeHHA+6+pZ+lKYxWeR/yn6phDgU7t4u9U4x7MFHh/LK/AP/lV1TR3aZ6MPp3TNKZbf+oVCGKruIvF7/EgPRhETlDOaj6SAAUe54GKBCjwVKwKY6LA4xyojwAFHufHDgQo8DgpVCRAgadiVRgTBR7ngGoEzBR44/8oQ4UvUJ1ikrhbZGTfLAg/xacWgaAQduuTxCrSGn/eSPxLtpB4fCjwOAfUJECBp2ZddI+KAk/3GVB//hR4nB8UeJwDjiBAgeeIMmkXJAWediVXPmEzBd6kuWXYWrpd4DXK9GLonhnKM0hEgJVC3W2qdXmyXKvYJECBJ+vBFXiJmJUcsyECFHgNEeLHE0GAAi8R1J0zJgWec2plW6RcgWcbag4UBQEKvChg8VXbCFDg2YaaA0VIwEyBt7HIj+lLKlEpVuGlpngxoGsqmuWIZWZ8diAgV+DlCzTbdSfAFXjbMRX7CyFXcGZ4cjl7SEAZAhR4ypSCgdQgQIHH6VAfAQo8zo8dCFDgcVKoSIACT8WqMCYKPM4B1QiYKfBkbn5hpLaW+ZGd7kVyEleT1VdvnoG3I51AMIBnZr+DqWtmGFuv+7Xojav2OUtchlJruaJqn0iMRwsCFHhalNlxSVLgOa5ktgZMgWcrbmcMRoHnjDrpFiUFnm4Vd0a+FHjOqJNOUZot8HRix1zNJ/DLmll4bOZYeL3iQg95wYc/iKv3ORv7tdnb/MHYIwlESYACL0pgfN0WAhR4tmB27CAUeI4tnXWBU+BZx5Y9x06AAi92dmxpHQEKPOvYsufYCFDgxcaNrawh8Nb8L/DF0u/CBN7ILofi9B4jrRmQvZJAFAQo8KKAxVdtI0CBZxtqRw6ktMDbXLAVo+99Aes2bMEXY++tBrxqTT5uffBVLFyyEm1aNcMtV56BPnt2Mz6+ZlOZIwuhUtAUeCpVg7GECFDgcS6oSIACT8Wq6B0TBZ7e9Vct+zXF+bjxp4fgC/iMFXhJSMaYg69H2+yWqoXKeDQkQIGnYdEdkDIFngOKlMAQlRV4JaXlOOXiuzBwv70wZfqcMIF31pX345AD++D0Y4di2oy/hMx7BZPefwQpyUkUeCZMJgo8EyCyC9MJUOCZjtSWDgMVwIaPgOIZHiQ3D6L5CUFkdnPPOVoUeLZMIw4SBQEKvChg8VVbCKwoWoNJK38UAi+IgW33R9e8jraMy0FIoCECFHgNEeLHE0EgUoEXqKpE+eLp8BesR2qb3ZDWYU8Yvynh42oCygq80rJybNxcaPxzxyOvVwu8TVuKcPipN+CX
L58Rhylvuwnt+FG344ZLTsG+e3enwDNhulLgmQCRXZhOgALPdKS2dLjhY2DLhO1DJWV70GlMAN5Ud3yDoavAkzduLv53JbaWlqBbuw7Iycq2ZT5xkIYJUOA1zIhv2E8gMy1J/KLdi8ISn/2Dc0QS2AkBCjxODRUJRCrwtv78Dnz5y6tTyOgxEBndD1QxJcZkIgFlBV4oxz/mLgoTeH/MXYy7Hn0dn712TzWGa+98Fv379MCJRw2iwDNhclDgmQCRXZhOgALPdKS2dLjiPqBi+/cWxpjtRweR0YUCz5YCWDTIZz9/j7//WWb0npqSglMOPRxtmrWwaDR2Gw0BCrxoaPFduwhQ4NlFmuNEQ4ACLxpafNcuApEIvGB5MbZ89URYSEmNmiJ36EV2hclxEkTAcQJv2ox5ePLlj/He87dXI/u/Ma9g187tcOYJh4nVAFUJQumeYVNSPPCK5bcVlQH3JMVMHE8gQ/z23ucPoKoq6PhcdEpg7UdBrP/CX51yciMPejzmjWoFnj+4EgEUINnTCR40UgpfdmYySsqqIBakafPkF2zBUx9/EJbvHp274KTBQ7RhYGWiBfn54sD/JOQ0axrTMOlypVOSB+UVfvE10zkT0x+swDLfq9ji/w1pnjbomno+srydYmLARuoRSEn2IEncRFvO7y3VK47GwB5ZzQAAIABJREFUEcmvl37xvaWP31tqPAviS71i6QYUfPgHPFUB5By3F9J3bx1fh6J1dob43rK8/u8tg8EA1nw0BkGxjTb0pLXpiuYDT4t7fHagNoFG4mcPMx+P2FZj6neLtVfgzZq3GLc/PDbsTLxr7ngG+/fdA8ePGIitZVyaH29BU5O8Yvu8EHhV23/ojrdPtieBeAmkpyShSgq8gKlfYuINi+0bICDPwFv9rtBv04NIbQW0PcWL7N0iX31XGfgW/uCS/0ZJRpp3hPgFg+hIkSc7PUV8k+WDTrPSEHgffRgu8LpIgXeoIlVxZhh+XxW+fPY5LJkx00hgz0EDMey8c6JOJj1VCjyvECVS4DnnF3H/VL6JNb4vq/NN8eahb8azQtpvOy6Fj7MJyDnpFQKvwsfvLZ1dSXdFL7+39IvvK530tdJdFXB2Nr41BVh5xhtiq8l/C4jEMQHt3zgTqR2axJVYVnoyysQv4QINaJWyVX9j07TPAL8PSdlN0GzgSUjJbR7X2GysPoFGGSmmBmm5wNtSuBVDTrwWP3/+NDLSU43gjzjtBtx30yjsvUc3bqE1oZzcQmsCRHZhOgFuoTUdqfIdBj1b4U1+LzzOYBfx28ZDlIld1zPwPpryLRavWmHUgVtozZmOC6dOxeSxr4Z1NuLqa9C+R8+oBnDqFtqF/ptQhm1zKvR09z6EdE/bqPLny2oS4BZaNeuie1RO2UJbJcR3/srNaNY2D6niF4d81CBQMm4uip6YHBZMo/P2R/Yp/eIKMJIttKEBgr4K+Eu2ICmnBTxeb1zjsrEzCDhuC63Eet41D6LfXt0x6rQR+Pr7X40ttV+//SCSxG/31mwqcwZ5haOkwFO4OBqHRoGnY/FL4Ul5OyzxYFDcXlg1TBkYugo8XmJh/hSc8tYbmD9lSljH/Y89Fn2OODKqwZwq8FYH3saG4PjqXJM9eejpfYor8KKqvrovU+CpWxudI3OCwFu9ZAPevncCCvO3Ij0rDSffOBTd9umgc9mUyb182jJsuW1cWDy5o4chc8juccUYjcCLayA2diQBZQXetz/NxHV3PQd5sJBPbOVMSUlGp/at8Omr92D1uo24+f6XsHDpKrRv0wJ3XHs2eu62i1EACrz45yEFXvwM2YP5BCjwzGfqiB6TfxBb+hf/F2qyWH13hPh7QZ0ttLoKPEfMHYcFuXnNGnx4950IVG3bipOWkYkTbr8DjZpGdxaeUwWePANvdXAsioIzkOJpjQ7ec5GBbd/b8XE+AQo859fQjRk4QeC9cP2nWDl/bTX+Rk2zMPqNs9xYDsflFBTbrwsfmoSy
SX8bsacf0AV5tx8pVsJFflRMXUlT4DluKtgasLICL1YKFHixktvejgIvfobswXwCFHjmM3VGj+J0Oc+/QuIVIRiQv3FW6xILCjxnzCKnRJn/z3LMnTxZnBXmRa8hQ9G0XbuoQ3eqwIs6UTZwFAEKPEeVS5tgnSDw7j7xZZSXbL+oQBZn9JtnoVGTLG3qpHqi/vXie1Tx7WpyqxxTQqXAMwWjazuhwHNtaWNPjAIvdnZsaR0BCjzr2LLn2AlQ4MXOji2tIUCBZw1X9hofAQq8+PixtTUEnCDwvn5lGn7+ZHY1gJ5ildepNx9mDRD2qgQBCjwlyqBsEBR4ypYmcYFR4CWOPUfeOQEKPM4OFQlQ4KlYFb1josDTu/6qZm+FwCsp34rfF05FekoG+uy6H1KTt11ux4cEIiXgBIEXENs0fxs/D4tni6OjurXEAcf2RkpqcqQp8j0HEqDAc2DRbAyZAs9G2E4ZigLPKZXSK04KPL3q7ZRsKfCcUil94qTA06fWTsrUbIFXWLwZt469AgXi/8unY8uuuPm0MchIzXASFsaaYAJOEHgJRsThE0CAAi8B0B00JAWeg4plV6gUeHaR5jjREKDA20arPFAGX9CHRknmnLMRTQ347o4EKPA4K1QjQIGnWkUYjyRgtsAb98sH+HDK2DC4l4y8EQN6DCRwEoiYAAVexKj4oo0EKPBshO3AoSjwHFg0q0OmwLOaMPuPhQAFHjCr5A8sK19i4GuR0hIDsg9AipfbKGKZT2a1ocAziyT7MYsABZ5ZJMP7yf9nHlbM/h6pWY3Qtd9wZOW1sGYgl/ZKgefSwjo8LQo8hxfQpeFT4Lm0sCalRYFnEkg3dUOB56ZquicX3QXeRl8+phT9EFbQ3ll7o2t6N/cU2YGZUOA5sGguD5kCz/wCb1wxHz+9c091x+nZeRh6wSNITuN2zUhpmy3wCoo34baxV9bYQtsFt5z2ENJT0yMNie8pTODnpQsxcf4ceD1eHNajN/brbM33OhR4Ck8CjUKbtXo5xv46GfklRTik65648bAjUFhcBb84/5APCdQmQIHHObEDAQo8TgoVCegu8BaW/Y15pXPDStM+rQP2zR6gYrm0iYkCT5tSOyZRCjzzSzVrwiv4Z9Z3YR3vf+INaNllL/MHc2mPZgs8iYmXWLhzsixevxZP/jAhLLkrBx+Bri1amZ4wBZ7pSNlhlASKK8pw5jtPobLKV93y8kFDMXy3/hR4UbLU5XUKPF0qHUWeFHhRwOKrthHQXeBVBCowseBrVAYrDeYe8Z/BuUPQOLmxbTXgQDsSoMDjrFCNAAWe+RVZOPUzzP/xg7CODznvAeS26GD+YC7t0QqB51JU2qf1+ZwZ+HZB+C8sR/baB0N372U6Gwo805GywygJzFi1FHdMeC+s1YFddsUtQ06kwIuSpS6vU+DpUuko8qTAiwIWX7WNgO4CT4Le6t+KxWULURWsQuf0LmiW0tw2/hyobgIUeJwZqhGgwDO/Ir7KcvzywRhsWrXQ6HzX/Uai56CTzR/IxT1S4Lm4uCantih/LZ76PnwF3lWHHIEuzbkCz2TU0XUXDKL8t1Wo+HMtktvmInNQF3gyU6Lrg2/vQIAr8DgpoiVAgRctMQ3ep8DToMgOTJECz4FF0yBkCjwNiuywFCnwrCtY0YZ/kZrZCOlZudYN4tKeKfBcWliL0vppyUJM+ptn4FmEN6Zuy75fiuJPtq+MTOnaDHlXHhhTX2wUToBn4HFGREOAAi8aWpq8S4GnSaEdliYFnsMKpkm4FHiaFNpBaVLgOahYGoVKgadRsR2UKrfQRl6sLQ9PQdWKLWENmt57OLw59lwcU/rXCmz+5g94M1LRdOQApLVtGnnwDnuTt9A6rGA2h0uBZzNwJwxHgeeEKukXIwWefjV3QsYUeE6okl4xUuDpVW+nZEuB55RK6RUnBV7k9S585TdUzl5T3cCTnoym9x4BT2pS5J3E+GbZ0rVYdu3LgNjGK5+k7Ax0
eeICpDRz52poCrwYJ4omzSjwNCl0NGlS4EVDi+/aRYACzy7SHCcaAhR40dDiu3YQoMCzgzLHiJYABV60xPi+HQQo8CKn7M8vRuHzv8C/oQQQ0q7RyXshvV/7yDuI4811r3+LTZ9MC+uh3XXHIvegPeLoVd2mFHjq1kaFyCjwVKiCYjFQ4ClWEIZjEKDA40RQkQAFnopV0TsmCjy9669q9hR4qlZG77go8KKrf9AfgH9tEZKaZcGTbt8FFnLr7NpnvwwLtuOdpyN7r847TaCoFCgo9aBxVhCNMqLLM9FvU+AlugJqj0+Bp3Z9EhIdBV5CsHPQBghQ4HGKqEiAAk/Fquw8psoNRfBtLILchJPWKg8pjbOdlUAE0VLgRQDJQa8EV/nh/6ACWFMFT+8UeE9KhyfN46AMtoVKgee4kmkRMAWeM8oc8FVh5d3vomTOciPgJsP7ovWFw3ca/MqNHixZ6zU+Lr9a7tY2gDZNtm2/dcJDgeeEKiUuRgq8xLFXdmQKPGVLo3VgFHhal1/Z5O0WeIE1y1H+5esIrF2J5B59kXbUOeK34A771XKCqllVWIqyf/LDRs/s1hpJmWkJisiaYSnwrOGakF79QNXNWxEsClQP7z00DUnH23NovJk5U+CZSZN9mUWAAs8skvb0U7lms7jEIgXJjRvVO+DPC5JR6dsu7FJTPDiwe5U9QZowCgWeCRBd3AUFnouLG2tqFHixkmM7KwlQ4FlJl33HSqAhgbdGCLeZv09CaVkxunbrjT59BsPj2fZb4aifgB/F91+M4NaC6qapBwwXEu/sqLvSsUHF6s2oFKvvaj5prfOQ2iLPVTgo8NxTzuCqKlTdJ86bqvnskoSUG523cpQCzz3z0k2ZUOC5qZrbc6kt8NJSgjig+/ZfhKieNQWe6hVKbHwUeInlr+ToFHhKlkX7oCjwtJ8CSgKoT+BJaffpx0/DX+Wrjr3vvkPRo8eAmHIJ5K9GyaNXh7VNat4Gmdc+HlN/ujWqcwXermIFXgZX4Ok2FxyTb10r8AaLFXgncgWeY2rIQJUmQIGndHliDq7mFlrZSbc2AbRvyi20MQNlQ6UIUOApVQ41gqHAU6MOjCKcAAUeZ4SKBOoTeCtXLsAPkz8MC7tN2y4YMvTU2FLhCrzYuNVoJc/Aq9y4VfxJkGfgxU0z/g4qAkEsqqyAJxhE9/R0JHucd7Zb/BTq78E4A+/9cmCtn2fgWQ2b/WtHgALPvSXnJRbura3umVHg6T4D6sifAo+TQkUCFHgqVoUx2bkCT9I2zsD7/FXI1XjyDLzUkefCm+a81TicOdYRcMoW2hIhpN/YvAVb/WKZmXiapSTj9LwmSPVS4lk3OxLXM7fQJo49R945AQo8zg4VCXALrYpVUScmCjx1aqFMJBR4ypSCgdQgQIHH6aAiAVvPwFMRAGNSjkDCBN5mP5LfKoV3oQ/BbsnwnZklrgpM2imf30tL8P3W4rCPH5Wbi93FSjw+7iNAgee+mrohIwo8N1TRfTlQ4LmvpmZmRIFnJk2X9EWB55JCuiwNCjyXFdQl6TQk8FySJtNwEIFECbyUh4rgnb/9vMfgbimoHJ2zU3LTS0rwYzEFnoOmVlyhUuDFhY+NLSJAgWcRWHYbFwEKvLjwub4xBZ7rSxx9ghR40TNjC+sJUOBZz5gjRE+AAi96ZmxhLYFECbzUSzfDU1rjkHCxE7biuSZAWt1bYssCAYzdvLl6C21zsYX2tMZiCy3PwbN2giSodwq8BIHnsPUSoMDjBFGRAAWeilVRJyYKPHVqoUwkFHjKlIKB1CBAgcfpoCIBCjwVq6J3TIkSeNGuwJNV4iUW+sxVCjx9au2kTCnwnFQtfWKlwNOn1rFkSoEXCzWXt6HAc3mBHZoeBZ5DC+fysCnwXF5gB6aXKIEHeQbem+IMvEWRnYHnQLQMOQ4CFHhxwGNTywhQ4FmGlh3HQYACLw54GjSlwNOgyNGmSIEXLTG+bwcBCjw7KHOMaAlQ4EVL
jO9bTSBhAs/qxNi/owlQ4Dm6fK4NngLPtaV1dGIUeI4un+XBU+BZjth5A1DgOa9mOkRMgadDlZ2XIwWe82rm9ogp8NxeYWfmR4HnzLq5PWoKPLdX2Jn5UeA5s252RU2BZxdpB41DgeegYmkUKgWeRsV2UKoUeA4qliahUuBpUmiHpUmB57CCaRIuBZ4mhXZYmhR4DiuYzeFS4NkM3AnDUeA5oUr6xUiBp1/NnZAxBZ4TqqRXjBR4etW7vmx/Xb0Gb/45F1srfTi8S2ecskePhMGhwEsYeg5cDwEKPE4PFQlQ4KlYFXViosBTpxbKREKBp0wpGEgNAhR4nA4qEqDAU7EqesdEgad3/UPZry0uwaVfT4A/EKwGclX/fhi8S8eEAKLASwh2DtoAAQo8ThEVCVDgqVgVdWKiwFOnFspEQoGnTCkYCAUe54DiBCjwFC+QhuFR4GlY9DpS/m75P3jytxlhHzlMrMK7pG+fhACiwEsIdg5Kgcc54EACFHgOLJqNIVPg2QjbKUNR4DmlUnrFyRV4etXbKdlS4DmlUvrESYGnT63ry5Qr8DgPSKBhAlyB1zAjvmE/AQo8+5k7aUQKPCdVy6ZYKfBsAs1hoiJAgRcVLr5sEwEKPJtAc5iICVDgRYzK9S8aZ+DN/QtbKyp4Bp7rq80EYyGgjcALBpG6fi085eWobN0GwbT0WHCxjU0EKPBsAu3QYSjwHFo4K8OmwLOSLvuOlQAFXqzk2M5KAhR4VtJl37EQoMCLhRrbWE2AW2itJsz+YyGghcAT8i7n9+lI3bDeQBRMTkHBfgfBn5MTCzK2sYEABZ4NkB08BAWeg4tnVegUeFaRZb/xEKDAi4ce21pFgALPKrLsN1YCFHixkmM7KwlQ4FlJl33HSkAHgZe8ZTPypv0Yhqi84y4o3mOvWLGxnUkECteuQ1p2FtIbNQrrkQLPJMAu7YYCz6WFjSctCrx46LGtVQQo8Kwiy37jIUCBFw89trWCAAWeFVTZZ7wEKPDiJcj2VhCgwLOCKvtsiEBlaSl+fnEsNi5dbrzac/gw9DxiaHUzCryGCOr9cQo8vetfZ/YUeJwUKhKgwFOxKoyJAo9zQDUCFHiqVYTxSAIUeJwHKhLQQeBBbqGdIbbQ5m/fQrtFbKENcAttwqbk3HET8PfE78LGP+LWG9CoRXPjzyjwElYaRwxMgeeIMtkbJAWevbw5WmQEKPAi48S37CVAgWcvb47WMAEKvIYZ8Q37CVDg2c+cIzZMQAuBJzHwEouGJ4ONb/z47EtY9/eisBEHnH0aOuyzbVszBZ6NxXDgUBR4Diya1SFT4FlNmP3HQoACLxZqbGM1AQo8qwmz/2gJUOBFS4zv20GAAs8OyhwjWgLaCLxowfB9Swms/vMvTH1pbPUYmU0a47CbrkFK+rbbgSnwLMXv+M4p8BxfQvMToMAznyl7jJ8ABV78DNmD+QQo8Mxnyh7jI0CBFx8/traGAAWeNVzZa3wEKPDi48fWsRNY+9cCLP/1d2TkNMKugw9GVtMm1Z1R4MXOVYeWFHg6VDnKHCnwogTG120hQIFnC2YOEiUBCrwogfF1ywlQ4FmOmAPEQIACLwZobGI5AQo8yxFzgBgIUODFAE2jJhR4GhU70lQp8CIlxffsJECBZydtZ4y1YPVfmDT3G6SnpGNEn6PRunFb2wO3S+BtyF+EVSt+g9/vQ+u2vdGufR/bc+WAziBAgeeMOukWJQWebhV3Rr4UeM6ok25RUuDpVvHo8qXAi46XFm9T4GlRZsclSYHnuJJZGvCy/CW4+d3rxLnMQWOc7LRsPHb2s8jJyLV03Nqd2yHwios3Ys7Md8OG7t5zOJo262JqrusqK7G5yod2qWnISU42te/6OgsEfFg3fywK/v0eqRnN0LLHuWjUfNtBznyiJ0CBFz0ztrCeAAWe9Yw5QvQEKPCiZ8YW1hOgwLOesZNHoMBzcvUsip0CzyKw7DYuAhR4ceFzXeO3f34dX8z4JCyv
K464FgfsdrCtudoh8NaumYdli78Py6tVmz3Qpdtg03L9o6QYf5WUGP15xT8H5+ahfVqaaf3X19GGxR+I29her37Fk5SG3Ye9gaSUbFvGd9sgFHhuq6g78qHAc0cd3ZaFEwRe/qogvnolgNVLgth1Hy+Gn+dBVo7HbaVgPjUIUOBxOtRHgAKP82MHAhR4nBQqEqDAU7EqiYtpwuzxeO2HF8MCuPPE+9G9TQ9bg7JD4JVs3YDZf7wXlpeZK/CqxCrG9zbkY9taxm1P05QUDG+8/UBlK6Eun34rivP/CBtil/53olHLvlYO69q+KfBcW1pHJ0aB5+jyuTZ4Jwi8p6/0I3/l9r+hew304vir5K/a+LiVgFME3syCf/HBuj9R5KvAoWJXyLGt9oTXQ7ls9bykwLOasAP7p8BzYNE0CJkCT4MiR5GiT5wFN+bzuzF35Ryj1eF7jcA5g0ZF0YM5r9oh8GSk8gy8f8UZeFXGGXi9xBl4+5iTgOjFJwTe+wkUeBuWfCy20L5anY83OQ3dh3IFXqwFpsCLlRzbRUJg3vpCfLcsH2X+IAa0zcOgTi0iaQYKvIgw8SWbCagu8Iq3BPHguf4wKtl5HtzwWpLNpDicnQScIPA2VhTj6vnj4P/vKBvJ5/wO/XGIyce72MndKWNR4DmlUjbGSYFnI2wOFTEBCryIUWn14prNq8UPhhnIy7JntVhtuHYJPKuLmsgttNvPwJsszsBrzjPw4iw2BV6cANl8pwQ2lFTg6V+XiB/Ytr9yXI822Lt14wapUeA1iIgvJICA6gJPIuEKvARMjAQP6QSBN23LCjy9fGoYqf0ad8TlnQ5IMD33D0+B5/4aR50hBV7UyNjABgIUeDZA5hBRE3CLwJOJJ+oSi6ihs0G9BCjwOEGsIvDr6i0Yt2BNWPf92jbB0d1bNzgkBV6DiPhCAgg4QeDxDLwETIwED+kEgVfXCrwLOw7AwKadE0zP/cNT4Lm/xlFnSIEXNTI2sIEABZ4NkDlE1ATcJPCiTp4NlCRAgadkWVwRFFfguaKMTKIGAScIPBZMPwJOEHiyKjXPwBssts6e2LqXfsVKQMYUeAmArvqQFHiqV0jP+Jws8IKFGxCY+Q2CJYXw9hoEb8eeehbRhVlT4LmwqA5PiQLP4QVUPHyegad4gRheVAQo8KLCxZdtIuAUgWcTDg5TiwAFHqfEDgQo8DgpVCTgWIFXUYaqsTchWFxQjTX5hBvh6bC7ipgZU5QEKPCiBMbXLSdAgWc5Yg4QAwFuoY0BGptYToACz3LEHCAGAnYLvIAvgC1/rgcCQN4ezZGUkRxD1GxiFwEKPLtIO2gcCjwHFUujUJ0q8ALL/4T/k0fDKuXtPRhJQ87SqHruTZUCz721dWpmFHhOrZy746bAc3d9nZodBZ5TK+fuuO0UeP5KPxY9MxNla4oNqCmN09H9sn2QkpPmbsgOzo4Cz8HFsyp0CjyryLLfeAg4VuBt+Bf+N/4vLPWkA46Fd8DIeHCwrSIEKPAUKYRTwxDXeaavKUJSUQX8eekob9MI8HjiyoYCLy58bGwRAQo8i8Cy27gIUODFhY+NLSJgp8DbMns9lr/9V1gmbY7ojFaH7GJRduw2XgIUePESdGF7CjwXFtUFKTlV4En0/qmfIDD9C6MKnra7Ium4a+FJ4W+2XDAtQYHnhiomLofs+RuQuqGkOoCydrko69I4roAo8OLCx8YWEaDAswgsu42LAAVeXPjY2CICFHgWgXVJtxR4LimkmWlQ4JlJk32ZRcDJAk8yCJYWiX+2wtusrVlI2I8CBCjwFCiCU0MQq+8a/7IKVVVlWBL8C4WBLWid2hnZBw2CR/wn1ocCL1ZybGclAQo8K+my71gJUODFSo7trCRgp8DzV/ix8OkZKF+37ZeJ3EJrZWXN6ZsCzxyOruqFAs9V5XRNMk4XeK4phAqJBINICpQi4ElH0JuU0Igo8BKK3/GD503/F9NKxmET1hm5BJO86NhtCDq2PSDm
3CjwYkbHhhYSoMCLHG6VrwqlpeXIbpQFrzd2mR/5iPq+SYGnb+1VztxOgSc58BILlWfDjrFR4DmrXrZES4FnC2YOEiUBCrwogSXg9ZWrijB77np07dwYPbo3syQCb6AMOWXzhcArR9DjRUlaZ1SktLRkrEg6pcCLhBLf2RmBQP5GTJ33lDB34v/E2XcBcfNbdk479O15dszQnCzwFvnX4l+xEnGPpHZo4c2JmQEbqkeAAi+ymqxZtR5/z12KgD+AjKx07NWvhxB5mZE15ltRE6DAixoZG9hAwG6BZ0NKHMJEAhR4JsJ0S1cUeG6ppLvyoMBTu54//LQSTzw7E0GxOk4+Jx+/u/GP2U922QKkVW2q7jYothpuzu4vDhdMzEo8CjyzK6xXf8FgAD/NeASBqgqxmnTbSpsmeV3Qe7eTYwbhVIH3WeVMfOubZ+SdBC/OSxuEXsntY+bAhmoRsEPgecRaVg+mCRveTKzQHiAAOGv1ml9Iux++mW7Iu9DTonUz9O7bXa1iuigaCjwXFdNFqVDg7byYcxf+hfWb8rH37r3QtHFTF1U98lQo8CJnpc2bFHjalNpRiVLgqV2uS6+ZhNVrtlYHmZqWhPdeG2n69p/GJb/DG6gMg1GQuSf8SYlZrUOBp/a8dEJ06zbOxcLlXyMQ8CEjvTH27HYisjJjX8HqRIFXGazCtaXviIWI234BIJ+O3ua4PmO4E0rIGCMgYLXA8wSXIi3pBiHvtp3jFEB/VAbvjCAydV4p2FyE36f+GRZQaloqBg7bV50gXRYJBZ7LCuqSdCjw6i7ki++/it/+nGF8MDU1FVeffTm6dezikqpHngYFXuSstHmTAk+bUjsqUQo8tctll8BL961BVvnyahhVSY1QmNkrYXAo8BKG3lUDV4kVeGUVW4S4awGv2Boez+NEgVchBN51FHjxlF35tlYLvBTPk2Ll5ldhHCqCLwkl7KxVnDOmzcWWTYXVeezaszM6dm6jfH2dGiAFnlMr5+64KfB2rO+GLZtw08O3hn2g3559ceHJ57p7MtSRHQWediVvOGEKvIYZ8Q37CVDg2c88mhFrb6E99cQeOPFYa7b9pPryxTbazahKykJ5SmtxdlhyNKGa+i4Fnqk42ZkJBJwo8GTa3EJrQvEV7iIxAu9pIfC6Kkxlx9DkBRb/LFuDrUXFaNmqKVq3awlxPCYfiwhQ4FkElt3GRYACb0d8hVsLce0DN4V9oLfYRnv56RfFxdqJjSnwnFg1i2OmwLMYMLuPiQAFXkzYbG20YmUR5syz9hILWxOKYDBVBV6gOICS38oQ2CoOQt8zHamdUyLIhq+4gYBTBZ5kz0ss3DAD687BaoG34xbavmIL7T3uBcrMTCFAgWcKRnYSIQF/RRkKF/1mvJ27675ISsuos6UVAi9QFUT5gjJ5YxbSd02DNy2+1f4Rpmzqay9/+Dqmz/7V6JNbaM1D6xEHmG8/wMS8fiPuac0mMTH5xEWAAi8ufGxsEQEKPIvAstu4CKgo8AK+IDa/WoBA4faD0HOOaSS+YUuNK1c2dgYBJws8ZxBmlLEQsFrgyZjkJRZJmIpAsLm4xEJcbiQuQ+FDAvURoMDj/LCLQFV5MRa/eRsBxVvOAAAgAElEQVQqNq81hkxv2gZdT7sTyRnZO4RgtsALlAew8a1NqNpUZYyVlJuE5mc1gzfDWV8jpWaat2g+NmzeiN7d9+AlFiZNXgo8k0AmshsKvETS59g7I0CBx7mhIgEVBV7lSh8K3isKw5XePQ05I3f8JlFFpowpPgIUePHxY2trCNgh8KyJnL26mQAFnpurq1ZuG2d9i38nvhIWVLth56HZ3kMsF3ilc8tQ8FVB2Di5w3KRtXemWpAYTUQEuIU2Ikx6vUSBp1e9nZItBZ5TKqVXnCoKvKotAWx+aUtYITL7ZSB7ML9R02F2UuDpUGXn5UiB57ya6RAxBZ4OVVYjx0QKvLK/y7Dli3CBl3NII2T34y921Zgd0UVBgRcd
Ly3epsDTosyOS5ICz3El0yJgFQWeBF/ySylKp5YjGAgiuWUy8k5qBG+6s7ZKaDGBLEiSAs8CqOwybgIUeHEjZAcWEKDAswAqu6yTQFVZMZa8fTvKN60xPp7WpDW6nXEXktNt2EJbGcSmNzbCV3ML7dliCy2/L3TkbKXAc2TZrA2aAs9avuw9NgIUeLFxYytrCagq8GTW/rIAguIyi+Rm4pZe3mJo7URQqHcKvPBiBIIBvPvvq/h+w0Q0SWmC0ztcgF65fRSqmB6hUOBtq7M8wyl/41pkZWYjOytHueJXFmxG0F+FtKYtlIvNioAo8Kygyj53RiChl1gIiVe+UN4VYN4lFiXwYT3KsAvEL4n5jaZtE9+RAu/ki+/CgsUrxPzb9hNJTnYmfvz0SeN/8xKL+OcOBV78DNmD+QQo8Mxnyh7jJ6CywIs/O/bgRAIUeOFVG7/uY7yx4oXqP0z1puGpvV5HnpB5fOwjQIEHlJWX4M0PnsWKVUsM8EMHHY1BBwy3rwgNjLRy3LvY+OuP23626t4LnU8eBW+y+AWQix8KPBcX18GpmX2JhRUoxiWtwIvJC+BHAB2FwLurYh80R9236loxvs59OlLgHXnGaDxx1+Xo2qntDrWjwIt/OlPgxc+QPZhPgALPfKbsMX4CFHjxM2QP5hKgwAvned+CmzCncGbYH47e7V7sndfPXPDsrV4CFHjApB8+xw9TvwrjdPVFd6FZ05YJnz1FS/7GkrHbFkOEnvZHn4bm/Q5MeGxWBkCBZyVd9h0rAdUF3lZU4uS0yQiK/4Sew/ztcGXVnrGmzHZREHCkwBt47JV4/4Xb0ar5jr89pcCLovo7eZUCL36G7MF8AhR45jNlj/EToMCLnyF7MJcABV44zx1W4CWJFXi9uQLP3FnXcG8UeMDYd5/A4mXzw2CddMz56NUz8TJ53ZQJWDPp87DYmu17EDqMPLXh4jr4DQo8BxfPxaGrLvBmeDbgttQZYRVoh2y8WHGQi6uiTmqOFHh7DxuFg/v3wqx5i9GsSS6uGnU8Dh7Q26C6dpPc280nHgKZ6UlI8nqxtdQXTzds2xABnknVEKGwj+dlpaKssgoVvkBU7ZzysodnRzilVGFxtmicjg0F4rKI7b+EdGQeDNo9BHKzU5CRmoSCYh/KK/3uSSzGTOQZeO/8+wq+zxdn4KU2wRnGGXj7xNgbm8VKICPNi5RkL4pKqmLtwvHt5i+cjTc/eq46j8a5TXHFqNuQnpae8Nzk2Xfzn7kHVWXbfo7yiJ8Dul94I7Ladkh4bFYFIFcP5YrvLSt9fvH9Jb9WWsWZ/UZPoEVeOjYVVsAvv7lU8PtLccIyLk+dhmWeourkrhKr7+QqPD7WE2jd1Nytyh5xOKul0ywgbtS79cFXcPjg/tivbw9MmTYHo+97AePeuN9YkRewdnjrK6LACCGvZGkhFcgz4SEQcFQl+O/IS/eKEgrdqOaDKi97xcSUf+3x01mVijAOj5iT8ssJ5yXngkoEQr+k0v2r5bwFf2LazJ/F+d05OGLwCOTm5ClTppL8dVj5/URUVVSg/UGHIK9TF2VisyQQ8Re367+3tAQcO7WagJyXxveV8r8U/fmgQGyjfcu3GP96SjHE0wZDknY82sxqTrr2L3/2MPOxXODVFew5Vz+A444ciBFD9uMlFiZUk1toTYDILkwnwC20piNlhyYQ4BZaEyCyC1MJcAutqTjZmUkEuIXWJJDsxlQC3EJrKk52ZhIB1bfQmpQmu4mRgOO20JaWVWDx8n/Ru8f23wqdftm9OOP4YThsUD8KvBgnQs1mFHgmQGQXphOgwDMdKTs0gQAFngkQ2YWpBCjwTMXJzkwiQIEHrK+qwofFpSgRu4lGZGWgZ1qqSXTZTawEKPBiJcd2VhKgwLOSrvP7dpzAKygsxtCTr8MTd1+G/fvugZ9+/RPX3/08xr/5AJo2zqHAM2FOUuCZAJFdmE6AAs90pOzQBAIUeCZAZBemEqDAMxUnOzOJ
gO4CrzAQwKX5m7HFv+2sNbkB6r5mjdHr/9k7Dzi5ynL//6ZuL9nspuymF9JIJVSlSJGOSJNmFyt2wQoIol6x3IuICoqA4hXwUhRQpGlCKIFASKMlJKRustm+s7O70849s4FJJmVnzswpb/nN/fPx/8me87zP8/29LJvvnkKJZ9MOK6wMBV5h3HiWswQo8JzlK3t16QReGvii51bgZ7+5Gy1tnWgaVY8rv3ARjlgwczALvoW2+C1JgVc8Q1awnwAFnv1MWbF4AhR4xTNkBXsJUODZy5PV7CGgu8Bb1NePG9q7smCeal6Fd3lttT2AWaUgAhR4BWHjSQ4ToMBzGLDk5aUUeEMxp8ArfkdS4BXPkBXsJ0CBZz9TViyMQPqtln2pnsGTJ9SPQFvngPkCpcJq8SwSsJsABZ7dRFnPDgK6C7w3YjF8bWdHFsqPVVfi/KoKO/CyRoEEKPAKBMfTHCVAgecoXumLU+BJH6H9A1Dg2c+UFYsnQIFXPENWKJ5ACkm0xDYhbsQGi1WVlqHGaDJvhwoUX5wVSMAGAhR4NkBkCdsJ6C7w0kDv6I7grz29g2znmbfOXlVXi1K/vW8TtD04xQtS4CkesKTjUeBJGpxLbVPguQRapmUo8GRKS59eKfD0yVrkSSPJTnQkdmRaDAX9qPY1oNxfK3Lb7E0jAhR4GoUt0agUeLvCSj8DL2Jesj02FJQoPXVbpcBTN1uZJ6PAkzk953unwHOesXQrUOBJF5kWDVPgaRGz8ENS4NkTUVtvBL97YSnW7NiOGSNG4DOHH4nhFZX2FNe8CgWe5htA0PEp8AQNRvO2KPA03wCCjk+BJ2gwgrRFgSdIECK1QYEnUhrs5V0CFHjcCyIQSBpJ7EyYt9Cm3rmFtqQMtWgyW+MttFby+f7jj2JVc3PmlFkjR+G6k0+1UoLHHoAABR63hogEKPBETIU9UeBxD4hIgAJPxFTE6YkCT5wshOmEAk+YKNjIHgQo8LgdRCHg1UssjP4kkiu6YT5wD4H5NfCF/KIgsdzHh+/+M6LmQ93f/fh8Ptx10aUoDfK2Mssw9zqhUIGX7O5CvHkbgnXDEWwYUWwbPJ8EsghQ4HFDiEiAAk/EVNgTBR73wFAEKPC4P/YhQIHHTSEiAQo8EVNhT6PqytDS0ef4W2iN7jgG/vstGO27pJd/ZAnCX58KX4mcEo9X4Dn3704hAi+2aSN6n1+Cdzdy6YxZKJs737kmWVk7AhR42kUuxcAUeFLEpF2TFHjaRW5pYAo8S7j0OJgCT4+cZZuSAk+2xPTo1y2BF//PTiQe2H3LaZpu+KPjEFgg58sz0s/Au+WF5/Hajh18Bl6e/6qk4nFTsj2G2MY3EagbgcrDT0Bw+Kh9zi5E4HWbtzQn21p31zKviKw990Pw8YrIPNPhYbkIUODlIsSve0GAAs8L6lwzFwEKvFyE9P46BZ7e+e93ego8bgoRCVDgiZgKe3JL4CWeaUP83q1ZwEMXj0Xw8GEMQRMCkeefQN+rL2am9ZdVYviHvmBejpl9FaZ9Au9CU+Dx2Y6abC/Hx6TAcxwxFyiAAAVeAdB4iuMEKPAcRyz1AhR4UsfnTPMUeM5wZdXiCFDgFcePZztDwC2BZ0QT6L9hHdCx6xZan3kLbYnEt9A6k4baVTvuvxWJzrasIYed82kEa4dn/1llGGUlAXREYugbSOYFhbfQ5oWJBxVBgAKvCHg81TECFHiOoWXhIghQ4BUBT4NTKfA0CNnqiBR4VonxeDcIUOC5QZlrWCXglsAb7KvPfInFym4YPsN8iUWt1C+xsMqZxwNOXoGX5suXWHCXOUmAAs9JuqxdKAEKvELJ8TwnCVDgOUlX/toUePJnaPsEFHi2I2VBGwhQ4NkAkSVsJ+CqwLO9exaUiYCTz8CTiQN7lZMABZ6cuaneNQWe6gnLOR8Fnpy5udU1BZ5bpCVahwJPorA0apUCT6OwJRqVAk+isPZo9e2ubjy6Ycvgn5wycQwm1FTLOch+
ui7kGXjKDM9BhCVAgSdsNFo3RoGndfzCDk+BJ2w0QjRGgSdEDGI1QYEnVh7sZhcBCjzuBBEJyCzw0k9H2+LrwyjDfJ4esl+EICJru3raFunF9xa/gFgyNViyxHxRww+OPgyNleV2LeFpHQo8T/Fz8QMQoMDj1hCRAAWeiKmwJwo87oGhCFDgcX/sQ4ACj5tCRAIUeCKmwp5kFXgbff24KbgFHYijzBfAxxKjsTBVpUWgf1/7Nu59/a2sWS+YPhlnTZ2gxPwUeErEqNwQFHjKRarEQBR4SsSo3BAUeMpFautAFHi24lSjGAWeGjmqNgUFnmqJqjGPrALvp4GNeN0fzYRQhSB+EZ9iXofnUyOYIaZYum0HbnppddYRXzzkYBzeOFKJ2SnwlIhRuSEo8JSLVImBKPCUiFG5ISjwlIvU1oEo8GzFqUYxCjw1clRtCgo81RJVYx5ZBd7loTfQh123kL77uT4xCaPN22lV/6QMA79Z/iqe27p9cNQjm0bic/Nnwe9TQ15S4Km+g+WcjwJPztxU75oCT/WE5ZyPAk/O3NzqmgLPLdISrUOBJ1FYGrVKgadR2BKNKqvAezDQiof8OzOk5xlV+GJijETki2+1Jdo3WGREeVnxxQSqQIEnUBhsJUOAAo+bQUQCFHgipsKeKPC4B4YiQIHH/bEPAQo8bgoRCVDgiZgKe5JV4KVg4Gl/F1b4ezApVYYTU3Uo1ehFFirvXAo8ldOVdzYKPHmzU7lzCjyV05V3Ngo8ebNzo3MKPDcoS7YGBZ5kgWnSLgWeJkFLNqasAk8yzGzXAgEKPAuweKhrBCjwXEPNhSwQoMCzAIuHukaAAs811FIuRIEnZWzONk2B5yxfVi+MAAVeYdx4lrMEKPCc5cvq1glQ4FlnxjOcJ0CB5zxjrmCdAAWedWY8w3kCFHjOM5Z5BQo8mdNzqHcKPIfA7qdsy/oE1jzeh2iXgfHzw5h5gnkTm9+99WVaiQJPprT06ZUCT5+sZZmUAk+WpPTqkwJPr7xlmZYCT5ak9OqTAk+vvK1OS4FnlZgGx1PguRPyQDSFf/13D5JxI7Pg7JNLMeXIUncakGwVCjzJAtOkXQo8TYKWaEwKPLHCivfFEIvEUF5fCUVedFwQYAq8grDxJIcJUOA5DJjlCyJAgVcQNm1OosDTJur8B6XAy59VMUfuWBvHs3/uzSoxYkoQ77m0spiyyp5LgadstFIPRoEndXyuNW+Yv7BJbjJfHTJgIDA2AH+dc5daU+C5FmvOhZqXb8GGf7+JVMpA5chqzDpnDkIVJTnPU/EACjwVU5V/Jgo8+TNUcQIKPBVTtW8mCjz7WCpTiQLPnSh3XYHXbV6Bt3s9XoF3YPYUeO7sSxlW2Rbpxsvbt2BiTR1mNYzytGUKPE/xS7G4kTAQfyZuyrtUpt/gvBACIwKO9E+B5wjWAxZNRaMwenrNPOux5yV2scgAXrxlCYzdF9lj9LwxmHziNHcbFGQ1CjxBgmAbWQQo8LghRCRAgSdiKuL0RIEnThbCdEKB514Uez4Db+ycEGa/vww+5y7McG8wB1aiwHMAqoQlX2zejJ8v/Q+S7/yt+PTJM/CxOYd6NgkFnmfopVk4tTOJ+PI9flNjdp6+Ci84I+TIDBR4jmDdb9GBp5/BwKLFMJIpBEaPRvmlF8JfXj54bOu6nXj9wZVZ51WOrsG8Sxa616BAK1HgCRQGW8kQoMDjZhCRAAWeiKmI0xMFnjhZCNMJBZ4wUbCRPQiIIvBexz+wzvcfBFGCmcaZGIfDmJOLBL7/9L+wpnVHZkWf+VCpP55xEUqDQRe72L0UBZ4n2KVa1Og1EHtmIKvn4KQgAuYjE5z4UOA5QXXfmqmOTvT88uasL4SPPAJl7z9h8M9SiRRevuN59Hf2ZY6ZdvosNMzw9qphd+jsuwoFnlfkue5QBCjwuD9EJCC6wEs+8hgSD/3TvOjEj8C5ZyFwwrEiYlS2Jwo8ZaMtfDAKvMLZ8UznCIgg8Lb6
luNZ/DpryJNSV6HWN865wVk5i8C3/2MK1I7WzJ+lBd6fzrwYJQFnbkfMhZ8CLxchfj1NILEugaT51vH0xz/MvPpuXhC+kM8ROBR4jmDdp2h89RpE73sw68/9TU2o+tTHMn8Wj8aweelGxHr60TBzFIZPaXCnOQFXocATMBS2BAo8bgIRCYgs8FIr1yB+3U+ysIV+dDX8B00REaWSPVHgKRlrcUNR4BXHj2c7Q0AEgfeS7y6sx6KsARcYl2Iy+JsnZ1Lft+ry7Vvxk+efytxCe+bUWfjIwYe4tfw+61DgeYZeuoWNmDH4Egt/lbPPSaDAc2drGIkkIr++BamOjsyCZReci/CM6e40INkqFHiSBaZJuxR4mgQt2ZgiC7zEXfcg+eAjWUSDl5yPwAfPlIyyvO1S4MmbnWOdU+A5hpaFiyAggsDb3xV4J6auxjDf2CIm46lWCfAlFvsSS/an0LKsE/0tMdRMLUfd7GqrWIU53ojHBm/LQMCZW0yFGdShRijwHAK7n7JGTw/6n10Ko7sbodkHIzT9IPcWl2wlCjzJAtOkXQo8TYKWbEw3BV5/fxJbm3swYVwNAoHcdwbs9wq8H14F/7SpklGWt10KPHmzc6xzCjzH0LJwEQREEHjp9vkMvCJCVPBUUa7Ae/NPWxDd1p8h3HRCPRoW1kpF3EilkNy5DUZ/dLBvf1UtAnUjpJpBhGYp8ERIgT3sTYACj3tCRAIUeCKmwp7cEnjPvbgdP//1ckSjcdTXl+Pqry/ElEm5f3YcfAbew/80f1AzHwlyzpl8Bp7LW5YCz2XgMixHgSdDSvr1KIrA0488Jx6KgAgCLxFJYPXNb2e1WTa6FNM+Mkaq8FLdnUh2tGT1HBw5Br7SXW/15Cc/AhR4+XHiUe4SoMBzlzdXy48ABV5+nHiUuwTcEHiplIFLP/c4Ojt3//J3xrTh+Pl173F3WK5mmQAFnmVk6p9Agad+xjJOSIEnY2rq9yyCwEvGUlh90wYYCSMDvGZaJSaeLdfbLpOtzUj19mRtmkBtPfw1depvJBsnpMCzESZL2UaAAs82lCxkIwEKPBthspRtBNwQeJu3RvCZrz2V1XN5eQj/d/upts3BQs4QoMBzhqvUVSnwpI5P2eYp8JSNVurBRBB4aYDtq7qx+fGdMOIGSoaFMOm80SipC0vF1oj1I9G8aXfP6VszGifA59EbhqWCt0ezFHiyJqd23xR4aucr63QUeLImp3bfbgi8NMFrf/oCli7bnoF53llT8YlLZqgNV4HpKPAUCNHuESjw7CbKenYQoMCzgyJr2E1AFIGXnis5kMJAZxxlDWHzJRC5H0RsNws76qWff5eKdMHw+RGoHgZfSC4JaQeDYmtQ4BVLkOc7QYACzwmqrFksAQq8YgnyfCcIuCXw+voS+Ns/1+ONtzpx2PyROPn4cfBL+vOjEzmIWpMCT9RkPOyLAs9D+Fz6gAQo8Lg5RCQgksATkQ97cp8ABZ77zLlibgIUeLkZ8Qj3CVDguc+cK+Ym4JbAy90JjxCRAAWeiKl43BMFnscBcPn9EqDA48YQkQAFnoip6N0TBZ7e+Ys6PQWeqMno3RcFnt75izo9BZ6oyYjRFwWeGDkI1QUFnlBxsJl3CFDgcSuISIACT8RU9O6JAk/v/EWdngJP1GT07osCT+/8RZ2eAk/UZMToiwJPjByE6oICT6g42IxGAm/j2gheXNSOcKkfR53UgPqRJcxfcAIUeIIHpGF7FHgahi7ByBR4EoSkYYsUeBqGLsHIFHgShORhixR4HsIXdWkKPFGT0bsv1a/A27Yxilt+uBaGsSvnsvIgvnT9NFRUBfUOXvDpKfAED0jD9ijwNAxdgpEp8CQIScMWKfA0DF2CkSnwJAjJwxYp8DyEL+rSFHiiJqN3X6oLvMfua8aSR1uyQj7/svGYfVit3sELPj0FnuABadgeBZ6GoUswMgWeBCFp2CIFnoahSzAyBZ4EIXnYIgWeh/BFXZoCT9Rk9O5L
dYG39KlWPPKXrVkhf/LKKRg/tULv4AWfngJP8IA0bI8CT8PQJRiZAk+CkCRpMTWQQKy5A+GmOvhDgaK6psArCh9PdogABZ5DYBUpS4GnSJB2jkGBZydN1rKLgOoCLxE3cNdN67H+tcggsiNOqMdpFzbZhY91HCJAgecQWJYtmAAFXsHoeKKDBCjwHISrUenois3Y/qsnkIzGEKytQOM3TkbJxIaCCVDgFYyOJzpIgALPQbgKlKbAUyBEu0egwLObKOvZQUB1gfcuo9bt/SgtC6CyJmQHNtZwmAAFnsOAWd4yAQo8y8h4ggsEKPBcgKzBEhsuvwuJjt7MpKUHjcLYaz5Q8OQUeAWj44kOEqDAcxCuAqUp8BQI0e4RKPDsJsp6dhDQReDZwYo13CNAgecea66UHwEKvPw48Sh3CVDguctbxdWSnVGs/8Kfskbzl4cx+XcfL3hcCryC0fFEBwlQ4DkIV4HSFHgKhGj3CBR4dhNlPTsIUODZQZE17CZAgWc3UdYrlgAF3oEJGjDwIv6NFf7n0WRMxPGps1HqKysWOc/PgwAFXh6QeEhOAs03Po7IC+szx9WePhcNFx+R87wDHUCBVzA6nuggAQo8B+EqUJoCT4EQ7R6BAs9uoqxnBwEKPDsosobdBCjw7CbKesUSUFXgDXQ2I/LmMwjXjUHVlML+wv6E7z485Nt9Bc9kzMKXUj8sFjnPz4MABV4ekHhITgKpWAKd/1iJvvUtqJg1BjUnzYLP78t5HgVewYh4ogcEKPA8gC7RkhR4EoXlVqsUeG6R5jpWCFDgWaHFY90i4LbAa4ml8M+2GLYOpHBQeQCn1YdRXsRfXtziJMo6HZGt2Ny6HA3VUzC6broobdnah4oCL7LxZWy8+woYifggq9o5p2DMmd+xzO0G31ex1bch67wfpG5HNYZZrsUTrBGgwLPGi0e7Q4BX4LnDmatYI0CBZ42XbkdT4OmWeB7zUuDlAYmHuE6AAs915FwwDwJuC7ybtvSh1ZR4735mVwZx3oiSPDrlIWu3LcaiVb9BykgOwpg/+VwcOvVDyoFRUeBt/Ot30PPmkqyspn/5AQQrh1vK71b/9ViDZZlzSszbZ69P3oEw+O+QJZAFHEyBVwA0nuI4AQo8xxFzgQIIUOAVAE2jUyjwNAo731Ep8PIlxePcJECB5yZtrpUvATcFXiRp4Kcbo1mtVQb9uGIcn+GVT173PXMl2nrezhwaCITx8RP/CL/Pn8/p0hyjosB7+y/fQGT9C0ULvO3YjN/5f4RWNCNkSruLU5djAY6WJluZG6XAkzk9dXunwFM3W5kno8CTOT3ne6fAc56xdCtQ4EkXmRYNU+B5F/NAXxQvPf8IWlu2YPL0QzBr7jHeNSPYym4KvPToe1+BN7cqiHMaePVQPtvi3iVfQWdkW+bQYKAEHzvxTgq8fOB5fEzv5pV4+3+/mrmFdti809F0+jcL6ippXoHZ7NuIemM0X2BREMHCTqLAK4wbz3KWAAWes3xZvTACFHiFcdPlLAo8XZK2MCcFngVYPNQ1AhR4rqHOWiiVSuGeP1yL7VvXZf78fad+FPMOe783DQm2qtsCb+9n4J1uPgOvjM/Ay2tXrN/+HJ5c8T8wDGPweN5Cmxc2YQ6y4yUWwgyjYSNWBF7voy3oW9QJX5kPFWeNROm8ag2JcWQ3CFDguUGZa1glQIFnlZhex1Pg6ZV3XtNS4OWFiQe5TIACz2Xg7yzXvnMb7vz1FVmLj2yagos/da03DQm2qtsCT7DxpWunvWcztrSt4EsspEuODctOIF+B17+sC923b8kat+7qKQiO5JXGsu8BEfunwBMxFfZEgcc9MBQBCjzuj30IUOBxU4hIgALPm1RiA/34zU8/g1QykWlg2uyjcNo5X/CmIcFWpcATLBC2AxWfgcdY5SeQr8Dr+Usz+pa0Zw1cdWEjyo7mm4Ll3wXiTUCBJ14m7AigwOMuoMDjHrBEgALPEi4e7BIBCjyXQO9nmVdf
WYzHH75tUOLV1o3EOZd+GzXDGrxrSKCVKfAECoOtDBKgwONGEJFAvgJvnyvwAkDdd3kFnoiZqtATBZ4KKao3AwWeepnaORGvwLOTpiK1KPAUCVKxMSjwvA00NtCHzo4W1DeMgT9g/o2Kn0ECFHjcCKIRoMATLRH2kyaQr8BLH8tn4HHPuEWAAs8t0lzHCgEKPCu09DuWAk+/zHNOTIGXExEP8IAABZ4H0LlkTgI6CLzeaATNLRuRMmmMNgVuVUVNTi48wDsCFHjesefKByZgReCRo+QEkkD42T4ENiaQnBRC7PBSQNDf+1HgSb7XFG2fAk/RYG0aiwLPJpAqlaHAUylNdWahwFMnS5UmUV3gDZhXXr7y2lIkkvHB2Hw+P+ZOPwwV5VUqxajULBR4SsWpzDAUeMpEmXOQsvt7EVo2kDkudlgJ+s+uyHmeFwdQ4HlBnWvmIkCBl4uQ3l+nwNM7//1OT4HHTSEiAQo8EVNhT6oLvOadW7B+02tZQY8330I8ZtREhi8oAQo8b4Pp6osOXq06rKzc20YEW50CD+jaYWe8k4UAACAASURBVGDZQwm0bU6hcXoAh34ggJJyn2BJFdmOufmrr+sAYkamkFHlR8+3a4ss7MzpFHjOcGXV4ghQ4BXHT/WzKfBUT7iA+SjwCoDGUxwnQIHnOGIuUAAB1QVeR3c7Xl37UhaZyeNmYlRDUwG0eIobBCjw3KC8/zUeeW05XmneNPjFaQ2jcfashQj6/d41JNDKFHjA334SQ6R9t9iaeEgAR10QFCgle1qp+J8uBFrM+2jf+SRHBND7FTEfvUCBZ0/mrGIvAQo8e3mqVo0CT7VEbZiHAs8GiCxhOwEKPNuRsqANBFQXeGlE6za+ih2tWwdp1dWOwLSJs+GnlLBh9zhTggLPGa65qq5v34m/vPJs1mGnTp+HBY3jc52qxdd1F3h9PQbuvz6WlXVplQ/nfi+sXP6B9QmU/28EvmgKRlUA0QvLkZwYEnJOCjwhY9G+KQo87bfAkAAo8Lg/9iFAgcdNISIBCjwRU2FPOgi8dMrpZ+GlbwssKylj6IIToMDzJqDF61/H02+/kbX4/KYJOG3aXG8aEmxV3QVeOg5drsBLz2qYt9AGWpNImVffISjubcIUeIJ9o2A7gwQo8LgRhiJAgcf9QYHHPSAFAQo892N66G/34pkl/0ZVVRXOPOtDmDPvEPebEHxFXQSe4DGwvT0IUOB5sx26+6P47dKnEE/uunUwfZXqxw85GqOqxHz2l9uU3BZ4O9GNzb5WNKIOowwxMkg/A+/FB+No32YMPgPvsLMDCJeJK7fc3iNerEeB5wV1rpmLAAVeLkJ6f50CT+/89zs9r8DjphCRAAWeu6k89+wi3HXnbzOLBgIBXHXtL9DQMMLdRgRfjQJP8IA0bI8Cz7vQ26IRPLdx7eDVqgvNq+8aq4d514xgK7sp8F71bcET/pUwzP9Lf96TmoaFxhTBiLAdEQhQ4ImQAnvYmwAFHvfEUAQo8Lg/9iFAgcdNISIBrwRe+sf/SCKFAfNvZCXms8grg37o8PvyP/zul3hp2XNZW+Hjn/oiFh56lIjbw7OeKPA8Q8+FD0CAAo9bQ0QCbgq8PwUWoR2RDIYgAvhc8mTo8V9vd9JPDPSge+MLCJbVoGrMAvh8cr6shQLPnf3CVawRoMCzxku3oynwdEs8j3kp8PKAxENcJ+CVwOuMpdCb3P3WuIqAD7VhOX9QtRIar8DLjxYFXn6ceJR7BCjw3GPNlfIn4KnA85kCL0GBl39aQx/Z37kFm578LyRjuyRpxYhpGHv8N6WUeBR4du0K1rGTAAWenTTVq0WBp16mRU9EgVc0QhawQCCBfmwPrTCfeAyMTiwwf0++/zeVeSXwtvUnYez2d+YPqEBjqflQZg0+fAZe7pAp8HIz4hHuEqDAc5c3V8uPgJsCb41vM570r+IttPlFY/moHS/9Ge1vPp513viTvofyevluU6bAsxw/T3CBAAWe
C5AlXkJKgbd5WwuuuuEPeGPdJjSOqsd3v/xhLJg9dTCGbW19EschRusUeGLkoEMXcV8Uz5TfgD5/++C4lamROLL3GwiiZJ/xvRJ4O8x7ZxOp3QYv6PdhZPpe2iI+RncfjGfXm7+2DsN/5GTzDW3F1SuiFZ5aJAEKvCIB8nTbCcgo8Hbu6MWaZdvQ0FiFWfNH2c6EBb0n4KbAS08r4kssvE/Bng6aX7wTnev+nVWMAs8etqxCAmkCFHjcB0MRkFLgffTLP8bx712AS885Cc8uW2PKvNvw+D0/RygYoMCzYb9T4NkAkSXyIrAptARrSu/NOnZW/wUYF3+vMAJvIJlCexxImZfh+c3L7+rMCwRLAoULN2NHN5JXPgB0RQdn9M1shP/aM+AL6XFVX14bQ6KDKPAkCkuTVmUTeG+u3olf/+hpJOLpVz+YLxw4cSIu+gzfeK3adnVb4KnGT6R5YpGdePvRa5CM7/o5pnzEdIw7/kreQitSSOxFagIUeFLH53jz0gm8to5unHLxlXju4ZsRNN+KmP6cd9k1uPLzF+Gw+dMp8GzYMhR4NkBkibwI7E/gze6/BGPihwsj8NKNpOVdwrwIL2jePpuWeMV8knc+D+OB5VklAledDt8h44opy3M9IkCB5xF4LntAArIJvJt/+DRee2VH1jw/uvUMVA8rZcoKEaDAUyhMc5R4Xxd6Ni/jSyzUipXTCEKAAk+QIARtQzqB9/KqtbjuF3fiwduvzyD9+rW/xuELZuKCM49DW3dMUNTytFVqPqDfb94mGDWf/cUPCThJIIZeLCr9CaK+d26hxQgc0/cN8yl45fssW1UWxEAiiVh8jwfSOdmcQ7Vjtz+LxH3ZAq/k+2cgsHC8QyuyrJME6qrD6OiJZT0n0cn1WJsEchFIf68Mh/yI9CUw8M5VbbnO8fLrN163GGuWb89q4ad/OAs1FHhexmL72iXmngyaL4Hq5c+WtrNlwcIJVJrfL+OJlBTfKwufkmfKRmBYVRhdvTGkdl2Yzg8JZBEYbv7dw86PzzA/dhbcu9azy1bjl7+/D3f/9prMl773k9tw0KQx+Mj5Jzu5NGuTAAk4QCBuvsRio/ESfObrK8b75u/3+XcOLOtZyeS2LrR+4R6kOnfdehI+uBF1vziXt9B6lggX9pLAQN92dHeuhs8fMoXNXITCtV62w7U9ILDGvPrux999CvF3ZOOJp03BZV/Z9ypsD1rjkiRAAiRAAiRAAiSgNAHHBd7y1Wtxzc/uwN/v+GEG5Ne+fzOOWngwzjvjWP4GxYbtlX68l8+8TTCRdNTF2tApS+hEIGT+5j5p/n5Ahd9Gpbr6EFu8Fr7KEpQcbb6Ahy+xkHYrp690SosHfre0HmF8YCdatpnPgzR2/YrZ5y/BiDHnIxistF6MZ2QIpL9Xpq+iT5jP7zT/nxSfHc0RvPLCVoxsqsK8hY1S9MwmrRHgz5bWePFodwikrwpNX3siy/dKd6hwFa8JhM2/F8TNTensZVFeT8n1CyWQvqLdzo/jAq+jqwcnXvB1LPnbr1BWuuvywVMvuRI/+vZlmH/wVD4Dz4Y0+Qw8GyCyhO0E0peT9/Un0C/BLWG2D8+CwhLgM/AKj2ag50XEoyuzCpTWvA/B0kmFF+WZkO0ZeIxMDwJ8Bp4eOcs2ZW1l2Hw0SxLRAT42SLbsVO6Xz8BTOd3iZ5PuGXjpkT/5tRtw6LzpuOySM/DPfy8dvKX2n3++AQHz13vb2vqKp6J5BQo8zTeAoONT4AkajOZtUeAVvgHifW9goHtJtsAbdgqC4abCi/JMCjzuASEJUOAJGYv2TVHgab8FhARAgSdkLMI0JaXA27q9Fd/58e/wxlubMbZxBL7/9Y9h1rQJg1Ap8IrfWxR4xTNkBfsJUODZz5QViydAgVc4Q8NIor/7KST7Nw0WCZXPQEnVUYUX5JmDBHgFHjeCiAQo8ERMhT1R4HEPiEiAAk/EVMTpSUqBNxQ+CrziNxcFXvEMWcF+AhR4
9jNlxeIJUOAVzzCV6Daffxc0/9n37dPFV9evAgWefpnLMDEFngwp6dcjBZ5+mcswMQWeDCl51yMFnnfshV2ZAk/YaLRujAJP6/iFHZ4CT9hotG2MAk/b6IUeXHaBF0/0IWK+NbumYpz5kpiA0KzZXP4EKPDyZ8Uj3SNAgeceaxlXosCTMTWHe6bAcxgwyxdEgAKvIGw8yWECFHgOA2Z5ywQo8Cwj4wkuEJBZ4G3euRQvvHozkskYKstG4j1zrjBF3hgXqHEJpwlQ4DlNmPULIUCBVwg1fc6hwNMn67wnpcDLGxUPdJEABZ6LsLlU3gQo8PJGxQNdIkCB5xJoLmOJgKwCzzBSeODpTyCR6M/MO2r4PBwz91uW5ufBYhKgwBMzF927osDTfQcMPT8FHvfHPgQo8LgpRCRAgSdiKuyJAo97QDQCFHiiJcJ+0gRkFXgdPRvw+IvfzgqxNFyLs977WwarAAEKPAVCVHAECjwFQ7VxJAo8G2GqUooCT5Uk1ZqDAk+tPFWZhgJPlSTVmYMCT50sVZpEVoGXzmDRKz/EjvZVmTgOnvQhzJzwQeniSSVTaNvRhZq6SoRLQ9L170TDFHhOUGXNYglQ4BVLUO3zKfDUzreg6SjwCsLGkxwmQIG3G3AM3UigB+Vocpg6y+ciQIGXixC/7jYBCjy3iXO9fAjILPDi5u2zb2x+CJ2RjWiqPxQTRh0Dn8+Xz9jCHJMWdw/f/h90tfUgGA7gfWcfhhmHThamP68aocDzijzXHYoABR73x1AEKPC4P/YhUFbiR8D8wSTSnyQdEhCGQHV5CAOxJAYSKWF68qKRjYH7sM3/j8GlqzAV0+KXI4RKL1rhmiaBuqowOnti0HtXciuIRIACT6Q02Mu7BMrC5s+WAb/5JteEbVDefL4Dz93bjIFoCvNOrsdhHxxlW23VCj1y52K8tXpTZqy0xLvsmvMRCgdVG9XSPFXmz5aJRBJ9Mf5X3BI4HuwogfRFC1295s+W3JaOcpa1OAWerMk52DevwHMQLksXTIBX4AG92IKVoWuyGI5NfgBjUmcVzJUnFkeAV+AVx49n20+AAs9+pqxYPAG7r8Br39aPu654HamkkWnu/Z8fh5nHDC++WQUr/P66+xDt6cua7PzLT8bo8Q0KTpv/SLwCL39WPNI9ArwCzz3WMq5EgSdjag73TIHnMGCWL4gABR6w3f8fbAj8KYtfrTELMxJfK4gpTyqeAAVe8QxZwV4ChQg8I5FA6q2NSHV2wT+yAYHxY2DeI2hvY6ymNQG7Bd7Kx1vx1G2bs5jOPrEeJ3xqrNacDzT88qdfw9N/fynz5aZJI3Hu507SnhUFnvZbQEgAFHhCxiJMUxR4wkQhTiMUeOJkwU52E6DAA5JGP5aHv4s4OjNgZsWvQDWmc6t4RIACzyPwXPaABAoReImlLyPV3JKp6Z8+BUHzH348IGAY6HtrC2KbdyAwrAoVMybBVyL/CwfsFni8As/a3jTMffX6S+uxbvVmNDTWYf7R01FSFrZWRMGjKfAUDFWBkSjwFAjRwREo8ByEK2tpCjxZk1O7bwq8Xfn2oxXNwccQN7owwjgWtamZagcv+HQUeIIHpGF7VgWekUwi/vATgPkX/Hc//qoKBE84WkN63o8cfXU9Ii+uyTQSahiGYae91/vGiuzAboGXbofPwCsyFJ4OCjxuAhEJUOCJmIo4PVHgiZOFMJ1Q4AkTBRvZgwAFHreDiAQo8ERMRe+erAq8tLiL/2sRjP7+DDjfiOEIHXWo3iA9mr79kSVItHZkrV5/wUnwl5V61JE9yzoh8OzpjFV0JkCBp3P64s5OgSduNiJ0RoEnQgqC9UCBJ1ggbGeQAAUeN4KIBCjwRExF754sCzwTV/r22eTyVTBicaC8DMHD58NfU603SI+m71r8MgY2bN29ut+P+gtPhj8k99tCKfA82lBcdkgCFHjcICISoMATMRVxeqLAEycLYTqhwBMmCjayBwEKPG4HEQlQ4ImY
it49FSLw0sSMRBJGJLJL3PEFFp5tokRXBF1PvoBkTy8QDKD68NkonSL/ixko8DzbUlx4CAIUeNweIhKgwBMxFXF6osATJwthOqHAEyYKNkKBxz0gOAEKPMED0rC9QgWehqiEHdlIpZDojCBYWQ5fWO4r796FTIEn7HbTujEKPO/jT6UMtLTFUV2VRKB8O8KpMfBB/hf3FEOWAq8YeuqfS4GnfsaWJ6TAs4yMJ7hAgFfguQCZS1gmQIFnGRlPcJgABZ7DgFm+IAIUeAVh40kOE6DAcxhwjvId3Qnc+1ALtrd1IRlqxpEnL8LMuTswKnolSpKTvG3Ow9Up8DyEL8HSFHgShOR2ixR4bhPnevkQ8Ergtcf7sDrShvpQGWZWDs+nVR6jEQEKPI3ClmRUCjxJgtKsTQo8zQKXZFwKPG+D+vtjrVjxWgSxwCYYSMAfSOHir9yOquBkNPVe521zHq5OgechfAmWpsCTICS3W6TAc5s418uHgBcC781oB27duhKxVHKwxUOrR+HDo2fm0662x/Saz9Ba8fJylJkPwp89bz6C5jOcVP5Q4KmcrpyzUeDJmZvqXess8LpizehOtKI0UIW68FgEfGr/d1GmvUyB521av/nTVuzs6EfMvzHTyGkfeQCjmyKY2HO7t815uDoFnofwJViaAk+CkNxukQLPbeJcLx8CXgi8tLxbHWnNau/6ye9BdbAkn5a1O6attQ2/vOGniPT0DM4+dsIEXP71r5oST41nOO0vUJkFXk9/BC9tfBk7IzsxypTTh4ybh/KSCu32rWoDU+Cplqga8+gq8NoG3sbOgQ2ZECuCwzC2fJ4aoSowBQWetyEuXxPBw0+0Iu7fgZSvF/WNLTj9ww9gWOJM1A9c6m1zHq5OgechfAmWpsCTICS3W6TAc5s418uHgBcC747mNXi5e0dWe1dPOnLwdlp+9iXw8AMP4t+PPZ71hU994fOYcfAsZXHJLPD+teZxtJm3h7/7aaxtxHHTjlE2K10Go8CzL+nepQl0/jmGZJeBqpODqL2wBD6/ffV1qqSrwNvQuxQDyWhW1FOqjkLQx18EirD/KfC8T+HVtb1Y9UY3yoe/gVlHLMfw8AxUx99vvshC32+2FHje70uRO6DAEzkdj3qjwPMIPJcdkoAXAu+tvk78avMrSBqpwd54C+3Qm5QCT55/iRPmbeF/ffH/zGfOGJmmQ8EQzj/kXHmGYKf7JUCBZ8/GSLSksOUzvTB2PUFh8FN/eSmq3q/32xELpaurwNscXYHeRHsGm9+8fXZq1dGmnPAVipLn2UiAAs9GmCxlGwEKPNtQKlmIAk/JWIsbigKvOH482xkCXgi89CSt5kss1pi30TaEyrV4iUXSnDfW34HSylHwWbzUpLW1FTfd8LPMLbQTJk/G577yJd5C68y/EkVXffy1J7Gze2emDq/AKxqpEAUo8OyJoXdxHC0/688qVnl0CA1XlNqzgGZVdBV4A8lebO5biUSqf/C/qaPLZpqP4WjQLH1xx6XAEzcbnTujwNM5/dyzU+DlZqTdERR42kUuxcBeCTwp4NjUZMe2F7HltfsA8+qscPlwTFjwaZSU1VmqHon0YOXLr/AlFpaoeXMwn4HnDXenV6XAs4fwfq/A+7J5Bd4JvAKvEMK6Crw0K8MwMJCKIOwrg9+v7jNhC9kXXp9Dged1Alx/fwQo8LgvhiJAgcf9sQ8BCjxuChEJUOA5m0oyMYBXF10zKO/e/dSMmo9xsy92dmHJq8v8DLx80Bsd2+DrboExfBx8ldZkbj71eYz9BCjw7GO65zPwKs1bZ+suCdtXXLNKOgs8zaKWalwKPKni0qZZCjxtoi5oUAq8grCpfRIFntr5yjodBZ6zyfW2r8P6l27JWqSkYgQOOuoKZxeWvLrSAm/DS/Cte35XQj4fjFknAKOnSZ6Y+u17JfBSSKI3uNF8yYOBsvgYBAw+pF/93Zb/hBR4+bPike4RoMBzjzVXyp8ABV7+rHQ8kgJP
x9RzzEyBx00hIgEKPGdTMcwXdbz14s3o69qUWahx2tkYPu49zi4seXVlBZ55y5fv37cCycTuhCqGwTiKV2SKvmW9EHgpJNBc8RhigV0P6w8YFWiKnGz+b7nouFztL9Ubg7G9F76YeaVzjflG21GVpvDU42UGFHiubjUulicBCrw8QfEwVwlQ4LmKW7rFKPCki8z5hinwnGfMFawToMCzzszqGekXWLS8/W/EojtRM3IuakfNs1pCu+NVFngwBZ6PAk+6Pe2FwOsNvY2WsiVZrIb1z0Nt7GDp+DnVsJEykHrTFJyJ3Y8p8I+ogM/8R4cPBZ4OKcs3IwWefJnp0DEFng4pFz4jBV7h7JQ9kwJP2WiFGCyaSGBlRzd6zb/EHFRdibEVZXn1RYGXFyYe5DIBZQVemiNvoXV5N9mzHAWePRztrmJEY0it78wuWxZCYPIwu5fab72Nr+zEkjteQ09rP2adOBbv+fB0+APuXf1HgedKzFzEIgEKPIvAeLgrBCjwXMEs7SIUeNJG51zjFHjOsdW9cty8AuG+TVsRie++Le/E0SMwoTL3bVYUeLrvHjHnV1rgmcj5Egsx991QXXkh8FKIY3vl4xjw73kL7SnmLbT5/YJGPsrWO/byCry+7gH8/hNPIjGw++q/Yz41E4d8YLL1QQo8gwKvQHA8zVECFHiO4mXxAglQ4BUITpPTKPA0CdrKmBR4VmjxWCsEmqN9eGTrjqxTJlVV4PhRDTnLUODlRMQDPCCgusDzACmXLJKAFwIv3fK7L7GAP4WK2Hj4ESpyEvVO9+oZeBuWteDBa5dmAR2/oAHnXHuEa5Ap8FxDzYUsEKDAswCLh7pGgALPNdRSLkSBJ2VszjZNgecsX52rd8Xi+OvGrVkI5tXVYuHw2pxYKPByIuIBHhCgwPMAOpcckoBXAo+xiEsg2jWA2z7pzRV4SfMFJ23BVxEqSaIJsxHtDYoLip1pR4ACT7vIpRiYAk+KmDxrkgLPM/TiLkyBJ242KnT2SnsnlrXteg5QQ2kJTmkagRJ/IOdoFHg5EfEADwhQ4HkAnUtS4HEPWCaw5zPwph/XhGM+MRN+h9+Am0QMKyt+j0igeXCtUlRjdtenETaqLffPE0jACQIUeE5QZc1iCVDgFUtQ7fMp8NTOt6DpKPAKwsaTLBDoTyYRNV9iMawkjHwfoU2BZwEwD3WNAAWea6iFW6ht3UvYtORuxAeiGDXneIw78lwheuQVeELEwCZMAjtDK/F62V8HWaQFns/8D/7Y3hMxNnYM+ZCAEAQo8ISIgU3sRYACj1tiKAIUeNwf+xCgwOOmEJEABZ6IqbAnCjw990BfVwuW3/4NGKndLwWYespnMWLm0Z4DocDzPAI28A4BCjxuBdEJUOCJnpCe/VHg6Zl7vlNT4OVLSqPjKPA0CluiUSnwJApLo1Yp8DQKe49Rd6xZjHX/uiVr+FFzTsDkEz/hORAKPM8jYAPvEEiYt9CuqrwNEf+2wSvwylCDgwdvoa0iIxIQggAFnhAxsIm9CFDgcUsMRYACj/tjHwIUeNwUIhKgwBMxFfZEgafnHuAVeHrmzqmtE0i/xKI99CqCYb7Ewjo9nuE0AQo8pwmzfiEEKPAKoabPORR4+mSd96QUeHmj4oEuEqDAcxE2l8qbAAVe3qiUO3DwGXjP3ot4X4TPwFMuXQ5kN4HykgBCQT+6euN2l2Y9EiiYAAVeweh4ooMEKPAchKtAaQo8BUK0ewQKPLuJsp4dBCjw7KDIGnYToMCzm6i1egPdA4hs70bpsDJUNFRaO1nRo3kLraLBSj4WBZ7kASraPgWeosFKPhYFnuQBOtw+BZ7DgGUsT4EnY2rq9+yUwNu05nWsX74C5TXVOPjY96C8ulp9mJzQNgIUeLahtFyoe3Mntj2/yXyRhDF47vBZIzHi4FGW64h6Qm+sH79b+Q88u+01TKodjc/OPR0TakbmbJcCLyciHuABAQo8D6BzyZwEKPByIuIBHhCgwPMA
ukRLUuBJFJZbrVLguUWa61gh4ITAS8u7p+78c6aNquF1OOurX0AoHLbSGo/VmAAFnnfhb3hiLfrbopkGfObtedPOORg+n8+7pmxc+caXHsDjb7+cqTiyYhh+d/JX4Pf5h1yFAs/GEFjKNgIUeLahZCEbCVDg2QiTpWwjQIFnG0olC1HgKRlrcUNR4BXHj2c7Q8AJgbfoz/diw4pVWQ2f9oXLMGL8OGeGYFXlCFDgeRfp3gLPbwq8qR+YhfT/qvD57GM3YktPa9YovznpSxhb3UCBp0LAms1AgadZ4JKMS4EnSVCatUmBp1ngFselwLMITIfDKfB0SFm+GZ0QeC8/+gRWPrUoC8YHr/gyahrq5QPEjj0hQIHnCfbBRfe+hbZ+5kg0zFbnFlpegefd3uLK9hOgwLOfKSsWT4ACr3iGrGA/AQo8+5mqVJECT6U0bZqFAs8mkCxjKwEnBF5/by+euP0utG7aAn/Aj3knHY85xx9ra98spjYBGQXeQG83tqx+ASUV1WiauRA+v7xXrL37Eosy8yUW5Yq9xCL9DLxfr3gYLza/MfgMvM/MPQ0Ta3ILSt5Cq/b3HFmno8CTNTm1+6bAUztfWaejwJM1OXf6psBzh7NUq1DgSRWXNs06IfDS8AzDQOeOnSirqkBpRYU2PDmoPQRkE3iR9hY8efP3EItGBgE0TJyBYz/1Xaklnj1JqlOFAk+dLFWahAJPpTTVmYUCT50sVZqEAk+lNO2fhQLPfqbSV6TAkz5CJQdwSuApCYtDuUZANoG38p9/wRuLH8ri877PfR/14w5yjRkXcpYABZ6zfFm9MAIUeIVx41nOEqDAc5YvqxdGgAKvMG66nEWBp0vSFuakwLMAi4e6RoACzzXUXMgCARUEXvoKvBGTZ1mYmoeKTIACT+R09O2NAk/f7EWenAJP5HT07Y0CT9/s85mcAi8fSpodQ4GnWeCSjOuUwDPM+dv7Yygz31xZHgxKQoNtikJANoG39y209ROm4bjLruIttKJsKBv6oMCzASJL2E6AAs92pCxoAwEKPBsgsoTtBCjwbEeqVEEKPKXitGcYCjx7OLKKvQScEHj9ySQe3rIDzdG+wWaPbKjDwvph9jbOakoTkE3gpcNIv8Ri08rnzec+1qJxxgLzBS4U1yptUgo8ldK0Z5a1eA6LAr9Dv68XC5Jn4SjjEnsKW6hCgWcBFg91jQAFnmuouZAFAhR4FmBpeCgFnoah5xqZAi8XIX7dCwJOCLzndrZjWWtH1jiXThqHYSUhL0bkmhISkFHgSYiZLVsgQIFnAZYGh7ZhM/4QvAwpJDPTnp68EgcbJ7k6PQWeq7i5WJ4EKPDyBMXDXCVAgecqbukWo8CTLjLnG6bAc54xV7BOwAmB97dNzdjUG81q5uSmkTioutJ6gzxDSwIUeFrGLvTQFHhCx+N6c8v9D+Mx/41Z684zTsfJya+42gsFnqu4uVieBCjw8gTFw1wlQIHnKm7pFqPAG8HX/wAAIABJREFUky4y5xumwHOeMVewTsAJgbe+pxePbNmeaaYqFMTFE8ciHPBbb5BnaEmAAk/L2IUemgJP6Hhcb45X4LmOnAtKRIACT6KwNGqVAk+jsAsYlQKvAGiqn0KBp3rCcs7nhMBLk9gYieLVzh6UhwJYOLwWFXyRhZwbxKOuKfA8As9lD0iAAo+bY28CfAYe9wQJ7J9APgIvFTGfkxyNwT+ihhhJwBUCFHiuYJZ2EQo8aaNzrnEKPOfYsnLhBJwSeIV3xDNJAKDA4y4QjYCuAm9LywDe2jyAiU0lGDeqRLRYtO+Ht9BqvwWEBJBL4PU/uhyxRasHew9OHInSj74Pfj4nWcgsVWqKAk+lNO2fhQLPfqbSV6TAkz5CJQegwFMyVumHosCTPkLlBtBR4C1Z3oP7n2rPZPmB44bh2EOqlctW5oEo8GROT93ehxJ4yW3t6L3pkazhw6fMR+mxB6sLhJMJQYACT4gYhG2CAk/YaLxrjALPO/Zc
+cAEKPC4O0QkQIEnYip696SjwLvmt1vR05vIBF9VEcS1n20SaiO0+KJo9fWjyahEjREWqjc3mqHAc4My17BKYCiBF1v6JvofXJpVMjRnAsouOtrqMjyeBCwRoMCzhEu7gynwtIs898AUeLkZ8Qj3CVDguc+cK+YmQIGXmxGPcJcABR4gmsB7KdCKNf62wY2QfkXS0clGjE9VubsxPF6NAs/jACRf3ogPIPbkPUi8tQL+hiaUHHee+Uy6cUVPNZTAM/rjiPzibzB6zGfgpT8+H8o/dSKCk0YVvS4LkMBQBCjwuD+GIkCBx/2xDwEKPG4KEQlQ4ImYCnuiwOMeEI2AjgJv71tozzm+Du+dL4YgS8DAPeF1SBqpzFYZbpTh9ETx8kG0vTdUPxR4MqUlXq8DT/0V8eVPZRrzVVSj4tM/Mo14oKhmcz0DL9UeQeyZ12BE+hE6dCqCUyjvigLOk/MiQIGXFyZtD6LA0zb6Aw9OgcdNISIBCjwRU2FPFHjcA6IR0FHgpTMQ9SUWcaRwd2itqfF2fyjwRPu3hv0MRSBlGNjZ1YdhlaUIB9PXkLr/id7+faTad2QtXP6xa+AfXpxQyyXw3J+UK5IAQIHHXTAUAQo87o99CFDgcVOISIACT8RU2BMFHveAaAR0FXii5bBnP8vNW2hX7XEL7TGJJowzn4Wn04dX4MmZ9vbOKO57fj16ojGUhAM485AJmDK6xvVhvLoCz/VBuSAJmAQo8LgNKPC4BywRoMCzhIsHu0SAAs8l0FzGEgGdBV48lcTSlo3YGu3C1OoGzBveBL/5jCDZPs0J8y+oPRuwMdGDWeE6nFs1EVX+kGxjZPqlwBMzOr7EIoCQefVWV29czIDY1X4J/GnRm9jaFsl8rbIsjMtPdf8trF48A49bggS8IkCB5xV5OdblFXhy5ORqlxR4ruLmYnkSoMDLE5Tkh7W9sQlvPbgY/Z0RNB4xC5PPeA98AW9u2ckHpc4C7/82rMDrnbtvaTpy5ESc0Dg1H2xCHfPD9uXYHo9melpY2oCP1hwkVI9WmqHAs0KLx7pFgFfguUXa3nX++6EVGIgns4p+8bTZqCiV95ccew7DW2jt3S+sZg8BCjx7OKpahQJP1WSLmIsCrwh4PNUxAhR4jqEVpnCstw/PXP17pGK7r9CYes6xGPe+Q4Tpce9GdBV46avvbljxlPlcr91P9qovrcBnZ7xH2Kz211h3Kobv7nwx60tVgTB+VH+oVHPs2SwFnrTRKd04BZ6c8f579TYsfXN7pvlpjbX44BGT5BxmP11T4CkTpVKDUOApFaftw1Dg2Y5U/oIUePJnqOIEFHgqppo9U9uaDXjltw9k/WHdjPGY//lzhR1eV4GXDuTG1YvRE+/PZDOmchg+NlU+8cUr8IT914uNKUSAAk/OMNMvsFi+vhUbWrrROKwCh0xpQEmwuDe/ikSCAk+kNNjLuwQo8LgXhiJAgcf9sQ8BCjxuChEJUOCJmIq9PcUiUTxzTfoKvESmMK/As5exndXe6m7F/RtXYSARR3W4DOdPnIvR5dV2LuFKLT4DzxXMXERzAhR4mm8AQcenwBM0GM3bosDTfAPkGJ8Cj/uDAo97QAoCFHhSxFR0k3s+A2/UoTMw9eyj4fPzGXhFg3WoQMy8lba9vxcjyqqkfIGFQ1g8LctbaD3Fz8UPQIACj1tDRAIUeCKmwp4o8LgHhiJAgcf9QYHHPSAFAQo8KWLSrkmdb6HVLmxJBqbAkyQozdqkwNMscEnGpcCTJCjN2qTA0yxwi+NKKfAu/Nx1eH3tRsDnGxy3urIcix/45eD/f1tbn0UEPHxvAryFlntCRAIUeCKmwp4o8LgHRCNAgSdaIuwnTYACj/tARAIUeCKmwp4o8LgHhiIgpcA7/cPfwo3XfRFTJjbtMxsFXvEbngKveIasYD8BCjz7mbJi8QQo8IpnyAr2EqDAs5cnq9lDgALPHo6sYi8BCjx7ebKaPQQo8OzhqGoVKQXesed8
Gffccg1GNdRR4DmwMynwHIDKkkUTUFXgGdEEkut6gbiBwIQy+IaXFM2KBdwjQIHnHmuulB8BCrz8OPEodwlQ4LnLm6vlR4ACLz9OPMpdAhR47vKWbTUpBd7891+GYw6fg+Wr16K+rgZfuew8HHPE3EH2vAKv+C1IgVc8Q1awn4CKAs+IpZBY1AqjP7kLmPlUgOARdfDXU+LZv4OcqUiB5wxXVi2cAAVe4ex4pnMEKPCcY8vKhROgwCucHc90jgAFnnNsVagsrMB7fd0mJJLv/KX2HdKhYBBTJ47BVTfchlPedziOXDgTi55dgW/96BY89McfD16R19o1oEIuns5QWhJAwHy+YG9/wtM+uDgJ7EmgqiKEgYEkYomUMmASW/owsKwja57ghHKUzKtVZkbVBxleXYL2ngEYhuqTcj5ZCFSVh1AS8qMnGsdAXJ3vl7LwZ5/7J1AaDiAY8CHSx58tuUfEIVBpfr9MmD9X9sey/84pTofsREcCddVhdEbiSKX4w6WO+eeaub7G3gs9fIb5ybVoPl//3k9uQ19/toyrra7EVV/9yD6nf/yr/4VzTz8WZ5x4pFJ/uc+HkxPHmD9fDb4gJMlvGk7gZc0CCQT9PqTMby8qbct46wA6Ht+RRaRibi0qZlYXSImnuU0gFPQP/vBvy3/43G6e6ylJIC1J/OZ/wxPJ9PdL7kwlQ5ZwKPM/4eaPlvzZUsLolG5ZxZ8tlQ5Mk+FCAfNnyyR/ttQkbstjhs2/e9j5sU3gHaipaN8A1m7YgrkzJ2cOufTyH+LD570fJx93KG+htSFN3kJrA0SWsJ2AirfQpiElX+sxn4EXGeTlqw8jeGgdfMFdb9jmR3wCvIVW/Ix069DuW2g3R5dic99SVARGYlr1qSj18xcMuu0pO+blLbR2UGQNuwnwFlq7ibKeHQR4C60dFNWtIewttAdC3tkVwUkXfgM3/uByHLXwYDy9dCWu+MFv8cif/gvDh1VT4NmwVynwbIDIErYTUFXgDYIyn4FnmC+x8FUFbefGgs4SoMBzli+rWydgp8Db0LsYL7b/LtNEbXg8Thx5LfwIWG+MZ2hNgAJP6/iFHZ4CT9hotG6MAk/r+HMOL53AS0+06LkV+Nlv7kZLWyeaRtXjyi9chCMWzBwcli+xyJl5zgMo8HIi4gEeEFBa4HnAk0vaQ4ACzx6OrGIfATsF3uKdN2B7/6qs5k4Z9RNUhxrta5iVtCBAgadFzNINSYEnXWRaNEyBp0XMBQ8ppcAbaloKvIL3QuZECrziGbKC/QQo8OxnyorFE6DAK54hK9hLwE6Bt6z991jfuyjToM98VfZZTb9Gib/S3qZZTXkCFHjKRyzlgBR4UsamfNMUeMpHXNSAFHhF4VPzZAo8NXOVfSoKPNkTVLN/Cjw1c5V5KjsFXm+iFUvafoGu2GbzBQRBzK+9GFMqT5IZD3v3iAAFnkfgueyQBCjwuEFEJECBJ2Iq4vREgSdOFsJ0QoEnTBRsZA8CFHjcDiISoMATMRW9e7JT4KVJGkYKnfEtqAzWI+Qv1xsupy+YAAVeweh4ooMEKPAchMvSBROgwCsYnRYnUuBpEbO1ISnwrPHi0e4QoMBzhzNXsUaAAs8aLx7tPAG7BZ7zHXMFHQhQ4ImVcl+0DQMDEdQOGy9WYy53Q4HnMnAulxcBCry8MGl7EAWettEfeHAKPG4KEQlQ4ImYCnuiwOMeEI0ABZ5oibCfNAEKPHH2wfPP/hKvrrp/sKHGpkNw/MnXIxwqE6dBFzuhwHMRNpfKmwAFXt6otDyQAk/L2IceWnaBt71lJ2774914fe1bmD1zOj7ziUtQU1XFpCUnQIFnb4AJJPFUyfN4NfQWapPVOGHgcIxNjbZ3EQ2qUeBpELJkI1LgSRaYJu1S4IkR9Pbtq/CPv30xq5mFh38Gc+ZdJEaDLndBgecycC6XFwEKvLwwaXsQBZ620R94cNkF3pVX/Qhvvb0xM+DCBXPx7a9+nklLToACz94A
nw4vw5Lwy5mipUYJvtB7EcLm//GTPwEKvPxZ8Uh3CFDgucOZq1gjQIFnjZdTR69c/mcse+F3WeUnTT4ex514tVNLCl2XAk/oeLRtjgJP2+jzGpwCLy9Meh0ks8Dr6OrCZV/8pvnQbSMTWnl5Gf50y//oFaKC01Lg2RvqHWUPojnQklX0w9EPYExqpL0LKV6NAk/xgCUcjwJPwtA0aJkCT4yQ+/o7cf/dHzaff9cz2JDP58NpZ96IkaPniNGgy11Q4LkMnMvlRYACLy9M2h5Egadt9AceXGaBl56KV+Cpuakp8OzNde8r8ErMK+8uj1zMK/AsYqbAswiMhztOgALPccRcoAACFHgFQHPolK7OzVi96l7EB3oxY9bZ2sq7NF4KPIc2GcsWRYACryh8yp9Mgad8xNYHlF3gpZ+Bd+vtf8ba9W/zGXjW4xf2DAo8e6OJI4F/lyzlM/CKxEqBVyRAnm47AQo825GyoA0EKPBsgMgSthOgwLMdKQvaQIACzwaICpegwFM43EJHk13gFTo3zxObAAWe2Pno2h0Fnq7Jizs3BZ642ejcGQWezumLOzsFnrjZ6NwZBZ7O6eeenQIvNyPtjqDA0y5yKQamwJMiJu2apMDTLnLhB6bAEz4iLRukwNMyduGHpsATPiItG6TA0zL2vIemwMsblT4HUuDpk7VMk1LgyZRWYb2+8cYOrFi+CSnzJTRz543FjBmjCyvk4lkUeC7C5lJ5EaDAywsTD3KZAAWey8C5XF4EKPDywsSDXCZAgecycMmWo8CTLDA32qXAc4My17BKgALPKjG5jt/R3IWHH16Z1fSpp89BY2ON0INQ4DkTj2EkYaQ2w+dvMN+SWOHMIopWpcBTNFjJx6LAkzxARdunwFM0WMnHosCTPECH26fAcxiwjOUp8GRMTf2eKfCcz7ilJzm4yIiqgPOL7bXCilc2Y9mLb2f96ey5Y3DYYRNd78XKghR4Vmjld2wqtQPx2I9hJDfCQBDh8OcQCB2f38k8ChR43AQiEqDAEzEV9kSBxz0gIgEKPBFTEacnCjxxshCmEwo8YaJgI3sQoMBzbjukDODWxb14dm3/4CJHTC7BZ4+rhN/n3Jp7V97e3IlHHl6V9cennT4boxtr3WuigJUo8AqAluOU2MB/I5VYnDkqLfFKy/9kXolXav9iClakwFMwVAVGosBTIEQFR6DAUzBUBUaiwFMgRAdHoMBzEK6spSnwZE1O7b4p8JzL94X1A/jVU5GsBS4/vhKHTSpxbtH9VN7zGXhz5ozBzFmNrq5fyGIUeIVQG/qcWN/lSKW2Zh0UKv0xAoHp9i+mYEUKPAVDVWAkCjwFQlRwBAo8BUNVYCQKPAVCdHAECjwH4cpamgKv8OQSb+/EwL9WmVeKAOFT5iI4bnjhxXhmFoH0X0r7Ygn0x1IkYzOBe16I4pGVfVlVT59Thg8dVm7zSuqVo8CzP9NE/B9IxH6XKewPHISwKfAAv/2LKViRAk/BUBUYiQJPgRAVHIECT8FQFRiJAk+BEB0cgQLPQbiylqbAA1JdcSTWReGrDSI0yXyAeh63Eia3daD7qr8CA4ld0ZeEUH39+QiMFvsWQFn2KQWec0k1dyZx9d+6MBA376U1P+GgDz84uwaja91/Fp5zUzpTmQLPGa7JxH+QTCw1X2IxGsHQuXyRhQXMFHgWYPFQ1whQ4LmGmgtZIECBZwEWD3WNAAWea6ilXIgCT8rYnG1ad4GX2NaH3ru3wXjnSq/QzCpUnDUqJ/T+v7+Mvnufzzqu/OPHouSEWTnP5QG5CVDg5WZUzBEb2xJ4fE0/0s/DO/ngUowfHiymnDbnUuBpE7U0g1LgSROVVo1S4GkVtzTDUuBJE5VWjVLgaRW35WEp8CwjU/8E3QVe9G/bEXutJyvoqsvGIzA8PGT4saXr0HvTY1nHVHzx/QgfPkX9TePChBR4LkDmEpYJUOBZRsYTHCZAgecwYJYviAAFXkHYeJLDBCjwHAbM8gUR
oMArCJs2J1HgaRN1/oNS4BUm8Azz0qXeXz2G+AtvDcIOv3cayj99vHkLWB733+Yfj7ZHUuBpG73Qg1PgCR2Pls1R4GkZu/BDU+AJH5GWDVLgaRm78ENT4AkfkacNUuB5il/MxXUXeIkt5i209+6+hTY8owrlH8h9C+27aSZ3dpvPa/LBX18lZsCSdkWBJ2lwirdNgWdfwNE3V6LrmUcRqKrBsPedjdDwkfYV16gSBZ5GYUs0KgWeRGFp1CoFnkZhSzQqBZ5EYXnQKgWeB9BFX1J3gZfOp5CXWIieq+z9UeDJnqCa/VPg2ZNrdO0qbL7pu4Cx60Uqgeo6TPj2LxGsrLFnAY2qaCXwzO2SfGUAaE/BPz8MXx1fvCPqVqfAEzUZvfuiwNM7f1Gnp8ATNRkx+qLAEyMHobqgwBMqDjbzDgEKPG4FEQlQ4NmTyo57bkbnkkezijV+7ApUHXKMPQtoVEUngRe7pRvJF/oR708gGTJQ/s1hKJlRrlHa8oxKgSdPVjp1SoGnU9ryzEqBJ09WXnRKgecFdcHXpMATPCBN26PA0zR4wcemwLMnoLYn7kPr3+7IKjbu6z9D2YRp9iygURVdBF6qOYn+77Yh0h5FMpEaTLi7KYbxP5+I0soSjRKXY1QKPDly0q1LCjzdEpdjXgo8OXLyqksKPK/IC7wuBZ7A4WjcGgWexuELPDoFnj3hJPv7sO3WHyB9K236U3fSeWg466P2FNesik4CL3LFDvR29mcSbm+IouSz1Zhy+DjNUhd/XAo88TPSsUMKPB1TF39mCjzxM/KyQwo8L+kLujYFnqDBaN4WBZ7mG0DQ8Snw7A0mtn0z/BVVCFbV2ltYo2q6CLx0pG3XNSP2Qt9gukl/Cm/M3YnxZzRR4Am43ynwBAyFLYECj5tARAIUeCKmIk5PFHjiZCFMJxR4wkTBRvYgQIHH7SAiAQo8EVPRuyedBF4ynsKKn7+K+PYYOob3IVAfxDEfmY+SirDem0DA6SnwBAyFLVHgcQ8ISYACT8hYhGmKAk+YKMRphAJPnCzYyW4CFHjcDSISoMATMRW9e9JJ4KWTTj//bvvaViRMmdd40HCESkN6bwBBp6fAEzQYzdviFXiabwBBx6fAEzQYQdqiwBMkCJHaoMATKQ328i4BCjzuBREJUOCJmIrePekm8PROW57pKfDkyUqnTinwdEpbnlkp8OTJyotOKfC8oC74mhR4ggekaXsUeJoGL/jYFHiCB6RhexR47oUej0QQ7+pC2ejR8Pn97i3s8Upvt7+ExetuQXf/TsxuPBVHT/4k/L7AkF1R4HkcGpffLwEKPG4MEQlQ4ImYijg9UeCJk4UwnVDgCRMFG9mDAAUet4OIBCjwRExF754o8NzJf8d/nsLG+/8KI5FAedNYTL/8ywjV1LizuIerRONduOWZDyGRHMh08b6pn8fCcedT4HmYC5cujAAFXmHceJazBCjwnOUre3UKPNkTdKB/CjwHoLJk0QQo8IpGyAIOEKDAcwAqSxZFgAKvKHx5nRzv6sTy71wJwzAyx4885jhMuPCSvM6X+aD1rUtx34pvZY0wYfihOH/eDRR4Mgerae8UeJoGL/jYFHiCB+RxexR4Hgcg4vIUeCKmwp4o8OzdA75oP4Lrt8EXTyIxbgRSw9W/csRegruqUeA5QZU1iyFAgVcMvfzO7VyzCm/c/MusgysmTMTBV34nvwISHZVIpfDYhlexuq0Z0+tG4rixY3DbcxfzCjyJMmSrByZAgcfdISIBCjwRUxGnJwo8cbIQphMKPHuiiPQCUfOfhgbA57Onps5VKPBsTD8WR9mSlUB/bFdRc38OHDaTEq8AxBR4BUDjKY4SoMBzFO9g8ZR52+zqn1yPvq1bM4tN/tgnUX/YEc4v7vIKt65YgifffiOz6vHjD8LJ48ozz8CbOeokHDf1s+Yz8IZ+BiCfgedycFwuLwIUeHlh4kEuE6DAcxm4ZMtR4EkWmBvtUuAVT3nJEuCpp3b9
MNvYBFxycRJlZbR4xZClwCuGXva5gW2tCL+yNusPE+NGIn7wJPsW0aQSBZ4mQUs0JgWeO2HFIz1ofvxfGGhtRd3CQzF8/iHuLOzyKp/+11/Q1R/NrBoOBHHn6R8xhZ21n2ko8FwOjsvlRYACLy9MPMhlAhR4LgOXbDkKPMkCc6NdCrziKLe1ATffnP2b6Pe+Fzj++FRxhTU/mwLPvg3g6+xB6bOrswrGpo9DcpJpm/mxRIACzxIuHuwCAQo8FyBrtMTXnroPW3s6MxM3VdXiF8efa5kABZ5lZDzBBQIUeC5A5hKWCVDgWUam1QkUeFrFnd+wFHgwH0wNtKwYQH97CqMWhFFSG8gPnnnU6tU+3H9/9m+mJ082cMklux92nXcxHpghQIFn72YIvbEJwbd23f6VqqvGwMIZQHDoW6Ds7UCNahR4auSo0hQUeCql6f0sr7U242cvPoVIrB+V4VJ849DjMaN+tOXGKPAsI+MJLhCgwHMBMpewTIACzzIyrU6gwNMq7vyGpcADlt3YgZ2rdj0fLFjux+HfqEX1uFBeAAcGgFtv9aOjY/fhH/pQCtOm5XU6DzoAAQo8B7bGQNx8iUUCRmWZA8X1KEmBp0fOMk1JgSdTWnL0Gk8lsaWnA40VtSgJBgtqmgKvIGw8yWECFHgOA2b5gghQ4BWETZuTKPC0iTr/QXUXeJHmBJ6+yrwPdo/P6ENLMe8z+b+ls6cHWPKMH70RA/PmAVOm8Oq7/Hfg/o+kwCuWIM93ggAFnhNUWbMYAiIJvGg0hv+99wUsW74J48bW4dILD8e4McOKGY/nSkqAAk/S4BRvmwJP8YAlHY8CT9LgXGqbAs8l0DItQ4G3r8BrOqoMcz5RLVOMyvVKgadcpEoMRIGnRIxKDSGSwLvjrufw78W732BaN7wSP//hufD7rb0AoZCAWto78PCiJWg2XzIxc9JEnPreI1FaUlJIKZ5jAwEKPBsgsoTtBCjwbEfKgjYQoMCzAaLCJSjwFA630NF0F3hpbi/d1Dn4DLz0J1Dqw2Ffq0XtpHChSHmeDQQo8GyAyBK2E6DAsx0pCxZJQCSB962rH0Dz9q6siX587QfRODr/K9oLwZFKGbjxrr+go9u8HP6dz4IZ03D2CccVUo7n2ECAAs8GiCxhOwEKPNuRsqANBCjwbICocAkKPIXDLXQ0Crzsl1iMnBdGaV3+L7EolDvPG5oABR53iIgEKPBETEXvnkQSeHtfgddQX4kbrnf+Cryd7Z246X/vydoI9cNq8aVLPqT35vBwego8D+Fz6QMSoMDj5hCRAAWeiKmI0xMFnjhZCNMJBZ4wUbCRPQhQ4HE7iEiAAk/EVPTuSSSB59Uz8HgFnnj/DlDgiZcJOwIo8LgLRCRAgSdiKuL0RIEnThbCdEKBJ0wUbIQCj3tAcAIUeIIHpGF7Igk8L/HzGXhe0t93bQo8sfJgN7sIUOBxJ4hIgAJPxFTE6YkCT5wshOmEAk+YKNgIBR73gOAEKPAED0jD9ijwNAxdgpEp8CQIScMWKfA0DF2CkSnwJAjJwxYp8DyEL+rSFHiiJqN3X7yFVu/8RZ2eAk/UZPTtiwJP3+xFnpwCT+R09O2NAk/f7EWenAJP5HS8740Cz/sMhOuAAk+4SDxvyDBi8CdfgpHaDvgnAcE5Zk8+V/uiwHMVNxfLkwAFXp6geJhrBCjwXEPNhSwQoMCzAIuHukaAAs811FzIAgEKPAuwNDyUAk/D0HONTIGXi5AmXzcMBHf8C8HWpxEfVYJURWlG2hmho0yJ9x5XQVDguYqbi+VJgAIvT1A8zDUCFHiuoeZCFghQ4FmAxUNdI0CB5xpqLmSBAAWeBVgaHkqBp2HouUamwMtFSI+vhzfehZINv0cqEMDAtIkwguVIlY3dNby/DkbJJ10FQYHnKm4ulicBCrw8QfEw1whQ4LmGmgtZIECBZwEWD3WNAAWea6i5kAUCFHgWYGl4KAWehqHnGpkCLxchPb5esexT8EfW
mcMa6Js+xZR2PiQrzf/1BWD4JwIl57kKggLPVdxcLE8CFHh5guJhrhGgwHMNNReyQIACzwIsHuoaAQo811BzIQsEKPAswNLwUAo8DUPPNTIFXi5Ceny9bOU3EWxfOjhssroSA02jkKqeBsNXDSN8Lnz+eldBUOC5ipuL5UmAAi9PUDzMNQIUeK6h5kIWCFDgWYDFQ10jQIHnGmouZIEABZ4FWBoeSoGnYei5RqbAy0VIj6/7e99G2apvwt+/A0agBH0HfR3JhrnmlXgNJgC/6xAo8FxHzgXzIECBlwckHuIqAVkFXiqRQttLrejd0ouq8VWomz/cvODb3ZcluRqUZotR4GkWuCTjUuBJEpRmbVLgaRa4xXEp8CwC0+FwCjwdUs5zxlQS/t63zGffjTFfWlGe50nOHEaB5wxXVi2OAAVecfzBEgC7AAAgAElEQVR4tv0EZBV4mx/ehM5V7Rkgwxc2oPGkJvsBsaInBCjwPMHORXMQoMDjFhGRAAWeiKmI0xMFnjhZCNMJBZ4wUbCRPQhQ4HE7iEiAAk/EVPTuSUaBZ6QMvPqLVUjFU5nwghUhzPjSLL3DVGh6CjyFwlRoFAo8hcJUaBQKPIXCdGAUCjwHoMpekgJP9gTV7J8CT81cZZ+KAk/2BNXrX0aBl07hjd++hljHQCaQspHlmPKJg9QLSNOJKPA0DV7wsSnwBA9I0/Yo8DQNPs+xKfDyBKXTYRR4OqUtz6y6CjyjfQCpLX0wEuaTB0eG4W/y9lZmeXbM0J0a5tuV1/Y8hK39z6I+PAPTqy5AyF9meTwKPMvIeILDBGQVeD1v92DzgxuR7EsgVBnG2A+MQ8W4SodpsbxbBCjw3CLNdawQoMCzQovHukWAAs8t0nKuQ4EnZ26Odk2B5yheFi+QgI4Cz+hLIrmqE6Zrynz8kyrM94iUFkjR+dOinV145dEn0Nfdg4OOOhxjD57h/KIFrLCq60680nlr5szRZYfixBH/Y7kSBZ5lZDzBIQLJNzuQfK0DdUeMRvXMOnREYugbSDq0mjNlk+YttLH2fpQML4M/yBdYOEPZm6oUeN5w56pDE6DA4w4RkQAFnoipiNMTBZ44WQjTCQWeMFGwkT0I6CjwUjvMq+/ejmTtA9/IUgQmVAi5N+J9/bjvBzcg0rb7QfQnfu6TmDBvtnD9/n3bxeiKb8zq67wxf0dZYLilXinwLOHiwQ4RiP91HWJ/fH2weiAYQMM3FyBxbKN0As8hPNKXfcuMdtN6P6bPTmH0WDnHocCTMzfVu6bAUz1hOeejwJMzN7e6psBzi7RH6yQTSbz62EpsXrUJo6c3Yvap8xEMB4fshgLPo7C47JAEdBR4sl2Bt3n1a/jXTbdk5Thp4Xwcf9lHPdndbRtiaF7Vh5EzStEwtSSrhydbvoFtfc9l/izoL8eFY/4Fn89vqVcKPEu4eLATBMwXQEQ/+gSMzl3PjwsE/CgZX4Xy3x5HgecEb5drPv6gHw/dHRhc1WdelHjp5xM49Og9Lst2uZ9Cl6PAK5Qcz3OSAAWek3RZu1ACFHiFktPjPAo8xXP+z61PDAq8dz8HHTsTJ37xlCGnpsBTfFNIOp6OAi8dlUzPwOva0YK/Xv2jrB0279STsPDs013fdW88FsGzv26F8c7fcw/7RB0O/kB1po/O2Nt4cufXEU1sRzhQiSPqvo3x5cdZ7pMCzzIynmA3AVPg9V7wKPDO7bIUeHYD9q5eynwp7xUfDyG++90eGNkIfPcXce+asrByLGpg80oDsV5g3Ew/GqeE0NUrR+8WxuShEhOgwJM4PIVbp8BTOFwbRqPAswGiyCXu+NQtiHaaPzm98wmUBHHZny6H33/gq0wo8EROVN/edBV4siX+yj8fx7IHHxlsu2HieJzypc+gpNz9F2/c/4Wt6Nyy+y+KZbVBXHTnmCycKSTRGXsLVcFx5gssCnuuIAWebDtUzX7jD65H7LZXB4dL30I74juHIP7e0bwCT/K4ZRZ4KfPx
i8v+msDAO0+B8Pt9OOS0MEpHyfVcRsm3ENvPQYACj1tERAIUeCKmIk5PFHjiZOFIJ/decRdaN7RkateMqsUlv/rEkGtR4DkSBYsWSYACr0iALp7e19ODvp4Iho0eZd7y5c2D6PcWeKU1AVx4xxjzlxf29kOB5+LG4lJDElDhJRaMeF8Cst5C270jhRUPmZcQvvNJf+9tnBrAxKOZMgmIQ4ACT5ws2MluAhR43A1DEaDAU3x/bHt1Cx77738g2hFBaVUZTvrq6Rg7ZxwFnuK5qzgeBZ6KqTo3U65baO1amQLPLpL7r7OmdxFW9/8HfsOHBZWnY3LpIc4uqED19PfKspKAlG+hVQC/IyPI+BKLgYiBF+9JZh5jkBZ4E+cH0Thfvuf3ORIqiwpBgAJPiBjYxF4EKPC4JSjwNNgDKfNBT80dUQyrKEG5eZvsnp9kPIn2La2obaxDqCSUkwavwMuJiAd4QIACzwPoki+ZeYnFdPMlFgdlv8TCrtEo8OwiuW+dLQOv4rHO7JeinFX3ddSHhv4llHMdyVGZAk+OnHTocvPKJDYtM5C+FbhmpB+HnhnGABI6jC70jPG+FLYt7hrssenoagTLd70kRccPBZ6OqYs/MwWe+Bl52SGvwPOSvk1rt0f6cNNjq7G1PYKA+RvOCw6fivfNaiq4OgVeweh4ooMEKPAchMvSBROgwCsYXc4TX+z5O1ZFn8w6bmHlGZhTcVLOc3U+gAJP5/TFmz3eb77EIgrUjwoiHPLzJRYeRxSLJLHoi+sQ2bLrzShVY0twzI2TEa7K/uW/x226tjwFnmuouZAFAhR4FmBpeCgFngKh//Hp17HkjebMJGmJ918XHoma8sKuOKHAU2BTKDgCBZ6CoSowEgWecyHu9wq84d9AfXCsc4sqUJkCT4EQFRyh3LytOxSkwPM62vUPtWHFL7dmtTH3S02YdOZwr1vzZH0KPE+wc9EcBCjwuEWGIiC0wGvv7MG3fngLtu/swN/v+GFmjs3bWnDVDX/AG+s2oXFUPb775Q9jweypg1/f1tanXeJX/99SbO80f725x+dLJ8/BwWML+48xBZ52W0iKgSnwpIhJuyYp8JyNfM9n4M2rOBlTyw53dkEFqlPgKRCigiNQ4IkRKgVedg4UeGLsS3aRTYACjztCSoHXG+3HRZ+7DsceOQ+Lnl+RJfA++uUf4/j3LsCl55yEZ5etMWXebXj8np+bv9kLaCnwnnmzGXcufj2Tc2NdBa7+4KHwF/j2Rwo8ftMQkQAFnoipsCcKPIf3QCIKo2054A/DV78A8On7rKZ8SVPg5UuquONau/vxjxXNiA4kcdyMBkxvqimuoOJnU+CJEXCsJ4HFX34LPZt33UJbOaYEx940BeFKPb+3UuCJsS/ZBQUe90D+BIS9Ai/a14/W9q7Bf77/8zszAq+toxunXHwlnnv4ZgQDu/5jc95l1+DKz1+Ew+ZP11LgpRksW9+CF95qwciaMrx/zlhUlYbz3wV7HUmBVzC6fU6Mmz/Y9/bEUFNXCp95azM/hROgwCucHc90jgAFnnNsEeuCseLHMMz/3fU3zXHwzb7C/F6q57Oa8iVNgZcvqcKP6+mL4+r/W2U+zy02WCT9+9IrzphBiTcEUgq8wveb3WcmoklsXdINmC8E5ksswoiZL/tLi3h+SEAUArwCT5QkxOxDWIH3Lq6XV72ZJfBeXrUW1/3iTjx4+/UZol+/9tc4fMFMXHDmcdoKPDu3FwWePTTffq0drzy9BUbSQPXwMhxxygRUVBcuVu3pSt4qFHjyZqdy5xR4zqVrbHoYxuZHshbwzbwcvmGznFtUgcoUeM6HuHRdG255Ym3WQsfOHImPHjPR+cUlXYECT9LgFG+bV+ApHrCk41HgSRqcS217KvDSV9M1t7TtM2qT+Vy7YTVVg3++t8B7dtlq/PL39+Hu316TOe97P7kNB00ag4+cfzLiCfNd9fwURcBvXimWvlYsmTJ/NcdPQQTisRT+/Kvl
SJny7t3P5Bl1OPaMSQXV40lAIOBDyvzX2zC4L7kfxCEQNB/KnuB/dxwJpG/9PxBde39W7ar5n0d4hHkrLT8HJJD+Xpl+hEbS/O9Pit8vHdkpa7d148r/XZFV+yPHTMAHD+MLVg4EnD9bOrIVWbRIAunvl+lvkyn+nadIkjzdTgLBgN/8e3hqcG/yQwJ7E0i/EMrOj8/8y3XeW+3ppSvx4KNL9ln/grPeh8Pnz/j/9s4ETM6qyt+nqvdO0p2NkJAEEsISIGwRBREIIJuyKWImrOJo/uAoigQRFYd9EQkOCAIuLDogMCogIAyi/FkE2YcJCDEQAiEJ6Wzd6U7vVTVfFUkn3SGp+tY69963eHjkCd937jnv71qk33xL4dcHCrxXXpsnF1x9W79n4p194Q2y715T5Pijpsmylg+f6cAnOIG66rSk02lZ09kbvIjjZzYtapMn7n2rH4Uhw2rliJMmO04m+PgN9VXS5d3m0NWDpA9OkTOjJjCyoUZWtnZ5oiTqytTLdTdL78uXe7fQerd6eZ+Udwtt5R7ncgttka2R/66sqUrL6vYevi9j/L/RPc++Kw++vLiwwk5jG+TsI3cqcOfz0QRqPTb5P/Bo6+D3luwRPQSGeN+XPd7vLTv5vaWeUOhERnh3bDW39kimdK0CNYcIbNFYE+m0vgReKSsPFHirWlrlkOmz5On7r5e6tc95+8xJ58rl35spe07ZnltoS4Fa5BhuoQ0PMef9NP/47+dJy/L1b0Xeff9xsu2UYG8GDt+R+RW4hdb8DG2cgFtoY07Ve4lFdvlL3rsrvOeI8hKLkmBzC21JmCI5aLX3LLz8H3aOGVYXST2bi3ALrc3pmjsbt9Cam53NnXMLrc3php+trLfQltL+QIGXP+crZ18lH99jssw86Sh5+PHnCrfUPnzHVd4tdmkEXilQEXgRUCpeIv8Ci7kvN0mbd1Xo+O2HythJQ4ufxBGbJIDAY3NoJIDA05iK2z0h8NzOX+v0CDytybjdFwLP7fy1To/A05qMjr7UCrzHnnpJzrn4xvwDr7zn2mWkqqpSJo4fLffecqks+mC5fP+KX8jctxfK+K1GyYWzTpNddpxQILp4xfornnQgNq8LrsAzLzMXOkbguZCyeTMi8MzLzPaO1Qq8XI9Ud/5NKnoWSLZqO+mq3de7L5pbTm3fj+vmQ+C5krRZcyLwzMrLlW4ReK4kHWxOtQIv2DgIvKDcNjwPgRcFRWpETQCBF55oj2RldapHhuVqhB+bw/PMV0DgRcORKtER0Crw6lvvlKquV/oG7arbTzoHHRvd4FRSTQCBpzqevuZWN7fII/c/KAsXvCfjJ2wtRxx7lDQMbTSj+QBdIvACQOOU2Akg8GJHbPQCCDyj44uneQRePFypGo4AAi8cv/fSa+TpmmXSnctIfa5KPt2zpYzIRvsQ1HAdmnk2As/M3GzuWqXA866+a1x1gfeqx54+9Ln0EFk9/N9tjoLZNiCAwDNjO9z5y9tlwdvz+5qdMGlbOfGrXzKj+QBdIvACQOOU2Akg8GJHbPQCCDyj44uneQRePFypGo4AAi84v5zk5J7a96RD1r/9b1SuTj7btVXwopxZIIDAYyOEIdDxTot0Lm2TQdsPl+oR0bwIQaXA8yA1rLpMUpnmPlyZyvHSNvSbYfBxrkEEEHhmhDX7oiukq7Ozr9ma2lqZdcH3zGg+QJcIvADQOCV2Agi82BEbvQACz+j44mkegRcP16BVFza3y93/WCzvt3TInqMb5Yu7jJX66oqg5Yw9D4EXPLrmVLfcV7OwX4Eq7ybakzonBi/KmQg89kAoAssemS/Nzy0p1EilUzJm+mQZtOPwUDXzJ2sVeJXdc6W+7beSyq6RXEWDtA3+kvcsvK1Dz0sBMwgg8MzIiSvwzMiJLu0mgMCzO9+w0yHwwhK08HwEnp5Qs95LXH74lzdkeXt3X1Of2nq4nLqHez/0IPDC7cu/VH8gC73baNd9pmSGyV494WVBuK7M
P5sr8MzPsBwTZHuy8vYVz3ov6lq/es3YIbL1V3cL3Y5WgZcfLOfdQluRbZJsxZaetawMPSsFzCGAwDMjKxOegZdatlgqn3lQUiuWSG7iLtKz75EiNcGuYOYKPDP2pWtdIvBcS9zfvAg8f7ycOBqBpyfmJa2dcuHjb/ZraPSQWrnooMl6mkyoEwReOND5F1i8Udkiy9JdMi5TLztkhkjK+4tPOAIIvHD8XD07252Rt6/8u3MCz9W8mVsEgccuiIRANivVv50tqdUr+8plJu8lvQd9IVB5BF4gbJwUMwEEXsyADS+PwDM8wDjat1HgdXX3yOKmJhk3ekupqjTnT/25Am/9DkfgxfH/dmqGJYDAC0vQ3fNdu4XW3aSZPE8Agcc+iIJAalWTVN/1k/6lhm0hXTPODlQegRcIGyfFTACBFzNgw8sj8AwPMI72bRN4b7w1X66+9TfSumaNDG1okG+feqLstN22caCLpSbPwPsQKwIvlu1F0ZAEEHghATp+uksvsXA8aufHR+A5vwWiAcAVeNFwpIpqAgg81fGUvTkEXtkj0NeAbQLv21fOlkUfLO0DPda7Cu8n583SB56ONksAgccG0UgAgacxFbd70vwMPLeTcXt6BJ7b+Uc5Pc/Ai5ImtTQSQOBpTEVPTwg8PVmo6cQmgdfc2iqn//ulkvNeBrHuU+ndQnvn1Zer4U0jpRFwReB15jqkU9ZIY2oEz6grbWuU9SgEXlnxs/hHEEDgsS00EkDgaUyFnriFlj2gkQACT2MqenpC4OnJQk0nNgm8PNTr77hbnnzhpT6+n953bzl9erCH3aoJKeJGnnl+gTzw6JvSmxU5fNp2coj3t7aPCwLvtezzMif3d8nmsjI8taUcVHGs1Eq9tijoZwMCCDy2gzYCCDxtidBPngACj32gkQACT2Mq9ITAYw9sjgACj/2xEQHbBF5vJiMPPfGUvPnOAtlt++3l0E/tI5UVFSS/lsC776+SS2b/tR+PM7+yr+w+ZYwqRrYLvLZci9yfubUf853SH5Op6f0D5dArOVmYbpfV6R4Zma2RrbJ1vHM2EMnNnxSHwOte1i7p+iqpHFQVQ8eUtJ0AAs/2hM2cD4FnZm62d43Asz1hM+dD4JmZW1JdI/CSIm3QOrYJPIPQl6XVPz32pvzhodf7rX3EwTvK8UdPKUs/m1rUdoG3IDdX/pZ5uN/4I1Kj5YiKGYFyeKWqWVamuvvOnZQZJBO8v/lESyBKgZft7JWme+ZJ58LVhSaHHjBWhu4/LtqGqWY9AQSe9REbOSACz8jYrG8agWd9xEYOiMAzMrbEmkbgJYbanIUQeOZkFUWnCxaukkuv4Qq8KFiGqZGRjDyY+bXkr8Rb99m/4ijZOuX/dubuVFaeqlrer51BUin7dA8P0yLnfgSBKAXeqiffl5anFvVbZczMXaVmFLdRs/lKJ4DAK50VRyZHAIGXHGtWKp0AAq90VhyZHAEEXnKsTVwJgWdiajH3jMCLGbDC8s+9vFDu+9PrPAOvzNl05NrkH9mXpV1aZWJqsoxLTwrUUf6VLU9UL/OU4PqXt4zIVssevUMD1eOkTROIUuAt/e2b0jF/vcDNrzr8MxOlYeooIoBAyQQQeCWj4sAECSDwEoTNUiUTQOCVjIoDEySAwEsQtoFLIfAMDC3ulhF4cROmfhACtt9CG4TJ5s5pSnfJ65WrJetJvBqpkD27GyV/FR6faAlEKfA6FqyWpXe80ddgxeBqGXvGbpKu4Zmd0aZmdzUEnt35mjodAs/U5OzuG4Fnd76mTofAMzW5ZPpG4CXD2ahVEHhGxeVMswg8/1HnX2TRkeqVwbkqXmDhH19JZ0Qp8PILdryzWlpfWSqVnrwb8oktpWpobUl9cBAE1hFA4LEXNBJA4GlMhZ4QeOwBjQQQeBpT0dMTAk9PFmo6QeCpiYJGNiCAwGM7aCQQtcDTOCM9mUUAgWdWXq50i8BzJWmz5kTgmZWXK90i8FxJOticCLxg3Kw+
C4FndbzGDofAMzY6qxtH4Fkdr5HDIfCMjM36phF41kds5IAIPCNjs75pBJ71EYcaEIEXCp+dJyPw7MxVy1QruzLy4ooO6fber7DnsBoZW19VUmsIvJIwcVDCBBB4CQNnuaIEEHhFEXFAGQgg8MoAnSWLEkDgFUXEAWUggMArA3SDlkTgGRRWUq0i8JIi7d46a3qz8qt5zdKV/fDtqCnv75O3bZTRdcVfroDAc2+/mDAxAs+ElNzqEYHnVt6mTIvAMyUpt/pE4LmVtynTIvBMSao8fSLwysNd9aoIPNXxGN3cGy1d8uD7bf1m+MTIOpm2ZX3RuRB4RRFxQBkIIPDKAJ0lN0sAgccG0UgAgacxFXpC4LEHNBJA4GlMRU9PCDw9WajpBIGnJgrrGlnQ1iP/9e7qfnN9evQgmTqi+Js2EXi6t8PSzFuyMrdYxqZ3kob0FrqbjbA7BF6EMCkVCQEbBF7nmnZZ+NYCqR1UJ+O2nSCpdP56bT4mE0DgmZyevb0j8OzN1uTJEHgmpxd/7wi8+BkbtwICz7jIjGr4sSVr5JWVnYWetxlUJcdtPUQqS/jhDIGnN+YXeu6XuT1PFxpMe38dUHOqjKvYRW/DEXaGwIsQJqUiIWC6wGtevlIe/92D0tPVVeAxesJ42f/owzcr8bpSq+W16l9LU+UrMjQzSaZ0nSpDcuMi4UmRaAgg8KLhSJVoCSDwouVJtWgIIPCi4WhrFQSercmGmAuBFwIep5ZEIP8svG7vOXjDqitKOj5/EAKvZFSJHtib65G7O34gOe+vdZ8R6a3lM7VnJtpHuRZD4G2a/NuLemTue92y84RqmTCmtJfVlCtHm9Y1XeC9+NenZf6cN/pFcvD0Y2TkmC03GdNLtdfJ4opn+/59Xt4d2P5jm2I1fhYEnvERWjkAAs/KWI0fCoFnfISxDoDAixWvmcUReGbmZnvXCDydCffkuuWejvMReOv9pc6gEu7qoWfa5T8faS2smvLufvzqMQ1y8MfqEu7CzeVcFHiPDvqadElzv8APa/+Z1OSGubkJFE6NwFMYCi0JAo9NoJEAAk9jKnp6QuDpyUJNJwg8NVHQyAYEEHh6twO30HbI2hcr6w0p4c7OuGq5tLRl+lZtHFwhN507MuEu3FzOdIEX5BbagVfgNWS3kWkdV7q5AZROjcBTGozjbSHwHN8ASsdH4CkNRklbCDwlQWhqA4GnKQ16WUcAgad7L/ASC935JN3dQIE3sjEtP53lzstNkua94XqmC7z8LH5fYjHwGXi7dp8mg7NblTMG1h5AAIHHltBIAIGnMRV6QuCxBzZHAIHH/tiIAAKPTaGRAAJPYyr0xDPwPnoPbHgLbf6I044cIofvXR/rhnnPe95e07JemTSpRoYNLf35mrE2VYbiNgi8MmBjyZgJIPBiBkz5QAQQeIGwcVLMBBB4MQM2vDwCz/AA42gfgRcHVWqGJYDAC0uQ8+MggMDbNNUkX2Lx339eLf/zakehmYrKlBx3TKNsu21NHJGrr+lH4PV29Ejvmh6pHeHJVe9ZhXwgEBcBBF5cZKkbhgACLww9zo2LAAIvLrJ21EXg2ZFjpFMg8CLFSbGICCDwIgJJmUgJIPAixRmoWNuajPzspuWS2+BFIhO9N99OP97NFxiUKvCann9fmp59X3LeAxzrRg2WCcdNlso63hYcaBNyUlECCLyiiDigDAQQeGWAzpJFCSDwiiJy+gAEntPxf/TwCDw2hUYCCDyNqdATAq/8e2CNJ/Cuv3F5v0YQeBWyqq1bOrrWv0hkQ0DdLZ0y95ZX+jEb+bGtZMwB25Q/UDqwkgACz8pYjR8KgWd8hFYOgMCzMtbIhkLgRYbSnkIIPHuytGkSBJ5I1vtZvKtFpLohf5ugTemaOwsCT0d2j/21VV56ub3QDLfQVktdzeYFXvPc5bLwT/P6hVc3erBsd8KuOgKlC+sIIPCsi9SKgRB4VsRo3RAIPOsijXQgBF6kOO0ohsCzI0fbpnBd
4LV7Fxi992S19HqP+arwHu01ft9eGbxV1raYjZsHgacnsne9l1gs8/ESi4x3hVrb60ukt6VdqrYYIoN3GS3pavPNeCm30GYzWZl3+6uSvxJv3Wfro3eQxu1G6AmUTqwigMCzKk5rhkHgWROlVYMg8KyKM/JhEHiRIzW/IALP/AxtnMB1gTf/0WppX7Y+2UrvmfOTP99tY9RGzYTAMyqufs02P/W29K5eL7BqxjTKkKnjzB1obeelCLz8ob2ewGx6abH0tnbL0J1GSsOk4cbPzgB6CSDw9GbjcmcIPJfT1zs7Ak9vNho6Q+BpSEFZDwg8ZYHQToGA6wLvH/9VLdkBvm7ycd3eQ+fZIOUkgMArJ/3ga2e7emXlY3P7FUjXVMnwQ3YIXlTJmaUKPCXt0oYjBBB4jgRt2JgIPMMCc6RdBJ4jQQccE4EXEJzNpyHwbE7X3NlcF3hLXqyQFXMr+gJs3CYr4/frNTdQSzpH4JkbpOtX4JmbHJ2bSACBZ2Jq9veMwLM/YxMnROCZmFpyPSPwkmNtzEoIPGOicqpR1wVeLiey8p8V0vZBSuq3yMqIHbOSXu/znNoLmoZF4GlKw18vLj8Dzx8pjoZAeAIIvPAMqRA9AQRe9EypGJ4AAi88Q5srIPBsTjfgbAi8gOA4LVYCrgu8WOFSPDABBF5gdJwYEwFuoY0JLGVDEUDghcLHyTERQODFBJayoQgg8ELhs/5kBJ71EfsfEIHnnxlnxE8AgRc/Y1bwT8B0gZfLZaQ7u0Kq0sMlnTL/Daz+E7TvDASefZnaMBECz4YU7ZsBgWdfpjZMhMCzIcX4ZkDgxcfW2MoIPGOjs7pxBJ7V8Ro7nMkCryOzWJZ1PSy92TWevKuTUTWHSX3lNsZmQeMfEkDgsRM0EkDgaUyFnhB47AGNBBB4GlPR0xMCT08WajpB4KmJgkY2IIDAYztoJGCywFvUcbd0ZZb2Ya1KN8r4+i9pxExPPggg8HzA4tDECCDwEkPNQj4IIPB8wOLQxAgg8BJDbeRCCDwjY4u3aQRevHypHowAAi8YN86Kl4DJAm/+mhtEvFtoN/xsM+grUpEaFC80qsdKAIEXK16KBySAwAsIjtNiJYDAixUvxQMSQOAFBOfIaQg8R4L2MyYCzw8tjk2KAAIvKdKs44eAyQJvedcTsrrn1b5xB1ft5N1Ge6if8YseO7fifVlQuVQas4Nkas8kqZaqoudwQDgCCLxw/Dg7HgIIvHi4UjUcAQReOH6cHQ8BBF48XG2pisCzJckI50DgRQiTUpERQOBFhpJCERIwWeDlJCutPXOkPbNQatNjpKFq90hfZPFaxQL5W83rfbRH5YbJsR2flLSkIkyAUgMJIPDYExoJIP3f5MAAAB+NSURBVPA0pkJPCDz2gEYCCDyNqejpCYGnJws1nSDw1ERBIxsQQOCxHTQSMFngxc3zD7V/k2Xp5n7LTO+YJsNyg+Ne2un6CDyn449s+Ex3pyx/4QFZs+QtaZj0MRm5h3d1biq4fEfgRRYNhSIkgMCLECalIiOAwIsMpZWFEHhWxhpuKAReOH6cHQ8BBF48XKkajgACb9P8Hqv5H3m7YlHfAWnv2rsvtR/q3UZbGQ46Z2+WAAKPDVIKgZXvL5LaIYOlvrHxIw9/+7cXSus762+x33L/f5Ex+88opfRHHoPAC4yOE2MkgMCLES6lAxNA4AVG58SJCDwnYvY3JALPHy+OToYAAi8ZzqzijwACb9O8Vqfa5aHa5yT/v3l5N61riuyQGe8PMEf7JoDA843MqRN6Orvk/suulIWvfXh7+8eP+5zsd8qJ/Rj0rFklr1/3Fe8lN7m+X68dMVYmn359YFYIvMDoODFGAgi8GOFSOjABBF5gdE6ciMBzImZ/QyLw/PHi6GQIIPCS4cwq/ggg8DbPK+s9Z29lqk0acnW8wMLf1gp8NAIvMDonTnzh3vvl6V/f0W/WGT+6TMbssH3fr+WyWZlzzUmS9W6jXfcZMmmqTPqX
HwZmhMALjI4TYySAwIsRLqUDE0DgBUbnxIkIPCdi9jckAs8fL45OhgACLxnOrOKPAALPHy+Ojp8AAi9+xiavcP8VV8n851/sN8LBp8+U3Y/o/wbq5jeflXcfuFZyPV1S1biFbDv9fKnbYuvAoyPwAqPjxBgJIPBihEvpwAQQeIHROXEiAs+JmP0NicDzx4ujkyGAwEuGM6v4I4DA88eLo+MngMCLn7HJKyyc85r8/oJLvLtjP7w9dtCwYXLyf/xY6hsaNhor/yKLrpWLpW7UNpJKV4QaG4EXCh8nx0QAgRcTWMqGIoDAC4XP+pMReNZH7H9ABJ5/ZpwRPwEEXvyMWcE/AQSef2acES8BBF68fG2onpd4cx59TOoaG2Tq0UdJ45ajYh8LgRc7YhYIQACBFwAap8ROAIEXO2KjF0DgGR1fPM0j8OLhStVwBBB44fhxdjwEEHjxcKVqcALlFngVFTmprfH6956j1tGd9v4nFXwYzrSGAALPmiitGgSBZ1Wc1gyDwLMmylgGQeDFgtXsogg8s/OztXsEnq3Jmj0XAs/s/GzsvpwCL53OSePgrKxTdvmbNFtaPYmXQ+LZuNf8zITA80OLY5MigMBLijTr+CGAwPNDy71jEXjuZV50YgReUUQcUAYCCLwyQGfJogQQeEURcUDCBMop8OpqslJX8+Gz1dZ91nSmpasbgZfwNlC3HAJPXSQ05BFA4LENNBJA4GlMRU9PCDw9WajpBIGnJgoa2YAAAo/toJEAAk9jKm73VE6BV1Odk0G12X4BtLanpacXgef2rhRB4Lm+A3TOj8DTmYvrXSHwXN8Bm58fgcf+2IgAAo9NoZEAAk9jKvSEwGMPaCNQToGXZzG4LivVVR9ehdfdk5K2jrQ2RPRTBgIIvDJAZ8miBBB4RRFxQBkIIPDKAN2gJRF4BoWVVKsIvKRIs44fAgg8P7Q4NikCCLykSLNOqQTKLfDyfaY9Z5fzXmKRE+RdqbnZfhwCz/aEzZwPgWdmbrZ3jcCzPeFw8yHwwvGz8mwEnpWxGj8UAs/4CK0cAIFnZaxGD6VB4BkNkOZjIYDAiwUrRUMSQOCFBMjpsRBA4MWC1ZqiCDxrooxuEARedCypFB0BBF50LKkUHQEEXnQsqRQNAQReNBypEi0BBF60PKkWDQEEXjQcqRItAQRetDxtq4bAsy3RCOZB4EUAkRKRE0DgRY6UghEQQOBFAJESkRJA4EWKk2IREUDgRQSSMpESQOBFipNiERFA4EUE0tIyCDxLgw0zFgIvDD3OjYsAAi8ustQNQwCBF4Ye58ZBAIEXB1VqhiWAwAtLkPPjIIDAi4MqNcMSQOCFJWj3+Qg8u/MNNB0CLxA2ToqZAAIvZsCUD0QAgRcIGyfFSACBFyNcSgcmgMALjI4TYySAwIsRLqUDE0DgBUbnxIkIPCdi9jckAs8fL45OhgACLxnOrOKPAALPHy+Ojp8AAi9+xqzgnwACzz8zzoifAAIvfsas4J8AAs8/M5fOQOC5lHaJsyLwSgTFYYkSQOAlipvFSiSAwCsRFIclRgCBlxhqFvJBAIHnAxaHJkYAgZcYahbyQQCB5wOWg4ci8BwMvdjICLxihPj35SCAwCsHddYsRgCBV4wQ/z5pAgi8pImzXikEEHilUOKYpAkg8JImznqlEEDglULJ3WMQeO5mv8nJEXhsCo0EEHgaU6EnBB57QBsBBJ62ROgnTwCBxz7QSACBpzEVekLgsQc2R0C1wFvZ3CrnXXazfLBslfzxtsv65pjxtYvlzXnviqRShV9rGFwvT957XeGfF6/oIPGQBBB4IQFyeiwEEHixYKVoSAIIvJAAOT1yAgi8yJFSMAICCLwIIFIicgIIvMiRUjACAgi8CCBaXEKtwFvT3ikneKJu2if3kCf+/mo/gXfkKefJtRefKdtNHLtRNAi88LsVgReeIRWiJ4DAi54pFcMTQOCFZ0iFaAkg8KLlSbVoCCDwouFIlWgJIPCi5Um1aAgg
8KLhaGsVtQKvvaNTlq9sKfx94ezb+wm8acd9S+6++QIZvcVwBF4MOxOBFwNUSoYmgMALjZACMRBA4MUAlZKhCCDwQuHj5JgIIPBiAkvZUAQQeKHwcXJMBBB4MYG1pKxagbeO78tz/rmRwNvzsJlywN67ySuvzZORwxvlrJnHywH77F44hSvwwu9MBF54hlSIngACL3qmVAxPAIEXniEVoiWAwIuWJ9WiIYDAi4YjVaIlgMCLlifVoiGAwIuGo61VyirwVqxaLUuaVmzEduzokTKscUjh1wcKvGw2Jz+86ldyxEF7yyf32lmeeOZVOe/ym+WBX19RuCKvua3b1qwSm6u6Ki1p7/mCnd2ZxNZkIQgUI1BfUyk9mYz09OaKHWrkv0+tfaankc073HTDoCppbe+RnJ3b0uFkzR19nShp78p/X2bNHYTOrSJQXZmWioqUdHj7kg8ENBDIef/hzv/esjeTlW6+KzVEQg9rCTTUV0lbR69k+c0le+IjCOT/4CHKT8r7Miz5x5innvtfue+Rpzdaf/oxB8nee+70kQLvo5r98revlC8cOU2OOuSTsqazN8p5nKxV5f0GKy8T+I+Zk/GrHbrGE8u9mZxkPInPBwJaCOR/89/R1SvsSi2J0EdNVYVUev8d7+rJFL4z+UBAA4H8nsz/4TC/t9SQBj2sI5C/aCF/cQjflewJTQTqaioKF9KUblU0dU8vcRMYVFsZ6RK+BF4pKw+8Aq+9o0vmvfO+7L7zpL7TT/7GZXLK8YfJ4Qd+nFtoS4Fa5BhuoY0AIiUiJ8AttJEjpWAEBLiFNgKIlIiUALfQRoqTYhER4BbaiEBSJlIC3EIbKU6KRUSAW2gjAmlpmbLeQlsK04ECr7mlTQ6dcY5ce8k3ZN+9pkj+Kr7vXHKTPPSbK2XEsAYEXilQEXgRUKJE0gQQeEkTZ71SCCDwSqHEMUkSQOAlSZu1SiWAwCuVFMclSQCBlyRt1iqVAAKvVFJuHqdW4D321EtyzsU3Sv7a0Z7ejFRVVcrE8aPl3lsulSeefVWuvvEuaVrRLPnn5Z379RNkn6k7FxLkJRbhNzJX4IVnSIXoCSDwomdKxfAEEHjhGVIhWgIIvGh5Ui0aAgi8aDhSJVoCCLxoeVItGgIIvGg42lpFrcALChyBF5Tc+vMQeOEZUiF6Agi86JlSMTwBBF54hlSIlgACL1qeVIuGAAIvGo5UiZYAAi9anlSLhgACLxqOtlZB4NmabIi5EHgh4HFqbAQQeLGhpXAIAgi8EPA4NRYCCLxYsFI0JAEEXkiAnB4LAQReLFgpGpIAAi8kQMtPR+BZHnCQ8RB4QahxTtwEEHhxE6Z+EAIIvCDUOCdOAgi8OOlSOygBBF5QcpwXJwEEXpx0qR2UAAIvKDk3zkPguZGzrykReL5wcXBCBBB4CYFmGV8EEHi+cHFwEQJduU55MfeoLM7Nl+1Se8ge6WmS8v7y80Hg+aHFsUkRQOAlRZp1/BBA4PmhxbFJEUDgJUXazHUQeGbmFmvXCLxY8VI8IAEEXkBwnBYrAQRerHidK35X5mqZn5vTN/en0p+TaenP++KAwPOFi4MTIoDASwg0y/gigMDzhYuDEyKAwEsItKHLIPAMDS7OthF4cdKldlACCLyg5DgvTgIIvDjpulW7LdciP818S3LeX+s+I1Jj5PSKK32BQOD5wsXBCRFA4CUEmmV8EUDg+cLFwQkRQOAlBNrQZRB4hgYXZ9sIvDjpUjsoAQReUHKcFycBBF6cdN2qnZWs/CTzdenKtfcNPiG1i5xYca4vEAg8X7g4OCECCLyEQLOMLwIIPF+4ODghAgi8hEAbugwCz9Dg4mwbgRcnXWoHJYDAC0qO8+IkgMCLk657td/MvSAPZW8pSLwhqREyIz1LtkiN9QUCgecLFwcnRACBlxBolvFFAIHnCxcHJ0QAgZcQaEOXQeAZGlycbSPw4qRL7aAEEHhByXFenAQQeHHSdbN2d65LVsoSGZUaL2mp
8A0BgecbGSckQACBlwBklvBNAIHnGxknJEAAgZcAZIOXQOAZHF5crSPw4iJL3TAEEHhh6HFuXAQQeHGRpW4xAh1tvbJycbvUDamS4WPq+g5H4BUjx78vBwEEXjmos2YxAgi8YoT49+UggMArB3Vz1kTgmZNVYp0i8BJDzUI+CCDwfMAKcGhrc5fc/cs35e1/tsh2Ow+VY0/cXkaOWi8FApR04hQEnhMxqxuyualT5jzZJLnMhy+8GD1psOyw14jCPyPw1MVFQx4BBB7bQCMBBJ7GVOgJgcce2BwBBB77YyMCCDw2hUYCCLz4Usn05uTyrz8j78xvKSxSM6hSdtxzuJx1wV7xLWpJZQSeJUEaNsacJ5bKqg86+3W9zzHjpLquAoFnWJautIvAcyVps+ZE4JmVlyvdIvBcSTrYnAi8YNysPguBZ3W8xg6HwIsvundeXCVXnP+c9GayfYsM2aJGrrr1QKmp8f8Mrvg61VcZgacvExc68ivwups+KGCpHjXaBTzMqJAAAk9hKLQkCDw2gUYCCDyNqejpCYGnJws1nSDw1ERBIxsQQODFtx1e+MMi+d2d/5RlLR19i4ybOEQu/vn+8S1qSWUEniVBGjZGqbfQtnf0yOJrr5DVT/+1MGHD/p+Wrb55nqTSacMmpl3TCSDwTE/Qzv4ReHbmavpUCDzTE4y3fwRevHyNrI7AMzI265tG4MUX8eqmLrnvR2/IP95bJStbu2R4Q418/dKpMn6HxvgWtaQyAs+SIA0co5SXWCx9/C+yaPYl/aYbO+uH0rDvgQZOTMsmE0DgmZyevb0j8OzN1uTJEHgmpxd/7wi8+BkbtwICz7jInGgYgRdvzCsWtsu8Z1ZK/sKcHfYbKUPH1Ma7oCXVEXiWBGnRGBu+xGLBjdfJyof+0G+64Z+bIVueMtOiiRnFBAIIPBNScq9HBJ57mZswMQLPhJTK1yMCr3zs1a6MwFMbjdONIfCcjl/t8Ag8tdE429iGAq9l/gKZf+4Zkuv88IUXqZoamfjjm6Vm7Hhn+TB4eQgg8MrDnVU3TwCBxw7RSACBpzEVPT0h8PRkoaYTBJ6aKGhkAwIIPLaDRgIIPI2puN3ThgKvoysjne/M867Cu7cAZfiRn5faidu7DYjpy0IAgVcW7CxahAACjy2ikQACT2MqenpC4OnJQk0nCDw1UdAIAo89oJwAAk95QA62N1DgOYiAkRUSQOApDIWWeAste0AlAQSeyljUNIXAUxOFnkYQeHqyoJP1BLgCj92gkQACT2MqbveEwHM7f63TI/C0JuN2X1yB53b+WqdH4GlNRkdfCDwdOajqAoGnKg6aWUsAgcdW0EgAgacxFbd7QuC5nb/W6RF4WpNxuy8Entv5a50egac1GR19IfB05KCqCwSeqjhoBoHHHlBMAIGnOBxHW0PgORq88rEReMoDcrQ9BJ6jwSsfG4GnPKAyt4fAK3MAGpdH4GlMhZ64Ao89oJEAAk9jKm73hMBzO3+t0yPwtCbjdl8IPLfz1zo9Ak9rMjr6QuDpyEFVFwg8VXHQzFoCCDy2gkYCCDyNqbjdEwLP7fy1To/A05qM230h8NzOX+v0CDytyejoC4GnIwdVXSDwVMVBMwg89oBiAgg8xeE42hoCz9HglY+NwFMekKPtIfAcDV752Ag85QGVuT0EXpkD0Lg8Ak9jKvTEFXjsAY0EEHgaU3G7JwSe2/lrnR6BpzUZt/tC4Lmdv9bpEXhak9HRFwJPRw6qukDgqYqDZtYSQOCxFTQSQOBpTMXtnhB4buevdXoEntZk3O4Lged2/lqnR+BpTUZHXwg8HTmo6gKBpyoOmkHgsQcUE0DgKQ7H0dYQeI4Gr3xsBJ7ygBxtD4HnaPDKx0bgKQ+ozO0h8MocgMblEXgaU6EnrsBjD2gkgMDTmIrbPSHw3M5f6/QIPK3JuN0XAs/t/LVOj8DTmoyOvhB4OnJQ1QUCT1UcNLOWAAKPraCRAAJPYypu94TAczt/rdMj
8LQm43ZfCDy389c6PQJPazI6+kLg6chBVRcIPFVx0AwCjz2gmAACT3E4jraGwHM0eOVjI/CUB+Roewg8R4NXPjYCT3lAZW4PgVfmADQuj8DTmAo9cQUee0AjAQSexlTc7gmB53b+WqdH4GlNxu2+EHhu5691egSe1mR09IXA05GDqi4QeKrioJm1BBB4bAWNBBB4GlNxuycEntv5a50egac1Gbf7QuC5nb/W6RF4WpPR0RcCT0cOqrpA4KmKg2YQeOwBxQQQeIrDcbQ1BJ6jwSsfG4GnPCBH20PgORq88rEReMoDKnN7CLwyB6Bx+fraCqlKp6WlvUdje/TkKAGuwHM0eOVjI/CUB+Rgewg8B0M3YGQEngEhOdgiAs/B0A0YGYFnQEhlbBGBV0b4WpdG4GlNxu2+EHhu5691egSe1mTc7QuB5272midH4GlOx93eEHjuZq95cgSe5nTK3xsCr/wZqOsAgacuEhryCCDw2AYaCSDwNKbidk8IPLfz1zo9Ak9rMm73hcBzO3+t0yPwtCajoy8Eno4cVHWBwFMVB82sJYDAYytoJIDA05iK2z0h8NzOX+v0CDytybjdFwLP7fy1To/A05qMjr4QeDpyUNUFAk9VHDSDwGMPKCaAwFMcjqOtIfAcDV752Ag85QE52h4Cz9HglY+NwFMeUJnbQ+CVOQCNyyPwNKZCT1yBxx7QSACBpzEVt3tC4Lmdv9bpEXhak3G7LwSe2/lrnR6BpzUZHX0h8HTkoKoLBJ6qOGhmLQEEHltBIwEEnsZU3O4Jged2/lqnR+BpTcbtvhB4buevdXoEntZkdPSFwNORg6ouEHiq4qAZBB57QDEBBJ7icBxtDYHnaPDKx0bgKQ/I0fYQeI4Gr3xsBJ7ygMrcHgKvzAFoXB6BpzEVeuIKPPaARgIIPI2puN0TAs/t/LVOj8DTmozbfSHw3M5f6/QIPK3J6OgLgacjB1VdIPBUxUEzawkg8NgKGgkg8DSm4nZPCDy389c6PQJPazJu94XAczt/rdMj8LQmo6MvBJ6OHFR1gcBTFQfNIPDYA4oJIPAUh+Noawg8R4NXPjYCT3lAjraHwHM0eOVjI/CUB1Tm9qwTeGXmyfIQgAAEIAABCEAAAhCAAAQgAAEIQAACEFBNIJXzPqo7pDkIQAACEIAABCAAAQhAAAIQgAAEIAABCDhMAIHncPiMDgEIQAACEIAABCAAAQhAAAIQgAAEIKCfAAJPf0Z0CAEIQAACEIAABCAAAQhAAAIQgAAEIOAwAQSe4eH3ZjLyH7/4ndx618Py9P0/lWGNQwoTXXPzPXLbPY9IOp3um/Dumy6QHSeNN3xi2jeBwMrmVjnvspvlg2Wr5I+3XdbX8sLFTfLDq26RuW+9J1uNHik/+NYpMnXX7U0YiR4tIvDUc3Pka+ddI5WVFX1TfedrM+Sk4w6xaEpGMYFAfi9efdNdsmx5s0yZPFEu/95MGTm80YTW6dFSAnw/WhqsoWM9+NizctHs2+TS735VDj/w431T8N1paKAWtL2pn7357rQgXENGQOAZEtSm2jzzB9fK5O22lpt+80d58t7r+gRe/j922287Xk78/KcNn5D2TSOwpr1TTvjaxTLtk3vIE39/tZ/A+9K3rpCD95sqJx93qDzz4uuezPuV/Pnu2VK1gUgxbV76NY/An/7ynPz5yRfkJxd9w7zm6dgaAq1t7XLESefKDZefVZB3N9x6nyxYuIR9aU3CZg7C96OZudnYdf5ChJdenSvLVjTLl2d8tk/g8d1pY9rmzLSpn7357jQnQ9M7ReAZnuCb3pVMeYG368Ff7ifwzrn4Rpm2z+5y9GH7Gj4h7ZtGoL2jU5avbCn8feHs2/sE3opVq+WIE8+VZx+8QSorPrzy6fiZF8i5/3aCfGLPyaaNSb8GE7jnj4/LnDffkUvO/VeDp6B10wn89/9/Xn7/0JPy8x+fUxgl/0Pp/p//pjz/0I1SXV1l+nj0bygBvh8NDc7CtvM/
4+TvHPrqrB/L9GMO6hN4fHdaGLZBI23qZ2++Ow0K0fBWEXiGB7iu/YEC7/RzZ0s2m5MF738gKe+gLx59oMw86ShLpmUMEwi8POef/QTey3PmycXX3C733XppX/uzLvqZ7D11Z5nu7U8+EEiKwC/vfEgefeIF6e7ulVUtrbL/3rvJ9848SQbV1ybVAutAQG7+zQOyYlWLfP+bJ/fROMATeL++7vsyYfxoCEGgLAT4fiwLdhbdDIGvnH1VP4HHdyfbRQOBgT97892pIRU3ekDgKc+5s6tb3lqwaKMuhzYMlnFjtuj79YFfIj//zwdkyOB6+cJnD5B3Fy2VvNDL/4B66AF7KZ+Y9kwgkL+abknTio1aHes9127dcxgHCrxnXnxNrvvl7+Uu71mM6z7n/+hXssO24+TULx5uwtj0aBCBze3RF71bcl6fu0BOm36EZHM5+c4lN8qkbbbqJ1IMGpVWDSWQf35tJpOVWWdM75vgsBnnyHWXfrNwZT0fCJSDwJ+ffJHvx3KAZ81NEhgo8PjuZLNoIDDwZ2++OzWk4kYPCDzlOS/+YLnM9l5IMfDzsd127Pd8u4FfIgOP/9nt98vSZSvlonO+rHxi2jOBwFPP/a/c98jTG7Wav8Vh7z13Kvz6QIH3ymvz5IKrb+v3TLyzL7xB9t1rihx/1DQTxqZHgwiUskfXjZMXevmXqzx8x48MmpBWTSeQ/4O2Ju/lFeefdUrfKPsde6bc+bPzZeuxW5o+Hv1bQoDvR0uCNHiMgQKP706Dw7So9WI/e/PdaVHYykZB4CkLJGg7A79E8vJkyo4T+56jc6135VP++Tob/qAQdC3Og0ApBAYKvPytiodMn+W9Lfl6qautLpT4jPcA9/xbF/ecwptoS2HKMdEQeNu7qnnI4EEyauTQQsFnvReqXHH9nf3kcjQrUQUCmyaQ/9P63/zu0cIts/nPUu+t3Z89+bvy3J9u7HtOKPwgkDQBvh+TJs56xQgMFHh8dxYjxr9PgsDAn7357kyCOmvkCSDwLNkHA79EZpxxkRzgvcTijFOPlfeXNMlpZ13pXX33r96znna1ZGLG0E5goMDL95v/TdjH95hceB7jw48/V7il9uE7rpKKirT2cejPIgKzb7pH5r2zUK658OuFZ4XmrwTN37J49unrb2W0aFxGUUog/8buw044R2Zf8G+y1+47yuXX3SEdHV1yxfdnKu2YtlwgwPejCymbNeNAgcd3p1n52drtwJ+9+e60NWl9cyHw9GVSckfNLW1y4PFnFY7v6emVqqrKwj8/dvdsaVvT4b1A4DbJvymnwXsW3inHH1b4mw8E4ibw2FMvSf4tyOI9W6ynN1PYlxO9B7Lfe8ulssi7Jfz7V/xC5r69UMZvNUounHWa7LLjhLhboj4E+hFo9yTJxT+5XZ78+6tSVVkpB+27p3z3Gyf2XRkKLggkRSD/bNArf3pn4a3de0zZTi4/b6YMbRyc1PKsA4GNCPD9yKbQQuD4mRcUngPe6/1esiKdllQ6JT/6wf/z3kb7CeG7U0tKbvWxuZ+96+tq+b2lW9uhbNMi8MqGnoUhAAEIQAACEIAABCAAAQhAAAIQgAAEIFCcAAKvOCOOgAAEIAABCEAAAhCAAAQgAAEIQAACEIBA2Qgg8MqGnoUhAAEIQAACEIAABCAAAQhAAAIQgAAEIFCcAAKvOCOOgAAEIAABCEAAAhCAAAQgAAEIQAACEIBA2Qgg8MqGnoUhAAEIQAACEIAABCAAAQhAAAIQgAAEIFCcAAKvOCOOgAAEIAABCEAAAhCAAAQgAAEIQAACEIBA2Qgg8MqGnoUhAAEIQAACEIAABCAAAQhAAAIQgAAEIFCcAAKvOCOOgAAEIAABCEAAAhCAAAQgAAEIQAACEIBA2Qgg8MqGnoUhAAEIQAACEIAABCAAAQhAAAIQgAAEIFCcAAKvOCOOgAAEIAABCEAAAhCAAAQgAAEIQAACEIBA2Qgg8MqGnoUhAAEIQAACEIAA
BCAAAQhAAAIQgAAEIFCcAAKvOCOOgAAEIAABCEAAAhCAAAQgAAEIQAACEIBA2Qgg8MqGnoUhAAEIQAACEIAABCAAAQhAAAIQgAAEIFCcAAKvOCOOgAAEIAABCEAAAhCAAAQgAAEIQAACEIBA2Qj8H3w/BR73hbd7AAAAAElFTkSuQmCC", "text/html": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "# We humans find it easier to visalize things in 2D!\n", - "# Reduce the dimensionality of the vectors to 2D using t-SNE\n", - "# (t-distributed stochastic neighbor embedding)\n", - "\n", - "tsne = TSNE(n_components=2, random_state=42)\n", - "reduced_vectors = tsne.fit_transform(vectors)\n", - "\n", - "# Create the 2D scatter plot\n", - "fig = go.Figure(data=[go.Scatter(\n", - " x=reduced_vectors[:, 0],\n", - " y=reduced_vectors[:, 1],\n", - " mode='markers',\n", - " marker=dict(size=5, color=colors, opacity=0.8),\n", - " text=[f\"Video: {t}
Text: {d[:100]}...\" for t, d in zip(video_numbers , documents)],\n", - " hoverinfo='text'\n", - ")])\n", - "\n", - "fig.update_layout(\n", - " title='2D Chroma Vector Store Visualization',\n", - " scene=dict(xaxis_title='x',yaxis_title='y'),\n", - " width=800,\n", - " height=600,\n", - " margin=dict(r=20, b=10, l=10, t=40)\n", - ")\n", - "\n", - "fig.show()" - ] - }, - { - "cell_type": "code", - "execution_count": 78, - "id": "50207703-afdc-4251-96c3-5e3d6f14d9b7", - "metadata": {}, - "outputs": [ - { - "data": { - "application/vnd.plotly.v1+json": { - "config": { - "plotlyServerURL": "https://plot.ly" - }, - "data": [ - { - "hoverinfo": "text", - "marker": { - "color": [ - "#d01f72", - "#75195e", - "#3678a7", - "#5b3f83", - "#74a788", - "#571122", - "#4099c1", - "#659222", - "#188ca3", - "#6d4052", - "#35303c", - "#a9e927", - "#29fa15", - "#71c500", - "#9b9d6e", - "#cf7e83", - "#badd6d", - "#85fa26", - "#22463b", - "#ce865d", - "#f59c06", - "#011995", - "#793548", - "#ad8b14", - "#d937bd", - "#2b9f18", - "#046e5c", - "#75b5e3", - "#c959de", - "#72e048", - "#8e8cab", - "#20f2c3", - "#64f999", - "#e69670", - "#6a0fce", - "#d65c3a", - "#7bee34", - "#4f86b8", - "#b43417", - "#4dfb77", - "#2ae342", - "#c3e1f2", - "#12897b", - "#2b3af3", - "#7ea8e9", - "#6ad041", - "#0bdacc", - "#99fe53", - "#4aaf9f", - "#d156c8", - "#505bd9", - "#dc152c", - "#b52bf6", - "#9baca0", - "#a03134", - "#d43c00", - "#5af098", - "#2c168d", - "#c6016b", - "#f090af", - "#482281", - "#39821f", - "#e0a8df", - "#480c89", - "#08808d", - "#ac5faf", - "#0faf59", - "#79c82a", - "#e6e164", - "#0d2037", - "#8afd40", - "#2e1afc", - "#3ec815", - "#fbfef2", - "#a63fa4", - "#b27d2e", - "#ca3592", - "#b9fd23", - "#ac9648", - "#804ce2", - "#9b5e28", - "#a64739", - "#c457d7", - "#de30e4", - "#1f6ab0", - "#6ff3c5", - "#6df6ca", - "#ed694d", - "#2fef1a", - "#335dcf", - "#845aa9", - "#574e28", - "#dc95ec", - "#b2140a", - "#15ae86", - "#70d1d9", - "#6f745a", - "#b3dba5", - "#108c41", - "#268bba", - "#913568", - 
"#1a6fdf", - "#422abb", - "#cb725f", - "#fe62a5", - "#dfc6c7", - "#b25d7b", - "#bd53b1", - "#796278", - "#048452", - "#c6eff5", - "#d24e5d", - "#fe8e92", - "#22398f", - "#3e5237", - "#8069bc", - "#7740be", - "#cc8ec0", - "#b280bb", - "#91f4db", - "#ac55ba", - "#c97596", - "#116019", - "#43c2e8", - "#2a2d25", - "#fc2b74", - "#ae7afe", - "#92b4fa", - "#dd8cd7", - "#4862ce", - "#af0f59", - "#ad6bd0", - "#3f0a72", - "#e01073", - "#144ada", - "#5cb9ca", - "#51d0da", - "#d6d07a", - "#b61e76", - "#474ff9", - "#68bece", - "#d01b19", - "#ee26df", - "#2ebca4", - "#539908", - "#ec0a37", - "#1a5613", - "#da28db", - "#246fa5", - "#bbfe83", - "#d54222", - "#580c96", - "#02cada", - "#996ff1", - "#e2a239", - "#ae5204", - "#4ce72d", - "#2cde7f", - "#b64eac", - "#591ab9", - "#a958c9", - "#696eaa", - "#4c4355", - "#6a6c06", - "#df5d2e", - "#9780cf", - "#682d42", - "#efed10", - "#1b312a", - "#dbde1c", - "#e1b5db", - "#a95826", - "#4e797a", - "#10384a", - "#9a5ba2", - "#d34482", - "#8a29da", - "#fb9dce", - "#ff2d6a", - "#50f10d", - "#f8d349", - "#7b4427", - "#11a70e", - "#987252", - "#c932c1", - "#2d7f7d", - "#c1e3c5", - "#0c777d", - "#0f8781", - "#dd889c", - "#799a24", - "#4212f1", - "#e6f378", - "#805527", - "#091a90", - "#a9541c", - "#fcdcad", - "#01f59b", - "#94a85d", - "#426575", - "#7f03bd", - "#2dcfac", - "#52b6df", - "#73e76a", - "#d70d97", - "#601568", - "#d4b1ce", - "#7341ee", - "#bb0ee6", - "#f645e0", - "#1c2c7e", - "#7dd58b", - "#4b9a93", - "#9df332", - "#612b32", - "#b1c27d", - "#3626a5" - ], - "opacity": 0.8, - "size": 5 - }, - "mode": "markers", - "text": [ - "Video: 59506507
Text: Well, I realized that was a whole lot of theory, but I hope it gave you a good intuition that will\nb...", - "Video: 59671315
Text: Okay, so here we are, back in Jupyter Lab, ready for the next use of a frontier model, and see this\n...", - "Video: 60616895
Text: It feels like 100 videos ago that I told you that we were going to have instant gratification with o...", - "Video: 60619275
Text: And we will conclude our expedition into the world of frontier models through their chat interface b...", - "Video: 59472693
Text: Friends.\nI am absolutely exhausted.\nI am exhausted and a little tiny bit traumatized.\nAnd you are so...", - "Video: 59670121
Text: So it's business time right now.\nWe are going to build a Rag pipeline to estimate the price of produ...", - "Video: 59295619
Text: Welcome back to the the moment when we bring it all together into a beautiful user interface.\nBut fi...", - "Video: 60617163
Text: And already that wraps up day two.\nNow that you have built that solution.\nAnd congratulations on tha...", - "Video: 60616423
Text: So I hope you've just enjoyed yourself experimenting with different LMS locally on your box using th...", - "Video: 59170227
Text: Welcome back to Google Colab.\nHere we are ready to explore the wonderful world of Tokenizers.\nSo, uh...", - "Video: 59169985
Text: So I hope you enjoyed that whirlwind tour of Google Colab.\nHere's just a little screenshot example o...", - "Video: 60616927
Text: It's time for our first LM experiment at this point.\nSo some of this you may know well, you may know...", - "Video: 59673721
Text: And here we are in JupyterLab for the last time, and we are looking here at day five, the last day\no...", - "Video: 59508055
Text: I'm so very happy that you've reached this epic moment in the course and that you're hanging in ther...", - "Video: 59670259
Text: It's remarkable.\nBut you are now at the 95% point.\nThere's 5% remaining of this course.\nUh, maybe it...", - "Video: 60616623
Text: So we're now going to start week one of the course when we are going to be looking at exploring fron...", - "Video: 59472383
Text: And welcome back to the week six folder.\nWe're now at day two, which is the second and final stage o...", - "Video: 59670171
Text: So as the very final step on this part four of day two of week eight, we are now going to build an\ne...", - "Video: 59297721
Text: And so now the time has come to talk about the most crucial aspect of Rag, which is the idea of vect...", - "Video: 59297599
Text: Well, that was a sneaky detour I took you on in the last one.\nI hope you enjoyed it though, and I ho...", - "Video: 59507635
Text: Look, I hope you're excited.\nYou really should be.\nYou've been through 80% of the course and it's al...", - "Video: 59669375
Text: Here we are for the day.\n2.1 notebook.\nAnd don't let it be said that I don't ever do anything for yo...", - "Video: 59297733
Text: Welcome back to JupyterLab and welcome to your first experiment with the world of Long Chain.\nLet me...", - "Video: 59670369
Text: It is terrific that you're hanging on in there and making such great progress with this course.\nAs w...", - "Video: 59166281
Text: And with that, amazingly, you completed day one of week two already and that gets you to the 15% poi...", - "Video: 59671567
Text: Well, the first thing you're going to notice is that I don't have a notebook open for you.\nAnd that'...", - "Video: 59297593
Text: And welcome to continuing our journey with Hrag.\nAnd today it's time to unveil Liang Chen.\nSo first,...", - "Video: 59166461
Text: And welcome back to the lab.\nHere we are in Jupyter Lab and we are going to go into week two.\nAnd we...", - "Video: 59167007
Text: Well, how fabulous is that?\nI hope that you are as wowed as I am by our new airline, I assistant and...", - "Video: 59508121
Text: The moment has arrived.\nHere we go.\nWe're in fine tuning.\nWe do fine tuning.\nTrain.\nThere is also a ...", - "Video: 59295579
Text: All right.\nAre you excited to see how this goes?\nLet's give it a try.\nSo in this next section, I cre...", - "Video: 60620375
Text: And with that, we've reached an important milestone.\nThe first week of our eight week journey is com...", - "Video: 59472491
Text: Welcome back.\nIf you are following along with me in JupyterLab, as I hope you are, then you will nee...", - "Video: 59472425
Text: Welcome to week six, day three.\nToday is going to be a day that you will either love or you will hat...", - "Video: 59508057
Text: Actually slight change in plan.\nI'm going to wrap up the day.\nDay three at this point, and say that ...", - "Video: 60619577
Text: And for the final piece of background information, I wanted to take another moment to talk about API...", - "Video: 59170291
Text: Welcome back to Colab and welcome back to our business project.\nSo again our assignment, we are due ...", - "Video: 60619651
Text: I mentioned before an AI company called vellum.\nWhen we were talking about the different questions, ...", - "Video: 59473191
Text: And you thought we'd never get here.\nHere we are in Jupyter Lab, running our fine tuning for a front...", - "Video: 59170297
Text: And here we are in Google Colab, ready for fun with models.\nSo first we do the usual Pip installs an...", - "Video: 59167015
Text: Welcome back to Jupyter Lab and welcome to Day Five's Lab.\nAnd this is going to be lots of creativit...", - "Video: 59170043
Text: Let me enthusiastically welcome you all back to week three of our LLM engineering journey.\nIf you en...", - "Video: 59473147
Text: Well, I'm very relieved.\nI've got that behind me.\nNo more human testing for me.\nWe'll have one final...", - "Video: 59166453
Text: Welcome back and welcome to our continuing JupyterLab experience.\nUh, I'm hopefully going to keep yo...", - "Video: 59166915
Text: Welcome back to the wonderful world of JupyterLab.\nAnd here we are in week two.\nDay three.\nUh, bring...", - "Video: 59667365
Text: Here we are back in Colab, looking at the week seven, day five of the Colab notebooks and I'm on a\nT...", - "Video: 60616845
Text: We're on the home stretch.\nThis is the final step in the environment setup, and it's an easy one.\nIt...", - "Video: 59295459
Text: And welcome back to More Leaderboard Fest as we go through some more leaderboards.\nBut this time we'...", - "Video: 59471979
Text: So we now turn to the parts of the problem, which is perhaps, let's say, not as glamorous as some\nof...", - "Video: 59503705
Text: And so now we talk about quantization the q and q Laura.\nQ stands for quantized quantized.\nLaura.\nAn...", - "Video: 59472505
Text: So the good news is that this is the very final video about data set curation.\nYou were probably fed...", - "Video: 59669217
Text: And welcome to the next part of visualizing the data.\nAnd just very quickly to show it to you in 3D....", - "Video: 59671221
Text: I gotta tell you, I don't like to toot my horn a whole lot, but I do think that I've done a great\njo...", - "Video: 59503703
Text: Well.\nHello there everybody.\nI am so grateful that you've made it through to the start of week seven...", - "Video: 59473201
Text: Well, before we do a postmortem on what happened, let's just quickly look at the standing the rankin...", - "Video: 60622463
Text: In this video, we're going to set up a full data science environment for Mac users.\nIn the next vide...", - "Video: 60619299
Text: Well, I hope you found that both educational and enjoyable.\nAs we went through and learned so much a...", - "Video: 59295607
Text: So to revisit then the solution that we built in the previous day and talk about the metrics.\nAs I s...", - "Video: 59297575
Text: Well, welcome to the final part on rag.\nAnd this is the session where you go from being a rag expert...", - "Video: 59507687
Text: It's time for action, everybody.\nWe've set up our colab.\nHere we are, week seven, day three.\nWe've g...", - "Video: 59671441
Text: And welcome once more to our favorite place to be Jupyter Lab, the Paradise for a data scientist exp...", - "Video: 59673431
Text: And here we have it.\nThe user interface is completed.\nThe extra notification came through on my phon...", - "Video: 59473137
Text: Let's get straight to it.\nSo the place where you can see everything that's going on and get knee dee...", - "Video: 59166421
Text: Welcome back to the radio day in the lab.\nMore to do.\nLet's keep going.\nWhere we left off is we had ...", - "Video: 59295599
Text: Welcome to the Jupyter Lab for day four.\nIt's going to look very familiar because it's actually I've...", - "Video: 59669631
Text: Here we are in our favorite place to be in JupyterLab, ready for some coding and a lot of coding tha...", - "Video: 59673663
Text: But wait, there's more.\nWe need to add some more to the user interface just to make it look more coo...", - "Video: 59506929
Text: And we return to the hugging face open LLM leaderboard.\nThe first place you go when selecting your b...", - "Video: 59504785
Text: So at this point we're going to talk about hyperparameters.\nAnd we're going to introduce three of th...", - "Video: 59505337
Text: So we're now going to look at four bit quantization, the rather remarkable effect of reducing the pr...", - "Video: 59271655
Text: So here we are on Hugging Face's main landing page at Hugging Face Core.\nA URL you know.\nWell, since...", - "Video: 59472883
Text: Okay, time to reveal the results.\nIt has run to completion.\nAnd here it is.\nSo a moment to pause.\nIt...", - "Video: 59673639
Text: And welcome now to the code for our user interface, which we will find in this Python module.\nPrice ...", - "Video: 59472463
Text: So last time we looked at a humble linear regression model with feature engineering, and now we say\n...", - "Video: 59297595
Text: So by the time you're watching this, hopefully you have played yourself with vectors.\nYou've created...", - "Video: 60619149
Text: So we're going to start our exploration into the world of frontier models by playing with the famous...", - "Video: 59297735
Text: And at last the time has come to see rag in action.\nAfter all of this talk, and here we are.\nWe're i...", - "Video: 60616407
Text: And now over to my Mac people.\nAnd I have news for you.\nIt's exactly the same thing.\nYou go to a fav...", - "Video: 59170235
Text: So here we are in Google Colab for our first collaborative session on the cloud using a GPU box.\nOn ...", - "Video: 59472067
Text: So we've covered steps 1 to 4 of the five step strategy.\nAnd that brings us to step five, which is p...", - "Video: 59472011
Text: Welcome everybody.\nSo in the past I've said quite a few times, I am excited to start this this week ...", - "Video: 59295553
Text: Welcome back.\nIn the last part, we gave our GPT four and clawed the challenge of converting a simple...", - "Video: 59297773
Text: Well, I hope you're eager with anticipation for this session in JupyterLab as we finally get to see\n...", - "Video: 59295583
Text: And here we are back in JupyterLab.\nIt's been a minute.\nWe've been working in Colab for last week, a...", - "Video: 59507329
Text: Okay.\nIt's moment of truth time.\nI have just taken our class tester.\nYou remember this class?\nUh, it...", - "Video: 59295429
Text: Continuing our investigation of benchmarks, and this will become more real when we actually see some...", - "Video: 60595637
Text: Here we are back in the Colab, which has been running overnight for me and probably for you too, I\nh...", - "Video: 59668027
Text: And so here we are at the home page for modal.\nAt modal.com spelt model not not model which is confu...", - "Video: 59295527
Text: I'm so happy to welcome you to week four, day four on our journey to LLM Engineering and Mastery.\nHe...", - "Video: 59295377
Text: Just before we go on to some of the more advanced metrics, I want to mention for a second something\n...", - "Video: 59666211
Text: So before we try our new model and one more recap on the models so far and keep notes of this so we\n...", - "Video: 59170107
Text: And once again, it's that moment when you take a pause and congratulate yourself on another day of\ns...", - "Video: 60616833
Text: So I realized that day one of week one has been a pretty long day, and I assure you that the other,\n...", - "Video: 59472413
Text: Wonderful.\nWhere we left off is we had just created the Get Features function, which builds our feat...", - "Video: 59297561
Text: And would you believe at this point you're 55% of the way along the journey?\nUh, it's been a while s...", - "Video: 59669211
Text: Well, we took on a lot today and we seem to have been successful.\nThese red icons that you see on th...", - "Video: 59166981
Text: Welcome to week two, day five.\nThe last day of week two where a lot is coming together.\nI am so grat...", - "Video: 60619227
Text: And now let's move to Claude from anthropic, my favorite model and typically the favorite model of\nm...", - "Video: 60620395
Text: Welcome back to Jupyter Lab, where I want to show you the assignment, the homework exercise for you\n...", - "Video: 59665127
Text: Well hi there everybody.\nI'm not going to give you my usual song and dance about how excited you are...", - "Video: 59668923
Text: Well, welcome back to Jupyter Lab for what will be an epic finale to our time in this platform.\nAnd ...", - "Video: 59504887
Text: Well, here we are again in Google Colab.\nIt's been a minute since we were here, and welcome back to ...", - "Video: 59170165
Text: Welcome, everybody to the last day of week three.\nWeek three.\nDay five.\nWe're here already wrapping ...", - "Video: 60617251
Text: Congratulations are definitely in order.\nYesterday was a mammoth first day on this course and you go...", - "Video: 59166951
Text: All right, back to the lab.\nBack to our project.\nTime to work with tools.\nI am in the week two folde...", - "Video: 60619619
Text: Well, day four was an information dense day.\nI do hope that you learned some something useful here, ...", - "Video: 60616663
Text: Well.\nHi there, this is time for PC people to get set up.\nSo all you Mac people out there, you don't...", - "Video: 59508175
Text: So I'm taking a moment now to explain that the training costs of optimizing a model for this course\n...", - "Video: 59670087
Text: And welcome to part four of day two of week eight.\nUh, there's a lot happening this week, and I have...", - "Video: 59506713
Text: Hi everyone.\nSo the reason I'm so fired up about week seven is that this is the time when we actuall...", - "Video: 60620169
Text: Hopefully you found this super satisfying to be able to have this nice business result and have it c...", - "Video: 59295435
Text: Well, just before we wrap up, let me introduce this week's challenge and talk about what we're going...", - "Video: 59297609
Text: Last week, we worked with models that were able to speed up code by a factor of 60,000 times, which\n...", - "Video: 59507489
Text: Continuing our adventure through hyperparameters for training.\nThe next one is pretty crucial and it...", - "Video: 59295549
Text: And welcome back to our challenge again.\nAnd this time we are working with our beautiful prototype.\n...", - "Video: 59665129
Text: And now let me make this real for you by showing you some, some diagrams, particularly now looking\na...", - "Video: 59169991
Text: Okay, so that was your introduction to Hugging Face.\nAnd now I'm going to turn to a different resour...", - "Video: 59472027
Text: And now the time has come to curate our data set.\nAnd the way we're going to do this is we're going ...", - "Video: 59472307
Text: Welcome to week six.\nDay two a day.\nWhen we get back into the data, we look back in anger at our dat...", - "Video: 59508289
Text: So here we are now, back in the Colab, in the same one that we kicked off in the previous day.\nIt's ...", - "Video: 59472333
Text: Thank you for putting up with me during my foray into traditional machine learning.\nI think it was u...", - "Video: 59295431
Text: Now I want to take a quick moment to give you a flyby of five different ways that llms are used comm...", - "Video: 59673449
Text: Well, I have to tell you that I'm a little bit sad.\nThis is the beginning of the beginning of the en...", - "Video: 59669389
Text: Well.\nHi there.\nSo you've made it to day two of week eight, and I am super grateful that you've been...", - "Video: 59170057
Text: And so at the beginning of this week, we started by talking about hugging face pipelines.\nAnd you us...", - "Video: 59166949
Text: Welcome back to making chatbots.\nLet's keep going.\nSo for the next part we're going to beef up the s...", - "Video: 59473019
Text: Welcome back to an action packed time of of training.\nSo now, after waiting about five minutes when ...", - "Video: 59297585
Text: Before we move on, let me show you one more time this fabulous slide that describes the simple three...", - "Video: 59170255
Text: And welcome back to us continuing our journey through the model class in Hugging Face Transformers l...", - "Video: 60614589
Text: So we're now going to run a large language model directly on your box using a platform called llama,...", - "Video: 59297601
Text: I'm not going to lie, at this point you have every reason to be impatient with me.\nWe've been yammer...", - "Video: 60616629
Text: And welcome back to team PC and Team Mac as we come back together again for a quick video.\nIn this o...", - "Video: 59297749
Text: It's always welcome back to JupyterLab, my favorite place to be.\nAnd now we are, of course in the we...", - "Video: 59170135
Text: Welcome.\nIt's week three.\nIt's day four.\nWe are back on the adventure in open source land, back inve...", - "Video: 59472017
Text: And this is the first time that we'll be coding against our big project of the course.\nWelcome to Ju...", - "Video: 59507017
Text: Welcome to Colab.\nWelcome to the week seven day two Colab.\nAnd just before we try our base model, we...", - "Video: 60619883
Text: And now we've arrived at an exciting moment in our first week.\nThe conclusion of the first week is w...", - "Video: 59508297
Text: What more is there to say, really?\nTomorrow is the day for results.\nA day that very excited indeed a...", - "Video: 60619247
Text: We're going to spend a little bit more time with GPT just to try out a few more interesting things.\n...", - "Video: 59504769
Text: Without further ado, we're going to get stuck into it.\nTalking about Laura.\nLow rank adaptation.\nAnd...", - "Video: 59170233
Text: Welcome back to our continued exploits with Tokenizers.\nWhat we're now going to look at is what's ca...", - "Video: 59671231
Text: And here we are back in the Jupyter Lab for the fast approaching conclusion with a really great proj...", - "Video: 60620397
Text: Well, that's a fantastic result to have now arrived towards the end of week one and having completed...", - "Video: 59170093
Text: I'm delighted to see you again.\nAs we get started with day three of week three of our adventure and ...", - "Video: 59473089
Text: Welcome back.\nSo hopefully you are still impressed by the GPT four mini results.\nThe frontier model ...", - "Video: 60395261
Text: Let's keep going with our project to equip our LM with a tool.\nWe just created this piece of code to...", - "Video: 60617259
Text: I'm excited to introduce you to your first exercise, and I'm looking forward to seeing what you make...", - "Video: 59507313
Text: And it's this time again, when we look at the podium of how our models are performing across the boa...", - "Video: 60619721
Text: Now it's time to talk for a minute about tokens.\nTokens are the individual units which get passed in...", - "Video: 59295451
Text: I know that everybody.\nIt seems like just the other day that we were embarking on our quest together...", - "Video: 59166919
Text: And with that, it concludes our session on tools.\nAnd at this point, you are probably an expert on t...", - "Video: 59295441
Text: Okay, so welcome to our leaderboard fast as we go through a ton of essential leaderboards for your\nc...", - "Video: 59295541
Text: And welcome back.\nYou've just seen GPT four zero spectacularly failed to work on our hard Python con...", - "Video: 59473101
Text: Welcome back.\nSo about ten minutes later, maybe 15 minutes later, the run has completed.\nAnd how do ...", - "Video: 59507423
Text: So you may remember eons ago when we were building our data set.\nAt the end of that, we uploaded our...", - "Video: 59295545
Text: I really hope you've enjoyed this week.\nWe've got tons done.\nWe've experimented with all sorts of ne...", - "Video: 59472503
Text: Welcome back to Jupyter Lab.\nLast time, we looked at some silly models for predicting the price of p...", - "Video: 60614591
Text: The mantra of this course is that the best way to learn is by doing, and we will be doing stuff toge...", - "Video: 59473021
Text: Welcome to our favorite place to be to JupyterLab.\nHere we are again now in day three.\nIn week six.\n...", - "Video: 60617255
Text: I'm now going to talk for a bit about models.\nA term you often hear is the term frontier models, whi...", - "Video: 59667829
Text: Well.\nHello there.\nLook, I know what you're thinking.\nYou're thinking I peaked too early.\nLast week ...", - "Video: 59505329
Text: Welcome back.\nYou may, like me, have just gone off and got a coffee while things loaded back up agai...", - "Video: 59669049
Text: So what you just saw was an ephemeral app, as it's called, which means just a temporary app that you...", - "Video: 60619439
Text: This now brings us to an extremely important property of LMS called the context window that I want t...", - "Video: 59668181
Text: And so it gives me great pleasure to introduce to you the project that I've lined up for you this we...", - "Video: 59472441
Text: Welcome back.\nSo we've been doing the thoroughly distasteful, unsavory work of feature engineering.\n...", - "Video: 59507785
Text: Well, I'm sure you're fed up of me saying that the moment of truth has arrived, but it really has.\nT...", - "Video: 59295587
Text: When I left you, we had just created this simple user interface for converting from Python to C plus...", - "Video: 59166465
Text: Welcome back to the JupyterLab on Gradio day, so you'll remember where we left off.\nWe'd written two...", - "Video: 59473071
Text: Hey, gang.\nLook, I know what you're thinking.\nThis week was supposed to be training week.\nI set it a...", - "Video: 59295423
Text: Welcome to day two of week four, when we get more into leaderboards so that by the end of this, you'...", - "Video: 59297723
Text: So I know what you're thinking.\nYou're thinking, what's going on here?\nWe're on day five.\nWe're on d...", - "Video: 59166947
Text: Well, thank you for coming along for week two, day four.\nWe have lots of good stuff in store today.\n...", - "Video: 59666831
Text: Take one more moment to look at this very nice diagram that lays it all out, and we will move on.\nNo...", - "Video: 59295493
Text: And welcome to week four, day three.\nAs we are about to embark upon another business project which w...", - "Video: 60616855
Text: Now I know what you're thinking.\nWe've been building environments for so long.\nAre we not done yet?\n...", - "Video: 59506611
Text: So in a future day, I'm going to be training, fine tuning a model and creating a fine tuned model.\nA...", - "Video: 60616493
Text: I'll just take a quick moment to introduce myself to convince you that I am actually qualified to be...", - "Video: 59166317
Text: And welcome to week two, day two, as we continue our adventure into the realm of LMS.\nUh, so today, ...", - "Video: 59295439
Text: So I'm aware that there's a big risk that you are getting fed up of leaderboards, because we've done...", - "Video: 59472421
Text: And welcome back to our final time in Jupyter Lab with traditional machine learning.\nIt's almost ove...", - "Video: 59472137
Text: Well, well, well, it's been a long day, but congratulations, you've made it.\nWe've gone through and ...", - "Video: 59297693
Text: So at the end of each week, it's customary for me to give you a challenge, an assignment to do on\nyo...", - "Video: 60620143
Text: So we're going to make a call to GPT four.\nOh, that's going to ask it to look through a set of links...", - "Video: 60619501
Text: I welcome to day four of our time together.\nThis is a very important day.\nToday we're going to be lo...", - "Video: 59297743
Text: And welcome to day five.\nFor reals.\nWe're actually in the proper Jupyter notebook.\nThis time we're i...", - "Video: 59166847
Text: Well, they say that time flies when you're having fun, and it certainly feels like time is flying.\nU...", - "Video: 59170223
Text: Well.\nFantastic.\nIt's coming up to the end of the week, and that means it's coming up to a challenge...", - "Video: 59170037
Text: So how does it feel to be 30% of the way down the journey to being a proficient LLM engineer?\nTake a...", - "Video: 59295609
Text: You must be feeling absolutely exhausted at this point.\nAnd if you are, that is okay.\nYou have done ...", - "Video: 60619281
Text: Well, I'm delighted to welcome you to day three of our eight week journey together.\nAnd today we're ...", - "Video: 59472429
Text: And continuing on our strategy to solve commercial problems with LMS, we get to step four, which is\n...", - "Video: 59167009
Text: Welcome back.\nIt's time to make our full agent framework.\nI'm super excited about this.\nIt's pulling...", - "Video: 59166481
Text: And here, once more we find ourselves in our favorite place, the Jupyter Lab.\nReady to go with weeks...", - "Video: 59670933
Text: I realized my enthusiasm for this project is a bit insane, but I have to tell you that I am very sat...", - "Video: 59670073
Text: Okay, it's time to complete the Rag workflow in our Jupyter Lab on day 2.3.\nWe've got this function ...", - "Video: 59673595
Text: That concludes a mammoth project.\nThree weeks in the making.\nIn the course of those three weeks, sta...", - "Video: 59297603
Text: And I'm delighted to welcome you back to LM engineering on the day that we turn to vectors.\nFinally,...", - "Video: 60614541
Text: I am delighted to welcome you to the first day of our eight weeks together as you join me on this ad...", - "Video: 59667357
Text: Let's now see our results side by side.\nWe started our journey with a constant model that was at $1....", - "Video: 59667841
Text: Now, look, I know that I went through that very fast, but maybe, uh, you're still, uh, blinking\nat t...", - "Video: 59472007
Text: So I hope you enjoyed our first bash at Scrubbing Data, and that you are now looking through the cod...", - "Video: 59507435
Text: So I'm now going to talk about five important hyperparameters for the training process.\nAnd some of ...", - "Video: 59509185
Text: So this is where I left you looking at this satisfying chart on training loss and seeing the trainin...", - "Video: 59473159
Text: Welcome to Jupyter Lab and welcome to our experiments at the frontier.\nSo we are going to put our fr...", - "Video: 60619447
Text: I want to take a moment to talk about something that's very fundamental to an LLM, which is the numb...", - "Video: 59166353
Text: Well, congratulations on leveling up yet again.\nYou've got some real hard skills that you've added t...", - "Video: 60619123
Text: So what we're now going to do is we're going to look at some models in practice and start to compare...", - "Video: 59295363
Text: Well, another congratulations moment.\nYou have 40% on the way to being an LM engineer at a high leve...", - "Video: 60619289
Text: And now we'll go a bit faster through the other models.\nWe'll start with Google's Gemini.\nI have the...", - "Video: 59472873
Text: So it's quite an adventure that we had at the frontier of what's capable with Llms today, solving a\n...", - "Video: 60619429
Text: Let me talk about some other phenomena that have happened over the last few years.\nOne of them has b...", - "Video: 59295601
Text: So it's time to continue our journey into the world of open source and understand which models we sh...", - "Video: 59170025
Text: And a massive welcome back one more time to LM engineering.\nWe are in week three, day two and we are...", - "Video: 59166443
Text: And welcome back everybody.\nWelcome to week two day three.\nIt's a continuation of our enjoyment of r...", - "Video: 60620025
Text: And welcome back to Jupyter Lab, one of my very favorite places to be.\nWhen Jupyter Lab sprung up on...", - "Video: 59170055
Text: Welcome to the world of Google Colab.\nYou may already be very familiar with Google Colab, even if so..." - ], - "type": "scatter3d", - "x": [ - 1.7087736, - -23.05743, - -28.06106, - 53.49951, - -25.6022, - -38.486794, - 19.744888, - 40.88814, - 16.016825, - -7.811325, - -42.27272, - 12.64262, - -0.9483807, - 6.6331143, - 30.734392, - -11.433903, - 4.802981, - -38.28026, - -65.32751, - -70.74078, - 17.332169, - -52.152122, - -59.27239, - 33.891144, - 31.012686, - -3.4545732, - -64.21017, - 7.6461124, - 20.45596, - 17.49392, - 58.194645, - 23.335354, - -1.4417802, - -31.790943, - 16.829693, - 9.377639, - -35.48943, - 21.703556, - -32.651184, - 34.197903, - 8.165246, - -16.031826, - -15.671898, - 24.559917, - 12.978084, - -2.6771584, - -40.945656, - 36.925316, - 25.997892, - -6.9610124, - -10.469476, - -64.6909, - 1.2878436, - 11.471338, - -9.606986, - -36.224865, - 52.36488, - 18.624384, - -75.35086, - 7.6970425, - -28.021814, - 4.318845, - 21.450777, - 6.252253, - 8.603412, - -42.26961, - -4.845308, - 18.71574, - 26.781273, - -0.79253143, - -14.978188, - 18.245869, - -12.82021, - -46.9319, - -74.53214, - 65.01824, - -55.562347, - -16.433084, - -34.34535, - 37.855953, - 23.34828, - 58.13494, - -62.33339, - 23.672892, - 3.081372, - 54.94866, - 26.811195, - -17.7189, - 8.902483, - 43.160995, - -9.250307, - -15.630141, - 17.22394, - -55.25729, - -83.02552, - 30.956335, - 9.167712, - 44.13426, - -11.525453, - 17.694338, - -5.0039816, - 9.241007, - -22.265665, - 1.0213552, - -1.9952722, - 31.26171, - -44.436382, - 8.186024, - -38.107677, - 20.091564, - -47.018497, - 3.9196463, - -46.056137, - 29.834492, - 51.38648, - 9.7722, - -39.721962, - -11.258467, - 38.1706, - 25.899416, - -22.391533, - 64.70646, - 8.984558, - -8.005773, - -22.550919, - 29.339518, - 38.68547, - -58.67559, - -38.17486, - -21.293037, - -59.715446, - -32.520184, - -55.803455, - -9.595691, - 4.706987, - 1.6881931, - -37.37762, - 26.374004, - 76.44756, - 1.4116235, - -8.510549, - -13.362774, - 
-50.184566, - -10.902527, - -4.4753523, - -6.0763307, - -7.691084, - 0.769521, - 23.278786, - 37.985294, - 11.939553, - 39.230835, - 59.653934, - 43.122715, - -8.87973, - 4.4371753, - -48.39784, - -10.907453, - -26.853792, - 35.47057, - 6.833131, - 59.59064, - 19.819576, - 38.58615, - -16.264578, - -53.5018, - 10.727256, - 42.230526, - 17.628677, - -24.103918, - 52.2294, - -77.18268, - 3.6058846, - 19.204115, - -4.63787, - -4.450328, - -2.8182654, - -46.7583, - 17.780125, - 57.05202, - -20.470179, - -23.03723, - -15.013553, - -61.297047, - 53.65074, - -63.843815, - 36.721672, - 1.2968292, - -1.146437, - 30.53313, - 47.650024, - -35.42971, - 13.790592, - -29.44714, - -7.0857954, - -31.83992, - 4.395385, - -71.52093, - -38.636032, - -10.17397, - 13.551749, - 26.199244, - 32.304344, - 37.940987, - -19.058989, - 35.280716, - 28.176294, - 63.618996, - 50.98304, - 70.33112, - -23.338556, - 55.944035, - 21.928713, - -24.126383, - 20.637466, - -27.234331, - -34.206 - ], - "y": [ - -4.252548, - -43.394333, - -15.430764, - 3.913298, - 4.4845552, - -33.13936, - -22.152138, - 26.412823, - 30.148039, - 59.570904, - 48.391426, - -12.528983, - -30.37183, - 60.731644, - 27.484875, - 14.465288, - -69.40243, - 0.97867054, - -5.723522, - 2.7253983, - 7.151598, - -40.23502, - -24.437897, - 44.777378, - 2.2172062, - -15.407989, - -9.227094, - -35.85389, - 19.15898, - 37.333565, - -32.59874, - 12.68407, - -73.58059, - -6.1550703, - 54.37412, - -45.11716, - 24.515192, - -40.404133, - 7.894027, - 40.451534, - -45.810196, - 32.84157, - -29.400854, - -36.06799, - -26.307, - -34.34112, - 16.958231, - -4.426608, - -51.174706, - -10.152972, - -61.325924, - -40.93275, - 1.7414787, - 25.57757, - -6.57861, - 23.91178, - -12.384486, - -16.934166, - -12.787716, - 46.80957, - -40.928993, - -23.344496, - 57.658195, - -52.681698, - -29.147705, - -26.069113, - -37.631737, - 6.913289, - 1.0152185, - -22.000841, - 40.077843, - 68.951485, - -34.20306, - -10.534028, - -17.659843, - -15.2762165, - 
-31.812723, - 41.836502, - 44.55901, - 42.523884, - 21.308317, - -16.114529, - -24.19872, - -28.394356, - 72.398735, - -11.762284, - 39.155792, - 62.174786, - 0.33776075, - -9.822582, - 7.8490186, - 38.088703, - 3.910647, - -21.867012, - -9.80895, - 34.229267, - 6.49524, - -20.215645, - -23.823503, - 13.815909, - -10.719444, - 29.71537, - 27.11394, - 8.893772, - -42.861084, - 11.520209, - 31.976051, - -61.493744, - -10.253941, - 20.047174, - 7.7775283, - 16.061588, - -27.2339, - 9.338753, - -28.769846, - 4.966599, - 66.91598, - -58.99846, - 28.998318, - 35.759415, - -14.775799, - 15.561535, - 23.844439, - 13.185903, - 50.08121, - -53.279182, - 52.965717, - -9.916494, - 30.322853, - 38.71735, - -20.32845, - 33.65194, - -45.906616, - 37.194542, - -58.197693, - 68.45129, - 19.058973, - 30.657938, - -11.88528, - -3.2491598, - 67.76955, - -1.6988521, - 19.30631, - 52.74911, - -24.831163, - -32.65019, - -10.958649, - 0.82742673, - -21.389397, - 14.101158, - 2.1391983, - 6.576113, - -32.843567, - 60.250656, - -43.65164, - -10.533554, - -25.26452, - 50.190014, - -22.556831, - -1.8844366, - 17.620245, - 25.576294, - 53.109592, - -41.58676, - 8.641028, - -33.405598, - 39.503387, - -40.254204, - -46.44093, - 13.338996, - 9.385, - -31.900993, - -10.131737, - -5.4291334, - 9.878982, - -47.344704, - 9.33157, - 51.674915, - -31.377905, - -4.2005377, - -12.071655, - -3.6661708, - 37.244083, - 3.2858796, - -25.261751, - -48.280323, - 11.631785, - 32.637978, - 45.047813, - -23.116121, - -6.183171, - -23.86113, - 0.017425848, - 8.19719, - 22.400421, - -40.894783, - 29.179394, - -18.357765, - 43.00136, - -4.4837027, - 41.68122, - -36.107216, - 21.893982, - 34.812412, - -32.88127, - -17.11192, - -10.238356, - -1.9124643, - 21.319334, - -2.981173, - -1.3924571, - -41.355488, - -4.402796, - 45.275204, - -19.099257, - -28.038015, - 63.64564 - ], - "z": [ - -80.01053, - 6.952257, - 39.770596, - 16.702005, - 5.445383, - -15.32626, - -0.5249115, - 29.32656, - 36.423714, - -14.892507, - 
-8.28791, - 23.206917, - 57.858578, - -50.514557, - 64.206955, - 18.315903, - -0.5376158, - -15.617648, - -0.26207563, - -29.67441, - -71.75039, - 8.197639, - -14.2429905, - 43.300938, - 38.940685, - 68.43502, - -23.888317, - 48.1142, - 49.111935, - -61.746227, - 50.52906, - 32.868515, - -13.877641, - 7.967563, - -50.249985, - -49.565216, - -2.7943447, - -46.210426, - -54.42825, - -20.080788, - 15.767397, - -16.436441, - -31.49947, - 27.740046, - 31.81843, - -10.746491, - 68.19509, - -17.48625, - -12.825234, - -71.884796, - -16.59812, - 12.649029, - 70.53656, - 19.578947, - -38.99613, - 53.66051, - -4.5801187, - -19.734165, - -33.125164, - -28.618763, - 23.614397, - 77.43594, - -32.899113, - 46.40308, - 8.167681, - -2.8620894, - 74.148186, - -26.347952, - -74.90554, - -77.46363, - -28.790958, - 20.622337, - 64.1211, - 9.823497, - -7.1779866, - 10.737146, - -29.82501, - -67.2842, - -18.324295, - 4.6903768, - -0.48786125, - 44.55313, - -0.092902616, - 13.796377, - 27.559351, - -36.15748, - -36.40333, - -38.448048, - -5.13925, - -45.601143, - -32.268417, - 10.468017, - 16.537827, - 40.53047, - -1.0723572, - 52.555515, - 53.437595, - 12.123496, - 38.82272, - -48.952595, - 12.853546, - -30.875723, - 1.1754398, - 18.969849, - 28.55506, - 22.07326, - 57.09251, - -26.802582, - -10.858276, - -13.749718, - 36.731487, - 0.24828485, - -43.257736, - -55.938084, - 39.296425, - -53.445366, - -17.457575, - -2.9673405, - 0.0901862, - -49.77096, - -25.705078, - -13.610352, - 67.12242, - 35.858795, - 9.70463, - 20.889431, - -43.310135, - -42.36413, - -33.766987, - -57.633263, - -35.28791, - 64.09733, - -30.020395, - -4.3398356, - -11.234243, - -16.331018, - 22.371712, - 17.535992, - 25.282942, - -62.60095, - 0.4806009, - 70.375824, - 20.449402, - -2.0014663, - -32.865536, - 23.095472, - 27.847054, - -30.648935, - -56.0284, - -7.3562527, - 41.76956, - -24.99873, - 31.66762, - -29.629705, - -15.31758, - -9.096767, - 26.896555, - 44.854942, - 7.493553, - 4.068373, - 34.71581, - 
-46.640224, - -8.490605, - -48.098576, - 50.40989, - 35.75695, - -43.580498, - 43.48538, - 43.563065, - -47.500908, - -17.975456, - -4.8319664, - 43.81085, - -46.34204, - -4.954837, - -51.740547, - -68.37514, - 29.03883, - 56.43028, - -23.674625, - -9.0214, - 20.377613, - 37.052822, - 35.568115, - -8.854601, - -12.067132, - 49.907764, - 36.929363, - 16.495632, - -27.568445, - 7.129844, - -50.75493, - 69.71066, - 32.083324, - 56.811436, - -21.762302, - 54.430317, - 8.924631, - 33.06066, - -21.813917, - -3.1104941, - -21.70847, - -63.661057, - -45.242672, - -2.3619707, - -58.077934, - 52.1452, - -1.9540714, - -23.772692, - 14.296897, - -31.250578, - -1.1164938, - -8.393525, - -8.107033, - 41.62983, - 34.141304, - -26.842785 - ] - } - ], - "layout": { - "height": 700, - "margin": { - "b": 10, - "l": 10, - "r": 20, - "t": 40 - }, - "scene": { - "xaxis": { - "title": { - "text": "x" - } - }, - "yaxis": { - "title": { - "text": "y" - } - }, - "zaxis": { - "title": { - "text": "z" - } - } - }, - "template": { - "data": { - "bar": [ - { - "error_x": { - "color": "#2a3f5f" - }, - "error_y": { - "color": "#2a3f5f" - }, - "marker": { - "line": { - "color": "#E5ECF6", - "width": 0.5 - }, - "pattern": { - "fillmode": "overlay", - "size": 10, - "solidity": 0.2 - } - }, - "type": "bar" - } - ], - "barpolar": [ - { - "marker": { - "line": { - "color": "#E5ECF6", - "width": 0.5 - }, - "pattern": { - "fillmode": "overlay", - "size": 10, - "solidity": 0.2 - } - }, - "type": "barpolar" - } - ], - "carpet": [ - { - "aaxis": { - "endlinecolor": "#2a3f5f", - "gridcolor": "white", - "linecolor": "white", - "minorgridcolor": "white", - "startlinecolor": "#2a3f5f" - }, - "baxis": { - "endlinecolor": "#2a3f5f", - "gridcolor": "white", - "linecolor": "white", - "minorgridcolor": "white", - "startlinecolor": "#2a3f5f" - }, - "type": "carpet" - } - ], - "choropleth": [ - { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - }, - "type": "choropleth" - } - ], - "contour": [ - { - "colorbar": { 
- "outlinewidth": 0, - "ticks": "" - }, - "colorscale": [ - [ - 0, - "#0d0887" - ], - [ - 0.1111111111111111, - "#46039f" - ], - [ - 0.2222222222222222, - "#7201a8" - ], - [ - 0.3333333333333333, - "#9c179e" - ], - [ - 0.4444444444444444, - "#bd3786" - ], - [ - 0.5555555555555556, - "#d8576b" - ], - [ - 0.6666666666666666, - "#ed7953" - ], - [ - 0.7777777777777778, - "#fb9f3a" - ], - [ - 0.8888888888888888, - "#fdca26" - ], - [ - 1, - "#f0f921" - ] - ], - "type": "contour" - } - ], - "contourcarpet": [ - { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - }, - "type": "contourcarpet" - } - ], - "heatmap": [ - { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - }, - "colorscale": [ - [ - 0, - "#0d0887" - ], - [ - 0.1111111111111111, - "#46039f" - ], - [ - 0.2222222222222222, - "#7201a8" - ], - [ - 0.3333333333333333, - "#9c179e" - ], - [ - 0.4444444444444444, - "#bd3786" - ], - [ - 0.5555555555555556, - "#d8576b" - ], - [ - 0.6666666666666666, - "#ed7953" - ], - [ - 0.7777777777777778, - "#fb9f3a" - ], - [ - 0.8888888888888888, - "#fdca26" - ], - [ - 1, - "#f0f921" - ] - ], - "type": "heatmap" - } - ], - "heatmapgl": [ - { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - }, - "colorscale": [ - [ - 0, - "#0d0887" - ], - [ - 0.1111111111111111, - "#46039f" - ], - [ - 0.2222222222222222, - "#7201a8" - ], - [ - 0.3333333333333333, - "#9c179e" - ], - [ - 0.4444444444444444, - "#bd3786" - ], - [ - 0.5555555555555556, - "#d8576b" - ], - [ - 0.6666666666666666, - "#ed7953" - ], - [ - 0.7777777777777778, - "#fb9f3a" - ], - [ - 0.8888888888888888, - "#fdca26" - ], - [ - 1, - "#f0f921" - ] - ], - "type": "heatmapgl" - } - ], - "histogram": [ - { - "marker": { - "pattern": { - "fillmode": "overlay", - "size": 10, - "solidity": 0.2 - } - }, - "type": "histogram" - } - ], - "histogram2d": [ - { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - }, - "colorscale": [ - [ - 0, - "#0d0887" - ], - [ - 0.1111111111111111, - "#46039f" - ], - [ - 0.2222222222222222, - 
"#7201a8" - ], - [ - 0.3333333333333333, - "#9c179e" - ], - [ - 0.4444444444444444, - "#bd3786" - ], - [ - 0.5555555555555556, - "#d8576b" - ], - [ - 0.6666666666666666, - "#ed7953" - ], - [ - 0.7777777777777778, - "#fb9f3a" - ], - [ - 0.8888888888888888, - "#fdca26" - ], - [ - 1, - "#f0f921" - ] - ], - "type": "histogram2d" - } - ], - "histogram2dcontour": [ - { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - }, - "colorscale": [ - [ - 0, - "#0d0887" - ], - [ - 0.1111111111111111, - "#46039f" - ], - [ - 0.2222222222222222, - "#7201a8" - ], - [ - 0.3333333333333333, - "#9c179e" - ], - [ - 0.4444444444444444, - "#bd3786" - ], - [ - 0.5555555555555556, - "#d8576b" - ], - [ - 0.6666666666666666, - "#ed7953" - ], - [ - 0.7777777777777778, - "#fb9f3a" - ], - [ - 0.8888888888888888, - "#fdca26" - ], - [ - 1, - "#f0f921" - ] - ], - "type": "histogram2dcontour" - } - ], - "mesh3d": [ - { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - }, - "type": "mesh3d" - } - ], - "parcoords": [ - { - "line": { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - } - }, - "type": "parcoords" - } - ], - "pie": [ - { - "automargin": true, - "type": "pie" - } - ], - "scatter": [ - { - "fillpattern": { - "fillmode": "overlay", - "size": 10, - "solidity": 0.2 - }, - "type": "scatter" - } - ], - "scatter3d": [ - { - "line": { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - } - }, - "marker": { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - } - }, - "type": "scatter3d" - } - ], - "scattercarpet": [ - { - "marker": { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - } - }, - "type": "scattercarpet" - } - ], - "scattergeo": [ - { - "marker": { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - } - }, - "type": "scattergeo" - } - ], - "scattergl": [ - { - "marker": { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - } - }, - "type": "scattergl" - } - ], - "scattermapbox": [ - { - "marker": { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - } - }, - "type": 
"scattermapbox" - } - ], - "scatterpolar": [ - { - "marker": { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - } - }, - "type": "scatterpolar" - } - ], - "scatterpolargl": [ - { - "marker": { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - } - }, - "type": "scatterpolargl" - } - ], - "scatterternary": [ - { - "marker": { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - } - }, - "type": "scatterternary" - } - ], - "surface": [ - { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - }, - "colorscale": [ - [ - 0, - "#0d0887" - ], - [ - 0.1111111111111111, - "#46039f" - ], - [ - 0.2222222222222222, - "#7201a8" - ], - [ - 0.3333333333333333, - "#9c179e" - ], - [ - 0.4444444444444444, - "#bd3786" - ], - [ - 0.5555555555555556, - "#d8576b" - ], - [ - 0.6666666666666666, - "#ed7953" - ], - [ - 0.7777777777777778, - "#fb9f3a" - ], - [ - 0.8888888888888888, - "#fdca26" - ], - [ - 1, - "#f0f921" - ] - ], - "type": "surface" - } - ], - "table": [ - { - "cells": { - "fill": { - "color": "#EBF0F8" - }, - "line": { - "color": "white" - } - }, - "header": { - "fill": { - "color": "#C8D4E3" - }, - "line": { - "color": "white" - } - }, - "type": "table" - } - ] - }, - "layout": { - "annotationdefaults": { - "arrowcolor": "#2a3f5f", - "arrowhead": 0, - "arrowwidth": 1 - }, - "autotypenumbers": "strict", - "coloraxis": { - "colorbar": { - "outlinewidth": 0, - "ticks": "" - } - }, - "colorscale": { - "diverging": [ - [ - 0, - "#8e0152" - ], - [ - 0.1, - "#c51b7d" - ], - [ - 0.2, - "#de77ae" - ], - [ - 0.3, - "#f1b6da" - ], - [ - 0.4, - "#fde0ef" - ], - [ - 0.5, - "#f7f7f7" - ], - [ - 0.6, - "#e6f5d0" - ], - [ - 0.7, - "#b8e186" - ], - [ - 0.8, - "#7fbc41" - ], - [ - 0.9, - "#4d9221" - ], - [ - 1, - "#276419" - ] - ], - "sequential": [ - [ - 0, - "#0d0887" - ], - [ - 0.1111111111111111, - "#46039f" - ], - [ - 0.2222222222222222, - "#7201a8" - ], - [ - 0.3333333333333333, - "#9c179e" - ], - [ - 0.4444444444444444, - "#bd3786" - ], - [ - 0.5555555555555556, - "#d8576b" - ], 
- [ - 0.6666666666666666, - "#ed7953" - ], - [ - 0.7777777777777778, - "#fb9f3a" - ], - [ - 0.8888888888888888, - "#fdca26" - ], - [ - 1, - "#f0f921" - ] - ], - "sequentialminus": [ - [ - 0, - "#0d0887" - ], - [ - 0.1111111111111111, - "#46039f" - ], - [ - 0.2222222222222222, - "#7201a8" - ], - [ - 0.3333333333333333, - "#9c179e" - ], - [ - 0.4444444444444444, - "#bd3786" - ], - [ - 0.5555555555555556, - "#d8576b" - ], - [ - 0.6666666666666666, - "#ed7953" - ], - [ - 0.7777777777777778, - "#fb9f3a" - ], - [ - 0.8888888888888888, - "#fdca26" - ], - [ - 1, - "#f0f921" - ] - ] - }, - "colorway": [ - "#636efa", - "#EF553B", - "#00cc96", - "#ab63fa", - "#FFA15A", - "#19d3f3", - "#FF6692", - "#B6E880", - "#FF97FF", - "#FECB52" - ], - "font": { - "color": "#2a3f5f" - }, - "geo": { - "bgcolor": "white", - "lakecolor": "white", - "landcolor": "#E5ECF6", - "showlakes": true, - "showland": true, - "subunitcolor": "white" - }, - "hoverlabel": { - "align": "left" - }, - "hovermode": "closest", - "mapbox": { - "style": "light" - }, - "paper_bgcolor": "white", - "plot_bgcolor": "#E5ECF6", - "polar": { - "angularaxis": { - "gridcolor": "white", - "linecolor": "white", - "ticks": "" - }, - "bgcolor": "#E5ECF6", - "radialaxis": { - "gridcolor": "white", - "linecolor": "white", - "ticks": "" - } - }, - "scene": { - "xaxis": { - "backgroundcolor": "#E5ECF6", - "gridcolor": "white", - "gridwidth": 2, - "linecolor": "white", - "showbackground": true, - "ticks": "", - "zerolinecolor": "white" - }, - "yaxis": { - "backgroundcolor": "#E5ECF6", - "gridcolor": "white", - "gridwidth": 2, - "linecolor": "white", - "showbackground": true, - "ticks": "", - "zerolinecolor": "white" - }, - "zaxis": { - "backgroundcolor": "#E5ECF6", - "gridcolor": "white", - "gridwidth": 2, - "linecolor": "white", - "showbackground": true, - "ticks": "", - "zerolinecolor": "white" - } - }, - "shapedefaults": { - "line": { - "color": "#2a3f5f" - } - }, - "ternary": { - "aaxis": { - "gridcolor": "white", - 
"linecolor": "white", - "ticks": "" - }, - "baxis": { - "gridcolor": "white", - "linecolor": "white", - "ticks": "" - }, - "bgcolor": "#E5ECF6", - "caxis": { - "gridcolor": "white", - "linecolor": "white", - "ticks": "" - } - }, - "title": { - "x": 0.05 - }, - "xaxis": { - "automargin": true, - "gridcolor": "white", - "linecolor": "white", - "ticks": "", - "title": { - "standoff": 15 - }, - "zerolinecolor": "white", - "zerolinewidth": 2 - }, - "yaxis": { - "automargin": true, - "gridcolor": "white", - "linecolor": "white", - "ticks": "", - "title": { - "standoff": 15 - }, - "zerolinecolor": "white", - "zerolinewidth": 2 - } - } - }, - "title": { - "text": "3D Chroma Vector Store Visualization" - }, - "width": 900 - } - }, - "image/png": "iVBORw0KGgoAAAANSUhEUgAABPAAAAK8CAYAAABhiUEuAAAgAElEQVR4XuydB5wdVfmGT3Y3u5ueEEJTREGwACKWv11QxEIRFKWo2BVFEVFUqjQpKgiiomKhqHRQBERBUBR7QaSooCAqEBJC6vZN8j/v3JxlMpl758zcaZt9zu+3JOROOec5Z+buvPN+3zdptW2GBgEIQAACEIAABCAAAQhAAAIQgAAEIAABCNSSwCQEvFrOC52CAAQgAAEIQAACEIAABCAAAQhAAAIQgEBAAAGPhQABCEAAAhCAAAQgAAEIQAACEIAABCAAgRoTQMCr8eTQNQhAAAIQgAAEIAABCEAAAhCAAAQgAAEIIOCxBiAAAQhAAAIQgAAEIAABCEAAAhCAAAQgUGMCCHg1nhy6BgEIQAACEIAABCAAAQhAAAIQgAAEIAABBDzWAAQgAAEIQAACEIAABCAAAQhAAAIQgAAEakwAAa/Gk0PXIAABCEAAAhCAAAQgAAEIQAACEIAABCCAgGfXwM23/tmcd+n15h//+q9ZuXKVedpWm5uDDny92elFO4ytkNe+5ZPmvw8tGPv/7u7JZqO5s81ztt/G7L/3K80Oz9zKazWtWrXa/PCGX5kf/PhW849//scMDA2befY4z9/haebtb36NefpTnzR2nNe/82iz5ZM2NWed+GGvY9d5o0E7zp33+Wgwzi+dfGjTru5+4BGmx7K96lsn1Xk46/Ttlt/cbi76/k/N3+2cLl663PT2dNt19CSz3+tfYfbY9UW1Gst7D/+8+fu9/zE/u/IsM7mrM7Zvx59+vvn+9b80N19xpnnrhz5j1/nW5pQj31f6OI793LfNrb//q/nZFWcF59Z1WEZfrrnh1+aIU841N1xyunnCJhuWPm5OCAEIQAACEIAABCAAAQhAAAIQCBOY8ALej3/2e/PxE84xb9zt5Wb3V73QjIyMmvMv+7H5w1/+br735WPM9s/Yckw4mD5tivnkwQcE/z80PGzu+8/D5oc/+VUg2nzonXubg+1PqzYyutJ85JizzS9+e7t59U7PMzu/eEczbWqveeB/j5jLr/m5mb9gkTnt6IPMa1/xf8Fh1icBT+P5zFnfMZdd8zNz8+Vnmg03mLUOqtvuvNe87cMnm2M+eqA5YO9dcrlSly7vMy/e80PmD9d/zUyd0pvLMaMHkdB1zGe/ZfZ+7UvNa3Z+vpk7Z5ZZtHhpIIDdcMsfzVEfeat56xt3DXb76S//ZL7+nWvM
5eceX0hffA76k5//wXzs+K8EwvCuL3/eOrtIbN3pjYealzx/O/OF4z9krrvpt8F8vWDHZ/gcPtdtogJeUX055ezvBWLmJw7eP+i/rsnf/ukuK76+OLhGaRCAAAQgAAEIQAACEIAABCAAgSoJTHgB74NHnGkGBofM+WcdMTYPff2D5kV7HhyILp/6UEOwk/NHTpxvfeGTa82XHHWnffki872rbrRix8FWwGmIb3HtrG9cYb7xvWvNyUe8NxB7wq1/YMi85+OfM/c98JC54eLTzayZ09Y7AU8Oxze+51hz+Af2M+/a/3XrIJJY8yMrFv3cOsNmTJ+ay3Xxy9/dYT7wqTMKFfB2e9unzCbzNjDfPvNT6/T5kKO/aMykSeZLn/lI8NkXvn6Z+c2f7q5UwJOQ/Mo3fdRs9/QtzVdPO2ydPjv32TdP/4R50fO2zWUesh4kKuBlPU7Sfvt/8ETzXOumdQJe0vZ8DgEIQAACEIAABCAAAQhAAAIQKJPAhBfw4mBLTHvRHgebt71pV/OJDzYcOc0EPH2msNs933FkEDbZLPRzYHDYvPwNh5jnPmsb87XPfjx2jh9+ZJGRuPKkJ2wUfC4H3tZPeYJ55UufY7787e+bh+Y/ajaeNycQGZxz6tobf2M+dfLXzYVnH2WOO/28oC/Xf++zRsKiwoKvvO6WYL/e3h6z43Zbm0Pfu89YmK7cTJ886Wvm4nOONad/7dLASagxSLjcZ/eXG4VR/vmOe0yXdSa9/jUvCYQ31/793/nmi9+8wrqU7g4E0I02nGN22+WFgRNx8uSupmt4/w+cYCSQXnPhqWtt0+DzkcDB9plPvSf47Fd/uNOc+91rzL33/c9yGTXPesZW5rCD3my2e9pTxvZdvqLf9uNKc+Mv/mhW9A2YLbfYzLz/bXsEfL5y3vfNORdcPbbty1+4QyBY+bD59Oe/be78+/3mPW/Z3Zxq3VmvfOmO5sRPvDt2XLvuf7h58hM3Md84/fCW1+47Dj3V/PH2f4xt44TMZXYMZ1ph7+Zf3RaE386ZNSMI3z7s/W8O/q7WrD+rV68OQnev+tEvrWtsvumx8/fyF+xgPv6BfWNdju7kZ3ztMus0vT5wQyqEO9zefdhnzYN2zfz4os9Z7XHSOmGrWidnfeNyy+ffpm9g0IqXc8yer36J+YANO+/omGSuuPaWYC3edPkXAmHTtfd/4nSjsV7y1U8H/+SzhlqF0GretT7impyDWktq373yRnPZD38WuOqmTukxT7Nh6mLrwt633fmdax3i0q8fZ+5/4OF1Qmh/9uvbAvfkPVaIVttmyyeadx+we+CmVVu4aEkQJv5Z66KVm/TmX/05WJMKiz/60APXCo9vuVD4EAIQgAAEIAABCEAAAhCAAAQgECGAgLcGiIQvCVESLs45/wfBA/j3vnKM2XyzhpjWSsDT52eee7n55kXXmV/+4Etmg9kN0SXcFJL7zo+eZk765LuDcF2fJgFv5cqV5smbb2Le99Y9TGdnp3VwXWr79s9AeNF5fvLz39twyHMCcU751ra2ooIEA/XnAhsKfLgVIHd+8bNtSOeywCkoh981F5xqBbfZdt9GKKVyikmc2sKKUJ8752LznStuMNs//Snmk9Z9qBx/LkT03M8fHoRVSgB77Vs+Ebjkjv/4O83sWdOtqPG/QPB42z67BiJhs3bldb8IxKiLrGgYzhuonIBHn/bNsX+X0PWuw04zr3rZc80h79kn4CAR89d/vNNc+c0Trci5cXAKbfPfhxZageRtZtON5pprbvy1Of/SHxv1VX2XiCkh70aby2zmjGlGYdA+bE4680Lz81//xWxmXZcHHbinZbPx2FqIjk1cxWx3K2AqH+KzbD7ELjtX0SaxUXOl8Fo5OadYUVX5/pRj7iEr3n76Y+8wT7f5FyWQnfCFC+x4NjAXW7FLIlqz/khQOvtbV5qPWEbKtScR+ETb9w67z+XfOKFpjrv/PPiIed1bP2U++r43BWvLNa3/V1tB
Mvzv4bxzo3YeXmFFKo3xw+96Q8BT14r694G372Xec8BuXgKe7xpqJeAtXdZnli5fsRbmT3/+vGCNX37uCYHY7dau1vIrbMi6woO//p0fml9bcfi67342uIYkmu6638fNXq95qfnwu98QrOvrb/rdWgKec3K+eY+dzYFW2JerUuvsqh/9wpxz6mGB4KrjvHSvQ4Lzit/rXvlCK+D1m/d8rCGEat3SIAABCEAAAhCAAAQgAAEIQAACWQgg4K2hJrHmQ0c1EuU/e9unBgn7Jdq4liTgyeEj0UUP6eFCFG5/55RTqO7zn/10r7mSgLd4yTKbSP8MK/Z0B/v8/ra/B6KVXHwve8H2YyJcWHCRm+2le304CNM99rC3j51LTjy5xdy2TsALh/T+7d4HzJved1xQUMOFD0vc3OFV77Ei0Rutu23PQMCTAKScchICXTv02C8FAugVVjhq1uRuVH613XZ5gTnh8HeNbSZ3mgSZH5z3meDfJHrIUSaRRSKXmsb1qv0+Zl5t3XXHWeHwz3fcaw485OR1crlJIFR46L577mzOu+T6wF3ocuD5slG+vot/cNM6QmPcuOSalBvx4u/fFAhEcnntYNfQi567rdnjVS8KBB3XFLL96GNLx0Jo3XxGw6+vtrkVjzr1G2Pnj+vP0PBIMM8vs4477e/aHX+7zygk9PPHfjDg3KzJaTd/4WPmR5axaxI7z/3utYF7zuUpDAt4KuSi/9f1sZd1Zbqm8GjlinvipvO8BTyfNZSmiIXC09X/b595RCBKqy1ZuiIYY/iavMc6Ot/w7mPMl085NBD11J732vdbAfyVYyG00SIWWp+L7bGututTYpya3I8SQeWYlWDsBDwxF3vX1C+Fz992wzeMit/QIAABCEAAAhCAAAQgAAEIQAACaQkg4K0hpmIHCtV85NHFQUGJe+//n/naaR9bq4hFXA48B/x7V/3UnHL2d80Pzz/ZbPXkJ6wzD9f+1Ia6fubrQZ4032IAEvA223iDtUJu/2XdRa9/x1Hm9E9/0Dp8XjAm4CmEVuG5anfY0E+Fqn7u2A8ErrBwU4ifxA2FGDoBTyGDLizVObAkrr1pj53Gdn3B7h8M/t+FFEsEufDyn5jb7/pnIGysWr3KSJyT6PPTS89ouQ5POOP8oDDCLVedHQiT/3lwgRVCPmmLPbzNhu++Ktj3ua95fzA+F07rDvjho77YmCNbBOICe/7PfeXiIGdeNAzUbR8V8HzZuIIbf7nxW0FYqE/rt+GkCimWe/CPf/2Huesf/w4ccMce9o4gJFktKuB9+5IfGYWzRsegcE/l1nMFPeL648YSFdN0nmC+drfztaYoQ1z/f2RdZp846avmO186KnArSpB69QGfMNtu8+S1Kh+HBTyJuRIHJeTJ8fli68h8jnV/hsOmfUNofdaQr4CnwjAHH3lWIAo71hqzHIOXXv0z8+Of/c6Gki+yVZ+HzGorQCuUV2vrDa97WYAmScDTetzTFrQ4/vB3roVSIei//uNd5tarvzQm4Cl8+d377za23aVX3xy4Im+56ostw5p91hjbQAACEIAABCAAAQhAAAIQgMDEJICAFzPvclTtd9DxQXighDG1JAeeBJZLf3iz+e21X42tWuncYnLE7b/XK71WW1wVWlW+3fPtR465q5wIJ+fa1k95YnDc31hB4b2Hfz7I96a8b+G2h91XQuTXP/fxMQEvvK8T8E47ygoWr37x2K5hQeh/Dy80e7/raLPVFk8I3HybWweS8uRJwLz7ngcSBTzn8nPCk0JAFY74cytwzLThixJddtjlPYFwprDhcFMo7QazZwZiiEJqv3rh1S0LVEQFPF82ms8f3fxb8+sffsVrruI2ksPssOO+Yv717wfNTy9rONqiAp7L4/bHH5875rLUseTSk1NRudrea/PwxfXHjUXhupMiIqOqKUsAldDbrGmbnW0xC7nQJGZJfFQhFRcq7fYLC3j6N+V1U949VdjVXMqJuYet4KxwbbnwfAQ8
3zXkI+Ddb68JiYoKgVXF33BTaO+l1h2rsG6Nc/r0KUFOyAMPOcVbwJODUutRhVfCeSB1HrluFf4td51z4IWFaG2DgJf5EmJHCEAAAhCAAAQgAAEIQAACEFhDYEILeBLqbr71TzbsbyOz7dOevNaiOPKUb5hbfvuXMQGnlYCnsEnlDVMBhXA12/ABh22448utIKOk/ip0EefqUqjfVTZHnMJXJR5mFfDu/Mf9VoBs7sB73g5PC4SdOPHPR8Bzotj13/vcWMENjdXlo0ty4GnbN79fAmmv+fYXPhXkH/u/HZ8RhGW69vzXHRSEhionWbQpv5vyAl5kQ1ZP/uJ3AsFw043nxl7UUQHPl00aAU+CkByAccU7fvrLPxmFFqvAxYuft906Ap7rXzMH3nE2L96+1ukW1x85/Pa1QrNckS+3OdiiTaG84SIScYA+f84l5rJrfmZ+8f0vmePPOM/cZsOSf3Lx58fCRLVPVMALH0chqtdbd5uq677yJc8xnz3mIFs4pZHnMFrEQqKZipGoiIXvGkoS8JRXcD/rNtU4z7WMo7kH5azbxeZRVGEJ1+SQVEhsGgeejrPHq+IdeL+77W+BoIyAx/cqBCAAAQhAAAIQgAAEIAABCBRFYEILeIIql5PEoAu+eOQYY+V429vmyFL4o0s830zAU0jhMZ/9lvnhDb8y3zrjk+aFz31m07lSRVTl6Drk3W+0Cf9fv9Z2Cj/94BFfCEJ3r73wtCC5flYBT4KicqPJkRTOgSfX02tsiKQS+r/DioRZBTxXOOF31301EBrVFAYrZ+C8uTaE1rrNkppyBp501oXmzBM+HAhc3/3y0UEhDtfed/jpQbGHaFVfnUdVT5VLzOV6Cwsx2l/HUy42hY86oej3P/pa4A7zZeMr4N36+zvMQZ88YyzUNTruL337KvO1C384FlotB56qlbo8gX/66z3m7R85Jchh95qd/29sd1d8Qds9Y+stYgU8icIvsUUT3rjby8yRh6ztPFOo9ZZP2nQtIS5uTuRekytTjkuFeb7vrbsHeQ7DLSzgSay8zYZNR0OzVQn5bisoqrqwnHmHHfflIJ+hc4UqX98rrNtPxUck4PmuoVYCnq7Tg4/8grn/P/PNZV8/3syaOW2tfiskeMdd32sOeMOrxvI5agNdr+IbFfD23fMVwbWhFs2Bp3yBj9pCMAqRd80V4thmy82DfHoIeElXPZ9DAAIQgAAEIAABCEAAAhCAQFYCE17Ac8UnFC6qHFd66L/yulsCEeKsEz9sdrUFE9QkYkis+uTBjQd8hXIqRPIK6za6+55/By6od+732pbzoNBQhVTefOufzYuet6153SteYGbPnG4eeHC+ueQHN9uKmn3myycfOlbkIquAp04oLPXbF//IHGGFnZe/4FlmgRWNTj37e0H+uKutCDHLVmTNKuC5wgsSepSz7p/3P2g++5WLArHmhl/80fzg258xT7ACmgTQZq2vf9DsvM+hQfjlLMsgLIxoH1eFdp/ddgoqu2o7iWWf/+ol5uMH7RtUu1VTyKfCOI859O1B0RGFvSoc1xX5UJVQiUASqCSEPfUpT/Bi4yvgab0ccvTZ5he/u92oQulLbWERzakKcvzyd38Nwjdfs/PzzRnHNYpMqFKvCqaca0OY59qQWoUzy5n24PyFQf62p1qGd/79vqCqq/qrUGe1Zv2REKYw4o/ZUFuFS8vhphyOyvumSr9RZ2ncfEhAVM49CVA3WfE1mk8wLOA50VQC8F62SIpEUYnOn7aMX/uK/7PVgA80EoqV0/CAvRvCmURTrY9bfnN74JSUgOe7hk60Iaq3/v6v5mdXNArMhPsi19/5ttLymcd/2FZfXjvvpNaLQpbltHvQ9ufLp3w0CM/+7pU3Brkar7DXuELZFVqrqrO7vPljQd8keKswy62/u2OtKrSqfixRWfvIITtqOX/LXl+qenzBF5VDcGsEvKzfQuwHAQhAAAIQgAAEIAABCEAAAokEJryAJ0IS6879
7jVGrqVe6+xSKKzyXb3Kht65JuFAiftdUyVKueSU/F8P9K7qZRJxCT6qSHvV9b+wwtN/jJxJcpS9yIZXvtueU84x19oR8HSe8y693oo5twQ5vyS0yB2onGqbb7ZRcIqsAp72VWVNFe5YZkXHZ9qiB5/68FvMVFuQ4n2fON0orFEiTVwxjzAfhVkq3FLuMSfIhT9XTravnP+DQCBVyLHcWyqcoJBS11SM4Au2CMRNNhRaouBTrOtM7kYnvEoUPcj2Sax32HarIKehDxtfAU/9kDCrvG+aV4m6S5atsPnsemyOwM3MHlYU3vf1O4+Fdv717n8FIq7che/c73VBDkHxkhh1869uC0QgCU+v3un5turvPkFFW7VW/VG1XFXADaq62nl++lZPMgdZBr7FUlyF5F1e9hxz9kkfWWcJR0No1c9v2vm/1wq3GrvCVyVSHvyOvcaqrEo4lfPwMVtFWYLguw/YzRY8+VdQKEZFU3zX0PmX/aSpgLf7gUeYf/93fuwlp+q/CufV58edfp4VRe8PBHhVZj7EVlP+7JcvMpfbOdvN5glU6LaEfFUrXrVqVVDgRQKsxNYbLjk9EFnVJLx+zYql/7BjUBi31v2H3rW3eeFzGq5bHHhJdz8+hwAEIAABCEAAAhCAAAQgAIGsBBDwspJjPwhAAAIQgAAEIAABCEAAAhCAAAQgAAEIlEAAAa8EyJwCAhCAAAQgAAEIQAACEIAABCAAAQhAAAJZCSDgZSXHfhCAAAQgAAEIQAACEIAABCAAAQhAAAIQKIEAAl4JkDkFBCAAAQhAAAIQgAAEIAABCEAAAhCAAASyEkDAy0qO/SAAAQhAAAIQgAAEIAABCEAAAhCAAAQgUAIBBLwSIHMKCEAAAhCAAAQgAAEIQAACEIAABCAAAQhkJYCAl5Uc+0EAAhCAAAQgAAEIQAACEIAABCAAAQhAoAQCCHglQOYUEIAABCAAAQhAAAIQgAAEIAABCEAAAhDISgABLys59oMABCAAAQhAAAIQgAAEIAABCEAAAhCAQAkEEPBKgMwpIAABCEAAAhCAAAQgAAEIQAACEIAABCCQlQACXlZy7AcBCEAAAhCAAAQgAAEIQAACEIAABCAAgRIIIOCVAJlTQAACEIAABCAAAQhAAAIQgAAEIAABCEAgKwEEvKzk2A8CEIAABCAAAQhAAAIQgAAEIAABCEAAAiUQQMArATKngAAEIAABCEAAAhCAAAQgAAEIQAACEIBAVgIIeFnJsR8EIAABCEAAAhCAAAQgAAEIQAACEIAABEoggIBXAmROAQEIQAACEIAABCAAAQhAAAIQgAAEIACBrAQQ8LKSYz8IQAACEIAABCAAAQhAAAIQgAAEIAABCJRAAAGvBMicAgIQgAAEIAABCEAAAhCAAAQgAAEIQAACWQkg4GUlx34QgAAEIAABCEAAAhCAAAQgAAEIQAACECiBAAJeCZA5BQQgAAEIQAACEIAABCAAAQhAAAIQgAAEshJAwMtKjv0gAAEIQAACEIAABCAAAQhAAAIQgAAEIFACAQS8EiBzCghAAAIQgAAEIAABCEAAAhCAAAQgAAEIZCWAgJeVHPtBAAIQgAAEIAABCEAAAhCAAAQgAAEIQKAEAgh4JUDmFBCAAAQgAAEIQAACEIAABCAAAQhAAAIQyEoAAS8rOfaDAAQgAAEIQAACEIAABCAAAQhAAAIQgEAJBBDwSoDMKSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJUAAl5WcuwHAQhAAAIQgAAEIAABCEAAAhCAAAQgAIESCCDglQCZU0AAAhCAAAQgAAEIQAACEIAABCAAAQhAICsBBLys5NgPAhCAAAQgAAEIQAACEIAABCAAAQhAAAIlEEDAKwEyp4AABCAAAQhAAAIQgAAEIAABCEAAAhCAQFYCCHhZybEfBCAAAQhAAAIQgAAEIAABCEAAAhCAAARKIICAVwJkTgEBCEAAAhCAAAQgAAEIQAACEIAABCAA
gawEEPCykmM/CEAAAhCAAAQgAAEIQAACEIAABCAAAQiUQAABrwTInAICEIAABCAAAQhAAAIQgAAEIAABCEAAAlkJIOBlJcd+EIAABCAAAQhAAAIQgAAEIAABCEAAAhAogQACXgmQOQUEIAABCEAAAhCAAAQgAAEIQAACEIAABLISQMDLSo79IAABCEAAAhCAAAQgAAEIQAACEIAABCBQAgEEvBIgcwoIQAACEIAABCAAAQhAAAIQgAAEIAABCGQlgICXlRz7QQACEIAABCAAAQhAAAIQgAAEIAABCECgBAIIeCVA5hQQgAAEIAABCEAAAhCAAAQgAAEIQAACEMhKAAEvKzn2gwAEIAABCEAAAhCAAAQgAAEIQAACEIBACQQQ8EqAzCkgAAEIQAACEIAABCAAAQhAAAIQgAAEIJCVAAJeVnLsBwEIQAACEIAABCAAAQhAAAIQgAAEIACBEggg4JUAmVNAAAIQgAAEIAABCEAAAhCAAAQgAAEIQCArAQS8rOTYDwIQgAAEIAABCEAAAhCAAAQgAAEIQAACJRBAwCsBMqeAAAQgAAEIQAACEIAABCAAAQhAAAIQgEBWAgh4WcmxHwQgAAEIQAACEIAABCAAAQhAAAIQgAAESiCAgFcCZE4BAQhAAAIQgAAEIAABCEAAAhCAAAQgAIGsBBDwspJjPwhAAAIQgAAEIAABCEAAAhCAAAQgAAEIlEAAAa8EyJwCAhCAAAQgAAEIQAACEIAABCAAAQhAAAJZCSDgZSXHfhCAAAQgAAEIQAACEIAABCAAAQhAAAIQKIEAAl4JkDkFBCAAAQhAAAIQgAAEIAABCEAAAhCAAASyEkDAy0qO/SAAAQhAAAIQgAAEIAABCEAAAhCAAAQgUAIBBLwSIHMKCEAAAhCAAAQgAAEIQAACEIAABCAAAQhkJYCAl5Uc+0EAAhCAAAQgAAEIQAACEIAABCAAAQhAoAQCCHglQOYUEIAABCAAAQhAAAIQgAAEIAABCEAAAhDISgABLys59oMABCAAAQhAAAIQgAAEIAABCEAAAhCAQAkEEPBKgMwpIAABCEAAAhCAAAQgAAEIQAACEIAABCCQlQACXlZy7AcBCEAAAhCAAAQgAAEIQAACEIAABCAAgRIIIOCVAJlTQAACEIAABCAAAQhAAAIQgAAEIAABCEAgKwEEvKzk2A8CEIAABCAAAQhAAAIQgAAEIAABCEAAAiUQQMArATKngAAEIAABCEAAAhCAAAQgAAEIQAACEIBAVgIIeFnJsR8EIAABCEAAAhCAAAQgAAEIQAACEIAABEoggIBXAmROAQEIQAACEIAABCAAAQhAAAIQgAAEIACBrAQQ8LKSYz8IQAACEIAABCAAAQhAAAIQgAAEIAABCJRAAAGvBMicAgIQgAAEIAABCEAAAhCAAAQgAAEIQAACWQkg4GUlx34QgAAEIAABCEAAAhCAAAQgAAEIQAACECiBAAJeCZA5BQQgAAEIQAACEIAABCAAAQhAAAIQgAAEshJAwMtKjv0gAAEIQAACEIAABCAAAQhAAAIQgAAEIFACAQS8EiBzCghAAAIQgAAEIAABCEAAAhCAAAQgAAEIZCWAgJeVHPtBAAIQgAAEIAABCEAAAhCAAAQgAAEIQKAEAgh4JUDmFBCAAAQgAAEIQAACEIAABCAAAQhAAAIQyEoAAS8rOfaDAAQgAAEIQAACEIAABCAAAQhAAAIQgKQ0u04AACAASURBVEAJBBDwSoDMKSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkJUAAl5WcuwHAQhAAAIQgAAEIAABCEAAAhCAAAQgAIESCCDglQCZU0AAAhCAAAQgAAEIQAACEIAABCAAAQhAICsBBLys5NgPAhCAAAQgAAEIQAACEIAABCAAAQhAAAIlEEDAKwEyp4AABCAAAQhAAAIQgAAEIAABCEAAAhCAQFYCCHhZybEfBCAAAQhAAAIQgAAEIAABCEAAAhCAAARKIICA
VwJkTgEBCEAAAhCAAAQgAAEIQAACEIAABCAAgawEEPCykmM/CEAAAhCAAAQgAAEIQAACEIAABCAAAQiUQAABrwTInAICEIAABCAAAQhUQWB05SozPLra9A+Nmkm2A73dnWaK/eno0P/RIAABCEAAAhCAAATGCwEEvPEyU/QTAhCAAAQgAAEIeBAYHl1lhkZWmcGhlWbECnhxrWdyh+me3GmCP7s6PI7KJhCAAAQgAAEIQAACVRJAwKuSPueGAAQgAAEIQAACORAYGllpBqxgJ+Fu5arVqY4oM96Uni7702m67P/gzkuFj40hAAEIQAACEIBAKQQQ8ErBzEkgAAEIQAACEIBAfgRWWZFu0qRJZsXAiOkbHDUpNbumHZGYN9k68gi1zW+uOBIEIAABCEAAAhDIgwACXh4UOQYEIAABCEAAAhAomEA4n92wddrNnj7Z9A+utDnu4sNkXXcUIjvVuutGrcqn/ZK2Dw+DUNuCJ5XDQwACEIAABCAAAU8CCHieoNgMAhCAAAQgAAEIlE1AobGDw1Z0s8JbNJ9dMwHPGvNMzxoXnZx02q/fhtd2WnudBLnJnR3B/w8ON3LkrfaMuCXUtuzZ53wQgAAEIAABCEDgcQIIeKwGCEAAAhCAAAQgUCMCEuwGh0cT89mFBTyJc91WnJPTTgLdkHXlSaDTT1Sgk8A31ea86+3usKJeh82Zp20l8o2mEvMIta3RoqErEIAABCAAAQis9wQQ8Nb7KWaAEIAABCAAAQjUmYALjZXYNmwdd7757CTgWe3N5qybFLjr5NKTsy5NiKzEPAl+cupJ/JMjL6hga/syutLTmmfhulDbKfY4XZ32oDQIQAACEIAABCAAgVwJIODlipODQQACEIAABCAAgWQCEtlG7M9AINq1zmEXPpry2Ulsk3uuywpv2r9vYDSVaNeqdzq+nHwS4lQkw7n4fEXBGVO6TIfdT/n2qGqbvA7YAgIQgAAEIAABCPgSQMDzJcV2EIAABCAAAQhAoA0CLp+dRLGVvjY7e75eK6hJtJOwpv0aIbYrzYypXV5FLLJ2Wa4+JxbKpTdo3YHOndcsb54EPLXlVlRUo6ptVvrsBwEIQAACEIAABNYmgIDHioAABCAAAQhAAAIFEFilqq+jEtyS89mFT+/y2Uk8UzEKl89OTr2w8OdbhTaPoSnUNuiPxMTJjVBbJySG+xQV8KLnpqptHrPBMSAAAQhAAAIQmIgEEPAm4qwzZghAAAIQgAAECiHg8tmpIESa0NhoEQoXuirxrpnbrUwBLwrr8VDeTtu/1WOhwBLoZC7sG2w48Fo1qtomEeJzCEAAAhCAAAQg8DgBBDxWAwQgAAEIQAACEGiDgPLDSYDrt6KVCx31OZyKPSjXnJxtcrilLUJRpYAXHp/G4dx5Pdad5/LyyaXXTHyM8iHU1mfFsA0EIAABCEAAAhOZAALeRJ59xg4BCEAAAhCAQCYCymc3YCu+KiecQkglpunv+rdWTc41VXtVPjuJW3LaSfBKU/HVHb8uAl54vOqTil9IkOvsUM6+RqitHIm+Yp6OR6htpmXJThCAAAQgAAEIrMcEEPDW48llaBCAAAQgAAEI5ENA+ewGrUA3bIU7iW7RGhTK/abKq1EBT8465bELF6HotyJfNJ9dll7WVcBzQqbGruIXGrtESznyXBGMNIIlobZZVgf7QAACEIAABCCwvhFAwFvfZpTxQAACEIAABCCQC4E0+ewk4Lncb75FKNrtZN0FvOj45D6U81Bhw3LpuTx/CkH2bYTa+pJiOwhAAAIQgAAE1jcCCHjr24wyHghAAAIQgAAEMhOQmCQXnRxycoz5Ngl4EqfUJOBp/0Cgsn8W1eoq4PUPWn4JopwYyZnX290RuPQGrbPRufMItS1qxXBcCEAAAhCAAATGMwEEvPE8e/QdAhCAAAQgAIG2CUTz2fke0FViVb62ydZdpuMs77fVZ1M4ynzPFbddHQW8OTO6Td9AOgYKtXVFMHptEQwJ
p8qbJwFU+QV922Zzp5ilfSNmig3X7bICYYfsejQIQAACEIAABCCwnhBAwFtPJpJhQAACEIAABCDgR8CFxkogUk67FBqRCRehkLgkoUnCncJC1dJUofXrbfOt6ijgzZ3Z3baI6YRRiXqrrR1PRT7kaGwljEoEnDer1yxYMhgAI9S23dXF/hCAAAQgAAEI1I0AAl7dZoT+QAACEIAABCCQOwGJdkFxhTVikO8JnDsscIhZl53cYXFFKKb1dgWiEQJe+wJeeG5c3rzA5WhDbcVewqvmIRxqq5DcuTN7xgS86PxS1dZ3xbMdBCAAAQhAAAJ1JYCAV9eZoV8QgAAEIAABCLRFQM44OeTS5rNzRShUOTXIz2YFoyEbFqs/m+Vnc2GbCHjdQRhrmiqzvpMsMXVqT1eQN6+zo8OG1zZCbfuHRq14OslsYMN3Fy4dSjwcVW0TEbEBBCAAAQhAAAI1JICAV8NJoUsQgAAEIAABCGQjoLoTy/uHA7ddmvxpXZ2TTI/NvybRTkKRRL/AaeeZz04CnsS+Zf0j2TqeYa+6htAuWTGSin2GoQdzJN5yRmrO3FwvXjGcSjwk1DYLffaBAAQgAAEIQKAKAgh4VVDnnBCAAAQgAAEI5EIgnM9ulc1JN7W300hA8mnhfHZy1rkqqL6iXfgcEvAUpul7bp/+JW1TRwFvo9m9ZtGyocIFvCgb8Z81dfJYPsOgArDCpT0FWHc8Qm2TVh2fQwACEIAABCBQFQEEvKrIc14IQAACEIAABDIRkCgzYn+i+ewkyM2Y2mUFpOGmx+21IptcW932T1eEIm2107iDVyXgDSonnHUL1qVVJeAF82pFPAmoCoHWHCvUNgiBtqHUTpxtFgIdx49Q27qsKvoBAQhAAAIQgIAIIOCxDiAAAQhAAAIQqD0B5bMbsGJVq9DYOAHP5bOToKNKsRL9GtVn04XYJgGqSsALCnNYLnVpVQl4zfi7IiRy1vXaEGkVv1DevLSibTjUVsfqssIgDQIQgAAEIAABCJRJAAGvTNqcCwIQgAAEIAABLwIKhx0eXW2FllHvfHbKYzdr2uTAhSWHnXKjScCTWOcKUaRxYHl1dM1GQThuivDdNMdutq1CaOso4C1cOti02Ece4447hk8Owri8ec7FmTbUNsi/tyZsWnNPgwAEIAABCEAAAkUTQMArmjDHhwAEIAABCEDAi4DLZ6eqohLd0jSJd9N7u6yI1mWLGDQqz6YpQpHmXHHbIuA1qMiBt2DJYLs4U+8/zc67XHJpqgBrziT0ylEnQU7rRUKvXHpphF6dVw7PGVO7rWCcuuvsAAEIQAACEIAABLwIIOB5YWIjCEAAAhCAAASKICDnU5CfzIonEk7StGgRCrmppvZ0VSIg+eTfSzM2n23r6MDbbO4U89CiAZ/u57rNjCldwfHSCHjhDsidp7WjvHmdHcqP2Ai1lZjsK+ZpPlxotgQ9Qm1znWIOBgEIQAACEJjwBBDwJvwSAAAEIAABCECgXAI++ezieiSRpce6phoFChr5zAKXXSif3SZzes38xeU7wBDwGjNWpYA3asOu88gH6EJtFRLt8ua5IhijK2254iZt7sxus7zfukdDlW8JtS333sLZIAABCEAAAuszAQS89Xl2GRsEIAABCECgBgSUz06VUhvFI1Ya+7/ezRWhUD47iSFDVhzRcfQT54yqSkBCwKtWwCvSjai5daKxRunWXzRvXlIBDxdq29vdZbq7JpkO/QMNAhCAAAQgAAEIeBJAwPMExWYQgAAEIAABCPgTaDefXY+tGNoIZ2wUofDNZ4cDz3+Oitiyqhx4RQp4YU5ajw0xr5E3b9AK0s6dN29Wr1m0bMiG3yYr1FS1LWL1cUwIQAACEIDA+k0AAW/9nl9GBwEIQAACECiNgJx2ykEmwS1LPjsnjKjDyj8WOPZC4Yg+A5GAVEUVVAk7EpEWLRv26WYu25QlWvl2VqGnErGqKGIxZ0a36dPaS7lefMcWt53G63LdKdRW
jrqlfSPBuvUR8cLHJNS2nZlgXwhAAAIQgMDEIICANzHmmVFCAAIQgAAECiHQENpGAxfSxjb/XJoCBr22+qcEEFUCleDhRLu04kd4YElhjIVAsAdFwGswmDuzpxIBLy7/XFFzHXdcjX3erJ7AKapwb61hFVWRmJ1WVCTUtsyZ41wQgAAEIACB8UMAAW/8zBU9hQAEIAABCFROwIXGxuWzU/jqI0sGm1btdPnsAteSzSsWV4Si3QFKyFmyYiS1A6rd81YhXtXNgVcFAzdvEs8eWz5c+ry780fHrrx5EqZViVbuOgl7uma05n2r2urYhNq2e2WyPwQgAAEIQGD9IYCAt/7MJSOBAAQgAAEIFEJAop0cds5R1Owkce63aBEKVwBAxSjSCBm+A6vKiVWFeIWA9/iqqMp56XrQ1TnJzJnebcO3h9ZZqgq1ndrTtSano9ymcq0qr+No6muAUFvfOwHbQQACEIAABNY/Agh469+cMiIIQAACEIBA2wSGbHL+tCGtTjxbZZW5KUGi/04j8SJNEYp2O46A1y7B7PtXEUbselu1gCfH3bQpXWaxdQG2aroe5D7ttWG2ypsnR54rgjG6Mrn4RfjYhNpmX6vsCQEIQAACEBiPBBDwxuOs0WcIQAACEIBAzgRUgGJ4VHnoGvns0uahk4AhR1iHVSi0r44hh1FaUaLdYVVRzEB9rqKAQ90ceHKhzZpWbiEPt16qqj7szj/FCnIKl1X4dpqm66ZRvKUz2M05VNPmzdO+Or8rqtFlw3ZpEIAABCAAAQisXwQQ8Nav+WQ0EIAABCAAAW8C4Xx2Eg7StDEn0RrxQU4iiXcrBkfNgM33VVWrUtSSC6zMCqxVjjVufiVGzZjaVWolXtePzeZOSVVAJe/1KQFP4a3L+tMJeOF+yMHoKjHrWIPWBevceWnCzV04r/LuqV+aFxoEIAABCEAAAuOfAALe+J9DRgABCEAAAhDwJiBnz4j9ScpnF3fAcBEKhcjqGI1iFg3H3sypk4OQwIkq4JXtAkPAe3yVls0+en3MsOGzassHRr2vxVYbSiB3zjxX8MU3pF3Vnaf0Ph7O60Jte2zIrlx6HfoHGgQgAAEIQAAC444AAt64mzI6DAEIQAACEEhHQPnsJKplCY0Nu4L0d4l1QZif/bNoESPdKBtbS0ixWqLps07AslvZLrA6Cng+eeDynpcqCohEx1CkeC0xLyheYUXzqdZRJ7HcCfBxobZJ4byE2ua9AjkeBCAAAQhAoBwCCHjlcOYsEIAABCAAgdIIhPPZSWyToJWmPZ6XqxF655w/SXm5plnXj8w9ebmQ0vTZbZu3EypNHxDwOszU3s7UeeDSMI7btg4CXpliqq7PbuukkxAnYU+hsrrO5X5VqG2aa6DLXrA9Vhgk1LbdVcj+EIAABCAAgeIJIOAVz5gzQAACEIAABAon4PLZqXCEXHJpm0QBuXskDEgEkCgg516aIhRJzp+0fcqyfRrxIsvxW+0z0QW8qua/yuq3bj1UWTxlak+Xded1mM6ODuvOy56LklDbvO8IHA8CEIAABCCQLwEEvHx5cjQIQAACEIBAaQTkiFO1yceWD6UW7eJybEm0c/nssgxCImAVIZThvuZRTCDL2LVPXYtYSOCabCvEjqxcnbq6cBoWVQl4VRbPcHzmzuw2S/tGUgneadj6bOsKy8y0lYB1X5AA74pgpBHi3bkItfWhzjYQgAAEIACB8ggg4JXHmjNBAAIQgAAE2iYQzWc3b1aPWbxi2Es4cEUo5LRT6N2QFQCDfHb2J02Vy2aDqIOQUpWIVEcBTyyUN82Jd5pzObQUEi2nZh5zHhVPJfosWZG9EmuWCyRwj1YQuhvuq8TbRcuGChVIfdm4e4KqQrtCGNrXXetJofBx5yHU1pc+20EAAhCAAASKI4CAVxxbjgwBCEAAAhBom4ALjW1Ue103n52cP8v7bdisFeNiH7yt80rVJyXayaEjh13gtGuyfTsdlkC4wYxus3DpUDuH
aWtfVeDstWMtW0RyAt7CpYO5C2PNgMTlXXOh0BJunGATLjji8htqPShnWp5iXlXuxypFWzc3EvDKnPtWF0mcmBguRiMhdzDkzksr5BJq29Ytip0hAAEIQAACmQkg4GVGx44QgAAEIACBYghItFPom6s02eosrUQcl8/OhdEVIdqF+1aHYgJVugDLdmG5uV9pQ2Nd/kJVKHVFDZKEmbzFPAlpcmqVXcSkDgJe2fkPW90TkvoSFz7vCtVo/aRthNqmJcb2EIAABCAAgWwEEPCycWMvCEAAAhCAQK4EFBqrh2g55OSM8m0q2jDJPpHLnSfXlUQ7PYS380Due+647ZLEg3aO7bPvRBHwuqyzcs70bjPZho9q7eSRv9CFWyrM1lcEDM9JVQVEqhIO3dgliM2b1WsWLBn0WaKFbpNWRFff5cjT3EsE1r3DvTjIIvgTalvo9HJwCEAAAhCY4AQQ8Cb4AmD4EIAABCBQHYGGyDYauO3SOl9cPrvpvV2BiKOH7kaYbfpj5Ulgkzm9Zv7i6oQMCVuzbBL/RcuG8xyW17GUe+yx5cOp59Lr4HYjFwYpocW1FYOjZsCGROfVnKCjnHLdXY0w276B0eDPJEdfVQJeVed1zNOKZnnNVdxxnLCbNYxdArheAshVJ2HPCbk+8x/tD6G2Rc40x4YABCAAgYlIAAFvIs46Y4YABCAAgUoIhPPZSWxL28J5rPR3iXWKeOu0otViKxzVoZUdRhodc5V5+JSPULn30oqxreYtXHhEf3c561RVNC58Os814EItJRh2dnTYvInW6TfYPH9iVUKazqvroM+KmVW0KkXj6HiVA3KKFfXzuB9o/qf2dFl3Xkcw/+0UQJFLUutI61bioKrk0iAAAQhAAAIQSEcAAS8dL7aGAAQgAAEIpCKgMLQR++OTzy7uwC5PmR5644pQVBkyGtdfiVhL+0a8quKmAum5cZVuqLwEvLWFs4ZQG1d4pGgBL4w83KewMyscZikhbVQhmDk6An2mvUwOcf3JUzTzGW+rbYrKB6j577HuPBWI6bVFceTIc7k1JcoltZlTJwfCthNZCbVNIsbnEIAABCAAgXUJIOCxKiAAAQhAAAI5E3D57OSyy+LGcpVEXREKHUcCYNyDcpWOs2YCXququDmjjj2cXIBV5CNLqgicNPaggq7NReYqyCZVC65KuAo7s8JinvotYWeiCXhFiWZJ6yXu87JckO7FguZczVU8bpY3r9VaJdQ2y0yzDwQgAAEITEQCCHgTcdYZMwQgAAEI5EpglXWWDI+qcES2fHbO3RQ4XNaIIGmKElSddy4Mc86M7iBnWpYE+HlNSlWFNLIIeGGxdsQ6mQKx1tPBVpWAF56nsJjXY51Z/TaMNUl4zGue3XHEoVVob97nix5vmg1ZlQhVdvXduHFVsSbCof0SdAdtURXnznN5E5UfcvGKYS9nLlVti16xHB8CEIAABMYrAQS88Tpz9BsCEIAABCol4PLZ9Q9ZscqGOKZt4dxmeugdsmG2WYtQ1EnAq0JAiLKvu4AXFu3k0Ewj1obHWgfW0f5oXaspZ5quDa1pnxDLtNdPePsswmk754vuW5brzafPEtAHrIg6mOGe5HP8pG2iodZyZCpvoxg9Yqv0JhVCiR6fUNsk4nwOAQhAAAITiQAC3kSabcYKAQhAAAJtEZDTTqGscvvowTRtU7J7uZQaSeEbuc2C0LM2H7aLrn6aZpxVFxRQXxVCu3BperEgzTjjtm3lPtR8y6mluZeIoeqx7VYMrqOAJ+eVHIQab6NwQVdQ/MD9e5aQ8qR5qTrvYp3mIY3TLYlru5+PVTTWOrBrP5wLNItDl1DbdmeE/SEAAQhAYLwTQMAb7zNI/yEAAQhAoFACymcnQUIPnBvO7DHzFw+mOt/juaIaVRflRgmcdvZ4ebWqHUjhcdTBjVRVJdxoKKdzWU634oXEjD4r/GbNixi3VuokHKl/zUJZdQ0on6M4OEeW3Hlp3VjNrpeq5tv1p07z
UDWLuDlyeTpV3EbrQCGy4dyJWhNZ1gKhtnl9g3AcCEAAAhAYLwQQ8MbLTNFPCEAAAhAohYBcdnLEDVvhTmKL/d+x5huq6ooR6GFVjiOJdhICiwolrDpsLjwxdUjoX5UjMRBy7FwbK9ZNta4j57KU266Iua+TcKQ14JP/0Ana4pOXmFe1aFUnAb2q8PFWN+e4Kr3h3IkKt5ZLU/fJrMIuobalfD1yEghAAAIQqJgAAl7FE8DpIQABCECgegK++eyahac5p5UKUKgQhYSJrHnNstCYOXVyIBT2WaGo6haIl1acWbJipLKulC2ouLxfzn2o0NgyCjnUTcBLyz0vMa+qqsNugVclGEcvMN2H5lqXcBUVmFtd7AodV9+W9cffE8aK+OjeYVMM6P7pimBkEb4Jta3s1suJIQABCECgYAIIeAUD5vAQgAAEIFBPAnLEyfEhscU3n11YoIgWoQhy2dkfFaPIEg7WDqU6hK26/kuUmTG1yyxaNtzOkNraN62QlPVkzmkp4VZz32lzHEq4860im/W8br/xLuCFxx8U9ujtNN1dnYEbSxzF1Odaqtp1VrUD0HFUjs0507tt/sehdpdWrvunfcHweNqBzqAf7t6aNe0Aoba5TicHgwAEIACBCgkg4FUIn1NDAAIQgEC5BFw+O7k7siTTl2Ai4a7DWkbkGinLaZVEqQ5hq2ERYda0yZUKeD6hnElMm30eriA7slLh0Y8LTWULamWfL4mZhFM5L7NcW+7YrvDBNFu1VHnShkcfLxrTTMzzDW1P6n/Wz6s+v+t3XKhq1jHluV87If6uGIrLmzdoX7w4d56PuBsdB6G2ec4sx4IABCAAgbIJIOCVTZzzQQACEIBAaQRcaGxQNMI++IXz2fl2IizYSLiTOLF4xXAhOc18+xTdrg6uN9enOoTxSdgalIOrzeq+YVFSBRhcTsNm4dFyQo6qUrE9dxltfRTwwtxcaGUjn+DjYl7YiaVt5s3qrTRstC4CXp2E/PA85hViHM6bJ3HX5VDMWhgmHGordjQIQAACEIBA3Qkg4NV9hugfBCAAAQikIiDRTg6NgUC0S1/pVQ+JymOnsEj9hPPZTbYhalOskLN4eXXhoXEw6hY6V7WgkYewJSFS8z/NhnXK6aNCFFpPrdxlE13AKzKUNCreuBBbzUeVed/qIFi7e0KdQunD9yndDx5ZMugVDu17s3dOTV2jEni1Dtw9P22orZvDpX3DwTUvt1+XFQhpEIAABCAAgboRQMCr24zQHwhAAAIQSE3A5bPL6sQI57PrsUnU9SDYcO2tLdjUyekWhqSH2Y1n95r5iwdTsytih6pzkmUV0sLrQH9XjkQVBvENCdV55fIsq5hIHkJlnvNfpIAXXe9Te7qs2NIRhNmqPWZF9bTCTR5j1zrZYEY98s4p15xeOJTlAPXhV9a9SfdmOWRdqK0TeMUjKdRWoccK2Q7n7STU1md22QYCEIAABMomgIBXNnHOBwEIQAACbRNYZVWS4VHlHxsN3Ha+Akv4xM5hJRFAf5dYF+Qza+HaK+thNAugqkWzcJ8l5Cxcmq/jJg2TNE6kqGjXTl7DNOdNM55m205UAS/MQ8KNOOgeIDHPOSXLEvPqJOrnHTqexxqtwh0cvreHQ237h0ZjxbykKrlUtc1jJXAMCEAAAhDIgwACXh4UOQYEIAABCBROIJzPTkJblvZ4dcOGayeLWFNEOFiWsUT3Kcv95NPXvHJe+ZwrbhsfIU05r4JwOSsAaT0Fee1sBeF2ms952zl+dN86CngLbKhkmU3XtNxTCmsPFzxQzjwJNnKjZRH4fccQVCK2a0nFO6puuu7qlp+z6rx8Lo9iUInWuqvlyAtSLITWRdoquVS1rXqlc34IQAACE5cAAt7EnXtGDgEIQKD2BCSojNifrPnsNMDgAdsKNQqvUiiVhBqF3I7aCqJZWh0fkjUOVQBd3m/ztLUpQmVhEt2n6r5INFAI3PKB0bW65gqSaD0MaV3Z8Ni8Cl3o
RHLyyK0TPW8eTOOOgYC35vqOEdBcSKVCbVeuaog2Cm1OCqdMO1dVC1Th/tZJxHf9SnK3peXd7vaPv8RpFK2QeC9Bbnn/SKZ7AaG27c4I+0MAAhCAQBoCCHhpaLEtBCAAAQgUTkDimtwRWUNjnePCuavCRSjycOJULU41m4A5Ng9XnxWs6iDgVd2XsKgSriKs+Xe5sfIWcjQvzYTDoi6augl4VYRx+whoTsxTJWFXubRZOGXauSpbtG3Vvyr4J/GqY14+12fn2NS6CMQ8+92j7x2JelnuD4TaJq0GPocABCAAgXYJIOC1S5D9IQABCECgLQLhfHazp3ebhxYNpD5eOI+Zch7JXRXks8v4INaqA2nDrVIPJuMOdXpQrlpYkqii6rFqTrRLqiCbEftauyHgTcl0/bbDXsx1zS+zDiqf5hxYqlyah5hXduGSZmPUi4t5s3pN2SHMScwl5uftdE06Z5rPXV5TVckNF0ZxayNrYST1gVDbNDPBthCAAAQg4EMAAc+HEttAAAIQgECuBFw+O7lgJKy4liY8VcnRp9hQSDnt9BCWJZ9dlkHVLSTMjaHs/Gut2FUharjE9RJm9HcJd6pMmofr0ned+LjBfI/ls13VQmm0j1U4wNpZ93mIeXWZgzpVww2vizqG9Yb7F1dkQ98n4G0fJQAAIABJREFUEoX13eKEXjnz9B2T1eFMqK3PHY1tIAABCEAgiQACXhIhPocABCAAgVwI6MEnCE+y4bFyN8S1JLdGOBxS+w8ON5x2WR+qsgxMOfWmWIeXkubXqaV1IhXZ93ZElTT9iqsgqyqkHfYJfMbULrNoWblzNNEFPIk1ZTvA8lprwb3Fuja7uzqDnHm+odZ1qfxa1/uSiv7MX1xuYZM09xCfa9aFYOuFke45bm3oe4xQ2zS02RYCEIAABNolgIDXLkH2hwAEIACBpgTS5rOLPozLCaEqoa4IhdxUTrQr01kVHqAcG7OmTS5dHEpaZuFqnEnbFv15kWKiy3HonHZxzsuq5shHDMiTfV3cX25MVQh4eYeOO/eVBGBVsh0etRWKBxsvHeLEGuXEXNo3krkoTl7roey159NviV1zZ/aULur69M1tkzYlgnP69nZ3BC69PMKwXajtZPtdp/s4DQIQgAAEINCMAAIeawMCEIAABHIj4EJjA1ecTQhu9bZUTS6SaVO6AoeDRDs5HiQC6v/LyGHm01mXM6lurhI9+FXhOotjFlT+jakM6sO32TaumrDWhctv2KyCbFXhhM7FtWSFXz62dnho3zoJeFWJNUUyCIvFEmuc8yrs+K1LiGheTsR212R4/7jw1DyPn8exklzfrc7h1kcgwE1u5FSUy1xFmLK+YAqH2urvHaqMQYMABCAAAQisIYCAx1KAAAQgAIG2CDTLZ5fmoOH8ZXIh9NswSIk0KkaRJUQpzbmzbKuwMCU9r1PfqhJQ4vjlJSa6kGmJdq4wiR6Ok1pVLBDwyndblRXCKrEmXOTAiXkqvLNwafX3grydiEnXmM/ndXQFRvudpwDrcirqfqXmXjRkTfEg7a7bCoN6kSWREDHPZ9WxDQQgAIH1mwAC3vo9v4wOAhCAQCEE5IpTKKtccc3y2SWd2D3s6MHEFaHQA48eiOvmbouOJU2xjSQOeX5eRRGBvAW8cJ7DdirIVhHOWXYYc5Hus7TrsirXoxxUfQO2GI4V+8tqYTGvxwosK+z5y87FGR1rWUJmGsZ1dAVG+1/UPVPuQwl5+n6Te3PQfmcGOWDbqIyuY+n7UfdFQm3TrES2hQAEILD+EEDAW3/mkpFAAAIQKJRAI/fcaPAQkjU8KCzOyL2mY6kS7ejKx2Nt6yqOheFWIRr4TG5dnIFpxRxtr+q+yiuldaFCFO2GTBf1YN5qHvJyHvrMtbapm4Cn/pRdOKTKHHQS8ubN6g3uYRJXlDPPVdYuU1DUWqjjfbNO6zPumirLqRt1b7q8eRLz0n6Xunt8pz1oj9JM2FQFhNr63jHZDgIQgMD4
J4CAN/7nkBFAAAIQKIRAu/ns1CmXIyhwItjQWD24JOWz0wP58v5yHTVpAaZNfJ72+Fm3r8tDvBM2WlUkdRVkp1vhTttLIO6zwl3aB9ooq8seviX4J4kHykW376Y7ZcWZej8EvPIFPK35x2xF6HbXTerJtjtEBSD9vwQVhTxOsotaYl47+dDS9CnPUNA05221bV1fdLg+VxHi64qk6DtRhXhc3jy9sEgSfZvlFCTUNq8Vy3EgAAEI1J8AAl7954geQgACECiNgB4gRuzPQFCEIltImhNm9HCi0CGXu8zXUVVXcSw8CXKLaZzL+sspVuC7AOokfsY54MJrQ3/XmpDbLuzA9B1rnHB3+fyGeKemh1133DdvslMpQt5EFvDKHrub5yqFq1bVjsWj27rylDdv5apGcQMJ1EXlzazCcZp0rVY5N0l90+cK8ZXQWuV93K0Tib66J7rcinFVj30FR7lBlTsv+JOqtj5LgW0gAAEIjBsCCHjjZqroKAQgAIFiCLh8dlnCecJiiXJBSbQL57NrViW01UjqKo6F+6yKqFOsiLfYOn/q1OoUsqZQL+UyDLswJ1tRTaJd4MLMKWfZXcv/bS6zwt3dKx5YayrCAp774Pinvt1sO+PJhU2ZHsDLDCOt03xXKeBVVUTCN+ehy/fpHFdym8qdl5eY5+N4LWzRtzhwHUXFcHfrdP2oX66Yk1IJ6OWXC7V1ayXLyy2q2lax8jknBCAAgeIIIOAVx5YjQwACEKglgVU2Afbw6Opc89lpoI0cee0LM1UJAWkmq5XzJs1x8t5WjhI7vYHTp+om940eQBU67aoxZhF0k8Zx3L0XrCPeaZ84AU//XqSIN9EFvKm9nUHYcpnNCcVlntOdy9cRFe5bEWJe2pyTZbAqK79cO2OpS8qBuDG4Fx9y0fXal2O6l4rpclswxacSd9wxCbVtZ7WwLwQgAIF6EEDAq8c80AsIQAAChRJw+excgvWsJ5PzTLl7FBqmnFMS7eTgyyME0vVJDy4bW/GnzpVo69rHqt2L4SIlHRaShESFyOblNIquW+W7C4fNhj/vtG6/laHiKO6zZ07fwpyw9TuyXgIt9ytbSEnjIGomaOYFIpj7CgS8Kl1eEvDklMoaghkV81z4ZNrrpY6OYF93Yl7rL8tx6lL0x6fv4rmhzffovmvdS5F2nMyE2vqQZxsIQAAC9SKAgFev+aA3EIAABHIj4PLZ6Rf/pX0jmUIWXc4yiXbK0SOxLqkIRR4DGA8PVnXsYyCwWlGhTBeUhCEVonCirlsfEpeKLkby5ttObLrcJODJbRonhly+46fzWKbrHKNs11GSgBet7rvKwsg7fNNByOJGy2MS5PRsVSwlj3M0O4Ycr2pyRbXTXGEDCaDdXZ1Bzrw+e0zlD/UR86pi32rMdexTuL9lX6vtrA/tG+6v7rlBYSh7v5eAPGi/l5VjUaKez3qJ6wuhtu3OEPtDAAIQKIcAAl45nDkLBCAAgVIISGBTeI1+mXdVGWdPm2zDb1Z7h1VGi1DoocAVosj6cJB28HUqxtCs73UMvyor/NjlappmBQetCeccClcClbg0aNdiEWGzmpNW7jv3wCvBKm7NFlXUomxRoJmAJ/FE+dbUn75BOwf2GtbcOMeXHv4lEuUpsFYh2JTNO3ovUE4yhTZmDWmMu7e40MnG/HXYFy/2pYmdw7iiBm7/vITEtPfpVtvXsU/h/tbRtZhFENV6UaGUaN68dnLaEmqb55XAsSAAAQjkSwABL1+eHA0CEIBAqQTkMJJAMmyFO/3Crvxn0eYTVqk3+noI0Bt9V4Qiz0IDaaHMmdFtBmzoZVHiT9r+xG2vPsol004IUx79CB+jyBBOJ+zKbac14pxczcKnixA3wmOto4BXdjGBsICn+ZFoIidkUqEQ5/iaMbUrcPAozFkiVFiATbs2qxLwNrDX4cKlQ2m7m8v2SQ7Idk8SFvM0T04oj95zir7WsoyjaDZZ+hTex+d7sd1z5Lm/TwELd11L
oHcFU/QyT/eDdr6nCLXNcyY5FgQgAIH2CCDgtcePvSEAAQiUTiBtPrtmrqxwvjK5lFwITju/6OcFYzw8XPk8UOXFI81x8kzqH3Zj6u9JwlC4n0U7cHwEvNV2YYdF7WUDi4Muvucpe5t9N90pDVbvbcsM6ZRIMsk+tSv8TQ/vCuXUHKUR4rSfrjcJ+HLlZQ2xlYCnfrQbTuoN2m5YdTGZMkX8qNMqLOYV7XZNMydu2zo6lMPjqLvAGGUuV7peGKV5qaXveAn6urblqnNrppWbM2mudZwp9njueu/QP9AgAAEIQKA0Agh4paHmRBCAAASyE5CoJoeMHs71y3eapge/TTeYYh5+bCCoCKq38/rRcVzl2DQP/GnOnXXb8RDeVFeRMQ8BTw9nQY6lNRVks7gxxUfPdkUJOkkCXvBgaQW8geEBs3DFw2stxSf1D5stBobNrs/c1+y67b5Zl2nsfmUIeE58n2oZD1jn7fL+kVwKyYRDbNMmya9CwCsrZLzZApGoovyieRbx8VmMUTFP+yy1ayDPUF6ffrTaRtfBomVDqcTkds+ZZv+6C4zRsbSbc9WlPYiG2qqwVdbUGLrFTl7zO4Vy5CLmpVmBbAsBCEAgGwEEvGzc2AsCEIBA4QTi8tmlPalzUM2yuZr0y7Ue9vVgntalk/a87W4vZ82c6dWFxvn0v64iox5MH1s+nPrB2YlCEu5czsN2BIEyQipbFbHQel+w7CHrWBlYZzq3XzpgZtvcYmpbztvWfGDnE3ym3GuboqqiumtZzkYJ7hJVFdom52w78xQ3qLBApDxseshXReFWD/pFOy7j+llV5VvXlzqIVK4ittaE3Jiu0niVTuq6VukOr6F2BTGvm0FOG+Wd69GFZuv+0Tu58TLP3UeyvsxTiPe82T05jZjDQAACEIBAMwIIeKwNCEAAAjUh4EJjGwJbfD47n65K/Oqxv5TrTbsLe1RFTj3w5/2g79OfrNsUJYRk7U90v6rD95qNI00BkHAYtROF8hJ3dexpVmxabMXEotpx915g7l7xQOzhFy5/yAqRg+t8NsteW89atraol6eIl/e6DSoLW1HV5baTo3GsQI0NoS1CwAtDcxUvlftQD/oqfBEXgleFgFeGSNxq7ebhds3j2nBrTvf7RgGTLiu2yn3auOdnFWWy9i1vwSlrP5o+/Fjn2MbWITh/8br3h7zPlcfxin5ZFHbeqr9p3bfaR47rWbZgFg0CEIAABIolgIBXLF+ODgEIQKAlAYl2wQP4GldcVlyP/wLeERwimqusruGercZb9xCnurpMkgqAuIIlEnjlqFIBg7xEu/B8lhHeeNfyf5vj/3nhOstIue6WDTby3UVb2H0X/iwvES8PAU8CiK5ZzZEqSOuBOk58LzuPl5vTuIIKEvCUb1BOvbKaxCr1ZZkNH62i5THX7fa7WeEUl//MCa/6nklyUbbbF7d/0YJTu/0cDw7v8BiVb1WCbFHpCKL3bb0skDtP19agfeHh8uO2cuDKMa/rkQYBCEAAAsUSQMArli9HhwAEILAOAYXGKvdclnx24YOFnTn6xVoOOx07Lh9TGWJK3lOdJETlfb4sx6tjGFZccQ2X/0iVCfXA3zfYCKUu0plTZEXc8FzFiXj/W3xf7HQ2E+/cxp978xVZlsFa+7STA6/hnuoMnLM+c1S2gOcGGg6x1b/pfqY+y5lXpsu36DyLrRZD2RWHm/XF5zpzL3hcZdKshUp8L46qhdWkflbt3EzqX/Tzqr4Lo7kWW+XN3ciGz3ZZwY8GAQhAAALFEkDAK5YvR4cABCAQEGgUixgN3mRnFU1cDixXXEC/TAfFBTyOWVe3WKvlUUVIXtrlWkeXoOOmtdGoQNgQhLRO5LYrK+F+mQKHRLzL5t8ShNMO2Zx30aIVmtck8U7b5FHUIq2AJzeQXFIuRDZNwZCqBLzwdRIVhxRiW1b+tSrvEXUJE03rditDzKtrhW63buvev+j3UF1yLcqRp98/nBCs32f0vaJI
gk1soSwaBCAAAQgUTwABr3jGnAECEJiABML57OR0ytqcaKdfmPXLsysuoGOmrRynN+QqblCWgJN1zG6/tA+m7Z4vy/5yRvTZnGRlCRZJfZRoNmPK5OABa5VdINFQ6qT98/687BxhEvKuuPdKc/dDfwqchqqSON0Kma5gRdL4yhLw3HUt4U79VGicjxAf7X8dBDzXJ/XFWCew8m3qXiWxuOj8axLwRm3cbpmuPzfeuuTAbMdN5oqAuEIGEo+zfLfUeV3GXfNVOdqS7j9xn9f15ZsL0Va+RV0LNAhAAAIQKIcAAl45nDkLBCAwAQhIxBmxP+3ms9Mvw1PsW2696dYv73qwD5JK2z/baePpoUXjHA9hv3Vxcrhw6qlWEFIYdYddOAuXDrWzXHLZt2wBT52+8a7LzI13XxZcOxLK0gjWeQl4i5YNxTptw5V+dU2364ism4DnCmqIvcJbXf61okI2qxx/cM1ZoXzJimry77kLNA8XouZLoquKzujPlauaFyvxuTHU0Zkc7ncdHG0+HLXNuHiRRf473+lkOwhAAAJtE0DAaxshB4AABCYyAYklcn+0ExrrxCq5phRGp9YIubXhsVYQzKvl8aCXV198jlNX50G471UWBwmLQWFnpkQrVQNctKy46q8+86dtqnhQHhPw7PnlBqtawHNuO11/rtJvXo6xKgWs6BpQXwbl4Iq8aHAur+6uzkzVLVuttSrH347zzff68dlOLxHyzD2o+64Lk+zs6LDfQStNv82ZGVd5uFn/6pgb1PV1PHyv1OU7xmf9aZsNZ/UEL9xoEIAABCBQPAEEvOIZcwYIQGA9IrDKhmsNj6oqZHv57PQQ0WN/4dWDkkQ7Pdg70S5rjrwkzHqTL4dFHYSdpL66z+v8IKg+lu2OcKKdWzNxORDLzD2XNI9zZ3abpX0jqUS0pGP6fP7Jy98UbCY3a9kCntxHClWfLCetdaHpT7loFSab97VdpYAVnQf1RUJPs5cO4YT4cojKqdxuVdSkc/qslazbVFlAI9znZsJp1nGF94sWMXAhtq1eLNVdIBtvFWjrdI03W1P6nu5QvgIaBCAAAQgUTgABr3DEnAACEBjvBFw+ux4rgD1qwxKzPoSHi1AoRFbuPd8iFHkw1PklLsxfPJjH4Uo5hgSgMpPipx1UGXmwNG8SC3q7O4K8hwq7TMqXttncKeahRQNph5P79lXN39d+fpy5b+FdgXg2stJC82xxVWhvWnrRWnvvMustTY/WKGzQHYQwO1dkXm67uJPW6eE+jZim60YvL1yIbdZrvKr1pbmQo9K+dwlEyCpbWSK5r5jnUxW3Sl51cU76MqjCxezbN22nkOt5Nr8uDQIQgAAEyiGAgFcOZ84CAQiMUwJy0rgiFFlyyOlhRg+qEl/0i26Qy87+6OE+bRGKPBDW3dEWHWNdcsw1Y1+U2yRa5EDuTAkFvuKxHvoWLh2sZI2FWVVV5ONfC+4yX7/luFQCXjT/nYS7m5atLd65se0y8y0mLORJFHDVfrVNWcVi6iTgZRXTXK5L3R99HF7h9aVzKged73WRxz3UHaMu7KsQeFqJeWW7ktPOad2/U8LjKer7JS2zVtvr5ZJSNtAgAAEIQKAcAgh45XDmLBCAwDglsGTFcPBQqaZfVOXoWWJDAls1PZA60U7bVV0JNPrAm9XtUsUUVpljzne8eYmi4YrD+ns766YsV04SoypFDol43771eC8H3pbztjUf2PmEYDj3Dd4RCHf3D93RcngKGPvI5p8z28/eca25KpN9lXyjcLIKeO44rvCFnM5qPoUvqhCvXH+LDF1Nuq7Cn1dRKCZ8ft2rnKtNOfNUAEOCatXFPZoxrMu8+czxeCjkNIcCFj5TyTYQgAAEciOAgJcbSg4EAQisjwT0EPnY8kY1z1a5c8K5yeSsU1GL/qHRVPm3yuA3ntwH4jEeHmDaEWxcwngJvsqJKHdmEFbdZvGSqpxv0TVcdZjhsuF/ms9cfUTLSyvqvDvqv3u0
3F6pnpTvSQKehIpTn3TdWg6wdoWsNPcB8R21fSgyTNe3P3m64R5/CdIZFE/os/kD466JKgW8Mue52RzUKd9l+H4tN6XmzVUlrsIh2YxZlWvG91py242HF1gb2fDZLjvfNAhAAAIQKIcAAl45nDkLBCAwTgko/92CJQ0BT825rfT3hsuuIbzoYaXMfHZZcdY9vCk6rvEQQpQltFrz4NaPwqkHbHhstHpn1jnWfnVxZlVd+VgP6wuWDBpVpv2XzYmnvHhqctxtZX923XbftTB/Y8GRsc47rUPltZN4p7xnKmbjMus9pWd7876NTh07TpnCzvoq4DmY4YqoEoWU/zFc+MLNbzvXStZ9XbGSKsWpOuabc/dDXScqtuPyHPo4KrPORZr96pIf1KfPdbmPN+ur7oebbDDFZyhsAwEIQAACORFAwMsJJIeBAATWXwKP2KIPekhzD0sa6WQr2qmKohxTSQUF6kRmvFXgE7u8QlSLmgdfV2NcBVmtnyJyIVYtnDnWCq2T8LKsv3XYeVFzkzYXYNR9pwdUXfcS6yTaSZSIa6dsfu3YP5fpfqyTgFe0oOVCbMOCkHJvVVWspQ5OLt1TVFl8sc3VWpcWtw6co1J5IvWyqyoxr1FkpicQ9cdD0xpTBECaStpljksvoTaY0V3mKTkXBCAAgQlPAAFvwi8BAEAAAkkE5KxT7js9QKpJsEvKg5d0zCo/r7sgFmVTpqMpy7y0CnOSYCrBQU4UicBluTSrFs4cx8BpaB/aq8qHlUZUckUrdJ132v/oTwl2Pg6rcFGLNNVYs6y38D51EvDKFLSccKVq3itseG3wIqXNsPO0c1F17jn1t44VVZO+X6oU88aTA308uM8loOv7jwYBCEAAAuURQMArjzVnggAExikBvf1WMQs9II6HnGxJmOsuiEX7nyVENYlBnp9HXTCu8vC03s7gNH2DDaemjxCUV7/q4syp+nrxzcumOftF3yXmhsXfDRyRq+x/mrnt4uYIAc+YMgU8zYHL/6aQWlX5VoizXNHhENu8rqfwce4cHTB3rRw0s6ZONkvXOEu37ew123WVH0oo8UQu0eVWxKxDS5uTL3Al2/tk7+SGM89VIS7ClSw+4yGnnJvH8eCW33BWT/A7EQ0CEIAABMojgIBXHmvOBAEIjFMCEu4eXdrIgzce3oonYfYN+Uw6Tlmf1yUctNl49aClMCI9RMttpzUil6bydVUV+lS1cBZ+CJVLY9GyakL8ksRqOZgU1icB7wfzLzTXLvxOpmUdFfAGrWs3z5yGzTpVNwdemaGJ0XBIrflwzrW8q21LuLt0aHEg3ql12TWjAiKuScTbr2dOqUKe7uUSvupQxCRgYu+Fqkq6cM33pe/FpHumQu1nTO2y12Kjkq3mT2PLU8yre065MK+6i40SjiXaq6APDQIQgAAEyiOAgFcea84EAQgUTODmX91m/n7vA+b5Oz7DPH+Hp+V6tvmPDYw5chSWt7RvpPSQrbwGJNFCuWvqlDep1djqGvYkAUGCgQSgHusg6beCXR4VZPOY57rkeqq6H3ECXjisWUKrmzMXQpuFf1TAc9U3sxwrzT5ZBbyBKy42g1dess6pup6xnel90/5m8jO3T9ONYNuyC0pobUmQiROHnYDtCl9ontsJsZV49+n+h9diEhXw3IcnTt20NBFP4y9LLPZZEHmE9EYLlwyO2Mrc1sXczvy5vuu7e7F101f1YsWHodtGczsyujpwlNax9djvPuUTpEEAAhCAQLkEEPDK5c3ZIACBggi886OnmSdssmEg3n3n8p+YXV76HHPwO/fO7WyPLhsKXFVqs62jaMSG1db1F+ukQWd1SSQdt6jP69ZfJ4Cq+rBCY/Uz27pOHrGJ0fN0i7TLsy7VFqvMFeby0Sl82bmzJBDILRktPnPf4B3mmwuPzIT9vfNONVv2NkQvuaLuXvRXc/uCv6x1rN23OCDTsVvtlFbAG7n7DrPipGMS+zH92M+kFvHKnmcfl6krfKFc
eQqLzlI8IU68k+dIzqNmYfFliXgSqPUyqS6CVN6uMc3f1J6uIERaYqwLsc0q5iXl50u8MErcoO5io+Za7moaBCAAAQiUSwABr1zenA0CEGiTwIPzHzV//Mvfzd/++R+zy8ueGzjt/m7/fvRp3zRXfvPE4Oja5tX7H25uuOT0QNTLo+mBf/mafEd1dYSlGWddxB3fPlfdX1dBVs7FIRtSLdEuHLZWtwdpca3Lw2qVc6f8iXJKya2lOUtySH5jwZHm/qE7fJdlsN1TerY379vo1ODv9yy5w/z4v5eYe+2fcTn0tp61nfnoDqekOn6rjdMIeL7inTtfWhGv7Hl2+dN8C6S44gm6hhWa2ScR16PwxbF9D42FzTo2ihqcZNWlZgKewmlPmrZZbvPc7EBl5x1MGlCRIartinlVu4GT2EU/r8v9u1m/FSqtl1k0CEAAAhAolwACXrm8ORsEINAGgT/c/g9z2pe+Z97+plebGTOmBX8/76wjzENWsDvn/B8Ef3dNgt7/PfvpZq/XvrSNMz6+65AN43GhWo0HgW6zYEkjL954bHV/ux9lWkV/nWjnU0G2joU2qmAWdy1IZFi4tDx3ogttdrkT5bpaPjDi5Y7M4sJz7juJd1/869GNnFAJRTAOfdbJZpvZ6cNUo3zTCHjLTzzajP7tzlS3qzQiXtkOvKwVjqNCkHJVNit8Eee+E8BG2q9JgauvWbtq5papWGfZuGzmSX0s6z7onJUK4/QNkx5PL97q5jqPm3etPfLfJV0RfA4BCEAgfwIIePkz5YgQgEBBBD5yzNnmldZ1t/drXhKcwf3/07fa3CiE9rfXnjN2ZuXDUyhtWNRrp1urrJ1m/uJG8nK1jWb3BIJemZVF2+l/dF+FActJVpfk50ljK+vBUA9OLmRLz+Z6uI+GWsb1tY6FQZIKOCQxz+tzCYmPLS/+WnEP6JPtHGrO5JpVfkI54dKEu6cR8aLinZj5CHja7isv/2HbiH0FvGY575I60LvP/mbKm/xCf8vOgZdXvjWFAqr4TFwVVBWt0E+0qeKtWisBTwUt9FNUS1vxtah+hI9bhSNQgr3WgsKk5YrsH2rcs6PuyrzDe4vkmcfaLrJ/cjVvZAU8GgQgAAEIlE8AAa985pwRAhDISOCcC642f7NFKk4+4r2B604uOwl0M6dPDUJmjzjkreaVL9kxOLrCag+xgt+NNow2rxbOg1eWoJRX36PHGU8PM+q7hIq0QowvOz0AKqxOYo8eiuXYkuCTRpytI8+6JLgvUkjU3GlthF2SYVE6awVjiXg3LbuoZThtOO/dWbcfZe5d2nC3OVeKRP9WLY9wWl8BL4v7zvV9zsVXe11KVQh4EhIk1ObR5LidZteSHF0ut+V3+ha1EPBWx4ZJu74ULeBp7av6ddqKr3mwanaMqh2BrhKxXsKokq0rJqN7ed0q9raahzq+EAr3V9+VyvtKgwAEIACB8gkg4JXPnDNCAAIZCSxb0W++e+WN5ivnfT8oUrGZzW93061/NudbEc9VoJW4p6Y8eHLl5SngKVm4c/LUUbBJg9UnAXya4xW9bd7hT+EKsvq7HBty22VNBu8e/utU2bdI0TPNfEvs9s2F4LvOAAAgAElEQVQ35ntcOVT0EKm5U147iXZxgqu2a0fkkZAXzYmnnHeuYIX660JnXd99BTxt364Lz1fAW3zAXr5o19nOR8CrIr9YVnE2CUQ4xPbCFYvMeUsXriPUabyrE8Kkixbw6nbPqWINtJpLl/NQ9wm5K+WaVB7bwTXFqJLWQZWfF/nSI49xqXiFfgeiQQACEIBA+QQQ8MpnzhkhAIE2CMiFp3bwOxoPpN+54gZzsxXxvviZj5g3vffT5pVW2JO4d9qXLzJ7v+5l5sB9dm3jbGvvKmfWY8sbee/0cKBfYuvkfkgzUD2kbmxzk4XDgtPsX/a2eQiOGrNz2jnRLqmoge8465izSA9YytWVl0PJl0V0u7ycgGKsUEe57SS4+sydBDw5qpat
KUCTdQyt9rvugYvNj+yPa0F+NLvYkhx42n43W5m2neq0E13AK8qV6+by8mGF0C4JriP5KTWn+lP3D/f3ZmujaAGvbmGWuj71nehyxRZxrWU9pr4/NrSh/JoziXlZqhFnPXeW/epewEIsxZQGAQhAAALlE0DAK585Z4QABNog8C7rqjvwza8ZC5WV8+4H1//SnG0FPOfQW7a8z/zfjs8Y26aN062166j9xT9cuKLuv2QnjbsuRQ6S+qnP2xEcg2T3CpG14k+/ddkF4XE5uzDa6Z/P+LNskzXJf5ZztdrHV2SKO4ZzSkq4E2OJkT45Cd2xymCAgNcQtMoO5ywrJPKNy+4LlpN0WbkrG9nvjFlpHXgtaliYootY1EWgd9da3QTF8P0kfH+OOvPqJubVzckYvS9LzN5kgyl5f01wPAhAAAIQ8CSAgOcJis0gAIF6ELj6x7ear9iKs8p3p6ZKtCcf+T7z/B2eVkoHF9rKs3qDr1b3MJckIOMtj18awdRVkJVwF5ecPolNls/T9C/L8dPuk4drMe0547bPEuoYnj8Jrj5uu7hzB8fp7TRLVozkMZTYY4wHAS9rEQsN2DeEVk7LMt1XOp/LcVbY5NoDH9v3kLlr5eMFjHQuhWWryY0XF0q7bWevOWnaZkV2q3Y53bJc54UCCh08ziEtUU/uXN0furs6g5x5us/oftNKmC26z3UWQjV2Vf6dO7OnaAwcHwIQgAAEmhBAwGNpQAAC446Awmb//q//midsPNfs9dqXmifYXHhltSUrhoNf8tXkANHDW9UhilnHPt7y+CUJpk70CRc0SOPWysrR7af+KU9i1jx67Z4/un8Vrqi4MfiGsTq3nYQA5bPL42G6DBFzPAh4I3ffYVacdEzqJeZbhVZzV4WA1z+4cp1qo6kH6bGDc+G5TSXgjdo12oiWnrROiO2JUzc123UV61LKKzTdY/hem5QlqHp1JrJRkii2dnqFjkDMW94/Grz8KVvMq3sBC/3eoFBpGgQgAAEIVEMAAa8a7pwVAhAYpwQkKkjEUytDHCgS03jrf9yDjYQDPVD0dncED1pO9ElTQTYvxnVzNNYlrDcpjNUVKJlsc2hJcJUgntf8lbHG1yliYVUdiTo+YyijiIUrdrDwmCNM3+23t6ycGr0WfNx3Vd0LiyiO0uxecOfogPl0/8NjH8u55ZzY7h9lytO8n73RFmbr1bZwiw3XL1L8qdsLA82HikTU5QVGeC7TiGJhMS+oSDxiHcAlCcXqs+ZVRX/yTvOQ1/fcBjN6gu9bGgQgAAEIVEMAAa8a7pwVAhAYpwSGR1eZR5c2ClnURSDJinK89d85BvVgLJedy4vmchhV/eCY5iEx65yl3U9hvVUXKomrlinhVU67sFtSlWTzbmU5w866/Shz79I7g+47ISdJwNt61nbmozuc0taQm+UXjLoZJQio4MOSE44yK/92p/37apvQv/Wppx/7GTP5mdt79a+KiqhVCFgunNY58MJwXNis2MvxpfuTRD6xH7LfG3mLeRvZIkSLlg15CcVek9jmRnXrT3g4WV+uhCsSS8xzL4j0e0BRrW6pGKLjVP9cpe2iGHBcCEAAAhBoTgABj9UBAQhAICWB+Y8NjLlYxlMhiLhhbjS7J8hblSQ2pESU++Z6KJ5qH4ol4kl88K1CmntHWhywjiHJWp+PLa92fsMuOAkbmkfNpx6GJdoVufbKSggfduHpob/Dw4F36LNONtvM9hPI+v9hzMA9xiy6Zu0FOGe7DjNnd5uTbauGoOCE0ck299+IFRni3IzLTzrarPr7nUEfxyqr6i+hlka8025l5BqMXnpVrW0xvsYsWyt1girOxjW39js7rJNLxXPsT17iTx3E+fCYN5s7xTy0aKDMW673ufIQF8sQ88q6X3mDi2wo4XojK+DRIAABCECgOgIIeNWx58wQgMA4JSBBRA9iarNtLpiRlauDcKnx2LI6E8oYqwtlUiGKHitIyMUiF8SCJWsnlC+jLz7n
qMKFlNSvpLyBSfvn8bm4qEJpFcKr1tC8Wb2lrBkn4vkIeGnEO4l2UeHOzYtEOJVSmLKNMc86oTsQ8CTaJSXiV0680bvvNENXXTJWWbV7WysmbrOt6dln/9TTXoWAl4cok3qgdgcVRFAOsDQFO8Lij86p4hvthNiWua59GNVZeCrCaa5j6oWNCjroO2mF/f7XS6V2xVmXTmCx/R2jjk0vX2ZP765j1+gTBCAAgQlDAAFvwkw1A4UABPIioAcvFStQk6NIAlNdf+FOGnMdKwcGOdMsU/1ItJMY4cIr6xxeFFfpMIl/0Z+XmScsPBYXwunCnG3AeWXhfmU6lSTiXf+fS8y/lt0Z6yxU2OxuWxzg7bz77+kN512z1qkCCla0W2XjY6c+3ZhND4tY6TwXmBMkpvY8HvaZRoxIynPo2Y1Um0nAq0LMb1eo1/66t0kMUYitiiWkYS1IdSlQ4yasXSapJj7lxkXfl13ItK4dtf6hhoCeJaVD3QtjSbiWcEmDAAQgAIHqCCDgVceeM0MAAuOUwJBNau3cF3o4kLtowZJGXrzx1uryxt+3gmwVea/SzGmZYpFPv/RAKJGgiPxyced38yiBQg+xCpOVOFFleN3iDe4x5/efZ+7tvGOtLm+9cnuzjf3ZfeQtPii9txGDBwf/Zn7/4J/XPp8V73xDZgMhwIbN/u+M+NNKuAvMd7YF+ezW6HZz97RJ8O1P1hZO4K9juPySSfnbkqp8Zu1Pq/2qutbyGqtYy1k8zeaCdC4uX+GnboJZXkyKWCdl9k3z4vKz6r4rp2WaNAF1dsRrbja0KRk0RhoEIAABCFRHAAGvOvacGQIQGMcEwrl+xkseuTjcVQqQOrccWuFCBgpDapUTre4POHVzCJbhsIwWTHCJ3sOiT1VuqTN7jzT3d9/Z0g0jIe+wwVNzuxvlVfn2nvev3SUXmivdbqWFK74uhNYJeNpjm3PzGYpzikmMTXIVSSSRCLXMViEtq1Up4CkXmEKV82phB+RKW10kSTgtU5TyGaPuM1qDdUwlUcY9MI5R1GmZNKc6RlVh4T5zrOI8m2wwxWdTtoEABCAAgQIJIOAVCJdDQwAC6y+BR231P4lNanUXlZJmoUzRSWKPBIFpvZ1Bt/oGG4ndfQsZVPUwlsTQfV43h2CRD/rOvTnZCrG6FuIKJjguVTyYSryT605CcVI4W54iXh4CXth9pwfnoOCEFUjktgsHycYJeO268KJr/XFxqTO4TuNCPrXO8ha1kq65qkThou9BYeF0eNS6WO09MhpiqzBGrYs8RcQk3q0+nz198pjbrJ3jFLFvHfrmI+YVkasvT57K9zd3Zk+eh+RYEIAABCCQgQACXgZo7AIBCEBAOfCc26CO1UfTzFDRhQ6i+dAk9ijpd5KoEjeGuoT8NuNbNzE371A7V+U07Jr0Cc8tW9i8bvJF5rrui4Jp8hHwtF1eIp4YSTRIU+Qgup4WX2vMY9c0wmQl3MlxF9fKEPDC53XipAv5dIUYyhbwqiyaUFZYerTqqe6ZjndZffD9HqlzNfaqqhU3Y+cKvvRObuRAdM48hVNPscJsXfPpzrDpGJx47bsu2A4CEIAABPIngICXP1OOCAEITAAC+qX7seWNvHf6hVzJnRcuHZ958PQwKGdNnuFPTrRTonb9XaKdy4fWzvIoOiF5O33TvnULJctSMTOOgQQaN5eaxzR5nXS8okXiaJ8PnrbH2D91WgfeSlsp2qed02eVszZbOwKeOCus/MErV5oHLh8NxLtWrWwBz/UlWvRitb1/BO68hLDShZdeYB697MJ1hjR12x3Mhvu+3Uzb7tle9Kss4lCFo0vXsZzLWhsSfdT6VG14jQvcC1qBG8kNuXDpYOJ6LbALTQ9dpsM8zfh0DUkIdzkQta/Cp1XlPum6T3OevLYl/11eJDkOBCAAgfYIIOC1x4+9IQCBCUpAVR/nLx4cG72S9D/82EAtf/FOmqI8XW2uKq/cBAqNDX5yfsissiBCEsu6
uTH1kDhvVrZqneEche0KsBI9Bq3wl/daiJuPezruMGdNOXLsIwl4ul59Hop3H35L20Ut0gp42l7rpre7Y0zofvj7q8yia5JWW3wOvLxDaFv1whW9kLCkJnFX+fKirPvu/Esg3PXfdXvLQUnI2+LELyQOPC9hOvFEMRtUVdnZdUUvjHQ9Sbx1+SbTVrHNMu5m+9Q59LNKp2YaxmKoYlidHY0CES50WmKtz30rzbmybishVBWvaRCAAAQgUC0BBLxq+XN2CEBgHBNYaCvPOjdE2Q6jPLG162oLVx4dshVHnXBX1INHncO18g5ZzWOe0wie0XBnOaqSCov49LHMkL9w+Kz6VoWAp1xRC5Y8LvDHMQpXXpb7Va5elwuyVQXa8LHKcOAtWTBoli58fCxbbDt7neHIeaq1o0IGcUUv/rbPLj7LJNjGR8Sr8jqrQ0imHG9ygPfYMEwJv2o+RRK8JyHFhnUWyfJ8OZUCSaZNXZ5Q5bmc2tMQ9OXQG7RV7+PyIGY6ScadlN9yIyvg0SAAAQhAoHoCCHjVzwE9gAAEximBJSuGAweEmgSK1fYX76QQsroONW2YUVh8kOgQhMcmVJDNa+x1FkvbFUPzYhQ+jk94W1iElQCbR7hzuA9FJ/4Pn2sdAS8Qlgp04E16yFrhHh7rgkS1WVO3MIuXzY2dTrlUxcNdN81yCEar0MYdrMgqtBLu/vrzR2LHMGtej5GQN3ujxkO9xjNq7wMaS7ToxW2Hfdj03dnaeRc9yZNOOKNlOK3LI7ZkRXlVb10fqyjIEuUTrcAbLZIQV2ikiHuLjllnkaxujuhmc9DMxRjNg1iV41LpE2ZP7y5qCXFcCEAAAhBIQQABLwUsNoUABCAQJqBfpiXiuYcY5bJpJ3F9lXR9RDGJU84ZIHedkqqXJdqF2RSRsy9P9tGH6zyPneVYzQpIOLddWEySeFeEc7LMqpmlCXgS7jr/bCaFxDs3P3KsjIxuYpNaPcdWodgscKeJs4p/JFXsdcfwceFFBby8wmdv/9l867pLzun5rJ03DkS8sIAXXqOj/7jD3HfsYUaBdxJRbYovr5bkwiuyunJSB6u+vluFxeszpS9wedV0j9Y1naVgUBIH97nmQk6xZf3li6lJfawiX2FSn+I+96lcXaWYN8eKd5pnGgQgAAEIVE8AAa/6OaAHEIDAOCWgvEOPrilcUec8QD54m4liEh4UEjett/HLu8K0FO7nQv18jp33NnV+YNRY07oZ8+YTPV5UnHWOmclWkPUVk9rtY9mCS7iIRZC3SeKRRx0L7xx4VrybNPm6plgk4MmRJuFqSudeZlrP5oE7N63g/d/TjRm4pzn9sIA3ZRtjNj+83Zkyxle8c2eSiPfkLWeYIevAjboJw0UrlN5L/ZVA7OOIfMaVNzUdTJX3gKoFPN8CHs4J6QpfFBViW+cXKnVOtxBe3GmdgloD7p7qKkLr3lJULsSNZvfYat6NUG0aBCAAAQhUSwABr1r+nB0CEBjnBB6xhSycmDVeHhbikAcPA9a5saRvJHALySmUdwXZvKbax62Q17myHKeZ4y3LsfLYRy4UOXAkKmlek0I38zhn9Bhl5yw7s/dIc2/nHUE30gh4XlVoE8Q7nVOs1aQZBs6z4d0DJ16W1krEcwJez9b5iHetwmZb9f31735qooDn9pew5JLhq3ptM2FVVWnn7feO2NOW6egMdyDPfG8rHho0fQ+t63Lc+HmzWi6TLNeSCznu7uocy1Gal9hTZoGatNdP3V6mNOt/O05BzW3j+9oVkhlNXSW8FVfdyjbZYEpa9GwPAQhAAAIFEUDAKwgsh4UABCYGgceWDwcPRGqzp002KuLQLKdVnYnIlTXT9l/iTpA4u4A8aHmNv+5uR1WpHFDoWs7Vd7Pwc/nWtK9CvrU2q3BPli26hivROrFIlWhbNW/3Xde1sWGzOnanXZxan5Psf1yBG/376lWbGjO6R5YpDPZROK2q0kbdeHO26zBzrDbYtZVnbGpC
D9K679zhtnv+hmaTrWes4wBqVbzCcZJAEBde20rAKzOnYhiZr/staaLv++Ejpu/h5iHKT9lzIzN9s/iiAe24WcNhmKp4qorBclS3EzZf1xdXeYqtSfPZ7ueuKEm7oc5OzHOuS+eKbeeeLwe+KuTSIAABCECgHgQQ8OoxD/QCAhAYpwT08LPUutbU9GClX3YXW1FvvDQJd+rz1N7G2/vHlg3VQnhK4ldnZ4XEBWlFWhtVNOUq1AOcy7fmdKsqc1TpYVoukzJzRLpceIEZzioXrQS8rVdubw4bPDV5umLcdzq8REL9uXJNsQyJ4GEBTwdePfy+5OOn3KJZ7rmUhxnb/BeXPZBpV83vK/Z/8joCXjiEttWBx8Jr7UaaJwlKSQKeK5qRqcMZd2pXiJbr7v5rFnidfdqmPWbL12+8zrZ5uQ+jYk+fDfHWC6i0Yl5d78VZnIpeE5PzRkW9kIoWNskaQj3LvtjTmqNBAAIQgEA9CCDg1WMe6AUEIDBOCQyNrBwTJSSc6E31giXJyd+rHG644qhEBlfZbsOZPWaxLcrRrgugjLH5FN0oox9x50ibzyiPfrqwZwl3eiAM51vLu0rkv+9YYG656E7zwJ0L1+r6Tgdsa7bYfiPzZPsTba0S7+cx/mbHkBPv7KlHthTwvJ13Oknnn8wkW7hCTWOS4y4Ik5XoFOpErICnghYrn5vrcNcXAc9BceG1EkN3vObnTd1h7YQctjMBwQsP+6Ima/XbO77+n1Snj3PiKeec7tt5Or2dMKl1m6bSaVHiUypITTau4j6cpd9lVC5vR8zbcFaP0f40CEAAAhCoBwEEvHrMA72AAATGMYH5jw2M5XGSG2GhLWzRTshKESicaBfOgRZNqF+n0M8kBnVOnF6m8yMsxjYLe27XNRSeiwuOvHkd4S46VxLydnrLdutMYVXJ/+WMlRvvitXfWatPEu7kvNtm1fZJy23s80lWwOvsui0Q7+RUkuMurrkiFuHPVk9AAa/vzr+Y/xz3cW++bkNVod3282cHeb0kVskdFs7ZVlXetXbCVx/541Kz4E9LU7PY/qAnrbVPkWN3hS96rFCpluTayiukODUUjx3q/B0R7n7ZQqPLh9g7uTO4tpLmWPdtl4bAAzubQAACEIBAwQQQ8AoGzOEhAIH1n8CjNuxUYphanUQwPVzp4aC3u/Ew1je4Msht10xcLPtBop2VUee+Fu2ocG47ua9cQQrNa7PQt7wesn3EOzenbz/lFes48aoS8NwDa1bXlMYk4UZFXUZX/9EsH/5dYpjhRBTwdtpvi1j3rm8Ybfh+8KQTzjDTtnt2IJRKxHP3MCc2KKyv397P8irE4Hsvaid8Na37zvVpo+fOMuHCFgO3/TUQXRRC7Nq8F+3oOwTv7ZxrSykWVq5aZZb3ry2i6kDtOhK9O5Nhwzp9F7fqvgTZkdHVpadc0LUlx+WMqV22cJWKGz3uxnffJfp8nq1AS4MABCAAgfoQQMCrz1zQEwhAYJwSULji8v5GHryqhaVoKKWExRU2F5tPWGyeTq2ip7LufS1CrHKhsJNtqLbmVevO1+nZbn8UMnvLxXd5T+uTVvaZ975zazPnpj+N7SNhc2CrJ5i+PV5oRp72RO9jtbthVkdkWCgVb4UWDq/+w1gIbat+jVcBL2sRi+1tEYsNt5redD0+8OmPmf67bveaSifeRTcOC0rKa7jEhvuXXSimnfyWWQU8lwtv4W9uM4/an7i1JVYbWhGvCCFPQk+QJ9UK2BJ09H2iFwb6Tqn6+67VglJhiEX25ZrvPdJrcRawUR2KgETneNCmBpFA3mNf/s2YMrmAUXNICEAAAhDISgABLys59oMABCCwhkA4D14VwpITGvSApb+PiQ02IXmaVud8RtFxFO1yS8Mtbtu8ErtrPiUahEOfs+S+alfAO3HPS72RvGxkodGP2qZPnTO2X6cV8Fbah3614W2eaJZ+/E3ex2xnw7TXZLQISFQondT9jcTuxAp446CIxZIF
g+avP38kcXzRDXbaY3PTMV1OreaVfn1EvGbiXfh8uk8pX6euDYUAxjnDUg/Ac4esufeyhs+6bs2cc6fp/9/84H+bCXj6bOoTNzFb7Lub52jSb+ZCbF2VUx3BVbdOf7Ti9hhP32V5fVfkRTPseu2xYbY0CEAAAhCoFwEEvHrNB72BAATGIQElsJ+/eHCs55vNnWIeWjRQ6EjcG3M5I3psgulm+c/SdqIObgDfPpfB2bcv0e3aLbLhQjYlUrgH5HacJJrXx2x15CzHSOO+e+vgv80Wq/rHcEQFPFdhVBuUJeL5VsANM5do11Qo7brWTOp4uOXSiIosq1dtaszoHlmXU9P98i5ioROldeHNmtdjXrX3Fl7rSznx5MR79LILx8akfHf6mbffO7z5OHdVIHDbEEDnDFPl57RVVL1PajfMmn+uHQFvdEW/6Z3067FuthLwtFHRIp7riIRx8eiwX0ZpCl+k4Z1127q/4HHj+n/23gNOsqpMG3+7q7uqc+6ZnpwjDDMgQRREZcmCRHVBzN9/zbi7foo5obJGXMO6q5gIBkBUJCt+OiAISpgZZpgAk2e6p3MO1eF/nlt9qk/duuHcW7eqbvW+5/crmul77gnPObeqz1PP+z5hH+ccET5bIlSXXBgBRoARYATCgwATeOFZCx4JI8AIFDAC7cJ5FmoQlEzJGycYjJxDCGcSue2GRRgTiDs/iiy7PgolbxDGH2ay0Q+OZuWXEbLpUUVpt66Z7EldAg9hs28e3Z8yhKqGMqpuKDd+B7JlUrArKsGCcNqhi1+e1SffKQcgriG/GkIBtZWrRUeoqPRexzGnEXhZUN9hANkg8NCuLokH8m7ja1oo1+GK6K+9dyS5l6QyzM70IqgNhueodzCulZLA3KefENrJMZGaYXAnRaMJ9R0KDEHd3hYWX3UBVS4SpHGWi3wPhlILzxFyqQ2Njtu6B2d5OMnmMzEbydUY0U+Yx4kw9Zbp9+5cYsJ9MQKMACPACDgjwAQe7xBGgBFgBAJAAIc6qD9Qgna/03GQDWAKRhMgBFCgQAp78UOS5WpOurmyzDkLgbvZHTiIMftVDqFvXQLPrL7DvWYCb0qwd+Yoy/b//lAQU7RtA+ROc20ZHeuZUcmqzxQMAfDselInupB4WFfZ3lT8ImFZOz8rc8wWgYfBupF4S46rpSXH1RnzMhNqWZms0ij6U9dTXrIzvQhKlZeJktUPgQf1XXH8pSSBJ7aycAQV7scu2RFypcIzh3/KPIVI5yDDm/EzKPx191XQn8G6/XqtF+Zx4ovChuqo1ylxfUaAEWAEGIEsI8AEXpYB5uYZAUbgfwcCIAG6+keNyUIlVymIsM6+Md+TV9VYOPwgcXg2iB3zAL3mC/M9wQBuDDPZ6JbcXRJIOCQFFf7sBKkuoWjVhg6BZ6W+Q1sqgVcMSYcFgZcLFZ4kfKB4Qf4uFDxTGalXBYlHkactw2kNAm+ihabiwYfNqmuUTQIP/SAnXm/7DPEp+5bEnfy3HaEWwGNu2YRO+LxqegFVmDReyGRMmRCVfsJo4x37qLx8T3LIeIRAUroReLhh3b+9I5Oput5rRYyrN8nPEoQ35zrENsxf7qgYZaKMdl2gDCvA6RmfY1wYAUaAEWAEwoUAE3jhWg8eDSPACBQoAuPiRHVMhNGi+E2gnQjni4g/mhOJo4PIfeYVTr9j99pPEPWlK2u3yO0WtmJFhKquplBnyUNtLtQpOIjh8O9HWalD4KnGFepaNCyooti0iyH6x8OBPHhqMRN4bYPjdEy8UDbMKct4aYH73PoyEfY4qR8m67XXyIzbLm5trDxduKTGvan6vPYp6mebwNMdUq4JPC/9mY0XMjG9yNQM5qXftdHg0cTnhE4p6nuCIpGBZFXjGRLFwSskWTfbBJ5u/jaJf7n4bEMIPb7sAqGazfe9XId066ylVZ2wGVioY2wSeVPxOcaFEWAEGAFGIFwIMIEXrvXg0TACjEABI9AmjCxk2JxufjZzCCVU
dlAGjU+7deYDjjAfKlQ8dA+Q+cBQHZskGkuFCyvW1+xqmovxGbkThfoMpJKf4uZCa0XgRYUKtXFBdSr54EDg/WHvgCDuJtKGN6cyYhB5cyu9qUHUMFkk2m/vHc06oSYHnytlTVgIvEyJLS970k355dSWqgrD+6xX0wszcTj64M00+tCPUrqMrDiRYue+g0pWnmQ5FF0Sb9nFc+jgL25PaSNMBJ6f/G2qKnJiMnsOwrncj172rloXn/2Nwk3ZKhTcb5tB3gcMDdU0F0aAEWAEGIFQIcAEXqiWgwfDCDAChYwAXD4RpoVSJ8JPRkWmcasQPUnaIU8Q/l87eX6OwMkV+RDEdMJ6UFNVX1Jtl1G4ZoZgZRoa7abCsyLwVPUdhg/iBUSaOdfc3tecTL87foPrDEHk/dOyKtd6IBZAbKm4e1FsuXagUSFXz1BYCDydkFYN2LSqBEF8+DG9UPsd3/M0Df3XBxzHCyKv8r3fsazjFE5bOS9Gc06upar5ZbTjGyZyUAiiBO9FqRpW62FkW4GXiapXzVUoHUWmIwkAACAASURBVITx/ugpD6UN+k6mNVobLEeV/BCgORoaxcQXPiAXuTACjAAjwAiEDwEm8MK3JjwiRoARKFAEEBLZM5AI5zT+OBfhJz3C3EIW/A4hsgglGhLqD5B9I0KRFbYS5txyZqwySSqfDdyxxpKYBVnV0TeaVzWlnGMQasWffuwR2r+t3RI2cw48s/oONxkEnpEbboZ+iIv/f/SUTbTlldZqJXNndiQeDu3Yt1Fx8LRSOeY6pA6mIUMjwbkI2+3VsBB4uSRIsdbAN5McoxJPL6YXst+2p59wJe9k+04kHuoMHBmhwSMzIbVzBXGnlvbHn6EO8Uq2p2Fggbq5MLHAGoyKz69Mv5gArjI3JQwvMg2xDTMxpq5tmA0sQM4iBx4XRoARYAQYgfAhwARe+NaER8QIMAIFisCYUNx1iDA9FEmYwJ0WhA6IOxxOcpn3zC+MYc4tZ55TrpROTliqhiOqmjIMY1OJCrMTq5/94UTifXxou9Gkalxh7iMiwohVAq9PkN4//vC7PA3l7GWVyXBaKAthGIPwZCcHX6xFLnLSyYkwgedpST1VzlRNateZm+kFrscObaEXvvEBGihJVyfNG+23bNqNxHOa/ODBo3TgjvuTVUoE2TWukQBv8VUXUOWieZ5w9Vo5G0YRM89zceILLvHC56qXkoky0Es/mdbNBn6ZjkneX18VNUhVLowAI8AIMALhQ4AJvPCtCY+IEWAEChiB1q5hQd4VG6RdhfgWOy4OH7lykA0KtiDUWkGNxa2dfKkYzLkLrcijsB3Qggpz3Lf1GCGk1qzGe9X6Srrk6I6kaYXV2mFvyfyOXtV3sj3kwnvd2pq0MFmnvQICD2R6rnJLho3AAykP3scrGeL2/MnruVTgGbkNhdGP33yObnOyM70AofGPH3+Gju3ZatvEvJF+siLyKt7zbduceG7jUVV4OgReLtR3GHM2Va2qMhJK5mFB5OnmKwxKGei2LpleD3Ou2Tl1MePvGC6MACPACDAC4UOACbzwrQmPiBFgBAoYAbjsQWEEpV1ZtJgGh0WobAjDZN0gDvPhQh071BYg0/qG/JkzuOFgvi6NEaCohDoE62xHiiC8EaQJDp5hKNk8cMv51X79ToruOmQ7XZXA2ztvLv3uygs9QYO1Rl71/3NyoyczkFyrIcNC4Ml8gCBLZT566cL6YmsPvdjWk4L/uRuXeloPVA4iJ52XTjM1ZPHSl1T79RzdR5vv/D6N7d/henvV+CitHuxMqQdTi9h573S9167C/l/dR0OHWsmNwMsVeYdx5ir/qFRG4ksxqNixf/HTzsVW10DK92IEcGOunxkvQ8YemyMMLLgwAowAI8AIhBMBJvDCuS48KkaAEShQBJAPqHs6D16uyaUgIcs14eF37NkKp1PHI9V2qjECyDu7A6S8F+QJErTnilx0wzBXa+pE4pUK9RRUqWOrF9Jdl59v
6TprngeIJ+TOgx8iktyDFN0wJ2Y40+qWXBFqcjy5UgFZ5cBT96s5HyCelyM9/fTV3/2DpqaxNGN47glLCETeHU/8POXSVS//Z0u4c01G5OO5+suPPmWQZ/GDL4jn3t1CwkqJV/P1x3S3q2W94cOtdOiO+0QIrXUzTaefSM3ilYuS6zXHnKDKw/tpdQVC5ouT6SjMX6AUwpdPYU5TAaK0ToTQcmEEGAFGgBEIJwJM4IVzXXhUjAAjUKAIjMYnksnVc0EuZQumfIWmep0PDnVz68qotXvE662u9eUhC/nVrIwR3BoI2/ojpBeK0GyFUap4lO48RBW/fyJNjVeyYQl1nncKja1aSLdv63WEEMQdiALwJRPiPypv4ofAGxHkeq7UsHh+oBLKNMG/2x5TCTxp5CHzDCKs2+zq+V8PPStUd71JR2A8P1ANw9lUlm0HfyrMCQ5Rc215WvfrFx5Pn73yiym/D9JUwm2+uA4CD2Qa5peLsv8ZYd7yzJ+oaGyIJo4d0O7ypN4jKXUzJfBkfrhD2/cbajy15Iq4k31CSVst9ni3cF7PR1FdhCfE5pXGFwi3hXvqsZ7gPw+CnGeYjaJgXoEvH7kwAowAI8AIhBMBJvDCuS48KkaAEcgBAk89+wIdbuuk177yRKqpqgisxyOdw8m2gso7FtjgNBsKs0LAPIUgFReqmynID4TI+iVhwpZLUIdU2rFrB72weyZM8LKLLtfcMXrVVNfgrcdGaOuxGQdO2YIMk4XSzkxAyTqqkYVOzzpz12lHt06Q7rCDw0epvftZ4WprIm3qN9Hy+acQ9GAgtODA62Tk8dBz++ihLfvTplAsUl2B+Oge3EdbD/w0eT1WErEk8VDhM1feQMct3GDUzTWBl2vyA+o7Oc/4gfQQ2iKBnZUqz6zCy5TAC5O7apjGopqPYB1ASAfhUKz7rPupF7b8qOocmmpjBEy5MAKMACPACIQTASbwwrkuPCpGgBHIMgKf+I+bCZKeakHc/fHRp+ljH7jGIPKCKB19o4ZiCyVXYYtBjFttI2zkk9P8gsAYB1KEDoGMkKSdHXnkBetc5YnSGZMT8XH3vb+mu++927KZtavW0sf/9RM6XbjWUddKJfCswmSdGrv6+FrXvtQKQRJqOh0H1d++I/enEXeyf6iQsF83rLiYiiNzXInmD9/yZ8eh/3XX59Ou15RHqaZiJpwu0hyn2HFDVDJnnObVzafyaEKlt7J0PZ02eQktKVqnA09GdYLCVncQksADSTpmQeDNrEcqkafmwss0Bx76CJO7aq5JVJ21wvOA0M/YNPkE8yhd4wud9oOsk4t8pH7Gi/fhloZ05a2ftvgeRoARYAQYgewgwARednDlVhkBRiBECBxu7aD+gSFau3KxMaqnnttJ3/3x3fSTm643/v3bBx6l7/7kN/TQL74WyKjhdimNCwolFNVq4oWiHgTGYyJ02Wt4JEjKKhEqBOUSCFcnQwq/GwMHtfbeEdd8eX7b93KfXe6wL33zi0J194JrUx/70Mdp3erMCBo1jBfk021bewwSCmo7KGc00ot5zn+HieWacAiCZLIj7yTZKRdsUoC3qOV8qiyfZ7uGduo7eQPCZvuG09V5sVKhwqtJHOgrXt1rEHeylJWW0fz6BYlwXDEomPcsLlpLb458jCaRK+5QYk8VLVxLxYvWuu4v3Qq5VFPK8FmMDe8XY60Co9Eh16FKVZ4Mo83EhVZ2lst5u00wVzke3cZhvi7HhTybMBrC+ztC2WWIrc77i9c+vdbPZtoHr2Mx14+Jz0KEIHNhBBgBRoARCC8CTOCFd214ZIwAIxAAAjd+53baueeAEWaGJPg/FqTdbx58jBA++8WPzrgCXvGuT9NbrjyXXn/+GRn3isNCV38iNLCQQlHNEw9C2ZYxmBoNeFGmyAT/ONjhIOUUcqjRtWuVMGEoc2ipeat0yTs50UxJPBywcbhGEnoQp08cGDReXopX9R3a9rJHvIzFrm6mDsRW5J3MCaiS
nYkcgQljjyXz7Ek8mfvObrxW6jtZd9mcaoqeKYhWhbyT15bPWZHMpwfF6jl3ttLiw0XUUNSS1lXx6ZdSRLwyLbkkj1QCD7kw40ODNNmunwfvtIGjFFv9Mip797czzj2Jeecyj6PTOoGI7xfO3+OCtA1TsXKglblIIyJWHOZDeOUiD6gdLmH+mwDvk8iBx4URYAQYAUYgvAgwgRfeteGRMQKMQIYImJV2fUKFh1x3IO9A7N31w5mQMajwngSpd/27MuxVKInEQVaaKuCAjUNFNkwWMh6oSwOFoh60IqbMU0MdhMhClYEDXDbUdlZwhinXERREOJzJ/FBOYbN2WyOTcFooAFUlnMwt+Ie9A1putBiT19x3ch65Nj7IRPGHnHf7jz6QXAKnnIAqgVdR1kJL519guXRO4bMHO/9MeNmVhadPUc2mMeMLEDNdU19ZTw1VDTTv8CidfcdMjr4qqqOqorq0JqHGK3lDQvnst+TaUViG0BoEniCspoQCb0qDxEMI7bqWxTTn374v3ncSOcUyUYLhywCou8NAmoU1BNQpHyq+sKmIlRhrgZyPw+JzIB8htmF2p2+ojiX3qt/nk+9jBBgBRoARyC4CTOBlF19unRFgBPKIwC13PmSEzkJV93cYVohQ2lNOXEenbFxD577pw/SfN3wwGVb7glDpfeLGH6aQepkMvb1n1AjdQbFSBWTSdq7uDfNBQ8XALl+fVNuBTJGGFCDvchlGlakSK8i1xgG2ubYs6dD4lvde66t5Lyo8rAH2EQ7NMi8k1sLsIGpnaCEHOKcyIkJny2hupT93xFwn3cecd3Ttob8dTDc9uGqNNckm59re/YxhWqFj5qESeLh//fK3W65pJgTe+vcoRiPjIoRWuH5K04b6aDWt662kc+/vT+u3pWip5VgyJfFy6aaMCUgCD18CSOWWDom3cMFKWnrWVVSy8iQDB/klQlSEJQ+NjhtfJHgh48JEmoUpt6fcZOb3N6c3N2l8gS918DndP5Qbd26MKZcKUq9v8FhXhMNzYQQYAUaAEQgvAkzghXdteGSMACOQIQIg8EDMHRHE3WvPSByikOsOyrvviZ8oUnEHcu9tH7qRHg4oD17PwJih8jL+YBeqp1ERNujXzTRDGHzfLkOPwu7ohwmq+fpkiBIUMyCNQBYFYUjhB0i7vHN+2griHokT3Ga/fNOXfDV52UWXkZs77QxZgbC1SUPpgjVwU8KByGsbHE8q8jbMidEcQdr5Je7kBI3xlEWoZyDua85eb/r849+m7V0vGmpcq3LV6vPJisgDIdfT/xy1djxt4GVze7JJM4HXLJxpm+vTzXicQmidFHjl8yZo2aXjNCVIO4qnY1c/UUpv/BPR3HYxz9JSKoKl7XSxU+HhcibhtFCiYR2DfKb7i1LXqXpqhsSQYbRSgaeu51RfB+GVUmIVVLdkPW1600ct1x5EEwhemZ9NlzwKC2mGPYc8acd6Rrw+Flmtb1YY63SGtYDpRaX4kgdh/fjMznaIbZiIWBUjmLTMEQQeF0aAEWAEGIFwI8AEXrjXh0fHCDACGSAA8g657b79xeuSDrNQ2aG8922X0tsFYXftVecZijwQe6cKdd61V5yTQY8zt+IgABIPxVD/iENCjwh/KrRSKEYWUDmOC7YDh2yptgsDYRo2ElSaavz69/aus2571InAk2GyUDnCBdK8BiBXy8TzkCsiTc5FJ8zabd4615/v2E2fe/w7IkRP1BbsgB2Bh7bWN66kz77iA0azM3m6iugvz3zflbiTY9El8JxMLJwIvMaXxWnOJkHU2MhW17ZGBIGnKHYUEs+JwMP4d57/Vdq2PZX8On59Ex2/vtkRajzrXf1jgRB4nYK4G7MRHFWJPSyJvC3330wDx/ZrK+ZOuOAdVDdvmeuWkesO8sjJNdWLusy10wwrhDWHW6ZflpiJ1UzCne0gDrOBBdSIcPHlwggwAowAIxBuBJjAC/f68OgYAUYgQwQQKvs+QdZJc4pHHnuG
fnP/ZiN8Fqq77/30t9TfP0hnC4VeEAYWcrgIteroTYSe2YV4Zji1nNwe9vBfHNpw8IBb5pAgjPKptrNakLCtvcyjdcfv7qK7773b1x4yE3ggkRAqLN18QUTYhQbmikgzTyxXROpn/yqUd517tAg8jPHNx11E7zrpYoOMkkosO/dZq8XSJfBwr1MYrZ2JxYLTJqjmOHuTkfO2lNDLn58ZmcGFCRJPxOGRHYH3fEcl/WrXXHp+ciUVRdMVPyDx3nTlOlsiLwgF06jI5tc1IxZ0fA7mTRYZ7+E7HvoJHd2/x/WZ0SXv1Iawjon3soRr6qBQDatGC7jeIIwj2qc/U1wHkcUKuQ5H151KkDlbZYgtcqZarYfumPL1PuRnfMiPCmUoF0aAEWAEGIFwI8AEXrjXh0fHCDACGSIAwu7Gb99mEHbzW5qMPHdBKu2chtfaNZxU0jgl185wilm9PUwmDHKiOEwj/EwSRlA7xoSyC8WcWy2r4Gg2HpbQNwxXuuL+8rd3ZkzgqWGyCJGFYsUtrNFPmJsmzI7VQIAg91Q2w8Hv2Hk/3bErYT4BBV6RkNvY4QElToJ8I/rVxd9KqSdz4OnMWzcHHtp6sbWH/uvh5yyb3Xbwp9Q3vD/lWqwkQgtWdVDs9IjlPWWTxfTWRyLUgvBZtYh5FUejBoFXKV5qAXn3mcdXJH5VWUdFVelGF7L+DZ8+05LEA4GXSfimF/IOY4mK6c2LCHJNhGA/t/lB2v+MiBm2KLUtS2nJia/VUt7Zra1qtIA6UgUGhR7CPFUHaZ39kY06Ycrrqc4vG59V6nq4qSR1sA5zXtkmoWzFezoXRoARYAQYgXAjwAReuNeHR8cIMAIBIHDLXQ/Tz+540HCgfcuV5waqtHMaHsK8kE8HBcQJVBUjIidbIZWwHDikIQWIOxyqQNQhv50kSMIa1oW1lmGruTTPsNtjILFGkOdJYOfXxOJNl15Jb3vDG3yFKudLSZSLft9wz3VJ2LFH4XQ5NDRGYyNx6u8ZNq6VlZdSTLxq6iqSe9ecD8/sQuv0fqHrQivbsAul7R3aR88f+lmyK5B3TdEiikTaqPIqoaizKC3jMfqXOxLvb+ZSFItRtXChrS2uN5xrEUq8TSXvcIMLgYcqViRepoS4U9isHdZVgkBbJNRxMvS75+he6m3dm6wO4i7ooppeTIgchMiHGAYCL6wmDEEoM53WEF8+QJEncxfKfHle3tfDih2+cGhpKA96C3N7jAAjwAgwAllAgAm8LIDKTTICjAAjAASgSuqdznsXFiLM68rkKvTQblzyEIuDE8hQHJrU0DJ5X76UXTp4StWb1bh17g+yjqqe+dI3v0gv7H5Bu3npinrXj37uO1Q5X7m8ctGvSuCBtBvoHaaR4UTeSyO0VPxHPexX15VTtSDy1Fx4cjF0w2hVAm/JvPOFSmue63pCiffQln30YltvSl1J4tWUR6mmIkpT3T2CMeqhmn9Nz4sF8q58KkIbt08Yr7QiFGvzSlcm5w5ny089ttwg8ZKlvsUyhFZtC+G0N3z6VSnNZ0LgeVXfyY5BcKyMluZF4Yu9i9xk5eI9cDQ+kVPHVKvNFMa0CrnOLafmLvRifBFG7LDGULDDmIQLI8AIMAKMQPgRYAIv/GvEI2QEGIECRQCHLRmyl28izC+EuT4YYZwgJUDYVYqQNWlIAfLOSemQj3HqYpqN0C7dvs31QCSDjJChxm4qPBnqCQIKa3H9hz5Oa1et89u9cV++jFEyDb10mrQ0r0CdUUHedbb2JUg7FBNxp7YTFevR1FJrhNGay/aXfuyKsyTwFrXokXdqgyDyXmwTJJ1SFjT20x1P/IK2H9qWJPAiC4uSKjyEzdZNlhrkHYodgVdV0kjVJU0pbV95zwkp/y6au9R1fqigqvAyJWLhNjtgY1rhNBgQkFVCPF2ZJwE18rshF9vExBRVVyQcU51ML7SA9VkpTIpiOYV85RqVxhcgwKC4HRafU/jizuqzKsyfUfhcQA48
LowAI8AIMALhR4AJvPCvEY+QEWAEChiBI52J0DmUfBEXmcKXK9UAkqODuIOTLMJjvRpSYJwdfUJjY0rJlen8M70/TDmjzC6wO3btoC/f9KW0KYLkM8ghcQXhjwjf+5gg79atzoy8Q0fZJNKc1iqbz58k8JLk3TRJpLMXocS7/9ofWA7dTYmHNVq+4ALhGzE3022acv/zh7bStt/cSpP/eMb4fdvVcWpfGk8Sd2rlt9yZcNuWJToqDBcm5lBRw0x+u1/tnGsYV8hSXF1Hkep6sa/E3nIhxWBogRcK5gulkN8ceIVK4Kmh7xIHJ9OLQDeD0lhYSagwGGtI4wuYKoFslaY0Er58kYw6e6GhOiY+ezn/nQ5WXIcRYAQYgXwjwARevleA+2cEGIFZjQAIJZBRKGEKpfQCejYVZKqDqVTbDYswWT8lrPjicAnFTN9QIpwyn8VKCQoS7+57f22E08owWRB2qgFDUOQd5g6iFfkh3QwvgsYpk9BLnbG88ffXGcq7URE2C+ITHJ4Ol1zfV0/3fOLbtl3A1GJwuFW4LLem1Gmu30RzGxZRLDqX/D4zTvMa/9XdNCFesjx2dg/99ezUsFtcU1V4BnnXIUJu54vw2PIZh1kzgSfVd8Ks1lAugegEmWdFeJoJvEzMSPwSeHguoL7LlwJPukeb3Z3tTC90iGOdPW2uk4tckn7GFaQDrZ/+1XuwJvgiCkSeVErib4CI+GIqLJ8D5jnivREqUy6MACPACDAC4UeACbzwrxGPkBFgBAoYAeTAQ0gNSpgOGV4ghYIMJUiH14R6JGIQRsghBAIiU0InrPiGKXza6gAu12L7zh30xDNbDCJFLZdddLmX7eJa146McL0xwwrZUP4lwr2FQ6gIQfvQQ9+g+/72eHKUOMjrECkrDq+gd5x2OZ112Ubj3tbWfdTWul/83G/8nNuyhFrEK/FzaQoKeDbHBduaDQIPHY1e+ZaU/g4sG6GDy0fSiLyLH5qgFS8WU3QsoeIpWpE6zhQCzyL3neHaO00gQPGp4qYSeJnmusyEwKsQBB7CaPNRdAwa8D4Dp1qQREOjcIWeIDPhl+nYw2oWZFYoZjrPoO6XIbYwvjCep/FJYYQypvW+ENQY3NopEc/dHEHgcWEEGAFGgBEoDASYwCuMdeJRMgKMQIEioObBC+vhxw3aoAgoHL5xkHEzpHAbj931sBqFhC10CmqL9t5Rioq8TSCA4iKvFhySc2WykS+lpA4Jorv3pHK0VJAmcXEoB7l9z18205ee+X6yCV0C79ynzjXIO7wefOBnBmlnV0DinXf+DKmWbQJv7DNfoqnnnY1OkkYaFb1UVCUUevV1KeGzmItB4O1dIhgwcS1qTxYY7r0mIk8l8CRJ5deN1a+JBZ7hlomijL9k0N1f5npe1KMqaWQVyul3DLgvTGpidR5BPtuZ4ON0L5THINtBsI6NC0OmEWtDpmz1b9cuvkiDSQoXRoARYAQYgcJAgAm8wlgnHiUjwAgUKAJQk7R2jxijx0EXf8TLfxfKlBKJ45F3atTzkDFnSRThZhAdCCfKVG1nNZCgiEbPk9S4wcsBXKM531VAROCwOy5yNPnJM+i7Y+VGqGXycXgFcdgzEM9o76kh3+a9/Oe7n6PvPn8rjVRN570Uz43woaXh4sTzb1VOfuFkauhvoCVr51LLSW2O5J28XyXxsk3goU+zCs88D9UJt+i4tRQ5fz1NHXyBpg4liL/i0y+looVr6fL/u117+xhEnvgPfn71C6+m5SvqDdWSOYejdoNKxU5hZDHmMVqwToREVo1NZbR3/IwV92Ri3KG6pQZhehFWlXM281v6XTfzffgMaOtJvBdUxPBFVnFezUjk+OrhcCxIPC6MACPACDAChYEAE3iFsU48SkaAEShgBNoF8QUlBMqcukT+r6BDm7INjzx86IQEYiw4OOKb/Wyp7azmm8lBN9v45du5UQ1ZxlzzuQelo6afsM/SQ91UejjVORXzGTptmesSZhK6
K4kQkFUg7sxj3992lP77jjtpoGeYBmODNBFJzeN4ONKaRuRJ8g4Dr193iGrn6ed+lCReLgi8yed3EPLh2SnxgIkRdr1+LUU/93Hbdfjk5/9C27Z3uK6TWuH49U1005dfaxAeCAtFAQkNItZv8aPCWyOS/Pf1ppp1+O3f631B5J0DZom8bFDcTvpW3IYxVDVTYxOv6+Gnvt0a4n0FX3BBmY51QToJN8d1P/073YO/SUqEKpALI8AIMAKMQGEgwAReYawTj5IRYAQKGAHkvMEf5sZBvTpq/IHuh7zIJwQ6YY+JfGARkQ8sYihV8nEY8Uo05gpTHfyCHouanw1qO6wHwmTzMRZ1bn5ceUHcVfxtnyV5J9vuvXwTxRfW28LoZ94gPjFe7GeEGY9MG9Kondz28L10oK2V4EDbJUwsUEZLR42XWoaLRuhwSSvBtGLFkRWG8g6lqKyfGtYfouq6Ck9bYOOmV9EZp5+d1Rx46oDMphbyGvZZyRsuI7ryUtfxX/qmX7vWUSvc8Okz6fj1zYYKDSHyIDqwFsgtmknItxcSr0F897KkvjxvyulMw4ZVPDM1vciXAY3TpglClelpU/qorOOSq6ol5WdnJntcZ5iIVm9pKNepynUYAUaAEWAEQoIAE3ghWQgeBiPACMxeBEbGJoXiKXGYN/6QF9+694gDaCEVJ9IFcwJxVypUHvkKy5RY+iFpcrEOuVSuSPUjlB1W65HLsVhhCyIGB0c7U5TxgRGaEK/RtoTSrmhsnCoO91GZcPGt7RhyXC4nEs9L6K4k7tz28+YtT9OjW55Jjunovs6U8Y0Xj6eo8epbG6misyqlTsm8XdS8rJhiZaWetiJUeFde9o6cEXhycFDkTU7nxQNx5wXXbdvb6ZOf36w1T0neqZWxd7CvpWFm/5D/3I0g8QbEPrQLp40KUWGVeFUIq9zGGqQQsA+F1pqQz0o65I+fpv2YXoTxC5Kw5j5V18RL6LEkq8vFZyqUraPiSwMYYemq373sBXxuN4gvFbkwAowAI8AIFA4CTOAVzlrxSBkBRqBAEUC+MZk/LmyGBrqQmg041FxgUm0XBlUhFI7D4rBjpZTSnWs26vlRnXkdBw76UCjhAGgV5inby8VYnMbuREgM7mmlicFUoqTkWH9Kc3P29xpknl2xI/HciCbsaZAByE3lRtyhb4TN3v7wfSnD6O8ZMsJoUZBmLdXPN1G19mADlQ7PHJqr1m+hppZar8tt1H/fez7nmcC77Y6dyb42rG+kE45r8tW3vMkPae4UTouwWRhXQHlnLureVcOaoVjyS3KAyDOTeCDvYsYKJnKXguSA8Us+ihvhnemYdE0vUG+uyJ8ZthyueK5BcoXh88duLfw8I2gLezwR+hwxQmwzIaytxlZbWWq853FhBBgBRoARKBwEmMArnLXikTICjEABI9AmjCykcUMYVQxu0EriEYnQcZjAoRaHZhyasmFI4TYeu+vZPuz6usIfxgAAIABJREFUHVe23BtVIlUNk3UaZ74xMhSCIszanMfMiryLdA9RUTw9N5wbidfxwdekQWCXe0/FEPsZ+1pnT5vVd7LDztZeGhPPiR2BBwWeqsKb94o9ntV3si8vBN5HP/cYbd2eqhCU7Vxz5Rq65qo1vrZ3JuYgv7hzR0qfIO6cilXOvxniNWLkyfvvsR20pbiLtoqXLBsmG+ia8ZV0wlSj5zkGGcLquXNxQyY5I73252R6EdYvnxDW2y3SVIQ5r2ymn/lq6DNcbPE5jFQcmc65SWCHNefCCDACjAAjUDgIMIFXOGvFI2UEGIECRgCmAfiDG8Xvt/H5mj4OblB2VYjX0PTBIWwKN4mNWSmYL8zM/QbtkKuGySJEG+ojHdIJ48p3zigrLEZbe5Ihsyp2ZvWdes2JxBs6bWmasYWZ/FExdFIs2u2hL996s+32AokXR9ibTY2mXS3Glbd87Fz6y99+4Hub6hB4W57voOs//1fXPqDG+4/PvNK1nrlCJgSe186c
1FbbIl10ffRvhnstwg0RfmgOOwSR9x/x0zx1m+/nJR8h71amFwhbLhNf3mRiIOIJeM3KmZJjmt34rha0yYZZMYn3fxDXfkJsgV2xjEf3PUO+kRFgBBgBRiCXCDCBl0u0uS9GgBH4X4sACBYkXkcBkVAk/grvcwgDzDdQOHQg1xTGigKCAyRepsnjsz2vsKpEggrDU00VpALSK6ZBk4le+wcWICU6+2ZcPfue25fWTPHgKBUP2jt/xsTzNPdAr2X3dgSe8D8wiB2oSEH0QMniN/TOicDDoIZ6h6lXKAityhXHnUdnXbbRuPSzn3zBK4TJ+m4Eni55Jxv0Q+LlmsAbgVOnyUxkS1GnIO+eTOICTgLEBAjUSbHoKrnhlcTLlnpWd9EzcU/W7cOunqr8iohcgBOTyOc65ossynQsVvcHTY5lY4zZymGIsap5DMfGhXp4JGFUpFOg5GsWDrRcGAFGgBFgBAoLASbwCmu9eLSMACNQoAjgj+qO6RxK+SZQnCCUqiSptpPOpbjHSyLufC7T/MZyOtKZyEMWpqKOq4RuNYZWUpT4OT71ZpqkE4yXuZjDZEE6ZRI6BZITuY9UAi2XOIEUaK4tS5oC2Knv3Ag8jHnxjg7LoVsReHViztjXw0IJC0dZ3YOuHTZuBB5wluuE3HgoUWFUAbOKj735nclmn3v2z/Tcs3/xvAQ6LrROYbN2HXoNp82lM6ldHsMLY/dbTgd7TSqMVCLPC4mX75DzOSLvXGffqLbC1vNG0rwB2IP0wZdPUHwFEcKp2bVttbAqrtUB5+Jz0yrE1i0nJPY1Pge4MAKMACPACBQWAkzgFdZ68WgZAUaggBFo7RoW6h+R3F4cKuc1hIdkAkGERNmVIi8ZlCoyv445JMcIJRP1wu6gG9acSMZBvPdJKqWPOO7isamvGETejLqiyAiRRaiUbpisUwdmAi0fjxRCt2Qy/EwIvNr2IUtnWkngqUpS7GeQDnbut15xcCPwSkVuqbiNGkYl8NCvHxXeuedfS6uWrbQ1sfCqvlPnf98vL9GGI5cEk5Ua7bbIbrqtZI/jeA0iT/wHP6HAFEIyunHsVK2ceFZ597TBCaCi+qwE0JzvJqRB0KjY0yB/oMjOhrGClwEWggNtro2VzMYX+LICa2b+PK+vihLUgVwYAUaAEWAECgsBJvAKa714tIwAI1DACHQIFQWMBlDCkAcPf7yDkCsVSiEd182whqeatwQOTEEorILealWl19PExLOu4WeGmqLyJsH0npA0Cgl6LPlWKaqkhJV5BeZbNDZOkWlHV7v52xF4o6cvo8irV1FE7O2JiSmDtENIOBREQYWu3/bwvXSgrdV2aewIvMVzW+iacy5Kua+1dR899MAt2ssM8q6lZakR4j4uvhWwCgOG2+xtdyYcZxFSinBS3TxZN376FdrutCDwjvWkOgdrT8RjRatwXTv1nV3TIhLUIPOunVxFbxpb6UqK59PlNAxku8TRiqh1Mr3wuLS+qufS4MPXAMVN+crRh72D97vqihLjJ9T0+AJDKo/niPDZEvF7LowAI8AIMAKFhQATeIW1XjxaRoARKGAEQCL0T+e9y0VYjRVUajgm1Fxe86jlm/jRWX6QGlA6QrUWllJMW6g88lHLxPpyjFgbHLpAsmBtpBIvG3PAYby9d0Sb0Al6DGr/dgo89AkTi6NH99O8eUssh2BF4AHHqWtPof45NQYxLVWLdu63fue2v+0o3f7wfba3qyG0aqWrz7mQlsydl3afLomH0NmNm84y7nck8AR594u7dgkmNLGnsLdQzDnhrCbgJYw2lwSeFYnklcCT8wWR9/eyy1xVZHZhu373jZf7gsqd6aVPu7pOSkAr04tMQ9R1xpzP/IA64wtLjj5pfFEuvrCDAhXvi9UVHD6rs4ZchxFgBBiBsCHABF7YVoTHwwgwArMWgdH4RDLvWK5z90Bth8T9OFB4dS1VFyTsByaMNd+ukVYbuKzo/JRcXLKOzNEFbgVEk6qQ
mpw6gcboK1l5HvK9jqqSyorAaz2wl5776yPUtv8lxDymYHDiiWfQiSedafxO5sBLhkiK343Mr6Oeyzel4RY0gYcOnFR4VgSelfpODvTAvr/RkcO7aefOLTQ4mMjhGI1WChVhKUVKojS3ZYkg7l5lKO9ksSLwJEl/i1Dgff9n21L2lF1OODNYQRB45aMHqXzsYNo6xCM1NBxdROMlta57u33/duo4sCNZ76yLr0lT+/kl8NDofaMXGGGEeG9E6R9Kz42Yz2dFhtF3C+OIfBZdJaCaiw3jzcQhVWe++f4iwm2Muf6cdxuP8Z4iQvurxJdcUN9zYQQYAUaAESg8BJjAK7w14xEzAoxAgSIA5YvM+5X4Zj4qDqOjWZsNCATkKcIf6gidMV4m90avncMIAPl0/Dp3eu3PT/2gTEJe3HkgrfsVaxZ7HhIMK2BWgTBGyKCwD3DQNZRi4ldOiqiRqQc896dzQ77DjNUQ8vGBERp6cSYU9cFf3ExtB/fNTGNiIhH/qZSWlsV02avfQC0HeykiwDRwFOwnCNBeQd7FF9anwRDUvjA3bEfimQk8O/Kut+cQbXvu7rTxtrZ1Jn+3evUJtGHT5Wl1VAJPVddC7fvDW7cnQ2jNN7oReV4IPCtlVvXQNiqd6HPciiDy+iuOt6yzY/Nd9MKjd6VdA6Z1C9bS2jOuoOYl643rmRJ4shO5P4DjoHDzhFED9lMuTTrME86mg6nO+4Ss4yd9guqQmg3TC+zhuUJJLD9TvcwnV3XzbYBiN0+YV2BsXBgBRoARYAQKDwEm8ApvzXjEjAAjUMAIqHnwkIMGTqBBGBNISNSk/fgdDvJqGGGm0BVC0vBMD3Yg7l7clU7eSexWrF5MXog8SeBJ0g7tyDBZt/WAO+04vdmtmufr+c4dZQ5LlHnw0sg7OTMLEm9lZQu99cRLUsKS7cg7NJNN993NW56mR7c8k7IOKoF3xgkn0pknnJS2TnbkndWC1tQuSCPxQOCBvCwRpBNy/OF5l+S6mgPPboPYEXleTCzMBF5D/1+196MVibf5ti+kqO7UxlIwvfqTBonnl8Czc6JFHxWxxBcfIJ7w//kKNw8LAZQJkSjDN4M2vfBDKmpvzIAq5trAQnfYTbUxQ4nHhRFgBBgBRqDwEGACr/DWjEfMCDACBYxA72A8mZstyD/ujfBAhIKJb9WHRO435LbLRg6ibKmYgl5Sv4nDn/rrVuru7HUdTn1jLZ3yig2u9VChLHKbUIndilRkRhkXpgq6JVsEHogflKAcWXXnI+tZEYibv/cjETb7J/umYB06HU5bLH6WCEXp4rr59OaTXk/xBXUE51kr5Z1sEOQ2iEOQ5tkqIPLUOQ4IQu2VG0607e6xP3/b01BUEk/OBz9V4k5t8MI3/k6rfTWU+4T1TXTDJ0/Xug+V1Bx4Oso7c8N95cclw2nN5F18oDuluqFaLYlRJFpu/P4MQeI9tLzU1YXWajJuLrSSeMJexTqqBgDa4GRYMd9Euxx+UERikKYXmZCKGS6L9u25dGjWHpSoiM/HYkMSzoURYAQYAUag0BBgAq/QVozHywgwAgWNAHICdfUnwmZxKIIDbI8g9fwUHGahEqksixiKrgFB3OGQqes06afPTNVtfvr0c48fl19d8k6Ox4nES6xNsbHGoyM/obGxnxrrYudMajfHbBF4OPwG6cjqdY2sjEa++86305TIE2nOeZfWtjh3lih58d59261a3TsllC/aspOKtwrDB6VMblhNUyes0WrbqpKVY6paDznvDu5/0nP7G0+8nBbOW2oo7tyMaLY830HXf15fEYfB/PX3Vxhtq2GkToOUBF7JeC/VDD/veT5ShaeGzYK4Gx/ssVp6Q3FYXFpGsYZ51LR4HdW++b10fdQbjnbqO6vBY34wxMH7LPC2ypPnedKaN4BwHoF7aIapDzS7s60WtBOvqnKMT0z6cg3PlxGULpZh/azE+36zUP9zYQQYAUaAEShMBJjAK8x141EzAoxA
gSIwLg4rMu+d3xAgkC8g7kD+ITwW6psgw3DdoEVOqO6BMU9KMrc2g77u9XDX1dFLf398q+dhnHz6BmpomknGL/MOggCRa1M0eYuRAw/FzpnUruNsOdHmOzm+WdFzeOcL9Nuvf9WAASSeQeSZC1x6SyNUJA6gajnl4kvolItfr7V2ZsdUEHeR23+fRt6pjcW//G++iDw3As+r+g5jAgm5evXptGDxqUaorJMLrZyDFxLvxk+/gk44rsnoB21HBd4IIwWBZffFgMTUzrRCZ2G6ql9Bd3/5aqPqaNdRmoyPuN4mSTyo8I4uneuJxHNT38nOzaSvVJDhulcHb9cJWVTIp4GGOpxs5czMxPQiSAW7n7Vxuyff77F248N7L3LgcWEEGAFGgBEoTASYwCvMdeNRMwKMQAEj0NY9kiTcdEM91QT1bqqbbEMT9oMT5u81V59b3js7TKUKT3X5NYczFtMWihZ9xGgiIkhXJ9MKcz/ZMrHIZj44nf1nVgA+dc9v6al7EuGeiOwqFid7w5lXsEYmE9q05r0QeGq+NpB3pR/7hs5wyQ+JFySBh+cfZId0Kn7lWR8wxq1D4KGeG4m3YX0jwbgC5J1aZBgp8sBZEXkqwZUJgQdX2tu/8VGyU97ZLRJIvI2XvJ/WnXkFbSnq1CLxdMk743m1CbsOMhTUaQOGJQQzF+OQhFdMpIOQanKndAO5GJPWm4NNJa+fQZn05eXe+qqo4bzMhRFgBBgBRqAwEWACrzDXjUfNCDACBYxAj1CvQb2B4hTqicMzlHbIbYeDJMJvoYTJpdrOCuawHkzUsXrN1ffQPY/62lFYl3++5rWG2s4p72CUPiJIqS3GOkq3VLcOsxU+i36xt5pry4Qa1F3p5DZOP9eNnI0iJLFnIBE+DgLvH7//XdJR1iCqLBoeGUv9bVm0SKjv9BV4ksDzQt7JYXgl8YIg8MzEnRyLVwJP3gdjC3MBeWcm7sx17Ig8jK+hOipMHkYpEwLv6SefpGfEa7htr+ftVFJZR2/44v3GfSDxthZ3WebEu2Z8JV0zscpT+24qKjeC01NnFpWtHH4zbdPP/bkch47pRVjDU1Vsgw479rNuVvfAPKvEpGIOqm1uhxFgBBgBRiD7CDCBl32MuQdGgBFgBFIQANEDEg/FykxAGlKAvENOO+OV5xxI6gS8kmP5WH4c8JpqYgaxoFO8EHhGwn/xHyjFoA479+IztEjVsqLzk4nDocJzKpNTJ9AYfUVn6L7r5PJQbh6k3EMg8BBu/LQg7/5y592G4s5coMIBcTcuPCysyvHnXUyXvOVyLRyS+dqu/7pj2KxVY8iJN37jv2v1g0pu4Y9OIbR2xJ3s3C+Bpz14m4pmwgrvTQjHgzFIpgTek488aJn3TmfMV9/0uE41z3WcjBLuOrqVtve30Y6BY0a7xSKy+8SG+fTGhSfQ8mhTxiZC+SbZJVhOuSM9A+rxBjulo9/0Ex67z6h6GBWCcKueIwwsuDACjAAjwAgULgJM4BXu2vHIGQFGoEARgDtsxzSxZCYyVEIPqq58q+2sIC4E9QPG7YWg0iHwQNjhMIuCdZEcHAg8nYJQ2rJiEUorAHQi8HJB3mG8OGC2945k1fTEDhfseyi3gGNcPA8vPLuNfv3VdMIS5N3AiDPZ2XTahbT2nIvpVevcD6ZyzqUX/ovOkqXV8aLCczNSMRN4khhWQ2WtBqk60eqG0PqarMNNKpE3JUjXjr5RKhs5SOVjB311tbOrjv74nfdp5b4zd9BcXUfnfCGhwAu6gMAD6aG6NYO0A3kniTtzn8DmRGGw8aZFGzMi8lR1Y9Dz8tIeyLJq4cTb3Z8992a38ZhNLybE+wIIU6ngdbs/19fD+hkJNX+dCKHlwggwAowAI1C4CDCBV7hrxyNnBBiBAkagtWvYIIDKhPqoQSjFUIZEeKxTGGaYposwHKhuwkgwSpzcFFAqnk4EHg7SUm1nFf6qS+ChPxBX5aW3G860ViWbYbPm/rzgE9Te
k7kcoS7F/u8UxI/cQ7/52lfoyK6ZEE8d8g7jWnvdd43hNVUXu5J4Bqn2P78muuUeX1OauPp1NHHNxVr3IoRuaGSChnZtpYmXtovX88bPyPL14nUc7WqKUF/vYSOcuRg57kSrMsedUwfHb7yMausWGlXyReDJ8YFYgdIVBWq84tY/a2GjVpIutLd/6HTP9+IGEHin/5+vU9Xi9b7ud7rJrJAGeXfD7j9q9YN1/fqJF9AJdS3ajr5qw27hu1qDCKCSkwoxgOY9NSFNL0BEoeDzErkZs+m87mmA05XDqlKHWhYpMLgwAowAI8AIFC4CTOAV7trxyBkBRqCAEcBhF4dfHDxwKOkbjIcqTNYN2kIwsvAyRrOJhTlM1o6olCYWbnjJ66qqBoo8vFAm6QTjlcviphALciySjIBzsjT5MDvCqk606HtgeNI2bFaObfEV11HFwtXJoa5bUEp42ZVcE3jHbvokje553nI4kxNxOrJ8HvVuWJ9ibHKwq5X++uJzdKi7LXnfwvq5tKihhc4/6ULasGkmXDjfBJ5KVIDoqYofpKL+fSIUWrgJOwsnaVLk9JwcmKT9f1hAwx0VtOXwW6lzok+8IYppi1eR2Cs6Zf38ZbTpI7frVPVcB27WceEcDsdflC/s+oOt8s6q8XVVc+gL68818pjqOPqqbYSFOMMeA9mO/KthKSDHQfCXii9EdE0vcjn2sOaJbRIO8nhmuTACjAAjwAgULgJM4BXu2vHIGQFGoIARwOFDqo9wSARBFKYDkhu0Vrn73O7J9XUvY+zq6KW/P77VUNqBZAP3gDBXNwfUk0/fQA1NtZ6mNr+xnI50Dnu6JxuVcQgeEcRENvMrGqSOUHxAuQi1jCRCMB8rHKQbLXLejcSdGaCKBato8ZUfSoPm8lMrbOECgTf0Q5Fr76cJx1uvRVeBNy7UdmM//LylYYlU3KHvsbEx6qosoaOveZUxlF8+9WAKcWce34lrzqQrX/02Wrc4QfaGgcBTzUgwptrhbRSbEkScKIL7siTyxruEakqs8YG/LDTIO5T2rv+kbQN/mpkySLwSZxIP6rvm6vqsEXjqM4KwWby8livmbSC81LBjmdsU6RTsCkggvB+p4bte+w6ifhjNGJoFEdUlQnrxualjehEEDl7aCCNm2EstDeVepsF1GQFGgBFgBEKIABN4IVwUHhIjwAjMfgRG4xNGCCoKwmjLxWEtnzmGvCJeCGPWHaMM69y5Yz9t27o3oYbSAMSr+k42icMncoa5KZQ0hpBRlWwqa0DcoX0nd16zAk9OBkq8X375K44EHvLeNb38Isv5n7k2Rs01iRA7c4Eqc+TmzAi8I5tOoGPbD9DWOzenNL/hyjNpzvrFNPe4JTT0P58l2rcjhcAD0aCSw3L9J8bHqFWcqz8+edhxPcvLaylSkshf9bGrbzRIvEwIvN3HjtLuY610//PPpvR7wXGbaNWcFvGa57q/zG7C8obqoW0UnewT8038RiXyrMg71OkbuJ/a+u+mQ/H2mX4dSLyKaBktbZpHVYvW0cp//pTrWP1UwH4ZHB43DCm8qu9kf1DhfWr1PyW7nyHyIgYB1T+UaN9czOo/P+MP4p58hNq7jdsuv6md6YVbe0Ffx3t8tzCqwhd1YSkx8XdG43S4e1jGxONgBBgBRoAR8I4AE3jeMeM7GAFGgBEIBAGpwkq4/EXpWI+eY2ognWfYSCGM2c2pEARfpSCZMBeoXEA2PfHoFuru7HVFxy95h4ZzGbrqNJGgFT7AEaGCIO+AJTB1ypGoqmjM49xxOE6b7/oNDR3aTUOHdxuXobirWLjKlriTbTiF0cq8dHTe/3FdY6sKT7/sFNqyM+E6aldefpLIy9b9rLGvoDxEMYg78b9WORRx/WjnIfp+7Rjti6UrzqLRCorGKtO6+9n19/km8L71yP20p73VcR4g8i48/kTHOnYEHm4qGe+lUhESWyHCaiWRN9Y7QW1P1VPnjsaUdkFqoRxq/SANTY5Q53gf
DU9Nvx8KEtAcTiuVd7hn5Zs+mZX8d2gbzyqMErCPr37af5ju7SddbYmjJJxw0axQzYVCVuchCJubqo4rrtn0QpKwOvPNtE5YDSzwfo8ceFwYAUaAEWAEChsBJvAKe/149IwAI5ADBPoGhuhPjz5Nv3ngUTrlxHX05ivOoZoq+zA93SFBhQWiA6UQTCHM84IKoq0nPy6muhibwzRx+IsK4g7KJRzKzYdmtCvDae36WLF6Ma1Ys1h3CGn1wnIwB4FZJsi2TJ0cpYIRuOoQdxIQJ2XPX3aMUEe/fXihE/huBN6oeObGfvQbitz+e09ruLcvTn+ev9L1nldOPUAlZVGqX9homFNA0elEZPYMdIk16KLHmqror+KlWy474xp6yzlvFXkCp1JCk93u1yHvZBsrm1voutdekNbkn7rH6E/d8YQBh/iPnN9r6kvpNfXWLpcgqw7eeMRoy1C5CkIzYhJKjozupmOd3zbqgMgbnkyQeJHaiNEX7kHIrCzZVN8l3pfLkqkOvBJ4Kw5U04qDNcZQ37n4VOPnxOutQxitlGNwae4VuVHzreLy4ubttveCuK6rrEZf0vSiLJqQgo6IvIvZNr1w++IoCAz8tNFQHSOJg5/7+R5GgBFgBBiBcCDABF441oFHwQgwAiFG4G0fupFO3bSWXnvGSXTLnQ8ZI/3i9e/KeMQ4nMm8d14MFzLuOKAGwqIkc5qODGXCwR+kHdxPkX/KTR2GNkHkmdV4mRB3cpxecvMFtFSWzWTqlKgSd1LB6MWVWA1PNA8QCjy8/BSnEFo15LTk+q9T8dZdWl3Eh8fogfK51FmVIGScCgg8lMrGaiqvq3TNo9jadVjsyUROxK+ubXFrPnl97eIN9OV3ft0TgXfftmfSQmbdOvzga85PhtPuHZ6gHx8dSd6CvFpFCoEnL7x9XhktK09l5/of7af+xwYMUgWKPLvgQpXEk+0VVxVTVIRFTyKn3vQvs03eoRs1zFuXwANxd+5jC5LkHdpZVtGQxGxckHh2RB6eKShYK2KJ/Hf4kiefBJ6O2s1t/wR93a9BhDTSybbpBdavVGzwviF/719B4yXbAxGLLxS4MAKMACPACBQ2AkzgFfb68egZAUYgCwj8VijtThaE3YKWJjrc2kEf/OR/0l0iIb0s577pwwaBd4qok0mBGqCrP6Ew8XsoyaT/TO8tBPMNkIxQCIE08EMyZYqR1f1QkMCRMt8HPNUR126eu/ZM0e4Xie57cIZued15RXTShlLasD6SdJT1g5OTEjETAs/JxELN+1e0ZaehwtMh8e4hEfbpQt5JA5RTx+9PwtG0osWVwNvXuidZ30zgTQqjB5g9qCUiyCxZ7v7cQ54IvA/88sd+loq+/ca3k5m8Q0N2BB6umUk8SeAVIyTWhUcAidfbfz+NjiWwMRN4La+8gvDKdlHVZzoEHog7vMxFJfBwbXJNCcU/ak8GA595wnAASsVRkR8vlyGg6ti9qN2yvRay/UwNIrJtehHGz8US8aDOEQQeF0aAEWAEGIHCR4AJvMJfQ54BI8AIBIjAC3sO0BXv+jR9SRB0rz//DMK/QeA99IuvJXsBwfdHEVL7nzd8MKOecThr7U6oWcIaduM0wTAe7jBeNUwW/0ZYZ49QO4alZKp8C3IeTuFxN3130iDvZDGUUwhjBAEhlFArVxB96H0zZJLXcbkZMPz6ySGvTZJT+Cwas1I/Rm67xzacdnLDajp87ZX0x8/fZjsWSdzBsRgKRKnAww018xuMcFqnYkXggbibHJgU5J31ncVVRQQizwuB50d9J3uHCu+WgXTCyYnAW1pWTO+YPxMyKgk8c9hscn+J/7FS5YHIiy6PUePxlVQ8dw1VLlzneV8cfXhv8p555yzTuh/7vbm2TOQmTbxHu7nQQnn3nl+kj62utJzqxctcnEg82Xd774ihGkZuSRQ7wwutCfmoFEY1WZAGEdkwvQhyfD6WzPIW7J+6Kuf3oaD64nYYAUaAEWAEsosAE3jZ
xZdbZwQYgQJDACGyL7x4kJ56ZkeStHv5695rKPCgyEOBKg8qvMd//72Mc+G1C+OKOCwaRSmEnHLqcoaNdJQhnWqYbKlgnMKgdlNx01G+5eqxsSLwoLr71vdmqJQkcQeCysSwrHIh8XZN7qF7xx+g3VMzKjPM7aLI+XRS5VpaXbzSUPFZlfa+Cdr8gjdjFyf1HfoAIQE1ilWfIPLUMnHNxcY/t96xOc1xFr83E3fyXpXAK6+vIryciiTwDlRE6ZeLGwjk3USXe/6/InEev+fbf9RW4Pkh8OKjwzQ+OkINh1+kBf2dyWnE56+hoVMvocmFay1DaGVFNSceCLzBxxMhtE4Fl81EXuUrqmjZBU1CsTzmmE9Qbbf/xW4CcTfwUk9ad1XL6whEXvWKmXx65kpWz6mTCu9rX0nkuTMpMiNuAAAgAElEQVQXs/pOvW4XTmvVtySbcG1wZCLrudwwzmw6VTvvAvur2ficBLb43MALn8eZKB6zMT6/WMn7YF4BlT8XRoARYAQYgcJHgAm8wl9DngEjwAgEiADUdh99/9X0dpH3TobJfuLGHxo9qHnvzhEE3k9uuj5J6vkdQs/AmGGkgFIIOeXM8wzDYSWRMyqS4iYrc7GFSe2mYmc213DaPyU7j1DpriNUfs/fjWrx1fNpfM18Gr74ZL/bLnmflROsVN5BbSeNA8zEndqxHYn3zbHvpBF36n0gwD5S8UFaPL7cdh5eQmmdct/JDvwoiswEnh1xJ/s4fupJqqUu4586BJ7MgQcTi0drKrXIO9nXj868ixqurtUysfBK4A33ddNIf4L8aunrpEUKgSf7H1+whgYu/6gtqaaq8EYPjFLPrxK46BSVyJv/lnpqXF1HUKRNG/s6NgHybvd/P+PaDYi81e8WtsEWBV9QgPjo7JuRQW7vb6Mbdv8xrbZd6GxLTORBjNg7f9qp8GS+tm5BWJqL4fwr3u9ANg2MjBt5VIFJd99+2nd4M/X0H0i5ZemCM6muejHV1yxxxcNcISyGO3JcZlWk5wm53JCp6UWYvpxRp9pUGyPsGy6MACPACDAChY8AE3iFv4Y8A0aAEQgQARBzD4twWYTJPvnsCwZpB8UdCL0fTxN2cKVFmC1UeZm60YK8A4mHEhZzAy9w5ot0xEEJjnpQFcQFu2SnmMCBbK5wkpShyl7mls26IM6QoN6JjABxB9IO5J1dAYmXCZFnXj/kunvgoSmDuENIKF465br3FtHqlTPSKjfyDm1KB9MPRN5nKPHsipsSr6m6mF61Ti+/kx/nXUnguRF3cvw1U120gZ40/qlD4EkXWuS/i7cmyHydcn71FXRR7ZVU+YpyKhcvt+KFwOtvP0rjYzNmFXYEHvqMCxKv79KP2Hb/+eWVyWv9d3bQ0D5v4ezYVetvmEPiaU+GszrNVZe8k22s+pcTLZV4BlFWlu7SbEXivefna1NMK9C2G3kn+x/90YzBhfwdiOaYyJXp5BAtc7nB8OKPT95MHT37HbfAprXXeCbxwhYO6kRsuu1/r9fN4bUwP3IzFNFZN6/jyLQ+3rdaRD5FLowAI8AIMAKzAwEm8GbHOvIsGAFGIAAEkO/ulrsepi9+9J30lCDv4D6LsFnkuntE5Lz7jSD1LhV58fDz2qvOo2uvOCfjXsdEgvKO3kSYYFjVYk6TzHXCbqk+iYrDLXLb6bjJhkElaMbQjfgEeVfz9d9p7S8o8vo/fIlWXXMlKGyGRDgeClQ9//Gtcdq6Y0JL5aS2parwdMg73JsIzS0yQkA/VPp+RxIP9a1caUHeNQt3Ut1iR8o43b/j14/Stjs3G2SmrsuuVOHpEHjo+8ZoF+2bLBF57zQZU3HPt+bfnnCVFCxw0/9tdIVAl8BD2OxAR2tKe6vbD1LttFOuVUdDp1xCw6e+3nIMKoEX6xymAz9OD2l1GvySt9dR1YooRSNVhvGLVJzZ3fP0Rx5xxQIVRkRo8MioUAX299ITLTuoq6yfLnv/J+kf4+NUFhOOndMK
1PlCQbc4GqVXVs4QkbhfzYmnhs/a5byzG9TYR6ppam2qSg9fTKB/u/Byta1ndtxKfQMHjPoyP6XdLvJK4sGFV1f1qAV6hpXyYfbkxfQi15+HOnCCCG6sielU5TqMACPACDACBYAAE3gFsEg8REaAEcgNAsh/B3MKKD6gsquuqqCzzziJrr3yXGMATz23k/rF79esWJRx6Kw6ozZhZAFiIKxqMSf0DcWBUKpk2yQC/VSJQ610kx2eDjvW2RluZJlOG0HWeWn/TmptfZHu/eNvk0TZ2We+jpYtWU3Ll6whL+SdHJdfJV59ddRYP+w/hOO9433e1FEqLt/9RjEh591N8e9owYXnLCLCFKFqWVW0kv41+n6t+zKppEuSqwpPEMXfe93nPHcLEm/FypjhJOr4DP1/n6HdJRN0w/98RJvAe3/jJ2lVbL0nAg9j0HGhNavvcN8ph3e5zr/zfTdb1lEJvIaaCWr/0yB1iJdOqVhaSkvekchTV1JcaeR9g+IMaigQeWZCFTnvVMMKuz5a29tEGzP5FfesmqB9r9+YrF4WjdH8OXONf8vlW1RaapB4IPPMJfYO/dBg871WBJ6bwYtsA+SdGjKLZwouv8a4RRpFq52nS+KF8fMIBBly1Hl5/9fZZ7p13EwvwvZZg3mB9EQoOBdGgBFgBBiB2YEAE3izYx15FowAIxAAAo889gz9cfM/DMJu7crFhgrvxu/cboTKZrMgMTsOpChhC1lym7dVnii3e3SvS1MKqbZDuDEUi15Lvg996nh/cMvXae+BXUa+PhQzAbFs8Wp6/8FFVLnPOyHQ9T/v1oYGhChIAoTwxgWmkoB93795x1d2CgIPhhX3TjygPQ7sHxmW9r3YTdr3ea04MjlEo+I1NjVM8aJhmhCkYayonGLFFVRbkjCnQcG6QIkIfFSFp52RhdM4Tr3m1bThlCbqvOd2mnhpe1rVyPL1FP2nq6hk+XHGtce+sJm+03mD69QkeYeKUoGHENrKV1a43uumwrNS360Z6qKa7g7Xtq1UeGYn2uqKCSoVufQH9465KvGaXlNJzeKFMjJWLAj8ciOEVlVE4X1TVeG6qe+gumvtOJYyl74F1bTzivXUG00lFctjMZrXPDctjPxNdXVpJF7pf/RR8U5rMxY34KxCaHVyzyHn3bMvWLsjSyKvSHwdNSkecpVDRj68E9e92W1YxrPQIAj+9mmFuOsNOaiALxwyMZgIaoh2phdhVHs3VMeMdBNcGAFGgBFgBGYHAkzgzY515FkwAoxAASMAJUnvYEL5FMYQHDdovRgyuLWF637CZJ3azUfYldV4JHmHazL/m1VI5qefaKDGhrkUE2F8XoqbCs+KnDK79OaawEP/yGGIAmfai0rO9zJlrbq94x3UNzHjoArVHwg8tcyLiST/5dXCsTgRmg2y2Lw2f/jcrXRse6pBgN0A5qxfTBd/6a1UGik2wj5RRv9wR7J6TBB35nLsK4kx3t9/Fz0gXuaCnHcro+sM5Z0sIPAGu9toON5BxVWph/TGsxZR41mL09r51iP305721BBZWUk1rsDvGuPDdEbvYeofn6J+JycTUdeKwHv7vDJaVj4T4lwSmaKayhmSGGq8IUHmqXnxVOJOjmtwJEK1FWVpOfAkEY216h8apyf+7Q+Oe8KsvEPlpz54mnGPmcDD78oEidfSlFDiyQIl3j/Xp7rXRn47TCXi5adYEXggqvrFvnHKubZXGFbAtMKt4LuCYvGGoxJ5rzn14263kZ98ka6NZlghbCG9qukFMMZ7bJsgmXWMVjKEQvt2kIoG0c+FEWAEGAFGYFYgwATerFhGngQjwAgUMgKj8Ymk0yEOTeUi5MXKfTCscwxKNaiqwhDOGVSYlG7YZDbxVck79GPkfxOHKvMB/axD5YQXyvx53lwj7Qg8s5JRVSyZk8JnSuC9d/RDnmBE/1JVmQ0C79jYARoVqju1qKo//B6HW6zHonKB90SZY547HRIP5N0/febNngmQ7p/3
UvygvoorPjJIva17jamVtAhZm01Z+JbjqWJpbcpVOxJPJfAkeSdvPDLqrM40E3hm9Z1sR6rwdDdKXEAyNFJCUKWpjrDq/dKJ+pH3PmiELFuFjvb09Rr57tRy+LQFdOS0hcavrAg8/L6laY6RE08tCKU158RzC6PdubCddi1KKBlXH2yiNYeaafz15TQhXuYCoqpTmNw45VzUJfBk25LIAzpL559Ji8XLqfhxbNZdUz/1whjSq86jSiiakeYBBZ9fOqYXfnDwck+JWPQ5gsDjwggwAowAIzB7EGACb/asJc+EEWAEChiB1q5hI8wJZAvy6BzrmcnPFPZp1Yn8OjisjAjlktcSVJisU7/5DgVDzrsf3vqNtCGaiSRUUAm86qpaqq6u04bUbGahYgvSDsoyMyEgQ6D3dCSUPH94uNl4oSDnmG658Lwiuki8vIbQuinw4p2DhJdaShsrCS+3YlbeyfoSd+CTdNudjjFsLl1EZSKs1qm0Pb+ftgpTC7MaD8TdhivPpLnHJYhXr4YZg48N0eBjeioulbwz1sqBwMP1xW/bQGWLa1KmtfvYUbpv27MpajwQeJVdR2mtCJttEuo7tYwKjDrj9vn8VALPjryT7emSeCDv+ocipBuq/4wwsJBqIzOR56S+w7jMBN7RkUQoOPLhLW48hRorZpSPViq8ohfiFP1Kf9rWuef0HXTPK15I+/1UWRFduOgkujC6Ke0aVFNuztl/evJLbo+A5XUQeceteA2tWXqWoVq0S0sQNjU49kB9VbhCelWAJV7I0wjVN8g85OtzwtjXAnq4CekA6gRmXBgBRoARYARmDwJM4M2eteSZMAKMQAEj0CHUFiBYUObUxQylia7jZb6n7SdEVSq/QOCMjE1aJqQPcl46B+Ig+1Pb+uNf7hG5FX+f1ryqPpMXVQIvKoiDpsYW7WFJBZ5UHIKgAnHnpGTsif+N+safTFECfukTb0j2GSmq1CLykP8OxSuBZ5cDD6Td0K5jFO8aspx/aUMFVaye40jkHRzdaX2vUP1NIS+YIKTM/hLIizcnmh52qr0ISkWvBB5ulWG0bv117NuWrILQWXP4rHo/VDgop3zl1Y5kwsDBXtq7+W+09Yn/MeqXWhz8nUg8SeCBvHtNfTQldNZqPm4k3vBokdi7iX2li6U0sUjkgEvMWxJ5+w6nhz/L8NnxImGKUToiQoX3EIi7AfFTLbHSBAmyqvFKWi1eKB+ZMydtWmouPCjuQNxJ1Z1aGeQdtSRCi1cVz6XrylNDx6HAQ74/p+JVgae2tXTBmbRmyVlUXQG32yIjZBzEkxr6qZOHr3xfqqPw8FL9LxwcJ2dx0TBMEgr1ngH/Rjte+/RSH2HPw1DeKV9kuZleeGnfT10QnsCNCyPACDACjMDsQYAJvNmzljwTRoARKGAEQLQg5xGK1UEgzFPTDVEFoYQcYzBPADmJQ2NQYbJu+EDViDyDTjml3Nrwe/3jX/wXy1utFHhL+krordtnlFJewmgnLz+Vom86XRvbI8N30cjEYWEqUGwYWciy/6Vmuu3m1yT/XUSlFI2k5vxSJ3Tde4to9coEWeLFhRb1gQHy0a1UXGhB3vU+sU8L7tqXL7Uk8azUd1JxVyQIC3W+5o4WxdZo9e1WSZd0UtsZOxCnnl/0OTY91HOM8EIpjonVaRDPk0XSrYiYJxSGeNagm2t57RJaduFy4z5VFdT2+EFqe+JQss89e2+ngcEE2VUsntdYQ3qIp1VOvOp//7EWcWeeXHksXbkriTtZVzcfW/+L3bT7v59R9u4Mkffiwf1puEoCb6BkmHomd9HugRkHZexoqTeUBB4aaChfT6cv+rQlgYfrUon3tTdsdiXv5IBUEk9XMZwJgafmwJM5R8uiESP0EzlZsZ2QGgEGS1ZfJIG4K9+fSt7JuQwvqaNsEHlhUwSaN5OTgYWd6YXbe0im1/FlYInIw8mFEWAEGAFGYPYgwATe7FlLngkjwAgUMAJqHjw/irZ8Tt0t
N5EM5cQBEaG2OCTmmkjLJynqRODhcDzW20sTwh1zciwRNv2llxKhekXFEVqwMEG4OBUIjUDWjHzsUhpc3qKFrSTv0K4VkahL4qnknRzjN8e+Q7unUhVMduM3CDyBwXUl76fVxSuNcFld8k62aUXiqQSeJO4MIksQE1bzVcdXE2lMcaZ1w9/uuh8CD2255cKT6ruiqCDEGxM5t1QCD3sBewK/UxWG5UtqaNFbNxhqtloR9g4S8+mfPEsDh1IJQ5B3IPHUEhUhnRHx/JoLsIXCbd3ZV9Bx4pWt4iUf267vP00DL6WSSyDj9too8KC+ay3alkLeGc+feFkReLgGEu/ul82QfeZ53zf2LN3XPkMkJq/X2ZMpF5ZuNMJpsT+rK0pd86A6udA6rYOdC610962IlRjv0wi/NBsylAhVIMi70l5ndSD679so3ouEkjCoks/3cLc5JFJfxFxVk6rpBdqE+tysfHTrS/c63gNaLMh33fu5HiPACDACjEA4EWACL5zrwqNiBBiB/2UI4BAscx7JA3Z7b+HkwbMysoBqplKo7WQop1UOtlwtM1R/KFA65rrYEXhTcRE23dsjyLvUdT67u5nwQmmIxCgmkuhHLBxpJVEDkmZ01Tzq+/AlWlMbnjhER4d/naxr5cqKiyDxNj9yHB3YmwgVLC2uE8RQIpQQOe9WrRDJ+KeVd+aOdUk87I1/jb6flk+JxkTpfXyvbdis3eQQTlt7+rKUy30THTQw2TWjQFNSt+WSwEOIop3xgtNiOSnxQOCBvIs0RAziFgVknR1xp/az+tOvTP7zwK+306Ag77B/zCorVYUnb7Ai8Qy12NK19Op3fUpr7/mtBAIPocC6z68ViWcVQvv4dScbobNP96SbrzgReKUC929t+AYdV7PRckrvH/ypr6l+p/KtRsijbqjoMztuFcYces7IckCb1l5D9TX2BjnYUtXlpQRTBnyxpKo1G/68z9O8giTxdIw9PA0uwMp+zKfM4bVBm15gD4FU5MIIMAKMACMwuxBgAm92rSfPhhFgBAoYgXZhXIGk1yhO4ThhnKJUR8RFOGS+wmSdcPFzwAoKZ7MDrUG4CMXdaEciDNKqvOvIElo+UmkQeCgqiacSd1J51ffvl9D4mvlaQ1bVd7jBUFEJAsgiCjPZHsi8joOn0cY1y2xJO3PnOiTep2o/ZJB3MpS6497nteZgriRVeFLt2TPeQYf6Wy3nlCsCT9d4wWnCMLYwl8N/nFF2yb1g7CmT4s6qXUngqWGzhoLT2AOpRF7rsUcJL1mswmkXrzmeXvH2T2Q9XydUyRinLoGHMZtJPLMLbVd5Pz19djkdXdpq5L0zF0ngRYQStiSSqj6sLC6mU+s20WfXfy3tPkN9F3/O1z6GCu+qmpM9zdULiedG3slBS8MIpHXAlzAo8e0dVPJil6d5xWvLqH+Tfh5Pp8bzmcfUbdJ+9qdsUyofgza9gIpTfnHlNn6+zggwAowAI1A4CDCBVzhrxSNlBBiBWY4AcrQh/xAKcrbl073OK9Q4wFSW4aBbTENiDjhoh8mEIwgyxSsmsr6VicVIe1syZNau3fccXUYbx2aSwlcvXGIZGumFvENfLw38Z0qXRrJ/DfIHNy2v+qAnGJATD8YW5pDaiyLn00Ul5xsHTBBH2PcwrRja3e6pfVm5es0cmrtxvkEeY++1DrZR30SnZVt2ikNZWceJVmeQIMVgBOBHgefU/q7PP2ZcBnmHPvCcWeXAs2pDEnhbvvl42mVJ5BlE4HRaOoTT4iWJPKnCa162jtaLkNl1mzZR93OP0WTnISrqPpLS5tTyU4hWnKoDlWsd7JNxMU+vOTOREw/htPInVHi76w5TZ1kfdZUlHGM7XzefWgWBZ+evq+a/Q32o7xpKEoTer057KG3smRJ4b6w92fNcdUg8XfIOEzLnHDRU4f9vrxFWDLJf7g/XhRMVus5aqlPNsY5uXsCMO/LZQFDhvUGaXjSJHIZojwsjwAgwAozA7EKA
CbzZtZ48G0aAEShgBJAPp6s/EU6Zz5BPLxAi3Au5kkoFcQciwS7puZc2s1U3nwoONYzWTX0n539udQud2lVPC48k8khFa2qppLo2CY90nfWKlyWBh4O52ZLVomGvBJ7b2FTlil8Cz8g/dXwLxVY2Jwme/nv3Utea3vTuY8VUIhxSJ0sToafmEqQLbbYIvLZbn6fBfb0p6kIdAk/mwIPb7O7bXhAq0BlVWbRmxtlTiMsMclAl8iROc1++kOaevijxz67DFDv4Dxo7dtBxmadedqmIBV/gthUcr/sl8MyN7nxpF3315ptSfr31tM1UNP1cqWGzqFQaEco/AKKUeqHGi0473H5m3VfTwmgzJfCubjiFRoTBj+pmqgMecuLtO7w5LaQWjrPLxMtLMSvKVNMKEP6Yvi6RF4SphZewYi/zDKpu0OG9QZhe4PNOOjEHNU9uhxFgBBgBRiD/CDCBl/814BEwAowAI2AgMC7CZ4+JMFoUXWfXfEBndpMdFIqnUZEQf65IWC7z+OVjXG59Orkqut2b6fWX9u+kH976DaMZHfXd+bUtND9abqiCQKwhvHXR0TJafOk1FF89Xztc1mrcaQQeWAtB2OSDwFPNCbwSeKo5Rbkg7ypWz6GJ3QMUf0CoG/cMUs9lkxRfaL1yRbUlVFRTmnYxKPUdGg6awANWILL2P7CXDj60N7GXhBRqaGIiZR4lYi3LBelUaiKeFr7leOo7PEEHH2ylcaH2NZfi2IQgieMi32JCfmdF5CUJPEHeFf3jN66GILKPTEk8KBlH45OeFXhWq28m8ba+7BHhbFKaJPFwDx6J0hKxR4rsyTvUywaBd03jqYYjea6NflSs4PiKdA5S8WjlOov9USz2Gt6bnELwgyDwMglRzfS92+1+NxMnt/udrkvTC3xJBox1TS/whVqzcKDlwggwAowAIzD7EGACb/atKc+IEWAEChiBtu4RQ8mWzUOBX3gQhoo8PdJN1hwmO0ccGGC84ZRLzW/fQdyX77BkSeINKW6YWGczXidW1tOm8jrLEOSTbvivjKEwE3gYAw7iOiHPQSvwVKdWXQLP7CoLQCpWNVOsqIJGv/NSCj7tH5iOBbVCTajxiufEKD7VTeMkzESKWmm0NBESWUXrqKpoPbUU+XdWDYrAk8QdTGDkM7f3J1uozSUfGYi8GkFCoUB9NxptoIGDwxQfGLMk8CREZc0jSRIPv1OJvOZTpxV4f7/bCJl1yyeowr5tfFXaKjSvXEFz4IbiUkDgDY1M0Jj4oiCIAhLvd4/cSzv37qaXVj9Ng9XTrrVlFVRcUWnkvCsW5J18PpHzrkqQIuZiFUK7e6KVvjXyoK9hXld2Hr2ycakIux7Veh59daJxkzkk1IrAk81AjQell/plg9pFEARekASuxvQ9VcF7GPIEdvePebrPa2UvphcgPOE0zYURYAQYAUZg9iHABN7sW1OeESPACBQwAghBhRsdipWzaz6mJsNkpZssxmdF0uHQBzVeUIfsoOcKVQlIKplnMOj2ddoDiffLr19PbeMjadXnRctooyDu5kQSIbNWJQgCr3vsb4SXLAaBh1xqwoDEqdRHTyO8gixmpamdiYUcI5RRWEPz/oOJxcTtrYbyTi1jC6Zo6NQpSyWeyORF8eYumoqJfI0KeafeDyIPJB7IPK/FCO0VLpDHetLXWqctK+IO97WOjdHmbYdo0d3u7qOSxCtetdQg71DcCDzUMZN4+B2IvOWvXkaNjYdpZMcTRls6BF58ZJQG2tvpxdZJ42UuzSuX03EXnOtI5GXzveWWF39At7z0QyqLxcQr8eyBlCqaJrUlgWnl1mtF4OH+bw2LvI+TbTrLnKyzqnguXVd+vmFglG8lszkk1InAkxNQn1GoeWVEfhAEXj7V026LCLIMz3qfUE3mouiYXtRXRQ03Yy6MACPACDACsw8BJvBm35ryjBgBRqCAEQC5BDMLlDrxDTpCU70mbg9i+jiQ4GBSFi0WoVSC9NIg5sKe
t08N1wwCI79tPP3J9xi3gsQ7NoGQ6SnaWFaXPPA6tRsEgTc8cYiODv86pRsdImZe+eVUHrGJSfUJhlmlZlbhuRF36La0oYKqmprT1HfqkAYFiTd0mkpQToK+o+L2Qeo95R6aLLYnW0DirSz+lK8ZggjxSuDZEXcYAMi7h3q6jbGUHxqkpic7qPxwulOtHOzQggqaKq2mxs5UNc5wWyrRaTW5yoXp7baI/HeLyp82TCsQ0gflplOoZ/+xdhofTaQFQHno2YRJj1V59QfebUviQT3bMxDPmirtDX87N2VI2JdTJmMXlcjD3K+cfy1dtfBay7n4UeFBfbcq0kJ+9oyvzelwkxWJ2PDnfVrdSEUvfgKnvhNaaEy40WZSwuzKnk91IMxGoP5DyOyA+NsBfz/gyw2o4WEoxYURYAQYAUZg9iHABN7sW1OeESPACBQwAlCvdYgwVBQc5BGumu3QHBUuI6RR9As3TzVkTwdSHCbKBemXy/HqjEvWCUtewZd+fBP179tl5NmyCzuzm1cQBB7aPjJ8F41MHE5240bglUUW0Pxy/+GkdvPBIb9ZHO5VkgsqPEnc4T6ZA9CuDajv6MlE7judMiyIvNGpDoqJe1AOfvOXrrf5JfG8kDHYnyADnJ67B7u7qC2uGE4I/Br+1kFlgsyrUIg8EHedpzbR8MJKWvCDLqoRIaFqTrzRrmGaFCG5TqVU5MNTzS1Q94R/PZ2KHv6ucZtBaIn/TIg8fFaupFJ5p/ZhReAdLo3Q36tidCRaQvWLUgniN27cRG/ceKLhyp1NAu+OQ7fQHYdvSQ7VisCTF6HOO6npJPqPTd9wdAr3QuJJ8i4MbqtWzyTmXv1sK5X2elOTTjWUU+mZSwxVOcglnTB9857MVMnq+nBnWCFoAws/wzGbXpSLvxu4MAKMACPACMxOBJjAm53ryrNiBBiBAkagFYdrweyAVGkQYanS2CKbU5KqH/SBPFt+VH9hOHw6YZTvvIIS4z33/ZZevP+3hlJCugTqGEjMe+1FNO+1rwtsG6gknhOBly3yTk5EVfsAo+K+ETr0p92uxB3uB3lX2lhJI99+MS181g4o5LuLU0LFhtJ73jbqO/95V1wRSuslJ97Yz3dRjVDHdP5ou9F28fGNFNnQSNF/Xp3Sl1Qh4pduJNXPjqWSlGqop90EQODB1KJCkHhqcVPhmQm85Veup6pFtUkCD21h3+C9CuMwhzZ3HzyUNiQzgffb+gqDuJOlrKaGymtr0u777pWvp4VVjb4IINeFna7w2e0fpu39W4x/YU2cjBkQOivVT6jfP2SdOkCHxJPkncSzWoT65/NLEKwp8qd19qXmdCsRoeA1z7XqwmnU69vYQlpeknsAACAASURBVBMiJBhqbuRPhTGGHVZ2DYf5i6F8f6aYMcN4aiujxpdwXBgBRoARYARmJwJM4M3OdeVZMQKMQAEj0CESmEOFg4JQGByk/CgX3CDAIRVhr1JtNzSaeZL4MIc6AY9cjw8HKqgogTPWVGK86+Zv0IBIoO/FQCIo9Z26L2Q+PDsCLxt578z7EmvSK/JHASPsc3nA7318L8W7rMNDETYL11mQdyhD1yWIF50yTAkHV1l0CTxdFd7E1k4CeTe5rZNKxDM2LpOBKX2WChKv/Jo1yecPxJ1b7kg1fFY25UbgVf9jmGqeHibkwqudNrSQ904IVdSYMM1xKjKMVpJ3qCsVePh/dd+o5iJDPX000teX1rRK4JnJO1nZrMLD70sFqfSps8+j41vm6Syx7zpSiYd5ISekOSvk+uoT6LPrv5bSvlT24pd25NR9Y88a99wXf874iXx3CJe9MLoppS0Q2DGhZMZ+yFdxGoMXEg/k3bgIIVeLzKeK3yEtw4iLChT1cp1jzgvuYVF1q2MG+QrMuDACjAAjwAjMTgSYwJud68qzYgQYgQJGAAq4/umE2GY3wCCmlUmYrFv/+XZ6Dcv41ByCViGR/Xt30e6bv5kMFXUzkFj1zn+l6mWpyi23
uXq5Xhpro87B/SlEMci7v5fupn+Il1peFl9F8ycaaP5ko5cuLOviQF8rFEdOyhzkxVMLiDtz8aLAMxN4x973CI2ubNeay6bi2x3rgbwb+cTjyTp2BF5EEGrlJzVT2ZdON9SuL92fuGfv9M9lF5xO9SsXUv2qRcm2nhscoOcGU3PX6RJ4aKSxNN2VEiTeuHCltQunBYGnknfGYKYdaPG/VsQv9v5wbx8N9vSmYSUJPITN/q4hQb6aS1VzM5WWxVJ+DQJvddNc+sJ5F2itU6aVHuj6uZFTbEqJMl5fcwIdV7PRtmkdIs9tXCBesKb4DMhXcRsDSDyYWjiF01qRd+p8zI6qMneb1ZzzmWPObQ3CSC421cYI+HJhBBgBRoARmJ0IMIE3O9eVZ8UIMAIFjMBofCIZvhTUAUFVgiF0E4dTP2GybrCGwenVaYwY35jAV0f5Idu5rW+QtoyO0daxhCpmQ7SUTohF6ZqadALCrGrEQdxOPSlJPLf8c9km7zAnHJJHBJEkcTlS3En3lM041VphCiLvZPHyU1SjhlJx2IT7ciYq0/j9bfo58EwKPJ0ceHKOTgSembzDPWYCD8QdnsUJ8RAaz+HJ5bStI6HKsisnfeAqg8jzo8BDmwihtVLgqf2ByJucdr+Wv69dFaPV165IH9aLT1LRS08Zv7fbu2bzCtRVXWjt1HeoZxVGCwIPZjqfP/f8rKvwMAYvuQvNAKlEnq7KTLYBFSoUm9l4b9Z9TnUJMxB5pSaH5bhQ3JlVd079qrnbhkZnTBjUe8Lixm41D12sdLEPoh4UzTI1QxDtcRuMACPACDAC4UKACbxwrQePhhFgBBgBA4EjncPGz0xDdLIRJuu0RGHOV4Rxu6lL1LmBtLu+o8dxR97YVGeQeaqqEaQdkraDoNEpQ397iF645+60qkHnvHMaC4gDRHpCCWOlurO7d55Q4l0y+nKdaRp1rBxWg1BtTuwecHShVQeoKvBGVhyj9vf/SXv8TgTe8McfN8Jm1SIJPDNxhzqDbV00MRqn7a9KV6qZByRJvCBz4LlNuuWVDYSXZZlW4Xkh8B4WDrTykfivuel57mQ/TgSeNLVwG3um1+c3liffg/22Jd+78R6sm1fUTKT77TuT+6D6hgLcyV04k/at7gWpLfPk4b1TTeeQ67QHXuYWNnIRbrTNIu0GF0aAEWAEGIHZiwATeLN3bXlmjAAjUMAIqHnwcJg8KowtdAkhTFsmWMfhEYchKDoyUTjpQokDfX1VlNqnnXR178tVPRyqKwVZ5ZYk/qPt3UnFndPYEO723UXNdEp1mfYh3dweDoGZKtAyxU8SmztH21yVd+a+dJR4VsSdbAekxdBI5vkXdcNoVQLPi/rOLQfe4CW/T1sGEHgoUnEnK4yPjNHQsYSRRvuSEfFKOE87FZB4TzZVprnQFgn2w+nZRh68hc+NpbjQuvWF65s+stK+WtdhKvrHb2wVeAihVXPg/X3PhMhzmGjuoHgGf1tvHT6L63YhtFDgofz6LW/XGX5GdTJR4Jk7lu85MTFvKJ+dwkXzQZ6Zx5tPV1UQeRWxEkHmRYw9DQVjjTBlUF2qM1rYAG8Om4EFpob3ceTA48IIMAKMACMwexFgAm/2ri3PjBFgBAoYgd7BuHHQQ9FVKIGsgyGFNAOQxF2uYVBdRXPdt1t/OgSjjvIOvIwQOxiKImGsSF9uTCjx/BTd9fXTtu49IHzLRD66n8U309FIl+5tyXr/MnSh5T1OxJ28AWHNyIEXRNigDokHB1o40XrJfYexriz6JFUVrbecJ0wr4uIlS/F0qCx+Ym7m0ndgxk1Wl8CrEznxFrz7UnqoZ8ZB1y0HHvqdK3LfrXhgmAYOJlS9OmXlmxZQ1eJyx6rFPYcpdvBpGmk9kFZPJfCeEuRd90CCfAOd+XdBdj9ZYf+sWJlYqEq/bBN42XLTlmpomNrYEXn5JM/kIobl/VtVMA4Kgh8htl6+xNLZ55nU0fksyaR9P/fiyzO853Jh
BBgBRoARmL0IMIE3e9eWZ8YIMAIFjMDI2KRQZSVUOW555dSDIcKPcDjMZfiTGeYwEFJOS+8WHnfh4VTTBLUtSdwh3NTMy9y3IN1cQWcL+snLp9Oulzo4LPdW9NLPxjd7uS1Z16zC0yHu5M2Z5P36+fPb6RfipZbJY6N05bFmWj9SRceNVlnO5+D7f0zDK2ZINLdJu6nvJIEHVQ7CZbE/JgXbYGViMdorwn17Z8wodAk8jPHs//w3YWQxY2YhCbzhwSEa7Oqm+Eiqq2zlwAC955xzaODAMO35xWG3aRrXdcg71MP7DtSTnc88msyJp3awv7uEnnh0T1qfXg0s0IAk8I6b25J1Iwv0BRUT3L+zUYBbwo21xCClVEVevskzjK2xJhYaxRtwAuGJfY7wUDcFYzbWy65NjA1j6ps2nMpl33Z9wbW+BN8scWEEGAFGgBGYtQgwgTdrl5YnxggwAoWMwLhgh471JAg8uzx4iUNgxDhII8cS3E5zESbrhqsb4eh2f7avO+UtslPf4UwENRVIGQtBlTFkmQ/P6/hBYKHk03kSe+iF6pfokfgOr8M36ksCzwtxJzvykpdQ3rP1WDt98v/92XasU0KxQ6OT9KndS5IkXvHKSoqsrKKyi1ooUrGHnuz7lPZcndR3aGTqV7tp7PZdhkII4bKyBE3gwZ12uXhJEg/ERveRozQ6ZKGuO3CQin9xJ7W319Kr3nYtRcvLafjvY1QTraKaRusQVl3yDvNLEngmomt42xaKH2ujYzteoCcff4aKSkqoSKgAi8srkrhY5cAricWoek5z2ppIUhTmDrnIgWfksxQhnD0DCdOabBWZ900l8ppry/JKnoG8rBaKWLcUA9nCxNyu+lmiEp/4ogrEZz4/78L2OYf3gpYGZ9VsrtaN+2EEGAFGgBHIHgJM4GUPW26ZEWAEGIGMEGgXBB7C79RcO2EJk3WaWBiVCep4kWdqWBz+rJxo4Th7W/+MOipB3EFNla64M2NwTXWlpTOt2yaQ4avZJgzcxrG7bh89MpaqZnO7R15/Ba2hs6PrDRLZyXnXqj3zfnn0rwP02F9n1gD3LFpUSosXRemMV1SRG3ln7uOGV59FG0zEEEIVX+p+mvZM3eA6RSfyThKWfU+1Uee/P5rWVl/RuLF3ZBH+kFQkksH5VeBJAg/twZX2jh/cTKMnbUrtVxB3RQcPUcdvdlB7R23yWv38eQaJBwKvNlZF1Q2VdPY1pySv2xpW2CBkJntA2vU/8nBK7a7OboPEk6VY9A8iz+xCC/Kufl5CwQpSRg2VxPsfyHP8Ptvhs+gfaxoTIeW5eh5VIg/9t/eO5C1UNNdzd3v4rN6rVcMLfD72D4kvr8bTw9Td2v7/2XsPOMmqMv3/7a7OOc1MT855hjCASBIFAQVWwgyIAorxv6sromtARX6KsqILyoqy7qoIIoiSkSQISM4zwMz05Jw651jV1f0/z60+1adu3XBu1a3qmu73+Clnpuvcc895zq3b3G897/sm+36mFbCAU7FK/G7jxgqwAqwAKzC+FWCAN773l1fHCrACh7EC7d0i0b0oPoGGhwU4UArFf6T3CvjkFZKkU4ZkK+emeq5OjjcAvLu7e4z8dsjXBbedCmCc5pYowMsUvbaW76LnPDrwDHeU0Or4ocV0ZN+CGEdM+LEBwsvcAufkE16yScfTe5t66M9/Gc3vZqU1QN6doX9QXp63MLGHL14TM5xapKB++H7Cy9xqs1YTXlbNymmoFrFoR5Y9Ae+sWp4ANEXt/UB5xtu7j+im3orI59ytqQDvr9f8gA7W1QnAH1/EYveeydTbWxA3nIR48o25K6fR52843+201utQnGpw3eFl1QDxtm/dRa0tkYrOgHiHykrpEQEQ0dSqs7ie4FoE85RpAGSY8JqVRwoH3tEJzdXLQdhbzCHdjlh8DhCSjDvPWDnM1GrUXjRLVV+3nIDShY7zpxvkZVp1XIR9w83MjRVgBVgBVmB8K8AAb3zvL6+O
FWAFDmMFAO8GQmEjTDY/N/PBnSq1W565sdwWON4KxYOOOUwMD4P3CHj3+5ZOGvIA7uRaEgV4gBY1Iu/UWFfu7S3rpDsH411kVnuVPeJMNEJGhVZqDryhrSIH480jJUdtNjproQj9FhAve1GOESLe1Bim39xmn3tQDrMpsIt2FO6mqipvD6qXLF9GnxAv2RK9PiVksXIayjx49RSk/ix7RxAAHl5ZQrtgeZj2HBnrNnT6bKASbeXCmbRvw0a69/s/NNyhZoBnB+8wbm5BAVVNnxZzikQhngSvTTv3xznv1BOEwiEB4yIwEyAPrbumnIqOPZlu2Tla+EM9JpJrMpJLUPjxaMWUqfSDMz6SltvGWEEs6X5DASOAmBLxAshL55c1AIgDwkXrR0GZZDfLS5VX+SVIuvLkparQSTKa1Ygv+aADN1aAFWAFWIHxrQADvPG9v7w6VoAVOMwVAMDrEfntssTDbL74j/N28XB3OLRMCy9SNTMnqZduqlB4mH5T3053tHcnJHGiAA8nSxQoJTRRm4NQfOQ/Bx9yHNIM7mRnWYVWB97F7MVVRXSoeJj+/Nc2Cgv93doLOWupObtdOPCyPEM81YWnOvDczon35TWDvzs5ff55zZM0ab3z9RMQDtrCxkifHUd00XC5+7rlHFHEAu3Ve+6lV/9ybxzA6+nJpz17pzguacr8eXHvf+4n59HAtCJ6QAC2TSNOOdlpqaiwfOGiubS0pjLmOAnw9j7yOA2K8Fmr1tnXKZzD1k7EN1ZMptrKpfSr19bazhcg75iZ0+nGj51rhLSmI+eZn1WRda4t2cecCxIAC/nxikU+Pqw7HQ4z3AMAEMeyCJLUI5Eqr8Y1Kb6IycMXXqYCIV72wq1vpoUa43OCe1o2/sKNFWAFWAFWYFwrwABvXG8vL44VYAUOdwXqW/sMF0oiDzNjuXanPHNjOS95boQ/IQk6HsTMbiqnKrROc0+0Ci3GBPBs7QqmBVDYrQF79sLgJnola0tcF4A7AAU47uBOVJvqvgt+qdPT9sKJd9+0ATp4UK9y8gN5z0bHr6oSD+oeQmkTAXiywnPeSE40p1xbm7N66Gc5e+nib++imeudHYiFDd30zuImaqsI0Zzh+FBXKxHV8Fk7gKe67xDaDOiPe4dsADM5IoS1sCYW8tUfVUED76913DuAvO+duCraB07W7PZm2v+3x+OOg+uuq7/LcbxNcyuoqziPTl96Gv1z1wHaUF9PGxvqo8egYAWqzp4wd6YAMtnCkZedUigjTwwXWr9wP1vlyPR0cXvs7FSNWX7JkGqQBwg0ljn4VMmSgWRqnjy4GOFm9zNPXqYVsEDORlQP5sYKsAKsACsw/hVggDf+95hXyAqwAoexAoA6eABBA3RCmGU6XCjJSgY3CeBHZ29mOQYlkCkS87PLJWguZKGjRTLuO4wP50s6HDZOa5HOo7/Qy3Qo0Gp0NXIBjhTxMIM7vD81XEULNq+iut0huu8hAWw6Im6yC0TV0SXZAVoaCLjK94/cAdo0Z1jL9aMCvJKSbCopcR9fTkAFeG7AVAV3CGHUCSn8aWAPbcmOgLsT7mqkE+9qslz7K5dOolcvnUwnfelB4/2K4RyqIOeQ4IoFM+iYKy+OjvfzCyJ/N4fQ1m2aZfy8SIBNFdypE8kWexIQlWEDZVWiOmweHerupf5wmLK/PVrQwm7TVIgHwBKqW0+Nb8Y76Fp7ItePUzswuZjwQgPEm1IWKWJhbhJs4T6I+4patVUtduF2Pt33AbLhevYT+OicWwccylBRjJeK+wV+x9S39etMN+V9/AhlHi0Q4q+LMRPu1+oG4HOBHHjcWAFWgBVgBca/Agzwxv8e8wpZAVbgMFYALjGENKFluqtNlTlTCjPIOY2GVmUbOaWQXwq62j2kf7upjdYH9eDjyrxc+umk2PBCr5cc4FlQhEun2/WjzlMW94Bb5cniN2gvNRnuTytwh+MA7975n8W0ScA7tOG98YUYlgjX1AUCEjmB
vPd6g/TCUcMU0qgkqQI8nLO2Vv+hVQV4eAC3CslMBNxJDT+buylu2zHe8Xc2GEUZ9q0spv1HRIAVWtnWJlp580tUMJxNtWRfPdIM73AsCljs31hnCfCc4B2OlQAPf+/MK6aOEUelDsDDMQinvXDxXMO9agXw+oJ91Bfqc/0IqAAPnT95/CWWx5hDWtWqrakIk7S7NlwXlGQH3N+7xBceOuGrqQB5uFbh4moUBVYyoekATS/zVDXDPQ5AOFEAnGkFLKpK86nAgxvZi27clxVgBVgBViCzFGCAl1n7wbNhBViBw1iBA/XNdOvtD9Eb72ym6bU1dP3Vnzf+TKYhB15LZ9AYIlNdbVbr85KAPBl93I6VVQrxcKo6qXRCoHQgnh/wDmtwqozrtka/3pdhehgPjrPtoUZ6PbA16saT5wG4mzZUTTd9pzzm1FYAT3b4Tn6BJcQLBoeorTVMfz0lrAUuZA48OW4yAE/N9ZUMuJNzsQN4Q4IS2IGC6t3b6ehbH6Vp7RH4GQ6MFtrAv9WwWVVs2xDaHXMsnXdZorIpikGg5eaPhtoNiAokjfmlxs91AR76/ulfTrMFeDruO4xhBngrp6+glTNWxF3OdiAnVSDPrfKpX5838zhurlCr8/oJ8uyK+6RqvW7jpmofki14kWmgEzoCKHL+O7crit9nBVgBVmB8KMAAb3zsI6+CFWAFMkCB1Z+/lr78mQvotJOOpjvvf5ruvPfvdN/vrqOykqKkZnewJeJmyTRXm9uixrKQhZozCm4LcwikLgy1C6cFuDsiP48uLRt1VLnp4fQ+Hp4LhKMJrrB0NwmvCvKEo0oAHQmMneZx3W1tUeed7OcE8ODE+25BoeWQDfUhbYDXlNVGL+aui46jC/DMVWhlmCTC0ZH0HteLbqisnS5eAN7i5/9OeMk2Z7iQBvtFDsQB8RIQb+ln/sOoNmvXrKrQVhYFaH1dJITWrmVniVxy4tpFGxRrDwuy2BMQIaM5oiKwRgitHPd7JxxNq6bXxDnwWnsHaF/baPjszAr7vFxeAF5vv30OM79BntcCJ359XpM5r/zdkC3E6BaubZ2Qb/O88RlAFddMSXuQ6nBe3PfwewD3Pbjx4HbXSU8xlvdqq2stR6xjsgB43FgBVoAVYAUmhgIM8CbGPvMqWQFWwGcF4LbbsmMflQo4d9yRiwn/vuKqG+jpe26Mnul7N/zOcOB96Yrzkzp7c+eAUWgBLROqleouJt0hv3ggQ7J7uNmgFx5k7cLRMg2GjsV8zK4zhLAij5IbwKvbFaQf/aE97jJwAnjojLx4CKc1NwC8vwgHnk4VWhwrXXhecuCp4bMYA64uuNJQnAPXCiCvzsO707WvA/DguAO4q9mzIzpUJIQ2HnQtuPQ6Kpkd70iTByKM9mBdnchRmGXMff6kfHFPmkT1rfZhxQGxB9li0Vj7oIiNHhyxBjadtpiyTp7u+NGe3jhAM5oGjD5HT6mh42dMpvxFNdT86uO0vblTvLpEyPWQ8VJbmQBDMyvyqFzAErWhCq25WYXR6lZG9QvkpRoc2Ynsx3nlfUQ6jr2EifqRc073d4NbP8y/SoQUI+drqpta8AJfYLjlFtRxb6d6zur4+AKiosQ+BD+dc+FzsQKsACvACqReAQZ4qdeYz8AKsALjTAGEyT705Et02smrjD+/8++fpA+Jv595yTfotUdvja72TRFKC4j3lAL1EpECoX5wB6BlWvJsp/XA3YAk+3A2pbKpIMpcUdbuvHhom1RekDH5njCfGpF/Kh0PrHbhorqaWLnvoLMbwLNz4W0eDNFTS8IC/NiHmqr7KF14ulVof/zBU2nl5EnRIaQ7U/da0b121SIW8hhora7rxDt+HQPv0C9SxMIaurlBvJsvvDgK8M5aVm7Au8dei4TEmpuEd/LngH4S4DVfe5rtMgHujq/rFPAuEspvzLkgj6oLC4yw3Ddy91FbQQ9l5WCt8QBPHrN8SmEU4pndd+gzuXQyfXhZ/Dy85qST
QObPa180qtZKMLxy6mxaOW2243bqfgZ0rwndfn6HZcqcn3CX4YsM/P5wy/fmd8453bVb9RsLlxv2HnoBiKHZgbx0fzHlpiO+dMHvWm6sACvACrACE0MBBngTY595lawAK+CTAm++u4VuuOUuul+ExqI9LAAeIN4fbr6aEEL7qTVn0nkfOTl6NkA95MI77qglCc9AzYOXad/+Oy0q1a4yFUQhZMyriyrTEpGn2l2pk+dNZw6fuLbRctuHUQ1hpAqt3XXxx6L4kOODq3Povne7tQEexp45M5c2Tl5PG5qsK72iz4pJkwihsxLeqeAO7w8Ix2EioYZ2a9uc1UM/y9kb8zY0HxbkBMVAzGGzsiPCZ+1ayazltOCyH9m+jzXd+/0f0s531tNpi8uMfo8LgGd24ZnhHfrJENqVK4fphTWnG8VKzJAH8G71881x55cA742cfdSaJUL8RRVbElsLiDc4ZA/sAfGyqgtp89z4oi92OfC85oa7++0X6G4B79CQFwxfIgyJDcAerJw6iz656gO2IC+dzi9VVFQMLhWFbNpE1XE/mxoe7wbyxjLlgXnN6fryx05r1cmI3ysqAM203xs15flGeg1urAArwAqwAhNDAQZ4E2OfeZWsACvgkwLPvrzOCJe9fPUZxoj49zMvraXrv/05A+b9WrjzVMfdldf8ki4XUC8ZgIeHz/q2SGXATEs07iRrxM2Cqob+hkHhYaVYhMnmiofeZPKWZZqb0Suo0L2kdcCdHAt5uJo6+h3dOnYAD2O4ufDMAC9roci79bViaqwP0x13NRuQxa0B3n3y41VGt/WNTXSPqMaqgjyAuxXCcfcJAe/QVHDXueUhI9y0bPH5BsDyE+DhXGYXngrwPnbd1+OWVjucRwUUcfzYtaO++4DlW+q6nrjtbqra+gKF+iP3CQnxUHE2S4TMImzW3LD+6klD9OEPD9MtCz9shBOjSZBnB+/Qp7akkA7md9KOQMvosCMQL5wdX41YdgoLmpb/0XmW6zl96Wk0pSw+rFY3N9z6g3sEuHuB1h+Khag4GfYB9yPsPSDlT865zBLipQqkOW6weDPVjjOd8GKdz77bOvx6H27AARHa7vfn0+v8zE5GzCeTKvUCTtdW2X8B4HW93J8VYAVYAVYg8xVggJf5e8QzZAVYgQxWwAB0F51l5MFDg+MO/wbg6+zuNf4NoJdsIYsmAcGQnydTqrvqbomfbgW1Smoy4E7OHW7GoKjy2z+SX1B3Tanq5zdQ9ALu5Jp08o05Arx+QUcaY3OgqXqZAV7OVUWUvSjHyFu4d1+Q/u92e0cdxlHhnds+yOuld/sjBHCnNqNiY9lCGi5dSFmzznUbytP7KsSTAG/hP2OLVmBAp9BZ9YS1p3yc8JINUAGAAyHAgHBiJQbIbn/8LgrVj8KrV94O0StvW7vhZk7NphOPyaHB4l5j2F8u+LDxJ+4vBsgT23jes00xYbPy/AU5AQHwiujpvG3xugQEICtAOHT8NRAS8w7lBih/brnxUptd+Cz66AK87zx6pyW8U8+jgrz/PDse4qUapNldSLhWUYwg1ekG7EBepv1eySQ3oPxcwBVYIl4IiUexoaBw8I51yxc5XwEUubECrAArwApMHAUY4E2cveaVsgKsgM8KIJz21394kG4X4bOywZ33GVHMAsUtAO1OO+WYqFsvmdO3dweNEFG0THu4cVqXH1BKrSjrlmDci8ZjHaZlnqtfQDERcCfnIquzOj2cOgE8jDPsAPEkwIPzLnCOqHwq4B2aCjBeeqWbXn6lJ0YegLtZM/Po5BNLXLdYAq6Bls0GuAu3bY07BvwOtAruVoC87JXx7jjXEzl0eDi7iR4ONBvOL4TQmgGejvNODi8BntzXgHCeAiDATabqFjy0hzqeuDtuVvsOhmnfoVHYAHg3c1rE9dfWP0AbsovpgenHxOrdPEAXPBcfOotOcN8V5ORYAzzxfu6sCursEwUtQkEaElVNzS0gKtMWr5oS82M7951x
PlFhUzqQ7SRXw2bd9m220OPkN4M0Y/8gLZw0Neo2LfnU8VTxvtlibrPSXok13QUkzCAPxS4qRSGEdOTgdNsfuecN7c5OYJ1x/O6D3xnIkYfPok7BC7/Pbx4P80EOPG6sACvACrACE0cBBngTZ695pawAK+CzAqg6++XPXBB136FohQyVBchDBVq/Wn9wiFq7IqGoFeI/2P3O3+XXPM3jJPpgigckPCgBUPhdbEDOUYbi+p13KlEtkwWK5gqUiYSf6UDE+57rofvFy62Zc+KhCu2FywoNaAd4pzY/nE8IuNQnXQAAIABJREFUf5QPswC9/c//f7ZTjAF46JUCiIdh/17QaoC27GcfpIIXHhbhstmuIbPmSU8/9RJaeOalRoVls/PU7Nwyu/Dc9gjvPzb7ffSPXhDN0Xb8xk5RuKLL+AFgj8yNJ913rVm99HbuAcvhs0VxmIB4AeLZ5cMrO21W9FgneIdOOg68c397vc5S6RMP9tIsAfBkqykpperiUgPkYo24Lqo+eyLlfuI4rfH86jRWIaNqBVZcp60iB1+y1ZiT1cTvgh7Jzkc9Xi1gIe+3eB9frnmp+OvXnKpK80XhDc5/55eePA4rwAqwAoeDAgzwDodd4jmyAqxAxikgw2NRzAJVaQ8KYDdNADsUrEhFGxThszKXHB7a80U4WruoTpvpzWvOPjXpOh6IACxS9UAJ4JNJrpNEIZYf4E5eR7rA1c2FZ3Vd/vm6+Pxmsl8yMFV1HHaKzwRCoofW/5yo0yLEc+SERqio+D/12sqaeY7v4bRSzx1P30X1L/7F88cV4azzPvxJmnTSxy3zgZkBnp0Lz+7E5R/9JOWJ6qwPbNlFD2zdFe2mAjz5w+rCfFFFNs/IU4jcdzsDrZbDSoCHN/uCfdQXiuTlUxsAHsJmV85YYZn3TvbVrQqrA/DM8A7nqCoqMQCeEVKNJtYGx2TFjasp/8gZnvcr0QMAhrp6QzQY1kgCmehJHI5TnWW47wJIjVWIqNffGSmQw3ZIwOSWzoGY+4a8/+YKt6lboRC/5wp3avTa9XtwHo8VYAVYAVYgIxVggJeR28KTYgVYgUxXYPP2vUbVWbjsvnzF+TGVZ1M19wZRyALAIdPAk9N6deeKfsgvJF1GcN2lCtyp89Wpupqq/TSP67Vqr5/gTs5FNxdX3a4g/egP7drSfP8zFbRsbp5tf+mea+nUr8LpFCo89PK/Oc6td6hDOMtEeKv4POVm5VNudoHRP/uk/9Fek05HVc93/vNCnUOMPgBXiD4FLJv3ieuoZPYKy2Ot9ksX4kl4Jwfe1NxGm1raDZCnAjxUnK0oiDgmA2JiYF3IvfeUVQ48aDjiwFMnHAoDTkXy8U2aMZXOvuQiLS10qsLqhM+e9MaAETZr1RbUTFU0j4A83Hsq0wjxUlXARktk0QnOW4SEAt4V5edQcUHA0MDPlAW6cwFMxL53CqCZSc0tTyDmjLkX5AUEAB00gHsqf4chZ+JkAfC4sQKsACvACkwsBRjgTaz95tWyAqyAjwqoIbM+Dms7FMKb8ICF5mdxiFTP3WmucFugoiwefvwoTOF1LZmUTxAPiDUiIblbHipAG4QXo8Epk0iorJ1ORtVF8fCOHGtuTRfiucE7nAf7jzBCHYDnluNveO+jNLzvsbjph4b6qXeok0LD1lWRi7LLqPjIH1BW+SK3pWu/rwK27X/6PnXv3eh4rAR3COcUPIVKZi2nBZf9yPYYJ+Das+5Fymt+MO7YntAxVHT0yYbzzq71PLGZ8LJrgAdP5m4z3GrmZgXw1D7Lj19Fy4+Pzblndx6d60IH4H3715FwYKsmAZ68DkUwrQF3C46aQTU3rUmLE00nTFj7okugIz57/QgDVQr6pCr3qNv0JEz0877mdk6d93VdwmpYcirz5OF3QIXIW8iNFWAFWAFWYGIpwABvYu03r5YVYAUOYwV6+gepYyRsVs3Fk+lLsipkoT4c+g2hvOihU7TBy3jJ9nVK2J+OB2qv
LkCs97rbhHNrdzzwWzonl9Z8qNjReafq5QYx3MCdHMsK4AHedYSdK9zi+M5px9GCRf8v4W0cahXhu22jBSOgp+FYmyXC6/ZsoO13XWs7dgBMdgTcyU4LLrV336GPHcDL732aCsTLrvUXnUED4mXXgtuaqf2Wlxx12JXTStuzW+L6oIiFU7v4yi9o6wtnZqlwhznlqVx/cA9957E/2Y7p5L5DCG1VUWn0WBQICY+EsQLELH/9m8Z7qXai6RTq0BYtgY5ODkA111uqdcDUxzqc2E4+r85AXD9wMwK0oXKt39oh3yfmxI0VYAVYAVZgYinAAG9i7TevlhVgBQ5jBZCTqLkj4h7y+jAxlsuGowKhRHAPIkQWecEQIjuWeZakHro539Kln9WDdDrAnVyfTsiinRZw5NUJkLdMgDuncFm74+0ghlrQRMelaQZ4uvAO89o6qVJAvPfR2ZVf8bTlAHeDOwZpWIF3GEAtlpF7bB71dtXFQTyEyuJhH4471dDmBu8wvhXAK+74DeWEdrrOfzB3HvWU/6ttv7ZfvkSh7daVaHEQXHivZe+jtuy+6BhZAljkTLGvFOzFfYdBdR2hTjnwvAA8AEM1Dx0q01aLohalRRFQ4jeEwZiZULRBx9GdLpAHkN/UkXkVaJMpNKKmO8DvPXwZZ2Fedf3Mqh1qyvONzwc3VoAVYAVYgYmlAAO8ibXfvFpWgBU4zBWob+0z8mIl4pQaq6VLAIXzp6qibKJrS7RwRKLncztOdSumE9yp8xorN5DZgQeoJfMiSuCrk1PKDPA6Bhttw2bN+wGAt3VSFR1d/BHx+qjbdhnvA96F3rLOr2audhuYn0P9OZup/qW/UO++jQbgM4M7jKkD79DPDPB04Z1cmBPEc3PhAeA1UQ+9lTNajbZwTqXhNsI9ytwmTZ9KH1p9rpamspPu59MpjFYn/508nxXAK/3U+423cc8FxME1CCe0XwUndFyGnkTz2Fm3UIgcVgV5PaLIkBp26/HUcd29ziXZ83k53qqAhZfj5TUERx7y5CVT8AL3jdqqQq+n5/6sACvACrAC40ABBnjjYBN5CawAKzBxFGgWFfAAM9AyqQCD1Q6oFWXxPnK76QCYdO5mIsUTUjk/uBURxpcrXthnPOT5BQp05z1WAA/uQ1zfcKZIeJkI8FUBnpP7DrDA7IJ5dNn8qEyfnfzfWpINPBVfZVUeaFXttvSkQiqdmksNW9+l5m3vxZwDOe/sClZYTUYFeIHQDirp+F+tOaNTqL+Fhgb7aNP2TurujRC3yYs/ScXVK6mk5gjj304QDwAPxSzQ3srZT521AcoWIX2AC6jwq4I8r847uQisD9U9dQoafOfRO2n9ob1x67cDeNPLq6gwN1KcQzYngKfOSbqI/aiSrQsptTfWY0fdQkPmYVNRRCfT7sfq53iKcAbWi0JSfjSZJw8htsGQ96q/+cLJXi3ypXJjBVgBVoAVmHgKMMCbeHvOK2YFWIHDWAE8MHaNVOezyi2XCUszwt7Eg7esKIvQ2anCLXCwZTTULhPmiTm4VRZM5zxVxx2KSIwV7Byripi4ngcEtMS1kwi4U/dKVqHtDXcYhSusmhngtRQV0Ktzpke7frTi32lq3kLHSyAoKpuaw2bVA1SAFylQISrfCuaVf2aBL/urAjxd9114sJeC3aOuua6eIdq6J1IhVjZAvHkn3WD8ExAPBS3M4bQS4OUuqKHKK0+mxv0HqenAoegYsirnomOPti200vjn5+P0LV4xm4pXzjF+rlsVGX2RC+/utS/EQTwrgGcF7zCGGeBV3bSa8o+cYXkNJAOZ1QG9rDEV96NkAaKf4aFegG0qtLAbM1VgMdGCF8gLCYjMjRVgBVgBVmDiKcAAb+LtOa+YFWAFDmMFBsS39bJSp8wth3w6mdDw8IWQRzyUmHOVAc74GXbm53pV55ef4+qOpYIAVC3MFQBUpwqs7vhe+40FGIYG5eKhdEDkeewU4YnJwsvhjq00vOEX5AXg
vTp7GrUUj4al6YTROrnvoLsB8AS0E38Y4E6uC6G0OeKVbFPhT3nzt1yHM8M7ecDbdfEhwCrEQz+APBXiASAMf2iBYy4v6cKFq1TNH9ezfjftvuaPtvMtEhBv7vWfNnJ9wtGH+4lus3LiySq0hbl5omhFSZzzTo6tAry8I6dTtahCa9VCTXuob9OLNNi8V+yv4jgU5ujCpacYL9021nk4E9HYam3yi5tkwkMz7XeaXGc6cs7Kiuw4p1thJ85/p/vp4n6sACvACow/BRjgjb895RWxAqzAOFZgSISsyTAe/Ad/oXjAdarQmA4pdHK14cEMcKpPJPDOtDYWwAoaWDl4MiG3IfJ89Yt98jO3ld2eqxqYIU+y18nQ+p9Tb9tbWg48M7zDud0AHopWhMXLqQFgAeKZw6DHAuDZwTvM3wrg4edmiKeu1a1qsNpXzZu29srfUvf6PbayDVIfDWZFQhVrb/wgLVh+YUL3DeTFk23K43to7j8O2YI72U+tQmvnvgO4w8vczCCv9JRLKXfSbNfLOJniCK6Da3Tw+96spk7wmuctU6urp3OP5GcFoeN2+iHNAb4Y4MYKsAKsACsw8RRggDfx9pxXzAqwAoe5Ak3tAwYMG8vwT7UyqE64YzocDIlua7pdH06hd5mQxD0djiBZEEC9dvwGCbge9jY+QAV1t1leGjKE1gre4YBkAJ4Ed0gRh8dss6MwqzKb8o7LS/SSjR7nxYE30L3fyHln1ewAHvrOPfEn0Zx46rFeAJ48LrR5L+347h3CjhivSX9WG/VntcdML3zkEHXdGKLKwaV0Yv/3k9Kr5T/uo+C7o6HD5sHUcGe47+p/nkd1WX+L6TbU10Xz6ipp/q6ZtnNRQV7xSe4QD9AKaRHSnetSLgDnRzEKVDn3s6l53noHBrUqr/pRKMLPNcix4NJu6w6mdY/gBkWOPDgaoR++/MJ9BKHrkwXA48YKsAKsACswMRVggDcx951XzQqwAoexAghFlWGz6X6wkO6KIuH86xWhu7pJ3DPBWWa35emCi7o5s8aqiITUx6+QOiu9ZS4pvKeGVOLfqQCH63qeoHU9T9Kiplaq7umj6t6Iswv57tpKimhzTaXtncCtiIWVAw+umEhl2WEjtNSqiAVOOBYOvL72bbZrdQJ4KGwxZfGlcccmAvA2nnedMY7MB2gUuhDcqDvrUNR1Zz5R3+UCXlwece6e0HcN1QwtS/ju7QTx5F51X1ZOb1/xUtw5hkMDNNTdGv35sW8vp6r2ctu5AOQV1c6lqg9d7givxirnpJx4qqGZLsgbyy+k3C6oRK51tzF13zfnyUPVauQJ5cYKsAKsACswMRVggDcx951XzQqwAoexAv3BIWrtGjBWUFEcyRuW6tBUCe5QmALnQo4er3nKxhpM2W15KuEidINmXqpWjvUDfbJJ7a10Vq8fc35E2T9Vyfxva/yq5dZjTnbXsJv7DgMOtQ5R6K1I7rhRICV+PlKZVZ5UDcuM/iwFOfDye5+mAvGya3YA72BTmA6Jl1Nb+bHH4t72CjVQsKLpntiiFYBcgHfBYefqnq1PRe53aMlCvIF391P3H1+Lc+NVffYE6jq6l55bfpelFOGuFhELHZsr0A3iYaBpZ11BZdPnGS4qfPFirnzsVUe/f3Wl676sgjwUNoIW6ucPn8cq4QZEtfJMaolW6fV7DdAPjrwSkXsSWnFjBVgBVoAVmJgKMMCbmPvOq2YFWIHDWIFB8RV8owijRQP0yEfRA+HKS0VTKwzagRfd86bbLag7LzwY1ZTl+/rg6DXEWJ3rWOXkk3PwE2jqgDt5XqcKlM/uaorZztPmTtLdXjoU3EZPtP8qrr8TwJPuu6H+XhoeGA07zcovpOyCouhYwaf7o5Vl7WCgFcDLPTaPsqsEvUqyqdAzENpBJR3/azliqL+FBvtH3WNqpy27Q1QfWkg9eYvijs0Nt1BJcCsdc258wQmv4MkK4Klhs+aqwOpkOv9LhC8eKSyNolWHkw+n
tRIJztM/hj5LcAVatXDbaIVd9f0znznRcRdR0KJo2SlGQQ4AGDPISxdAs5rkWITsq44ygDzp4jZ+l4kvO8aygI+VRpk2r8kV+aJacvL3jiRvPXw4K8AKsAKswBgpwABvjITn07ICrAArkIwCDW39kXw4Ik9OZYn/rgUZ7olnWSTS9sPhl6kJyrEPfj1EJwPu5PWQilxwVtda72CbqNI6mnOsKFBBRTmVBpBC0nZZ7TiR69QLuJPjG1UsCwIxD/AAd2Z4J/vPrSiiz62aozW9x9tuEZBqe0xfzBFuOTOu+WjFv9OUoekUbtxvOTYgXl5lDVXWlFP3pgHq3OjsGDIDPL/y32FyZteinQvProBFV88QvdZwHIUC1Y46TplxHK1cfGxMH68Ab9f37qDeDbHFK9qzd2ntnxpGiwOSdeFZnXRz9t9oY9YjAuBZT8kO4M3fOdMxJx5Gq7rwu8ag5nBSAKyq0nzxhYyzA1FLpAQ64fdHqSgwNBaFkMyhodAdOTEzpaq6ej/G79pMmBeMd7VVo5WyE9hyPoQVYAVYAVbgMFeAAd5hvoE8fVaAFZiYCrR2BQkPf2iATw3iAdDGOKItkDncE2GyfiY2T1euOe0FKx3hekNuwUQTyfsB7uR0/MxBt6erlfZ0t9EL9Tuiqz139hwqzu2g8HC3pVQ1+XNo2ZSlUajQv6eT2l/cT/17u2L6F8wqpYLZZVRxyozozxMBd/Jg1fm3q63HAHe72ntdt/NzR8+muZXFrv3MTjwzwKvNXWAUrpjUVhDjujMPHBDkAfAhq0A8SNfMoOCbQRpusy8AYAZ4frnvMC+rsOPijt9QTmhnnB7mEFpdeJdTUEW54lVWUhkD8ZIFeKg2251d77pv6GAGeIuCq2lxaLXWsbqdXsq5ieqHNscBXRyP4hXD/dafl8q2Mjpu7QrH00iAJzvh+ikX4AwFCgCHmjsHkr5/665T7ZcJ7jJoAR2QZgC/w3Af9vP3TiK6qMck+7sh2fOrx0MnhBlzYwVYAVaAFZi4CjDAm7h7zytnBViBw1gBwLV2URUPLdmQSxW66FSUTVQ2P0MzE52D3XGJugOldrnCPRYSuQh1i3o4zd+vHHR3bnvTgHdqK8sL08qayHWTlx2g6nxr8FVTXE01gSMMcNf+on3lTowDkDf9U8uNxOoAAnCq9PQ751SzWr/q/Pv92t1a8E6Oowvx0B+FLdAwV1zvACiAd1PzFlK4o4WGxMuqZQvSYBSoEJRBwvLs8moKiJcTxFMBnp/wTq4BVSlx3anNKpxWrULrFDZrXnteyXQK5ERcPzOnzqNZ0+Ybf/cK8MwhtFZVZ83nlmG1qEQbOmIUkqYC4D2Q80Xq3NlD3bt74rZ/ygcnkZ0DD53dwmjNAE+eAJ/1CuGgRthusikKnO4pdu/5+WVBIudXj0GKhT7xpVShgFSGHr3+V8ZNZI5+fUGWyLnNx5SLnLfYM26sACvACrACE1cBBngTd+955awAK3AYKwCHQvNIsu9EnW3pAndS5kyuMuj1QdaPoh52l1+yoBOuuzu3vxU3vArv5Jt2EA+hdUN/FZDvkHu4FsBW2fwKmvnpZQkVN1Gvj0nlBXTPun22YbN2mnkJp5VjmEOVke/OKmxWVicFtLPKjxaYPMPIi2dVlRbngpbhMlHM5Dj/nTNuhT8A8sqyI2Gr3T1dtPWN31J3byRGtKH4XNc7YLYAd/kC4KntpGPOMByI2CsvoZ/JALzOZyIONVTgRPMC8HZt6abd4vXc3xqiy/jQv0yhOYtLaK54oe3bXUd3v/A1S3gnD5pzYREVz7TeQyeAhxx4eFk1uX/4QgYONBS8SSfIQ6j8gIDYfqRIcL2YXDqoaQzkPRCHjCXIw32+WuRH9XKdJ6uD0/E1AnJCG26sACvACrACE1cBBngTd+955awAK3CYK1Df2mfka/IKfIxcY8J9JB8WpQspHXKMdYVVuzXCCVMonA1uuaBSCe7k3BKBI+q6rJx3eP+kaaPF
GNT+VhAv+6CAVfeGhUtPFG3IsnZ8GJVXxf8ZYEX8H5x4tZctS+oywkP85+9fm9AYXlx4OAGAyaD4AEl4YXbfmddnNynpwpPvozrtkBJSO+2YkpQBACeAh2u1TDh2hsUacZ/IzcmibWvvoAN1f6Lu3EWWRSvMayysWBC37BWLjqEq4TpMJE/ixvOui46n68DrvUwE214eNtyP2chbKK61Bf3uIbQAd889Uk+7t8Y76uQk5iwqpoVHtNGbr99M7Vl7xbVskwAPBwyFac6aUkuI5wTwSk+5lHInzba8fHANYm9kfjW1aFBnb0ikSbAPzU7oQ2I6CM7jLnGeRFMH+DEHjGEHysYa5GVCiLGqMe6P+AxwYwVYAVaAFZi4CjDAm7h7zytnBViBw1wB5E0CfEObVl1IB1usAY1cJh5GAO7wsJROl4cqc6KhqqneKrdiIOkAd+YHtXpRqMRre+HQjph8d/L4maUhmlUaG2apjl2Sk0+lufnRH2XdP0TD+4cpmwToDYxWXUUHJ7BVe+lSIy9eou3N+jZ6uM662qfbmKhM66U6rRmehPZujZ4CYakSTLqdF+/nzoqv4CqP8xpqqnM+9TNtDqFVr1VU9JT5xCQM2b/xT/TuxlcNiGfX4LxD7jsZOqv2Qxjt3BkLEgJ4Pet30+5rRivauhWxCB81RB0/i62wnS0MSKuyP04LRR48gC8r5gZ494cbR/M+Omna2f06zVr6PPVkNVBw2OUzFw7RnIvK4yCeHcDLqZlFZR+4zPb0dg64dIErXJst4veIXQVlL9diMn3d0gaoesCtmC7HIFy6mVLAIldUnp0kKtByYwVYAVaAFZjYCjDAm9j7z6tnBViBw1gBPLwi4TeaUx48WVHWeBARubL6R6DfWCwd0ATNnLNrLOZiPqcVBDUXZUiXWzFRp6Kd+25F9QCV59u7eeJceP89msOuIFBqSKXjSKs4ZXpMUQuv+/rGoTZ6ZFN6AJ7ZvQaAJwtU4LPi4MWKW1amADwJ6Z0gB2DIgYN1tGHzaxQOxUN/WbTCbu+SAXgYU4V43VmHaDDLGpoh513fLwYt3WEf673byAWGLySsvoy47b+2Ozrv5Nq6u9qop7udCksbaeryv1H3UGzOyDgNhsVnSDjxln99UvQtuyq0bvAOA7g54FIN8vyqvu31c27ur5sGQnUoYt9RyCnZ4k1Oc082v2yyuqjHQyPkwOPGCrACrAArMLEVYIA3sfefV88KsAKHsQIDobBwT0QKEpidAgBPBXnZxkMuoFO3gH1jHSaFeeIBrFhAPLdQ1bHYFkCzNlEYBDolU03Vj7kn+uD443VPWZ7eLnxW7Ty1cMQ595qAFK+P4qvCnNKYUFmn9SUL8F490EKPbRnNVeZFS68OPHP+sbYtdTEFKryce6wBHgqoIFxWtwjN3oM76EDDThGWKkJSR0JsddebLMDDeQDxGu95nno37CErF56sOgtnrPm+dULfNVQzFAnVlmHCiCqUudIQNqvmu3NaV8OhXdG3J8+oE1bmx91lEBBv0vH5NPmESAEYK/edDrzDsbqgPhUgL5Pyu3nNxSf1gCsNv9vsnJjum+ncI5MKWFSKYie4Z3FjBVgBVoAVmNgKMMCb2PvPq2cFWIHDXAEZNitzuHUKRx5cbkUC3PWKBxs/qqL6KVEkvxuSgg/4OawvY8EN0y/Cs/JFPrx0J5M3L8BcYEF3gakAeMW5ZdohdsnmwQPAe1wAPC/uN6mNV4CHzwygF9x2cKx1btukK3Ncv7EEePi8hwR0xmdfNxQSAG/foZ3GOpBTy6iuC9ehhvDIgVddUU2lIrzQDxB/8J4nqS7vrqimyHcnW0DwirBS0FiFd+omqIDrxms3EUJo3VpwoI/aWuuj3YrLmmjKkiepm9wB8pQP1giIV0Dv3/oBKt3WHh0DxSoA7+xy3pnnZBdevVXA5G1b62jhomW0aPFoXkk/QR7gqF976Ka12/vqlydufdX35RctBaJyrd8gL5MAJ9Y8
WYTP5ghgyY0VYAVYAVZgYivAAG9i7z+vnhVgBQ5zBWQePDzYVQkAhQTvyA8EIKH7MJ9uCTLJ1RAFBYJgwAUylvkB1X3wWhVXHmsH8NxCaHF81IH3unDgvTZshMwC6MgQWp3rJFkHHvbgK4+8mxDA81LEQg0rR544fFYGG/bRsIA6XltWvsgXN2Wm7WGpCFPE3pQISA8AA1DfPhJK72XuL7/9dLQ70uJnC6CDFhYw0K6VlVTSysXHGk7aooIAQTs/WnN2HW3Nu59aArEQVQV4dvBOPT/m9b3PvyMu3AiQdGoyfFbts/h995IoHeEK8YrnFNFnP3ULTabFSS3ffG384qbrBLiLB8kLFy2lc85dE4V5foA8fAbgYEOxjLFuyf5OwH0b90yAvN4Bfxx5bnn50qkZAHttlXtF8HTOic/FCrACrAArMDYKMMAbG935rKwAK8AK+KIAQofgpMADDFprVyQENJNbouGhqViTGiqL8EM882fCA22iD492AM+tiAW0BcCDHoBDgz8fLXiRToAH5+GvXttBW5vdHVTq9TC3oog+t2qO6yUiXXfY6wHxyhdh5hJCDfX3Urhxv+sYskNosI9Cg/20NbiZsgsihT6mVi2jaVXLY8bwu4iFzHOHNaCKrrmIhe4COrpaacPWt2O6G3kOUb22R+RM7BugtoI8ai8cTZwP9115aZXvAE9OAiBPQjzc1wCq8GXE4tBq3WXRtV94N5Kv0ahYK1LW2YA8O4AnT9RH7QLldcSdt4DK6bRTP0snnKo/J6vJqw4vOO5u/vmPXNd41de/75sjz1zExfXkKeqQbNVtdVoYK5IbUTjQkwR5mVTAAmASX9BxYwVYAVaAFWAFGODxNcAKsAKswGGsABxS7T1Bw3VXIcIBkdR7LItU6EiZCQ+OVjnupKtF5hXUWUuq+gBeIGG517nYVaHFPJ3y4JXlCSyRlz8aRnmfiFs8QJSTlScAkX7lwznfPT4pSXBtbGvpMSCel+bmvpP7HRC6Ssed1X7ruPAA7nr720TYaj81DxyiV5ufiJnqio5Smj3pGMoVlVzRVnzs30TIuPeKwub1R4BPXkyeO3MhDi+aoe/6LW9RZ3ekcEOFAHbz2rqpsj+SV1OgXGFki3wZAJA3vHIVFc2JVK7124FnNe9EzwGAJxsq1hp5/sSNcshUw8UcQotj4MDTaSecemHSAE+GsL7+1jta8E7Oywzx8HPVUSpzAbqtw2veObeUOXlyAAAgAElEQVTxEn0/FXlR/QB5mVQxHb8LACa5sQKsACvACrACDPD4GmAFWAFW4DBWAO6S+rYIHMBDXL5wrCQSTpdOCWS+Pj/yZ3mdt3xYzBUgx1y9Eg99UyoKonp6HdvP/sm4UhJx4U0rKovNf7ZfOLsezBIAL+Is02nJhs/iHDJ0+L2DHfT7dXt0TktO8M6pGAneA8QwQ1IniAd419ETqZKrwrvCtg6q3rWPito7o3MuL55qQDyAmqoTr6DJH/iM1nrMndQ1YK5qaKgTwGt4cTS/m6HtrBIqmV0SNwfkw8upe9eAd3atSDgMs7NzqOP9J1OouiYtAM+4n4k8hV7DdFWAJ9cDkLdr0n7aXhPrsMxvEqGJu8uptKPA6KoL8C761Pdo5pzR3HSJbKx02X7y0jWeD7eCePJ3ACC4TjETwGBUMR9rx7ZuBVrPIokDJMhDuDm+3PKSExbO2aaOfq28kInMzcsxNSJvLH53cWMFWAFWgBVgBRjg8TXACrACrMBhrkCTKAgRCg8ZoACV6po6Mq9AhCrxWMxTuq3cctwlm4vJz0sp0dxpe0Ro5J3b37KcilUuvKq8IsoPxLo7igIVNK1uGu1/erfWkpItXiFPogKpXW099OyuJtrV3ms7Bzt4h30uEgAI45lBrRzMDuDh/XBHCw2Jl9pUeLelcx1t7VpnvA14N3PdRss51pTPMz6XEpKs+N4LWnrKToAxMkE/XLbmZgXwdty1nXr2WsM4QLwpJ9fGgLzclmYq
efmfAgyGBfgZvXcEsgMUENdFfm5+TH48QLycaVNSnj/Nam1gjLsa8SVFJGUAWkXxMFWK19wpEbeguQptW1EnrZ0rKsxatHB4UDjzwlTSnk8nNDdT9XTrfuqhM2YvpYs/fY2nfbTqjPX9/fEH6J6/3uN5rHPOXU3n/Is9+JOOPCeQlymAKtGCPV5EM4M8hGUHReVmu5bMFyhe5qXTF9kxsFcoNsONFWAFWAFWgBVggMfXACvACrACh7kC7d1BI08UWiYBKCdZ0zVPXXAn55pJ+flQmRE5Dd2S8VvpbAfx8BA4syREM8QLzQ7ezSo+2gjXrK9roQN/dIYafsE7zMcqbBIgzwzxkPNubmWx5SUm4QWAFypT2lVW1XlIB8iT7bXNf4zkvBsBd/i5E7zD+7mBAqounx4FeMWzjqK5l//S9Y6juwYVcnXv6aadd293HRsd5n1ygQHxAO/KX3vJOGbr/lGgUV2GcN1YYKDmleu5YLVRuRZwNFVNXZsVuDOfFyBv7uQh8SUGGXnw0JzgnTx+cDAorpFhmpF3kE7u2Oe6HD/cdzgJ4OzPfvpDWr9xg+s5rTrc+r9/djzOzX2W6BcECU3W4SCEqnaJQhrpcAKqmuBLL7tw47F0iZulggu1ukw/jYHf+8PjsQKsACvACmSWAgzwMms/eDasACvACnhWAPAOEA8tkwCU00JSPU+v4E7ONR1uEN0NxoNtjwAkTk4Rp7EA8V6o30F7RI4zgDtZVRZh1x+onU/zy4eF8y4QM0RN/tzov9Xzt7+4n9pfFEnxlAZwV3HKDCqYXaa7JNd+yeTD0nEdmSegW2DiYOtGevSNH8bNf8baDTFhs+YOfUM5lJNXQX2hCDAtyMmhmnnvo1Vf+K2lFtIViEqwOuF+AA2oRItQyLd+tNZVX7UDIF6gaR/tf04UjhiN/I0ZY9GMLFo0IzZ0D+AucORK6l+ylFIZBg/AhSIdB1vCtG5X7HXqtNCj54r74YFu+vmt62ydd+bjC4p6BJztoppQr4B49oVM/IJ3OD/Cty+7bI1RaCOR5gbw5JhW+eCQFxBFETLBrT1WTkCnvIGpDOv1uteYC3LgcWMFWAFWgBVgBaAAAzy+DlgBVoAVSIMCB+qb6a13NouE8b102smraHptjW9nBeBpHgmbxUMvWiqdMX5MPFUV/uRDGVxXcF9ZhR06zV8+uO268zXq3XSQ+jZF8p3JVn3hMVS9+hhqCjVS06BIoKW0ZYWx1UeT1ckPmCj1qGtqorqWJgPc6TacPxhKb1GURIp3eIVe6vp1XUhvb7+X8FKbk/suNJxNXeFI1chsEYqanRUbonzXyf9BaxYfTRctOdroo+a56xQwzkshGkDP1lcbaM+zBz25NXOnipx2O7a4Xg7Vgs+esMwE0KZMpuwPf8g4FjnqEoXMTieXAO/lTcMxIbOuExYdTlsZpu88/QS9sHGva/ea2nwqKhKh1p1t1ClecOHVhPpijkPYLApXJJv3Th0UgPyTn1w9UibEdZpxHXQBnjxQBXlwn6GlEsDqrCgT8o7Kqs6Yr3TkAa72iy/GvHwOddabSB+kxcAcubECrAArwAqwAlCAAR5fB6wAK8AKpFgBwLsrr/klnf+Rk6mrp4/+eO/f6f7fXecrxGsQhSwQaplJlVSdZDUSuOcFfCu4kUgVRqv5Bbccoq5H1lHLO9YunM4ZwhF0whANHFdG2YXxroilAuL5BfJkQYdEYGwibjSzHmNRLRggS9cZpEKvRCGSrgPPCuChaAVe5qbCO7xnBfDem3UCrZ91Ii2rrqUbTv8XI18fnLRegbM893s/eccIaYUmuA+4ubqCoX5q62yiorygyHPnfgM0Qzycq2/1GmPOgB1ofhdEAEBu6himlzd7t6ghlPZbr9xBA/2icEG7cLEOxOc7Ky3PobyCbMovQK4/UXFXUH84004qyqdj83KMf6OdcOpqd4ES6IEQ+YsuuTCBIyOHeAV48kSAZuVCW5lbscchzDzhyWke
OBb5UO2mJn934v3cQLbhTkwkfYHm0rW7Ta7IF3k0uYCFtmDckRVgBViBca4AA7xxvsG8PFaAFRh7Bb7309/Tkvkz6fI1ZxqTufP+p+lOAfGeuudG3yaHXGmospcJjgadRSXitLIa1y9wJ8feeun/iYclEvmY4s8GeLfl4tE38mrLLSFeTc4kOrUs4k5KpskqlV6qcPoB7uScnSqcJrMut2PdoJpTZVm3sc3vu51L9tcFeGZ4h+PtAN7aUwsN6HbjMV+k+YNHJAwLUG224aXRirPQB/cBwAe7/H8NLRHwmB8IUV6eHiBTw2kRkt11/oVR4Aj4YbiWgiKvWF/Il8qdGO+FjcJdbBPe67bX/7P9D25dou+rhUZQsfa5T39RwMlBSiXcwrX3hzv/RI89er/2PNWOiQI8jAE4L3e9KD8n5Wu1W2CilYYTEkzzoPw8kXOuNJ8GhPvYLkee5lBJd8sRn7PJlZEKydxYAVaAFWAFWAEowACPrwNWgBVgBVKsgBng4XRnXvINuvorl9JpJ0VC6JJteNCEAwYt1fnlkp2rPH5adaHIbxUbqqY7tt/gDufd9+O/GSGzucKNExI5yNRmhnfyvYK51qHQfkA8L5DTT3An12ZVUEJ3f5Lp5xTWWizcUnAOyeqayZwHx+rm39IFeJ0ibHZQhM+qTQV4wwKYDRd10Nr3V9G6E6qj3eaW19BHhy8xXl6bGeDheFlwApQGeeTU1t3bQd097QTKZgC8nBEoDXIlDszCnzbt3PdH7HpmgCe7Y3+KC3J9AUIAeA++NpQQDHy54R16Tyk24qapCvDQ974LPyUgF1xqIhRa5KFM1BnpdF5c5y+8tpZu/vmP3KYX975bFVq3AdUQUasceXbg121cr++nKpWC13mo/aUTD/CutCjHcLWm6hpwmyecuRUihJYbK8AKsAKsACsgFWCAx9cCK8AKsAIpVuDhJ1+iZ15aS7/88ZXRM1n9LJlpwC3Q0hkpZJGJD0VWawNo9Bp2p4IqhO/5lXurt+4g7b/+UWOacOCFBdNQscebX7eutpkjqqHmVBZZbl2y4bQ6VVKl8wluKzj1/Az5GqtwbCtXXCoAJTYN16COblZFLMwhtFbuu8j1FKkgOVTYIf4RgexmgFdbXE6FOZFQ1K8M/ZgW0grt24EVwJMHw+EH2DYkiMzQSBRpV0sD9YR6jC75At7l5ZrspoB4otiGVTthGSpiZlF27WTqOvEUS7A1CoREiHwS+fGwN/e+bGGF1VBmb3c9PVb/hEbPSBczwLv3gk8ZPwe8KRMFBIyKuwLo+HW/wbioLNrY3k+/uOk62rZ1k/Zc0TEZ9x2Ot6pwPRYgD3kA+8SXT5mQa05ugLmAhVoQqUeEZMOZmS7AieIVmA83VoAVYAVYAVZAKsAAj68FVoAVYAVSrABy4MFxh5BZWbxi8/a99BWRF+9pH8No61v7jNxXCL0sFP/RP9YJyt1k9VIkIVUAR85Ruu+Mh3lhQIIBTz6kHRA575D3zq7ZufDQf3XVxW4yOL5v50aTD5XI2QUI6hdYUCfjJR9dUos0HaxCNQkopePOT0CJ0+oCPPT92xs/oEOtddHZmotYoOIsXmrLzhIFLLJzYuAd3v/91xbG9KvIL6LKglEQ7AXiOQE8eRKY6nCtBBtbqC3YSaGsyPUM912+GeDhDRuIJ8Nohz70QeqrrHZ0psniIhguEfiVDMDDORMNob1oyZF08dIjY/ZHzY+WyFrMnw8AQ1QPlvdoLxDvqq9/nxYtXubpI/f3xx6L6V8m3IUnnn6mJYiSIerpyJEHWN/SmRm55qRAcCcOhIbirm15DSA/HgokpTK8Ws6lRuRJxHm5sQKsACvACrACUgEGeHwtsAKsACuQBgVuvf0hAsi7/urPG2cDwLvhV3fT7Tdf7dvZm8WDEEBHxN2RJ9wdA76NnYqBzE4Hq3OkGtzJcyL3nWwyX/hIoUZKBuB9oPSDNCl3csLymZ0yfuZ/05mUbpVWnbF0++Da
7RFhiwAIeQJGI7/joCmkWXcst35ews2tXHgz1m6govZIkjYzwBM+NlEcQYS/iTDVIRE2K9uhGYX0+EUzYqZmBngLhlfQlcM/dpt+9H0UsXBr4ZZWomCQmrJ6o11zA/0CEAhHHSxm5mYB8QDwlhxbS2EB8HTdswAQcBIBinjJj4dr/+l3QtTabTE3t8WK959vf5zqmhs0eootEkAtPHKN/eDkM2n5pFrL41SQp+PctDu5VX5LN4i3cNFSOufcNZ7gHcDdU489HjcN5FZDaPWZ55xNZ51zjuU0VZDX0RtKWRhxvSjAlEnNDSqmC3DiI1lbVZhJ0vBcWAFWgBVgBTJAAQZ4GbAJPAVWgBUY/wp0dvfSZ666wXDgHXfUErrzvqfo+u98gY47crFvi0eeni7xoIWGynUIqfXbseTbZMVATiGa6QJ3cj0qwDPyh4nX4IjpbvNFIpn5TPtE/04OvGTDaBFiBpiFfUTieQCtdOZjSjfAk/AZ+5KOdcJt0yvC4nQdjGaIp7rwzAAvGjpb2hzzsTG77/CmGeDhZ4m48EoW7jHO1b1tdsw5hweCNNQqAJ5oPRSi3qyQcOSFRb7HAWG2y6JcxI1bNWHdy1LK1J5y+lQqPudUY2+83lu85scDSHl9ywDtavTuQEIV2t7sQ/SDl57SumXJ6r3LaqbQD085y/UYeX9KtGiHXYGYrVvqRDhtXUxhC+S7W7hoWQy4e/it+CrZ5x0bC4Vv/cXNtGPbNsu1SICHN+cvXEhf+tpVtmuWYcTIDernZ3KsHL5Om+ulCBTmjy+h8EVDKgqe5It7PcKsubECrAArwAqwAqoCDPD4emAFWAFWII0KANyhnXbyqmg4rV+nV/PgZWJuIfM6rR6W0g3u5JxiAJ74IZiFrEQ7lgAPYcZ4kINWfj48615zVrmydI/10g/rKxl5GMZxANHpyIvlFeBhboB4KGohw2klxJM58GTYrFz/kALwHlsznepnxudMtAJ4ukUtgj1rKdizjvoOjTrrcO6BlnIKtpYbME+67/BzCfDgvgtkiUq1I9kec8RFH+fEU114uTl08g/PT8oNKfOsFQroATceAJhdk7kQn11vAxcdLrDTVkZy521sqneFeJgTwosXV03WgnfytMnkjAOMR7oDhGF6aQB3VvBOjrF4Whl9+2PLyAnewc+IfVYBrJMTT46t5oLz416USJVtL1ol0tdL4SA/rgOnOQIOwrnKjRVgBVgBVoAVUBVggMfXAyvACrAC40SBIfFEKMORdMJTM2HZAERt3UHhAMo2HGapynXmtlY1Bx76qpVoxyKEVoZpFYmHuF7xkN8+UmHYbR1+v+8lxDTRc0toi0qfKEyCyosI77Or/PnafYeip5qxrIRmLCtN9NRGwZegKACTKCwEyEMDxAtseJG21L0VM5fhvF4azu8lhM2icIUVvMMBqEJrbjphtH1tj1E4VG8cGkZRl9YBenXHaEjiCfMLDJDX9FBsGHd7TquAVvHwDG48wKyAjCMX42aVldJQIIdmnLKQZp04L2Gt1QN18uNJ92dbN9G6XfoQ7+i5YaosGT2bG8RDqCJCZq896cyE1qaCvJ7+kIBy7oU37PKsOU3gp4/U0ZaDkXBtpzanShQH+ufvbbtEoqUjhU3U9m9XfZUWLFrkNnzUOY2OyeQDlMUZvEJM1wkm0SGZ35vqddAfDBtw1qtLVZ16VWm+UQWZGyvACrACrAAroCrAAI+vB1aAFWAFxpECTSLvXUgkb4OToLIkj5o6MjcPnnyIR1JwPPAkEpbn19apVWgxJiIKpQPPCeA5VaHFOF6LWEATACxALQCskIjjLRB/R76tsWhuDrX9Oxro9affowM743ONHX/GEYSXXbNzW1q5k/bXddFr99XT/jpBcyza+9fU0vvXTPUsEc7lBAt1BpTXMXKoPfbPB+i51x6OHhau3kcHjmqmQzPtc1lZue/kAL8cesh2Ciq829MQpJfWd9PeRhEqi9hvhc3MqMyhVQVl
VPDGfFHgposqiw5RRzhA9QPlOsujwNSIrid/43St/l46SVcXtEOeNZUpqdWIdSGeGd6pc/nrpnfp3s3vxkwPIbOfOfoYWikAHu4/yTQ1P6Vb9V04pOEy1c3tqAvvMP/GhgbK6Wmg+d2xa5VrA6DFBQIHoNp0XHhq/2QLe+De0i/ucYnC82T2yu7YRMCqeSwJ8uAoTub3GgC2ZW7KVCycx2QFWAFWgBU4bBRggHfYbBVPlBVgBVgBdwWQWF46GvAA0NDeb1lp0H2k1PVQIRUe2OVDTurOqDeyUyXaN79u/XDvV/47M7iDEw3ujURCuvRWq9fLCXAB3OHl1KbPm0Kr//WMmC6q+8qqEIA5Pxjg3X3XbXedMNx4a66Nre7qdlAyAE8FNp3icydBxA9fEsUTWkZccRWHaGDVE7bTKAjk0tQSa5DmFEIrw2Yx8IsC3AHemduwWvgjHKZZwSz6hFIld3v3ZOoOF8Qdh+IbaDK0FgBv5cdXUfnMSjc5E37fnB8PA00qLxCFeGILHOxqyLLMiYecd3On2OepdJpYMteA1bg6YMtLaPpm4br7mXDf6bZ9e/caXed1v0Mlg6PFU+TxuG7h1rZS66Zbf617mmg/nfVaDepFA8+TSvAA6QjXBatOp1FBHr5U8+JWRI7CyeL3NzdWgBVgBVgBVsCsAAM8viZYAVaAFRhHCiCnVGtXxHWXjvBHL9KpkEqGyhoQpCjHKLiRCU3mwkMEIeCidKl0zhimLRfHhsbl1ZZTdqF1jqKanEl0atmHtJbklPcPD4FWIENrYB86IaQMIXeqO6mxrY5efvM2OtAQD++GG46k4cYjY84sIZ5uBV01N5YuvJMn9ArxrNanIxuAUyR5fdgy1FeFeL2n3WY5pBO8wwFOAK+7MRIiCefd3c9EilM4tmDEwTmzU0C8vNFrtnswn7b3TLE9tCSnnxb8++qUwjt5cjU/Xp9w5CL3YzruC364rqwEdAJbqrvQbevc8t6Zj5cAr3iw3dKFpxawMB+bCMCTY3gFeekukOOmM973si8646FPIiAPLuwK4aDnxgqwAqwAK8AKmBVggMfXBCvACrAC40iBQfFNf6MIo0WDswQt2dCwZOVRwY05x52Xqn/JzkPneITStjzwNvVvPmSAKyFntKkQL1l419OwmfqbN4tw2QggQ96squXnWU5xLB90AdPycgPUOVLd+Lm11wlw9y61N3dRe8lk6iidQuVdDVTR3Rid+3D3FBreFVvJc+6iWvrcN87RKsQBEFAsrt22riDdfMk6nW2L6eMlnNauGqjdSb0UWbl38zq6b8s66j/6cRqqjDjyZHMKm5V97MJnVffdT+6OHddWLOHAMy5mYdS7JDuHZonqsmprDJYbwFoNYS0V8G72uSdS2YdPNUK4k8nn5WUTpUMTf7qFonoZ166vW5h4sufA9YxzqPc+L5/pz/7mNU9TkAAPBx3R/nzcsakCePJEOiAvUm06P85h6WmhPndOR9oJef/AZ8nJkYfiFTJHoM/L5OFYAVaAFWAFDnMFGOAd5hvI02cFWAFWwKxAQ1u/8bAtH6TS4WKx2gVzIQa7HHcIW2ruHMioUN+Oh9bS4LZ6anlnf8zScr+6ivYu6aLmwSbLC8/NeQdw17rxIQHvthjrNUORmhXn06SV58eM7WdYl9dPiwrTAO+a2uvo+ar3UXNeVdxQAHmzDq43YJ6EeAC0AfF/cDKe/8UP04z59m4vOaAMG37stj1G3rtE2lX3HK11mC7Ak9dylgAPCJf1CrMuD55pBKbWFBcbx5rqB8TNVSd81i501nbhcOGJiNSTBgN0EpI8igZ4bIBqE7yTY5Rf+10jkX6pcJrCFYfwfLe5awnv0knCFBRaQH68VObHhFM5HYBShgkjZQDcm+bwYDtJDjeAJ9fhBPLwxUChcPcC0mdKw70Ars905BvFueCyQ7MCeTXidyL048YKsAKsACvACpgVYIDH1wQrwAqwAuNMgVbxUISHxLFyt6mO
O1lZ1Al4IKF7j0ggH0Ty/Qxpbto1hRqpyQTxlhUud5x9sGUL7XnmBktwpx5YNHkJzT796uiPxlIf6YbatOcduq/uV/TKlBMoJK4tp7Zyyz8MiJfVU0sknHjhEeLjVtRCjinPmQzAW3PtAq3qtHhILhLhsHYP7WrYNyASrue9be/QK7vuoH3t8UUCTpz7aTpJvMxtG22gW7KvMX4MOIVml2fLrfqsdOBpu+/kZIbgwBN7Jwy638rPI/ABgFVzMQPZvfhTl1LOnNnGP9Xw1q4+ke9PhOqnsqn7EnEt5Ypw5cGUAMR05mKDjlgL4E23gKE6QNQrwEMRi4GBiAvbiwPPaxEL3f23cp0lU+1V97xe+6EiNX5PpbMqrhXkBFCvrbIveuN1XdyfFWAFWAFWYHwpwABvfO0nr4YVYAVYAeMBBMUs0NLp3vIK7uRWZUqor/nS8asICB7Ssjq30/pHf2S4iXSaCvHwYIkk6IBH6W4yB9+tr9xID+WVi/kP0ZDGGo4QEK9cQLyhnWeK+GAB8kTTBXjoi1xUt359vW3VWTcddMNosTeNvW303oFD0SFnV9TQ7MoaoxKwWkkSHe5Z+zVLcKfOZ2bFkXTJql/ETREQ74mse2h71gYDiCGUEXATXE02J+ed7JMwwBMDZA3m0HBbH323ME9U37VXUYV3steDf3uHNm+tp23bIxWHP3a2yHco/rzgX45y2w7P76t5EHGwBIgIOQdA9POzkIq8Z04Llu4z3AvgbpRg2O4YrwCvv7+fmhojIe1mgAd0jMqmVl+oJJP/TmeDVZCH/nb5I3XGSkUfODHxe9OPAhZe56eCPJyfw2e9Ksj9WQFWgBWYOAowwJs4e80rZQVYgQmiAJxszR0RB0Y6XAWJgju5HWqYZiZtUbJFQORDGfR54Tef8Bx6KMNpEy204JeWAJkffvO3xnC6AA994cQr3zE1WtTCqiKt3RxxzqvPejXhJegAvD1tzfTi7i20v6MlDmjAJXfWkuV0zNQF0fd04J2csB3Ew/uAePJPXBvLc46gueHldHroYq31hoOHqK/9cfLqwOtuz6fQQD4F+wI0Ze0amlmyl2ZVN9BJC0edhIHZs6jg1FOizjtMaNOWerrhpidj5maERqOaKRx84v+u/o+P0NLFEVDrR7MLbcY5y0R+MKOwSq8/rl0v+ej8XptakdluPV6LWGCOcOHl9DTEFbGAbgKHin2L/SLh3676Ki1YtMiP5bmOgb2tFAUaesUXTakMjXadiKmDX1/YeD2v2h+/M8pFvsRcVFHixgqwAqwAK8AKWCjAAI8vC1aAFWAFxqEC9a19xsN1KnMNmauKIkm719xgkB7jVIkw2qYR6Jgp2wH4GQyFqV+sy0sz67L3rfupecNDXoaI9l36iduNXEiyqENCgyR50ONdG+juhkgxCS8AD/nwZr2THQV4Xh14j/xuV8I58NxCaF/YtZle3LXFWFNAwDrpjMTeAU5JF86simq6fNXJ9LIImUXYrJfmBPHMD+2oxBwaRK63kBboRRVaXYAX7A9Qa32JOCXWFsm7NW3P540/hweCNBwM0mXnv0dLvvyBuOX95MYnDdedXUMtjGzkOBQ3m2993T+I55abUMJx3OMSyUmorifdDjw4jjFvNVTTrfCDVxce1rcktJkaNr0Zs3XYK7HrMWHT6YR3cjKAZQghVh2uifzu8PJ5dOqbSUU1OP+dX7vK47ACrAArMD4VYIA3PveVV8UKsAJCgWdfXkfPvLQ2Totvf/kTVFZSNK41QlEIALXIg0letDKtH4s2Ayo/wtkywf1g1sar881Ol6b1DyUM8GaddjWVT1tKqEo4VsVIbmp6idZ17IrK45YDT9Xx5OefiVak9QLwEPr9qChi8eq9o6GtXq5dpyIWcN79ad3L0eEA8GBIilQdji8wAYhX336tl9NH+37ztGe1j5NFDnr6Q0ZVYqcGF95TL/6VXlovyso6tNb6Ygr2R6pRC1QpAF4W5fVPpZqGc+KO
+sx1AZq7IpKfD83KeWd3KoRlgg1d/fWzaNHC5J14MoTQLR+ZDHO2KrAxvOEADW84SEP3vGVMO2vFNOU1PaJImr882Bd42AjNBiAuDC6k8uElMZIaDizxWYdDTi2ssflgJ/3skTrta2nxtDL69seW0d8fe4yeeuzx0WtdCZ+dv3AhIQOJGeUAACAASURBVO9dupx3chIyLB9FPGRoNECebk5AbRE8dExnAQu3aeF3IT5P3FgBVoAVYAVYASsFGODxdcEKsALjVoHO7l7asn2vsb6unj769R8eNP5+/++uG7drlgtDaFJXbyQP3uSKfAP+JOtwSAW4k/NNNlw1FRuq63xz02XTn69IeHoIo518hKhMW16gXbUy4ZPZHPiFfY+KnGNt0XcHhStRtxLpyY9vjDrwrvzZZdpTk5VBb7o4HsC7DeIWPnv9sw9Hh8BzckCEq+GzAReZVQsO9lIO3U8FOa1up457366ohd1AABoVIrwQ83LLx7V5y4N05xP2UGfUeYezReAdWnX9OZQ/MNVyCirE+/QXb/e0Xgx/1Irp9IOrz04qNBIONQAVFOPRyUem5seT8DN8zcMGvLPVWcC8wI/PMwBehQhZTCUc78jaTPsCj1BndsTxaeQ+BCgemdzy0DfjQJ7MF4diIdKVqRtKK+GdunaAPLQyUUADoGzuggVpB3dyPghPLxXuZrUCrbqHqSpW4nQxpyPVhM6HCdfGZAHwuLECrAArwAqwArb/DTMsGsvDCrACrMB4VwBuvBtuuYv+cPPVNL22ZrwvlwYEZJEPpahi2ice2ryGgkqR3ACVH2JmygOUuhY3d46uLskCvEkrz6d05+lSdfhq0yvU0rEj+qNhAR8GNSsGS4DnxX2HE8nKuzvf66D7rtuufYn1ixDUIy4tocfvHQVbZ1+0jBYun2S8zKGzoChqyKzViboHWkQRka00pegN7XnIjl4BnjwO8BhgCYn+nSqV/v6B+2nn3s2W86rfXT7y81F4Z+e+kwPMWZ5Fn/1RgFCw4iHxSqTd98fPiST83qvGSrcwnMOJ5EbD8YWiwEbXNx8wYKzbf93CkVd4wwWOVYgTWb96DODdxtz/ihkGkGbQBIvLhhbTisFvxfSzglqbDnQSQN4W4cizaucdO4PwsmtjeR+RcwKcRI63zpEvmNS5jhXIy5QvkFCdGACfGyvACrACrAArYKcAO/D42mAFWIFxr8CB+ma64qob6Jbrv0pL5s8c9+vFAvEAW9/Wb6wV4Wh4uLV6YHISQxdQ+SFoKnP1JTM/qwde6AJNUUES4cNwtDjBgmRDaIunLDHCoN0cWcms0+lYALyunoMUHOyLdsP1FdaAeCfdnkNeilfIExjwSoSRoiDLfddt06pG23Cwi7rLW2m4YNByOQuXTaLuU9uNzwJAgQyXhSPIyekFgNcdbKFZpU94ljhRgIcTSZhRmBcw9h5aWLXbHnyEdu4TgHUY+fMirluEzbbWl4oxRpPhu8E7OTZceO/sejdhgHe+qEp74ceOMj4jABIIBbWbuzyndN3p9HXaBDjvaOPBaAiiW9Xn3COnU/lNq405+t2s4B3OkSuuv5CF29MK4qnXASrwSochQmpViAfX3RLxcmpq6Krfa/Uynk5V7XSDvExJ4YDiHgCc3FgBVoAVYAVYATsFGODxtcEKsALjWgGE0a75/LV0+UVn0eWrzxjXazUvTubBA6DAg4FukQi4f/DgnZebbThh/Mhx5ya81zm6jefX+yo4A/iBLnjAgiZwR+mEJScD8FDEAk060txAiF/rxjgS4D7es4/uqt9IHd2xIYmuEC9USBe/WkWr/9X75878kL+/rsvRibf7QCOFK/ps4R3Wg7DU7PP6afK00hjgmqkAT+6lWqlUzYsm38f1+NLatfS351+PhgF3tYqCGKNRz7Z576yulw99PJvai95LCuBdICCevIYAY9Gs5i6dhvg84V6TTBu6581ovjuMM1opF+HR1iPjmigQLrygjxV05Zle
yfuc5UntAB46W4XTqtcBQCfuy/gyBuG1XppV6KqX4/3q6+Velg6Ql0kFLJDuIocr0Pp1qfE4rAArwAqMSwUY4I3LbeVFsQKsgFQAzrv3HbWEvnTF+RNOFLh2ZBJ4HYeBrISIB5p0gTt1UzIhvMt8kcjw4yyhCR6eEw3vSySMFvnvED6LpuNa8esCV0ElroO63jb679YNAhj0UndffFEJq8q0eXlFtKbyGDqven5C07Kq1ImBXrsv/vz3/+Vt6qOI29SqSZADl2T/GV2UL5xhUwTEk80N4CEHXmvf/rQ78MxrieRFi4Smdot9kYAVRTgkHHvujUjBhi1vDtOWtyIZUuC8s8t5Z6WXnwBPjm8OCUY1VAmj/MjPifPY5b1TK+WajW8G1L3kOKKPH5vQdWp3EIpVIO+dVXMCeHYuPHUct4q1dnPKlEINqPrb1NHvGuKsriOVIA/u74IRp6ivF4HHwXAt1lYVejyKu7MCrAArwApMNAUY4E20Hef1sgITSIFb73iYHnziRfryZy6IWfVpJx097qvQYsFwaLR2DRhrd8rxM9bgTm5OpuQhUi+WClERskgAn14RJptIXi45Vk/DZtr77A3an77S7vk0f943jP4Dpxca4Yh4wEvWpeQ2AZk8X3UYAnDd2llH67tbKSTCaK0gnjpuTqBAJKmfRrdMOcntdLbvYx7IFea23sf/WkeN971Ox2Z1xY311nAp1Wfl0yHKjzolQ2dFqraWi0Tx5ZWRh2U3gIc+rb37qLboAc/r8VKFVmdwgAxAPIRvC4+ZUeDAyiH73F+GCK9EWrIA7+r/+AgttXC0qRVHUWXVD9edur7B8//Hcbnm0Gl0Nqp9irlkP/hviUhle8yGnJ9Fi1aonVBGBPMw58BT+5wY/L3WXLyCPN3KvlonT7ATroEpAuDJ9A5eh5HQukCElftVtTZT8q9iTVUiXy03VoAVYAVYAVbASQEGeHx9sAKswLhV4OEnXyLkvzO30045ZkLkwhsMD4nKpRGAB7cLmgpEMgXcyf3JlAcpzEeCLKM6qTAyqRUTE/3A7HnmBupttC44IMec9+6FVNazgPJLa2NPsyCXsj9aQu1TI5VE/W5yvVYOQzx015Tl0zW73qLtoUjyfFSlHQz3ideo860gr1LAsALKzSmkKytX0MI8WUTB+2zhisnLDTjmbRzcWU+7fvgADQi46tQeGaqmgwLioYXnB2loQTDGhQcHm1uutNrSIgHDf+JpIcnkv7M7EWAjHvKxT5h3aFCEygqIZ87BmCzAqz2qkW646UlP65Wd7/i/KyyPUx2D+EwZQLp30DU/nu4k3AAexsG1bEA7XAvhYQOmASYGfAZ4duGzODPO7xR6rwvwpC6Aubh3urmDEco8IK6bdKREcLp+vaRzsBvHDPLgSE20JVvkKdHzmo8rF18WScjq15g8DivACrACrMD4U4AB3vjbU14RK8AKsAJRBZoEwAsJkCdhHcLVJKxBp7EIlbXbHqfqhOnaUjPIwgM/HqxkRd9k52HnxCtun07z31tN2TkF8fBu5KQANgOzAtT7Bedk9V7mKCvtwhHUKUKu7cCCDG/+79b1UYhndZ4FuWV0dsmspOAdxjXyMBZECiBYNcC73t89TXt3KIneHBYuId5Qpchb+L5IMY5Z8yqNPwFUjKIWAujYtcuOPkn43fbTX9Z9XUvemRVH0iWrfqHVV6eTWlBGDTktFhqh4qssbqCOde2FiUGN6x6IwP6f3Pgkbd5arzO9aB8UsJD579QD8blC/kjkjZQACXuMzxagkhWE9HRi0VkH4MkxZVg1/o1rPl0AD+xQXG0Udqh6M3/Tx6m7bQf1tO2MkWDKvDOouHI+lVRZh6XLawEh1laViwGqukTuPKeCLV4199rf7zBemR8ymbQPiYT0el23Tv+a8nzjvseNFWAFWAFWgBVwUoABHl8frAArwAqMYwXau4PGQ7MMXYLbBC2TwJ2UH+4iP2GZl22F46tMwAQ8zJurvaYi
Nx9AnnTj5ewK0bTnjqHcwgrbKYc7kPOMKNQ+SPVF3TS0NI/yFhZT8TmTvCwz2tdrhWE85Da2R9x224IdxusJUdxCto8WzzSgXTKuO3UhKnC2WmDPb5+i8K4GbYCHMSTEGzxOFLuoCkcBHt5zKngwq6KaLl91sjGNvW3vuEI8v513En71B8MCzITj5MBe4toFHFKv3du+H6bdG+2hpJWuCJ/FS7ZPf/F27etryaJa+s43PhLTXwIWwFG7EHQnCKl9ctHRC8BT7zl5R82g3OvPs9TWy/nVvnYOvIC40IbF/yyK0IqqzgM0ONBJ1f9c7Hjaecf8qy3Ec8oVh89wS+eAVuGdRNftdpxdbku349zeT9RNnimVebE+/J6R7lC39fL7rAArwAqwAhNXAQZ4E3fveeWsACswARQAvBsIhY0QWiSOxwN+nwABmdqmVRfSwZaIQyodTX3wg9vLqsrrJOGMQEVfB9NMUlMt+26r7fFD/UMUbgoa7yP8TuIYQLyBnIjDqvjsSdogzyu4kxNDfkKrKqJJLdzhYAl+rJyPA8+8SwPPvGccrevAQ1/kxMMLDRBv5rHxTkY8QAOEAeRiv1V4p0735V130D4B8/a1vxv9sd/gLlIdM4+2Pn+v4agqmbaEymcus1XNXCgC8/cC8eYsz6LP/igQM/6mLfVGNVo3J54VvMM9B/BR58sCNbef3efQ7VozV6F164/38aUBXXwsVXz2BKO6a6LnNp8LRSw2DN8b8+PynHwjZHdYbIwVwOvr3G/0n/TiEa5Td4J4xr1CyZUo9U/FFxGuEzV1QBhvv/id1C9cl6loOvdz9bz44qZQ5Bf1I0VCMuvJFd/OTBIVaLmxAqwAK8AKsAJuCjDAc1OI32cFWAFW4DBXAAAPuaaQJBvhtGOZA8lNSsCyNuEaTHWYlxeQlcriGvnP9BFeVk2Fd+b3+wOD1FAcKciAVvHV2ZS3qNhRXln1U+bKctsL9f1UamA1D5l3r6kjksNRbdJ9ByC9e7s9/LQa9zdD06I/vvhnK+jFXVviukk33tyqSfTxI05IGbi10x/X5oHXH6A9r9wfBYlq35knXEizTlhtebh0YBWKzzpg/ZZ3wkYxCzcnnhW8U0/woIB4AHlWzRw2K8GjW042q7HUwgxegfHwhgOiEu1o5VdAMgBvq8qz4HZoBsC7/1+NvydzbnUtv9y3lhqpjlZOj69CW5GbT2WBvCiIl8cN9DTRUHiAivZMoeK9U7Q+mm4QD4OoIab4CkC6aLVOkIJO6XIB6hb4QM45aNQpQovHsmEecJ9zYwVYAVaAFWAF3BRggOemEL/PCrACrMBhrkB9a5/xEJspbgMnOVH1FaGCqXJoeAF3cp6pLK5R9NtOytkVn6vMCd7Jee0pa4+R0g7iORWo0L20U+2csZqHGrarvt/1vTsJoYhGcZGWXupoGy2k4bYeCfDOvmgZnX1xxM32wq74wiKnzltiJJRH3ja/XFluc8P72Kt37/kRteypoyErm9bIIGUzltLKi6+xHVKCG3TA/P/x5wjIs2rmsFmnecKRJ914VrnupOsuWc0i12yuCP+3zudmN0fpwkM6Qwf5jMPhtJx6y0XUOqsmZjjpZEQVby+5+bb1ttEt+9dFx1ohAF554UHLqU7KK6SC7EiuQYTOBnubKLe9mCrWW+e3sxqkuHIezT9Wr3ou9CwXhS7wBY6fhUN0rmm1j91n2us4uv3dQN5Y3Nes5o7CHtgjbqwAK8AKsAKsgJsCDPDcFOL3WQFWgBU4zBVo7QoaUEw6Y2Rl2kxcVqocEYmAO6lPKotr2IXPDjYGaXjAOczMDPAw38m/Hg2xlCDCygm1n1pof1ZLzCUwY7iaZlC15WWRqtxVTtegOeRPFtzY/e+/jykC4CWMVgK8X927Ruvyh0MLD9d+FVqwO6nMY/fmnT+khp0btObmBvEwiArCZKVOCfLUXHdaJ3ToJK81uHvVStfJjKu6CQHSANR0Wtudb1DBvW+7du38
f+dS8aqZVGJR2FnNJWdVIMRq8Cu3Phv3YzPEw7gyFH9mQSScOyTy3mU1hD3BO3miI874L9d1yusAYZr4PQCnF3KhenU4ap3IoZP8/Fq5apMd2+14O5CXLkeg2/wmi/DZHCQ55cYKsAKsACvACrgowACPLxFWgBVgBTJcgYeffIn+eN9T1NndS5+66Cy6fPUZnmaM/FkIp0PDg4JaxdLTQGno7Fa8wOsUkgF38lx+z0ldgx3AC+1zd5VZATy48AqXlBg5D61yegHcvZa9lQ5k2Yeerg6/Pw7kAQTlCMuSX3BGZx+lW0fdQ1TKbfzmHTGH9wu403hwNJzYaWwAvK/+4FRauNxb8Q8UWijKzzE+R1Z5EnXWY9dHutbW/+MvtP2F2LxpbuPqQDyZDy1f5Pvye/521XHd5u3lfdVNaC4wYx5nQFjvmgXoy9l4kArve5ty6w7FnSq0bCr1rTmGBpdPM0JoK8QrX8bUmnrr5uZD2Oz2vlhHrBxqZtVbNEu80FSAl58doMl5RTR0sJfK1s3wIkm0r04YLTqbAbx05CFHqheXYUKTHDkIDvCCEUdrMuMkc6wK8nr6BqlCwPl6Dw7eZM5tdyycoLVVhakYmsdkBVgBVoAVGIcKMMAbh5vKS2IFWIHxo8Cb726hG265i3754yuptKSIPnPVDXT6yavoS1ecr71I5MCTxQAqS/OoTwC9VIWoak/KpqOslpvsQxUe+uHmK8jLNnL+4UEVhQkSaamsVGgF8FBxdqgzPqzWPHdLB96FtTR59VTLwgGAd/cHXtOSwAzxxuLhG3n3QoPDlJuTZeyfzN0oc+CpC9GFePnXfsIzvJPnkSAJlxGqOydb1ESGNmNd3eIz+dJNl2rtjbnTSV+/S+s4CS+gqR/QRp1/OsCuTmjrgb74Aj2FI248gDtAO9kATrLEhxv3hemFzuGLKvixgohW7jvzpgDkgRMivFe2L076GO156vda+2fVacq8M2jK/DNdj7cKFXWqWOs6YAIdcD+G5um4Vtymh/2EExGfaTjU/YbybudX30du2irxe5kbK8AKsAKsACugowADPB2VuA8rwAqwAmOkwA2/upum19bQ5WsiD2kH6pvpzEu+QU/dc6Pxc90mK7umKkRVdx46/ZIpZIEHMuQtA1xIFtypc0U4Z0N7f9LQxrx+qyIWiQC8bBF9hYfj4rMnU/5HrK+L/w48piN/tI8K8VLlQgy+Ej+nwMyFVLJgqZGza2BwiOC6U+Hr4M566v3d03FrAcTraO2nAQHD4nQW8GDyRcdT+bnHeNLAqjPceMUFuaIybEi8vFd0lq61XAERAA+wtr2v3k/7Xn0gobk5FbVI5fwDgkalOwzTKbRVuu90RMQeoKxzeISm1QjQb+fCU8ezgoiPN+ygJxp2RrtlievDruG86rX80eq5tGjXk9TTNnq8zvxlH10HHmC4nXvRDPJkqLWXeej0BUREKHqmFFGSX/DIPRmr3IAAiZgLN1aAFWAFWAFWQEcBBng6KnEfVoAVYAXSoIAMlQWkW7JgFt1+89V06+0PGdDu+qs/H53B9274nfF39Wdu02vuFInSxcNTqkCM2/m9vJ+oS9CPYg1283R6APayNnPfwM4QFf+uK+bHTgAP6bpg4GnP76cO8YpUTI0k7B8SKcKKz55ExefEh4fel/2qY9is3Rq+Gj7HeEu6z6STM5k149jwvq3U99ebLYcxUkEhrOyKb1FfzTxLd4yVC08dDDBvQITI5YtQ4gJRDAGt7D8vT3ba0ePNRSJ03Z0IZYTjBo47FWQkCvCCHe1UPf1Yqpo+CiZnnPMx13XKnHvgWG5hqepg+IwBkKuOSNeTpaADrnsAXkBECV46xf2tS7gLnZqsMIwCIaoht1S4PMtEiKdd21aXRU/cl03b6yJ9AMyPn9FJpyzooFdn1NPzNQ2xh4rxsgqyKUtAWtnw2c22AHir2reJvIfxQFpHNt0ceAhHb+pw/gIiJlRdVGXVzTmoM0/0wRcz
EljrHpPKfipQlL878DlON8irEbrg9zI3VoAVYAVYAVZARwEGeDoqcR9WgBVgBVKsgIR3MlR2zeevpau/ciktnj/TCJv9g4B50nG3efteuvKaXxouPN2Gh3TkwkNLlZtMdy5u/byGWqUS3Mm5ohJtUIQipyL02FyJ1qkCrQR4e8vbDXCHME5RWDLa7ACeV/edHFB14flVQdIO3kkXIdaDdeHfxR//Gg1PW2h5yXR+9063Syn6ftHnz6CcebXa/XU7IkS7rCjP1Y2HXGsIk7MqKIJzeQV4AHehjg5jmvnhGioIx0Lb0oWLaNnXvum6DOkoA5DD/cEuLFgCSzjWEAKpCyxdJ5BkB/mFBOa1vyNIHQLi2TWsAQBvUI1hHensBPB+eV0gCu7k2Bcub6QZ5QPGP1+e2kCvTKun3AgnjmlZJYEoxAMsBZlGAQnZ4MA7JWuIdr79G89KeKlCay4I43Qyt8qtnic6ckCm/d6xKmABdy2qH6PYR7quc+gCsMuNFWAFWAFWgBXQUYABno5K3IcVYAVYgRQr8L2f/p6WCFgnQ2UROrtk4Ww6/6yTDIB33FFLYvLeLf/gFbTxn7drz0rNgwc3WbpdBtoTFR11XYLpAHdy3l6hopf1oq8Z4jkVsWgu7ab+HAFRLKI3UcQib1FxzOlfy9pKr2dv8zolo//xQwvp/cOLjL97gQB2J7OCd2YXoTwWAA92w/yLrqLAzMgczM3NiWdomyJ4J+eC+SMZPp7BzSGlukUevAC8voZ6GhqIwCM0K4CHn+tCvNEQyoAxf3M+MFlkA0AjU8IfzdcBwEuP+DwcErkJ4URVW9R1Z7hUrR16dgDPDd7hPAB4eGVnDVOOA8TLxkTEBa1OAQAPrx1v/Y/nMFrd8NlEq7/6CfIiFdDzqVGkIciE5pRrVX4eSkRYa6pBHioDTxKFpbixAqwAK8AKsAK6CjDA01WK+7ECrAArkEIFECar5rS7QkA7hMjiZ3hvtXDk3XL9V+m4IxcTClv8+g8PGiG2ug0PrrIwhLkioe4Y6eoXKRqBh71RSKGeO53gTp4XD7PFIvyxTeQsS1VTIZ5VGO2AgHZdhf3UFxBFLiyMRrkLi6jyqjlx0/ML4GFPEIqdTPGGvr/8gsL7R2FijkgXZnYRygUYphSkKhMOvELhxLNryIk38Mx7FN41GsYYmDtFOO6mUP7pR6Zqu+LGlYnx+4R7B3nEZLgpIIBbrryOfXW04d7rHeda2Dad8g5WUrh/FIJ0F4ncaXlhyhmOhbZyIF2Ih/6ALAPN3XRwZxvt395Czfs6KFc4B4/80DwqnFxCldPL06ZlIidCCG2P+FzguoFDMOLizDL+beW6U89hBfAQNnvLdbG5yY6f2UHHz+yMmd7ekm66Z1Ekh11AXM+BQDwkzK6IFEwYFpNSAd5XZhxNC4sqjWO9QDxdeIdxky1AE3GZwoEsQpQTdF9iDoUCiKXy/unlmoEjFrnn/n/2vgQ+sqrM/ktSqSydpJN0uju97xvN0iAoq8PWLCOb7OCgqIjKpqOoIDPuKA7OfxxEHBEFQVEREARlacSFRQRpdnqhm96XdNKdfa8k/3te5VZuvbzl3lfvVVW6v+uvDJ2667nvvfzueef7jldKAJXIi8qtFy+GMA8ujAAjwAgwAoyALgJM4OkixfUYAUaAEcgSAk4E3er1W+jSz3zHcqKtEh+E2pqYWGDqjYIQ6xfxiboKtywt13GYSUKV0NiaThbhEFglDju5yFOEA1+NUFlhTlEW5MSLbRC52/7UTYldwum0V5g4lPZQX3GCugoTVgggVGl2IZEbeYe5hkXgBckDONi0hwZWr6Mh8XOor4cGm3cl4RvsoIL+nZTo73CFUxJ4ICsrPn9blLCH1rfMzVYuDub9woDDJOfXG/d9i9q2rnKcy/SVZ1PZnqk0NGAz6BgqpL7SJtpT+zL1lO9wbDvtA6eTTl68Nc9tojXPb7IIr2S4aYH1vJCE7YQZ42nRkbOobmZ1aHiF
3RFcaHGLIDeedZl5qO7UsZ1caK+5cDSxcs2RWxyn/KsF62lLZaf1XTw+ZGEHsk4W5MOLlcfSQo/nl1XTNTMOSetPh8QzIe/QOYhkKL3aRF67TEqFeIFRXhIT+Q8TnuHWTmPkm3mSyXyidOvF3xTsDxdGgBFgBBgBRkAXASbwdJHieowAI8AIRIQA8t8dKkJkJSGH8NkTjnmPpbZDQc47mFqgtHV0WQRekNIiwsugJPAKHwrSbxRtYGTRKdQeCOeThCNIBTiSRpGHTmcNUyeUkXTz1amfaR2EBe75n43U/nZ7SnGnklpq/06hs/L7sAg8JH3vEq6r9hBLt3X2P/uiRdzJMtjRSkOdybxtsgwNdNBg9zrHLnCdJtVUghQ54gMUPzJpqJGvBdendLaEUglKVxgBtAtDDR3VopMKD6q7Ga+cYy15SMRMDw2mx00XDsYFYZVMgL9t2iOOJJ6OCu+5X79Gu4XiDpjHxDokQYx/SzWbxP3ICw7MWxLPrsLDniDnnJNiVa5HV32H+m4EXroKb8hS4qUVYWoRryqmhMK8q+o7tW7HnvXC1OLJUSG1k+cup8nzkm7kJiVMxXVQMgsKPpDB+RKCHcQRN+javfYKL6pilmsPF0aAEWAEGAFGQA8BJvD0cOJajAAjwAiEjgCIux8Kl1mZ3w4EHgg6hMuuEAYV8vuzTjk6Lf9d0ImATNjTnlSQIRyyWRB6fqFlQcfKtB0OnVCxgMCJC+VdPuTfyhZm9hDhtkd2UecfGy1IVVIL/3YzrbDjH4aJBfYEBITOIdxO3mHeIO8G2tMJPMzTi8RDiC04j9j78pvAk3sGbHCtyr1CQnyEIDrllnO6R+wk3sKnr0lVG+xPD99WyTtZyY3Ee99tP3G9JSV5B7IL91u/YvIg88clw5yTijIo8Y66MHuhybrPkmSetTit3d1DneJZJ0vSHKVgFBGJ78XW0EQHBRQcZx+7P52Fm1bVQ+fsn7wPnYok8RBCO4rAw1i18RSB50be6a7VpB7Iqh5xXYb54gPXBa5tOBK3CmWf/Znw5op0V94yodyrnllKE2Y7h3qbrCeMupk8y8Mi8kCUTxIGFlwYAUaAEWAEGAETBJjAM0GLUURBnwAAIABJREFU6zICjAAjEAICTsSd7FY6zOLfKrEXwrCCrBtM5ZWrFqGovULdpkPGhDG2SR84iCNUtkTknXM6HJr0FWZdVRUYZr+yL+kI6pVrqvuxRivkqm96ySizCq85BVHhTRuqpXMHj0h1q2vkgZBZfGRB+DEIoER7i4icHU3god5g304RYrvTcQkgYKr+5QxxQ5yad4SzvFaHBLnllh9Mhl/3WjnE/NV4ksSbsOF9hI8sKoHnRN6hXnfZdto+/dFROLoReE2bW+iF+163QmbBz7m5y8qwWnyPegilXXTUrChug0B9SqMN5DTDHBsFYaVweFafqbDa4fx4buQd6joReE757+yTBYn3wvQG2l49OjS8ZEIJLaqooROqZtE8ET6brQKyyiSU22Re0p0YBCmczlc+toPeXDEcJq90hHsAL4smzR1H+y+fRJPmVZgME3rdMBy1MyXyQH7C/IYLI8AIMAKMACNgggATeCZocV1GgBFgBEJAACGycJt1ymEHw4rbhCrvikvPMs5xpzO1huYe64BrJTaPC9dJcejKl6I6diLpPw44UeecM1l7mKFo6rjqunVChIMePu8v/DttKxgJafVb+zkDh9N0mpCqhmsmXlzkm0ur76HHrTbJPGojIZhOIbTqHAY6XnWd0rhjTqPJJ33Qyr3lZwjht66wvjd1Z0UOsTJxz4Ho0AlDbr1gjTXVvs4W6+dgn8iBKMJlC4fSjRXs63FS4TkReNifd/+xhd56ZoM2MQrVEJLMDQgy5vQvvD8sKAP3I1V3qvJRdtYr5tgmXlKoRJ6Vo1DkJ6wW7F2iX4QkO5vSOhJ46NcthFZdwPhqQSBO6qD18WROPOtemBin9y6bRbOKq0SYtQhFD5BHzg7S
yt0ttHJP8tpQy5SyUjqktpqmlCfVXXheNLb2aIVxB90IvHz460820Na1CPdPGoioRRJ48nfHf3JOzki8sPOZqmYXHdbzKaGFNcwr8FKECyPACDACjAAjYIIAE3gmaHFdRoARYATGOAJQYsAVEwffWpFnLh8IMpXAUkNl60V4UUNLtAdPk+3UJbB0+3Rbt1974CIdhf3qqt9vpd30QuFaLRLPTt6hHz8n3mdX7aBJiR6qe+tNKxQTZ3gkz48jDna4DDRsdp2ylwoPJhYybK9Y5BNDSKqbWswEkyB1ZbisSX47OY5ULEHFhpyUbgRS/8OdlPj9CAGE9p2bN2lNF4YWzRNeTqtrJ/CkQ+7Pv/LUKEMUv0FknrzzbjhOm6zw6zPI95JA1Q1PVseQhgydPf2OhLCTAy3a6xJ445PGsqlSsl8VVS+rsVxY5XWM0GoYS+A6Mik7unos4m5H94gbsVN7EHmnzagX6RJKhfLau67J+E51ETIL5V0q5FpUAsmrXvf2+/XC/zog02EDtce1XyJeRuC6CbPI5zlejOkQeXVCGYlnKhdGgBFgBBgBRsAEASbwTNDiuowAI8AIjHEEoA6AAggFCbRlyFkulqUm/YeCBgYb6iEP+azau5JGFvlQoNyAagKYZVpAPiC3nwyXNekviBus2r9XOC3CZg8fXJimvJNt3da/ubGd7n3mHevwfmBvCy3tSVcF4ZBaW5FUAw3sEbmx+p2dfN0IPLuBRTLUWJicuJAvJlia1FUJ10zvGxiUQInltgYnAq+vtYX6W51DkNV12Ak81cRCEoggV0CW/+67fzWBIK3uxV853lLJ6hJoq+5poMbXOqjp9SQxWXfgOJp4UAUtuWSy0Ryk6i4IgaoOJIk0kDlOqshbvlFE695OJ1h0wmhnzhkt66s6exqVDmMl5yD3Av82ec7d8c5GbbymChXeR/abFfmLml9/8Y20OUkiz0qbCJZa/ALKPLUglHb/5WZ7r71wj4ow1MDfGfwtjKLoEHl4wVFfWxbF8NwnI8AIMAKMwF6OABN4e/kG8/IYAUaAEVARABnW1JokUJDTrVscYsJMbq6DNg44OPhDCYFDOA5STmqqqA9aOnO11wmqfpP92A0qgqjIwsrFByJPLdOHJjgSd7IODuV2Nc+9f1tLm5s6LEUnQiv362qmAwSJZy9pJJ6LCs+JwCuavoDKLvj3Uf1hLiDxMGw21Hgg3MpFIn4oa8LKG6kSOPY1OBF4AEFHhWcn8JZ89lqqWrjIcsXF9aeqXH9/89+C3AZWmzNECK3XGmTHIO7w8SrH3DzXIvP8SiaqO7e+pcs1+CWEsMt7MogKD+GzdvVd0cIKqjiw2nL3lQYn6lwwPl4M9Itns1seRVn/D1t3+irv1L5xn5w7bxqNLyj2gzbw91J959SBNBBRTVDUerlQ4WX6AkQXKHlv4KfdhAmk8YSqEt2uuB4jwAgwAowAI5BCgAk8vhgYAUaAEdjHENi5p9sKm0P+HRwuEMaVrWJCYOVjnj4khG9q69XKcaRiarJuv70wcYP168v0e5XARMjs82t2WiQaSA8c0kHeORF4GEeSeEN9PTTYPDrRvZ3A23FkIe08uXbUFMcnZtPM3uOoemCOuIa9lWym67PXl2ovL2ORTMdwWoMbgTfQ00M9u7zJMJXAg/rugM9/0XJndVqDdKANsgYQeLLINSC3W8ewCy+++9u161OKO78xvEi8bO6Dmp/OxI22pHSIJk9JX2XBhDgVHznBIk/xzPVSfakYOuVRQ+jsH7Y5G724YYt7c0p5GZ06NTqlmxeBh3lZ5L4oIBMHxUNiUBFU54LAy3ZqBkkQq0Qe/vaCtOXCCDACjAAjwAiYIsAEniliXJ8RYAQYgTGOAAgoHOblwSKMkFA/SIIQWGEnG/ebo873pmG90qW0WITfIv8gnBgzLbpusJmO49ReJsRHnqdv3vdPi5RQQ+OQA+/ELneSoUKEjeKDYje1UAm8tR8qoc6FlZ5LAJF3
YNfH0lRgwNgtr5wJHmGGy+qM66Qo7P74CMnZneil7kSf+PSKUPNuKhYGDCWCCEl+kgSJLNLEAuTd+758vaW6cwtzXfPcJlrzvF5uPXWMCTPG01EXHpQ2rj0kddvLbfTMF97VWX6qjhOJZ2oWYjSgrfKIs+gIZm5KvLOX7qLp45NqZifyDsq7okXJaxiqWeQe9VNvquPbVVtuphVe6y20klEO0cfmz84EFs+29vBZe2XVwMJubJPtMNokEVwSeU5AJ8BUIg9yZeDChRFgBBgBRoARMEWACTxTxLg+I8AIMAJjHAEcDNuHVXdTJ5TR9t3dka0oCHGnTibTkNWwF6Yb1hvUoEJnvn5mEjp9BK0DAhPGFI+t3ExPvLLVsZuL2zZ6dl9fXZ72PRR5Q329VDhXEB4zFtDzS3+hPT1J4qGBVDC1dcGoxTxvYnPJ69a44DwQLlvde0DWHW9lOGW3IHuaf91qGVls62iyyDu1DAkZ09DAgEXOxMVSJ/YlyYDusu3UftxamnvWWTTz4P0twsgpbFPtK0gY7ZEXHEh1M6sd90mu4Y7D/0kDAdjUs5880OrXnq8vSLi59oVkq2gPDV79Jol8eAXCnXbEkAVNjj28m04+rptKkykeU0USdyqpaaLcdcqPF4TAQz9QvR1cU02HTHDer6AYyXYmBB7aABOLWBRl6YmTaL8TJmU6Be32URlYaE9AVMT9AQMLLowAI8AIMAKMQBAEmMALghq3YQQYAUZgDCPQK9Q7UnVnqijTXXamxJ0cJ1v5inTX5Rd2rOb3sytodMfwq6drptGwfpAa3h0Sn0HaJX6iHHBiEU2eW0CT55m5H0pCslyEfjUL99Sfrlhl5b5zKn4qPDuBhz6KFs+3Pq+X/4xaYxv9IEj7/oDOj1rhtFY/ghiorkA+MZg09Puq8bqLGgjEXU8sGZYKpSRUhZIsquk9kPDJZlFJn3s/soLqt1V5Dg8iDyTelHglVX51CtUeXmsZpOgabTRtbqHnf5MkL3WKk/rO3g5mFc998V0qEosBiWfzL/AcBiq82e8bH3rOQZ212eskDVOKLUJY53pS28uwXx0S1W1uan6+m1em56zUWY9Uvx1SGx2B5xdCqyrw1DnjOr/iR++xfmVi4qGzbrc6ui9gMhnDry1yIU4STuJcGAFGgBFgBBiBIAgwgRcENW7DCDACjMAYRgAhjzube6wVhH2gkcQdCJAwDmWYX//AoG/oWba2wyvsWK7dyVE37Pn5KRNfXzFAbzwlFFouZZIg8Q4EmadB5KkhjFDf9QkC+Gu/+afnkk7o3EmTB5LXmL2oYbT4rqCuloqPfi+1FG2gN8bdGQiqY9q+kdZOmk44uYvKiiDvdoxbYf0TZIvM5Wcnm0oTk2lq1/JA88qk0Q3P/Iy63u6kjz15jFY3/7hoC335snO1VHf2DnVJPB3yDn2rxhUgLFCsPIk+Kxns6aYFxzTRkuVdlvoRbcoX7C8+B2hhEEWlkbDWmLbzsbxnwgubL6JXm1vpme1NaTnk/NabawLP8rYBievA4E6aO46O/9RcK8QbeEWZZ1LiFJYBkB/uXt/DwAkGPFwYAUaAEWAEGIEgCDCBFwQ1bsMIMAKMwBhHoLGl1yLGwgrHdErUHQZEONyBNMqm0YbXvHGYrxM5lBqHnXxRN5sHUDk3LwJvxY/7U4o7vz048fKYK4nnREjKZPz/ee+Lfl27GlrUVpSI6y4ZiijJO/z3ppI/02bxCVJUFZ5sD1KuVuQe6xX5Hu3qKUnegWBIhhmOqO6cxg+DxGt8Kz034MSl9a5LfaNxA13/t59Z3yPc8BMr3k+zG+oc6787uZH+fNBq2iB+XrDoWLp4yfFBICSQeMiHt3tL66j2IO4WHTnLNWzW3sDuPAsKD0SemxoPxN1A6x4a7O2hKfNeoSlzX03rsmz+/lT3rxfllMiT+Sxxzbi9nIgy7BchtK+2tAiiOUmI6UQnSwLvsgWzA10Tuo3cwmhlDj4nBebxn5xD
k+aNOA9XCBIPoeuqiYju+Lr1sm1g4TQvmFdAyc2FEWAEGAFGgBEIggATeEFQ4zaMACPACIxxBKBMgtMhCKnJ1aUpRZ7psqLM9Ya56IaLms47k/qSPJMmG8gxBZOAbObocgst9lPeOa37Q99NV4NI4qs/AeIrkbYukHogYh5buYXgQutXEE4LJZ7qTIsQWhB3CJktFD9lyYTAgyvtLPFxKkliIN3IAcq7vuJdIpU8UUIzvjNoOO2mP6+jrsZOx7mVTxxHs46bP+q7e99+mu5dNUJm4j7Fvhz32hIrp5ksG+ubCB+oai0X4LrZ9J1/+bjftvh+DzJPEnmLjprlW99ewU7gye8twlQsBpjLVYC869+1nQrHJUROP6L6Ga9R/UznkN4Z19yYUxIP65A5/nB/tIpconI7cG9UCGKmQzxX/cwqjAEdbnDHOxut/yoaNkCQ++7WH+pNipfQB6a7k8VB56K227W+g57+8YZRXckcfHayUarv7A1UtWPYRF4uDSzUdSL/Ha4hLowAI8AIMAKMQBAEmMALghq3YQQYAUZgjCOAnE572pMOihPFgQJ5zUwcUqMm7lR4/cJFs70VIM/A+SBfWlR57vzW5BYK9ssvpZsd+PWD75EX78DlRRZBBIUd8qe5rQsHz/LSInr0pc1aBJ59/Jl1FXTx+xc6TisqAg+DgWyFGg/XfevQdtpa+qRFTGpyd6n5zm37Nx1IrTqduzpo81/Wa9Wfeew8GjdpRI102gP/6diuUJz7pQIrqW4arRx89Jxvao0ZZSU3Ak+OKcNq+/sF+diXzD8oy9zTn6F54tPxUjV1io+9LPrB76Ocunbf0jQFRBP2pDSun3dQexBbRdXIQjWDGHBwt7a+F/93ypTJNKU8+pxrTiSeU/47N/JOXarMAwniHSRpGIRoqXiulQ4T+UHxz7Qdbtn62rJMu+H2jAAjwAgwAvswAkzg7cObz0tnBBiBfReBhAif3SXCaFGqRUhPr1CT6BySdEiesFENQjCGPQf0pxo5dAhlWi7DemUoK1SUsgRR38m2n7ql3CLueoT7KXKPuRVVEXnTgyuNYb74mAU0c2KlY7soCTwMCMJl/Lg4bR1aSZsGXtYKQbRPdErnciobmKy17lX3vUbdA81CbdZDiaGRfICxglIqLaqm4sL0g7xK4rkReBgYxN1wWjlH0j0fCDzM88GTfIwxBrfTQH8b2ZO6Lb/922n47nmonvq3jxBQCKed+Zn0OlobEkEl3A+ThIIZSjjku+sTz9Goyx+27qQd3SPXE64FXBNQZgpz4lTB78+aPZVqY9nLtwYS780Vu0QIf1Jxaifw9l8+ifZfrnf/yGcuTETw7IXKORN8nZ6ZUe+Vvf8S8YydIFIwcGEEGAFGgBFgBIIiwAReUOS4HSPACDACYxyBBmFkARUSQr9KhLKqRYTVuhWVuANp5EXyhA0LCEYQSz0il1muCg5/pfEiax5QbVl5sASJl6uCHEr2OQQh8JKhmUSnX11GldMGfcOAUR+EKnIAbtrVTvc+8442BF7qO3SSCYFnN7FQJ6Veu3Bm3VPyGrWVvpHMx2Z4SemG0a5/+k1q2L7OExsQeZXFU1J11HBaNwIPhAjCEXHfSuLGng8tXwg8rzDawYHtNDjQRUOJdMWoVN/ZgWu4bXbar/JBhSeNKkAsoVSWJ/OaZSOc3k7iYVypzgSZiGfURQtnUF08rvViRvsmNqj41lO7aJz424KQYqju1Hx3Bt1YVWWOVfx3UHMkqJa7xVxy+XcEz23kwOPCCDACjAAjwAgERYAJvKDIcTtGgBFgBMY4AlCMgJCSudxUYwa5NJX8yIa7qhOkOPRgHrlQvDkZVIRl/JHJ5eM0B9PwWRB3SAI3IAR3MoxWZ07J0MGYRVSs29GqReL5kXdy3GeqvqIzhbQ64xOz6cCujzm2k260al6y5pLXCR9cUyAkTULHdQi8XT2raN39a7TWYSfxlpx/kNXOTuBJ1Z2TeYHMhybDKPOF
wMM6/nbtemp6fST/H/LfDQ01i2tuDw0NJKyPLDWLNtGhn/+lI25920qp+eGRPG4TTr3IMrXIRfEyqkg+L4ojNWKQa97R1WMp8VbuaUnBMKWslGaPL6f3T6uzwmdNUyOEiWcUIat47kGRh/vAlCiFUrKxtSeQ8jYsXGorS6xQay6MACPACDACjEBQBJjAC4oct2MEGAFGYIwj0NU7IA5BSQXMpOqkqkomG8chFfmHcCDts1w8080Msrl0qb6AeipbBYfPKqGUcFq7F+GZrfk5mXvous8mlTpQcYkUasNOAiYEHtYoc8pBjblqS4uVD29zU4fj8o9eMoXw0SlBVHhuDrTIVei0f5LAw3ykOYRULfnN0Y/AA3m38qXfUdWm/fy6Sn2vknh1SycT3GmliUVqfmKfMEe3ItV4B0ycQ9886qPaY2ejIki83W90UkwsBktIJJJ5AVUCr2ahIO+uTZJ3WLOTw6oaShs2gbe76B3aExtRTC7oPdURGmlUkTRYcA41l0YMZUKxG6Whhdfe4ZmJnI9wGjclusK6JpxUwuH1LYnSgVEO005jJJXDpSJtxEjocVhzMekH+Vyt3JVcGAFGgBFgBBiBgAgwgRcQOG7GCDACjMBYRwD5hJoEaYeihhc5qc5yudZMnXJN5q4qDr1yLk2dUEbbd3ebdB16Xbu5h18IrQyXBYliDx21O9HqTFYmmi+OJfNTbWhoSyPxoLpzy3fn1f/r5T+j1thGnSmQnbxT9w8KUyd1nUrgyUEsUwUNNZ6ficWfd91IvWsKjAg8zKEiVm/lxJMEHtYBFZ6uQjDRO0SJviE6a/uFNHNgplC4DdHiY0upbnaMJopProq8Rt76+U566SfbBDEn7pnBpHsxCLzqeeupZtFmy7TCr6imFmEReO+UPEbrxMepzBcknkrkVZUXGxlVSKUe+g4a9umHidf3UJy1d/fTeDFvEIkg252I0UzG8GoLpVyvePmjk1s1yBxMHGvxQqZMqIabxTMhVwXPmEmCwOPCCDACjAAjwAhkggATeJmgx20ZAUaAERjjCOzc022pYqCWQGgPDp25Vtw5QYq8ayBkEDoVRTE158gHYw07gdewfpCeun10Xj5J3CVzp41Gb9LcAlr+yeB5maQbZ1sXQrINk8q5bKYOiWcn76Q6CmHhfvkJ3636xaiRgRMO2W7utH7qO3R435ZLqGrjEmMCD6YWZUU1FoE385Bplhvw3ze/Q59Z8RPfy72jaUCQd0RHdB1FR3YfNUpVCBIPZF62iTwZ7ggCR+5Hou83lOj/jbWmROse66Nbwibw/lF+S5rqzm0eR3ZfQwvGLbWIKL/ryqkPiQPuDRBq2SDRVMWZSnR19vT75i998uEN9OTDG0ctZd6iajrpzNk0b3GN1pZl6xlpJ/JgMGQvuUzDIOcCRXt1RfYMRbQ2iSsxAowAI8AIjDkEmMAbc1vGE2YEGAFGIDwEmtp6U+6q6BVhqlGRZJnMOqoE5GqosIk5B+bTKQ6KmbgiZoIH2iJEtFUYj6gqM3sYLfLcJVVc7qOdeHmMJs/LLC8TcETIHhQ3YZEULUUbLCXe5pI/pyaPfHfjB+bQrN7jUr+TY/cLRaluqHd3UQPtGLfCERSEB6PY1Xt+6ju0CUrgoW1NXKxLkHez3jMtRVa/0biBrv/bz1w3T5J30/tn0AVt6TnhUnnzhBoPtPfRl1ZkjcSTBg/254lK4A32dFPfrm3at0HnS9UEEg8lUxMLXfIOeeQgzjyi5xqq6JmnPVd7RVMSLfBAww1xDVcK5Z2qOJMvKYpFeC2eG07Prh999xVav2Ykp57TPEDkffpLB/tOES8YGkTIajYIS0zG6yUM1IA9goDNpYFFjSDv8JKBCyPACDACjAAjkAkCTOBlgh63ZQQYAUZgL0CgWyiW2sSBbrI4cOU6LNQNzijyKWUSKgyCAmJAkH65Kk4kolThdXU1iVxjgiDp60pNr7i4jIqLy6m8vC71u0zVd/a1yzBDt/DV
MLECKVIhlKPYRxB3pqF6XiQeSBsQAlKNN6VzOZUNTPac/lutD9Jbbb+z6kz/6znGS60rnUtzjp9PxbXlaW1B4iEn3htNG9N+D/KuvnMGHSmUdzMSMx3HG8mhl3Tc/eDXkgRYVAWYgVh2U6upBB7m0NewjQZ79ULRu1+ups5/VlPNyRcRQmiDFq+wWbVPrAXMJ9yKaxPz6X1d1wQdMtUO+4GQVi8SLeNBRAeWs7gIG5UOuWqfqqOrmh/v2o+NEOV+c/Aj8XKZc87JsRbhxLvFy6pcvpxCntmY5RzEhRFgBBgBRoARCI4AE3jBseOWjAAjwAiMeQTUPHg4eOciV5MOiGE6v2ZC3Mm5IqdSvLgoJ864cg5OJGJTw2v08l+ep9VPHu0KK4g8kHjTFo3LKHTWbQDs1XhhAOKV6F9nz73qyD0MGtYo+/Yi8VCnunAKTR5YRomOCb5KIhhY/KXx21bXE199P5W0TjRa5rRpB9Cs47xVXiDyUBo3Jmjmi+/V7l+q8Rb9SwktfH80ebjcVHf2SfZ0np36lYkKD+q7bkHgHXX3k4I49w8FdQPnsSpvIs4iPS3DDUF6KhH77+28miYMLNDG3F7x1RVP0871G2jnuxstVaxlZiD6P+CEY2nZ8uMD9+vUEC88ULxeMMh7CKG93/3qi77KO/s4n/7iMtdw2jCf10GBUYm8YkGc7WzOnYEFtrq+tizoUrgdI8AIMAKMACOQQoAJPL4YGAFGgBHYxxGQqjuop6BQyKWqzG0rkoqOEuEimDTdCFJkUnmExWWaT8/JBTbInDJpo6oSsbaO5jfpvvuushRDKL1bT6W+bf86aoiiyncoPv2PdPrFV9GUqcsymYJrW+wX8j3h4Aqswwqjk2FyBaJjqEbDUtSAyOuJNaStpzQx2VLdyRx/cGz2C5lGCC1KSUsdTXztX7SxRQ68RSccSuMmVWi1WfWXHlotPiZFqvE+9J260MKcMb5U3enmeOvr/k+hBnwrNXVdEq/httk045obadzCA0SOwGJLYeYWCuqFixeBJ0NmEw65Nu2mFrrYg7SzyDtB3NmLdA8GWXjSJz5G9fPm6HbrWU/XQALXxI6N7fT9b748irDUmcj3fjYSyq7WBzkI0qytq1+nm0jr4O9ahVBMdwm1tG6IfdgTwrU6oaok7G65P0aAEWAEGIF9EAEm8PbBTeclMwKMwN6FQFtHFz38xHP09DMv02EHL6ErPnKm0QKRBw/GFfng1Oc1cYQgNQrXXFMyyNSgQge8bDrjus1HKkzgOBoXB8Tv3nyEIzaJtvk00JZUDpVMT3fc/Njlf9FZbuA6IL/KS2KBiBb7oFLhFSRcNvAChhtK8rc/MeRJfsGFtrF3dRJrAxJv6fKjafzkkdBmv/k+c1cHNQkVXpByyXfrxJ4UWeGVfoSkX/9yT0z6Ghx4k/p6vpLWNUg8GFq4hdMONh9NtUfeltZGKj1N8i56hc/CwMRyaHZ5wAQNo73ri//pB6OlxgOZd9LlH6NJc2b71veqIPdE91kpTSugOsRzDaS4rlWQmwoPpFm/cMwxDWvPaOEujfGio1jkBAQpi2dRUhmcXUde5CPEvnBhBBgBRoARYAQyRYAJvEwR5PaMACPACOQYgXMu+wqddeoxdPxRB9NNt95L0+rr6LqrLtaeFVQsONDkAynlNWnTEN8oiDt1flAEgvw0JRS1N8anIg7JIGJgGvHscz+lV16+y7jrg99zKeETZVEVWkEUOSCWq0RILhReuE7DUt0FWTOUPF7klxpGi/5B4lVt3M81nLZ3fCOVLBqiIxdebjSd333N22jAqzM40h5wQpkV5gzyt1WopEyvYbmnIP6DtHci8TBnEHl2Ei9WfiiVVn/PdUnJPYlphdU6EXiWCk48/KBc9cIhCIH3+P/91FF557QYWKcUCqLpnM9cTpVTZxhf55JkNt1T1XXWmgMAEUXnPoMr7UlnjlYNIj9nu7iu7EYwRhd5SJVVNaLdsTZbRF6d+FsB
wpkLI8AIMAKMACOQKQJM4GWKILdnBBgBRiCHCKxet5luuOkOeuCnL9YaAAAgAElEQVSOb1izgBrvXEHoXXnpWXTmKe550NQpgxjZ054MTQUp1SxCBfPh4GWH1cQ4Ihn2GLMUIF3io3MYNd1GU0LRtH+3+moOv9J4kZXb6Y+PfJZ27njVeIj6KcvoX0//vnE70wY4OMuwR93wZZWAzSd3ZIRPw1HSTfmlmllInEpFPjx8rMhMGeJc3UhVQnV33KQbTOGkTAg81Y1Whge3dfVZBKlOCUsJ6UbiqXOIFV9AsfgFvtOS5BUqqsYM9oa7i96hF8f9IPVrqM5QZNi510CmIbQInX38x+4Owm5jzVw0jy78/Cet55YuwYRrEi7QQXJCqgSenNNIHkB3RSLquhF4MI1obM2eA63XvuFvmv2ZI81E8PzEC5DOHg+bbt+rz78CHHklMepfm2swAowAI8AIMALuCDCBx1cHI8AIMAJjCAEQdJ/5j1voCkHQHbZsMdkJPCzl4cefpR/e9RA9+Wt31Yq65EHBKsgE39VCldObyI/QJ/u26IT4hmFQoXs5ZDtMTBJacLCUB1IclHe19NDPbj9Wd9qj6kUdRqsOWBoXarryuK9aCsQSDtcgMfIhDM8OmiQksR6n8FGVxMO+ob6dFJ9YspiWVp1Nk0qXGO9dkBx4chC7E60u+aWq7sLMJQZnWpREf/JnYeFSoUTbX4u4swMHlRMUV17kF3LguRlVeG2EKYGHvHevrtB3dlXH/ujN37ReQOgoC3XNQ9zW5kTgybpSnWg39JDfOxF4uXSgdbpPJ4tnpJuBhbz28dMkDNzkhkUuwIki/QMXRoARYAQYAUYgDASYwAsDRe6DEWAEGIEsIbBtZxMhZBZhslJ1d9KF19KN111mEXqpg5XD77ym2CjMIZCzCARYiTgEt4iw2nwrUvmE3E5uB/d+ERIYprmBFwY4YOPgFyQs1ARbr1BgqAARAn37j/QNE+xjZ5PAw9heoX5RkUQmeJvUlYQR1Gv266BjaC291fY72tLxljBtSM8qtrTqg7R0/Igbq8mYqAsX2mdFHjzTUjc7Rsdc6myUIdV4Tg6vMpdhhwhhzkdCVcXBj1xdWXkrQYnnZFThheepbbcYwW0SPmvv+JRPJg0tVHLV7hAelqGLF4En5+WWH8/JxCIfDH7kvHXnojrWhu3Ejr8TCFfnwggwAowAI8AIhIEAE3hhoMh9MAKMACOQJQSefu4VWv3OJnpIqOxkmCwUd/j3nd+/LjUL5MJbvGAWnXXyUVozg8MmVCteJJlWRxFXQihSg1CcyVxVUee581oODn3jRP6tZuGyGlWRikK30DjkmursTtD/3fb+wFPINoEnJyrDnGXII5REMOOISgkTGCCfhmp4MMLPcW3KtUQZ+hvEyEINn3Vall2NhzpQtCGvWpiqu6j2Qu1Xkjf4Ha4pFBDeIFt/E/u00RRM1XfoXMe8wm0Sy5YfR8uWH5/6WhLFMucgcvZhLUFCZp3GvPZj/kpBNT8eCOm5i6rp0186eFR3+eRAazoXiTNSLniFYptcPAi3xzy4MAKMACPACDACYSDABF4YKHIfjAAjwAhkCYF77n+SKivHkXh4p4XJQoV3lsh5h9BalEs/exNd+dEP0mEHLdKaGcg7kHgodpJMq4MsVZJ553DAkiRJLlxJsVwQN3VVSWfcsIt6YPciTmRewB/copfv0GmeuSLwMBesE7m7EKIXFhkR9l7o9ifXgmsz6XQZbV4tUxWel/rOvkaEBlePw76QlSMs31V3XnsEohj5F9W12HPhebUPYl6B/jIh8C79r286TgmGHRVC0YV8agidDytXqY4KT05Ihh9/9j8PpSmzK0YZfyC1AO4B5O/LdQma5kBeM/i7iOvf1ORFXTfc02MijJYLI8AIMAKMACMQBgJM4IWBIvfBCDACjECWEICy7r0HL7EcZ0HaIWwWSjv8+6OCtJsqQmvbRZ48/N7EibZP5L1rGiai
pKoLv8u3ggNZiVBpofT04XCV20MiyE63/EpBsFMVhchz53dAh7IjJg7zf3v2jrx1oXXDAWsFedc/fJ0ViUT8YalegmCfSRs1LBhEUXEsO2vRJfFMyDt5DQIPrAX3W74a2/jtmRpmChUh1oKQczzbQOKtK3mM9sTWuXYTRHknO8skB54TgSeNF3Cf9CeGRI7IcNWqP/ruK7R+jZ678RVCeXfgsjorR1+SrE6kSC78/egW/+4RDsW5LpmYMoXhWIscgvW1ZbmGgcdnBBgBRoAR2IsQYAJvL9pMXgojwAjs/QhcM2xgsUa4z8KoAjnxYFaBnHgwuNgu/l1ZUW7927Q0CDdTKCeyldvNdH4ynBTtogxNNJmXzEHnR7T59YnDIpQ1WKOJolC6T4L4ChJGmwv1ndtagzii+uGaje+dXFl1zTrCmp+bqQWIu8XHltJE8VOn4PrDdagqCHWMIXT6znYdOW9V2anmLJTKKhB5IPFA5qGAtENZMPwz6LyDutDWz51Np3zq42nDytyQ9rVUlif3NSziW4fE+/QXl9G8xTXWuE75BmGss7utNxLnb9O9kCY/pu3U+pLIw32BHJC6zsDoA0Y8eEnBhRFgBBgBRoARCAsBJvDCQpL7YQQYAUYgAgRA0N0miDoQc8hxB5XdVvHf7xUKO4TL/sdNd1ihs2eKT6YFii+o2mRCb5Bk+VBUZ1nMD3nn8mVuYahN5PqQmwuHQ5CoJkWSeOve/Sfdf//V2k1PPe37NGXqMu36YVRUc/rhMGwPTZOqvF6h3gkaurbljSba+sZueuFXa1NTPvyihTT9gAk04wBzYttt3X6GGzj4V4v8V1DhhEWw+O0BFHlN4oOyRBB3ukUq1dxUkH7GELrjZKuenzMrQlF1HF4znW8QIwtpYCHHlgYiuB/wjLAXSXzblXBB575+dbOlxHvy4Y2pLuaJfHfzFlfTSWfOcexWNYGA62qYquSg65DPkrBSHATJtwrzCrwQ48IIMAKMACPACISFABN4YSHJ/TACjAAjECICkrh76dXVKbMKdA/DikMFeScVdqiHEkRxZ58uyCOEl6FMnVBG23d3h7gi865kWCJ+qsYG+TA3uRoczkDQBAnl9SOATBADwQKlx9atr9Cvfn2lb9Nsk3fy8Fssct6BKPYjKREqDRWbThixXCyIuxfuXUtb39ztuv7p+0+g875zpC8+ssJb69bT0vnzRtV3Ut25derl7qo9kQgrOqnu3IZzUrBFMbVnnko3VTjmxOO0hpH3FIguPwLYbtjhd01qTcChkkkuPDt5J1MG+N0zI+GeRZYbshPRF3T+Ju1KxD07obKEOkRqAz/8TfoNUtdyVB82xQnS3usekMpHP8fauvEl1gsxLowAI8AIMAKMQFgIMIEXFpLcDyPACDACISEAkg7hsdJlNqRufbvp7R9IKdukWUQu8uD5KR3CClv1BUSjQqk4IJYJEs/EidZvfRrDulaRxNKf/nI7vfTinaPqHfyeSwmfbBYTskudFw6+ULDoGEKAvLv/y3/XWpYfiQfS7rePr6C31707qr/zT11OV1xwmkWQmBAU2SKLtAAYrqRehyYh6Wt2JmjTniE6aEacZtQUWPnkwiq/uP1O2rxho2N3M+fMpmNOPJZmzXVWgUmlGpSdJqYb2QgR1lHiqeSdU8isDsbqdYaXMZmG9uuMqdaRz0PktXTKj2faXyb1ozbTUFWHbkQecqTCcIQLI8AIMAKMACMQFgJM4IWFJPfDCDACjEBICCCXXZXIY5eLIlV3UR9+nNaGw2e5UE1AOQE1oJuLJ+bWJ8jGfEiSjvBVkEy6Ib3SORehwFG5lCYJibjovz+yMXSuTRzmqwQ2fVY4rHloMMaQykL8N1RIbm6Q/3P6IzpTStVxI/G+duv/ORJ3aIjrE/NZNGcOnXvKckdlnt8k8iXPnxrKrKMeXbWjn363sptW7xgxjQEtUVhEtLi+mM5YVkpLphT7Ld/1+03vbqBf/uQurfYf+sSl
aSSeX/ivTqfZCBFGTjwYW+x8d2PalJYtP46WLT8+9TupiDQlItVOs6WUtGMrw0Xx/FYxzYUqMFsvetQUD+pzDqHEE4UDLRdGgBFgBBgBRiBMBJjACxNN7osRYAQYgTGOQJNIPg7CJYiyLJOlm+SByzeTDR0nWrdDXiaYebUNI5dc0LkFVXZ5jSfVVdJBVK3793vXpOW70533ud8+Ii0nnht5ByICa0JqwsHh/IT7zZ9LX7vqU7pDpdVTHWtbRbijGykZqHOfRkH2BuTdd/7Q7tozBEZQGV33r5W0cHKwfF/fvv6rRsuVJJ6TUYVRR7bKkpDHr7OVt1Cdgl/uPpO1qS6q2SLzqyuKqac3/eWKiqlfyKnJ+vzq4rnc0NKTlfvLybEWCkS83OHCCDACjAAjwAiEiQATeGGiyX0xAowAIzDGEYCCoF2QCjiQTBT5e3a19Ea6oiDEVr6ZbAAnEJ9ORAwIE6jQYuKnXx6rKIDWzaHlNPazhfdYv545dKD4HKQ1PRBtcF4MojAc3NWZGqNAYFYwbsS98ZGVG2jtjhZav6s1RaQtnFJNi8Rn3Vff1JqbvRKMLY64eJH1a4TM4mMvUnXnFIqYCYmHcZJqvFhafsdAC9FsFITs8iPv1KGLhBrvW2fX0kwRVmuST84rbNZtaQin/fRnPmGpdU3CfzWhGt6b4qypWNXcfVCrhVnkM8jK1dmVCDXk2T5PPAvdnnPy+sMLoqiJ6ySe+PvVEyaUvn2pRJ4k/n0bcQVGgBFgBBgBRsAAASbwDMDiqowAI8AI7O0IqHnwcBhr7uiLJI+SdE7FQT+I0kVH9ZatvYITbacgPtVcYFHmuTNdV5IkLRbJ7eEy7J2vDKTdc8PEnX2cowYvoaPFx6kENeQY6uwji7gbNk+x991bU0KffWxl2q+h9gIZIUm1mrt3UXV5XKhGzdVf//7I6Vbf53/2i2ljpFR34vr0MgX+6lWfDBRKKwfDfVAjwp0zcd31ux7UUEZTsuvDd+zx6z59b8S+/PHzU42IL1P1HQYEbh+/8jKqmzo9MoWVxA1GCE7KTyNgPCrDrAX3p06ux0zGlDklkZ8uaEi73/g6z2XpAByWa67TnKIysPBbv/we187kas5/p4sX12MEGAFGgBHQR4AJPH2suCYjwAgwAns9AggR3NmcVC1UCxUUlFRh5poLi9iKklw03WSEvYHkQd4nFNP8YqbjBamvo/C5t+ha2lLwum/3Fw3cnFLkqfvZJkg4k2vFIu82tLiO19OfoIbWbtrc3UP3btuVVk8l2Erv2GF9N3l8mTGJBwIPphVfv/XHqf6LBDlEYj91VGTniVx4+GRapOuu6racaZ9oH9QMAW2R8w4f03LuoeX00fdXWc38yHm4zT7zp79oDyH3HXtz9PHHClMLPXda7QEcKkaZTy7MkFndNco8jGETaCaqN5VUBploYjqis85c5HBV54UXDPW1ZTpT5TqMACPACDACjIARAkzgGcHFlRkBRoAR2PsRaBRhs/0DgxYRVSIMEVpc1FEmSIRF3MkxoXrrFoSZCWFkMl+TusgXGC9Oho0iB1Qmpg0m45rWlYfm4ljBKGJFl7yTY4LEWxQ/RCiHYtbhW8cIQZ2vH3mHupuaRvKuOZF4qAOyreT27VbXJWIP6sebmb+AwJPhs1LZB3JINy9dWAQe5h82UZQpOWSqvlP39+7LakUotTAxEcpIr/xrJgSePZz5mBOyQ+DJdUnlWBj55HKZoxLrCdu0QxLFXuZDTs+rqPLjOamiTZ+XmdSHGROMhLgwAowAI8AIMAJhI8AEXtiIcn+MACPACIxxBBAuhoOYDO9rbM0sD54kEkD0dImPjrLJD0LkDrNyOgn1Rq4LCDwc1gYF64P8T0750nI9R3V8uyGEV9is27wXFC+jjxX+T+BQvIE30xV19nFaunpFnqy+tF/fu61BqPFGX4vlP00q8FDGC8KoulzP+VE60YLA+92KpyzSzvTaDJPAw/xVYiXotaSj
ttS5HjMl8OR6xgvXaBCteK7Y7w0dAg9ut2ivmoig72wTeBhTGpDgv/3UhW4YS6LWlOzS2TPTOjKfaCbrkUStToi+HyZhvfzIpoGF05pgXiEdeU33hOszAowAI8AIMAJeCDCBx9cHI8AIMAKMQBoCyJO2pz1JlGRyEApiUKG7FflgZAGypUIQiVhnofiHDD3WXUMu68kchCARvpI4QXsqhYXCcVSsWwg06cLESCitdgeiInLeDSmGFU5tVfWd/N5NhVfyh91UtDNJ9pUKFd5kTRUeTCxO+vhSuv/Jp+hnD/5RW3WnzjdsAk/2jeubXmqxiPSBAcEsDpfYzDKKzXRXGUqyPIxQ3DAIPHU9UKeCwMeapMLRj8Cz5ztUsc8FgSfHT5JWo9fjdx9kqor06z/o93iGgWjtEHuj7o9ff2GvJ4zwXpNQXr/1Bf2+TuSPte5hLowAI8AIMAKMQMgIMIEXMqDcHSPACDACYx2BhGBnpPtskFAkKNLgvBqWmsIJT5kkPFekmSQnQXbiwIucfA3C8VA39DIfrhFg2Fz+Nt2R+KwgibxnlMw9Jsg3wSUNDvtgeJlaePUWlMBDnzet2zyq68IdvVT6xxGzhbmTq9JIL7fr59t/O9cy9Wjv7qfzPpNuYqG7P1EQeH9ZMUTdTzdZUwBZijI13kMzSkccNSsunpZG5KmqO6wnjOswTAIPa3AL23QzsQDJ7KWK/NAnLqVZc+foblXo9UzCUKUT7JC4gaIykMh0gaqDKq4hr7x0cu1QRiL3paly1Wuusm+EocKt1jQ/Hv7+lIq2ILFzUXDPTmIDi1xAz2MyAowAI7BPIMAE3j6xzbxIRoARYATMEGgQRhY4lJmEqspDavHwoS7q/HQgzZraekMhK3TRcXNbnVAVp/audCda3T5zWQ/hs38vuoegrAOJN6L1GpkViDscqhM2ki8ogecXPouRnRR4+L1bGG3xynYqfqXDmvSciZViPQXW9etEZGEPP/6/76equeNTxMPXbv0/envdu8Zbcd/3/8u4jVuDv/61gEDeDexxJh6ml/TQ+ZN3pppLEk+qoMI2A/j2H9po9Q7zEPXFU2L05Q8kTSycij3X3z0/vpM2b9iYqoprDfsHQx03InLmnNn0b5d/NDTsM+lIzePmFFYr1a72XJFbO7tpW1fSJGRaeRlNH5cfpgdqmLDTMy0TYxQTnP3m4daXDF2VpkImY4ZRF67FE6r0wvjDGI/7YAQYAUaAEdi3EGACb9/ab14tI8AIMAJaCCD/FkwZdEJVwzao0JqgqBREHajbt72e3xoRTtfXH65jb9C5mrST+e9SCjuhroPKDkUNl3UiUnJB4D27p5XwcSqSxJtVV2l9DeJEzZsmHUwvvOkomri4Jq0LuxOtH4bdwhl3UOTjK7Uxnud/8DS64OzT/ZqP+v7nPy+gje+4k3eyAVR4500aIfFmXD6HimaURaLqWrWjn77zhxEjEd1FXf+BSloypdizuqpee+2Nd+jO235q1bcbVbh1kmv1ndO8ZPinanIhyVU1n+E/GvfQP5qaHZf2vroaet/EWl2oI63nZKoi86ImHWx9ZLshzU7Oo1+Ekuuo/XJtcAQCETnwuDACjAAjwAgwAlEgwAReFKhyn4wAI8AIjHEEoF5A0nkU5MFzClXFYRthTggnDeJEmilEOByjRG1kgXFK40Uif5f7odVEqZjpusNsbzewKCoSvQtSCgRLMnzRfbTABN4GQV74OBu7KfCcQmjVGS6hcpq+foC2vrnb+rXMoYa1HHfpEqqZX03TD5jguChdEm/bjgai/gTVj6twBefrX/4c7b9kkdZWgbzbtKmAenf3UW+fyHlna4UtiVMhxbAposws66ELJjdYe4SceKUXTNMaJ0glUxWen/rOPgdJCL2zZj3dfusdo4wqnOacj+SdnCf2BLnkikX+s/7EIBWIZ6QknaC4e3Bz0jHZq0wrL6VzZkW3p37jq9+rYbVwJoe6endbX6ghs7rz0c2P
h/DVxtbcpTOorSyxHJi5MAKMACPACDACUSDABF4UqHKfjAAjwAiMcQT6xOGzadh91ik8NEqDCl3ooMwYJ8i1ZqEWjKKYrDHquUSxPtnnd2MnpbpPhsuCKBoaFTJrn0NQAk8nB56TCy3G9yPwTj9kNp1+SDIv2t/vXWMRzAsOnUxzl020ct35qYb8SLz1GzZTdUmJ+JT6bokOibdxI9HddxdSe3MvJXwUTSDyygvw/0RH1bTQUdWtlqurPR+e78QMK+jmwjMl7+Q0pErt3XUb6LFHVtA7a51DmRE2e8yJx+Y0750OdHixgfB+FJh2yJyEt6xar9PcqpNPJB7mUy0UZXiJAaftMAxStIGwVZSEIsyDnO5nfD9xfKnI4TqSLzLoWEHb4YUXXhxwYQQYAUaAEWAEokCACbwoUOU+GQFGgBHYCxDYuafbUsQgPBT5xKDKMyG1ooYAh7U6kWuocZhoDGs8mcsvJg5hCH3TSdCePDgm5xKGgUBYa/HrZ2dBF91d9DlqKnjbqloMpdeQIEYLYlY4o1seOdT9UuJJv+5dvw+SB88rfFYOdPtlx1n/KfNnwcEVCk0QD7WVces77KnfHv328RUEMk/Ni9cpQnftIbN+APiReMh79+jDfZQQRA95qB3lOJLEQz68C+t3WkRB6dETKHZEekiw37xMv/dT4oG8++AhZb6hs+q49j3Cd3Cq3bh+I73xxtq0+27m3Nk0byBGhbvbqGhPm9XNQG0VDU6oosSCGabLiax+UiUWS5FcFeIFQ3lJjH7y5gba1N5lNG4+kHjYI9w3vf2D1CYMJWRKBSzEKd+f0QIzqOyWHy/XL1LwN2OSIPC4MAKMACPACDACUSHABF5UyHK/jAAjwAiMcQRgEAEnWbj6QelWKFgqEFVqPqdcLxFqh7DcX/3y3PmtVSqJchVi5jc/9XsQd68VNVFDYTf1FKyldYM3j2peTjGqLIylOc/KSkHVd7K9jgqvR4SpNrQmk/xvFjnn7t22y3OJn//AMlo0pYa8TB1AsIBQQXg4VKa65c1Va+ir3/5/utVT9ZYuXkjfuOHzru2u/5LImyjMT5Ig63UfpwIqKSikz83caDWoWz6ZKo+tE4SKPzGpN4JzLeTEg6nF71Ym9wQFpB3IO7+cd/Ye8SIAKiqnsHR7LrnC3a1U8o8kwexW+hdMzymRJ3P6wcDATvrjufKD1Un1HRSTJuWaJfNMqodaV5pv4MWNXbmqG84a6oQcOgNhh3xzUhkIlSDwBtmYiwLFb3VF8kUBF0aAEWAEGAFGIAoEmMCLAlXukxFgBBiBvQABKJe6xOGtShyQysTBqFkQBMh1l08lLPdXqSzMNJcfch9VlcfFARImIJqMTJYBBXn3ZPEWKhomZAeEHK1jcA1toO+NmgkUedUFIgOb4kI7Y+hAunhgdF3TZQxo5MKTJJ6b+6wcE+TdftNqhftj3CKdce26KSclMYH90T3o/+bBR+i+3z1qukSr/gP3/Ni13b9f3Zv8zoDAQ/VKEUorCbzSo8W6T5wklF/FFolnQkwGWlAGjVTzCi+iO6XO29VCvU+/pjUiFHl9hy/VqhtmJT9XVmlaYeUshKpV3G+Dmo+GXJlaSCWh18sadS9xH+XyeScJRYT+I+dg1A7obtcPyETpghvmNcZ9MQKMACPACDACEgEm8PhaYAQYAUaAEXBEYFCQClA2gAyBWkY3nDSbcEJtBe4DKpEgBepCEJR+pI9J334HepO+oqi7QpB3jUXJ8GjsryydQ84kHkJqa0DiCfLhmKEP03v7PhTatHyVeGJviubU0CMrN4jPxlHjypx3Xqo7p8l6Kaac6n/lxv+mt1avDbRutzDaVS810O13Vyf7NCTwygSx+sVZm6ymIPAQRquGO8q8a4EmHFEj0/sCyrvSF9+21pV8FvlPLNtKPKnoBN5uBJbddRbrsQhxsSC/UO5cEHimSmIQ4tJ1FcpWU5Wh/67q1cA9PVkYWCT/Hvjnu9Tr1axWnUij
AFUgF0aAEWAEGAFGICoEmMCLClnulxFgBBiBMY4ADs3I6QYlE5KY9/SJcD+hbsqnAgKuTJCLpkYWarhsFCHBOEwidxRysLUKdYrfQT0bmGLNa8ua6YXELksF5FZ2Df3e+moXPWL9HEcLaf/Bw+j8oU9ZCjcnlWLfzibqfHUt9Tck3V/VMu6ghTRumbcjK4g8eykQ11zBOO9wNByWkTcN5EkQ0koqJv0O/Odc8snAW3T+B0+jC84+fVT7NAIP3yY0GKrhXhBGe/2szda/qq9bkNZ3Mu9aUU7NBuyLlaSQiQFC/IW3UvnucO2KJdOguJ/8UOr+1yMC75VJQ+QGxfXjFzJvJ/AwBp4P1pqw7R5htdkk8KTqsV9ch7rKVBWv5L0U/F40wd6priTw8DcLzwSkfDANlc9kDtjO+tqyTLrgtowAI8AIMAKMgC8CTOD5QsQVGAFGgBHYdxGQefAQYlosLEqDHOyiRA/qjxqRc0jXyAKHPKgJsR4oC6MOCcaB1ikvVpSY2PtW1/zf3a8HIhMPGphABw3UWcTDeLGmIoG7NIPofHUNdb7mrE6zwlQ7BmhzooJeL5xKuwoqrenNm15CJx9RTfNnBEv4rhKwfgSKH9aqmYIb2ZoJgecWQgsC74nHErSpsS45RRA5fuzU8GLmlfbSv03elVLf2deohgkHITb9MNP93m5UoWMIg76d8t5J0guqUa8Q1N737SfMLcbrTtG4nqmS0InAk4OC9JFmMU4KQ0ngFWx7lwq3p7vzDk6dS0PT5hrP36mB6ZrcBpUuscgz6UeKhzJxpRNpsIHnAUq2DTfwnJ8gTJW4MAKMACPACDACUSLABF6U6HLfjAAjwAjkCQIPP/4sbRMqqcMOXkKHHeSthlKnDAUDwlNNibJsLhtGFjube3yHlHnuQCphTbpkgm/HPhVkmB1yCGY7vExdM4icnxevCbycD/eNXDcyR9aWZ9+k1pdXO/a5a0+/cK9MZ6R+GcoPu5UAACAASURBVDs0rS6IvCvPrzeaU1j5Cu2D2h1E1e+jyIEHAm/1Pxvob28r96OmCu+MidvpsMVxqrh4uit2ao4yE+Wb0WZ4VPYyqvAbI/bOFip+Z6tjNYRyg/hyc0iOMow2ee0VW2pPXfJ/a2c3Pbh5u+eSQaBhv+xrOnvmVJr91D2CvNvg2H5w6hwaOPTEjIg8uU9hqtXcXGL99j2T75F7zsnAAvc19qzDMuNIBHqBoTMvjC9DiXXqcx1GgBFgBBgBRiAIAkzgBUGN2zACjAAjMIYQOOeyr9Di+TOtzw/veoiuv/pDdObJR2mtAGTXnvZkov0wHV+1Btes5GdkIdUlYea505xaqpqXo6NpXzr13RRqd8fDIfAwh6HGPdT8xPOO+cmcyDu0aRAKvKeK0glkXRIvTNWdG4YyJNfuvBmFC60k8Nq7y+mVDTOSU0JoM6LUPZR4VcUddKlQ3825YT+dS8FSIskw42woaHWNKrwm70Xgod2IGk/AZZOuBSXwNm7toU3beuiv/2hJTW3WtFKaPb2U/uV91Sl34yCKzwc2baNtXd4vGaTJBZYDIm9Gyw664NU/aO1x/xmfCETimea705qMUklee3j2Rp1KANd4rxjHiVjNhjKwtrLECqnmwggwAowAI8AIRIkAE3hRost9MwKMACOQYwQeeuI5evqZl+mWb11jzQQqvJMuvJae/PX3aFr9cOiexxwTA4O0qyVJ4PkRZblaKsJUceC1G1mohE82wmX91q+aDERJpHgZOoRJ4DU//ryV8w7htCCcpKKxVYTMtnW6uxWvEAReY1MXFTbtSkG2/PSFdNIZC10hzETN5bcv9u9x2K8WYdlQeUG5JtcVxMjCzcACY2JN9/3gFSvp/p72Mnp900xrKggRHbLlRUO2tEKRBA7k3YEzNtM5XzjEd1mbirbT5qIddEzfeyzCCyokhPlFqQQNKxTTj8CTi3dS4wUh8EDaqcSdHVy4x15+0TSqmxDzxd2pgo4Kb2RN
IqxWbNi5/3iQpnWMzinpNgETEk9e4+gLzsVR5+hM5mWMUVdvdCq4icJAws9oSSoD8TNsVSpecOF65MIIMAKMACPACESJABN4UaLLfTMCjAAjkGUEQNC99Noai5xDqKydwMN0brjpDmtWN153mdbsGgWB1y+IPIQI4XwCMiyfCows4sVFafn5QGKVxouGD4zuZFK21+GUQy6sOeg46gYl8CYPltHJiSTBJMuunydNLlAkkYIQ4S0NyRxUTqW5qYOef34zdRcUj/p6xuQ4nfn182ja0mE1mqgRNIdaGJgmQ2qLxXXVl3IYNcmF50beqQq1Z/+0hd56cac13YbWEnp3V60goitGpj+sxCsr7aBpExqpfnwvnXj2Apo4TamjLBak3TPxly3izl5A5J0gzEigVLIrDMPAK4hRhdu4Tjnw3Ora1XimBN7PH9hpKe+citW3+D8r757Yiw+fXW8p8oIUExIPyrs57cnrQjfsHuG0iTMv951aWCSr70C2CvLZFxfP67DJM2lgoZNKAdMKOz8eCN5JgsDjwggwAowAI8AIRI0AE3hRI8z9MwKMACOQJQTueWAF3fPbJ+isU44WCqhuuu7KiyzF3Uc/exPd+f3rUoo7/A5htQ/c8Q0tFR4UGl29A6lDj0wSnqVl+Q6D8FTkHsK8ZH60XIbL+k5YVAgzdM3EUfeJ2GZqKOzWmWJaHWliof5SJfDwexyi4WC5o6nfsf+Vz62jlt1Jt9k9BeWj6kysAelaSFOXTqezvn5+CqNcqidV1aQ0g9BR4rmRdzKkUHXyfebh9bR1cxvt6kwqXVF2t05O/XdZSQeVlyZx6ywvpHMu3o8W1Y0m8H5R9ogjcWcH+pKe0+mAkpmWclJVGBpfFMMNgFGVuP+GBMOFvQort2TZH/9uNKVCEb0I59GBI/enrkpngtPeoZfyDsQdXlj020J0oybxkPdu7t3fsKYqw2rhGu1l3CHX5afCw/0lc/ghPUIuStjkWSaEJP5ewJQHf98yMXuB6zNUu1wYAUaAEWAEGIGoEWACL2qEuX9GgBFgBLKEwHIRGgtSrqoinRyB4g6KvCsuPSs1E5B6+Pdhyxb7zg6HG5B4pioH345DrDB1Qhl19w0QlBB+YVQhDptRV0kyJ56m8DLtUBpk9Ii16ygjdxZ00ZPFW0yHIdXAAo3dnGfdwmc3rNlJG9Y0pMZ1IvCqxonDdEWRVWf+IbPp/BsvCJUQMl600gBh2iA/cG1BEQVTi7dWraW3Vqe7757/wdPogrNPdxxK9mHPobZGqBIfuPdtGtflTag0TowRPiifO3J+GomnS97JiX2o+zRaVDSdqsrjGbmFSsI8CkWfbhitCvbgohlUcXDSmVWHnPzGLRsd9wrPEYgfnchI5MX7yDlmxiv2QeBMay/Tysto+rgyKnrpKSr655/SvgZJBTIxIchEr3DXgUNPoIHDTnRcU5gvDTK5l2TbMMwlJHmXyfWn5sdDiG9HAIU5XiBBoc6FEWAEGAFGgBGIGgEm8KJGmPtnBBgBRiBLCBx+2hX0wqO3ERxn777/SWrr6KIPn3cyHX/UwaPy3l0qCLzrrrrYMrbwK32JQWpqHcmDB7dC3bAuv74z/V6qz8rF4am9q1+LxMp0zDDbB1WPZGLMYarCO6l/BtUPjVbM2RV4wMWJwEPY7CvPr0+DzY3Aq6lKhmmDODnja+nhtEFw734jQT1vpodQ11xUEqQrS4GKg3oyj5d+WLbfHv+3UCau3S1Udp0iTF2QeBMb00PUQdpBedc1biRB/sIJFfT5o+Zb60DY7C/LHtVaU/ztKoqvGk8T7p9HtUPVVptDLhxPtYviVDqr0Eg950ZIak1Es1L8hbeoaE+bVu2B2irqO3ypVVeGP3f2wEXbea+c1HfIYAbyzlK8eRiJfOWa2VpzClLJicBDPzJUGP/t9vx1IvBkyHZxLBzFZZA1ubWRc4OCDSYXus6+6A+EOghovFzC36hMi1SSFg8rU036rBP59/B8
4MIIMAKMACPACESNABN4USPM/TMCjAAjkCUEEBZ71qnH0LYdjXTJuSfRdmEwcPUN/2up8v4p8uLdeufv6ISjD6F2QexVCpUeCDzdsnNPt3WgdTOM0O0nzHpS/YNDX4E4Cfb1D1CPcCEcawWH2NrKOA0IZZefU2NYxhy6JJ4beQeMdQg8rO3d1enqu24qdsyBV1MZE+rRopSr6GHnH0H4BCkg7pp/JXLX2cg72VfNRXEKQuTJnHzoB2o8v+T/fnnhoL77f8+vC7LElApPV31X+839qUSQd7LUDo6nuPgfCkjT+v1Lae5p5VQ5z1tJ5EdIBlqMRyMdEk8l72RX6l45qfHsBJ5byKzT1OBKi08UxY3Ak2Nhr7A2kNx2ktFO4GV7r4Lioe5Ve1fCl5CLUk2ohvjqvKyy7p3asqBL53aMACPACDACjIARAkzgGcHFlRkBRoARyF8EoLz7sgiXVR1mET4rybqUwcXkCVqhs+pKQVYgTBNGCWVC7dYs/p2r4mTWgPAlHAKjdHeNer0gR+ES6hYCrBKWOuGyfvP1CqeFacVBA3WOyjvZr3ShVcdxUuA9/fvX0qbSRiWUKEiGyqpFDaGVv7/i/s/5LWPU982/6rXIO79Sun8RTf32aGWhXzt8n1R4xVyT8UviBHnGvHJrPSJCix8VnyDltEX1dLr4fLvidt/mdvIODSqEqrJiaFxa2yKxLcf9x0RXNZ4kTnCfZTOHGkwtYu9sdVTj+ZlWuKnxVAIPqjsUhKjqlFwSeHJ+1pyH1aqSSFYJPOQGxYuBTMJLdbAIs47MEel130RJ3qlrkc9bv3sYz+wJVcFUvWFix30xAowAI8AI7BsIMIG3b+wzr5IRYAT2YgRAzIG8Q067k0QevCvFzzOFkQXKPSKUFt+bqO2coMIhEGoESUzsEs602S6q+szuYihVE/lmsGGKEQ6NFYIYAtYyhEsaKYBcaBO/D8skQJ3ba0VNqX+CuNMpfeK6anlitNGA3YVWJfD6qZDaC5zdGuFCay+mBB6Udztu0DfpyITEkwQJDvgqceynulPXmCmBd+B+g77hs07knZxD/eDEUZiDF7rktzPTcjPK8EJUjuoa1LnmUAe58VASC0bciv3aQgmKfJNYm1RVSQKvWPwSvB3CZnVLlAQe5hD/0fVaU5EmF9b8xf9JE4tskVxakzSspOakU0OgpYttgdgvhM0abJfhDEaq2/Pj4e+gfdxK8eIFeHNhBBgBRoARYASygQATeNlAmcdgBBgBRiACBEDM3XbXQ7Rd/PzWdZdZRhWr12+xwmY/KEJpK0VC9IcEsXfLt67Rcpv1mmKvCE+V5Nik6hLrv6MgkpzmgEMUSC2QW16OpPU1pbSzuScCpLPbpaqcgaum37qzO7v00ZxIPLsKTxJ4XuSdk/oOI5kSeO+e0W4Mx5Qby6jsgGAHcJnDCyocEEPVFcUilBuqOz031kwJvOoDttEz8Zdd14ycdxO+dYDr904EHiovOquCDru42iK2oLwFQaE65xqDnCcNpMILxjxPPrOb/vpCS9IYwnB+mTjR6gwVe/h2Kty+QaeqVQcOvPFZC2jgrE+IlyyFnkpe7U5zXFGSxlAaIr8piDJds56wp+718ojz34WNNvfHCDACjAAj4IUAE3h8fTACjAAjMMYQAHEHZd3Tz65MU9vJZcjvQegdL3Le4WcYZfvupLKpRoRldQslQjbyzalhTFA/eJGGE0Ui8WahzMgXg41MMAdhggMr1rK7rTdrZGmQOTuReLv29FNvf5IWAYGnknfIVzikyFhKigtoUm2x49AmBJ5u6Kx9oExUeLIv3BNl8SKx5hGiWwfLTAk8vxDaigdmUOUDzkY18aHilJGF01xPvWOSCA2MCzKoyLqvTAwGdNaeqzogY/CsKBQ/r/jaKk+zCrc5RmliIcfUVeHJ+oXnfJJqlyyxckg2CtOhbL1giXof8TegRqgnEwODWX1x5LQuNT+ezNWHF0e4lrgwAowAI8AIMALZ
QIAJvGygzGMwAowAIxAiAk8/9wq1t3emwmRD7NqzqyZBJEFdhANVcVFhpPnmgrisZpNYjApzVemBXHjlJTGCe6SOYUJUc9Ltt/PVNdT52tpUdZB4mxMV9Paa3bR7zSbHbrzIu6lLp9NZXz9fd3gKor6Tnc/9faX2OGpF9TqFqquyPKZlRqL28cnfvxpo7B+fscxq55UDb8rFR7n27ZQDT638kftnWHnu4LoLAqXXUhb2ZyV0MRAgGo1UUwes5+8vt9Hjf9udV+GzchkF296l4t//RGNVRIMfvJxqFy+2SNZ+QXRVj4tTh3jh4RTyqdVhnlSCGhnXHtYCjmxcqXSBHh3Kms0pyxdL/fh7KNThXBgBRoARYAQYgWwhwARetpDmcRgBRoARGOMIIEQQB0J5qILKI+ySictqabzQcsnNZnhvmOuXeavsYcLSMGEsrmvdlh768x/X0bu/eDQNKhB31cJ1try00CK8nEIYz/z6eTRtqX6es2wTeHJf7PuFaxDXoj1Po9u18t/PraO1uzuMLqWFEyro80fNt9p4udB6EXhu4bMgShCSecZdU9JUd6brMlpQFio7GY8gBPqeBxtoy7YerVDaWdNK6SPn1GdhtiNDeIXTDk6dQ6XHnELlcxdYJL9UH8vQbpPrMKuL0hhM7le+rgsYg1wsFcpbLowAI8AIMAKMQLYQYAIvW0jzOIwAI8AIjHEEoMbZ054k7RA21NDSE6oaBwQWDkOZ5DlKKiOKrXmOlVBamZfLK3dask7cUkCNlVBGVZ3282t/Sdve2jrqDsAhGPUQ9qcagJqq79Bxtgg8zBe57kA8uuW6k3uq6wB6+X0v0lCPULe1dKVjJBRHhdXlVFCabvAh1XeyspsKz43Ac1PfCWFt0tl0gAghtPai4xJqb7PykcdH9TNl4XyasihJQEZd1DyFbg7Pdz+4k7Zu77WeZ25mFrkg71Rsil56Kg2qwalzqWL+QksR7UbuqyGfIJTHSlitnwlHvqwLBB7w58IIMAKMACPACGQLASbwsoU0j8MIMAKMwBhHACSLNIlAbiyZAyjTZclwJJPk/15jShMIL8OLTOccRnt7uKwf4aiG/2Ft+Vyc1IQPffU+2u5A4mEdRSJUDuQJrrEg5B36yAaBB1UQwpoR0udHpKrOp17kSfeDL9FbK96k/1sy3X1LBZFXVF9tff+5I+fTorqKtLqbirY7utE6OdA65b5LEqkCf2sPkl07EXj4varuUtVR9smDuFv56GjyTtYDifeBz18V6WVscs/AlfaZF1uoSCwwIS5GmaYRxB1cZ2dPd3ZPjnQBLp1LErk/ARLZP6w5qWYrFgpqqKgFO5vHBUpPmMK4ka3q1PG3Y7yoj/B1HRzCXjYMnWIW682FEWAEGAFGgBHIDgJM4GUHZx6FEWAEGIG9AoHGll4rxxIIGpRMiCTVZVDnsGYCoMnB3aTfsOri4AlnXVO1IciTWmGYAPVXq3BmVLwgwppaRv2oCi0n05GX7vs74eNUkAj+uI8cQ/udflggpVCUJhaSbAXRaKpkkuRJW1eflVNOLe03PkSJVdtTv/rR4mn0blW5Iz7z+hJ0/bWnuu6PE4lnN7FwIu8QLouw2YTC69QuKqb3faHG81pQHV07bITyH/77Vtqxdp3vtRQliSfvMR2yVZ2oJMdAZraJtAH5ploL+myThDJcXZEOoS+Rfi36blbEFeSzDaRkm3i26Ra0GyeepXiegsTLFkGJe6a+tkx3mlyPEWAEGAFGgBEIBQEm8EKBkTthBBgBRmDfQKBFuFFC7SBDmBC6ZVoyyXNnMhYOdnVVJVbyfZMDockYpnWDmHM4jeEXYmY6r0zrm6oJnUi8w84/wsodV1UeLFS4+40E7bgh6ZRsUqbcWEZlB7gnopdEEMwcgpIDwAfEq2oE0b9qG3Xc+LDjVJ+cVpv2+5O27bH+XXr2YVQmPl7lmfjLBDJvc9EOqxrCaBEyC/IuLv6nlpiI/lNVd/K7936hmiYs
Sq/rNKZTeKqf8s7eTxQknszXl0neyHxUrclrEUSVnQzWveaDhEHr9h20XlBSUh1PEq/4XVjqcK/1IN0D7mkujAAjwAgwAoxANhFgAi+baPNYjAAjwAiMcQRA3oHEw8F9cnVpKqRWd1kyXBbhh5mo93THwzwRYlUgCBTMO1eKtShIS5nvD+vKpZom7D1VyS5T4tVUhVe6fxFN/baz4i0T1Z0X2QWSEorTxot+qHsZp9Wr+cUVRu12r+mjF29uSWsjjSqEmHbUPaFL3qkdSuIVIZr/+5GrjeaHyoecdgodcvopxu3sDSQRBHLL9NpxGlwlhUyVlxkvxtZBmKS9VK0hHDzXYbVyz3TzRfrhquYUjVKlPH5csaX848IIMAKMACPACGQTASbwsok2j8UIMAKMwBhHICFO/LtEGC3KxPEl1CzII7/cbahbKnIaVYkDT1h57kxhlAYZYYfq6swjbIJLHVPm+wvr8KuzHllHJSUzUTo5jaljPOA21+1f7qKeN/3zfHmRd1HuGQiGkg0NtPXLv00z7tDFXkeFZ+9LJfFUowp7vSDknewDe/bWEyvoHw/90QrxNi2X/fj7pk3S6gcNmdUZNJdqPBn6inmG/RIi1wSlJH7DNh1SCUooZ+0h3jp77lenTvz9w73MhRFgBBgBRoARyCYCTOBlE20eixFgBBiBvQCBhuYeKy8UwtSQD88rmb9K8kDBkkulWPIQHnN1bAx7a7J1OM5EsRZ0zUFz+JmOJ4kTU5WhXzhtzUVxqrmoZNR0oiQl1cF6hHFF38P/tH5lSnYFIfAwDsiGrU900yu/aU0ZVeD3XQUJGlo8REvOqKClC8abblFafeS+a3hnHSGfod1Z2K/jTAi8MNVpbvOUOTuhXsyWGi+M0FI/3PG9NINAvkC8DMiGUjlbe4Zx4uIFUph/fyrF3z6ZB1YHX67DCDACjAAjwAiEhQATeGEhyf0wAowAI7CPIAAVG8wXoKorE4RYs/i3veDgWS6MGnAwzCc3WKn4MCWETLfWyYXVtA/T+iBUi2PmJgsm4+Qiwb9UGQYJu0ZIrb04EXeSxMC++Y3z+8dXpbo845QlJvCl6sJ5FiReMpS1wCLEdUmTIASenSz50/oG2ra2WxB3gzS0JF0ttyxRQ8sS6Tn4dBd5xyc/m6oKww8UXYLyA5+7iqYsmq87lFXPKbegUQcBKuMZUj0ubjkRR6HsklPCOJVlxVqOxwGWMaqJ6i4cJtnlNLdskHfquDJnK36XKfnK5F0YVxv3wQgwAowAIxAUASbwgiLH7RgBRoAR2EcRgEIDLoZSHSJDaiUcMvwQeaicnEhzDZskhKIgFuXacxUqLFWGIFl1QptN9iIXpKScnwwjBOGFtemSXTrr01HdrVnXSCDu1qxvGtXlonl1BCJv0fyJOsNZdSSBh/8GzQWyK2km4R96akLgqTnhYHwA3B6Pb6OdhT2+cz2lbyrVD5q5bKoEHgaQBKWOGs9UgZck44utZ0xQcxFfEFwqqCHeUTi6ZpvgUpcZJtllhy/XOUmlojdpSGOuNGTyLugdw+0YAUaAEWAEwkKACbywkOR+GAFGgBHYRxDo7R+wwlBRJlWXpEJSJVnQL/JftQmCD6qifC1hh6bpkEDZwiKZxD2Yk6vTHMNyzg1j/WETlDLhvZfqDsTd758YUd25reMLVx6jTeI5OdBCiQfCy4941SXwgBVMCqAUk2HuuuQd1lg/WEqn9E0z2jaE0O5Yu25UG0uNJx4HXs8EEwIvlwSXneyqrii2HGElQWoEmK2yJAajVtLqzDHsvH9SLQn1djYMjNzWqCoNTV7i4JkKVTkXRoARYAQYAUYglwgwgZdL9HlsRoARYATGKAI793RbiqGayrgwphigYpEZH3mGTA5EuV46DnJ1VSXU25+ZY2UulWluGIZFUObj2iTplonySj3Eexlw6JJ3ch9MSLzmf7tt1PZhXtg7L8WanwutJJOLBZGrmra8GttDr8aajW47UxJv5SOP
08pHH3ccwytceMrC+fSBz1/lOzcZwo2w3LAcRveU/JaaS+5PG7um91wqS+xHZQNLfecUVuhpWPes74QNKsi8fzGxeZkoDfNxbVBiw0kWpb0r4ZqfFdctyLvSOJN3BpcOV2UEGAFGgBGICAEm8CIClrtlBBgBRmBvRqCprddylK0WByAQd355w/IZC6wBCiHT0EwZipurcFk/TDNxr5QkWb6uLRMiR0d1B2wRNnvzD5/xg3nU97oknpMKT3bmpljzU9955Qu8q3S98VrQ4NKeeUbt7GG0amO3cGGd/HeZ5EJ0WkB30Vu0fdw3PNdWKki8aV1f1Vq/vK6CqPFy6Sats7hM11YzrAgGNvlWZCi2074liccSwv5wYQQYAUaAEWAE8gEBJvDyYRd4DowAI8AIjDEEQOwUFpJwoR2ywv5kSO0YW0ZqulCaQWGhKpbc1qKGyyJUuEdgkc/FJNwwn1yDdTA1WRv6M6lvqr6T8z3j5CVWTjyd0n7jQ5RYtd2xqgyplQYXsSVTqfKGs1y79VrbzsJukfvOeRy/eZqaWuxYs47+8P9u9ewWz45CwTBjbfUL/NV3Jvvmtx5876S6c2tnQuKBNIfTNUIt27rEs0GDsAp7bTrrD1JnZG0xkT+uXyvvYNgh70HmrdPGaW1SNRgT6nIujAAjwAgwAoxAviDABF6+7ATPgxFgBBiBMYQAQvwaW3uTB/CaUmpo6QnVWCAXUMjDpldIpTSpGGuKw+S8i4UDY59rqNhYXZvM+edFKjiZOfhdY5f9+4N+VVy/v+N/ztZu60XiyZDa4iXTqPS6Mxz71MlRGCR8Vg5mSuChnQ6Jh7XN2X8RnfXFa1zzx8kQTvQZVl5NHeWdHWiE1Nb2nqe9pzI8E2kGcM+5ma7AhKNEKJh1XhxoDx5xRal+xTBejq5jhZhU4ZJrA7mM65PJu4gvJu6eEWAEGAFGwBgBJvCMIeMGjAAjwAgwAiDwdjYnnSwnVAnDBI8cQmMJrWQ4VXwU0aVDkuT7Ot3C9PLJgCMohjJBPvIZ2s0EJJEAsqEvoa+WzBaBhzUjnLbnwZdGqfGguis/571Ud9hsS+lqJ0wkMYs1S6MKJwyzTeBhDiDxkA/PydQC3x9y2in0njNOsYhl3Hf2/Ykqb9q28q9TT+xt40ttXttvjNu4GUGo1yuUemOx4NobLwhImKTYHV3HIjEp9wB7M3F8iVCYc9jsWLwuec6MACPACOztCDCBt7fvMK+PEWAEGAENBLbtbKLKinKqEh/d0tjSK0JoB62wRChNcIjbG4okumDIgVBhhMSBYBhLBh1u+2AnDsaq6s5tfSpxgDogl4Pm8YuSwItvvMtawkD1MuujUyQZ1NbVR/2JIeu+Q/5JL8Wo7DcXBJ66JphbyALDiimL5qct2Z5jrULcc7g2TUlXPxyDqO9kn6YqPNnOrliT1+VYU/E6YWs38MDfgzCMgfz2Marvi0WuO+S8Y/IuKoS5X0aAEWAEGIFMEWACL1MEuT0jwAgwAmMYARB3H/3sTTStvo62iv/+8Hkn0yXnLNdaEVwJQdrh8F1ZHhvzefDURUsVBn6HNUJl4hYGpwVWnlWCcQdy/uHA7RUGl2fT1pqOVAahMpxKvZRpXh2GTeAVtbxK5a9+1nHI3tmXUp/4+BVJwOJnV++AlWdNtwQ1sQgSQqs7J7WeSgbhXlPDSge3dVtVCypjVFCVdA4NUkxy39n7D0rgyX5AwEJtiAIToITIH7q3FPwNgKNrkOsyXzAoL4kJt9ng11a+rIPnwQgwAowAI7B3I8AE3t69v7w6RoARYAQ8ETjpwmvpzu9fZxF4ksy78fpP0GEHLfJFDgna97T3WrmCJleXpkJqfRvmeQWZdys2HEKFsEwToiTPl5cycsD+QVkIomRvIRPUUGeo07DGoHt3861/ozXrm4y3c9G8WKKWRwAAIABJREFUOvrCVe9Pa1cmiLuYIPD8Stey73sq8mQ4MFSFWJ/J3j0e30Y7C5Nh7ybF
1IXWpG+1rpqnENdl685u6trQQUPto5W9hdPKCB/TkksCT+4d8oai7E3Eeeq+EyHq8ViRtsmF6f5FVR+KzypBQHJhBBgBRoARYATyHQEm8PJ9h3h+jAAjwAhEhEBbRxeBwHvh0dtSIzz8+LP0w7seoid//T3fURNCvbVLhNGiIGdQm8jD1avhuujbcQ4ryEO2Gi4LtVqRCK0CWTKWVXhOefykAYRfDrUcbon20Pa9A7GMHF3YuyBkyZp1jXTzD5/RHl9W/MKVx9Ci+RNT7XTJO9nAicSTyrt+QZDg2gQJpGPeoU4+iBPtKX1TqX7QnCgzBQ3qNCigcB2CdB1Y1UaFnUniTjxmXEvR4kojRV4uCDwZQouwZ5mfEQRl9bi4peztEPs5lgvWAmVhszDrwIsA9QUIVNomeSdzgUOleEbg2WFaVq/bTIvnzzRtxvUZAUaAEWAEGIGMEGACLyP4uDEjwAgwAmMHAZBzd9//JIG4u+qjH6QzTz7KIvBu+dY1aQcR/O7KS8+iM0852ndxDcLIAmSCJIfGap44mQvOLV8aDngIOR1LbpHq5jkRk/L7qMwCfC+ekCr4GYxId+Eg+dRMVXh29Z1X2Kzb8hMiJ163UOLJgmsTCqGuXpgFDKQ1U/Or6RDMJiReNsg7NWxW5vIDeSdVd5YLr/i/QcGcDwvXRsFmQuJlOwee170l1w4X2rFAdDldr15Os/a8hvn48iMoeQcsrv6PW6hd/C298brLLAW7vUDRjuL0XUiPPu6GEWAEGAFGYB9EgAm8fXDTecmMACOw7yFwzwMraNuORrrk3JPon6+toe/84JeW8u42obbDQQOHEFlA9P3p2ZUWsedXkIerRSgvUMYiiWfiwCqJIB3TAD/csvW9PET7GTmATKiuiFvTwn7m42HbCTOp3IKSySvXnZsDr84+6JJ4mYTO2ucBFd5gzTJLQahjVGFCUuqQeNnIe+dEbg219dPA6vZR21JUmPyVkxoPefGKllTpbKVVZ33VBdp11YpTO79CZQNLtdtKh2CYjkBV6FbGAtHlNHedlxp4rsAECAQ01Id2AlobzAgqImcf5pZJeUn8Lb1J/C09bNliukK89JImUPh7e89vn6DDDl5i/d11I/kyGZvbMgKMACPACOybCDCBt2/uO6+aEWAE9jEEDj/tCnrgjm+k1ABnX/YVelD8G2q8c8V/q4q7p597hR567BktAg8wWsn0O/sshcxYUnPJA6iTssnt8kC4WFV53CK58jk0TCUmTZRnXoqafLplpPJsQITsyZBSv/mBTKitjFvXqSlJ6UfiOZF3mE/lX471m5bj94m5H6XKAy+3SEmsT6dIklI37x+caVFejTVbP+sHS61w2WWJWp3hMqojyS176LaqvrMPgJSUhS5qPJOceEFUeKYGFqb3kd3NNZ+fLUHIfrsTr8wDmNFFlEFjvKwoF8rWsAoIu7sFYXfXcD7Z5ULF/oMbP0OL582g1eu30DU3/K9WWoqw5sP9MAKMACPACOy9CDCBt/fuLa+MEWAEGIEUAnCaXSTy9ZwlwmLvEWG0UNi9F6oBEUqLcrU4YFwlFARTRSjQTbfeS1eK3x9/1MHaCCL3UVNrz5gg8STR4adKc1u8bJ+v4cIyHNiE/FHXKsmVfCUp5fw6e4IpeiS5YmIAAXyQE2/NuqbkT2FuAdIOue4WzU/+tJcg4bPoo1AwVSCrOo/7qzFJrIZl5mu4d5VQFYIId1KyJl5MkopexUmNZ6rC21b+deqJve03lPV9aWI/mtb1Va26QcgtteN8V+PJXIw9ffrEsv3ZAlUpFLNw98620hf3Vd34UsIzPOyCl2FSgYe/t98aDq3F748QL9De+stdYQ/J/TECjAAjwAjsgwgwgbcPbjovmRFgBPY9BHCI+MWwSgDk3CXnLKeHn3iOfnjn7+h+ocTb3rCbbhP/jZw+l5x3shF5J9FUSTwcZOuqSkToWLCDXhQ7pKrS2kRy9R7h5Bm05KPS0CQc2G/d
mYSc+vUd9Hu5vqCmFKOJknikbpnxjXdRifiYFKyNhEIQCqX2Y/9i0jStrhPJ2VzQR82F/al6NYPFVDOUDJvORlFdZt2cgXUIPMxVqvEGBAMkSaDYe82Ugzokngl5F9YzQYadQiFmop6Neg/DXB9ML0DiZnN9cv4xyQBHAJgMnYW5xSphcnHC0YdYKSpA7CGMFup2hNZWin/j76yO23sE0+QuGQFGgBFgBMYwAkzgjeHN46kzAowAI2CCAA4S14jE2willcUK9bGZWJj0aa8LZ1qprJEkXq8gytwO7JmMZdI2U1Wa01h7+/qc3DNNMA+zrtw/qHbCyqPlur7+56gg8fyo6Q/FjiQqPspoWbohtJZZg2ClBgVxhxBfu5GF0aDDleX6diZ66I2+1jTyTu1vzsA4mis+URbpVOoVru6W/85rXjGRG0/AZeXGMyXw0C/CafeU3D9KjQfirmxgP6rtPU8LFp31aXWkVAKJXiNCPRPigjAN+TYdy6++nIt0Cfarr/M91IbIQweDkiAu0TpjyDrZIO+kqztCZ0HM4d9Q4iHvLJTv8oUZ0lVMnTLRyp133dUfYhLPZCO5LiPACDACjAAxgccXASPACDACezECMKR4SHy+dNXF1ipB4N05nKdHEnr4twz9CQMKlcRDf9XikIbSIlRv2S5+DqVhzAfrg3JKxwU0jPHUPrKRWwohj3DKzEVIZpiqQjfsUyGdLetosPvXvls0FBcmCEUzfeuhgg6BJ0NmoWCVpXf2pdQnPpmWd4s6aVNxl6VYg6rPLWSxWqjx3pOoyXQ4x/Ym+eB0FXjqQEk1HlH88DrjkOMwFmyyviDjJQ1KiiNVi3rNK2rznqjXlw3yDvhJAg/mULLc8N2fWiq8QwWhB3d3Se7he6jxkMrixi99PMhlwW0YAUaAEWAE9lEEmMDbRzeel80IMAJ7NwIg5/D2X7rjTRO57VDgOnu3yIG3RIT4oMg8PWGj4UbitXb1ZyXvEQ5tcBiEMiYbueqiPsQ77Y8cMxvrk4d407xxmVxXUagm3eZTGttGA12/8iS51La6JJ5fGC2UTVDcQXmnlkzCZ2U/CJldWdxi/dOu8HPCIWwST+ZL608Mku59H4TAw1riM8upbvF4y+0VKrFs5FbLZr7BbBD1Xs+YqJ23sb4q8SIkJn62ihc9YZl4FEPFKIxrogybVXFD/lgUONIiHQVIuwd++k3LFAr/Vt3ekYsWBhdM4GXyV4LbMgKMACOw7yHABN6+t+e8YkaAEdjLEcAh4qVXV1suspK4U5cMcg/F6bswobGTeNL1tamtN9IDtkr8IFl6Ng7zwC1qpYrcm2yoCp2ug2SC/bhFkMAgI6qSDdWdfe4F3Tdbv3Ij1JzWOlT2BS0InFR4UnXnpIoLS333p/iuUfMDtgQ1nqL2Uysd0l8dSl48acZgGvI8uK2b8DEtRYsrqXB8MWUrt1pY+eBM1xm1Wk2dj1TeRv28VscM08SjNF5kqb9xr2WzQPWOl2QoUN+BzLO7wEv39xuv/wSH0GZzc3gsRoARYAT2AgSYwNsLNpGXwAgwAoyAigDIOyjv8qFAWbRbEHb9w4SBJPGiCMdUVRxR9K+DpyS5onJwzabqzmm9URMX2VTdpdZny3nnFNLqSOBp5sSzu9GqRhX2fsPIfYc+ETq7QXycihVyKv7PiTwMQ4WXqRrVVIVXOK2M8JElTBLICb9cG7xErcaTyslc5S6VJh4VQkGNlwVBcl6Wl8TEy4Zk6oZcF7wwu1So4Vf8+nupqdz284fpxVdW0V0ifQUXRoARYAQYAUbABAEm8EzQ4rqMACPACDACxgg4kXggasIMy8o1saWCEsUBX5ISfcIQBCGzIF9yVXDAHi/y4oWZ90/2GRe59sK8LnQwkuo7ta4XySXrDRXOICq5UGcIAok3bsvPrZ/SqMLeUFXeNQxtJXxkmVwwnfDRLS/HmqlFcZx1auemNjyhb5LuMGn1JLEEdV8m16iJmUVBZYyKllSN
mq8Mbw3b6TQXoeRumyGdhmEM0iGeCWGUqAl6kzkGJSorxbMJfw/yqSCUVuaeRdjs1Tf8r5UPb/E88QzhwggwAowAI8AIGCDABJ4BWFyVEWAEGAFGIBgCIC3auhKEwyZKpiodOQup2MoHYktFJqyDsBpOCqfGsHJDBdvF9FZh7aEkJxGWC+In28WJwMMcEHgHktIpR52co04YbRqZtPlFoj2vUMnGu6wuoLgbEB9pWAHS7qmB+x0hmCQIvAMLD9ci8pzCZ506TYXyCtJNUsJBCDxJWoe1hyDxEEo71O5+PdiVd07rk9dWl7i2MiW5cmnm4nZP4NpCWHsYueOiePEQxr0M0hSh0UiHgJBsr5QI+UjeAQOQdjd85ycWHMiFdwuTd2FcGtwHI8AIMAL7JAJM4O2T286LZgQYAUYgNwiAhJIkniTfgiiucpEnzRQxHK5rRQL1/gTIS3MH3pyEkxouUqqAgoQMq8RWkGvAcKqu1d0IPNnAK2+cH4EniVyYK/hdAysEcbdLUd25TRhE3vKicz2Xr0vgoRO7wYUpgSeJ3CgMTkDkgcSTefGguCuoEnnNlJBZv+tANZto7ugj1e1XbbuzZyU19LyS+tXk0oOpvvQQynVIqd/68H2mYcNQKoIk88JHZx5R1dFRVOYreadignDaqHPPRrUH3C8jwAgwAoxAfiDABF5+7APPghFgBBiBfQaBNuEyCDUFCggg5DoyOTgmCYOYUGMEy4+UbaCRSN0k3HQskJMqhkHUVyqxlS3XULd99yPw0C4VUqso1fB7LwLPJKz79cEX6A3x0S0HCCUe1HhuxYTAk31YuflEOa5nopbxi7xO0Q7EfC7DunVwA0lVVR4f9dwAcbei4WrHLkDiHTv9UzQhtiwn6lCddck6OiSXU39hKWlN5hq0LojK8eJ5OihkeOo1BxViufhbwoURYAQYAUaAEdjbEWACb2/fYV4fI8AIMAJ5iABCJduHVWlJAqiE9rT3uqpjsARJFOVbuKwOvLqH5LGgunNar5r/rFXsq2eYm0XAFlkH8HwICdYh8LBmu1LNLQeeNFNBG5DVfsSWV9is17V1olDhueXF8zKx8OoTROUFZTN89yasEHGdeyfMOlIViz5x/T22/Upq6B1R3aljqXkQJ8UPppPqbw1zKpH1ZaLGi9JUKLIFio6lG2+3SMlQLEg9OM5yYQQYAUaAEWAE9gUEmMDbF3aZ18gIMAKMQB4i4ETiQY2FPFpqURVpaGP/Pg+X5jglmQDfKVw0aML2fFu7V56wfFLdpeFmc6H1wzTlIhu/gIYK0pPQByFgTdV3cn5eKrzmgj5aWdzit5RR3x/SX00TC0us0G817LdhYD01DK636mMfxwkSdnxiNtUMzjEeIx8a4F58s+1Oen7nj60ch/bi5ER84PiP0UHVH8+H6fvOwU+NJ3PnDYnF+xHuvoPlqAKIyrrxJTkanYdlBBgBRoARYARygwATeLnBnUdlBBgBRoAREAioJJ4keFSSLgghks/AJtUxcaH+6fv/7b1rkF3Vmaa5lJnnZCrvgGxuZWiX6UBElQGBRbsHmJiBEuCyYyQHeMI2JUp0qSJmBAImwtOWS7R/eKxAzPgHIEx0t3EhW20XE4awNFNlLirojgJF11hlBLiqEWGIMhBcLYGUqUzlXbPefbSSnUfnsu9n73OeZWcgZe691vqetc9RnDe/73sXs8/ClFrmOTa3t1pOnXmPMWgWnouxVD7fjJ6xfoljrsRLlWmG7ef3k7n7Ih/rzT131b03iBOt/+bRhZK5fO4071tOAPrgxOvmscM7Fi/Ta1Q/c33kzuz6fXNx6TpzZvdnIsfQihtd2awrG5ZzrhuLAm0NZW/NmTu8vnhFGa7kdM7GovccZcYWNXvSz9zF0NPdVZSjYJ8QgAAEIACBRAgg4CWCkUkgAAEIQCAqgVoinlwj1dNo1n6wDlKGGHXtVtznSoFdjEUsCW7GzQkHx2cq55j7GOffNMtm/u9mYXk/
d6WzrqeaTFkUozLWmvXz2/fB8cU1PjXQY86z/bzSEvC0UFARzy/euQ0q6+6ZmX9vlI2mnmNdVrmr58i7pvd/SVzEe/fXU+bAo0fMe/84teRczvrDPnO2/Vr1tdFA51XropeO/NC8fPQvvR+5UtlmMeraImXh+eMetBmT/bZvqHtWk3ILjnwAMW5EvIsBj1shAAEIQKDwBBDwCn+EBAABCECg+AQkZilDxJXL9ltji+nZeS+bqR2HsrX0oboTYpydWzC/Ozqd/2MMIOJ54l3Pf2fTmM7z4lFmodwvFeOhsemavf/etH3wJNy9NVkxbqkeI7+/0wyWomUSNcrAc+s064dXT7zbO/3vvSkk4Ol1ecKKePUcXHXdn/T/X4md8S+2vneKcFc9uYS8P952VqQ1d71x5ZL7JOJ122yuZjHqpvXn74u0ZqtvUjbzaTb7d25+IXSWaKv37tZHvMvLSbAPCEAAAhBoFQEEvFaRZ10IQAACEFhCYMpmayk7TZlM+rPcBvVfZei1y3AN5pWRJidexTg7Z7MMTxp6tEOc7kO2i1Euw2FceFvOwPbEMwtvmWX2y41q4c7vwHrUCnTKbuq1ItyH4zNLTCsk3j362/GGIXWf95embBWk0yO4aAYR8NziEvKqx+/PD9Tc23+a/N+97/t7wbk/y5SjlkmJymnX9P2vsY8viHjnFokq4vkFvCUxWiFPf68Xo9YtooDn77+pzNGBvlJhXLzdWZfsvw1nDPd658OAAAQgAAEIdCoBBLxOPXnihgAEIJBDAsrwOXR0yivVU6+tFfYDWzuIeH4jjmr31VEr4hVK4Grw3NTrdRfUhTeHj+QpW6qUzpY8MxW/uOycMccmZzwROoh454lkn/yFWdb3XmgR75PLfs+ssU60SY+XZ582L8/u9cT06pLZaife6rXjZuEd+KsjXtlsmLHqq6Ohy2mdgKfXpYbfKVjf0euxXrlw0QS8Wq+9opnmSCAfHSyFeSwWr93z5PPmc5euNOeetSLS/dwEAQhAAAIQyBMBBLw8nQZ7gQAEIAABr0zPL+LJEVNN5o/YbKYijiBGHEUXuJwgoHOSqOUXRNyZVTiUbLZhReAq4mh2Tn6n3bv3vx8sxN53TfeZT3jXDlqTk6DltH9kxbszrYiX9PhvC39rBbynG2ah1TJ/0D7i9sL7y7W/jRTOv9nzL0LdJwGvlkDpn6RexmGRBDwJzaWeZV5maK2sSSc6qzfesZxmOiuDd9j+kiPq2P/Sq2b7jp+YdV+42qy/cU3UabgPAhCAAAQgkAsCCHi5OAY2AQEIQAACfgLVfZqUpaZRJBHPn3UXxJm0lntrEZ4KJwLIwEFZaY1GUR0w/cJcM6MKZant/3Da/N37E1aMDnaCYbPw0sq+k0D56/m95vkjv2i6cWf+4C83vbi0xnOljTKiZN+5dcJk4SmDcu/7m82b4//gZdk1GosZh/a6BXvxmb2rzHVnPRglvEzv0fOqX3xM21L9ZuX5ilHO2D32HpWDz9h+jnkZ6i+pZzLuGDs2aR7audvsf/Gg2bL5ZrP6kgu9Kd9+75C59a7tRo/Byn95vrltwzqz8jO2zyUDAhCAAAQgkFMCCHg5PRi2BQEIQKDIBPRh6ee2dEnjlq9cHynzoZaIV5RS0yBZd7XOt9Ijr+wZeuTpg3StvUYtw5Ng4LIqj9ref7Uyg/L07EugVAmfehY2EyjdvmVa8V8PHbfmD8ZIDwkSoxPxzmoiWKQh3vkFyn1jvzAvzTy95AjKL69e8vf5M98282e+Y6rLTfMu4LkMyv/2wd+bJ969PfBj5rLxbjj7++a07ksC39eKC6OK5K4/ZxA35SziSkq88+/14Otvedl4W27/uhka7Dc3bvy2+Zb989obrvLEvDvufsA8ct8WM2x/xoAABCAAAQjkkQACXh5PhT1BAAIQKDCBh360x4yNT3gfklyGwzr7AWmTzW4IO6pFPH0A7yt313X8DDt/0tdHFbX8+1BpnwSuCSsY
TUwFTOFKOpAm80mgVGmbSu+i7lHlfbWMHzIOpe5yfqMK9S2sVRZc7+b/858+9H4kgaunWz3WbAZXk2wv7/qRA2b4jJfqltF+tuvz5mL7leRwPf3c8+Z64HW/f44pv3yF6Xn/3JrLzVkRb+biX3pCnhO4ZGJxxolPR9pemhl4Eo1Vvu1/3p5+73bz/vSBwHs9q2+VufnCH3jl382yMANPmvCFcd87HCc9E9W9OhPeasPpZO4zYN9f0hwbbObduM3Mu/aqy8w1V1/uZd7d+/2/Mmuvv9KsvKDiMs2AAAQgAAEI5I0AAl7eToT9QAACECg4AZUkSaxbbRuHa0jEu+6r3zBPP/q9SI3E64l41Y6frcZWz8Ahyr6iZtFEWSvMPWHLgpvNndeyYZeNVG1U0Swe93Mn4Lm/S8RTnV6Q6sQrP7HcDK146ZSlkhbutECtnn7vz79unn3z/zH9e78cKNzJNT+vZONZkex/O3tHZHfTtAS8Rq+loCKeK531C4Ef2SxZ9evMy3CiehL7amU2njKQ+yM4Moc5B/XF23rPD8y2LRs9tyRl5d1265fNNVeuWvw3694Hf2qGhgbM+puuo6w2DFyuhQAEIACBVAkg4KWKl8khAAEItD8Bufz90n4gusL2FVIp0tbtD5srrHinP7uhkloJed4HpghDIt5HthH77MkPzM3MBCIsEfkWJxDM2H5T9QwcokzuLzXNQ++/qGXBzWKPmzXUbP6wP0/i2VIJ7b7fHV+ytMpp1TdOffEayT7/9g9OD7vl0Ne7HmmzVlGsLmM+8u4Js+//fc/MmenA80rE+4Pf+wOzqvf6RbfQesYJ9SZ999dT5om73wu8pv/CL3z3LHP2Z/tOuTfIs/XSkR+al4/+Zd11Lx75N+aS0T9b8nMJXMqSVVl1HswfknhmqwFknY2n18aKkT7PXCTtsfXeH3qinMQ5jWf3HTC7fvaUVz4rcW/z1vu90loJeBL3tn3rzxf75qW9N+aHAAQgAAEINCKAgMfzAQEIQAACkQmoDEnlRlesusicc9YK70ORGoVLxNOHoXPt9zRcFt4//ZedkddSE/nDY9OLIp7L3gpiEBF50SY3Jpl1V28pGXi0svdf0ll3teIM03Q/rbP0C7Fxe/PVEvC0b4kiMgtQOW69ktq0Bbxm2YUv/828OfTOlBk/8bvAqJedecj8T2s/t3i9e22GLcP8xdb3zHv/OBV4XXdhLRfasNmdEvKqR7Vw5/+5M3+Q8BS2xDp0gA1ucG0F0spI1vOiktY5+8CqN2eQfo5h43OvvR6p3BkM9bqTK63LuFPbB/0b9c3bvuZli+/YdueiYKd/z3Y99rR54Lt3ZLAzloAABCAAAQg0JoCAxxMCAQhAAAKRCBx87U1PqHv84e949+vvagwu0U7f1/Bn3K2xH4x2+kS9KItKxFOJmNwVNVxWWNYinhNBks66q8ckjQybIPzTyrqrtbYEkRHbFy+KWPnCr944ZcrLLj8/SIjeNVGMKppNXl1G679eJbUSQtQbzz9UPnvlJ5c3mzryz4M8R889POfNP3tiOpCI12N6zXDXJ8zVG5f2LPOXYTZzQnUBRcnCq5V9p/6K6uOWxfuCc2GemJqN3A8yyoE6AfGEfU+MKzgHWX/Q9h9V30v1/4va97LWOlmLd9qDssZ/bEU5/fv0qv136x5bLrvz/m+ZZ59/wbzymzfMDp9YJ/FO5hfbvrk0CzMIM66BAAQgAAEIJE0AAS9poswHAQhAoEMISLBTn6D77Ycd/fcdm8Hwiv3eLbYsSeWz6oV3jW0Qrn54e57aZ5557leegJfEUMaLDBQ0KiJTyXw4Pp16Typ/NtrYxKyZOikkJhFTsznCZhQ1m6/Rz7PIuqu3fhCRyd37N3/9snnv3aN1Q/njL37WnH3OaN2fuzhLNsso6Qymell4bjOupHb2pE/Jp/p7zNc+PRzn2BrGOWyzqDT03NYz5HjjhQXzpv1yQyLe8RNjdctply8bNvrSOO+y
LnO+/fIPJ8qG4RumF95Zf9hn/njbWYtLtiqTMwnzmjAH36oemUnH2QrxznFWqazKZmVkscn2v1ttW0B8/kubvF9IucxxXSunWn9/vDDnxLUQgAAEIACBpAkg4CVNlPkgAAEIdBABlRtJpFMJrcqRnOvsbVa0+5ztg6dMPH1PPfEk5Pk/GMXF5BfxKr2uelMV8fzZaOp9lUYpWTMmlaymslfKNhPEEaHZhDV+nmXWXb3tVZxRy2ZscsZz/awe775zxPzib34dKLqzzh4xX/zSxadc26yUNNDkTS76q38eM29NVoTmWkPllxIxJKj9z+cPmfNOimxJrO3mcH3gghhyVAt4/n1IyPMPJ9y579US8NzPwmapBcnEW/XVUbPqax+Ls60StfxMwsYZ5ZzbJc6Sfc8+zfYRzKpsthlr/TullhB7rdmSGyqt1S+eXJZ5szn4OQQgAAEIQCBtAgh4aRNmfghAAAJtTEClSH9hRbr/+tcPmWFbPqshw4oxm9WwxTYBT3vINGJ8ctZbRkLFaVbckrgmsSKp0cpstFoxBGnMHyV2f9P6LEoPm+2xkVDRLPOueu7qTDyX5aesu7SdRN+0GW+P/na8brjnDfSYOy79hCdUBi01bcbO/dzFGaQX3Qcvd5sPfjtn3jlYEUx7lp8wXbYqtruSuNd0NBLwdHMj44x6kysb713bE8/1xVPG3dn2yy/c6V5XAn3Usk5L2G4K4OQFSWep+dd1An49YTvoHpO4Lk6cvaUu7726Swp2Tob+zVLWuHrd6RdNKpuVmYX64am3KwMCEIAABCCQBwIIeHk4BfYAAQhAoGAE5Np30PYKUladsvDW2ZJZ/VlDWXcytnAOf2mH5hfxnOhen9eAAAAgAElEQVSj7yUh4uUhG60Wv6SzcLLIRovyHPideF2fL/W7O/DCm6Gn+7M/v9oTkc4YLpusehf6N6mS2urxKSveKevOiacSNpIo5XWis/oJNjNYmHh/mfnnv62odONjs2Z8/NSMwfLQiaZCXjMBz8Xu+tMlJZ6GKbkO/dDEuMFl4yUltuXBtKcWDhenWhoEceTt7+1ZdCqOgTeVW51o93tWwPNc03GfTYUzk0IAAhCAQHQCCHjR2XEnBCAAgY4joA81yrBT36Bv2gw7ZSq4slm50CqfQv/Vz1xGXhaQaol4QUoG6+3NZZd0WWUlCUElDQZ+ceuIzTyKOsJkaUVdI+59En2cuPUf/8PfRZru8//q0+Z//O8v8JrwJyHuRtpEk5ucGBKnRDqMuOsX77S1aZu5evjQTM1dNhPxPvvFbjN6drCMKicYT9hs2aiGCIrT9fVLyx017hm7rMO4Dq55FSkdH2eoIYdlmQzVy2odsq9jxZLnoUw8/fuWZLuHPMfL3iAAAQhAoFgEEPCKdV7sFgIQgEDLCOx6fK951vYDUradTCqqh0wtNJR914qRlIjnPizHEReyjF/iVqmnIjSG6cvnhB6VbkrUCnNvlvG5tVwG0vbv/W3ovSrW8887zfzR9X9Y18ChFTHVWjNMz7rq+x0jleLW6h3ov75avHM/O/S7aTNTo++gfl5PxBuxwt3FVsALM5wArXuaZQlWzxtGpAyzp7SulYNrvzXbCVLKXL2HqK/vtGJpNK96V44OlK3B0Pwp7ylFEO9awYw1IQABCEAAAmEIIOCFocW1EIAABDqUgMS53bbfncpi85yZoA+OysbRcNkvUzP6MFnfSMAdaSvLK+M+VmEzdNz1SZUax91/0PsPfTBm1P9uwZo+2P83HcoHUxmprtU9KqMtwvCLW0GE2Sj9C/95b4+Z+GCpa6zYNMrC6+o5YXprGOWGyb6r5h+21LSZwUlez9cJs0EF81Y56sbl538WnWCJeBeXKvdDAAIQgAAEKgQQ8HgSIAABCECgrQj4RTx9mFwx3GuzkRqLeEUVtPwH57KvGvUWK7JIqVid+6zEECfK1Xt41SBfPfLl8OqyC4si4LmYwpxpmJLxetl3bt1GIt7y05cq
p3HEO7deUHErrFCdtze2WuJWrT0WLcOwVgyuTFpPS6n7VKE4b2fDfiAAAQhAAAJFIICAV4RTYo8QgAAEIBCKgJwoPxyb9kQe15/phP1Lda849yGzFaYGoQIKeHEjl8p2ECmF4Yc/eM6jIZFDTRfn509NxZMgJNFO4p0bZ509Yr74pYsDkszPZY36xbnstbB9/TzH2V83LnmViDc+NndKOa3caUvLjVHZrIwrgva9a0a0URahew1rjjHb79F/rs3mzePP3ZnWysZLy2U6aw56eY5ap9m+crjS6qz3yXoQgAAEIACBIhFAwCvSabFXCEAAAhAITECN1A8dnVostRy1bp8acjOVOYUErbJ1/ZQgMDW7EHjevF/osndcD7+iZ91V83YCnr7vsuxc03wJPYq3VontqsvOM5ddfn7ej6/m/pypisRK58br3FwPj82EFrSCCHhuIxLyZqY/fn0Mnbtgfm+1FWcCGlaEBe5KZCemZj2Di3bIRqvFoFY2nnO9TsqhNyz7pK6vnFmvkRjJgAAEIAABCEAgOQIIeMmxZCYIQAACEMgZAQk7h20mnsvYcW6m+vCsksNj1gUz7+YNUZA60UNx689F63XXKOYXfvWGOfBCxTBFw4l2Okf9uZ4DZtHKZ2sxcKKdfqbsLZlVRBlhBLzq+T/52XnzyYvnoywb+B4nWCqBsmRFoKIYygQO0Hehy8Zzr9UogmyUddO6x7339FA2mxZi5oUABCAAgQ4mgIDXwYdP6BCAAAQ6gcDc/IIV8SrGFsq6U0mXPiwfssJeO4p3itN9iFZ8czbWj6xDbTsNGVm89+7RxZBcpk+9vnh//MXPmrPPGS08gkqGVsnr7aeyWWWoRRl5F/Dca1XxakRxb43CpVX3SJiVS63elz6yJjz1ROhW7S/ouoh3QUlxHQQgAAEIQCAaAQS8aNy4CwIQgAAECkRAIt4y+7/J6TkvG63ozfAboVdftP7eHi+7UFmGLuuw3QRLiXgfvD+2xKhCjrOmqvddu4h3/mdW53/6UNlM29JvCXlRhOh//Ek50iv4D29OXwx2z6xKSV02XpyMw0iBZnCTvz+nSqN7bUn/cH/ZCrPRxdkMtl1zCcS7VpFnXQhAAAIQ6CQCCHiddNrECgEIQKCDCbhMPFdO224inhM6dMTKWPI3+m/HWCVg/ee/e838f7/87ZKn2vXF+8Qnh4363hU9807nqlhnrTGL63+ngF0PNYk+ErrCGjv8894eM/FBOHfQtMtnnQhULdYp1hGbpVbqiRZrHt/26vX2a/Q6zmMc2pPKnNXzTq+9KGP3U/vMNVeuMsOD/VFu5x4IQAACEIBAxxBAwOuYoyZQCEAAAhCQiKdy0tmTzqUVF88er8Q2rACSJ5rOjbRR5o6LtegN8l2jf39fNPXF849P/4vTzUUXfMITtuRIXNRRMXQoNewB585+bHLG64sXZoTJwhv45IL59Jq5MNOHujaI+2qQ5zzUoi26OIgxR1FiVbbvcH9PZPFOR/DQj/aYZ577ldmy+Waz+pILW3QqLAsBCEAAAhDIPwEEvPyfETuEAAQgAIEECcihVMYWTsQrsvOjhABl16l0tDrrrhaySsP8sjU/CC/2JHgEkabyu3YGEVyDCEKRNpLRTWGyJl2sYctMJ95fZtQPr1kmXtriXRhx2WUkxikfzugIay7jXHaDvAbzno03aH/5MXzS3Tsu07ffO2S2bn/YrLzgPLNpwzqy8eIC5X4IQAACEGhLAgh4bXmsBAUBCECgfQnog96ux542w0MD5k9uXBPpg55EPDWLlwig4YStD8enC9NA3gmP6nOnvn5Bh8v+KZKzZ5CMpVrx+8WeqI6tQbkmdZ0TbeZtlqjONWhmqOunpirGIGKuf7+NTC3SLpt1zrpBRFn/nt19RcoojZrxm8dsvCGbGSqROemx6/G9ZvcTz5lH7tuy5L19z5PPm932a9C+7+/4PzYnvSzzQQACEIAABApBAAGvEMfEJiEAAQhAQAT0
Ie77O3ebTbd+2Rz8zRvm2edf8D7onXvWikiAJHTI2EKjksXUa8WPfJdduqy7su19Flb0cJCiCmKRIMe8yWWiSYALWyKqpV3/tKBZijG3G+v2ipBc8sxHwoiy/kWdSBTFuVUZeRPvV/riDZy5YL+sI0hKI4lMOserCGJ0mIzKWsidsCunZb1HRTEuSeoo0xLv3P7Gjk164p1+WfOQfb9/x/73QpuZ94x9v7/lK9eb9fYXNwwIQAACEIBAJxJAwOvEUydmCEAAAgUlsOar3zA7fYKdPtzpQ93jD38nckSninhlTzyRiJK3ETXrrlYcErZkjqBMryMTs3kLddF9VILFmN1f0Ey0eoE4ASWvGVtJ7i/vwlaSArJ7jnXuYTMPs3roR22ZqQRkPXtxhTcJtEPLS/Y125oy+BEbi/qGpjkk4N159wNG/73lpuvM2huuMsrMU588vf9rSNx79fW3zDlnnuGV3TIgAAEIQAACnUAAAa8TTpkYIQABCBSYgD6ouQy7Gzd+22zbsnHxA5s+4N1kv3eb7ZmkD3lRhwS7cZvhpeHEhTyJeElk3dVjo1JEOZkesn0B44oLUflX35ekUOmf25VKjx+fzY1Aq7N1fcSSECpdvHnK2PKfgQQoGR/oDKJkVNZ7xuKYeST13FbPk1YJt7KFT7O9LLPuA6j+mf293WnhWpy3uh/euH2f13v/jm13eiYXB61wd8fW+71/B/RvwBWXrvT65jEgAAEIQAAC7U4AAa/dT5j4IAABCBSYwNZ7f2j2H3jFPGYz7FRSpSbnGhLx3FBZ7Y9tT7w4WXiaq5aIF6eUMSnsaYlZ/v3FLe9LKtawRhVR1k0y+yvK+v57sthLkpl9ceNN+znzm3lIIGylIJ322fpfK1HKpcOcpfoqrhjp89oMZDlcPzytudqKdFtu/7qXeecX8/SzO2y23nqbqadrGBCAAAQgAIF2JoCA186nS2wQgAAECk5AAp563VV/eJNY5+979wf/wwbzT/9lZ+xo8yTipZl1VwtUGCfQ2KBrTOAEj7BOqlH24souW9lPzIlZaYsv4uMyDyemZs3EVPal4f4sw7T7t2UhAjd75rJ0QHbl0nrdpCFaVl6XvZmLd46xfkFzz4M/NddedZn3i5sNd203V6y6yGz607WLx6B/J/RvxLrrr2x2NPwcAhCAAAQgUGgCCHiFPj42DwEIQKC9CbjMCmXebfvWn3vlU8rK2PWzpxbNK5SRoeviZuA5kn4RT2LACvvhdWomuqlAlBNKwswg+rplMzaZbX8tJ2ZlXbYc1QE1Clt3jzPVKFnDCvVEi9vbL+hekjCNCLqW/7q0M9Hq7amv3GWG+8tWsMxWtGyFEJ5WNp47u57uirFJ1kPlsdfZvqcqnR0aWG7GJ46brff8YDEjW/tx11T/UifrvbIeBCAAAQhAIAsCCHhZUGYNCEAAAhCIROBWm23xXZt18Q8vHvTcZ+U4q/HsvgPm+4/83Ot9pP5I39x8s1n5mU9FWqPWTXK1PHrS2MGJeOo3JSfUNEdaH8TD7Nl9aM/C2dP1adP+WmVA4HqnZeE+3Coxy3/+WYqWTkRrVc/BrPsApl0i3Ox1nGQ2XqvFOxfrwdfeXOx5ut1m4mmolNYNfU/XOHOLZoz4OQQgAAEIQKDIBBDwinx67B0CEIBAmxNwAp7KZeVAK7HOZVoo8+JV+8Etrb5Hk9aFVqKOhsua8oSmlBxbO6l/lzhm0dsv6Msji5JHl5klEThJ84agMfqvq7AveSWXabktt1rM8sebRbmyM4PJMquy1tn7fwkQ1XG5JJMM61Ddqsy7es+0XMf1ZuzKZ/e/9KrZbM0syL6L8i7APRCAAAQgUEQCCHhFPDX2DAEIQKDNCUick2mFRDtlVkjIk1D3zPMvZPphTaWzEvHUK01jdKDk/feoFWGSbJCfhcAQ9pFxfeLm508kHm+WWWBB407LMTQPPdlqMUir56ATu7utCNSqrMpa8fqz05LMpFW8cmc9Yd8k
0hL3gz7D/uuU/Tg6UDbHbDbxMeuyHXTIkVoOt11yrsjZcO0Srr36cjM2PmGetf8ePGDLa5PMvs5ZyGwHAhCAAAQgsIQAAh4PBAQgAAEI5IaAPqApy2J4aMB887aveW6DyrpT6ayy8PY8tc/b69oMm5XPWQHr0NGpRRHPZdocGpuOLeI5EWXGlueq/1tW/dDCHHga8abVcD9MXLWudWKbRIwkMqnSEsnixunuT7ofXx5KhBuxcWJbjxWnkjzfPLhV13ueJS4Gjbe/t8eKkZVfUuR16Jc7MrbQvwtrb7hqiZlRXvfMviAAAQhAAAJJEUDAS4ok80AAAhCAQCwC+lC2236tsx/K9MFMQ4Ke32021gIxbpaId9gKdk5gU8ZcX7k7lgjQKuOGKBiSKIcsUrxJGBFkUaYa5Sxr3aN4VVIrUWtmbiHStC7DLYveiZE26LvJ9T2MY3CRd7HSzyhIvEP9JfsM9MRFy/0QgAAEIAABCKRIAAEvRbhMDQEIQAACzQm4rDtduWnDulwIdrV2PTe/YEW8j11Do4paRci6qxW/61kXtq+W4h22pceqyMtTSWWzJ7MiSJUj9YnLY4lws3hdH8Ao2WRRXwvN9pTmz+O48jpzjqzdmuPwaGQYg3gXhyz3QgACEIAABLIjgICXHWtWggAEIACBGgQk4Mll1mXd5RlSXBGvSFlotc4hrKiVJ6OKKM9V2Cwrf8msDCKS7JMYZf9h73F9D3WfhNog+8+LeUPYWN31TmwNKkwXUaz0s6nOxkO8i/rkcB8EIAABCEAgewIIeNkzZ0UIQAACECgwgWoRz4lU/uy86vCKmnVX65hcLM1KJYsudLjYnaglIxMZmtQTtdxz0IxLER59d3aNRK289/cLw1nC9OnWdVWi68TUfN1b2+WZdtl4XV1dRo6zDAhAAAIQgAAEikEAAa8Y58QuIQABCEAgRwTqiXi1BI+iZ93VE/EkeMilV+Yb/lH0LLR6j1kj8aZdhB1/7I162sUpt83Ry/iU59YZOFSXejuzj2W2DryRiJvX2Kr3pXJ2lYerjycDAhCAAAQgAIHiEEDAK85ZsVMIQAACEMgRgQWbkiVji1lrcKHhyks/HJ82Mr1wPbZmrSlAXh1m4+B0mWnzNtajk5VyUWf+oHjVS63dhjOmkIgjswf/GTsG7RSzy9SqdcZBS06LxsMZehyZmLEC9cdnPG2dosfsc170IfFuxUifkQjLgAAEIAABCECgWAQQ8Ip1XuwWAhCAAARyRKBaxKtkJvVacWfeCnrdthyvcUlejkKJvBXXA01CZcmWIkrYcW69kSfN8Y0u+0zijswMopg+5Di8mltzfeJczI3KxYsWW639tusZu+zYnu6udjgmYoAABCAAAQh0HAEEvI47cgKGAAQgAIEkCUjEG5ucM5PTc15GlsrwekvdXrbOsary0iTXzctcErFGB8redn53dLqtxTvHXILW4PIeMzk1t5h9mJfzSGMfTvjpsmmX6vFXXTadxpqtnlMZtStGeo1e3+3wXCPetfqJYn0IQAACEIBAfAIIePEZMgMEIAABCEDAEzZ6S5WMrEn7dcZw2fteo6b4Rcfm7/1WtrHr7+1aWqmzcgKtDC3GJmZtyXCPd+btnHXod+LVc63ehyonLaLLbtDXmysF17kqq3RoeanQ2bSId0FPnusgAAEIQAAC+SaAgJfv82F3EIAABCBQIAIS7I5aYceJPRLx2rHE0u+q6+/95voAStxptx549Ywb/GKPeh+206jEVrLZpJV+cBrqfShBq12Fy1qGJH7htmgmFnKZPWO413Sp+R0DAhCAAAQgAIFCE0DAK/TxsXkIQAACEMgbAZUXjp9sdu/PXmqXskMn6tQT6VzM7ZR96ESdetmF7ShcNnPWrSXu5e21GHY/rp9jvYxK1wtQLrUyMcn76O/tMcP9PbHEu4d+tMeMjU+YTRvWmeHB/ryHzP4gAAEIQAACbU0AAa+tj5fgIAABCECgFQT8Ip4yllbY
DJipmflC9w6TMCdRR6WyzUwMnDtrO8Q8PFAySl6SaNPInKNdxFrFoZg1mmWb+c0eiuzQ6hyVZ+fUz7Kx02xFrC15GYnNrm3Fe49bc9CWd7tzjLuPXY/vNbufeM5s2XyzWX3JhUumO/jam+ad9w+bz9nvI/DFJc39EIAABCAAgcYEEPB4QiAAAQhAAAIpEKgl4smp9cjJEtsUlkxtSidahCkHdqLIvC0r9ZfZprbJhCeOIsgp5tHBsif4KYvrRMEqauPG3EzkTPiIEpkuTsw99qDz2P9wyJqsSGxPcrz93iGzdfvDZuUF5y1m40m8u+PuB7zvvWL/vGPbnWblZz6V5LLMBQEIQAACEICAjwACHo8DBCAAAQhAwEdAH1Qf2rnbjB+bNOu/cv0pGSdhYPl74um+UZfZVCARr1n5aDMezcoSm93fip+7mJVh5Xq/hdlHs/LTMHNlda3chIf7y545RZT+ha4XYFHKS8U1brm3KyOemJrNjVlNGuKd/xlUNp4Eu+pMPL1v3nrXdvPYw98hEy+rFy3rQAACEIBAxxFAwOu4IydgCEAAAhCoR+Dg62+Zrff8wGy5/etet379ed0NV3kZJ1GHnDtViuiGRLxu21g+7xlafqMKZRM2Kh9txqYogpYy6EZs9pKcR+NmVi3v7fbMHnT2ee+XltT5uEzNIvQ/dIKl36Cj2XNc6+euXDwPzrxpi3eN+IzZX3hc99VvmMetgHfuWSuioOQeCEAAAhCAAASaEEDA4xGBAAQgAAEInCSw9d4fmtWXrjTrrr/S+46ySvSh9OlHvxfrQ6lEvLGJGbNwsqRSgklfudscGpvOZZllGplFFUGrxxPG8ujWGqWUstkLp55zbbP7svq5EywlKCdV/loEx9akBEt3Ts6ZV6Jgq55vlW7329dY1kPvke/Yrx8/9rSXtbzzvi3e++aeJ583Q9b0Yq39BQi98bI+FdaDAAQgAIF2JYCA164nS1wQgAAEIBCIgDJH3AfM7Q/+1LvHy8A7OVROu9t+GJWIF2dItDp0dGqJiCdRq5khRJw1w94bxqgi7Ny6Pq9urRIs5dh5bGouUvloIxYuQ0v9D/PUCzANwdLPwTm2tkrQqncmSYt3/nX0fJ8+VClDnpiaj/ISCX2P+i2uGOkzEouzGK7FwC9fPOi9b+rrQltSe5H9kli356l95sFHfm6uveoyMzw0YJ557lfmge/eEesXIFnExRoQgAAEIACBIhBAwCvCKbFHCEAAAhBInID7IPrM8y94c6v0S0N9nB6xWST+MjBl4W3bstHLzosz8iziRTGqiMIibeEozJ78mVNpC6l56gWYVamrK1PNQ484l224zCpezdx1wzxD1de6DER9P6msxnr7qbyWejMT77SPzf9uh3n1N2/U7HWn99QbN37b7Lz/W4tmFg/9aI8ZGlhu1t90XRys3AsBCEAAAhCAgCWAgMdjAAEIQAACHUlAHzTXfeFqs/7GNZ5pxX6bUSLhTk6LGhLs3KgurY0DbG5+YUnWXZoZQUH36faQlQGBy0qbmpm32UpzQbeZ6HVOSJRJhcwqshjO6KGVWWlxTUnCcspDj7hWPG86a/VA1Fmn0QPRPb893V1hjyT29TKy2P3Ec2bL5puXmFlssL/8uGLVRWbTn65dXENZzcrSi9NHNPaGmQACEIAABCDQJgQQ8NrkIAkDAhCAAASCEzj42ptm890PmL0ny2KVOXK3Fe4k4Dk3RWde4f6uMjC5LyYxqkU81x8u7Syw6r0naVQRlosyolRuOG9Li7MuLXUGE63IDHNlxK1Yu1VZgFlmOtZ7xuWsm7VY7HogSiRWWe2Jkz0ww75W6sXUCvHO7UXvi/plxxU2K1ni3P6XXjWbt97vtRpwLQmcscWObXfGcvOOy4v7IQABCEAAAu1CAAGvXU6SOCAAAQhAIDABfbBUryZl32now6d6Nbned+7DqYQ+ldLeduuXzTVXrgo8f5ALa4t4ytiZzsTkoZUilp9P1qJS
HjIes87+y3q9es+/M0eRmCVBLe3hBDQJd1msVyseJ172luI7G2v+ku11d5oVvlsp3vnjVOayWgvssiYWet/09w9V9p3eQ2VswYAABCAAAQhAID4BBLz4DJkBAhCAAAQKTkB9ms6xQp1zn3Xh+A0u0gixWsSrCA69qYp4aRtVROGUhajmF7GSzIaKEq/ukbAj51CZEKjMMqnsrOr95M0JNysxMQ/lyv6zSEIwl3innnddemhyNp7dd8D7JYhrPaAy210/e+qUfqI52zbbgQAEIAABCBSKAAJeoY6LzUIAAhCAQBoE1LtJJbIq/dpjHWfffv/wkj5Oaazp5lxYOGF74k2bWVtKqiHBZYX9kK6y0qSzhvIm5pwqcPR4YpbMPpIcFTOFknUGncvMHTTo/tMUL52IlVVvw6AxO0OJknVt1XnP29dAkiNNpnH26QwuFG5YIw25JI8OluIsn/q9LuNO76PKvKs2A0p9AywAAQhAAAIQaHMCCHhtfsCEBwEIQAACSwnU6mm3xrrMqsxLZhYa6unkd6FNm2G1iOeylJIs/cvaqCIKM9cfLskSy7yKOaeKlyVrpjFj1C8tiVGEuNMwesi6JDvKWWmPEpWDCqtD9nqdZxGGhLtx26IgrmN3EWJljxCAAAQgAIGsCSDgZU2c9SAAAQhAoGUEJNDtthl2/swQlcn+6y9t8pqxy7hirf1qxZCINzY5ZyanK66sTsSL23zfOXDOzqmR/lzi2U5Js+rUuJPKjvSfd9bmIFGehaTiVlafMlenZ7NzFY4Sr7unIlaXPMG2kQtykcS7ODy4FwIQgAAEIACB5gQQ8Joz4goIQAACECg4gWpn2epwVPq1/qbrMs26q4dUWTlJiXjO3TaPpaONHiknQk3NRHMOdeJIXPEz68c+rviWlPiZddzOkVjrRukHWOS4XR9Eve6rS4kR77J+ElkPAhCAAAQgkG8CCHj5Ph92BwEIQAACCRBQQ3W5yGZZFhtn22MTs+aY7demEUXMcj3Gytb58vBY8j3G4sQW9F5n8nDCZiaGySRzpaNp9NILuve410UpA3UOr0mW4caNI+z9Uc7OiXdFE6n9bGq5844MlMxAXzHKZsOeM9dDAAIQgAAEIBCNAAJeNG7cBQEIQAACEEiVgMpdx62RhYYrDwySkVbUbKR6MIOKWX6DAAmgSRsjpHrYNSYP46JahH53Qfkpe/L0obIt955tajhSMScpJ9o7MOg+k77OCfUztgS42xrZ9JW7k16C+SAAAQhAAAIQKDgBBLyCHyDbhwAEIACB9iVQLeJJ2Ji3Dq1HrEBVazghJ0nzizzQbSZQJdVHLQ+x+vfQTMxyWYq6px1ESxd7kFLiZs9E3s4yyH4U9ydH+zzBngEBCEAAAhCAAASqCSDg8UxAAAIQgAAEckzAL+Jpm6O2tE7DX1babtlntY5D/fxGrBvnobFpM2dFTDeK4K4b5/FyGZXVZgftlmlZi5Fza60uA29X8e6M4bLp6e6K87hwLwQgAAEIQAACbUwAAa+ND5fQIAABCECgPQhUi3iurFRilkrtJGgUzbAhyslUMu16vfJKlRoq7pItuVS/u6KXzDbi4XoaqrRSsfba3oYqHZ2Yal5mGoVznu6pGLGUvFgnp+etc2vZ296RYzPmxMc6bp62HHovToxFvAuNjhsgAAEIQAACHUUAAa+jjptgIQABCECgqAQkXki0cMNlIenvRTWqiHIWTuzosqqWjAskbnbK0JnL2GDBKledeuZ6HYyd7A3ZDueOeNcOp0gMEIAABCAAgWwIIOBlw5lVIAABCEAAAroRXrgAACAASURBVLEJOBHPfehX1pn+3ElijjN3UPbVtM3Caycxp9ED4ncWtrYmbWHcEPQF4X/eJdy2S8ZlyWZUnjHca7rsa5gBAQhAAAIQgAAEmhFAwGtGiJ9DAAIQgAAEckRgfsHY0sEFL/NMZbPt2A+sFm6/gCXBUlloKqc8YUVMfz/AHB1VYlup7nfXrqYd
tYD5y6b1vFcE3FLhBUyVvqufJeJdYi8TJoIABCAAAQi0PQEEvLY/YgKEAAQgAIF2IyATh0NHp6yIVYnMZaW1ayZeI8MG1w+wXbKyqp9VOdGODqoH3Jz9ml/8sTMukStxuwqY7rnW2fqNS5yAWW3sUZTX+aAtgx4+aUZTlD2zTwhAAAIQgAAEWk8AAa/1Z8AOIAABCEAAAqEJzNlUPL9gV2n23+OVF/rFjtAT5+wGl3El4wplYNUa7ZqF6OJqdKbtKmA2O1NlZCoDU9WnR47NFsbEZMg6KSs2BgQgAAEIQAACEAhLAAEvLDGuhwAEIAABCOSEQLWI11euuJN+OD7dFiKexCnFFCSzUALmiL1ezrztIGCGid1lqknImpmzNdYFH2FEySLFnqV4N3Zs0nsKhgf7C/40sH0IQAACEIAABBwBBDyeBQhAAAIQgECBCVSLeK5nmBxriyrmuJJZlUgq806GFUFGdb+0IPfk7RrFfvpQObRBR71S27zF12g/7Rx7VuLd2+8dMvc++FNz8LU3jV42t3zlerP+xjVFegzYKwQgAAEIQAACdQgg4PFoQAACEIAABApOoLaIV140uihSeJVSYPV8m13S8y1oDI365QWdo1XXxTWniCqAtSpe/7pxz831BFRfSInXQUXfLGJXqW+/fa6zGDdu/La59qrLzKYN67zlNt/9gPkj+/e1N1yVxfKsAQEIQAACEIBAigQQ8FKEy9QQgAAEIACBrAhIxPvI9r+btaYGGk4QqTY/yGo/UdZp1vcs6JxFFLKSKgV1br3d3cu8foh5ErLqnZ8TLpN4Vl3pcR56Qao/34qRPqP4shgP/WiP+eWBV8zO+7YsLrf7qX3mnXd/tyjoZbEP1oAABCAAAQhAIB0CCHjpcGVWCEAAAhCAQOYEFmz60WHbA65axJP5w/jxucz3E3RBJzbOzC4k5qjqTA5OWCZ5d2lNSrj0805jzqDnGeY617cxyZJvN2fULM4w+693beWZ7s1MvFPPu+u++g3z+MPfMeeetWJxWxvu2u5l5K2/6bokwmIOCEAAAhCAAARaSAABr4XwWRoCEIAABCCQNAGJeB/ZEsJpK4ZpxC1NTHp/1fNVxJaS5zCbhsgYxhAh7Vir53eZgrPWeCINkdGVIzdy8M065qxERn8WZpg+iknwcK+5nu6uJKYLNMeux5426n+35favL17/7L4D5i/u+YF5+tHvYWYRiCIXQQACEIAABPJNAAEv3+fD7iAAAQhAoA0I6IP1q6+/ZT53yYWZfZCWI+nkdCXrzokZUzPpiGRRjyirLLGs1gnDISthNat1wsSua7M4E2Vhqp9iUCfjsDHUur4V4p32US3g6T1H/fB2bLvTrLbvOwwIQAACEIAABIpPAAGv+GdIBBCAAAQgkGMCux7fa3b97ClzjS1j2/3k8+a2W7+cmSukX8STmLHClvQpM29scralxJygOGezBccmZs28nAdSHspGG7GZfodsifHcyT6BKS9Zd/pKv7uSPYcZI6fdtIfOXs628zbuNDL9wuzflTbrnqzMJhzvtDMRS7bX3WmWc5aZd469BLs7rGHFtVdfbs498wzz/Z27zTprXOHMLMKcEddCAAIQgAAEIJBPAgh4+TwXdgUBCEAAAm1AwPWlciVs+pB9q+1JdZt1iMzKFVJlqeMnBTtncOCJJ1Y4a8Uo93SZ0cH0SmYbxSQzAYmYErFUstuKkUXmWb24nMHD4bGZTETT6n20MhvQGWVIME1DwJZ4p553XXKuaNHQ+80e+0sCvc9IvFt5wXkt2gnLQgACEIAABCCQBgEEvDSoMicEIAABCEDAEtj/4kHzkM2EecTnCqm+VNt3/MT7nr/ZfJrA/CKe1hkdKJllVmjIKgPKxebEq1Y6hLZKRPJnnmWVdVjrmVI2mspKdQYztvdeVqNV3P3xOQG7ZEVkxZ9U5md/b48nSjMgAAEIQAACEIBAmgQQ8NKky9wQgAAEINDxBOQMWS3Wbd3+sMdl
25aNmfGpFvGcuYNKSk+kXMEq8WbYioZKTlJZb1LCSVR4foODNLKxqveVB/HKvyeXBTkxNWcmptLPRKysV7YmJa3LfPTHn6SIOWTLsiVMMyAAAQhAAAIQgEDaBBDw0ibM/BCAAAQg0NEEXA88ldG6cfC1N829D/50SWZeFpCqRTwJD33l7kSzkarjcKWLabnMRuXmsrHSzkSsuOyWrVA2m4lYFpRHViJmpf9cj/eMtbr3YC0RM85ziXgX9GnjOghAAAIQgAAEkiCAgJcEReaAAAQgAAEINCCwwfa9u+LSlYsN5fe/9Kr5/iM/Nzt9pbVZAZy0vd9UOutGmj3Z3NzKcsvCrCEKQ5eJmGRJZRZso8RafY8TMbtt/7Y0MiPTfLaSiF8ipit91fmHyURFvEviBJgDAhCAAAQgAIEwBBDwwtDiWghAAAIQgEAEAmoq78pmJeSpN943N99sVn7mUxFmi39L2iKehBGJN0n3Gosfee0ZkhaaJIypz1xvKdlea2nHn2SWXJrCaNIcwvZmHLHl4MoqZEAAAhCAAAQgAIEsCSDgZUmbtSAAAQhAoKMJKPPu4G/eMNdcdVlmBhb1gMvA4EPb/27hZP+75b0yN+gxcR1K89bvLegDV4lf5g7TsUo9XfxpuZ0GjSfsda7UN26fuqxKc8PG1+x69ek7fajSp69eX0D1cFQvP5WdMyAAAQhAAAIQgEDWBBDwsibOehCAAAQgAIGcEFBPskNHp04R8aJmYrl+Z+q1p95iRRvq17diuNcctSW/Ufbv+v1lZQ6RNN+44mvc+5OOJ+x8TnycteK2ngF/Sa3EuxUjfUZnzIAABCAAAQhAAAKtIICA1wrqrAkBCEAAAhDICQGJeIdtJp5zhnWOoWEy0VzJqLK44mbwtRpLVBEqbBlmq+Ost77OUploysxUr8SgfeGKLl76eaj81/8su2eip7srr8fGviAAAQhAAAIQ6AACCHgdcMiECAEIQAACEGhEYG5+YYnwVhFjer1ywmaZaEUtGW3EI2wZaLXg0w5PW5iYXPmxBD+VZrfDcDEdn54z+jPiXTucKjFAAAIQgAAEik0AAa/Y58fuIQABCEAAAokQqC3iqSdY/XLYSslsyfYMq983LJHNtWAS59C6zNZO1stE8wt9EjuDZqu1IJxIS+p8K30B6wtzSRuARNpoSjcpG3XFSG9Ks7fvtGPHJs2z+w6Y1ZdcuNjrU0Y+r77+lrnmylXtGziRQQACEIAABFImgICXMmCmhwAEIAABCBSFQLWI57LravV0C5OhVZT4a+2znpuqKxlVhqJEznYdjUpj21m8K9ks1DNsP8QuNb9jhCbgXLe3bdloJOjdtPHbZv1Xrjfrb1wTei5ugAAEIAABCECgQgABjycBAhCAAAQgAIFFAhLxPrIZV7O2N55GdU84f8lsO2ad1XoUqoUqZ9YxZo0O5Dbb7qO6pFjZiXJj1QjTJ68onOQyOzpQQryLcWDKuLv1ru3mtg3rzCuvvWnGrYgnMY8BAQhAAAIQgEB0Agh40dlxJwQgAAEIQKAtCSxYBwMZW1SLeDOzC6Zc6rIls3P2q3gus3EOy/VEm5qZX2JwEGfOIt3rSopLtqxUf27XzMPBvh4zbMU7RnwC+1961Wzeer8ZHuw3jz38He+/DAhAAAIQgAAEohNAwIvOjjshAAEIQAACbUtAIt5H1pRg2op2GiolHVzeY47ZclFlnnXacFloErCOTsx0nICp8xaDT5zsCXfICrxyMG6nMWSfcWVbMpIhcND2vLvxz/6d1wfv6Ue/l8ykzAIBCEAAAhDoYAIIeB18+IQOAQhAAAIQaEZAYt1yW1I4a91Fj9nMu9Ns6aT+fGSic0Q8fxnxpO15d8ZwuW0z0Oo9DzJ0UNlspWz6hBV0K39u5lLc7PnKy88R75I9CX/fu4O/ecMM2ey7Lbd/PdlFmA0CEIAABCDQYQQQ8DrswAkXAhCAAAQgEJaAykblRKrhSin1504Q8VzprF+s
qu4JF5Zn0a53Pf/0DLisu+reiEWLyb9fxLvkT2/7gz81B23vu533bTH+fnhrb7gq+cWYEQIQgAAEINAhBBDwOuSgCRMCEIAABCAQh4CcVsd9pbNq8t9tnTol6tiErLYcjVxWnZDZ6QxOHyobW21dWDMLZRX293an8vxKwFLmmUpIO2lIsLt7+8Pm/u/esdj37tl9B8yzz/3K/FubhUcvvE56GogVAhCAAASSJICAlyRN5oIABCAAAQi0MYFqEU8Clxw71Q+tnUQ857JqW75ZYWrWzEuhqjPUG7DXGntIyGx0XREfi6Cx6bq+cpc1PikOA53t6cO9RqXBaYz9Lx40W62IJQFPXw/4xKw01mNOCEAAAhCAAATanwACXvufMRFCAAIQ6GgCKuXS8Pdf2vX4XrP/wCveh2pGOAK1RDyVmRZJvGkUcZTS0EaZeuHo5uPqKCXCKrMdWl7yhMwZ2yMxz6Nyxr2mx2aQZjH0fqM+cNu2bMxiOdaAAAQgAAEIQKBNCSDgtenBEhYEIAABCFQIuEwYvwvihru2m2uvusysv+k6MEUg0K4inrLIRgcq5gwTU/OhyLheeUesc2/eBaykBUw3nwQxldROWLOTsPxCwY5xsRNoe7rTybyrtTWVlN648dvm7//6oRg751YIQAACEIAABDqdAAJepz8BxA8BCECgAwhc99VvmNs2rDNqoK6+VBLwJOjRiyn64cuNVWKVG0XPQkti/xKwVtjMrqO2V2AR3VmTEOD82XsVx9roz1jSd2Yp3smFddx+vWPFO2UBr7505WIWsAS9TuuLl/RZMh8EIAABCECgEwkg4HXiqRMzBCAAgQ4jsOfJ580zz7/glcw+9KM9Zmx8YklJbYfhSCzcahGvkoXWU6hyWvW7U+lnUn3sopTgJnYgMSbS2Y3YXnbqZ+icZqNO5ww+Sra/XF56A5asuHqazQ5MK/NOgt29J51X9Wf9ckBf6n+nbF/98kBGDtt3/GSxL55KahHyoj5l3AcBCEAAAhDoPAIIeJ135kQMAQhAoOMIKONFWXjKulMp245td5rVl1zYcRzSCHhqppKJ53wekhSC0tivf04ntk3NLJgxn8Nu3HWj9JCLu2ac+5PIPqy1vptXIl5cUTBOfBLv1POuS84VKQ29x9xqM3uvsWKdv9+mW27PU/vMPVa8e/zh73ii3cHX3zJ3bL3fe09iQAACEIAABCAAgSAEEPCCUOIaCEAAAhAoPAE5QqqkTR+09SG6E8ZDO3ebHz/2tBfqLV+53mz607WphC1x5tDRqUURr1KK2Wuzr+Jnc6WyYTupKxdVqat6+iU9XBZat2UhAStPpaT+WNMS79wacnkdHaz0FWxFWXF/b49dv5T08dacT5l3es3pPUbZda5E3/XAq/7FwRr7S4Wd920hCy+T02ERCEAAAhCAQPEJIOAV/wyJAAIQgAAEAhCQmYV6391265dTE7ICbCOzS+R8+fa7v/OygVx20DpbxrfJ9gJMY0jEO2zLL+dPpuLluR9clplhw7YsNany3CTPTQKjhDUNZVCmKTC2qqx4yLLXWWc99Nrb/cRzXsm+su30vrPygvOWZObtf+lVs9lm4GFskfXpsB4EIAABCECguAQQ8Ip7duwcAhCAAARCEOg084o77n7Ac9lV83wNJ+I5M48Q6AJfOje/sKT/nRNulOHWiuyrWhuXoCa32cNjM4tiY+AAI16YdpZb2G21QlCTYCiHWo0sMhJbJd65s1CJ7DlnnuFl4X3+S5uWmOYoU0/ltuu+cLVZf+OasMfH9RCAAAQgAAEIdCgBBLwOPXjChgAEINBJBCReqYT2ilUXtXX2nYQBDYkGcr7U8PfjciLeY7aEOC0H3noiXlqlqkGf41a7o1YMPkpettvM3ELQbSd+XSvEO38QWQiorRbvqg9NAp4/006vTf1CQeWzDAhAAAIQgAAEIBCUAAJeUFJcBwEIQAAChSQg58e/uOcHnhPkN205aVrCVSvhOAdMOe1q
7Lz/W2ZoYPmicYff6VKZecr8uebKValtOW8iXqtFKwfa9d1rVUaiMg+H+8vWsGPGyLijVWOgr9sM9KUjZo4MlOzc2ZfNNmKpvni/tCX8et2ptFa9OB+x4l07vhe16pliXQhAAAIQgEAnEEDA64RTJkYIQAACEGhrAuqxde3Vl3vleOq/9exzv/IEAgkHu5983vuzE/G23vtDr6x23fVXpsokLyJeRSzq8VxmWylaOditEhOz7PsX5MFK2kREBrPDA2XTbzMd8zjU8+4Z+7q8yPbCW2t7UTIgAAEIQAACEIBAWAIIeGGJcT0EIAABCEAgRwScw6W/RE/ulnsf/Z63Syfibdl8sxkfn/BcabPK/lmwhhYytpi1Bhca6oO2YrjXCmnpOL9WH0vees/5RTz1g5ueXfCExbRH3jnM2pLio5ZDVCMNiXcrRvo8Z2EGBCAAAQhAAAIQaFcCCHjterLEBQEIQAACHUNAPbVcrzvX78/fX2uPzcJTee051hFTxhb+ktq0IdUS8SRezVtR78hEOuKVstxGB0tGhrhpO6xG5Scxc8QaanRb0SlNU4dRW1Ja6uny1nAOwVH3nMZ9jkPUPbqMxp7urjS2x5wQgAAEIAABCEAgNwQQ8HJzFGwEAhCAAAQgEJ/A7qf2mYO/eWNR0FN/vFb32pKINzY5Zyan5xYDlLCkESfzqhatVpWoRj25tLLj/KYdWWT5RY3f3RelxBfxLi517ocABCAAAQhAoEgEEPCKdFrsFQIQgAAEINCEgLLxVv7L870ed+qHt+tnT5mnT5bTthrekWOzp4h4yrw6ZMtso5ZP+mNyTq/jx2eNXG+LMpIW8YomYrpzKttnYXSwbCamZu1X4/NDvCvK080+IQABCEAAAhBIigACXlIkmQcCEIAABCCQAwJymd20YZ3ZZXvdaeTNeXfMls0em/o4E0/iVV+5O7aIl7QIlvVROvFRJb8ztidc1OHMISYs42YiWNQ10rzPCXMyHKmXOViyZcdn2F6KXWp+x4AABCAAAQhAAAIdQgABr0MOmjAhAAEIQKAzCEjA++WLB80tttedhLw8jvHjc2bcZ94QR3xTDzVlbUnLUYZfHvu8BT2DuOKbc9xVv7u5k8YhQdfO03U6U/VJ1KjuDyixV+XXiHd5OjH2AgEIQAACEIBAFgQQ8LKgzBoQgAAEIACBjAg8u++AZ1ax8jOfymjFaMskIeIVtVS0EbGoMcURQaOdYPp3VcfU39vjmZMwIAABCEAAAhCAQCcSQMDrxFMnZghAAAIQgEAOCFSLeC6D7PBYc8fUvnKXGbYurkUtFW0m4ikDbXq2fhmp/35x6C3l12k2zqPmSotVVtzf2x1nKu6FAAQgAAEIQAAChSaAgFfo42PzEIAABCAAgWITkAB31PbFc6Mi2PR4pZP1ykCjOJYWjZLKSEesMNdt+71Vl5G6WFz5sP6u3nlJGIHkkZNKpBHv8ngy7AkCEIAABCAAgSwJIOBlSZu1IAABCEAAAhA4hcCkdYyVALVUxCtZ4Wp6iYgnwWpoeftmm9V6NOqVxkYttS3a4zdkRUwxYEAAAhCAAAQgAIFOJ4CA1+lPAPFDAAIQgAAEckBAIt7YxIxZOFHZTMXQodcT9lQ+GcSdNAdhpLKFahGvU8Q7Mu9SeZyYFAIQgAAEIACBghJAwCvowbFtCEAAAhCAQLsRUMnsoaNTS0S8FcO95pgts1UJ5XEr8qlvXicO1wtOJccDfT1mbHLGTM0stCUKOQqfbs+93NPVlvERFAQgAAEIQAACEIhCAAEvCjXugQAEIAABCEAgFQLVIp4MGgZtCeWkFa6O+HrlpbJ4zicdHSiZfiveHbMi5tjkx30Dc77tUNurZBf2ehmYDAhAAAIQgAAEIACBjwkg4PE0QAACEIAABCCQKwJz8wtGTrTKNJPb7JFjs2Z0sNTRGXiujLadWbjS4J5uMu9y9YJkMxCAAAQgAAEI5IIAAl4ujoFNQAACEIAABCBQTWBq
Zt58dNJdtVP6vlUzcG60JVtOemhs2nOaFYvTh8pmenahbTLxEO94/UMAAhCAAAQgAIHGBBDweEIgAAEIQAACEMglAZeJN3/S2cIJVxL2OqEXXiOhzgl73bbU9MPxGU/YK+pAvCvqybFvCEAAAhCAAASyJICAlyVt1oIABCAAAQhAIBSBahFPwpWMLdop+6wWkKAZh9UOtaHg5uDikhUg1fOuS84VDAhAAAIQgAAEIACBugQQ8Hg4IAABCEAAAhDINYEFm4F32JaPzlqXWg2JeCohnbd/b0djCxk4KD45zk5MzTc9GyfiKRNPJiBFGf29PV5vwyzH2LFJMzzYn+WSrAUBCEAAAhCAAAQSIYCAlwhGJoEABCAAAQhAIE0C1SKe1pIr6zKbuXXkZJ+8NNfPau6Bvm7PvCOsGLe8t9sMLS95LGbmFrLabuR1Bm2Mw/b80h4S7P7hpVfNM8/9yux/8aC33GMPfwcRL23wzA8BCEAAAhCAQOIEEPASR8qEEIAABCAAAQikQUAi3tjknJmcnlucfri/ZHpLHxs8pLFuVnPGLYcNm7mXVVzV6wzZM1OsaQ2JdnuefN48+/wL3hKrL13pfW3d/rBZ/5Xrzfob16S1NPNCAAIQgAAEIACB1Agg4KWGlokhAAEIQAACEEiDwJFjs0tEPIlBfeVuL2vNGV6ksW6aczohMm4MQXvnpRlLo7nTFu+09tvvHTK33rXdbNl8s7nmylXedjbf/YA596wVZsvtX1/cHuW0rXoKWBcCEIAABCAAgSgEEPCiUOMeCEAAAhCAAARaSmBsYtYcsz3i3IibvdaqYFw/PxntJlUK7Iw+Zm0pbZ56BI7YklmVB2cxJOLdYUW7dV+42oyPT5hnbDbeI/dt8UpnlZ33/Z27jboFStTbtmWj918GBCAAAQhAAAIQyDMBBLw8nw57gwAEIAABCECgLoHx43NmfHK2sCJemtlyEvFGbKlqtzXEUFbfiRZ6W8hgdnigbPptn74shzLsbtr4bS8j7+lHv+eJdLse32t2/ewpT8zT35/dd8B8/5Gfm8dtXzwGBCAAAQhAAAIQyDMBBLw8nw57gwAEIAABCBSQwEM2u+nnNstJAskttueYK2NMI5RqEc+ZQBwey3c5bZrinZ9zqzMTJd6tGOkz6s+X9ZBwt8GW0l6hHnirLvKex81b7zc77/+WWfmZTy1u5/Nf2mT+/q8fynp7rAcBCEAAAhCAAARCEUDAC4WLiyEAAQhAAAIQaERAGU5vv/s7r9fYfuv+ufWeH5h1N1xlNm1Ylxq4ahGv4sga3sk1tQ1WTdxX7jLD/WVryDFjpmbSd4yN6mwbl4cTKXu6u+JOFen+g6+/ZV597U2z9vorvfsl5l179eVLTCz0jG7f8RMy8CIR5iYIQAACEIAABLIkgICXJW3WggAEIAABCLQ5gTVf/YbZebI8UaE6Q4EHtt25JOspaQwTth/eUdsXzw0nkn04Pm3m5ltYP1oVaKsy4iqiZsnrszdje+OlPVot3lXH57Lx9tpSWjdciS3OtGk/DcwPAQhAAAIQgEASBBDwkqDIHBCAAAQgAAEIeATk/qmMu7X2yw1nGqA+ZGmOyel5T6ByQ2Wbpw/1mvHjs+a4/VmrR6vEu6U8ykZi58RUejzyJt4pficku2dQ4p2e1ZUXnOeZWDAgAAEIQAACEIBA3gkg4OX9hNgfBCAAAQhAIMcEXJmsMwVwYp37u9u6xBKV0a62/cjSHBLxxiZmjFxdNSoiXtmKeHMtE/HyZCiRdu+9kuV9muXdqrLZRs+WejNKyJNot+uxp801V13mlXozIAABCEAAAhCAQBEIIOAV4ZTYIwQgAAEIQCCnBOTiqR5iQ4P9i33Etj/4U7P/xYNL+optvfeH5lormKRpaOEQqWT20NGpRRHPiVZpZ57VOiKtLQFxenbB9rz7uMS3lccpQXHFcK+ZtaW0R3xlx3H31FfuNqMDJdMl54qcDj2vB3/zhmdo4c8Szel2
2RYEIAABCEAAAhBYJICAx8MAAQhAAAIQgEBkAhLq9PWKNQuQKOIymmQYoHHbrV8271hTix/bjCdl5Q1boS+LUU/EUymtsvGyGGlnu8WJIemswP7eHjM6WIqzJe6FAAQgAAEIQAACEGhAAAGPxwMCEIAABCAAgcgEJN7tfmqf+eZtXzM3bfy21/9uaGjAc/78z8+/YJ6xX8rOU/msBL4sx9z8gjk8NmPmT9bTZimoudLdVmT9hWGcRF++of6S5/rLgAAEIAABCEAAAhBIjwACXnpsmRkCEIAABCDQ9gTUU+xeWzL7wHfv8DLxlHknES8vxgC1RDyVtE7NpJeJN9DXbQb6esyH4zO5csCt9zDG2S/iXdu/xAkQAhCAAAQgAIGcEEDAy8lBsA0IQAACEIBAEQnIzfPOux8w37VOnnfY/1579eVm9xPPeeWyWWfc1eNXLeKl1QNO6yeR0daK52B5b7fde8lz8Z2xvfGCDMS7IJS4BgIQgAAEIAABCCRDAAEvGY7MAgEIQAACEOhYAp//0iZPrFMWnv67x5bUfu6SC3Mj4Olgaol4I7b0UyMpI4dhO19vqcvLvHNlu0V6KMKU/Y4Olk2/Ff0YEIAABCAAAQhAAALZEEDAy4Yzq0AAAhCAAATalsCeJ583n7t0Za4Eu1qwJeJ9ZMW1WetS64ZcU7u7l3mi24mPvx3qrPwZfUet02zUeUItmtLFzfoEymBW4p0cZxkQgAAEIAABCEAA5qjT+AAAB1NJREFUAtkRQMDLjjUrQQACEIAABCDQYgIL1tDi8Nj0EhHPZc4dst8PK741E7xaHG6k5euVGFdi7TXK1GNAAAIQgAAEIAABCGRLAAEvW96sBgEIQAACEIBAiwlIxFPZrIws3IjSu64dxTvHQyKeSoxLPV1GwmaX/cYZw2Ur3nW1+PRYHgIQgAAEIAABCHQmAQS8zjx3ooYABCAAAQh0PIEjx2bN5PRcJBGvr9xlhvvLZmxyxgqBwUwfighcwmZ/b485Yf+HeFfEE2TPEIAABCAAAQi0CwEEvHY5SeKAAAQgAAEIQCA0gSgiXpRsvdAby8kNyjL8xEiv6VLzOwYEIAABCEAAAhCAQMsIIOC1DD0LQwACEIAABCCQBwLjx+fMuDWfcGO5dVeVSHd47FQ32U4S70q215163iHe5eEpZQ8QgAAEIAABCHQ6AQS8Tn8CiB8CEIAABCAAAVNLxFMPOPV/m7OuteoJJ/dVJaLFcawtCmqVzY4OloqyXfYJAQhAAAIQgAAE2p4AAl7bHzEBQgACEIAABCAQhEC1iFe2Bg4S7cYmZszwQMnrdTfmy9QLMmcRrxns6/HiZUAAAhCAAAQgAAEI5IcAAl5+zoKdQAACEIAABCDQYgLVIl5fqcucbstIj1vH2o/GZ1q8u/SXH7JZhyoTZkAAAhCAAAQgAAEI5IsAAl6+zoPdQAACEIAABCDQYgKT0/PmyLEZ67q6zKyw4t2xqTnrxNrtldketz9r14F4164nS1wQgAAEIAABCLQDAQS8djhFYoAABCAAAQhAIFECM3MLi/3u1ANPbqxnDJc9AU9CXruNEVsyO2BLZxkQgAAEIAABCEAAAvkkgICXz3NhVxCAAAQgAAEItJiAhLtDR6fMwonKRtpVxFOfP2UYMiAAAQhAAAIQgAAE8ksAAS+/Z8POIAABCEAAAhBoMYF2FvHkqLtipM8rFWZAAAIQgAAEIAABCOSbAAJevs+H3UEAAhCAAAQg0GICEvEOj02b+ZOpeMskfNneeFPW2KKo5bQum7Cnuyt1us/uO2CGBvvN6ksuTH0tFoAABCAAAQhAAALtSgABr11PlrggAAEIQAACEEiMwNz8ghXxZk4R8WZtr7wjE7OJrZPFRFmJdxLutu/4iVl5wXnmnLNWmP0vHjSP3LfFDFsxjwEBCEAAAhCAAAQgEI4AAl44XlwNAQhAAAIQgECHEqgW8YRh1Jo/aBydnDUnTvbKyzOerMS7PU/tM/dY
8W7HtjsXM++2P/hTT7zbtGFdnhGxNwhAAAIQgAAEIJBLAgh4uTwWNgUBCEAAAhCAQB4J1BPxSj1d5pAts82ziFeyve5OGyrbnnfpls2+/d4hc+PGb5trr7rMK509+Nqb5oHv3mFeff0tLwtv05+uzePRsicIQAACEIAABCCQawIIeLk+HjYHAQhAAAIQgEDeCEjE+2h8xsza3nhuDC3vMX3l7tyKeNqbsgW75FyR8njoR3vM2PiE2XL7172Vdj2+1xz8zRtm25aNiytL5JO4RzltyofB9BCAAAQgAAEItA0BBLy2OUoCgQAEIAABCEAgKwIL1tBCxhbVIt7y3u4lvfKy2k+jdfp7e8zoYKXUN4tx613bvTLZ1Zeu9JZTL7zdTzznZeFJuNPPJd7pz7fd+mWz/sY1WWyLNSAAAQhAAAIQgEChCSDgFfr42DwEIAABCEAAAq0iIBHvo2MzZnp2YXELysTLk4g31F8y2lOWQxl4qiWWiOcEu/Vfud5cc+UqT7xbd8NV3s/Gjk16f1emnhP7stwna0EAAhCAAAQgAIEiEUDAK9JpsVcIQAACEIAABJoSkGi058nnzepVFy0aKDS9KcYFR47NmsnpudyJeK0Q7xyErdsfNr+0/e7GrUh3y03XmT+xXzfZvnhOvHPXydjiXOtQu97+nAEBCEAAAhCAAAQgUJ8AAh5PBwQgAAEIQAACbUNA/dZ2/ewpTyjaLRHPlnH6e6+lFWi1iKcsPGW+fWh75c35euWltX71vK0U79xelGGnoT53ysp7xfbB22HLaP0/l6i37Vt/nonQmhV71oEABCAAAQhAAAJpEEDAS4Mqc0IAAhCAAAQgAAEIQAACEIAABCAAAQhAICECCHgJgWQaCEAAAhCAAAQgAAEIQAACEIAABCAAAQikQQABLw2qzAkBCEAAAhCAAAQgAAEIQAACEIAABCAAgYQIIOAlBJJpIAABCEAAAhCAAAQgAAEIQAACEIAABCCQBgEEvDSoMicEIAABCEAAAhCAAAQgAAEIQAACEIAABBIigICXEEimgQAEIAABCEAAAhCAAAQgAAEIQAACEIBAGgQQ8NKgypwQgAAEIAABCEAAAhCAAAQgAAEIQAACEEiIAAJeQiCZBgIQgAAEIAABCEAAAhCAAAQgAAEIQAACaRBAwEuDKnNCAAIQgAAEIAABCEAAAhCAAAQgAAEIQCAhAgh4CYFkGghAAAIQgAAEIAABCEAAAhCAAAQgAAEIpEHg/wcoeTZSXCmxAAAAAABJRU5ErkJggg==", - "text/html": [ - "
" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "# Let's try 3D!\n", - "\n", - "tsne = TSNE(n_components=3, random_state=42)\n", - "reduced_vectors = tsne.fit_transform(vectors)\n", - "\n", - "# Create the 3D scatter plot\n", - "fig = go.Figure(data=[go.Scatter3d(\n", - " x=reduced_vectors[:, 0],\n", - " y=reduced_vectors[:, 1],\n", - " z=reduced_vectors[:, 2],\n", - " mode='markers',\n", - " marker=dict(size=5, color=colors, opacity=0.8),\n", - " text=[f\"Video: {t}
Text: {d[:100]}...\" for t, d in zip(video_numbers, documents)],\n", - " hoverinfo='text'\n", - ")])\n", - "\n", - "fig.update_layout(\n", - " title='3D Chroma Vector Store Visualization',\n", - " scene=dict(xaxis_title='x', yaxis_title='y', zaxis_title='z'),\n", - " width=900,\n", - " height=700,\n", - " margin=dict(r=20, b=10, l=10, t=40)\n", - ")\n", - "\n", - "fig.show()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9b3ada26-b4b7-42fc-b943-933c14adf89b", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1440654f-590f-4781-bd1c-abfc6ca6edf1", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.11" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/week5/community-contributions/day4 - chat with Eds subtitles on LLM engineering.ipynb b/week5/community-contributions/day4 - chat with Eds subtitles on LLM engineering.ipynb new file mode 100644 index 0000000..46622e0 --- /dev/null +++ b/week5/community-contributions/day4 - chat with Eds subtitles on LLM engineering.ipynb @@ -0,0 +1,4323 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "dfe37963-1af6-44fc-a841-8e462443f5e6", + "metadata": {}, + "source": [ + "## Chat with Ed - the Expert on LLM engineering\n", + "This project will: \n", + "- use subtitle files from Ed Donners excellent LLM engineering course on Udemy.\n", + "- use Document loading using Langchain\n", + "- use Vectorization, embeddings and store vectors in a Chroma DB\n", + "- use RAG (Retrieval Augmented Generation) to ensure our question/answering assistant has 
high accuracy.\n", + "\n", + "These subtitles can be downloaded using the following process:\n", + "- Using an android phone, download Udemy app and open the LLM engineering course. \n", + "- There is option to download the videos as single files or section wise. \n", + "- Download them and along with those videos subs or cc are also downloaded as .srt’s.\n", + "- Plug in your laptop to the android phone using USB and select file transfer in the notification.\n", + "- Open a file explorer and copy the subtitle files (srt format)\n", + "- Here’s the location of subs in android \"internal storage/android/data/com.udemy.android/files/udemy-subtitle-downloads\"\n", + "\n", + "the raw srt files are stored in the folder \"subtitles/srts\". The code below will use the langchain textloader but will preprocess the srt files to remove the timestamps.\n", + "\n", + "### Note: this is only for educational and testing purposes and you should contact Ed Donnner to seek his permission if you want to use the subtitles.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "ba2779af-84ef-4227-9e9e-6eaf0df87e77", + "metadata": {}, + "outputs": [], + "source": [ + "# imports\n", + "\n", + "import os\n", + "import glob\n", + "from dotenv import load_dotenv\n", + "import gradio as gr\n", + "import re" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "802137aa-8a74-45e0-a487-d1974927d7ca", + "metadata": {}, + "outputs": [], + "source": [ + "# imports for langchain\n", + "\n", + "from langchain.document_loaders import DirectoryLoader, TextLoader\n", + "from langchain.text_splitter import CharacterTextSplitter\n", + "from langchain.schema import Document\n", + "from langchain_openai import OpenAIEmbeddings, ChatOpenAI\n", + "from langchain_chroma import Chroma\n", + "import numpy as np\n", + "from sklearn.manifold import TSNE\n", + "import plotly.graph_objects as go\n", + "from langchain.memory import ConversationBufferMemory\n", + "from 
langchain.chains import ConversationalRetrievalChain" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "58c85082-e417-4708-9efe-81a5d55d1424", + "metadata": {}, + "outputs": [], + "source": [ + "# price is a factor for our company, so we're going to use a low cost model\n", + "\n", + "MODEL = \"gpt-4o-mini\"\n", + "db_name = \"vector_db\"" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "ee78efcb-60fe-449e-a944-40bab26261af", + "metadata": {}, + "outputs": [], + "source": [ + "# Load environment variables in a file called .env\n", + "\n", + "load_dotenv()\n", + "os.environ['OPENAI_API_KEY'] = os.getenv('OPENAI_API_KEY', 'your-key-if-not-using-env')" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "730711a9-6ffe-4eee-8f48-d6cfb7314905", + "metadata": {}, + "outputs": [], + "source": [ + "# Read in documents using LangChain's loaders\n", + "# Take everything in all the sub-folders of our knowledgebase\n", + "\n", + "folders = glob.glob(\"subtitles/srts/*\")\n", + "\n", + "# With thanks to CG and Jon R, students on the course, for this fix needed for some users \n", + "text_loader_kwargs = {'encoding': 'utf-8'}\n", + "# If that doesn't work, some Windows users might need to uncomment the next line instead\n", + "# text_loader_kwargs={'autodetect_encoding': True}\n", + "\n", + "def preprocess_srt_content(content):\n", + " \"\"\"\n", + " Preprocess the content of an SRT file to remove timing information and the WEBVTT header.\n", + " \"\"\"\n", + " # Remove the WEBVTT header\n", + " content = re.sub(r'^WEBVTT\\s*', '', content, flags=re.IGNORECASE)\n", + " # Remove timing lines (e.g., 00:00.680 --> 00:08.540)\n", + " content = re.sub(r'\\d{2}:\\d{2}\\.\\d{3} --> \\d{2}:\\d{2}\\.\\d{3}', '', content)\n", + " # Remove extra newlines and strip leading/trailing whitespace\n", + " return \"\\n\".join(line.strip() for line in content.splitlines() if line.strip())\n", + "\n", + "documents = []\n", + "for folder in 
folders:\n", + " video_number = os.path.basename(folder)\n", + " loader = DirectoryLoader(folder, glob=\"**/en_US.srt\", loader_cls=TextLoader)\n", + " folder_docs = loader.load()\n", + "\n", + " for doc in folder_docs:\n", + " # Preprocess the document content\n", + " cleaned_content = preprocess_srt_content(doc.page_content)\n", + " # Replace the original content with the cleaned content\n", + " doc.page_content = cleaned_content\n", + " # Add metadata\n", + " doc.metadata[\"video_number\"] = video_number\n", + " documents.append(doc)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "7310c9c8-03c1-4efc-a104-5e89aec6db1a", + "metadata": {}, + "outputs": [], + "source": [ + "text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=200)\n", + "chunks = text_splitter.split_documents(documents)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "cd06e02f-6d9b-44cc-a43d-e1faa8acc7bb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "217" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(chunks)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "2c54b4b6-06da-463d-bee7-4dd456c2b887", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Video numbers found: 59507785, 59472503, 59170107, 60616493, 59504887, 59297735, 59472429, 59170291, 60595637, 59473019, 59472441, 59295423, 59170043, 59472067, 59295363, 59472425, 59297723, 59473137, 59473159, 59669375, 59472011, 59295431, 59673721, 59473101, 59167015, 59670087, 60619429, 59667365, 59673639, 59169985, 59507489, 60620143, 59505329, 59670369, 59295549, 60395261, 59668181, 59671231, 60619281, 59506713, 59472491, 59295579, 59167007, 59167009, 59666211, 59673431, 59671567, 59170055, 59472017, 59473021, 59297599, 59472027, 59166947, 59473201, 60619123, 59472873, 59295601, 60614591, 60614541, 59472007, 59507313, 60619721, 
59297595, 59472693, 59295527, 60619501, 59166981, 59166421, 59507423, 59170165, 59166951, 59170227, 59673663, 59670121, 59166453, 60616845, 59471979, 59670171, 59503705, 59668923, 60617163, 60616629, 59297693, 59166915, 60617259, 59166847, 59295459, 60619439, 59297593, 59295619, 59472883, 59295439, 59670933, 60619651, 59670073, 59166465, 59295429, 59669631, 59170233, 59472333, 59507635, 60619227, 59667829, 59166353, 60614589, 59295599, 59507687, 59671441, 59170057, 59670259, 59170235, 59472307, 59472421, 59667841, 59667357, 59166949, 59170297, 59504785, 59170093, 59166443, 59673595, 59669211, 60620025, 59297773, 60619883, 60616423, 59295493, 59166461, 60616855, 59297601, 59295435, 59673449, 59503703, 59472505, 59295377, 59166281, 59507435, 59297575, 59504769, 59170037, 60622463, 59508289, 60616663, 60616895, 60620375, 60619247, 59665129, 59170135, 59297743, 59169991, 59506929, 60616407, 59508297, 59297603, 60616927, 60617255, 59295441, 59668027, 59297609, 60620169, 59472383, 59297585, 60616623, 60617251, 59666831, 59295553, 59473191, 59473089, 59669217, 59508175, 60616833, 59297749, 59295609, 59295545, 59669389, 59170025, 60619619, 60620397, 59166481, 59295541, 59297561, 59166919, 59507329, 59506611, 59170223, 60619447, 59166317, 59473071, 60619299, 59507017, 59509185, 59170255, 60619577, 59671221, 60619289, 59508121, 59295583, 60619149, 59665127, 59473147, 59295451, 59271655, 59472137, 59295607, 59669049, 59295587, 59472463, 59506507, 59472413, 59297721, 59508057, 59508055, 59671315, 59297733, 60619275, 60620395, 59505337\n" + ] + } + ], + "source": [ + "video_numbers = set(chunk.metadata['video_number'] for chunk in chunks)\n", + "print(f\"Video numbers found: {', '.join(video_numbers)}\")" + ] + }, + { + "cell_type": "markdown", + "id": "77f7d2a6-ccfa-425b-a1c3-5e55b23bd013", + "metadata": {}, + "source": [ + "## A sidenote on Embeddings, and \"Auto-Encoding LLMs\"\n", + "\n", + "We will be mapping each chunk of text into a Vector that represents the meaning of 
the text, known as an embedding.\n", + "\n", + "OpenAI offers a model to do this, which we will use by calling their API with some LangChain code.\n", + "\n", + "This model is an example of an \"Auto-Encoding LLM\" which generates an output given a complete input.\n", + "It's different to all the other LLMs we've discussed today, which are known as \"Auto-Regressive LLMs\", and generate future tokens based only on past context.\n", + "\n", + "Another example of an Auto-Encoding LLMs is BERT from Google. In addition to embedding, Auto-encoding LLMs are often used for classification.\n", + "\n", + "### Sidenote\n", + "\n", + "In week 8 we will return to RAG and vector embeddings, and we will use an open-source vector encoder so that the data never leaves our computer - that's an important consideration when building enterprise systems and the data needs to remain internal." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "78998399-ac17-4e28-b15f-0b5f51e6ee23", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Vectorstore created with 217 documents\n" + ] + } + ], + "source": [ + "# Put the chunks of data into a Vector Store that associates a Vector Embedding with each chunk\n", + "# Chroma is a popular open source Vector Database based on SQLLite\n", + "\n", + "embeddings = OpenAIEmbeddings()\n", + "\n", + "# If you would rather use the free Vector Embeddings from HuggingFace sentence-transformers\n", + "# Then replace embeddings = OpenAIEmbeddings()\n", + "# with:\n", + "# from langchain.embeddings import HuggingFaceEmbeddings\n", + "# embeddings = HuggingFaceEmbeddings(model_name=\"sentence-transformers/all-MiniLM-L6-v2\")\n", + "\n", + "# Delete if already exists\n", + "\n", + "if os.path.exists(db_name):\n", + " Chroma(persist_directory=db_name, embedding_function=embeddings).delete_collection()\n", + "\n", + "# Create vectorstore\n", + "\n", + "vectorstore = 
Chroma.from_documents(documents=chunks, embedding=embeddings, persist_directory=db_name)\n", + "print(f\"Vectorstore created with {vectorstore._collection.count()} documents\")" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "057868f6-51a6-4087-94d1-380145821550", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The vectors have 1,536 dimensions\n" + ] + } + ], + "source": [ + "# Get one vector and find how many dimensions it has\n", + "\n", + "collection = vectorstore._collection\n", + "sample_embedding = collection.get(limit=1, include=[\"embeddings\"])[\"embeddings\"][0]\n", + "dimensions = len(sample_embedding)\n", + "print(f\"The vectors have {dimensions:,} dimensions\")" + ] + }, + { + "cell_type": "markdown", + "id": "b0d45462-a818-441c-b010-b85b32bcf618", + "metadata": {}, + "source": [ + "## Visualizing the Vector Store\n", + "\n", + "Let's take a minute to look at the documents and their embedding vectors to see what's going on." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "bf021654-a60b-4905-bdb5-d4517bd0c297", + "metadata": {}, + "outputs": [], + "source": [ + "# Convert the video numbers into unique colors that we can visualize\n", + "import hashlib\n", + "\n", + "def video_numbers_to_hex_colors(video_numbers):\n", + " return [f\"#{hashlib.sha256(v.encode()).hexdigest()[:6]}\" for v in video_numbers]" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "b98adf5e-d464-4bd2-9bdf-bc5b6770263b", + "metadata": {}, + "outputs": [], + "source": [ + "# Prework\n", + "\n", + "result = collection.get(include=['embeddings', 'documents', 'metadatas'])\n", + "vectors = np.array(result['embeddings'])\n", + "documents = result['documents']\n", + "video_numbers = [metadata['video_number'] for metadata in result['metadatas']]\n", + "colors = video_numbers_to_hex_colors(video_numbers)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "427149d5-e5d8-4abd-bb6f-7ef0333cca21", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + " \n", + " " + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.plotly.v1+json": { + "config": { + "plotlyServerURL": "https://plot.ly" + }, + "data": [ + { + "hoverinfo": "text", + "marker": { + "color": [ + "#f8d349", + "#d6d07a", + "#a958c9", + "#7341ee", + "#268bba", + "#4862ce", + "#dd8cd7", + "#6a6c06", + "#8a29da", + "#0d2037", + "#805527", + "#e69670", + "#75b5e3", + "#796278", + "#6d4052", + "#1f6ab0", + "#99fe53", + "#3f0a72", + "#fe8e92", + "#c3e1f2", + "#f645e0", + "#b43417", + "#e0a8df", + "#7740be", + "#43c2e8", + "#64f999", + "#2cde7f", + "#29fa15", + "#580c96", + "#10384a", + "#845aa9", + "#7f03bd", + "#2b3af3", + "#335dcf", + "#22398f", + "#c932c1", + "#d43c00", + "#e6f378", + "#08808d", + "#6a0fce", + "#e1b5db", + "#75195e", + "#6ff3c5", + "#4099c1", + "#b25d7b", + "#d65c3a", + "#9b9d6e", + "#fc2b74", + "#571122", + "#422abb", + "#efed10", + 
"#dfc6c7", + "#02cada", + "#3ec815", + "#8e8cab", + "#df5d2e", + "#c457d7", + "#ec0a37", + "#da28db", + "#2d7f7d", + "#b27d2e", + "#d01b19", + "#fb9dce", + "#35303c", + "#4f86b8", + "#fbfef2", + "#ca3592", + "#c1e3c5", + "#c97596", + "#091a90", + "#b280bb", + "#7b4427", + "#b2140a", + "#dbde1c", + "#7ea8e9", + "#539908", + "#8069bc", + "#d01f72", + "#4ce72d", + "#73e76a", + "#20f2c3", + "#996ff1", + "#91f4db", + "#d70d97", + "#3678a7", + "#5af098", + "#ae5204", + "#badd6d", + "#a9541c", + "#d4b1ce", + "#51d0da", + "#ff2d6a", + "#1c2c7e", + "#ae7afe", + "#d156c8", + "#480c89", + "#e2a239", + "#39821f", + "#7bee34", + "#92b4fa", + "#b9fd23", + "#591ab9", + "#0bdacc", + "#2a2d25", + "#dc152c", + "#ac9648", + "#6ad041", + "#fe62a5", + "#52b6df", + "#4aaf9f", + "#d34482", + "#2fef1a", + "#7dd58b", + "#987252", + "#94a85d", + "#2b9f18", + "#ee26df", + "#c6016b", + "#9df332", + "#9b5e28", + "#2ebca4", + "#1b312a", + "#2e1afc", + "#574e28", + "#ac55ba", + "#f090af", + "#5cb9ca", + "#2dcfac", + "#804ce2", + "#ce865d", + "#3e5237", + "#482281", + "#2ae342", + "#6df6ca", + "#85fa26", + "#793548", + "#bbfe83", + "#15ae86", + "#70d1d9", + "#bb0ee6", + "#a95826", + "#8afd40", + "#505bd9", + "#0c777d", + "#ed694d", + "#4e797a", + "#dc95ec", + "#612b32", + "#ad8b14", + "#474ff9", + "#71c500", + "#bd53b1", + "#11a70e", + "#144ada", + "#72e048", + "#188ca3", + "#b52bf6", + "#b64eac", + "#f59c06", + "#b1c27d", + "#ac5faf", + "#5b3f83", + "#108c41", + "#b61e76", + "#22463b", + "#c959de", + "#a64739", + "#659222", + "#0f8781", + "#2c168d", + "#0faf59", + "#68bece", + "#696eaa", + "#af0f59", + "#a9e927", + "#601568", + "#9780cf", + "#e01073", + "#dd889c", + "#046e5c", + "#c6eff5", + "#b3dba5", + "#426575", + "#913568", + "#de30e4", + "#50f10d", + "#9a5ba2", + "#cc8ec0", + "#79c82a", + "#9baca0", + "#1a5613", + "#246fa5", + "#cb725f", + "#682d42", + "#a03134", + "#d54222", + "#01f59b", + "#12897b", + "#74a788", + "#fcdcad", + "#048452", + "#3626a5", + "#4dfb77", + "#4212f1", + "#116019", 
+ "#ad6bd0", + "#a63fa4", + "#d24e5d", + "#1a6fdf", + "#6f745a", + "#cf7e83", + "#4b9a93", + "#799a24", + "#e6e164", + "#011995", + "#4c4355", + "#d937bd" + ], + "opacity": 0.8, + "size": 5 + }, + "mode": "markers", + "text": [ + "Video: 59506507
Text: Well, I realized that was a whole lot of theory, but I hope it gave you a good intuition that will\nb...", + "Video: 59671315
Text: Okay, so here we are, back in Jupyter Lab, ready for the next use of a frontier model, and see this\n...", + "Video: 60616895
Text: It feels like 100 videos ago that I told you that we were going to have instant gratification with o...", + "Video: 60619275
Text: And we will conclude our expedition into the world of frontier models through their chat interface b...", + "Video: 59472693
Text: Friends.\nI am absolutely exhausted.\nI am exhausted and a little tiny bit traumatized.\nAnd you are so...", + "Video: 59670121
Text: So it's business time right now.\nWe are going to build a Rag pipeline to estimate the price of produ...", + "Video: 59295619
Text: Welcome back to the the moment when we bring it all together into a beautiful user interface.\nBut fi...", + "Video: 60617163
Text: And already that wraps up day two.\nNow that you have built that solution.\nAnd congratulations on tha...", + "Video: 60616423
Text: So I hope you've just enjoyed yourself experimenting with different LMS locally on your box using th...", + "Video: 59170227
Text: Welcome back to Google Colab.\nHere we are ready to explore the wonderful world of Tokenizers.\nSo, uh...", + "Video: 59169985
Text: So I hope you enjoyed that whirlwind tour of Google Colab.\nHere's just a little screenshot example o...", + "Video: 60616927
Text: It's time for our first LM experiment at this point.\nSo some of this you may know well, you may know...", + "Video: 59673721
Text: And here we are in JupyterLab for the last time, and we are looking here at day five, the last day\no...", + "Video: 59508055
Text: I'm so very happy that you've reached this epic moment in the course and that you're hanging in ther...", + "Video: 59670259
Text: It's remarkable.\nBut you are now at the 95% point.\nThere's 5% remaining of this course.\nUh, maybe it...", + "Video: 60616623
Text: So we're now going to start week one of the course when we are going to be looking at exploring fron...", + "Video: 59472383
Text: And welcome back to the week six folder.\nWe're now at day two, which is the second and final stage o...", + "Video: 59670171
Text: So as the very final step on this part four of day two of week eight, we are now going to build an\ne...", + "Video: 59297721
Text: And so now the time has come to talk about the most crucial aspect of Rag, which is the idea of vect...", + "Video: 59297599
Text: Well, that was a sneaky detour I took you on in the last one.\nI hope you enjoyed it though, and I ho...", + "Video: 59507635
Text: Look, I hope you're excited.\nYou really should be.\nYou've been through 80% of the course and it's al...", + "Video: 59669375
Text: Here we are for the day.\n2.1 notebook.\nAnd don't let it be said that I don't ever do anything for yo...", + "Video: 59297733
Text: Welcome back to JupyterLab and welcome to your first experiment with the world of Long Chain.\nLet me...", + "Video: 59670369
Text: It is terrific that you're hanging on in there and making such great progress with this course.\nAs w...", + "Video: 59166281
Text: And with that, amazingly, you completed day one of week two already and that gets you to the 15% poi...", + "Video: 59671567
Text: Well, the first thing you're going to notice is that I don't have a notebook open for you.\nAnd that'...", + "Video: 59297593
Text: And welcome to continuing our journey with Hrag.\nAnd today it's time to unveil Liang Chen.\nSo first,...", + "Video: 59166461
Text: And welcome back to the lab.\nHere we are in Jupyter Lab and we are going to go into week two.\nAnd we...", + "Video: 59167007
Text: Well, how fabulous is that?\nI hope that you are as wowed as I am by our new airline, I assistant and...", + "Video: 59508121
Text: The moment has arrived.\nHere we go.\nWe're in fine tuning.\nWe do fine tuning.\nTrain.\nThere is also a ...", + "Video: 59295579
Text: All right.\nAre you excited to see how this goes?\nLet's give it a try.\nSo in this next section, I cre...", + "Video: 60620375
Text: And with that, we've reached an important milestone.\nThe first week of our eight week journey is com...", + "Video: 59472491
Text: Welcome back.\nIf you are following along with me in JupyterLab, as I hope you are, then you will nee...", + "Video: 59472425
Text: Welcome to week six, day three.\nToday is going to be a day that you will either love or you will hat...", + "Video: 59508057
Text: Actually slight change in plan.\nI'm going to wrap up the day.\nDay three at this point, and say that ...", + "Video: 60619577
Text: And for the final piece of background information, I wanted to take another moment to talk about API...", + "Video: 59170291
Text: Welcome back to Colab and welcome back to our business project.\nSo again our assignment, we are due ...", + "Video: 60619651
Text: I mentioned before an AI company called vellum.\nWhen we were talking about the different questions, ...", + "Video: 59473191
Text: And you thought we'd never get here.\nHere we are in Jupyter Lab, running our fine tuning for a front...", + "Video: 59170297
Text: And here we are in Google Colab, ready for fun with models.\nSo first we do the usual Pip installs an...", + "Video: 59167015
Text: Welcome back to Jupyter Lab and welcome to Day Five's Lab.\nAnd this is going to be lots of creativit...", + "Video: 59170043
Text: Let me enthusiastically welcome you all back to week three of our LLM engineering journey.\nIf you en...", + "Video: 59473147
Text: Well, I'm very relieved.\nI've got that behind me.\nNo more human testing for me.\nWe'll have one final...", + "Video: 59166453
Text: Welcome back and welcome to our continuing JupyterLab experience.\nUh, I'm hopefully going to keep yo...", + "Video: 59166915
Text: Welcome back to the wonderful world of JupyterLab.\nAnd here we are in week two.\nDay three.\nUh, bring...", + "Video: 59667365
Text: Here we are back in Colab, looking at the week seven, day five of the Colab notebooks and I'm on a\nT...", + "Video: 60616845
Text: We're on the home stretch.\nThis is the final step in the environment setup, and it's an easy one.\nIt...", + "Video: 59295459
Text: And welcome back to More Leaderboard Fest as we go through some more leaderboards.\nBut this time we'...", + "Video: 59471979
Text: So we now turn to the parts of the problem, which is perhaps, let's say, not as glamorous as some\nof...", + "Video: 59503705
Text: And so now we talk about quantization the q and q Laura.\nQ stands for quantized quantized.\nLaura.\nAn...", + "Video: 59472505
Text: So the good news is that this is the very final video about data set curation.\nYou were probably fed...", + "Video: 59669217
Text: And welcome to the next part of visualizing the data.\nAnd just very quickly to show it to you in 3D....", + "Video: 59671221
Text: I gotta tell you, I don't like to toot my horn a whole lot, but I do think that I've done a great\njo...", + "Video: 59503703
Text: Well.\nHello there everybody.\nI am so grateful that you've made it through to the start of week seven...", + "Video: 59473201
Text: Well, before we do a postmortem on what happened, let's just quickly look at the standing the rankin...", + "Video: 60622463
Text: In this video, we're going to set up a full data science environment for Mac users.\nIn the next vide...", + "Video: 60619299
Text: Well, I hope you found that both educational and enjoyable.\nAs we went through and learned so much a...", + "Video: 59295607
Text: So to revisit then the solution that we built in the previous day and talk about the metrics.\nAs I s...", + "Video: 59297575
Text: Well, welcome to the final part on rag.\nAnd this is the session where you go from being a rag expert...", + "Video: 59507687
Text: It's time for action, everybody.\nWe've set up our colab.\nHere we are, week seven, day three.\nWe've g...", + "Video: 59671441
Text: And welcome once more to our favorite place to be Jupyter Lab, the Paradise for a data scientist exp...", + "Video: 59673431
Text: And here we have it.\nThe user interface is completed.\nThe extra notification came through on my phon...", + "Video: 59473137
Text: Let's get straight to it.\nSo the place where you can see everything that's going on and get knee dee...", + "Video: 59166421
Text: Welcome back to the radio day in the lab.\nMore to do.\nLet's keep going.\nWhere we left off is we had ...", + "Video: 59295599
Text: Welcome to the Jupyter Lab for day four.\nIt's going to look very familiar because it's actually I've...", + "Video: 59669631
Text: Here we are in our favorite place to be in JupyterLab, ready for some coding and a lot of coding tha...", + "Video: 59673663
Text: But wait, there's more.\nWe need to add some more to the user interface just to make it look more coo...", + "Video: 59506929
Text: And we return to the hugging face open LLM leaderboard.\nThe first place you go when selecting your b...", + "Video: 59504785
Text: So at this point we're going to talk about hyperparameters.\nAnd we're going to introduce three of th...", + "Video: 59505337
Text: So we're now going to look at four bit quantization, the rather remarkable effect of reducing the pr...", + "Video: 59271655
Text: So here we are on Hugging Face's main landing page at Hugging Face Core.\nA URL you know.\nWell, since...", + "Video: 59472883
Text: Okay, time to reveal the results.\nIt has run to completion.\nAnd here it is.\nSo a moment to pause.\nIt...", + "Video: 59673639
Text: And welcome now to the code for our user interface, which we will find in this Python module.\nPrice ...", + "Video: 59472463
Text: So last time we looked at a humble linear regression model with feature engineering, and now we say\n...", + "Video: 59297595
Text: So by the time you're watching this, hopefully you have played yourself with vectors.\nYou've created...", + "Video: 60619149
Text: So we're going to start our exploration into the world of frontier models by playing with the famous...", + "Video: 59297735
Text: And at last the time has come to see rag in action.\nAfter all of this talk, and here we are.\nWe're i...", + "Video: 60616407
Text: And now over to my Mac people.\nAnd I have news for you.\nIt's exactly the same thing.\nYou go to a fav...", + "Video: 59170235
Text: So here we are in Google Colab for our first collaborative session on the cloud using a GPU box.\nOn ...", + "Video: 59472067
Text: So we've covered steps 1 to 4 of the five step strategy.\nAnd that brings us to step five, which is p...", + "Video: 59472011
Text: Welcome everybody.\nSo in the past I've said quite a few times, I am excited to start this this week ...", + "Video: 59295553
Text: Welcome back.\nIn the last part, we gave our GPT four and clawed the challenge of converting a simple...", + "Video: 59297773
Text: Well, I hope you're eager with anticipation for this session in JupyterLab as we finally get to see\n...", + "Video: 59295583
Text: And here we are back in JupyterLab.\nIt's been a minute.\nWe've been working in Colab for last week, a...", + "Video: 59507329
Text: Okay.\nIt's moment of truth time.\nI have just taken our class tester.\nYou remember this class?\nUh, it...", + "Video: 59295429
Text: Continuing our investigation of benchmarks, and this will become more real when we actually see some...", + "Video: 60595637
Text: Here we are back in the Colab, which has been running overnight for me and probably for you too, I\nh...", + "Video: 59668027
Text: And so here we are at the home page for modal.\nAt modal.com spelt model not not model which is confu...", + "Video: 59295527
Text: I'm so happy to welcome you to week four, day four on our journey to LLM Engineering and Mastery.\nHe...", + "Video: 59295377
Text: Just before we go on to some of the more advanced metrics, I want to mention for a second something\n...", + "Video: 59666211
Text: So before we try our new model and one more recap on the models so far and keep notes of this so we\n...", + "Video: 59170107
Text: And once again, it's that moment when you take a pause and congratulate yourself on another day of\ns...", + "Video: 60616833
Text: So I realized that day one of week one has been a pretty long day, and I assure you that the other,\n...", + "Video: 59472413
Text: Wonderful.\nWhere we left off is we had just created the Get Features function, which builds our feat...", + "Video: 59297561
Text: And would you believe at this point you're 55% of the way along the journey?\nUh, it's been a while s...", + "Video: 59669211
Text: Well, we took on a lot today and we seem to have been successful.\nThese red icons that you see on th...", + "Video: 59166981
Text: Welcome to week two, day five.\nThe last day of week two where a lot is coming together.\nI am so grat...", + "Video: 60619227
Text: And now let's move to Claude from anthropic, my favorite model and typically the favorite model of\nm...", + "Video: 60620395
Text: Welcome back to Jupyter Lab, where I want to show you the assignment, the homework exercise for you\n...", + "Video: 59665127
Text: Well hi there everybody.\nI'm not going to give you my usual song and dance about how excited you are...", + "Video: 59668923
Text: Well, welcome back to Jupyter Lab for what will be an epic finale to our time in this platform.\nAnd ...", + "Video: 59504887
Text: Well, here we are again in Google Colab.\nIt's been a minute since we were here, and welcome back to ...", + "Video: 59170165
Text: Welcome, everybody to the last day of week three.\nWeek three.\nDay five.\nWe're here already wrapping ...", + "Video: 60617251
Text: Congratulations are definitely in order.\nYesterday was a mammoth first day on this course and you go...", + "Video: 59166951
Text: All right, back to the lab.\nBack to our project.\nTime to work with tools.\nI am in the week two folde...", + "Video: 60619619
Text: Well, day four was an information dense day.\nI do hope that you learned some something useful here, ...", + "Video: 60616663
Text: Well.\nHi there, this is time for PC people to get set up.\nSo all you Mac people out there, you don't...", + "Video: 59508175
Text: So I'm taking a moment now to explain that the training costs of optimizing a model for this course\n...", + "Video: 59670087
Text: And welcome to part four of day two of week eight.\nUh, there's a lot happening this week, and I have...", + "Video: 59506713
Text: Hi everyone.\nSo the reason I'm so fired up about week seven is that this is the time when we actuall...", + "Video: 60620169
Text: Hopefully you found this super satisfying to be able to have this nice business result and have it c...", + "Video: 59295435
Text: Well, just before we wrap up, let me introduce this week's challenge and talk about what we're going...", + "Video: 59297609
Text: Last week, we worked with models that were able to speed up code by a factor of 60,000 times, which\n...", + "Video: 59507489
Text: Continuing our adventure through hyperparameters for training.\nThe next one is pretty crucial and it...", + "Video: 59295549
Text: And welcome back to our challenge again.\nAnd this time we are working with our beautiful prototype.\n...", + "Video: 59665129
Text: And now let me make this real for you by showing you some, some diagrams, particularly now looking\na...", + "Video: 59169991
Text: Okay, so that was your introduction to Hugging Face.\nAnd now I'm going to turn to a different resour...", + "Video: 59472027
Text: And now the time has come to curate our data set.\nAnd the way we're going to do this is we're going ...", + "Video: 59472307
Text: Welcome to week six.\nDay two a day.\nWhen we get back into the data, we look back in anger at our dat...", + "Video: 59508289
Text: So here we are now, back in the Colab, in the same one that we kicked off in the previous day.\nIt's ...", + "Video: 59472333
Text: Thank you for putting up with me during my foray into traditional machine learning.\nI think it was u...", + "Video: 59295431
Text: Now I want to take a quick moment to give you a flyby of five different ways that llms are used comm...", + "Video: 59673449
Text: Well, I have to tell you that I'm a little bit sad.\nThis is the beginning of the beginning of the en...", + "Video: 59669389
Text: Well.\nHi there.\nSo you've made it to day two of week eight, and I am super grateful that you've been...", + "Video: 59170057
Text: And so at the beginning of this week, we started by talking about hugging face pipelines.\nAnd you us...", + "Video: 59166949
Text: Welcome back to making chatbots.\nLet's keep going.\nSo for the next part we're going to beef up the s...", + "Video: 59473019
Text: Welcome back to an action packed time of of training.\nSo now, after waiting about five minutes when ...", + "Video: 59297585
Text: Before we move on, let me show you one more time this fabulous slide that describes the simple three...", + "Video: 59170255
Text: And welcome back to us continuing our journey through the model class in Hugging Face Transformers l...", + "Video: 60614589
Text: So we're now going to run a large language model directly on your box using a platform called llama,...", + "Video: 59297601
Text: I'm not going to lie, at this point you have every reason to be impatient with me.\nWe've been yammer...", + "Video: 60616629
Text: And welcome back to team PC and Team Mac as we come back together again for a quick video.\nIn this o...", + "Video: 59297749
Text: It's always welcome back to JupyterLab, my favorite place to be.\nAnd now we are, of course in the we...", + "Video: 59170135
Text: Welcome.\nIt's week three.\nIt's day four.\nWe are back on the adventure in open source land, back inve...", + "Video: 59472017
Text: And this is the first time that we'll be coding against our big project of the course.\nWelcome to Ju...", + "Video: 59507017
Text: Welcome to Colab.\nWelcome to the week seven day two Colab.\nAnd just before we try our base model, we...", + "Video: 60619883
Text: And now we've arrived at an exciting moment in our first week.\nThe conclusion of the first week is w...", + "Video: 59508297
Text: What more is there to say, really?\nTomorrow is the day for results.\nA day that very excited indeed a...", + "Video: 60619247
Text: We're going to spend a little bit more time with GPT just to try out a few more interesting things.\n...", + "Video: 59504769
Text: Without further ado, we're going to get stuck into it.\nTalking about Laura.\nLow rank adaptation.\nAnd...", + "Video: 59170233
Text: Welcome back to our continued exploits with Tokenizers.\nWhat we're now going to look at is what's ca...", + "Video: 59671231
Text: And here we are back in the Jupyter Lab for the fast approaching conclusion with a really great proj...", + "Video: 60620397
Text: Well, that's a fantastic result to have now arrived towards the end of week one and having completed...", + "Video: 59170093
Text: I'm delighted to see you again.\nAs we get started with day three of week three of our adventure and ...", + "Video: 59473089
Text: Welcome back.\nSo hopefully you are still impressed by the GPT four mini results.\nThe frontier model ...", + "Video: 60395261
Text: Let's keep going with our project to equip our LM with a tool.\nWe just created this piece of code to...", + "Video: 60617259
Text: I'm excited to introduce you to your first exercise, and I'm looking forward to seeing what you make...", + "Video: 59507313
Text: And it's this time again, when we look at the podium of how our models are performing across the boa...", + "Video: 60619721
Text: Now it's time to talk for a minute about tokens.\nTokens are the individual units which get passed in...", + "Video: 59295451
Text: I know that everybody.\nIt seems like just the other day that we were embarking on our quest together...", + "Video: 59166919
Text: And with that, it concludes our session on tools.\nAnd at this point, you are probably an expert on t...", + "Video: 59295441
Text: Okay, so welcome to our leaderboard fast as we go through a ton of essential leaderboards for your\nc...", + "Video: 59295541
Text: And welcome back.\nYou've just seen GPT four zero spectacularly failed to work on our hard Python con...", + "Video: 59473101
Text: Welcome back.\nSo about ten minutes later, maybe 15 minutes later, the run has completed.\nAnd how do ...", + "Video: 59507423
Text: So you may remember eons ago when we were building our data set.\nAt the end of that, we uploaded our...", + "Video: 59295545
Text: I really hope you've enjoyed this week.\nWe've got tons done.\nWe've experimented with all sorts of ne...", + "Video: 59472503
Text: Welcome back to Jupyter Lab.\nLast time, we looked at some silly models for predicting the price of p...", + "Video: 60614591
Text: The mantra of this course is that the best way to learn is by doing, and we will be doing stuff toge...", + "Video: 59473021
Text: Welcome to our favorite place to be to JupyterLab.\nHere we are again now in day three.\nIn week six.\n...", + "Video: 60617255
Text: I'm now going to talk for a bit about models.\nA term you often hear is the term frontier models, whi...", + "Video: 59667829
Text: Well.\nHello there.\nLook, I know what you're thinking.\nYou're thinking I peaked too early.\nLast week ...", + "Video: 59505329
Text: Welcome back.\nYou may, like me, have just gone off and got a coffee while things loaded back up agai...", + "Video: 59669049
Text: So what you just saw was an ephemeral app, as it's called, which means just a temporary app that you...", + "Video: 60619439
Text: This now brings us to an extremely important property of LMS called the context window that I want t...", + "Video: 59668181
Text: And so it gives me great pleasure to introduce to you the project that I've lined up for you this we...", + "Video: 59472441
Text: Welcome back.\nSo we've been doing the thoroughly distasteful, unsavory work of feature engineering.\n...", + "Video: 59507785
Text: Well, I'm sure you're fed up of me saying that the moment of truth has arrived, but it really has.\nT...", + "Video: 59295587
Text: When I left you, we had just created this simple user interface for converting from Python to C plus...", + "Video: 59166465
Text: Welcome back to the JupyterLab on Gradio day, so you'll remember where we left off.\nWe'd written two...", + "Video: 59473071
Text: Hey, gang.\nLook, I know what you're thinking.\nThis week was supposed to be training week.\nI set it a...", + "Video: 59295423
Text: Welcome to day two of week four, when we get more into leaderboards so that by the end of this, you'...", + "Video: 59297723
Text: So I know what you're thinking.\nYou're thinking, what's going on here?\nWe're on day five.\nWe're on d...", + "Video: 59166947
Text: Well, thank you for coming along for week two, day four.\nWe have lots of good stuff in store today.\n...", + "Video: 59666831
Text: Take one more moment to look at this very nice diagram that lays it all out, and we will move on.\nNo...", + "Video: 59295493
Text: And welcome to week four, day three.\nAs we are about to embark upon another business project which w...", + "Video: 60616855
Text: Now I know what you're thinking.\nWe've been building environments for so long.\nAre we not done yet?\n...", + "Video: 59506611
Text: So in a future day, I'm going to be training, fine tuning a model and creating a fine tuned model.\nA...", + "Video: 60616493
Text: I'll just take a quick moment to introduce myself to convince you that I am actually qualified to be...", + "Video: 59166317
Text: And welcome to week two, day two, as we continue our adventure into the realm of LMS.\nUh, so today, ...", + "Video: 59295439
Text: So I'm aware that there's a big risk that you are getting fed up of leaderboards, because we've done...", + "Video: 59472421
Text: And welcome back to our final time in Jupyter Lab with traditional machine learning.\nIt's almost ove...", + "Video: 59472137
Text: Well, well, well, it's been a long day, but congratulations, you've made it.\nWe've gone through and ...", + "Video: 59297693
Text: So at the end of each week, it's customary for me to give you a challenge, an assignment to do on\nyo...", + "Video: 60620143
Text: So we're going to make a call to GPT four.\nOh, that's going to ask it to look through a set of links...", + "Video: 60619501
Text: I welcome to day four of our time together.\nThis is a very important day.\nToday we're going to be lo...", + "Video: 59297743
Text: And welcome to day five.\nFor reals.\nWe're actually in the proper Jupyter notebook.\nThis time we're i...", + "Video: 59166847
Text: Well, they say that time flies when you're having fun, and it certainly feels like time is flying.\nU...", + "Video: 59170223
Text: Well.\nFantastic.\nIt's coming up to the end of the week, and that means it's coming up to a challenge...", + "Video: 59170037
Text: So how does it feel to be 30% of the way down the journey to being a proficient LLM engineer?\nTake a...", + "Video: 59295609
Text: You must be feeling absolutely exhausted at this point.\nAnd if you are, that is okay.\nYou have done ...", + "Video: 60619281
Text: Well, I'm delighted to welcome you to day three of our eight week journey together.\nAnd today we're ...", + "Video: 59472429
Text: And continuing on our strategy to solve commercial problems with LMS, we get to step four, which is\n...", + "Video: 59167009
Text: Welcome back.\nIt's time to make our full agent framework.\nI'm super excited about this.\nIt's pulling...", + "Video: 59166481
Text: And here, once more we find ourselves in our favorite place, the Jupyter Lab.\nReady to go with weeks...", + "Video: 59670933
Text: I realized my enthusiasm for this project is a bit insane, but I have to tell you that I am very sat...", + "Video: 59670073
Text: Okay, it's time to complete the Rag workflow in our Jupyter Lab on day 2.3.\nWe've got this function ...", + "Video: 59673595
Text: That concludes a mammoth project.\nThree weeks in the making.\nIn the course of those three weeks, sta...", + "Video: 59297603
Text: And I'm delighted to welcome you back to LM engineering on the day that we turn to vectors.\nFinally,...", + "Video: 60614541
Text: I am delighted to welcome you to the first day of our eight weeks together as you join me on this ad...", + "Video: 59667357
Text: Let's now see our results side by side.\nWe started our journey with a constant model that was at $1....", + "Video: 59667841
Text: Now, look, I know that I went through that very fast, but maybe, uh, you're still, uh, blinking\nat t...", + "Video: 59472007
Text: So I hope you enjoyed our first bash at Scrubbing Data, and that you are now looking through the cod...", + "Video: 59507435
Text: So I'm now going to talk about five important hyperparameters for the training process.\nAnd some of ...", + "Video: 59509185
Text: So this is where I left you looking at this satisfying chart on training loss and seeing the trainin...", + "Video: 59473159
Text: Welcome to Jupyter Lab and welcome to our experiments at the frontier.\nSo we are going to put our fr...", + "Video: 60619447
Text: I want to take a moment to talk about something that's very fundamental to an LLM, which is the numb...", + "Video: 59166353
Text: Well, congratulations on leveling up yet again.\nYou've got some real hard skills that you've added t...", + "Video: 60619123
Text: So what we're now going to do is we're going to look at some models in practice and start to compare...", + "Video: 59295363
Text: Well, another congratulations moment.\nYou have 40% on the way to being an LM engineer at a high leve...", + "Video: 60619289
Text: And now we'll go a bit faster through the other models.\nWe'll start with Google's Gemini.\nI have the...", + "Video: 59472873
Text: So it's quite an adventure that we had at the frontier of what's capable with Llms today, solving a\n...", + "Video: 60619429
Text: Let me talk about some other phenomena that have happened over the last few years.\nOne of them has b...", + "Video: 59295601
Text: So it's time to continue our journey into the world of open source and understand which models we sh...", + "Video: 59170025
Text: And a massive welcome back one more time to LM engineering.\nWe are in week three, day two and we are...", + "Video: 59166443
Text: And welcome back everybody.\nWelcome to week two day three.\nIt's a continuation of our enjoyment of r...", + "Video: 60620025
Text: And welcome back to Jupyter Lab, one of my very favorite places to be.\nWhen Jupyter Lab sprung up on...", + "Video: 59170055
Text: Welcome to the world of Google Colab.\nYou may already be very familiar with Google Colab, even if so..." + ], + "type": "scatter", + "x": [ + -12.833365, + -6.9742827, + 12.4054785, + 0.7444725, + -3.2209346, + -1.8923138, + 12.045013, + 3.3449032, + 3.1842198, + -4.9479027, + -5.4305677, + 8.906914, + 7.4986606, + -8.522678, + -0.6965641, + 6.603374, + -11.045361, + -4.2061296, + -0.6122766, + 4.145742, + -15.19937, + -3.4401643, + 1.4189938, + -1.4075196, + 2.407112, + -2.5531256, + 3.4384673, + 8.128717, + 2.1237493, + -12.902143, + 15.229833, + 2.7304206, + -10.246402, + -3.2447436, + -8.882521, + 8.555937, + 5.628159, + 7.8938856, + -5.265052, + -8.822166, + 7.8464785, + -3.648399, + -8.064129, + 9.394255, + 8.501753, + -9.501365, + 15.182271, + 4.720065, + -1.1797574, + -13.243277, + -9.353854, + -2.998534, + -1.1271738, + 0.3913053, + -8.308189, + 14.194027, + 1.6540549, + 1.3559673, + 4.259716, + -9.247647, + -5.802019, + -3.195949, + -10.075436, + 9.626325, + 11.068077, + -3.1101823, + 6.4528036, + 5.0787916, + -15.360518, + -12.8956175, + -5.790258, + -9.99366, + 6.8768044, + -4.6994433, + 0.35191682, + -0.29200283, + 3.0990727, + 12.57883, + -5.6075945, + -1.0033067, + -2.449439, + 16.036858, + 0.14201127, + 10.2873335, + -10.185286, + 1.0699235, + -11.33001, + 9.997939, + 5.053496, + -0.6908192, + -7.4411364, + -1.8156531, + 4.695986, + -7.3850956, + 0.85939574, + -0.68879485, + 0.79399765, + 2.6232824, + 10.725368, + -14.1221, + 9.375242, + -9.608614, + -1.8901383, + 5.7741113, + 6.4975615, + 3.5574346, + 14.212662, + -11.486093, + -4.2505164, + -2.822659, + 10.812861, + 5.9373355, + 4.6210785, + -14.758913, + 14.809078, + -14.101901, + -8.283896, + -8.942637, + -1.4648409, + -12.052869, + -6.616761, + 4.2436285, + 0.8798934, + 1.789862, + -2.2955062, + 8.728576, + -11.620666, + 3.6742375, + -7.761937, + 12.48991, + 3.6297722, + 14.6792555, + 2.5280774, + -3.2109888, + -10.203885, + -5.4021983, + 8.246243, + -3.1352522, + 12.564423, + 
-13.406111, + -3.866553, + -2.1669235, + 7.9661245, + -3.791727, + -8.225956, + 5.954079, + 10.361685, + -7.5399003, + -3.2611566, + -0.9431268, + 1.2448666, + 4.4184537, + 14.7139845, + -10.79534, + -9.544763, + 4.5476527, + -7.414183, + 3.5664093, + -6.974854, + 2.978243, + 2.393447, + -9.970659, + 9.268733, + 8.52153, + 2.8192813, + -7.411628, + -10.112688, + 13.632619, + 9.394551, + -4.6803446, + 3.9642556, + -0.22321175, + 5.192608, + -15.408804, + 6.085784, + 9.131328, + -12.507938, + 13.225102, + 7.411992, + 2.4457388, + -5.3649106, + -2.1621914, + 2.9738903, + 11.734665, + 1.3640592, + 2.8509138, + 1.5292069, + 3.109312, + 0.31427717, + 0.59937334, + 1.9934503, + 5.054161, + -0.7211345, + 9.357517, + 1.1712533, + -1.6295905, + 1.4415473, + -0.5701214, + 13.127944, + -7.282712, + 8.714061, + -0.2947172, + -14.72166, + -12.058422, + -7.3617206, + -2.8723657, + 5.6522145, + 1.3458288, + 4.7146225, + 0.14565246, + -6.5029964, + 1.4029636, + 5.10695, + -4.3713784, + 7.316387, + 12.153176, + -8.246752 + ], + "y": [ + 5.864612, + -7.79562, + 2.1185772, + 10.241048, + -4.6602664, + -10.380204, + -3.3814008, + 5.1946826, + 1.7420042, + 8.918891, + 5.260133, + 0.1811261, + -5.5230923, + 0.5748871, + 1.231967, + 3.553936, + -10.655771, + -8.0794115, + -13.241925, + -14.984945, + 4.7148366, + -13.97179, + -13.19601, + -0.2265177, + 3.8162532, + 2.1463737, + -14.238365, + -4.559269, + 2.1515036, + -1.085198, + -4.2104445, + 3.3605366, + -10.949242, + -5.0520687, + 0.5021872, + 10.118524, + 1.5675689, + 11.071112, + -1.8434283, + 4.0219116, + -1.0825654, + 5.961061, + -5.838909, + -2.6683056, + -1.9608945, + -7.612094, + 4.54624, + 12.01477, + -6.5574946, + 5.990393, + -9.988611, + -14.277355, + 2.9967263, + -1.1712778, + -3.7443178, + 3.6448686, + 9.226036, + -3.7033923, + -14.053265, + 2.5498354, + -9.518535, + 1.6401825, + -0.36512238, + -4.9813704, + -2.726482, + -10.685461, + -6.5958095, + 10.8004055, + 4.859266, + 6.9649606, + 7.024944, + -4.250888, + -6.3098636, + 
-11.0815115, + -14.593737, + 11.150167, + -11.672057, + 6.443364, + 5.543199, + -3.852712, + -2.6978295, + -5.126513, + -13.7175045, + -2.0199032, + -5.6320567, + 14.843209, + 0.2881268, + 5.7191358, + 7.526985, + 14.6844635, + -3.2138662, + 6.6662474, + 3.171249, + -13.094588, + -14.7442875, + 0.5794834, + 2.99146, + 10.943796, + 1.3510485, + 2.100339, + 3.2113767, + 3.7474568, + 5.7545915, + 3.5693796, + -0.9383067, + 4.0061, + 3.5728343, + -8.674493, + -8.300964, + -2.2709742, + -0.603111, + 6.185632, + -11.0190115, + 1.3625672, + -4.850173, + 2.9171705, + 8.1604395, + -10.25015, + -3.9398847, + -0.23160458, + -5.392693, + 15.28234, + 1.2750401, + -1.3851596, + 7.5546064, + -8.560972, + -1.8034146, + -12.882853, + 4.640706, + 6.449833, + -12.572372, + 3.1180751, + -10.773977, + 6.802127, + -9.868545, + 9.253048, + 1.3396845, + -0.74683595, + -7.180224, + 4.771069, + 9.654162, + 3.1239467, + 1.4051272, + 8.177503, + -5.7268376, + -0.7003263, + 2.7622976, + -3.1645548, + 13.3046, + -2.2131858, + 3.1262445, + 12.838138, + -5.2419405, + -2.8829832, + -8.004557, + 7.4833393, + -12.2402, + -4.5172586, + -9.0768795, + 9.612813, + 0.04776096, + 4.9612007, + 4.990355, + 12.433644, + -2.141872, + -12.758332, + 1.4486425, + -4.086392, + -4.481818, + -1.9323591, + 14.265009, + -15.393123, + -0.31127125, + 2.5777261, + 6.3160987, + 9.038766, + 4.5779753, + 10.014262, + -4.2199383, + 14.254804, + -6.665484, + -5.69532, + -5.496155, + -0.40160426, + 8.305916, + -8.923462, + 4.5406356, + 0.7675378, + 5.6171103, + -4.578082, + 9.9752655, + -10.363342, + 3.227578, + -0.91156125, + 0.1750337, + -10.112299, + 0.7475936, + -14.1882, + 9.756163, + -4.082387, + 4.626093, + -5.5265136, + 0.31286407, + 0.15795733, + -7.157549, + 13.754237, + -2.7781584, + 11.642487, + 14.187494, + 11.508914, + -4.578478, + 6.9590425, + 8.829999, + 6.39372, + -2.4645948, + 1.3561549, + 8.1304245 + ] + } + ], + "layout": { + "height": 600, + "margin": { + "b": 10, + "l": 10, + "r": 20, + "t": 40 + }, + 
"scene": { + "xaxis": { + "title": { + "text": "x" + } + }, + "yaxis": { + "title": { + "text": "y" + } + } + }, + "template": { + "data": { + "bar": [ + { + "error_x": { + "color": "#2a3f5f" + }, + "error_y": { + "color": "#2a3f5f" + }, + "marker": { + "line": { + "color": "#E5ECF6", + "width": 0.5 + }, + "pattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + } + }, + "type": "bar" + } + ], + "barpolar": [ + { + "marker": { + "line": { + "color": "#E5ECF6", + "width": 0.5 + }, + "pattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + } + }, + "type": "barpolar" + } + ], + "carpet": [ + { + "aaxis": { + "endlinecolor": "#2a3f5f", + "gridcolor": "white", + "linecolor": "white", + "minorgridcolor": "white", + "startlinecolor": "#2a3f5f" + }, + "baxis": { + "endlinecolor": "#2a3f5f", + "gridcolor": "white", + "linecolor": "white", + "minorgridcolor": "white", + "startlinecolor": "#2a3f5f" + }, + "type": "carpet" + } + ], + "choropleth": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "type": "choropleth" + } + ], + "contour": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "contour" + } + ], + "contourcarpet": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "type": "contourcarpet" + } + ], + "heatmap": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" 
+ ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "heatmap" + } + ], + "heatmapgl": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "heatmapgl" + } + ], + "histogram": [ + { + "marker": { + "pattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + } + }, + "type": "histogram" + } + ], + "histogram2d": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "histogram2d" + } + ], + "histogram2dcontour": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "histogram2dcontour" + } + ], + "mesh3d": [ + 
{ + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "type": "mesh3d" + } + ], + "parcoords": [ + { + "line": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "parcoords" + } + ], + "pie": [ + { + "automargin": true, + "type": "pie" + } + ], + "scatter": [ + { + "fillpattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + }, + "type": "scatter" + } + ], + "scatter3d": [ + { + "line": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatter3d" + } + ], + "scattercarpet": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattercarpet" + } + ], + "scattergeo": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattergeo" + } + ], + "scattergl": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattergl" + } + ], + "scattermapbox": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattermapbox" + } + ], + "scatterpolar": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatterpolar" + } + ], + "scatterpolargl": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatterpolargl" + } + ], + "scatterternary": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatterternary" + } + ], + "surface": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + 
"#f0f921" + ] + ], + "type": "surface" + } + ], + "table": [ + { + "cells": { + "fill": { + "color": "#EBF0F8" + }, + "line": { + "color": "white" + } + }, + "header": { + "fill": { + "color": "#C8D4E3" + }, + "line": { + "color": "white" + } + }, + "type": "table" + } + ] + }, + "layout": { + "annotationdefaults": { + "arrowcolor": "#2a3f5f", + "arrowhead": 0, + "arrowwidth": 1 + }, + "autotypenumbers": "strict", + "coloraxis": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "colorscale": { + "diverging": [ + [ + 0, + "#8e0152" + ], + [ + 0.1, + "#c51b7d" + ], + [ + 0.2, + "#de77ae" + ], + [ + 0.3, + "#f1b6da" + ], + [ + 0.4, + "#fde0ef" + ], + [ + 0.5, + "#f7f7f7" + ], + [ + 0.6, + "#e6f5d0" + ], + [ + 0.7, + "#b8e186" + ], + [ + 0.8, + "#7fbc41" + ], + [ + 0.9, + "#4d9221" + ], + [ + 1, + "#276419" + ] + ], + "sequential": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "sequentialminus": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ] + }, + "colorway": [ + "#636efa", + "#EF553B", + "#00cc96", + "#ab63fa", + "#FFA15A", + "#19d3f3", + "#FF6692", + "#B6E880", + "#FF97FF", + "#FECB52" + ], + "font": { + "color": "#2a3f5f" + }, + "geo": { + "bgcolor": "white", + "lakecolor": "white", + "landcolor": "#E5ECF6", + "showlakes": true, + "showland": true, + 
"subunitcolor": "white" + }, + "hoverlabel": { + "align": "left" + }, + "hovermode": "closest", + "mapbox": { + "style": "light" + }, + "paper_bgcolor": "white", + "plot_bgcolor": "#E5ECF6", + "polar": { + "angularaxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + }, + "bgcolor": "#E5ECF6", + "radialaxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + } + }, + "scene": { + "xaxis": { + "backgroundcolor": "#E5ECF6", + "gridcolor": "white", + "gridwidth": 2, + "linecolor": "white", + "showbackground": true, + "ticks": "", + "zerolinecolor": "white" + }, + "yaxis": { + "backgroundcolor": "#E5ECF6", + "gridcolor": "white", + "gridwidth": 2, + "linecolor": "white", + "showbackground": true, + "ticks": "", + "zerolinecolor": "white" + }, + "zaxis": { + "backgroundcolor": "#E5ECF6", + "gridcolor": "white", + "gridwidth": 2, + "linecolor": "white", + "showbackground": true, + "ticks": "", + "zerolinecolor": "white" + } + }, + "shapedefaults": { + "line": { + "color": "#2a3f5f" + } + }, + "ternary": { + "aaxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + }, + "baxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + }, + "bgcolor": "#E5ECF6", + "caxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + } + }, + "title": { + "x": 0.05 + }, + "xaxis": { + "automargin": true, + "gridcolor": "white", + "linecolor": "white", + "ticks": "", + "title": { + "standoff": 15 + }, + "zerolinecolor": "white", + "zerolinewidth": 2 + }, + "yaxis": { + "automargin": true, + "gridcolor": "white", + "linecolor": "white", + "ticks": "", + "title": { + "standoff": 15 + }, + "zerolinecolor": "white", + "zerolinewidth": 2 + } + } + }, + "title": { + "text": "2D Chroma Vector Store Visualization" + }, + "width": 800 + } + }, + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABPAAAAJYCAYAAADsXBi6AAAgAElEQVR4XuydBZxc1dm435n1ZONuECyE4EGCuxOkSIGvWAsUKZRSvLgVdy/eAsXdHRIkFA+aAHEhLuu7M/Pdc8MsO5vd7MyZe+ece+9z/7/+v5acc973PO9JSJ4ciaWcT/ggAAEIQAACEIAABCAAAQhAAAIQgAAEIAABKwnEEHhW1oWkIAABCEAAAhCAAAQgAAEIQAACEIAABCDgEkDgsRAgAAEIQAACEIAABCAAAQhAAAIQgAAEIGAxAQSexcUhNQhAAAIQgAAEIAABCEAAAhCAAAQgAAEIIPBYAxCAAAQgAAEIQAACEIAABCAAAQhAAAIQsJgAAs/i4pAaBCAAAQhAAAIQgAAEIAABCEAAAhCAAAQQeKwBCEAAAhCAAAQgAAEIQAACEIAABCAAAQhYTACBZ3FxSA0CEIAABCAAAQhAAAIQgAAEIAABCEAAAgg8Zw188c2Pcut9z8i3EydLKpmSYasNkeMO31s2GzmieYXs9n9nyLSZc5r/d2lpifTt1V1GrjtMDt53B1l/xGpZraakM/5zr70vz7wyVn74carU1jdIH2ecTdZfUw4/cFcZvvpKzePsfeQ5supKA+SGi0/MamybG9U589xu/7+587z5spPbTXXPw86SMoftU/dcYvN0lsvt3Q+/lIeffkO+d2q6cPFSKS8rlTVXW0kO2nt7Gb3z5lbN5ejTrpbvJ06Vt5+8QUqKi9rM7cJr7penXx4jbz1xvfzhL5c663wN+efZxxR8Huddda+M/fgrefuJG9zY6udhIXJ5/rUP5Kx//ktee+QaGdS/d8HnTUAIQAACEIAABCAAAQhAAAIQgEBLApEXeOO/nySHnXipbOSIpSMcgRaLxeTuh1+UL77+UR6984JmoabEQWXnCjnjhENcfvUNDfLz1Fny3Kvvu9LmL0fuKyc4/1nR19iUkL+ee5O899GXssu2G8t2W2wonTuVy5Tpv8jjz78js+fMlyvOOVZ2235Td5gwCTw1n0tv+I889vzb8tbj10vvnt2WQ/X51xPl0BMvk3P/dpgcsu+OnvxMXby0WrbY6y/yv5fvkE4V5Z6M2XoQJbrOvfIe2Xe3rWTX7TaRXj26yfyFi10B9tq7n8g//voH+cN+O7vd3hjzqdz5n+fl8X9d6Esu2Qz66jv/k79feKsrhnfeZuPluijZuu1+J8uWm6wj1134F3nxzY/ceo3acK1shve0TWuB51cu/7zpIVdmnn7CwW7+6ufkR59+48jXLdyfo3wQgAAEIAABCEAAAhCAAAQgAAGTBCIv8JTI+Gz8RHntv1eL2lWnviVVNbLVPic6Qm83OfW437v/TAk8tRPnnuvOyKiX2lF3xS0Py0NPve7IjhMcgbNMvrX13XDXE3LXQy/IZWcd7cqell9Nbb0cdepV8vOUmU4u10i3rp1DJ/B++Gma7HfUeXLacQfJHw/efTlESta85Miid5ydYV0qO3ny82LMuPFy3JnX+irw9jj0TOnfp6fce/2Zy+V80jk3imOF5eZL/+r+2HV3PiYffvqtUYGnRPIOB/xN1hm+qtx+xSnL5ZzefXb3NafL5huv7UkddAdpLfB0x+mo38HHXywbObtp0wKvo/b8OAQgAAEIQAACEIAABCAAAQhAoJAEIi/w5i1YLEqerTSobwb3rfc9yd0hd8kZf3L/eXsCT/1YIpGUvY442z022d7Rz9q6BtnmdyfJRusNkzuuPLXNGs/6Zb4ouZLORe3AW2OVQbLDViPllnuflpmz50m/Pj1cyZDeOfXC6x/KmZfdKf++6R9ywTX3ubm8/NCVosTifY++LE+++K7br7y8TDZcZw05+ej9m3cVqt1MZ1xyh/z3tvPkmjsedXcSqjmo3WL777mNqGOUn42fIMXOzqS9d93SFW/pb/K02XLj3U84u5S+ldq6eunbu4fsseNm7k7EkpLidtfwwcddJNU1dfL
8vy/PaLOMz1/dHWyXnnmU+2Pv/+9r+deDz8vEn6c7XJpkvbVWk1OOPVDWWXOV5r5LHdl6491PyuvvfSJV1bWy6soD5c+Hjnb53Hrf03LbA882t91ms/VdYZUNm/Ovvle+dnZnHvV/e8rlzu6sHbbaUC4+fdlaaP3tfPBpMnRwf7nrmtNW+HP3iJMvl0++/KG5TVpkKmF8vSP23nr/c/f4bY9uXWTbzdeXU/58oPvf1ddePqlUyj26+9RLY5xdY7OlzKnfNqPWd8VzW7sc08GvveMxuf+xl93dkOoId8vvT6dcKTOcNfPKw1e5O1JbH1tV6+SGux53+EyW6to6R172kL122VKOO2xvicdj8sQL77pr8c3Hr3PFZvr78+nXuHL8kdvPd/9RNmtoRUdoVd3V+mjrUzsH1VpS34NPvi6PPfe2u6uuU0WZrOkcU1ds08fe197uyIwh1M7bSVNmLXeE9u0PPnd3T05wRLT6hq06WP50yJ7ublr1zZ2/yD0mfqWzi1btJn3r/c/cNamOxZ9z8mEZx+NXuFD4QQhAAAIQgAAEIAABCEAAAhCAQCsCkRd4ba0IdaxWiabL/3GM7O2ICfWtSOCpH7/+X4+7R2/HPHOz9Oy+TLq0/P73xfdy5N+ucIXgfntsk9VCVAIvkUjI0CH95Zg/jJaioiJnB9ejjhz40RUvKs6r73zsHIe8zZVz6r61NRypoISByueBx16R044/2BGRGzhHOpe4OwXVDr/nH7jcEW7dnb7LjlKqO8WUnFrZkVBX3fZf+c8Tr8m6w1eRM/5yiHvHX/qI6L+uPs09VqkE2G7/d7q7S+7CU4+U7t0qHakx3RUeh+6/sysJ2/uefPE9V0Y97EjDlvcGqjsBz7ni7uZ/rkTXH0+5QnbaeiM56aj9XQ5KYn7wydfy5N0XO5KznxtCtZk2c64jSA6VAX17yfOvfyD3P/qKqFxV7kpiKpH3unOXWdcund1j0NmwueT6f8s7H3whA51dl8cetpfDpp8MGZgpedNzVFwVsz0dganuQ1zPuQ+x2KlV60/JRlUrdbxW7eSscKSquu9P3TE305G35//9CBnu3L+oBNlF1z3gzKen/NeRXUqitZePEko33fOk/NVhpO7aUxL4Yif3uNPn8bsuaveOu6kzfpHd/3Cm/O2YA9y1lf6UuNvFEZIt/3lLgdfk1GF7R1KpOZ74x9+5PJWsUvkdd/g+ctQhe2Ql8LJdQysSeIuXVMvipVUZmM+/+j53jT/+r4tc2Z1eu2otb+8IeXU8+M7/PCcfOHL4xQevdH8OKWm680Gnyj67biUn/ul37rp++c1xGQIvvZPzwNHbyWEHOMehHb5qnT310nty2+WnuMJVjbPVPie5cRW/3XfYzBF4NXLU35eJULVu+SAAAQhAAAIQgAAEIAABCEAAAjoEEHitqC1YtFT+74RLpIfzB/sHbz7HkWZxt0VHAk/t8FHSRf0hveVDFOnh0zvl7r/hLNlkg+FZ1UoJvIWLljgX6V/ryJ5St8/Hn3/vSiu1i2/rUes2S7iWwkXtZlNHgNUx3fNOObw5ltqJp3aLpdumBV7LI73fTZwiBxxzgfugxpmO9FCf2tW3/k5HOZJoP2d3216uwFMCSN0pp0Rg+jv5vJvdnVtPOOKovU/tdlT3q+2x4yi56LQ/NjdTu9OUkHnmvkvdf6akh9pRpiSLklzqU/Pa6aC/yy7O7roLHHGojj4fdtJly93lpgShOh76+722k/seedndXZi+Ay9bNuq+vv8+8+ZyorGtealdk2o34n+fftMVRGqX1/prry6bb7S2jN5pc1fopL/jz7pe1K7P9B146Xq2Pn79rHO34j8uv6s5flv51Dc0unXe2tlxp/qnv/Hf/SzqSOjV5x3vcm7vUzvtZs9dIC85jNOfkp3/evAFd/dcegdfS4GnHnJR/1s9aLGPsysz/anj0equuMED+mQt8LJZQ7k8YqGOp6v8773+LFdKq2/R4ip3ji1/Tk5
wdnT+7k/nyi3/PNmVeurbeLc/OwJ8h+YjtK0fsVDrc6Ez1rPO+lQyTn1q96OSoGrHrBLGaYGnmCv26U/lpY7Pf/7aXc3H9NstCj8AAQhAAAIQgAAEIAABCEAAAhBogwACrwUUJRT+fPq1roBRO6TSxxdVk44E3kNPvSH/vOlBee7+y2S1oYOWQ/3CG85R10vvdO9Jy/YxACXwBvbrmXHk9idnd9HeR/xDrjn/eGeHz6hmgaeO0KrjuepL7yC86rzj3F1hLT91xE/JDXXEMC3w1JHB9LHU9A4sJdcOGL1tc9dRex7v/u/TnR196lMS5N+PvypfOi/4KrGRTCXdo8hK+rzx6LUr/Ml20bX3uw8jvPvUTa6YnDpjjiNCznAeezjUOb67k9t3o13/7M4vfZw2PeCJ/7hRfpm30BVgDzjxr7r1v+6dea2PgabbtxZ42bJJP7jxxev3uMdCs/lqnOOk6kix2j34yVc/yDc/THZ3wJ13yhHukWT1tRZ49z7ykqjjrK3noI57qrv10g96tJVPei6tZZqK49ZrT6devz7K0Fb+Lzm7zE6/5Hb5z83/cHcrKiG1yyGny9rDhma8fNxS4CmZq+SgEnlqx+cWzo7Mkc7uz5bHprM9QpvNGspW4KmHYU44+wZXCqdZqzmrHYOPPvu2vPL2OOco+Xzn1ed696VpdZRXra3f7b61i6YjgafW417OgxYXnnZkBkp1BP2DT76Rsc/e3Czw1PHlPx28R3O7R599y90V+e5TN67wWHM2a4w2EIAABCAAAQhAAAIQgAAEIBBNAgi8X+uudnOdeM4NstbqK7vyovUjCh0JPCVYHn3uLfnohdvbfLUyvVtM7Yg7eJ8dslptbb1Cq16+3evws5t3V6UlnNq5tsYqg91xP3SEwtGnXe3e96bufWv5jXb6qsc47rzq1GaB17JvWuBd8Q9HWOyyRXPXlkJo+qy5su8fz5HVVh7k7uYb4uxAUvfkKYH57YQpHQq89C6/tHhSR0DVccR3HMHR1Tm+qKTL+jse5YozdWy45aeO0vbs3tWVIepI7e3/fnaFD1S0FnjZslH1fOmtj+SD527NqlZtNVJC+JQLbpWfJs+QNx5btqOttcBL3+P2ySv/at5lqcZSu/TUTkV1V9vRzj18beWTnos6rhtrJRkbG5tcAapEb3ufarOd85iF2oWmZJaSj+ohlfRR6XS/1nfgqXvd1L176oVdVUu1E3P0Tpu5x7XVLrxsBF62aygbgTfJ+TmhpKI6Aqte/G35qaO9jzq7Y9WxbjXPysoK907Iw076Z9YCT+2gVOtRPbzS8h5IFUftulXHv9XuuvQOvJYiWrVB4Gn/FKIjBCAAAQhAAAIQgAAEIAABCPxKAIHngFB3eB3jCC91Z9UFpx7R5v1lKxJ46tikujdMPaCgjsi29TU4xx23cYSMutRfPXTR1q4uddTvKeeOOHV8Vd0tpivwvv5hkhx07EXS3g68jddf0xU7bcm/bAReWoq9/NBVGY9/pO+j62gHnuJz4J8vdOZYLvded6Z7/9imG67lHstMf5vsfqx7NFTdSdb6U/e7qXsBH3aOrF52439cYTigX682ubcWeNmyyUXgKSGkdgC29XjHG2M+FXW0WD1wscXG6ywn8NL5tbcD7wLnXrzfOzvd2spH7fD7/bEXursit3HuYGv9qZ2kLR+RaAvQ1bc9Io89/7a89/TNcuG198nnjsh+1XmROX1MVPVpLfBajqOOqL7s7G5Tr+vusOVIufLcY52HU5bdc9j6EQslzdRjJOoRi2zXUEcCT90reJBzX6Wa578cxq3vHlQ763Z07lFUD0ukP7VDUh2JzWUHnhpn9E5t78Ab9/l3rlBG4PHvVQhAAAIQgAAEIAABCEAAAhDwi0DkBZ563GH0YWfJLs6LlS3vZGsNvD2Bp44UnnvlPfLca+/LPdeeIZttNKLdWqkXUdUdXSf9aT/nwv+9M9qp46fHn3WdTJw0XV749xXu5fq6Ak8
JRXU3mtqR1PIOPLXraVfniKS60P8IRxLqCrz0wwnjXrzdFY3qU8dg1c7APr2cI7TObrOOPnVn4CU3/Fuuv+hEV3A9eMs57kMc6e+Y065xH3to/aqviqNePS117sVL3/XWUsSo/mo8dRebOj6aFkUfv3SHuzssWzbZCryxH4+XY8+4tvmoa+t533zvU3LHv59rPlqtduCp10rT9wR++tUEOfyv/3TvsNt1u02bu6cfX1Dt1lpj5TYFnpLCWzqPJuy3x9Zy9kmZO8/UUetVVxqQIeLaqonavaZ2Zaodl+qY5zF/2NO957Dl11LgKVn5uXNsuvXRbPUS8reOUFSvC6udeadccIt7n2F6V6i6r297Z7efenxECbxs19CKBJ66i/GEs6+TSVNny2N3XijdunbOyFsdCd5w56PlkN/t1Hyfo2qgfr4qvq0F3u/32t79uaG+1nfgqfsC5zm/Vqgj8ukv/RDHsFWHuPfpIfA6+lnPj0MAAhCAAAQgAAEIQAACEICALoHICzz1h3klstSdcOnHEtIw1f1s6661qvs/lcRQsuqME9IPOyTchxyecHYbfTthsrsL6siDdlthHdTRUHWk8q2xn8nmG68tu28/Srp3rZQpM2bLI8+85byoWS23XHZy8yMXugJPJaGOpd7735fkLEfsbDNqPZnjSKPLb3rIvT/uWUdCdHNeZNUVeOmHF5ToUXfW/Thphlx568OurHntvU/kmXsvlUGOQFP3v7X3VdfUyXb7n+wev+zmMGgpRlSf9Cu0+++xrfuyq2qnZNnVtz8ipx77e/e1W/WpI5/qGOe5Jx/uvhSrjr2q47jpRz7UK6FKAilBpUTY6qsMyopNtgJPSaKTzrlJ3hv3pagXSrdyHhZRNVUPcowZ95V7fHNXRw5fe8GyRybUS73qddt/OUeYezlHatVxZrUzbcbsua5AXt1h+PX3P7uvuqp81VFn9bWXjxJh6hjx352jtuq4tNrh9vjz77j3vqmXftdec2iHvzYogaju3FMC6k1Hvra+T7ClwEtLUyWA93EeSVFSVEnn8x3Gu22/qfMa8GGiRLG60/CQfZeJMyVN1fp498Mv3Z2SSuBlu4Yudo6ojv34K3n7iRuafx6qOxzVbk216+9+56Xl6y880Xl9OfPeSbVe1JFltdNuhpPPLf/8m3s8+8EnX3fvanzixXfdo+zqaK06Lr/jgX93c1PCWz3MMnbc+IxXaNXrx0oqqz5qh2yTw/ke5+eXevX4gRvVHYJrIPA6XGk0gAAEIAABCEAAAhCAAAQgAAFdApEXeGpX0Jx5i9rkp45pvvifK5rFgbq4P/2pI4Zql5y6/F/9gT796mVHhVDCR71I+9TL7zniaaqonUlqR9nmzvHKPzl3bKmdY+kvH4Gn4tz36MuOzHnXvfNLiRa1O1DdqTZkYF83hK7AU33Vy5rq4Y4ljnQc4Tx6cOaJ/yedHOF5zOnXiDrWqCRNW495tOSjjlmq45Zq91hayLX8cXUn2633P+MKUnXkWO3eUg8nqCOl6U89RnCd8wjEm2M/FSUFV3F2nandjTs7L9WqT0nRY52cFOv1115N1GMf2bDJVuCpGErMqnvfVF2V1F20pMq5z67MuSNwoIx2Hj74/d7bNR/t/Orbn1yJq3YXHnnQ7u4dgoqXklFvvf+5K4GUeNpl202cV3/3dx9UUd+K8lGv5aoXcN1XXZ06D19tJTnWYZDtYynpF5J33Hqk3HTJX5dbwq2P0Ko873bqP9ERt2ru6viqkpQnHLFP8yurSpyqnYcLnFeUlRD80yF7OA+e/CQTncdP1KMp2a6h+x97tV2Bt6ezc3bytNlt/pRTr/+q47zqxy+45j5Hik5yBbx6mfkk5zXlK295WB53araHc0+gkoFqR6h6rTiZTLoyXwlYJVtfe+QaV7KqT4nXOxxZ+oMzB3WMW637v/xxX9ls5LJdt+zA6+hXP34cAhCAAAQgAAEIQAACEIAABHQJRF7g6YKjHwQgAAE
IQAACEIAABCAAAQhAAAIQgAAECkEAgVcIysSAAAQgAAEIQAACEIAABCAAAQhAAAIQgIAmAQSeJji6QQACEIAABCAAAQhAAAIQgAAEIAABCECgEAQQeIWgTAwIQAACEIAABCAAAQhAAAIQgAAEIAABCGgSQOBpgqMbBCAAAQhAAAIQgAAEIAABCEAAAhCAAAQKQQCBVwjKxIAABCAAAQhAAAIQgAAEIAABCEAAAhCAgCYBBJ4mOLpBAAIQgAAEIAABCEAAAhCAAAQgAAEIQKAQBBB4haBMDAhAAAIQgAAEIAABCEAAAhCAAAQgAAEIaBJA4GmCoxsEIAABCEAAAhCAAAQgAAEIQAACEIAABApBAIFXCMrEgAAEIAABCEAAAhCAAAQgAAEIQAACEICAJgEEniY4ukEAAhCAAAQgAAEIQAACEIAABCAAAQhAoBAEEHiFoEwMCEAAAhCAAAQgAAEIQAACEIAABCAAAQhoEkDgaYKjGwQgAAEIQAACEIAABCAAAQhAAAIQgAAECkEAgVcIysSAAAQgAAEIQAACEIAABCAAAQhAAAIQgIAmAQSeJji6QQACEIAABCAAAQhAAAIQgAAEIAABCECgEAQQeIWgTAwIQAACEIAABCAAAQhAAAIQgAAEIAABCGgSQOBpgqMbBCAAAQhAAAIQgAAEIAABCEAAAhCAAAQKQQCBVwjKxIAABCAAAQhAAAIQgAAEIAABCEAAAhCAgCYBBJ4mOLpBAAIQgAAEIAABCEAAAhCAAAQgAAEIQKAQBBB4haBMDAhAAAIQgAAEIAABCEAAAhCAAAQgAAEIaBIwLvBmzq/VTD263Xp2KZWa+oTUNSSiC4GZB4ZAaXFcunQqlvlLGgKTM4lGm0DvbmWyuLpRGpuS0QbB7ANBoG/3cikuismcRXXSlEgFImeSjDaBfs6anbukTpL8EhvthRCQ2XcuL3Z/jVW/L+CDQBAIDOxVITgWeyql6uHlh8DzkmaBxkLgFQg0YTwhgMDzBCODFJAAAq+AsAmVNwEEXt4IGaDABBB4BQZOuLwIIPDywkdnAwQQeAagryAkAs+uehjJBoFnBDtBNQkg8DTB0c0YAQSeMfQE1iCAwNOARhejBBB4RvETPEcCCLwcgdHcOAEEnvESZCSAwLOrHkayQeAZwU5QTQIIPE1wdDNGAIFnDD2BNQgg8DSg0cUoAQSeUfwEz5EAAi9HYDQ3TgCBZ7wECDy7SmA+GwSe+RqQQfYEEHjZs6KlHQQQeHbUgSyyI4DAy44TrewhgMCzpxZk0jEBBF7HjGhhFwEEnn318DIj7sDzkmaBxkLgFQg0YTwhgMDzBCODFJAAAq+AsAmVNwEEXt4IGaDABBB4BQZOuLwIIPDywkdnAwQQeAagryAkR2jtqoeRbBB4RrATVJMAAk8THN2MEUDgGUNPYA0CCDwNaHQxSgCBZxQ/wXMkgMDLERjNjRNA4BkvQUYCCDy76mEkGwSeEewE1SSAwNMERzdjBBB4xtATWIMAAk8DGl2MEkDgGcVP8BwJIPByBEZz4wQQeMZLgMCzqwTms0Hgma8BGWRPAIGXPSta2kEAgWdHHcgiOwIIvOw40coeAgg8e2pBJh0TQOB1zIgWdhFA4NlXDy8z4g48L2kWaCwEXoFAE8YTAgg8TzAySAEJIPAKCJtQeRNA4OWNkAEKTACBV2DghMuLAAIvL3x0NkAAgWcA+gpCcoTWrnoYyQaBZwQ7QTUJIPA0wdHNGAEEnjH0BNYggMDTgEYXowQQeEbxEzxHAgi8HIHR3DgBBJ7xEmQkgMCzqx5GskHgGcFOUE0CCDxNcHQzRgCBZww9gTUIIPA0oNHFKAEEnlH8BM+RAAIvR2A0N04AgWe8BAg8u0pgPhsEnvkakEH2BBB42bOipR0EEHh21IEssiOAwMuOE63sIYDAs6cWZNIxAQRex4xoYRcBBJ599fAyI+7A85JmgcZC4BUINGE8IYDA8wQjgxSQAAKvgLAJlTc
BBF7eCBmgwAQQeAUGTri8CCDw8sJHZwMEEHgGoK8gJEdo7aqHkWwQeEawE1STAAJPExzdjBFA4BlDT2ANAgg8DWh0MUoAgWcUP8FzJIDAyxEYzY0TQOAZL0FGAgg8u+phJBsEnhHsBNUkgMDTBEc3YwQQeMbQE1iDAAJPAxpdjBJA4BnFT/AcCSDwcgRGc+MEEHjGS4DAs6sE5rNB4JmvARlkTwCBlz0rWtpBAIFnRx3IIjsCCLzsONHKHgIIPHtqQSYdE0DgdcyIFnYRQODZVw8vM+IOPC9pFmgsBF6BQBPGEwIIPE8wMkgBCSDwCgibUHkTQODljZABCkwAgVdg4ITLiwACLy98dDZAAIFnAPoKQnKE1q56GMkGgWcEO0E1CSDwNMHRzRgBBJ4x9ATWIIDA04BGF6MEEHhG8RM8RwIIvByB0dw4AQSe8RJkJIDAs6seRrJB4BnBTlBNAgg8TXB0M0YAgWcMPYE1CCDwNKDRxSgBBJ5R/ATPkQACL0dgNDdOAIFnvAQIPLtKYD4bBJ75GpBB9gQQeNmzoqUdBBB4dtSBLLIjgMDLjhOt7CGAwLOnFmTSMQEEXseMaGEXAQSeffXwMiPuwPOSZoHGQuAVCDRhPCGAwPMEI4MUkAACr4CwCZU3AQRe3ggZoMAEEHgFBk64vAgg8PLCR2cDBBB4BqCvICRHaO2qh5FsEHhGsBNUkwACTxMc3YwRQOAZQ09gDQIIPA1odDFKAIFnFD/BcySAwMsRGM2NE0DgGS9BRgIIPLvqYSQbBJ4R7ATVJIDA0wRHN2MEEHjG0BNYgwACTwMaXYwSQOAZxU/wHAkg8HIERnPjBBB4xkuAwLOrBOazQeCZrwEZZE8AgZc9K1raQQCBZ0cdyCI7Agi87DjRyh4CCDx7akEmHRNA4HXMiBZ2EUDg2VcPLzPiDjwvaRZoLARegUATxhMCCDxPMDJIAQkg8AoIm1B5E0Dg5WE4Q4IAACAASURBVI2QAQpMAIFXYOCEy4sAAi8vfHQ2QACBZwD6CkJyhNauehjJBoFnBDtBNQkg8DTB0c0YAQSeMfQE1iCAwNOARhejBBB4RvETPEcCCLwcgdHcOAEEnvESZCSAwLOrHkayQeAZwU5QTQIIPE1wdDNGAIFnDD2BNQgg8DSg0SWDQFNTk7w95i355odvZNhqw2Sn7XaR0pIS3ygh8HxDy8A+EEDg+QCVIX0lgMDzFW/OgyPwckYWvg4IvPDVNMwzQuCFubrhnBsCL5x1DeusEHhhrWzh5nX/f+9zBV7622LUVnLsEcf6lgACzze0DOwDAQSeD1AZ0lcCCDxf8eY8uNUCb8GipXLWZXfK7LkL5bn7L2ue3MHHXyzfT5wiEou5/6xrZSd57+mb3P8+c35tzhCi3gGBF/UVEKz5I/CCVS+yFUHgsQqCRACBF6Rq2ZnrX886URYvWdycXGlpqdx53V0Sj8d9SRiB5wtWBvWJAALPJ7AM6xsBBJ5vaLUGtlbgVdfUySGOqNt28w3k3Y++zBB4ex52ltx48Umy+iqDlps0Ai/3dYDAy50ZPcwRQOCZY09kPQIIPD1u9DJDAIFnhnuYop5/+XkyZdrk5in17dNPrr7oGt+miMDzDS0D+0AAgecDVIb0lQACz1e8OQ9urcCrqa2TeQsWu/+58NoHMgTetvudLI/eeYH079MTgZdzyZfvgMDzACJDFIwAAq9gqAnkEQEEnkcgGaYgBBB4BcEc6iA/TPxebr33Vlm8eJFUVnaR4//0F1ln+Nq+zRmB5xtaBvaBAALPB6gM6SsBBJ6veHMe3FqBl57JZ+MnLCfwNtzlGNlm1Hry+dcTpXfPbvK3Yw6QbTZb3+3CDryc14Ag8HJnRg9zBBB45tgTWY8AAk+PG73MEEDgmeEetqiNTY0yc9YM6d9voJQ5R2j9/BB4ftJlbK8JIPC8Jsp4fhNA4PlNOLfxAyfwksmUnHfVPbLb9qNk841HyLsffCln/fNOef7fl7s
78hZVNeZGgNbSuaJIGhpT0tiUhAYErCdQXBST8tK4VNUmrM+VBCGgCHTpVCw19QlJJFIAgYD1BLo66zUej8mSmiZRv+fig4DtBLp2KpGldY2S4rextpeK/BwCZSVx99fYWuf3BXwQCAKB7pUlOBaLCqXq4eUXSzmflwO2tQOv9fh/POUK2X/PbWX0Tps7f0hq8jJ8JMYqKy6SpmRSEvxGPRL1Dvok487jNSXFcalv5Dc+Qa9lVPIvKyly/4Ik6e2/HqOCj3kWmEBFabH7RlhtQ5PYsGS/SU6XdxLfS5NjZ0YVrSqbFa1eYCKEs51ARWmR1DUkxNM/gNg+afILLIHiIkfgOb/GNrBxIrA1jFrincrUX0TjWGypu6qHl5/vAq+mtl4mTpou649YrTnvQ0+8TA47YBfZdbtNOEKrUU2O0GpAo4sxAhyhNYaewJoEOEKrCY5uRgjYdIR2riyRh4vGZHDYK7GxrCr9jLAhqJ0EOEJrZ13Iqm0CHKFlZQSNAEdo7apY4I7QLlpcJTsffJrceMmJssXG68iYcV/J6ZfcIS/+5wrp1aMrAk9jfSHwNKDRxRgBBJ4x9ATWJIDA0wRHtzYJJKpqJVHbKEVdO0mRx38LqwLaJPA+i/0sY+LfZXDYKLmabJUazuqAQDMBBB6LIUgEEHhBqha5KgIIPLvWgbUC740xn8ppF98u6vxGY1NCSkqKZZUh/eXpey+Vdz/8Uq65/RGZM3+RDOrfW874yyGy2cgRLlkesch9gSHwcmdGD3MEEHjm2BNZjwACT48bvZYnUDt9njTNr1r2A84ZrIqV+0px1wpPUdkk8ObKYmcH3tiM+e2Z3EhWT/X3dM4MFmwCCLxg1y9q2SPwolbx4M8XgWdXDa0VeLqYEHi5k0Pg5c6MHuYIIPDMsSeyHgEEnh43emUSSDY2SfW30zP+YVGXCum0qrfHSW0SeGqyP8RmyMfxH6VBErJhaqiMTK7K0oBABgEEHgsiSAQQeEGqFrkqAgg8u9YBAs+uehjJBoFnBDtBNQkg8DTB0c0YAQSeMfShCpxyHu6p+nZa5AReqIrIZHwhgMDzBSuD+kQAgecTWIb1jQACzze0WgMj8LSwhasTAi9c9Qz7bBB4Ya9w+OaHwAtfTU3NqH7mAmmYu2RZ+AgcoTXFmbjBIoDAC1a9op4tAi/qKyB480fg2VUzBJ5d9TCSDQLPCHaCahJA4GmCo5sxAgg8Y+hDGbipqk6StQ2ReMQilAVkUp4TQOB5jpQBfSSAwPMRLkP7QgCB5wtW7UEReNrowtMRgReeWkZhJgi8KFQ5XHNE4IWrnmGfjW134IWdN/PLnwACL3+GjFA4Agi8wrEmkjcEEHjecPRqFASeVyQDPA4CL8DFi2DqCLwIFj3gU0bgBbyAEUsfgeddwZuSCXn528/ki5mTZUT/wbLPOptKcbzIuwAGRnpy9hh5ed7/pLKoTA4esINs1n0tA1lkhkTgGS8BCeRAAIGXAyyaWkEAgWdFGZqTQODZVQ8j2SDwjGAnqCYBBJ4mOLoZI4DAM4aewBoEEHga0NrpcvOYl+TV7z9v/tHt11hXTt1ub+8CFHik9xd+LTdMfioj6g1rnSCDynt7nslnUxrkgx/rpUfnuOy5XoV07xRvNwYCz3P8DOgjAQSej3AZ2hcCCDxfsGoPisDTRheejgi88NQyCjNB4EWhyuGaIwIvXPUM+2wQeN5V+NCHbpRFNVXNA5YWl8gTR57mvD/SvozyLrr3I/1r2ovy+rxPMwY+esiesmvvjTwN9tFP9XLb279x69u1SC7fv5uUFMXajIPA8xQ/g/lMAIHnM2CG95wAAs9zpHkNiMDLC184OiPwwlHHqMwCgReVSodnngi88NQyCjNB4HlX5eOfuFOmLZzXPOCArj3kroNO8C5AgUdqvQOvyBGR1w4/zvMdeLe+VSXjfq7PmN15e3WVNfqVIPAKXHPCeU8Agec9U0b0lwACz1++uY6OwMuVWAjbI/B
CWNQQTwmBF+LihnRqCLyQFjak00LgeVfYr2dNkSvfelYW1iyVLuWd5OwdfyfrDRzqXQADI7W8A+8g5w68zX24A++hj6rl1a/rmmcXczbeXXdQd+lV2fb9gezAM7AQCKlNAIGnjY6Ohggg8AyBbycsAs+uehjJBoFnBDtBNQkg8DTB0c0YAQSeMfQE1iCAwNOAtoIujYmETF04VwZ37yVlzhFavo4JLK5JyrWvLZXJ85qkKB6T/TeukNHOPXjtfQi8jpnSwh4CCDx7akEm2RFA4GXHqVCtEHiFIm1xHASexcUhteUIIPBYFEEjgMALWsWinS8CL9r1t2X2qZTI9IUJ5/GKmLN7ccV3BiLwbKkaeWRDAIGXDSXa2EQAgWdTNUQQeHbVw0g2CDwj2AmqSQCBpwmObsYIIPA6Rt9Uk5BEfVLKerBDqWNa/rZA4PnLl9G9J4DA854pI/pHAIHnH1tG9ocAAs8frrqjIvB0yYWoHwIvRMWMwFQQeBEocsimiMBbcUFnjF0gC75Z6jbqPKhcVt61jxSVBPOVzjAsXQReGKoYrTkg8KJV76DPFoEX9ApGL38Enl01R+DZVQ8j2SDwjGAnqCYBBJ4mOLoZI4DAax999cw6+fn5XzIaDNyip/Rat4uxekU9MAIv6isgePNH4AWvZlHOGIEX5eoHc+4IPLvqhsCzqx5GskHgGcFOUE0CCDxNcHQzRgCB1z76uZ8vltkfL8po0H21zjJkp97G6hX1wAi8qK+A4M0fgRe8mkU5YwRelKsfzLkj8OyqGwLPrnoYyQaBZwQ7QTUJIPA0wdHNGAEEXvvom+oSMuGRme79d+4XE1l9vwFS0bvUWL2iHhiBF/UVELz5I/CCV7MoZ4zAi3L1gzl3BJ5ddUPg2VUPI9kg8IxgJ6gmAQSeJji6GSOAwFsx+vpFjTJ3/BJJNaSk54hK6Tyg3FitCCyCwGMVBI0AAi9oFYt2vgi8aNc/iLNH4NlVNQSeXfUwkg0Czwh2gmoSQOBpgqObMQIIPGPoCaxBAIGnAY0uRgkg8IziJ3iOBBB4OQKjuXECCDzjJchIAIFnVz2MZIPAM4KdoJoEEHia4OhmjECUBF4qmZC6pVOktKKfFJV2NsacwPoEEHj67OhphgACzwx3ouoRQODpcaOXOQIIPHPs24qMwLOrHkayQeAZwU5QTQIIPE1wdDNGICoCr77mF5n84TnSUD1LJF4sg9c7SXqstJMx7gTWI4DA0+NGL3MEEHjm2BM5dwIIvNyZ0cMsAQSeWf6toyPw7KqHkWwQeEawE1STAAJPExzdjBGIisCb+unVsnjGO79xdiTeiN0ekaLiCmPsCZw7AQRe7szoYZYAAs8sf6LnRgCBlxsvWpsngMAzX4OWGSDw7KqHkWwQeEawE1STAAJPExzdjBGIisCb8PaxUr90egbnVbe6Rjr3XMsYewLnTgCBlzszepglgMAzy5/ouRFA4OXGi9bmCSDwzNcAgWdXDYxng8AzXgISyIEAAi8HWDS1gkBUBN78n5+XmV/f0cy8oseaspoj8GKxuBV1IInsCCDwsuNEK3sIIPDsqQWZdEwAgdcxI1rYRQCBZ189vMwolnI+LwfMdayZ82tz7RL59gi8yC+BQAFA4AWqXCTrEIiKwFPFXjjtDVky6yMprRwkfVf/PQ9ZBPBnAAIvgEWLeMoIvIgvgIBNH4EXsIKRriDw7FoEHKG1qx5GskHgGcFOUE0CCDxNcHQzRiBKAs8YZAJ7RgCB5xlKBioQAQRegUATxhMCCDxPMDJIAQkg8AoIO4tQCLwsIIW9CQIv7BUO1/wQeOGqZxRmg8CLQpXDM0cEXnhqGZWZIPCiUulwzBOBF446RmkWCDy7qo3As6seRrJB4BnBTlBNAgg8TXB0M0YAgWcMPYE1CCDwNKDRJSsCiZpGmf/SZKmbslhKB1RKr92HSkmP8qz6rqgRAi9vhAxQQAIIvALCJpQnBBB4nmD0bBAEnmcogzsQAi+
4tYti5gi8KFY92HNG4AW7flHLHoEXtYoXbr6/PD5BaicsbA5Y2r+zDDxqnbwTQODljZABCkgAgVdA2ITyhAACzxOMng2CwPMMZXAHQuAFt3ZRzByBF8WqB3vOCLxg1y9q2SPwolbxws136rWfSLIu8VvAmMhKp20i8dL8XqpG4BWuhkTKnwACL3+GjFBYAgi8wvLuKBoCryNCEfhxBF4EihyiKSLwQlTMiEwFgReRQodkmgi8kBTSwmmwA8/CopBSwQkg8AqOnIB5EkDg5QnQ4+4IPI+BBnE4BF4QqxbdnBF40a19UGeOwAtq5aKZNwIvmnUvxKxb3oFXNriL9NxlZe7AKwR4YlhFAIFnVTlIJgsCCLwsIBWwCQKvgLBtDYXAs7Uy5NUWAQQe6yJoBBB4QatYtPNF4EW7/kGcPUdog1i16OaMwItu7YM6cwSeXZVD4NlVDyPZIPCMYCeoJgEEniY4uhkjgMAzhp7AGgQQeBrQ6GKUAALPKH7fgzfMWiQLX/hSUo0J6bbzCKlYo7/vMf0MgMDzky5j+0EAgecHVf0xEXj67ELTE4EXmlJGYiIIvEiUOVSTROCFqpyhnwwCL/QlDt0EEXihK2nzhBrnLZWpZz8pyZr6Zf+sOC5DLthXylftE9hJI/ACW7rIJo7As6v0CDy76mEkGwSeEewE1SSAwNMERzdjBBB4xtATWIMAAk8DGl2MEkDgGcXva/BFb3wrc+8bkxGjx+gNpPcho3yN6+fgCDw/6TK2HwQQeH5Q1R8TgafPLjQ9EXihKWUkJoLAi0SZQzVJBF6oyhn6ySDwQl/i0E0QgRe6kjZPqHr8dJl5xYsZE+x71DbSbYe1AjtpBF5gSxfZxBF4dpUegWdXPYxkg8Azgp2gmgQQeJrg6GaMAALPGHoCaxBA4GlAo4tRAgg8o/h9Dz7n/rGy+PVv3DidR64s/f+6s8RLinyP61cABJ5fZBnXLwIIPL/I6o2LwNPjFqpeCLxQlTP0k0Hghb7EoZsgAi90JQ31hBB4oS5vKCeHwAtlWTMmlVhUI8m6Rinp3y3wk0XgBb6EkZsAAs+ukiPw7KqHkWwQeEawE1STAAJPExzdjBFA4BlDT2ANAgg8DWh08YVAfO4SScVjkurVZYXjI/B8wc+gPhFA4PkElmF9I4DA8w2t1sAIPC1s4eqEwAtXPcM+GwRe2Cscvvkh8MJX0zDPCIEX5uoGZG6NCely1xtS+vkkN+G6rdeS6sO3bTd5BF5A6kqaLgEEHgshaAQQeHZVDIFnVz2MZIPAM4KdoJoEEHia4OhmjAACzxh6AmsQQOBpQKOLpwTK3v9eKu9/J2PMJaeMlsYRg9uMg8DzFD+D+UwAgeczYIb3nAACz3OkeQ2IwMsLXzg6I/DCUceozAKBF5VKh2eeCLzw1DIKM0HgRaHKds+x84PvSfm732YkWbPfplK7+0gEnt2lI7ssCCDwsoBEE6sIIPCsKocg8Oyqh5FsEHhGsBNUkwACTxMc3YwRQOAZQ09gDQIIPA1odPGUQNHMhdLtkscl1pR0x01VlMnCCw5o9y48duB5ip/BfCaAwPMZMMN7TgCB5znSvAZE4OWFLxydEXjhqGNUZoHAi0qlwzNPBF54ahmFmSDwolBl++dYNHmOVLz1jfuIRd1O60picK92k0bg2V9PMvyNAAKP1RA0Agg8uyqGwLOrHkayQeAZwU5QTQIIPE1wdDNGAIFnDD2BNQgEVeAl5s6Tpp9+kvjAgVKy0hCNmdMlqAQQeEGtXDTzRuBFs+5BnjUCz67qIfDsqoeRbBB4RrATVJMAAk8THN2MEUDgGUNPYA0CQRR4DV+Nl9pnXxBJLjtyWbrNVlKxffuvlmpgoYvFBBB4FheH1JYjgMBjUQSNAALProoh8Oyqh5FsEHhGsBNUkwACTxMc3YwRQOAZQ09gDQKtBV4qlZSpU3+Q2ppqWXnlNaWiUxeNUf3tUnXH3ZL45ZffgpSUSNezTpNYPO5vYEa
3ggACz4oykESWBBB4WYKimTUEEHjWlMJNBIFnVz2MZIPAM4KdoJoEEHia4OhmjAACzxh6AmsQaCnwGpsS8uYbj8jMGT+5I5WWlssuux0uPXv20xjZvy5Vt94hiXnzmwPEHIHXBYHnH3DLRu7Xo1zmLqqTZMqyxEgHAm0QQOCxLIJGAIFnV8UQeHbVw0g2CDwj2AmqSQCBpwmObsYIIPCMoSewBoGWAm/mrGnyykv3ZYwybM2NZLPN99AY2b8ujd98KzVPPuM8V7rM4HCE1j/WNo6MwLOxKuTUHgEEHmsjaAQQeHZVDIFnVz2MZIPAM4KdoJoEEHia4OhmjAACzxh6AmsQCKLAU9NMzJkrTT//zCMWGjUPehcEXtArGK38EXjRqncYZovAs6uKCDy76mEkGwSeEewE1SSAwNMERzdjBBB4xtATWINA6yO0b735qMyY/qM7kjpCu+tuR0iPnn01RqYLBPwhgMDzhyuj+kMAgecPV0b1jwACzz+2OiMj8HSohawPAi9kBQ35dBB4IS9wCKeHwAthUUM8pSA+YhHicjC1LAgg8LKARBNrCCDwrCkFiWRJAIGXJagCNUPgFQi0zWEQeDZXh9xaE0DgsSaCRgCBF7SKhS/f72a/JR9MekDqE1Wy4aB9ZfNVDmt3kq0FXvhoMKOwEUDgha2i4Z4PAi/c9Q3j7BB4dlUVgWdXPYxkg8Azgp2gmgQQeJrg6GaMAALPGHoCOwQWVE+Vez46IoPF3uteIGv23a5NPgg8lk3QCCDw7KhYShISj38lqfhckeRKzn/WdBKL2ZGcRVkg8CwqBqlkRQCBlxWmgjVC4BUMtb2BEHj21obMlieAwGNVBI0AAi9oFQtXvl/MeE5e//76jEmtP2gv2WX43xF44Sp1ZGeDwLOk9EVvSCw+6bdkkutLKrGpJcnZkwYCz55akEl2BBB42XEqVCsEXqFIWxwHgWdxcUhtOQIIPBZF0Agg8IJWsXDlyw48++qZSqXksynvyvgZ42Rg96Gy7bB9pKyk3L5EA5IRAs+GQjVJrOR+J5FUi2S6S6rxQBuSsyoHBJ5V5SCZLAgg8LKAVMAmCLwCwrY1FALP1sqQV1sEEHisi6ARQOAFrWLhy/e3O/CqnTvw9uEOPMMlfuf7p+Xl8Q81Z7FKnxFy3HYXG84quOEReHbULlbysJNIdYtk+jsCby87krMoCwSeRcUglawIIPCywlSwRgi8gqG2NxACz97akNnyBBB4rIqgEUDgBa1i0c6XO/D8r/8Nr50msxZPzgh07l53SZfyHv4HD2GEtMCrTdRIQ7JeyosqpSRWEsKZWj6l2HRnF96bzia8BmcfXqVI087Of+9tedKFTw+BV3jmRMyPAAIvP35e90bgeU00gOMh8AJYtAinjMCLcPEDOnUEXkALF9G0EXj+F/7+9y+X72Z+2hyo1Dk+e95e90hpUZn/wUMYQQm87+dOlqrE4mWzi8Wkd9EgqSjqHMLZ2j2llDhHaWOLHHHX00k0bneyhrJD4BkCT1htAgg8bXS+dETg+YI1WIMi8IJVr6hni8CL+goI3vwReMGrWZQzRuD5X/1flkyXB96/QuZXzZYSR9odsPHxssFKW/kfOKQRenYrli/nfJMxu/J4Z+lTMjikM2ZaQSaAwAty9aKZOwLPrroj8Oyqh5FsEHhGsBNUkwACTxMc3YwRQOAZQ09gDQIIPA1oGl0SyYTMXjxFelcOcB6wqNAYgS5pAr0cgffFXEfgtXg/AYHH+rCVAALP1sqQV3sEEHh2rQ0Enl31MJINAs8IdoJqEkDgaYKjmzECCDxj6AmsQQCBpwGNLkYJqCO0E+ZNlSVNC5flwRFao/Ug+IoJIPBYIUEjgMCzq2IIPLvqYSQbBJ4R7ATVJIDA0wRHN2MEEHjG0BNYgwACTwMaXYwSSD9iUeM8YtHIIxZGa0Hwjgkg8DpmRAu7CCDw7KuHlxnFUs7n5YC5jjVzfm2uXSLfHoEX+SUQKAAIvEC
Vi2QdAgg8lkGQCCDwglQtclUE0gIvafRPINQCAtkRQOBlx4lW9hBA4NlTC5UJO/DsqoeRbBB4RrATVJMAAk8THN2MEUDgGUNPYA0CCDwNaHQxSgCBZxQ/wXMkgMDLERjNjRNA4BkvQUYCCDy76mEkGwSeEewE1SSAwNMERzdjBBB4xtATWIMAAk8DGl2MEkDgGcVP8BwJIPByBEZz4wQQeMZLgMCzqwTms0Hgma8BGWRPAIGXPSta2kEAgWdHHcgiOwJ+C7xUUuSrNxLy48cJKa+Myfo7x2Xw2kXZJUcrCLRBAIHHsggSAQRekKpFrooAAs+udcAOPLvqYSQbBJ4R7ATVJIDA0wRHN2MEEHjG0BNYg4DfAk+Ju3FPNjVnFo+L7HNWqXTqFtPIli4Q4A481kCwCCDwglUvskXg2bYGrBZ4CxYtlbMuu1Nmz10oz91/WTO7aTPnyHlX3Ss//DhVBvbvLeecfJiMXHcN98d5xCL3JYbAy50ZPcwRQOCZY09kPQIIPD1u9DJDwG+BN/bhJpnyZSJjclv+X4kMXd8xeXwQ0CDADjwNaAHr0vBTtVQ/8YskFjRK2UZdpXK//hIrDqb0R+AFbPGRLjvwLFsD1gq86po6OeT4i2XbzTeQdz/6MkPgHXHy5bLDViPl0P12lg8++caReffI649eKyXFRQg8jQWGwNOARhdjBBB4xtATWJMAAk8THN2MEPBb4LEDz0hZQx0UgRfq8kqqLiHzzp0gqVrn/P2vX+c9+kjnPfsGcuIIvECWzU069elUSc1cJLFNh0qsX9fgTiTHzDlCmyMwn5tbK/Bqautk3oLF7n8uvPaBZoE3f+ES2e3/zpAPX7hViouW3ZlywDEXyBknHCKbbjgcgaexYBB4GtDoYowAAs8YegJrEkDgaYKjmxECfgu89B14Ez9KSEVX++/AmzDta3nzi5elvKRcdt90P+nfc5CRuhC0fQIIvHCvjoYJ1bLoxskZkyweWiE9T181kBNH4AWybJK85R1JvvHdsuRLiqTo/D0ltm40/n2AwLNrzVor8NKYPhs/IUPgfTZ+olx83QPyzH2XNpM89aLbZNTIEfL7vbaTuYvq7CIcgGy6di6RuoakNDRmHmkJQOrBTjGYO/+NMy8pikun8iJZXN1oPBcvE4gJC8JLnjaN1b2yVKpqm6Qp8dvuAZvyIxcItCTQo0upFMVjsnBpgySSqUjDmTRropxz38mSSi3j0Lm8Uq485jbp1bVPpLnYNvmeXUvd9fprmWxLj3zyJJB0duDNPvuHjB14XUf3lS7Of/L5UmLm17eK0mLn11iRqrrf7gLNZx709Z9AamG11B/+74xA8ZFDpPSi0QUI7n+IjiL06V6OY+kIUgF/XNXDyy/m/CbH018NWwu8Dz75Wm66+0l55I4LmvM+98p7ZNiqg+XwA3eVhib+gJRrQYud36gnnbJF/PfpuWLLv72nP1PyTycoI8Qcz6X+cNmUCBlA/F1QlmDOeRYXxVwR4u2/HXNOgw4QyIpASXHc/euERuf3UyH7VTar+bdsdP+rd8tTYx7N6Hfa7/8h26y3fc5j0cE/Auov9tRfkER9vfpH2PzItROqZO5/Z0rT3AapHNVd+hwyKP878AwtGPVwT8z5zWwibL+PNb9MfMsguaBaFhx0T8b4pRuvJF0v39e3mM0DW/DnA3X6Ccfif6mzjaDq4eXnu8D7/OuJcsE192fciff3C2+VLTZeRw4YvS1HaDWqyRFaDWh0MUaAI7TG0BNYkwBHaDXB0c0IAb+P0BqZlGbQt52js/e9cnNG79MPukzWXWVDzRHp5gcBjtD6QZUx/SLAEVq/yHo/bmNTo0xZME3ijnAd9MhPUvTmxGVBOELrPWxGzJpA4I7QLly8VHb6/aky9tlbpKK81J3o7n84Q/55/Yn0iwAAIABJREFU9jGy4TprIPCyLv1vDRF4GtDoYowAAs8YegJrEkDgaYKjmxECCLzfsKs/vF33xAXyzeQ
v3H+448g95Yhd/mKkLgRtnwACj9URJAIIvGBUq66xVl755g2pqa92E+7ZuafsULeGFM9ayiMWwShhaLMMnMBTlTjq71fJJhsMl2P+MFpefnuce6T25YeukiJnC/3M+bWhLZZfE0Pg+UWWcf0ggMDzgypj+knAVoG39OcvZP5X70pJZXfpu9nezv/t4ScGxg4IAQTe8oX6ZcFMKS8tl26VPY1Wsb5hsfN73RLnEbdORvOwLTgCz7aKkM+KCCDwgrE+vpn5rXw57auMZLdcfXNZudfKwZiAh1nyiIWHMD0YylqB98aYT+W0i2933mtOOfewJKSkpFhWGdJfnr73Upkxe5784/K75IefpsmQgX3lwlOPlLXXHOriQODlvioQeLkzo4c5Agg8c+yJrEfARoG3eOKnMumpa5onVNa9nww78nIpKqvQmyS9QkMAgWdfKZPJJpn2y5tSUzvTTa5nt3WkX69N7EvUUEYIPEPgCatFAIGnha3gnRB4vyFH4BV8+a0woLUCTxcTAi93cgi83JnRwxwBBJ459kTWI2CjwJvy3E2y8LsPMya06gFnStfVNtCbJL1CQwCBZ18p5y0aL3MXfJKR2MoDRzsvsvMaroKCwLNvzZJR+wQQeMFYHbUNtfLqt78doe3RqYfsNGJH5/q74mBMwMMsEXgewvRgKASeBxCDPgQCL+gVjFb+CLxo1TsMs7VR4M1671H55cNnMvCudcx1UtZzQBiQM4c8CCDw8oDnU9cZc96VJVU/Z4zev/cW0qPrmj5FDNawCLxg1Svq2SLwgrMCGpoaZNrCGe7L7Cv1WkmK40XBSd7DTBF4HsL0YCgEngcQgz4EAi/oFYxW/gi8aNU7DLO1UeA11VbJT49fKbWzfpSY8xvS/lsdIP023zcMuJlDngQQeHkC9KF7de0smTrrVWfklDt6cXEnWWXQPs5deOU+RAvekAi84NUsyhkj8KJc/WDOHYFnV90QeHbVw0g2CDwj2AmqSQCBpwmObsYI2CjwXBjOHbO182ZIcecuUtKpmzE+BLaLAALPrnqks6lxJN7CpRMkHi+RXt3WltISfs6m2SDw7FyzZNU2AQQeKyNoBBB4dlUMgWdXPYxkg8Azgp2gmgQQeJrg6JYXgV9+mCiTPvhYyrp0lmHbbyOde2X/GqW1Ai8vInQOK4EoCLxEbUKSVSkp6e3cZaTORvEFmgACL9Dli1zyCLzIlTzwE0bg2VVCBJ5d9TCSDQLPCHaCahJA4GmCo5s2gTkTf5J3brqjuX95166y+3mnS0l5dsfXEHja6OlogEDYBd7SD5dK1dhqSSUdgde/RHoe2FOKOsUNkCakVwQQeF6RZJxCEEDgFYIyMbwkgMDzkmb+YyHw8mcY+BEQeIEvYaQmgMCLVLmtmOwnjz4pP4/9KCOXrY87SgasPTyr/BB4WWGikSUEwizwmhY1yZw752aQ7rxppXTbvosl9ElDhwACT4cafUwRQOCZIk9cXQIIPF1y/vRD4PnDNVCjIvACVa7IJ4vAi/wSKDiAb199U75+4ZWMuLuc9XfpPii7F1sReAUvGQHzIBBmgVf7Xa0sfG5RBp2SAaXS5/BeeRCjq2kCCDzTFQhm/JqF1VJfXS89Bmd/JYYXM0XgeUGRMQpJAIFXSNodx0Lgdcwo9C0QeKEvcagmiMALVTkDMZnGunoZc8c9Mu+nSW6+w3feXtbbe4+sc0fgZY2KhhYQCLPASzWlZO4980TtxEt/PX7XQyqGZXcc3oLykEIbBBB4LItcCYy5920Z/9LnbrfB660ku52+t5RWlOY6jFZ7BJ4WNjoZJIDAMwi/jdAIPLvqYSQbBJ4R7ATVJIDA0wRHt7wJLJ41W8oqO0t5l9yO2ymB99nXP8mX33wvaw1bXVZfdWjeuTAABPwiEGaBp5glqhJSNa5aEksTUrF2hVSsgbzzay0ValwEXqFIhyPOrO9myNPnPZoxmc0O3VpG7rtJQSaIwCsIZoJ4SACB5yFMD4Z
C4HkAMehDIPCCXsFo5Y/Ai1a9wzDb998fK9ff+aCkUil3Okf+4UDZa7edwjA15hBCAmEXeCEsWeSn1FLgNTZWS1XdPOleOURiMR4nifziaAPAZ09/LB89NDbjR1bfck3Z5ZQ9C4ILgVcQzATxkAACz0OYHgyFwPMAYtCHQOAFvYLRyh+BF616h2G2x//tLJm7YFGzwOverZvcc8tVYZgacwghAQReCIsa8imlBd7PM9+R//1wt/PCcJNUVvSTbTY4x/m/fUM+e6aXK4HaJTXy8En3O/ff1bldY7GY7HvxgTJgrcG5DqXVHoGnhY1OBgkg8AzCbyM0As+uehjJBoFnBDtBNQkg8DTB0c0YAQSeMfQE1iCAwNOARhejBJTAmzlvkTw15mhX3qW/If22lM3XPslobgS3k8CimQvlixc+lQbnEYt1d1u/YPJO0UDg2bkmyKp9Agg8u1YHAs+uehjJBoFnBDtBNQkg8DTB0c0YgdZHaI8+/GDZ3XkIgw8CNhJA4NlYFXJaEQEl8L6e9Km8/dklGc26dh4ou426DngQsIoAAs+qcpBMFgQQeFlAKmATBF4BYdsaCoFna2XIqy0CCDzWRdAI8IhF0CoW7XwReNGufxBnrwTenIU18vqn58uCxT82T2HDNf4oawzZNYhTIucQE0Dghbi4IZ0aAs+uwiLw7KqHkWwQeEawE1STAAJPExzdjBFQAm9xdaM0NiWN5UBgCGRLAIGXLSna2UIgfQdefUO1fDvlWamqnSVD+m4uK/XbwpYUyQMCzQQQeCyGoBFA4NlVMQSeXfUwkg0Czwh2gmoSQOBpgqObMQIIPGPoCaxBAIGnAY0uRgm0fIXWaCIEh0AWBBB4WUCiiVUEEHhWlUMQeHbVw0g2CDwj2AmqSQCBpwmObsYIIPCMoSewBgEEngY0uhglgMAzip/gORJA4OUIjObGCSDwjJcgIwEEnl31MJINAs8IdoJqEkDgaYKjmzECCDxj6AmsQQCBpwGNLkYJIPCM4id4jgQQeDkCo7lxAgg84yVA4NlVAvPZIPDM14AMsieAwMueFS3tIIDAs6MOZJEdgaAKvOT3TZJ8qk5SC1MSG1UsRfuWS6w4lt2kaRVoAgi8QJcvcskj8CJX8sBPGIFnVwnZgWdXPYxkg8Azgp2gmgQQeJrg6GaMAALPGHoCaxAIosBLVSWl6ZwqkYZU84zj+5VL0c5lGgToEjQCCLygVSza+SLwol3/IM4egWdX1RB4dtXDSDYIPCPYCapJICoCL+X8QbTozTqJT2iUxMCUJPeolFjnIk1qdDNJwHaBN23RIrn3f+Pkx/nzZOSgwXL0pptJlzLEh8k1YzJ2EAVe8mvn18lbazKwxUYUS/FJnU2iJHaBCCDwCgSaMJ4QQOB5gpFBCkgAgVdA2FmEQuBlASnsTRB4Ya9wuOYXFYFXcoezm+TDGqlZOkcSiUZJdYtL6dGDpWibXuEqaARmY7vAO+HpJ+SXpUubK7HdaqvLSVtuHYHKMMW2CARR4KVqUtJ09hJnB95vM2IHXnTWNwIvOrUOw0wReGGoYrTmgMCzq94IPLvqYSQbBJ4R7ATVJBAJgefsvis7aaHULpwvDfWOyFOfs/kutVKRlF+8lsS6lmjSo5sJAjYLvIW1NXL0449mYOle0UnuOfAgE6iIaQGBIAo8hY078CxYPIZSQOAZAk9YLQIIPC1sdDJIAIFnEH4boRF4dtXDSDYIPCPYCapJIBICz2FTduoiqZo03d19537OicbUgCIpPXYVKRrRRZMe3UwQsFngKR7swDOxKuyNGVSBZy9RMvObAALPb8KM7yUBBJ6XNBmrEAQQeIWgnH0MBF72rELbEoEX2tKGcmJREXjx8Y2SvG6W1C2Yv2z3Xb+4xFaukPLT13D+Ny8rBmlx2y7w1B14d437UCYtXODegXfMqM2kspQ78IK0xrzMFYHnJU3GKgQBBF4hKBPDKwIIPK9IMk6hCCDwCkU6uzgIvOw4hbo
VAi/U5Q3d5KIi8FTh1EMWyXcWSGJqlcT6lkrxDn0kVlkcupqGfUK2C7yw82d+uRFA4OXGi9bmCSDwzNeADLIngMDLnhUt7SCAwLOjDuksEHh21cNINgg8I9gJqkkgSgJPExHdLCOAwLOsIKSzQgIIPBZI0Agg8IJWsWjni8CLdv2DOHsEnl1VQ+DZVQ8j2SDwjGAnqCYBBJ4mOLoZI4DAM4aewBoEEHga0OhilAACzyh+gudIAIGXIzCaGyeAwDNegowEEHh21cNINgg8I9gJqkkAgacJjm7GCCDwjKEnsAYBBJ4GNLoYJYDAM4qf4DkSQODlCIzmxgkg8IyXAIFnVwnMZ4PAM18DMsieAAIve1a0tIMAAq+wdUjWJ2TxD4vdoN3W7CbxMucVGL6sCSDwskZFQ0sIIPAsKQRpZEUAgZcVJhpZRACBZ1ExnFTYgWdXPYxkg8Azgp2gmgQQeJrg6GaMAAKvcOgTdU3y0wMTpX5BvRu0rFe5rHrY6lJcweMv2VYBgZctKdp5RWBeTa0kncH6dqrQGrJLcqF89cStUjX9O6lceT0ZvNufpaRzD62x6AQBvwkg8PwmzPheE0DgeU00v/EQePnxC0VvBF4oyhiZSSDwIlPq0EwUgVe4Us7/bL7MfHVaRsCBuw6RXiN7FS6JgEdC4AW8gAFKP5lKyZ1ffCfvT5/lZr3pgL5y4kbrSDwWy2kWUx/+hyyY8p3zdPuybl2HbSqrHnB2TmPQGAKFIoDAKxRp4nhFAIHnFUlvxkHgecMx0KMg8AJdvsglj8CLXMkDP2EEXuFKiMDLnzUCL3+GjJAdgXEzf5GbP/06o/FJjsAbNbBfdgM4rZKN9fLdDX+QxqZEs8ArKu8k6/79oazHoCEECkkAgVdI2sTyggACzwuK3o2BwPOOZWBHQuAFtnSRTByBF8myB3rSCLzCla+ptkl+/s+PUj+/zg1a1rNMVjtiDSkq5whttlVA4GVLinb5Enjk2x/lhZ+mZAwzerWV5eARq+c0NDvwcsJFY8MEEHiGC0D4nAkg8HJG5msHBJ6veIMxOAIvGHUiy2UEEHishKARQOAVtmI8YpEfbwRefvzonT2BWVU1cu6Yj6Ve7Z5T/34visul22wqAys7Zz+I03K5O/B2de7Aq+QOvJwg0rhgBBB4BUNNII8IIPA8AunRMAg8j0AGeRgEXpCrF73cEXjRq3nQZ4zAC3oFo5U/Ai9a9TY926lLquSVn6e6j1jsPHSQrNa9W84p8QptzsjoYJAAAs8gfEJrEUDgaWHzrRMCzze0wRkYgRecWpEpO/BYA8EjgMALXs2inDECL8rVD+bcEXjBrFtUs0bgRbXywZ03As+u2iHw7KqHkWwQeEawE1STADvwNMHRrZlAoikpjUvrpLRrhcSLcnvtUAcjAk+HGn1MEUDgmSJPXF0CCDxdcvQzQQCBZ4I6MfMhgMDLh573fRF43jMN3IgIvBWXrK6uSWbOrpKVh3STogL8YT9wC6jACSPwCgw8ZOGqZi2VmeOmSqK+SYorSmTQFitJp96Vvs4SgecrXgb3mEDQBV5TMinzqpdIr05dpKSoyGM6DGcjAQSejVUhp/YIIPBYG0EjgMCzq2IIPLvqYSQbBF772D8YN0NuufMzqalplN69O8m5p28uQ1fO/X4WI4UNaVAEXkgLW6Bp/fjCd9JY3dAcraJnJxm68xq+Rkfg+YqXwT0mEGSBN2vpInnsq3FSVV8nZSUl8rsRG8tqvfp6TIjhbCOAwLOtIuSzIgIIPNZH0Agg8OyqGALPrnoYyQaB1zb2ZDIlRxz7kixdWt/cYOQG/eT8s7Y0UieCLiOAwGMl6BJoqm2Uic99m9E95uyqHX7AerpDZtUPgZcVJhpZQiDIAu+Bz8bK9EXzm0lWllXIyVvuYglZ0vCLAALPL7KM6wcBBJ4fVBnTTwIIPD/p5j42Ai93ZqHrgcBru6TTZyyVE099PeMHu3cvl/vv2CN0ayBIE0LgBal
a9uU6wzk+u2TywubEegzrLf03HORrolETeI3Oe5LfFS+WufF6GZzoJMMSXSTm/D++YBAIssC75r2XpL6pMQP0yVvtKpWl5cGAT5ZaBBB4WtjoZIgAAs8QeMJqE0DgaaPzpSMCzxeswRoUgdd+vS696gP55LPZzQ3+cNAIOfB3w4NV4JBli8ALWUELPJ1kIikLJs6T2nnVUtm3i3RfvZfE4v7KpagJvDdLZ8u0eHVzZddJ9JCNG3sWuNKE0yUQZIH3xo/fyLipPzZPfXifgbL/upvooqBfQAgg8AJSKNJ0CSDwWAhBI4DAs6tiCDy76mEkGwRe+9hra5vkhVd+lAk/LZSNN+wvO28/VOI+/2HfyCIIUFAEXoCKRaougSgJvCZJyUPlPzv//29ft1Sp/K5+SHBWQ1NCKt4bLyXfT5XG1QdK7fYbiJQUByf/PDMNssBLplLy6fRJMmnRXBncradsMmhVHrLIcz0EoTsCLwhVIsc0AQQeayFoBBB4dlUMgWdXPYxkg8Azgp2gmgQQeJrg6GaMQJQEnoL8WNkUqYk1NfPum6qQPeoHGuOfa+DOT4yRTq9/2tytfqNhsuTP0bk6IcgCL9da0z4cBBB44ahjVGaBwItKpcMzTwSeXbVE4NlVDyPZIPCMYCeoJgEEniY4uhkjEDWBN9U5Pju2bK40pBLSKVUiOzb2k17JMmP8cw3c6/S7JL7ktyPAqdJimXfjCeJsv851qEC2R+AFsmyRThqBF+nyB27yCLzAlSzyCSPw7FoCCDy76mEkGwSeEewE1SSAwNMERzdjBKIm8BRo9ZDFklij9EiVSdC0V/erHpOSn2Y2r5dk90qZf+XRxtZPoQMj8ApNnHj5EkDg5UuQ/oUkgMDznnaysVYaZ/wgqVhcygaPkOSCGjdIUZ+u3geL4IgIPLuKjsCzqx5GskHgGcFOUE0CCDxNcHQzRsAXgZeYLpJyHtiJr+L8p5exuYUxcPGk2dLt9hckvrhKkpUVsvRPu0rD2kPDONU254TAi0ypQzNRBF5oShmJiSDwvC1zqq5KFr1zv6RqFi8beK7zn/e7SSwZl5Ithknn43b0/bEyb2dk32gIPLtqgsCzqx5GskHgGcFOUE0CCDxNcHQzRsBrgRdres/Z4jbu1/k4+9tKR0uqaE1j8wtl4MaEFM+aL039ezh8S0I5xfYmhcCLVLlDMVkbBV5Dap7zmE+TlMX6h4Ixk/COAALPO5ZqpNof3pfab95xB01V10tyzhIpmtxfYguX7b7rfNIuUjpqdW+DRmw0BJ5dBUfg2VUPI9kg8IxgJ6gmAQSeJji6GSPgrcBrkljdLc7vUhub55OKDxApO9TY/AgcLgIIvHDVMwqzsU3gTU/eJ/NSr7vou8ZHytDYyc5VAtH6i4AorDvdOSLwdMm13a+lwEsurJbUopoMgVc2ekPpdPDm3gaN2GgIPLsKjsCzqx5GskHgGcFOUE0CCDxNcHQzRsBLgZdyxF287kZnLikEnrGKhjswAi/c9Q3j7EwLvLkyS4pTxdIj1keWpMbLz8nLMzAPjh8tvWM7hBE9c9IggMDTgLaCLsnapbL43QfcI7QpZ/d86qdaKfpukPPbJOeEQlmxdL3kQCka6Oym59MmgMDTRudLRwSeL1iDNSgCL1j1inq2CLyor4Dgzd9LgefOvnGsxJo+/BVEXFKl+zg3NXM8JHgrw86MEXh21oWs2idgSuA1Oc/13B+7VsbHPnKT21r2kK2b+sms1KMZyfaK7yhDYkdRQgi4BBB43i+Elo9YFCX6SOMb30nK+XvOsl3XleKhfbwPGLEREXh2FRyBZ1c9jGSDwDOCnaCaBBB4muDoZoyA5wJPzYRHLIzVM+yBEXhhr3D45mdK4H0Qe00ejd2WAfTY5CmSTDzo3H5X/es/L5ZhRRdJJ3EeHOKDAAKPNRBAAgg8u4qGwLOrHkayiZrAa6hPyvxf6qTfoAqJF8WMMCeoPgEEnj47epoh4IvAMzMVoka
AAAIvAkUO2RRNCbxH47fLB/JqBs3RqUNlm+SWMkdekmSqzjk6u7N0jg0LGXGmkw8BduDlQ4++Jggg8ExQbz8mAs+uehjJJkoC75vPFsuzD0yTupqEdO1VKv93/MoycOVORrgTVI8AAk+PG73MEQiiwGuorpPvX/qfLJ4yRwZtsoasuu165gASuaAEEHgFxU0wDwiYEnhT5Ue5IX6WJJz9duqrkM5yZuIG9y48Pgi0RwCBx9oIGgEEnl0VQ+DZVQ8j2URF4CWTKbnqtG+lZumy32ipb5XhlfLHU1fLjnuqSeKJXyQR7yuxOK+JZQfN+1YIPO+ZMqK/BIIo8F479wGZP3FGM5gND99Jhu+5qb+gGN0KAgg8K8pAEjkQMCXwVIqT5Ht5P/6qFDkX5u+Y2l/6ysAcMqdpFAkg8KJY9WDPGYFnV/0QeHbVw0g2URF4c2fVy83nf5/BuKxTkZxz4zodco83TpXKqgcklljiPGrUWWoqD5Gm0jU77EcD7wkg8Lxnyoj+EgiawKtbWCVPH3dTBpSeqw+UXS870l9QjG4FAQSeFWUgiRwImBR4OaRJUwi4BBB4LISgEUDg2VUxBJ5d9TCSTVQEnoL70K2T5YcvFjdz3nav/rLj3v065F65+FYpapzc3C4Z7yZLe57bYT8aeE8Agec9U0b0l0DQBF5jbYM8/ecbJNHw227lIaOGy1Z/389fUIZHn5iok5cblkrcyWN0aTcZWlRqOCMz4RF4ZrgTVZ8AAk+fHT0LTwCBV3jmRMyPAAIvP35e90bgeU00gONFSeDV1yXlozfnyvTJNbLmul1l5FY9JR7v+CGLrgvOk1iyLqO6S3ueJ8l41wBWPNgpI/CCXb8oZh80gadqNOmdr+R/974qifpGqezfQ7Y98yDpOrBnaMs3I9Egf6ua4dxllXLnWBaLyY2dB0vfouhdl4DAC+0yD+3EEHihLW3oJra48WupSnzq/tmjMr6RdCleO3RzZELhI4DAs6umCDy76mEkmygJPF3A5dXPSlnt2ObujWXrS02XQ3WHo18eBBB4ecCjqxECQRR4ClRjbb1U/bJIug3p47zYrfalhfd7qn6RPFi3IGOCf67oLbuVRu8vabIVeI3OX2pNr/3YZTa4YhMpiVeEd4EwM6sJIPCsLg/J/Uqgtmm6zKp7ypV3zt8RSSKRkoHl+0t58SAYQcBqAgg8u8qDwLOrHkayQeBlgT2VlLK6DyTe+KMkSoZKQ/mWIrHo7czIgpTvTRB4viOObIAlqfkyIfW5dJWeskZ8Q3F+i+0Ji6AKPE8mH5BB3nWOzt5YOzcj2zM79ZNRJZ0DMgPv0sxG4DUkq+XNORfK0sbZbuCuJQNk+77nS1m80rtEGAkCWRJA4GUJimZGCcyvf18WNy7bfZcWeD1Lt5DupRsbzYvgEOiIAAKvI0KF/XEEXmF5WxkNgWdlWUiqHQIIPJaGHwRmpH6S/yavkobUsqPya8RHyoHxkz0JhcDzBKOvgySdo7NXV8+RcU3VbpxtSrvIX50deHGPJK6vyXs8eDYC78eqN+WzhfdnRB7Z40hZvXJHj7NhOAh0TACB1zEjWpgnUJtwduDVttqBV+HswCtiB5756pDBiggg8OxaHwg8u+phJBsEnhHsBNUkgMDTBEe3FRJ4JnmbfJscl9Hmz0WXS+/YwLzJIfDyRliwAWYnG6XYuQavdwTvvktDRuAVbLkRyCMCCDyPQDKM7wSWNI6XpYnPuAPPd9IE8JIAAs9LmvmPhcDLn2HgR0DgBb6EkZoAAi9S5S7YZBF4BUNNIMsJZCPw6pNV8vaci2VJ4yx3Nl1KnBfd+14opfHoHTm2vJyRSA+BF4kyh2aSvEIbmlJGZiIIPLtKjcCzqx5GskHgGcFOUE0CCDxNcHRbIYHWR2iHxzaV/Yr+4gk1duB5gpFBCkQgG4GnUmlM1jqPWHzi/LcUj1gUqDaEaZsAAo+VESQCCLwgVYtcFQEEnl3rAIFnVz2MZIPAM4K
doJoEEHia4OjWIYH2HrH4bsyTMunzN6WscxcZsfXvZcCwjTocq2UDBF5OuGhsmEC2As9wmoSHQDMBBB6LIUgEEHhBqha5IvDsWwMIPPtqUvCMEHgFR07APAgg8PKAR9ecCUz56l357MU7m/vF40Wy47HXSmX3vlmPhcDLGhUNLSCAwLOgCB6kUF01Rz764BaZNeNT6dd/XRm15V+la9f87/T0IDXPh0DgeY6UAX0kgMDzES5D+0KAHXi+YNUeNJAC7+DjL5bvJ04R9w1u5+ta2Unee/om97/PnF+rDSOqHRF4Ua18MOeNwAtm3YKa9f+euVmmf/dhRvqb7HOiDB6xRdZTQuBljYqGFhBA4FlQBA9SePWl02XGtP81j9RvwLqy5943ezCyfUMg8OyrCRm1TwCBx+oIGgEEnl0VC6TA2/Ows+TGi0+S1VdZ/tltBF7uCwyBlzszepgjgMAzxz6KkdmBF8WqR3vOCLxw1P/B+/aUhobq5snEnL/0PvSPL0tJSXk4JthiFgi80JU01BNC4IW6vKGcHALPrrIGUuBtu9/J8uidF0j/Pj2Xo4nAy32BIfByZ0YPcwSUwJs9aZZMnbxQVl1nsHTtUWkuGSJHggB34EWizEzyVwIIvHAshddfPkumTf2oeTLswAtHXZmFPwSSqaQ8NuEVeX3y+9K9vKscvOYesolz9NyPD4HnB1XG9JMAAs9PurmPHUiBt+Eux8g2o9aTz7+eKL17dpO/HXOAbLPZ+u7sZy3gCG2uy6BHZanU1iekrjGRa1faQ6DgBF57+H2Z+OUUaUompbikWH53zA4ycJXs7yMreMIEjDyBXl3LZEl1ozQmkpFnAQD7CfQPTCBnAAAgAElEQVTpVi7FRTGZu7hOmhIp+xMmwzYJpO/Amzl92R14m20V3jvwlHSe56zXJMuVnw2aBF5zxN1d4x9r7l0kcbltpwukZ0V3zRHb79aprNj9NXZJTaPnYzMgBPwgMKBnBY7FD7CaY6p6ePnFUs7n5YCtx0o6/3Y+76p7ZLftR8nmG4+Qdz/4Us76553y/L8vd3fk+Rvdz5nlP3aysUnmf/Gj1P0yXyqHDpAea6/SfE/gikZXVwm6RfO1cvnPjxEgsHDeUrn+nN9+g6WIrLvJqnLg0dsDBwLWEuDXWGtLQ2JtEPj1euFI/36KhREsAu6vsfweNlhFsyzbS8beLW9P+SQjq3O3PFp2GLqx55nya6znSBnQZwL8Gusz4ByHT/8akmO3dpv7LvDaivzHU66Q/ffcVkbvtHmkH7FY+PpH0jhzbjOiTusPk8oN1uywtmE9QptYkJSqD5ukuHdcOm1SLLF4hyhoYDmBmiW1cs+lT0lRPObsDFm2m2mVtQfLXkduZ3nmpBdlAjxiEeXqB2/uHKENXs2injF34EV9BeQ/f7UD7+4WO/BKi0vlpu3O8WUHHkdo868XIxSWAEdoC8u7o2iBO0JbU1svEydNl/VHrNY8t0NPvEwOO2AX2XW7TSIr8BK1dTL/8Tcy/gqyuFul9Nx3e4nVpaR4fIOkikSa1i0VKVn2em/6C6PAq5+ckNln10qyetlfyVZsVCT9zuuExOvoV4QA/Pgbj30oEz6b5Aq84tJi2fdojtAGoGyRThGBF+nyB27yCLzAlSzyCSPwIr8E8gbQ+g68/xs+Wjbqt3be47Y1AALPF6wM6iMBBJ6PcDWGDpzAW7S4SnY++DS58ZITZYuN15Ex476S0y+5Q178zxXSq0fXyAo8dXJ53n9fkZRzjDb9lQ7qKz1HbSIVty+VokXL7rdLDiyWqmO6iJT9JvHCKPDm3VYnS1/JvFtiwFUVUj68WOOnCV1sIlDi3Bvyy+TZMn3qIhk6YhCPWGRRnAUzmmTB1CbpvUqJdO/vmHy+ghJA4BUUN8HyJIDAyxMg3QtOAIFXcOTWBFTibfLiOdKvU3fpXBqMF5YReNYsHxLJkgACL0tQBWoWOIGnuLz74Zdyze2PyJz5i2RQ/95yxl8
Okc1GjnCRRfkV2rops2TJ2M+dbXYJiXeukO47jpLOXxZL2WuZD3vUHlwpjes5O/F+/RB43v9sq69x7iOcUSulnYqk10Bn51/mpkfvA0ZoRPUKbZdOxTJ/SUOEZq0/1Qlj6uSbN+vcAdQ63HDvTrLyhr/9/NcfmZ7ZEkDgZUuKdjYQQODZUAVyyIUAAi8XWuFpO7t6oVzy4UMyZfEvztUqRfKXDfeSXYZuZP0EEXjWl4gEWxFA4Nm1JAIp8FaEMMoCT3FJOvIusXiplPToJuLcE1b2Tm0kBV7DzwmZdU6LI7Qji6Xf+RUFOUK7ZH69jH93jiQal93R1mtQJ1l7qz52/cwPcDYIvOyL5/zFtLx87RKpr/7t9dMuvYtkpxOdXbh8BSOAwCsYagJ5QCBsAm/aR5Nk9pczJV4Sl5W2WEX6DO/vASWGsIkAAs+mahQul+s/eUrenOJsXPj1UxLvkdH/kIoSu/+SEoFXuDVCJG8ItBZ46uTfpJ/Gytw5P0hZWaWsvOqW0qPHEG+CMUqHBBB4HSIKdoP4kqRU3NHiCO0A5wjtsc4f3kvDfYRWVc3UIxbffThP5k6tzlg4G+8+UDp1LQn2YrIkewRe9oVQAu/5yxc5Mvm3Pgi87Pl51RKB5xVJxikEgTAJvHkT5sj3z43PwDbyT5tJp56dC4GSGAUigMArEGjLwhz32o0yfem8jKyu3u4YWavXSpZlmpkOAs/q8pBcGwRaC7wZ0z+XyY7AS3/xomLZaNPDpbSUf7cWYgEh8ApB2XCMWG1SSr5tkmQsFZlHLEwiR+D5Sx+BlxvfiR/Wy9evLjtG7x6h3cc5QruB3X87ndsM7W+NwLO/RmT4G4EwCbwfX//e2X03I6O8q+00XAZsMIiSh4gAAi9ExcxhKq9N/lRu+vSZ5h7Deg6Ra7Y72jmAFM9hlMI3ReAVnjkR8yPQWuB9M/5ZWbRgasagI9bZW3r0Wjm/QPTOigACLytM4W4UxjvwTFas9RHa3kM6yYgtOELrVU0QeLmT5BGL3Jl52QOB5yVNxvKbQJgEHjvw/F4tdoyPwLOjDiayGDv9a3l32ngZWNlL9hu2pXQrs38HEALPxEohZj4E2IGXDz3v+yLwvGcauBEReN6XjEcsvGeaHhGB5x9bRvaHAALPH66M6g+BMAk8RUjdgTfz8xlSXFbEHXj+LBnjoyLwjJeABHIggMDLARZNrSCw/B14SZky6SP5Zfa33IFnoEIIPAPQbQuJwLOtIuSzIgIIPNZH0Agg8IJWsWjnGzaBF+1qRmP2CLxo1Dkss0TghaWS0ZkHr9DaVWsEnl31MJINAs8IdoJqEkDgaYKjmzECCDxj6AmsQQCBpwGNLkYJIPCM4id4jgQQeDkCo7lxAgg84yXISACBZ1c9jGSDwDOCnaCaBBB4muDoZowAAs8YegJrEEDgaUCji1ECCDyj+AmeIwEEXo7AaG6cAALPeAkQeHaVwHw2CDzzNSCD7AnYLvC+eGyRfPdilZRWxmSjQ7vL0M3tv1A5e/q01CGAwNOhRh9TBBB4psgXNm7VgomycMbHUlTaWXqvtI2UVvQsbAIeRkPgeQiToXwngMDzHTEBPCaAwPMYaJ7DsQMvT4Bh6I7AC0MVozMHmwXez2Oq5Z1r5mYUY79bB0n3wSXRKRAzXY4AAo9FESQCCLwgVUsv16oFP8mkT+/4f/bOA86Oqu77v9u3l2Q3m957b5SEkgRCDSJNQKooiKKCgvL4KDwooIiFFwQFrBQLShOQEkEINQkthZDey2ZTtpe7t79zd8nd3LS9d+7MnHNmfvd5efGTzDn///n+TsLud2fOpAZ7AyUYceyN8GoyT8UPBZ6KqTm3Zwo852av6sop8ORKjgJPrjyEdEOBJwQ7i+okILPAe/fBWqx5pTltZTO+1hNjzijWuVoOswMBCjw7pOicNVDg2T/rHaueRt32RWkLHTzlKyiuGK3k4inwlIzNsU1T4Dk2emUXToEnV3QUeHLlIaQ
bCjwh2FlUJwGZBd6Bd+C5PcA5v+YdeDqjts0wCjzbROmIhVDgGRNzczSOtxujaIkmMKXYixEF2n8QJPns3vwGdq17Ka2bYUd/CwWlAyXpMLs2KPCy48WrxRKgwBPLn9WzJ0CBlz0zM0dQ4JlJV5G5KfAUCYptdhDQI/A2fNiIVW/Xo7inD0edU4WCEq9pNPc/A2/qpeUYMrPAtFqcWA0CFHhq5MQuOwlQ4OW+E0KxBH67I4gGTd7t+1xclYcxhXJIvFg0hC1L/4jW+k0d7VUOnoPeI87MfeGCZqDAEwSeZXURoMDThY2DBBKgwBMI/xClKfDkykNINxR4QrCzqE4C2Qq81e/W45X7N6eqlVYFcPkvx8Drc+nsgMNIIDsCFHjZ8eLVYglQ4OXOf11bDH+paU+baHyRF1/oFch9cgNnCLXuhtuXD59f7WMeKPAM3BScynQCFHimI2YBgwlQ4BkMNMfpKPByBGiH4RR4dkjROWvIVuA9/8uN2Kjdgbf/58LbR6DvyCLnQONKhRKgwBOKn8WzJECBlyWwQ1xeG4nj19uCab9zYrkfJ5fzhUa50z14Bgo8M6hyTrMIUOCZRdb+8yYiIcTf+gfiaz4AynrBM+siuPuNNH3hFHimI86qAAVeVrjseTEFnj1zteuqshV4r/9pG5b/Z28Kh0u78e7L94/TDuv22xUR1yUZAQo8yQJhO0ckQIFnzAZ5qz6M/9ZHOibrF/Dg8j4B5Lt557cxdNNnsULgbdu2CU8+8Rh2Vm/HpMnT8IWLr0IgYO4dlWt3NmHByt0I+Nw4bWIf9C7LNwMf57SYAAWexcBtVC721j+xZ/Xz+HBUEPXFUQyoL8aME36DgL/U1FVS4JmKN+vJKfCyRma/ARR49svUzivKVuC1NkTw3N0bsXtTG9weF46/tC+mntnLzoi4NskIUOBJFgjbocCzaA+0xOLQnqZFpd8NqjvzoJst8GKxGH74/W+iuakhtYiT5s7D+V+4zLRFbd7Tijue+QSJz45RLAx48eMvTECPInOloWkL4sQpAhR43Ax6CYT+ejueG74Ezfnx1BQje5yCE/p/U++UGY2jwMsIk2UXUeBZhlreQhR48mbDzg4mkK3AS86Q/AK4dls7Css9yC/mI0zcV9YSoMCzljer5UaAd+Dlxo+jrSdgtsDbtnUTfvaTH6QtbNCQ4bj5+3eYttgnF23Fy0ur0+a/du4IHDO8p2k1ObE1BCjwrOFsxyp1ix7FswXPdC3N7UFZ1VScX3mLqculwDMVb9aTU+Bljcx+Ayjw7JepnVekR+DZmQfXJj8BCjz5M2KHXQQo8KzfDZGWGNY9sxdNG9vRZ2YJBp5SjuRxD/xkRsBsgXeoO/DmnHQ6Lrjoyswa1HFV8tHZx97amDbyxrPGYHx/cx+V09Eqh2RJgAIvS2C8PEUgFg7i6Y3XozmqHQ3k8cFV3AMji0/E8aWXmEqJAs9UvFlPToGXNTL7DaDAs1+mdl4RBZ6d07Xn2ijw7JmrXVdFgWd9sguuX4/6VW2pwhO+1hfDz6+wvhFFK5ot8JJYkmfg/fPvj6Jm5w5LzsCLaI9f3/fyaqzc3tSRypxxVbj8hCGKJsS29ydAgcf9kAuB2sh2LG55FnXR7RgUmIRjis6F323u+ZgUeLkkZvxYCjzjmSo3IwWecpE5umEKPEfHr+TiKfCUjE26puPtbWj7ZDFiTXXIGzoWgSFjTOmRAs8UrIedtL0ugpcvWpX2++WjCzD7/uHWNqJwNSsEnig8uxrbkae9xKK0gC/esjqDkHsTgt6VyIuOQl7cuD+PFHhWJ8l6uRKgwMuVoLHjKfCM5ankbBR4Ssbm2KYp8BwbvbILp8BTNjppGk8k4mj49+OI7uk6E6toxmnIHzPV8B4p8AxHesQJI8E4XrpgJeLhrkPJ+55QimP+b5C1jUhUbc/eZrw4fwXa2sKYc8JIjBnd54jd2VngSRSLo1pp8M1Hbd6fUmvuGboCZeF5hjCgwDM
EIyexkAAFnoWwMyhFgZcBJLtfQoFn94TttT4KPHvl6YTVUOCJTbnVtRef5v8DjZ6tqIyMw9jQefAnisQ2lWX1aEMt6p/5Xdoof78hKD3t4ixn6v5yVQTeptbXsLzxTwjHWzC66HxMKDPvPLLuqeV2xdb5dVh6fzVioTgK+/ox8ydDUNTfmW8bbWpuxy23P4/Gxs5Hil3aYYD/851TjyjxKPBy238cfTCBzUXXIubqeuuwJ1GGwS0PG4KKAs8QjJxEJ4G63c2Y//j72LZuN4aM7YszrzoGhcV5R5yNAk8nbJOGUeCZBFalaSnwVEqLvVLgcQ+oRoACT2xi7xTcjWbPjlQTvaOTMSX4ZbFNZVk9Hgmj9q/3AvFYamRg2DiUzDo7y5m6v1wFgdcY2Yrnq5OHdmuvGP/sc2LlnRhUMKf7BUp6RbQthpadYZQMzoPb49w3WCz6YBMe/P2baSnNOXEUvnTZjMMmR4En6aZWuC0KPIXDY+tHJPCH/3sROzfXpq4ZOXUALvr2kf/bSYEn16aiwJMrDyHdUOAJwc6iOglQ4OkEx2HCCFDgCUOPkKsRrxfdmtZAIFGCk1ruFNbUp6Ew/tDUgupoDLPyA/hySTHy3N0Lm/a1y9D83vwOieftUYniky+At7jM8HVYJfAS4UZg4z+QaFwNFA+Da+hFcOVl9uKGNc3/wvt1v0hb+4jic3Bsj+8ZzoMTWktgw6Y9uP2uF9OKXnj+NMw7bQIFnrVROLragY/QVoauQkn4dEOY8A48QzByEh0EWhqDuPf6p5BIdP3wK1Dgw80PffGIs1Hg6YBt4hAKPBPhqjK1CIEX187zeWbjn/HB7gUoD1TgnCFfwujySaogY58CCVDgCYTP0roIUODpwmbYIJnuwAvFE7hyVy2a97uT7uKSIlxeXJjRehOhdkRbGjsEnsvlzmhMthdZJfDiqx4E6pZ3tVc0EO5J/5tRu3a8Ay+jhTvkon8+8xFefOWTjtWOGdUb3/7mXOQFvBR4DslflmXyJRayJME+jCTAO/CMpClmLgo8MdylqipC4L2+4zk8veGPKQ4+TwC3H/U7lPjLpWLDZuQjQIEnXybs6MgEKPDE7hCZzsBbFQ7ju3vq04CM9Pvw/yp7iIW0X3XLBN7iG4FocL/KLriO/X9waV8PZPLZdwZeJN6KUUXnKX0GXibrddo1jc1BtLaE0bdPabdL5yO03SLiBRIR4B14EoXhwFaSZ+C9/Mgi7Ni4l2fgKZo/BZ6iwRnZtgiB98Ant2FV/ZK0ZVw3/jaM6zHNyKVxLhsSoMCzYag2XxIFns0DzmJ5ud6Bl0Up3ZdaJvByuANP9+I40JYEKPBsGattF5WtwIs3xRGvj8Nd5oa71Jw7r20LmwszhAAfoTUEo2GTUOAZhlLdiUQIPN6Bp+5+Ed05BZ7oBFg/WwIUeNkSs/f1es/As4qKVQIvEW7SzsB7QtcZeFaxYB01CFDgqZETu+wkkI3Ai22PIboqknpfj3eUF55Bh3+cnIxJwAwCFHhmUNU/JwWefna2GSlC4MUSMbyw+S9YWPMaz8CzzU6yZiEUeNZwZhXjCFDgGceSM5lPwCqBZ/5KWMEpBCjwnJK0PdaZjcCLvBtGvDWeWrgr4IZ/lt8eILgKZQhQ4MkVFQWeXHno7qbp3U/R+M5K+HqXo+KcGfCWZnYgdrKgCIGne6Ec6HgCVgm8Fu2w+KZgEH1Ky7TD4rt/Q6TjgyGAwxKgwOPmUIkABZ5KabHXJAEKPO4DlQjkIvDgdyEwSzsnlF+WqhS58r1S4MkVIQWeXHno6qb+taWovv/51Ni8Ib0x9FdXawdBZ3ZOAgWeLuwcJIiAFQLvPyuX4aVPlyIWj2NAeQW+PusUFAfyBK2YZVUnQIGneoLO6p8Cz1l522G1FHh2SNE5a8hG4PERWufsC5lXSoEnVzoUeHLloaubLT/6K1qWbEgbO/yB6xAYUJHRfBR4GWHiRZIQMFvg1bY040cvPpW22pNGjce
5k4+ShADbUI0ABZ5qiTm7Xwo8Z+ev4uop8FRMzbk9ZyPwkpT4Egvn7hVZVk6BJ0sSnX1Q4MmVh65uqh94AfWv7vdGV+1xv1GP3wRvcUFG81HgZYSJF0lCwGyB99HWjXhk4Ztpqx3UsxLfnXuWJATYhmoEKPBUS8zZ/VLgOTt/FVdPgadias7tOVuB51xSXLksBCjwZEmCAk+uJHLoJrynEVvv+DtCW3YDXg/6fPlU9JiX+d1CFHg5wOdQywmYLfAisRh++sqz2Kvdibfv85XjTsLk/oMsXysL2oMABZ49cnTKKijwnJK0fdZJgWefLJ2wEgo8J6RsrzVS4MmVJ+/AkysP3d0ktLO62rfsgb+yFJ6i7M7qosDTjZ0DBRAwW+All9QYbMNrq1do/27F9EHDMLHfQAErZUm7EKDAs0uSzlgHBZ4zcrbTKinw7JSm/ddCgWf/jO22Qgo8uRKlwJMrDyHdUOAJwc6iOglYIfB0tsZhJHBIAhR43BgqEaDAUykt9pokQIHHfaASAQo8ldJir0kCFHhy7QMKPLnyENKNKIEXjgKRmAuFgYSQdbOomgQo8NTMzcldU+AZl/4GTzU+9K1FyB3D+PBATI2OMG5yztRBgALvyBuhbnsL3nh4BWrW1WPI9CrMvmYcCkoD3D0CCVDgCYTP0lkToMDLGtkhB8SjCTSs1o67SbhQOrIQnoDbmIk5y0EEKPDk2hQUeHLlIaQbEQJvTbUbO2pdHestL0pgwsB48vg+fkigWwIUeN0i4gWSEaDAMyaQBlcLnsx/C3F0/dBnTmgSRsb6G1OAs1DgZbAH/nL9m9izqSl15YiZfXHW/07LYCQvMYsABZ5ZZDmvGQQo8HKnGm2PYd1ftiNUG+mYzF/qxcgrB8Cbz28mc6d78AwUeGZQ1T8nBZ5+drYZabXAa2gBPt6U/hfsyD5x9K/gnXi22VQmLoQCz0S4nNoUAhR4xmD91LsV7/g/SZtsTHQgTgxPMKYAZ6HA62YPtNa143dXvpp2VUF5Hq597BTuHoEEKPAEwmfprAlQ4GWN7KABdZ80Y+tLu9J+vf+pvVAxpST3yTnDQQQo8OTaFBR4cuUhpBurBd6WPS5sqEm/zbmqDBg3ICZk/SyqFgEKPLXyYrcABZ4xu4B34BnDsbtZ+AjtkQnxDrzudpD1v0+BZz1zVtRPgAJPP7t9I+tXNWPL8+kCr+9JFeh1lPYNJT+GE6DAMxxpThNS4OWEzx6DrRZ4ybPvFq/1aOffdfJLPkg7fXgMxfn24MlVmEuAAs9cvpzdeAIUeMYx5Rl4xrE83EwUeEdmzDPwzN+D2VagwMuWGK8XSYACL3f6sXAc6x7bjvbacMdkHY/Qfkl7hDaPj9DmTvfgGSjwzKCqf04KPP3sbDPSaoGXBNcWArbudSGmvcSiX884ygptg5MLMZkABZ7JgDm94QQo8AxHyglNJECBZyJcTm0KAQo8U7ByUpMIUOAZAzYp8RrXaOcyabeC8CUWxjA93CwUeObyzXZ2CrxsidnwehECz4YYuSSLCFDgWQSaZQwjQIFnGEpOZAEBCjwLIBtcomlrBJtfC8KlPdIwaG4BSgZ4Da4g93QUeHLnw+7SCVDgcUeoRoACT67EKPDkykNINxR4QrCzqE4CFHg6wXGYMAJOFXixmmrA64OnolIYexbOngAFXvbMRI5oqYni3dvrEA93vgjME3DhuFt7oLC3cyQeBZ7IHcja2RKgwMuWGK8XTYACT3QC6fUp8OTKQ0g3FHhCsLOoTgIUeDrBcZgwAk4TeIlIBK333Y3IRx90MA+ceiYKrrpWGH8Wzo4ABV52vERfveGlVqx9JvkYWddn3GUlGDjbOQcLU+CJ3oWsnw0BCrxsaPFaGQhQ4MmQQlcPFHhy5SGkGwo8IdhZVCcBCjyd4DhMGAGnCbzQf+ej7Q+/TeNd9IMfwzdhsrAMWDhzAhR4mbOS4cqdH7Zj6UONaa1M/lop+kzPk6E9S3owWuBFNy1
H6PkHkNizDd6pcxE46xtw+Z3D05LQHFyEAs/B4Su6dAo8uYKjwJMrDyHdUOAJwc6iGRKIhUMI7d2F/N794HJ7QIGXITheJg0Bpwm8tj/+FqHX5qfxz7/4cuR9/gJpMmEjhydAgafW7kjEgaW/a0SNJvKSn74z8jHxqhLtv5dqrSOXbo0UeHHta462289GIth1V2Ng7pfgP+OaXFrkWBJIEaDA42ZQjQAFnlyJUeDJlYeQbijwhGBn0QwI1H+6BFuffRyx9iB8ZT0w7LLrUNZ/AIoLvKht6nx1PD8kIDsBpwm86Mb1aL7tf4BotCMaV0Ehiu++j2fhyb5RP+uPAk+RoA5os21vTHsXI5Bf4VFzATl0baTAi25egeD96Y/8uweOQ+ENv8uhQ+cNbW+PY83aEPx+YMTwPHi9yd3JT5IABR73gWoEKPDkSowCT648hHRzJIFXu2U1tix5DZFgG3qPmo5BU08S0iOLOo9AIh7Hsp/ciHgolFp8ychxGPvl6ynwnLcdlF6x0wReMqzI2tUIvfoSXNpLLPLOPg+ePv2UztBJzVPgOSlte6zVSIHXcQfenech0dqQgsM78LLbJ62tMTzyeD1aWmIdA6uqfLjkonJN5h0s8cLuBgS9O+GP9kR+vFd2hRS92myB14xdWIv/IOoKY1hiNiowTDlS7cF2LH53Ifbs3IWR40Zj4lQewSEyRAo8kfQPrk2BJ1ceQro5nMALNtfjo6fvRVKk7PuMPOE8VI2YIqRPFnUWgfY9NVh534/TFu0rKsG0W35BgeesraD8ap0o8JQPzcELECXwtm8G1q10Y+ioOAap9/2mg3eM+KUbKfCSq0mdgVe7A97JJ/EMvCwjXrS4FW++nf5ilbPPKsWY0ennCDZ7N2JvwUJt9s43KJe1T0R5eGKW1dS73EyBF0Q95rt+hAjaOsC4tP87KfED9MBgpUA98tvfo3rb9lTPc886HUcfN0OpNdipWQo8udKkwJMrDyHdHE7g7dm4HKsXPJnWU+XQCRg9+0IhfbKo8wise+TXaF6/KrXwvnPPxsC58yjwnLcVlF4xBZ7S8UnVfHNdPVa+/R7aW1ox6pjp6D18qOH9iRB4b81346k/dz36ee4VMcw5s+uHh4YvkhPaioDRAs9WcAQsJlOBt6P4BYRdXS9gccGDQU0Xd0gnO3/MFHib8S4+cD2Shm9U4nRMxPnKIK3X/jv34C/uTeu3z4D+uOo6nkMpKkQKPFHkD12XAk+uPIR0wzvwhGBn0QwIxLTHZ3e98yraarahdOQEVEybiYDfS4GXATteIg8BCjx5slC5k1AwiH/96n4Em5pTyzj9q1cZLvFECLwffs2H5q4nFlFcBvzkoYjKcbF3CwlQ4FkIO4NSyUdnH/3Lfo/Q9tIeof2i9gitL13MHSzwvJrAu4gCLwPGh7tEe3YFb7v+X9pvT0tcjqE4MYdZrR0aCoXx/+74GeKxzkewk5+xkyfinIvUkZDWEjO/GgWe+YyzqUCBlw0tm17b3Rl4mz9+FVHtJQJVo6Zh8NSTbUqBy1KFAN9Cq0pS7HMfAQo87gUjCGz5dCXeePTvaVONPPYozDzvbCOmT81BgWcoTk5mAQEKPAsgZ1kik5dYHPgIbXloEspCE7KspN7lZt6Bl6SxxPU3rMcbHWD6YAJmJK7T7m30KgVq+UdL8NKzL3RIvPKePXDxl69AeY9ypdZgp2Yp8ORKkwJPrjyEdMO30ArBzqI6CVDg6QTHYcIIUOAJQ2+rwnu378C/f/1Q2pqmnnYyJp4829B1ihB4Bz5Ce+wFW9wAACAASURBVMGX4jjx9K67LwxdICezHQEKPHUj5UsszMmuHY2IIoQiqPtikHB7CMnHaSuqesHjcZsDirNmRIACLyNMll1EgWcZankLUeDJmw07O5gABR53hWoEKPBUS0zefj96+VV88sZbHQ1WDRmMuV+5HD6/39CGRQi85AL4EgtDY3TUZBR4jopb+cWafQee8oC4AOk
IUODJFQkFnlx5COmGAk8IdhbVSYACTyc4DhNGgAJPGHpbFg62tCDUFkRZr0pT1idK4JmyGE7qCAIUeI6I2TaLpMCzTZSOWQgFnlxRU+DJlYeQbijwhGBnUZ0EKPB0guMwYQQo8IShZ2EdBCjwdEDjEKEEKPCE4mfxLAlQ4GUJjJcLJ0CBJzyCtAYo8OTKQ0g3FHhCsLOoTgIUeDrBcZgwAhR4wtCzsA4CdhB4TQjjbU8NihJeHBfvrR3fbr/zk+LRONo31yLSEIS/VzHyBpTB5Up/y6eO+JUcQoGnZGyObZoCz7HRK7twCjy5oqPAkysPId1Q4AnBzqI6CVDg6QTHYcIIUOAJQ8/COgioLvBqXG34jn+RdoR7qGP1IxJl+GX4GPhsJvGaPtqGcE1TKuH8Eb1QONKcx6p1bCNLh1DgWYqbxXIkQIGXI0AOt5wABZ7lyI9YkAJPrjyEdEOBJwQ7i+okQIGnExyHCSNAgScMPQvrIKC6wPuTZw2e8m5MW/nt4emYnrCP3ErEEqidvxJIdC3TWxRA2azhOhJXfwgFnvoZOmkFFHhOStsea6XAkytHCjy58hDSDQWeEOwsqpOAXQResKUVqz/+BMHmFgwaPQL9hg3SSYTDZCdAgSd7Qur0FwpG8cHza7FjXR1GTO+DyacOhdtt7GOTFHjy74dEIoH619ch3h5JNeurKELpMc787wgFnvx7lh12EaDA425QjQAFnlyJUeDJlYeQbijwhGBnUZ0EZBV4DY1BfPxJNcpK8zF5XJ8jflMdi8Xw8mNPoq2pOUVhxry5GDB8iE4qHCYzAQo8mdNRq7d/3PkO1i7ekWr6hC+Ow+xLxhu6CNUF3k7tEdob93uEdkyiHD8LH227R2hDNc1oXb4D8UgMrnwfSqYNhK80z9C9oMpkFHiqJMU+kwQo8LgPVCNAgSdXYhR4cuUhpBsKPCHYWVQnARkF3vbqRvz8gTfRFuy8G2LCmN741tUzDyvx9u7chdf/+XwagYEjh+HYM07SSYXDZCZAgSdzOur0FgnFcPcXnkby7qt9n4r+Jfj6g2cYugjVBV4SRqMrjHfc9n6JRXKdCe1FFrHWELwl+YCxN2IauqfMnowCz2zCnN9IAhR4RtLkXFYQoMCzgnLmNSjwMmdl2ysp8GwbrS0XJqPAe/zJJXjzvfQzl/73htkYNrjnITNo0x6bffHPT6R9Iz5y8gRMnnWsLTNz+qIo8Jy+A4xZfzyewH1XvYCWumBqwiGTq3DZHbONKfDZLHYQeIYC4WTSE6DAkz4iNrgfAQo8bgfVCFDgyZUYBZ5ceQjppjuBF0IEH/lXYId7N4bHBmJSZLT2LjcH/6hXSErGF42F2xGu24q8XsPgcnuML2DSjHYQeEk0qz5chk8Xfoh4PI7yXhU44ezTkFdYYBI1TiuSAAWeSPr2qr1m0Q48d+9ihFojKKksxEW3HI/eQ8sMXSQFnqE4OZkFBCjwLIDMEoYRoMAzDCUnsogABZ5FoDMsQ4GXISg7X9adwHsq/z9Y59mcQnBsZDLmhI62MxLbr61x9QJUv3g3Yu2t8JZWYdAXfob8qmFKrFtGgbdNe4T2F/s9Qjt+TBWuv/q4bg+XDwXb0d7ahpKe5XC5KMWV2IA6mqTA0wGNQw5LINweRd2OZvQaXAa3x/i/NyjwuPlUI0CBp1pizu6XAs/Z+au4ego8uVKjwJMrDyHdHEngRRDFr4r+jIT2f/s+PRNl+GrrhUJ6dUrR9hagfk8CVYNcmgQydtUJ7Y6v1fd9HrG2xtTERcOOxeCLf25sIZNmk1HgJZeazUssTELDaSUlQIEnaTBs65AEZBd4bTt2oHHNShQOHIyS4SOYIgmAAo+b4EACiWgEri1LgOa9QK+hQJ9R2jmRxv/AQw95Cjw91DhGJAEKPJH0D66tpMDbVr0bt/78T1izfiv69q7AD2+4HFMndH4RV13bdTaMXKjl7aa7O/DuL/wrWlytqQX0i/fGFW1ny7sgxTt
b/GICLz8SQzyqfc2hCbzLf+hCaaVxFi+0dwvWPXx5GiVvUQ+MvuFfSpCTVeApAY9NCiFAgScEO4vqJCCzwNv7/iJsfPRPqfNDB3z+XPQ97UydK+UwuxCgwLNLksatw/Wx9qKw2m1dEw4/Bokh040rkMNMFHg5wONQIQQo8IRgP2xRJQXelTfchZOOn4rLzjsF7334qSbz/ohX//Er+LweCjwd+6s7gbdWe3z23wULEEqEUZwowgXtp6B3rFJHJQ7pjkBbcwJ3XxnTvjnpunLaXBc+/w1jz6jb8s/vo3nde6kiVbOuRuXxV3TXnhS/T4EnRQxsIgsCFHhZwOKlwgnILPCW334rgjU1KUZuvx/T77lfO8fVuB9yGRVAuKkdzRsb4C8NoHhIuVHTcp5DEKDA47ZIIxBqheutR9KhFJYjMfMSKUBR4EkRA5vIggAFXhawLLhUOYFXW9+E0y+5GQv//Rt4PZ1S44JrbsPN130RR08ZTYGnY9N0J/CSU4a1R2nr3A3oFe+hvcBCvi+UdSxbyiHrPkrg8Ttjab1V9HPh+geMFXixcBB17z+Jtp2rUTx8BsonzZPyG6BDhUSBJ+XWZVNHIECBx+2hEgEKvNzTatnWhC3PrUI8Eu+YrGxsJQacNjz3iTnDIQlQ4HFjpBHQfgruWvB7QHuMdt8nUTEQmPI5KUBR4EkRA5vIggAFXhawLLhUOYH38SfrcPs9j+Jff74zheemH/8Wx0wdiws/Nxu1TWELsNmrRHGBFyHti8zwZ19o2mt1aq1GO54OD3wnguqNXbfgXXC9B9NPMVbgqUUlvVuvdmh7QZ4HTa3aM8b8kIACBEoLfWjVXjwQje13a60CfbNFZxIoK/LDo/2crqEljFinf5Lms3vxQqz90x9Tj9AOPvc8DDhjnjT97Wtkw7Mr0bC+Pq2vidcdBV+hX7pe7dBQebG/Y7/u//SCHdbFNegnkKhZj+jyV4FYFK7CUnimaD+oLu6pf0IDR+b5PR1/x7a2p//A3sASnIoEDCXQs8RPx2Io0dwmS+Zh5MeV0D5GTnjgXO99uAK//sPTeOKh21K/dcvdf8TIof1xxRdOM7M05yYBSwi0NCa0R8Ij2FOdwLTZHu0fryV1WYQESIAESIAEZCfQvG079qxYgbKhQ9Fj1Egp2/30qZXYtWJ3Wm8zbjgW+eV5UvbLpkjAjgQSUe2HEM0N8JRWKPOUiR1z4JpIgATkJmC6wFuyYh1u++UjeP6Rn6RI3Pij32Dm9PG44KxZHXeS8ZMdgeQdTfF4Atr/40cSArFIGC17d6C4V3+4PT5JupKjDbf2EjG39v/xbiY58mAX3RNI/h0b0/6CNffHW933wSvMJxDX7rKs2xFEWe88eP1qHjeRPKYg+bLGcDTOPatzyzRta8SKv3yC+Ge3MPaa2Asjzx6tczYO646AT/s7Nvk1Ab+M7Y4Uf18GAsm777RX4nZ8XcAPCahAwOd1IRLlfpUlq4DP2K8vTRd49Y3NmHvhTXjnuQeQn9d5++AZl96Mn/7vNZgyfgTPwNOxszI5A0/HtByik0D95uVY+/JvEQ02w68dujvqrOtR0k/Ouwx0LjGnYTwDLyd8HCyAAM/AEwBdQMnGXe1Y8KctCNaH4cv34OgL+mHAhFIBneRWUuYz8HJbmbWjQ40hNG+oQ0C7644vsTCXPc/AM5cvZzeWAM/AM5YnZzOfAM/AM59xNhWUOwMvubiv3PhzHDV5NK659Cy8/MbijkdqX/7rz+HRfqRRXRvMZv28ViNAgSfXNvjwD99GqGlPqqmiqqGYdOkdcjVpdTfaTyndbREk8rzwaf8kz23keZdWh8B6eglQ4Oklp9a4N/+0GTtXN6ea9gbcOP/HY7VHt7Tb2RT6UOApFBZb7SBwKIFXU9OIxYs2obm5HcOGVeLoY4Z0fJ/ADwmIJkCBJzoB1s+WAAVetsTMvV5JgbejZi9+cNfvsWbDNgzo2ws/uulLGDdqcAcpCrzsNwwFXvb
MzBoRaW3A+w9/I216l/YI7cwbHjGrpPTzJsVd8Ypd8ASjSGiPyYRHVSBvWDkFnvTJscF9BCjwnLEX/nXHarQ3d731MLnqM787EiW9AkoBoMBTKi42ewiBFwlH8cTfP0BY+/e+z5SpAzF12iDyIgHhBJwk8Bo2bMf2t5bBkx/A4FOOQn5P9e5KF75hJGiAAk+CEPZrQUmBdySEFHjZbzAKvOyZmTli7SsPYc/Kt1Ml+kw5DUPnXGFmSannLvp0N/x721I9upI/QT91KGrb+BZaqYNjcykCFHjO2Awr/7sby+fvSi22akQR5lwzRLnFU+ApF5njGz7wDryd1Q146cVP0rhU9CrG5z8/2fGsCEA8AacIvKS8+/i+J1NvDfcV5OHYW78Ef1GB+BDYQVYEKPCywmX6xRR4piOWvwAFnlwZxaIR7Fw6H0071qJ84DhUTTxZe5GFc99EW7ZwO9z7/RTdlTxd/fj+qHV75AqO3ZDAYQhQ4DljayRfUrLh/TpUa4/RVvTPx4jjK+DTHqNV7UOBp1pi7PdAgcc78LgnZCbgFIG39qkF2Pbmx2lRjP/Smaiaxhf6yLw/D9UbBZ5ciVHgyZWHkG4o8IRgZ9EMCeTtaELB+rrU1YmyfLhPGMBHaDPkx8vEE6DAE58BO8icAAVe5qx4pRwEeAaeHDmwi8wIWCnwEtqbsKMrtiDRHoFv4iC4tEdZrfpsfu0DbHiu64miZN3pN30RpYP7WNUC6xhEgALPIJAGTUOBZxBIlaehwFM5PQf0rt3V4t/VAn9tG2KFfsQGlaG41E+B54Do7bJECjy7JOmMdVDgOSNnO62Sb6G1U5r2X4tVAi8RjaH1wVcQr+78Ibi7uACF3zwDrhJrHmGNtoex/Hf/Qv267R31B8yZhpHnzbJ/wDZcIQWeXKFS4MmVh5BuKPCEYGdRnQT8XjffQquTHYeJIeB0gbc31IqYK4Eqf5GYAFg1KwIUeFnh4sUSEBAp8JpaW/Dah4uxeVc1+ldW4fSjZ6KkkH/XSbAtpG3BKoEXXbMDbY+8nsYhcNoUBGaPt5RNa00dfAUB+EsKLa3LYsYRoMAzjqURM1HgGUFR8Tko8BQP0GHtU+A5LHAbLNfJAu83Wxbi3dpNHSlOLxuAbw2eCR/Pr5R6V1PgSR0PmzsEAZEC72+vvYwtNdWprgZW9calp8xjTiRwWAJCBd7JExGYO4npkEBWBCjwssJl+sUUeKYjlr8ABZ78GbHDLgIUeNwNqhFwqsBb0rgDv9jwZlpcXx88Ayf0UO/NrKrtuVz6pcDLhR7HiiAgUuDd88/HEQqHU8tOvmjrxouugN/r3JePidgDKtW0SuAlH6Fte2g+YjtqO/C4ivJRdP08uIrzVcLFXiUgQIEnQQj7tUCBJ1ceQrqhwBOCnUV1EqDA0wmOw4QRcKrA+/uOpXhh18o07idXDsdXBhwtLAsW7p4ABV73jHiFXARECjzD78BrbUN8bx3cA/pqh5ap9xZruXaG/m5q1m3AqrcXam8S92PC3Nkoreqlf7IDRlol8JJl973EIp58icWEgXAX5Bm2Dk7kHAIUeHJlTYEnVx5CuqHAE4KdRXUSoMDTCY7DhBFwqsDbG27F91a9iFAs2sHe43Lj7jFnom9eibAsWLh7AhR43TPiFXIRECnwjDwDL/byq4g8+ne4olG4Bg2E9/vfhruyQi7YDuhm79ZteO6n9yCR0N6ipn0CBQW44PYfIL/YmLMNrRR4DoiLS7SAAAWeBZCzKEGBlwUsu15KgWfXZO25Lgo8e+Zq51U5VeAlM93a1oBX9qxGVPtG6LTKkRhW2NPOUdtibRR4tojRUYsQKfCMAp2ob0D4qzdot0x1SqPkx3PayfBec6VRJUyfJ9Gu/bDG64LL6zG9lpkF3n/mBSyf/9+0EnOuvgLDjppqSFkKPEMwchILCVDgWQg7g1IUeBlAsvslFHiHT3j5mnV49LkX0NDUglNmHoMLTz9Fe6KBjzSI/DNBgSe
SPmvrIeBkgaeHF8eIJUCBJ5Y/q2dPwA4CL/7xMkR++qu0xbtGDIP/rtuyB2LxiEQ0jsSWRiSCESTPAESvAu3OQXXfOLryjbfx3hNPp1E863vXo/fwoYaQpcAzBCMnsZAABZ6FsDMoRYGXASS7X0KBd+iE6xoacf1Pf45wOJK64Mvnn4PTT5hp9y0h9foo8KSOh80dggAFHreFSgSyFXgJJLCk9XWsCi5ClX8wji86B3ludb95Vykr9tpJwA4CD7EYwjffqomw7alYvddfC8+Jx0kfc7ymBYm9bWl9ekZqd1v71bwTLxaJYv5vfofqVWs71jTupBMx46LzDMuBAs8wlJzIIgIUeBaBzrAMBV6GoOx8GQXeodN99+OluO+xv6X95swpk/DtKy+183aQfm0UeNJHxAYPIECBxy2hEoFsBd67Lf/CS/V/SC2xf2AUvt4r/U4ildbPXtUjYAuBp2FPNDUh9vzLSNTshmfm0XBrT36o8IltqAe0u+/2/7gHlMBVqvYLExp27tLOv8tHfqmx57ZS4Kmwq9nj/gQo8OTaDxR4cuUhpBsKvENj5x14QrZjt0Up8LpFxAskI0CBJ1kgbOeIBLIVeA/uugnbw2vS5vx+38dQ7OlB0iRgCQG7CDxLYJlQJN4UQmJrY9fMPg/cI3rA5dYep+XnIAIUeNwUqhGgwJMrMQo8ufIQ0g0F3uGx8ww8IVvyiEUp8OTLhB0dmQAFHneISgSyFXj/qP0llrctSC3R7fLilr5/R8Cdr8yyg23NWLvyfeTlFWD4mKPg8XiV6Z2N2uQRWsWDTEo8NGgiz+eGp0L7s69JPH4OTYACjztDNQIUeHIlRoEnVx5CuqHAE4KdRXUSoMDTCY7DhBGgwBOGnoV1EMhW4O2N7MCje3+EuuhO+FwBnF3+dUwtnKujspghjfV78Pc/3oZga+cdRFX9huHCL90Kr9cnpiFWzZoA78DLGhkHCCRAgScQPkvrIkCBpwubaYMo8ExDq87EFHjqZMVOtTORvW4UF3hR2xQmDhJQggAFnhIxscnPCGQr8JLDYokYdke2ooe3SrvzrkAplgsXPI1Fbz6T1vM5l9yMISMmKbUOJzdLgefk9NVbOwWeepk5vWMKPLl2AAWeXHkI6YYCTwh2FtVJgAJPJzgOE0aAAk8YehbWQUCPwNNRRpohb7/2BD589wUKPGkSyb4RCrzsmXGEOAIUeOLYs7I+AhR4+riZNYoCzyyyCs1LgadQWGyVd+BxDyhHgAJPucgc3bDTBF5rcz0ef/iHqUdo+w4YifOv+AEfoVXoTwEFnkJhsVVQ4HETqEaAAk+uxCjw5MpDSDcUeEKws6hOArwDTyc4DhNGgAJPGHoW1kHAaQIviYgvsdCxUSQaQoEnURhspVsCFHjdIuIFkhGgwJMrEAo8ufIQ0g0FnhDsLKqTAAWeTnAcJowABZ4w9Cysg4ATBZ4OTBwiEQEKPInCYCvdEqDA6xYRL5CMAAWeXIFQ4MmVh5BuKPCEYDelaF0ojAU1e7EnFMKgwkLM6V2BgMdtSi1Rk1LgiSLPugcS2NrYiCW7dmFEeQ+Mraw4LCAKPO4dlQhQ4KmUFntNEqDA4z5QiQAFnkppsdckAQo8ufYBBZ5ceQjphgJPCHZTij66fguaItHU3GNLi3Fy316m1BI1KQWeKPKsuz+BBZu34N73P0Qikej45YvHjcUXx489JCQKPO4dlQhQ4KmUFnulwOMeUI0ABZ5qibFfCjy59gAFnlx5COmGAk8IdsOLNkYieGz91rR5ywN+XDZ0gOG1RE5IgSeSPmvvI/CNl+dje1NzCojf68E/zjsHbpfrIEiZCLxIJI5/Pb4MH76zHT0q8nHBlydj2OjD39XHJEjALAIUeGaR5bxmEeAdeGaRlWPetu01CO7ai6LB/RHoWSZHUzl0QYGXAzwOFUKAAk8I9sMWpcCTKw8h3VDgCcFuSlHegWcKVk5KAgcROFjgeTWB93ndAu+Vp1bh3//
4NFUnv8CHOx48E3nav/khASsJUOBZSZu1jCBAgWcERTnn2P3WB6j9YHlHcy63G/3mzUbxyCFyNpthV5kIvNi6BkRf3KItGvCePQSeISUZzs7LSMB4AhR4xjPNZUYKvFzo2WQsBZ5NgtSW0XkG3h7tDLwwz8CzT6xciYQEDnyE9pLx43DRuDGH7DSTO/Duu+1NrFu5J238jT+Zg6Eje0q4erZkZwIUeHZO155ro8CzZ65x7UiYNfc/Bu2sitQC8/pUYsglZyu94O4EXnx7C9q+9RZc0XjnOvM8yH9gFtxVBUqvm82rS4ACT67sKPDkykNINxR4QrCzqE4CfIRWJzgOM5zAloZGLN1tzEssDrwDL3nn3Z28A8/wzDhh9wQo8LpnZOYV7dpLqP678HVs3L4FE0eNxwnTZsKt3XnEz+EJUODZc3c4VeBFnlyP8GOr00L1XzcBvjMG2TNorkp6AhR4ckVEgSdXHkK6ocATgp1FdRKgwNMJjsOEEcjkDjyegScsHmkKJ28yWa49KbZqtQtVVS7MnBFHIGB9exR41jPfv+L9f3kIy1Z1PjKY/Jx10pk45+SzxDYleXUKPMkDyqG9gx6h/dxJKB6utsjq7g68yBvbEb5naRq1wA+nw3ts7xxIcigJ6CdAgaefnRkjKfDMoKrYnBR4igXm8HYp8By+ARRcfiYCT8FlGd7yxuZaLNm7HUW+AGZWDUaxL8/wGjJP+M47wOuvd91pNUj7HvXKKz97hMrCxinwLIR9QKlwJIxv/Pg7qbdbJ3+7d2UV7vz2beKaUqAyBZ4CIeXQYuu2nWjfXeucl1jEE2i/6yPEFtV0UPPO0V7e8e1J0A7ZzYEih5KAfgIUePrZmTGSAs8MqorNSYGnWGAOb5cCz+EbQMHlU+B1H9qaht14clPXHQflgQJcPepYBDze7gfb5IqHH3Zj1670xdx4YxxFRdYukALPWt77V4vH4/jez3+IxubG1C+PGT4aN111vbimFKhMgadASGwxRaC7O/D2XRiraYPLo3m7Sp59x+0jlgAFnlj+B1anwJMrDyHdUOAJwc6iOglQ4OkEx2HCCFDgdY/+mc3LsbK+826DfZ+Lh07F8NKK7gfb5Iq//c2F9eu77rDwa4/P3vy9uHb+mbULpMCzlveB1ZasXIY/Pf0Ygu1BlJf1wLcu+xoG9ukvtinJq1PgSR4Q20sjkKnAIzYSkIUABZ4sSXT2QYEnVx5CuqHAE4KdRXUSoMDTCY7DhBGgwOse/Rs71+Pdmo1pF359zHHomVfY/WCbXLFHewnxX//mRpN285XXB5z9uTjGj7d+cRR41jM/sGIoHMauvTXo17sfPG7tFhx+jkiAAk/fBolqbzn99/zNWLZyL8aN7oFzzhgKr9finxjoa13pURR4SsfnyOYp8OSKnQJPrjyEdEOBJwQ7i+okQIGnExyHCSNAgdc9+mA0jL9vWILqts5HB+f0GY7jeg/tfqDNrojFgN2ayOtRDiEvsEjipMCz2aZywHIo8PSF/OvfL8Mrr21JDT7tpIG44drJ+ibjqIwJUOBljIoXSkKAAk+SID5rgwJPrjyEdEOBJwQ7i+okQIGnExyHCSNAgZc5+t3BFhT6fCj0Cnj9auZt2vpKCjxbx2vLxVHg6Yv1kmv/g4aG9tRgf8CDZx45U3tsny9L0Ec0s1EUeJlx4lXyEKDAkyeLZCcUeHLlIaQbCjwh2FlUJwEKPJ3gOEwYAQo889DHkMAOhFABHwrARw2NIE2BZwRFzmElAQo8fbSvvekNbNvenBo8oH8xHv7VHH2T2XBUfTCMd7fVoSUUwfR+PTC8hzFHOlDg2XCz2HxJFHhyBUyBJ1ceQrqhwBOCnUV1EqDA0wmOw4QRoMAzB/0eVwT3eLZityusqTsXroz2xnGJMnOKOWhWCjwHhW2TpVLg6QtyhXb23R33fITm5hCKiwO49cZpGD/WOS8OOhK1UDSGexdtQLMm7/Z9Lp04AGMqS/TB3m+UbAJ
v+94WfLRxLwI+N44dWYWyQt4Bn3PINpuAAk+uQCnw5MpDSDcUeEKws6hOAhR4OsFxmDACFHjmoH/YU4333Z1n5iU/SYn368hI5IGHsOdCnAIvF3ocK4KAGQKvftlu7PzPRsSCMVQc1xd9Th4iYmmm14xE4ti6vQn9+xZr527yLuZ9wNfubcZjy7am8T9Kuwvv86P75JyJTAJvm7bOv729DolE57LyfF589dSxKAh4c14nJ7APAQo8ubKkwJMrDyHdUOAJwW6rott2RVFS5EJpoflf/FHg2WrrOGIxFHjmxHyLbyN2ao/P7v/53+ggDE8UmFPQIbNS4DkkaBst02iB1767FavueR+J2GdWQ2M16OIx6Dktd3ljI+y2XsrethDuXbg+bY2zBlfilGG9cl63TALvtWXb8eGG3WlrOvvoIRjbX3uTEj8k8BkBCjy5tgIFnlx5COmGAk8IdlsUbQ3G8Yu/NWLNlnDHei44qRDnzy4ydW0UeKbi5eQmEKDAMwGqNuV/XXX4m3dXavIh2il4P4gM1O6/4wHsuRCnwMuFHseKIGC0wNuzcAe2PbMmbSkVx/bDwPNHiVgeawoi8JomthZs1l4Lrn0GlRXiskkDkO/N/QfVMgm8xet24Y1PdqQRvnTWSAzoae7X8oIiZVmdBCjwdIIzaRgFnklgVZqWAk+ltOTq9anXW/D0gta0pu7+Rg8MrPKZ1igFnmloObFJBCjwTAKrvgZGaQAAIABJREFUTfuuqwFL3C2oSvgxL96TL7IwADUFngEQOYWlBIwWeLwDz9L4pC7WEo6iVfunqijPsD5lEnjJs/6eXbQRm3d3vsxk+vBemDuxv2Fr5UT2IECBJ1eOFHhy5SGkGwo8IdhtUfSux+qxfH3n3Xf7Pl/+XAlOOSrftPVR4JmGlhObRIACzySwnNYUAhR4pmDlpCYSMFrgJVt1yhl4JsbCqQ9DQCaBt6/F2uZ27fw7DwrzzPsBPDeEugQo8OTKjgJPrjyEdEOBJwS7LYqu2BjGTx6pT62lrNiDe77VA/l55h0iT4Fni63jqEVQ4DkqbuUXS4GnfISOW4AZAs9xELlgywjIKPAsWzwLKUmAAk+u2Cjw5MpDSDcUeEKw26boJxvC+O+HQZQVuzFvRj4qy819cxUFnrFbZ9PSpVj09JMItQUx4aSTMOnMeaiNRtHD44XPzbPEjKBNgWcERc5hFQEKPKtIs45RBCjwjCLJeawgQIFnBWXWMJIABZ6RNHOfiwIvd4bKz0CBp3yEjloABZ5xcdfX1OCfP/o/xGOxjknDiQSiX7gQsclTkO/24HOlJRjs9xtX0KEzUeA5NHhFl02Bp2hwDm6bAs/B4Su4dAo8BUNzeMsUeHJtAAo8ufIQ0g0FnhDsLKqTAAWeTnCHGLZiwQK8/dfHU79Tq4m8xNFHI3Du+R2/Vubx4KsVFcYVdOhMFHgODV7RZVPgKRqcg9umwHNw+AounQJPwdAc3jIFnlwbgAJPrjyEdEOBJwQ7i+okQIGnE9whhh14B94u7dFZ34UXwTd1Wurqb1RWoFC7G48f/QQo8PSz40jrCVDgWc+cFXMjQIGXGz+OtpYABd4heCeA2NY2xHe3w13mh3tIIVxeHuNi7c48fDUKPFmS6OyDAk+uPIR0Q4EnBDuL6iRAgacT3GGG7X8GnmfmTOw8/oTUlePy8zGvpMTYgg6cjQLPgaErvGQKPIXDc2jrFHgODV7RZVPgHRxcdE0L4mubU7/h7p0H71HliiZsv7Yp8OTKlAJPrjyEdEOBJwQ7i+okQIGnE1wGw2LaT0CXBtuwORxGf58P0woK4HXxJ6AZoDviJRR4uRLkeCsJUOBZSZu1jCBAgWcERc5hFQEKvEMIvDf2IN4S7foN7UtP3+m9eReeVZuymzoUeJIE8VkbFHhy5SGkGwo8IdhZVCcBCjyd4DhMGAEKPGHoWVgHAQo8HdA4RCgBCjyh+Fk8SwIUeAcDiyyqQ2JPqOs3/G74T60
C+DPkLHeXOZdT4JnDVe+sFHh6ydloHAWejcJ0wFIo8BwQss2WSIFns0BtvhwKPJsHbMPlUeDZMFQbL4kC7+Bw440RRD+oB4IxuHxueCaVwt0nz8a7QK2lUeDJlRcFnlx5COmGAk8IdhbVSYACTyc4DhNGgAJPGHoW1kFAVYHnWrUB7hffBLwexL9wOhL9tLs3+HEEAQo8R8Rsm0VS4B0mSu0Yl3hTGK5CHx+dlWy3U+DJFQgFnlx5COmGAk8IdhbVScAJAi8aiWLlJxtQu7sOJWXFGDVuKIqKC3QS4zDRBCjwRCfA+tkQUFHgudZvgfemu+GKxjqXWlSIyP23INGrRzZL57WKEqDAUzQ4h7ZNgefQ4BVeNgWeXOFR4MmVh5BuKPCEYGdRnQScIPBWLF2Hndt2pQjlF+Th+JOn6yTGYaIJUOCJTkD++g2JvdiZ2IqeqEIvdz+hDaso8Dx/fgaep+ancYt+41LEzzxRKEsWt4YABZ41nFnFGAIUeMZw5CzWEaDAs451JpUo8DKhZPNrKPBsHrDNlucEgffmf95HOBROS27WKUfDn+e3WZrOWA4FnjNy1rvKTfFVWBj/DxLa/yU/k93HYZz7KL3T5TxORYHnfuVteO//S9raI3d+G4kpY3LmwQnkJ0CBJ39G7LCLAAUed4NqBCjw5EqMAk+uPIR0Q4EnBDuL6iTgBIHHO/B0bg5Jh1HgSRqMQW1F0YYtnidQ6/4YhRiIwZFLtH/3z3j2F2KPoSlRl7reAy8u8l6nvXzPnfEcRl6oosCDduyA966H4V68vANFbN4sxK67xEgsnEtiAhR4EofD1g4iQIHHTaEaAQo8uRKjwJMrDyHdUOAJwc6iOgk4QeDxDDydm0PSYRR4kgZjUFsbPY9hl1t7ecJnH7+rJ6aGf5axgDtI4Ll8uMjz9YzHG7SM1DRKCrzPundV70ZCe4kFevU0Ggvnk5gABZ7E4bA1CjzuAeUJUODJFSEFnlx5COmGAk8IdhbVScAJAk8nGtOGeWItyI/s7Jg/6OuLmKfQtFp2nJgCz46pdq1pqe+HCKImbZGTo3ciP9Eno4XzEdqMMPEiEjgsAQo8bg6VCPAOPJXSyr3XcKQVy7e8iNqmzRhSdQxG9D0RLpcr94ktnIECz0LYGZSiwMsAkt0vocCze8L2Wh8FnrV5umNBlAWXwpWIdxROuDxoKJiEuDvf2kYUrkaBp3B4GbR+4B14gUQFpkTvyuoOOr7EIgPQvIQEDkOAAo9bQyUCFHgqpZV7ry+8/yPsrFuZmmjGmCsxYdC83Ce2cAYKPAthZ1CKAi8DSHa/hALP7gnba33ueBSx5hrEC6vg8frstTgJV5Mf3o6C0Ja0zlryhiHk6y1ht3K2RIEnZy5GdZXrGXhG9WHUPCo/QmsUA86jFgEKPGPySuzai9iW7XDlBeAeOUT7d54xE3OWNAIUeM7ZEG2hevzljWvTFlxZOgLnzviJUhAOJ/Bi0Tg2frAd9TXNqBxYjsFT+sDlVuvuQqWC+KxZCjwVUzO4Zwo8g4FyOtMI7Nm0Cov+ei+i7a3wFpTi2C9+E5VD+JZB04BrE/uje1EcXJNWojl/FMLeCjPL2mpuCjxbxWn7xVDg2T5i2y2QAi/3SGM7dyO2+OPURK7CAnhnz4TL5819cs5AgefQPRCJhTSBd432nqX2FIHkY7SnTLlJKSKHE3gfPrcSO9fuTa1l6FH9MW72UKXWpmKzFHgqpmZwzxR4BgPldKYReOXe76FldzU82k93orE4iqv64fRv/8K0epw4SSCBouBaBDSRl/yEfL3Qkjdc+1/8CVum+4MCL1NSvE4GAhR4MqTAHrIhQIGXDa1DXxv9YBniOzrPut338c6YBndVZe6Tc4Y0ArwDz1kbYl31W3jr098hFgujpKA3Tpt6M8qLMn9TvQy0DiXwYpE4Xr7vHSQSXR0GCv049bpjZWjZ1j1Q4Nk63swWR4GXGSdeJZZAsLkBL971jY4m9gk8t/Y
I7fl3PCq2MYdUd8c7f3oYd/ORmmwjp8DLlhivF0mAAk8kfdbWQ4ACTw+19DGxVesQW7Mh7Rd9c0+Aq4gvrcqdbvoMFHhGE5V/vkgkiMZgDXoUDYTbrb0pXbHP4e7Ae/XBxWhvCaVWU96vFMdfMkmx1anXLgWeepkZ3jEFnuFIOaFJBN5/8kFsXfJOSuANOfokTD/3apOqcVoSMIYABZ4xHDmLNQQo8KzhLEOVaGsYsdYoAr0KZGhHdw8UeLrRpQYmwmFEFmqP0NY3aGdYueEeNQwe7R8nf+qDbXhm3Qo0tgcxd8hITKzM7M3i3TGjwOuOEH9fNgKHE3i7N9Xh43+vRqQ9iryiAI4+byxKq4pla992/VDg2S7S7BdEgZc9M44QQyAei2Ljwvlo3LEOpQPGYOgxJ8Pt4fksYtIwr2qsqRFunx+ufHu86ZYCz7y9wpmNJ2CGwGsNtsDn9cOv/bnmRw4Cta9vRd3b2zqayR9Yij4Xj4InX80XQ1HgGbSntGfh4s0tcAW0//4GAgZNmj7NdtcmrPEuw7DYWAyOjzSlhhGTBiMR3PT6C9jT1pKa7uZj5uCovgNynp4CL2eEnMBiAkd6C200EkNrXRuKehZqLxd0W9yZM8tR4Dkz97RVU+BxE6hEwK/9x6G4wIvaprBKbbPXDAgkolG0vL0A0V01HVfnjRmH/ElTMhgp9yUUeHLnw+7SCRgp8KKxCFZtWIam5vqOIv37DMWgvs6+q0eG/Rba1YqtDy1N/1rwhAHoedJAGdrLugcKvKyRCRmwwPsingw8pJ2s23lo1nnhr2Bu5FwhvXRXdEnNDvx04X/TLjtFuwvvq5NzP9+LAq87+vx92QgcSeDJ1qsT+qHAc0LK3ayRAo+bQCUCFHgqpZVdr8FVn6J92ZK0QcWnnA5vT7XfeOs0gffBjnr8e00N2mMJnDSkAqcN75XdRuDVQgkYKfC2Vm/Atp0b09YzecwMFBYUCV2j04s3fFiDPS+mn3dWMKwM/S4bpyQaCjw1Yrsl/yrUufekmi1BOX7W+riUzVe3NOGGV/+V1tt5oybgi2Nz/6EiBZ6UkbOpIxCgwJNre1DgyZWHkG4o8IRgZ1GdBCjwdIJTYFjre+8gvHVzWqcF049BYPgIBbo/fItOEnjbGoO48801aTC+dvQQTOldqnSGTmreSIH36bqP0dBUm4Zv6MAx6FOp1hv47JZ/PBTD5gc+Rqyl6072vl8cg8KRPZRcKgWeGrH9uOBr2OXanmq2OFGGu9oeg3binpQLeGbNJ/j7ys4fKo6uqML/HDsbRb7cHy2mwJMybjZFgafMHqDAUyYq8xqlwDOPLWc2ngAFnvFMZZkx+ehs8wLtkZXP3knvzi9A8elnwh1Q+823ThJ4b26uxd+Wd56rte9zwuAKXDaRwkaWP2fd9WGkwGtorsOnaz9KlfT78zB17Ax4eHZpdzGY/vuR+nY0LKpGtCWC0mm9UTBUXclOgWf6djGkwLve+fhr4P7UXDI/QruvycZQEE2hEAaUlBnCIDkJBZ5hKDmRRQR4B55FoDMso6TAu/jrt2P1ui2Ay9WxzJKiArz17K87/nd1bTDDpfOyfQQo8LgXVCJAgadSWtn3mpR4oQ3rAZ+v4ww8T5H6j9o5SeBta2jDnW+tTQued+Bl/+dA5AgjBV5yHUmJV7Nnu/YCiwD69hqIvIA9Xk4jMiPWTidAgafOjtjm3oC1nk+kf4mFmUQp8Myky7n1ENhWs1g77uJ97YdrPgzudwKqeqYfp0CBp4eqeWOUFHjzLv8+7rv9Wxg+pN9BZCjwst8sFHjZM+MIcQQo8MSxZ2V9BJwk8JKEOs7AW7sL7dE45mhn4J3OM/D0bRxBo4wWeIKWwbIOIkCB56CwbbBUCjwbhGijJeypX4MVa59KW9FR47+CosLeqV+jwJMrcCUF3qzzbsA/Hr4NvSsPPquDAi/7DUaBlz0zjhBHgAJPHHtW1kfAaQJPHyWOkoUABZ4sSbCPTAl
Q4GVKitcZSWDt6tV47qmn0dzYjBknHIfTzpoHt7v78/wo8IxMgXPlSmDN5pdRvevjtGlGDj4D/aqmUuDlCtek8UoKvCmnXoMTj5mIJSvWoaJHKb59zQU48dhJHYgo8LLfKRR42TPjCHEEKPDEsWdlfQScJPA27mjHc280oLYpiqmjC3D2rHJ4PZ3HXfCjBgEKPDVyYpddBCjwuBusJtDY0IC7bvsxIuGuF8Gce9GFOH72rG5bocDrFhEvsJDAoe7Amz7+ahQXVlHgWZhDNqWkFXir129FNBZLW4vP68WIIf1x68//iNPnHIMZ08fizfeW4fs/fRgvPHZXxx157eH0MdnAcOq1Pq8bsXgCce0ffkhAdgIu7exLnyYEwtrjefyQgAoEktI5Ekto7+aw99+x7aE4vnvfFiT/ve/zuRPL8flZar7ZUoW9ZUaPAZ+n44jhUCS2730yZpThnCRgGIGAz63tV35NYBhQTtQtgfcXfYDfP/iHtOumHzMd1153TbdjPW5Xx9+xUe3rAn5IQAYCG7YtxPqtC+H1+jFq8Gz0rxqf1lae3yOFYwkHE9jwURStDQn0Henp+MeJn2QeRn5c2jcohvxtdMvdf0SwPZTWW1lJEW79zhUH9XvVd36G8+fNwllzZ6CuuesnIUYuzM5zFed7O77woRCxc8r2WZvP40Z+wI2mtqh9FsWV2JpASaEPbe0x7Yt1e3+DuXZLO379xM60LAf1CeB7V/S1db52W1yptl+T32A2tkY6frjHDwnITqCsyI+m1jC4XWVPyj79NWh34N3+g9sQiXR933nBFy/CrJNmd7vI5Dffyb9jW9v5dWy3sHiBFASST+uJdixJw7T4yQha67q+Lhk924N+o42VWVIA76aJZB5GfgwTeIdrqi0YwrpN2zFp7LDUJZd98ye4/IJTcdrso/gIrY40+QitDmgcIowAH6EVhp6FdRJwyiO0Ie3Ou9t/vwPB/e7AO3VGKU6fWaaTHIeJIMBHaEVQZ81cCPAR2lzocaxeAjwDTy85jhNNoDXagse2PoT369/FoIJhuGrQddq/hx62LRleYtGm3XX30VPpT1qW93dh/OnOE3jSPkJ7uB3U0NiCUy7+Lu6745uYOX083l68HN+74yG8+PjP0LO8hAJPx98IFHg6oHGIMAIUeMLQs7BOAk4ReEk8HWfgLdDOwGvkGXg6t4vwYRR4wiNgA1kSoMDLEhgvF0qAZ+AJxc/iGoHfb74Pr+16McWiZ6AXHpj0GNyuQ7+ERQaBFwkl8P5fY9qRX10R9h7lwogTKPBy3dSm34GXbPDNhcvwywefwO7aBvTrXYGbv/FFHDt1bEfvfIlF9hFS4GXPjCPEEaDAE8fejMrtsSiWtu6BW7sjfkpxFXwZvMHNjD7MnNNJAs9MjpzbGgIUeNZwZhXjCFDgGceSM5lPgALPfMascGQC31n+FVQHt6VddM/EP6Bf/sBDDpRB4HV4nk8T2LhYO59Xk3iFPVwYe6oLeUXdv/nZbvtBuTvwuguAAq87Qgf/PgVe9sw4QhwBCjxx7I2u3BQN4ZdbP0JDpL1j6t6BQtw4cDry3Pb6aRoFntE7h/OZSYACz0y6nNsMAhR4ZlDlnGYRoMAziyznzZTAgXfg9Qr0xn2THpH6Drx9a9O+dUCoNYGC8s6XwTjxQ4HnxNQPWDMFHjeBSgQo8FRK68i9vlG/Fc/uXp920ZV9x2GadieenT4UeHZKU821hIJRLP9ob0fzE6dVIKC9vOpwHwo8+TKOxmKY/+5CfLJ2I8pLi3H6ccdiUN8+8jUqqCMKPOPBB7WX221riqLA50K/Yp9jv1E2nqx251CeF15P54uC+CEBEQRUPANPBCdZa1LgyZqMhX1R4FkIm6VyJkCBlzNCaSZ4tXYzXti7kQJPmkTYiB0JtGnfJN5/58fYU9PWsbxefQrwzR9ORYH2ttlDfSjw5NsFry/+EAs++CjVWJ4/gJu+dAkCfmPfRCffyjPriAIvM06
ZXtUQjOG1LW2IRDsPm+pT7MWsgYWUeJkC7OY6CjyDQHIaywjI8gitZQuWvBAFnuQBWdEeBZ4VlFkjGwKvfbIF766pRr7fi3lThmDcgIrUcAq8bEjKfW1rLIK7t3yQeoS2b14RvjNgGgJ8hFbu4NidUgTee6Mazzy2Jq3n864YhZlz+lLgKZLkw08+ix27dqd1e/UFn8fA3r0VWYG5bVLgGcv3g+og1teF0yadO7QQlQWHv3PX2A7sPRsFnr3ztePqKPDkSpUCT648hHRDgScEO4sehsCHG2rwxHtd32x63C587/NHo6Ior2OEEwWea/c2uGq2IN53GFBhr8em+BIL/lVgNoFoIoxd7evRGq1FvqcYvQIjEPAUml1Wmvkp8KSJQncjB96BFwj48d0rL+UdeJ8RpcDTvbUOOZACz1ieB85GgWcuX85uPAEKPOOZ5jIjBV4u9GwylgLPJkHaZBlJeZeUePt/Lj1hDKYM7uVIgef5eAG8i+d34tBOa43MPg/x0dNtkrYzlsEz8MTmXB1chaZI198pfk3eDS08WmxTFlZPPkL7wE8+xu6dnY/QVvYuwLdu4SO0FkaQc6lwJIpXFy7iGXiHIUmBl/MWS5ugXnuE9vVNrQjHtdfDax8+QmssXwo8Y3lyNvMJUOCZzzibChR42dCy6bUUeDYNVtFl8Q68/YKLx+F/7C64gi1dv1heidDFN6IOIax21yGu3aE4OlaKikS+oonbv20KPLEZr295D9G49tqy/T7Di2fC6wqIbczC6nyJhYWwWcpyAhR4xiPnSyyMZ7pvRgo889hyZnMIUOCZw1XvrBR4esnZaBwFno3CtMlS/rNsMxau28kz8JIC748/giu635vKNIFXd9E38S//ZsTQ+dNx7UXqODs6GKUJHmgu4x8BCjyxqTj9Drxs6fMlFtkS4/WiCVDgiU6A9bMhQIGXDS1eKwMBCjwZUujqgQJPrjyEdEOBJwQ7i+ok4LQz8DzL3ob3vZc6aSUfoZ1zAVaNGYrFnl1pBKfGKjA+3lMnVQ4zkwAFnpl0u5/b6WfgdU8o/QoKvGyJ8XrRBCjwRCfA+tkQoMDLhhavlYEABZ4MKVDgyZWC4G4o8AQHwPJZEXCawOvwdge8xGK7qwWve3ekcTsu1gfD4iVZseTF1hCgwLOGM6sYQ4ACzxiOnMU6AhR41rFmpdwJUODlzpAzWEuAAs9a3t1V4x143RFywO9T4DkgZBst0YkC71DxveOtwUZXY8dvDYgX40RN4Hm0R2n5kY8ABZ58mbCjwxOgwOPuUI0ABZ5qiTm7Xwo8Z+ev4uop8ORKjQJPrjyEdEOBJwQ7i+okQIHXBa41EUZMc3Yl4Nl3OreTJcMo8CzBzCIGEaDAMwgkp7GMAAWeZahZyAACFHgGQOQUlhKgwLMUd7fFKPC6RWT/Cyjw7J+xnVZIgWenNJ2xFgo8Z+Rsl1VS4NklSeesgwLPOVnbYaUUeHZI0VlroMCTK28KPLnyENINBZ4Q7CyqkwAFnk5wHCaMAAWeMPQsrIMABZ4OaBwilIDTBV7biqUIbl6PoukzEejdV2gWLN49AQq87hnxCrkIUODJl4eRHbkS2sfICbOdq7o2mO0Qx19Pgef4LaAUAAo8peJisxoBCjxuA5UIUOCplBZ7TRJwssCr+dMDqH/x2Y6N4PL50P/m21E09WhuDIkJUOBJHA5bOyQBCjy5NgbvwJMrDyHdUOAJwc6iOglQ4OkEx2HCCFDgCUPPwjoIUODpgMYhQgk4VeDFQyGsuXQesN+9E4WTp2PgrXcLzYPFj0yAAo87RDUCFHhyJUaBJ1ceQrqhwBOCnUV1EqDA0wmOw4QRoMAThp6FdRCgwNMBjUOEEnCswGtvx5rLzkoXeOMmYeDt9wjNg8Up8LgH7EWAAk+uPCnw5MpDSDcUeEKws6hOAhR4OsFxmDACFHjC0EtVOBoOY8lLr6B61WoMmjgBE047BR6vV6oek81Q4EkXCRvqhoBTBV4
SS9ojtB4P+n//Tj5CK/mfGN6BJ3lAbO8gAhR4cm0KCjy58hDSDQWeEOwsqpMABZ5OcBwmjAAFnjD0UhV+5b7fYNWCN1M9TZ53OuZc/WWpeqTAky4ONpQBAdUFXjwUQ+tzKxD6uBqeigIUnjsO/mEVGay885J9L7Eonnos/H37ZzyOF4ohQIEnhjur6idAgaefnRkjKfDMoKrYnBR4igXm8HYp8By+AQQuvx0JNHtciGuvairQzhwqTrgy6oYCLyNMtr4oHo/jN5dciah2ZtW+T0FZGa798++kWzfvwJMuEjbUDQHVBV7Lc58i+Nq61CpdhQH0uP1UuP0eZm9DAhR4NgzV5kuiwJMrYAo8ufIQ0g0FnhDsLKqTAAWeTnAclhOBqObq9rjSX7Jeqgm8ggzeu06BlxN62wz+83XXo2FnTWo9lUMG47J7fi7d+ijwpIuEDdlc4NX/8k1Et9SnrbLsxhPhG9KD2duQAAWeDUO1+ZIo8OQKmAJPrjyEdEOBJwQ7i+okQIGnExyH5UQgqAm8hgMEXj5cKIt3Py0FXveMnHDFlmWf4KVf3Yv25mYUlpfjzJtuQP9xY6VbOgWedJGwIZsLvLZ/r0Lr/DWpVfIOPHtveQo8e+drx9VR4MmVKgWeXHkI6YYCTwh2FtVJgAJPJzgOy4kA78DLCR8Hf0Ygoj1CW7+jGj0G9IfX55OSCwWelLGwqSMQUP0R2n1n4IU/2gF3ZWHWZ+Bxc6hFgAJPrbzYLUCBJ9cuoMCTKw8h3VDgCcHOojoJUODpBMdhORPoOAPPrZ2Bp82Urz06W5LB47PJorwDL2f0nMBCAhR4FsJmKUMIqC7wDIHASZQhQIGnTFRs9DMCFHhybQUKPLnyENINBZ4Q7CyqkwAFnk5wHCaMAAWeMPQsrIMABZ4OaBwilAAFnlD8LJ4lAQq8LIHxcuEEKPCER5DWAAWeXHkI6YYCTwh2FtVJgAJPJzgOE0aAAk8YehbWQYACTwc0DhFKgAJPKH4Wz5IABV6WwHi5cAIUeMIjoMCTKwLx3VDgic+AHWROgAIvc1a5XJlIJLB088fY1ViDaUOPQmVJr1ymc/RYCjxHx6/c4inwlIvM8Q1T4Dl+CygFgAJPqbjYrEaAAk+ubcA78OTKQ0g3FHhCsLOoTgIUeDrBZTnsvpd+iffWvt0xKs+fj/895/8wuq98b8zMcllCLqfAE4KdRXUSoMDTCY7DhBGgwBOGnoV1EKDA0wEtwyGtsRje1970visaRi+fH8cUFaPQ48lwNC87HAEKPLn2BgWeXHkI6YYCTwh2FtVJgAJPJ7gshu2o244bH/tG2oiZI0/ADWd+N4tZeOk+AhR43AsqEaDAUykt9pokQIHHfaASAQo889J6taEeNeFwqkCVJvFOLS83r6BDZqbAkytoCjy58hDSDQWeEOwsqpMABZ5OcFkMo8DLAlYGl1LgZQCJl0hDgAJPmijYSIYEKPAyBMXLpCBAgWdeDE/s3Y1IPJEq4NL+10WVveBzJf8XP3oJUODpJWfOOAo8c7gqNSsFnlJxOb5ZCjxrtsCvX/4V3l3zVkexgC8Pt55/B0b0HmlNcZtVocCzWaA2Xw4Fns0DtuHyKPBsGKqNl0SBZ164vAPPHLYUeOZw1TsrBZ5ecjYaR4FnozAdsBQKPGtC5kssjOMVwCL6AAAgAElEQVRMgWccS85kPgEKPPMZs4KxBCjwjOXJ2cwlQIFnHt/kGXiLW5qxO8Iz8IykTIFnJM3c56LAy52h8jNQ4CkfoaMWQIHnqLhtsVgKPFvE6JhFUOA5JmrbLJQCzzZROmIhFHiOiNlWi6TAkytOCjy58hDSDQWeEOwsqpOAEQIvof1kLl5fA3fPvnB5vDo74TASyIwABV5mnHiVHAQo8OTIgV1kToACL3NWvFI8AQo88Rmwg+wIUOBlx8vsqynwzCaswPwUeAqExBZTBHIVeLFNn6L95UeQCLbAVVSKwFnXwNtvGAmTgGkEKPBMQ8u
JTSBAgWcCVE5pKgEKPFPxcnKDCVDgGQyU05lOgALPdMRZFaDAywqXPS+mwLNnrnZdVa4Cr/X3tyDRVJvC46kahPzLvm9XXFyXBAQo8CQIgS1kTIACL2NUvFASAhR4kgTBNjIiQIGXESZeJBEBCjyJwtBaocCTKw8h3VDgCcHOojoJ5CLwEq2NaH3oAFmnPUJb9O37dXbDYQcSiEcbEA1vh8fbAx5/XwLSCFDgcRuoRIACT6W02GuSAAUe94FKBCjwVEqLvSYJUODJtQ8o8OTKQ0g3FHhCsLOoTgK5CLxkyfaXH0V05aJUdd/UOQjMuVBnNxy2P4FoaCvaG/6r/VK845f9hRPhLzrK8ZAo8By/BZQCQIGnVFxslgKPe0AxAhR4igXGdinwJNsDFHiSBSKiHQo8EdRZUy+BXAVeIhpBZMkCxKrXwztgNHyTTgQ8Hr3tcNx+BNrqnkc8sme/X3GhsNcVcLmc/aIQCjz+MVGJAAWeSmmx1yQB3oHHfaASAQo8ldJir0kCvANPrn1AgSdXHkK6ocATgp1FdRLIVeA1J+rgTmhiyV2uswMOOxwBCrxDk6HA458ZlQhQ4KmUFnulwOMeUI0ABZ5qibFfCjy59gAFnlx5COmGAk8IdhbVSUCvwIslongn/Fdsi63oqDzEOw3H+S/W2QWHHYoAH6EVL/Ca61rR3hpG5QAKav4p1UeAAk8fN44SR4B34Iljz8rZE6DAy54ZR4glQIEnlv+B1Snw5MpDSDcUeEKws6hOAnoF3sboB3gv/M+0qnMCX0Y/zxidnXDYoQjEo43aSyy28SUW+8Gx6g6853/7Fha/2Cmoh03uj0tvOQOBfB83KglkRYACLytcvFgCAhR4EoTAFjImQIGXMSpeKAkBCjxJgvisDQo8ufIQ0g0FnhDsLKqTgF6Btzj8NNZFu15ekSw/1jsHU/1n6uyEw0ggMwJWCLzNn1Tj99//V1pDZ117AmacPSGzJnkVCXxGgAKPW0E1AhR4qiXm7H4p8Jydv4qrp8CTKzUKPLnyENINBZ4Q7Cyqk4BegdcU34MX2n+FBGIdlT0uH84OfI9n4enMgcMyJ2CFwHvznx/jP4+mC+qJJ47ARf9zSuaN8koS0AhQ4HEbqEaAAk+1xJzdLwWes/NXcfUUeHKlRoEnVx5CuqHAE4KdRXUS0CvwkuX2xrdhTfRd7SUWboz2nYBydx+dXXAYCWROwGyBF08AH1Q345nnViC+ox6+5Vu0PQ58/d4L0G94ZeaN8koSoMDjHlCQAAWegqE5uGUKPAeHr+jSKfDkCo4CT648hHRDgScEO4vqJJCLwNNZksNIICcCZgu8BTVt+KA2iGgkhpaGIEr2NuGi0T0xeHzfnPrmYGcS4B14zsxd5VVT4KmcnvN6p8BzXuaqr5gCT64EKfDkykNINxR4QrCzqE4CFHg6wXGYMAJmC7zfrmlAa7Tz0fDkx6X9c/2YHvC7k/+LHxLIjgAFXna8eLV4AhR44jNgB5kToMDLnBWvlIMABZ4cOezrggJPrjyEdEOBJwQ7i+okQIGnExyHCSNgtsD726Ym7GiLpNZX5HPj6yPLha2XhdUmQIGndn5O7J4Cz4mpq7tmCjx1s3Nq5xR4ciVPgSdXHkK6ocATgp1FdRKgwNMJjsOEETBb4O1qj+LprS1o1R6hDXjd+Fy/Igwp8glbLwurTYACT+38nNg9BZ4TU+9cs6s9DM/Gari1f8cG9EKsskx6GBR40kfEBg8gQIEn15agwJMrDyHdUOAJwc6iOglQ4OkEx2HCCJgt8JILS77IYo8m8soDHj46KyxpexSmwLNHjk5aBQWek9LuWmsiFkP+W8vgCoZSvxiaOhLx3j2lBkKBJ3U8bO4QBCjw5NoWFHhy5SGkGwo8IdgtK5r36U7kv78Z0L7BD04fhPaJah9sT4Fn2dZhIYMIWCHwDGqV05AAKPC4CVQjQIGnWmLG9OuubURg8cq0yWJ9KhCeMsKYAibNQoFnElh
OaxoBCjzT0OqamAJPFzZ7DaLAs1ee+6/Gt6MBpU8tSVtg4/lTEOkv/yMGh0uFAs+++9WuK6PAs2uy9lwXBZ49c7Xzqijw7Jzu4dfmamtH3oL0r3GjQ/oiMmaQ1EAo8KSOh80dggAFnlzbggJPrjyEdEOBJwS7JUXzP9iCwvc2ptVqmzYQbccPs6S+GUUo8MygyjnNJECBZyZdzm00AQo8o4lyPrMJUOCZTVje+b3rt8Ov/ZPQzpGIlxYhdPQYwOeVt2GtMwo8qeNhcxR40u8BCjzpIzK/QQo88xmLquDbXo/Sp5emlW88bzIiA9R9QyUFnqjdxLp6CVDg6SXHcSIIUOCJoH5wzUgihDrXVvRIDITPFZCjKUm7oMCTNBir2gpH4ApFkCgq0N5qYVVR/XUo8PSz40gxBHgHnhjuh6tKgSdXHkK6ocATgt2yovufgZe8+y40qZ9ltc0oRIFnBlXOaSYBCjwz6XJuowlQ4BlNNPv5tmE5nvXejiAakY9SnBv9PwzAxOwncsgICjyHBG2TZVLg2SRIBy2DAk+usCnw5MpDSDcUeEKws6hOAhR4OsFxmDACFHjC0LOwDgIUeDqgGTzkD96rUYstqVl7YhCujv7B4Cr2mY4Czz5ZOmElFHhOSNlea6TAkytPCjy58hDSDQWeEOwsqpMABZ5OcFYMS8Th2/kiPHWLEC8ejXC/CwBvvhWVpa5BgSd1PGzuAAIUeOK3xC+9ZyKGSKoRD3z4bvQl8Y1J2gEFnqTBsK1DEqDA48ZQjQAFnlyJUeDJlYeQbijwhGBnUZ0EKPB0grNgmH/znxHY/GiqUrR8OoKTfmlBZblLUODJnQ+7SydAgSd+R7zivgfL3C+nGpkUPwOnx28U35ikHVDgSRoM26LA4x6wBQEKPLlipMCTKw8h3VDgCcHOojoJUODpBGfBsKL3r4CrbWtXJZcLzcdr34R68iyoLm8JCjx5s2FnBxOgwBO/K2KI4kP3M9juWoH+ifGYHj8PHsj9Zk2R1CjwRNJn7WwJ8A68bInxetEEKPBEJ5BenwJPrjyEdEOBJwQ7i+okQIGnE5wFw/KX/w+8dYtTlRLa47Mtx72ovRXObUF1eUtQ4MmbDTujwOMeUJ8ABZ76GTppBRR4TkrbHmulwJMrRwo8ufIQ0g0FnhDsLKqTAAWeTnAWDHM1r0fBpz+Eu30X4CtEcNTNiP7/9s4ETq6qzNtvLb2msy+EsEuAgCAEYQCVBFEQAUEQ+UAEBSeODKAoiFFwWATkQ1CDIuCCLMoER4aIMIAiAmERhsWwKBAgBLLva2/VVTW3KqaT6sT0vafuve859z71+2Vgwjnnfc/zP2k7T99lxMQYKttdAoFndz50V0uAK/A4Ea4RQOC5lli6+0XgpTt/F3ePwLMrNQSeXXmodIPAU8FOUUMCCDxDcHFNKxclu+ZNKbVun/pbZ9cjR+DFdfioEwYBBF4YFFkjTgIIvDhpU6teAgi8egkyP24CCLy4iW+5HgLPrjxUukHgqWCnqCEBBJ4hOKapEUDgqaGnsAEBBJ4BNKaoEkDgqeKneEACCLyAwBiuTgCBpx5BTQMIPLvyUOkGgaeCnaKGBBB4huCYViWwcvZfZPGLd0m5XJJhux4mw8cdETkZBF7kiCkQIgEEXogwWSoWAgi8WDBTJCQCCLyQQLJMbAQQeLGh9lUIgecLU7IHIfCSnW/SdofAS1qi8e2nc9nbMuvBS2oKbjvhXBm4zT6RNoHAixQvi4dMAIEXMlCWi5wAAi9yxBQIkQACL0SYLBULAQReLJh9F0Hg+UaV3IEIvORmm8SdIfCSmGo8e1r6t3tl0Yzf1hQbtvuRstU+J0baAAIvUrwsHjIBBF7IQFkucgIIvMgRUyBEAgi8EGGyVCwEEHixYPZdBIHnG1VyByLwkpttEneGwEtiqvHsqWPZLHn7wUtrinEFXjzsqeIOAQSeO1nR6ToCCDxOgksEEHgupUWvFQIIPLvOAQLPrjxUukH
gqWCnqCEBBJ4hOKZVCax/Bp6UijJs3Mdl2G6HRU6GK/AiR0yBEAkg8EKEyVKxEEDgxYKZIiERQOCFBJJlYiOAwIsNta9CVgu8ZStWy+QrbpIFi5fLPbdc0buhd+ctkm9ffbO89sY7Mmb0CLnwK6fKvnvtUv3v85Z2+No4gzYQQOBxGmwiUJayLMw+L525JbJVYT9pkeE17SHwbEqLXvwQQOD5ocQYWwgg8GxJgj78EkDg+SXFOBsIIPBsSIEeghBA4AWhFf1YawXe2vZOOfnMy2TiQfvIo3+ZUSPwPveV78qhH9pXPnv8YfLks694Mu8X8sc7r5WGfA6BZ3BmEHgG0JgSGYHnmq+TebmnquvnpVkO7LhQhpbG9tZD4EWGnoUjIoDAiwgsy0ZCAIEXCVYWjZAAAi9CuCwdOgEEXuhIWTBiAgi8iAEHXN5agdfe0SlLlq2s/rrk2lt7Bd7S5avkiM9cIE/de73kc7nqdk+YdLFc8O8ny7+MH4fAC3gAKsMReAbQmBIJgdXZufJIy/k1a48pHiTv7/xyxAKv5MnC30sm84KUy+OkUD7e+/fGSPbIoukjgMBLX+Yu7xiB53J66ewdgZfO3F3dNQLP1eTS2zcCz67srRV46zE9/9LrNQLv+ZdmymXfv1Wm/fLyXpLnXfoTOWDfPeTETxyCwDM4Xwg8A2hMiYSAlsDLy88ln9nwdtJi+UNSkIsi2SOLpo8AAi99mbu8YwSey+mls3cEXjpzd3XXCDxXk0tv3wg8u7JXFXiVq+nmL1q6CZFtvOfaDR08sPr7fQXek8++LNf9/C6ZeuPFvfMu+v+/kF3fs62c9umPyer2gl2EHeimpSknhZ6y9BRLDnSboBYzCdpLiFuZnrta5smz1RUrt9AeUvwP7yl4G26hzWUy0tiQlY7uYmhVS90nipSXb7ReRrJN93j/f3NoNVgovQRam/LSVShKsVROL4SE7Lwwe6a0P/GAt5uMtB5ytDSM2TEhO9uwjcpfLrPe19m1nT1SKnNmExdwAjdUObPt3nnltCYw3ARuqSGXlWw2U/2+gA8E+iVgwRe2ga0NOJZ+g4pvQCWPMD+Zsvfxu+D0p1+UaQ88vsnwE4/5sBwwfvfq7/cVeC+8PFMuvuaWmmfife2S6+UD++0pJxw9UVYh8Pzi7x3X0ugJPE/e9RR9Rxe4BhM2QwDcmz0WlZdYzJPnpMN7icWY4v7S2uclFrlcRpo8gdfeGd43PuWec71bZ1/ZqJ/hkm2YGuuxzXh/YeaTTAKtzTnpLJS8F9/yh97lhHvmzpKlP/iGSLFn3TYam2TE5B9JbugIl7e1Se8DWioCT2RtR0XgJWprbCahBNq8M1sRzv7/BpJQEGwrEIEAf2UNtG5/gys/hK58je3s5sKJ/ljx3z0CFvz1YJAnjHAs9pzGSh5hfgIJPD+F+wq85StXy0dPPE8e/92PpaV53TOqPn7KBXLlNyfJ+D134RZaP1D7jOEWWgNoTFEjEMVLLLIyUxqz3lW95WWePhzsPQPv61KS/XztsWP6TOl8eKbkxgyWASfuK7nBLb7mMSg9BLiFNhlZdz9yt3Q98J81m2n65CRpPPCwZGzwH7vgFtpExZmKzXALbSpiTswmuYU2MVGmZiPcQmtX1Kq30PpB0VfgVeZ84WtXy/77jJNJpxwt9//56eottff/+mrJeZckz1va4WdZxmxEAIHHcXCJQBQCb93+C94PuWZ7Am9b79/93Trb/tDfZeVVf+jFl99puIy88TMi3tciPhBYTwCBl4yzUHj+Uen8zfU1m2n53GTJ775vMjaIwEtUjmnaDAIvTWm7v1cEnvsZpm0HCDy7ErdW4D00/Tk5/7IbvCtiyt7z2YrS0JCXnbYbLXfffLnMXbBEvvXdn8lrb74r240ZJZec93l57247Vski8IIfMARecGbM0CMQncALvqdlk6dJ17OzayaOvPlUyW8/LPhizEgsAQSeG9G+M7dTnn1xtTQ1ZeSD7x8
sQwbX3qJQ7ilI539OkZ5XnqluKD9+grT8v7Pd2FyALrkCLwAshlpBIJ8X+eF9L8rMhStl3JihctKBY2Vom78fxFmxAZpIFQEEXqriTsRmEXh2xWitwDPFhMALTg6BF5wZM/QI2CTwVl73Z2m/58UNMLzn2I268wuSGzZADxCVrSOAwLMukk0aqsi72+5a0PsMrcrLnc783BgZ0JLbZGxp2UKRbE6yQ5L17Lv1G0Xg2X9e6bCWwPUPvSQzZi/xfui/7vcrEu9rR+4DJghYSQCBZ2UsNLUFAgg8u44HAs+uPFS6QeCpYKeoIQGbBF7PolWy/KJ7peetxd7lOFkZ/KUJ0vrJvQ13xrSkEkDg2Z/sg48uk2f+uqqm0eM/PlLeu2v6ZDwCz/7zSocbCHT3lORrv54ulX+uF3gtjXmZctrBYIKAlQQQeFbGQlMIPGfOAALPmaiiaxSBFx1bVg6fgE0Cr7K7sveaxp7ZyyQ3sk2ybU3hb5gVnSeAwLM/wqeeXykPTV9e0+jnPj1ath+TvtvwEHj2n1c6rCXAFXicCJcIIPBcSoteKwS4As+uc4DAsysPlW4QeCrYnSha9n6cvSz3mnTlVsqIwu7SWB6k3rdtAk8dCA1YTwCBZ31E0tVdkv+6d5HMerez2uwB4wfJ4RPS+SxLBJ7955UOawnkvGfgTeEZeBwLRwgg8BwJijZ7CSDw7DoMCDy78lDpBoGngt36omUpySutv5Ll+ZnVXvPem1n3WnOGtJW2Vu0dgaeKn+IGBNIu8JobS9LsXZxa9l5K1d6VlUIhY0AxnilLlhW8XrPSNmDTZ9/F04F+FQSefgZ0EIwAb6ENxovRugQQeLr8qR6cAAIvOLMoZyDwoqTryNoIPEeCirnNNdl58kKb9ybojT6jC/vLLh3HxNxJbTkEnip+ihsQSLPAa2woS1uL92yqjT4rVmelVLZX4hlEnKgpCLzo4ny9NFPmlOfKntk9ZFRmVHSFUrYyAi9lgTu+XQSe4wGmsH0Enl2hI/DsykOlGwSeCnbri7ZnF8tzbdfV9DmqMF526zhetXcEnip+ihsQSLPAG+DJuyZP4m38WduZ9W5ZReAZHKVYpiDwosH8Xz3/LY8UH6su7r3DWP614Qx5X3bPaIqlbFUEXsoCd3y7CDzHA0xh+wg8u0JH4NmVh0o3CDwV7E4Ufbn1Nm6hdSIpmrSZQJoFHlfg2XwyN98bAi/8zLqlIF/rusB7quwGmb1DZge5oPGr4RdL4YoIvBSG7vCWEXgOh5fS1hF4dgWPwLMrD5VuEHgq2J0oWnkO3rLc69KZXSkje6J/icWLL5fl6WdFhg4py8c+mpGBbZtepcMVeE4cHZrciECaBV4Fg0vPwOPgiiDwwj8FXeVuOa/7Gwi88NFWV0TgRQSWZSMhgMCLBCuLRkgAgRchXIOlEXgG0JI2BYGXtETd3M//Pl+WW27f8KysEcMzctHkrDR4b5fb+IPAczPfNHeddoGX5uxd3DsCL5rUuIV2y1xL5ZK81fWmLCoslOH5ETK2eRfJZfy9TAaBF82ZZdVoCCDwouHKqtERQOBFx9ZkZQSeCbWEzUHgJSxQR7dz4y9K8pJ3Bd7Gn/O+kpX37Fh7FR4Cz9GAU9y2H4FXKs2TnsI9HqVOyeWPkFxuXIqJsXVNAgi86OjzEot/zvbFtTNkZudrvQO2bdxODhh4kK8wEHi+MDHIEgIIPEuCoA3fBBB4vlHFMhCBFwtmu4sg8OzOJy3dTf1tSaY/sUHgZTxvd9m3szJsKAIvLWcgqfvsV+CVl0tX+zne7XVr/4EgI40tV0s2OzapSNiXxQQQeBaHk+DW7lv+e+ksdfTusHL13bFDj5dM5ZuBfj4IvP4I8d9tIoDAsykNevFDAIHnh1J8YxB48bG2thICz9poUtXYqlVlueFnZXlnTtm7+kjkk5/IyqETeQZeqg5BQjfbn8Ar9kyXQtf3a3afazhOGhpPSygRtmUzAQSezekkt7dHVj0
sSwtLejfYnG2Ro4Z+wteGEXi+MDHIEgIIPEuCoA3fBBB4vlHFMhCBFwtmu4sg8OzOJ03dlb0L8OYtEBk8qCxtAzb/U3duoU3TiUjGXvsVeMUZUui8pGaz+cZJkm84MhkA2IVTBBB4TsWVmGaX9SyVp9Y8KZ3FDmnMNsm/DDhAtmoc7Wt/CDxfmBhkCQEEniVB0IZvAgg836hiGYjAiwWz3UUQeHbnQ3e1BBB4nAjXCPQn8Cr7KXT/VIqF+6tby+T2lsamC71bxxpc2yr9JoAAAi8BITq6haL3IovVxZXSlh0o+WyfN1htYU8IPEcDT2nbCLyUBu/wthF4doWHwLMrD5VuEHgq2ClqSACBZwiOaWoE/Ai8anPes/DK5U7JZLdW65XCEEDgcQZcI4DAcy2xdPeLwEt3/i7uHoFnV2oIPLvyUOkGgaeCnaKGBBB4huCYpkbAt8BT65DCENhAAIHHaXCNAALPtcTS3S8CL935u7h7BJ5dqSHw7MpDpRsEngp2ihoSQOAZgmOaGgEEnhp6ChsQQOAZQGOKKgEEnip+igckgMALCIzh6gQQeOoR1DSAwLMrD5VuEHgq2ClqSACBZwiOaWoEEHhq6ClsQACBZwCNKaoEEHiq+CkekAACLyAwhqsTQOCpR4DAsysC/W4QePoZ0IF/Agg8/6wYaQcBBJ4dOdCFPwIIPH+ctEetWdElr7+wUErljOy0xzAZPnqAdktq9RF4augpbEAAgWcAjSmqBBB4qvg3Kc4VeHblodINAk8FO0UNCSDwDMExTY0AAk8NPYUNCCDwDKDFPKV9dZc8/JuZ3turi9XKmVxGJh43VoaObI25EzvKIfDsyIEu/BFA4PnjxCh7CCDw7Mmi0gkCz648VLpB4Klgp6ghAQSeITimqRFA4Kmhp7ABAQSeAbSYp7z1ylKZ8dicmqq77DNK9jwonW+wRuDFfAApVxcBBF5d+JisQACBpwB9CyUReHblodINAk8FO0UNCSDwDMExTY0AAk8NPYUNCCDwDKDFPGXhu2vkyXvfrKm6z8RtvVtph8fciR3lEHh25EAX/ggg8PxxYpQ9BBB49mRR6QSBZ1ceKt0g8FSwU9SQAALPEBzT1Agg8NTQU9iAAALPAJrClBnT58pbLy+pVt56p8Gy/2E7SM67lTaNHwReGlN3d88IPHezS2vnCDy7kkfg2ZWHSjcIPBXsFDUkgMAzBMc0NQIIPDX0FDYggMAzgKY0pfIsvFJJpG1wk1IHdpRF4NmRA134I4DA88eJUfYQQODZk0WlEwSeXXmodIPAU8FOUUMCCDxDcExTI4DAU0NPYQMCCDwDaExRJYDAU8VP8YAEEHgBgTFcnQACTz2CmgYQeHblodINAk8FO0UNCSDwDMExTY0AAk8NPYUNCCDwDKAxRZUAAk8VP8UDEkDgBQTGcHUCCDz1CBB4dkWg3w0CTz8DOvBPAIHnnxUj7SCAwLMjB7rwRwCB548To+whgMCzJws66Z8AAq9/RoywiwACz748wuwoU/Y+YS4YdK15SzuCTkn9eARe6o+AUwAQeNHG1b1YZPFUkY43RFrHiYw6pSz5Qel8MHpYpBF4YZFknTgIIPDioEyNMAkg8MKkyVpRE0DgRU2Y9cMmgMALm2h963ELbX38EjEbgZeIGFOzCQRetFHPvlKka9aGGgP2EdnmrGhrJn11BF7SE07W/hB4ycozDbtB4KUh5eTsEYGXnCzTshMEnl1JI/DsykOlGwSeCnaKGhJA4BmC8zGt1C3yxtnewI2uo862ioyd4mMyQ/4pAQQeh8MlAgg8l9Ki1woBBB7nwCUCCDyX0qLXCgEEnl3nAIFnVx4q3SDwVLBT1JAAAs8QnM9pXIHnE1SAYQi8ALAYqk4AgaceAQ0EJIDACwiM4aoEEHiq+CluQACBZwAtwikIvAjhurI0As+VpOizQgCBF+052OQZeJ/xnoE3mGfg1UMdgVcPPebGTQCBFzdx6tVLAIFXL0Hmx0k
AgRcnbWqFQQCBFwbF8NZA4IXH0tmVEHjORpfKxhF4qYzd6U0j8JyOL3XNI/DSFXlnZ7s8/syfZP7Cd+W9u42X8XsdKJmMWz+0QeCl68y6vlsEnusJpq9/BJ5dmSPw7MpDpRsEngp2ihoSQOAZgmOaGgEEnhp6ChsQQOAZQHN4ys9+da28Pfv13h0cdsixcsgHj3RqRwg8p+JKfbMIvNQfAecAIPDsigyBZ1ceKt0g8FSwU9SQAALPEBzT1Agg8NTQU9iAAALPAJqjU1avWSlXTbmgpvttx+wkZ54+2akdIfCciiv1zSLwUn8EnAOAwLMrMgSeXXmodIPAU8FOUUMCCDxDcD6nFVcVZc3D7dI9uyAN2+al7SMDJD8k53M2wzZHAIHHuXCJAALPpbTq67VUKsl3rj1Xuru7ehfadec95XMnnVPfwjHPRuDFDJxydRFA4NWFj8kKBBB4CtC3UBKBZ1ceKt0g8FSwU9SQAALPEJzPaSvuWiXdbxZ6Rzds2yBDPzPI52yGIfA4A64TQOC5nmCw/l946S/yu/t/LYVCtwwbOlJO/fRZMmrk1sEWUR6NwFMOgPKBCCDwAuFisAUEEHgWhLBRCwg8ux33JBEAACAASURBVPJQ6QaBp4KdooYEEHiG4HxOWzxlmZS7yhtGe88yH3HuMMk2uPVQc5/bjWUYV+DFgpkiIRFA4IUE0qFluro6ZdqcGfLCwLK05RrlmAE7yvuahzmzAwSeM1HRqEcAgccxcI0AAs+uxBB4duWh0g0CTwU7RQ0JIPAMwfmcxhV4PkEFGIbACwCLoeoEEHjqEcTewDMdi+T2VTN76+YkI98cPl62yrfE3otJQQSeCTXmaBFA4GmRp64pAQSeKblo5iHwouHq1KoIPKfiSn2zCLxojwDPwAufLwIvfKasGB0BBF50bG1deerqN+WJ9gU17Z04aGc5uGW0rS3X9IXAcyImmvwHAQQeR8E1Agg8uxJD4NmVh0o3CDwV7BQ1JIDAMwQXYFqpKLL0tay0L85K2+iyDNu1KBnuoA1AsHYoAs8YHRMVCCDwFKArl+QKPOUAKJ8qAgi8VMWdiM0i8OyKEYFnVx4q3SDwVLBT1JAAAs8QXIBpc55okBVvbzB2w3crytb7eVaPjxEBBJ4RNiYpEUiEwCuUpNxTlkwLb9D2e4z+Z8078njnQmnN5OSoth1kfNNwv1PVx3EFnnoENBCAAAIvACyGWkEAgWdFDL1NIPDsykOlGwSeCnaKGhJA4BmC8zmt1CPy6l2NUvnn+k/lMUjjju/2uQLD+hJA4HEmIiFQLErj/LmS9Rbv2mprKTc0hFLGdYFXmr1WSgs617EY1CC5XQdKJsclxKEcDksXQeBZGgxtbZYAAo+D4RoBBJ5diSHw7MpDpRsEngp2ihoSQOAZggsw7bVpjVJYu2FCy/Cy7HxEIcAKDN2YAAKP8xA6gZ6iDHnqUcmvWlVdutjSKis/cLCUmut/6YDLAq+0uiClv61jsv6T2a5VcmPq5xJ6hiwYGgEEXmgoWSgGAgi8GCBTIlQCCLxQcda9GAKvboTuL4DAcz/DNO0AgRd92mvmZeXdJ/NS7BKpXH23/YRuaR0Rfd2kVkDgJTVZvX01zZsjA194tqaBtbvtIR1jd627KacF3rwOKb3bXsMgO7xJsmPb6ubCAvYSQODZmw2dbUoAgcepcI0AAs+uxBB4duWh0g0CTwU7RQ0JIPAMwQWcVvRuoe32LmRpGiyS5TFSAenVDkfg1YWPyZshgMDb/LEoe8++K7240nv+Xal3QG73QZLxbqXlk1wCCLzkZpvEnSHwkphqsveEwLMrXwSeXXmodIPAU8FOUUMCCDxDcExTI4DAU0Of3MI9PTLkycckv3qjW2g/OEFKTc1179nlK/Aqmy93FL1n4HVIxnvvTmZUE/Ku7hNh/wIIPPszosMNBBB4nAbXCCDw7EoMgWdXHirdIPBUsFPUkAACzxAc09QIIPDU0Ce7MC+xSHa
+7M43AQSeb1QMtIAAAs+CEGghEAEEXiBckQ9G4EWO2P4CCDz7M6LDDQQQeJwG1wgg8FxLLN39un4FXrrTS+fuEXjpzN3VXSPwXE0uvX0j8OzKHoFnVx4q3SDwVLBT1JAAAs8QHNPUCCDw1NBT2IDA5gRep/RIKZOR1jIPxDRAypSICSDwIgbM8qESQOCFipPFYiCAwIsBcoASCLwAsJI6FIGX1GSTuS8EXjJzTfKuEHhJTjd5e+sr8F7Nr5a52Y7qRkeUm2SvwiDJSiZ5G2dHzhJA4DkbXSobR+ClMnanN43Asys+BJ5deah0g8BTwU5RQwIIPENwTFMjgMBTQ09hAwIbC7yF5S75a35FzSrjegbKNqUWg5WZAoFoCCDwouHKqtEQQOBFw5VVoyOAwIuOrcnKCDwTagmbg8BLWKAJ3w4CL+EBJ3B7CLwEhprgLW0s8N6QNfJmbm3NbivyriLx+EDAFgIIPFuSoA8/BBB4figxxiYCCDyb0hBB4NmVh0o3CDwV7BQ1JIDAMwTHNDUCSRR4pZ6CdC94RxpHbSvZxiY1thQOn8DGAm9NsUeeaVwuBSlVC1VunN2/MFQGlhvCL8yKEDAkgMAzBMc0FQIIPBXsFK2DAAKvDngRTEXgRQDVtSUReK4llu5+EXjpzt/F3SdN4LW/8bLMv+V70rNymeQGDJIxZ3xDWnd9n4vR0PNmCPR9Bl679wKLd/Id3v8ty7bFZhlSboQbBKwigMCzKg6a6YcAAo8j4hoBBJ5diSHw7MpDpRsEngp2ihoSQOAZgmOaGoGkCbxZV54l3fPf6eXZuPX2stO3rlfjS+FwCWzuLbThVmC1NBDoecZ7d/E93u3XnSK5Q5ql4egBkW0bgRcZWhaOgAACLwKoLBkpAQRepHgDL47AC4wseRMQeMnLNMk7QuAlOd1k7i1pAu/1rx4vZe8W2t5PJiO7fO83km1qTmaAKdsVAi9lgUew3dLcHun6j2U1KzeeOVhy+0Vzuz0CL4IQWTIyAgi8yNCycEQEEHgRgTVcFoFnCC5J0xB4SUoz+XtB4CU/46TtMGkCb8HUH8vKJx7sjWnwQYfJ6M98OWmxObGfnlJJ/jDrb/Ly0vkybthWcuR79pR8NltX7wi8uvAx2SPQ80iHFG5fXcMid0iLNJ4azctPEHgcO5cIIPBcSoteKwQQeHadAwSeXXmodIPAU8FOUUMCCDxDcExTI5A0gVfu6ZEVT9wva1+fIS3v2UOGTjhasg3+n4tWuXqv3OHdWpfLS7a1TS2XJBT+6YzH5U9vv9a7lUN32FX+bZ+D69oaAq8ufFZOLpSLck/heXmm5y0ZlhkgJzTuLzvntoqs181egXe2dwXeeK7Aiww6CztDAIHnTFQ0+g8CCDy7jgICz648VLpB4Klgp6ghAQSeITimqRFImsCrB2Spq0OKi+aKeFeOVT7ZAQMlN2LrepZM9dwvPvifsrKzvZdBoydFbz3qNMl6tzWbfhB4puTsnfdA9wy5t/DX3ga9a+Hk0tbjpTUTjVCrFNr4GXj5Q5slfyTPwLP3hNBZnAQQeHHSplYYBBB4YVAMbw0EXngsnV0JgedsdKlsHIGXytid3jQCb0N8xSXzpbS29ta6/JgdJRPgCj6nD0PIzX/t4btk7uoVvatuM3CIfP/QT9VVBYFXFz4rJ0/pfEBmFhfW9HZm00flvfltrOw3aFPcQhuUGOM1CSDwNOlT24QAAs+EWnRzEHjRsXVmZQSeM1HRqEcAgccxcI0AAg+BF9WZ/bsnRK/534dlTXentDU2y/n7Hyq713lFIwIvqrT01u17BV5zplEua4n2Crw4d4vAi5M2teolgMCrlyDz4yaAwIub+JbrIfDsykOlGwSeCnaKGhJA4BmCY5oaAQTeBvTcQhv+MSyUijJn9XIZM2CINOXzdRdA4NWN0LoFKs/Am1Z4Tp7tmRXLM/DiBoDAi5s49eohgMCrhx5zNQgg8DSo//OaCDy78lDpBoGngp2
ihgQQeIbgmKZGAIFXi56XWKgdRV+Fgwq8BSuKsqazJKOH5qWtyfzZe76aYxAENkMAgcexcIkAAs+ltOi1QgCBZ9c5QODZlYdKNwg8FewUNSSAwDMExzQ1Agg8NfQUNiAQROA9P6tL3l7UU62SzYp8cLdmGTkoZ1CVKRAwJ4DAM2fHzPgJIPDiZ07F+ggg8OrjF/ZsBF7YRB1cD4HnYGgpbhmBl+LwHd06As/R4FLatl+B11koy/88v+ENuBVcowbn5EPjmoOTK4sUl3RJpi0n2Zb6bwMO3gAzXCaAwHM5vfT1jsBLX+au7xiBZ1eCCDy78lDpBoGngp2ihgQQeIbgmKZGAIGnhp7CBgTiFnjlTu8W3N/Mk+K8zmq3zROGS/MHhhl0zpS0EkDgpTV5N/eNwHMztzR3jcCzK32rBd6yFatl8hU3yYLFy+WeW67oJXfSmZfJqzNni2TWPWtlUFurPHb3ddV/n7e0wy7CDnSDwHMgJFrsJYDA4zC4RgCB51pi6e7Xr8CrUArjFtrOx5ZK55PLaqAP/OIOkhvWmO4g2L1vAgg836gYaAEBBJ4FIdBCIAIIvEC4Ih9srcBb294pJ3uibuJB+8ijf5lRI/COOnWyTLnsHBm70zabAELgBT8zCLzgzJihRwCBp8eeymYEEHhm3JilQyCIwKt02PsSiyE5aWv2HoQX8LPmjjnS807tD18HHDNaGvYYGHAlhqeVAAIvrcm7uW8Enpu5pblrBJ5d6Vsr8No7OmXJspXVX5dce2uNwJt4/FfkzpsultEjN73FAoEX/IAh8IIzY4YeAQSeHnsqmxFA4JlxY5YOgaACr94uC6+vlbX/Pa93mezgBhl4xvaSaQouA+vthfnmBLzHGEpnqSzFkkiL9x6T3D/ukjFf0f9MBJ5/VozUJ4DA08+ADoIRQOAF4xX1aGsF3vqNP//S65sIvPGHT5IJB7xPXnh5powYNljOnXSCTDhw7+oUBF7wI4PAC86MGXoEEHh67KlsRgCBZ8aNWToE4hZ4lV32vNkuXS+tlGxbXpoOHFr9Jx93CJQ9e7ekuyTdnsCrfCoPuBnVnJP8uifdRP5B4EWOmAIhEkDghQiTpWIhgMCLBbPvIqoCb+nyVTJ/0dJNmt1m9AgZOnjdrRN9BV7J++bg21f/Qo748AFy0H57yKNPzpDJV94kv7/tu9Ur8pav6fa9eQauI9DWnJeunpIUvF98IGA7gXw2Ky3elRmrO3psb5X+IFAlMLC1Qdq9B/UXS3yN5UjYT2CQd15z2Yysai94Z3adkOEDgS0R6Ogpy/z22v9NHtyYkeHe95dxfAYPaJRVa7uF0xoHbWrUS6CpIVf9Gtvexfex9bJkfjwEhrY14ljiQe2rSiWPMD+Zsvfxu+D0p1+UaQ88vsnwE4/5sBwwfvfq72/uCry+E07/6lXyqaMmytEfPUg6uop+yzPuHwQaG7JSLFZue/AdHewgoEbA83eSz2Wlu4AMUQuBwoEINHlfYwvefWX4u0DYGKxEoLkxV31HWGd3Ufx/R6fULGWtILDGOyvvrqmVEcO95yGO8mRwHJ/mxqx0ed8TcF7joE2Negnkc5nq19iCJ775QMAFAi1NORyLRUFV8gjzE0jg+SncV+C1d3TJzFlzZO89du6d/tmzr5BTTzhcPnbI/txC6wdqnzHcQmsAjSlqBLiFVg09hQ0JcAutITimqRDQuIVWZaMUDY1ARUNUb6H1fhhc+XALbWhoWSiBBLiFNoGhJnxL3EJrV8Cqt9D6QdFX4K1YuUYOO+l8mfKds+UD++0plav4vv6dG+W+26+S4UMHIfD8QEXgGVBiii0EEHi2JEEffgkg8PySYpwNBBB4NqTgXg+8xMK9zOhYhwACT4c7Vc0JIPDM2UUx01qB99D05+T8y26QyvXwhZ6iNDTkZaftRsvdN18ujz41Q665YaosWrpCKs/Lu+Csk+XAffeo8uElFsGPCVfgBWfGDD0CCDw99lQ2I4DAM+P
GrGgIFLx7uVcWCjK0sXGzbwpF4EXDnVWjI8BLLKJjy8rhE0Dghc+UFaMlgMCLlm/Q1a0VeEE3sn48Ai84OQRecGbM0COAwNNjT2UzAgg8M27MCp/AnLUd8ueFS6SrWJTWfF4O9X4IOrqluaYQAi987qwYLQEEXrR8WT1cAgi8cHmyWvQEEHjRMw5SAYEXhFZCxyLwEhpsQreFwEtosAneFgIvweE6trWpb8+RNYUNLxsY2dwkx263NQLPsRxpt5YAAo8T4RIBBJ5LadFrhQACz65zgMCzKw+VbhB4KtgpakgAgWcIjmlqBBB4augpvBGBDu9xJL+e9W4Nk6z3KsQzxu6AwOOkOE0Aged0fKlrHoGXusid3zACz64IEXh25aHSDQJPBTtFDQkg8AzBMU2NAAJPDT2F+xB4xLt99o1Va3p/971DBslBI4ch8DgpThNA4DkdX+qaR+ClLnLnN4zAsytCBJ5deah0g8BTwU5RQwIIPENwTFMjgMBTQ0/hPgR6SmV5ZeUqWdjZJdt4z74bN3jgJi+y4Bl4HBvXCCDwXEss3f0i8NKdv4u7R+DZlRoCz648VLpB4Klgp6ghAQSeITimqRFA4Kmhp7ABAQSeATSmqBJA4Knip3hAAgi8gMAYrk4AgaceQU0DCDy78lDpBoGngp2ihgQQeIbgmKZGAIGnhp7CBgQQeAbQmKJKAIGnip/iAQkg8AICY7g6AQSeegQIPLsi0O8GgaefAR34J4DA88+KkXYQQODZkQNd+COAwPPHiVH2EEDg2ZMFnfRPAIHXPyNG2EUAgWdfHmF2lCl7nzAXDLrWvKUdQaekfjwCL/VHwCkACDyn4qJZjwACj2PgEgEEnktp0WuFAAKPc+ASAQSeS2nRa4UAAs+uc8AttHblodINAk8FO0UNCSDwDMExTY0AAk8NPYUNCCDwDKAxRZUAAk8VP8UDEkDgBQTGcHUCCDz1CGoaQODZlYdKNwg8FewUNSSAwDMExzQ1Agg8NfQUNiCAwDOAxhRVAgg8VfwUD0gAgRcQGMPVCSDw1CNA4NkVgX43CDz9DOjAPwEEnn9WjLSDAALPjhzowh8BBJ4/ToyyhwACz54s6KR/Agi8/hkxwi4CCDz78gizI56BFybNmNZC4MUEmjKhEEDghYKRRWIkgMCLETal6iaAwKsbIQvETACBFzNwytVFAIFXFz4mKxBA4ClA30JJbqG1Kw+VbhB4KtgpakgAgWcIjmlqBBB4augpbEAAgWcAjSmqBBB4qvgpHpAAAi8gMIarE0DgqUdQ0wACz648VLpB4Klgp6ghAQSeITimqRFA4Kmhp7ABAQSeATSmqBJA4Knip3hAAgi8gMAYrk4AgaceAQLPrgj0u0Hg6WdAB/4JIPD8s2KkHQQQeHbkQBf+CCDw/HFilD0EEHj2ZEEn/RNA4PXPiBF2EUDg2ZdHmB3xDLwwaca0FgIvJtCUCYUAAi8UjCwSIwEEXoywKVU3AQRe3QhZIGYCCLyYgVOuLgIIvLrwMVmBAAJPAfoWSnILrV15qHSDwFPBTlFDAgg8Q3BMUyOAwFNDT2EDAgg8A2hMUSWAwFPFT/GABBB4AYExXJ0AAk89gpoGEHh25aHSDQJPBTtFDQkg8AzBMU2NAAJPDT2FDQgg8AygMUWVAAJPFT/FAxJA4AUExnB1Agg89QgQeHZFoN8NAk8/AzrwTwCB558VI+0ggMCzIwe68EcAgeePE6PsIYDAsycLOumfAAKvf0aMsIsAAs++PMLsiGfghUkzprUQeDGBpkwoBBB4oWBkkRgJIPBihE2pugkg8OpGyAIxE0DgxQyccnURQODVhY/JCgQQeArQt1CSW2jtykOlGwSeCnaKGhJA4BmCY5oaAQSeGnoKGxBA4BlAY4oqga2GNMviVZ1SKqm2QXEI+CKAwPOFiUEWEUDgWRSG1woCz648VLpB4Klgp6ghAQSeITimqRFA4Kmhp7ABAQSeATSmqBJA4Knip3hAAgi
8gMAYrk4AgaceQU0DCDy78lDpBoGngp2ihgQQeIbgmKZGAIGnhp7CBgQQeAbQmKJKAIGnip/iAQkg8AICY7g6AQSeegQIPLsi0O8GgaefAR34J4DA88+KkXYQQODZkQNd+COAwPPHiVH2EEDg2ZMFnfRPAIHXPyNG2EUAgWdfHmF2xEsswqQZ01oIvJhAUyYUAgi8UDCySIwEEHgxwqZU3QQQeHUjZIGYCSDwYgZOuboIIPDqwsdkBQIIPAXoWyjJLbR25aHSDQJPBTtFDQkg8AzBMU2NAAJPDT2FDQgg8AygMUWVAAJPFT/FAxJA4AUExnB1Agg89QhqGkDg2ZWHSjcIPBXsFDUkgMAzBMc0NQIIPDX0FDYggMAzgMYUVQIIPFX8FA9IAIEXEBjD1Qkg8NQjQODZFYF+Nwg8/QzowD8BBJ5/Voy0gwACz44c6MIfAQSeP06MsocAAs+eLOikfwIIvP4ZMcIuAgg8+/IIsyOegRcmzZjWQuDFBJoyoRBA4IWCkUViJIDAixE2peomgMCrGyELxEwAgRczcMrVRQCBVxc+JisQQOApQN9CSW6htSsPlW4QeCrYKWpIAIFnCI5pagQQeGroKWxAAIFnAI0pqgQQeKr4KR6QAAIvIDCGqxNA4KlHUNMAAs+uPFS6QeCpYKeoIQEEniE4pqkRQOCpoaewAQEEngE0pqgSQOCp4qd4QAIIvIDAGK5OAIGnHkGyBZ5deOkGAhCAAAQgAAEIQAACEIAABCAAAQhAAAJ2EVB/Bp5dOOgGAhCAAAQgAAEIQAACEIAABCAAAQhAAAJ2EUDg2ZUH3UAAAhCAAAQgAAEIQAACEIAABCAAAQhAoIYAAo8DAQEIQAACEIAABCAAAQhAAAIQgAAEIAABiwkg8CwOZ3OtLVuxWiZfcZMsWLxc7rnlit4hJ515mbw6c7ZIJlP9vUFtrfLY3dc5tjvaTRqBnmJRfviz38ovp94vj//uRzJ08MDeLf78jvtk6rQ/SXehRz46YT/51pdPkXwulzQE7MdRAt3dBRl/+CRpaMj37uDQD46X719ylqM7ou0kEuDraBJTTe6e+F41udkmaWd/9/4+9dWLr5cP/ctectG5p/ZubfrTL8k1N06VxUtWyJ7jdpIrvzlJRgwbnKStsxcHCfwzN/D9m34jt/zmAclms727uvPGi2W3nbdzcJe0vDEBBJ5D52Fte6ec7Im6iQftI4/+ZUaNwDvq1Mky5bJzZOxO2zi0I1pNOoFzLpwi48ZuLzfefk9VKK8XeM+88KpcfM3NcvuPLpTWliY556Lr5CMfer985riPJB0J+3OEwJJlK+XY0y+UJ373Y0c6ps20EeDraNoSd3+/fK/qfoZJ38ELL8+Uy394e/XvUwMHtPYKvNVr2uWIUy6Q6688tyrvrv/lNHn73fnyg0vPTjoS9mcxgS25gUuvvUV2ec92/N3K4vxMW0PgmZJTmNfe0SmVv1RWfl1y7a01Am/i8V+RO2+6WEaPHKbQGSUhsHkCr77xTlXg7XXo6TUC7zs/uE1Gjxomk045ujrxz0++ILfc+YDcOuWboISAFQRmvTNfzpz8A3ngjqut6IcmINCXAF9HOROuEeB7VdcSS1+/78xdWL2q7rb/+kP171vrr8B78JFn5K77HpOffu/8KpSK0Dv4uC/LM/fdII2NDekDxY6tILAlN3D+ZTfIxAP3lk8c/gEreqWJ8Agg8MJjGdtKz7/0+iYCr3Kr14QD3ieVnxxV/ofn3EknyATvDy0fCNhAoK/A+8J5V8tJxx4qh3m3zlY+b3my5PRzr5JH/3uKDe3SAwTkxb+9Wb0y9D07bC0z35oru43dTr597mmy43ajoQMBKwjwddSKGGgiAAG+Vw0Ai6GqBG687Z4agXfT7b+XpctXeo97+WxvXxM8gXfbdd/i+wLVpCheIbA5N/BvF1wrpVJZ3p6zQCoP2Pr0Jw7pvXACam4TQOBZlt/S5atk/qKlm3S1zegRvbc
f9v1DWvnD+e2rfyFHfPgAOWi/PeTRJ2fI5Ctvkt/f9l2uyLMs36S109nVLW+8PXeTbQ0Z1Cbbbj2y9/f7CrxTzrpcvnTaMXKwJ50rn/kLl8onz7hInvZ+kskHAnER2NL57fLO9u2//aOcduLHpPL19ye3TJNHn5oh0355eVztUQcCWyTA11EOiEsE+F7VpbTota/AqzzPuVgsyXlfOrEXzuEnnS/XXf7l6p0mfCCgSWBzAu+nv/q9DPSeif+pIyfIbO/K0orQ++Y5p/RePKHZL7XrI4DAq49f6LOnP/2iTHvg8U3WPfGYD8sB43ev/v7m/pD2nXD6V6+STx01UY7+6EGh98iCEFhPYN6CJXKt95DUvp/3v2+3mmcu9BV4/3r+9+TTRx8iHztk/+rUNz0JWPm9P//2h8CFQGwE/J7fSkOFnqLsf8QX5Q9Tr5VRI4bE1iOFIPDPCPB1lLPhOgG+V3U9weT231fgVWTIIu/lFRu/1OJDx54jd/zkItl+m62SC4KdOUHAjxv4ya2/k4WLl8ml55/uxJ5o8p8TQOA5eDr6/iFt7+iSmbPmyN577Ny7m8+efYWcesLhvYLEwW3ScoII9BV4V173K+9NyQPk7DOOq+7y3j8+JdMefFx+fs3XE7RrtuIygcVLV8iq1Wtl5x3XvRio8lba93sCb+OXsbi8P3p3nwBfR93PME074HvVNKXt/l77Crw/Pvasd1X+H6q3zFY+CxcvlyM/+w15+n9ukHwu5/6G2YHTBDYn8Cq/t+duO/U+o3HKz++qPrtxYwnt9KZT3DwCz8Hw+/4hXbFyjRzmXcY95Ttnywf221MqV/F9/Ts3yn23XyXDhw5ycIe0nDQCfQVe5Qxf4J3RX11/kQxoaZbKs5xO/uRH5LiPH5y0rbMfRwlUvo5W3uB1q/fNeuXlQNffcrc8+ewrMvWG/3B0R7SdNAJ8HU1aosneD9+rJjvfpO2ur8CrvO3z8JPPl2sv/nfZb+/d5Mrrfi0d3gUU3/3WpKRtnf04SGBzAu+kL11afR7+l047VubMXySf9541fun5Z3iPL9rLwR3S8sYEEHgOnYeHpj8nlTfKSLlcvZ2roSEvO3kPVL/75surz2a65oapssi7aqTyvKYLzjpZDtx3D4d2R6tJI1D5Zv2QE86tbqtQ6Kme18rnoTuvrb5o5ZdT75fb7/pD9ZkiR37kQPn6mSdJNlt5zCofCNhB4Ge/vlfuuPsh6fKuvqv8FPPi8z5f/frKBwK2EODrqC1J0IcfAnyv6ocSYzQJXPXjO2Tq7x72Hv5f8v66VZacd3Xdp4+eKBd+5VTvh3gvy1U/uqP6cot99hwrV06eJEMGt2m2S+2UE9iSG3j73QXeSy9vkVffeMe766m1emde5Rcf9wkg8NzPkB1AAAIQgAAEIAABCEAAAhCAAAQgAAEIJJgAAi/B4bI1CEAAAhCAAAQgAAEIQAACEIAABCAAAfcJIPDcz5AdQAACEIAABCAAAQhAAAIQgAAEIAABCCSYAAIvweGyNQhAAAIQgAAEIAABCEAAAhCAs6ckogAAAT5JREFUAAQgAAH3CSDw3M+QHUAAAhCAAAQgAAEIQAACEIAABCAAAQgkmAACL8HhsjUIQAACEIAABCAAAQhAAAIQgAAEIAAB9wkg8NzPkB1AAAIQgAAEIAABCEAAAhCAAAQgAAEIJJgAAi/B4bI1CEAAAhCAAAQgAAEIQAACEIAABCAAAfcJIPDcz5AdQAACEIAABCAAAQhAAAIQgAAEIAABCCSYAAIvweGyNQhAAAIQgAAEIAABCEAAAhCAAAQgAAH3CSDw3M+QHUAAAhCAAAQgAAEIQAACEIAABCAAAQgkmAACL8HhsjUIQAACEIAABCAAAQhAAAIQgAAEIAAB9wkg8NzPkB1AAAIQgAAEIAABCEAAAhCAAAQgAAEIJJgAAi/B4bI1CEAAAhCAAAQgAAEIQAACEIAABCAAAfcJ/B/MPAK1AUCKIwAAAAB
JRU5ErkJggg==", + "text/html": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# We humans find it easier to visalize things in 2D!\n", + "# Reduce the dimensionality of the vectors to 2D using t-SNE\n", + "# (t-distributed stochastic neighbor embedding)\n", + "\n", + "tsne = TSNE(n_components=2, random_state=42)\n", + "reduced_vectors = tsne.fit_transform(vectors)\n", + "\n", + "# Create the 2D scatter plot\n", + "fig = go.Figure(data=[go.Scatter(\n", + " x=reduced_vectors[:, 0],\n", + " y=reduced_vectors[:, 1],\n", + " mode='markers',\n", + " marker=dict(size=5, color=colors, opacity=0.8),\n", + " text=[f\"Video: {t}
Text: {d[:100]}...\" for t, d in zip(video_numbers , documents)],\n", + " hoverinfo='text'\n", + ")])\n", + "\n", + "fig.update_layout(\n", + " title='2D Chroma Vector Store Visualization',\n", + " scene=dict(xaxis_title='x',yaxis_title='y'),\n", + " width=800,\n", + " height=600,\n", + " margin=dict(r=20, b=10, l=10, t=40)\n", + ")\n", + "\n", + "fig.show()" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "e1418e88-acd5-460a-bf2b-4e6efc88e3dd", + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.plotly.v1+json": { + "config": { + "plotlyServerURL": "https://plot.ly" + }, + "data": [ + { + "hoverinfo": "text", + "marker": { + "color": [ + "#f8d349", + "#d6d07a", + "#a958c9", + "#7341ee", + "#268bba", + "#4862ce", + "#dd8cd7", + "#6a6c06", + "#8a29da", + "#0d2037", + "#805527", + "#e69670", + "#75b5e3", + "#796278", + "#6d4052", + "#1f6ab0", + "#99fe53", + "#3f0a72", + "#fe8e92", + "#c3e1f2", + "#f645e0", + "#b43417", + "#e0a8df", + "#7740be", + "#43c2e8", + "#64f999", + "#2cde7f", + "#29fa15", + "#580c96", + "#10384a", + "#845aa9", + "#7f03bd", + "#2b3af3", + "#335dcf", + "#22398f", + "#c932c1", + "#d43c00", + "#e6f378", + "#08808d", + "#6a0fce", + "#e1b5db", + "#75195e", + "#6ff3c5", + "#4099c1", + "#b25d7b", + "#d65c3a", + "#9b9d6e", + "#fc2b74", + "#571122", + "#422abb", + "#efed10", + "#dfc6c7", + "#02cada", + "#3ec815", + "#8e8cab", + "#df5d2e", + "#c457d7", + "#ec0a37", + "#da28db", + "#2d7f7d", + "#b27d2e", + "#d01b19", + "#fb9dce", + "#35303c", + "#4f86b8", + "#fbfef2", + "#ca3592", + "#c1e3c5", + "#c97596", + "#091a90", + "#b280bb", + "#7b4427", + "#b2140a", + "#dbde1c", + "#7ea8e9", + "#539908", + "#8069bc", + "#d01f72", + "#4ce72d", + "#73e76a", + "#20f2c3", + "#996ff1", + "#91f4db", + "#d70d97", + "#3678a7", + "#5af098", + "#ae5204", + "#badd6d", + "#a9541c", + "#d4b1ce", + "#51d0da", + "#ff2d6a", + "#1c2c7e", + "#ae7afe", + "#d156c8", + "#480c89", + "#e2a239", + "#39821f", + "#7bee34", + "#92b4fa", + "#b9fd23", + 
"#591ab9", + "#0bdacc", + "#2a2d25", + "#dc152c", + "#ac9648", + "#6ad041", + "#fe62a5", + "#52b6df", + "#4aaf9f", + "#d34482", + "#2fef1a", + "#7dd58b", + "#987252", + "#94a85d", + "#2b9f18", + "#ee26df", + "#c6016b", + "#9df332", + "#9b5e28", + "#2ebca4", + "#1b312a", + "#2e1afc", + "#574e28", + "#ac55ba", + "#f090af", + "#5cb9ca", + "#2dcfac", + "#804ce2", + "#ce865d", + "#3e5237", + "#482281", + "#2ae342", + "#6df6ca", + "#85fa26", + "#793548", + "#bbfe83", + "#15ae86", + "#70d1d9", + "#bb0ee6", + "#a95826", + "#8afd40", + "#505bd9", + "#0c777d", + "#ed694d", + "#4e797a", + "#dc95ec", + "#612b32", + "#ad8b14", + "#474ff9", + "#71c500", + "#bd53b1", + "#11a70e", + "#144ada", + "#72e048", + "#188ca3", + "#b52bf6", + "#b64eac", + "#f59c06", + "#b1c27d", + "#ac5faf", + "#5b3f83", + "#108c41", + "#b61e76", + "#22463b", + "#c959de", + "#a64739", + "#659222", + "#0f8781", + "#2c168d", + "#0faf59", + "#68bece", + "#696eaa", + "#af0f59", + "#a9e927", + "#601568", + "#9780cf", + "#e01073", + "#dd889c", + "#046e5c", + "#c6eff5", + "#b3dba5", + "#426575", + "#913568", + "#de30e4", + "#50f10d", + "#9a5ba2", + "#cc8ec0", + "#79c82a", + "#9baca0", + "#1a5613", + "#246fa5", + "#cb725f", + "#682d42", + "#a03134", + "#d54222", + "#01f59b", + "#12897b", + "#74a788", + "#fcdcad", + "#048452", + "#3626a5", + "#4dfb77", + "#4212f1", + "#116019", + "#ad6bd0", + "#a63fa4", + "#d24e5d", + "#1a6fdf", + "#6f745a", + "#cf7e83", + "#4b9a93", + "#799a24", + "#e6e164", + "#011995", + "#4c4355", + "#d937bd" + ], + "opacity": 0.8, + "size": 5 + }, + "mode": "markers", + "text": [ + "Video: 59506507
Text: Well, I realized that was a whole lot of theory, but I hope it gave you a good intuition that will\nb...", + "Video: 59671315
Text: Okay, so here we are, back in Jupyter Lab, ready for the next use of a frontier model, and see this\n...", + "Video: 60616895
Text: It feels like 100 videos ago that I told you that we were going to have instant gratification with o...", + "Video: 60619275
Text: And we will conclude our expedition into the world of frontier models through their chat interface b...", + "Video: 59472693
Text: Friends.\nI am absolutely exhausted.\nI am exhausted and a little tiny bit traumatized.\nAnd you are so...", + "Video: 59670121
Text: So it's business time right now.\nWe are going to build a Rag pipeline to estimate the price of produ...", + "Video: 59295619
Text: Welcome back to the the moment when we bring it all together into a beautiful user interface.\nBut fi...", + "Video: 60617163
Text: And already that wraps up day two.\nNow that you have built that solution.\nAnd congratulations on tha...", + "Video: 60616423
Text: So I hope you've just enjoyed yourself experimenting with different LMS locally on your box using th...", + "Video: 59170227
Text: Welcome back to Google Colab.\nHere we are ready to explore the wonderful world of Tokenizers.\nSo, uh...", + "Video: 59169985
Text: So I hope you enjoyed that whirlwind tour of Google Colab.\nHere's just a little screenshot example o...", + "Video: 60616927
Text: It's time for our first LM experiment at this point.\nSo some of this you may know well, you may know...", + "Video: 59673721
Text: And here we are in JupyterLab for the last time, and we are looking here at day five, the last day\no...", + "Video: 59508055
Text: I'm so very happy that you've reached this epic moment in the course and that you're hanging in ther...", + "Video: 59670259
Text: It's remarkable.\nBut you are now at the 95% point.\nThere's 5% remaining of this course.\nUh, maybe it...", + "Video: 60616623
Text: So we're now going to start week one of the course when we are going to be looking at exploring fron...", + "Video: 59472383
Text: And welcome back to the week six folder.\nWe're now at day two, which is the second and final stage o...", + "Video: 59670171
Text: So as the very final step on this part four of day two of week eight, we are now going to build an\ne...", + "Video: 59297721
Text: And so now the time has come to talk about the most crucial aspect of Rag, which is the idea of vect...", + "Video: 59297599
Text: Well, that was a sneaky detour I took you on in the last one.\nI hope you enjoyed it though, and I ho...", + "Video: 59507635
Text: Look, I hope you're excited.\nYou really should be.\nYou've been through 80% of the course and it's al...", + "Video: 59669375
Text: Here we are for the day.\n2.1 notebook.\nAnd don't let it be said that I don't ever do anything for yo...", + "Video: 59297733
Text: Welcome back to JupyterLab and welcome to your first experiment with the world of Long Chain.\nLet me...", + "Video: 59670369
Text: It is terrific that you're hanging on in there and making such great progress with this course.\nAs w...", + "Video: 59166281
Text: And with that, amazingly, you completed day one of week two already and that gets you to the 15% poi...", + "Video: 59671567
Text: Well, the first thing you're going to notice is that I don't have a notebook open for you.\nAnd that'...", + "Video: 59297593
Text: And welcome to continuing our journey with Hrag.\nAnd today it's time to unveil Liang Chen.\nSo first,...", + "Video: 59166461
Text: And welcome back to the lab.\nHere we are in Jupyter Lab and we are going to go into week two.\nAnd we...", + "Video: 59167007
Text: Well, how fabulous is that?\nI hope that you are as wowed as I am by our new airline, I assistant and...", + "Video: 59508121
Text: The moment has arrived.\nHere we go.\nWe're in fine tuning.\nWe do fine tuning.\nTrain.\nThere is also a ...", + "Video: 59295579
Text: All right.\nAre you excited to see how this goes?\nLet's give it a try.\nSo in this next section, I cre...", + "Video: 60620375
Text: And with that, we've reached an important milestone.\nThe first week of our eight week journey is com...", + "Video: 59472491
Text: Welcome back.\nIf you are following along with me in JupyterLab, as I hope you are, then you will nee...", + "Video: 59472425
Text: Welcome to week six, day three.\nToday is going to be a day that you will either love or you will hat...", + "Video: 59508057
Text: Actually slight change in plan.\nI'm going to wrap up the day.\nDay three at this point, and say that ...", + "Video: 60619577
Text: And for the final piece of background information, I wanted to take another moment to talk about API...", + "Video: 59170291
Text: Welcome back to Colab and welcome back to our business project.\nSo again our assignment, we are due ...", + "Video: 60619651
Text: I mentioned before an AI company called vellum.\nWhen we were talking about the different questions, ...", + "Video: 59473191
Text: And you thought we'd never get here.\nHere we are in Jupyter Lab, running our fine tuning for a front...", + "Video: 59170297
Text: And here we are in Google Colab, ready for fun with models.\nSo first we do the usual Pip installs an...", + "Video: 59167015
Text: Welcome back to Jupyter Lab and welcome to Day Five's Lab.\nAnd this is going to be lots of creativit...", + "Video: 59170043
Text: Let me enthusiastically welcome you all back to week three of our LLM engineering journey.\nIf you en...", + "Video: 59473147
Text: Well, I'm very relieved.\nI've got that behind me.\nNo more human testing for me.\nWe'll have one final...", + "Video: 59166453
Text: Welcome back and welcome to our continuing JupyterLab experience.\nUh, I'm hopefully going to keep yo...", + "Video: 59166915
Text: Welcome back to the wonderful world of JupyterLab.\nAnd here we are in week two.\nDay three.\nUh, bring...", + "Video: 59667365
Text: Here we are back in Colab, looking at the week seven, day five of the Colab notebooks and I'm on a\nT...", + "Video: 60616845
Text: We're on the home stretch.\nThis is the final step in the environment setup, and it's an easy one.\nIt...", + "Video: 59295459
Text: And welcome back to More Leaderboard Fest as we go through some more leaderboards.\nBut this time we'...", + "Video: 59471979
Text: So we now turn to the parts of the problem, which is perhaps, let's say, not as glamorous as some\nof...", + "Video: 59503705
Text: And so now we talk about quantization the q and q Laura.\nQ stands for quantized quantized.\nLaura.\nAn...", + "Video: 59472505
Text: So the good news is that this is the very final video about data set curation.\nYou were probably fed...", + "Video: 59669217
Text: And welcome to the next part of visualizing the data.\nAnd just very quickly to show it to you in 3D....", + "Video: 59671221
Text: I gotta tell you, I don't like to toot my horn a whole lot, but I do think that I've done a great\njo...", + "Video: 59503703
Text: Well.\nHello there everybody.\nI am so grateful that you've made it through to the start of week seven...", + "Video: 59473201
Text: Well, before we do a postmortem on what happened, let's just quickly look at the standing the rankin...", + "Video: 60622463
Text: In this video, we're going to set up a full data science environment for Mac users.\nIn the next vide...", + "Video: 60619299
Text: Well, I hope you found that both educational and enjoyable.\nAs we went through and learned so much a...", + "Video: 59295607
Text: So to revisit then the solution that we built in the previous day and talk about the metrics.\nAs I s...", + "Video: 59297575
Text: Well, welcome to the final part on rag.\nAnd this is the session where you go from being a rag expert...", + "Video: 59507687
Text: It's time for action, everybody.\nWe've set up our colab.\nHere we are, week seven, day three.\nWe've g...", + "Video: 59671441
Text: And welcome once more to our favorite place to be Jupyter Lab, the Paradise for a data scientist exp...", + "Video: 59673431
Text: And here we have it.\nThe user interface is completed.\nThe extra notification came through on my phon...", + "Video: 59473137
Text: Let's get straight to it.\nSo the place where you can see everything that's going on and get knee dee...", + "Video: 59166421
Text: Welcome back to the radio day in the lab.\nMore to do.\nLet's keep going.\nWhere we left off is we had ...", + "Video: 59295599
Text: Welcome to the Jupyter Lab for day four.\nIt's going to look very familiar because it's actually I've...", + "Video: 59669631
Text: Here we are in our favorite place to be in JupyterLab, ready for some coding and a lot of coding tha...", + "Video: 59673663
Text: But wait, there's more.\nWe need to add some more to the user interface just to make it look more coo...", + "Video: 59506929
Text: And we return to the hugging face open LLM leaderboard.\nThe first place you go when selecting your b...", + "Video: 59504785
Text: So at this point we're going to talk about hyperparameters.\nAnd we're going to introduce three of th...", + "Video: 59505337
Text: So we're now going to look at four bit quantization, the rather remarkable effect of reducing the pr...", + "Video: 59271655
Text: So here we are on Hugging Face's main landing page at Hugging Face Core.\nA URL you know.\nWell, since...", + "Video: 59472883
Text: Okay, time to reveal the results.\nIt has run to completion.\nAnd here it is.\nSo a moment to pause.\nIt...", + "Video: 59673639
Text: And welcome now to the code for our user interface, which we will find in this Python module.\nPrice ...", + "Video: 59472463
Text: So last time we looked at a humble linear regression model with feature engineering, and now we say\n...", + "Video: 59297595
Text: So by the time you're watching this, hopefully you have played yourself with vectors.\nYou've created...", + "Video: 60619149
Text: So we're going to start our exploration into the world of frontier models by playing with the famous...", + "Video: 59297735
Text: And at last the time has come to see rag in action.\nAfter all of this talk, and here we are.\nWe're i...", + "Video: 60616407
Text: And now over to my Mac people.\nAnd I have news for you.\nIt's exactly the same thing.\nYou go to a fav...", + "Video: 59170235
Text: So here we are in Google Colab for our first collaborative session on the cloud using a GPU box.\nOn ...", + "Video: 59472067
Text: So we've covered steps 1 to 4 of the five step strategy.\nAnd that brings us to step five, which is p...", + "Video: 59472011
Text: Welcome everybody.\nSo in the past I've said quite a few times, I am excited to start this this week ...", + "Video: 59295553
Text: Welcome back.\nIn the last part, we gave our GPT four and clawed the challenge of converting a simple...", + "Video: 59297773
Text: Well, I hope you're eager with anticipation for this session in JupyterLab as we finally get to see\n...", + "Video: 59295583
Text: And here we are back in JupyterLab.\nIt's been a minute.\nWe've been working in Colab for last week, a...", + "Video: 59507329
Text: Okay.\nIt's moment of truth time.\nI have just taken our class tester.\nYou remember this class?\nUh, it...", + "Video: 59295429
Text: Continuing our investigation of benchmarks, and this will become more real when we actually see some...", + "Video: 60595637
Text: Here we are back in the Colab, which has been running overnight for me and probably for you too, I\nh...", + "Video: 59668027
Text: And so here we are at the home page for modal.\nAt modal.com spelt model not not model which is confu...", + "Video: 59295527
Text: I'm so happy to welcome you to week four, day four on our journey to LLM Engineering and Mastery.\nHe...", + "Video: 59295377
Text: Just before we go on to some of the more advanced metrics, I want to mention for a second something\n...", + "Video: 59666211
Text: So before we try our new model and one more recap on the models so far and keep notes of this so we\n...", + "Video: 59170107
Text: And once again, it's that moment when you take a pause and congratulate yourself on another day of\ns...", + "Video: 60616833
Text: So I realized that day one of week one has been a pretty long day, and I assure you that the other,\n...", + "Video: 59472413
Text: Wonderful.\nWhere we left off is we had just created the Get Features function, which builds our feat...", + "Video: 59297561
Text: And would you believe at this point you're 55% of the way along the journey?\nUh, it's been a while s...", + "Video: 59669211
Text: Well, we took on a lot today and we seem to have been successful.\nThese red icons that you see on th...", + "Video: 59166981
Text: Welcome to week two, day five.\nThe last day of week two where a lot is coming together.\nI am so grat...", + "Video: 60619227
Text: And now let's move to Claude from anthropic, my favorite model and typically the favorite model of\nm...", + "Video: 60620395
Text: Welcome back to Jupyter Lab, where I want to show you the assignment, the homework exercise for you\n...", + "Video: 59665127
Text: Well hi there everybody.\nI'm not going to give you my usual song and dance about how excited you are...", + "Video: 59668923
Text: Well, welcome back to Jupyter Lab for what will be an epic finale to our time in this platform.\nAnd ...", + "Video: 59504887
Text: Well, here we are again in Google Colab.\nIt's been a minute since we were here, and welcome back to ...", + "Video: 59170165
Text: Welcome, everybody to the last day of week three.\nWeek three.\nDay five.\nWe're here already wrapping ...", + "Video: 60617251
Text: Congratulations are definitely in order.\nYesterday was a mammoth first day on this course and you go...", + "Video: 59166951
Text: All right, back to the lab.\nBack to our project.\nTime to work with tools.\nI am in the week two folde...", + "Video: 60619619
Text: Well, day four was an information dense day.\nI do hope that you learned some something useful here, ...", + "Video: 60616663
Text: Well.\nHi there, this is time for PC people to get set up.\nSo all you Mac people out there, you don't...", + "Video: 59508175
Text: So I'm taking a moment now to explain that the training costs of optimizing a model for this course\n...", + "Video: 59670087
Text: And welcome to part four of day two of week eight.\nUh, there's a lot happening this week, and I have...", + "Video: 59506713
Text: Hi everyone.\nSo the reason I'm so fired up about week seven is that this is the time when we actuall...", + "Video: 60620169
Text: Hopefully you found this super satisfying to be able to have this nice business result and have it c...", + "Video: 59295435
Text: Well, just before we wrap up, let me introduce this week's challenge and talk about what we're going...", + "Video: 59297609
Text: Last week, we worked with models that were able to speed up code by a factor of 60,000 times, which\n...", + "Video: 59507489
Text: Continuing our adventure through hyperparameters for training.\nThe next one is pretty crucial and it...", + "Video: 59295549
Text: And welcome back to our challenge again.\nAnd this time we are working with our beautiful prototype.\n...", + "Video: 59665129
Text: And now let me make this real for you by showing you some, some diagrams, particularly now looking\na...", + "Video: 59169991
Text: Okay, so that was your introduction to Hugging Face.\nAnd now I'm going to turn to a different resour...", + "Video: 59472027
Text: And now the time has come to curate our data set.\nAnd the way we're going to do this is we're going ...", + "Video: 59472307
Text: Welcome to week six.\nDay two a day.\nWhen we get back into the data, we look back in anger at our dat...", + "Video: 59508289
Text: So here we are now, back in the Colab, in the same one that we kicked off in the previous day.\nIt's ...", + "Video: 59472333
Text: Thank you for putting up with me during my foray into traditional machine learning.\nI think it was u...", + "Video: 59295431
Text: Now I want to take a quick moment to give you a flyby of five different ways that llms are used comm...", + "Video: 59673449
Text: Well, I have to tell you that I'm a little bit sad.\nThis is the beginning of the beginning of the en...", + "Video: 59669389
Text: Well.\nHi there.\nSo you've made it to day two of week eight, and I am super grateful that you've been...", + "Video: 59170057
Text: And so at the beginning of this week, we started by talking about hugging face pipelines.\nAnd you us...", + "Video: 59166949
Text: Welcome back to making chatbots.\nLet's keep going.\nSo for the next part we're going to beef up the s...", + "Video: 59473019
Text: Welcome back to an action packed time of of training.\nSo now, after waiting about five minutes when ...", + "Video: 59297585
Text: Before we move on, let me show you one more time this fabulous slide that describes the simple three...", + "Video: 59170255
Text: And welcome back to us continuing our journey through the model class in Hugging Face Transformers l...", + "Video: 60614589
Text: So we're now going to run a large language model directly on your box using a platform called llama,...", + "Video: 59297601
Text: I'm not going to lie, at this point you have every reason to be impatient with me.\nWe've been yammer...", + "Video: 60616629
Text: And welcome back to team PC and Team Mac as we come back together again for a quick video.\nIn this o...", + "Video: 59297749
Text: It's always welcome back to JupyterLab, my favorite place to be.\nAnd now we are, of course in the we...", + "Video: 59170135
Text: Welcome.\nIt's week three.\nIt's day four.\nWe are back on the adventure in open source land, back inve...", + "Video: 59472017
Text: And this is the first time that we'll be coding against our big project of the course.\nWelcome to Ju...", + "Video: 59507017
Text: Welcome to Colab.\nWelcome to the week seven day two Colab.\nAnd just before we try our base model, we...", + "Video: 60619883
Text: And now we've arrived at an exciting moment in our first week.\nThe conclusion of the first week is w...", + "Video: 59508297
Text: What more is there to say, really?\nTomorrow is the day for results.\nA day that very excited indeed a...", + "Video: 60619247
Text: We're going to spend a little bit more time with GPT just to try out a few more interesting things.\n...", + "Video: 59504769
Text: Without further ado, we're going to get stuck into it.\nTalking about Laura.\nLow rank adaptation.\nAnd...", + "Video: 59170233
Text: Welcome back to our continued exploits with Tokenizers.\nWhat we're now going to look at is what's ca...", + "Video: 59671231
Text: And here we are back in the Jupyter Lab for the fast approaching conclusion with a really great proj...", + "Video: 60620397
Text: Well, that's a fantastic result to have now arrived towards the end of week one and having completed...", + "Video: 59170093
Text: I'm delighted to see you again.\nAs we get started with day three of week three of our adventure and ...", + "Video: 59473089
Text: Welcome back.\nSo hopefully you are still impressed by the GPT four mini results.\nThe frontier model ...", + "Video: 60395261
Text: Let's keep going with our project to equip our LM with a tool.\nWe just created this piece of code to...", + "Video: 60617259
Text: I'm excited to introduce you to your first exercise, and I'm looking forward to seeing what you make...", + "Video: 59507313
Text: And it's this time again, when we look at the podium of how our models are performing across the boa...", + "Video: 60619721
Text: Now it's time to talk for a minute about tokens.\nTokens are the individual units which get passed in...", + "Video: 59295451
Text: I know that everybody.\nIt seems like just the other day that we were embarking on our quest together...", + "Video: 59166919
Text: And with that, it concludes our session on tools.\nAnd at this point, you are probably an expert on t...", + "Video: 59295441
Text: Okay, so welcome to our leaderboard fast as we go through a ton of essential leaderboards for your\nc...", + "Video: 59295541
Text: And welcome back.\nYou've just seen GPT four zero spectacularly failed to work on our hard Python con...", + "Video: 59473101
Text: Welcome back.\nSo about ten minutes later, maybe 15 minutes later, the run has completed.\nAnd how do ...", + "Video: 59507423
Text: So you may remember eons ago when we were building our data set.\nAt the end of that, we uploaded our...", + "Video: 59295545
Text: I really hope you've enjoyed this week.\nWe've got tons done.\nWe've experimented with all sorts of ne...", + "Video: 59472503
Text: Welcome back to Jupyter Lab.\nLast time, we looked at some silly models for predicting the price of p...", + "Video: 60614591
Text: The mantra of this course is that the best way to learn is by doing, and we will be doing stuff toge...", + "Video: 59473021
Text: Welcome to our favorite place to be to JupyterLab.\nHere we are again now in day three.\nIn week six.\n...", + "Video: 60617255
Text: I'm now going to talk for a bit about models.\nA term you often hear is the term frontier models, whi...", + "Video: 59667829
Text: Well.\nHello there.\nLook, I know what you're thinking.\nYou're thinking I peaked too early.\nLast week ...", + "Video: 59505329
Text: Welcome back.\nYou may, like me, have just gone off and got a coffee while things loaded back up agai...", + "Video: 59669049
Text: So what you just saw was an ephemeral app, as it's called, which means just a temporary app that you...", + "Video: 60619439
Text: This now brings us to an extremely important property of LMS called the context window that I want t...", + "Video: 59668181
Text: And so it gives me great pleasure to introduce to you the project that I've lined up for you this we...", + "Video: 59472441
Text: Welcome back.\nSo we've been doing the thoroughly distasteful, unsavory work of feature engineering.\n...", + "Video: 59507785
Text: Well, I'm sure you're fed up of me saying that the moment of truth has arrived, but it really has.\nT...", + "Video: 59295587
Text: When I left you, we had just created this simple user interface for converting from Python to C plus...", + "Video: 59166465
Text: Welcome back to the JupyterLab on Gradio day, so you'll remember where we left off.\nWe'd written two...", + "Video: 59473071
Text: Hey, gang.\nLook, I know what you're thinking.\nThis week was supposed to be training week.\nI set it a...", + "Video: 59295423
Text: Welcome to day two of week four, when we get more into leaderboards so that by the end of this, you'...", + "Video: 59297723
Text: So I know what you're thinking.\nYou're thinking, what's going on here?\nWe're on day five.\nWe're on d...", + "Video: 59166947
Text: Well, thank you for coming along for week two, day four.\nWe have lots of good stuff in store today.\n...", + "Video: 59666831
Text: Take one more moment to look at this very nice diagram that lays it all out, and we will move on.\nNo...", + "Video: 59295493
Text: And welcome to week four, day three.\nAs we are about to embark upon another business project which w...", + "Video: 60616855
Text: Now I know what you're thinking.\nWe've been building environments for so long.\nAre we not done yet?\n...", + "Video: 59506611
Text: So in a future day, I'm going to be training, fine tuning a model and creating a fine tuned model.\nA...", + "Video: 60616493
Text: I'll just take a quick moment to introduce myself to convince you that I am actually qualified to be...", + "Video: 59166317
Text: And welcome to week two, day two, as we continue our adventure into the realm of LMS.\nUh, so today, ...", + "Video: 59295439
Text: So I'm aware that there's a big risk that you are getting fed up of leaderboards, because we've done...", + "Video: 59472421
Text: And welcome back to our final time in Jupyter Lab with traditional machine learning.\nIt's almost ove...", + "Video: 59472137
Text: Well, well, well, it's been a long day, but congratulations, you've made it.\nWe've gone through and ...", + "Video: 59297693
Text: So at the end of each week, it's customary for me to give you a challenge, an assignment to do on\nyo...", + "Video: 60620143
Text: So we're going to make a call to GPT four.\nOh, that's going to ask it to look through a set of links...", + "Video: 60619501
Text: I welcome to day four of our time together.\nThis is a very important day.\nToday we're going to be lo...", + "Video: 59297743
Text: And welcome to day five.\nFor reals.\nWe're actually in the proper Jupyter notebook.\nThis time we're i...", + "Video: 59166847
Text: Well, they say that time flies when you're having fun, and it certainly feels like time is flying.\nU...", + "Video: 59170223
Text: Well.\nFantastic.\nIt's coming up to the end of the week, and that means it's coming up to a challenge...", + "Video: 59170037
Text: So how does it feel to be 30% of the way down the journey to being a proficient LLM engineer?\nTake a...", + "Video: 59295609
Text: You must be feeling absolutely exhausted at this point.\nAnd if you are, that is okay.\nYou have done ...", + "Video: 60619281
Text: Well, I'm delighted to welcome you to day three of our eight week journey together.\nAnd today we're ...", + "Video: 59472429
Text: And continuing on our strategy to solve commercial problems with LMS, we get to step four, which is\n...", + "Video: 59167009
Text: Welcome back.\nIt's time to make our full agent framework.\nI'm super excited about this.\nIt's pulling...", + "Video: 59166481
Text: And here, once more we find ourselves in our favorite place, the Jupyter Lab.\nReady to go with weeks...", + "Video: 59670933
Text: I realized my enthusiasm for this project is a bit insane, but I have to tell you that I am very sat...", + "Video: 59670073
Text: Okay, it's time to complete the Rag workflow in our Jupyter Lab on day 2.3.\nWe've got this function ...", + "Video: 59673595
Text: That concludes a mammoth project.\nThree weeks in the making.\nIn the course of those three weeks, sta...", + "Video: 59297603
Text: And I'm delighted to welcome you back to LM engineering on the day that we turn to vectors.\nFinally,...", + "Video: 60614541
Text: I am delighted to welcome you to the first day of our eight weeks together as you join me on this ad...", + "Video: 59667357
Text: Let's now see our results side by side.\nWe started our journey with a constant model that was at $1....", + "Video: 59667841
Text: Now, look, I know that I went through that very fast, but maybe, uh, you're still, uh, blinking\nat t...", + "Video: 59472007
Text: So I hope you enjoyed our first bash at Scrubbing Data, and that you are now looking through the cod...", + "Video: 59507435
Text: So I'm now going to talk about five important hyperparameters for the training process.\nAnd some of ...", + "Video: 59509185
Text: So this is where I left you looking at this satisfying chart on training loss and seeing the trainin...", + "Video: 59473159
Text: Welcome to Jupyter Lab and welcome to our experiments at the frontier.\nSo we are going to put our fr...", + "Video: 60619447
Text: I want to take a moment to talk about something that's very fundamental to an LLM, which is the numb...", + "Video: 59166353
Text: Well, congratulations on leveling up yet again.\nYou've got some real hard skills that you've added t...", + "Video: 60619123
Text: So what we're now going to do is we're going to look at some models in practice and start to compare...", + "Video: 59295363
Text: Well, another congratulations moment.\nYou have 40% on the way to being an LM engineer at a high leve...", + "Video: 60619289
Text: And now we'll go a bit faster through the other models.\nWe'll start with Google's Gemini.\nI have the...", + "Video: 59472873
Text: So it's quite an adventure that we had at the frontier of what's capable with Llms today, solving a\n...", + "Video: 60619429
Text: Let me talk about some other phenomena that have happened over the last few years.\nOne of them has b...", + "Video: 59295601
Text: So it's time to continue our journey into the world of open source and understand which models we sh...", + "Video: 59170025
Text: And a massive welcome back one more time to LM engineering.\nWe are in week three, day two and we are...", + "Video: 59166443
Text: And welcome back everybody.\nWelcome to week two day three.\nIt's a continuation of our enjoyment of r...", + "Video: 60620025
Text: And welcome back to Jupyter Lab, one of my very favorite places to be.\nWhen Jupyter Lab sprung up on...", + "Video: 59170055
Text: Welcome to the world of Google Colab.\nYou may already be very familiar with Google Colab, even if so..." + ], + "type": "scatter3d", + "x": [ + -54.867924, + -10.016936, + -31.258097, + 31.70372, + 30.986057, + -6.185337, + -33.489285, + 28.219635, + 17.128328, + 62.72473, + 47.31179, + -24.2394, + -1.2053607, + -51.674755, + 21.652782, + -16.459143, + -20.696423, + 10.337022, + 19.14492, + -3.3044112, + -35.514133, + -5.4719872, + -5.1226387, + 39.50191, + 4.9317994, + 2.9549847, + 4.1326537, + -11.681201, + 13.737143, + -38.953957, + 38.51183, + 12.244814, + -13.795913, + 25.426931, + -41.59921, + -4.846219, + 26.703289, + 4.1719337, + -16.057596, + -45.16751, + -42.312576, + 41.194798, + -5.312441, + -25.082775, + -19.01139, + -25.420565, + -33.847595, + 44.942863, + 42.343197, + -52.003693, + -2.027634, + -8.932509, + 7.358109, + 21.236534, + 8.489856, + -32.668716, + 19.729362, + 38.779385, + 11.676811, + -34.77253, + -19.88353, + -5.9252167, + -27.363443, + -23.733849, + -38.90037, + 0.8739669, + -8.800101, + 52.81528, + -42.25234, + -60.99824, + 40.039967, + 8.218847, + 1.5493853, + 18.742544, + 12.445063, + 15.729233, + 2.305942, + -52.24494, + 45.984768, + 61.82299, + -36.262505, + 23.008461, + 3.1225016, + -25.169813, + 13.884145, + 28.117481, + -41.8918, + -62.741215, + 50.868504, + 14.695426, + 18.848124, + 53.23811, + 1.8822976, + -43.086723, + 22.107574, + 28.920977, + 0.9757627, + 8.354844, + -35.398823, + -18.877796, + -44.756004, + -44.143375, + 41.599285, + -6.32147, + -36.219677, + 11.882775, + -36.926907, + -27.499813, + 7.9463377, + 44.327618, + -7.9317803, + 47.82066, + 15.4281025, + -22.415867, + 34.414196, + -26.34403, + -72.41432, + -4.391843, + 49.58219, + -36.41383, + 8.9234085, + 46.049984, + 15.545077, + 11.44092, + 63.98327, + -23.970676, + -21.239939, + 20.37123, + -46.25401, + -51.65096, + 11.39227, + -45.638683, + -7.633436, + 48.093586, + -15.356642, + 60.4545, + -23.832989, + 33.39114, + 21.430023, + -40.279682, + 
74.313805, + 12.70489, + -12.876895, + 61.037205, + -3.070069, + -32.003284, + -42.64056, + 22.65269, + -10.294073, + 32.200542, + -7.2023745, + 49.253765, + 37.47053, + -8.494583, + -23.560833, + 56.75159, + -31.536957, + 49.698734, + -6.0398607, + 32.40133, + 10.544113, + -57.28504, + -60.442104, + -2.4461548, + 8.221842, + -35.041687, + -34.68837, + 38.986423, + -18.542614, + -7.1585894, + 38.969902, + 3.209898, + -20.393421, + -14.69115, + 57.572422, + -16.431702, + -41.296795, + -11.364868, + -3.079418, + 36.56738, + 17.992006, + 41.732018, + 36.209473, + -14.367894, + 10.842153, + -21.875448, + 9.00169, + 24.985748, + 12.813471, + 31.651707, + 22.317532, + 25.142086, + -1.3272952, + -13.268854, + 18.224447, + -9.8931265, + 20.236471, + 11.86236, + -14.47948, + 21.426224, + -53.724995, + 34.6884, + -30.519207, + -29.45262, + -6.077813, + 0.9672612, + 0.880675, + 22.15887, + 32.418102, + 23.355238, + 9.021657, + 1.1571414, + 53.118507, + 53.786854, + -9.956359, + -26.244432, + -62.711346 + ], + "y": [ + -14.490821, + 13.269265, + -19.993616, + -4.6513524, + 13.124004, + 28.80794, + 16.636915, + -43.484592, + -19.25326, + 11.810007, + -4.4097085, + -22.479406, + -2.3730557, + 34.54029, + -42.26244, + -39.92246, + 57.083042, + 14.112607, + 46.32529, + 39.88998, + -18.428123, + 55.403828, + 46.364037, + -43.314423, + -37.030956, + -16.918308, + 46.21214, + -4.893018, + -39.002132, + 30.43938, + 66.75448, + -39.084946, + 58.451557, + 22.26844, + 38.007557, + -57.601913, + 4.895288, + -49.91165, + 7.408399, + 11.993464, + -13.689281, + -14.364397, + 24.817034, + -8.4831505, + -20.317192, + 25.323973, + -49.143436, + -21.03848, + 29.141571, + -6.6634197, + 50.849648, + 63.854736, + -30.517408, + -25.573277, + 18.286089, + -33.403877, + -23.222052, + -9.931835, + 40.63286, + 16.205715, + 3.051216, + -17.659916, + 35.058365, + 3.4864564, + 3.700205, + 36.984665, + -2.6762655, + -5.6370745, + -11.047944, + 0.7288602, + -21.658772, + 29.00057, + 1.724266, + 37.282166, + 
53.082787, + -2.780458, + 30.921745, + -44.949852, + 2.5712757, + 15.491104, + 1.4430839, + 62.071556, + 50.113644, + 2.4988964, + 39.450127, + -43.835182, + 19.886007, + -9.444176, + -19.323963, + -40.854057, + 10.667189, + -20.973135, + -28.608091, + 55.058033, + 60.170963, + -40.133392, + -47.412437, + -11.715236, + -9.072732, + -2.5470715, + -1.6941338, + 3.4941216, + -15.892217, + -43.618923, + -30.00606, + -40.14183, + -38.547764, + 39.44576, + 24.002556, + 9.615723, + -14.60073, + -34.695293, + 24.552044, + -5.866595, + 55.08572, + -10.214125, + -24.097853, + 54.292786, + 13.034389, + 18.571875, + 29.010794, + -50.740494, + -21.601845, + 0.3132618, + -16.908802, + -22.852116, + 25.010887, + 40.74505, + 24.467821, + -35.78193, + 33.290512, + -36.799778, + 23.632227, + -6.332195, + 44.901176, + 14.856071, + -34.01993, + -32.657963, + -0.19790256, + -13.282099, + 3.6223302, + -27.537643, + -30.407427, + -1.4600859, + 34.974186, + -40.831738, + -21.020662, + 17.890947, + -34.56345, + -26.83522, + -51.115036, + -30.037083, + 51.039494, + 22.00767, + 33.06137, + -10.025558, + 50.783173, + 39.624516, + 33.785957, + -11.808369, + -16.247301, + 11.096719, + 3.9298108, + -36.71249, + 5.224064, + 61.941765, + 25.593391, + 57.116886, + 6.0533786, + 1.4528623, + -39.268654, + 63.146736, + -45.646656, + -18.361784, + -32.47726, + -54.57239, + -6.065345, + -67.6337, + -1.7727742, + -35.816135, + 34.30833, + 22.940289, + 34.016815, + -3.314618, + -23.380756, + 33.676125, + -51.20468, + -11.454998, + -57.482727, + 1.5153905, + -10.624147, + 17.203127, + -38.379494, + -13.956661, + -5.083528, + 19.297848, + -16.216873, + 59.104256, + -57.06995, + 25.764948, + 7.6150827, + 14.722553, + 5.355223, + 14.431028, + 23.698402, + -33.719845, + -28.600927, + -21.976587, + -46.58074, + -10.529209, + 15.65386, + -12.663424, + -17.225386, + -7.9554195, + -29.027065, + -9.322318, + -20.461489 + ], + "z": [ + 33.58955, + 6.424783, + 3.7736342, + 44.446266, + -14.211976, + -20.706146, + 
-27.404715, + 0.6617362, + 5.21688, + -33.28256, + -55.585873, + -20.28202, + -52.614647, + 27.328653, + -42.200733, + 8.861208, + 16.871103, + 3.2040837, + -25.11161, + -62.362576, + 55.908, + -11.463446, + -36.990646, + -25.511473, + -13.930499, + -55.75895, + -52.69715, + -45.441845, + -26.946115, + 55.971066, + 23.39898, + -4.4006095, + 4.4890857, + -9.830746, + 30.693487, + 28.606426, + -55.78968, + 33.20101, + 24.737425, + 6.3936353, + -37.13538, + -35.807217, + 28.661215, + -33.935314, + -34.212143, + 4.344414, + 19.838388, + 34.422718, + 7.5581713, + 42.43699, + 7.8875375, + -19.631613, + -49.337524, + -8.550479, + 38.11054, + 16.326885, + 31.682981, + 9.375222, + -62.779873, + 14.799346, + 3.19852, + -65.68846, + 39.51259, + -50.37156, + -30.692204, + -12.3859415, + -68.958145, + 35.205345, + 60.06998, + 35.38274, + -47.00898, + 50.37468, + -64.382324, + -7.8745794, + -36.411892, + 51.311146, + -40.564484, + -7.702737, + -45.418507, + 7.9016147, + -65.43584, + 25.962664, + -26.323004, + -24.03093, + 40.447968, + 49.361694, + 38.207314, + -8.632037, + 4.053423, + 54.306164, + 31.737167, + -23.932957, + 4.9471326, + -0.7725971, + -34.537224, + -32.44887, + -35.74875, + 37.907295, + -15.214101, + 46.366627, + -2.4118197, + 15.25195, + -17.950134, + 2.1769252, + -36.235577, + 8.1211405, + 7.4978642, + 19.339888, + -1.4508528, + 30.87729, + -9.810467, + 2.9457393, + -36.48326, + 59.188797, + 21.245472, + 41.02429, + 8.934574, + 19.655256, + 4.2094474, + 49.7386, + 21.39235, + 29.006418, + -34.319855, + -6.9651656, + -29.378496, + -49.78239, + 52.876617, + -47.45204, + -1.2140275, + -12.558638, + -48.634777, + 16.615135, + -43.865845, + -26.010925, + 10.593092, + -21.61767, + -10.1859255, + -10.26535, + 67.91046, + 39.773335, + -26.705135, + -61.808792, + -9.83589, + -28.824474, + 32.673676, + -35.845654, + -8.483805, + 39.611057, + 50.274525, + 18.065207, + -27.351385, + 26.596378, + 31.07249, + 46.519524, + -3.3408246, + 0.54741937, + -8.282282, + -27.067053, 
+ 2.292199, + 28.1105, + -9.667651, + 17.858208, + -2.7038045, + 29.487076, + -21.28734, + -5.6881804, + 25.216806, + 8.090708, + -39.77805, + 28.19146, + 28.37309, + -36.739826, + -32.77224, + 45.384174, + -1.7217484, + 22.458988, + 28.484854, + -2.3853035, + -36.75745, + 41.8365, + 10.580859, + -6.242028, + -27.251656, + -11.90729, + 23.186678, + -40.240063, + -17.135298, + -6.0044813, + -3.6205585, + 12.106716, + 34.90803, + -36.0541, + -50.146595, + -23.62288, + -28.375637, + -16.228237, + -23.509655, + -22.093853, + -5.969808, + 28.515968, + -13.082469, + 11.173885, + 62.13355, + 41.156143, + 13.302348, + 54.074417, + -25.621063, + 46.219353, + 20.506983, + 58.0345, + 22.422977, + 15.098737, + 17.605795, + -40.685272, + -33.782177, + -0.57831883, + 7.4883723 + ] + } + ], + "layout": { + "height": 700, + "margin": { + "b": 10, + "l": 10, + "r": 20, + "t": 40 + }, + "scene": { + "xaxis": { + "title": { + "text": "x" + } + }, + "yaxis": { + "title": { + "text": "y" + } + }, + "zaxis": { + "title": { + "text": "z" + } + } + }, + "template": { + "data": { + "bar": [ + { + "error_x": { + "color": "#2a3f5f" + }, + "error_y": { + "color": "#2a3f5f" + }, + "marker": { + "line": { + "color": "#E5ECF6", + "width": 0.5 + }, + "pattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + } + }, + "type": "bar" + } + ], + "barpolar": [ + { + "marker": { + "line": { + "color": "#E5ECF6", + "width": 0.5 + }, + "pattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + } + }, + "type": "barpolar" + } + ], + "carpet": [ + { + "aaxis": { + "endlinecolor": "#2a3f5f", + "gridcolor": "white", + "linecolor": "white", + "minorgridcolor": "white", + "startlinecolor": "#2a3f5f" + }, + "baxis": { + "endlinecolor": "#2a3f5f", + "gridcolor": "white", + "linecolor": "white", + "minorgridcolor": "white", + "startlinecolor": "#2a3f5f" + }, + "type": "carpet" + } + ], + "choropleth": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "type": "choropleth" + 
} + ], + "contour": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "contour" + } + ], + "contourcarpet": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "type": "contourcarpet" + } + ], + "heatmap": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "heatmap" + } + ], + "heatmapgl": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "heatmapgl" + } + ], + "histogram": [ + { + "marker": { + "pattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + } + }, + "type": "histogram" + } + ], + "histogram2d": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + 
"#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "histogram2d" + } + ], + "histogram2dcontour": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "histogram2dcontour" + } + ], + "mesh3d": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "type": "mesh3d" + } + ], + "parcoords": [ + { + "line": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "parcoords" + } + ], + "pie": [ + { + "automargin": true, + "type": "pie" + } + ], + "scatter": [ + { + "fillpattern": { + "fillmode": "overlay", + "size": 10, + "solidity": 0.2 + }, + "type": "scatter" + } + ], + "scatter3d": [ + { + "line": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatter3d" + } + ], + "scattercarpet": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattercarpet" + } + ], + "scattergeo": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattergeo" + } + ], + "scattergl": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scattergl" + } + ], + "scattermapbox": [ + { + "marker": { + "colorbar": { + "outlinewidth": 
0, + "ticks": "" + } + }, + "type": "scattermapbox" + } + ], + "scatterpolar": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatterpolar" + } + ], + "scatterpolargl": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatterpolargl" + } + ], + "scatterternary": [ + { + "marker": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "type": "scatterternary" + } + ], + "surface": [ + { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + }, + "colorscale": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "type": "surface" + } + ], + "table": [ + { + "cells": { + "fill": { + "color": "#EBF0F8" + }, + "line": { + "color": "white" + } + }, + "header": { + "fill": { + "color": "#C8D4E3" + }, + "line": { + "color": "white" + } + }, + "type": "table" + } + ] + }, + "layout": { + "annotationdefaults": { + "arrowcolor": "#2a3f5f", + "arrowhead": 0, + "arrowwidth": 1 + }, + "autotypenumbers": "strict", + "coloraxis": { + "colorbar": { + "outlinewidth": 0, + "ticks": "" + } + }, + "colorscale": { + "diverging": [ + [ + 0, + "#8e0152" + ], + [ + 0.1, + "#c51b7d" + ], + [ + 0.2, + "#de77ae" + ], + [ + 0.3, + "#f1b6da" + ], + [ + 0.4, + "#fde0ef" + ], + [ + 0.5, + "#f7f7f7" + ], + [ + 0.6, + "#e6f5d0" + ], + [ + 0.7, + "#b8e186" + ], + [ + 0.8, + "#7fbc41" + ], + [ + 0.9, + "#4d9221" + ], + [ + 1, + "#276419" + ] + ], + "sequential": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 
0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ], + "sequentialminus": [ + [ + 0, + "#0d0887" + ], + [ + 0.1111111111111111, + "#46039f" + ], + [ + 0.2222222222222222, + "#7201a8" + ], + [ + 0.3333333333333333, + "#9c179e" + ], + [ + 0.4444444444444444, + "#bd3786" + ], + [ + 0.5555555555555556, + "#d8576b" + ], + [ + 0.6666666666666666, + "#ed7953" + ], + [ + 0.7777777777777778, + "#fb9f3a" + ], + [ + 0.8888888888888888, + "#fdca26" + ], + [ + 1, + "#f0f921" + ] + ] + }, + "colorway": [ + "#636efa", + "#EF553B", + "#00cc96", + "#ab63fa", + "#FFA15A", + "#19d3f3", + "#FF6692", + "#B6E880", + "#FF97FF", + "#FECB52" + ], + "font": { + "color": "#2a3f5f" + }, + "geo": { + "bgcolor": "white", + "lakecolor": "white", + "landcolor": "#E5ECF6", + "showlakes": true, + "showland": true, + "subunitcolor": "white" + }, + "hoverlabel": { + "align": "left" + }, + "hovermode": "closest", + "mapbox": { + "style": "light" + }, + "paper_bgcolor": "white", + "plot_bgcolor": "#E5ECF6", + "polar": { + "angularaxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + }, + "bgcolor": "#E5ECF6", + "radialaxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + } + }, + "scene": { + "xaxis": { + "backgroundcolor": "#E5ECF6", + "gridcolor": "white", + "gridwidth": 2, + "linecolor": "white", + "showbackground": true, + "ticks": "", + "zerolinecolor": "white" + }, + "yaxis": { + "backgroundcolor": "#E5ECF6", + "gridcolor": "white", + "gridwidth": 2, + "linecolor": "white", + "showbackground": true, + "ticks": "", + "zerolinecolor": "white" + }, + "zaxis": { + "backgroundcolor": "#E5ECF6", + "gridcolor": "white", + "gridwidth": 2, + "linecolor": "white", + "showbackground": true, + "ticks": "", + "zerolinecolor": "white" + } + }, + "shapedefaults": { + "line": { + "color": "#2a3f5f" + } + }, + "ternary": { + "aaxis": { 
+ "gridcolor": "white", + "linecolor": "white", + "ticks": "" + }, + "baxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + }, + "bgcolor": "#E5ECF6", + "caxis": { + "gridcolor": "white", + "linecolor": "white", + "ticks": "" + } + }, + "title": { + "x": 0.05 + }, + "xaxis": { + "automargin": true, + "gridcolor": "white", + "linecolor": "white", + "ticks": "", + "title": { + "standoff": 15 + }, + "zerolinecolor": "white", + "zerolinewidth": 2 + }, + "yaxis": { + "automargin": true, + "gridcolor": "white", + "linecolor": "white", + "ticks": "", + "title": { + "standoff": 15 + }, + "zerolinecolor": "white", + "zerolinewidth": 2 + } + } + }, + "title": { + "text": "3D Chroma Vector Store Visualization" + }, + "width": 900 + } + }, + "image/png": "iVBORw0KGgoAAAANSUhEUgAABPAAAAK8CAYAAABhiUEuAAAgAElEQVR4XuydB5gdVd2HT7I9mx5SAKULKiKC+ikWBJQizYIFRFBBxQJiAemKSlNREVABFRDpTYpSFQQExUaVjjQJKaRsku0l3/nNZpbJ3bn3npk77Wbf8zxLIDvlzHvOvct991/GrbTDMCAAAQhAAAIQgAAEIAABCEAAAhCAAAQgAIFCEhiHwCvkujApCEAAAhCAAAQgAAEIQAACEIAABCAAAQh4BBB4bAQIQAACEIAABCAAAQhAAAIQgAAEIAABCBSYAAKvwIvD1CAAAQhAAAIQgAAEIAABCEAAAhCAAAQggMBjD0AAAhCAAAQgAAEIQAACEIAABCAAAQhAoMAEEHgFXhymBgEIQAACEIAABCAAAQhAAAIQgAAEIAABBB57AAIQgAAEIAABCEAAAhCAAAQgAAEIQAACBSaAwCvw4jA1CEAAAhCAAAQgAAEIQAACEIAABCAAAQgg8OweuO0v/zbnXXajefzpF8zg4JDZbONXm4P229O8Z5stR3bILp/4pnlh7oKR/25ubjKzZkw1W2+xqdn7gzuYLV+/sdNuGhpaaa675W5zzU1/MY8/9bzp7u0zM+113rrlZmb/j+5sXrvJeiPX2fPTx5iN1lvbnPbdg52uXeSDeuxzbrfXV73nPOPEQ8tOdbf9jjQtlu3Vv/5ekR9n1Nzu+OsD5uLf/dE8Ztd0Scdy09rSbPfReubje25vdt9xm0I9y2cP+6F57Mnnze1XnWaaGhtC53b8qeeb3914l7ntyp+Yfb98gt3nrzEnHfW5zJ/juB+ca/7y9wfN7Vee5t1br8Ms5nL9LfeYI086x9xy6alm3TlrZf7c3BACEIAABCAAAQhAAAIQgAAEIBAkMOYF3k23/9184zs/Nx/edVuz2/vebvr7B8z5l99k/nH/Y+aiM481W7xuoxFxMLG9zXzzS/t4/93b12f++/xL5rqb7/akzZc//UHzJftVafQPDJqvHHu6ufNvD5id3vMWs907tjLtE1rNc/+bb664/s9m3oJF5pRjDjK7bP9/3mXWJIGn5znhtN+ay6+/3dx2xU/MWtOnjEJ138NPmk8efKI59qv7mX0++N5EXqkdyzvNO/b4svnHjWeZCW2tiVyz9CISXcd+/9fmg7u8y+y83VvNjGlTzKIlHZ4Au+WOf5qjv7Kv2ffDO3qn/fGuf5
mzf3u9ueKc41OZi8tFb/7zP8zXj/+ZJ4Z33PYto06RbH3Phw8173zrG8yPj/+y+cOf/uat19u2ep3L5RM9plTgpTWXk06/yJOZh39pb2/+ek3+7V//sfL1Hd5rlAEBCEAAAhCAAAQgAAEIQAACEMiTwJgXeF888iemu6fXnH/akSPr0NnVY7bZ40uedDniy8PCTpE/isT59Y+/udp6KaLulDMvNhddfauVHV+yAmdYvoWN0355pfnlRb83Jx75WU/2BEdXd6858Bs/MP99bq655ZJTzZTJ7WucwFOE44cPPM4c9oWPm8/s/f5RiCRrbrCy6M82MmzSxAmJvC7uuvch84UjfpSqwNv1k0eYOTOnm3N/csSoOR9yzE+NGTfOnHHCV7zv/fjsy81f//VIrgJPInmHj3zVvOG1G5lfnPK1UXP2o89+derhZpu3bJ7IOsS9SKnAi3udauft/cXvmjfbaFpf4FU7nu9DAAIQgAAEIAABCEAAAhCAAASyJDDmBV4YbMm0bXb/kvnkR3Y0h39xOCKnnMDT95R2u8enjvLSJsulfnb39JltP3SIefMbNzVnff8boWv80vxFRnJlvXVned9XBN5rNlzX7PCurc2Z5/7OzJ33spk9c5onGfzIqd/f+ldzxIlnmwtOP9p8+9TzvLnceNH3jcSi0oKv+sMd3nmtrS1mqze8xhz62b1G0nQVzfTN751lLvn5cebUsy7zIgn1DBKXe+22rVEa5b8fesI02sikPXd+pyfe/PHsC/PMT391pY1SesQToLPWmmZ2fe/bvUjEpqbGsnt47y98x0iQXn/ByasdM8znK14E2wlHHOh97+5/PGzOufB68+R//2e5DJg3vm5j87WDPmresNmGI+cuX9Fl53GVufXOf5oVnd1mo/XXMZ//5O4en5+d9zvz899cO3Lstm/f0hNWLmy+9cNzzcOPPWMO/MRu5mQbnbXDu7Yy3z38gNDn2nHvw8wGr5pjfnnqYRVfu5869GTzzwceHznGF5nL7DP8xIq92+6+z0u/nTZlkpe+/bXPf9T7d41y81m5cqWXunv1DXfZqLF5psWu37Zv29J84wsfC41y9G/+o7Mut5GmN3rRkErhDo4DvvZ986LdMzdd/APrHseNSlvVPjntl1dYPs+azu4eKy+nmT12eqf5gk07Hz9+nLny93d4e/FPV/zYE5v++Pzhpxo966W/+Jb3Vy57qFIKrdZd+yNsKHJQe0njwqtuNZdfd7sXVTehrcVsZtPUxdZPe998u0+vdonLzv62eea5l0al0N5+z31e9OQTVkRrbLrRq8wB++zmRdNqLFy01EsT/76NolU06W13/9vbk0qLP+bQ/VZLj6+4UfgmBCAAAQhAAAIQgAAEIAABCECghAACbxUQiS+JKImLn59/jfcB/KKfHWtevc6wTKsk8PT9n5xzhfnVxX8wd11zhpk+dVi6BIdScj/91VPM9755gJeu6zIk8AYHB80Gr55jPrfv7qahocFGcF1m5/aUJ150n5v//HebDvlzT86p3tprrFSQMNB8fmNTgQ+zAnK7d7zJpnQu8yIFFeF3/W9OtsJtqj13OJVSNcUkp9a3EuoHP7/E/PbKW8wWr93QfNNGH6rGn58ies4PD/PSKiXAdvnE4V6U3PHf+LSZOmWilRr/84THJ/fa0ZOE5cZVf7jTk1EXW2kYrBuomoDHnPKrkb+X6PrM104x73v3m80hB+7lcZDEvOefD5urfvVdKzlne7fQMS/MXWgFySfN2rNmmOtvvcecf9lNRnPV3CUxJfJutbXMJk9qN0qDdmHzvZ9cYP58z/1mHRt1edB+e1g2s0f2QumziauY7WYFpuohvtHWQ2y0a1U6JBu1VkqvVSRnm5WqqvenGnNzrbz91tc/ZV5r6y9KkH3nx7+xzzPdXGJllyRauflIKJ3+66vMVywj1dqTBP6unft4e84Vv/xO2Rp3z78437x/3yPMVz/3EW9v+UP7fycrJIN/H6
w7N2DXYXsrqfSMB3/mQx5PvVY0vy/s/wFz4D67Ogk81z1USeB1LOs0HctXrIb5Wz88z9vjV5zzHU92+3tXe3l7m7Ku9OCzf3uducfK4T9c+H3vNSRpuuPHv2E+sPO7zMEHfMjb1zf+6d7VBJ4fyfnR3bcz+1mxr6hK7bOrb7jT/Pzkr3nCVdd51wcO8e4rfu/f4e1W4HWZA78+LEK1bxkQgAAEIAABCEAAAhCAAAQgAIE4BBB4q6hJ1nz56OFC+W/afBOvYL+kjT+qCTxF+Ei66EN6sBGFf74fKadU3be+6bVOayWBt2TpMltI/0dW9jR75/z9vsc8aaUovne/bYsRCRcULopme9cHDvbSdI/72v4j91IknqLF/GN9gRdM6X30yefMRz73ba+hhp8+LLm55fsOtJLowza6bQ9P4EkAqaacRKA/Dj3uDE+AXmnFUbmh6EbVV9v1vW8z3znsMyOHKTpNQuaa807w/k7SQxFlkiySXBp6rvd9/OtmJxtd920rDv/90JNmv0NOHFXLTYJQ6aEf22M7c96lN3rRhX4NPFc2qtd3yTV/GiUaw55LUZOKRrzkd3/yBJGivLa0e2ibN29udn/fNp7Q8YdStl9e3DGSQuuvZ2n69bW2tuLRJ/9y5P5h8+nt6/fW+d024k7n++OhR/9rlBL6w+O+6HEuNxRpN2/hYnODZewPyc5zLvy9Fz3n1ykMCjw1ctF/6/XxARuV6Q+lR6tW3KvWnuks8Fz2UJQmFkpP1/zP/cmRnpTWWNqxwnvG4GvyCRvR+aEDjjVnnnSoJ/U03rLL560A32Ekhba0iYX25xJ7rWvt/pSM01D0oySoImYljH2BJ+Zi7w/NS+nz993yS6PmNwwIQAACEIAABCAAAQhAAAIQgEBUAgi8VcTU7ECpmvNfXuI1lHjymf+Zs075+mpNLMJq4PnAL7r6j+ak0y80151/otl4g3VHrcPv/2hTXU8426uT5toMQAJvndnTV0u5fdpGF+35qaPNqd/6oo3weduIwFMKrdJzNR6yqZ9KVf3BcV/wosKCQyl+khtKMfQFnlIG/bRUPwJLcu0ju79n5NS37fZF77/9lGJJkAuuuNk88J+nPLExtHLISM5J+vzxsh9V3Iff+dH5XmOEO64+3ROTz7+4wIqQb9pmD5+06bvv8859886f957PT6f1L3jw0T8dXiPbBOI39v4/+NklXs280jRQ//hSgefKxm+4cf+tv/bSQl1Gl00nVUqxogf/+eDj5j+PP+tFwB33tU95KckapQLv3EtvMEpnLX0GpXuqtp7f0CNsPv6zlMo03cdbr93seq1qyhA2/xtslNnh3/uF+e0ZR3vRihJSO+1zuNl80w1W63wcFHiSuZKDEnmK+HyHjcjc2kZ/BtOmXVNoXfaQq8BTY5gvHXWaJ4V91npmRQxedu3t5qbb77Wp5Its1+des9IKaKXyam996P3v9tBUE3jaj3vYhhbHH/bp1VAqBf2ef/7H/OXaM0YEntKXD9h715HjLrv2Ni8q8o6rf1oxrdllj3EMBCAAAQhAAAIQgAAEIAABCIxNAgi8kHVXRNXHDzreSw+UGNOoFoEnwXLZdbeZv/3+F6FdK/1oMUXE7f2BHZx2W1gXWnW+3WP/o0aiq3wJp8i112z4Ku+6f7VC4bOH/dCr96a6b8Gxuz1XIvLsH3xjROAFz/UF3ilHW2Gx0ztGTg0Kof+9tNB88DPHmI3XX9eL5nu1jUBSnTwJzEeeeK6qwPOj/HzxpBRQpSP+2QqOyTZ9UdJly/ce6IkzpQ0Hh1Jpp0+d7MkQpdT+4oJrKzaoKBV4rmy0njfc9jdzz3U/c1qrsIMUYfa1b//MPP3si+aPlw9HtJUKPL+O2z9vOmckylLXUpSeIhVVq+2ztg5f2Hz8Z1G67rgSyahuyhKgEr3lho7ZzjazUBSaZJbkoxqp+KnS/nlBgae/U1031d1Th12tpSIxd7cdnJWurSg8F4HnuodcBN4z9j
UhqagUWHX8DQ6l9l5mo2OV1q3nnDixzasJud8hJzkLPEVQaj+q8UqwDqTuo6hbpX8rus6PwAuKaB2DwIv9EuJECEAAAhCAAAQgAAEIQAACEFhFYEwLPIm62/7yL5v2N8tsvtkGq22Ko076pbnjb/ePCJxKAk9pk6obpgYKwW62wQv22XTHba2QUVF/NboIi+pSqt/Vtkac0lclD+MKvIcff8YKyPIReG/ZcjNP7ITJPxeB50uxGy/6wUjDDT2rX4+uWgSejv3o5yVIW825Pz7Cqz/2f1u9zkvL9Mdb33+QlxqqmmSlQ/XdVBfwYpuyeuJPf+sJw7Vnzwh9UZcKPFc2UQSehJAiAMOad/zxrn8ZpRarwcU73vKGUQLPn1+5CLxv27p4H7ORbmHzUYTfx6xoVlTktrYGW+lQKm+wiUQYoB/+/FJz+fW3mzt/d4Y5/kfnmftsWvLNl/xwJE1U55QKvOB1lKJ6o41uU3fdHd65tfn+sQfZxinDdQ5Lm1hImqkZiZpYuO6hagJPdQU/bqNN9ZznWMaltQcVWfdeW0dRjSX8oQhJpcRGicDTdXZ/X3gE3r33PeoJZQQeP1chAAEIQAACEIAABCAAAQhAIC0CY1rgCaqinCSDfvPTo0YYq8bbB22NLKU/+oXnywk8pRQe+/1fm+tuudv8+kffNG9/8+vLrpU6oqpG1yEHfNgW/N9zteOUfvrFI3/spe7+/oJTvOL6cQWehKJqoykiKVgDT1FPO9sUSRX0/5SVhHEFnt844d4//MITjRpKg1Vk4MwZNoXWRptVG6oZ+L3TLjA/+c7BnuC68MxjvEYc/vjcYad6zR5Ku/rqPup6qlpifq23oIjR+bqearEpfdQXRX+/4SwvOsyVjavA+8vfHzIHffNHI6mupc99xrlXm7MuuG4ktVoReOpW6tcJ/NeDT5j9v3KSV8Nu5+3+b+R0v/mCjnvda9YPFXiSwu+0TRM+vOu7zVGHrB55plTrjdZbezURF7Ymil5TVKYiLpXm+bl9d/PqHAZHUOBJVt5n06ZLU7PVCfkRKxTVXViReV/79plePUM/KlT1+ra30X5qPiKB57qHKgk8vU6/dNSPzTPPzzOXn328mTK5fbV5KyV4qx0/a/b50PtG6jnqAL1exbdU4H1sj+2914ZGaQ081Qt82TaCUYq8P/xGHJtu9Gqvnh4Cr9qrnu9DAAIQgAAEIAABCEAAAhCAQFwCY17g+c0nlC6qGlf60H/VH+7wJMRp3z3Y7GgbJmhIYkhWffNLwx/wlcqpFMkrbbTRI08860VBffrju1RcB6WGKqXytr/822zzls3N+7d/m5k6eaJ57sV55tJrbrMdNTvNmSceOtLkIq7A0ySUlnruJTeYI63Y2fZtbzQLrDQ6+fSLvPpx11oJMcV2ZI0r8PzGCxI9qln31DMvmu//7GJP1txy5z/NNeeeYNa1Ak0CtNzo7Oox2+11qJd+OcUyCIoRneN3od1r1/d4nV11nGTZD39xqfnGQR/zut1qKOVTaZzHHrq/13REaa9Kx/WbfKhLqCSQBJVE2CYbruvExlXgab8ccszp5s57HzDqUPou21hEa6qGHHfd+6CXvrnzdm81P/r2cJMJdepVw5RzbArzDJtSq3RmRaa9OG+hV79tE8vw4cf+63V11XyV6qxRbj4SYUoj/rpNtVW6tCLcVMNRdd/U6bc0sjRsPSQQVXNPAupPVr6W1hMMCjxfmkoAf8A2SZEUlXT+lmW8y/b/Z7sB72ckilXTcJ8PDoszSVPtjzv++oAXKSmB57qHvmtTVP/y9wfN7VcON5gJzkVRf+fbTss/Of5g23159bqT2i9KWVak3Yt2Pmee9FUvPfvCq271ajVeaV/jSmVXaq26zr73o1/35ibhrcYsf7n3odW60Kr7saSyzlGE7IDl/Gv7+lLX49/8VDUEX4PAi/tTiPMgAAEIQAACEIAABCAAAQhAoCqBMS/wREiy7pwLrzeKWmq1kV1KhV
W9q/fZ1Dt/SByocL8/1IlSUXIq/q8P9H7Xy2rEJXzUkfbqG++04ul5o8gkRZRtY9MrD7D3VOSYP2oReLrPeZfdaGXOHV7NL4kWRQeqptqr15nl3SKuwNO56qypxh3LrHR8vW16cMTBnzATbEOKzx1+qlFaoyRNWDOPIB+lWSrdUtFjvpALfl812X52/jWeIFXKsaK31DhBKaX+UDOCH9smEH+yqdCSghvaqDNFN/riVVL0IDsnsd5y8429moYubFwFnuYhMau6b1pXSd2ly1bYenYttkbgOmZ3K4U/tud2I6mdDz7ytCdxFV346Y+/36shKF6SUbfdfZ8ngSSednrPW23X3728jrYaleajbrnqgOt1dbXr/NqN1zMHWQauzVL8DsnvfffW5vTvfWXUFi5NodU8f2XX/0krbvXsSl+VpPzSpz4w0mVV4lSRh4ttF2UJwQP22dU2PHnaaxSjpimue+j8y28uK/B22+9I8+wL80Jfcur+q3Reff/bp55npegznoBXZ+ZDbDfl7595sbnCrtmutk6gUrcl8tWteGhoyGvwIgEr2XrLpad6klVD4vUsK0sft8+gNG7t+y9/5oPm7VsPR90SgVft3Y/vQwACEIAABCAAAQhAAAIQgEBcAgi8uOQ4DwIQgAAEIAABCEAAAhCAAAQgAAEIQAACGRBA4GUAmVtAAAIQgAAEIAABCEAAAhCAAAQgAAEIQCAuAQReXHKcBwEIQAACEIAABCAAAQhAAAIQgAAEIACBDAgg8DKAzC0gAAEIQAACEIAABCAAAQhAAAIQgAAEIBCXAAIvLjnOgwAEIAABCEAAAhCAAAQgAAEIQAACEIBABgQQeBlA5hYQgAAEIAABCEAAAhCAAAQgAAEIQAACEIhLAIEXlxznQQACEIAABCAAAQhAAAIQgAAEIAABCEAgAwIIvAwgcwsIQAACEIAABCAAAQhAAAIQgAAEIAABCMQlgMCLS47zIAABCEAAAhCAAAQgAAEIQAACEIAABCCQAQEEXgaQuQUEIAABCEAAAhCAAAQgAAEIQAACEIAABOISQODFJcd5EIAABCAAAQhAAAIQgAAEIAABCEAAAhDIgAACLwPI3AICEIAABCAAAQhAAAIQgAAEIAABCEAAAnEJIPDikuM8CEAAAhCAAAQgAAEIQAACEIAABCAAAQhkQACBlwFkbgEBCEAAAhCAAAQgAAEIQAACEIAABCAAgbgEEHhxyXEeBCAAAQhAAAIQgAAEIAABCEAAAhCAAAQyIIDAywAyt4AABCAAAQhAAAIQgAAEIAABCEAAAhCAQFwCCLy45DgPAhCAAAQgAAEIQAACEIAABCAAAQhAAAIZEEDgZQCZW0AAAhCAAAQgAAEIQAACEIAABCAAAQhAIC4BBF5ccpwHAQhAAAIQgAAEIAABCEAAAhCAAAQgAIEMCCDwMoDMLSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgEJcAAi8uOc6DAAQgAAEIQAACEIAABCAAAQhAAAIQgEAGBBB4GUDmFhCAAAQgAAEIQAACEIAABCAAAQhAAAIQiEsAgReXHOdBAAIQgAAEIAABCEAAAhCAAAQgAAEIQCADAgi8DCBzCwhAAAIQgAAEIAABCEAAAhCAAAQgAAEIxCWAwItLjvMgAAEIQAACEIAABCAAAQhAAAIQgAAEIJABAQReBpC5BQQgAAEIQAACEIAABCAAAQhAAAIQgAAE4hJA4MUlx3kQgAAEIAABCEAAAhCAAAQgAAEIQAACEMiAAAIvA8jcAgIQgAAEIAABCEAAAhCAAAQgAAEIQAACcQkg8OKS4zwIQAACEIAABCAAAQhAAAIQgAAEIAABCGRAAIGXAWRuAQEIQAACEIAABCAAAQhAAAIQgAAEIACBuAQQeHHJcR4EIAABCEAAAhCAAAQgAAEIQAACEIAABDIggMDLADK3gAAEIAABCEAAAhCAAAQgAAEIQAACEIBAXAIIvLjkOA8CEIAABCAAAQ
hAAAIQgAAEIAABCEAAAhkQQOBlAJlbQAACEIAABCAAAQhAAAIQgAAEIAABCEAgLgEEXlxynAcBCEAAAhCAAAQgAAEIQAACEIAABCAAgQwIIPAygMwtIAABCEAAAhCAAAQgAAEIQAACEIAABCAQlwACLy45zoMABCAAAQhAAAIQgAAEIAABCEAAAhCAQAYEEHgZQOYWEIAABCAAAQhAAAIQgAAEIAABCEAAAhCISwCBF5cc50EAAhCAAAQgAAEIQAACEIAABCAAAQhAIAMCCLwMIHMLCEAAAhCAAAQgAAEIQAACEIAABCAAAQjEJYDAi0uO8yAAAQhAAAIQgAAEIAABCEAAAhCAAAQgkAEBBF4GkLkFBCAAAQhAAAIQgAAEIAABCEAAAhCAAATiEkDgxSXHeRCAAAQgAAEIQAACEIAABCAAAQhAAAIQyIAAAi8DyNwCAhCAAAQgAAEIQAACEIAABCAAAQhAAAJxCSDw4pLjPAhAAAIQgAAEIAABCEAAAhCAAAQgAAEIZEAAgZcBZG4BAQhAAAIQgAAEIAABCEAAAhCAAAQgAIG4BBB4cclxHgQgAAEIQAACEIAABCAAAQhAAAIQgAAEMiCAwMsAMreAAAQgAAEIQAACEIAABCAAAQhAAAIQgEBcAgi8uOQ4DwIQgAAEIAABCEAAAhCAAAQgAAEIQAACGRBA4GUAmVtAAAIQgAAEIAABCEAAAhCAAAQgAAEIQCAuAQReXHKcBwEIQAACEIAABCAAAQhAAAIQgAAEIACBDAgg8DKAzC0gAAEIQAACEIAABCAAAQhAAAIQgAAEIBCXAAIvLjnOgwAEIAABCEAAAhCAAAQgAAEIQAACEIBABgQQeBlA5hYQgAAEIAABCEAAAhCAAAQgAAEIQAACEIhLAIEXlxznQQACEIAABCAAAQhAAAIQgAAEIAABCEAgAwIIvAwgcwsIQAACEIAABCAAAQhAAAIQgAAEIAABCMQlgMCLS47zIAABCEAAAhCAAAQgAAEIQAACEIAABCCQAQEEXgaQuQUEIAABCEAAAhCAAAQgAAEIQAACEIAABOISQODFJcd5EIAABCAAAQhAAAIQgAAEIAABCEAAAhDIgAACLwPI3AICEIAABCAAAQhAAAIQgAAEIAABCEAAAnEJIPDikuM8CEAAAhCAAAQgAAEIQAACEIAABCAAAQhkQACBlwFkbgEBCEAAAhCAAAQgAAEIQAACEIAABCAAgbgEEHhxyXEeBCAAAQhAAAIQgAAEIAABCEAAAhCAAAQyIIDAywAyt4AABCAAAQhAAAIQgAAEIAABCEAAAhCAQFwCCLy45DgPAhCAAAQgAAEIQAACEIAABCAAAQhAAAIZEEDgZQCZW0AAAhCAAAQgAAEIQAACEIAABCAAAQhAIC4BBF5ccpwHAQhAAAIQgAAEIAABCEAAAhCAAAQgAIEMCCDwMoDMLSAAAQhAAAIQgAAEIAABCEAAAhCAAAQgEJcAAi8uOc6DAAQgAAEIQAACEIAABCAAAQhAAAIQgEAGBBB4GUDmFhCAAAQgAAEIQAACEIAABCAAAQhAAAIQiEsAgReXHOdBAAIQgAAEIAABCEAAAhCAAAQgAAEIQCADAgi8DCBzCwhAAAIQgAAEIAABCEAAAhCAAAQgAAEIxCWAwItLjvMgAAEIQAACEIAABCAAAQhAAAIQgAAEIJABAQReBpC5BQQgAAEIQAACEIAABCAAAQhAAAIQgAAE4hJA4MUlx3kQgAazetsAACAASURBVAAEIAABCEAAAhCAAAQgAAEIQAACEMiAAAIvA8jcAgIQgAAEIAABCEAAAhCAAAQgAAEIQAACcQkg8OKS4zwIQAACEIAABCAAAQhAAAIQgAAEIAABCGRAAIGXAWRuAQEIQAACEIAABCAAAQhAAAIQgAAEIACBuAQQeHHJcR4EIAABCEAAAhCAAAQgAAEIQAACEIAABDIggMDLADK3gAAEIAABCEAAAhCAAAQgAAEIQA
ACEIBAXAIIvLjkOA8CEIAABCAAAQhAAAIQgAAEIAABCEAAAhkQQOBlAJlbQAACEIAABCAAAQhAAAIQgAAEIAABCEAgLgEEXlxynAcBCEAAAhCAAAQgAAEIQAACEIAABCAAgQwIIPAygMwtIAABCEAAAhCAAAQgAAEIQAACEIAABCAQlwACLy45zoMABCAAAQhAAAIQgAAEIAABCEAAAhCAQAYEEHgZQOYWEIAABCAAAQhAAAIQgAAEIAABCEAAAhCISwCBF5cc50EAAhCAAAQgAIGCE+jtHzTdvYOmt3/IjLNzbWluMG0tDaa5cXzBZ870IAABCEAAAhCAAASCBBB47AcIQAACEIAABCCwhhAYGlpp+gZWmp6+AU/aDdr/Dhvjrc1rtTKvpUlf4814/QUDAhCAAAQgAAEIQKCwBBB4hV0aJgYBCEAAAhCAAASqExgYHFol7QatuBusfkLIEZJ4w0JvvGlsIDovFkROggAEIAABCEAAAikSQOClCJdLQwACEIAABCAAgTQISNopwq7bCrs++2eSQxKveVVkHqm2SZLlWhCAAAQgAAEIQCA+AQRefHacCQEIQAACEIAABDIjoHp2PX022s4Ku34r8LIYyqxta2n06uY12v8g1TYL6twDAhCAAAQgAAEIjCaAwGNXQAACEIAABCAAgYISCDahKFfPLqupS+Y12eYXMya3GNXaQ+ZlRZ77QAACEIAABCAAAWMQeOwCCEAAAhCAAAQgUBACEmM9NsKuz4u2GzRlelDkOtt1ZrSZuYu6vXp51M3LdSm4OQQgAAEIQAACY4gAAm8MLTaPCgEIQAACEIBA8Qj4TSi6egcSr2eXxtP6Ai94baXXttgmGEq1pW5eGtS5JgQgAAEIQAACY50AAm+s7wCeHwIQgAAEIACBzAn0Ddg6dvZLsmvx8j6Td3psFABhAi94vlJthyPzhrvakmobhS7HQgACEIAABCAAgXACCDx2BgQgAAEIQAACEMiAgN+EQqmxvrCbMbnZLF3RX1cCb9bUVrNgaY8TMb9unoRem/1C5jlh4yAIQAACEIAABCAwigACj00BAQhAAAIQgAAEUiKgrrE9fQOm19a1C4uyqzeBN85G182c4i7wSrEqIq95VWQeqbYpbTouCwEIQAACEIDAGkkAgbdGLisPBQEIQAACEIBAHgT8enaKslMjimpNKOpN4DXYkDp1oXWNwKu0BorOa2tp9OrmqYYe0Xl57FjuCQEIQAACEIBAvRBA4NXLSjFPCEAAAhCAAAQKSaCWJhQSeMu7bPMKWw+vHkaSAi/4vJJ5isxTmi118+phJzBHCEAAAhCAAASyJoDAy5o494MABCAAAQhAoO4JSLgpLband9D0D8aXbwi88K0giTfcCGO8aWwYX/f7hQeAAAQgAAEIQAACtRJA4NVKkPMhAAEIQAACEBgTBNSEotsKu3L17OJAQOBVp6b02hY1wbCpttTNq86LIyAAAQhAAAIQWDMJIPDWzHXlqSAAAQhAAAIQqJHAkC1g1zewsmITihpvYevJ1V8K7dSJTWbRsr5aHz3W+Uq1VWTe1InNRutD3bxYGDkJAhCAAAQgAIE6JIDAq8NFY8oQgAAEIAABCKRDINiEQo0o0h4IvHiE15nRZuYt7jZNjcOptqqdh8yLx5KzIAABCEAAAhCoDwIIvPpYJ2YJAQhAAAIQgEBKBCTtlBbb7XWOjV/PLs706k3gKYV10oTG3CLwfMZzprWaeUt6VkOuenlqhOH9aefJgAAEIAABCEAAAmsSAQTemrSaPAsEIAABCEAAAk4EVM9OzRGWd/WbLlvXLq+BwItO3qUTrlJt21oavbp5qqFHdF50zpwBAQhAAAIQgECxCCDwirUezAYCEIAABCAAgZQIlDahUC23rh4bdWc7yuY1ijCHKM9ehAg8F4EXfCbJPEXmKc1W0XnIvCgrzrEQgAAEIAABCBSFAAKvKCvBPCAAAQhAAAIQSJSAmhz02JTYPhttp3
p29j9XG5JnXupsjhF49Sjw2tsazZLl+TSx0AJK4E2f1GwWdvTG2i+SeKqbpz8VhcmAAAQgAAEIQAAC9UAAgVcPq8QcIQABCEAAAhBwIuA3oejqHahazw6B54R0tYMUgTehtcEsXdEf/eSEzkgyClDptS1qgmFTbambl9ACcRkIQAACEIAABFIhgMBLBSsXhQAEIAABCEAgKwJKge23X1GbUEyykWQDNiwv7wi8HhsBqEjBehitil6zsitvgZdGFKBSbRWZ19rcaGUedfPqYT8yRwhAAAIQgMBYIoDAG0urzbNCAAIQgAAE1hACqmfX0zfkpcYOlubGOj6jBJ7G8u4BxzOSP6wIUYBRnkqRako9zVPgZSERJfOabLShhJ5q51E3L8ou4VgIQAACEIAABNIggMBLgyrXhAAEIAABCEAgcQLDwm7Aq1sXV9oFJ4XAi75ERRB4ecxB0lKNMLw/rdhjQAACEIAABCAAgawJIPCyJs79IAABCEAAAhBwIuDXs1OUnRpRxAy0K3uv9tZGo0irNT0CT00f9Kz9g8MRiytLmnk4Lcaqg/KQZ6Xz0xyabPOJZV351OHTnmlrafTq5qmGHtF5UXYQx0IAAhCAAAQgEJcAAi8uOc6DAAQgAAEIQCBxAlGaUNR6c1/ArIkCT9JO6Z8TrGTS6LJ19ppsXbfmxgYbvTjk/XccmZe3PNOzFEG8+ntPMm/apBZj/7AdbZF5tb4mOR8CEIAABCAAgfIEEHjsDghAAAIQgAAEciWgJhRKi1UzB0WJZTWKEE2WZA28oLQbZ42SUo7VjXdg8JWQO/29otfUSbbVpoQOR+UNH+cSmVcE6VmE1OfgHp0xudks77Jdj+0+VoqtxKn+bLScGRCAAAQgAAEIQCApAgi8pEhyHQhAAAIQgAAEnAmoCYW6vyZVz875xoED1wSB5yLtyrHxZZ4fqeci84oi8JRO3dmTX/ORINNZU1vNomW9o+oyKr22RU0wbBQkdfPivEI5BwIQgAAEIACBIAEEHvsBAhCAAAQgAIHUCQxZ49I3sNI2AhjvyY5+G62U95BUUSRanh1VFU02YNlIZroOSTtxVHqs/j0s0s71WsHjxKOazCtC9FuSUYtxOJWeU07gBY+jbl4SpLkGBCAAAQhAYGwTQOCN7fXn6SEAAQhAAAKpEQg2oVC9NQ2lG3Z09q+W1pnaBKpcWMJq0oRGKxT78pqCcRV4pdKuz6Ycq46d0jbTGOVk3kTbDENjTawbGJfjOjPazNxF3c6nS+Y1rZKlbTZCjyYYzug4EAIQgAAEIDCmCSDwxvTy8/AQgAAEIACBZAlI2iktttvrHDtaLgXrhSV75+hXK7rAy1ralSMYlHk6ptdKw6Ur+pxq5kVflepnKAJP9RJ7QvZX9bOTP2LOtFYzb0lP7AurXl6zrUfo/WnFHgMCEIAABCAAAQiEEUDgsS8gAAEIQAACEKiJgOrZKY1Twq5aE4ppk5pNZ/dwwf+8h7qGTmlvKlQEXlGkXbm1mTyhyetmq0YYLjXz0ljjIklgrdeMyS1mwdL4Ai/IiLp5aewYrgkBCEAAAhBYMwgg8NaMdeQpIAABCEAAApkSiNuEokj1yyRfNJ+8U2g1D0W1+TXt0k6PrWWjBFN+XWrm1XKvcucWKQ1bazfdSumFHb2JP6pSbYc72g5H55FqmzhiLggBCEAAAhCoKwIIvLpaLiYLAQhAAAIQyIeAmlAoZVG17PpsxJ26gMYZiuBS5FaUpg1x7uNyTtLRUy739I9RB1jJGb8pRJGlXfC5yq1fljJv5pQWs3h536iur1H4J3VslmnYknjDQm+8abQRkAwIQAACEIAABMYWAQTe2FpvnhYCEIAABCDgTMBvQtHVa1NeE6o3VoQupj6ArAWeL+08CWNrnfmNPQatDc2zKYTzhrAHukRQpi3zXLq+RnmmWo5ts52AJdSy7mSsFObWVfembl4tK8i5EIAABCAAgfohgMCrn7ViphCAAAQgAIHUCa
g2Xb/96uoZrFrPLs5k2m0XU6UGFkVYSQYlVb8sjEeYtJO48xswFElouqyni8ALXicNmac1W9jRk1sTjeDz5SXwgnPQ66mtpdF+NRjV0CPV1mUncwwEIAABCECg/ggg8OpvzZgxBCAAAQhAIFECfhMKiSVFg6U5iiA8gs+3zow2M3dRd+KP3GqjstqsrPQj7YLSLnizNV3gpSHzau36muRiF239JPOabHSnojwlyxkQgAAEIAABCKw5BBB4a85a8iQQgAAEIAABZwLqGtvTN2B6bWps2tKuVOK0tzWaJbaGWRFGkgJP0k7iRF9qSiFpV63WX9EEULU1UQReT+8rEYTVji/3/Voi85Jcs7jz98/T+sl5d/YM1HqpxM+X6Fza2e8JPaX5kmqbOGIuCAEIQAACEMiUAAIvU9zcDAIQgAAEIJAPAb+eXa1NKGqdfZZF/13mWms0V5i0E+OVjoGMftpjUVKKqzGTwFN6tVKtkxpRZV6ta5bUvHWdqCnFSd672rVKOSm9tsXKZe05ZF41enwfAhCAAAQgUDwCCLzirQkzggAEIAABCCRCII0mFLVOrLFhnJnS3mQWLStGBF6cemqSHxOsBAlG2kWRdkGGCLzVd1SpzFOEqKIY/SjRrBuPVNvv0yY1m24bfefXNKx2fFbfr8ZJqbbDHW2Hm3BQNy+rleE+EIAABCAAgfgEEHjx2XEmBCAAAQhAoHAEFBkl4aEP5ZIfRUvtqyYWsgbq2tHUl3bNlqtkUpdlrM68taYf15vAmzG52Szvsl2JE4zAK7fmYTJPzKdObE618UiUPSgeHTZNdWDQMeQyysVrODaqKNf7xbDQG28abYdbBgQgAAEIQAACxSOAwCvemjAjCEAAAhCAQCQCakIhaResZ1fk2mppd36NAm/mlBaz2NbjCxNxaUm74PwQeG6rFZR5OmOFjXoLRua5XSX5o1wFcPJ3rnzFWprFNFmB12ojTKmbl/WqcT8IQAACEIBAZQIIPHYIBCAAAQhAoM4IDNkIsL6BlV4TCqVuhjWOreUDfNo4itSEoDSCKgtpVyrwJEyWdfWnjT2R62cZgRc2Ya2PV4fPCusJLY1WvA554jovmRcnBTuRhahyEXWgVbRrrftKqbZtlrMvmkm1zWL1uAcEIAABCEAgnAACj50BAQhAAAIQqAMCUevZFa1ZRBBxkZoQSEhJgkqiJZ0e67Ktiixaw+YvXktX9NecOuzCppzAC3Yx1j7XuuUl84oko4O80uiOK5mnjrZKtW2zX8i8uLuY8yAAAQhAAALxCCDw4nHjLAhAAAIQgEDqBCTtvOgiK5hU+yvKiFoDK8q1az22CGmHwUYUSp9VSmYSNe2iskHgRSPmdf216Z2SiKUja5k3zgqt2VNbzbwlPdEeIoOjs+iO66XYrmqCQVfbDBaVW0AAAhCAwJgngMAb81sAABCAAAQgUCQCqmfX0zfkRYXV0iChaM0igozzKvwvqTnRphYqYmul7Tkgaaduslk1ZQjbZ/Um8CrVDMzideTKKwuZV+TXmLrjdnZn02xE695ow/NaFJlnX0/IvCxeCdwDAhCAAATGIgEE3lhcdZ4ZAhCAAAQKRSCsCUUSEyxqel+WddQk7ZRe2do8LO1UOy0oRxWp1KO/ixjhmMT66BquQiqp+9V6nbyjJ8Uras3AoMzT83f1Dkdb1tpJV3trmu2Iu7Cjt1asiZ+f5zop1Xa4o+1wIwxSbRNfXi4IAQhAAAJjlAACb4wuPI8NAQhAAAL5EVATCgkjiaQ+G3EX1oQiidmp1tz8pT2euCrSSDs6qJq0C7LIItWwEnsEXrSdqeYMEkTLbXRZnJGkzNO1gvX44swnrXOK9NqXxBsWeuNNo601yYAABCAAAQhAIB4BBF48bpwFAQhAAAIQiEQgahOKSBcvc7DSHZes6DMDg8UyeGlIM6UzShK0tzZ4NDp7Vo+0K8dz8oQm029rDaqLaR7Dq8Vn5xxW0y2P+VS7Z56RXZqbmjNoxB
V4weerVeYVVb4WuTYfdfOqvcL4PgQgAAEIQKA8AQQeuwMCEIAABCCQEgGl6PXbry4rkySJsh5ZpqpGebakOmT60k517CQtVDtQ6ZFRhGVSc4ny/KUSqd4E3sKO/KI6tV4DNmQ1aeHqyzwJpobx453SbOOk88bdJ1HOK3JtvuBzSLhPmtDoRQirhh6ptlFWmWMhAAEIQGAsEkDgjcVV55khAAEIQCA1AmnVs4sz4bzru5Wbcy1RVElIu+C8aplLnDUpPaceI/AW2LTsvEYa0Zulz6I95kfXVZJ5ee+dcmugPSUxtmhZX17L5HRfpUOL9bKufi8tusnOW1KvzX4h85wQchAEIAABCIwxAgi8MbbgPC4EIAABCCRPYLhr7IDptXXtaukcm/TMlB6q+XTabqtFGlHrmOlDvjrHqoOsIu3UgEAdZKNE2qUhE5NgisCLRjELgRecUSWZl3f6dTlyRU3tLZ1vpfcn6uZFe11wNAQgAAEIjA0CCLyxsc48JQQgAAEIJEhATSjGWZOk+nJqRFHUUdQIIRfB4Es7pcfq3yXt1EG21s6hpWuluSh9L4maanH2Qb0JvLw7G0vgKSU96X3gsnalMs+Yld6eXBGzoYbLPeMcE4xsi3N+VueomU23FfHVOkDr9dmiyDz7WtXrhQEBCEAAAhAYqwQQeGN15XluCEAAAhCIRKC0CYW6PM5bkl8qocvkXUSZy3WSPqZc986spF3wefKuY1Yv6Y4+s7wFXlHqOmqvTrcCSiJf6Z8SeV5XaVvzMu9R1MjbUi5xmuyI9XBH2+GutqTa5r3buD8EIAABCGRJAIGXJW3uBQEIQAACdUVAH8ZVLF/RX6VNKCTw5ttaYCrAXtRRVDkUnFce0q5U4EkE5NUFtqhrVG5PF0HgdXT2J5I+XevrVjJRc9F7gC/LmxrUACNfmZd1mnFcjrW+h1I3Ly55zoMABCAAgXolgMCr15Vj3hCAAAQgkAoB1yYURYkEqgShsWGcmdLeVLhi9pJWkgyqz5dmeqzLBsk7hbXeBN6sqa0mzyYWitpavLyvELUmxWLRst7V5hJMs81L5tXDe5NqWc62/JKMYpaIb14VmUeqrcu7H8dAAAIQgEC9EUDg1duKMV8IQAACEEiUgOrZ9Q2s9JpQKAXO/qfTcK3f5HSxlA7Sh+SZU/IVLv6jBSPtJDY0JGLyTjksl86b0pKMuiwCLxrpMGkW7QrJHa25LOwoH4Wbl8wrEqNytPXLhWkTmy2/3uQWJHAlRee1tTR6kZGqoUeqbSqYuSgEIAABCGRMAIGXMXBuBwEIQAAC+RMorWcXZ0b1Umcqz5RHCUTVq/JqVtmoOwlS78umJBelhmDeAi3v+0fZ+0UQwtWkWZTnqfXYKK8tsZtghVJr83iTdmReramptXJxOT/L+pySeYrMa/Nq51E3z2V9OAYCEIAABIpJAIFXzHVhVhCAAAQgkDABSbteK466VWje/lnrqJdOj1kLj0rSLsg8ivyoda0qnZ+3QMv7/lHYKqJsxuSWXFNoiyJ+a5GZacq8NFJTo+wR12P1/imxlkf3Z0m84UYY403jqmhg13lzHAQgAAEIQCBPAgi8POlzbwhAAAIQSJWA6tn19A15UV+qt5bkyDv10vVZ4nR6dL22f5yrtAteN+9aav5cJKVUj2/Rsr6oj53I8XnfP8pDFEHgFUX8JsUiaZmX1Lyi7Is4xxal0YbSa1uszFNEIHXz4qwk50AAAhCAQJYEEHhZ0uZeEIAABCCQOgHXJhS1TiTtGk61zs8/P82C9q2rIlkUzdJrO/b6KbIunXmLUqdLwmP6pPRqcVVbRwReNUKrf78o4jeNBjFJyLx6iehUDdHO7oHca2AGd5ciAodT/hutzKNuXrRXJkdDAAIQgEAWBBB4WVDmHhCAAAQgkBoBNaFQTTXJoz4bcZdwoF3ZeddLqpoiXXp6h+vOJTFqkXbB+0ssdnT2m4HBZCMjoz5j3hFL9Sbw8padeafw+vsr7Q
jcuDIvy9pyUV9rweOLIvDLPYNkXpOt2ymhp9p5NMGoZbU5FwIQgAAEkiKAwEuKJNeBAAQgAIHMCCTRhCKJydZDsfhJbY2e1OzsGYj9yElJu1KBt7yrGBE4eUZ11ZvAyzvduCgCL0tRFkXm1UttzqKkQru+Kapenti22GYYWg8GBCAAAQhAIA8CCLw8qHNPCEAAAhCITKDPpmj226+unkHTbxtSFGEUJYqsEou4xeIVYTRBdaHsB1fVD+yyUXxq/pFULcEipdDl2RgBgef+Si4SKwk8dZNd1tXv/gAJHFlN5tVDd+y8o17jLoNft6/XRnu32Y7C2gOqoUd0XlyinAcBCEAAAlEJIPCiEuN4CEAAAhDIjEBW9eziPpAkVLeNbEsqPTXuPCqd50XP2Q+aS1dUFw1pS7vgPItSxF5zyjMaqJ5kRt711fK+f3D/KrJVI48uqv48wmRek63dJtnebb+KOvSe1Gaj2ZYsz6dxTFwuYWm/SrVttlF5SrNVlB4yLy5dzoMABCAAARcCCDwXShwDAQhAAAKZERjuGjtgehOM9kpr8vUQ7VJNemQp7UoFSK2pvUmta94ReGmlhfrdgRVJqdQ/vytzV++AcWk0Usq32l5Kaj3KXSftunNR5q/XviKBiyLKfJnX3mrrtdn/kMTz6oLaqOWijbyiF2vl4CL6JfFUN09/NtoITQYEIAABCEAgSQIIvCRpci0IQAACEIhMwK9n19QwzmtqUMQPnOUeqh7qTYV1y81L2pUKPP13nhFM/nzyroGXtMB7ZX0bvMYuXvqzFTn6++Eum/bvB+zf23T0KK+3vAVelGjSyG9EEU8oUgRpcOray4uX93rCtrV5vJfm68s8Ccc44jYimqqH18MvPkofIk6krNJrW9QEQ6UI7GuPAQEIQAACEKiVAAKvVoKcDwEIQAACkQmENaGoh3TU0geth1QwRebMnNJqU2j7Uq1pF3UTFCkKJ8+OmHHEQBhrXUf1CpXa6dcsVARWmLAJRuZJ8KywaeCKJKtW39ATgzbCyyUdO+p+cDk+y8YR1eZTpBqOwbmWRpOWptn2SOiuqiOal8xLujN2tbVK4vu17j2l2kqcT7Cpw9TNS2JFuAYEIACBsUkAgTc2152nhgAEIJA5AUX6SBKoEUJYE4oi1JSKCiUsui3qNdI83o/E0odGNQCRqEmyEUUtc6/1A3Et9y49t54FnjhKDCiCVWuriMZqIi74/BI8iiSdYIvyDw4pfd02iimTYlsEgZdH44iwvVbEBjZay9k2Am/ekp7Ql0dRZN7MKS1mif2FwsCgbY9dJyPJn0+SeU2romFVO4+6eXWyCZgmBCAAgQIQQOAVYBGYAgQgAIE1lUCUJhT1EM1Wuk7VPjDnsa6SipIxSp9ThI3S5/Thc/7SnkKkz/lMilTPTDJGUWVRxFeSaxs1hVdrPNFKN0XcSdop0i6JRirBFFtJ9k4rA4MptnmnsMbtqJzkWvnXylP6lnueKNGceco8RQkW7f2o2h5JM2Va9fLUCMP7k1TbakvB9yEAAQiMaQIIvDG9/Dw8BCAAgWQJDNmuBH0DK70mFJIKalLgOooezVbuOYrwYTRM2om/L6SKGC2Udz214HrmzcdF4PkpshJ3GmlGU0rutFiR0G7Fr59iq/2kqCFJhrxSaJOMgnJ9Xyp3nNZsYceaIcWzlHlF/KWHy17IStgqOq/N/gJGkbWk2rqsDMdAAAIQGFsEEHhja715WghAAAKJEwirZxf3Ji5d/uJeO63z8koHk9BRRJIi7TSU+thpU2TDosgkqJZ3rR5NlRYP1+tq/opqWbSsz/WU1I7Lm08lgRdsOKJoO4m7LFMP/RRbpfqNs/+hNNvFy/tyieaUwBuwvxUoQufXIr5XJZGWHpR5DeNthOeqZidJNcCo11/U5LHeft08NSSROCfVNrUfAVwYAhCAQN0QQODVzVIxUQhAAALFISBp12tlQreNypFUSGrkJcNqmX+WxewlvbxC6DY6Qx
+0/Xpl1YROmulfcdlFSfeLew/X87Jcw7A5lQo8sZGsUoqs35CiCNJqop2T9p4xwyIvaylclH3sN4ZZYNPSizSSTjEONjtJSuYlIRmzZi7pOH1Si8l7vSXx9P6vPxtt8xkGBCAAAQiMPQIIvLG35jwxBCAAgVgEVM9OwiiYmhnrQhVOqsdOtJMnNHmSRdFvaYy40i44F8kgpTOnNce4z13aMTPudWo9T2JInTmD9d5qvWaU8/10TF/Oas3VjKIoDUf8Z/HT+hQFWJpiq7mmwa/z/sdHUOr+42dON2Yt+5XjKJJ8DmLQe5Ei5dKQvWEyT7/EKdfpuNzyJC0Zs9gGRZSOSq9tsxHY1M3LYgdwDwhAAALFIYDAK85aMBMIQAAChSMwLOwGvGi7LAr8F6nGleti6AOpPtAv6+p3PaXqcaXSTnJEzSjiCpKifmjOIy0tDH6ekV1KkV3LduVUFGWfleS1rHPVjVXjAb7Ak1z0h59iK5GgUamLbZTbL7npHtM/f9Fqp+h1MWQ7s6g5y9SdtzHNc9aKcsnEjlVE1pT2YqR/Bx9K+7jHvk8k0dCkEqxSmadoTO1bF5mX52st7gaQGF1pN11w38e9VhrnUTcvDapcEwIQgEAxCSDwirkuzAoCEIBALgTUhEIf/vRBTDIhShOKJCZcj51ok5qz36RAKYr691qlXXA9E0PyBQAAIABJREFU8u4eWm5vFKURQNZSwV9rCWuJcdW4emlxdy515aK8bsMEXvB8v4ut9rAiweKk2PbNe9l03v/EKHmn+zRYcab3KAk8jbwkXpE6KAf559GMJarMyztdPcp+94/VnPUzMY3IxjjzqXSOZJ6azSiaV3UrqZuXNGGuBwEIQCBfAgi8fPlzdwhAAAK5E0iyCUWtD1OkzqSuz1JLNE6a0q5UrEya0FiIhhHBeWXV2bHaWmbVHEECLChoFdEjgVeUSEQXTjqmWiSSpI6612rP6U8/OsslgjQs8s6flwTeoI1UDI48JF4RUyrFJO/Xk4vMy3uO1fZ42Pfrcc7+c3gptquaYOjnKwMCEIAABOqbAAKvvteP2UMAAhCIRUAfpPvtl+p+KVKmKEMfAGdPbTXzlhSrOHwlPlHnnJW0C865qJ0f84gYClvLNGsEiv1Em2athhTloirXNIEXZBzsaqq/Vzq+ajH6UXTBY1XvrvOBJ8q+3MQyrGHLrE/tkelbmASexGSSafNJPEBRakrqWcrJvGkTm83cRd1JPG4m14j6/p7JpGLcRHtWEXl6/enfkXkxIHIKBCAAgQIQQOAVYBGYAgQgAIEsCKgJhVKAsqpnF/eZ9CF0vu3uGPYBP+410z6v2pzzkHalEqWIYlQCL06aZdLrmXTtRX+9Je401PShUkOKNVngBdcqLMVWv0DwX+sLfnN9xaUtJ/Dat9zUtL9ps6S3RdnrJb1fkph4UTvjlso8pYvrZ5Frzbwk2NRyjaL+8iPqM5U2W1Kq7XBH2+GutqTaRiXK8RCAAATyIYDAy4c7d4UABCCQOgHViuobsDXtMmxCkcRDFUXqRHmWmbYJwZIVfatFBwWlnaJ1VEMpzwYFRZREeXd/9dc4qSYfXq1B+6HYj7aTuAuLGCvdW0WKnKq075NMNfbT5YMptv/79bWxBF7T7Blm2i7viPKSrenYNLu9xp2Y3m+m21ptCzt6414i9fP0+mi3dR/1PjjcPbXBppC7N8BIfYIhNyhqunRUFmE/o4LX0HoMC73xptH+vGJAAAIQgEAxCSDwirkuzAoCEIBALALBJhQSRvU4SiMF6uEZfOmoaCJ9CFKdM1/aaR3S7grpwqiIdZyybh5RjlO15gyV+EqcSGxJ2qmeneRE1GL3WpsFNuq06CNJgec/q59iu3LhYvPCtXd63T6t0wkd5SLwdHCWabRF2bdBSEVtrBGcY6kM82slTmhVSuewzEuqi3FSr6V6/HlU+ux+GrBrZLt+drXan2HDkhWZl9Re4joQgAAEkiCAwEuCIteAAA
QgkCMBNaFQWmy3FUXTbX0h1/9Jz3HKFW+taChJkaLVlyo3aX04mmq5N9o5a96esCuItAvOuSj15oJzSrP2XJT9HTXKZvX6XuO8pg6VUmSrzWUsCzyfjerfdT34hFc7bbz9h1pVBDvO6jh111TtzrCRpcArYifVqHu42p5M4/uVZFhQ5rXatE79MqQIMk9r3W0jaYvwS5i4a1JLGrBSbdtaGr26efoZR6pt3FXgPAhAAALJEEDgJcORq0AAAhDIlICaUCjKR9Ig2ISiHtNPS8HVQydaX+Ao2k6FwSUV9LW0sz/TfRDlZkWUDkWpJealvtoPqEtXVF4/7U1FV2rdk0yJrheBFxZ51vKn4YYEAxs2msGNmqJsyVHHBmvgeSLPCgP7h7GBjV5kngR5WEpy1im0RZThSaWB17SAVU52jVwskswrYuRy1DXS3mhqHFf1/a3adSXzJNH9n3vIvGrE+D4EIACB5Akg8JJnyhUhAAEIpELApQnFmpTuU8ROtBI9bfbDkKSdIh79aLsW+6FGf79keV8qa5/ERV0/PCdxL9drFKWbpyfmbBpfmMCTNNIH1nb7fT9FVuueZJOVehN4fY/2mPZfLQ9d5t73thl9xRnlmlhIHIwbkXmjU2yzbmJRRKlTxLp8pXugWh22sD2Tp8xbUzrQpvXe76XYrmqCQaptnHc8zoEABCAQnQACLzozzoAABCCQCYFgEwoJA0WhVBu+YCqySKr2DPp+ta6uLtdI6hi/MYEkTq+NsvOlXVDg1EPUYFGi3YLr4hr5ltRalrtO2PpJLirabjgtesh02jQ6Cbw0Rj0JPHPGEjP0ZHVR3fnZSZEj8pRG2/nAE2URKxVwyL7w/BTblXY9tCRZps9qckVcL0maHhuVXeRUz1rf17OWebWknqbxPhH3mtqvi5f3OjXUiXsPpde2KCLdvmci8+JS5DwIQAAC1Qkg8Koz4ggIQAACmRFQPTt1ju3qHa6pFXWsKR848k4FDko7pSirMUGlqKt6iNQoYm3BohTe9wVeh02BnmgjKf0usll1DS5iRFfYe8/k81aYcU/1OUcfxpF4S266x/TPXxT61tdgBd7g4LBE9Wvlrffh95iGmdO916dKC2QxitjROU50Wxas/Hsk/R6Zhcyrh7qC1dYwae7V7qfvK2J20oQmG7Xc6HI4x0AAAhCAQAQCCLwIsDgUAhCAQBoE9KFT9dOUkhlH2pXOqdYohzSeMeo18ygcHqxv5ku7KI0JxL2Iab8++6JEuwX3QhEiFxVhp0g7feDU63CFjbSLsu5R93bY8fUg8FTrbsLtPV4EnGv6sOridX1ucmREYRLPr4nnCzxddOrO25iWtdey69do05zH22jJ8d4vPxQt6TrHqJPTPGZOKV7X4KLvoTR/uZSWzFsTSlLk9R47pR2BF/W9heMhAAEIuBBA4LlQ4hgIQAACCRNQPTul5SlqJOm0vLyj15JAlVVBdl/aKeLKr28WV94UPQImrw9ylfaD5Nl02+VxYUdvEtsm0jX8dPMmG9XVb6O6lAKWxzw06aLLF81x8tGLvXTiKAJP58WJwvPOK0mn9SPu9DpV04r2N21qmuestdqaSxIp1V1RlGl1MRWDGZNbzIKlPZH2W9oHFzEqMPjMWUaz6b1O+0BivpZ9kMcvkpLeJ3lFXq81pYVU2qQXk+tBAAIQsAQQeGwDCEAAAhkRGBZ2A6bXpsYmLe2Cj7AmRA2kWcsvSWkX5F50cZqnLCv3Estahuh+qgUoYStpJ4Gubs5Zz6OUR90IPNWgswItSnRbLU0tfJGnP9X9UoJuYNrUUeIubH/5jUmaG4c7BieVYqs5KLpo0bLqdQAz+tHipRMXMSow+Px5iaRaZF49vC6r7bG0GlhUu68i0ulSW40S34cABCAQnQACLzozzoAABCDgRMCvZ+d9cLQRdynVvx81F0U6KPqgnhtZJJ1ulZa0qydxmkctJJcXStqpx3puPxpHom559+gU2bwFiKI3F9sOxm
mKfZe1KHeM0mf1pRp0UQVe3DTa0rlUi+B6rmGuuav5X+b5hpdGPcYu499mdjRvNU0N470UaUnbuKyLmIpeRDlfugh5iaTgPKLIvKK+X0Z9HechIfU6mzm1JepUOR4CEIAABBwIIPAcIHEIBCAAAVcCtTahcL1PpeOSll9JzCnONWpNCctC2gWfK6u03zgs/XNqZVrLvcudm9acgjUNJdGrNaTIs7OoojeXruiPLZXSWJfgNYMCL1iDzvW+y06a7npo2eMk8CQGlnX1jzpG4k5f1cb+vXuY1za+uqYU22oisdoc0vh+mhHLSc23aOmo1WTemvBzNC8JqZ+FilJlQAACEIBA8gQQeMkz5YoQgMAYI6AmFEqL7bFRHaq3U4QhKfLS4u5IqW5FmHdwDnFqygWlndL8/O6xcaNtojCphw/ReURjVGOoOS3s6ElkryoSSYJFta/8moaVugcH55aWSKz2/Pp+0QVew3/7TfuvlnsReFEFXq0ptD6/coL8wrbrQ6PuynHft3t3s/7gOl59LkVm6qtvwAreHrcutkUU9UWUiqX8i/je488xTObpe02N4zyxXq8jr7qn0yY2e+/DDAhAAAIQSJ4AAi95plwRAhAYAwTUhEJpWGnXs4uLsuj12FyeyzViQ5ESKlqvumZZS7vgc+T1YcmFpX9MEfdFEumjvrSTwFOtSXUhjSptEXiVd5LXxCJHgafahRpKgfaH0mYvavt9lJeAWW9wbfPJ7j1GzlGUkt/F1k+xlfQdsDUSw0bYPCJNIIWDizin0sfM8/UVBXlQ5vm/BFBn4yh1H6PcL81j86o7OMumzzbaaFkGBCAAAQgkTwCBlzxTrggBCKyBBFT3qW9ARe/Tb0KRBL41oZFFpQ+lknb+h+48pV1wrfKuo+aybyRFO1UDzkaNFmVIKnZ09pcVJuXmGYy2VOfgaimy1Z6XFNrKhJRG2/7nnlHrtGj882ZxwwveydMHX21mDK232oWSSJ/VBfV+MGDfh/WLE39Ejb7zz/Oj8EqfWK9hSY9KXWz13qpI6+A8qu2ttL9fxDkFnznvJjFx+Ou9st++T463myLYzTaNzvFx5udyjp7Bb9TjcnwSx6ib9yzbwIIBAQhAAALpEEDgpcOVq0IAAmsAgWATCv1PcD0NL6XKpogttWKkXkdpSmpQ2umZFGmlyIhykTJ5PHfRo0yK+EE/SlSgRIAiLSVzJG7VkEDyLmq0XdjeSDKVN+reK3oKrf88049bMvJ6e7LpbvNU0z2hj7pJ/zvMa/rfaZJKn9VNwpognDTxnKiovePf3fdm76vSCKbYStj54lvzULmEHrvvijJco5Xzmm89lBcoZVOa8luaZqvo+1qaoWSxFnmkLUt2TrUptAwIQAACEEiHAAIvHa5cFQIQqFMCknbe/5h7nWOL8wEtKs41oQC3/wyKrGpvHa6nU0RpF1ybOHX7oq5tLccXMdXORYj4AqDJRl7qdSlxl7S4zePDrr+WUSRmLetf67lrLRxn5p1+n3my8Z6RqLty11zZPM68Yb/Pmalrb1jrbb3zSwVenPTZ4ESOXvF5p3kFuxgrxXbImmOl8RYpAq/o7zv1UKMvuBmqNX+oB5lX7RmcNn+Mg9S8QlGsDAhAAAIQSIcAAi8drlwVAhCoUwIvL+uta3EXxF70aLByW0RRViosr9/kN9kowhX2w3LRIu3Kzb3oIqZSJ8+8XrLlogK1DyQcFXHXb+uRpZ0KlmcUXNH3jb83JDmvOuUwM27ZkBnXF14jTscOTbL1KO3XlDkbmC13PTCRrSWBF2w04dp5ttzNXQVeqdiZOWU4PXBwyDYuWhUFnHd9tDzls8vi1ltJhyi/ACuqzMsr6nGtKS1egxgGBCAAAQikQwCBlw5XrgoBCNQpARW/Vz2uNWEoKkPPUqR6Zy7STpEDfk0z/TZ/8fK+RFIks1jTIqaoBp87rw91ldhL0tnSZl7jCT9FVvJW/65Ip6RSZKutPwKvGiFjnvjj+Wbe80
8PH9i70pN445cPRyor4m5li/2y4i44kpJ4YZIzbgqt5hdH4Ok8X5b5v2jQLxsk85Z35Vdbsui/rAlLf66+2/I7Im7EYJFknn4WaSzryu7/Z+xbtpkzvS2/hePOEIAABMYAAQTeGFhkHhECEHAnoO6yi5b1uZ9Q4COnWvmlyCWJkSIOX9aoYHxQ2gWFY71EJvl8i5iiGlz7InbKFTPtBQ3JEEXa1dqQIs5+z3Ov5XnvKKz++ptvee8pUce2B3wv6imjjg9rdhJX4LnUwCs34Tm2QP+8JT0j39Z7l+qNttt97HexlXTO6hcn9dAgougpvqVrnUTEYN4yL4+6iC02WnrG5JaaX+tcAAIQgAAEyhNA4LE7IAABCJQQmLuoe41gojo0qhlWpEYWpRFW1bqHJvFBKsvFLGKEW/D5xX+67Uy4sKM3Syyh99JcFOmiaDuNvOuK5SnR8ry360Z47r7bzNwH/xxL4K2/1fZm/a12cL1V6HGSQKXRuEl3oXWZYKnAC57jd7FtsyJatfKySLGNku7p8nxpHCNm85f2eI1n6mEkLb/ykHl5MJ9ko/78X2LVwzozRwhAAAL1SACBV4+rxpwhAIFUCawpdfCKEm0VVdoFF1cSUudnmQZUy+YqCvNKz1BJQNTy7K7n+tJO6yrBofRD1TpcuiK7VK+wuZbWWHN9niSOy/PervOXwHvpoTtiRZYlIfDC6rzFaWRRS/SdBJ1q4C2wMqraKJU2aaXYFv2XBnk1U6i2PpW+n2ZNwTCZp6jjJJvy5BWVSf27WnYd50IAAhBwI4DAc+PEURCAwBgioLpxRU07jboMedVGqkXaBZ+xHoRYcL5RPuBHXcukjs9D4GkdFWkXliKr7yn9cImtdZjncOmGm9b8EHjVyUqqLOwYHcUVReKtN7i2+WT3HtVvVuaIOBGsek9Qau2kCcMptkoPl7BJKsW2iI1pgvjqIUKw9D18tt1rwTTp2Bumyom+zNP7ooaaNSWRfq09oWtm/Z6qny3jV5VDSIsZ14UABCAw1gkg8Mb6DuD5IQCBUQTWpDp4WdYeCko7fVBNopZZPUZv5CVNXV/KYamIrudGOc7fD0qpUurcCluLMawhRVKSdsXcHtM5d/XU4PZ1WszEdYa7hlYbpYX25z9+kel8+SHTuegh79T2GVuY9rW2MLM327fapSJ/v14EXp4ptJXEs4vEq1XeaVFr3at+iq1qhWn02lp5+mVRLamlRa+7GbchROQXUUIn5CUctbfUcXtCS6P3JLXIvDxKT+hn/syp1L9LaBtyGQhAAAJlCSDw2BwQgAAESggM2ZaYWfz2PQvwamTROzBkum3URxojTNpJ3PXYD6ZJjTxq+dQy96wEWdw5pl1vzY/+UP1FCTvVthtUm9kyI05UU+ml/nvdfNP5Unhdv/a1W8yst0ypKvIkQgbsPBe+eJ955p6jKuKdtdknEhV5aQo8P2XZb7AQVxgtfekZ8+gt58WqgffG9x9gpq69Ydwt653nIsbvav6Xkcx7vuGlkXspZVbybv3BdWq6v05WumqrjW5KIt07LMW2f3AosswreofXeiuDUAThWKvMS/s9PuyFpHVW13gGBCAAAQikSwCBly5frg4BCNQpgYVLe+0H1eQkVF4Y0vjwpCgSpefoS90XPWGXsLQL8srjw0gt61X0+apAe6eVakml8ImVJJwEmCJI1KVU+8FVGteSdqyou2euX+C0XBvuMauixNP8OxY8aP5z++FO10tS4iUt8ILRj36jGL2f6f1AXZ/173Fqsj35p9+Yl557yomPf9CUORuYLXc9MNI5YQcrhdal9lzNN6pwgTTkTq0ptkk3XEian6LBtN9c3w+Svn/U6+URvVZpjnFkXh6/9Jo2sdlrSsSAAAQgAIF0CSDw0uXL1SEAgTolsKbUwas15ctfvqC0U4fF7lXSLosPZfX4AbDIH1iT4ulLIgkh7Q9F2oWlyLq8BcSty/fQ2c+7XH7kmEoSb2DZf8zjdx5hFI
HrOjZ8x8lm4lpvdD287HFJCTyl/2k9JFIrRT/67wvj7cLptewaldfQ+aK5/bKzIj1vEtF3tUjeSJOtcrAEqEp8aa+nMfScSqFsbR5v7+O2NkWP+C26YCxdxyLP10Xm5dXAYpZNn220abQMCEAAAhBIlwACL12+XB0CEKhTAuqOuXh5eEpePT2SPhCuPb3NzF3UHXna5aSdoqtqqdkUdSJpRBFGnUOU44tek6rW+VVqSBGFU/DYOAKvUtpsuXkonXajPWeHfvvZvx7l1buLIvB0oS32/EPcxx45r9Y0SD9NVtJHtQZdxbqEnyJpJY38SNpykZm+GHji0UfNgzee6/TMScg73SgvKVH6kHrtyO9m0eSoNMVWUbMqh1D63luuuYfTAmVwUJodXdOYfr3Mt5zMa7CvadVYTCLN25WvpPYc+/8ZDAhAAAIQSJ8AAi99xtwBAhCoQwIDNuVngU2jXRNG1EYWXp2nVSmy+sDof7DPUtoFuScVRZjVWiZZJyuNOcfpWimBIpHa/dIS0/HCIk9y+YFqDe2tpmXOVNM40a1ZRNgzxZEQUaPv/PuGReGtePlB85wVeAoljCrwkojCiyPwgmnLiraTuBuw6ctxRjDyS+frFxgqoh98zQclmurhVZN4Sck7X+CJ0aJl+XcqVuMJV0EaZy3CzvHfA8O62LrUBkxqHnGuU/T5BZ+pHpsmaf5BmSeZpp/bSZdJqLT2+v+F6bY0AwMCEIAABNIngMBLnzF3gAAE6pTAmlIHzyUlKCjtlP7ZZZtexE2HTHq58+oKGPc5ii4cNb92G0m0ZHllGeJHYE6wdY0GO3vN/+5/zsqt8lQk8to3mRMLW9Q0wPn/7DAL/tUR616z3jzFzLZNLYJDHWdffuLiWAIviVp4UQSeHwGpNNla0pbLwfMjv/ShXBJPAl9iMCwK7rn7bht1mSlzNqy5YUXpRfUeoAL5RRB4Pfa9MckmPVE2cVC0Nowfrj+qdcq7NmC5ZyhK5KQr43r7WRP2XHov1c/w5sYGL91bP8u1T5KseVp6X7029QseBgQgAAEIpE8AgZc+Y+4AAQjUKYGlK/q8//mt91EuBbXI0q6UeT1FcRT9Q2s1wViaIrt8UadZ8vhcp5dBXIkXtfFHWgJvnDUklTrmhkHIQuD5MlUpnJqf3pfSjgLTPfXe4QncVffUv+ch0Vyls9MmXXXQ7dfNGzl8g80mmg3tV7URdZ9Wu14t3/dF60S7J3r7B0dETV6R0mHPksa61cKs2rlpNCmpds+kvx9sYOGnyCulNix6M6l7r2WlodaaAQEIQAAC6RNA4KXPmDtAAAJ1SkAfkiXx6n0EhU0wesf/UF6USLtKnKOmAee9ZkUWjhKMSnda2PFKiniwa6kEgNIxtS96l3WbrqdfER0uXONIvKhiJA2Bt9BG4KmGXB4CT2JuwEqyUin3Sppsg12PQS/iLur8XNas2jHBFE7tDdemF9Wu6/r9pNLSn3l8hZG4e/aJztBbb7/HbLP9nuWjSKNGiro+X9zjfC5dPYNm0oRGGyX5SlfwNCOuXOdbb0KsaB1oXTn7x1WKINR7ib8eSco86t9FXSWOhwAEIFAbAQRebfw4GwIQWIMJ6APQywHJUa+Pqg950ye32DS4oZFImnqQdkHeLmnARVqfon3QL2XjN43QBzql4DXZFMWwrqWdT82z6bM9kdFO2HhOpJp4SiGNkpqYtMBTDTw1sSiKwPObUuhDt6Rd1o1jwhbcF7/qWqtOt0rTW95lRa99n0x7JCGCJO/OO/XpqlPdYNN2c8Dhm4QeV7QGB6X1LEtTbJUCnbVsDYLLsulH1YV1OKDefs6UPpLr6yRJmafovhn2/y8YEIAABCCQDQEEXjacuQsEIFCnBOYv6ckl4qVWXKWRdvofdnWly+LDdq1zDzu/1s6pacyp0jWjRpRlOT9FaUhESOj229pm3TaiqlxNr2UPPBtrai2zp3qNLVyHIl8khKKkhSbZxELzfPj63cx4+z
rpsUKq34Yhdq8q+NdorUiT/Zpgo5vCRhJdaLW/1X5C0XX6d8lUrw5lBnLMdY1KU6/1iwHVUpT01FxLm164XtfluDiNV4LXdZV3/jnlJF6cbskuzxf3mEqCzE+f9GVrWGOSuPd1PS9KbUfXa6Z5XNEEbdRnjRNBWKvMU5q9auAxIAABCEAgGwIIvGw4cxcIQKBOCSy2hf4V/VIPIyjtgmmQkgL1HlkgWdBmPyhUa7xQlHUq2gdXP0VWH+YVpaOhvV2ta2lcgafrT95yA+fliBOpEycKr33tFrPRnrND5/XiYxea5+1Xr99eN+QoybwpDQ0j30mi/p0vxrRGYVGQzhBTPrBc7cRg0wu/Y3XS4lGSQKl6ikaMM771uQcinxaWTls0gef6PuPXolPqZFprFAa4nkof1GsH2iD3WnnHkXnTJ7XYKG7q30V+g+EECEAAAjEJIPBiguM0CEBgbBBQ+lFHZ39hH7ZU2vkd50rrZJVrZFHYByuZWL11ByxKxGBpQwo/qsslQrB33lLTO39p7C0SReDFFTRRo/A23GOWmbhO66hnmtfXZ25ZusSs/fDJpmnpoxWf2Zd47TO2MBu985TYfBRVJqE6tKrrQBaNKWJP1p5YrSFBMH1T9/Hfi5JoqlDL60k1726/fn7kRy+Nwiticxr9YqbTSk1XYRpcI8k81TNU1GtadRWDDRUiL0DGJ9Tbz5hSPL6AnL+0xyTxmnOVeVpjRS4zIAABCEAgGwIIvGw4cxcIQKBOCRSxDp6rtAsir9Z5tB6Wp8iNIUr5JVV0P866vNL4YHzZjqUuEZkDK3oiN7AIzjeKwIubIrlibo955voFTpjKyTudfMGCYcGjD/HT7z/RtHY8VvGag1NfZ967w2lO9w0eFFwbRdtJoCgKslwTi8g3SPEE732ntcFLxa82ghFfSdRhq4VPXIGnZ/zuL7ccedSw5i/VOKT9/VpqbfqCJq0U23qLaHOtH5f2msa9fpoCspzMG7LRyrOswGNAAAIQgEB2BBB42bHmThCAQJ0SmLe420bJ5Dt5/c/5hJbGkVSVTtt1UKlQrpET9fZhKox2relBWa5g1sJU66tmFBNsVJc+bPnRXOX2h2vNubgptFFr4NUqPP973XzT+dIrXXWDa6202VlvmRIaeafjHuhcYb+Gu5KqmYfqAk55/nfeV9joWO9DRl87TZ1m5jQ3O22rV4TWcFOK0iYycVKInW6c4EFRBJ5/W+1LRVfW2vTCdb+GPe65P3yqbNfZaniCabRZv6arzU3fV822hR21R1z5a9vcaLsdD9h6hvbni2tUX7l5pimUXNhEPSZO/bio90jz+Kyi7IMyr6XplXICaT4b14YABCAAgVcIIPDYDRCAAASqEHh5Wa/3gTvrUSrt/CLk1eqWlZtnPQmwsGdwSfvMeo3K3S+raJ1yKbLVOLimJGbVhTYpOaK6eMHRvk5LWXHnH+dH3+m/fYEXvIYv8iTtgmPL9nazZfvEsqj9uoNiLZFaKUXWdT2qrWua348j8ILz8dfYT92M0h3VtdZb2PPHqX/nXyco8GqVzGmsTVhNvituutX856mnzSNP/de75es32chsvsnG5qO77Fh1CmEptvpFUZyfOUXkVQmAS1RyVYAKt7mqAAAgAElEQVQ5HlDLayTutKdNbDaKXGRAAAIQgEB2BBB42bHmThCAQJ0SUMTM8q7qaWNJPJ4+9Os36X5R6FqlXXBO9f4Bpd4iJNJK+Q2KodJmJa570DVlNU4abUN7q2nfZI7rVLzjJKvVyXDRsr5I5yVxcDWBV+ke+88a3RDDT5Ntsimn/baLrN4/qkXK1oPASyrFMBiV12v5uNRwk5yIGxVWSwrtZw7b2Gy42bCkTer5k9izukZpTT5Ju++ceXbFy0viuYg8//p+nUZ1iI7axTariLCkeNZ7B9o85j9raot976aBRVJ7kOtAAAIQcCGAwHOhxDEQgMCYJtDbP5iqWNAHMT/9UR9ug7WxkgRfbx+oSp
+93uaf9AcqfZj2U2Rr7VZarSFBkH2UZhZx5J0vC6bbgvwLO8LTYJN8HZReKyjwxCVK6mBQ4AXXR9JOkUuuxeTHksDz+S+9tM/0/ceWAnh4uMt3y+YNpnnz8WbaPi2jljtqs4bgBWoReMEaeHEbraS1d4PS20Xe+fNQRN7xB38h0rSCnYYl81ykay1pz5Eml8DB9V5iIo/5q2/FnOltCdDnEhCAAAQgEIUAAi8KLY6FAATGLIG5i7oTffYwaed3CE30RoGLJZWmmNb8ql233uafRMqvPqSrflhzk43msrXZum3Tg54E0rmjRry5ROLFlXdad30AnTml1SywHRSzHrVE4H1mzhwvWlZyR1I17mu4aHIobA1cozarrV/3QwPmpWNWfz/V+o+3/5AUUL3RKXs3mal7vyLy9FpSN/A4qZyaT5w02mD6rK5RtDqFforq3fc/VjXyrnRNokTiBc8NS7HVvg+T3vUU8V1v9fpK1zOPn436paN+6cKAAAQgAIFsCSDwsuXN3SAAgTolkEQdvDykXemHr9m26Pm8JdlLkiSWPY8og1rmrbS/nl4bYRRRuPkpshJ3euawpge1zEvnlqbfuV4vLBpP4q5xYqtpmTPV9TKhx4XV86rpgo4nxxF4Wpf1J7Saj6w7yxN3LmmylaYjOdZo7ZWuU9SRxBzD5F3p80rijbf/mL1fm5ny8SajNNu1JreYxcv7yqYiN/Q/bVq7bjWN/cN13/wx0LSRGWja2Nz0xy3M7dcPdxp2HcHoO52TR42xantG9QS//oMzRurduT6bjrv8tB9EOXzUsX4qtJorDQ6NTrFNOgK5pslWOblo6dFRnzWP6HSVPNB9GRCAAAQgkC0BBF62vLkbBCBQpwQU/aGi61GHL2OCqY9xo3Si3jvs+HpvZCHJM99GabmmJibBLO41oqZFehE1NqpB0Xa1RHO5zjcvYVZufnnN5+Yli838/uEal2FNLILzlVzSa1pNqdXAYvPWCa64Kx7nIsc6H3rWdD783GrXaX/D+qZ9iw0SmUO1i7jMsdI1XORd8HwJotedNsVM3Xo4yke/RAmLwGvvOGuUuCudh0TeGee8z7kbbbD2nX+tuEK+Gte43/cjAnf7wtdjXSJuFF7YzYIptpJ5y7sGvOisevllUb3VVy1dgzzk8lpTWozWnQEBCEAAAtkSQOBly5u7QQACdUogSh28okm7IPJ6SmsK2ypJpKVmtQVdojr8hgeSdtU6lSY976LJUEXsLOzIXs7O6+sztyxd4uFVKl2YJNI6+emdQzbHc1ZTk9l52vTElqSSHJO4W3DpHaarRN75N59gJd6svd+TusirVeDNPbprpN5dFHAbXz/JKHJYQ/XXJIf8lE0XeeffSxLv93d8sGIk3gabtpvt95wz0rgiOM+47z0PP7LQPPzIy96l9v7I66I8esVjJW0uvP5mc8G1N8W6ZpICz5+ApKtfz7WlqcGsWFULMkpdyVgPU+NJ9f5zMY9oR/38UKQsAwIQgAAEsiWAwMuWN3eDAATqlIA+tFeKJiiytAsiV9TGOPspa1lGXXWTXu56ipQoV5co+CFX+0YRmd32q1qn0qRZKhqzUlpi0verdr085+NH4QUFntapwf5Df2ptVJvNHztNnWbmNCdX/6mcHJO8e/bYC6qh876/wQn7pyrxokaUlk76v3sud3qO0oPWPrHNbLjtRO/9139NKXW0Z+ENZuXSaPKqZ8KOptd+qbFF6djAdpv1O86GTTTq/jzt5MvM/CcfWu1Sj3Ztbd7w+rU8kfeG18+MxcM/SdLp/N/daC76/c2xr1NrGm25G/s1Nnttenm5FNvYk07hxDwEWFKPkUdpiRb7C6cZNq2dAQEIQAAC2RNA4GXPnDtCAAJ1SmDh0l4vAsQfQWnnfaC0XSe9r4g1z7LEkUex6ySfL49aP3Hnr/0R7Kwq9kqlnmDrBnXZdOw8U6n1THEjiuLyqHZe3vORxFs0NGBF3UpP3MnXSdyVpmsnLe
/EJUzgRZF3Pts0JV4tAm/JJb1mySV91bZA6Pen7dNsNj94qgk2EtJra/LLh3uNL1baBZJcdU2r71grXu031wjReU8+aC469admZvNoSagHfLRzK+OLvBO+tW0sJjpJQvHsK/5gLr3hlljXSCMCz59IafSxn2Kr97/SKMpYk0/wpDwEWILTNyq90G5/MbdoWbzXV5y56OewauAxIAABCEAgewIIvOyZc0cIQKBOCSxd0WcUUaB0R30QqRdpF8Rd7x9W6k1ArjOjzeue2d7aMJIiq/p2WUfbhb3kilbTSxFFnTblLo90Oz+VWQLvLy8vNc91jm70smV7u5nd1Jxo5F1QeJQ2sXjmmN+UTZst9xaqdNoNT/xUKu+wtXRhrVXgbfbFKat1KFbTiokdZ3vPqSy+catKca20v18JRkqGgfCj8KJCcqnR+Phtl5l7f1c9YnJh3xxzV8duXjReXInnC8WPHvrNqI/iHZ+mwCu3V/Tzp8X+IkPCST8/9UsM/dIrj9e8Dy1qR+5YsFM8KY9fak2f1OJ132ZAAAIQgED2BBB42TPnjhCAQJ0SUISHoj3qIdKuEuJZU1u839YXQSJF3Qr6sDVtYrOtldYb9dRMj1cEiiSv6kAp2q7WLqXByf9t3BPm3vFPjvzVuiunm1etnGHevnLTSM9YtHTkPISiv04SeFoj1e/yJeIDnSs8nmlJu+BihdVL/M8HvhtpPf2D04rCk5QZsHZM6d5RRy0Cb/onWsymX5i8msBrsR1n1XU2OCSH9KWoPEVRDr9fj55pWgKv44VHzM2nH2V6HJsd+RLvhG+9O1Y6rS8UP/bVeALv2wcfZDbfZOOoS+l0vEtTBb+LbZt9zWm99MsxNYpyjaR0mojDQXrtSSbWa1mJPOr3Uf/OYWNxCAQgAIGUCCDwUgLLZSEAgTWPgOrg1UsH1Er08/gf/iR3g0skTJL3c72W5OJEm1qkCM3+QSs67IdRRZoEi+67XivsuFJxF3bMXoNvN68yM5xuU0tKpNMNIh4koaj0ujiCKMqtJOt8cVfa7Vfioasn+4igUoG34JI7zELbuCLOmGkbWsza5z1xTq14Ti0CL2oH2uBE1tq3xWxyUHWBFzxnvA0OUmSe/J1tirqaFIoj8LRnVPNrge2AXW5cf9xeZkmF74edp3Tahg12ihyFF5zPf5562nznzOFoRNfx+k02Mscf/AXXwyMfF7XbeViKrd4LspB5RftFRlTY+nmoX2hl9Qs5RQrPsvdkQAACEIBAPgQQePlw564QgECdEphvC6ln9T/KaSEqmriJ+pyqlaa01LBuoVGvVevxfh3EINNgimxSUWVXjv+reXHcYqfpuko8ly65TjdM6KC096Vfg1CCtadvONqn9LXsEjmU0OOudpl6E3hPLH3IPNnxsOnrf+Ux3jTpE2Zq+0obIRtOKG4Ti1edMsGs/fa21Wp8hUXghd3Vi8izMs+2IvGivCTz4gq8YD3L0ns9/9erzAM3XBxZ4Ok6Vy880Fxz6YcjbavStM/jzzzLPPLUf52vkWb0nSbhWi+wdMJaL0XDTZqQXYptnqn7zgtW5kD9/JEsrdRgq9Z7lJ6vyPKpNgqeAQEIQAAC+RBA4OXDnbtCAAIpE7jt7vvMY08+Z9661evMW7fcLLG7qQ6e6vbU86i3OnKlrIsQQajC4W022q7JRt2VRnEF55uElPqfWWSuavhbpC3nIvG0DxQhuGR5dsXPKz2EajkpakqprC7jgftHR6ht+abRkWeSY1oHyTq/42+562cVBVh6f+2nVjvPpSuGjVgRI/AkNx9e+KA5+R9HWI5q8GEXq2RI4ulrqw0HR4m8OGm0rW9oMOt9v90rmB8s0h+sgeeyV3TMcFSeFXmTdzadLe+LVHet2mtFAu9vV1/gnD4bnLMEnrrS6st1hMl3V4mXtrxLqs6qn2Krjqdat25bKy+NFNu4stF1rdI8zv85lOV7uF6Leq9mQAACEIBAPgQQePlw564QgECKBP
b67LfMazdZz/v67ZW3mCMP2dfs8M6tErmjBIAkXj2PpD5g5cUgCSkWZ+5+owNFcLnIIN0jifpKP234Q5zpmkMHd6t4XtGKt7uykrh74P47yz7blm/a1my99XaetNNaSbC61iCsJU001iKtOsmLDrSNToos8G6fd7m5/PELbfRiuLzzn39O8xZml7VODpV4c4/uMj0Pu/0CRPJunZMmmHLyrL3jLNPY7x515s+ve86pXoSXXs96P3eRQqWCtXSt7/7xvubpZ5bG2gJKo91y130jCbxyjQuUTnvFTbeGRuOl2bQi+OCl3bdjQSk5qTTFVnUqewdqT7Gt95+FeaT/rmUj/rQeDAhAAAIQyIcAAi8f7twVAhBIiMA/7n/MvDh/kSfoJk+cYBR5d82Nd5nTT/iKdwd9/5hTfmVuufTURO6obnkvF7yBgsuDZl03x2VOrsdkGXWgD3hqbKC0If8Dv2q0uaZR1xrtGCf6zudYLQpPzzZzSmvFul6ua5LEcaUSK+yaN990gZk/77mytxtOmRxn1nvVhmanXfb35J3rWumitXRarYVB2LMXqYmFUmbPfPhYs8KWgBtw8G+1Sjxf3olpOXkWJwovmD4rgT382m4caUxUrhtqNbksgbfYlleIWgNPzyeBt+9hh0ZqZOGyTyXyNFTvLq1mFWF7vprsrOV1knSKba3vz7U8SxLnqpyEZGaPfZ/LYihCes70tixuxT0gAAEIQKAMAQQeWwMCEKhLAstWdJnPfPUUL8pukhV3jz/1vDnvtCPNNTffbW67618jAk8Pp+M+uMu7zAfsVxJj3uJuW08piSvldw39j39SzRWyfoosOtH69dIm2FQhdZFVpE65D/eVnr/Wubo0rih3/7cNvaZqZ9p1ZrSZuYu6s17C0PtV+zBdKfJOH+wlWIebFgx3IJ09Z32zs5V4UUZe0Z1hAi9OGu2EN6xvNjzxU1Ee2enYL9+5p93/xnYKHZ02W+4CfjrtDluMNn6V0mmn7dNspu3TMnLZSrUao0ThDTRtZDqnjG7coL0jidfaPBxVpPqIXb2rd0Otlt790OUnmGcfui+WwItTAy+vWo0um6VcdKDLuVGOCa5bg82P1pq5RFMG75HVXKM8V5Rj9Yu4LBtrKZ1ZzVwYEIAABCCQHwEEXn7suTMEIFADAYm6f9z3qDnxyM+udhVF3J1y5sXmql99d+Tvr73pL+bv9u9Lj417+8W2ZliPrcdTzyMvUZEUszTEU7A7qZ8iq3WutRNiLXNNW+AVqf5TpZTeefOeNbfc9NtR20drpg/yWqOwSLuddtnPzJmzgfO2kyxSl0XXOnzOF65yYLnow6hReBucsL9p38L9eV3m/4fnLjE32K/l3e7yTtf1o/A2nDVkNpxd/jceknkairpr22J0ba1q0W8ukXiujSv8VE1F5um1LymkZjnVIt5UA++Fv14dK422+T2nRkqfFasiN17Io45kaYqtfuHi8t6dRwqqy2vO5RiXzsgu14lyDPXvotDiWAhAAALpEEDgpcOVq0IAAikT+Pn513iRdzu8a2ujf9eneEXZqWHFTnsf5kXgKTpPI0zq1TI9/ZZfXVDreWSZhpoGJ3XeW2JrESbRiVaCwE+RjVIvzfW5apFkaQu8JDm68ih3XKXaWcHoOz9NVjpJ0q6SYI0ahVc0gdf50LPm2WMvcEKbhrzTjU974Giv42xUgadzP73O773OtFtvFD/Fr1r0mw8nrDOtxJ0i7wabNnZi6B/kN1DQ+4Ivhm99boHptyUU7n15iVl3Qqt51YThVMK3zZzu/RknjVbpsyefe0KkuelgvacsWtYbKT088k1inpB3kyE/kldReZJ4+ioXPV3Pkeh5dBGn/l3MFwWnQQACEEiQAAIvQZhcCgIQyI6AmlMoqk4f4vf7yE5meWe3OfrkX3qRd57Qs8OPuPvHA4+bn533O3O+TbFNYvT2D67WETGJa2Z9jeHf3jfb+mfD0S/1NmqNQPFTZNXkoN9G2H
RbKZtWHaFaPiSmLfBqmVvUPdP3RKfpf7Jr1Gntu830/q5STb4Lzv+eV9tONZgk7IbsP1wjI/f/9HHOU83jQ7EmV63+X6V02rTSZn1oSp/ttb+v6BuIFoGn83eZcbKZ07KFCUujdV2UvKOFF/T1mquefdGb7qDddEMhLvLD661jJi1+xjx8xYnmxZdWOHWjXdg3x+x86CmRat/5zLJOnXRdKx1XFLkYTLFtahhv6zeOTrEtMsdqzPOIHhQvvQ8zIAABCEAgPwIIvPzYc2cIQKAGAo/ZmnfqNnvGiYeOdJhV6qyGhJ5X9+797/Yi8vT3+9u/S6oGnu6xJtTBG2sfXiQtJeyCQiBqk4M4W7bWiJS0utDqWVRLq0fpZikXQV9y2rOh8s7nOfXQ9U3zpu2mNN3YF60/P+vbXt1J1beLOqIIvGoiLeq9XY8v12m19HyJvOCYtc97XG8R+7giCLwBu+5qHpP1+J/9xdDVz881Su9WJJ5Xa9H+Q7uwNPpTEm/owRu8VNpqDS1mb7KFWXfHb8SSd2Kg9+55tmlGEUcR56bXl977J9qapv2DQ16dQ0XmKQK5qByrrW2Wv3zRXCRBZ06l/l21deH7EIAABNImgMBLmzDXhwAEUiOgVNkvf/qDI2LOq4unWndHHGhenPeyUZSexutsKm2S8k7XfNmmL0n+1PPI+gNAkqyidDr004Wb7IdwrZkiMZJIvXV9nmr1s6pdJ04UnksDC9231rlVm7ui7pb+tHzX2OD57bvONBt/8tVeUXbVINPc/FqEZ599fLVblf1+vQi8SRMaCxnZW2sKbbUaeNUWNo+aapqTL+/07xJ4wfcMv2mKvqdoUD8qLxiJp+9J5HnRvfZLY5pNfd10+4+ZzXb4eLXHLvv9PGqfuU62yHPznyGYYqtgMtW0jdOgyJVJWsdl/Qs4pbKrBh4DAhCAAATyJYDAy5c/d4cABGog8NjTL5ivHPNTc+JRnzOT2tvMV4493Rx5yL4jEXk1XLrqqSpyv7yrvuvg5Z2aVhVyhQOqdSzVB0k9n6IufAmURwSPHqFaEX4XDleO/6t5cdxil0PNuiunm48MbeN0rGt9MaeLBQ8aN9eY8S+Z5Re/NPK3/c9NMwP2q9LY+LhNTdtr2z3RqteYX39MKbRxR1SBl7RIe/7Ze03H0hfNso7hNEyNyVPWNett8H9mytRXef9dbT/HfXZ//+m1oJRjFfcv7bBa7dpxm1jouqqBV6vAy6vj6lXPvWhe7BqOcpP8V6p96fDqMdp/SAQpOHTttlYjiafR8cIjpuN/j652ynrb7FUNd9XvV2r2UvXklA/Q3CZNaDJLrBQr+tB7n9952E+x1c+IsGY4RXuWPETptInN3s8yBgQgAAEI5EsAgZcvf+4OAQjUSED17X57xc3eVZQyu8P/s3cd4HUU5/ZXr7YlWZLl3jsGTAkJzfTeDQ6d5KW95KWXFwIEAgntheSld0IoIYQWCB0e1SGhGzDuuDfJstXLVX9z9nqkvXu3zJa72nX++b4bxbqzM/+c2b1izj3/f45Y6HNEtcv3hTp4cTaywMF5jFCz6NOfZIqsNKQAWRGFA1lQ5IwKieeGvMOd7kbJqPRkgLjLeYeyBHnX19xL/eKlbz2byqj17oNTfpc0pRA18Pb+tvqX89MUknoTC6U49nZya2IR1F5h+uambbRl0xspxJ0xdknkVVVOEsRHcAo8PAt4DnDglmToUE2wHI3EgxmPah1BpNH29GWJtEN19IOof4fZQOB1JKyNCNQjUu+pV9/hKisCTz8iSDzUB7t01iQaW1jomihVjW646jSqxBfl2NKePUE0gqzDcwDiEapffYqtW6JbBZ+g+gwHztUifTZXpNFyYwQYAUaAERheBJjAG178eXZGgBGIKQKoxRXX2jkSchxa8K16fXM8jSxkChHUEyArioWiokMcxkDcRSklKkickU67LWtPmhoPxN1H+2fRBBrt6okKkrAiQd5l5T0xOH/PFusaXa
13HUS9W8oJ58GkKUXyZ44QeBSdUkXFIp1W32prN9GzT9/tam3ofNIpl1FNzRTl60B8gTDa0+KCqTIZHeTdB+/9TXneAxaeR5MnTPU9r155CmICtb6MiqIhh9VcrQ4YiAqnlPK1TcvpFyuuEarjATGecxH7mvwFdErlzbRwap/4jFGGwbSjX8MaL7O/Xt+guc3KpkLgyb5H1oym4ydVazXDgC0wdsLXTYwZU826CcKib6ZT8gMIcXAIqxISsh4l9q+7VzwfIZPHKmsM28AiV3wuVou6i9wYAUaAEWAEhh8BJvCGfw84AkaAEYgpAvtCHbyw6+gEtdXSRRfpayhKDtIOB2VVRVFQcaiOE8XC7og9sHQ8A3mHse0IPLzffOPxggxKRRCEXtGp6QQeeqmq8Lo7u6lHSMWy24vooFkn07j5E2j8/IlKWxUUgbf83YdtlXfGYMrKx9PRR17omcDTuyoj9VglXVwq8kB+o45ba0evLfG9tP4Bum/V3eJZg4GDNYkXJHkHnEC0NLX1hJra6IfAO6yynA6rqtAML0C2SVWXE75KN6joFDZ5oxoX+g1XurObGGVfp799Zi62QZOxXuLGNTDfaGzrDpQYtosFnxFl4ss+bowAI8AIMALDjwATeMO/BxwBI8AIxBSB5vYeLf0mzi1uRhZIHZIpssC9Xagj4rAH1SLdt745ETmCEYfUqlGFtEsYR/hpWfm/T7ncLH3WOH7ny1Op85VpKb8GgQc1XtUv5pmG88zTd1FdrbkpBoi79sY2Qd71CPKukAo3J2uRyXb29Rc4EnlB1JZCzbutm99wDeesWR+jqrGHuLpOPg+4SKaMuxpgb2epxLQbB/fwv7a+RT9edhV1ifKf3b3pJN6BIy6mY2ou0ure+VXeyXWArIDRQJi1yYwEntHEwg5jSeDp+0h8Za01N+nLxrmiTJKFTSx5uddxjVkJBrux8LmAZy0qKbZO5KNXXKyug3kFyGhujAAjwAgwAsOPABN4w78HHAEjwAjEFAGkpzW0xjP9VEIeZTWHjFGqi1CjCAoI7SVMDnCgwMGqJQZmIlEmSseNLqIdezq9P4U5b1OWqHunbyoEHvo3fP/4lOtQC48EgVdpQeChs1k6LYi79sZ2bay8+nLtZdagxjvn+iW2awVR5YfQdKu+k8FUVU6kWfPPUdoHkAlIV0R9uyBTxmX6YEFutubWrCea9LggpXZd8wdi/qFwQd6VlQwERtzJkTHvHuH6PVwEnuY4K/6nF8yyQjMj8ORlQajy8FmCL4+CTMtVWJZSl6h+UWEMXt7nXsw2tL9HhTnCeCZHU3+3C8VrmCUbgizJoLSpolOlINGxbm6MACPACDACw48AE3jDvwccASPACMQUgX2hDl5UjSykIYXeKRdkhf4QH2j9tgzfg6jj1YmaZGINUWu+D90GAg+6LKQ2JzY7k4ItqIWnc6YFgVci6t8hjdapgciDGm/TWxto09sbtO5WxJ1+LCcSzy+B9+rLP3cK3fT9HFGT8qNHftHyWv0zYXTp9TShzUX6OnnS8CIIpaaXOP3uh5c5cc3PVq3XLpVOs6oE4pfnTnecEmOCJC0RJCyeFTfuwMNBaDouaC9ORmMhleuGo08QX/6Ypdjiucw0mYfY83KztLTyMBrMWWoqisKYiudgBBgBRoARUECACTwFkLgLI8AIMAJWCNQ3dWnfwse1Dce3+XZYSUIRReNxGIIKyEppggNU5ciCWJhwRLm4u29Fz14CTyqVoFNCTbXuzc5pucY0WhwWS05XI/DkffSr83/s+vGzS6f1q0gMmsCTxhR5gvDp6e0n1LhTJZOsgHn5b+8RXsY2ec4YWnTuATRlbs0geSXruMFhdbcwvMk0QWGMabgIPJlGi3syS9zcKpjbqe+s9gJfREBdrFcY22Ec1XqauE8rxBcVcTBFgvIcf7dVakWqfLgYlZVQ52fKxTbsFOqCvGxRh7JABQbuwwgwAowAIxACAkzghQAyT8EIMAL7Lg
JNopA01BNxbmHX0zFipXfOxCHZTS2v4Y5ddd+DUHyozuW2n5/0XuxdXv4y6qO3tPp+fToXEZU0WiOBVzK3lMZfOV1ZXfLm/f8ivNy2Q5d8jPAyayoEyasb70y79IipV2i/C4rA0z8XIO2CMmm586ZnaPPqOlvIQORdcdXJKX1ApPX1J7+sCMqQQWXf/BKqdnNs/GCAXvxrP21akZ4ee+zHs6nhoztpZ2dCicAbX1xIiyePV1lSSp9dr9yh/VurRznrEBo982Dt32YkkNvaba6D8XEBvnwpFKndYSnDfISqGaNkKg1ZT8jieQn6WQm7ziD+dqEGHjdGgBFgBBiBaCDABF409oGjYAQYgZgiALIJJF6cmx8Cx+u6ZTogioLjUAozChAUKioX/ZzDEbuXNfupueRlPjfXQNGRgIuvi/RevZlIc+J16u5/03TK3rpuGuhKV6j2be8kvDqfFsYe3bk0kFUt8m6rqfrKs2jUgpHKJIAX9Z0M9AsPft00ZjvFF4i7f5qQd3Kgw0HibWlxA/9gX6hhDxMptDI1HM+IqqOs6oQq5J0cy0jiSa5oIhkAACAASURBVCJNHx+e20wpjWQc+v1o2pWg5voEbV7RPLjkyfNH0aiqQiqrLlSFQesH8u6Oa+2/fOkWKq2a/6yn3KkJzVzFrqmkzsrr2zcvIxB37VveNR1ywnGfoqknfUZLs4UKGUoxfDYGYbLiCiQXnaP8JYVxGWF88WNMsZVO6X4UrMNB4HL9OxcPAXdlBBgBRiAEBJjACwFknoIRYAT2XQR6xQFvl0ijjXML08hCGlIUC+KuQxxM/Rbgj3JtOf09EbVUZX1squm9+hpsPX0DQ8Xbs3ZQVt4Tlo+AnsQDadf9RgP17Uim1/buHDF4XU51PuWI4vAVJx9DvYcdrvRIZYrAM3MMvu+dr9HWpvS0U2OgJX0j6YD8VHMOlcXMn3sEwYkWRE3Qqh3Mb5U2axcb0mnxQjMq4XBPFxfkaqmfsk6eToCpsmTHPnqX5PderBXknfVn7aiqAjrg2GTqr1NTIe/kGLjv536pmWqrG02HdZs2C/Ju4z1fcQqRSiYdSNMu/5lm1iPdT7sEyY5Uai/mC44T+uwQdFqqz3AsLw/KedtNfPqaklDleU2xHY66ryA7kT7PjRFgBBgBRiAaCDCBF4194CgYAUYgxgjUNSZcK8eitNxMG1ngAIxDaGF+toaTVCIEcdjXm1xECVOzWFRSM4djDdgbnM+g9jJrknTNFylyluYJuY9TVvZOy/CRTtuzspU6H9kx2Ke/NZ/62wooqyCbskeJGAR5h5YDs8OJk6j0iksd4cgUgWd0PVUl72TAeZ05dGjpWY7xo4NWO1BswNlnflOrH+ZWhao0ieh0w+V3qXZN6XftXZdr/7ZSJurJCekS7UdlpJ8cuFBHH734961KsVuReDmPDhmqbGjLpttvF+OWqblqIgYYFl3/cC5ta++k7R3JscYXF9GEEnfF/VXJO7lYkHhTL/uZ9k88h0hlRDxGh2AlcDLcKS5fpgy3Glqm2BaLdGPU4XND1oetcswTH8ZVZVz/LsOPDg/PCDACjIArBJjAcwUXd2YEGAFGIB2BhtZuLf0zri1T6jB9mmWmXDMzTT4GuadhpG15ideqdhX2T6Y4O6ZyOqjwEFfTd1fSAPIQkYo4kCfIu1LKGZVeW0nwFCS8Gqhg0VFUKF52LRMEHtKyUcdLEmlbGt+lvy4zT7e1iq2vt5vm5xxFFbnjLMOHqgX8FIjsuQvOpZlTpgs1r7Pxh5c99qK+k/Nc/p2TNFMLJzMJmTIIYgImJm6ICas1gaxatbSeare3Ky9bT+KBuMvVkXcY5P9q87QX2gBIPAciDzHgXkBNPLz8tI13f9kybdZq3OqjPknVR39Se1tzIBXKx14RD57NLvGgtAviPSjC1M/aouqOa1xT2CSYFaZ4XqBexfMCokwlxTZsAwuuf+fnieBrGQFGgBHIDAJM4GUGVx6VEWAE/o
0Q2Bfq4AVFLkm1lt5R0U1tNbe3TabIR7dxqPTPZOF0lfmt+ujTsvTGCSBdXaU425B4iRfqKfFifTIEQd4J1sQy5FxBmICg0AiLyy+h3CmTLfs6mVj0d/ZQf6Jn8Prc8mLt/4+bP4HOuX6J6bhGAs+p7p1VcCOpkvYTJJ6xAWMc3qHqwjL3O+BcGlU2wZEg87PHfgg8mUbrRODp45P3FH7nxpTGuMbNK5po26oW16rE/Y8ZQ1V3dFP2mnRV6ZXvJu8B2QYKxWbUJNWfZk3ej1PmZ9F/fN+6n9P+uFXf6cfb7+pXtH/q01T1Ndbwnte0TKe4Vd+PqsLYGH8UU32NKbZIlW4XJSaMKvWwSdLy0nzCFzncGAFGgBFgBKKDABN40dkLjoQRYARiigDUD7tF6lucmx8zCH1tNGAAtRbIn0ylAhpxzqRLZZB7GtUUM0mCgjSDugcHR5ABnvZPkHiU805aOi3Ud1obAHlSYgsrYkCNPTQnFd72FVvp0eseSBuvt7GDeps6TOfJLSumxT+7gsbPn2j6vvFZ+OELx3m+Db513Au0ZdPrtG3LG0JtlyTugCsO5hMnf4QmTTlscGw3BJnbgMIm8GR8Ml1RmjGYkRJ2a3n/pTpq3e0+rbh8QxYdsrbUdGgjgafdljYknp5QvkGk0XptMK3YtTTpOOu2Tb30p1QyeSFZGc7onU+DTmNWiTXK5hrG+MN2cVXBT9/HLMUWqbZoY4QTdK0o2RFWqxbps7laTQNujAAjwAgwAlFBgAm8qOwEx8EIMAKxRqC2odPRpTDKC/RiZIFv5qG0A+ECwg51mXr3Ei9hrjXqBzKJhapZRFjY6YlXkEvNHT2a22UgDUSeriZe83UPi2HtiTs5r54wwe9GXXuVbUiPXHc/7VixbbBP987mFNWd8eLRxYX0sUk1NOHqM6h4XnqKK4hWfVqiHwLv2lNf1lLk0JyUaFEm8I5bfCCNHlngOcVXrzByY3ix9P7NWgF9V2RyYoCyavvohNXlpveNGYGHjgNQ4YHIMzSkN0oCZbgIPJlG66TilTgXic9lpDHjPkaabRD1Ru0eQnwJMKI4L5LmGsa4g1KbB/I5aTMI9hLEd0kR0qazhUt4n/YTdTLDaEjvr6lwV+MxjLh4DkaAEWAE/t0RYALv3/0O4PUzAoxAIAjEvQ6eai05HNRQewmGBtKQIjDSx+NORFXZZlxOVGovDaXJ5gjitU9TTI4RToM79gwV+ve4FaaX9W7aTO13/Vl5SLcEHgaWJJ4qeSeDMSPxoHLqSPQN1hXzQuBpxhTidf3pS5XTkKNK4KEG3vT5Y30ReBJvSTBJV1WnOnn/eGCzUC26JPAEeZclSDzXBJ5FPbygFHhe6t9J3CSB5yaFUqYxg/TJtOkFvswpEH8TUDsyyi1OSkE9jnhuykQ6Kwg9/N3tFDV33apZ3e4LvpyrEF9mcGMEGAFGgBGIFgJM4EVrPzgaRoARiCkC+I/p5vZoH17soLWrJSeVWtLQoF2QG0jTcqWKyeC+Rk3ZZrXU4XY/1JuKGNOcQQzUNycyotRxS+AhY2tvxpgGpZMCT+L94pX30bLn3re806TyTt+haO5YmnjNmSnXGNMU3RB4UK1IYwqIUZFCq9oySeAhBi8utJPnjKErrjpZcz4FLntaulWX49gPXxpAXYRmReSBwBM+vZqaTLVlbUrWvXNL4OGagSnpKbJB1cDzk0IrCTwvdebckqaqOOv7xeUzWPWLKi8YZPoaaWDRJz5YpPkFlKGZUlnC8RhfOnFjBBgBRoARiBYCTOBFaz84GkaAEYgpAl1CyRTk4XY4YDCmFhkNKVwZGoS4gLgcykCS4lAU5n0C4qUwXxAl4iCGunJWjpVOqXl+t7P5hpuUhwCB1y9KPoGyyZk8iUqvuFTp2rWX/E7rt3Z3U1r/WZXWphlGFZ7R6VHFxAJqOxAl4JlkFvnEsgPowoP+Vyl2dMo0gbdpVS3ddfOzyvGgo3SgzQSBJwORSjHMgS
8HkGIr+bpMEHh6F1ojGGYEnnSh/eQNOTR1v/QUW1VA/RB4MLHA/VU1qtBzGjPizJQqL2x3VFXMjf2iooL2Er9RfYn7AerKEcXJFFsVF1s381aOKtDuF26MACPACDAC0UKACbxo7QdHwwgwAjFGIFMpiGFBAhIHqYNIhdKnyEJt50IAE1a4g/PoXVRDn9zlhF4UNC6n0LpL8hX7iPqEUNzZKSaNdd+8zGl3Tdud91Df5i1Kw+oJPCcXWjlgx8odtO3Gx5XGN3Yafd7BNHrxwYO/hpoIhh4yNXxL47v012VfNx1bEncg7YzPyMcX/pgmlR+oHJPXe6PvX4+kzJHzsXMs57zzpmdo8+o6pZgkeYfOmSTwZDAguIsLcjV1kayTt2VFM20WL7cKvGm7C2nabuv6XZZ18AwKPNB1qME3cS75cqCVa/zgxqOVsNd3kuo77AFSGoOogQaskyouEPtJFRfMmLy2uNQhjQvRaNwHkHV2BhZSZYm/3TnZIPN6faXYQkUMwhD3PjdGgBFgBBiBaCHABF609oOjYQQYgRgjsLulSyNL4tbwH/84zIG4QFMhfKK0RqfDTZRi9VJAffmOzbR852a6952lg0tZMHYSLRg7mS4+OJUQQJos9hFkEupeqdYntHK3DAo7N2m0ksDLdqG+2/PQ27Tn4bc9hetE4GFQvQovSeogsTOptjMjt92q7zCHWzdlEHf9BvJOApA1YQ5lCyIve+KcNExUHGn15B0GCIPAG4xdAAulFMil3Ts76JXHt6YZBHV29Ys0/mRabWF+FhUVDCmFkEJrlT4r59jQlk2/+7AwDRujAg/8xbQF2XT594JRIrVvXkYb7/mKq/sU6ju0TKTg47MzSZom1wf3ab0CUjXQTKbgq8ag0i8uRKNxLW5U5kYXW6nMc/MlHIhAmNZwYwQYAUaAEYgeAkzgRW9POCJGgBGIKQJQObUKJ8+4NGOKrBAdafW7mmJYyy8uB0i3qar3vv1KCnFndm/9z1mX0eHTpmuqSZCvXlKdw6hhparCk6KPossuodwpk5Uep6AJPEyK51nfHnzv67RVqPFAG8kUX7PgvJB3GEeVwOvfuloj7ga2rXbEJueCK01JPKTTQokHMk821LubMreGFp17QNq4w5H+DXIJirOmXV1JEk98QDW09gmnU/MvScpHZFP5iBw6qG4EjV7mCA2BxEM67Ya2pEvwgImJxawDcugLt+ZTS4Cf627MLKZe+lMqmbxQiy/TRhGS+MGXOVBd46WiytvXv0BxvpMy38Nr6q8+ZdpNii3mQ7kHbowAI8AIMALRQ4AJvOjtCUfECDACMUUgDnXwpCGFXqUF0gfplXZGFlHfEhBjTo6WUViDm1TV7zx+t1DeWaed4uCcLf4HP390zuU0q3KiZ2MRHNhAnBlJq6AxUyHxEEf5py6n3nETlKcPksDTY6FXp+I5WbnjLfrz21+zjOvwqVfQEeLlpamm0Pbef4sSeSdjsCLx3MSYCfWXyvwy5XHNO3votVcaBDltr3B+o6WYfr2kivL/p1VleK1PksTLpvXC5ZMKkymDx348m6bMz6L9D02SGEE/F05KvJJJB1L10Z8cJO8QQ1jPqEzHLBZqXvxdQHptwkZZHmRqr/KmeegYVwdaLNWv07peaaniSlwxomBQlekBar6EEWAEGAFGIIMIMIGXQXB5aEaAEfj3QgAKkdrGRCQXrXcgBRGB9MpeWW1fF7GqCihqixxZnJc8bIp1RbmpxmmnvMNhDIfRpBJsYDCF8+bTL6UF49QUa0aMkKJVKA7sTW2ZV5AinTbx8lLTmngFi46i8pOOITh/uiVNpImF2/03ptBqSqe9xdutagiiLt7WpiH1Gub0StzJeFVMLOzSZq3WjXTa3CVXuoUlpb+m1i0M5/7QTwwCD3U5r/ndHqrd1kkzirqoIq9PpC/j/h9ypm3oyaEPOwuosTeX5k7Jo+/X51D2GvXPgv7Zohbct0emYWSsh+gLRJOLYWxhbC
WTD0wh7uT7+OxAvTrVtPggYlUxvXCT3hlETF7HiEucZuvzUnrBCidjiq1Z2jTm4/p3Xu80vo4RYAQYgcwiwAReZvHl0RkBRuDfDIH6pi7tkBWFBkVdqVBW6Q0pnA5/cVGypR16xTpBagWZ6paJPVRNhTrj9zemTY/1DTqdIt/Z0FAX7+YzLvMU9nAZgYDIQ8sV9e5kuqzXVMGtP3iMOlftdL3+WX/+7OA1wHikSB3LE88OnhWkndmZf7iezOYCFQKv58ef8DSlXxXecBF4UB7d9WQz3ftcqqKuPLeXRgsir7lfEHbb2+no+pYUXFAXb1peGeX2FVBuXnqtO31nK/IOfaJkepDpOpV2N5ZU5eHvCf6+6dXOqp9pnm7cAC+KS5zGJWdSOTjkAC3KL/SKzztBluNLoWpB4HFjBBgBRoARiCYCTOBFc184KkaAEYgpAs2iftxwqsBkiiwOWjh0tYv/IEctI1USQlUhFrXtGS4Cyi0OKqmIevWdliYL4k5MhD10KkT++GeudhuS1j9KaXBeySIvTrQTrj6DiueNS3PtBeBWasT1u1bQhvoVKTifOH+JJ9z1FzkReKh91/fALZ7mgaGFnTut06Be98RpXKf38YXCSV/dZtptcnsXXbalXntPEtv6jlPHic/Apn4q7a+2JPF6zy6iPvGyam5S3p3W4vd9t/Uz/c5ndT2UbCXCKEemYkIt2yPU3MP5d09lrVEiY1XilX3CUA7qU2wL8pI1IbkxAowAI8AIRBMBJvCiuS8cFSPACMQUAaSjNLR2hR69lgIpio/LAuRejAwQtEwfjJuRRVwKqauYAdz7zit037KlWn07EHb94n+ciDt5w1180FFpzrQqNyPwqxpVSLuahj8F3A8Z64bEA3k3euFE4cKZPLDimYHqzoqsAnH325evs4TzxHlLyA+R50TgeUmflcHGlcB74tVOuueZ9Hp2evLOakPKNFOLpLtqycSxlD2zNKWrHXEnO0aFNEM8uD/2CKdz1S9jVJ57P330qZhdvf1arTwV0ws/c/q5Nq4OtGF/qQb3WbjQcmMEGAFGgBGIJgJM4EVzXzgqRoARiCkCvSK9aJdIow2jQTWFGk36FFmo7VTJHrMY42xkEWSdoEztnx3RKF2B7xHOs79/9UVPB3WvBB7Wq2qikCls5Li4r6GW2dPS7WkqkHh7Hn7bNp121g1nU/XBk0xde81Ukr956bo01Z1ZcNOq5tN/HnO9p7ijTOB5TWv2BITuoqdf66I7RQqtsV22uZ4md9h/zo4elU0g8eAYjFbwxWmUYyDxnOKLEmkWlefTiBkw6ujqHSR9zGqqOeEcxvtx+PtghkPYZS2qy0TaeQ4TeGHckzwHI8AIMAJeEGACzwtqfA0jwAgwAjYIZLIOnnTFhGoIRAcMKVDsP0hVRlyNLMI+6Hh9CIwHSZAjelfg2//5It37TrI2nNvmh8DDQby+OeGLAHYbr1n/oGo+gcjT18TDuFCWTL70MNvnxqgAfG7F/fTcyvuVl+aVxGMCLx1iMwIP9e6O3p1a885sc6C+Gz1KmFkIdWmf+FYj56QxlHfqGOV9RMeoPBNRUsgaAdQTi5L8RnotvkwCsWdmluRqEwLoHNRnSgChuB4iTOIRz0pNhXVKuevg+QJGgBFgBBiBwBFgAi9wSHlARoAR+HdHoKmtW0vHC7JJdZbfFFmVmOKaajScRd5VcJV9QDQi3Qx7KV1O9SnPdg60TvP4caKNWrpgUOm8eqWqCuFtTHP+7wfOd4I97f3PLbqeplfPd3WdE4GHwYwmFu9N6xycY0xjLtU05pnO6TeFFiSzF2dgVwCYdDYj8FTUdxhKn0KbLQRFOYIFq/rtQdpns2qqZ1S+zIhSjUr9NlkRi3amF37vCS/Xh1FHzktcTteETTzib1KFMI7hxggwAowAIxBdBJjAi+7ecGSMACMQUwRwQASJ57fhP97xH9QlhTmaKqotIWoMCcVdkGo7sxjLhAsnaho5Odb6XV
/Q18NlEAoCKBKj2nCQLCvN1+raIU6r/TRzoVVZk1cTC4wdpYL9QRAneuLODmsjrvoUXrfqOzmWFxWeitqr9/5baGf7cnpvWoLqKszv8wPWF9IBG4ZUNFkT5lDukitVbh/LPsNF4NU3ZNGXf1KXEtc1q8xNLYzB14zOETU9Yf8y1EDgQb2sPX8d9jXboqR6UzG/8bXBHi9WqekpFa3S9AJmF37KPHgJNa4OtGGnro8Sf/uBFTdGgBFgBBiB6CLABF5094YjYwQYgZgiAHXH7mbvdfDwH+36FFkQd2GmIcX1sBPVQ650BkaarCRfUSfKzrXxO4/fTct3bnH1BPhR32EiFEvvETUco0DcqqjRrMCRhAFwB3Hndj16tZMX9Z2M638ueNDV/qnUW9vZvY6eXPNNx3HHNOTSyW+P0PrlXHAlZU+c43iNXYfhIvCAyRd/vJNWbeoZDE+FwCvMz6KxlelumsU/3V8bR94j+P/4wsWsdmjY6icn/JH+beWM7GtzfVzshmAaTlVeXB1owzawqBxVoD0b3BgBRoARYASiiwATeNHdG46MEWAEYoxAXWPClVIOSoZS8c233pDCLfEQFFx+XECDisHLOFEz4JB7KtOeZa1CVaWgGxJvwdhJdPMZl3mBbfAaEIz9QulpRyz6msDFxSpklnE4WUsQJKmTusouFL3yKkwCD6nrDa3dlp8bIO+eavoFDXQLp+DGWkc0QeKdNu1G3+QdJhoOAk/uw0vLWuj7dzQNrleFwDNT32EASeDJwfSlCfBFiV4dpqIuc9wExQ6vP/e+1lP+HD9tDE2YXEUnjK2hPFGXEspiEoD0i3u7R7hFd04po15Bbg53w2cGmhvVM/a1QJBEJeJaOG13ilp5mVblxbUsRNh1XVFvL1u72bgxAowAI8AIRBUBJvCiujMcFyPACMQaARzEoeqwa3plljwEhZEiqwJsECmMKvME3ScKcesVlDjYGtU9buoxqZB4QZB32AfEjTS3lo4htVPQ+6M6Hg6uUBs5pYvrnyGV+naq80sFYJgEntOa/7jrK4PhayRemyC1esRPq1ZeQ6dVf5PG5s8E9+MrbdELUaOKtVU/vQJu5cbuQRLPqQaeFXmHeYwEnpxbqsOKC3K15xWEEuYvFuULMql627a+jh7+7XNpEGSLZzCrT7Dpoh2931Q6dv9p2gaCYJcNRF7rgTV+YfZ1vV9lGwhUfMEhv+QA9qr1Cd0EHqYRhJu4nPqGGTcUnqNHFjiFxO8zAowAI8AIDDMCTOAN8wbw9IwAI7BvIoADYHO7ORECAkd/aHFTVD0stOKqWBiuuHHYh7KuMD9bq2tnt6dulT1WphYg7i4+6GhaMG5yILeFdl8KEi+ThIVqoE7KE7fGFKrzyn7SWTMqBN6y9qdoWfvTacvQiDy89C2/kLLEC21s3gy6aMLXtc8bpEd7VSYGqc5c8fxDaeuomjqXqqfNS/m9vhYh3pAk3uT2LrpsS73pltqRd3mnOLvQ6tM8QZYBs0bxZUwmmgp5J+edMqacLj9uYRoJO9wkXlDGN8Ad5Ck+P9FQYgAOtkHUyotSKrSb+yhsRTn+fqEGHjdGgBFgBBiBaCPABF6094ejYwQYgZgiYKyDpyccoCqyqrsUleXG1cgCRgydgjxNCBItjCZT8KSbrEyTtZsbh9UxIv2tVqRZR6lFKXUayp6ORLoaR/8cIcUcz5GTSs8LxlLJGaaJhZ0Cz4rAs1sbXFdxr31u/M+1OoD6um9uiTwQeL3ic8tPWj+Iu5Um5J1cA0i8Yz7z3cElWRHdD77YTkf+awftWdaq9YXbLFr53p9WmFip76z6o/4YapH6IT6txrYi77LEPmVbKLdB4E2uLk8bsuWAmmFLp1UxXnH7/MlapkizRVozVHl+asC6UTy7jTWT/d3UFwwijnJhroQ5uTECjAAjwAhEGwEm8KK9PxwdI8AIxBgB1MEDsaM3pFAheKKw5LgaWYSR6gdSBIomzCXdgd0SG1LhFY
W9ljHozRuGOy5jap6eKPViTOF2PTKFdv2uFfTbl69zezl9btH1NL16vqvr7Ai8Jxt/TrU9HyqNJ4m7PnFz4v5cWHKKeJ06eK0XIs8vgffS779P9RtXKcW/6NPXaGo8bc9tUlg7vpKsG6fSCr44jXJmlqp0HewjP0u6xJcBI4pztZRaEMZB1Gt76DfP0fYNqe66mDjHQu0nU6C/e+FxpmtoWDTF1dqC6pzJz7GgTC/i+rcsbAMLrn8X1FPB4zACjAAjkFkEmMDLLL48OiPACPybI9AhFARRTJF12pYoqbGcYtW/n0m1hTFt04878HCl+jphmckDudPc+vclYYTfgQBHw3Pklih1M6e+r94F160Kb1rVfPrPY653PbUdgaevf2c1sJG4k/2MBJ78vRu3Xj8EnpPyzrgeqcRzIvBwXeLn66n/w3ZbrL2QdxjQ6Mo8lCafo6V3+iHyfvbf96TFbKe+kwTeZUKFNyVkFd6K19+mFa+/kxbvfh89mGbMnET55ZWu73W3F8h7FTU6jWYjTmP5rdPnNH6m3g/zb0SuIKerhYEFN0aAEWAEGIHoI8AEXvT3iCNkBBiBmCJgVwcv6kuKapqnE25u68s5jYf3jaYUQRiNINW3XRhcZKJgu8qarPpEhcBDCnexqMk0XAS4MTXwNy9dRxvqVzhC65W8w8BeU2itiDsZ7KllX9SMLKyanphuF2nLZrXH/BB4D1x1sSNuxg7zjl9Mh5y2hFBY36kmY9+6Nup5ui6FyMueUUI5M0op79QxrueWF1gRP3plGNI73X5BA6dZ6TarD06FwIOhxSLxMrbOyWWaM22Qbde2HRpxV799p+mwwAEuskedcxpVTxgX5NS29yo+j+HYrpraHCYRFhQI8u9vXVMikDqATnHhS5IykULLjRFgBBgBRiD6CDCBF/094ggZAUYgpgh09fTRnpbMFEAPA5I4HnyASxAklN7dVNYsDFL9FXZ6lOr9gj2Hg3Im6so5xQDM2//ZppE22eL/I3VxoCaH1qzJpeX/l+7ovOCEHBozLYvGTE/WQAuymZFpTkq8E+ctoRPnL/EchlsCz4m4k4H8R/VPlWKSRB7Sw40qJ68qJrfqO32gl//orwRlEFKmh6M5kezSeAHkR7/IVVatK+iFwJPrD4vAA3n30sNP2MIutka0LG3tx5x3emgknjarzvQCJGLnXudgM9OLMJ1cg7pPwzawgHkFUo25MQKMACPACEQfASbwor9HHCEjwAiEhMD22t3U2tZBc2ZMCmzGHXs6Axsr7IHCNoQIan0goXa3dHlSLuDgBHWHG1MKL3HjsIQD8HCRE1YxB+Uq6QYTSdy1/KNV2zOQhzig9wnuu7FugNqy86gur5jac8wdEk/4bG7gJJ4dmQYiz9j8EHdyLCfsZRqtKnGHca3SZ+32RyrM4AoqU0VxwAeh6pbEdlP7zhjTcBN4dveAMVZ9XUEnB9U4EHgvPvS4pfJOrh3EGdGAIPCSvwmbxJNxAHu9qztUqkN2eQAAIABJREFUkVLZHFcH2rDr9lWKv5nAkRsjwAgwAoxA9BFgAi/6e8QRMgKMQIYRaBGk3a2/uJd2CAIPZxGQeD/7wZdpfI3/2j4gkpByGccW9iEiKIxw8FZVw8g5ZVoWzqQg1XAINFNzBBVjJmv1+YnRSXXkZ2zjtVLxtePuemr+sDMF754uopb6vczA3gvXF4wKjcRzItOCxEGOZXffAqsPEs/Qmy1PkjSnUInBqL7rWbWdelftSLm06LxDTYfSE3kgapDK6jbl20v6rAzmsDM+TnOPW6zVmhuOhjTqPeLz240aVZqtmKkY5Rq8ONDKa60UeEE50f796VXU09NN7y19nfaboEnsLBvuSajv5Odk1fixdOziM4Zjq7Q55f1aJBSkiAtEKn5iLxotzEGGLViHib0qXr2sB18k1VQUebmUr2EEGAFGgBEYBgSYwBsG0HlKRoARGH4EQNqNLC3WArn7wWdp9Ydb6MYrP639++
pb/qD9lP/2E21ze8+wHUD9xI1r42pkoZqeajSlcFvLyg++majV5yceea2xcH8QYxrHAHlZIhx8gf/6P9RS6/pE2jR7tqWSd7KDHYl3ya3B1XAyI9NW72ihR9/aRmvET32bPW4knX3IBJojfvppZnPq71EQyw/V/kTZjVZf+w7EXeLhN9PIOxlvoSDx7Ii8ClGz0YuBgB8C74wvX08Vk+e4Vv252YPXHtxJ21a2aS/ZPnp+DU2YV0oHHV5Fu0QNMi9NT37iywCQkHoi0MzEAvNYudDKGMxMLHpGFVLrgTVewtSuWfNhPYG4W7N+t/bvzvZ2SrR3aP+/agRpRF71yHQyD+nNvVJ+t3f2JV/+jOc4grxQ/u0qyMsRytE+jXx2Q8QGGYuXsbyQx17mwTUoWTB6ZIHXy/k6RoARYAQYgZARYAIvZMB5OkaAERh+BB59+h/0yz89Qs/ed5sWzNW33k5zpk+ky84/Sfs3UmkXf/paeugPN/hW4cW5Dl5cjSyclINSJZPpNFm7Ox3YVomDt1eCIFNPEcwK0DKR2guVI8bHQRoKyT0vNVPrq0PEiVxTR8sAdaZyZCnLfb/YXBmLmnj7n5h0rPXboIDpEIYOUnF2699XphF3xjlA5H37rHmep9bPaSTu9Kmry9qfomXtT1vOU5M3Q0udlcYVIO/abnzUMa7cueNoxNXnmPZDbAnhApwr0uz0qbVOKlU/NfA+9ZMHNKMCt2m7jgsVHbatbKUHb/jQtuushaPotG9PUxnOso/e8EJvuuAljXZydRldftxBaXP5Ud+BvPvhL5emjNm4qz5tDhB5x81LTbGMMoEnF4D7Fs8SyGdV0wtfGx7AxWH/3R1RnKd9LnNjBBgBRoARiAcCTODFY584SkaAEQgQgU989RYtXfbKL11Cxx2xME2Bh6mgwkMtPEnqeZ2+X5AVtY3eVBxe5wzyujgaWZgpB3EoQiqVJJCgtjMSA3974uEU6ObMnEtzZ80NEs6UsYIw2wg6OJBsOOy2dPQEMrTeDASp5CAGpRJmx63m7pbNInW2V6TQYs/MCCLUw8PLrAWlwtOTaSrknYzFD4mnpc2JtD8oYkAuAys78gpEnrGBvNM7zqqSd3IcKxJPj4exRh5itFI3eSXwqqbOpXO+doNGGiYCLkGgQt4BD6hka2aX0PnXWjv4unlIpPIU18CB+s8/f4a2b6hLGyJbPHtZfakKVCvyzo/6zoy8QzBmBB5+byTxzAi8+YcdRPMPO9gNLBntq//7Jf8ueFGSZjRIw+BhK9+5/l2Yu8tzMQKMACPgHwEm8PxjyCMwAoxAzBA4T6jrrhBquzfeXa2lyUJx90lB6klFHpbzwqvL6O4HnqE7fnKl79XVN3Vp3/7HscXRyEKvYNA7ayKVDe6avYbD8U3/eyOtXrfacnu+89WrMkLkRZEcxeER6a1+a0YZ05P1xF3/fW9qWDc93az97K2sot6q6kH8ZfqsFYEHU4sNhaNM9ytIAg/k0V//tUVLm3XT/luo8Nym0wIvpKnKGoxBqc4aL/2VY+gbRLbkRvF6fncyTfKUEydT3sIpdNqSITWhWW1EvboMz5Z+j/WTekmjXfTpa2jW/gekqCAdF6LY4ScXLlPqCQIPnxVIpw2KxMPEeiLp9h89QetXpxPZRhLvuxcelxZz5+Qy6pxSprQWs06f/lrqFxayjxWBh/ePnTuUTot71kjcRiWFVq7FzIEW+4ovc2BW1NXbr5Gpbms7egZd4UInBbnCEK66ACO4fnNjBBgBRoARiAcCTODFY584SkaAEQgIgTffW0PPL31bU9addOE3NdIOZhUg8M455Ug6W7zQQOA98tRSzczCb4tzHbywDxN+sdYf3ECa4pAJYgHqL+Nhc9XaVXTzT25SmjITJJ4Xsw2lYH108qv+0BN3IKGgdJS4g7jrv+8tLbq+tj7qbxsitXsEiZeYM08j8pwIPFxvlUZbfOYDaas/98hLXCMCpSbqe13001ddX4sL/v
ifH1W6To8X1Ib4rAiKTOgUNe9Q986qgbh7XpQ929iRfnjPmVqlXXbaBfM0Is/O1EOvbsVzZqwluWvDSnr5Dz9QwgOdoL475jPfpUwYqqDm3WsP1irFkiOInr69ZP/5184QRJ7IIw2wYe/x+fras+/RC4+9M+jmOjiFIJeOOmgmnTxlXMqXDiDueoTBRq94eW2oeff3Z1aZXq6vgWfsIFV44HuyxMZHmcBzcqDFfYt08ML8ZGqwk3uwV6zdXhemgQXUiFVlXP/O7R5xf0aAEWAEhhMBJvCGE32emxFgBEJH4Fd3PkrjBGF3zslHaGmyI4SRxVyRKjtbvL58zc/oxu98hsaNGU23CFfaE448aJDQ8xMoDgYNrSInMIbNL6ET5pL16ZrZ4nSGwvF2tdyclHfG2IMm8cIwjHCLv1SC1Te7u1+lci9PkB7G1M+BD7ZrxN3AB0Pup0YCT8bZeuQiqutKkkdWCjy8ZyTwlvf8hZb33Eejx5srSUDiuSHyQOA99MZWuu/VzW4h1Po7qfD0dRglXsa6e54m1l1kR+CBvPvDFmvVTXbNKMoqSpqCgMS77NMHKhkBGOscSjJSlcST5B3mBWkYtPmAqvoO9x4+QyRBBWOLj54/1u+WmF6vVzKCyOsSJGjVpCqaMH2M1n/c6CLasacz0Ll/+ItXBk0rjAP3dHdTW1NSHWvWPn5YtsBGe0I1l1fZopY+68bpG88jVHl4QU2KV1BEutuNC9PAAgTyqJI8tyFyf0aAEWAEGIFhRIAJvGEEn6dmBBiB8BFA/bv/+sQ5Wg08GFkgffYmkUYL5R3Ueb+842/iWEIpajy/UfYKJdgukUYbxxZ2QW0vGJmZUuBg0i3cB63qZ6He3d+e+Jvr6e761d2ur7G6ADHiIJwJwwg/QbqpzQfCBqloOMib1RVEHH3XPJpC3snYemrN6+xtn72IOkcmSTyzZkyh/b/E1bSr/wOtqxWBh/fmTFpAV118qxI0IPD+9uY2uvcfm5T6GzvBlRYvY5OEuFSG6lNlgybwWm98xNR11om8Q8xZZcWUXV4yGP51t51ANVPLlZ085ToxAAxLQIaAxFv5/ENUv9Fc+TXv+MU0X7xkG04Cz0xh9tX7Fnq6F6wu2vnC49pbpVNn0QjxMirCgBtUxJkwu7FKn5WxtjY2UW+P+fMJAg/374B45vUmtFFLn/WiHh+q75ij3ev4TAOZ52TWEtSNEfbf2/LSfMJnODdGgBFgBBiB+CDABF589oojZQQYgQAQOFGkzY4UqrvLRQrtIQfO0VJnkSYLw4pMtjphZGFV6D2T8wYxdhRrtWFdkjySdcP0By2nw9vlX7jMEzTnnn4unXv6eZ6uNV7kRiESyISKg6gQeFJpZZYyqZ9GnzZrnN6KwBsQmbXrDzvfMlq9iYWevCsSGY7FI+1rOakq8XD/PPb2Nrpn6SZF1FK7GQk8M0LLOHDQBJ5V/bvfbzFPm9XHYyTwkE76i/vPd/0ZZrVumFvIBtVd9bR0996glUiq5hWIS1OYiQ8WmBDJFgSBB9Ju5wtPmN5TY487ncYed4b23hDRm0zvrG9OBEoiORF4mNOKxJMEnv7v2THnnU7VE8Z5elYydZHfVNThML0IW/FeLdJnc0UaLTdGgBFgBBiB+CDABF589oojZQQYgQAQaGnr0Ag82d4URhZIqUUdvEy2prZu7dv8OLYoGVkYzRGM9bYkvk5mDF4JvDkz59BVX7s6kG3UH9ZWtG6i+2tfppVtqSmb80on05KaRTR/xJRA5lQZBIRtQ2t3GlkD7IuFWgPkndFR1mrc3nN+bTmlVRotLrBT4cn02bq+5fR81zWD40N9N5AQhIsQuw40p7p4kijzlDUqi7IKs+g7F99CcyftbwsF1vjEO9t9E3gqxJ0MJGgCzyqF9qrVzgXr9Sm0iA8E3smL52rptF4acFjf3EBv76ijPy1bTnJ3Lpw/j/arqqIF1emKSxB4u5qCdfBWTaHVivobFGZ+Cby1t/
+Y2jauc4Rv5qe+piny0EAkQw2KBgMelAUIQg2mQuBhTjMSDwSe3oE2iuQdYg/qiyd9ijMUkVJR6riRHjo4ffHkYUjLS7CH1cLAghsjwAgwAoxAvBBgAi9e+8XRMgKMQEwRANEEEi+OLcxDhRU++hprqCmIg6ydohFOg0gPMqvl5jV9VsYWVBotDoZIj/v8m79NI+6MOIDIu37mFaHcPkZzDTtHWbuAUPuu75q/28bc2yAIiW4D2Sau6BSGFttHpJNF6wtGUXtOsmaTXn03UvDvuYLrSSPujLMLIm/x4ssc6+H5JfC+e95+dMiMCm121QM/CDw431qlfbvdfDMCD6YV0m3WbjxpYiH74Hk6ySOBt3xXPd23YiV9UF+vlSeQjpdQt8mdB4l347GLUkKKEoHntwaeKnknAZAkHu5DGA20dvZoZB5MF6A07uhKd9N2c3/YmVgYx0FNvER7x2BKrSTwZh+6kOYfdrCbaUPta+ZA6zcAqKbh0o09CZJQlXGF+WUZvowpE38juTECjAAjwAjECwEm8OK1XxwtI8AIxBQB1IDa7dIYICpLDTutR65bb0oh6xHpa4Y54WOVChoVAg/x37rlz/TWnvVOS9HeD4vEk+6fqGuH+nb54tBq5eRrF7hd+qy8rl+QsX0N6cpUuNJ2LDqWmnf1U/deEZaevMP193acTbni/Fk8UijEmpLKO6UmSLx7/vdp264gTgrEus+7banSkLITSNkDJpfT9UsWKBN38lq/KX9mgRrTaFUIPGP6LMbVCLzzhAJPONK6aSDvrnnp5bRLzIg8I4mnksrtJhb0ffCGdbRtZZvjZfjswf0v1W5+XGjt0matAimdOpNmferrmvoOWbz4wgItKDXYmg/r6Ye/dHdvY/6zTp5L554GV+KCwNWRjpviooOTA62LoUy76k0voMprh9O5+BvvtwWdNm4XD8wrQApzYwQYAUaAEYgXAkzgxWu/OFpGgBGIMQK1DZ0pRb/jspSwC2tLxZd0BITSobcvXaXlhJ9dClUUUmivW3cnre3c4mpt35txecbTaUHgwU0Wzego64S5/n0VAk/2NyrxJIEHBqVLpMX2HVZB/3phqFZT9bQs+mPtWZRXkEybHdjl7v648/RHKed0weRZNM0YpTCH7nxpIz361jbHZeMZ0Qr7i57fPnMezagRBflcNpA1vYKtcUNSO03Rs2o7td346GA3RwKvMI9yxpalDeuVwLv6xZc15Z1Vk0QefvaJvZ4viFupxMuEAk+1Dp6ewJswr5TOv3amE9SW779zzec9XQsV3sQF8zVXWrN7Qp+eDQLJrXLTzonWLODZ0yvpW188mqJau1Mfc1gxGo1HoA6HOtJLmnPYf2crRakE3EPcGAFGgBFgBOKFABN48dovjpYRYARijADqiiH9KY7Nqi5akGuB6glpPdKhE3XW/Bh/2KUjeSXwgjSxuGDZDVptMS2V0IR/at08V4O3dUvyZ/6oeioYtZuePueCIGEfHEsaU+AXwL6p3dyFUnVylRRa41ioi4fWd8yBlHvSQVQwKZ8GasxVIpffcprWt3+Le+XLH+l+yv+VkO5ZNEngNbX10K1/X0lrdrSY9hwk7sT+4V7977Pm0Zxx1uPaYZcJAg/z6VNpbR1oBXk3UDVCEKai/mFv6udUXl4OHX3qVLriS0eqbj/9RaTNInVWteUIMIHnTccdQ/MqKzOm8nrtwZ302oO1tmHhuezb+6WBH/Vd68a1tO72/1WFIKUfTC3mnX2eo7rLj9mCKoknyTsEKNN6Wzr8fT54AkXxouEo+zBU5iFb+zuPlxtVXlikIyBEiceaiiJFNLkbI8AIMAKMQJQQYAIvSrvBsTACjMA+jQDSoJp9kiLDBVCmavOArCvMF3WFRCoPCBAvShIrTGTxd6jIjM1rGm1Q9e/u3/kyPSBMK8yK5Xc1VdKe5UdbbvW4gtF08aFldO5B/g9g+jRlaUyBlFnUeArigG5nYmF3L2dfeAiVXPFRrV
i+2f7hWhB4XtR3uBYEHhR4Vio8Y9q4kcQzEncY0+g86/ZZzRSBhzj0SjxTEwtB3iVGFKQRd3INWO8J546nOQfU0KGHL1BampP6zmqQQ8bX0C/POEFTKwftvirndCLxoDiE6tcPeYe5vKTPyhhB4O1/7mJTQxkz7PAs43MUymWowFQNL5zq4enJO8w7sjgv+Vm9N61X6WYIuVMm0tFVl2BMc8bftC6RXuukygsTV5QHQBo0N0aAEWAEGIH4IcAEXvz2jCNmBBiBmCLQ1dNHe1riaWRhR4Z52Q5N4STUdiCLVB1N3c7jpGi46X9vpNXrVisP+52vXkVzZyXVcH7bIIGH3EFx4oMKD82JvEOf8rwRVCFeIPC8knh2xhRODr5u1u4mjVY/bu4jn9dS9QrFPQIVnFm76d5v06rl7zsbVxguPpsuILzsCDwQOKgRpX9eVwsV3pPLthN+4jAu1aF+iTsZXiYJPDkHiLwn73qPnnlzKK01u7yE2ts6qbdbpP4ZxYzi/swSWXbjJhXT8YLA0+6/0aOUSLyz73/Qza2S0veJiy6gSkEw4LFwQ0a5mRDptFDimdXEO/HyiTT/NP/O5H4JvAPOO981iaknkEAcqdZnA5GH2nhr1u8mkHazZ1SJV/KnvgVttuJmz1T7hqEYV4nFjToyU1+SmcUJohefb9wYAUaAEWAE4ocAE3jx2zOOmBFgBGKMQFzr4AVlZKFP0/RTX03lFrBzopXXq5J4QZJ3mFsSeDhsZ4v/ARmkQt5pBMpeAg///zunj6C5Y9UPYnriDnW14I5sTFM2I69U8Lbq03fNozTwwQ7lIXJ+cBZl7Tdeq88Ex8dGkXpu1lZteZ9u+vW3XRN4UN+h2RF4wAlEhSTw5H2bKbIZ8YRB4Ekcf3rdy7RuZZLES3R2CZdR6y8Wcge6aMnpfTRGkHhoHXkVNG7+Qpo+e5Ltnvol8EAwoOyAdF/NFJFntoigDDT8EngLF59PtY17XVyUn6BkR2N9NlU3ZKdpokKO2cUZ1P45YaH6vor5SCZcc63i4/p3qjvH/RgBRoARiB4CTOBFb084IkaAEdiHEdjd0qUpzuLW/BTYNqq9QBq5qQ3kByuVg5xdOu2cmXPo3NPPC0x5J9eSQuAJsgj1tna/fxR1N6eqXczWrifw8P5dn65whGioPlMyJdXOKAH7VSGMLOoDck12UwtPkndYkApp/J3ffoVKK9KVUvUbt9DuTVvTcJHqO7yhQuDhXgWxlkniTgYZJoGHOUHirX63jlpb2k3vHxB3ecLa9+Lj99DkMYLgA9msK3o//ozPUV7VZMt7zw+B99TFSzQTEam+lARIcUFuxhR5ciGYq2pUYWAuq15NLCaccAbtd87iQOKQzxK+LMA97dVoARiFSTQ5frCZdMi0A62XmPT3VsHeLyZQpgAGTUhFxr6E6eyLPdTKN3BjBBgBRoARiB0CTODFbss4YEaAEYgzAiBPWiNc/NsO2+qyAk2RpGosIVNY4WgKdz4cVFSvDWqP7ZxojXOAyNO3OTPnBk7cyfElgYd/y3pbO5aep7Ts6cXjUvpJFV7umh2UtzapdOuZNY56Z4/TCs6DGALuOLirOpyqEJ9Kweo6OSnx9OQdLjOq4PTz1bXsoudXvaDVwNu5a5ugmcxVSpuXLaeOpqQBxWyaR9+m7w0OY0fglQjyaFRJPnWIexbPbBj3bdgEXtuWTrr9ppfpzTU707Yyf6CdFi1ooqMWtKW+pyPxCgryqWTCDBp59KWmt4JXAm+/qir64YnHphB4coIwiLygCey1t/+Y2jauc/u40Edu/k1aGrfrQQwXyNIFqJMnySOn2mz6IYImN/2ux+x6p9T7TMzpZUzsBfYBrwGxCXCgtlIbexnf6hrUFa0WBB43RoARYAQYgXgiwARePPeNo2YEGIGYIhDnOngqNXr0pghuSaNMbCliVq0BlYn57caECy1anjjINayfPeg2a3dNYXY+jS9MVZwtyamjy//1csplEF
fA1XPg/MOo6ZSDXCseM0HgIUCo8YzptFn7jdNSZo3NiiyQ5J3s31/XR01djdRMjYND4EAs24Y3l9Ent3+G5tB8GlVWPvh7MxdafapsvnBe3dXkLX3Ry70UNoH34V+207bdO6m5qJl21SWovq5LC/uwquU0dUyn5RKyBCGfJW4wEHgFhflUNPco7WVsbl1o5fUXzp9H/3HQAkKhfav6h3oiD26fQX45EHQKuRcn2pmf+hpVzZpjWwPSyz0mrzHiB0UeTDucmkpZAqcxMv0+Uq7x+WdlfpPp+d2Oj72A4jknO1t8UdCvfdnlRyHpND/Xv3NCiN9nBBgBRiDaCDCBF+394egYAUZgH0MAZgVeaxoNNxR2RhYyTbZYHJ7CVC05YRKms59TLMb3pQoPh+LGjXOodbOzQQYcaIty9roHJropu6mDLtqzli5q36QND9IOh1cU/+/TkVgt3zhLU+SptqjUuaouS09lvPf1+1KWkXSiTaal7+kTdd10PET+QAEViNfR9x40eE31mLFUsrgsxYHWrMad2dyq+HnpFzaB9+7/fEhNRU0agSdbyUADTRt42zZ8kHcg8SSBh84jjrrENJ3Wiwrv0SXna8pROwdiGaC+thiIvCDUkppKTZe+62Uvjde4IfFKp86kWZ/6uoZBUG7QVmtQqc2mvxYx2RGrQWDld4zhdKD1GvvokflCmZ90S0fdT6TZQjGNezrochPlpYJ0F/vIjRFgBBgBRiCeCDCBF89946gZAUYgxgjEtQ6emakADgJwkwWBh8MzaoWFkW6ouv1Ip4KSqiWiacvXrbuT1nRuoWZB4LU4EHgp5J0AILu2ibK6euiito10ScdmU+JOj1PD7/5TFTZRjyl5oAz68KgcwN6ORiXg8m0f0PLtH6QNAxVeT1u3lopm1iYvH0t4oW0t2ELjb55Bs2bPG0wxNqtxN250Ee3YY61Ec7sWp/5BOz07zWdG4E3tf4tKdUpGqzGyxXOlJ/CsVHjLd9XTNS+lqkPt4vrBMYtoQXWVMoEnx5KGDUh9xl66rbPZ2P36YFhQxIJUH+itEWT5BHryg2Xae0+teFf7OaOqhmZW19Bp+y10gjjlfZB4MLWwS6cde9zpNPa4M7TrcD+AiIe6MIwm6+RhLqt0+zio26Ly5YObPTPWFXRLrLqZC6UwckX9PW6MACPACDAC8USACbx47htHzQgwAjFGoLm9J7RDWZAwJVMak3XwcJArzM+mHpF21SkOmImIGnOoGCEEiZGXsW5Yfxe9s62d6t9LT0OU4xnJu6ymdspu7tDevqRjE10oSDwc9u0a6uK1fvMspRChYulIBK/+UJpc18mogjOq72TX5qZG6m3ooZL+UtMpRtWV0gHPz9LIuweq/qKRnd+79vs0Q9Q5tFJtZSqN2AqDMMmR2lcbCK9EboLqRtYNhrSg/zmlLTISeLio4ryrTK9VJfEkeYdB/JBX+rqPTiR0Z9822tmZWvtSq+2vmT1004d1/bRsYyntbjGvGXbq/ANdE3kg8dB2vvCE9hOKuxFTZw0SdxLE4VKSyc9MvcmC5MWHKyalm3Jvp7CfWzexmfV1Mt2w2w+3c+PerqkocnsZ92cEGAFGgBGIEAJM4EVoMzgURoAR+PdAADVuGlqT9abi1HCQqBQEXm9ffyiunEFg48c9N4j5VcYAWbGibTN94Xe91NjTmnKJ0XEWb2JN2ZtEquje9oOGZbSgp0llKlJV4SH1uEfss6rphdLkHjoZ1TRWBN6WzRu10fP68yh/QJgrGIi87qxuanh2A+0o3KqpRUF2Tp8xl776je9aRhV2Cq1V2uj2Natpx9o1g3EeeubZHpBMvwQKPLTaEbXUlZf8PHJD4I0clUqWWhF4GBck3n0rVtIH9UP3rYwINe9gXAHlnWxBpBPrFWVmRN6Ozoco0bc9DRi4cyZ6eqi2ZSi1+OUPaixJPCjyvnLcqYPjrNi9jlbu+ZAeWPt0ytgXzDqF5o2eQfMrZyrt33DX78
RzkvyiJmfQ+bdMpF/ChEmlXp7SIgPu5ESGBTxdIMOppiVjbehbKvYEn81O5LRZcNhL1NvjxggwAowAIxBfBJjAi+/eceSMACMQUwTiVgdPKlok3CgsP9yplW623pie5ObaMPpK18Q7Xmmhv71jYx4AYwqR+TTQ2E7UmFTf7dfdSDc2JlP7VFrnmYcQXk4tTDWYXSxI5YViVRIGZgQe1HfNzc4E5s6XlidrA+qkir/67V8spx9uAu/Nxx6lNx/7u2l842bNpkPPPIvGz57jtJWW70sCT6/CUyHwUAOvsFjUFhQGFvpmR+Dp+4HMA5F3kSDurFqQBLIZkWemvJOxdPeBvEs6F+vbQ/+cYhmvVOI9sOapNOLOeBGIvAtmDxF+VoNGJRVUn84JcnOP+PKpS3wJFcUWFwdaPXZu67TKdHEo4EHqId0ZadYqbsKjSvIZSgVJAAAgAElEQVQ0UpYbI8AIMAKMQHwRYAIvvnvHkTMCjECMEahv6tK+RY9qk6YU+aLWlb6mVNh1uoLAJyr13KzWok/zvemJFlq9M7XmFQ5sGnGnkU9Cgbc3fdYteYf5VQm8MAroq+ytce/8EHhbnn8/bcrTz1hMp595vmkoYRMoegXeI7f9T4rqzgqrs7/xLVsSD88xalRCedO5161VHvThQtu2NUkYSxWeCoGXV5BLJSOK00JSJfBU9h2pmgkU8Q8wNV9P5L218zZLwmNXazMletPrzq3cWkarxEvfOhr6tH9qP7v3UOu0HZRXlCVe9jXGoMT73uFfsoUCBHJ9c0KJmFHB1G8fqWaWf7e8KMD8xuB0fVS+eHCKU/++8UsKN9finsazjRcML5xML6CgxzXcGAFGgBFgBOKLABN48d07jpwRYARijECTKLiPb86j1qBggAue/GYfKZR6xRLeLxLf4De2dkctdMt43Cocwl4YDsaVIwvEYT2ZxihJPJB2eE8SdzIuSeC5SZ2V16oSeGaGJX5waWtYT+2N66luw1CNtTHTTqSS8ulUWjHdcmhjLT6vBF6isY12vbMhbR47Ag8Ha6hNwzJlkQTe3d+/SYm8k4uxIvGkwYwk4JP/Fi7RXb2aYqd1cyd9eN9QCimUeMWlT9iaWOTlCgVPWXoNLSsTC6/3TCZrMDb3vk5NPW9ooUENbVQubWncYxm2VOH1dPZT8/Yhkq8jr4l2l6yhgp6kQ3ReoSBOK4SLrA2R56TEi1otN316qiRD8bt2USsT95SKAszr/aB6XRxq9BnXEoRCXKokiwSR1y82AmU6jHuC+ncghaGi5MYIMAKMACMQXwSYwIvv3nHkjAAjEGMEolQHD4cwKO2grgNZYeVACLiTh7h82iUUhHFpUGUg7qg60QJH6XgqlY8PvNVOf/5XmyAY0lFeklNHl/9L3dlTP4IqgQcXTqRbwbDEb1v/1q8FeZdOnslxS8qn0fRDPm86jTGV0syFViWFtnlDnXD6HTJrkJOFQeC9snE1bW7cTVuahoihSWWj6eipc2hyeeXgukGwLXviMXrh/odcQ/6F392eMg6eZXzGgKjTE5DyoC+JvLq1rbROKPFky8uppdKSp6gvO/XLhZz+HMoVbs5F5eZmDiOOuoTyqia7jtvqArP6b7VZHVSXnUwdl+2AviH8VCeH4yxeWi3JvWSGJPJQ+25XW3r6rBwbtfDWr8qhns5Ux5h11a9QQbdIK95L4Mn+o8bl2pJ495/5U9Owo1jLzezLGxB5UuHZJu411VRO1b1y2y9s1azb+Iz98TlbLuoKyi9v/I6H642mF1DloQRBgfgbP1p8UcSNEWAEGAFGIN4IMIEX7/3j6BkBRiCmCMAIYrhJMBweUBC7WLw6oMjpTD3sW0EbhGIgzG2LgxMtlBEDQjkBUgH7oDePkHXx5ozNpblj8zToKj77G08QtnzjLOqdPc7x2qTjcKG4RxOOfa06QHW34W21OK1IPDM3UqMKT4XAM0ufRdxf/fp3adZs81psflOvQdqBvNMTd0asQO
RddtCR2q9B4P3kE5+kPg9SJtTDO/r88zQSHoo7p2d5iMjLoZ1rWmn5nVsGQxtV9Azl5aaSnTmi3lZuiSDCxU9jg/ouL/9j1Lc+Pe00Z3ou5YqX26ZXP4K4ey9ntyDvzOtDjukvopN7JylPYTSvkEQedEkN7R3U3NlBVobO760dSa+9lmreAfXd9vJkevbI9pFpcdiReNd97IumphZBEujKwDh0tEur1xPDKqmcQcVkHCdqqkWndWayVIG+diHSnlF+AWQrN0aAEWAEGIF4I8AEXrz3j6NnBBiBGCNQ15gILT1PD5NMrYPKAwd9HPjdpAn6JTbC3jJjimrY89vNJw1CskWQWlqj2A+VNuK2v1Pe2h0qXQf79MwaR63fPEv5Gr+HYSflnTEQMxLPrKZVXcsuen7VC9rlUFB1JTpp586dluuqe3s9dYm6gWbNzsTCjwsoyLt7lr2qhLUk8bwSeBCRTdtvHi258kpH4s4YkFR8JnZ0Ud26Vtr4QtIlVpJ42UK1k1Mg6myNgDo3fTnZOROoqGiJ4zrzDsmn7Ar12luSwNs+0E7P5m11HB8dTuqZSDUD6bX5jBdbuc/ic6K5s9OWwHvj7VJatiKVpNtTspkaxCunL4dKEiVpsSKddtT4JPFubFZptFE0Y1ApRWAkjcKskxdF1aLTjRtWyi++xMLnGTDixggwAowAIxBvBJjAi/f+cfSMACMQYwQaRB05qBXCaPgPd5AhcK7rEek07SDuer2ZaJiposJYg585/JJRfuY2u1YSd7I+WXGhKELuomh/7podNPJH5g6lVrGqqu/k9X5cWOvWP5tS704fU19JAfWLl7FldffSzLmfpBGlUwbf0hs7yF/iXm4ThgF/f/c5re4W0h/randSV1e6WtCOvLNT32EuP3XYbnzhUVe3DEi8Y0dW0X233KI55eYLJiTf4bCNoziw0Gokiv/Rp9G6mnzvOCNFyjSmlKRL56qlhJemUBMvI4FXMOYIym0/THkqNyQe7r0VbY30VPaQMlBlosu7Zzt2kym0Vh23ihp4UOABX70Sr793gB57rpJq61PvXUngmaXQyjkqp6c69srfWxF4mVRmOQJk0cGtsYjeNMSuLIPXeIzXRZH0dFobUn4bRT1c6bLt1N/P+/gbyPXv/CDI1zICjAAjEA0EmMCLxj5wFIwAI/BviAAONTCzyGSTNYqkm6xTap1KLHE0svDj9KeCiUofPYlqTHP04u7rhsRzS95hPX7qSZmp7/pFDTUQdwP51imV2TkFVF6xgKaMO1WDVH8o1zsjI8UYz8+7W5bT8u0faH23bN44uA1WNe9kh5mz5tLXvnGt7bZ5JfCQNrt04xqVW0Lr09bfJ179VPTBGipavjblupLsbCqFm4mh5QhWDcQalLOSZPJD4Mnh9aSLJPK61/xDq5GpT+tG2mzXs+7Sq7PKsyn/UHMiy7g+EHh3t621TJu1AhfptEcPtFJDwYOUyF2Z0q2wdx5NeOVgynnjReodaBp8r3NMCSVqSqnxwBrtd5LAkx0kkQcC7/f3jk+bWhJ4xZ3FlNtvfm8XlwsDEWFqYWxWBF4UvyTx+nlgrMmWqTp5+IIKDePHoUlX31qhxM90yxUMfLVF/cpMz83jMwKMACPACASLABN4weLJozECjAAjoIwAFHC79zqPKl+k0BEHg0LhRqcnhfSHb4UhbLtkovC235icrkf6UKc42CVEunDYTRJPvdvbaNuzm6jpwyHyALEUThpBNcdPppHTy1wbbYDEK3rsLdt0Wi/kHeLykyr9/nPfSoO5pzq9PpjZXhSNmCDqMtZoJJ48/PcJ1agkkUDcmaV8r12zkn7y4+87bq8KeYdBjAYajgPv7aCqvuse6KeGviEFbmHpKCr+/T1p0+SJB7oiN0n+gLjT1HBCcQelnmzjZs2mc77536ohOvbTE3ld4pnBMw9HXtl6Rb07s5p3TgOr1sTrGtlLf02sdxou7f1J+b+hidnpqeWFIgt33F+T3X
P6y2lAwD5AQ+vB70HkNQkSb5cg2xo7Us0y8P7b742gt8TL2FADr3bkCtP0WdnXLYEXVmqlG4D91j4d+gIjR3NINTqcu4nFrK9bhaDf+fxeH2adQ9S+KxNmGdwYAUaAEWAE4o8AE3jx30NeASPACMQYgdqGzpSDuJ+l6BVKMjXTa5qsUxx+D3NO4wf9vheFm98Y9OrHNb99n5rXpxJ3+vGh8hkxrYwqLnROATSLCyQemvwJt1k0+dPLWvwciI0EXm9Zsa3yTh8fCDy0aeNPpbGVk6hA1G9q7uhRrtX4xGMP0hOPpzu5grg7/YzzLU0rjBjhnukVLJlb8luVwKvtTSWQMH/FXx4z3aoCwdpV5Yk6dAbiTnaGicWhZ57tZZttr5FEHj5bQODJzxO36js5iaoKb9OIZnqlq9bVekDeFedsoKIBYcxDQ0o4PXk3OKBw1c3qKzIdv/bUGbSxqJe6eoeUXPXNhfT00irqaOjTlI9Gn5HtI96yVN9hEisCz8qF1k/9RVegKXYOwtRm8B4Q+EEtBwMlmCsEVScPqs09LV2u6rkqLj8j3cJ0R4fTLdKyuTECjAAjwAjEHwEm8OK/h7wCRoARiDECQdTB05tSIH0o0e3OlMILfH7UWV7m83tNmGm/RpOQjbcvp8SWVsclIM0pd0Ip1Vxq7orqOIDPDiuaH9ZGqCqYS9WFczUFpxcCC2PoCTykzvaVpxf3twoXBB4Io/IR42jahNOoQKRv7mlxn2oORd66tck0ytPPPD9lur899i49Il7Gds6ZB9KcWTU0d3aN5/WrEHgNfaIGpYnb7Lg9XZR49llTaKzSadE5iPRZy/0QB3+9eyUIl9Yn0xVqqrdfwUmFjl3dEniVuc9RZd5z2rhGAm/abebT5fRVivqJ6emLiZoS2nHyDKprbdZIPJB3r6yo0cg7vGSTRN4hqw6lraNX0vtTrU1LzJxordJntWdQ1EbD3wY35kKOoProkAnVtVSKy3vLL5EXtTqnTnCHqbKsLisQKlp1Exmn2Pl9RoARYAQYgeFDgAm84cOeZ2YEGAFGQKvX09yersRxggYEB1IKQbLgkBdGkXB9TCqOhE5rCPP9TBxAjfFLYwrshzyMNi3dRk1LtystNU8csKBIKTtqvHglVWiZbrsSq2hFy8NU37U6baqDKs+nhRWLlZ1x9QPoCTwr0wqrtY0sm6jd00gRnT/9k4LMKKRdTcHUiVq1ppZu+dHTjrBe+Y1T6CMHJvdA1RlYDupE4BlTZ/XBTDnoaGr79a+pf8MG0xjHCBWesWVKfSfn0RsqQJE3Sphd7HyoxTO55ETggdjZWJpU4DXuTFBT7dDel9UUUvnYdAJwTtFQ+rCewCv/JxFeVi2vd4JQNbanpdM2HjBGq4lXlHUQ3fT4Ou3yns5+at4+pMqbtn06zdw5Q6Q0Z2n36jMH3Et1ZeamG0YTi3mjZ9D3Dv+SZVxQk9U3J9KUfo43boY64B4Aka5Pow5yKr3Ssz2B+pa9rtaOv4cVokxCfQZKUgS5Tv1YYSkGkXJfU2GuNs3U2nhcRoARYAQYgcwhwARe5rDlkRkBRoARcESgq6fPlboIRBRSj4rFq0OQf0GYUjgGadIhTEWbl/jMrhk3uoh27OkMajhtHD2RajSmwPubbnpdeT4o8KB4Q5tylbq7p/IEho4v7rrRlLiT3UCkZIn/OXr0dzRFnpumd6F1Q+DlFYyk3PyhWnlV5QfSgmkfoyAKvauSd3KdN1x1Ou03d2zgBJ40rZDz5Od0EV6jK4iO+KhIF169m5bf9A+RrlogSDKkzQ6RduU5OSnutEHXvjPbYzMn4O7nEkMOuPpifAo3iROBh2fq7lUr6bk3zckwkHhTFpYNEnl69R2m1xN4Vuo7GWZe31jKpiS5ASIP9zzawNjZ1HfOf6Ws5skPltGGV9op8c9iqmgVm6VrwmtEI/KeFiRe7ajUuI3ps07kHYaNmpoM6Z6aQ7FwL89kk2UHUMO1Tfx9UzW8yD
TBGPSawzSwAJYgN7kxAowAI8AI7BsIMIG3b+wjr4IRYARijIAKqYQDCog7/Ic/DlEgi4YzvSoMRVvQW4q0tEbh+tsrDBH8NllvEIejRHefKZHqRn2HeDBm/15X0ZpL5lLhZDXTBy9rcSLvWkXGb2vb0Mjtb1xNfQ3zaPLkAVq0aICmTHGeVarwVM0rMKKsfydHD5LAu+Kzf3IOWk/KiGftwbs+7Zq0cHKhlQReTlYvFeUNpaLOnplFeMkGEg9kXmd3ySCJp0+jzbTyTsahJ/CW/t+LtPT5l6i/JfUZmihqFR4++yiaVDnZFmOVGni/fvZdWlffRI0DXbZjHXhqjUbiGQm80QNDCj0nAg+GFrkD5YPz4Bkc2FtnsPvzN6fNv3N5gp66xro2H0iuXRVb6d3J/9CIvLzCLBo1Pk8bB8Qd0mbnV860XRdiGD2yIDDVqaub3qJzmOmeCAF/50AaFhfkir91Scdpu3quUXTttcNdKg69lAZwu59QzEqHXrfXcn9GgBFgBBiB6CHABF709oQjYgQYgX8zBHaLwtsg5IxNb0rRI0indhB3wrk2Ki1uRhZBFIbX7wnMDawcUbFHfgi8TKbRIm32pfqbLG+j3XuIug0l53ob5lLHG9cMXgMi74or7InQtob1tOHt35CqAi+/qIpycgtS4gKBN3/qx3yTGVY17+yeJZAx5529kM487QDXj5xdGi0IvM6B7hTyDhOcdVp6jarmVbs1Em9X2zhqTlTQ9IICmlGQnxHDCqtFSgLv1z/9PW3ZuEnrNtA1IF6pV4B0mTB6El14xKWWeOUdkk/ZFda1uEDera9rphyhNG7o66LeLPvPO5B4MycudV3/TgbolsDDdU4kHvoAi6kHldDi28a7NmnAlyMjivOoUdTAi0rDZ2erMJIJ4ssPN2saIvJEHU1daQLjGH4Md9zEE1TfMA0sKsUXVyAMuTECjAAjwAjsGwgwgbdv7COvghFgBGKMABR1OBzJpncvNUvLjMpS42Zk4Ueloa/RhP1ScSZ1S+AhBQ+tXyiAMkng2anvduw0kjJJRRKaVOHJHqok3rrVd1B/SSoxZ7yHzcg79Jk3DTXw/Bf0v/m2p2n1WneuptgO7Mkff3OF60duc+NuumeZuakBCDzKTXUkPvywLKocPaS+M5tw6abT6IiSEu0VZgOBd/dv76C1a9anTGtU4Q3eF1WCxDvyUqEmNdxL5dmUf6h1Kt/62ib69XPvaeQXcE8InFqynEmsCz67bpDA06vvMLsXBR6eP9zyZgo8/YqevLqWaj8wr8248MIyWnhRmeb86dakAeUJCsV1mao35+XeCatem11s8jMYffDFCZTP0gcmCvG5wTVMRSO+aMvGtxHcGAFGgBFgBPYJBJjA2ye2kRfBCDACcUYAdfBwWJOmFDiUoP6PCkk0nOuOm5EFDsb5whG1RUeWOuGnN6ZwaxTilsDDEQs15zJN4N2/9TLTZZsp7xCPJPC6PjyPuj5cnHLt5Zf3O6bTdnRspnUb/0q9vekpkdk5wh1R1LwzKu8wSXFhDU0Zd6pIJ8zXjF78qH/cps9ifj8EHq63IvF6szspQUOpsyrknTZe00y6MP8Ip1s28Pfv/cOfNOUd0rv1baBXEF0WZrRHzj2Kjpp7tHYvg8hTSZ395t0va8NL3KG46qF+RxLvkEPfpEM/8iaNHMinPFHRTt+cCDx9DTxchzqUmLdv3FTqPfuzSlhCkSeJvJr9CmnsgnSTDT355OS2alZz0BhI8esbB3/VcdhUpTj9dIpSTT6JJUx/ZJ28McL0I4g6mX4wcnNtWIQjMKoSDrTcGAFGgBFgBPYdBJjA23f2klfCCDACMUYAh2MoCpxq/URpiXEzslCtOwQCAbXtoNiDAtLrniQ2t1Dtn1cpbxkIPCglQCBkqgZec+9uenLHkPtlliA8srOyqUtwa3sa0kPVE3h4t+XpP6d1uvZa8zRHfbrxh9vepG273qY+QeL1i9RIEHdmpJ1+8M
ljT6GSorEUROrzcBB4ci2yJh7wwL3VTrupR3Bhxpp3TjdKnrj4kHb3akCncZ3ev+Xq74mc2aQrsLHZkXjfOvsqcT+LuobVeVR4WIGjIYEk8CAWwn0na3yCxOugXst02qqaYvrceb8wXYadC22WqJWX3z8u5Tqkr4Io7jnrMzQwfpoTNK7fVyHyrJTCedsaqfj1TZS3PVW9iSB6xpdRx2FTqGfCUD0/18FZXBDFmnwIFXEhFRWf1Wh7RCmK4awLq4p3mAYWwAc18LgxAowAI8AI7DsIMIG37+wlr4QRYARijIBVHbwoLyluRhZOB6ehA2G2RtwF4fDrxoUWey2daIN0oU30d1CLIO66BjppU9sLtLn9hZTbCiReR1sutbc510kyI/CMKjw9cadPN9604ynhnKyWxirJOwSKdLOOhH0Re6fnxDOBJ0iCP/7aO2lmhsWGkfdQtyDEGvtEKq2LBgfaOa3m6kkXw7jqCtOKV18UyjgLAk8OZlYT76IzLqOph02nnNHZg4YE7YkeQeSZr1tP4IHpNCr+QOThpW9Q3OF11ad3UWPBg6Zrs1LhGdV3uDhPEHhdY6Yoq+9cganrbEfkmaVXQnEH8s6pgchrXrzQqZur96P+RY1USSPtuqdPfG53RKtWrBHsMPEsL83X0ri5MQKMACPACOw7CDCBt+/sJa+EEWAEYowAUgTbRdps3FrcjCzM4tWTLEERd3If3arwQOBVXjQnMAdakHf1PVtTbquX64bMKOQb7W05GoknNGK2t6AZgQdXWrysiDv9gCoknp68w7VI1cbB3E9KuZcaeJgbZgpeCDw7LEDgoTUIAq9HFvFyePBB3uULgmJai7VBRCY+O1QJPLO5jzr+GDrqhGMH37LDRNa/Q2etXJcJgWe3vtsuW0Tbi6+nRO7KtG6F4vYf99fUX5uRd+gBAq/j9E9nRH1nFr+xrhvucaNhBJR3ox5+V3l7gybxQAAhFdNN6QHlYAPoKBWLHV29mhrPbc3BAEJwNUSYBhZc/87V1nBnRoARYARigQATeLHYJg6SEWAE9nUEEt391NCaXiMs6uuOm5GFPl69WQiUYiDvMpGCVXvPSkpsaVXayvKZ5VS2ZNZgcXaliyw6mZF36GpH4OF9KxLPrAYe+h8nOJozThEEk6gxqGLw0d65U1Pi1TcOkRKod1dSVENV5enqIT/mIxKaVWtq6ZYfPe0azosWH0ynnLxA+TqQVCAQQHpYYSEJPAyqosST5B36x5nAkyDqiTzU/pTO2oMKPDB4Doo//YZMHzOKPn/SgdqvVEg8K/IO14+45L9oz8gJyvsdVEe9SQ7G3NPSPfhZVPmzF11P03zegYGl00a91qmZA60eT5RAwJdjily5a6zdXgCCtlPEkzBxnnc7ll1/fBlULQwsuDECjAAjwAjsWwgwgbdv7SevhhFgBEJC4NGn/0GPiNehC+fSpYtPpJGlxb5m7hUKo11N8SPwon64M24K4oWqChwBiAQVwsnXxu69WMXQAs6z00+fFlgK2K7uLVrarLGZpdFKBR76WhF4RhdazWxAYHjKidl05FH+FHJ2GEOxgv3CXvlpXlR4T/71c1TfrPZcgmgEiSlVnFax7ih+TijF6lLebhNODyDzpCKvRBSPg+IOL9kKe8fQuI4T/UDg+togFXjGyY1ppF++I0lUaY6ZLgi8k/afTCcdMGVw+M6cFdQg0mmNarzyrvOp6h/llPPW82k49B1yPPV/5ATheFwoPofNnWVdg+fhAmBSIQgeNBg0DLz8IRW9NmRYoTok6uEFZW5hRpCpxhFGPztDCPklDZR50vBiuIm8sFTr+CKhTKTQcmMEGAFGgBHYtxBgAm/f2k9eDSPACISAwJeu+ZlG2J1zypH0/D/eoTffXU0P/eEG3zPXCwIPqYJxaqjng8NRk0gBjnqTtZIQp175E1bcSKdNbGmhpqXbU6YEcVc4aaSWNhvkYXlr1xrLpRlVeE4Enl59J4k7UEtQLH73u5
m9Z1VcOVX30E0tvCu/cQotEkSIE6Ej7yukP0Lt46TibCx4X9Rre980ZGmiYPZmedf+hFeYzQ+Bd9XN12uh1i9/JCXk4uo5VDJmzuDvJJH32Nsb6W+vf6gZWLgh8JA+G0QDoY/nD+q34WwgeEAagxAuvPW5pJOviYGIU4y7vzyUvuzU1+79qlEFQh0+pAj0M1YmrlVxyMUthS8CSsVrOM2iwjQEgXkF1syNEWAEGAFGYN9CgAm8fWs/eTWMACOQAQS21+6mEYKwA2nX0tZBJ134TXrt8V8NzoR//9cnzqGzBaHnpzW1dWsEQJxa1I0scGAqzE8W0Ycyqku8igtzhv2QbrXHQaSLYmy4zbb07bG8lZq6N9J7jbenvF9fWzD4b70Kr69hLrW/cY3moAp1lCTuoGSZPHmArrjCA7vg4ibXVDRiz0C6BtFUlHgg7+bOriGoe6wIPEncWdVN/Mc/2wbDPfLw0pTQzVR46GBF4AWpvuvZVUedH7xPveKnsRXttz/hpW+3ChdakEhulEuTpk6ho6bXUceu1ZZbNum4K9OIvF89+y59WNukGViokFZG9Z2f+wP3WYkgzRoFWTVcDZ9XUOBJ1SfSZ2HOAFGiWyIvKAIvLMWYF8yNeDmNIYk8qNNAtIdteBGmgUWlIF5xT3NjBBgBRoAR2LcQYAJv39pPXg0jwAgEiMALry6jW37+Z428A4kHlR3+v5HAgwLv6lv+QM/ed5uv2UHegcSLW1NRQIS9JllnC+pAKC6koywOcJUjC5TTIsOOO6iC8U4EHtYFEg/ptM09yRQ9MxUeyLvE298dhAHEip7IMTrQZgKvTBArqIn3yGPv0uq1Q664c2bV0BxB2p17ZrKeGpoZgWdH3G3Z2k0g7rZuTScbJ07MIxB5kyaKGlg5dbSz5Lk0uKwIvLHtJ1JR3xjf8IK4w8uu5VaP0Ui8PPET7bWXX6YXn3lBmcDr60kI8q6WxpTZG6JgbCOJBzOL3z3/vkYWgxY23m/6uPW173wDIwbQ1MSC2AmKKPYSE/Z/hEjzlySivv4dSDyQeapEXhAEHvZhuNOK7XDEs1gg9s3LnhkNRPB3wg1J7WV/wyo5gXulpqLIS4h8DSPACDACjEDEEWACL+IbxOExAozA8CAApd3iT19Lf/rJlTS+ppLufug5emHp23SH+Dd+f+UXL6ZDDxxKAwOp97MffJnmzJjkOWAUc9+tWG/L8yQZuDBKRhb6AvlWKY1RVpTIQ6XfND4VAk/eCiDx0Da3v0BNDXmUqJ1PfQ3zqL9xHg00zdPeg1rFeLgNg7zD3CA1kA7mFxMvt76enMbeIMXSSnEH8u4vf210nAZE3sUfr9D6GZV4RgIPyjukzYZF3umDH3HciRqJhzX/5kpTz9YAACAASURBVKe/pw0fqtViO3JaLVWVqtUNxHxGEg9F/m995C1at7MxWQ8P919fqsozSOWdXHNQ5LnjDWDTwUhImRlYqBJ5QRB4UVdYB6FYlp+5cNrNdJ28sP5WgtQcLb6o4sYIMAKMACOw7yHABN6+t6e8IkaAEQgAAajvHnlqqUbKoen/LQ0sQObJ9mVRF++cU4+i445Id9F0E05dY8Kxhpab8cLoG5aqwG4tetdBJ2MKHKKaRc2+XgMpEAZWTnO4TQmzGs8NgWcc46n7JtLubSMGFVCIyYhVWOQdYhvO2mSyQD5ILDSr+0aVvJNY60k8qPFQEw/GFpLAC5K4w5xIm219IV3xZ3c/Qok3UpB4WHtHoo/++KvbacvGTba38MkfLaPijvecbvO09+de9KfB3+H5hKJq7Y5GWl/XRFni/isRSivUxusQxg7HL5jsenyVC4IyS1GZy6qPMYb1d2yj/I1DqfALKxsGL7Uj8nrGl1HzYn9/izCRH4WbHxxUrw2yZig+Z4C/VG3DudapnqVqnLJfWF8eQcUJcpMbI8AIMAKMwL6HABN4+96e8ooYAUYgAwj86s5HaURJEV12/klaOu0nv3oL/ezGr9
Cc6RO12T4h/g1Vnh8FHsZBsXCk8sSpaYc8oU4aDiMLmc6IgxZSkKG6c2pQ+HSKw1lC1MOLYgsqJdnOxMJs3UiXwyF2SulUWr8un9ZvIHr55SzKE3vbI9ShaIsWDWivMNtwpfEBCxTwR8qiEyl8623p9eScMLro4+VaOq1smSQqWwR5Z1bzzilGqPCqpk3QCDwohDdv2EhbNmyipc+/lHLpUccfQ0edcCyt+ssnnIY0fb9yv3OoasE52nuSwDOSJ/JZT3T3i/3oCTzdEYRHr/gcUfkM0S/i3pVJBeu9q5Iuugsqp9CCqqmDLzeAyBie/W0z7d7US1ndvZTTlOokXVPcSQcKIm+s+Ik2ROQl6+ShNZ93IPVMKHcztWnfKJCadouwc6D1uni94QVMpYKqkxemgQXXv/O6+3wdI8AIMALRR4AJvOjvEUfICDACw4yATKdFDTwYWaBBkXezqI93/JEH0Q5B6I0TabYg8Pw2fOsPlU+cWthpVjhgQSWBwy7SGUHcgVxQbXE4lNY3J3wTFLu6t1DXQOrh3wwjSdzh6J83UERVeUlSWjYckoOIR4636u46wkvf5l42hir3L6GqA1LNHmSfoEhNlXtEn4aNmmN1TfZ7gZp3r/6zXWXolD5HHF6i1cSTLZMEXsN997iODxegFt64jx1C7Z29Ss9YEASeHSkzZEKQK577XsLnZVB1y6DmgsmNKoG3vH4jgbxbvnuTJbYg825e9Cll7DtqBwjknX5NOY0dlNWT/sXEKZO2D5J4mEASeQOTK2j32QcEoh5zi4nyQgPoiHthjPhsqhWq9Uw0+XcGhhdofom8MNWM+LyU6eeZwIbHZAQYAUaAERg+BJjAGz7seWZGgBGICQJQ36F94YqztZ8g9KQjLYg8qPD8Ku8kFHGtgzdudBHt2ONMFvnZcj2xYlWHTGX8TJgiqMyr2ieoOkmJ/g7a3LSMOjvS3WiLikdTSWmlprgDWSBdRkHeFWYnSWrZgko5rn+vjZZ+S8j6bBpIvKNvm57WIwwCT39/ScWdlRpMH6AX9Z28/tvfTDWmsHO9Vb1/zPp5JfAw1szP/oeW0uqUTli//BHa/cEjnsOUabQqGGSCyJOpwipfBoC8+84rf1RaqxsS7+83NJvirEriIXW279JDqVSkggahHoNaubUjmuUGwvziKIg6eWGVmkAtv6oyrn+n9HByJ0aAEWAEYogAE3gx3DQOmRFgBMJF4ERhUAH13ZoPt9DdDz6rTS5r42UiktqGTkGoZGLkzI0ZFMljFmFQxJ0cO8yDnxfEg6jrtHPHu/TU41+l0ooJNOujSyzDGFk2kfLykoSdGXmH3+MQr6rAsppIhbyT15qReEGrAPVx4v6CygaqThSx1yuw9gUCT8V51u4+jSKBJ+MNkshz8xl2xkNDzswqz7gKibf0T23UtLXPsjZndnsXZbenupQjnfZUocRDM9a905cX8Koey0SKqgpeKn3CVLTJeEDk6T8r3ChAg/pixgkbKMxh+sONEWAEGAFGYN9EgAm8fXNfeVWMACMQEAKodwfXWSju4Dp7zilHprjPBjRNyjC7W7q01NA4NagLoPhQTT9TWZs8LOULRz0oooCJkwpIZVz0CUMxqBqLsZ9fZ0VJ3slxQeKNnfkxGjE6NTVWvl9VPptGF6Ur7+T7fglFN+SdnBMptXjJpkKkecG7pDBJ3MlUbOP9pULqRF2BB4Jyz1/u9vylgCqBB/yDSKH1orYMgshDvUPUIHX6jEHarKx35+aeu/no/9Dq4lm1v32vadDExG5cEHn6dtH5HRp5Z1XzDkSX1zRQL3vhBhM/fcNStJnFqK+Th5q1KmUcwjKwKC/N18xHuDECjAAjwAjsmwgwgbdv7iuvihFgBAJCAATeo8+8SmeffASNF3XuwmhxrIOHAwNSd1pEupXfJg+cIB6czAO8zoXDemNbdySdaP1i+cffHZMCi1bjTvxPcfkEKhFknr617tlKpQWVdNqZP7GE0i+h+M
o319Pu993XiDvv2f0HY3Ii0vas6aaGNan3XsXsPBo9e8gkQr9AqU5ySsVWUR9GlcDTK1fX/e6PwsVVpEoLaa8bdS9q4E04/BClFFrga0fgtVZ0EV6yjWgoILzQ9CYWKim0VjerkchrE8S/alNVebpV38n5L557LF087zjTcFa9lKDV4iVdiFVjRr85xxTSXPFyajINFP1UFHlhmi44xW72fhTMiIbutxyN+LXCNSh3cRWcqkX6bK74W8yNEWAEGAFGYN9EgAm8fXNfeVWMACMQYwS6RMHyPS2pqVJRX448HPqJO4iUL1WcVIgZ1bGC7uenRt+yt/9EeKFJ4g417vocKv0vPPgThJdZ82v68fBJ73uCSK/Cs9uv13/YmEbeyQlB4s04q2SQyFMl7uT1KnXRvJpYTJyYRxd/vCIFGz/klX4gkK76lGC40PbXbdWI3H4aoIG9At/+bHOCU44FF9qaGROVCTyzOng7ZrTQjpmtlvfA7Ncr6SMn/2Xwnq0aVUi7hHGIn2ZWz9BpPFW1mVcCD/P/P3vfASZZUa79dff05Dyzs7NhNgfYBCw5iURBQFCCBEXMXnzkeo0oiIqiiBmVq/dHr1cFiYphySCIuMQFNu+yObAzs5Nz90xP//We3uqpPnNCnU4zPfN9z3N22emqU1VvVfdw3n6/7/37Jd+ynEY2CDw5sC6Rl8rnkBvW6Xh9vKX3qrjCKRkmK/JjN1vpvjAyqa8uSge8fA9GgBFgBBiBcYoAE3jjdGN4WowAIzC5Eci0IUQm0E0mLVXWH8MDjpsaKp1zTlVVls65mO+VSo0+qO+8EHdy7PppR9qq8ApFCnOh2B8YGXgNK8dZ3XuoBJ4VkQbV3cvf79C63alfraZ5K0s8nzEdAm/P3jD98b52rXmoja58fxXNakgk0FIl8CRBiVR21PMDgVAU2ku+tzdR47+tDUSGfflkReTl1U2lckHgeSVKdj99G/U1bzaWuuW4g9Rd4/xlRH5ZPU3NW0HvjH7RMFWpFjUXD3Ympol6BvdQBy9Eng72XswrrObsROBteW5AOMn6XFN4zffVVeCZ+7kReakqgZPdM51+mXag1ZmDXRsrw4uyoqCxr1DXZzKQKl0pUmg5GAFGgBFgBCYuAkzgTdy95ZUxAoxADiOQi3XwvKSlygfrYlFwu0881CBV1q32VDq3MxVSKp3zsLuXrhpI7Y8H7l/94h0GaeOmuLMa9yOfeNZyOqmoK1Mh8DAZmUYLwnVIPAA/vmtrfI7rf99Nc7dOd9wOPOj7kU0mMDn+i1VUsdBbcXfU2QoLReyAS03Ke+5ro7179QlOK/UdFqJDIlktGHsEstFMgpf1radgpMvoMtDSY0viRUmkAAZGlDuSvEt2TiDxXp/z/Ah5Z0iR1EtsDPkoWDaFAvllxvyWRC+kFb6LjHWoSt4HXoyp82QsmbmMls5c7ult6EZW6aaLZpLA2yoIPF8WCTwJoMQG5KFq4jJRv+TwdHBSaIwzFSsHkSeIWTLqK+o4HKcwpGFeAcU0ByPACDACjMDERYAJvIm7t7wyRoARyGEEOnsHM/5tfbrhqRQPD6EhZyMLVREDlRCKf2eTuJNrhsoNDzuppPymGz/1frr1uNBHTQv90Y9PTnpadgQe9sxMqugOkgqBpyrw7t7+Ku3ubovXbxvoGKZQZywPFCSeFZGHMlAg8IS3SjyV7by76nSnbrSTxKGOOYsuiWdH3mE8rwSefD8FxHmGQlJ9L0F5VxTem7BeOxIPxNEw+SniLyKVvEtmTujTTFvo6dBNNNQvFJJGvm6iKY8vkEeBgkLy5wmywRcUf1cY8zzL/0VaULzMeF+CuHvgxXtt9+vrl347bUSe2xkP/3uVMY/w6lV0eZGo5xgsIF9+IflKY/PWCTcn2r98U2CFffBSpFAMfMq1pTRlTuqkjSTyZO3RAqG8HRCfz27ktc7a090mWymp6Zi3VAviXjB60qk/mOy4taK2K/aRgxFgBBgBRmDiIs
AE3sTdW14ZI8AI5DACuVgHD9/84+HPysjC/HCoQ4hkcvvGcwoW1u1m2oA2VvXczAYWXjC0I/CAVSp1yVKpgVd8cZB+v+1VQ8Gikhudu0enoh21ejFVtZaJMziauJM4HPfFSltjCyusvBB46O9WD8+JvEN/XQLPLTU0b6iTyvs32G5/x5YmQ5E30DpiLlJYW0o1x59Mw/ULxJcHkXhf3Tmpgz3r+z4d9G2lYTGPyEBi/TuQdwZxp8YhEu+owEV0TMF76bO//RJt3Lfe9fgmQ+LhpmZFHn5WIlSe7UIlpUZk71bqvz/R4OUb+f20MTBCSPpKKrSIPDcX2tW/76XmHYOeTEZqBXF3qiDw0hkSm4JggGAC0t0/GCfA0zlOKvcaSwdar/OWJRGgqkddSukI3CuwTSc5yvXvvO4Mt2cEGAFGIDcRYAIvN/eNZ80IMAITHAGoMBrbUyvknm2IrFItVWMKqO3GmrhTMUHKLx6qXPwdsg2jMZ6Tw6KTEUOyBJ5TDTzMxy2lt3NvO+Ha8++dCXjNOmkuvfnTNhps916XadbXq+ix4k3G/VQCT1XfmTfnnEePSVDcmV+HocVCcelGsmmEIPJQGw9ptSefFBsP9e7MNe/M89Ahy0oKAwYR4PR+slLf6aw5IpRw0Zojxf39hqIPKX86czLf+wH/xw3yjqL6acVQ4gXE+Nsfnkav7XhTZ7pGm2RJPPQd+WLBb6ijVALPirxDnw3+CH2zwPTZLBR5/uqptnN2U9+hY19jlJ74Vaenz6N0qe+sJo73/EA4EjdDQf228fJZOR4caHUPqNUXW1Z18lLFForJmvKYqzMHI8AIMAKMwMRFgAm8ibu3vDJGgBHIcQQOdoSMh8pcCjz0oQB9vniYAPmBmlwgGjJd+ycZjKByy2Q6UzJzkn2siCMdB1XVhdbL+Odd8BOaNv1I2y5OKb17hDmCmbhTbzTUF6HO9UUU6dBPN6xdUUK7P9lGu3ti5hBGLbtD9cGcCLxKocBbKZR4duGVwEvVgdfLHqCtE1Eq9x8OlyBTnFLPq1uejA1doE9Wyrm2lZ1EMsUcPwsKSaPXLxPup2spGhEEnoiW/mpq7a9JgGJx9VuW0Kx5uYP2vFLhOa3+/s/+xSvUCe1LxWdVqVAQyxTH/p2bRynv1A5mFZ5xRh2UeG7qO/TH59H2Df30j1/bO/aqc8gkeaeqbkfUniCNh4yzlyrZlNJmic5ejVVSHS+V/ig/EBK/B62+vAKRB0VevlA7pootPqtQFoKDEWAEGAFGYGIjwATexN5fXh0jwAjkMAK5VgcPD3pQtSGg3Mi2MYXXrR7PaVjSARIPy3jA8+LS61WF56a+k+QCzuNQBEYEI7HuvteE8s7dCbZnX4jCbUEaarRXKcm7grybLdR3SJ2VoUvgof0Zfz/G9ih4JfCAe5441zjL2QgrtZt0Zh0UajjH99T2l8m34xVjmtVLZiRMNwoiT5PMA4EnAwQD6mr1i/dzl9h/3XqV90evpoO9hbS1beEo8k7eu6aolU6a8VLCPO+6YxvVBuZrjyM7X3bCFXTZCVcmvUWSqIVRCsjzt39zOw3s3OJ4PysSzz911qg+OuQdOklSqlGk0W5+doBadlmfOaTNwnk2HXXv7BZo5YSN9yBwgilDqmRT0hslOo738gfmteF3IgwsnN47EluQyPjdmcyXXtVlBYZyloMRYAQYAUZgYiPABN7E3l9eHSPACOQwAlDatHWHxv0K1HpcmCxIhvGUKmsHoFPNvrEGHS65laX5oh5WdJSzqNvcDrz9Bj3698+6NYu/7qa+Q0OkrKFmk6qkRMrsuvvWaI8z0CqMWbaXOCrxQN694wfz6Z8HttM/G7cn3BtGDRFBIMK8ord3mIZEGbVQEYrjEQUGo5Qn+I6CfntTC7TLJQJPfV+BADCTpwngvPpn8rW/Hf+RmcDDC9GAUOeUVLnul0rgoTGIJdRBA7kAIk9HgXXLwS8Z5J1bmEm8u+54SxB4CzwTeH
Cm/calt7oNZ/u6udZh74+uM2opGo7ODiJopNM+kBeO18TzVdUZ5haIqw4/na5acob2nMzqy4OCwDOTeCDvMkncyck6uXSPNZFnRS5qg5zlhl7JxhFshSu0IJO9KMRxfvxGrQEORoARYAQYgYmMABN4E3l3eW2MACOQ0wgMiSfHZpFGO15jJP3Hb5B2SJdFbS47I4vxtg6rmn1jPUdgJxV3mAvcOHVVT+rcdUg8KO+OOvpax9RZeU+oFcODiY6U//rB057hQjqtv2s+tawdMU+QN1FdZ60IPDy4DwgypUucs97+YaooepIK8rZTYXBHfB6d/WdT9Y5T6Ygnrd14vZpYOBEZnhev0UGqsHAGws+sNVI6VTK84MwjRt/FRN6hgRWBh5/rkHhWBF5zx4ChfNJRYL3V107f3vMwRXx6qkVJ4h3Y10dPP9hKJXkzPBk5SEBSSaNV0xzhOAu3WYRfkHhI3QaRPuxSzQBEXmD+cjrq+PfQ8ilzNXZ7pEmqRjGeBtNorJM6rnseNIbz1CSXHGhT+R0j++L8QZEH1aNd6jJUwnWCwONgBBgBRoARmPgIMIE38feYV8gIMAI5jECTMLJIhsDJ5JLxAAU1Dh7gzGq7VB5YMjlnq3uPp4dmlbgDEQpckXrltfaYeZ1WNfFA3KHeHcg73TDX5POqvlPHWf7+lVTR4KwEUwk87BPwCQkCpUm4o+YHttOU4l8KNspu9kEq27eSFj74nYQG1YuDdPwX3RVoaieDpBamETB0yEaAwGtb9Rp1PPK6QRpZRcGZKyhO5Clps2rboillVDSl3LK/UzrtYKCcuouXJfQzp/VK4qZIkPU9IsXbrLa9fuszFBpqot5AjzZki0RNvPK+N+nZBzoEIVs/JgRenzhbUJiqBJ5cgC6Rl3/i+ZR/0vna65YNZZo06oeOhwBhbyaP7ealEnkg+TNdOmE8lz4wY5QOlbeO4QXXvxsP7xqeAyPACDAC2UGACbzs4MyjMAKMACOQFAIdPWHj2/fxEKqjrF1qj9eUobFeF9KOmoS6aKyKskviDspFc40zJ+OIbONmrgXnZlzhND840846aZ7jEkDgPd+03SDuYmmMUToQjlLQv42mlAjyDmGriAqSj/KpcucymvuXERLPq/oOQ2SLwJPvrdd+8TBt3pbo5It5HB8qS8ArMHcqlXz8HPI9+QtbHO1UeOgQLa+z7NdVtFSkJieajdi50GJvykXRfGTtyc+DR1t3Eq6IcKDt8Xdoq/AwmQ/Nfol+9fNNlBes9UzgpZpCCwMJWePRisCTYLkReckSeDhnJaL2nuqCm+33uDoeUua7+0bXvHSak7mOW6aIvFxyoHUysPC6v3i/4XMCNQhjdfKG4mn1VaLcAl7jYAQYAUaAEZj4CDCBN/H3mFfICDACOYwAyDuQeGMVVsowN0UglGPtYs6ONbvGakGmccfKiVatbwYFE/bZjKtKKshpN7dvpIMdG41/TqlcItKmlmQFSXMqaSYJPJAZB4e66M61q42ad4gWUQ9yUNBy08u+MLJevGQhUvPRiPPq1I1LafoT36VkyDsMpBIrB97uoDVr9lDjgZi7qoz6aRW0cuUsoWqs9LwXOAd4yMc67/zN72nn7r2CsLRW3h0/UJpA5AVK+6n0WHtloFcVnpX6DguyI/DkYqVCCP++d89W+lvzDhqO9FF0uJ96hApPJ5U2EM2jz5X66Nb7f0u+QIVnQj1VEwvVaCCyd6ujAy3WaUfkJUvgjbe00FRcXlUiT7r6ptOFPJW5eX6DptghE3O1whefIXko2sjBCDACjAAjMOERYAJvwm8xL5ARYARyGQE8+LSMQVqVJJiKRapsn0iT86KmyCWFhFVtt0yeFx3iTo6vGkds2Pkg4bKKpXMvJVyZDHNqdCYIPBUbOJ7+v40v0e6edmNZb4v82fKCJ6ms4InRy0wg8vyC5iuKtykRKr2zoz+lJUccnxQ8kmD73T2vjiLuzDcEkXf+BSu0xlHXunb7Hn
pq9Yu0a8MWkZYOxaFtbjDNGMqnS3prYmOEeqnkmEHKq7V/cC+bXUvBkpgztBrmNFo78s5LmjnOyJ3CQGVDZytFxBoig63GkAP+AQqJyy4KhgupUFxnFYcocqCFHnjlH54JvFTq32FeZrVrzw+v09pHM5FXdPlnKdCwSKuv2kin5pznm6bQwY201b21jmpb915ol0sK70zPFfeHchv1MguCrL7zco64LSPACDACuYwAE3i5vHs8d0aAEZgUCGSzDp4XgskO/HTU/cnWxmbrwTkZXFF3rrFtI61afbMrHFDjnb7SvZ3rjWwaSCILphqIVGrgmVNoVWzUmoq7u9vo99teFbXvotQqXGZnlH/RefqC9/L7Ucg99jCLB9xy4eZ7cs1H6YjKjya1dMztycfX085dbVr93Ug8zAn1I0FsyLXe/eQq2rl+C4kCbK4EHiYRV+IJAq9gTogKDxN2vA6RV5xP5XOmJLRQCbz+/AbqL2iwvIN5391AQP07pNSi8H5kWKRgDo6oFUHkmQPEnYx3VVbRhw57N33+/75Ea/escxsq/vrXL/02LZ25XLu9VUOzA6xTGq1Vf4ifiucdRoWX/1dSDtxeas6ltFCNztjzmvICYaBkT7pq3CahSbqIvFxyoM1WPViQeNUi5ZmDEWAEGAFGYHIgwATe5NhnXiUjwAjkMAJt3WGj5k0mAymSqMGEhzezMYXXcbP14OJ1XlbtM117KhniTs6zq3czPfHKN4X7pb0iS11Tpkk8syonGRdazPeUL5xpTNuOuFPXBBLvzs2vULdIMXUj8AKBIkGAjVaiTC/w0wdnv5DUcXl9zW5684298VRenZscJdJpVx49e1RTSWIgZRrvMcTza9fQv9a+TpH9MbWamwJP3vT6zmmGAo9CfVRx0WiFndU8QeRJNV60do7IwZ5rS9zJ/l4JPFkDD/1B5PlIuFOHE1OOLTH05dFn57yTTqifahiGfO3+r9LGfetd4U4HeYdBrBRn/ff9mCL73nKdg3GWZy6k0is/RyDd88VnqdfPUKRADohzMSAMbMY6QJI1lj5Hm6JPjZpKxdAcmhU6nSoj3lx25Y3UdGu7OqpO6x9vqcZOc83WF1kVog4lxuJgBBgBRoARmBwIMIE3OfaZV8kIMAI5jECvSGFFgfVMhKqMQB02s6NkMmNmOnUomTnZ9cmUoiMV4k7O9dnXb6HWzk2eXIgzmU5rViklk0YL9d3cU+YbD5yF+XpExxNNB+nJxsepMvg9a+5HkHZ+f74leVcmyIiyPF/SBN6v/9/zoraUoKEO1eLTPXsf/fip8aYgx2H2IN2F1VqH3/3Dr412ksDTvb+hwusVxF1fpzaBp947evTFRNUzXIdLhcCTN/dFBwUJ3S8ui88wQdz5/cXk8wfpjkVnCOVXvkHgAaMHXvyjuO61nCNMK1D3LlXlHW7upDjTJfHU1FlJUkGFiM9tnfpvVvUuXTcnAw06AjtpX9Gz1Jm3U+yX/QAg8lb0fSTpGSRL5OWSA222SNlaUXMWeHIwAowAI8AITA4EmMCbHPvMq2QEGIEcRiA0GCGZupiOZeCBFSoRqEVAKoC403nI9DJ2LhlZTK8pordb+70sz7atjqpMZyCYVYDACwjySBo56PRDm8vPsCY9dPvbtcOetnSFEuqTrbvvNZFO26F164qGSjrp2uONsyfPnZshirzxzTt6qXjwdDF2ohIVijsr1Z3slwqBt+a13fS6MK1IhsCDCu/YY+cY7zGfeL+hpp/VWpMl8OK18LoOahN4w3taaXivUPrlBSm66GTKu/y9rvvmlcDDDZFGaxW4F8wthkV9PJT58weK483Oq5lLuKyK/oPIUwPkXTqIO3lPtzU6pdM6mVZ4IakyYXbgurkWDZ4vv9kw6IA5jJvwN1USD8N7wQjtc6m+ajb2FCpXjOPHf3AwAowAI8AITAoEmMCbFNvMi2QEGIFcR6CxTShY9DIpbZeqkktWaqB0YpRLD1rpIBvTRdzJPZ
CmFcmQR+886uaMuNPaqYR0SLy6eTV0ykeOt1Sh6Zw7EHglQ2fpNE1oUxP00bFVydXAS4XAO+WkeXTqyfNcUymTJfCwSCONdihMFWc71yobemErDf176wguxRWCxIvVzPItPcwg8vxLD6cdj66mneIyx/L3nkp1px+rjf1bfe30s32vW7YHzSDJBkloLiiqpOsbVhrt02WeoD1Z0RAkUnFhwFD+uQXIPESgYaG2WYUOSWVWt7rNIxOvry3+jVDe7RKKxBh55+ClEh9+ee+Hk06nVdcgMXIr4ZANUiwd2GZLhV4gvgxBvUIORoARYAQYgcmDayxt3QAAIABJREFUABN4k2eveaWMACOQwwhA+QTSLZnIJnEn55et+j/J4GHukwrZmApx96gvUSm3ILqMFtIyY3qpEHiZSqNVXXHNGMLUAim1ZjUeiLulZy6ikukVnpyMzff/R3uYVrdcT4Hom9pbni/YolqRpnv21J9RfWGMIPISyRB4IKfw8H70ytm0/EhrYwh1DnEC76CoEydMLLwECLyCM1cYRha+Ha9Ydg3fuzqmupOhkHfyRx2RPFpXu4B8RSOGEurNcMZBtq38zGVUtdB9TejrROLhdWAUEH8sKq2i/5h2ZJwsGgsCDynOhcJURIfA87I/5rZ2RJ4Xp99Uxnfqi9TZdSX/azTJE2UkhzRLrqZDhafOy4nIyxYplg6McaaKRJmAdlG/NpOB37OogcfBCDACjAAjMHkQYAJv8uw1r5QRYARyGAEURe/uc1eImB+GYEwRFGmY6A8CUDdlMVWocsnIAmmOCGksoLP2VIk7M3mnjvmZ4W9TeOdmg8RLJoU2UwSeF6dMWVsxnUrP3739Eh3o+azO9hhtoL6bVbSSzqn/uXYftaEXAs8gpJAiitRD8YedkYV5IpLAi4ZEnbiWLu15yhTa8u98MNanbT/5Xns4ob8uebc2XGr0802vH0XiScWc/NzwSuLB1GJbv3WKNVJmLxPmD8UFeSKNf4hQ63OqSAdsbE+f+6kOoDireYfMe3Tap9rGTOQBW7iIHuwMpXrrpPvvLvgH7REXwqvq99SuW5Ie166jxCgo5IA94lzgbOD9VVU6tjjpLrRU/E6BghHzzmRUlxUYtUQ5GAFGgBFgBCYPAkzgTZ695pUyAoxADiPgpQ4eHkhLxTfzIBW8uiGmC6KJqpZIhbgDtnf4bqJtPnd3zbodg7RoR9khUihWM0w3MkXggegEQeX0UIoHbxRvTydxJ9eNc/37vb+i7R0x4wenAHlXIB74pfquU7jZdna3G10qyqrEVe12Czrwdgc9smqdI6EhiTvsj0qOv/v85TRteqXrGHc/uYr2NDUa7SIeVHgwsXjHKScLBd4RiWNsf5mofT9F/v5sLG02INQ5ouYdFZRYzuXNUCl1Dh9ysCwUtbRm1Ce0MxN4ePHMOz7nui61AdR4ZhIP5J0MYAglUVE+6hn6qLkjuwQexkYJMS8EvicAbBqDeAEp3hjdQPsj6+mN6P0JLWsih9Oi8CVUO7wkHcM53iMVAg+utLPFlYmQn7eF4mwMRmJfQGVaKZmOdaSi6vYyPlKvuf6dF8S4LSPACDACuY8AE3i5v4e8AkaAEZgkCLgZLaiOsr1CcTeQZMptuuBEbbk2kUKULdVfsvOG4gRpSE5GIakSd5ibLnmHtgXtIVr0WoSq/DVGMSov9Q8zZWLhpFSSRgCYe3efUHsOjaR7+9ZuoeiKxcluT7yfrFX2993/TS+0WpN40rQCnUDehdsqaO+BHZZjN0ybR7Omz3ecl5MLLdYM8snKoVZ1oXUaYHfTAbrnyUfiTaDCgxrPLf6r+kgq+fg5ts1Cl17jdgvaPVhIu4cS02bNKjwrAm/ueSfSPHGlO/A+RAotvqwwn6F0j6XeD8T0kHiDpcOB2+s8W/wb6aWSb8eIejEHwVGNChB5Jw18zeutPbVPJPD0U2gxSCYJPLkI4AOVIv6WijwvX2p4AiMNjUGsNQkiOpNzhG
q0TozDwQgwAowAIzC5EGACb3LtN6+WEWAEchgBqzp4eKApFsokkCuZUD2lAle2VAipzFH2tSsinw7iDmO8RevpZ/6bPE111tP7qd43gwqpyHi414kplUvo9JU36zT13Maq2L8dPiDtAvf8nfzrFPOEQyNGrrqAIldfmNT4SAlHXanGgTX0SvuvaW9fzCwBWWRQ3CFWVHyE5gTeS+u3vuY6RrmowbZ88TG27aDCe/yx9QlOwJK4AzFt9YCumz4rB1VVeEiZHmzvpWi3vSvyB6+8gmbPmuW4Nh0CL0F9d+huvqpK8lWPKAetCDw09arCc90I0UCSNJ3Csbes+FBau4kM1rmP1zZQjIbElx3ZJvC2BB+irfkPGeo/OmQi6heMMFx6h01EXqZJPEnggZDGfKyIRDtcs0HgYWz8PhkIR4xzoqZdZ5Ik83qW0B7zg7FEppWk+L1fKVKKORgBRoARYAQmFwJM4E2u/ebVMgKMQA4jgAdbmb4oiZNikf7VJ+rsIP1rvCndxio1LZkthloQBKl8GEwXcSfn4kV9J/tAhbd4zTBV+aq19zZTDrSYk1rX0AmfvBt+aEncqfsyvHwRDd32+YSt2jGwjnaG1sV/dmbFVQmvS5Wfk1ISHZAuq0PeyZu7kXhPPb6edu9pN4gNn/gjKog7Oz61floFnX/BCs9HUJJ4as1DKPFUNZ6vIEhXX3ARzZ4q3GcdYuj+P1NEXG7xz37rFF///Dnxrtkm8ECmyf3NZDq2ig3G7BuIJKhG3bBL9XUo71YXfdu4jR8lzFA78RBH7xsQquXWXor0JRog1OYfQcfv/xwVHDEz1eFH9ZcEniQTzQSi04CZqIFnNZ7qQCvTrscjkZctUxSoxvE7loMRYAQYAUZgciHABN7k2m9eLSPACOQwAgPhYeoSD3Wob5cvXO6gGOkT13gj7iTEuWRkUVOeb6TtAUuk1AHfdNYPvN5/cVInr2JHFx25e2qCAszuRpkk7zBmTFmSbyg97fDRIe/k/CWJ93TnPfR01z2Wy5pbsJzOLL+K5hUujyu03Ir9r9vyKnX1xOrd6YZMp+3Z20m9+xLNJFCQvrEwSs+/sMPxveZVeWee2/Nr19DqDW9Y7vWsqfV0yoqVruQd7pmrBJ5dKntJYUAQFYJkO2R0kW7FFc406qpl83P034XfotbAJuMICJ8Gg7wz6ii29tBwW2/8aIgkbcHtjahvl3znJJpScATV/PBS3aOt3e758ptHkYlundPtQms3nl1N1fFI5KGuIc5Spg0s6ioLRH1ONrBwO6P8OiPACDACEw0BJvAm2o7yehgBRmBCIzAkcpvSSSxlEqxcMrJAehbcehGZwDdZAg/z+enQD+mBZ2+03SqkzcK4oq4qc8XuZao2FB/d/VCCRkbNJ3D334y0WS/x+vwDdPcXLQp/mW7ysSnfNUg8qHCcUtO8qu/UYep3TB9F3oFUwqlAjbTCk2ZQdyBKr6/ZkzA7EHfThPJOx7TCDRuQSW9u30M7D7wdbzpLKO7cVHfqfYc3bKLBr3/XbSiyUuCZU2iR0olAWqcamUihtUrRlmOqRE3vgPX5c12wTYOxqNX5t5IRdWkgIIg78XYa2tdGUfHesgpJ5M3882JqEFf+ETPSTuJBhbe/6B+e0meX936YKiMjhiS6e9DbJMxK1gaotzmRgCqpG6aSqVGqW5H4+eJWp1SeD3y5hTTbsVSkyy+D1DqgurjotoNSsr66SLc5t2MEGAFGgBGYQAgwgTeBNpOXwggwAhMfgYMdIcONL1cCKgGkxGVT3eIFG6kSLAgGqF88+KG+WiYiFQLv/orHqLF9gDbsfNCYmvwbpB3Iu0wSdxgPikTUWITiEwQe5mIV+ed/0hN0Bwf3UzjaTz/+jd6DKEi8k6Ydazs+Bt/z9nZb0wq7yUXEvofFmir3VFF+f6ymFAhLPCSDuJN1ycBhlcwsp/mXLfW0Ti+NQSTDgCbVh/901MBDkXxDFWYhecs2gScxBFFTVh
QkOLhCNZcqTrivGynsZf902qrps2gPcmqwJyQIPHfVaOXmWlp620lGjbzSa46nsmtO0BlSu83Wyv+jpuHtWu2TJe92Ppk3irgzDwgib+7ZQ/Ef4/MnKNRmXX3OBi9mIg+GF1YmM1oLTLJRNgws4MoLUw8ORoARYAQYgcmHABN4k2/PecWMACOQwwh09ISNtNlcifFqZCGJOxA1hlojIlJnReF8t/pqyeKeCoF3d+kjhPqH2X4Qla7GqjmKuVZgnFgRphXBr/xIG56uSBt1iwux+qI8evGioGtfpNPeuOjHjgq8ZAi8UFs/DYu04JLWEipvKztUxH+kzp3hECpILMljTT1hJk09scF1vsk0SFc9Np00WisXWtS/CwgWxHDXtSn0l6wL7XCzMOcQlzl8dSXkFxfOW4FIXQcx5xZOrsdufc2vp5vAa4psH0WCrQiOOAZL8wo5DxB4/Zsbtad98rUXGWcUxjZTn/xP7X46DYHFU+E7qTNvl2PzTJJ3cmCVxPOalqoSefjCK1uuxtkysOD6dzqnmdswAowAIzAxEWACb2LuK6+KEWAEJigCqIPX1h3KmdVBvYUASTYewkzcSefJTKf7JmNiAbzOi15BV5VekxZVli7+VsSd7Iv0MCsy0Wv67P7wtvh0dAk8dPjy/B9S5eBiS/dXvO6VwBsUhPiQIEcRZW2lVNpaOkotaibw0HbFf52oC6enduki8DCoVxVeYEY9+YuF47FCVlpN3qv6LtobJpB3dAhnO0BKDquloFAVefmskO9n3DPZOnbpIvBA3D0Z+qXtfk/1z6OzC/+DzAo8vzArCe2JkdluUb6phpZ+92SjGc7llI+fRMUfOD4tDroq+dQR2GmQeHtEWq0M1LurEOmys0Onu03T8nWkzDavE/nCHkKSeMkqU/G5DrUafg9BBZ5pIs8LCe0BhlFNa4XpEs4+ByPACDACjMDkQ4AJvMm357xiRoARyGEEUAOvWaTR5krgIaNEPDxlKjVVFwc74k7tn8nUp7doPf3Mf5PudOPt7hh+mKA+gYpEko2eb6LZwYm4k7ewe5DOFoH33mkfohPyL7dNyfZK4PU3jSjCSlpKhAqvdBRaIDaigtRSBWmZUuGBwEN6JOoM6po1rPnbYwlznrZoAU1bvIB0auF1RPJobbiUgjOFs21hoWW6rHrzlZ+5jKoW6qsPDfJuZ4fWCYSqrHBhDYUE4eI15NnFFxxesMPeIhXRzRjFbT5PDvy3UN3tcGtGksRTa+BRey8NtvS49kUDWQNPNq649gRquO5Ug8xDeinWn2xACVgmPmsy9Vm9/u7kUj6XXR020pwPdg5ovyesMJBnJJNEnlelYLJ7hd9VfsMymIMRYAQYAUZgsiHABN5k23FeLyPACGQNgf2NLTSjvjbt4zWJel3jtaacebFQQCDtcqxIR/nQhnm5mVNkuvi4VxUe1He4UHcOz2pelEleDh3ITaRkQXnlpmKyIxO9EHhq+izm6UWBt7T8SPpQ9a226cReCLyAICx63h4hTuq2TLWEzYrAy1QtPJn6VyRILBBRToQMiLs1f08k7+QCQOKtvOBc2vhqOxX+9Kc0LdyasLay6mIqry4xiJ/IR6+lDVsOUvu2fY7Hxit5h5tFdoq6bi7KOzmoQUiIM+hbWufl+MbbJuNIKlNxU0mdd1PemRcDEq+kssVwocWco8J1VpfAgwtthaiDJ0PWwVOViMmqzHTrzCWzOTCt2PmUe5q81b2nCkOL5e8IOta+9DKndGBlN56dQtnL/NzaohbgFFFbloMRYAQYAUZgciLABN7k3HdeNSPACGQQgTt/+zC98sZm6urpo25x/e9PbkgrkdcmjBbgtJcrASMLKFx0FUXpWFcyaguQU+HBCA2IemiZCl0Sb0F0GV0f/bYxjUypGEFelAviDu67uoYAdinRPg818FIh8JaUHUEfqf2uo3nBC6896bh9WDeIk0HhpDsg6t8hgn1BqtpbbdnPisBDw0yl0eLecm
/AaVmlLK/64c/pwNaRNGTzxEP9YWrZ30mD+cso6q+g+lALTQuNkHhSvXPS/9xA9Yvrje47Hl1N7W/tpQ4TkZfumnd2myMJvOiUWE28ZEMl8kCCOilXnZxvdcf/Q98XdZvG200vmEm95S/FatlpEnhq+qy8kdnIIhVyCu9tqEx7hfFDuiOZ9Fk5h+lHDdPCE/xpr0+aClZ2+GRSxS3HxBc6+MKFgxFgBBgBRmByIsAE3uTcd141I8AIZAiBV97cQrf97G6DtCsvLSaQeQ8/9i964t4fpG1EmFjAzCJXItn6RcmsLxniTn0wyqTSTY7zqO9ewmUXnxn+Ni2kZfGXkdqGB7ZUVELqWCCH8LCeLwwDXm7eT6+1HqCHDqxLmM7hpXV0ybTltKQsUZXmpNLJu+GH5F+31XXbQsP91DK0P95O14UWHS6uv4beUXyFI8na2d1G67e+NmoeIIiwv1CvGu6qh9xn0VA60IYa++L9AiKdNU9c6OMTrJBZ9ZpJAk9OAiQD0mrV1FBd8k7eQ5J48t84T8b6D+UEf/S7F9G8FTNc981rAy/qO9xbEniYVmBZcio8q3MOpaWdKq1QvAcKhXmGjnGG1frXDj5BawedCWM73I6tOIG2FTxEUUG2hvc6O9BakXe4r50TbTLklHHOxO+WTHyBkQqBN/sYohlHRV0daL2eT/U9BgMjRLLqRfTF+6qqNPV0bLd1YAx8DnMwAowAI8AITE4EmMCbnPvOq2YEGIEMIfDw4y8Y6rtbv/zR+AjnXPEF+vS1F9NF556SllHDQ8PUIhRtuRLZMLJIhbhTH+SyWa/PTOJBdacSd3JesTTkQkf3VZ2zoBJ3SMf96rrHaVNPs2NXkHi4VIyKC60JDy8qPGli4ZY+e0zbEE3vH6YZ/YLVEVEZrKGqedfQYMkC8lUssp37ui2vCgVsjBQBfjEjiphzpxqogVe2sVTUaRNOxBbupyDxiqYVG+YKKoGXqRRauwWVCsK1uCCP/nHvX2n1nx9x3LP92w6Oej1ceLKBgeEuKxyXzXHrqut0jpCnNpH1zmfLfDPVLCQdBJ56Zu0IGnxu5B1yova0uEONUyHwPlD8feor2kL/CHyTQlubbIe3I+/QYdpTzi60Xog8lDqAujsT5RlSIfDmH++j2mWRjCgDVdB16qQ6nZFMpiCr40LRnifSaDkYAUaAEWAEJicCTOBNzn3nVTMCjECGEAB5d9vP76GH7rolPsJfhALvdw8+kfCzVIdvFKl/Ji4i1VtmrH+mUkAx4XQQd3Lh6SponwkgkZrVKGofJhNm4g4phd/a+pQreSfHghrva4vOMv7ppgbUrYWHNNqNCzrowS9b13ICaXfR/pg7rBoNhQuNOmnG2S9fSP7ln7OFJBTqpDc2vZKgNlMbl5dWUd/fOmhwXSyN1imgxCteUBFvkikTC6c5YB/v+uRnBSMZIyKtUtK7RDpmd9uIilDeb7hwOUV85bZp7GdcdSydefWxbjB4et2NwBt8bG/8fv4F5ZS/qDLugJssgbfnYDftUQwhTjlcmHMcCqlmDIsUeRDYIKqQjohINm00mfRZOZ8VwbPpHVXnU59I5V47cDdtbL4rAV8QdzCtUGveqQ3s1HdWmyTX7mTgkA6jCLsDkkoNPBB4FYcNOqbNezqYLo3NRB7KVeiUf8iG2RDI5jrxu4CDEWAEGAFGYPIiwATe5N17XjkjwAhkCAEo7m694WN07JGHxUc44YLrjDRapNWmI3KpDh6IBxT3TqeRRTqJO3U/UiHK0rGvdvdIpjg6cC8WCiOkyoK06BG1rfAginRZc8qs29ylEk+H5NRR4kWuuoC+/I5fWQ5rR97V5s2gokCRIamLK+ksSDxJWCJ1EqmRO/fFasXtPRBzCW2YNo8qyqrI3+yjjT98iQY1TBbg9+gXJF7JIRIvG+mzVuCAwHNSFB7c30FhUffNHJG8BorkzbLd5rnLp9PHbrvY7Rh4et2KwIts66QhQdwNb+8ada/Awg
rKO2cm+QXGXgm8f206QLisYlZtKYHImxvcToGON4z3BIholCLIW/yxlByeUyXwzqy9MK56a/38gxR+cyS13Ans/CNmUM0PL/W0H2hs5zSNMzVVOL0m+yWBzkR2PplHvc3elWNnfNqfsgOtzvzMbSSRB9MIfHaC5HUi8qBgbBelLawUrsmMb9UHZ7dSpNByMAKMACPACExeBJjAm7x7zytnBBiBDCEAxd3T/1pDd3z7+vgIZwtS77dpNLPAwwSK2+dKpKu4t/oAigdwpBOnM5IhytI5vt29rOoI9uxeT43P30c9ezYkdKs/9f0096yrjDp3qJ2Gs6KmxV215p6kpnzPyquMfrokJ9R4PlETT9bFG16+iKLiilx9oXGfHQPr6K6DX0mYixN5V+AvMsgr1KNTU2F9DeeTb9YFRoqorO3n5jiMQdd86RljbBhZDLsYl4DAg+gPKrxF1x5BpQ0jajwzmHc/sCXhR1dftjgpvM2dzK6zqM2HunHAQqpxrdJn5X2QRusU6U6jNdfAg+Ju6PER1Z3dXPzzy6n4l2doY3bPP7cmqO7MHWfQVnqf/0dUXVogDGFGaof5BZfkF2dpaN6HqXvGNdrjqQ1TTaE1q966f/ci9fzuJce5JEveqTctEWnwZUWorRiJu1tXizRxmA1lKpJR4aXbgTaZtRlGJ4I4KxTO0HZEXjYIUMwdtVClajSZtXAfRoARYAQYgdxHgAm83N9DXgEjwAiMQwSgwjvjlJV0nah99xdRF+/hR59PawptSLilpsvUIBvwgRhLpUC4nXIk3XMHUdYvCK9MFHJPZa7m9Kxtf/jaKOIO94+ps4wsU1pw9beoqGFpwrDJqO/kDW5aeKZhaqFL4Dmtd/U9IyTXX8u/R30NeygoCMdL20bq3cn+UN6BvJPrA+miEpL9XYspROcYa1dTS4sXTqHiRdZmCAee3Em4ZLiReJLAqz26ng673jrV9MvffIHWbRxxelXXv3xJDV196WJasbSW9u7aSPt2b6LVz/0pAaKZsw+nE097HzXMWWIJnUrgDYcHCddQb2K6bD9SQ6M+CkUSlU7D/nIayh+pZWg1QLoJvGhvWNQW7DCG0iXvjMaCMMm/5jDKv9K+xqGcvxt5917fj2imb8RYxUzigfRFSjJVH0UDR/3EILy9RKoEntV7KfTmPqHE2zeKyANxV3rNCVRwxEwvU7Rtq7r1DkaGxWdGNGkzD90JeamFV1In3GfPjaTVwEd3nlbt5BcEVkSeW2mBVMZV+9YKlR8IRQ5GgBFgBBiByYsAE3iTd+955YwAI5BBBPY3ttCNt91F+Ps4kUoLIm9GfW1aR8ylOnhQRkEl5LXWVLaIO7kx2TDcSOYQQHUhHXKtyDuVuBPP4vFYcPUtVDp7xNE2FQJPptGmUux+77oWevCrq0dB0DtzD/U27KXrDttIBUJlki/OC6I8UJ3QFuuE6iwijBgGQzXUfTCmKvMVVFA0aK2KqzhhDgVrShLuYybw8OKgSH+zS6cN5PsprySfAkKFs/L2RHXY2g0tdMMt/9ba1k9cWUQ71v7WsS2IvMs/dNOoNge2bKNVP/q5QdyF2ztHvQ5lYnggpsqFMLVvaERt5pZCiz7pJvBwT6jwIm+2UPgXiSpRKwCwtyCefYf2vvDWEymwvMYWK6e0WXQ6zvd3Ol5caoD8qC4dqSGGNGucJUSw7mjynfAzz180JJNGe3bBp6g+b35azGm0Dp5DI+COtMwCgY1Oqmiq4+ko8eqWR6huRcRI+UUKa1ff+FGbq8RnXyiWWguDGRB8mZwnPv/rq2NfZHAwAowAI8AITF4EmMCbvHvPK2cEGIEcR6ClK2TUNsuFKBR12IoECdUuXA51ItvEnZyT13nqrCUdbTCvQvEw++ovb0hQ3tkRd+qYKomXDgIv2TTjB77yb9q33lqhhvkuW7LOuBBVM0rjJJ4ZP5Auob5q6jpE3hmv5wvyDpdNmEk8mT5r1Twi0gqHxSUDpF1QnF2p+l
v4yaOobH5V/PV3v/+vWlvc1nqABsMDdNbRm2hqVbdjHzsS75dXf2KU6k690ZBRcD9GSKkk3lgReFDhhf57nVbqrEHgBQPkE/uLCAoFnpMK77Y/rbHFUKbNWjVQVXhQTqk1y4aOuYOKpx9jdEP9RB1H1qbIdnoy9EutM4BGU/3z6OzC/zAIn0pRV3E8KKmh8MVasQcgoyQxpWPeoL1wU0Oo8UDmqXXxJHEnm8p5ef3iJ9k5eemnEnlIsNdJ2/dyf3PbAvE7oKbc2vQnlftyX0aAEWAEGIHcQoAJvNzaL54tI8AIMAJxBPDA0D2OlAlOW6NjfoD+Y0XcyblnKxXK6zHGvKh5C736qxuMrjElWuwuw4LDdXrQLp21lBZ84FtG23QQeCAd4Jzppf6gnfJOxUEl8PDzqQsqLWECFs27Y3X0EqLU3qQB7WrPH0kndiLwrAbF+bUi8JzSZtX7hEP91N7WaPyorqqLzj56s+sRQDrtiaddEm/Xu/cAPfuDO2nn7pHUX/NNIoK1G1YkmJLEc6t/lwkXWjm3nvP/FmMTXWyz84TyboQ2jfUu+esFtjg5EXhW6jt5o9LCIOFCjCLwKo+k/iN/Iuqd+QnkEVJqu4UpiBuRpUviSfIOY0tSHkThWIdaY9NKYea2/kzN36r2Z6bGSva+sv4djncmic8ycR6lQjzZuXI/RoARYAQYgdxHgAm83N9DXgEjwAhMUgRyrQ6ek5HFWBN36hHSqfHWGH2IcJmjlA6net8lVOqzrmOW7FHFQ2Lfa3+iLY/9wahxh3+Dp9F9sD7yq7F6a6kQeLIGnrken86afnyhIHFcwkzglVQXUqm4ZIBEM3DoXEw9Hab6aC4KPNxDrYm39ZdrqGdHrD6bTmDs9rYBo+nfh2JzOueiOXTLna9SeaW7KqbpQCLpdvVZL+sMS5+7+e54u933P0J9+xppzdo11N5hP/dBkdanRrRwGbWGE1ORzYNnIn1WjtH7nlgaaxRqYbAcJiLPL5RFUDlGxGE2n2c7As8tffYz/k/Z4qum0ZoJPHTqfuezRl+vRBZIPNTEaxqOuR2bY0XwbFoRPCf+4/GUHgozjVah6FYVh1g/jC5g4JCN1ForzMwmH1pvmiw3kgReU8eAYTBRKi5pDqKj4NSdLte/00WK2zECjAAjMLERYAJvYu8vr44RYAQmMAIo2N/YHiMVciGsjCzGE3EnMXSq8dYT3Ujbot92hRtE3gL/11zb6TYAgbT3/q9T2/a1nog7eX81jTaGv/uZAAAgAElEQVRVF1q1Hp/O/GFY8eIfR4wEnPpccWmiQy5UeIbPgPgD5A7OfE/LyaLWm6k2mgaBh3GlCs+qBp7dvEKixtXBxv74y/8sLTf+e/vBbuNCTJlaRAXiwd0qVPWdfH35vP20QlxuoarwNv3oNwQhZltnB736hn36KDCKCJMbxLz6BdQSrqE93YUJbrXquB/97kU0b8UMx6k8s/Mg4TLH3MpiOmPuFJpblVhjUG0nCTyrAfLgomtwerG0X3NkgsDDGPVi3ggnAk/ORSXyekWNwV6hPnUKEHlNw9sTmqjEnXwh2bqgbmcmmdedvrTw6u6czPhWfYD7lIpCahbE2HgOEMJlxXnxVGh5XiSRly63dOwR6n9yMAKMACPACExuBJjAm9z7z6tnBBiBHEcgl+rgqQ+s45G4k0fBLm1Ll7yT90kHiac+PD/zlfNFva7kDmz9qe8nXIhkVHjSwAL9vSqHvBB4Z5z2FNVNaY4vsrahzKiFpypZ2ve/Z7TysEi4zQZG1Hp2KEkCr3t7O731q9ddwZTkHQwiUFtud36BcSFUAg//tiPxerrbqbcnUTHnlcCLNDbRjntXCZXaiOrytTfXUIcg86wCJN7smrlCDVRK7QMBer25xHj4x/M/sJR8mQ559+s1u2hnR6LbrXlMEHkfXTnHci5WBB6chDGXIZe0WjsCb48gTu95/i3b/XNS4Dml0OKGUoFnvrl8LwYFYdPZK4xEkB
acQiSjZE1hONuuWBdqq7kRZbJmH/7OdL03TBbkKtJGdeumZgIbnXviCw0rAwuVyIPLbyou7DDymKKh9NWZL7dhBBgBRoARyG0EmMDL7f3j2TMCjMAkRwAPkuOxwLfVtkjSDq/BfAMPgelMMUrXUbBTxmwb/hb10CZPwyzw3ZRUOq2V6mXXvTdT58712mmz6kTNbrTf2voUbeoZIcqcFnV4aR19bdFZ8SZQnJQIUk33wdrNvEIdu25KE51x2tPxH5nTaPFC2773JE43IAi1oqla+6LWwXNT4anKO0ngSfUdBjMTePiZFYmXCoH3zrMuo3Pf/X7a+cwrtOfZV0etsb2jndpNJF5VRSVVVcZMNrpbe6m7rZee2RNTDUoX3ws/fAKdcMlK1/efG3m3KxpLj8Xf+YJkeMfUY2hp+RF02cwPxufaL1yHhw+ZlximK+IPJ9Wd7OhfVkNF3znRdl+dauDpEniqC60cyI7Ak69LxZWBb99Q0kReMrUktQ65x0ZeibJ0rd9tml6/KHC7X6Zexz6GxO+z/pD1tyvSGKSkUKSJi4OfzJkBSVgh3Lk5GAFGgBFgBBgBJvD4DDACjAAjkMMIoMh6W3doXK8AZBTqKOGBDMqbg52JtZbG2+RRXD5fuGF2KQYhXtV3ck1eVXh42EPqVb6YA0hOpF9JkrPl3/fT28/d6+YFYAmnrIGnvqhD4pnJO/T3avThhcDDWXnnO56iKbVNxlRdCTwP5B3upxJ4+LdTLbyDjX0UUtIlN5SUULs/EMffisArEGd8Sn0sPVNGsgQeiK53nn0ZrTzpYura/TbteeDRpN4qxTPrafbl707oGyPThRmJqJfXI4h0q3Ai7zqiWw3SroMSU6PL8vOoTLiYIi6b8UGDyIusa6WBG1cbxB0dUgDqLKTw1hMpsNyUKq10dKqD52RiIdNnDTJT/KF+iTB0yMRCZ34gskCqDAolXjJfRjil6uuMn642yRJlmSbyxotC0Q1nq/qBdn3kl1heibyq0nzj9ycHI8AIMAKMACPABB6fAUaAEWAEchiB8VwHTyXupOIOD60o9q1rvjAWW2OuaYQ52JlW6MzvSH9iXTe3hzs7dWL47Y20+Xc3Ga6zXkJ1oTX329jdZKTUmtV4IO6QNrukbLSyDcRHrUi5AxGrEzoptOb0TplKWzWj1EihVaO75SQaHKgVUi595Z3sbybw8HMrJZ6qvusIBIy02c5AnkH4YP1436Fq2xMb3x4FgVmFZ1UD76yjN9HUqlj9PHOA6DJSXcWb5Ph3jDjRogZeMlF74lE0RVzmkCYFcFyFE6qaErqzvZd+/fpuy+FA3r0R/VH8tbnli+iMmRfS3IrFxs+Ch+yRX2x6hGbl1dA186+l/q+spr7Xm7Xf927qOzm4nQpvhiAW3+cfmaNsr6bPGrUVDxF40UNEbU/VdygiSDyEryaf/LXuBiVQVpUIV1uvDqTjxaAh1Vp8mSLyUMoALutDyBsfpyENLLzWosWXRFAxI3QUeXUifTYP7kUcjAAjwAgwApMeASbwJv0RYAAYAUYg1xE42BEi1NgZL2FF3EmVi5WRxXiZt5yH1UNZMumz8n5wpcVlF7r1APHQt+3um6ll21pPkJnTZz11tmms49QruzoReEZKpTQyMNVDQzrtlf/VRdSVWOtsoHsx9fUfRwTjCg+hutBadQORh9p4cKftEu+pdX0+AnkH4k4NlJGXc35JGDu094UTXi+vyB/lTKu60NZVddHZR28eNQUQSjK9FOQdwsqF1sOSjaazLjuPShqm2XaTiko0AJGH96qdaYWZvPvoks/HiTs5QIyAHCm2f2LlXAoO5lPPDf+Op9I6rUGXvMM9nGrhmVV4qvss+hp+AIIcinTHFIiDtJS6/d9M3GtB4gUWlboSeU6OtX5aS7gQQ/SB+P29vIe87rmX9kgBHRBK3wGh+G01mUXXXKh/p3QTeeOF4HRCwGs5AfO9dDDDOa2vLtLfCG7JCDACjAAjMKERYA
JvQm8vL44RmDwIPPPC67T5rUTFSFlpMX3w0nMmPAgdPWEj1XKsw4m4k3NDWhQIgvFetw8P16pS8I3hq5KG147A0yXu5MCSaHn6hgu055IJ8g6D4+Harei9OklzGq0k7sBT2dVBPOHKRXTiVTFVlxo4Q43PbaO+5h5tHNDQSn1nd4Mn/rKTnvjLLsf748G6QyiEXjS5s1oReKoKz0p9B9ILmKimDqoDLSbSu/eA5zRaq/RZu0WpabV/23SAnt4x2nX2jeEfGWmzoBdvPfF/xJ9mV8yo+ImP4C6rxjFFC6lA/Dws3IgHHRyJvZB38v4g8ZBOu6dl9Hl4r+9HNNO3VaSlB41LjWiPIO4OqbusyDu1bd6J1a4kHtqrqsb2rl9TIPoHS7gjvg9QVfnHPL2HPB12D42hin7rnjA1/8Va6Va0iKjhC/o3BCkFUtBrmqg6Qq440NoZWOijFWvpROSh/ESlSKHlYAQYAUaAEWAEjP/XEK5q41ebznvECDACjIAmAq+8uYX2N7bEW//+gcdpen0t/ezb12veIXebgbwDiTdWoRouuJlTQEVWJGq86RogjNWazErBVFJozUYWwKBc1M7CA65UPOmsU32o3faHr1HPng2O3TJF3mFQr/W79q5roQeFmQEC58WoheaQGjdTGBhc9t2TLNeHlL/+gz2CxNuuA5vRpuKEORSsKdFur0PgyZt19ofppZ0jnz1WBB7atrUeoNOWv56QOqumy6oCxJmzD6fLP3TTqPkeXP06tYhLJ7yQd/J+Ukn27K6D9JcNBwyiTo1nhz9Fw0JP9rGln6d5InXWLqSaUL5eFaym+SKdFiQeAkSeOfIFYZtKgMizIvFOm9ZExW98NuHWRspsKKZa7vddblxukX+hvYpR7Qu1Xb7vSwSDDISRbm3xf9p5eUdSz+BtbsNm9PW+LaI8wI99rm7AmMTMzxMVj+bTbecnv6AYFO/zLmG25MWwyKuxRkZBcrg50nz7B4YM9WI6QhJ5qtMv6iyCKORgBBgBRoARYASAABN4fA4YAUZgwiHw+4eeJBB4D951C5ULFd5EjyGRPtssUv6yHZK4KxYPF33iIUankDsezFCQW7d+WrbXJMeDyis8GEsrQ6RC4MkaeMALyhT8ba45prtONe2u8fn7qGf3+lFEXv2p7yfUvSudvUz3tp7beU2Fxprb3uqg333+eeNB3umrQyfyDhOVNbs69ndS39ZmGmzrc5y/V/ION/NC4KE90mh3CAKptTdEVgTe8iU1dPWli6m35Tla/dyfjPRNc7qsXIQdeSdf11HiJUPeqSAihfa53TEFnszOh2nFjugjRsrsx5d8zhHzQ34VMeMKESDwGoI1VBY1K/Y8H72kOwQ63iBcxprWdhpqORB3ul9jI5U2sLjMcXxJ3slG0vXXGFMhrKWBxmBkOYXp9qTX5KXjqsej9Ii41Lh2v6jFViwUk0G97/K9kngYCzUCYZgyEI5o/Y5An2SNNbzgkY62mUrzVYk8zJPr36Vjt/gejAAjwAhMDASYwJsY+8irYAQYgUMIQIn3mRt/Sg8J8m6GUOBNlmhqH/CkcEgFF1VxBxdcpMN6UVeY01NTmUum+ppTo5J1oUX67IzApQbpBGdZkJz9KaQ7e1W+ZQofEJF9QsWkGh/YjYW144Eca9/6ahO9eM9W2re+1bK5Xdqs2hj3Qoom7ocwSLzW3lFEnlvNOzdsvvCRf7g1GfV6pyDy3nPNAurtH3EPBnm3Ymnss8ggMhu30rat6+m5Zx5M6A/iDmmzDXOWaI1rpcYDcQfTCqeadzo3lzXwjFRnmHYIfmf78CraSY/QmTMvMC67UMrfCa1ezOkVBF5Vfg1NGx47Ak/ON7KlmyJbew7VMXQmk81rdFPh5dOXxHpH16g013o0MozFD6HOG4p+IKE2ns7+eGljRdyh/ym7fTRF1HlEVJSLS6OkpNd0WjlPqeyEwzYU4939gxTaPUi9L/TR4N7RLsg1p5dS6SnFKX1Wes
EombbZSPMtzA9QtVD5cTACjAAjwAgwAvHfqZxCy4eBEWAEJgoCSKG95GM3089u/U869ggPuT4TAIC27rChcMhkqMQdSCg8iHkh7uTcvKq3Mrkmu3uny4n2nWUPpIW4G2/YgZRDvTYnMlKm0NmdFZhbqGFV785qf5CCnB8MUJeoP5fJ+O/vvU7bt3R4GuKci+bQBZfMN5SWeH+A3JYKL5XITIXE9TShFBrf9MzGeG8QTpsCT9P+wT/Rd074peNdVQIPDfM8EHihpx4Yde+Csy5LYRWju8YJPMFM2qVxd/XlUVd/Hu1rLYzfoLxoiKqOKaLZx1mnM5rVd1aThkGvNPgAeSfTpgeij6V1jfJmP/nFML1lk2n+3k2JrqYFwnB3ap37NBah9GGSIYm8vn/3U/PTXbaO2vhdg98tlVeUU/6sxNqFSQ6d9m7ZKAdRID7raoTjNwcjwAgwAowAIyARYAUenwVGgBGYEAh09fTRpYK8++Bl76IPXnL2hFiTl0WAKOgUdYYyEeki7uTccsHIAg+ateLByZzqq+tGi9TBlWXfpLzBxWlVkZhTezOx3zr3hEIRpI5Uwal9ZI0/t3qIOuNYtQG5WizS8pCGnOnwosKbv7iS/uPLRxlTUs0MeoVSEWmEIO2s8Mr0GpK9v+pEG/EHqCm4lzYP3OqZwANNVCPUd1DglQoln1UaLYi7sAV5J+eeL0i8dBF54b8dMG6LdP4hizqMG/eWGuSdZRT4ySf2cvn5wlhgWqKaMI/+IMhKa9MK871QHw+9JYkXjt4uaguuSHarLPtt3Raln95pnRp72EEfHd4yWg2po8SDM60Xd1rz5KC6632hnySZOSwY7mFTCTl1b8YriZeN32P4nEUNPA5GgBFgBBgBRkAiwAQenwVGgBGYEAh85qY7qEeQeNdde3HCeo498rAJsT63RSCVsaUzvXXw0k3cyTWA4EFqUEeGCEc3rHRfV+vNqX2cSDyoa0BsLc67mYJD6VeBOhFnuutKRzsrFZw8L0FBsEERmow6U2duVupInX7JtNm+ud1wo3VT4qnknRwHeMj0t0Hx/uwUikHdemvJzDUTfX69Zhft7OijcLBAXIX0cu8HHAk8s/oOcwKBN6NoJhUFii1TaPv+5xsU2TGi9rNbR2DeEir+xDdSXqYTgedI3mHkQwQe/tNM4nki8MTZAHGFzwtgFh6+WlwfSHlt6g2c1Hd2BB76z2pwnkYqBF54zyB13NuVMIAVkacSeMGGPKq6UiO/N63oud8MBha9Io1fp4yA+92sW1SXFYjflYlKyWTvxf0YAUaAEWAEJgYCTOBNjH3kVTACkx6BG7/3a0sMvvzpKyeFkQUW39jWH0/JSuVAZIq4k3PKFSMLpPpC1Wil0kFNvB7aZJhbIPAgXpO/jAoji6mOLskYeWWQn6IGXDbUZ05nCCRaiUijhZuwel5SrfGnc26lGUhrV/acl51MLZA2e85Fc+NTByGDWl+y7h+Ud1DglRQGRUqtqPsFB9QcCijxHtvXZRB4+8N/osOmhi1r4FmRd1hmcaCIZhbFWCFzDTxd8k7ClQ4lnkyhNSvwXMk7MQlfRaIa6tSPjSj17OrfWW21OjZwKyi8lgJ511B3X3oIISf1HebjRODVTSEqHMkcHjX9VAi89j92Wta8Mz5DBamJLz8kya1+ATAeVXjZqOWKMYALByPACDACjAAjIBFgAo/PAiPACDACEwSBlq6QcE415SJ5WJuZiMG9MqWislO3eZhuxptCYdEvUpOlE63VgLLOW6bSRZu391Dzjl5a/2SzMbx0tayZXUzLzq6juvmlGcfBjoBAahfWDXMO1F/MFjFll96cDSCgyJNqPKju5h9WlTCsWvfPnC4riUd0AAGb7vdW3lt7yd/aRYG2mMJpcOFMGq4up+Ga1NRLWNNzjb30912d1BUaolDwAXr/YefF121H3MkGdQUzqDJYQvkim7NGcaEd2rGB+v/nm563regTX6e8eUs995MdQO
ANvxUzsZDkPGrebdzn/l4yE3izVvpptrgQXhR4ZvIQRhZ+QeDhPYXakl3ii4NUzoedcYXEQBJ42DuzKtQtjTYVAq/5dmvzGnUzzenFeK3k5CJxjR9HeZwd1KZr7hhI+hy6dYRRT50g8DgYAUaAEWAEGAEVASbw+DwwAowAIzBBEEi2Dl62FVSAOxeMLJzSVTNN3AGj9U82xYk79YiqD/9180rojE/Ny/oJhqKsoiRfONEOGXXdUiEbkpl8XWVhRh+evc4J76FyQb5EBfnihgfUiyBq+gXp2XPISdfreAnnQRB3wbf22d4iIki8IZB5Hok8+bkAQmVtUx/dvWMk9fHw+g4qL3KvQVgSmE4VwWIqFWuuFt8tFBiV32LhVX0n+6WiwpO1CTse3JugrIVZhWpYYQlmSYB8Yh3mkCq8VAg81cRCqjX7BFmqmqB4OSNuBB7uBROLZAi8mZ8XqsokqwPoEHgxxVlUzC1WJxCfLSAZ675U4wWCjLbF5z8MJjKphC4WY1SWsgNtRjeSb84IMAKMQA4iwAReDm4aT5kRYAQYASsEQoMR8pJWOBbEnZx3NgqAp3pKrFwGs0HcYd7P/HKHobyzCrN6J5skHsZGXTco71DHsLE9cwoUp/0bLwpOvIfwoK2my+qcO9XkAiRAsnW08l/cEFfcuY0bOn6JNokHEgn7Cydd6Zj7x51dtFekeMp4x3yhxhtutxy2wF9Fhf5q47Wp4l5m8g4/777hcrcp275edtv9nvuCOIU7MNbTubeXhla3xe/hmj4L04lSa2MLNY220Heu1rzU9/BwdAWF6faEftKttbggT+yBdyJPh8A7ZbdQePX7PCnwihYRNXxBa4mjGknzCrfeIIylO7BUHIPIq/9ybdLvE7cxvb6ejd9fVYK8w+cKByPACDACjAAjoCLABB6fB0aAEWAEJhACb7f2u65mLIk7OblcMLJQa/XJh3+oQezq4rkCr9kAabPP/GqnbWsr90yk0y47e6rmCN6bqWdGEk5jqYIby7Elek7psroISzVeSBCi3f3eTC6QMuukvLOaQ/+7T3ScWiw1MEbQmpWEe0Ra5727uuP9q4oCNL+6wFDiDUVjnzt5viLjklGa56MG4VKgKu/ka9ki8FSyFF9wSLXocEsoTuK9uLXSHhcH8g6dkkmjBc5yHqr6zjwJde5e6ku61cDDOLV9RKfuHq0odEqhTUV9hzF1FHhWn2/A4bBvTTPgSVedQN33qFW7bCjI6yoLhFMyG1iksk/clxFgBBiBiYgAE3gTcVd5TYwAIzBpEXCqgzceiDu5MbliZAGl12Bk2KiX5eUBOpUD6KS+w32hUBk+lFamjnPF7ctTGdayr9OZwUNsJmq56SxiSkWBrdPt3YG3Rt3i6shCndtqtZHusnCWdUuX1bqhaCTTJrv6wqKeoF4dy6JHVuvePt4O6bThE6zrx5UJUxI3JeELzf30wsHELwlA4oHMqy5OVKiFhVnHBbPKjfVYkZPZIPBU1Z25JiFAAYkX2dpDq1fb1FdTXGftwDa70bqZWRiqMvEHCLxw9HYaphWu+6jWT9QlsD79OedzhEzV2j4fnbwr0STBzoU2VfIOi0yWwJM18KQDNe6li4MruEk0yLSBBfamvnqECE9iityFEWAEGAFGYIIiwATeBN1YXhYjwAhMTgSgDkPdJDXGE3Gnzmt6TRHpKAbHYiclZsXCTbS7b9AgarIRbuo7zAE1oqKiKJRafL6xcIC2zu6jrrrRDqfvWzSX3rd4xCXVah3P3bOenvvjhoSX8BB53oePoHdetcxy/dlQodhhXpjvp/Li/ARXVxB3d+dts92mq4cWUCpEHs4E6iJi7EyQuV5MLpJR30lgzCo8N5LLDKhZiWcF+MlTiujkuhgBUSqIQaSCmh14M0ng2anu7A7H7jXDtPvfSk0/oRy0qndn1V9NoZWvO9XDw/tqmI6g8PDVWuSdOqZKYLkpgd3SaKW5aXWPULe1+GiKIPOs1HdIm4
VxRbJ179T5u6XRquSm2s/sQjuWRJ4k8A92hjL2KwHp6yhVwMEIMAKMACPACJgRYAKPzwQjwAgwAhMIAbUO3ngl7iTcIIDcHkKzvTVmzPAg1SvIu2RrlHmdv51xhXof48FbPOlChYd4s6rTuIrKhIKqPGg75I0nHkWH1ya6pu5a10y/++o/Evrg/sABt5dpftd853Sas7wuoR3qifUJlVW2sBn1PzBinijyjvl+vP95WusfqWlmB8Ly4Wr63uDxXrfFUKZBoYb6aZkmc63ISfOEUyHw4E47tLDB2GOsCS7CamqpLjhWajwQdw0leTRLmHSooRpiSOVUpkwsvBKSmGfHgSitWzWa/HbDomKaj1acb1+nDESen9aKM7rWuBXcZkuKVoq/V4z6osVtLPX12BmJuUB3ii8YzE6ysu1PfjFMb223vjOyM9Hv0McInSjWct5hiWo8kHfpIO7UGTip8MyfbegXbMijqiutnZTV0gbZUuRlw8ACJjf4soCDEWAEGAFGgBEY9f+/4lv82BMAByPACDACjEDOIwBSp607bBTWx4N5JpRC6QIJD6BIT5VF8tN132TuY0d2guDAA65Z1ZjMGDp9dAg8qFTg0Ii9fnxaEzUVxZQgbgQe2qgknhV5h9RmYQBJQ/KpXpm0mcQDNmg31vv31YKXaX2gzcDDYtrGCoYHwxQVF+L9TaXGFayfRfnTZjtuSybSZXXOgVSQwekSJLeZJPViXmEeDwRecNkcKhUEQcwgwTtxpbMGqzZqzb+ODW9S76++4flWdgYWXlV35oHXCgKvUxB5XsKcPqvTF8Q3ah6m+r7RNbqwI/FA4ImPXyMWzif67KezU28tvGeQOu4dcTRWMTMcaMVjiXwfO5F3aj9JsGejRmk2DCxqRYkAvFc4GAFGgBFgBBgBMwKswOMzwQgwAozABEMADn5doiB+qg+ImYYFD11B8RTZJRQkYxXSRRRKu4FwxHDdlKozzAlmG/nBQNbmqEvg4UF3f7CfnpjeHIdOh8A7vKaSbjxppdHnlgvvi/cFDnh2xtrtSLDZy6bQh757RrwPFCLok2lFmtPZWOtrpRvyXzaaYA2ENYjzLwPE3XBfD0WHYuSdjJv+0UZLDoYNEq/4qFNGEXlqumyXINAGBOEyFiEVRuY6cqko8PKXzyHfYbPSVr8vGVxkzb8DP76R+ras075F/lmXUYG4zJGM6s58D68qPDf1nd2iqkRqJNLyh5Rzqg2ARUOVyDOnKcvmMLV45PFoghovT7xf5s6N0rvf5aNFCxKVd6nMR6evHYmnOtDqknfqePidUiG+GMJnebrqU5rXg/qb7T3htO2f+f74GINJj0FmcjACjAAjwAgwAiYEmMDjI8EIMAKMwARDwKoO3nhcItReSBVC+t5YRIxEyLN03JTzyfYcdWrgYW6Y16q6xrj6Dj/TIfDQDiq85if2GzXvdIg7dW9Ou3IpnSZq4iGyTW5anZEvB1+idUrqLMgMI/1XsJBDYeE42mWdVnvJhh7CJaPivKviJB5IAKjTJAkwFmdTHVMSNEWCZJZqvGQIPBAC4ARCxy+lcGXZWC8LWeAG2dL+i5upV5B4bvkggXlLqPgT30iYd6qqOzMIuiResuQdxnMyYEllU6ycop3uByOGxvaBVIZMuS9q4oHMG9wbqzGKz7WCEwpF2myQ8mfZlwNwGli+X+R72CnFOJkFZNoBG6rbmvKCZKbGfRgBRoARYAQmAQJM4E2CTeYlMgKMwORCAGqdtu7MFdhOJ5pjYWQh061QP8pNpYGHwalCDZHNB103F1rgHxTpVb+etSthK6pn6LkWwtRi4xdfHlXnTmdfVRUeVE8lIo22XaRsj1W8u+BRy6F9EZF22tHqOK177m9MeL3m/KupfvFCR0J3rNaJcUHQ7G/cQW9u2UyDjR0UeLslPp33LTzGcWogRkCQDVaVGQTeeAqco6FnHqSex+5LUL+qcyz6xNcpb17ivNOhurPDAaYWe8RlDhB3s1
b6qVL8nWxkmgDSMXjIhhGDV3zwWTulopCaO9JDKuqmGHuZZzbc0/GlEr7Y4mAEGAFGgBFgBKwQYAKPzwUjwAgwAuMMgf2NLXTTbXdRFE8g4qn71hs+RjPqa7VnOSQKGzV35AaBl00jCy/EnQo2FDMtXSFXhZD2Brk01FHh4UHyN7N3x++kq76LKdREzatftdiSJW7ruPJWrkUAACAASURBVPlv7zeaZFudaDUvOwJvqLPNSJsFzWJX1Uwl8IyU2YY5VHHuVWOWLuuG+/fv+jFt2fmWoaKDmm64q18UKxypX/deQeKZiTypupPpmqHjl9BwjbUhgNv4mX4dilgQeZgrUtkRUN2ZiTv8HPUX8X5Oxnwj0+twu3+mCTw5vqw3OCx+h3T0DI4qDVAo8MPPx0tk6guBdBJ52Sj7UF1WYDhdczACjAAjwAgwAlYIMIHH54IRYAQYgXGEQFdPH334s7fRrV/5OB02v4Hu/O3D9PBj/6IH77qFykuLtWd6UBB4MIgY75ENIwtJ3A2iNqCoZ6bWuNPBByRjthwO5XzcVHhrqzvpjcpOo3keHCmnuKdcgbiL1YgjWvQ/B3WWbtkGBF7j9p3UtHMXlQoiBfW8EEeePVIfL+mbe+xoReCh7p1d6qx6exB48ZRbuHGKtNspH/mKxxlkvvmWHVvpr8+sMsg7NfzD4v3dLUg8JQ6rnk43nvCeUetCk0h1OYVPGF/qOzN6buYdIFrxfsyGG3AmdnYslG+yLlzPAAxLhowvIqDywlzGsv6oGd9MzykdRF66DEiczhZSm7n+XSbefXxPRoARYAQmBgJM4E2MfeRVMAKMQA4jANJOknOvvLHZIO3+9yc3xFcEQu/YIw+j6669WHuVHaLItlSxaHcag4aZfGiTqWR+8eRm5eSpu9xsuA5azcXJ0AIPeL+ds1uLvANxh4dX8LmyzliyBN5gKEQnv2+QGnfsMqYMFZ5ajP/Is0/PKpFnReBFhGnFYPdIfTvM0y8wwKWq8e5/qMnAQyV0YWhRctSpukcjK+2k8s5qMJ9Q4Pl6E1MOL1l0DF2y+NgEM4/xSN61d+2mju49CcuqLJtFVeWzDQfOsuKYA7Qk3XNZdScXifdLGer+ZTnt3ExewcAiNJS6E2463wDZ+DIH81Vr5Kmkps5aMlW/MH4+xL7UCQKPgxFgBBgBRoARsEOACTw+G4wAI8AIjBECMlV2n0iZ7RYk3kNCZYe45GM304t/vzM+K5B6N4qU2ifu/YH2TEHegcQb7yEf1NNpZKEWc0eNu1TdeDNJMrrtD9Jp1z/ZTM07ehOaNhxWTj+veIsCDqlWBmllIu5wE9TAW/+FmHOrlwB5NzzUSAuP6Y53MxN4eKF+3hw691Mf9XLrpNuaTSw620WdN0HeFeclEngYwFDb5Ym/BS6Xbeyh9wkTC7Nxwngj8P769CpDfecWvlCY/KHB+Hq+evx76PCa6YbqbmjhzHGVNgvibtf+50eRd3KNIPGOOvwDxj+lWy2o11xV3al7B+OXsUxdlQpHKGd7xGfjeFLgZbOcAvZE/p6AA7kOkZeNeqjFIq25sjTf7e3OrzMCjAAjwAhMYgSYwJvEm89LZwQYgbFDQKbK3vHt6436dlDdgaiD8g4E3jWXnkMXnXtKfIJQ4UGBByWeToSFuqKlMzfq4KXLDTHdxJ3EORMko84eOrUBEXDr6tfp9QOjjRpknTuol5BlaQ7VhVZ3HiDvug620oKju6isOuYYaTwEC0VRRKQmmyNbJN5aXyvdkB8jI5sOCIfVAR8VB7otCTw5xzxRH/6BPzcZKbPASI3xRuB97MbrtLZImlQYNfHE9Z6Vp9H5V8ZIsPEUIO/e2Hy31pSOPOxqmjV1vlHrDsY8qAuGmm34bMvVwFqgfsMXC2MZUJIhUB8v2+UB7NaN3wNNwsDCzY043bjh90a5MI0Iis8ypy98slHzE+YV+MKIgxFgBBgBRoARsEOACTw+G4wAI8AIjAECIOtu+/k9cd
Wdmjr7F1Hz7heC0FMVdzd+79cGeXfxu07Wnm1T+4Dnem/aN09jQzxMtgu1oJqK6eX2mSLu5BxAiNWWF9DBcUSISlLxvLsfi0MlibtYWqg1glDfvW/xXOPFWy68Txvm1n1vU2nVYIL6Dp2BPUgAq4fudKbTdvo2U5d/S8J8GyIXGf+GCu+ppjaDvEO4EXjverGHPrhbqPSKRXujJuAIi5drBJ40qUAasNwDaXLx+9t/KWqejZhcaG92mhvev+lN446RSJi27X2aZhVFaVaxnbXIyOAii5beferNcbJLdVc1mzKkecoZux3SgIfEXqWqCk51gvJLEx3H2lTH0umfbgdanTHNbSQW+EyzIvKyocSuqywQZQnYwCKZ/eM+jAAjwAhMFgSYwJssO83rZAQYgXGFABR4SJuV7rK/f/AJ6u7tp+s+FCMlrhWKu+MO1b1Dqi0UeFDneXGjbRN1lgbCY/8A7wZ8pVAdYJ4Dg96UNSpxl+n0unSpBN2w0H1dkor/3N5oKPFknTvFlHTUrQ6vqaQbT1oZ//mudc30u6/+w3XIvq5u6heXVN8dqK+ixvpqo59PTKT+QBvVN7ZZ3ufa27/len+nBiDu9gb+Ooq8k33KhxdTwdov04dbXqaOBTE1YtAXoop86/nM3xemTz8o8mxFNMyNjpg9HFLjjTcTCzsFntl8wwrDh+64y0ihHiuyC8TdA5tj5B2iu7cx/t8NgsQ7pWbYksgzCEhxQWinptPKzjFTmqCo8TliypDSIctSZ2nAMdYqQiuyDOpG1KALi89gkFdejX5ShTBTDrTJzMuO1My0gQXOfH11UTJT5j6MACPACDACkwgBJvAm0WbzUhkBRmD8IgDCDum00swCBN/1N91BIO/ws09/+L10xslHeVoAHAdh3jDew6uyAQ/CqBWEB3kQd6j3l+kHzmzXZ9LZM5CKID43t3XQF598yTH1zEzeyfvrkHj+QBtV1DZTz5IyevTc42yndt5jL9O0xhg5JuPcT36E6ufHFH9eA+TdhuD3XbutuvnD1L3+dNr5ri3i2mq0rwi2UtCfWAPyugfbaMG+kfdDeWWUKqpit8eZqjruNMpbdlLGz5LrgpQGVgQe5goSxk2xetetdxpmEEjL6xfnRDqQehk/2bZff/5x2tjSFO8eCvcIcmh0XcIrZ0YSSDyo7ozUb0Wgd/pxXx01DVnLLVfSaqUBx1iTdwDSyUgDtfGKC/KyTo56/R2Q7Ln00s9M5KE2XWtXKGOfDwWiLEKNUHpzMAKMACPACDACTggwgcfngxFgBBiBLCMAUu5VkUIra9y98uYW+v0DjxsEHgKve1Ha2U0/V+rgeakxF1Pf5GVdKRJTp3hXCWbqaAEDPPSClJH1tP60ZSdtam0XV0d8WKTMgrw7vPYQU2UzoefuWU/P/XFDwquzl02hOcvrqGJKM30n0BNX3TmtCUq8dz/2SrxJsmm0uuQdBrrrklsoEC2kIqo3xt193lYKUIiOrX3S+DdUdypxJydXUBilumkjq2n41I1ChZQvMB0cF6mnmJlK4MVVdxa1+9D25LJ6asgvpVkFpaJeXAHVT5lKvtpqCh6+gMobpmathpyZvMPc7Ag8vAYSb05JNK66M58v1MKDM60au/YN0O79AwYZBTJ/cEikpQqS8rTjKzP1lkvqvrK+WlTs2Vgo26wmjc9QkEUgE61CdazN1nsh0+q2pDbvUCf5+6kgGDDqymaqBiPXv0tll7gvI8AIMAKTBwEm8CbPXvNKGQFGYIwRADEnzSpu+MzVcUUd6ttdLAwrpk+tSTCzSAeJ19jWP6pQ/xjDYDm8W4rqWBF3crIgy5DiNNbF51UcgkKuBIVlph4o5dq/++Jz9Gh5QPvYqCResgTe+rzbbdNmzRMBgYco9FWJqzKuTKsv2kXnzvy947yRRouoOO8qyp8221C2QWmTrtTT1fc+kDD+zGVLqGHZUm0sv3/Xj2nLTuE2jAmJsFKagrQDeQfiTkZlWQVVll
fE/w0ir2jZIqqaPZVCRprkiGOt9mQ0GprTZmUXJwIPbb6yeMj2c2rOjFNprrhk/N9DjQZ5p0Ys7dYnvvgI0tUXT6Gykth57Q+NXT0xvFdLxedGLNV3/JQyAPkPhSPIf6dQSxRkWjmYah1UjaOZUhOpZEW9T0QmjD9qRS1YjMPBCDACjAAjwAg4IcAEHp8PRoARYAQyjIBK3H1aOMmq7rIY+uwrvkAzhRMt2lm9nsr0WkTKD+oajfewe4Aba+JO4jbWNZqgNKouy09QHkK1MiDSh73WDvRyFtYP9dMXGrcS6uB5CZlOm0wKrRf1HeYkCTz8d2l0TsI0QeIdWf1Pqi/ebTn9eSc2UOW7rx71WqwmWPJqPBB3q+9LJO/kIDOXLqETr7hMi8jb+/YOuvWXPzSIOyujEJB3V9YuGDX/OTNmWa43/+JzhXIzIK6gQeKl20zhsj//znJcNwLvZFEPDzXxrEISeFDd/e5PI3X01LYnH11CuBCgOoNCYVZbJeyGRQwKnqq7T5+A9nLO7dpCsYsz1NoVzljKZbLz9Kp2y4bRxVg50OpiqKb4yt9JeE+mk8gDBjCl4WAEGAFGgBFgBJwQYAKPzwcjwAgwAhlG4JkXXheKkCLDRdYqoMrDa3avpzI9KMa6+8Z/HbwqQU71C0WIJKMKxQN4uajdhYeksSrCr+IOAq1KKLOy7USrqmC6hNpOJet0lTSpnJ/7Qu30vwf3eCbwjnpjGx31xnZKxsRib+AvhnGFW+BRN0888P73e78Zb1oUrRfps4XGv1sjEWobjpFCswWBN6d4j0HulPj9VCzUWm+0nUZ33Gv/3khWjXf/Td+gfRs2uk2fLvvW121JPGl4APL9xp/ebqjwrOJL048c9eP62joqLIhhoMYA9VN4ShHdfM7PjR8HxJlGnDtwJZ0fvsp1vjoNMkHgyRRaK+Ud5nTFBZU0a3p+wvSwsvJSkWZePELcdfX6hTozswSJ3LdMm+ro7IVdG3zW4neCWw1Fc39VhZbOz+QYZgXU3JGoqkxljenua/VlSTq/XAoKF6IpwoGWgxFgBBgBRoARcEOACTw3hPh1RoARYARyGIGQqNsGFch4D6lwQLoZagHhoS7TaVteMZleU0Rvt/Z77ZZUe6wfmEDFAxLWSikFkjNf1GXqyiBB+76uHcb8W/a97boOONJGFZnYHfvDdOTZZ7j2MzfQIfACYiyIVQYFwQsTiwMbYkYZ+VGRQjtcTvsFeecURaL/x6/w0XmXuqtTsQfNj6+ixnUbqX3z5vhtZ5x/Ic08/z0Jw+iSd7KTmcRT9x0u0pJkkam06mBIm8Wlhh151+TbTyFf7OyuXraNVi/fZvz3iJttlK7v/S4tGl7ueb9kB7v0Wbw+FAkLgt7aGVj2//Ii65ROmFg891KHcZnDirxT20ytyRdqPB8d4nEpkyTeeDKqcNrEuspC8UXEgKPpjeN7R6QGVwiFIcyD0pGKjc+xIvFZ1y7O+3gNYGZlYCHrBSJVGoZCydY5xGc9fu9xMAKMACPACDACbggwgeeGEL/OCDACjECOI5At0ikVmPAQh9pjqDFkR1ilcv909M1WnSap7ABp1yNUiVapk1iPF/OPZNcvCbzBUIg6D7Z6us0/l5ziqb1s7ETgyVpnOCfSqXTNfafTmvtPN7p3R8qoJ1KuNe6KX7bT96Y4m3t0bd1Cm37yA+N+UrEWiSgWqeLnkshzSpt1mtDn/ny/8bK671a1Fv/69Cr66zOr4rdS1XcwrUDNOyvlnUreofPeujZ64MyXE6aE1D1g+59936H5Q8mReE4EHgYbCLWJdFZ7ksaKwJPps7fcsWsUhGrarB2+qIVXUYb6lT5DzRsWgst0p9OCdIVCC+ciWQJH68CmqZFbvVGdYVSji1iNP/vPKbf7jUcHWnXOWOtUQeA1ttsrBFUiD5/ZXvGAuhvvfw5GgBFgBBgBRsANASbw3BDi1xkBRoARyHEExnMdPDVFFA/ZTg9JY70NSD3rFW
q4TJlGeE3JAnaoi5fJtF5J4AF71MHTrYVXPXUKPTL18KSwsiLwZLosSLuIBaMJFd7+9XOpSxB4/UKB5xrn9ZJPXMvzg7YknkreyfsZBKL4Y9jkBFu2cBE99s8R913X8ZUGp1x5OZ3z4SsNglAnNXHLjq1GSu0J+3uMu6hmFeZxO31thMscP7rysVE/k2q8Lw5+j2YNLElKoWWVQivJwdBgiPr6rVV4VjXwKstm0VGHf8CYpxWB96VP1GnBPL0ulpooCdiObp8gEtOTSjtejSrsgEn3ZwbOTFlR0HAC7hRK4GRqKnqtyae16Wls5EUhmCyxWSfSZ/NEGi0HI8AIMAKMACPghgATeG4I8euMACPACOQ4AnAqdXMczPYSVeJOKu6ypXBLdq2ZqjknFTyYlw6Bo84/HWoaJzxUAg/t3JR4wYJ8qphSa9Sae37W4ca58+rAaTaxUNNlneZ6703X0ltvjriV2rZdECbf9SPpmI/MGE0EWZF36v2s1Hib9hygtmCxp+MFQvDMD11Jx11+qWfyI/zwaBLOPPgefyxV1hxWBJ5sszxwBN3s+4GRHjkQdk8xVu9tJvBQOxKkKwhPRG9vFw1GYoYoSLn2i3qECDOBp5J3eN1M4DVMC9KVFzqrJ+W8JIEXG1PUTQyg3iZS05N34pXE1Xg1qrA7hCCjCgXZhs+ZdIb6GebV2GG8f+5DIWicXRfXXhVPL0Qe6njWCQMLDkaAEWAEGAFGQAcBJvB0UOI2jAAjwAjkMAJ4CG/rDo2LFeBBD2oNKFeg1kAdJaS2IcxGFuNiwsok0l1zzorE9LpmPPxCYWmXZuv1fub2MLHAZQ4QeYOhxHRIkHeD+bE6ZtMDXfQ5kb44v3guzQw2eCYm1+fdTr2BrQbhoqbLOq3n4ZbTad397yV6NOZGahmfaSffwkTy4uqyErq6PLHPxh9/n7rf2uoIn6rG62ptp972dtpWXKMNefAQuYXzL9No3To/vrqDtu0doO37QvTpkjeF0UuACvJ9Rq1Ec9ip79DOicDD67/qX2WkhSK8kMoyjVaq7qR7bldnJ3V1dhn38weilCc8JwIxXkQo4wL01cNH0pJl2qy6HjOBp5M+K/urBB5+FhFGFpHhfPE5lJeUE28uGFXYnSN87sIwIVN1M5NxrB3vDrSp/F7SIfLw+xDlIzgYAUaAEWAEGAEdBJjA00GJ2zACjAAjkMMIQP0yHlJTZZF3OGta1Yoa77WQoCZCofFUTUFUEjPVen815fkEhaVXR0nd47x+qJ9u7jvg2rwz2k5DNEKMXVC40SDxEA35s+ik4lOoYmialhoPROlg0Tb6d+g7lumyVpPZ3DeHHmz6ZPylqAWJh5RZq7Ai8F667uOua5YNoMbraW0jkHhtwSJXFR72H+QfyC1Zx8+NwANx9/jqzoQ5gcCTUSCMGspLhf+uQuTZEXhWNfDMi4Ur7fmDVwkjlYC4gmLfoOJ1NgbBPbC277z4FK058HZcdXewqZlCgvA1B4g8vyj7dWxHC03LC9FRxxxDZ53+UUvckyXw8gUutVWJ5Mig4JhRBy8Z1Zj8DFMNRrQPyjhoWC7MJwYjw57Vnl6njnOIsew+6+PvnRxwoE0HwSi/rCnMDxh1TdUaefidIlV+XnHm9owAI8AIMAKTDwEm8CbfnvOKGQFGYBIicLBDKKbEg9tYhG5tt2yYMqS6/lRTVlWjAlV9mOy8oJIaECrGAUGKZiq+1vs2bYhYF3AfjIapixLdQaf5u+jCoo2jpnN55VU0JTrDqJVlpRhUFYkgSduim2hD8Ptay3qj7Tr6e2eDVlurRmoa7b5Vf6X9q/7m6V4g5Bq376C2vCJqtUmjNdI3UT8PdfwkcydGOfH9l9GJV1xmO94v7m80FHfmODbYSMflNyX8eEpVzLkYYUfgqS60doNKAg+vy3TRYJ5PEF/2NSAlufWvnXvp7vWv08aWJrIj7+S4S/e0UEX/iJLzh3f+wnJK//dQI+3eP3IGdVNoYW
JRVnJI6nfozv0hnyCwRhSL+Nwx3kdCqWyXVptrRhV2+5qNzwt17NKiPEPpaGd04aW+nKc3ZJoax9SWBdTcYW9g4WUolcjDWQMpXitU1DiDHIwAI8AIMAKMgA4CTODpoMRtGAFGgBHIcQQ6esJGumo2Q5e4k3PScfvL5vytxko2ZdUrFrrrzFRdPvP4diRea7Q5oakdeScbXVTyfppXNMdIy1TNQLAOqU5RC+GjHt7ewF+py7/FEpLy4cW0bOhLdHdXL93dba2wc8PSrMBLhsDDGE2CwOsoKKaO/OJRikipuhs0OdiinxOBZ6W8U9ejqvDkzxumxhRnXgwszBipBN7/Z+9M4OSqyrT/dndV9b6k09k3yB5CIOwIARVkCYvAsIMoKH4zAiKKM4IsziBoGJdx2OabTxQERVYBAdkER4FRQUNYQhISyL530nv1Ur1857mVU7l9+y7nrnWr+z1jTUjXuWf5n1uV3336ed9XvmcldMmQUqMA9vVnH6O/fGQdhmwU7zDPjFmz6IqvXzPkyNZt6qIHfrNt0M+NRSyK+vupaCArZPfvic81hs/iPaOAh5/pQx2NbkNZqALOKS9FGpzuvyjfx/cX3IN6ATns+Y1hpO2iEJBscXddh5UzUP6yCp8dLl4R9h3I4zMBJsAEhhcBFvCG13nybpgAE2ACpgSizIPnR6zKxwOmm1sGIatukrTjAa1GhEgh3xlcZUE/OAedl8+OhTEfXnqggzppr2h2SHITHZLaZItzbNFkWpw6V6uei3uyJ9On8XEKtYOQZxTxpvSdMWiuUzYPFhNVzzUIAa+rt4/adjfRxrYuai2tpAoRKifP2qxqrX5tduGz3/jxetttTCxup7PKPxrUB+G0Y+uTpgLeo8e9SZvGmVeC1Q9yTef3aXb/giFz7xVjsoUQSkW4Mz7vyKlmLHhx7RVXatdvHF09aJyadPcg151xElUX3gWn1dHUiSkq7uulkv6hv5wQqfWooqqUysRL33a3ijcsmvy8wlGJitOpZDYsOYzPrur9GWQ/vw5iP2vRO2zl/RL3CrQIA8bn2E0BCzeMIGAihJYbE2ACTIAJMAFVAizgqZLifkyACTCBAibQK8Jnd4gw2jCbdOjggceNyKVfk5+E4WHuTY6t+kAXRIEKlf2AeaVwrzUJV01UTRa1+GvfK7kpnYQ7/do+U3IOTUpMoQYRmgZxq7Gle5Abz+s+vrWzid7rcV9dc0lDHR0gCnDI5lSBVr++ZpEbrqV775wbd7XSxj0fs3rx8F9fIQp7iM+eVZGRc7/7HZqy/3zTLTu57+RFZiIeXHhdQl7dUbw5N7aqeDerbwF9vev7tscA4bhenB+a2fm9+Nxz9NJzv/N0lCeeegqddOqpptfqQ2mTXZ30zSsmULEJXIh3xVAbRUsIEa6qPlsduLWjWLgjsz+3axAlR4nCApne/lCLxDitI8j3gWNMbVlg4aBe16YvdIHvyTjnE3T7Cxu3TKrFdwTcx9yYABNgAkyACagSYAFPlRT3YwJMgAkUOIHtTV2BO8CAROaHwp9+izLAkQD3C8aJY1MJ+ZLhoNm8T+GGLYM53Gw7hQgWZXu3/y/0nnh5aUeUH01HVS7KFjIRIaWowKhaJMFuvndFVdzrGgfn43Na34JUkm4fM2pIN6ciFnDcbe8YynzZrCMo/ceXc+OVC6FrYk3ZoIIV8k078Q59VAU8OZ4+Jx6q09aKohbbizbT/yx4j/68YI0Titz7Vu472UHmusP54bNqLHLR3/c+vbfsYfrgvfdyY/7tzcnK89sJeBjkj39t1l7jNq6hCdOq6NQvzMmNDZGqWKQTk+KdfAMiXmlNhVa8wqnpC1UgrBs53IK4P53mbfzdr7Uuu57P/jl68YXanw2nZP/021CEB4JRlGK/3Zoh5CH/W7dw4bqpdOyXg5vrgyhgYTcf579zcxrclwkwASbABECABTy+D5gAE2ACI4QAnA5dPcEJSmG4zOJeyMJufX5Ch/3cgv
kIi/Mi4EFcwT1zUPIomtd/eE5MlgIw6jogV6OVW02FkRsXnpV4h3mc8uCtb0kPWU7T2Cn07mcu1NZf8si9VLJpndanTAgVE4XzCU1WC3YS79DXqniFCoeTPlFLJ32iTuv6H2XX0+qSvWKa3fV27juZ684Y7izDTjM971FTyy+FKLucWltatJe+vfXXSaQi5DkJeBjzjYffoS0fNYnQ6wFNxFtw9AQaN6XKcmsr39xOA4kymnP0NMs+VoUqcN/WCuEL1Ya9OovtmKdXv0cb77jB9linXH0bVcwaGtKsci/IPvh+QsgzxLI4NFkgAsUcwBc5Wq2KiORjvUEXsDDbA7674ULmxgSYABNgAkxAlQALeKqkuB8TYAJMoMAJ4AEJAonfFoZwJ9cU90IWWB9CP/WONxk67JTHzS93q+vH1pWJ9XT5Er7crs2tgId7BuwgYC0oPpIOEC9jqyzLup1aOgYXuHC7NpWCFnbinZzvg//4AbWtHlqEYVt7F3UbKjo3j5tK7xx/waDiFcUb11LxprXacAeMr6GDJtaJ/ZXQEeedo+TMdOvA03PSC3j4uYqIZyfe6V13ZoUc4LrLdN8sBFpRQAJCbNNQAU+u7+nfzKMtm2ssj1VFwPvtD/405PqGSZXUMKlKvCqpcfPe3IwQ72T77D8fazovxC2cDb4jrQpV4HOOfGUIq7WqpOz2XlUR7+SYfkQ8uO/g1PXrkHa7P7v++gq0xkIXyDnnR8gPYp1hC55J8WEZUzc4P2MQ6+YxmAATYAJMYHgTYAFveJ8v744JMAEmkCOAqp/IV+W16YU7PGCFFR4a90IWMqwKYXoQNlJ7XC36qqpeGbu5LrlqEyU/3ERVYg0I1xVFOCkzezJl5qiHK7qZT99XVcCDuwQGE+RFlA/kVgIexpchwd2Zfl9uHITTQsgz5sSDcId8dxfXVCpt3UzEM7rvWoR4t/S4CwR/oVzZtP/+7MJcuDm6OYUN+hHwrjh3HM2cknX9yfZc8iF6LvXQkBVCuEPVWbOiFVauO/0gEO96um7O/Sgbwkq0bu0GUxqbN1XTb5/cz5LUV675Gs2cPXvQ+08X78z9fe5raVr1v/bFPawGP+r8A6hhataZiCbddUkhzqlWZ4XQbAwbVrqZaa04xwAAIABJREFUDJ3ciHfyUi8inhRf41aIwyxdAs6jujypialw44X1b4zKeanmO1UZy6wPF7DwSo6vYwJMgAmMbAIs4I3s8+fdMwEmMMIIbNvdqTlk3DQ8xOOBCg8cEO7gUgm6mqp+PXEvZIHE5mCIyrL5cLRAuKt49i+UEuIdGs6nX6hjUiDrESJe+rQjQxfyftX7E8vbSIbLgpNR2EIRi3GiGq1Vkw/xqP4ZhwT3KGqBkFq48YxFK9YtOJo+2u8opY/TaXPG0+nihaYiAq3Z2EX3PLbXPaY0yZ5OP/6Gdaio6jhOrjs5Tk/nTeKMlw8aFme4c9t26uo2/4WBlYg3Y9YsuuLr12hjrSzqoKeLG2lV8eBw5S7hIq5b3UkL3+6l8duEau2izTlqWi6MVoqTcNy5zbmpF/68OkY3/Oe3qXPN+y5WT1Q+c3+a+rXvKV0jf+FSJL4fWoWrNczvbKUFGTrZVaCV4cz4JYlXvl7WpL8G3/OYW4a9+x3PeD2KpMDlx40JMAEmwASYgBsCLOC5ocV9mQATYAIFTsBtHjz5EB9leGicC1lIHl09/dSajj6XFMS7uh8/Pugu1HIoCfXOKMw2f+OcUEU8KxeePlzW+HEZK4S7E4SAp9JkyGIUxUBU1oM+z6zaRr/7cJuWt0rvKlS5Xi/gob8UKfDfVm48L3nwzNx3KuuTfVRcd7Kv0X2nn6e7q4t27thByPBl9juD/7rziCHLku47OO6eLmk0XTYEvO49qQBO+l23KxFPCnhBudJkTkx89tyIZF7cdxKGigvPjzjp5l7x01fFaa2vWBtG/kG79YddwGKsCJ9NIOacGxNgAkyACTABFwRYwHMBi7syAS
bABAqdABx0cBU4tXwVZMC68NBWKcJC41ItEWvS88DD+oAQzNy6dpyYO71vJt7hGuh3RcKpYuawCVvE07vwzMJljXtyct8Z+8PphCq12KNTyKkTP7/vQxT5/bqd9Ni7mz25mYwCnlyPnRvPrQtvxuRSuvK8rMvPS1N13cmxe3seod7MI5ZTSRHPrIOxqIWKeIdx9AIe/u5GxNtv0T70iZNnBZrHDmuQZwixuV2hgjYqzspqs27PCdVp7SrTZteS0D4vUYf1u9mLm+I7UecZDbu6N77PxteXu8HFfZkAE2ACTIAJaARYwOMbgQkwASYwggh0Z/oIuZCsWj6FO7kmiDZwZ+xo9p6vL6gjlcnfM6L4gnTY5EtgrP3R47mwWeP+sE6zcFWE07Zcq+Z488Js+8AmeqX/8T1hvEPDZfVjuhXv9NdKgaQ1jUrK7sImvezLeI0Utn65dCM98f4WT0NaCXgYzM6Npyri+RHv3Lju9Jt3EvDQFyIeKtJ2G8Jp9QKeFO8QNvvvCfPceXLeXlFJu0OkAtC3L/x88N/NDgiiyeIvHUpV46stC1V4Otg9F+lDv53EszAEPDl/MlGUd7HbiaPXCq/I94lCN1lXbniFLvDvIIpMhOWyLkuVaEVFuDEBJsAEmAATcEuABTy3xLg/E2ACTKDACZjlwZPCHVxc+XY6AS/Ci1DpNV+VCO0q7UIsQ/4ifSXaKG6JMf9onXMO80sHnDFnU1guPMkIBQB+vftXBDHPrCFsFlVn7fLeqfALqsCFylyyDypl1ojKo8iT1i4Eg5U72+n5j9/W3p43oS031Iqt1bSzPUWN7dZVJVHEwqnZufHswmmNVWed5tG/79Z1p79WRcCT/SHiock/u7oW0+YtR9JJp56aG9IudFY/b8u29kFbPPDtjJYTz6rh3hk7rY6OPO8AT+5JNzz1YZ9W36VBC3iFEDKrZ4jPVZkQycDHbYuiYi0KWGREpWmrisRu12zsj2rGcElyYwJMgAkwASbglgALeG6JcX8mwASYQIETaGztJuS0Q5N5xvBQ5OQaiXLbKGTRIULRog4Bw4MwHqxQQMGuQIWb8K8guFU88xeqFIUrnJosHqHPz9YhClqkTz/S6VJX71uJPsiLp28Q7fwKd8aF4eE67AIXegFXVu9M9zZRY/c6+rhlO3WLh3t9Ky7tJbyaigdoadnejG/p1fWiUEE9zR5dRdcePVOJsVNuPDjyPtrUpY01Y3LZkGqzSpOITl5dd/rx3Qh4+utwn46qvZgSyQsGVRv+YnKF0vJVXXjIvyfdqUecu2BQBVqliXx00ofVGt1iQQp4mAeuNFRtzYc71QuiIPKc2v2Sxcua9NfAAd4k8iyGVcCiQYyPf3u5MQEmwASYABNwS4AFPLfEuD8TYAJMoMAJQJhKCzcRnEX5qqTqhBACEVqUeeakCxGuC6dKu2FXKDTyURXw5HX6kNogBTzJCEIBRIl8VbaUObGwho6uPqfbydX72GOVEHH1xTMg3m1IL9PG6erto+0d2TD0opJ+TbgrSuwV9IwiHvqd2H4YfWqUu8qwKpVqXW1M19mP604/p10RC6e1lVX+hmRIpAyNVhXwMHa7CKPtE+G0shnDaCHwIGwW4e9HnX9AIOLdhqJ36PXiB2lj0buDtjdl4ABa1H8JTR04cNDP7cJqV331s06ITN+fc+dvcz+XYrYUmT0NmIeL7CrQul1OGIUuxtaViRQOWZE86Mb574ImyuMxASbABEYWARbwRtZ5826ZABNgAtQvwmT79xRhCCtEyC/mKPPMecn7B4dgpxCPuvY4Gf3u1+l6twIexpMhta2nHEEQ8fw0iCEQfNHcVNv0M6fTtRBHaoUbr0SENAcR9i2db31C8IFwrBcnV7b+YdByIOLtELndSirN80nqRbxxlaVUliih07uOoIn9o522Neh9Jzeeq8FEZ7s9uh1L9u/pvEl8pyx3dXlx8XxKlX9Xu0a/xzM633E1jt6Jpxfw8IsJ5ITEGQ
Yl3j1U8s0hwp1xsRDyLur74ZA9mIXVenHhyQIWhRYyawSiUoHW1Y0gOquELquMiV9+IMTVLlesyjhWfUpF+PDoGutQez9j87VMgAkwASYw/AmwgDf8z5h3yASYABMYRAAC3ramcNwFQaGOopCFFA6KxWS723pcucngYIJAAAdYFM2LgId1gWPxuYto90mHewpHxvVwo0HktAspjoKB1RxZATaphRB6FaTtHGmN3Wu10Flj203t1DlgncPr76X9VCoe1CHeyeZFxMO1QbjxZKglcvl55WR2Bl5ceHDfGRvO8fNFy7XPIT5bbhoq0175JFHTphZNuB41qZbqJ9fSnKPduR6t5lQR7+S1ViIe3pf3qizCsP4n3xYh1u8rbbV85v409Wvf08bAZzLoc1RaRECdwkxBAD4Q9uGixneC2zyqCO/Fvw1hFbDA+BAIuTEBJsAEmAAT8EKABTwv1PgaJsAEmECBE9DnwYvrVsIqZBFE7iQkYS8XD2JNQviLqjkVsbBaR9s/n0s1h07X8mO5eSjVhxRDLHD7IBwVF8yjd5a1pNUf2lXywBndd5hPZMeiluIObYvSqde3B1CJUD0hfO6qStDOyr3iHfoekplFh4qXl+bVjReG6864fjciXqrsFiou2d8Uwe2J9bSmpFPjp8/j6MRrv6JKujU5IzA3pn4+hMy+IV5u2tEinBYhtWZNH1a7ZdlS2vL0rxxFPCneSaG50EJm9Ry8VqB1w99PoYsgw3vN1lxfLUR9kWOVGxNgAkyACTABLwRYwPNCja9hAkyACcSAwKtvvE1vvr2Caqor6XNnn0A1VRXKq2rpyETmHlNelKFj0IUs8OBYIdwZQbjJwg6zMmNW+6PHKfWheaVXK8Y9sydTy7XnaIIIXGrJhHO4aRzDZVXvoaxTLeFYkEUv4sJ9aZes3kzAay5qp0yRc+69D8amhiz9H9OnqG7HtJ/ejffxrm7a0bHXBTq2MkHjxEu2sFx3ZguDiIeiFlbhtAibTaTOtxTvMObKog7698QG7X5NiM+rDIO1A4a+t1fMon27y0LJmXl74kRP5/Wt3pdsr5PfIei05tEHaMdzD5n2R9jsuNMuonoRtt8tQvbdiPCeFh7yRX4q0Lpdmvzew/c+3Hgq+TLDCO/VrxvuQ7hEuTEBJsAEmAAT8EKABTwv1PgaJsAEmECeCTz4xMv01POv0XVXXUSvvL6UXhWv+35yHU0a36C0Mrixdrd1K/XNV6cgw1S95Llz2neYYWBWc7t14TV/4xzKzJmcG04KOhBwjRV+CyFc1ulM8D6EEYgdVo5DvbNQpUiKmYC3s7hFZSlkJ+BlPniPej94n7qeeDg3VmLe/lR2zgWU3G+B7fgfNHbT+zuzn18z8fHEGVU0Z1w5meXzU1q4j04Q8vr7BufEKy6Zbyvc6ae7vWQ9rSpOaz8qFkYlOBp7hZJn5gCFELt/cSV9o3uqqxB41e15cd/JsS/s+8GQohZm88KNhWIUCPnc+d4y6vjwPa1bxaz9xWuB5taC+K4vqqK6/jj2C6ICrdt9SRcq/rSrto7vwHGigEVYKSYgSo8VAh43JsAEmAATYAJeCbCA55UcX8cEmAATyCOBEy74Jt2vE+xuWHKvtprbrrtcaVWFkAcviDDVMIQ7CRhODYQiRxlamly1iep+/LjSGRvFO3mRFLiQB00KWIUULquyeem8QcJ4md9Quu6Sib0/UxkrDAGv7ZYbqHeFde4zOyHv92vbhesu6/6DkQf70ueNw8/g8IET75NT1V25Kiyi6qMX8TAn7lk0vViJQhWz+ivonzNTQ/sM+hHw7MJojRz3hnyWaA47iM9owyFk1rjXsENU7e5Rp4q18v2wCljACVhXNdSVG9XniudhAkyACTCBwifAAl7hnyHvgAkwgRFAAI47tEtEqCzakaddQU/ce0vOcbd5WyOdKES9lx7+obILb2dzN2X6sg+KcWwQJuCk2tni3ikow0DxkG/nuPCz76BDfN2sxS
6cVobN2o2nr+CKfghVjEt1WTccnPpm3U0p4Tbso2RJsafE/14FvI5kEa0fNTRZ/UXXvWEr3un3VHXTrYPceHrxTt9PClz4GQRlmZdvwdhSWjC2MB0/RhFPipW4V3H/zurLindhtqgEPLkHfZ5D/AwOSjc5HcNkEdTYYYeoqqwTQh2ExIzgq//eC7uAxSgh3uGXJdyYABNgAkyACXglwAKeV3J8HRNgAkwgQgJw3MGDIsNk4bhDuOwVl56ZWwV+Vi3y4CGsVqU1i8qNCNuKc0OY6vbmLmWHTRAFKlR5BBniqzqnsR+q0+pb+vQjlYYCJzysZpOpQ+Ts8VSlFpNtK+7UXrKN7y8nvKJuu0pW05rS52l3Ys2gqSeVzKUDBk6j0f0zNTeeW8ekWRXajqIuShfZC8soYGEsYnHQjb+mWe+1ukIz6tdPa/23i1x3r6zNFs4wNum6w3eEsYrr8ftWDsqL52ryPHdGTrxVRWmRGy+thdUuTFbT/KIquiAxjprEPWuXuzCIpUct4GHNEGMbRPViNK+VVIPYe1hj5CP1gNVeqkQ18YrSxJ7w5F6tOizyDAZZpVk/NwozJcQvErgxASbABJgAE/BKgAU8r+T4OibABJhAhAT+4fKbad7MrNsEYbJw3F12zZJBee/eWraS7rn/Ke1nKg0PhxBu4txG16SoLd2rJC5BUCtLlYjws72hoWHuDSG+qWQ25K2QmjEHnAyp7RCVZlWSvMu9QrR7IbVF+2ums5s6m9qpt2vv/VR/1zt0yEEH0IJzjwkdz18r7hgi3BULmxaELVSHhWg3oWgOnUDfILP8f3YLTPc20Yb0skFd9FVora415r+re38DLbrxSaoYcOeIKzv7Aio/50Kyc9/pXXfGcNNCduFJttIxJcO+5d8RaoriBG5FWdUb0o+Ap5oDT78Wfchsv9gURHYITNhjWKKSKosg+kVRgdbtOvUVa/F9gbQIYQjDGHt8ffS/2HDLg/szASbABJhAvAmwgBfv8+HVMQEmwAQ0se5G4a67VQh3+jBZY967Bx9/Seur6sDrFeGzO0QYbZybisstzDx3dmzCzpcU9LnIkORML0SP3kFJ//EQi9xMeMhUcam9kNosXHdd2hJbt+waJNzp151a00z1d79Lx998MY2bPy3oLWnjGcU77AWFDxBqCRFE38bRbDq5+Fotx5gb4cdMxLOrRLuuTrh6NHfj3rbvw2/QYb+2zntnBwcuvIfeH1w4Q7rukM8Se9U3Y268i/avDYW906Bbdi+nrbs/oL+veSzXdUL9fjSxfj4dMvNcp8u1963ywMk8h3CRhhUmj/m9VKGdMnAAXdT3Q6X9oZMM+R8QB2n8bOrDalV/maE8ccQdo6xA63ZrEL7HigIW+HcRZxC0YIpfLiElBDcmwASYABNgAn4IsIDnhx5fywSYABOIgMDKNRsIOfBu+9aXCKJda3s6Fz579Y13aCuoEaGz6OemEi2u297UFUr1xqCw2BWykC6cHhHyZHzoDWp+u3HCrlgY1B704bJOD6aoUgvXj50gsiyxm5YlmrTl2Yl3cv1hinirRcgswmZlg3CHc0HVUqs2u2cxHVT8WdIXuFBhbSbimVWjNRPvMP5ZZ9xNSUqoTDWkj1HAg9igd91ZDSrdeBfOr438c/7Mm/+qiXd27bTDv6OJebJ92L+Gnut9gVYPZMOgkcMSRzl9YAadmjiZZhfPHDIc9ojQR/SDozhoN54XF54b913WlZbSBCO7qshRuQ493aAKF8lqum4dsApDB9IFfCuFi7tDiHfVFdnPaZCCKe5RfLdyYwJMgAkwASbghwALeH7o8bVMgAkwgQgIPPXiG7Rl60464+RFtOSuh+jV15fSlSL3ncx/99Y7q6hNiHrHHX2Q69XAbYWQ07g2s0IW+jx3SEDeJQS8fDW3OfqiXqcxXFZlfruQWn3YrIp4J+dDOG3qo5bAnXjP11ytTWHnujPb8+LWOwgP7HioTnerhw5DxGvsXkfpvu
bcsNKJZ5bzTnY6vesIKj//iyr4TftIAc/OdWc1OK65+uhxIjw64ypE2vNixYUq4p0cX4p4/9FzV064s9rnrKKZ9PXUVaZLy4rPyVD2+VDJN2lj0btKSNy476S7EN/DKmGb+nBP1fPs70rTQPfeHJVFpeVUXBZtdeJCqKZrLGChr1gL0VHlfOxukAZRtRxjcmMCTIAJMAEm4IcAC3h+6PG1TIAJMIGQCDz9wut0t8hn97ioNIv/RngsilacKUQ8hMnihVx4fhvynuHhJM5NimTIaYYHwZTIPefkJItqP25y9EW1JswjXT1e3YkQChDuZXQ1Sfcdct61bd2tvKWqF9ZT1Yvraex+U+kz3/mc8nV2HaX7DnslVF51Yb06vOOrNLpvlib8ybA2uA5l9Va3C9xSvIv+llxNW0sGMzkkM4sOFS+0pgvPcDtsrn/DI7/NhdB6WeMlB9RpVTfR/OxTZQNuxDs5Xsdxx+bEO5wninFYuSjtRDx9uGnQ+1QR8Y7uv4QWiZdT01eB9rJOlbDavpZd1C9eZk0T8WpHRyLk1VQkKZnIVgP3cu86sQzqfVQV7xT/Hhp/IaRP0eC1IjC+ohCeW4z/4MYEmAATYAJMwAcBFvB8wONLmQATYAJBE0AhCrjs5oqCFXDYQbRDaOwq8YIDTzaIevq/e11Hj8iH1tgS7zx4EMn6hckOD4EIM2sXD1kutBqvaJSuw8NpT6Yvry5A/UKDrsIrQ2qlQ+j+so+06Tqb2rSiFW7a+K//Set+0SPfdnOZZd91FSLUMvV8rkiFm0Fndi+mWeIlm3Rv+anG6zR/2y03UO8K9znwIPbMePZ5euzdJtrW3us0zZD39UUswnSpYWLkvHv2zX9ztcaWgVbatm81dU2fRAkhcEA0dhJ6Ti05WQuptWoI18RnE4V68EuKoL4vNhS9QwipNbrxINxNFXnvpg4c6Lh31ZBZx4FEB6uw2t7tGwe57qzGKhk7OTQRL8h9qrDw2wcC284W84rneudj1rHr7p5CuP7oPZWF/a6Tr2cCTIAJMIGRTYAFvJF9/rx7JsAEYkIAee1QqKJa5LKTwl1US9u2u3NIEvyo5naaR7of8EAfRweHMezKaT9hvu8lXFZlPVIkwEPr3fShdokfAW/BOcf4qkwrhYFl/c/QO/SsyhaG9DEKeOggw7W7tZyKwVc27Xz819T1xMOu1os1Jfbbn6pvuo22tGXolbUdrq5H5+P3raRxlXtzb4XpUkOxCn3BCpXFbhjYpHVrO+EIyvRZ5y40jnVP6U9sh4+qyIXKHmUflRyTbsZDX2NYbcv6dUrinZwnDBHPa2Vrt3sPqj8YjqktE0WdsoV5rJq8pypKS7TvCNWq3fh3AuH63JgAE2ACTIAJ+CXAAp5fgnw9E2ACTCAgAnDfHbZwbkCjqQ/T2NotXGT5yyNntlIp0kC4Q46+VLKEmkSeqLg1uU6IW6oPc0HvwW+4rMp6pOjzk96Vmjtq98dbVS4b1Ec68LwKeHp3IRyBKxK/G1TAws2CzAQ8eT2cW3BwqeYlczOvahitltMPbjTBuuLGWym53wJtmt+vbacdHeo5K8dWltBn9q0yXWIYbjy34bNw37VQq7a+1oPmUu+oGmWc1ySvMi1qYRxA5joMS5hVWbAUftwWTlEZW/bB/VJOPdS6YZ1236hLocLdPHW2m6ls+xpdu4ENHOJAdsWSrP59Qkg6mKtUQK6vLtW+U7gxASbABJgAE/BLgAU8vwT5eibABJhAgROIUx48sxBQuDlGVaVEeFM8Q331zq3WdHT5BIMOl1W5jR+o+JiQxqlpUyNlOt0Jqn4EPDN34a6S1fRm5Z0qyx7SR+bAs7pY7zoMUpjNfPAetX/3Rts1azn9xP/6hBut6qa94p28SFXEsxPv9KJPkLnx/t8L5ymfB7bZQm3U1N+iXdO57yTtpdqcwmiN41SJ/JkVpaLKaITFPLCGKENJETpLomAF5tSqFCvGDiMfXol4+W2FUKzCbI
+4N4AK/xa6afpCF3YVa5HHlfPfuSHLfZkAE2ACTMCKAAt4fG8wASbABEY4gW6Rw21XqzsxJmhkeOBEmBFcCmYFKuJe7RU84NwK02GjZx5WuKzTuaKIxTvJJupqbqeOXW1O3XPvp9Y0U/3d2SqebnLgSZEyKao3whFnzI0mq9AqL0R0rO+dSUeks9Vr7RpcU3VCOIbQFGT4tpWIp3fdIQ9c2dkXUPk5F5ou8b0dXfTeDmtBW5/3zmmfeD8oN56qA08TKUXbLcQ7uPDQ3Drw3Ap4mCPM8GEzzvicVonvNeTtRP7OsFtmQzbEHU2r5Ctuqn6hTOF+sr3XRVGLxLgpnpcHrjUiRBSicxjh554XpnghClh0dPYScsJ6aVLwN0v1gLyOY4WAx40JMAEmwASYQBAEWMALgiKPwQSYABMocAJbdnXmbQdSjOrq6dccEGYJ7ONa7dUILezwsSjCZa1uhI4d7bRm23b686w0DQhFoE8Iv2h9GSGsZeydh/V3vUOpj1pcVaEFSzim7MQPLy48J/ed+ZkmNWEiSBFGnxNP77pLzNufys65IBc2a/fBhJBnbAvGehMLghC3nHLgGUVKfQjt7uMOd/Ud5EXAkxNkv3NQ5KKX2oVwE0aTodj45YhTUY6g5tcLeHLMEgEd3LEGOx3PaxhtlA7DoDgZxwnqF0T4zsJ9he8sWegC+fLwiwBuTIAJMAEmwASCIMACXhAUeQwmwASYQIETyEcePCncIf8eXHd2D7l4GMb7bkOc8nEsMudWkK6bfITL6tmt/8MaSu/MFlBY9okiamkooj6Rm3BAF6KX6eoUwt5QB4vefXf8zRfTuPnTbI/FrUj514o7aHdijdJRuxXvciKIcNEg1DTTG6zDSDq0Gt9aSgOz5yvtIexOftx4dlVoEUIIV1ivrlCFFPAyddXUdvA8V1tTzYFnNWhYRS7k/YtfSEQZUt/flaa+HdmCIMYGv6MWViv+tPqe9SLgZav9psQ+e0SuUm/uNVeHHkLn7HmVOhawUJ3aWLE2WVJM+JxzYwJMgAkwASYQBAEW8IKgyGMwASbABAqcQEsHKuqF40QxopFOH/ypWigAScbLUiXULNZZCE3mxUMBDoiTdm37a9uGvF05tYqqpmWLD+QrXFYuSi/eyZ9JEa9XOJj0zSjiuRHv8OCLcEPs1yyM2o7h6tLnHQta2BWuUL2npKtKJXG93ZhSkC0R+R2DDM9V3YdTPz9uPLMwWuSxRBgniisYG6rQug2fxRhOVWid9ijfl+GPEKD8hn9GHTJr3KOZA0/fxy6s1q2AV6j57ozMtH9bxHcOPodBNvkZrxDfadyYABNgAkyACQRFgAW8oEjyOEyACTCBAiYQRR48Py6yuBeyMDt6CFK1wjkIkQZCpTGffPv6dvr4IWvnWJUQ8eYunkoVUyodHYph3Xpm4p2ca93sIlo/x9qJV/XCeqp6cb3W3cl5pxcp4VxUzL0/aNsIp4UTb40Q8/QtCOFOP55e8PHisJIiD8I3gyyQoXIPdDy/MtetcrFzxWsvbjy9C0+67rTwTYv4zca6Adp0kHrxCmzAr/vOyErvmvLqJouDoOUk4Ml9IzcexDwZVuumiIV0LuJ7rVX8QiWq8GCV+9tLnzDd3WA8vr7cy7L4GibABJgAE2ACpgRYwOMbgwkwASbABDRnzLamofm0gkCjd1Yhh5iTI81qTuQpCmuNQezTagyzB3u47ra/PtR5J8eA2KmFG4pz2ffCmTk3XpjrNI6NnHcb/ucjxykh5CEnXibdRZkuIVSKMNrMz97QrltwzjFa3jursFm9qBtlrjDHTTl0kCJGMqHuoMuX665ndSNBuMusaRyyq+TMBoKQl5rVYLljL2687c0f0LNv/ps2pp3AM6F+Pzr98H+l/+i5i1YPqIVBzyqaSV9PXeX3CE2v1+91bUcHNVIPrS3Jho7v21ep/Tl9z5/6z2q9KILQraUCyHgSn4PaTF/LLuoXL5WGsFpZGRXuOxUhbj
jkuzOyCTO/KooaITyXGxNgAkyACTCBoAiwgBcUSR6HCTABJlDgBHY2d1OmL9g8Rm7y3DnhC/NBy2m5w+8yAAAgAElEQVRuv+/rc0U1rm61dN5pIW7IVSXcSvoH6gOuX+h3Ca6v37l8GzUu3+76Oohbx375CMeiD9KJphJm7HoREV0giyF0dCEE3brKaL5cdxDu9K47Kyx1X11kK+LhOlU3nn6vf3r/YUJhC2ODcHfIzHNpYv3evH8qIp6fwhWqt0RTUQ9tLO0k/IlfbJhVcIWYByFPujGRfiBqN6XVfnq3b6SBbvWiRJVjxlLdhPFaQQ9ZeMFsbLigIVTGaa+qZ2rXL6gCFmZz1IrKvKiuzo0JMAEmwASYQFAEWMALiiSPwwSYABMocAJB5sELUriTWMMMdYri6KR75Q/XvznE7QI3jMwTZuaEQU68GRfPjGKZuTm8CngYYNyC8TTnyKmaK8kYZipdTn2imIFT8ZJIN+xxMrkfCD3N7YNDpfPlusNW4LxrvvN15V1lvjKbWvYZoDGJMTQmOdb0Ojs3nr4ohFs35Yq3ttPKv22nHaM3007x+mDOW5QQ7iW8yoQIAuFuVvFMmi1eYTaIdkuTzbkpsN9cBVdDCPCY4lL6TNlYcrvXMNcvx1YV8YpKyykxboq2R1RPxS8azPI7hl1dOwomZnPIXKU7W7pDWUJDbakm8nJjAkyACTABJhAUARbwgiLJ4zABJsAECpwAkrjvbvP3IKMPifSb6N+Is9AKWZjdDgid3fXn7YMcdvpwWbvcb1G78FY8+o7nO7ph/jgau/94TRRAGBlyAEKYlOHEbotUeF5IhBdKkUPe9/ksPqIq3jWNFyGiB3VQ84RsAv/k1LocsQYh5O1XPt9UzDO68byGVu7c3E6v//Zj21NqmFhJcw8dR2MmZYu6hNleSe0YMjzELexPX4QD+d9Q0nVhTx3V9SfDXJLnsZ1EPLO8dxCbqiuyjjFZXCUOuf08Q3C4EJ9RfD8FXcBCTgt3nwxTDmsPPC4TYAJMgAmMLAIs4I2s8+bdMgEmwAQsCfSK8NkdIozWS/NToEJ1vkIsZGHcm8x9J0U7mHqM4bJWPMYtGk/jjhmvist3P7sCFk6DQ8AbMz+7VhlmCsWjR8sT1quUb8tpjji+L8MMIVbijPPlzlIJnYVwh5e+FdeWUYl46dux1Z8yFfGkGw8FESByuRXsVcQ7/ToWfXZ6qCLe3xNN1FxsXYm0WBipsFc0vZh3fI+5WzEu9yfy4hlbSe1o2+VJgXZAfDllevsLpvq3W+ZhurqTJcU0po7z37k9E+7PBJgAE2AC9gRYwOM7hAkwASbABHIEvOTByzo0EtQpcih5LVChegSFWshC7g8C3g5RvAJCD4Q76AEoVKFSdTVqAc9PCO288w7UtqwXdvF3uDy9VG5VvT/y3U+67iDgQeyRzkOVdT2zdC09s3TdkK6zJ9TR6QfvQ3MmjFIZRuuz4+qnbPuaiXfyAr0LT/7MTMTThw5DrHDKA2hc0JP/9a7yftARTrxjzpjh6ho3nc3cd/rri8SZFiE3ZbcuT2iyiKYPVNF0Ct8d6GYvfvtKR6UUot2Ks37nj+p65FVF6oheEc4fdEPuO+TA48YEmAATYAJMIEgCLOAFSZPHYgJMgAkUOAHk8EqLSrEqLYw8d07zhvnA5TS33/fxULz7zztoyx+35EQ7mfsOD8pmyfL1c0Yt4KlWoTVyqRhTSdM+PXOP8w7CbrbysJfKrX6ZR3W9WTVdWbjESdhatbWJfvTcMselXnvqQmURz07AQ9js26fszfNmnNhMwEOfs+vPy3U1FuVwW6lW5rxz3LShA0Jp5x02zu1ljv2Nue/0FwxA3BHOUTLU98FnF7LPPltLaN924VycV+M4TyF0wH2L0HdU1IXgbhZWWwj7UFnj2Loy4ToPp/r6qKqU9h3IjQkwASbABJhAkARYwAuSJo/FBJgAEyhwAhDvIOLZNVl5MSMebFuFe8Gs6EJYGB
DyhEq5EIUKqSEkrSxVQh//fjOtf2XzkKVLR54dy+kXzaSqadE6fbyE0U4/biaNm1ZHSZFPy8yBJsPzcJ/1iPC8Qm92ue70whZYGJ2WquKdZKQq4tkJeHbuO8xjJeDNE/nw5lfMzxU7MAsPVq1U69Z9J/cflgvv4xIRTixexqaJd3rHncnNOqqtiBauTokqEImCF/Hs8t3hPq8V37/tWsXdXiXXcJw/2/h3DA65sApYjBXhswnhTOXGBJgAE2ACTCBIAizgBUmTx2ICTIAJFDgBCCqNFhX5oshz54QPhSxSyZKCCcPEemvEQ6LM/dayto0+fmiN6TZlxUurcK6oi1jIRbopZrHfyXOoYUqt9pBvJ7LKEL1CDqk1c91Z3b9WouX/ufcPTrf8kPeNIl7H+8sovfwdanz0gVzf0oEDKFUzTbz2GXL9q18cWqhB38lKwEN12nOmnpxzVFotXMWN51XAw5xnfeUA18ycLjAT8IRhlga6+qnfIbxyX+HAm7E9oTloB6oKU8SDOxbiXJHYtLGSsp6dvlotQuHx+S3EJr9/wiqmkxAcx4oCFtyYABNgAkyACQRNgAW8oInyeEyACTCBAiewvalrkKsuDsKdRAqnGlwTcP/EuemZwXmlF+U++tUa6tjQbrp8iAa4Fv31WZmiDp/VLw6htI3Lt1F651CHkr7fwWfsR6X1FcpFKqRogIqesuJlnM9UvzYvFWZlEZZurZBHhn77d/Ocd04MkA/v9IP31bqtv/kbmnhnbH3izAZETsqkEPGqJh87SMizE/CKRC7LxLihLk/k88O9eW7D+cquSTs3XtwEPGMILT6DA0Kc6lMQqBBCu8/WhMYHr6K5NZqQVyjNSwVh+T2MPYaVQy4sftlCM6Xil0A9oQmQFcKtWCdCaLkxASbABJgAEwiaAAt4QRPl8ZgAE2ACBU4AglNXTzZENfsQns1jhvDaKMNlrTBOHF1OW3Z1xpYywtAQLpsWAkpH19BQ3/b17ZYuPGwKwhYeqvv35MWrnFpFMy6emff9WhW1GDWxhqYdMomK68o9hTbjHqsQwhGEgLiH1Lpx3ZkdmN7B9K1f/YVWbbXORWd34P/v8k/TirOPt+wyIByQfTuzIjFEvPr9Pp/rayfglYytomLxedc37Blqcp+I/9XnwVO5Ia3ceHET8LAXWcQCAg/cdH3tvSpbpIUfJqmuPRsqibx4tbOqKTG5glqEQ02lOI3SJCF1kqKy1XeV07TZPI9JTQiDKB33/cp/z8KuDo1fMuHfTW5MgAkwASbABIImwAJe0ER5PCbABJhAgRNAfiMIKRCiZOhnHIQ7iTWuhSyM4bJ2zJxEPOwVD9c1+1TT5POmx/KOki60IB7es66YlGN4Zj5BeHHdWa0X+bcu/+mrOZHW7b5u2PCMqfNOP4504eFnehHPSsAzuu80IVn8v36hysgCK24FPLkeoxvPq4AXVg48rHNjKk0IpcXnFkLUQNo5z2adzH9nOMDaY8cIASfpujKv2/vAT3+7fHduxsV9ArGqSrwgWsY1P2lQ+1Vh01BbqhX/4MYEmAATYAJMIGgCLOAFTZTHYwJMgAkMAwIZIeCZFSCIw9biVsjCLlzWjhdEvO2vb7MMp0XY7JyTpxBCTM0KIOTrLPy60KzWDSEAYWcwfMUppDas/SL/neZuE//rc8izNkiY60zTN//nx0rH37txr8OvUoTSVk3+JC1d3ETNEzKDrjeKdxDucB69utLIDSIH3idrPq00r1knvRvvz3/YRMvf3OZ6rEWfnU5jJgVfyAXfKXCTvdCxjZqKsmxUBDy9+06/mcTh9drZ1lUltR/H6X7GerDfUpGfM8jveP1+29LZXwLFpUlXdJD7tdobPtLj68vjsnVeBxNgAkyACQwzAizgDbMD5e0wASbABIIgsG13Z851E8R4QY4BJxTcHlE8jDmt2ylc1ul6+f721/aKGQiZ1VebDcs58sg7b9Mj7ywbtMTzD1xI5x94kOWyJXuEWC
MBfBhNhrlB9Mi3CBCk687IShaw0HKnif8nnV9OTLs3b6Tr3v2FU7fc+/p8eAilNVahNYp3SMAP3Q7OO307tvpTNCY5Vnleq47SjffTJW9afscU9fYSXgOJhPZCC8N9Z5b/7e8JIXAWi3BQBweelXiHtULAky0bZpraE1Kf3+qt2C+crsjDiCIUYTRZpTwIZ67f9clfCgyIG7pZhOhH0SCMjq4pjWIqnoMJMAEmwARGIAEW8EbgofOWmQATYAJOBBpbu7Xw2bi2qHIZWe1fCjtRhRhLESCIxOvvb9tKN7/0gu3RGoU8+eAPZyaEu7BDqmVILcK5zfIIhn1fhuW606/7maWDi1jI3GvIfWjXOj/6kL794aOuECAnHopaTPzs7ZRZ00hvn9VGLfsMEMQ7mfNOFqrQhETD6H7dd8bFgm96dxe9+Ogq4T7MvgvBrriri4r6hgrDAyUJOvbkiVS3YB9X+7brnP0MJ7XcbcawT4TSftzdanq5LFphNXZR9dBKtPrch/kSpr0Uq/AKW4bVIrdlR1cmb59hiJVh/rLBjA/CiZEDjxsTYAJMgAkwgTAIsIAXBlUekwkwASYQAwKt7Wn65RMv01tvr6DDDppHV3zhDOVVQaRpC8mhobwIh45S1Gpu74nMqRWFsGO17SAewG968Xlavl0tdHH+uPH03ZMWa7kQIXbgnogyvxVEADyAo0UZQhym605/tqu2NtGPnhvsgIQTD448OzfeoW8+ScfsWu7pYzTviVe063ZmdtCf2v4nNwZcdzDcoVCFsQUt3unHb9mRpj899RH192SouN28MvP4ygwtHNtJE6p6qeXIRZQZ3eBp7/qLVFyt/Zs7aXdLBzVXZ3+R8UHnTur8uIme2P1Bbqiz6/ejeeVjaD/xkq14UjnhZdby5U7LV6GYfIXVBvFd6fUm4/x3XsnxdUyACTABJqBCgAU8FUrchwkwASZQgATOvvxmOnPxMXTYgXPohiX30mEL59J1V12ktJPuTB+hUl/cm3RqRSEu5UvI0p+BFLWQM81tlUsV592gucRfvnzUYXSBCKmNwnVnda9JtyVEvF4XueLc3rv5EGd/+Nzb9KGhEq2xCrFxH19//la3W8v1lwKe/MH/dvxRE/Mg3JlVEA1TvJNrKGvaRR89/Tdaum2w6AXhbnxlLx00bnDFaT8inhR2EN7pFEI60JqhvpVt2jK/u/mPtEIIeFYNIt5Nkz6pva0Pn7XqXyVEcbjTzNx/ng/X4kIVsTLoOY3jRSlcRvlvghm38aPKtJB4bkyACTABJsAEwiDAAl4YVHlMJsAEmECeCaxcs0ET7Z649xZtJXDjnSMEvSsvPZPOOHmR0uq27Br84Kx0UR46he22iDpcVgWhl6Ts//DAfSpDa31Omt4onGDZCqT/eMRR2euSRytfH3RHKQCEFVIblevOjIuZiId+ZgUurj11IfVfdY5nvFLA04uVK3dtom09O4RAtdfVN698Po0RRSuCyHnntNiG557SushcgAghdogipsZTz3Qadsj7cOwiZDYtQolVw7Lhwrvgjw8qzQUR71+PP42KatTCJ8MuciHzv2HxcCmbCbRKGwuoUxRhtfKMm8R+wxT7rZAkS4ppTB3nvwvoluFhmAATYAJMwIQAC3h8WzABJsAEhgGBzdsa6cHHX6Ka6kr63Nkn0Coh4C2566GcgIctPv3C63T3/U/RSw//UGnHcc+Dp98EHg4bROLwIJOz58ORpXQwezrJHF4qIcSq7rsT922kE/fdNWgZE2tqqDyZFSUGEkLMy5OQJ92HEHeCEiTicsZWIp4UteZMqKOTD5xGcyaMop2P/IIaH33Aza2i9W047/M05vwvkHQoIRw6rEIkqour+HAlVaxeOag7qi6j2VXmTc+aS+nZc1WnyYWBw1XsJn/jv73+O1q+dQuRSj7QshKaP3YCfWfRKcrrQkdZ1APCYntAhWHC/qWGqw0aOocVVhsHpyHnv/NzZ/C1TIAJMAEmoEKABT
wVStyHCTABJhBjAq++8TYtufNXdMVlZ1FbWwddcs6J2mpPvOCbdMetV9PcmVNzq7/smiV0ybkn0XFHW1calZ1bRNU+OJ4KqdWJ5OFFQvXwK/DEIVxWhbtqsQezirPG8b9y8AaaUTfUdTmqvJzqKypy3QeKpxCVXqCyvFD6yLPxG1KbT9edGRjkxEM47TNL1+Xeni2EO4h3l35qjuZKk/f1irOPd80W7rug2Lme3OIC6b4zvq3ixlNx4emLr7gNOV/euJVueeN5bWkDCN0WYbem1kBRdZSE6Fi0R3i8+ejFNL9hgitEEKdrK5KUTBQTvnf9VF+GWxVFFNw4DV0tNqDO0lWLQkBuz8a4hDiId1hTfXUpwQXIjQkwASbABJhAWARYwAuLLI/LBJgAE4iIAHLd3Xbd5TmhDuGyNVUVpo67e4QDD+0KEUrr1AolD57Zw1xZqkQrfODGbYNx4hgu63ROUqSwcx/aCXi4/gThvPvMtEbTqYwCniZouBTxlj//kjb28udfzs0xf/EJNGbmDBo7a4bTFoe870ekiIvrzu2mZS5AVDFtWraUNnznWuUhpt/6Y5ryicO1ytL5zGdoXLCVgCf72bnxnAQ8v2HXcN99sGtowRdNzMNLJ9rp97Xf6PGuXXjyer+54uIiZKnemEGE1dYI4RP3id9f2qiu2apfXVVK5DUs8TsMX88EmAATYAJMwJYAC3h8gzABJsAECpyAFPBefX0pvSJeCKeF6+7+n1ynufDOFDnvpGB3qXDgXSmceihs4dSQi2pbU5dTt1i+L8UO1ZC5QhV19PDxIFsq3EBmwqWVgJcUD7771qXp/xy4wfIczQQ8dB5InS9EjL3uTrMBdqz+SIh2L9HONR9bjj9m5nT69NVfcX0fyVA8NwU9pEASh/BR1xsWF+gFnq1vvqUk4s1d8p809tBDqF24aaOsIqyyPycBD2NYufHsBLwghKzzn/65yhZM+zxyxhc9X7tX1CrRCm2g4IZKs/v8q1yfzz74LNcI1yCqIas6EMNIm+CFAe7PhtoyLTSdGxNgAkyACTCBsAmwgBc2YR6fCTABJhAyARSrmDS+QStUgSqzsmCFDJW9+sY7tBW0iff1Yp7KsnY2d1OmT+0BUmW8KPvIHHG727ptE5oXSrisCjuriq1GAQ8PzHjwhEPxM/sMzXunn8tSwHNw4UG8+587/6/KsoUTz5uIh8EhXCBszS6kFg/7KGCAfqqirtLC89BJ7gVi7aa/vEnbfv0LSi9/Z8hKqhcspOmXfolqRRVhL27UKLamIuDJdRjdeGYCnhSCBsR97ddpmC8BT+4XghBCYdHgurRyE6s4cKM4yyDmkAJ1RjgcW0Uosd2eR9ekNEE6n3kcs7kGS1m8C+LweQwmwASYABNQIsACnhIm7sQEmAATiC8BOO7gtLvztq/lctu9tWzloCIWqEpbLcJqIfS5aQhLSouHpEJtMkccHvKM7qNCDJdVOQer8FJUoYVoh4dO5FOTD8c/PG6V7bAzRo+2fH+g/J8t33v0auv3zC7yI+JlH/xTImcj8jYOvl8L3XVnBRhiZE3F3j13vL9ME/Iq5h9IoxYeLHgk8y5wON2vtX9+nZK7zUO3za6VbrzM6AbaddjgqshBF+fIt4An9y+LXJjd23EuVuF09nbvV5UnRDhqQsvjZyzsIfccVkVq1XXLdSRE5VluTIAJMAEmwASiIsACXlSkeR4mwASYQMAEINIhp919IlQWLjw05MJDQ2GLBx57UQuj9dMQvgUHWyE340PucAiXdToPoysHf/+NEHceWrpUOCqFeqdrdgKelftOXm4l4CFsVp/vzmm98v1PffWfPOXEw/UypDbTC/dVRhtyuLju9PzWbt5C68TrD2/+XfsxnGknfuIwGj9mHO07aaLniquqZxRkP7MqtCrjDxz3aaJxY3LOtDCKc8RFwJP3NkJMIWC2pXu1IhcQ9iByNYlfsvQaPtMqDOPeRxb2SAmnKRyI2LMU6lUqb4
e5PxbvwqTLYzMBJsAEmIAdARbw+P5gAkyACRQYATjuINwhJPZbImRWhs+iwixcdocvnEsQ97711Ytp7gxRMdRH6xXhsztEGG2hN5kvqX9gQBN6zBx5hb5H4/rlAzAEnmLxl66ePrrmqWdo+fbBifmtBLyyRIIm1dbaYrES8Ny67+QkKGwxf3G2irLXJkNqcX2+Q+y87sHqup8/+Vsh3m0d8rZ0ps2eNplOEGJeQ/0YErd6QTS3LrxMfQO1fGKRJmBVliW1EH/s3y7M1AuIx1a+TY+vetv1pefMOYjOnetc5dv1wOIC6SaFexbfY4UeDq7CQDqK0RffafneM/KGjqpOibBZdt6pnB/3YQJMgAkwgWAJsIAXLE8ejQkwASYQGgHktrv9roc0ce5KUUX2DFGcwtjeemeVJuwdKopUoBJtEG27KGThtpprEPMGOYYMl4WoAREPOcEKReDwykG68KRoKR98b3rx+UEinpmApyLeYV1BC3gY87w7fuB1y9oDvnTdicd9zYkXdOGG3jVLqe+jocJO6Ulf8rxulQutxDtcKwU8/DfCo79wxmmaG69QmmouPCneYV/SBSU/00ELeJjDiwvPTwELp/PC/Y1QcRkKL51pTtcV+vvSYYnPNMJqET6bj+9viHfIeVeMA+DGBJgAE2ACTCAPBFjAywN0npIJMAEm4IUAnHd/E+KdmXDnZTzVawo5D55Mao8HLylg4WGwLFUS28T+qudi189YmEPmS2tN92hVLd/ftpUeeWeZJuSduO/eIhYQ7sqTSaqvcBZ/ByyKWLgpXmG2B68CnjHXnT6MGEKe3wd+CHfdL/3cVLyT+6j4yp2UmHlwEEc4aAw78Q77FLoGoRovGsQF/OiWq/6xoEIrnZx4evFOFmuRApZdnjg/h7G8cSvd8sbzykPcfPRimt8wQbm/m47GVAC436srEppga1fwwc0ccewrq+s2tmad4FKgj1q8RLgy8kpyYwJMgAkwASaQTwIs4OWTPs/NBJgAEygAAihiARGvkBqcKlVlQowqLTENl5UCQL7DsYJmKkWsnkz/kCqclgnv+zZQUc8jrpcykDpf2KCmml7nNYQWg7kV8JwqzEoBwE8lVoh36f/6qhKjoEU85Ly778lnhsyNfcuCJP1QcXQN753+ySPpk4cePKSoh9Im8tQpuauR8KpYvTK3gvSsuYSiFXjpK/Aaz1PmQMSFQbrxVEW8MMU7CPAQriBEQ4DXt7DEyzzdArlppdsQwnRrOpvTUjYpXgZ91lZ7rhaVrvFLEW5MgAkwASbABPJNgAW8fJ8Az88EmAATiDkBJA9vbCmcPHgyXBYPugi1sgr/zfZLakU6Cj0JvL4wh50zBQ/F9SJ/Ex6KW8RDcc6VlnmDinr/V/lOtHLfyQG8CniyEu1r/U9qQ73W/5T25zHFZ9LUork0rWjekAd5lWqr8qy9htS2Xju44qkTqCBFPDP3nXTZ2d230ydPpGs/f7a21CAFLae9h/W+asXVMAQtiHiPi5x4H+wanD8Se91v9Hg6R+S8C8t5J920dr9s0IuXLR2ZYfF9Nrom5ZjDUp51mGG1LN6F9YnmcZkAE2ACTMALARbwvFDja5gAE2ACI4zAtt2dWqhWnJsMl02I+EFVt1VChNZC0CrkohZSsHRTsMFUFOh+mIr6NzoesZN4hwH+cMd/0c41HzuOZeww/apP0UvTH7W97uKS62if4nmuK8waK/OqLq77xZ9pobNuW82P3nB7iWn/m+/670E/xz2Lz6LRdWd2McJopcghw6cDWZTCIK/86Rl65bVnB/U8/hiRm2/abJo+bY7CCHu7yIqr7UKQV8lpGJYbDytCcQvZwipWIcd36x6FU6+uMkXglK88ca4O1qSzqlArL9U7cIMOq2Xxzu9p8vVMgAkwASYQNAEW8IImyuMxASbABIYhAQhiqGIax4YHvkoRLouHVy9CnNsHxrgwkK67pMiFpSpY6tcuXWkIj4bLUmsO4bQDiaOIks
5uNC958HbM2EGtVzrTxQP7VTU306SBuVpIoZvcdnYhmFYzu3XfyXFKT/wiBVHYQgp4slAFHKWqe4aAhxZ0PkC7U/p4/Sq695c/tj3IfafOpi9fcq3zYYsesqqwl3D3MNx4Sov22cnPd1KYgpbPbTleDnF6lCjSYRYq7HRx0GG1tZVJ7d8VbkyACTABJsAE4kSABbw4nQavhQkwASYQUwJwcyA0K25NNVzWad1e3VlO44b1vizEoepIslqHdCDifDu6dAKtEPLI6MZTEO708yx//iVa/vzLSgi6B7po85VN1D+zxLa/DB2FiHVR8XVDQmqVJhOdVF1pbnLfGecOUsAzFqpQ3acU8GR/KYZB8A0jbNzMdWe1VicRT4pYCIU35kBT3T/6henGc7MO1b6Wn0nVAfb0g6AFESojxPlB4fIux4mqu/wu93tv4rON1AheXYgQylHpF4WOuDEBJsAEmAATiBsBFvDidiK8HibABJhADAl0Z/q0Kq5xaVJww3q8uM/M9gHnSq1IVo4W1wfebW88IYpzlIhk/qLS6NjZVD55cE44L+cTpnipGkr78RVbHcU7Y+gocuJ9ruR6L1vWrlERiLyGz8pF+Q2jhQjzwG+fpZXrNimFzBphGAU8vC8LnQwRbT2TzF6o4rwzToGQ2uOPPX3IzBBzUITGr0CtH7gQ3HiyuI5fEUu/7ypRfAEVVDu6MrEtaKKS58/N7al3IUL8NRb+sBoL4l1DbRnhu4YbE2ACTIAJMIE4EmABL46nwmtiAkyACcSQAMJT2wzVAKNept9wWZX1SndbUMKgypxOfdb8+ruU3rSC8IAp6k8MCqEcf/TZhJff5jbflup8CKeFG88sJ978xSfQ7pM6csUqzMbUu+6MoaPfTvxCdRmm/eSDfjJRZFrowY+AVzLjIKq84i7P65OixuO//1/6/V/+5nqcfSZNoC+e9VnLfcNlhPspqAIXP33wR7R2w4eu1/m9Gwbn+AtazNEvKM5uvJG8b7jdwvi+hRAHF+OqjK8AACAASURBVCKaU3EPKegnSopd38N8ARNgAkyACTCBqAiwgBcVaZ6HCTABJjAMCKS7+8QDf36cePpiDViHVXXZIDCH+TDtZn3tGz6gjx65lWAI0QoXWBQSqZoyj2ZeeJOboU37huEA0k8EMW/nmo9ozMwZNHbWDO0tVJyV1WaNi3Iq2IDqtMcUnxXQvpPavZ3LB7hn1Khz4OmdgTLHn7GQhcqGzdx3xuukK81rdV79eN++LZtvz22TLryw8/TtbO2jxrZ+WrE5owmXxUK97ReK8JyJSWqoLqYxNfkJmZSiYqZ3wHVOR7es9VVb28UvZPLd6oS4ViQOA5871byOXtaMfzvgrsa/G2Z5M1m880KVr2ECTIAJMIF8EGABLx/UeU4mwASYQAETiFrEkw/2vUK9ahV5+MIU7vTHIos87G7rDiVfmNMtAPFurRDv4BKTNSbsrglKxJO5s9Ldhrx4Tgv2+L6ZgGfnutNPE5SAhzFl7jFjNV+vAl7FV+6kxMyDXVGRAqqxGMvazVvoviefUR7rsrNOp30nTVTqLwWkPmHt9Bo67ib3nXFREPBO/cwZWrGKoMN65Vx/WtGliXfGJs1WfeKteZOS2ivKBvYr2ts0IbG2KEkTKspCnz4ORS7kdzoKI+Fej6LJfVcIMQ/3uaxmzOJdFPR5DibABJgAEwiKAAt4QZHkcZgAE2ACI4gAXEq7W7stHWFBoJBVVlNJb9Vlg1iDrIoYZC4ulXVBPPzwV7dQ09rlrhgHFU4bZl484/6NAp6T6y4sAQ/jyjyIJcLyKENLvYTRug2flfe6fl4jJ1URz414p59DFrjAvo0uRKd71o+Ad/oJZ9IZJ56p5dgMQ5y3Eu/knvRuvPqqYjp2Xvgi2tZ0F73d1Ew7uru1PevdZwfX19GE8rLQxTyZCxH54dxWc3a6H+ze91Nh18+88lp9WC1+GQVnHn5hwI0JMAEmwASYQCEQYAGvEE6J18
gEmAATiCEBVLFsbOlyJTCpbkMfLgvxLMzwKqc1RfnAKYUzCHd/u/dmp6WZvr/wXx7ydJ3xIr2YhfxUYZ2BFPBUXXf6dV5c4r0SrR0k6YSTYlbHPVdR30dvK3N1474rSxVr7jOj889qsp8/+Vtat3nrkLeR8+7Thx+q7LwzG18v6rip/OpVwIOYctKnPkuLPnFKKPcXwmXxUmnSjXf07FJqCDGc9rlN22i7EO6gGdlVAj510vjQRTx8xitFsRAUDGnu6FEu9qDC06yP/C4Ny2npZl3IAQk3HjcmwASYABNgAoVEgAW8QjotXisTYAJMIGYE8AC6SzjxgnLOyAe8jBg3ynBZJ6xRONJk3j2ElK199VFCxVkvbeYFN1LV1P28XGp6TZD5AFe8lt3TvGP2Ft3YVLSSHupfks3xZ5Xkz2I3fotY2EEyhtSqinhuxDs/bOHIWyde+4hQWdVwWZWbQoYalgrnq2phAbcVaOF3kk7LTx19qmklWpW1OvX5zZtppy6D3pduvAsXVYVSsVWKd2CMkGWnFoWIhzVEUdwjK1SnxD2Vn5QEetYQLGv2FLdwOgN+nwkwASbABJhAnAiwgBen0+C1MAEmwAQKkECvSCDlN/wtDuGyTuilIw39vOYKM5tDup56Mghl69XEUIh3XgW8oMJo9WuVD99eih1AtFv5+lAxsmHqPDrkM+fTPnMW0H/s/lda37/S6QgGvR9k/juriXHmslorxKyuF35G3S/93LQ7wmZLT/yiUt47s0IVrjYfQWeZA7KjK6MkZqlWocXeIZRBpEe7/HPfoOnT5gS+IzfuO+PkZxxWTWNrSwKr0Ivxt3Z20Ytbt7sWqi+ftU/gbKwGDKvIhR+hOujNVwu3K9bDjQkwASbABJhAIRJgAa8QT43XzASYABOIGQE/Il6cwmVVsOLhryxVouxOshpTL1oa847FTcDDHpxCiR/5zeAiC7UlHVTZvpoaN6wwRQAHFsJyR02eS8dcfBN9r/cLKvi1PlOL5tLnSq5X7u+3ozGktnfN0kEhtaUnfUl5Cr3TUibSV7444o7SmQVjpFOlUBUXXmJPrjEUpEGTFWjD2JYfAQ/FLA6dUSbCS1FYQ03AtNsD7vUXtmynDW1p1ykHkBPv4NF1YSAyHVOK1jirFlE0yG0+ROOgLN5FdnQ8ERNgAkyACYwAAizgjYBD5i0yASbABKIg4FbEk4KQ3nkWxTqDmMPvQ6letDSrwhhHAQ/c8HBfX53Swv+kCxHC3aNPPjsE61EVy7Wfjaqr1V6ymeW6gxtv6kXn0K/6ljgeT9TinVyQdEp6zd+F+12G7cUpPNwRuOhgFDCtrrFz4SWFiAXdTobb7zt1Nn35kmtVpvfUx6+ABxEviNBSye77b60i56DZoVtFQYtTJ4/3xMDPRUEUuUBux2SiSPtlR1g5NFX3yM47VVLcjwkwASbABOJMgAW8OJ8Or40JMAEmUGAEIOI1iYc1GR5ntvxCCJdVwS5DDN3kdNLv3S7s2I+AF1QRCzsGUsD8p3/5Hi1f8eGQrvNL11Jtyd78Y2VlpTRx/Dgt7xke5M1yJs5ddLaWG89YlVY/eBRhs3b7lgKmiiNNP46TYKtyv+W7j8wJ6FS11CjiyZBZuO6kiBO2eAdWUsDrM1GOkIOvGIdp0SDe4SWbDC1168bTC/3/vWqt5yOMMoxWv0iZDxEh9G6qE8vPSaZX5DJNqxUR8QxH4UKvBSta29P0yydeps+dfQLVVFUozMRdmAATYAJMgAmES4AFvHD58uhMgAkwgRFHAIUIUNjCTMQbDkKG/kCzokapyF2X0aqI2jW3oZPL/v0i1/dOGPnvrBbxb0t+TB+s/FAT4/QaSU1xB+1ftm7IZRXCSTRh/FhbJ85Z1++toLt+YAVtGMjmxYPrblrRPNc8wrpAniWcRXaVRDE/XEgQQPzmiQxrL27GVS1wgXDaV157ljZsXK25NiUjCHfTp80OrWiFfi/LN/XQe5uy4lHbQJrErx
XEq1f7e4pEGHxRkmqLKkyFvGPmltIYQyVaN248s6I3964e+plQZZ8vAU+uD99ztXuKPkDIsyta5BRqr7rnIPohYru+ppTgJvTaHhQC3gOPvUifP/ckukQIecb21rKV1NbRSYceOIdFPq+Q+TomwASYABNQJsACnjIq7sgEmAATYAKqBIwiXiGHyzrt2emB1eve2zd8QGsevtVp+kHvR+G+w4Tvr1hF3/nejwkephKERgoRTxaQnZLcQVOSO03XDQGvvKzMck+LLrqRxkwLroKuK3guOzuF1OoLVcTBheRye7bdZVETK0ea02ciyLWYjdUtQrwbe/rp98vaaDe12k4HEa+ueLC76h8Ot3ZbObnxrPZeyAKeBKi6d69h5kHeF9lzKNVcv34bnHj33P8UrVyzga687Cw6TIh1+NkNS+6lLdsa6bCFcwlC3h23Xk2Txjf4nY6vZwJMgAkwASZgSYAFPL45mAATYAJMIDQCEC6QDD2VLNYqrDq51EJbSMgDS8dNV0+ftk80ffiZ172v+fV3qX2jeREI45ZmXnAjVU2NRvy6+bYf0XLhvpMND8kQ8CDk2Ql4xnx4xj3IMNqQjyuw4a2KPLh1Wwa2oAgHsnKkyZxv+Owj3DbqJsW7dQPb6Hdb11DDjn0dl1BGSRpXks3TaAyfNbvYau9S2GxNi4rFhr17FfCiLmLhBEu/dxS5kO5K6UY227vTmEG/L0XURIl3553Zmt56RzhLX19K1115IS25K+sWvu6qrFMaAh5Evvt+cl3Q2+HxmAATYAJMgAnkCLCAxzcDE2ACTIAJhEoAohZCDYd7g2BXK8Il0bp7+wkijlO+MBUmTk68qinzCKGzUYl3WPPZl/zjkKXL4hQTirdbOvBw0fR9plpuu9AEPLkRKdihOEWlOHcU+YBoaxdqqHL2hdBHurIQRp4Ugkm+w4U3d/YRxLsH+l7W8E1dexBVdIxyRFknnHgzaqvo2HnWDlHjIHpHGnLqIUWAVaj00l3NtHR3s+M6jB1OnTSeJlSor8n1BB4vyIqVSe07rq+/XxQ6ScQiTDws8U6PCULepV/7Ph13zCHaj6+89EyaO2MKPfXiG3TmSUdrP4NDj/Pmeby5+DImwASYABOwJMACHt8cTIAJMAEmEDoBiBltMUhmHvZG8fA4praUIGY1tnRTjxDygmoobGFsEO+iFO7k/GYCHt5DOO2UxA6aWLLDctvDUcDDZutEjrAKIWJ0Z/o0IWMktTLhsEWusYy43xtF/st8VRyV7rtf9L1E6we2545ARcTrqmym6w84wPWxwXk2ti4rsDl95t268PJVgVYVAn5p0SDOPSlyzDW19+TdYR2FeAc2l16zhObOnKq57zaLENrLxN/hvEP4LP5+9Y13aH9Wi8IXV4mQ2zP2iHqqXLkfE2ACTIAJMAErAizg8b3BBJgAE2ACkRAY7iKevkAHgNq5cSIBHuIkZgKerDA7scS7A09fxCLE5Qc6tLFSZ3VF1oHXIgTrfAlZgW7QYTBZjVk68D7ua6WHtn1Eq3taBl25uHIKnVJl7b4MYs2tmX56L7M1577Tj1nRUaeF0xrdeOnKJmocu5a6qprpG+Un06yS8cpL0ee76xeHXVmWJLtKtVvTXfTc5m1K48ddvMMm4DwtS5Vov5ypFm48VBpuFkJePu77pBBSkfMOvzwJu83/1KX052fvyTnsINjBjXfc0QfROZffTGeevIiuEK48uPDwdynuhb0uHp8JMAEmwASGPwEW8Ib/GfMOmQATYAKBEHj6hdfpKfGCq+BbwnngJVn3cBTxrIpUQNioEo4sOFOcKpUGckARDqIX8GTorL4a7VEVyy1XY+XAa5g6j465+KYId+F/KquCBSqVZ3f9ccOgBZRPq6WKfbJ52AqlGff5n7vfo496Wwlc9IVN9Pu5etT+NCsVzj4h4D3bs4z+2P+ua4TQfU5PLaRTxEulZXO+pYRg1yte2QrUKpVqIeIhlHZrZ5flNHEX7yBa11
WlaACCnQgbl61KCHoVpQlbEVOFrds+mLOuKpu+IIp24gXfzIlyEOnw9ztv+5qWB+/Nt1fQ/bo8eDfc/jOtyIUMrY1ifTwHE2ACTIAJDF8CLOAN37PlnTEBJsAEAiPw4BMvaw8myPWD/D933/ckPXHvLSNexHMqViATu8OdNJwKeMgiFtJ1Z8z1ZlXIwq6IRSFVoMUHK5f3zqJYg1Wl1o2/eI8615tXRi2fVkOjPzk19kKeWYVdiHdrMnv3hXBqNLgRjS0sES8qAU+evVW+O6dqreABIQ8insyLB9EOr4NH1wX2vR3GQGYFe/TzqIiYQa4LvySpEeHrUTYUq0Axi+MXHaz9UgsCHX6pBSHv/v+8XsuHJ9sJ4mffu/7LWuVabkyACTABJsAE/BJgAc8vQb6eCTABJjACCOAhBK4C6bqDoPfgYy/SSw//0NPu4VpBBcNCbSmR8wmOjx7h+HEqVmDl0irUvWPdaz/+iK675QdakQarcLn5pWuptiQ9aJtW7rtCEu+kQKFSqEKKHd3iPtmxopE2/OJ9pWOf/Pn9YyviSWdpunuv88wo3slNwtUGh6bZfRKGiBeFgAfXYanI+YfCPHZFSqIWspRuLJ+d3HyX4T5BUZ92zaHYG0pYLcJ2Iabmo+EXWW+JX2rNnTVNC51FAYunnn9tkPvO77+T+dgXz8kEmAATYALxJsACXrzPh1fHBJgAE8gLgVffeJs2b91JZ4hcPqikd7bI44OE3XAayAa3ARx56OOlpbv7tHxJhdTwAIsHxpR4gG9uzygXqXByrRQKA3248NU3LKHlKz60XbreiTdl4jhKlZYOeZAvJPFO5jnUh02qnB1En7U/f5d2fbhbpbvWJ44inpnzDLnu7miyFybh1BRarxZWK9vMZA19rX6BMg+Vjihi8bfuLaY58JyuT4rP9inJAy1DaPVibKuLgjwqbjyntcXhfRkyjF9YqLqJjfkhgyzqk0/xzuw8Hnz8JWoT4bTIfYemD61l910c7mBeAxNgAkxgeBBgAW94nCPvggkwASYQGIEldz2kPYgg1x1ChCDawUmwcvV6uu26y3PzIN8P+iKU1muDiNfa0aM93Me96YtU4CHWbcPDLBwpaIVY4MAsXFiG0jqxOPOw8bTP6AQ1b16pnXX95LnUMHU/mnfM2U6XxuJ9vRBhFTZpt1Dku2v600YqEQP1Ccuiyv2OcNopXwhW4PIKU4pXqDJrvHd/176Bnu/Y6Di0Wa7EO8cd7Xid2w47xXfKjT0PDrnssL82DPrZW0c05v4OpyDO5mtlJ5kWsXDjPDNbb6G78fzm85SO5a4eOJb9F3epFSGzlSJ0Nk5NVp+97qsXU3VlOd2w5F7t308p6MVprbwWJsAEmAATKFwCLOAV7tnxypkAE2ACgRNAWBBCY++49epBY+Ph5LJrlgxx3B152hX0F1GNz09DgYfGli4lUcPPPF6v1bvuvIg3xnlVQ/C8rjfo66yKdMh5HvnNM/Tok8+aTjt/7mw67x9Oo/3nZfM/QQhD4n8IWPmqVumWj1/xBvN9eMsbuWkTQi1C2DGEPKcWBxeeFF+sXIeqAp48f63AxR43XhiVaeHCe6Lrba2QxcRNFQThbtLmClPUEPH+Jl44Eyv3HRx0KJIA4QkClJ9WiG48p3x/qjzw2Yfohpx1YCkLf6heL/uheEaFCM+NY8O/k7fv+QUYKtF6dafHcW+8JibABJgAE4gHARbw4nEOvAomwASYQCwIIHQWeXxuFU47PIggUTdCaO8QFfZWCQfe3SJ59317cuHJYhb6inteNxFXEc+pSIXX/Qb1UOx1ftXr3Oz//RWrBoXUzp83OyfcGecrtP27CZc27jW9roU2PTA4xBRuLwgavQ5WvNGfnKIVtchXUzmnr27fK06qrhMiHon/nVQ+mRZXBr8/iHgPr/4TLXyi1HFJWyenadV5PfS18pOH9FXZv+MEhg6F5MaL0/5xyzTUlhFCebkxASbABJgAExipBFjAG6
knz/tmAkyACZgQWLlmA1194x105uJjtBDaS84+gZ4WyblRdRbCHQS+B4RDb/L4BoLbQIp5QcDs7eunIBxuQazFyXUWxBwyJBfJ8CFgxqlFt/+k5sQLMjdWEBz1IaNORUqc5kP47K4/Dg0xlWGbvcKJZ2XGy5eA56ZQh1UBCycu2P+5DdPppIrJnt1YVnP0bkhT+0ObaVtfC3WTdbg7RNRSStCkfcZT1UWTc8Ph53B6oYXlFI27G69OhKkWiUMKe/8ohtLukJJAfh8lSoqdbit+nwkwASbABJjAsCbAAt6wPl7eHBNgAkzAPQHk7nlT5Ld7WVdhFuGzl5x7klZtD8m5kSNPVqR1P4P1FfkW8fS5ztwka/fKIJsYvlQLKVNNDO91LtXr3LjuVMe06icT47stCuF3Xrvrzaqs+pnPSsCTY6J4Aox4ZiG1+RDw5JngflTJ9egmhNbI8Z4Ji7RqzkGHVLc/tIl6N3Rq07UMpKm5P/vfsuFzjlZbVC5e2fDaskX14jWaggiZVr1f4ujG81qsQ3XPxnOAUJoU34NWLlcW77yQ5WuYABNgAkxguBJgAW+4nizviwkwASbgkgDcd1u276I5M6YQKsy+JAQ8KdJdivx3l51FUVTTy5eIJx1xQSVaV8UfpWBgt6YoXHdm8+sFgyAS3KtyN/aTRUZQYThIJ6iTgId1WIXURi3gSfHWjStUpQqt2Znoq9Bm3WiiyImLys5W5yzdd2bvQ8zLCnfmOfHG3jiHUCABrjCvOdq83H9xcePl67vIqsgFi3de7ia+hgkwASbABIYzARbwhvPp8t6YABNgAgoEEAp7j8htBwEPxSsg2uHvDzz+En1euO42b92pue7uNBS2UBjac5coRbygi1R42bQUsbp61FxPXuawuyZK153VOvJZ3CNs4UJfxMJq/7mQWmFHkwHVURWxkJ+Bkj1OqD6VMrm6jXgJo7161P40K1WbG0Uv4rSmM55v8a7Xd1HX67tdX48quZMu34fSDUlyu3/Xk5lckG83nvwM5MsNK93PKFDRLCqT94m0AqNEwRsOmw3i7uIxmAATYAJMYLgQYAFvuJwk74MJMAEm4IHAg0+8TK++9ncyq5iHIhUrReEKFLHIRzU9iHhNIj9cJsT8cNL5oxou6AGx8iUy79aAEE9ahIChUKRUeWyrjvly3VmtR56HGweYXwhSvAzC/WW1FhUXHq5FZCcqokI/S02tpilfWOB3e47XByVeuilmYRTv5CKliFMqXJC4B7wIaV4EPIh3EFATnxilhdHms+XDjVeWKqaailQs8lEihHt0TakWysyNCTABJsAEmAATGEyABTy+I5gAE2ACI5QAxLu2tg763DknaiJdHFu/UDJ2tXYHLuJJ0QLiYGtHxpNQEBYv6URrFPsOU8SLg+vOjKF0YoXtBIpavFRx4UkeCKmd908LqW9cZaj3ZpChq1i7ihPPSrzT3wtSUOroyrgOZXUr4EEwgmCK7xqZBy+sz7bquFG68eQ9EGTYuOo+zfpVlCa0vIjcmAATYAJMgAkwgaEEWMDju4IJMAEmwARiTQAP1k2iUml3pj+QdcZVuNJvTq4xjIfqqIUrL4cWdiL9bL7DpBCH3AtEXvaDa9LrWmjTA+8rXY7Q2TFz67U1wokWdJVeme8vmfDudLPaCIpaIC/emkzroC6LK6doIbP6sFnjGH0re6h/VTZ8tuysqpyQA3ekqhvPjYCnF++0OfcUslA6pAg6he3GC/N7xgue6oqkuOcTXi7la5gAE2ACTIAJjAgCLOCNiGPmTTIBJsAECp8AHuKRXN5rk86uHiEEorqmqiDgdT6/18miGkGGkxaCeCm5SZEJednAIAg3YhzyHW78xXvUuX6wuCX3XD6thkZ/cipV7JPNDee2IqzKPRdUyKzKXCp9INr1/rYjJ9zprymek6S682updmGFspBpV8RCjo3oTITN4jtAf1/VXTdLZcmR9gnLjZfPnJNmAFm8i/S24smYABNgAk
ygQAmwgFegB8fLZgJMgAmMRAJeRDy9aBNmnrMwziMr4JQKwTFDyNPntUlHW6a3MMRL/T6DcgmFIYZ5PQ9ch7x4+lY+rTYn3Ol/LnMjQnRy40QzWxscXQhRbO/q9XU/+dm3/lqIdz0/aHYcrnS/Uhr/3bGkWiG6eclqyzFlvrteQ27NuLnvjBsIyo0n7ycUifBTLMTx0Fx0YPHOBSzuygSYABNgAiOaAAt4I/r4efNMgAkwgcIjAPdcm2KVSulii0ORCq+k/VaHjJto44VD1j2Z8ixkRlGowsu+3FzjN18dHFfILRdGWLabfci+quKd7F8yN0UN3xmj7cHJlWrlwpOFEczct3F03xm5+nXjxc19if3hc43Ks9yYABNgAkyACTABZwIs4Dkz4h5MgAkwASYQMwJOIh4eVGsqk5QUDra4CBZ+EEoHXVdPnxb+q9Lkwz6cNoUQMuy0Jy/iQyHk+3Pat/596SJ0U+BDMoB7LS6OK+yp80s73Gxd65v65zoq379MuFKzYm5Hl7Ur1SjiGfPd6SevumgSJabGs5CPGSQvbjwvnx/XB+TiAjhKId6VpbyJdyvXbKAldz1EV152Fh124BwXM3NXJsAEmAATYAKFS4AFvMI9O145E2ACTGBEE7AS8aTbyo3IUQggEfoG4QKCXItwINrlhJMP+H5Db+PGRTJA1dBmUdhkJDPA2TjlBpTFOuJ2H2Se7tDy3rltyIlX+i+jSNWJBhGv53+bqH9j55B8d5g7MbVcK1xRSOKdZKbKQNun+EXGKCGWxSV0Oismlmrr8tPeemcV3X3fkzR35lS64tIzY1tN3c8e+VomwASYABNgAnoCLODx/cAEmAATYAIFS0Av4g03t5XVocjk842t3UMELDcP9QV76GLhdnnx4lCoIgq2TiG1QeUODGMvXgU8rKX8Z2NzS5JCdWu6R8uPZ2ySwfb3m6l7XXrQ2xDvClG4M+7RyY0XRjEcP/eE/J5OlBT7GWbQtQ8+8TI99fxrdN1XLx7ixmttT9MfXl9Kc2ZNo7kzpgQ2Jw/EBJgAE2ACTCAfBFjAywd1npMJMAEmwAQCI5AWxR36hSULubEg6Pkp9hDYokIeyEyccXqQD3lJkQ8v3WVw4vWI4hxostJwIec8dANS7lfvNtWHzMJ5F0T1XjdrUunrJXxWjoswWuTDk02Gl3dr1aX37jduVVZVuHjtYyXcx03EDUO8k8w2b2ukp198g674whnaj/D3e+5/iraIP1eIcNvPn3tS7j2vnPk6JsAEmAATYAL5JsACXr5PgOdnAkyACTAB3wSQlH5Hc1csxQrfm7MYQDprWjsyVFme0Hr5rVIa1lrDGlefE65YxNeCiVOBg7DWkq9xpXiDsGLkSIRoE/fw8SAFPHBHaHV1ebZIBz4PyH8Zt5x/UdwfehEfnwfkl8PnwaxoRxTr0c8Rpninn+fVN96mBx97UfvRmScvookTxtAN3/8pPX7vLRxiG/Wh83xMgAkwASYQOAEW8AJHygMyASbABJhAPgjAhbVbhJVCyBgprU4IFRVlCeoUwk2TeFAfiQ0utIbaUs2FibDiXpEjcCS20TUpKk0KwUYw6BJutDi37n8XeelWZTwtUR9CaxygUnwWasVnojvTpxWvGYkNonaDyC9XLPLM4Zcacfg8oJgQct5hTWG2t5atpEuvWUL3/+Q6OmzhXEL47DmX30y3Xf9lLbRWuvJeESG1yJt323WX06TxDWEuicdmAkyACTABJhAoARbwAsXJgzEBJsAEmEA+CeBhtbGla9iLePqQubZ0r6jmmIy96yqM+wKOo4rShJacPylyapUmi2PjOApjv2ZjyhDSjBCw4cBDZU+nCq1Rrc1qnqBy4OnH1+cEhCMVWtFIc6Tqq1X3i9jpyjJ8L9hX6w37XsDns6ZCnEfI4p3cBwpbLLnzV3Tm4mNo89adhGq1EPSkmAdXHgpegCV4EwAAIABJREFUyH5PCGceNybABJgAE2AChUKABbxCOS
leJxNgAkxghBCAi2KicEV4dUZAxNslXEhxCBsL48jMct3pH9yRB3C4N1moIincd/oQQSnijJQwWrOcf1LcValWnK/7pG+lyFv4g2bX0yc+W0nJMyqHXMc5IUmrzAsXpj7/Y76L2lQJRyTCmaNuEOtuv+sheuqF13NuvCXi723i53DdyXaZcOtBzINbjxsTYAJMgAkwgUIgwAJeIZwSr5EJMAEmMAIIILwJD1QIbXpTiHifP+dE7eHKS+vt69dC6IaTiOf0MI48YPXVKYqzcOPlLI3XOBWqMCvsEMS8cRvDqUABijggJ1xcxUwvYbTG8FknsVIK270ivBrFTuJY0COI+0qfC7Kjq2/IkPkocFMt7j/co/lqCKXFvyU11ZWaQPfVG/6TXnr4h4Py4B152hUEB57XXxbla288LxNgAkyACYxcAizgjdyz550zASbABGJFAA9cV152Vi5XEcQ8Ge7kZaHDScSTFVdVwuFk9U3kgxtugoWTaCXvE31l0ta0t1xrXu65KK7B3qSrCQUb7ETqrJiZynsYpRUXNyKeWfVZo+PMah4pZiKkVlYsjuKsopgDIm1NRUoItfb5H51+ARDkWvMt3sGBd88vnqbrrrxQ2xbcdxDyZIVa/Azvv/n2Cs2hx40JMAEmwASYQKEQYAGvUE6K18kEmAATGOYE4IbQOyTgyDvxgm9qP/PqkCh0EU+GipaIJPBu8nmpCl2FckvJ8MAeUZyhRQhyKsIkHIm1wgUEdnChqVwTdx7SaaUPk3Ras17MRG68uHFwyodXPCdJCJ0tmZvKbVWKVq3pHq3arEqTzszhVJ3Wy+c8bDdevsU7s3vhhtt/prnwzjzpaO1t/NtytihucedtX9N+YcSNCTABJsAEmEChEGABr1BOitfJBJgAExhmBB584mVauXp9LieRdNydIZKMy3bP/U9pD1v6vEVuMRSqiJd13YkKs919oiiB+7x2wyUfnL5QBVi4bVLkiGsoqep+5D68usikCy2uoeUQ8vpX9eSq00K0g3inF+7AyotoJRlLUdeYO1H1DOLUzw+HsNx4cHtWiO+tuLWnRS68J8Xrjluv1vLg+XV3x21/vB4mwASYABMYOQRYwBs5Z807ZQJMgAnEigAeolaICoHXX3URQbTDQ9bdQrC7T4Q0ScedzIsHF56f1i9yYKGwRUYUuIh7k667lKio6ldskSGUbpxKceFjVajCy/okBzjQvIiAXuYM6hqvLkyz+aVzTSUUO6j1BzUOxLfq8iQlE+7cqGbzuwlJD2r9QY4DMRYc/DpLg3LjocBsQ20ZwSEa14ZfGD3w2ItaDjzkV9X/oiiua+Z1MQEmwASYABMwEmABj+8JJsAEmAATyAsBrQKgyHl3w/d/mhPtblhyr7YW6bhDLiOEOr3sU8DDmIUg4vl13ZkdpAw/7ejqFbnQ3DvY8nFzZIWmpGf3oR0Hr47GfHAwqyzqdx1xD6mN6uz0LjS/QpjfM1G9HiJmQ00pdYtQ8qByO/p142Xv0dJYi3eqfLkfE2ACTIAJMIG4E2ABL+4nxOtjAkyACQxTAhDmUAFQL9rBcXf1jXdo1QMPO2gevfLa3+lw8eclZ58QCAWIeK3pXkp3uw9JDWQBFoME6bqLSgAJi4ef0ECnNclKveI2iH1VUhkCDaFGNc+b0/7l+9LNVipcnhCv4lyt2anCquqerfpJzl5Dk/3Or3p9GGKufm4vbjy5pkRJseo2uB8TYAJMgAkwASbggwALeD7g8aVMgAkwASbgncAJokAFnHUyoThGulPkKJojxLtfinCn1rYOOn7RwVry8aAbHtbjIuKF4boz4yXFqz4RRtwsqpfGrUkxAGJV2MUWwhQJ/XKV4hpciH5DqJ3WIkNJ4xpaHFUeRykSRnHvOZ2J2fthi3dyTjduPBbvvJwkX8MEmAATYAJMwB8BFvD88eOrmQATYAJMwCMBCHj3i3x3Dz7+Ej0l8t8hN5HfXHdultIqRKx2EVaarxa2685qXw
hNhfOqUeQEjEtFUinUoFhHVDnqpHjV3N5DPb1qlUzDvleiEmr0+4hSOHXDL2qRVS+cxqngSVZcLBXOYfWKu244m/V1cuMlxZpGVadE2Cw77/yy5uuZABNgAkyACbghwAKeG1rclwkwASbABHwRQCJxNITEHnnaFTRPuO3OFAUskFAcobT47zAcd1aLhmDUJkIUo25Rue6s9hW1OGK1DohHNZVJQhJ8uCKjDuUMOzzTzX0lRZN8uOHiFFKb7xx9cSr0IYXtsJ2YZveplRsP4h1y3hXjQ8uNCTABJsAEmAATiJQAC3iR4ubJmAATYAIjk4CsJnucCIm94tIzNbcdqs4eKsJjZcVZFKzAz6NuUYp4+XLdmTGNKjzR6jzzLWLKdUmhAqHFLULMzYcrEa7IKEJmnT5bUkSM0u2lX1M+HIhuxCsnfkG+HxeRXe/GGxgoorqqZJDb5LGYABNgAkyACTABFwRYwHMBi7syASbABJiAewL33P+UFiJ7nwiXlWKd+1HCvSIKES8ugpWeZCpRLB7IU5GG52H+uAhWehYytDjKog768NWgqor6/aTka00ypDlf4qG1yJ2M/PMB8a4sVRKbAiO4J8bWlRGcmtyYABNgAkyACTCB/BFgAS9/7HlmJsAEmMCwJ/DWO6vorbdXaK67uLewRLw4ue7MzkAKNh0iH2BHV1+oxxTXfGty01G6EiFYVZUltGIqYXN3e6gQamqFKzApBN4oBM24uM3MOEVZ4ALcIagPiDLJ+XKDmjGoFvcCzogbE2ACTIAJMAEmkF8CLODllz/PzgSYABNgAjEikO7uE7nYegJbURxdd3YiHgpIQMgMo0mRJspCFV72kXUlJoWoFp6wFmfBSs9Mhk+GVehDClb5yoGoen9EUeAiLuHDRiYs3qneJdyPCTABJsAEmED4BFjAC58xz8AEmAATYAIFRCAIES/urjuz44BIUS8qSyIXXLOo0BtUs0qGH9T4YYyjL6QQZGirHDcjqt7GyWFlx1DvQAuaxeialFZ1OCzROOh7IyxxN67iXa0oMFMpXKLcmAATYAJMgAkwgXgQYAEvHufAq2ACTIAJMIEYEegVIlZjSxeJSDbXrVBcd1YbqxMP7QidbGzt9l3QoZBZyDDSElF1M4gKudnqpuE6+1zfrIoXBO2Ui1P1X0UEuW5S6MYPgrgvJIs4OVPhiEQoL/LwcWMCTIAJMAEmwATiQ4AFvPicBa+ECTABJsAEYkTArYhXiK47K9x+Qzz1IYe7Wnuoz4sSGpN7QbJALjjcE16aX55e5gzjGpkjEMJVj3ARemlB8PQyb9DX6Kuzes1hKPMgNomwfa/3VtD7gnjXUFtGEBa5MQEmwASYABNgAvEiwAJevM6DV8MEmAATYAIxIoCH6l3CieYkQBWy08wKt9eCDnEvVOHl9pLVets6M1rIp2qT4cMIS4bDyuk+Uh03n/2kY8xL6Gscqw/7YeknJDqOoq787CZKij1jefWNt+nQA+dQTVWF5zH4QibABJgAE2ACTMCcAAt4fGcwASbABJgAE7Ah0NvXL0Q8cxfZcHLdmSGQwpVqEYNCKVTh5YZ3m6fMj9DlZX1RXuM2jDSsnIJR7tluLilMqjoTh6t4B0YPPvEyPfjYi3Tb9V+mw4SQx40JMAEmwASYABMIjgALeMGx5JGYABNgAkxgmBIwE/GGo+vO7PhU8pVJpxkiZVtFAYzh4DQzYyGFK+wTouaARUTtcAkTdfo4q4TUuhU+neaM6/uywEVXTz/ZFftAjskiEadqd/9EvccgnHf6Na/8aCPd8P2f0mEL59IVl56Zc+PBnXf3fU9StXDnnXXyIjpDvLgxASbABJgAE2AC6gRYwFNnxT2ZABNgAkxgBBOQIh4QQKBJJYstnXnDDZOdCDNShEz9mVo5qMCpRgg0yCMWRIGDQriP7CqzQuCrKE2I8OEMQdga7k0W+0iIGwA5E/VCdlxdiEmR6250TSkV46YNuN3zi6fpqedfo8fvvU
UT8SDsbdnWSHNmTKEbltxLh+8R+AKelodjAkyACTABJjBsCbCAN2yPljfGBJgAE2ACQROAiFck/i/d3avlNBtJTbrPkM+tWbjshlOhCi/nmBUuk5qTCgUdRorTzIyVdGDiPQhXcCbGMUzUyzl7ucZY4CKu9waqzMIRGIZ4J7ltFoLdpPENQzCuXLNBE/GeEOIeNybABJgAE2ACTECNAAt4apy4FxNgAkyACTABjUC/iJ9EYYuMx4qkhY4RD/wlwrUDUcIpXLDQ9+q0fhleDKcVeKjmQHMat1Dfl6Kd/GzEKUw0aqb6sHK43Dq6esVLvQBK2OuFM7KuKhn2NJbjL7nrIfr/7d1LjFxVnufxQ2Q8knzZmKwRDYJaQIkszQJMt9FIdksjI2wWI9k1BglR5Spb7d4YMCyQOj1mWCA8JBILsMGb8siu8hRCKtwkUksFRtALjNTCU0CtKlHBomCoohsbMiMfjoxHev7/G3WScFZkPG7cx4l7v0eywHbEved+zsWSf/zP+WuId/qFSe8z6wV9sU2QGyOAAAIIIOCgAAGeg4vClBBAAAEE3BbQEO9bqbxariR/W+DaldCQZngw6/3y13PtO/S6vZK9zc5umSxkM2ZZqvDSHFippIZW39tQ8FB126xLgVVvK+3v24P5jNk0WjBauetSuDs6lPMqJKMaFz6e8cK6+YUl75/6Q6vynpk84E1h/+NT3j/1SMlH9v/I7Nq5NaqpcR8EEEAAAQT6SoAAr6+Wi8kigAACCLgkoH8p1+20aRhrG1VoOKFBnm6ZrKawGnHttkjtRFqQcxHXnn2WhndDn7HxLDytzNTqrnbNPpJsYxt8aAdr+9+OumiwuV7zkyg8ogzvihLY3X/gKa9pxW5pWDFx2y3ev+s/dWjVnYZ3+nva7ML+/JRU5TXbdhuFD/dAAAEEEEDAZQECPJdXh7khgAACCDgvkIYQzzaqWLsNsB7a5FfPgXN+sQKaoA1n9BzEy8vfbYvspCtrQFNw6jLrnXenoaYGvWkLeZt52DMjNeTV6t04Qu8owzv7gp45+7b55a/fMocf/bHZvnXzVe/tPgnvNMybfOSh1V8/8tz/Nvdsu+uvPuvUC89kEEAAAQQQiEmAAC8meG6LAAIIIBCfgFZ6HHrymNEKEa30OCpbuXqp+NAgZ36pEt8DhXRnDR02SAjTquOuPQfOtTO+wiDppHFHq66sYcwp7mu2qzxUj02j+dRsqW3noYHm2FBethdHu8VYg/YhabwSx9A/b7VhhYZ1WmmnHWkv/O4Tc+TZn692qNV56Z/HOx58wmts0cufx3E8I/dEAAEEEEAgCgECvCiUuQcCCCCAgDMC+pfEPbKtSw9P178knjg9babfPG963baVtBCvm86Z3XzWmRehy4nYZ+ykcYd+VkMrPSOxmMBgV+m6eUb72YqcEzgnHnFuIe1y2Tv++Nouza2+2Ni1Vyt4tQlKWENeRbNprOBtcY57aDXel3/+2qu40yYWY6PD5uDPdq1Oa21ji7jny/0RQAABBBBwTYAAz7UVYT4IIIAAAqEK6IHqGtppYGdHUH9xTEqIZ7cAavikgVUnQwOMcQkKNKSZXUxWNWJ9C3HOqyJr3DLbysVWL2rH3rBDmk7WJ8jP+A1sk7ql1q+HbrnW92p2sdzxf2fdrGN9XgWjVbKuDf0zd+IH3ze7/9KwQivyHj3yItV3ri0U80EAAQQQcEqAAM+p5WAyCCCAAAJhCLz7/kfe9iw9g0k7IerB6edefX71Vvaw9Ydle9cuOVC9l9HPIV5jpZQ+h5/KoI3DOaOhlZ57loRKKxs6aTMCPx42DE3KOXC9nvMX1xbSXv6bbvVdv+Gdvabdgh50gws7r+xA/JV3zfzsMQYPS9fZ+flF87L8TxX9dzrQhvWmcl0EEEAAgSQIEOAlYRV5BgQQQACBpgK2q6GevaQhnQ7dOqvnMen2WT2PyY43ZBvtB1Kdp+fh9Tr0PLi5PqtCW69RhR+L9Zoa+L
lWXN9p3DLba+dQ2+yjmwq+uJ671X2DWtfG7be92sbpVA/fCl1VZjabb+PZikEEva6Hd9Zg5rMvzJnXzhlN+vXPYs69i/Nt5t4IIIAAAv0gQIDXD6vEHBFAAAEEfAlol8N7/v5vzd499151QLpeTA9L1yo8+5dGrdKb/s175tgzh3zda+2XlqQ76ax0m3R96F/2NZhp1ajCzzPYSi2/lWt+7hnUd4IMM+2ceq3UCurZ/FxH5z4mlZU69J0OorKyk4YgfuYa1Xfs+x1E4GbnrNWJG4d7a/jRL+FdVOvEfRBAAAEEEEiSAAFeklaTZ0EAAQQQuEpAm1VoIGdDusafv/HW++blU6+bo4f/0YwOX+tV5f30/h09b6FtnICGeEU53yrEM+p7WnG7fU/PddMts0EPu11SQ5+ynI3XDyOoKrNmz2obHejv9csW47CDR3u+YNRdWXt5F8N8R3ppcJGTikA98y6jnSsYCCCAAAIIIJA4AQK8xC0pD4QAAgggYAW0YcWWOye8n+oWWq3I++eTT68C6cHpZ379lncu3m45+67X8++ayVdrV8zFuZJzIZ4NIbTBQpjhmg0JdVvxYqnm7MsZZafUMAOgIIGjOq+un7bU6toN5ge8ANbPmYidrk+9wq/zxilDhazZOFKvkmQggAACCCCAQDIFCPCSua48FQIIIIDAGgEN695577dm8pGHVgO9sZGhSJxcCvFsRVW5suJV3YUZQljcsKu4el3E+hl1Oa/DbBiViM3mZyvPXK1OjDpktFtqC7lM6OGYn/dF57dxJO99NahtxO3m0WmwOTKYXd3i3O6a/D4CCCCAAAII9K8AAV7/rh0zRwABBBBoIaANLOYXL5uJW2/2PjX10ivm7s0/9DrRnpCOh9PStKKxE23YmNXaion7PLg4tytqADIu2/uWJTgsLlXC5u74+lEHVY0TC3sLc8cIDR+MO0jrtvLMzzN2+x0bpJXK0QW8do6NZwU2q5YdHcp5Z1gyEEAAAQQQQCD5AgR4yV9jnhABBBBInYB2lNWAbu8DO73ATsd+2T77Eznj7jkJ8rZvu8vrehhVBZ5dgLhCvLAaVfh5sTZKM4QBOasr7jPg7Fljej5hUToGR1GJ2MzLzqMmW63nJNgMokGEn3XR7zR23o0zZHVlHo0mUVZnNls/WyVaKn8XgBPe+X3T+R4CCCCAAAL9KUCA15/rxqwRQAABBJoIaNWdVtfp0IDONq+wAZ7+vjat2HLH7bH5RR3ixbE9tB1unFVvOjcXK9/GpJIqzu2jrplo5dkGMcnJ9uawz5tb73117fxGu41Xe1RIQa8ZKgy0+0+N30cAAQQQQACBBAkQ4CVoMXkUBBBAIM0C2rBCw7v1mlFoeDcqZ95FXXXXbE2iCvGialTh572rb5XMRr6tuB9Mwm4ssna97FpoUKbnNbo07Jba4lLZaPVZVMM28Phmftk5k+9tLJjcQCYqCu6DAAIIIIAAAo4IEOA5shBMAwEEEECgNwEN6HQ0Vt31dsVwv70iezcvFZdNJYTAJI5GFX60omzkYLcR6/ZdDcji2jLbzslWTEbVtTfuash2Hvr7thKucftoJ9/z+xlXTbTybnzDoOfBQAABBBBAAIH0CRDgpW/NeWIEEEAAAUcEwgjxbMXSYqliFks1R550/WlEsU3R9S64a3Uau4+GdRZd4xmAUXVV7eVltFtqww5gXQ3v7DucpfKul9eI7yKAAAIIINDXAgR4fb18TB4BBBBAoN8FNMSblSYK2uGyl+FSo4punyPMgM1uD9UgLMotmN0arP18mIFVmN69Pne77+t6jl6b887FK1eD3VJrzyG8KJWxcTYTaRboXj+Wl8o7ts22ez/4fQQQQAABBJIsQICX5NXl2RBAAAEE+kZAt3UuLVd9zdfFRhXdPogGVuNjBbNc+a7LZrfXaPy8DcDyuUzk5+z1Mu+137UVYUGdT2fPdpu/XDHaWbUfR9BbaoN+94I0pfIuSE2uhQACCCCAQH
8LEOD19/oxewQQQACBBAkUpRJvodRdiOdyU4Zul6ax6kwDK79VUP1cYdbMrB7Q5k2voZur20O7fU/0840dWXs509DldyUnZ91dN0rlnZ/3g+8ggAACCCCQRAECvCSuKs+EAAIIINC3AvOXq2Zetnu2G/3SqKLdczT7/V6CJnsGYK9hl595h/mdXoImDbt022lBqhE1GHW1gYcfP7tF2k/n3l5M/cy1m+/oWl0noW1GO1cwEEAAAQQQQAABESDA4zVAAAEEEEDAMYF2IV5SQ6rGZbDBzKVi54GTnmGmW0S7+Y5jS99yOhrEbZKKLB2dVii6HFIFZe9nC7mtaiwulZ07G3GokJWKy1xQPFwHAQQQQAABBBIiQICXkIXkMRBAAAEEkiXQLMTr50YVflbn2kK9YYF2SW3VsMCGVNqkIqyurX7mH9Z3Oq1QjKLDb1jP2O11uwk3bTgc1LmC3c611edHJYTW9Y1iXPh4xswvXjZ/d8ftZmxkKIpbcg8EEEAAAQQQ6EGAAK8HPL6KAAIIIIBAmAKNIV69+UDOazygv56WYUMofeZmTRdsyLdYqpjFUn82ZfCzlva516sgczmk8vO8nX6n3ZmQnYafnd4vyM9FGd4dmTppNMDbsvmHZuYPfzTHnjlkbrphPMjH4VoIIIAAAgggELAAAV7AoFwOAQQQQACBIAWWJLCr1VaMBjYuVgwF+azrXWu9baAuhzFRuNhwc22om3YXu6V2URrCNIa6GoC7eg5glOHd1EuvmC+/umiOS2inY+bTz81z8munXpiM4rXlHggggAACCCDgU4AAzyccX0MAAQQQQCAqAe3G+h+zpUQ1H+jWTkM8Pf9tubIioUzVXD+WN2X59zlp+OG3W223c3Dx8+qi56XVale8Dsba+ECNtIkHLvVz5LTBxdhwzlxZueLk+6IdhockoI9iXPjdJ+bIsz83r518enXbrIZ5+x6fMm+/+nwUU+AeCCCAAAIIIOBTgADPJxxfQwABBBBAIEoBPQPum+KykQwitcOec5YbyHghXpq2Erdb9I0SUA0NZs2CbDVOwzmA7Tzs72vV3YicKbck78vsYvvuzp1eN4jPaYNZDe8G89GEdzrnPQeeMpOP/thskXPv7Djxize8iryj//QPQTwW10AAAQQQQACBkAQI8EKC5bIIIIAAAggELVCVKquLc6XUhnh2a6g2q3B1K2TQa97J9ex5eKVyzevCqxVnrZp+dHLNJHymsbmJVrhpVaIr5yRqeDe+YdDoNuioRrNKO/01DfWOH33sqlAvqjlxHwQQQAABBBDoXIAAr3MrPokAAggggEDsAhriXZJKvFqKSvHs9tmKVCFq1Z0+e9rPebMv4lqH9c5/i/3FjXgCa5uf2K3G+p+NdjWOc3uxDRazUkka5SguLJkdDz5h/u1fTni31fBuv2yd3fvATrN3z71RToV7IYAAAggggIAPAQI8H2h8BQEEEEAAgTgFqtLU4lKxnIoQzwZSzbrv2sozDWTSVnGm24l1+6VWcmnFXWOg23heYBq309bfi2zTpi+6pVarFONqCBNXeGf/vDpxetp8IN1n775zwky/ed7svm+bObhvd5x/nHFvBBBAAAEEEOhQgACvQyg+hgACCCCAgEsCaQjxbHVZq7BlbaWVS2sU1lzW68rbeD8N+DZIWJXL1sOqtFRsdlKZqQHe2FBettNGu6U27vDOvh/ayGLmD380WyTEm7jtlrBeU66LAAIIIIAAAgELEOAFDMrlEEAAAQQQiEogqSGeBh3aNbRZdVkz204CrajWJOz7dLtFtpMQNOw5R3X9TsI7O5fGbdlRdDLOyVl3148VTEZfagYCCCCAAAIIIOBDgADPBxpfQQABBBBAwBWBpIV4tqKu2ZbZVuZp2DbqN4yrh355r4mDuiZt2GrDayQc6/Z8O7ulNswt6UOFrFT8ZQnvkvbi8TwIIIAAAghELECAFzE4t0MAAQQQQCBogRU5mV8bW1SkwUU/DxtQ6b
lt2mm222GDnAGpdtJto3E2Kuh27u0+32vQlNQqxSCCW3uWYhhbakcGJbyTalIGAggggAACCCDQqwABXq+CfB8BBBBAAIEGAe30+NxLr3gHxd90w7g5OnnA+2fYo59DPA1hNLwL6rw2vdZgfiARZ781BlRaQddLKKkB56bRvPcqJiHgDDKUDNLZ/rc+KmcQ6rvIQAABBBBAAAEEghAgwAtCkWsggAACCCDwF4F9j0+Ze/7+b82unVvN/3ntnNfp8dQLk5GEeDoF7Ui6tFztm/UIMoRpfOhuzkNzFQub9VfG2iyWqtKMIphtwRpwjl5b71Lb65ZawjtX/6tiXggggAACCPSvAAFe/64dM0cAAQQQcEzgy68uGg3w3n71+dWZnTg97YV45xp+Lexp90uINzw4YIZli+H85WooZ7PZrZF6Llq52v2W3LDXqdX16zY5U1wq+9pO3G7u1ias67e7fy+/b7vIhjV3a+/3zEDCu15Wl+8igAACCCCAwHoCBHi8GwgggAACCAQo8J//6z4vrGvcNrvjwSfMw/t2m133bQvwTq0vVVysmAWpTnJxBFnp1O75tCnGuHT/1E6j/dLAIarqQb8NQ9qZh/n7UdnYCj89i7GbrcvaLGSoMBAmAddGAAEEEEAAgZQKEOCldOF5bAQQQACBcASOTJ30Lqxn39lxQc7Dm5Jz8c6efDqcm65zVa1sm5fgyqXRGIxos4ooRlhbUYOeuwabGgDp0AC2Js1Jwh5qs3EkZ2rSAEVDzl7O2At7rlGFd/Y5bNBcyGXanqcojGZ8w6DRUJSBAAIIIIAAAgiEIUCAF4Yq10QAAQQQSK2AbqPdL9to11bc3StVeI1ba6MCcinE63VrYi9mQXQr7eX+7b4bd8ioXW47CaqGjKMCAAAfHElEQVTaPUdYvx/n/Nq9t/W1KxDehbX4XBcBBBBAAAEEPAECPF4EBBBAAAEEAhaY+ewLs++xZ83xo4+ZLXfcbvQcPO1OO/nIQwHfqbPLuRDiaQATRHOAzp64+ae0omqDzOMaCVz0XDxXqs3qZ7rlpBlDcA0Z/DhpUKVNHLRDrStnBtrOuVohOCtViXENu91Yt9Q2Vo7a4DU7kIlratwXAQQQQAABBFIiQICXkoXmMRFAAAEEohXQEG/q+K+MVuTds+2u2MI7+9QaDs3FEID4PUsszNWKs5pr7XPZbaEamlUlpIp75LMZb0tt3GGiOsRdlbh2LWwAnBMjXS8d14/lpfKO8C7u95b7I4AAAgggkAYBArw0rDLPiAACCCCAgAgsLde8yrOoRr3TadaJMGi94OxSsRzJWXPNzF0KEhvn58J2Y9fCu0Yfr1JRKiZX5IxCwruo/jThPggggAACCCBAgMc7gAACCCCAQIoENMQrLpZN2P0Rom444GcJ6wGjbhldjrT6zYWArJ3X2mqzKBpq2DnVt6sWvO6vLnYOzsn89My7jHauYCCAAAIIIIAAAhEJEOBFBM1tEEAAAQQQcEVAt2penCuFEuLZcKpSXXG+q6muh4ZF4xLGaAfWKMIilyvLmr2fUW/xrTeMyHpbVF3YUrzWZKiQ9bYYMxBAAAEEEEAAgagFCPCiFud+CCCAAAIIOCAQRohnmzFoEKaNM/plRBWq2XBKmyBoM4R+GfVz8fKhV8S5XrWp22Z1jgwEEEAAAQQQQCAOAQK8ONS5JwIIIIAAAg4IVGsrJqgz4KKu1AqaL+xtra6HU+08G5uRNHZhbfe9Tn9ffQbzA17lXZTbdTueH+Fdp1R/9Tlt5LPv8Slz+oVJc9MN497vP/o/j5u775wwe/fc6/u6fBEBBBBAAIG0CRDgpW3FeV4EEEAAAQQaBHoN8TTY0S2FeqZeUbrcuhi+dLrgeu6bVprp0GYfVwJoCqs+Y8P1LZdBXbPT5wn6c+qzabTuo0FbED5hmAf93FTe9S564vS015H76OQBc+bs22b6N++Zsyef7v3CXAEBBBBAAIEUCRDgpWixeVQEEEAAAQSaCf
gN8erNBvLe2XH9tGW23VsQVHfYqLbmtnueoH8/qGpCW/VYKrv7/myQ8FXP5GP0LrDjwSfM7vu2mdffPH9VNV7vV+YKCCCAAAIIpEOAAC8d68xTIoAAAggg0FKg2xDPhjizCxVTloYVSRu9hlT18wDzZrFUkR+1pPEY28G3uFT2dZ6f6+GmNpgdG86bIelUzAhGYObTz82eA0+ZvffvMJOPPBTMRbkKAggggAACKRIgwEvRYvOoCCCAAAIItBLQEO9b2RpZkS616w27JTQrCYer55UFtcoaUm2Qs88uFpe76ojaa/gX1PzDvo7fCkz7vcVS1clwU8O78Q2DXofiMIZuIb3w0e/NxA++b34iZ8CNjQyFcRvnrqnn4On4k2ylPdVwHp5zE2VCCCCAAAIIOCpAgOfowjAtBBBAAAEE4hBYkcPsLklg1SzEc71qKgwvDXHGxwpmTjrH6lbhVkPPcxu9NmcKuUziw03rYM9ArEnoq0btzsWzlXvfzHcXioaxts2uad/x7EAmtFueee2cmbjtFnPhd5+Yd977bSrOgms8927qpVfM/MKSdx4eAwEEEEAAAQQ6FyDA69yKTyKAAAIIIJAKAQ3xvpUmDsuV77bG2qoy7UBaKidvy2yrhe0kuAy7S6vrL56eG6jbhlt1NXa9MjGK8G7tOh568pi5Z9tdZpecDZfUoc0rdOvs6RcPm4lbbzZFCe/u1620D+ykC21SF53nQgABBBAIRYAALxRWLooAAggggED/C+j5dpfLVW8baV6qylqFM/3/tK2fwDZc0FBTQ8zG4XcradLMhgcHvApE3Vq99lxEwrvmq63B1sP7f2S2b92ctNdh9Xk0wNOKO606tENDvE/kTLwtd04k9rl5MAQQQAABBIIWIMALWpTrIYAAAgjEKnDh4xlzu/xFsfFcqWZ/gYx1kn1085pU4y3JWWVJ6jLrl1+3yG4cyZsrYmK3i2popV1Kk9rMo1urZufbbZROrrlsxjtLsN0W227vF8Tnc7JN+jrpphzmtln9M0i3zP7pz197U9YA693zH5rtUn2nDR3093VrrQ5t8nDTDeNBPBrXQAABBBBAAIEECRDgJWgxeRQEEEAAAWOOTJ30GBrPV9LD07X6g86H/t4QDe/m11Sd+btSMr6l20X1nDutxmu3bTQZT9zdUzRWK1qntVWL3V0xvE8P5geMBowZ7VwR0jhxetr8UsK5n0owd+PffM/7nwujw9eaGyWk06DujbfeNy+fet08vG+3uSIp8Ru/ec88I+fDEeKFtCBcFgEEEEAAgT4VIMDr04Vj2ggggAACzQW0Ak9DvHOvPu99QCtddjz4hDl+9DGz5Y7bYfMpQIj3HVzjWWn/MVvqqkOtT/6++5pW4v2njYNGz1P8em7ZaCWna2OokJWKylzo09LqOv0zqdn/RJj57Auz77FnV8+H08loJZ5+h//hEPrScAMEEEAAAQT6SoAAr6+Wi8kigAACCHQioIGdVrPowfD6l+F3ZKva6RcmO/kqn2khQIhnTGNDi6qEUno+oG4NrUoXVkZdoNFIf66dZ/VcPJeMRmXd9Fy+KId2X52Rc9+0OthW1+mfVWubOZz4xRumOL9IgBfl4nAvBBBAAAEE+kCAAK8PFokpIoAAAgh0J/DGm+e90O7YM4eMbp/VLo96rhSjd4E0h3i6XXZsKG8WSxX5UfMw62e+FeSMQGn4sVz/tTQPa1RcKq92K87L+Xd6dmCjW5xGcYR39nn1HLwzv37L+7Pp3fc/Mr+Uf2/8nwtUDMf5ZnBvBBBAAAEE3BYgwHN7fZgdAggggIAPAd1+ppUtZ08+7QV4up22samFj0vylQaBJQmqZhfKqTJp1UW1seIszc0+bEOPZtV21qhU/usuvlG+SHGGd2ufU8/G03FQqoXt0K22o3JGHttno3wruBcCCCCAAAL9IUCA1x/rxCwRQAABBLoU0L8I63a1LXdO8JfhLu06+XhaQjztPKvbZLWLqgZT653l1t
i4wdWGDZ2sq9/PtAo47TVtF1/tF6GWUXek1SrAIdnO68rQ8zpflhBPq/H0fzBooKeVw6dkuz//w8GVVWIeCCCAAAIIuCNAgOfOWjATBBBAAIEABfQvx1p9d/rFwzSvCNC18VKlcr0Sz8H+BIE8cbdVYxpQbRrNm5qchzcnXXujDqgCeWgfF7FdeVsFnI2X7STs8zGNdb+igaGGd9px1rVx5uzb3jbaeWm2o1v9tRqP7rOurRLzQQABBBBAwA0BAjw31oFZIIAAAggELEDzCmM0xPzy3y+Z7Vs3h1bRo40JLs6VEhfi1c+2y3vn2nW7LbbbQCvgVz+yy9mKuiuS4HYbWGpji9Frc6bxrLwwJl4PYQveWYWuDj33joo7V1eHeSGAAAIIIOCOAAGeO2vBTBBAAAEEAhDQvwz/q2xDe1Y6Ph4/+lhqq+90O55uIb7xhnEzLU09Dj/6Y7Nr59YAhP/6EhriXZJOrOttLw3lpiFetNVZbp3eNuoqs07nFdTngjj3r5eQtJPnsHPMDmQ6+TifQQABBBBAAAEEnBYgwHN6eZgcAggggEC3AtqBVgOr3fdtM7vkRxqHdrp8+dTrq90tZz77wjx65EUv0Jy49eZQSKq1FQnx1j8jLpSbhnBRrZ7TTqpBPItWmen5eRcl3NSQMykjiPDOWui1No7kAt92THiXlLeN50AAAQQQQAABK0CAx7uAAAIIIIBAwgTeff8j8857vzVHJw+sPpkGm3pgvnbkDWv0c4hnm1BUqitdbwdt5ZmX5hd6/tr85Yq3Hbffh62a023FQT5PkMEp4V2/v2XMHwEEEEAAAQSaCRDg8V4ggAACCCCQAAEN7b7889de1aEeiL9fGnhoN8vGA/G1M+/d0pU3zMrEfgzxgqwoa/YqhX39qF7f+rl1Wa+DbBgVhbp1Wc/F0+uXJUj1M3Jy1p2eeZfRzhUMBBBAAAEEEEAgQQIEeAlaTB4FAQQQQCB9Al9+ddEL67ZIMKf/ruHd2ZNPGw3r9DzA488cWkXRphbTb71vjv7TP4QK1U8hXpTNFLQphnbu7bYpRqiL1eHFozrTz1b4LZaqZrHUXcXiUCHrbcdlIIAAAggggAACSRQgwEviqvJMCCCAAAKpEZiSZh1jo8Pm4M92ec+848EnvMq70ZEhc/+Bp7yzAA/u2+393olfvGGK84tm8pGHQvdZkc6k2tii4vDZb1GFUhZbu7ZqiFcTk267toa+YC1uELWT3c68XFnxth5f6eD4wJHBrBkbJryL8z3h3ggggAACCCAQrgABXri+XB0BBBBAAIFQBbT6TgM6rcDToT9/Rs6+062zWpF36MljXpinP9cKvLXbasOcnKshngZpei5dVrZZ6nbNqLvn6nlvhVwmlnt3s97qpE04rhGn2YVyR0FaN9dv9Vl775ycIdhujUZljhoyMhBAAAEEEEAAgSQLEOAleXV5NgQQQACBxAtoKGfDO33Y/y5Vd/8sW2gbh3al1fSl8XNRwWiIV1yqmqXlalS3bHkfV86ji7qqrVv8xiq44lKl268H9nnrtN65e4R3gVFzIQQQQAABBBBwXIAAz/EFYnoIIIAAAgh0KmAr7vQMPB3a2OL2W2++qpFFp9cK+nOzC5XYQ7x6R9icnK3W/flqQXvo9bRpw7Bs/QyrKYTfObsSctr5206+i6XKVefibZAts+rHQAABBBBAAAEE0iBAgJeGVeYZEUAAAQRSIaANKmb+8EfvjLsTp6fNO+c/NMekiUVjJ9o4IYqLFbMg4Vkco10lVxxz0nvacErPeru83F3ThjDm3EsTiTDmY69pQ8VSeUUqOiveFugh6YrLQAABBBBAAAEE0iJAgJeWleY5EUAAAQQSL6AB3oWPfm9mPv3c3LPtrtXmFS49uHZgnY94S6aeOTeYz0hTjejPu+vE3pWKNzUaG8pLQFaWbrkrnUw90s/ouXjXSXBXyA0Y/XcGAggggAACCCCQJgECvDStNs+KAAIIIJBoAd0y+z
+e/bk5LBV4u6T7rKsjqhDPTzfTuMziDvFcP5NP18UaZQcycS0T90UAAQQQQAABBGITIMCLjZ4bI4AAAgggELyAnoPnypbZVk8XdogXdyDmZ2W1qmzTaN7UalfMnFQpSt+RSAbhXSTM3AQBBBBAAAEEEOhJgACvJz6+jAACCCCAAAJ+BcIK8erNIXLObgVt56Vbfgu5jNfcoiZdfMMcUd7L73NQeedXju8hgAACCCCAQJIECPCStJo8CwIIIIAAAn0msCSNG2YXyoHNuh+qyTp52LCfQ6v9xscKplJdMbPSXMTVkRu4xlwnVYlsm3V1hZgXAggggAACCEQlQIAXlTT3QQABBBBAAIGmAkGEeBpIaWfSjPxzdqESeuVaFEtZryTMepV4VdlWG9Tol+3FWoWoTSsyuqgMBBBAAAEEEEAg5QIEeCl/AXh8BBBAAAEEXBAoSyXYN8Vl42fHaL8EUn6c89mMF0zOX66Yy1Kt2OvoF6uhQlaeO9fr4/J9BBBAAAEEEEAgMQIEeIlZSh4EAQQQQACB/hbQKrOLc6WuQrzBfMboOW6Lpar86D3gclEwqNAt6DAwLKtRWU/dQsxAAAEEEEAAAQQQ+E6AAI+3AQEEEEAAAQScEdAQ75JU4nXSvMGeExf0FlNnMBom0muIF9Z23KCtCO+CFuV6CCCAAAIIIJAUAQK8pKwkz4EAAggggEBCBKq1FQnxWndg1ao7rb5r97mEkHiPoef8bZKGDjUJOeeWKuZKh8fihd0QIyhjwrugJLkOAggggAACCCRRgAAviavKMyGAAAIIINDnAuuFeLYSrVReMUUJsdI4Ng7nTE7OxrsolYrtQjwN7wbzA14jjE6qGuPy3CDPpA07ohozn35u/vTvl8z2rZujuiX3QQABBBBAAAEEehIgwOuJjy8jgAACCCCAQFgCa0O87MA1XgWaNnOYv1wN67Z9cd12VXW2K68+zOxCuW3QF9dDa4NZbdKhIWOYo7iwZP7v7z4x77z3W6PhnQ7959mTT5uJ224J89ZcGwEEEEAAAQQQCESAAC8QRi6CAAIIIIAAAmEIaIj3rVSP5XMZaWyQM7OLZaPVdwwjFWsDXtXa2jMAez0vLypbDe/GNwwaDWbDGhrcPTl10gvrtm+7y9wjP26XwO7+A0+ZvQ/sNHv33BvWrbkuAggggAACCCAQqAABXqCcXAwBBBBAAAEEghZYWbniVZDpllGXt4EG/dydXG9tZ9l+Ce/sPLMDmU4es6fPnDg9bT74eMYce+aQGRsZMo8+eczcdMO4mXzkIe+6X3510bx7/kMz8YPvmy133N7TvfgyAggggAACCCAQlgABXliyXBcBBBBAAAEEAhPQEO9b2Qq6XKH6bi2q3VqslYna2GOxVJUftcDsg75QlOGdnfuZs2+bM79+y6vCuyBhnm6d1aHh3vSb583u+7Z5Id/dd06Yg/t2B/3IXA8BBBBAAAEEEOhZgACvZ0IugAACCCCAAAJRCcwuVMzScrrPv2tmrdtpNwznzYKcDehyc484wrvVEO+1c2bqpVfM5KM/9rbO2lDv1AuTXkWejj2ytVYr87ZIkMdAAAEEEEAAAQRcEiDAc2k1mAsCCCCAAAIItBUgxLuayDa00LPwtJtrrXbFzEmH3nYdattCB/yBnJx1d/1YwWT08LuIh56Fp+HcYQnv9Dw8DegePfKiOf3iYTNx682rs9n3+JR5WCrwCPAiXiBuhwACCCCAAAJtBQjw2hLxAQQQQAABBBBwTUC70M5LSJX20awb7UYJ8XLZjHdmoCshnnaZ1XnFEd7Zd+SCdKG1Z9xpUHf35h+agz/btfoK6e9rqHfu1ee9s/IYCCCAAAIIIICASwIEeC6tBnNBAAEEEEAAgY4F0h7itQrqmgV7HcMG/MER6ZQ7JuGdS+PeB5/wzsGzQZ1W6NGZ1qUVYi4IIIAAAgggsFaAAI93AgEEEEAAAQT6ViCNIZ6eI7dpNO819Gh13p2eizcs4Zlura
3Ktto4xuhQzmiY6NrYLxV4ex/YabZv3ex1oT0knWl126ztTOvafJkPAggggAACCCBAgMc7gAACCCCAAAJ9LZCmEM82gbi8XDP63O2GdqUdG8rLZytGvxPlcDW8UwMb2mkFnv67dqGl+2yUbwf3QgABBBBAAIFuBQjwuhXj8wgggAACCCDgnMBiqWrmFpN9Jl634Z1dpKw0j9CKPTVaLEUT4rkc3jW+vNrQ4kbpQMuZd879J82EEEAAAQQQQGCNAAEerwQCCCCAAAIIhCag1U1nXjtnxkaHzU/23BtqULIkFWazC+XQniXOC9tKuuJS2ZTKK11PxW/41/WN5AsbR/JmqDDg56t8BwEEEEAAAQQQQGAdAQI8Xg0EEEAAAQQQCEXg3fc/MlPHf2UO7v+RmfnDH8275z80p16YNDdJxVNYQ0O84mLZrMRz5FsojxXUWXbXXGPM+FjBVKorZk46+AbdoVaO5jPjGwaNVvwxEEAAAQQQQAABBIIVIMAL1pOrIYAAAggggMBfBPZJowBtCjBx2y3er5w4PW2m3zwfeoinDRsuzpUSEeKF0U22Vfdavy9vvcKvQHjnF5DvIYAAAggggAACbQQI8HhFEEAAAQQQQCAUgT0HnjJHJw+sBng2xNNttfrrYY4khHhj0sG1kMt4XWRrAZcUBhkM2u252YFMmEvKtRFAAAEEEEAAgVQLEOClevl5eAQQQAABBIITsOfdadWdjjNn3zYffPR7c/yZQ1fdZMeDT4Rehac3rNZWzKVi8OFXcGLNr6RbXbXpRE0qCcPY6mrvGsTWXMK7sN8Gro8AAggggAACCNQFCPB4ExBAAAEEEEAgEIHpt943R579uXl4325zUH7o0LBu933bVn+uv3boyWNm7/07zJY7JwK5b6uL9FuIF2WzCXXrpTlGTs66u06CRirvQn+NuQECCCCAAAIIIECAxzuAAAIIIIAAAsEIXPh4xjvjTv+594GdZq90ndWqvP1yFp4N8ezPXzv5dKgdaRufqF9CvKjDO2ukTSe04m+xVJUftY5eBg3v9My7jHauYCCAAAIIIIAAAgiELkAFXujE3AABBBBAAIF0CGhw9450mtXqOm1gcVo6zo6ODJn5hSVzZOqkmfn0c68D7cPSlXb71s2Rorge4mmIph1idcvsZemkG/XoJjwcKmTNxpFc1FPkfggggAACCCCAQKoFCPBSvfw8PAIIIIAAAsEJaHXdcy+9Yo7JmXd6/t3Lp173quxstV1Rgjz9eVxDQ7xvpSFERc6Wc2kEcRZdEM+jZ+9piFiprpjZxUrTS45KYw1tgMFAAAEEEEAAAQQQiFaAAC9ab+6GAAIIIIBAYgU0oHtMzrc7JZV3J05Pm9dlO60Gdmdlu6wrY0W6uV4qLjsT4gXZDTYo443DOZPLZsxFcbrSkHUS3gUlzHUQQAABBBBAAIHuBQjwujfjGwgggAACCCCwjoA2rbhRtsneLQ0qtJHFlFTk3bPtrkgaVnS6KBriaYVZqRz9VtXGOboY3tn5rZ0b4V2nbxefQwABBBBAAAEEwhEgwAvHlasigAACCCCQSgE9++5H0nV2l/xwfcwuVMzScjXyaepW1Y0jee++swvlq6rcIp9Mixvarb3l6hUzVBhwaWrMBQEEEEAAAQQQSJ0AAV7qlpwHRgABBBBAAAErEHWIp80itOPrcmXFFKVhhctDG8xukjPx8rKdloEAAggggAACCCAQrwABXrz+3B0BBBBAAAEEYhaYv1w18xGEad10eo2ZxGh4N75h0Gh3XAYCCCCAAAIIIIBA/AIEePGvATNAAAEEEEAAgZgFwg7xNAjTyrvFUlV+xHv2XjtqGzRmB6i8a2fF7yOAAAIIIIAAAlEJEOBFJc19EEAAAQQQQMBpgbBCvMF8xowN5WXLbFkaZ6w4bUB45/TyMDkEEEAAAQQQSLEAAV6KF59HRwABBBBAAIGrBYIO8VzuNLt27Qnv+K8BAQQQQAABBBBwV4AAz921YW
YIIIAAAgggEIPA0nLN6w7b6+in8C4nW3yvky2+bJvtddX5PgIIIIAAAgggEI4AAV44rlwVAQQQQAABBPpYoNcQb2woZwq5jPlmvmxqK1eclhjMD5iNwzmT0c4VDAQQQAABBBBAAAEnBQjwnFwWJoUAAggggAACcQuUqyvmm+Ky6SZ/u0a7t44VTEW+O7tYifsR2t5/qJA1G0dybT/HBxBAAAEEEEAAAQTiFSDAi9efuyOAAAIIIICAwwLV2hVzca7UUYhnz5C7LFtw9Sw918eoVAnqNl8GAggggAACCCCAgPsCBHjurxEzRAABBBBAAIEYBTTEuySVeK22whLexbhA3BoBBBBAAAEEEEiBAAFeChaZR0QAAQQQQACB3gSqtRUJ8ZqfZ5fPZmQbal6q7ipGq+9cH1FV3r3x5nnz8ulpoycA3nTDuDk6ecD7JwMBBBBAAAEEEECgewECvO7N+AYCCCCAAAIIpFCgWYg3PDhghgezXrMKrdRzfWjQOFQYCH2aJyS4m5YA79QLk15oN/PZF+bQkRfNuVefD/3e3AABBBBAAAEEEEiiAAFeEleVZ0IAAQQQQACBUAQaQzw9P+5aCcPWq8wLZQI+L6oNZjW8046zYY8zZ982U8d/ZbbcOWFu+pvvmYM/2+WFeP/lvx00//YvJ8K+PddHAAEEEEAAAQQSKUCAl8hl5aEQQAABBBBAICwBDfG0Q21uIONV3rU6Gy+sOXRz3fr5fAWTHZAUL4KhQd3/OvyPZvvWzVdV3hUXlszYyFAEM+AWCCCAAAIIIIBA8gQI8JK3pjwRAggggAACCEQgcFEaW5QrKxHcyf8tbHONrISNUYyZTz83R6ZOmrMnn1693Z4DT5nJRx7yKvK+/OqieVJ+///JP3/6wE6zd8+9UUyLeyCAAAIIIIAAAn0vQIDX90vIAyCAAAIIIIBAHAIrK/XutBVHz76LOrzTNdAA79CTx1bPutOz7/Y99qz38399/yPzrGytPSxh3t9JmHfmtXNeRd7BfbvjWD7uiQACCCCAAAII9JUAAV5fLReTRQABBBBAAAGXBDTEKy5VzdJy1aVpmTjCOwugDSw++HjGTNx2i3n3/Idmr1TaafWdBnmnXzxsJm69edWKc/Gcem2YDAIIIIAAAgg4LECA5/DiMDUEEEAAAQQQQAABBBBAAAEEEEAAAQQI8HgHEEAAAQQQQAABBBBAAAEEEEAAAQQQcFiAAM/hxWFqCCCAAAIIIIAAAggggAACCCCAAAIIEODxDiCAAAIIIIAAAggggAACCCCAAAIIIOCwAAGew4vD1BBAAAEEEEAAAQQQQAABBBBAAAEEECDA4x1AAAEEEEAAAQQQQAABBBBAAAEEEEDAYQECPIcXh6khgAACCCCAAAIIIIAAAggggAACCCBAgMc7gAACCCCAAAIIIIAAAggggAACCCCAgMMCBHgOLw5TQwABBBBAAAEEEEAAAQQQQAABBBBAgACPdwABBBBAAAEEEEAAAQQQQAABBBBAAAGHBf4/q8iactQmMFIAAAAASUVORK5CYII=", + "text/html": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Let's try 3D!\n", + "\n", + "tsne = TSNE(n_components=3, random_state=42)\n", + "reduced_vectors = tsne.fit_transform(vectors)\n", + "\n", + "# Create the 3D scatter plot\n", + "fig = go.Figure(data=[go.Scatter3d(\n", + " x=reduced_vectors[:, 0],\n", + " y=reduced_vectors[:, 1],\n", + " z=reduced_vectors[:, 2],\n", + " mode='markers',\n", + " marker=dict(size=5, color=colors, opacity=0.8),\n", + " text=[f\"Video: {t}
Text: {d[:100]}...\" for t, d in zip(video_numbers, documents)],\n", + " hoverinfo='text'\n", + ")])\n", + "\n", + "fig.update_layout(\n", + " title='3D Chroma Vector Store Visualization',\n", + " scene=dict(xaxis_title='x', yaxis_title='y', zaxis_title='z'),\n", + " width=900,\n", + " height=700,\n", + " margin=dict(r=20, b=10, l=10, t=40)\n", + ")\n", + "\n", + "fig.show()" + ] + }, + { + "cell_type": "markdown", + "id": "9468860b-86a2-41df-af01-b2400cc985be", + "metadata": {}, + "source": [ + "# Time to use LangChain to bring it all together" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "129c7d1e-0094-4479-9459-f9360b95f244", + "metadata": {}, + "outputs": [], + "source": [ + "# create a new Chat with OpenAI\n", + "llm = ChatOpenAI(temperature=0.7, model_name=MODEL)\n", + "\n", + "# set up the conversation memory for the chat\n", + "memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)\n", + "\n", + "# the retriever is an abstraction over the VectorStore that will be used during RAG\n", + "retriever = vectorstore.as_retriever()\n", + "\n", + "# putting it together: set up the conversation chain with the GPT 4o-mini LLM, the vector store and memory\n", + "conversation_chain = ConversationalRetrievalChain.from_llm(llm=llm, retriever=retriever, memory=memory)" + ] + }, + { + "cell_type": "markdown", + "id": "e85ddd60-6d97-44c4-a47a-1c7a6d4ce4df", + "metadata": {}, + "source": [ + "### When you run the next cell, you will get a LangChainDeprecationWarning about the simple way we use LangChain memory. They ask us to migrate to their new approach for memory. Just ignore this. Ed feels quite conflicted about this. 
\n" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "968e7bf2-e862-4679-a11f-6c1efb6ec8ca", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Ed Donner currently lives in New York City.\n" + ] + } + ], + "source": [ + "query = \"Can tell me where Ed Donner currently lives.\"\n", + "result = conversation_chain.invoke({\"question\":query})\n", + "print(result[\"answer\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "e6eb99fb-33ec-4025-ab92-b634ede03647", + "metadata": {}, + "outputs": [], + "source": [ + "# set up a new conversation memory for the chat\n", + "memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)\n", + "\n", + "# putting it together: set up the conversation chain with the GPT 4o-mini LLM, the vector store and memory\n", + "conversation_chain = ConversationalRetrievalChain.from_llm(llm=llm, retriever=retriever, memory=memory)" + ] + }, + { + "cell_type": "markdown", + "id": "bbbcb659-13ce-47ab-8a5e-01b930494964", + "metadata": {}, + "source": [ + "## Now we will bring this up in Gradio using the Chat interface -\n", + "\n", + "A quick and easy way to prototype a chat with an LLM" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "c3536590-85c7-4155-bd87-ae78a1467670", + "metadata": {}, + "outputs": [], + "source": [ + "# Wrapping in a function - note that history isn't used, as the memory is in the conversation_chain\n", + "\n", + "def chat(message, history):\n", + " result = conversation_chain.invoke({\"question\": message})\n", + " return result[\"answer\"]" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "b252d8c1-61a8-406d-b57a-8f708a62b014", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "* Running on local URL: http://127.0.0.1:7861\n", + "\n", + "To create a public link, set `share=True` in `launch()`.\n" + ] + }, + { + "data": { + 
"text/html": [ + "
" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# And in Gradio:\n", + "\n", + "view = gr.ChatInterface(chat, type=\"messages\").launch(inbrowser=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5435b2b9-935c-48cd-aaf3-73a837ecde49", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.11" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From 4f4a5a429f2a0e29a0f383f71ff84a98e9420e97 Mon Sep 17 00:00:00 2001 From: Ivo Brett Date: Sun, 26 Jan 2025 22:13:24 +0000 Subject: [PATCH 4/4] new project --- project/data/all_data.json | 3021 ++++++++++++++++++++++ project/data/daily_routine_data.json | 163 ++ project/data/raw_data.json | 96 + project/data/rules.json | 60 + project/data/test_data.json | 2525 +++++++++++++++++++ project/data/training_data.json | 498 ++++ project/day1.ipynb | 679 +++++ project/day2.ipynb | 3502 ++++++++++++++++++++++++++ project/items.py | 83 + project/runner.py | 102 + project/testing.py | 97 + project/utils/data_curator.py | 71 + project/utils/data_generator.py | 77 + 13 files changed, 10974 insertions(+) create mode 100644 project/data/all_data.json create mode 100644 project/data/daily_routine_data.json create mode 100644 project/data/raw_data.json create mode 100644 project/data/rules.json create mode 100644 project/data/test_data.json create mode 100644 project/data/training_data.json create mode 100644 project/day1.ipynb create mode 100644 project/day2.ipynb create mode 100644 project/items.py create mode 100644 project/runner.py create mode 100644 project/testing.py 
create mode 100644 project/utils/data_curator.py create mode 100644 project/utils/data_generator.py diff --git a/project/data/all_data.json b/project/data/all_data.json new file mode 100644 index 0000000..a28c152 --- /dev/null +++ b/project/data/all_data.json @@ -0,0 +1,3021 @@ +[ + { + "input": [ + {"timestamp": 1737806400, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1737806460, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737806700, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737807000, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1737807300, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737807600, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1737807660, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1737808200, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1737808500, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737808800, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737810000, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737811200, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737811800, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737811860, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1737828000, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737828060, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737828120, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1737828180, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1737829200, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1737831600, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737831660, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737835200, "room": "livingroom", "nodeId": 2, "onOff": false}, + 
{"timestamp": 1737838800, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737838860, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1737839400, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737849600, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1737849660, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1737850800, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1737852000, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737852060, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737855600, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737859200, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737859260, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1737859800, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "normal", + "reason": "The data reflects a consistent daily routine, including medication times, meal preparation, and relaxation periods. No unusual activity is detected." 
+ }, + { + "input": [ + {"timestamp": 1737892800, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1737892860, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737893100, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737893400, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1737893700, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737894000, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1737894060, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1737894600, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1737894900, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737895200, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737896400, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737897600, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737898200, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737898260, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1737914400, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737914460, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737914520, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1737914580, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1737915600, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1737918000, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737918060, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737921600, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737925200, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737925260, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1737925800, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737936000, "room": "kitchen", "nodeId": 
2, "onOff": true}, + {"timestamp": 1737936060, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1737937200, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1737938400, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737938460, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737942000, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737945600, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737945660, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1737946200, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "normal", + "reason": "The data shows a consistent routine similar to Day 1, with no deviations. All activities align with expected behavior." + }, + { + "input": [ + {"timestamp": 1737979200, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1737979260, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737979500, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737979800, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1737980100, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737980400, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1737980460, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1737981000, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1737981300, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737981600, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737982800, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737984000, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737984600, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737984660, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738000800, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738000860, 
"room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738000920, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738000980, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738002000, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738004400, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738004460, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738008000, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738011600, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738011660, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738012200, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738022400, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738022460, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738023600, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738024800, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738024860, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738028400, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738032000, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738032060, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738032600, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "normal", + "reason": "The data shows a typical day with all expected activities occurring at regular times." 
+ }, + { + "input": [ + {"timestamp": 1738065600, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738065660, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738065900, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738066200, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738066500, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738066800, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738067100, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738067400, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738067700, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738068000, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738068300, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738068600, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738068900, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738069200, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738069500, "room": "bathroom", "nodeId": 3, "humidity": 7500} + ], + "result": "anomalous", + "reason": "High humidity persists in the bathroom with no movement or room transitions for over 45 minutes, suggesting a potential fall during shower." 
+ }, + { + "input": [ + {"timestamp": 1738152000, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738152060, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738152300, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738152600, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738152900, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738153200, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738153260, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738153800, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738154100, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738154400, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738155600, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738156800, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738157400, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738157460, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738173600, "room": "porch", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738184400, "room": "porch", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738195200, "room": "porch", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738206000, "room": "porch", "nodeId": 1, "occupancy": 0} + ], + "result": "anomalous", + "reason": "Person left home in the morning but did not return, missing all subsequent daily activities including medications and meals." 
+ }, + { + "input": [ + {"timestamp": 1738238400, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738238460, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738238700, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738239000, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738239300, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738239600, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738239660, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738240200, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738240500, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738240800, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738242000, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738243200, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738243800, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738243860, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738260000, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738260060, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738260120, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738260180, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738261200, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738263600, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738263660, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738267200, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738270800, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738270860, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738271400, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738281600, "room": "kitchen", "nodeId": 
2, "onOff": true}, + {"timestamp": 1738281660, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738282800, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738284000, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738284060, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738287600, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738291200, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738291260, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738291800, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "normal", + "reason": "All activities follow the expected pattern with normal timing and sequence." + }, + { + "input": [ + {"timestamp": 1738324800, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738324860, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738325100, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738325400, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738325700, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738326000, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738326600, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738326900, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738327200, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738328400, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738329600, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738330200, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738330260, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738346400, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738346460, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738346520, "room": "kitchen", "nodeId": 2, "onOff": 
true}, + {"timestamp": 1738347600, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738350000, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738350060, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738353600, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738357200, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738357260, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738357800, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738368000, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738369200, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738370400, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738370460, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738374000, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738377600, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738377660, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738378200, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "anomalous", + "reason": "No fridge activity detected throughout the entire day despite multiple kitchen visits, suggesting potential eating issues or forgetfulness." 
+ }, + { + "input": [ + {"timestamp": 1738411200, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738411260, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738411500, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738411800, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738412100, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738412400, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738412460, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738413000, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738413300, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738413600, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738417200, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738420800, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738424400, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738428000, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738431600, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738435200, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738438800, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738442400, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738446000, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738449600, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738453200, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738456800, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738460400, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738464000, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738467600, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738471200, "room": "bedroom", 
"nodeId": 1, "occupancy": 1}, + {"timestamp": 1738474800, "room": "bedroom", "nodeId": 1, "occupancy": 1} + ], + "result": "anomalous", + "reason": "Person remains in bedroom all day after breakfast, missing meals, medications, and normal daily activities. This could indicate illness or other health issues." + }, + { + "input": [ + {"timestamp": 1738497600, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738497660, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738497900, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738498200, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738498500, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738498800, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738498860, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738499400, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738499700, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738500000, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738501200, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738502400, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738503000, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738503060, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738519200, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738519260, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738519320, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738519380, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738520400, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738522800, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738522860, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738526400, "room": "livingroom", 
"nodeId": 2, "onOff": false}, + {"timestamp": 1738530000, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738530060, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738530600, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738540800, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738540860, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738542000, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738543200, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738543260, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738546800, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738550400, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738550460, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738551000, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "normal", + "reason": "All activities follow the expected daily routine with normal timing and sequence." 
+ }, + { + "input": [ + {"timestamp": 1738584000, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738584300, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738584600, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738584900, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738585200, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738585260, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738585800, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738586100, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738586400, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738587600, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738588800, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738589400, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738589460, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738605600, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738605660, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738605720, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738605780, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738606800, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738609200, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738609260, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738612800, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738616400, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738617000, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738627200, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738627260, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738628400, "room": "kitchen", "nodeId": 2, 
"onOff": false}, + {"timestamp": 1738629600, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738629660, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738633200, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738636800, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738637400, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "anomalous", + "reason": "All three daily medications were missed (morning, afternoon, and evening), while other activities remained normal. This suggests a serious deviation in medication adherence." + }, + { + "input": [ + {"timestamp": 1738670400, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738670460, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738670700, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738671000, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738671300, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738671600, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738671660, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738672200, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738672500, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738672800, "room": "livingroom", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738672860, "room": "kitchen", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738672920, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738672980, "room": "bathroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673040, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673100, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673160, "room": "kitchen", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673220, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673280, "room": 
"bathroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673340, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673400, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673460, "room": "kitchen", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673520, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673580, "room": "bathroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673640, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738673700, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738674000, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738677600, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738681200, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738684800, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738688400, "room": "bedroom", "nodeId": 1, "occupancy": 1} + ], + "result": "anomalous", + "reason": "Unusual rapid transitions between rooms followed by extended bedroom stay suggests potential confusion or disorientation. Missing subsequent daily activities and medications adds to the concern." 
+ }, + { + "input": [ + {"timestamp": 1738756800, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738756860, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738757100, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738757400, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738757700, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738758000, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738758060, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738758600, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738758900, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738759200, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738760400, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738761600, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738762200, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738762260, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738778400, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738778460, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738778520, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738778580, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738779600, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738782000, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738782060, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738785600, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738789200, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738789260, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738789800, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738800000, "room": "kitchen", "nodeId": 
2, "onOff": true}, + {"timestamp": 1738800060, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738801200, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738802400, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738802460, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738806000, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738809600, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738809660, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738810200, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "normal", + "reason": "All activities follow the expected daily routine with normal timing and sequence." + }, + { + "input": [ + {"timestamp": 1738843200, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738843260, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738843500, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738843800, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738844100, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738844400, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738844460, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738844520, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738844580, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738844640, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738844700, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738844760, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738844820, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738844880, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738844940, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738845000, "room": "kitchen", "event": "fridge_opened"}, 
+ {"timestamp": 1738845060, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738866400, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738866460, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738866520, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738866580, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738866640, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738866700, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738875600, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738875660, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738876200, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738888400, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738888460, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738888520, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738888580, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738888640, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738888700, "room": "kitchen", "nodeId": 2, "onOff": false} + ], + "result": "anomalous", + "reason": "Unusually frequent kitchen visits and fridge openings throughout the day suggest potential memory issues or compulsive behavior. While other activities remain normal, the repeated kitchen pattern is concerning." 
+ }, + { + "input": [ + {"timestamp": 1738929600, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738929660, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738929900, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738930200, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738930260, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738930320, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738930380, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738930440, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738930500, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738930560, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738944000, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738944060, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738944120, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738944180, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738952400, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738952460, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738953000, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738957600, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738957660, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738957720, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738957780, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738962000, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738962060, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738962600, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "anomalous", + "reason": "Unusually frequent bathroom visits throughout the day, with multiple entries and exits 
in short succession. While other activities remain normal, this pattern suggests potential health issues requiring attention." + }, + { + "input": [ + { + "timestamp": 1737806400, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1737806460, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737806700, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737807000, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1737807300, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737807600, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737807660, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1737808200, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737808500, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737808800, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737810000, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737811200, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737811800, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737811860, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1737828000, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737828060, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737828120, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737828180, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1737829200, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737831600, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737831660, + "room": "livingroom", + "nodeId": 1, + 
"occupancy": 1 + }, + { + "timestamp": 1737835200, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737838800, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737838860, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1737839400, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737849600, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737849660, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1737850800, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737852000, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737852060, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737855600, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737859200, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737859260, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1737859800, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "normal", + "reason": "The data reflects a consistent daily routine, including medication times, meal preparation, and relaxation periods. No unusual activity is detected." 
+ }, + { + "input": [ + { + "timestamp": 1737892800, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1737892860, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737893100, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737893400, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1737893700, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737894000, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737894060, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1737894600, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737894900, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737895200, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737896400, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737897600, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737898200, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737898260, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1737914400, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737914460, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737914520, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737914580, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1737915600, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737918000, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737918060, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737921600, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 
1737925200, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737925260, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1737925800, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737936000, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737936060, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1737937200, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737938400, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737938460, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737942000, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737945600, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737945660, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1737946200, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "normal", + "reason": "The data shows a consistent routine similar to Day 1, with no deviations. All activities align with expected behavior." 
+ }, + { + "input": [ + { + "timestamp": 1737979200, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1737979260, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737979500, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737979800, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1737980100, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737980400, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737980460, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1737981000, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737981300, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737981600, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737982800, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737984000, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737984600, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737984660, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738000800, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738000860, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738000920, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738000980, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738002000, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738004400, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738004460, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738008000, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 
1738011600, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738011660, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738012200, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738022400, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738022460, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738023600, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738024800, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738024860, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738028400, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738032000, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738032060, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738032600, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "normal", + "reason": "The data shows a typical day with all expected activities occurring at regular times." 
+ }, + { + "input": [ + { + "timestamp": 1738065600, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738065660, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738065900, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738066200, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738066500, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738066800, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738067100, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738067400, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738067700, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738068000, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738068300, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738068600, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738068900, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738069200, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738069500, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + } + ], + "result": "anomalous", + "reason": "High humidity persists in the bathroom with no movement or room transitions for over 45 minutes, suggesting a potential fall during shower." 
+ }, + { + "input": [ + { + "timestamp": 1738152000, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738152060, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738152300, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738152600, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738152900, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738153200, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738153260, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738153800, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738154100, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738154400, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738155600, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738156800, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738157400, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738157460, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738173600, + "room": "porch", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738184400, + "room": "porch", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738195200, + "room": "porch", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738206000, + "room": "porch", + "nodeId": 1, + "occupancy": 0 + } + ], + "result": "anomalous", + "reason": "Person left home in the morning but did not return, missing all subsequent daily activities including medications and meals." 
+ }, + { + "input": [ + { + "timestamp": 1738238400, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738238460, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738238700, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738239000, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738239300, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738239600, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738239660, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738240200, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738240500, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738240800, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738242000, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738243200, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738243800, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738243860, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738260000, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738260060, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738260120, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738260180, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738261200, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738263600, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738263660, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738267200, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 
1738270800, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738270860, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738271400, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738281600, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738281660, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738282800, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738284000, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738284060, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738287600, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738291200, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738291260, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738291800, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "normal", + "reason": "All activities follow the expected pattern with normal timing and sequence." 
+ }, + { + "input": [ + { + "timestamp": 1738324800, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738324860, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738325100, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738325400, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738325700, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738326000, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738326600, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738326900, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738327200, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738328400, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738329600, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738330200, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738330260, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738346400, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738346460, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738346520, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738347600, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738350000, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738350060, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738353600, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738357200, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738357260, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 
1738357800, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738368000, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738369200, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738370400, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738370460, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738374000, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738377600, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738377660, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738378200, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "anomalous", + "reason": "No fridge activity detected throughout the entire day despite multiple kitchen visits, suggesting potential eating issues or forgetfulness." + }, + { + "input": [ + { + "timestamp": 1738411200, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738411260, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738411500, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738411800, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738412100, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738412400, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738412460, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738413000, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738413300, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738413600, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738417200, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738420800, + 
"room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738424400, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738428000, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738431600, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738435200, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738438800, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738442400, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738446000, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738449600, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738453200, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738456800, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738460400, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738464000, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738467600, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738471200, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738474800, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + } + ], + "result": "anomalous", + "reason": "Person remains in bedroom all day after breakfast, missing meals, medications, and normal daily activities. This could indicate illness or other health issues." 
+ }, + { + "input": [ + { + "timestamp": 1738497600, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738497660, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738497900, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738498200, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738498500, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738498800, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738498860, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738499400, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738499700, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738500000, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738501200, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738502400, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738503000, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738503060, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738519200, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738519260, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738519320, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738519380, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738520400, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738522800, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738522860, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738526400, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 
1738530000, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738530060, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738530600, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738540800, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738540860, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738542000, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738543200, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738543260, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738546800, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738550400, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738550460, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738551000, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "normal", + "reason": "All activities follow the expected daily routine with normal timing and sequence." 
+ }, + { + "input": [ + { + "timestamp": 1738584000, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738584300, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738584600, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738584900, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738585200, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738585260, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738585800, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738586100, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738586400, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738587600, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738588800, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738589400, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738589460, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738605600, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738605660, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738605720, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738605780, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738606800, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738609200, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738609260, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738612800, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738616400, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 
1738617000, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738627200, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738627260, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738628400, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738629600, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738629660, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738633200, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738636800, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738637400, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "anomalous", + "reason": "All three daily medications were missed (morning, afternoon, and evening), while other activities remained normal. This suggests a serious deviation in medication adherence." + }, + { + "input": [ + { + "timestamp": 1738670400, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738670460, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738670700, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738671000, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738671300, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738671600, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738671660, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738672200, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738672500, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738672800, + "room": "livingroom", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738672860, + "room": "kitchen", + "nodeId": 1, + "occupancy": 1 + 
}, + { + "timestamp": 1738672920, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738672980, + "room": "bathroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673040, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673100, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673160, + "room": "kitchen", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673220, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673280, + "room": "bathroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673340, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673400, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673460, + "room": "kitchen", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673520, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673580, + "room": "bathroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673640, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738673700, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738674000, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738677600, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738681200, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738684800, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738688400, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + } + ], + "result": "anomalous", + "reason": "Unusual rapid transitions between rooms followed by extended bedroom stay suggests potential confusion or disorientation. Missing subsequent daily activities and medications adds to the concern." 
+ }, + { + "input": [ + { + "timestamp": 1738756800, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738756860, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738757100, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738757400, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738757700, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738758000, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738758060, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738758600, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738758900, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738759200, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738760400, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738761600, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738762200, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738762260, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738778400, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738778460, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738778520, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738778580, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738779600, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738782000, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738782060, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738785600, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 
1738789200, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738789260, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738789800, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738800000, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738800060, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738801200, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738802400, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738802460, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738806000, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738809600, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738809660, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738810200, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "normal", + "reason": "All activities follow the expected daily routine with normal timing and sequence." 
+ }, + { + "input": [ + { + "timestamp": 1738843200, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738843260, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738843500, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738843800, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738844100, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738844400, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738844460, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738844520, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738844580, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738844640, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738844700, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738844760, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738844820, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738844880, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738844940, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738845000, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738845060, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738866400, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738866460, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738866520, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738866580, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738866640, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738866700, + "room": "kitchen", + 
"nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738875600, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738875660, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738876200, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738888400, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738888460, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738888520, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738888580, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738888640, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738888700, + "room": "kitchen", + "nodeId": 2, + "onOff": false + } + ], + "result": "anomalous", + "reason": "Unusually frequent kitchen visits and fridge openings throughout the day suggest potential memory issues or compulsive behavior. While other activities remain normal, the repeated kitchen pattern is concerning." 
+ }, + { + "input": [ + { + "timestamp": 1738929600, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738929660, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738929900, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738930200, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738930260, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738930320, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738930380, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738930440, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738930500, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738930560, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738944000, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738944060, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738944120, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738944180, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738952400, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738952460, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738953000, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738957600, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738957660, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738957720, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738957780, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738962000, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 
1738962060, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738962600, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "anomalous", + "reason": "Unusually frequent bathroom visits throughout the day, with multiple entries and exits in short succession. While other activities remain normal, this pattern suggests potential health issues requiring attention." + } + ] \ No newline at end of file diff --git a/project/data/daily_routine_data.json b/project/data/daily_routine_data.json new file mode 100644 index 0000000..e6da5e5 --- /dev/null +++ b/project/data/daily_routine_data.json @@ -0,0 +1,163 @@ +//Day1 - normal +{"timestamp": 1737806400, "room": "pillbox", "nodeId": 1, "onOff": true} // 6:00 AM: Pillbox opened +{"timestamp": 1737806460, "room": "bedroom", "nodeId": 2, "onOff": true} // 6:01 AM: Bedroom light turned on +{"timestamp": 1737806700, "room": "bathroom", "nodeId": 2, "onOff": true} // 6:05 AM: Bathroom light turned on +{"timestamp": 1737807000, "room": "bathroom", "nodeId": 3, "humidity": 6500} // 6:10 AM: Humidity spike in bathroom (shower) +{"timestamp": 1737807300, "room": "bathroom", "nodeId": 2, "onOff": false} // 6:15 AM: Bathroom light turned off +{"timestamp": 1737807600, "room": "kitchen", "nodeId": 2, "onOff": true} // 6:20 AM: Kitchen light turned on +{"timestamp": 1737807660, "room": "kitchen", "event": "fridge_opened"} // 6:21 AM: Fridge opened (breakfast preparation) +{"timestamp": 1737808200, "room": "kitchen", "nodeId": 2, "onOff": false} // 6:30 AM: Kitchen light turned off +{"timestamp": 1737808500, "room": "livingroom", "nodeId": 2, "onOff": true} // 6:35 AM: Living room light turned on +{"timestamp": 1737808800, "room": "livingroom", "nodeId": 1, "occupancy": 1} // 6:40 AM: Person 1 sits in the living room +{"timestamp": 1737810000, "room": "livingroom", "nodeId": 2, "onOff": false} // 7:00 AM: Living room light turned off +{"timestamp": 1737811200, "room": "hall", "nodeId": 1, 
"occupancy": 1} // 7:20 AM: Person 1 walks through the hall +{"timestamp": 1737811800, "room": "porch", "nodeId": 1, "occupancy": 1} // 7:30 AM: Person 1 leaves the house (porch occupancy) +{"timestamp": 1737811860, "room": "hall", "nodeId": 1, "occupancy": 0} // 7:31 AM: Hall occupancy ends +{"timestamp": 1737828000, "room": "porch", "nodeId": 1, "occupancy": 1} // 12:00 PM: Person 1 returns home (porch occupancy) +{"timestamp": 1737828060, "room": "hall", "nodeId": 1, "occupancy": 1} // 12:01 PM: Hall occupancy begins +{"timestamp": 1737828120, "room": "kitchen", "nodeId": 2, "onOff": true} // 12:02 PM: Kitchen light turned on +{"timestamp": 1737828180, "room": "kitchen", "event": "fridge_opened"} // 12:03 PM: Fridge opened (lunch preparation) +{"timestamp": 1737829200, "room": "kitchen", "nodeId": 2, "onOff": false} // 12:20 PM: Kitchen light turned off +{"timestamp": 1737831600, "room": "livingroom", "nodeId": 2, "onOff": true} // 1:00 PM: Living room light turned on +{"timestamp": 1737831660, "room": "livingroom", "nodeId": 1, "occupancy": 1} // 1:01 PM: Person 1 sits in the living room +{"timestamp": 1737835200, "room": "livingroom", "nodeId": 2, "onOff": false} // 2:00 PM: Living room light turned off +{"timestamp": 1737838800, "room": "bedroom", "nodeId": 2, "onOff": true} // 3:00 PM: Bedroom light turned on +{"timestamp": 1737838860, "room": "pillbox", "nodeId": 1, "onOff": true} // 3:01 PM: Pillbox opened +{"timestamp": 1737839400, "room": "bedroom", "nodeId": 2, "onOff": false} // 3:10 PM: Bedroom light turned off +{"timestamp": 1737849600, "room": "kitchen", "nodeId": 2, "onOff": true} // 6:00 PM: Kitchen light turned on +{"timestamp": 1737849660, "room": "kitchen", "event": "fridge_opened"} // 6:01 PM: Fridge opened (dinner preparation) +{"timestamp": 1737850800, "room": "kitchen", "nodeId": 2, "onOff": false} // 6:20 PM: Kitchen light turned off +{"timestamp": 1737852000, "room": "livingroom", "nodeId": 2, "onOff": true} // 6:40 PM: Living room light 
turned on +{"timestamp": 1737852060, "room": "livingroom", "nodeId": 1, "occupancy": 1} // 6:41 PM: Person 1 sits in the living room +{"timestamp": 1737855600, "room": "livingroom", "nodeId": 2, "onOff": false} // 7:40 PM: Living room light turned off +{"timestamp": 1737859200, "room": "bedroom", "nodeId": 2, "onOff": true} // 8:40 PM: Bedroom light turned on +{"timestamp": 1737859260, "room": "pillbox", "nodeId": 1, "onOff": true} // 8:41 PM: Pillbox opened +{"timestamp": 1737859800, "room": "bedroom", "nodeId": 2, "onOff": false} // 8:50 PM: Bedroom light turned off +//Day2 - normal +{"timestamp": 1737892800, "room": "pillbox", "nodeId": 1, "onOff": true} // 6:00 AM: Pillbox opened +{"timestamp": 1737892860, "room": "bedroom", "nodeId": 2, "onOff": true} // 6:01 AM: Bedroom light turned on +{"timestamp": 1737893100, "room": "bathroom", "nodeId": 2, "onOff": true} // 6:05 AM: Bathroom light turned on +{"timestamp": 1737893400, "room": "bathroom", "nodeId": 3, "humidity": 6500} // 6:10 AM: Humidity spike in bathroom (shower) +{"timestamp": 1737893700, "room": "bathroom", "nodeId": 2, "onOff": false} // 6:15 AM: Bathroom light turned off +{"timestamp": 1737894000, "room": "kitchen", "nodeId": 2, "onOff": true} // 6:20 AM: Kitchen light turned on +{"timestamp": 1737894060, "room": "kitchen", "event": "fridge_opened"} // 6:21 AM: Fridge opened (breakfast preparation) +{"timestamp": 1737894600, "room": "kitchen", "nodeId": 2, "onOff": false} // 6:30 AM: Kitchen light turned off +{"timestamp": 1737894900, "room": "livingroom", "nodeId": 2, "onOff": true} // 6:35 AM: Living room light turned on +{"timestamp": 1737895200, "room": "livingroom", "nodeId": 1, "occupancy": 1} // 6:40 AM: Person 1 sits in the living room +{"timestamp": 1737896400, "room": "livingroom", "nodeId": 2, "onOff": false} // 7:00 AM: Living room light turned off +{"timestamp": 1737897600, "room": "hall", "nodeId": 1, "occupancy": 1} // 7:20 AM: Person 1 walks through the hall +{"timestamp": 
1737898200, "room": "porch", "nodeId": 1, "occupancy": 1} // 7:30 AM: Person 1 leaves the house (porch occupancy) +{"timestamp": 1737898260, "room": "hall", "nodeId": 1, "occupancy": 0} // 7:31 AM: Hall occupancy ends +{"timestamp": 1737914400, "room": "porch", "nodeId": 1, "occupancy": 1} // 12:00 PM: Person 1 returns home (porch occupancy) +{"timestamp": 1737914460, "room": "hall", "nodeId": 1, "occupancy": 1} // 12:01 PM: Hall occupancy begins +{"timestamp": 1737914520, "room": "kitchen", "nodeId": 2, "onOff": true} // 12:02 PM: Kitchen light turned on +{"timestamp": 1737914580, "room": "kitchen", "event": "fridge_opened"} // 12:03 PM: Fridge opened (lunch preparation) +{"timestamp": 1737915600, "room": "kitchen", "nodeId": 2, "onOff": false} // 12:20 PM: Kitchen light turned off +{"timestamp": 1737918000, "room": "livingroom", "nodeId": 2, "onOff": true} // 1:00 PM: Living room light turned on +{"timestamp": 1737918060, "room": "livingroom", "nodeId": 1, "occupancy": 1} // 1:01 PM: Person 1 sits in the living room +{"timestamp": 1737921600, "room": "livingroom", "nodeId": 2, "onOff": false} // 2:00 PM: Living room light turned off +{"timestamp": 1737925200, "room": "bedroom", "nodeId": 2, "onOff": true} // 3:00 PM: Bedroom light turned on +{"timestamp": 1737925260, "room": "pillbox", "nodeId": 1, "onOff": true} // 3:01 PM: Pillbox opened +{"timestamp": 1737925800, "room": "bedroom", "nodeId": 2, "onOff": false} // 3:10 PM: Bedroom light turned off +{"timestamp": 1737936000, "room": "kitchen", "nodeId": 2, "onOff": true} // 6:00 PM: Kitchen light turned on +{"timestamp": 1737936060, "room": "kitchen", "event": "fridge_opened"} // 6:01 PM: Fridge opened (dinner preparation) +{"timestamp": 1737937200, "room": "kitchen", "nodeId": 2, "onOff": false} // 6:20 PM: Kitchen light turned off +{"timestamp": 1737938400, "room": "livingroom", "nodeId": 2, "onOff": true} // 6:40 PM: Living room light turned on +{"timestamp": 1737938460, "room": "livingroom", "nodeId": 1, 
"occupancy": 1} // 6:41 PM: Person 1 sits in the living room +{"timestamp": 1737942000, "room": "livingroom", "nodeId": 2, "onOff": false} // 7:40 PM: Living room light turned off +{"timestamp": 1737945600, "room": "bedroom", "nodeId": 2, "onOff": true} // 8:40 PM: Bedroom light turned on +{"timestamp": 1737945660, "room": "pillbox", "nodeId": 1, "onOff": true} // 8:41 PM: Pillbox opened +{"timestamp": 1737946200, "room": "bedroom", "nodeId": 2, "onOff": false} // 8:50 PM: Bedroom light turned off +//Day3 - normal +{"timestamp": 1737979200, "room": "pillbox", "nodeId": 1, "onOff": true} // 6:00 AM: Pillbox opened +{"timestamp": 1737979260, "room": "bedroom", "nodeId": 2, "onOff": true} // 6:01 AM: Bedroom light turned on +{"timestamp": 1737979500, "room": "bathroom", "nodeId": 2, "onOff": true} // 6:05 AM: Bathroom light turned on +{"timestamp": 1737979800, "room": "bathroom", "nodeId": 3, "humidity": 6500} // 6:10 AM: Humidity spike in bathroom (shower) +{"timestamp": 1737980100, "room": "bathroom", "nodeId": 2, "onOff": false} // 6:15 AM: Bathroom light turned off +{"timestamp": 1737980400, "room": "kitchen", "nodeId": 2, "onOff": true} // 6:20 AM: Kitchen light turned on +{"timestamp": 1737980460, "room": "kitchen", "event": "fridge_opened"} // 6:21 AM: Fridge opened (breakfast preparation) +{"timestamp": 1737981000, "room": "kitchen", "nodeId": 2, "onOff": false} // 6:30 AM: Kitchen light turned off +{"timestamp": 1737981300, "room": "livingroom", "nodeId": 2, "onOff": true} // 6:35 AM: Living room light turned on +{"timestamp": 1737981600, "room": "livingroom", "nodeId": 1, "occupancy": 1} // 6:40 AM: Person 1 sits in the living room +{"timestamp": 1737982800, "room": "livingroom", "nodeId": 2, "onOff": false} // 7:00 AM: Living room light turned off +{"timestamp": 1737984000, "room": "hall", "nodeId": 1, "occupancy": 1} // 7:20 AM: Person 1 walks through the hall +{"timestamp": 1737984600, "room": "porch", "nodeId": 1, "occupancy": 1} // 7:30 AM: Person 1 
leaves the house (porch occupancy) +{"timestamp": 1737984660, "room": "hall", "nodeId": 1, "occupancy": 0} // 7:31 AM: Hall occupancy ends +{"timestamp": 1738000800, "room": "porch", "nodeId": 1, "occupancy": 1} // 12:00 PM: Person 1 returns home (porch occupancy) +{"timestamp": 1738000860, "room": "hall", "nodeId": 1, "occupancy": 1} // 12:01 PM: Hall occupancy begins +{"timestamp": 1738000920, "room": "kitchen", "nodeId": 2, "onOff": true} // 12:02 PM: Kitchen light turned on +{"timestamp": 1738000980, "room": "kitchen", "event": "fridge_opened"} // 12:03 PM: Fridge opened (lunch preparation) +{"timestamp": 1738002000, "room": "kitchen", "nodeId": 2, "onOff": false} // 12:20 PM: Kitchen light turned off +{"timestamp": 1738004400, "room": "livingroom", "nodeId": 2, "onOff": true} // 1:00 PM: Living room light turned on +{"timestamp": 1738004460, "room": "livingroom", "nodeId": 1, "occupancy": 1} // 1:01 PM: Person 1 sits in the living room +{"timestamp": 1738008000, "room": "livingroom", "nodeId": 2, "onOff": false} // 2:00 PM: Living room light turned off +{"timestamp": 1738011600, "room": "bedroom", "nodeId": 2, "onOff": true} // 3:00 PM: Bedroom light turned on +{"timestamp": 1738011660, "room": "pillbox", "nodeId": 1, "onOff": true} // 3:01 PM: Pillbox opened +{"timestamp": 1738012200, "room": "bedroom", "nodeId": 2, "onOff": false} // 3:10 PM: Bedroom light turned off +{"timestamp": 1738022400, "room": "kitchen", "nodeId": 2, "onOff": true} // 6:00 PM: Kitchen light turned on +{"timestamp": 1738022460, "room": "kitchen", "event": "fridge_opened"} // 6:01 PM: Fridge opened (dinner preparation) +{"timestamp": 1738023600, "room": "kitchen", "nodeId": 2, "onOff": false} // 6:20 PM: Kitchen light turned off +{"timestamp": 1738024800, "room": "livingroom", "nodeId": 2, "onOff": true} // 6:40 PM: Living room light turned on +{"timestamp": 1738024860, "room": "livingroom", "nodeId": 1, "occupancy": 1} // 6:41 PM: Person 1 sits in the living room +{"timestamp": 
1738028400, "room": "livingroom", "nodeId": 2, "onOff": false} // 7:40 PM: Living room light turned off +{"timestamp": 1738032000, "room": "bedroom", "nodeId": 2, "onOff": true} // 8:40 PM: Bedroom light turned on +{"timestamp": 1738032060, "room": "pillbox", "nodeId": 1, "onOff": true} // 8:41 PM: Pillbox opened +{"timestamp": 1738032600, "room": "bedroom", "nodeId": 2, "onOff": false} // 8:50 PM: Bedroom light turned off + +// Normal: Daily routine +{"timestamp": 1737806400, "room": "pillbox", "nodeId": 1, "onOff": true} // 6:00 AM: Pillbox opened +{"timestamp": 1737806460, "room": "bedroom", "nodeId": 2, "onOff": true} // 6:01 AM: Bedroom light turned on +{"timestamp": 1737806700, "room": "bathroom", "nodeId": 2, "onOff": true} // 6:05 AM: Bathroom light turned on +{"timestamp": 1737807000, "room": "bathroom", "nodeId": 3, "humidity": 6500} // 6:10 AM: Humidity spike in bathroom (shower) +{"timestamp": 1737807300, "room": "bathroom", "nodeId": 2, "onOff": false} // 6:15 AM: Bathroom light turned off +{"timestamp": 1737807600, "room": "kitchen", "nodeId": 2, "onOff": true} // 6:20 AM: Kitchen light turned on +{"timestamp": 1737807660, "room": "kitchen", "event": "fridge_opened"} // 6:21 AM: Fridge opened (breakfast preparation) +{"timestamp": 1737808200, "room": "kitchen", "nodeId": 2, "onOff": false} // 6:30 AM: Kitchen light turned off +{"timestamp": 1737808500, "room": "livingroom", "nodeId": 2, "onOff": true} // 6:35 AM: Living room light turned on +{"timestamp": 1737808800, "room": "livingroom", "nodeId": 1, "occupancy": 1} // 6:40 AM: Person 1 sits in the living room +{"timestamp": 1737810000, "room": "livingroom", "nodeId": 2, "onOff": false} // 7:00 AM: Living room light turned off +{"timestamp": 1737811200, "room": "hall", "nodeId": 1, "occupancy": 1} // 7:20 AM: Person 1 walks through the hall +{"timestamp": 1737811800, "room": "porch", "nodeId": 1, "occupancy": 1} // 7:30 AM: Person 1 leaves the house (porch occupancy) +{"timestamp": 1737811860, 
"room": "hall", "nodeId": 1, "occupancy": 0} // 7:31 AM: Hall occupancy ends + +// Anomalous: No fridge activity for 6 hours +{"timestamp": 1737828000, "room": "porch", "nodeId": 1, "occupancy": 1} // 12:00 PM: Person 1 returns home (porch occupancy) +{"timestamp": 1737828060, "room": "hall", "nodeId": 1, "occupancy": 1} // 12:01 PM: Hall occupancy begins +{"timestamp": 1737828120, "room": "kitchen", "nodeId": 2, "onOff": true} // 12:02 PM: Kitchen light turned on +{"timestamp": 1737828180, "room": "kitchen", "event": "fridge_opened"} // 12:03 PM: Fridge opened (lunch preparation) +{"timestamp": 1737829200, "room": "kitchen", "nodeId": 2, "onOff": false} // 12:20 PM: Kitchen light turned off + +// Anomalous: Excessive pillbox usage +{"timestamp": 1737831600, "room": "pillbox", "nodeId": 1, "onOff": true} // 1:00 PM: Pillbox opened +{"timestamp": 1737831660, "room": "pillbox", "nodeId": 1, "onOff": false} // 1:01 PM: Pillbox closed +{"timestamp": 1737831720, "room": "pillbox", "nodeId": 1, "onOff": true} // 1:02 PM: Pillbox opened +{"timestamp": 1737831780, "room": "pillbox", "nodeId": 1, "onOff": false} // 1:03 PM: Pillbox closed +{"timestamp": 1737831840, "room": "pillbox", "nodeId": 1, "onOff": true} // 1:04 PM: Pillbox opened +{"timestamp": 1737831900, "room": "pillbox", "nodeId": 1, "onOff": false} // 1:05 PM: Pillbox closed + +// Anomalous: No movement in the living room during typical active hours +{"timestamp": 1737835200, "room": "livingroom", "nodeId": 2, "onOff": false} // 2:00 PM: Living room light remains off +{"timestamp": 1737838800, "room": "bedroom", "nodeId": 2, "onOff": true} // 3:00 PM: Bedroom light turned on +{"timestamp": 1737838860, "room": "pillbox", "nodeId": 1, "onOff": true} // 3:01 PM: Pillbox opened +{"timestamp": 1737839400, "room": "bedroom", "nodeId": 2, "onOff": false} // 3:10 PM: Bedroom light turned off + +// Anomalous: Unusual bathroom activity (occupied for 2 hours) +{"timestamp": 1737849600, "room": "bathroom", "nodeId": 2, 
"onOff": true} // 6:00 PM: Bathroom light turned on +{"timestamp": 1737849660, "room": "bathroom", "nodeId": 3, "humidity": 6500} // 6:01 PM: Humidity spike in bathroom +{"timestamp": 1737850800, "room": "bathroom", "nodeId": 2, "onOff": false} // 6:20 PM: Bathroom light turned off + +// Anomalous: Irregular transitions (person leaves and does not return for 12 hours) +{"timestamp": 1737852000, "room": "hall", "nodeId": 1, "occupancy": 1} // 6:40 PM: Person 1 walks through the hall +{"timestamp": 1737852600, "room": "porch", "nodeId": 1, "occupancy": 1} // 6:50 PM: Person 1 leaves the house (porch occupancy) +{"timestamp": 1737852660, "room": "hall", "nodeId": 1, "occupancy": 0} // 6:51 PM: Hall occupancy ends + +// Anomalous: No fridge activity for 12 hours +{"timestamp": 1737859200, "room": "kitchen", "nodeId": 2, "onOff": true} // 8:40 PM: Kitchen light turned on +{"timestamp": 1737859260, "room": "kitchen", "event": "fridge_opened"} // 8:41 PM: Fridge opened (dinner preparation) +{"timestamp": 1737859800, "room": "kitchen", "nodeId": 2, "onOff": false} // 8:50 PM: Kitchen light turned off +{"timestamp": 1737860400, "room": "livingroom", "nodeId": 2, "onOff": true} // 9:00 PM: Living room light turned on +{"timestamp": 1737860460, "room": "livingroom", "nodeId": 1, "occupancy": 1} // 9:01 PM: Person 1 sits in the living room +{"timestamp": 1737864000, "room": "livingroom", "nodeId": 2, "onOff": false} // 10:00 PM: Living room light turned off +{"timestamp": 1737867600, "room": "bedroom", "nodeId": 2, "onOff": true} // 11:00 PM: Bedroom light turned on +{"timestamp": 1737867660, "room": "pillbox", "nodeId": 1, "onOff": true} // 11:01 PM: Pillbox opened +{"timestamp": 1737868200, "room": "bedroom", "nodeId": 2, "onOff": false} // 11:10 PM: Bedroom light turned off \ No newline at end of file diff --git a/project/data/raw_data.json b/project/data/raw_data.json new file mode 100644 index 0000000..5a0dcbf --- /dev/null +++ b/project/data/raw_data.json @@ -0,0 +1,96 
@@ +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 1888}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 1888}}}]} +{"fabric_id": 2, "compressed_fabric_id": 6710718474042576632, "schema_version": 11, "min_supported_schema_version": 9, "sdk_version": "2024.9.0", "wifi_credentials_set": false, "thread_credentials_set": false, "bluetooth_enabled": false} +{"message_id": "5469723", "result": []} +{"event": "node_added", "data": {"node_id": 1, "date_commissioned": "2024-12-07T16:12:50.085498", "last_interview": "2024-12-07T16:12:50.085515", "interview_version": 6, "available": false, "is_bridge": true, "attributes": {"0": {"Descriptor": {"DeviceTypeList": [{"0": 22, "1": 1}], "ServerList": [29, 31, 40, 48, 49, 51, 55, 60, 62, 63, 65], "ClientList": [31], "PartsList": [1, 3, 4, 5, 6], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533]}, "AccessControl": {"Acl": [{"254": 1}, {"1": 5, "2": 2, "3": [112233], "4": null, "254": 10}], "Extension": [], "SubjectsPerAccessControlEntry": 4, "TargetsPerAccessControlEntry": 3, "AccessControlEntriesPerFabric": 4, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [0], "AttributeList": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533]}, "BasicInformation": {"DataModelRevision": 1, "VendorName": "SONOFF\n", "VendorID": 4897, "ProductName": "Smart Home Hub\n", "ProductID": 36, "NodeLabel": "", "Location": "XX", "HardwareVersion": 0, "HardwareVersionString": "TEST_VERSION", "SoftwareVersion": 1, "SoftwareVersionString": "1.0", "ManufacturingDate": "20200101", "PartNumber": "", "ProductURL": "", "ProductLabel": "", "LocalConfigDisabled": false, "UniqueID": "C000641B34E4798F", "CapabilityMinima": {"0": 3, "1": 65535}, "FeatureMap": 0, 
"ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [0, 1, 2], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 16, 18, 19, 65528, 65529, 65530, 65531, 65532, 65533]}, "GeneralCommissioning": {"Breadcrumb": 0, "BasicCommissioningInfo": {"0": 60, "1": 900}, "RegulatoryConfig": 2, "LocationCapability": 2, "SupportsConcurrentConnection": true, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [1, 3, 5], "AcceptedCommandList": [0, 2, 4], "EventList": [], "AttributeList": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533]}, "NetworkCommissioning": {"MaxNetworks": 1, "Networks": [{"0": "ZXRoMA==", "1": true}], "ScanMaxTimeSeconds": 0, "ConnectMaxTimeSeconds": 0, "InterfaceEnabled": true, "LastNetworkingStatus": null, "LastNetworkID": null, "LastConnectErrorValue": null, "FeatureMap": 4, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65530, 65531, 65532, 65533]}, "GeneralDiagnostics": {"NetworkInterfaces": [{"0": "p2p0", "1": false, "2": null, "3": null, "4": "0icDxcnj", "5": [], "6": [], "7": 1}, {"0": "wlan0", "1": true, "2": null, "3": null, "4": "0CcDxcnj", "5": ["wKgAJw=="], "6": ["KgZZAIBtHAB5WwuUsUVjyw==", "/SWs6wutAACpdONJpLEZtQ==", "/oAAAAAAAABIed7zJQrcsQ=="], "7": 1}, {"0": "sit0", "1": false, "2": null, "3": null, "4": "AAAAAAAA", "5": [], "6": [], "7": 0}, {"0": "eth0", "1": false, "2": null, "3": null, "4": "0CcDxcni", "5": [], "6": [], "7": 2}, {"0": "lo", "1": true, "2": null, "3": null, "4": "AAAAAAAA", "5": ["fwAAAQ=="], "6": ["AAAAAAAAAAAAAAAAAAAAAQ=="], "7": 0}], "RebootCount": 7, "UpTime": 3220862, "TotalOperationalHours": 894, "BootReason": 0, "ActiveHardwareFaults": [], "ActiveRadioFaults": [], "ActiveNetworkFaults": [], "TestEventTriggersEnabled": false, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0, 0], "EventList": [3], 
"AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65530, 65531, 65532, 65533]}, "EthernetNetworkDiagnostics": {"PHYRate": 0, "FullDuplex": false, "PacketRxCount": 0, "PacketTxCount": 0, "TxErrCount": 0, "CollisionCount": 0, "OverrunCount": 0, "CarrierDetect": null, "TimeSinceReset": 3220863, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65530, 65531, 65532, 65533]}, "AdministratorCommissioning": {"WindowStatus": 0, "AdminFabricIndex": null, "AdminVendorId": null, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0, 1, 2], "EventList": [], "AttributeList": [0, 1, 2, 65528, 65529, 65530, 65531, 65532, 65533]}, "OperationalCredentials": {"NOCs": [{"254": 1}, {"1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRARgkBwEkCAEwCUEEE+PSMypUYP8rT/WHSOMhhGw4aZL2z6HvdvEZJNEvFkmDGPt7zfz2CHXWXdc3G3cfDC0Y/CwZMK8g2wWUVWw4lzcKNQEoARgkAgE2AwQCBAEYMAQUsO6DA3InCYmNpEigLz8pzd8e/ZgwBRRom5SXSQjNanFKz0NahjjIpGLZOhgwC0AaDm3nKCD552V2kLTY8F21MD+cZntwnwd8fhBWBrOzsl/Bj17k/9yWdl0xRqfKnOMnB/2UhoQlAjL2ta8MeLZCGA==", "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEMxi8MntoCWqI6x5VcKuHfh+D35EnvsCsl3i+mDmwzpmX+HY4ill38eAIqoGctJoivkaMamSO/Be1Pt/9Jx/cdDcKNQEpARgkAmAwBBRom5SXSQjNanFKz0NahjjIpGLZOjAFFG4SOv6FjsCxKGl81n3lZCDc4gT0GDALQECYOxHQV5GiATWNoxshUxg+Q3fZc829u6b4/ja9kOZRb1IPnuMc5bBO1t6FakyU7ChVw4CY8X2kzKtzovUL2AsY", "254": 10}], "Fabrics": [{"1": "BFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U=", "2": 24582, "3": 36164968694715784, "4": 719458172, "5": "", "254": 1}, {"1": "BDBkphswNPti4De6nQNmGkYH2+oK+uKpUpqwaULfU2KdxOYJYOL2jDOOBK8i1NoaFDdA8qbm8hd7AcAjwoxLxxM=", "2": 4939, "3": 2, "4": 1, "5": "", "254": 10}], "SupportedFabrics": 15, "CommissionedFabrics": 2, "TrustedRootCertificates": 
["FTABAQEkAgE3AyyEAlVTLAcGR29vZ2xlLAELTWF0dGVyIFJvb3QnFAEAAAD+////GCYEf9JDKSYFf5Rb5TcGLIQCVVMsBwZHb29nbGUsAQtNYXR0ZXIgUm9vdCcUAQAAAP7///8YJAcBJAgBMAlBBFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U3CjUBKQEkAgEYJAJgMAQUcsIB91cZE7NIygDKe0X0d0ZoyX4wBRRywgH3VxkTs0jKAMp7RfR3RmjJfhgwC0BlFksWat/xjBVhCozpG9cD6cH2d7cRzhM1BRUt8NoVERZ1rFWRzueGhRzdnv2tKWZ0vryyo6Mgm83nswnbVSxvGA==", "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEEMGSmGzA0+2LgN7qdA2YaRgfb6gr64qlSmrBpQt9TYp3E5glg4vaMM44EryLU2hoUN0DypubyF3sBwCPCjEvHEzcKNQEpARgkAmAwBBRuEjr+hY7AsShpfNZ95WQg3OIE9DAFFG4SOv6FjsCxKGl81n3lZCDc4gT0GDALQG5+SU7Xb89dIqDz7VdAFwwEH3mIFOuZtHAKqBTBWlXBQbxnO4SUBLLKjZyGP7tFU7v6kqQWVeEtN4gbZ9i5oi4Y"], "CurrentFabricIndex": 10, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [1, 3, 5, 8], "AcceptedCommandList": [0, 2, 4, 6, 7, 9, 10, 11], "EventList": [], "AttributeList": [0, 1, 2, 3, 4, 5, 65528, 65529, 65530, 65531, 65532, 65533]}, "GroupKeyManagement": {"GroupKeyMap": [], "GroupTable": [], "MaxGroupsPerFabric": 4, "MaxGroupKeysPerFabric": 3, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [2, 5], "AcceptedCommandList": [0, 1, 3, 4], "EventList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533]}, "UserLabel": {"LabelList": [], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 65528, 65529, 65530, 65531, 65532, 65533]}}, "1": {"Descriptor": {"DeviceTypeList": [{"0": 14, "1": 1}], "ServerList": [29], "ClientList": [], "PartsList": [3, 4, 5, 6], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533]}}, "3": {"Identify": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 65533], "EventList": [], 
"AcceptedCommandList": [0], "GeneratedCommandList": [], "IdentifyTime": 0, "IdentifyType": 0}, "Groups": {"ClusterRevision": 4, "FeatureMap": 1, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 65533], "EventList": [], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "GeneratedCommandList": [0, 1, 2, 3], "NameSupport": 128}, "5": {"65533": 4, "65532": 1, "65531": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 6, 1, 2, 3, 4, 65533], "65530": [], "65529": [0, 1, 2, 3, 4, 5, 6], "65528": [0, 1, 2, 3, 4, 6], "0": 0, "6": 0, "1": 0, "2": 0, "3": false, "4": 128}, "OccupancySensing": {"ClusterRevision": 3, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "Occupancy": 1, "OccupancySensorType": 0, "OccupancySensorTypeBitmap": 1}, "Descriptor": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 3, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "DeviceTypeList": [{"0": 19, "1": 1}, {"0": 263, "1": 2}], "ServerList": [3, 4, 5, 1030, 29, 57], "ClientList": [], "PartsList": []}, "BridgedDeviceBasicInformation": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 5, 1, 3, 15, 10, 17, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "NodeLabel": "SONOFFMotion Sensor", "VendorName": "SONOFF", "ProductName": "SNZB-06P", "SerialNumber": "0ceff6fffe1a88c6", "SoftwareVersionString": "1.0.6", "Reachable": true}}, "4": {"Identify": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 65533], "EventList": [], "AcceptedCommandList": [0], "GeneratedCommandList": [], "IdentifyTime": 0, 
"IdentifyType": 0}, "Groups": {"ClusterRevision": 4, "FeatureMap": 1, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 65533], "EventList": [], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "GeneratedCommandList": [0, 1, 2, 3], "NameSupport": 128}, "5": {"65533": 4, "65532": 1, "65531": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 6, 1, 2, 3, 4, 65533], "65530": [], "65529": [0, 1, 2, 3, 4, 5, 6], "65528": [0, 1, 2, 3, 4, 6], "0": 0, "6": 0, "1": 0, "2": 0, "3": false, "4": 128}, "Descriptor": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 3, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "DeviceTypeList": [{"0": 19, "1": 1}], "ServerList": [3, 4, 5, 29, 57], "ClientList": [], "PartsList": [5, 6]}, "BridgedDeviceBasicInformation": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 5, 1, 3, 15, 10, 17, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "NodeLabel": "eWeLinkTemperature/Humidity Sen", "VendorName": "eWeLink", "ProductName": "TH01", "SerialNumber": "00124b002e957108", "SoftwareVersionString": "0.5", "Reachable": true}}, "5": {"Identify": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 65533], "EventList": [], "AcceptedCommandList": [0], "GeneratedCommandList": [], "IdentifyTime": 0, "IdentifyType": 0}, "Groups": {"ClusterRevision": 4, "FeatureMap": 1, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 65533], "EventList": [], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "GeneratedCommandList": [0, 1, 2, 3], "NameSupport": 128}, "5": {"65533": 4, "65532": 1, "65531": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 
6, 1, 2, 3, 4, 65533], "65530": [], "65529": [0, 1, 2, 3, 4, 5, 6], "65528": [0, 1, 2, 3, 4, 6], "0": 0, "6": 0, "1": 0, "2": 0, "3": false, "4": 128}, "TemperatureMeasurement": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "MeasuredValue": 1670, "MinMeasuredValue": -27315, "MaxMeasuredValue": 32767}, "Descriptor": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 3, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "DeviceTypeList": [{"0": 770, "1": 2}], "ServerList": [3, 4, 5, 1026, 29, 57], "ClientList": [], "PartsList": []}, "BridgedDeviceBasicInformation": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 5, 1, 3, 15, 10, 17, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "NodeLabel": "Temperature Sensor", "VendorName": "eWeLink", "ProductName": "TH01", "SerialNumber": "00124b002e957108", "SoftwareVersionString": "0.5", "Reachable": true}}, "6": {"Identify": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 65533], "EventList": [], "AcceptedCommandList": [0], "GeneratedCommandList": [], "IdentifyTime": 0, "IdentifyType": 0}, "Groups": {"ClusterRevision": 4, "FeatureMap": 1, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 65533], "EventList": [], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "GeneratedCommandList": [0, 1, 2, 3], "NameSupport": 128}, "5": {"65533": 4, "65532": 1, "65531": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 6, 1, 2, 3, 4, 65533], "65530": [], "65529": [0, 1, 2, 3, 4, 5, 6], "65528": [0, 1, 2, 3, 
4, 6], "0": 0, "6": 0, "1": 0, "2": 0, "3": false, "4": 128}, "RelativeHumidityMeasurement": {"ClusterRevision": 3, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "MeasuredValue": 5400, "MinMeasuredValue": 0, "MaxMeasuredValue": 10000}, "Descriptor": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 3, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "DeviceTypeList": [{"0": 775, "1": 2}], "ServerList": [3, 4, 5, 1029, 29, 57], "ClientList": [], "PartsList": []}, "BridgedDeviceBasicInformation": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 5, 1, 3, 15, 10, 17, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "NodeLabel": "Humidity Sensor", "VendorName": "eWeLink", "ProductName": "TH01", "SerialNumber": "00124b002e957108", "SoftwareVersionString": "0.5", "Reachable": true}}}, "attribute_subscriptions": []}} +{"event": "node_updated", "data": {"node_id": 1, "date_commissioned": "2024-12-07T16:12:50.085498", "last_interview": "2024-12-07T16:12:50.085515", "interview_version": 6, "available": true, "is_bridge": true, "attributes": {"0": {"Descriptor": {"DeviceTypeList": [{"0": 22, "1": 1}], "ServerList": [29, 31, 40, 48, 49, 51, 55, 60, 62, 63, 65], "ClientList": [31], "PartsList": [1, 3, 4, 5, 6], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533]}, "AccessControl": {"Acl": [{"1": 5, "2": 2, "3": [112233], "4": null, "254": 10}], "Extension": [], "SubjectsPerAccessControlEntry": 4, "TargetsPerAccessControlEntry": 3, "AccessControlEntriesPerFabric": 4, "FeatureMap": 
0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [0], "AttributeList": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533]}, "BasicInformation": {"DataModelRevision": 1, "VendorName": "SONOFF\n", "VendorID": 4897, "ProductName": "Smart Home Hub\n", "ProductID": 36, "NodeLabel": "", "Location": "XX", "HardwareVersion": 0, "HardwareVersionString": "TEST_VERSION", "SoftwareVersion": 1, "SoftwareVersionString": "1.0", "ManufacturingDate": "20200101", "PartNumber": "", "ProductURL": "", "ProductLabel": "", "LocalConfigDisabled": false, "UniqueID": "C000641B34E4798F", "CapabilityMinima": {"0": 3, "1": 65535}, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [0, 1, 2], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 16, 18, 19, 65528, 65529, 65530, 65531, 65532, 65533]}, "GeneralCommissioning": {"Breadcrumb": 0, "BasicCommissioningInfo": {"0": 60, "1": 900}, "RegulatoryConfig": 2, "LocationCapability": 2, "SupportsConcurrentConnection": true, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [1, 3, 5], "AcceptedCommandList": [0, 2, 4], "EventList": [], "AttributeList": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533]}, "NetworkCommissioning": {"MaxNetworks": 1, "Networks": [{"0": "ZXRoMA==", "1": true}], "ScanMaxTimeSeconds": 0, "ConnectMaxTimeSeconds": 0, "InterfaceEnabled": true, "LastNetworkingStatus": null, "LastNetworkID": null, "LastConnectErrorValue": null, "FeatureMap": 4, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65530, 65531, 65532, 65533]}, "GeneralDiagnostics": {"NetworkInterfaces": [{"0": "p2p0", "1": false, "2": null, "3": null, "4": "0icDxcnj", "5": [], "6": [], "7": 1}, {"0": "wlan0", "1": true, "2": null, "3": null, "4": "0CcDxcnj", "5": ["wKgAJw=="], "6": ["KgZZAIBtHAB5WwuUsUVjyw==", 
"/SWs6wutAACpdONJpLEZtQ==", "/oAAAAAAAABIed7zJQrcsQ=="], "7": 1}, {"0": "sit0", "1": false, "2": null, "3": null, "4": "AAAAAAAA", "5": [], "6": [], "7": 0}, {"0": "eth0", "1": false, "2": null, "3": null, "4": "0CcDxcni", "5": [], "6": [], "7": 2}, {"0": "lo", "1": true, "2": null, "3": null, "4": "AAAAAAAA", "5": ["fwAAAQ=="], "6": ["AAAAAAAAAAAAAAAAAAAAAQ=="], "7": 0}], "RebootCount": 7, "UpTime": 3220864, "TotalOperationalHours": 894, "BootReason": 0, "ActiveHardwareFaults": [], "ActiveRadioFaults": [], "ActiveNetworkFaults": [], "TestEventTriggersEnabled": false, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0, 0], "EventList": [3], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65530, 65531, 65532, 65533]}, "EthernetNetworkDiagnostics": {"PHYRate": 0, "FullDuplex": false, "PacketRxCount": 0, "PacketTxCount": 0, "TxErrCount": 0, "CollisionCount": 0, "OverrunCount": 0, "CarrierDetect": null, "TimeSinceReset": 3220864, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65530, 65531, 65532, 65533]}, "AdministratorCommissioning": {"WindowStatus": 0, "AdminFabricIndex": null, "AdminVendorId": null, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0, 1, 2], "EventList": [], "AttributeList": [0, 1, 2, 65528, 65529, 65530, 65531, 65532, 65533]}, "OperationalCredentials": {"NOCs": [{"1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRARgkBwEkCAEwCUEEE+PSMypUYP8rT/WHSOMhhGw4aZL2z6HvdvEZJNEvFkmDGPt7zfz2CHXWXdc3G3cfDC0Y/CwZMK8g2wWUVWw4lzcKNQEoARgkAgE2AwQCBAEYMAQUsO6DA3InCYmNpEigLz8pzd8e/ZgwBRRom5SXSQjNanFKz0NahjjIpGLZOhgwC0AaDm3nKCD552V2kLTY8F21MD+cZntwnwd8fhBWBrOzsl/Bj17k/9yWdl0xRqfKnOMnB/2UhoQlAjL2ta8MeLZCGA==", "2": 
"FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEMxi8MntoCWqI6x5VcKuHfh+D35EnvsCsl3i+mDmwzpmX+HY4ill38eAIqoGctJoivkaMamSO/Be1Pt/9Jx/cdDcKNQEpARgkAmAwBBRom5SXSQjNanFKz0NahjjIpGLZOjAFFG4SOv6FjsCxKGl81n3lZCDc4gT0GDALQECYOxHQV5GiATWNoxshUxg+Q3fZc829u6b4/ja9kOZRb1IPnuMc5bBO1t6FakyU7ChVw4CY8X2kzKtzovUL2AsY", "254": 10}], "Fabrics": [{"1": "BDBkphswNPti4De6nQNmGkYH2+oK+uKpUpqwaULfU2KdxOYJYOL2jDOOBK8i1NoaFDdA8qbm8hd7AcAjwoxLxxM=", "2": 4939, "3": 2, "4": 1, "5": "", "254": 10}], "SupportedFabrics": 15, "CommissionedFabrics": 2, "TrustedRootCertificates": ["FTABAQEkAgE3AyyEAlVTLAcGR29vZ2xlLAELTWF0dGVyIFJvb3QnFAEAAAD+////GCYEf9JDKSYFf5Rb5TcGLIQCVVMsBwZHb29nbGUsAQtNYXR0ZXIgUm9vdCcUAQAAAP7///8YJAcBJAgBMAlBBFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U3CjUBKQEkAgEYJAJgMAQUcsIB91cZE7NIygDKe0X0d0ZoyX4wBRRywgH3VxkTs0jKAMp7RfR3RmjJfhgwC0BlFksWat/xjBVhCozpG9cD6cH2d7cRzhM1BRUt8NoVERZ1rFWRzueGhRzdnv2tKWZ0vryyo6Mgm83nswnbVSxvGA==", "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEEMGSmGzA0+2LgN7qdA2YaRgfb6gr64qlSmrBpQt9TYp3E5glg4vaMM44EryLU2hoUN0DypubyF3sBwCPCjEvHEzcKNQEpARgkAmAwBBRuEjr+hY7AsShpfNZ95WQg3OIE9DAFFG4SOv6FjsCxKGl81n3lZCDc4gT0GDALQG5+SU7Xb89dIqDz7VdAFwwEH3mIFOuZtHAKqBTBWlXBQbxnO4SUBLLKjZyGP7tFU7v6kqQWVeEtN4gbZ9i5oi4Y"], "CurrentFabricIndex": 10, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [1, 3, 5, 8], "AcceptedCommandList": [0, 2, 4, 6, 7, 9, 10, 11], "EventList": [], "AttributeList": [0, 1, 2, 3, 4, 5, 65528, 65529, 65530, 65531, 65532, 65533]}, "GroupKeyManagement": {"GroupKeyMap": [], "GroupTable": [], "MaxGroupsPerFabric": 4, "MaxGroupKeysPerFabric": 3, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [2, 5], "AcceptedCommandList": [0, 1, 3, 4], "EventList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533]}, "UserLabel": {"LabelList": [], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], 
"AttributeList": [0, 65528, 65529, 65530, 65531, 65532, 65533]}}, "1": {"Descriptor": {"DeviceTypeList": [{"0": 14, "1": 1}], "ServerList": [29], "ClientList": [], "PartsList": [3, 4, 5, 6], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533]}}, "3": {"Identify": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 65533], "EventList": [], "AcceptedCommandList": [0], "GeneratedCommandList": [], "IdentifyTime": 0, "IdentifyType": 0}, "Groups": {"ClusterRevision": 4, "FeatureMap": 1, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 65533], "EventList": [], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "GeneratedCommandList": [0, 1, 2, 3], "NameSupport": 128}, "5": {"65533": 4, "65532": 1, "65531": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 6, 1, 2, 3, 4, 65533], "65530": [], "65529": [0, 1, 2, 3, 4, 5, 6], "65528": [0, 1, 2, 3, 4, 6], "0": 0, "6": 0, "1": 0, "2": 0, "3": false, "4": 128}, "OccupancySensing": {"ClusterRevision": 3, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "Occupancy": 1, "OccupancySensorType": 0, "OccupancySensorTypeBitmap": 1}, "Descriptor": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 3, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "DeviceTypeList": [{"0": 19, "1": 1}, {"0": 263, "1": 2}], "ServerList": [3, 4, 5, 1030, 29, 57], "ClientList": [], "PartsList": []}, "BridgedDeviceBasicInformation": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 
65531, 65530, 65529, 65528, 5, 1, 3, 15, 10, 17, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "NodeLabel": "SONOFFMotion Sensor", "VendorName": "SONOFF", "ProductName": "SNZB-06P", "SerialNumber": "0ceff6fffe1a88c6", "SoftwareVersionString": "1.0.6", "Reachable": true}}, "4": {"Identify": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 65533], "EventList": [], "AcceptedCommandList": [0], "GeneratedCommandList": [], "IdentifyTime": 0, "IdentifyType": 0}, "Groups": {"ClusterRevision": 4, "FeatureMap": 1, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 65533], "EventList": [], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "GeneratedCommandList": [0, 1, 2, 3], "NameSupport": 128}, "5": {"65533": 4, "65532": 1, "65531": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 6, 1, 2, 3, 4, 65533], "65530": [], "65529": [0, 1, 2, 3, 4, 5, 6], "65528": [0, 1, 2, 3, 4, 6], "0": 0, "6": 0, "1": 0, "2": 0, "3": false, "4": 128}, "Descriptor": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 3, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "DeviceTypeList": [{"0": 19, "1": 1}], "ServerList": [3, 4, 5, 29, 57], "ClientList": [], "PartsList": [5, 6]}, "BridgedDeviceBasicInformation": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 5, 1, 3, 15, 10, 17, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "NodeLabel": "eWeLinkTemperature/Humidity Sen", "VendorName": "eWeLink", "ProductName": "TH01", "SerialNumber": "00124b002e957108", "SoftwareVersionString": "0.5", "Reachable": true}}, "5": {"Identify": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 
65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 65533], "EventList": [], "AcceptedCommandList": [0], "GeneratedCommandList": [], "IdentifyTime": 0, "IdentifyType": 0}, "Groups": {"ClusterRevision": 4, "FeatureMap": 1, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 65533], "EventList": [], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "GeneratedCommandList": [0, 1, 2, 3], "NameSupport": 128}, "5": {"65533": 4, "65532": 1, "65531": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 6, 1, 2, 3, 4, 65533], "65530": [], "65529": [0, 1, 2, 3, 4, 5, 6], "65528": [0, 1, 2, 3, 4, 6], "0": 0, "6": 0, "1": 0, "2": 0, "3": false, "4": 128}, "TemperatureMeasurement": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "MeasuredValue": 1670, "MinMeasuredValue": -27315, "MaxMeasuredValue": 32767}, "Descriptor": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 3, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "DeviceTypeList": [{"0": 770, "1": 2}], "ServerList": [3, 4, 5, 1026, 29, 57], "ClientList": [], "PartsList": []}, "BridgedDeviceBasicInformation": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 5, 1, 3, 15, 10, 17, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "NodeLabel": "Temperature Sensor", "VendorName": "eWeLink", "ProductName": "TH01", "SerialNumber": "00124b002e957108", "SoftwareVersionString": "0.5", "Reachable": true}}, "6": {"Identify": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 65533], "EventList": [], 
"AcceptedCommandList": [0], "GeneratedCommandList": [], "IdentifyTime": 0, "IdentifyType": 0}, "Groups": {"ClusterRevision": 4, "FeatureMap": 1, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 65533], "EventList": [], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "GeneratedCommandList": [0, 1, 2, 3], "NameSupport": 128}, "5": {"65533": 4, "65532": 1, "65531": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 6, 1, 2, 3, 4, 65533], "65530": [], "65529": [0, 1, 2, 3, 4, 5, 6], "65528": [0, 1, 2, 3, 4, 6], "0": 0, "6": 0, "1": 0, "2": 0, "3": false, "4": 128}, "RelativeHumidityMeasurement": {"ClusterRevision": 3, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "MeasuredValue": 5400, "MinMeasuredValue": 0, "MaxMeasuredValue": 10000}, "Descriptor": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 3, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "DeviceTypeList": [{"0": 775, "1": 2}], "ServerList": [3, 4, 5, 1029, 29, 57], "ClientList": [], "PartsList": []}, "BridgedDeviceBasicInformation": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 5, 1, 3, 15, 10, 17, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "NodeLabel": "Humidity Sensor", "VendorName": "eWeLink", "ProductName": "TH01", "SerialNumber": "00124b002e957108", "SoftwareVersionString": "0.5", "Reachable": true}}}, "attribute_subscriptions": []}} +{"event": "node_added", "data": {"node_id": 2, "date_commissioned": "2024-12-07T16:14:45.024475", "last_interview": "2024-12-07T16:14:45.024488", "interview_version": 6, "available": false, "is_bridge": false, "attributes": {"0": 
{"Groups": {"NameSupport": 128, "FeatureMap": 1, "ClusterRevision": 4, "GeneratedCommandList": [0, 1, 2, 3], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}, "Descriptor": {"DeviceTypeList": [{"0": 22, "1": 1}], "ServerList": [4, 29, 31, 40, 48, 49, 51, 54, 60, 62, 63, 64, 65], "ClientList": [], "PartsList": [1], "FeatureMap": 0, "ClusterRevision": 2, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533]}, "AccessControl": {"Acl": [{"254": 1}, {"1": 5, "2": 2, "3": [112233], "4": null, "254": 6}], "Extension": [], "SubjectsPerAccessControlEntry": 4, "TargetsPerAccessControlEntry": 3, "AccessControlEntriesPerFabric": 4, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533]}, "BasicInformation": {"DataModelRevision": 17, "VendorName": "Meross", "VendorID": 4933, "ProductName": "Smart Plug Mini", "ProductID": 40963, "NodeLabel": "", "Location": "XX", "HardwareVersion": 0, "HardwareVersionString": "9.0", "SoftwareVersion": 1, "SoftwareVersionString": "9.3.26", "ManufacturingDate": "20200101", "PartNumber": "", "ProductURL": "", "ProductLabel": "", "SerialNumber": "510801230701265", "LocalConfigDisabled": false, "Reachable": true, "UniqueID": "07C97DCD2FB0F1D7", "CapabilityMinima": {"0": 3, "1": 3}, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 65528, 65529, 65531, 65532, 65533]}, "GeneralCommissioning": {"Breadcrumb": 0, "BasicCommissioningInfo": {"0": 60, "1": 900}, "RegulatoryConfig": 2, "LocationCapability": 2, "SupportsConcurrentConnection": true, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [1, 3, 5], "AcceptedCommandList": [0, 2, 4], "AttributeList": [0, 1, 2, 3, 4, 65528, 65529, 65531, 
65532, 65533]}, "NetworkCommissioning": {"MaxNetworks": 1, "Networks": [{"0": "Vk1FNUVEMzkx", "1": true}], "ScanMaxTimeSeconds": 10, "ConnectMaxTimeSeconds": 20, "InterfaceEnabled": true, "LastNetworkingStatus": 0, "LastNetworkID": "Vk1FNUVEMzkx", "LastConnectErrorValue": null, "FeatureMap": 1, "ClusterRevision": 1, "GeneratedCommandList": [1, 5, 7], "AcceptedCommandList": [0, 2, 4, 6, 8], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533]}, "GeneralDiagnostics": {"NetworkInterfaces": [{"0": "r0", "1": true, "2": null, "3": null, "4": "SOHpzoP6", "5": ["wKgAJQ=="], "6": ["/oAAAAAAAABK4en//s6D+g=="], "7": 1}], "RebootCount": 10, "UpTime": 3451499, "TotalOperationalHours": 958, "BootReason": 1, "ActiveHardwareFaults": [], "ActiveRadioFaults": [], "ActiveNetworkFaults": [], "TestEventTriggersEnabled": false, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533]}, "WiFiNetworkDiagnostics": {"Bssid": "AAAAAAAA", "SecurityType": 0, "WiFiVersion": 3, "ChannelNumber": 1, "Rssi": -57, "BeaconLostCount": 0, "BeaconRxCount": null, "PacketMulticastRxCount": 0, "PacketMulticastTxCount": 0, "PacketUnicastRxCount": 0, "PacketUnicastTxCount": 0, "CurrentMaxRate": 0, "OverrunCount": 0, "FeatureMap": 3, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 65528, 65529, 65531, 65532, 65533]}, "AdministratorCommissioning": {"WindowStatus": 0, "AdminFabricIndex": null, "AdminVendorId": null, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0, 1, 2], "AttributeList": [0, 1, 2, 65528, 65529, 65531, 65532, 65533]}, "OperationalCredentials": {"NOCs": [{"254": 1}, {"1": 
"FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRAhgkBwEkCAEwCUEEki5SRwR9ldsdDOYz+cXHqBk4+9UtGw3HZs3QwLky9ZwptQ3P3TYPMyk6eRzxitxiO+cuSsjqEuz8xLW88F54PTcKNQEoARgkAgE2AwQCBAEYMAQUW/omsScZAz5Y7CBtD3wzo/KLjBUwBRRom5SXSQjNanFKz0NahjjIpGLZOhgwC0AYyqHAxJG8Yh+ZEhVagGjfjydheYq1P6EgLcmPfjrPAorq7tO3JWSk0N8CEfsTS9qqP6Bg8VheSHK2lvg5ZZoJGA==", "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEMxi8MntoCWqI6x5VcKuHfh+D35EnvsCsl3i+mDmwzpmX+HY4ill38eAIqoGctJoivkaMamSO/Be1Pt/9Jx/cdDcKNQEpARgkAmAwBBRom5SXSQjNanFKz0NahjjIpGLZOjAFFG4SOv6FjsCxKGl81n3lZCDc4gT0GDALQECYOxHQV5GiATWNoxshUxg+Q3fZc829u6b4/ja9kOZRb1IPnuMc5bBO1t6FakyU7ChVw4CY8X2kzKtzovUL2AsY", "254": 6}], "Fabrics": [{"1": "BFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U=", "2": 24582, "3": 36164968694715784, "4": 192150520, "5": "", "254": 1}, {"1": "BDBkphswNPti4De6nQNmGkYH2+oK+uKpUpqwaULfU2KdxOYJYOL2jDOOBK8i1NoaFDdA8qbm8hd7AcAjwoxLxxM=", "2": 4939, "3": 2, "4": 2, "5": "", "254": 6}], "SupportedFabrics": 5, "CommissionedFabrics": 2, "TrustedRootCertificates": ["FTABAQEkAgE3AyyEAlVTLAcGR29vZ2xlLAELTWF0dGVyIFJvb3QnFAEAAAD+////GCYEf9JDKSYFf5Rb5TcGLIQCVVMsBwZHb29nbGUsAQtNYXR0ZXIgUm9vdCcUAQAAAP7///8YJAcBJAgBMAlBBFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U3CjUBKQEkAgEYJAJgMAQUcsIB91cZE7NIygDKe0X0d0ZoyX4wBRRywgH3VxkTs0jKAMp7RfR3RmjJfhgwC0BlFksWat/xjBVhCozpG9cD6cH2d7cRzhM1BRUt8NoVERZ1rFWRzueGhRzdnv2tKWZ0vryyo6Mgm83nswnbVSxvGA==", "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEEMGSmGzA0+2LgN7qdA2YaRgfb6gr64qlSmrBpQt9TYp3E5glg4vaMM44EryLU2hoUN0DypubyF3sBwCPCjEvHEzcKNQEpARgkAmAwBBRuEjr+hY7AsShpfNZ95WQg3OIE9DAFFG4SOv6FjsCxKGl81n3lZCDc4gT0GDALQG5+SU7Xb89dIqDz7VdAFwwEH3mIFOuZtHAKqBTBWlXBQbxnO4SUBLLKjZyGP7tFU7v6kqQWVeEtN4gbZ9i5oi4Y"], "CurrentFabricIndex": 6, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [1, 3, 5, 8], "AcceptedCommandList": [0, 2, 4, 6, 7, 9, 10, 11], "AttributeList": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 
65532, 65533]}, "GroupKeyManagement": {"GroupKeyMap": [], "GroupTable": [], "MaxGroupsPerFabric": 4, "MaxGroupKeysPerFabric": 3, "FeatureMap": 0, "ClusterRevision": 2, "GeneratedCommandList": [2, 5], "AcceptedCommandList": [0, 1, 3, 4], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533]}, "FixedLabel": {"LabelList": [{"0": "room", "1": "bedroom 2"}, {"0": "orientation", "1": "North"}, {"0": "floor", "1": "2"}, {"0": "direction", "1": "up"}], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}, "UserLabel": {"LabelList": [], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}}, "1": {"Identify": {"IdentifyTime": 0, "IdentifyType": 2, "FeatureMap": 0, "ClusterRevision": 4, "GeneratedCommandList": [], "AcceptedCommandList": [0], "AttributeList": [0, 1, 65528, 65529, 65531, 65532, 65533]}, "Groups": {"NameSupport": 128, "FeatureMap": 1, "ClusterRevision": 4, "GeneratedCommandList": [0, 1, 2, 3], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}, "5": {"0": 0, "1": 0, "2": 0, "3": false, "4": 0, "6": 16, "65532": 6, "65533": 4, "65528": [0, 1, 2, 3, 4, 6], "65529": [0, 1, 2, 3, 4, 5, 6], "65531": [0, 1, 2, 3, 4, 6, 65528, 65529, 65531, 65532, 65533]}, "OnOff": {"OnOff": true, "FeatureMap": 0, "ClusterRevision": 4, "GeneratedCommandList": [], "AcceptedCommandList": [0, 1, 2], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}, "Descriptor": {"DeviceTypeList": [{"0": 266, "1": 1}], "ServerList": [3, 4, 5, 6, 29], "ClientList": [], "PartsList": [], "FeatureMap": 0, "ClusterRevision": 2, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533]}}}, "attribute_subscriptions": []}} +{"event": "node_updated", "data": {"node_id": 2, "date_commissioned": 
"2024-12-07T16:14:45.024475", "last_interview": "2024-12-07T16:14:45.024488", "interview_version": 6, "available": true, "is_bridge": false, "attributes": {"0": {"Groups": {"NameSupport": 128, "FeatureMap": 1, "ClusterRevision": 4, "GeneratedCommandList": [0, 1, 2, 3], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}, "Descriptor": {"DeviceTypeList": [{"0": 22, "1": 1}], "ServerList": [4, 29, 31, 40, 48, 49, 51, 54, 60, 62, 63, 64, 65], "ClientList": [], "PartsList": [1], "FeatureMap": 0, "ClusterRevision": 2, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533]}, "AccessControl": {"Acl": [{"1": 5, "2": 2, "3": [112233], "4": null, "254": 6}], "Extension": [], "SubjectsPerAccessControlEntry": 4, "TargetsPerAccessControlEntry": 3, "AccessControlEntriesPerFabric": 4, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533]}, "BasicInformation": {"DataModelRevision": 17, "VendorName": "Meross", "VendorID": 4933, "ProductName": "Smart Plug Mini", "ProductID": 40963, "NodeLabel": "", "Location": "XX", "HardwareVersion": 0, "HardwareVersionString": "9.0", "SoftwareVersion": 1, "SoftwareVersionString": "9.3.26", "ManufacturingDate": "20200101", "PartNumber": "", "ProductURL": "", "ProductLabel": "", "SerialNumber": "510801230701265", "LocalConfigDisabled": false, "Reachable": true, "UniqueID": "07C97DCD2FB0F1D7", "CapabilityMinima": {"0": 3, "1": 3}, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 65528, 65529, 65531, 65532, 65533]}, "GeneralCommissioning": {"Breadcrumb": 0, "BasicCommissioningInfo": {"0": 60, "1": 900}, "RegulatoryConfig": 2, "LocationCapability": 2, "SupportsConcurrentConnection": true, "FeatureMap": 0, 
"ClusterRevision": 1, "GeneratedCommandList": [1, 3, 5], "AcceptedCommandList": [0, 2, 4], "AttributeList": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533]}, "NetworkCommissioning": {"MaxNetworks": 1, "Networks": [{"0": "Vk1FNUVEMzkx", "1": true}], "ScanMaxTimeSeconds": 10, "ConnectMaxTimeSeconds": 20, "InterfaceEnabled": true, "LastNetworkingStatus": 0, "LastNetworkID": "Vk1FNUVEMzkx", "LastConnectErrorValue": null, "FeatureMap": 1, "ClusterRevision": 1, "GeneratedCommandList": [1, 5, 7], "AcceptedCommandList": [0, 2, 4, 6, 8], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533]}, "GeneralDiagnostics": {"NetworkInterfaces": [{"0": "r0", "1": true, "2": null, "3": null, "4": "SOHpzoP6", "5": ["wKgAJQ=="], "6": ["/oAAAAAAAABK4en//s6D+g=="], "7": 1}], "RebootCount": 10, "UpTime": 3451501, "TotalOperationalHours": 958, "BootReason": 1, "ActiveHardwareFaults": [], "ActiveRadioFaults": [], "ActiveNetworkFaults": [], "TestEventTriggersEnabled": false, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533]}, "WiFiNetworkDiagnostics": {"Bssid": "AAAAAAAA", "SecurityType": 0, "WiFiVersion": 3, "ChannelNumber": 1, "Rssi": -57, "BeaconLostCount": 0, "BeaconRxCount": null, "PacketMulticastRxCount": 0, "PacketMulticastTxCount": 0, "PacketUnicastRxCount": 0, "PacketUnicastTxCount": 0, "CurrentMaxRate": 0, "OverrunCount": 0, "FeatureMap": 3, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 65528, 65529, 65531, 65532, 65533]}, "AdministratorCommissioning": {"WindowStatus": 0, "AdminFabricIndex": null, "AdminVendorId": null, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0, 1, 2], "AttributeList": [0, 1, 2, 65528, 65529, 65531, 65532, 65533]}, "OperationalCredentials": {"NOCs": [{"1": 
"FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRAhgkBwEkCAEwCUEEki5SRwR9ldsdDOYz+cXHqBk4+9UtGw3HZs3QwLky9ZwptQ3P3TYPMyk6eRzxitxiO+cuSsjqEuz8xLW88F54PTcKNQEoARgkAgE2AwQCBAEYMAQUW/omsScZAz5Y7CBtD3wzo/KLjBUwBRRom5SXSQjNanFKz0NahjjIpGLZOhgwC0AYyqHAxJG8Yh+ZEhVagGjfjydheYq1P6EgLcmPfjrPAorq7tO3JWSk0N8CEfsTS9qqP6Bg8VheSHK2lvg5ZZoJGA==", "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEMxi8MntoCWqI6x5VcKuHfh+D35EnvsCsl3i+mDmwzpmX+HY4ill38eAIqoGctJoivkaMamSO/Be1Pt/9Jx/cdDcKNQEpARgkAmAwBBRom5SXSQjNanFKz0NahjjIpGLZOjAFFG4SOv6FjsCxKGl81n3lZCDc4gT0GDALQECYOxHQV5GiATWNoxshUxg+Q3fZc829u6b4/ja9kOZRb1IPnuMc5bBO1t6FakyU7ChVw4CY8X2kzKtzovUL2AsY", "254": 6}], "Fabrics": [{"1": "BDBkphswNPti4De6nQNmGkYH2+oK+uKpUpqwaULfU2KdxOYJYOL2jDOOBK8i1NoaFDdA8qbm8hd7AcAjwoxLxxM=", "2": 4939, "3": 2, "4": 2, "5": "", "254": 6}], "SupportedFabrics": 5, "CommissionedFabrics": 2, "TrustedRootCertificates": ["FTABAQEkAgE3AyyEAlVTLAcGR29vZ2xlLAELTWF0dGVyIFJvb3QnFAEAAAD+////GCYEf9JDKSYFf5Rb5TcGLIQCVVMsBwZHb29nbGUsAQtNYXR0ZXIgUm9vdCcUAQAAAP7///8YJAcBJAgBMAlBBFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U3CjUBKQEkAgEYJAJgMAQUcsIB91cZE7NIygDKe0X0d0ZoyX4wBRRywgH3VxkTs0jKAMp7RfR3RmjJfhgwC0BlFksWat/xjBVhCozpG9cD6cH2d7cRzhM1BRUt8NoVERZ1rFWRzueGhRzdnv2tKWZ0vryyo6Mgm83nswnbVSxvGA==", "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEEMGSmGzA0+2LgN7qdA2YaRgfb6gr64qlSmrBpQt9TYp3E5glg4vaMM44EryLU2hoUN0DypubyF3sBwCPCjEvHEzcKNQEpARgkAmAwBBRuEjr+hY7AsShpfNZ95WQg3OIE9DAFFG4SOv6FjsCxKGl81n3lZCDc4gT0GDALQG5+SU7Xb89dIqDz7VdAFwwEH3mIFOuZtHAKqBTBWlXBQbxnO4SUBLLKjZyGP7tFU7v6kqQWVeEtN4gbZ9i5oi4Y"], "CurrentFabricIndex": 6, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [1, 3, 5, 8], "AcceptedCommandList": [0, 2, 4, 6, 7, 9, 10, 11], "AttributeList": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533]}, "GroupKeyManagement": {"GroupKeyMap": [], "GroupTable": [], "MaxGroupsPerFabric": 4, "MaxGroupKeysPerFabric": 3, "FeatureMap": 0, "ClusterRevision": 2, 
"GeneratedCommandList": [2, 5], "AcceptedCommandList": [0, 1, 3, 4], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533]}, "FixedLabel": {"LabelList": [{"0": "room", "1": "bedroom 2"}, {"0": "orientation", "1": "North"}, {"0": "floor", "1": "2"}, {"0": "direction", "1": "up"}], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}, "UserLabel": {"LabelList": [], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}}, "1": {"Identify": {"IdentifyTime": 0, "IdentifyType": 2, "FeatureMap": 0, "ClusterRevision": 4, "GeneratedCommandList": [], "AcceptedCommandList": [0], "AttributeList": [0, 1, 65528, 65529, 65531, 65532, 65533]}, "Groups": {"NameSupport": 128, "FeatureMap": 1, "ClusterRevision": 4, "GeneratedCommandList": [0, 1, 2, 3], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}, "5": {"0": 0, "1": 0, "2": 0, "3": false, "4": 0, "6": 16, "65532": 6, "65533": 4, "65528": [0, 1, 2, 3, 4, 6], "65529": [0, 1, 2, 3, 4, 5, 6], "65531": [0, 1, 2, 3, 4, 6, 65528, 65529, 65531, 65532, 65533]}, "OnOff": {"OnOff": true, "FeatureMap": 0, "ClusterRevision": 4, "GeneratedCommandList": [], "AcceptedCommandList": [0, 1, 2], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}, "Descriptor": {"DeviceTypeList": [{"0": 266, "1": 1}], "ServerList": [3, 4, 5, 6, 29], "ClientList": [], "PartsList": [], "FeatureMap": 0, "ClusterRevision": 2, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533]}}}, "attribute_subscriptions": []}} +{"fabric_id": 2, "compressed_fabric_id": 6710718474042576632, "schema_version": 11, "min_supported_schema_version": 9, "sdk_version": "2024.9.0", "wifi_credentials_set": false, "thread_credentials_set": false, "bluetooth_enabled": false} 
+{"message_id": "744597", "result": [{"node_id": 1, "date_commissioned": "2024-12-07T16:12:50.085498", "last_interview": "2024-12-07T16:12:50.085515", "interview_version": 6, "available": true, "is_bridge": true, "attributes": {"0": {"Descriptor": {"DeviceTypeList": [{"0": 22, "1": 1}], "ServerList": [29, 31, 40, 48, 49, 51, 55, 60, 62, 63, 65], "ClientList": [31], "PartsList": [1, 3, 4, 5, 6], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533]}, "AccessControl": {"Acl": [{"1": 5, "2": 2, "3": [112233], "4": null, "254": 10}], "Extension": [], "SubjectsPerAccessControlEntry": 4, "TargetsPerAccessControlEntry": 3, "AccessControlEntriesPerFabric": 4, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [0], "AttributeList": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533]}, "BasicInformation": {"DataModelRevision": 1, "VendorName": "SONOFF\n", "VendorID": 4897, "ProductName": "Smart Home Hub\n", "ProductID": 36, "NodeLabel": "", "Location": "XX", "HardwareVersion": 0, "HardwareVersionString": "TEST_VERSION", "SoftwareVersion": 1, "SoftwareVersionString": "1.0", "ManufacturingDate": "20200101", "PartNumber": "", "ProductURL": "", "ProductLabel": "", "LocalConfigDisabled": false, "UniqueID": "C000641B34E4798F", "CapabilityMinima": {"0": 3, "1": 65535}, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [0, 1, 2], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 16, 18, 19, 65528, 65529, 65530, 65531, 65532, 65533]}, "GeneralCommissioning": {"Breadcrumb": 0, "BasicCommissioningInfo": {"0": 60, "1": 900}, "RegulatoryConfig": 2, "LocationCapability": 2, "SupportsConcurrentConnection": true, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [1, 3, 5], "AcceptedCommandList": [0, 2, 4], 
"EventList": [], "AttributeList": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533]}, "NetworkCommissioning": {"MaxNetworks": 1, "Networks": [{"0": "ZXRoMA==", "1": true}], "ScanMaxTimeSeconds": 0, "ConnectMaxTimeSeconds": 0, "InterfaceEnabled": true, "LastNetworkingStatus": null, "LastNetworkID": null, "LastConnectErrorValue": null, "FeatureMap": 4, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65530, 65531, 65532, 65533]}, "GeneralDiagnostics": {"NetworkInterfaces": [{"0": "p2p0", "1": false, "2": null, "3": null, "4": "0icDxcnj", "5": [], "6": [], "7": 1}, {"0": "wlan0", "1": true, "2": null, "3": null, "4": "0CcDxcnj", "5": ["wKgAJw=="], "6": ["KgZZAIBtHAB5WwuUsUVjyw==", "/SWs6wutAACpdONJpLEZtQ==", "/oAAAAAAAABIed7zJQrcsQ=="], "7": 1}, {"0": "sit0", "1": false, "2": null, "3": null, "4": "AAAAAAAA", "5": [], "6": [], "7": 0}, {"0": "eth0", "1": false, "2": null, "3": null, "4": "0CcDxcni", "5": [], "6": [], "7": 2}, {"0": "lo", "1": true, "2": null, "3": null, "4": "AAAAAAAA", "5": ["fwAAAQ=="], "6": ["AAAAAAAAAAAAAAAAAAAAAQ=="], "7": 0}], "RebootCount": 7, "UpTime": 3220864, "TotalOperationalHours": 894, "BootReason": 0, "ActiveHardwareFaults": [], "ActiveRadioFaults": [], "ActiveNetworkFaults": [], "TestEventTriggersEnabled": false, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0, 0], "EventList": [3], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65530, 65531, 65532, 65533]}, "EthernetNetworkDiagnostics": {"PHYRate": 0, "FullDuplex": false, "PacketRxCount": 0, "PacketTxCount": 0, "TxErrCount": 0, "CollisionCount": 0, "OverrunCount": 0, "CarrierDetect": null, "TimeSinceReset": 3220864, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65530, 65531, 65532, 65533]}, 
"AdministratorCommissioning": {"WindowStatus": 0, "AdminFabricIndex": null, "AdminVendorId": null, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0, 1, 2], "EventList": [], "AttributeList": [0, 1, 2, 65528, 65529, 65530, 65531, 65532, 65533]}, "OperationalCredentials": {"NOCs": [{"1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRARgkBwEkCAEwCUEEE+PSMypUYP8rT/WHSOMhhGw4aZL2z6HvdvEZJNEvFkmDGPt7zfz2CHXWXdc3G3cfDC0Y/CwZMK8g2wWUVWw4lzcKNQEoARgkAgE2AwQCBAEYMAQUsO6DA3InCYmNpEigLz8pzd8e/ZgwBRRom5SXSQjNanFKz0NahjjIpGLZOhgwC0AaDm3nKCD552V2kLTY8F21MD+cZntwnwd8fhBWBrOzsl/Bj17k/9yWdl0xRqfKnOMnB/2UhoQlAjL2ta8MeLZCGA==", "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEMxi8MntoCWqI6x5VcKuHfh+D35EnvsCsl3i+mDmwzpmX+HY4ill38eAIqoGctJoivkaMamSO/Be1Pt/9Jx/cdDcKNQEpARgkAmAwBBRom5SXSQjNanFKz0NahjjIpGLZOjAFFG4SOv6FjsCxKGl81n3lZCDc4gT0GDALQECYOxHQV5GiATWNoxshUxg+Q3fZc829u6b4/ja9kOZRb1IPnuMc5bBO1t6FakyU7ChVw4CY8X2kzKtzovUL2AsY", "254": 10}], "Fabrics": [{"1": "BDBkphswNPti4De6nQNmGkYH2+oK+uKpUpqwaULfU2KdxOYJYOL2jDOOBK8i1NoaFDdA8qbm8hd7AcAjwoxLxxM=", "2": 4939, "3": 2, "4": 1, "5": "", "254": 10}], "SupportedFabrics": 15, "CommissionedFabrics": 2, "TrustedRootCertificates": ["FTABAQEkAgE3AyyEAlVTLAcGR29vZ2xlLAELTWF0dGVyIFJvb3QnFAEAAAD+////GCYEf9JDKSYFf5Rb5TcGLIQCVVMsBwZHb29nbGUsAQtNYXR0ZXIgUm9vdCcUAQAAAP7///8YJAcBJAgBMAlBBFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U3CjUBKQEkAgEYJAJgMAQUcsIB91cZE7NIygDKe0X0d0ZoyX4wBRRywgH3VxkTs0jKAMp7RfR3RmjJfhgwC0BlFksWat/xjBVhCozpG9cD6cH2d7cRzhM1BRUt8NoVERZ1rFWRzueGhRzdnv2tKWZ0vryyo6Mgm83nswnbVSxvGA==", "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEEMGSmGzA0+2LgN7qdA2YaRgfb6gr64qlSmrBpQt9TYp3E5glg4vaMM44EryLU2hoUN0DypubyF3sBwCPCjEvHEzcKNQEpARgkAmAwBBRuEjr+hY7AsShpfNZ95WQg3OIE9DAFFG4SOv6FjsCxKGl81n3lZCDc4gT0GDALQG5+SU7Xb89dIqDz7VdAFwwEH3mIFOuZtHAKqBTBWlXBQbxnO4SUBLLKjZyGP7tFU7v6kqQWVeEtN4gbZ9i5oi4Y"], "CurrentFabricIndex": 10, "FeatureMap": 0, 
"ClusterRevision": 1, "GeneratedCommandList": [1, 3, 5, 8], "AcceptedCommandList": [0, 2, 4, 6, 7, 9, 10, 11], "EventList": [], "AttributeList": [0, 1, 2, 3, 4, 5, 65528, 65529, 65530, 65531, 65532, 65533]}, "GroupKeyManagement": {"GroupKeyMap": [], "GroupTable": [], "MaxGroupsPerFabric": 4, "MaxGroupKeysPerFabric": 3, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [2, 5], "AcceptedCommandList": [0, 1, 3, 4], "EventList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533]}, "UserLabel": {"LabelList": [], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 65528, 65529, 65530, 65531, 65532, 65533]}}, "1": {"Descriptor": {"DeviceTypeList": [{"0": 14, "1": 1}], "ServerList": [29], "ClientList": [], "PartsList": [3, 4, 5, 6], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533]}}, "3": {"Identify": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 65533], "EventList": [], "AcceptedCommandList": [0], "GeneratedCommandList": [], "IdentifyTime": 0, "IdentifyType": 0}, "Groups": {"ClusterRevision": 4, "FeatureMap": 1, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 65533], "EventList": [], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "GeneratedCommandList": [0, 1, 2, 3], "NameSupport": 128}, "5": {"65533": 4, "65532": 1, "65531": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 6, 1, 2, 3, 4, 65533], "65530": [], "65529": [0, 1, 2, 3, 4, 5, 6], "65528": [0, 1, 2, 3, 4, 6], "0": 0, "6": 0, "1": 0, "2": 0, "3": false, "4": 128}, "OccupancySensing": {"ClusterRevision": 3, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 
2, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "Occupancy": 1, "OccupancySensorType": 0, "OccupancySensorTypeBitmap": 1}, "Descriptor": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 3, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "DeviceTypeList": [{"0": 19, "1": 1}, {"0": 263, "1": 2}], "ServerList": [3, 4, 5, 1030, 29, 57], "ClientList": [], "PartsList": []}, "BridgedDeviceBasicInformation": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 5, 1, 3, 15, 10, 17, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "NodeLabel": "SONOFFMotion Sensor", "VendorName": "SONOFF", "ProductName": "SNZB-06P", "SerialNumber": "0ceff6fffe1a88c6", "SoftwareVersionString": "1.0.6", "Reachable": true}}, "4": {"Identify": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 65533], "EventList": [], "AcceptedCommandList": [0], "GeneratedCommandList": [], "IdentifyTime": 0, "IdentifyType": 0}, "Groups": {"ClusterRevision": 4, "FeatureMap": 1, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 65533], "EventList": [], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "GeneratedCommandList": [0, 1, 2, 3], "NameSupport": 128}, "5": {"65533": 4, "65532": 1, "65531": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 6, 1, 2, 3, 4, 65533], "65530": [], "65529": [0, 1, 2, 3, 4, 5, 6], "65528": [0, 1, 2, 3, 4, 6], "0": 0, "6": 0, "1": 0, "2": 0, "3": false, "4": 128}, "Descriptor": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 3, 65533], "EventList": [], "AcceptedCommandList": [], 
"GeneratedCommandList": [], "DeviceTypeList": [{"0": 19, "1": 1}], "ServerList": [3, 4, 5, 29, 57], "ClientList": [], "PartsList": [5, 6]}, "BridgedDeviceBasicInformation": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 5, 1, 3, 15, 10, 17, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "NodeLabel": "eWeLinkTemperature/Humidity Sen", "VendorName": "eWeLink", "ProductName": "TH01", "SerialNumber": "00124b002e957108", "SoftwareVersionString": "0.5", "Reachable": true}}, "5": {"Identify": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 65533], "EventList": [], "AcceptedCommandList": [0], "GeneratedCommandList": [], "IdentifyTime": 0, "IdentifyType": 0}, "Groups": {"ClusterRevision": 4, "FeatureMap": 1, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 65533], "EventList": [], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "GeneratedCommandList": [0, 1, 2, 3], "NameSupport": 128}, "5": {"65533": 4, "65532": 1, "65531": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 6, 1, 2, 3, 4, 65533], "65530": [], "65529": [0, 1, 2, 3, 4, 5, 6], "65528": [0, 1, 2, 3, 4, 6], "0": 0, "6": 0, "1": 0, "2": 0, "3": false, "4": 128}, "TemperatureMeasurement": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "MeasuredValue": 1670, "MinMeasuredValue": -27315, "MaxMeasuredValue": 32767}, "Descriptor": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 3, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "DeviceTypeList": [{"0": 770, "1": 2}], 
"ServerList": [3, 4, 5, 1026, 29, 57], "ClientList": [], "PartsList": []}, "BridgedDeviceBasicInformation": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 5, 1, 3, 15, 10, 17, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "NodeLabel": "Temperature Sensor", "VendorName": "eWeLink", "ProductName": "TH01", "SerialNumber": "00124b002e957108", "SoftwareVersionString": "0.5", "Reachable": true}}, "6": {"Identify": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 65533], "EventList": [], "AcceptedCommandList": [0], "GeneratedCommandList": [], "IdentifyTime": 0, "IdentifyType": 0}, "Groups": {"ClusterRevision": 4, "FeatureMap": 1, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 65533], "EventList": [], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "GeneratedCommandList": [0, 1, 2, 3], "NameSupport": 128}, "5": {"65533": 4, "65532": 1, "65531": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 6, 1, 2, 3, 4, 65533], "65530": [], "65529": [0, 1, 2, 3, 4, 5, 6], "65528": [0, 1, 2, 3, 4, 6], "0": 0, "6": 0, "1": 0, "2": 0, "3": false, "4": 128}, "RelativeHumidityMeasurement": {"ClusterRevision": 3, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "MeasuredValue": 5400, "MinMeasuredValue": 0, "MaxMeasuredValue": 10000}, "Descriptor": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 3, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "DeviceTypeList": [{"0": 775, "1": 2}], "ServerList": [3, 4, 5, 1029, 29, 57], "ClientList": [], "PartsList": []}, 
"BridgedDeviceBasicInformation": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 5, 1, 3, 15, 10, 17, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "NodeLabel": "Humidity Sensor", "VendorName": "eWeLink", "ProductName": "TH01", "SerialNumber": "00124b002e957108", "SoftwareVersionString": "0.5", "Reachable": true}}}, "attribute_subscriptions": []}, {"node_id": 2, "date_commissioned": "2024-12-07T16:14:45.024475", "last_interview": "2024-12-07T16:14:45.024488", "interview_version": 6, "available": true, "is_bridge": false, "attributes": {"0": {"Groups": {"NameSupport": 128, "FeatureMap": 1, "ClusterRevision": 4, "GeneratedCommandList": [0, 1, 2, 3], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}, "Descriptor": {"DeviceTypeList": [{"0": 22, "1": 1}], "ServerList": [4, 29, 31, 40, 48, 49, 51, 54, 60, 62, 63, 64, 65], "ClientList": [], "PartsList": [1], "FeatureMap": 0, "ClusterRevision": 2, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533]}, "AccessControl": {"Acl": [{"1": 5, "2": 2, "3": [112233], "4": null, "254": 6}], "Extension": [], "SubjectsPerAccessControlEntry": 4, "TargetsPerAccessControlEntry": 3, "AccessControlEntriesPerFabric": 4, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533]}, "BasicInformation": {"DataModelRevision": 17, "VendorName": "Meross", "VendorID": 4933, "ProductName": "Smart Plug Mini", "ProductID": 40963, "NodeLabel": "", "Location": "XX", "HardwareVersion": 0, "HardwareVersionString": "9.0", "SoftwareVersion": 1, "SoftwareVersionString": "9.3.26", "ManufacturingDate": "20200101", "PartNumber": "", "ProductURL": "", "ProductLabel": "", "SerialNumber": "510801230701265", "LocalConfigDisabled": false, 
"Reachable": true, "UniqueID": "07C97DCD2FB0F1D7", "CapabilityMinima": {"0": 3, "1": 3}, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 65528, 65529, 65531, 65532, 65533]}, "GeneralCommissioning": {"Breadcrumb": 0, "BasicCommissioningInfo": {"0": 60, "1": 900}, "RegulatoryConfig": 2, "LocationCapability": 2, "SupportsConcurrentConnection": true, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [1, 3, 5], "AcceptedCommandList": [0, 2, 4], "AttributeList": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533]}, "NetworkCommissioning": {"MaxNetworks": 1, "Networks": [{"0": "Vk1FNUVEMzkx", "1": true}], "ScanMaxTimeSeconds": 10, "ConnectMaxTimeSeconds": 20, "InterfaceEnabled": true, "LastNetworkingStatus": 0, "LastNetworkID": "Vk1FNUVEMzkx", "LastConnectErrorValue": null, "FeatureMap": 1, "ClusterRevision": 1, "GeneratedCommandList": [1, 5, 7], "AcceptedCommandList": [0, 2, 4, 6, 8], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533]}, "GeneralDiagnostics": {"NetworkInterfaces": [{"0": "r0", "1": true, "2": null, "3": null, "4": "SOHpzoP6", "5": ["wKgAJQ=="], "6": ["/oAAAAAAAABK4en//s6D+g=="], "7": 1}], "RebootCount": 10, "UpTime": 3451501, "TotalOperationalHours": 958, "BootReason": 1, "ActiveHardwareFaults": [], "ActiveRadioFaults": [], "ActiveNetworkFaults": [], "TestEventTriggersEnabled": false, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533]}, "WiFiNetworkDiagnostics": {"Bssid": "AAAAAAAA", "SecurityType": 0, "WiFiVersion": 3, "ChannelNumber": 1, "Rssi": -57, "BeaconLostCount": 0, "BeaconRxCount": null, "PacketMulticastRxCount": 0, "PacketMulticastTxCount": 0, "PacketUnicastRxCount": 0, "PacketUnicastTxCount": 0, "CurrentMaxRate": 0, "OverrunCount": 0, "FeatureMap": 3, 
"ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 65528, 65529, 65531, 65532, 65533]}, "AdministratorCommissioning": {"WindowStatus": 0, "AdminFabricIndex": null, "AdminVendorId": null, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0, 1, 2], "AttributeList": [0, 1, 2, 65528, 65529, 65531, 65532, 65533]}, "OperationalCredentials": {"NOCs": [{"1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRAhgkBwEkCAEwCUEEki5SRwR9ldsdDOYz+cXHqBk4+9UtGw3HZs3QwLky9ZwptQ3P3TYPMyk6eRzxitxiO+cuSsjqEuz8xLW88F54PTcKNQEoARgkAgE2AwQCBAEYMAQUW/omsScZAz5Y7CBtD3wzo/KLjBUwBRRom5SXSQjNanFKz0NahjjIpGLZOhgwC0AYyqHAxJG8Yh+ZEhVagGjfjydheYq1P6EgLcmPfjrPAorq7tO3JWSk0N8CEfsTS9qqP6Bg8VheSHK2lvg5ZZoJGA==", "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEMxi8MntoCWqI6x5VcKuHfh+D35EnvsCsl3i+mDmwzpmX+HY4ill38eAIqoGctJoivkaMamSO/Be1Pt/9Jx/cdDcKNQEpARgkAmAwBBRom5SXSQjNanFKz0NahjjIpGLZOjAFFG4SOv6FjsCxKGl81n3lZCDc4gT0GDALQECYOxHQV5GiATWNoxshUxg+Q3fZc829u6b4/ja9kOZRb1IPnuMc5bBO1t6FakyU7ChVw4CY8X2kzKtzovUL2AsY", "254": 6}], "Fabrics": [{"1": "BDBkphswNPti4De6nQNmGkYH2+oK+uKpUpqwaULfU2KdxOYJYOL2jDOOBK8i1NoaFDdA8qbm8hd7AcAjwoxLxxM=", "2": 4939, "3": 2, "4": 2, "5": "", "254": 6}], "SupportedFabrics": 5, "CommissionedFabrics": 2, "TrustedRootCertificates": ["FTABAQEkAgE3AyyEAlVTLAcGR29vZ2xlLAELTWF0dGVyIFJvb3QnFAEAAAD+////GCYEf9JDKSYFf5Rb5TcGLIQCVVMsBwZHb29nbGUsAQtNYXR0ZXIgUm9vdCcUAQAAAP7///8YJAcBJAgBMAlBBFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U3CjUBKQEkAgEYJAJgMAQUcsIB91cZE7NIygDKe0X0d0ZoyX4wBRRywgH3VxkTs0jKAMp7RfR3RmjJfhgwC0BlFksWat/xjBVhCozpG9cD6cH2d7cRzhM1BRUt8NoVERZ1rFWRzueGhRzdnv2tKWZ0vryyo6Mgm83nswnbVSxvGA==", 
"FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEEMGSmGzA0+2LgN7qdA2YaRgfb6gr64qlSmrBpQt9TYp3E5glg4vaMM44EryLU2hoUN0DypubyF3sBwCPCjEvHEzcKNQEpARgkAmAwBBRuEjr+hY7AsShpfNZ95WQg3OIE9DAFFG4SOv6FjsCxKGl81n3lZCDc4gT0GDALQG5+SU7Xb89dIqDz7VdAFwwEH3mIFOuZtHAKqBTBWlXBQbxnO4SUBLLKjZyGP7tFU7v6kqQWVeEtN4gbZ9i5oi4Y"], "CurrentFabricIndex": 6, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [1, 3, 5, 8], "AcceptedCommandList": [0, 2, 4, 6, 7, 9, 10, 11], "AttributeList": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533]}, "GroupKeyManagement": {"GroupKeyMap": [], "GroupTable": [], "MaxGroupsPerFabric": 4, "MaxGroupKeysPerFabric": 3, "FeatureMap": 0, "ClusterRevision": 2, "GeneratedCommandList": [2, 5], "AcceptedCommandList": [0, 1, 3, 4], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533]}, "FixedLabel": {"LabelList": [{"0": "room", "1": "bedroom 2"}, {"0": "orientation", "1": "North"}, {"0": "floor", "1": "2"}, {"0": "direction", "1": "up"}], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}, "UserLabel": {"LabelList": [], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}}, "1": {"Identify": {"IdentifyTime": 0, "IdentifyType": 2, "FeatureMap": 0, "ClusterRevision": 4, "GeneratedCommandList": [], "AcceptedCommandList": [0], "AttributeList": [0, 1, 65528, 65529, 65531, 65532, 65533]}, "Groups": {"NameSupport": 128, "FeatureMap": 1, "ClusterRevision": 4, "GeneratedCommandList": [0, 1, 2, 3], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}, "5": {"0": 0, "1": 0, "2": 0, "3": false, "4": 0, "6": 16, "65532": 6, "65533": 4, "65528": [0, 1, 2, 3, 4, 6], "65529": [0, 1, 2, 3, 4, 5, 6], "65531": [0, 1, 2, 3, 4, 6, 65528, 65529, 65531, 65532, 65533]}, "OnOff": {"OnOff": true, "FeatureMap": 0, 
"ClusterRevision": 4, "GeneratedCommandList": [], "AcceptedCommandList": [0, 1, 2], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}, "Descriptor": {"DeviceTypeList": [{"0": 266, "1": 1}], "ServerList": [3, 4, 5, 6, 29], "ClientList": [], "PartsList": [], "FeatureMap": 0, "ClusterRevision": 2, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533]}}}, "attribute_subscriptions": []}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 0}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": false}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 1}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": true}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 0}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": false}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 1}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": true}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 0}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": false}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 1}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": true}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 0}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": false}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 1}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": true}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 2481}}}]} +{"event": "attribute_updated", "data": [1, {"5": 
{"TemperatureMeasurement": {"MeasuredValue": 2576}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 2832}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 2780}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 2597}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 2397}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 2307}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 2243}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 0}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": false}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 2188}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 2105}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 2046}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 1990}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 1939}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 1886}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 1835}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 1782}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 1729}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 1}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": true}}}]} +{"event": 
"attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 0}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": false}}}]} +{"event": "attribute_updated", "data": [1, {"6": {"RelativeHumidityMeasurement": {"MeasuredValue": 9600}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 1678}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 1}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": true}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 0}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": false}}}]} +{"event": "attribute_updated", "data": [1, {"6": {"RelativeHumidityMeasurement": {"MeasuredValue": 9300}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 1}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": true}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 0}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": false}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 1}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": true}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 0}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": false}}}]} +{"event": "attribute_updated", "data": [1, {"6": {"RelativeHumidityMeasurement": {"MeasuredValue": 9000}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 1626}}}]} +{"event": "attribute_updated", "data": [1, {"6": {"RelativeHumidityMeasurement": {"MeasuredValue": 8700}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 1}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": 
{"OnOff": true}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 0}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": false}}}]} +{"event": "attribute_updated", "data": [1, {"6": {"RelativeHumidityMeasurement": {"MeasuredValue": 8400}}}]} +{"event": "attribute_updated", "data": [1, {"6": {"RelativeHumidityMeasurement": {"MeasuredValue": 8100}}}]} +{"event": "attribute_updated", "data": [1, {"6": {"RelativeHumidityMeasurement": {"MeasuredValue": 7800}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 1}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": true}}}]} +{"event": "attribute_updated", "data": [1, {"6": {"RelativeHumidityMeasurement": {"MeasuredValue": 7400}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 0}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": false}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 1}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": true}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 1678}}}]} +{"event": "attribute_updated", "data": [1, {"6": {"RelativeHumidityMeasurement": {"MeasuredValue": 7100}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 0}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": false}}}]} +{"event": "attribute_updated", "data": [1, {"3": {"OccupancySensing": {"Occupancy": 1}}}]} +{"event": "attribute_updated", "data": [2, {"1": {"OnOff": {"OnOff": true}}}]} +{"event": "attribute_updated", "data": [1, {"6": {"RelativeHumidityMeasurement": {"MeasuredValue": 6800}}}]} +{"event": "attribute_updated", "data": [1, {"6": {"RelativeHumidityMeasurement": {"MeasuredValue": 6500}}}]} +{"event": "attribute_updated", "data": [1, {"6": 
{"RelativeHumidityMeasurement": {"MeasuredValue": 6200}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 1729}}}]} +{"event": "attribute_updated", "data": [1, {"6": {"RelativeHumidityMeasurement": {"MeasuredValue": 5900}}}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 1773}}}]} +{"fabric_id": 2, "compressed_fabric_id": 6710718474042576632, "schema_version": 11, "min_supported_schema_version": 9, "sdk_version": "2024.9.0", "wifi_credentials_set": false, "thread_credentials_set": false, "bluetooth_enabled": false} +{"message_id": "2904344", "result": [{"node_id": 1, "date_commissioned": "2024-12-07T16:12:50.085498", "last_interview": "2024-12-07T16:12:50.085515", "interview_version": 6, "available": true, "is_bridge": true, "attributes": {"0": {"Descriptor": {"DeviceTypeList": [{"0": 22, "1": 1}], "ServerList": [29, 31, 40, 48, 49, 51, 55, 60, 62, 63, 65], "ClientList": [31], "PartsList": [1, 3, 4, 5, 6], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533]}, "AccessControl": {"Acl": [{"1": 5, "2": 2, "3": [112233], "4": null, "254": 10}], "Extension": [], "SubjectsPerAccessControlEntry": 4, "TargetsPerAccessControlEntry": 3, "AccessControlEntriesPerFabric": 4, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [0], "AttributeList": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533]}, "BasicInformation": {"DataModelRevision": 1, "VendorName": "SONOFF\n", "VendorID": 4897, "ProductName": "Smart Home Hub\n", "ProductID": 36, "NodeLabel": "", "Location": "XX", "HardwareVersion": 0, "HardwareVersionString": "TEST_VERSION", "SoftwareVersion": 1, "SoftwareVersionString": "1.0", "ManufacturingDate": "20200101", "PartNumber": "", "ProductURL": "", "ProductLabel": "", 
"LocalConfigDisabled": false, "UniqueID": "C000641B34E4798F", "CapabilityMinima": {"0": 3, "1": 65535}, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [0, 1, 2], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 16, 18, 19, 65528, 65529, 65530, 65531, 65532, 65533]}, "GeneralCommissioning": {"Breadcrumb": 0, "BasicCommissioningInfo": {"0": 60, "1": 900}, "RegulatoryConfig": 2, "LocationCapability": 2, "SupportsConcurrentConnection": true, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [1, 3, 5], "AcceptedCommandList": [0, 2, 4], "EventList": [], "AttributeList": [0, 1, 2, 3, 4, 65528, 65529, 65530, 65531, 65532, 65533]}, "NetworkCommissioning": {"MaxNetworks": 1, "Networks": [{"0": "ZXRoMA==", "1": true}], "ScanMaxTimeSeconds": 0, "ConnectMaxTimeSeconds": 0, "InterfaceEnabled": true, "LastNetworkingStatus": null, "LastNetworkID": null, "LastConnectErrorValue": null, "FeatureMap": 4, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65530, 65531, 65532, 65533]}, "GeneralDiagnostics": {"NetworkInterfaces": [{"0": "p2p0", "1": false, "2": null, "3": null, "4": "0icDxcnj", "5": [], "6": [], "7": 1}, {"0": "wlan0", "1": true, "2": null, "3": null, "4": "0CcDxcnj", "5": ["wKgAJw=="], "6": ["KgZZAIBtHAB5WwuUsUVjyw==", "/SWs6wutAACpdONJpLEZtQ==", "/oAAAAAAAABIed7zJQrcsQ=="], "7": 1}, {"0": "sit0", "1": false, "2": null, "3": null, "4": "AAAAAAAA", "5": [], "6": [], "7": 0}, {"0": "eth0", "1": false, "2": null, "3": null, "4": "0CcDxcni", "5": [], "6": [], "7": 2}, {"0": "lo", "1": true, "2": null, "3": null, "4": "AAAAAAAA", "5": ["fwAAAQ=="], "6": ["AAAAAAAAAAAAAAAAAAAAAQ=="], "7": 0}], "RebootCount": 7, "UpTime": 3220864, "TotalOperationalHours": 894, "BootReason": 0, "ActiveHardwareFaults": [], "ActiveRadioFaults": [], "ActiveNetworkFaults": [], "TestEventTriggersEnabled": false, 
"FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0, 0], "EventList": [3], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65530, 65531, 65532, 65533]}, "EthernetNetworkDiagnostics": {"PHYRate": 0, "FullDuplex": false, "PacketRxCount": 0, "PacketTxCount": 0, "TxErrCount": 0, "CollisionCount": 0, "OverrunCount": 0, "CarrierDetect": null, "TimeSinceReset": 3220864, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65530, 65531, 65532, 65533]}, "AdministratorCommissioning": {"WindowStatus": 0, "AdminFabricIndex": null, "AdminVendorId": null, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0, 1, 2], "EventList": [], "AttributeList": [0, 1, 2, 65528, 65529, 65530, 65531, 65532, 65533]}, "OperationalCredentials": {"NOCs": [{"1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRARgkBwEkCAEwCUEEE+PSMypUYP8rT/WHSOMhhGw4aZL2z6HvdvEZJNEvFkmDGPt7zfz2CHXWXdc3G3cfDC0Y/CwZMK8g2wWUVWw4lzcKNQEoARgkAgE2AwQCBAEYMAQUsO6DA3InCYmNpEigLz8pzd8e/ZgwBRRom5SXSQjNanFKz0NahjjIpGLZOhgwC0AaDm3nKCD552V2kLTY8F21MD+cZntwnwd8fhBWBrOzsl/Bj17k/9yWdl0xRqfKnOMnB/2UhoQlAjL2ta8MeLZCGA==", "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEMxi8MntoCWqI6x5VcKuHfh+D35EnvsCsl3i+mDmwzpmX+HY4ill38eAIqoGctJoivkaMamSO/Be1Pt/9Jx/cdDcKNQEpARgkAmAwBBRom5SXSQjNanFKz0NahjjIpGLZOjAFFG4SOv6FjsCxKGl81n3lZCDc4gT0GDALQECYOxHQV5GiATWNoxshUxg+Q3fZc829u6b4/ja9kOZRb1IPnuMc5bBO1t6FakyU7ChVw4CY8X2kzKtzovUL2AsY", "254": 10}], "Fabrics": [{"1": "BDBkphswNPti4De6nQNmGkYH2+oK+uKpUpqwaULfU2KdxOYJYOL2jDOOBK8i1NoaFDdA8qbm8hd7AcAjwoxLxxM=", "2": 4939, "3": 2, "4": 1, "5": "", "254": 10}], "SupportedFabrics": 15, "CommissionedFabrics": 2, "TrustedRootCertificates": 
["FTABAQEkAgE3AyyEAlVTLAcGR29vZ2xlLAELTWF0dGVyIFJvb3QnFAEAAAD+////GCYEf9JDKSYFf5Rb5TcGLIQCVVMsBwZHb29nbGUsAQtNYXR0ZXIgUm9vdCcUAQAAAP7///8YJAcBJAgBMAlBBFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U3CjUBKQEkAgEYJAJgMAQUcsIB91cZE7NIygDKe0X0d0ZoyX4wBRRywgH3VxkTs0jKAMp7RfR3RmjJfhgwC0BlFksWat/xjBVhCozpG9cD6cH2d7cRzhM1BRUt8NoVERZ1rFWRzueGhRzdnv2tKWZ0vryyo6Mgm83nswnbVSxvGA==", "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEEMGSmGzA0+2LgN7qdA2YaRgfb6gr64qlSmrBpQt9TYp3E5glg4vaMM44EryLU2hoUN0DypubyF3sBwCPCjEvHEzcKNQEpARgkAmAwBBRuEjr+hY7AsShpfNZ95WQg3OIE9DAFFG4SOv6FjsCxKGl81n3lZCDc4gT0GDALQG5+SU7Xb89dIqDz7VdAFwwEH3mIFOuZtHAKqBTBWlXBQbxnO4SUBLLKjZyGP7tFU7v6kqQWVeEtN4gbZ9i5oi4Y"], "CurrentFabricIndex": 10, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [1, 3, 5, 8], "AcceptedCommandList": [0, 2, 4, 6, 7, 9, 10, 11], "EventList": [], "AttributeList": [0, 1, 2, 3, 4, 5, 65528, 65529, 65530, 65531, 65532, 65533]}, "GroupKeyManagement": {"GroupKeyMap": [], "GroupTable": [], "MaxGroupsPerFabric": 4, "MaxGroupKeysPerFabric": 3, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [2, 5], "AcceptedCommandList": [0, 1, 3, 4], "EventList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533]}, "UserLabel": {"LabelList": [], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 65528, 65529, 65530, 65531, 65532, 65533]}}, "1": {"Descriptor": {"DeviceTypeList": [{"0": 14, "1": 1}], "ServerList": [29], "ClientList": [], "PartsList": [3, 4, 5, 6], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "EventList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65530, 65531, 65532, 65533]}}, "3": {"Identify": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 65533], "EventList": [], 
"AcceptedCommandList": [0], "GeneratedCommandList": [], "IdentifyTime": 0, "IdentifyType": 0}, "Groups": {"ClusterRevision": 4, "FeatureMap": 1, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 65533], "EventList": [], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "GeneratedCommandList": [0, 1, 2, 3], "NameSupport": 128}, "5": {"65533": 4, "65532": 1, "65531": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 6, 1, 2, 3, 4, 65533], "65530": [], "65529": [0, 1, 2, 3, 4, 5, 6], "65528": [0, 1, 2, 3, 4, 6], "0": 0, "6": 0, "1": 0, "2": 0, "3": false, "4": 128}, "OccupancySensing": {"ClusterRevision": 3, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "Occupancy": 0, "OccupancySensorType": 0, "OccupancySensorTypeBitmap": 1}, "Descriptor": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 3, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "DeviceTypeList": [{"0": 19, "1": 1}, {"0": 263, "1": 2}], "ServerList": [3, 4, 5, 1030, 29, 57], "ClientList": [], "PartsList": []}, "BridgedDeviceBasicInformation": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 5, 1, 3, 15, 10, 17, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "NodeLabel": "SONOFFMotion Sensor", "VendorName": "SONOFF", "ProductName": "SNZB-06P", "SerialNumber": "0ceff6fffe1a88c6", "SoftwareVersionString": "1.0.6", "Reachable": true}}, "4": {"Identify": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 65533], "EventList": [], "AcceptedCommandList": [0], "GeneratedCommandList": [], "IdentifyTime": 0, 
"IdentifyType": 0}, "Groups": {"ClusterRevision": 4, "FeatureMap": 1, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 65533], "EventList": [], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "GeneratedCommandList": [0, 1, 2, 3], "NameSupport": 128}, "5": {"65533": 4, "65532": 1, "65531": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 6, 1, 2, 3, 4, 65533], "65530": [], "65529": [0, 1, 2, 3, 4, 5, 6], "65528": [0, 1, 2, 3, 4, 6], "0": 0, "6": 0, "1": 0, "2": 0, "3": false, "4": 128}, "Descriptor": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 3, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "DeviceTypeList": [{"0": 19, "1": 1}], "ServerList": [3, 4, 5, 29, 57], "ClientList": [], "PartsList": [5, 6]}, "BridgedDeviceBasicInformation": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 5, 1, 3, 15, 10, 17, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "NodeLabel": "eWeLinkTemperature/Humidity Sen", "VendorName": "eWeLink", "ProductName": "TH01", "SerialNumber": "00124b002e957108", "SoftwareVersionString": "0.5", "Reachable": true}}, "5": {"Identify": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 65533], "EventList": [], "AcceptedCommandList": [0], "GeneratedCommandList": [], "IdentifyTime": 0, "IdentifyType": 0}, "Groups": {"ClusterRevision": 4, "FeatureMap": 1, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 65533], "EventList": [], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "GeneratedCommandList": [0, 1, 2, 3], "NameSupport": 128}, "5": {"65533": 4, "65532": 1, "65531": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 
6, 1, 2, 3, 4, 65533], "65530": [], "65529": [0, 1, 2, 3, 4, 5, 6], "65528": [0, 1, 2, 3, 4, 6], "0": 0, "6": 0, "1": 0, "2": 0, "3": false, "4": 128}, "TemperatureMeasurement": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "MeasuredValue": 1826, "MinMeasuredValue": -27315, "MaxMeasuredValue": 32767}, "Descriptor": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 3, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "DeviceTypeList": [{"0": 770, "1": 2}], "ServerList": [3, 4, 5, 1026, 29, 57], "ClientList": [], "PartsList": []}, "BridgedDeviceBasicInformation": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 5, 1, 3, 15, 10, 17, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "NodeLabel": "Temperature Sensor", "VendorName": "eWeLink", "ProductName": "TH01", "SerialNumber": "00124b002e957108", "SoftwareVersionString": "0.5", "Reachable": true}}, "6": {"Identify": {"ClusterRevision": 4, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 65533], "EventList": [], "AcceptedCommandList": [0], "GeneratedCommandList": [], "IdentifyTime": 0, "IdentifyType": 0}, "Groups": {"ClusterRevision": 4, "FeatureMap": 1, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 65533], "EventList": [], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "GeneratedCommandList": [0, 1, 2, 3], "NameSupport": 128}, "5": {"65533": 4, "65532": 1, "65531": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 6, 1, 2, 3, 4, 65533], "65530": [], "65529": [0, 1, 2, 3, 4, 5, 6], "65528": [0, 1, 2, 3, 
4, 6], "0": 0, "6": 0, "1": 0, "2": 0, "3": false, "4": 128}, "RelativeHumidityMeasurement": {"ClusterRevision": 3, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "MeasuredValue": 5700, "MinMeasuredValue": 0, "MaxMeasuredValue": 10000}, "Descriptor": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 0, 1, 2, 3, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "DeviceTypeList": [{"0": 775, "1": 2}], "ServerList": [3, 4, 5, 1029, 29, 57], "ClientList": [], "PartsList": []}, "BridgedDeviceBasicInformation": {"ClusterRevision": 1, "FeatureMap": 0, "AttributeList": [65528, 65529, 65530, 65531, 65533, 65532, 65531, 65530, 65529, 65528, 5, 1, 3, 15, 10, 17, 65533], "EventList": [], "AcceptedCommandList": [], "GeneratedCommandList": [], "NodeLabel": "Humidity Sensor", "VendorName": "eWeLink", "ProductName": "TH01", "SerialNumber": "00124b002e957108", "SoftwareVersionString": "0.5", "Reachable": true}}}, "attribute_subscriptions": []}, {"node_id": 2, "date_commissioned": "2024-12-07T16:14:45.024475", "last_interview": "2024-12-07T16:14:45.024488", "interview_version": 6, "available": true, "is_bridge": false, "attributes": {"0": {"Groups": {"NameSupport": 128, "FeatureMap": 1, "ClusterRevision": 4, "GeneratedCommandList": [0, 1, 2, 3], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}, "Descriptor": {"DeviceTypeList": [{"0": 22, "1": 1}], "ServerList": [4, 29, 31, 40, 48, 49, 51, 54, 60, 62, 63, 64, 65], "ClientList": [], "PartsList": [1], "FeatureMap": 0, "ClusterRevision": 2, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533]}, "AccessControl": {"Acl": [{"1": 5, "2": 2, "3": [112233], "4": null, "254": 
6}], "Extension": [], "SubjectsPerAccessControlEntry": 4, "TargetsPerAccessControlEntry": 3, "AccessControlEntriesPerFabric": 4, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533]}, "BasicInformation": {"DataModelRevision": 17, "VendorName": "Meross", "VendorID": 4933, "ProductName": "Smart Plug Mini", "ProductID": 40963, "NodeLabel": "", "Location": "XX", "HardwareVersion": 0, "HardwareVersionString": "9.0", "SoftwareVersion": 1, "SoftwareVersionString": "9.3.26", "ManufacturingDate": "20200101", "PartNumber": "", "ProductURL": "", "ProductLabel": "", "SerialNumber": "510801230701265", "LocalConfigDisabled": false, "Reachable": true, "UniqueID": "07C97DCD2FB0F1D7", "CapabilityMinima": {"0": 3, "1": 3}, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 65528, 65529, 65531, 65532, 65533]}, "GeneralCommissioning": {"Breadcrumb": 0, "BasicCommissioningInfo": {"0": 60, "1": 900}, "RegulatoryConfig": 2, "LocationCapability": 2, "SupportsConcurrentConnection": true, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [1, 3, 5], "AcceptedCommandList": [0, 2, 4], "AttributeList": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533]}, "NetworkCommissioning": {"MaxNetworks": 1, "Networks": [{"0": "Vk1FNUVEMzkx", "1": true}], "ScanMaxTimeSeconds": 10, "ConnectMaxTimeSeconds": 20, "InterfaceEnabled": true, "LastNetworkingStatus": 0, "LastNetworkID": "Vk1FNUVEMzkx", "LastConnectErrorValue": null, "FeatureMap": 1, "ClusterRevision": 1, "GeneratedCommandList": [1, 5, 7], "AcceptedCommandList": [0, 2, 4, 6, 8], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533]}, "GeneralDiagnostics": {"NetworkInterfaces": [{"0": "r0", "1": true, "2": null, "3": null, "4": "SOHpzoP6", "5": ["wKgAJQ=="], "6": 
["/oAAAAAAAABK4en//s6D+g=="], "7": 1}], "RebootCount": 10, "UpTime": 3451501, "TotalOperationalHours": 958, "BootReason": 1, "ActiveHardwareFaults": [], "ActiveRadioFaults": [], "ActiveNetworkFaults": [], "TestEventTriggersEnabled": false, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533]}, "WiFiNetworkDiagnostics": {"Bssid": "AAAAAAAA", "SecurityType": 0, "WiFiVersion": 3, "ChannelNumber": 1, "Rssi": -57, "BeaconLostCount": 0, "BeaconRxCount": null, "PacketMulticastRxCount": 0, "PacketMulticastTxCount": 0, "PacketUnicastRxCount": 0, "PacketUnicastTxCount": 0, "CurrentMaxRate": 0, "OverrunCount": 0, "FeatureMap": 3, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0], "AttributeList": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 65528, 65529, 65531, 65532, 65533]}, "AdministratorCommissioning": {"WindowStatus": 0, "AdminFabricIndex": null, "AdminVendorId": null, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [0, 1, 2], "AttributeList": [0, 1, 2, 65528, 65529, 65531, 65532, 65533]}, "OperationalCredentials": {"NOCs": [{"1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRAhgkBwEkCAEwCUEEki5SRwR9ldsdDOYz+cXHqBk4+9UtGw3HZs3QwLky9ZwptQ3P3TYPMyk6eRzxitxiO+cuSsjqEuz8xLW88F54PTcKNQEoARgkAgE2AwQCBAEYMAQUW/omsScZAz5Y7CBtD3wzo/KLjBUwBRRom5SXSQjNanFKz0NahjjIpGLZOhgwC0AYyqHAxJG8Yh+ZEhVagGjfjydheYq1P6EgLcmPfjrPAorq7tO3JWSk0N8CEfsTS9qqP6Bg8VheSHK2lvg5ZZoJGA==", "2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEMxi8MntoCWqI6x5VcKuHfh+D35EnvsCsl3i+mDmwzpmX+HY4ill38eAIqoGctJoivkaMamSO/Be1Pt/9Jx/cdDcKNQEpARgkAmAwBBRom5SXSQjNanFKz0NahjjIpGLZOjAFFG4SOv6FjsCxKGl81n3lZCDc4gT0GDALQECYOxHQV5GiATWNoxshUxg+Q3fZc829u6b4/ja9kOZRb1IPnuMc5bBO1t6FakyU7ChVw4CY8X2kzKtzovUL2AsY", "254": 6}], "Fabrics": [{"1": "BDBkphswNPti4De6nQNmGkYH2+oK+uKpUpqwaULfU2KdxOYJYOL2jDOOBK8i1NoaFDdA8qbm8hd7AcAjwoxLxxM=", 
"2": 4939, "3": 2, "4": 2, "5": "", "254": 6}], "SupportedFabrics": 5, "CommissionedFabrics": 2, "TrustedRootCertificates": ["FTABAQEkAgE3AyyEAlVTLAcGR29vZ2xlLAELTWF0dGVyIFJvb3QnFAEAAAD+////GCYEf9JDKSYFf5Rb5TcGLIQCVVMsBwZHb29nbGUsAQtNYXR0ZXIgUm9vdCcUAQAAAP7///8YJAcBJAgBMAlBBFs332VJwg3I1yKmuKy2YKinZM57r2xsIk9+6ENJaErX2An/ZQAz0VJ9zx+6rGqcOti0HtrJCfe1x2D9VCyJI3U3CjUBKQEkAgEYJAJgMAQUcsIB91cZE7NIygDKe0X0d0ZoyX4wBRRywgH3VxkTs0jKAMp7RfR3RmjJfhgwC0BlFksWat/xjBVhCozpG9cD6cH2d7cRzhM1BRUt8NoVERZ1rFWRzueGhRzdnv2tKWZ0vryyo6Mgm83nswnbVSxvGA==", "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEEMGSmGzA0+2LgN7qdA2YaRgfb6gr64qlSmrBpQt9TYp3E5glg4vaMM44EryLU2hoUN0DypubyF3sBwCPCjEvHEzcKNQEpARgkAmAwBBRuEjr+hY7AsShpfNZ95WQg3OIE9DAFFG4SOv6FjsCxKGl81n3lZCDc4gT0GDALQG5+SU7Xb89dIqDz7VdAFwwEH3mIFOuZtHAKqBTBWlXBQbxnO4SUBLLKjZyGP7tFU7v6kqQWVeEtN4gbZ9i5oi4Y"], "CurrentFabricIndex": 6, "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [1, 3, 5, 8], "AcceptedCommandList": [0, 2, 4, 6, 7, 9, 10, 11], "AttributeList": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533]}, "GroupKeyManagement": {"GroupKeyMap": [], "GroupTable": [], "MaxGroupsPerFabric": 4, "MaxGroupKeysPerFabric": 3, "FeatureMap": 0, "ClusterRevision": 2, "GeneratedCommandList": [2, 5], "AcceptedCommandList": [0, 1, 3, 4], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533]}, "FixedLabel": {"LabelList": [{"0": "room", "1": "bedroom 2"}, {"0": "orientation", "1": "North"}, {"0": "floor", "1": "2"}, {"0": "direction", "1": "up"}], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}, "UserLabel": {"LabelList": [], "FeatureMap": 0, "ClusterRevision": 1, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}}, "1": {"Identify": {"IdentifyTime": 0, "IdentifyType": 2, "FeatureMap": 0, "ClusterRevision": 4, "GeneratedCommandList": [], 
"AcceptedCommandList": [0], "AttributeList": [0, 1, 65528, 65529, 65531, 65532, 65533]}, "Groups": {"NameSupport": 128, "FeatureMap": 1, "ClusterRevision": 4, "GeneratedCommandList": [0, 1, 2, 3], "AcceptedCommandList": [0, 1, 2, 3, 4, 5], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}, "5": {"0": 0, "1": 0, "2": 0, "3": false, "4": 0, "6": 16, "65532": 6, "65533": 4, "65528": [0, 1, 2, 3, 4, 6], "65529": [0, 1, 2, 3, 4, 5, 6], "65531": [0, 1, 2, 3, 4, 6, 65528, 65529, 65531, 65532, 65533]}, "OnOff": {"OnOff": false, "FeatureMap": 0, "ClusterRevision": 4, "GeneratedCommandList": [], "AcceptedCommandList": [0, 1, 2], "AttributeList": [0, 65528, 65529, 65531, 65532, 65533]}, "Descriptor": {"DeviceTypeList": [{"0": 266, "1": 1}], "ServerList": [3, 4, 5, 6, 29], "ClientList": [], "PartsList": [], "FeatureMap": 0, "ClusterRevision": 2, "GeneratedCommandList": [], "AcceptedCommandList": [], "AttributeList": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533]}}}, "attribute_subscriptions": []}]} +{"event": "attribute_updated", "data": [1, {"5": {"TemperatureMeasurement": {"MeasuredValue": 1888}}}]} diff --git a/project/data/rules.json b/project/data/rules.json new file mode 100644 index 0000000..58e8dfd --- /dev/null +++ b/project/data/rules.json @@ -0,0 +1,60 @@ +{ + "rules": [ + { + "input": "Fridge opened, light on in kitchen, pillbox opened, movement in living room", + "label": "normal" + }, + { + "input": "Fridge not opened for 12 hours, no movement detected in living room", + "label": "abnormal" + }, + { + "input": "Temperature in garage is 2863°C, humidity is 5773", + "label": "abnormal" + }, + { + "input": "Pillbox opened multiple times in bedroom, occupancy detected", + "label": "normal" + }, + { + "input": "Bathroom light off, occupancy 0, humidity 6393", + "label": "normal" + }, + { + "input": "Living room light on, no movement detected for 8 hours", + "label": "abnormal" + }, + { + "input": "Fridge opened, kitchen light turned on, occupancy detected in 
kitchen", + "label": "normal" + }, + { + "input": "No movement in garage, temperature 2037°C, humidity 4264", + "label": "normal" + }, + { + "input": "Study occupancy 0, light on for extended duration", + "label": "abnormal" + }, + { + "input": "Kitchen light on and off multiple times within an hour", + "label": "normal" + }, + { + "input": "Living room humidity is 6742, light on, no movement detected", + "label": "abnormal" + }, + { + "input": "Pillbox unopened for more than 24 hours", + "label": "abnormal" + }, + { + "input": "Bedroom occupancy detected, pillbox opened, light on", + "label": "normal" + }, + { + "input": "Temperature in bathroom 2767°C, light on, humidity 5363", + "label": "normal" + } + ] +} \ No newline at end of file diff --git a/project/data/test_data.json b/project/data/test_data.json new file mode 100644 index 0000000..ef5b0f4 --- /dev/null +++ b/project/data/test_data.json @@ -0,0 +1,2525 @@ +[ + { + "input": [ + { + "timestamp": 1737806400, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1737806460, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737806700, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737807000, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1737807300, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737807600, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737807660, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1737808200, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737808500, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737808800, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737810000, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737811200, + "room": "hall", + "nodeId": 1, + 
"occupancy": 1 + }, + { + "timestamp": 1737811800, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737811860, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1737828000, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737828060, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737828120, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737828180, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1737829200, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737831600, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737831660, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737835200, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737838800, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737838860, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1737839400, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737849600, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737849660, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1737850800, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737852000, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737852060, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737855600, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737859200, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737859260, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1737859800, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "normal", + 
"reason": "The data reflects a consistent daily routine, including medication times, meal preparation, and relaxation periods. No unusual activity is detected." + }, + { + "input": [ + { + "timestamp": 1737892800, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1737892860, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737893100, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737893400, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1737893700, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737894000, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737894060, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1737894600, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737894900, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737895200, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737896400, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737897600, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737898200, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737898260, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1737914400, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737914460, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737914520, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737914580, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1737915600, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737918000, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737918060, + "room": 
"livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737921600, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737925200, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737925260, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1737925800, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737936000, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737936060, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1737937200, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737938400, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737938460, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737942000, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737945600, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737945660, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1737946200, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "normal", + "reason": "The data shows a consistent routine similar to Day 1, with no deviations. All activities align with expected behavior." 
+ }, + { + "input": [ + { + "timestamp": 1737979200, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1737979260, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737979500, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737979800, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1737980100, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737980400, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737980460, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1737981000, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737981300, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1737981600, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737982800, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1737984000, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737984600, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1737984660, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738000800, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738000860, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738000920, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738000980, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738002000, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738004400, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738004460, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738008000, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 
1738011600, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738011660, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738012200, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738022400, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738022460, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738023600, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738024800, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738024860, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738028400, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738032000, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738032060, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738032600, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "normal", + "reason": "The data shows a typical day with all expected activities occurring at regular times." 
+ }, + { + "input": [ + { + "timestamp": 1738065600, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738065660, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738065900, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738066200, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738066500, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738066800, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738067100, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738067400, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738067700, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738068000, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738068300, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738068600, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738068900, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738069200, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + }, + { + "timestamp": 1738069500, + "room": "bathroom", + "nodeId": 3, + "humidity": 7500 + } + ], + "result": "anomalous", + "reason": "High humidity persists in the bathroom with no movement or room transitions for over 45 minutes, suggesting a potential fall during shower." 
+ }, + { + "input": [ + { + "timestamp": 1738152000, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738152060, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738152300, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738152600, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738152900, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738153200, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738153260, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738153800, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738154100, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738154400, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738155600, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738156800, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738157400, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738157460, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738173600, + "room": "porch", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738184400, + "room": "porch", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738195200, + "room": "porch", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738206000, + "room": "porch", + "nodeId": 1, + "occupancy": 0 + } + ], + "result": "anomalous", + "reason": "Person left home in the morning but did not return, missing all subsequent daily activities including medications and meals." 
+ }, + { + "input": [ + { + "timestamp": 1738238400, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738238460, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738238700, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738239000, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738239300, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738239600, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738239660, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738240200, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738240500, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738240800, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738242000, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738243200, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738243800, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738243860, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738260000, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738260060, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738260120, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738260180, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738261200, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738263600, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738263660, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738267200, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 
1738270800, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738270860, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738271400, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738281600, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738281660, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738282800, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738284000, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738284060, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738287600, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738291200, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738291260, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738291800, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "normal", + "reason": "All activities follow the expected pattern with normal timing and sequence." 
+ }, + { + "input": [ + { + "timestamp": 1738324800, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738324860, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738325100, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738325400, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738325700, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738326000, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738326600, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738326900, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738327200, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738328400, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738329600, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738330200, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738330260, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738346400, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738346460, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738346520, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738347600, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738350000, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738350060, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738353600, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738357200, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738357260, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 
1738357800, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738368000, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738369200, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738370400, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738370460, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738374000, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738377600, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738377660, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738378200, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "anomalous", + "reason": "No fridge activity detected throughout the entire day despite multiple kitchen visits, suggesting potential eating issues or forgetfulness." + }, + { + "input": [ + { + "timestamp": 1738411200, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738411260, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738411500, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738411800, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738412100, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738412400, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738412460, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738413000, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738413300, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738413600, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738417200, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738420800, + 
"room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738424400, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738428000, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738431600, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738435200, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738438800, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738442400, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738446000, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738449600, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738453200, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738456800, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738460400, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738464000, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738467600, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738471200, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738474800, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + } + ], + "result": "anomalous", + "reason": "Person remains in bedroom all day after breakfast, missing meals, medications, and normal daily activities. This could indicate illness or other health issues." 
+ }, + { + "input": [ + { + "timestamp": 1738497600, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738497660, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738497900, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738498200, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738498500, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738498800, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738498860, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738499400, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738499700, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738500000, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738501200, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738502400, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738503000, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738503060, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738519200, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738519260, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738519320, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738519380, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738520400, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738522800, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738522860, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738526400, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 
1738530000, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738530060, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738530600, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738540800, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738540860, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738542000, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738543200, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738543260, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738546800, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738550400, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738550460, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738551000, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "normal", + "reason": "All activities follow the expected daily routine with normal timing and sequence." 
+ }, + { + "input": [ + { + "timestamp": 1738584000, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738584300, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738584600, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738584900, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738585200, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738585260, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738585800, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738586100, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738586400, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738587600, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738588800, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738589400, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738589460, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738605600, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738605660, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738605720, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738605780, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738606800, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738609200, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738609260, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738612800, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738616400, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 
1738617000, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738627200, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738627260, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738628400, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738629600, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738629660, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738633200, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738636800, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738637400, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "anomalous", + "reason": "All three daily medications were missed (morning, afternoon, and evening), while other activities remained normal. This suggests a serious deviation in medication adherence." + }, + { + "input": [ + { + "timestamp": 1738670400, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738670460, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738670700, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738671000, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738671300, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738671600, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738671660, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738672200, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738672500, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738672800, + "room": "livingroom", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738672860, + "room": "kitchen", + "nodeId": 1, + "occupancy": 1 + 
}, + { + "timestamp": 1738672920, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738672980, + "room": "bathroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673040, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673100, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673160, + "room": "kitchen", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673220, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673280, + "room": "bathroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673340, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673400, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673460, + "room": "kitchen", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673520, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673580, + "room": "bathroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738673640, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738673700, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738674000, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738677600, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738681200, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738684800, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738688400, + "room": "bedroom", + "nodeId": 1, + "occupancy": 1 + } + ], + "result": "anomalous", + "reason": "Unusual rapid transitions between rooms followed by extended bedroom stay suggests potential confusion or disorientation. Missing subsequent daily activities and medications adds to the concern." 
+ }, + { + "input": [ + { + "timestamp": 1738756800, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738756860, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738757100, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738757400, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738757700, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738758000, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738758060, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738758600, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738758900, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738759200, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738760400, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738761600, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738762200, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738762260, + "room": "hall", + "nodeId": 1, + "occupancy": 0 + }, + { + "timestamp": 1738778400, + "room": "porch", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738778460, + "room": "hall", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738778520, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738778580, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738779600, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738782000, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738782060, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738785600, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 
1738789200, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738789260, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738789800, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738800000, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738800060, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738801200, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738802400, + "room": "livingroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738802460, + "room": "livingroom", + "nodeId": 1, + "occupancy": 1 + }, + { + "timestamp": 1738806000, + "room": "livingroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738809600, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738809660, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738810200, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "normal", + "reason": "All activities follow the expected daily routine with normal timing and sequence." 
+ }, + { + "input": [ + { + "timestamp": 1738843200, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738843260, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738843500, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738843800, + "room": "bathroom", + "nodeId": 3, + "humidity": 6500 + }, + { + "timestamp": 1738844100, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738844400, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738844460, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738844520, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738844580, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738844640, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738844700, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738844760, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738844820, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738844880, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738844940, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738845000, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738845060, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738866400, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738866460, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738866520, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738866580, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738866640, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738866700, + "room": "kitchen", + 
"nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738875600, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738875660, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738876200, + "room": "bedroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738888400, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738888460, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738888520, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738888580, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738888640, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738888700, + "room": "kitchen", + "nodeId": 2, + "onOff": false + } + ], + "result": "anomalous", + "reason": "Unusually frequent kitchen visits and fridge openings throughout the day suggest potential memory issues or compulsive behavior. While other activities remain normal, the repeated kitchen pattern is concerning." 
+ }, + { + "input": [ + { + "timestamp": 1738929600, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738929660, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738929900, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738930200, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738930260, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738930320, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738930380, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738930440, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738930500, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738930560, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738944000, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738944060, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738944120, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738944180, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738952400, + "room": "kitchen", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738952460, + "room": "kitchen", + "event": "fridge_opened" + }, + { + "timestamp": 1738953000, + "room": "kitchen", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738957600, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738957660, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738957720, + "room": "bathroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 1738957780, + "room": "bathroom", + "nodeId": 2, + "onOff": false + }, + { + "timestamp": 1738962000, + "room": "bedroom", + "nodeId": 2, + "onOff": true + }, + { + "timestamp": 
1738962060, + "room": "pillbox", + "nodeId": 1, + "onOff": true + }, + { + "timestamp": 1738962600, + "room": "bedroom", + "nodeId": 2, + "onOff": false + } + ], + "result": "anomalous", + "reason": "Unusually frequent bathroom visits throughout the day, with multiple entries and exits in short succession. While other activities remain normal, this pattern suggests potential health issues requiring attention." + } + ] \ No newline at end of file diff --git a/project/data/training_data.json b/project/data/training_data.json new file mode 100644 index 0000000..6d64e69 --- /dev/null +++ b/project/data/training_data.json @@ -0,0 +1,498 @@ +[ + { + "input": [ + {"timestamp": 1737806400, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1737806460, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737806700, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737807000, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1737807300, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737807600, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1737807660, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1737808200, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1737808500, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737808800, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737810000, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737811200, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737811800, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737811860, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1737828000, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737828060, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737828120, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1737828180, "room": 
"kitchen", "event": "fridge_opened"}, + {"timestamp": 1737829200, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1737831600, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737831660, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737835200, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737838800, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737838860, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1737839400, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737849600, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1737849660, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1737850800, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1737852000, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737852060, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737855600, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737859200, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737859260, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1737859800, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "normal", + "reason": "The data reflects a consistent daily routine, including medication times, meal preparation, and relaxation periods. No unusual activity is detected." 
+ }, + { + "input": [ + {"timestamp": 1737892800, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1737892860, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737893100, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737893400, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1737893700, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737894000, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1737894060, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1737894600, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1737894900, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737895200, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737896400, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737897600, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737898200, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737898260, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1737914400, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737914460, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737914520, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1737914580, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1737915600, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1737918000, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737918060, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737921600, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737925200, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737925260, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1737925800, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737936000, "room": "kitchen", "nodeId": 
2, "onOff": true}, + {"timestamp": 1737936060, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1737937200, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1737938400, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737938460, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737942000, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737945600, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737945660, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1737946200, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "normal", + "reason": "The data shows a consistent routine similar to Day 1, with no deviations. All activities align with expected behavior." + }, + { + "input": [ + {"timestamp": 1737979200, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1737979260, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737979500, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737979800, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1737980100, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737980400, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1737980460, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1737981000, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1737981300, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1737981600, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737982800, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1737984000, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737984600, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1737984660, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738000800, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738000860, 
"room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738000920, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738000980, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738002000, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738004400, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738004460, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738008000, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738011600, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738011660, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738012200, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738022400, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738022460, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738023600, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738024800, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738024860, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738028400, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738032000, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738032060, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738032600, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "normal", + "reason": "The data shows a typical day with all expected activities occurring at regular times." 
+ }, + { + "input": [ + {"timestamp": 1738065600, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738065660, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738065900, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738066200, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738066500, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738066800, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738067100, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738067400, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738067700, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738068000, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738068300, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738068600, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738068900, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738069200, "room": "bathroom", "nodeId": 3, "humidity": 7500}, + {"timestamp": 1738069500, "room": "bathroom", "nodeId": 3, "humidity": 7500} + ], + "result": "anomalous", + "reason": "High humidity persists in the bathroom with no movement or room transitions for over 45 minutes, suggesting a potential fall during shower." 
+ }, + { + "input": [ + {"timestamp": 1738152000, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738152060, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738152300, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738152600, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738152900, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738153200, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738153260, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738153800, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738154100, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738154400, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738155600, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738156800, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738157400, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738157460, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738173600, "room": "porch", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738184400, "room": "porch", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738195200, "room": "porch", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738206000, "room": "porch", "nodeId": 1, "occupancy": 0} + ], + "result": "anomalous", + "reason": "Person left home in the morning but did not return, missing all subsequent daily activities including medications and meals." 
+ }, + { + "input": [ + {"timestamp": 1738238400, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738238460, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738238700, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738239000, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738239300, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738239600, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738239660, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738240200, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738240500, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738240800, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738242000, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738243200, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738243800, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738243860, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738260000, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738260060, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738260120, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738260180, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738261200, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738263600, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738263660, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738267200, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738270800, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738270860, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738271400, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738281600, "room": "kitchen", "nodeId": 
2, "onOff": true}, + {"timestamp": 1738281660, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738282800, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738284000, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738284060, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738287600, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738291200, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738291260, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738291800, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "normal", + "reason": "All activities follow the expected pattern with normal timing and sequence." + }, + { + "input": [ + {"timestamp": 1738324800, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738324860, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738325100, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738325400, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738325700, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738326000, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738326600, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738326900, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738327200, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738328400, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738329600, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738330200, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738330260, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738346400, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738346460, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738346520, "room": "kitchen", "nodeId": 2, "onOff": 
true}, + {"timestamp": 1738347600, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738350000, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738350060, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738353600, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738357200, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738357260, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738357800, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738368000, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738369200, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738370400, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738370460, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738374000, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738377600, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738377660, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738378200, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "anomalous", + "reason": "No fridge activity detected throughout the entire day despite multiple kitchen visits, suggesting potential eating issues or forgetfulness." 
+ }, + { + "input": [ + {"timestamp": 1738411200, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738411260, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738411500, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738411800, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738412100, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738412400, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738412460, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738413000, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738413300, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738413600, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738417200, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738420800, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738424400, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738428000, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738431600, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738435200, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738438800, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738442400, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738446000, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738449600, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738453200, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738456800, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738460400, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738464000, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738467600, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738471200, "room": "bedroom", 
"nodeId": 1, "occupancy": 1}, + {"timestamp": 1738474800, "room": "bedroom", "nodeId": 1, "occupancy": 1} + ], + "result": "anomalous", + "reason": "Person remains in bedroom all day after breakfast, missing meals, medications, and normal daily activities. This could indicate illness or other health issues." + }, + { + "input": [ + {"timestamp": 1738497600, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738497660, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738497900, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738498200, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738498500, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738498800, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738498860, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738499400, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738499700, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738500000, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738501200, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738502400, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738503000, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738503060, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738519200, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738519260, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738519320, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738519380, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738520400, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738522800, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738522860, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738526400, "room": "livingroom", 
"nodeId": 2, "onOff": false}, + {"timestamp": 1738530000, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738530060, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738530600, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738540800, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738540860, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738542000, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738543200, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738543260, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738546800, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738550400, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738550460, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738551000, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "normal", + "reason": "All activities follow the expected daily routine with normal timing and sequence." 
+ }, + { + "input": [ + {"timestamp": 1738584000, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738584300, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738584600, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738584900, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738585200, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738585260, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738585800, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738586100, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738586400, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738587600, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738588800, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738589400, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738589460, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738605600, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738605660, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738605720, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738605780, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738606800, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738609200, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738609260, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738612800, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738616400, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738617000, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738627200, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738627260, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738628400, "room": "kitchen", "nodeId": 2, 
"onOff": false}, + {"timestamp": 1738629600, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738629660, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738633200, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738636800, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738637400, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "anomalous", + "reason": "All three daily medications were missed (morning, afternoon, and evening), while other activities remained normal. This suggests a serious deviation in medication adherence." + }, + { + "input": [ + {"timestamp": 1738670400, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738670460, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738670700, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738671000, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738671300, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738671600, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738671660, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738672200, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738672500, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738672800, "room": "livingroom", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738672860, "room": "kitchen", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738672920, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738672980, "room": "bathroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673040, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673100, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673160, "room": "kitchen", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673220, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673280, "room": 
"bathroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673340, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673400, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673460, "room": "kitchen", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673520, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673580, "room": "bathroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738673640, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738673700, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738674000, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738677600, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738681200, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738684800, "room": "bedroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738688400, "room": "bedroom", "nodeId": 1, "occupancy": 1} + ], + "result": "anomalous", + "reason": "Unusual rapid transitions between rooms followed by extended bedroom stay suggests potential confusion or disorientation. Missing subsequent daily activities and medications adds to the concern." 
+ }, + { + "input": [ + {"timestamp": 1738756800, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738756860, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738757100, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738757400, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738757700, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738758000, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738758060, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738758600, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738758900, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738759200, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738760400, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738761600, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738762200, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738762260, "room": "hall", "nodeId": 1, "occupancy": 0}, + {"timestamp": 1738778400, "room": "porch", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738778460, "room": "hall", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738778520, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738778580, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738779600, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738782000, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738782060, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738785600, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738789200, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738789260, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738789800, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738800000, "room": "kitchen", "nodeId": 
2, "onOff": true}, + {"timestamp": 1738800060, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738801200, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738802400, "room": "livingroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738802460, "room": "livingroom", "nodeId": 1, "occupancy": 1}, + {"timestamp": 1738806000, "room": "livingroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738809600, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738809660, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738810200, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "normal", + "reason": "All activities follow the expected daily routine with normal timing and sequence." + }, + { + "input": [ + {"timestamp": 1738843200, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738843260, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738843500, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738843800, "room": "bathroom", "nodeId": 3, "humidity": 6500}, + {"timestamp": 1738844100, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738844400, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738844460, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738844520, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738844580, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738844640, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738844700, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738844760, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738844820, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738844880, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738844940, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738845000, "room": "kitchen", "event": "fridge_opened"}, 
+ {"timestamp": 1738845060, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738866400, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738866460, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738866520, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738866580, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738866640, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738866700, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738875600, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738875660, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738876200, "room": "bedroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738888400, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738888460, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738888520, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738888580, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738888640, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738888700, "room": "kitchen", "nodeId": 2, "onOff": false} + ], + "result": "anomalous", + "reason": "Unusually frequent kitchen visits and fridge openings throughout the day suggest potential memory issues or compulsive behavior. While other activities remain normal, the repeated kitchen pattern is concerning." 
+ }, + { + "input": [ + {"timestamp": 1738929600, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738929660, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738929900, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738930200, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738930260, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738930320, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738930380, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738930440, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738930500, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738930560, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738944000, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738944060, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738944120, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738944180, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738952400, "room": "kitchen", "nodeId": 2, "onOff": true}, + {"timestamp": 1738952460, "room": "kitchen", "event": "fridge_opened"}, + {"timestamp": 1738953000, "room": "kitchen", "nodeId": 2, "onOff": false}, + {"timestamp": 1738957600, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738957660, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738957720, "room": "bathroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738957780, "room": "bathroom", "nodeId": 2, "onOff": false}, + {"timestamp": 1738962000, "room": "bedroom", "nodeId": 2, "onOff": true}, + {"timestamp": 1738962060, "room": "pillbox", "nodeId": 1, "onOff": true}, + {"timestamp": 1738962600, "room": "bedroom", "nodeId": 2, "onOff": false} + ], + "result": "anomalous", + "reason": "Unusually frequent bathroom visits throughout the day, with multiple entries and exits 
in short succession. While other activities remain normal, this pattern suggests potential health issues requiring attention." + } + ] \ No newline at end of file diff --git a/project/day1.ipynb b/project/day1.ipynb new file mode 100644 index 0000000..442a745 --- /dev/null +++ b/project/day1.ipynb @@ -0,0 +1,679 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "28a0673e-96b5-43f2-8a8b-bd033bf851b0", + "metadata": {}, + "source": [ + "# The IoT Project begins!!\n", + "\n", + "## The AI Carer\n", + "\n", + "A model that can estimate if an elderly person is safe in their home.\n", + "\n", + "## Data Curation Part 1\n", + "\n", + "Today we'll begin our scrubbing and curating our dataset..\n", + "\n", + "The training dataset is here: \n", + "data/training_data.json\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 105, + "id": "67cedf85-8125-4322-998e-9375fe745597", + "metadata": {}, + "outputs": [], + "source": [ + "# imports\n", + "\n", + "import os\n", + "from dotenv import load_dotenv\n", + "from huggingface_hub import login\n", + "from datasets import load_dataset, Dataset, DatasetDict\n", + "import matplotlib.pyplot as plt\n", + "import json\n", + "import numpy as np\n", + "import random\n", + "from collections import Counter, defaultdict\n", + "import pickle" + ] + }, + { + "cell_type": "code", + "execution_count": 106, + "id": "7390a6aa-79cb-4dea-b6d7-de7e4b13e472", + "metadata": {}, + "outputs": [], + "source": [ + "# environment\n", + "\n", + "load_dotenv()\n", + "os.environ['OPENAI_API_KEY'] = os.getenv('OPENAI_API_KEY', 'your-key-if-not-using-env')\n", + "os.environ['ANTHROPIC_API_KEY'] = os.getenv('ANTHROPIC_API_KEY', 'your-key-if-not-using-env')\n", + "os.environ['HF_TOKEN'] = os.getenv('HF_TOKEN', 'your-key-if-not-using-env')" + ] + }, + { + "cell_type": "code", + "execution_count": 107, + "id": "0732274a-aa6a-44fc-aee2-40dc8a8e4451", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ 
+ "Note: Environment variable`HF_TOKEN` is set and is the current active token independently from the token you've just configured.\n" + ] + } + ], + "source": [ + "# Log in to HuggingFace\n", + "\n", + "hf_token = os.environ['HF_TOKEN']\n", + "login(hf_token, add_to_git_credential=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 108, + "id": "b5521526-0da9-42d7-99e3-f950fab71662", + "metadata": {}, + "outputs": [], + "source": [ + "# One more import - the Item class\n", + "# If you get an error that you need to agree to Meta's terms when you run this, then follow the link it provides you and follow their instructions\n", + "# You should get approved by Meta within minutes\n", + "# Any problems - message me or email me!\n", + "\n", + "from items import Item" + ] + }, + { + "cell_type": "code", + "execution_count": 109, + "id": "1adcf323-de9d-4c24-a9c3-d7ae554d06ca", + "metadata": {}, + "outputs": [], + "source": [ + "%matplotlib inline" + ] + }, + { + "cell_type": "code", + "execution_count": 110, + "id": "049885d4-fdfa-4ff0-a932-4a2ed73928e2", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[{'input': [{'timestamp': 1737806400, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1737806460, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737806700, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737807000, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1737807300, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737807600, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737807660, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1737808200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737808500, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737808800, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737810000, 'room': 'livingroom', 'nodeId': 
2, 'onOff': False}, {'timestamp': 1737811200, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737811800, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737811860, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1737828000, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737828060, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737828120, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737828180, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1737829200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737831600, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737831660, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737835200, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737838800, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737838860, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1737839400, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737849600, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737849660, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1737850800, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737852000, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737852060, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737855600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737859200, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737859260, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1737859800, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'normal', 'reason': 'The data reflects a consistent daily routine, including medication times, meal preparation, and relaxation periods. 
No unusual activity is detected.'}, {'input': [{'timestamp': 1737892800, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1737892860, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737893100, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737893400, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1737893700, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737894000, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737894060, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1737894600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737894900, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737895200, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737896400, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737897600, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737898200, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737898260, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1737914400, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737914460, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737914520, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737914580, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1737915600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737918000, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737918060, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737921600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737925200, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737925260, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1737925800, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737936000, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, 
{'timestamp': 1737936060, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1737937200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737938400, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737938460, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737942000, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737945600, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737945660, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1737946200, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'normal', 'reason': 'The data shows a consistent routine similar to Day 1, with no deviations. All activities align with expected behavior.'}, {'input': [{'timestamp': 1737979200, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1737979260, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737979500, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737979800, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1737980100, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737980400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737980460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1737981000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737981300, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737981600, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737982800, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737984000, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737984600, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737984660, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738000800, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738000860, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738000920, 'room': 
'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738000980, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738002000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738004400, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738004460, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738008000, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738011600, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738011660, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738012200, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738022400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738022460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738023600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738024800, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738024860, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738028400, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738032000, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738032060, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738032600, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'normal', 'reason': 'The data shows a typical day with all expected activities occurring at regular times.'}, {'input': [{'timestamp': 1738065600, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738065660, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738065900, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738066200, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738066500, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738066800, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738067100, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 
1738067400, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738067700, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738068000, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738068300, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738068600, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738068900, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738069200, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738069500, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}], 'result': 'anomalous', 'reason': 'High humidity persists in the bathroom with no movement or room transitions for over 45 minutes, suggesting a potential fall during shower.'}, {'input': [{'timestamp': 1738152000, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738152060, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738152300, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738152600, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738152900, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738153200, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738153260, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738153800, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738154100, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738154400, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738155600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738156800, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738157400, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738157460, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738173600, 'room': 'porch', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738184400, 'room': 'porch', 'nodeId': 1, 'occupancy': 0}, 
{'timestamp': 1738195200, 'room': 'porch', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738206000, 'room': 'porch', 'nodeId': 1, 'occupancy': 0}], 'result': 'anomalous', 'reason': 'Person left home in the morning but did not return, missing all subsequent daily activities including medications and meals.'}, {'input': [{'timestamp': 1738238400, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738238460, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738238700, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738239000, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738239300, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738239600, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738239660, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738240200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738240500, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738240800, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738242000, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738243200, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738243800, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738243860, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738260000, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738260060, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738260120, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738260180, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738261200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738263600, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738263660, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738267200, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738270800, 
'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738270860, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738271400, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738281600, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738281660, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738282800, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738284000, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738284060, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738287600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738291200, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738291260, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738291800, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'normal', 'reason': 'All activities follow the expected pattern with normal timing and sequence.'}, {'input': [{'timestamp': 1738324800, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738324860, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738325100, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738325400, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738325700, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738326000, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738326600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738326900, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738327200, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738328400, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738329600, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738330200, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738330260, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738346400, 
'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738346460, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738346520, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738347600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738350000, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738350060, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738353600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738357200, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738357260, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738357800, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738368000, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738369200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738370400, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738370460, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738374000, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738377600, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738377660, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738378200, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'anomalous', 'reason': 'No fridge activity detected throughout the entire day despite multiple kitchen visits, suggesting potential eating issues or forgetfulness.'}, {'input': [{'timestamp': 1738411200, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738411260, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738411500, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738411800, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738412100, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738412400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738412460, 
'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738413000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738413300, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738413600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738417200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738420800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738424400, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738428000, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738431600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738435200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738438800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738442400, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738446000, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738449600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738453200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738456800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738460400, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738464000, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738467600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738471200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738474800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}], 'result': 'anomalous', 'reason': 'Person remains in bedroom all day after breakfast, missing meals, medications, and normal daily activities. 
This could indicate illness or other health issues.'}, {'input': [{'timestamp': 1738497600, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738497660, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738497900, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738498200, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738498500, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738498800, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738498860, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738499400, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738499700, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738500000, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738501200, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738502400, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738503000, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738503060, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738519200, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738519260, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738519320, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738519380, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738520400, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738522800, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738522860, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738526400, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738530000, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738530060, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738530600, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738540800, 'room': 'kitchen', 'nodeId': 2, 
'onOff': True}, {'timestamp': 1738540860, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738542000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738543200, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738543260, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738546800, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738550400, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738550460, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738551000, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'normal', 'reason': 'All activities follow the expected daily routine with normal timing and sequence.'}, {'input': [{'timestamp': 1738584000, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738584300, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738584600, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738584900, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738585200, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738585260, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738585800, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738586100, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738586400, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738587600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738588800, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738589400, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738589460, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738605600, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738605660, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738605720, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738605780, 'room': 'kitchen', 'event': 
'fridge_opened'}, {'timestamp': 1738606800, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738609200, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738609260, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738612800, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738616400, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738617000, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738627200, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738627260, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738628400, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738629600, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738629660, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738633200, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738636800, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738637400, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'anomalous', 'reason': 'All three daily medications were missed (morning, afternoon, and evening), while other activities remained normal. 
This suggests a serious deviation in medication adherence.'}, {'input': [{'timestamp': 1738670400, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738670460, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738670700, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738671000, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738671300, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738671600, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738671660, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738672200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738672500, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738672800, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738672860, 'room': 'kitchen', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738672920, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738672980, 'room': 'bathroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673040, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673100, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673160, 'room': 'kitchen', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673220, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673280, 'room': 'bathroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673340, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673400, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673460, 'room': 'kitchen', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673520, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673580, 'room': 'bathroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673640, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738673700, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738674000, 'room': 'bedroom', 
'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738677600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738681200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738684800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738688400, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}], 'result': 'anomalous', 'reason': 'Unusual rapid transitions between rooms followed by extended bedroom stay suggests potential confusion or disorientation. Missing subsequent daily activities and medications adds to the concern.'}, {'input': [{'timestamp': 1738756800, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738756860, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738757100, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738757400, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738757700, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738758000, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738758060, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738758600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738758900, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738759200, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738760400, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738761600, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738762200, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738762260, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738778400, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738778460, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738778520, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738778580, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738779600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, 
{'timestamp': 1738782000, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738782060, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738785600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738789200, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738789260, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738789800, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738800000, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738800060, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738801200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738802400, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738802460, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738806000, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738809600, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738809660, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738810200, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'normal', 'reason': 'All activities follow the expected daily routine with normal timing and sequence.'}, {'input': [{'timestamp': 1738843200, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738843260, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738843500, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738843800, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738844100, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738844400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738844460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738844520, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738844580, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738844640, 'room': 'kitchen', 'event': 
'fridge_opened'}, {'timestamp': 1738844700, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738844760, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738844820, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738844880, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738844940, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738845000, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738845060, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738866400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738866460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738866520, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738866580, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738866640, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738866700, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738875600, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738875660, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738876200, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738888400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738888460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738888520, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738888580, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738888640, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738888700, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}], 'result': 'anomalous', 'reason': 'Unusually frequent kitchen visits and fridge openings throughout the day suggest potential memory issues or compulsive behavior. 
While other activities remain normal, the repeated kitchen pattern is concerning.'}, {'input': [{'timestamp': 1738929600, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738929660, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738929900, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738930200, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738930260, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738930320, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738930380, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738930440, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738930500, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738930560, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738944000, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738944060, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738944120, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738944180, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738952400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738952460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738953000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738957600, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738957660, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738957720, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738957780, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738962000, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738962060, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738962600, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'anomalous', 'reason': 'Unusually frequent bathroom visits throughout the day, with multiple 
entries and exits in short succession. While other activities remain normal, this pattern suggests potential health issues requiring attention.'}, {'input': [{'timestamp': 1737806400, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1737806460, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737806700, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737807000, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1737807300, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737807600, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737807660, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1737808200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737808500, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737808800, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737810000, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737811200, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737811800, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737811860, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1737828000, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737828060, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737828120, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737828180, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1737829200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737831600, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737831660, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737835200, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737838800, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737838860, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1737839400, 'room': 'bedroom', 
'nodeId': 2, 'onOff': False}, {'timestamp': 1737849600, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737849660, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1737850800, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737852000, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737852060, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737855600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737859200, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737859260, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1737859800, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'normal', 'reason': 'The data reflects a consistent daily routine, including medication times, meal preparation, and relaxation periods. No unusual activity is detected.'}, {'input': [{'timestamp': 1737892800, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1737892860, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737893100, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737893400, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1737893700, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737894000, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737894060, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1737894600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737894900, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737895200, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737896400, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737897600, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737898200, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737898260, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1737914400, 'room': 'porch', 
'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737914460, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737914520, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737914580, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1737915600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737918000, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737918060, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737921600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737925200, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737925260, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1737925800, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737936000, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737936060, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1737937200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737938400, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737938460, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737942000, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737945600, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737945660, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1737946200, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'normal', 'reason': 'The data shows a consistent routine similar to Day 1, with no deviations. 
All activities align with expected behavior.'}, {'input': [{'timestamp': 1737979200, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1737979260, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737979500, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737979800, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1737980100, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737980400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737980460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1737981000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737981300, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737981600, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737982800, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737984000, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737984600, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737984660, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738000800, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738000860, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738000920, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738000980, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738002000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738004400, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738004460, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738008000, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738011600, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738011660, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738012200, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738022400, 'room': 'kitchen', 'nodeId': 2, 'onOff': 
True}, {'timestamp': 1738022460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738023600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738024800, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738024860, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738028400, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738032000, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738032060, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738032600, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'normal', 'reason': 'The data shows a typical day with all expected activities occurring at regular times.'}, {'input': [{'timestamp': 1738065600, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738065660, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738065900, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738066200, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738066500, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738066800, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738067100, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738067400, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738067700, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738068000, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738068300, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738068600, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738068900, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738069200, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738069500, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}], 'result': 'anomalous', 'reason': 'High humidity persists in the bathroom with no movement or room 
transitions for over 45 minutes, suggesting a potential fall during shower.'}, {'input': [{'timestamp': 1738152000, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738152060, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738152300, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738152600, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738152900, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738153200, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738153260, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738153800, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738154100, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738154400, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738155600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738156800, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738157400, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738157460, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738173600, 'room': 'porch', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738184400, 'room': 'porch', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738195200, 'room': 'porch', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738206000, 'room': 'porch', 'nodeId': 1, 'occupancy': 0}], 'result': 'anomalous', 'reason': 'Person left home in the morning but did not return, missing all subsequent daily activities including medications and meals.'}, {'input': [{'timestamp': 1738238400, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738238460, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738238700, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738239000, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738239300, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 
1738239600, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738239660, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738240200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738240500, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738240800, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738242000, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738243200, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738243800, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738243860, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738260000, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738260060, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738260120, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738260180, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738261200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738263600, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738263660, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738267200, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738270800, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738270860, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738271400, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738281600, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738281660, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738282800, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738284000, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738284060, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738287600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738291200, 'room': 'bedroom', 'nodeId': 2, 'onOff': 
True}, {'timestamp': 1738291260, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738291800, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'normal', 'reason': 'All activities follow the expected pattern with normal timing and sequence.'}, {'input': [{'timestamp': 1738324800, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738324860, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738325100, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738325400, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738325700, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738326000, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738326600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738326900, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738327200, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738328400, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738329600, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738330200, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738330260, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738346400, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738346460, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738346520, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738347600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738350000, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738350060, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738353600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738357200, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738357260, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738357800, 'room': 'bedroom', 'nodeId': 2, 'onOff': 
False}, {'timestamp': 1738368000, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738369200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738370400, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738370460, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738374000, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738377600, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738377660, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738378200, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'anomalous', 'reason': 'No fridge activity detected throughout the entire day despite multiple kitchen visits, suggesting potential eating issues or forgetfulness.'}, {'input': [{'timestamp': 1738411200, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738411260, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738411500, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738411800, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738412100, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738412400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738412460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738413000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738413300, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738413600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738417200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738420800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738424400, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738428000, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738431600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738435200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, 
{'timestamp': 1738438800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738442400, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738446000, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738449600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738453200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738456800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738460400, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738464000, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738467600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738471200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738474800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}], 'result': 'anomalous', 'reason': 'Person remains in bedroom all day after breakfast, missing meals, medications, and normal daily activities. This could indicate illness or other health issues.'}, {'input': [{'timestamp': 1738497600, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738497660, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738497900, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738498200, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738498500, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738498800, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738498860, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738499400, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738499700, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738500000, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738501200, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738502400, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738503000, 'room': 'porch', 'nodeId': 1, 
'occupancy': 1}, {'timestamp': 1738503060, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738519200, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738519260, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738519320, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738519380, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738520400, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738522800, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738522860, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738526400, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738530000, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738530060, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738530600, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738540800, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738540860, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738542000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738543200, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738543260, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738546800, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738550400, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738550460, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738551000, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'normal', 'reason': 'All activities follow the expected daily routine with normal timing and sequence.'}, {'input': [{'timestamp': 1738584000, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738584300, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738584600, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738584900, 'room': 'bathroom', 'nodeId': 
2, 'onOff': False}, {'timestamp': 1738585200, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738585260, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738585800, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738586100, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738586400, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738587600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738588800, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738589400, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738589460, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738605600, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738605660, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738605720, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738605780, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738606800, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738609200, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738609260, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738612800, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738616400, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738617000, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738627200, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738627260, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738628400, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738629600, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738629660, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738633200, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738636800, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738637400, 
'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'anomalous', 'reason': 'All three daily medications were missed (morning, afternoon, and evening), while other activities remained normal. This suggests a serious deviation in medication adherence.'}, {'input': [{'timestamp': 1738670400, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738670460, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738670700, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738671000, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738671300, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738671600, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738671660, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738672200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738672500, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738672800, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738672860, 'room': 'kitchen', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738672920, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738672980, 'room': 'bathroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673040, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673100, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673160, 'room': 'kitchen', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673220, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673280, 'room': 'bathroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673340, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673400, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673460, 'room': 'kitchen', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673520, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673580, 'room': 'bathroom', 'nodeId': 1, 'occupancy': 1}, 
{'timestamp': 1738673640, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738673700, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738674000, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738677600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738681200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738684800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738688400, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}], 'result': 'anomalous', 'reason': 'Unusual rapid transitions between rooms followed by extended bedroom stay suggests potential confusion or disorientation. Missing subsequent daily activities and medications adds to the concern.'}, {'input': [{'timestamp': 1738756800, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738756860, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738757100, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738757400, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738757700, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738758000, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738758060, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738758600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738758900, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738759200, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738760400, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738761600, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738762200, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738762260, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738778400, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738778460, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738778520, 'room': 
'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738778580, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738779600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738782000, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738782060, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738785600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738789200, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738789260, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738789800, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738800000, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738800060, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738801200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738802400, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738802460, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738806000, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738809600, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738809660, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738810200, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'normal', 'reason': 'All activities follow the expected daily routine with normal timing and sequence.'}, {'input': [{'timestamp': 1738843200, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738843260, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738843500, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738843800, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738844100, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738844400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738844460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738844520, 
'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738844580, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738844640, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738844700, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738844760, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738844820, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738844880, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738844940, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738845000, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738845060, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738866400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738866460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738866520, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738866580, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738866640, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738866700, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738875600, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738875660, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738876200, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738888400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738888460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738888520, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738888580, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738888640, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738888700, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}], 'result': 'anomalous', 'reason': 'Unusually frequent kitchen visits and fridge openings throughout the day suggest potential memory issues or compulsive behavior. 
While other activities remain normal, the repeated kitchen pattern is concerning.'}, {'input': [{'timestamp': 1738929600, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738929660, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738929900, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738930200, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738930260, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738930320, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738930380, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738930440, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738930500, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738930560, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738944000, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738944060, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738944120, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738944180, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738952400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738952460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738953000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738957600, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738957660, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738957720, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738957780, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738962000, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738962060, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738962600, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}], 'result': 'anomalous', 'reason': 'Unusually frequent bathroom visits throughout the day, with multiple 
entries and exits in short succession. While other activities remain normal, this pattern suggests potential health issues requiring attention.'}]\n" + ] + } + ], + "source": [ + "# Load in our dataset\n", + "# Open and read the JSON file\n", + "with open('data/all_data.json', 'r') as file:\n", + " dataset = json.load(file)\n", + "\n", + "# Print the data\n", + "print(dataset)" + ] + }, + { + "cell_type": "code", + "execution_count": 111, + "id": "cde08860-b393-49b8-a620-06a8c0990a64", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Number of Situations: 28\n" + ] + } + ], + "source": [ + "print(f\"Number of Situations: {len(dataset):,}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 112, + "id": "3e29a5ab-ca61-41cc-9b33-22d374681b85", + "metadata": {}, + "outputs": [], + "source": [ + "# Investigate a particular datapoint\n", + "datapoint = dataset[2]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 113, + "id": "40a4e10f-6710-4780-a95e-6c0030c3fb87", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[{'timestamp': 1737979200, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1737979260, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737979500, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737979800, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1737980100, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737980400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737980460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1737981000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737981300, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1737981600, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737982800, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1737984000, 
'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737984600, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1737984660, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738000800, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738000860, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738000920, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738000980, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738002000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738004400, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738004460, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738008000, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738011600, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738011660, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738012200, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738022400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738022460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738023600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738024800, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738024860, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738028400, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738032000, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738032060, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738032600, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}]\n", + "normal\n", + "The data shows a typical day with all expected activities occurring at regular times.\n" + ] + }, + { + "data": { + "text/plain": [ + "{'input': [{'timestamp': 1737979200,\n", + " 'room': 'pillbox',\n", + " 'nodeId': 1,\n", + " 'onOff': True},\n", + " {'timestamp': 1737979260, 'room': 
'bedroom', 'nodeId': 2, 'onOff': True},\n", + " {'timestamp': 1737979500, 'room': 'bathroom', 'nodeId': 2, 'onOff': True},\n", + " {'timestamp': 1737979800, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500},\n", + " {'timestamp': 1737980100, 'room': 'bathroom', 'nodeId': 2, 'onOff': False},\n", + " {'timestamp': 1737980400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True},\n", + " {'timestamp': 1737980460, 'room': 'kitchen', 'event': 'fridge_opened'},\n", + " {'timestamp': 1737981000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False},\n", + " {'timestamp': 1737981300, 'room': 'livingroom', 'nodeId': 2, 'onOff': True},\n", + " {'timestamp': 1737981600, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1},\n", + " {'timestamp': 1737982800, 'room': 'livingroom', 'nodeId': 2, 'onOff': False},\n", + " {'timestamp': 1737984000, 'room': 'hall', 'nodeId': 1, 'occupancy': 1},\n", + " {'timestamp': 1737984600, 'room': 'porch', 'nodeId': 1, 'occupancy': 1},\n", + " {'timestamp': 1737984660, 'room': 'hall', 'nodeId': 1, 'occupancy': 0},\n", + " {'timestamp': 1738000800, 'room': 'porch', 'nodeId': 1, 'occupancy': 1},\n", + " {'timestamp': 1738000860, 'room': 'hall', 'nodeId': 1, 'occupancy': 1},\n", + " {'timestamp': 1738000920, 'room': 'kitchen', 'nodeId': 2, 'onOff': True},\n", + " {'timestamp': 1738000980, 'room': 'kitchen', 'event': 'fridge_opened'},\n", + " {'timestamp': 1738002000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False},\n", + " {'timestamp': 1738004400, 'room': 'livingroom', 'nodeId': 2, 'onOff': True},\n", + " {'timestamp': 1738004460, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1},\n", + " {'timestamp': 1738008000, 'room': 'livingroom', 'nodeId': 2, 'onOff': False},\n", + " {'timestamp': 1738011600, 'room': 'bedroom', 'nodeId': 2, 'onOff': True},\n", + " {'timestamp': 1738011660, 'room': 'pillbox', 'nodeId': 1, 'onOff': True},\n", + " {'timestamp': 1738012200, 'room': 'bedroom', 'nodeId': 2, 'onOff': False},\n", + " {'timestamp': 1738022400, 'room': 'kitchen', 
'nodeId': 2, 'onOff': True},\n", + " {'timestamp': 1738022460, 'room': 'kitchen', 'event': 'fridge_opened'},\n", + " {'timestamp': 1738023600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False},\n", + " {'timestamp': 1738024800, 'room': 'livingroom', 'nodeId': 2, 'onOff': True},\n", + " {'timestamp': 1738024860, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1},\n", + " {'timestamp': 1738028400, 'room': 'livingroom', 'nodeId': 2, 'onOff': False},\n", + " {'timestamp': 1738032000, 'room': 'bedroom', 'nodeId': 2, 'onOff': True},\n", + " {'timestamp': 1738032060, 'room': 'pillbox', 'nodeId': 1, 'onOff': True},\n", + " {'timestamp': 1738032600, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}],\n", + " 'result': 'normal',\n", + " 'reason': 'The data shows a typical day with all expected activities occurring at regular times.'}" + ] + }, + "execution_count": 113, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Investigate\n", + "\n", + "print(datapoint[\"input\"])\n", + "print(datapoint[\"result\"])\n", + "print(datapoint[\"reason\"])\n", + "datapoint" + ] + }, + { + "cell_type": "code", + "execution_count": 114, + "id": "89078cb1-9679-4eb0-b295-599b8586bcd1", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAxUAAAJOCAYAAADBIyqKAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAATupJREFUeJzt3XlcVHX////nyDIoAoo7iaDmLmKamsulYBa5oHZlLqWidaWVWW5lXqWimaZXmkumVubSYlm5Vm4laG6Zmtpibrl9MiVNQTFR4f37oy/zc2QROOCAPO6329xunvd5n3Nec2Yc5jnnfc6xGWOMAAAAACCHiri6AAAAAAAFG6ECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAiik+vTpI5vNpqNHj7q6lFyxdu1aNW/eXCVLlpTNZlPnzp1dXVK+Fx0dLZvNptjYWFeXgiwoTK+XzWZTWFiYq8vIsqNHj8pms6lPnz6uLgVwGUIFYFHqHxObzaaIiIh0+2zbto0/OHno6NGj6tSpk3777Tf17dtXo0ePVvfu3TNdJvULms1m06JFi9Lt8+STTxaaL3H53cKFCx2v1/fff+/qcpBNx48f19NPP61q1arJy8tLxYsXV+XKldW+fXtNnDhRiYmJN12Hq38ICQ4OVnBwsEu2DRQE7q4uALidrF27VuvXr1fr1q1dXUqh8vXXX+vy5cuaPHmyHnnkkWwv//LLL6tLly7y8PDIg+qQG+bOnSubzSZjjN577z01atTI1SUhi/bs2aOwsDCdP39ezZs3V9u2bVW8eHEdP35c3377rb766is99NBDuvPOOx3L7Nu3T8WKFXNh1dlzxx13aN++ffLz83N1KYDLECqAXBIcHKzjx49r+PDh2r59u2w2m6tLKjROnjwpSQoICMj2slWrVtXhw4c1e/ZsDRw4MLdLQy44ePCgNm7cqI4dO+rXX3/VokWLNGXKFBUtWtTVpSELhgwZovPnz2vhwoXq1atXmvlbt25V6dKlndpq1qx5q8rLFR4eHgWuZiC3MfwJyCU1atRQr169tGPHDi1evDhLy2R2OD0sLCxNMLl+TPW8efMUEhKiokWLqnLlypo+fbokyRijyZMnq0aNGvLy8lK1atW0cOHCDGtISUnRpEmTHMMSKleurLFjx+rq1avp9t+4caMiIyNVunRp2e12VatWTS+//LIuXbrk1C82NlY2m03R0dHasmWL7r//fpUoUSLLYeunn35S165dVbZsWdntdlWuXFmDBg3S2bNnHX1Sh56NHj1akhQeHu4YIpPVIUtDhw5VyZIlNW7cOF24cCFLy0jSypUrFR4eLj8/PxUtWlShoaGaMmWKrl275tTv+rHW+/bt04MPPqhSpUo5hnHMnz9fNptN8+fP18qVK9WkSRMVK1ZMd9xxh0aOHKmUlBRJ0oIFCxQaGqqiRYuqUqVK+t///pemppMnT2r06NG65557HPstODhYTz/9tOLi4rL83NJz5513ysfHJ83rnKpjx46y2Ww6cOCApH/eV++++64aN24sf39/FS1aVBUrVlRkZGS2h5O99957kqTevXurV69eio+P12effZZu39QhMkeOHNH06dNVs2ZN2e12BQUFacyYMY79eb1r165pypQpjv3r5+en8PBwrVy5Mk3f/Pp6ZfX9eP3/yxtldF7AwYMH1bdvX1WuXFl2u13+/v4KDQ3VoEGDZIy5aW1bt25ViRIl0g0UktS0aVOVKFHCqe3GcyqCg4O1YMECSVLlypUd/89T+9zsnIb0ztHYuXOnnnnmGdWtW9ex30JCQvTaa685ff6lrvvYsWM6duyYY9vX78fMtn/s2DE9/vjjuuOOO+Tp6amKFSvq8ccf1/Hjx9P0Tf3cv3r1qqKjoxUcHCy73a7q1avrrbfeStM/9QhtaGio/Pz85O3treDgYHXt2lV79uxJd18AeYUjFUA
uGjt2rD7++GO9/PLL+ve//51nw2mmTp2q2NhYderUSa1bt9bnn3+u5557TsWKFdMPP/ygzz//XB06dNC9996rjz/+WFFRUQoODlbLli3TrGvQoEHavHmzunbtquLFi2vlypUaPXq09u7dm+aL26xZszRgwACVKFFCkZGRKlu2rHbs2KFXX31VMTExiomJkaenp9MyW7Zs0fjx4xUeHq5+/fql+4f0Rps2bVJERISuXLmiLl26KDg4WFu3btW0adP0xRdfaNu2bSpdurRKlCih0aNHKzY2Vhs2bHA8T0lZHvtcsmRJvfjiixo+fLhef/11jRkz5qbLTJkyRUOHDpW/v78eeeQReXt7a8WKFRo6dKi+/fZbLVmyJE14OnTokO655x6FhISoT58+Onv2rNO+Wrp0qdauXavOnTurefPm+vLLLzVu3DgZY+Tn56dx48apU6dOCgsL0+eff64XXnhB5cqVU+/evR3r2LhxoyZPnqx7771XTZo0kYeHh3744QfNmjVLa9as0a5du3I8PKNnz54aM2aMli1blmaI2ZkzZ7R69Wo1adJE1atXlySNGDFCkyZNUtWqVfXII4/Ix8dHv//+uzZt2qSvv/46yyfhJicna8GCBSpZsqQ6dOigu+++W6NGjdLcuXMz/JIqSc8//7w2bNigDh06KCIiQsuWLVN0dLSuXLmiV1991dHPGKMuXbpo+fLlql69ugYMGKDExER98skn6tixo6ZMmaLBgwenWX9+er1y8n7MqpMnT6px48ZKTExU+/bt1a1bNyUmJurgwYN666239Prrr8vdPfOvEqVKldKpU6d08uTJHB1NlP75nJo/f7727Nmj5557zhFCrJzj8M4772jlypVq2bKl2rVrp0uXLik2NlYjRozQ999/r88//1ySHJ8zU6dOddSS6mbv4wMHDqhFixb6888/FRkZqTp16uinn37Se++9p5UrV2rTpk2O/zPX69Gjh7Zv3662bdvKzc1Nixcv1oABA+Th4aEnnnjC0S8qKkqLFy9WvXr11LdvX9ntdp04cUIxMTH6/vvvFRoamuP9A2SbAWDJkSNHjCQTERFhjDFm2LBhRpKZMWOGo8/WrVuNJBMVFeW0bFBQkAkKCkp3va1atTI3/hcdPXq0kWT8/f3N4cOHHe3Hjx83np6exs/Pz1SvXt3ExcU55m3bts1IMpGRkU7rioqKMpJMmTJlzIkTJxztSUlJpmXLlkaS+eyzzxztP//8s3F3dzehoaHmzJkzTuuaMGGCkWRef/11R1tMTIyRZCSZ9957L93nmJ7k5GRTtWpVI8msXr3aad7zzz9vJJnHHnss3f0SExOT5e2kLrNo0SLz999/m8DAQOPt7W1OnTrl6NO/f/806z106JBxd3c3ZcuWNcePH3e0X7582bRo0cJIMgsXLnS0p74/JJlRo0alqWPevHlGkvHw8DDbt293tCckJJiyZcuaYsWKmfLly6f7eoeEhDit6/Tp0+bChQtptrFgwQIjyYwbNy7dfZCV/Xbw4EEjybRt2zbNvBkzZhhJ5s0333S0+fv7m4CAAJOYmJim/9mzZ2+6vVQrVqwwkkz//v0dbS1btjQ2m80cPHgwTf/U93XlypXNyZMnHe1//vmnKVGihPHx8TFJSUmO9tR906pVK6f2Y8eOmdKlSxt3d3enfZ/fXq/svh9T/1+OHj06zXZT36vXf05Nnz7dSDJTp05N0z+rr+OQIUMcr8nEiRPNli1b0n1fXC/1Nble6mt75MiRLNV+s/UdO3bMXLt2zaktJSXFPPbYY0aS2bRpk9O8zD6vM9p+eHi4kWTmzJnj1D5z5kwjybRu3dqpPfVzv0mTJiY+Pt7R/uuvvxp3d3dTo0YNR9v58+eNzWYzDRs2TPM8rl27Zs6dO5durUBeYfgTkMv++9//qkSJEnrllVd08eLFPNnGc889pypVqjimAwMD1aJFC8XHx+ull15SmTJlHPOaNGmiKlWqZHgo/LnnnlP
FihUd056eno5fcufPn+9onzNnjq5du6YZM2aoVKlSTut44YUXVKZMmXSvotSgQQP17ds3y89t8+bNOnz4sNq2bZvmalqjRo2Sv7+/PvroI125ciXL67wZLy8vjRkzRomJiTc9UvHRRx/p2rVrGjp0qAIDAx3tdrtdEydOlOS831KVL19eL730Uobr7dmzp9PJxz4+PurQoYMuXbqkp556Kt3X+5dffnEa3lK2bFkVL148zbp79eolX19fff3115k+t8zceeedatq0qdatW5dmaM77778vDw8PdevWzand09NTbm5uadbl7++f5e3OnTtXkpx+4e/du7fjhO2MjBw5UhUqVHBMly5dWp06ddKFCxe0f/9+R3vqkJpJkyY5HTmqVKmSBg8erGvXrunDDz9Ms/788nrl9P2YXemdv5LV1/HVV19Vnz59dOzYMQ0fPlzNmjWTr6+vGjZsqHHjxun8+fOW68uJSpUqpXl/2mw2DRgwQJIs/X+R/rniVUxMjGrXru10dEH658pyNWvW1Pr163XixIk0y06YMEG+vr6O6Ro1aqh58+bav3+/Y5hm6oULvLy8VKSI89c5Nze3NEPKgLxGqAByWepwmri4OL3++ut5so369eunaUv9ApXRvNSTmW/0r3/9K01b06ZN5e7urh9++MHRtm3bNknSmjVrFB0d7fQYO3asPDw89Ouvv6ZZV3av0pO6zfSGFRQvXlx33323Ll++7PTFMDdERUWpTp06euedd3To0KEc1de0aVN5eXlp9+7daeaFhoamGRp2vZy8psnJyTp9+rRT+5IlSxQREaEyZcrI3d1dNptNRYoUUUJCQobvgazq1auXrl275hQeDx48qO3bt+uBBx5wOtm2e/fuOnr0qOrWrauRI0dq/fr1+vvvv7O1vVOnTunLL7/UnXfeqWbNmjnaH374YRUtWlQLFixQcnJyuss2bNgwTVtqeL7+S+wPP/ygYsWKqXHjxmn6h4eHS1K6r2d+eb1y+n7MqsjISHl7e2vAgAHq1q2b5s2bp99++y1b6/Dy8tK8efN07NgxzZkzR48//rhq166tXbt2aeTIkQoJCcn2OnPDlStXNGXKFDVu3Fi+vr4qUqSIbDab471j9f9L6n5v1apVmuFnRYoUcQxHTe/1ycr719fXV+3atdPmzZvVoEEDjR8/Xlu2bMnwfDggr3FOBZAHnn32Wb355puaPHmynn766Vxf//W/YKVKHdec0bwbT9hMVa5cuTRtbm5uKlWqlOLj4x1tf/31lyQ5jUfPivTWn5mEhIRMl0v94pbaL7cUKVJEEyZMUMeOHfXf//43w5PtM6vPZrOpXLly+v3339PMu9l+yMlrKsnpC8TkyZM1bNgwlSlTRvfff78qVqzo+IV56tSpSkpKyrSGm+nWrZsGDRqkDz74QM8995ykf45SSEpzfsO0adNUuXJlzZs3T+PGjdO4cePk5eWlrl27avLkyWmu9pOeBQsW6Nq1a2nW7evrq06dOunjjz/W6tWr1b59+zTLZrbPrg8iCQkJTr/wXy+z91p+eb1y+n7MquDgYG3btk3R0dH66quvHP8vatasqbFjx+rhhx/O8roqVqyofv36qV+/fpKkw4cP67HHHtPGjRs1ePBgLV++PMd15kSXLl20cuVKVa9eXd26dVPZsmXl4eGh8+fPa9q0aZb/v1j5LMvq+/fTTz/V+PHj9dFHHzmOhPr6+qpv374aP358gbosLwo+QgWQB4oWLaoxY8bo8ccf15gxYzI8obRIkSIZDuO5/gt9Xjp9+rRq1Kjh1JacnKyzZ886/TFM/SOXkJAgHx+fLK8/uyeIpm7nxl90U506dcqpX26KjIzUv/71L3366acZ3mDt+vqCgoKc5hljdPr06XRry+tLDF+7dk2vvPKKKlSooN27d6ts2bJOdU2aNMnyNvz9/dWuXTstW7ZM+/fvV40aNfTBBx/Iz89PkZGRTn3d3d01bNgwDRs2TCdPntSGDRs
0b948LVy4UKdOndKaNWtuur3U4U2jR492XOHrRnPnzk03VGSVr69vhldaysv3Wm69Xtl9P6YOk0nvR4aMPnPq1q2rzz77TFevXtXOnTu1atUqTZ8+Xd26dVNAQICaN2+epVpvVLVqVc2fP19VqlTR+vXrc7SOVNl9Xt9//71WrlypiIgIffnll07DoLZt26Zp06ZZqke6NZ9lxYoVc4T2I0eOKCYmRrNnz9a0adP0999/a86cOTleN5BdDH8C8khWhtOULFlScXFxaf4Qpl5d5Vb49ttv07Rt3bpV165d01133eVoa9KkiaT/fxhUXkndZnqXHU1MTNSOHTtUtGjRNEEot6R+mRs+fHi26/vuu+90+fLldIe/5LUzZ84oPj5eTZs2dfqCKkk7duzI9tCjjKQG5A8++ECbN2/WkSNH1KVLF3l5eWW4TEBAgHr06KHVq1frzjvv1Ndff33Ter799lsdOHBAVatW1eOPP57uo0yZMvriiy8sXS73rrvu0qVLl7R9+/Y081Jf47x4PXPr9cru+7FkyZKSlO7Ri+uHO6bHw8ND99xzj8aMGaPp06fLGKMvvvgiS3VmJL1zSjKS+sU/vSFvqecPZPV5HT58WJLUvn37NOdVpPeZmLr9jIbbpSd1v2/cuDHNpXeNMdq4caNTP6sqV66sxx57TBs2bFDx4sW1YsWKXFkvkFWECiCPuLm5afz48Y7rjaenUaNGunr1qtOJoMYYjRgxQomJibekzmnTpun//u//HNNXrlxxHEa//prrTz/9tNzd3TVw4MB0Lwt7/vz5m34pyYrmzZuratWqWrVqVZoTJceNG6ezZ8+qR48emZ6fYMU999yjBx98UDExMemeqPnII4/I3d1dU6ZMcRpzfeXKFUcQyeha+XmpbNmyKlq0qHbt2uV0L4lz587l6k392rdvr5IlS+rDDz903P/kxiNxSUlJ2rJlS5plExMTdfHiRXl4eKQ5sfRGqSdov/TSS3r33XfTffznP//R1atXM70Py81ERUVJ+ucSuNcPTTpx4oSmTJkid3d3Pfroozlef0Zy6/XK7vuxRo0a8vHx0YoVKxxDGqV/fk0fN25cmvXv3Lkz3eE5qb++ZxYmU40dOzbdk5GNMXrttdckSS1atLjpelJPDE9vXb6+vqpRo4Y2bdrk9CPOhQsXNGLEiDT9U4/qbNq0yan9559/1oQJEzLc/pkzZ3T58uWb1ir9cyJ4eHi4fv755zQXFXj77be1b98+tW7dOsPhdzfz559/6qeffkrTfu7cOSUlJWXptQFyE8OfgDzUsWNHtWjRIs0frlTPPPOM5s2bp//85z9at26dypQpo2+//Vbnz59XaGjoLbl50T333KPQ0FB169ZN3t7eWrlypfbv369///vfeuihhxz96tatq7feektPPfWUatSooXbt2qlq1aq6cOGCfvvtN23YsEF9+vTR7NmzLdVTpEgRzZ8/XxEREWrXrp0efvhhBQUFaevWrYqNjVXVqlUdX0TyyoQJE7RixQrHr5nXq1q1qiZOnKihQ4eqXr166tq1q9N+69Spk3r27Jmn9aWnSJEievrppx03woqMjFRCQoJWrVqloKCgHN8f4EZ2u11du3bVnDlzNG/ePAUFBaW5/8nff/+t5s2bq3r16mrYsKEqVaqkixcv6osvvtCpU6c0bNgw2e32DLeRkJCgTz/9VN7e3pmO2e/Tp48mTJiguXPnatiwYTl6Pr169dKSJUu0fPly1atXTx06dHDcp+Kvv/7S5MmTna7klFty6/XK7vvR09NTAwcO1Pjx49WgQQPHFbFWrlypVq1apXnPv//++5ozZ45atmypqlWrytfXV7/88ou++uor+fv7Z+nKblOmTFF0dLTuvvtuNWzYUP7+/jp79qxiYmJ04MABlSpVSpMnT77pelq3bq3XX39d/fr100MPPSRvb28FBQU5Qu3QoUPVr18/NW3aVA8//LBSUlK0atWqdC8W0bhxYzVu3Fi
LFy/WH3/8oXvuuUfHjx/XihUr1L59+3Rvrti6dWvt2LFDbdu21b/+9S95enqqZcuW6d7/J9WsWbPUokULPfHEE1q5cqVq166tn3/+WStWrFCZMmU0a9asmz7vjPz++++66667FBoaqnr16umOO+7Q2bNntXz5cl29ejXH/yeAHHPRpWyB28aN96m40ebNmx33KUjvGurr1683TZo0MXa73ZQqVcr06tXLnD59OtP7VKR3X4HMruGe3rpS+x8+fNi89tpr5s477zSenp4mKCjIREdHO12z/3rbt2833bt3NwEBAcbDw8OULl3aNGjQwLz44otm3759jn6ZXQ8/K/bu3Wu6dOliSpcubTw8PExQUJB57rnnzJ9//pmmr9X7VKSnX79+jtctvfUuX77ctGrVyvj4+Bi73W5CQkLM5MmTzdWrV5363ez6+an3PZg3b162nld6r/eVK1fMq6++aqpVq2bsdrupVKmSGTp0qLlw4UK619jPyX4zxphNmzY59s2IESPSzL9y5YqZOHGiuf/++03FihWNp6enKVeunGnZsqX56KOPTEpKSqbrnzNnTqb77HrNmzc3kszmzZuNMZn/P8jo+V69etW8/vrrJiQkxNjtduPj42NatWplli9fnmYd+fX1yur70Zh/7gUTHR1tAgMDjaenp6levbqZNm2a+e2339Ls923btpn+/fubunXrmhIlSpiiRYuaatWqmWeeecYcO3YszbrTs3HjRvPiiy+apk2bOj43ihcvburVq2eGDRvmdD+RVErnvhLGGDNp0iRTrVo14+HhkW6fmTNnOuZXqlTJjBo1yly5ciXdvnFxceaxxx4zAQEBxsvLy4SEhJiZM2emux+MMebChQvmiSeeMBUqVDBubm5On2+Z/T8/evSo6du3r6lQoYJxd3c3FSpUMH379jVHjx5N0ze9z+pUN76Hzp07Z6Kjo03Lli1NhQoVjKenpwkICDAPPPCAWbVqVbrrAPKSzZgbBvoBAAAAQDZwTgUAAAAASwgVAAAAACwhVAAAAACwhFABAAAAwBJCBQAAAABLCBUAAAAALCl0N79LSUnRyZMn5ePjI5vN5upyAAAAgHzLGKMLFy4oICBARYpkfDyi0IWKkydPKjAw0NVlAAAAAAXGiRMnVLFixQznF7pQ4ePjI+mfHePr6+viagAAAID8KyEhQYGBgY7v0BkpdKEidciTr68voQIAAADIgpudNsCJ2gAAAAAsIVQAAAAAsIRQAQAAAMASQgUAAAAASwgVAAAAACwhVAAAAFiwceNGRUZGKiAgQDabTcuWLUvTZ9++ferYsaP8/Pzk7e2tRo0a6fjx47e+WCCPECoAAAAsSExMVGhoqGbOnJnu/MOHD6tFixaqWbOmYmNjtXfvXo0cOVJeXl63uFIg79iMMcbVRdxKCQkJ8vPzU3x8PPepAAAAucpms2np0qXq3Lmzo6179+7y8PDQ+++/77rCgBzK6ndnjlQAAADkkZSUFH355ZeqXr26IiIiVLZsWTVp0iTdIVJAQUaoAAAAyCNxcXG6ePGiXnvtNT3wwANau3atHnzwQf373//Whg0bXF0ekGvcXV0AAADA7SolJUWS1KlTJw0ePFiSVL9+fW3ZskWzZ89Wq1atXFkekGs4UgEAAJBHSpcuLXd3d9WuXdupvVatWlz9CbcVQgUAAEAe8fT0VKNGjbR//36n9gMHDigoKMhFVQG5j+FPAAAAFly8eFGHDh1yTB85ckS7d++Wv7+/KlWqpOeff17dunVTy5YtFR4ertWrV2vlypWKjY11XdFALuOSsgAAABbExsYqPDw8TXtUVJTmz58vSXrvvfc0YcIE/d///Z9q1KihMWPGqFOnTre4UiD7svrdmVABAAAAIF3cpwIAAADALZGvQsXGjRsVGRmpgIAA2Wy2dG8Ms2/fPnXs2FF+fn7y9vZWo0aNuHoCAAAA4EL5KlQkJiYqNDRUM2fOTHf+4cOH1aJFC9WsWVOxsbH
au3evRo4cKS8vr1tcKQAAAIBU+facCpvNpqVLl6pz586Otu7du8vDw0Pvv/9+jtfLORUAAABA1tx251SkpKToyy+/VPXq1RUREaGyZcuqSZMm6Q6Rul5SUpISEhKcHgAAAAByT4G5T0VcXJwuXryo1157TePGjdPEiRO1evVq/fvf/1ZMTEyGt7mfMGGCxowZc4urBQDcFiIjXV0BgMJu5UpXV5AlBepIhSR16tRJgwcPVv369fXiiy+qQ4cOmj17dobLjRgxQvHx8Y7HiRMnblXJAAAAQKFQYI5UlC5dWu7u7qpdu7ZTe61atbRp06YMl7Pb7bLb7XldHgAAAFBoFZgjFZ6enmrUqJH279/v1H7gwAEFBQW5qCoAAAAA+epIxcWLF3Xo0CHH9JEjR7R79275+/urUqVKev7559WtWze1bNlS4eHhWr16tVauXKnY2FjXFQ0AAAAUcvkqVOzYsUPh4eGO6SFDhkiSoqKiNH/+fD344IOaPXu2JkyYoGeffVY1atTQ559/rhYtWriqZAAAAKDQy7f3qcgr3KcCAJBlXP0JgKu5+OpPt919KgAAAADkT4QKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGBJvgoVGzduVGRkpAICAmSz2bRs2bIM+z755JOy2WyaOnXqLasPAAAAQFr5KlQkJiYqNDRUM2fOzLTf0qVLtW3bNgUEBNyiygAAAABkxN3VBVyvbdu2atu2baZ9fv/9dw0cOFBr1qxR+/btb1FlAAAAADKSr45U3ExKSop69eql559/XnXq1HF1OQAAAACUz45U3MzEiRPl7u6uZ599NsvLJCUlKSkpyTGdkJCQF6UBAAAAhVaBOVKxc+dOTZs2TfPnz5fNZsvychMmTJCfn5/jERgYmIdVAgAAAIVPgQkV3377reLi4lSpUiW5u7vL3d1dx44d09ChQxUcHJzhciNGjFB8fLzjceLEiVtXNAAAAFAIFJjhT7169VKbNm2c2iIiItSrVy/17ds3w+XsdrvsdntelwcAAAAUWvkqVFy8eFGHDh1yTB85ckS7d++Wv7+/KlWqpFKlSjn19/DwUPny5VWjRo1bXSoAAACA/ydfhYodO3YoPDzcMT1kyBBJUlRUlObPn++iqgAAAABkJl+FirCwMBljstz/6NGjeVcMAAAAgCwpMCdqAwAAAMifCBUAAAAALCFUAAAAALCEUAEAAADAEkIFAAAAAEsIFQAAAAAsIVQAAAAAsIRQAQAAAMASQgUAAAAASwgVAAAAACwhVAAAAACwhFABAAAAwBJCBQAAAABLCBUAAAAALCFUAAAAALCEUAEAAADAEkIFAAAAAEsIFQAAAAAsIVQAAAAAsIRQAQAAAMASQgUAAAAASwgVAAAAACwhVAAAAACwhFABAAAAwBJCBQAAAABLCBUAAAAALCFUAAAAALCEUAEAAADAEkIFAAAAAEsIFQAAAAAsIVQAAAAAsIRQAQAAAMASQgUAAAAASwgVAAAAACwhVAAAAACwhFABAAAAwBJCBQAAAABLCBVAPrZx40ZFRkYqICBANptNy5Ytc8y7evWqhg8frpC
QEHl7eysgIEC9e/fWyZMnXVcwAAAolAgVQD6WmJio0NBQzZw5M828S5cuadeuXRo5cqR27dqlJUuWaP/+/erYsaMLKgUAAIWZu6sLAJCxtm3bqm3btunO8/Pz07p165za3nzzTTVu3FjHjx9XpUqVbkWJAAAAHKkAbifx8fGy2WwqUaKEq0sBAACFCKECuE1cvnxZw4cPV48ePeTr6+vqcgAAQCFCqABuA1evXlXXrl1ljNGsWbNcXQ4AAChkOKcCKOBSA8WxY8e0fv16jlIAAIBbjlABFGCpgeLgwYOKiYlRqVKlXF0SAAAohAgVQD528eJFHTp0yDF95MgR7d69W/7+/qpQoYK6dOmiXbt26YsvvlBycrJOnTolSfL395enp6erygYAAIUMoQLIx3bs2KHw8HDH9JAhQyRJUVFRio6O1ooVKyRJ9evXd1ouJiZGYWFht6pMAABQyBEqgHwsLCxMxpgM52c2DwAA4Fbh6k8AAAAALMlXoWLjxo2KjIxUQECAbDabli1b5ph39epVDR8+XCEhIfL29lZAQIB69+6tkydPuq5gAAAAAPkrVCQmJio0NFQzZ85MM+/SpUvatWuXRo4cqV27dmnJkiXav3+/Onbs6IJKAQAAAKTKV+dUtG3bVm3btk13np+fn9atW+fU9uabb6px48Y6fvy4KlWqdCtKBAAAAHCDfBUqsis+Pl42m00lSpTIsE9SUpKSkpIc0wkJCbegMgAAAKDwKLCh4vLlyxo+fLh69OiR6R2EJ0yYoDFjxtzCyrImclGkq0sAUMit7LHS1SUAAG4T+eqciqxKvYuwMUazZs3KtO+IESMUHx/veJw4ceIWVQkAAAAUDgXuSEVqoDh27JjWr1+f6VEKSbLb7bLb7beoOgAAAKDwKVChIjVQHDx4UDExMSpVqpSrSwIAAAAKvXwVKi5evKhDhw45po8cOaLdu3fL399fFSpUUJcuXbRr1y598cUXSk5O1qlTpyRJ/v7+8vT0dFXZAAAAQKGWr0LFjh07FB4e7pgeMmSIJCkqKkrR0dFasWKFJKl+/fpOy8XExCgsLOxWlQkAAADgOvkqVISFhckYk+H8zOYBAAAAcI0CefUnAAAAAPkHoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWJKvQsXGjRsVGRmpgIAA2Ww2LVu2zGm+MUajRo1ShQoVVLRoUbVp00YHDx50TbEAAAAAJOWzUJGYmKjQ0FDNnDkz3fmTJk3S9OnTNXv2bH333Xfy9vZWRESELl++fIsrBQAAAJDK3dUFXK9t27Zq27ZtuvOMMZo6dapefvllderUSZK0cOFClStXTsuWLVP37t1vZakAAAAA/p98daQiM0eOHNGpU6fUpk0bR5ufn5+aNGmirVu3ZrhcUlKSEhISnB4AAAAAck+BCRWnTp2SJJUrV86pvVy5co556ZkwYYL8/Pwcj8DAwDytEwAAAChsCkyoyKkRI0YoPj7e8Thx4oSrSwIAAABuKwUmVJQvX16SdPr0aaf206dPO+alx263y9fX1+kBAAAAIPcUmFBRuXJllS9fXt98842jLSEhQd9
9952aNm3qwsoAAACAwi1fXf3p4sWLOnTokGP6yJEj2r17t/z9/VWpUiUNGjRI48aNU7Vq1VS5cmWNHDlSAQEB6ty5s+uKBgAAAAq5fBUqduzYofDwcMf0kCFDJElRUVGaP3++XnjhBSUmJqpfv346f/68WrRoodWrV8vLy8tVJQMAAACFXr4KFWFhYTLGZDjfZrNp7NixGjt27C2sCgAAAEBmCsw5FQAAAADyJ0IFAAAAAEsIFQAAAAAsIVQAAAAAsIRQAQAAAMASQgUAAAAASwgVAAAAACwhVAAAAACwhFABAAAAwBJCBQAAAABLCBUAAAAALCFUAAAAALCEUAEAAADAEkIFAAAAAEsIFQAAAAAsIVQAAAAAsIRQAQAAAMASQgUAAAAASwgVAAAAACzJcaho3bq1vvnmmwznx8TEqHXr1jldPQAAAIACIsehIjY2VqdPn85wflxcnDZs2JDT1QMAAAAoICwNf7LZbBnOO3TokHx8fKysHgAAAEAB4J6dzgsWLNCCBQsc0+PGjdM777yTpt/58+e1d+9etWvXznqFAAAAAPK1bIWKS5cu6c8//3RMX7hwQUWKOB/ssNls8vb21pNPPqlRo0blTpUAAAAA8q1shYqnnnpKTz31lCSpcuXKmjZtmjp27JgnhQEAAAAoGLIVKq535MiR3KwDAAAAQAGV41CR6sKFCzp27JjOnTsnY0ya+S1btrS6CQAAAAD5WI5DxZkzZzRw4EB9/vnnSk5OTjPfGCObzZbuPAAAAAC3jxyHin79+mnlypV69tln9a9//UslS5bMzboAAAAAFBA5DhVr167V4MGDNWnSpNysBwAAAEABk+Ob3xUrVkzBwcG5WAoAAACAgijHoaJnz55aunRpbtYCAAAAoADK8fCnLl26aMOGDXrggQfUr18/BQYGys3NLU2/Bg0aWCoQAAAAQP6W41DRokULx7/XrVuXZj5XfwIAAAAKhxyHinnz5uVmHQAAAAAKqByHiqioqNysAwAAAEABleMTtQEAAABAsnCk4rHHHrtpH5vNprlz5+Z0EwAAAAAKgByHivXr18tmszm1JScn648//lBycrLKlCkjb29vywUCAAAAyN9yHCqOHj2abvvVq1c1Z84cTZ06Nd2rQgEAAAC4veT6ORUeHh565plndP/99+uZZ57J7dUDAAAAyGfy7ETt0NBQbdy4Ma9WDwAAACCfyLNQsW7dOhUrViyvVg8AAAAgn8jxORVjx45Nt/38+fPauHGjdu3apRdffDHHhQEAAAAoGHIcKqKjo9NtL1mypKpWrarZs2friSeeyOnqAQAAABQQOQ4VKSkpuVkHAAAAgAKKO2oDAAAAsCTHRypSbdiwQV9++aWOHTsmSQoKClL79u3VqlUry8UBAAAAyP9yHCquXLmiHj16aNmyZTLGqESJEpL+OVF78uTJevDBB7Vo0SJ5eHjkVq1KTk5WdHS0PvjgA506dUoBAQHq06ePXn755TR39wYAAABwa+R4+NOYMWO0dOlSDR06VH/88Yf++usv/fXXXzp16pSGDRumJUuWZHiFqJyaOHGiZs2apTfffFP79u3TxIkTNWnSJM2YMSNXtwMAAAAg63J8pOKjjz5SVFSUJk2a5NRetmxZTZw4UadPn9b777+vV155xXKRqbZs2aJOnTqpffv2kqTg4GAtWrRI27dvz7VtAAAAAMieHB+p+OOPP9SkSZMM5zdp0kSnTp3K6erT1axZM33zzTc6cOCAJGnPnj3atGmT2rZtm6vbAQAAAJB1OT5SUbFiRcXGxurJJ59Md/6GDRtUsWLFHBeWnhdffFEJCQmqWbOm3NzclJycrFdffVWPPvpohsskJSUpKSnJMZ2QkJCrNQEAAACFXY6PVERFRWnx4sV68skntX//fiUnJyslJUX79+/XU089pU8//VR9+vTJxVKlxYsX68MPP9RHH32kXbt2acGCBXr99de1YMGCDJeZMGGC/Pz8HI/AwMBcrQk
AAAAo7GzGGJOTBZOTk/X4449r4cKFstlsKlLkn3ySkpIiY4yioqI0d+5cR3tuCAwM1IsvvqgBAwY42saNG6cPPvhAv/76a7rLpHekIjAwUPHx8fL19c212rIrclGky7YNAJK0ssdKV5eQ/0XyWQ3AxVa69rM6ISFBfn5+N/3unOPhT25ubpo/f76GDBmir776yuk+Fe3atVO9evVyuuoMXbp0KU1IcXNzy/Tu3na7XXa7PddrAQAAAPCPbIWKy5cva9CgQapTp44GDhwoSapXr16aADF9+nTNnj1b06ZNy9X7VERGRurVV19VpUqVVKdOHf3www+aMmWKHnvssVzbBgAAAIDsyVaoePvttzV//nz98ssvmfZr3769XnjhBYWEhOipp56yVOD1ZsyYoZEjR+rpp59WXFycAgIC1L9/f40aNSrXtgEAAAAge7J1wsPixYv10EMPqUqVKpn2q1q1qh5++GEtWrTIUnE38vHx0dSpU3Xs2DH9/fffOnz4sMaNGydPT89c3Q4AAACArMtWqPjxxx/VokWLLPVt1qyZ9u7dm6OiAAAAABQc2QoVV65cyfJRAU9PT6erLgEAAAC4PWUrVAQEBOinn37KUt+ffvpJAQEBOSoKAAAAQMGRrVDRpk0bLVy4UHFxcZn2i4uL08KFC3XfffdZKg4AAABA/petUDF8+HBdvnxZrVu31nfffZdun++++0733nuvLl++rOeffz5XigQAAACQf2XrkrJVqlTR4sWL1aNHDzVr1kxVqlRRSEiIfHx8dOHCBf300086fPiwihUrpo8//lhVq1bNq7oBAAAA5BPZvqN2+/bttXfvXk2cOFFffPGFli1b5pgXEBCgJ554Qi+88MJNLzsLAAAA4PaQ7VAhScHBwZo1a5ZmzZqlCxcuKCEhQb6+vvLx8cnt+gAAAADkczkKFdfz8fEhTAAAAACFWLZO1AYAAACAGxEqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCUFLlT8/vvv6tmzp0qVKqWiRYsqJCREO3bscHVZAAAAQKHl7uoCsuPcuXNq3ry5wsPDtWrVKpUpU0YHDx5UyZIlXV0aAAAAUGgVqFAxceJEBQYGat68eY62ypUru7AiAAAAAAVq+NOKFSt099136+GHH1bZsmV111136Z133sl0maSkJCUkJDg9AAAAAOSeAhUqfvvtN82aNUvVqlXTmjVr9NRTT+nZZ5/VggULMlxmwoQJ8vPzczwCAwNvYcUAAADA7a9AhYqUlBQ1aNBA48eP11133aV+/frpiSee0OzZszNcZsSIEYqPj3c8Tpw4cQsrBgAAAG5/BSpUVKhQQbVr13Zqq1Wrlo4fP57hMna7Xb6+vk4PAAAAALmnQIWK5s2ba//+/U5tBw4cUFBQkIsqAgAAAFCgQsXgwYO1bds2jR8/XocOHdJHH32kt99+WwMGDHB1aQAAAEChVaBCRaNGjbR06VItWrRIdevW1SuvvKKpU6fq0UcfdXVpAAAAQKFVoO5TIUkdOnRQhw4dXF0GAAAAgP+nQB2pAAAAAJD/ECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAA
AgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCUFOlS89tprstlsGjRokKtLAQAAAAqtAhsqvv/+e82ZM0f16tVzdSkAAABAoVYgQ8XFixf16KOP6p133lHJkiVdXQ4AAABQqBXIUDFgwAC1b99ebdq0cXUpAAAAQKHn7uoCsuvjjz/Wrl279P3332epf1JSkpKSkhzTCQkJeVUaAAAAUCgVqCMVJ06c0HPPPacPP/xQXl5eWVpmwoQJ8vPzczwCAwPzuEoAAACgcClQoWLnzp2Ki4tTgwYN5O7uLnd3d23YsEHTp0+Xu7u7kpOT0ywzYsQIxcfHOx4nTpxwQeUAAADA7atADX+699579eOPPzq19e3bVzVr1tTw4cPl5uaWZhm73S673X6rSgQAAAAKnQIVKnx8fFS3bl2nNm9vb5UqVSpNOwAAAIBbo0ANfwIAAACQ/xSoIxXpiY2NdXUJAAAAQKHGkQoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhS4UDFhwgQ1atRIPj4+Klu2rDp37qz9+/e7uiwAAACg0CpwoWLDhg0aMGCAtm3bpnXr1unq1au6//77lZiY6OrSAAAAgELJ3dUFZNfq1audpufPn6+yZctq586datmypYuqAgAAAAqvAhcqbhQfHy9J8vf3T3d+UlKSkpKSHNMJCQm3pC4AAACgsChww5+ul5KSokGDBql58+aqW7duun0mTJggPz8/xyMwMPAWVwkAAADc3gp0qBgwYIB++uknffzxxxn2GTFihOLj4x2PEydO3MIKAQAAgNtfgR3+9Mwzz+iLL77Qxo0bVbFixQz72e122e32W1gZAAAAULgUuFBhjNHAgQO1dOlSxcbGqnLlyq4uCQAAACjUClyoGDBggD766CMtX75cPj4+OnXqlCTJz89PRYsWdXF1AAAAQOFT4M6pmDVrluLj4xUWFqYKFSo4Hp988omrSwMAAAAKpQJ3pMIY4+oSAAAAAFynwB2pAAAAAJC/ECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAA
AWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCWECgAAAACWECoAAAAAWEKoAAAAAGAJoQIAAACAJYQKAAAAAJYQKgAAAABYQqgAAAAAYAmhAgAAAIAlhAoAAAAAlhAqAAAAAFhCqAAAAABgCaECAAAAgCUFMlTMnDlTwcHB8vLyUpMmTbR9+3ZXlwQAAAAUWgUuVHzyyScaMmSIRo8erV27dik0NFQRERGKi4tzdWkAAABAoVTgQsWUKVP0xBNPqG/fvqpdu7Zmz56tYsWK6b333nN1aQAAAEChVKBCxZUrV7Rz5061adPG0VakSBG1adNGW7dudWFlAAAAQOHl7uoCsuPMmTNKTk5WuXLlnNrLlSunX3/9Nd1lkpKSlJSU5JiOj4+XJCUkJORdoVlw9dJVl24fAFz9OVggXOWzGoCLufizOvVvhTEm034FKlTkxIQJEzRmzJg07YGBgS6oBgDyD7//+Lm6BADAzfjlj8/qCxcuyC+TWgpUqChdurTc3Nx0+vRpp/bTp0+rfPny6S4zYsQIDRkyxDGdkpKiv/76S6VKlZLNZsvTeoG8kpCQoMDAQJ04cUK+vr6uLgcAkA4+q3E7MMbowoULCggIyLRfgQoVnp6eatiwob755ht17txZ0j8h4ZtvvtEzzzyT7jJ2u112u92prUSJEnlcKXBr+Pr68ocKAPI5PqtR0GV2hCJVgQoVkjRkyBBFRUXp7rvvVuPGjTV16lQlJiaqb9++ri4NAAAAKJQKXKjo1q2b/vzzT40aNUqnTp1S/fr1tXr16jQnbwMAAAC4NQpcqJCkZ555JsPhTkBhYLfbNXr06DRD+wAA+Qef1ShMbOZm14cCAAAAgEwUqJvfAQAAAMh/CBUAAAAALCFUAHAIDg7W1KlTXV0GABRKYWFhGjRokKvLAHKEUAEAAADAEkIFUIBcuXLF1SUAAACkQagA8lBYWJieffZZvfDCC/L391f58uUVHR3tmH/8+HF16tRJxYsXl6+vr7p27arTp0875kdHR6t+/fp69913VblyZXl5eUmSbDab5syZow4dOqhYsWKqVauWtm7dqkOHDiksLEze3t5q1qyZDh8+7FjX4cOH1alTJ5UrV07FixdXo0aN9PXXX9+yfQEAt8Lq1avVokULlShRQqVKlVKHDh0cn4VHjx6VzWbTkiVLFB4ermLFiik0NFRbt251Wsfnn3+uOnXqyG63Kzg4WJMnT3aaHxwcrHHjxql3794qXry4goKCtGLFCv3555+Oz/R69eppx44djmXOnj2rHj166I477lCxYsUUEhKiRYsWZfpczp07p969e6tkyZIqVqyY2rZtq4MHDzrmp/6NuN7UqVMVHBzsmI6NjVXjxo3l7e2tEiVKqHnz5jp27Fh2dimQJYQKII8tWLBA3t7e+u677zRp0iSNHTtW69atU0pKijp16qS//vpLGzZs0Lp16/Tbb7+pW7duTssfOnRIn3/+uZYsWaLdu3c72l955RX17t1bu3fvVs2aNfXII4+of//+GjFihHbs2CFjjNP9XC5evKh27drpm2++0Q8//KAHHnhAkZGROn78+K3aFQCQ5xITEzVkyBDt2LFD33zzjYoUKaIHH3xQKSkpjj4vvfSShg0bpt27d6t69erq0aOHrl27JknauXOnunbtqu7du+vHH39UdHS0Ro4cqfnz5ztt54033lDz5s31ww8/qH379urVq5d69+6tnj17ateuXapatap69+6t1Cv3X758WQ0bNtSXX36pn376Sf369VOvXr20ffv2DJ9Lnz59tGPHDq1YsUJbt26VMUbt2rXT1atXs7Qvrl27ps6dO6tVq1bau3evtm7dqn79+slms2VzrwJZYADkmVatWpkWLVo4tTVq1MgMHz7crF271ri5uZnjx4875v38889Gktm+fbsxxpjRo0cbDw8
PExcX57QOSebll192TG/dutVIMnPnznW0LVq0yHh5eWVaX506dcyMGTMc00FBQeaNN97I9vMEgPzqzz//NJLMjz/+aI4cOWIkmXfffdcxP/Vzd9++fcYYYx555BFz3333Oa3j+eefN7Vr13ZMBwUFmZ49ezqm//jjDyPJjBw50tGW+rn8xx9/ZFhb+/btzdChQx3TrVq1Ms8995wxxpgDBw4YSWbz5s2O+WfOnDFFixY1ixcvNsb88zciNDTUaZ1vvPGGCQoKMsYYc/bsWSPJxMbGZraLgFzBkQogj9WrV89pukKFCoqLi9O+ffsUGBiowMBAx7zatWurRIkS2rdvn6MtKChIZcqUyXS95cqVkySFhIQ4tV2+fFkJCQmS/jlSMWzYMNWqVUslSpRQ8eLFtW/fPo5UALitHDx4UD169FCVKlXk6+vrGAp0/Wfd9Z+fFSpUkCTFxcVJkvbt26fmzZs7rbN58+Y6ePCgkpOT011HRp/B1683OTlZr7zyikJCQuTv76/ixYtrzZo1GX4G79u3T+7u7mrSpImjrVSpUqpRo4bT34jM+Pv7q0+fPoqIiFBkZKSmTZumP/74I0vLAtlFqADymIeHh9O0zWZzOgx/M97e3jddb+qh7PTaUrc1bNgwLV26VOPHj9e3336r3bt3KyQkhJO/AdxWIiMj9ddff+mdd97Rd999p++++06S84UuMvuszKrsfgb/73//07Rp0zR8+HDFxMRo9+7dioiIsPQZXKRIEcfwqlQ3Do2aN2+etm7dqmbNmumTTz5R9erVtW3bthxvE8gIoQJwkVq1aunEiRM6ceKEo+2XX37R+fPnVbt27Vzf3ubNm9WnTx89+OCDCgkJUfny5XX06NFc3w4AuMrZs2e1f/9+vfzyy7r33ntVq1YtnTt3LlvrqFWrljZv3uzUtnnzZlWvXl1ubm45rm3z5s3q1KmTevbsqdDQUFWpUkUHDhzItI5r1645QpH0/z+/1L8RZcqU0alTp5yCxfXn3qW66667NGLECG3ZskV169bVRx99lOPnAWSEUAG4SJs2bRQSEqJHH31Uu3bt0vbt29W7d2+1atVKd999d65vr1q1ao6Tvffs2aNHHnkk27/MAUB+VrJkSZUqVUpvv/22Dh06pPXr12vIkCHZWsfQoUP1zTff6JVXXtGBAwe0YMECvfnmmxo2bJil2qpVq6Z169Zpy5Yt2rdvn/r37+90tb/0+nfq1ElPPPGENm3apD179qhnz56644471KlTJ0n/XGHwzz//1KRJk3T48GHNnDlTq1atcqzjyJEjGjFihLZu3apjx45p7dq1OnjwoGrVqmXpuQDpIVQALmKz2bR8+XKVLFlSLVu2VJs2bVSlShV98sknebK9KVOmqGTJkmrWrJkiIyMVERGhBg0a5Mm2AMAVihQpoo8//lg7d+5U3bp1NXjwYP3vf//L1joaNGigxYsX6+OPP1bdunU1atQojR07Vn369LFU28svv6wGDRooIiJCYWFhKl++vDp37pzpMvPmzVPDhg3VoUMHNW3aVMYYffXVV45hVrVq1dJbb72lmTNnKjQ0VNu3b3cKP8WKFdOvv/6qhx56SNWrV1e/fv00YMAA9e/f39JzAdJjMzcOxgMAAACAbOBIBQAAAABLCBUAAAAALCFUAAAAALCEUAEAAADAEkIFAAAAAEsIFQAAAAAsIVQAAAAAsIRQAQAAAMASQgUA3GaCg4Mt3/03L/Tp00fBwcGuLgMAkAcIFQBQQPz444/q0qWLgoKC5OXlpTvuuEP33XefZsyYkelyv/zyi6Kjo3X06NE8r/HkyZOKjo7W7t2783xbWREWFiabzXbTR3R0tKtLBYACzWaMMa4uAgCQuS1btig8PFyVKlVSVFSUypcvrxMnTmjbtm06fPiwDh065OiblJSkIkWKyMPDQ5L02Wef6eGHH1ZMTIzCwsLytM4dO3aoUaNGmjdvXpqjJVevXlVKSorsdnue1nC9dev
W6fTp047p77//XtOnT9d///tf1apVy9Fer1491atX75bVBQC3G3dXFwAAuLlXX31Vfn5++v7771WiRAmneXFxcU7Tt/JLe3akhpxb6b777nOa9vLy0vTp03XfffflecACgMKE4U8AUAAcPnxYderUSRMoJKls2bJO09efUzF//nw9/PDDkqTw8HDHcJ/Y2FhJynDoz43nZfz1118aNmyYQkJCVLx4cfn6+qpt27bas2ePo09sbKwaNWokSerbt69jW/Pnz5eU/jkViYmJGjp0qAIDA2W321WjRg29/vrruvEgus1m0zPPPKNly5apbt26stvtqlOnjlavXn2TPZe5efPmyWaz6Ycffkgzb/z48XJzc9Pvv/8u6Z+hVHXr1tXOnTvVrFkzFS1aVJUrV9bs2bPTLJuUlKTRo0frzjvvlN1uV2BgoF544QUlJSVZqhcA8itCBQAUAEFBQdq5c6d++umnbC3XsmVLPfvss5Kk//73v3r//ff1/vvvOw39yYrffvtNy5YtU4cOHTRlyhQ9//zz+vHHH9WqVSudPHlSklSrVi2NHTtWktSvXz/Htlq2bJnuOo0x6tixo9544w098MADmjJlimrUqKHnn39eQ4YMSdN/06ZNevrpp9W9e3dNmjRJly9f1kMPPaSzZ89m67lcr0uXLipatKg+/PDDNPM+/PBDhYWF6Y477nC0nTt3Tu3atVPDhg01adIkVaxYUU899ZTee+89R5+UlBR17NhRr7/+uiIjIzVjxgx17txZb7zxhrp165bjWgEgXzMAgHxv7dq1xs3Nzbi5uZmmTZuaF154waxZs8ZcuXIlTd+goCATFRXlmP7000+NJBMTE5OmryQzevTom67j8uXLJjk52anPkSNHjN1uN2PHjnW0ff/990aSmTdvXpp1RkVFmaCgIMf0smXLjCQzbtw4p35dunQxNpvNHDp0yKlOT09Pp7Y9e/YYSWbGjBlptpWR9PZFjx49TEBAgNPz27VrV5rn0apVKyPJTJ482dGWlJRk6tevb8qWLet4Ld5//31TpEgR8+233zpte/bs2UaS2bx5c5brBYCCgiMVAFAA3Hfffdq6das6duyoPXv2aNKkSYqIiNAdd9yhFStW5Pn27Xa7ihT5509GcnKyzp49q+LFi6tGjRratWtXjtb51Vdfyc3NzXEkJdXQoUNljNGqVauc2tu0aaOqVas6puvVqydfX1/99ttvOdp+qt69e+vkyZOKiYlxtH344YcqWrSoHnroIae+7u7u6t+/v2Pa09NT/fv3V1xcnHbu3ClJ+vTTT1WrVi3VrFlTZ86ccTxat24tSU7bAYDbBaECAAqIRo0aacmSJTp37py2b9+uESNG6MKFC+rSpYt++eWXPN12SkqK3njjDVWrVk12u12lS5dWmTJltHfvXsXHx+donceOHVNAQIB8fHyc2lOHZh07dsypvVKlSmnWUbJkSZ07dy5H20913333qUKFCo4hUCkpKVq0aJE6deqUpraAgAB5e3s7tVWvXl2SHJfsPXjwoH7++WeVKVPG6ZHa78YT6wHgdsDVnwCggPH09FSjRo3UqFEjVa9eXX379tWnn36q0aNH59o2kpOTnabHjx+vkSNH6rHHHtMrr7wif39/FSlSRIMGDVJKSkqubTczbm5u6bYbi1dGd3Nz0yOPPKJ33nlHb731ljZv3qyTJ0+qZ8+eOVpfSkqKQkJCNGXKlHTnBwYGWikXAPIlQgUAFGB33323JOmPP/7IsI/NZstwXsmSJXX+/HmntitXrqRZ32effabw8HDNnTvXqf38+fMqXbp0lrZ1o6CgIH399de6cOGC0xGBX3/91TH/Vundu7cmT56slStXatWqVSpTpowiIiLS9Dt58qQSExOdjlYcOHBAkhxXtqpatar27Nmje++9N1v7AwAKMoY/AUABEBMTk+4v8l999ZUkqUaNGhkum/oF+MbwIP3zBXjjxo1ObW+//XaaIxVubm5ptv/pp58
6LrealW3dqF27dkpOTtabb77p1P7GG2/IZrOpbdu2N11Hbkm9+d27776rzz//XN27d5e7e9rf3a5du6Y5c+Y4pq9cuaI5c+aoTJkyatiwoSSpa9eu+v333/XOO++kWf7vv/9WYmJi3j0RAHARjlQAQAEwcOBAXbp0SQ8++KBq1qypK1euaMuWLfrkk08UHBysvn37Zrhs/fr15ebmpokTJyo+Pl52u12tW7dW2bJl9Z///EdPPvmkHnroId13333as2eP1qxZ43T0QZI6dOigsWPHqm/fvmrWrJl+/PFHffjhh6pSpYpTv6pVq6pEiRKaPXu2fHx85O3trSZNmqhy5cpp6oqMjFR4eLheeuklHT16VKGhoVq7dq2WL1+uQYMGOZ2UfSv07t1bw4YNk6QMhz4FBARo4sSJOnr0qKpXr65PPvlEu3fv1ttvv+24uV+vXr20ePFiPfnkk4qJiVHz5s2VnJysX3/9VYsXL9aaNWscR5gA4Lbh2otPAQCyYtWqVeaxxx4zNWvWNMWLFzeenp7mzjvvNAMHDjSnT5926nvj5WCNMeadd94xVapUMW5ubk6XVE1OTjbDhw83pUuXNsWKFTMRERHm0KFD6V5SdujQoaZChQqmaNGipnnz5mbr1q2mVatWplWrVk7bWr58ualdu7Zxd3d3uizrjZeUNcaYCxcumMGDB5uAgADj4eFhqlWrZv73v/+ZlJQUp36SzIABA9Lsl/Sea2Yyu7zuH3/8Ydzc3Ez16tXTXbZVq1amTp06ZseOHaZp06bGy8vLBAUFmTfffDNN3ytXrpiJEyeaOnXqGLvdbkqWLGkaNmxoxowZY+Lj47NcLwAUFDZjLJ7hBgDAbeDMmTOqUKGCRo0apZEjR6aZHxYWpjNnzmT7BoQAUBhwTgUAAJLmz5+v5ORk9erVy9WlAECBwzkVAIBCbf369frll1/06quvqnPnzo6rOAEAso5QAQAo1MaOHastW7aoefPmmjFjhqvLAYACiXMqAAAAAFjCORUAAAAALCFUAAAAALCEUAEAAADAEkIFAAAAAEsIFQAAAAAsIVQAAAAAsIRQAQAAAMASQgUAAAAASwgVAAAAACz5/wB2OpoU/8GT9QAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Plot the distribution of results\n", + "\n", + "# Count the occurrences of \"normal\" and \"anomalous\" results\n", + "situation_counts = {\"normal\": 0, \"anomalous\": 0}\n", + "for entry in dataset:\n", + " result = entry.get(\"result\", \"unknown\").lower()\n", + " if result in situation_counts:\n", + " situation_counts[result] += 1\n", + "\n", + "# Extract keys and values for the bar chart\n", + "labels = list(situation_counts.keys())\n", + "counts = list(situation_counts.values())\n", + "\n", + "# Plot the bar chart\n", + "plt.figure(figsize=(8, 6))\n", + "plt.bar(labels, counts, color=['green', 'red'], alpha=0.7)\n", + "\n", + "# Add labels and title\n", + "plt.xlabel(\"Situation Type\", fontsize=12)\n", + "plt.ylabel(\"Count\", fontsize=12)\n", + "plt.title(\"Number of Normal vs Anomalous Situations\", fontsize=14)\n", + "\n", + "# Annotate bars with counts\n", + "for i, count in enumerate(counts):\n", + " plt.text(i, count + 0.2, str(count), ha='center', fontsize=10)\n", + "\n", + "# Display the plot\n", + "plt.tight_layout()\n", + "plt.show()\n" + ] + }, + { + "cell_type": "code", + "execution_count": 115, + "id": "eabc7c61-0cd2-41f4-baa1-b85400bbf87f", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[{'timestamp': 1738065600, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738065660, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738065900, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738066200, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738066500, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738066800, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738067100, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738067400, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 
1738067700, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738068000, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738068300, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738068600, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738068900, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738069200, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738069500, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}]\n", + "[{'timestamp': 1738152000, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738152060, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738152300, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738152600, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738152900, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738153200, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738153260, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738153800, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738154100, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738154400, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738155600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738156800, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738157400, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738157460, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738173600, 'room': 'porch', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738184400, 'room': 'porch', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738195200, 'room': 'porch', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738206000, 'room': 'porch', 'nodeId': 1, 'occupancy': 0}]\n", + "[{'timestamp': 1738324800, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738324860, 'room': 
'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738325100, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738325400, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738325700, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738326000, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738326600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738326900, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738327200, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738328400, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738329600, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738330200, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738330260, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738346400, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738346460, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738346520, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738347600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738350000, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738350060, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738353600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738357200, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738357260, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738357800, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738368000, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738369200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738370400, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738370460, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738374000, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, 
{'timestamp': 1738377600, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738377660, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738378200, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}]\n", + "[{'timestamp': 1738411200, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738411260, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738411500, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738411800, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738412100, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738412400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738412460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738413000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738413300, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738413600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738417200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738420800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738424400, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738428000, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738431600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738435200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738438800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738442400, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738446000, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738449600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738453200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738456800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738460400, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738464000, 'room': 'bedroom', 
'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738467600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738471200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738474800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}]\n", + "[{'timestamp': 1738584000, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738584300, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738584600, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738584900, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738585200, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738585260, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738585800, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738586100, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738586400, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738587600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738588800, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738589400, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738589460, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738605600, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738605660, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738605720, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738605780, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738606800, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738609200, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738609260, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738612800, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738616400, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738617000, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 
1738627200, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738627260, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738628400, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738629600, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738629660, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738633200, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738636800, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738637400, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}]\n", + "[{'timestamp': 1738670400, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738670460, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738670700, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738671000, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738671300, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738671600, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738671660, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738672200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738672500, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738672800, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738672860, 'room': 'kitchen', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738672920, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738672980, 'room': 'bathroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673040, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673100, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673160, 'room': 'kitchen', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673220, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673280, 'room': 'bathroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673340, 'room': 'bedroom', 
'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673400, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673460, 'room': 'kitchen', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673520, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673580, 'room': 'bathroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673640, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738673700, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738674000, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738677600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738681200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738684800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738688400, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}]\n", + "[{'timestamp': 1738843200, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738843260, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738843500, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738843800, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738844100, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738844400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738844460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738844520, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738844580, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738844640, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738844700, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738844760, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738844820, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738844880, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738844940, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 
1738845000, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738845060, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738866400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738866460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738866520, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738866580, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738866640, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738866700, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738875600, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738875660, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738876200, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738888400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738888460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738888520, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738888580, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738888640, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738888700, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}]\n", + "[{'timestamp': 1738929600, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738929660, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738929900, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738930200, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738930260, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738930320, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738930380, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738930440, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738930500, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738930560, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, 
{'timestamp': 1738944000, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738944060, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738944120, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738944180, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738952400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738952460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738953000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738957600, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738957660, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738957720, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738957780, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738962000, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738962060, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738962600, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}]\n", + "[{'timestamp': 1738065600, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738065660, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738065900, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738066200, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738066500, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738066800, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738067100, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738067400, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738067700, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738068000, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738068300, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738068600, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 
1738068900, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738069200, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}, {'timestamp': 1738069500, 'room': 'bathroom', 'nodeId': 3, 'humidity': 7500}]\n", + "[{'timestamp': 1738152000, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738152060, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738152300, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738152600, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738152900, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738153200, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738153260, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738153800, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738154100, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738154400, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738155600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738156800, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738157400, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738157460, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738173600, 'room': 'porch', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738184400, 'room': 'porch', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738195200, 'room': 'porch', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738206000, 'room': 'porch', 'nodeId': 1, 'occupancy': 0}]\n", + "[{'timestamp': 1738324800, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738324860, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738325100, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738325400, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738325700, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738326000, 'room': 'kitchen', 
'nodeId': 2, 'onOff': True}, {'timestamp': 1738326600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738326900, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738327200, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738328400, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738329600, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738330200, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738330260, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738346400, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738346460, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738346520, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738347600, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738350000, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738350060, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738353600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738357200, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738357260, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738357800, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738368000, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738369200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738370400, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738370460, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738374000, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738377600, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738377660, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738378200, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}]\n", + "[{'timestamp': 1738411200, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, 
{'timestamp': 1738411260, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738411500, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738411800, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738412100, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738412400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738412460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738413000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738413300, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738413600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738417200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738420800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738424400, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738428000, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738431600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738435200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738438800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738442400, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738446000, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738449600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738453200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738456800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738460400, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738464000, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738467600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738471200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738474800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}]\n", + "[{'timestamp': 1738584000, 'room': 
'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738584300, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738584600, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738584900, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738585200, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738585260, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738585800, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738586100, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738586400, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738587600, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738588800, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738589400, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738589460, 'room': 'hall', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738605600, 'room': 'porch', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738605660, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738605720, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738605780, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738606800, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738609200, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738609260, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738612800, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738616400, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738617000, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738627200, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738627260, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738628400, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738629600, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 
1738629660, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738633200, 'room': 'livingroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738636800, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738637400, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}]\n", + "[{'timestamp': 1738670400, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738670460, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738670700, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738671000, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738671300, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738671600, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738671660, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738672200, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738672500, 'room': 'livingroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738672800, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 0}, {'timestamp': 1738672860, 'room': 'kitchen', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738672920, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738672980, 'room': 'bathroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673040, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673100, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673160, 'room': 'kitchen', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673220, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673280, 'room': 'bathroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673340, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673400, 'room': 'livingroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673460, 'room': 'kitchen', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673520, 'room': 'hall', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673580, 'room': 'bathroom', 
'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738673640, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738673700, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738674000, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738677600, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738681200, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738684800, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}, {'timestamp': 1738688400, 'room': 'bedroom', 'nodeId': 1, 'occupancy': 1}]\n", + "[{'timestamp': 1738843200, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738843260, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738843500, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738843800, 'room': 'bathroom', 'nodeId': 3, 'humidity': 6500}, {'timestamp': 1738844100, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738844400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738844460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738844520, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738844580, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738844640, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738844700, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738844760, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738844820, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738844880, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738844940, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738845000, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738845060, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738866400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738866460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738866520, 
'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738866580, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738866640, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738866700, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738875600, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738875660, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738876200, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738888400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738888460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738888520, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738888580, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738888640, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738888700, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}]\n", + "[{'timestamp': 1738929600, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738929660, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738929900, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738930200, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738930260, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738930320, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738930380, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738930440, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738930500, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738930560, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738944000, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738944060, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738944120, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738944180, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, 
{'timestamp': 1738952400, 'room': 'kitchen', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738952460, 'room': 'kitchen', 'event': 'fridge_opened'}, {'timestamp': 1738953000, 'room': 'kitchen', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738957600, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738957660, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738957720, 'room': 'bathroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738957780, 'room': 'bathroom', 'nodeId': 2, 'onOff': False}, {'timestamp': 1738962000, 'room': 'bedroom', 'nodeId': 2, 'onOff': True}, {'timestamp': 1738962060, 'room': 'pillbox', 'nodeId': 1, 'onOff': True}, {'timestamp': 1738962600, 'room': 'bedroom', 'nodeId': 2, 'onOff': False}]\n" + ] + } + ], + "source": [ + "# So what are the anomalous items??\n", + "\n", + "for datapoint in dataset:\n", + "    try:\n", + "        result = datapoint[\"result\"]\n", + "        if result == \"anomalous\":\n", + "            print(datapoint['input'])\n", + "    except ValueError as e:\n", + "        pass" + ] + }, + { + "cell_type": "markdown", + "id": "a0d02f58-23f6-4f81-a779-7c0555afd13d", + "metadata": {}, + "source": [ + "## Now it's time to curate our dataset\n", + "\n", + "We select all items\n", + "\n", + "We will create Item instances, which truncate the input and reasoning to fit within 180 tokens using the right Tokenizer\n", + "\n", + "And will create a prompt to be used during Training.\n", + "\n", + "Items will be rejected if they don't have sufficient characters.\n", + "\n", + "## But why 180 tokens??\n", + "\n", + "This is an example of a \"hyper-parameter\". In other words, it's basically trial and error! We want a sufficiently large number of tokens so that we have enough useful information to classify the data. But we also want to keep the number low so that we can train efficiently. \n", + "\n", + "I started with a number that seemed reasonable, and experimented with a few variations before settling on 180. 
If you have time, you should do the same! You might find that you can beat my results by finding a better balance. This kind of trial-and-error might sound a bit unsatisfactory, but it's a crucial part of the data science R&D process.\n", + "\n", + "There's another interesting reason why we might favor a lower number of tokens in the training data. When we eventually get to use our model at inference time, we'll want to provide new products and have it estimate a price. And we'll be using short descriptions of products - like 1-2 sentences. For best performance, we should size our training data to be similar to the inputs we will provide at inference time.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 116, + "id": "430b432f-b769-41da-9506-a238cb5cf1b6", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "There are 28 items\n" + ] + } + ], + "source": [ + "# Create an Item object for each with a result\n", + "\n", + "items = []\n", + "for datapoint in dataset:\n", + " try:\n", + " result = datapoint[\"result\"]\n", + " if result == 'normal' or result == 'anomalous':\n", + " item = Item(datapoint, result)\n", + " if item.include:\n", + " items.append(item)\n", + " except ValueError as e:\n", + " pass\n", + "\n", + "print(f\"There are {len(items):,} items\")" + ] + }, + { + "cell_type": "code", + "execution_count": 117, + "id": "0d570794-6f1d-462e-b567-a46bae3556a1", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "<$normal>" + ] + }, + "execution_count": 117, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Look at the first item\n", + "\n", + "items[1]" + ] + }, + { + "cell_type": "code", + "execution_count": 118, + "id": "70219e99-22cc-4e08-9121-51f9707caef0", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "How would you classify this sensor data - normal or anomalous?\n", + "\n", + 
"[{\"timestamp\": 1737892800, \"room\": \"pillbox\", \"nodeId\": 1, \"onOff\": true}, {\"timestamp\": 1737892860, \"room\": \"bedroom\", \"nodeId\": 2, \"onOff\": true}, {\"timestamp\": 1737893100, \"room\": \"bathroom\", \"nodeId\": 2, \"onOff\": true}, {\"timestamp\": 1737893400, \"room\": \"bathroom\", \"nodeId\": 3, \"humidity\": 6500}, {\"timestamp\": 1737893700, \"room\": \"bathroom\", \"nodeId\": 2, \"onOff\": false}, {\"timestamp\": 1737894000, \"room\": \"\n", + "\n", + "Result is normal\n" + ] + } + ], + "source": [ + "# Investigate the prompt that will be used during training - the model learns to complete this\n", + "\n", + "print(items[1].prompt)" + ] + }, + { + "cell_type": "code", + "execution_count": 119, + "id": "d9998b8d-d746-4541-9ac2-701108e0e8fb", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "How would you classify this sensor data - normal or anomalous?\n", + "\n", + "[{\"timestamp\": 1737892800, \"room\": \"pillbox\", \"nodeId\": 1, \"onOff\": true}, {\"timestamp\": 1737892860, \"room\": \"bedroom\", \"nodeId\": 2, \"onOff\": true}, {\"timestamp\": 1737893100, \"room\": \"bathroom\", \"nodeId\": 2, \"onOff\": true}, {\"timestamp\": 1737893400, \"room\": \"bathroom\", \"nodeId\": 3, \"humidity\": 6500}, {\"timestamp\": 1737893700, \"room\": \"bathroom\", \"nodeId\": 2, \"onOff\": false}, {\"timestamp\": 1737894000, \"room\": \"\n", + "\n", + "Result is \n" + ] + } + ], + "source": [ + "# Investigate the prompt that will be used during testing - the model has to complete this\n", + "\n", + "print(items[1].test_prompt())" + ] + }, + { + "cell_type": "code", + "execution_count": 120, + "id": "7a116369-335a-412b-b70c-2add6675c2e3", + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAABNAAAAI4CAYAAACr9RQwAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAS71JREFUeJzt3XeUFeX9P/DP0hYUduksKE1UsIHY+BJUUJAiQbFEQGPA2AVUUGNMVMSjIbFg5aspX0EN1lhQIyogYEMjKhoLKIRioagEFhAWZef3h4f787owwgrsCq/XOfcc5plnZj5z99nL5c0zMzlJkiQBAAAAAGxQhbIuAAAAAADKMwEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAJQjOTk5MWjQoLIuA7ZL8+bNi5ycnBgzZkxqvzFjxkROTk5Mnz79B/fZqVOn6NSpU6nq6dSpU+y7776l2hYA2LYEaADwI+Xk5GzSa8qUKWVd6nbl6aefjquuumqbHzMnJycaNWoUxcXF2/TY33XttdfGMcccEw0aNIicnJyNvg/NmjXb6HjcY489SvRfvHhxnH322bHLLrtE1apVo1mzZnH66advcl1vvvlmHHPMMVG7du3YaaedYt99941bb721tKfJVvDZZ5/FVVddFTNmzNik/itXroxhw4ZF9+7do3bt2qkBZNrn31FHHZXVd+HChXHWWWdF8+bNo1q1atGiRYsYOnRofPnllz/yDAFg66hU1gUAwE/dvffem7V8zz33xIQJE0q077XXXtuyrO3e008/HaNGjdqmIdrYsWOjWbNmMW/evHj++eejS5cu2+zY33X55ZdHQUFBtG3bNp599tmN9rv55ptj5cqVWW3z58+Pyy+/PLp27ZrV/vHHH0eHDh0iIuKcc86JXXbZJT777LP417/+tUk1Pffcc9GrV69o27ZtXHHFFVG9evWYM2dOfPLJJ5t5dj8tzz33XFmXsFk+++yzGD58eDRr1iz233//H+z/xRdfxNVXXx1NmjSJNm3apP5HwPc/8yIipk+fHrfcckvWeFu5cmW0b98+Vq1aFeedd140btw43n777bj99ttj8uTJ8cYbb0SFCv6fH4DyRYAGAD/SL3/5y6zlV199NSZMmFCinZ+2VatWxbhx42LEiBExevToGDt2bJkFaHPnzo1mzZrFF198EfXq1dtov969e5dou+aaayIi4pRTTslqP/vss6NSpUrx+uuvR506dTarnsLCwvjVr34VPXv2jH/84x87VPhRpUqVsi5hq2rYsGEsXLgwCgoKYvr06XHwwQdvtO+GPvOmTJkSOTk50a9fv0zbE088EfPnz4+nnnoqevbsmWmvXbt2XH311fH2229H27Ztt+yJAMCPtON8uwGAMrRq1aq46KKLonHjxpGbmxstW7aMG264IZIk+cFtr7nmmqhQoULcdtttmbbx48fHYYcdFjvvvHPUqFEjevbsGe+9917WdgMGDIjq1avHp59+Gr17947q1atHvXr14uKLL45169ZtUt3jx4+Pjh07Ro0aNSIvLy8OPvjguO+++7L6PPzww3HggQdGtWrVom7duvHLX/4yPv3006w+G7tP1IABA6JZs2aZ5fX3qLrhhhviL3/5S7Ro0SJyc3Pj4IMPjtdffz1ru1GjRkVE9mVj6z3wwANx4IEHZureb7/94pZbbsk69pw5c2LOnDmb9D5ERDz22GOxevXq+MUvfhF9+/aNRx99NNasWZNZv++++8YRRxxRYrvi4uLYZZdd4sQTT8y0ffnll3HqqadGXl5e1KxZM/r37x9vv/32Jt2fKyKy3rPNdd9990Xz5s3jZz/7WaZt5syZMX78+LjkkkuiTp06sWbNmvj66683a5+LFy+Oa6+9NipUqBCrVq3arEtcX3zxxfjFL34RTZo0idzc3GjcuHEMGTIkVq9endVvc8b0smXLYsC
AAZGfn595j5ctW7bJNUVEFBUVxdChQ6NevXqx8847x3HHHReff/55Vp8Nje358+fHMcccEzvvvHPUr18/hgwZEs8+++xGL+V+//3344gjjoiddtopdtlll7juuus2WMuwYcNi9913z7xHv/nNb6KoqCir34QJE+LQQw+NmjVrRvXq1aNly5bxu9/9LiK+DbPWB2CnnXZa5vcmbczl5uZGQUHBJrxbJRUVFcUjjzwSHTt2jF133TXTXlhYGBERDRo0yOrfsGHDiIioVq1aqY4HAFuTAA0AtrIkSeKYY46Jm266Kbp37x4jR46Mli1bxiWXXBJDhw5N3fbyyy+PK6+8Mv785z/H4MGDI+Lby6R69uwZ1atXjz/96U9xxRVXxPvvvx+HHnpozJs3L2v7devWRbdu3aJOnTpxww03RMeOHePGG2+Mv/zlLz9Y95gxY6Jnz56xdOnSuOyyy+KPf/xj7L///vHMM89k9TnppJOiYsWKMWLEiDjzzDPj0UcfjUMPPXSzw4rvuu++++L666+Ps88+O6655pqYN29eHH/88ZlQ5+yzz87cU+nee+/NvCK+DRD69esXtWrVij/96U/xxz/+MTp16hQvv/xy1jE6d+4cnTt33uSaxo4dG0cccUQUFBRE3759Y8WKFfHkk09m1vfp0ydeeOGFWLRoUdZ2L730Unz22WfRt2/fiPg2UOvVq1fcf//90b9//7j22mtj4cKF0b9//81/ozbTW2+9FR988EGcfPLJWe0TJ06MiG8Djc6dO0e1atWiWrVq0aNHjxJjakMmTpwYeXl58emnn0bLli2jevXqkZeXF+eee25WyLgxDz/8cHz11Vdx7rnnxm233RbdunWL2267LX71q1+V6LspYzpJkjj22GPj3nvvjV/+8pdxzTXXxCeffLLZ7/HgwYPj7bffjmHDhsW5554bTz755A8+5GPVqlVx5JFHxsSJE+P888+P3//+9/HKK6/EpZdeusH+//3vf6N79+7Rpk2buPHGG6NVq1Zx6aWXxvjx4zN9iouL45hjjokbbrghevXqFbfddlv07t07brrppujTp0+m33vvvRc///nPo6ioKK6++uq48cYb45hjjsmM/b322iuuvvrqiIg466yzMr83hx9++Ga9L5vq6aefjmXLlpWY7Xj44YdHhQoV4oILLohXX301Pvnkk3j66afj2muvjd69e0erVq22Sj0A8KMkAMAWNXDgwOS7f8U+/vjjSUQk11xzTVa/E088McnJyUlmz56daYuIZODAgUmSJMlFF12UVKhQIRkzZkxm/YoVK5KaNWsmZ555Zta+Fi1alOTn52e19+/fP4mI5Oqrr87q27Zt2+TAAw9MPYdly5YlNWrUSNq1a5esXr06a11xcXGSJEmydu3apH79+sm+++6b1eepp55KIiK58sorM20dO3ZMOnbsWOI4/fv3T5o2bZpZnjt3bhIRSZ06dZKlS5dm2seNG5dERPLkk09m2r7/Pq93wQUXJHl5eck333yTeo5NmzbNOnaaxYsXJ5UqVUr++te/Ztp+9rOfJccee2xmedasWUlEJLfddlvWtuedd15SvXr15KuvvkqSJEkeeeSRJCKSm2++OdNn3bp1yZFHHplERDJ69OhNqilJkuTzzz9PIiIZNmzYJvW/6KKLkohI3n///az2888/P/O+d+/ePXnwwQeT66+/PqlevXrSokWLZNWqVan7bd26dbLTTjslO+20UzJ48ODkkUceSQYPHpxERNK3b98frGv9e/NdI0aMSHJycpL58+dn2jZ1TK//nbvuuusybd98801y2GGHbdJ7PHr06CQiki5dumTGe5IkyZAhQ5KKFSsmy5Yty7R9f2zfeOONSUQkjz/+eKZt9erVSatWrZKISCZPnpy1bUQk99xzT6atqKgoKSgoSE444YRM27333ptUqFAhefHFF7PqvPPOO5OISF5++eUkSZLkpptuSiIi+fzzzzd6bq+//vpmj7PSbnv
CCSckubm5yX//+98S6/72t78lNWvWTCIi8+rfv3/y9ddfb3ZdALAtmIEGAFvZ008/HRUrVozzzz8/q/2iiy6KJEmyZppEfDt7ZtCgQXHLLbfE3//+96xZMxMmTIhly5ZFv3794osvvsi8KlasGO3atYvJkyeXOP4555yTtXzYYYfFf/7zn9SaJ0yYECtWrIjf/va3UbVq1ax16y+VnD59eixZsiTOO++8rD49e/aMVq1axT//+c/UY6Tp06dP1KpVK6vmiPjBuiMiatasGatWrYoJEyak9ps3b94mza6K+PaS0AoVKsQJJ5yQaevXr1+MHz8+/vvf/0ZExJ577hn7779/PPjgg5k+69ati3/84x/Rq1evzGVpzzzzTFSuXDnOPPPMTL8KFSrEwIEDN6mW0iouLo4HHngg2rZtW+KBFusfNFBQUBD//Oc/46STToqLL744/vrXv8acOXNKXLb7fStXroyvvvoqfvWrX8Wtt94axx9/fNx6661x9tlnxwMPPBAfffRR6vbfvWRv1apV8cUXX8TPfvazSJIk3nrrrRL9f2hMP/3001GpUqU499xzM20VK1bMzOLcVGeddVbWpcGHHXZYrFu3LubPn7/RbZ555pnYZZdd4phjjsm0Va1aNevn/V3Vq1fPundYlSpV4pBDDsk6n4cffjj22muvaNWqVdbv/ZFHHhkRkfm9r1mzZkREjBs3rkyfEhvx7WWa//znP+Poo4/O1PVdu+yySxxyyCFx8803x2OPPRZDhw6NsWPHxm9/+9ttXywAbAIBGgBsZfPnz49GjRpFjRo1strXhxjf/8f4PffcE6NGjYrbbrst68bbEZEJIo488sioV69e1uu5556LJUuWZPWvWrVqiZvM16pVKxP6bMz6e4Ptu+++qecVEdGyZcsS61q1apUaMvyQJk2aZC2vD9N+qO6IiPPOOy/23HPP6NGjR+y6667x61//Ouuy09L4+9//Hoccckh8+eWXMXv27Jg9e3a0bds21q5dGw8//HCmX58+feLll1/O3ANuypQpsWTJkqzL7ObPnx8NGzaMnXbaKesYu++++4+q8YdMnTo1Pv300xKX00X8/wDrpJNOynoAwC9+8YuoVKlSvPLKK6n7Xr/998fr+ktFp02blrr9ggULYsCAAVG7du3Mfc06duwYERHLly/P6rspY3r9e1y9evWsfhsaq2lKMw7nz58fLVq0yAreIjb+8911111L9P3++Xz00Ufx3nvvlfid33PPPSMiMr/3ffr0iQ4dOsQZZ5wRDRo0iL59+8ZDDz1UJmHaI488EmvWrNngeHv55Zfj5z//eVx77bVxwQUXRO/evePGG2+Myy+/PEaOHBnvv//+Nq8XAH6Ip3ACQDnToUOHmDFjRtx+++1x0kknRe3atTPr1v9D+N57793gjb0rVcr+q71ixYpbt9hNlJOTs8EHJmzsYQYbq3tD+/i++vXrx4wZM+LZZ5+N8ePHx/jx42P06NHxq1/9Ku6+++7NKzy+DS/WP8Bgjz32KLF+7NixcdZZZ0XEtwHGZZddFg8//HBceOGF8dBDD0V+fn507959s4+7pY0dOzYqVKhQIuSKiGjUqFFElLype8WKFaNOnTo/GFw2atQo3nvvvRLb169fPyLSA6d169bFUUcdFUuXLo1LL700WrVqFTvvvHN8+umnMWDAgBLhz7Yc0z9mHG7JYxQXF8d+++0XI0eO3GDfxo0bR8S3QeYLL7wQkydPjn/+85/xzDPPxIMPPhhHHnlkPPfcc9v0vRs7dmzk5+fHz3/+8xLr/vznP0eDBg3ioIMOymo/5phj4qqrropXXnkl9t57721VKgBsEgEaAGxlTZs2jYkTJ8aKFSuyZqHNnDkzs/67dt9997juuuuiU6dO0b1795g0aVJmuxYtWkTEt8FEly5dtlrN64/z7rvvbnTmzPq6Z82albmUbL1Zs2ZlnVetWrU2ePnlj5ml9v1ZO99VpUqV6NWrV/Tq1Su
Ki4vjvPPOiz//+c9xxRVXbPZMr7Fjx0blypXj3nvvLRFAvPTSS3HrrbfGggULokmTJtG8efM45JBD4sEHH4xBgwbFo48+Gr17947c3NzMNk2bNo3JkyfHV199lTULbfbs2ZtV1+ZY/zTETp06ZcKy7zrwwAMjIko8PXXt2rXxxRdflJjxtaHtJ0yYkHmIwHqfffZZRETq9v/+97/jww8/jLvvvjvroQE/dAlumqZNm8akSZNi5cqVWbPQZs2aVep9bs6x33///UiSJGuM/pifb4sWLeLtt9+Ozp07p477iG8vB17/gIyRI0fGH/7wh/j9738fkydPji5duvzg9lvCwoULY/LkyTFgwICssb/e4sWLNxier39IyDfffLPVawSAzeUSTgDYyo4++uhYt25d3H777VntN910U+Tk5ESPHj1KbNO6det4+umn44MPPohevXrF6tWrIyKiW7dukZeXF3/4wx8y/9j8rs8//3yL1Ny1a9eoUaNGjBgxosRTFNfPjDnooIOifv36ceedd0ZRUVFm/fjx4+ODDz6Inj17ZtpatGgRM2fOzKrv7bffLvFkzM2x8847R0SUeNrnl19+mbVcoUKFaN26dUREVp1z5szJXKqaZuzYsXHYYYdFnz594sQTT8x6XXLJJRERcf/992f69+nTJ1599dW466674osvvsi6fDPi25/h119/HX/9618zbcXFxTFq1KhNOOvS2djTENfr1KlT1K9fP8aOHZv18x4zZkxmhth6X3zxRcycOTO++uqrTNtJJ50UERH/93//l7Xfv/3tb1GpUqXo1KnTRmtbH0p+d8ZVkiRxyy23bPoJfs/RRx8d33zzTdxxxx2ZtnXr1sVtt91W6n1uqm7dusWnn34aTzzxRKZtzZo1WT/vzXXSSSfFp59+usF9rF69OlatWhUREUuXLi2xfv/994+I/z/2N/Z7syU98MADUVxcvNHxtueee8bixYtjypQpWe3rf4/atm271WoDgNIyAw0AtrJevXrFEUccEb///e9j3rx50aZNm3juuedi3LhxceGFF2Zme33f//zP/8S4cePi6KOPjhNPPDEef/zxyMvLizvuuCNOPfXUOOCAA6Jv375Rr169WLBgQfzzn/+MDh06lAjqSiMvLy9uuummOOOMM+Lggw+Ok08+OWrVqhVvv/12fPXVV3H33XdH5cqV409/+lOcdtpp0bFjx+jXr18sXrw4brnllmjWrFkMGTIks79f//rXMXLkyOjWrVucfvrpsWTJkrjzzjtjn332icLCwlLVuH7W1Pnnnx/dunWLihUrRt++feOMM86IpUuXxpFHHhm77rprzJ8/P2677bbYf//9s26e37lz54iI1AcJvPbaazF79uwYNGjQBtfvsssuccABB8TYsWPj0ksvjYjI3ID/4osvjtq1a5eYKdi7d+845JBD4qKLLorZs2dHq1at4oknnsiEH5syQ+jee++N+fPnZ0KsF154Ia655pqIiDj11FNLzGocO3Zs5ObmZj0E4btyc3Pj+uuvj/79+8fhhx8ep556aixYsCBuueWWOOyww+L444/P9L399ttj+PDhMXny5Eww1rZt2/j1r38dd911V3zzzTfRsWPHmDJlSjz88MNx2WWXbXDW23qtWrWKFi1axMUXXxyffvpp5OXlxSOPPLJJ97vbmF69ekWHDh3it7/9bcybNy/23nvvePTRR0vcT21rOPvss+P222+Pfv36xQUXXBANGzaMsWPHZh60UZoZYKeeemo89NBDcc4558TkyZOjQ4cOsW7dupg5c2Y89NBD8eyzz8ZBBx0UV199dbzwwgvRs2fPaNq0aSxZsiT+93//N3bdddc49NBDI+LbMLtmzZpx5513Ro0aNWLnnXeOdu3aRfPmzTd6/Ntvvz2WLVuWmVH45JNPxieffBIREYMHD478/Pys/mPHjo1GjRptNDgdNGhQjB49Onr16hWDBw+Opk2bxtSpU+P++++Po446Ktq
1a7fZ7xEAbHVl9PRPANhuDRw4MPn+X7ErVqxIhgwZkjRq1CipXLlyssceeyTXX399UlxcnNUvIpKBAwdmtY0bNy6pVKlS0qdPn2TdunVJkiTJ5MmTk27duiX5+flJ1apVkxYtWiQDBgxIpk+fntmuf//+yc4771yivmHDhpWob2OeeOKJ5Gc/+1lSrVq1JC8vLznkkEOS+++/P6vPgw8+mLRt2zbJzc1NateunZxyyinJJ598UmJff//735PddtstqVKlSrL//vsnzz77bNK/f/+kadOmmT5z585NIiK5/vrrS2wfEcmwYcMyy998800yePDgpF69eklOTk7mnP7xj38kXbt2TerXr59UqVIladKkSXL22WcnCxcuzNpf06ZNs469IYMHD04iIpkzZ85G+1x11VVJRCRvv/12pq1Dhw5JRCRnnHHGBrf5/PPPk5NPPjmpUaNGkp+fnwwYMCB5+eWXk4hIHnjggdSakiRJOnbsmETEBl+TJ0/O6rt8+fKkatWqyfHHH/+D+73//vuTNm3aJLm5uUmDBg2SQYMGJYWFhVl91o+f7x9n7dq1yVVXXZU0bdo0qVy5crL77rsnN9100w8eM0mS5P3330+6dOmSVK9ePalbt25y5plnJm+//XYSEcno0aMz/TZnTH/55ZfJqaeemuTl5SX5+fnJqaeemrz11lsl9rkho0ePTiIief3117PaJ0+eXOLcO3bsmHTs2DGr33/+85+kZ8+eSbVq1ZJ69eolF110UfLII48kEZG8+uqrWdvus88+JY7//d+LJPn2/f3Tn/6U7LPPPklubm5Sq1at5MADD0yGDx+eLF++PEmSJJk0aVJy7LHHJo0aNUqqVKmSNGrUKOnXr1/y4YcfZu1r3Lhxyd57751UqlRpk96Ppk2bbnS8zZ07N6vvzJkzk4hIhg4dmrrPmTNnJieeeGLSuHHjpHLlyknTpk2Tiy++OFm1alXqdgBQVnKSZAveBRUAgFJ5/PHH47jjjouXXnopOnToUNblsIXdfPPNMWTIkPjkk09il112KetyAIDNJEADANjGVq9eHdWqVcssr1u3Lrp27RrTp0+PRYsWZa3jp+f7P981a9ZE27ZtY926dfHhhx+WYWUAQGm5BxoAwDY2ePDgWL16dbRv3z6Kiori0UcfjVdeeSX+8Ic/CM+2A8cff3w0adIk9t9//1i+fHn8/e9/j5kzZ8bYsWPLujQAoJTMQAMA2Mbuu+++uPHGG2P27NmxZs2a2H333ePcc8/d6MMK+Gm5+eab429/+1vMmzcv1q1bF3vvvXf85je/KfFEVgDgp0OABgAAAAApKpR1AQAAAABQngnQAAAAACCFAA0AAAAAUgjQAAAAACCFAA0AAAAAUgjQAAAAACCFAA0AAAAAUgjQAAAAACCFAA0AAAAAUgjQAAAAACCFAA0AAAAAUgjQAAAAACCFAA0AAAAAUgjQAAAAACCFAA0AAAAAUgjQAAAAACCFAA0AAAAAUgjQAAAAACCFAA0AAAAAUgjQAAAAACCFAA0AAAAAUgjQAAAAACCFAA0AAAAAUlQq6wK2tuLi4vjss8+iRo0akZOTU9blAAAAAFBGkiSJFStWRKNGjaJChU2fV7bdB2ifffZZNG7cuKzLAAAAAKCc+Pjjj2PXXXfd5P7bfYBWo0aNiPj2jcnLyyvjagAAAAAoK4WFhdG4ceNMXrSptvsAbf1lm3l5eQI0AAAAADb7Nl8eIgAAAAAAKQRoAAAAAJBCgAYAAAAAKQRoAAAAAJBCgAYAAAAAKQRoAAAAAJBCgAYAAAAAKQRoAAAAAJBCgAYAAAAAKQRoAAAAAJBCgAYAAAAAKQRoAAAAAJBCgAYAAAAAKQRoAAAAAJBCgAYAAAAAKQRoAAAAAJBCgAYAAAAAKQRoAAAAAJBCgAYAAAAAKSqVdQEAALAjyRmes02OkwxLtslxAGBHYAYaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgE
aAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABACgEaAAAAAKQQoAEAAABAijIN0EaMGBEHH3xw1KhRI+rXrx+9e/eOWbNmZfXp1KlT5OTkZL3OOeecMqoYAAAAgB1NmQZoU6dOjYEDB8arr74aEyZMiK+//jq6du0aq1atyup35plnxsKFCzOv6667rowqBgAAAGBHU6ksD/7MM89kLY8ZMybq168fb7zxRhx++OGZ9p122ikKCgq2dXkAAAAAUL7ugbZ8+fKIiKhdu3ZW+9ixY6Nu3bqx7777xmWXXRZfffXVRvdRVFQUhYWFWS8AAAAAKK0ynYH2XcXFxXHhhRdGhw4dYt999820n3zyydG0adNo1KhRvPPOO3HppZfGrFmz4tFHH93gfkaMGBHDhw/fVmUDAAAAsJ3LSZIkKesiIiLOPffcGD9+fLz00kux6667brTf888/H507d47Zs2dHixYtSqwvKiqKoqKizHJhYWE0btw4li9fHnl5eVuldgAA2FQ5w3O2yXGSYeXiaz4AlCuFhYWRn5+/2TlRuZiBNmjQoHjqqafihRdeSA3PIiLatWsXEbHRAC03Nzdyc3O3Sp0AAAAA7HjKNEBLkiQGDx4cjz32WEyZMiWaN2/+g9vMmDEjIiIaNmy4lasDAAAAgDIO0AYOHBj33XdfjBs3LmrUqBGLFi2KiIj8/PyoVq1azJkzJ+677744+uijo06dOvHOO+/EkCFD4vDDD4/WrVuXZekAAAAA7CDKNEC74447IiKiU6dOWe2jR4+OAQMGRJUqVWLixIlx8803x6pVq6Jx48ZxwgknxOWXX14G1QIAAACwIyrzSzjTNG7cOKZOnbqNqgEAAACAkiqUdQEAAAAAUJ4J0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFII0AAAAAAghQANAAAAAFKUaYA2YsSIOPjgg6NGjRpRv3796N27d8yaNSurz5o1a2LgwIFRp06dqF69epxwwgmxePHiMqoYAAAAgB1NmQZoU6dOjYEDB8arr74aEyZMiK+//jq6du0aq1atyvQZMmRIPPnkk/Hwww/
H1KlT47PPPovjjz++DKsGAAAAYEeSkyRJUtZFrPf5559H/fr1Y+rUqXH44YfH8uXLo169enHffffFiSeeGBERM2fOjL322iumTZsW//M///OD+ywsLIz8/PxYvnx55OXlbe1TAACAVDnDc7bJcZJh5eZrPgCUG6XNicrVPdCWL18eERG1a9eOiIg33ngjvv766+jSpUumT6tWraJJkyYxbdq0De6jqKgoCgsLs14AAAAAUFrlJkArLi6OCy+8MDp06BD77rtvREQsWrQoqlSpEjVr1szq26BBg1i0aNEG9zNixIjIz8/PvBo3bry1SwcAAABgO1ZuArSBAwfGu+++Gw888MCP2s9ll10Wy5cvz7w+/vjjLVQhAAAAADuiSmVdQETEoEGD4qmnnooXXnghdt1110x7QUFBrF27NpYtW5Y1C23x4sVRUFCwwX3l5uZGbm7u1i4ZAAAAgB1Emc5AS5IkBg0aFI899lg8//zz0bx586z1Bx54YFSuXDkmTZqUaZs1a1YsWLAg2rdvv63LBQAAAGAHVKYz0AYOHBj33XdfjBs3LmrUqJG5r1l+fn5Uq1Yt8vPz4/TTT4+hQ4dG7dq1Iy8vLwYPHhzt27ffpCdwAgAAAMCPVaYB2h133BEREZ06dcpqHz16dAwYMCAiIm666aaoUKFCnHDCCVFUVBTdunWL//3f/93GlQIAAACwo8pJkiQp6yK2psLCwsjPz4/ly5dHXl5eWZcDAMAOLmd4zjY5TjJsu/6aDwClUtqcqNw8hRMAAAAAyiMBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQIpSBWi77bZbfPnllyXaly1bFrvtttuPLgoAAAAAyotSBWjz5s2LdevWlWgvKiqKTz/99EcXBQAAAADlRaXN6fzEE09k/vzss89Gfn5+ZnndunUxadKkaNas2RYrDgAAAADK2mYFaL17946IiJycnOjfv3/WusqVK0ezZs3ixhtv3GLFAQAAAEBZ26wArbi4OCIimjdvHq+//nrUrVt3qxQFAAAAAOXFZgVo682dO3dL1wEAAAAA5VKpArSIiEmTJsWkSZNiyZIlmZlp6911110/ujAAAAAAKA9KFaANHz48rr766jjooIOiYcOGkZOTs6XrAgAAAIByoVQB2p133hljxoyJU089dUvXAwAAAADlSoXSbLR27dr42c9+9qMP/sILL0SvXr2iUaNGkZOTE48//njW+gEDBkROTk7Wq3v37j/6uAAAAACwqUoVoJ1xxhlx3333/eiDr1q1Ktq0aROjRo3aaJ/u3bvHwoULM6/777//Rx8XAAAAADZVqS7hXLNmTfzlL3+JiRMnRuvWraNy5cpZ60eOHLlJ++nRo0f06NEjtU9ubm4UFBSUpkwAAAAA+NFKFaC98847sf/++0dExLvvvpu1bks/UGDKlClRv379qFWrVhx55JFxzTX
XRJ06dTbav6ioKIqKijLLhYWFW7QeAAAAAHYspQrQJk+evKXr2KDu3bvH8ccfH82bN485c+bE7373u+jRo0dMmzYtKlasuMFtRowYEcOHD98m9QEAAACw/ctJkiQp6yIivp259thjj0Xv3r032uc///lPtGjRIiZOnBidO3feYJ8NzUBr3LhxLF++PPLy8rZ02QAAsFlyhm/ZKzY2JhlWLr7mA0C5UlhYGPn5+ZudE5VqBtoRRxyReqnm888/X5rd/qDddtst6tatG7Nnz95ogJabmxu5ublb5fgAAAAA7HhKFaCtv//Zel9//XXMmDEj3n333ejfv/+WqGuDPvnkk/jyyy+jYcOGW+0YAAAAAPBdpQrQbrrppg22X3XVVbFy5cpN3s/KlStj9uzZmeW5c+fGjBkzonbt2lG7du0YPnx4nHDCCVFQUBBz5syJ3/zmN7H77rtHt27dSlM2AAAAAGy2CltyZ7/85S/jrrvu2uT+06dPj7Zt20bbtm0jImLo0KHRtm3buPLKK6NixYrxzjvvxDHHHBN77rlnnH766XHggQfGiy++6BJNAAAAALaZUs1A25hp06ZF1apVN7l/p06dIu0ZBs8+++yWKAsAAAAASq1UAdrxxx+ftZwkSSxcuDCmT58eV1xxxRYpDAAAAADKg1IFaPn5+VnLFSpUiJYtW8bVV18dXbt23SKFAQAAAEB5UKoAbfTo0Vu6DgAAAAAol37UPdDeeOON+OCDDyIiYp999sk8DAAAAAAAthelCtCWLFkSffv2jSlTpkTNmjUjImLZsmVxxBFHxAMPPBD16tXbkjUCAAAAQJmpUJqNBg8eHCtWrIj33nsvli5dGkuXLo133303CgsL4/zzz9/SNQIAAABAmSnVDLRnnnkmJk6cGHvttVembe+9945Ro0Z5iAAAAAAA25VSzUArLi6OypUrl2ivXLlyFBcX/+iiAAAAAKC8KFWAduSRR8YFF1wQn332Wabt008/jSFDhkTnzp23WHEAAAAAUNZKFaDdfvvtUVhYGM2aNYsWLVpEixYtonnz5lFYWBi33Xbblq4RAAAAAMpMqe6B1rhx43jzzTdj4sSJMXPmzIiI2GuvvaJLly5btDgAAAAAKGubNQPt+eefj7333jsKCwsjJycnjjrqqBg8eHAMHjw4Dj744Nhnn33ixRdf3Fq1AgAAAMA2t1kB2s033xxnnnlm5OXllViXn58fZ599dowcOXKLFQcAAAAAZW2zArS33347unfvvtH1Xbt2jTfeeONHFwUAAAAA5cVmBWiLFy+OypUrb3R9pUqV4vPPP//RRQEAAABAebFZAdouu+wS77777kbXv/POO9GwYcMfXRQAAAAAlBebFaAdffTRccUVV8SaNWtKrFu9enUMGzYsfv7zn2+x4gAAAACgrOUkSZJsaufFixfHAQccEBUrVoxBgwZFy5YtIyJi5syZMWrUqFi3bl28+eab0aBBg61W8OYqLCyM/Pz8WL58+QYffgAAANtSzvCcbXKcZNgmf80HgB1GaXOiSptzkAYNGsQrr7wS5557blx22WWxPnvLycmJbt26xahRo8pVeAYAAAAAP9ZmBWgREU2bNo2nn346/vvf/8bs2bMjSZLYY489olatWlujPgAAAAAoU5sdoK1Xq1atOPjgg7dkLQAAAABQ7mzWQwQAAAAAYEcjQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAAC
AFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFAI0AAAAAEghQAMAAACAFGUaoL3wwgvRq1evaNSoUeTk5MTjjz+etT5JkrjyyiujYcOGUa1atejSpUt89NFHZVMsAAAAADukMg3QVq1aFW3atIlRo0ZtcP11110Xt956a9x5553x2muvxc477xzdunWLNWvWbONKAQAAANhRVSrLg/fo0SN69OixwXVJksTNN98cl19+eRx77LEREXHPPfdEgwYN4vHHH4++fftuy1IBAAAA2EGV23ugzZ07NxYtWhRdunTJtOXn50e7du1i2rRpZVgZAAAAADuSMp2BlmbRokUREdGgQYOs9gYNGmTWbUhRUVEUFRVllgsLC7dOgQAAAADsEMrtDLTSGjFiROTn52dejRs3LuuSAAAAAPgJK7cBWkFBQURELF68OKt98eLFmXUbctlll8Xy5cszr48//nir1gkAAADA9q3cBmjNmzePgoKCmDRpUqatsLAwXnvttWjfvv1Gt8vNzY28vLysFwAAAACUVpneA23lypUxe/bszPLcuXNjxowZUbt27WjSpElceOGFcc0118Qee+wRzZs3jyuuuCIaNWoUvXv3LruiAQAAANihlGmANn369DjiiCMyy0OHDo2IiP79+8eYMWPiN7/5TaxatSrOOuusWLZsWRx66KHxzDPPRNWqVcuqZAAAAAB2MDlJkiRlXcTWVFhYGPn5+bF8+XKXcwIAUOZyhudsk+Mkw7brr/kAUCqlzYnK7T3QAAAAAKA8EKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQAoBGgAAAACkEKABAAAAQIpyHaBdddVVkZOTk/Vq1apVWZcFAAAAwA6kUlkX8EP22WefmDhxYma5UqVyXzIAAAAA25Fyn0ZVqlQpCgoKyroMAAAAAHZQ5foSzoiIjz76KBo1ahS77bZbnHLKKbFgwYLU/kVFRVFYWJj1AgAAAIDSKtcBWrt27WLMmDHxzDPPxB133BFz586Nww47LFasWLHRbUaMGBH5+fmZV+PGjbdhxQAAAABsb3KSJEnKuohNtWzZsmjatGmMHDkyTj/99A32KSoqiqKiosxyYWFhNG7cOJYvXx55eXnbqlQAANignOE52+Q4ybCfzNd8ANhmCgsLIz8/f7NzonJ/D7TvqlmzZuy5554xe/bsjfbJzc2N3NzcbVgVAAAAANuzcn0J5/etXLky5syZEw0bNizrUgAAAADYQZTrAO3iiy+OqVOnxrx58+KVV16J4447LipWrBj9+vUr69IAAAAA2EG
U60s4P/nkk+jXr198+eWXUa9evTj00EPj1VdfjXr16pV1aQAAAADsIMp1gPbAAw+UdQkAAAAA7ODK9SWcAAAAAFDWBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkEKABgAAAAApBGgAAAAAkOInEaCNGjUqmjVrFlWrVo127drFv/71r7IuCQAAAIAdRLkP0B588MEYOnRoDBs2LN58881o06ZNdOvWLZYsWVLWpQEAAACwAyj3AdrIkSPjzDPPjNNOOy323nvvuPPOO2OnnXaKu+66q6xLAwAAAGAHUKmsC0izdu3aeOONN+Kyyy7LtFWoUCG6dOkS06ZN2+A2RUVFUVRUlFlevnx5REQUFhZu3WIBAGBTrNk2h/H9FwBKWv/3Y5Ikm7VduQ7Qvvjii1i3bl00aNAgq71BgwYxc+bMDW4zYsSIGD58eIn2xo0bb5UaAQCgPMr/Y35ZlwAA5daKFSsiP3/T/64s1wFaaVx22WUxdOjQzHJxcXEsXbo06tSpEzk5OWVY2eYrLCyMxo0bx8cffxx5eXllXQ5sUcY32zPjm+2dMc72zPhme2Z8sz3b1PGdJEmsWLEiGjVqtFn7L9cBWt26daNixYqxePHirPbFixdHQUHBBrfJzc2N3NzcrLaaNWturRK3iby8PB9ubLeMb7ZnxjfbO2Oc7ZnxzfbM+GZ7tinje3Nmnq1Xrh8iUKVKlTjwwANj0qRJmbbi4uKYNGlStG/fvgwrAwAAAGBHUa5noEVEDB06NPr37x8HHXRQHHLIIXHzzTfHqlWr4rTTTivr0gAAAADYAZT7AK1Pnz7x+eefx5VXXhmLFi2K/fffP5555pkSDxbYHuXm5sawYcNKXJIK2wPjm+2Z8c32zhhne2Z8sz0zvtmebe3xnZNs7nM7AQAAAGAHUq7vgQYAAAAAZU2ABgAAAAApBGgAAAAAkEKABgAAAAApBGjl2KhRo6JZs2ZRtWrVaNeuXfzrX/8q65Jgs1111VWRk5OT9WrVqlVm/Zo1a2LgwIFRp06dqF69epxwwgmxePHiMqwYNu6FF16IXr16RaNGjSInJycef/zxrPVJksSVV14ZDRs2jGrVqkWXLl3io48+yuqzdOnSOOWUUyIvLy9q1qwZp59+eqxcuXIbngVs2A+N7wEDBpT4PO/evXtWH+Ob8mrEiBFx8MEHR40aNaJ+/frRu3fvmDVrVlafTflOsmDBgujZs2fstNNOUb9+/bjkkkvim2++2ZanAiVsyvju1KlTic/wc845J6uP8U15dMcdd0Tr1q0jLy8v8vLyon379jF+/PjM+m352S1AK6cefPDBGDp0aAwbNizefPPNaNOmTXTr1i2WLFlS1qXBZttnn31i4cKFmddLL72UWTd
kyJB48skn4+GHH46pU6fGZ599Fscff3wZVgsbt2rVqmjTpk2MGjVqg+uvu+66uPXWW+POO++M1157LXbeeefo1q1brFmzJtPnlFNOiffeey8mTJgQTz31VLzwwgtx1llnbatTgI36ofEdEdG9e/esz/P7778/a73xTXk1derUGDhwYLz66qsxYcKE+Prrr6Nr166xatWqTJ8f+k6ybt266NmzZ6xduzZeeeWVuPvuu2PMmDFx5ZVXlsUpQcamjO+IiDPPPDPrM/y6667LrDO+Ka923XXX+OMf/xhvvPFGTJ8+PY488sg49thj47333ouIbfzZnVAuHXLIIcnAgQMzy+vWrUsaNWqUjBgxogyrgs03bNiwpE2bNhtct2zZsqRy5crJww8/nGn74IMPkohIpk2bto0qhNKJiOSxxx7LLBcXFycFBQXJ9ddfn2lbtmxZkpubm9x///1JkiTJ+++/n0RE8vrrr2f6jB8/PsnJyUk+/fTTbVY7/JDvj+8kSZL+/fsnxx577Ea3Mb75KVmyZEkSEcnUqVOTJNm07yRPP/10UqFChWTRokWZPnfccUeSl5eXFBUVbdsTgBTfH99JkiQdO3ZMLrjggo1uY3zzU1KrVq3kb3/72zb/7DYDrRxau3ZtvPHGG9GlS5dMW4UKFaJLly4xbdq0MqwMSuejjz6KRo0axW677RannHJKLFiwICIi3njjjfj666+zxnqrVq2iSZMmxjo/OXPnzo1FixZljef8/Pxo165dZjxPmzYtatasGQcddFCmT5cuXaJChQrx2muvbfOaYXNNmTIl6tevHy1btoxzzz03vvzyy8w645ufkuXLl0dERO3atSNi076TTJs2Lfbbb79o0KBBpk+3bt2isLAwMxMCyoPvj+/1xo4dG3Xr1o199903Lrvssvjqq68y64xvfgrWrVsXDzzwQKxatSrat2+/zT+7K22Z02BL+uKLL2LdunVZP+CIiAYNGsTMmTPLqCoonXbt2sWYMWOiZcuWsXDhwhg+fHgcdthh8e6778aiRYuiSpUqUbNmzaxtGjRoEIsWLSqbgqGU1o/ZDX12r1+3aNGiqF+/ftb6SpUqRe3atY15yr3u3bvH8ccfH82bN485c+bE7373u+jRo0dMmzYtKlasaHzzk1FcXBwXXnhhdOjQIfbdd9+IiE36TrJo0aINfsavXwflwYbGd0TEySefHE2bNo1GjRrFO++8E5deemnMmjUrHn300Ygwvinf/v3vf0f79u1jzZo1Ub169Xjsscdi7733jhkzZmzTz24BGrBV9ejRI/Pn1q1bR7t27aJp06bx0EMPRbVq1cqwMgA2R9++fTN/3m+//aJ169bRokWLmDJlSnTu3LkMK4PNM3DgwHj33Xez7skK24uNje/v3o9yv/32i4YNG0bnzp1jzpw50aJFi21dJmyWli1bxowZM2L58uXxj3/8I/r37x9Tp07d5nW4hLMcqlu3blSsWLHEkyMWL14cBQUFZVQVbBk1a9aMPffcM2bPnh0FBQWxdu3aWLZsWVYfY52fovVjNu2zu6CgoMTDYL755ptYunSpMc9Pzm677RZ169aN2bNnR4TxzU/DoEGD4qmnnorJkyfHrrvummnflO8kBQUFG/yMX78OytrGxveGtGvXLiIi6zPc+Ka8qlKlSuy+++5x4IEHxogRI6JNmzZxyy23bPPPbgFaOVSlSpU48MADY9KkSZm24uLimDRpUrRv374MK4Mfb+XKlTFnzpxo2LBhHHjggVG5cuWssT5r1qxYsGCBsc5PTvPmzaOgoCBrPBcWFsZrr72WGc/t27ePZcuWxRtvvJHp8/zzz0dxcXHmiyz8VHzyySfx5ZdfRsOGDSPC+KZ8S5IkBg0aFI899lg8//zz0bx586z1m/KdpH379vHvf/87KyieMGFC5OXlxd57771tTgQ24IfG94bMmDEjIiLrM9z45qeiuLg4ioqKtv1n95Z4AgJb3gMPPJDk5uYmY8a
MSd5///3krLPOSmrWrJn15Aj4KbjooouSKVOmJHPnzk1efvnlpEuXLkndunWTJUuWJEmSJOecc07SpEmT5Pnnn0+mT5+etG/fPmnfvn0ZVw0btmLFiuStt95K3nrrrSQikpEjRyZvvfVWMn/+/CRJkuSPf/xjUrNmzWTcuHHJO++8kxx77LFJ8+bNk9WrV2f20b1796Rt27bJa6+9lrz00kvJHnvskfTr16+sTgky0sb3ihUrkosvvjiZNm1aMnfu3GTixInJAQcckOyxxx7JmjVrMvswvimvzj333CQ/Pz+ZMmVKsnDhwszrq6++yvT5oe8k33zzTbLvvvsmXbt2TWbMmJE888wzSb169ZLLLrusLE4JMn5ofM+ePTu5+uqrk+nTpydz585Nxo0bl+y2227J4YcfntmH8U159dvf/jaZOnVqMnfu3OSdd95Jfvvb3yY5OTnJc889lyTJtv3sFqCVY7fddlvSpEmTpEqVKskhhxySvPrqq2VdEmy2Pn36JA0bNkyqVKmS7LLLLkmfPn2S2bNnZ9avXr06Oe+885JatWolO+20U3LcccclCxcuLMOKYeMmT56cRESJV//+/ZMkSZLi4uLkiiuuSBo0aJDk5uYmnTt3TmbNmpW1jy+//DLp169fUr169SQvLy857bTTkhUrVpTB2UC2tPH91VdfJV27dk3q1auXVK5cOWnatGly5plnlviPPeOb8mpDYzsiktGjR2f6bMp3knnz5iU9evRIqlWrltStWze56KKLkq+//nobnw1k+6HxvWDBguTwww9PateuneTm5ia77757cskllyTLly/P2o/xTXn061//OmnatGlSpUqVpF69eknnzp0z4VmSbNvP7pwkSZLNm7MGAAAAADsO90ADAAAAgBQCNAAAAABIIUADAAAAgBQCNAAAAABIIUADAAAAgBQCNAAAAABIIUADAAAAgBQCNACAn5gBAwZE7969S7Xt4YcfHvfdd9+PriEnJycef/zxH72f0li7dm00a9Yspk+fXibHBwB2PAI0AIAN+DEh1ZYyb968yMnJiRkzZmyR/T3xxBOxePHi6Nu3b6atLIOw0qpSpUpcfPHFcemll5Z1KQDADkKABgCwg7j11lvjtNNOiwoVfvpfAU855ZR46aWX4r333ivrUgCAHcBP/9sTAEAZePfdd6NHjx5RvXr1aNCgQZx66qnxxRdfZNZ36tQpzj///PjNb34TtWvXjoKCgrjqqquy9jFz5sw49NBDo2rVqrH33nvHxIkTs2aENW/ePCIi2rZtGzk5OdGpU6es7W+44YZo2LBh1KlTJwYOHBhff/31Ruv9/PPP4/nnn49evXpl2po1axYREccdd1zk5ORkliMi7rjjjmjRokVUqVIlWrZsGffee2/q+zFs2LBo2LBhvPPOOxER8dJLL8Vhhx0W1apVi8aNG8f5558fq1atyjr2H/7wh/j1r38dNWrUiCZNmsRf/vKXzPq1a9fGoEGDomHDhlG1atVo2rRpjBgxIrO+Vq1a0aFDh3jggQdS6wIA2BIEaAAAm2nZsmVx5JFHRtu2bWP69OnxzDPPxOLFi+Okk07K6nf33XfHzjvvHK+99lpcd911cfXVV8eECRMiImLdunXRu3fv2GmnneK1116Lv/zlL/H73/8+a/t//etfERExceLEWLhwYTz66KOZdZMnT445c+bE5MmT4+67744xY8bEmDFjNlrzSy+9FDvttFPstddembbXX389IiJGjx4dCxcuzCw/9thjccEFF8RFF10U7777bpx99tlx2mmnxeTJk0vsN0mSGDx4cNxzzz3x4osvRuvWrWPOnDnRvXv3OOGEE+Kdd96JBx98MF566aUYNGhQ1rY33nhjHHTQQfHWW2/FeeedF+eee27MmjUrIr6dLffEE0/EQw89FLNmzYqxY8dmBXwREYcccki8+OKLGz1nAIAtpVJZFwAA8FNz++23R9u2beMPf/hDpu2uu+6Kxo0bx4c
ffhh77rlnRES0bt06hg0bFhERe+yxR9x+++0xadKkOOqoo2LChAkxZ86cmDJlShQUFERExLXXXhtHHXVUZp/16tWLiIg6depk+qxXq1atuP3226NixYrRqlWr6NmzZ0yaNCnOPPPMDdY8f/78aNCgQdblm+v3X7Nmzaz933DDDTFgwIA477zzIiJi6NCh8eqrr8YNN9wQRxxxRKbfN998E7/85S/jrbfeipdeeil22WWXiIgYMWJEnHLKKXHhhRdmzv3WW2+Njh07xh133BFVq1aNiIijjz46c4xLL700brrpppg8eXK0bNkyFixYEHvssUcceuihkZOTE02bNi1xTo0aNYr58+dv8HwBALYkM9AAADbT22+/HZMnT47q1atnXq1atYqIiDlz5mT6tW7dOmu7hg0bxpIlSyIiYtasWdG4ceOs4OqQQw7Z5Br22WefqFix4gb3vSGrV6/OBFc/5IMPPogOHTpktXXo0CE++OCDrLYhQ4bEa6+9Fi+88EImPIv49v0ZM2ZM1vvTrVu3KC4ujrlz52b6fff9ycnJiYKCgsw5DBgwIGbMmBEtW7aM888/P5577rkSdVarVi2++uqrTTonAIAfwww0AIDNtHLlyujVq1f86U9/KrGuYcOGmT9Xrlw5a11OTk4UFxdvkRo2d99169aN//73v1vk2OsdddRRcf/998ezzz4bp5xySqZ95cqVcfbZZ8f5559fYpsmTZpk/px2DgcccEDMnTs3xo8fHxMnToyTTjopunTpEv/4xz8y/ZcuXZqZRQcAsDUJ0AAANtMBBxwQjzzySDRr1iwqVSrd16mWLVvGxx9/HIsXL44GDRpExP+/J9l6VapUiYhv75f2Y7Vt2zYWLVoU//3vf6NWrVqZ9sqVK5fY/1577RUvv/xy9O/fP9P28ssvx957753V75hjjolevXrFySefHBUrVoy+fftGxLfvz/vvvx+77777j6o5Ly8v+vTpE3369IkTTzwxunfvHkuXLo3atWtHxLcPcmjbtu2POgYAwKZwCScAwEYsX748ZsyYkfX6+OOPY+DAgbF06dLo169fvP766zFnzpx49tln47TTTtvksOuoo46KFi1aRP/+/eOdd96Jl19+OS6//PKI+HYmVkRE/fr1o1q1apmHFCxfvrzU59K2bduoW7duvPzyy1ntzZo1i0mTJmXCtYiISy65JMaMGRN33HFHfPTRRzFy5Mh49NFH4+KLLy6x3+OOOy7uvffeOO200zKzwy699NJ45ZVXYtCgQTFjxoz46KOPYty4cSUeIpBm5MiRcf/998fMmTPjww8/jIcffjgKCgqiZs2amT4vvvhidO3atRTvBgDA5hGgAQBsxJQpU6Jt27ZZr+HDh0ejRo3i5ZdfjnXr1kXXrl1jv/32iwsvvDBq1qyZdZP+NBUrVozHH388Vq5cGQcffHCcccYZmadwrr9XWaVKleLWW2+NP//5z9GoUaM49thjS30uFStWjNNOOy3Gjh2b1X7jjTfGhAkTonHjxpnZXL17945bbrklbrjhhthnn33iz3/+c4wePTo6deq0wX2feOKJcffdd8epp54ajz76aLRu3TqmTp0aH374YRx22GHRtm3buPLKK6NRo0abXG+NGjXiuuuui4MOOigOPvjgmDdvXjz99NOZ93fatGmxfPnyOPHEE0v3hgAAbIacJEmSsi4CAIBvL5M89NBDY/bs2dGiRYstvv9FixbFPvvsE2+++eYGn2r5U9KnT59o06ZN/O53vyvrUgCAHYB7oAEAlJHHHnssqlevHnvssUfMnj07LrjggujQocNWCc8iIgoKCuL//u//YsGCBT/pAG3t2rWx3377xZAhQ8q6FABgB2EGGgBAGbnnnnvimmuuiQULFkTdunWjS5cuceONN0adOnXKujQAAL5DgAYAAAAAKTxEAAAAAABSCNAAAAAAIIUADQAAAABSCNAAAAAAIIUADQAAAABSCNAAAAAAIIUADQAAAAB
SCNAAAAAAIIUADQAAAABS/D/gvsVXMQoQgAAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Plot the distribution of token counts\n", + "\n", + "tokens = [item.token_count for item in items]\n", + "plt.figure(figsize=(15, 6))\n", + "plt.title(f\"Token counts: Avg {sum(tokens)/len(tokens):,.1f} and highest {max(tokens):,}\\n\")\n", + "plt.xlabel('Length (tokens)')\n", + "plt.ylabel('Count')\n", + "plt.hist(tokens, rwidth=0.7, color=\"green\", bins=range(0, 300, 10))\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "2b58dc61-747f-46f7-b9e0-c205db4f3e5e", + "metadata": {}, + "source": [ + "## Sidenote\n", + "\n", + "If you like the variety of colors that matplotlib can use in its charts, you should bookmark this:\n", + "\n", + "https://matplotlib.org/stable/gallery/color/named_colors.html\n", + "\n", + "## Todos for you:\n", + "\n", + "- Review the Item class and check you're comfortable with it\n", + "- Examine some Item objects, look at the training prompt with `item.prompt` and test prompt with `item.test_prompt()`\n", + "- Make some more histograms to better understand the data\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "id": "45d18625-f9a6-4571-ac62-8ef04b8ec411", + "metadata": {}, + "source": [ + "# Finally\n", + "- Its time to break down our data into a training, test and validation dataset.\n", + "- Its typical to use 5%-10% of your data for testing purposes, but actually we have far more than we need at this point. Well take 400,000 points for training, and we'll reserve 2,000 for testing, although we won't use all of them.\n", + "\n", + "\n", + "Craft a dataset which is more balanced in terms of results. Try to balance out the categories - fewer anomalous items." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 121, + "id": "79cc7c1d-b3ae-458b-b576-78421e01209c", + "metadata": {}, + "outputs": [], + "source": [ + "# Create a dict with a key of each result from normal to anomalous\n", + "# And in the result, put a list of items with that result (\n", + "\n", + "slots = defaultdict(list)\n", + "for item in items:\n", + " slots[item.result].append(item)" + ] + }, + { + "cell_type": "code", + "execution_count": 122, + "id": "4affb0a3-f178-4946-b55d-c673ebde94d0", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[<$normal>,\n", + " <$normal>,\n", + " <$normal>,\n", + " <$normal>,\n", + " <$normal>,\n", + " <$normal>,\n", + " <$normal>,\n", + " <$normal>,\n", + " <$normal>,\n", + " <$normal>,\n", + " <$normal>,\n", + " <$normal>]" + ] + }, + "execution_count": 122, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "slots['normal']" + ] + }, + { + "cell_type": "code", + "execution_count": 123, + "id": "69d4ca90-9b62-49f1-8a45-1dc0f5cbd1b4", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "There are 100 items in the sample\n" + ] + } + ], + "source": [ + "# Create a dataset called \"sample\" which tries to more evenly take from the range of results\n", + "# And gives more weight to items from categories other than Anomalous\n", + "# Set random seed for reproducibility\n", + "\n", + "# Note: we are duplicating data here to increase our training and test data.\n", + "\n", + "np.random.seed(42)\n", + "random.seed(42)\n", + "size=50\n", + "sample = []\n", + "for i in ['normal', 'anomalous']:\n", + " slot = slots[i]\n", + " weights = np.array([1 if item.result=='Normal' else 2 for item in slot])\n", + " weights = weights / np.sum(weights)\n", + " selected_indices = np.random.choice(len(slot), size=size, replace=True, p=weights)\n", + " selected = [slot[i] for i in selected_indices]\n", + " sample.extend(selected)\n", + "\n", + 
"print(f\"There are {len(sample):,} items in the sample\")" + ] + }, + { + "cell_type": "code", + "execution_count": 124, + "id": "18fbb4cd-3359-4cd9-96d2-09b03c6458f9", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Divided into a training set of 80 items and test set of 20 items\n" + ] + } + ], + "source": [ + "random.seed(42)\n", + "random.shuffle(sample)\n", + "train = sample[:80]\n", + "test = sample[80:100]\n", + "print(f\"Divided into a training set of {len(train):,} items and test set of {len(test):,} items\")" + ] + }, + { + "cell_type": "code", + "execution_count": 125, + "id": "83eaf1df-e5eb-4ac0-b8db-6f3b3676212a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[<$normal>, <$normal>, <$anomalous>, <$normal>, <$anomalous>, <$anomalous>, <$normal>, <$anomalous>, <$normal>, <$anomalous>, <$anomalous>, <$normal>, <$anomalous>, <$anomalous>, <$anomalous>, <$normal>, <$normal>, <$anomalous>, <$anomalous>, <$normal>, <$normal>, <$normal>, <$normal>, <$anomalous>, <$anomalous>, <$normal>, <$anomalous>, <$normal>, <$anomalous>, <$normal>, <$normal>, <$normal>, <$anomalous>, <$anomalous>, <$anomalous>, <$normal>, <$anomalous>, <$anomalous>, <$anomalous>, <$normal>, <$normal>, <$normal>, <$normal>, <$normal>, <$normal>, <$anomalous>, <$anomalous>, <$anomalous>, <$normal>, <$normal>, <$anomalous>, <$normal>, <$normal>, <$anomalous>, <$normal>, <$normal>, <$anomalous>, <$anomalous>, <$normal>, <$anomalous>, <$normal>, <$normal>, <$normal>, <$anomalous>, <$anomalous>, <$normal>, <$anomalous>, <$normal>, <$anomalous>, <$normal>, <$normal>, <$anomalous>, <$anomalous>, <$anomalous>, <$anomalous>, <$anomalous>, <$normal>, <$anomalous>, <$anomalous>, <$anomalous>]\n" + ] + } + ], + "source": [ + "print(train)" + ] + }, + { + "cell_type": "code", + "execution_count": 126, + "id": "a5509145-390c-48c3-95c3-1c6e35277e67", + "metadata": {}, + "outputs": [], + "source": [ + 
"# One more thing!\n", + "# Let's pickle the training and test dataset so we don't have to execute all this code next time!\n", + "\n", + "with open('train.pkl', 'wb') as file:\n", + " pickle.dump(train, file)\n", + "\n", + "with open('test.pkl', 'wb') as file:\n", + " pickle.dump(test, file)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "10bb88e5-fbd2-403b-9f61-2962db90d123", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.11" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/project/day2.ipynb b/project/day2.ipynb new file mode 100644 index 0000000..cb47f78 --- /dev/null +++ b/project/day2.ipynb @@ -0,0 +1,3502 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "db8736a7-ed94-441c-9556-831fa57b5a10", + "metadata": {}, + "source": [ + "# The Product Pricer Continued\n", + "\n", + "A model that can estimate how much something costs, from its description.\n", + "\n", + "## Baseline Models\n", + "\n", + "Today we work on the simplest models to act as a starting point that we will beat." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 190, + "id": "681c717b-4c24-4ac3-a5f3-3c5881d6e70a", + "metadata": {}, + "outputs": [], + "source": [ + "# imports\n", + "\n", + "import os\n", + "import math\n", + "import json\n", + "import random\n", + "from dotenv import load_dotenv\n", + "from huggingface_hub import login\n", + "from items import Item\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import pickle\n", + "from collections import Counter" + ] + }, + { + "cell_type": "code", + "execution_count": 191, + "id": "933b6e75-3661-4f30-b0b5-c28d04e3748e", + "metadata": {}, + "outputs": [], + "source": [ + "# More imports for our traditional machine learning\n", + "\n", + "import pandas as pd\n", + "import numpy as np\n", + "from sklearn.linear_model import LinearRegression\n", + "from sklearn.metrics import mean_squared_error, r2_score\n", + "from sklearn.preprocessing import StandardScaler" + ] + }, + { + "cell_type": "markdown", + "id": "b3c87c11-8dbe-4b8c-8989-01e3d3a60026", + "metadata": {}, + "source": [ + "## NLP imports\n", + "\n", + "In the next cell, we have more imports for our NLP related machine learning. \n", + "If the gensim import gives you an error like \"Cannot import name 'triu' from 'scipy.linalg' then please run in another cell: \n", + "`!pip install \"scipy<1.13\"` \n", + "As described on StackOverflow [here](https://stackoverflow.com/questions/78279136/importerror-cannot-import-name-triu-from-scipy-linalg-when-importing-gens). \n", + "Many thanks to students Arnaldo G and Ard V for sorting this." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 192, + "id": "42cf33b7-7abd-44ba-9780-c156b70473b5", + "metadata": {}, + "outputs": [], + "source": [ + "# NLP related imports\n", + "\n", + "from sklearn.feature_extraction.text import CountVectorizer\n", + "from gensim.models import Word2Vec\n", + "from gensim.utils import simple_preprocess" + ] + }, + { + "cell_type": "code", + "execution_count": 193, + "id": "a1ac3ec0-183c-4a12-920b-b06397f86815", + "metadata": {}, + "outputs": [], + "source": [ + "# Finally, more imports for more advanced machine learning\n", + "\n", + "from sklearn.svm import LinearSVR\n", + "from sklearn.ensemble import RandomForestRegressor" + ] + }, + { + "cell_type": "code", + "execution_count": 194, + "id": "6c01ee5f-c4fc-44fe-9d3a-907e8a0426d2", + "metadata": {}, + "outputs": [], + "source": [ + "# Constants - used for printing to stdout in color\n", + "\n", + "GREEN = \"\\033[92m\"\n", + "YELLOW = \"\\033[93m\"\n", + "RED = \"\\033[91m\"\n", + "RESET = \"\\033[0m\"\n", + "COLOR_MAP = {\"red\":RED, \"orange\": YELLOW, \"green\": GREEN}" + ] + }, + { + "cell_type": "code", + "execution_count": 195, + "id": "36d05bdc-0155-4c72-a7ee-aa4e614ffd3c", + "metadata": {}, + "outputs": [], + "source": [ + "# environment\n", + "\n", + "load_dotenv()\n", + "os.environ['OPENAI_API_KEY'] = os.getenv('OPENAI_API_KEY', 'your-key-if-not-using-env')\n", + "os.environ['ANTHROPIC_API_KEY'] = os.getenv('ANTHROPIC_API_KEY', 'your-key-if-not-using-env')\n", + "os.environ['HF_TOKEN'] = os.getenv('HF_TOKEN', 'your-key-if-not-using-env')" + ] + }, + { + "cell_type": "code", + "execution_count": 196, + "id": "4dd3aad2-6f99-433c-8792-e461d2f06622", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Note: Environment variable`HF_TOKEN` is set and is the current active token independently from the token you've just configured.\n" + ] + } + ], + "source": [ + "# Log in to HuggingFace\n", + "\n", + "hf_token = 
os.environ['HF_TOKEN']\n", + "login(hf_token, add_to_git_credential=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 197, + "id": "c830ed3e-24ee-4af6-a07b-a1bfdcd39278", + "metadata": {}, + "outputs": [], + "source": [ + "%matplotlib inline" + ] + }, + { + "cell_type": "markdown", + "id": "5105e13c-bca0-4c70-bfaa-649345f53322", + "metadata": {}, + "source": [ + "# Loading the pkl files\n", + "\n", + "Let's avoid curating all our data again! Load in the pickle files\n" + ] + }, + { + "cell_type": "code", + "execution_count": 198, + "id": "5c9b05f4-c9eb-462c-8d86-de9140a2d985", + "metadata": {}, + "outputs": [], + "source": [ + "with open('train.pkl', 'rb') as file:\n", + " train = pickle.load(file)\n", + "\n", + "with open('test.pkl', 'rb') as file:\n", + " test = pickle.load(file)" + ] + }, + { + "cell_type": "code", + "execution_count": 199, + "id": "a84638f7-5ff7-4f54-8751-3ef156264aee", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "How would you classify this sensor data - normal or anomalous?\n", + "\n", + "[{\"timestamp\": 1737806400, \"room\": \"pillbox\", \"nodeId\": 1, \"onOff\": true}, {\"timestamp\": 1737806460, \"room\": \"bedroom\", \"nodeId\": 2, \"onOff\": true}, {\"timestamp\": 1737806700, \"room\": \"bathroom\", \"nodeId\": 2, \"onOff\": true}, {\"timestamp\": 1737807000, \"room\": \"bathroom\", \"nodeId\": 3, \"humidity\": 6500}, {\"timestamp\": 1737807300, \"room\": \"bathroom\", \"nodeId\": 2, \"onOff\": false}, {\"timestamp\": 1737807600, \"room\": \"\n", + "\n", + "Result is normal\n" + ] + } + ], + "source": [ + "# Remind ourselves the training prompt\n", + "\n", + "print(train[0].prompt)" + ] + }, + { + "cell_type": "code", + "execution_count": 202, + "id": "d83cdb39-caa6-4ced-b5e4-4ff203126236", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "<$anomalous>\n" + ] + } + ], + "source": [ + "# Remind 
ourselves the details\n", + "\n", + "print(test[0])" + ] + }, + { + "cell_type": "code", + "execution_count": 203, + "id": "b7619c85-6e9e-48a1-8efe-c6a60471b87c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "<$normal>" + ] + }, + "execution_count": 203, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Remind a test prompt\n", + "\n", + "train[0]" + ] + }, + { + "cell_type": "markdown", + "id": "bcccf130-125a-4958-bac3-f46dfcb29b3f", + "metadata": {}, + "source": [ + "## Unveiling a mighty script that we will use a lot!\n", + "\n", + "A rather pleasing Test Harness that will evaluate any model against the items from the Test set\n", + "\n", + "And show us the results in a visually satisfying way.\n", + "\n", + "You write a function of this form:\n", + "\n", + "```\n", + "def my_prediction_function(item):\n", + " # my code here\n", + " return my_estimate\n", + "```\n", + "\n", + "And then you call:\n", + "\n", + "`Tester.test(my_prediction_function)`\n", + "\n", + "To evaluate your model." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 222, + "id": "b5793f5c-e23e-4a74-9496-1e30dd1e8935", + "metadata": {}, + "outputs": [ + { + "ename": "SyntaxError", + "evalue": "invalid syntax (2808863601.py, line 14)", + "output_type": "error", + "traceback": [ + "\u001b[0;36m Cell \u001b[0;32mIn[222], line 14\u001b[0;36m\u001b[0m\n\u001b[0;31m if !error:\u001b[0m\n\u001b[0m ^\u001b[0m\n\u001b[0;31mSyntaxError\u001b[0m\u001b[0;31m:\u001b[0m invalid syntax\n" + ] + } + ], + "source": [ + "class Tester:\n", + "\n", + " def __init__(self, predictor, data, title=None, size=80):\n", + " self.predictor = predictor\n", + " self.data = data\n", + " self.title = title or predictor.__name__.replace(\"_\", \" \").title()\n", + " self.size = size\n", + " self.guesses = []\n", + " self.truths = []\n", + " self.errors = []\n", + " self.colors = []\n", + "\n", + " def color_for(self, error, truth):\n", + " if not error:\n", + " return \"green\"\n", + " else:\n", + " return \"red\"\n", + " \n", + " def run_datapoint(self, i):\n", + " datapoint = self.data[i]\n", + " guess = self.predictor(datapoint)\n", + " truth = datapoint.result\n", + " error = guess != truth\n", + " color = self.color_for(error, truth)\n", + " prompt = datapoint.prompt if len(datapoint.prompt) <= 40 else datapoint.prompt[:40]+\"...\"\n", + " self.guesses.append(guess)\n", + " self.truths.append(truth)\n", + " self.errors.append(error)\n", + " self.colors.append(color)\n", + " #print(f\"{COLOR_MAP[color]}{i+1}: Guess: {guess} Truth: {truth:} Error: {error:} Item: {prompt}{RESET}\")\n", + " print(f\"{COLOR_MAP[color]}{i+1}: Guess: {guess} Truth: {truth:} Error: {error:}{RESET}\")\n", + "\n", + " def chart(self, title):\n", + " actual = self.truths\n", + " predicted = self.guesses\n", + "\n", + " # Get unique classes\n", + " classes = list(set(actual + predicted)) # Union of unique classes in actual and predicted\n", + "\n", + " # Initialize the confusion matrix as a dictionary\n", + " confusion_matrix = 
{true: {pred: 0 for pred in classes} for true in classes}\n", + "\n", + " # Populate the confusion matrix\n", + " for a, p in zip(actual, predicted):\n", + " confusion_matrix[a][p] += 1\n", + "\n", + " # Convert the confusion matrix into a 2D list for visualization\n", + " matrix = [[confusion_matrix[true][pred] for pred in classes] for true in classes]\n", + "\n", + " # Plot the confusion matrix\n", + " plt.figure(figsize=(8, 6))\n", + " plt.imshow(matrix, interpolation='nearest', cmap=plt.cm.Blues)\n", + " plt.title(title)\n", + " plt.colorbar()\n", + "\n", + " # Add labels\n", + " tick_marks = range(len(classes))\n", + " plt.xticks(tick_marks, classes)\n", + " plt.yticks(tick_marks, classes)\n", + " plt.ylabel('Actual Label')\n", + " plt.xlabel('Predicted Label')\n", + "\n", + " # Add text annotations\n", + " for i in range(len(classes)):\n", + " for j in range(len(classes)):\n", + " plt.text(j, i, matrix[i][j],\n", + " horizontalalignment=\"center\",\n", + " color=\"white\" if matrix[i][j] > max(max(row) for row in matrix) / 2 else \"black\")\n", + "\n", + " plt.tight_layout()\n", + " plt.show()\n", + "\n", + "\n", + " def report(self):\n", + " average_error = sum(self.errors) / self.size\n", + " hits = sum(1 for color in self.colors if color==\"green\")\n", + " title = f\"{self.title} Error={average_error:,.2f} Hits={hits/self.size*100:.1f}%\"\n", + " self.chart(title)\n", + "\n", + " def run(self):\n", + " self.error = 0\n", + " for i in range(self.size):\n", + " self.run_datapoint(i)\n", + " self.report()\n", + "\n", + " @classmethod\n", + " def test(cls, function, data):\n", + " cls(function, data).run()" + ] + }, + { + "cell_type": "markdown", + "id": "066fef03-8338-4526-9df3-89b649ad4f0a", + "metadata": {}, + "source": [ + "# Now for something basic\n", + "\n", + "What's the very simplest model you could imagine?\n", + "\n", + "Let's start with a random guess - either normal or anomalous!" 
+ ] + }, + { + "cell_type": "code", + "execution_count": 223, + "id": "66ea68e8-ab1b-4f0d-aba4-a59574d8f85e", + "metadata": {}, + "outputs": [], + "source": [ + "def random_situation_assessor(item):\n", + " choices = ['normal', 'anomalous']\n", + " choiceNum = random.randint(0,1)\n", + " choice = choices[choiceNum] \n", + " return choice" + ] + }, + { + "cell_type": "code", + "execution_count": 219, + "id": "379062f2-4da4-42cb-9dbf-7d0a8ed7d584", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "anomalous\n" + ] + } + ], + "source": [ + "print(random_situation_assessor(train[1]))" + ] + }, + { + "cell_type": "code", + "execution_count": 220, + "id": "53d941cb-5b73-44ea-b893-3a0ce9997066", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[91m1: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[91m2: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[91m3: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[91m4: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[92m5: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[92m6: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[91m7: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[92m8: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[92m9: Guess: anomalous Truth: normal Error: True\u001b[0m\n", + "\u001b[92m10: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[92m11: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[91m12: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[92m13: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[92m14: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[92m15: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[91m16: Guess: normal Truth: normal Error: False\u001b[0m\n", + 
"\u001b[92m17: Guess: anomalous Truth: normal Error: True\u001b[0m\n", + "\u001b[92m18: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[91m19: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[92m20: Guess: anomalous Truth: normal Error: True\u001b[0m\n", + "\u001b[91m21: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[91m22: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[92m23: Guess: anomalous Truth: normal Error: True\u001b[0m\n", + "\u001b[91m24: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[91m25: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[91m26: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[92m27: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[92m28: Guess: anomalous Truth: normal Error: True\u001b[0m\n", + "\u001b[92m29: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[91m30: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[92m31: Guess: anomalous Truth: normal Error: True\u001b[0m\n", + "\u001b[91m32: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[91m33: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[91m34: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[91m35: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[91m36: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[91m37: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[92m38: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[91m39: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[91m40: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[92m41: Guess: anomalous Truth: normal Error: True\u001b[0m\n", + "\u001b[92m42: Guess: anomalous Truth: normal Error: True\u001b[0m\n", + "\u001b[91m43: Guess: normal Truth: normal Error: False\u001b[0m\n", + 
"\u001b[91m44: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[91m45: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[92m46: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[91m47: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[92m48: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[91m49: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[91m50: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[91m51: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[92m52: Guess: anomalous Truth: normal Error: True\u001b[0m\n", + "\u001b[92m53: Guess: anomalous Truth: normal Error: True\u001b[0m\n", + "\u001b[91m54: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[91m55: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[92m56: Guess: anomalous Truth: normal Error: True\u001b[0m\n", + "\u001b[91m57: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[92m58: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[92m59: Guess: anomalous Truth: normal Error: True\u001b[0m\n", + "\u001b[92m60: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[91m61: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[91m62: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[91m63: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[91m64: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[91m65: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[92m66: Guess: anomalous Truth: normal Error: True\u001b[0m\n", + "\u001b[92m67: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[92m68: Guess: anomalous Truth: normal Error: True\u001b[0m\n", + "\u001b[92m69: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[91m70: Guess: normal Truth: normal Error: False\u001b[0m\n", + "\u001b[91m71: Guess: 
normal Truth: normal Error: False\u001b[0m\n", + "\u001b[91m72: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[91m73: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[91m74: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[92m75: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[92m76: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[92m77: Guess: anomalous Truth: normal Error: True\u001b[0m\n", + "\u001b[92m78: Guess: normal Truth: anomalous Error: True\u001b[0m\n", + "\u001b[91m79: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n", + "\u001b[91m80: Guess: anomalous Truth: anomalous Error: False\u001b[0m\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAs0AAAJOCAYAAABMazr3AAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAZK9JREFUeJzt3XlcVNX/x/H3gDIgm6IiLrhvIIpmZi4plhsuYVqWZm6VVqiZS2alaVaUS1rm0qr2LdPcbXMrl9zDwjTN1DQxNc0FhFRQ7u8Pf0yOgw4XHRni9exxH9/vPffOOWeuDHz48LnnWgzDMAQAAADgmjxyewIAAACAuyNoBgAAAJwgaAYAAACcIGgGAAAAnCBoBgAAAJwgaAYAAACcIGgGAAAAnCBoBgAAAJwgaAYAAACcIGhGvtezZ0+VL18+t6eRI+XLl1fPnj1zexoO8vI1BfKKmTNnymKx6ODBg7k9FSBfIGjGLZP5DT5zK1CggEqXLq2ePXvqzz//zO3puZUdO3bo/vvvV7ly5eTt7a3SpUurRYsWmjx58nVft2vXLo0aNeqW/BA9cuSIRo0apYSEBJePlRO7d++WxWKRt7e3zpw5k9vTcXtr1qyx+3xevc2ZMye3p3hDNm7cqMaNG6tQoUIKCQnRgAEDlJKSYrqf9evX267J33//bXds1KhRWV47b2/vbPVdvnx5tWvXLstjmf8+8+fPv24fU6dO1cyZM7M1nqu0aNFCFotF/fr1czh2ra+v119/PVt9X7hwQcOGDVOpUqXk4+Oj+vXra+XKlQ7nvfvuu6pQoYKCgoL0yCOPKDk52e54RkaG6tSpo9deey1nbxL5UoHcngDyn5dfflkVKlTQ+fPntXnzZs2cOVPr16/Xzp07s/3D5b9s48aNatasmcqWLavHH39cISEhSkxM1ObNm/XWW2+pf//+tnP37NkjD49/f/fdtWuXRo8eraioKJdneo8cOaLRo0erfPnyql27tt2x999/XxkZGS4d35lPPvlEISEhOn36tObPn6/HHnssV+eTVwwYMED16tVzaG/QoEEuzObmSEhI0D333KOwsDC9+eabOnz4sMaPH6+9e/fqm2++yXY/GRkZ6t+/v3x9fZWamnrN86ZNmyY/Pz/bvqen5w3N/1oeeeQRPfTQQ7Jarba2qVOnqlixYrn2F6iFCxdq06ZN1z2nRYsW6t69u11bnTp1stV/z549NX/+fA0cOFBVqlTR
zJkz1aZNG61evVqNGzeWdPkXmyeffFIDBgxQxYoVFRcXp6FDh+rdd9+19fP+++8rKSlJgwcPNvkOkZ8RNOOWi46O1u233y5Jeuyxx1SsWDG98cYbWrp0qTp37pzLs8t9r776qgIDA/XDDz+ocOHCdseOHz9ut3/lD0t3UrBgwVwd3zAMzZ49W127dtWBAwf06aefEjRLSk1Nla+v73XPueuuu3T//feb6jcjI0NpaWlZ/tKbnTGd+eeff1SoUKEcv/75559XkSJFtGbNGgUEBEi6nNV9/PHHtWLFCrVs2TJb/bz33ntKTEzUY489prfeeuua591///0qVqxYjuebXZ6eni4LyHPi/PnzGjx4sIYNG6aRI0de87yqVauqW7dupvvfunWr5syZo3HjxmnIkCGSpO7duysiIkLPPvusNm7cKEn68ssvFRUVpUmTJkmSAgICNHz4cFvQfObMGb344ot699133fZ7KNwT5RnIdXfddZckaf/+/ba2tLQ0jRw5UnXr1lVgYKB8fX111113afXq1XavPXjwoCwWi8aPH6/33ntPlSpVktVqVb169fTDDz84jLV48WJFRETI29tbERERWrRoUZZzSk1N1eDBgxUaGiqr1apq1app/PjxMgzD7rzMP0HOmzdP4eHh8vHxUYMGDbRjxw5Jl/9EWLlyZXl7eysqKipbZRP79+9XjRo1HAJmSQoODrbbv7KmeebMmXrggQckSc2aNbP92XPNmjW2uY4aNcqhz6vrok+dOqUhQ4aoZs2a8vPzU0BAgKKjo7V9+3bbOWvWrLFlI3v16mUbK/PPwlnVNJu9ppn/VlarVTVq1NCyZcucXLl/bdiwQQcPHtRDDz2khx56SOvWrdPhw4cdzouPj1erVq1UrFgx+fj4qEKFCurdu7fdOXPmzFHdunXl7++vgIAA1axZ0yFgOnPmjAYOHGh7b5UrV9Ybb7zhkG131ld6erpGjx6tKlWqyNvbW0WLFlXjxo0d/vz83Xff6a677pKvr68KFy6smJgY7d692+6czFKBXbt2qWvXripSpIgtE3ejMv+NPv30U9WoUUNWq1XLli2zlWCtXbtWTz31lIKDg1WmTBnb66ZOnWo7v1SpUoqNjXUonYmKilJERIS2bdumJk2aqFChQnr++edzPNfk5GStXLlS3bp1swXM0uVgy8/PT59//nm2+jl16pRefPFFvfzyy1l+Nq9kGIaSk5MdvrZvtqtrmsuXL69ffvlFa9eutX0mo6KiJGX/a+tGjB07VhkZGbaA9nrOnTun8+fPm+p//vz58vT0VJ8+fWxt3t7eevTRR7Vp0yYlJiba+i5SpIjtnKCgIP3zzz+2/VGjRqlmzZrq2LGjqfEBMs3IdZnf8K/8JpecnKwPPvhAXbp00eOPP66zZ8/qww8/VKtWrbR161aHcoDZs2fr7Nmz6tu3rywWi8aOHauOHTvq999/t2U9V6xYoU6dOik8PFxxcXE6efKkevXqZfdDXbr8A+/ee+/V6tWr9eijj6p27dpavny5hg4dqj///FMTJ060O//777/X0qVLFRsbK0mKi4tTu3bt9Oyzz2rq1Kl66qmndPr0aY0dO1a9e/fWd999d93rUa5cOW3atEk7d+5UREREtq9jkyZNNGDAAL399tt6/vnnFRYWJkm2/82u33//XYsXL9YDDzygChUq6K+//tK7776rpk2bateuXSpVqpTCwsL08ssva+TIkerTp4/tF5+GDRtm2afZa7p+/XotXLhQTz31lPz9/fX222+rU6dOOnTokIoWLer0PXz66aeqVKmS6tWrp4iICBUqVEifffaZhg4dajvn+PHjatmypYoXL67nnntOhQsX1sGDB7Vw4ULbOStXrlSXLl10zz336I033pB0uVZ6w4YNevrppyVdzoI2bdpUf/75p/r27auyZctq48aNGj58uI4ePWrLdmWnr1GjRikuLk6PPfaY7rjjDiUnJys+Pl4//vijWrRoIUla
tWqVoqOjVbFiRY0aNUrnzp3T5MmT1ahRI/34448Ov6w88MADqlKlil577bVsBXFnz551qNWVpKJFi8pisdj2v/vuO33++efq16+fihUrpvLly9vq25966ikVL15cI0eOtJUxjBo1SqNHj1bz5s315JNPas+ePZo2bZp++OEHbdiwwe6vEydPnlR0dLQeeughdevWTSVKlJAkpaSkZCvQKliwoAIDAyVdvj/g4sWLtr9uZfLy8lLt2rX1008/Oe1PkkaMGKGQkBD17dtXY8aMue65FStWVEpKinx9fdWhQwdNmDDB9h6cSU9Pz/L6JyUlOX3tpEmT1L9/f/n5+emFF16QJNu42fnaunDhgs6ePZuteV6dST906JBef/11ffTRR/Lx8bnua2fOnKmpU6fKMAyFhYXpxRdfVNeuXZ2O+dNPP6lq1ap2v/xI0h133CHpchlOaGio6tWrpw8++EArVqxQhQoVNGHCBNs5u3bt0vTp07V169ZsvU/AjgHcIjNmzDAkGatWrTJOnDhhJCYmGvPnzzeKFy9uWK1WIzEx0XbuxYsXjQsXLti9/vTp00aJEiWM3r1729oOHDhgSDKKFi1qnDp1yta+ZMkSQ5LxxRdf2Npq165tlCxZ0jhz5oytbcWKFYYko1y5cra2xYsXG5KMV155xW78+++/37BYLMa+fftsbZIMq9VqHDhwwNb27rvvGpKMkJAQIzk52dY+fPhwQ5LduVlZsWKF4enpaXh6ehoNGjQwnn32WWP58uVGWlqaw7nlypUzevToYdufN2+eIclYvXq1w7mSjJdeeslpH+fPnzcuXbpkd86BAwcMq9VqvPzyy7a2H374wZBkzJgxw6HPHj163NA19fLysmvbvn27IcmYPHmyw1hXS0tLM4oWLWq88MILtrauXbsakZGRductWrTIkGT88MMP1+zr6aefNgICAoyLFy9e85wxY8YYvr6+xm+//WbX/txzzxmenp7GoUOHst1XZGSk0bZt2+u9PaN27dpGcHCwcfLkSVvb9u3bDQ8PD6N79+62tpdeesmQZHTp0uW6/WVavXq1Iema29GjR23nSjI8PDyMX375xa6PzM9448aN7d7n8ePHDS8vL6Nly5Z2X1vvvPOOIcn46KOPbG1NmzY1JBnTp093mGOPHj2uO8fMrWnTprbXZH4m1q1b59DfAw88YISEhDi9Ntu3bzc8PT2N5cuXG4bx77U9ceKE3XmTJk0y+vXrZ3z66afG/PnzjaefftooUKCAUaVKFSMpKcnpOOXKlXP63ubNm2c7P/N6X/k9pUaNGnbvP1N2vrYy+8vOdrX777/faNiwoW1fkhEbG+twXsOGDY1JkyYZS5YsMaZNm2ZEREQYkoypU6c6vT41atQw7r77bof2X375xe5r5uLFi0bHjh1tcw0NDTV+/vlnwzAMo2XLlsYTTzzhdCwgK2Saccs1b97cbr98+fL65JNP7DK+V9bqZWRk6MyZM8rIyNDtt9+uH3/80aHPBx980C5TnZn5/P333yVJR48eVUJCgp577jlbBkq6fENKeHi43U09X3/9tTw9PTVgwAC7MQYPHqz58+frm2++sbsr/J577rHL7tWvX1+S1KlTJ/n7+zu0//7779e9Sa9FixbatGmT4uLitHz5cm3atEljx45V8eLF9cEHH+jee++95mtvhitr/C5duqQzZ87Iz89P1apVy/LaZ4fZa9q8eXNVqlTJtl+rVi0FBATY/j2v55tvvtHJkyfVpUsXW1uXLl3Uvn17/fLLL6pRo4Yk2f7E/uWXXyoyMjLLOuzChQsrNTVVK1euVOvWrbMcb968ebrrrrtUpEgRuwxh8+bN9frrr2vdunV6+OGHs9VX4cKF9csvv2jv3r2qUqWKw/HMr+Nnn31WQUFBdtenRYsW+vrrrx1e88QTT2Q51rWMHDnS9vm50pXjSVLTpk0VHh6eZR+PP/64Xa3tqlWrlJaWpoEDB9rduPr444/r+eef11df
faVevXrZ2q1Wq91+pmeffTZbtbBXfi84d+6crc+reXt7245fz4ABAxQdHe209jnzLwaZOnXqpDvuuEMPP/ywpk6dqueee87pWPXr19crr7zi0L59+/ZslT1ci7OvLUlq1apVjso1Vq9erQULFmjLli1Oz92wYYPdfu/evVW3bl09//zz6tmz53Wz1OfOnbvmv2Pmcenyz48FCxZo3759SkpKUo0aNeTt7a2lS5dq69at+vTTT/Xnn3/qiSee0LZt21S3bl29++67KlWqlJm3jXyIoBm33JQpU1S1alUlJSXpo48+0rp167L8Rjhr1ixNmDBBv/76q9LT023tFSpUcDi3bNmydvuZPzRPnz4tSfrjjz8kKcsfFlcHg3/88YdKlSplF/BK/5Y5ZPZ1rbEzg/LQ0NAs2zPndD316tXTwoULlZaWpu3bt2vRokWaOHGi7r//fiUkJFwzWLkZMjIy9NZbb2nq1Kk6cOCALl26ZDuWndKIrNzoNZUu/5tm59p98sknqlChgqxWq/bt2ydJqlSpkgoVKqRPP/3UtsRU06ZN1alTJ40ePVoTJ05UVFSUOnTooK5du9q+Hp966il9/vnnio6OVunSpdWyZUt17tzZLujdu3evfv75ZxUvXjzL+WTevJmdvl5++WXFxMSoatWqioiIUOvWrfXII4+oVq1adtepWrVqDuOEhYVp+fLlDjfeZfV5uZ6aNWs6/GKblev1e/Wxa83by8tLFStWdPj3L126tLy8vBz6DQ8PN/21nxmEXbhwweHY+fPnnZYSzJ07Vxs3btTOnTtNjZupa9euGjx4sFatWpWtoLlYsWJZXv8CBW7sx7Wzry1JKlmypEqWLGmq34sXL2rAgAF65JFHslx1xRkvLy/169fPFsBer+7ex8fnmv+OmcevVLlyZdv/T0tL0+DBg/XSSy+pWLFiuuuuu1SyZEl98cUXev3119W1a1fb/R/AtRA045a74447bPWFHTp0UOPGjdW1a1ft2bPHtkzTJ598op49e6pDhw4aOnSogoOD5enpqbi4OLsbBjNd6w5yw8U34lxv7JsxJy8vL9WrV0/16tVT1apV1atXL82bN08vvfRSjuaalSuDYkl67bXXNGLECPXu3VtjxoxRUFCQPDw8NHDgwFu2jFxOr11ycrK++OILnT9/PstfkGbPnq1XX33VdpPU/PnztXnzZn3xxRdavny5evfurQkTJmjz5s3y8/NTcHCwEhIStHz5cn3zzTf65ptvNGPGDHXv3l2zZs2SdPmXjBYtWujZZ5/Nck5Vq1aVpGz11aRJE+3fv19LlizRihUr9MEHH2jixImaPn16jlf/cBYU5tT1+r3RMa/1+qSkpGxlhr28vGyZ8cwg8OjRow7nHT161Gl2cejQoXrggQfk5eVlu/8i8+bFxMREpaWlOe0jNDRUp06dcjpvV8rO19a5c+eyVTstSSEhIZKkjz/+WHv27NG7777rcKPz2bNndfDgQQUHB1939ZPMBIOza1SyZMks1/TP/Le93r/DxIkTVaBAAfXr10+JiYlav369Dhw4oPLly2vs2LGqWLGiDh8+7HCPC3AlgmbkqsxAuFmzZnrnnXdsmZj58+erYsWKWrhwod3NRzkNFsuVKyfpclbwanv27HE4d9WqVTp79qxdZvTXX3+16+tWy/xFI6sf/pmuvFZXK1KkiMNKBWlpaQ79zZ8/X82aNdOHH35o137mzBm7m3+uN9bVbtU1Xbhwoc6fP69p06Y53Ki0Z88evfjii9qwYYNdNuvOO+/UnXfeqVdffVWzZ8/Www8/rDlz5tgCCS8vL7Vv317t27dXRkaGnnrqKb377rsaMWKEKleurEqVKiklJSVb2VlnfUmXyyB69eqlXr16KSUlRU2aNNGoUaP02GOP2a7T1V+z0uVrWaxYsRte3s0Vrpx3xYoVbe1paWk6cOBAtq6ddLn8IfMXjOtp2rSpLWsYERGhAgUKKD4+3m5Jy7S0NCUkJDhd
5jIxMVGzZ8/W7NmzHY7ddtttioyMvO4DfgzD0MGDB7O9DvGNut7n8npfW9LlrHpWZTFZyfwF9tChQ0pPT1ejRo0czvn444/18ccfa9GiRerQocM1+8osu7rWX2sy1a5dW6tXr1ZycrLdzYCZZSFX3yCe6ejRo3rllVc0b948FShQQEeOHJH0b5Cd+b9//vknQTOui6AZuS4qKkp33HGHJk2apIEDB8rb29uWaTQMw/ZDYMuWLdq0aVOWf7p3pmTJkqpdu7ZmzZplV9e8cuVK7dq1yy5oa9Omjd577z298847Gj58uK194sSJslgsio6OvpG369Tq1asVFRXl8MMvs141qz/NZ8oMmLJ6Al6lSpW0bt06u7b33nvPIdPs6enpkNGdN2+e/vzzT7s/d15vrKvdqmv6ySefqGLFilnW8V64cEGvv/66Pv30UzVu3FinT59W4cKF7a5z5g/dzD8Bnzx50q4kxcPDw/bn7MxzOnfurFGjRmn58uVq1aqV3ZiZ9eAFChTIVl9Xn+Pn56fKlSvbltK68ut4+PDhtrrsnTt3asWKFTla+/ZWaN68uby8vPT222+rdevWtmv+4YcfKikpSW3bts1WPzmpaQ4MDFTz5s31ySefaMSIEbZf2v73v/8pJSXFtkyjdHkllEOHDqlYsWK2X7qyWpZyzpw5mjt3rj7++GO7IOvEiRMOgd+0adN04sSJa9ax32y+vr5ZfiadfW1JOatpfuihh7IMVu+77z61adNGjz/+uO1+jqyuz9mzZzVp0iQVK1ZMdevWtbX//fff+vvvv1W2bFlblvr++++3LS+aWd994cIFzZgxQ/Xr13coicv03HPPqUmTJrZ/g8wVRX799VfVqlXLtlxjZvYcuBaCZriFzD+Bzpw5U0888YTatWunhQsX6r777lPbtm114MABTZ8+XeHh4Tl69K10eSm4tm3bqnHjxurdu7dOnTqlyZMnq0aNGnZ9tm/fXs2aNdMLL7yggwcPKjIyUitWrNCSJUs0cOBAuxvUXKF///76559/dN9996l69epKS0vTxo0bNXfuXJUvX/66maDatWvL09NTb7zxhpKSkmS1WnX33XcrODhYjz32mJ544gl16tRJLVq00Pbt27V8+XKHjGy7du308ssvq1evXmrYsKF27NihTz/91C5DKF0OwgsXLqzp06fL399fvr6+ql+/fpa1rrfimh45ckSrV692uNkwk9VqVatWrTRv3jy9/fbbmjVrlqZOnar77rtPlSpV0tmzZ/X+++8rICBAbdq0kXT54TunTp3S3XffrTJlyuiPP/7Q5MmTVbt2bVs99tChQ7V06VK1a9dOPXv2VN26dZWamqodO3Zo/vz5OnjwoIoVK5atvsLDwxUVFaW6desqKChI8fHxmj9/vt1NkuPGjVN0dLQaNGigRx991LbkXGBgYJbrcJv1/fffZ7msW61atezqX80oXry4hg8frtGjR6t169a69957tWfPHk2dOlX16tXLdrCfk5pm6fIDgxo2bKimTZuqT58+Onz4sCZMmKCWLVvaBbNbt25Vs2bN9NJLL9muZVYZ0szMcnR0tN3np1y5cnrwwQdVs2ZNeXt7a/369ZozZ45q166tvn37mp53TtStW1fTpk3TK6+8osqVKys4OFh33313tr62clLTXL16dVWvXj3LYxUqVLC7flOmTNHixYvVvn17lS1bVkePHtVHH32kQ4cO6X//+59dHfs777yj0aNH25II0uWbJB944AENHz5cx48fV+XKlTVr1iwdPHjQ4S9jmbZu3aq5c+fq559/trWVL19et99+u3r27KlHH31UH3zwgerXr59rf0VEHpKLK3cgn8lcziirJb4uXbpkVKpUyahUqZJx8eJFIyMjw3jttdeMcuXKGVar1ahTp47x5ZdfOixllrnk3Lhx4xz6VBZLrC1YsMAICwszrFarER4ebixcuNChT8MwjLNnzxrPPPOMUapUKaNgwYJGlSpVjHHj
xhkZGRkOY1y9rNK15pS5pNeVS0Zl5ZtvvjF69+5tVK9e3fDz8zO8vLyMypUrG/379zf++usvu3OvXi7OMAzj/fffNypWrGh4enraLT936dIlY9iwYUaxYsWMQoUKGa1atTL27duX5ZJzgwcPNkqWLGn4+PgYjRo1MjZt2mQ0bdrUYSmrJUuWGOHh4UaBAgXslp+72df0Wu/1ShMmTDAkGd9+++01z5k5c6YhyViyZInx448/Gl26dDHKli1rWK1WIzg42GjXrp0RHx9vO3/+/PlGy5YtjeDgYMPLy8soW7as0bdvX7vl1zLf2/Dhw43KlSsbXl5eRrFixYyGDRsa48ePty0VmJ2+XnnlFeOOO+4wChcubPj4+BjVq1c3Xn31VYflBletWmU0atTI8PHxMQICAoz27dsbu3btsjvnWsuiXYuzJeeu/Cxd69/oep9xw7i8xFz16tWNggULGiVKlDCefPJJ4/Tp03bnNG3a1KhRo0a25mzG999/bzRs2NDw9vY2ihcvbsTGxtotCWkY/16DrJZmvNK1ru1jjz1mhIeHG/7+/kbBggWNypUrG8OGDXMY51rKlSt3zWXhsvr+kdWSc8eOHTPatm1r+Pv72y2/l92vrZslq6+RFStWGC1atDBCQkKMggULGoULFzZatmyZ5Wc28xpfvXzmuXPnjCFDhhghISGG1Wo16tWrZyxbtizLOWRkZBj169c3Bg0a5HBs3759RpMmTQw/Pz+jSZMmxv79+3P+ZpFvWAzjFtwpBQAAAORhPEYbAAAAcIKgGQAAAHCCoBkAAABwgqAZAAAAeVZcXJzq1asnf39/BQcHq0OHDlmuZy9dXso2OjpaFotFixcvNjUOQTMAAADyrLVr1yo2NlabN2/WypUrlZ6erpYtWyo1NdXh3EmTJpl6ONeVWD0DAAAA/xknTpxQcHCw1q5dqyZNmtjaExIS1K5dO8XHx6tkyZJOn1Z5NR5u8h+SkZGhI0eOyN/fP8e/RQEAAPdiGIbOnj2rUqVKycMj94sEzp8/r7S0NJeOYVzxROBMVqtVVqvV6WuTkpIkXX50fKZ//vlHXbt21ZQpU3L89EeC5v+QI0eOXPMxogAAIG9LTEy0e3R7bjh//rx8/ItKF/9x6Th+fn4OTwC+8mmd15KRkaGBAweqUaNGioiIsLU/88wzatiwoWJiYnI8J4Lm/xB/f39Jkld4D1k8vZycDSCvO7RmfG5PAcAtcDY5WZUrhNp+zuemtLQ06eI/sob3kFwVa1xKU8quWUpMTFRAQICtOTtZ5tjYWO3cuVPr16+3tS1dulTfffedfvrppxuaFkHzf0jmnzEsnl4EzUA+cOUPEwD/fW5VelnA22WxhmG5XIISEBBg6vtcv3799OWXX2rdunV2GfnvvvtO+/fvV+HChe3O79Spk+666y6tWbMmW/0TNAMAACDPMgxD/fv316JFi7RmzRpVqFDB7vhzzz2nxx57zK6tZs2amjhxotq3b5/tcQiaAQAAYI5Fkqsy3ya7jY2N1ezZs7VkyRL5+/vr2LFjkqTAwED5+PgoJCQky5v/ypYt6xBgX0/u34IJAAAA5NC0adOUlJSkqKgolSxZ0rbNnTv3po5DphkAAADmWDwub67q24ScPHIkJ68h0wwAAAA4QaYZAAAA5lgsLqxpdqNVQq5AphkAAABwgkwzAAAAzHGjmuZbxT1nBQAAALgRMs0AAAAwh5pmAAAAAFcj0wwAAACTXFjT7KY5XfecFQAAAOBGyDQDAADAHGqaAQAAAFyNTDMAAADMYZ1mAAAAAFcj0wwAAABzqGkGAAAAcDUyzQAAADCHmmYAAAAAVyPTDAAAAHOoaQYAAABwNTLNAAAAMIeaZgAAAABXI9MMAAAAcywWF2aaqWkGAAAA8iQyzQAAADDHw3J5c1XfbohMMwAAAOAEmWYAAACYw+oZAAAAAK5GphkAAADm8ERAAAAAAFcj0wwAAABzqGkGAAAAcDUyzQAAADCH
mmYAAAAAVyPTDAAAAHOoaQYAAABwNTLNAAAAMIeaZgAAAABXI9MMAAAAc6hpBgAAAHA1Ms0AAAAwh5pmAAAAAFcj0wwAAACTXFjT7KY5XfecFQAAAOBGyDQDAADAHGqaAQAAAFyNoBkAAADmWCz/rtV80zdzmea4uDjVq1dP/v7+Cg4OVocOHbRnzx67c/r27atKlSrJx8dHxYsXV0xMjH799VdT4xA0AwAAIM9au3atYmNjtXnzZq1cuVLp6elq2bKlUlNTbefUrVtXM2bM0O7du7V8+XIZhqGWLVvq0qVL2R6HmmYAAACY40ZPBFy2bJnd/syZMxUcHKxt27apSZMmkqQ+ffrYjpcvX16vvPKKIiMjdfDgQVWqVClb4xA0AwAAwO0kJyfb7VutVlmtVqevS0pKkiQFBQVleTw1NVUzZsxQhQoVFBoamu35UJ4BAAAAczJXz3DVJik0NFSBgYG2LS4uzum0MjIyNHDgQDVq1EgRERF2x6ZOnSo/Pz/5+fnpm2++0cqVK+Xl5ZXtt0ymGQAAAObcgvKMxMREBQQE2Jqzk2WOjY3Vzp07tX79eodjDz/8sFq0aKGjR49q/Pjx6ty5szZs2CBvb+9sTYugGQAAAG4nICDALmh2pl+/fvryyy+1bt06lSlTxuF4Zsa6SpUquvPOO1WkSBEtWrRIXbp0yVb/BM0AAAAwx40ebmIYhvr3769FixZpzZo1qlChQrZeYxiGLly4kO1xCJoBAACQZ8XGxmr27NlasmSJ/P39dezYMUmXM8s+Pj76/fffNXfuXLVs2VLFixfX4cOH9frrr8vHx0dt2rTJ9jgEzQAAADDHjZacmzZtmiQpKirKrn3GjBnq2bOnvL299f3332vSpEk6ffq0SpQooSZNmmjjxo0KDg7O9jgEzQAAAMizDMO47vFSpUrp66+/vuFxCJoBAABgjhvVNN8qrNMMAAAAOEGmGQAAAKZYLBZZyDQDAAAAuBKZZgAAAJhCphkAAACAAzLNAAAAMMfy/5ur+nZDZJoBAAAAJ8g0AwAAwBRqmgEAAAA4INMMAAAAU8g0AwAAAHBAphkAAACmkGkGAAAA4IBMMwAAAEwh0wwAAADAAZlmAAAAmMMTAQEAAABcjUwzAAAATKGmGQAAAIADMs0AAAAwxWKRCzPNrun2RpFpBgAAAJwg0wwAAABTLHJhTbObpprJNAMAAABOkGkGAACAKayeAQAAAMABmWYAAACYwxMBAQAAAFyNTDMAAADMcWFNs0FNMwAAAJA3kWkGAACAKa5cPcN16z/fGDLNAAAAgBNkmgEAAGAKmWYAAAAADsg0AwAAwBzWaQYAAABwNTLNAAAAMIWaZgAAAAAOyDQDAADAFDLNAAAAAByQaQYAAIApZJoBAAAAOCDTDAAAAFPINAMAAABwQKYZAAAA5vBEQAAAACDviIuLU7169eTv76/g4GB16NBBe/bssR0/deqU+vfvr2rVqsnHx0dly5bVgAEDlJSUZGocgmYAAACYklnT7KrNjLVr1yo2NlabN2/WypUrlZ6erpYtWyo1NVWSdOTIER05ckTjx4/Xzp07NXPmTC1btkyPPvqoqXEozwAAAECetWzZMrv9mTNnKjg4WNu2bVOTJk0UERGhBQsW2I5XqlRJr776qrp166aLFy+qQIHshcMEzQAAADDlVqyekZycbNdutVpltVqdvj6z7CIoKOi65wQEBGQ7YJYozwAAAIAbCg0NVWBgoG2Li4tz+pqMjAwNHDhQjRo1UkRERJbn/P333xozZoz69Oljaj5kmgEAAGDKrcg0JyYmKiAgwNaenSxzbGysdu7cqfXr12d5PDk5WW3btlV4eLhGjRplal4EzQAAAHA7AQEBdkGzM/369dOXX36pdevWqUyZMg7Hz549q9atW8vf31+LFi1SwYIFTc2HoBkAAADmuNE6zYZhqH///lq0aJHWrFmjChUqOJyTnJysVq1ayWq1aunSpfL29jY9LYJm
IA8Y0rulOtwdqarlS+jchXRt2f67Xnhrifb+cTzL8xe/86RaNaqhzs+8py/W/HyLZwvgRlkkFfCQPCySxSKlXZIyjH+PF/SQPK+6K+lShpSecUunCbiF2NhYzZ49W0uWLJG/v7+OHTsmSQoMDJSPj4+Sk5PVsmVL/fPPP/rkk0+UnJxsu8mwePHi8vT0zNY4BM1AHnDXbZU1fe46bfvlDxUo4KnR/drry2n9VKfjK/rnfJrduf0fbibDuEZHAPIEi0UydDkI9rrGz3OCZOSmW1HTnF3Tpk2TJEVFRdm1z5gxQz179tSPP/6oLVu2SJIqV65sd86BAwdUvnz5bI1D0AzkATH9ptrt93npEyV+97rqhIdqw4/7be21qpbW04/crUYPj9XBVc7vMgbgnjIM+8wygGsznGSKoqKinJ6THQTNQB4U4He5Fut00j+2Nh/vgpoZ11MDX/9cf508m1tTA3CLeFgkq+fljHSGIV0k64xbyJ0yzbcKQTOQx1gsFo0bcr82/rRfu/YftbWPHdxJm7cf0JdrduTi7ADcCpeMy5thXC7lKOBxuYwj7VJuzwz47+LhJm6qfPnymjRpUm5PA25o0vDOqlG5pLo/N8PW1rZpTUXdUVVDx83PxZkBuFUyyzcys8zply5nnj3cM0GH/yCLLLZs803fXLYsx40h0wzkIROHPaA2d0Wo+aOT9OfxM7b2qHpVVbFMMR1bN87u/M/GP6YNP+1Xq8ffusUzBXArGfr/rHNuTwT5BuUZyLa0tDR5eXnl9jSQj0wc9oDuvTtSLR9/S38cOWl3bPyMFZqxaKNd27b5L+jZCQv01dqdt3KaAHIR9w4CrpNvyjOioqI0YMAAPfvsswoKClJISIjd4xMPHTqkmJgY+fn5KSAgQJ07d9Zff/1lOz5q1CjVrl1bH3zwgSpUqGBbFNtisejdd99Vu3btVKhQIYWFhWnTpk3at2+foqKi5Ovrq4YNG2r//n9XONi/f79iYmJUokQJ+fn5qV69elq1atUtuxbIeyYN76yH2tZTj+dnKiX1vEoU9VeJov7ytl5+mtFfJ89q1/6jdpskJR497RBgA8gbrnx2xNXPkSjg8W+bh+VyPXNmqQZwS1hcvLmhfBM0S9KsWbPk6+urLVu2aOzYsXr55Ze1cuVKZWRkKCYmRqdOndLatWu1cuVK/f7773rwwQftXr9v3z4tWLBACxcuVEJCgq19zJgx6t69uxISElS9enV17dpVffv21fDhwxUfHy/DMNSvXz/b+SkpKWrTpo2+/fZb/fTTT2rdurXat2+vQ4cOmXo/Fy5csC3QfeVC3fjv6du5iQr7F9LKDwbq4Ko423Z/y9tye2oAXMDDIlkLXN4kqaDn5f9f8P9/alt0OVD28rzclmFwEyDgavmqPKNWrVp66aWXJElVqlTRO++8o2+//VaStGPHDh04cEChoaGSpI8//lg1atTQDz/8oHr16km6XJLx8ccfq3jx4nb99urVS507d5YkDRs2TA0aNNCIESPUqlUrSdLTTz+tXr162c6PjIxUZGSkbX/MmDFatGiRli5dahdcOxMXF6fRo0ebvQzIg3zqZP/r4kZeA8A9ZBjS+YvXPs5DTZDb8mNNc77KNNeqVctuv2TJkjp+/Lh2796t0NBQW8AsSeHh4SpcuLB2795taytXrpxDwHx1vyVKlJAk1axZ067t/PnztkxwSkqKhgwZorCwMBUuXFh+fn7avXu36Uzz8OHDlZSUZNsSExNNvR4AAADZk68yzQULFrTbt1gsysjI/q/rvr6+TvvN/O0oq7bMsYYMGaKVK1dq/Pjxqly5snx8fHT//fcrLc3+ccjOWK1WWa1WU68BAAC4Ufkx05yvguZrCQsLU2JiohITE23Z5l27dunMmTMKDw+/6eNt2LBBPXv21H333Sfpcub54MGDN30cAAAA3Bz5qjzjWpo3b66aNWvq4Ycf1o8//qitW7eqe/fuatq0qW6//fabPl6VKlVs
NxNu375dXbt2NZXxBgAAyE0Wi2s3d0TQrMt/BliyZImKFCmiJk2aqHnz5qpYsaLmzp3rkvHefPNNFSlSRA0bNlT79u3VqlUr3XYbqyAAAAC4K4thGKzq+B+RnJyswMBAWWs+LosnD14B/utO//BObk8BwC2QnJysEkUDlZSUpICAgFyfS2BgoCr2ny8Pa9b3et2ojAup+n3y/W7xfq9EphkAAABwghsBAQAAYI4ra4+paQYAAADyJjLNAAAAMCU/rtNMphkAAABwgkwzAAAATHHlespummgm0wwAAAA4Q6YZAAAApnh4WOTh4ZqUsOGifm8UmWYAAADACTLNAAAAMIWaZgAAAAAOyDQDAADAFNZpBgAAAOCATDMAAABMoaYZAAAAgAMyzQAAADCFmmYAAAAADsg0AwAAwBQyzQAAAAAckGkGAACAKayeAQAAAMABmWYAAACYYpELa5rlnqlmMs0AAACAE2SaAQAAYAo1zQAAAAAckGkGAACAKazTDAAAAMABmWYAAACYQk0zAAAAAAdkmgEAAGAKNc0AAAAAHBA0AwAAwJTMmmZXbWbExcWpXr168vf3V3BwsDp06KA9e/bYnfPee+8pKipKAQEBslgsOnPmjOn3TNAMAACAPGvt2rWKjY3V5s2btXLlSqWnp6tly5ZKTU21nfPPP/+odevWev7553M8DjXNAAAAMMWdapqXLVtmtz9z5kwFBwdr27ZtatKkiSRp4MCBkqQ1a9bkeF4EzQAAAHA7ycnJdvtWq1VWq9Xp65KSkiRJQUFBN3U+lGcAAADAHFfWM/9/ojk0NFSBgYG2LS4uzum0MjIyNHDgQDVq1EgRERE39S2TaQYAAIDbSUxMVEBAgG0/O1nm2NhY7dy5U+vXr7/p8yFoBgAAgCm3oqY5ICDALmh2pl+/fvryyy+1bt06lSlT5qbPi6AZAAAAeZZhGOrfv78WLVqkNWvWqEKFCi4Zh6AZAAAApuRkPWUzfZsRGxur2bNna8mSJfL399exY8ckSYGBgfLx8ZEkHTt2TMeOHdO+ffskSTt27JC/v7/Kli2b7RsGuREQAAAAeda0adOUlJSkqKgolSxZ0rbNnTvXds706dNVp04dPf7445KkJk2aqE6dOlq6dGm2xyHTDAAAAFPcaZ1mwzCcnjNq1CiNGjUqhzO6jEwzAAAA4ASZZgAAAJjiTjXNtwqZZgAAAMAJMs0AAAAwxZ1qmm8VMs0AAACAE2SaAQAAYAqZZgAAAAAOyDQDAADAFFbPAAAAAOCATDMAAABMoaYZAAAAgAMyzQAAADCFmmYAAAAADsg0AwAAwJT8WNNM0AwAAABTLHJheYZrur1hlGcAAAAATpBpBgAAgCkeFos8XJRqdlW/N4pMMwAAAOAEmWYAAACYwpJzAAAAAByQaQYAAIAp+XHJOTLNAAAAgBNkmgEAAGCKh+Xy5qq+3RGZZgAAAMAJMs0AAAAwx+LC2mMyzQAAAEDeRKYZAAAAprBOMwAAAAAHZJoBAABgiuX//3NV3+6ITDMAAADgBJlmAAAAmMI6zQAAAAAckGkGAACAKRaLxWXrNLts/ecbRKYZAAAAcIJMMwAAAExhnWYAAAAADrKVaV66dGm2O7z33ntzPBkAAAC4Pw+LRR4uSgm7qt8bla2guUOHDtnqzGKx6NKlSzcyHwAAAMDtZCtozsjIcPU8AAAAkEdQ02zS+fPnb9Y8AAAAALdlOmi+dOmSxowZo9KlS8vPz0+///67JGnEiBH68MMPb/oEAQAA4F4y12l21eaOTAfNr776qmbOnKmxY8fKy8vL1h4REaEPPvjgpk4OAAAAcAemg+aPP/5Y7733nh5++GF5enra2iMjI/Xrr7/e1MkBAADA/WTWNLtqc0emg+Y///xTlStXdmjPyMhQenr6TZkUAAAA4E5MB83h4eH6/vvvHdrnz5+vOnXq3JRJAQAAwH1lrtPsqs0dmQ6aR44cqX79+umNN95QRkaGFi5cqMcf
f1yvvvqqRo4c6Yo5AgAAAFmKi4tTvXr15O/vr+DgYHXo0EF79uyxO+f8+fOKjY1V0aJF5efnp06dOumvv/4yNY7poDkmJkZffPGFVq1aJV9fX40cOVK7d+/WF198oRYtWpjtDgAAAHmMxcWbGWvXrlVsbKw2b96slStXKj09XS1btlRqaqrtnGeeeUZffPGF5s2bp7Vr1+rIkSPq2LGjqXGy9XCTq911111auXJlTl4KAAAA3DTLli2z2585c6aCg4O1bds2NWnSRElJSfrwww81e/Zs3X333ZKkGTNmKCwsTJs3b9add96ZrXFyFDRLUnx8vHbv3i3pcp1z3bp1c9oVAAAA8hBXrqd8o/0mJSVJkoKCgiRJ27ZtU3p6upo3b247p3r16ipbtqw2bdrkuqD58OHD6tKlizZs2KDChQtLks6cOaOGDRtqzpw5KlOmjNkuAQAAADvJycl2+1arVVar9bqvycjI0MCBA9WoUSNFRERIko4dOyYvLy9b3JqpRIkSOnbsWLbnY7qm+bHHHlN6erp2796tU6dO6dSpU9q9e7cyMjL02GOPme0OAAAAeYyHxbWbJIWGhiowMNC2xcXFOZ1XbGysdu7cqTlz5tz092w607x27Vpt3LhR1apVs7VVq1ZNkydP1l133XVTJwcAAID8KTExUQEBAbZ9Z1nmfv366csvv9S6devsKh9CQkKUlpamM2fO2GWb//rrL4WEhGR7PqYzzaGhoVk+xOTSpUsqVaqU2e4AAACQx2TWNLtqk6SAgAC77VpBs2EY6tevnxYtWqTvvvtOFSpUsDtet25dFSxYUN9++62tbc+ePTp06JAaNGiQ7fdsOmgeN26c+vfvr/j4eFtbfHy8nn76aY0fP95sdwAAAECOxcbG6pNPPtHs2bPl7++vY8eO6dixYzp37pwkKTAwUI8++qgGDRqk1atXa9u2berVq5caNGiQ7ZsApWyWZxQpUsTuTsbU1FTVr19fBQpcfvnFixdVoEAB9e7dWx06dDDxNgEAAJAXucuD+6ZNmyZJioqKsmufMWOGevbsKUmaOHGiPDw81KlTJ124cEGtWrXS1KlTTY2TraB50qRJpjoFAAAAbgXDMJye4+3trSlTpmjKlCk5HidbQXOPHj1yPAAAAAD+W9x5nWZXyfHDTaTLz/FOS0uza7vyLkcAAADgv8B00Jyamqphw4bp888/18mTJx2OX7p06aZMDAAAAO7pyvWUXdG3OzK9esazzz6r7777TtOmTZPVatUHH3yg0aNHq1SpUvr4449dMUcAAAAgV5nONH/xxRf6+OOPFRUVpV69eumuu+5S5cqVVa5cOX366ad6+OGHXTFPAAAAuIn8WNNsOtN86tQpVaxYUdLl+uVTp05Jkho3bqx169bd3NkBAAAAbsB00FyxYkUdOHBAklS9enV9/vnnki5noK98NCEAAAD+mywu3tyR6aC5V69e2r59uyTpueee05QpU+Tt7a1nnnlGQ4cOvekTBAAAAHKb6ZrmZ555xvb/mzdvrl9//VXbtm1T5cqVVatWrZs6OQAAALgfD4tFHi6qPXZVvzfKdKb5auXKlVPHjh0VFBSkPn363Iw5AQAAAG7lhoPmTCdPntSHH354s7oDAACAm7JYXLu5o5sWNAMAAAD/VTf0GG0AAADkP6zTDAAAAMBBtjPNHTt2vO7xM2fO3OhcAAAAkAe4svbYTRPN2Q+aAwMDnR7v3r37DU8IAAAAcDfZDppnzJjhynkAAAAgj2CdZgAAAAAOWD0DAAAApuTHmmYyzQAAAIATZJoBAABgSn5cp5mg+T8oomMHFfD2ze1pAHCxXrN/yu0pALgF0s+l5PYUoGwGzUuXLs12h/fee2+OJwMAAAD35yHX1fi6a+1wtoLmDh06ZKszi8WiS5cu3ch8AAAA4OYoz7iGjIwMV88DAAAAcFvUNAMAAMAUi0XyyGdLzuUoaE5NTdXatWt16NAhpaWl2R0bMGDATZkYAAAA
4C5MB80//fST2rRpo3/++UepqakKCgrS33//rUKFCik4OJigGQAA4D/Ow4WZZlf1e6NM36D4zDPPqH379jp9+rR8fHy0efNm/fHHH6pbt67Gjx/vijkCAAAAucp00JyQkKDBgwfLw8NDnp6eunDhgkJDQzV27Fg9//zzrpgjAAAA3Ejm6hmu2tyR6aC5YMGC8vC4/LLg4GAdOnRIkhQYGKjExMSbOzsAAADADZiuaa5Tp45++OEHValSRU2bNtXIkSP1999/63//+58iIiJcMUcAAAC4EWqas+G1115TyZIlJUmvvvqqihQpoieffFInTpzQe++9d9MnCAAAAOQ205nm22+/3fb/g4ODtWzZsps6IQAAALg3i8V16ym7aUmz2z7eGwAAAHAbpjPNFSpUuO5djb///vsNTQgAAADuzcNikYeLUsKu6vdGmQ6aBw4caLefnp6un376ScuWLdPQoUNv1rwAAAAAt2E6aH766aezbJ8yZYri4+NveEIAAABwbx5yXY2vu9YO37R5RUdHa8GCBTerOwAAAMBtmM40X8v8+fMVFBR0s7oDAACAm8qPq2fk6OEmV94IaBiGjh07phMnTmjq1Kk3dXIAAACAOzAdNMfExNgFzR4eHipevLiioqJUvXr1mzo5AAAAuB8PuXD1DLlnqtl00Dxq1CgXTAMAAABwX6ZvBPT09NTx48cd2k+ePClPT8+bMikAAAC4r8yaZldt7sh00GwYRpbtFy5ckJeX1w1PCAAAAHA32S7PePvttyVJFotFH3zwgfz8/GzHLl26pHXr1lHTDAAAkA94WC5vrurbjHXr1mncuHHatm2bjh49qkWLFqlDhw6243/99ZeGDRumFStW6MyZM2rSpIkmT56sKlWqmBon20HzxIkTJV3ONE+fPt2uFMPLy0vly5fX9OnTTQ0OAAAA3IjU1FRFRkaqd+/e6tixo90xwzDUoUMHFSxYUEuWLFFAQIDefPNNNW/eXLt27ZKvr2+2x8l20HzgwAFJUrNmzbRw4UIVKVIk24MAAADgv8NikctWzzDbbXR0tKKjo7M8tnfvXm3evFk7d+5UjRo1JEnTpk1TSEiIPvvsMz322GPZHsd0TfPq1asJmAEAAOD2Lly4IEny9va2tXl4eMhqtWr9+vWm+jIdNHfq1ElvvPGGQ/vYsWP1wAMPmO0OAAAAecytWD0jOTnZbssMgM2oXr26ypYtq+HDh+v06dNKS0vTG2+8ocOHD+vo0aOm+jIdNK9bt05t2rRxaI+Ojta6devMdgcAAAA4CA0NVWBgoG2Li4sz3UfBggW1cOFC/fbbbwoKClKhQoW0evVqRUdHy8PDXBhs+uEmKSkpWS4tV7BgQSUnJ5vtDgAAAHnMrVg9IzExUQEBAbZ2q9Wao/7q1q2rhIQEJSUlKS0tTcWLF1f9+vV1++23m5uX2YFr1qypuXPnOrTPmTNH4eHhZrsDAAAAHAQEBNhtOQ2aMwUGBqp48eLau3ev4uPjFRMTY+r1pjPNI0aMUMeOHbV//37dfffdkqRvv/1Wn332mebNm2e2OwAAAOQxlv//z1V9m5GSkqJ9+/bZ9g8cOKCEhAQFBQWpbNmymjdvnooXL66yZctqx44devrpp9WhQwe1bNnS1Dimg+b27dtr8eLFeu211zR//nz5+PioVq1aWrVqlZo2bWq2OwAAACDH4uPj1axZM9v+oEGDJEk9evTQzJkzdfToUQ0aNEh//fWXSpYsqe7du2vEiBGmxzEdNEtS27Zt1bZtW4f2nTt3KiIiIiddAgAAII9wpycCRkVFyTCMax4fMGCABgwYcIOzykFN89XOnj2r9957T3fccYciIyNveEIAAACAu8lx0Lxu3Tp1795dJUuW1Pjx43X33Xdr8+bNN3NuAAAAcEOZmWZXbe7IVHnGsWPHNHPmTH344YdKTk5W586ddeHCBS1evJiVMwAAAPCfle1Mc/v27VWtWjX9/PPPmjRpko4cOaLJkye7
cm4AAABwQxaLxaWbO8p2pvmbb77RgAED9OSTT6pKlSqunBMAAADgVrKdaV6/fr3Onj2runXrqn79+nrnnXf0999/u3JuAAAAcEP5saY520HznXfeqffff19Hjx5V3759NWfOHJUqVUoZGRlauXKlzp4968p5AgAAALnG9OoZvr6+6t27t9avX68dO3Zo8ODBev311xUcHKx7773XFXMEAACAG7FYXLu5oxtap7latWoaO3asDh8+rM8+++xmzQkAAABwKzl6IuDVPD091aFDB3Xo0OFmdAcAAAA35mGxyMNFKWFX9XujbviJgAAAAMB/3U3JNAMAACD/cOUqF3l+9QwAAAAgvyLTDAAAAHNcucoFmWYAAAAgbyLTDAAAAFM8ZJGHi1LCrur3RpFpBgAAAJwg0wwAAABTXPnkPjddpplMMwAAAOAMmWYAAACYwjrNAAAAAByQaQYAAIApHhaLPFxUfOyqfm8UmWYAAADACTLNAAAAMIXVMwAAAAA4INMMAAAAUzzkwppmnggIAAAA5E1kmgEAAGAKNc0AAAAAHJBpBgAAgCkecl3m1V0zuu46LwAAAMBtkGkGAACAKRaLRRYXFR+7qt8bRaYZAAAAcIJMMwAAAEyx/P/mqr7dEUEzAAAATPGwuPDhJpRnAAAAAHkTmWYAAACY5p75YNch0wwAAAA4QaYZAAAApvAYbQAAAAAOyDQDAADAFB5uAgAAAMABmWYAAACY4iHXZV7dNaPrrvMCAAAA3AaZZgAAAJhCTTMAAACQh6xbt07t27dXqVKlZLFYtHjxYrvjKSkp6tevn8qUKSMfHx+Fh4dr+vTppschaAYAAIApFhdvZqSmpioyMlJTpkzJ8vigQYO0bNkyffLJJ9q9e7cGDhyofv36aenSpabGoTwDAAAAeVZ0dLSio6OveXzjxo3q0aOHoqKiJEl9+vTRu+++q61bt+ree+/N9jhkmgEAAGBKZk2zq7abqWHDhlq6dKn+/PNPGYah1atX67ffflPLli1N9UOmGQAAAG4nOTnZbt9qtcpqtZruZ/LkyerTp4/KlCmjAgUKyMPDQ++//76aNGliqh8yzQAAADDFw8WbJIWGhiowMNC2xcXF5WiukydP1ubNm7V06VJt27ZNEyZMUGxsrFatWmWqHzLNAAAAcDuJiYkKCAiw7ecky3zu3Dk9//zzWrRokdq2bStJqlWrlhISEjR+/Hg1b948230RNAMAAMCUW7FOc0BAgF3QnBPp6elKT0+Xh4d9cYWnp6cyMjJM9UXQDAAAgDwrJSVF+/bts+0fOHBACQkJCgoKUtmyZdW0aVMNHTpUPj4+KleunNauXauPP/5Yb775pqlxCJoBAABgSk7WUzbTtxnx8fFq1qyZbX/QoEGSpB49emjmzJmaM2eOhg8frocfflinTp1SuXLl9Oqrr+qJJ54wNQ5BMwAAAPKsqKgoGYZxzeMhISGaMWPGDY9D0AwAAABTLJbLm6v6dkcsOQcAAAA4QaYZAAAApnjIIg8XVTW7qt8bRaYZAAAAcIJMMwAAAEyhphkAAACAAzLNAAAAMMXy//+5qm93RKYZAAAAcIJMMwAAAEyhphkAAACAAzLNAAAAMMXiwnWaqWkGAAAA8igyzQAAADCFmmYAAAAADsg0AwAAwBQyzQAAAAAckGkGAACAKTwREAAAAIADMs0AAAAwxcNyeXNV3+6ITDMAAADgBJlmAAAAmEJNMwAAAAAHZJoBAABgCus0AwAAAHBAphkAAACmWOS62mM3TTSTaQYAAACcIdMM5AHd7wxVVLViKhdUSBcuZmjHn8masuZ3HTp1znZOTGRJtaoRrGol/ORrLaDmE9cr5cKlXJw1gJyIiSihemUDVSrQW2kXM/TbiVR99uMRHU2+YDunoIdF3W4vrQYViqigh0Xbj5zVjC2JSjp/MRdnjvyEdZoBuKU6ZQtrwY9H9Nj/ftKAuT+rgIdFbz1YS94F//0Iexf00KbfT2nmpkO5
OFMANyqshJ9W7PlbI7/+Ta+t2q8CHhYNb15Z1gL/ft4fqVdat4UG6q21B/Ty8r0qUqignomqkIuzBv77CJqBPOCZz3foqx1/6cDf/2jf8VSN+WqPSgZ6q3qIv+2cufF/6n+bE/XLkeRcnCmAG/X6t/u1bv8pHU46r0Onz2nahkMq7uelCkE+kiSfgh5qVrmo/vfDn/rlWIoOnDqndzf8oWrBfqpcrFAuzx75hcXF/7kjgmYgD/KzekqSks+l5/JMALhaIa/LP6pT0i6XW1UsWkgFPD208+hZ2zlHki/oREqaqhT3zZU5AvkBQXMORUVFaeDAgbk9DeRDFkkDm1fW9sQk/f73P7k9HQAuZJHUvV4Z/Xo8RYfPnJckBfoUVPqlDP2Tbn/PQtL5dBX2KZgLs0R+lLlOs6s2d0TQDOQxQ1tWUaXivnpx6a7cngoAF+tVv4xCC3tr8rqDuT0VIN8jaAbykMEtKqtR5SA9NXu7TpxNy+3pAHChnneU0W1lAjVmxT6d+uffUqykc+kq6OmhQgU97c4P9C6oM5Rs4RaxuHhzR7kaNC9btkyNGzdW4cKFVbRoUbVr10779++XJB08eFAWi0ULFy5Us2bNVKhQIUVGRmrTpk12fSxYsEA1atSQ1WpV+fLlNWHCBLvj5cuX1yuvvKLu3bvLz89P5cqV09KlS3XixAnFxMTIz89PtWrVUnx8vO01J0+eVJcuXVS6dGkVKlRINWvW1GeffXbd93L69Gl1795dRYoUUaFChRQdHa29e/fajo8aNUq1a9e2e82kSZNUvnx52/6aNWt0xx13yNfXV4ULF1ajRo30xx9/mLmk+A8b3KKymlYtpn6f/ayjSedzezoAXKjnHWVUr2ygXlmxTydS7H9B/v3kP7p4KUMRJf1sbSUDrCru56W9J1Jv9VSBfCNXg+bU1FQNGjRI8fHx+vbbb+Xh4aH77rtPGRkZtnNeeOEFDRkyRAkJCapataq6dOmiixcvr0O5bds2de7cWQ899JB27NihUaNGacSIEZo5c6bdOBMnTlSjRo30008/qW3btnrkkUfUvXt3devWTT/++KMqVaqk7t27yzAMSdL58+dVt25dffXVV9q5c6f69OmjRx55RFu3br3me+nZs6fi4+O1dOlSbdq0SYZhqE2bNkpPz95v/RcvXlSHDh3UtGlT/fzzz9q0aZP69Okji7sW9uCWGtqyslrXKKGXlu5WatpFBfkWVJBvQbslqIJ8C6pKsK/KFL58h32l4n6qEuyrAG+WYwfykt71y6hxxSJ65/s/dC79kgK9CyjQu4AKel7+eXAuPUOr951Ut9vLKLyEnyoE+eiJhmX12/EU7eM+B9wiHrLIw+KizU1zzbn607RTp052+x999JGKFy+uXbt2yc/v8m/QQ4YMUdu2bSVJo0ePVo0aNbRv3z5Vr15db775pu655x6NGDFCklS1alXt2rVL48aNU8+ePW39tmnTRn379pUkjRw5UtOmTVO9evX0wAMPSJKGDRumBg0a6K+//lJISIhKly6tIUOG2F7fv39/LV++XJ9//rnuuOMOh/exd+9eLV26VBs2bFDDhg0lSZ9++qlCQ0O1ePFi2zjXk5ycrKSkJLVr106VKlWSJIWFhV33NRcuXNCFC/8udp+czFJj/1WdbistSZr2cG279jFf/aqvdvwlSepYp5Qea1zeduzdbrUdzgHg/lpUKy5JGtmqil37tA1/aN3+U5Kk//3wpwxDeiaqggp4WPTzkbP6aEviLZ8rkJ/katC8d+9ejRw5Ulu2bNHff/9tyzAfOnRI4eHhkqRatWrZzi9ZsqQk6fjx46pevbp2796tmJgYuz4bNWqkSZMm6dKlS/L09HToo0SJEpKkmjVrOrQdP35cISEhunTpkl577TV9/vnn+vPPP5WWlqYLFy6oUKGs17/cvXu3ChQooPr169vaihYtqmrVqmn37t3ZuhZBQUHq2bOnWrVqpRYtWqh58+bq3Lmz7T1n
JS4uTqNHj85W/8jb7nx9rdNzPlj/hz5YTzkPkNd1+fgnp+ekZxiasfWwZmw9fAtmBDhyZe2xe+aZc7k8o3379jp16pTef/99bdmyRVu2bJEkpaX9W79VsOC/y+dklipcWb6RHVn1cb1+x40bp7feekvDhg3T6tWrlZCQoFatWtnNyywPDw9b+Uemq0s3ZsyYoU2bNqlhw4aaO3euqlatqs2bN1+zz+HDhyspKcm2JSaSZQAAAHCFXMs0nzx5Unv27NH777+vu+66S5K0fv16U32EhYVpw4YNdm0bNmxQ1apVbVnmnNiwYYNiYmLUrVs3SZeD6d9++82W/c5qHhcvXtSWLVts5RmZ7y/zNcWLF9exY8dkGIYtSE9ISHDoq06dOqpTp46GDx+uBg0aaPbs2brzzjuzHNdqtcpqteb4fQIAAORIPkw151qmuUiRIipatKjee+897du3T999950GDRpkqo/Bgwfr22+/1ZgxY/Tbb79p1qxZeuedd+zqkXOiSpUqWrlypTZu3Kjdu3erb9+++uuva9eEVqlSRTExMXr88ce1fv16bd++Xd26dVPp0qVt5SNRUVE6ceKExo4dq/3792vKlCn65ptvbH0cOHBAw4cP16ZNm/THH39oxYoV2rt3r9O6ZgAAALhergXNHh4emjNnjrZt26aIiAg988wzGjdunKk+brvtNn3++eeaM2eOIiIiNHLkSL388st2NwHmxIsvvqjbbrtNrVq1UlRUlEJCQtShQ4frvmbGjBmqW7eu2rVrpwYNGsgwDH399de2MpCwsDBNnTpVU6ZMUWRkpLZu3WoX3BcqVEi//vqrOnXqpKpVq6pPnz6KjY213cAIAADgLiwu/s8dWYyrC22RZyUnJyswMFB1X/pKBbx9c3s6AFysQqmA3J4CgFsg/VyKFjzRRElJSQoIyN3PfWas8e1Ph+Tr75q5pJ5N1j11yrrF+70SC7gCAADAHIvkskdJuGeimcdoAwAAAM6QaQYAAIAp+XDxDDLNAAAAMMni4s2EdevWqX379ipVqpQsFosWL15sP1WLJcvN7AIUBM0AAADIs1JTUxUZGakpU6Zkefzo0aN220cffSSLxaJOnTqZGofyDAAAAJjiyqXhzPYbHR2t6Ojoax4PCQmx21+yZImaNWumihUrmhqHoBkAAAD5wl9//aWvvvpKs2bNMv1agmYAAACYYnHhknOZ/SYnJ9u1W61WWa3WG+p71qxZ8vf3V8eOHU2/lppmAAAAuJ3Q0FAFBgbatri4uBvu86OPPtLDDz8sb29v068l0wwAAABTbsWSc4mJiXZPBLzRLPP333+vPXv2aO7cuTl6PUEzAAAA3E5AQMBNfYz2hx9+qLp16yoyMjJHrydoBgAAgDlu9HSTlJQU7du3z7Z/4MABJSQkKCgoSGXLlpV0uT563rx5mjBhQo6nRdAMAACAPCs+Pl7NmjWz7Q8aNEiS1KNHD82cOVOSNGfOHBmGoS5duuR4HIJmAAAAmOJO6zRHRUXJMIzrntOnTx/16dPnRqbF6hkAAACAM2SaAQAAYMqtWKfZ3ZBpBgAAAJwg0wwAAABT3GjxjFuGTDMAAADgBJlmAAAAmJMPU81kmgEAAAAnyDQDAADAFHdap/lWIdMMAAAAOEGmGQAAAKawTjMAAAAAB2SaAQAAYEo+XDyDTDMAAADgDJlmAAAAmJMPU81kmgEAAAAnyDQDAADAFNZpBgAAAOCATDMAAABMYZ1mAAAAAA7INAMAAMCUfLh4BplmAAAAwBkyzQAAADAnH6aayTQDAAAATpBpBgAAgCms0wwAAADAAZlmAAAAmMI6zQAAAAAckGkGAACAKflw8QwyzQAAAIAzZJoBAABgTj5MNZNpBgAAAJwg0wwAAABTWKcZAAAAgAMyzQAAADDHhes0u2mimUwzAAAA4AyZZgAAAJiSDxfPINMMAAAAOEOmGQAAAObkw1QzmWYAAADACTLNAAAAMIV1mgEAAAA4INMM
AAAAUywuXKfZZes/3yAyzQAAAIATZJoBAABgSj5cPINMMwAAAOAMQTMAAADMsbh4M2HdunVq3769SpUqJYvFosWLFzucs3v3bt17770KDAyUr6+v6tWrp0OHDpkah6AZAAAAeVZqaqoiIyM1ZcqULI/v379fjRs3VvXq1bVmzRr9/PPPGjFihLy9vU2NQ00zAAAATHGndZqjo6MVHR19zeMvvPCC2rRpo7Fjx9raKlWqZHpeZJoBAADwn5SRkaGvvvpKVatWVatWrRQcHKz69etnWcLhDEEzAAAATLHo37Wab/r2/2MkJyfbbRcuXDA9z+PHjyslJUWvv/66WrdurRUrVui+++5Tx44dtXbtWlN9ETQDAADA7YSGhiowMNC2xcXFme4jIyNDkhQTE6NnnnlGtWvX1nPPPad27dpp+vTppvqiphkAAACm3Ip1mhMTExUQEGBrt1qtpvsqVqyYChQooPDwcLv2sLAwrV+/3lRfBM0AAABwOwEBAXZBc054eXmpXr162rNnj137b7/9pnLlypnqi6AZAAAApmTWH7uqbzNSUlK0b98+2/6BAweUkJCgoKAglS1bVkOHDtWDDz6oJk2aqFmzZlq2bJm++OILrVmzxtQ4BM0AAAAwyX0epB0fH69mzZrZ9gcNGiRJ6tGjh2bOnKn77rtP06dPV1xcnAYMGKBq1appwYIFaty4salxCJoBAACQZ0VFRckwjOue07t3b/Xu3fuGxiFoBgAAgCnuVJ5xq7DkHAAAAOAEmWYAAACY4j4VzbcOmWYAAADACTLNAAAAMIWaZgAAAAAOyDQDAADAFMv//+eqvt0RmWYAAADACTLNAAAAMCcfLp9BphkAAABwgkwzAAAATMmHiWYyzQAAAIAzZJoBAABgCus0AwAAAHBAphkAAACmsE4zAAAAAAdkmgEAAGBOPlw+g0wzAAAA4ASZZgAAAJiSDxPNZJoBAAAAZ8g0AwAAwBTWaQYAAADggEwzAAAATHLdOs3uWtVMphkAAABwgkwzAAAATKGmGQAAAIADgmYAAADACYJmAAAAwAlqmgEAAGAKNc0AAAAAHJBpBgAAgCkWF67T7Lr1n28MmWYAAADACTLNAAAAMIWaZgAAAAAOyDQDAADAFMv/b67q2x2RaQYAAACcINMMAAAAc/JhqplMMwAAAOAEmWYAAACYwjrNAAAAAByQaQYAAIAprNMMAAAAwAGZZgAAAJiSDxfPINMMAAAAOEOmGQAAAObkw1QzmWYAAADkWevWrVP79u1VqlQpWSwWLV682O54z549ZbFY7LbWrVubHodMMwAAAExxp3WaU1NTFRkZqd69e6tjx45ZntO6dWvNmDHDtm+1Wk3Pi6AZAAAAeVZ0dLSio6Ove47ValVISMgNjUN5BgAAAEzJXKfZVdvNtmbNGgUHB6tatWp68skndfLkSdN9kGn+DzEMQ5J06cI/uTwTALdC+jnyHkB+kH4uVdK/P+fdQXJyssv7vnoMq9Wao7KK1q1bq2PHjqpQoYL279+v559/XtHR0dq0aZM8PT2z3Y/FcKd/AdyQw4cPKzQ0NLenAQAAXCAxMVFlypTJ1TmcP39eFSpU0LFjx1w6jp+fn1JSUuzaXnrpJY0aNeq6r7NYLFq0aJE6dOhwzXN+//13VapUSatWrdI999yT7TmRaf4PKVWqlBITE+Xv7y+Luz6DEjddcnKyQkNDlZiYqICAgNyeDgAX4vOePxmGobNnz6pUqVK5PRV5e3vrwIEDSktLc+k4hmE4xDI5yTJnpWLFiipWrJj27dtH0JxfeXh45PpvoMg9AQEB/BAF8gk+7/lPYGBgbk/BxtvbW97e3rk9jRw7fPiwTp48qZIlS5p6HUEzAAAA8qyUlBTt27fPtn/gwAElJCQoKChIQUFBGj16tDp16qSQkBDt379fzz77rCpXrqxWrVqZGoegGQAAAHlWfHy8mjVrZtsfNGiQJKlHjx6aNm2afv75Z82aNUtnzpxR
qVKl1LJlS40ZM8Z0uQdBM5DHWa1WvfTSSzet1guA++LzDjiKioq67soiy5cvvynjsHoGAAAA4ASLfAIAAABOEDQDAAAAThA0A3BQvnx5TZo0KbenAeAGRUVFaeDAgbk9DeA/gaAZAAAAcIKgGciDXP0kJgAAYI+gGbgFoqKiNGDAAD377LMKCgpSSEiIRo0aZTt+6NAhxcTEyM/PTwEBAercubP++usv2/FRo0apdu3a+uCDD1ShQgXbk5gsFoveffddtWvXToUKFVJYWJg2bdqkffv2KSoqSr6+vmrYsKH2799v62v//v2KiYlRiRIl5Ofnp3r16mnVqlW37FoAec2yZcvUuHFjFS5cWEWLFlW7du1sn6mDBw/KYrFo4cKFatasmQoVKqTIyEht2rTJro8FCxaoRo0aslqtKl++vCZMmGB3vHz58nrllVfUvXt3+fn5qVy5clq6dKlOnDhh+95Qq1YtxcfH215z8uRJdenSRaVLl1ahQoVUs2ZNffbZZ9d9L6dPn1b37t1VpEgRFSpUSNHR0dq7d6/teOb3mitNmjRJ5cuXt+2vWbNGd9xxh3x9fVW4cGE1atRIf/zxh5lLCuRJBM3ALTJr1iz5+vpqy5YtGjt2rF5++WWtXLlSGRkZiomJ0alTp7R27VqtXLlSv//+ux588EG71+/bt08LFizQwoULlZCQYGsfM2aMunfvroSEBFWvXl1du3ZV3759NXz4cMXHx8swDPXr1892fkpKitq0aaNvv/1WP/30k1q3bq327dvr0KFDt+pSAHlKamqqBg0apPj4eH377bfy8PDQfffdp4yMDNs5L7zwgoYMGaKEhARVrVpVXbp00cWLFyVJ27ZtU+fOnfXQQw9px44dGjVqlEaMGKGZM2fajTNx4kQ1atRIP/30k9q2batHHnlE3bt3V7du3fTjjz+qUqVK6t69u2092vPnz6tu3br66quvtHPnTvXp00ePPPKItm7des330rNnT8XHx2vp0qXatGmTDMNQmzZtlJ6enq1rcfHiRXXo0EFNmzbVzz//rE2bNqlPnz6yWCwmryqQBxkAXK5p06ZG48aN7drq1atnDBs2zFixYoXh6elpHDp0yHbsl19+MSQZW7duNQzDMF566SWjYMGCxvHjx+36kGS8+OKLtv1NmzYZkowPP/zQ1vbZZ58Z3t7e151fjRo1jMmTJ9v2y5UrZ0ycONH0+wTygxMnThiSjB07dhgHDhwwJBkffPCB7Xjm53f37t2GYRhG165djRYtWtj1MXToUCM8PNy2X65cOaNbt262/aNHjxqSjBEjRtjaMj/fR48evebc2rZtawwePNi237RpU+Ppp582DMMwfvvtN0OSsWHDBtvxv//+2/Dx8TE+//xzwzAuf6+JjIy063PixIlGuXLlDMMwjJMnTxqSjDVr1lzvEgH/SWSagVukVq1advslS5bU8ePHtXv3boWGhio0NNR2LDw8XIULF9bu3bttbeXKlVPx4sWv22+JEiUkSTVr1rRrO3/+vJKTkyVdzjQPGTJEYWFhKly4sPz8/LR7924yzcA17N27V126dFHFihUVEBBgK1W48jNz5eewZMmSkqTjx49Lknbv3q1GjRrZ9dmoUSPt3btXly5dyrKPa32Wr+z30qVLGjNmjGrWrKmgoCD5+flp+fLl1/ws7969WwUKFFD9+vVtbUWLFlW1atXsvtdcT1BQkHr27KlWrVqpffv2euutt3T06NFsvRbI6wiagVukYMGCdvsWi8Xuz7vO+Pr6Ou0380+kWbVljjVkyBAtWrRIr732mr7//nslJCSoZs2a3FwIXEP79u116tQpvf/++9qyZYu2bNkiyf6G3Ot95rLL7Gd53LhxeuuttzRs2DCtXr1aCQkJatWq1Q19lj08PBweR3x16caMGTO0adMmNWzYUHPnzlXVqlW1efPmHI8J5BUEzUAuCwsLU2JiohITE21tu3bt0pkzZxQeHn7Tx9uwYYN69uyp
++67TzVr1lRISIgOHjx408cB/gtOnjypPXv26MUXX9Q999yjsLAwnT592lQfYWFh2rBhg13bhg0bVLVqVXl6euZ4bhs2bFBMTIy6deumyMhIVaxYUb/99tt153Hx4kVb0C/9+/4yv9cUL15cx44dswucr7yHIlOdOnU0fPhwbdy4UREREZo9e3aO3weQVxA0A7msefPmqlmzph5++GH9+OOP2rp1q7p3766mTZvq9ttvv+njValSxXYz4fbt29W1a1fTGTEgvyhSpIiKFi2q9957T/v27dN3332nQYMGmepj8ODB+vbbbzVmzBj99ttvmjVrlt555x0NGTLkhuZWpUoVrVy5Uhs3btTu3bvVt29fu1V3sjo/JiZGjz/+uNavX6/t27erW7duKl26tGJiYiRdXunnxIkTGjt2rPbv368pU6bom2++sfVx4MABDR8+XJs2bdIff/yhFStWaO/evQoLC7uh9wLkBQTNQC6zWCxasmSJihQpoiZNmqh58+aqWLGi5s6d65Lx3nzzTRUpUkQNGzZU+/bt1apVK912220uGQvI6zw8PDRnzhxt27ZNEREReuaZZzRu3DhTfdx22236/PPPNWfOHEVERGjkyJF6+eWX1bNnzxua24svvqjbbrtNrVq1UlRUlEJCQtShQ4frvmbGjBmqW7eu2rVrpwYNGsgwDH399de2MpCwsDBNnTpVU6ZMUWRkpLZu3WoX3BcqVEi//vqrOnXqpKpVq6pPnz6KjY1V3759b+i9AHmBxbi6eAkAAACAHTLNAAAAgBMEzQAAAIATBM0AAACAEwTNAAAAgBMEzQAAAIATBM0AAACAEwTNAAAAgBMEzQAAAIATBM0AcJP17NnT7slsUVFRGjhw4C2fx5o1a2SxWHTmzBmXjXH1e82JWzFPALhRBM0A8oWePXvKYrHIYrHIy8tLlStX1ssvv6yLFy+6fOyFCxdqzJgx2Tr3VgeQ5cuX16RJk27JWACQlxXI7QkAwK3SunVrzZgxQxcuXNDXX3+t2NhYFSxYUMOHD3c4Ny0tTV5eXjdl3KCgoJvSDwAg95BpBpBvWK1WhYSEqFy5cnryySfVvHlzLV26VNK/ZQavvvqqSpUqpWrVqkmSEhMT1blzZxUuXFhBQUGKiYnRwYMHbX1eunRJgwYNUuHChVW0aFE9++yzMgzDbtyryzMuXLigYcOGKTQ0VFarVZUrV9aHH36ogwcPqlmzZpKkIkWKyGKxqGfPnpKkjIwMxcXFqUKFCvLx8VFkZKTmz59vN87XX3+tqlWrysfHR82aNbObZ05cunRJjz76qG3MatWq6a233sry3NGjR6t48eIKCAjQE088obS0NNux7MwdANwdmWYA+ZaPj49Onjxp2//2228VEBCglStXSpLS09PVqlUrNWjQQN9//70KFCigV155Ra1bt9bPP/8sLy8vTZgwQTNnztRHH32ksLAwTZgwQYsWLdLdd999zXG7d++uTZs26e2331ZkZKQOHDigv//+W6GhoVqwYIE6deqkPXv2KCAgQD4+PpKkuLg4ffLJJ5o+fbqqVKmidevWqVu3bipevLiaNm2qxMREdezYUbGxserTp4/i4+M1ePDgG7o+GRkZKlOmjObNm6eiRYtq48aN6tOnj0qWLKnOnTvbXTdvb2+tWbNGBw8eVK9evVS0aFG9+uqr2Zo7AOQJBgDkAz169DBiYmIMwzCMjIwMY+XKlYbVajWGDBliO16iRAnjwoULttf873//M6pVq2ZkZGTY2i5cuGD4+PgYy5cvNwzDMEqWLGmMHTvWdjw9Pd0oU6aMbSzDMIymTZsaTz/9tGEYhrFnzx5DkrFy5cos57l69WpDknH69Glb2/nz541ChQoZGzdutDv30UcfNbp06WIYhmEMHz7cCA8Ptzs+bNgwh76uVq5cOWPixInXPH612NhYo1OnTrb9Hj16GEFBQUZqaqqtbdq0aYafn59x6dKlbM09q/cMAO6GTDOAfOPL
L7+Un5+f0tPTlZGRoa5du2rUqFG24zVr1rSrY96+fbv27dsnf39/u37Onz+v/fv3KykpSUePHlX9+vVtxwoUKKDbb7/doUQjU0JCgjw9PU1lWPft26d//vlHLVq0sGtPS0tTnTp1JEm7d++2m4ckNWjQINtjXMuUKVP00Ucf6dChQzp37pzS0tJUu3Ztu3MiIyNVqFAhu3FTUlKUmJiolJQUp3MHgLyAoBlAvtGsWTNNmzZNXl5eKlWqlAoUsP8W6Ovra7efkpKiunXr6tNPP3Xoq3jx4jmaQ2a5hRkpKSmSpK+++kqlS5e2O2a1WnM0j+yYM2eOhgwZogkTJqhBgwby9/fXuHHjtGXLlmz3kVtzB4CbjaAZQL7h6+urypUrZ/v82267TXPnzlVwcLACAgKyPKdkyZLasmWLmjRpIkm6ePGitm3bpttuuy3L82vWrKmMjAytXbtWzZs3dziemem+dOmSrS08PFxWq1WHDh26ZoY6LCzMdlNjps2bNzt/k9exYcMGNWzYUE899ZStbf/+/Q7nbd++XefOnbP9QrB582b5+fkpNDRUQUFBTucOAHkBq2cAwDU8/PDDKlasmGJiYvT999/rwIEDWrNmjQYMGKDDhw9Lkp5++mm9/vrrWrx4sX799Vc99dRT111juXz58urRo4d69+6txYsX2/r8/PPPJUnlypWTxWLRl19+qRMnTiglJUX+/v4aMmSInnnmGc2aNUv79+/Xjz/+qMmTJ2vWrFmSpCeeeEJ79+7V0KFDtWfPHs2ePVszZ87M1vv8888/lZCQYLedPn1aVapUUXx8vJYvX67ffvtNI0aM0A8//ODw+rS0ND366KPatWuXvv76a7300kvq16+fPDw8sjV3AMgLCJoB4BoKFSqkdevWqWzZsurYsaPCwsL06KOP6vz587bM8+DBg/XII4+oR48ethKG++6777r9Tps2Tffff7+eeuopVa9eXY8//rhSU1MlSaVLl9bo0aP13HPPqUSJEurXr58kacyYMRoxYoTi4uIUFham1q1b66uvvlKFChUkSWXLltWCBQu0ePFiRUZGavr06Xrttdey9T7Hjx+vOnXq2G1fffWV+vbtq44dO+rBBx9U/fr1dfLkSbusc6Z77rlHVapUUZMmTfTggw/q3nvvtasVdzZ3AMgLLMa17lYBAAAAIIlMMwAAAOAUQTMAAADgBEEzAAAA4ARBMwAAAOAEQTMAAADgBEEzAAAA4ARBMwAAAOAEQTMAAADgBEEzAAAA4ARBMwAAAOAEQTMAAADgBEEzAAAA4MT/Ab1FfUrmZeleAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Set the random seed\n", + "\n", + "random.seed(42)\n", + "\n", + "# Run our TestRunner\n", + "Tester.test(random_situation_assessor, train)" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "3cef84a9-4932-48fd-9f7a-51cfc06e3216", + "metadata": {}, + "outputs": [], + "source": [ + "# Now some janky code to pluck out the Item Weight\n", + "# Don't worry too much about this: spoiler alert, it's not going to be much use in training!\n", + "\n", + "def get_weight(item):\n", + " weight_str = item.features.get('Item Weight')\n", + " if weight_str:\n", + " parts = weight_str.split(' ')\n", + " amount = float(parts[0])\n", + " unit = parts[1].lower()\n", + " if unit==\"pounds\":\n", + " return amount\n", + " elif unit==\"ounces\":\n", + " return amount / 16\n", + " elif unit==\"grams\":\n", + " return amount / 453.592\n", + " elif unit==\"milligrams\":\n", + " return amount / 453592\n", + " elif unit==\"kilograms\":\n", + " return amount / 0.453592\n", + " elif unit==\"hundredths\" and parts[2].lower()==\"pounds\":\n", + " return amount / 100\n", + " else:\n", + " print(weight_str)\n", + " return None" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "f4848b4a-3c5a-4168-83a5-57a1f3ff270d", + "metadata": {}, + "outputs": [], + "source": [ + "weights = [get_weight(t) for t in train]\n", + "weights = [w for w in weights if w]" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "0cd11cc8-f16e-4991-b531-482189ddc4b6", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "13.57055237029551" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "average_weight = sum(weights)/len(weights)\n", + "average_weight" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "efe8ec7f-9777-464f-a809-b06b7033bdb2", + "metadata": {}, + "outputs": [], + "source": [ 
+ "def get_weight_with_default(item):\n", + " weight = get_weight(item)\n", + " return weight or average_weight" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "id": "c2659fef-a455-431a-9a0e-59342b80084b", + "metadata": {}, + "outputs": [], + "source": [ + "def get_rank(item):\n", + " rank_dict = item.features.get(\"Best Sellers Rank\")\n", + " if rank_dict:\n", + " ranks = rank_dict.values()\n", + " return sum(ranks)/len(ranks)\n", + " return None" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "20b9b5be-30bc-4d3a-8492-fbae119421a0", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "380638.258828069" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ranks = [get_rank(t) for t in train]\n", + "ranks = [r for r in ranks if r]\n", + "average_rank = sum(ranks)/len(ranks)\n", + "average_rank" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "081e646a-ea50-4ec3-9512-6d5f96f8aef6", + "metadata": {}, + "outputs": [], + "source": [ + "def get_rank_with_default(item):\n", + " rank = get_rank(item)\n", + " return rank or average_rank" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "id": "afd5daf7-cb2b-47af-bf17-dd71a9db65d0", + "metadata": {}, + "outputs": [], + "source": [ + "def get_text_length(item):\n", + " return len(item.test_prompt())" + ] + }, + { + "cell_type": "code", + "execution_count": 48, + "id": "2c3f4b85-eccf-46d5-b8d4-14cff03fc199", + "metadata": {}, + "outputs": [], + "source": [ + "def country_of_origin_is_china(item):\n", + " if item.features.get('Country of Origin') == 'China':\n", + " return 1\n", + " else:\n", + " return 0" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "id": "85c89012-a922-401b-8a3b-94af641bf27a", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[('HP', 5500),\n", + " ('Power Stop', 3594),\n", + " ('Dell', 3254),\n", + " ('Detroit Axle', 
2734),\n", + " ('Lenovo', 2441),\n", + " ('Dorman', 2116),\n", + " ('SAMSUNG', 1772),\n", + " ('BUYAUTOPARTS!', 1715),\n", + " ('ACDelco', 1609),\n", + " ('Evan Fischer', 1415),\n", + " ('ASUS', 1387),\n", + " ('Sony', 1282),\n", + " ('Canon', 1201),\n", + " ('Callahan BRAKE PARTS', 1154),\n", + " ('Kohler', 1152),\n", + " ('CURT', 1120),\n", + " ('R1 Concepts', 1054),\n", + " ('Rareelectrical', 966),\n", + " ('Coverking', 941),\n", + " ('Garage-Pro', 888),\n", + " ('Kingston Brass', 852),\n", + " ('Spectra Premium', 834),\n", + " ('Moen', 829),\n", + " ('Auto Dynasty', 824),\n", + " ('WeatherTech', 822),\n", + " ('DELTA FAUCET', 811),\n", + " ('Generic', 793),\n", + " ('Apple', 783),\n", + " ('Cardone', 765),\n", + " ('APS', 763),\n", + " ('K&N', 758),\n", + " ('GM', 743),\n", + " ('Walker', 732),\n", + " ('EBC Brakes', 717),\n", + " ('AKKON', 646),\n", + " ('SPEC-D TUNING', 626),\n", + " ('TYC', 626),\n", + " ('Covercraft', 618),\n", + " ('Intel', 610),\n", + " ('A-Premium', 607)]" + ] + }, + "execution_count": 29, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# investigate the brands\n", + "\n", + "brands = Counter()\n", + "for t in train:\n", + " brand = t.features.get(\"Brand\")\n", + " if brand:\n", + " brands[brand]+=1\n", + "\n", + "# Look at most common 40 brands\n", + "\n", + "brands.most_common(40)" + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "id": "386dde54-e028-4a6d-b291-cce889ac1fa3", + "metadata": {}, + "outputs": [], + "source": [ + "TOP_ELECTRONICS_BRANDS = [\"hp\", \"dell\", \"lenovo\", \"samsung\", \"asus\", \"sony\", \"canon\", \"apple\", \"intel\"]\n", + "def is_top_electronics_brand(item):\n", + " brand = item.features.get(\"Brand\")\n", + " return brand and brand.lower() in TOP_ELECTRONICS_BRANDS" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "id": "c31c9c59-9d0d-47a8-a046-f20ed8d38d4c", + "metadata": {}, + "outputs": [], + "source": [ + "def get_features(item):\n", + " 
return {\n", + " \"weight\": get_weight_with_default(item),\n", + " \"rank\": get_rank_with_default(item),\n", + " \"text_length\": get_text_length(item),\n", + " \"is_top_electronics_brand\": 1 if is_top_electronics_brand(item) else 0,\n", + " \"is_chinese\": country_of_origin_is_china(item)\n", + " }" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "id": "88850855-f5bd-4be2-9d7c-75bf8a21609b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'weight': 56.0,\n", + " 'rank': 174503.5,\n", + " 'text_length': 840,\n", + " 'is_top_electronics_brand': 0,\n", + " 'is_chinese': 1}" + ] + }, + "execution_count": 53, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Look at features in a training item\n", + "get_features(train[9000])" + ] + }, + { + "cell_type": "code", + "execution_count": 54, + "id": "ee9b5298-68b7-497d-8b2e-875287bb25b2", + "metadata": {}, + "outputs": [], + "source": [ + "# A utility function to convert our features into a pandas dataframe\n", + "\n", + "def list_to_dataframe(items):\n", + " features = [get_features(item) for item in items]\n", + " df = pd.DataFrame(features)\n", + " df['price'] = [item.price for item in items]\n", + " return df\n", + "\n", + "train_df = list_to_dataframe(train)\n", + "test_df = list_to_dataframe(test[:250])" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "id": "cc1d68e0-ab33-40f4-9334-461d426af25c", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "weight: 0.2663098766460396\n", + "rank: 5.08557619705454e-05\n", + "text_length: 0.011685295616100889\n", + "is_top_electronics_brand: 195.12510096469674\n", + "is_chinese: -17.577347874967053\n", + "Intercept: 182.73221079716183\n", + "Mean Squared Error: 33250.5163297445\n", + "R-squared Score: 0.08496658571890203\n" + ] + } + ], + "source": [ + "# Traditional Linear Regression!\n", + "\n", + "np.random.seed(42)\n", + "\n", + "# Separate 
features and target\n", + "feature_columns = ['weight', 'rank', 'text_length', 'is_top_electronics_brand','is_chinese']\n", + "\n", + "X_train = train_df[feature_columns]\n", + "y_train = train_df['price']\n", + "X_test = test_df[feature_columns]\n", + "y_test = test_df['price']\n", + "\n", + "# Train a Linear Regression\n", + "model = LinearRegression()\n", + "model.fit(X_train, y_train)\n", + "\n", + "for feature, coef in zip(feature_columns, model.coef_):\n", + " print(f\"{feature}: {coef}\")\n", + "print(f\"Intercept: {model.intercept_}\")\n", + "\n", + "# Predict the test set and evaluate\n", + "y_pred = model.predict(X_test)\n", + "mse = mean_squared_error(y_test, y_pred)\n", + "r2 = r2_score(y_test, y_pred)\n", + "\n", + "print(f\"Mean Squared Error: {mse}\")\n", + "print(f\"R-squared Score: {r2}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 57, + "id": "6561c3c7-ac7f-458b-983c-4a164b9d02c3", + "metadata": {}, + "outputs": [], + "source": [ + "# Function to predict price for a new item\n", + "\n", + "def linear_regression_pricer(item):\n", + " features = get_features(item)\n", + " features_df = pd.DataFrame([features])\n", + " return model.predict(features_df)[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 58, + "id": "9bf2caa4-657a-4fc6-9dcb-bed7eaf8dd65", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[93m1: Guess: $231.91 Truth: $374.41 Error: $142.50 SLE: 0.23 Item: OEM AC Compressor w/A/C Repair Kit For F...\u001b[0m\n", + "\u001b[92m2: Guess: $199.63 Truth: $225.11 Error: $25.48 SLE: 0.01 Item: Motorcraft YB3125 Fan Clutch\u001b[0m\n", + "\u001b[91m3: Guess: $177.06 Truth: $61.68 Error: $115.38 SLE: 1.09 Item: Dorman 603-159 Front Washer Fluid Reserv...\u001b[0m\n", + "\u001b[93m4: Guess: $390.63 Truth: $599.99 Error: $209.36 SLE: 0.18 Item: HP Premium 17.3-inch HD Plus Touchscreen...\u001b[0m\n", + "\u001b[91m5: Guess: $214.60 Truth: $16.99 Error: $197.61 SLE: 6.17 
Item: 5-Position Super Switch Pickup Selector ...\u001b[0m\n", + "\u001b[91m6: Guess: $214.71 Truth: $31.99 Error: $182.72 SLE: 3.53 Item: Horror Bookmarks, Resin Horror Bookmarks...\u001b[0m\n", + "\u001b[91m7: Guess: $194.88 Truth: $101.79 Error: $93.09 SLE: 0.42 Item: SK6241 - Stinger 4 Gauge 6000 Series Pow...\u001b[0m\n", + "\u001b[93m8: Guess: $193.41 Truth: $289.00 Error: $95.59 SLE: 0.16 Item: Godox ML60Bi LED Light Kit, Handheld LED...\u001b[0m\n", + "\u001b[91m9: Guess: $206.46 Truth: $635.86 Error: $429.40 SLE: 1.26 Item: Randall RG75DG3PLUS G3 Plus 100-Watt Com...\u001b[0m\n", + "\u001b[91m10: Guess: $185.58 Truth: $65.99 Error: $119.59 SLE: 1.05 Item: HOLDWILL 6 Pack LED Shop Light, 4FT 24W ...\u001b[0m\n", + "\u001b[92m11: Guess: $217.61 Truth: $254.21 Error: $36.60 SLE: 0.02 Item: Viking Horns V103C/1005ATK 3 Gallon Air ...\u001b[0m\n", + "\u001b[91m12: Guess: $220.23 Truth: $412.99 Error: $192.76 SLE: 0.39 Item: CURT 70110 Custom Tow Bar Base Plate Bra...\u001b[0m\n", + "\u001b[92m13: Guess: $214.48 Truth: $205.50 Error: $8.98 SLE: 0.00 Item: 10-Pack Solar HAMMERED BRONZE Finish Pos...\u001b[0m\n", + "\u001b[93m14: Guess: $194.24 Truth: $248.23 Error: $53.99 SLE: 0.06 Item: COSTWAY Electric Tumble Dryer, Sliver\u001b[0m\n", + "\u001b[91m15: Guess: $180.52 Truth: $399.00 Error: $218.48 SLE: 0.62 Item: FREE SIGNAL TV Transit 32\" 12 Volt DC Po...\u001b[0m\n", + "\u001b[91m16: Guess: $209.44 Truth: $373.94 Error: $164.50 SLE: 0.33 Item: Bilstein 5100 Monotube Gas Shock Set com...\u001b[0m\n", + "\u001b[91m17: Guess: $187.69 Truth: $92.89 Error: $94.80 SLE: 0.49 Item: Sangean K-200 Multi-Function Upright AM/...\u001b[0m\n", + "\u001b[91m18: Guess: $194.87 Truth: $51.99 Error: $142.88 SLE: 1.71 Item: Charles Leonard Magnetic Lapboard Class ...\u001b[0m\n", + "\u001b[92m19: Guess: $192.57 Truth: $179.00 Error: $13.57 SLE: 0.01 Item: Gigabyte AMD Radeon HD 7870 2 GB GDDR5 D...\u001b[0m\n", + "\u001b[91m20: Guess: $209.54 Truth: $19.42 Error: $190.12 SLE: 
5.44 Item: 3dRose LLC 8 x 8 x 0.25 Inches Bull Terr...\u001b[0m\n", + "\u001b[91m21: Guess: $190.30 Truth: $539.95 Error: $349.65 SLE: 1.08 Item: ROKINON 85mm F1.4 Auto Focus Full Frame ...\u001b[0m\n", + "\u001b[92m22: Guess: $182.12 Truth: $147.67 Error: $34.45 SLE: 0.04 Item: AUTOSAVER88 Headlight Assembly Compatibl...\u001b[0m\n", + "\u001b[91m23: Guess: $191.45 Truth: $24.99 Error: $166.46 SLE: 4.01 Item: ASI NAUTICAL 2.5 Inches Opera Glasses Bi...\u001b[0m\n", + "\u001b[93m24: Guess: $198.91 Truth: $149.00 Error: $49.91 SLE: 0.08 Item: Behringer TUBE OVERDRIVE TO100 Authentic...\u001b[0m\n", + "\u001b[91m25: Guess: $199.47 Truth: $16.99 Error: $182.48 SLE: 5.81 Item: Fun Express Insect Finger Puppets - 24 f...\u001b[0m\n", + "\u001b[91m26: Guess: $177.98 Truth: $7.99 Error: $169.99 SLE: 8.95 Item: WAFJAMF Roller Stamp Identity Theft Stam...\u001b[0m\n", + "\u001b[92m27: Guess: $201.87 Truth: $199.99 Error: $1.88 SLE: 0.00 Item: Capulina Tiffany Floor Lamp 2-Light 16\" ...\u001b[0m\n", + "\u001b[91m28: Guess: $387.31 Truth: $251.45 Error: $135.86 SLE: 0.19 Item: Apple Watch Series 6 (GPS, 44mm) - Space...\u001b[0m\n", + "\u001b[92m29: Guess: $213.04 Truth: $231.62 Error: $18.58 SLE: 0.01 Item: ICON 01725 Tandem Axle Fender Skirt FS17...\u001b[0m\n", + "\u001b[93m30: Guess: $195.71 Truth: $135.00 Error: $60.71 SLE: 0.14 Item: SanDisk 128GB Ultra (10 Pack) MicroSD Cl...\u001b[0m\n", + "\u001b[91m31: Guess: $213.92 Truth: $356.62 Error: $142.70 SLE: 0.26 Item: Velvac 2020,L,C/Hr,W,E2003,102\",Bk - 715...\u001b[0m\n", + "\u001b[92m32: Guess: $243.20 Truth: $257.99 Error: $14.79 SLE: 0.00 Item: TCMT Passenger Backrest Sissy Bar & Lugg...\u001b[0m\n", + "\u001b[91m33: Guess: $211.23 Truth: $27.99 Error: $183.24 SLE: 3.96 Item: Alnicov 63.5MM Brass Tremolo Block,Tremo...\u001b[0m\n", + "\u001b[93m34: Guess: $232.46 Truth: $171.20 Error: $61.26 SLE: 0.09 Item: Subaru Forester Outback Legacy OEM Engin...\u001b[0m\n", + "\u001b[93m35: Guess: $279.35 Truth: $225.00 
Error: $54.35 SLE: 0.05 Item: Richmond Auto Upholstery - 2012 Dodge Ra...\u001b[0m\n", + "\u001b[91m36: Guess: $216.75 Truth: $105.00 Error: $111.75 SLE: 0.52 Item: AP-39 Automotive Paint Primer Grey 2K Ur...\u001b[0m\n", + "\u001b[93m37: Guess: $185.95 Truth: $299.99 Error: $114.04 SLE: 0.23 Item: Road Top Wireless Carplay Retrofit Kit D...\u001b[0m\n", + "\u001b[91m38: Guess: $252.73 Truth: $535.09 Error: $282.36 SLE: 0.56 Item: Gibson Performance Exhaust 5658 Aluminiz...\u001b[0m\n", + "\u001b[91m39: Guess: $193.28 Truth: $12.33 Error: $180.95 SLE: 7.18 Item: Bella Tunno Happy Links - Baby Montessor...\u001b[0m\n", + "\u001b[91m40: Guess: $196.66 Truth: $84.99 Error: $111.67 SLE: 0.69 Item: CANMORE H300 Handheld GPS Golf Device, S...\u001b[0m\n", + "\u001b[91m41: Guess: $195.97 Truth: $15.99 Error: $179.98 SLE: 6.00 Item: DCPOWER AC Adapter Compatible Replacemen...\u001b[0m\n", + "\u001b[91m42: Guess: $203.30 Truth: $62.44 Error: $140.86 SLE: 1.37 Item: Sharp, VX2128V, Commercial Desktop Calcu...\u001b[0m\n", + "\u001b[91m43: Guess: $180.25 Truth: $82.99 Error: $97.26 SLE: 0.59 Item: Melissa & Doug Lifelike Plush Stork Gian...\u001b[0m\n", + "\u001b[93m44: Guess: $393.99 Truth: $599.95 Error: $205.96 SLE: 0.18 Item: Sony SSCS8 2-Way 3-Driver Center Channel...\u001b[0m\n", + "\u001b[91m45: Guess: $388.24 Truth: $194.99 Error: $193.25 SLE: 0.47 Item: ASUS Chromebook CX1, 14\" Full HD NanoEdg...\u001b[0m\n", + "\u001b[93m46: Guess: $207.73 Truth: $344.95 Error: $137.22 SLE: 0.26 Item: FiiO X7 32GB Hi-Res Lossless Music Playe...\u001b[0m\n", + "\u001b[91m47: Guess: $193.93 Truth: $37.99 Error: $155.94 SLE: 2.59 Item: TORRO Leather Case Compatible with iPhon...\u001b[0m\n", + "\u001b[92m48: Guess: $204.24 Truth: $224.35 Error: $20.11 SLE: 0.01 Item: Universal Air Conditioner KT 1031 A/C Co...\u001b[0m\n", + "\u001b[91m49: Guess: $251.12 Truth: $814.00 Error: $562.88 SLE: 1.38 Item: Street Series Stainless Performance Cat-...\u001b[0m\n", + "\u001b[92m50: Guess: 
$406.87 Truth: $439.88 Error: $33.01 SLE: 0.01 Item: Lenovo IdeaPad 3 14-inch Laptop, 14.0-in...\u001b[0m\n", + "\u001b[92m51: Guess: $299.60 Truth: $341.43 Error: $41.83 SLE: 0.02 Item: Access Bed Covers TonnoSport 22050219 - ...\u001b[0m\n", + "\u001b[91m52: Guess: $208.07 Truth: $46.78 Error: $161.29 SLE: 2.18 Item: G.I. JOE Hasbro 3 3/4\" Wave 5 Action Fig...\u001b[0m\n", + "\u001b[93m53: Guess: $213.10 Truth: $171.44 Error: $41.66 SLE: 0.05 Item: T&S Brass B-0232-BST Double Pantry Fauce...\u001b[0m\n", + "\u001b[91m54: Guess: $218.65 Truth: $458.00 Error: $239.35 SLE: 0.54 Item: ZTUOAUMA Fuel Injection Pump 3090942 309...\u001b[0m\n", + "\u001b[91m55: Guess: $387.86 Truth: $130.75 Error: $257.11 SLE: 1.17 Item: 2AP18AA#ABA Hp Prime Graphing Calculator...\u001b[0m\n", + "\u001b[91m56: Guess: $189.60 Truth: $83.81 Error: $105.79 SLE: 0.66 Item: Lowrance 000-0119-83 Nmea 2000 25' Exten...\u001b[0m\n", + "\u001b[91m57: Guess: $213.09 Truth: $386.39 Error: $173.30 SLE: 0.35 Item: Jeep Genuine Accessories 82213051 Hood L...\u001b[0m\n", + "\u001b[92m58: Guess: $191.27 Truth: $169.00 Error: $22.27 SLE: 0.02 Item: GODOX CB-06 Hard Carrying Case with Whee...\u001b[0m\n", + "\u001b[91m59: Guess: $267.56 Truth: $17.95 Error: $249.61 SLE: 7.03 Item: Au-Tomotive Gold, INC. 
Ford Black Valet ...\u001b[0m\n", + "\u001b[92m60: Guess: $216.01 Truth: $269.00 Error: $52.99 SLE: 0.05 Item: Snailfly Black Roof Rack Rail + Cross Ba...\u001b[0m\n", + "\u001b[91m61: Guess: $184.83 Truth: $77.77 Error: $107.06 SLE: 0.74 Item: KING SHA Anti Glare LED Track Lighting H...\u001b[0m\n", + "\u001b[91m62: Guess: $215.41 Truth: $88.99 Error: $126.42 SLE: 0.77 Item: APS Compatible with Chevy Silverado 1500...\u001b[0m\n", + "\u001b[91m63: Guess: $216.87 Truth: $364.41 Error: $147.54 SLE: 0.27 Item: Wilwood Engineering 14011291R Brake Cali...\u001b[0m\n", + "\u001b[91m64: Guess: $220.39 Truth: $127.03 Error: $93.36 SLE: 0.30 Item: ACDelco Gold 336-1925A Starter, Remanufa...\u001b[0m\n", + "\u001b[91m65: Guess: $253.58 Truth: $778.95 Error: $525.37 SLE: 1.25 Item: UWS EC10783 69-Inch Matte Black Heavy-Wa...\u001b[0m\n", + "\u001b[91m66: Guess: $389.61 Truth: $206.66 Error: $182.95 SLE: 0.40 Item: Dell Latitude E5440 14in Business Laptop...\u001b[0m\n", + "\u001b[91m67: Guess: $194.53 Truth: $35.94 Error: $158.59 SLE: 2.78 Item: (Plug and Play) Spare Tire Brake Light W...\u001b[0m\n", + "\u001b[93m68: Guess: $208.49 Truth: $149.00 Error: $59.49 SLE: 0.11 Item: The Ultimate Roadside Rescue Assistant\u001b[0m\n", + "\u001b[92m69: Guess: $218.80 Truth: $251.98 Error: $33.18 SLE: 0.02 Item: Brand New 18\" x 8.5\" Replacement Wheel f...\u001b[0m\n", + "\u001b[91m70: Guess: $290.52 Truth: $160.00 Error: $130.52 SLE: 0.35 Item: Headlight Headlamp LH Left & RH Right Pa...\u001b[0m\n", + "\u001b[91m71: Guess: $193.49 Truth: $39.99 Error: $153.50 SLE: 2.42 Item: Lilo And Stitch Deluxe Oversize Print La...\u001b[0m\n", + "\u001b[93m72: Guess: $265.63 Truth: $362.41 Error: $96.78 SLE: 0.10 Item: AC Compressor & A/C Clutch For Hyundai A...\u001b[0m\n", + "\u001b[93m73: Guess: $213.77 Truth: $344.00 Error: $130.23 SLE: 0.22 Item: House Of Troy PIN475-AB Pinnacle Collect...\u001b[0m\n", + "\u001b[91m74: Guess: $181.04 Truth: $25.09 Error: $155.95 SLE: 3.77 Item: Juno T29 
WH Floating Electrical Feed Sin...\u001b[0m\n", + "\u001b[91m75: Guess: $256.60 Truth: $175.95 Error: $80.65 SLE: 0.14 Item: Sherman GO-PARTS - for 2013-2016 Toyota ...\u001b[0m\n", + "\u001b[93m76: Guess: $193.82 Truth: $132.64 Error: $61.18 SLE: 0.14 Item: Roland RPU-3 Electronic Keyboard Pedal o...\u001b[0m\n", + "\u001b[91m77: Guess: $226.98 Truth: $422.99 Error: $196.01 SLE: 0.38 Item: Rockland VMI14 12,000 Pound 12 Volt DC E...\u001b[0m\n", + "\u001b[93m78: Guess: $214.01 Truth: $146.48 Error: $67.53 SLE: 0.14 Item: Max Advanced Brakes Elite XDS Front Cros...\u001b[0m\n", + "\u001b[91m79: Guess: $274.56 Truth: $156.83 Error: $117.73 SLE: 0.31 Item: Quality-Built 11030 Premium Quality Alte...\u001b[0m\n", + "\u001b[93m80: Guess: $196.91 Truth: $251.99 Error: $55.08 SLE: 0.06 Item: Lucida LG-510 Student Classical Guitar, ...\u001b[0m\n", + "\u001b[91m81: Guess: $246.72 Truth: $940.33 Error: $693.61 SLE: 1.78 Item: Longacre 52-79800 Aluminum Turn Plates\u001b[0m\n", + "\u001b[91m82: Guess: $194.31 Truth: $52.99 Error: $141.32 SLE: 1.65 Item: Motion Pro 08-0380 Adjustable Torque Wre...\u001b[0m\n", + "\u001b[92m83: Guess: $194.99 Truth: $219.95 Error: $24.96 SLE: 0.01 Item: Glyph Thunderbolt 3 NVMe Dock (0 GB)\u001b[0m\n", + "\u001b[91m84: Guess: $232.73 Truth: $441.03 Error: $208.30 SLE: 0.41 Item: TOYO Open Country MT Performance Radial ...\u001b[0m\n", + "\u001b[93m85: Guess: $214.96 Truth: $168.98 Error: $45.98 SLE: 0.06 Item: Razer Seiren X USB Streaming Microphone ...\u001b[0m\n", + "\u001b[91m86: Guess: $201.39 Truth: $2.49 Error: $198.90 SLE: 16.49 Item: Happy Birthday to Dad From Your Daughter...\u001b[0m\n", + "\u001b[91m87: Guess: $179.95 Truth: $98.62 Error: $81.33 SLE: 0.36 Item: Little Tikes My Real Jam First Concert S...\u001b[0m\n", + "\u001b[93m88: Guess: $202.01 Truth: $256.95 Error: $54.94 SLE: 0.06 Item: Studio M Peace and Harmony Art Pole Comm...\u001b[0m\n", + "\u001b[91m89: Guess: $197.03 Truth: $30.99 Error: $166.04 SLE: 3.32 Item: MyVolts 
12V Power Supply Adaptor Compati...\u001b[0m\n", + "\u001b[93m90: Guess: $409.49 Truth: $569.84 Error: $160.35 SLE: 0.11 Item: Dell Latitude 7212 Rugged Extreme Tablet...\u001b[0m\n", + "\u001b[92m91: Guess: $213.80 Truth: $177.99 Error: $35.81 SLE: 0.03 Item: Covermates Contour Fit Car Cover - Light...\u001b[0m\n", + "\u001b[91m92: Guess: $210.86 Truth: $997.99 Error: $787.13 SLE: 2.41 Item: Westin 57-4025 Black HDX Grille Guard fi...\u001b[0m\n", + "\u001b[92m93: Guess: $215.75 Truth: $219.00 Error: $3.25 SLE: 0.00 Item: Fieldpiece JL2 Job Link Wireless App Tra...\u001b[0m\n", + "\u001b[92m94: Guess: $214.15 Truth: $225.55 Error: $11.40 SLE: 0.00 Item: hansgrohe Talis S Modern Premium Easy Cl...\u001b[0m\n", + "\u001b[91m95: Guess: $195.02 Truth: $495.95 Error: $300.93 SLE: 0.87 Item: G-Technology G-SPEED eS PRO High-Perform...\u001b[0m\n", + "\u001b[91m96: Guess: $245.06 Truth: $942.37 Error: $697.31 SLE: 1.81 Item: DreamLine SHDR-1960723L-01 Shower Door, ...\u001b[0m\n", + "\u001b[91m97: Guess: $191.62 Truth: $1.94 Error: $189.68 SLE: 17.49 Item: Sanctuary Square Backplate Finish: Oiled...\u001b[0m\n", + "\u001b[93m98: Guess: $198.87 Truth: $284.34 Error: $85.47 SLE: 0.13 Item: Pelican Protector 1750 Long Case - Multi...\u001b[0m\n", + "\u001b[93m99: Guess: $230.20 Truth: $171.90 Error: $58.30 SLE: 0.08 Item: Brock Replacement Driver and Passenger H...\u001b[0m\n", + "\u001b[92m100: Guess: $174.86 Truth: $144.99 Error: $29.87 SLE: 0.03 Item: Carlinkit Ai Box Mini, Android 11, Multi...\u001b[0m\n", + "\u001b[91m101: Guess: $201.96 Truth: $470.47 Error: $268.51 SLE: 0.71 Item: StarDot NetCamLIVE2 YouTube Live Stream ...\u001b[0m\n", + "\u001b[91m102: Guess: $214.36 Truth: $66.95 Error: $147.41 SLE: 1.33 Item: Atomic Compatible FILXXCAR0016 16x25x5 M...\u001b[0m\n", + "\u001b[91m103: Guess: $209.38 Truth: $117.00 Error: $92.38 SLE: 0.33 Item: Bandai Awakening of S. H. 
s.h.figuarts s...\u001b[0m\n", + "\u001b[93m104: Guess: $236.60 Truth: $172.14 Error: $64.46 SLE: 0.10 Item: Fit System 62135G Passenger Side Towing ...\u001b[0m\n", + "\u001b[93m105: Guess: $268.68 Truth: $392.74 Error: $124.06 SLE: 0.14 Item: Black Horse Black Aluminum Exceed Runnin...\u001b[0m\n", + "\u001b[91m106: Guess: $191.89 Truth: $16.99 Error: $174.90 SLE: 5.63 Item: Dearsun Twinkle Star Color Night Light P...\u001b[0m\n", + "\u001b[91m107: Guess: $217.09 Truth: $1.34 Error: $215.75 SLE: 20.56 Item: Pokemon - Gallade Spirit Link (83/108) -...\u001b[0m\n", + "\u001b[91m108: Guess: $195.43 Truth: $349.98 Error: $154.55 SLE: 0.34 Item: Ibanez GA34STCE-NT GIO Series Classical ...\u001b[0m\n", + "\u001b[93m109: Guess: $240.14 Truth: $370.71 Error: $130.57 SLE: 0.19 Item: Set 2 Heavy Duty 12-16.5 12x16.5 12 Ply ...\u001b[0m\n", + "\u001b[91m110: Guess: $270.69 Truth: $65.88 Error: $204.81 SLE: 1.96 Item: Hairpin Table Legs 28\" Heavy Duty Hairpi...\u001b[0m\n", + "\u001b[92m111: Guess: $215.44 Truth: $229.99 Error: $14.55 SLE: 0.00 Item: Marada Racing Seat with Adjustable Slide...\u001b[0m\n", + "\u001b[91m112: Guess: $214.63 Truth: $9.14 Error: $205.49 SLE: 9.35 Item: Remington Industries 24UL1007STRWHI25 24...\u001b[0m\n", + "\u001b[92m113: Guess: $194.82 Truth: $199.00 Error: $4.18 SLE: 0.00 Item: Acer S3-391-6046 13.3-inch Ultrabook, In...\u001b[0m\n", + "\u001b[93m114: Guess: $180.56 Truth: $109.99 Error: $70.57 SLE: 0.24 Item: ICBEAMER 7\" RGB LED Headlights Bulb Halo...\u001b[0m\n", + "\u001b[91m115: Guess: $268.98 Truth: $570.42 Error: $301.44 SLE: 0.56 Item: R1 Concepts Front Rear Brakes and Rotors...\u001b[0m\n", + "\u001b[93m116: Guess: $194.98 Truth: $279.99 Error: $85.01 SLE: 0.13 Item: Camplux 2.64 GPM Tankless , Outdoor Port...\u001b[0m\n", + "\u001b[91m117: Guess: $195.16 Truth: $30.99 Error: $164.17 SLE: 3.29 Item: KNOKLOCK 10 Pack 3.75 Inch(96mm) Kitchen...\u001b[0m\n", + "\u001b[91m118: Guess: $193.10 Truth: $31.99 Error: $161.11 SLE: 3.14 
Item: Valley Enterprises Yaesu USB FTDI CT-62 ...\u001b[0m\n", + "\u001b[91m119: Guess: $192.79 Truth: $15.90 Error: $176.89 SLE: 5.95 Item: G9 LED Light Bulbs,8W,75W 100W replaceme...\u001b[0m\n", + "\u001b[91m120: Guess: $180.00 Truth: $45.99 Error: $134.01 SLE: 1.82 Item: ZCHAOZ 4 Lights Antique White Farmhouse ...\u001b[0m\n", + "\u001b[93m121: Guess: $192.12 Truth: $113.52 Error: $78.60 SLE: 0.27 Item: Honeywell TH8320R1003 Honeywell VisionPr...\u001b[0m\n", + "\u001b[91m122: Guess: $223.00 Truth: $516.99 Error: $293.99 SLE: 0.70 Item: Patriot Exhaust H8013-1 1-7/8\" Clippster...\u001b[0m\n", + "\u001b[93m123: Guess: $258.72 Truth: $196.99 Error: $61.73 SLE: 0.07 Item: Fitrite Autopart New Front Left Driver S...\u001b[0m\n", + "\u001b[91m124: Guess: $194.17 Truth: $46.55 Error: $147.62 SLE: 1.99 Item: Technical Precision Replacement for GE G...\u001b[0m\n", + "\u001b[93m125: Guess: $255.57 Truth: $356.99 Error: $101.42 SLE: 0.11 Item: Covercraft Carhartt SeatSaver Front Row ...\u001b[0m\n", + "\u001b[93m126: Guess: $194.11 Truth: $319.95 Error: $125.84 SLE: 0.25 Item: Sennheiser SD Pro 2 (506008) - Double-Si...\u001b[0m\n", + "\u001b[91m127: Guess: $208.46 Truth: $96.06 Error: $112.40 SLE: 0.59 Item: Hitachi MAF0110 Mass Air Flow Sensor\u001b[0m\n", + "\u001b[92m128: Guess: $197.05 Truth: $190.99 Error: $6.06 SLE: 0.00 Item: AmScope SE305R-P-LED-PS36A 10X-30X LED C...\u001b[0m\n", + "\u001b[92m129: Guess: $235.60 Truth: $257.95 Error: $22.35 SLE: 0.01 Item: Front Left Driver Side Window Regulator ...\u001b[0m\n", + "\u001b[91m130: Guess: $196.06 Truth: $62.95 Error: $133.11 SLE: 1.27 Item: Premium Replica Hubcap Set, Fits Nissan ...\u001b[0m\n", + "\u001b[91m131: Guess: $215.31 Truth: $47.66 Error: $167.65 SLE: 2.23 Item: Excellerations Phonics Spelling Game for...\u001b[0m\n", + "\u001b[92m132: Guess: $218.01 Truth: $226.99 Error: $8.98 SLE: 0.00 Item: RC4WD BigDog Dual Axle Scale Car/Truck T...\u001b[0m\n", + "\u001b[91m133: Guess: $212.38 Truth: $359.95 
Error: $147.57 SLE: 0.28 Item: Unknown Stage 2 Clutch Kit - Low Altitud...\u001b[0m\n", + "\u001b[91m134: Guess: $209.89 Truth: $78.40 Error: $131.49 SLE: 0.95 Item: 2002-2008 Dodge Ram 1500 Mopar 4X4 Emble...\u001b[0m\n", + "\u001b[92m135: Guess: $212.36 Truth: $172.77 Error: $39.59 SLE: 0.04 Item: Pro Comp Alloys Series 89 Wheel with Pol...\u001b[0m\n", + "\u001b[92m136: Guess: $280.07 Truth: $316.45 Error: $36.38 SLE: 0.01 Item: Detroit Axle - Front Rear Strut & Coil S...\u001b[0m\n", + "\u001b[91m137: Guess: $233.61 Truth: $87.99 Error: $145.62 SLE: 0.94 Item: ECCPP Rear Wheel Axle Replacement fit fo...\u001b[0m\n", + "\u001b[91m138: Guess: $388.73 Truth: $226.63 Error: $162.10 SLE: 0.29 Item: Dell Latitude E6520 Intel i7-2720QM 2.20...\u001b[0m\n", + "\u001b[91m139: Guess: $265.04 Truth: $31.49 Error: $233.55 SLE: 4.42 Item: F FIERCE CYCLE 251pcs Black Universal Mo...\u001b[0m\n", + "\u001b[92m140: Guess: $197.97 Truth: $196.00 Error: $1.97 SLE: 0.00 Item: Flash Furniture 4 Pk. HERCULES Series 88...\u001b[0m\n", + "\u001b[91m141: Guess: $205.56 Truth: $78.40 Error: $127.16 SLE: 0.91 Item: B&M 30287 Throttle Valve/Kickdown Cable,...\u001b[0m\n", + "\u001b[91m142: Guess: $209.70 Truth: $116.25 Error: $93.45 SLE: 0.34 Item: Gates TCK226 PowerGrip Premium Timing Be...\u001b[0m\n", + "\u001b[93m143: Guess: $190.01 Truth: $112.78 Error: $77.23 SLE: 0.27 Item: Monroe Shocks & Struts Quick-Strut 17149...\u001b[0m\n", + "\u001b[91m144: Guess: $176.34 Truth: $27.32 Error: $149.02 SLE: 3.37 Item: Feit Electric BPMR16/GU10/930CA/6 35W EQ...\u001b[0m\n", + "\u001b[92m145: Guess: $182.57 Truth: $145.91 Error: $36.66 SLE: 0.05 Item: Yellow Jacket 2806 Contractor Extension ...\u001b[0m\n", + "\u001b[93m146: Guess: $215.37 Truth: $171.09 Error: $44.28 SLE: 0.05 Item: Garage-Pro Tailgate SET Compatible with ...\u001b[0m\n", + "\u001b[92m147: Guess: $194.27 Truth: $167.95 Error: $26.32 SLE: 0.02 Item: 3M Perfect It Buffing and Polishing Kit ...\u001b[0m\n", + "\u001b[91m148: 
Guess: $198.39 Truth: $28.49 Error: $169.90 SLE: 3.65 Item: Chinese Style Dollhouse Model DIY Miniat...\u001b[0m\n", + "\u001b[91m149: Guess: $203.63 Truth: $122.23 Error: $81.40 SLE: 0.26 Item: Generic NRG Innovations SRK-161H Steerin...\u001b[0m\n", + "\u001b[91m150: Guess: $176.54 Truth: $32.99 Error: $143.55 SLE: 2.73 Item: Learning Resources Coding Critters Range...\u001b[0m\n", + "\u001b[91m151: Guess: $263.51 Truth: $71.20 Error: $192.31 SLE: 1.69 Item: Bosch Automotive 15463 Oxygen Sensor, OE...\u001b[0m\n", + "\u001b[91m152: Guess: $204.91 Truth: $112.75 Error: $92.16 SLE: 0.35 Item: Case of 24-2 Inch Blue Painters Tape - 6...\u001b[0m\n", + "\u001b[91m153: Guess: $262.02 Truth: $142.43 Error: $119.59 SLE: 0.37 Item: MOCA Engine Water Pump & Fan Clutch fit ...\u001b[0m\n", + "\u001b[93m154: Guess: $259.51 Truth: $398.99 Error: $139.48 SLE: 0.18 Item: SAREMAS Foot Step Bars for Hyundai Palis...\u001b[0m\n", + "\u001b[91m155: Guess: $194.77 Truth: $449.00 Error: $254.23 SLE: 0.69 Item: Gretsch G9210 Square Neck Boxcar Mahogan...\u001b[0m\n", + "\u001b[92m156: Guess: $176.53 Truth: $189.00 Error: $12.47 SLE: 0.00 Item: NikoMaku Mirror Dash Cam Front and Rear ...\u001b[0m\n", + "\u001b[93m157: Guess: $194.40 Truth: $120.91 Error: $73.49 SLE: 0.22 Item: Fenix HP25R v2.0 USB-C Rechargeable Head...\u001b[0m\n", + "\u001b[92m158: Guess: $239.90 Truth: $203.53 Error: $36.37 SLE: 0.03 Item: R&L Racing Heavy Duty Roll-Up Soft Tonne...\u001b[0m\n", + "\u001b[91m159: Guess: $192.28 Truth: $349.99 Error: $157.71 SLE: 0.36 Item: Garmin 010-02258-10 GPSMAP 64sx, Handhel...\u001b[0m\n", + "\u001b[91m160: Guess: $211.77 Truth: $34.35 Error: $177.42 SLE: 3.22 Item: Brown 5-7/8\" X 8-1/2\" X 3/16\" Thick Heav...\u001b[0m\n", + "\u001b[91m161: Guess: $194.67 Truth: $384.99 Error: $190.32 SLE: 0.46 Item: GAOMON PD2200 Pen Display & 20 Pen Nibs ...\u001b[0m\n", + "\u001b[92m162: Guess: $212.76 Truth: $211.00 Error: $1.76 SLE: 0.00 Item: VXMOTOR for 97-03 Ford F150/F250 
Lightdu...\u001b[0m\n", + "\u001b[91m163: Guess: $210.60 Truth: $129.00 Error: $81.60 SLE: 0.24 Item: HP EliteBook 2540p Intel Core i7-640LM X...\u001b[0m\n", + "\u001b[91m164: Guess: $213.21 Truth: $111.45 Error: $101.76 SLE: 0.42 Item: Green EPX Mixing Nozzles 100-Pack-fits 3...\u001b[0m\n", + "\u001b[91m165: Guess: $205.74 Truth: $81.12 Error: $124.62 SLE: 0.85 Item: Box Partners 6 1/4 x 3 1/8\" 13 Pt. Manil...\u001b[0m\n", + "\u001b[91m166: Guess: $198.04 Truth: $457.08 Error: $259.04 SLE: 0.69 Item: Vixen Air 1/2\" NPT Air Ride Suspension H...\u001b[0m\n", + "\u001b[91m167: Guess: $194.70 Truth: $49.49 Error: $145.21 SLE: 1.84 Item: Smart Floor Lamp, 2700-6500K+RGBPink Mul...\u001b[0m\n", + "\u001b[91m168: Guess: $217.48 Truth: $80.56 Error: $136.92 SLE: 0.97 Item: SOZG 324mm Wheelbase Body Shell RC Car B...\u001b[0m\n", + "\u001b[92m169: Guess: $227.26 Truth: $278.39 Error: $51.13 SLE: 0.04 Item: Mickey Thompson ET Street S/S Racing Rad...\u001b[0m\n", + "\u001b[91m170: Guess: $208.07 Truth: $364.50 Error: $156.43 SLE: 0.31 Item: Pirelli 275/40R20 106W XL RFT P0 PZ4-LUX...\u001b[0m\n", + "\u001b[93m171: Guess: $236.03 Truth: $378.99 Error: $142.96 SLE: 0.22 Item: Torklift C3212 Rear Tie Down\u001b[0m\n", + "\u001b[93m172: Guess: $213.75 Truth: $165.28 Error: $48.47 SLE: 0.07 Item: Cardone 78-4226 Remanufactured Ford Comp...\u001b[0m\n", + "\u001b[91m173: Guess: $205.13 Truth: $56.74 Error: $148.39 SLE: 1.62 Item: Kidde AccessPoint 001798 Supra TouchPoin...\u001b[0m\n", + "\u001b[93m174: Guess: $212.56 Truth: $307.95 Error: $95.39 SLE: 0.14 Item: 3M Protecta 3100414 Self Retracting Life...\u001b[0m\n", + "\u001b[91m175: Guess: $191.27 Truth: $38.00 Error: $153.27 SLE: 2.55 Item: Plantronics 89435-01 Wired Headset, Blac...\u001b[0m\n", + "\u001b[91m176: Guess: $192.92 Truth: $53.00 Error: $139.92 SLE: 1.63 Item: Logitech K750 Wireless Solar Keyboard fo...\u001b[0m\n", + "\u001b[91m177: Guess: $210.94 Truth: $498.00 Error: $287.06 SLE: 0.73 Item: Olympus PEN 
E-PL9 Body Only with 3-Inch ...\u001b[0m\n", + "\u001b[91m178: Guess: $213.58 Truth: $53.99 Error: $159.59 SLE: 1.85 Item: Beck/Arnley 051-6066 Hub & Bearing Assem...\u001b[0m\n", + "\u001b[93m179: Guess: $255.65 Truth: $350.00 Error: $94.35 SLE: 0.10 Item: Eibach Pro-Kit Performance Springs E10-6...\u001b[0m\n", + "\u001b[93m180: Guess: $213.22 Truth: $299.95 Error: $86.73 SLE: 0.12 Item: LEGO DC Batman 1989 Batwing 76161 Displa...\u001b[0m\n", + "\u001b[91m181: Guess: $199.99 Truth: $94.93 Error: $105.06 SLE: 0.55 Item: Kingston Brass KS3608PL Restoration 4-In...\u001b[0m\n", + "\u001b[91m182: Guess: $197.53 Truth: $379.00 Error: $181.47 SLE: 0.42 Item: Polk Vanishing Series 265-LS In-Wall 3-W...\u001b[0m\n", + "\u001b[93m183: Guess: $215.97 Truth: $299.95 Error: $83.98 SLE: 0.11 Item: Spec-D Tuning LED Projector Headlights G...\u001b[0m\n", + "\u001b[91m184: Guess: $183.26 Truth: $24.99 Error: $158.27 SLE: 3.84 Item: RICHMOND & FINCH Airpod Pro Case, Green ...\u001b[0m\n", + "\u001b[91m185: Guess: $247.06 Truth: $41.04 Error: $206.02 SLE: 3.15 Item: LFA Industries 43B-5A-33JT 1/16-1/2-1.5-...\u001b[0m\n", + "\u001b[91m186: Guess: $193.40 Truth: $327.90 Error: $134.50 SLE: 0.28 Item: SAUTVS LED Headlight Assembly for Slings...\u001b[0m\n", + "\u001b[91m187: Guess: $215.34 Truth: $10.99 Error: $204.35 SLE: 8.37 Item: 2 Pack Combo Womens Safety Glasses Impac...\u001b[0m\n", + "\u001b[91m188: Guess: $201.36 Truth: $14.99 Error: $186.37 SLE: 6.44 Item: Arepa - Venezuelan cuisine - Venezuela P...\u001b[0m\n", + "\u001b[91m189: Guess: $197.87 Truth: $84.95 Error: $112.92 SLE: 0.70 Item: Schlage Lock Company KS23D2300 Padlock, ...\u001b[0m\n", + "\u001b[93m190: Guess: $178.90 Truth: $111.00 Error: $67.90 SLE: 0.22 Item: Techni Mobili White Sit to Stand Mobile ...\u001b[0m\n", + "\u001b[91m191: Guess: $211.36 Truth: $123.73 Error: $87.63 SLE: 0.28 Item: Special Lite Products Contemporary Wall ...\u001b[0m\n", + "\u001b[91m192: Guess: $196.72 Truth: $557.38 Error: $360.66 
SLE: 1.08 Item: Tascam DP-24SD 24-Track Digital Portastu...\u001b[0m\n", + "\u001b[91m193: Guess: $211.54 Truth: $95.55 Error: $115.99 SLE: 0.62 Item: Glow Lighting 636CC10SP Vista Crystal Fl...\u001b[0m\n", + "\u001b[91m194: Guess: $257.51 Truth: $154.00 Error: $103.51 SLE: 0.26 Item: Z3 Wind Deflector, Smoke Tint, Lexan, Wi...\u001b[0m\n", + "\u001b[92m195: Guess: $211.15 Truth: $198.99 Error: $12.16 SLE: 0.00 Item: Olympus E-20 5MP Digital Camera w/ 4x Op...\u001b[0m\n", + "\u001b[91m196: Guess: $228.43 Truth: $430.44 Error: $202.01 SLE: 0.40 Item: PHYNEDI 1:1000 World Trade Center (1973-...\u001b[0m\n", + "\u001b[91m197: Guess: $200.32 Truth: $45.67 Error: $154.65 SLE: 2.14 Item: YANGHUAN Unstable Unicorns Adventure Car...\u001b[0m\n", + "\u001b[93m198: Guess: $192.55 Truth: $249.00 Error: $56.45 SLE: 0.07 Item: Interlogix NX-1820E NetworX Touch Screen...\u001b[0m\n", + "\u001b[91m199: Guess: $213.40 Truth: $42.99 Error: $170.41 SLE: 2.51 Item: Steering Damper,Universal Motorcycle Han...\u001b[0m\n", + "\u001b[92m200: Guess: $180.65 Truth: $181.33 Error: $0.68 SLE: 0.00 Item: Amprobe TIC 410A Hot Stick Attachment\u001b[0m\n", + "\u001b[91m201: Guess: $191.51 Truth: $6.03 Error: $185.48 SLE: 10.96 Item: MyCableMart 3.5mm Plug/Jack, 4 Conductor...\u001b[0m\n", + "\u001b[91m202: Guess: $192.65 Truth: $29.99 Error: $162.66 SLE: 3.36 Item: OtterBox + Pop Symmetry Series Case for ...\u001b[0m\n", + "\u001b[91m203: Guess: $397.21 Truth: $899.00 Error: $501.79 SLE: 0.66 Item: Dell XPS X8700-1572BLK Desktop ( Intel C...\u001b[0m\n", + "\u001b[91m204: Guess: $183.88 Truth: $399.99 Error: $216.11 SLE: 0.60 Item: Franklin Iron Works Sperry Industrial Br...\u001b[0m\n", + "\u001b[91m205: Guess: $193.64 Truth: $4.66 Error: $188.98 SLE: 12.52 Item: Avery Legal Dividers, Standard Collated ...\u001b[0m\n", + "\u001b[92m206: Guess: $221.21 Truth: $261.41 Error: $40.20 SLE: 0.03 Item: Moen 8346 Commercial Posi-Temp Pressure ...\u001b[0m\n", + "\u001b[93m207: Guess: $212.06 Truth: 
$136.97 Error: $75.09 SLE: 0.19 Item: Carlisle Versa Trail ATR All Terrain Rad...\u001b[0m\n", + "\u001b[91m208: Guess: $190.95 Truth: $79.00 Error: $111.95 SLE: 0.77 Item: SUNWAYFOTO 44mm Tripod Ball Head Arca Co...\u001b[0m\n", + "\u001b[91m209: Guess: $193.95 Truth: $444.99 Error: $251.04 SLE: 0.68 Item: NanoBeam AC NBE-5AC-Gen2-US 4 Units 5GHz...\u001b[0m\n", + "\u001b[91m210: Guess: $217.97 Truth: $411.94 Error: $193.97 SLE: 0.40 Item: WULF 4\" Front 2\" Rear Leveling Lift Kit ...\u001b[0m\n", + "\u001b[93m211: Guess: $215.59 Truth: $148.40 Error: $67.19 SLE: 0.14 Item: Alera ALEVABFMC Valencia Series Mobile B...\u001b[0m\n", + "\u001b[92m212: Guess: $210.13 Truth: $244.99 Error: $34.86 SLE: 0.02 Item: YU-GI-OH! Ignition Assault Booster Box\u001b[0m\n", + "\u001b[91m213: Guess: $266.22 Truth: $86.50 Error: $179.72 SLE: 1.25 Item: 48\" x 36\" Extra-Large Framed Magnetic Bl...\u001b[0m\n", + "\u001b[93m214: Guess: $406.69 Truth: $297.95 Error: $108.74 SLE: 0.10 Item: Dell Latitude D620 Renewed Notebook PC\u001b[0m\n", + "\u001b[91m215: Guess: $212.39 Truth: $399.99 Error: $187.60 SLE: 0.40 Item: acer Aspire 5 Laptop, AMD Ryzen 3 5300U ...\u001b[0m\n", + "\u001b[91m216: Guess: $215.92 Truth: $599.00 Error: $383.08 SLE: 1.04 Item: Elk 31080/6RC-GRN 30 by 6-Inch Viva 6-Li...\u001b[0m\n", + "\u001b[91m217: Guess: $213.69 Truth: $105.99 Error: $107.70 SLE: 0.49 Item: Barbie Top Model Doll\u001b[0m\n", + "\u001b[91m218: Guess: $217.90 Truth: $689.00 Error: $471.10 SLE: 1.32 Item: Danby Designer 20-In. 
Electric Range wit...\u001b[0m\n", + "\u001b[91m219: Guess: $215.95 Truth: $404.99 Error: $189.04 SLE: 0.39 Item: FixtureDisplays® Metal Truss Podium Doub...\u001b[0m\n", + "\u001b[93m220: Guess: $262.05 Truth: $207.76 Error: $54.29 SLE: 0.05 Item: ACDelco 13597235 GM Original Equipment A...\u001b[0m\n", + "\u001b[93m221: Guess: $221.55 Truth: $171.82 Error: $49.73 SLE: 0.06 Item: EBC S1KF1135 Stage-1 Premium Street Brak...\u001b[0m\n", + "\u001b[93m222: Guess: $211.57 Truth: $293.24 Error: $81.67 SLE: 0.11 Item: FXR Men's Boost FX Jacket (Black/Orange/...\u001b[0m\n", + "\u001b[91m223: Guess: $205.96 Truth: $374.95 Error: $168.99 SLE: 0.36 Item: SuperATV Scratch Resistant 3-in-1 Flip W...\u001b[0m\n", + "\u001b[91m224: Guess: $260.67 Truth: $111.99 Error: $148.68 SLE: 0.71 Item: SBU 3 Layer All Weather Mini Van Car Cov...\u001b[0m\n", + "\u001b[91m225: Guess: $179.54 Truth: $42.99 Error: $136.55 SLE: 1.99 Item: 2 Pack Outdoor Brochure Holder Advertisi...\u001b[0m\n", + "\u001b[91m226: Guess: $296.29 Truth: $116.71 Error: $179.58 SLE: 0.86 Item: Monroe Shocks & Struts Quick-Strut 17158...\u001b[0m\n", + "\u001b[91m227: Guess: $205.38 Truth: $118.61 Error: $86.77 SLE: 0.30 Item: Elements of Design Magellan EB235AL Thre...\u001b[0m\n", + "\u001b[93m228: Guess: $201.32 Truth: $147.12 Error: $54.20 SLE: 0.10 Item: GM Genuine Parts 15-62961 Air Conditioni...\u001b[0m\n", + "\u001b[93m229: Guess: $173.56 Truth: $119.99 Error: $53.57 SLE: 0.13 Item: Baseus 17-in-1 USB C Docking Station to ...\u001b[0m\n", + "\u001b[91m230: Guess: $217.86 Truth: $369.98 Error: $152.12 SLE: 0.28 Item: Whitehall™ Personalized Whitehall Capito...\u001b[0m\n", + "\u001b[93m231: Guess: $231.58 Truth: $315.55 Error: $83.97 SLE: 0.10 Item: Pro Circuit Works Pipe PY05250 for 02-19...\u001b[0m\n", + "\u001b[92m232: Guess: $206.37 Truth: $190.99 Error: $15.38 SLE: 0.01 Item: HYANKA 15 \"1200W Professional DJ Speaker...\u001b[0m\n", + "\u001b[92m233: Guess: $191.19 Truth: $155.00 Error: $36.19 SLE: 
0.04 Item: Bluetooth X6BT Card Reader Writer Encode...\u001b[0m\n", + "\u001b[93m234: Guess: $215.67 Truth: $349.99 Error: $134.32 SLE: 0.23 Item: AIRAID Cold Air Intake System by K&N: In...\u001b[0m\n", + "\u001b[93m235: Guess: $181.46 Truth: $249.99 Error: $68.53 SLE: 0.10 Item: Bostingner Shower Faucets Sets Complete,...\u001b[0m\n", + "\u001b[91m236: Guess: $194.98 Truth: $42.99 Error: $151.99 SLE: 2.23 Item: PIT66 Front Bumper Turn Signal Lights, C...\u001b[0m\n", + "\u001b[91m237: Guess: $193.05 Truth: $17.99 Error: $175.06 SLE: 5.40 Item: Caseology Bumpy Compatible with Google P...\u001b[0m\n", + "\u001b[91m238: Guess: $200.83 Truth: $425.00 Error: $224.17 SLE: 0.56 Item: Fleck 2510 Timer Mechanical Filter Contr...\u001b[0m\n", + "\u001b[93m239: Guess: $176.66 Truth: $249.99 Error: $73.33 SLE: 0.12 Item: Haloview MC7108 Wireless RV Backup Camer...\u001b[0m\n", + "\u001b[93m240: Guess: $210.43 Truth: $138.23 Error: $72.20 SLE: 0.17 Item: Schmidt Spiele - Manhattan\u001b[0m\n", + "\u001b[91m241: Guess: $224.68 Truth: $414.99 Error: $190.31 SLE: 0.37 Item: Corsa 14333 Tip Kit (Ford Mustang GT)\u001b[0m\n", + "\u001b[93m242: Guess: $209.66 Truth: $168.28 Error: $41.38 SLE: 0.05 Item: Hoshizaki FM116A Fan Motor Kit 1\u001b[0m\n", + "\u001b[92m243: Guess: $186.79 Truth: $199.99 Error: $13.20 SLE: 0.00 Item: BAINUO Antler Chandelier Lighting,6 Ligh...\u001b[0m\n", + "\u001b[93m244: Guess: $182.14 Truth: $126.70 Error: $55.44 SLE: 0.13 Item: DNA MOTORING HL-OH-FEXP06-SM-AM Smoke Le...\u001b[0m\n", + "\u001b[91m245: Guess: $214.69 Truth: $5.91 Error: $208.78 SLE: 11.84 Item: Wera Stainless 3840/1 TS 2.5mm Hex Inser...\u001b[0m\n", + "\u001b[92m246: Guess: $195.84 Truth: $193.06 Error: $2.78 SLE: 0.00 Item: Celestron - PowerSeeker 127EQ Telescope ...\u001b[0m\n", + "\u001b[93m247: Guess: $178.68 Truth: $249.99 Error: $71.31 SLE: 0.11 Item: NHOPEEW 10.1inch Android Car Radio Carpl...\u001b[0m\n", + "\u001b[91m248: Guess: $210.76 Truth: $64.12 Error: $146.64 SLE: 1.39 
Item: Other Harmonica (Suzuki-2Timer24- A)\u001b[0m\n", + "\u001b[91m249: Guess: $204.78 Truth: $114.99 Error: $89.79 SLE: 0.33 Item: Harley Air Filter Venturi Intake Air Cle...\u001b[0m\n", + "\u001b[91m250: Guess: $181.14 Truth: $926.00 Error: $744.86 SLE: 2.65 Item: Elite Screens Edge Free Ambient Light Re...\u001b[0m\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA+0AAAK9CAYAAABRvo1QAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAxlVJREFUeJzs3Xd4VGXax/FvElJJo6SAIiBiQwQFe1es2CuKimXVtay6ttVdFXUta3tdy1pXxQJ27GLvymJv2LCgIKRAICEESDvvHydlQk0gyaR8P9fFxeSZMzP3pExyz+95nhMTBEGAJEmSJElqc2KjXYAkSZIkSVo+m3ZJkiRJktoom3ZJkiRJktoom3ZJkiRJktoom3ZJkiRJktoom3ZJkiRJktoom3ZJkiRJktoom3ZJkiRJktoom3ZJkiRJktoom3ZJjTZ9+nRiYmIYN25ctEvRGjjuuOPo169ftMtYLePGjSMmJobp06dHu5ROZeedd/ZzLgFvv/02MTExvP3229EuRVInYtMuCahvhj755JNol9JiLrvsMmJiYur+xcfH069fP84880zmz58f7fI6vNo3fWr/xcXFsc4663DQQQfxxRdfRLu81VL7B/yK/j366KPRLrFVXHXVVey///7k5OQQExPDZZddttzjnn76afbcc0969+5NYmIia6+9NoceeijffPPNMseWlpZy9tlns/baa5OYmMhGG23EHXfcsVr1jR8/npiYGFJTU5d7/eOPP87WW29NZmYmPXr0YKedduLFF19s1H0v/TVPT09f4e1rX2djYmJ4//33l7k+CAL69OlDTEwM++67b4PrSktLGTt2LJtssgldu3alR48eDB06lLPOOotZs2bVHVf7OjdnzpwV1twa37ePPfYYRx99NAMHDiQmJoadd965Sbe/4447OOyww1hnnXWIiYnhuOOOW+5xO++88wqfR3x8/CofZ+edd2aTTTZZ7nW1r1k33HDDSu9jwoQJ/Pvf/17lYzWn2bNnc+GFF7LLLruQlpa2yjcSysvLufrqq9lwww1JSkoiJyeHkSNHMnPmzCY97vvvv1/3+V36e+yDDz5g8803Jy0tjZ133pnvv/9+mdufeeaZ7Lnnnk16TEnQJdoFSGo/+vbty6JFixr1h1Bbdscdd5CamsrChQt54403uPXWW/nss8+W+0d0R3TPPfdQXV0dtcc/8sgj2WeffaiqquK7777jjjvuYNKkSfzvf/9j6NChK73tMcccw6hRo0hMTGydYhvpzDPPZIsttlhmfJtttolCNc0rCAKqqqpYvHjxCo+5+OKLyc3NZbPNNuOVV15Z4XFff/013bp146yzzqJnz57k5eVx3333seWWWzJ58mSGDBkCQFVVFXvuuSeffPIJp59+OgMHDuSVV17htNNOY968efz9739vdP2lpaVccMEFdO3adbnX33rrrZx55pmMHDmSf/3rXyxevJhx48ax77778tRTT3HwwQev8jF23313jj32WIIg4LfffuOOO+5gv/32Y9KkScttUJKSkpgwYQLbb799g/F33nmHmTNnLvP9XVFRwY477sj333/PmDFj+Mtf/kJpaSlTp05lwoQJHHTQQfTu3bvRn5NaLfl9e8cdd/Dpp5+yx
RZbMHfu3Cbf/tprr2XBggVsueWWzJ49e4XH/eMf/+BPf/pTg7GFCxfy5z//mT322KPJj7sqO+64I4sWLSIhIaFubMKECXzzzTecffbZzf54K/LDDz9w7bXXMnDgQAYPHszkyZNXeGxFRQUjR47kww8/5KSTTmLTTTdl3rx5TJkyheLiYtZee+1GPWZ1dTV/+ctf6Nq1KwsXLmxwXXFxMQcccABbb701J598MuPGjeOQQw7hq6++Ii4uDoCpU6dyzz338Omnn67+E5c6q0CSgiC4//77AyD4+OOPo13KGlm4cOEKrxs7dmwABIWFhQ3GjzjiiAAIpkyZ0tLlNVBVVRUsWrSoVR8zmn799dcACK6//voG488991wABCeffPIKb1taWtrS5a3WY7/11lsBEDzxxBNNvt+Vff2b4/mu7GehMWbOnBkceOCBQXJycgAEQNCtW7fgoIMOWubYX3/9NQiCICgsLAyAYOzYsY1+nLy8vKBLly7BKaecUjf2+OOPB0Bw7733Njj2kEMOCZKSkoL8/PxG3//f/va3YIMNNghGjx4ddO3adZnrBw4cGGyxxRZBdXV13VhxcXGQmpoa7L///qu8fyA4/fTTG4x9++23ARDsvffeDcZrX2cPPvjgoGfPnkFFRUWD60866aRg2LBhQd++fYORI0fWjdd+PsaPH7/M4y9atCgoLi6u+3hFr3OR1uT7trF+//33oKqqKgiCIBg0aFCw0047Nen206dPr/uadO3aNRgzZkyjb/vQQw+t8PO1tJ122ikYNGjQcq9b0WvW0kaOHBn07du30fU1h5KSkmDu3LlBEATBE088EQDBW2+9tdxjr7322iA+Pn6Nf8fdcccdQY8ePYKzzjprme+xSZMmBSkpKXWvabWfu++//77umBEjRgR/+ctf1qgGqbNyerykRlvemvbjjjuO1NRU/vjjDw488EBSU1PJysrivPPOo6qqqsHtq6ur+fe//82gQYPqpuedcsopzJs3r8Fxzz77LCNHjqybQjtgwAD++c9/LnN/tdMaP/30U3bccUdSUlKalMDV2mGHHQD4+eefG4xPmTKFvfbai4yMDFJSUthpp5344IMPlrn922+/zfDhw0lKSmLAgAHcdddddVNUI8XExHDGGWcwfvx4Bg0aRGJiIi+//DIAf/zxByeccAI5OTkkJiYyaNAg7rvvvmUe69Zbb2XQoEGkpKTQrVs3hg8fzoQJE+quX7BgAWeffTb9+vUjMTGR7Oxsdt99dz777LO6Y5a3pn3hwoWce+659OnTh8TERDbYYANuuOEGgiBY7nN45pln2GSTTepqrX0eq2PXXXcF4NdffwXqpxC/8847nHbaaWRnZ9clQSta0z5p0iR22mkn0tLSSE9PZ4sttmjweYHGfT1rv27ffvstRx11FN26dVsmDV1dK/r6r+z5Atx+++11x/fu3ZvTTz99meUczfWzEGn06NG88cYb/POf/2TTTTfl9ttv57zzzuO3335b5tg12SMhOzublJSUBs/pvffeA2DUqFENjh01ahSLFy/m2WefbdR9T5s2jZtuuon/+7//o0uX5U8uLCkpITs7u8HPa3p6OqmpqSQnJzfx2YQ22mgjevbsucxrSq0jjzySuXPn8tprr9WNlZeX8+STT3LUUUctc3zt/Wy33XbLXJeUlER6evpq1dmS+vTpQ2zs6v+Z2bdv32VeQxtrwoQJdO3alQMOOGC1H39Fll7TvvPOO/Piiy/y22+/1U0bj/x5WNVr9upKS0uje/fuqzyuurqam2++mYMOOogtt9ySyspKysrKmvx4RUVFXHzxxVxxxRVkZmYuc/2iRYtISkoiKSkJoK622sd65pln+Pzzz7n88sub/NiSnB4vqRnUTmXdaqutuOGGG3j99de58cYbGTBgAKeeemrdcaeccgrjxo3j+OOP58wzz+TXX3/ltttu4/PPP+eDDz6om3Y/btw4UlNTOeecc0hNTeXNN9/k0ksvpaSkh
Ouvv77BY8+dO5e9996bUaNGcfTRR5OTk9Pk+msbwG7dutWNvfnmm+y9994MGzaMsWPHEhsby/3338+uu+7Ke++9x5ZbbgnA559/zl577UWvXr24/PLLqaqq4oorriArK2u5j/Xmm2/y+OOPc8YZZ9CzZ0/69etHfn4+W2+9dV1Tl5WVxaRJkzjxxBMpKSmpm3J5zz33cOaZZ3LooYdy1llnsXjxYr766iumTJlS94f+n//8Z5588knOOOMMNt54Y+bOncv777/Pd999x+abb77cmoIgYP/99+ett97ixBNPZOjQobzyyiucf/75/PHHH9x0000Njn///feZOHEip512Gmlpadxyyy0ccsgh/P777/To0aPJn//ahmTp25522mlkZWVx6aWXLjMVM9K4ceM44YQTGDRoEBdddBGZmZl8/vnnvPzyy3Wfl8Z+PWsddthhDBw4kKuvvnqZNy6WZ8GCBctdQ9yjR48Gjcfyvv616/mX93wvu+wyLr/8ckaMGMGpp57KDz/8wB133MHHH3/c4GcGVvyzUFpautKp7bXi4+PJyMioez7vvvsuV155Jeeeey7PP/88e++9N/369VvjNwMA5s+fT0VFBXl5efz73/+mpKSE3Xbbre76JUuWEBcX12AKMkBKSgoAn376KSeddNIqH+fss89ml112YZ999uHxxx9f7jE777wzTz75JLfeeiv77bcfixcv5tZbb6W4uJizzjprtZ5fcXEx8+bNY8CAAcu9vl+/fmyzzTY88sgj7L333kD4xlNxcTGjRo3illtuaXB83759AXjwwQe5+OKLV7uZXVpjvm+Li4upqKhY5X0lJSWtcM+A1lRYWMhrr73GEUccscIlEUurqqpa7udh6TeUl+cf//gHxcXFzJw5s+61svbz0JjX7IqKCoqLixtVZ/fu3Zv8Rsi3337LrFmz2HTTTTn55JN54IEHKC8vZ/Dgwdx8883ssssujbqfSy65hNzcXE455RT++c9/LnP9ZpttRnFxMTfeeCOHHnoo//73v8nIyGCDDTZgyZIlnHvuuVx++eUNfs9KaoIoJ/2S2ojGTI+vne52//33142NGTMmAIIrrriiwbGbbbZZMGzYsLqP33vvveVOV3z55ZeXGS8rK1vmsU855ZQgJSUlWLx4cd3YTjvtFADBnXfe2ajnWDtt9IcffggKCwuD6dOnB/fdd1+QnJwcZGVl1U0nrq6uDgYOHBjsueeeDabMlpWVBf379w923333urH99tsvSElJCf7444+6sWnTpgVdunQJln6JBYLY2Nhg6tSpDcZPPPHEoFevXsGcOXMajI8aNSrIyMio+3wccMABK5zGWSsjI2OZqbpLGzNmTIOpnM8880wABFdeeWWD4w499NAgJiYm+Omnnxo8h4SEhAZjX375ZQAEt95660oft/b75/LLLw8KCwuDvLy84O233w4222yzAAieeuqpIAjqvxe33377oLKyssF91F5XOx17/vz5QVpaWrDVVlstM9W89mvXlK9n7ffIkUceudLnUqt2mvGK/s2ePbvB5255X/8VPd+CgoIgISEh2GOPPeqmGQdBENx2220BENx33311Yyv7Waj9GV3Vv8jpywsXLgxiY2ODf/zjH3X3X/s5X5nGTo/fYIMN6h43NTU1uPjiixs8xxtvvDEAgvfee6/B7S688MIACPbdd99V1vLCCy8EXbp0qft8jxkzZrnT4/Pz84PddtutweeiZ8+ewYcffrjKxwiC8Ot64oknBoWFhUFBQUHwySefBHvttddyp1VHvs7edtttQVpaWt3P92GHHRbssssuQRAEy0yPLysrq/uc9e3bNzjuuOOCe++9d7nLBJoyPb4x37e131ur+rey6eurMz0+UlOmx996660BELz00kuNOr4xzy/y61j7uYucir6i6fGNec1e1dci8t+KfgZXNj1+4sSJARD06NEjGDhwYHD//fcH999/fzBw4MAgISEh+PLLL1f5Ofryy
y+DuLi44JVXXgmCYMXfY9dff30QFxcXAEFycnIwYcKEIAiC4Kqrrgo22WSTZV7PJTWeSbukZvHnP/+5wcc77LADDz30UN3HTzzxBBkZGey+++4NEo1hw4aRmprKW2+9VZc8RE5JXbBgAUuWLGGHHXbgrrvu4vvvv6/brAogMTGR448/vkm1brDBBg0+Hjx4MPfff39divfFF18wbdo0Lr744mU2UNptt9146KGHqK6uJggCXn/99WU2gVpvvfXYe++9ef7555d57J122omNN9647uMgCHjqqac4/PDDCYKgwedmzz335NFHH+Wzzz5ju+22IzMzk5kzZ/Lxxx8vd/MogMzMTKZMmcKsWbMavTHVSy+9RFxcHGeeeWaD8XPPPZcnn3ySSZMmccYZZ9SNjxgxokGCuOmmm5Kens4vv/zSqMcbO3YsY8eOrfs4PT2da6+9dpkNv0466aS6DYxW5LXXXmPBggVceOGFddMya9UmhY39ekYmWEt/P6/KpZdeWrfMItLS01eX/vpHWvr5vv7665SXl3P22Wc3qO2kk07i73//Oy+++GKD7/0V/SxccMEFHH300at8DpEJWEpKCocffjjXXXcds2fP5o8//uCPP/5otlMF3n///ZSUlPDLL79w//33s2jRIqqqquqe51FHHcUVV1zBCSecwH/+8x8GDhzIq6++yu233w6EU3FXpry8nL/+9a/8+c9/XuHnO/K5brDBBqy99trsu+++LFiwgJtuuomDDz6Y9957j/XWW2+Vz+fee+/l3nvvrfs4Pj6eCy64gHPOOWeFtzn88MM5++yzeeGFF9hrr7144YUXlknYayUnJzNlyhSuuuoqHn/8ccaNG8e4ceOIjY3ltNNO44YbblitzRkb83174403NipxXp2N8FrChAkTyMrKYvfdd2/0bfr168c999yzzHh+fn6jfnZWpDGv2UOGDGmwTGJlcnNzm1xDaWkpEP4u/fzzz+nTpw8QLktab731uO6663j44YdXeh9nnnkme++99yo39jvvvPM45phj+PXXX9lggw3o1q0bs2bN4pprruGZZ56hsrKSs88+m2effZbc3Fxuuumm5S75kLQsm3ZJaywpKWmZ6eDdunVr8IfetGnTKC4uJjs7e7n3UVBQUHd56tSpXHzxxbz55puUlJQ0OG7paYRrrbXWMlNoV+Wpp54iPT2dwsJCbrnlFn799dcGbxRMmzYNgDFjxqzwPoqLi1m8eDGLFi1a7h/1K/pDv3///g0+LiwsZP78+dx9993cfffdy71N7efmb3/7G6+//jpbbrkl6623HnvssQdHHXVUgz96rrvuOsaMGUOfPn0YNmwY++yzD8ceeyzrrrvuCp/Lb7/9Ru/evUlLS2swvtFGG9VdH2mdddZZ5j6W/nqvzMknn8xhhx1GbGwsmZmZdeu1l7b052p5aqfWr+iUTdD4r2dk09qYx440ePBgRowYscrjVna/S19X+3lf+k2mhIQE1l133WW+Liv6Wdh4441X2bguz7hx4xgyZAgPP/wwP/30E9tvvz39+vXj0ksvbfIbZUuL3J181KhRdd9rtafWys3N5bnnnuOYY46paxTS09O59dZbGTNmzCqnYd90003MmTOnUetnDzvsMLp06dLgTbYDDjiAgQMH8o9//IPHHntslfdxwAEHcMYZZ1BeXs7HH3/M1VdfTVlZ2UqnMmdlZTFixAgmTJhAWVkZVVVVHHrooSs8PiMjg+uuu47rrruO3377jTfeeIMbbriB2267jYyMDK688spV1rm0xnzfDhs2rMn3Gy2//PILkydP5owzzljhHgbL07Vr1+V+HpbeO6OpGvOa3a1bt0a9dqyu2t9t2223XV3DDuHr+Pbbb8+HH3640ts/9thjfPjhh8s9LePy5OTkNFim9re//Y3ddtuN3XbbjYsvvpg33niDxx57jLfeeouRI0cyffr05a6Rl9SQTbukNbaqNBTCzXCys7MZP378cq+vbfrnz
5/PTjvtRHp6OldccQUDBgwgKSmJzz77jL/97W/LnKpsdTaK2nHHHenZsycA++23H4MHD2b06NF8+umnxMbG1j3G9ddfv8JTkKWmpjZqnfDSlq639rGOPvroFTaVm266KRA20T/88AMvvPACL7/8Mk899RS33347l156aV1zcvjhh7PDDjvw9NNP8+qrr3L99ddz7bXXMnHixLq1s2tqRV/voBFrvwEGDhzYqD9SV3cTsKU19uvZEo+9tJXd75o+5opuX1xcvMpkGsI3AyIT1sTERC688EIuvPBCdt55Z4466igef/xxTjjhBJKTk5fZJG51devWjV133ZXx48c3OB/2jjvuyC+//MLXX3/NwoULGTJkSN35yNdff/0V3l9xcTFXXnklp512GiUlJXVv/JWWlhIEAdOnTyclJYXs7Gx++eUXXn755WXeMOvevTvbb7/9cjeeXJ6111677nt6n332oWfPnpxxxhnssssuKz1l3FFHHcVJJ51EXl4ee++9d6Obl759+3LCCSdw0EEHse666zJ+/PjVatobo6ioiPLy8lUel5ycXLcnQrTUbvA2evToqNZRqzGv2eXl5RQVFTXq/rKyshr1+zZS7QyI5e33kp2dzeeff77S259//vkcdthhJCQk1L2JUbtp5IwZMygvL1/hLIv//e9/PPnkk3UN/yOPPMIll1zCNttswzbbbMNdd93FCy+8sEazGaTOwqZdUqsYMGAAr7/+Otttt91Km5O3336buXPnMnHiRHbccce68dqdxZtbamoqY8eO5fjjj+fxxx9n1KhRdVO/09PTV9pcZmdnk5SUxE8//bTMdcsbW56srCzS0tKoqqpqVCPbtWtXjjjiCI444gjKy8s5+OCDueqqq7jooovqpof36tWL0047jdNOO42CggI233xzrrrqqhU27X379uX1119nwYIFDdL277//vu76tqr2a/XNN9+scHZDY7+ebU3t5/2HH35oMFOivLycX3/9tdHP5ayzzuKBBx5Y5XE77bRT3Y7Yy7PHHntw3HHH0bt3b5588slma9ohnO6+vM244uLiGrzR8vrrrwOs9LnPmzeP0tLSulR6af379+eAAw7gmWeeIT8/H2CZM1NAuEFYZWVlU58KEG66edNNN3HxxRdz0EEHrXDjuIMOOohTTjmF//3vf41K9JfWrVs3BgwY0OgUdHUcfPDBvPPOO6s8bsyYMQ3OLBINEyZMYMCAAWy99dat+rgr2xhwVa/ZH374YaM3g/v111+bvERl8ODBxMfH88cffyxz3axZs1a4aWqtGTNmMGHChOXueL/55pszZMiQus00IwVBwJlnnslZZ51V9xq89LKt3r17L7cuScuyaZfUKg4//HBuv/12/vnPf3L11Vc3uK6yspLS0lIyMzPrUoTI1La8vLxuLWtLGD16NJdccgnXXnsto0aNYtiwYQwYMIAbbriBo446apkUtrCwsC7xGDFiBM8880yDP0Z++uknJk2a1KjHjouL45BDDmHChAl88803y0zzrn0sCHcHj9xhPSEhgY033phJkyZRUVFBfHw8paWlDdKu7OxsevfuzZIlS1ZYwz777MPdd9/NbbfdxkUXXVQ3ftNNNxETE9NsCX1L2GOPPUhLS+Oaa65hr732arCuPQgCYmJiGv31bGtGjBhBQkICt9xyC3vttVddY3DvvfdSXFzMyJEjG3U/q7OmfcmSJVRXVy93ZsjyxhuroKBgmSUy06dP54033mD48OErvW1hYSHXXnstm266aYOmvbi4mNmzZ9OrVy8yMjLIzs7m6aefXub2t9xyC5MnT+aRRx6hV69eQLiMJTY2lscee4xTTjml7nM8c+ZM3nvvvdU+3V+XLl0499xzOe2003j22Wc58MADl3tcamoqd9xxB9OnT2e//fZb4f19+eWXrLXWWnUzhGr99ttvfPvtt8ssoWhOLb2mvaysjN9//52ePXsu8/ya4vPPP+e7777jkksuWe37WF1du
3Zd7ptOq3rNTkpKavE17Wlpaeyzzz688MILfP/992y44YYAfPfdd3z44Yeccsopdccu72uxvJ+lRx99lMcee4wHH3ywwekpI40bN44ZM2bwj3/8o24sJyeH77//nj322IOKigp++umn1XpOUmdk0y6pgfvuu2+559xe3VMf1dppp5045ZRTuOaaa/jiiy/YY489iI+PZ9q0aTzxxBPcfPPNHHrooWy77bZ069aNMWPGcOaZZxITE8NDDz3U6KnXqyM+Pp6zzjqL888/n5dffpm99tqL//73v+y9994MGjSI448/nrXWWos//viDt956i/T09Lr1r5dddhmvvvoq2223HaeeeipVVVXcdtttbLLJJstNH5bnX//6F2+99RZbbbUVJ510EhtvvDFFRUV89tlnvP7663VTJ/fYYw9yc3PZbrvtyMnJ4bvvvuO2225j5MiRpKWlMX/+fNZee20OPfRQhgwZQmpqKq+//joff/wxN9544woff7/99mOXXXbhH//4B9OnT2fIkCG8+uqrPPvss5x99tkrPG1VW5Cens5NN93En/70J7bYYou6c6t/+eWXlJWV8cADDxAbG9vor+fqeu+995a7XGLTTTetW97QVFlZWVx00UVcfvnl7LXXXuy///788MMP3H777WyxxRaNnlK6OmvaZ8+ezfbbb88JJ5zAtttuS0lJCc899xwTJ05k3rx5y5xL/KGHHuK3336rOydz7eniAI455pi6WQODBw9mt912Y+jQoXTr1o1p06Zx7733UlFRwb/+9a8G97nTTjuxzTbbsN5665GXl8fdd99NaWkpL7zwQoO14k8//TTHH388999/P8cddxwpKSnLbZKfeeYZPvroowbXZWVlccIJJ/Df//6X3XbbjYMPPpgFCxZw++23s2jRogZvYjXVcccdx6WXXsq11167wqYdVr7XQq3XXnuNsWPHsv/++7P11luTmprKL7/8wn333ceSJUu47LLLlrnN//3f/9VtrlkrNja2wSn7GvN9u7pr2t99913effddIHzDZeHChXXfEzvuuGPdTKqPPvqIXXbZhbFjxzZ4Hs8//zxffvklEM56+Oqrr+puv//++y/zc1W79CoaU+OHDRvGY489xjnnnMMWW2xBamoq++233ypfs2HN1rTXfj6mTp0KhD+H77//PgAXX3xx3XFXX301b7zxBrvuumvdhqO33HIL3bt3b/D9sLyvxfK+d2t/t+29997LfaNlwYIF/P3vf+fqq69uMHvr0EMP5YorrqC6upoPPviAxYsXs88++6zWc5c6najtWy+pTak9FdGK/s2YMWOFp3xb3mmUak8Js7S77747GDZsWJCcnBykpaUFgwcPDi644IJg1qxZdcd88MEHwdZbbx0kJycHvXv3Di644ILglVdeWeaUNjvttNMqT6ezvJqWdyqk4uLiICMjo8FpiT7//PPg4IMPDnr06BEkJiYGffv2DQ4//PDgjTfeaHDbN954I9hss82ChISEYMCAAcF///vf4Nxzzw2SkpIaHAes8HRs+fn5wemnnx706dMniI+PD3Jzc4PddtstuPvuu+uOueuuu4Idd9yxrp4BAwYE559/flBcXBwEQRAsWbIkOP/884MhQ4YEaWlpQdeuXYMhQ4YEt99+e4PHWvqUb0EQBAsWLAj++te/Br179w7i4+ODgQMHBtdff32DU6St7Dn07dt3ladkqv3+Wfo0WEtb2ekHlz7lW63nnnsu2HbbbYPk5OQgPT092HLLLYNHHnmkwTGN+Xo25nRZkVZ1uqbIU5+t6HO3qtMt3nbbbcGGG24YxMfHBzk5OcGpp54azJs3r8ExTf1ZWJXFixcH//d//xdst912Qc+ePetOzTZs2LDg8ccfX+b4lZ02K/JnduzYscHw4cODbt26BV26dAl69+4djBo1Kvjqq6+Wuc+//vWvwbrrrhskJiYGWVlZwVFHHRX8/PPPyxxX+/mLfF1anhW9VlVUVAS33nprM
HTo0CA1NTVITU0Ndtlll+DNN99c9ScqWPnP9WWXXdbgc9CYU2sGwbKnfPvll1+CSy+9NNh6662D7OzsoEuXLkFWVlYwcuTIZeqs/R5e3r+4uLggCJr2fbu6VlZH5P3X1rL0Y67sVIVLf62rqqqCtdZaK9h8882bXOfKfnaW95q1vFO+lZaWBkcddVSQmZkZUHNaviBY9Wv2mlrZ13Bpn376aTBixIiga9euQVpaWnDAAQcEP/74Y4NjVvS1WNqqXifPP//8YPjw4cv8/igtLQ2OPfbYIDMzM9hwww2Dl19+uWlPWOrEYoKgBeMrSeqkDjzwQKZOnVq3c7nUnu28886MGzeu2U75JkmSGm/F5yKRJDXK0jtzT5s2jZdeeomdd945OgVJkiSpw3BNuyStoXXXXZfjjjuu7tzZd9xxBwkJCVxwwQXRLk1qFscdd5znUpYkKUqcHi9Ja+j444/nrbfeIi8vj8TERLbZZhuuvvpqNt9882iXJkmSpHbOpl2SJEmSpDbKNe2SJEmSJLVRNu2SJEmSJLVRbkQHVFdXM2vWLNLS0oiJiYl2OZIkSZKkDi4IAhYsWEDv3r2JjV1xnm7TDsyaNYs+ffpEuwxJkiRJUiczY8YM1l577RVeb9MOpKWlAeEnKz09PcrVSJIkSZI6iopqmFQEb86D2l3g42Jgh/gSjtu0T10/uiI27VA3JT49Pd2mXZIkSZLULH5ZBA/kQV4FxKeGY32TYEwupC2B42CVS7Rt2iVJkiRJakYV1fDsHHg9Il3vEgP79oA9u0NsDJQsadx92bRLkiRJktRMfq5J1/PL68f6JcFxudArsen3Z9MuSZIkSdIaKq+GZ+Y0XLveJQb27wm7dwvT9dVh0y5JkiRJ0hqYVham64UV9WPrJsOYHMhdjXQ9kk27JEmSJEmrYUk1PF0Ib82vH4uPgQN7wq5rkK5HsmmXJEmSJKmJfiiDB/NgTkS6PiA53Bk+J6H5HsemXZIkSZKkRlpcBRPnwDvz68fiY+CgLNgls3nS9Ug27ZIkSZIkNcJ3C+GhfJgbka4PTIZjcyG7GdP1SDbtkiRJkiStxOIqeLIQ3iuuH0uMhYN6ws6ZENPM6Xokm3ZJkiRJklZg6kJ4KA/mVdaPbZACx+ZAzxZK1yPZtEuSJEmStJRFVfBEIXywVLp+aBbskNGy6Xokm3ZJkiRJkiJ8UxquXZ8fka5vlALH5EKP+NatxaZdkiRJkiSgrAoeL4DJJfVjSTXp+vatmK5HsmmXJEmSJHV6X5XCw/lQHJGuD+oKR+dA91ZO1yPZtEuSJEmSOq2FVfBYAUxZKl0/PBu2TY9Ouh7Jpl2SJEmS1Cl9sQDGF0BJRLq+SU263i2K6Xokm3ZJkiRJUqdSWgmPFsDHC+rHUuLgiCzYqg2k65Fs2iVJkiRJncZnC2BCPiyoqh/bNBVGZ0NmG0nXI9m0S5IkSZI6vAWV8EgBfLpUuj4qG7ZMa1vpeiSbdkmSJElShxUEYaP+SAGURqTrQ1NhdA6kt/GuuI2XJ0mSJEnS6impDKfCf15aP9Y1Do7MhuFtOF2PZNMuSZIkSepQgiDcZO7RgvCUbrU2Twsb9raerkdqR6VKkiRJkrRyxZUwPh++jEjX0+LgyBwYlha9ulaXTbskSZIkqd0LAphSAo8VQllEuj48LdxsLq2ddr/ttGxJkiRJkkLzK+DhfPh6Yf1YWly40dxm7TBdj2TTLkmSJElql4IAJpfA4wWwqLp+fMv0MF3vGhe92pqLTbskSZIkqd2ZVwEP5cPUiHQ9vQscnQNDUqNXV3OzaZckSZIktRtBAB8UwxOFsDgiXd86HQ7vIOl6JJt2SZIkSVK7UFQBD+bBd2X1Y5k16frgDpSuR7JplyRJkiS1aUEA7xXDk4WwJCJd3zYDDsuClA6WrkeyaZckSZIktVlzysO1699HpOvdusAxuTCoa/Tqai027ZIkSZKkNicI4J35MHFOw3R9+ww4NAuSO3C6HsmmX
ZIkSZLUphSWw4P58GNEut49Ho7JgY07QboeyaZdkiRJktQmBAG8NR+engPlEen6jplwSE9I6iTpeiSbdkmSJElS1BWUhzvDT1tUP9YjHo7NgQ07WboeyaZdkiRJkhQ11QG8OQ+emQMVQf34zplwcBYkxkattDbBpl2SJEmSFBX55TAuD36JSNd7xsOYXFg/JXp1tSU27ZIkSZKkVlUdwOvz4LmIdD0G2KUbHNjTdD2STbskSZIkqdXMXgIP5MGvi+vHshPCtesDTdeXYdMuSZIkSWpx1QG8WgTPz4XKiHR9t25wQE9IMF1fLpt2SZIkSVKLmrUkXLv+W0S6npMQrl0fkBy9utoDm3ZJkiRJUouoCuCVInhhbngZwnR99+6wfw+IN11fJZt2SZIkSVKzm7k4TNdnLKkf61WTrvc3XW80m3ZJkiRJUrOprIaXi+Cloobp+p7dYV/T9SazaZckSZIkNYsZNen6zIh0vXcijMmBfqbrq8WmXZIkSZK0Riqrw2R9UlG4SzxAbAzs1R1GdocupuurzaZdkiRJkrTaflscnnf9j4h0fe3EcO36OknRq6ujsGmXJEmSJDVZZXW4K/wr8xqm6/t0h71N15uNTbskSZIkqUmmLwrXrs8urx/rU5Ou9zFdb1Y27ZIkSZKkRqmohufnwqtFUBOuExcDI3uE69fjYqJaXodk0y5JkiRJWqVfFoVr1/Mi0vV1kuC4XFgrMXp1dXQ27ZIkSZKkFaqohmfnwOvz6tP1LjHhOdf3MF1vcTbtkiRJkqTl+rkmXc+PSNf7JYVr13ubrrcKm3ZJkiRJUgPl1fDMHHhzqXR9/56we7dwl3i1Dpt2SZIkSVKdaWVhul5YUT+2bjKMyYFc0/VWZ9MuSZIkSWJJNTxdCG/Nrx+Lj4EDesJuputRY9MuSZIkSZ3cD2XwYB7MiUjXBySHa9dzEqJXl2zaJUmSJKnTWlwFE+fAO/Prx+Jj4KAs2CXTdL0tsGmXJEmSpE7o+4XwYD7MjUjXBybDsbmQbbreZti0S5IkSVInsrgKniyE94rrxxJi4eCesHMmxJiutyk27ZIkSZLUSXy7MFy7Pq+yfmz9lHBn+J6m622STbskSZIkdXCLquCJQvggIl1PjIVDsmDHDNP1tsymXZIkSZI6sG9K4aF8mB+Rrm+YEq5d7xEfvbrUODbtkiRJktQBlVXB4wUwuaR+LCkWDs2C7U3X2w2bdkmSJEnqYL4qhYfzoTgiXd+4KxyTA91N19sVm3ZJkiRJ6iAW1qTr/1sqXT88G7ZNN11vj2zaJUmSJKkD+GIBjC+Akoh0fZOucHQOdDNdb7ds2iVJkiSpHSuthEcL4OMF9WMpcXB4Fmxtut7u2bRLkiRJUjv12QKYkA8LqurHNk2F0dmQabreIdi0S5IkSVI7s6AmXf9kqXR9VDZsmWa63pHYtEuSJElSOxEE8OkCeKQASiPS9aGpMDoH0u3wOhy/pJIkSZLUDpRUhs36ZxHpetc4ODIbhpuud1g27ZIkSZLUhgVBOA3+kYLwlG61Nk8LG3bT9Y7NL68kSZIktVHFlTA+H74srR9LjYOjcmBYWvTqUuuxaZckSZKkNiYIYEoJPFYIZRHp+vC0cLO5NDu5TsMvtSRJkiS1IfMrYHwBfBWRrqfVpOubm653OjbtkiRJktQGBAFMLoEnlkrXt0yHI7Ig1e6tU/LLLkmSJElRNq8CHs6HbxbWj6V3gaNzYEhq9OpS9Nm0S5IkSVKUBAF8WAKPF8Di6vrxrdPh8OzwlG7q3GzaJUmSJCkKiirgoXz4NiJdz+wCo3NgU9N11bBplyRJkqRWFATwXjE8VdgwXd82Aw7LghTTdUWwaZckSZKkVjK3Ah7Mg+/L6scyu8AxObCJ6bqWw6ZdkiRJklpYEMA782HiHFgSka5vnwGHZkGy6bpWwKZdkiRJklrQnHJ4IB9+jEjXu3WBY3Nh467Rq0vtg027JEmSJLWAIIC354fpenlEur5jJ
hzSE5JM19UINu2SJEmS1MwKysO169MW1Y/1iIdjc2BD03U1gU27JEmSJDWT6gDenAfPzIGKoH5850w4yHRdq8GmXZIkSZKaQX45PJAHP0ek6z3jYUwurJ8SvbrUvtm0S5IkSdIaqA7g9Xnw3FLp+q7d4MCekBgbvdrU/tm0S5IkSdJqmr0EHsyHXyLS9eyEcO36QNN1NQObdkmSJElqouoAXqtJ1ytr0vUYYLducEBPSDBdVzOxaZckSZKkJpi1JFy7Pn1x/VhOQrh2fUBy9OpSx2TTLkmSJEmNUBXAK0Xw4tyG6fru3WH/HhBvuq4WYNMuSZIkSaswczE8kA+/R6TruTXp+rqm62pBNu2SJEmStAJVAUyaCy8VhZchTNf37A77mq6rFdi0S5IkSdJyzFgcrl2fsaR+rHcijMmBfqbraiU27ZIkSZIUobIaJhWF6Xp1TboeGwN7dYeR3aGL6bpakU27JEmSJNX4fTGMy4M/ItL1tRLDtet9k6JXlzovm3ZJkiRJnV5lNbxYBC8vla7v0x32Nl1XFNm0S5IkSerUpi8Kd4afFZGur50Ix+VCH9N1RZlNuyRJkqROqaIanp8LrxZBTbhOXG263iO8LEWbTbskSZKkTueXReHO8Hnl9WPrJIU7w69tuq42xKZdkiRJUqdRUQ3PzYXXItL1LjHhOdf36G66rrbHpl2SJElSp/BzTbqeH5Gu90sKd4bvnRi9uqSVsWmXJEmS1KGVV8Ozc+CNeQ3T9f17wu7dwl3ipbbKpl2SJElShzWtDB7Mh4KIdL1/Tbrey3Rd7YBNuyRJkqQOZ0k1PF0Ib8+vT9fja9L1Eabrakds2iVJkiR1KD+WhWvX51TUjw1IDtP1nITo1SWtDpt2SZIkSR3CkmqYWJOu14qPgYOyYJdM03W1TzbtkiRJktq97xeGa9fnRqTrA5Ph2FzINl1XO2bTLkmSJKndWlwFT82Bd+fXjyXEwsE9YedMiDFdVztn0y5JkiSpXfp2ITyYB/Mq68fWT4ExOdDTdF0dhE27JEmSpHZlURU8UQgfFNePJdak6ztlmq6rY7FplyRJktRufFMKD+c3TNc3TAnXrveIj15dUkuxaZckSZLU5pXVpOsfRqTrSbFwaBZsn2G6ro4rNpoPXlVVxSWXXEL//v1JTk5mwIAB/POf/yQIgrpjgiDg0ksvpVevXiQnJzNixAimTZvW4H6KiooYPXo06enpZGZmcuKJJ1JaWtraT0eSJElSC/iqFC6f3rBh37grjO0HO2TasKtji2rTfu2113LHHXdw22238d1333Httddy3XXXceutt9Ydc91113HLLbdw5513MmXKFLp27cqee+7J4sWL644ZPXo0U6dO5bXXXuOFF17g3Xff5eSTT47GU5IkSZLUTBZWwf2z4T9/wPya6fBJseFU+DPXgu5Oh1cnEBNExtqtbN999yUnJ4d77723buyQQw4hOTmZhx9+mCAI6N27N+eeey7nnXceAMXFxeTk5DBu3DhGjRrFd999x8Ybb8zHH3/M8OHDAXj55ZfZZ599mDlzJr17915lHSUlJWRkZFBcXEx6enrLPFlJkiRJjfZlzdr1koi165t0haNzoJvNujqAxvahUU3at912W9544w1+/PFHAL788kvef/999t57bwB+/fVX8vLyGDFiRN1tMjIy2GqrrZg8eTIAkydPJjMzs65hBxgxYgSxsbFMmTJluY+7ZMkSSkpKGvyTJEmSFH2llfDfWXD7H/UNe3IsjMmFM9ayYVfnE9WN6C688EJKSkrYcMMNiYuLo6qqiquuuorRo0cDkJeXB0BOTk6D2+Xk5NRdl5eXR3Z2doPru3TpQvfu3euOWdo111zD5Zdf3txPR5IkSdIa+HwBjM+HBVX1Y5umwuhsyLRZVycV1ab98ccfZ/z48UyYMIFBgwbxxRdfcPbZZ9O7d2/GjBnTYo970UUXcc4559R9XFJSQp8+fVrs8SRJkiSt2IJKeLQAP
llQP5YSB6OyYcs0N5pT5xbVpv3888/nwgsvZNSoUQAMHjyY3377jWuuuYYxY8aQm5sLQH5+Pr169aq7XX5+PkOHDgUgNzeXgoKCBvdbWVlJUVFR3e2XlpiYSGJiYgs8I0mSJElN8ekCmJAPpRHp+pBUGJ0DGZ6gWorumvaysjJiYxuWEBcXR3V1NQD9+/cnNzeXN954o+76kpISpkyZwjbbbAPANttsw/z58/n000/rjnnzzTeprq5mq622aoVnIUmSJKmpSirhrllw96z6hr1rHPypF5za24ZdqhXVH4X99tuPq666inXWWYdBgwbx+eef83//93+ccMIJAMTExHD22Wdz5ZVXMnDgQPr3788ll1xC7969OfDAAwHYaKON2GuvvTjppJO48847qaio4IwzzmDUqFGN2jlekiRJUusJgnAa/CMF4Sndam2WCkflQLrNutRAVH8kbr31Vi655BJOO+00CgoK6N27N6eccgqXXnpp3TEXXHABCxcu5OSTT2b+/Plsv/32vPzyyyQlJdUdM378eM444wx22203YmNjOeSQQ7jlllui8ZQkSZIkrUBJZbjR3Bel9WOpcXBkNgxz7bq0XFE9T3tb4XnaJUmSpJYTBPDRgnCzubKIdH14WrjZXJrpujqhxvah/nhIkiRJajHzK2B8AXwVka6nxYVT4TdPi15dUnth0y5JkiSp2QUB/K8EHi9smK5vmQ5HZEGqnYjUKP6oSJIkSWpW8yrg4Xz4ZmH9WHoXGJ0NQ03XpSaxaZckSZLULIIAPiyBxwtgcXX9+NbpcHh2eEo3SU1j0y5JkiRpjRVVwEP58G1Eup7ZBUbnwKap0atLau9s2iVJkiSttiCA94vhycKG6fq2GXBYFqSYrktrxKZdkiRJ0mqZWwEP5cF3ZfVjmV3gmBzYxHRdahY27ZIkSZKaJAjg3WJ4qhCWRKTr29Wk68mm61KzsWmXJEmS1GhzyuGBfPgxIl3v1gWOyYVBXaNXl9RR2bRLkiRJWqUggLfnw8Q5UB6Rru+QAYdmQZLputQibNolSZIkrVRBOTyYB9MW1Y/1iA/Xrm9kui61KJt2SZIkSctVHcBb8+HpQqgI6sd3zoSDepquS63Bpl2SJEnSMvLL4YE8+DkiXe8ZD8fmwgYp0atL6mxs2iVJkiTVqQ7gjXnw7JyG6foumXBQFiTGRq00qVOyaZckSZIEQN6ScGf4XyLS9ax4GJMLA03XpaiwaZckSZI6ueoAXpsHz82Bypp0PQbYtRsc2BMSTNelqLFplyRJkjqxWUvCtevTF9ePZSfAcbkwIDl6dUkK2bRLkiRJnVB1AK8UwQtzG6bru3eH/XtAvOm61CbYtEuSJEmdzB816fpvEel6bkK4dn1d03WpTbFplyRJkjqJqgBeLoIX54aXIUzX9+gO+5muS22STbskSZLUCcxYHKbrM5bUj/WqWbvez3RdarNs2iVJkqQOrLIaJhXBS0XhOnaA2BjYsxvs2wO6mK5LbZpNuyRJktRB/V6Trs+MSNd7J4bpet+k6NUlqfFs2iVJkqQOprIaXiwK169Hpuv7dIe9u5uuS+2JTbskSZLUgfy2GMblhedfr7V2Tbrex3Rdands2iVJkqQOoKI6POf6q/Mapusju8NeputSu2XTLkmSJLVzvy4K167PLq8fWycJxuTA2qbrUrtm0y5JkiS1UxXV8NxceK0IasJ14mLCXeH37B5eltS+2bRLkiRJ7dDPNel6fkS63jcpXLveOzF6dUlqXjbtkiRJUjtSXg3PzoE35tWn611iYL8esEf3cB27pI7Dpl2SJElqJ6aVwYP5UBCRrvdPgjG50Mt0XeqQbNolSZKkNm5JNTwzB95aKl0/oCeM6Ga6LnVkNu2SJElSG/ZjWbh2fU5F/diA5DBdz0mIXl2SWodNuyRJktQGLamGiYXw9vz6sfgYOLAn7Gq6LnUaNu2SJElSG/P9wnDt+tyIdH29mnQ923Rd6lRs2iVJkqQ2Y
nEVPDUH3p1fP5YQCwf1hF0yIcZ0Xep0bNolSZKkNuC7mnS9KCJdXz8Fjs2BLNN1qdOyaZckSZKiaFEVPFkI7xfXjyXGwsE9YadM03Wps7NplyRJkqJk6kJ4KA/mVdaPbZgCx+RAT9N1Sdi0S5IkSa2urAqeKIQPl0rXD82CHTJM1yXVs2mXJEmSWtHXpfBwPsyPSNc3SoFjcqFHfPTqktQ22bRLkiRJrWBhFTxeAP8rqR9LioXDsmA703VJK2DTLkmSJLWwL2vS9ZKIdH1Q13DtejfTdUkrYdMuSZIktZCFVfBoAXwUka4nx8Lh2bBNuum6pFWzaZckSZJawOcLYHw+LKiqHxvcFY7OgUzTdUmNZNMuSZIkNaMFlWG6/smC+rGUODgiC7YyXZfURDbtkiRJUjP5dAE8slS6PiQVRudAhn95S1oNvnRIkiRJa2hBJUwogM8i0vWucTAqG7ZIM12XtPps2iVJkqTVFAThNPhHCsJN52ptlgpH5UC6f21LWkO+jEiSJEmroaQy3Gjui9L6sdQ4ODIbhpmuS2omNu2SJElSEwQBfLQg3GyuLCJdH54WTodP8y9sSc3IlxRJkiSpkeZXwPgC+CoiXU+LC6fCb54WvbokdVw27ZIkSdIqBAH8rwQeL2yYrm9Rk66n+le1pBbiy4skSZK0EvMq4OF8+GZh/Vh6FxidDUNN1yW1MJt2SZIkaTmCAD4sgccLYHF1/fhW6XBEdnhKN0lqaTbtkiRJ0lKKatL1qRHpekYXODoHNk2NXl2SOh+bdkmSJKlGEMD7xfBkYcN0fdsMOCwLUkzXJbUym3ZJkiQJmFsBD+XBd2X1Y5ld4Jgc2MR0XVKU2LRLkiSpUwsCeLcYniqEJRHp+nY16Xqy6bqkKLJplyRJUqc1pxwezIcfItL1bl3gmFwY1DV6dUlSLZt2SZIkdTpBAG/Ph4lzoDwiXd8hAw7NgiTTdUlthE27JEmSOpWCcngwD6Ytqh/rER+uXd/IdF1SG2PTLkmSpE4hCODN+fB0IVQE9eM7Z8JBPU3XJbVNNu2SJEnq8PLL4YE8+DkiXe8ZD8fmwgYp0atLklbFpl2SJEkdVnUAb8yDZ+c0TNd3yYSDsiAxNmqlSVKj2LRLkiSpQ8pbAg/kwy8R6XpWTbq+vum6pHbCpl2SJEkdSnUAr82D5+ZAZU26HgPs2g0O6Gm6Lql9sWmXJElShzFrSbh2ffri+rHsBDguFwYkR68uSVpdNu2SJElq96oDeKUIXpjbMF0f0Q327wkJpuuS2imbdkmSJLVrf9Sk679FpOs5Nen6uqbrkto5m3ZJkiS1S1UBvFwEL84NL0OYru/RHfbrAfGm65I6AJt2SZIktTszF8O4PJixpH6sV0263s90XVIHYtMuSZKkdqOyGiYVwUtF4Tp2gNgY2LMb7NsDupiuS+pgbNolSZLULsyoSddnRqTrvRPDdL1vUvTqkqSWZNMuSZKkNq2yGl4sCtevR6bre3eHfbqbrkvq2GzaJUmS1Gb9VpOuz4pI19euSdf7mK5L6gRs2iVJktTmVFSHu8K/Mq9huj6yO+xlui6pE7FplyRJUpvy66LwvOuzy+vH+tSk62ubrkvqZGzaJUmS1CZUVMNzc+G1IqgJ14mLCXeF37N7eFmSOhubdkmSJEXdL4vCtev5Eel636QwXe+dGL26JCnabNolSZIUNeXV8OwceGNefbreJQb26wF7dA/XsUtSZ2bTLkmSpKj4qQweyIeCiHS9X0263st0XZIAm3ZJkiS1siU16fqbS6XrB/SEEd1M1yUpkk27JEmSWs2PZeHO8HMq6sfWTYYxOZBrui5Jy7BplyRJUotbUg0TC+Ht+fVj8TFwYE/Y1XRdklbIpl2SJEkt6vuF8GA+zI1I19dLhjG5kJ0QvbokqT2waZckSVKLWFwFT82Bd+fXj8XHwMFZsEsmxJiuS9Iq2bRLkiSp2X1Xk64XRaTr66fAsTmQZbouSY1m0y5JkqRms
6gKniqE94rrxxJj4eCesFOm6bokNZVNuyRJkprF1IXwUB7Mq6wf26AmXe9pui5Jq8WmXZIkSWukrAqeKIQPl0rXD82CHTJM1yVpTdi0S5IkabV9XQoP58P8iHR9oxQ4Jhd6xEevLknqKGzaJUmS1GRlVfBYAfyvpH4sKRYOy4LtTNclqdnYtEuSJKlJviyF8flQHJGuD+oKx+RAN9N1SWpWNu2SJElqlIU16fqUiHQ9ORYOz4Zt0k3XJakl2LRLkiRplT5fABMKoCQiXR/cFY7OgUzTdUlqMTbtkiRJWqEFlfBoAXyyoH4sJQ6OyIKtTNclqcXZtEuSJGm5PlsAE/JhQVX92JBUGJ0DGf4VKUmtwpdbSZIkNbCgMpwK/1lEut41DkZlwxZppuuS1Jps2iVJkgRAEMCnNWvXF0ak65ulwlE5kO5fjpLU6nzplSRJEiWV4VT4z0vrx1Lj4MhsGGa6LklRY9MuSZLUiQUBfLQgPJVbZLo+LC1s2NP8a1GSosqXYUmSpE6quBLG58OXEel6Wlw4FX7ztOjVJUmqZ9MuSZLUyQQBTCmBxwqhLCJd3yIt3Gwu1b8QJanN8CVZkiSpE5lfAQ/lwzcL68fSu8BR2bCZ6boktTk27ZIkSZ1AEMDkEni8ABZV149vlQ5HZIendJMktT027ZIkSR3cvJp0fWpEup7RBUbnwJDU6NUlSVo1m3ZJkqQOKgjg/WJ4shAWR6Tr26TD4dmQYrouSW2eTbskSVIHNLcCHsqD78rqxzK7wNE5MNh0XZLaDZt2SZKkDiQI4L2adH1JRLq+XQYcmmW6LkntjU27JElSBzGnHB7Mhx8i0vVuXeCYXBjUNXp1SZJWn027JElSOxcE8M58mDinYbq+Q026nmS6LkntVmy0C/jjjz84+uij6dGjB8nJyQwePJhPPvmk7vogCLj00kvp1asXycnJjBgxgmnTpjW4j6KiIkaPHk16ejqZmZmceOKJlJaWtvZTkSRJanWF5fB/M+GRgvqGvXs8nL02HJ1rwy5J7V1Um/Z58+ax3XbbER8fz6RJk/j222+58cYb6datW90x1113Hbfccgt33nknU6ZMoWvXruy5554sXry47pjRo0czdepUXnvtNV544QXeffddTj755Gg8JUmSpFYRBPDGPLh8OvwYMR1+p0wY2xc2cjq8JHUIMUEQBNF68AsvvJAPPviA9957b7nXB0FA7969OffccznvvPMAKC4uJicnh3HjxjFq1Ci+++47Nt54Yz7++GOGDx8OwMsvv8w+++zDzJkz6d279yrrKCkpISMjg+LiYtLT05vvCUqSJLWAgnJ4IA9+WlQ/1jMejsmBDW3WJaldaGwfGtWk/bnnnmP48OEcdthhZGdns9lmm3HPPffUXf/rr7+Sl5fHiBEj6sYyMjLYaqutmDx5MgCTJ08mMzOzrmEHGDFiBLGxsUyZMmW5j7tkyRJKSkoa/JMkSWrrqgN4vQiumN6wYd8lEy7tZ8MuSR1RVJv2X375hTvuuIOBAwfyyiuvcOqpp3LmmWfywAMPAJCXlwdATk5Og9vl5OTUXZeXl0d2dnaD67t06UL37t3rjlnaNddcQ0ZGRt2/Pn36NPdTkyRJalZ5S+D6GfBEIVTUzJPMiodz+8CoHEiM+k5FkqSWENXd46urqxk+fDhXX301AJttthnffPMNd955J2PGjGmxx73ooos455xz6j4uKSmxcZckSW1SdQCvz4Nn50BlTbMeA+zaDQ7oabMuSR1dVJv2Xr16sfHGGzcY22ijjXjqqacAyM3NBSA/P59evXrVHZOfn8/QoUPrjikoKGhwH5WVlRQVFdXdfmmJiYkkJiY219OQJElqEbOXwLg8mF6//y7ZCTAmB9ZLiV5dkqTWE9X3Zrfbbjt++OGHBmM//vgjffv2BaB///7k5ubyxhtv1F1fUlLClClT2GabbQDYZpttmD9/Pp9++mndMW+++SbV1dVstdVWrfAsJEmSmld1A
JPmwpW/1TfsMcDu3eCSvjbsktSZRDVp/+tf/8q2227L1VdfzeGHH85HH33E3Xffzd133w1ATEwMZ599NldeeSUDBw6kf//+XHLJJfTu3ZsDDzwQCJP5vfbai5NOOok777yTiooKzjjjDEaNGtWoneMlSZLaklk16fpvEel6TgIclwvrJkevLklSdET1lG8AL7zwAhdddBHTpk2jf//+nHPOOZx00kl11wdBwNixY7n77ruZP38+22+/Pbfffjvrr79+3TFFRUWcccYZPP/888TGxnLIIYdwyy23kJqa2qgaPOWbJEmKtqoAXimCF+aGlyFM1/foDvv1gHjXrktSh9LYPjTqTXtbYNMuSZKiaebiMF2fsaR+rFcCjMmF/qbrktQhNbYPjer0eEmSpM6sshpeLoIXi8J17ACxMbBnNxhpui5JwqZdkiQpKmbUpOszI9L13onh2vW+SdGrS5LUtti0S5IktaLK6jBZf3mpdH3v7rBPd+hiui5JimDTLkmS1Ep+q0nXZ0Wk62snhmvX1zFdlyQtx2q9l/vee+9x9NFHs8022/DHH38A8NBDD/H+++83a3GSJEkdQWU1PFMI//q9vmGPjQl3hb9oHRt2SdKKNblpf+qpp9hzzz1JTk7m888/Z8mS8DdPcXExV199dbMXKEmS1J5NXwRX/gaTIqbD90mEf6wD+/Z0OrwkaeWa/Gviyiuv5M477+See+4hPj6+bny77bbjs88+a9biJEmS2quKaphYk67PLg/H4mLggJ5wUV9Y23RdktQITV7T/sMPP7DjjjsuM56RkcH8+fOboyZJkqR27ZdF4dr1/PL6sb5J4dr1tRKjV5ckqf1pctOem5vLTz/9RL9+/RqMv//++6y77rrNVZckSVK7U14Nz86BN+ZBzUx4usTAvj1gz+7hOnZJkpqiyU37SSedxFlnncV9991HTEwMs2bNYvLkyZx33nlccsklLVGjJElSm/dTGTyQDwUR6Xq/pPC8671M1yVJq6nJTfuFF15IdXU1u+22G2VlZey4444kJiZy3nnn8Ze//KUlapQkSWqzltSk628ula7v3xN272a6LklaMzFBEASrPmxZ5eXl/PTTT5SWlrLxxhuTmpra3LW1mpKSEjIyMiguLiY9PT3a5UiSpHbixzJ4MA8KK+rH1k2GMTmQa7ouSVqJxvahTd49/oQTTmDBggUkJCSw8cYbs+WWW5KamsrChQs54YQT1qhoSZKk9mBJNTyaDzfOqG/Y42PgsCw4v48NuySp+TQ5aY+Li2P27NlkZ2c3GJ8zZw65ublUVlY2a4GtwaRdkiQ11vcL4aF8mBORrg9IDneGz0mIXl2SpPalsX1oo9e0l5SUEAQBQRCwYMECkpLqTy5aVVXFSy+9tEwjL0mS1FEsroKJc+Cd+fVj8TFwUBbskunadUlSy2h0056ZmUlMTAwxMTGsv/76y1wfExPD5Zdf3qzFSZIktQXf1aTrcyPS9YHJcGwuZJuuS5JaUKOb9rfeeosgCNh111156qmn6N69e911CQkJ9O3bl969e7dIkZIkSdGwuAqeLIT3iuvHEmPhoJ6wcybEmK5LklpYo5v2nXbaCYBff/2VPn36EBvb5D3sJEmS2o2pC+GhPJgXsV3PBilwbA70NF2XJLWSJp+nvW/fvgCUlZXx+++/U15e3uD6TTfdtHkqkyRJioKymnT9g6XS9UOzYIcM03VJUutqctNeWFjI8ccfz6RJk5Z7fVVV1RoXJUmSFA3flIZr1+dHpOsbpcAxudAjPnp1SZI6rybPcT/77LOZP38+U6ZMITk5mZdffpkHHniAgQMH8txzz7VEjZIkSS2qrArGzYZb/6hv2JNi4egcOGttG3ZJUvQ0OWl/8803efbZZxk+fDixsbH07duX3XffnfT0dK655hpGjhzZEnVKkiS1iK9K4eF8KI5I1wd1DRv27jbrkqQoa3LTvnDhwrrzsXfr1o3CwkLWX399Bg8ezGeffdbsB
UqSJLWEhVXwWAFMKakfS46Fw7Jh23TXrkuS2oYmN+0bbLABP/zwA/369WPIkCHcdddd9OvXjzvvvJNevXq1RI2SJEnN6osFML4ASiLS9U1q0vVupuuSpDakyU37WWedxezZswEYO3Yse+21F+PHjychIYFx48Y1d32SJEnNprQSHi2AjxfUj6XEwRFZsJXpuiSpDYoJgiBYkzsoKyvj+++/Z5111qFnz57NVVerKikpISMjg+LiYtLT06NdjiRJagGfLYAJ+bAg4kQ3m6bC6GzINF2XJLWyxvahTU7al5aSksLmm2++pncjSZLUIhZUwiMF8GlEut41Do7Ihi3TTNclSW1bk5v2IAh48skneeuttygoKKC6urrB9RMnTmy24iRJklZXEISN+iMFUBqRrg9NhdE5kL7G0YUkSS2vyb+uzj77bO666y522WUXcnJyiPHtaUmS1MaUVIZT4T8vrR/rGgdHZsNw03VJUjvS5Kb9oYceYuLEieyzzz4tUY8kSdJqC4Jwk7lHC8JTutXaPA2OyoY003VJUjvT5F9dGRkZrLvuui1RiyRJ0morroTx+fBlRLqeFgdH5sCwtOjVJUnSmoht6g0uu+wyLr/8chYtWtQS9UiSJDVJEMD/iuGy6Q0b9i3SYGw/G3ZJUvvW5KT98MMP55FHHiE7O5t+/foRH9/wHCmfffZZsxUnSZK0MvMr4OF8+Hph/VhaXLjR3GY265KkDqDJTfuYMWP49NNPOfroo92ITpIkRUUQwOQSeLwAFkWcyGar9PBUbl3jolebJEnNqclN+4svvsgrr7zC9ttv3xL1SJIkrdS8CngoH6ZGpOvpXeDoHBiSGr26JElqCU1u2vv06UN6enpL1CJJkrRCQQAfFMMThbA4Il3fOh0ON12XJHVQTd6I7sYbb+SCCy5g+vTpLVCOJEnSsuZWwM0zw4S9tmHP7AJnrAXH97JhlyR1XE1O2o8++mjKysoYMGAAKSkpy2xEV1RU1GzFSZKkzi0I4L1ieLIQlkSk69tlwKFZkGKzLknq4JrctP/73/9ugTIkSZIamlMeJuvfl9WPdesCx+TCoK7Rq0uSpNa0WrvHS5IktZQggHfmw8Q5DdP1HTLgkCxINl2XJHUijWraS0pK6jafKykpWemxblInSZJWV2E5PJgPP0ak693j4dgc2Mh0XZLUCTWqae/WrRuzZ88mOzubzMzM5Z6bPQgCYmJiqKqqavYiJUlSxxYE8NZ8eHoOlEek6ztmwiE9Icl0XZLUSTWqaX/zzTfp3r07AG+99VaLFiRJkjqXgnJ4IA9+WlQ/1qMmXd/QdF2S1Mk1qmnfaaed6i7379+fPn36LJO2B0HAjBkzmrc6SZLUYVUH8OY8eGYOVAT14ztnwsFZkNjkE9NKktTxNHkjuv79+9dNlY9UVFRE//79nR4vSZJWKb8cxuXBLxHpes94GJML66dEry5JktqaJjfttWvXl1ZaWkpSUlKzFCVJkjqm6gBenwfPzoHKmnQ9Bti1GxzQ03RdkqSlNbppP+eccwCIiYnhkksuISWl/m3wqqoqpkyZwtChQ5u9QEmS1DHMXhKuXf91cf1YdgKMyYH1TNclSVquRjftn3/+ORAm7V9//TUJCQl11yUkJDBkyBDOO++85q9QkiS1a9UBvFoEz89tmK7vVpOuJ5iuS5K0Qo1u2mt3jT/++OO5+eabPR+7JElapVlLwrXrv0Wk6zkJ4dr1AcnRq0uSpPaiyWva77///gYfl5SU8Oabb7Lhhhuy4YYbNlthkiSp/aoK4JUieGFueBnCdH337rB/D4g3XZckqVGa3LQffvjh7LjjjpxxxhksWrSI4cOHM336dIIg4NFHH+WQQw5piTolSVI7MXNxmK7PWFI/1qsmXe9vui5JUpM0+X3ud999lx122AGAp59+miAImD9/PrfccgtXXnllsxcoSZLah8pqeGEOXPV7fcMeA+zdHf7R14ZdkqTV0eSmvbi4mO7duwPw8
ssvc8ghh5CSksLIkSOZNm1asxcoSZLavhmL4Zrfw83mqmumw/dOhIv6woFZToeXJGl1NXl6fJ8+fZg8eTLdu3fn5Zdf5tFHHwVg3rx5nqddkqROprIaXiqCSUX1zXpsDOzVHUZ2hy4265IkrZEmN+1nn302o0ePJjU1lb59+7LzzjsD4bT5wYMHN3d9kiSpjfptcXje9T8i1q6vnRiuXV/H9/ElSWoWTW7aTzvtNLbaait+//13dt99d2Jjw7fQ1113Xde0S5LUCVRWh7vCvzKvYbq+T/dw/brpuiRJzScmCIIg2kVEW0lJCRkZGRQXF3v+eUmSVmL6onBn+Nnl9WN9atL1PqbrkiQ1WmP70Ea/F77xxhtTVFRU9/Fpp53GnDlz6j4uKCggJSVlNcuVJEltWUU1TCyEf/1e37DHxcABPcPN5mzYJUlqGY1u2r///nsqKyvrPn744YcpKSmp+zgIAhYvXty81UmSpKj7ZRFc+Ru8UgS10/P6JoWncdunR9i8S5KkltHkNe21ljerPibG39qSJHUUFdXw7Bx4fV59s94lBvbtAXt2D9exS5KklrXaTbskSeq4fl4U7gyfH7F2vV9SuHa9d2L06pIkqbNpdNMeExOzTJJusi5JUsdSXg3PzIE3l0rX9+8Ju3czXZckqbU1umkPgoDddtuNLl3CmyxatIj99tuPhIQEgAbr3SVJUvszrSxM1wsr6sfWTYYxOZBrui5JUlQ0umkfO3Zsg48POOCAZY455JBD1rwiSZLUqpZUw9OF8Nb8+rH4GDiwJ+xqui5JUlR5nnY8T7skqfP6oQwezIM5Een6gORw7XpOQvTqkiSpo2tsH+pGdJIkdUKLq2DiHHhnfv1YfAwclAW7ZJquS5LUVti0S5LUyXy3EB7Kh7kR6frAZDg2F7JN1yVJalNs2iVJ6iQWV8GThfBecf1YQiwc3BN2zgRPCiNJUttj0y5JUicwdSE8lAfzIk72sn5KuDN8T9N1SZLaLJt2SZI6sEVV8EQhfBCRrifGwiFZsGOG6bokSW1do5r2W265pdF3eOaZZ652MZIkqfl8UxquXZ8fka5vmBKuXe8RH726JElS4zXqlG/9+/dv3J3FxPDLL7+scVGtzVO+SZI6krIqeLwAJpfUjyXFwqFZsL3puiRJbUKznvLt119/bbbCJElSy/mqFB7Oh+KIdH3jrnBMDnQ3XZckqd1Z7TXt5eXl/PrrrwwYMIAuXVwaL0lSNC2sgscKYMpS6frh2bBtuum6JEntVWxTb1BWVsaJJ55ISkoKgwYN4vfffwfgL3/5C//617+avUBJkrRyXyyAy6Y3bNg36QqX9YPtnA4vSVK71uSm/aKLLuLLL7/k7bffJikpqW58xIgRPPbYY81anCRJWrHSSvjvLLhjFpTUTIdPiYPjcuGMtaCb0+ElSWr3mjyv/ZlnnuGxxx5j6623JibirftBgwbx888/N2txkiRp+T5bABPyYUFV/dimqTA6GzJt1iVJ6jCa3LQXFhaSnZ29zPjChQsbNPGSJKn5LaiERwrg0wX1YylxMCobtkxzKrwkSR1Nk6fHDx8+nBdffLHu49pG/b///S/bbLNN81UmSZLqBAF8UhKuXY9s2IemwuX9YCs3m5MkqUNqctJ+9dVXs/fee/Ptt99SWVnJzTffzLfffsuHH37IO++80xI1SpLUqZVUhlPhPy+tH+saB0dmw3DTdUmSOrQmJ+3bb789X3zxBZWVlQwePJhXX32V7OxsJk+ezLBhw1qiRkmSOqUggI9q0vXIhn3ztHBn+C1M1yVJ6vBigiAIol1EtJWUlJCRkUFxcTHp6enRLkeSJIorYXw+fBnRrKfGwVE5MCwtenVJkqTm0dg+tFHT40tKSlZ9UA2bXkmSVl8QhOdbf6wQyiJ2hh+eFm42l9bkhW2SJKk9a9Sv/szMzEbvDF9VVbXqgyRJ0jLmV8DD+fD1wvqxtDgYnQObma5LktQpNappf+utt+ouT
58+nQsvvJDjjjuubrf4yZMn88ADD3DNNde0TJWSJHVgQQCTS+DxAlhUXT++ZXqYrneNi15tkiQpupq8pn233XbjT3/6E0ceeWSD8QkTJnD33Xfz9ttvN2d9rcI17ZKkaJlXk65/E5Gup3eBo3NgSGr06pIkSS2rsX1ok3ePnzx5MsOHD19mfPjw4Xz00UdNvTtJkjqlIIAPisOd4SMb9q3Tw53hbdglSRKsRtPep08f7rnnnmXG//vf/9KnT59mKUqSpI6sqAJu+QMezIPFNdPhM7vA6WvB8b2cDi9Jkuo1eQ/am266iUMOOYRJkyax1VZbAfDRRx8xbdo0nnrqqWYvUJKkjiII4L1ieLIQlkSsXd82Aw7LghSbdUmStJTVOk/7zJkzuf322/n+++8B2Gijjfjzn//cbpN217RLklra3IowWf++rH6sW83a9U2cCi9JUqfT2D50tZr2jsamXZLUUoIA3pkPE+c0TNe3z4BDsyDZdF2SpE6psX1ok6fHA8yfP597772X7777DoBBgwZxwgknkJGRsXrVSpLUARWWw4P58GNEut49Ho7JgY27Rq8uSZLUfjQ5af/kk0/Yc889SU5OZssttwTg448/ZtGiRbz66qtsvvnmLVJoSzJplyQ1pyCAt+eH6Xp5RLq+YyYc0hOSTNclSer0Wmx6/A477MB6663HPffcQ5cuYVBfWVnJn/70J3755RfefffdNas8CmzaJUnNpaA8XLs+bVH9WI94ODYHNjRdlyRJNVqsaU9OTubzzz9nww03bDD+7bffMnz4cMrKylZwy7bLpl2StKaqA3hzHjwzByoifrPunAkHZ0Fik0+yKkmSOrIWW9Oenp7O77//vkzTPmPGDNLS0ppeqSRJ7Vx+OTyQBz9HpOs942FMLqyfEr26JElS+9fkpv2II47gxBNP5IYbbmDbbbcF4IMPPuD888/nyCOPbPYCJUlqq6oDeH0ePLdUur5rNziwp+m6JElac01u2m+44QZiYmI49thjqaysBCA+Pp5TTz2Vf/3rX81eoCRJbdHsJWG6/uvi+rHshHDt+kDTdUmS1ExW+zztZWVl/PzzzwAMGDCAlJT2+xeKa9olSY1VHcBrNel6Zc1v0Bhgt25wQE9IMF2XJEmN0KLnaQdISUlh8ODBq3tzSZLanVk16fr0iHQ9JyFcuz4gOXp1SZKkjqvRTfsJJ5zQqOPuu+++1S5GkqS2qCqAV4rgxbkN0/Xdu8P+PSDedF2SJLWQRjft48aNo2/fvmy22Was5ox6SZLanZmL4YF8+D0iXe9Vk673N12XJEktrNFN+6mnnsojjzzCr7/+yvHHH8/RRx9N9+7dW7I2SZKipiqASXPhpaLwMoTp+p7dYV/TdUmS1Eoa/SfHf/7zH2bPns0FF1zA888/T58+fTj88MN55ZVXTN4lSR3KjMVw9W/w/Nz6hr13Ily4DhyUZcMuSZJaz2rvHv/bb78xbtw4HnzwQSorK5k6dSqpqanNXV+rcPd4SRJAZTVMKgrT9eqa346xMbBXdxjZHbrYrEuSpGbS4rvHx8bGEhMTQxAEVFVVre7dSJLUJvy2ONwZ/o8l9WNrJYZr1/smRa8uSZLUuTUpM1iyZAmPPPIIu+++O+uvvz5ff/01t912G7///nu7TdklSZ1bZTU8Uwj/+r2+YY+NCdet/30dG3ZJkhRdjU7aTzvtNB599FH69OnDCSecwCOPPELPnj1bsjZJklrU9EXhzvCzItL1PjXpeh+bdUmS1AY0ek17bGws66yzDpttthkxMTErPG7ixInNVlxrcU27JHUuFdXhJnOvFkHtL8G4GBjZI1y/HrfiX3OSJEnNotnXtB977LErbdYlSWoPflkUrl3PK68fWycJjssN17BLTTb1Gvj+Jui2GWz/GCRkRrsiSVIHstq7x3ckJu2SmlX5fCj7AzI2ghi3G28rKqrhubnwWkS63qVm7foeLZiuz1s0j//N/B9Dc4fSK61XyzyIo
qdkGrywfs0HsTD4Mhh8STQrkiS1Ey2+e7wkaTnmfQmv7QCVCyB3BOzyio17G/BzTbqeH5Gu90sK1673bsF0vWhREYPvGMysBbNITUjls5M/Y2CPgS33gGp9sXFLfeyfVpKk5uVfkpLUnH4ZB1Vl4eW816H4u6iW09mVV8PjBXD97/UNe5cYODgL/rZOyzbsAO///j6zFswCoLS8lBenvdiyD6jWl7ouDL8N0gbCOofBBmdGuyJJUgfTZpr2f/3rX8TExHD22WfXjS1evJjTTz+dHj16kJqayiGHHEJ+fn6D2/3++++MHDmSlJQUsrOzOf/886msrGzl6iWpRuYmEFRBTBx0SYWUtaJdUac1rQz++Ru8Ma9+Ovy6yXBxX9ize3hat5Y2NHcoyV2SAYiNiWWbtbdp+QdV61v/dNjvR9j+UejSNdrVSJI6mDYxh+vjjz/mrrvuYtNNN20w/te//pUXX3yRJ554goyMDM444wwOPvhgPvjgAwCqqqoYOXIkubm5fPjhh8yePZtjjz2W+Ph4rr766mg8FUmd3bonQBBA8VRYd4wbUkXBkmp4uhDeml8/Fh8DB/SE3bq1TrNea52Mdfj4pI95adpL7NB3B7Zae6vWe3BJktQhRH0jutLSUjbffHNuv/12rrzySoYOHcq///1viouLycrKYsKECRx66KEAfP/992y00UZMnjyZrbfemkmTJrHvvvsya9YscnJyALjzzjv529/+RmFhIQkJCY2qwY3oJKlj+LEsXLs+p6J+bEByuHY9p3G/EiRJklpFY/vQqE+PP/300xk5ciQjRoxoMP7pp59SUVHRYHzDDTdknXXWYfLkyQBMnjyZwYMH1zXsAHvuuSclJSVMnTp1hY+5ZMkSSkpKGvyTJLVfS6rhkXy4cUZ9wx4fA4dnw3l9bNglSVqlX3+FiRNhzpxoV6KlRHV6/KOPPspnn33Gxx9/vMx1eXl5JCQkkJmZ2WA8JyeHvLy8umMiG/ba62uvW5FrrrmGyy+/fA2rlyS1Bd8vhAfzYW5Euj4wGY7NhWybdUmSVu2rr2CrrWDxYujVC6ZOhW7dol2VakQtaZ8xYwZnnXUW48ePJykpqVUf+6KLLqK4uLju34wZM1r18SVJa25xFYzPh5tm1jfsCbEwKhvO7WPDrg6sohRmTIR5X0W7EkkdxaRJsGRJeHn2bPjoo+jWowailrR/+umnFBQUsPnmm9eNVVVV8e6773LbbbfxyiuvUF5ezvz58xuk7fn5+eTm5gKQm5vLR0t9Q9XuLl97zPIkJiaSmNjC5/mRJLWYbxfCg3kwL+JkIeunwJgc6Gmzro6suhJe3RaKvwZiYKfnYa2R0a5KUnu3004QExNuppuRAUOHRrsiRYha0r7bbrvx9ddf88UXX9T9Gz58OKNHj667HB8fzxtvvFF3mx9++IHff/+dbbYJT5mzzTbb8PXXX1NQUFB3zGuvvUZ6ejobb7xxqz8nSVLLWlQVNus3z6xv2BNj4agcOGdtG3Z1Agun1zTsADEw89loViOpo9h6a5gyBW69FT77DJZagqzoilrSnpaWxiabbNJgrGvXrvTo0aNu/MQTT+Scc86he/fupKen85e//IVtttmGrbfeGoA99tiDjTfemGOOOYbrrruOvLw8Lr74Yk4//XSTdKkxysvhuuvCjUfOOAM222zN7m/ePEhOhlZe8qLO4ZtSeDi/Ybq+YUq4dr1HfPTqklpVch+IS4KqxUA1JPaMdkWSOorhw8N/anOivnv8ytx0003su+++HHLIIey4447k5uYyceLEuuvj4uJ44YUXiIuLY5tttuHoo4/m2GOP5Yorrohi1WqTXnsNzj0XXn892pW0vPJyOO00GDYM/vvflR977bVw6aXwwAOw666waNHqP+7ll0P37tCzJ7z33urfz4p8/z1Mm9b896s2r6wqPI3brX/UN+xJsXB0Dpy9tg27OpnyuTUNO0AMlLkvjyR1dFE/T3tb4HnaO7hPP
4Uttqhfp/Ppp2ueKLdlt90GZ54ZPleAH36A9ddf/rHHHw8PPQRVVeHHeXmrNx2qvDxM2Kurw8/zyJHw/POrV//yXHUVXHxxePmWW+Avf2m++1ab9lVNul4cka5v3BWOyYHuNuvqjKor4Ln1oGwmUA1b3A4DT412VZKk1dBuztMutbipU8MGtro6/H/q1GhX1LJKS8PGuVZZ2YqPPeMM6No1vHzyyau/fik+HrKzIS4ufOx+/Vbvflbk5puXf1kd1sIquH82/OeP+oY9KTacCn/mWjbs6sRi42GPyTD0atj+cVjvz9GuSJLUwqJ6nnapVeyzD6yzDvz+O/TtC3vvHe2KWtYpp8BLL8Enn8Cf/wxDhqz42GHDwtN6FBeH5+RcXTEx4dKD664LG/9LL139+1qeYcPg1VfDy1ts0bz3rTbniwUwvgBKItL1TbqG0+G72axLkNIbNv5btKuQJLUSp8fj9PhOoawsnCa+4YbhNG61L8XF8J//QJcucPrp9bMD1KGUVsKjBfDxgvqx5Fg4PBu2SW84gUSSJKm9a2wfatOOTbskRdtnC2BCPiyoqh/bNBVGZ0Om6bokSeqAGtuHOj1ekhQ1C2rS9U8i0vWUOBiVDVumma5LkiTZtEuSWl0QwKcL4JECKI1I14ekwugcyPC3kyRJEmDTLklqZSWVYbP+WUS63jUOjsyG4abrkiRJDdi0S5JaRRCE0+AfKQhP6VZrs1Q4KgfS/Y0kSZK0DP9EkiS1uOJKGJ8PX5bWj6XWpOvDTNclSZJWyKZdktRiggCmlMBjhVAWka4PTws3m0vzt5AkSdJK+eeSJKlFzK+A8QXwVUS6nhYXToXfPC16dUmSJLUnNu2SpGYVBDC5BJ5YKl3fMh2OyIJUf/NIkiQ1mn86SZKazbwKeDgfvllYP5beBUZnw1DTdUmSpCazaZckrbEggA9L4PECWFxdP751OhyeHZ7STZIkSU1n0y5JWiNFFfBQPnwbka5ndoHRObBpavTqkiRJ6ghs2iVJqyUI4L1ieKqwYbq+bQYclgUppuuSJElrzKZdktRkcyvgwTz4vqx+LLMLHJMDm5iuS5IkNRubdklSowUBvDMfJs6BJRHp+vYZcGgWJJuuS5IkNSubdklSo8wphwfy4ceIdL1bFzgmFwZ1jV5dkiRJHZlNuyRppYIA3p4fpuvlEen6jplwSE9IMl2XJElqMTbtkqQVKigP165PW1Q/1iMejs2BDU3XJUmSWpxNuyRpGdUBvDkPnpkDFUH9+M6ZcJDpuiRJUquxaZckNZBfDg/kwc8R6XrPeDg2FzZIiV5dkiRJnZFNuyQJCNP11+fBc0ul67tkwkFZkBgbtdIkSZI6LZt2SRKzl8CD+fBLRLqeFQ9jcmGg6bokSVLU2LRLUidWHcBrNel6ZU26HgPs1g0O6AkJpuuSJElRZdMuSZ3UrCXh2vXpi+vHchLCdH1AcvTqkiRJUj2bdknqZKoDeKUIXpjbMF3fvTvs3wPiTdclSZLaDJt2SepEZi6GB/Lh94h0PbcmXV/XdF2SJKnNsWmXpE6gKoCXi+DFueFlCNP1PbrDfqbrkiRJbZZNuyR1cDMWh2vXZyypH+uVAMflQj/TdUmSpDbNpl2SOqjKaphUBC8VhevYAWJjYK/uMLI7dDFdlyRJavNs2iWpA/q9Jl2fGZGur5UYrl3vmxS9uiRJktQ0Nu2S1IFUVsOLReH69ch0fZ/usLfpuiRJUrtj0y5JHcT0ReHO8LMi0vW1E8O1631M1yVJktolm3ZJaucqqsNzrr9SBDXhOnG16XqP8LIkSZLaJ5t2SWrHflkED+bB7PL6sXWSYEwOrG26LkmS1O7ZtEtSO1RRDc/NhdeWStf36xGee910XZIkqWOwaZekdubnReHO8PkR6XrfpHDteu/E6NUlSZKk5mfTLkntRHk1PDsH3phXn653iYH9e8Lu3cJd4iVJktSx2LRLUjswrQwezIeCiHS9f1J43vVepuuSJ
Ekdlk27JLVhS6rhmTnwVkS6Hl+Tro8wXZckSerwbNolqY36sSxcuz6non5sQHKYruckRK8uSZIktR6bdklqY5ZUw8RCeHt+/Vh8DByUBbtkmq5LkiR1JjbtktSGfL8wXLs+NyJdH5gMx+ZCtum6JElSp2PTLkltwOIqeGoOvDu/fiwhFg7uCTtnQozpuiRJUqdk0y5JUfbtQngoH4oi0vX1U+DYHMgyXZckSerUbNolKUoWVcGThfB+cf1YYk26vlOm6bokSZJs2iUpKr4phYfzYV5l/diGKXBMDvQ0XZckSVINm3ZJakVlVfBEIXy4VLp+WBZsn2G6LkmSpIZs2iWplXxVCuPzYX5Eur5RChyTCz3io1eXJEmS2i6bdklqYQur4PEC+F9J/VhSLByeDdumm65LkiRpxWzaJakFfVmzdr0kIl3fpCscnQPdopiuFy8uJqlLEoldEqNXhCRJklYpNtoFSFJHtLAK7p0Nt/9R37Anx8KYXDhjreg27GPfGkvmtZlk35DNlJlToleIJEmSVsmmXe1PEMCSJdGuQlqhzxfA2F/ho4jp8JumwmX9YNsobzZXVlHGP9/9JwCl5aVc/+H10StGkiRJq2TTrvalqAiGDYOkJDjkEKisXPVtpFayoBLumQV3zoIFVeFYShyc0AtO6w2ZbWCzucS4RHok9yA2Jnz575PeJ8oVSZIkaWVc06725aGH4IsvwssTJ8K778Kuu0a1JAng0wXwSH59sw4wJBVG50BGG3qljYuN47VjX+P6D6+nd2pvLtv5smiXJEmSpJVoQ39KSo2QnR1Oj4/8WIqikkp4pAA+W1A/1jUOjsyG4Wltc2f4oblDGX/w+GiXIUmSpEawaVf7csQR8OuvYcJ+9NGwySbRrmhZs2bBhAkwYAAceGDb7Nq0xoIAPlkQNuwLI9L1zVLhqBxI99VVkiRJzSAmCCJjy86ppKSEjIwMiouLSU9Pj3Y5as/Ky2G99WDmzLCru/tuOOmkaFelZlZSCePz4YvS+rHUmnR9WBtN1yVJktS2NLYPdSM6qTnl5cGMGWHDHhsLH34Y7YrUjIIAppTA2OkNG/bhaeHO8MPTbdglSZLUvJzAKTWntdeGHXaA994Lu7dRo6JdkZrJ/AoYXwBfRTTraXHhVPjN06JXlyRJkjo2m3apOcXGwuuvw/vvwzrrhFPl1a4FAfyvBB4vhLKItetbpMGobEj1VVSSJEktyD83peaWkOBp6DqIeRXwcD58s7B+LL0LjM6GoabrkiRJagU27ZK0lCCAD0vg8QJYXF0/vlU6HJEdntKtNZRXlZMQl9A6DyZJkqQ2yY3oJK2ZIICf74VPzoSiT6NdzRorqoBb/oAH8+ob9owucPpacEKv1mnYy6vKGTlhJIlXJrLdfdtRWl666htJkiSpQ7Jpl7Rmfn0QpvwJpt0Or+0IS+ZGu6LVEgTw3ny4fDp8GzEdftuMcGf4TVNbr5ZXfnqFl6a9BMCHMz7ksW8ea70HlyRJUpvi9HhJa6Z4KsTEQVAFVWVQNgMSe0S7qiaZWwEP5cF3ZfVjmV3gmBzYpBWb9VpZXbMafJzdNbv1i5AkSVKbYNMuNYepU+Hmm6FPH7jgAkhMjHZFraf/GPjpbqgohpxdIGNwtCtqtCCAd4vhqUJYErF2fbsMOCwLkltp7frStl57a+7a9y6e+vYpRqw7gn3X3zc6hUiSJCnqYoIgCKJdRLSVlJSQkZFBcXEx6enp0S5H7U1FBay1FhQVQXU1/P3vcOWV0a6qdVWUQNkfkL4BxLSPVTdzyuGBfPgxIl3v1gWOyYVBXaNXlyRJkjqHxvahJu2d1fffQ3k5DB4MMTHRrqZ9W7gQCgvDy7Gx8NNP0a0nGuLTIaN9vOEVBPD2fJg4B8oj0vUdMuDQLEiKUrouSZIkLU/7iMTUvG65BTbaCIYMgUsvjXY17V9mJpx+eng5ORnOPDOq5WjFCsrhxhnwaEF9w94jHs5eG47OtWFvV5wkp
rZm7lz44Qe/NyVJzc6mvTn89hu8+CIUF0e7ksa5/fb6y//5T/Tq6Ehuuw3++APy8mDbbaNdjZZSHcAb8+CK6TBtUf34zplwaV/YyOnw7UcQwJRT4JEu8PJwWDwn2hVJ8Pbb4TKpDTeEY4+NdjWSpA7Gpn1NffFF+Et6331h6FAobQfnU95663BKfGwsbLXVmt1XEMAzz8D118PMmc1SXrvVuzekRmGrca1UfjncMAMeL4CKmgCsZzyc0weOzDFdbxZV5eGeBq2RMM77DH6+G6iGos/h53ta/jGlVbnrrnB/E4CHHw5Td0mSmolr2tfUc8/BkiXh5enT4ZNPYOedV36badPgvPPCyzfeCOut15IVLuvOO8M3GMrL4c9/XrP7GjcOTjghfBPg5pvh5587187parNq0/Vn59Q36wC7ZMJBWZDoW5bNY+EMeG1bKJsJubvDzi9BbAv+aumSDsQAAVANCd1a7rGkxtpkE3jsMYiLg6wscFNbSVIzsmlfUzvsUH85IwMGDVr1bUaPhs8+Cy8XFMDkyS1T24okJcHZZzccmzq1fl32bbeFf4A0xgcfhH+kVFWF08NnzYL+/Zu1XKmp8paEO8P/EjEVPisexuTCwJTo1dUh/foAlM0KL+e9BnM/gqwWXCKSPhC2eTA8zWDPrWHAn1rusaTG+tvfIC0tXC735z9DfHy0K5IkdSA27Wtql13g3Xfh449h//3Dd9hXpagobHIB5rSR9ZjHHhtO9a+9XPumwqqMGhWm7RCu5e7bt/lrKy8Pp922RoJfVgZjxsCHH8Ipp3SOjfoW5cP7h8GCabDJJbD+adGuaLVVB/DaPHhuDlTWpOsxwK7d4MCekGC63vxS1wWqgdjwdH8pa7X8Y/Y/OvwntRVdurgJqSSpxfgnbHPYfnv4619hwIDGHf/vf4fvyKenh5fbgrKysDEOgvByY40YEe6W+8Yb8Oab4Tr55jRxYvh5Sk8Ppx62tHvugaeeCmcMjB0Ln3/e8o8Zbd/+C+Z8CIvz4JMzYHFhtCtaLbOWwLW/w8TC+oY9OwHOXwcOz7ZhbzF9j4Tht4VN9C6ToGsLvHHXWAt/h9+fhEV50atBkiSpmZm0R8O++9bvNN9WzpH+n//A0UfXX26KAQMa/4ZFU/3tb/V7BlxwARxxRMs8Tq2lvx6r+vosWhS+ydGjR8vVtLoWF8DkMVD6Cwy+HPqNWv5xDdYfx4RpaTtSHcArRfDC3Ibp+ohucEBPiG9fT6f9iYmB9U8HTo9uHQt+gpeGQFUZJHSHkVMhOTe6NUmSJDUD/5yNlpiYttOwA+y6a5guz5oFu+0W7WrqrbVWuGY+Li7cnb2lnXRS+MZAnz5w1VXhhn0r8u674XKInj2bbxr9Z5/BRRfBs8+u+X19fUW4xnjBjzD5GKhYsPzjNr4Ieu8DaQNh6/shsQ2+AbECfyyBf/0Oz0RMh89NgAvWgUOzbdg7lbzXwoYdoLwICt+Lbj2SJEnNxKRdbdvDD8M//gHV1XDllS3/eMnJ8MgjjTv2+uvrlxJcdVVY55qsu//jD9huu3ANf3U1vPACjBy5+vcXVEZcrg7/LU9id9jpudV/nCioCuDlInhxbngZwnR9j+6wXw+b9TU27S744m+QvBbs9Cyk1Zzhonw+fDAK5n0OG54LG18Q1TIbyNoeYuIhqIC4FOixZbQrkiRJahY27Wrb1l4bHngg2lUsX//+9ee7z86GhIQ1u79p02Dx4vBybGy4nn5NmvZNLoH5X4fT44dcCQkZa1ZfGzFjMTyQBzOW1I/1SoDjcqFfcvTq6jAqy+CT0yGogspS+Ppy2Pah8LofbgkT7aA6bOr7HFzf0Edb5mDY6xMofB9yR0R3bb1aX2kp5OfDuuu2rVlsUmczYQI8/TTsvjucfHK0q5E6DJt2aXVdcw2kpIR/KJ533pr/objVVrDppvDVV+HGe2u6fj9lLdjjgzW7jzakshomFcFLReE6doDYGNizG+zbA7qYrjePm
DiITYCqmjeQunRteF2w1LFtSbdNw3/qXL76CnbcMdwr5oADwobBxl1qfR9/HJ7WOCYGnnwyDDd2373165g3L/xXG65IHYBNu7S6unaFf/2r+e4vORk++ij8A3S99aBbt+a773bu95p0fWZEut47MUzX+yZFr64OKS4RdngKvhoLKX1g04hlKRucCfO+hHmfwYZ/hdT+0atTqvXf/4ZJO4T7gUybBuuvH92apM5o5szw/6Dm3d3ff2/9Gt55B/baK5y5eNJJcPfdrV+D1AJigiAIVn1Yx1ZSUkJGRgbFxcWkp6dHuxxJNSqr4cWicP16ZLq+d3fYp7vpuiTCM56ccUa4YWlyctg4ZHSM5UBSu7JoUZisf/BBOHPw3Xdb/2fx8MPDU/dW1+zjM3++rwdq0xrbh5q0t1U//xxOv87ICHcm9wVHncxvi2FcXnj+9Vpr16TrfUzXJdU69dTwD/Rvv4U//cnfl1K0JCfDe+9BUVE4WzA2Cu+sb7RRmPTHxYVn+OnaddW3UdtXVgbjx0NqavjGTFwbW57XCmza26qRI+Gnn8IXnrlzYdy4aFcktYqK6vCc66/Oa5iuj+wOe5muS1pabCz85S/RrkIShGvIe0Tx1LEXXxw2djNmhDNwutjqdAiHHgqTJoWXH3gAzjorXAbRifYscHo8bXR6fHJyuB4nJiY8Ddh7nnNYHd+vi8K167PL68f61KTra68gXf8y70vKKsrYeu2tielEL96SJEmdQkpKuPwi0q23hm/MtHON7UPNrNqqK64IG/aEBPj736NdjdSiKqrhqUK49vf6hj0uBg7oCRf1XXHDfvP/bmboXUPZ9r5tOfuVs1utXkmSJLWSo45aduy111q/jiiyaW+rzj8f5swJp8bvvXe0q1EHk1+az5SZUyivKl/1wS3s50Xwz9/g1aL6s4n1TYKL+8I+PcLmfUXu/fzeusv3f35/yxaq1RMEMOsV+O1xqFqy6uMlSZIi3X03vPoqDBtWP3bwwdGrJwpc6NGWde8e7QrUAX0661N2vH9HyirL2HqtrXnvhPfoEtv6LwXl1fDsHHhjXn2z3iUG9usBe3QP17Gvyg59d+Drgq+JIYZt1t6mRevVavr2X/BlzWyhtfaFnZ6Pbj2SJKl9iY0Nz0yw447w8svQqxdsuWW0q2pVNu1StC38PUwg0we2ysNN+HoCS2oSz//98T+mFkxlSO6QVnnsWtPK4MF8KIgI+vsnwZhc6JXY+Pv5957/ZmjOUMoqyjhx8xObv1CtuZnP1V+e9XL06pAkSe1bYiIccEC0q4gKp8dL0fTz/fBsP3hhffjqslZ5yGG9h1EVVBEbE0tGYgZ9M/u2yuMCLKmGxwrgxhn1DXuXGDgkCy5Yp2kNO8CiykX8seAPCssKWVLp1Os2ae2IX669l7PUZ+bz8OJgeHNPKJvVenVJkiS1E+4eTxvdPb69KSqC11+HwYPDc2SqcV4aAvO/Ci93SYfDi1v8IYMg4Mlvn+Sr/K8YveloNuy5YYs/JsCPZeHO8HMq6sfWTYYxOZDbxGa91qGPH8rT3z9NDDHs3G9nXj/29eYpVs0nCCDvNagohrX2h7iIL3ZVOTyZCVWLICYO+h8DW7s3gSRJ6hwa24c6PV5rrrQUNtsMfv89PB/mO+/AtttGu6q2r2oxULtwOxYyB7fKw8bExHDYoMM4bNBhrfJ4S6phYiG8Pb9+LD4GDuwJu3Zr3Nr1Ffky/0uqg2oAvi74es0KbWsWF0J8BsQlRLuSNRMTA732qP94SRHM+R903wwSukNQVX9dG9gYscXcdRfceSdsvTXcfHN4ZhBJkqRGcHq81tzXX4cNO0B1Nbz4YnTraS++GgvzvwRiIDYBdngq2hU1u+8XwuXTGzbs6yXDpf1gRCM3m1uZ87c9n5iaNz7O3/b8NbuztiII4MOjYWI2PNMHSn6IdkXNZ/EceHEQvDMSnt8AymbAVv+FpF6QOQSG/DPaFbaMH36AP/8ZvvgibN7vv
jvaFUmSpHbEpF1rbuONITsbCgrChmO33aJdUftQ+kvNhQCqF8PsV2DdY6NaUnNZXAVPzYF359ePJcTCQT1hl8wwfG0OJw87mX0G7kNVdVWrrs1vUQumwfTx4eXyuTDtThh2U3Rrai6F78HivPBy5YJwY7oNzginxbdXQQDTp0OPHrCiaW1lZfWXY2Jg4cJWKU2SJHUMJu1acxkZ8PnnYYL0v//BrrtGu6L2YeCpDT/+7ZHo1NHMvlsIl//WsGFfPwUu7RtOh2+uhr3W2ulrN0vDXh1U89g3j3Hf5/exqGJRM1S2mhJ7QmwSxMSGU8e79oteLc2t++YQl1zzQSxktZPT9FVVwYcfwk8/LXvdccfBuutC797h69/yDB0K55wDXbuGp6s55ZSWrFaSJHUwbkSHG9EpSoIAXtwYSn4EqmGTsbDpZdGuarUtqoKnCuG9iL30EmPh4J6wU2bzN+vN7YLXLuD6D68HYOTAkbxw1AvRK6bgvTBhzxwEG10AsR1oUtT8qTB7EmTtAD23inY1jXPYYfDkk+F5Yh9/HA45JBzPz4fc3PByXByMHg0PPBC9OiVJUrviRnRSWxcTAyPegZ/vhaQc6D8m2hWttqkL4aE8mFdZP7ZhChyTAz3byX5br/38Wt3lN399M4qVANk7hP86osxB4b+2ZPHicM355Mnh/3/9a/11CxeGDTuEe3Y88EB9056ZGU6Lnz8/vG6DDVq7ckmS1AnYtEvRlJQNgy6KdhWrrawKniiED5dK1w/Ngh0y2n66HunIwUfyRf4XAIzaZFR0i1HruuceePDBcPbLOefAHnvAoJo3FlJSYMMN4ccfw8Z8663rb5eYGJ4t4/bbwynyZ50Vnfrbs6oqOP98eOstOPbYhm+YSJIkwKZdHdlvv4V/EK67brQrWbGf7g53kU9dF7Z/AlJ6R7uiRvu6FB7Oh/kR6fpGKXBMLvSIb/16llQuYezbY/mp6CfO3OpMduy7Y5Nuf8F2F7Bdn+1YWLGQ3fq7mWKnUV0NRUUNx8ojTj0XExM25vfdB716wTFLbZo3aBD85z8tX2dHNWEC3FSz0eIXX8D228MWW0S1JEmS2hqbdnVMd94Jp9Zs9HbddWGS09aUz4OPTgWqYUkhTL0Ktmj7f/yXVcFjBfC/kvqxpFg4LAu2a8F0fcGSBfww9wc2ztqYlPiUZa6/9oNruf7D6wmCgEk/TSLv3DzSEtOa9BjbrbNdc5Wr9qC0FHbZBT75JNz5vUuXcHr80KENj8vOhgsvjEqJdSorIS8v3PAutgPtIbtwYfiiUbu9jjvrS5K0jA70m78ZLJ22qKG5c+H+++GDD6JdyardeGP95RtuiF4dKxUb7hBeK6btv4f2ZSmMnd6wYR/UFS7rB4OTijn1xT+z/yP787+ZK9hFezXNXjCbDW7bgC3u2YINbt2AsW+NZfKMyQ2OmbVgFjHEEBBQVlHGgvIFzVqDGiEIwuYy0v33hzusv/hiVEpaqfPPDxt2gJISuPlmuOqqtreuY86c8NSaffqE0/MjTyHX3h1zTLgcITkZTjop3F1fak0ffhguzbjmmmVfvySpjbBpj7TeejBx4oqvLy2FJUtar54Vqaho/TrKy2GrreCEE8Lpi08/vewx1dUwc2ZYX1NUVMCRR0JaWvh/U2+/PEOHhmlUXBwMGbLm9xcEMP8bKJm25vdVKyEDtroH0gZCrz1hk0ua776b2cIquG823Dyjgq/mTKdoURHJsTAmF/6yFnSLh3+8+Q/u+eweXvzxRfYevzezF8xmbtncZnn85398ntmlswGYuWAmV7x7Bdvfvz2fzvq07piztjqLrK5ZdZd7pzVcajBp2iSuee8afi76uVlqWqX/b+++w6Mq2j6Of3fTKSEJIQm9iYIoiCDVLkXEBvaKiqICCmLvWFHU57WD+tgFsT8oKkWagnQEEQFB6ZJGSKGk7rx/TJLdDUlI3yX8Pl65zJ6dPWc2nGzOfe6Ze4yBbZ/Dxlch6wi7I
ejKgYT5kL6x7K/56y9o1crO8x43zm77/nv7mfHxx3DhhbB+fTV09jByc+3x//tfOOixlN/ixXZEjqcWLWq2b2X15ZewKf+zZ/ly+Okn3/anKtWtCzNm2BsRb79du0YRVJf0dPuz+t//3CMUpGJSU6FfP5g8GR5+GF57zdc9EhEplv46esrLs1nZffvgnntsBmD5cnjsMejVyw6fbNjQ+4Lpq69sdmDatPIda+JEGDLEXkyWx4wZEBkJ9erZOZYVlZMD775r+3GwDGtSb90Kf+cHO04nzJzp/XxWls2QNG9uKygnJJS9L9OmwdSp9uc+dWr5f5bFee89Gzg8/DB89lnl9pWxGZbcBD+cCNOPhU1vVWw/mUk2GNq3FX46G6a1gWW3QsbfNmgPjS5smufKY3vadnJd1XfXPycvh9eXvc5j8x7j34x/S2z3WwaM2wqL01xM2zCNWX/P5Kvlj9PbNYve+cPhs3Kz+C3+N4wxuHCRlplGk/80IebFGD5a81Gl+3pizIk48v8r4DIulu1aVvi4Q6MO7LxrJxkPZvDyuS97vf6b9d9w3pTzeHjuw3R/pztpmWlUu/UvwKIrYOUYmHO2/15c5x6052YBY2DBhTDnLJh+POwocoPu779hy5ZD9/PCC7Brl71598QTkJTk/sxwuezXtm3V9z5KctddNot2yy1w9dXu7bt3e7cbNsx/s7wFdTmcTvsL16qVT7sjPtavH9x6Kwwe7McjyY4Qycn2hpHLZX+/ivtsExHxAwraPTkcdgjiAw/YwjhTpsCZZ8LTT8OSJfZi9sABePJJ237RIrj0UhsgXnyxe5jl4cycCSNG2Lvk118Py5aV3HbiRGjZEgYNgr174ZFHbB9yc+2NhQLTptnlhxo1grllWK5q1Ci4+WYYOdJmtw+ndWvo0MF+bwycf7738/Pnu4fNb90Kn356+H0WqFNkfvLzz9ubCsbAf/4Dp51m56WXJ+ipXx8efdQGD5GRJbczLsgtZajpts/gu+NgywfubX+V8068ccEvl8PXMTCtFSwbDkk/w/4t4MoGXOStupfE/YmAnbt98tsn0/LllnSa2InUzNTyHa+MHp//OHf+eCfP/vIs53x0Dib/5/vtxm+ZsGgC61N28s6/MOlfSM+FfdkZpOzfAfHv49g9kdkbvyjc1wWfXsCvO37FYHDgoG5wXcAG1qN+GMWna8txPhSjV/NezLh2BiNPGUlUWBQAEaERDGw30KtdgDOAesH1Dnn9rzt+JcARgMGQkpnC33trINseX3Bzz0DqGsgrcnMsMwm2fgppG6q/LyXZswK+aWzPzRV32m3Ze2H3jPwGBrZOcbefMMGOSGrTxhZf27ABVq2yv5vR0fb/Docd6hwWBldcYT87wN74PPPM8vVv/ny4/HJ747SsI3CSkuCXX9xDyD1vss6b5/7+vPPgjDPs9926uYuh+aP+/eGDD+w68F9/DZ06+bpH4itZWd7XDHPm+K4vtUHbtvb3CiAqyl0LR0TEzyho93TffXZO4/bt9uLT5bIXfp7BosNhg2hwD1d0uez/N28ued+bN9uA+6OPYMcOu61gv4MG2XV/9+3zfs2//9rgfvt2G+i/8IK7CFFAgK1kXGDUKEhLs/POS1syJzkZFi50B/bGwM8/l/5zAQgKsjcupk61NyeKBu0tW9p+OZ12n23bup+bPh0GDoT77/euylxg4EAYMMD9eMUKe7E+bx7cfbft7/33w6xZpfdx92773u+/3w55K7Bzp73hUdS+LTCtNXxeF5beUvxNgX/eB1weGxzQsEfp/Sgq9Q/YkR/gZibAvn8Kj2UM5BnYmJlNm1dasTZhLT9s+oHfE34HYH3yeqZtqNzIg5y8HP676r/83+L/Iz3LPRl91e5VGAx5Jo8NyRvIceXw8ZqPuWjqRdy/5HO6zfqOJWnuTH+vyLq02Ps+ZCzFZfI4u/XZABzMOcjsf9xrnPdv25+TG5+MM3++fkZ2Bld/ffUhc9DLq3/b/
rx23mtsvmMzM6+dycZRG2kV0apMr72s42UEOm3NgE6xnTgh5oRK9cXL3t9h/vmw8Co46JG9dXkEmY5AcIa4H2enwY+d4der4YdOkFzKjbvqtPEVyM2f+//Xa5C1B4IjoP6xgAMw0Ki3u/1//uP+/okn7I28rl3tMmmPPGKLuPXrB999Z0cDxcbCxo02w75wIYSGlr1ve/faz4avvrI3TssSVG/YYD97Tj8dTj7ZFjXzrPZecHEOti/z5tmhxsuW2Rt9/mzoUPv34+KLfd0Tt/R0+Pxz+O03X/fk6BESYq8ZClx+ue/6Uhs4HPDJJ7bI444d7uSEiEhNyMg4dPRyCRS0e3roITu/7v777QWnwwE33ujOBB9/vL0L++qr9vFFF8Gxx9rv27WDP/+0FzBFg7+DB6F3b1vkZOhQ+w/UpYv7+eRkm3Uvy1yqd96x+7j8cu955RER7qC5pMzypk32gva007yD2htuKP2Ys2bBBRfAs8/a4Xgnn3xom/bt4dtv4dprbR8LgvqEBPuaGTPsTYfi3qPDYbP+nuLiDi0MmJJib0qMHm33XzTDMGSI3f9LL7n398ADdsh+bCz8+KN3+42vwYGd9vu//wuzT4W8IrUCCgN0B4TEQJcXyl/hPTQGnEGAE0weNOxuAyNnGAl58FEGXLAbDuZmMfWPqbSJtENhC4LetlFtS953Gdw7+15u+e4W7p51NxdPvbhw+61dby08xs1dbiY4IJj5O1biaHIbNL6FAy4n+3P2UzcAhjWGO5oFsvymn3jzvDf56bqfuOpEO0IjNDCUHk3dNzLOPeZcPr/0c/q16efVj62pWw/b1193/MpJk06ix3978EfiH8W2iQyLpH/b/sTUjSnzz6B70+5sumMTc66fw5JhSwgOCC7zaw/r54vh3x/tjZnlI9zbQxpCwXB+Y8B4THXY+5s7wDd5HpntGlavdX52PACCIyGwni2O2O8X6DIB+kyF48a42xfUinA6vTPfb71lPzvfeMP+8TnHY8m8oCA7V9zptDc4v/zSzi8/XJXwvXshM9M9bHXnzsO/n6+/dt/83LjR3mh86CFbaGru3EM/fxwOG6z7W+G5I0FOjh09ccUV9sbN9Om+7tHR45tvbL2I5cttzQipvNhYe0NERKSmHDhg/36W9earEZOWlmYAk5aW5t6YmWlMaqr9PiPDmPj44l+cnW3MunXGNG1qjNNpDBgzaZJ3m7//ttvBmIAAY265xRiXy5jly93bnU5jnnnm0P2/8YYxzZsbc+65xiQmGvPqq8aMHm2P6WntWmMGDDDm/PPt8Yrz3HPGOBzuY776qjELF9q+lCQ52ZigIPs6h8OY558vuW1xNmxwHw+MufTS4tu5XMa88IIxF1xgzOTJdltmpn0/Tqd9bwcPGnP22d77mzHDvY+GDd3bjz/emNxc97+Jw2FM377ex1z3vDGT8f7a8T/vNnm5xmx+15i1zxiz9HZjprU1ZuVYY1x55fs57J5jzMKrjFl2uz3OlABjJjtNylctTb0nMM5xGMZhvvrzK2OMMV+u+9IM/Wao+XTtp+U7TjG6vtXVkL//0KdDvZ7bnrrdrIlfY/LyXGZZmjGXrtxuHFPeNkyeZGL+9415Y0eeScs5/DHSM9PNu6veNdM3Tjeu/PMpLTPNdJrYyTAOc9Kkk0x6Zvph99P65dbGOc5pnE84Te93e1fo/da4zxvknz8OY2b2cW/f+7sx37Qw5tNQYza+7v2arBRjvozOf53TmMSFNdrlQrmZxqx51Jg5/ezvQ25W6e337jVm3Dhjnn7afo45HPZ37PTT7e/wO+8Yc8cdxqxYUfzrx41z/46edVbpx3K5jBk+3LaNizNm48bDv5/Zs92fp3XqGLNr1+FfIxXz11/ef9eGD/d1j0RERI4cixcbAyYNDo1Di+Ewxl+rI9Wc9PR0GjRoQFpaGuHh4eXfwfbt7iHzTqfNhHsWiXO57PDvn36yd3LnzIE++etBv/ACvPyynVP5y
SelD9F88UW7RFFAgM2s79hh542W1dy5NgPmdEJwsM28N2tW+mv+/tvOYS14b7ffboest2pVtuyUMXa42cb8KtRhYXZIZWA5ljcryLSBnbOfnOx+buxY9/Jur7wCY8bYth9/bOfqt2tn59gbY+fvF4ySAMjLhl+vdQ9dB+i/GKJ7HtqHXdNhwQXux6d/C80uOLTd4Wx+x85pL9BnKusCWzJ143RObtyVwR0Gl3+fwOr41Xz2x2d0b9r9kH1MXD6RET/YDPCIU0bwxnneIwXSc2FKAvyWn6DMyEonJyeVu9rE0SsiuFJJSJdxkbAvgdh6sYVZ/dK0/L+W7EjfgcPhoFuTbiy9eWnFD15T/vnIFhQMrAOn/w9iTvN+vmCed1EHdtkMfVRXiOpy6PM1Zd14WPOQ/b7ZxXB6MStDFCc7246q2b8fhg+32b+bbrKfTyEh9nOxYUPv15x6qrv2hcNha3Mcrlp4WprN4pf1M2PmTJthHzIETjyxbK+R8svOtqPPCooNfvWV/ZnL0SMnx16/7Nxpp+i1a+frHomIHDlSU+GYY0jfs4cGcNg4VEE7VRC0u1w2GJ4/315Y/vgj9O3r3SYvz877a9bMDv2uiJtusnMa8/Ls4x07ig+6X3rJXkCdd56tnu4ZMMyaZS+ahwwp21JoLpcdyv/77+5gPzPTVmH+5JOyBe5XXglffGH3FRxspwcEl2N48rJldvhtbKy9GPcsJhUZaee6Dh1qHyck2H+DgmBhyxb7fHS0veFR9CaHMXYu7+6Z0HwItB1WfB92fAO/eFyQnvaVbV9eBxNgZjc7LL9+Oxiw3C79Vgnx++Jp+0pbMvMycRkX066cxoXHXejVZm3CWvZl76Nns5448v/NjIFlGfBZol3SrUDX+nBVDNQvY4yU68pldfxqmoc3J7ZebKXey9wtcxn+3XBCAkP4ePDHnNy4mKkY/si4AMeROcx6Zg/Ykz+n3hEEVxVTd6Is7r3X/q4VfD6tXn3oZ8zrr8Mdd7gf33mnvdkmR6Y9e2wR1OOOc9+IlqPH44/DU0/Za4OYGHtNEhDg616JiBw5tm4l/dNPafDQQ/4dtI8fP56vv/6aDRs2EBYWRu/evXn++ec57rjjCttkZmZy9913M3XqVLKyshgwYABvvvkmsbHu4GD79u3cfvvtzJs3j3r16jF06FDGjx9PYBkzM5UO2sHecV6yxM6frq7leJYssTcHDhyAyy6zS5kVDRIWLPCu0Pztt3Y++oYNNpiPjbVzPItWbC/JP/+4i8o5PObngr273rTp4fexaZOtsr97tx0tcP31ZTs22Kx8kya2LoDLZftgjD3u7t12W1CQLSL42We2yvIrr5Qvk18WrlxYdhvs+h80vQi6vw3OCl6c5B6wy8iFHwsB5SjMVYKF2xdy2vs2u+t0OHns9Md4/MzHS31Nag5MToTfPWof1g+Aq2Ph5HLU43IZF30/6su8rfMIDQxl3tB59GxWzEgF8V/rnoU1D9vvm14IZ1Sw8OG6dTZwS0uzn1MzZxZ/Ad+mjXtZpfBw215EjjyXXWYTBAXXBKmp0KByN6FFRI42ZY1DqziyKZ8FCxYwcuRITjnlFHJzc3nooYfo378/f/75J3Xr2iWj7rrrLr7//nu++OILGjRowKhRoxgyZAiL8odY5uXlMWjQIOLi4vj111/ZvXs3119/PUFBQTz77LPl69B779nh6dddd/hM8D//2ArJ3brZC9WgIFvgrTr17GkD5aQkOwytuKyeZ4G5gscFIwESEuwf13377JC2sggPt+8tN9f9hzkgwG6PiirbPtq1gzVryta2qORk74JVBUONg4Lc79/hgGeesd9v3myL/nlWia6szGTY8RW0ugp6/rfy+wusA5FVt2RTtybdOCnuJFbHr6ZecD2uOOGKQ9r8nfI3Y2eNJdeVx3WnvsHS7JYc8Miun1IfroyBeuX8RNicspl5W+3Ih5y8HD5a81HNBe0Zm2HJTZCTDl1fhtgza+a4t
c3xD0JkV8hJg2YXVXw/HTvaIfH//msLdJY07P3cc+1Slg7HkZedNca+v4YNy1cJv6Zt3mwr+Rd81vfUjTSpBiNG2OugrCy45RYF7CIi1civhscnJSURExPDggULOP3000lLS6NRo0ZMmTKFSy+9FIANGzbQoUMHFi9eTM+ePfnxxx85//zz+ffffwuz75MmTeL+++8nKSmJ4DIMwy68wwGEg608/s47Jb8gOdkGoqmp9sLz+eft0PWi8zerUkqKvQjbutUOSfNc8sVTTo4djj5tmh2i/8039uI5LMwd8A4aZP/QltWMGbbqcvv2dgjctm22L2VZK/jAAZtdT021y7E1b17244Lt8zXX2HXfg4PtPMqgIPjwQztVYNcue7f/scfcr3n//cNXxC8rVw5MPx725S/n1+sTaF2FNwSqSHZeNqvjV3NM1DGF65h7OvW9U1m8ez0m9lqiok/nkg6XABAeCFfHQJcKrna1P3s/LV9uyd7MvbiMiwl9J3Bvn3sr81bKbt55ED/LDk0PaQSXJNTMcaVycnLs729mpl2dI/8Grd8zxlZK/+ILO93ml1/sZ6I/6tfPTiMyxn7mbt3q6x5JbbV3r/1q3frInB4kIuJjZc20+1XQvnnzZtq1a8fatWs54YQTmDt3Lueccw579+4lIiKisF3Lli0ZM2YMd911F4899hjffvstq1evLnx+y5YttGnThlWrVtHFc2m1fFlZWWRluZf2Sk9Pp3nz5u6gvU0bd3Gd4syfD2ed5b2tUSM777uk+erZ2XaN4x077JzO8l7s3XGHzU4VzAtPSSn7EHewQ+LHj7ev+f577yH0FZWTYy9c4+JsQaLi3H47vP22/WN+3HF2CG15FWS3IiPtCIfoaO+fs8tll4H79FN7o+LDD6tu6Zb922BaK/djRxC0vAJ6vg9Onw5UOazfE35nQ/IG+rcZwEmfjmBbcHdwhlIvuD5Xn3g1PcLhihioW8kpiN9u+JbBnw3GhYvIkEjWj1pf6bntZTKnLyTOs0F7cCRcmnL415SXMfD7Y7D5LYjuBX2mQOAREmQeSdLT7e+sPy+5tGGDew3ngAA7H99z3frqtHOnrUfSvTuccMLh2596KixebD8bY2LsKCsRERHxO2UN2v1mnXaXy8WYMWPo06cPJ+RflMTHxxMcHOwVsAPExsYSHx9f2MZzfnvB8wXPFWf8+PE0aNCg8Kt50ezvjTeW3tmuXd3V4gskJdnq7CV5+mkbOL/1FpxyCvToAU8+eeia7iUpGCJujA2Wc3NLb1/Us8/aIZO//AKnn16+1xbHGBg40A6779gRpkwpvt26dfbCMS8P/vqr7O/Xk8Nh57DXqWMvWIveGHE67UiA5GSYOrVqL/zDmkGkx40fkwNbP4GdFZz3W03eXfUuPf7bgzEzxrAvax8zN8+ky6QuXPG/Wzn2m9dof8KzBAc1ICggmH4tuzOyKdzUuPIBO8DmvZtx4QJgb9Zelv+7vPI7Lc6miXZN9L/ft4+7/h+Ed4A6zaHnh9VzzL2/wbqnISsJdn1nq/9L1XriCTusNibGBpr+KibGjlhyOu3nWUGtj+qWlGQL+g0bZouCrlhx+Nf83//Z2iqxsfDfKpjSczTat88WWXvoIftvICIi4kN+kyocOXIkf/zxBwsXLqz2Yz344IOMHTu28HFBpp2ZM+3FY48epe+gfn17cdmrlx0qDrbw2cmlVLr+6y8bfObl2YuBZcvs10knwYUX2sJMe/faizLPIWb//GOrtHftCkuX2ozL00/bIeLTptmhj5s22YuKSy8tOdP/xx82+5KWZrPRM2ZUrsprerpduq7AuHG2onxRd99tf1a5uXD//d7vbe1auwze2WeXrZJ9SXIPQGY81C3j8Ly8TFh4OcT/BC0ugx7vlVxUzhkA/X6xy2Kte8a9PaDiNwZy8nLIysuiXnC9Cu/D08bkjdz83c0ALNu1jFeWvkKjujGYBqdC9BCSCCUjK53ezXtzVYtWXNs4m
DqV+Kdfm7CWr9Z/RY+mPRjYbiBntz6bIGcQOa4cIkMjOaXJKVXyvgC7LJ/DAQnzYbldto6d0yC8PTTqBYP+qLpjFUhZBTu/hZjTISTa+zmH33xkVq8ZM+znTMuWtuJ7ZGT1HCcz0968BPu5+Pzz8L//2cfbttkK9KeeWr1Tj8oqKsp+Xr3zjr1RedttVbdvY+yqIwcP2r8HQUHu51atsiOrwP79mDPH1lIpzSmnlD5arLJ27rQ/h6ZN7c2E2lgx/Lbb7Ogth8MWeC1YqlBERMQH/OIKdNSoUUyfPp2ff/6ZZh5LmMXFxZGdnU1qaqpXtj0hIYG4/OA0Li6OZcuWee0vIX8oYFwJAWxISAghxWVje/a0weiwYfYP9ZNP2srlBVwuO58xOdleRBYE7GAvtBo1Ku1N2kruBw96b5892xZxufJKu//oaLufLl3sHLFJk2ww73LBPffYrMkDD9jM8qZN3vu64w549107v76oDz6wF8VgLzz//LNyaxgXXTqtpGD5ootslfcDB6BFC/f2jRvthWfBHPU1a9xDT8sjfSPM6gPZe6DxuXDG9JID8JwMWD4SkhbB/n/sti0fQcsrocnAko8RWBc6PWWHYe/8xq5l3aSEmgKHsXjHYgZOHkh6VjpPnfUUD5/+cKntc125BB5mGH5GdkaR/kaRFHEV1LE/z+CAYH7d8gMkfEzdra0YfvX0CvUdYHfGbnq+25PMXLu83IxrZjDgmAGsuW0NS3ctpW+bvlU3NH7rp7DkBsAJx9zq/dzBf6vmGEXt2wKzetnVAjD2hs1JE/KHx/eGY26G7DTY9AY4g6HdCFtYsLr99Qasvh/CmsIZ39mVBwq4XLaI5vbt9rOr6Cig8jp4EAYPtp9LS5bYz6TqWpYtONh+lu7ZYx8XfEasW2cDz4MHoXFj+7i6bhyUR+/e9quyMjJswdPffrO1PpKT3cU0r7rKe+RSt272b0JSkr053L9/5Y9fGS4XnHGGnSfvckFiIjzyiG/7VB3WrLHvDyo2rUtERKQqGR9yuVxm5MiRpkmTJuavv/465PnU1FQTFBRkvvzyy8JtGzZsMIBZvHixMcaYH374wTidTpOQkFDY5q233jLh4eEmMzOzTP1IS0szgEmbMMGYbt2MCQgwxuk0pkMHY1JT3Q2fesoYmxMxpn179/cFX40bG7Nnj7t9Xp4xs2YZ88svxrhcxiQnG9Oxo/drwsKMOe20Q/cFxjgc7u8DAoyJivLeVtxX3brFv8l333Xvp25d736WhctlTE6O97brrnP30+PfqEw++si73++9V/qx1z5tzE9nGbPpLe/nVt1nzJQAYyZjv1JWl7yf3x40ZorT3bbgK2FB+fpeRMK+BLMtdVuZ2l4w5QLjGOcwjMM4n3Cag9kHi22XkZVhTnvvNMM4zAVTLjDZudkl7tPlcpmR3480AU8GGv7vdMPHrxgmTzKnzF9lTl/4p2k++WHDk2GGcZjgp4Ir9B4LzNsyzzAOwziMY5zDPLXgqUrtr1RfNXH/G30abMz3ne33s043JudA9Rxzx7fe58b6/zu0zdwBxkx2GjPZYczCq6unH55y9uUfD3uuL7rW+/kJE+zvkNNpTLNmh/6elldamvfnzBVXVG5/h7N6tTHXXGPM3Xcbk5FhtxW8p4KvH36o3j7UtKeftv9eBe+vXTv39/XqHdp+925jPvnEmPXra76vRaWne/+NuvhiX/eoenzwgfvf6JlnfN0bERGppQrj0LS0Utv5dE77yJEj+eSTT5gyZQr169cnPj6e+Ph4DuZnoxs0aMCwYcMYO3Ys8+bNY+XKldx444306tWLnvlL2PTv35/jjz+e6667jjVr1jBz5kweeeQRRo4cWXw2vTT33QcrV9ohiC4XrF9vs+85Ofb5+fPdbTdsOPT1u3fbzFSB226zWZHTTrNF4NauPfSO/cGDJQ/9NMZdWTkgwGahnE6b1S7pvWVmFr/9xhttQbgRI+Dnn8u+XBvYIfrHHGOPe
d997r7df7/d17ZtcMklZd8f2CHxBX2IiLBD9ks8/nvw+yOQMA+W3Qp7POZ0hh8LJg8cARAQBmFNSt5PbgbgMSIgvCOc9Bw0qvhSfV+s+4ImLzWh5cstGTd/3GHbNw93109wGRd3/nhnse0+X/c5v2z/BYDv/vqOmX/PLHGfDoeDcX1fZ8INOZzR/W2aRRxD92Y9OKvZSbx5UgdGtQwHl/2dGtx+cDne3aG6N+1Oh2ibwa8bXLewCn21CPE4R13ZdiTFZWnQdz4EhpX4skqJOd1OswBb3K64JdBSVgIuwEBKNc3f9+QIhIBQCs/doCKl/letsp8LLpcdtrx3b+WOFxRU8nJt1aFzZzsF6MUXoV7+lJHTTnP3oX59O42oNila28Pz8++CCw5tHxdnV9Hwh2r19evb0QBg/y7dfLNv+1OSjRvt37unn7ajRspr6FA7emXLFjuvXURExJdq6CZCsYBiv95///3CNgcPHjQjRowwkZGRpk6dOmbw4MFm9+7dXvvZunWrGThwoAkLCzPR0dHm7rvvNjnlyDYV3uEoLsMNxhSMAnjvPfe2M844NMsdGmrMrl3uHdev737u+ONtlsTh8N5/nTrGbN5szJtv2ky65/6cTpuRXrjQmB07jNmyxZjBg405/3xjFi82ZtIkY2bONOacc7z7VdXuuMNm6AuOsW2bMUOGuEcK/Pprya/Nyyv5ufh4Y6ZNs1mkEl+fbcyX0d7Zz10/up93uYzZ+IYxS4cbk7S49Pexb7sx33cyZmodY/4YX3rbMur+TvfCzHPY02GHbZ+emW6a/6e5V7Y6J+/Qc3XahmmFbRiHWbyj+PfmchkzN8WYO/4yZvgG99fHu405mFvQxmXmb5lvpm2YVuyxyutA9gHz89afTeK+xErvq1RJS2w2ezL2/3++UL3HK5CdYUdfZCYX//zvT7jPxQ2v1Uyf/p1pzMzexvxy5aH9mjXLmKAg+/s4ZIg9KSpj3z7373tAgDE33FC5/VXU0qXGvPyyMZs2+eb41SktzZgLLjCmaVNjXnrJ/pt9/70dsZRd8qgav5GXZ8zKlcZs3+7rnhQvN9eYuDj3qLkHHvB1j0RERIpV1ky7Xy355ite67Q3bGgrxo7IL3rVrp0t4law3vvq1Xb+5Rln2OzQ6NF2HnxMjC3e5LnE3JAhdp10sPMW//Mf+OormDzZVh5u29ZmWI45xra56SY799zhsPMYp00rubCcp8xMO/c9O9tm90tZLqBCnnzSVng2xmbhVq50z4cPCIDrr7dzaj1t2ADnnmuXanvySTsPvyKKLrlWpwVc+LffLLd287c38/7q93Hg4MTYE/nt1t8O+5oHfnqACYsm4HQ4aR/dnj9GHFpMzRjDhEUTmPXPLC7tcCm3n3L7IW2SsuHDeNjkUSYhKgiuj4UORVYly8nL4ZPfP8FlXFzb6VpCAv14aS1PO76Gja9BZGc46flKFQCsUml/2uX/wtv5uifWrl12Wa+TTqqaLPn778ODD9pCl19+Wfl58iI1ad8+OyIA7N/TwYPt314RERE/c0Su0+4rhT+sjz8mvF8/u0zO4sW24vuFF5ZeACk72xbkad3au+Iv2GD6009t0bbLLjt8hd2DB+GNN+z/R44s3xD28khNtRfkSUnw8MPeNxqKs26dLZKUnAxjx9o13ps1s/vJy4OXXrLbPd1wg72pkZdnL5pSUuww+PJy5cGsnpCyAnDAGd9C0/PLv59qsj97Py/++iIZ2RmM7TWWJvVLGZ6fLycvh7dWvkXygWRu63YbcfXKcGPGgzEwNxW+SYIcj9/eMyJgSDSEFnOa3fztzbz727sAXNnxSj699NNyHVMqYN9W+GUIHNgBnZ+FY27xdY9Ejh533mkLtoaF2cr8Z5zh6x6JiIgcQkF7OZT1h1Vuxthl2kJD/WtO5i23uDPjjRrZufglVX+fPNlWOTYGhg+368yDzaS/844dJXDrrYdm98aOtRWnjbEXTcnJh1acL6vcg5AwB
+q2gogTKraPWiIhP7v+t0d2PToIrouF9nVLfl3rl1uzNW0rADF1Y0i4J6F6Oyqw5Ca7OoHJA4cTLk2DoKpZ5k9KYIy98bl4sV2CclDFVnmQWmLnTjvyrKpHn4mIiFSRssahPi1EV+uNHWvXcu/SBV5+2de9cdu92/7f5bLBdF5eyW3ffttdNOm//3UvgdO+vc2w33578cNxx42zxe/OPtsudVfRgB1s0bGm55cvYE9dCytGw6ZJdqm2Cnrp15c49rVj6f1ub8bMGMP6pPUV3ldluAzMToGntnoH7GdFwGOtSg/YAa7vfL37+07Xl9LSD7ny7NJ+uQd83ZPyCQh1f+8IsoG7VK8vvrBLX06dakdJbd7s6x6JLzVrpoBdRERqBWXaqcZMe716sH+//b5VK1uFtjJSU+2c+s6dK7dm8ZIlMHCgnYs/YQLcfXfJbe+7D154wQbmnTvbStU5OXaeq9Npq8YHlmN++f798Pvvdk320obL52XC+pcgMxHaj4F6rct+jJwM+F9zyN1ns5zdXodjR5b99fl+T/idzpM6Fz524CAyNJIdY3dQJ6gG1ubON2XjXCZsTqBO+AmcEHMCDhw0CoLr4+DYMnbDGMOvO37FZVyc2uJUHCWNrPA3eVnw05mwZwmExMC5y6DuETK/OjMJlg6zdRk6PVl8JXqpWjfcAB9+6H48f76GRYuIiPiLvDw7cnfKFDvd9quvoE7NXVP7o7LGof5RzcufGWOzy4ebj14cz9fs2VOx4z/+ODz/vC1al5hoM+ONGsGaNdC4ccX22bOn3U9Ojh26X5pnn4U2bWz/b73Vbhs+3BbMA7tt0qSyHTctzY462LIFoqPtDYDmzYtvu/oh2Ph/gAO2fAiD/oQ6h58vDkBmAuSk2e8dAbZoWAXkunK9HhsMKZkpJO5PpFVEqwrtszxcBj7eEc+NKzdgHIGQ9ithgaHc2rYdF0VDSDkStw6Hgz4t+lRfZ6tL8mIbsANkJcLWKdDxQd/2qaxCG9kaDFIzFi3yDth79oTevX3XHxEREfE2f757iu6MGXYa7i2q+VMWGq9ZmkWLbIBct66tplxe7drZueIOh60uX3RQQ3a2XQfWVcLw7e3bbeX1rCw7hzw52W5PSrInelm5XPD55za4zsiw2wICDh+wg82i33abLVgXHW23eR77xx/L3o9589yjDZKTbXX8kqQXDEM3NgBfUI7ic/XauovVBYRC22Flf62HLnFdeOjUh2gQ0gBn/q/K+ceeT8sG1Z/p3Z0Fz2+Hb5LBEGDPnZwkOmfP5vIY74B9d8ZuDuQcYUPHyyqoyB3HSkx1kFruzyI35+6669DioCIiIuI79eqV/lhKpKC9NA89BHv32qD5zjvL//r33oOTT7ZZ8pQU7+B/1y67vWVLOOssG8AXFRrqztYbY4ejOxx2W9euZe/H00/DFVfY+efnnVf+91HU5ZcX//3hdOxoL6IL5sCXVrX+uNHej9M3lP04DgecPg0GrYeLd0HUyWV/rdduHNzT+x4S700k8d5E1t6+lmlXTqvWoeUuAz/ugae3wdZMaFQnhuYNmsPe2TRP/YCxXS70an/79Ntp8p8mxL0Yx/Jdy6utXz6Tmej9OCvFN/0Q/3fRRdCihf3+2GNhwADf9kdERES89ehha2KdfDLcc0/54oijnOa0U8pcgksusdlgY+ww7q1bK3aAvn1tltnlsvM29u2z88QfeMCdfZ83z87tKOrLL+367h07wrXXwoIF0K+fLXBXVmedZYejgA1oc3IqNty/gMsFs2fb4Ltv35Irzxdn6VJbmO7MM+37KM3md2HZcMAFJzwGnZ44tE1mIqT8Bg1PgZCqXSJv9IzRvLr0VRqGNWTu0Ll0iu1UpfsvaleWrQy/LdO9LTYYhsYa6pskosKiCPRYnz5pfxIxL8YA4HQ4uebEa/ho8EfV2scat2s6LLjA/fjYUdDtNd/1R/zbwYPwzz92VYuQEF/3RkRERKRUmtNeFd54w2a70
9JstrqioqJsYOt0QoMG9vu2bb2z5wUZoqIuvdR+FahIUaWrrnIH7ZdeWrmAHWyfK5rF6tHDfpXFMcOg6SDIO1h8Ibr92+CHkyAnFUJj4bzfITSmYv0qInF/Iq8ufRWA1MxUXl7yMu9d9F6V7LuoPAMzUuD7PfZ7AAfQPwouaAhBTgdw6PsKDwknMjSS9Kx0XMZF28i21dI/n4o8CRzBYHIBFzQ+19c9En8WFmZvcIqIiIjUIgraSxMXZwskVNarr9pAd+9eeOYZu23IELvO+aJFdj3hNm0qf5ySDB9uh9OnpNis+5EkLK7k5/6dYQN2sMXnEuZDy6oZZlMvuB71g+uzP2c/BkOLBiXcVKmknZnwQTzsyHJvaxwMQ+Og9WFWyQsJDGHu0Lm8tvQ1Wke25r4+91VLH32qTjPo9wts/xyie9mbOCIiIiIiRxENj6cal3yT6rV3NczoZouTOYPgvD8gvF2V7X7pzqW8svQV2kS24dHTHyUksOqG2+a64McU+CHFzmMHcDpgQCQMaghBqjYhIiIiIlKrlTUOVdCOgvYjWvJSSJxvh01Hdj5sc3+wIz+7vtMju94kBG6Ig5ZlKOgvIiIiIiJHPs1pl6NDdA/7dQTIdcH3KXb+umd2fWAUnBcFgcqui4iIiIhIEQraRWrAtvzs+r8e2fVm+dn15squi4iIiIhICRS0i1SjHJetCj9zr3d2fVAUnKvsuoiIiIiIHIaCdpFqsuWgXXd9d7Z7W/P87HozZddFRERERKQMFLSLVLEcF3y7B2anQEGVxwAHnN8QBkTZ70VERERERMpCQbtIFfrnoJ27nuCRXW8Zatddb1p1K8aJiIiIiMhRQkG7SBXIdsG0ZJiz151dD3TABQ2hf5Sdxy4iIiIiIlJeCtpFKmnzAfgwARI9suutQu3c9cbKrouIiIiISCUoaBepoKz87PrcItn1i6Khb6Sy6yIiIiIiUnkK2kUq4K8DtjJ8co57W5swGBoLccqui4iIiIhIFVHQLlIOWS74Ognmp7q3BTng4mg4W9l1ERERERGpYgraRcpow374KAH2eGTXjwmD6+MgNth3/RIRERERkdpLQbvIYWTmwVfJ8HOqe1uQA4Y0grMiwKHsuoiIiIiIVBMF7SKlWJ+fXU/xyK63y8+uxyi7LiIiIiIi1UxBu0gxDubBV0nwS5p7W4gThkTDGRHKrouIiIiISM1Q0C5SxLr98HE87M11bzuuDlwfC9HKrouIiIiISA1S0C6S70AefJEEvxbJrl/aCE5roOy6iIiIiIjUPAXtIsDaffBJAqR6ZNc71IHr4qBhkO/6JSIiIiIiRzcF7XJUO5AHnyXCknT3tlAnXNYI+ii7LiIiIiIiPqagXY5aa/bB5ARI88iud6wL18ZClLLrIiIiIiLiBxS0y1Fnf352falHdj3MCZfHQK9wZddFRERERMR/KGiXo8pvGTAlEdI9susn5mfXI5RdFxERERERP6OgXY4KGbkwNRFWZLi31QmAKxpBD2XXRURERETETylol1pvVQZMSYCMPPe2zvXg6hhl10VERERExL8paJdaKyPXDoVf5ZFdrxsAV8bAKfWVXRcREREREf+noF1qHWNgZf7c9f0e2fUu9eDqWAjXWS8iIiIiIkcIhS9Sq6Tn2qHwv+1zb6sbYIfCd1V2XUREREREjjAK2qVWMAaWZdil3Dyz613rw1UxUF9nuoiIiIiIHIEUysgRLy0XJifAGo/sev0AuCrWBu0iIiIiIiJHKgXtcsQyBpamw2dJcMAju35KfbhC2XUREREREakFFNbIESk1Bz5JgLX73dvCA+3c9S7KrouIiIiISC2hoF2OKMbA4nT4PBEOutzbe4Tb7HrdAN/1TUREREREpKopaJcjxt4c+DgB1hXJrl8bC53r+a5fIiIiIiIi1UVBu/g9Y2BhGnyZBJke2fWe4XC5susiIiIiIlKLKWgXv7YnBz6Oh/UH3Nsi8rPrJyq7LiIiIiIitZyCd
vFLxsAv+dn1LI/sep8GcGkjqKPsuoiIiIiIHAUUtIvfSc62c9c3eGTXIwPhujjoWNd3/RIREREREalpCtrFbxgDC1Lh62Tv7PppDeCSRhCm7LqIiIiIiBxlFLSLX0jKho8S4C+P7HpUEFwfCx2UXRcRERERkaOUgnbxKWNgbir8LxmyPbLrp0fAJdEQquy6iIiIiIgcxRS0i88kZsOH8bD5oHtbw/zsentl10VERERERBS0S81zGZi712bXc4x7+1kRMLgRhDh91jURERERERG/oqBdalRCNnwQD/94ZNejg2BoHBxbx3f9EhERERER8UcK2qVGuAz8tBemJUNufnbdAZwdCRdFK7suIiIiIiJSHAXtUu12Z9ns+tZM97aYYBgaC8couy4iIiIiIlIiBe1SbVwGZqbA9D3e2fVz8rPrwcqui4iIiIiIlEpBu1SLf/Oz69s8suuxwXbuetsw3/VLRERERETkSKKgXapUnkd2Pc8ju94vCi5sCEHKrouIiIiIiJSZgnapMjszbXZ9R5Z7W+P87HprZddFRERERETKTUG7VFquC2akwPcpdh472Oz6uVEwSNl1ERERERGRClPQLpWyIz+7vtMju94kBG6Ig5ahvuuXiIiIiIhIbaCgXSok1wU/pMCPHtl1pyM/ux4Fgcqui4iIiIiIVJqCdim3bfnZ9X89suvNQuzc9RbKrouIiIiIiFQZBe1SZrkuWxV+5l7v7Pp5UTBQ2XUREREREZEqp6BdymTrQZtd353t3tY8P7veXNl1ERERERGRaqGgXUqV44Lv9sCsFMhPrhPggPMbwoAo+72IiIiIiIhUDwXtUqJ/DsKH8RDvkV1vGWqz601DfNcvERERERGRo4WCdjlEjgumJcNPe93Z9UCP7LpT2XUREREREZEaoaBdvGw+AB8mQKJHdr1Vfna9ibLrIiIiIiIiNUpBuwCQlZ9dn1sku35hNPSLVHZdRERERETEFxS0C5sO2LnrSTnubW3CYGgsxCm7LiIiIiIi4jMK2o9iWS74Jgnmpbq3BTng4mg4W9l1ERERERERn1PQfpTaeAA+iodkj+x62zA7dz022Hf9EhERERERETcF7UeZzDz4OhkWpLq3BTlgcCM4K0LZdREREREREX+ioP0osn4/fJwAezyy6+3C4Po4iFF2XURERERExO8oaD8KZObBl0nwS5p7W7AThkTDmRHgUHZdRERERETELylor+XW7YeP42FvrnvbcXXg+liIVnZdRERERETErylor6UO5GfXF3lk10OccGkjOK2BsusiIiIiIiJHAgXttdAf++zc9VSP7HqHOnBdHDQM8l2/REREREREpHwUtNciB/Lg80RYnO7eFpqfXT9V2XUREREREZEjjoL2WuL3ffBJAqR5ZNc71oVrYyFK2XUREREREZEjkoL2I9z+PPgsEZYWya5fHgO9w5VdFxEREREROZIpaD+Crc6AyYmQ7pFdPyE/ux6p7LqIiIiIiMgRT0H7EWhfLkxNhOUZ7m11AuCKRtBD2XUREREREZFaQ0H7EWZVBkxJgIw897ZO9eCaGIhQdl1ERERERKRWUdB+hMjIhU8TYWWR7PqVMdC9vrLrIiIiIiIitZGCdj9njA3UP02EfR7Z9ZPqwTWxEK5/QRERERERkVpLIZ8fS8+1Q+F/2+feVjcAroqBbsqui4iIiIiI1HoK2v2QMbbI3NREu6RbgZPr24Bd2XUREREREZGjg8I/P5OWC5MTYI1Hdr1+AFwVC13r+65fIiIiIiIiUvMUtPsJY2BpOnyWBAc8suvd6ttic/X1LyUiIiIiInLUUSjoB1Jz4JMEWLvfva1+gC0010XZdRERERERkaOWgnYfMgYWp8PniXDQ5d7ePdxm1+sG+K5vIiIiIiIi4nsK2n1kbw58nADrPLLr4YFwbSx0rue7fomIiIiIiIj/UNBew4yBRWnwRRJkemTXe4bD5cqui4iIiIiIiAcF7TVoTw58HA/rD7i3R
eRn109Udl1ERERERESKUNBeA4yBX9LgyyTI8siu924AlzWCOsqui4iIiIiISDEUtFez5Gw7d32DR3Y9MhCui4OOdX3XLxEREREREfF/CtqriTGwIBW+TvbOrp/aAC5tBGHKrouIiIiIiMhhKGivBknZ8FEC/OWRXY8Kguti4Xhl10VERERERKSMFLRXIWNgXip8kwzZHtn10yPgkmgIVXZdREREREREykFBexVJzIYP42HzQfe2hkFwfSy0V3ZdREREREREKkBBeyW5DMzdC/9Lhhzj3n5mBAxpBCFOn3VNREREREREjnAK2ishIRs+iId/PLLr0UEwNA6OreO7fomIiIiIiEjtoKC9AlwGftoL33pk1x3AWZFwcbSy6yIiIiIiIlI1FLSX0+4sO3d9S6Z7W0ywnbveTtl1ERERERERqUIK2svIZWBWCny3B3I9suvnRMJF0RCs7LqIiIiIiIhUMQXtZfBvlp27vs0jux4bbOeutw3zXb9ERERERESkdlPQXoo8AzNTYPoe+z3Y7Hq/KLiwIQQpuy4iIiIiIiLVSEF7CXZm2uz6jiz3tsb52fXWyq6LiIiIiIhIDVDQXkSuC2akwA8p3tn1AVFwvrLrIiIiIiIiUoNqTQj6xhtv0KpVK0JDQ+nRowfLli0r9z52ZsL47bbYXEHA3iQEHmgBgxspYBcREREREZGaVSvC0M8++4yxY8fy+OOPs2rVKjp37syAAQNITEws135e2gk784fDOx1wXkN4uAW00nB4ERERERER8YFaEbT/5z//4ZZbbuHGG2/k+OOPZ9KkSdSpU4f33nuvXPtx5WfXm4XAgy3sUm6BteInJCIiIiIiIkeiI35Oe3Z2NitXruTBBx8s3OZ0Ounbty+LFy8u9jVZWVlkZbkrzKWlpQGQuz+dc6OgXwQEZkN6drV2XURERERERI5S6enpABhjSm13xAftycnJ5OXlERsb67U9NjaWDRs2FPua8ePH88QTTxyy/ZMzmvNJtfRSRERERERE5FAZGRk0aNCgxOeP+KC9Ih588EHGjh1b+Dg1NZWWLVuyffv2Un9YIkey9PR0mjdvzo4dOwgPD/d1d0Sqhc5zOVroXJejgc5zqe2MMWRkZNCkSZNS2x3xQXt0dDQBAQEkJCR4bU9ISCAuLq7Y14SEhBASEnLI9gYNGugDQWq98PBwnedS6+k8l6OFznU5Gug8l9qsLEnjI77MWnBwMF27dmXOnDmF21wuF3PmzKFXr14+7JmIiIiIiIhI5RzxmXaAsWPHMnToULp160b37t15+eWX2b9/PzfeeKOvuyYiIiIiIiJSYbUiaL/iiitISkriscceIz4+npNOOokZM2YcUpyuJCEhITz++OPFDpkXqS10nsvRQOe5HC10rsvRQOe5iOUwh6svLyIiIiIiIiI+ccTPaRcRERERERGprRS0i4iIiIiIiPgpBe0iIiIiIiIifkpBu4iIiIiIiIifOuqD9jfeeINWrVoRGhpKjx49WLZsma+7JFJm48eP55RTTqF+/frExMRw8cUXs3HjRq82mZmZjBw5koYNG1KvXj0uueQSEhISvNps376dQYMGUadOHWJiYrj33nvJzc2tybciUmbPPfccDoeDMWPGFG7TeS61xa5du7j22mtp2LAhYWFhnHjiiaxYsaLweWMMjz32GI0bNyYsLIy+ffuyadMmr32kpKRwzTXXEB4eTkREBMOGDWPfvn01/VZEipWXl8ejjz5K69atCQsLo23btjz11FN41sbWeS7i7agO2j/77DPGjh3L448/zqpVq+jcuTMDBgwgMTHR110TKZMFCxYwcuRIlixZwuzZs8nJyaF///7s37+/sM1dd93Fd999xxdffMGCBQv4999/GTJkSOHzeXl5DBo0iOzsbH799Vc+/PBDPvjgAx577DFfvCWRUi1fvpy33nqLTp06eW3XeS61wd69e+nTpw9BQUH8+OOP/Pnnn7z00ktERkYWtpkwYQKvv
voqkyZNYunSpdStW5cBAwaQmZlZ2Oaaa65h3bp1zJ49m+nTp/Pzzz8zfPhwX7wlkUM8//zzTJw4kddff53169fz/PPPM2HCBF577bXCNjrPRYowR7Hu3bubkSNHFj7Oy8szTZo0MePHj/dhr0QqLjEx0QBmwYIFxhhjUlNTTVBQkPniiy8K26xfv94AZvHixcYYY3744QfjdDpNfHx8YZuJEyea8PBwk5WVVbNvQKQUGRkZpl27dmb27NnmjDPOMKNHjzbG6DyX2uP+++83p556aonPu1wuExcXZ1544YXCbampqSYkJMR8+umnxhhj/vzzTwOY5cuXF7b58ccfjcPhMLt27aq+zouU0aBBg8xNN93ktW3IkCHmmmuuMcboPBcpzlGbac/OzmblypX07du3cJvT6aRv374sXrzYhz0Tqbi0tDQAoqKiAFi5ciU5OTle53n79u1p0aJF4Xm+ePFiTjzxRGJjYwvbDBgwgPT0dNatW1eDvRcp3ciRIxk0aJDX+Qw6z6X2+Pbbb+nWrRuXXXYZMTExdOnShXfeeafw+S1bthAfH+91rjdo0IAePXp4nesRERF069atsE3fvn1xOp0sXbq05t6MSAl69+7NnDlz+OuvvwBYs2YNCxcuZODAgYDOc5HiBPq6A76SnJxMXl6e1wUcQGxsLBs2bPBRr0QqzuVyMWbMGPr06cMJJ5wAQHx8PMHBwURERHi1jY2NJT4+vrBNcb8HBc+J+IOpU6eyatUqli9ffshzOs+ltvjnn3+YOHEiY8eO5aGHHmL58uXceeedBAcHM3To0MJztbhz2fNcj4mJ8Xo+MDCQqKgoneviFx544AHS09Np3749AQEB5OXl8cwzz3DNNdcA6DwXKcZRG7SL1DYjR47kjz/+YOHChb7uikiV2rFjB6NHj2b27NmEhob6ujsi1cblctGtWzeeffZZALp06cIff/zBpEmTGDp0qI97J1I1Pv/8cyZPnsyUKVPo2LEjq1evZsyYMTRp0kTnuUgJjtrh8dHR0QQEBBxSXTghIYG4uDgf9UqkYkaNGsX06dOZN28ezZo1K9weFxdHdnY2qampXu09z/O4uLhifw8KnhPxtZUrV5KYmMjJJ59MYGAggYGBLFiwgFdffZXAwEBiY2N1nkut0LhxY44//nivbR06dGD79u2A+1wt7dolLi7ukIK6ubm5pKSk6FwXv3DvvffywAMPcOWVV3LiiSdy3XXXcddddzF+/HhA57lIcY7aoD04OJiuXbsyZ86cwm0ul4s5c+bQq1cvH/ZMpOyMMYwaNYpvvvmGuXPn0rp1a6/nu3btSlBQkNd5vnHjRrZv3154nvfq1Yu1a9d6/fGbPXs24eHhh1w8ivjCOeecw9q1a1m9enXhV7du3bjmmmsKv9d5LrVBnz59Dlm286+//qJly5YAtG7dmri4OK9zPT09naVLl3qd66mpqaxcubKwzdy5c3G5XPTo0aMG3oVI6Q4cOIDT6R2CBAQE4HK5AJ3nIsXydSU8X5o6daoJCQkxH3zwgfnzzz/N8OHDTUREhFd1YRF/dvvtt5sGDRqY+fPnm927dxd+HThwoLDNbbfdZlq0aGHmzp1rVqxYYXr16mV69epV+Hxubq454YQTTP/+/c3q1avNjBkzTKNGjcyDDz7oi7ckUiae1eON0XkutcOyZctMYGCgeeaZZ8ymTZvM5MmTTZ06dcwnn3xS2Oa5554zERERZtq0aeb33383F110kWndurU5ePBgYZtzzz3XdOnSxSxdutQsXLjQtGvXzlx11VW+eEsihxg6dKhp2rSpmT59utmyZYv5+uuvTXR0tLnvvvsK2+g8F/F2VAftxhjz2muvmRYtWpjg4GDTvXt3s2TJEl93SaTMgGK/3n///cI2Bw8eNCNGjDCRkZGmTp06ZvDgwWb37t1e+9m6dasZOHCgCQsLM9HR0ebuu+82OTk5NfxuRMquaNCu81xqi++++86ccMIJJiQkxLRv3
968/fbbXs+7XC7z6KOPmtjYWBMSEmLOOeccs3HjRq82e/bsMVdddZWpV6+eCQ8PNzfeeKPJyMioybchUqL09HQzevRo06JFCxMaGmratGljHn74Ya/lN3Wei3hzGGOMLzP9IiIiIiIiIlK8o3ZOu4iIiIiIiIi/U9AuIiIiIiIi4qcUtIuIiIiIiIj4KQXtIiIiIiIiIn5KQbuIiIiIiIiIn1LQLiIiIiIiIuKnFLSLiIiIiIiI+CkF7SIiIiIiIiJ+SkG7iIiIVIlx48Zx0kkn+bobAJx55pmMGTPG190QERGpNAXtIiIifiY+Pp7Ro0dzzDHHEBoaSmxsLH369GHixIkcOHDA192rkHHjxuFwOEr9qoj58+fjcDhITU2t2g6LiIj4iUBfd0BERETc/vnnH/r06UNERATPPvssJ554IiEhIaxdu5a3336bpk2bcuGFFxb72pycHIKCgmq4x2Vzzz33cNtttxU+PuWUUxg+fDi33HJLse2zs7MJDg6uqe6JiIj4LWXaRURE/MiIESMIDAxkxYoVXH755XTo0IE2bdpw0UUX8f3333PBBRcUtnU4HEycOJELL7yQunXr8swzzwAwceJE2rZtS3BwMMcddxwff/xx4Wu2bt2Kw+Fg9erVhdtSU1NxOBzMnz8fcGev58yZQ7du3ahTpw69e/dm48aNXn197rnniI2NpX79+gwbNozMzMwS31e9evWIi4sr/AoICKB+/fqFj6+88kpGjRrFmDFjiI6OZsCAAYft69atWznrrLMAiIyMxOFwcMMNNxS2dblc3HfffURFRREXF8e4cePK+a8hIiLiewraRURE/MSePXuYNWsWI0eOpG7dusW2KTqMfNy4cQwePJi1a9dy00038c033zB69Gjuvvtu/vjjD2699VZuvPFG5s2bV+7+PPzww7z00kusWLGCwMBAbrrppsLnPv/8c8aNG8ezzz7LihUraNy4MW+++Wa5j+Hpww8/JDg4mEWLFjFp0qTDtm/evDlfffUVABs3bmT37t288sorXvurW7cuS5cuZcKECTz55JPMnj27Un0UERGpaRoeLyIi4ic2b96MMYbjjjvOa3t0dHRhFnvkyJE8//zzhc9dffXV3HjjjYWPr7rqKm644QZGjBgBwNixY1myZAkvvvhiYVa6rJ555hnOOOMMAB544AEGDRpEZmYmoaGhvPzyywwbNoxhw4YB8PTTT/PTTz+Vmm0/nHbt2jFhwoTCx1u3bi21fUBAAFFRUQDExMQQERHh9XynTp14/PHHC/f9+uuvM2fOHPr161fhPoqIiNQ0ZdpFRET83LJly1i9ejUdO3YkKyvL67lu3bp5PV6/fj19+vTx2tanTx/Wr19f7uN26tSp8PvGjRsDkJiYWHicHj16eLXv1atXuY/hqWvXrpV6fVGe/Qf7Hgr6LyIicqRQpl1ERMRPHHPMMTgcjkPmjrdp0waAsLCwQ15T0jD6kjid9n69MaZwW05OTrFtPYvaFQzLd7lc5TpeeRR9L+Xpa3GKFuVzOBzV2n8REZHqoEy7iIiIn2jYsCH9+vXj9ddfZ//+/RXaR4cOHVi0aJHXtkWLFnH88ccD0KhRIwB2795d+LxnobfyHGfp0qVe25YsWVLu/ZSmLH0tqDCfl5dXpccWERHxF8q0i4iI+JE333yTPn360K1bN8aNG0enTp1wOp0sX76cDRs2HHYI+b333svll19Oly5d6Nu3L9999x1ff/01P/30E2Cz9T179uS5556jdevWJCYm8sgjj5S7n6NHj+aGG26gW7du9OnTh8mTJ7Nu3brCUQFVoSx9bdmyJQ6Hg+nTp3PeeecRFhZGvXr1qqwPIiIivqZMu4iIiB9p27Ytv/32G3379uXBBx+kc+fOdOvWjddee4177rmHp556qtTXX3zxxbzyyiu8+OKLdOzYkbfeeov333+fM888s7DNe++9R25uLl27dmXMmDE8/fTT5e7nFVdcwaOPPsp9991H1
65d2bZtG7fffnu593M4h+tr06ZNeeKJJ3jggQeIjY1l1KhRVd4HERERX3IYz4liIiIiIiIiIuI3lGkXERERERER8VMK2kVERERERET8lIJ2ERERERERET+loF1ERERERETETyloFxEREREREfFTCtpFRERERERE/JSCdhERERERERE/paBdRERERERExE8paBcRERERERHxUwraRURERERERPyUgnYRERERERERP/X/jVniepzVPWQAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# test it\n", + "\n", + "Tester.test(linear_regression_pricer)" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "id": "79e1574b-52ef-49cc-bfb5-e97252ed5db8", + "metadata": {}, + "outputs": [], + "source": [ + "# For the next few models, we prepare our documents and prices\n", + "# Note that we use the test prompt for the documents, otherwise we'll reveal the answer!!\n", + "\n", + "prices = np.array([float(item.price) for item in train])\n", + "documents = [item.test_prompt() for item in train]" + ] + }, + { + "cell_type": "code", + "execution_count": 60, + "id": "e126c22e-53e7-4967-9ebb-6b7dd7fe4ade", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
LinearRegression()
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
" + ], + "text/plain": [ + "LinearRegression()" + ] + }, + "execution_count": 60, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Use the CountVectorizer for a Bag of Words model\n", + "\n", + "np.random.seed(42)\n", + "vectorizer = CountVectorizer(max_features=1000, stop_words='english')\n", + "X = vectorizer.fit_transform(documents)\n", + "regressor = LinearRegression()\n", + "regressor.fit(X, prices)" + ] + }, + { + "cell_type": "code", + "execution_count": 61, + "id": "4b7148d3-3202-4536-a75c-1627495c51d3", + "metadata": {}, + "outputs": [], + "source": [ + "def bow_lr_pricer(item):\n", + " x = vectorizer.transform([item.test_prompt()])\n", + " return max(regressor.predict(x)[0], 0)" + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "id": "38f7f7d0-d22c-4282-92e5-9666a7b8535d", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[93m1: Guess: $296.58 Truth: $374.41 Error: $77.83 SLE: 0.05 Item: OEM AC Compressor w/A/C Repair Kit For F...\u001b[0m\n", + "\u001b[93m2: Guess: $165.14 Truth: $225.11 Error: $59.97 SLE: 0.09 Item: Motorcraft YB3125 Fan Clutch\u001b[0m\n", + "\u001b[91m3: Guess: $162.26 Truth: $61.68 Error: $100.58 SLE: 0.92 Item: Dorman 603-159 Front Washer Fluid Reserv...\u001b[0m\n", + "\u001b[91m4: Guess: $326.14 Truth: $599.99 Error: $273.85 SLE: 0.37 Item: HP Premium 17.3-inch HD Plus Touchscreen...\u001b[0m\n", + "\u001b[92m5: Guess: $51.69 Truth: $16.99 Error: $34.70 SLE: 1.15 Item: 5-Position Super Switch Pickup Selector ...\u001b[0m\n", + "\u001b[93m6: Guess: $74.06 Truth: $31.99 Error: $42.07 SLE: 0.68 Item: Horror Bookmarks, Resin Horror Bookmarks...\u001b[0m\n", + "\u001b[91m7: Guess: $344.82 Truth: $101.79 Error: $243.03 SLE: 1.47 Item: SK6241 - Stinger 4 Gauge 6000 Series Pow...\u001b[0m\n", + "\u001b[92m8: Guess: $241.39 Truth: $289.00 Error: $47.61 SLE: 0.03 Item: Godox ML60Bi LED Light Kit, Handheld LED...\u001b[0m\n", + "\u001b[93m9: 
Guess: $404.56 Truth: $635.86 Error: $231.30 SLE: 0.20 Item: Randall RG75DG3PLUS G3 Plus 100-Watt Com...\u001b[0m\n", + "\u001b[93m10: Guess: $111.12 Truth: $65.99 Error: $45.13 SLE: 0.27 Item: HOLDWILL 6 Pack LED Shop Light, 4FT 24W ...\u001b[0m\n", + "\u001b[91m11: Guess: $362.96 Truth: $254.21 Error: $108.75 SLE: 0.13 Item: Viking Horns V103C/1005ATK 3 Gallon Air ...\u001b[0m\n", + "\u001b[93m12: Guess: $328.25 Truth: $412.99 Error: $84.74 SLE: 0.05 Item: CURT 70110 Custom Tow Bar Base Plate Bra...\u001b[0m\n", + "\u001b[93m13: Guess: $135.78 Truth: $205.50 Error: $69.72 SLE: 0.17 Item: 10-Pack Solar HAMMERED BRONZE Finish Pos...\u001b[0m\n", + "\u001b[93m14: Guess: $196.39 Truth: $248.23 Error: $51.84 SLE: 0.05 Item: COSTWAY Electric Tumble Dryer, Sliver\u001b[0m\n", + "\u001b[91m15: Guess: $205.64 Truth: $399.00 Error: $193.36 SLE: 0.44 Item: FREE SIGNAL TV Transit 32\" 12 Volt DC Po...\u001b[0m\n", + "\u001b[92m16: Guess: $301.44 Truth: $373.94 Error: $72.50 SLE: 0.05 Item: Bilstein 5100 Monotube Gas Shock Set com...\u001b[0m\n", + "\u001b[91m17: Guess: $298.78 Truth: $92.89 Error: $205.89 SLE: 1.35 Item: Sangean K-200 Multi-Function Upright AM/...\u001b[0m\n", + "\u001b[91m18: Guess: $202.57 Truth: $51.99 Error: $150.58 SLE: 1.81 Item: Charles Leonard Magnetic Lapboard Class ...\u001b[0m\n", + "\u001b[93m19: Guess: $240.07 Truth: $179.00 Error: $61.07 SLE: 0.09 Item: Gigabyte AMD Radeon HD 7870 2 GB GDDR5 D...\u001b[0m\n", + "\u001b[92m20: Guess: $55.82 Truth: $19.42 Error: $36.40 SLE: 1.05 Item: 3dRose LLC 8 x 8 x 0.25 Inches Bull Terr...\u001b[0m\n", + "\u001b[91m21: Guess: $291.76 Truth: $539.95 Error: $248.19 SLE: 0.38 Item: ROKINON 85mm F1.4 Auto Focus Full Frame ...\u001b[0m\n", + "\u001b[92m22: Guess: $171.79 Truth: $147.67 Error: $24.12 SLE: 0.02 Item: AUTOSAVER88 Headlight Assembly Compatibl...\u001b[0m\n", + "\u001b[93m23: Guess: $91.69 Truth: $24.99 Error: $66.70 SLE: 1.62 Item: ASI NAUTICAL 2.5 Inches Opera Glasses Bi...\u001b[0m\n", + 
"\u001b[91m24: Guess: $361.79 Truth: $149.00 Error: $212.79 SLE: 0.78 Item: Behringer TUBE OVERDRIVE TO100 Authentic...\u001b[0m\n", + "\u001b[92m25: Guess: $0.00 Truth: $16.99 Error: $16.99 SLE: 8.35 Item: Fun Express Insect Finger Puppets - 24 f...\u001b[0m\n", + "\u001b[91m26: Guess: $149.93 Truth: $7.99 Error: $141.94 SLE: 7.96 Item: WAFJAMF Roller Stamp Identity Theft Stam...\u001b[0m\n", + "\u001b[91m27: Guess: $46.05 Truth: $199.99 Error: $153.94 SLE: 2.11 Item: Capulina Tiffany Floor Lamp 2-Light 16\" ...\u001b[0m\n", + "\u001b[92m28: Guess: $258.31 Truth: $251.45 Error: $6.86 SLE: 0.00 Item: Apple Watch Series 6 (GPS, 44mm) - Space...\u001b[0m\n", + "\u001b[93m29: Guess: $149.74 Truth: $231.62 Error: $81.88 SLE: 0.19 Item: ICON 01725 Tandem Axle Fender Skirt FS17...\u001b[0m\n", + "\u001b[93m30: Guess: $197.30 Truth: $135.00 Error: $62.30 SLE: 0.14 Item: SanDisk 128GB Ultra (10 Pack) MicroSD Cl...\u001b[0m\n", + "\u001b[92m31: Guess: $313.84 Truth: $356.62 Error: $42.78 SLE: 0.02 Item: Velvac 2020,L,C/Hr,W,E2003,102\",Bk - 715...\u001b[0m\n", + "\u001b[93m32: Guess: $203.01 Truth: $257.99 Error: $54.98 SLE: 0.06 Item: TCMT Passenger Backrest Sissy Bar & Lugg...\u001b[0m\n", + "\u001b[91m33: Guess: $202.24 Truth: $27.99 Error: $174.25 SLE: 3.79 Item: Alnicov 63.5MM Brass Tremolo Block,Tremo...\u001b[0m\n", + "\u001b[92m34: Guess: $192.52 Truth: $171.20 Error: $21.32 SLE: 0.01 Item: Subaru Forester Outback Legacy OEM Engin...\u001b[0m\n", + "\u001b[92m35: Guess: $204.56 Truth: $225.00 Error: $20.44 SLE: 0.01 Item: Richmond Auto Upholstery - 2012 Dodge Ra...\u001b[0m\n", + "\u001b[93m36: Guess: $159.54 Truth: $105.00 Error: $54.54 SLE: 0.17 Item: AP-39 Automotive Paint Primer Grey 2K Ur...\u001b[0m\n", + "\u001b[92m37: Guess: $257.12 Truth: $299.99 Error: $42.87 SLE: 0.02 Item: Road Top Wireless Carplay Retrofit Kit D...\u001b[0m\n", + "\u001b[92m38: Guess: $565.27 Truth: $535.09 Error: $30.18 SLE: 0.00 Item: Gibson Performance Exhaust 5658 
Aluminiz...\u001b[0m\n", + "\u001b[91m39: Guess: $140.36 Truth: $12.33 Error: $128.03 SLE: 5.58 Item: Bella Tunno Happy Links - Baby Montessor...\u001b[0m\n", + "\u001b[91m40: Guess: $179.17 Truth: $84.99 Error: $94.18 SLE: 0.55 Item: CANMORE H300 Handheld GPS Golf Device, S...\u001b[0m\n", + "\u001b[92m41: Guess: $14.62 Truth: $15.99 Error: $1.37 SLE: 0.01 Item: DCPOWER AC Adapter Compatible Replacemen...\u001b[0m\n", + "\u001b[93m42: Guess: $137.26 Truth: $62.44 Error: $74.82 SLE: 0.61 Item: Sharp, VX2128V, Commercial Desktop Calcu...\u001b[0m\n", + "\u001b[91m43: Guess: $201.47 Truth: $82.99 Error: $118.48 SLE: 0.77 Item: Melissa & Doug Lifelike Plush Stork Gian...\u001b[0m\n", + "\u001b[91m44: Guess: $353.73 Truth: $599.95 Error: $246.22 SLE: 0.28 Item: Sony SSCS8 2-Way 3-Driver Center Channel...\u001b[0m\n", + "\u001b[92m45: Guess: $225.54 Truth: $194.99 Error: $30.55 SLE: 0.02 Item: ASUS Chromebook CX1, 14\" Full HD NanoEdg...\u001b[0m\n", + "\u001b[92m46: Guess: $301.16 Truth: $344.95 Error: $43.79 SLE: 0.02 Item: FiiO X7 32GB Hi-Res Lossless Music Playe...\u001b[0m\n", + "\u001b[92m47: Guess: $34.62 Truth: $37.99 Error: $3.37 SLE: 0.01 Item: TORRO Leather Case Compatible with iPhon...\u001b[0m\n", + "\u001b[92m48: Guess: $255.31 Truth: $224.35 Error: $30.96 SLE: 0.02 Item: Universal Air Conditioner KT 1031 A/C Co...\u001b[0m\n", + "\u001b[93m49: Guess: $491.75 Truth: $814.00 Error: $322.25 SLE: 0.25 Item: Street Series Stainless Performance Cat-...\u001b[0m\n", + "\u001b[92m50: Guess: $459.04 Truth: $439.88 Error: $19.16 SLE: 0.00 Item: Lenovo IdeaPad 3 14-inch Laptop, 14.0-in...\u001b[0m\n", + "\u001b[92m51: Guess: $312.07 Truth: $341.43 Error: $29.36 SLE: 0.01 Item: Access Bed Covers TonnoSport 22050219 - ...\u001b[0m\n", + "\u001b[91m52: Guess: $191.58 Truth: $46.78 Error: $144.80 SLE: 1.94 Item: G.I. 
JOE Hasbro 3 3/4\" Wave 5 Action Fig...\u001b[0m\n", + "\u001b[92m53: Guess: $193.19 Truth: $171.44 Error: $21.75 SLE: 0.01 Item: T&S Brass B-0232-BST Double Pantry Fauce...\u001b[0m\n", + "\u001b[91m54: Guess: $192.23 Truth: $458.00 Error: $265.77 SLE: 0.75 Item: ZTUOAUMA Fuel Injection Pump 3090942 309...\u001b[0m\n", + "\u001b[91m55: Guess: $261.21 Truth: $130.75 Error: $130.46 SLE: 0.47 Item: 2AP18AA#ABA Hp Prime Graphing Calculator...\u001b[0m\n", + "\u001b[91m56: Guess: $180.51 Truth: $83.81 Error: $96.70 SLE: 0.58 Item: Lowrance 000-0119-83 Nmea 2000 25' Exten...\u001b[0m\n", + "\u001b[91m57: Guess: $155.91 Truth: $386.39 Error: $230.48 SLE: 0.82 Item: Jeep Genuine Accessories 82213051 Hood L...\u001b[0m\n", + "\u001b[91m58: Guess: $289.72 Truth: $169.00 Error: $120.72 SLE: 0.29 Item: GODOX CB-06 Hard Carrying Case with Whee...\u001b[0m\n", + "\u001b[93m59: Guess: $77.23 Truth: $17.95 Error: $59.28 SLE: 2.01 Item: Au-Tomotive Gold, INC. Ford Black Valet ...\u001b[0m\n", + "\u001b[91m60: Guess: $137.48 Truth: $269.00 Error: $131.52 SLE: 0.45 Item: Snailfly Black Roof Rack Rail + Cross Ba...\u001b[0m\n", + "\u001b[93m61: Guess: $35.73 Truth: $77.77 Error: $42.04 SLE: 0.58 Item: KING SHA Anti Glare LED Track Lighting H...\u001b[0m\n", + "\u001b[91m62: Guess: $326.09 Truth: $88.99 Error: $237.10 SLE: 1.67 Item: APS Compatible with Chevy Silverado 1500...\u001b[0m\n", + "\u001b[93m63: Guess: $263.41 Truth: $364.41 Error: $101.00 SLE: 0.10 Item: Wilwood Engineering 14011291R Brake Cali...\u001b[0m\n", + "\u001b[92m64: Guess: $149.55 Truth: $127.03 Error: $22.52 SLE: 0.03 Item: ACDelco Gold 336-1925A Starter, Remanufa...\u001b[0m\n", + "\u001b[91m65: Guess: $416.72 Truth: $778.95 Error: $362.23 SLE: 0.39 Item: UWS EC10783 69-Inch Matte Black Heavy-Wa...\u001b[0m\n", + "\u001b[91m66: Guess: $538.10 Truth: $206.66 Error: $331.44 SLE: 0.91 Item: Dell Latitude E5440 14in Business Laptop...\u001b[0m\n", + "\u001b[91m67: Guess: $121.37 Truth: $35.94 Error: $85.43 SLE: 
1.43 Item: (Plug and Play) Spare Tire Brake Light W...\u001b[0m\n", + "\u001b[91m68: Guess: $297.14 Truth: $149.00 Error: $148.14 SLE: 0.47 Item: The Ultimate Roadside Rescue Assistant\u001b[0m\n", + "\u001b[92m69: Guess: $209.51 Truth: $251.98 Error: $42.47 SLE: 0.03 Item: Brand New 18\" x 8.5\" Replacement Wheel f...\u001b[0m\n", + "\u001b[91m70: Guess: $243.61 Truth: $160.00 Error: $83.61 SLE: 0.17 Item: Headlight Headlamp LH Left & RH Right Pa...\u001b[0m\n", + "\u001b[91m71: Guess: $135.49 Truth: $39.99 Error: $95.50 SLE: 1.45 Item: Lilo And Stitch Deluxe Oversize Print La...\u001b[0m\n", + "\u001b[91m72: Guess: $199.62 Truth: $362.41 Error: $162.79 SLE: 0.35 Item: AC Compressor & A/C Clutch For Hyundai A...\u001b[0m\n", + "\u001b[91m73: Guess: $203.28 Truth: $344.00 Error: $140.72 SLE: 0.27 Item: House Of Troy PIN475-AB Pinnacle Collect...\u001b[0m\n", + "\u001b[93m74: Guess: $84.55 Truth: $25.09 Error: $59.46 SLE: 1.41 Item: Juno T29 WH Floating Electrical Feed Sin...\u001b[0m\n", + "\u001b[92m75: Guess: $164.67 Truth: $175.95 Error: $11.28 SLE: 0.00 Item: Sherman GO-PARTS - for 2013-2016 Toyota ...\u001b[0m\n", + "\u001b[91m76: Guess: $228.51 Truth: $132.64 Error: $95.87 SLE: 0.29 Item: Roland RPU-3 Electronic Keyboard Pedal o...\u001b[0m\n", + "\u001b[93m77: Guess: $327.05 Truth: $422.99 Error: $95.94 SLE: 0.07 Item: Rockland VMI14 12,000 Pound 12 Volt DC E...\u001b[0m\n", + "\u001b[93m78: Guess: $201.04 Truth: $146.48 Error: $54.56 SLE: 0.10 Item: Max Advanced Brakes Elite XDS Front Cros...\u001b[0m\n", + "\u001b[91m79: Guess: $320.96 Truth: $156.83 Error: $164.13 SLE: 0.51 Item: Quality-Built 11030 Premium Quality Alte...\u001b[0m\n", + "\u001b[92m80: Guess: $227.20 Truth: $251.99 Error: $24.79 SLE: 0.01 Item: Lucida LG-510 Student Classical Guitar, ...\u001b[0m\n", + "\u001b[91m81: Guess: $151.75 Truth: $940.33 Error: $788.58 SLE: 3.31 Item: Longacre 52-79800 Aluminum Turn Plates\u001b[0m\n", + "\u001b[91m82: Guess: $211.82 Truth: $52.99 Error: $158.83 
SLE: 1.88 Item: Motion Pro 08-0380 Adjustable Torque Wre...\u001b[0m\n", + "\u001b[91m83: Guess: $75.14 Truth: $219.95 Error: $144.81 SLE: 1.13 Item: Glyph Thunderbolt 3 NVMe Dock (0 GB)\u001b[0m\n", + "\u001b[91m84: Guess: $241.86 Truth: $441.03 Error: $199.17 SLE: 0.36 Item: TOYO Open Country MT Performance Radial ...\u001b[0m\n", + "\u001b[91m85: Guess: $316.55 Truth: $168.98 Error: $147.57 SLE: 0.39 Item: Razer Seiren X USB Streaming Microphone ...\u001b[0m\n", + "\u001b[93m86: Guess: $80.51 Truth: $2.49 Error: $78.02 SLE: 9.93 Item: Happy Birthday to Dad From Your Daughter...\u001b[0m\n", + "\u001b[93m87: Guess: $177.39 Truth: $98.62 Error: $78.77 SLE: 0.34 Item: Little Tikes My Real Jam First Concert S...\u001b[0m\n", + "\u001b[92m88: Guess: $256.24 Truth: $256.95 Error: $0.71 SLE: 0.00 Item: Studio M Peace and Harmony Art Pole Comm...\u001b[0m\n", + "\u001b[92m89: Guess: $21.27 Truth: $30.99 Error: $9.72 SLE: 0.13 Item: MyVolts 12V Power Supply Adaptor Compati...\u001b[0m\n", + "\u001b[93m90: Guess: $356.27 Truth: $569.84 Error: $213.57 SLE: 0.22 Item: Dell Latitude 7212 Rugged Extreme Tablet...\u001b[0m\n", + "\u001b[92m91: Guess: $165.47 Truth: $177.99 Error: $12.52 SLE: 0.01 Item: Covermates Contour Fit Car Cover - Light...\u001b[0m\n", + "\u001b[91m92: Guess: $505.02 Truth: $997.99 Error: $492.97 SLE: 0.46 Item: Westin 57-4025 Black HDX Grille Guard fi...\u001b[0m\n", + "\u001b[92m93: Guess: $242.10 Truth: $219.00 Error: $23.10 SLE: 0.01 Item: Fieldpiece JL2 Job Link Wireless App Tra...\u001b[0m\n", + "\u001b[92m94: Guess: $222.99 Truth: $225.55 Error: $2.56 SLE: 0.00 Item: hansgrohe Talis S Modern Premium Easy Cl...\u001b[0m\n", + "\u001b[91m95: Guess: $279.94 Truth: $495.95 Error: $216.01 SLE: 0.33 Item: G-Technology G-SPEED eS PRO High-Perform...\u001b[0m\n", + "\u001b[91m96: Guess: $345.71 Truth: $942.37 Error: $596.66 SLE: 1.00 Item: DreamLine SHDR-1960723L-01 Shower Door, ...\u001b[0m\n", + "\u001b[91m97: Guess: $159.50 Truth: $1.94 Error: $157.56 
SLE: 16.00 Item: Sanctuary Square Backplate Finish: Oiled...\u001b[0m\n", + "\u001b[91m98: Guess: $166.28 Truth: $284.34 Error: $118.06 SLE: 0.29 Item: Pelican Protector 1750 Long Case - Multi...\u001b[0m\n", + "\u001b[93m99: Guess: $224.55 Truth: $171.90 Error: $52.65 SLE: 0.07 Item: Brock Replacement Driver and Passenger H...\u001b[0m\n", + "\u001b[93m100: Guess: $93.78 Truth: $144.99 Error: $51.21 SLE: 0.19 Item: Carlinkit Ai Box Mini, Android 11, Multi...\u001b[0m\n", + "\u001b[91m101: Guess: $259.73 Truth: $470.47 Error: $210.74 SLE: 0.35 Item: StarDot NetCamLIVE2 YouTube Live Stream ...\u001b[0m\n", + "\u001b[92m102: Guess: $77.38 Truth: $66.95 Error: $10.43 SLE: 0.02 Item: Atomic Compatible FILXXCAR0016 16x25x5 M...\u001b[0m\n", + "\u001b[93m103: Guess: $45.28 Truth: $117.00 Error: $71.72 SLE: 0.88 Item: Bandai Awakening of S. H. s.h.figuarts s...\u001b[0m\n", + "\u001b[91m104: Guess: $268.58 Truth: $172.14 Error: $96.44 SLE: 0.20 Item: Fit System 62135G Passenger Side Towing ...\u001b[0m\n", + "\u001b[91m105: Guess: $218.92 Truth: $392.74 Error: $173.82 SLE: 0.34 Item: Black Horse Black Aluminum Exceed Runnin...\u001b[0m\n", + "\u001b[92m106: Guess: $22.51 Truth: $16.99 Error: $5.52 SLE: 0.07 Item: Dearsun Twinkle Star Color Night Light P...\u001b[0m\n", + "\u001b[93m107: Guess: $46.78 Truth: $1.34 Error: $45.44 SLE: 9.10 Item: Pokemon - Gallade Spirit Link (83/108) -...\u001b[0m\n", + "\u001b[93m108: Guess: $256.04 Truth: $349.98 Error: $93.94 SLE: 0.10 Item: Ibanez GA34STCE-NT GIO Series Classical ...\u001b[0m\n", + "\u001b[93m109: Guess: $255.82 Truth: $370.71 Error: $114.89 SLE: 0.14 Item: Set 2 Heavy Duty 12-16.5 12x16.5 12 Ply ...\u001b[0m\n", + "\u001b[91m110: Guess: $185.73 Truth: $65.88 Error: $119.85 SLE: 1.05 Item: Hairpin Table Legs 28\" Heavy Duty Hairpi...\u001b[0m\n", + "\u001b[92m111: Guess: $270.29 Truth: $229.99 Error: $40.30 SLE: 0.03 Item: Marada Racing Seat with Adjustable Slide...\u001b[0m\n", + "\u001b[93m112: Guess: $83.80 Truth: 
$9.14 Error: $74.66 SLE: 4.51 Item: Remington Industries 24UL1007STRWHI25 24...\u001b[0m\n", + "\u001b[91m113: Guess: $485.39 Truth: $199.00 Error: $286.39 SLE: 0.79 Item: Acer S3-391-6046 13.3-inch Ultrabook, In...\u001b[0m\n", + "\u001b[91m114: Guess: $279.62 Truth: $109.99 Error: $169.63 SLE: 0.86 Item: ICBEAMER 7\" RGB LED Headlights Bulb Halo...\u001b[0m\n", + "\u001b[93m115: Guess: $359.09 Truth: $570.42 Error: $211.33 SLE: 0.21 Item: R1 Concepts Front Rear Brakes and Rotors...\u001b[0m\n", + "\u001b[93m116: Guess: $204.85 Truth: $279.99 Error: $75.14 SLE: 0.10 Item: Camplux 2.64 GPM Tankless , Outdoor Port...\u001b[0m\n", + "\u001b[91m117: Guess: $132.76 Truth: $30.99 Error: $101.77 SLE: 2.05 Item: KNOKLOCK 10 Pack 3.75 Inch(96mm) Kitchen...\u001b[0m\n", + "\u001b[91m118: Guess: $124.75 Truth: $31.99 Error: $92.76 SLE: 1.79 Item: Valley Enterprises Yaesu USB FTDI CT-62 ...\u001b[0m\n", + "\u001b[93m119: Guess: $78.44 Truth: $15.90 Error: $62.54 SLE: 2.40 Item: G9 LED Light Bulbs,8W,75W 100W replaceme...\u001b[0m\n", + "\u001b[93m120: Guess: $118.26 Truth: $45.99 Error: $72.27 SLE: 0.87 Item: ZCHAOZ 4 Lights Antique White Farmhouse ...\u001b[0m\n", + "\u001b[92m121: Guess: $105.69 Truth: $113.52 Error: $7.83 SLE: 0.01 Item: Honeywell TH8320R1003 Honeywell VisionPr...\u001b[0m\n", + "\u001b[93m122: Guess: $353.03 Truth: $516.99 Error: $163.96 SLE: 0.14 Item: Patriot Exhaust H8013-1 1-7/8\" Clippster...\u001b[0m\n", + "\u001b[93m123: Guess: $262.23 Truth: $196.99 Error: $65.24 SLE: 0.08 Item: Fitrite Autopart New Front Left Driver S...\u001b[0m\n", + "\u001b[92m124: Guess: $74.66 Truth: $46.55 Error: $28.11 SLE: 0.22 Item: Technical Precision Replacement for GE G...\u001b[0m\n", + "\u001b[93m125: Guess: $259.26 Truth: $356.99 Error: $97.73 SLE: 0.10 Item: Covercraft Carhartt SeatSaver Front Row ...\u001b[0m\n", + "\u001b[93m126: Guess: $195.84 Truth: $319.95 Error: $124.11 SLE: 0.24 Item: Sennheiser SD Pro 2 (506008) - Double-Si...\u001b[0m\n", + 
"\u001b[91m127: Guess: $221.73 Truth: $96.06 Error: $125.67 SLE: 0.69 Item: Hitachi MAF0110 Mass Air Flow Sensor\u001b[0m\n", + "\u001b[92m128: Guess: $227.91 Truth: $190.99 Error: $36.92 SLE: 0.03 Item: AmScope SE305R-P-LED-PS36A 10X-30X LED C...\u001b[0m\n", + "\u001b[92m129: Guess: $271.48 Truth: $257.95 Error: $13.53 SLE: 0.00 Item: Front Left Driver Side Window Regulator ...\u001b[0m\n", + "\u001b[93m130: Guess: $132.58 Truth: $62.95 Error: $69.63 SLE: 0.54 Item: Premium Replica Hubcap Set, Fits Nissan ...\u001b[0m\n", + "\u001b[92m131: Guess: $19.46 Truth: $47.66 Error: $28.20 SLE: 0.75 Item: Excellerations Phonics Spelling Game for...\u001b[0m\n", + "\u001b[91m132: Guess: $339.20 Truth: $226.99 Error: $112.21 SLE: 0.16 Item: RC4WD BigDog Dual Axle Scale Car/Truck T...\u001b[0m\n", + "\u001b[93m133: Guess: $278.09 Truth: $359.95 Error: $81.86 SLE: 0.07 Item: Unknown Stage 2 Clutch Kit - Low Altitud...\u001b[0m\n", + "\u001b[91m134: Guess: $200.77 Truth: $78.40 Error: $122.37 SLE: 0.87 Item: 2002-2008 Dodge Ram 1500 Mopar 4X4 Emble...\u001b[0m\n", + "\u001b[91m135: Guess: $282.35 Truth: $172.77 Error: $109.58 SLE: 0.24 Item: Pro Comp Alloys Series 89 Wheel with Pol...\u001b[0m\n", + "\u001b[93m136: Guess: $247.77 Truth: $316.45 Error: $68.68 SLE: 0.06 Item: Detroit Axle - Front Rear Strut & Coil S...\u001b[0m\n", + "\u001b[91m137: Guess: $192.76 Truth: $87.99 Error: $104.77 SLE: 0.61 Item: ECCPP Rear Wheel Axle Replacement fit fo...\u001b[0m\n", + "\u001b[93m138: Guess: $281.20 Truth: $226.63 Error: $54.57 SLE: 0.05 Item: Dell Latitude E6520 Intel i7-2720QM 2.20...\u001b[0m\n", + "\u001b[91m139: Guess: $222.78 Truth: $31.49 Error: $191.29 SLE: 3.72 Item: F FIERCE CYCLE 251pcs Black Universal Mo...\u001b[0m\n", + "\u001b[92m140: Guess: $213.61 Truth: $196.00 Error: $17.61 SLE: 0.01 Item: Flash Furniture 4 Pk. 
HERCULES Series 88...\u001b[0m\n", + "\u001b[92m141: Guess: $96.89 Truth: $78.40 Error: $18.49 SLE: 0.04 Item: B&M 30287 Throttle Valve/Kickdown Cable,...\u001b[0m\n", + "\u001b[93m142: Guess: $194.39 Truth: $116.25 Error: $78.14 SLE: 0.26 Item: Gates TCK226 PowerGrip Premium Timing Be...\u001b[0m\n", + "\u001b[91m143: Guess: $263.61 Truth: $112.78 Error: $150.83 SLE: 0.71 Item: Monroe Shocks & Struts Quick-Strut 17149...\u001b[0m\n", + "\u001b[91m144: Guess: $151.33 Truth: $27.32 Error: $124.01 SLE: 2.83 Item: Feit Electric BPMR16/GU10/930CA/6 35W EQ...\u001b[0m\n", + "\u001b[92m145: Guess: $152.80 Truth: $145.91 Error: $6.89 SLE: 0.00 Item: Yellow Jacket 2806 Contractor Extension ...\u001b[0m\n", + "\u001b[92m146: Guess: $149.96 Truth: $171.09 Error: $21.13 SLE: 0.02 Item: Garage-Pro Tailgate SET Compatible with ...\u001b[0m\n", + "\u001b[93m147: Guess: $210.42 Truth: $167.95 Error: $42.47 SLE: 0.05 Item: 3M Perfect It Buffing and Polishing Kit ...\u001b[0m\n", + "\u001b[91m148: Guess: $140.96 Truth: $28.49 Error: $112.47 SLE: 2.47 Item: Chinese Style Dollhouse Model DIY Miniat...\u001b[0m\n", + "\u001b[91m149: Guess: $235.92 Truth: $122.23 Error: $113.69 SLE: 0.43 Item: Generic NRG Innovations SRK-161H Steerin...\u001b[0m\n", + "\u001b[91m150: Guess: $171.83 Truth: $32.99 Error: $138.84 SLE: 2.64 Item: Learning Resources Coding Critters Range...\u001b[0m\n", + "\u001b[91m151: Guess: $163.76 Truth: $71.20 Error: $92.56 SLE: 0.68 Item: Bosch Automotive 15463 Oxygen Sensor, OE...\u001b[0m\n", + "\u001b[93m152: Guess: $58.53 Truth: $112.75 Error: $54.22 SLE: 0.42 Item: Case of 24-2 Inch Blue Painters Tape - 6...\u001b[0m\n", + "\u001b[93m153: Guess: $218.31 Truth: $142.43 Error: $75.88 SLE: 0.18 Item: MOCA Engine Water Pump & Fan Clutch fit ...\u001b[0m\n", + "\u001b[93m154: Guess: $282.25 Truth: $398.99 Error: $116.74 SLE: 0.12 Item: SAREMAS Foot Step Bars for Hyundai Palis...\u001b[0m\n", + "\u001b[93m155: Guess: $302.86 Truth: $449.00 Error: $146.14 SLE: 0.15 
Item: Gretsch G9210 Square Neck Boxcar Mahogan...\u001b[0m\n", + "\u001b[91m156: Guess: $391.64 Truth: $189.00 Error: $202.64 SLE: 0.53 Item: NikoMaku Mirror Dash Cam Front and Rear ...\u001b[0m\n", + "\u001b[91m157: Guess: $228.64 Truth: $120.91 Error: $107.73 SLE: 0.40 Item: Fenix HP25R v2.0 USB-C Rechargeable Head...\u001b[0m\n", + "\u001b[91m158: Guess: $288.50 Truth: $203.53 Error: $84.97 SLE: 0.12 Item: R&L Racing Heavy Duty Roll-Up Soft Tonne...\u001b[0m\n", + "\u001b[93m159: Guess: $246.48 Truth: $349.99 Error: $103.51 SLE: 0.12 Item: Garmin 010-02258-10 GPSMAP 64sx, Handhel...\u001b[0m\n", + "\u001b[92m160: Guess: $44.74 Truth: $34.35 Error: $10.39 SLE: 0.07 Item: Brown 5-7/8\" X 8-1/2\" X 3/16\" Thick Heav...\u001b[0m\n", + "\u001b[93m161: Guess: $237.98 Truth: $384.99 Error: $147.01 SLE: 0.23 Item: GAOMON PD2200 Pen Display & 20 Pen Nibs ...\u001b[0m\n", + "\u001b[92m162: Guess: $247.85 Truth: $211.00 Error: $36.85 SLE: 0.03 Item: VXMOTOR for 97-03 Ford F150/F250 Lightdu...\u001b[0m\n", + "\u001b[91m163: Guess: $392.86 Truth: $129.00 Error: $263.86 SLE: 1.23 Item: HP EliteBook 2540p Intel Core i7-640LM X...\u001b[0m\n", + "\u001b[91m164: Guess: $212.95 Truth: $111.45 Error: $101.50 SLE: 0.41 Item: Green EPX Mixing Nozzles 100-Pack-fits 3...\u001b[0m\n", + "\u001b[91m165: Guess: $251.81 Truth: $81.12 Error: $170.69 SLE: 1.26 Item: Box Partners 6 1/4 x 3 1/8\" 13 Pt. 
Manil...\u001b[0m\n", + "\u001b[91m166: Guess: $246.36 Truth: $457.08 Error: $210.72 SLE: 0.38 Item: Vixen Air 1/2\" NPT Air Ride Suspension H...\u001b[0m\n", + "\u001b[91m167: Guess: $213.15 Truth: $49.49 Error: $163.66 SLE: 2.09 Item: Smart Floor Lamp, 2700-6500K+RGBPink Mul...\u001b[0m\n", + "\u001b[93m168: Guess: $150.56 Truth: $80.56 Error: $70.00 SLE: 0.38 Item: SOZG 324mm Wheelbase Body Shell RC Car B...\u001b[0m\n", + "\u001b[93m169: Guess: $335.11 Truth: $278.39 Error: $56.72 SLE: 0.03 Item: Mickey Thompson ET Street S/S Racing Rad...\u001b[0m\n", + "\u001b[93m170: Guess: $262.32 Truth: $364.50 Error: $102.18 SLE: 0.11 Item: Pirelli 275/40R20 106W XL RFT P0 PZ4-LUX...\u001b[0m\n", + "\u001b[92m171: Guess: $331.71 Truth: $378.99 Error: $47.28 SLE: 0.02 Item: Torklift C3212 Rear Tie Down\u001b[0m\n", + "\u001b[93m172: Guess: $205.37 Truth: $165.28 Error: $40.09 SLE: 0.05 Item: Cardone 78-4226 Remanufactured Ford Comp...\u001b[0m\n", + "\u001b[91m173: Guess: $163.48 Truth: $56.74 Error: $106.74 SLE: 1.10 Item: Kidde AccessPoint 001798 Supra TouchPoin...\u001b[0m\n", + "\u001b[93m174: Guess: $208.04 Truth: $307.95 Error: $99.91 SLE: 0.15 Item: 3M Protecta 3100414 Self Retracting Life...\u001b[0m\n", + "\u001b[91m175: Guess: $139.91 Truth: $38.00 Error: $101.91 SLE: 1.65 Item: Plantronics 89435-01 Wired Headset, Blac...\u001b[0m\n", + "\u001b[91m176: Guess: $182.46 Truth: $53.00 Error: $129.46 SLE: 1.50 Item: Logitech K750 Wireless Solar Keyboard fo...\u001b[0m\n", + "\u001b[92m177: Guess: $425.16 Truth: $498.00 Error: $72.84 SLE: 0.02 Item: Olympus PEN E-PL9 Body Only with 3-Inch ...\u001b[0m\n", + "\u001b[93m178: Guess: $130.02 Truth: $53.99 Error: $76.03 SLE: 0.75 Item: Beck/Arnley 051-6066 Hub & Bearing Assem...\u001b[0m\n", + "\u001b[93m179: Guess: $231.02 Truth: $350.00 Error: $118.98 SLE: 0.17 Item: Eibach Pro-Kit Performance Springs E10-6...\u001b[0m\n", + "\u001b[93m180: Guess: $215.97 Truth: $299.95 Error: $83.98 SLE: 0.11 Item: LEGO DC Batman 1989 
Batwing 76161 Displa...\u001b[0m\n", + "\u001b[91m181: Guess: $204.17 Truth: $94.93 Error: $109.24 SLE: 0.58 Item: Kingston Brass KS3608PL Restoration 4-In...\u001b[0m\n", + "\u001b[93m182: Guess: $267.22 Truth: $379.00 Error: $111.78 SLE: 0.12 Item: Polk Vanishing Series 265-LS In-Wall 3-W...\u001b[0m\n", + "\u001b[92m183: Guess: $275.83 Truth: $299.95 Error: $24.12 SLE: 0.01 Item: Spec-D Tuning LED Projector Headlights G...\u001b[0m\n", + "\u001b[93m184: Guess: $88.00 Truth: $24.99 Error: $63.01 SLE: 1.52 Item: RICHMOND & FINCH Airpod Pro Case, Green ...\u001b[0m\n", + "\u001b[91m185: Guess: $240.87 Truth: $41.04 Error: $199.83 SLE: 3.06 Item: LFA Industries 43B-5A-33JT 1/16-1/2-1.5-...\u001b[0m\n", + "\u001b[91m186: Guess: $97.60 Truth: $327.90 Error: $230.30 SLE: 1.45 Item: SAUTVS LED Headlight Assembly for Slings...\u001b[0m\n", + "\u001b[91m187: Guess: $204.80 Truth: $10.99 Error: $193.81 SLE: 8.08 Item: 2 Pack Combo Womens Safety Glasses Impac...\u001b[0m\n", + "\u001b[92m188: Guess: $36.23 Truth: $14.99 Error: $21.24 SLE: 0.71 Item: Arepa - Venezuelan cuisine - Venezuela P...\u001b[0m\n", + "\u001b[91m189: Guess: $209.54 Truth: $84.95 Error: $124.59 SLE: 0.80 Item: Schlage Lock Company KS23D2300 Padlock, ...\u001b[0m\n", + "\u001b[91m190: Guess: $196.07 Truth: $111.00 Error: $85.07 SLE: 0.32 Item: Techni Mobili White Sit to Stand Mobile ...\u001b[0m\n", + "\u001b[93m191: Guess: $195.32 Truth: $123.73 Error: $71.59 SLE: 0.21 Item: Special Lite Products Contemporary Wall ...\u001b[0m\n", + "\u001b[91m192: Guess: $188.61 Truth: $557.38 Error: $368.77 SLE: 1.17 Item: Tascam DP-24SD 24-Track Digital Portastu...\u001b[0m\n", + "\u001b[91m193: Guess: $210.68 Truth: $95.55 Error: $115.13 SLE: 0.62 Item: Glow Lighting 636CC10SP Vista Crystal Fl...\u001b[0m\n", + "\u001b[92m194: Guess: $181.51 Truth: $154.00 Error: $27.51 SLE: 0.03 Item: Z3 Wind Deflector, Smoke Tint, Lexan, Wi...\u001b[0m\n", + "\u001b[91m195: Guess: $346.23 Truth: $198.99 Error: $147.24 SLE: 0.30 
Item: Olympus E-20 5MP Digital Camera w/ 4x Op...\u001b[0m\n", + "\u001b[91m196: Guess: $255.54 Truth: $430.44 Error: $174.90 SLE: 0.27 Item: PHYNEDI 1:1000 World Trade Center (1973-...\u001b[0m\n", + "\u001b[93m197: Guess: $0.00 Truth: $45.67 Error: $45.67 SLE: 14.77 Item: YANGHUAN Unstable Unicorns Adventure Car...\u001b[0m\n", + "\u001b[92m198: Guess: $220.49 Truth: $249.00 Error: $28.51 SLE: 0.01 Item: Interlogix NX-1820E NetworX Touch Screen...\u001b[0m\n", + "\u001b[92m199: Guess: $63.52 Truth: $42.99 Error: $20.53 SLE: 0.15 Item: Steering Damper,Universal Motorcycle Han...\u001b[0m\n", + "\u001b[92m200: Guess: $173.71 Truth: $181.33 Error: $7.62 SLE: 0.00 Item: Amprobe TIC 410A Hot Stick Attachment\u001b[0m\n", + "\u001b[91m201: Guess: $109.21 Truth: $6.03 Error: $103.18 SLE: 7.57 Item: MyCableMart 3.5mm Plug/Jack, 4 Conductor...\u001b[0m\n", + "\u001b[91m202: Guess: $110.06 Truth: $29.99 Error: $80.07 SLE: 1.63 Item: OtterBox + Pop Symmetry Series Case for ...\u001b[0m\n", + "\u001b[91m203: Guess: $395.83 Truth: $899.00 Error: $503.17 SLE: 0.67 Item: Dell XPS X8700-1572BLK Desktop ( Intel C...\u001b[0m\n", + "\u001b[92m204: Guess: $413.81 Truth: $399.99 Error: $13.82 SLE: 0.00 Item: Franklin Iron Works Sperry Industrial Br...\u001b[0m\n", + "\u001b[91m205: Guess: $178.44 Truth: $4.66 Error: $173.78 SLE: 11.95 Item: Avery Legal Dividers, Standard Collated ...\u001b[0m\n", + "\u001b[93m206: Guess: $183.03 Truth: $261.41 Error: $78.38 SLE: 0.13 Item: Moen 8346 Commercial Posi-Temp Pressure ...\u001b[0m\n", + "\u001b[91m207: Guess: $312.93 Truth: $136.97 Error: $175.96 SLE: 0.68 Item: Carlisle Versa Trail ATR All Terrain Rad...\u001b[0m\n", + "\u001b[93m208: Guess: $145.74 Truth: $79.00 Error: $66.74 SLE: 0.37 Item: SUNWAYFOTO 44mm Tripod Ball Head Arca Co...\u001b[0m\n", + "\u001b[91m209: Guess: $166.98 Truth: $444.99 Error: $278.01 SLE: 0.95 Item: NanoBeam AC NBE-5AC-Gen2-US 4 Units 5GHz...\u001b[0m\n", + "\u001b[93m210: Guess: $248.28 Truth: $411.94 Error: 
$163.66 SLE: 0.25 Item: WULF 4\" Front 2\" Rear Leveling Lift Kit ...\u001b[0m\n", + "\u001b[92m211: Guess: $148.51 Truth: $148.40 Error: $0.11 SLE: 0.00 Item: Alera ALEVABFMC Valencia Series Mobile B...\u001b[0m\n", + "\u001b[91m212: Guess: $89.95 Truth: $244.99 Error: $155.04 SLE: 0.99 Item: YU-GI-OH! Ignition Assault Booster Box\u001b[0m\n", + "\u001b[91m213: Guess: $323.49 Truth: $86.50 Error: $236.99 SLE: 1.72 Item: 48\" x 36\" Extra-Large Framed Magnetic Bl...\u001b[0m\n", + "\u001b[92m214: Guess: $332.80 Truth: $297.95 Error: $34.85 SLE: 0.01 Item: Dell Latitude D620 Renewed Notebook PC\u001b[0m\n", + "\u001b[91m215: Guess: $598.74 Truth: $399.99 Error: $198.75 SLE: 0.16 Item: acer Aspire 5 Laptop, AMD Ryzen 3 5300U ...\u001b[0m\n", + "\u001b[91m216: Guess: $127.95 Truth: $599.00 Error: $471.05 SLE: 2.36 Item: Elk 31080/6RC-GRN 30 by 6-Inch Viva 6-Li...\u001b[0m\n", + "\u001b[91m217: Guess: $235.11 Truth: $105.99 Error: $129.12 SLE: 0.63 Item: Barbie Top Model Doll\u001b[0m\n", + "\u001b[91m218: Guess: $401.45 Truth: $689.00 Error: $287.55 SLE: 0.29 Item: Danby Designer 20-In. 
Electric Range wit...\u001b[0m\n", + "\u001b[93m219: Guess: $247.40 Truth: $404.99 Error: $157.59 SLE: 0.24 Item: FixtureDisplays® Metal Truss Podium Doub...\u001b[0m\n", + "\u001b[92m220: Guess: $189.46 Truth: $207.76 Error: $18.30 SLE: 0.01 Item: ACDelco 13597235 GM Original Equipment A...\u001b[0m\n", + "\u001b[91m221: Guess: $267.65 Truth: $171.82 Error: $95.83 SLE: 0.19 Item: EBC S1KF1135 Stage-1 Premium Street Brak...\u001b[0m\n", + "\u001b[93m222: Guess: $221.85 Truth: $293.24 Error: $71.39 SLE: 0.08 Item: FXR Men's Boost FX Jacket (Black/Orange/...\u001b[0m\n", + "\u001b[91m223: Guess: $213.19 Truth: $374.95 Error: $161.76 SLE: 0.32 Item: SuperATV Scratch Resistant 3-in-1 Flip W...\u001b[0m\n", + "\u001b[92m224: Guess: $124.39 Truth: $111.99 Error: $12.40 SLE: 0.01 Item: SBU 3 Layer All Weather Mini Van Car Cov...\u001b[0m\n", + "\u001b[93m225: Guess: $86.85 Truth: $42.99 Error: $43.86 SLE: 0.48 Item: 2 Pack Outdoor Brochure Holder Advertisi...\u001b[0m\n", + "\u001b[91m226: Guess: $248.36 Truth: $116.71 Error: $131.65 SLE: 0.56 Item: Monroe Shocks & Struts Quick-Strut 17158...\u001b[0m\n", + "\u001b[91m227: Guess: $310.35 Truth: $118.61 Error: $191.74 SLE: 0.92 Item: Elements of Design Magellan EB235AL Thre...\u001b[0m\n", + "\u001b[91m228: Guess: $241.37 Truth: $147.12 Error: $94.25 SLE: 0.24 Item: GM Genuine Parts 15-62961 Air Conditioni...\u001b[0m\n", + "\u001b[91m229: Guess: $242.05 Truth: $119.99 Error: $122.06 SLE: 0.49 Item: Baseus 17-in-1 USB C Docking Station to ...\u001b[0m\n", + "\u001b[93m230: Guess: $247.50 Truth: $369.98 Error: $122.48 SLE: 0.16 Item: Whitehall™ Personalized Whitehall Capito...\u001b[0m\n", + "\u001b[92m231: Guess: $274.61 Truth: $315.55 Error: $40.94 SLE: 0.02 Item: Pro Circuit Works Pipe PY05250 for 02-19...\u001b[0m\n", + "\u001b[93m232: Guess: $255.66 Truth: $190.99 Error: $64.67 SLE: 0.08 Item: HYANKA 15 \"1200W Professional DJ Speaker...\u001b[0m\n", + "\u001b[93m233: Guess: $83.32 Truth: $155.00 Error: $71.68 SLE: 
0.38 Item: Bluetooth X6BT Card Reader Writer Encode...\u001b[0m\n", + "\u001b[92m234: Guess: $331.34 Truth: $349.99 Error: $18.65 SLE: 0.00 Item: AIRAID Cold Air Intake System by K&N: In...\u001b[0m\n", + "\u001b[92m235: Guess: $236.16 Truth: $249.99 Error: $13.83 SLE: 0.00 Item: Bostingner Shower Faucets Sets Complete,...\u001b[0m\n", + "\u001b[91m236: Guess: $128.41 Truth: $42.99 Error: $85.42 SLE: 1.16 Item: PIT66 Front Bumper Turn Signal Lights, C...\u001b[0m\n", + "\u001b[93m237: Guess: $63.43 Truth: $17.99 Error: $45.44 SLE: 1.49 Item: Caseology Bumpy Compatible with Google P...\u001b[0m\n", + "\u001b[91m238: Guess: $252.08 Truth: $425.00 Error: $172.92 SLE: 0.27 Item: Fleck 2510 Timer Mechanical Filter Contr...\u001b[0m\n", + "\u001b[93m239: Guess: $343.13 Truth: $249.99 Error: $93.14 SLE: 0.10 Item: Haloview MC7108 Wireless RV Backup Camer...\u001b[0m\n", + "\u001b[91m240: Guess: $21.15 Truth: $138.23 Error: $117.08 SLE: 3.38 Item: Schmidt Spiele - Manhattan\u001b[0m\n", + "\u001b[92m241: Guess: $395.59 Truth: $414.99 Error: $19.40 SLE: 0.00 Item: Corsa 14333 Tip Kit (Ford Mustang GT)\u001b[0m\n", + "\u001b[92m242: Guess: $193.62 Truth: $168.28 Error: $25.34 SLE: 0.02 Item: Hoshizaki FM116A Fan Motor Kit 1\u001b[0m\n", + "\u001b[93m243: Guess: $268.18 Truth: $199.99 Error: $68.19 SLE: 0.09 Item: BAINUO Antler Chandelier Lighting,6 Ligh...\u001b[0m\n", + "\u001b[91m244: Guess: $252.22 Truth: $126.70 Error: $125.52 SLE: 0.47 Item: DNA MOTORING HL-OH-FEXP06-SM-AM Smoke Le...\u001b[0m\n", + "\u001b[91m245: Guess: $120.28 Truth: $5.91 Error: $114.37 SLE: 8.21 Item: Wera Stainless 3840/1 TS 2.5mm Hex Inser...\u001b[0m\n", + "\u001b[92m246: Guess: $201.22 Truth: $193.06 Error: $8.16 SLE: 0.00 Item: Celestron - PowerSeeker 127EQ Telescope ...\u001b[0m\n", + "\u001b[93m247: Guess: $313.24 Truth: $249.99 Error: $63.25 SLE: 0.05 Item: NHOPEEW 10.1inch Android Car Radio Carpl...\u001b[0m\n", + "\u001b[91m248: Guess: $188.60 Truth: $64.12 Error: $124.48 SLE: 1.14 Item: 
Other Harmonica (Suzuki-2Timer24- A)\u001b[0m\n", + "\u001b[91m249: Guess: $299.09 Truth: $114.99 Error: $184.10 SLE: 0.90 Item: Harley Air Filter Venturi Intake Air Cle...\u001b[0m\n", + "\u001b[91m250: Guess: $177.13 Truth: $926.00 Error: $748.87 SLE: 2.72 Item: Elite Screens Edge Free Ambient Light Re...\u001b[0m\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA+0AAAK9CAYAAABRvo1QAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAA1KZJREFUeJzs3Xd8leX5x/HPyU4ISQhZIFNEQdyiCC6cOHEgiqLirHVUraPVtlatWrWt2mp/FVcFFbc461ZUVBy4ceJAQCVhZUBC5vP744TkhJlAkpPxefvKy5P7POc8V8i8zve+7ycUBEGAJEmSJElqc2KiXYAkSZIkSVozm3ZJkiRJktoom3ZJkiRJktoom3ZJkiRJktoom3ZJkiRJktoom3ZJkiRJktoom3ZJkiRJktoom3ZJkiRJktoom3ZJkiRJktoom3ZJ6qT69evHSSedFO0yOpXXXnvNf3Oplj+DJKlxbNoltWuTJk0iFAo1eMvJyWGvvfbiueeei3Z5AIRCIc4555xmfc6TTjqpwceclpbGtttuyw033EB5eXmznqu1jBw5crXP5cq3QYMGRbu8VvHee+9x1llnseOOOxIfH08oFFrrsbfeeitjx46lT58+hEKhtTY/b7zxBqNHj6Z3794kJSWRl5fHAQccwFtvvdWk2h566CGGDx9Oly5dyMjIYMSIEbz66qurHXfXXXcxePBgkpKSGDhwILfcckujnn/V7+W4uDg22WQTTjrpJH766afVjl/59TJw4MA1Pt9LL71U91yPPvpog/s+++wzjjrqKPr27UtSUhKbbLIJ++2332q19uvXj0MOOWSdda/6vRj5lpSU1KiPfX3Ky8v5/e9/T8+ePUlOTmbYsGG89NJLjX78gw8+yA477EBSUhLZ2dmceuqpLFq0aLXj8vPzOfnkk8nJySE5OZkddtiBRx55pFHnWPn5mzlz5hrvHzlyJFtttdU6n+OLL77giiuuYM6cOY06Z3OYN28eV155JTvvvDPdunUjKyuLkSNH8vLLL6/3saeffjqhUGi9XyORHn74YXbZZRcyMjLo3r07e+65J//73/8aHFNYWMj48ePp1q0bm266KXfddddqzzNz5kxSUlL44YcfGn1uSe1XXLQLkKTm8Je//IX+/fsTBAH5+flMmjSJgw46iKeffrpJf1C1J4mJidx5551A+I+8xx57jIsuuoj333+fBx98cL2P//rrr4mJaVuv3fbq1Ytrr712tfH09PQoVNP8KisrqayspKamZo3/9s8++yx33nkn22yzDZtuuinffPPNWp/r+uuvp6SkhJ133plffvllrcd98803xMTE8Otf/5q8vDyWLl3Kfffdxx577MH//vc/DjjggPXWfcUVV/CXv/yFo446ipNOOonKykpmzZq1WjN922238etf/5oxY8ZwwQUXMH36dM4991xKS0v5/e9/v97zQP338ooVK3jnnXeYNGkSb775JrNmzVqtCU5KSuLbb7/lvffeY+edd25w35QpU0hKSmLFihUNxt9++2322msv+vTpw+mnn05eXh7z5s3jnXfe4V//+he/+c1vGlVnpMjvxUixsbFNfq41Oemkk3j00Uc5//zzGThwYN3Pt2nTprHbbrut87G33norZ5
11Fvvssw833ngj8+fP51//+hczZ87k3Xffrfs3LS4uZrfddiM/P5/zzjuPvLw8Hn74YY4++mimTJnCcccd1ywfS6RVfwZ98cUXXHnllYwcOZJ+/fo1+/nW5Mknn+T666/n8MMPZ8KECVRVVXHPPfew33778d///peTTz55jY+bOXMmkyZNatILM7fccgvnnnsuBx98MNdddx0rVqxg0qRJHHLIITz22GMceeSRAFx00UW89tprXHnllXz77becfvrpDB48mBEjRgAQBAHnnnsu559/Pv3799/4fwRJbV8gSe3Y3XffHQDB+++/32B8yZIlQXx8fHDcccdFqbJ6QHD22Wc36TE1NTVBaWnpWu+fMGFC0KVLlwZj1dXVwdChQwMg+OmnnzboeVtSdXV1UFZWttb799xzz2DIkCEb9NzLli1b43hzfLxlZWVBdXX1Rj3HXXfdFfTp0ycAAiCIjY0Nttpqq+Cpp55qcNyCBQvq6j377LODdf2anjNnTlBTUxMEQRB06dIlmDBhQqPrWb58eZCbmxuMGjVqvcfOmDEjCIVCwY033rjO40pLS4Pu3bsHBx98cIPx8ePHB126dAmWLFmyzsev7Xv597//fQAEDz30UIPxlV8vW2yxRXD++ec3uK+srCxIS0sLxowZEwDBI488UnffQQcdFGRnZwdLly5drYb8/PwG7/ft23e1j2dVa/pebE7vvvtuAAR///vf68bKysqCAQMGBMOHD1/nY8vLy4OMjIxgjz32qPtaCYIgePrppwMguPnmm+vG/va3vwVA8Morr9SNVVdXBzvttFOQl5cXlJeXr/Nca/v8rdSY7+9HHnkkAIJp06at87jmNGvWrGDhwoUNxlasWBEMGjQo6NWr1xofU1NTEwwfPjw45ZRTGvU1stLAgQODnXbaqcHnoqioKEhNTQ1Gjx5dN5abmxtMnjy57v0999wzuOSSS+rev/fee4OePXsGJSUljTqvpPavbUUsktRMMjIySE5OJi6u4YSi5cuXc+GFF9K7d28SExPZYost+Mc//kEQBHXHHHnkkeywww4NHnfooYcSCoV46qmn6sbeffddQqFQs0zDXzkN94UXXmDo0KEkJydz2223Nek5YmJiGDlyJEDd9NJ1Pe+a1pMWFhby29/+ln79+pGYmEivXr048cQTG0ylLS8v5/LLL2ezzTYjMTGR3r1787vf/W61afkrlwVMmTKFIUOGkJiYyPPPP9+0f5g1uOKKKwiFQnzxxRccd9xxdOvWrS5tXNfH+/333zN27FgyMzNJSUlhl112WW1a6muvvUYoFOLBBx/kT3/6E5tssgkpKSkUFxdvcL2vvfYap556KoMHD+bKK69kn332YeLEieywww589913DY7Nzc0lOTm5Uc/bt2/fdU6fX5eUlBSys7MpLCxc77H//Oc/ycvL47zzziMIApYtW7bG46ZNm8bixYs566yzGoyfffbZLF++fLV/68bafffdAVb7t1rp2GOP5aGHHqKmpqZu7Omnn6a0tJSjjz56teO/++47hgwZQkZGxmr35eTkbFCNLenRRx8lNjaWX/3qV3VjSUlJnHrqqcyYMYN58+at9bGzZs2isLCQY445psHXyiGHHEJqamqDGTnTp08nOzubvffeu24sJiaGo48+mgULFvD6668380fW8GfQpEmTGDt2LAB77bVX3RKD1157DQgn26NGjSIrK4vk5GT69+/PKaecstE1DBkyhKysrAZjiYmJHHTQQcyfP5+SkpLVHnPvvfcya9Ysrrnmmiadq7i4mJycnAafi7S0NFJTUxt835eVldGtW7e69zMzMyktLQXCv8MuueQSrr32WlJTU5t0fkntl9PjJXUIRUVFLFq0iCAIKCgo4JZbbmHZsmUcf/zxdccEQcDo0aOZNm0ap556Kttttx0vvPACF198MT/99BM33XQTEG4SnnzySYqLi0lLSyMIAt566y1iYmKYPn06o0ePBsJ/5MbExLDrrr
s2y8fw9ddfc+yxx3LGGWdw+umns8UWWzT5OVY2Nt27d2/y8y5btozdd9+dL7/8klNOOYUddtiBRYsW8dRTTzF//nyysrKoqalh9OjRvPnmm/zqV79i8ODBfPbZZ9x000188803PPHEEw2e89VXX+Xhhx/mnHPOISsra71TXqurq9e41jY5OZkuXbo0GBs7diwDBw7kr3/9a4MXXdb08ebn5zNixAhKS0s599xz6d69O5MnT2b06NE8+uijHHHEEQ2e+6qrriIhIYGLLrqI8vJyEhISqKmpYcmSJeusf6X09HTi4+MB+N///kdqaipPPvkkM2bM4Pvvv+e0007jtNNOa9RzNZfi4mIqKipYtGgR99xzD7NmzeIPf/jDeh/3yiuvMGLECG6++WauvvpqFi9eTF5eHn/84x8b7NXw0UcfATB06NAGj99xxx2JiYnho48+avD92FgrX4CKbGIiHXfccVxxxRW89tprdQ3n/fffzz777LPGJrxv377MmDGDWbNmrXeNdVOs6es2ISGBtLQ0gA3++vnoo4/YfPPN655npZXLAT7++GN69+69xudZ+ULaml4ISk5O5qOPPqpbqlFeXr7G41JSUgD44IMP2G+//dZb+8qfxauqrKxc5+P22GMPzj33XG6++Wb+8Ic/MHjwYAAGDx5MQUEB+++/P9nZ2VxyySVkZGQwZ84cpk6d2uA5li5dSnV19XprTElJqfu41mbBggVrPK6kpITf//73/OEPfyAvL2+954o0cuRIHn30UW655RYOPfRQVqxYwS233EJRURHnnXde3XE77bQTN954I4MGDeL777/n+eef54477gDgr3/9K5tssgknnHBCk84tqZ2LZswvSRtr5ZTMVd8SExODSZMmNTj2iSeeCIDg6quvbjB+1FFHBaFQKPj222+DIAiC999/PwCCZ599NgiCIPj0008DIBg7dmwwbNiwuseNHj062H777ddbI42YHt+3b98ACJ5//vlGfdwrp+QuXLgwWLhwYfDtt98Gf/3rX4NQKBRss802jXrevn37NphS/ec//zkAgqlTp6527MrpnPfee28QExMTTJ8+vcH9EydODIDgrbfeavBxx8TEBJ9//nmjPqY999xzjZ9LIDjjjDPqjrv88ssDIDj22GPX+DGt6eM9//zzA6BB3SUlJUH//v2Dfv361U1/nzZtWgAEm2666WrT6n/44Ye11rfqW+T03t///vdBcnJyUFRUFEybNq3R09jXNz0+UmOmx48aNaquvoSEhOCMM85Y53KFIAgvMwGC7t27B6mpqcHf//734KGHHgoOOOCAAAgmTpzYoN7Y2Ng1Pk92dnYwbty4dZ5r5ffyyy+/HCxcuDCYN29e8OijjwbZ2dlBYmJiMG/evAbHR063Hjp0aHDqqacGQRAES5cuDRISEoLJkyfXfT4jp8e/+OKLQWxsbBAbGxsMHz48+N3vfhe88MILQUVFxWo1NXZ6/Nq+DiKXH2zo18+QIUOCvffee7Xzfv7556t9Dla1cOHCIBQK1f3brPTVV1/VnWvRokVBEATBb37zmyAmJiaYM2dOg2PHjRsXAME555yzzn+Htf0sjnxbdXr8qj+D1jY9/vHHH1/n1PvI52vMv+/ll1++zueZPXt2kJSUFJxwwgmr3XfRRRcF/fv3D1asWFF3zsZOj8/Pzw/22WefBrVkZWUFb7/9doPjPv3006BXr151x4wZMyaorq4Ovv/++yA5OTmYMWNGo84nqeMwaZfUIfzf//0fm2++ORDeAfm+++7jtNNOo2vXrnWb+zz77LPExsZy7rnnNnjshRdeyKOPPspzzz3HOeecw/bbb09qaipvvPEGBx54INOnT6+bJn7kkUdSWlpKcnIyb7755gYlh2vTv39/Ro0a1ejjly9fTnZ2doOxESNGcO+9927Q8z722GNsu+22q6XOQN10zkceeYTBgwczaNCgBmnayoRz2rRpdZslAey5555sueWWjf6Y+vXrV5
coRerVq9dqY7/+9a/X+Bxr+nifffZZdt555wabdqWmpvKrX/2KSy+9lC+++KJB6jphwoTVUse8vLxG79i97bbb1t0eP348N910E8OHD2fYsGEsXbqUsrKyRk+Dby7XXXcdF154IfPmzWPy5MlUVFRQVVW1zsesnAq/ePFiHnzwQY455hgAjjrqKLbeemuuvvpqzjjjDCA8pTchIWGNz5OUlERZWVmj6tx3330bvN+vXz/uu+++NX4NrHTcccdx1VVX8Z///KduOvkRRxzBBx98sNqx++23HzNmzODaa6/lhRdeYMaMGfztb38jOzubO++8s24mTVMkJSXx9NNPrzYeOe16Q79+ysrKSExMXOM5V96/NllZWRx99NFMnjyZwYMHc8QRR/DTTz/xm9/8hvj4eCorK+sef9pppzFx4kSOPvpobrrpJnJzc3n44Yd5/PHH13ueSJE/iyNdeOGFjUrB12TlUoZnnnmGbbfdtm4WwqqmTJnSqDo33XTTtd5XWlrK2LFjSU5O5rrrrmtw3zfffMO//vUvHnjggTV+TtYnJSWFLbbYgl69enHIIYdQUlLCTTfdxJFHHsn06dPZbLPNANh6662ZPXs2s2bNIiMjo278wgsvZMyYMeyyyy5MnTqVK6+8kuLiYk4++WQuu+yyDV4uI6nts2mX1CHsvPPODablHnvssWy//facc845HHLIISQkJPDjjz/Ss2dPunbt2uCxK6dh/vjjj0B4x+fhw4czffp0IDwNfvfdd2e33Xajurqad955h9zcXJYsWVK33rY5NHUX4MhGITExkf79+6+xsWns83733XeMGTNmncfMnj2bL7/8crUXC1YqKCjYoHOv1KVLl9WatrVZ23OvafzHH39k2LBhq41Hfu4jm/Y1PUdSUlKja4u09dZb884773DVVVfx6KOPUlJSQkZGBocddhg33HDDWqc2N7ftttuu7vbxxx/PDjvsULcr+dqsfGEhPj6eo446qm48JiaGY445hssvv5y5c+fSp08fkpOTqaioWOPzrFixotEvUqxs+oqKivjvf//LG2+8sd4Gady4cVx00UU899xzTJkyhUMOOWS17/NIO+20E1OnTqWiooJPPvmExx9/nJtuuomjjjqKjz/+uEkvNEH4Z8b6vjY29OsnOTl5jZdxXLkr/vr+XW+77TbKysq46KKLuOiii4Dw53/AgAFMnTq1bl30Nttsw/3338+vf/3ruiU/eXl5/POf/+TMM89s9PrpVX8Wr9StW7c1TptvjD333JMxY8Zw5ZVXctNNNzFy5EgOP/xwjjvuuAZfGxu7VKm6uppx48bxxRdf8Nxzz9GzZ88G95933nmMGDFivT8n12bs2LHExcU1eIHnsMMOY+DAgfzxj3/koYceqhtPSkpq8O/46quv8uKLL/L111/z9ddfM27cOG677Tb69evHscceS+/evde6072k9s+mXVKHFBMTw1577cW//vUvZs+ezZAhQ5r0+N12241rrrmGFStWMH36dP74xz+SkZHBVlttxfTp08nNzQVo1qa9qclrYxqFDXnedampqWHrrbfmxhtvXOP9qzagLZkmr+25m+Oca3qO6upqFi5c2KjHZ2ZmNkidt99+e6ZOncprr73G9ddfz5577sk111zDF198wccff7zahoktLSEhgdGjR3PdddetM/XPzMwkKSmJjIyM1S5ftnK9+NKlS+nTpw89evSgurqagoKCBmvJKyoqWLx48WoN0NpENn2HH344u+22G8cddxxff/31WhvHHj16MHLkSG644QbeeustHnvssUadKyEhgZ122omddtqJzTffnJNPPplHHnmEyy+/vFGPb4oN/frp0aPHGq9Tv/Iyf+v7d01PT+fJJ59k7ty5zJkzh759+9K3b19GjBhBdnZ2gw35jjrqKEaPHs0nn3xCdXU1O+ywQ91GcGtKz1tLKBTi0Ucf5Z133uHpp5/mhRde4JRTTuGGG27gnXfeqfu6WLhwYa
PS/NTU1DV+LZ1++uk888wzTJkypcGGfBBump9//nmmTp3a4DryVVVVlJWVMWfOHDIzM1fbe2CllWvTb7/99gbjmZmZ7Lbbbrz11ltrrbe6uprzzjuPSy65hE022YSrrrqKESNG1DXpZ5xxBlOmTLFplzowm3ZJHdbKqb8rp/j27duXl19+mZKSkgYp3FdffVV3/0q77747FRUVPPDAA/z00091zfkee+xR17Rvvvnmdc17RzBgwABmzZq13mM++eQT9tlnn3Y1FbNv3758/fXXq42v6XO/NvPmzWv0zIFp06bV7eS/qtzcXC655BISEhK48MIL+eqrr5p1Q7TGKisrIwgCSkpK1tq0x8TEsN122/H+++9TUVHR4IWIn3/+GaBu1sXKJH/mzJkcdNBBdcfNnDmTmpqaBkl/Y8XGxnLttdey11578e9//5tLLrlkrcced9xxnHbaaWRkZDQ4f2OtfKFgXde83xgb+vWz3XbbMW3atLqNMVd699136+5vjD59+tCnTx8gfJWIDz74YI2J8coXMlZ6+eWXgdWXLbSE9f1M2WWXXdhll1245ppruP/++xk/fjwPPvhg3aaOO+20U92MqXW5/PLLueKKKxqMXXzxxdx9993885//5Nhjj13tMXPnzgWoW24V6aeffqJ///7cdNNNnH/++Ws8Z35+PsAaX1SorKxc51KVW2+9lZKSkrqZEj///HODF2t69uy5xhd2JHUcNu2SOqTKykpefPFFEhIS6qZAH3TQQdx+++38+9//5tJLL6079qabbiIUCnHggQfWjQ0bNoz4+Hiuv/56MjMz65L63XffnbvvvpuMjAwOOOCA1v2gWtiYMWP4y1/+wuOPP77auvYgCAiFQhx99NE8++yz3HHHHQ0uQQXhJrCmpma1Xd7bgoMOOoh//vOfzJgxg+HDhwPhPQFuv/12+vXr16jp0Bu6Jnnp0qVr3Pl85W7aLb22fdXkG8JN22OPPUbv3r0b3Dd37lxKS0sZNGhQ3dgxxxzDO++8w+TJkzn99NOB8NTsKVOmsOWWW9Y1D3vvvTeZmZnceuutDZrmW2+9lZSUFA4++OANqn/kyJHsvPPO/POf/+T888+vW8u9qqOOOop58+axxRZbrHVtPdQ3xKs2iM8++yzABl21oTE29OvnqKOO4h//+Ae33357XdNWXl7O3XffzbBhwxrMblnT529NLr30Uqqqqvjtb3+7zuNmz57NxIkTOeSQQ1olaV/5s2PVSxEuXbqUjIyMBp+zlS9WRC4d2NA17X//+9/5xz/+wR/+8IcGu7hH2nvvvevW90f61a9+Rd++ffnjH//I1ltvXTe+8koeAwYMAGCzzTYjJiaGhx56iDPOOKPuY5k/fz7Tp09vsN9GpCVLlnD55ZczceLEuq/93NzcuhdtAL788ssm72QvqX2xaZfUITz33HN1qWlBQQH3338/s2fP5pJLLqlLpw499FD22msv/vjHPzJnzhy23XZbXnzxRZ588knOP//8uj+uILxh0I477sg777xTd412CCfty5cvZ/ny5U2aGj9z5kyuvvrq1cZHjhy51j/WWtvFF1/Mo48+ytixYznllFPYcccdWbJkCU899RQTJ05k22235YQTTuDhhx/m17/+NdOmTWPXXXelurqar776iocffrju+ugbqqioiPvuu2+N923Mpn+XXHIJDzzwAAceeCDnnnsumZmZTJ48mR9++IHHHnuMmJiY9T7Hhq5JvvDCC1m8eDFHHnkkhYWFzJ8/n7/85S/87W9/Y5dddmnQQPz44491GwnOnDkToO7rpm/fvg0u8/T000/zySefAOEXAD799NO6Y0ePHs0222wDwIEHHkivXr0YNmwYOTk5zJ07l7vvvpuff/65wRpagBNPPJHXX3+9wSX0zjjjDO68807OPvtsvvnmG/r06cO9997Ljz/+2GBtbnJyMldddRVnn302Y8eOZdSoUUyfPp377ruPa665hszMzCb/26108c
UXM3bsWCZNmrTWDQjT09NXS0/X5De/+Q2lpaUcccQRDBo0iIqKCt5++20eeugh+vXrt9oU42+//XaN37vbb7993QsRVVVVa/26PeKII+jSpcsGf/0MGzaMsWPHcumll1JQUMBmm23G5MmTmTNnDnfddVeDY9f0+bvuuuuYNWsWw4YNIy4ujieeeIIXX3yRq6++ukGiDrDlllsyduxY+vTpww8//MCtt95KZmYmEydObHLdG2K77bYjNjaW66+/nqKiIhITE9l77725//77+c9//sMRRxzBgAEDKCkp4Y477iAtLa3BC0Qbsqb98ccf53e/+x0DBw5k8ODBq30e99tvP3JzcxvMVIh0/vnnk5uby+GHH95gfJ999gHqL1mYnZ3NKaecwp133sk+++zDkUceSUlJCf/5z38oKytr8EJypMsuu4ytt9667hr2UP8C65lnnknfvn257bbb1rpkSVIHEd3N6yVp46zpMkNJSUnBdtttF9x66611lypbqaSkJPjtb38b9OzZM4iPjw8GDhwY/P3vf1/tuCAIgosvvjgAguuvv77B+GabbRYAwXfffdeoGletL/LtqquuCoKgaZcNCoL6S76tz7qed9XLLQVBECxevDg455xzgk022SRISEgIevXqFUyYMKHuslBBEAQVFRXB9ddfHwwZMiRITEwMunXrFuy4447BlVdeGRQVFTX4uNd3qbtI67rkW+Svq5WXfFu4cGGTPt7vvvsuOOqoo4KMjIwgKSkp2HnnnYNnnnmmwTFrukTYxnr//feDk08+ORgwYECQmJgYxMTEBL169QpOPfXUYMGCBWs8/5re9txzzwbHrutSY3fffXfdcf/+97+D3XbbLcjKygri4uKC7Ozs4NBDDw3eeOON1Wpd+TlYVX5+fjBhwoQgMzMzSExMDIYNG7bWyxPefvvtwRZbbBEkJCQEAwYMCG666aY1fn+tauX38pou61VdXR0MGDAgGDBgQFBVVVVX66qXEFvVmj6fzz33XHDKKacEgwYNClJTU4OEhIRgs802C37zm98E+fn5DR6/rkuIrbyM2ro+D0Dwww8/rPdjX5+ysrLgoosuCvLy8oLExMRgp512WuO//5o+f88880yw8847B127dg1SUlKCXXbZJXj44YfXeJ5x48YFvXv3DhISEoKePXsGv/71r1f7N1mbdX3+Vta2vku+BUEQ3HHHHcGmm24axMbG1l3+7cMPPwyOPfbYoE+fPkFiYmKQk5MTHHLIIcHMmTMbVdu6rPx5sra3VS8/t6q1/czp27dv0Ldv3wZjlZWVwS233BJst912QWpqapCamhrstddewauvvrrG5/7000+DhISE4KOPPlrtvkmTJgX9+vULunfvHlxwwQV13xeSOqZQEES8HCtJklrMa6+9xqRJk5g0aVK0S5EkSe3E+ucDSpIkSZKkqLBplySplfTr12+1ta+SJEnr4vR4SZIkSZLaKJN2SZIkSZLaKJt2SZIkSZLaKJt2SZIkSZLaqLhoF9AW1NTU8PPPP9O1a1dCoVC0y5EkSZIkdXBBEFBSUkLPnj2JiVl7nm7TDvz888/07t072mVIkiRJkjqZefPm0atXr7Xeb9MOdO3aFQj/Y6WlpUW5GkmSJElSR1FZA88tgVeXwspLt8WGYPf4Yk7apnddP7o2Nu1QNyU+LS3Npl2SJEmS1Cy+L4PJC2BBJcSnhsf6JsGEPOhaDifBepdo27RLkiRJktSMKmvgyUXwckS6HheCQ7rDqEyICUFxeeOey6ZdkiRJkqRm8l1tup5fUT/WLwlOyoMeiU1/Ppt2SZIkSZI2UkUNPLGo4dr1uBCMzoL9uoXT9Q1h0y5JkiRJ0kaYXRpO1xdW1o9tmgwTciFvA9L1SDbtkiRJkiRtgPIaeHwhTCusH4sPweFZsPdGpOuRbNolSZIkSWqir0vhngWwKCJdH5Ac3hk+N6H5zmPTLkmSJElSI62ohqmL4PXC+rH4EByRDXtlNE+6Hs
mmXZIkSZKkRvhyOdybD4sj0vWByXBiHuQ0Y7oeyaZdkiRJkqR1WFENjy6E6UX1Y4kxcEQWjMyAUDOn65Fs2iVJkiRJWovPl8O9C2BpVf3YFilwYi5ktVC6HsmmXZIkSZKkVZRVwyML4a1V0vWjsmH39JZN1yPZtEuSJEmSFGHWsvDa9cKIdH1wCpyQB93jW7cWm3ZJkiRJkoDSani4AGYU148l1abru7Viuh7Jpl2SJEmS1Ol9ugzuy4eiiHR9SBc4PhcyWzldj2TTLkmSJEnqtJZXw0MF8O4q6frROTAiLTrpeiSbdkmSJElSp/RxCUwpgOKIdH2r2nS9WxTT9Ug27ZIkSZKkTmVZFTxYAO+X1I+lxMIx2TCsDaTrkWzaJUmSJEmdxoclcH8+lFTXj22TCuNzIKONpOuRbNolSZIkSR1eSRU8UAAfrJKuj8uBnbu2rXQ9kk27JEmSJKnDCoJwo/5AASyLSNe3S4XxuZDWxrviNl6eJEmSJEkbprgqPBX+o2X1Y11i4dgcGNqG0/VINu2SJEmSpA4lCMKbzD1YEL6k20o7dA037G09XY/UjkqVJEmSJGndiqpgSj58EpGud42FY3Nhx67Rq2tD2bRLkiRJktq9IIB3i+GhhVAaka4P7RrebK5rO+1+22nZkiRJkiSFFVbCffnw2fL6sa6x4Y3mtm+H6Xokm3ZJkiRJUrsUBDCjGB4ugLKa+vGd08LpepfY6NXWXGzaJUmSJEntztJKuDcfPo9I19Pi4Phc2DY1enU1N5t2SZIkSVK7EQTwVhE8shBWRKTru6TB0R0kXY9k0y5JkiRJaheWVMI9C+DL0vqxjNp0fesOlK5HsmmXJEmSJLVpQQDTi+DRhVAeka6PSIex2ZDSwdL1SDbtkiRJkqQ2a1FFeO36VxHperc4OCEPhnSJXl2txaZdkiRJktTmBAG8XghTFzVM13dLh6OyIbkDp+uRbNolSZIkSW3Kwgq4Jx++iUjXM+PhhFzYshOk65Fs2iVJkiRJbUIQwLRCeHwRVESk63tkwJgsSOok6Xokm3ZJkiRJUtQVVIR3hp9dVj/WPR5OzIVBnSxdj2TTLkmSJEmKmpoAXl0KTyyCyqB+fGQGHJkNiTFRK61NsGmXJEmSJEVFfgVMWgDfR6TrWfEwIQ82T4leXW2JTbskSZIkqVXVBPDyUngqIl0PAXt1g8OzTNcj2bRLkiRJklrNL+UweQH8sKJ+LCchvHZ9oOn6amzaJUmSJEktriaAF5fA04uhKiJd36cbHJYFCabra2TTLkmSJElqUT+Xh9eu/xiRrucmhNeuD0iOXl3tgU27JEmSJKlFVAfwwhJ4ZnH4NoTT9f0yYXR3iDddXy+bdkmSJElSs5u/IpyuzyuvH+tRm673N11vNJt2SZIkSVKzqaqB55fAs0sapuujMuEQ0/Ums2mXJEmSJDWLebXp+vyIdL1nIkzIhX6m6xvEpl2SJEmStFGqasLJ+nNLwrvEA8SE4IBMODgT4kzXN5hNuyRJkiRpg/24Inzd9Z8i0vVeieG1632SoldXR2HTLkmSJElqsqqa8K7wLyxtmK4flAkHmq43G5t2SZIkSVKTzCkLr13/paJ+rHdtut7bdL1Z2bRLkiRJkhqlsgaeXgwvLoHacJ3YEBzcPbx+PTYU1fI6JJt2SZIkSdJ6fV8WXru+ICJd75MEJ+XBJonRq6ujs2mXJEmSJK1VZQ08uQheXlqfrseFwtdc3990vcXZtEuSJEmS1ui72nQ9PyJd75cUXrve03S9Vdi0S5IkSZIaqKiBJxbBq6uk66OzYL9u4V3i1Tps2iVJkiRJdWaXhtP1hZX1Y5smw4RcyDNdb3U27ZIkSZIkymvg8YUwrbB+LD4Eh2XBPqbrUWPTLkmSJEmd3NelcM8CWBSRrg9IDq9dz02IXl2yaZckSZKkTmtFNUxdBK8X1o/Fh+CIbNgrw3S9LbBplyRJkqRO6KvlcE8+LI5I1wcmw4l5kGO63m
bYtEuSJElSJ7KiGh5dCNOL6scSYuDILBiZASHT9TbFpl2SJEmSOokvlofXri+tqh/bPCW8M3yW6XqbZNMuSZIkSR1cWTU8shDeikjXE2NgTDbskW663pbZtEuSJElSBzZrGdybD4UR6fqglPDa9e7x0atLjWPTLkmSJEkdUGk1PFwAM4rrx5Ji4Khs2M10vd2waZckSZKkDubTZXBfPhRFpOtbdoETciHTdL1dsWmXJEmSpA5ieW26/s4q6frROTAizXS9PbJplyRJkqQO4OMSmFIAxRHp+lZd4Phc6Ga63m7ZtEuSJElSO7asCh4sgPdL6sdSYuHobNjFdL3ds2mXJEmSpHbqwxK4Px9KquvHtkmF8TmQYbreIdi0S5IkSVI7U1Kbrs9cJV0flwM7dzVd70hs2iVJkiSpnQgC+KAEHiiAZRHp+napMD4X0uzwOhw/pZIkSZLUDhRXhZv1DyPS9S6xcGwODDVd77Bs2iVJkiSpDQuC8DT4BwrCl3RbaYeu4YbddL1j89MrSZIkSW1UURVMyYdPltWPpcbCcbmwY9fo1aXWY9MuSZIkSW1MEMC7xfDQQiiNSNeHdg1vNtfVTq7T8FMtSZIkSW1IYSVMKYBPI9L1rrXp+g6m652OTbskSZIktQFBADOK4ZFV0vWd0+CYbEi1e+uU/LRLkiRJUpQtrYT78mHW8vqxtDg4Phe2TY1eXYo+m3ZJkiRJipIggLeL4eECWFFTP75LGhydE76kmzo3m3ZJkiRJioIllXBvPnwRka5nxMH4XNjGdF21bNolSZIkqRUFAUwvgscWNkzXR6TD2GxIMV1XBJt2SZIkSWoliyvhngXwVWn9WEYcnJALW5muaw1s2iVJkiSphQUBvF4IUxdBeUS6vls6HJUNyabrWgubdkmSJElqQYsqYHI+fBORrneLgxPzYMsu0atL7YNNuyRJkiS1gCCA1wrD6XpFRLq+RwaMyYIk03U1gk27JEmSJDWzgorw2vXZZfVj3ePhxFwYZLquJrBplyRJkqRmUhPAq0vhiUVQGdSPj8yAI0zXtQFs2iVJkiSpGeRXwOQF8F1Eup4VDxPyYPOU6NWl9s2mXZIkSZI2Qk0ALy+Fp1ZJ1/fuBodnQWJM9GpT+2fTLkmSJEkb6JdyuCcfvo9I13MSwmvXB5quqxnYtEuSJElSE9UE8FJtul5Vm66HgH26wWFZkGC6rmZi0y5JkiRJTfBzeXjt+pwV9WO5CeG16wOSo1eXOiabdkmSJElqhOoAXlgC/1vcMF3fLxNGd4d403W1AJt2SZIkSVqP+Stgcj7MjUjX82rT9U1N19WCbNolSZIkaS2qA3huMTy7JHwbwun6qEw4xHRdrcCmXZIkSZLWYN6K8Nr1eeX1Yz0TYUIu9DNdVyuxaZckSZKkCFU18NyScLpeU5uux4TggEw4OBPiTNfVimzaJUmSJKnW3BUwaQH8FJGub5IYXrveNyl6danzsmmXJEmS1OlV1cD/lsDzq6TrB2XCgabriiKbdkmSJEmd2pyy8M7wP0ek670S4aQ86G26riizaZckSZLUKVXWwNOL4cUlUBuuE7syXe8evi1Fm027JEmSpE7n+7LwzvALKurH+iSFd4bvZbquNsSmXZIkSVKnUVkDTy2GlyLS9bhQ+Jrr+2earqvtsWmXJEmS1Cl8V5uu50ek6/2SwjvD90yMXl3Suti0S5IkSerQKmrgyUXwytKG6froLNivW3iXeKmtsmmXJEmS1GHNLoV78qEgIl3vX5uu9zBdVztg0y5JkiSpwymvgccXwmuF9el6fG26vq/putoRm3ZJkiRJHco3peG164sq68cGJIfT9dyE6NUlbQibdkmSJEkdQnkNTK1N11eKD8ER2bBXhum62iebdkmSJEnt3lfLw2vXF0ek6wOT4cQ8yDFdVztm0y5JkiSp3VpRDY8tgjcK68cSYuDILBiZASHTdbVzNu2SJEmS2qUvlsM9C2BpVf3Y5ikwIReyTN
fVQdi0S5IkSWpXyqrhkYXwVlH9WGJtur5nhum6OhabdkmSJEntxqxlcF9+w3R9UEp47Xr3+OjVJbUUm3ZJkiRJbV5pbbr+dkS6nhQDR2XDbumm6+q4YqJ58urqai677DL69+9PcnIyAwYM4KqrriIIgrpjgiDgz3/+Mz169CA5OZl9992X2bNnN3ieJUuWMH78eNLS0sjIyODUU09l2bJlrf3hSJIkSWoBny6DK+c0bNi37AKX94PdM2zY1bFFtWm//vrrufXWW/n3v//Nl19+yfXXX8/f/vY3brnllrpj/va3v3HzzTczceJE3n33Xbp06cKoUaNYsWJF3THjx4/n888/56WXXuKZZ57hjTfe4Fe/+lU0PiRJkiRJzWR5Ndz9C/zfT1BYOx0+KSY8Ff7cTSDT6fDqBEJBZKzdyg455BByc3O566676sbGjBlDcnIy9913H0EQ0LNnTy688EIuuugiAIqKisjNzWXSpEmMGzeOL7/8ki233JL333+foUOHAvD8889z0EEHMX/+fHr27LneOoqLi0lPT6eoqIi0tLSW+WAlSZIkNdontWvXiyPWrm/VBY7PhW426+oAGtuHRjVpHzFiBK+88grffPMNAJ988glvvvkmBx54IAA//PADCxYsYN999617THp6OsOGDWPGjBkAzJgxg4yMjLqGHWDfffclJiaGd999d43nLS8vp7i4uMGbJEmSpOhbVgV3/gz/+am+YU+OgQl5cM4mNuzqfKK6Ed0ll1xCcXExgwYNIjY2lurqaq655hrGjx8PwIIFCwDIzc1t8Ljc3Ny6+xYsWEBOTk6D++Pi4sjMzKw7ZlXXXnstV155ZXN/OJIkSZI2wkclMCUfSqrrx7ZJhfE5kGGzrk4qqk37ww8/zJQpU7j//vsZMmQIH3/8Meeffz49e/ZkwoQJLXbeSy+9lAsuuKDu/eLiYnr37t1i55MkSZK0diVV8GABzCypH0uJhXE5sHNXN5pT5xbVpv3iiy/mkksuYdy4cQBsvfXW/Pjjj1x77bVMmDCBvLw8APLz8+nRo0fd4/Lz89luu+0AyMvLo6CgoMHzVlVVsWTJkrrHryoxMZHExMQW+IgkSZIkNcUHJXB/PiyLSNe3TYXxuZDuBaql6K5pLy0tJSamYQmxsbHU1NQA0L9/f/Ly8njllVfq7i8uLubdd99l+PDhAAwfPpzCwkI++OCDumNeffVVampqGDZsWCt8FJIkSZKaqrgKbvsZbv+5vmHvEgun9YAze9qwSytF9Vvh0EMP5ZprrqFPnz4MGTKEjz76iBtvvJFTTjkFgFAoxPnnn8/VV1/NwIED6d+/P5dddhk9e/bk8MMPB2Dw4MEccMABnH766UycOJHKykrOOeccxo0b16id4yVJkiS1niAIT4N/oCB8SbeVtk+F43IhzWZdaiCq3xK33HILl112GWeddRYFBQX07NmTM844gz//+c91x/zud79j+fLl/OpXv6KwsJDddtuN559/nqSkpLpjpkyZwjnnnMM+++xDTEwMY8aM4eabb47GhyRJkiRpLYqrwhvNfbysfiw1Fo7NgR1duy6tUVSv095WeJ12SZIkqeUEAbxXEt5srjQiXR/aNbzZXFfTdXVCje1D/faQJEmS1GIKK2FKAXwaka53jQ1Phd+ha/TqktoLm3ZJkiRJzS4I4J1ieHhhw3R95zQ4JhtS7USkRvFbRZIkSVKzWloJ9+XDrOX1Y2lxMD4HtjNdl5rEpl2SJElSswgCeLsYHi6AFTX147ukwdE54Uu6SWoam3ZJkiRJG21JJdybD19EpOsZcTA+F7ZJjV5dUntn0y5JkiRpgwUBvFkEjy5smK6PSIex2ZBiui5tFJt2SZIkSRtkcSXcuwC+LK0fy4iDE3JhK9N1qVnYtEuSJElqkiCAN4rgsYVQHpGu71qbriebrkvNxqZdkiRJUqMtqoDJ+fBNRLreLQ5OyIMhXaJXl9RR2bRLkiRJWq8ggNcKYeoiqI
hI13dPh6OyIcl0XWoRNu2SJEmS1qmgAu5ZALPL6se6x4fXrg82XZdalE27JEmSpDWqCWBaITy+ECqD+vGRGXBElum61Bps2iVJkiStJr8CJi+A7yLS9ax4ODEPtkiJXl1SZ2PTLkmSJKlOTQCvLIUnFzVM1/fKgCOyITEmaqVJnZJNuyRJkiQAFpSHd4b/PiJdz46HCXkw0HRdigqbdkmSJKmTqwngpaXw1CKoqk3XQ8De3eDwLEgwXZeixqZdkiRJ6sR+Lg+vXZ+zon4sJwFOyoMBydGrS1KYTbskSZLUCdUE8MISeGZxw3R9v0wY3R3iTdelNsGmXZIkSepkfqpN13+MSNfzEsJr1zc1XZfaFJt2SZIkqZOoDuD5JfC/xeHbEE7X98+EQ03XpTbJpl2SJEnqBOatCKfr88rrx3rUrl3vZ7outVk27ZIkSVIHVlUDzy2BZ5eE17EDxIRgVDc4pDvEma5LbZpNuyRJktRBza1N1+dHpOs9E8Ppet+k6NUlqfFs2iVJkqQOpqoG/rckvH49Ml0/KBMOzDRdl9oTm3ZJkiSpA/lxBUxaEL7++kq9atP13qbrUrtj0y5JkiR1AJU14Wuuv7i0Ybp+cCYcYLoutVs27ZIkSVI790NZeO36LxX1Y32SYEIu9DJdl9o1m3ZJkiSpnaqsgacWw0tLoDZcJzYU3hV+VGb4tqT2zaZdkiRJaoe+q03X8yPS9b5J4bXrPROjV5ek5mXTLkmSJLUjFTXw5CJ4ZWl9uh4XgkO7w/6Z4XXskjoOm3ZJkiSpnZhdCvfkQ0FEut4/CSbkQQ/TdalDsmmXJEmS2rjyGnhiEUxbJV0/LAv27Wa6LnVkNu2SJElSG/ZNaXjt+qLK+rEByeF0PTchenVJah027ZIkSVIbVF4DUxfCa4X1Y/EhODwL9jZdlzoNm3ZJkiSpjflqeXjt+uKIdH2z2nQ9x3Rd6lRs2iVJkqQ2YkU1PLYI3iisH0uIgSOyYK8MCJmuS52OTbskSZLUBnxZm64viUjXN0+BE3Mh23Rd6rRs2iVJkqQoKquGRxfCm0X1Y4kxcGQW7Jlhui51djbtkiRJUpR8vhzuXQBLq+rHBqXACbmQZbouCZt2SZIkqdWVVsMjC+HtVdL1o7Jh93TTdUn1bNolSZKkVvTZMrgvHwoj0vXBKXBCHnSPj15dktomm3ZJkiSpFSyvhocL4J3i+rGkGBibDbuarktaC5t2SZIkqYV9UpuuF0ek60O6hNeudzNdl7QONu2SJElSC1leDQ8WwHsR6XpyDBydA8PTTNclrZ9NuyRJktQCPiqBKflQUl0/tnUXOD4XMkzXJTWSTbskSZLUjEqqwun6zJL6sZRYOCYbhpmuS2oim3ZJkiSpmXxQAg+skq5vmwrjcyHdv7wlbQB/dEiSJEkbqaQK7i+ADyPS9S6xMC4Hdupqui5pw9m0S5IkSRsoCMLT4B8oCG86t9L2qXBcLqT517akjeSPEUmSJGkDFFeFN5r7eFn9WGosHJsDO5quS2omNu2SJElSEwQBvFcS3myuNCJdH9o1PB2+q39hS2pG/kiRJEmSGqmwEqYUwKcR6XrX2PBU+B26Rq8uSR2XTbskSZK0HkEA7xTDwwsbpus71abrqf5VLamF+ONFkiRJWoellXBfPsxaXj+WFgfjc2A703VJLcymXZIkSVqDIIC3i+HhAlhRUz8+LA2OyQlf0k2SWppNuyRJkrSKJbXp+ucR6Xp6HByfC9ukRq8uSZ2PTbskSZJUKwjgzSJ4dGHDdH1EOozNhhTTdUmtzKZdkiRJAhZXwr0L4MvS+rGMODghF7YyXZcUJTbtkiRJ6tSCAN4ogscWQnlEur5rbbqebLouKYps2iVJktRpLaqAe/Lh64h0vVscnJAHQ7pEry5JWsmmXZIkSZ1OEMBrhTB1EVREpOu7p8NR2ZBkui6pjbBplyRJUqdSUAH3LIDZZfVj3ePDa9cHm65LamNs2iVJktQpBAG8Wg
iPL4TKoH58ZAYckWW6LqltsmmXJElSh5dfAZMXwHcR6XpWPJyYB1ukRK8uSVofm3ZJkiR1WDUBvLIUnlzUMF3fKwOOyIbEmKiVJkmNYtMuSZKkDmlBOUzOh+8j0vXs2nR9c9N1Se2ETbskSZI6lJoAXloKTy2Cqtp0PQTs3Q0OyzJdl9S+2LRLkiSpw/i5PLx2fc6K+rGcBDgpDwYkR68uSdpQNu2SJElq92oCeGEJPLO4Ybq+bzcYnQUJpuuS2imbdkmSJLVrP9Wm6z9GpOu5ten6pqbrkto5m3ZJkiS1S9UBPL8E/rc4fBvC6fr+mXBod4g3XZfUAdi0S5Ikqd2ZvwImLYB55fVjPWrT9X6m65I6EJt2SZIktRtVNfDcEnh2SXgdO0BMCEZ1g0O6Q5zpuqQOxqZdkiRJ7cK82nR9fkS63jMxnK73TYpeXZLUkmzaJUmS1KZV1cD/loTXr0em6wdmwkGZpuuSOjabdkmSJLVZP9am6z9HpOu9atP13qbrkjoBm3ZJkiS1OZU14V3hX1jaMF0/OBMOMF2X1InYtEuSJKlN+aEsfN31Xyrqx3rXpuu9TNcldTI27ZIkSWoTKmvgqcXw0hKoDdeJDYV3hR+VGb4tSZ2NTbskSZKi7vuy8Nr1/Ih0vW9SOF3vmRi9uiQp2mzaJUmSFDUVNfDkInhlaX26HheCQ7vD/pnhdeyS1JnZtEuSJCkqvi2FyflQEJGu96tN13uYrksSYNMuSZKkVlZem66/ukq6flgW7NvNdF2SItm0S5IkqdV8UxreGX5RZf3YpskwIRfyTNclaTU27ZIkSWpx5TUwdSG8Vlg/Fh+Cw7Ngb9N1SVorm3ZJkiS1qK+Wwz35sDgiXd8sGSbkQU5C9OqSpPbApl2SJEktYkU1PLYI3iisH4sPwZHZsFcGhEzXJWm9bNolSZLU7L6sTdeXRKTrm6fAibmQbbouSY1m0y5JkqRmU1YNjy2E6UX1Y4kxcGQW7Jlhui5JTWXTLkmSpGbx+XK4dwEsraof26I2Xc8yXZekDWLTLkmSpI1SWg2PLIS3V0nXj8qG3dNN1yVpY9i0S5IkaYN9tgzuy4fCiHR9cAqckAfd46NXlyR1FDbtkiRJarLSanioAN4prh9LioGx2bCr6bokNRubdkmSJDXJJ8tgSj4URaTrQ7rACbnQzXRdkpqVTbskSZIaZXltuv5uRLqeHANH58DwNNN1SWoJNu2SJElar49K4P4CKI5I17fuAsfnQobpuiS1GJt2SZIkrVVJFTxYADNL6sdSYuGYbBhmui5JLc6mXZIkSWv0YQncnw8l1fVj26bC+FxI969ISWoV/riVJElSAyVV4anwH0ak611iYVwO7NTVdF2SWpNNuyRJkgAIAvigdu368oh0fftUOC4X0vzLUZJanT96JUmSRHFVeCr8R8vqx1Jj4dgc2NF0XZKixqZdkiSpEwsCeK8kfCm3yHR9x67hhr2rfy1KUlT5Y1iSJKmTKqqCKfnwSUS63jU2PBV+h67Rq0uSVM+mXZIkqZMJAni3GB5aCKUR6fpOXcObzaX6F6IktRn+SJYkSepECivh3nyYtbx+LC0OjsuB7U3XJanNsWmXJEnqBIIAZhTDwwVQVlM/PiwNjskJX9JNktT22LRLkiR1cEtr0/XPI9L19DgYnwvbpkavLknS+tm0S5IkdVBBAG8WwaMLYUVEuj48DY7OgRTTdUlq82zaJUmSOqDFlXDvAviytH4sIw6Oz4WtTdclqd2waZckSepAggCm16br5RHp+q7pcFS26boktTc27ZIkSR3Eogq4Jx++jkjXu8XBCXkwpEv06pIkbTibdkmSpHYuCOD1Qpi6qGG6vnttup5kui5J7VZMtAv46aefOP744+nevTvJyclsvfXWzJw5s+7+IAj485//TI8ePUhOTmbfffdl9uzZDZ5jyZIljB8/nrS0NDIyMjj11FNZtmxZa38okiRJrW5hBdw4Hx4oqG/YM+Ph/F
5wfJ4NuyS1d1Ft2pcuXcquu+5KfHw8zz33HF988QU33HAD3bp1qzvmb3/7GzfffDMTJ07k3XffpUuXLowaNYoVK1bUHTN+/Hg+//xzXnrpJZ555hneeOMNfvWrX0XjQ5IkSWoVQQCvLIUr58A3EdPh98yAy/vCYKfDS1KHEAqCIIjWyS+55BLeeustpk+fvsb7gyCgZ8+eXHjhhVx00UUAFBUVkZuby6RJkxg3bhxffvklW265Je+//z5Dhw4F4Pnnn+eggw5i/vz59OzZc711FBcXk56eTlFREWlpac33AUqSJLWAggqYvAC+Lasfy4qHE3JhkM26JLULje1Do5q0P/XUUwwdOpSxY8eSk5PD9ttvzx133FF3/w8//MCCBQvYd99968bS09MZNmwYM2bMAGDGjBlkZGTUNewA++67LzExMbz77rtrPG95eTnFxcUN3iRJ7VxNDfzlL3DAAXD//dGuRmoRNQG8vAT+Mqdhw75XBvy5nw27JHVEUW3av//+e2699VYGDhzICy+8wJlnnsm5557L5MmTAViwYAEAubm5DR6Xm5tbd9+CBQvIyclpcH9cXByZmZl1x6zq2muvJT09ve6td+/ezf2hSZJa25QpcPnl8MILcPzx8NVX0a5IalYLyuHv8+CRhVBZO08yOx4u7A3jciEx6jsVSZJaQlR3j6+pqWHo0KH89a9/BWD77bdn1qxZTJw4kQkTJrTYeS+99FIuuOCCuveLi4tt3CWpvSsogFAovNA3CGDRomhXJDWLmgBeXgpPLoKq2mY9BOzdDQ7LslmXpI4uqj/me/TowZZbbtlgbPDgwcydOxeAvLw8APLz8xsck5+fX3dfXl4eBQUFDe6vqqpiyZIldcesKjExkbS0tAZvkqR27qSTYJttwrePOQaGD49qOVJz+KUcrp8Ljy2sb9hzEuCi3nB0jg27JHUGUf1Rv+uuu/L11183GPvmm2/o27cvAP379ycvL49XXnml7v7i4mLeffddhtf+MTZ8+HAKCwv54IMP6o559dVXqampYdiwYa3wUUiS2oTu3eHjj6G8HB58EGK9zpXar5oAnlsMV/8Ic2ovmBMC9usGl/WFzVKiWp4kqRVFdXr8b3/7W0aMGMFf//pXjj76aN577z1uv/12br/9dgBCoRDnn38+V199NQMHDqR///5cdtll9OzZk8MPPxwIJ/MHHHAAp59+OhMnTqSyspJzzjmHcePGNWrneElSB5OQEO0KpI3yczlMWgA/1l/dltwEOCkPNk2OXl2SpOiI6iXfAJ555hkuvfRSZs+eTf/+/bngggs4/fTT6+4PgoDLL7+c22+/ncLCQnbbbTf+85//sPnmm9cds2TJEs455xyefvppYmJiGDNmDDfffDOpqamNqsFLvkmSpGirDuCFJfDM4vBtCKfr+2fCod0h3qnwktShNLYPjXrT3hbYtEuSpGiavyKcrs8rrx/rkQAT8qC/6bokdUiN7UOjOj1ekiSpM6uqgeeXwP+WhNexA8SEYFQ3ONh0XZKETbskSVJUzKtN1+dHpOs9E8Nr1/smRa8uSVLbYtMuSZLUiqpqwsn686uk6wdmwkGZEGe6LkmKYNMuSZLUSn6sTdd/jkjXeyWG1673MV2XJK3BBr2WO336dI4//niGDx/OTz/9BMC9997Lm2++2azFSZIkdQRVNfDEQrhubn3DHhMK7wp/aR8bdknS2jW5aX/ssccYNWoUycnJfPTRR5SXh3/zFBUV8de//rXZC5QkSWrP5pTB1T/CcxHT4Xsnwh/7wCFZToeXJK1bk39NXH311UycOJE77riD+Pj4uvFdd92VDz/8sFmLkyRJaq8qa2Bqbbr+S0V4LDYEh2XBpX2hl+m6JKkRmrym/euvv2aPPfZYbTw9PZ3CwsLmqEmSJKld+74svHY9v6J+rG9SeO36JonRq0uS1P40uWnPy8vj22+/pV+/fg3G33zzTTbddNPmqkuSJKndqaiBJxfBK0uhdi
Y8cSE4pDuMygyvY5ckqSma3LSffvrpnHfeefz3v/8lFArx888/M2PGDC666CIuu+yylqhRkiSpzfu2FCbnQ0FEut4vKXzd9R6m65KkDdTkpv2SSy6hpqaGffbZh9LSUvbYYw8SExO56KKL+M1vftMSNUqSJLVZ5bXp+qurpOujs2C/bqbrkqSNEwqCIFj/YaurqKjg22+/ZdmyZWy55ZakpqY2d22tpri4mPT0dIqKikhLS4t2OZIkqZ34phTuWQALK+vHNk2GCbmQZ7ouSVqHxvahTd49/pRTTqGkpISEhAS23HJLdt55Z1JTU1m+fDmnnHLKRhUtSZLUVNU11ZSUl7TqOctr4MF8uGFefcMeH4Kx2XBxbxt2SVLzaXLTPnnyZMrKylYbLysr45577mmWoiRJkhrj2yXf0ueffUi7Lo0znj6DDZxA2CRfLYe/zIFphfVjA5Lhsn6wr5vNSZKaWaPXtBcXFxMEAUEQUFJSQlJS/cVFq6urefbZZ8nJyWmRIiVJktbk/977P/KX5QNw+4e387tdf8eAzAEtcq4V1TB1EbxeWD8WH4IjsmGvDJt1SVLLaHTTnpGRQSgUIhQKsfnmm692fygU4sorr2zW4iRJktalZ9ee1AQ1hAiREJtAt+RuLXKeL5fDvfmwOGLt+sBkODEPchJa5JSSJAFNaNqnTZtGEATsvffePPbYY2RmZtbdl5CQQN++fenZs2eLFClJkrQm5+9yPsXlxXyx6AvO3ulsMpMz1/+gJlhRDY8uhOlF9WOJMXBEFozMgJDpuiSphTV59/gff/yR3r17ExPT5OXwbZa7x0uSpFV9vhzuXQBLq+rHtkiBE3Mhy3RdkrSRGtuHNvk67X379gWgtLSUuXPnUlFR0eD+bbbZpqlPKUmS1GaU1qbrb62Srh+VDbunm65LklpXk5v2hQsXcvLJJ/Pcc8+t8f7q6uqNLkqSJCkaZi0Lr10vjEjXB6fACXnQPT56dUmSOq8mz3E///zzKSws5N133yU5OZnnn3+eyZMnM3DgQJ566qmWqFGSJKlFlVbDpF/glp/qG/akGDg+F87rZcMuSYqeJiftr776Kk8++SRDhw4lJiaGvn37st9++5GWlsa1117LwQcf3BJ1SpIktYhPl8F9+VAUka4P6RJu2DNt1iVJUdbkpn358uV112Pv1q0bCxcuZPPNN2frrbfmww8/bPYCJUmSWsLyanioAN4trh9LjoGxOTAizbXrkqS2oclN+xZbbMHXX39Nv3792Hbbbbntttvo168fEydOpEePHi1RoyRJUrP6uASmFEBxRLq+VW263s10XZLUhjS5aT/vvPP45ZdfALj88ss54IADmDJlCgkJCUyaNKm565MkSWo2y6rgwQJ4v6R+LCUWjsmGYabrkqQ2qMnXaV9VaWkpX331FX369CErK6u56mpVXqddkqSO78MSuD8fSiIudLNNKozPgQzTdUlSK2ux67SvKiUlhR122GFjn0aSJKlFlFTBAwXwQUS63iUWjsmBnbuarkuS2rYmN+1BEPDoo48ybdo0CgoKqKmpaXD/1KlTm604SZKkDRUE4Ub9gQJYFpGub5cK43MhbaOjC0mSWl6Tf12df/753Hbbbey1117k5uYS8uVpSZLUxhRXhafCf7SsfqxLLBybA0NN1yVJ7UiTm/Z7772XqVOnctBBB7VEPZJa21dfwbx5sMcekJgY7WokaaMEQXiTuQcLwpd0W2mHrnBcDnQ1XZcktTNN/tWVnp7Opptu2hK1SGpt//sfjB4NNTXhpv2114yfJLVbRVUwJR8+iUjXu8bCsbmwY9fo1SVJ0saIaeoDrrjiCq688krKyspaoh5JrenBB+tvv/EG/Pxz9GqRpA0UBPBOEVwxp2HDvlNXuLyfDbskqX1rctJ+9NFH88ADD5CTk0O/fv2Ij294jZQPP/yw2YqT1MJ23x3uuy+crvfrBzk50a5IkpqksBLuy4fPltePdY0NbzS3vc26JK
kDaHLTPmHCBD744AOOP/54N6KT2rvTT4fcXPj+ezjuOIj3QsWS2ocggBnF8HABlEVcyGZYWvhSbl1io1ebJEnNKRQEQdCUB3Tp0oUXXniB3XbbraVqanWNvai9JEmKvqWVcG8+fB6RrqfFwfG5sG1q9OqSJKkpGtuHNjlp7927t42tJElqdUEAbxXBIwthRUS6vksaHG26LknqoJq8Ed0NN9zA7373O+bMmdMC5UiSJK1ucSX8a344YV/ZsGfEwTmbwMk9bNglSR1Xk5P2448/ntLSUgYMGEBKSspqG9EtWbKk2YqTJEmdWxDA9CJ4dCGUR6Tru6bDUdmQYrMuSergmty0//Of/2yBMiRJkhpaVBFO1r8qrR/rFgcn5MGQLtGrS5Kk1rRBu8dLkiS1lCCA1wth6qKG6fru6TAmG5JN1yVJnUijmvbi4uK6zeeKi4vXeayb1EmSpA21sALuyYdvItL1zHg4MRcGm65LkjqhRjXt3bp145dffiEnJ4eMjIw1Xps9CAJCoRDV1dXNXqQkSerYggCmFcLji6AiIl3fIwPGZEGS6bokqZNqVNP+6quvkpmZCcC0adNatCBJktS5FFTA5AXwbVn9WPfadH2Q6bokqZNrVNO+55571t3u378/vXv3Xi1tD4KAefPmNW91kiSpw6oJ4NWl8MQiqAzqx0dmwJHZkNjkC9NKktTxNHkjuv79+9dNlY+0ZMkS+vfv7/R4SZK0XvkVMGkBfB+RrmfFw4Q82DwlenVJktTWNLlpX7l2fVXLli0jKSmpWYqSJEkdU00ALy+FJxdBVW26HgL27gaHZZmuS5K0qkY37RdccAEAoVCIyy67jJSU+pfBq6ureffdd9luu+2avUBJktQx/FIeXrv+w4r6sZwEmJALm5muS5K0Ro1u2j/66CMgnLR/9tlnJCQk1N2XkJDAtttuy0UXXdT8FUqSpHatJoAXl8DTixum6/vUpusJpuuSJK1Vo5v2lbvGn3zyyfzrX//yeuySJGm9fi4Pr13/MSJdz00Ir10fkBy9uiRJai+avKb97rvvbvB+cXExr776KoMGDWLQoEHNVpgkSWq/qgN4YQk8szh8G8Lp+n6ZMLo7xJuuS5LUKE3+lXn00Ufz73//G4CysjKGDh3K0UcfzdZbb81jjz3W7AVKkqT2Zf4KuPbH8GZzKxv2Hgnw+z4wJtuGXZIk/vEPGDKkUYc2+dfmG2+8we677w7A448/ThAEFBYWcvPNN3P11Vc39ekkSVIHUVUDzyyCa+bCvPLwWAg4MBP+2Bf6Ox1ekiT46iu4+GKYP79Rhze5aS8qKiIzMxOA559/njFjxpCSksLBBx/M7Nmzm/p0kiSpA5i3Aq6dG95srqY2Xe+ZCJf2hcNN1yVJqhcETTq8yb9Ce/fuzYwZM1i+fDnPP/88+++/PwBLly71Ou2SJHUyVTXw1CL461yYX5uux4TgoO7wxz7Q1z8NJElqaPBguO466NGjUYc3eSO6888/n/Hjx5Oamkrfvn0ZOXIkEJ42v/XWWzf16SRJUjv144rwddd/Kq8f65UY3hm+j826JElr9/vfw5lnQnr6eg9tctN+1llnMWzYMObOnct+++1HTEw4rN90001d0y5JUidQVRPeFf6FpfVT4WNCcFBmeP16nFPhJUlqNqEgaOKE+g6ouLiY9PR0ioqKvP68JEnrMKcsfN31Xyrqx3rXpuu9TdclSWq0xvahjX4tfMstt2TJkiV175911lksWrSo7v2CggJSUlI2sFxJktSWVdbA1IVw3dz6hj02BIdlhTebs2GXJKllNLpp/+qrr6iqqqp7/7777qO4uLju/SAIWLFiRfNWJ0mSou77Mrj6R3hhCaycntc3KXwZt4O6h5t3SYq6qlL46GJ4cxwsnhntaqRm0+Q17SutaVZ9KORvbUmSOorKGnhyEby8tL5ZjwvBId1hVGZ4HbsktRmfXQ5f3hi+veBFODIfYuKjW5PUDDa4aZckSR3Xd2
XhneHzI9au90sKr13vmRi9uiRprUrn196ogYqlUF1m064OodFNeygUWi1JN1mXJKljqaiBJxbBq6uk66OzYL9upuuS2rDBF8EvL4Qb9iF/hHg3mFbH0OimPQgC9tlnH+Liwg8pKyvj0EMPJSEhAaDBendJbcjPP8NvfwtFRfDXv8IOO0S7Iklt1OzScLq+sLJ+bNNkmJALeabrktq6zB3DU+KrV0B812hXIzWbRjftl19+eYP3DzvssNWOGTNmzMZXJKl5nX02PP00BAF8+SX8+GO0K5LUxpTXwOMLYVph/Vh8CA7Pgr1N1yW1JzHxTolXh7PBTbukdmLJknDDXlMDS5dGuxpJbczXpXDPAlgUka4PSA6vXc9NiF5dkiQpzI3opI7u2mvh8MNh+XL4v/+LdjWS2ogV1TB1EbxeWD8WH4IjsmGvDNN1SZLaCpt2qaMbMQLy88O33TxSEvDlcrg3HxZHpOsDk+HEPMgxXZckqU2xaZc6A5t1SYTT9UcXwvSi+rGEGDgyC0Zm+KNCkqS2yKZdkqRO4PPlcO8CWBpxsZfNU8I7w2eZrkuS1GbZtEuS1IGVVcMjC+GtiHQ9MQbGZMMe6abrkiS1dY1q2m+++eZGP+G55567wcVIkqTmM2tZeO16YUS6PiglvHa9u1dEkiSpXQgFQRCs76D+/fs37slCIb7//vuNLqq1FRcXk56eTlFREWlpadEuR5KkjVJaDQ8XwIzi+rGkGDgqG3YzXZckqU1obB/aqKT9hx9+aLbCJElSy/l0GdyXD0UR6fqWXeCEXMg0XZckqd3Z4DXtFRUV/PDDDwwYMIC4OJfGS5IUTcur4aECeHeVdP3oHBiRZrouSVJ7FdPUB5SWlnLqqaeSkpLCkCFDmDt3LgC/+c1vuO6665q9QEmStG4fl8AVcxo27Ft1gSv6wa5Oh5ckqV1rctN+6aWX8sknn/Daa6+RlJRUN77vvvvy0EMPNWtxkiRp7ZZVwZ0/w60/Q3HtdPiUWDgpD87ZBLo5HV6SpHavyfPan3jiCR566CF22WUXQhEv3Q8ZMoTvvvuuWYuTJElr9mEJ3J8PJdX1Y9ukwvgcyLBZlySpw2hy075w4UJycnJWG1++fHmDJl6SJDW/kip4oAA+KKkfS4mFcTmwc1enwkuS1NE0eXr80KFD+d///lf3/spG/c4772T48OHNV5kkSR3AJws+4eo3ruaNH9/YqOcJAphZHF67Htmwb5cKV/aDYW42J0lSh9TkpP2vf/0rBx54IF988QVVVVX861//4osvvuDtt9/m9ddfb4kaJUlql+YUzmHYncOoqK4A4M1T3mRE7xFNfp7iqvBU+I+W1Y91iYVjc2Co6bokSR1ak5P23XbbjY8//piqqiq23nprXnzxRXJycpgxYwY77rhjS9QoSVK79Fn+Z5RXlxPU/vf+T+836fFBAO/VpuuRDfsOXcM7w+9kui5JUoe3QRdYHzBgAHfccUdz1yJJUoeye9/d6ZPeh7lFc0lPTOfQLQ5t9GOLqmBKPnwS0aynxsJxubBj1xYoVpIktUmNatqLi4vXf1CttLS0DS5GkqSOJCMpg8/O/IyZP89k65ytye6Svd7HBEH4eusPLYTSiJ3hh3YNbzbXdYNebpckSe1Vo371Z2RkNHpn+Orq6vUfJElSJ5GWmMbe/fdu1LGFlXBfPny2vH6sayyMz4XtTdclSeqUGtW0T5s2re72nDlzuOSSSzjppJPqdoufMWMGkydP5tprr22ZKiVJ6sCCAGYUw8MFUFZTP75zWjhd7xIbvdokSVJ0hYIgCJrygH322YfTTjuNY489tsH4/fffz+23385rr73WnPW1iuLiYtLT0ykqKnJ6vySpVS2tTddnRaTraXFwfC5smxq9uiRJUstqbB/a5N3jZ8yYwdChQ1cbHzp0KO+9915Tn06SpE4pCOCtovDO8JEN+y5p4Z3hbdglSRJsQNPeu3fvNe4cf+edd9K7d+9mKUqSpI5sSS
Xc/BPcswBW1E6Hz4iDszeBk3s4HV6SJNVr8h60N910E2PGjOG5555j2LBhALz33nvMnj2bxx57rNkLlCSpowgCmF4Ejy6E8oi16yPSYWw2pNisS5KkVTR5TTvA/Pnz+c9//sNXX30FwODBg/n1r3/dbpN217RLklra4spwsv5Vaf1Yt9q161s5FV6SpE6nsX3oBjXtHY1NuySppQQBvF4IUxc1TNd3S4ejsiHZdF2SpE6psX1ok6fHAxQWFnLXXXfx5ZdfAjBkyBBOOeUU0tPTN6xaSZI6oIUVcE8+fBORrmfGwwm5sGWX6NUlSZLajyYn7TNnzmTUqFEkJyez8847A/D+++9TVlbGiy++yA477NAihbYkk3ZJUnMKAnitMJyuV0Sk63tkwJgsSDJdlySp02ux6fG77747m222GXfccQdxceGgvqqqitNOO43vv/+eN954Y+MqjwKbdklScymoCK9dn11WP9Y9Hk7MhUGm65IkqVaLNe3Jycl89NFHDBo0qMH4F198wdChQyktLV3LI9sum3ZJ0saqCeDVpfDEIqiM+M06MgOOzIbEJl9kVZIkdWQttqY9LS2NuXPnrta0z5s3j65duza9UkmS2rn8Cpi8AL6LSNez4mFCHmyeEr26JElS+9fkpv2YY47h1FNP5R//+AcjRowA4K233uLiiy/m2GOPbfYCJUlqq2oCeHkpPLVKur53Nzg8y3RdkiRtvCY37f/4xz8IhUKceOKJVFVVARAfH8+ZZ57Jdddd1+wFSpLUFv1SHk7Xf1hRP5aTEF67PtB0XZIkNZMNvk57aWkp3333HQADBgwgJaX9/oXimnZJUmPVBPBSbbpeVfsbNATs0w0Oy4IE03VJktQILXqddoCUlBS23nrrDX24JEntzs+16fqciHQ9NyG8dn1AcvTqiobP8j/jzg/vZFDWIM4YegYxIV+tkCSpJTS6aT/llFMaddx///vfDS5GkqS2qDqAF5bA/xY3TNf3y4TR3SG+k/WryyqWsfvdu7OsYhnVQTXVQTXn7HxOtMuSJKlDanTTPmnSJPr27cv222/PBs6olySp3Zm/Aibnw9yIdL1Hbbrev5Ol6yvlL8unqLwIgNhQLJ8XfB7liiRJ6rga3bSfeeaZPPDAA/zwww+cfPLJHH/88WRmZrZkbZIkRU11AM8thmeXhG9DOF0flQmHdMJ0PdKm3Tbl0M0P5elvniYpLonTdjgt2iVJktRhNWkjuvLycqZOncp///tf3n77bQ4++GBOPfVU9t9/f0KhUEvW2aLciE6SFGneCpi0AOaX14/1TIQJudCvk6brqwqCgK8Xf01eah4ZSRnRLkeSpHansX3oBu8e/+OPPzJp0iTuueceqqqq+Pzzz0lNTd3ggqPJpl2SBFBVA88tCafrNbW/HWNCcEAmHJwJcZ04XZek9Zo7F157DUaMgM02i3Y1UpvX4rvHx8TEEAqFCIKA6urqDX0aSZLahB9XhHeG/ykiXd8kMbx2vW9S9OqSpHZh/nzYaisoKYGkJPjoIxg0KNpVSR1CkzKD8vJyHnjgAfbbbz8233xzPvvsM/79738zd+7cdpuyS5I6jodmPcT5z5/P2/PebvRjqmrgiYVw3dz6hj0mFF63/oc+NuyS1Chvvx1u2AFWrAgn7pKaRaOT9rPOOosHH3yQ3r17c8opp/DAAw+QlZXVkrVJktRoT3/9NOMeG0dsKJb/vP8fvj33W/qk91nnY+aUhXeG/zkiXe9dm673tlmXpMYbPhy6dq1P2keOjHZFUofR6KZ94sSJ9OnTh0033ZTXX3+d119/fY3HTZ06tdmKkySpsWYVzCJEqO664d8t+W6tTXtlDTy9GF5cAis3dokNwcHdw+vXY9vv3qqSFB29e8Onn9avad9882hXJHUYjW7aTzzxxHa9Q7wkqWMbt9U4bnznRhaVLmKHHjswvPfwNR73fVl47fqCivqxPklwUl54DbskaQP16wcnnRTtKqQOZ4N3j+
9I3D1ekjqGZRXLmFM4h0FZg4iLafi6dGUNPLUYXopI1+Nq167vb7ouSZJaWYvvHi9JUluTmpDKVjlbrTb+XW26nh+RrvdLCq9d72m6LkmS2jCbdklSh1VRA08sgleXNkzXR2fBft3Cu8RL2gClP8Hbx0PpXNj2Wuh7dLQrkqQOq0mXfGtJ1113HaFQiPPPP79ubMWKFZx99tl0796d1NRUxowZQ35+foPHzZ07l4MPPpiUlBRycnK4+OKLqaqqauXqJUltzexSuOpHeCWiYd80Gf7UF0Zl2rBLG+XTP8PC6bDse5hxAlSVRbsiSeqw2kTT/v7773PbbbexzTbbNBj/7W9/y9NPP80jjzzC66+/zs8//8yRRx5Zd391dTUHH3wwFRUVvP3220yePJlJkybx5z//ubU/BElSG1FeAw/mwz/mQUHtdPj4EByVDRf3hh5Oh5eaQcSWSO15e6Rrr4X0dNhlF1iwINrVSNIaRb1pX7ZsGePHj+eOO+6gW7dudeNFRUXcdddd3Hjjjey9997suOOO3H333bz99tu88847ALz44ot88cUX3HfffWy33XYceOCBXHXVVfzf//0fFRUVazulJKmD+qYU/jIHphXWjw1Ihsv6wX6m61Lz2eYv0H0X6NIXhk+GuORoV9R0c+fCH/4AxcUwcybccEO0K5KkNYp603722Wdz8MEHs++++zYY/+CDD6isrGwwPmjQIPr06cOMGTMAmDFjBltvvTW5ubl1x4waNYri4mI+//zztZ6zvLyc4uLiBm+SpParvAYeyIcb5sGiyvBYfAiOzoGLekNuQnTrkzqclF6w/5tw2Bzod2y0q9kwiYkQU/uncBBASkp065GktYjqRnQPPvggH374Ie+///5q9y1YsICEhAQyMjIajOfm5rKgdvrSggULGjTsK+9fed/aXHvttVx55ZUbWb0kqS34ajnckw+LK+vHBibDiXmQ08hm/dsl3/Jp/qeM7DeSzOTMlilUUtuSmwv33AP/+AcMGQIXXxztiqToeuMNePttGD0attwy2tUoQtSa9nnz5nHeeefx0ksvkZSU1KrnvvTSS7ngggvq3i8uLqZ3796tWoMkaeOsqIbHFsEbhfVjCTFwZBaMzIBQI6fCz/x5JiPuGkFlTSV90vsw68xZdE3s2hIlS2prxo8Pv0md3RtvwMiR4dtXXQWzZ0PPnlEtSfWiNj3+gw8+oKCggB122IG4uDji4uJ4/fXXufnmm4mLiyM3N5eKigoKCwsbPC4/P5+8vDwA8vLyVttNfuX7K49Zk8TERNLS0hq8qYP65hs47TT4/e9h2bJoVyOpmXyxHK6Y07Bh3zwFLu8Le3VrfMMO8Mw3z1AdVAMwt2guHy34qFlrlSSpzatdfkwQQGkpzJoV3XrUQNSa9n322YfPPvuMjz/+uO5t6NChjB8/vu52fHw8r7zySt1jvv76a+bOncvw4cMBGD58OJ999hkFBQV1x7z00kukpaWxpVM6FASw334waVJ46ltHnPZ2993Qqxfsvjv8/HO0q5FaXFk13LMA/jUfltZe3TMxBo7LhQt6QdYGrF0f2W8kQe3u192Tu7NVzlbNWLEkSe3AYYfV7+vQr1/4igpqM6I2Pb5r165stVXDP4y6dOlC9+7d68ZPPfVULrjgAjIzM0lLS+M3v/kNw4cPZ5faL6L999+fLbfckhNOOIG//e1vLFiwgD/96U+cffbZJCZ6TZ9Or6YGfvoJqqvDsdt330W7ouZVXAynnx7++BYsCE9luvXWaFcltZhZy+C+/PpmHWBQSnjtevf4DX/ekf1GMuPUGcz8eSaHbnGoa9olSZ3PoEHhKfGffRZu2J2J3KZEdSO69bnpppuIiYlhzJgxlJeXM2rUKP7zn//U3R8bG8szzzzDmWeeyfDhw+nSpQsTJkzgL3/5SxSrVpsRGwt//jNcfjkkJYWnyHckoVDDOcAxUb8YhNQiSqvhkYXwdlH9WFJM+Lrru6U3bSr82g
zrNYxhvYZt/BNJktRe9egRflObEwpWzgnsxIqLi0lPT6eoqMj17R3R4sXhpr1Ll2hX0vzuvz/8wsSAAeEdcFe5moLU3n1am64XRaTrW3aBE3IhcyPSdUmSpGhrbB9q045NuyS1Ncur4eECeKe4fiwpJnzd9RFpzZOuS5IkRVNj+9A2PT1ektT5fFwCUwqgOCJd36oLHJ8L3UzXJUlSJ2PTLklqE5ZVwYMF8H5J/Vhybbo+3HRdkiR1UjbtkqSo+7AE7s+Hkur6sW1SYXwOZJiuS5KkTsymXZIUNSW16frMiHQ9JRbG5cDOXU3XJUmSbNolSa0uCOCDEnigAJZFpOvbpsL4XEj3t5MkSRJg0y5JamXFVeFm/cOIdL1LLBybA0NN1yVJkhqwaZcktYogCE+Df6AgfEm3lbZPheNyIc3fSJIkSavxTyRJUosrqoIp+fDJsvqx1Np0fUfTdUmSpLWyaZcktZgggHeL4aGFUBqRrg/tGt5srqu/hSRJktbJP5ckSS2isBKmFMCnEel619jwVPgdukavLkmSpPbEpl2S1KyCAGYUwyOrpOs7p8Ex2ZDqbx5JkqRG808nSVKzWVoJ9+XDrOX1Y2lxMD4HtjNdlyRJajKbdkntS3k5fPklDBwIXbpEuxrVCgJ4uxgeLoAVNfXju6TB0TnhS7pJkiSp6Wzapcb68EN4+WXYd1/YYYdoV9M5FRfDTjvBN99Az57wwQeQlxftqjq9JZVwbz58EZGuZ8TB+FzYJjV6dUmSJHUENu1SY3z5JeyyC1RWQnw8fPwxbLlltKvqfF5+OdywA/z8MzzxBPz611EtqTMLApheBI8tbJiuj0iHsdmQYrouSZK00WzapcZ4//1www7h/7//vk17NAwaBLG1nWB1NWy1VXTr6cQWV8I9C+Cr0vqxjDg4IRe2Ml1XZ1O+BN47A5bPga0vh00OiXZFkqQOxKZdaox994Xu3WHx4vD/99032hV1TltuGU7bn3oK9t4bdtst2hV1OkEArxfC1EVQHpGu75YOR2VDsum6OqPPLof5U2unnxwFRy2BuJRoVyVJ6iBs2qXG6NkTvvoKZs6EHXeE7OxoV9R5jRwZflOrW1QBk/Phm4h0vVscnJAHQ9wTUJ1VEMDPz0FQ+ypWTQUEVdGtSZLUodi0S42VlQUHHBDtKqRWFwTwWmE4Xa+ISNf3yIAxWZBkuq6NEQRQVQJxXSEUinY1TVc0C5Z9V/9+3n4Qn9ay55w7F6ZMgS22gCOOaJ//bpKkRrNplyStVUFFeO367LL6se7xcGIuDDJd18YqXwwvjww3vj0OgD2fgpj4aFfVNAndIRQHQTUQQK/RLXu+FStg2DDIzw+/4HH33XDSSS17TklSVMVEuwBJUttTE8DLS+Avcxo27CMz4M99bdjVTObcD0Wfh2//8jzkvxbVcjZISs/wiw29Doetr4DNzmjZ8/30EyxYEG7YY2NhxoyWPZ8kKepM2iVJDeRXwOQF8F1Es54VDyfmwRburaXmlNwTCIDa6d3JPaJZzYbreWD4rTX07w+77gpvvQUxMXDssa1zXklS1Ni0S5KA2nR9KTy1CCqD+vG9MuCIbEh0bpaaW+8jYYcboeAN6HMMZHgZx/WKiYFXX4W334Z+/cJvkqQOLRQEQbD+wzq24uJi0tPTKSoqIi2thTePUUNB4AY6Le2rr8LrHUtK4JZbwpdKk1bxSznckw/fR6Tr2fEwIQ8Gmq5LkqRomz8fbrsNevSAX/0K4tp//tzYPrT9f6Rqn8rL4aij4NlnYdQomDoVkpKiXVXH9JvfhC9VV1MDxx0XXgsp1aoJ4KXadL2q9iXcELBPNzgsCxJM1yVJUrTV1MAee4SvnlFdDQsXwuWXR7uqVuOfY4qOp5+GZ54JfwM+9xw88US0K+q4amrCMxpW3m5PggBmngcPd4VX94PKkmhX1KH8XA7Xz4WpC+sb9twEuLgPjM2xYZckSW1EaSn88EO4YQ+F4KOPol1Rq/
JPMkVHRkbD97t1i0oZncK//gVbbx3evOjee6NdTdMsfg++uRmqlsGCV+C7u6JdUYdQE8Bzi+GaH2HOivBYCNg/Ey7rCwOSo1qeJElSQ6mpcPzx4duxsXD66dGtp5U5PV7Rsc8+8Pe/hxP2Qw+F/fePdkUd11Zbwccft/x5qqvhrrvgl1/CP0h79tz454yLXEwdrPK+NsT8FTA5H+auqB/LSwivXd/UZl2SJLVV99wDF10EWVmwySbRrqZVuREdbkQnNYurroI//zm8s3H//vDNN+HbG+vrW+C7OyF7d9jxJoiJ3/jn7ISqA3h+Cfxvcfg21Kfrh3aHeOddSZIktSo3opPUuj74ILzGqKYGvvsuvPYoNXXjn3eL34TfWsPK1zA72BUN5q0IX3d9Xnn9WI8EOCkP+pmuS5IktWlmK5Kaxymn1Cfrxx7bPA17ayp4E6bmhDe9m/NAtKtpFlU18PQi+Ovc+oY9JgQHdYc/9bVhlyRJag9M2iU1j9Gj4fvvw5fg2H77aFfTdB//DsoXAwHMPBv6HbtBTxMEAf/96L/MKpjFSdudxLZ52zZvnY00tzZdnx+Rrm+SGF673terK0qSJLUbJu2Smk+fPrDjjs2zlr21JXSHUAwQA/EbfjWD/370X057+jRuee8Wdrt7NwpXFDZbiY1RVQNPLoJr59Y37DEhOKQ7/KFP4xr2D3/5kPFTx3PJy5dQVlnWovU+8NkD9L6xN8PuHMacwjktei5JkqT2yKRdkgB2vhU++G348nLbXbfBTzOrYBaxoViqg2qWVSzjp+KfyEjKaL4612FOWXhn+J8j0vVeieG1670bma5XVFew3z37UVheCIRnDly/3/XNXyxQXlXOSU+eREV1Bb8s+4U/vfon7jvyvhY5lyRJUntl0y5JACm9YPdHNvppTtruJO786E6WVSxj3033ZXD24GYobt0qa+CZxfDCElh5OZDYEByUCQd2D99urLLKMpasWAJATCiGucVzm7/gWqFQiJhQ/ayMuJhW/pUUBPDl32H+E7DJaNjy92vfhLCmEkJxHW6TQkmS1Pa1wzmsarO++w7+/e/wLuJSJ7Vt3rbM++08Zp05ixeOf6FBU9oSvi+Da34MX85tZcPeJyk8Ff6QrKY17ADpSen8bsTvAEhLTOPC4Rc2b8EREmITeGDMA2zRfQtG9hvJNXtf02LnWqP8V+Dj38OiGfDJpfDLC2s+7st/wEPJMDUPlnzUujVKa5KfDzfdBE88UX/VC0lSh+V12vE67c3il19giy2gpCS8nnnGDNh552hXJXVYlTXw1GJ4aZV0/dDu4WuvN7VZX1XhikJS4lNIiE3Y6FrbrLmPwptj698f8QD0G9fwmKoyeDgVqAFioPeRzTIjQ9pgVVWw+eYwZ064Yb/tNvjVr6JdlSRpAzS2DzVp18YpLISjjoJddgk37BC+Tvf06VEtS+rIviuDq36EFyMa9r5J4cu4NXU6/NpkJGV07IYdwlPiNzkUQrHQ82DofcTqx8TEQ3waEAMhICm7tauUGioogB9+CDfsMTH+vpWkTsA17do4114bnp5XXV0/lpwMBxwQtZKioqoqPLugRw/YbLNoV6MOqqJ2Z/hXltY363EhGJ0F+3UL7xKvJohNgD2fCjc/a1urHhMHI5+Fz6+B5J6w7V9bt0ZpVT16wB57wBtvhN8fN27dx0uS2j2bdm2c8vKG7//3v7DXXtCvX1TKiYoggCOOgGeeCacejz8evma51Ixml8I9+VBQUT/WPyl83fUeidGrq0NY3+Zy2cNh5DOtU4u0PqEQvPhiuGnv0ye8NE2S1KG5ph3XtG+UX36BI4+Eb76Byy6D88+PdkWtb8kS6N49fDsUgjFj4BHXvKp5lNfAE4tgWkS6Hl+bru9rui5JktRuNbYPNWnXxunRIzwtvDNLT4cBA8JrDGtqYPjwdR9fUQG33gpLl8KZZ0JubuvUqXbnm1KYvAAWVdaPDUgOp+u5HXy5uSRJksJs2q
WNFRsLb74JkydD795w7LHrPv73v4d//Ss8lf7JJ+EjLyGlhsprYOpCeK2wfiw+BEdkw14ZpuuSJEmdiU27OrdvvoHZs8Pr8FNSNvx58vLCzXhjfPBBeB18dTV8+um6N8ESFM6CeVOh+zDoOSra1bS4r5aH164vjkjXBybDiXmQE+V0fca8GZz+9OnExsRy92F3s0OPHaJbkCRJUifgJd/UeU2bBltuCYccEp7SXlm5/sc0h7POCqfsK2/bsK9d2S/wwjD47Ep47QD45aVoV9RiVlTDlHy4aX59w54QA+Ny4MLezduwv/rDq+z+390Z9+g4FpUuavTjTnriJL5c+CWzCmZx+tOnN19BkiRJWiuTdrVPxcVw6aUwfz784Q8wbFjTn+Oxx+pvf/ppOHHfcsvmq3Ftxo0Lv0hQUgJDhrT8+dqzoi+hurT2nRAsfg967BfVklrCF8vh3nxYEvG60eYpcGIuZDdzul5ZXclhDxzG8srlxIRiSE9M57ZDb2vUY4Pa/0KEcA9TSZKk1mHTrvbpT3+CiRPDt994AxYuhLgmfjnvuSf83/+Fb/fsCf37N2+N69K3b+udqz3LGgZdt4CSryGuC/Q+MtoVNauyanh0IbxZVD+WGANHZsGeGS0zCaM6qKasqqyuAS8qL1r/g2rdfdjdnP706cTFxHHHoXc0f3GSJElajU272qf8/PD/a2rCqXtFRdOb9rFjw9e6/eKL8O3k5OavUxsnrgsc+CEsmQlpgyApJ9oVNZtZy+C+fFhaVT82KAVOyIWsFly7nhSXxM0H3szFL11Mj9QeXL7n5Y1+7K59duWLs79oueIkSZK0Gq/Tjtdpb5c++ghGjYLFi+Hqq8NT5aV2oLQaHlkIb6+Sro/Nht3S3eJAkiSps/A67erYtt8eFiwIbx6XmBjtaqRG+XRZeLO5woh0fXAKnJAH3eOjV5fagKpSCKog3heOJUlSQzbtar9iYmzY1S4sr4aHC+Cd4vqxpBg4OgdGpEU/Xa+qqaK6pprEuJb/fppTOIeS8hK2ytmKULQ/8LZi/lPw5tFQUwk7/RsGnhntiiRJUhviJd8kqQV9sgyumNOwYd+qC1zRD3ZtA9PhX5vzGt3/1p3Ua1O59f1bW/RcD3z2AANuHsA2E7fh3OfPbdFztSufXQE15UANfPLHaFcjSZLaGJt2tYyqKvjXv+DCC8OXUmsOr74a3vH9hBNg6dLmeU6phSyvhrt+gf/8BMW10+GTY2BCHpyzCXRrI9Ph/zztz5SUl1BVU8VFL13Uouf6z8z/UBPUAHDr+7d62biVuvSDUGz4LcUrS0iSpIacHq+W8be/hS/LFhMDU6bAvHkQvxFdSnk5jB4NpaXh50xLq79cW2t56SV47jk44ADYf//WPbfalY9KwmvXS6rrx7ZJhfE5kNFGmvWVenbtSUwohoCA3C65LXquoT2G8tbctwiFQmyTM8Tp8SsNuwM+7QnVZbDVZdGuRpIktTE27WoZs2aF5/1WV4cvz1ZUBFlZG/58VVVQVgYrk7nCwmYps9E+/DC8W31MDPzzn/D++7Djjq1bg9q8kip4sABmltSPpcTCuBzYuWv0p8Kvyb8P+jddE7pSuKKQK0Ze0aLnun7PP9B//r0sLVvMmd1+gtL5kNKrRc/ZLiR2D69llyRJWgObdrWMM86AqVPDCfn48RvXsAN06QI33giXXAI9eoRT/Nb0+efhFwyqq+vft2lXhA9K4IFV0vVtU2F8LqS34Z+0WSlZ3DH6jlY5V8Kitzg3ZTGkANWLYd4TsMU5rXJuSZKk9srrtON12lvM4sWwcCFssUXbjBibYvFiGDoU5syBvn3hgw+ge/doV6U2oLgKHiiADyPS9S6xcGwODG2j6XrUFM+G/w2BoBqogX2nQ85u0a5KHVFNDcycCdnZ0L9/tKuRJGmNvE67oq97947T2HbvDl98AV9/HX4RIjk52hUpyoIgPA3+gYLwpnMrbZ8Kx+VCmj9dV5c2EPZ9HX
56CnJG2rCr5YwbB488El7SNHUqHHZYtCuSJGmDuXu81FjJybDddhvfsH/8MfTpA6mpcNddzVFZ0+XnwzvvQEVFdM7fzhVXwcSf4c5f6hv21Fg4vQec0dOGfZ2yh8N210LPUdGuRB1VSUm4YYfwq2uTJkW1HEmSNpZNu9Ta/vQn+OknWL4czjqrfp18a/ngg/B00eHDw5fQq6pq3fO3Y0EA7xbD5XPg42X140O7hq+7PjTN6fBS1HXpApttFk7ZgwB22inaFUmStFHMg6TWlpER7uxCoXDaHtPKr53df399wv7OO+FN9bbdtnVraIcKK2FKAXwa0ax3jQ1Phd+ha/TqkrSKmBh4443wTKaePeGkk6JdkSRJG8WmXWptN9wQbpoXLoSrr279aHbHHcPp/srr3ffr17rnb2eCAN4phocXQmnEpIiduoYv5ZbqT1Gp7YnGVUYkSWoh/rkptbbcXHj44fr3586FJ5+E7beH3VphY67jjoOEBPj00/Dt9PSWP2c7tbQS7suHWcvrx9LiYHwObGe63vaUfAuLZoQ3uevSO9rVSJIkNQsv+YaXfFMUFRbCwIGwaFH4/RdegP33j2pJG2zx+zDrKkjKg+2vh4Ru0a5ogwUBvF0MDxfAipr68WFpcExO+JJuamOKvoLntoeaFRCfBgd/ASmbRLsqSZKktWpsH+pGdGqbFi+GCRPggAPC6647qq++qm/YY2LgtdeiWs4GC2rgtYPgp//B93fBx5dEu6INtqQSbv4J7llQ37Cnx8HZm8ApPWzY26yCaeGGHaCyGBa9Hd16OrI774TMTNhqK5g9O9rVSJLU4Tk9Xm3TRRfBlCm1F8OeGV7/3RG35d5mGxgwAL77DmJjYfToaFe0YYJqqFgK1EAQA2ULol1RkwUBvFkEjy5smK6PSIex2ZBis97Qsh/g/TOhchnscANkDYtuPTkjISYRasohritkDY9uPR1VWRmceWb4qhPFxXDFFeGf1ZIkqcXYtKttWrw43EXV1IT/MKyuhrgO+OWakhK+BNtrr8GQIeHLFLVHMfGw3d/g49+Fp8VvdVm0K2qSxZVw7wL4srR+LCMOTsiFrVI37DmDIOCF715gwbIFjBk8hq6JHWwR/PtnwYKXa1/tGAuHz41uPemD4aBPYOHbkLsXpPSKbj0dVWxseE+MlZeqTEmJbj2SJHUCHbALUodwxRXw3nuwdCnceGPHbNhXSk+Hww6LdhUbb/AFsMVvIBQLofax8iYI4I0ieGwhlEek67vWpuvJG5Gu3/LeLZz3/HkA3DbzNt4+9W1CjZwt8ubcN5k4cyJDsodw8a4XExfTBr/+q5aH/wGpCd9uC9K2CL+p5SQkwKOPwmWXQe/ecM010a5IkqQOrw3+JSgBO+wAv/wSbgpa+zrm2nAx8dGuoNEWVcDkfPgmIl3vFgcn5MGQLhv//C9890Ld7Xd+eoeyqjJS4tefSi4pW8J+9+5HRXUFNUENKfEpnLfLeRtfUHPb4YZwwl61DHa+I9rVqDUdeGD4TZIktQqbdrVdoVDHXMfeSay8MEVj0+UNMbdoLr9/+fdU1VRx7T7Xslnm+pcXBAG8VghTF0FFRLq+ezoclQ1JzbR2/YhBR/Ds7GcB2Kf/Po1q2AEWlS5iRVV4Q7XYUCxzCuc0T0HNrftOcNicaFchSZLU4dm0S2p2T339FCc8fgJxoTgeGvsQ+266b4uc56QnTuKNH98A4Pul3/PBrz5Y5/EFFeFd4WeX1Y91jw+vXR/cDOl6pNN2OI0h2UNYsGwBBw5sfCo5MHMg47cez5TPptA9uTu/Hvrr5i1MkiRJ7YrXacfrtCuKXn8djjkmvBPz5Mlw8MEtc57Sn+HTy8K7vG/zF+jSp2XOU6vvTX2ZWzyXECGGZA/hs7M+a5HzDPnPEL5c+CUBAb269mLeBfPWeFxNANMK4fGFUBnxE29kBhyR1XzpenMqWF5ARlIGCbEJ0S6l5ZUvgdhkiEuOdiWSJEmtxuu0S2
1dTQ0cdBDk54d3yx8/vuXONeNE+GEyzLkP3jym5c5TKyM5g5hQDDGhGLold2ux8/x9v7+TEp9CUlwSN4y6YY3H5FfAP+bBwwX1DXtWPFzQG47NbZsNO0BOl5zO0bB/+md4rDtMzYGFb0W7GkmSpDbH6fFStPzyC5RG7IJWVdVy5yqdF07ZAcrmt9x5aj045kEufPFC4mLiuGnUTS12noMGHkTRJUUEBKvtsF4TwCtL4clFDdP1vTLgiGxI9CXL6KteAbOuDt+uWg5f/h2yd41uTZIkSW2Mf7aq7bv7bujfH0aNgkWLol1N88nLg4ED69+/+OLmP0cQQE0VbPtXiEmAUDxse13zn2cVg7MH8+z4Z3nq2KcYkDmgRc8VGxO7WsO+oBz+Pg8ejZgOnx0PF/WGcbk27C2iajm8ewa8vFf4+u2NEZMAidnhywQSgi79WrJCSZKkdsk17bimvU1bvBhycsJTyWNj4dxzw9dtb6pvv4Xp02GPPWBAyzaRTVJUBE88EX5RYo89mvm5v4Jp+0PZz7D1FTD4onAT34HXDdcE8NJSeGoRVNX+ZAsBe3eDw7MgwWa95bx3Jnw7sfadGDiqEBK6rv9xhbPgy39Ach5sdRnENfOOgJIkSW1UY/tQp8erfampWf8xq/ruO9h22/BU9JQU+Owz2HTT5q9tQ6Snw4QJLfPcX/4t3LAH1eFN6AaeBYmZLXOuNuDncpi8AOasqB/LSYCT8mBAx32dovVVFEHpfEgbBDERGwIsejvioBoo/Bhydl//82VsBcMnNXORnVxVFdxzT/hn3kknQWpqtCuSJEkbwaZdbVv37nDbbfCXv8Dmm8Mf/tD055g+vX7teGkpvPlm22naW1JiVjhZJxTemTs2KdoVbbBFpYv4+1t/Jz42notHXEx6UnrdfTUBvLAEnlncMF3ftxsclgXxpuvNp3AWvLQrVBZDzl6w90v1jfsmh0Hhp+HboRjoOnDtz6OWdcEFcMstEArBM8/A889HuyJJkrQRbNrV9p12WvhtQ+2xRzhhLy2FLl1g90akfx3BVn8ON1fLfoAtfw9xKdGuaIMd/cjRdddj/3zh5zx+zOMAPDXnAx5fmkZ88gBCoXB3npcAE/Jg0yin6wuWLeCEqSfwY9GPXL331Rw95OjoFtQUQQBFX0BSNiTl1I//MDm8dh2gYBoUzYJu24bf3/rPEBMXbtw3+1V4uvtKNZXhvRU68NKMNuX118P/DwJ4yx35JUlq78yg1PFtuinMmhWeLjprVnj9eEcSBPDBb+GhLuFNwCoKw+PxqbDzRNj7Bcjbu8VO/79v/sd+9+7Hec+dx4qqFet/wAb4YuEXVAfVVAfVfF7wOdUBnP7WExz21rtM+vJVps15jRAwKhP+1Df6DTvA5dMuZ9qcacxeMpsTpp5AaWXp+h/UVrx9HDy7FTzRGxa8Wj+evlV4uUUoNrz2PKV3/X0xceHGffdHocf+9eP50+DRTHikK3z979b7GFrL3XdDnz6w116wYEG0qwk7+eT62y21/EaSJLUak3Z1Dv37d7xmfaWlH8LX/wzfLngDvr0dtvxdq5x6celijnjoCKpqqnjl+1fI6ZLDH/f4Y7Of59LdLuX8F87n/9u77/AoyrWP49/d9B5CSELv0nsXRFAEFcUOKnosHAugB0Rsr0fFdrB3BQuW47F3RVEBFaU36VWQTgqBFBLSduf94wkpECBlk9kkv8917ZXZ2Zln7sUBc8/9FAcObur3CFN3wjcH3PmzjsP2hAXMPvMsmnlBsn6U6+gSe4AbN9Vmzs/sZNj5sdl258G2Nwsf+jT/B1huU01v/o/SzZGwdkp+dd6CP++CNrdVVuRVLyXF9AJyu2HfPnj0UXj1VbujgokTTQ+jzEzoryX0REREqjtV2qX0XC4zTnLiRNiwwe5oTu6996B9e7jiCjNDe03mU3S2bQt8q27SqcM5h8l152Jh4XQ4OZBZOUvyTeg7gR0T9/DumGS2h1/J7mxoGN
YIcMPBWQx2LvCqhB1gyqAp9GnUh4ZhDZkxYgYh/tVkVnS/cAhqkP9AxA2RnQs/czig5Q3Q43mI6la69oIamTHuOCGofmVEXHW+/hrOOgsmTICsLPPn4XAUfl50227du8OAAd4Vk4iIiJSLlnxDS76V2rPPwuTJZum1yEjYvRuCvCxTAkhIgAYNTPXL6YR774XHH7c7qsq19XX463WI7gfdnwcf/yq79N2z7+bZRc/Sok4L5lw7h6aRTT1+jV1ZZmb4PdmF++r7u6mX/gMBefFc3elqgv2q75h9r3N4u7mnQppAq1uLzxJfVtnJsPp+M2yj04MQ0d5jYVaphARo2NA8vHQ4YOpUuOceeP99eOghaNUK/vc/s0SliIiISCmUNg9V0o6S9lK78UYzLtyV3+13925o1Kjq41i0yLyGD4c2bY7/fN8+88s1mAcMd9wBTz9dtTFWYxk5GRzIPECTiCY4Slmlc7ld+FQksTuBPDd8fxB+PGhmiQdwOuC8KDg/Cnxrcl8hy4LsJLMKgKMmf1Ebpf8F8XMhZiBEtDv5sX/9Ba3zZ8T38TEztD/1VOXHKCIiIjVWafNQ/SYopXfzzRAQYLavuKIwMa5KCxaYLp933gk9e8L+/ccf06CB6RUQHQ2nnw533VX1cVZTaxLW0Oj5RjR7sRmjPh9V6nHYlZGw78yCx3fBD8mFCXujAPi/JjAiuoYn7HlHYPYA+DIWfugMOYeq9vquHNg/G1LWV6ydvCPw+yXwWR1Yfnv+EoRVLGUd/Ngbvu8MSUVmUj+8A37oAstuhVldIXXjydtp2dIk6k6nWX7yX/+qzKiLy8qCJUvgUBXfByIiIuIVavKvveJpffvC3r2weTN88ok9YyUXLDDd3gEOH4bVq0s+btIkSEqC339Xd9UyeH3566RnpwPw2YbP2H5oe5XHkOuGr5LgiV2wL787vNMBF9aF+5pA4+q73Hzpxf8MBxaa7dT1sOvzqru2ZcG8C+DXofBDJ9j1Rfnb2vE/2PM15KbAllfMRIlVbektcGiFWZ5u4TWF+w8sBlf+jP7uHEj64+TtOBzmYWBurpnTo6p6GWVkmPHpffuaLvjbq/7vpIiIiNhLSbuUTWSkqTLZNbnRBReYNdfB/NLct689cZzMjh0wZYoZ61rNRp+0rtsal+XCx+FDqH8o9ULqVen1/z4Cj+8s3h2+cQDc3wQuqOnV9aIKllLL/3sW4vl5Ak4o5xDEzy58f3Qm+fJwBhR/7xNQ8nGVyXKDVWT7qJgzwC/CbPsEQ2wpl0V0VvFNuGABbMzvBXDoEHxehQ9wRERExCtoyTepXtq3N5X+1avNUkYLF8KcOXDhhWad5MpgWbBrF9StC6GnmJk9Oxv69YPERNMjIC0Nxo+vnLgqwe29b8dtudl0YBM397iZ8IDiY2vWJqzlzZVv0qZuG8b2GovTQ2Otc93wbTLMPliYX/k44IK6Zu11Hy+bAHt/+n58nD7EhFRSL46o7tD/E9j9JcQNKb7ueWXzj4TwtpC2GbBMclteza6G5CVm3HiLf0C0DQ/Zer0Gi64FVxb0fqNwf3BDGL4ekuZD3T4Q2qxy49ixA775Bnr0MEN8SqttWzMsKTfX/JvSrZSz9ouIiEiNoYno8NKJ6ObNMxOoRUbCjBk1d43xili0yIxZ9/ExifXq1dCxo2evYVlw7bXwwQcmYf/tN/NL94ns3g1NmphtHx9z7jvvlO/aGTsheSnUG+AVS2Vl5GTQ6PlGpGen47JcvDDsBSb0nVDhdrcdMTPDJ+QU7msaCNfHQQMbCrOn8uLiF5n400ScDidvj3ib67peZ3dInpd1AHZ8ACGNodElWjasog4eNJPYHTxo3s+ZA2efXfrzly+HL74wDyovuKByYhQREZEqp4noqrsrroBVq8yY7Kqc8Kg6WbfO/HS5TAVq4ykmkiqPvXtNwg5w5AhMm3by4xs1gqH5VVGnE64rZ0KXthVmtof5I+H7jnAkvnzteFBiRiIpWS
kF3ec3JG2oUHs5bvgsEZ7eVZiw+zrgkmi4t4l3JuwA//njPwC4LTdT50+1OZpKEhgNbSdA40vtS9gtC/Iy7bm2p23YUJiwO53moWxZ9OxplphTwi4iIlIrKWn3Vjk55pdWyzIzB8vxRowonMG+VSs45xzPXyMqCiIiTNXc7Tbj+U/G4YDvvzczPf/9NwwaVL7rxs8unCQr56DpwmuzZpHNuLjNxQAE+gZyU4+byt3W1kx4dCfMOVTYHb55IPy7KZxb10w8563aRLfBx+GD0+Gkfb1quua4t8s+CLO6wachMG8EuPPsjqhiunUr7C3l62v+7RIREREpJXWPx0u7x3/9NYwbZ7rHf/IJdOpkd0SVLz4e1q6F3r1NolwaGRmwdavpbvrZZ6bK/fDDnp0satUqmD7dPBiYONH80l3ZUtbCrB5g5ZpJsi7YCCFNKv+6p2BZFlsPbiU2JJaIwFL+Nyoi2w1fH4BfiyTrvg64KBqG1Dk+Wbcsi7tn382MP2fQr3E/Prn8E0L9TzGvQCVLzEjkqQVP4e/jz9397yYyMNLWeGqkTS/CyjsouEvOmgNxZehO7o1SU03PqY4dNdxJREREgNLnoUra8dKkvbbZssWMFT982IwJX73aPLAojaNj24/69FMzvKC6S1lrKuyxQyC8td3RVNiWTDN2/UBu4b4WQXBdLMSdoCv80r1L6fNWHwAcOHh+2PMeGUcvXm7nJ7DgysL356+FSA/PVyEiIiJis9LmoZo9XrzDd9+ZhB3MTO0LF8L555fu3LS0k7+vriI7mVc1l+2GL5Pgt5TCfX4OuDgaziqhul5UQJElwiwsAny9dKC7eFaTkZCxw6zr3my0EnYRERGp1ZS0i3fo27dwwqvAwLINBzj7bLj6atM9ftAguOqqSglRym5TBvw3AZKLVNdbBcF1cRDjf+rzu8R14dmhz/Lmyjc5o8kZ3NjtxsoLVgolzIN1j5r14ns8b5aBq0oOB7S/x7xEREREajl1j0fd473GH3/A/PlmzfWyLN22c6fpXt+/PwQHn/zYpCTYv9+078lx71JMlgu+OAC/pxTu83eameEHR2oFMa/myoEv6kJehvkP1Xoc9HzZ7qhEREREahwt+SbVzxlnwH33lS1hX7zYzOg+dCj06WNm3T+RhQvNePkuXeCii8zM/DXEi4tfpN2r7bjxmxvJyrN3tYGNGfDwzuIJ+2nB8GBT0x2+vAn75gObmbFyBtsPbfdInHICVm7+UmuWmQcuO9nuiERERERqNXWPl+rt88/NOu1g1m1fv94sr1SSd96B3Px+2jNnmrHzTZtWTZyVaH3ieib+NBEwiW23uG4Mbj6Y1fGrGdpyKPVC6lVJHEdc8EUS/JFauC/ACZdGw5mRFauubzqwia7Tu5LtyibEL4R149bRLLJZRUOWkviGQLenYPV9EBgHHe63OyIRERGRWk1Ju1Rv/fvDs8+ajDAqClq2PPGxnTubBN/Hx8xMHxNTZWGeUNICWDMFAmOhxwsQGF3mJnJchb0LHA4Hmw5s4o6f7sBluWgQ1oCN4zcSHlC5wz7WZ8D78XCoyHLabYPh2liILsXY9VP5Y+cfZLuyAcjIzWDR7kVK2itTuzuh7R3gUGcsEREREbspabfDvn1mLXGAKVOgfn1bw6kSlgVut0mYPemSS+DHH80ScVdcASebk2D8eDPJ3datMGYMBAV5NpaycrvgtwsgN9UkRz4B0HdGmZvpGteV+wbcx/Tl0+nTqA8hfiFY+etb70vfx9qEtfRv0t/T0QOQ6YLPkuCzPXtZtncZwX7BnN1sAP9oEMwZEZ4buz64+WCC/YLJzM0kPCCcAU0GeKZhT8lNA4cv+J5iToXqRAm7iIiIiFfQRHTYMBHdoEFmwjWAgQPhl18q/5p2WrUKzjsPkpPhqadg4kS7I/IOrhz4NASsPMAJjUbAwK8q3OzvO39n8HuDcVtu4kLj2DR+ExGBERWP9xhrD8P/EuBgrpt3V79LnisPx5
FNjKqXw0cjXvHINbLzslm6dykt6rQg25XNot2LGNh0II0jGnukfY/Y/BKsuAOc/nDGF9CwlEsVioiIiEitponovNmOHaabtstltmu6Rx+FxEQznnzyZMjOtjsi7+DjDz1eBGcABNWHTlM80uzApgNZftNy3h7xNn/e8qfHE/ZMF7yzH17ZCyl5Zv10d14GJPwX9r5IXnaCR66T68rljHfOYOC7A2n5UksOZB5gdOfR3pWwA6y+H3CDOxvWPWJ3NLXD4cPw88+we7fdkYiIiIhUOiXtdnjsMfD1Na/HHrM7msoXnT9O2+mEsDDzvb1VXgbMuwi+iIHVD1T+DPOnjYNRR+CSPVCni8ea7Va/Gzd0u4G40DiPtQmw+jA8tAMWpxXu6xzqw/NtI4nIXkerOi15ZJBnEteNBzaybN8yAPLceXy09iOPtOtxwY3B4WO6k4c2tzuami8zE7p3h2HDzMoRq1bZHZGIiIhIpfLi7KkGu+YauPhisx0aamsoVeKJJ0yVfd8+ePBBz49r96S/3oS93wEWrH8MGl8CUd0r95rVYNHyDBd8kghLiiTrQU4YGQP9wsHR6Fr+1eNaj16zWWQzogKjSMlOwWW56NOoj0fb95gzv4P1j4NvaPHeEpYFuz6FI/HQ/FoIiLItxBpl5UozLwWYJR6/+gq6drU1JBEREZHKpKTdLrUhWT+qTh14+227oygdh+/J31czbsuNs4ITiv2ZDh8mQlqRmeE7hcA1sRDpV8EATyI8IJwlNy3ho7Uf0Sm2Exe3vbjyLlYRYS2hbwn398anYNW9gAO2vwPn/VktHtB4vbZtISIC0tLM5Jann253RCIiIiKVSt3jq6vffoPhw+H22yEjw+5oKs/MmTB2LHzzTdVcr9U/odk1ENoSuj4FdTpXzXU9bE/aHtq/2h6/R/2Y/PPkcrWRngdv7oPp+woT9mAfuCEOxjes3IT9qFZRrXjgzAe8N2E/mcTf8zcsSFkNriO2huM19uyBkSPhwgth/fqynx8dDcuXmx48c+eabvIiIiIiNZhmj8eG2eMrKjMT6tWDI0dM5e7aa2HpUtMF/a234Mwz7Y6wOJcL1q2Dxo3NWuqltWIF9OplvqPbDYsWQd++lRdnVbHc4M4zE9FVkrtn381zi57DZbkA2Hr7VlpFtSr1+SvS4aMESHcV7usSCqNjIaJ6dz6oOjs+hIWjzXbDEXBmFT148nbnngtz5pjhA23awIYNdkckIiIiYovS5qH69bs6ysw0LzCTu82cCYcOmV+Cb7wRtm2rmjgSEuDdd6FhQ7j6ahPLsVwuGDLE9AwICTFL3ZV2/OnmzeY7HX2utHlz9U/ak5fDr+dC7iHo+iS0K18V/FSigqJwW24AfBw+hPmHleq89DzTFX5leuG+EB+4MgZ6hdnbu3vJniX8uuNXzm11Ll3jutoXSGk1uxoiO0NWAsR42YM0Ox04YB7CWZbZFhEREZGTUtJeHUVHwwMPwOOPQ2ysqV4fOmQyqqqa5M3tNhX9rVvN9t69cM89xx+3caNJ2AGysuC990qftJ9/vpkdessWaNnSdKet7tY9BjmHALcZ73zavyql4j6x70T2pe9jTcIaJvadSGxo7EmPtyxYng4fJZpJ547qFgpXx0K4zf9S/Ln/T/q/3R+35eah3x5i/bj1Zeo5YJvIjkDHkj87sBR2fgTRfaHpqCoNy1ZPPQWXXWaWfnzpJbujEREREfF6Stqrq0cegX//G/z8YO1aU2HPy4Pp06vm+hkZpvIN5mHB4sUlH9eokVnmLTPTVN27lGFZs8hI893+/huaNwf/yutOXmWCjibPTvCLBGfl/BUM9A3kpfNKlxCl5cGHCfDn4cJ9oT5wVQz0sLm6ftSSvUsKuvrnuHL4c/+f1SNpP5HMvTBnIFh5sPkFcAZA44vtjqpqnHUWHDxonhSV1DunItxu03bdut5x44qIiIh4gCaiK634ePi//4MnnzQVY2/g729+Me3c2UzMtGpV1X
UfDwsrXLbu6Lj6kkRGwrffwvXXm670111Xtuv4+5txr9U9YV/3OHweDWmbofk/oOGFMHiWWdvbJpZllnCbsqN4wt4jDKY0g57hpc97FuxawLRl09ifvr8yQuW8VudRJ7AOAHGhcQxqNqhSrgNA/BxYOhZ2flJ51zi8DdzZYLkAJ6SsrbxreSOHw/MJe2qqWb+9Xj3o39/M+SEiIiJSA9iatE+dOpVevXoRFhZGTEwMF198MZuPVm/zZWVlMX78eOrWrUtoaCiXXXYZCQkJxY7ZtWsXw4cPJzg4mJiYGO666y7y8vLwqPPOM90677sPJk3ybNsVtXdv4brFVenzz+GPP2DTJrj00pKP+eILM6Z9xgwzU3RtrH6lbYE1/4acZEj8A0Kbw5lfQ91etoWUkguv7YO39xd2hw/zgVsawM0NIKwMHQB+2PoDA94ZwLgfxtH9je4czjl86pPKqGlkUzbdtomfrvmJDeM2UC+knvnAnQfb34Ot0yAvs+IXSt1k5hzY9iYsuBL2z654myWp2weieppt/0hodlXlXOdUcnLMMJubbza9WqqzL76A1avN9qJFMGuWvfGIiIiIeIitSfu8efMYP348ixcvZvbs2eTm5jJ06FAyiixhdscdd/Ddd9/x2WefMW/ePPbt28elRRJEl8vF8OHDycnJYeHChbz33nu8++67PPjgg54Ndv16073bsgp/MSyLhx821eL27WHnTs/F9fHH0KSJGftd0phyT0lJMd3vwXSNf+45ePllU9lq3frE573wgvlzA3j++cJJ5aqT9L/gwBIz63t5HNsF3ll5vQb2pe/jw7Uf8tfBv0r83LJgUSo8vBPWFMmte+VX17uXbr66Yn75+xd8HGYuhfjD8WxJ3lKOyE8tJiSGoS2HUieoTuHOlXfC4uth2XiYf0XFL3J4m6l+53fFJ21jxdssiU8ADF0I562Gi/6GMJu6+k+daubHePttGDzYJPHVVZMm5ufRB4ONG9sXi4iIiIgHedWSb0lJScTExDBv3jwGDhxIamoq9erV48MPP+Tyyy8HYNOmTbRr145FixbRt29fZs2axQUXXMC+ffuIjTXjhadPn84999xDUlIS/qXoVl2qqfbvvhueftp06fzoI7POcGnt3WvGdoOZKG7cOM9NwHT66aaqBOahQHa2Z9o9yrJgzBh45x2Ii4Pffzd/FkfXTR850jw4OJHx42HaNPPn1q5d9avm7fwUFlwFuM367ae/X752tk6HTS9AVHfo8yb4hngySgASDifQ/tX2HMw6SKBvICtuXkH7eu0LPk/JhfcTYF3hMzHCfeHqGOhWjmT9qN93/s5Z752Fy3LRKqoVa25dQ5BfUAW+SRl83wlS15lt31AYmX7y408lLxNm94dDqyC4EQxbCkH1Kxym17rmGvPvmTv/gdSBA2Y8eFmtW2eG5wwdCjExHg2xTN56C37+2Qzdufpq++IQERERKYVqueRbamoqAFH5a3mvWLGC3NxchgwZUnBM27ZtadKkSUHSvmjRIjp16lSQsAMMGzaMsWPHsn79erp163bcdbKzs8kuktympaWdOrgnn4SbbjLLljVoULYvFhRkJozLyzNJcGRk2c4/mS5dzCRwTid06OC5do/ats0k7ABJSfDqq7BkSWHF/EQT0B317LOm4nXoEEyY4Pn4Ktv2t4H8hGbH/6DPjPLN9t76VvOqRIv2LOJg1kEAsvKymLN9Du3rtceyYGEafJYIR4p0FugTDqNizJJuFTGw6UDWjl3LxgMbObv52VWXsAO0uB7+zF82r/kJ5lUoC99gGLbMVNxDmoJPYMXb9Ga3324ewB0+DLfeWr6EfdEiOOMM06MmLs4Ml4mI8HyspfHPf5qXiIiISA3iNUm72+1m4sSJ9O/fn44dzRJJ8fHx+Pv7E3lMkhsbG0t8fHzBMUUT9qOfH/2sJFOnTuXhhx8uW4AOx8m7gZ9MVJQZ//3009C2rWe7sT//PDRrBmlp5hdwT4uKgo
AAyM011bjGjWHsWDg6/GDs2JOfHxgI995b/uvHx5sJpZo3L38bx3Jlw8LRsP9naDISer8OzhNkrnX7wP6fACdEtAenn+fi8LDeDXsTHhBOWnYafk4/BjUbxKH86vr6ItX1CF+4JhY6h3ru2u3qtaNdvXaeaxDA7YK934LDFxoOL3nSvnZ3QsxAUyGPOcMz13X6Qngbz7TlSa4cE5snJy/s08f8HUtNLfvDyKN++qnwIV58vBk+NHCg52IUERERqeW8JmkfP34869atY/78+ZV+rfvuu49JRSaTS0tLo3Flj38cMcK8PC0wsHLHskdFmV/KX3vNjMf/179Mr4GLLjIPMjp1qrxrf/wxjB5tHhY8+KCZFwAgKxFwQGC94senbYU1D5gx412fgOATJCE7P4HdX5jt7TOgyeXQ4NySj+34IIQ0MddsOcarJ9JrENaA1beuZs72OfRt2I9DAR2YsQOyilTX+4XDyBgIrmB1vUosvRW2v2W2206C7s+WfJyNE/pVmXWPw5oHISAazvoZ6pRh6cRTCQkxr/I65xx47DGzHRNjVrMQEREREY/xiqT9tttuY+bMmfz+++80Ojr2G4iLiyMnJ4eUlJRi1faEhATi4uIKjlm6dGmx9o7OLn/0mGMFBAQQEBDg4W9Rg515pnkVdewv5pZlJtiLizMPEjzhuecKx9o+/bRJ2rdOh2XjAAf0eQta3lB4/PzLIXW92c5OMkuqlcT3mO7bPifpzu30Mcm6B81YOYOft//MxW0upkNMBxbvWcy5rc6lSUSTCrfdLLIZl3T6J+/Hw8aUwv2RvnBtLHT0YHW90u39unB799cnTtprutx08zAKC7IPwIYnof+HdkdVqH9/WLEC/vwTzj3Xs8N/RERERMTe2eMty+K2227jq6++4pdffqH5MV2ge/TogZ+fH3Pnzi3Yt3nzZnbt2kW/fv0A6NevH2vXriUxMbHgmNmzZxMeHk779u2pcebNMzM9HzpkdySF8vLMBFTNm0PTprB9u2fa7dzZjNX38TFVfoB1jwIW4Ib1jxU//kh8/szfbjhykvXCG19mKrfh7aHzo6Z7dRWZs30O//zun3y2/jOu/vJqukzvwi0zb6HtK21JzEg8dQMnYVkwLwUe3gEbi6x+1j8CHmpWzRJ2gIYXFW43uujEx9V0zgDwCwPye3kExp70cFt06QLXX28e2onnuVxw113m38SpU6vnKhwiIiJSbrZW2sePH8+HH37IN998Q1hYWMEY9IiICIKCgoiIiGDMmDFMmjSJqKgowsPDuf322+nXrx99+/YFYOjQobRv355rr72Wp556ivj4eP79738zfvz4mldN//BD010czJrx69aBrxd0lli5EubMMdsHDsC778Ijj1S83ZdeMg8C0tNh4kSzL6wNZJmeFIS3LX5892dhyRhw+EHXqSdu1+E0x9pQud2VugsAi+K/dB/JO8KCXQu4pN0l5Wr3QA78NwE2F0nW6/jCtXHQoRQ9n7PzsknPSSc6OLpc168UvadDw/Pzx7RfYHc09vHxh0GzYMMTZkb7zh74uyXVy+efwzPPmO21a2HAADP5n4iIiNQKti755jjB+OB33nmH66+/HoCsrCzuvPNOPvroI7Kzsxk2bBivvfZasa7vO3fuZOzYsfz222+EhIRw3XXX8cQTT+BbyoS2tFPt2+766+H99wu7jO/cWbg2cWl9/jksWABXXGGWi/OEfftMcu1ymdd//wvXemAm75JkJZruwQ4ntL8PAqKKf+7KMZ8duza6l0jNSuWMd85gbeJa6gbVJflIcsFneyftpUFY2SYDsyz4LQW+OgDZRcaunxEBl9eDwFKMXV+XuI5B7w4i+UgyN3W/iTcufKNMMYhIJXvnHbjxxsL3P/4Iw4bZF09tkJFhJno9csQ8NK5X75SniIiIlFVp81CvWqfdLtUmaf/yS7jsMrPdtSssX266jpfWrFlw/vnmHKcTtmwxM897wu+/m2S9Rw+zdJQXT9
hmN7flJjkzGafDyY3f3MjWg1t5ZPAjXN7+8jK1k5QD78XD1iOF+6L84B+x0K4M84rd8t0tzPhzBi7LBcCuibtoHFHJEzOKSOllZcGoUTB3LowcCW++WbZ/+6XsrrkGPvrI/L+sb1+ogklyRUSk9qmW67TLKVx6qUnUt2+H884r+y9ta9eaX0COVsRHjDCJdteuFY9t4EAt81RKToeTeiGmavPNVd+U+XzLgl9S4KskyC3yyO3MSLg0unTV9aKaRTbDbblxOpwE+QZRJ6hOmWMSkUoUGAjflP3fCqmA1asLe7WtXWtvLCIiUuspaa9uevQwr/IYNcqMi0xKMu/XrYMrr4RNmzwXn1SqhPzq+rYi1fVoPzMzfNtyrto1+fTJ5Lpz2ZK8hdt630aof3Wbsa6IvEzIOQhBDdXbQ0TK7847zZAEy4K777Y7GhERqeXUPZ5q1D3+RLZtM+unZ2ebZdJOtk5yejr07l2YqDdsCHv2VE2cNVD84XhSslJoU7fNCedo8AS3BXMPwTcHilfXB0fCJfUgwNZ1ILzEwT9h7mDITYVm10K/95S4i0j57d0LOTlmzhYREZFKUNo8VL/q1wT//Cf89BP8+qupnJ9MWJgZD9mgAdStC9Onl++amzaZSsRrrxV2Iaxlvt/yPY2fb0y7V9sx7vtxlXad+Gx4ejd8XqQ7fD0/uLMxXBmrhL3A1mmQd9hs73gfjuy1Nx4Rqd4aNlTCLiIiXkHd449lWbBwoanQ9etXPSp1qakmcbYsSEs79fEDBpgKQnllZZk2Dh0y1337bfPQoG7d8rdZDb22/DVcbjN52/QV03nh3BcI8PXcMoNuC2Yfgm8PQF5+su4AzqoDF0UrWT9OWEuw3ODwAd8Q8NfYfBERERGp/vRr/7Huv98kpP37w4MP2h1N6Tz/vFmOpk4dU/mubAcOQHJyYYV95UqYPLl05+7+ChZeA9verrz4qkjnGDMMwcfhQ4s6LfD38fdY2/uz4cld8GVSYcIe4w+TG8PIGCXsJWp7J3SdCi1ugLN/NYm7iIiIiHiPNWvg8cfNylNSahrTzjFjCdq1M+uOAzRuDLt22RucN7IsM5P911+b9w4HXHABfPvtyc87tAZmdcXUi90w6EdoUH3XGs515fLK0ldIyEhgXK9xNIloUuE23Rb8dBBmJhevrg+pAyOiwV/JuoiIiIhURzt3Qtu2Zh4ugAULTM/mWkxj2str8ODC7UGDbAvDqzkcZs34f/8b/PwgJgYeeujU52XsBCwgv0Kfsb0yo6x0fj5+3NHvDp4Y8oRHEva92fDELvi6SHf4WH+4MfoAmfv/y6akNRW+hoiIiIiILdasMcNsLcu8li61O6JqQ5V2jnnCERAA779vEtNrrwV/z3V5Pk5KCvz1F3TqBAGeGwtdpdxu82dVmrH/eUdg7lmQvBjCToOhCyGgeoyDT85MZl/6PjrEdMDp8OyzLpcFPx6E75PNNpjq+tAoGBiSSqdpbYk/HI+Pw4ffb/id0xuf7tHri4iIiIhUukOHoGtX05M5IgJWrICWLe2OylalrbRrIrpjBQSY2dgr219/maXXDh2Cjh1hyRIIDq7863qaswwJrG8QDF0AR+IhMAac1eP2W7xnMWe9dxZH8o5wQesL+Paqbz22vNueLHg3HnZnF+6r7w/XxUHzIPh952riD8cDYGHx87aflbSLiIiISPVTpw6sXQvLlpmiZUyM3RFVG+oeb5fPPjOVdoB162DxYlvDqTIOJwQ3qDYJO8Dbf75NjisHgJlbZ7IzdWeF28xzw3cH4PFdhQm70wHnRcH9TU3CDtA5tjOxIbEAOHAwtOXQCl9bRERERMQW4eFw9tlK2Muo+mRONU23bmYsh9NpxoW3bm13RHICnWI64bJc+Dh8iAyMJCakYv/I7M6vru8pUl1vEADXx0HTwOLHRgZGsvrW1fy07Se6xXWjU2ynCl1bRLxEUhL8/bf5f4Gfn93RiIiIiBdT0m6Xc881s68vWg
SXX25mqhevNL73eAJ9A9mSvIUx3ccQ7Fe+YQx5bvj+oBm/7s4fu360un5+FPgW6feSnJnMsn3L6F6/O7Ghsfyjyz888E0qkdsFOQchILp08xtUJ2+8AW++CaefDs88owRLKm7VKrOsaGYm9O1rlr3RfSUiIiInoInoKP0EANWGZUF6OoSFlS2B2rDB/GzfvnLiqsV25lfX9xWprjfKr643LlJdn7ZsGtOWT2Prwa1k5WVRJ7AOq29dTeMIL36ok3UAZg+A9M0QMwgG/wg+1XRixWNt2AAdOhS+nz4dbrnFvnikZrj7bnjuOXC5zPsVK6B7d3tjEhERkSqnJd9qqyNHzFJ1ERFmorv0dLN/xQqYPRvy8ko+78knTXLSoQM88USVhVvT5brh6ySzlNu+ImPXL6wL9zUpnrCvT1zPuB/GsTZxLVl5WQAcyjrEz9t+tiHyMtj5kUnYARJ/g4Tf7IzGsw4fLtx2Ogv/PolURPfuJmF3Os3D1WbN7I5IREREvJi6x9c0s2aZrpYAy5fDF19ATk5hdfCKK+DTT48/75VXCrdffRXuvbfyY63h/j4C78XD/pzCfY3zq+uNAo8/PiM347h9vk5fejfsXYlRekDI0TXq83t1BDe0LRSP69ULxo+HGTPM9k032R2R1ASjRpmEffVquPpqiIqyOyIRERHxYuoeT5FuCddfT/gVV8D559sdUvktXgz9+hW+//ln+M9/4LffzHuHA3Jzwcen+HmXXw5ffmk+v/hik+xXpV2fwb4foMH50OSKqr22h+W64dtkmH0Qjv7l8nHABXVhWJTZLollWfzrx38xY+UMOtTrwIVtLuT81ufTs0HPKou9XCwLtk6DxHnQdBQ0vtTuiEREREREvF5pu8craafIH5bDQTiYSYI6d7Y5qgp4910zyd2wYTB2LDzyCDz0kPlswAD444/jz8nIgGnTzPbYsRASUlXRQtIimH06ZrSGG85ZCPX6neosr7T9iBm7nlCkut400Ky73rCGDPMWEREREZGKK23Sru7xRR19fvHXX9U7ab/+evM66t//hnbtIDkZRo8u+ZyQEJg8uSqiO97hbfkb7vz326td0p7jhm8OwNxDhdV13/yx60OjzDh2ESmj5GT47jto06Z4DyIRERGRWkRJ+7E6doShQ+2OwrOcTjOW3Vs1vBAi2kPqBojoAA0vsDuiMvkrE95LgMQi1fVmgWbsen1V10XKJzsb+vSBbfkP9b7+Gi66yNaQREREROygpL2olSuhUyfw1R9LlfKPgPNWw5G9ENQQnN7z578leQtfb/qaXg16Mbj54GKfZedX1385prp+UTQMqaPqukiF7NhRmLD7+MCPPyppFxERkVrJe7Ijb9CyZeUk7FlZ8PLLkJICt98OcXGev0ZVyT4IfmHg9PNsu05fCGnq2TYrKDEjkZ5v9ORwzmEsLGZfO5shLYYAsCXTzAx/ILfw+BZBcF0sxKm6LlJxzZubbvGbN4PbDRdUrx44IiIi4iELF8LEiRAaCm++aXK2WkbrtJ9McjLceqtZkmfjxvK3M3ky3HOPWQv93HM9F19VsixYfBN8URe+agSpm+yO6KRyXDnMWDmD15e/XrDmeVltSNpAek46FhZOh5NFuxeR7YaPEuDZ3YUJu58DrqgHdzUuW8KemJHIDd/cwOWfXs6GpA3litHr7f0elt8O+2bZHYlUN/7+sGQJfPSRWb5y+HC7IxIRERE7jBoFK1aYZa3HjrU7Gluo0n4yt90Gn31mEtalS80EdeWxapVpw+WC9evNtqOa9Z3O2AHb3zLbOcmw5RXo9cpJT7HTuO/HMePPGQDM2T6Hz0Z+VuY2ejXoRauoVvx18C/8nH68/9cy3kr5gH7NhxMREAlAqyD4RxzE+pc9xrHfj+WbTd9gYbE6YTVbb99a9ka82YGlMO9CcDhhy6tw7nKI6m53VFKdRETAlVfaHYWIiIjYKSvL9LpzOs12LaRK+8ns2WNuELcb9u8vfzv/+p
e5yQAmTKh+CTuATzA4AwEnWG6v68p+rF93/Fqw/dvO38rVRoh/CCtvXsl3o2cT0uhmtgZfwO7MDH7f+Qd+DhgVA5Mbly9hB4hPj8dtuXFbbhIzEsvXiDdL2whYYLnMz9QK9FYRERERkdrp7behfn1o0QJeeMHuaGyhpP1kpkyB4GAzCdLTT5e/nZEjYdcu2LSpYu3YJWUdfN8e3FkQ0gQ6PghtJ9od1Und0PWGgu3ru1xf7nb25IWxwDmErJBeJv8E/LL/5sFmcFadij1/efSsRwn1D8XP6cezQ58tf0PequEFENLcbIe2hIbn2xuPiIiIiFQ/F14I+/bB1q3QvXb22nRY1tHFyWuvky5qn5sLeXkQFGRPcN5g2Tj46438iikw4m8IbWZrSKWxZM8S8tx5nN74dBxlzK6PuOCLJPgj1bzflbqL33fMJjxtDl+fewd9GvX2SIwutwuX5cLfp5zlem/nyoL0bRDWCnw0Q5+IiIiIyFEnzUOL0Jj2U/HzMy9POHQIDh40XTuqUxf5kOamS7zDCT5BEBBld0Sl0qdRn3Kdtz4D3o+HQ3mF+86p34T3u44h2n+Mh6IzfJw++ODj0Ta9ik8gRHawOwoRERERkWpLSXtZxMfDuHGQlASPPw4DB5b+3HnzzMzxWVkwZgy89dbxx2Rnm2UMjhyBm282kzB5g7Z3gJUHaZuh9VjwO/FToOos0wWfJcHC1MJ9AU64vB6cEVG9nrOIiIiIiEjNoO7xlL5bAtdcAx9/bCami4yEAwcKJ5g7lVGj4PPPzblgKu516hQ/5uabTTLvcMDgwTBnTrm+j5Td2sPwvwRIKVJdbxcM18ZBXQ8vSS82crnMHBUiIiIiIjYrbR6qiejKIj3dLNdmWZCRYX6WVrt25ngfH4iNhbCw449ZtMgc43abJeY8afdX8G0r+HkAZOz0bNvVWKYL3tkPr+wtTNgDnXBtLExopIS9xti/Hzp2NGt/33Zb2f7uioiIiIjYSEl7WTz2GDRtCuHh8PrrZavY3X8/PPUUjB8Pv/8OviWMTBg7tnA7MtLMlLjTAwm2Ow8WjobD2yB5May6r+Jt1gCrD8OUHbA4rXBfhxB4qBkMiKwF3eFT1pkHOZ9FwLa37Y6mcr32mlm9we2GV1+FDRvsjkhEREREpFQ0pr0sOnWC7dvLd66fH0yebLbdbjM+vl694on/uHGmW/xZZ5k14vftM/u+/77isauyWCDDBZ8kwpIiyXqQE0bGQL/wWpCsH7XmATj8N+CGZWOh+T/AWUP/Sahbt3BoitPpPfNFiIiIiIicQg39Dd2LZWbCoEGwbBl06ADz55uq+lHt2pkJ6Y52w09LO1FLpef0hV6vmKXb3LngG2barjXZaaE/0+HDREgrMna9UwhcEwuRta0rvG/+EwrLYVYFcNTgjjfjxpkHYX/+abYbNbI7IhERERGRUlHSXtVmzTIJO8D69fDFF2Y2+aJef91MShcSAs88U7p2XTmw6zNw+kHjy8B5TNf99G3566xbsO0NaHcnhJ9W4a9TXaTnwceJsDy9cF+wD4yqB31qU3W9qG5PQV46ZMVDl//U7KTd37/0f5dERERERLyIkvaq1rSp+el0mu66zZsff8wVV5gXmOXl3njDVODPOOPE7S6+AXZ+aLZPux16vlT8c/9Is9a6uTj4hlTkW1QrK9PhwwRIdxXu6xIKV8fUwup6UUGxMPBLu6MQEZGyio83D/ZLmtRWRERqHCXtVa1nT/jkE/j6axg61IxfP5GsLOjVq3Ayui++gEsvLfnY/T8Wbu+bdfznbf5lZo0/tNpsBzcs91eoLtLzTFf4lUWq6yE+cGUM9AqrpdV1ERGp3u69F558EgIDYeZMOPtsuyMSEZFKpqTdDiNHwkUXwY4dkJtrJqkryY4dhQm7j49Zt/1ESXuTK+Cv181201HHf+4TCL1erWjk1YJlwYr8sesZRarr3ULh6lgI110vIiLVUU6OWYkGzPw3zz+vpF1EpB
aowYNYvVhSErRta17dupn130vSsqXpFg8mEx0x4sRt9noNBv0AZ82Bzo96Pmab/LHzD5q90Iwmzzfh179/PeXxaXnw+j54c39hwh7iAzfVh1saKGEXEZFqzM8PGjY0D/IdDmjd2u6IRESkCjgsS2uBpaWlERERQWpqKuHh4ZV/wTffNBPNHfX552ZN9pwcCA09NjiYPRtOO80sOVfLdHytIxuSzJraLaNasvX2rSUeZ1mwNN0s5Va0ut4jDK6KgTAl6yIiUhNs2QLPPQexsaarfFCQ3RGJiEg5lTYPVSpTEssyiXJCgumOHuLhSdvatDE/fXzMtdLTIToaMjLg8cfN/4SPCg+Hyy7z7PWrkUDfQBz5g8+DfEv+xSQ1Dz5IgNWHC/eF+cBVsSZpFxERqTFOOw2mT7c7ChERqUKqtFPCE46XXoIJE8yHAwbAH394/qJffGHGqF90kbnejz+aBN7Hx0xA51vC8xR3LhzeASFNwcff8zF5oXWJ6xg7cywuy8Vrw1+ja1zXgs8sC5akwSdJkFmkut4rDEapui4iIiIiIl5MlfaKmFVk9vX5800SHRjo2WtcdllhBf3LL83YNIcDYmJM4n6snBT4uR+kbYKw1jBsCfjX8WxMXqhjTEf+uPH4hyYpufC/BFibUbgv3Ncs49ZN1XUREREREakhlLSX5KKLTOUbzKysnk7Yj/XMM2ZMWlIS3HdfyWuR7fvBJOwA6Vthz7fQ4rrKjcsLWRYsSoNPE+GIu3B/n3BTXQ8p4XmHiIiIiIhIdaWkvSS33godO5ox7cOHV/71wsPhxRdPfkzYaeanwwmWG8LbVH5cXuZQLryfAOuPqa5fEwtdQk98noiIiIiISHWlpP1EBgywO4Li6vaEgd/Avu+h/rkQ3dfuiKqMZcH8VPg8CbKKVNf7hsNIVddFRERERKQGU9Je1SwLDhyAqKiSx66fTKMR5lVR2cmw4g7IToROD0N0n4q3WUmSc+H9eNiYWbgvMr+63knVdRERERERqeGcdgfglSwL3noL7roLNm70XLu5uTBsmJlsrm1b0/3eDisnw84PYf9s+G24+b5exrLg9xR4eEfxhL1/BDzUTAm7iIiIiIjUDqq0l2TaNBg/3lTCZ8yA3bs9s1b7/Plm/XeAbdvg/fdh8uSKt1tWOcn5ibobctPAcoHDe26FAzlm7PqmIsl6HV+4Ng46eOA/g4iIiIiISHWhSntJ1q41CbvLBYcOQXy8Z9pt2BCcTjM7vGVB06aeabesOk2BwHrg9IceL4DTOxJ2y4LfDsEjO4sn7GdEwKjQTSzZ8jY7U3baF6CIiIiIiEgV845szdvccAP897+QmQnnnQctWnim3dNOg6++gg8/hNNPh8sv90y7ZRXVHS7ZD1hmNnovkJQD/02ALUWS9Sg/+Ecs5B5eQ883epLrziUiIIKN4zdSP6y+fcGKiIiIiIhUESXtJendG3btgn37oEOHktdNL68RI8zLbg4H4MHvVU6WBb+kwNcHIKfIzPADI+GyaAj0gRfX/kquOxeA1OxUlu5dykVtL7IlXhERERERkaqkpP1E6tY1L6k0iTnwXjz8daRwX9386nrbImPXh7QYQoBPANmubKICo+jbqPYsdyciIiIiIrWbknapcm4Lfjlkquu5RSauHxwJl9SDgGN67HeI6cDasWtZsncJg5sNJjY0tkrjFRERERERsYuSdqlSCTnwbjxsL1Jdj/aD6+LgtOATn9e6bmta121d+QGKVEe//mqG9FxyCYSH2x2NiIiIiHiQd8xCJl7t0XmPEvKfEHq+0ZP4w+WbSd9twc8H4ZEdhQm7Azi7DjzY7OQJu4icxLvvwllnwfXXw8CB4Haf6gwRERERqUaUtMtJ7UzZyYO/PUhmbiar4lfx3KLnytzG/mx4chd8kQR5+d3hY/xhcmMYGXN8d3iv4soGS0mQeLEffyycLHP1ajhwwN54RERERMSjvDldEi8Q6BuIM39ZOAuLEL+QU5xRyG
3BrGR4bCfsyDL7HMCQOvBAU2jl7dX1Vf8HnwTB140hdaPd0YiUbMQIswwDQK9eEB1tbzwiIiIi4lEa016ZUtbBqnvAJxi6Pwchje2OqMxiQ2P54NIPeHbRs3Ss15HJp08u1Xn7ss3Y9Z1ZRdryN2PXWwZVUrCelJUEG6bmbyfApmehz1v2xiRSkquvhhYtzJj24cPBqWexIiIiIjWJkvbK9MflkL7VdF3NS4fBP9odUblc2fFKrux4ZamOdVnw00GYmWy2wVTXz4mCEXXBr7rkE77B5mGL64ipYgZqxvoqsWMHPPwwBATAI49ATIzdEVUPffual4iIiIjUOEraK1N2MuAGC1O5reH2ZJnq+u7swn3186vrzatDdb0o3xAY9D1sfBbCWkKH++2OqHa47DIzLhtgzx6YOdPeeEREREREbKakvTKFNIKc/EmhgsvZNX73V7DpOYjoaLrY+3pf9pvnhh8PwvcHzTh2MNX1c6NgeHWqrh8rdpB5SdXZvRtcLrO9a5e9sYiIiIiIeAEl7ZUpfVvh9uG/yn5+VhLMHwlWHiQthKAG0OkBz8XnAbvzq+t7ilTXGwTA9XHQNNC+uKSamjoVbrkFfH3h0UftjkZERERExHZK2k8lYycsuBqO7INuT0OTy0t/bngbOLjcbPuGl/3arkyTsIMZF5+bUvY2KkmeG344CLOKVNedjvzqehT4VtfqelnlZZqeELnp0HYiBNW3O6LqbcwYuPJKM5lakPf1KhERERERqWpK2k9l9f2QvAQsFyy8BhqOAB//0p2bfbBw+/CWsl87pCl0+D/Y8DSEnwZt7yh7G5VgZ351fV+R6nqjADN2vUltq64vnwDb3zYPVeJnw3kr7Y6o+gsp/bKCIiIiIiI1nZL2omafASH+0GcG1Omcv9NR/BiH47jTTqjhcNjystmOG1a+mLo8Dp0fK9t1K0me28wK/9Oh4tX186PgvNpUXS8qdQ0Fkw1qLXcREREREfEwJe1FHVoL2Q5Y8k84d6nZ1+VxyNhhusd3fQqcfoXH7/3eVFcbXghxZx/fXvfnod4A08W9yRXlj8sLEvYdR0x1fX9O4b7G+dX1xrWtul5U20lm+ARuaH+X3dGIiIiIiEgN47Asy7I7CLulpaURERFB6psQHuyEOl1O3c05aSHM7g8Op1nHO6Q5tPkXtJ1QNUFX0NK9Sxn9xWhyXDnMuGgGQ1oMKfG4XDd8lww/HzTFZAAfB1xQF4ZFme1a70i8Wc89tLndkYiIiIiISDVRkIemphIefuI50FRpLyq0ueke32v6qY9NXW9+Wm7zM2M7rJxoKu6RHSstxI1JG8lz59EptlOF2hn/w3i2p2zHsizGfDOGnXfsPO6Y7UfgvXiIL1JdbxpoqusNAyp0+ZolKM7uCKrWn3/Cp59C375w0UV2RyMiIiIiUqMpaS/q/FVwkiccxTQcAcGPQOae4vtdWac+150Hy8bD/lnQZKSZlb4UXeBfWPwCd/xkJqN76MyHmDJoSuliLYF//mR6DocDf9/iE+vluuGbAzDnUGF13bdIdd2p6nrttW8f9O8P2dngdsP338P559sdlYiIiIhIjVUbpw7zjKBYuGAznLMQ4s4BvwhoOxmiepz63N1fwLY3IHM3bHoW4ueU6pKvLnu1xO3yePPCN+nbqC/d63fng0s/KNj/VyY8shNmF0nYmwXC/U3hvLpK2Gu9LVvgyBGTsDudsFKz5YuIiIiIVCZV2ivCNxjq9YOzfi7beUe71J/o/Qn0adiHbQe34XA46N2wd9mueYz29dqz4MYFBe+z86vrvxxTXR8RDefUUbIu+fr0gU6dYO1aCAuDkSPtjkhEREREpEZT0m6HJleYWef3/QhNR0L9c0p12psXvkmP+j3Ic+dxa89bPRbO1kwzdj0pt3BfiyC4LhbiNHZdigoKgmXLYPVqaNUKoqLsjkhEREREpEbT7PGUfta+mibbDV8lwa8phfv8HHBxNJyl6rqIiIiIiEil0e
zxclKbM+G/8XCgSHW9ZZCZGT7W/8TniYiIiIiISNVR0u4JGbshZTXU6w/+dTzadEpWCj9s/YE2ddvQo0EpJrk7hSwXfHkA5qUU7vNzwCX1YHCkqusiIiIiIiLeREl7RaVuhB97gisTghrC8LUeS9xzXDn0easPW5K34MDBd1d9x/DThpe7vY0Z8H4CJBeprrcOgn/EQYyq6yIiIiIiIl5HS75V1L7vTcIOcGQvJC3yWNPbD21nS/IWAJwOJ99v/b5c7WS54H/x8MKewoTd3wlXxsCdjZWwi4iIiIiIeCtV2iuq3gDMsw83+IZAna4ea7p5ZHNa1GnB9kPbcVkuzm11bpnbWJ8B78fDobzCfW2C4R+xEF1dknVXFiTNh5DmENbS7mhERERERESqjJL2ioruC0MXQNJCaHgBBDfwWNMBvgEs/edSZm6ZSZvoNvRt1LfU52a64PMkWJBapD0nXF4PzogAR3UZu+52weyBcHAZOHzhrJ8hdrDdUYmIiIiIiFQJJe2eEN2XNa5gpv3+PK2iWjGh7wR8nZ75o60bXJfrul5XpnPWHTZj11OKVNfbBcO1cVDXzyNhVZ3D20zCDoAFOz9R0i4iIiIiIrWGknYPyMzN5Mx3zyQ9Ox235cZtubmr/11VH4cLPk2ERWmF+wLzq+sDqlN1vajgxhBYH7LiwXLlD0cQERERERGpHZS0V5BlWexK2UVKVgoAPg4fNidvrvI41hyG/yVAapHqeocQuCYWoqpbdb0o3yA4dyns/BjC25ohCCIiIiIiIrWEkvYKOJxzmCH/HcKSvUuoF1yPpMwkgvyCuLXnrVUWQ4YLPkmEJcdU10fGwOnh1bS6fqzgRtBust1RiIiIiIiIVDkl7RXw1cavWLJ3CQBJmUm8ct4r/KPLPwgLCKuS669Khw8SIa1Idb1jfnW9TnWurouIiIiIiAigpL04y12mwxuEFZ8pvmtc1ypJ2A/nwceJsCy9cF+wD4yqB31qSnVdRERERERElLQX83UzOO9biBlYqsPPbnE204ZPY9Zfsxhx2gj6N+lfufEBK9PhwwRIdxXu6xwKo2MgsrZU1y0LshLBvw74VJfF5m2WmgrPPgtuN0yaBFFRdkckIiIiIiKl4LAsy7I7CLulpaURERFB6psQ3vQMOOf3Krv2B2s+4NVlr9KjQQ+eHfos/idIQtPz4KNEWHFMdf3KGOgdVouq65Yb5o+C3Z9DYCycMx/CWtkdlfe7+GL47jtzo5x9Nvz0k90RiYiIiIjUagV5aGoq4eHhJzxOlfaiHE4Iiquyy+1I2cG1X12LhcXiPYtpWaclE/tOLHaMZZlE/aNEOFykut41FEbHQnht+y+YusEk7ADZB+CvN6Hbk/bGVB2sXWuq7ABr1tgbi4iIiIiIlJrT7gC8StNR0POVKrvc4ZzDWJiODg6Ho2DZuKPS8uD1ffDm/sKEPcQH/lkfbm1QCxN2gMAYcAYATrNue2gzuyOqHu6+u3D7nnvsi0NERERERMpE3eMpfbcET7Msi0k/TeLVZa/SJa4LP1z9A/VC6mFZZpK5jxPNkm5HdQ+Dq2JqabJeVMI82PYWRHaGtpPA6WN3RNXDnj2m60bjxnZHIiIiIiJS65U2D1XSjn1Je0lS8+CDBFh9uHBfmA9cFQs9qmYlOREREREREalkGtNeydYkrOHzDZ/Tu2FvLjjtggq3Z1mwJA0+SYLMItX1nmFmsrkwO/9LZeyC3DSI6FCLZrwTERERERGxn5L2ctifvp9+M/qRlZeF23Lzw9U/cF7r88rdXkou/C8BVh3OY33iBlyWiz5x7bixQSDd7K6u7/wUFlwFuOG026HnSzYHJCIiIiIiUntoIrpy2JK8hczcTNyWG6fDyfJ9y8vVjmXBwlSYsgPWZsAfO+ezeM9ilm9+jfVLr7A/YQfY+hqQP+v4llfNkmsiIiIiIiJSJVRpL4feDXvToV4H1ietJ8QvhCs6XFHmNg7lwvsJsD6jcF9y+nbY9y
pWxhpWBkR4MOIKqNMdEn83y+FFdDA/RUREREREpEooaS+HIL8glt+8nJX7V9I6qjX1QuqV+lzLggWp8FkSZBUpWvcNh7ZN8pi03qyhfVvv2zwddvl0fQJCmkJOMrQeZ3c0IiIiIiIitYpmj6fqZo9PzoX342FjZuG+SF+4JhY6hZr3W5K3kJ2XTafYTpUWh4iIiIiIiNhLs8d7EcuCP1Lh8yTILlJdPz0CrqgHwUWWGT+t7mlVH6CIiIiIiIh4JSXtlexAjhm7vqlIdb2OL1wbBx1C7ItLREREREREvJ+S9gqyLIvM3EyC/YJxFFnD3LJgXgp8eaB4dX1ABFxeD4J8jm9LREREREREpChNBV4BGTkZDHh7AKFTQxn83mCO5B4BICkHntsDHyUWJuxRfjChkamwH8k5yE9//URiRqKN0UuBw4dh+XI4csTuSERERERERIpRpb0Cvtz4JQv3LARg3s55zNzyPXUbXM5XByCnSHV9YCRcFg2BPpCYkUinaZ1IzEgkMjCSVbesomlk0zJdN/5wPDmuHJpENPHgt6ml9u+HHj3MzxYtYMUKiIy0OyoRERERERFAlfYKqR9Wv/CNXz1+dfXgk8TChL2uH9zRCEbHmoQd4Ne/fy2osKdkpfDjXz+W6ZofrPmAhs81pOkLTZn6x1RPfI3a7bvvTMIOsH07zJljbzwiIiIiIiJFKGmvgCEthvDyea/Sp8P9DDrzV1wBzQs+GxQJDzWDtsdMNte9fnf8ffxx4MDH4UOfRn3KdM2nFz6N2zJPBZ5Y8EQFv4HQqRM4HOB0go8PtG9vd0QiIiIiIiIF1D2+AnYfyWFb6KV06FAXX6cfANF+cF0cnBZc8jmt67ZmyT+X8NNfPzG4+WC6xnUt0zU7xnRkbeJaHDhoF92ugt9A6NcPZs2CuXPhgguUtIuIiIiIiFdxWJZl2R2E3Uq7qP1Rbgu+TczkxgVfcygrnWC/EC5rdwnDY0K4OBoCKrH/wuGcwzyz8BmO5B5hUr9JxIbGVt7FREREREREpFKUNg9Vpb2M9mfDe/Hwa3wih7LSAcjM/Jvmh79iVMdrKv36of6hTBk0pdKvIyIiIiIiIvZT0l5Kbgt+PgjfJUOeBRGBETgAR8ovuA98xeAzf7Y7RBEREREREalhlLSXwr5seDcedmYV7msbXof/9uzM0m3rOeusTxnUbJBt8YmIiIiIiEjNpKT9JFwW/HQQZiabbQAHcE4UjKgLfs5+XHNaP1tjFBERERERkZpLSfsJ7Mky1fXd2YX76vubmeGbB9kXl4iIiIiIiNQeStqPkeeGHw/CDweLV9eHRcEFdcFPK9uLiIiIiIhIFakxKeirr75Ks2bNCAwMpE+fPixdurTMbezJgqm7zGRzRxP2BgFwbxO4pJ4SdhEREREREalaNSIN/eSTT5g0aRIPPfQQK1eupEuXLgwbNozExMQytfPsHtiT3x3e6YDz68L9TaDZSbrD57hy2JK8hVxXbgW+QSnlpMDhv8GyKv9aIiIiIiIiYrsakbQ/99xz3HTTTdxwww20b9+e6dOnExwczNtvv12mdtz5uXCjALivCVwUDb4n+RM6eOQg7V9tT5tX2tB5emdSs1Ir8C1OIXE+fNUAvm0BS8ZU3nVERERERETEa1T7Me05OTmsWLGC++67r2Cf0+lkyJAhLFq0qMRzsrOzyc4unGEuNdUk23kZaZwbBedEgm8OpOWc/NqfrPmEbfu3AbBpzya+WPUFl7e/vGJf6ERWvQjp+WvOrXsHWj4IAVGVcy0RERERERGpVGlpaQBYp+hJXe2T9gMHDuByuYiNjS22PzY2lk2bNpV4ztSpU3n44YeP2/+/MxvzvwrEMuaJMYyhiqrgNzWvmuuIiIiIiIhIpUlPTyciIuKEn1f7pL087rvvPiZNmlTwPiUlhaZNm7Jr166T/mGJVGdpaWk0btyY3bt3Ex4ebnc4IpVC97nUFrrXpTbQfS41nWVZpKen06BBg5MeV+
2T9ujoaHx8fEhISCi2PyEhgbi4uBLPCQgIICAg4Lj9ERER+gdBarzw8HDd51Lj6T6X2kL3utQGus+lJitN0bjaT0Tn7+9Pjx49mDt3bsE+t9vN3Llz6devn42RiYiIiIiIiFRMta+0A0yaNInrrruOnj170rt3b1544QUyMjK44YYb7A5NREREREREpNxqRNI+atQokpKSePDBB4mPj6dr1678+OOPx01OdyIBAQE89NBDJXaZF6kpdJ9LbaD7XGoL3etSG+g+FzEc1qnmlxcRERERERERW1T7Me0iIiIiIiIiNZWSdhEREREREREvpaRdRERERERExEspaRcRERERERHxUrU+aX/11Vdp1qwZgYGB9OnTh6VLl9odkkipTZ06lV69ehEWFkZMTAwXX3wxmzdvLnZMVlYW48ePp27duoSGhnLZZZeRkJBQ7Jhdu3YxfPhwgoODiYmJ4a677iIvL68qv4pIqT3xxBM4HA4mTpxYsE/3udQUe/fu5ZprrqFu3boEBQXRqVMnli9fXvC5ZVk8+OCD1K9fn6CgIIYMGcLWrVuLtXHw4EFGjx5NeHg4kZGRjBkzhsOHD1f1VxEpkcvl4oEHHqB58+YEBQXRsmVLHn30UYrOja37XKS4Wp20f/LJJ0yaNImHHnqIlStX0qVLF4YNG0ZiYqLdoYmUyrx58xg/fjyLFy9m9uzZ5ObmMnToUDIyMgqOueOOO/juu+/47LPPmDdvHvv27ePSSy8t+NzlcjF8+HBycnJYuHAh7733Hu+++y4PPvigHV9J5KSWLVvG66+/TufOnYvt130uNcGhQ4fo378/fn5+zJo1iw0bNvDss89Sp06dgmOeeuopXnrpJaZPn86SJUsICQlh2LBhZGVlFRwzevRo1q9fz+zZs5k5cya///47N998sx1fSeQ4Tz75JNOmTeOVV15h48aNPPnkkzz11FO8/PLLBcfoPhc5hlWL9e7d2xo/fnzBe5fLZTVo0MCaOnWqjVGJlF9iYqIFWPPmzbMsy7JSUlIsPz8/67PPPis4ZuPGjRZgLVq0yLIsy/rhhx8sp9NpxcfHFxwzbdo0Kzw83MrOzq7aLyByEunp6Vbr1q2t2bNnW2eeeaY1YcIEy7J0n0vNcc8991gDBgw44edut9uKi4uznn766YJ9KSkpVkBAgPXRRx9ZlmVZGzZssABr2bJlBcfMmjXLcjgc1t69eysveJFSGj58uHXjjTcW23fppZdao0ePtixL97lISWptpT0nJ4cVK1YwZMiQgn1Op5MhQ4awaNEiGyMTKb/U1FQAoqKiAFixYgW5ubnF7vO2bdvSpEmTgvt80aJFdOrUidjY2IJjhg0bRlpaGuvXr6/C6EVObvz48QwfPrzY/Qy6z6Xm+Pbbb+nZsydXXHEFMTExdOvWjTfffLPg87///pv4+Phi93pERAR9+vQpdq9HRkbSs2fPgmOGDBmC0+lkyZIlVfdlRE7g9NNPZ+7cuWzZsgWA1atXM3/+fM477zxA97lISXztDsAuBw4cwOVyFfsFDiA2NpZNmzbZFJVI+bndbiZOnEj//v3p2LEjAPHx8fj7+xMZGVns2NjYWOLj4wuOKenvwdHPRLzBxx9/zMqVK1m2bNlxn+k+l5pi+/btTJs2jUmTJvF///d/LFu2jH/961/4+/tz3XXXFdyrJd3LRe/1mJiYYp/7+voSFRWle128wr333ktaWhpt27bFx8cHl8vF448/zujRowF0n4uUoNYm7SI1zfjx41m3bh3z58+3OxQRj9q9ezcTJkxg9uzZBAYG2h2OSKVxu9307NmT//znPwB069aNdevWMX36dK677jqboxPxjE8//ZQPPviADz/8kA4dOrBq1SomTpxIgwYNdJ+LnECt7R4fHR2Nj4/PcbMLJyQkEBcXZ1NUIuVz2223MXPmTH799VcaNWpUsD8uLo6cnBxSUlKKHV/0Po+Liyvx78HRz0TstmLFChITE+nevTu+vr74+v
oyb948XnrpJXx9fYmNjdV9LjVC/fr1ad++fbF97dq1Y9euXUDhvXqy313i4uKOm1A3Ly+PgwcP6l4Xr3DXXXdx7733cuWVV9KpUyeuvfZa7rjjDqZOnQroPhcpSa1N2v39/enRowdz584t2Od2u5k7dy79+vWzMTKR0rMsi9tuu42vvvqKX375hebNmxf7vEePHvj5+RW7zzdv3syuXbsK7vN+/fqxdu3aYv/zmz17NuHh4cf98ihih7PPPpu1a9eyatWqglfPnj0ZPXp0wbbuc6kJ+vfvf9yynVu2bKFp06YANG/enLi4uGL3elpaGkuWLCl2r6ekpLBixYqCY3755Rfcbjd9+vSpgm8hcnKZmZk4ncVTEB8fH9xuN6D7XKREds+EZ6ePP/7YCggIsN59911rw4YN1s0332xFRkYWm11YxJuNHTvWioiIsH777Tdr//79Ba/MzMyCY2699VarSZMm1i+//GItX77c6tevn9WvX7+Cz/Py8qyOHTtaQ4cOtVatWmX9+OOPVr169az77rvPjq8kUipFZ4+3LN3nUjMsXbrU8vX1tR5//HFr69at1gcffGAFBwdb//vf/wqOeeKJJ6zIyEjrm2++sdasWWNddNFFVvPmza0jR44UHHPuueda3bp1s5YsWWLNnz/fat26tXXVVVfZ8ZVEjnPddddZDRs2tGbOnGn9/fff1pdffmlFR0dbd999d8Exus9FiqvVSbtlWdbLL79sNWnSxPL397d69+5tLV682O6QREoNKPH1zjvvFBxz5MgRa9y4cVadOnWs4OBg65JLLrH2799frJ0dO3ZY5513nhUUFGRFR0dbd955p5Wbm1vF30ak9I5N2nWfS03x3XffWR07drQCAgKstm3bWm+88Uaxz91ut/XAAw9YsbGxVkBAgHX22WdbmzdvLnZMcnKyddVVV1mhoaFWeHi4dcMNN1jp6elV+TVETigtLc2aMGGC1aRJEyswMNBq0aKFdf/99xdbflP3uUhxDsuyLDsr/SIiIiIiIiJSslo7pl1ERERERETE2ylpFxEREREREfFSStpFREREREREvJSSdhEREREREREvpaRdRERERERExEspaRcRERERERHxUkraRURERERERLyUknYRERERERERL6WkXURERDxiypQpdO3a1e4wABg0aBATJ060OwwREZEKU9IuIiLiZeLj45kwYQKtWrUiMDCQ2NhY+vfvz7Rp08jMzLQ7vHKZMmUKDofjpK/y+O2333A4HKSkpHg2YBERES/ha3cAIiIiUmj79u3079+fyMhI/vOf/9CpUycCAgJYu3Ytb7zxBg0bNmTEiBElnpubm4ufn18VR1w6kydP5tZbby1436tXL26++WZuuummEo/PycnB39+/qsITERHxWqq0i4iIeJFx48bh6+vL8uXLGTlyJO3ataNFixZcdNFFfP/991x44YUFxzocDqZNm8aIESMICQnh8ccfB2DatGm0bNkSf39/2rRpw/vvv19wzo4dO3A4HKxatapgX0pKCg6Hg99++w0orF7PnTuXnj17EhwczOmnn87mzZuLxfrEE08QGxtLWFgYY8aMISsr64TfKzQ0lLi4uIKXj48PYWFhBe+vvPJKbrvtNiZOnEh0dDTDhg07Zaw7duxg8ODBANSpUweHw8H1119fcKzb7ebuu+8mKiqKuLg4pkyZUsb/GiIiIvZT0i4iIuIlkpOT+fnnnxk/fjwhISElHnNsN/IpU6ZwySWXsHbtWm688Ua++uorJkyYwJ133sm6deu45ZZbuOGGG/j111/LHM/999/Ps88+y/Lly/H19eXGG28s+OzTTz9lypQp/Oc//2H58uXUr1+f1157rczXKOq9997D39+fBQsWMH369FMe37hxY7744gsANm/ezP79+3nxxReLtRcSEsKSJUt46qmneOSRR5g9e3aFYhQREalq6h4vIiLiJf766y8sy6JNmzbF9kdHRxdUscePH8+TTz5Z8N
nVV1/NDTfcUPD+qquu4vrrr2fcuHEATJo0icWLF/PMM88UVKVL6/HHH+fMM88E4N5772X48OFkZWURGBjICy+8wJgxYxgzZgwAjz32GHPmzDlptf1UWrduzVNPPVXwfseOHSc93sfHh6ioKABiYmKIjIws9nnnzp156KGHCtp+5ZVXmDt3Luecc065YxQREalqqrSLiIh4uaVLl7Jq1So6dOhAdnZ2sc969uxZ7P3GjRvp379/sX39+/dn48aNZb5u586dC7br168PQGJiYsF1+vTpU+z4fv36lfkaRfXo0aNC5x+raPxgvsPR+EVERKoLVdpFRES8RKtWrXA4HMeNHW/RogUAQUFBx51zom70J+J0muf1lmUV7MvNzS3x2KKT2h3tlu92u8t0vbI49ruUJdaSHDspn8PhqNT4RUREKoMq7SIiIl6ibt26nHPOObzyyitkZGSUq4127dqxYMGCYvsWLFhA+/btAahXrx4A+/fvL/i86ERvZbnOkiVLiu1bvHhxmds5mdLEenSGeZfL5dFri4iIeAtV2kVERLzIa6+9Rv/+/enZsydTpkyhc+fOOJ1Oli1bxqZNm07Zhfyuu+5i5MiRdOvWjSFDhvDdd9/x5ZdfMmfOHMBU6/v27csTTzxB8+bNSUxM5N///neZ45wwYQLXX389PXv2pH///nzwwQesX7++oFeAJ5Qm1qZNm+JwOJg5cybnn38+QUFBhIaGeiwGERERu6nSLiIi4kVatmzJn3/+yZAhQ7jvvvvo0qULPXv25OWXX2by5Mk8+uijJz3/4osv5sUXX+SZZ56hQ4cOvP7667zzzjsMGjSo4Ji3336bvLw8evTowcSJE3nsscfKHOeoUaN44IEHuPvuu+nRowc7d+5k7NixZW7nVE4Va8OGDXn44Ye59957iY2N5bbbbvN4DCIiInZyWEUHiomIiIiIiIiI11ClXURERERERMRLKWkXERERERER8VJK2kVERERERES8lJJ2ERERERERES+lpF1ERERERETESylpFxEREREREfFSStpFREREREREvJSSdhEREREREREvpaRdRERERERExEspaRcRERERERHxUkraRURERERERLzU/wOcvnkEC83LBgAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# test it\n", + "\n", + "Tester.test(bow_lr_pricer)" + ] + }, + { + "cell_type": "code", + "execution_count": 63, + "id": "b623079e-54fa-418f-b209-7d54ebbcc23a", + "metadata": {}, + "outputs": [], + "source": [ + "# The amazing word2vec model, implemented in gensim NLP library\n", + "\n", + "np.random.seed(42)\n", + "\n", + "# Preprocess the documents\n", + "processed_docs = [simple_preprocess(doc) for doc in documents]\n", + "\n", + "# Train Word2Vec model\n", + "w2v_model = Word2Vec(sentences=processed_docs, vector_size=400, window=5, min_count=1, workers=8)" + ] + }, + { + "cell_type": "code", + "execution_count": 64, + "id": "3de4efc7-68a6-4443-b9fd-70ee9d722362", + "metadata": {}, + "outputs": [], + "source": [ + "# This step of averaging vectors across the document is a weakness in our approach\n", + "\n", + "def document_vector(doc):\n", + " doc_words = simple_preprocess(doc)\n", + " word_vectors = [w2v_model.wv[word] for word in doc_words if word in w2v_model.wv]\n", + " return np.mean(word_vectors, axis=0) if word_vectors else np.zeros(w2v_model.vector_size)\n", + "\n", + "# Create feature matrix\n", + "X_w2v = np.array([document_vector(doc) for doc in documents])" + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "id": "9f05eeec-dab8-4007-8e8c-dcf4175b8861", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
LinearRegression()
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
" + ], + "text/plain": [ + "LinearRegression()" + ] + }, + "execution_count": 65, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Run Linear Regression on word2vec\n", + "\n", + "word2vec_lr_regressor = LinearRegression()\n", + "word2vec_lr_regressor.fit(X_w2v, prices)" + ] + }, + { + "cell_type": "code", + "execution_count": 66, + "id": "e43d3fb9-e013-4573-90bf-9a522132b555", + "metadata": {}, + "outputs": [], + "source": [ + "def word2vec_lr_pricer(item):\n", + " doc = item.test_prompt()\n", + " doc_vector = document_vector(doc)\n", + " return max(0, word2vec_lr_regressor.predict([doc_vector])[0])" + ] + }, + { + "cell_type": "code", + "execution_count": 67, + "id": "6740319d-5c8e-4125-9106-97e2e8ab72c7", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[93m1: Guess: $240.89 Truth: $374.41 Error: $133.52 SLE: 0.19 Item: OEM AC Compressor w/A/C Repair Kit For F...\u001b[0m\n", + "\u001b[93m2: Guess: $160.26 Truth: $225.11 Error: $64.85 SLE: 0.11 Item: Motorcraft YB3125 Fan Clutch\u001b[0m\n", + "\u001b[91m3: Guess: $146.82 Truth: $61.68 Error: $85.14 SLE: 0.74 Item: Dorman 603-159 Front Washer Fluid Reserv...\u001b[0m\n", + "\u001b[91m4: Guess: $356.76 Truth: $599.99 Error: $243.23 SLE: 0.27 Item: HP Premium 17.3-inch HD Plus Touchscreen...\u001b[0m\n", + "\u001b[93m5: Guess: $90.47 Truth: $16.99 Error: $73.48 SLE: 2.64 Item: 5-Position Super Switch Pickup Selector ...\u001b[0m\n", + "\u001b[92m6: Guess: $0.00 Truth: $31.99 Error: $31.99 SLE: 12.22 Item: Horror Bookmarks, Resin Horror Bookmarks...\u001b[0m\n", + "\u001b[91m7: Guess: $302.17 Truth: $101.79 Error: $200.38 SLE: 1.17 Item: SK6241 - Stinger 4 Gauge 6000 Series Pow...\u001b[0m\n", + "\u001b[92m8: Guess: $252.65 Truth: $289.00 Error: $36.35 SLE: 0.02 Item: Godox ML60Bi LED Light Kit, Handheld LED...\u001b[0m\n", + "\u001b[91m9: Guess: $353.84 Truth: $635.86 Error: $282.02 SLE: 0.34 Item: Randall RG75DG3PLUS G3 Plus 
100-Watt Com...\u001b[0m\n", + "\u001b[93m10: Guess: $129.15 Truth: $65.99 Error: $63.16 SLE: 0.44 Item: HOLDWILL 6 Pack LED Shop Light, 4FT 24W ...\u001b[0m\n", + "\u001b[93m11: Guess: $354.50 Truth: $254.21 Error: $100.29 SLE: 0.11 Item: Viking Horns V103C/1005ATK 3 Gallon Air ...\u001b[0m\n", + "\u001b[92m12: Guess: $337.24 Truth: $412.99 Error: $75.75 SLE: 0.04 Item: CURT 70110 Custom Tow Bar Base Plate Bra...\u001b[0m\n", + "\u001b[92m13: Guess: $174.80 Truth: $205.50 Error: $30.70 SLE: 0.03 Item: 10-Pack Solar HAMMERED BRONZE Finish Pos...\u001b[0m\n", + "\u001b[92m14: Guess: $251.42 Truth: $248.23 Error: $3.19 SLE: 0.00 Item: COSTWAY Electric Tumble Dryer, Sliver\u001b[0m\n", + "\u001b[93m15: Guess: $255.02 Truth: $399.00 Error: $143.98 SLE: 0.20 Item: FREE SIGNAL TV Transit 32\" 12 Volt DC Po...\u001b[0m\n", + "\u001b[93m16: Guess: $268.43 Truth: $373.94 Error: $105.51 SLE: 0.11 Item: Bilstein 5100 Monotube Gas Shock Set com...\u001b[0m\n", + "\u001b[91m17: Guess: $266.37 Truth: $92.89 Error: $173.48 SLE: 1.10 Item: Sangean K-200 Multi-Function Upright AM/...\u001b[0m\n", + "\u001b[92m18: Guess: $89.79 Truth: $51.99 Error: $37.80 SLE: 0.29 Item: Charles Leonard Magnetic Lapboard Class ...\u001b[0m\n", + "\u001b[91m19: Guess: $308.18 Truth: $179.00 Error: $129.18 SLE: 0.29 Item: Gigabyte AMD Radeon HD 7870 2 GB GDDR5 D...\u001b[0m\n", + "\u001b[91m20: Guess: $116.05 Truth: $19.42 Error: $96.63 SLE: 3.05 Item: 3dRose LLC 8 x 8 x 0.25 Inches Bull Terr...\u001b[0m\n", + "\u001b[93m21: Guess: $326.37 Truth: $539.95 Error: $213.58 SLE: 0.25 Item: ROKINON 85mm F1.4 Auto Focus Full Frame ...\u001b[0m\n", + "\u001b[93m22: Guess: $206.72 Truth: $147.67 Error: $59.05 SLE: 0.11 Item: AUTOSAVER88 Headlight Assembly Compatibl...\u001b[0m\n", + "\u001b[93m23: Guess: $75.57 Truth: $24.99 Error: $50.58 SLE: 1.17 Item: ASI NAUTICAL 2.5 Inches Opera Glasses Bi...\u001b[0m\n", + "\u001b[91m24: Guess: $302.17 Truth: $149.00 Error: $153.17 SLE: 0.50 Item: Behringer TUBE 
OVERDRIVE TO100 Authentic...\u001b[0m\n", + "\u001b[92m25: Guess: $0.00 Truth: $16.99 Error: $16.99 SLE: 8.35 Item: Fun Express Insect Finger Puppets - 24 f...\u001b[0m\n", + "\u001b[91m26: Guess: $162.11 Truth: $7.99 Error: $154.12 SLE: 8.40 Item: WAFJAMF Roller Stamp Identity Theft Stam...\u001b[0m\n", + "\u001b[91m27: Guess: $116.90 Truth: $199.99 Error: $83.09 SLE: 0.28 Item: Capulina Tiffany Floor Lamp 2-Light 16\" ...\u001b[0m\n", + "\u001b[93m28: Guess: $187.53 Truth: $251.45 Error: $63.92 SLE: 0.09 Item: Apple Watch Series 6 (GPS, 44mm) - Space...\u001b[0m\n", + "\u001b[92m29: Guess: $232.91 Truth: $231.62 Error: $1.29 SLE: 0.00 Item: ICON 01725 Tandem Axle Fender Skirt FS17...\u001b[0m\n", + "\u001b[93m30: Guess: $187.82 Truth: $135.00 Error: $52.82 SLE: 0.11 Item: SanDisk 128GB Ultra (10 Pack) MicroSD Cl...\u001b[0m\n", + "\u001b[92m31: Guess: $328.18 Truth: $356.62 Error: $28.44 SLE: 0.01 Item: Velvac 2020,L,C/Hr,W,E2003,102\",Bk - 715...\u001b[0m\n", + "\u001b[92m32: Guess: $268.53 Truth: $257.99 Error: $10.54 SLE: 0.00 Item: TCMT Passenger Backrest Sissy Bar & Lugg...\u001b[0m\n", + "\u001b[91m33: Guess: $176.66 Truth: $27.99 Error: $148.67 SLE: 3.29 Item: Alnicov 63.5MM Brass Tremolo Block,Tremo...\u001b[0m\n", + "\u001b[92m34: Guess: $157.74 Truth: $171.20 Error: $13.46 SLE: 0.01 Item: Subaru Forester Outback Legacy OEM Engin...\u001b[0m\n", + "\u001b[93m35: Guess: $151.78 Truth: $225.00 Error: $73.22 SLE: 0.15 Item: Richmond Auto Upholstery - 2012 Dodge Ra...\u001b[0m\n", + "\u001b[91m36: Guess: $244.83 Truth: $105.00 Error: $139.83 SLE: 0.71 Item: AP-39 Automotive Paint Primer Grey 2K Ur...\u001b[0m\n", + "\u001b[93m37: Guess: $212.29 Truth: $299.99 Error: $87.70 SLE: 0.12 Item: Road Top Wireless Carplay Retrofit Kit D...\u001b[0m\n", + "\u001b[92m38: Guess: $501.15 Truth: $535.09 Error: $33.94 SLE: 0.00 Item: Gibson Performance Exhaust 5658 Aluminiz...\u001b[0m\n", + "\u001b[92m39: Guess: $27.86 Truth: $12.33 Error: $15.53 SLE: 0.60 Item: Bella 
Tunno Happy Links - Baby Montessor...\u001b[0m\n", + "\u001b[91m40: Guess: $201.46 Truth: $84.99 Error: $116.47 SLE: 0.73 Item: CANMORE H300 Handheld GPS Golf Device, S...\u001b[0m\n", + "\u001b[91m41: Guess: $106.00 Truth: $15.99 Error: $90.01 SLE: 3.39 Item: DCPOWER AC Adapter Compatible Replacemen...\u001b[0m\n", + "\u001b[91m42: Guess: $163.11 Truth: $62.44 Error: $100.67 SLE: 0.90 Item: Sharp, VX2128V, Commercial Desktop Calcu...\u001b[0m\n", + "\u001b[91m43: Guess: $197.21 Truth: $82.99 Error: $114.22 SLE: 0.74 Item: Melissa & Doug Lifelike Plush Stork Gian...\u001b[0m\n", + "\u001b[91m44: Guess: $334.93 Truth: $599.95 Error: $265.02 SLE: 0.34 Item: Sony SSCS8 2-Way 3-Driver Center Channel...\u001b[0m\n", + "\u001b[91m45: Guess: $337.41 Truth: $194.99 Error: $142.42 SLE: 0.30 Item: ASUS Chromebook CX1, 14\" Full HD NanoEdg...\u001b[0m\n", + "\u001b[92m46: Guess: $292.20 Truth: $344.95 Error: $52.75 SLE: 0.03 Item: FiiO X7 32GB Hi-Res Lossless Music Playe...\u001b[0m\n", + "\u001b[92m47: Guess: $49.54 Truth: $37.99 Error: $11.55 SLE: 0.07 Item: TORRO Leather Case Compatible with iPhon...\u001b[0m\n", + "\u001b[92m48: Guess: $214.46 Truth: $224.35 Error: $9.89 SLE: 0.00 Item: Universal Air Conditioner KT 1031 A/C Co...\u001b[0m\n", + "\u001b[91m49: Guess: $380.51 Truth: $814.00 Error: $433.49 SLE: 0.58 Item: Street Series Stainless Performance Cat-...\u001b[0m\n", + "\u001b[92m50: Guess: $525.79 Truth: $439.88 Error: $85.91 SLE: 0.03 Item: Lenovo IdeaPad 3 14-inch Laptop, 14.0-in...\u001b[0m\n", + "\u001b[92m51: Guess: $307.51 Truth: $341.43 Error: $33.92 SLE: 0.01 Item: Access Bed Covers TonnoSport 22050219 - ...\u001b[0m\n", + "\u001b[91m52: Guess: $262.70 Truth: $46.78 Error: $215.92 SLE: 2.92 Item: G.I. 
JOE Hasbro 3 3/4\" Wave 5 Action Fig...\u001b[0m\n", + "\u001b[92m53: Guess: $162.26 Truth: $171.44 Error: $9.18 SLE: 0.00 Item: T&S Brass B-0232-BST Double Pantry Fauce...\u001b[0m\n", + "\u001b[91m54: Guess: $212.51 Truth: $458.00 Error: $245.49 SLE: 0.59 Item: ZTUOAUMA Fuel Injection Pump 3090942 309...\u001b[0m\n", + "\u001b[91m55: Guess: $278.03 Truth: $130.75 Error: $147.28 SLE: 0.56 Item: 2AP18AA#ABA Hp Prime Graphing Calculator...\u001b[0m\n", + "\u001b[93m56: Guess: $163.31 Truth: $83.81 Error: $79.50 SLE: 0.44 Item: Lowrance 000-0119-83 Nmea 2000 25' Exten...\u001b[0m\n", + "\u001b[91m57: Guess: $196.01 Truth: $386.39 Error: $190.38 SLE: 0.46 Item: Jeep Genuine Accessories 82213051 Hood L...\u001b[0m\n", + "\u001b[93m58: Guess: $229.61 Truth: $169.00 Error: $60.61 SLE: 0.09 Item: GODOX CB-06 Hard Carrying Case with Whee...\u001b[0m\n", + "\u001b[91m59: Guess: $116.47 Truth: $17.95 Error: $98.52 SLE: 3.33 Item: Au-Tomotive Gold, INC. Ford Black Valet ...\u001b[0m\n", + "\u001b[91m60: Guess: $111.87 Truth: $269.00 Error: $157.13 SLE: 0.76 Item: Snailfly Black Roof Rack Rail + Cross Ba...\u001b[0m\n", + "\u001b[93m61: Guess: $156.11 Truth: $77.77 Error: $78.34 SLE: 0.48 Item: KING SHA Anti Glare LED Track Lighting H...\u001b[0m\n", + "\u001b[91m62: Guess: $256.26 Truth: $88.99 Error: $167.27 SLE: 1.10 Item: APS Compatible with Chevy Silverado 1500...\u001b[0m\n", + "\u001b[92m63: Guess: $338.73 Truth: $364.41 Error: $25.68 SLE: 0.01 Item: Wilwood Engineering 14011291R Brake Cali...\u001b[0m\n", + "\u001b[93m64: Guess: $184.24 Truth: $127.03 Error: $57.21 SLE: 0.14 Item: ACDelco Gold 336-1925A Starter, Remanufa...\u001b[0m\n", + "\u001b[91m65: Guess: $325.08 Truth: $778.95 Error: $453.87 SLE: 0.76 Item: UWS EC10783 69-Inch Matte Black Heavy-Wa...\u001b[0m\n", + "\u001b[91m66: Guess: $560.23 Truth: $206.66 Error: $353.57 SLE: 0.99 Item: Dell Latitude E5440 14in Business Laptop...\u001b[0m\n", + "\u001b[91m67: Guess: $116.51 Truth: $35.94 Error: $80.57 SLE: 
1.34 Item: (Plug and Play) Spare Tire Brake Light W...\u001b[0m\n", + "\u001b[92m68: Guess: $186.66 Truth: $149.00 Error: $37.66 SLE: 0.05 Item: The Ultimate Roadside Rescue Assistant\u001b[0m\n", + "\u001b[92m69: Guess: $238.54 Truth: $251.98 Error: $13.44 SLE: 0.00 Item: Brand New 18\" x 8.5\" Replacement Wheel f...\u001b[0m\n", + "\u001b[92m70: Guess: $177.72 Truth: $160.00 Error: $17.72 SLE: 0.01 Item: Headlight Headlamp LH Left & RH Right Pa...\u001b[0m\n", + "\u001b[91m71: Guess: $204.26 Truth: $39.99 Error: $164.27 SLE: 2.60 Item: Lilo And Stitch Deluxe Oversize Print La...\u001b[0m\n", + "\u001b[93m72: Guess: $219.33 Truth: $362.41 Error: $143.08 SLE: 0.25 Item: AC Compressor & A/C Clutch For Hyundai A...\u001b[0m\n", + "\u001b[91m73: Guess: $192.85 Truth: $344.00 Error: $151.15 SLE: 0.33 Item: House Of Troy PIN475-AB Pinnacle Collect...\u001b[0m\n", + "\u001b[91m74: Guess: $185.12 Truth: $25.09 Error: $160.03 SLE: 3.86 Item: Juno T29 WH Floating Electrical Feed Sin...\u001b[0m\n", + "\u001b[93m75: Guess: $122.04 Truth: $175.95 Error: $53.91 SLE: 0.13 Item: Sherman GO-PARTS - for 2013-2016 Toyota ...\u001b[0m\n", + "\u001b[91m76: Guess: $284.74 Truth: $132.64 Error: $152.10 SLE: 0.58 Item: Roland RPU-3 Electronic Keyboard Pedal o...\u001b[0m\n", + "\u001b[93m77: Guess: $328.23 Truth: $422.99 Error: $94.76 SLE: 0.06 Item: Rockland VMI14 12,000 Pound 12 Volt DC E...\u001b[0m\n", + "\u001b[91m78: Guess: $273.18 Truth: $146.48 Error: $126.70 SLE: 0.38 Item: Max Advanced Brakes Elite XDS Front Cros...\u001b[0m\n", + "\u001b[92m79: Guess: $119.48 Truth: $156.83 Error: $37.35 SLE: 0.07 Item: Quality-Built 11030 Premium Quality Alte...\u001b[0m\n", + "\u001b[92m80: Guess: $227.29 Truth: $251.99 Error: $24.70 SLE: 0.01 Item: Lucida LG-510 Student Classical Guitar, ...\u001b[0m\n", + "\u001b[91m81: Guess: $280.51 Truth: $940.33 Error: $659.82 SLE: 1.46 Item: Longacre 52-79800 Aluminum Turn Plates\u001b[0m\n", + "\u001b[91m82: Guess: $144.41 Truth: $52.99 Error: 
$91.42 SLE: 0.98 Item: Motion Pro 08-0380 Adjustable Torque Wre...\u001b[0m\n", + "\u001b[91m83: Guess: $112.48 Truth: $219.95 Error: $107.47 SLE: 0.44 Item: Glyph Thunderbolt 3 NVMe Dock (0 GB)\u001b[0m\n", + "\u001b[93m84: Guess: $294.11 Truth: $441.03 Error: $146.92 SLE: 0.16 Item: TOYO Open Country MT Performance Radial ...\u001b[0m\n", + "\u001b[91m85: Guess: $307.33 Truth: $168.98 Error: $138.35 SLE: 0.35 Item: Razer Seiren X USB Streaming Microphone ...\u001b[0m\n", + "\u001b[91m86: Guess: $83.75 Truth: $2.49 Error: $81.26 SLE: 10.17 Item: Happy Birthday to Dad From Your Daughter...\u001b[0m\n", + "\u001b[91m87: Guess: $252.42 Truth: $98.62 Error: $153.80 SLE: 0.87 Item: Little Tikes My Real Jam First Concert S...\u001b[0m\n", + "\u001b[92m88: Guess: $207.86 Truth: $256.95 Error: $49.09 SLE: 0.04 Item: Studio M Peace and Harmony Art Pole Comm...\u001b[0m\n", + "\u001b[91m89: Guess: $165.09 Truth: $30.99 Error: $134.10 SLE: 2.71 Item: MyVolts 12V Power Supply Adaptor Compati...\u001b[0m\n", + "\u001b[93m90: Guess: $440.60 Truth: $569.84 Error: $129.24 SLE: 0.07 Item: Dell Latitude 7212 Rugged Extreme Tablet...\u001b[0m\n", + "\u001b[93m91: Guess: $118.73 Truth: $177.99 Error: $59.26 SLE: 0.16 Item: Covermates Contour Fit Car Cover - Light...\u001b[0m\n", + "\u001b[91m92: Guess: $394.91 Truth: $997.99 Error: $603.08 SLE: 0.86 Item: Westin 57-4025 Black HDX Grille Guard fi...\u001b[0m\n", + "\u001b[93m93: Guess: $291.07 Truth: $219.00 Error: $72.07 SLE: 0.08 Item: Fieldpiece JL2 Job Link Wireless App Tra...\u001b[0m\n", + "\u001b[92m94: Guess: $230.56 Truth: $225.55 Error: $5.01 SLE: 0.00 Item: hansgrohe Talis S Modern Premium Easy Cl...\u001b[0m\n", + "\u001b[93m95: Guess: $379.39 Truth: $495.95 Error: $116.56 SLE: 0.07 Item: G-Technology G-SPEED eS PRO High-Perform...\u001b[0m\n", + "\u001b[91m96: Guess: $342.31 Truth: $942.37 Error: $600.06 SLE: 1.02 Item: DreamLine SHDR-1960723L-01 Shower Door, ...\u001b[0m\n", + "\u001b[91m97: Guess: $170.47 Truth: $1.94 
Error: $168.53 SLE: 16.53 Item: Sanctuary Square Backplate Finish: Oiled...\u001b[0m\n", + "\u001b[91m98: Guess: $162.22 Truth: $284.34 Error: $122.12 SLE: 0.31 Item: Pelican Protector 1750 Long Case - Multi...\u001b[0m\n", + "\u001b[93m99: Guess: $223.83 Truth: $171.90 Error: $51.93 SLE: 0.07 Item: Brock Replacement Driver and Passenger H...\u001b[0m\n", + "\u001b[92m100: Guess: $115.94 Truth: $144.99 Error: $29.05 SLE: 0.05 Item: Carlinkit Ai Box Mini, Android 11, Multi...\u001b[0m\n", + "\u001b[93m101: Guess: $294.22 Truth: $470.47 Error: $176.25 SLE: 0.22 Item: StarDot NetCamLIVE2 YouTube Live Stream ...\u001b[0m\n", + "\u001b[91m102: Guess: $169.76 Truth: $66.95 Error: $102.81 SLE: 0.85 Item: Atomic Compatible FILXXCAR0016 16x25x5 M...\u001b[0m\n", + "\u001b[91m103: Guess: $0.00 Truth: $117.00 Error: $117.00 SLE: 22.76 Item: Bandai Awakening of S. H. s.h.figuarts s...\u001b[0m\n", + "\u001b[91m104: Guess: $361.25 Truth: $172.14 Error: $189.11 SLE: 0.55 Item: Fit System 62135G Passenger Side Towing ...\u001b[0m\n", + "\u001b[93m105: Guess: $247.07 Truth: $392.74 Error: $145.67 SLE: 0.21 Item: Black Horse Black Aluminum Exceed Runnin...\u001b[0m\n", + "\u001b[92m106: Guess: $24.88 Truth: $16.99 Error: $7.89 SLE: 0.13 Item: Dearsun Twinkle Star Color Night Light P...\u001b[0m\n", + "\u001b[91m107: Guess: $83.32 Truth: $1.34 Error: $81.98 SLE: 12.85 Item: Pokemon - Gallade Spirit Link (83/108) -...\u001b[0m\n", + "\u001b[92m108: Guess: $322.47 Truth: $349.98 Error: $27.51 SLE: 0.01 Item: Ibanez GA34STCE-NT GIO Series Classical ...\u001b[0m\n", + "\u001b[93m109: Guess: $270.89 Truth: $370.71 Error: $99.82 SLE: 0.10 Item: Set 2 Heavy Duty 12-16.5 12x16.5 12 Ply ...\u001b[0m\n", + "\u001b[91m110: Guess: $192.37 Truth: $65.88 Error: $126.49 SLE: 1.13 Item: Hairpin Table Legs 28\" Heavy Duty Hairpi...\u001b[0m\n", + "\u001b[93m111: Guess: $182.79 Truth: $229.99 Error: $47.20 SLE: 0.05 Item: Marada Racing Seat with Adjustable Slide...\u001b[0m\n", + "\u001b[91m112: 
Guess: $118.81 Truth: $9.14 Error: $109.67 SLE: 6.10 Item: Remington Industries 24UL1007STRWHI25 24...\u001b[0m\n", + "\u001b[91m113: Guess: $414.69 Truth: $199.00 Error: $215.69 SLE: 0.54 Item: Acer S3-391-6046 13.3-inch Ultrabook, In...\u001b[0m\n", + "\u001b[91m114: Guess: $254.15 Truth: $109.99 Error: $144.16 SLE: 0.69 Item: ICBEAMER 7\" RGB LED Headlights Bulb Halo...\u001b[0m\n", + "\u001b[91m115: Guess: $310.71 Truth: $570.42 Error: $259.71 SLE: 0.37 Item: R1 Concepts Front Rear Brakes and Rotors...\u001b[0m\n", + "\u001b[92m116: Guess: $291.88 Truth: $279.99 Error: $11.89 SLE: 0.00 Item: Camplux 2.64 GPM Tankless , Outdoor Port...\u001b[0m\n", + "\u001b[91m117: Guess: $167.02 Truth: $30.99 Error: $136.03 SLE: 2.75 Item: KNOKLOCK 10 Pack 3.75 Inch(96mm) Kitchen...\u001b[0m\n", + "\u001b[91m118: Guess: $189.77 Truth: $31.99 Error: $157.78 SLE: 3.08 Item: Valley Enterprises Yaesu USB FTDI CT-62 ...\u001b[0m\n", + "\u001b[93m119: Guess: $76.96 Truth: $15.90 Error: $61.06 SLE: 2.34 Item: G9 LED Light Bulbs,8W,75W 100W replaceme...\u001b[0m\n", + "\u001b[93m120: Guess: $103.82 Truth: $45.99 Error: $57.83 SLE: 0.64 Item: ZCHAOZ 4 Lights Antique White Farmhouse ...\u001b[0m\n", + "\u001b[91m121: Guess: $222.81 Truth: $113.52 Error: $109.29 SLE: 0.45 Item: Honeywell TH8320R1003 Honeywell VisionPr...\u001b[0m\n", + "\u001b[93m122: Guess: $319.32 Truth: $516.99 Error: $197.67 SLE: 0.23 Item: Patriot Exhaust H8013-1 1-7/8\" Clippster...\u001b[0m\n", + "\u001b[92m123: Guess: $188.13 Truth: $196.99 Error: $8.86 SLE: 0.00 Item: Fitrite Autopart New Front Left Driver S...\u001b[0m\n", + "\u001b[93m124: Guess: $5.98 Truth: $46.55 Error: $40.57 SLE: 3.68 Item: Technical Precision Replacement for GE G...\u001b[0m\n", + "\u001b[93m125: Guess: $237.53 Truth: $356.99 Error: $119.46 SLE: 0.16 Item: Covercraft Carhartt SeatSaver Front Row ...\u001b[0m\n", + "\u001b[91m126: Guess: $136.65 Truth: $319.95 Error: $183.30 SLE: 0.72 Item: Sennheiser SD Pro 2 (506008) - 
Double-Si...\u001b[0m\n", + "\u001b[91m127: Guess: $232.87 Truth: $96.06 Error: $136.81 SLE: 0.77 Item: Hitachi MAF0110 Mass Air Flow Sensor\u001b[0m\n", + "\u001b[91m128: Guess: $295.44 Truth: $190.99 Error: $104.45 SLE: 0.19 Item: AmScope SE305R-P-LED-PS36A 10X-30X LED C...\u001b[0m\n", + "\u001b[91m129: Guess: $110.47 Truth: $257.95 Error: $147.48 SLE: 0.71 Item: Front Left Driver Side Window Regulator ...\u001b[0m\n", + "\u001b[91m130: Guess: $180.50 Truth: $62.95 Error: $117.55 SLE: 1.09 Item: Premium Replica Hubcap Set, Fits Nissan ...\u001b[0m\n", + "\u001b[92m131: Guess: $73.69 Truth: $47.66 Error: $26.03 SLE: 0.18 Item: Excellerations Phonics Spelling Game for...\u001b[0m\n", + "\u001b[92m132: Guess: $253.02 Truth: $226.99 Error: $26.03 SLE: 0.01 Item: RC4WD BigDog Dual Axle Scale Car/Truck T...\u001b[0m\n", + "\u001b[92m133: Guess: $351.26 Truth: $359.95 Error: $8.69 SLE: 0.00 Item: Unknown Stage 2 Clutch Kit - Low Altitud...\u001b[0m\n", + "\u001b[91m134: Guess: $249.37 Truth: $78.40 Error: $170.97 SLE: 1.32 Item: 2002-2008 Dodge Ram 1500 Mopar 4X4 Emble...\u001b[0m\n", + "\u001b[91m135: Guess: $339.45 Truth: $172.77 Error: $166.68 SLE: 0.45 Item: Pro Comp Alloys Series 89 Wheel with Pol...\u001b[0m\n", + "\u001b[91m136: Guess: $156.66 Truth: $316.45 Error: $159.79 SLE: 0.49 Item: Detroit Axle - Front Rear Strut & Coil S...\u001b[0m\n", + "\u001b[92m137: Guess: $103.48 Truth: $87.99 Error: $15.49 SLE: 0.03 Item: ECCPP Rear Wheel Axle Replacement fit fo...\u001b[0m\n", + "\u001b[91m138: Guess: $323.00 Truth: $226.63 Error: $96.37 SLE: 0.12 Item: Dell Latitude E6520 Intel i7-2720QM 2.20...\u001b[0m\n", + "\u001b[91m139: Guess: $152.72 Truth: $31.49 Error: $121.23 SLE: 2.42 Item: F FIERCE CYCLE 251pcs Black Universal Mo...\u001b[0m\n", + "\u001b[92m140: Guess: $173.89 Truth: $196.00 Error: $22.11 SLE: 0.01 Item: Flash Furniture 4 Pk. 
HERCULES Series 88...\u001b[0m\n", + "\u001b[91m141: Guess: $183.41 Truth: $78.40 Error: $105.01 SLE: 0.71 Item: B&M 30287 Throttle Valve/Kickdown Cable,...\u001b[0m\n", + "\u001b[91m142: Guess: $224.91 Truth: $116.25 Error: $108.66 SLE: 0.43 Item: Gates TCK226 PowerGrip Premium Timing Be...\u001b[0m\n", + "\u001b[91m143: Guess: $251.87 Truth: $112.78 Error: $139.09 SLE: 0.64 Item: Monroe Shocks & Struts Quick-Strut 17149...\u001b[0m\n", + "\u001b[91m144: Guess: $158.10 Truth: $27.32 Error: $130.78 SLE: 2.98 Item: Feit Electric BPMR16/GU10/930CA/6 35W EQ...\u001b[0m\n", + "\u001b[93m145: Guess: $201.79 Truth: $145.91 Error: $55.88 SLE: 0.10 Item: Yellow Jacket 2806 Contractor Extension ...\u001b[0m\n", + "\u001b[92m146: Guess: $178.66 Truth: $171.09 Error: $7.57 SLE: 0.00 Item: Garage-Pro Tailgate SET Compatible with ...\u001b[0m\n", + "\u001b[93m147: Guess: $223.82 Truth: $167.95 Error: $55.87 SLE: 0.08 Item: 3M Perfect It Buffing and Polishing Kit ...\u001b[0m\n", + "\u001b[93m148: Guess: $93.78 Truth: $28.49 Error: $65.29 SLE: 1.36 Item: Chinese Style Dollhouse Model DIY Miniat...\u001b[0m\n", + "\u001b[91m149: Guess: $234.40 Truth: $122.23 Error: $112.17 SLE: 0.42 Item: Generic NRG Innovations SRK-161H Steerin...\u001b[0m\n", + "\u001b[91m150: Guess: $127.55 Truth: $32.99 Error: $94.56 SLE: 1.77 Item: Learning Resources Coding Critters Range...\u001b[0m\n", + "\u001b[91m151: Guess: $236.22 Truth: $71.20 Error: $165.02 SLE: 1.42 Item: Bosch Automotive 15463 Oxygen Sensor, OE...\u001b[0m\n", + "\u001b[93m152: Guess: $48.29 Truth: $112.75 Error: $64.46 SLE: 0.70 Item: Case of 24-2 Inch Blue Painters Tape - 6...\u001b[0m\n", + "\u001b[92m153: Guess: $133.66 Truth: $142.43 Error: $8.77 SLE: 0.00 Item: MOCA Engine Water Pump & Fan Clutch fit ...\u001b[0m\n", + "\u001b[91m154: Guess: $170.36 Truth: $398.99 Error: $228.63 SLE: 0.72 Item: SAREMAS Foot Step Bars for Hyundai Palis...\u001b[0m\n", + "\u001b[92m155: Guess: $378.40 Truth: $449.00 Error: $70.60 SLE: 0.03 
Item: Gretsch G9210 Square Neck Boxcar Mahogan...\u001b[0m\n", + "\u001b[91m156: Guess: $278.33 Truth: $189.00 Error: $89.33 SLE: 0.15 Item: NikoMaku Mirror Dash Cam Front and Rear ...\u001b[0m\n", + "\u001b[93m157: Guess: $193.41 Truth: $120.91 Error: $72.50 SLE: 0.22 Item: Fenix HP25R v2.0 USB-C Rechargeable Head...\u001b[0m\n", + "\u001b[91m158: Guess: $297.42 Truth: $203.53 Error: $93.89 SLE: 0.14 Item: R&L Racing Heavy Duty Roll-Up Soft Tonne...\u001b[0m\n", + "\u001b[93m159: Guess: $267.84 Truth: $349.99 Error: $82.15 SLE: 0.07 Item: Garmin 010-02258-10 GPSMAP 64sx, Handhel...\u001b[0m\n", + "\u001b[92m160: Guess: $41.71 Truth: $34.35 Error: $7.36 SLE: 0.04 Item: Brown 5-7/8\" X 8-1/2\" X 3/16\" Thick Heav...\u001b[0m\n", + "\u001b[93m161: Guess: $237.06 Truth: $384.99 Error: $147.93 SLE: 0.23 Item: GAOMON PD2200 Pen Display & 20 Pen Nibs ...\u001b[0m\n", + "\u001b[92m162: Guess: $240.60 Truth: $211.00 Error: $29.60 SLE: 0.02 Item: VXMOTOR for 97-03 Ford F150/F250 Lightdu...\u001b[0m\n", + "\u001b[91m163: Guess: $441.85 Truth: $129.00 Error: $312.85 SLE: 1.50 Item: HP EliteBook 2540p Intel Core i7-640LM X...\u001b[0m\n", + "\u001b[91m164: Guess: $192.06 Truth: $111.45 Error: $80.61 SLE: 0.29 Item: Green EPX Mixing Nozzles 100-Pack-fits 3...\u001b[0m\n", + "\u001b[91m165: Guess: $186.86 Truth: $81.12 Error: $105.74 SLE: 0.68 Item: Box Partners 6 1/4 x 3 1/8\" 13 Pt. 
Manil...\u001b[0m\n", + "\u001b[91m166: Guess: $249.24 Truth: $457.08 Error: $207.84 SLE: 0.37 Item: Vixen Air 1/2\" NPT Air Ride Suspension H...\u001b[0m\n", + "\u001b[91m167: Guess: $185.39 Truth: $49.49 Error: $135.90 SLE: 1.71 Item: Smart Floor Lamp, 2700-6500K+RGBPink Mul...\u001b[0m\n", + "\u001b[93m168: Guess: $157.57 Truth: $80.56 Error: $77.01 SLE: 0.44 Item: SOZG 324mm Wheelbase Body Shell RC Car B...\u001b[0m\n", + "\u001b[92m169: Guess: $266.67 Truth: $278.39 Error: $11.72 SLE: 0.00 Item: Mickey Thompson ET Street S/S Racing Rad...\u001b[0m\n", + "\u001b[93m170: Guess: $255.73 Truth: $364.50 Error: $108.77 SLE: 0.12 Item: Pirelli 275/40R20 106W XL RFT P0 PZ4-LUX...\u001b[0m\n", + "\u001b[92m171: Guess: $454.47 Truth: $378.99 Error: $75.48 SLE: 0.03 Item: Torklift C3212 Rear Tie Down\u001b[0m\n", + "\u001b[91m172: Guess: $264.15 Truth: $165.28 Error: $98.87 SLE: 0.22 Item: Cardone 78-4226 Remanufactured Ford Comp...\u001b[0m\n", + "\u001b[91m173: Guess: $254.82 Truth: $56.74 Error: $198.08 SLE: 2.22 Item: Kidde AccessPoint 001798 Supra TouchPoin...\u001b[0m\n", + "\u001b[93m174: Guess: $220.15 Truth: $307.95 Error: $87.80 SLE: 0.11 Item: 3M Protecta 3100414 Self Retracting Life...\u001b[0m\n", + "\u001b[91m175: Guess: $157.22 Truth: $38.00 Error: $119.22 SLE: 1.96 Item: Plantronics 89435-01 Wired Headset, Blac...\u001b[0m\n", + "\u001b[91m176: Guess: $221.90 Truth: $53.00 Error: $168.90 SLE: 2.01 Item: Logitech K750 Wireless Solar Keyboard fo...\u001b[0m\n", + "\u001b[93m177: Guess: $355.65 Truth: $498.00 Error: $142.35 SLE: 0.11 Item: Olympus PEN E-PL9 Body Only with 3-Inch ...\u001b[0m\n", + "\u001b[91m178: Guess: $213.13 Truth: $53.99 Error: $159.14 SLE: 1.85 Item: Beck/Arnley 051-6066 Hub & Bearing Assem...\u001b[0m\n", + "\u001b[92m179: Guess: $289.11 Truth: $350.00 Error: $60.89 SLE: 0.04 Item: Eibach Pro-Kit Performance Springs E10-6...\u001b[0m\n", + "\u001b[93m180: Guess: $227.65 Truth: $299.95 Error: $72.30 SLE: 0.08 Item: LEGO DC Batman 1989 
Batwing 76161 Displa...\u001b[0m\n", + "\u001b[91m181: Guess: $275.22 Truth: $94.93 Error: $180.29 SLE: 1.12 Item: Kingston Brass KS3608PL Restoration 4-In...\u001b[0m\n", + "\u001b[92m182: Guess: $317.74 Truth: $379.00 Error: $61.26 SLE: 0.03 Item: Polk Vanishing Series 265-LS In-Wall 3-W...\u001b[0m\n", + "\u001b[92m183: Guess: $256.02 Truth: $299.95 Error: $43.93 SLE: 0.02 Item: Spec-D Tuning LED Projector Headlights G...\u001b[0m\n", + "\u001b[92m184: Guess: $54.41 Truth: $24.99 Error: $29.42 SLE: 0.57 Item: RICHMOND & FINCH Airpod Pro Case, Green ...\u001b[0m\n", + "\u001b[91m185: Guess: $211.16 Truth: $41.04 Error: $170.12 SLE: 2.62 Item: LFA Industries 43B-5A-33JT 1/16-1/2-1.5-...\u001b[0m\n", + "\u001b[91m186: Guess: $153.42 Truth: $327.90 Error: $174.48 SLE: 0.57 Item: SAUTVS LED Headlight Assembly for Slings...\u001b[0m\n", + "\u001b[93m187: Guess: $87.01 Truth: $10.99 Error: $76.02 SLE: 3.97 Item: 2 Pack Combo Womens Safety Glasses Impac...\u001b[0m\n", + "\u001b[92m188: Guess: $38.55 Truth: $14.99 Error: $23.56 SLE: 0.82 Item: Arepa - Venezuelan cuisine - Venezuela P...\u001b[0m\n", + "\u001b[91m189: Guess: $168.78 Truth: $84.95 Error: $83.83 SLE: 0.46 Item: Schlage Lock Company KS23D2300 Padlock, ...\u001b[0m\n", + "\u001b[91m190: Guess: $249.40 Truth: $111.00 Error: $138.40 SLE: 0.65 Item: Techni Mobili White Sit to Stand Mobile ...\u001b[0m\n", + "\u001b[93m191: Guess: $203.14 Truth: $123.73 Error: $79.41 SLE: 0.24 Item: Special Lite Products Contemporary Wall ...\u001b[0m\n", + "\u001b[91m192: Guess: $184.91 Truth: $557.38 Error: $372.47 SLE: 1.21 Item: Tascam DP-24SD 24-Track Digital Portastu...\u001b[0m\n", + "\u001b[91m193: Guess: $187.15 Truth: $95.55 Error: $91.60 SLE: 0.45 Item: Glow Lighting 636CC10SP Vista Crystal Fl...\u001b[0m\n", + "\u001b[93m194: Guess: $197.81 Truth: $154.00 Error: $43.81 SLE: 0.06 Item: Z3 Wind Deflector, Smoke Tint, Lexan, Wi...\u001b[0m\n", + "\u001b[91m195: Guess: $316.69 Truth: $198.99 Error: $117.70 SLE: 0.21 
Item: Olympus E-20 5MP Digital Camera w/ 4x Op...\u001b[0m\n", + "\u001b[93m196: Guess: $259.39 Truth: $430.44 Error: $171.05 SLE: 0.25 Item: PHYNEDI 1:1000 World Trade Center (1973-...\u001b[0m\n", + "\u001b[92m197: Guess: $38.70 Truth: $45.67 Error: $6.97 SLE: 0.03 Item: YANGHUAN Unstable Unicorns Adventure Car...\u001b[0m\n", + "\u001b[92m198: Guess: $250.76 Truth: $249.00 Error: $1.76 SLE: 0.00 Item: Interlogix NX-1820E NetworX Touch Screen...\u001b[0m\n", + "\u001b[91m199: Guess: $131.31 Truth: $42.99 Error: $88.32 SLE: 1.21 Item: Steering Damper,Universal Motorcycle Han...\u001b[0m\n", + "\u001b[92m200: Guess: $166.78 Truth: $181.33 Error: $14.55 SLE: 0.01 Item: Amprobe TIC 410A Hot Stick Attachment\u001b[0m\n", + "\u001b[92m201: Guess: $8.23 Truth: $6.03 Error: $2.20 SLE: 0.07 Item: MyCableMart 3.5mm Plug/Jack, 4 Conductor...\u001b[0m\n", + "\u001b[93m202: Guess: $100.18 Truth: $29.99 Error: $70.19 SLE: 1.40 Item: OtterBox + Pop Symmetry Series Case for ...\u001b[0m\n", + "\u001b[91m203: Guess: $470.26 Truth: $899.00 Error: $428.74 SLE: 0.42 Item: Dell XPS X8700-1572BLK Desktop ( Intel C...\u001b[0m\n", + "\u001b[93m204: Guess: $249.56 Truth: $399.99 Error: $150.43 SLE: 0.22 Item: Franklin Iron Works Sperry Industrial Br...\u001b[0m\n", + "\u001b[91m205: Guess: $160.55 Truth: $4.66 Error: $155.89 SLE: 11.23 Item: Avery Legal Dividers, Standard Collated ...\u001b[0m\n", + "\u001b[92m206: Guess: $253.96 Truth: $261.41 Error: $7.45 SLE: 0.00 Item: Moen 8346 Commercial Posi-Temp Pressure ...\u001b[0m\n", + "\u001b[91m207: Guess: $257.57 Truth: $136.97 Error: $120.60 SLE: 0.39 Item: Carlisle Versa Trail ATR All Terrain Rad...\u001b[0m\n", + "\u001b[91m208: Guess: $208.89 Truth: $79.00 Error: $129.89 SLE: 0.93 Item: SUNWAYFOTO 44mm Tripod Ball Head Arca Co...\u001b[0m\n", + "\u001b[91m209: Guess: $248.55 Truth: $444.99 Error: $196.44 SLE: 0.34 Item: NanoBeam AC NBE-5AC-Gen2-US 4 Units 5GHz...\u001b[0m\n", + "\u001b[93m210: Guess: $264.01 Truth: $411.94 Error: 
$147.93 SLE: 0.20 Item: WULF 4\" Front 2\" Rear Leveling Lift Kit ...\u001b[0m\n", + "\u001b[91m211: Guess: $254.72 Truth: $148.40 Error: $106.32 SLE: 0.29 Item: Alera ALEVABFMC Valencia Series Mobile B...\u001b[0m\n", + "\u001b[91m212: Guess: $61.72 Truth: $244.99 Error: $183.27 SLE: 1.87 Item: YU-GI-OH! Ignition Assault Booster Box\u001b[0m\n", + "\u001b[91m213: Guess: $204.05 Truth: $86.50 Error: $117.55 SLE: 0.73 Item: 48\" x 36\" Extra-Large Framed Magnetic Bl...\u001b[0m\n", + "\u001b[93m214: Guess: $411.56 Truth: $297.95 Error: $113.61 SLE: 0.10 Item: Dell Latitude D620 Renewed Notebook PC\u001b[0m\n", + "\u001b[91m215: Guess: $594.63 Truth: $399.99 Error: $194.64 SLE: 0.16 Item: acer Aspire 5 Laptop, AMD Ryzen 3 5300U ...\u001b[0m\n", + "\u001b[91m216: Guess: $270.19 Truth: $599.00 Error: $328.81 SLE: 0.63 Item: Elk 31080/6RC-GRN 30 by 6-Inch Viva 6-Li...\u001b[0m\n", + "\u001b[91m217: Guess: $218.03 Truth: $105.99 Error: $112.04 SLE: 0.51 Item: Barbie Top Model Doll\u001b[0m\n", + "\u001b[91m218: Guess: $280.62 Truth: $689.00 Error: $408.38 SLE: 0.80 Item: Danby Designer 20-In. 
Electric Range wit...\u001b[0m\n", + "\u001b[93m219: Guess: $260.28 Truth: $404.99 Error: $144.71 SLE: 0.19 Item: FixtureDisplays® Metal Truss Podium Doub...\u001b[0m\n", + "\u001b[92m220: Guess: $206.89 Truth: $207.76 Error: $0.87 SLE: 0.00 Item: ACDelco 13597235 GM Original Equipment A...\u001b[0m\n", + "\u001b[91m221: Guess: $313.50 Truth: $171.82 Error: $141.68 SLE: 0.36 Item: EBC S1KF1135 Stage-1 Premium Street Brak...\u001b[0m\n", + "\u001b[92m222: Guess: $295.08 Truth: $293.24 Error: $1.84 SLE: 0.00 Item: FXR Men's Boost FX Jacket (Black/Orange/...\u001b[0m\n", + "\u001b[92m223: Guess: $320.76 Truth: $374.95 Error: $54.19 SLE: 0.02 Item: SuperATV Scratch Resistant 3-in-1 Flip W...\u001b[0m\n", + "\u001b[92m224: Guess: $126.03 Truth: $111.99 Error: $14.04 SLE: 0.01 Item: SBU 3 Layer All Weather Mini Van Car Cov...\u001b[0m\n", + "\u001b[93m225: Guess: $0.00 Truth: $42.99 Error: $42.99 SLE: 14.32 Item: 2 Pack Outdoor Brochure Holder Advertisi...\u001b[0m\n", + "\u001b[91m226: Guess: $241.32 Truth: $116.71 Error: $124.61 SLE: 0.52 Item: Monroe Shocks & Struts Quick-Strut 17158...\u001b[0m\n", + "\u001b[91m227: Guess: $259.14 Truth: $118.61 Error: $140.53 SLE: 0.60 Item: Elements of Design Magellan EB235AL Thre...\u001b[0m\n", + "\u001b[91m228: Guess: $235.19 Truth: $147.12 Error: $88.07 SLE: 0.22 Item: GM Genuine Parts 15-62961 Air Conditioni...\u001b[0m\n", + "\u001b[93m229: Guess: $169.35 Truth: $119.99 Error: $49.36 SLE: 0.12 Item: Baseus 17-in-1 USB C Docking Station to ...\u001b[0m\n", + "\u001b[91m230: Guess: $206.58 Truth: $369.98 Error: $163.40 SLE: 0.34 Item: Whitehall™ Personalized Whitehall Capito...\u001b[0m\n", + "\u001b[92m231: Guess: $304.39 Truth: $315.55 Error: $11.16 SLE: 0.00 Item: Pro Circuit Works Pipe PY05250 for 02-19...\u001b[0m\n", + "\u001b[91m232: Guess: $347.33 Truth: $190.99 Error: $156.34 SLE: 0.35 Item: HYANKA 15 \"1200W Professional DJ Speaker...\u001b[0m\n", + "\u001b[93m233: Guess: $84.99 Truth: $155.00 Error: $70.01 SLE: 0.35 
Item: Bluetooth X6BT Card Reader Writer Encode...\u001b[0m\n", + "\u001b[92m234: Guess: $299.13 Truth: $349.99 Error: $50.86 SLE: 0.02 Item: AIRAID Cold Air Intake System by K&N: In...\u001b[0m\n", + "\u001b[93m235: Guess: $193.45 Truth: $249.99 Error: $56.54 SLE: 0.07 Item: Bostingner Shower Faucets Sets Complete,...\u001b[0m\n", + "\u001b[91m236: Guess: $173.78 Truth: $42.99 Error: $130.79 SLE: 1.90 Item: PIT66 Front Bumper Turn Signal Lights, C...\u001b[0m\n", + "\u001b[93m237: Guess: $85.55 Truth: $17.99 Error: $67.56 SLE: 2.30 Item: Caseology Bumpy Compatible with Google P...\u001b[0m\n", + "\u001b[91m238: Guess: $231.03 Truth: $425.00 Error: $193.97 SLE: 0.37 Item: Fleck 2510 Timer Mechanical Filter Contr...\u001b[0m\n", + "\u001b[91m239: Guess: $356.10 Truth: $249.99 Error: $106.11 SLE: 0.12 Item: Haloview MC7108 Wireless RV Backup Camer...\u001b[0m\n", + "\u001b[93m240: Guess: $62.62 Truth: $138.23 Error: $75.61 SLE: 0.61 Item: Schmidt Spiele - Manhattan\u001b[0m\n", + "\u001b[93m241: Guess: $293.10 Truth: $414.99 Error: $121.89 SLE: 0.12 Item: Corsa 14333 Tip Kit (Ford Mustang GT)\u001b[0m\n", + "\u001b[93m242: Guess: $209.11 Truth: $168.28 Error: $40.83 SLE: 0.05 Item: Hoshizaki FM116A Fan Motor Kit 1\u001b[0m\n", + "\u001b[93m243: Guess: $152.60 Truth: $199.99 Error: $47.39 SLE: 0.07 Item: BAINUO Antler Chandelier Lighting,6 Ligh...\u001b[0m\n", + "\u001b[91m244: Guess: $241.41 Truth: $126.70 Error: $114.71 SLE: 0.41 Item: DNA MOTORING HL-OH-FEXP06-SM-AM Smoke Le...\u001b[0m\n", + "\u001b[93m245: Guess: $76.88 Truth: $5.91 Error: $70.97 SLE: 5.87 Item: Wera Stainless 3840/1 TS 2.5mm Hex Inser...\u001b[0m\n", + "\u001b[92m246: Guess: $216.42 Truth: $193.06 Error: $23.36 SLE: 0.01 Item: Celestron - PowerSeeker 127EQ Telescope ...\u001b[0m\n", + "\u001b[92m247: Guess: $261.91 Truth: $249.99 Error: $11.92 SLE: 0.00 Item: NHOPEEW 10.1inch Android Car Radio Carpl...\u001b[0m\n", + "\u001b[91m248: Guess: $200.01 Truth: $64.12 Error: $135.89 SLE: 1.27 Item: 
Other Harmonica (Suzuki-2Timer24- A)\u001b[0m\n", + "\u001b[91m249: Guess: $292.05 Truth: $114.99 Error: $177.06 SLE: 0.86 Item: Harley Air Filter Venturi Intake Air Cle...\u001b[0m\n", + "\u001b[91m250: Guess: $176.35 Truth: $926.00 Error: $749.65 SLE: 2.74 Item: Elite Screens Edge Free Ambient Light Re...\u001b[0m\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA+0AAAK9CAYAAABRvo1QAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAA1dxJREFUeJzs3Xd4VGXax/HvJISQQhICJAGlW0DsYgEBC6Io9oqCouvq7tr7q+u61rWvu5a1bhEV7L1hxYYsdlfsjSKShJYECCXJnPePE5IJBEggYVK+n+vK5eSZM2fuiUnIPb/neU4kCIIASZIkSZLU5CTEuwBJkiRJklQ7m3ZJkiRJkpoom3ZJkiRJkpoom3ZJkiRJkpoom3ZJkiRJkpoom3ZJkiRJkpoom3ZJkiRJkpoom3ZJkiRJkpoom3ZJkiRJkpoom3ZJrdJbb71FJBLhrbfeincpLdL06dOJRCLcf//98S6lVbniiiv8mkuVIpEIV1xxRbzLkKQNZtMuqdE89thjRCIRnn766dXu22677YhEIkyaNGm1+7p3786gQYM2Rok1PPXUUxxzzDH07t2b1NRUttxyS84//3yKiopqHBOJRPjnP/+5xvO89tprRCIRbrvttkaveeWbD0888USDnrdnz55EIpGqj5ycHIYMGVLr/8vmIvb1rPrx+9//Pt7lbRSPPvooY8aMYfPNNycSibDnnnvWetzixYu5/PLLGTFiBNnZ2Wt9A+bEE0+s9Wvat2/fddYzf/58brrpJoYOHUrnzp3Jyspit91249FHH63z86z8mD179lqfa9XHJycns8UWW/DnP/+ZZcuWrXb8yuN++9vf1nq+Sy+9tOqYefPm1bjv+eefZ4899iAnJ4fU1FR69+7N0UcfzcSJE6uOWfnG1s0337zWulf9WYz9GDFixFofWxfffvst5557LoMGDaJdu3ZEIhGmT59er3N8/fXXjBgxgvT0dLKzszn++OOZO3dujWNWvt7aPh555JF1PscVV1xR69d6pZ49e3LggQeu9Rzvv/8+V1xxRY3f6Y3tm2++4aKLLmL77benffv2dOnShZEjR/LRRx+t87HDhw8nEolwxhln1Om5otEod999N9tvvz3p6enk5uay//778/7779c4bvbs2YwcOZKMjAy22mornn/++dXO9dRTT5GTk0NxcXHdXqikRtUm3gVIarkGDx4MwHvvvcdhhx1WNV5SUsK0adNo06YNkydPZq+99qq6b9asWcyaNYtRo0Zt9HpPPfVUunbtypgxY+jevTtffPEFd9xxBy+99BKffPIJKSkpjBw5kszMTCZMmLDGP+YnTJhAYmJiXF5DQ9p+++05//zzAfj111+55557OPzww7nrrrvW2eT26NGDpUuXkpSUtDFKrbPhw4dzwgknrDa+xRZbxKGahldWVsby5cvXeP9dd93Fxx9/zM4778z8+fPXeNy8efO46qqr6N69O9ttt906Z6QkJyev9kZWZmbmOuudMmUKl156KQcccAB/+tOfaNOmDU8++SSjRo3iq6++4sorr6w69ne/+x377LNPjccHQcDvf/97evbsySabbLLO54uts7i4mGeffZarr76aH3/8kfHjx692fLt27XjyySe58847adu2bY
37Hn74Ydq1a7daw3/zzTdz4YUXsscee3DJJZeQmprKDz/8wOuvv84jjzyyXo127M9irK5du9b7XKuaMmUKt912G1tttRX9+vXjs88+q9fjf/nlF4YOHUpmZibXXnstixcv5uabb+aLL77ggw8+WO3rduyxx3LAAQfUGBs4cOCGvoxaLV26lDZtqv/Uff/997nyyis58cQTycrKapTnXNU///lP/vWvf3HEEUdw2mmnUVxczD333MNuu+3GxIkTV/ueXumpp55iypQp9XquCy+8kFtuuYUxY8Zw2mmnUVRUxD333MMee+zB5MmT2WWXXQAYO3Yss2fP5oYbbmDy5MkcddRRfPPNN/Ts2ROAZcuWccEFF3DNNdfU6edY0kYQSFIj6tWrV7DLLrvUGJs4cWIQiUSCY489Nthvv/1q3DdhwoQACJ599tkNet5oNBqUlpau8f5JkyYFQDBp0qQaY6saN25cAAT33Xdf1djJJ58cJCQkBLNnz17t+KVLlwaZmZnBiBEjNqj+ulr5Oh5//PF6Pa6srCxYvnz5Gu/v0aNHMHLkyBpjc+bMCdLS0oIttthivc/bmJYuXRpUVFSs8X4gOP3009fr3EuWLKl1vCFe7+LFizfo8cuXLw/OO++8ICsrKwACIGjXrl0wZMiQ4Icffqhx7MyZM6u+Rv379w/22GOPWs+5bNmyYM6cOUEQBMGHH34YAMF//vOfWo8dO3ZskJaWtl61//TTT8H06dNrjEWj0WDvvfcOkpOT1/m1effddwMg+Mtf/rLO56qtzmg0Guy2225BJBIJ8vPza9wHBIceemiQkJAQPPPMMzXumzx5cgAERxxxRAAEc+fODYIg/H7IyMgIhg8fXmsNBQUFVbd//vnnAAhuuummtdZd289iQ5o/f35QUlISBEEQ3HTTTQEQ/Pzzz3V+/B/+8IcgJSUlmDFjRtXYa6+9FgDBPffcUzVW19e7JpdffnmNr/Wq6vJ1Wp/Xt6E++uijYNGiRTXG5s2bF3Tu3DnYfffda33M0qVLg549ewZXXXVVnX9vlZWVBSkpKcGRRx5ZY/ynn34KgOCss84KgiAISktLg0gkErz99ttBEIQ/A7169QruvvvuqsdcffXVwfbbb7/W36eSNi6nx0tqVIMHD+bTTz9l6dKlVWOTJ0+mf//+7L///vz3v/8lGo3WuC8SibD77rsDUF5eztVXX02fPn1ITk6mZ8+e/PGPf1wtTVw5NfKVV15hwIABpKSkcM899wBhEnTooYeSlpZGTk4O5557bq1pZG1ThVfOEPj666+rxsaMGUM0Gq11SueLL75IcXExo0ePrhp76KGH2GmnnUhJSSE7O5tRo0Yxa9as1R47depUDjjgADp06EBaWhrbbrstt956a61f1/qInYb797//vepr+dVXX9XrPHl5efTr14+ff/55nedd05r2b775hqOPPprOnTuTkpLClltuyaWXXlrjmNmzZ/Ob3/yG3NxckpOT6d+/P//+979rHLNyWcAjjzzCn/70JzbZZBNSU1MpKSmp/xdoFXvuuSdbb701H3/8MUOHDiU1NZU//vGP6/w6vvnmmwwZMoS0tDSysrI45JBDanzfQPUU36+++orjjjuODh06VM1IWV/XXnstt9xyCyeccALHHXccZ511FjfeeCMpKSkUFhbWOLZbt24kJKz7n/7k5GTy8vLqVUdFRUW9v/69evWiR48eNcYikQiHHnooy5cv56efflrr4ydMmEAkEuG4446r1/PGPtfgwYMJgqDW59pkk00YOnQoEyZMqDE+fvx4ttlmG7beeusa4/PmzaOkpKTq99eqcnJy1qvOxpSdnU379u3X+/FPPvkkBx54IN27d68a22effdhiiy147LHHan3MkiVLWLFixXo/Z13Frmm/4ooruPDCC4Hw+27l1PyVSwFee+01Bg8eTFZWFunp6Wy55Zb88Y9/3OAadtppJ9LT02uMdezYkSFDhqz2+2GlG2+8kWg0yg
UXXFDn5ykrK2Pp0qXk5ubWGM/JySEhIYGUlBQgTNGDIKBDhw5A+DXKysqitLQUCH//Xn/99dx66611+l0haeNwerykRjV48GAefPBBpk6dWtUUT548mUGDBjFo0CCKi4uZNm0a2267bdV9ffv2pWPHjgD89re/Zdy4cRx55JGcf/75TJ06leuuu46vv/56tfXV3377Lcceeyy/+93vOOWUU9hyyy1ZunQpw4YNY+bMmZx11ll07dqVBx98kDfffLNO9efn5wPQqVOnqrGhQ4ey6aabMmHCBM4777wax0+YMIHU1FQOPfRQAP7yl79w2WWXcfTRR/Pb3/6WuXPncvvttzN06FA+/fTTqimar732GgceeCBdunTh7LPPJi8vj6+//poXXniBs88+u15f8zX5z3/+w7Jlyzj11FNJTk4mOzu7Xo8vKytj1qxZVf9v1nbe2DdiVvrf//7HkCFDSEpK4tRTT6Vnz578+OOPPP/88/zlL38BoKCggN12261qHWfnzp15+eWXOfnkkykpKeGcc86pcc6rr76atm3bcsEFF7B8+fLVpuKuatmyZbWuic3IyKjx2Pnz57P//vszatQoxowZU+MP4dpe7+uvv87+++9P7969ueKKK1i6dCm33347u+++O5988knVtNOVjjrqKDbffHOuvfZagiAAYPny5SxatGit9a8U+/344osvMnz4cG699VauuOIKevbsyYknnsiZZ55Zp3M1hNLSUjIyMigtLaVDhw4ce+yx3HDDDas1K3VV28/dqsrKynjssccYNGjQal/f+ljZtK1sYlZ13HHHcfbZZ7N48WLS09MpLy/n8ccf57zzzlttanxOTg4pKSk8//zznHnmmfX+GVuTsrKyWr9v09LSqpqx9f3+2RCzZ8+msLCQAQMGrHbfLrvswksvvbTa+JVXXsmFF15IJBJhp5124i9/+Qv77rtvnZ9zwYIFtY7X9jsn1uGHH853333Hww8/zN/+9reqr0Hnzp358ssvOfDAA9l222256qqrSE5O5ocffmDy5Mk1zrGm9fSrat++PcnJyWs9Jj8/v9b/DzNnzuT666/n3//+d9X/27pISUlh11135f7772fgwIEMGTKEoqIirr76ajp06MCpp54KhN/nffr04dprr+Xaa6/l/fff57PPPuP2228H4KKLLmL//fdn6NChdX5uSRtBnJN+SS3cl19+GQDB1VdfHQRBOIUvLS0tGDduXBAEQZCbmxv84x//CIIgCEpKSoLExMTglFNOCYIgCD777LMACH7729/WOOcFF1wQAMGbb75ZNdajR48ACCZOnFjj2L///e8BEDz22GNVY0uWLAk222yz1abH1+bkk08OEhMTg++++67G+IUXXhgAwbfffls1VlxcHLRr1y449thjgyAIgunTpweJiYmrTd394osvgjZt2lSNl5eXB7169Qp69OgRLFy4sMax0Wh0rfXVZXr8ymmpGRkZQWFh4VrPt1KPHj2CfffdN5g7d24wd+7c4PPPPw9GjRoVAMGZZ565zvOuvC92SvXQoUOD9u3b15hGu+prPPnkk4MuXboE8+bNq3HMqFGjgszMzKolDytfd+/evde6DCIWlVPHa/t4+OGHq47bY489AqDGdNF1vd7tt98+yMnJCebPn1819vnnnwcJCQnBCSecUDW2corvyu+RWP/5z3/WWmPsR6xdd921aprt5ZdfvsZp7Kta2/T4WOuaHn/xxRcH//d//xc8+uijwcMPPxyMHTs2AILdd989KCsrq1MtsebPnx/k5OQEQ4YMWetxzz//fAAEd955Z53Ou3J6/Mrv6R9++CG4+eabg0gkEmy99dar/axROS15wYIFQdu2bYMHH3wwCIIgePHFF4NIJBJMnz691inbf/7znwMgSEtLC/bff//gL3/5S/Dxxx+vVk99psev6fvguuuuqzpufb9/YtV3+vjK740HHnhgtftW/o5ctmxZEARBMGPGjGDfffcN7rrrru
C5554L/v73vwfdu3cPEhISghdeeGGdz7Xya722j1WnxwPB5Zdfvs7X97e//W2tU+9jz1eXj3X9DL7zzjtBJBIJLrvsstXuO/LII4NBgwbVeM66Luv5/vvvgx133LFGLb179w6++eabGse98cYbQYcOHaqOOeecc4IgCJd9pKSkrLZkRVL8mbRLalT9+vWjY8eOvPfeewB8/vnnLFmypGp3+EGDBjF58mROO+00pkyZQkVFRdV04ZUpzapp9vnnn8/NN9/Miy++WGMTu169erHffvvVOPall16iS5cuHHnkkVVjqampnHrqqVx00UVrrX3ChAn861//4qKLLmLzzTevcd+YMWO46aabmDBhQtX0yyeffJJly5ZVTY1/6qmniEajHH300TUSmry8PDbffHMmTZrEH//4Rz799FN+/vln/va3v622OVIkEllrjfVxxBFH0Llz5zof/+qrr9Y4PjExkeOPP54bbrih3uedO3cu77zzDmeffXaNabRQ/RqDIODJJ5/k6KOPJgiCGl+z/fbbj0ceeYRPPvmkxtTjsWPH1iuNOuSQQ2rdiXmbbbap8XlycjInnXRSredY9fXOmTOHzz77jIsuuqhGsrrtttsyfPjwWtPG2jby22+//Xjttdfq/FpWOumkk/j973/PiBEjiEajtGvXjmg0utGmtl533XU1Ph81ahRbbLEFl156KU888US9NmSMRqOMHj2aoqKiquRvTSZMmEBSUhJHH310nc+/ZMmS1b5XBw8ezLhx49b4s9ahQwdGjBjBww8/zJgxY5gwYQKDBg1abVr/SldeeSV9+/blzjvv5JVXXuHll1/m0ksvZYcddmD8+PH069evzvWutOuuu3LNNdesNh77e2l9v382xMplT7Wlyu3atas6Jjk5me7du/PKK6/UOOb4449nq6224vzzz2fkyJF1es4nn3ySjIyM1cbHjBlT3/KrrPy9++yzz3LSSSet8Wenrl/f/v37r/G+wsJCjjvuOHr16rXav0GTJk3iySefZOrUqXUrfBXt27enf//+DBw4kGHDhpGfn8/111/PoYceyrvvvluV7O+9997MnDmTL7/8kq5du9KtWzei0ShnnXUW559/Pj169OCuu+7i1ltvJQgCzj333FZzhQ2pqbJpl9SoIpEIgwYN4p133iEajTJ58mRycnLYbLPNgLBpv+OOOwCqpiKubNpnzJhBQkJC1bEr5eXlkZWVxYwZM2qM9+rVa7XnnzFjBpttttlqf5BvueWWa6373Xff5eSTT2a//farmroda9ttt2Xrrbfm4YcfrmraJ0yYQKdOnareOPj+++8JgmC1hn+llTur//jjjwCrrY9taLV9fdZmZaMQiURITU2lX79+te64XJfzrlwvvLbXOHfuXIqKirj33nu59957az1m1TXa9X1Nm2666Rp3a461ySabrHGq/arPufL7sLbvqX79+vHKK6+wZMkS0tLS1ngOgC5dutClS5d11raq3/3ud2RnZ3P77bczZcoUXnvtNW644QZ+97vfcfnll9frTY2Gcu6553LZZZfx+uuv16tpP/PMM5k4cSIPPPAA22233RqPW7x4Mc8++yz77bffass11qZdu3ZVl7f65ZdfuPHGGyksLFzn1+i4447j+OOPZ+bMmTzzzDPceOONaz3+2GOP5dhjj6WkpISpU6dy//33M2HCBA466CCmTZtW1dDWVadOndb5fbu+3z8bInZq/qpWLh1Y29c2Ozubk046ieuvv55ffvmFTTfddJ3POXTo0Fqnldf3axrrmGOO4Z///Ce//e1vufjiixk2bBiHH344Rx55ZI0Gvi6/O9ZmyZIlHHjggSxatIj33nuvxvKR8vJyzjrrLI4//nh23nnnep+7vLycffbZhz333LPGG1777LMP/fv356abbqrxhmt6ejq77rpr1ef/+c9/yM/P5+KLL+b111/nwgsv5KGHHqraM2LLLbes8Sa5pI3Lpl1Soxs8eDDPP/88X3
zxRdV69pUGDRrEhRdeyOzZs3nvvffo2rUrvXv3rvH4uqbNDdWcfP755xx88MFsvfXWPPHEEzUuGRRrzJgxXHzxxXz00UdsuummTJo0id/97ndVx0ejUSKRCC+//DKJiYmrPX591/uur/p+ferSKKzPeddk5ZrUMWPGMHbs2FqPWbn3QUM/96rWdt6GeM7azrF06dI6XxN51U3ijjrqKI466iiuuOIKFi9ezJIlS7jhhhsoKiri7rvv3uB66yslJYWOHTuucf1xba688kruvPNOrr/+eo4//vi1HvvMM89QWlpaY8PHukhMTKzxPb3ffvvRt29ffve73/Hcc8+t8XEHH3wwycnJjB07luXLl9c53c/IyGD48OEMHz6cpKQkxo0bx9SpU9ljjz3qVXddbMj3z/pa+SbBnDlzVrtvzpw5ZGdnr3Ntd7du3YBwrXpdmvbGkJKSwjvvvMOkSZN48cUXmThxIo8++ih77703r776atXv75V7LaxLZmbmaj/jK1as4PDDD+d///sfr7zyympvYD7wwAN8++233HPPPVX7LKy0aNEipk+fTk5ODqmpqbU+5zvvvMO0adO45ZZbaoxvvvnm9OvXb7X1+bFKSkq49NJLufnmm0lLS+Phhx/myCOPrNqb5cgjj2T8+PE27VIc2bRLanSx12ufPHlyjc3EdtppJ5KTk3nrrbeqdk9fqUePHkSjUb7//vsaU0oLCgooKipa4/TUWD169GDatGkEQVCj+f/2229rPf7HH39kxIgR5OTk8NJLL621sT722GO55JJLmDBhAj169KCioqJGE9GnTx+CIKBXr15rvQ54nz59AJg2bdoGJzlN1co3YqZNm7bGYzp37kz79u2pqKhoVl+Hld+HtX1PffPNN3Tq1KlGyr4mjz766Bqn5K8qqNy8rjZbb701J554IrNnz+aJJ56IS9O+aNEi5s2bV+flGP/4xz+44oorOOecc/i///u/dR4/fvx40tPTOfjggzeozi5dunDuuedy5ZVX8t///pfddtut1uNSUlI49NBDeeihh9h///3XayO3AQMGMG7cuFob3IbQUN8/9bHJJpvQuXNnPvroo9Xu++CDD9h+++3XeY6Vs3Dqs3Rnfa3tDeCEhASGDRvGsGHDuOWWW7j22mu59NJLmTRpUtXvo7rOZPjPf/7DiSeeWPV5NBrlhBNO4I033uCxxx6r9U2bmTNnUlZWVuuVBx544AEeeOABnn766apGelUFBQVAeBWHVZWVlVFeXr7Geq+66ip69epV9e/Xr7/+yg477FB1f9euXfnss8/W+HhJjc+mXVKjGzBgAO3atWP8+PHMnj27RtKenJzMjjvuyD/+8Q+WLFlS4/JXBxxwAH/84x/5+9//XnX5NqAqSajLGsgDDjiAV199lSeeeIKjjjoKCHe6rm36dX5+Pvvuuy8JCQm88sor6/wjsnv37gwZMoRHH32Url270qtXrxqv7fDDD+eSSy7hyiuvrJpmuFIQBCxYsICOHTuy44470qtXL/7+979z4okn1piCvuqbDc1V586dGTp0KP/+978577zzaqxrX/kaExMTOeKII5gwYQLTpk1bLYmaO3fuRvnDvr66dOnC9ttvz7hx47jkkkuq/v9NmzaNV199tc5rbdd3TfLChQtr3fm8rKys0afGL1u2jLKystUuGXb11VcTBAEjRoyoUc+PP/5IZmZmjebn0Ucf5ayzzmL06NGrpYS1mTt3Lq+//jrHHnvsGlPH+jjzzDO56aabuP7663nmmWfWeNwFF1xAnz59Vts3I1ZpaSmff/45AwcOXO2+l19+GVj30pz1tTHWtK9cyrPyjUYI93gYN24cs2bNqkrN33jjDb777jvOPffcquNq+/mdPXs2//73v9l22203ytT+lW+eFRUV1RhfsGDBajv9r3zDIXbq//quaT/zzDN59NFHueeeezj88MNrfcyoUaNqfZPjsMMO44ADDuCUU06pMZ39m2++ITU1te
p36co3hh955JEaP3effPIJ3377bdXu8av67rvvuOOOO3jnnXeq/q3Jzc3lm2++qTrm66+/brDZGZLWj027pEbXtm1bdt55Z959912Sk5PZaaedatw/aNAg/vrXvwLUaNq32247xo4dy7333ktRURF77LEHH3zwAePGjePQQw+t01S9U045hTvuuIMTTjiBjz/+mC5duvDggw/W+sf+iBEj+Omnn7jooot47733qjbPg/CPmOHDh6/2mDFjxnDqqafy66+/rna98T59+nDNNddwySWXMH36dA499FDat2/Pzz//zNNPP82pp57KBRdcQEJCAnfddRcHHXQQ22+/PSeddBJdunThm2++4csvv1xt86baPPnkkzX+yFppTdPM4+G2225j8ODB7Ljjjpx66qn06tWL6dOn8+KLL1alONdffz2TJk1i11135ZRTTmGrrbZiwYIFfPLJJ7z++uv1mm5dm++++46HHnpotfE1/f+tq5tuuon999+fgQMHcvLJJ1dd8i0zM7Nqz4N1Wd81yYMHD2bIkCHsu+++zJw5k/nz5zN58mQmTpy4Wmr9zjvv8M477wBhE7VkyZKqDc6GDh1a4zJPd9xxB0VFRfz6668APP/88/zyyy9A2IRkZmaSn5/PDjvswLHHHkvfvn0BeOWVV3jppZcYMWIEhxxySNX5Zs+eTb9+/Rg7diz3338/EKaxJ5xwAh07dmTYsGGMHz++Rr2DBg1abbnMo48+Snl5eb2nxq9Jx44dOemkk7jzzjv5+uuv17hR3HbbbbfWdfYQNu2DBg1it912Y8SIEXTr1o2ioiKeeeYZ3n33XQ499NAaCSaEDe6ql44DOPTQQ6veuJo9e3at37fp6elVyev6fv8UFxdXrYFeOYX6jjvuICsri6ysrBobNw4bNgygxvTtP/7xjzz++OPstddeVZfGu+mmm9hmm21qJP8XXXQRP/74I8OGDaNr165Mnz6de+65hyVLlnDrrbfWu+71sfLfnksvvZRRo0aRlJTEQQcdxFVXXcU777zDyJEj6dGjB4WFhdx5551suummNf5NWp8ZQH//+9+58847GThwIKmpqav9fzzssMNIS0ujb9++VT9Dq+rVq9dqCXu/fv3YY489eOutt6pe2/Dhwxk3bhwlJSXsu+++zJkzh9tvv52UlJTVLpe50rnnnssxxxzDLrvsUjV25JFHcsghh1Rdp/7555/nhRdeqPdrl9SA4rNpvaTW5pJLLgmAGpeyWempp54KgKB9+/ZBeXl5jfvKysqCK6+8MujVq1eQlJQUdOvWLbjkkkuqLiO0Uo8ePVa73M9KM2bMCA4++OAgNTU16NSpU3D22WcHEydOXO2Sb6zlEj5rujTWggULguTk5AAIvvrqq1qPefLJJ4PBgwcHaWlpQVpaWtC3b9/g9NNPr3G5uCAIgvfeey8YPnx40L59+yAtLS3Ydtttg9tvv73Wc6608tJna/p4991363xpqVhr+3qutLbz1nbJtyAIgmnTpgWHHXZYkJWVFbRr1y7YcsstV7vsUUFBQXD66acH3bp1C5KSkoK8vLxg2LBhwb333rva617bpe5WVdf/v3vssUfQv3//er3eIAiC119/Pdh9992DlJSUICMjIzjooINW+56o7RJhG+qJJ54IDjrooGCTTTYJEhMTg7Zt2wZbbLFFcM011wQrVqyo9flr+4i9NFYQrP1SYysvmbVw4cJgzJgxwWabbRakpqYGycnJQf/+/YNrr712tede+fUbO3Zs1di6LlNW26WzdttttyAnJ2e13xXrsvKSb7X58ccfg8TExBq1UYdLba36/7OsrCy47777gkMPPTTo0aNHkJycHKSmpgY77LBDcNNNNwXLly+veuzKr8eaPlZeYm5t/x969OhRr69BbdZWx6rn79GjR63POW3atGDfffcNUlNTg6ysrGD06NFBfn5+jWMmTJgQDB06NOjcuXPQpk2boFOnTsFhhx1W6+XwarOun53afm
fV9n199dVXB5tsskmQkJBQ9b38xhtvBIccckjQtWvXoG3btkHXrl2DY489drVLfa6PlZdAXNfP0pqs6fuwtn+XSktLg6uuuirYaqutgpSUlCAzMzM48MADg08//bTWc7/44otBenp68Ouvv65233XXXRd07do16NKlS3DDDTfU9eVKaiSRIGighU2SJCmurrjiCnr27FljPa0kSWreNs5FXCVJkiRJUr25pl2SpBZizz33rLGRoSRJav6cHi9JkiRJUhPl9HhJkiRJkpoom3ZJkiRJkpoom3ZJkiRJkpooN6IDotEov/76K+3btycSicS7HEmSJElSCxcEAYsWLaJr164kJKw5T7dpB3799Ve6desW7zIkSZIkSa3MrFmz2HTTTdd4v0070L59eyD8YmVkZMS5GkmSJElSS1EWhZcXwJsLYeWl2xIjMCSphBO37VbVj66JTTtUTYnPyMiwaZckSZIkNYiflsK4fMgvg6T0cKxHOxibB+2Xw4mwziXaNu2SJEmSJDWgsig8Ow9ej0nX20TgwI6wXzYkRKBked3OZdMuSZIkSVID+bEyXS9YUT3Wsx2cmAddkut/Ppt2SZIkSZI20IooPDOv5tr1NhE4uBMM7xCm6+vDpl2SJEmSpA3wfWmYrs8tqx7rnQJjcyFvPdL1WDbtkiRJkiSth+VReHouTCqqHkuKwKGdYO8NSNdj2bRLkiRJklRP35bCA/kwLyZd75MS7gyf27bhnsemXZIkSZKkOlpWAU/Ng7eLqseSInBYZ9grq2HS9Vg27ZIkSZIk1cHXS+DBApgfk65vngIn5EFOA6brsWzaJUmSJElai2UV8MRceLe4eiw5AQ7rBHtmQaSB0/VYNu2SJEmSJK3Bl0vgwXxYWF49tmUqnJALnRopXY9l0y5JkiRJ0iqWVsDjc2HyKun6kZ1hSGbjpuuxbNolSZIkSYoxbXG4dr0oJl3vlwrH50HHpI1bi027JEmSJElAaQU8VghTSqrH2lWm64M3Yroey6ZdkiRJktTq/W8xPFQAxTHpev80GJML2Rs5XY9l0y5JkiRJarWWVMCjhTB1lXT96BwYlBGfdD2WTbskSZIkqVX6bBGML4SSmHR968p0vUMc0/VYNu2SJEmSpFZlcTk8UggfLqoeS02EYzrDrk0gXY9l0y5JkiRJajU+WQQTCmBRRfXYtukwOgeymki6HsumXZIkSZLU4i0qh4cL4eNV0vVRObBL+6aVrseyaZckSZIktVhBEDbqDxfC4ph0fft0GJ0LGU28K27i5UmSJEmStH5KysOp8J8urh5LS4Rjc2BAE07XY9m0S5IkSZJalCAIN5l7pDC8pNtKO7YPG/amnq7HakalSpIkSZK0dsXlML4APo9J19snwrG5sFP7+NW1vmzaJUmSJEnNXhDA1BJ4dC6UxqTrA9qHm821b6bdbzMtW5IkSZKkUFEZPFQAXyypHmufGG40t0MzTNdj2bRLkiRJkpqlIIApJfBYISyNVo/vkhGm62mJ8autodi0S5IkSZKanYVl8GABfBmTrme0gTG5sF16/OpqaDbtkiRJkqRmIwhgcjE8PheWxaTru2XA0S0kXY9l0y5JkiRJahYWlMED+fB1afVYVmW6vk0LStdj2bRLkiRJkpq0IIB3i+GJubA8Jl0flAlHdYbUFpaux7JplyRJkiQ1WfNWhGvXv4lJ1zu0gePzoH9a/OraWGzaJUmSJElNThDA20Xw1Lya6frgTDiyM6S04HQ9lk27JEmSJKlJmbsCHiiA72LS9ewkOD4XtmoF6Xosm3ZJkiRJUpMQBDCpCJ6eByti0vWhWXBEJ2jXStL1WDbtkiRJkqS4K1wR7gz//dLqsY5JcEIu9G1l6Xosm3ZJkiRJUtxEA3hzITwzD8qC6vE9s+DwzpCcELfSmgSbdkmSJElSXBSsgPvz4aeYdL1TEozNgy1S41dXU2LTLkmSJEnaqKIBvL4QnotJ1yPAXh3g0E6m67Fs2iVJki
RJG82c5TAuH35eVj2W0zZcu7656fpqbNolSZIkSY0uGsCrC+D5+VAek64P6wCHdIK2puu1smmXJEmSJDWqX5eHa9dnxKTruW3Dtet9UuJXV3Ng0y5JkiRJahQVAbyyAF6YH96GMF0fng0Hd4Qk0/V1smmXJEmSJDW4X5aF6fqs5dVjXSrT9V6m63Vm0y5JkiRJajDlUZi4AF5aUDNd3y8bDjRdrzebdkmSJElSg5hVma7/EpOud02GsbnQ03R9vdi0S5IkSZI2SHk0TNZfXhDuEg+QEIER2TAyG9qYrq83m3ZJkiRJ0nqbsSy87vrsmHR90+Rw7Xr3dvGrq6WwaZckSZIk1Vt5NNwV/pWFNdP1A7Jhf9P1BmPTLkmSJEmql+lLw7Xrc1ZUj3WrTNe7ma43KJt2SZIkSVKdlEXh+fnw6gKoDNdJjMDIjuH69cRIXMtrkWzaJUmSJEnr9NPScO16fky63r0dnJgHmyTHr66WzqZdkiRJkrRGZVF4dh68vrA6XW8TCa+5vq/peqOzaZckSZIk1erHynS9ICZd79kuXLve1XR9o7BplyRJkiTVsCIKz8yDN1dJ1w/uBMM7hLvEa+OwaZckSZIkVfm+NEzX55ZVj/VOgbG5kGe6vtHZtEuSJEmSWB6Fp+fCpKLqsaQIHNIJhpmux41NuyRJkiS1ct+WwgP5MC8mXe+TEq5dz20bv7pk0y5JkiRJrdayCnhqHrxdVD2WFIHDOsNeWabrTYFNuyRJkiS1Qt8sgQcKYH5Mur55CpyQBzmm602GTbskSZIktSLLKuCJufBucfVY2wQ4vBPsmQUR0/UmxaZdkiRJklqJr5aEa9cXllePbZEa7gzfyXS9SbJplyRJkqQWbmkFPD4XJsek68kJcERnGJpput6U2bRLkiRJUgs2bTE8WABFMel639Rw7XrHpPjVpbqxaZckSZKkFqi0Ah4rhCkl1WPtEuDIzjDYdL3ZsGmXJEmSpBbmf4vhoQIojknXt0qD43Mh23S9WbFplyRJkqQWYklluv7fVdL1o3NgUIbpenNk0y5JkiRJLcBni2B8IZTEpOtbp8GYXOhgut5s2bRLkiRJUjO2uBweKYQPF1WPpSbC0Z1hN9P1Zs+mXZIkSZKaqU8WwYQCWFRRPbZtOozOgSzT9RbBpl2SJEmSmplFlen6R6uk66NyYJf2pustiU27JEmSJDUTQQAfL4KHC2FxTLq+fTqMzoUMO7wWx/+lkiRJktQMlJSHzfonMel6WiIcmwMDTNdbLJt2SZIkSWrCgiCcBv9wYXhJt5V2bB827KbrLZv/eyVJkiSpiSouh/EF8Pni6rH0RDguF3ZqH7+6tPHYtEuSJElSExMEMLUEHp0LpTHp+oD24WZz7e3kWg3/V0uSJElSE1JUBuML4X8x6Xr7ynR9R9P1VsemXZIkSZKagCCAKSXw+Crp+i4ZcExnSLd7a5X83y5JkiRJcbawDB4qgGlLqscy2sCYXNguPX51Kf5s2iVJkiQpToIA3i+BxwphWbR6fLcMODonvKSbWjebdkmSJEmKgwVl8GABfBWTrme1gdG5sK3puirZtEuSJEnSRhQE8G4xPDm3Zro+KBOO6gyppuuKYdMuSZIkSRvJ/DJ4IB++Ka0ey2oDx+fC1qbrqoVNuyRJkiQ1siCAt4vgqXmwPCZdH5wJR3aGFNN1rYFNuyRJkiQ1onkrYFwBfBeTrndoAyfkwVZp8atLzYNNuyRJkiQ1giCAt4rCdH1FTLo+NAuO6ATtTNdVBzbtkiRJktTACleEa9e/X1o91jEJTsiFvqbrqgebdkmSJElqINEA3lwIz8yDsqB6fM8sOMx0XevBpl2SJEmSGkDBChiXDz/GpOudkmBsHmyRGr+61LzZtEuSJEnSBogG8PpCeG6VdH3vDnBoJ0hOiF9tav5s2iVJkiRpPc1ZDg8UwE8x6XpO23Dt+uam62oANu2SJEmSVE/RAF6rTNfLK9P1CDCsAxzSCdqarq
uB2LRLkiRJUj38ujxcuz59WfVYbttw7XqflPjVpZbJpl2SJEmS6qAigFcWwIvza6brw7Ph4I6QZLquRmDTLkmSJEnr8MsyGFcAM2PS9bzKdL236boakU27JEmSJK1BRQAvz4eXFoS3IUzX98uGA03XtRHYtEuSJElSLWYtC9euz1pePdY1GcbmQk/TdW0kNu2SJEmSFKM8Ci8vCNP1aGW6nhCBEdkwMhvamK5rI7JplyRJkqRKM5fB/fkwOyZd3yQ5XLveo1386lLrZdMuSZIkqdUrj8KLC2DiKun6Admwv+m64simXZIkSVKrNn1puDP8rzHp+qbJcGIedDNdV5zZtEuSJElqlcqi8Px8eHUBVIbrJK5M1zuGt6V4s2mXJEmS1Or8tDTcGT5/RfVY93bhzvCbmq6rCbFplyRJktRqlEXhufnwWky63iYSXnN932zTdTU9Nu2SJEmSWoUfK9P1gph0vWe7cGf4rsnxq0taG5t2SZIkSS3aiig8Ow/eWFgzXT+4EwzvEO4SLzVVNu2SJEmSWqzvS+GBAiiMSdd7VabrXUzX1QzYtEuSJElqcZZH4em58FZRdbqeVJmu72O6rmbEpl2SJElSi/Jdabh2fV5Z9ViflDBdz20bv7qk9WHTLkmSJKlFWB6FpyrT9ZWSInBYZ9gry3RdzZNNuyRJkqRm75sl4dr1+THp+uYpcEIe5JiuqxmzaZckSZLUbC2rgCfnwTtF1WNtE+DwTrBnFkRM19XM2bRLkiRJapa+WgIP5MPC8uqxLVJhbC50Ml1XC2HTLkmSJKlZWVoBj8+FycXVY8mV6foeWabralls2iVJkiQ1G9MWw0MFNdP1vqnh2vWOSfGrS2osNu2SJEmSmrzSynT9/Zh0vV0CHNkZBmearqvlSojnk1dUVHDZZZfRq1cvUlJS6NOnD1dffTVBEFQdEwQBf/7zn+nSpQspKSnss88+fP/99zXOs2DBAkaPHk1GRgZZWVmcfPLJLF68eGO/HEmSJEmN4H+L4crpNRv2rdLg8p4wJMuGXS1bXJv2G264gbvuuos77riDr7/+mhtuuIEbb7yR22+/veqYG2+8kdtuu427776bqVOnkpaWxn777ceyZcuqjhk9ejRffvklr732Gi+88ALvvPMOp556ajxekiRJkqQGsqQC/jMH/jEbiiqnw7dLCKfCn7UJZDsdXq1AJIiNtTeyAw88kNzcXP71r39VjR1xxBGkpKTw0EMPEQQBXbt25fzzz+eCCy4AoLi4mNzcXO6//35GjRrF119/zVZbbcWHH37IgAEDAJg4cSIHHHAAv/zyC127dl1nHSUlJWRmZlJcXExGRkbjvFhJkiRJdfZ55dr1kpi161unwZhc6GCzrhagrn1oXJP2QYMG8cYbb/Ddd98B8Pnnn/Pee++x//77A/Dzzz+Tn5/PPvvsU/WYzMxMdt11V6ZMmQLAlClTyMrKqmrYAfbZZx8SEhKYOnVqrc+7fPlySkpKanxIkiRJir/F5fDPX+HO2dUNe0oCjM2DMzaxYVfrE9eN6C6++GJKSkro27cviYmJVFRU8Je//IXRo0cDkJ+fD0Bubm6Nx+Xm5lbdl5+fT05OTo3727RpQ3Z2dtUxq7ruuuu48sorG/rlSJIkSdoAny6C8QWwqKJ6bNt0GJ0DWTbraqXi2rQ/9thjjB8/ngkTJtC/f38+++wzzjnnHLp27crYsWMb7XkvueQSzjvvvKrPS0pK6NatW6M9nyRJkqQ1W1QOjxTCR4uqx1ITYVQO7NLejebUusW1ab/wwgu5+OKLGTVqFADbbLMNM2bM4LrrrmPs2LHk5eUBUFBQQJcuXaoeV1BQwPbbbw9AXl4ehYWFNc5bXl7OggULqh6/quTkZJKTkxvhFUmSJEmqj48XwYQCWByTrm+XDqNzIdMLVEvxXdNeWlpKQkLNEhITE4lGowD06tWLvLw83njjjar7S0pKmDp1KgMHDgRg4MCBFBUV8fHHH1cd8+
abbxKNRtl11103wquQJEmSVF8l5XDPr3Dvr9UNe1oi/LYL/KGrDbu0Ulx/FA466CD+8pe/0L17d/r378+nn37KLbfcwm9+8xsAIpEI55xzDtdccw2bb745vXr14rLLLqNr164ceuihAPTr148RI0ZwyimncPfdd1NWVsYZZ5zBqFGj6rRzvCRJkqSNJwjCafAPF4aXdFtph3Q4LhcybNalGuL6I3H77bdz2WWXcdppp1FYWEjXrl353e9+x5///OeqYy666CKWLFnCqaeeSlFREYMHD2bixIm0a9eu6pjx48dzxhlnMGzYMBISEjjiiCO47bbb4vGSJEmSJK1BSXm40dxni6vH0hPh2BzYybXrUq3iep32psLrtEuSJEmNJwjgg0XhZnOlMen6gPbhZnPtTdfVCtW1D/XHQ5IkSVKjKSqD8YXwv5h0vX1iOBV+x/bxq0tqLmzaJUmSJDW4IID/lsBjc2um67tkwDGdId1ORKoTf1QkSZIkNaiFZfBQAUxbUj2W0QZG58D2putSvdi0S5IkSWoQQQDvl8BjhbAsWj2+WwYcnRNe0k1S/di0S5IkSdpgC8rgwQL4KiZdz2oDo3Nh2/T41SU1dzbtkiRJktZbEMB7xfDE3Jrp+qBMOKozpJquSxvEpl2SJEnSeplfBg/mw9el1WNZbeD4XNjadF1qEDbtkiRJkuolCOCdYnhyLiyPSdd3r0zXU0zXpQZj0y5JkiSpzuatgHEF8F1Mut6hDRyfB/3T4leX1FLZtEuSJElapyCAt4rgqXmwIiZdH5IJR3aGdqbrUqOwaZckSZK0VoUr4IF8+H5p9VjHpHDtej/TdalR2bRLkiRJqlU0gElF8PRcKAuqx/fMgsM6ma5LG4NNuyRJkqTVFKyAcfnwY0y63ikJTsiDLVPjV5fU2ti0S5IkSaoSDeCNhfDsvJrp+l5ZcFhnSE6IW2lSq2TTLkmSJAmA/OXhzvA/xaTrnZNgbB5sbrouxYVNuyRJktTKRQN4bSE8Nw/KK9P1CLB3Bzi0E7Q1XZfixqZdkiRJasV+XR6uXZ++rHospy2cmAd9UuJXl6SQTbskSZLUCkUDeGUBvDC/Zro+PBsO7ghJputSk2DTLkmSJLUysyvT9Rkx6Xpe23Dtem/TdalJsWmXJEmSWomKACYugBfnh7chTNf3zYaDTNelJsmmXZIkSWoFZi0L0/VZy6vHulSuXe9pui41WTbtkiRJUgtWHoWXF8BLC8J17AAJEdivAxzYEdqYrktNmk27JEmS1ELNrEzXf4lJ17smh+l6j3bxq0tS3dm0S5IkSS1MeRReXBCuX49N1w/Ihv2zTdel5sSmXZIkSWpBZiyD+/PD66+vtGllut7NdF1qdmzaJUmSpBagLBpec/3VhTXT9ZHZMMJ0XWq2bNolSZKkZu7npeHa9Tkrqse6t4OxubCp6brUrNm0S5IkSc1UWRSemw+vLYDKcJ3ESLgr/H7Z4W1JzZtNuyRJktQM/ViZrhfEpOs92oVr17smx68uSQ3Lpl2SJElqRlZE4dl58MbC6nS9TQQO6gj7Zofr2CW1HDbtkiRJUjPxfSk8UACFMel6r3YwNg+6mK5LLZJNuyRJktTELY/CM/Ng0irp+iGdYJ8OputSS2bTLkmSJDVh35WGa9fnlVWP9UkJ0/XctvGrS9LGYdMuSZIkNUHLo/DUXHirqHosKQKHdoK9TdelVsOmXZIkSWpivlkSrl2fH5Oub1aZrueYrkutik27JEmS1EQsq4An58E7RdVjbRPgsE6wVxZETNelVsemXZIkSWoCvq5M1xfEpOtbpMIJudDZdF1qtWzaJUmSpDhaWgFPzIX3iqvHkhPg8E6wR5bputTa2bRLkiRJcfLlEngwHxaWV4/1TYXjc6GT6bokbNolSZKkja60Ah6fC++vkq4f2RmGZJquS6pm0y5JkiRtRF8shocKoCgmXe+XCsfnQcek+NUlqWmyaZckSZI2giUV8Fgh/Lekeq
xdAhzVGXY3XZe0BjbtkiRJUiP7vDJdL4lJ1/unhWvXO5iuS1oLm3ZJkiSpkSypgEcK4YOYdD0lAY7OgYEZpuuS1s2mXZIkSWoEny6C8QWwqKJ6bJs0GJMLWabrkurIpl2SJElqQIvKw3T9o0XVY6mJcExn2NV0XVI92bRLkiRJDeTjRfDwKun6dukwOhcy/ctb0nrwV4ckSZK0gRaVw4RC+CQmXU9LhFE5sHN703VJ68+mXZIkSVpPQRBOg3+4MNx0bqUd0uG4XMjwr21JG8hfI5IkSdJ6KCkPN5r7bHH1WHoiHJsDO5muS2ogNu2SJElSPQQBfLAo3GyuNCZdH9A+nA7f3r+wJTUgf6VIkiRJdVRUBuML4X8x6Xr7xHAq/I7t41eXpJbLpl2SJElahyCA/5bAY3Nrpus7V6br6f5VLamR+OtFkiRJWouFZfBQAUxbUj2W0QZG58D2puuSGplNuyRJklSLIID3S+CxQlgWrR7fNQOOyQkv6SZJjc2mXZIkSVrFgsp0/cuYdD2zDYzJhW3T41eXpNbHpl2SJEmqFATwXjE8Mbdmuj4oE47qDKmm65I2Mpt2SZIkCZhfBg/mw9el1WNZbeD4XNjadF1SnNi0S5IkqVULAninGJ6cC8tj0vXdK9P1FNN1SXFk0y5JkqRWa94KeKAAvo1J1zu0gePzoH9a/OqSpJVs2iVJktTqBAG8VQRPzYMVMen6kEw4sjO0M12X1ETYtEuSJKlVKVwBD+TD90urxzomhWvX+5muS2pibNolSZLUKgQBvFkET8+FsqB6fM8sOKyT6bqkpsmmXZIkSS1ewQoYlw8/xqTrnZLghDzYMjV+dUnSuti0S5IkqcWKBvDGQnh2Xs10fa8sOKwzJCfErTRJqhObdkmSJLVI+cthXAH8FJOud65M17cwXZfUTNi0S5IkqUWJBvDaQnhuHpRXpusRYO8OcEgn03VJzYtNuyRJklqMX5eHa9enL6sey2kLJ+ZBn5T41SVJ68umXZIkSc1eNIBXFsAL82um6/t0gIM7QVvTdUnNlE27JEmSmrXZlen6jJh0PbcyXe9tui6pmbNplyRJUrNUEcDEBfDi/PA2hOn6vtlwUEdIMl2X1ALYtEuSJKnZ+WUZ3J8Ps5ZXj3WpTNd7mq5LakFs2iVJktRslEfh5QXw0oJwHTtAQgT26wAHdoQ2puuSWhibdkmSJDULsyrT9V9i0vWuyWG63qNd/OqSpMZk0y5JkqQmrTwKLy4I16/Hpuv7Z8MB2abrklo2m3ZJkiQ1WTMq0/VfY9L1TSvT9W6m65JaAZt2SZIkNTll0XBX+FcW1kzXR2bDCNN1Sa2ITbskSZKalJ+Xhtddn7OieqxbZbq+qem6pFbGpl2SJElNQlkUnpsPry2AynCdxEi4K/x+2eFtSWptbNolSZIUdz8tDdeuF8Sk6z3ahel61+T41SVJ8WbTLkmSpLhZEYVn58EbC6vT9TYROKgj7JsdrmOXpNbMpl2SJElx8UMpjCuAwph0vWdlut7FdF2SAJt2SZIkbWTLK9P1N1dJ1w/pBPt0MF2XpFg27ZIkSdpovisNd4afV1Y91jsFxuZCnum6JK3Gpl2SJEmNbnkUnpoLbxVVjyVF4NBOsLfpuiStkU27JEmSGtU3S+CBApgfk65vlgJj8yCnbfzqkqTmwKZdkiRJjWJZBTw5D94pqh5LisDhnWGvLIiYrkvSOtm0S5IkqcF9XZmuL4hJ17dIhRNyobPpuiTVmU27JEmSGszSCnhyLrxbXD2WnACHd4I9skzXJam+bNolSZLUIL5cAg/mw8Ly6rEtK9P1TqbrkrRebNolSZK0QUor4PG58P4q6fqRnWFIpum6JG0Im3ZJkiStty8Ww0MFUBSTrvdLhePzoGNS/OqSpJbCpl2SJEn1VloBjxbCf0uqx9olwFGdYXfTdUlqMDbtkiRJqpfPF8P4AiiOSdf7p8HxudDBdF2SGpRNuyRJkupkSWW6PjUmXU
9JgKNzYGCG6bokNQabdkmSJK3Tp4tgQiGUxKTr26TBmFzIMl2XpEZj0y5JkqQ1WlQOjxTCR4uqx1IT4ZjOsKvpuiQ1Opt2SZIk1eqTRTChABZVVI9tlw6jcyHTvyIlaaPw160kSZJqWFQeToX/JCZdT0uEUTmwc3vTdUnamGzaJUmSBEAQwMeVa9eXxKTrO6TDcbmQ4V+OkrTR+atXkiRJlJSHU+E/XVw9lp4Ix+bATqbrkhQ3Nu2SJEmtWBDAB4vCS7nFpus7tQ8b9vb+tShJceWvYUmSpFaquBzGF8DnMel6+8RwKvyO7eNXlySpmk27JElSKxMEMLUEHp0LpTHp+s7tw83m0v0LUZKaDH8lS5IktSJFZfBgAUxbUj2W0QaOy4EdTNclqcmxaZckSWoFggCmlMBjhbA0Wj2+awYckxNe0k2S1PTYtEuSJLVwCyvT9S9j0vXMNjA6F7ZLj19dkqR1s2mXJElqoYIA3iuGJ+bCsph0fWAGHJ0DqabrktTk2bRLkiS1QPPL4MF8+Lq0eiyrDYzJhW1M1yWp2bBplyRJakGCAN6tTNeXx6Tru2fCkZ1N1yWpubFplyRJaiHmrYAHCuDbmHS9Qxs4Pg/6p8WvLknS+rNplyRJauaCAN4ugqfm1UzXh1Sm6+1M1yWp2UqIdwGzZ89mzJgxdOzYkZSUFLbZZhs++uijqvuDIODPf/4zXbp0ISUlhX322Yfvv/++xjkWLFjA6NGjycjIICsri5NPPpnFixdv7JciSZK00c1dAbf8Ag8XVjfs2UlwzqYwJs+GXZKau7g27QsXLmT33XcnKSmJl19+ma+++oq//vWvdOjQoeqYG2+8kdtuu427776bqVOnkpaWxn777ceyZcuqjhk9ejRffvklr732Gi+88ALvvPMOp556ajxekiRJ0kYRBPDGQrhyOnwXMx1+jyy4vAf0czq8JLUIkSAIgng9+cUXX8zkyZN59913a70/CAK6du3K+eefzwUXXABAcXExubm53H///YwaNYqvv/6arbbaig8//JABAwYAMHHiRA444AB++eUXunbtus46SkpKyMzMpLi4mIyMjIZ7gZIkSY2gcAWMy4cfllaPdUqC43Ohr826JDULde1D45q0P/fccwwYMICjjjqKnJwcdthhB+67776q+3/++Wfy8/PZZ599qsYyMzPZddddmTJlCgBTpkwhKyurqmEH2GeffUhISGDq1Km1Pu/y5cspKSmp8SFJktTURQN4fQFcNb1mw75XFvy5pw27JLVEcW3af/rpJ+666y4233xzXnnlFf7whz9w1llnMW7cOADy8/MByM3NrfG43Nzcqvvy8/PJycmpcX+bNm3Izs6uOmZV1113HZmZmVUf3bp1a+iXJkmS1KDyl8NNs+DxuVBWOU+ycxKc3w1G5UJy3HcqkiQ1hrjuHh+NRhkwYADXXnstADvssAPTpk3j7rvvZuzYsY32vJdccgnnnXde1eclJSU27pLUEixaBNOnQ9++kJQU72qkBhEN4PWF8Ow8KK9s1iPA3h3gkE4265LU0sX113yXLl3Yaqutaoz169ePmTNnApCXlwdAQUFBjWMKCgqq7svLy6OwsLDG/eXl5SxYsKDqmFUlJyeTkZFR40OS1Mz9/DP07g3bbgu77QYxG5ZKzdWc5XDDTHhybnXDntMWLugGR+fYsEtSaxDXX/W777473377bY2x7777jh49egDQq1cv8vLyeOONN6ruLykpYerUqQwcOBCAgQMHUlRUxMcff1x1zJtvvkk0GmXXXXfdCK9CktQkPPIIzJ8f3v7kE3j//fjWI22AaAAvz4drZsD0yvefIsDwDnBZD9gsNa7lSZI2orhOjz/33HMZNGgQ1157LUcffTQffPAB9957L/feey8AkUiEc845h2uuuYbNN9+cXr16cdlll9G1a1cOPfRQIEzmR4wYwSmnnMLdd99NWVkZZ5xxBqNGjarTzvGSpBZim23Ca2AlJkJCAm
y2WbwrktbLr8vh/nyYETNZJLctnJgHvVPiV5ckKT7iesk3gBdeeIFLLrmE77//nl69enHeeedxyimnVN0fBAGXX3459957L0VFRQwePJg777yTLbbYouqYBQsWcMYZZ/D888+TkJDAEUccwW233UZ6enqdavCSb5LUQjz2WJiwH3MMVM7IkpqLigBeWQAvzA9vQ5iu75sNB3WEJKfCS1KLUtc+NO5Ne1Ng0y5JkuLpl2Vhuj5refVYl7YwNg96ma5LUotU1z40rtPjJUmSWrPyKExcAC8uCNexAyREYL8OMNJ0XZKETbskSVJczKpM13+JSde7Jodr13u0i19dkqSmxaZdkiRpIyqPhsn6xFXS9f2z4YBsaGO6LkmKYdMuSZK0kcyoTNd/jUnXN00O1653N12XJNVivd7LfffddxkzZgwDBw5k9uzZADz44IO89957DVqcJElSS1AehWfmwvUzqxv2hEi4K/wl3W3YJUlrVu+m/cknn2S//fYjJSWFTz/9lOXLw395iouLufbaaxu8QEmSpOZs+lK4Zga8HDMdvlsyXNodDuzkdHhJ0trV+5+Ja665hrvvvpv77ruPpKSkqvHdd9+dTz75pEGLkyRJaq7KovBUZbo+Z0U4lhiBQzrBJT1gU9N1SVId1HtN+7fffsvQoUNXG8/MzKSoqKghapIkSWrWfloarl0vWFE91qNduHZ9k+T41SVJan7q3bTn5eXxww8/0LNnzxrj7733Hr17926ouiRJkpqdFVF4dh68sRAqZ8LTJgIHdoT9ssN17JIk1Ue9m/ZTTjmFs88+m3//+99EIhF+/fVXpkyZwgUXXMBll13WGDVKkiQ1eT+UwrgCKIxJ13u2C6+73sV0XZK0nurdtF988cVEo1GGDRtGaWkpQ4cOJTk5mQsuuIAzzzyzMWqUJElqspZXputvrpKuH9wJhncwXZckbZhIEATBug9b3YoVK/jhhx9YvHgxW221Fenp6Q1d20ZTUlJCZmYmxcXFZGRkxLscSZLUTHxXCg/kw9yy6rHeKTA2F/JM1yVJa1HXPrTeu8f/5je/YdGiRbRt25atttqKXXbZhfT0dJYsWcJvfvObDSpakiSpOVgehUcK4K+zqhv2pAgc1Rku7GbDLklqOPVO2hMTE5kzZw45OTk1xufNm0deXh7l5eUNWuDGYNIuSZLq6psl8GABzItJ1/ukhDvD57aNX12SpOalrn1onde0l5SUEAQBQRCwaNEi2rWrvrhoRUUFL7300mqNvCRJUkuxrAKemgdvF1WPJUXgsM6wV5Zr1yVJjaPOTXtWVhaRSIRIJMIWW2yx2v2RSIQrr7yyQYuTJElqCr6uTNfnx6Trm6fACXmQY7ouSWpEdW7aJ02aRBAE7L333jz55JNkZ2dX3de2bVt69OhB165dG6VISY1kxgw45BD4+We48ko455x4VyRJTcqyCnhiLrxbXD2WnACHdYI9syBiui5JamT1XtM+Y8YMunXrRkJCvfewa7Jc065W6/e/h3/+Eyoqwr88582DmDfkJKk1+3IJPJgPC2O269kyFU7IhU6m65KkDdTga9pX6tGjBwClpaXMnDmTFStW1Lh/2223re8pJcVLWhqsfN+uTZvwQ5JaudLKdH3yKun6kZ1hSKbpuiRp46r3X+hz587lpJNO4uWXX671/oqKig0uStJG8qc/wS+/wI8/hredaSKplZu2OFy7XhSTrvdLhePzoGNS/OqSJLVe9W7azznnHIqKipg6dSp77rknTz/9NAUFBVxzzTX89a9/bYwaJTWWDh3g0UfjXYUkxV1pBTxWCFNKqsfaVabrg03XJUlxVO+m/c033+TZZ59lwIABJCQk0KNHD4YPH05GRgbXXXcdI0eObIw6JUmSGsX/FsNDBVAck673T4MxuZBtui5JirN6N+1Lliypuh57hw4dmDt3LltssQXbbLMNn3zySYMXKEmS1BiWVMCjhTA1Jl1PSYCjcmBQhum6JKlpqH
fTvuWWW/Ltt9/Ss2dPtttuO+655x569uzJ3XffTZcuXRqjRkmSpAb12SIYXwglMen61pXpegfTdUlSE1Lvpv3ss89mzpw5AFx++eWMGDGC8ePH07ZtW+6///6Grk+SJKnBLC6HRwrhw0XVY6mJcExn2NV0XZLUBNX7Ou2rKi0t5ZtvvqF79+506tSpoeraqLxOuyRJLd8ni2BCASyKudDNtukwOgeyTNclSRtZo12nfVWpqansuOOOG3oaSZKkRrGoHB4uhI9j0vW0RDgmB3Zpb7ouSWra6t20B0HAE088waRJkygsLCQajda4/6mnnmqw4iRJktZXEISN+sOFsDgmXd8+HUbnQsYGRxeSJDW+9bpO+z333MNee+1Fbm4uEd+eliRJTUxJeTgV/tPF1WNpiXBsDgwwXZckNSP1btoffPBBnnrqKQ444IDGqEeSJGm9BUG4ydwjheEl3VbasT0clwPtTdclSc1Mvf/pyszMpHfv3o1RiyRJ0norLofxBfB5TLrePhGOzYWd2sevLkmSNkRCfR9wxRVXcOWVV7J06dLGqEeSJKleggD+WwxXTK/ZsO/cHi7vacMuSWre6p20H3300Tz88MPk5OTQs2dPkpJqXiPlk08+abDiJEmS1qaoDB4qgC+WVI+1Tww3mtvBZl2S1ALUu2kfO3YsH3/8MWPGjHEjOkmSFBdBAFNK4LFCWBpzIZtdM8JLuaUlxq82SZIaUr2b9hdffJFXXnmFwYMHN0Y9kiRJa7WwDB4sgC9j0vWMNjAmF7ZLj19dkiQ1hno37d26dSMjI6MxapEkSVqjIIDJxfD4XFgWk67vlgFHm65Lklqoem9E99e//pWLLrqI6dOnN0I5kiRJq5tfBrf+EibsKxv2rDZwxiZwUhcbdklSy1XvpH3MmDGUlpbSp08fUlNTV9uIbsGCBQ1WnCRJzd2ELybw2JePsWfPPTl717PdC6aeggDeLYYn5sLymHR990w4sjOk2qxLklq4ejftf//73xuhDEmSWp7P8j9j9FOjAXj222fpmdWTQ/seGt+impF5K8Jk/ZvS6rEObeD4POifFr+6JEnamNZr93hJkrRu+Yvza3z+66Jf41RJ8xIE8HYRPDWvZro+JBOO6AwppuuSpFakTk17SUlJ1eZzJSUlaz3WTeokSQrt3Wtv9u2zL6/++Crb5W7HsVsfG++Smry5K+CBAvguJl3PToITcqGf6bokqRWqU9PeoUMH5syZQ05ODllZWbWuxwuCgEgkQkVFRYMXKUlSc9Q2sS0TR09k8YrFpLdNdz37WgQBTCqCp+fBiph0fWgWHNEJ2pmuS5JaqTo17W+++SbZ2dkATJo0qVELkiSpJYlEIrRPbh/vMpq0whUwLh9+WFo91rEyXe9rui5JauXq1LTvscceVbd79epFt27dVksLgiBg1qxZDVudJElqsaIBvLkQnpkHZUH1+J5ZcHhnSK73hWklSWp56r0RXa9evaqmysdasGABvXr1cnq8JElap4IVcH8+/BSTrndKgrF5sEVq/OqSJKmpqXfTvnLt+qoWL15Mu3btGqQoSZLUMkUDeH0hPDsPyivT9Qiwdwc4pJPpuiRJq6pz037eeecB4dq8yy67jNTU6rfBKyoqmDp1Kttvv32DFyhJklqGOcvDtes/L6sey2kLY3NhM9N1SZJqVeem/dNPPwXCpP2LL76gbdu2Vfe1bduW7bbbjgsuuKDhK5QkSc1aNIBXF8Dz82um68Mq0/W2puuSJK1RnZv2lbvGn3TSSdx6661ej12SJK3Tr8vDteszYtL13Lbh2vU+KfGrS5Kk5qLea9r/85//1Pi8pKSEN998k759+9K3b98GK0ySJDVfFQG8sgBemB/ehjBdH54NB3eEJNN1SZLqpN5N+9FHH83QoUM544wzWLp0KQMGDGD69OkEQcAjjzzCEUcc0Rh1SpKkZuKXZWG6Pmt59ViXynS9l+m6JEn1Uu/3ud955x2GDBkCwNNPP0
0QBBQVFXHbbbdxzTXXNHiBkiSpeSiPwgvz4C8zqxv2CLB/Nlzaw4ZdkqT1Ue+mvbi4mOzsbAAmTpzIEUccQWpqKiNHjuT7779v8AIlSVLTN2sZXDcz3GwuWjkdvmsyXNIDDu3cuNPhn/v2Oa5/73pmFM1ovCeRJClO6j09vlu3bkyZMoXs7GwmTpzII488AsDChQu9TrskSa1MeRReWgAvL6hu1hMiMCIbRmZDm0Zeuz7+f+MZ8/QYIkT423//xs9n/0xqktePkyS1HPVu2s855xxGjx5Neno6PXr0YM899wTCafPbbLNNQ9cnSZKaqBnLwuuuz45Zu75pcrh2vftGeh9/yi9TSIwkUhFUULikkJnFM+nbyY1xJUktR72b9tNOO41dd92VmTNnMnz4cBISwrfQe/fu7Zp2SZJagfJouCv8KwtrpusHZIfr1xs7XY91TP9juPfje6kIKti5685snr35xntySZI2gkgQBEG8i4i3kpISMjMzKS4u9vrzkiStxfSl4c7wc1ZUj3WrTNe7xWmV3PSi6fy08Cd277Y7yW2S41OEJEn1VNc+tM7vhW+11VYsWLCg6vPTTjuNefPmVX1eWFhIaqpryCRJaonKovDUXLh+ZnXDnhiBQzqFm83Fq2EH6JnVk7177W3DLklqkerctH/zzTeUl5dXff7QQw9RUlJS9XkQBCxbtqxhq5MkSXH301K4Zga8sgBWTs/r0S68jNsBHcPmXZIkNY56r2lfqbZZ9ZGI/2pLktRSlEXh2Xnw+sLqZr1NBA7sCPtlh+vYJUlS41rvpl2SJLVcPy4Nd4YviFm73rNduHa9q7PQJUnaaOrctEcikdWSdJN1SZJalhVReGYevLlKun5wJxjewXRdkqSNrc5NexAEDBs2jDZtwocsXbqUgw46iLZt2wLUWO8uSZKan+9Lw3R9bln1WO8UGJsLeabrkiTFRZ2b9ssvv7zG54cccshqxxxxxBEbXpEkSdqolkfh6bkwqah6LCkCh3aCvU3XJUmKK6/TjtdplyS1Xt+WwgP5MC8mXe+TEq5dz20bv7okSWrp6tqHuhGdJEmt0LIKeGoevF1UPZYUgcM6w15ZpuuSJDUVNu2SJLUyXy+BBwtgfky6vnkKnJAHOabrkiQ1KTbtkiS1Essq4Im58G5x9VjbBDi8E+yZBV4URpKkpsemXZKkVuDLJfBgPiyMudjLFqnhzvCdTNclSWqybNolSWrBllbA43Nhcky6npwAR3SGoZmm65IkNXV1atpvu+22Op/wrLPOWu9iJElSw5m2OFy7XhSTrvdNDdeud0yKX12SJKnu6nTJt169etXtZJEIP/300wYXtbF5yTdJUktSWgGPFcKUkuqxdglwZGcYbLouSarNJ5/A1Kmw//7Qs2e8q2kVGvSSbz///HODFSZJkhrP/xbDQwVQHJOub5UGx+dCtum6JKk2U6fC7rtDRQVkZcG330JOTryrUqWE9X3gihUr+PbbbykvL1/3wZIkqVEtqYB/z4F/zK5u2NslhFPhz9rEhl2StBZvvQXRaHi7qAg+/TSe1WgV9W7aS0tLOfnkk0lNTaV///7MnDkTgDPPPJPrr7++wQuUJElr99kiuGI6TI2ZDr91GlzRE3Z3OrwkaV1GjIC2lZcSycuDnXeObz2qod5N+yWXXMLnn3/OW2+9Rbt27arG99lnHx599NEGLU6SJK3Z4nL4569w169QUpmupybCiXlwxibQwXRdklQX220HX34Jjz0G//sfZGfHuyLFqPcl35555hkeffRRdtttNyIxb93379+fH3/8sUGLkyRJtftkEUwogEUV1WPbpsPoHMiyWZck1VefPuGHmpx6N+1z584lp5ZNCZYsWVKjiZckSQ1vUTk8XAgfL6oeS02EUTmwS3unwkuS1NLUe3r8gAEDePHFF6s+X9mo//Of/2TgwIENV5kkSaoSBPBRSbh2PbZh3z4druwJu2bYsEuS1BLVO2m/9tpr2X///f
nqq68oLy/n1ltv5auvvuL999/n7bffbowaJUlq1UrKw6nwny6uHktLhGNzYIDpuiRJLVq9k/bBgwfz2WefUV5ezjbbbMOrr75KTk4OU6ZMYaeddmqMGiVJapWCAD6oTNdjG/Yd24c7w+9sui5JUosXCYIgiHcR8VZSUkJmZibFxcVkZGTEuxxJajU+y/+Mm96/iS7pXbhyzytJa5sW75KajOJyGF8An8c06+mJcFwu7NQ+fnVJkqSGUdc+tE7T40tKStZ9UCWbXklSXVREK9j3wX2Zv3Q+AOXRcv4+4u/xLaoJCILweuuPzoXSmJ3hB7QPN5trX++FbZIkqTmr0z/9WVlZdd4ZvqKiYt0HSZJaveUVy5lXOo+AgIRIArOKZ8W7pLgrKoOHCuCLJdVj7RNhdC7sYLouSVKrVKemfdKkSVW3p0+fzsUXX8yJJ55YtVv8lClTGDduHNddd13jVClJanFSk1K5bOhlXPXOVbRv254Ld78w3iXFTRDAlBJ4rBCWRqvHd8kI0/W0xPjVJkmS4qvea9qHDRvGb3/7W4499tga4xMmTODee+/lrbfeasj6NgrXtEtS/BQvKyYlKYW2iW3jXUpcLKxM16fFpOsZbWBMLmyXHr+6JElS46prH1rv3eOnTJnCgAEDVhsfMGAAH3zwQX1PJ0lq5TLbZbbKhj0IYHJxuDN8bMO+W0a4M7wNuyRJgvVo2rt168Z999232vg///lPunXr1iBFSZLUki0og9tmwwP5sKxyOnxWGzh9Ezipi9PhJUlStXrvQfu3v/2NI444gpdffpldd90VgA8++IDvv/+eJ598ssELlCSppQgCeLcYnpgLy2PWrg/KhKM6Q6rNuiRJWkW9k/YDDjiA77//noMOOogFCxawYMECDjroIL777jsOOOCAxqhRWn/PPgu9e8Muu8APP8S7Gkmt2Pwy+Psv4bXXVzbsHdrAmZvA2Dwb9mYtCKD0V6hYEe9KJEktUL03omuJ3IiuhYpGISMDliyBxEQ4+GB46ql4VyWplQkCeLsInppXM10fnAlHdoYUm/XmLYjCu0fAL89Au1wYPhna94l3VZKkZqCufWi9p8cDFBUV8a9//Yuvv/4agP79+/Ob3/yGzMzM9atWaiyRSO23JWkjmLsCHiiA70qrx7KT4Phc2CotfnWpARV9ETbsAMvnwY//hO29BK4kqeHUe3r8Rx99RJ8+ffjb3/5WNT3+lltuoU+fPnzyySeNUaO0fhIS4OGHYfPNYddd4aab4l2RpFYiCGDSQrhqRs2GfWgWXN7Dhr1FaZcHCW2BBAgqIL13vCuSJLUw9Z4eP2TIEDbbbDPuu+8+2rQJg/ry8nJ++9vf8tNPP/HOO+80SqGNyenxkqSGUrgi3BX++6XVYx2T4IRc6Guz3jIVvA0//RuytoO+50Ck3pmIJKkVqmsfWu+mPSUlhU8//ZS+ffvWGP/qq68YMGAApaWla3hk02XTLknaUNEA3lwIz8yDsph/WffMgsM7Q7J9nCRJitFoa9ozMjKYOXPmak37rFmzaN++ff0rlSSpmStYAePy4ceYdL1TUrgr/Bap8atLkiQ1f/Vu2o855hhOPvlkbr75ZgYNGgTA5MmTufDCCzn22GMbvEBJkpqqaACvL4TnVknX9+4Ah3YyXZckSRuu3k37zTffTCQS4YQTTqC8vByApKQk/vCHP3D99dc3eIGSJDVFc5aH6frPy6rHctqGa9c3N12XJEkNZL2v015aWsqPP/4IQJ8+fUhNbb5/obimXZJUV9EAXqtM18sr/wWNAMM6wCGdoK3puiRJqoNGvU47QGpqKttss836PlySpGbn18p0fXpMup7bNly73iclfnVJkqSWq85N+29+85s6Hffvf/97vYuRJKkpqgjglQXw4vya6frwbDi4IySZrkuSpEZS56b9/vvvp0ePHuywww6s54x6SZKanV+WwbgCmBmTrnepTNd7ma5LkqRGVuem/Q9/+AMPP/wwP/
/8MyeddBJjxowhOzu7MWuTJCluKgJ4eT68tCC8DWG6vl82HGi6LkmSNpI6/8nxj3/8gzlz5nDRRRfx/PPP061bN44++mheeeUVk3dJUosyaxlcOwOen1/dsHdNhou7w2GdbdglSdLGs967x8+YMYP777+fBx54gPLycr788kvS09Mbur6Nwt3jJUkA5VF4eUGYrkcr/3VMiMCIbBiZDW1s1iVJUgNp9N3jExISiEQiBEFARUXF+p5G0sZQUgLLl0PnzvGuRGqyZiwLd4afvbx6bJPkcO16j3bxq0uSJLVu9coMli9fzsMPP8zw4cPZYost+OKLL7jjjjuYOXNms03ZpRbv+eehUyfIyYEbboh3NVKTUx6FZ+bC9TOrG/aESLhu/Y/dbdglSVJ81Xl6/GmnncYjjzxCt27d+M1vfsPo0aPp1KlTY9e3UTg9Xi3aoEEwZUp4OyUFSkvjW09rsGIhlM6GjH6QkBjvarQW05eGO8P/GpOud6tM17vZrEuSpEbU4NPj7777brp3707v3r15++23efvtt2s97qmnnqp/tZIaz+abw9SpEIlA797xrqblW/g5vDYYyhdD7jDY+1WIuBC6qSmLhpvMvboAVr5znRiBkR3D9euJkbiWJ0mSVKXOTfsJJ5xAJOJfMVKzc/vtkJcXrmv/v/+LdzUt30/3Q8XS8HbBG1D8NWT1j2tJqumnpeHa9fwV1WPd28GJeeEadkmSpKakzk37/fff34hlSGo0GRmuZd+YsraGoAIiiZCYCqmbxLsiVSqLwnPz4bWYdL1N5dr1fU3XJUlSE7Xeu8dLkmrR+zdABIqmQe8ToW1WnAsSwI+V6XpBTLres124dr2r6bokSWrCbNolqSFFItDnN/GuQpVWROGZefDmwprp+sGdYHiHcJd4SZKkpqzJ7I50/fXXE4lEOOecc6rGli1bxumnn07Hjh1JT0/niCOOoKCgoMbjZs6cyciRI0lNTSUnJ4cLL7yQ8vLyjVy9JKmp+b4Urp4Bb8Q07L1T4E89YL9sG3ZJktQ8NImk/cMPP+See+5h2223rTF+7rnn8uKLL/L444+TmZnJGWecweGHH87kyZMBqKioYOTIkeTl5fH+++8zZ84cTjjhBJKSkrj22mvj8VIkSXG2PApPz4VJRdVjSRE4pBMMM12XJEnNTNyT9sWLFzN69Gjuu+8+OnToUDVeXFzMv/71L2655Rb23ntvdtppJ/7zn//w/vvv89///heAV199la+++oqHHnqI7bffnv3335+rr76af/zjH6xYsWJNTylJaqG+K4Wrptds2PukwGU9YbjpuiRJaobi3rSffvrpjBw5kn322afG+Mcff0xZWVmN8b59+9K9e3emTJkCwJQpU9hmm23Izc2tOma//fajpKSEL7/8co3PuXz5ckpKSmp8SJKar+VReLgA/joL5pWFY0kRODoHLugGuW3jW58kSdL6iuv0+EceeYRPPvmEDz/8cLX78vPzadu2LVlZWTXGc3Nzyc/PrzomtmFfef/K+9bkuuuu48orr9zA6iVJTcE3S+CBAphfVj22eQqckAc5NuuSJKmZi1vSPmvWLM4++2zGjx9Pu3btNupzX3LJJRQXF1d9zJo1a6M+vyRpwy2rgPEF8Ldfqhv2tgkwKgfO72bDLkmSmrCrroJevep0aNyS9o8//pjCwkJ23HHHqrGKigreeecd7rjjDl555RVWrFhBUVFRjbS9oKCAvLw8APLy8vjggw9qnHfl7vIrj6lNcnIyyclemFeSmquvlsAD+bAw5mIhW6TC2FzoZLMuSZKasq++gssvr/PhcUvahw0bxhdffMFnn31W9TFgwABGjx5ddTspKYk33nij6jHffvstM2fOZODAgQAMHDiQL774gsLCwqpjXnvtNTIyMthqq602+muSJDWupRVhs37rL9UNe3ICHJcL521qwy5JkpqBNvXLzuOWtLdv356tt966xlhaWhodO3asGj/55JM577zzyM
7OJiMjgzPPPJOBAwey2267AbDvvvuy1VZbcfzxx3PjjTeSn5/Pn/70J04//XSTdElqYaYthocKaqbrfVPDtesdk+JXlyRJUr1ssQXcemv48dNP6zy8SVynfU3+9re/kZCQwBFHHMHy5cvZb7/9uPPOO6vuT0xM5IUXXuAPf/gDAwcOJC0tjbFjx3LVVVfFsWpJUkMqrYDH58L7xdVj7RLgyM4wOBMiXsZNkiQ1N2edBSeeCJmZ6zw0EgRB0PgVNW0lJSVkZmZSXFxMRkZGvMuRJFX6X2W6XhyTrm+VBsfnQrbpuiRJasbq2oc26aRdktQ6LamAxwrhvyXVY+0SwuuuD8owXZckSa2HTbskqUn5bBGML4SSmHR96zQYkwsdTNclSVIrY9MuSWoSFpfDI4Xw4aLqsZTKdH2g6bokqS6CACoq6r07t9SUxe2Sb5IkrfTJIrhies2Gfdt0uKInDHKzuaalYhn88hzM/yjelUhSTdOmwSabQLt2cP318a5GajA27ZKkuFlUDvf9Cvf8CosqwrHURPhNFzitK2S1gunwcxbNYezTYznq8aP4Zt438S5n7YIA3hoJ7xwCr+wMPz8Y74okqdp110FBQZi0//GPsGjRuh8jNQPOG5EkbXRBAB8vgocLYXFF9fh26TA6FzJb0b9Opzx/ChN/mEhAwFdzv+LL076Md0lrVr4ICt6s/nzWk9Dr+PjVI0mxOncO/xuJQFoaJCfHtx6pgbSiP4skSU1BSXnYrH8SE4CkJcKxOTCgfeubCp+/OJ9oECUgoGBxQbzLWbs27aHDDrDw0/Dz3H3iW48kxbrqKli2DGbOhEsugbZt412R1CBs2iVJG0UQwEeV6fqSmHR9h3Q4LhcyWum/SNcOu5YjHjuCFeUruGW/W+JdztpFIjBsEsx8HFI3gS4j4l2RJFXLyIC77453FVKDiwRBEMS7iHir60XtJUnrp7gcxhfA54urx9Ir0/WdWmG6vqqKaAXRIEpSYitYxC9JkoC696GtNNeQJG0MQQBTS+DRuVAak64PaA+jcqC9/woBkJiQSCKJ8S5DkiQ1Qf65JElqFEVlML4Q/heTrrdPDKfC79g+fnVJkiQ1JzbtkqQGFQQwpQQeXyVd3yUDjukM6f7LI0mSVGf+6SRJajALy+ChApi2pHosow2MzoHtTdclSZLqzaZdkrTBggDeL4HHCmFZtHp8tww4Oie8pJskSZLqz6ZdircFC6CsDHJz412JtF4WlMGDBfBVTLqe1QZG58K26fGrS5IkqSVIiHcBUqv22GOQkwN5efDXv8a7GqleggDeKYIrp9ds2AdlwuU9bdglSZIagtdpx+u0K4522AE++yy8nZkJRUXxrEaqs/ll8EA+fFNaPZbVBo7Pha1t1iVJktbJ67RLzcGWW8IXX4S3N9ssvrVIdRAE8HYRPDUPlsesXR+cCUd2hhTXrkuNb/FiePFF6NkTdt013tVIkhqZTbsUT/fcE/7RtXQpXHRRvKtpeoIAzjoL/vUvGDgQnn4anA0TN/NWwLgC+C4mXe/QBo7Pg/5p8atLcVL4Lnx3B2T0ha3/BAlJ8a6odYhGYciQ6llajz4KRx8d15IkSY3Lpl2Kp8xMuP76eFfRdE2dCnfcEd6eNAn+eS/s+inMegJyh8GQJ6FNSnxrbAWCAN4qCtP1FTHp+tAsOKITtDNdb31WFMGk/aBiORBAm1TY6v/iXVXrMGdOdcOekADPPx+fpr28HO66C375Bf7wh/ANaElSo7Bpl9R0pcXEt0EA5TNhxoTw8zkvw8xHofeJcSmttShcEa5d/35p9VjHJDghF/qarrdeK4qgovKbIpIIS2bFtZxWJS8Ptt4apk0LU/cRI+JTx3XXwZ//DImJ8PDD8PPP4W1JUoNz93hJTdc228A//hFu2HfmmTD6sJr3t2kfn7pagWgAry+Aq6bXbNj3zII/94DihVPZ9JZNyb4hmye+eiJeZSpe0npAn1PD28mdYIsz4ltPa5
KYCO+9B/ffD2+/DaNHx6eOzz+HSAQqKmDWrHCdvSSpUbh7PO4eLzUrX98MMx6FLiNg2ysh4nuPDa1gBYzLhx9jmvVOSXBCHmyZGn4+5N9DeP+X94kGUbJTspl/0fz4FKv4WlEEbdIhwYl7rc5rr8HIkVBWBscfDw88EO+KJKnZcfd4qTX59NNwM6JddoHDD493NY2r3wXhhxpcNIDXF8Jz86As5u3cvbLgsM6QHPP+SEZyBhEiJEQSaN/WGQ+tVtuseFegeBk+PEzY58+Hfv3iXY0ktWg27VJzl58PgwfDsmXh+sZnn4WDD453VWpm5iyHBwrgp5h0vXMSjM2DzVNXP/7OkXdy+kuns3jFYm7e9+aNV6ikpiM3N/yQJDUqm3apufvhByitvAZXQkKYutu0q46iAbxWma6XV6brEWBYBzikE7Rdw+qDHlk9eOG4FzZanZIkSa2VTbvU3O28M2y/fXgJoPR0OOaYeFekZuLX5eHa9enLqsdy24bpeh+vpCdJkpqShQvDq1V07QqHHBJuhtlK2LRLzV1ycng9888/hz59IDs73hWpiYsG8MoCeGF+zXR9eDYc3BGS3NtPkiQ1JUEAe+0V/r0L8Le/wTnnxLWkjck/zaR4e/llOOqo8Jq30ej6naNt2zBxt2HXOvyyDK6bCc/ETIfPawsXdYcjOm9Yw/5LyS98MPsDKqIVDVOsJEkSwJIl1Q07wKRJ8aslDkzapXiaNStcf15RAU88AZ07w29/u+HnnTo1XNt+4IGw6aYbfj41exUBTFwAL84Pb0OYru+bDQc1QLr+5s9vMuKhEZRFyxix2QheOu4lIs1g2lrB4gKmzp7KLpvsQl56XrzLkSRJtUlLg/32g1deCT8/+uj41rOR2bRL8VRYCOXl4e3ExLCJ31CTJsGwYeE0ossug++/h6ysDT+vmq1Zy8K167OWV491aQsn5kHPBlq7fv9n91MRhAn7xB8mMmfxHLq279owJ28ks0tms81d27Bw2UKy2mXx+e8/p3tm93iXJUmSVhWJwHPPwZtvQl5euJ9TK+L0eCmedtghnBoPYSLeECn7m29Wb8wxbx58+eWGn1PNUnkUnp8H186sbtgTInBAR/hTj4Zr2AF27roz0SBKQiSBLuld6JTaqeFO3kje/PlNFi5bCEDRsiLe+OmNOFck1dHdd0O3brDvvuHveUlqDdq2hREjWl3DDibtUnwlJMBjj4W7YWZkhGn7hho5Em64IVwf3707bLfdhp9Tzc7MynT9l5h0fZPkcGf4Hu0a/vnO2OUMMttl8tPCnzhp+5Nom9i24Z+kge266a60TWzLiooVJCUksdumu8W7JGndCgrgtNPC2VRz5oS/72+6Kd5VSZIakU271BR06NBw59ptN/jii/Bj773Dy8Cp1SiPwosLwvXr0cq16wkROCAb9s+GNo00vyoSiXDCdic0zskbyRYdt+CjUz7ijZ/fYO9ee9Ovc794lySt26p7RSQ4aVKSWrpIEARBvIuIt5KSEjIzMykuLiYjIyPe5UjSepm+FMYVhNdfX2nT5HDterdGSNclxcl998G110K/fvDQQ145RJKaqbr2oTbt2LRLat7KouE1119ZACt/oSeuTNc7hrcbwo8LfuTiNy4mMZLIjcNvbD2bthW+C9OugpSusNPfoW0DzoyRJEmtVl37UKfHS1Iz9tNSeCAf5qyoHuveDsbmwqYNnK4f88QxfJb/GQCFSwp5c+ybDfsETVG0HN4+EMoWQSQBElNhl7viXZXWpeQ7+PY2SO0Kfc+HxOR4VyRJ0nqzaZekZqgsCs/Nh9dWSdcP6hhee72h0vVYBUsKqi7rlr84v+GfoCkKyqF8MRCEG38tnx/virQu0Qp4Y09YVghBFMoWw/bXxrsqqeULgnDZxqOPwgEHhLfdc0FqEP4kSWoY+flw9dVwzz1QURHvalq0H5fC1TPg1ZiGvUe78DJuDTEdPggCphVOo2BxQY3xW/a9hXZt2pGWlM
aNw2/csCdpLhLbwQ63QEJbSOkC21wW74q0LhVLYekcqHyDiUXfxrceqbV46y3405/CjXBvuAGefjreFUkthkm7pA0XBOFO9d9+G15qbvZsuOqqeFfV4qyIwrPz4I2F1c16mwgc3AmGdwh3iW8Ixz55LI9++ShJCUlMHDORvXvtDcBR/Y/isH6HESFCYsL6X55wful8yqPl5KbnNkzBja3v2bDlmUBk9Z271fQkpcMWZ8B3d0BCMmxxZrwrklqH0tK1fy5pvZm0S9pwK1bA11+HDTvAhx/Gt54W6PvSMF1/PaZh71WZru+X3XAN+/zS+Tz65aMAVAQV3PfJfTXub5PQZoMa9kemPULuzbnk/TWPW6bcskG1blSRBBv2xrJsLhS8FU5jbygDbodDpsPh+ZC7Z8OdV9KajRgBJ5wAaWlw2GFw9NHxrkhqMWzaJW245OTwH2oI16/99rfxracFWR6FRwvhr7OgsHKzuaQIHNEZLuoOXRp4f62M5Ay6tu9KYiSRaBBl25xtG/T81717XdW6+KvfuZryaDlvT3+br+Z+1aDPo2Zi0Y/w/Obwxl7w0rZQVtJw507rAW0zG+58ktYuMRHGjYPFi+Gpp8K/DSQ1CKfHSy1dSUm4GcyiRfB//wfdG+kyXf/5D5x1FnTsCD17Ns5ztDLflcK4fJhXVj3WJwXG5kFu28Z5zqTEJN496V3u/fheumd253c7/a5Bz79lpy2ZNncaESJs0XELjn78aJ7+5mkiRJhwxARGbT2qQZ9PTdwvz0JZcXh7yc9Q+B5sckB8a5IkqYnxOu14nXa1cGPGwCOPhLf794fPP49vPVqn5VF4ai68VVQ9lhSBwzrDXlkNNxU+HoqXFXPde9exrHwZZ+5yJpvdvlnVfSM3H8kLx70Qx+q00RW+C6/vAUTCzf4O+hbSGumNRUmSmhiv0y41Bx9+CLfeCr17w6WXNs5Usu+/r97N/ccfG/78alDfLIEHCmB+TLq+eQqckAc5jZSub0yZ7TK5fp/rgXCX+m1ytmFa4TQCAnbvtnucq9NGlzME9n4d5r4Hmx5iwy5JUi1s2qV4WboU9tknXPsVBOFasMsvb/jnueSScDOYsjL4858b/vxqEMsq4Ml58E5R9VjbBDi8E+yZ1TL3QItEIrw59k0e+PwB8tLznBrfWuXtHX5IkqRa2bRL8bJoUbjeHMKGffr0xnmeQw+FwsJwh/ecnMZ5Dm2Qr5bAgwWwICZd3yIVTsiFzi0gXV+bTqmdOG/gefEuQ9owP/8c7pY9cyb85S/whz/EuyJJUgvi7vFSvOTkwBlnhLczM+Hss2ve/9134S6sM2du+HNlZdmwN0FLK+DBfLj1l+qGPTkBjs2B8zZt+Q271GJccw1MmwYLF4a/10sacBd8SVKrZ9MubWw//QRDh4abwh10EMybB7/+CttvX33MV1/BttvCiSfCNtvA7NnxqlaNZNpiuHI6vFdcPdY3Ff7cA/bs0LjT4e/+6G7aX9eezW/fnG/mfdN4TyS1Fikp1beTkqCNExklSQ3Hpl3a2M47D95/H77+Go45BrKzV9+A7q23YPny8HZJCUyZstHLVOMorQgv43b7bFhYHo4lJ8CYXDhnU+hUma4vL19OY1zcY1n5Ms546QwWr1jMzwt/5sq3r2zw55BanSuvhEMOgR12gMceg9TUeFckSWpBfCtY2tjKy8ON54Kgelf3Ve21V9jIL18OGRkwcODGrVGN4n+LYXwBFJVXj/VLhePzoGNS+HkQBJw18Szu+OAOemb1ZNLYSfTM6tlgNSRGEmnXph2lZaUAtG/bvsHOLbVaHTvCk0/GuwpJUgtl0q74efDBcFfz+++PdyV1M25cWO+4cRt2nptvDqe+d+8efg1qmwfdrx/873/h1+aLL2CTTTbsORVXSyrgP3PgH7OrG/Z2CeFl3M7etLphB/hx4Y/c8cEdAMwqnsXtU29v0FqSEpN4ZtQzDNx0IIf3O5xrh13boOeXJElSwz
JpV3xMngwnnBA2rI8/Hl6nfOjQeFe1Zu+9F64vj613yJD1O1ffvvDpp+s+bostwg81a58vhocKoCQmXd86LZwO3yFp9eMzkzNJSkiiPFpONIiSm57b4DXt03sf9um9T4OfV5IkSQ3Ppl3xMWtW+N+Va3YbYof0xrSyvuZSr+JuSQU8UggfxGwinZIAR+fAwIw1bzTXOa0zz456ljs+vIOtO2/NObuds1HqjTXxh4lc8dYV9MjswZ0j76RjaseNXoMkSZJCkaAxdjpqZkpKSsjMzKS4uJiMjIx4l9M6LFkCe+4JH30Ubtzz9tvQvgmvrV28OFxn/tFHsNNO4UZx6enxrmr9rFgBl14Kn30Gp58eXsddDerTReHa9UUxWxZsmw6jcyCrlnS9KVlWvozsG7JZVr6MhEgCv9vpd/xj5D/iXZYkSVKLU9c+1KRd8ZGWBh98APPnh7unJzTx7RXS02HqVFiwoGnW++WX4bT9nXeGkSPXfuwdd8Bf/xrOGpg0KZw10LXrxqmzBYoGUd6b+R5pSWlskbMTjxTCR4uq709NhFE5sEv7xr2MW0Mpj5azvGI5AeH7uYvLFse5IkmSpNatiXUealUiEejUqek1wGuSkNA06y0ogF13hauvhgMPhBdfXPvx8+ZVv4aKivCSclpvv3/h9+xx/x4MePhUDvnvJzUa9u3S4YqesOtapsM3Nelt07ltxG2kt01n846bc9nQy+Jd0sYRRKGsEX8WfnkW3jkMvrw2fK7GtGQWvHsETBoBCz9v3OeSJEmNrol1H9JaFBfDP/8JEydWry0XfPdduNwgGg2b8Q8/XPvxp58Om28edpGnnw5bbrlx6mwm5pXO49lvnmVW8aw6Hf/Al89Al1Ohy6l8Me9nANIS4bdd4A9dIbOJz2cqqyjjrg/v4vr3rmfB0gUAnL7L6Sy6ZBFfn/41m2VvFucKN4Klc+D5LeHxTHjnCIiu4VKM62vxz/DO4WHj/vml8NP9DXv+VX1wavhcc16Dd49s3OeSJEmNron/OSlVCoJwDfxnn4Wf/+MfcNpp8ayo6RgwALbeGqZNg5QUOOqotR+/ySbw9dfh9eLb1PNXwIxH4cvrILMf7HIPJLWsPSDmlc5j6zu3pmBJAalJqXxy6ids2an2NzWCIJwGn9v/LmYumgdA1/Zd2SEdjsuFjGby2/XC1y7ktqm3EYlEePbbZ5ly8pR4l7Tx/fQfWPxTePuXp2DBh9Bpt4Y7/7K5wMp0PQGW/tpw567NioWVaX4Q3pYkSc2aSbuah+Li6oY9EoHXX49rOU1KSkq4P8C778JPP0H//nV7XH0b9uXz4f3RUPQ5zHgMvry+/rU2ce/Pep+CJQUAlJaV8vIPL9d6XEk53P0r/HMODNvsYHbvPpgRvQZz74Bd+F3X5tOwA0z9ZSoBAdEgyse/fgzAt/O+5YyXzuD6965nRcWKOFe4EaRsQthUR4AEaNfAl9nrOAC6Vb6Zlt4b+pzcsOdf1Q43QXJHaJMGA+5o3OeSJEmNrhn9aalWLTMT9tgj3GU+CODww+NdUdOSkgKDBzfuc0RXQFA5bTgSgYqljft8cbBjlx1JS0pjSdkSEiIJDO5e82saBPDBovBSbqWVX4qkhCTG9u7PqBxo3wx/o56606n8d/Z/AThlp1Moj5az57g9mbtkLtEgyuIVi7lm72viW2Rj63V8OEV+/gfQeyyk92rY80cSYMhjULYobKQjjfx+ec4QOLyw8rmbyWYKkiRpjbzkG17yrdlYtgxeeSXc6XznneNdTev01U3w5V+g/RYw9BlIbXm7zn83/zte/v5ldu++OwO6DqgaLyqD8YXwv5jN1NsnhlPhd2zCVyusi+/mf0fJ8hJ26rITi1YsIvP6TAASSODwrQ7n8aMej3OFkiRJLU9d+1CbdmzaW42CgnC99/ffw2WXuSZedRIE8N8SeGxudboOsHP78FJu6c0wXV+X0148jbs+uot2bdoxcfRE9ui5R7xLkiRJan
Fs2uvBpr2VOPdcuP328DJnkQjk50NOTryrUhO2sAweKoBpS6rHMtrA6BzYvpmn6+syo2gGme0yyWqXFe9SJEmSWqS69qEtMCOS1iB247VIBBIT41eLmrQggPdL4LFCWBZzSe1dM+CYnPCSbi1dj6we8S5BkhpfURG89FJ4+dOddop3NZJqE42GGy6npYVXTWqFbNrVelxyCXzzDXz7bTg9vmPHeFekJmhBGTxYAF/FpOuZbWBMLmybHr+6JEkNbMUK2HVX+O678M3855+HkSPjXZWkVf3hD3DvveHtv/0NzjknruXEg5d8U8sXBHDddXDggeEO699+C8cfH++qGsZtt4U762+7LUyfHu9qmrUggHeL4MrpNRv2QZlwRU8bdklqcX78MWzYARIS4MUX41uPpNo9+GD17XHj4ldHHNm0q/FFo1BWFr/nf/VV+OMfYcoUuPjilnON96KicJ1+SQl89RX85S/xrqjZml8Gt/4Srl9fOR0+qw2cuQmMzYPUVjAdXpJand69oU+f8HZFBYwYEd96JNVu6NDqS5jutVd8a4kTp8ercU2eDAcdBIsXh6nw73+/8WsoKlr7581Vmzbhx4oV4edpafGtpxkKAninGJ6cC8tj1q7vnglHdYYUm3VJarmSk2HqVHjhhXBN+267xbsiSbV58kl44IHwb93Ro+NdTVy4ezzuHt+o9tkH3nwz7I7atYPS0up3yjaW5cvhyCPh5ZfhgAPgiSegbdu6PbawEM44I7xc3NVXh+/0NSXPPQdXXRUmBXffDR06xLuiZmPeChhXAN+VVo91aAPH50F/3/+QJElSI3P3eDUNubnhOrEggE6dNn7DDuE76c8/v36PvegieOqpcIr/IYfA/Pnh62kqDj44/FCdBQG8VQRPzYMVMen6kEw4sjO0M11vWEEAhW/BiiLY5EBISIp3RZIkSc2KTbsa1623hqn2woVwxRXxrqb+iorCpiMIYMmSsHlvSk276qVwBTyQD98vrR7rmATH50I/0/XG8fXN8NlF4e1ND4OhT8W3Hqm5e+ghOO+88E3xJ54Ip3VLklo0p8fj9Hitxf/+FybshYXw97/DKafEuyLF+Cz/M0Y/OZrSslLuOege9u2zb63HRQOYVARPz4WymN94e2bBYZ1M11ezrBB+fhBSu0H3ozZshswrA2H+f8PbkUToPBhWLIQdb4G8YQ1Tr9RalJeHazpXrAjfQD7ssLBxlyQ1S06PV3xVVMCdd8KMGeHmc5ttFu+K1s+228LPP8e7Cq3BmS+dyTfzvyEIAk585kR+Pf/X1Y4pWAHj8uHHmHS9UxKckAdbplaPzS6ZzbmvnMviFYu5fp/r2TZ3243wCpqgIAqvDYFF3wMBLP0V+p6z/ufb5MDqpr1tJ5j7Xvgc7x0NR8yLz5IZqbmKRMLZa2Vl4e2UlHhXJEnaCGza1ThuuAEuvRQSE2H8eJg5E5Ja8VrW8nL44gvo0QOys+NdTYvRJrH6V1ibhJq/zqIBvLEQnp1XM13fKwsO6wzJq6xy+P0Lv+flH14mIOC7+d/xw1k/NGLlTdiKIlhUed1iIjB38oY17f3/CB22D8/78wOQ/zoQQDSOl4FcKYjC0jnQLg8SnG6hZiAxMUzWL7kknB5/ww3xrkiStBHYtDc3X38dptd77hnuxt5UTZsWTt2rqID8fCguDjeia43Ky8NrSr73HqSnw/vvwzbbxLuqFuHOA+7kN8/9htKyUv5xwD+qxvOXhzvD/xSTrndOCq+5vnlqLScC5i+dTzSIEhCwYOmCRq68CWvbAbqMgDkTgQj0PG7DzheJwCYjw9vZO8B7o8Lp8QNui2/KXr4EXhsKCz+BjH6w7+TwtUtN3X77hR+SpFbDNe00ozXtL74Y7hQejcLAgWET2FQ3RXvrLRgxIrzc2nHHhWl7a/Xpp7DjjuHtxEQ4/3zTkXq4bept3PfJfQzpPoRb/7+9+w6Pqtr6OP6dVBJIIQkpEEKX3kEIqKCgoFgQFU
VUsFwL8Ap6bahXsCDWaxesV1ApFmwIKgIiIr33Jh3SgBQIqXPeP3ZIgQRSJplJ8vs8zzycObNnnzVyiFmzdun/Fp7uRY/YsFsw7zj8mABZOT/ZbMBltWFgCHid45/L4n2LuX7m9ZzKOsVH13zErW3LmKxWZvZMM4zdpy74V9FFrvZ/A3/dlPe82yfQ5C7nxSMiIiLVjua0V0Vff513vHQpHDwIUVHOi+dcevc28cXHQ4sW5XONU6fMiu6uXsGPijIV9lOnzMiDdtV0rnQpbIjdwOhfRgOwKW4T7cPac1+X+wptezjdzF3fm5Z3LtQLhodDk2JM+7y4wcXEPxoPgK26z7N284SwS50dhWMk74AV90N2mqnuB3cx52s2zGngBtihVqPS9W/PhISl4BsJtRo7IGARERGRgly0TCuF6tXLVNkBGjWCiAjnxnM+ISHQsmX5DIH9+28IDYU6deDxx0vfT2qqmW5QngNOgoNhyRKzRc8XX5iRB1IspzLzxrfbsHEq69RZbewWzD0KE/blJew24PLa8EyD4iXsudew2ZSwVzUr7oX4P+HoclgyJO98cBe46Gsz/D96aum+pLAsWHgl/N4LfroADv/iuLhFREREcmh4PJVoeDyYIfK7d8PNN5tFaFxJfDz8+acZCt6olFWr4rrxRvjuu7wvMU6cMNvglMTmzXDxxWYP+UGDzOI+SthcimVZPPzrw3yy9hN61O/B1zd9jZ+3X+7rh3Kq6/vyVdfDvczc9cYusKhy3Mk4EtMSaRbUrPp8GWDZweZC3wf/ciEcWw1YZrj/9Qcd1/epI/Bd3ZwnbuYLgB6fO65/ERERqdI0PL6qGjDg/G22bIF580xlvkOHcg8JgKNHzeJqsbFmgbyVK6FNm7L1abebh0cht+npLwXc3c1q7KVZlO+DDyA52RzPmgU7dkDzIubvZmbCjz+a61x1lZL7CmKz2Xij/xu80f+NAuezLfjlGPx81ByDqa5fEQTXBIOnC+SMv+z6hWunX0umPZP7O9/PpKsnOTuk8nXqCCy4ApK2wAWjoPObFf/v5NDPEDMP6l2Ttwd8l7dNhT07Dbp95NjreYeAbxSkHgTsENLdsf2LiIiIoOHxVc/u3dC5M4wZAxdeaLYZqwjLlpmEHSAtDX79tWz9rVwJ4eFmD9q33jr79eeegyeegNtvh/nzTfJeUhdcYOaYu7ubKv25Ri4MH26q+1dfDU8/XfJrVROrDq9i9o7ZpGell9s1DqTBxH1msbnTCXuEF9wdEs8bsy8m6r8RvL/y/XK7PkBSWhIp6SnnbPPeyvfIsmcBMHn15AJD/aukHe9C8lbADjvehuRtFXv9+L9h0dUmjgVXQOImcz6kO1y3BwYdgbpXOvaabp5wxd/QfgL0nAnNRji2fxERERGUtFc9K1eapBlMdXjp0oq5bqdO4JczbNndHS655Ow22dkwbJhJxAcMMAuzFeW550z1PivLrLZ+zz3wyivmOZg+JkyA//2v9NunjRgB//0v3HknLFwIgYFFt/3pp7zj778v3fWquKnrp9L1o65cM/0arp5+NWWdeWO37MzdOZd5u+dhWRZZdvgpAV7cDwdyvhNws8GVQfB0A/hy5QSWHlhKzMkYRs0ZRfzJeAd8qrN9uPpDgl4JIviVYL7a/FWR7dqGmvvS3eZOw8CG1PBw4S0aHcEryAyNB8ANPCt4qlHSZvOnlQ3Yc75AqAC+9aD1E9BgsEbgiIiISLnQ8Piqplcvs/DZ0aMmib78csf0m5hoVkAvbKg6mEXx1q2DX36B7t3ztjjLb/58mDrVHM+ZA9Onw11FbLEUGpr3C3B2Nnz2mRkqb1llW3guPzc3eOih4rW95hqYNs0cDxzomOtXMd9u+Tb3+Pd/fudExokC889LatScUUxaZYaUhwdfyPB+Czhmz1u3oK63WRm+QU4u7OGWd2/abDbcymle9TMLn8Fu2bFbdsb9MY7BrQcX2m587/EE+wRz5MQRRl04qurPab
/g/8ww8ePr4IKRJpmtSPWuBd/nTAw+9SG0iqx+f5pl6UsBERGRakqV9qomIsIssPb997BtW9kXhLMsuPtuqF0b6teHnTuLbtu4saleF5aww9kLxZ1r4bhXX4U+fQom7m5u575+eZoyxSxUN3s2vPCCc2JwcX0a98k97hTRiVpetcrU34xNMwB3CL6WmKDh/HpwC2Cq6wOC4amovIQd4MmLn+SqZldxQfAFfHbdZwT7Bpfp+kVpGNgQd5s77jZ3mtRuUmQ7L3cv/t3j37x2xWs0DGxYLrG4FHcv6PwG9F0IUTdW/PV9wqD3L+AZAKcOwMLLoZDdBlyCZZmfKQ89BGvXnrttdraZBuThAT16QFJSxcQoIiIiLkOrx1PJVo+vaLt2QbNm5tjdHUaNgjffLH1/r75qKuxXXAEvvmgS8aL06GHmyp++Rf39zTD2or4UEKeyLIufd/7M4ZTD3Nz6ZgJqBJSpv/5f38evJ8PBy6zO3aVuV65t2Inh4VDfiSPNDyYf5IU/X8DTzZNxvccR4hvivGCqq8O/wp4pENwVmo/J+3Jv80RY/xSQ8zPjsvkQfpmzoiza55/DHXeYn6ne3rB/vxkhVZg//zQjqMB8zjffhAcfrLBQRUREpPxo9XhxjKAg80tlZqYZnl6/ftn6e/RR8zjT0aOm4tSpk7kmQMOGsGKFOW7UyAy/L+m2blJhbDYbV19wdZn7ybTD7KPQoO37tNi/hAPJB6hbK4zHW7RiYCh4OHl8UKR/JJOvnuzcIKqzk/vNgnOWHfZNNyu4N7rdvBbYDrDA5m4efs2cGmqRNm0yCXt2NqSmnjtpz7/WhmWZUU8iIiJSrShpl3MLCjLz1CdNgpYty6fCc+QItGsHCQkQEgIbNphh/u+/b+a2p6TAU08pYa8G9pwy+64fyQBs7lzS4BLq58xdj6zi67hJMZ06DFbOgpS4wYm9ea/VGwAXfQ0JSyFqMNQs45eM5eWOO2DyZLPlZK9e515Ms107+OgjM5y+Vy+49daKi1NERERcgobHo+HxTjd1qllV/rQpU8wvtUXJyoK4OJPYF7Uwk2WZof1hYWZYvbi0TDv8eBTmHcsd2Iy7Da4Ohn5B5lgEAHs2/DkQDs8G3/pw+RLXTc7PJSkJDhwwX4aWZstKERERqfSKm4dqITpxvs6dzSJLNpv5s3PnotvGxEDz5lCvHlx6KWRknN3GsswK7xdcAJGR51/oSXJlZmfy/KLnGTprKEv2L6mQa+4+Bc/vg9/yJewNapht3K4KVsJepVgWbBgPPzaDlSPBnnW+d5zNzR16/QjXx8C1/1TOhB0gIADatFHCLiIiIuelpF2cr3Vr+Ptvsyr733+b50WZNg327DHHixaZRZrOtHs3/PijOU5NhY8/dnzMVdRby99i3B/jmLFpBld8cQVJaeW3UnWGHb6Og1f3Q2zOdy8eNrg+BJ6IMlu65bctYRufrPmEf47/U24xSTmLXwybnoUTu2Dn+7C/6H3uz8lmM6vFu2mGl4iIiFR9+o1HXEPXruZxPo0amWrd6VXnC1sY7/SQ+JMnzUJPLVs6NtYqbF/iPtxsbmRb2aRmpnLs1LEyrwJfmJ2pMDUW4vINlGhUA4aFQ4T32e23xG+h0wedSM9Op5ZXLTaP2ExUQJTD45LzyE6HtY/AsbVwwShoeEvJ3m/POPfz/OL+hPglZv/1wHN8kSciIiJSxanSLsbvv5tkNywM5s1zdjRFGzjQLOA0ZAj88IMZKn8mPz9TgX/gAXj3XbN3fFmkpprK/caNZeunEhjRdQRBPmb1/uEdhjt8f/F0O8yMg9cP5CXsHja4oQ48FlV4wg7w574/Sc9OB+BExgmWHVzm0LikmHa8Azveg4Ql8PdQSD1YsveHXQbNRoJXMETdDA2GFN4ubjH83tts3/brhSW/joiIiEgVooXo0EJ0ALRoATt2mONmzWD7dufGUxKWBc89B19+afZ/f/NNMzfeEbKzoVs3WL3aDMn94Qe45h
rH9O2iMrIzSE5Pdvj+4ztSzcrwCZl55xr7wLAwCC8iWc9979EddJjcgVNZpwjwDmDziM3U86/n0PikGNY/DVteAivbPB+wFQJaOP46W16FdY/lPe89F+r2d/x1RERERJxIC9FJydSqZZJSm80cO9KePSbxjYqCr0o5h/VcFi+G8eNh50547z3HXuPgQZOwg/lvM2uW4/p2UV7uXg5J2LPsWQz/fjhBr0bQe84HvLbfnpuwe9rgpjrwaP3zJ+wAFwRfwKYRm/ji+i/Y+MBGJezOcsEoCGgLNg9o8TD4FzLSxREirwWPnJ9DNRtASHT5XEdERESkEtCcdjE+/xz+7/9M1fqddxzb99NPm8Q3O9ts5Xb99eDp6bj+z1xBvrAV5Uurbl1o0sQsbme3w2WXOa7vKuTYqWN8vv5zwmqFcXPrm7HZbHy/7Xum7FgG4f/HokTwTtpHw8BGNPUxc9dDvUp2jca1G9O4duNyiV+KySccrlprfk4Utd2iI/g3h2t2QuJGCOkGni4yAmrWLDOSp317ePVVqFHD2RGJiIhINaCkXYyWLc289vLglm9AR3n8on/ZZTByJEyfDn37mvnujuLpCUuXwjffmOT9iivK3mfKbjMfOD0eOv0XIq8re59OZFkWl025jA2xG7Cw2Ju4lzHRT/B3RiOIfCi3nZebjZtD4dLA8s33pAJUxF+gT7h5uIqYGBg82Hz5+NdfZtvJJ55wdlQiIiJSDShpl/L34otmmPmhQ/DKK46tsoP5UuDdd82jPNSpYxa1c5S1j8GxVWZe8JKhMDgZbJV3pkpaVhrrY9fnPp9zaA9J++CETyfahWWwN3EfnQJr8Um7BoQWYyh8UdYeWcsfe//g8iaX0ya0jQMiL2eZJ2DjODgVA60eg9rtnR2RlMXp3SjA/Mw5fty58YiIiEi1oaRdyl/9+rBwobOjcB35q5RVoOTs4+nDwOYD+X7nLxByAx5Rj3AsE2zY6BUVzVudoukVWLaPujF2I90+7kamPRNvd282jdhE06CmDvsM5WL9k2aldRsQ8zsMOlKpv5yp9po0gUcegTfeMAt3PvigsyMSERGRakJJu1Quc+bA7bebStfp4fCVTePhkLQFsjOg46twZB7UCIWgjs6OrNTGD/iaGjsPkeXml7tlXAtfuD0MQko4d70wyw4uI9NuVrFLz05n5aGVrp+0p+43f1p2SE8we5K7aw50pfbqq2a0UBX4sk1EREQqDyXtVcXx42boZohjt+lyCZZlVoVfuNDMJT09LHX0aNi82bmxldQ/U2HZMHNc7xrY8ykcmm2ed/+fSegrkdRs+Doe/k7ywL9mAwC83eDGOnBxgONym8ubXI6/tz/J6ckE+wTTu2Fvx3Rcnlo+BrELITMF2j6jhL2qUMIuIiIiFUxJe1UwYwbcdptZ3fydd8yibFXJ7NlmZXubzSTwbm7m+Bx7GbqsfdPzjg/9dMZrMytV0r7xBHwRC4lZeeda+sLt4RDs4GULGgY2ZNvIbaw6vIrukd2pU7OOYy9QHur0gEFxkJ0GXgGFt8k6ZZJ5JYIiIiIiUgRNsKwKnn/eVNktC5591tnRON6BA+ZPyzJ/tm9vVnGfOtV5MZVWWO+849qdzKOw11xYajb87wi8eygvYa/hZobCj450fMJ+WoRfBNc0v6ZyJOynuXsXnrBbdvjrFvjKF+a0gbS4io/NkQ58D+vGwvF1zo5EREREpMpRpb0qaNYMtm83x02aODeW0rLbYeJEWLQIbr0Vhg/Pe+2WW2DSJNi0CS69FObOBe8yLEPuTC0fg1pNIC0WGg4FbLBvGtQIg8jrnR3dea3Pqa4n56uut65pEvba5ZSsV0kJy2H/THOctA12fwqtK+n2YQd/gsXXA26w/W24drdrbdUmIiIiUskpaa8KPv0UJkyAjAwYO9bZ0ZTO11/D00+b43nzoEMH8wAICoL16yEpCQIDK/dQYpsNom4seK6Z47aT+3D1h8zYNIM+jfrw5MVPYn
PQf6uT2TAzDpYn553zcYPBoRDtX7n/SpzCOwQz0MluHpU5yT2+ltzPkp0KKTsr9+cRERERcTFK2quCoCB4/XVnR1E28fF5c9YBEhIKvu7mBrVrV3xclcjKQyu5b/Z9ACzcu5CWdVoyqOWgMve7NgWmxRWsrretCbeFQaAn7Dy6k6nrp9IipAW3tr3VYV8UVGn+zeCiGfDPZxDcHRrd4eyISi9qMGx7HTKTIbA9BHd1dkQiIiIiVYqS9qpi925Tqfb0NMPM69VzdkQlc9ttMGUKrFoFgwZB797OjqjSOXrqaIHn8Sfjy9RfShbMiINVKXnnfN3h5jrQLae6fjLjJNGfRJOYlki2lU1aVhp3d7q7TNetUtLizHZv/i3PHo4QdZN5VHYBLeDaPZCyC2q3N/P4xfEOHDA/H3v2hNBQZ0cjIiIiFUhJu6tKTIRPPoGAADO/2+M8f1U33QQbNpjjmBj47bfyjtCxAgNh5UrIzDRfPEiJ9WnUh0EtBjFr2yx61O/BrW1vLXVfq1NgeiykZOeda18LhoZBQL5b8VDKodwvC9xt7qw+spq7UdIOQMwC+KM/2DPN+gU9vnB2ROXHOwi8L3R2FFXXjh3QsSOkppptPTdvVuIuIiJSjShpd1VXXw1Ll5oF2rZuPf/w95gYs4I8wJEjjo8nIwNSUiA42PF951cdEvbMZEjaAoFtwaOmw7r1dPfk25u/Jduejbube6n6SMkyQ+HX5Kuu13SHW0Khq9/ZxeKmQU3p3bA3f+z9A093T25vd3sZPkEVs+sDsOf8m9z7JXR+2yS3IiX1228mYQczdeivv8yIJBEREakWtOWbq1qxwiTsAH//ff72r70GXl7g62uGxzvSli0QGWkqPP/6V968cym51MPwYzP4LRp+bg3pxxx+idIk7JYFK5Nh3N6CCXvHWjC+IVxYxGJzbjY35t0+jxX3rGDP6D1E148uddwllW3P5ssNXzJp5SROZJyosOsWW2A7wA42d/CpC57+zo5IKquLLsobbeXrC126ODceERERqVBK2l3VnXfmHd9djOHGt95qKuGJiaZK70jvvQfHcpLLjz+G/fsd27+ry0qFlSNgfh84NKdsfR2eDek5e3Kf3Acxv5c9vjLam3KMtj+8S99501kTZ7YOrOUO/4qA++qC/3nG43i4edC1XlfCa1XsiuFPLXiK2767jRFzRnDd9Osq9NrF0uoJ6PwOtHgYLl8MbtVwYJNlwfvvw803w7ffOjuayqtDBzN96O23Yc0aiIpydkQiIiJSgarhb5GVgGXBAw9Anz7QurV5FIeXV/nEExVlqv5ublCjxtmruKenm+3Yquocyy0vwc4PAAviF8OgOPAKLF1fgR0Am3nY3MwQeSexLFiRAiNXbmTLKS8sK5lF+xZxbd26/CvSDz8X/+mwYM+C3OPF+xc7MZIiuLlD81HOjsK5fvwRRo40wzS+/tqsu9GmjbOjqpzyb4MpIiIi1YpTK+0TJ06ka9eu+Pn5ERoaysCBA9m+fXuBNmlpaYwcOZLg4GBq1arFDTfcQGxsbIE2+/fvZ8CAAfj6+hIaGsqjjz5KVlYWldYjj5hFh26+2cxrd7aHH4bx40088+aBf75hvhs3Qt26EBZWvBEB5SnjOGx6Aba8YqrjjpJ+LGdsuGUWFStL3yEXwmXzoNXj0PcPCGjpqChLJDET3j8Mnx6BdCtnHYHsFDj8AbfXSXP5hB3gjvZ526SVZdE9KUenR+VYlnkcOuTceEREREQqIZtlOW+Ccv/+/bnlllvo2rUrWVlZPPnkk2zatIktW7ZQs6ZZoOuBBx7g559/5rPPPiMgIIBRo0bh5ubGkiVLAMjOzqZDhw6Eh4fz6quvcuTIEe644w7+9a9/8eKLLxYrjuTkZAICAkhKSsLf3wXmnfr6wqlT5rhTJ1i92rnxFCU+HgYMMPGdnn+/fz/Ur++ceOb3hdiFgJWzWvfnZe9z/zewcxIcX2++FGj9JLR/vuz9Oollwb
Jk+CoeUnPWSDuVdYoV294jcf9kxl30bx7o+oBzgyyB1YdXk5yeTK+GvXCzabaPy0lIgIsvhm3b4NJL4Zdfym9EkIiIiEglU9w81KlJ+5ni4+MJDQ1l0aJFXHLJJSQlJVGnTh2mTZvGjTfeCMC2bdto2bIlS5cupXv37sydO5err76aw4cPExYWBsDkyZN5/PHHiY+Px6sYvyC6XNJ+8cVm8TnLgvvvN3NCHcGy4PPPzcrDN99sht+Xpa/27WHTJnNss5kvG2JioFYtx8RbUt8EmcQazL7YV28pW3/JO2F2CyDnC4lun0KTO8/5FleWmAmfx8Kmk3nn/D3g1lDo6Oe8uKSKy842a2KEhBS+mqGIiIhINVXcPNSlSlNJSUkABAWZbZFWr15NZmYmffv2zW3TokULoqKiWJozbHzp0qW0bds2N2EH6NevH8nJyWzevLnQ66Snp5OcnFzg4VJ+/BFeeAHefBPeeMNx/f70EwwbBp9+Cv37w+7dpe8rK8sMjT/9nU94OMyf77yEHaD56HzHD5a9v/R4chN23PIWkMsvZRckLAPLfvZrLsKyYEkSjN9bMGHv5m9WhlfCLuXK3R3q1FHCLiIiIlJKLjNz1W63M2bMGHr27EmbnIWKYmJi8PLyIjAwsEDbsLAwYmJictvkT9hPv376tcJMnDiRZ5991sGfwIFq14axYx3f7/bt5hfn0/u579kDTZqUvJ+lS+GeeyAw0KxWb7OZLxe6dXNktCXXdhxE3QxunuBXis91puBuEDUY9n8F/s2h8RlV9n1fwZIhgB0aDIGe08p+TQc7nlNd35wvWQ/wgNvCoJ0Tv18RKRbLgn37TNKfM2VKREREpLpxmUr7yJEj2bRpEzNmzCj3a40dO5akpKTcx4EDB8r9mi5hyBCIiDDH3bubYfilcdddZo5qcjI0bQo7dpjh9q4goIVjEnYwq39fNBMGn4QBm6HGGavj//MZuZX4fdMhO80x13UAy4LFiaa6nj9hj86prithF5dnWXDTTdCoEdSrZ1aeFxEREamGXCJpHzVqFLNnz2bhwoVERkbmng8PDycjI4PExMQC7WNjYwkPD89tc+Zq8qefn25zJm9vb/z9/Qs8qoXISDMkftcuM6/d27t0/eQf5lqzpkncncWyYN9MWPckJBY+HaLMPHwLH9obcnpkgRsEtAK3Uv73dLCjmfDWQfgiFtJyvlMI9ID/qwfDI8DX3bnxlVjCcljzb7MooDheXBxs3pw31cVV7N6dt7f7iRPw4YfOjUdERETESZyatFuWxahRo/juu+9YsGABjRo1KvB6586d8fT0ZP78+bnntm/fzv79+4mOjgYgOjqajRs3EheXN9943rx5+Pv706pVq4r5IJVJjRpmT/X27aFZM/jjj5L38b//QatWZr/ljz92eIgFfPQRXHstTJ5c+Ov7ZsCSW2Dry/BbdN5CdBWh9dNmcbr2E6DPQqfP2bUsWJQIz+6Frfl2pesZAOMaQpvKWF0/eQB+7wXb34K/boJDs50dUdXyxx9mt4c2bcxIHFcSGmrWyHB3N1N6LrjA2RGJiIiIOIVTV48fMWIE06ZN44cffqB58+a55wMCAvDx8QHMlm9z5szhs88+w9/fn//7v/8D4O+//wbytnyrW7cur7zyCjExMdx+++3cc889rr3lW2wsrF0LF14IOQvvVZhu3WDVKpPlRUXB3r0Ve/3iWrwYLrkk7/n8+XDZZQXbrH0Utr0BVs48/f5rIKhjxcXoIhIyYGosbM+XrNf2gNvDoXVlngocuxDmn/47t0H7F8y2e+IYt94KM2fmbdkYF2fmj7uK1atNhb1lS/i//zMJvIiIiEgVUSlWj580aRJJSUn07t2biIiI3MfMmTNz27zxxhtcffXV3HDDDVxyySWEh4cza9as3Nfd3d2ZPXs27u7uREdHc9ttt3HHHXfw3HPPOeMjFc/evdC8OVx5palYxxWyKnl5Ov2Lr83m2r8EHzly7udg9mN3r2
GOg6MhsG3x+rbskJbgekOCS8iyYOFxeG5fwYT94gAzd90rbSd/7P2DzOxMp8VYJiHRUDvnSxivILPQoDhOu3YmYXd3NztAnLHop9N17gwffABjxrj2zyoRERGRcuRS+7Q7S4VX2idPhgceyHs+cyYMHlz+1z1t40azmFx6uomlR4/ivc+y4JtvzFzT224zc+SLY8EC84t327bwxBPgUcxNC06dgn79TMW9Rw/47bfCV5BOS4CTe6B2B7Ny/PlkJMK8SyBpI9S5CC6bl5f4n8Oe43v4a/9fXBR1EY1qNzpv+/IWnwFTYmDnqbxzQZ5wRxi0rAk/7/iZa2dci92y07dRX367/TdslXHbLXsm/DMFNk0Ady+InppvPQEpk+xs8zNg3z64777S7SYhIiIiIqVS3DxUSTvlmLRnZ8PXX5s9zQcPBi8vc37tWuja1bzu7W0WgSrpL8vbtsGkSWZl5VGjip8Il8UHH8D994ObG9Sta5L305+pKPHxJrnPyjIVvbffNsNci8uy4ORJk6w7KuHc9RGsuDfv+SXfQ+R153zLnuN7aDupLSczT+Lr6cvGBzbSuHZjx8RTQpYFCxLhu3jIzPevt1cgDAqBGjkFyaGzhjJj0wzsOXvIx/w7hrBaYWf1Vyn82BRO/APYoHZ7uHKNsyMSkapu1SpIS4OePZ2+ZomIiFRNxc1DXWaf9ippzBh4911zPGcOTMvZx7tjR/j7b7MIVP/+JU/Y09PNdm3Hj5tEOC3NVLDLS2Kiif/nn03CbrfDwYNmXn79+ud+b0ICZGSYY3d32L+/ZNe22cxiVCV1bA0su8tUabt9BHXyjSbwPR2zDbDA9/wjBv7c9ycnM83eaamZqSzet9gpSXtsTnV9d77qeogn3B4GLc4YhNAjsgfTNk7Dho2GgQ0J9g2u2GAdyeaB+fs6fSwiUo5eew0efdQcjxoF77zj3HhERKRaU6Wdcqy0N21qqtEAwcEmgXWE2Fgz/xRMEj10KEyd6pi+z5SRYVaW3rnTPD+9knPv3mbY+/mqD5YFd9wBX3xhqvN//lkxQ3B/6QLH1ppjvyZwzY6Cr+/8AGLnQ+QgaHjLebvbfWw37Sa3IzUzFV9PXzbcv4EmQRU3lNhuwfzj8ENCwer6pYFwfR3wLmR1Csuy+GbLN+xN3Mtt7W4jwi+iwuLl4E+w9A6z133PmRDep2z9JayAlQ+Amwd0+7j4axeIiJRGmzZmFByAnx8kJzs3HhERqZJUaXcFQ4bACy+Y45sduIBWWJhZ9XnaNDO8/v77z/8ey4IpU8x89jvuMFu+Fcfu3QUT9iFD4F//gujo4g0XtNng88/hrbfA379ihvEXUMR3Us3uM49iahLUhA33b+DPfX9ySYNLKjRhj0mHKbHwT77qeh1PuCMcLvAt+n02m42bWt9U/gEWZtUoyEwEbLB6DAzYWLb+Qi6EK1c7IDARkWLo3dsk7TZbwV1MREREnECVdsqx0m5ZsHChmc/dt6+pijuy7127ICQEatc+f/vPPoM77zSJt4+PGaZenPelp5sV7v/5xzz/8Ue45poyhV4hjq2BpcPBysoZHt/T2RGdxW7Z+XbLt6RkpDCkzRB8PH3OeB3mHYcfEyAr51+pDbisNlwXUnh13WXMaQ+Jm8wvvMHd4Yq/nB2RiEjxZWWZL5xPnYLhw8H3HN+QioiIlJIq7a7AZsvbV9xuhw0bzBDxkBDH9N2sWfHbb9yYN7T9xAk4cKB4Sbu3N6xYAbNnm23quncvfcwVKagTDNjglEt/vv5znv/zeZoFN2PqwKlFziV/esHTTPxrIgDfbv2Wn2/9Ofe1I+nwWQzsTctrH+oFw8KgaWX43bHndFjzsJl/3vktZ0cjIlIyHh7mi24REXE8u92xxcxqQP+1KoJlwdVXmyHpUVGwbFnFx3DHHabCDnDppdC6dfHfGxwMw4ZVnoTdiY6fOs7wH4az89hOft31KxMWTyiy7a+7f809XrBnAWCq63OPwgv78h
J2G3B5bfhPA8cn7Nn2bNKy0s7fsKQCWsGlv0Dv2WZNARERERGp3rKyzJRhDw8z9SglxdkRVRpK2ivC3r0wd645zsiATz8t/nvj4sw+yosWlS2G9u3NkPgNG2DePFN1F4c7vb3aaVn2rCLbDmkzJPf4plY3cSgdXtoP3+cbDh/mBY9FwY2h4FXCf60/bv+RiYsnsjdxb6GvLzu4jDqv1qHmizV5dcmrJetcRERERKQkFiyAr74yBc3Fi+HLL50dUaWh4fEVISzMVKsTE83w9OIuApeWZvZzP71N2jffwA03lD6O2rWLNyReSi3YN5j3r3qfZxc9S7PgZjx58ZNFtn2kxyNER0aTmJ5CduDlTNgH2fnmrl8RBNcEg2cpvlqbvnE6t866FRs2/rvsv+wZvYdaXgW3zpvw5wSS0pKwY2fs/LGM6T4GT3fPkl9MREREROR8goIKPg+uxNsRVzAl7flNmwZXXWWGsDuSry8sWWIq7BdcAHfdVbz37d2bl7C7u8P8+WVL2qVC3NflPu7rUryV6RuE9mRhDBw4lncuwguGhUMjn6Lfdz7LDi7D3eZOtpVNQmoC+xL30Tq04JSIun51wQZuuBHkE4SHm34cSCV3+DCsXm2m8tSp4+xoREREJL8uXcwI4i+/hD59lNeUgFaPJ9+qfWBW7du8GSIjnR0WZGZCx44mHjc3mDMH+vVzTN9Ll5rt204P1+/fv+R9pKfDb79BRIT5R+gMOz+AmN8gciA0ut05MZRClh3mHoM5x8w8dgA3G/SrDQNKWV3P7+8Df9P7s95k2jPpWrcrf9/991lJeVJaEmPnjyXuZBxPXvwknSI6le2iIs70zz/QoYOZH1enjll8MyzM2VGJiIiIFKm4q8craeeMpB3MXIubynF/6+RkWLcO2rY9/3D1lBT4/XezUnybNo6LoUsXWLvWzCmJjMyr6BeXZZlt7BaYBdSYOhVur+Ck+chvsLAfZjC5BVcsN/t5u7gDaWZl+IPpeefqesPwcGhQ4+z2lmURnxpf4mr4/qT97D62m+j60dTwKKRjkapk0iQYMSLveXn/HBcREREpI235Vlp+fhAdXX79Hz1q5rQfOmS2flu3DurVO3c811/v+DhO7zlrs+WtKl8SKSl5CTvAt99WfNKeejDnIOd7p1OHKvb6JZRlh5+PwS9nVNevDIKrgsCjkOp6lj2L62Zcx5ydc4gKiGLJXUuI9C/eKJCogCiiAhw81UPEVUVHm9Vos7KgRg3njf4RERERcTCtHp/fpElmSGV5Do2fO9ck7AAJCSbZdYaPPjJbLXTrZubyl5Sfnxm6f1rfvo6LrbiiboTaOTHUuRgiSjHEv4LsS4MJ+2HO0byEPdIbnoyCa0MKT9gBVh5ayZydcwA4mHyQKeumVFDEIpVMhw5mO83//tfMa2/UyNkRiYiIiDiEKu353XornDksYc8eOHnS7Gtus5X9GomJBZ8nJJS9z+KYOxeeeALCw80c9ubNYeHC0ve37XV4cCesiYTez8P1wxwXa3F5+kP/1ZCZCJ6Bjvn7cbBMO/x8FH49XrC6PiAI+gfBkZQD/LRvHRdFXURtn7OnSkT4ReBuc8fCwm7ZaRDYoII/gZCSYtaAaNXKNda6kKJ17mweIiIiIlWIkvbCrFtnEsANG2DYMDN/+6GHTAWnrM4cstmzZ9n7PJ+sLDO3MzXVLGr3xBPw+eel7y8tHtY+Cl5A9CmovxJswx0VbcnYbODlmtvY7TkFU2LgSEbeufo5c9cja8CW+C10/bArqVmp1POrx8YHNp6VuDcMbMjsW2czdf1ULqx3IUPbDq3gT1FMlh0yk1z2y5NSO3HCjCjZvdtMI1m2DNq1c3ZUIiIiIlKNKGk/0/PPwzPPmOPISJOwA7z3nmOS9u7d4ZNP4Pvv4YorzKO8WZZZif70Z0lPP3f783HzApsnWJmABR41yxxiVZJphx+PwrxjubPtcbfB1cHQL8gcA/y842dSs1IBOJRyiGUHl3
FlsyvP6q9/0/70b+q6Q/9JPwq/94KkzRDeF3r9DO5ezo7KMVatMgk7mH83P/ygpF1EREREKpSS9jN98EHecWKiqRrabGa+ZGnFxcG//w3Hj5svBe66q+i92vfvN8lBs2alv96ZPD3NkPhHHjFbIE2YULp+0hLg4Hfg3xwumgmbXwS/C6D1U46LtZL755RZGT42X3W9QQ2z73o974Jte9TvgQ0bFhY1PWvSIbxDhcbqMHunmYQdIOZ3iPsDIirgy6iK0KqVmTKTkgJ2O/To4eyIRERERKSaUdJ+ph494JtvzHH//mb4enJywa2ESurBB02flmWG3h88WHi7Tz+Fe+4x7caNg/HjS3/NMw0dah4lZc+C3R9DWhzs/gRSc7aGu+hr6L/ScfFVchl2+CEB5h/Pq6572OCaYLgiyMxjP9OifYuwsLBhI8gniLBalXRPad/T87xzPqRPhNNCcbjQUFi50oyM6doVLr20Yq5rWfD00zB9uvk59M474O5eMdcWEREREZeifdo5Y388Dw/48ENTXb/33tJth3amyy6DRYtMpc7HxyxsV9i83/btzTx6MNW9pKSyX7us1j4OW18hdy90AJs7NLkHLpzszMhcxq5UmBILcfmq6w1rmLnrEd5Fv2/QzEF8v+17rJz/romPJxJQI6Ccoy0HlgU73oG4RRB1MzQY7OyIKr8FC6BPn7zn06fDLbc4Lx4RERERcbji7tOuLd/O5OsLY8bA6NGOSdjBDEevXRu8vOCtt8zCcNOmmUdWVl67Dh3Azc08XGXe7NFlOQf5vtux7BB5nVPCcSXpdvgqDl47kJewe9jghjrweNS5E3aAOzvciZvN/BO8sdWN5ZuwWxYc+Q0O/2L+/hzJZoPmD8LF3yphd5T8PxcKe55fbKypxrdoAV99Vb5xiYiIiEiFU6Wd4n/DUSaWZSrt7u5w991mKDyY4fAffWSOU1Ph7bfh1CkzpD44uHxiKYk9X8LS2wELGg2HeleDXzOo7SJfKjjJjlSzMnxCZt65xj4wLAzCz5Os57cvcR9xJ+PoXLdzbgJfLtY+BltfNcfNRkLXdx3Xd+YJWH4PHF8DLR6CZg84ru/qym43Xx7OmAH9+pmfF56ehbcdMcKMDsrONm2OHYNatSo0XBEREREpueLmoUraqaCkPb969eDwYXMcGQkHDpT/NcsiZRdkJEJQ56q1nVcppNthVjz8kZh3ztMGA0PgstqFz113CT80gpN7zbF3KNwQ67i+N78I6/8D5FTwr/0HajVyXP9ybvfea5L67GzzpeDx4+Dn5+yoREREROQ8NDzeleWfm1oZ5qn6NYXgLtU+Yd92Ep7dWzBhb+oD/2kIfYtYbM5l1L0q3/HZ28qViT3zjOfnGMotjvfMM3DhhRARYUbtKGEXERERqVJUaccJlXbLgvnzzXGfPuWfDB85AqNGQUICvPiiWRFfii0tG75NgD8T88552mBQHbg0sJJ8l2HPhgPfmvnsUTeCmwM3jsg4Dn/dDMfXmuHxrZ90XN8iIiIiIlWUhseXQIUn7eVhxQrIzDRb1p2ZRQ4ZAl9/bebJBgebfeMrRabpfFtPwtRYOJavmNzMB+4Ih1Av58UlIiIiIiKVm4bHVycTJkC3bnDRRfDoo2e/npRkqvuWBSdOmD/lnE5lwxcx8ObBvITd2w2GhMK/6ythFxERERGRiqGkvSr43//yjqdMOfv1F180C97VqgWTJ5st5aRIm3Pmri9OyjvX3BeeaQC9a2uQgoiIiIiIVBwHTmwVp7n0Uti92xz36nX26x06wL59FRpSZZSaDV/Hw9/5knVvN7ixDlwcoGRdREREREQqnpL2quC996B7dzOnfdgwZ0dTKW08AV/EQmK+hc9b+sLt4RBcxPbYIiIiIiIi5U1Je1Xg5QV33+3sKCql1GyYGQfLkvPO1XCDm+pAT1XXxVUcOWKmwURGwm23aYqLiIiISDWipF2qrfUn4MtYSMpXXW9dE24LgyBV18VV2O
1w8cWwZ485jomBxx5zdlQiIiIiUkGUtEu1czKnur48X3Xdxw0Gh0J3P4t3V77D8kPLua3tbVzZ7ErnBSoCkJyct2aFzWa2dxQRERGRakNJu6vLzoYNG6B+fQgJcXY0ld7aFJgWB8n5quttc6rrgZ4wbeN0Rv8yGjebGzM3zWTn/+2kUe1GzgtYJDAQBg6E7783SbvWrRARERGpVpS0uzK7Hfr1g/nzwdcX/vgDunZ1dlSVUkoWzIiDVSl553zd4eY60M0/b+763sS9uNncsFt2AA6lHFLSLs73zTemwh4WBo0bOzsaEREREalAWs3Ile3aZRJ2gPT0wvdgL40Te+C3HvBjUzj4g2P6dGFrUsy+6/kT9va1YFwD6H7GYnPD2g+jvn99APo16Uf3yO4VG6xIYdzdITpaCbuIiIhINaRKu7OcOgV33QWLF5uV38ePP3up8rp1oXZtM6c1O9vst+4I6x6HoyvAyoYlQ+GmJHBzd0zfLiQlywyFX5MvWa/pDreEQle/wleGr+dfj10P7uJo6lFCa4Zi0/LxIiIiIiLiREraneXTT2HmTLAseO45uPrqs4e+16oFS5aYCnvLlnDHHeUQiFUOfTrOor2LGDprKHbLztTrp9K3cd/zvseyYHXO3PWT2XnnO9aCW8PA/zx3vYebB2G1wsoYuYiIiIiISNkpaXeWMyu4RVV0W7aEl15y7LU7vAQnD0BaHHR63aWr7CPmjOBwymEAHpj9ADsf3HnO9slZMC0W1p7IO1fTHW4Nhc5FVNdFRERERERclZJ2Z7nrLlNFX7wY7rkHunSpuGvXagz9llbc9crA19M3d4i6r5dvke0sC1akmK3c8lfXO/vBkFDwqyx3etJWOPgdBHeD8D7OjkZERERERJyssqQyVU+NGvDll86OwuVNGTiFkXNGYrfbefeqdwttk5QFX8bC+nzVdT93GBJmkvZK41Qs/HohZJ0ELLhsHoSffzqAiIiIiIhUXUraz8duh2nT4MgRGD4c6tRxdkTVSqs6rVg4bGGhr1kWLE+GmfGQmq+63tUPbq7g6vqUdVMYO38sUQFRfHXTV0QFRJW8k+StkJXvm4et/wXvOlC7veMCFRERERGRSsVmWZZrr0RWAZKTkwkICCApKQl/f/+CL774Ijz1lJkM3bIlbNp09sRoyzKrwfsWPXz7vDIyzKNWrdL3UY0kZsIXsbDxZN45fw8zd71jBVfXT2acJPDlQLLsWbjb3BnWfhifXPdJyTvKOglzO0JKvnn7Nk+4ah0EtCpbkJYFf/1lvoS65BJN7hcRERERcbJz5qH5aJ/281maM/fbsmDLFpOc5xcXB61bQ82aEBQEQ4eaLdpK4q+/ICQEAgLg5ZcdE3cVZVnwdxKM31swYe/mD+MbOjZh/2PvH1w97WpGzx1NamZqke1sNhtu+f4pebiVssTvUROuXAutxuadszIhwQHrD4wbZ5L13r3hkUfK3p+IiIiIiFQIJe3nc/vteVXJG244u5r+6aewfbs5Pn4cpk831fmSmDgRTpwwVdCnnoKsrLLHXQUdz4R3DsGUGDhlN+f8PWBEPbgrwqwS7ygnM04yYNoA5uycw7sr32XCnxOKbOvr6cuXN3xJ8+Dm9GnUh2cvfbb0F/aoCReMAq8g89wzwDHz2j//PO/4iy/K3p+IiIiIiFQIzWk/n8GDoX17iI+H6OizX4+IMMn2aTYbpKSU7Br16pn32Wxmzry7627B5gyWBX8lwTfxkJbvP3V3fxgc6thk/bTUzNTc6robbsSejD1n+xtb3ciNrW50zMV968KALXB0OQR3BZ+IsvfZty98/HHecVksWQIzZkC3bnDbbWWPraIcPQp790K7duDp6exoRERERESKRXPaKf5cgkLZ7aZS/sUX8M8/0Lw5zJ4NUSVYiCwpCf7zH0hIgCeeMEmFAHA0Ez6Pga35RqcHesBtYdC2nKf/P73gaSb+NZGIWhH8fsfvtAhpUb4XLE+ZmWZBRbvdTO
Hw8ipdP/v3Q9Ompp/sbJg1C66/3rGxloRlwfz5ZnTKFVeAWxGDhzZsgB494ORJ8+eiReCh7yxFRERExHmKm4cqaaeMSbuUC8uCxTnV9fR81fWeAXBjHfCtoMEIGdkZeLp55u4VX+0tWmTmxYNJkJ97zkzpcJYnnzRfmgE88AC8/37h7R5/HF5/3XzRALBmDXTsWDExioiIiIgUQgvRSaWVkAFvHjR7r59O2Gt7wIORcEd4xSXsAF7uXkrY8+ve3QyLB7Pw4q23OjeemTPzjr/+uuh27dubhN3d3ezQ0KBB+ccmIiIiIuIAGh9aFvPnw3ffwWWXwaBBzo6m0rMsWJQIsxIKVtcvDoAb6oCPpvo7n7e3mdO+Y4eZAlKzpnPjueoqePddc9yvX9Hthgwxa0asX2+mBwQFVUx8IiIiIiJlpOHxlHJ4/Nat0KaNSQSys2HBArj00vINNDMFEjdAYFvwdL1h/GUZSh6fAVNjYUe+uetBnnBHGLR0cl4oLiw7G7791sxpv+kmLTAnIiIiIpVGcfNQVdpLa8eOgqvGb9lSvkl7WjzM7QCnDkONcLhyHfiEld/1SsCyLO768S4+W/cZLUNasmDYAsJrhRfzvbAgEb5PgIx8/zkvCYQbQqCGqutyLu7uZocHEREREZEqSnPaS6tPH2jd2hxHRpo93IurNIMbjvxqEnaAtBg4MrfkfZSTtTFr+WzdZwBsP7qdD1d/WKz3xWXAawfgq7i8hD3YEx6KhKFhSthFRERERERUaS+tWrVg7VrYuRMaNQIfn/O/x7LDsrtg7xcQ3A16zwGvgOJdr3Z7sLnnJfy1O5Q6dEcL8gnCzeaG3bJjt+yE1gw9Z3u7BQuOm+p6Zr7vLy4NhOvrgLe+ShIREREREQGUtJeNpye0alX89vFLYM8Uc5ywFP75H7QYU7z3BraFvovg8C8QcblLJe0NAxsy88aZfLzmY7rU7cI9ne4psm1sBnwWA/+cyjsX4gnDwuEC3woIVkREREREpBJR0l6RvALzPbHAq3bJ3l+np3m4oBtb3ciNrW4s8nW7Bb8fhx8SICunum4DLqsN14Woui4iIiIiIlIYJe0VKaA1dHkf9n4JoRdDw9ucHVGFOJJuqut70/LOhXrBsDBoquq6iIiIiIhIkZS0V5TjG2DB5ZCeAG3HQ9v/ODuicme34NdjMPtowep6n5zqupeq6yIiIiIiIuekpN0RLMvs134uW1+BjATADhvHQYvRLrnXuqMczqmu78tXXQ/zMnPXmxRjzT4RERERERFR0l422Wnw50A4Mg/qXQUXfQvuXoW3rREGFoANPGqBm7dDQjiVeYo3lr1Bcnoyo7uNJsIvwiH9llZ2vup6dr7q+uVBcG0weKq6LiIiIiIiUmxK2sviwCyzfzrAodlweDbUH1R427bPQnY6pO6HVk+Au2OS9jG/juHjNR9jw8avu35l7f1rHdJvaRxMM9X1A+l55yJyquuNVF0XEREREREpMSXtxXVkHqweDZ4BED0V/Judvfr7uVaD96wFXd91eFgbYzdit+wAbEnY4vD+iyPLDr8cg5+PmXnsYKrr/YNggKrrIiIiIiIipaZ0qriWDIHkbXBsJawZY85F9Ie2z4FnINg8Ye90yEmgK8qY7mNws5m/xkeiH6nQawMcSIOJ++Gno3kJe11vGNsABtapBgl7ZgqsfwpWPQgnDzg7GhERERERqWJUac/vggsgMhI+/RTatTvjRTtmUrqVl5jbbFAjBDKTzPndH0HDoRDWq8JCHtx6MJc0uITUzFQa125cYdfNssOcYzA3X3XdzZZTXQ8Cj6qerJ+26v9gz+fmXoj9AwZscHZEIiIiIiJShShpzy821jzuuQdWrCj4WvTnsGqUGQLf6b95591rkrPCnOFRs0JCzS+8VniFXm9fztz1w/nmrkd6m7nrUTUqNBTnS94G2M0tcGKXs6MREREREZEqRkl7YbKyzj5Xb4B5nKnhUDi+DuL+gEbDILhLeU
fnNFl2syr8r8cLVtevCoIrq1N1Pb9Wj8Nfg8HKgjZPOzsaERERERGpYmyWZVnnb1a1JScnExAQQBLgHxEB330H3bo5OyyXsveUqa4fycg7Vz+nul6/ulXXz5R+DOzp4OPc7fZERERERKTyyM1Dk5Lw9/cvsp0q7fnt2AHNmpn91zc+D2mx0GIM+DUtvP3uT+HIbxA5EBreUpGRVphMu1lk7rdjeZMA3G1wdTD0CzLH1Z53kLMjEBERERGRKkpJe35hYebP9U/BtjfA5gaHfoTr9pmFxvKLmQ/L7wZssH+mSezPHBp/bDWsfgjcfaDr++DXpEI+hqP8cwqmxEBMvup6gxqmul7PMdvMi4iIiIiIyDkoaS9M8nbzp5UNqQfBngHuZ2SpJ/flHOTUn1P3n520/3UznNhjEv4V90Kf+WUO7fip4zz/5/OcyjzF2IvHEhUQVeY+z5Rphx8S4PfjedV1j3zVdTdV10VERERERCqEkvbCtBgDMb+becotHjo7YQeoPwi2vQlJGyG4u9mz/UxZJzEri7tB1gmHhHbf7PuYtXUWAMsPLWfNfWsc0u9pu1JhSizE5auuN8yprtdVdV1ERERERKRCKWkvTHhfGBRjEm3fyMLbeAXCVesgPQG865w9fB7gwg/MEHp3H+j0pkNC23VsF9lWNgD/HP/HIX0CpOdU1xecUV2/NgQur63quoiIiIiIiDMoaS+KV6B5nIvNDWqEFv165LUQGe/IqHj6kqe55ZtbyLJn8dylzzmkz52pZu56fGbeucY+MCwMwlVdFxERERERcRol7ZXMoJaDSHgsgSx7FkE+ZVu1PN0O38XDwsS8c542GBgCl6m6LiIiIiIi4nRK2ishf++i9/Arru2pMDUGEvJV15v4mLnrYV5l7l5EREREREQcQEl7NZOWDbMSYFFi3jlPG1xfBy4NVHVdRERERETElShpr0a2noTPY+Fovup6Mx+4IxxCq2t13bLD+ifh8ByIuhlaP1n4ooJSMSwL1q4FLy9o08bZ0YiIiIiIOJ2S9mogLRu+iYfFSXnnvNxgUAj0DqzmOeqB72DLy+Y4cSPU6QFhlzo3pups7Fh4Oefv4403YMwYp4YjIiIiIuJsbs4OQMrX5pMwfm/BhL25L4xrAJfWruYJO0B2asHnWamFt5OK8eGHeccffOC8OEREREREXISS9ioqNdssNPf2QTieZc55u8HQMHgoEkKq63D4M0UNhshB4F4TGt0BEf2dHVH11r27+SbJZoOePZ0djYiIiIiI02l4fBW06YSZu56YlXeupS/cHg7Bns6LyyW5e8Ml3zo7Cjlt5kz46CPw9oZ77nF2NCIiIiIiTqekvRxtid/C478/jq+HL6/3e51I/8hyvV5qNnwVB0uT887VcIMb68BFARoKL5WAnx88/LCzoxARERERcRlK2svRDTNvYMexHdiwkZyRzNyhc8vtWhtOwBexkJSvut66JtwWBkGqrouIiIiIiFRKStrL0dFTR7FbdmzYiD8ZXy7XOJkNM+Ng+RnV9cGh0MNf1XUREREREZHKTAvRlaO3+r+Fj4cPATUCeOXyV87b/mDyQW755hYGzhjI1vit522/LsWsDJ8/YW9TE8Y3hJ4aDi8iIiIiIlLp2SzLspwdhLMlJycTEBBAUlIS/v7+Du37dKXdVowMuv8X/fn9n9+xsGgR0oLNIzYX2u5EFsyIg5Upeed83eHmOtCtKlbXN02ALRPBrzn0+gl86zo7IhERERERkTIpbh6q4fHlzM1W/MEM8anx2C07FhYJqQmFtlmTAtNiISU771y7WjA0FAKr4tz11IOw4WlznLgetr0OnV53bkwiIiIiIiIVRMPjXcjLfV/Gz9uPGh41eLPfmwVeS8mCDw/DB4fzEnZfd7grAkbUraIJO4CbN9jczbFlgUdN58YjIiIiIiJSgTQ8nvIdHl9SdsuOZVm4u5lE1bJgdQpMj4MT+arrHWrB0DDwrw5jJfZ9BVtfhYBW0O
U98Kzl7IhERERERETKRMPjK4mJiyfy1vK36Fy3M9NvmI6/tz/kzElPzjJD4deeyGtf0x2GhEIXvyo4d70oDQabh4iIiIiISDWj4fFOtD1hO08ueJLYk7H8susX3ln+DmCq6yuSzcrw+RP2DrXsPBGZRtequNiciIiIiIiInEVJeylYlsVDvzyE30Q/+k7tS3J68vnfVIjTQ+BP83DzICkLJh2GT46YPdgB/NzhMu+djJ8eQcQrNRm3cFxZP4KIiIiIiIhUAkraS2HV4VW8ufxNTmScYOHehXy0+qNS9dM0qClv9nuTRoGNuKHljVzY8kHG74X1+arrXfxgXEOYvfY5ElITsFt2nvvzORLTEh3xUURERERERMSFaU57Kfh6+uYe2y17geclNbr7aIZ1Hs0XsTAj3y5vfu5mobmOfuZ5eM1wAGzYqOVVixoeNUp9TREREREREakclLSXQuvQ1rx75bt8uOZDetbvyd2d7i5VP5YFS5Phqzg4Zc87f6E/3BJqFp07bXzv8WRkZ7A3aS+P9XhMSbuIiIiIiEg1oC3fcM6Wb8cz4fNY2Hwy75y/B9wWBu21o5mIiIiIiEiVpi3fXJRlwZIk+Doe0vJV17v7w+AzqusiIiIiIiJSvSlpL6HF+xaTkJrAVc2uwtvDu0TvPZoJn8fA1tS8c4E51fW2qq6LiIiIiIjIGZS0l8B7K95j1NxRAPRv2p+5Q+cW632WBYuT4Jt4SM9XXe8RADfVAV9V10VERERERKQQStpLYNa2WbnHv+z6hSx7FssOLmPe7nn0b9qf6PrRZ70nIcPMXd+Wr7pe2wNuD4fWNSsiahEREREREamslLSXwFVNr2LBngUA9GrQi42xG+n1WS8AJiyewPr719M6tDVgquuLEmFWQsHq+kUBcGMd8FF1XURERERERM5DSXsJPBz9MK3qtCIhNYEbWt3AjE0zsFt5Gfn6WJO0x2fA1FjYka+6HuQJt4dBK1XXRUREREREpJiUtJeAzWbjymZX5j6/sumVhNYMJe5kHHX96tK30eUsOA7fJUBGvur6JYFwQwjUUHVdRERERERESkBJexlE+EWwfdR2NsZupG5wB6Yc82PXqbzXgz3hjjBooeq6iIiIiIiIlIKS9jLy9w4k3e9i3jgMmVbe+d6BMKgOeLs5LTQRERERERGp5JS0l0FsBnwWA//kq66HeMKwcLjA13lxiYiIiIiISNWgpL0U7Bb8fhx+TMirrtuAS2vDwBBV10VERERERMQxlLSX0JF0mBIDe9LyzoV6mbnrzVRdFxEREREREQdS0l5Mdgt+OwY/HYWsfNX1PrXhuhDwUnVdREREREREHExJexEOpxxmf9J+utTtQlymB5/FwL581fUwLzN3vYmP82IUERERERGRqk1JeyGW7F/CZVMvIyM7i7YtHqVnx4nYLRtgquuXB8G1weCp6rqIiIiIiIiUIyXthfh8w+dkeYRDvdvZmF2fVmnJBHgHEJFTXW/krOp64kZYfg/Ys6DrJAi50EmBiIiIiIiISEVQ0n6GLDt41rkee/124OZBDfca1PL0pX8QXO3s6vrye+DYKrCApbfDNdudGIyIiIiIiIiUtyozwPu9996jYcOG1KhRg27durFixYoS93EwDSbuh/TAK7is8RV0Cu/EfW2v5umGnlxfxwWGw9uzTMKOZY5FRERERESkSnN2GuoQM2fO5OGHH2bcuHGsWbOG9u3b069fP+Li4krUz+sH4WA62LBxQXBTnmnbhdeaB9HwHMPhM7Iz2HF0B5nZmSW6VnpWevHeZ1mQsgsyU8yQeL+mULMRdP+kRNerUux22LULTpxwdiQiIiIiIiLlqkok7f/973/517/+xZ133kmrVq2YPHkyvr6+fPrppyXqx56zlVukN4yNMlu5eZzjv9CxU8do9V4rmr/bnHaT25GUllSs68SfjKfFey1o/m5zOn7QkZT0lMIbWhb8ORB+agbfR4K7txkSf91uCOtdos9WZd
jtcNVV0KwZREXBli3OjkhERERERKTcVPo57RkZGaxevZqxY8fmnnNzc6Nv374sXbq00Pekp6eTnp6e+zwpySTbWSeT6R8ElweCRwYkZ5z72jM3zGT3kd0AbDu4jW/XfcuNrW48b8zT105nb8xeADYf2Mz367/nuhbXnd0weTfs/NEc207A+veh06vn7b9K27QJfv3VHCclweTJ8MILzo1JRERERESkhJKTkwGwLOuc7WzW+Vq4uMOHD1OvXj3+/vtvoqOjc88/9thjLFq0iOXLl5/1nvHjx/Pss89WZJgiIiIiIiIiZzlw4ACRkZFFvl7pK+2lMXbsWB5++OHc54mJiTRo0ID9+/cTEBDgxMhEyk9ycjL169fnwIED+Pv7OzsckXKh+1yqC93rUh3oPpeqzrIsUlJSqFu37jnbVfqkPSQkBHd3d2JjYwucj42NJTw8vND3eHt74+3tfdb5gIAA/UCQKs/f31/3uVR5us+lutC9LtWB7nOpyopTNK70C9F5eXnRuXNn5s+fn3vObrczf/78AsPlRURERERERCqbSl9pB3j44YcZNmwYXbp04cILL+TNN9/k5MmT3Hnnnc4OTURERERERKTUqkTSfvPNNxMfH88zzzxDTEwMHTp04JdffiEsLKxY7/f29mbcuHGFDpkXqSp0n0t1oPtcqgvd61Id6D4XMSr96vEiIiIiIiIiVVWln9MuIiIiIiIiUlUpaRcRERERERFxUUraRURERERERFyUknYRERERERERF1Xtk/b33nuPhg0bUqNGDbp168aKFSucHZJIsU2cOJGuXbvi5+dHaGgoAwcOZPv27QXapKWlMXLkSIKDg6lVqxY33HADsbGxBdrs37+fAQMG4OvrS2hoKI8++ihZWVkV+VFEiu2ll17CZrMxZsyY3HO6z6WqOHToELfddhvBwcH4+PjQtm1bVq1alfu6ZVk888wzRERE4OPjQ9++fdm5c2eBPo4dO8bQoUPx9/cnMDCQu+++mxMnTlT0RxEpVHZ2Nv/5z39o1KgRPj4+NGnShOeff578a2PrPhcpqFon7TNnzuThhx9m3LhxrFmzhvbt29OvXz/i4uKcHZpIsSxatIiRI0eybNky5s2bR2ZmJldccQUnT57MbfPQQw/x008/8fXXX7No0SIOHz7MoEGDcl/Pzs5mwIABZGRk8PfffzNlyhQ+++wznnnmGWd8JJFzWrlyJR988AHt2rUrcF73uVQFx48fp2fPnnh6ejJ37ly2bNnC66+/Tu3atXPbvPLKK7z99ttMnjyZ5cuXU7NmTfr160daWlpum6FDh7J582bmzZvH7Nmz+fPPP7n33nud8ZFEzvLyyy8zadIk3n33XbZu3crLL7/MK6+8wjvvvJPbRve5yBmsauzCCy+0Ro4cmfs8Ozvbqlu3rjVx4kQnRiVSenFxcRZgLVq0yLIsy0pMTLQ8PT2tr7/+OrfN1q1bLcBaunSpZVmWNWfOHMvNzc2KiYnJbTNp0iTL39/fSk9Pr9gPIHIOKSkpVrNmzax58+ZZvXr1skaPHm1Zlu5zqToef/xx66KLLirydbvdboWHh1uvvvpq7rnExETL29vbmj59umVZlrVlyxYLsFauXJnbZu7cuZbNZrMOHTpUfsGLFNOAAQOsu+66q8C5QYMGWUOHDrUsS/e5SGGqbaU9IyOD1atX07dv39xzbm5u9O3bl6VLlzoxMpHSS0pKAiAoKAiA1atXk5mZWeA+b9GiBVFRUbn3+dKlS2nbti1hYWG5bfr160dycjKbN2+uwOhFzm3kyJEMGDCgwP0Mus+l6vjxxx/p0qULN910E6GhoXTs2JGPPvoo9/U9e/YQExNT4F4PCAigW7duBe71wMBAunTpktumb9++uLm5sXz58or7MCJF6NGjB/Pnz2fHjh0ArF+/nr/++osrr7wS0H0uUhgPZwfgLAkJCWRnZxf4BQ4gLCyMbdu2OSkqkdKz2+2MGTOGnj170qZNGwBiYm
Lw8vIiMDCwQNuwsDBiYmJy2xT27+D0ayKuYMaMGaxZs4aVK1ee9Zruc6kq/vnnHyZNmsTDDz/Mk08+ycqVK3nwwQfx8vJi2LBhufdqYfdy/ns9NDS0wOseHh4EBQXpXheX8MQTT5CcnEyLFi1wd3cnOzubCRMmMHToUADd5yKFqLZJu0hVM3LkSDZt2sRff/3l7FBEHOrAgQOMHj2aefPmUaNGDWeHI1Ju7HY7Xbp04cUXXwSgY8eObNq0icmTJzNs2DAnRyfiGF999RVffvkl06ZNo3Xr1qxbt44xY8ZQt25d3eciRai2w+NDQkJwd3c/a3Xh2NhYwsPDnRSVSOmMGjWK2bNns3DhQiIjI3PPh4eHk5GRQWJiYoH2+e/z8PDwQv8dnH5NxNlWr15NXFwcnTp1wsPDAw8PDxYtWsTbb7+Nh4cHYWFhus+lSoiIiKBVq1YFzrVs2ZL9+/cDeffquX53CQ8PP2tB3aysLI4dO6Z7XVzCo48+yhNPPMEtt9xC27Ztuf3223nooYeYOHEioPtcpDDVNmn38vKic+fOzJ8/P/ec3W5n/vz5REdHOzEykeKzLItRo0bx3XffsWDBAho1alTg9c6dO+Pp6VngPt++fTv79+/Pvc+jo6PZuHFjgf/5zZs3D39//7N+eRRxhj59+rBx40bWrVuX++jSpQtDhw7NPdZ9LlVBz549z9q2c8eOHTRo0ACARo0aER4eXuBeT05OZvny5QXu9cTERFavXp3bZsGCBdjtdrp161YBn0Lk3FJTU3FzK5iCuLu7Y7fbAd3nIoVy9kp4zjRjxgzL29vb+uyzz6wtW7ZY9957rxUYGFhgdWERV/bAAw9YAQEB1h9//GEdOXIk95Gamprb5v7777eioqKsBQsWWKtWrbKio6Ot6Ojo3NezsrKsNm3aWFdccYW1bt0665dffrHq1KljjR071hkfSaRY8q8eb1m6z6VqWLFiheXh4WFNmDDB2rlzp/Xll19avr6+1hdffJHb5qWXXrICAwOtH374wdqwYYN13XXXWY0aNbJOnTqV26Z///5Wx44dreXLl1t//fWX1axZM2vIkCHO+EgiZxk2bJhVr149a/bs2daePXusWbNmWSEhIdZjjz2W20b3uUhB1TpptyzLeuedd6yoqCjLy8vLuvDCC61ly5Y5OySRYgMKffzvf//LbXPq1ClrxIgRVu3atS1fX1/r+uuvt44cOVKgn71791pXXnml5ePjY4WEhFj//ve/rczMzAr+NCLFd2bSrvtcqoqffvrJatOmjeXt7W21aNHC+vDDDwu8brfbrf/85z9WWFiY5e3tbfXp08favn17gTZHjx61hgwZYtWqVcvy9/e37rzzTislJaUiP4ZIkZKTk63Ro0dbUVFRVo0aNazGjRtbTz31VIHtN3WfixRksyzLcmalX0REREREREQKV23ntIuIiIiIiIi4OiXtIiIiIiIiIi5KSbuIiIiIiIiIi1LSLiIiIiIiIuKilLSLiIiIiIiIuCgl7SIiIiIiIiIuSkm7iIiIiIiIiItS0i4iIiIiIiLiopS0i4iIiEOMHz+eDh06ODsMAHr37s2YMWOcHYaIiEiZKWkXERFxMTExMYwePZqmTZtSo0YNwsLC6NmzJ5MmTSI1NdXZ4ZXK+PHjsdls53yUxh9//IHNZiMxMdGxAYuIiLgID2cHICIiInn++ecfevbsSWBgIC+++CJt27bF29ubjRs38uGHH1KvXj2uvfbaQt+bmZmJp6dnBUdcPI888gj3339/7vOuXbty77338q9//avQ9hkZGXh5eVVUeCIiIi5LlXYREREXMmLECDw8PFi1ahWDBw+mZcuWNG7cmOuuu46ff/6Za665JretzWZj0qRJXHvttdSsWZMJEyYAMGnSJJo0aYKXlxfNmzfn888/z33P3r17sdlsrFu3LvdcYmIiNpuNP/74A8irXs+fP58uXbrg6+tLjx492L59e4FYX3rpJcLCwvDz8+
Puu+8mLS2tyM9Vq1YtwsPDcx/u7u74+fnlPr/lllsYNWoUY8aMISQkhH79+p031r1793LppZcCULt2bWw2G8OHD89ta7fbeeyxxwgKCiI8PJzx48eX8G9DRETE+ZS0i4iIuIijR4/y22+/MXLkSGrWrFlomzOHkY8fP57rr7+ejRs3ctddd/Hdd98xevRo/v3vf7Np0ybuu+8+7rzzThYuXFjieJ566ilef/11Vq1ahYeHB3fddVfua1999RXjx4/nxRdfZNWqVURERPD++++X+Br5TZkyBS8vL5YsWcLkyZPP275+/fp8++23AGzfvp0jR47w1ltvFeivZs2aLF++nFdeeYXnnnuOefPmlSlGERGRiqbh8SIiIi5i165dWJZF8+bNC5wPCQnJrWKPHDmSl19+Ofe1W2+9lTvvvDP3+ZAhQxg+fDgjRowA4OGHH2bZsmW89tpruVXp4powYQK9evUC4IknnmDAgAGkpaVRo0YN3nzzTe6++27uvvtuAF544QV+//33c1bbz6dZs2a88soruc/37t17zvbu7u4EBQUBEBoaSmBgYIHX27Vrx7hx43L7fvfdd5k/fz6XX355qWMUERGpaKq0i4iIuLgVK1awbt06WrduTXp6eoHXunTpUuD51q1b6dmzZ4FzPXv2ZOvWrSW+brt27XKPIyIiAIiLi8u9Trdu3Qq0j46OLvE18uvcuXOZ3n+m/PGD+Qyn4xcREaksVGkXERFxEU2bNsVms501d7xx48YA+Pj4nPWeoobRF8XNzXxfb1lW7rnMzMxC2+Zf1O70sHy73V6i65XEmZ+lJLEW5sxF+Ww2W7nGLyIiUh5UaRcREXERwcHBXH755bz77rucPHmyVH20bNmSJUuWFDi3ZMkSWrVqBUCdOnUAOHLkSO7r+Rd6K8l1li9fXuDcsmXLStzPuRQn1tMrzGdnZzv02iIiIq5ClXYREREX8v7779OzZ0+6dOnC+PHjadeuHW5ubqxcuZJt27addwj5o48+yuDBg+nYsSN9+/blp59+YtasWfz++++AqdZ3796dl156iUaNGhEXF8fTTz9d4jhHjx7N8OHD6dKlCz179uTLL79k8+bNuaMCHKE4sTZo0ACbzcbs2bO56qqr8PHxoVatWg6LQURExNlUaRcREXEhTZo0Ye3atfTt25exY8fSvn17unTpwjvvvMMjjzzC888/f873Dxw4kLfeeovXXnuN1q1b88EHH/C///2P3r1757b59NNPycrKonPnzowZM4YXXnihxHHefPPN/Oc//+Gxxx6jc+fO7Nu3jwceeKDE/ZzP+WKtV68ezz77LE888QRhYWGMGjXK4TGIiIg4k83KP1FMRERERERERFyGKu0iIiIiIiIiLkpJu4iIiIiIiIiLUtIuIiIiIiIi4qKUtIuIiIiIiIi4KCXtIiIiIiIiIi5KSbuIiIiIiIiIi1LSLiIiIiIiIuKilLSLiIiIiIiIuCgl7SIiIiIiIiIuSkm7iIiIiIiIiItS0i4iIiIiIiLiov4fZNE3wVg8D/EAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "Tester.test(word2vec_lr_pricer)" + ] + }, + { + "cell_type": "code", + "execution_count": 68, + "id": "9d6d3265-37c1-464c-a489-5be4df0a7276", + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
LinearSVR()
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
" + ], + "text/plain": [ + "LinearSVR()" + ] + }, + "execution_count": 68, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Support Vector Machines\n", + "\n", + "np.random.seed(42)\n", + "svr_regressor = LinearSVR()\n", + "\n", + "svr_regressor.fit(X_w2v, prices)" + ] + }, + { + "cell_type": "code", + "execution_count": 69, + "id": "fcc289e6-56a1-4119-864f-2fdf8efde643", + "metadata": {}, + "outputs": [], + "source": [ + "def svr_pricer(item):\n", + " np.random.seed(42)\n", + " doc = item.test_prompt()\n", + " doc_vector = document_vector(doc)\n", + " return max(float(svr_regressor.predict([doc_vector])[0]),0)" + ] + }, + { + "cell_type": "code", + "execution_count": 70, + "id": "80286a48-7cca-40e6-af76-a814a23bb9dc", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[93m1: Guess: $246.51 Truth: $374.41 Error: $127.90 SLE: 0.17 Item: OEM AC Compressor w/A/C Repair Kit For F...\u001b[0m\n", + "\u001b[93m2: Guess: $168.97 Truth: $225.11 Error: $56.14 SLE: 0.08 Item: Motorcraft YB3125 Fan Clutch\u001b[0m\n", + "\u001b[91m3: Guess: $142.40 Truth: $61.68 Error: $80.72 SLE: 0.68 Item: Dorman 603-159 Front Washer Fluid Reserv...\u001b[0m\n", + "\u001b[91m4: Guess: $300.53 Truth: $599.99 Error: $299.46 SLE: 0.48 Item: HP Premium 17.3-inch HD Plus Touchscreen...\u001b[0m\n", + "\u001b[93m5: Guess: $74.31 Truth: $16.99 Error: $57.32 SLE: 2.05 Item: 5-Position Super Switch Pickup Selector ...\u001b[0m\n", + "\u001b[92m6: Guess: $0.00 Truth: $31.99 Error: $31.99 SLE: 12.22 Item: Horror Bookmarks, Resin Horror Bookmarks...\u001b[0m\n", + "\u001b[91m7: Guess: $239.94 Truth: $101.79 Error: $138.15 SLE: 0.73 Item: SK6241 - Stinger 4 Gauge 6000 Series Pow...\u001b[0m\n", + "\u001b[93m8: Guess: $194.11 Truth: $289.00 Error: $94.89 SLE: 0.16 Item: Godox ML60Bi LED Light Kit, Handheld LED...\u001b[0m\n", + "\u001b[91m9: Guess: $304.70 Truth: $635.86 Error: $331.16 SLE: 0.54 Item: Randall RG75DG3PLUS 
G3 Plus 100-Watt Com...\u001b[0m\n", + "\u001b[92m10: Guess: $101.19 Truth: $65.99 Error: $35.20 SLE: 0.18 Item: HOLDWILL 6 Pack LED Shop Light, 4FT 24W ...\u001b[0m\n", + "\u001b[93m11: Guess: $338.06 Truth: $254.21 Error: $83.85 SLE: 0.08 Item: Viking Horns V103C/1005ATK 3 Gallon Air ...\u001b[0m\n", + "\u001b[93m12: Guess: $288.65 Truth: $412.99 Error: $124.34 SLE: 0.13 Item: CURT 70110 Custom Tow Bar Base Plate Bra...\u001b[0m\n", + "\u001b[91m13: Guess: $119.40 Truth: $205.50 Error: $86.10 SLE: 0.29 Item: 10-Pack Solar HAMMERED BRONZE Finish Pos...\u001b[0m\n", + "\u001b[93m14: Guess: $183.59 Truth: $248.23 Error: $64.64 SLE: 0.09 Item: COSTWAY Electric Tumble Dryer, Sliver\u001b[0m\n", + "\u001b[91m15: Guess: $201.14 Truth: $399.00 Error: $197.86 SLE: 0.47 Item: FREE SIGNAL TV Transit 32\" 12 Volt DC Po...\u001b[0m\n", + "\u001b[93m16: Guess: $241.86 Truth: $373.94 Error: $132.08 SLE: 0.19 Item: Bilstein 5100 Monotube Gas Shock Set com...\u001b[0m\n", + "\u001b[91m17: Guess: $239.75 Truth: $92.89 Error: $146.86 SLE: 0.89 Item: Sangean K-200 Multi-Function Upright AM/...\u001b[0m\n", + "\u001b[92m18: Guess: $53.98 Truth: $51.99 Error: $1.99 SLE: 0.00 Item: Charles Leonard Magnetic Lapboard Class ...\u001b[0m\n", + "\u001b[91m19: Guess: $260.03 Truth: $179.00 Error: $81.03 SLE: 0.14 Item: Gigabyte AMD Radeon HD 7870 2 GB GDDR5 D...\u001b[0m\n", + "\u001b[93m20: Guess: $65.32 Truth: $19.42 Error: $45.90 SLE: 1.39 Item: 3dRose LLC 8 x 8 x 0.25 Inches Bull Terr...\u001b[0m\n", + "\u001b[91m21: Guess: $269.79 Truth: $539.95 Error: $270.16 SLE: 0.48 Item: ROKINON 85mm F1.4 Auto Focus Full Frame ...\u001b[0m\n", + "\u001b[92m22: Guess: $183.96 Truth: $147.67 Error: $36.29 SLE: 0.05 Item: AUTOSAVER88 Headlight Assembly Compatibl...\u001b[0m\n", + "\u001b[92m23: Guess: $57.41 Truth: $24.99 Error: $32.42 SLE: 0.66 Item: ASI NAUTICAL 2.5 Inches Opera Glasses Bi...\u001b[0m\n", + "\u001b[91m24: Guess: $279.25 Truth: $149.00 Error: $130.25 SLE: 0.39 Item: Behringer TUBE 
OVERDRIVE TO100 Authentic...\u001b[0m\n", + "\u001b[92m25: Guess: $5.51 Truth: $16.99 Error: $11.48 SLE: 1.03 Item: Fun Express Insect Finger Puppets - 24 f...\u001b[0m\n", + "\u001b[91m26: Guess: $99.37 Truth: $7.99 Error: $91.38 SLE: 5.82 Item: WAFJAMF Roller Stamp Identity Theft Stam...\u001b[0m\n", + "\u001b[93m27: Guess: $145.83 Truth: $199.99 Error: $54.16 SLE: 0.10 Item: Capulina Tiffany Floor Lamp 2-Light 16\" ...\u001b[0m\n", + "\u001b[91m28: Guess: $149.51 Truth: $251.45 Error: $101.94 SLE: 0.27 Item: Apple Watch Series 6 (GPS, 44mm) - Space...\u001b[0m\n", + "\u001b[93m29: Guess: $174.26 Truth: $231.62 Error: $57.36 SLE: 0.08 Item: ICON 01725 Tandem Axle Fender Skirt FS17...\u001b[0m\n", + "\u001b[92m30: Guess: $145.82 Truth: $135.00 Error: $10.82 SLE: 0.01 Item: SanDisk 128GB Ultra (10 Pack) MicroSD Cl...\u001b[0m\n", + "\u001b[93m31: Guess: $257.96 Truth: $356.62 Error: $98.66 SLE: 0.10 Item: Velvac 2020,L,C/Hr,W,E2003,102\",Bk - 715...\u001b[0m\n", + "\u001b[92m32: Guess: $220.95 Truth: $257.99 Error: $37.04 SLE: 0.02 Item: TCMT Passenger Backrest Sissy Bar & Lugg...\u001b[0m\n", + "\u001b[91m33: Guess: $134.52 Truth: $27.99 Error: $106.53 SLE: 2.38 Item: Alnicov 63.5MM Brass Tremolo Block,Tremo...\u001b[0m\n", + "\u001b[92m34: Guess: $151.09 Truth: $171.20 Error: $20.11 SLE: 0.02 Item: Subaru Forester Outback Legacy OEM Engin...\u001b[0m\n", + "\u001b[93m35: Guess: $136.61 Truth: $225.00 Error: $88.39 SLE: 0.25 Item: Richmond Auto Upholstery - 2012 Dodge Ra...\u001b[0m\n", + "\u001b[93m36: Guess: $178.94 Truth: $105.00 Error: $73.94 SLE: 0.28 Item: AP-39 Automotive Paint Primer Grey 2K Ur...\u001b[0m\n", + "\u001b[91m37: Guess: $178.99 Truth: $299.99 Error: $121.00 SLE: 0.26 Item: Road Top Wireless Carplay Retrofit Kit D...\u001b[0m\n", + "\u001b[92m38: Guess: $443.82 Truth: $535.09 Error: $91.27 SLE: 0.03 Item: Gibson Performance Exhaust 5658 Aluminiz...\u001b[0m\n", + "\u001b[92m39: Guess: $11.71 Truth: $12.33 Error: $0.62 SLE: 0.00 Item: Bella 
Tunno Happy Links - Baby Montessor...\u001b[0m\n", + "\u001b[93m40: Guess: $140.54 Truth: $84.99 Error: $55.55 SLE: 0.25 Item: CANMORE H300 Handheld GPS Golf Device, S...\u001b[0m\n", + "\u001b[92m41: Guess: $48.90 Truth: $15.99 Error: $32.91 SLE: 1.16 Item: DCPOWER AC Adapter Compatible Replacemen...\u001b[0m\n", + "\u001b[92m42: Guess: $101.15 Truth: $62.44 Error: $38.71 SLE: 0.23 Item: Sharp, VX2128V, Commercial Desktop Calcu...\u001b[0m\n", + "\u001b[91m43: Guess: $164.64 Truth: $82.99 Error: $81.65 SLE: 0.46 Item: Melissa & Doug Lifelike Plush Stork Gian...\u001b[0m\n", + "\u001b[91m44: Guess: $257.26 Truth: $599.95 Error: $342.69 SLE: 0.71 Item: Sony SSCS8 2-Way 3-Driver Center Channel...\u001b[0m\n", + "\u001b[93m45: Guess: $268.69 Truth: $194.99 Error: $73.70 SLE: 0.10 Item: ASUS Chromebook CX1, 14\" Full HD NanoEdg...\u001b[0m\n", + "\u001b[93m46: Guess: $226.83 Truth: $344.95 Error: $118.12 SLE: 0.17 Item: FiiO X7 32GB Hi-Res Lossless Music Playe...\u001b[0m\n", + "\u001b[92m47: Guess: $74.17 Truth: $37.99 Error: $36.18 SLE: 0.43 Item: TORRO Leather Case Compatible with iPhon...\u001b[0m\n", + "\u001b[92m48: Guess: $214.73 Truth: $224.35 Error: $9.62 SLE: 0.00 Item: Universal Air Conditioner KT 1031 A/C Co...\u001b[0m\n", + "\u001b[91m49: Guess: $300.19 Truth: $814.00 Error: $513.81 SLE: 0.99 Item: Street Series Stainless Performance Cat-...\u001b[0m\n", + "\u001b[92m50: Guess: $450.07 Truth: $439.88 Error: $10.19 SLE: 0.00 Item: Lenovo IdeaPad 3 14-inch Laptop, 14.0-in...\u001b[0m\n", + "\u001b[92m51: Guess: $294.39 Truth: $341.43 Error: $47.04 SLE: 0.02 Item: Access Bed Covers TonnoSport 22050219 - ...\u001b[0m\n", + "\u001b[91m52: Guess: $143.94 Truth: $46.78 Error: $97.16 SLE: 1.23 Item: G.I. 
JOE Hasbro 3 3/4\" Wave 5 Action Fig...\u001b[0m\n", + "\u001b[93m53: Guess: $111.77 Truth: $171.44 Error: $59.67 SLE: 0.18 Item: T&S Brass B-0232-BST Double Pantry Fauce...\u001b[0m\n", + "\u001b[91m54: Guess: $178.96 Truth: $458.00 Error: $279.04 SLE: 0.88 Item: ZTUOAUMA Fuel Injection Pump 3090942 309...\u001b[0m\n", + "\u001b[93m55: Guess: $192.17 Truth: $130.75 Error: $61.42 SLE: 0.15 Item: 2AP18AA#ABA Hp Prime Graphing Calculator...\u001b[0m\n", + "\u001b[93m56: Guess: $126.51 Truth: $83.81 Error: $42.70 SLE: 0.17 Item: Lowrance 000-0119-83 Nmea 2000 25' Exten...\u001b[0m\n", + "\u001b[91m57: Guess: $151.76 Truth: $386.39 Error: $234.63 SLE: 0.87 Item: Jeep Genuine Accessories 82213051 Hood L...\u001b[0m\n", + "\u001b[92m58: Guess: $154.53 Truth: $169.00 Error: $14.47 SLE: 0.01 Item: GODOX CB-06 Hard Carrying Case with Whee...\u001b[0m\n", + "\u001b[93m59: Guess: $74.14 Truth: $17.95 Error: $56.19 SLE: 1.90 Item: Au-Tomotive Gold, INC. Ford Black Valet ...\u001b[0m\n", + "\u001b[91m60: Guess: $147.84 Truth: $269.00 Error: $121.16 SLE: 0.35 Item: Snailfly Black Roof Rack Rail + Cross Ba...\u001b[0m\n", + "\u001b[92m61: Guess: $116.75 Truth: $77.77 Error: $38.98 SLE: 0.16 Item: KING SHA Anti Glare LED Track Lighting H...\u001b[0m\n", + "\u001b[91m62: Guess: $208.97 Truth: $88.99 Error: $119.98 SLE: 0.72 Item: APS Compatible with Chevy Silverado 1500...\u001b[0m\n", + "\u001b[92m63: Guess: $304.12 Truth: $364.41 Error: $60.29 SLE: 0.03 Item: Wilwood Engineering 14011291R Brake Cali...\u001b[0m\n", + "\u001b[92m64: Guess: $154.33 Truth: $127.03 Error: $27.30 SLE: 0.04 Item: ACDelco Gold 336-1925A Starter, Remanufa...\u001b[0m\n", + "\u001b[91m65: Guess: $268.19 Truth: $778.95 Error: $510.76 SLE: 1.13 Item: UWS EC10783 69-Inch Matte Black Heavy-Wa...\u001b[0m\n", + "\u001b[91m66: Guess: $500.64 Truth: $206.66 Error: $293.98 SLE: 0.78 Item: Dell Latitude E5440 14in Business Laptop...\u001b[0m\n", + "\u001b[93m67: Guess: $91.35 Truth: $35.94 Error: $55.41 SLE: 0.84 
Item: (Plug and Play) Spare Tire Brake Light W...\u001b[0m\n", + "\u001b[92m68: Guess: $167.41 Truth: $149.00 Error: $18.41 SLE: 0.01 Item: The Ultimate Roadside Rescue Assistant\u001b[0m\n", + "\u001b[92m69: Guess: $212.57 Truth: $251.98 Error: $39.41 SLE: 0.03 Item: Brand New 18\" x 8.5\" Replacement Wheel f...\u001b[0m\n", + "\u001b[92m70: Guess: $187.86 Truth: $160.00 Error: $27.86 SLE: 0.03 Item: Headlight Headlamp LH Left & RH Right Pa...\u001b[0m\n", + "\u001b[91m71: Guess: $153.30 Truth: $39.99 Error: $113.31 SLE: 1.76 Item: Lilo And Stitch Deluxe Oversize Print La...\u001b[0m\n", + "\u001b[91m72: Guess: $204.77 Truth: $362.41 Error: $157.64 SLE: 0.32 Item: AC Compressor & A/C Clutch For Hyundai A...\u001b[0m\n", + "\u001b[91m73: Guess: $124.88 Truth: $344.00 Error: $219.12 SLE: 1.02 Item: House Of Troy PIN475-AB Pinnacle Collect...\u001b[0m\n", + "\u001b[93m74: Guess: $104.41 Truth: $25.09 Error: $79.32 SLE: 1.95 Item: Juno T29 WH Floating Electrical Feed Sin...\u001b[0m\n", + "\u001b[93m75: Guess: $96.17 Truth: $175.95 Error: $79.78 SLE: 0.36 Item: Sherman GO-PARTS - for 2013-2016 Toyota ...\u001b[0m\n", + "\u001b[91m76: Guess: $234.23 Truth: $132.64 Error: $101.59 SLE: 0.32 Item: Roland RPU-3 Electronic Keyboard Pedal o...\u001b[0m\n", + "\u001b[93m77: Guess: $272.58 Truth: $422.99 Error: $150.41 SLE: 0.19 Item: Rockland VMI14 12,000 Pound 12 Volt DC E...\u001b[0m\n", + "\u001b[91m78: Guess: $238.54 Truth: $146.48 Error: $92.06 SLE: 0.24 Item: Max Advanced Brakes Elite XDS Front Cros...\u001b[0m\n", + "\u001b[92m79: Guess: $126.64 Truth: $156.83 Error: $30.19 SLE: 0.05 Item: Quality-Built 11030 Premium Quality Alte...\u001b[0m\n", + "\u001b[93m80: Guess: $166.58 Truth: $251.99 Error: $85.41 SLE: 0.17 Item: Lucida LG-510 Student Classical Guitar, ...\u001b[0m\n", + "\u001b[91m81: Guess: $191.22 Truth: $940.33 Error: $749.11 SLE: 2.52 Item: Longacre 52-79800 Aluminum Turn Plates\u001b[0m\n", + "\u001b[91m82: Guess: $135.62 Truth: $52.99 Error: $82.63 SLE: 
0.86 Item: Motion Pro 08-0380 Adjustable Torque Wre...\u001b[0m\n", + "\u001b[91m83: Guess: $90.65 Truth: $219.95 Error: $129.30 SLE: 0.77 Item: Glyph Thunderbolt 3 NVMe Dock (0 GB)\u001b[0m\n", + "\u001b[93m84: Guess: $276.30 Truth: $441.03 Error: $164.73 SLE: 0.22 Item: TOYO Open Country MT Performance Radial ...\u001b[0m\n", + "\u001b[91m85: Guess: $251.23 Truth: $168.98 Error: $82.25 SLE: 0.16 Item: Razer Seiren X USB Streaming Microphone ...\u001b[0m\n", + "\u001b[93m86: Guess: $52.25 Truth: $2.49 Error: $49.76 SLE: 7.43 Item: Happy Birthday to Dad From Your Daughter...\u001b[0m\n", + "\u001b[91m87: Guess: $192.08 Truth: $98.62 Error: $93.46 SLE: 0.44 Item: Little Tikes My Real Jam First Concert S...\u001b[0m\n", + "\u001b[91m88: Guess: $146.41 Truth: $256.95 Error: $110.54 SLE: 0.31 Item: Studio M Peace and Harmony Art Pole Comm...\u001b[0m\n", + "\u001b[91m89: Guess: $114.29 Truth: $30.99 Error: $83.30 SLE: 1.64 Item: MyVolts 12V Power Supply Adaptor Compati...\u001b[0m\n", + "\u001b[93m90: Guess: $362.86 Truth: $569.84 Error: $206.98 SLE: 0.20 Item: Dell Latitude 7212 Rugged Extreme Tablet...\u001b[0m\n", + "\u001b[93m91: Guess: $115.82 Truth: $177.99 Error: $62.17 SLE: 0.18 Item: Covermates Contour Fit Car Cover - Light...\u001b[0m\n", + "\u001b[91m92: Guess: $308.70 Truth: $997.99 Error: $689.29 SLE: 1.37 Item: Westin 57-4025 Black HDX Grille Guard fi...\u001b[0m\n", + "\u001b[92m93: Guess: $208.21 Truth: $219.00 Error: $10.79 SLE: 0.00 Item: Fieldpiece JL2 Job Link Wireless App Tra...\u001b[0m\n", + "\u001b[92m94: Guess: $201.90 Truth: $225.55 Error: $23.65 SLE: 0.01 Item: hansgrohe Talis S Modern Premium Easy Cl...\u001b[0m\n", + "\u001b[93m95: Guess: $323.94 Truth: $495.95 Error: $172.01 SLE: 0.18 Item: G-Technology G-SPEED eS PRO High-Perform...\u001b[0m\n", + "\u001b[91m96: Guess: $228.78 Truth: $942.37 Error: $713.59 SLE: 1.99 Item: DreamLine SHDR-1960723L-01 Shower Door, ...\u001b[0m\n", + "\u001b[91m97: Guess: $112.99 Truth: $1.94 Error: $111.05 
SLE: 13.38 Item: Sanctuary Square Backplate Finish: Oiled...\u001b[0m\n", + "\u001b[91m98: Guess: $85.56 Truth: $284.34 Error: $198.78 SLE: 1.42 Item: Pelican Protector 1750 Long Case - Multi...\u001b[0m\n", + "\u001b[92m99: Guess: $193.30 Truth: $171.90 Error: $21.40 SLE: 0.01 Item: Brock Replacement Driver and Passenger H...\u001b[0m\n", + "\u001b[93m100: Guess: $80.33 Truth: $144.99 Error: $64.66 SLE: 0.34 Item: Carlinkit Ai Box Mini, Android 11, Multi...\u001b[0m\n", + "\u001b[91m101: Guess: $242.19 Truth: $470.47 Error: $228.28 SLE: 0.44 Item: StarDot NetCamLIVE2 YouTube Live Stream ...\u001b[0m\n", + "\u001b[93m102: Guess: $142.35 Truth: $66.95 Error: $75.40 SLE: 0.56 Item: Atomic Compatible FILXXCAR0016 16x25x5 M...\u001b[0m\n", + "\u001b[91m103: Guess: $0.00 Truth: $117.00 Error: $117.00 SLE: 22.76 Item: Bandai Awakening of S. H. s.h.figuarts s...\u001b[0m\n", + "\u001b[91m104: Guess: $267.67 Truth: $172.14 Error: $95.53 SLE: 0.19 Item: Fit System 62135G Passenger Side Towing ...\u001b[0m\n", + "\u001b[91m105: Guess: $209.43 Truth: $392.74 Error: $183.31 SLE: 0.39 Item: Black Horse Black Aluminum Exceed Runnin...\u001b[0m\n", + "\u001b[92m106: Guess: $28.64 Truth: $16.99 Error: $11.65 SLE: 0.25 Item: Dearsun Twinkle Star Color Night Light P...\u001b[0m\n", + "\u001b[92m107: Guess: $17.05 Truth: $1.34 Error: $15.71 SLE: 4.17 Item: Pokemon - Gallade Spirit Link (83/108) -...\u001b[0m\n", + "\u001b[93m108: Guess: $234.25 Truth: $349.98 Error: $115.73 SLE: 0.16 Item: Ibanez GA34STCE-NT GIO Series Classical ...\u001b[0m\n", + "\u001b[93m109: Guess: $237.03 Truth: $370.71 Error: $133.68 SLE: 0.20 Item: Set 2 Heavy Duty 12-16.5 12x16.5 12 Ply ...\u001b[0m\n", + "\u001b[93m110: Guess: $109.49 Truth: $65.88 Error: $43.61 SLE: 0.25 Item: Hairpin Table Legs 28\" Heavy Duty Hairpi...\u001b[0m\n", + "\u001b[93m111: Guess: $169.72 Truth: $229.99 Error: $60.27 SLE: 0.09 Item: Marada Racing Seat with Adjustable Slide...\u001b[0m\n", + "\u001b[93m112: Guess: $73.86 Truth: 
$9.14 Error: $64.72 SLE: 4.00 Item: Remington Industries 24UL1007STRWHI25 24...\u001b[0m\n", + "\u001b[91m113: Guess: $368.12 Truth: $199.00 Error: $169.12 SLE: 0.38 Item: Acer S3-391-6046 13.3-inch Ultrabook, In...\u001b[0m\n", + "\u001b[91m114: Guess: $209.96 Truth: $109.99 Error: $99.97 SLE: 0.41 Item: ICBEAMER 7\" RGB LED Headlights Bulb Halo...\u001b[0m\n", + "\u001b[91m115: Guess: $281.70 Truth: $570.42 Error: $288.72 SLE: 0.50 Item: R1 Concepts Front Rear Brakes and Rotors...\u001b[0m\n", + "\u001b[92m116: Guess: $236.85 Truth: $279.99 Error: $43.14 SLE: 0.03 Item: Camplux 2.64 GPM Tankless , Outdoor Port...\u001b[0m\n", + "\u001b[91m117: Guess: $123.46 Truth: $30.99 Error: $92.47 SLE: 1.85 Item: KNOKLOCK 10 Pack 3.75 Inch(96mm) Kitchen...\u001b[0m\n", + "\u001b[91m118: Guess: $134.93 Truth: $31.99 Error: $102.94 SLE: 2.00 Item: Valley Enterprises Yaesu USB FTDI CT-62 ...\u001b[0m\n", + "\u001b[92m119: Guess: $37.50 Truth: $15.90 Error: $21.60 SLE: 0.68 Item: G9 LED Light Bulbs,8W,75W 100W replaceme...\u001b[0m\n", + "\u001b[93m120: Guess: $102.70 Truth: $45.99 Error: $56.71 SLE: 0.63 Item: ZCHAOZ 4 Lights Antique White Farmhouse ...\u001b[0m\n", + "\u001b[92m121: Guess: $140.23 Truth: $113.52 Error: $26.71 SLE: 0.04 Item: Honeywell TH8320R1003 Honeywell VisionPr...\u001b[0m\n", + "\u001b[91m122: Guess: $292.72 Truth: $516.99 Error: $224.27 SLE: 0.32 Item: Patriot Exhaust H8013-1 1-7/8\" Clippster...\u001b[0m\n", + "\u001b[92m123: Guess: $192.08 Truth: $196.99 Error: $4.91 SLE: 0.00 Item: Fitrite Autopart New Front Left Driver S...\u001b[0m\n", + "\u001b[93m124: Guess: $2.54 Truth: $46.55 Error: $44.01 SLE: 6.75 Item: Technical Precision Replacement for GE G...\u001b[0m\n", + "\u001b[93m125: Guess: $247.39 Truth: $356.99 Error: $109.60 SLE: 0.13 Item: Covercraft Carhartt SeatSaver Front Row ...\u001b[0m\n", + "\u001b[91m126: Guess: $132.93 Truth: $319.95 Error: $187.02 SLE: 0.76 Item: Sennheiser SD Pro 2 (506008) - Double-Si...\u001b[0m\n", + "\u001b[91m127: 
Guess: $203.87 Truth: $96.06 Error: $107.81 SLE: 0.56 Item: Hitachi MAF0110 Mass Air Flow Sensor\u001b[0m\n", + "\u001b[92m128: Guess: $226.28 Truth: $190.99 Error: $35.29 SLE: 0.03 Item: AmScope SE305R-P-LED-PS36A 10X-30X LED C...\u001b[0m\n", + "\u001b[91m129: Guess: $104.06 Truth: $257.95 Error: $153.89 SLE: 0.81 Item: Front Left Driver Side Window Regulator ...\u001b[0m\n", + "\u001b[93m130: Guess: $119.23 Truth: $62.95 Error: $56.28 SLE: 0.40 Item: Premium Replica Hubcap Set, Fits Nissan ...\u001b[0m\n", + "\u001b[92m131: Guess: $31.29 Truth: $47.66 Error: $16.37 SLE: 0.17 Item: Excellerations Phonics Spelling Game for...\u001b[0m\n", + "\u001b[92m132: Guess: $211.22 Truth: $226.99 Error: $15.77 SLE: 0.01 Item: RC4WD BigDog Dual Axle Scale Car/Truck T...\u001b[0m\n", + "\u001b[93m133: Guess: $279.73 Truth: $359.95 Error: $80.22 SLE: 0.06 Item: Unknown Stage 2 Clutch Kit - Low Altitud...\u001b[0m\n", + "\u001b[91m134: Guess: $237.04 Truth: $78.40 Error: $158.64 SLE: 1.21 Item: 2002-2008 Dodge Ram 1500 Mopar 4X4 Emble...\u001b[0m\n", + "\u001b[91m135: Guess: $275.22 Truth: $172.77 Error: $102.45 SLE: 0.21 Item: Pro Comp Alloys Series 89 Wheel with Pol...\u001b[0m\n", + "\u001b[93m136: Guess: $224.22 Truth: $316.45 Error: $92.23 SLE: 0.12 Item: Detroit Axle - Front Rear Strut & Coil S...\u001b[0m\n", + "\u001b[92m137: Guess: $116.34 Truth: $87.99 Error: $28.35 SLE: 0.08 Item: ECCPP Rear Wheel Axle Replacement fit fo...\u001b[0m\n", + "\u001b[93m138: Guess: $291.79 Truth: $226.63 Error: $65.16 SLE: 0.06 Item: Dell Latitude E6520 Intel i7-2720QM 2.20...\u001b[0m\n", + "\u001b[91m139: Guess: $146.67 Truth: $31.49 Error: $115.18 SLE: 2.29 Item: F FIERCE CYCLE 251pcs Black Universal Mo...\u001b[0m\n", + "\u001b[93m140: Guess: $146.71 Truth: $196.00 Error: $49.29 SLE: 0.08 Item: Flash Furniture 4 Pk. 
HERCULES Series 88...\u001b[0m\n", + "\u001b[91m141: Guess: $163.45 Truth: $78.40 Error: $85.05 SLE: 0.53 Item: B&M 30287 Throttle Valve/Kickdown Cable,...\u001b[0m\n", + "\u001b[93m142: Guess: $185.01 Truth: $116.25 Error: $68.76 SLE: 0.21 Item: Gates TCK226 PowerGrip Premium Timing Be...\u001b[0m\n", + "\u001b[91m143: Guess: $243.38 Truth: $112.78 Error: $130.60 SLE: 0.58 Item: Monroe Shocks & Struts Quick-Strut 17149...\u001b[0m\n", + "\u001b[91m144: Guess: $137.19 Truth: $27.32 Error: $109.87 SLE: 2.51 Item: Feit Electric BPMR16/GU10/930CA/6 35W EQ...\u001b[0m\n", + "\u001b[92m145: Guess: $162.58 Truth: $145.91 Error: $16.67 SLE: 0.01 Item: Yellow Jacket 2806 Contractor Extension ...\u001b[0m\n", + "\u001b[92m146: Guess: $168.94 Truth: $171.09 Error: $2.15 SLE: 0.00 Item: Garage-Pro Tailgate SET Compatible with ...\u001b[0m\n", + "\u001b[92m147: Guess: $169.30 Truth: $167.95 Error: $1.35 SLE: 0.00 Item: 3M Perfect It Buffing and Polishing Kit ...\u001b[0m\n", + "\u001b[92m148: Guess: $63.52 Truth: $28.49 Error: $35.03 SLE: 0.61 Item: Chinese Style Dollhouse Model DIY Miniat...\u001b[0m\n", + "\u001b[93m149: Guess: $178.10 Truth: $122.23 Error: $55.87 SLE: 0.14 Item: Generic NRG Innovations SRK-161H Steerin...\u001b[0m\n", + "\u001b[92m150: Guess: $56.38 Truth: $32.99 Error: $23.39 SLE: 0.27 Item: Learning Resources Coding Critters Range...\u001b[0m\n", + "\u001b[91m151: Guess: $195.40 Truth: $71.20 Error: $124.20 SLE: 1.00 Item: Bosch Automotive 15463 Oxygen Sensor, OE...\u001b[0m\n", + "\u001b[93m152: Guess: $37.28 Truth: $112.75 Error: $75.47 SLE: 1.19 Item: Case of 24-2 Inch Blue Painters Tape - 6...\u001b[0m\n", + "\u001b[92m153: Guess: $116.50 Truth: $142.43 Error: $25.93 SLE: 0.04 Item: MOCA Engine Water Pump & Fan Clutch fit ...\u001b[0m\n", + "\u001b[91m154: Guess: $189.87 Truth: $398.99 Error: $209.12 SLE: 0.55 Item: SAREMAS Foot Step Bars for Hyundai Palis...\u001b[0m\n", + "\u001b[93m155: Guess: $322.89 Truth: $449.00 Error: $126.11 SLE: 0.11 Item: 
Gretsch G9210 Square Neck Boxcar Mahogan...\u001b[0m\n", + "\u001b[93m156: Guess: $239.04 Truth: $189.00 Error: $50.04 SLE: 0.05 Item: NikoMaku Mirror Dash Cam Front and Rear ...\u001b[0m\n", + "\u001b[92m157: Guess: $155.64 Truth: $120.91 Error: $34.73 SLE: 0.06 Item: Fenix HP25R v2.0 USB-C Rechargeable Head...\u001b[0m\n", + "\u001b[91m158: Guess: $292.75 Truth: $203.53 Error: $89.22 SLE: 0.13 Item: R&L Racing Heavy Duty Roll-Up Soft Tonne...\u001b[0m\n", + "\u001b[93m159: Guess: $218.50 Truth: $349.99 Error: $131.49 SLE: 0.22 Item: Garmin 010-02258-10 GPSMAP 64sx, Handhel...\u001b[0m\n", + "\u001b[92m160: Guess: $19.54 Truth: $34.35 Error: $14.81 SLE: 0.29 Item: Brown 5-7/8\" X 8-1/2\" X 3/16\" Thick Heav...\u001b[0m\n", + "\u001b[91m161: Guess: $168.49 Truth: $384.99 Error: $216.50 SLE: 0.68 Item: GAOMON PD2200 Pen Display & 20 Pen Nibs ...\u001b[0m\n", + "\u001b[92m162: Guess: $245.69 Truth: $211.00 Error: $34.69 SLE: 0.02 Item: VXMOTOR for 97-03 Ford F150/F250 Lightdu...\u001b[0m\n", + "\u001b[91m163: Guess: $397.07 Truth: $129.00 Error: $268.07 SLE: 1.25 Item: HP EliteBook 2540p Intel Core i7-640LM X...\u001b[0m\n", + "\u001b[92m164: Guess: $132.75 Truth: $111.45 Error: $21.30 SLE: 0.03 Item: Green EPX Mixing Nozzles 100-Pack-fits 3...\u001b[0m\n", + "\u001b[93m165: Guess: $147.06 Truth: $81.12 Error: $65.94 SLE: 0.35 Item: Box Partners 6 1/4 x 3 1/8\" 13 Pt. 
Manil...\u001b[0m\n", + "\u001b[91m166: Guess: $229.56 Truth: $457.08 Error: $227.52 SLE: 0.47 Item: Vixen Air 1/2\" NPT Air Ride Suspension H...\u001b[0m\n", + "\u001b[91m167: Guess: $172.56 Truth: $49.49 Error: $123.07 SLE: 1.52 Item: Smart Floor Lamp, 2700-6500K+RGBPink Mul...\u001b[0m\n", + "\u001b[92m168: Guess: $85.22 Truth: $80.56 Error: $4.66 SLE: 0.00 Item: SOZG 324mm Wheelbase Body Shell RC Car B...\u001b[0m\n", + "\u001b[92m169: Guess: $259.30 Truth: $278.39 Error: $19.09 SLE: 0.01 Item: Mickey Thompson ET Street S/S Racing Rad...\u001b[0m\n", + "\u001b[93m170: Guess: $218.84 Truth: $364.50 Error: $145.66 SLE: 0.26 Item: Pirelli 275/40R20 106W XL RFT P0 PZ4-LUX...\u001b[0m\n", + "\u001b[92m171: Guess: $421.17 Truth: $378.99 Error: $42.18 SLE: 0.01 Item: Torklift C3212 Rear Tie Down\u001b[0m\n", + "\u001b[93m172: Guess: $213.00 Truth: $165.28 Error: $47.72 SLE: 0.06 Item: Cardone 78-4226 Remanufactured Ford Comp...\u001b[0m\n", + "\u001b[91m173: Guess: $171.06 Truth: $56.74 Error: $114.32 SLE: 1.19 Item: Kidde AccessPoint 001798 Supra TouchPoin...\u001b[0m\n", + "\u001b[91m174: Guess: $157.03 Truth: $307.95 Error: $150.92 SLE: 0.45 Item: 3M Protecta 3100414 Self Retracting Life...\u001b[0m\n", + "\u001b[91m175: Guess: $120.29 Truth: $38.00 Error: $82.29 SLE: 1.29 Item: Plantronics 89435-01 Wired Headset, Blac...\u001b[0m\n", + "\u001b[91m176: Guess: $183.12 Truth: $53.00 Error: $130.12 SLE: 1.50 Item: Logitech K750 Wireless Solar Keyboard fo...\u001b[0m\n", + "\u001b[91m177: Guess: $277.02 Truth: $498.00 Error: $220.98 SLE: 0.34 Item: Olympus PEN E-PL9 Body Only with 3-Inch ...\u001b[0m\n", + "\u001b[91m178: Guess: $175.52 Truth: $53.99 Error: $121.53 SLE: 1.36 Item: Beck/Arnley 051-6066 Hub & Bearing Assem...\u001b[0m\n", + "\u001b[93m179: Guess: $235.75 Truth: $350.00 Error: $114.25 SLE: 0.16 Item: Eibach Pro-Kit Performance Springs E10-6...\u001b[0m\n", + "\u001b[91m180: Guess: $163.29 Truth: $299.95 Error: $136.66 SLE: 0.37 Item: LEGO DC Batman 1989 
Batwing 76161 Displa...\u001b[0m\n", + "\u001b[91m181: Guess: $228.25 Truth: $94.93 Error: $133.32 SLE: 0.76 Item: Kingston Brass KS3608PL Restoration 4-In...\u001b[0m\n", + "\u001b[93m182: Guess: $248.12 Truth: $379.00 Error: $130.88 SLE: 0.18 Item: Polk Vanishing Series 265-LS In-Wall 3-W...\u001b[0m\n", + "\u001b[93m183: Guess: $233.00 Truth: $299.95 Error: $66.95 SLE: 0.06 Item: Spec-D Tuning LED Projector Headlights G...\u001b[0m\n", + "\u001b[92m184: Guess: $63.51 Truth: $24.99 Error: $38.52 SLE: 0.83 Item: RICHMOND & FINCH Airpod Pro Case, Green ...\u001b[0m\n", + "\u001b[91m185: Guess: $151.53 Truth: $41.04 Error: $110.49 SLE: 1.66 Item: LFA Industries 43B-5A-33JT 1/16-1/2-1.5-...\u001b[0m\n", + "\u001b[91m186: Guess: $137.65 Truth: $327.90 Error: $190.25 SLE: 0.75 Item: SAUTVS LED Headlight Assembly for Slings...\u001b[0m\n", + "\u001b[93m187: Guess: $59.58 Truth: $10.99 Error: $48.59 SLE: 2.62 Item: 2 Pack Combo Womens Safety Glasses Impac...\u001b[0m\n", + "\u001b[92m188: Guess: $16.92 Truth: $14.99 Error: $1.93 SLE: 0.01 Item: Arepa - Venezuelan cuisine - Venezuela P...\u001b[0m\n", + "\u001b[93m189: Guess: $141.85 Truth: $84.95 Error: $56.90 SLE: 0.26 Item: Schlage Lock Company KS23D2300 Padlock, ...\u001b[0m\n", + "\u001b[91m190: Guess: $199.34 Truth: $111.00 Error: $88.34 SLE: 0.34 Item: Techni Mobili White Sit to Stand Mobile ...\u001b[0m\n", + "\u001b[92m191: Guess: $148.64 Truth: $123.73 Error: $24.91 SLE: 0.03 Item: Special Lite Products Contemporary Wall ...\u001b[0m\n", + "\u001b[91m192: Guess: $136.37 Truth: $557.38 Error: $421.01 SLE: 1.97 Item: Tascam DP-24SD 24-Track Digital Portastu...\u001b[0m\n", + "\u001b[93m193: Guess: $149.52 Truth: $95.55 Error: $53.97 SLE: 0.20 Item: Glow Lighting 636CC10SP Vista Crystal Fl...\u001b[0m\n", + "\u001b[92m194: Guess: $152.67 Truth: $154.00 Error: $1.33 SLE: 0.00 Item: Z3 Wind Deflector, Smoke Tint, Lexan, Wi...\u001b[0m\n", + "\u001b[93m195: Guess: $241.31 Truth: $198.99 Error: $42.32 SLE: 0.04 Item: 
Olympus E-20 5MP Digital Camera w/ 4x Op...\u001b[0m\n", + "\u001b[91m196: Guess: $156.84 Truth: $430.44 Error: $273.60 SLE: 1.01 Item: PHYNEDI 1:1000 World Trade Center (1973-...\u001b[0m\n", + "\u001b[92m197: Guess: $40.71 Truth: $45.67 Error: $4.96 SLE: 0.01 Item: YANGHUAN Unstable Unicorns Adventure Car...\u001b[0m\n", + "\u001b[92m198: Guess: $232.71 Truth: $249.00 Error: $16.29 SLE: 0.00 Item: Interlogix NX-1820E NetworX Touch Screen...\u001b[0m\n", + "\u001b[93m199: Guess: $116.02 Truth: $42.99 Error: $73.03 SLE: 0.96 Item: Steering Damper,Universal Motorcycle Han...\u001b[0m\n", + "\u001b[93m200: Guess: $118.35 Truth: $181.33 Error: $62.98 SLE: 0.18 Item: Amprobe TIC 410A Hot Stick Attachment\u001b[0m\n", + "\u001b[92m201: Guess: $35.10 Truth: $6.03 Error: $29.07 SLE: 2.68 Item: MyCableMart 3.5mm Plug/Jack, 4 Conductor...\u001b[0m\n", + "\u001b[93m202: Guess: $76.80 Truth: $29.99 Error: $46.81 SLE: 0.85 Item: OtterBox + Pop Symmetry Series Case for ...\u001b[0m\n", + "\u001b[91m203: Guess: $424.23 Truth: $899.00 Error: $474.77 SLE: 0.56 Item: Dell XPS X8700-1572BLK Desktop ( Intel C...\u001b[0m\n", + "\u001b[91m204: Guess: $221.11 Truth: $399.99 Error: $178.88 SLE: 0.35 Item: Franklin Iron Works Sperry Industrial Br...\u001b[0m\n", + "\u001b[91m205: Guess: $112.15 Truth: $4.66 Error: $107.49 SLE: 8.97 Item: Avery Legal Dividers, Standard Collated ...\u001b[0m\n", + "\u001b[93m206: Guess: $202.44 Truth: $261.41 Error: $58.97 SLE: 0.06 Item: Moen 8346 Commercial Posi-Temp Pressure ...\u001b[0m\n", + "\u001b[91m207: Guess: $245.08 Truth: $136.97 Error: $108.11 SLE: 0.33 Item: Carlisle Versa Trail ATR All Terrain Rad...\u001b[0m\n", + "\u001b[93m208: Guess: $133.43 Truth: $79.00 Error: $54.43 SLE: 0.27 Item: SUNWAYFOTO 44mm Tripod Ball Head Arca Co...\u001b[0m\n", + "\u001b[91m209: Guess: $183.42 Truth: $444.99 Error: $261.57 SLE: 0.78 Item: NanoBeam AC NBE-5AC-Gen2-US 4 Units 5GHz...\u001b[0m\n", + "\u001b[93m210: Guess: $252.15 Truth: $411.94 Error: $159.79 
SLE: 0.24 Item: WULF 4\" Front 2\" Rear Leveling Lift Kit ...\u001b[0m\n", + "\u001b[92m211: Guess: $187.53 Truth: $148.40 Error: $39.13 SLE: 0.05 Item: Alera ALEVABFMC Valencia Series Mobile B...\u001b[0m\n", + "\u001b[91m212: Guess: $34.48 Truth: $244.99 Error: $210.51 SLE: 3.75 Item: YU-GI-OH! Ignition Assault Booster Box\u001b[0m\n", + "\u001b[91m213: Guess: $175.49 Truth: $86.50 Error: $88.99 SLE: 0.49 Item: 48\" x 36\" Extra-Large Framed Magnetic Bl...\u001b[0m\n", + "\u001b[93m214: Guess: $360.92 Truth: $297.95 Error: $62.97 SLE: 0.04 Item: Dell Latitude D620 Renewed Notebook PC\u001b[0m\n", + "\u001b[93m215: Guess: $545.83 Truth: $399.99 Error: $145.84 SLE: 0.10 Item: acer Aspire 5 Laptop, AMD Ryzen 3 5300U ...\u001b[0m\n", + "\u001b[91m216: Guess: $245.38 Truth: $599.00 Error: $353.62 SLE: 0.79 Item: Elk 31080/6RC-GRN 30 by 6-Inch Viva 6-Li...\u001b[0m\n", + "\u001b[92m217: Guess: $118.53 Truth: $105.99 Error: $12.54 SLE: 0.01 Item: Barbie Top Model Doll\u001b[0m\n", + "\u001b[91m218: Guess: $251.69 Truth: $689.00 Error: $437.31 SLE: 1.01 Item: Danby Designer 20-In. 
Electric Range wit...\u001b[0m\n", + "\u001b[91m219: Guess: $216.70 Truth: $404.99 Error: $188.29 SLE: 0.39 Item: FixtureDisplays® Metal Truss Podium Doub...\u001b[0m\n", + "\u001b[92m220: Guess: $188.75 Truth: $207.76 Error: $19.01 SLE: 0.01 Item: ACDelco 13597235 GM Original Equipment A...\u001b[0m\n", + "\u001b[91m221: Guess: $283.71 Truth: $171.82 Error: $111.89 SLE: 0.25 Item: EBC S1KF1135 Stage-1 Premium Street Brak...\u001b[0m\n", + "\u001b[92m222: Guess: $236.96 Truth: $293.24 Error: $56.28 SLE: 0.05 Item: FXR Men's Boost FX Jacket (Black/Orange/...\u001b[0m\n", + "\u001b[93m223: Guess: $261.90 Truth: $374.95 Error: $113.05 SLE: 0.13 Item: SuperATV Scratch Resistant 3-in-1 Flip W...\u001b[0m\n", + "\u001b[92m224: Guess: $105.51 Truth: $111.99 Error: $6.48 SLE: 0.00 Item: SBU 3 Layer All Weather Mini Van Car Cov...\u001b[0m\n", + "\u001b[92m225: Guess: $21.47 Truth: $42.99 Error: $21.52 SLE: 0.45 Item: 2 Pack Outdoor Brochure Holder Advertisi...\u001b[0m\n", + "\u001b[91m226: Guess: $231.11 Truth: $116.71 Error: $114.40 SLE: 0.46 Item: Monroe Shocks & Struts Quick-Strut 17158...\u001b[0m\n", + "\u001b[93m227: Guess: $186.96 Truth: $118.61 Error: $68.35 SLE: 0.20 Item: Elements of Design Magellan EB235AL Thre...\u001b[0m\n", + "\u001b[93m228: Guess: $193.09 Truth: $147.12 Error: $45.97 SLE: 0.07 Item: GM Genuine Parts 15-62961 Air Conditioni...\u001b[0m\n", + "\u001b[92m229: Guess: $117.78 Truth: $119.99 Error: $2.21 SLE: 0.00 Item: Baseus 17-in-1 USB C Docking Station to ...\u001b[0m\n", + "\u001b[91m230: Guess: $159.05 Truth: $369.98 Error: $210.93 SLE: 0.71 Item: Whitehall™ Personalized Whitehall Capito...\u001b[0m\n", + "\u001b[93m231: Guess: $214.39 Truth: $315.55 Error: $101.16 SLE: 0.15 Item: Pro Circuit Works Pipe PY05250 for 02-19...\u001b[0m\n", + "\u001b[91m232: Guess: $298.75 Truth: $190.99 Error: $107.76 SLE: 0.20 Item: HYANKA 15 \"1200W Professional DJ Speaker...\u001b[0m\n", + "\u001b[91m233: Guess: $33.15 Truth: $155.00 Error: $121.85 SLE: 
2.31 Item: Bluetooth X6BT Card Reader Writer Encode...\u001b[0m\n", + "\u001b[93m234: Guess: $276.34 Truth: $349.99 Error: $73.65 SLE: 0.06 Item: AIRAID Cold Air Intake System by K&N: In...\u001b[0m\n", + "\u001b[93m235: Guess: $168.54 Truth: $249.99 Error: $81.45 SLE: 0.15 Item: Bostingner Shower Faucets Sets Complete,...\u001b[0m\n", + "\u001b[91m236: Guess: $129.21 Truth: $42.99 Error: $86.22 SLE: 1.18 Item: PIT66 Front Bumper Turn Signal Lights, C...\u001b[0m\n", + "\u001b[93m237: Guess: $67.36 Truth: $17.99 Error: $49.37 SLE: 1.64 Item: Caseology Bumpy Compatible with Google P...\u001b[0m\n", + "\u001b[91m238: Guess: $181.90 Truth: $425.00 Error: $243.10 SLE: 0.71 Item: Fleck 2510 Timer Mechanical Filter Contr...\u001b[0m\n", + "\u001b[92m239: Guess: $295.37 Truth: $249.99 Error: $45.38 SLE: 0.03 Item: Haloview MC7108 Wireless RV Backup Camer...\u001b[0m\n", + "\u001b[91m240: Guess: $43.78 Truth: $138.23 Error: $94.45 SLE: 1.29 Item: Schmidt Spiele - Manhattan\u001b[0m\n", + "\u001b[91m241: Guess: $218.39 Truth: $414.99 Error: $196.60 SLE: 0.41 Item: Corsa 14333 Tip Kit (Ford Mustang GT)\u001b[0m\n", + "\u001b[92m242: Guess: $191.11 Truth: $168.28 Error: $22.83 SLE: 0.02 Item: Hoshizaki FM116A Fan Motor Kit 1\u001b[0m\n", + "\u001b[92m243: Guess: $169.41 Truth: $199.99 Error: $30.58 SLE: 0.03 Item: BAINUO Antler Chandelier Lighting,6 Ligh...\u001b[0m\n", + "\u001b[91m244: Guess: $212.36 Truth: $126.70 Error: $85.66 SLE: 0.26 Item: DNA MOTORING HL-OH-FEXP06-SM-AM Smoke Le...\u001b[0m\n", + "\u001b[93m245: Guess: $64.28 Truth: $5.91 Error: $58.37 SLE: 5.04 Item: Wera Stainless 3840/1 TS 2.5mm Hex Inser...\u001b[0m\n", + "\u001b[93m246: Guess: $147.69 Truth: $193.06 Error: $45.37 SLE: 0.07 Item: Celestron - PowerSeeker 127EQ Telescope ...\u001b[0m\n", + "\u001b[92m247: Guess: $251.73 Truth: $249.99 Error: $1.74 SLE: 0.00 Item: NHOPEEW 10.1inch Android Car Radio Carpl...\u001b[0m\n", + "\u001b[91m248: Guess: $168.36 Truth: $64.12 Error: $104.24 SLE: 0.91 Item: 
Other Harmonica (Suzuki-2Timer24- A)\u001b[0m\n", + "\u001b[91m249: Guess: $279.87 Truth: $114.99 Error: $164.88 SLE: 0.78 Item: Harley Air Filter Venturi Intake Air Cle...\u001b[0m\n", + "\u001b[91m250: Guess: $141.66 Truth: $926.00 Error: $784.34 SLE: 3.50 Item: Elite Screens Edge Free Ambient Light Re...\u001b[0m\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA+0AAAK9CAYAAABRvo1QAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAz51JREFUeJzs3Xd4leX9x/H3ScgEkhCy2MOF4hZF3BsXWsWBguC2rmrVWrX6U1vraK111NbRqqi4R7Vu3Av3VhQVURTIICQBEkLG8/vjCckJM4EkJ+P9uq5cnNznOc/5HhJCvudz3/cTCYIgQJIkSZIktTtxsS5AkiRJkiStnE27JEmSJEntlE27JEmSJEntlE27JEmSJEntlE27JEmSJEntlE27JEmSJEntlE27JEmSJEntlE27JEmSJEntlE27JEmSJEntlE27JHVQs2bNIhKJcNddd8W6lC7lsssu8+9cwp9BktRWbNoldVmff/45hx12GIMGDSI5OZl+/fqx9957c9NNN7XJ8w8ePJhIJFL/kZOTw84778zjjz/eJs/fGqJfz/Ifv/71r2NdXpt48MEHmTBhAhtssAGRSITddtttpcctWrSISy+9lH333ZfMzMw1Nj/Tp09n3333pUePHmRmZnLMMcdQWFjY7Pq+//57kpOTiUQifPDBB43u22233Vb59UtISFjjuZd/fEpKCptvvjnXX389tbW1jY5d1vBFIhGuuOKKlZ5v/PjxRCIRevTo0Wi8traWu+++m5EjR5KZmUnPnj3ZcMMNmThxIu+88079ca+++iqRSIRHHnlktXW3xfftunz9Fi1axNlnn03//v1JSkpi44035l//+tcKx73++uscdNBBDBgwgOTkZPLy8th333156623mvQ8xx577Ap/19EikQhnnHHGas/xzDPPcNlllzXp+VrK+++/zxlnnMHw4cPp3r07AwcO5IgjjmDGjBkrHLu6r/Xee+/dpOdbuHAh559/PkOGDCEpKYl+/fpx2GGHUV5eXn/MV199xc4770zPnj0ZMWIE06ZNW+E81113HcOHD6e6unrtX7ykLqFbrAuQpFh4++232X333Rk4cCAnnXQSeXl5zJ49m3feeYcbbriBM888s03q2HLLLTn33HMBmDNnDrfeeiuHHnoo//rXv9bYLAwaNIiKioomNVNtae+992bixIkrjG+44YYxqKblVVVVUVlZucr7//Wvf/Hhhx+y7bbbMn/+/FUeV1RUxB//+EcGDhzIFltswauvvrrKY3/++Wd22WUX0tPTufLKK1m0aBHXXnstn3/+Oe+99x6JiYlNrv+3v/0t3bp1W+lr+MMf/sCJJ57YaGzx4sX8+te/Zp999mnS+fv3789VV11V/xrvu+8+fvvb31JYWMif//znFY5PTk7m/vvv5+KLL17heZ944gmSk5NXeMxvfvMbbr75Zg4++GDGjx9Pt27d+Oabb3j22WcZOnQo22+/fZNqjdaa37fr8vWrqalh9OjRfPDBB5x++ulssMEGPP/885x22mksWLCAiy66qP7YGTNmEBcXx69//Wvy8vJYsGAB9957L7vssgtPP/00++677zq/lmgr+xn0zDPPcPPNN7dp437NNdfw1ltvcfjhh7P55pszb9
48/vGPf7D11lvzzjvvsOmmm9Yfe88996zw+A8++IAbbrihSd/jpaWl7Lrrrvz888+cfPLJrL/++hQWFvLGG29QWVlJamoqNTU1HHrooWRmZvLXv/6VJ598koMPPpjvvvuOtLQ0AAoKCvjjH//IQw89RLdu/jouaQ0CSeqC9t9//yA7OztYsGDBCvfl5+e3yHNUVVUFlZWVq7x/0KBBwQEHHNBobO7cuUH37t2DDTfccK3P25oqKiqCmpqaVd4PBKeffvpanXvx4sUrHW+J17to0aJ1enxlZWVwzjnnBBkZGQEQAEFycnKw8847B999912jY3/66af6v6Phw4cHu+6660rPuWTJkmDu3LlBEATB+++/HwDBnXfeudJjTz311CAlJSX48ccf68emTp0aAMGtt97a5Nfx3HPPBYmJicHFF18cAMH777+/xsfcc889ARBMmTJljcfuuuuuwfDhwxuNVVRUBIMGDQp69uwZVFdX14//8MMPARAceuihARB88sknjR43ZcqUICEhIRgzZkzQvXv3+vF58+YFkUgkOOmkk1Z4/tra2kb/fl955ZUACB5++OHV1r0u37dNsS5fv4ceeigAgv/85z+NxseOHRskJyev8efV4sWLg9zc3GD06NFrrHPSpEmN/q6X15S/p9NPPz1o618v33rrrRV+RsyYMSNISkoKxo8fv8bHn3DCCUEkEglmz569xmNPPfXUICMjI5g5c+Yqj5k+fXoA1H+9Fy9eHKSkpATPPfdco+ccM2bMGp9PkoIgCJweL6lL+v777xk+fDgZGRkr3JeTk1N/e9NNN2X33Xdf4Zja2tr6KZHQMNX32muv5frrr2e99dYjKSmJr776qll15eXlsfHGG/PDDz+s8byrWk/69ddfc8QRR5CdnU1KSgobbbQRf/jDHxod88svv3D88ceTm5tLUlISw4cP54477mh0zLKpxQ888AAXX3wx/fr1IzU1lbKysma9ppXZbbfd2HTTTfnwww/ZZZddSE1N5aKLLlrj3+PLL7/MzjvvTPfu3cnIyODggw9m+vTpjc592WWXEYlE+Oqrrzj66KPp1asXO+200zrVe+WVV3LdddcxceJEjj76aH7zm9/wl7/8hZSUFAoKChodO2DAAOLi1vzfa1JSEnl5eU16/kcffZQDDzyQgQMH1o/ttddebLjhhjz00ENNOkdVVRVnnXUWZ511Fuutt16THgNw33330b17dw4++OAmPyZacnIy2267LQsXLlzh7wpg1KhRDBkyhPvuu6/R+JQpU+qXDkT74YcfCIKAHXfccYVzLVtm0t6sy9fvjTfeAGDcuHGNxseNG8eSJUt44oknVvv41NRUsrOzKSkpWbviV2P5n0HHHnssN998M9B4GvoyDzzwANtssw09e/YkLS2NzTbbjBtuuGGd69hhhx1WmK2wwQYbMHz48BV+PiyvsrKSRx99lF133ZX+/fuv9tiSkhLuvPNOTj75ZIYMGcLSpUtXOmOloqICgF69egHh1yAlJaV++vxHH33ElClTuO6665r8GiV1bc7HkdQlDRo0iGnTpvHFF180mjq5vCOPPJLLLruMefPmNWqw3nzzTebMmbPCL9J33nknS5Ys4eSTTyYpKWmFhmNNqqqqmD17Nr17917jeZdfIwzw2WefsfPOO5OQkMDJJ5/M4MGD+f777/nf//5XPzU5Pz+f7bffvn59anZ2Ns8++ywnnHACZWVlnH322Y3O+ac//YnExETOO+88Kisr1zgVe8mSJRQVFa0wnpaW1uix8+fPZ7/99mPcuHFMmDCB3Nzc1b7eF198kf3224+hQ4dy2WWXUVFRwU033cSOO+7IRx99xODBgxs93+GHH84GG2zAlVdeSRAEQPgL+sKFC1db/zJZWVn1t59++mn23ntvbrjhBi677DIGDx7Mscce2ybLKH755RcKCgoYMWLECvdtt912PPPMM006z/XXX8+CBQu4+OKLeeyxx5r0mMLCQqZOnc
qRRx5J9+7dm1V3tGXN3creJAM46qijuPfee7n66quJRCIUFRXxwgsvcM899/Dcc881OnbQoEEAPPzwwxx++OGkpqaudV3RmvJ9W15e3mjd8qrEx8fXN2zr+vWrrKwkPj5+hX93y173hx9+yEknndTovrKyMpYuXUpRURF33303X3zxRaNp9Guysr+HpjjllFOYM2cOU6dOXWEa+tSpUznqqKPYc889ueaaa4Bwnf9bb73FWWedBYRvhhYXFzfpudLT01e7NCgIAvLz8xk+fPhqz/PMM89QUlLC+PHj1/icb775JkuWLGH99dfnsMMO47///S+1tbWMGjWKm2++mS233BIIl1Skp6dz2WWX8Zvf/IaHHnqIsrIytt56ayBc3nHGGWew/vrrN+m1SpLT4yV1SS+88EIQHx8fxMfHB6NGjQrOP//84Pnnnw+WLl3a6LhvvvkmAIKbbrqp0fhpp50W9OjRIygvLw+CoGGqb1paWlBQUNCkGgYNGhTss88+QWFhYVBYWBh8+umnwbhx4wIgOPPMM9d43mX3RU+p3mWXXYKePXs2moYbBOG04WVOOOGEoE+fPkFRUVGjY8aNGxekp6fXv6ZlU4uHDh1aP7Ym1E0dX9nH/fffX3/crrvuGgDBLbfcstLXtLLXu+WWWwY5OTnB/Pnz68c+/fTTIC4uLpg4cWL92KWXXhoAwVFHHbVCfXfeeedqa4z+iDZy5Mhgxx13rD//qqaxL2910+OjrW56/LL77r777hXu+93vfhcAwZIlS1Z7/rlz5wY9e/asn4q97O9hTdPjb7rppgAInnnmmTW+hiAIv67Dhg2r/57++uuv62tcfinIsq/1X//61+CLL74IgOCNN94IgiAIbr755qBHjx7B4sWLVzple+LEiQEQ9OrVKzjkkEOCa6+9Npg+ffoK9TRnenxTvm+XfW+t6WPQoEH1j1nXr9/f/va3Rn83y1xwwQUBEBx44IErPGb06NH1tSQmJgannHJKUFFRsdq/gyAIp8ev6bVFT49f2c+gVU2PP+uss4K0tLRGSySWt+x8Tfl45ZVXVvtali3rWH5ZwfLGjh0bJCUlrXSp1PKuu+66AAh69+4dbLfddsGUKVOCf/7zn0Fubm7Qq1evYM6cOfXH3nfffUFKSkoABPHx8cG1114bBEG47CM3NzcoLS1d4/NJ0jIm7ZK6pL333ptp06Zx1VVX8fzzzzNt2jT+8pe/kJ2dzb///W8OOuggIExMttxySx588MH6XZNramp45JFHGDNmDCkpKY3OO3bsWLKzs5tcxwsvvNDo+Pj4eI455pj6JKo55y0sLOT111/nrLPOajQNF6ifohoEAY8++ihHHHEEQRA0StRGjx7NAw88wEcffdRo6vGkSZNWeJ2rc/DBB690h+nNNtus0edJSUkcd9xxKz3H8q937ty5fPLJJ5x//vmNZi9svvnm7L333itNK1e2kd/o0aOZOnVqk1/LMscddxy//vWv2XfffamtrSU5OZna2tomTYNfV8um2iYlJa1w37JN2ioqKlZ6/zK///3vGTp06AqbzK3JfffdR3Z2dpN31YZwecby36sHHXQQ//nPf1b5mOHDh7P55ptz//33s9NOO3Hfffdx8MEHrzJFv/POO9luu+244447ePzxx3n88cc577zz2GOPPbj77rvp169fk+tdpinftxMnTmzSUovofy/r+vU7+uij+eMf/8jxxx/PzTffzAYbbMALL7zAP//5z0bnj3b11Vdz7rnnMnv2bCZPnszSpUubvEN5cnIy//vf/1Z6X3O+D5aXkZHB4sWLmTp16io3xMvLy2vyv88ttthilfd9/fXXnH766YwaNYpJkyat8riysjKefvpp9t9//1XOAom2aNEiIPx5+tJLL9XvtL/VVlvVp+3LroRw1FFHse+++/LNN98wZMgQcnNzKS8v5/e//z1//vOf6dGjB5dffjmTJ0+uv33IIYc06bVL6nps2iV1Wdtuuy2PPf
YYS5cu5dNPP+Xxxx/n73//O4cddhiffPIJm2yyCRBOkb/ooov45Zdf6NevH6+++ioFBQUceeSRK5xzyJAhzaph5MiRXHHFFUQiEVJTU9l4441X+stjU847c+ZMgNVO9y8sLKSkpITbbruN2267baXHLL/uuLmvqX///uy1115rPK5fv36rnGq//HP++OOPAGy00UYrHLvxxhvz/PPPs3jx4kZTuFdWd58+fejTp88aa1veKaecQmZmJjfddBPTpk1j6tSpXHPNNZxyyilceumlzXpTo7mWnXtla2eXLFnS6JiVeeedd7jnnnt46aWXmvUmw8yZM5k2bRpnnHFGs3a3Hjx4MLfffju1tbV8//33/PnPf6awsHClu8BHO/roo/nb3/7Gb3/7W95+++3VTueOi4vj9NNP5/TTT2f+/Pm89dZb3HLLLTz77LOMGzeufh14czTl+3bo0KEMHTq0Wedd169fXl4eTz75JMccc0z97uZpaWncdNNNTJo0aaWXaFs2TRtgwoQJbL311hx77LFrvPQdhG8cNuXfb3OddtppPPTQQ+y3337069ePffbZhyOOOKJRA5+cnLzOzz1v3jwOOOAA0tPTeeSRR4iPj1/lsY8++ihLlixp0tR4aPg6jRkzptHf+/bbb8+QIUN4++23Gx3fq1evRlcyuOqqq8jJyeG4447jjjvu4JZbbmHKlCnMmjWLI488kq+++sop85JWyqZdUpeXmJjItttuy7bbbsuGG27Icccdx8MPP8yll14KhE37hRdeyMMPP8zZZ5/NQw89RHp6+krTouY2b1lZWU36JbWlmsJl6+AnTJiwygRq8803b5XnXt7qztsSz7myc1RUVFBaWtqkxy+/Sdzhhx/O4YcfzmWXXcaiRYtYvHgx11xzDSUlJdxyyy3rXO+qLHuTYe7cuSvcN3fuXDIzM1ebsp9//vnsvPPODBkyhFmzZgENa5bnzp3LTz/9tMLMDKB+Y7imNjTLdO/evdH39I477sjWW2/NRRddxI033rjKxx111FFceOGFnHTSSfTu3bvJl5jr3bs3Bx10EAcddBC77bYbr732Gj/++GP92veWtGjRovq0dXXi4+PrZxus69cPYJdddmHmzJl8/vnnLF68mC222II5c+YAa74kXWJiIgcddBBXX301FRUVrfoG0+rk5OTwySef8Pzzz/Pss8/y7LPPcueddzJx4kQmT54MhLOYmnrt+szMzBXe9CstLWW//fajpKSEN954g759+672HFOmTCE9PZ0DDzywSc+57HzR+28sk5OTw4IFC1b52FmzZvG3v/2NF154gbi4OO6//35OOeUU9thjDwAmT55cv+mnJC3Ppl2SoizbLCr6F+whQ4aw3Xbb1U+Rf+yxx/jVr361xl+029qyBPCLL75Y5THZ2dn07NmTmpqaVknTWsuyBuybb75Z4b6vv/6arKysJm2U9uCDD65ySv7ygrrN61Zm00035dhjj+WXX37hkUceadWmvV+/fmRnZ/PBBx+scN97773XKFVdmZ9++okff/xxpTMPDjroINLT01e6s/h9993Heuutt1bXPI+2+eabM2HCBG699VbOO++8lb5BADBw4EB23HFHXn31VU499dS1unb1iBEjeO2115g7d26rNO3XXnstl19++RqPGzRoUP0bJOv69VsmPj6+0bEvvvgiQJP+HVdUVBAEAQsXLmz1pj16t/jlJSYmMmbMGMaMGUNtbS2nnXYat956K5dccgnrr78+s2fPbvLMnldeeYXddtut/vMlS5YwZswYZsyYwYsvvlg/U2pV5s6dyyuvvMKxxx7b5J/l22yzDRBuLri8OXPmMGzYsFU+9rzzzuOggw6qX14xZ86cRm8q9O3bd6XnlSSwaZfURS37hW/5XzCXrY1efhr2kUceybnnnssdd9xBUVHRSqfGx1p2dja77LILd9xxB+ecc06j5igIAiKRCPHx8YwdO5b77rtvpTvnFxYWNmtNflvp06
cPW265JZMnT+bCCy+sX0LwxRdf8MILLzBhwoQmnWdt17QvWLCgfjfwaFVVVW2SXI4dO5bJkycze/ZsBgwYAMBLL73EjBkz+O1vf9uonu+//5709PT6hPe2225bYcfzl19+mZtuuolrr712pY3Gxx9/zPTp07nkkktapP7zzz+fu+++m+uuu47rr79+lcddccUVvPLKK6v99zVv3jyKi4tXaMqWLl1avwSgtaYYr82adli3r9/KFBYWcs0117D55ps3atoLCgpWuORdSUkJjz76KAMGDGiTy+Ete/OspKSk0VKf+fPnN7oqRlxcXP2snmVLB9Z2TXtNTQ1HHnkk06ZN44knnmDUqFFrfPwDDzxAbW3tKmeSrOxrsdFGG7HFFlvwxBNPUFRUVH+FiRdeeIHZs2ev8moSr7zyCs888wxff/11/Vhubm6jz6dPn+6adkmrZNMuqUs688wzKS8v55BDDmHYsGEsXbqUt99+mwcffJDBgwevkMYeccQRnHfeeZx33nlkZma225T6xhtvZKeddmLrrbeuv5bwrFmzePrpp/nkk0+AcJOqV155hZEjR3LSSSexySabUFxczEcffcSLL77Y5EsurcqMGTO49957VxjPzc1dp42s/vrXv7LffvsxatQoTjjhhPpLvi27tFJTrO2a9p122omdd96ZffbZh59++ql+HfVzzz3H73//+0bHvv7667z++utA2FwtXry4fnOqXXbZhV122aX+2H/84x+UlJTUT3X+3//+x88//wyE36Pp6ekAXHTRRTz88MPsvvvunHXWWSxatIi//vWvbLbZZo2+V3/55Rc23nhjJk2aVH/t7JVNM1+WrO+6664rvRTZlClTgOZPjV+VTTbZhP33359///vfXHLJJStc0nCZXXfdlV133XW15/r555/Zbrvt2GOPPdhzzz3Jy8ujoKCA+++/n08//ZSzzz670eX6IFy7HN0gLTNp0qT6Jrop37drs6Yd1u3rB+Hfy6hRo1h//fWZN28et912G4sWLeKpp55qtE/BfvvtR//+/Rk5ciQ5OTn89NNP3HnnncyZM4cHH3yw2XWvjWVp9G9+8xtGjx5NfHw848aN48QTT6S4uJg99tiD/v378+OPP3LTTTex5ZZbsvHGGwNrv6b93HPP5cknn2TMmDEUFxev8HVc2Zt6U6ZMoW/fvo3S+mir+lr8/e9/Z++992annXbilFNOobS0lOuuu44NN9yQU089dYXz1NTUcPbZZ/O73/2u0Ruphx12GOeffz7Z2dn8+OOPfP755/X/7iRpBTHdu16SYuTZZ58Njj/++GDYsGFBjx49gsTExGD99dcPzjzzzCA/P3+lj9lxxx0DIDjxxBNXuC/68lVNNWjQoBUug9Wc867scktBEARffPFFcMghhwQZGRlBcnJysNFGGwWXXHJJo2Py8/OD008/PRgwYECQkJAQ5OXlBXvuuWdw22231R/T1MtlRWM1l2iKvvTZrrvuGgwfPrxZrzcIguDFF18MdtxxxyAlJSVIS0sLxowZE3z11VeNjll2Wa7CwsIm170mjzzySDBmzJigX79+QXx8fJCYmBhsuOGGwRVXXLHCZQJXd1mwSy+9tNGxgwYNWuWxP/zwQ6Njv/jii2CfffYJUlNTg4yMjGD8+PHBvHnzGh2z7O9v0qRJq309q7vkW01NTdCvX79g6623bvLfzzKr+roGQRC8+uqrjf4OmvpvZvlLvpWVlQU33HBDMHr06KB///5BQkJC0LNnz2DUqFHB7bff3ujyhsu+h1f1sewyak39vl0X6/L1++1vfxsMHTo0SEpKCrKzs4Ojjz46+P7771d4jn/84x/BTjvtFGRlZQXdunULsrOzgzFjxgSvv/56k2pc2eX1otGES75VV1cHZ555ZpCdnR1EIpH6y7898sgjwT777BPk5OQEiYmJwcCBA4NTTjklmDt3bpNqW51ll5Bc1cfyvv766wAIzjnnnFWec3X/lqZOnRpsv/
32QXJycpCZmRkcc8wxq3wdN998c9C/f/9g8eLFjcarqqqCc845J8jKygoGDRoUTJ48uXkvWlKXEgmC1SzakyRJjVx22WUMHjyYY489NtalSJKkLqD1LzArSZIkSZLWimvaJUlqht12263RBluSJEmtyenxkiRJkiS1U06PlyRJkiSpnbJplyRJkiSpnbJplyRJkiSpnXIjOqC2tpY5c+bQs2dPIpFIrMuRJEmSJHVyQRCwcOFC+vbtS1zcqvN0m3Zgzpw5DBgwINZlSJIkSZK6mNmzZ9O/f/9V3m/TDvTs2RMI/7LS0tJiXI0kSZIkqbOoqoVni+HlBbDs0m3xEdg5oYxjNx9Q34+uik071E+JT0tLs2mXJEmSJLWImRUweR7Mq4KEHuHYoGSYlAc9K+FYWOMSbZt2SZIkSZJaUFUtPFEEL0al690icGBvGJ0JcREoq2zauWzaJUmSJElqId/Xpev5SxvGBifDsXnQJ6n557NplyRJkiRpHS2thf8WNV673i0CB2XB3r3CdH1t2LRLkiRJkrQOvi0P0/XCqoaxoSkwKRfy1iJdj2bTLkmSJEnSWqishccL4ZWShrGECPwqC/ZYh3Q9mk27JEmSJEnN9E053D0PiqLS9fVSwp3hcxNb7nls2iVJkiRJaqIlNfBYEbxW0jCWEIFDsmH3jJZJ16PZtEuSJEmS1ATTF8M9+TA/Kl3fIAUm5kFOC6br0WzaJUmSJElajSU18EghvFHaMJYUB4dkwW4ZEGnhdD2aTbskSZIkSavw5WK4Zx4sqG4Y2ygVJuZCViul69Fs2iVJkiRJWk5FDTxcCG8tl64flg07p7duuh7Npl2SJEmSpChfLArXrpdEpesbp8IxedA7oW1rsWmXJEmSJAkor4GHCmBaWcNYcl26vlMbpuvRbNolSZIkSV3eZ4vg3nwojUrXh3eHCbmQ2cbpejSbdkmSJElSl7W4Bh4sgHeXS9ePyIEd0mKTrkezaZckSZIkdUmfLIQpBVAWla5vWpeu94phuh7Npl2SJEmS1KUsqoYHCuD9hQ1jqfFwZDaMbAfpejSbdkmSJElSl/HRQrgvHxbWNIxt3gPG50BGO0nXo9m0S5IkSZI6vYXVcH8BfLhcuj4uB7br2b7S9Wg27ZIkSZKkTisIwkb9/gJYFJWub9kDxudCWjvvitt5eZIkSZIkrZ2y6nAq/MeLGsa6x8NROTCiHafr0WzaJUmSJEmdShCEm8w9UBBe0m2ZrXuGDXt7T9ejdaBSJUmSJElavdJqmJIPn0al6z3j4ahc2KZn7OpaWzbtkiRJkqQOLwjg3TJ4sBDKo9L1ET3DzeZ6dtDut4OWLUmSJElSqKQK7s2Hzxc3jPWMDzea26oDpuvRbNolSZIkSR1SEMC0MnioACpqG8a3SwvT9e7xsautpdi0S5IkSZI6nAVVcE8+fBmVrqd1gwm5sEWP2NXV0mzaJUmSJEkdRhDAW6XwcCEsiUrXt0+DIzpJuh7Npl2SJEmS1CEUV8Hd82B6ecNYRl26vlknStej2bRLkiRJktq1IIA3SuGRQqiMStd3SIfDsyG1k6Xr0WzaJUmSJEntVtHScO3611Hpeq9ucEweDO8eu7raik27JEmSJKndCQJ4rQQeK2qcru+UDodlQ0onTtej2bRLkiRJktqVwqVwdz7MiErXMxPgmFzYpAuk69Fs2iVJkiRJ7UIQwCsl8HgRLI1K13fJgLFZkNxF0vVoNu2SJEmSpJgrWBruDP9tRcNY7wSYmAvDuli6Hs2mXZIkSZIUM7UBvLwA/lsEVUHD+G4ZcGg2JMXFrLR2waZdkiRJkhQT+UvhrnkwMypdz0qASXmwYWrs6mpPbNolSZIkSW2qNoAXF8CTUel6BNi9F/wqy3Q9mk27JEmSJKnNzK2EyfPghyUNYzmJ4dr1DUzXV2DTLkmSJElqdbUBvFAM/5sP1VHp+p694OAsSDRdXymbdkmSJElSq5pTGa5d/zEqXc9NDNeur5
cSu7o6Apt2SZIkSVKrqAng+WJ4an54G8J0fe9MOKg3JJiur5FNuyRJkiSpxf28JEzXZ1c2jPWpS9eHmK43mU27JEmSJKnFVNfCc8XwTHHjdH10Jhxout5sNu2SJEmSpBYxuy5d/zkqXe+bBJNyYbDp+lqxaZckSZIkrZPq2jBZf7Y43CUeIC4C+2bCAZnQzXR9rdm0S5IkSZLW2o9Lwuuu/xKVrvdPCteuD0yOXV2dhU27JEmSJKnZqmvDXeGfX9A4Xd8/E/YzXW8xNu2SJEmSpGaZVRGuXZ+7tGFsQF26PsB0vUXZtEuSJEmSmqSqFv43H14ohrpwnfgIHNA7XL8eH4lpeZ2STbskSZIkaY1mVoRr1+dFpesDk+HYPOiXFLu6OjubdkmSJEnSKlXVwhNF8OKChnS9WyS85vo+puutzqZdkiRJkrRS39el6/lR6frg5HDtel/T9TZh0y5JkiRJamRpLfy3CF5eLl0/KAv27hXuEq+2YdMuSZIkSar3bXmYrhdWNYwNTYFJuZBnut7mbNolSZIkSVTWwuOF8EpJw1hCBA7Ogj1N12PGpl2SJEmSurhvyuHueVAUla6vlxKuXc9NjF1dsmmXJEmSpC5rSQ08VgSvlTSMJUTgkGzYPcN0vT2waZckSZKkLujrxXB3PsyPStc3SIGJeZBjut5u2LRLkiRJUheypAYeKYQ3ShvGEuPg0CzYLQMipuvtik27JEmSJHURXy0O164vqG4Y2zA13Bk+y3S9XbJplyRJkqROrqIGHi6Et6LS9aQ4GJsNu6SbrrdnNu2SJEmS1Il9sQjuyYeSqHR9WGq4dr13QuzqUtPYtEuSJElSJ1ReAw8VwLSyhrHkODgsG3YyXe8wbNolSZIkqZP5bBHcmw+lUen6Jt3hmFzINF3vUGzaJUmSJKmTWFyXrr+zXLp+RA7skGa63hHZtEuSJElSJ/DJQphSAGVR6fqm3WFCLvQyXe+wbNolSZIkqQNbVA0PFMD7CxvGUuPhiGzY3nS9w7NplyRJkqQO6qOFcF8+LKxpGNu8B4zPgQzT9U7Bpl2SJEmSOpiFden6B8ul6+NyYLuepuudiU27JEmSJHUQQQAfLoT7C2BRVLq+ZQ8Ynwtpdnidjl9SSZIkSeoAyqrDZv2jqHS9ezwclQMjTNc7LZt2SZIkSWrHgiCcBn9/QXhJt2W27hk27KbrnZtfXkmSJElqp0qrYUo+fLqoYaxHPBydC9v0jF1dajs27ZIkSZLUzgQBvFsGDxZCeVS6PqJnuNlcTzu5LsMvtSRJkiS1IyVVMKUAPotK13vWpetbm653OTbtkiRJktQOBAFMK4OHl0vXt0uDI7Ohh91bl+SXXZIkSZJibEEV3JsPXyxuGEvrBhNyYYsesatLsWfTLkmSJEkxEgTwdhk8VABLahvGt0+DI3LCS7qpa7NplyRJkqQYKK6Ce/Lhq6h0PaMbjM+FzU3XVcemXZIkSZLaUBDAG6XwaGHjdH2HdDg8G1JN1xXFpl2SJEmS2sj8Krh7Hnxd3jCW0Q2OyYVNTde1EjbtkiRJktTKggBeK4HHiqAyKl3fKR0Oy4YU03Wtgk27JEmSJLWioqUwOR9mRKXrvbrBxDzYpHvs6lLHYNMuSZIkSa0gCODVkjBdXxqVru+SAWOzINl0XU1g0y5JkiRJLaxgabh2/duKhrHeCTAxF4aZrqsZbNolSZIkqYXUBvDyAvhvEVQFDeO7ZcAhputaCzbtkiRJktQC8pfC5HnwfVS6npUAk/Jgw9TY1aWOzaZdkiRJktZBbQAvLoAnl0vX9+gFv8qCpLjY1aaOz6ZdkiRJktbS3Eq4Ox9mRqXrOYnh2vUNTNfVAmzaJUmSJKmZagOYWpeuV9el6xFgz15wcBYkmq6rhdi0S5IkSVIzzKkM167PWtIwlpsYrl1fLyV2dalzsmmXJEmSpCaoCeD5Ynh6fuN0fe9MOKg3JJiuqxXYtEuSJEnSGvy8BCbnw09R6XpeXb
o+1HRdrcimXZIkSZJWoSaAZ+fDM8XhbQjT9dGZcKDputqATbskSZIkrcTsJeHa9dmVDWN9k2BSLgw2XVcbsWmXJEmSpCjVtfBscZiu19al63ER2DcTDsiEbqbrakM27ZIkSZJU56clcNc8+CUqXe+XFK5dH5Qcu7rUddm0S5IkSeryqmvh6WJ4brl0ff9M2M90XTFk0y5JkiSpS5tVEe4MPycqXe+fBMfmwQDTdcWYTbskSZKkLqmqFv43H14ohrpwnfhl6Xrv8LYUazbtkiRJkrqcmRXhzvDzljaMDUwOd4bvb7qudsSmXZIkSVKXUVULT86HqVHperdIeM31fTJN19X+2LRLkiRJ6hK+r0vX86PS9cHJ4c7wfZNiV5e0OjbtkiRJkjq1pbXwRBG8tKBxun5QFuzdK9wlXmqvbNolSZIkdVrflsPd+VAQla4PqUvX+5iuqwOwaZckSZLU6VTWwuOF8GpJQ7qeUJeu72W6rg7Epl2SJElSpzKjPFy7XlTVMLZeSpiu5ybGri5pbdi0S5IkSeoUKmvhsbp0fZmECBySDbtnmK6rY7JplyRJktThfb04XLs+Pypd3yAFJuZBjum6OjCbdkmSJEkd1pIaeLQIXi9pGEuMg0OzYLcMiJiuq4OzaZckSZLUIX21GO6eBwuqG8Y2TIVJuZBluq5OwqZdkiRJUodSUQMPF8JbpQ1jSXXp+q4ZpuvqXGzaJUmSJHUYXyyCe/Mbp+vDUsO1670TYleX1Fps2iVJkiS1e+V16frbUel6chwclg07pZuuq/OKi+WT19TUcMkllzBkyBBSUlJYb731+NOf/kQQBPXHBEHA//3f/9GnTx9SUlLYa6+9+Pbbbxudp7i4mPHjx5OWlkZGRgYnnHACixYtauuXI0mSJKkVfLYILp/VuGHfpDtcOhh2zrBhV+cW06b9mmuu4V//+hf/+Mc/mD59Otdccw1/+ctfuOmmm+qP+ctf/sKNN97ILbfcwrvvvkv37t0ZPXo0S5YsqT9m/PjxfPnll0ydOpWnnnqK119/nZNPPjkWL0mSJElSC1lcA3fOhZt/gZK66fDJceFU+N/0g0ynw6sLiATRsXYbO/DAA8nNzeU///lP/djYsWNJSUnh3nvvJQgC+vbty7nnnst5550HQGlpKbm5udx1112MGzeO6dOns8kmm/D+++8zYsQIAJ577jn2339/fv75Z/r27bvGOsrKykhPT6e0tJS0tLTWebGSJEmSmuzTurXrZVFr1zftDhNyoZfNujqBpvahMU3ad9hhB1566SVmzJgBwKeffsqbb77JfvvtB8APP/zAvHnz2Guvveofk56ezsiRI5k2bRoA06ZNIyMjo75hB9hrr72Ii4vj3XffXenzVlZWUlZW1uhDkiRJUuwtqoZ/z4F//tLQsKfEwaQ8OKOfDbu6nphuRHfBBRdQVlbGsGHDiI+Pp6amhj//+c+MHz8egHnz5gGQm5vb6HG5ubn1982bN4+cnJxG93fr1o3MzMz6Y5Z31VVXcfnll7f0y5EkSZK0Dj5eCFPyYWFNw9jmPWB8DmTYrKuLimnT/tBDDzFlyhTuu+8+hg8fzieffMLZZ59N3759mTRpUqs974UXXsg555xT/3lZWRkDBgxoteeTJEmStGoLq+GBAvhgYcNYajyMy4HterrRnLq2mDbtv/vd77jgggsYN24cAJttthk//vgjV111FZMmTSIvLw+A/Px8+vTpU/+4/Px8ttxySwDy8vIoKChodN7q6mqKi4vrH7+8pKQkkpKSWuEVSZIkSWqODxfCffmwKCpd36IHjM+FdC9QLcV2TXt5eTlxcY1LiI+Pp7a2FoAhQ4aQl5fHSy+9VH9/WVkZ7777LqNGjQJg1KhRlJSU8OGHH9Yf8/LLL1NbW8vIkSPb4FVIkiRJaq6yarh1Dtw2p6Fh7x4PJ/aBU/vasEvLxPSfwpgxY/jzn//MwIEDGT58OB9//DHXXXcdxx9/PACRSISzzz
6bK664gg022IAhQ4ZwySWX0LdvX371q18BsPHGG7Pvvvty0kknccstt1BVVcUZZ5zBuHHjmrRzvCRJkqS2EwThNPj7C8JLui2zVQ84OhfSbNalRmL6T+Kmm27ikksu4bTTTqOgoIC+fftyyimn8H//93/1x5x//vksXryYk08+mZKSEnbaaSeee+45kpOT64+ZMmUKZ5xxBnvuuSdxcXGMHTuWG2+8MRYvSZIkSdIqlFWHG819sqhhrEc8HJUD27h2XVqpmF6nvb3wOu2SJElS6wkCeG9huNlceVS6PqJnuNlcT9N1dUFN7UP95yFJkiSp1ZRUwZQC+CwqXe8ZH06F37pn7OqSOgqbdkmSJEktLgjgnTJ4qLBxur5dGhyZDT3sRKQm8Z+KJEmSpBa1oAruzYcvFjeMpXWD8Tmwpem61Cw27ZIkSZJaRBDA22XwUAEsqW0Y3z4NjsgJL+kmqXls2iVJkiSts+IquCcfvopK1zO6wfhc2LxH7OqSOjqbdkmSJElrLQjgzVJ4pLBxur5DOhyeDamm69I6sWmXJEmStFbmV8E982B6ecNYRjc4Jhc2NV2XWoRNuyRJkqRmCQJ4vRQeLYTKqHR9x7p0PcV0XWoxNu2SJEmSmqxoKUzOhxlR6XqvbnBMHgzvHru6pM7Kpl2SJEnSGgUBvFoCjxXB0qh0fed0OCwbkk3XpVZh0y5JkiRptQqWwt3z4NuKhrHeCeHa9Y1N16VWZdMuSZIkaaVqA3ilBB4vhKqgYXy3DDgky3Rdags27ZIkSZJWkL8UJs+D76PS9awEmJgHG6XGri6pq7FplyRJklSvNoCXFsATRY3T9d0z4JBsSIqLWWlSl2TTLkmSJAmAeZXhzvAzo9L17ASYlAcbmK5LMWHTLkmSJHVxtQFMXQBPFkF1XboeAfboBb/KgkTTdSlmbNolSZKkLmxOZbh2fdaShrGcRDg2D9ZLiV1dkkI27ZIkSVIXVBvA88Xw1PzG6fremXBQb0gwXZfaBZt2SZIkqYv5pS5d/zEqXc9LDNeuDzVdl9oVm3ZJkiSpi6gJ4LlieHp+eBvCdH2fTBhjui61SzbtkiRJUhcwe0mYrs+ubBjrU7d2fbDputRu2bRLkiRJnVh1LTxbDM8Uh+vYAeIiMLoXHNgbupmuS+2aTbskSZLUSf1Ul67/HJWu900K0/VBybGrS1LT2bRLkiRJnUx1LTxdHK5fj07X98+E/TJN16WOxKZdkiRJ6kR+XAJ3zQuvv75M/7p0fYDputTh2LRLkiRJnUBVbXjN9RcWNE7XD8iEfU3XpQ7Lpl2SJEnq4H6oCNeuz13aMDYwGSblQn/TdalDs2mXJEmSOqiqWnhyPkwthrpwnfhIuCv86MzwtqSOzaZdkiRJ6oC+r0vX86PS9UHJ4dr1vkmxq0tSy7JplyRJkjqQpbXwRBG8tKAhXe8WgTG9YZ/McB27pM7Dpl2SJEnqIL4th7vzoSAqXR+SDJPyoI/putQp2bRLkiRJ7VxlLfy3CF5ZLl0/OAv26mW6LnVmNu2SJElSOzajPFy7XlTVMLZeSpiu5ybGri5JbcOmXZIkSWqHKmvhsUJ4taRhLCECv8qCPUzXpS7Dpl2SJElqZ75eHK5dnx+Vrq9fl67nmK5LXYpNuyRJktROLKmBR4vg9ZKGscQ4OCQLds+AiOm61OXYtEuSJEntwPS6dL04Kl3fMBUm5kK26brUZdm0S5IkSTFUUQOPFMKbpQ1jSXFwaBbsmmG6LnV1Nu2SJElSjHy5GO6ZBwuqG8aGpcIxuZBlui4Jm3ZJkiSpzZXXwMOF8PZy6fph2bBzuum6pAY27ZIkSVIb+nwR3JsPJVHp+sapcEwe9E6IXV2S2iebdkmSJKkNLK6BhwrgnbKGseQ4ODwbdjRdl7QKNu2SJElSK/u0Ll0vi0rXh3cP1673Ml2XtBo27ZIkSVIrWVwDDxTAe1HpekocHJEDo9JM1yWtmU27JEmS1Ao+XghT8mFhTc
PYZt1hQi5kmK5LaiKbdkmSJKkFLawO0/UPFjaMpcbDkdkw0nRdUjPZtEuSJEkt5MOFcP9y6foWPWB8LqT7m7ekteCPDkmSJGkdLayG+wrgo6h0vXs8jMuBbXuarktaezbtkiRJ0loKgnAa/P0F4aZzy2zVA47OhTR/25a0jvwxIkmSJK2Fsupwo7lPFjWM9YiHo3JgG9N1SS3Epl2SJElqhiCA9xaGm82VR6XrI3qG0+F7+hu2pBbkjxRJkiSpiUqqYEoBfBaVrveMD6fCb90zdnVJ6rxs2iVJkqQ1CAJ4pwweKmycrm9bl6738LdqSa3EHy+SJEnSaiyognvz4YvFDWNp3WB8Dmxpui6pldm0S5IkSSsRBPB2GTxUAEtqG8ZHpsGROeEl3SSptdm0S5IkScsprkvXv4xK19O7wYRc2LxH7OqS1PXYtEuSJEl1ggDeLIVHChun6zukw+HZkGq6LqmN2bRLkiRJwPwquGceTC9vGMvoBsfkwqam65JixKZdkiRJXVoQwOul8GghVEal6zvWpesppuuSYsimXZIkSV1W0VK4Ox++iUrXe3WDY/JgePfY1SVJy9i0S5IkqcsJAni1BB4rgqVR6frO6XBYNiSbrktqJ2zaJUmS1KUULIW758G3FQ1jvRPCtesbm65Lamds2iVJktQlBAG8XAKPF0JV0DC+WwYckmW6Lql9smmXJElSp5e/FCbPg++j0vWsBJiYBxulxq4uSVoTm3ZJkiR1WrUBvLQAnihqnK7vngGHZENSXMxKk6QmsWmXJElSpzSvEibnw8yodD27Ll3f0HRdUgdh0y5JkqROpTaAqQvgySKorkvXI8AeveDgLNN1SR2LTbskSZI6jTmV4dr1WUsaxnIS4dg8WC8ldnVJ0tqyaZckSVKHVxvA88Xw1PzG6fpeveCgLEg0XZfUQdm0S5IkqUP7pS5d/zEqXc+tS9eHmq5L6uBs2iVJktQh1QTwXDE8PT+8DWG6vk8mjOkNCabrkjoBm3ZJkiR1OD8vgbvmwezKhrE+den6YNN1SZ2ITbskSZI6jOpaeLYYnikO17EDxEVgdC84sDd0M12X1MnYtEuSJKlDmF2Xrv8cla73TQrT9UHJsatLklqTTbskSZLatepaeLo4XL8ena7vlwn7Z5quS+rcbNolSZLUbv1Yl67PiUrX+9el6wNM1yV1ATbtkiRJaneqasNd4Z9f0DhdPyAT9jVdl9SF2LRLkiSpXfmhIrzu+tylDWMD6tL1/qbrkroYm3ZJkiS1C1W18OR8mFoMdeE68ZFwV/jRmeFtSepqbNolSZIUczMrwrXr+VHp+qDkMF3vmxS7uiQp1mzaJUmSFDNLa+GJInhpQUO63i0CY3rDPpnhOnZJ6sps2iVJkhQT35XD5HwoiErXB9el631M1yUJsGmXJElSG6usS9dfXi5dPzgL9uplui5J0WzaJUmS1GZmlIc7wxdVNYwNTYFJuZBnui5JK7BplyRJUqurrIXHCuHVkoaxhAj8Kgv2MF2XpFWyaZckSVKr+nox3J0P86PS9fVTYFIe5CTGri5J6ghs2iVJktQqltTAo0XweknDWEIEDs2G3TMgYrouSWtk0y5JkqQWN70uXS+OStc3TIWJuZBtui5JTWbTLkmSpBZTUQOPFsIbpQ1jSXFwaBbsmmG6LknNZdMuSZKkFvHlYrhnHiyobhjbqC5dzzJdl6S1YtMuSZKkdVJeAw8XwtvLpeuHZcPO6abrkrQubNolSZK01j5fBPfmQ0lUur5xKhyTB70TYleXJHUWNu2SJElqtvIaeLAA3ilrGEuOg8OzYUfTdUlqMTbtkiRJapZPF8GUfCiNSteHd4djcqGX6boktSibdkmSJDXJ4rp0/d2odD0lDo7IgVFppuuS1Bps2iVJkrRGHy+E+wqgLCpd36w7TMiFDNN1SWo1Nu2SJElapYXV8EABfLCwYSw1Ho7MhpGm65LU6mzaJUmStFIfLYT78mFhTcPYFj
1gfC6k+1ukJLUJf9xKkiSpkYXV4VT4j6LS9e7xMC4Htu1pui5JbcmmXZIkSQAEAXxYt3Z9cVS6vlUPODoX0vzNUZLanD96JUmSRFl1OBX+40UNYz3i4agc2MZ0XZJixqZdkiSpCwsCeG9heCm36HR9m55hw97T3xYlKab8MSxJktRFlVbDlHz4NCpd7xkfToXfumfs6pIkNbBplyRJ6mKCAN4tgwcLoTwqXd+2Z7jZXA9/Q5SkdsMfyZIkSV1ISRXckw9fLG4YS+sGR+fAVqbrktTu2LRLkiR1AUEA08rgoQKoqG0YH5kGR+aEl3STJLU/Nu2SJEmd3IK6dP3LqHQ9vRuMz4UtesSuLknSmtm0S5IkdVJBAG+WwiOFsCQqXR+VBkfkQKrpuiS1ezbtkiRJndD8KrhnHkwvbxjL6AYTcmEz03VJ6jBs2iVJkjqRIIA36tL1yqh0fcd0OCzbdF2SOhqbdkmSpE6iaCncnQ/fRKXrvbrBMXkwvHvs6pIkrT2bdkmSpA4uCOC1EnisqHG6vnNdup5sui5JHVZcrAv45ZdfmDBhAr179yYlJYXNNtuMDz74oP7+IAj4v//7P/r06UNKSgp77bUX3377baNzFBcXM378eNLS0sjIyOCEE05g0aJFbf1SJEmS2lzhUrjuZ7i/oKFhz0yAs/vDhDwbdknq6GLatC9YsIAdd9yRhIQEnn32Wb766iv+9re/0atXr/pj/vKXv3DjjTdyyy238O6779K9e3dGjx7NkiVL6o8ZP348X375JVOnTuWpp57i9ddf5+STT47FS5IkSWoTQQAvLYDLZ8GMqOnwu2bApYNgY6fDS1KnEAmCIIjVk19wwQW89dZbvPHGGyu9PwgC+vbty7nnnst5550HQGlpKbm5udx1112MGzeO6dOns8kmm/D+++8zYsQIAJ577jn2339/fv75Z/r27bvGOsrKykhPT6e0tJS0tLSWe4GSJEmtoGApTJ4H31U0jGUlwDG5MMxmXZI6hKb2oTFN2p988klGjBjB4YcfTk5ODltttRW33357/f0//PAD8+bNY6+99qofS09PZ+TIkUybNg2AadOmkZGRUd+wA+y1117ExcXx7rvvrvR5KysrKSsra/QhSZLU3tUG8GIx/HFW44Z99wz4v8E27JLUGcW0aZ85cyb/+te/2GCDDXj++ec59dRT+c1vfsPkyZMBmDdvHgC5ubmNHpebm1t/37x588jJyWl0f7du3cjMzKw/ZnlXXXUV6enp9R8DBgxo6ZcmSZLUouZVwl9nw8OFUFU3TzI7Ac4dAONyISnmOxVJklpDTHePr62tZcSIEVx55ZUAbLXVVnzxxRfccsstTJo0qdWe98ILL+Scc86p/7ysrMzGXZIktUu1Aby4AJ4oguq6Zj0C7NELDs6yWZekzi6mTXufPn3YZJNNGo1tvPHGPProowDk5eUBkJ+fT58+feqPyc/PZ8stt6w/pqCgoNE5qqurKS4urn/88pKSkkhKSmqplyFJktQq5lbCXfNgVsP+u+QkwqRcWD81dnVJktpOTN+b3XHHHfnmm28ajc2YMYNBgwYBMGTIEPLy8njppZfq7y8rK+Pdd99l1KhRAIwaNYqSkhI+/PDD+mNefvllamtrGTlyZBu8CkmSpJZVG8Cz8+GKHxsa9giwdy+4ZJANuyR1JTFN2n/729+yww47cOWVV3LEEUfw3nvvcdttt3HbbbcBEIlEOPvss7niiivYYIMNGDJkCJdccgl9+/blV7/6FRAm8/vuuy8nnXQSt9xyC1VVVZxxxhmMGzeuSTvHS5IktSdz6tL1H6PS9dxEODYPhqbEri5JUmzE9JJvAE899RQXXngh3377LUOGDOGcc87hpJNOqr8/CAIuvfRSbrvtNkpKSthpp5345z//yYYbblh/THFxMWeccQb/+9//iIuLY+zYsdx444306NGjSTV4yTdJkhRrNQE8XwxPzQ9vQ5iu75MJY3pDgmvXJalTaW
ofGvOmvT2waZckSbH085IwXZ9d2TDWJxEm5cEQ03VJ6pSa2ofGdHq8JElSV1ZdC88Vw9PF4Tp2gLgIjO4FB5iuS5KwaZckSYqJ2XXp+s9R6XrfpHDt+qDk2NUlSWpfbNolSZLaUHVtmKw/t1y6vl8m7J8J3UzXJUlRbNolSZLayI916fqcqHS9f1K4dn2g6bokaSXW6r3cN954gwkTJjBq1Ch++eUXAO655x7efPPNFi1OkiSpM6iuhf8WwtU/NTTscZFwV/gLB9qwS5JWrdlN+6OPPsro0aNJSUnh448/prIy/J+ntLSUK6+8ssULlCRJ6shmVcAVP8KzUdPhByTBHwbCgVlOh5ckrV6z/5u44ooruOWWW7j99ttJSEioH99xxx356KOPWrQ4SZKkjqqqFh6rS9fnLg3H4iNwcBZcOAj6m65Lkpqg2Wvav/nmG3bZZZcVxtPT0ykpKWmJmiRJkjq0mRXh2vX8pQ1jg5LDtev9kmJXlySp42l2056Xl8d3333H4MGDG42/+eabDB06tKXqkiRJ6nCW1sITRfDSAqibCU+3CBzYG0ZnhuvYJUlqjmY37SeddBJnnXUWd9xxB5FIhDlz5jBt2jTOO+88LrnkktaoUZIkqd37rhwm50NBVLo+ODm87nof03VJ0lpqdtN+wQUXUFtby5577kl5eTm77LILSUlJnHfeeZx55pmtUaMkSVK7VVmXrr+8XLp+UBbs3ct0XZK0biJBEARrPmxFS5cu5bvvvmPRokVssskm9OjRo6VrazNlZWWkp6dTWlpKWlparMuRJEkdxIxyuHseFFY1jA1NgUm5kGe6Lklajab2oc3ePf74449n4cKFJCYmsskmm7DddtvRo0cPFi9ezPHHH79ORUuSJHUElbXwQD78bXZDw54QgcOz4XcDbNglSS2n2Ul7fHw8c+fOJScnp9F4UVEReXl5VFdXt2iBbcGkXZIkNdXXi+GefCiKStfXSwl3hs9NjF1dkqSOpal9aJPXtJeVlREEAUEQsHDhQpKTGy4uWlNTwzPPPLNCIy9JktRZLKmBx4rgtZKGsYQIHJINu2e4dl2S1Dqa3LRnZGQQiUSIRCJsuOGGK9wfiUS4/PLLW7Q4SZKk9mB6Xbo+Pypd3yAFJuZBjum6JKkVNblpf+WVVwiCgD322INHH32UzMzM+vsSExMZNGgQffv2bZUiJUmSYmFJDTxSCG+UNowlxcEhWbBbBkRM1yVJrazJTfuuu+4KwA8//MCAAQOIi2v2HnaSJEkdxpeL4Z55sCBqu56NUmFiLmSZrkuS2kizr9M+aNAgAMrLy/npp59YunRpo/s333zzlqlMkiQpBsrr0vW3lkvXD8uGndNN1yVJbavZTXthYSHHHXcczz777Ervr6mpWeeiJEmSYuGLReHa9ZKodH3jVDgmD3onxK4uSVLX1ew57meffTYlJSW8++67pKSk8NxzzzF58mQ22GADnnzyydaoUZIkqVWV18Bdc+GmXxoa9uQ4mJALZ/W3YZckxU6zk/aXX36ZJ554ghEjRhAXF8egQYPYe++9SUtL46qrruKAAw5ojTolSZJaxWeL4N58KI1K14d3Dxv2TJt1SVKMNbtpX7x4cf312Hv16kVhYSEbbrghm222GR999FGLFyhJktQaFtfAgwXwblnDWEocHJ4DO6S5dl2S1D40u2nfaKON+Oabbxg8eDBbbLEFt956K4MHD+aWW26hT58+rVGjJElSi/pkIUwpgLKodH3TunS9l+m6JKkdaXbTftZZZzF37lwALr30Uvbdd1+mTJlCYmIid911V0vXJ0lS0wW1UL0YEnrGuhK1U4uq4YECeH9hw1hqPByZDSNN1yVJ7VAkCIJgXU5QXl7O119/zcCBA8nKymqputpUWVkZ6enplJaWkpaWFutyJElrY0kBvLgrlH0N/Q6CnR+FuGa/N61O7KOFcF8+LIy60M3mPWB8DmSYrkuS2lhT+9B1/m0mNTWVrbfeel
1PI0nSupk5Gcq+CW//8iQUvQ05u8S2JrULC6vh/gL4MCpd7x4PR+bAdj1N1yVJ7Vuzm/YgCHjkkUd45ZVXKCgooLa2ttH9jz32WIsVJ0lSk6X2AwIgEn4k58W4IMVaEISN+v0FsCgqXd+yB4zPhTQnYkiSOoBm/3d19tlnc+utt7L77ruTm5tLxLenJUntwaBxUP4zFE2DwRMgbcNYV6QYKqsOp8J/vKhhrHs8HJUDI0zXJUkdSLPXtGdmZnLvvfey//77t1ZNbc417ZIkdQ5BEG4y90BBeEm3ZbbuCUfnQE/TdUlSO9Fqa9rT09MZOnToOhUnSZLU0kqrYUo+fBqVrveMh6NyYRsvKCBJ6qDimvuAyy67jMsvv5yKiorWqEeSJKlZggDeKYXLZjVu2LftCZcOtmGXJHVszU7ajzjiCO6//35ycnIYPHgwCQmNr5Hy0UcftVhxkiRJq1NSBffmw+eLG8Z6xocbzW1lsy5J6gSa3bRPmjSJDz/8kAkTJrgRnSRJiokggGll8FABVERdyGZkWngpt+7xsatNkqSW1Oym/emnn+b5559np512ao16JEmSVmtBFdyTD19Gpetp3WBCLmzRI3Z1SZLUGprdtA8YMMAd1iVJUpsLAnirFB4uhCVR6fr2aXCE6bokqZNq9kZ0f/vb3zj//POZNWtWK5QjSZK0ovlVcMPPYcK+rGHP6AZn9IPj+tiwS5I6r2Yn7RMmTKC8vJz11luP1NTUFTaiKy4ubrHiJElS1xYE8EYpPFIIlVHp+o7pcFg2pNqsS5I6uWY37ddff30rlCFJktRY0dIwWf+6vGGsVzc4Jg+Gd49dXZIktaW12j1ekiSptQQBvFYCjxU1Ttd3Toex2ZBiui5J6kKa1LSXlZXVbz5XVla22mPdpE6SJK2twqVwdz7MiErXMxNgYi5sbLouSeqCmtS09+rVi7lz55KTk0NGRsZKr80eBAGRSISampoWL1KSJHVuQQCvlMDjRbA0Kl3fJQPGZkGy6bokqYtqUtP+8ssvk5mZCcArr7zSqgVJkqSupWApTJ4H31U0jPWuS9eHma5Lkrq4JjXtu+66a/3tIUOGMGDAgBXS9iAImD17dstWJ0mSOq3aAF5eAP8tgqqgYXy3DDg0G5KafWFaSZI6n2ZvRDdkyJD6qfLRiouLGTJkiNPjJUnSGuUvhbvmwcyodD0rASblwYapsatLkqT2ptlN+7K168tbtGgRycnJLVKUJEnqnGoDeHEBPFEE1XXpegTYoxccnGW6LknS8prctJ9zzjkARCIRLrnkElJTG94Gr6mp4d1332XLLbds8QIlSVLnMLcyXLv+w5KGsZxEmJQL65uuS5K0Uk1u2j/++GMgTNo///xzEhMT6+9LTExkiy224Lzzzmv5CiVJUodWG8ALxfC/+Y3T9T3r0vVE03VJklapyU37sl3jjzvuOG644Qavxy5JktZoTmW4dv3HqHQ9NzFcu75eSuzqkiSpo2j2mvY777yz0edlZWW8/PLLDBs2jGHDhrVYYZIkqeOqCeD5YnhqfngbwnR970w4qDckmK5LktQkzW7ajzjiCHbZZRfOOOMMKioqGDFiBLNmzSIIAh544AHGjh3bGnVKkqQO4uclYbo+u7JhrE9duj7EdF2SpGZp9vvcr7/+OjvvvDMAjz/+OEEQUFJSwo033sgVV1zR4gVKkqSOoboWniqCP//U0LBHgP0y4Q+DbNglSVobzW7aS0tLyczMBOC5555j7NixpKamcsABB/Dtt9+2eIGSWtmCBTBjBgRBrCuR1IHNXgJX/RRuNldb9+OkbxJcOAh+le10eEmS1laz/wsdMGAA06ZNY/HixTz33HPss88+ACxYsMDrtEsdzdtvQ79+sNFGcPTRNu6Smq26Fp4sgit/gp/r0vW4COzfG/4wEAb5q4EkSeuk2U372Wefzfjx4+nfvz99+/Zlt912A8Jp85tttllL1yepNd16K1TW/Zb9wAMwb15s65
HUofy4JGzWn45K1/snwYUDw0u5dTNdlyRpnTV7I7rTTjuNkSNH8tNPP7H33nsTFxf+jzx06FDXtEsdzfDhUFsL8fHQqxfULX2RpNWprg13hX9+QUOzHheB/TPD9es265IktZxIEDgftqysjPT0dEpLS73+vLqWmhq4+WaYORN+/Wvwso2S1mBWRbgz/NylDWMDksKd4Qc4FV6SpCZrah/a5PfCN9lkE4qLi+s/P+200ygqKqr/vKCggNTU1LUsV1JMxMfDb34D119vwy5ptapq4bFCuPqnhoY9PhJOg79wkA27JEmtpclN+9dff011dXX95/feey9lZWX1nwdBwJIlS1q2OkmSFHMzK+CKH+H5Ylg2PW9QcngZt/17h827JElqHc1e077MymbVRyL+ry1JUmdRVQtPFMGLCxqa9W4ROLA3jM4M17FLkqTWtdZNuyRJ6ry+r4DJ8yA/au364ORw7XrfpNjVJUlSV9Pkpj0SiayQpJusS5LUuSythf8WwcvLpesHZcHevUzXJUlqa01u2oMgYM8996Rbt/AhFRUVjBkzhsTERIBG690lSVLH8215mK4XVjWMDU2BSbmQZ7ouSVJMNLlpv/TSSxt9fvDBB69wzNixY9e9IkmS1KYqa+HxQnilpGEsIQK/yoI9TNclSYopr9OO12mXJHVd35TD3fOgKCpdXy8lXLuemxi7uiRJ6uya2oe6EZ0kSV3Qkhp4rAheK2kYS4jAIdmwe4bpuiRJ7YVNuyRJXcz0xXBPPsyPStc3SIGJeZBjui5JUrti0y5JUhexpAYeKYQ3ShvGEuPg0CzYLQO8KIwkSe2PTbskSV3Al4vhnnmwIOpiLxumhjvDZ5muS5LUbtm0S5LUiVXUwMOF8FZUup4UB2OzYZd003VJktq7JjXtN954Y5NP+Jvf/Gati5EkSS3ni0Xh2vWSqHR9WGq4dr13QuzqkiRJTdekS74NGTKkaSeLRJg5c+Y6F9XWvOSbJKkzKa+BhwpgWlnDWHIcHJYNO5muS5LULrToJd9++OGHFitMkiS1ns8Wwb35UBqVrm/SHY7JhUzTdUmSOpy1XtO+dOlSfvjhB9Zbbz26dXNpvCRJsbS4Bh4sgHeXS9ePyIEd0kzXJUnqqOKa+4Dy8nJOOOEEUlNTGT58OD/99BMAZ555JldffXWLFyhJklbvk4Vw2azGDfum3eGywbCj0+ElSerQmt20X3jhhXz66ae8+uqrJCcn14/vtddePPjggy1anCRJWrVF1fDvOfCvOVBWNx0+NR6OzYMz+kEvp8NLktThNXte+3//+18efPBBtt9+eyJRb90PHz6c77//vkWLkyRJK/fRQrgvHxbWNIxt3gPG50CGzbokSZ1Gs5v2wsJCcnJyVhhfvHhxoyZekiS1vIXVcH8BfLiwYSw1HsblwHY9nQovSVJn0+zp8SNGjODpp5+u/3xZo/7vf/+bUaNGtVxlkiSpXhDAB2Xh2vXohn3LHnD5YBjpZnOSJHVKzU7ar7zySvbbbz+++uorqqurueGGG/jqq694++23ee2111qjRkmSurSy6nAq/MeLGsa6x8NROTDCdF2SpE6t2Un7TjvtxCeffEJ1dTWbbbYZL7zwAjk5OUybNo1tttmmNWqUJKlLCgJ4ry5dj27Yt+4Z7gy/rem6JEmdXiQIgiDWRcRaWVkZ6enplJaWkpaWFutyJEmitBqm5MOnUc16j3g4Ohe26Rm7uiRJUstoah/apOnxZWVlaz6ojk2vJElrLwjC660/WAjlUTvDj+gZbjbXs9kL2yRJUkfWpP/6MzIymrwzfE1NzZoPkiRJKyipgnvz4fPFDWM942F8Lmxlui5JUpfUpKb9lVdeqb89a9YsLrjgAo499tj63eKnTZvG5MmTueqqq1qnSkmSOqDaoJaTnjyJ+7+4n90G78YjRzxCakLqCscFAUwrg4cKoKK2YXy7tDBd7x7fhkVLkqR2pdlr2vfcc09OPPFEjjrqqEbj99
13H7fddhuvvvpqS9bXJlzTLklqDS/NfIm97tmr/vPbDryNk7Y5qdExC+rS9S+i0vW0bjAhF7bo0VaVSpKkttbUPrTZu8dPmzaNESNGrDA+YsQI3nvvveaeTpKkTqt7YvdVfh4E8FZpuDN8dMO+fVq4M7wNuyRJgrVo2gcMGMDtt9++wvi///1vBgwY0CJFSZLUGWzff3uu3ftatsrbivNGnceRw48EoLgKbvwF7p4HS+qmw2d0g9P7wXF9nA7fXEtrllKwuCDWZUiS1CqaPT3+mWeeYezYsay//vqMHDkSgPfee49vv/2WRx99lP33379VCm1NTo+XJLWFIIA3SuGRQqiMWru+Qzocng2pNuvN9nXR1+xy5y4UlhcyfrPx3HPIPU3ePFeSpFhqtenx+++/P99++y1jxoyhuLiY4uJixowZw4wZMzpkwy5JUluYXwXX/xxee31Zw96rG5zZDybl2bCvrX+9/y+KK4oBmPL5FL4r/i7GFUmS1LLW6mqv/fv358orr2zpWiRJ6nSCAF4rgceKGqfrO6XDYdmQYrO+TgZnDKY2qCUuEkdSfBJZqVmxLkmSpBa1Vk17SUkJ//nPf5g+fToAw4cP5/jjjyc9Pb1Fi5MkqSMrXAp358OM8oaxzAQ4Jhc26b7qx6npzhx5JhXVFUwvms4p25xCr5ResS5JkqQW1ew17R988AGjR48mJSWF7bbbDoD333+fiooKXnjhBbbeeutWKbQ1uaZdktSSggBeLQnT9aVR6fouGTA2C5JN1yVJ6vKa2oc2u2nfeeedWX/99bn99tvp1i0M6qurqznxxBOZOXMmr7/++rpVHgM27ZKkllKwNNwV/tuKhrHeCTAxF4aZrkuSpDqt1rSnpKTw8ccfM2zYsEbjX331FSNGjKC8vHwVj2y/bNolSeuqNoCXF8B/i6Aq6n/W3TLg0GxIavbWr5IkqTNrah/a7DXtaWlp/PTTTys07bNnz6Znz57Nr1SSpA4ufylMngffR6XrWQnhrvAbpsauLkmS1PE1u2k/8sgjOeGEE7j22mvZYYcdAHjrrbf43e9+x1FHHdXiBUqS1F7VBvDiAnhyuXR9j17wqyzTdUmStO6a3bRfe+21RCIRJk6cSHV1NQAJCQmceuqpXH311S1eoCRJ7dHcyjBd/2FJw1hOYrh2fQPTdUmS1EKavaZ9mfLycr7//nsA1ltvPVJTO+5vKK5plyQ1VW0AU+vS9eq6/0EjwJ694OAsSDRdlyRJTdBqa9qXSU1NZbPNNlvbh0uS1OHMqUvXZ0Wl67mJ4dr19VJiV5ckSS2ithbifPe5vWly03788cc36bg77rhjrYuRJKk9qgng+WJ4en7jdH3vTDioNyT4+40kqSMrKYF994X33oOjjoK774b4+FhXpTpNbtrvuusuBg0axFZbbcVazqiXJGkF+Yvy6Z7YnR6JPWJdykr9vAQm58NPUel6n7p0fYjpuiSpM5g8OWzYgwDuuw9OPhl23TXWValOk5v2U089lfvvv58ffviB4447jgkTJpCZmdmatUmSOrnfvfA7rp12Ld0TuvPs+GfZedDOsS6pXk0Az86HZ4rD2xCm66Mz4UDTdUlSZ5KZGTbs0Z+r3Wjyrxw333wzc+fO5fzzz+d///sfAwYM4IgjjuD55583eZckNVt5VTnXTru2/vbf3/l7jCtqMHsJXPkj/G9+Q8PeNwkuGAiHZNuwS5I6mfHj4ZJLYJdd4D//Afcua1eatRFdUlISRx11FEcddRQ//vgjd911F6eddhrV1dV8+eWX9OjRPqc2SpLan6T4JHK751JUXkRAwHq91ot1SVTXwrPFYbpeW9esx0Vg30w4IBO62axLkjqjuDj44x9jXYVWYa13j4+LiyMSiRAEATU1NS1ZkySpC4iPi+flSS/z92l/p2/Pvlyw0wUxrefHJeHO8L9UNoz1SwrXrg9Kjl1dkiSpa2vWddorKyt57LHHuOOOO3jzzTc58MADOe6449
h3332J68CXBvA67ZLUdVXXwlPz4fkFjdP1/TNhP9N1SZLUSlr8Ou2nnXYaDzzwAAMGDOD444/n/vvvJysrq0WKlSQpFmZVhDvDz4lK1wfUpesDTNclSVI70OSkPS4ujoEDB7LVVlsRiURWedxjjz3WYsW1FZN2SepaqmrDTeZeKIZl/wnGR+CA3uH69fhV/zcnSZLUIlo8aZ84ceJqm3VJkjqCmRXh2vV5SxvGBibDsXnhGnZJkqT2pMlN+1133dWKZUiS1LqqauHJ+TA1Kl3vFgmvub6P6bokSWqn1nr3eEmSOorv69L1/Kh0fXByuHa9r+m6JElqx2zaJUmd1tJa+G8RvLygcbp+UBbs3SvcJV6SJKk9azcXsrn66quJRCKcffbZ9WNLlizh9NNPp3fv3vTo0YOxY8eSn5/f6HE//fQTBxxwAKmpqeTk5PC73/2O6urqNq5ektTefFsOf/oRXopq2IemwMWDYHSmDbskSeoY2kXS/v7773Prrbey+eabNxr/7W9/y9NPP83DDz9Meno6Z5xxBoceeihvvfUWADU1NRxwwAHk5eXx9ttvM3fuXCZOnEhCQgJXXnllLF6KJCnGKmvh8UJ4paRhLCECB2fBnqbrkiSpg4l50r5o0SLGjx/P7bffTq9everHS0tL+c9//sN1113HHnvswTbbbMOdd97J22+/zTvvvAPACy+8wFdffcW9997LlltuyX777cef/vQnbr75ZpYuXbqqp5QkdVIzyuGPsxo37OulwCWDYW/TdUmS1AHFvGk//fTTOeCAA9hrr70ajX/44YdUVVU1Gh82bBgDBw5k2rRpAEybNo3NNtuM3Nzc+mNGjx5NWVkZX3755Sqfs7KykrKyskYf6qSCAD74AL77LtaVSGpFlbVwfz78bTYUVYVjCRE4IgfOGwC5ibGtT5IkaW3FdHr8Aw88wEcffcT777+/wn3z5s0jMTGRjIyMRuO5ubnMmzev/pjohn3Z/cvuW5WrrrqKyy+/fB2rV4dw4olwxx0QicCUKXDUUbGuSFIL+3ox3J0P86saxjZIgYl5kGOzLkmSOriYJe2zZ8/mrLPOYsqUKSQnJ7fpc1944YWUlpbWf8yePbtNn19tpKYG7rwzvB0E8O9/x7YeSS1qSQ1MyYe//9zQsCfGwbgcOHeADbskSeocYpa0f/jhhxQUFLD11lvXj9XU1PD666/zj3/8g+eff56lS5dSUlLSKG3Pz88nLy8PgLy8PN57771G5122u/yyY1YmKSmJpCQvzNvpxcfD5pvD559DbS2MHBnriiS1kK8Ww93zYEHUxUI2TIVJuZBlsy5JkjqRmDXte+65J59//nmjseOOO45hw4bx+9//ngEDBpCQkMBLL73E2LFjAfjmm2/46aefGDVqFACjRo3iz3/+MwUFBeTk5AAwdepU0tLS2GSTTdr2Bal9mjoVbr8dsrLg+ONjXY2kdVRRAw8XwlulDWNJcTA2G3ZJD1fCSJIkdSYxa9p79uzJpptu2mise/fu9O7du378hBNO4JxzziEzM5O0tDTOPPNMRo0axfbbbw/APvvswyabbMIxxxzDX/7yF+bNm8fFF1/M6aefbpKuUHY2XHRRrKuQ1AK+WAT35jdO14elhmvXeyfEri5JkqTW1C6u074qf//734mLi2Ps2LFUVlYyevRo/vnPf9bfHx8fz1NPPcWpp57KqFGj6N69O5MmTeKPf/xjDKuWJLWk8rp0/e2odD05Dg7Lhp1M1yVJUicXCYIgiHURsVZWVkZ6ejqlpaWkpaXFuhxJUp3P6tL10qh0fZPucEwuZJquS5KkDqypfWi7TtolSV3T4hp4qADeKWsYS44Lr7u+Q5rpuiRJ6jps2iVJ7conC2FKAZRFpeubdocJudDLdF2SJHUxNu2SpHZhUTU8UADvL2wYS6lL10eZrkuSpC7Kpl2SFHMfLYT78mFhTcPY5j1gfA5kmK5LkqQuzKZdkhQzC+vS9Q+i0vXUeB
iXA9v1NF2XJEmyaZcktbkggA8Xwv0FsCgqXd+iB4zPhXT/d5IkSQJs2iVJbaysOmzWP4pK17vHw1E5MMJ0XZIkqRGbdklSmwiCcBr8/QXhJd2W2aoHHJ0Laf6PJEmStAJ/RZIktbrSapiSD58uahjrUZeub2O6LkmStEo27ZKkVhME8G4ZPFgI5VHp+oie4WZzPf1fSJIkabX8dUnq7IIApk6FsjI46CBITIx1ReoiSqpgSgF8FpWu94wPp8Jv3TN2dUmSJHUkNu1SZ3fVVfCHP4S3Dz0UHn00tvWo0wsCmFYGDy+Xrm+XBkdmQw//55EkSWoyf3WSOrvHH2+4/dRTsatDXcKCKrg3H75Y3DCW1g3G58CWpuuSJEnNZtMudXYHHwwffBDe3n//2NaiTisI4O0yeKgAltQ2jG+fBkfkhJd0kzqtIIDynyE5G+KTY12NJKmTsWmXOrs//AG22SZc037IIbGuRp1QcRXckw9fRaXrGd1gfC5s3iN2dUltIqiF1w6GOU9BUjbs/RakbRDrqiRJnYhNu9TZRSKw336xrqLrWFIEbxwKJZ/DJr+D4RfFuqJWEwTwRik8Wtg4Xd8hHQ7PhlTTdXUFJZ+HDTvA0mKYeQdseVVsa5IkdSo27ZLUkr75OxS9DUENfPoHGHQU9BgS66pa3PwquHsefF3eMJbRDY7JhU1N19WVpPSBuCQIqsN/9z3Xj3VFkqROxqZdklpSXHIYQQMQgbiEmJbT0oIAXiuBx4qgMipd3ykdDsuGFNN1dTXJObDHVJh5J/TaEoYeF+uKJEmdjE27JLWkYb+FsulQ8hkMOxdS+8e6ohZTtBQm58OMqHS9Vzc4Jg+Gd49dXVLM5ewcfkiS1Aps2iWpJSX0gB3vi3UVLSoI4NWSMF1fGpWu75IBY7Mg2XRdkiSp1di0S5JWqWBpuHb924qGsd4JMDEXhpmuS5IktTqbdknSCmoDeHkB/LcIqoKG8d0y4BDTdUmSpDZj0y5JaiR/KUyeB99HpetZCTAxDzZKjV1dkiRJXZFNuyQJCNP1FxfAk8ul67tnwCHZkBQXs9IkSZK6LJt2SRJzK+HufJgZla5nJ8CkPNjAdF2SJClmbNolqQurDWBqXbpeXZeuR4A9e8HBWZBoui5JkhRTNu2S1EXNqQzXrs9a0jCWmxim6+ulxK4uSZIkNbBpl6QupjaA54vhqfmN0/W9M+Gg3pBgui5JktRu2LRLUhfy8xKYnA8/RaXreXXp+lDTdUmSpHbHpl1SxxEE8OWfYdb90Hdf2OqvEDEWboqaAJ4rhqfnh7chTNf3yYQxpuuSJEntlk27pI6j4HX47JLwdtlXkLU9DDw8tjV1ALOXhGvXZ1c2jPVJhGPzYLDpuiRJUrtm0y6p46ipaPx5dcXKjxMA1bXwbDE8UxyuYweIi8C+mXBAJnQzXZckSWr3bNoldRx5e8PQE+Cnh6DPPjDoyFhX1G79VJeu/xyVrvdLCteuD0qOXV2SJElqHpt2SR1HXDxs/+/wQytVXQtPF4fr16PT9f0zYT/TdUmSpA7Hpl2SOolZFeHO8HOi0vX+SeHa9QGm65IkSR2STbskdXBVteE1158vhrpwnfhl6Xrv8LYkSZI6Jpt2SerAZlbA3fNg7tKGsYHJMCkX+puuS5IkdXg27ZLUAVXVwpPzYepy6fqY3uG1103X1aqWFEG37tDNawZKktTabNolqYP5viLcGT4/Kl0flByuXe+bFLu6tJZqq2HWfeElDYccA91SY13R6n38e5j+F+jWA3Z/HrJ3iHVFkiR1ajbtktRBLK2FJ4rgpQUN6Xq3CByUBXv3CneJVwf00Tkw46bw9i//g92eim09q1O1KGzYAarL4evrbNolSWplNu2S1AF8Ww5350NBVLo+JDm87nof0/WOLf/lhtsFr8eujqaIT4akbFhaDEEAPYbEuiJJkjo9m3ZJascqa+G/RfBKVLqeUJeu72W63jkMmQSfnF93e0Jsa1mTuG
6w50thwp7SD4b/IdYVSZLU6UWCIAjWfFjnVlZWRnp6OqWlpaSlpcW6HEkCYEZ5uHa9qKphbL2UMF3PTYxdXWoFRe+Fa9pzdoZIXKyrkSRJbaCpfahJuyS1M5W18FghvFrSMJYQgUOyYfcM0/VOKWu7WFcgSZLaKZt2SWpHvl4crl2fH5Wub5ACE/Mgx3RdkiSpy7Fpl6R2YEkNPFoEr5c0jCXGwaFZsFsGREzXJUmSuiSbdkmKsa8Wwz35UByVrm+YChNzIdt0XZIkqUuzaZekGKmogUcK4c3ShrGkunR91wzTdUmSpE7rf/+DW25p0qE27ZIUA18sgnvzYUF1w9iwVDgmF7JM1yVJkjqvn36CQw6BmpomHW7TLkltqLwGHi6Et5dL1w/Php3STdcldSCzZ0NBAWy1FcR5qUJJarKioiY37AD+hJWkNvLZIrh8VuOGfeNUuHQw7JzRsRv2gsUF7HPPPgy9YSh3f3p3rMuR1NqeeQaGDIERI2DcuFhXI0kdy5ZbwtFHN/nwSBAEQetV0zE09aL2krQ2FtfAQwXwTlnDWHIcHJEDO6R17GZ9md88+xv++f4/qQlqiIvEMf/8+WQkZ8S6LEmt5bDD4LHHYNmvkSUlkJ4e05IkqaMpmzOH9H791tiHmrRLUiv6dBFcNqtxw75pd7hsMOzYiabDL//+r+8HS63ogw9g4kS49FJYujQ2NWyzTdiwx8XB4MHQs2ds6pCiBQE8+STcfDMUF8e6GmnNevRo0mGuaZc6gw8+gAcegO22gyOOiHU1IkzXHyiA96Ka9ZS6dH1UJ0nXo128y8V8Wfgl3xV/x+W7XU6vlF6xLknqnCoqYM89YdGihqb50kvbvo7zz4fsbPj5ZzjxRNe0q324+WY488zw9r/+BZ995vemOgWbdqmjmzsXdt4ZqqrCDS0SEsLdKBUzHy+EKfmwMGp/kc17wPgcyEiIXV2tKbdHLi9PejnWZUid38KFUFb3bmB8PMyaFZs64uPDZl1qT159NXxXPAjgyy+htBR6+SayOj7fepI6uu+/hyVLwoY9Li58V7kTmTF/Bn956y+8Nuu1WJeyRgur4fY5cMuchoY9NR6O7wOn9e28DbukNpSTA6edFt5OS4Pf/Ca29UjtyRFHNOyzsOeekJER03KkluJGdLgRnTq4ykrYaadwinyvXvDOO7DhhrGuqkXkL8png5s2YNHSRQQEvDzxZXYfsnusy1qpDxfC/cul61v0gPG5kN7O5jQ9Nv0xLnjxAvr17Mc9h95D/7T+sS5JUnMVFYXryJOSYl2J1L589hn88kvYtCcmxroaabWa2oeatEsdXVISvP02fPwx/PDDmhv2hQvDd6I32CBc79VSamug8G0om9Fip/yy8EsWLl1IQECECG/PfrvFzt1Syqrh1jlwW1S63j0eTuwDp/Ztfw370pqlHP3o0Xxb/C1v/PQGF798caxLUldWVRWmxsOGwVVXxbqajiUry4ZdWpnNN4f99rNhV6di0y51BgkJ4fUem3K5nRtugEcfhe++C39Zbqn1kNMmwNQd4alhMOuBFjnldv22Y0jGEABSuqVw8LCDW+S8LSEI4P2ycGf4jxY2jG/VI9wZftt2utlcEAQENEywqg1qY1iNurwpU8I3D7/5Bi66KJwpJEmSGmlnGZCkVldd3fjz2hZo2moq4cdljXoAP0yGwePW+bQ9Envwya8/YdrsaWyasyn90vqt8zlbQll1uNHcJ4saxnrEw1E5sE3P9tmsL5PULYnJv5rM71/8PX179uVPu/8p1iXFVsU8KHoHskZCSp9YV9P1LH+5slhdvkySpHbMpl3qas46C6ZNg08+gXPPhaFD1/2ccYmQvimUfgXUQtb2637OOmlJaYxef3SLnW9dBAG8tzC8lFt51Nr1ET1hXA707CA/UcdtOo5xm677myodXvnP8PRmUFUCCemw/2fQfWCsq+pajjkGnnkGXnstvL3zzrGuSJKkdqeD/IopqcX06g
XPP9+y54xEYM+XYeadkJwLQ45p2fO3AyVVMKUAPotK13vGw9G5sHXP2NWldZD/StiwA1SVwryXYL3jYlpSl5OSAv/9b6yrkCSpXbNpl2Kpqgreegv694f11491NesmORs2OT/WVbS4IIB3yuChwsbp+rZ16XoPf4p2XL1HQlwS1FaGs0WyRsW6IkmSpBX466YUK0EA++4LL78M8fHw1FPh52o3FlTBvfnwxeKGsbRuMD4HtjRd7/jSNoR9P4D8lyF3d0gfFuuK1JEFQbhnSEJCrCuRJHUy7h4vxcrcuWHDDuEve1OmxLYe1QsCeKs03Bk+umEfmRbuDG/D3olkbAob/QYyNot1JerIvvwS+vWD5GT44x9jXY0kqZOxaVfHcc894S9FI0fCjz+2/fO/8QZcfHH4Z0vIzoZBg8L14LW1sMMOLXNerZPiKrjxF7h7Hiyp21g/vRuc3g+O7xNeg13Aopnw0l7w/EgonBbraqTYuuYaKCgIf5ZfeimUlMS6IklSJ+L0eHUMFRVwwgnhGvD8/PCXorvuarvn//RT2G238PZVV8FHH8EWW6zbORMS4O234d57wx3cx45d5zK19oIA3iyFRwobmnWAHdLh8GxItVlv7L1ToeDVumkJR8CvZse6IrW1IAjfQM3Ohu7dY11NbOXkhH8fkUj4d5GcHOuKJEmdiE27OoZIpPHFr7u18bfup582vp75p5+ue9MO0LcvnH8+VFau+7m01uZXwT3zYHp5w1hGNzgmFzbtEbu6YqbgTfjq6vDyZ1teAwkrWQ9QUxE2KdSGt7V6s/8LM++C3tvC8Ash0sEnugUBHHkkPPwwpKeHl2xriZ+JHdVll8GSJTBrFvz+9zbtkqQWFQmCIIh1EbFWVlZGeno6paWlpKWlxbocrcpjj8Ef/gADBsDkydCnT9s9d0EBbLUVzJkTPu8nn4TJyroKAjj++HDWwLBh8MorkJe37udVkwQBvF4KjxZCZdR7MjvWpespXTFdr6mER7Ohum4x/0ZnwTbXrXhc8Yfw5pFQvQi2uw36H9S2dXYki36AJ9cHgvBj5H9gveNjXdW6mTkT1lsvvB0fDyefDP/8Z2xrkiSpg2lqH2rSro7j0EPDj1jIyYHp0+GLL2DTTaGl3tz55JOGaf4zZsCtt4ZT/9XqipbC5HyYEZWu9+oGx+TB8K4807e2MmzECYA4qCxY+XGZ28BB37VlZbFTtQhmPwopfaDPPs1//JJCYNm7QnFQMaclq4uN7Gzo2RPKy6GmBjbcMNYVSZLUadm0S02Vltbym8VlZkJcXDj1vrYWcnNb9vxaQRDAqyXwWBEsjUrXd06Hw7IhuSum69ES0mDzP8Hn/wdJObDJ72NdUWwFAby8F8x/N/x8xD9gw9Obd47eI2DgEfDTQ9BzPVjvhJavs6317BnODLrttnCW0JlnxroiSZI6LafH4/R4xdhjj8Htt8M224TrIltqvf6yTZFUr2BpuCv8t1FLsHsnhGvXN+7K6frK1FZBpJvfQ1WL4OGoNf39xsCuT679ubqldvz17B3Bxx/DX/8a7hty+eVulCdJapecHi91BEEAKSlw4olw8MEt07AvKYJXRsOCT2D9k2Dbf3X5xqs2gFdK4PFCqIp6m3K3DDgky3R9peISYl1B+9CtO+TsFu6UD9D/4LU/V0JX3NUwBqqrYe+9YcGC8PPaWrhuJfsySGp5kyfD44+H/wZPb+asJEmrZNMuxdKll8Kf/hTePuoouO++dT/nd7eGDTu14e31T4bMrdf9vB1U/lKYPA++j0rXsxJgYh5slBq7utRBRCKw+7Pwy9PhmvbsFl4io5ZXWQnFxeGbonFxMNvLEUpt4r334Nhjw9tPPAHrrw+jR8e0JKmzsGmXYunxxxtu/+9/LXPOpEwaNr2KhGuUu6DaAF5aAE8UNU7Xd8+AQ7IhyRnKaqr4ZBg4NtZVqKm6d4dLLoE//jHci+R3v4t1RVLXMGe5TTZ/+SU2dUidkE27FE
u/+lW4Iz3AgQe2zDnXOxEWzYSid2H9U6Dn+i1z3g5kXmW4M/zMqHQ9OwEm5cEGputS53f55XDuueH10hMTY12N1DXsuy/suiu89hpsvTUcdlisK5I6DTeiw43oFENBAM88AxUV4Zr2BNcRr4vaAKYugCeLoLruJ1sE2KMX/CoLEk3XY2/hd/Der6GmAra5HnpvG+uKJEktJQhg4cLwChNdfD8dqSnciE7qCCIROOCAld93xx3w4otwyCFw+OFtW1cHNKcyXLs+a0nDWE4iHJsH66U07Rw/lvxIRXUFw7KGtU6RgndPhsLXw1/s3hwHB38f64okSS0lEgmXpUhqUTbtUnv0wgtwwgnhJkr33w8ffQRbbRXrqtql2gCeL4an5jdO1/fqBQdnQUIT0/W7PrmL4584noCAi3a6iD/v+edWq7nDqq2BX56ESDz0O3DtLl1Wszhs2KmFmvIWL1GSJKmzcbKo1B7NmhX+WVu3odyPP8aslPbsl0q4+if4b9R0+LxEOH8gHJbT9IYd4MZ3byQgPMkN797QCtV2Au/9Gt44FF4/GD46d+3Osc0NkDoAknJg5O0tW5/UVVxzDaSnw7bbrrj5lySp07Fpl9qjww+HTTYJb2+/vZdMWU5NAE/Phz//CD/WTYePAKMz4eJBMLSJ0+GjbdNnGyJEiIvEsXnu5i1ab6fxyxMNt39+YtXHrU7W9vCrWTA2P0zrJTXPnDlwwQVQVgYffwx//WusK5IktTKnx0vtUa9e8NlnUFgIOTnhNHkBMHtJuHZ9dmXDWJ+6teuD16JZX+bG/W5kw94bsrhqMWdud+a6F9oZ9f8VfH97w21JbS8xEeLjoaYmXGqS6iUxJKmzc/d43D1eMfDDD+Huqptt5u6qa/Dx3I955KtH2LbfKOKzDuSZ4nAdO0BcBEb3ggN7Qzff12h99Wvau0G/A9ZuTXt7EQTw1dXw0yPQd3/Y/I/+W1TH8eCDcPXV4Yysf/3Ljb/WxcyZcO+94d/l2LH+HJDUpprah9q0Y9OuNnb//TBhQrhe/bTT4OabY11Ru/VL2S9scNMGLOmWQ5BzDPsNP4kB6QMB6JsUpuuDkmNcpDqm/FfgpT0aPt/lv9D/4JiVIykGFi+GwYOhuDj8P/muu2DSpFhXJakLaWof2oFjEqmD+te/GjaYu+WWup20tTLTi76lIn1vggG/h6QBFFXMJy4CB/SGPwy0Ye8Uaqtg7guw4NO2fd6qhav/XFLnN3s2FBWF/yfHx8N778W6IklaKZt2qa2NGBFOv4uLg803dyreKvy4BF4NdqRXv/FAHInxCYzKHsBFA+GgLKfDx0TVQnjjCHhyfZixljNElhTBz09C+c/h528cCq+Mhme3hB/ubbFS16jv/jDoKIhLhn5jYODhbffcktqHDTaAHXcMb8fHw9FHx7YeSVoFp8fj9Pgu7ZVX4KuvwnVseXlt85xVVWHCvmAB/PrX4UZz7cTCyoWc9sxpfFX4Fb/b4XeM23Rcm9dQVRtec/2FBeHa9ZqgmuLy+Ryel8rheT1t1mPpiz/DZ/8H1M0UOWgm9BjS9McvKYSnN4XKAujWHfZ+G57douH+vvvDbk+3aMlSp/Pdd/D3v0NuLvzud5CyDjtwCpYuhfffh0GDoH//WFcjqYtpah/q7vFq/0pL4cknYf31YdSoljvvY4+FzTqEG/rMmNE2v/wkJMCZ7XN38r++/Vfu+/w+aoNaxj82nj2H7El29+w2e/4fKsKd4ecubRgbnNyNywbn0t+p8LEX1Kz+8zUpfDNs2AGqF0P+q9BrS1jwSTiWs9u61Sd1dkEAe+4Jv/wS3i4pgeuui3VVHVtiYkPaLkntlE272rfqathhhzANB3joofAa5i3hpZcaLpvz88/hju7Lro3e2VQvDtcOJ2as9rDFSxcTIZyuXxvUUllTudrjW0pVLTw5H6YWw7KpP/GRcFf40ZnhbbUDG/0Git6GBR/DsHOg5/rNe3yvrSCSCMFSIA5ydo
Khx8Cs+yA5DwYc2ipldzkLFsAVV4SbbF10EQwcGOuK1FKqqsJ12EEQLq365ptYVyRJagNONFXrWbgwbIZXpaQknJ6+YMGqj/nll4aGPS4Onnmm5eo79NCGTeCGDw/XtnVGvzwFj/SGRzLh67+v9tDzdjiPrfK2omdiT67c40r6p7X+VMHvK+BPP8ILUQ37oGS4eBDs39uGvV1JzIDdn4ND82GT3zf/8TPvqmvYgcytIHNrSOwFG54OA73UUov59a/hhhvg3/+Gg90Rv1NJTIRzz224/dvfxrYeSVKbcE07rmlvFdOmwd57h0nPySfDrbc2vj8/H7bYIvwzOxs+/RT69FnxPNXVsOWW8OWX4ecPPwyHHdZydU6fHk6L33NP6NFjxfuDAF58EQoK4JBDIDW15Z67JRS+DdP/Bj0Gw+Z/gm4rqe/5kTD/fSAI1xEfsaitq1yppbXwRBG8tKChWe8WgTG9YZ/M8Brs6mRe2AmK3qr7JA6OqrZRbw3bbAMffRTeTk8P3yBV5/Lzz9CzZ/j1lSR1WF7yTS1n9my49trmpdz/+AdUVIS3b7sN5s9vfP/UqWHDDlBYCM8/v/LzdOsGb78NU6aEbwS0ZMMOsPHGYRK1soYd4MYbYZ99wuuq779/yz73uqquCHfd/vm/8PX18MUVKz+ux9CwMYrEQ/fBbVjgqn1bHqbrL0Y17EPq0vV9e8e2YZ+3aB4PfPEA3xd/36Tjl1Qv4e5P7+ax6Y9RG9S2cnUdUG0N/DAl3G0+evr7wMNt2FvLxReHe2dEInD55bGuRq2hf38bdknqQlzTrtUrL4fttgsb7CCA++6Do45a8+M22CA8Pj4eMjLCRCDaVluFDXlNTTjtfeutV32utLTYXYYl+o2K114Ld5lNTIxNLcurKYfqutQ8Eg8Vc1Z+3Lb/DNcLVy+E4Re1XX0rUVkL/y2CV5ZL1w/Ogr16xT5dL1hcwKb/3JT5FfNJ7pbMByd9wPCc4at9zOEPH85TM54C4A87/4Er9ljFmycdSfHHMP1aSO0Pm1268hkcTfXZJfDVVeHtnF1hn3ehqgRy92iRUrUShxwSXnu6uhoyM2NdjSRJWkc27Vq9H3+EefPC2/Hx8NZbTWvaL7oIkpLCx59xxoqN7vDh8Oab8MILsNde4fXKW1tFBVxwQTgd/pxzwun7a3LwwWGNENbZXhp2gKTesMkF8NU1kJgJG5+78uMSe8E2q1/L3hZmlIc7wxdVNYwNTYFJuZCXFLu6or3z8zvMrwhnhSypXsLUmVPX2LS/OPPF+tvPfvtsx2/aa6vgxd2guiz8vKYCRty49ucreLXhduFbkLXdulSnpnKplyRJnYZNu1Zvgw3CFPyjj8Kplk3duT0xES68cPXHjBwZfrSVq68Op+0HQbgBXn7+mqcXnnZa+AZDQQGMGdM2dTbHllfBphdDXDLExce6mpWqrIXHCuHVkoaxhAj8Kgv2WEO6fu9n93L9O9ezZd6W3LTfTaQktO4l+Ub0HUFaUhpllWUkxCWw66Bd1/iYQ4cdyn1f3AfAEcOPaLij9Ct4dQxU5sNWf4MNTmmtsltW5YKGhh1g7iqWrjTV4AlQNC28PagJb/hJkiSpEZt2rV63bmEi/uabMHQorLdebOuprg53zn3lFTjuuObtnFtYGL7xUFsLlZXhJnlNWRO465obt5jq1j3WFazS14vh7nyYH5Wur58Ck/IgZw2TFn4p+4VJ/51EbVDLx/M+Zr1e63Hhzmt4I2gd9e3Zl09//SkvfP8Co/qPYrPczdb4mMmHTObozY6mR2IPdhm0S8Mdn10G5bMgqIUPzoChx0J8O5lSsDrL19jcy7otb8PTIGt7qCoNp8dLkiSpWdyITmuWkhJOJV/bhr2mJlwL3hLuuSfcHO7zz8Mp7h9+2PTHnnNOuHlPXFw4Tb5v35apSStYUgNT8uHvPzc07IlxcGQOnDdgzQ07QEV1Rf3GbhEiLFraNrveD84YzMnbnNykhh2gW1
w3DtjwAHYdvCuR6I3VEnoCkfAjPiXcd6AjSEyHLa4C4iFlAGx17bqfM3NryN0dIv6Xs1b+/Ofw5+/xx7fcz1JJkjqaigp46CF4441YV9LmTNrVut58M5xWvmhReN3g005bt/OVl4dp+bIrFZaXN/2x668PP/wQpvUJCetWh1Zpel26XhyVrm+YChNzIbsZWwKsn7k+F+98MX+b9jc2y92Ms7Y/q+WLbU1bXh1u/lc+B7b4E8S14x+3SwrCtevdB4WfD78ANv5d2GS7w3tsvf9+uBs8wMyZMGoUnHRSbGuSJKmtBQGMHt3QsN9+O5x4YmxrakNepx2v096q9twznMoeBJCc3NB0r63Fi8PLvr3xBkyaFK5Rt6ngxndv5JYPbmGHATtw8/43k9St7adhV9TAo4XwRmnDWFIcHJoFu2b4ZWq3fnoU3joSghrY9BLY/I+xrkjRpk2DHXYIb0cicPPNcOqpsa1JkqS2tnBh401WDzoInngidvW0kKb2oe04+lGn0KdPOB09CCA7e907t+7d4dlnW6a2TuKrwq8467kwhf666Gu2yN2CM0ee2aY1fLkY7pkHC6obxoalwjG5kNWONtzvUPJfg1+eDC+N1u+A1nuer/8WNuwAX/3Fpr292X57OP98uOOOcH+NY4+NdUWSJLW9Hj3C2WbT6ja33W+/2NbTxmza1bquvz5M2IuL4bLLYl1Np1RZXVl/OxKJUFlTuZqjW1Z5DTxcCG8vl64flg07p5uur7XS6fDynuHtr6+Dvd+E7B1b57nSN4Wid8MvVtpGrfMcWnuRCFxzTfghSVJXFYnA1Knw+OPhvlR77BHritqUTbtaV1YW/Pvfsa6iU9syb0su2PECbvnwFkb2G8kp27TNpcU+XwT35kNJVLq+cSockwe922jLgCXVS/jn+/+koqqC07Y9jV4pvdrmiVtb2fSG9Bug5PPWa9q3uR5SB0BVCQw7p3Weo60EAdx2W/gu/FFHhWvfJElS59C9O0yYEOsqYsI17bimXWqO8hp4sADeibqUd3IcHJ4NO7Zxun7ikydyx8d3EIlE2Hngzrx67Ktt9+StaWkpPL8tLPwWkvvAfh9CSp+2raG2CiLdmvcFrciHxT9Ar60hPgbrIh55BA4/PFySE4nAjBnhpSolSZLaoab2oV5/R1KTfboILp3VuGEf3h0uGww7ZbT9dPj3f3mfgIDaoJaP5n7Utk/emhLTYf/PYN+PYMyMtm/Yv7wKHkyB//aDki+a9pjij+DJofDCKHhxF6itXvNjWtr334ffhLW14aUmf/qpaY8rLQ2n2fXoAb/9bcPVKSRJktoBm3ZJa7S4Bu6YC//8BcrqerGUOJiUB2f2g14xuoLeGdudsdLbnUJ8MmRuBQk92vZ5q8rg0z+E0/Mr8uGrq5v2uFn3QW3dfgrz34XSJjb7LWnCBOjfP7y9227hrusffgjDhsGQIfDiiyt/3G23wWuvhVenuP56+KgTvQEkSZI6PNe0S1qtjxfCfQUNzTrAZt1hQi5kJMDLP7zMze/fzLDew7h0t0tJbMNp0SdtcxK7D9mdJdVL2DRn0zZ73la3aBb8+P/t3Xd8FNX6x/HPbnoPISShhY70Il1QLBQRVBQUFVSQKxbwiij2gteC/dpRrz+7WC8W8KIgTekd6aK0UFIoKZC+O78/TkjREFI22U3yffvKi9mZMzPPwiTm2XPOcz6DsPbQ6LKqvbfdD7yDIfeEee1Xr3TnRXTLm4dvB58QCGpaWRGeXsOG8McfkJhotm02mDgRdu0yvec33VR873tQUNHe9cDAqotZRERE5AyUtEvNlJNjethiY02FSXdLWgZ/vgd1ukDriWDz/EEuabnweSKsTSvYF+gFo+pBr1CTDx3POM4ln15CtiMbgDD/MO7te2+lxPPnsT9ZcWAF/Zv0p3FY4/z9LSNaVsr93Cb3JPzUE7KOABac8yk0va7q7u/lB+f/YHrYA2Oh0+OlO6/JNWD3huO/mXh9wy
s1zNPy9S3obQfwyRsGYrOB92n+l/ePf8CWLaaA3S23QNu2lR+niIiISCkpaZeax+mEgQPNcFc/P1i40AyTdZfMRFhwEVi5sPs98A6EFuPdF08prEuDzxIgrVAB887BMDoawgr91EjOTM5fYs6OnR//+JH0nHRuPvtmGoY2dFk8O4/spOvbXcnIzSDUL5Rtt29z6fU9ysl9kJVktm1ecGRF1SbtAFHnmq+ysNkg9irz5Uneftsk5ZmZ8MYbxbfx9YU336zauERERERKyfO7+6T6cDhg927IcsE64UeOwPXXw6BBpverLP74wyTsALm58MknFY+nIjIOmbm+lsMkYWl/uDeeEqTlwtuH4J1DBQl7kBeMrw+3NSiasAM0DW/Krd1vNe18g1i0dxFP/PIE/T/oj9NyuiyuRXsXkZGbAUBqVirL4pa57NoeJ6Q1RPQw2zY7xI5ybzyeaP9+k4CX5mdDu3awfLmZp96nT+XHJiIiIuJiStrFNTIzzS/ELVpAq1Zw+HDFrjd1Knz2GSxYAJdeanrPS6tRI7M+vN1uPkjo1q1isVRUeCdoMMxs+0ZA85vcG08xLAvWpJrK8OsLDYfvGmwqw/fMGw6/bP8yrv3vtTy26DFyHDnYbDZmDJ1B6v2pXND0AmzYcFpO/jz+J+k56S6Lr3+T/vh5+QEQ7BtMn0Y1OPmye8PAX+GihXDpLojq5+6IPMvx43D22TBpEvTtC4sWuTsiERERkUql4fHiGkuWwJo1ZjsuDr74AiZPLv/1jh41maTTCamp5k97KT9jCgw0PXAff2yqRl9zTfnjcAWbHfp/D+lx4B9lqoJ7kNRcmJkAG04U7Av2gmujoFtIwTJuqVmpDPpkEJm5mViWRYBPAPf3ux+AEL8Q/nH2P/hh1w84LAfXdbiOYF/XVT1vW68tm27dxLK4ZVzQ9IIic9prJC8/iL7A3VF4pq1bzc8HMA/nokVwgf6uREREpOZS0i6u0ayZSaptNtO73br139tYVukX8n78cVi71vxy/vLLpy8gdTotW5preAqbDYJi3R1FEZYFq9Pgi0SzpNsp3UJMwh7yl7/y5Mzk/N5zL5sX+1OKVuG+9KxL2X3nbo6kH6FLTBeXx3tW5FmcFXmWy68rLvD117B6NYwaVfkjW7p0gSZNYN8+8PKCYcMq934iIiIibubW4fHTp0+nR48ehISEEBUVxfDhw9m5c2eRNpmZmUycOJG6desSHBzMiBEjSEhIKNJm//79DB06lMDAQKKiopg6dSq5ublIFWrdGubOhXHj4MMP4ZJLCo5t3myquAcGnr4Q1F917QoHD0JGBtx2W+XEXIsl58Cbh8za66cS9hAvuKUBTGjw94QdIDYslglnTwAgIiCi2HXRY8NiObv+2djdVR3fkWnWFpeq8913cNVV8NJL0K9fxafGnElwMGzYALNmwbZt0LNn5d5PRERExM1sllV4cdqqdfHFF3PNNdfQo0cPcnNzefDBB9myZQvbtm0jKCgIgNtuu40ffviBDz74gLCwMCZNmoTdbmfZMlOIyuFw0KVLF2JiYnj++ec5fPgwN9xwAzfffDNPP/10qeJITU0lLCyMlJQUQkNDK+391lojR8I335gh7t7ecOKEqeouVc6yYGUqfJkE6YV613uEwDVREFyKAQ3HM44T7BuMj5dP/r4le5fw6OJHiQmK4fVLXqdeUCnX9nal45tgwQWQfdzUDej9f1UfQ230r3+ZUS2n6k788gucW8bK865y+LAZodO7N9RzwzMoIiIiUgalzUPdmrT/VVJSElFRUSxZsoTzzjuPlJQU6tWrx8yZMxk5ciQAO3bsoG3btqxYsYLevXszd+5chg0bxqFDh4iOjgbgrbfe4r777iMpKQlfX98z3ldJeyW7+WZ4/33zS31ICBw7Zoa1lkZcHKSnw1kaFl1RyTnwcQJsOVmwL9QbrouCriHlv67D6SDiuQjSstKw2+zc2PlG/u9yNyTMqyaYJfWsvE8jhs
dBYKOSz5GK+/136NULkpNNgbjly93zodyePWbofGqqKUS5eTPExFR9HCIiIiKlVNo81KOqx6ekpAAQEREBwLp168jJyWHAgAH5bdq0aUNsbCwr8pb6WbFiBR07dsxP2AEGDx5MamoqW7duLfY+WVlZpKamFvmSSvTMM6YYXP/+MHt26RP2Tz+Fpk1NMbkHH6zUEGsyy4JlKTBtb9GEvVeoqQxfkYQdwGE5SM9Jx8r7LyUrpWIXLK/gZmA5zbJ63iHgW8c9cdQ2rVubhHndOlMA0l2jaObNMwk7mCUjTy37KCIiIlLNeUwhOqfTyeTJk+nbty8dOnQAID4+Hl9fX8LDw4u0jY6OJj4+Pr9N4YT91PFTx4ozffp0HvekImU1Xd265Vsr/ZVXCobcvvwylHK6gxQ4nte7vrVQsh7mDWOioZOLirv7evny+pDXmTJvClFBUUw7f5prLlxWbe8xSXvaLmg9EbyD3BNHbRQebnrZ3alPHzP9JjcX/P2hRw/3xiMiIiLiIh6TtE+cOJEtW7awdOnSSr/XAw88wJQpU/Jfp6am0rhxDV9C6kx+/x1O/Z289FLx1d+rWteuZn6q3Q55H+RI6VgWLE2Br5Mgs9AS931C4eooCCzlYIfSuqX7LdzS/RbXXrSs7D7Q4SH3xiDu06mTqWC/eDEMGgTNm7s7IhERERGX8IikfdKkScyZM4dffvmFRo0K5qDGxMSQnZ1NcnJykd72hIQEYvLmKsbExLB69eoi1ztVXT7mNPMZ/fz88FMhtKKuv94MbwVITDS//LrbK69AixaQlgZ33OG+OHJPwuGfILg51Oni0ksfyzjGx5s+Jjo4mqvbX+2SqutHc+DjeNieXrAv3Buuj4YOrls6vfY6OAdW3Ah2X+j7OUT3d3dEckrXruZLREREpAZx65x2y7KYNGkS33zzDQsXLqRZs2ZFjnfr1g0fHx8WLFiQv2/nzp3s37+fPn36ANCnTx82b95MYmJifpv58+cTGhpKu3btquaN1ATHjpmh6E4nHD9eefd5/32zHNyLL5ru4JL4+8O998ITT0BUVPnvaVmwdTr8dA5se/bM9y3M6YD558KvI2Du2XDgu/LHkSfXmcuB1AM4nA4u+ugi7vrpLq7977U8s/SZCl3XsmBJMjy+1yTsOc4clsUtY/W2l7jIvlQJu6usuR2yj0FmAqy7093RiIiIiNR8f/5ploOupdza0z5x4kRmzpzJd999R0hISP4c9LCwMAICAggLC2P8+PFMmTKFiIgIQkNDueOOO+jTpw+9e/cGYNCgQbRr147rr7+e5557jvj4eB5++GEmTpyo3vSyePVVuO46s/3KK5Vzj7Vr4aabzPbcudCyJVx+eeXcq7DD82BTXiG7oyugztlQf2Dpzs04AMc35L2wwYFvoVH5Yz6ecZxz3juHHUd20DWmKxvjN+Yf+3X/r+W+7pFs+CgBdhbqXd92eBnbNt4BJ7dy5fZAkqYmEeATUO57SB7vEMAONhv4aLUJj7JnD6xaZZaca9jQ3dGIiIiIKzz2mFli1m6HDz+EMWPcHVGVc2tP+4wZM0hJSeH888+nfv36+V9ffPFFfpt///vfDBs2jBEjRnDeeecRExPDrFmz8o97eXkxZ84cvLy86NOnD2PGjOGGG27gX//6lzveUvU1ZIjpbT92zPSEV4ZCoyEAyJvGUOlykkt+XZKABhDSKu+FE6IHlNj8lIQTCWTmZv5t/6zts9hxZAcAG+I30LNBz/xjozuOLn1ceSwLFh2Hf+0rmrCfGwat0j7Fnr4dC4uTOSfJyM0o9ho5jhweX/w4Y2aNYeWBlWWOodbp+5kZEh8zAHq/5+5o5JTffze1L6691vx56JC7IxIRERFXePFF86fTCf/+t3tjcROPWqfdXbROexXJzobhw00ve/fusGABlPfv25kLu2ZAxiFodSsENTl9W0cWLB0Jh+ZCg0ug39fg5Vv6e2UdZdv6p3hqw2dssOrw6ZWf0rX+6e
fNTpg9gf+s/w91/Ouw8MaFdInpkn9s0Z5FXPjRhdhtdpyWk1X/WMXJ7JPUC6pHh6iyFdtLyoYP42FXoVw8wgduiIa2QbA1cSsDPh5A4slEHjr3If51QfEfZE3/dToPLXwIm81GgHcA8ffEE+yrsfRSzcyYAbffXvD6yy/hqqvcF4+IiIi4Rq9epvaWZcHYsfB//+fuiFymtHmoknaUtFe5jAwzX91mK/u5lhMO/Qh7Z8K+mWCzQ2AjuGy32a4kjV5qxKG0Q9hsNno17MXy8cuLbXco7RANXzLDcr1sXoztMpZ3L3u3SJtPfvuEeX/O4/KzLmdEuxFljsWyYGEyfJMEOYW+e/uHw5WR4F+oMrzTcpLjyMHP+/RTRW6dcyvvrn8Xh+UAIO6uOBqFNjptexGPtHWrKUKXkwNBQbBtG8TGujsqERERqajERDN9NygIJk+GwEB3R+Qypc1DPaJ6vLhAXJxZn/gvxfw8UkAF5lWv/SfseqPgteWAk/tMhXefkIrHVoys3CzSstOwsLBhw8fuc9q24f7hhPiGkJ6TjtNy0rzO35edGtNpDGM6lW8uTkJe7/qfhXrXI31MZfg2xSxLbrfZS0zYASb1nMTX277maMZRbj77Zs9J2HMzYPf7YPeGZmPLNjpCap/27WHjRli6FC66yPMT9o0bzQcNF18Mdeu6OxoRERHPFRUFTz3l7ijcSkl7TfDuuzBhgumCnT4d7r/f3RFVnriv/76v2Y2VlrADPLfsOVKzUgGz4sFzA587bdtAn0AW3riQ11e/TsuIlkw9Z6pLYnBasOA4fHekaO/6BeFwRT3wq8Aggw5RHTh09yFSs1KJDIyscKwus3wMHPgGsCBpBfR5390Riadr1858VZbDh2HWLPMBwfnnl/86CxfCwIFmbl6TJmZUQA3qNRARERHXUtJeExRePu2FF2p20t5gKOzOK/7VbCy0vQfCKndpv8MnDuNl88JhObCwaBhaclXq7g2688HwD1x2//gs+DABdhfqXa/nAzfEQGsX/Z7v6+XrWQk7QNIvQN5znVT+yvoiLpGebmpxnCpwN2sWXHFF+a41d27B9r59sH07dOtW8RhFRESkRnJr9Xhxkc6dzRIIXl7QqZO7oymfA9/B6lvOvA56z7eh7xfQ7yvo9S6Etzdz4y0LMo+YOe8uNrn3ZKKDowG4s9edVTZ83GnBT8fgiX0FCbsNuKgOPNLUdQm7x2oxvtD2Te6Lo7yysszyJP/4B2ze7O5opKJ27y5I2L28YNGi8l9r8OCCD1obNYI2bc58zmefmWfphx/Kf18RERGpllSIjhpQiO7ECbP8QXa2Kc5Q3eZHHlkN83phPkNywqBVENnzTGcVcGTCwsGmZzasPQz8FXzruDREp+UkMzeTQJ+qyZQPZ8EH8bC30KpxUb5wYzS0rOnJ+imWBUdXgc0b6nZ3dzRl98gjZv6V3Q516piEz+f09RDEw2Vnm0J327aZpH3ePLjwwvJfb90682HO0KFQr17JbX/+2Qynt9vN98WmTdCxY/nvLSIiIh6htHmoetprguBguOsuk6x//rnp4asqlgXffgvPPw8HD5bvGmm/5204//K6lOJ/zhtKDaRshX1flC+OEtht9ipJ2J0WzD0KT+4rSNhtwMA68EiTWpSwgxlBEdm7eibsAH/8Yd6DwwFHjpgP16T68vWFVavMz7stWyqWsIMZDj927JkTdoBdu8yfTqf5mbt7d8XuLSIiItWKkvaa4rrrYMoUuOMOuPXWqrvvBx+YeZ333WfWUCzPBwYNh0FIa7Md0tq8LovAU8PV85aQC/TwqtGncTALntkP3x6B3LzxL9G+cG8sjIwC3+r43fr7G7D4Etg1w92RVL077yxYKWHSJNPbLtVbcDBcfnnphrO70siRBSuDdO0KAwZU7f1FRETErVSIrrpaswZeftn8IvfII7B8ecEcyWXLqi6OZcvMkE2n0/S0HzpU9mXnfMPhks1m6bbgplDCkmrFqt
PFzHOP+xqiL4QGQ8p2vps5LPjxGPxw1GyD+fhhUARcWhd8qmOyDhC/ANZOMtuH5kJoO4ju796YqlLv3pCQAKmpUL++u6OR6qxePdixAw4cMNXmvbzcHZGIiIhUISXt1VFGhulpOTXc1svLLPk2fbp53b49XHABDBkCU6eaIbqVZdQo09sOcM455hfK8vDyhdBW5Y+jydXmq5o5kGnmrscVGqBQ3xdujIFmFVjO3iNkJpT8ujYICjJfIhXl6wvNm7s7ChEREXEDJe3V0YkTpvcOTMK+bx+8/z5ceaUpbHRTXqXtxYuhQwe45JKCc51OSE42Q3VdkcwPHAg7d8LevdCvn+l1l9M6mX2SJ355gsMnk+je/hFmH8kly5FLy4gW+Hr5MLgODK2E3vV9yfu47+f7cFpOpl80nRYRLVx7gyOrIGkZNLgEwtpAwmLY9CDY/cGZCVHnQ8NLy3dtZw6kbIfgZuAT4sqoxZNlZsL+/dCihXqWRUREpFZT0l4d1asH//wnvPoqhIWZivE2m1lD+MCBom2PHCnYTkqCc881SfbFF8P337ummnWLFuarhlt1YBXzd89nYPOB9GrUq1zXeGDBA7z+23cQPZaZaxeS68wB4NDRtfw8aAxN/F0ZcYEbvr2BZfvNtIm9yXtZffNq1138yCqYdw7ghN8egUt3wqp/wMn9gA3CO8GAci6P5ciEeX3h+HrwqwuDV0OwehtrvIMHoWdPM92mWzf49deC+gAiIiIitYy6RaurV16BY8fg8GHo0qVg/yWXmCWEwAyRHzmy4NjHH8PveZXZf/wRli6tsnCruy2JW+j3fj8eXfQo/d7vx5bELWW+Rq4TVmREYTV+AMu3QV7C7oRjPxC3+dZKS9gBEk4k4LAcOCwH8SfiXXvxpGXkV/53pMPxjXl1CWxmcr7dr/zXPrLCJOwAWUdhditYOsr0vkvN9cUX5mcbmKXRfv3VvfGIiIiIuJGS9uqsTh0zz7EwX1+YMwdyc2HhQggstEZYo0amWJ3NZr7KUhwrM7Og0N22bTBokOmt37mz4u+jGlh/eD25zlwsLHKduaw7tK5M5+/LhKf2Q/0m4/G2m3+z5oF+sP9pODqbUe1HVEbY+Z4f+DxBPkEEeAfw0uCXXHvxBpeAV95zFtAAIs+BPh9DxNkQ0QN6/1/5rx3cEuy+5P+ospyw/0s49L8Khy0erE0b8/PGywu8vTWXW0RERGo1m2WdysRqr9Iual/tWZbpoV+2zCwRd8UVpTvnn/+E1183ReYWLYIRI2DTJnO8Rw9YubJy43ajLYlb+Hrb17SKaMVdP91FUnoS9QLrsenWTdQPOfOHHjlOUxX+p+NmDXYAp5XLhaEZXBUTwuqDy0nLSmNA8wF42St33q7D6QConPukHzA97JHngF+Ea6+dtAK2PQMHvy/Yd9FCiL7AtfdxtZ07zfdYfDy8+CKMG+fuiKqXTz81PexXX13xNdFFREREPFBp81Al7dSipL08du8umK/u5WUS+O++gz17zL42bUzPew0UfyKeFq+2IDM3E6fl5MuRX1I3sC5dYroQEXDmxHRPBnwYD4ezC/Y19oOxMdCoEofC10iWBZsfNz3ssSOg7b2VuyqCK1xzDXz9NTgcpnbEiRN/HxkjIiIiIrVWafNQFaKTkoWGmoQjN9dUno+ONr3uY8aYSvGvvuruCCvNziM7Sc9JB8Bus7PjyA4e6f/IGc/LccL3R2H+MTj1iZiXDYbVhcERZlvKyGaDTtPMV3VxqnCazWaS9dq6ssK+fWZe+rnnmiKaIiIiIlImStqlZJGRMHu2SdTbtYO77jIJyNGj7o6s0vVo2IN29dqxLWkbgT6BjGw38ozn7M4w664nFOpdb+Jv1l1vWIF6bFINTZ9uvk8OHYKnnzZzswuzLLP8YmhozV3SbNs2s6pFRgZERZnXdeu6OyoRERGRakXD49Hw+Oou8WQie47voWv9rvh6uXb4cUZOBmsPreWsyLOICoo6bbtsJ3
x3BBYcL+hd97bBpXVhUATY1bsuhTkcZr777Nlm+smvv5atMGR18dJLcPfdBa9nz4Zhw9wXT2V48UV49FFo2tQUAW3WzN0RiYiISDVR2jy0lo7XlJpiw+ENNHulGb3/rzfnvX8eOQ6zFJjD6eDLrV8yc/PM/H3lEeATwLlNzi0xYf8jHZ7YBz8XStib+sPDTeDiun9P2HMcOby++nWmLZ7m+uXXpHpYudIksGDqQ3zwgVvDqTTnnlswiiA4GM4+273xuNqxYzB1KqSnm8KD06e7OyIRERGpgTQ83hOtWwf33mt+yX3tNYiNdXdEHmvm5plk5WYBsOrgKrYkbqFr/a7c+eOdvLHmDQDm7prLx1d+7PJ7Zznhya2bWX7Cn6bhTfG2++Btg8sjYUCd0/euP7zwYZ5f/jx2m52vt33NltvLvua7VHP165s57pZlakU0buzuiCpHjx6wejUsX26WiGzQwN0RuZaPj/nKyTH/liEh7o5IREREaiAl7Z5oxAjYv99sb9wIGzZAhIuX0aohujXohsNyYLfZCfYNpml4U8Ak6qf8+OePLr/v7+lwx/q1zNtv1muPCa7P5C6XcWM0xJxh7vq6w+uwsHBYDrYlbeM/6/7DqA6jCPXT1Ixao3lz+PZb+Ogj6NXLLMFYU519dvl72D/6CJ59Ftq3h3ffNfP/PUlICPz3v/Dkk9CypRkmLyIiIuJimtOOB85pj4iA48cLXl97Lcyc6b54Kir9IMT9F8I6QIzr11v+autXbErYxHUdr6NdvXYAPLjgQaYvNUNVb+9xO29c8oZL7pXlhFlJsDgZ/vfH/ziQEgdWDhz5lpO3zyHQ58xruX297WtGfT0Kp+XM39c1pivrJqzD5unLmLlLVpb5HvD2Nt8Pfy3qJjVPQoLpmXc6zaiEBx+EJ55wd1QiIiIiLqMl36qzGTPMGs9glos6cMC98VRE7kn4sTtk5s3dPvcbaDzcpbe4qv1VXNX+KhbuWcilMy/Fz9uPj4Z/xIXNLiTXmcugFoNccp8dJ+GjBDiaN0W+aVgTDsQvgoSPuKBh+1Il7AAj241kz517GPvtWBbtXQTAhvgNpGalEuYfVrEgM49A0lKI6AZBNWjI9dix8PnnZnvZMnjrrbJfY8UK2LwZLrsMYmJcGp5UglPLTIL5OZiV5d54RERERNxESbs7HDhQMIzyX/+CRo2KHh81ChITYfJks9bzY49VeYjFSj8Iv14JaX9Cx8fgrDvOfM6J3QUJu80LEhe7PGk/Zdy344hLjcNmszFx7kRW/WPVadtm5WaxN3kvzes0x8fLp8TrZjrgv0fgl+SCfT42eLx9e+wNh5GY3oNLW19aplhjw2IZ26UgaT+vyXkVHx6feQR+aA9ZieAVCE1HQ8YhaD0JGlwM6YcAJwQ2OuOlPM7PPxdsz59v/szOhrffhtRUuPXWkpcS++knM6caYNo0+P13UzNCPFfDhvD886a4W9u2cM897o5IRERExC2UtLvDtdeaXj+AP/+EJUv+3uaOO+Cmm0yRI9+/LGN2arhoVdv2DBxbB5YD1t0JTa4F/8iSzwk5C8LaQco2wAaNrqi08Ly9vLFhhpd720//aCeeTKTnf3qyL2UfHaI6sGL8CoJ9i0/gtuf1rh8rVIC+VQDcEANRvkCd/uWO94bON9Amsg2H0g5xccuLKz40PmmpSdgBHOnw538AGxyeB12mw4apgAXdXoGz/lmxe1W10aPhlVfM9pgx5s+774Y33jC9sHPmFHxPFefnn833jNMJhw+bSt/dulV+3FIx99yjZF1ERERqPS355g5xcWadZoej5KHvQUFFE/Y//zTFjvz84PHHy3bP3FzIzCy6z3Kar9KyF6qwZvOCEhLjfF6+MGgV9J8NQ7dCdPmT3DP55IpP6Bjdke4NuvPOsHdO2+6b7d+wL2UfAFsStzD/z/l/a5PhgE/i4eUDBQm7nx2ujYK7G+cl7C7Qs2FPhrcZjr936YbWlyiim+lhB8CG+f
a2zJz7na+QvyDd1mq4LNW//w0LF8Ivv5iecjBVyU9VX1+/vuTzhw0zyT2YddHbtavUcEVEREREXEVJuzs884xJxn194emnS3/e88/D3r0mAZ82zRRqKo2lSyEy0nwI8PLLZt/hefBVOHwZDPu+KN11OjwEDS83BeXO+QRSd8Ivw2HNRMhJPf15PsHQcBiEti7dfcqpT+M+bLx1I6v+sYr2Ue1P265NZBsAvGxe2G12WtVtVeT41pPw+F74NaVg31mB8GgTOL9OQe7ncYIaw5D1pif9vO8gKG+pwFa3QXgX80GLzQvCO7g1zHKx2eCCC8y636f+AW67reD4rbeWfH7//vDbb/Dll7B2rZl2IiIiIiJVa8ECuP128zuZlJqqx+Om6vEZGebPsiQPDz5olj+yLJPwJyaWbgmkwYPNPOBT52VkwI9nQ/JvgAX+9eHKQ2WL35ljkn5Hunnd7Hro81HZrlFKlmXx0oqXWLh3IaPaj+KGzjdU+Jpfb/uaBbsXMLzNcAa3HAxAugO+SoLlhZJ1PzuMrAfnhnlwsn46lgXOLPDyh+xk2P6CGVnR9m7wK2H+d3Xyxx9w4gR07lwN/4FEREREapEdO6BDXueRwwHz5sHAge6Nyc1UPd7Tlaen78EH4ehRk6hMnVr6NYvr1zcJjc1metxtNvCvB7a8gRZ+Z5iXXpzcjIKEHSB+UdmvUUrf7viWe+abea3/2/U/OkR14Oz65Vz3Oc/IdiMZ2W5k/uvNJ+CTBEjOLWjTNhCuj4G6Jdep81w2m0nYAXzDofOTbg2nUrRs6e4IPEN6uinIp6r4IiIi4qn++MMk66fs2FHrk/bSUtJeFfbsgfHjzdrrL71khvmWR3CwqZZdVv/+N/j7m4T/4YdNMtfrPdhwDzizocszZb+mdxDYfUyPO5hic5Uk/kR8kdcJJ0o5LaAU0h3wRSKsLDS6398OV9WDvtWxd91dDv0IyZsg9moIbubuaGqXVatgwAAz4uD2201xPhGpGMsyU8syMsz3lzuKv4qI1DQXXGBGR27aZFaJGTnyzOcIoOHxQBUMj7/8cvjhB1Mwq04dOHIENm40RbUGDTLLGVXExo2m0nxOjknqzznHFVGf2cEfYP3d4B8N53wEQU0q5TbJmclc9NFFrD+8niEth/DtNd/i61XxSnCbTsCnCZBSqHe9fRCMiYaI6tq77g5x35ilALGBbwRcvht8qmiaiZhq+p99VrCm+ZEjJS9/JyJn9tRT5kNugJtvhndOX9xURETKIDfX9Lg3aaIaQ2h4vGfJzCwYCnLsGHz6KYwbZx7agADYuhWaVaB3csIE84kVwNixZg1qV7IsU6wudQc0vwGCm5v9DYear0rktJxk5Wax+h+ryXJkEegTeOaTzuBkXu/6qkK96wF2uDoK+oSqd73MkpaZAneWA7KPQtqfENHV3VHVLJYFmzdDvXpmukthzZub415eEBYGISHuiVGkJpk5s2D788+VtIuIuIq3N7Rp4+4oqh2N96oKd91VsG23w1tvmYQdzNC7NWsqdv1TPWynlr9ytT//D5ZfC1uegJ96m/nsVeBE9gl6/qcnMS/G0O2dbuQ6c8980hlsSINpe4sm7B2DYFpTOEfD4csndqRJ2gHCOkLY6Sv3SzmNGWOGkzVpAj/9VPTYww+bJSDHjoVFi4ouEyki5TN4cMH2gAHui0NERAT1tFeNc881w+JTU02P+0UXmeWn0tJMYbj+FVy7/O23zS/sOTnwn/+4JOQijq0p6EnNSoKMQxDSwvX3+Ys5v89h3eF1AGxK2MR3O77j+s7Xl+taabnweSKsTSvYF+gFo+pBL/WuV0xkb7j0d0jbBZHngAumLkghyckFvX65ufDuu0UTCl9feOQRt4QmUmM9/zz06mU+WL/mGndHIyIitZyS9qoQFGQK2rz7LrRubYazT5hg1ovu0weiok5/7r//bdZ1b9/erGcYWUyl927dzNDZytJ0DOz+wCTtUedXWaGxJmFmjr
wNGxYWTcLLN2d+fRrMTIC0QsUqOwfDdVEQrrnrrhHUpNJqGtR6ISEQGwsHD5oP/bp0cXdEIjWflxeMGuXuKERERAAVogPctE57aRw4AI0bm20vL7jnHpPAu0P6QTi5H+p2z6sanwtH10BgIwhqXGm3/fS3T5mzaw5DWg4p8/rsabkwM9Ek7acEecE1UdAjxLW965m5mcz/cz4NQxtWeDk6kb/Zt8/MqW3YEG65xfw8EBEREZFqTYXoagIvL5NZWpb58nbjP1dgQ/MFJpbFQyD+Z7D5wAU/QsyFlXLb0Z1GM7rT6DKdY1mwLs0k7CcL9a53DYbroiHUxX+NlmVx8ScXs2TfEgA+G/EZ13TQcEpxoSZNTDXryrB/P8THmxE7+jBARERExOOoEJ07xMWZCvK7d5fcrn59M0e9dWsYPhymTq2S8M4o/YBJ2AFwwN6P3RpOYam58PYh+M/hgoQ9yAturg+3NHB9wg5wLONYfsJuw8Z/t//X9TcRqQw//QQtWpi5u8OHm0+8RERERMSjqKe9qh04AB06mKJ0gYGwYYNJyk9n/Hjz5Un8oyCgAWTEg+WEuj3dHRGWBavTzFJuhXvXu4XAtVEQUolPep2AOnSI6sCWxC1YWFzYtHJGHYi43AcfFKw4MWcOJCWVXGNDRERERKqckvaqtmyZSdgB0tPNEk2nknanE777zlSrHTnSc5du8vKDQcth90cQ0hKauHcoeEoufJoAm04U7AvxgmujTdJe2ew2O7+M/YUvt35J47DGDGk5pPJvKuIKPXqYNajtdmjUCCIi3B2RiIiIiPyFCtFRxYXo9u+Hdu3g5Enw8zM97W3bmmP33w/PPmu2r7rKVIt3FUcWbLwPjm+C1pMgdoTrru0i+1P2s+HwBvrF9qNuYN0ztrcss976F0mQXqh3vUcIjKrk3vUq5ciGlC0Q3Bx8w1133f374cYbISEBnnsOhg1z3bWlenA64eOPTaG7ceMKCl+KiIiISKUrbR6qpB03VI/fvdv0sPftC23aFOzv0AG2bjXbwcFmHXdX2f4CbLgXsMya65fvh8AGpTp1b/JeUrNS6RjVEVslLWi+LWkbPd7pQXpuOjHBMWy9fSsRAafv9UvOgY/inby9YzlxqftpXqcFA2J7MjraRtcK9q47LScnsk8Q6ucBKwk4smDeOXB8PfiEw+BVEFrCdIqyuOYa+Pprs4xYUBCkpKgQmYiIiIhIFSltHqpCdO7QvLmZp144YQfTu37KlVeaP+Nmwbx+sPoWyE0v/z2zjoEt75/bckBu6T4Q+HzL57R4tQWd3+rMpLmTim1jWRZTfppC4FOB9H2vL8cyjpU5vLm75pKe9/7iT8SzIm7Fae4Fy1Ng2l6YfXAP25K2kpaVxqZdb3OetbDCCXv8iXjavtGWsGfCuPKLK3E4HWc+qTLs+xKWXw9bnjQJO0BOKuz73HX3cDiKbuvzOxERERERj6Ok3R3efx8uvxzeeKPo/kcfhZ9/htmz4b33IDMJll4NR5bBH+/C9hfLf8/WkyC0jUncz5oMIaXrrZ2xdgZOyxSqemvtW/nbhf2W8Bv/XvlvMnIzWHlgJTPWzChzeOc2ORd73ocKwb7BdK3f9W9tjufAawfhw3jIcILTssCRCofehPj38SG7zPf9qw82fsAfx/4A4Jsd37Dq4KoKX7PMjqyGZaNg70zY+iTYfc3oCJxQp7Pr7vPMM9C1qxkS/dFH7l1SUEREREREiqXf0gtr0QKuvhouvNAME77sMlOgyZVWroSbbjLb338PLVvC4MHmtc0GF11U0DYz3fSKnzqWk1z++wY2gKFbTLV3W+nfU48GPfh136/YbDY6RnXMT6wLC/INyt+2LIsQv7J3d/ds2JOV41eyPG45l7S6hAYhBUP3LQuWpsDXSZBZ6DODa2ObEp3wOz/HH+aqHrczuOXgMt/3rxqGNMRpObFhw2azER0UXeFrlsiRZZbQC2oK9ryh6Sf35B3Me7PtHgTHSVOlv9
Hlrrt3ixawZo3rriciIiIiIi6nOe0UmksAFJlJMHEivP66a2/2zTcFQ9/B9KiPG3f69psege3PQkgbuOB/ENjItfGcQY4jh7fWvsWxjGPc2v1WooOLT2Lf2/Aeb655k54Ne/LyxS/j6+WayvdHc+DjeNheaGZAuDeMiYaOwS65RRFOy8nzy55n1cFV3ND5Boa3Ge76m5ySfgjm9TJJe91eMGAxePlDzgn4+Vw4vhHC2sPAZeAbVnlxiIiIiIhIlVMhujI4bdLeqBHExbn2ZpmZMGQILF4M3bvDwoUQUgXrkhV2cj+svwec2dDlGQhrc+Zzqphlwa95vetZhXrX+4bByHoQWBPqpW1/CTbcA+R9C17wE9QfZLYtJ2QcBv+Ygh54ERERERGpMUqbtGt4fGGxsRAZCevzCn8NH+76e/j7m0T9xAlTIb6SqrGXaNV4SFgEWHBiNwz9repjKMGRbPg4AXYU6l2v4w3Xx0D7oNOfV+2Etia/mj9AULOCYzY7BDZ0S1giIiIiIuI5lLQXtnmzWfpqzhyTTFfWutU2W9X3rheWmWR6crEgK8l9cfyFZcGSZJh1pKB33cKikXMP/X2O0C6wB+CGDzkqS8Nh0PtDSPoVmoyC0FbujkhERERERDyMhsdTieu0ZyfDtufAyoG2U8E/ynXXrojD82HpSHDmQu/3ocnV7o6IpGz4KAF+L9S7HuEDJ/a/yetLJgLw0LkP8eSFT7opQvFITqcp6Jiba0bGqAK+iIiIiFQTmtNeBpWWtP9yJRz8DrBBvb4wYEnFrpewCA7NhfqDIeaiM7cvyaml28pQSb4yWBYsTIZvj0B2obnr54XDiEg467Um7E/ZD0BMcAyH7z7sljjFQ911F7z8stkeN84UdhQRERERqQY0p90TpGwtSI5TdlTsWsc3wYIBZnv7C3DxWog4u9Sn7zm+h1dXvUpMcAy3dLuFl1e9TNLJJKb0mUKLiBYVi62cErPNmut/ZBTsq+sDN0RDm7y56+c1OY9PfvsEGzbOiz3PLXGKB/v++4Lt2bPdF4eIiIiISCVR0l6S3Fz44ANITTVrq4eHl+389g/CypsAp9muiOQt5K/bDZD8W6mTdsuyuPCjC4lLicNpOflq21dsiN+ADRs/7PqBPXfuwVaFBfGcFiw8bnrXcwqN87ggHK6oB36FOv/fvfRd+jXuh9NyMq5rCUvjSe105ZXwwgtm+4or3BuLiIiIiEgl0PB4ShiWcPfd8NJLpnBcv37wyy9lv3hGAuCEgPoVCzLrKMw9G9L3m7XaL14P/vVKd2puFv5P+QNgw0ZkYCRH0o9gYWG32cl8KBMfL5+KxVdKCdnwQTzsLtS7HukDN8ZA68AqCUFqEsuCuXPNB2xDh4KXlscTERERkeqhtMPj3Tuh2dMtXWr+tCxYtap81wiIrnjCDuBXF4Ztg8FrYNiOUifsAH7eftzR8w4AvO3eTO49GV8vXwDu6XNPlSTsTgvmHYN/7S1I2G3ARXXg0aZK2PMd/w1+ewwO/s/dkVQPNhtccglcdpkSdhERERGpkdTTTgmfcLzzDtxyi9m+5RZ46y33BOgiu4/vJswvjLqBdUnOTOZk9kkahlb+WuCHs0zv+t7Mgn1RvnBjNLRUsl4g4zB83xIcmYATzv8RGgx2d1QiIiIiIlIJVIjOFSZMgL59IS0NevVydzQV1rxO8/ztcP9wwv3DK/V+Tgt+OgZzjkJu3kdDp3rXL48EX43zKCp1JzhOrXlnh2NrlLSLiIiIiNRyStrPpH37yr+HMwd2vQ1ZSdDqVtcMp3ezQ3m96/sK9a5H+5q56y0C3BeXR6vbE0LbQup28A6CxiPdHVHxLAsOH4Z69cCnamohiIiIiIjUVkraPcFvj8C258z83P1fw7Ct7o6o3ByFetcdhXrXB0bAZXXBR73rp+cdCBevg+PrIaR1meoWVJmcHDOH/OefoXFjWLECGlb+FAsRERERkdpKKZQnOLYOsMya7qnbwJnr7o
jK5UAmTN8H3x0pSNjr+8J9sTCinhL2UvEOgHp9PTNhB1i50iTsAAcPwkcfuTceEREREZEaTj3tnqDlrRC/EHBCiwlgr17/LLlO+PEY/HDMzGMH07t+cQQMVe96zdKokanSblngdELz5mc+R0REREREyk3plCeIHQGX74Ehm6Cn51ao/37n9/T/oD+3/XAb6TmmYFpcJkzfD7OPFiTsDfzggSYwXL3rNU+zZvDDD3DddfD663D11e6OqHJZFkyaBAEBcN55kJLi7ohEREREpJbRkm+UvtR+bXY0/Sj1X6xPjjMHu83Ow+dNo1uHR5hbqHfdbsvrXY8AbyXrUhOsXl2wcoTdDs8+C/fc496YRERERKRG0JJv4lLpOenkOHPMC78m/JTTnfijBccb+ZnK8LH+7olPpFIEBxdsO50QEuK+WERERESkVlJ/qJRK47DGPHDuI9jrXUloq6dpEdMPML3rw+rCA7E1MGGPXwALB8Oa2yH3pLujEXdo1w7efhu6d4d//hNuusndEYmIiIhILaPh8Wh4fGnszTDrrh/KtrBhA6BxXu9645qWrAPknIBZUeDIBGzQ7l7oMt3dUYmIiIiISA2h4fGe6shK2P8VRJ5jCtB5uBynKTI37xhYgA0bXnm964MjwMvm7ggriSPDfAHY7JB1xL3xiIiIiIhIraSk3RWcOWD3OXO7k/vg5/PMeuw7XoLz/wcNhlR+fOW0OwM+jIf47IJ9TfxN73pDP/fFVSX860GHx2DrExDQENpOdXdEIiIiIiJSC2lO+5lkHYMVY2HhIEhaVvRYbjrMOxc+94XPfGHDvWaJqNNJ22USfMsB2CD5tyKHVx9cTYtXW9DwpYbM3TXX5W+ltHKc8HUiPLe/IGH3tsHwSLg/thYk7Kd0mgajsuDyvRDa2t3RiIiIiIhILaSk/Uw2PQB7P4H4n2HxUHA6Co7FzYIjS822lQPbn4fj609/rci+EN7JbPtFQGzRNa7vmHsHe5P3cjjtMP+Y/Q8Xv5HS+SMd/rUP5h83w+EBmvrDQ01gSF1TeK5WsXuDrba9aRERERER8RQaHn8mWcfyes8tyD1hknO8zDG/en9vbythmLx3AAxeA6nbILgF+BRdPirAO8AUebOBv3fVVnfLcsJ3R2BhoWTd2waXRcLAOrUwWRcREREREfEAStrPpMPDcGS5KUTW5TnwKpRM1x8EXZ4389OtXGh3H9TpVPL1vHyhTpdiD71z6TtMmD2BjNwMXhvymuvewxnsSjdz15NyCvY1D4AboyGmtgyFFxERERER8UBK2s+kTmcYfgCwTBXxwmw2aDvFVBlPXAK+ERW6Veu6rVk8dnGFrlEWWU74JgkWJRfs88mbu36hetdFRERERETcTkl7adhswGky2H1fwuZHzXbCAshOhjaTPX4e9M50+CgejhTqXW8RYCrDR/u6Ly4REREREREpoKS9OOkHYdV4yIiHLtNLXpYt6wgmoc+bCb5hCviGQovxOJwO/rfrfwT4BHBRs4tYeWAlcalxDGs9jECfwKp4J3+T6YBZR2BJcsE+HxtcUQ8uCFfvuoiIiIiIiCdR0l6cjfebavGWE369Cq5KNlXEi9PsevjzXUjelLfDZpaGazGe8d+P58NNHwIw/KzhfLvzWwB6N+zN8vHLsVVxb/z2k/BxAhwt1LveKgBuiIEo9a6LiIiIiIh4HCXtxXFm5W1YedXiS1h73TcMhqw367UfXW7mvceOAmDW9ln5zRbuWYgNGxYWKw+u5FjGMeoG1q2891BIpgO+ToJfUwqFbYcrI+H8cI8fyS8iIiIiIlJraZ324nR6CsLag3809HoP7D4Q9w3Mqg+z20Dy5qLtbXYYsBAu/BmG7YAGgwEY0rJgWP05jc/Bykv+u8Z0pU5AnSp5K1tPwrS9RRP2swLhsSZwQR0l7CIiIiIiIp7MZllWCd3ItUNqaiphYWGkpKQQGhpafKOvwiEnBbBDzEVw4bwzXjcuJY7bf7idIN8g3h
72NusPrycuNY4r2lxBiF/IGc+viPS83vVlhZJ1PzuMrAfnhilZFxERERERcadS5aFoeHzp2X0Bm8l2C6/VXoJRX49i9cHVWFjYbXZmjpiZfywtK40rv7iS5QeWM67LOF4b8prL5rhvOWHmrifnFuxrGwjXx0BdH5fcQkRERERERKqAkvbS6vcVrJ8CvnWg26ulOmVb0jYclgOArYlbixx7b8N7LNizAAuLN9a8weiOo+nTuE+Zw3I4HXz828ekZqVydcex/JgSyorUguP+eb3r/dS7LiIiIiIiUu0oaS9s39cQFgKNrgS7V9Fj0f1hyLoyXe7+fvfzwIIHsNvsTO07tcixIN+g/DnuQLmXgLv/5/t5YcULENSJ5w9Hcknb6/KPtQ+CMdEQod51ERERERGRaklJe2GrxkMg0HoSdH+twpe7v9/9XNfxOny9fIkJjily7MbON7I5cTO/7P2F8WePp3NM53LdY1HcaogZByG9OJRxEjC961dHwTmh6l0XERERERGpzpS0F+fg/0qVtMefiOfXfb/So2EPmoY3LbZNbFhssft9vHx45eJXKhIlG9Mgos2rsG8lAK3rtqZDXu96HfWui4iIiIiIVHtK2ovT5OozNkk4kUCHNztwNOMogd6BrL9lPWdFnlUFwcGJXPg8EdakQbPIzowIaoTdmcnkFvXpo951ERERERGRGkNJe2H9voKwUIgZeMamy+OWczTjKADpuenM+3NelSTt69NgZgKkOQr2XVCvLqOjIFy96yIiIiIiIjWKkvbCGgyCEtbHK6xHwx4E+QRxMuckPnYfzmtyXqWGlpYLnyXCurSCfYFecE0U9AxR73qFZGTAH39A69bg5+fuaERERERERPIpaS+nRqGN2HjrRub/OZ++sX3pFN2pUu5jWSZR/ywRThTqXe8SDKOjIVT/ghWTkAA9ekBcnEna16wp9Qc3IiIiIiIilU0pXwW0jGhJy4iWlXb91FwzFH7DCfM6OTMZZ24aU1rUp3eYt3rXXeG770zCDvD77zBvHowc6d6YRERERERE8ihp90CWZYrMfZ4IJ/N61/cm72X+b89iJXxKSpPe/DTmJ0BZe4W1b2/+9PIyf/Ft2rg3HhERERERkUKUtFdA/Il41h5aS+9GvYkMjHTJNVNy4dME2HSiYF+IFzgPvQWH3gYs5u+ez9GMoy67p8ezLEj7HfxjwDfMtdfu2xe+/x5+/hkuvRQ6dHDt9UVERERERCpASXs57U3eS5e3upCSlUK9wHpsvm0z0cHR5b6eZcGqVPgiCdILzV3vHmKKzX1wtCE//2Zhw0aLiBbU8a/jgndRDVgW/HI5HJwN3sEwYAlEnO3ae1x6qfkSERERERHxMErai5GalcqE2RPYnrSd+/rdx3Udr/tbm5/++ImUrBQAktKTWLx3MaM6jCrX/ZJz4JME2HyyYF+Ilyk01zXEvJ7UcxIxwTHsT9nPmE5j8LJ7lete1c6JP03CDuDIgD/fhYg33RuTVJ7MTJg501TxHzUKvPUjSkRERERqN/1GXIxnlz7LV9u+wmk5uf6b6xnYfCD1guoVadOncR+87d7kOnPx8/Kje4PuZb6PZcGKVPgyETKcBft7hpre9aBCebnNZuOq9leV9y1VX/7R4BMGuSfAckBoO3dHVFRaGkyfbv6cOhViY90dUfU2Zgz8979me+VKeO0198YjIiIiIuJmStqLkZmbiS2vyJvTcpLjzPlbm07RnVg5fiVL9i1hUItBtIhoUaZ7HM+BjxNga6He9VBvGBMNnYNPf16uM5ePN31Mek46Y7uMJcg3qEz3rXZ8QmDAL6aHPawttLzF3REVdccd8PHHYLPBkiXw22/ujqh6W7CgYHv+fPfFISIiIiLiIZS0F2Nq36ksi1vGzqM7eejch2gQ0qDYdt0adKNbg25lurZlwbIU+CoJMgv1rvcOhav/0rtenLt+uovXV7+ODRtzds1h7ui5Zbp/tVSnE3R/1d1RFG/nTnDm/U
P+8Yd7Y6kJrrsO3syb/jBmjHtjERERERHxAEraixETHMPKf6w8Yzun5eS++fcx+/fZjGg7gicvfBJbCYunH82Bj+Nhe3rBvvC83vWOJfSuF7Z472IALCyW7V9WupOk8tx3H1x9NeTkwMMPuzua6u+112DECDOn/Zxz3B2NiIiIiIjbKWmvgO93fs8LK14A4OmlT3Nuk3O5uOXFf2tnWfBrCnydBFmFetfPCYOr6kFgGWrKjesyjrvn3Q3ADZ1vqFD84gLDh0NCAmRlQUyMu6Op/ux2uPBCd0chIiIiIuIxlLRXQLYju8TXAEeyzdz1HYV61+t4w/Ux0L4c09Gn9JlC/yb9ycjNoG/jvmW/gLhenVqy/J6IiIiIiFQ5Je0VcEWbKxjdcTQ/7PqBK9pcwdBWQ/OPWRYsSYZZR4r2rvcLg5H1IKACK7aVdR69iIiIiIiIVE9K2ivAx8uHT6785G/7k7LhowT4vVDveoQPXB8N7crYu550Molvd3xL23pt6Rfbr4IRi4iIiIiISHWipN2FLAsWJcM3RyC7UO/6eeEwIhL8i+ldz3HksClhE83Cm1E3sG6RY1m5WfR8tyd7k/cCMOvqWVzR9opKi19EREREREQ8i93dAdQUidnwQhx8kViQsNf1gbsawejo0yfs575/Lj3+04OmrzRlc8LmIsf3JO/JT9i9bF7M3611q0VERERERGoTJe2F3DPvHo5lHCtV21/3/crtP9zO+xs+ZP4xi3/thT8yCo6fHw6PNYU2JQyH35SwiVUHVwGQkZPBp5s/LXK8RZ0WtI1sC5jl5S4767IyvBsRERERERGp7jQ8vpD/W/9/nLCdYOaImSW2i0uJ46KPLsLhHcmMhBAGZu2hWZ3mAET6wI0x0DrwzPdrFt6MYN9gMnIycFgOzq5/dpHjPl4+rPzHSub9OY/WdVvTKbpTud+biIiIiIiIVD9K2gtxWk72p+w/Y7vdx/eSE3Y+1L0M7L4cz0ymOXBBHRgeCX6lHL9QN7Auy29azqebP+Xs+mdzdfur/9Ym1C+Uke1Glu2NiIiIiIiISI2gpL2QAJ8AHuv/2GmPW5bFF7t+YdbxEKKa3E7iyQT8vfzpHdWMfzaGVqXoXf+rjtEdeSb6mQpELSIiIiIiIjWVkvZC9k3eR72IesUec1pw6/Jv+M/ew2DzpkFIA0Z1GMWwegFcHe2Hr6oDiIiIiIiIiIspaS/Ez9uv2P2HsuCDePjhmC/YzF/ZoWMbePzCIZwVpL9CERERERERqRzKOEvgsOCnYzDnqNluGt6UQ2kH4Ph8LgpNV8IuIiIiIiIilUpZ52kcyDS963FZBfsGNurAxOg0fHLqculZlwIw7895zPtzHpe0uoQLm13opmhFRERERESkJrJZlmW5Owh3S01NJSwsjJSUFAKDQ/nxGPzvmOldB7ABgyNgWF3wKTR3fc3BNfR6txc2mw2A9RPW0zmmc9W/AREREREREalWCuehoaGhp21XY8qnvfHGGzRt2hR/f3969erF6tWry3yNA5kwfT/MPlqQsDfwg/tj4Yp6RRN2gC2JW7CwcFpOnJaTbUnbXPBOSnAyDpaNhl+vgrQ/KvdeIiIiIiIi4nY1Imn/4osvmDJlCo899hjr16+nc+fODB48mMTExDJd58UDcCBvOLzdBpfUhYdioWlA8e2HtR5Go9BGgJnvPrjl4FLfKys3i9+P/k6OI6f0Aa4cC/u/gAPfwNJRpT+vpnE64Y8/4MQJd0ciIiIiIiJSqWpE0v7SSy9x8803M27cONq1a8dbb71FYGAg7733Xpmu48zrXW/kBw/EwuWR4F3C31C9oHrsmLiDdRPWsfX2rUQERJTqPkknk2jzRhvOev0sur7dlbSstNIFmBkPlhMsB2QmlO6cmsbphEsugVatIDYWtlXy6AYRERERERE3qvaF6LKzs1m3bh0PPPBA/j673c6AAQNYsWJFsedkZWWRlVVQYS4lJQWA3JOpXBwBA8PBOxtSs0
sXQ8ugluRm5JKakVqq9p9t+Iy98XsB2Bq3lW83fcvlbS4/84ktHoeVN4LTAZ2egNTS3a9G2bIFfvrJbKekwFtvwZNPujcmERERERGRMkrNy+fOVGau2heiO3ToEA0bNmT58uX06dMnf/+9997LkiVLWLVq1d/OmTZtGo8//nhVhikiIiIiIiLyN3FxcTRq1Oi0x6t9T3t5PPDAA0yZMiX/dXJyMk2aNGH//v2EhYW5MTKRypOamkrjxo2Ji4srsTqlSHWm51xqCz3rUhvoOZeazrIs0tLSaNCgQYntqn3SHhkZiZeXFwkJRed4JyQkEBMTU+w5fn5++Pn5/W1/WFiYfiBIjRcaGqrnXGo8PedSW+hZl9pAz7nUZKXpNK72heh8fX3p1q0bCxYsyN/ndDpZsGBBkeHyIiIiIiIiItVNte9pB5gyZQo33ngj3bt3p2fPnrz88sucPHmScePGuTs0ERERERERkXKrEUn7qFGjSEpK4tFHHyU+Pp4uXbrw448/Eh0dXarz/fz8eOyxx4odMi9SU+g5l9pAz7nUFnrWpTbQcy5iVPvq8SIiIiIiIiI1VbWf0y4iIiIiIiJSUylpFxEREREREfFQStpFREREREREPJSSdhEREREREREPVeuT9jfeeIOmTZvi7+9Pr169WL16tbtDEim16dOn06NHD0JCQoiKimL48OHs3LmzSJvMzEwmTpxI3bp1CQ4OZsSIESQkJBRps3//foYOHUpgYCBRUVFMnTqV3NzcqnwrIqX2zDPPYLPZmDx5cv4+PedSUxw8eJAxY8ZQt25dAgIC6NixI2vXrs0/blkWjz76KPXr1ycgIIABAwawa9euItc4duwYo0ePJjQ0lPDwcMaPH8+JEyeq+q2IFMvhcPDII4/QrFkzAgICaNGiBU888QSFa2PrORcpqlYn7V988QVTpkzhscceY/369XTu3JnBgweTmJjo7tBESmXJkiVMnDiRlStXMn/+fHJychg0aBAnT57Mb3PXXXcxe/ZsvvrqK5YsWcKhQ4e48sor8487HA6GDh1KdnY2y5cv58MPP+SDDz7g0UcfdcdbEinRmjVrePvtt+nUqVOR/XrOpSY4fvw4ffv2xcfHh7lz57Jt2zZefPFF6tSpk9/mueee49VXX+Wtt95i1apVBAUFMXjwYDIzM/PbjB49mq1btzJ//nzmzJnDL7/8woQJE9zxlkT+5tlnn2XGjBm8/vrrbN++nWeffZbnnnuO1157Lb+NnnORv7BqsZ49e1oTJ07Mf+1wOKwGDRpY06dPd2NUIuWXmJhoAdaSJUssy7Ks5ORky8fHx/rqq6/y22zfvt0CrBUrVliWZVn/+9//LLvdbsXHx+e3mTFjhhUaGmplZWVV7RsQKUFaWprVqlUra/78+Vb//v2tO++807IsPedSc9x3331Wv379Tnvc6XRaMTEx1vPPP5+/Lzk52fLz87M+++wzy7Isa9u2bRZgrVmzJr/N3LlzLZvNZh08eLDyghcppaFDh1o33XRTkX1XXnmlNXr0aMuy9JyLFKfW9rRnZ2ezbt06BgwYkL/PbrczYMAAVqxY4cbIRMovJSUFgIiICADWrVtHTk5Okee8TZs2xMbG5j/nK1asoGPHjkRHR+e3GTx4MKmpqWzdurUKoxcp2cSJExk6dGiR5xn0nEvN8f3339O9e3euuuoqoqKi6Nq1K//5z3/yj+/Zs4f4+Pgiz3pYWBi9evUq8qyHh4fTvXv3/DYDBgzAbrezatWqqnszIqdxzjnnsGDBAn7//XcANm3axNKlSxkyZAig51ykON7uDsBdjhw5gsPhKPILHEB0dDQ7duxwU1Qi5ed0Opk8eTJ9+/alQ4cOAMTHx+Pr60t4eHiRttHR0cTHx+e3Ke774NQxEU/w+eefs379etasWfO3Y3rOpabYvXs3M2bMYMqUKTz44IOsWbOGf/7zn/j6+nLjjTfmP6vFPcuFn/WoqKgix729vYmIiNCzLh7h/v
vvJzU1lTZt2uDl5YXD4eCpp55i9OjRAHrORYpRa5N2kZpm4sSJbNmyhaVLl7o7FBGXiouL484772T+/Pn4+/u7OxyRSuN0OunevTtPP/00AF27dmXLli289dZb3HjjjW6OTsQ1vvzySz799FNmzpxJ+/bt2bhxI5MnT6ZBgwZ6zkVOo9YOj4+MjMTLy+tv1YUTEhKIiYlxU1Qi5TNp0iTmzJnDokWLaNSoUf7+mJgYsrOzSU5OLtK+8HMeExNT7PfBqWMi7rZu3ToSExM5++yz8fb2xtvbmyVLlvDqq6/i7e1NdHS0nnOpEerXr0+7du2K7Gvbti379+8HCp7Vkn53iYmJ+VtB3dzcXI4dO6ZnXTzC1KlTuf/++7nmmmvo2LEj119/PXfddRfTp08H9JyLFKfWJu2+vr5069aNBQsW5O9zOp0sWLCAPn36uDEykdKzLItJkybxzTffsHDhQpo1a1bkeLdu3fDx8SnynO/cuZP9+/fnP+d9+vRh8+bNRf7nN3/+fEJDQ//2y6OIO1x00UVs3ryZjRs35n91796d0aNH52/rOZeaoG/fvn9btvP333+nSZMmADRr1oyYmJgiz3pqaiqrVq0q8qwnJyezbt26/DYLFy7E6XTSq1evKngXIiVLT0/Hbi+agnh5eeF0OgE95yLFcnclPHf6/PPPLT8/P+uDDz6wtm3bZk2YMMEKDw8vUl1YxJPddtttVlhYmLV48WLr8OHD+V/p6en5bW699VYrNjbWWrhwobV27VqrT58+Vp8+ffKP5+bmWh06dLAGDRpkbdy40frxxx+tevXqWQ888IA73pJIqRSuHm9Zes6lZli9erXl7e1tPfXUU9auXbusTz/91AoMDLQ++eST/DbPPPOMFR4ebn333XfWb7/9Zl1++eVWs2bNrIyMjPw2F198sdW1a1dr1apV1tKlS61WrVpZ1157rTveksjf3HjjjVbDhg2tOXPmWHv27LFmzZplRUZGWvfee29+Gz3nIkXV6qTdsizrtddes2JjYy1fX1+rZ8+e1sqVK90dkkipAcV+vf/++/ltMjIyrNtvv92qU6eOFRgYaF1xxRXW4cOHi1xn79691pAhQ6yAgAArMjLSuvvuu62cnJwqfjcipffXpF3PudQUs2fPtjp06GD5+flZbdq0sd55550ix51Op/XII49Y0dHRlp+fn3XRRRdZO3fuLNLm6NGj1rXXXmsFBwdboaGh1rhx46y0tLSqfBsip5WammrdeeedVmxsrOXv7281b97ceuihh4osv6nnXKQom2VZljt7+kVERERERESkeLV2TruIiIiIiIiIp1PSLiIiIiIiIuKhlLSLiIiIiIiIeCgl7SIiIiIiIiIeSkm7iIiIiIiIiIdS0i4iIiIiIiLioZS0i4iIiIiIiHgoJe0iIiIiIiIiHkpJu4iIiLjEtGnT6NKli7vDAOD8889n8uTJ7g5DRESkwpS0i4iIeJj4+HjuvPNOWrZsib+/P9HR0fTt25cZM2aQnp7u7vDKZdq0adhsthK/ymPx4sXYbDaSk5NdG7CIiIiH8HZ3ACIiIlJg9+7d9O3bl/DwcJ5++mk6duyIn58fmzdv5p133qFhw4ZcdtllxZ6bk5ODj49PFUdcOvfccw+33npr/usePXowYcIEbr755mLbZ2dn4+vrW1XhiYiIeCz1tIuIiHiQ22+/HW9vb9auXcvVV19N27Ztad68OZdffjk//PADl156aX5bm83GjBkzuOyyywgKCuKpp54CYMaMGbRo0QJfX1/OOussPv744/xz9u7di81mY+PGjfn7kpOTsdlsLF68GCjovV6wYAHdu3cnMDCQc845h507dxaJ9ZlnniE6OpqQkBDGjx9PZmbmad9XcHAwMTEx+V9eXl6EhITkv77mmmuYNGkSkydPJjIyksGDB58x1r1793LBBRcAUKdOHWw2G2PHjs1v63Q6uffee4mIiCAmJoZp06aV8V9DRETE/ZS0i4iIeI
ijR48yb948Jk6cSFBQULFt/jqMfNq0aVxxxRVs3ryZm266iW+++YY777yTu+++my1btnDLLbcwbtw4Fi1aVOZ4HnroIV588UXWrl2Lt7c3N910U/6xL7/8kmnTpvH000+zdu1a6tevz5tvvlnmexT24Ycf4uvry7Jly3jrrbfO2L5x48b897//BWDnzp0cPnyYV155pcj1goKCWLVqFc899xz/+te/mD9/foViFBERqWoaHi8iIuIh/vjjDyzL4qyzziqyPzIyMr8Xe+LEiTz77LP5x6677jrGjRuX//raa69l7Nix3H777QBMmTKFlStX8sILL+T3SpfWU089Rf/+/QG4//77GTp0KJmZmfj7+/Pyyy8zfvx4xo8fD8CTTz7Jzz//XGJv+5m0atWK5557Lv/13r17S2zv5eVFREQEAFFRUYSHhxc53qlTJx577LH8a7/++ussWLCAgQMHljtGERGRqqaedhEREQ+3evVqNm7cSPv27cnKyipyrHv37kVeb9++nb59+xbZ17dvX7Zv317m+3bq1Cl/u379+gAkJibm36dXr15F2vfp06fM9yisW7duFTr/rwrHD+Y9nIpfRESkulBPu4iIiIdo2bIlNpvtb3PHmzdvDkBAQMDfzjndMPrTsdvN5/WWZeXvy8nJKbZt4aJ2p4blO53OMt2vLP76XsoSa3H+WpTPZrNVavwiIiKVQT3tIiIiHqJu3boMHDiQ119/nZMnT5brGm3btmXZsmVF9i1btox27doBUK9ePQAOHz6cf7xwobey3GfVqlVF9q1cubLM1ylJaWI9VWHe4XC49N4iIiKeQj3tIiIiHuTNN9+kb9++dO/enWnTptGpUyfsdjtr1qxhx44dZxxCPnXqVK6++mq6du3KgAEDmD17NrNmzeLnn38GTG997969eeaZZ2jWrBmJiYk8/PDDZY7zzjvvZOzYsXTv3p2+ffvy6aefsnXr1vxRAa5QmlibNGmCzWZjzpw5XHLJJQQEBBAcHOyyGERERNxNPe0iIiIepEWLFmzYsIEBAwbwwAMP0LlzZ7p3785rr73GPffcwxNPPFHi+cOHD+eVV17hhRdeoH379rz99tu8//77nH/++flt3nvvPXJzc+nWrRuTJ0/mySefLHOco0aN4pFHHuHee++lW7du7Nu3j9tuu63M1zmTM8XasGFDHn/8ce6//36io6OZNGmSy2MQERFxJ5tVeKKYiIiIiIiIiHgM9bSLiIiIiIiIeCgl7SIiIiIiIiIeSkm7iIiIiIiIiIdS0i4iIiIiIiLioZS0i4iIiIiIiHgoJe0iIiIiIiIiHkpJu4iIiIiIiIiHUtIuIiIiIiIi4qGUtIuIiIiIiIh4KCXtIiIiIiIiIh5KSbuIiIiIiIiIh/p/TtrhhNTVEWcAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "Tester.test(svr_pricer)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c6c44fe4-e4d9-4559-a8ed-d8f97e25b69f", + "metadata": {}, + "outputs": [], + "source": [ + "# And the powerful Random Forest regression\n", + "\n", + "rf_model = RandomForestRegressor(n_estimators=100, random_state=42, n_jobs=8)\n", + "rf_model.fit(X_w2v, prices)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a38812d0-913b-400b-804f-51434d895d05", + "metadata": {}, + "outputs": [], + "source": [ + "def random_forest_pricer(item):\n", + " doc = item.test_prompt()\n", + " doc_vector = document_vector(doc)\n", + " return max(0, rf_model.predict([doc_vector])[0])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "88b51c01-c791-4fdc-8010-00b2e486b8ce", + "metadata": {}, + "outputs": [], + "source": [ + "Tester.test(random_forest_pricer)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bc85b271-4c92-480c-8843-2d7713b0fa57", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.11" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/project/items.py b/project/items.py new file mode 100644 index 0000000..1a6efc0 --- /dev/null +++ b/project/items.py @@ -0,0 +1,83 @@ +from typing import Optional +from transformers import AutoTokenizer +import re +import json + +BASE_MODEL = "meta-llama/Meta-Llama-3.1-8B" + +MIN_TOKENS = 150 # Any less than this, and we don't have enough useful content +MAX_TOKENS = 160 # Truncate after this many tokens. 
Then after adding in prompt text, we will get to around 180 tokens + +MIN_CHARS = 300 +CEILING_CHARS = MAX_TOKENS * 7 + +class Item: + """ + An Item is a cleaned, curated datapoint of a Product with a Price + """ + + tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL, trust_remote_code=True) + PREFIX = "Result is " + QUESTION = "How would you classify this sensor data - normal or anomalous?" + REMOVALS = ['"event": "car_opened"'] + + result: str + category: str + token_count: int = 0 + details: Optional[str] + prompt: Optional[str] = None + include = False + + def __init__(self, data, result): + self.result = result + self.parse(data) + + def scrub(self, stuff): + """ + Clean up the provided text by removing unnecessary characters and whitespace + """ + return stuff + + def parse(self, data): + """ + Parse this datapoint and if it fits within the allowed Token range, + then set include to True + """ + contents = json.dumps(data['input']) + if contents: + contents += '\n' + self.details = contents + if len(contents) > MIN_CHARS: + contents = contents[:CEILING_CHARS] + text = f"{self.scrub(contents)}" + tokens = self.tokenizer.encode(text, add_special_tokens=False) + if len(tokens) > MIN_TOKENS: + tokens = tokens[:MAX_TOKENS] + text = self.tokenizer.decode(tokens) + self.make_prompt(text) + self.include = True + + def make_prompt(self, text): + """ + Set the prompt instance variable to be a prompt appropriate for training + """ + self.prompt = f"{self.QUESTION}\n\n{text}\n\n" + self.prompt += f"{self.PREFIX}{self.result}" + self.token_count = len(self.tokenizer.encode(self.prompt, add_special_tokens=False)) + + def test_prompt(self): + """ + Return a prompt suitable for testing, with the actual price removed + """ + return self.prompt.split(self.PREFIX)[0] + self.PREFIX + + def __repr__(self): + """ + Return a String version of this Item + """ + return f"<${self.result}>" + + + + + \ No newline at end of file diff --git a/project/runner.py b/project/runner.py new 
file mode 100644 index 0000000..e98acff --- /dev/null +++ b/project/runner.py @@ -0,0 +1,102 @@ +# imports + +import os +from dotenv import load_dotenv +from huggingface_hub import login +from datasets import load_dataset, Dataset, DatasetDict +import matplotlib.pyplot as plt +import json + +# environment + +load_dotenv() +os.environ['OPENAI_API_KEY'] = os.getenv('OPENAI_API_KEY', 'your-key-if-not-using-env') +os.environ['ANTHROPIC_API_KEY'] = os.getenv('ANTHROPIC_API_KEY', 'your-key-if-not-using-env') +os.environ['HF_TOKEN'] = os.getenv('HF_TOKEN', 'your-key-if-not-using-env') + +# Log in to HuggingFace + +hf_token = os.environ['HF_TOKEN'] +login(hf_token, add_to_git_credential=True) + +# One more import - the Item class +# If you get an error that you need to agree to Meta's terms when you run this, then follow the link it provides you and follow their instructions +# You should get approved by Meta within minutes +# Any problems - message me or email me! + +from items import Item + +# Load in our dataset +# Open and read the JSON file +with open('/home/ivob/Projects/llm_engineering/project/data/training_data.json', 'r') as file: + dataset = json.load(file) + +# Print the data +print(dataset) + +print(f"Number of Situations: {len(dataset):,}") + +# Investigate a particular datapoint +datapoint = dataset[2] + +# Investigate + +print(datapoint["input"]) +print(datapoint["result"]) +print(datapoint["reason"]) + + +# Plot the distribution of results + +# Count the occurrences of "normal" and "anomalous" results +situation_counts = {"normal": 0, "anomalous": 0} +for entry in dataset: + result = entry.get("result", "unknown").lower() + if result in situation_counts: + situation_counts[result] += 1 + +# Extract keys and values for the bar chart +labels = list(situation_counts.keys()) +counts = list(situation_counts.values()) + +# Plot the bar chart +plt.figure(figsize=(8, 6)) +plt.bar(labels, counts, color=['green', 'red'], alpha=0.7) + +# Add labels and title 
+plt.xlabel("Situation Type", fontsize=12) +plt.ylabel("Count", fontsize=12) +plt.title("Number of Normal vs Anomalous Situations", fontsize=14) + +# Annotate bars with counts +for i, count in enumerate(counts): + plt.text(i, count + 0.2, str(count), ha='center', fontsize=10) + +# Display the plot +plt.tight_layout() +plt.show() + +# So what are the anomalous items?? + +for datapoint in dataset: + try: + result = datapoint["result"] + if result == "anomalous": + print(datapoint['input']) + except ValueError as e: + pass + +# Create an Item object for each with a result + +items = [] +for datapoint in dataset: + try: + result = datapoint["result"] + if result == 'normal' or result == 'anomalous': + item = Item(datapoint, result) + if item.include: + items.append(item) + except ValueError as e: + pass + +print(f"There are {len(items):,} items") diff --git a/project/testing.py b/project/testing.py new file mode 100644 index 0000000..38916a8 --- /dev/null +++ b/project/testing.py @@ -0,0 +1,97 @@ +import math +import matplotlib.pyplot as plt + +GREEN = "\033[92m" +YELLOW = "\033[93m" +RED = "\033[91m" +RESET = "\033[0m" +COLOR_MAP = {"red":RED, "orange": YELLOW, "green": GREEN} + +class Tester: + + def __init__(self, predictor, data, title=None, size=250): + self.predictor = predictor + self.data = data + self.title = title or predictor.__name__.replace("_", " ").title() + self.size = size + self.guesses = [] + self.truths = [] + self.errors = [] + self.colors = [] + + def color_for(self, error, truth): + if error == truth: + return "green" + else: + return "red" + + def run_datapoint(self, i): + datapoint = self.data[i] + guess = self.predictor(datapoint) + truth = datapoint.price + error = guess == truth + color = self.color_for(error, truth) + title = datapoint.title if len(datapoint.title) <= 40 else datapoint.title[:40]+"..." 
+ self.guesses.append(guess) + self.truths.append(truth) + self.errors.append(error) + self.colors.append(color) + print(f"{COLOR_MAP[color]}{i+1}: Guess: ${guess:,.2f} Truth: ${truth:,.2f} Error: ${error:,.2f} SLE: {sle:,.2f} Item: {title}{RESET}") + + def chart(self, title): + actual = self.truths + predicted = self.guesses + + # Get unique classes + classes = list(set(actual + predicted)) # Union of unique classes in actual and predicted + + # Initialize the confusion matrix as a dictionary + confusion_matrix = {true: {pred: 0 for pred in classes} for true in classes} + + # Populate the confusion matrix + for a, p in zip(actual, predicted): + confusion_matrix[a][p] += 1 + + # Convert the confusion matrix into a 2D list for visualization + matrix = [[confusion_matrix[true][pred] for pred in classes] for true in classes] + + # Plot the confusion matrix + plt.figure(figsize=(8, 6)) + plt.imshow(matrix, interpolation='nearest', cmap=plt.cm.Blues) + plt.title(title) + plt.colorbar() + + # Add labels + tick_marks = range(len(classes)) + plt.xticks(tick_marks, classes) + plt.yticks(tick_marks, classes) + plt.ylabel('Actual Label') + plt.xlabel('Predicted Label') + + # Add text annotations + for i in range(len(classes)): + for j in range(len(classes)): + plt.text(j, i, matrix[i][j], + horizontalalignment="center", + color="white" if matrix[i][j] > max(max(row) for row in matrix) / 2 else "black") + + plt.tight_layout() + plt.show() + + + def report(self): + average_error = sum(self.errors) / self.size + rmsle = math.sqrt(sum(self.sles) / self.size) + hits = sum(1 for color in self.colors if color=="green") + title = f"{self.title} Error=${average_error:,.2f} RMSLE={rmsle:,.2f} Hits={hits/self.size*100:.1f}%" + self.chart(title) + + def run(self): + self.error = 0 + for i in range(self.size): + self.run_datapoint(i) + self.report() + + @classmethod + def test(cls, function, data): + cls(function, data).run() \ No newline at end of file diff --git 
a/project/utils/data_curator.py b/project/utils/data_curator.py new file mode 100644 index 0000000..30d87d0 --- /dev/null +++ b/project/utils/data_curator.py @@ -0,0 +1,71 @@ +import json +import time + +# Usage +input_file = '/home/ivob/Projects/llm_engineering/project/data/raw_data.json' +output_file = '/home/ivob/Projects/llm_engineering/project/data/transformed_data.json' + +events = [] + +def read_events(): + with open(input_file, 'r') as f: + events = [json.loads(line) for line in f] + return events + +def transform_event(event, base_timestamp=None): + if base_timestamp is None: + base_timestamp = int(time.time()) + + if 'data' in event: + #only run if array + if isinstance(event['data'], list): + # Extract node ID, which is the first element in the data list + node_id = event['data'][0] + + # Extract the specific attribute and its value + attribute_data = event['data'][1] + attribute_id = list(attribute_data.keys())[0] + attribute_type = list(attribute_data[attribute_id].keys())[0] + + # Determine the value based on attribute type + if attribute_type == 'TemperatureMeasurement': + value = attribute_data[attribute_id][attribute_type]['MeasuredValue'] + elif attribute_type == 'RelativeHumidityMeasurement': + value = attribute_data[attribute_id][attribute_type]['MeasuredValue'] + elif attribute_type == 'OnOff': + value = attribute_data[attribute_id][attribute_type]['OnOff'] + elif attribute_type == 'OccupancySensing': + value = attribute_data[attribute_id][attribute_type]['Occupancy'] + else: + value = None + + # Generate output dictionary + output = { + 'timestamp': base_timestamp + (hash(json.dumps(event)) % 1000), + 'room': 'kitchen', + 'nodeId': node_id + } + + # Add specific attribute to output + if attribute_type == 'TemperatureMeasurement': + output['temperature'] = value + elif attribute_type == 'RelativeHumidityMeasurement': + output['humidity'] = value + elif attribute_type == 'OnOff': + output['onOff'] = value + elif attribute_type == 
'OccupancySensing': + output['occupancy'] = value + else: + output = None + else: + output = None + + return output + +# Sample usage +events = read_events() + +transformed_events = [transform_event(event) for event in events] +for event in transformed_events: + if event is not None: + print(json.dumps(event)) \ No newline at end of file diff --git a/project/utils/data_generator.py b/project/utils/data_generator.py new file mode 100644 index 0000000..bc6ce5b --- /dev/null +++ b/project/utils/data_generator.py @@ -0,0 +1,77 @@ +import random +import json +from datetime import datetime, timedelta + +def generate_iot_data(num_entries=100): + rooms = ['livingroom', 'hall', 'bedroom', 'bathroom', 'kitchen', 'garage', 'study'] + node_ids = [1, 2, 3] + custom_events = [ + {"type": "fridge_opened", "room": "kitchen"}, + {"type": "kitchen_light", "room": "kitchen"}, + {"type": "pillbox_opened", "room": "bedroom"} + ] + + base_timestamp = int(datetime.now().timestamp()) + data_entries = [] + + for _ in range(num_entries): + # Randomly choose between standard sensor data and custom events + if random.random() < 0.7: # 70% chance of standard sensor data + room = random.choice(rooms) + node_id = random.choice(node_ids) + timestamp = base_timestamp - random.randint(0, 3600) + + entry_type = random.choice(['temperature', 'occupancy', 'onOff', 'humidity']) + + if entry_type == 'temperature': + value = random.randint(1500, 3000) + entry = { + "timestamp": timestamp, + "room": room, + "nodeId": node_id, + "temperature": value + } + elif entry_type == 'occupancy': + value = random.randint(0, 1) + entry = { + "timestamp": timestamp, + "room": room, + "nodeId": node_id, + "occupancy": value + } + elif entry_type == 'onOff': + value = random.choice([True, False]) + entry = { + "timestamp": timestamp, + "room": room, + "nodeId": node_id, + "onOff": value + } + elif entry_type == 'humidity': + value = random.randint(3000, 7000) + entry = { + "timestamp": timestamp, + "room": room, + 
"nodeId": node_id, + "humidity": value + } + else: # 30% chance of custom event + event = random.choice(custom_events) + timestamp = base_timestamp - random.randint(0, 3600) + + entry = { + "timestamp": timestamp, + "room": event["room"], + "event": event["type"] + } + + data_entries.append(entry) + + return data_entries + +# Generate data +iot_data = generate_iot_data(100) + +# Print as JSON +for entry in iot_data: + print(json.dumps(entry)) \ No newline at end of file